# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
try:
import unittest2 as unittest
except ImportError:
import unittest
import binascii
import dns.exception
import dns.flags
import dns.message
import dns.name
import dns.rdataclass
import dns.rdatatype
import dns.rrset
from dns._compat import xrange
query_text = """id 1234
opcode QUERY
rcode NOERROR
flags RD
edns 0
eflags DO
payload 4096
;QUESTION
wwww.dnspython.org. IN A
;ANSWER
;AUTHORITY
;ADDITIONAL"""
goodhex = b'04d201000001000000000001047777777709646e73707974686f6e' \
b'036f726700000100010000291000000080000000'
goodwire = binascii.unhexlify(goodhex)
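# For reference, goodwire decodes field-by-field (standard DNS wire format):
#   04d2                          id 1234
#   0100                          flags: RD
#   0001 0000 0000 0001           qd/an/ns/ar counts
#   04'wwww' 09'dnspython' 03'org' 00   question name
#   0001 0001                     qtype A, qclass IN
#   00 0029 1000 00 00 8000 0000  OPT RR: root name, type 41 (OPT),
#                                 payload 4096, DO bit set in the TTL field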
answer_text = """id 1234
opcode QUERY
rcode NOERROR
flags QR AA RD
;QUESTION
dnspython.org. IN SOA
;ANSWER
dnspython.org. 3600 IN SOA woof.dnspython.org. hostmaster.dnspython.org. 2003052700 3600 1800 604800 3600
;AUTHORITY
dnspython.org. 3600 IN NS ns1.staff.nominum.org.
dnspython.org. 3600 IN NS ns2.staff.nominum.org.
dnspython.org. 3600 IN NS woof.play-bow.org.
;ADDITIONAL
woof.play-bow.org. 3600 IN A 204.152.186.150
"""
goodhex2 = '04d2 8500 0001 0001 0003 0001' \
'09646e73707974686f6e036f726700 0006 0001' \
'c00c 0006 0001 00000e10 0028 ' \
'04776f6f66c00c 0a686f73746d6173746572c00c' \
'7764289c 00000e10 00000708 00093a80 00000e10' \
'c00c 0002 0001 00000e10 0014' \
'036e7331057374616666076e6f6d696e756dc016' \
'c00c 0002 0001 00000e10 0006 036e7332c063' \
'c00c 0002 0001 00000e10 0010 04776f6f6608706c61792d626f77c016' \
'c091 0001 0001 00000e10 0004 cc98ba96'
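# goodhex2 is grouped by field: the header (id 1234, flags QR AA RD, counts
# 1/1/3/1), the SOA question, the compressed SOA answer, three NS authority
# records, and the additional A record (cc98ba96 is 204.152.186.150);
# 0xc0-prefixed pairs are DNS name-compression pointers.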
goodwire2 = binascii.unhexlify(goodhex2.replace(' ', '').encode())
query_text_2 = """id 1234
opcode QUERY
rcode 4095
flags RD
edns 0
eflags DO
payload 4096
;QUESTION
wwww.dnspython.org. IN A
;ANSWER
;AUTHORITY
;ADDITIONAL"""
goodhex3 = b'04d2010f0001000000000001047777777709646e73707974686f6e' \
b'036f726700000100010000291000ff0080000000'
goodwire3 = binascii.unhexlify(goodhex3)
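# In goodhex3 the extended rcode 4095 (0xfff) is split per EDNS: the low
# 4 bits sit in the header flags word (010f) and the high 8 bits in the
# first octet of the OPT record's TTL field (ff).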
class MessageTestCase(unittest.TestCase):
def test_comparison_eq1(self):
q1 = dns.message.from_text(query_text)
q2 = dns.message.from_text(query_text)
self.failUnless(q1 == q2)
def test_comparison_ne1(self):
q1 = dns.message.from_text(query_text)
q2 = dns.message.from_text(query_text)
q2.id = 10
self.failUnless(q1 != q2)
def test_comparison_ne2(self):
q1 = dns.message.from_text(query_text)
q2 = dns.message.from_text(query_text)
q2.question = []
self.failUnless(q1 != q2)
def test_comparison_ne3(self):
q1 = dns.message.from_text(query_text)
self.failUnless(q1 != 1)
def test_EDNS_to_wire1(self):
q = dns.message.from_text(query_text)
w = q.to_wire()
self.failUnless(w == goodwire)
def test_EDNS_from_wire1(self):
m = dns.message.from_wire(goodwire)
self.assertEqual(str(m), query_text)
def test_EDNS_to_wire2(self):
q = dns.message.from_text(query_text_2)
w = q.to_wire()
self.failUnless(w == goodwire3)
def test_EDNS_from_wire2(self):
m = dns.message.from_wire(goodwire3)
self.failUnless(str(m) == query_text_2)
def test_TooBig(self):
def bad():
q = dns.message.from_text(query_text)
for i in xrange(0, 25):
rrset = dns.rrset.from_text('foo%d.' % i, 3600,
dns.rdataclass.IN,
dns.rdatatype.A,
'10.0.0.%d' % i)
q.additional.append(rrset)
q.to_wire(max_size=512)
self.failUnlessRaises(dns.exception.TooBig, bad)
def test_answer1(self):
a = dns.message.from_text(answer_text)
wire = a.to_wire(want_shuffle=False)
self.failUnless(wire == goodwire2)
def test_TrailingJunk(self):
def bad():
badwire = goodwire + b'\x00'
dns.message.from_wire(badwire)
self.failUnlessRaises(dns.message.TrailingJunk, bad)
def test_ShortHeader(self):
def bad():
badwire = b'\x00' * 11
dns.message.from_wire(badwire)
self.failUnlessRaises(dns.message.ShortHeader, bad)
def test_RespondingToResponse(self):
def bad():
q = dns.message.make_query('foo', 'A')
r1 = dns.message.make_response(q)
dns.message.make_response(r1)
self.failUnlessRaises(dns.exception.FormError, bad)
def test_ExtendedRcodeSetting(self):
m = dns.message.make_query('foo', 'A')
m.set_rcode(4095)
self.failUnless(m.rcode() == 4095)
m.set_rcode(2)
self.failUnless(m.rcode() == 2)
def test_EDNSVersionCoherence(self):
m = dns.message.make_query('foo', 'A')
m.use_edns(1)
self.failUnless((m.ednsflags >> 16) & 0xFF == 1)
def test_SettingNoEDNSOptionsImpliesNoEDNS(self):
m = dns.message.make_query('foo', 'A')
self.failUnless(m.edns == -1)
def test_SettingEDNSFlagsImpliesEDNS(self):
m = dns.message.make_query('foo', 'A', ednsflags=dns.flags.DO)
self.failUnless(m.edns == 0)
def test_SettingEDNSPayloadImpliesEDNS(self):
m = dns.message.make_query('foo', 'A', payload=4096)
self.failUnless(m.edns == 0)
def test_SettingEDNSRequestPayloadImpliesEDNS(self):
m = dns.message.make_query('foo', 'A', request_payload=4096)
self.failUnless(m.edns == 0)
def test_SettingOptionsImpliesEDNS(self):
m = dns.message.make_query('foo', 'A', options=[])
self.failUnless(m.edns == 0)
def test_FindRRset(self):
a = dns.message.from_text(answer_text)
n = dns.name.from_text('dnspython.org.')
rrs1 = a.find_rrset(a.answer, n, dns.rdataclass.IN, dns.rdatatype.SOA)
rrs2 = a.find_rrset(dns.message.ANSWER, n, dns.rdataclass.IN,
dns.rdatatype.SOA)
self.failUnless(rrs1 == rrs2)
if __name__ == '__main__':
unittest.main()
|
|
# coding: utf-8
from django.conf import settings
from django.contrib.contenttypes.generic import generic_inlineformset_factory
from django.test import TestCase
# local test models
from models import Episode, EpisodeExtra, EpisodeMaxNum, EpisodeExclude, \
Media, EpisodePermanent, MediaPermanentInline, Category
class GenericAdminViewTest(TestCase):
fixtures = ['users.xml']
def setUp(self):
# Set TEMPLATE_DEBUG to True to ensure that {% include %} raises
# exceptions, since that is how inlines are rendered; #9498 will
# bubble up if it is an issue.
self.original_template_debug = settings.TEMPLATE_DEBUG
settings.TEMPLATE_DEBUG = True
self.client.login(username='super', password='secret')
# Can't load content via a fixture (since the GenericForeignKey
# relies on content type IDs, which will vary depending on what
# other tests have been run), thus we do it here.
e = Episode.objects.create(name='This Week in Django')
self.episode_pk = e.pk
m = Media(content_object=e, url='http://example.com/podcast.mp3')
m.save()
self.mp3_media_pk = m.pk
m = Media(content_object=e, url='http://example.com/logo.png')
m.save()
self.png_media_pk = m.pk
def tearDown(self):
self.client.logout()
settings.TEMPLATE_DEBUG = self.original_template_debug
def testBasicAddGet(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episode/add/')
self.assertEqual(response.status_code, 200)
def testBasicEditGet(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episode/%d/' % self.episode_pk)
self.assertEqual(response.status_code, 200)
def testBasicAddPost(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": u"This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": u"1",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": u"0",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": u"0",
}
response = self.client.post('/generic_inline_admin/admin/generic_inline_admin/episode/add/', post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def testBasicEditPost(self):
"""
A smoke test to ensure POST on edit_view works.
"""
post_data = {
"name": u"This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": u"3",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": u"2",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": u"0",
"generic_inline_admin-media-content_type-object_id-0-id": u"%d" % self.mp3_media_pk,
"generic_inline_admin-media-content_type-object_id-0-url": u"http://example.com/podcast.mp3",
"generic_inline_admin-media-content_type-object_id-1-id": u"%d" % self.png_media_pk,
"generic_inline_admin-media-content_type-object_id-1-url": u"http://example.com/logo.png",
"generic_inline_admin-media-content_type-object_id-2-id": u"",
"generic_inline_admin-media-content_type-object_id-2-url": u"",
}
url = '/generic_inline_admin/admin/generic_inline_admin/episode/%d/' % self.episode_pk
response = self.client.post(url, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def testGenericInlineFormset(self):
EpisodeMediaFormSet = generic_inlineformset_factory(Media, can_delete=False, extra=3)
e = Episode.objects.get(name='This Week in Django')
# Works with no queryset
formset = EpisodeMediaFormSet(instance=e)
self.assertEquals(len(formset.forms), 5)
self.assertEquals(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="text" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/podcast.mp3" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.mp3_media_pk)
self.assertEquals(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="text" name="generic_inline_admin-media-content_type-object_id-1-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>' % self.png_media_pk)
self.assertEquals(formset.forms[2].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-2-url" type="text" name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>')
# A queryset can be used to alter display ordering
formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.order_by('url'))
self.assertEquals(len(formset.forms), 5)
self.assertEquals(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="text" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.png_media_pk)
self.assertEquals(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="text" name="generic_inline_admin-media-content_type-object_id-1-url" value="http://example.com/podcast.mp3" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>' % self.mp3_media_pk)
self.assertEquals(formset.forms[2].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-2-url" type="text" name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>')
# Works with a queryset that omits items
formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.filter(url__endswith=".png"))
self.assertEquals(len(formset.forms), 4)
self.assertEquals(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="text" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.png_media_pk)
self.assertEquals(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="text" name="generic_inline_admin-media-content_type-object_id-1-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>')
def testGenericInlineFormsetFactory(self):
# Regression test for #10522.
inline_formset = generic_inlineformset_factory(Media,
exclude=('url',))
# Regression test for #12340.
e = Episode.objects.get(name='This Week in Django')
formset = inline_formset(instance=e)
self.assertTrue(formset.get_queryset().ordered)
class GenericInlineAdminParametersTest(TestCase):
fixtures = ['users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def _create_object(self, model):
"""
Create a model with an attached Media object via GFK. We can't
load content via a fixture (since the GenericForeignKey relies on
content type IDs, which will vary depending on what other tests
have been run), thus we do it here.
"""
e = model.objects.create(name='This Week in Django')
Media.objects.create(content_object=e, url='http://example.com/podcast.mp3')
return e
def testNoParam(self):
"""
With one initial form, extra (default) at 3, there should be 4 forms.
"""
e = self._create_object(Episode)
response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episode/%s/' % e.pk)
formset = response.context['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 4)
self.assertEqual(formset.initial_form_count(), 1)
def testExtraParam(self):
"""
With extra=0, there should be one form.
"""
e = self._create_object(EpisodeExtra)
response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episodeextra/%s/' % e.pk)
formset = response.context['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 1)
self.assertEqual(formset.initial_form_count(), 1)
def testMaxNumParam(self):
"""
With extra=5 and max_num=2, there should be only 2 forms.
"""
e = self._create_object(EpisodeMaxNum)
inline_form_data = '<input type="hidden" name="generic_inline_admin-media-content_type-object_id-TOTAL_FORMS" value="2" id="id_generic_inline_admin-media-content_type-object_id-TOTAL_FORMS" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-INITIAL_FORMS" value="1" id="id_generic_inline_admin-media-content_type-object_id-INITIAL_FORMS" />'
response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episodemaxnum/%s/' % e.pk)
formset = response.context['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 2)
self.assertEqual(formset.initial_form_count(), 1)
def testExcludeParam(self):
"""
Generic inline formsets should respect the exclude parameter.
"""
e = self._create_object(EpisodeExclude)
response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episodeexclude/%s/' % e.pk)
formset = response.context['inline_admin_formsets'][0].formset
self.assertFalse('url' in formset.forms[0].fields, 'The formset should exclude the "url" field.')
class GenericInlineAdminWithUniqueTogetherTest(TestCase):
fixtures = ['users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def testAdd(self):
category_id = Category.objects.create(name='male').pk
post_data = {
"name": u"John Doe",
# inline data
"generic_inline_admin-phonenumber-content_type-object_id-TOTAL_FORMS": u"1",
"generic_inline_admin-phonenumber-content_type-object_id-INITIAL_FORMS": u"0",
"generic_inline_admin-phonenumber-content_type-object_id-MAX_NUM_FORMS": u"0",
"generic_inline_admin-phonenumber-content_type-object_id-0-id": "",
"generic_inline_admin-phonenumber-content_type-object_id-0-phone_number": "555-555-5555",
"generic_inline_admin-phonenumber-content_type-object_id-0-category": "%s" % category_id,
}
response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/contact/add/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/generic_inline_admin/admin/generic_inline_admin/contact/add/', post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
class NoInlineDeletionTest(TestCase):
def test_no_deletion(self):
fake_site = object()
inline = MediaPermanentInline(EpisodePermanent, fake_site)
fake_request = object()
formset = inline.get_formset(fake_request)
self.assertFalse(formset.can_delete)
|
|
"""Maintains the ball count for a ball device."""
import asyncio
from typing import Optional
from mpf.devices.ball_device.physical_ball_counter import PhysicalBallCounter, EjectTracker
from mpf.devices.ball_device.entrance_switch_counter import EntranceSwitchCounter
from mpf.devices.ball_device.switch_counter import SwitchCounter
from mpf.core.utility_functions import Util
from mpf.devices.ball_device.ball_device_state_handler import BallDeviceStateHandler
class BallCountHandler(BallDeviceStateHandler):
"""Handles the ball count in the device."""
__slots__ = ["_is_counting", "_count_valid", "_revalidate", "_eject_started", "_eject_ended", "_has_balls",
"_ball_count", "_ball_count_changed_futures", "counter"]
def __init__(self, ball_device):
"""Initialise ball count handler."""
super().__init__(ball_device)
# inputs
self._is_counting = asyncio.Lock()
self._count_valid = asyncio.Event()
self._revalidate = asyncio.Event()
self._eject_started = asyncio.Event()
self._eject_ended = asyncio.Event()
self._has_balls = asyncio.Event()
self._ball_count = 0
self._ball_count_changed_futures = []
self.counter = None # type: Optional[PhysicalBallCounter]
def wait_for_ball_count_changed(self):
"""Wait until ball count changed."""
future = asyncio.Future()
self._ball_count_changed_futures.append(future)
return future
def stop(self):
"""Stop counter."""
super().stop()
if self.counter:
self.counter.stop()
self.counter = None
@property
def handled_balls(self):
"""Return balls which are already handled."""
return self._ball_count
def _set_ball_count(self, count):
self._ball_count = count
# mirror variable at ball device for monitor
self.ball_device.counted_balls = count
if self._ball_count > 0:
self._has_balls.set()
else:
self._has_balls.clear()
self.machine.events.post("balldevice_{}_ball_count_changed".format(self.ball_device.name), balls=count)
'''event: balldevice_(name)_ball_count_changed
config_section: ball_devices
class_label: ball_device
desc: The ball count for device (name) just changed.
This event may also be called without a change in some circumstances.
args:
balls: The number of new balls in this device.
'''
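# A minimal sketch of consuming this event elsewhere in MPF (the device
# name "trough" and the handler are placeholders; add_handler is MPF's
# standard event subscription API):
#   self.machine.events.add_handler(
#       "balldevice_trough_ball_count_changed", self._on_count_changed)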
for future in self._ball_count_changed_futures:
if not future.done():
future.set_result(count)
# reset futures
self._ball_count_changed_futures = []
async def initialise(self):
"""Initialise handler."""
counter_config = self.ball_device.config.get("counter", {})
if counter_config:
counter_class = Util.string_to_class(counter_config["class"])
elif self.ball_device.config.get('ball_switches'):
counter_class = SwitchCounter
else:
counter_class = EntranceSwitchCounter
self.counter = counter_class(self.ball_device, counter_config)
self._ball_count = await self.counter.count_balls()
# on start try to reorder balls if count is unstable
if self.counter.is_count_unreliable():
self.info_log("BCH: Count is unstable. Trying to reorder balls.")
await self.ball_device.ejector.reorder_balls()
self.info_log("BCH: Repulse done. Waiting for balls to settle.")
# recount
self._ball_count = await self.counter.count_balls()
self.info_log("BCH: Initial count: %s", self._ball_count)
if self._ball_count > 0:
self._has_balls.set()
self.ball_device.counted_balls = self._ball_count
await super().initialise()
self._count_valid.set()
@property
def has_ball(self) -> bool:
"""Return true if the device has at least one ball."""
return self._ball_count > 0
@property
def is_full(self) -> bool:
"""Return true if the device is full."""
if not self.counter:
raise asyncio.CancelledError
return self.counter.capacity - self._ball_count <= 0
async def wait_for_ball(self):
"""Wait until the device has a ball."""
if self.has_ball:
self.debug_log("We have %s balls.", self._ball_count)
return
self.debug_log("No ball found. Waiting for balls.")
# wait until we have more than 0 balls
if not self.counter:
raise asyncio.CancelledError
ball_changes = asyncio.ensure_future(self.counter.wait_for_ball_count_changes(0))
new_balls = await ball_changes
# update count
old_ball_count = self._ball_count
self._ball_count = new_balls
if new_balls > old_ball_count:
self.debug_log("BCH: Found %s new balls", new_balls - old_ball_count)
# handle new balls via incoming balls handler
for _ in range(new_balls - old_ball_count):
await self.ball_device.incoming_balls_handler.ball_arrived()
self._set_ball_count(new_balls)
self.debug_log("A ball arrived. Progressing.")
# pylint: disable-msg=inconsistent-return-statements
async def wait_for_ready_to_receive(self, source):
"""Wait until this device is ready to receive a ball."""
while True:
if not self.counter:
raise asyncio.CancelledError
free_space = self.counter.capacity - self._ball_count
incoming_balls = self.ball_device.incoming_balls_handler.get_num_incoming_balls()
if free_space <= incoming_balls:
self.debug_log(
"Not ready to receive from %s. Not enough space. "
"Free space %s (Capacity: %s, Balls: %s), incoming_balls: %s",
source, free_space, self.counter.capacity, self._ball_count,
incoming_balls)
await self.wait_for_ball_count_changed()
continue
if not self.counter:
raise asyncio.CancelledError
if not self.counter.is_ready_to_receive:
self.debug_log(
"Not ready to receive from %s. Waiting on counter to become ready. "
"Free space %s (Capacity: %s, Balls: %s), incoming_balls: %s",
source, free_space, self.counter.capacity, self._ball_count,
incoming_balls)
# wait for the counter to be ready
await self.counter.wait_for_ready_to_receive()
continue
# wait until any eject conditions have passed which would break on an incoming ball
if not self.ball_device.outgoing_balls_handler.is_ready_to_receive:
self.debug_log(
"Not ready to receive from %s. Target is currently ejecting. "
"Free space %s (Capacity: %s, Balls: %s), incoming_balls: %s",
source, free_space, self.counter.capacity, self._ball_count,
incoming_balls)
await self.ball_device.outgoing_balls_handler.wait_for_ready_to_receive()
continue
if not self.counter:
raise asyncio.CancelledError
self.debug_log("Ready to receive from %s. Free space %s (Capacity: %s, Balls: %s), incoming_balls: %s",
source, free_space, self.counter.capacity, self._ball_count,
incoming_balls)
return True
async def start_eject(self, already_left=False) -> EjectTracker:
"""Start eject."""
await self.ball_device.incoming_balls_handler.start_eject()
await self._is_counting.acquire()
self._eject_started.set()
self.debug_log("Entered eject mode.")
eject_process = EjectTracker(self, already_left)
if already_left:
self._set_ball_count(self._ball_count + 1)
await eject_process.will_eject()
return eject_process
async def end_eject(self, eject_process: EjectTracker, ball_left):
"""End eject."""
eject_process.cancel()
self.debug_log("Exited eject mode. Eject success: %s", ball_left)
if ball_left:
self._set_ball_count(self._ball_count - 1)
self._eject_started.clear()
self._is_counting.release()
self.ball_device.incoming_balls_handler.end_eject()
async def _run(self):
if not self.counter:
raise asyncio.CancelledError
changes = self.counter.register_change_stream()
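# Main loop: wake whenever the counter reports a change, a revalidation
# is requested, or an eject starts, then recount under the counting lock.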
while True:
# wait for ball changes
ball_changes = asyncio.ensure_future(changes.get())
revalidate_future = asyncio.ensure_future(self._revalidate.wait())
await Util.first([ball_changes, revalidate_future, self._eject_started.wait()])
self._revalidate.clear()
# get lock and update count
await self._is_counting.acquire()
if not self.counter:
raise asyncio.CancelledError
new_balls = await self.counter.count_balls()
# try to re-order the device if count is unstable
if not self.counter:
raise asyncio.CancelledError
if self.counter.is_count_unreliable():
self.info_log("BCH: Count is unstable. Trying to reorder balls.")
await self.ball_device.ejector.reorder_balls()
self.info_log("BCH: Repulse done. Waiting for balls to settle.")
new_balls = await self.counter.count_balls()
self.debug_log("BCH: Counting. New count: %s Old count: %s", new_balls, self._ball_count)
# when jammed do not trust other switches except the jam. keep old count
if not self.counter:
raise asyncio.CancelledError
if not self.counter.is_count_unreliable():
# otherwise handle balls
old_ball_count = self._ball_count
if new_balls > old_ball_count:
self.debug_log("BCH: Found %s new balls", new_balls - old_ball_count)
self._set_ball_count(new_balls)
# handle new balls via incoming balls handler
for _ in range(new_balls - old_ball_count):
await self.ball_device.incoming_balls_handler.ball_arrived()
elif new_balls < old_ball_count:
await self._handle_missing_balls(new_balls, old_ball_count - new_balls)
self._is_counting.release()
self._count_valid.set()
async def _handle_missing_balls(self, new_balls, missing_balls):
if self.ball_device.outgoing_balls_handler.is_idle:
if self.ball_device.config['mechanical_eject']:
self.debug_log("BCH: Lost %s balls. Assuming mechanical eject.", missing_balls)
self._set_ball_count(new_balls)
await self.ball_device.handle_mechanial_eject_during_idle()
else:
try:
if not self.counter:
raise asyncio.CancelledError
await asyncio.wait_for(self.counter.wait_for_ball_activity(),
timeout=self.ball_device.config['idle_missing_ball_timeout'])
except asyncio.TimeoutError:
self.debug_log("BCH: Lost %s balls", missing_balls)
self._set_ball_count(new_balls)
for _ in range(missing_balls):
await self.ball_device.lost_idle_ball()
else:
self._revalidate.set()
else:
self.debug_log("Lost ball %s balls between ejects. Ignoring.", missing_balls)
async def wait_for_count_is_valid(self):
"""Wait until count is valid."""
self._count_valid.clear()
self._revalidate.set()
# wait for ball_counter to become ready
await self._count_valid.wait()
async def entrance_during_eject(self):
"""Received an entrance during eject."""
await self.ball_device.incoming_balls_handler.ball_arrived()
self._set_ball_count(self._ball_count + 1)
|
|
"""Tests for certbot._internal.cli."""
import argparse
import copy
from importlib import reload as reload_module
import io
import tempfile
import unittest
from acme import challenges
from certbot import errors
from certbot._internal import cli
from certbot._internal import constants
from certbot._internal.plugins import disco
from certbot.compat import filesystem
from certbot.compat import os
import certbot.tests.util as test_util
from certbot.tests.util import TempDirTestCase
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock
PLUGINS = disco.PluginsRegistry.find_all()
class TestReadFile(TempDirTestCase):
"""Test cli.read_file"""
def test_read_file(self):
curr_dir = os.getcwd()
try:
# On Windows current directory may be on a different drive than self.tempdir.
# However a relative path between two different drives is invalid. So we move to
# self.tempdir to ensure that we stay on the same drive.
os.chdir(self.tempdir)
# The read-only filesystem introduced with macOS Catalina can break
# code using relative paths below. See
# https://bugs.python.org/issue38295 for another example of this.
# Eliminating any possible symlinks in self.tempdir before passing
# it to os.path.relpath solves the problem. This is done by calling
# filesystem.realpath which removes any symlinks in the path on
# POSIX systems.
real_path = filesystem.realpath(os.path.join(self.tempdir, 'foo'))
relative_path = os.path.relpath(real_path)
self.assertRaises(
argparse.ArgumentTypeError, cli.read_file, relative_path)
test_contents = b'bar\n'
with open(relative_path, 'wb') as f:
f.write(test_contents)
path, contents = cli.read_file(relative_path)
self.assertEqual(path, os.path.abspath(path))
self.assertEqual(contents, test_contents)
finally:
os.chdir(curr_dir)
class FlagDefaultTest(unittest.TestCase):
"""Tests cli.flag_default"""
def test_default_directories(self):
if os.name != 'nt':
self.assertEqual(cli.flag_default('config_dir'), '/etc/letsencrypt')
self.assertEqual(cli.flag_default('work_dir'), '/var/lib/letsencrypt')
self.assertEqual(cli.flag_default('logs_dir'), '/var/log/letsencrypt')
else:
self.assertEqual(cli.flag_default('config_dir'), 'C:\\Certbot')
self.assertEqual(cli.flag_default('work_dir'), 'C:\\Certbot\\lib')
self.assertEqual(cli.flag_default('logs_dir'), 'C:\\Certbot\\log')
class ParseTest(unittest.TestCase):
'''Test the cli args entrypoint'''
def setUp(self):
reload_module(cli)
@staticmethod
def _unmocked_parse(*args, **kwargs):
"""Get result of cli.prepare_and_parse_args."""
return cli.prepare_and_parse_args(PLUGINS, *args, **kwargs)
@staticmethod
def parse(*args, **kwargs):
"""Mocks zope.component.getUtility and calls _unmocked_parse."""
with test_util.patch_display_util():
return ParseTest._unmocked_parse(*args, **kwargs)
def _help_output(self, args):
"Run a command, and return the output string for scrutiny"
output = io.StringIO()
def write_msg(message, *args, **kwargs): # pylint: disable=missing-docstring,unused-argument
output.write(message)
with mock.patch('certbot._internal.main.sys.stdout', new=output):
with test_util.patch_display_util() as mock_get_utility:
mock_get_utility().notification.side_effect = write_msg
with mock.patch('certbot._internal.main.sys.stderr'):
self.assertRaises(SystemExit, self._unmocked_parse, args, output)
return output.getvalue()
@mock.patch("certbot._internal.cli.helpful.flag_default")
def test_cli_ini_domains(self, mock_flag_default):
with tempfile.NamedTemporaryFile() as tmp_config:
tmp_config.close() # close now because of compatibility issues on Windows
# use a shim to get ConfigArgParse to pick up tmp_config
shim = (
lambda v: copy.deepcopy(constants.CLI_DEFAULTS[v])
if v != "config_files"
else [tmp_config.name]
)
mock_flag_default.side_effect = shim
namespace = self.parse(["certonly"])
self.assertEqual(namespace.domains, [])
with open(tmp_config.name, 'w') as file_h:
file_h.write("domains = example.com")
namespace = self.parse(["certonly"])
self.assertEqual(namespace.domains, ["example.com"])
namespace = self.parse(["renew"])
self.assertEqual(namespace.domains, [])
def test_no_args(self):
namespace = self.parse([])
for d in ('config_dir', 'logs_dir', 'work_dir'):
self.assertEqual(getattr(namespace, d), cli.flag_default(d))
def test_install_abspath(self):
cert = 'cert'
key = 'key'
chain = 'chain'
fullchain = 'fullchain'
with mock.patch('certbot._internal.main.install'):
namespace = self.parse(['install', '--cert-path', cert,
'--key-path', 'key', '--chain-path',
'chain', '--fullchain-path', 'fullchain'])
self.assertEqual(namespace.cert_path, os.path.abspath(cert))
self.assertEqual(namespace.key_path, os.path.abspath(key))
self.assertEqual(namespace.chain_path, os.path.abspath(chain))
self.assertEqual(namespace.fullchain_path, os.path.abspath(fullchain))
def test_help(self):
self._help_output(['--help']) # assert SystemExit is raised here
out = self._help_output(['--help', 'all'])
self.assertIn("--configurator", out)
self.assertIn("how a certificate is deployed", out)
self.assertIn("--webroot-path", out)
self.assertNotIn("--text", out)
self.assertNotIn("%s", out)
self.assertNotIn("{0}", out)
self.assertNotIn("--renew-hook", out)
out = self._help_output(['-h', 'nginx'])
if "nginx" in PLUGINS:
# may be false while building distributions without plugins
self.assertIn("--nginx-ctl", out)
self.assertNotIn("--webroot-path", out)
self.assertNotIn("--checkpoints", out)
out = self._help_output(['-h'])
if "nginx" in PLUGINS:
self.assertIn("Use the Nginx plugin", out)
else:
self.assertIn("(the certbot nginx plugin is not", out)
out = self._help_output(['--help', 'plugins'])
self.assertNotIn("--webroot-path", out)
self.assertIn("--prepare", out)
self.assertIn('"plugins" subcommand', out)
# test multiple topics
out = self._help_output(['-h', 'renew'])
self.assertIn("--keep", out)
out = self._help_output(['-h', 'automation'])
self.assertIn("--keep", out)
out = self._help_output(['-h', 'revoke'])
self.assertNotIn("--keep", out)
out = self._help_output(['--help', 'install'])
self.assertIn("--cert-path", out)
self.assertIn("--key-path", out)
out = self._help_output(['--help', 'revoke'])
self.assertIn("--cert-path", out)
self.assertIn("--key-path", out)
self.assertIn("--reason", out)
self.assertIn("--delete-after-revoke", out)
self.assertIn("--no-delete-after-revoke", out)
out = self._help_output(['-h', 'register'])
self.assertNotIn("--cert-path", out)
self.assertNotIn("--key-path", out)
out = self._help_output(['-h'])
self.assertIn(cli.SHORT_USAGE, out)
self.assertIn(cli.COMMAND_OVERVIEW[:100], out)
self.assertNotIn("%s", out)
self.assertNotIn("{0}", out)
def test_help_no_dashes(self):
self._help_output(['help']) # assert SystemExit is raised here
out = self._help_output(['help', 'all'])
self.assertIn("--configurator", out)
self.assertIn("how a certificate is deployed", out)
self.assertIn("--webroot-path", out)
self.assertNotIn("--text", out)
self.assertNotIn("%s", out)
self.assertNotIn("{0}", out)
out = self._help_output(['help', 'install'])
self.assertIn("--cert-path", out)
self.assertIn("--key-path", out)
out = self._help_output(['help', 'revoke'])
self.assertIn("--cert-path", out)
self.assertIn("--key-path", out)
def test_parse_domains(self):
short_args = ['-d', 'example.com']
namespace = self.parse(short_args)
self.assertEqual(namespace.domains, ['example.com'])
short_args = ['-d', 'trailing.period.com.']
namespace = self.parse(short_args)
self.assertEqual(namespace.domains, ['trailing.period.com'])
short_args = ['-d', 'example.com,another.net,third.org,example.com']
namespace = self.parse(short_args)
self.assertEqual(namespace.domains, ['example.com', 'another.net',
'third.org'])
long_args = ['--domains', 'example.com']
namespace = self.parse(long_args)
self.assertEqual(namespace.domains, ['example.com'])
long_args = ['--domains', 'trailing.period.com.']
namespace = self.parse(long_args)
self.assertEqual(namespace.domains, ['trailing.period.com'])
long_args = ['--domains', 'example.com,another.net,example.com']
namespace = self.parse(long_args)
self.assertEqual(namespace.domains, ['example.com', 'another.net'])
def test_preferred_challenges(self):
short_args = ['--preferred-challenges', 'http, dns']
namespace = self.parse(short_args)
expected = [challenges.HTTP01.typ, challenges.DNS01.typ]
self.assertEqual(namespace.pref_challs, expected)
short_args = ['--preferred-challenges', 'jumping-over-the-moon']
# argparse.ArgumentError makes argparse print more information
# to stderr and call sys.exit()
with mock.patch('sys.stderr'):
self.assertRaises(SystemExit, self.parse, short_args)
def test_server_flag(self):
namespace = self.parse('--server example.com'.split())
self.assertEqual(namespace.server, 'example.com')
def test_must_staple_flag(self):
short_args = ['--must-staple']
namespace = self.parse(short_args)
self.assertIs(namespace.must_staple, True)
self.assertIs(namespace.staple, True)
def _check_server_conflict_message(self, parser_args, conflicting_args):
try:
self.parse(parser_args)
self.fail( # pragma: no cover
"The following flags didn't conflict with "
'--server: {0}'.format(', '.join(conflicting_args)))
except errors.Error as error:
self.assertIn('--server', str(error))
for arg in conflicting_args:
self.assertIn(arg, str(error))
def test_staging_flag(self):
short_args = ['--staging']
namespace = self.parse(short_args)
self.assertIs(namespace.staging, True)
self.assertEqual(namespace.server, constants.STAGING_URI)
short_args += '--server example.com'.split()
self._check_server_conflict_message(short_args, ['--staging'])
def _assert_dry_run_flag_worked(self, namespace, existing_account):
self.assertIs(namespace.dry_run, True)
self.assertIs(namespace.break_my_certs, True)
self.assertIs(namespace.staging, True)
self.assertEqual(namespace.server, constants.STAGING_URI)
if existing_account:
self.assertIs(namespace.tos, True)
self.assertIs(namespace.register_unsafely_without_email, True)
else:
self.assertIs(namespace.tos, False)
self.assertIs(namespace.register_unsafely_without_email, False)
def test_dry_run_flag(self):
config_dir = tempfile.mkdtemp()
short_args = '--dry-run --config-dir {0}'.format(config_dir).split()
self.assertRaises(errors.Error, self.parse, short_args)
self._assert_dry_run_flag_worked(
self.parse(short_args + ['auth']), False)
self._assert_dry_run_flag_worked(
self.parse(short_args + ['certonly']), False)
self._assert_dry_run_flag_worked(
self.parse(short_args + ['renew']), False)
account_dir = os.path.join(config_dir, constants.ACCOUNTS_DIR)
filesystem.mkdir(account_dir)
filesystem.mkdir(os.path.join(account_dir, 'fake_account_dir'))
self._assert_dry_run_flag_worked(self.parse(short_args + ['auth']), True)
self._assert_dry_run_flag_worked(self.parse(short_args + ['renew']), True)
self._assert_dry_run_flag_worked(self.parse(short_args + ['certonly']), True)
short_args += ['certonly']
# `--dry-run --server example.com` should emit example.com
self.assertEqual(self.parse(short_args + ['--server', 'example.com']).server,
'example.com')
# `--dry-run --server STAGING_URI` should emit STAGING_URI
self.assertEqual(self.parse(short_args + ['--server', constants.STAGING_URI]).server,
constants.STAGING_URI)
# `--dry-run --server LIVE` should emit STAGING_URI
self.assertEqual(self.parse(short_args + ['--server', cli.flag_default("server")]).server,
constants.STAGING_URI)
# `--dry-run --server example.com --staging` should emit an error
conflicts = ['--staging']
self._check_server_conflict_message(short_args + ['--server', 'example.com', '--staging'],
conflicts)
def test_option_was_set(self):
key_size_option = 'rsa_key_size'
key_size_value = cli.flag_default(key_size_option)
self.parse('--rsa-key-size {0}'.format(key_size_value).split())
self.assertIs(cli.option_was_set(key_size_option, key_size_value), True)
self.assertIs(cli.option_was_set('no_verify_ssl', True), True)
config_dir_option = 'config_dir'
self.assertFalse(cli.option_was_set(
config_dir_option, cli.flag_default(config_dir_option)))
self.assertFalse(cli.option_was_set(
'authenticator', cli.flag_default('authenticator')))
def test_ecdsa_key_option(self):
elliptic_curve_option = 'elliptic_curve'
elliptic_curve_option_value = cli.flag_default(elliptic_curve_option)
self.parse('--elliptic-curve {0}'.format(elliptic_curve_option_value).split())
self.assertIs(cli.option_was_set(elliptic_curve_option, elliptic_curve_option_value), True)
def test_invalid_key_type(self):
key_type_option = 'key_type'
key_type_value = cli.flag_default(key_type_option)
self.parse('--key-type {0}'.format(key_type_value).split())
self.assertIs(cli.option_was_set(key_type_option, key_type_value), True)
with self.assertRaises(SystemExit):
self.parse("--key-type foo")
def test_encode_revocation_reason(self):
for reason, code in constants.REVOCATION_REASONS.items():
namespace = self.parse(['--reason', reason])
self.assertEqual(namespace.reason, code)
for reason, code in constants.REVOCATION_REASONS.items():
namespace = self.parse(['--reason', reason.upper()])
self.assertEqual(namespace.reason, code)
def test_force_interactive(self):
self.assertRaises(
errors.Error, self.parse, "renew --force-interactive".split())
self.assertRaises(
errors.Error, self.parse, "-n --force-interactive".split())
def test_deploy_hook_conflict(self):
with mock.patch("certbot._internal.cli.sys.stderr"):
self.assertRaises(SystemExit, self.parse,
"--renew-hook foo --deploy-hook bar".split())
def test_deploy_hook_matches_renew_hook(self):
value = "foo"
namespace = self.parse(["--renew-hook", value,
"--deploy-hook", value,
"--disable-hook-validation"])
self.assertEqual(namespace.deploy_hook, value)
self.assertEqual(namespace.renew_hook, value)
def test_deploy_hook_sets_renew_hook(self):
value = "foo"
namespace = self.parse(
["--deploy-hook", value, "--disable-hook-validation"])
self.assertEqual(namespace.deploy_hook, value)
self.assertEqual(namespace.renew_hook, value)
def test_renew_hook_conflict(self):
with mock.patch("certbot._internal.cli.sys.stderr"):
self.assertRaises(SystemExit, self.parse,
"--deploy-hook foo --renew-hook bar".split())
def test_renew_hook_matches_deploy_hook(self):
value = "foo"
namespace = self.parse(["--deploy-hook", value,
"--renew-hook", value,
"--disable-hook-validation"])
self.assertEqual(namespace.deploy_hook, value)
self.assertEqual(namespace.renew_hook, value)
def test_renew_hook_does_not_set_renew_hook(self):
value = "foo"
namespace = self.parse(
["--renew-hook", value, "--disable-hook-validation"])
self.assertIsNone(namespace.deploy_hook)
self.assertEqual(namespace.renew_hook, value)
def test_max_log_backups_error(self):
with mock.patch('certbot._internal.cli.sys.stderr'):
self.assertRaises(
SystemExit, self.parse, "--max-log-backups foo".split())
self.assertRaises(
SystemExit, self.parse, "--max-log-backups -42".split())
def test_max_log_backups_success(self):
value = "42"
namespace = self.parse(["--max-log-backups", value])
self.assertEqual(namespace.max_log_backups, int(value))
def test_unchanging_defaults(self):
namespace = self.parse([])
self.assertEqual(namespace.domains, [])
self.assertEqual(namespace.pref_challs, [])
namespace.pref_challs = [challenges.HTTP01.typ]
namespace.domains = ['example.com']
namespace = self.parse([])
self.assertEqual(namespace.domains, [])
self.assertEqual(namespace.pref_challs, [])
def test_no_directory_hooks_set(self):
self.assertFalse(self.parse(["--no-directory-hooks"]).directory_hooks)
def test_no_directory_hooks_unset(self):
self.assertIs(self.parse([]).directory_hooks, True)
def test_delete_after_revoke(self):
namespace = self.parse(["--delete-after-revoke"])
self.assertIs(namespace.delete_after_revoke, True)
def test_delete_after_revoke_default(self):
namespace = self.parse([])
self.assertIsNone(namespace.delete_after_revoke)
def test_no_delete_after_revoke(self):
namespace = self.parse(["--no-delete-after-revoke"])
self.assertIs(namespace.delete_after_revoke, False)
def test_allow_subset_with_wildcard(self):
self.assertRaises(errors.Error, self.parse,
"--allow-subset-of-names -d *.example.org".split())
def test_route53_no_revert(self):
for help_flag in ['-h', '--help']:
for topic in ['all', 'plugins', 'dns-route53']:
self.assertNotIn('certbot-route53:auth', self._help_output([help_flag, topic]))
class DefaultTest(unittest.TestCase):
"""Tests for certbot._internal.cli._Default."""
def setUp(self):
# pylint: disable=protected-access
self.default1 = cli._Default()
self.default2 = cli._Default()
def test_boolean(self):
self.assertIs(bool(self.default1), False)
self.assertIs(bool(self.default2), False)
def test_equality(self):
self.assertEqual(self.default1, self.default2)
def test_hash(self):
self.assertEqual(hash(self.default1), hash(self.default2))
class SetByCliTest(unittest.TestCase):
"""Tests for certbot.set_by_cli and related functions."""
def setUp(self):
reload_module(cli)
def test_deploy_hook(self):
self.assertTrue(_call_set_by_cli(
'renew_hook', '--deploy-hook foo'.split(), 'renew'))
def test_webroot_map(self):
args = '-w /var/www/html -d example.com'.split()
verb = 'renew'
self.assertIs(_call_set_by_cli('webroot_map', args, verb), True)
def _call_set_by_cli(var, args, verb):
with mock.patch('certbot._internal.cli.helpful_parser') as mock_parser:
with test_util.patch_display_util():
mock_parser.args = args
mock_parser.verb = verb
return cli.set_by_cli(var)
if __name__ == '__main__':
unittest.main() # pragma: no cover
|
|
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
This module implements the JWT Profile for OAuth 2.0 Authorization Grants
as defined by `RFC 7523`_ with particular support for how this RFC is
implemented in Google's infrastructure. Google refers to these credentials
as *Service Accounts*.
Service accounts are used for server-to-server communication, such as
interactions between a web application server and a Google service. The
service account belongs to your application instead of to an individual end
user. In contrast to other OAuth 2.0 profiles, no users are involved and your
application "acts" as the service account.
Typically an application uses a service account when the application uses
Google APIs to work with its own data rather than a user's data. For example,
an application that uses Google Cloud Datastore for data persistence would use
a service account to authenticate its calls to the Google Cloud Datastore API.
However, an application that needs to access a user's Drive documents would
use the normal OAuth 2.0 profile.
Additionally, Google Apps domain administrators can grant service accounts
`domain-wide delegation`_ authority to access user data on behalf of users in
the domain.
This profile uses a JWT to acquire an OAuth 2.0 access token. The JWT is used
in place of the usual authorization token returned during the standard
OAuth 2.0 Authorization Code grant. The JWT is only used for this purpose, as
the acquired access token is used as the bearer token when making requests
using these credentials.
This profile differs from the normal OAuth 2.0 profile because no user consent
step is required. The use of the private key allows this profile to assert
identity directly.
This profile also differs from the :mod:`google.auth.jwt` authentication
because the JWT credentials use the JWT directly as the bearer token. This
profile instead only uses the JWT to obtain an OAuth 2.0 access token. The
obtained OAuth 2.0 access token is used as the bearer token.
Domain-wide delegation
----------------------
Domain-wide delegation allows a service account to access user data on
behalf of any user in a Google Apps domain without consent from the user.
For example, an application that uses the Google Calendar API to add events to
the calendars of all users in a Google Apps domain would use a service account
to access the Google Calendar API on behalf of users.
The Google Apps administrator must explicitly authorize the service account to
do this. This authorization step is referred to as "delegating domain-wide
authority" to a service account.
You can use domain-wide delegation by creating a set of credentials with a
specific subject using :meth:`~Credentials.with_subject`.
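As a short sketch, delegation looks like this (the key file, scope, and
subject below are placeholders)::
    from google.oauth2 import service_account
    credentials = service_account.Credentials.from_service_account_file(
        'service-account.json',
        scopes=['https://www.googleapis.com/auth/calendar'])
    delegated_credentials = credentials.with_subject('user@example.com')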
.. _RFC 7523: https://tools.ietf.org/html/rfc7523
"""
import copy
import datetime
from google.auth import _helpers
from google.auth import _service_account_info
from google.auth import credentials
from google.auth import jwt
from google.oauth2 import _client
_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
class Credentials(
credentials.Signing, credentials.Scoped, credentials.CredentialsWithQuotaProject
):
"""Service account credentials
Usually, you'll create these credentials with one of the helper
constructors. To create credentials using a Google service account
private key JSON file::
credentials = service_account.Credentials.from_service_account_file(
'service-account.json')
Or if you already have the service account file loaded::
service_account_info = json.load(open('service_account.json'))
credentials = service_account.Credentials.from_service_account_info(
service_account_info)
Both helper methods pass on arguments to the constructor, so you can
specify additional scopes and a subject if necessary::
credentials = service_account.Credentials.from_service_account_file(
'service-account.json',
scopes=['email'],
subject='user@example.com')
The credentials are considered immutable. If you want to modify the scopes
or the subject used for delegation, use :meth:`with_scopes` or
:meth:`with_subject`::
scoped_credentials = credentials.with_scopes(['email'])
delegated_credentials = credentials.with_subject(subject)
To add a quota project, use :meth:`with_quota_project`::
credentials = credentials.with_quota_project('myproject-123')
"""
def __init__(
self,
signer,
service_account_email,
token_uri,
scopes=None,
default_scopes=None,
subject=None,
project_id=None,
quota_project_id=None,
additional_claims=None,
):
"""
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
service_account_email (str): The service account's email.
scopes (Sequence[str]): User-defined scopes to request during the
authorization grant.
default_scopes (Sequence[str]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
token_uri (str): The OAuth 2.0 Token URI.
subject (str): For domain-wide delegation, the email address of the
user for which to request delegated access.
project_id (str): Project ID associated with the service account
credential.
quota_project_id (Optional[str]): The project ID used for quota and
billing.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT assertion used in the authorization grant.
.. note:: Typically one of the helper constructors
:meth:`from_service_account_file` or
:meth:`from_service_account_info` are used instead of calling the
constructor directly.
"""
super(Credentials, self).__init__()
self._scopes = scopes
self._default_scopes = default_scopes
self._signer = signer
self._service_account_email = service_account_email
self._subject = subject
self._project_id = project_id
self._quota_project_id = quota_project_id
self._token_uri = token_uri
self._jwt_credentials = None
if additional_claims is not None:
self._additional_claims = additional_claims
else:
self._additional_claims = {}
@classmethod
def _from_signer_and_info(cls, signer, info, **kwargs):
"""Creates a Credentials instance from a signer and service account
info.
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
info (Mapping[str, str]): The service account info.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.service_account.Credentials: The constructed credentials.
Raises:
ValueError: If the info is not in the expected format.
"""
return cls(
signer,
service_account_email=info["client_email"],
token_uri=info["token_uri"],
project_id=info.get("project_id"),
**kwargs
)
@classmethod
def from_service_account_info(cls, info, **kwargs):
"""Creates a Credentials instance from parsed service account info.
Args:
info (Mapping[str, str]): The service account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.service_account.Credentials: The constructed
credentials.
Raises:
ValueError: If the info is not in the expected format.
"""
signer = _service_account_info.from_dict(
info, require=["client_email", "token_uri"]
)
return cls._from_signer_and_info(signer, info, **kwargs)
@classmethod
def from_service_account_file(cls, filename, **kwargs):
"""Creates a Credentials instance from a service account json file.
Args:
filename (str): The path to the service account json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.service_account.Credentials: The constructed
credentials.
"""
info, signer = _service_account_info.from_filename(
filename, require=["client_email", "token_uri"]
)
return cls._from_signer_and_info(signer, info, **kwargs)
@property
def service_account_email(self):
"""The service account email."""
return self._service_account_email
@property
def project_id(self):
"""Project ID associated with this credential."""
return self._project_id
@property
def requires_scopes(self):
"""Checks if the credentials requires scopes.
Returns:
bool: True if there are no scopes set otherwise False.
"""
return True if not self._scopes else False
@_helpers.copy_docstring(credentials.Scoped)
def with_scopes(self, scopes, default_scopes=None):
return self.__class__(
self._signer,
service_account_email=self._service_account_email,
scopes=scopes,
default_scopes=default_scopes,
token_uri=self._token_uri,
subject=self._subject,
project_id=self._project_id,
quota_project_id=self._quota_project_id,
additional_claims=self._additional_claims.copy(),
)
def with_subject(self, subject):
"""Create a copy of these credentials with the specified subject.
Args:
subject (str): The subject claim.
Returns:
google.auth.service_account.Credentials: A new credentials
instance.
"""
return self.__class__(
self._signer,
service_account_email=self._service_account_email,
scopes=self._scopes,
default_scopes=self._default_scopes,
token_uri=self._token_uri,
subject=subject,
project_id=self._project_id,
quota_project_id=self._quota_project_id,
additional_claims=self._additional_claims.copy(),
)
def with_claims(self, additional_claims):
"""Returns a copy of these credentials with modified claims.
Args:
additional_claims (Mapping[str, str]): Any additional claims for
the JWT payload. This will be merged with the current
additional claims.
Returns:
google.auth.service_account.Credentials: A new credentials
instance.
"""
new_additional_claims = copy.deepcopy(self._additional_claims)
new_additional_claims.update(additional_claims or {})
return self.__class__(
self._signer,
service_account_email=self._service_account_email,
scopes=self._scopes,
default_scopes=self._default_scopes,
token_uri=self._token_uri,
subject=self._subject,
project_id=self._project_id,
quota_project_id=self._quota_project_id,
additional_claims=new_additional_claims,
)
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
return self.__class__(
self._signer,
service_account_email=self._service_account_email,
default_scopes=self._default_scopes,
scopes=self._scopes,
token_uri=self._token_uri,
subject=self._subject,
project_id=self._project_id,
quota_project_id=quota_project_id,
additional_claims=self._additional_claims.copy(),
)
def _make_authorization_grant_assertion(self):
"""Create the OAuth 2.0 assertion.
This assertion is used during the OAuth 2.0 grant to acquire an
access token.
Returns:
bytes: The authorization grant assertion.
"""
now = _helpers.utcnow()
lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
expiry = now + lifetime
payload = {
"iat": _helpers.datetime_to_secs(now),
"exp": _helpers.datetime_to_secs(expiry),
# The issuer must be the service account email.
"iss": self._service_account_email,
# The audience must be the auth token endpoint's URI
"aud": self._token_uri,
"scope": _helpers.scopes_to_string(self._scopes or ()),
}
payload.update(self._additional_claims)
# The subject can be a user email for domain-wide delegation.
if self._subject:
payload.setdefault("sub", self._subject)
token = jwt.encode(self._signer, payload)
return token
@_helpers.copy_docstring(credentials.Credentials)
def refresh(self, request):
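# When no scopes are set, _create_self_signed_jwt may have installed
# self-signed JWT credentials; refresh those directly instead of
# performing the OAuth 2.0 assertion grant.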
if self._jwt_credentials is not None:
self._jwt_credentials.refresh(request)
self.token = self._jwt_credentials.token
self.expiry = self._jwt_credentials.expiry
else:
assertion = self._make_authorization_grant_assertion()
access_token, expiry, _ = _client.jwt_grant(
request, self._token_uri, assertion
)
self.token = access_token
self.expiry = expiry
def _create_self_signed_jwt(self, audience):
"""Create a self-signed JWT from the credentials if requirements are met.
Args:
audience (str): The service URL. ``https://[API_ENDPOINT]/``
"""
# https://google.aip.dev/auth/4111
# If the user has not defined scopes, create a self-signed jwt
if not self.scopes:
self._jwt_credentials = jwt.Credentials.from_signing_credentials(
self, audience
)
@_helpers.copy_docstring(credentials.Signing)
def sign_bytes(self, message):
return self._signer.sign(message)
@property
@_helpers.copy_docstring(credentials.Signing)
def signer(self):
return self._signer
@property
@_helpers.copy_docstring(credentials.Signing)
def signer_email(self):
return self._service_account_email
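# --- Usage sketch (editorial addition, not part of the library): a minimal,
# hedged example of the Credentials class above. The key file path and scope
# URL are placeholders, and the from_service_account_file helper is assumed
# to be defined earlier in this module.
def _credentials_example():  # pragma: no cover
from google.auth.transport.requests import Request
creds = Credentials.from_service_account_file(
"service-account.json",
scopes=["https://www.googleapis.com/auth/cloud-platform"])
# refresh() performs the OAuth 2.0 JWT bearer grant implemented above
creds.refresh(Request())
return creds.token, creds.expiry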
class IDTokenCredentials(credentials.Signing, credentials.CredentialsWithQuotaProject):
"""Open ID Connect ID Token-based service account credentials.
These credentials are largely similar to :class:`.Credentials`, but instead
of using an OAuth 2.0 Access Token as the bearer token, they use an Open
ID Connect ID Token as the bearer token. These credentials are useful when
communicating with services that require ID Tokens and cannot accept access
tokens.
Usually, you'll create these credentials with one of the helper
constructors. To create credentials using a Google service account
private key JSON file::
credentials = (
service_account.IDTokenCredentials.from_service_account_file(
'service-account.json'))
Or if you already have the service account file loaded::
service_account_info = json.load(open('service_account.json'))
credentials = (
service_account.IDTokenCredentials.from_service_account_info(
service_account_info))
Both helper methods pass on arguments to the constructor, so you can
specify the target audience::
credentials = (
service_account.IDTokenCredentials.from_service_account_file(
'service-account.json',
target_audience='https://example.com'))
The credentials are considered immutable. If you want to modify the
target audience, use :meth:`with_target_audience`::
target_audience = 'https://example.com'
credentials = credentials.with_target_audience(target_audience)
"""
def __init__(
self,
signer,
service_account_email,
token_uri,
target_audience,
additional_claims=None,
quota_project_id=None,
):
"""
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
service_account_email (str): The service account's email.
token_uri (str): The OAuth 2.0 Token URI.
target_audience (str): The intended audience for these credentials,
used when requesting the ID Token. The ID Token's ``aud`` claim
will be set to this string.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT assertion used in the authorization grant.
quota_project_id (Optional[str]): The project ID used for quota and billing.
.. note:: Typically one of the helper constructors
:meth:`from_service_account_file` or
:meth:`from_service_account_info` are used instead of calling the
constructor directly.
"""
super(IDTokenCredentials, self).__init__()
self._signer = signer
self._service_account_email = service_account_email
self._token_uri = token_uri
self._target_audience = target_audience
self._quota_project_id = quota_project_id
if additional_claims is not None:
self._additional_claims = additional_claims
else:
self._additional_claims = {}
@classmethod
def _from_signer_and_info(cls, signer, info, **kwargs):
"""Creates a credentials instance from a signer and service account
info.
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
info (Mapping[str, str]): The service account info.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.service_account.IDTokenCredentials: The constructed credentials.
Raises:
ValueError: If the info is not in the expected format.
"""
kwargs.setdefault("service_account_email", info["client_email"])
kwargs.setdefault("token_uri", info["token_uri"])
return cls(signer, **kwargs)
@classmethod
def from_service_account_info(cls, info, **kwargs):
"""Creates a credentials instance from parsed service account info.
Args:
info (Mapping[str, str]): The service account info in Google
format.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.service_account.IDTokenCredentials: The constructed
credentials.
Raises:
ValueError: If the info is not in the expected format.
"""
signer = _service_account_info.from_dict(
info, require=["client_email", "token_uri"]
)
return cls._from_signer_and_info(signer, info, **kwargs)
@classmethod
def from_service_account_file(cls, filename, **kwargs):
"""Creates a credentials instance from a service account json file.
Args:
filename (str): The path to the service account json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.service_account.IDTokenCredentials: The constructed
credentials.
"""
info, signer = _service_account_info.from_filename(
filename, require=["client_email", "token_uri"]
)
return cls._from_signer_and_info(signer, info, **kwargs)
def with_target_audience(self, target_audience):
"""Create a copy of these credentials with the specified target
audience.
Args:
target_audience (str): The intended audience for these credentials,
used when requesting the ID Token.
Returns:
google.auth.service_account.IDTokenCredentials: A new credentials
instance.
"""
return self.__class__(
self._signer,
service_account_email=self._service_account_email,
token_uri=self._token_uri,
target_audience=target_audience,
additional_claims=self._additional_claims.copy(),
quota_project_id=self.quota_project_id,
)
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
return self.__class__(
self._signer,
service_account_email=self._service_account_email,
token_uri=self._token_uri,
target_audience=self._target_audience,
additional_claims=self._additional_claims.copy(),
quota_project_id=quota_project_id,
)
def _make_authorization_grant_assertion(self):
"""Create the OAuth 2.0 assertion.
This assertion is used during the OAuth 2.0 grant to acquire an
ID token.
Returns:
bytes: The authorization grant assertion.
"""
now = _helpers.utcnow()
lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
expiry = now + lifetime
payload = {
"iat": _helpers.datetime_to_secs(now),
"exp": _helpers.datetime_to_secs(expiry),
# The issuer must be the service account email.
"iss": self.service_account_email,
# The audience must be the auth token endpoint's URI
"aud": self._token_uri,
# The target audience specifies which service the ID token is
# intended for.
"target_audience": self._target_audience,
}
payload.update(self._additional_claims)
token = jwt.encode(self._signer, payload)
return token
@_helpers.copy_docstring(credentials.Credentials)
def refresh(self, request):
assertion = self._make_authorization_grant_assertion()
access_token, expiry, _ = _client.id_token_jwt_grant(
request, self._token_uri, assertion
)
self.token = access_token
self.expiry = expiry
@property
def service_account_email(self):
"""The service account email."""
return self._service_account_email
@_helpers.copy_docstring(credentials.Signing)
def sign_bytes(self, message):
return self._signer.sign(message)
@property
@_helpers.copy_docstring(credentials.Signing)
def signer(self):
return self._signer
@property
@_helpers.copy_docstring(credentials.Signing)
def signer_email(self):
return self._service_account_email
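# --- Usage sketch (editorial addition): the ID token variant above. The
# audience URL and key file path are placeholders.
def _id_token_example():  # pragma: no cover
from google.auth.transport.requests import Request
id_creds = IDTokenCredentials.from_service_account_file(
"service-account.json",
target_audience="https://example.com/")
# refresh() exchanges the signed assertion for an OIDC ID token
id_creds.refresh(Request())
return id_creds.token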
|
|
# -*- coding: utf-8 -*-
"""
Sutra YAML Processor
@author: kmadathil
"""
from sanskrit_parser.generator.sutra import LRSutra
from sanskrit_parser.generator.maheshvara import * # noqa: F403
from sanskrit_parser.generator.paribhasha import * # noqa: F403
from sanskrit_parser.generator.pratyaya import * # noqa: F403
from sanskrit_parser.generator.pratipadika import * # noqa: F403
import logging
logger = logging.getLogger(__name__)
def process_yaml(y):
'''
Process a sutra YAML structure and return a dict of sutras
Inputs:
y: Sutra YAML (list of sutra entries)
Outputs:
sutra_dict: dict of sutras keyed by sutra id
'''
logger.debug(f'Processing YAML {y}')
sutra_dict = {}
for s in y:
logger.debug(f"processing {s}")
if "sutra" not in s:
logger.error("No sutra name")
assert False
if "id" not in s:
logger.error("No sutra id")
assert False
for c in ["condition", "domain", "xform", "update", "insert"]:
if c not in s:
s[c] = None
if "bahiranga" not in s:
s["bahiranga"] = 1
# svar = "sutra_"+s["id"].replace(".", "_")
sname = s["sutra"]
soverrides = None
sopt = False
if "optional" in s:
sopt = s["optional"]
if "overrides" in s:
if isinstance(s["overrides"], str):
soverrides = [s["overrides"]]
else:
soverrides = s["overrides"]
logger.debug(f"Sutra {s['id']} Overrides {soverrides}")
scond = None
if s["condition"] is not None:
logger.debug("Processing Condition")
def _exec_cond(s):
logger.debug(f"Cond dict {s}")
# FIXME Fix variables after fixing sutra_engine
def _cond(env):
def _c(env):
# _s a dict
# LHS = variable
# RHS = _pratyahara , {variable}, or savarna
x = True
for k in _s:
logger.debug(f"Checking cond {_s[k]} against {k}")
def _cond_single(sk, k):
if (sk[0] == "_"):
# Pratyahara
logger.debug(f"Checking pratyahara {sk[1:]} {k}")
_x = isInPratyahara(sk[1:], k) # noqa: F405
elif (sk[0:2] == "$$"):
# function call
logger.debug(f"Checking function {sk[2:]} {k}")
_x = eval(f"{sk[2:]}(k)")
elif (sk[0] == "$"):
# Variable
logger.debug(f"Checking variable {sk[1:]} {k}")
_x = isSavarna(env[sk[1:]], k) # noqa: F405
elif (sk[0] == "="):
# Raw equality
logger.debug(f"Checking raw {sk[1:]} {k}")
_x = (sk[1:] == k.canonical())
elif (sk[0:2] == "!="):
# Raw inequality
logger.debug(f"Checking raw inequality {sk[2:]} {k}")
_x = (sk[2:] != k.canonical())
elif (sk[0:2] == "?!"): # Tag false check
logger.debug(f"Checking tag false {sk[2:]} {k}")
_x = not k.hasTag(sk[2:])
elif (sk[0] == "?"): # Tag check
logger.debug(f"Checking tag {sk[1:]} {k}")
_x = k.hasTag(sk[1:])
elif (sk[0] == "+"): # It check
logger.debug(f"Checking it {sk[1:]} {k}")
_x = k.hasTag("pratyaya") and k.hasIt(sk[1:])
else:
logger.debug(f"Checking savarna {sk} {k} ")
_x = isSavarna(sk, k) # noqa: F405
logger.debug(f"Return {_x}")
return _x
if isinstance(_s[k], list):
logger.debug("List")
if _s[k][0] == "and":
logger.debug("Checking and condition")
_x = True
for sk in _s[k][1:]:
_x = _x and _cond_single(sk, env[k])
else:
_x = False
for sk in _s[k]:
_x = _x or _cond_single(sk, env[k])
else:
logger.debug("Single")
_x = _cond_single(_s[k], env[k])
logger.debug(f"Got {_x}")
x = x and _x
return x
# List implies an or condition
if isinstance(s, list):
_ret = False
for _s in s:
_ret = _ret or _c(env)
return _ret
else:
_s = s
return _c(env)
return _cond
scond = _exec_cond(s["condition"])
sxform = None
if s["xform"] is not None:
logger.debug("Processing Xform")
def _exec_xform(s):
logger.debug(f"Xform dict {s}")
xdict = s
# FIXME Fix variables after fixing sutra_engine
def _xform(env):
# Don't Remove - keep for debug
# logger.debug(f"Env {env}")
# for k in env:
# logger.debug(f"{k} {type(env[k])}")
_l = l = env["l"].canonical() # noqa: E741, F841
_r = r = env["r"].canonical() # noqa: E741, F841
_lc = lc = env["lc"].canonical() # noqa: E741, F841
_rc = rc = env["rc"].canonical() # noqa: E741, F841
logger.debug(f"Xform dict {xdict}")
logger.debug(f"Before: {_lc} {_l} {_r} {_rc}")
# Execute transforms for predefined variables
# FIXME: We assume our code in xform is safe to eval
if "l" in xdict:
if xdict["l"] is not None:
_l = eval(xdict["l"])
else:
_l = ""
if "r" in xdict:
if xdict["r"] is not None:
_r = eval(xdict["r"])
else:
_r = ""
if "lc" in xdict:
if xdict["lc"] is not None:
_lc = eval(xdict["lc"])
else:
_lc = ""
if "rc" in xdict:
if xdict["rc"] is not None:
_rc = eval(xdict["rc"])
else:
_rc = ""
logger.debug(f"After {_lc} {_l} {_r} {_rc}")
ret = [_lc+_l, _r+_rc]
return ret
return _xform
sxform = _exec_xform(s["xform"])
logger.debug(f"Xform def {sxform}")
sinsert = None
if s["insert"] is not None:
logger.debug("Processing insert")
def _exec_insert(s):
logger.debug(f"insert dict {s}")
idict = s
def _insert(env):
_r = {}
for i in idict:
logger.debug(f"Insert {i} {idict[i]}")
_r[i] = eval(idict[i])
return _r
return _insert
sinsert = _exec_insert(s["insert"])
logger.debug(f"Insert def {sinsert}")
sdom = None
if s["domain"] is not None:
logger.debug("Processing domain")
def _exec_trig(s):
logger.debug(f"Trig {s}")
def _trig(domains):
# list of domains
logger.debug(f"Domain checks {s}")
if isinstance(s, list):
x = True
for t in s:
x = x and domains.isdomain(t)
# all listed domains must match
return x
else:
return domains.isdomain(s)
return _trig
sdom = _exec_trig(s["domain"])
supdate = None
if s["update"] is not None:
logger.debug("Processing update")
def _exec_update(s):
logger.debug(f"Update {s}")
def _update(env, domains):
def _c(env):
# _s a dict
# LHS = variable
# RHS = _pratyahara , {variable}, or savarna
x = True
for k in _s:
logger.debug(f"Checking cond {_s[k]} against {k}")
if (_s[k][0] == "_"):
# Pratyahara
logger.debug(f"Checking pratyahara {_s[k][1:]}")
_x = isInPratyahara(_s[k][1:], env[k]) # noqa: F405
elif (_s[k][0] == "$"):
# Variable
logger.debug(f"Checking variable {_s[k][1:]} ")
_x = isSavarna(env[_s[k][1:]], env[k]) # noqa: F405
else:
_x = isSavarna(_s[k], env[k]) # noqa: F405
logger.debug(f"Got {_x}")
x = x and _x
return x
for k in ["olp", "orp", "lp", "rp"]:
if k in s.keys():
# Set or remove one tag
def _tag(k, sk):
if sk[0:2] == "++":
logger.debug(f"Setting {k} it {sk[2:]}")
env[k].setIt(sk[2:])
elif sk[0:2] == "--":
logger.debug(f"Removing {k} it {sk[1:]}")
env[k].deleteIt(sk[2:])
elif sk[0] == "+":
logger.debug(f"Setting {k} tag {sk[1:]}")
env[k].setTag(sk[1:])
elif sk[0] == "-":
logger.debug(f"Removing {k} tag {sk[1:]}")
if env[k].hasTag(sk[1:]):
env[k].deleteTag(sk[1:])
elif sk == "_lu":
logger.debug("Removing all tags ")
env[k].luTags()
elif sk[0] == "=": # Replace
logger.debug(f"Replacing {k} with {sk[1:]} {eval(sk[1:])}")
env[k] = eval(sk[1:]) # Must be defined!
# Possibly set/remove multiple tags
if isinstance(s[k], list):
for sk in s[k]:
_tag(k, sk)
else:
_tag(k, s[k])
if "domain" in s.keys():
st = s["domain"]
for k in st:
logger.debug(f"Updating domain {k} {st[k]}")
cond = True
if "condition" in st[k]:
logger.debug(f"Update condition check {st[k]['condition']}")
# List implies an or in condition
if isinstance(st[k]['condition'], list):
cond = False
for _s in st[k]['condition']:
cond = cond or _c(env)
else:
_s = st[k]['condition']
cond = _c(env)
logger.debug(f"Check got {cond}")
if cond:
setattr(domains, k, st[k]["value"])
return _update
supdate = _exec_update(s["update"])
if s["id"] in sutra_dict:
logger.error(f"Duplicate Sutra {s['id']} - {sutra_dict[s['id']]} and {sname}")
assert False
sutra_dict[s["id"]] = LRSutra(sname, s["id"],
cond=scond,
xform=sxform,
insert=sinsert,
domain=sdom,
update=supdate,
optional=sopt,
bahiranga=s["bahiranga"],
overrides=soverrides)
return sutra_dict
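# --- Usage sketch (editorial addition): the shape of input process_yaml
# expects, based on the keys read above. The sutra name, id, and expressions
# below are illustrative placeholders, not real grammar rules.
def _process_yaml_example():  # pragma: no cover
example = [{
"sutra": "example sutra",
"id": "0.0.1",
"condition": {"l": "_ik", "r": "_ac"},  # pratyahara membership checks
"xform": {"l": "l[:-1]"},  # eval'd only when the sutra is applied
"optional": True,
}]
return process_yaml(example)  # {'0.0.1': <LRSutra ...>}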
|
|
#!/usr/bin/env python3
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A python script that converts TrafficOps information into an Ansible Inventory"""
import json
import argparse
import os
import collections
from trafficops.tosession import TOSession
def empty_inventory():
"""Generate a valid empty inventory"""
return {'_meta': {'hostvars': {}}}
class AnsibleInventory():
"""Wrapper class for needed methods"""
def __init__(self, user, password, url, verify_cert):
"""Init base members"""
self.to_user = user
self.to_pass = password
self.to_url = url
self.verify_cert = verify_cert
@staticmethod
def populate_server_profile_vars(api, profile_id):
"""Generate the server profile variables once as we see it"""
server_vars = {}
server_vars['hosts'] = []
server_vars['vars'] = {}
profile = api.get_profiles(id=profile_id)[0]
server_vars['vars']['server_profile_description'] = profile[0]['description']
server_vars['vars']['server_profile_type'] = profile[0]['type']
server_vars['vars']['server_profile_routingDisabled'] = profile[0]['routingDisabled']
server_vars['vars']['server_profile_parameters'] = []
params = api.get_parameters_by_profile_id(id=profile_id)[0]
for param in params:
tmp_param = {
'name': param['name'],
'value': param['value'],
'configFile': param['configFile']}
server_vars['vars']['server_profile_parameters'].append(tmp_param)
return server_vars
@staticmethod
def populate_cachegroups(api, cachegroup_id):
"""Generate the values for cachegroups once on first sight"""
var_data = {}
cgdata = collections.namedtuple(
'Cgdata', [
'cgvars', 'primary_parent_group_name', 'secondary_parent_group_name'])
var_data['hosts'] = []
var_data['vars'] = {}
cachegroup = api.get_cachegroups(id=cachegroup_id)[0]
var_data['vars']['cachegroup_name'] = cachegroup[0]['name']
var_data['vars']['cachegroup_shortName'] = cachegroup[0]['shortName']
var_data['vars']['cachegroup_parentCachegroupName'] = \
cachegroup[0]['parentCachegroupName']
var_data['vars']['cachegroup_secondaryParentCachegroupName'] = \
cachegroup[0]['secondaryParentCachegroupName']
var_data['vars']['cachegroup_typeName'] = cachegroup[0]['typeName']
if cachegroup[0]['parentCachegroupName'] is None:
flat_parent_cg = "parentCachegroup|None"
else:
flat_parent_cg = "parentCachegroup|" + \
cachegroup[0]['parentCachegroupName']
if cachegroup[0]['secondaryParentCachegroupName'] is None:
flat_second_parent_cg = "secondaryParentCachegroup|None"
else:
flat_second_parent_cg = "secondaryParentCachegroup|" + \
cachegroup[0]['secondaryParentCachegroupName']
out = cgdata(cgvars=var_data,
primary_parent_group_name=flat_parent_cg,
secondary_parent_group_name=flat_second_parent_cg)
return out
def generate_inventory_list(self, target_to):
"""Generate the inventory list for the specified TrafficOps instance"""
with TOSession(self.to_url, verify_cert=self.verify_cert) as traffic_ops_api:
traffic_ops_api.login(self.to_user, self.to_pass)
servers = traffic_ops_api.get_servers()[0]
out = {}
out['_meta'] = {}
out['_meta']['hostvars'] = {}
out[target_to] = {}
out[target_to]['hosts'] = []
out["ungrouped"] = {}
out['ungrouped']['hosts'] = []
out['cachegroup'] = {}
out['cachegroup']['children'] = []
out['server_type'] = {}
out['server_type']['children'] = []
out['server_cdnName'] = {}
out['server_cdnName']['children'] = []
out['server_profile'] = {}
out['server_profile']['children'] = []
out['server_status'] = {}
out['server_status']['children'] = []
for server in servers:
fqdn = server['hostName'] + '.' + server['domainName']
out["ungrouped"]['hosts'].append(fqdn)
out[target_to]['hosts'].append(fqdn)
out['_meta']['hostvars'][fqdn] = {}
out['_meta']['hostvars'][fqdn]['server_toFQDN'] = target_to
out['_meta']['hostvars'][fqdn]['server_cachegroup'] = server['cachegroup']
out['_meta']['hostvars'][fqdn]['server_cdnName'] = server['cdnName']
out['_meta']['hostvars'][fqdn]['server_id'] = server['id']
out['_meta']['hostvars'][fqdn]['server_ipAddress'] = server['ipAddress']
out['_meta']['hostvars'][fqdn]['server_ip6Address'] = server['ip6Address']
out['_meta']['hostvars'][fqdn]['server_offlineReason'] = server['offlineReason']
out['_meta']['hostvars'][fqdn]['server_physLocation'] = server['physLocation']
out['_meta']['hostvars'][fqdn]['server_profile'] = server['profile']
out['_meta']['hostvars'][fqdn]['server_profileDesc'] = server['profileDesc']
out['_meta']['hostvars'][fqdn]['server_status'] = server['status']
out['_meta']['hostvars'][fqdn]['server_type'] = server['type']
flat_server_profile = "server_profile|" + server['profile']
flat_cachegroup = "cachegroup|" + server['cachegroup']
flat_server_type = "server_type|" + server['type']
flat_server_cdn_name = "server_cdnName|" + server['cdnName']
flat_server_status = "server_status|" + server['status']
if flat_server_profile not in out:
out['server_profile']['children'].append(
flat_server_profile)
out[flat_server_profile] = self.populate_server_profile_vars(
traffic_ops_api, server['profileId'])
out[flat_server_profile]['hosts'].append(fqdn)
if flat_cachegroup not in out:
out['cachegroup']['children'].append(flat_cachegroup)
cgdata = self.populate_cachegroups(
traffic_ops_api,
server['cachegroupId'])
out[flat_cachegroup] = cgdata.cgvars
flat_parent_cg = cgdata.primary_parent_group_name
flat_second_parent_cg = cgdata.secondary_parent_group_name
if flat_parent_cg not in out:
out[flat_parent_cg] = {}
out[flat_parent_cg]['children'] = []
if flat_second_parent_cg not in out:
out[flat_second_parent_cg] = {}
out[flat_second_parent_cg]['children'] = []
out[flat_parent_cg]['children'].append(flat_cachegroup)
out[flat_second_parent_cg]['children'].append(
flat_cachegroup)
out[flat_cachegroup]['hosts'].append(fqdn)
if flat_server_type not in out:
out['server_type']['children'].append(flat_server_type)
out[flat_server_type] = {}
out[flat_server_type]['hosts'] = []
out[flat_server_type]['hosts'].append(fqdn)
if flat_server_cdn_name not in out:
out['server_cdnName']['children'].append(
flat_server_cdn_name)
out[flat_server_cdn_name] = {}
out[flat_server_cdn_name]['hosts'] = []
out[flat_server_cdn_name]['hosts'].append(fqdn)
if flat_server_status not in out:
out['server_status']['children'].append(flat_server_status)
out[flat_server_status] = {}
out[flat_server_status]['hosts'] = []
out[flat_server_status]['hosts'].append(fqdn)
return out
def to_inventory(self):
"""A wrapper function blending the target url in"""
return self.generate_inventory_list(self.to_url)
#
# Thanks to Maxim for the snippet on handling bool parameters.
# https://stackoverflow.com/questions/15008758/parsing-boolean-values-with-argparse
#
def str2bool(x):
"""A helper function to help with truthiness"""
if isinstance(x, bool):
return x
if x.lower() in ('yes', 'true', 't', 'y', '1'):
return True
if x.lower() in ('no', 'false', 'f', 'n', '0'):
return False
raise argparse.ArgumentTypeError('Boolean value expected.')
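# --- Usage sketch (editorial addition): programmatic use of the inventory
# class, e.g. from another tool. The credentials and URL are placeholders,
# and the call performs real HTTP requests against the given TrafficOps host.
def _example():  # pragma: no cover
inventory = AnsibleInventory('admin', 'secret',
'to.kabletown.invalid:8443', verify_cert=True)
return json.dumps(inventory.to_inventory(), indent=2)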
if __name__ == "__main__":
PARSER = argparse.ArgumentParser(
description='Generate an Ansible inventory from TrafficOps')
PARSER.add_argument(
'--username',
type=str,
metavar='username',
default=os.environ.get('TO_USER', None),
help='TrafficOps Username. Environment Var: TO_USER Default: None')
PARSER.add_argument(
'--password',
type=str,
metavar='password',
default=os.environ.get('TO_PASSWORD', None),
help='TrafficOps Password. Environment Var: TO_PASSWORD Default: None')
PARSER.add_argument(
'--url',
type=str,
metavar='to.kabletown.invalid:8443',
default=os.environ.get('TO_URL', None),
help='TrafficOps FQDN and optional HTTPS Port. Environment Var: TO_URL Default: None')
PARSER.add_argument(
'--verify_cert',
type=str2bool,
default=os.environ.get('TO_VERIFY_CERT', "true"),
metavar="(true, false, yes, no, t, f, y, n, 0, or 1)",
help='Perform SSL Certificate Verification. Environment Var: TO_VERIFY_CERT Default: true')
PARSER.add_argument(
'--list',
action='store_true',
help='Primary argument to enable retrieval of TO data. Required per calling convention.')
PARSER.add_argument(
'--host',
type=str,
metavar='do_not_use',
default=None,
help='Ignored parameter that must be present due to calling convention.')
ARGS = PARSER.parse_args()
if ARGS.username and ARGS.password and ARGS.url:
if ARGS.list:
INVENTORY = AnsibleInventory(
ARGS.username,
ARGS.password,
ARGS.url,
ARGS.verify_cert).to_inventory()
# Since we're supplying hostvar metadata, --host support isn't required
else:
INVENTORY = empty_inventory()
else:
INVENTORY = empty_inventory()
print(json.dumps(INVENTORY))
|
|
# -*- coding: utf-8 -*-
import json
import re
import datetime
from dateutil.parser import parse
from django.core.exceptions import ObjectDoesNotExist
from django.core.management import CommandError
import pytz
import requests
from events.exporter.base import register_exporter, Exporter
from events.models import Event, ExportInfo, Keyword, Place
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.serializers.json import DjangoJSONEncoder
from icalendar import Calendar, Event as CalendarEvent
from httmock import all_requests, HTTMock, response
BASE_API_URL = settings.CITYSDK_API_SETTINGS['CITYSDK_URL']
EVENTS_URL = BASE_API_URL + 'events/'
POIS_URL = BASE_API_URL + 'pois/'
CATEGORY_URL = BASE_API_URL + 'categories?List=event'
DRY_RUN_MODE = False  # If True, mock HTTP requests and do only local DB actions
VERBOSE = False  # If True, print verbose creation logs
# maps ISO 639-1 alpha-2 to BCP 47 tags consumed by CitySDK
bcp47_lang_map = {
"fi": "fi-FI",
"sv": "sv-SE", # or sv-FI?
"en": "en-GB"
}
# CITYSDK_DEFAULT_AUTHOR = {
# "term": "primary",
# "href": "http://events.hel.fi",
# "value": "linkedevents"
# }
CITYSDK_DEFAULT_AUTHOR = {
"term": "primary",
"value": "admin"
}
CITYSDK_DEFAULT_LICENSE = {
"term": "primary",
"value": "open-data"
}
CITYSDK_EVENT_DEFAULTS_TPL = {
"base": EVENTS_URL,
"lang": bcp47_lang_map[settings.LANGUAGES[0][0]],
"author": CITYSDK_DEFAULT_AUTHOR,
"license": CITYSDK_DEFAULT_LICENSE
}
CITYSDK_POI_DEFAULTS_TPL = {
"base": POIS_URL,
"lang": settings.LANGUAGES[0][0],
"author": CITYSDK_DEFAULT_AUTHOR,
"license": CITYSDK_DEFAULT_LICENSE
}
# Create and set default category before use!
DEFAULT_POI_CATEGORY = settings.CITYSDK_API_SETTINGS['DEFAULT_POI_CATEGORY']
def jsonize(from_dict):
return json.dumps(from_dict, cls=DjangoJSONEncoder)
def generate_icalendar_element(event):
icalendar_event = CalendarEvent()
if event.start_time:
icalendar_event.add('dtstart', event.start_time)
if event.end_time:
icalendar_event.add('dtend', event.end_time)
if event.name_en:
icalendar_event.add('summary', event.name_en)
cal = Calendar()
cal.add('version', '2.0')
cal.add('prodid', '-//events.hel.fi//NONSGML Feeder//EN')
cal.add_component(icalendar_event)
# "open" if the event has a start time, "close" if it only has an end time
term = None
if event.start_time:
term = "open"
elif event.end_time:
term = "close"
if term:
return {
"term": term,
"value": cal.to_ical().decode('utf8'),
"type": "text/icalendar"
}
else:
return None
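# --- Usage sketch (editorial addition): what generate_icalendar_element
# returns for a minimal event-like object; the stub class is illustrative.
def _icalendar_example():  # pragma: no cover
class _Stub(object):
start_time = datetime.datetime(2015, 6, 1, 12, 0, tzinfo=pytz.utc)
end_time = datetime.datetime(2015, 6, 1, 14, 0, tzinfo=pytz.utc)
name_en = "Example event"
# -> {"term": "open", "value": "BEGIN:VCALENDAR...", "type": "text/icalendar"}
return generate_icalendar_element(_Stub())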
@register_exporter
class CitySDKExporter(Exporter):
name = 'CitySDK'
session_cookies = None
response_headers = {'content-type': 'application/json'}
def setup(self):
self.authenticate()
def authenticate(self):
"""
Authenticate; CitySDK uses session-based username/password auth
"""
username = settings.CITYSDK_API_SETTINGS['USERNAME']
password = settings.CITYSDK_API_SETTINGS['PASSWORD']
# noinspection PyUnusedLocal
session_response = requests.get(
'%sauth?username=%s&password=%s' %
(BASE_API_URL, username, password))
if session_response.status_code == 200:
self.session_cookies = session_response.cookies
print("Authentication successful with response: %s"
% session_response.text)
else:
raise CommandError(
"Authentication failed with credentials %s:%s" %
(username, password))
def _generate_exportable_event(self, event):
citysdk_event = CITYSDK_EVENT_DEFAULTS_TPL.copy()
# fetch category ID from exported categories
citysdk_event['category'] = []
for category in event.keywords.all():
exported_category = ExportInfo.objects.filter(
content_type=ContentType.objects.get_for_model(Keyword),
object_id=category.id,
target_system=self.name).first()
citysdk_event['category'].append(
{"id": exported_category.target_id})
if event.location:
exported_poi = ExportInfo.objects.filter(
content_type=ContentType.objects.get_for_model(Place),
object_id=event.location.id,
target_system=self.name).first()
citysdk_event['location'] = {
"relationship": [
{
"targetPOI": exported_poi.target_id,
"term": "equal",
"base": POIS_URL
}
]
}
citysdk_event['time'] = generate_icalendar_element(event)
# Translated fields are processed in a similar manner.
# The url/link is a bit of a different beast, as there's no actual lang
# definition in CitySDK for links
# -> just generate normal links for all lang versions
for from_field_name, to_field_name in [("description", "description"),
("name", "label"),
("info_url", "link")]:
citysdk_event[to_field_name] = []
for lang in [x[0] for x in settings.LANGUAGES]:
value = getattr(event, '%s_%s' % (from_field_name, lang))
if value:
lang_dict = {}
if to_field_name == 'link':
lang_dict['term'] = 'related' # something else?
lang_dict['href'] = value
lang_dict['type'] = 'text/html'
else:
lang_dict['value'] = value
lang_dict['lang'] = bcp47_lang_map[lang]
if to_field_name == 'label':
lang_dict['term'] = 'primary'
citysdk_event[to_field_name].append(lang_dict)
return citysdk_event
@staticmethod
def _generate_exportable_category(event):
citysdk_category = dict()
citysdk_category['author'] = CITYSDK_DEFAULT_AUTHOR
citysdk_category['lang'] = bcp47_lang_map[settings.LANGUAGES[0][0]]
citysdk_category['term'] = 'category'
to_field_name = 'label'
citysdk_category[to_field_name] = []
for lang in [x[0] for x in settings.LANGUAGES]:
value = getattr(event, '%s_%s' % ("name", lang))
if value:
lang_dict = {
'term': 'primary',
'value': value,
'lang': bcp47_lang_map[lang]
}
citysdk_category[to_field_name].append(lang_dict)
return citysdk_category
@staticmethod
def _generate_exportable_place(place):
citysdk_place = CITYSDK_POI_DEFAULTS_TPL.copy()
# Linked Events places don't have any categories yet,
# use default ID
citysdk_place['category'] = [{"id": DEFAULT_POI_CATEGORY}]
# Support for bboxes may come later; for now Point is the only
# possible value
if place.position:
matches = re.search('POINT \\((.*)\\)', place.position.wkt)
if matches:
coords = matches.group(1)
citysdk_place['location'] = {
"point": [
{
"Point": {
"posList": coords,
"srsName":
settings.CITYSDK_API_SETTINGS['SRS_URL']
},
"term": "entrance",
}
]
}
for from_field_name, to_field_name in [("description", "description"),
("name", "label")]:
citysdk_place[to_field_name] = []
for lang in [x[0] for x in settings.LANGUAGES]:
value = getattr(place, '%s_%s' % (from_field_name, lang))
if value:
lang_dict = {}
if to_field_name == 'link':
lang_dict['term'] = 'related' # something else?
lang_dict['href'] = value
lang_dict['type'] = 'text/html'
else:
lang_dict['value'] = value
lang_dict['lang'] = bcp47_lang_map[lang]
if to_field_name == 'label':
lang_dict['term'] = 'primary'
citysdk_place[to_field_name].append(lang_dict)
return citysdk_place
def _export_new(self):
self._export_categories()
self._export_places()
self._export_events()
def _export_models(self, klass, generate, url, json_wrapper,
extra_filters=None):
model_type = ContentType.objects.get_for_model(klass)
# get all exported
export_infos = ExportInfo.objects.filter(content_type=model_type,
target_system=self.name)
model_name = klass.__name__
modify_count = 0
delete_count = 0
new_count = 0
# deleted or modified
for export_info in export_infos:
try:
model = klass.objects.get(pk=export_info.object_id)
if model.last_modified_time > export_info.last_exported_time:
citysdk_model = generate(model)
citysdk_model['id'] = export_info.target_id
if model_name == 'Keyword':
data = {
'list': 'event',
'category': citysdk_model
}
else:
data = {json_wrapper: citysdk_model}
modify_response = self._do_req('post', url, data)
if modify_response.status_code == 200:
export_info.save() # refresh last export date
print("%s updated (original id: %s, target id: %s)" %
(model_name, model.pk, export_info.target_id))
modify_count += 1
except ObjectDoesNotExist:
if model_name == 'Keyword':
delete_response = self._do_req(
'delete', url,
data={"id": export_info.target_id}
)
else:
delete_response = self._do_req(
'delete', url + export_info.target_id)
if delete_response.status_code == 200:
export_info.delete()
print("%s removed (original id: %d, target id: %s) "
"from target system" %
(model_name, export_info.object_id,
export_info.target_id))
delete_count += 1
# new
imported = {'id__in': export_infos.values("object_id")}
if extra_filters:
qs = klass.objects.exclude(
**imported).filter(**extra_filters).distinct()
else:
qs = klass.objects.exclude(**imported)
for model in qs:
citysdk_model = generate(model)
citysdk_model['created'] = datetime.datetime.utcnow().replace(
tzinfo=pytz.utc)
if model_name == 'Keyword':
data = {
'list': 'event',
'category': citysdk_model
}
else:
data = {json_wrapper: citysdk_model}
new_response = self._do_req('put', url, data)
if new_response.status_code == 200:
new = new_response.json()
if isinstance(new, dict) and 'id' in new:
new_id = new['id']
else:
new_id = new
if VERBOSE:
print("%s exported (original id: %d, target id: %s)" %
(model_name, model.pk, new_id))
new_export_info = ExportInfo(content_object=model,
content_type=model_type,
target_id=new_id,
target_system=self.name)
new_export_info.save()
new_count += 1
else:
print("%s export failed (original id: %s)" %
(model_name, model.pk))
print(model_name + " items added: " + str(new_count))
print(model_name + " items modified: " + str(modify_count))
print(model_name + " items deleted: " + str(delete_count))
def _do_req(self, method, url, data=None):
kwargs = {
'headers': self.response_headers,
'cookies': self.session_cookies
}
if data:
kwargs['data'] = jsonize(data)
if DRY_RUN_MODE:
with HTTMock(citysdk_mock):
return requests.request(method, url, **kwargs)
else:
resp = requests.request(method, url, **kwargs)
# if session dies while doing exporting
if resp.status_code == 401:
self.authenticate()
return requests.request(method, url, **kwargs)
assert resp.status_code == 200
return resp
def _export_categories(self):
filters = {'event__in': Event.objects.all()}
self._export_models(Keyword, self._generate_exportable_category,
CATEGORY_URL, 'poi', extra_filters=filters)
def _export_places(self):
filters = {'event__in': Event.objects.all()}
self._export_models(Place, self._generate_exportable_place,
POIS_URL, 'poi', extra_filters=filters)
def _export_events(self):
self._export_models(Event, self._generate_exportable_event,
EVENTS_URL, 'event')
def __delete_resource(self, resource, url):
response = self._do_req('delete', '%s/%s' % (
url, resource.target_id))
if response.status_code == 200:
resource.delete()
print("%s removed (original id: %d, target id: %s) from "
"target system" %
(str(resource.content_type).capitalize(),
resource.object_id, resource.target_id))
def _delete_exported_from_target(self):
"""
Convenience method to delete everything previously exported to the
target API; avoid in production use.
"""
get_type = lambda klass: ContentType.objects.get_for_model(klass)
for event_info in ExportInfo.objects.filter(
content_type=get_type(Event),
target_system=self.name):
self.__delete_resource(event_info, EVENTS_URL)
for place_info in ExportInfo.objects.filter(
content_type=get_type(Place),
target_system=self.name):
self.__delete_resource(place_info, POIS_URL)
for category_info in ExportInfo.objects.filter(
content_type=get_type(Keyword),
target_system=self.name):
try:
category_response = self._do_req(
'delete', CATEGORY_URL + 'event',
data={"id": category_info.target_id}
)
if category_response.status_code == 200:
category_info.delete()
print("Category removed (original id: %d, target id: %s) "
"from target system" %
(category_info.object_id, category_info.target_id))
except ObjectDoesNotExist:
print("ERROR: Category (original id: %d) "
"does not exist in local database" %
category_info.object_id)
def export_events(self, is_delete=False):
if is_delete:
self._delete_exported_from_target()
else:
self._export_new()
# For dry run request mocking
@all_requests
def citysdk_mock(url, request):
foo = 'foo'
if request.method == 'PUT':
data = '"'+foo+'"'
else:
data = {'id': str(foo)}
headers = {'content-type': 'application/json'}
return response(200, data, headers, None, 0, request)
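# --- Usage sketch (editorial addition): what the dry-run mock above answers,
# i.e. what _do_req sees when DRY_RUN_MODE is on. The URL is a placeholder.
def _mock_example():  # pragma: no cover
with HTTMock(citysdk_mock):
put_resp = requests.put('http://citysdk.invalid/pois/')
get_resp = requests.get('http://citysdk.invalid/pois/')
return put_resp.json(), get_resp.json()  # 'foo', {'id': 'foo'}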
|
|
import textwrap
import warnings
import inspect
import uuid
import collections
from .compat import callable, exec_, string_types, with_metaclass
from sqlalchemy.util import format_argspec_plus, update_wrapper
from sqlalchemy.util.compat import inspect_getfullargspec
class _ModuleClsMeta(type):
def __setattr__(cls, key, value):
super(_ModuleClsMeta, cls).__setattr__(key, value)
cls._update_module_proxies(key)
class ModuleClsProxy(with_metaclass(_ModuleClsMeta)):
"""Create module level proxy functions for the
methods on a given class.
The functions will have signatures compatible with
those of the methods.
"""
_setups = collections.defaultdict(lambda: (set(), []))
@classmethod
def _update_module_proxies(cls, name):
attr_names, modules = cls._setups[cls]
for globals_, locals_ in modules:
cls._add_proxied_attribute(name, globals_, locals_, attr_names)
def _install_proxy(self):
attr_names, modules = self._setups[self.__class__]
for globals_, locals_ in modules:
globals_['_proxy'] = self
for attr_name in attr_names:
globals_[attr_name] = getattr(self, attr_name)
def _remove_proxy(self):
attr_names, modules = self._setups[self.__class__]
for globals_, locals_ in modules:
globals_['_proxy'] = None
for attr_name in attr_names:
del globals_[attr_name]
@classmethod
def create_module_class_proxy(cls, globals_, locals_):
attr_names, modules = cls._setups[cls]
modules.append(
(globals_, locals_)
)
cls._setup_proxy(globals_, locals_, attr_names)
@classmethod
def _setup_proxy(cls, globals_, locals_, attr_names):
for methname in dir(cls):
cls._add_proxied_attribute(methname, globals_, locals_, attr_names)
@classmethod
def _add_proxied_attribute(cls, methname, globals_, locals_, attr_names):
if not methname.startswith('_'):
meth = getattr(cls, methname)
if callable(meth):
locals_[methname] = cls._create_method_proxy(
methname, globals_, locals_)
else:
attr_names.add(methname)
@classmethod
def _create_method_proxy(cls, name, globals_, locals_):
fn = getattr(cls, name)
spec = inspect.getargspec(fn)
if spec[0] and spec[0][0] == 'self':
spec[0].pop(0)
args = inspect.formatargspec(*spec)
num_defaults = 0
if spec[3]:
num_defaults += len(spec[3])
name_args = spec[0]
if num_defaults:
defaulted_vals = name_args[0 - num_defaults:]
else:
defaulted_vals = ()
apply_kw = inspect.formatargspec(
name_args, spec[1], spec[2],
defaulted_vals,
formatvalue=lambda x: '=' + x)
def _name_error(name):
raise NameError(
"Can't invoke function '%s', as the proxy object has "
"not yet been "
"established for the Alembic '%s' class. "
"Try placing this code inside a callable." % (
name, cls.__name__
))
globals_['_name_error'] = _name_error
translations = getattr(fn, "_legacy_translations", [])
if translations:
outer_args = inner_args = "*args, **kw"
translate_str = "args, kw = _translate(%r, %r, %r, args, kw)" % (
fn.__name__,
tuple(spec),
translations
)
def translate(fn_name, spec, translations, args, kw):
return_kw = {}
return_args = []
for oldname, newname in translations:
if oldname in kw:
warnings.warn(
"Argument %r is now named %r "
"for method %s()." % (
oldname, newname, fn_name
))
return_kw[newname] = kw.pop(oldname)
return_kw.update(kw)
args = list(args)
if spec[3]:
pos_only = spec[0][:-len(spec[3])]
else:
pos_only = spec[0]
for arg in pos_only:
if arg not in return_kw:
try:
return_args.append(args.pop(0))
except IndexError:
raise TypeError(
"missing required positional argument: %s"
% arg)
return_args.extend(args)
return return_args, return_kw
globals_['_translate'] = translate
else:
outer_args = args[1:-1]
inner_args = apply_kw[1:-1]
translate_str = ""
func_text = textwrap.dedent("""\
def %(name)s(%(args)s):
%(doc)r
%(translate)s
try:
p = _proxy
except NameError:
_name_error('%(name)s')
return _proxy.%(name)s(%(apply_kw)s)
""" % {
'name': name,
'translate': translate_str,
'args': outer_args,
'apply_kw': inner_args,
'doc': fn.__doc__,
})
lcl = {}
exec_(func_text, globals_, lcl)
return lcl[name]
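# --- Usage sketch (editorial addition): the module-proxy pattern above is
# what lets a module re-export an object's methods as module-level functions
# (as alembic.op does). The Ops class and the dict standing in for module
# globals are illustrative.
def _module_proxy_example():  # pragma: no cover
class Ops(ModuleClsProxy):
def add_column(self, table, column):
return (table, column)
ns = {}
Ops.create_module_class_proxy(ns, ns)  # writes proxy functions into ns
Ops()._install_proxy()  # binds a live instance as ns['_proxy']
return ns['add_column']('account', 'name')  # -> ('account', 'name')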
def _with_legacy_names(translations):
def decorate(fn):
fn._legacy_translations = translations
return fn
return decorate
def asbool(value):
return value is not None and \
value.lower() == 'true'
def rev_id():
val = int(uuid.uuid4()) % 100000000000000
return hex(val)[2:-1]
def to_list(x, default=None):
if x is None:
return default
elif isinstance(x, string_types):
return [x]
elif isinstance(x, collections.Iterable):
return list(x)
else:
return [x]
def to_tuple(x, default=None):
if x is None:
return default
elif isinstance(x, string_types):
return (x, )
elif isinstance(x, collections.Iterable):
return tuple(x)
else:
return (x, )
def unique_list(seq, hashfunc=None):
seen = set()
seen_add = seen.add
if not hashfunc:
return [x for x in seq
if x not in seen
and not seen_add(x)]
else:
return [x for x in seq
if hashfunc(x) not in seen
and not seen_add(hashfunc(x))]
def dedupe_tuple(tup):
return tuple(unique_list(tup))
class memoized_property(object):
"""A read-only @property that is only evaluated once."""
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
def __get__(self, obj, cls):
if obj is None:
return self
obj.__dict__[self.__name__] = result = self.fget(obj)
return result
class immutabledict(dict):
def _immutable(self, *arg, **kw):
raise TypeError("%s object is immutable" % self.__class__.__name__)
__delitem__ = __setitem__ = __setattr__ = \
clear = pop = popitem = setdefault = \
update = _immutable
def __new__(cls, *args):
new = dict.__new__(cls)
dict.__init__(new, *args)
return new
def __init__(self, *args):
pass
def __reduce__(self):
return immutabledict, (dict(self), )
def union(self, d):
if not self:
return immutabledict(d)
else:
d2 = immutabledict(self)
dict.update(d2, d)
return d2
def __repr__(self):
return "immutabledict(%s)" % dict.__repr__(self)
class Dispatcher(object):
def __init__(self, uselist=False):
self._registry = {}
self.uselist = uselist
def dispatch_for(self, target, qualifier='default'):
def decorate(fn):
if self.uselist:
self._registry.setdefault((target, qualifier), []).append(fn)
else:
assert (target, qualifier) not in self._registry
self._registry[(target, qualifier)] = fn
return fn
return decorate
def dispatch(self, obj, qualifier='default'):
if isinstance(obj, string_types):
targets = [obj]
elif isinstance(obj, type):
targets = obj.__mro__
else:
targets = type(obj).__mro__
for spcls in targets:
if qualifier != 'default' and (spcls, qualifier) in self._registry:
return self._fn_or_list(
self._registry[(spcls, qualifier)])
elif (spcls, 'default') in self._registry:
return self._fn_or_list(
self._registry[(spcls, 'default')])
else:
raise ValueError("no dispatch function for object: %s" % obj)
def _fn_or_list(self, fn_or_list):
if self.uselist:
def go(*arg, **kw):
for fn in fn_or_list:
fn(*arg, **kw)
return go
else:
return fn_or_list
def branch(self):
"""Return a copy of this dispatcher that is independently
writable."""
d = Dispatcher()
if self.uselist:
d._registry.update(
(k, [fn for fn in self._registry[k]])
for k in self._registry
)
else:
d._registry.update(self._registry)
return d
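# --- Usage sketch (editorial addition): Dispatcher resolves a handler by
# walking the target's MRO; the registered type and handler are illustrative.
def _dispatcher_example():  # pragma: no cover
dispatch = Dispatcher()
@dispatch.dispatch_for(int)
def handle_int(value):
return "int: %r" % value
fn = dispatch.dispatch(5)  # resolves via type(5).__mro__
return fn(5)  # -> "int: 5"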
|
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from email.Message import Message
from email.Utils import formatdate
from zope.interface import implements
from twisted.internet import defer
from buildbot import interfaces
from buildbot.status import mail
from buildbot.status.results import SUCCESS, WARNINGS, EXCEPTION, RETRY
from buildbot.steps.shell import WithProperties
import gzip, bz2, base64, re, cStringIO
# TODO: docs, maybe a test of some sort just to make sure it actually imports
# and can format email without raising an exception.
class TinderboxMailNotifier(mail.MailNotifier):
"""This is a Tinderbox status notifier. It can send e-mail to a number of
different tinderboxes or people. E-mails are sent at the beginning and
upon completion of each build. It can be configured to send out e-mails
for only certain builds.
The most basic usage is as follows::
TinderboxMailNotifier(fromaddr="buildbot@localhost",
tree="MyTinderboxTree",
extraRecipients=["tinderboxdaemon@host.org"])
The builder name (as specified in master.cfg) is used as the "build"
tinderbox option.
"""
implements(interfaces.IEmailSender)
compare_attrs = ["extraRecipients", "fromaddr", "categories", "builders",
"addLogs", "relayhost", "subject", "binaryURL", "tree",
"logCompression", "errorparser", "columnName",
"useChangeTime"]
def __init__(self, fromaddr, tree, extraRecipients,
categories=None, builders=None, relayhost="localhost",
subject="buildbot %(result)s in %(builder)s", binaryURL="",
logCompression="", errorparser="unix", columnName=None,
useChangeTime=False):
"""
@type fromaddr: string
@param fromaddr: the email address to be used in the 'From' header.
@type tree: string
@param tree: The Tinderbox tree to post to.
When tree is a WithProperties instance it will be
interpolated as such. See WithProperties for more detail
@type extraRecipients: tuple of string
@param extraRecipients: E-mail addresses of recipients. This should at
least include the tinderbox daemon.
@type categories: list of strings
@param categories: a list of category names to serve status
information for. Defaults to None (all
categories). Use either builders or categories,
but not both.
@type builders: list of strings
@param builders: a list of builder names for which mail should be
sent. Defaults to None (send mail for all builds).
Use either builders or categories, but not both.
@type relayhost: string
@param relayhost: the host to which the outbound SMTP connection
should be made. Defaults to 'localhost'
@type subject: string
@param subject: a string to be used as the subject line of the message.
%(builder)s will be replaced with the name of the
builder which provoked the message.
This parameter is not significant for the tinderbox
daemon.
@type binaryURL: string
@param binaryURL: If specified, this should be the location where final
binary for a build is located.
(ie. http://www.myproject.org/nightly/08-08-2006.tgz)
It will be posted to the Tinderbox.
@type logCompression: string
@param logCompression: The type of compression to use on the log.
Valid options are"bzip2" and "gzip". gzip is
only known to work on Python 2.4 and above.
@type errorparser: string
@param errorparser: The error parser that the Tinderbox server
should use when scanning the log file.
Default is "unix".
@type columnName: string
@param columnName: When columnName is None, use the buildername as
the Tinderbox column name. When columnName is a
string this exact string will be used for all
builders that this TinderboxMailNotifier cares
about (not recommended). When columnName is a
WithProperties instance it will be interpolated
as such. See WithProperties for more detail.
@type useChangeTime: bool
@param useChangeTime: When True, the time of the first Change for a
build is used as the builddate. When False,
the current time is used as the builddate.
"""
mail.MailNotifier.__init__(self, fromaddr, categories=categories,
builders=builders, relayhost=relayhost,
subject=subject,
extraRecipients=extraRecipients,
sendToInterestedUsers=False)
assert isinstance(tree, basestring) \
or isinstance(tree, WithProperties), \
"tree must be a string or a WithProperties instance"
self.tree = tree
self.binaryURL = binaryURL
self.logCompression = logCompression
self.errorparser = errorparser
self.useChangeTime = useChangeTime
assert columnName is None or type(columnName) is str \
or isinstance(columnName, WithProperties), \
"columnName must be None, a string, or a WithProperties instance"
self.columnName = columnName
def buildStarted(self, name, build):
builder = build.getBuilder()
if self.builders is not None and name not in self.builders:
return # ignore this Build
if self.categories is not None and \
builder.category not in self.categories:
return # ignore this build
self.buildMessage(name, build, "building")
def buildMessage(self, name, build, results):
text = ""
res = ""
# shortform
t = "tinderbox:"
if type(self.tree) is str:
# use the exact string given
text += "%s tree: %s\n" % (t, self.tree)
elif isinstance(self.tree, WithProperties):
# interpolate the WithProperties instance, use that
text += "%s tree: %s\n" % (t, build.render(self.tree))
else:
raise Exception("tree is an unhandled value")
# the start time
# getTimes() returns a fractional time that tinderbox doesn't understand
builddate = int(build.getTimes()[0])
# attempt to pull a Change time from this Build's Changes.
# if that doesn't work, fall back on the current time
if self.useChangeTime:
try:
builddate = build.getChanges()[-1].when
except:
pass
text += "%s builddate: %s\n" % (t, builddate)
text += "%s status: " % t
if results == "building":
res = "building"
text += res
elif results == SUCCESS:
res = "success"
text += res
elif results == WARNINGS:
res = "testfailed"
text += res
elif results in (EXCEPTION, RETRY):
res = "exception"
text += res
else:
res += "busted"
text += res
text += "\n";
if self.columnName is None:
# use the builder name
text += "%s build: %s\n" % (t, name)
elif type(self.columnName) is str:
# use the exact string given
text += "%s build: %s\n" % (t, self.columnName)
elif isinstance(self.columnName, WithProperties):
# interpolate the WithProperties instance, use that
text += "%s build: %s\n" % (t, build.render(self.columnName))
else:
raise Exception("columnName is an unhandled value")
text += "%s errorparser: %s\n" % (t, self.errorparser)
# if the build just started...
if results == "building":
text += "%s END\n" % t
# if the build finished...
else:
text += "%s binaryurl: %s\n" % (t, self.binaryURL)
text += "%s logcompression: %s\n" % (t, self.logCompression)
# logs will always be appended
logEncoding = ""
tinderboxLogs = ""
for bs in build.getSteps():
# Make sure that shortText is a regular string, so that bad
# data in the logs doesn't generate UnicodeDecodeErrors
shortText = "%s\n" % ' '.join(bs.getText()).encode('ascii', 'replace')
# ignore steps that haven't happened
if not re.match(".*[^\s].*", shortText):
continue
# we ignore TinderboxPrint's here so we can do things like:
# ShellCommand(command=['echo', 'TinderboxPrint:', ...])
if re.match(".+TinderboxPrint.*", shortText):
shortText = shortText.replace("TinderboxPrint",
"Tinderbox Print")
logs = bs.getLogs()
tinderboxLogs += "======== BuildStep started ========\n"
tinderboxLogs += shortText
tinderboxLogs += "=== Output ===\n"
for log in logs:
logText = log.getTextWithHeaders()
# Because we pull in the log headers here we have to ignore
# some of them. Basically, if we're TinderboxPrint'ing in
# a ShellCommand, the only valid one(s) are at the start
of a line. The others are preceded by whitespace, quotes,
# or brackets/parentheses
for line in logText.splitlines():
if re.match(".+TinderboxPrint.*", line):
line = line.replace("TinderboxPrint",
"Tinderbox Print")
tinderboxLogs += line + "\n"
tinderboxLogs += "=== Output ended ===\n"
tinderboxLogs += "======== BuildStep ended ========\n"
if self.logCompression == "bzip2":
cLog = bz2.compress(tinderboxLogs)
tinderboxLogs = base64.encodestring(cLog)
logEncoding = "base64"
elif self.logCompression == "gzip":
cLog = cStringIO.StringIO()
gz = gzip.GzipFile(mode="w", fileobj=cLog)
gz.write(tinderboxLogs)
gz.close()
cLog = cLog.getvalue()
tinderboxLogs = base64.encodestring(cLog)
logEncoding = "base64"
text += "%s logencoding: %s\n" % (t, logEncoding)
text += "%s END\n\n" % t
text += tinderboxLogs
text += "\n"
m = Message()
m.set_payload(text)
m['Date'] = formatdate(localtime=True)
m['Subject'] = self.subject % { 'result': res,
'builder': name,
}
m['From'] = self.fromaddr
# m['To'] is added later
d = defer.DeferredList([])
d.addCallback(self._gotRecipients, self.extraRecipients, m)
return d
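# --- Configuration sketch (editorial addition): a master.cfg-style setup of
# the notifier above with log compression enabled; the tree name and the
# recipient address are placeholders.
def _example_notifier():  # pragma: no cover
return TinderboxMailNotifier(
fromaddr="buildbot@localhost",
tree="MyTinderboxTree",
extraRecipients=["tinderboxdaemon@example.org"],
logCompression="bzip2",
useChangeTime=True)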
|
|
import sys
import re
import textwrap
from doctest import OutputChecker, ELLIPSIS
from tests.test_pip import reset_env, run_pip, write_file, get_env, pyversion
from tests.local_repos import local_checkout, local_repo
distribute_re = re.compile('^distribute==[0-9.]+\n', re.MULTILINE)
def _check_output(result, expected):
checker = OutputChecker()
actual = str(result)
## FIXME! The following is a TOTAL hack. For some reason the
## __str__ result for pkg_resources.Requirement gets downcased on
## Windows. Since INITools is the only package we're installing
## in this file with funky case requirements, I'm forcibly
## upcasing it. You can also normalize everything to lowercase,
## but then you have to remember to upcase <BLANKLINE>. The right
## thing to do in the end is probably to find out how to report
## the proper fully-cased package name in our error message.
if sys.platform == 'win32':
actual = actual.replace('initools', 'INITools')
# This allows our existing tests to work when run in a context
# with distribute installed.
actual = distribute_re.sub('', actual)
def banner(msg):
return '\n========== %s ==========\n' % msg
assert checker.check_output(expected, actual, ELLIPSIS), banner('EXPECTED')+expected+banner('ACTUAL')+actual+banner(6*'=')
def test_freeze_basic():
"""
Some tests of freeze, first we have to install some stuff. Note that
the test is a little crude at the end because Python 2.5+ adds egg
info to the standard library, so stuff like wsgiref will show up in
the freeze output. (Probably that should be accounted for in pip, but
currently it is not).
"""
env = reset_env()
write_file('initools-req.txt', textwrap.dedent("""\
INITools==0.2
# and something else to test out:
MarkupSafe<=0.12
"""))
result = run_pip('install', '-r', env.scratch_path/'initools-req.txt')
result = run_pip('freeze', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: pip freeze
-- stdout: --------------------
INITools==0.2
MarkupSafe==0.12...
<BLANKLINE>""")
_check_output(result, expected)
def test_freeze_svn():
"""Now lets try it with an svn checkout"""
env = reset_env()
result = env.run('svn', 'co', '-r10',
local_repo('svn+http://svn.colorstudy.com/INITools/trunk'),
'initools-trunk')
result = env.run('python', 'setup.py', 'develop',
cwd=env.scratch_path/ 'initools-trunk', expect_stderr=True)
result = run_pip('freeze', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze
-- stdout: --------------------
-e %s@10#egg=INITools-0.3.1dev...-dev_r10
...""" % local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'))
_check_output(result, expected)
def test_freeze_git_clone():
"""
Test freezing a Git clone.
"""
env = reset_env()
result = env.run('git', 'clone', local_repo('git+http://github.com/pypa/pip-test-package.git'), 'pip-test-package')
result = env.run('git', 'checkout', '7d654e66c8fa7149c165ddeffa5b56bc06619458',
cwd=env.scratch_path / 'pip-test-package', expect_stderr=True)
result = env.run('python', 'setup.py', 'develop',
cwd=env.scratch_path / 'pip-test-package')
result = run_pip('freeze', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze
-- stdout: --------------------
...-e %s@...#egg=pip_test_package-...
...""" % local_checkout('git+http://github.com/pypa/pip-test-package.git'))
_check_output(result, expected)
result = run_pip('freeze', '-f',
'%s#egg=pip_test_package' % local_checkout('git+http://github.com/pypa/pip-test-package.git'),
expect_stderr=True)
expected = textwrap.dedent("""\
Script result: pip freeze -f %(repo)s#egg=pip_test_package
-- stdout: --------------------
-f %(repo)s#egg=pip_test_package...
-e %(repo)s@...#egg=pip_test_package-dev
...""" % {'repo': local_checkout('git+http://github.com/pypa/pip-test-package.git')})
_check_output(result, expected)
def test_freeze_mercurial_clone():
"""
Test freezing a Mercurial clone.
"""
reset_env()
env = get_env()
result = env.run('hg', 'clone',
'-r', '7bc186caa7dc',
local_repo('hg+http://bitbucket.org/jezdez/django-authority'),
'django-authority')
result = env.run('python', 'setup.py', 'develop',
cwd=env.scratch_path/'django-authority', expect_stderr=True)
result = run_pip('freeze', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze
-- stdout: --------------------
...-e %s@...#egg=django_authority-...
...""" % local_checkout('hg+http://bitbucket.org/jezdez/django-authority'))
_check_output(result, expected)
result = run_pip('freeze', '-f',
'%s#egg=django_authority' % local_checkout('hg+http://bitbucket.org/jezdez/django-authority'),
expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze -f %(repo)s#egg=django_authority
-- stdout: --------------------
-f %(repo)s#egg=django_authority
...-e %(repo)s@...#egg=django_authority-dev
...""" % {'repo': local_checkout('hg+http://bitbucket.org/jezdez/django-authority')})
_check_output(result, expected)
def test_freeze_bazaar_clone():
"""
Test freezing a Bazaar clone.
"""
reset_env()
env = get_env()
result = env.run('bzr', 'checkout', '-r', '174',
local_repo('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'),
'django-wikiapp')
result = env.run('python', 'setup.py', 'develop',
cwd=env.scratch_path/'django-wikiapp')
result = run_pip('freeze', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze
-- stdout: --------------------
...-e %s@...#egg=django_wikiapp-...
...""" % local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'))
_check_output(result, expected)
result = run_pip('freeze', '-f',
'%s/#egg=django-wikiapp' %
local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'),
expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze -f %(repo)s/#egg=django-wikiapp
-- stdout: --------------------
-f %(repo)s/#egg=django-wikiapp
...-e %(repo)s@...#egg=django_wikiapp-...
...""" % {'repo':
local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1')})
_check_output(result, expected)
def test_freeze_with_local_option():
"""
Test that wsgiref (from global site-packages) is reported normally, but not with --local.
"""
reset_env()
result = run_pip('install', 'initools==0.2')
result = run_pip('freeze', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze
-- stdout: --------------------
INITools==0.2
wsgiref==...
<BLANKLINE>""")
# The following check is broken (see
# http://bitbucket.org/ianb/pip/issue/110). For now we are simply
# neutering this test, but if we can't find a way to fix it,
# this whole function should be removed.
# _check_output(result, expected)
result = run_pip('freeze', '--local', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: ...pip freeze --local
-- stdout: --------------------
INITools==0.2
<BLANKLINE>""")
_check_output(result, expected)
def test_freeze_with_requirement_option():
"""
Test that new requirements are created correctly with --requirement hints
"""
reset_env()
ignores = textwrap.dedent("""\
# Unchanged requirements below this line
-r ignore.txt
--requirement ignore.txt
-Z ignore
--always-unzip ignore
-f http://ignore
-i http://ignore
--extra-index-url http://ignore
--find-links http://ignore
--index-url http://ignore
""")
write_file('hint.txt', textwrap.dedent("""\
INITools==0.1
NoExist==4.2
""") + ignores)
result = run_pip('install', 'initools==0.2')
result = run_pip('install', 'MarkupSafe')
result = run_pip('freeze', '--requirement', 'hint.txt', expect_stderr=True)
expected = textwrap.dedent("""\
Script result: pip freeze --requirement hint.txt
-- stderr: --------------------
Requirement file contains NoExist==4.2, but that package is not installed
-- stdout: --------------------
INITools==0.2
""") + ignores + "## The following requirements were added by pip --freeze:..."
_check_output(result, expected)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from saml2 import BINDING_HTTP_REDIRECT, BINDING_SOAP, BINDING_HTTP_POST
from saml2.config import SPConfig, IdPConfig, Config
from saml2.metadata import MetaData
from py.test import raises
from saml2 import root_logger
sp1 = {
"entityid" : "urn:mace:umu.se:saml:roland:sp",
"service": {
"sp": {
"endpoints" : {
"assertion_consumer_service" : ["http://lingon.catalogix.se:8087/"],
},
"name": "test",
"idp" : {
"urn:mace:example.com:saml:roland:idp": {'single_sign_on_service':
{'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect':
'http://localhost:8088/sso/'}},
}
}
},
"key_file" : "mykey.pem",
"cert_file" : "mycert.pem",
#"xmlsec_binary" : "/opt/local/bin/xmlsec1",
"metadata": {
"local": ["metadata.xml",
"urn-mace-swami.se-swamid-test-1.0-metadata.xml"],
},
"virtual_organization" : {
"coip":{
"nameid_format" : "urn:oasis:names:tc:SAML:2.0:nameid-format:transient",
"common_identifier": "eduPersonPrincipalName",
"attribute_auth": [
"https://coip-test.sunet.se/idp/shibboleth",
]
}
},
"attribute_map_dir": "attributemaps",
"only_use_keys_in_metadata": True,
}
sp2 = {
"entityid" : "urn:mace:umu.se:saml:roland:sp",
"name" : "Rolands SP",
"service": {
"sp": {
"endpoints" : {
"assertion_consumer_service" : ["http://lingon.catalogix.se:8087/"],
},
"required_attributes": ["surName", "givenName", "mail"],
"optional_attributes": ["title"],
"idp": {
"" : "https://example.com/saml2/idp/SSOService.php",
}
}
},
#"xmlsec_binary" : "/opt/local/bin/xmlsec1",
}
IDP1 = {
"entityid" : "urn:mace:umu.se:saml:roland:idp",
"name" : "Rolands IdP",
"service": {
"idp": {
"endpoints": {
"single_sign_on_service" : ["http://localhost:8088/"],
},
"policy": {
"default": {
"attribute_restrictions": {
"givenName": None,
"surName": None,
"eduPersonAffiliation": ["(member|staff)"],
"mail": [".*@example.com"],
}
},
"urn:mace:umu.se:saml:roland:sp": None
},
}
},
#"xmlsec_binary" : "/usr/local/bin/xmlsec1",
}
IDP2 = {
"entityid" : "urn:mace:umu.se:saml:roland:idp",
"name" : "Rolands IdP",
"service": {
"idp": {
"endpoints": {
"single_sign_on_service" : ["http://localhost:8088/"],
"single_logout_service" : [("http://localhost:8088/", BINDING_HTTP_REDIRECT)],
},
"policy":{
"default": {
"attribute_restrictions": {
"givenName": None,
"surName": None,
"eduPersonAffiliation": ["(member|staff)"],
"mail": [".*@example.com"],
}
},
"urn:mace:umu.se:saml:roland:sp": None
},
}
},
#"xmlsec_binary" : "/usr/local/bin/xmlsec1",
}
PDP = {
"entityid" : "http://example.org/pysaml2/pdp",
"name" : "Rolands PdP",
"service": {
"pdp": {
"endpoints": {
"authz_service" : [("http://example.org/pysaml2/pdp/authz",
BINDING_SOAP)],
},
}
},
"key_file" : "test.key",
"cert_file" : "test.pem",
"organization": {
"name": "Exempel AB",
"display_name": [("Exempel AB","se"),("Example Co.","en")],
"url":"http://www.example.com/roland",
},
"contact_person": [{
"given_name":"John",
"sur_name": "Smith",
"email_address": ["john.smith@example.com"],
"contact_type": "technical",
},
],
}
ECP_SP = {
"entityid" : "urn:mace:umu.se:saml:roland:ecpsp",
"name" : "Rolands ECP_SP",
"service": {
"sp": {
"endpoints" : {
"assertion_consumer_service" : ["http://lingon.catalogix.se:8087/"],
},
"ecp" : {
"130.239.": "http://example.com/idp",
}
}
},
#"xmlsec_binary" : "/opt/local/bin/xmlsec1",
}
def _eq(l1,l2):
return set(l1) == set(l2)
def test_1():
c = SPConfig().load(sp1)
c.context = "sp"
print c
assert c.endpoints
assert c.name
assert c.idp
md = c.metadata
assert isinstance(md, MetaData)
assert len(c.idp) == 1
assert c.idp.keys() == ["urn:mace:example.com:saml:roland:idp"]
assert c.idp.values() == [{'single_sign_on_service':
{'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect':
'http://localhost:8088/sso/'}}]
assert c.only_use_keys_in_metadata
def test_2():
c = SPConfig().load(sp2)
c.context = "sp"
print c
assert c.endpoints
assert c.idp
assert c.optional_attributes
assert c.name
assert c.required_attributes
assert len(c.idp) == 1
assert c.idp.keys() == [""]
assert c.idp.values() == ["https://example.com/saml2/idp/SSOService.php"]
assert c.only_use_keys_in_metadata is None
def test_minimum():
minimum = {
"entityid" : "urn:mace:example.com:saml:roland:sp",
"service": {
"sp": {
"endpoints" : {
"assertion_consumer_service" : ["http://sp.example.org/"],
},
"name" : "test",
"idp": {
"" : "https://example.com/idp/SSOService.php",
},
}
},
#"xmlsec_binary" : "/usr/local/bin/xmlsec1",
}
c = SPConfig().load(minimum)
c.context = "sp"
assert c is not None
def test_idp_1():
c = IdPConfig().load(IDP1)
c.context = "idp"
print c
assert c.endpoint("single_sign_on_service")[0] == 'http://localhost:8088/'
attribute_restrictions = c.policy.get_attribute_restriction("")
assert attribute_restrictions["eduPersonAffiliation"][0].match("staff")
def test_idp_2():
c = IdPConfig().load(IDP2)
c.context = "idp"
print c
assert c.endpoint("single_logout_service",
BINDING_SOAP) == []
assert c.endpoint("single_logout_service",
BINDING_HTTP_REDIRECT) == ["http://localhost:8088/"]
attribute_restrictions = c.policy.get_attribute_restriction("")
assert attribute_restrictions["eduPersonAffiliation"][0].match("staff")
def test_wayf():
c = SPConfig().load_file("server_conf")
c.context = "sp"
idps = c.idps()
assert idps == {'urn:mace:example.com:saml:roland:idp': 'Example Co.'}
idps = c.idps(["se","en"])
assert idps == {'urn:mace:example.com:saml:roland:idp': 'Exempel AB'}
c.setup_logger()
assert root_logger.level != logging.NOTSET
assert root_logger.level == logging.WARNING
assert len(root_logger.handlers) == 1
assert isinstance(root_logger.handlers[0],
logging.handlers.RotatingFileHandler)
handler = root_logger.handlers[0]
assert handler.backupCount == 5
assert handler.maxBytes == 100000
assert handler.mode == "a"
assert root_logger.name == "pySAML2"
assert root_logger.level == 30
def test_conf_syslog():
c = SPConfig().load_file("server_conf_syslog")
c.context = "sp"
# otherwise the logger setting is not changed
root_logger.level = logging.NOTSET
root_logger.handlers = []
print c.logger
c.setup_logger()
assert root_logger.level != logging.NOTSET
assert root_logger.level == logging.INFO
assert len(root_logger.handlers) == 1
assert isinstance(root_logger.handlers[0],
logging.handlers.SysLogHandler)
handler = root_logger.handlers[0]
print handler.__dict__
assert handler.facility == "local3"
assert handler.address == ('localhost', 514)
    if sys.version_info >= (2, 7):
assert handler.socktype == 2
else:
pass
assert root_logger.name == "pySAML2"
assert root_logger.level == 20
#noinspection PyUnresolvedReferences
def test_3():
cnf = Config()
cnf.load_file("sp_1_conf")
assert cnf.entityid == "urn:mace:example.com:saml:roland:sp"
assert cnf.debug == 1
assert cnf.key_file == "test.key"
assert cnf.cert_file == "test.pem"
#assert cnf.xmlsec_binary == "/usr/local/bin/xmlsec1"
assert cnf.accepted_time_diff == 60
assert cnf.secret == "0123456789"
assert cnf.metadata is not None
assert cnf.attribute_converters is not None
def test_sp():
cnf = SPConfig()
cnf.load_file("sp_1_conf")
assert cnf.single_logout_services("urn:mace:example.com:saml:roland:idp",
BINDING_HTTP_POST) == ["http://localhost:8088/slo"]
assert cnf.endpoint("assertion_consumer_service") == \
["http://lingon.catalogix.se:8087/"]
assert len(cnf.idps()) == 1
def test_dual():
cnf = Config().load_file("idp_sp_conf")
assert cnf.serves() == ["sp", "idp"]
spcnf = cnf.copy_into("sp")
assert isinstance(spcnf, SPConfig)
assert spcnf.context == "sp"
idpcnf = cnf.copy_into("idp")
assert isinstance(idpcnf, IdPConfig)
assert idpcnf.context == "idp"
def test_ecp():
cnf = SPConfig()
cnf.load(ECP_SP)
assert cnf.endpoint("assertion_consumer_service") == \
["http://lingon.catalogix.se:8087/"]
eid = cnf.ecp_endpoint("130.239.16.3")
assert eid == "http://example.com/idp"
eid = cnf.ecp_endpoint("130.238.20.20")
assert eid is None
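# Note: as the two assertions above demonstrate, ecp_endpoint() matches the
# client IP against the keys of the "ecp" dict by string prefix, so
# "130.239.16.3" resolves via the "130.239." key while "130.238.20.20"
# matches nothing and returns None.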
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import json
import os
import random as _random
import sys
import traceback
from getopt import getopt, GetoptError
from multiprocessing import Process
from os import environ
from wsgiref.simple_server import make_server
import requests as _requests
from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError, \
JSONRPCError, InvalidRequestError
from jsonrpcbase import ServerError as JSONServerError
from biokbase import log
from kb_gblocks.authclient import KBaseAuth as _KBaseAuth
try:
from ConfigParser import ConfigParser
except ImportError:
from configparser import ConfigParser
DEPLOY = 'KB_DEPLOYMENT_CONFIG'
SERVICE = 'KB_SERVICE_NAME'
AUTH = 'auth-service-url'
# Note that the error fields do not match the 2.0 JSONRPC spec
def get_config_file():
return environ.get(DEPLOY, None)
def get_service_name():
return environ.get(SERVICE, None)
def get_config():
if not get_config_file():
return None
retconfig = {}
config = ConfigParser()
config.read(get_config_file())
for nameval in config.items(get_service_name() or 'kb_gblocks'):
retconfig[nameval[0]] = nameval[1]
return retconfig
config = get_config()
from kb_gblocks.kb_gblocksImpl import kb_gblocks # noqa @IgnorePep8
impl_kb_gblocks = kb_gblocks(config)
class JSONObjectEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, frozenset):
return list(obj)
if hasattr(obj, 'toJSONable'):
return obj.toJSONable()
return json.JSONEncoder.default(self, obj)
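# Illustrative use of the encoder above (set element order is not guaranteed):
#   json.dumps({'ids': set([2, 1])}, cls=JSONObjectEncoder)
#   -> something like '{"ids": [1, 2]}'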
class JSONRPCServiceCustom(JSONRPCService):
def call(self, ctx, jsondata):
"""
Calls jsonrpc service's method and returns its return value in a JSON
string or None if there is none.
Arguments:
jsondata -- remote method call in jsonrpc format
"""
result = self.call_py(ctx, jsondata)
if result is not None:
return json.dumps(result, cls=JSONObjectEncoder)
return None
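    # Usage sketch (illustrative; mirrors how the Application class below
    # registers and dispatches methods):
    #   svc = JSONRPCServiceCustom()
    #   svc.add(impl_kb_gblocks.status, name='kb_gblocks.status', types=[dict])
    #   svc.call(ctx, {'version': '1.1', 'id': '1',
    #                  'method': 'kb_gblocks.status', 'params': [{}]})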
def _call_method(self, ctx, request):
"""Calls given method with given params and returns it value."""
method = self.method_data[request['method']]['method']
params = request['params']
result = None
try:
if isinstance(params, list):
# Does it have enough arguments?
if len(params) < self._man_args(method) - 1:
raise InvalidParamsError('not enough arguments')
# Does it have too many arguments?
if(not self._vargs(method) and len(params) >
self._max_args(method) - 1):
raise InvalidParamsError('too many arguments')
result = method(ctx, *params)
elif isinstance(params, dict):
# Do not accept keyword arguments if the jsonrpc version is
# not >=1.1.
if request['jsonrpc'] < 11:
raise KeywordError
result = method(ctx, **params)
else: # No params
result = method(ctx)
except JSONRPCError:
raise
except Exception as e:
# log.exception('method %s threw an exception' % request['method'])
# Exception was raised inside the method.
newerr = JSONServerError()
newerr.trace = traceback.format_exc()
if len(e.args) == 1:
newerr.data = repr(e.args[0])
else:
newerr.data = repr(e.args)
raise newerr
return result
def call_py(self, ctx, jsondata):
"""
Calls jsonrpc service's method and returns its return value in python
object format or None if there is none.
This method is same as call() except the return value is a python
object instead of JSON string. This method is mainly only useful for
debugging purposes.
"""
rdata = jsondata
# we already deserialize the json string earlier in the server code, no
# need to do it again
# try:
# rdata = json.loads(jsondata)
# except ValueError:
# raise ParseError
# set some default values for error handling
request = self._get_default_vals()
if isinstance(rdata, dict) and rdata:
# It's a single request.
self._fill_request(request, rdata)
respond = self._handle_request(ctx, request)
# Don't respond to notifications
if respond is None:
return None
return respond
elif isinstance(rdata, list) and rdata:
# It's a batch.
requests = []
responds = []
for rdata_ in rdata:
# set some default values for error handling
request_ = self._get_default_vals()
self._fill_request(request_, rdata_)
requests.append(request_)
for request_ in requests:
respond = self._handle_request(ctx, request_)
# Don't respond to notifications
if respond is not None:
responds.append(respond)
if responds:
return responds
# Nothing to respond.
return None
else:
# empty dict, list or wrong type
raise InvalidRequestError
def _handle_request(self, ctx, request):
"""Handles given request and returns its response."""
if 'types' in self.method_data[request['method']]:
self._validate_params_types(request['method'], request['params'])
result = self._call_method(ctx, request)
# Do not respond to notifications.
if request['id'] is None:
return None
respond = {}
self._fill_ver(request['jsonrpc'], respond)
respond['result'] = result
respond['id'] = request['id']
return respond
class MethodContext(dict):
def __init__(self, logger):
self['client_ip'] = None
self['user_id'] = None
self['authenticated'] = None
self['token'] = None
self['module'] = None
self['method'] = None
self['call_id'] = None
self['rpc_context'] = None
self['provenance'] = None
self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3'])
self._logger = logger
def log_err(self, message):
self._log(log.ERR, message)
def log_info(self, message):
self._log(log.INFO, message)
def log_debug(self, message, level=1):
if level in self._debug_levels:
pass
else:
level = int(level)
if level < 1 or level > 3:
raise ValueError("Illegal log level: " + str(level))
level = level + 6
self._log(level, message)
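    # Level mapping implied by log_debug above: named levels in
    # self._debug_levels pass through unchanged, while numeric levels 1-3
    # are shifted into the logger's 7-9 (DEBUG, DEBUG2, DEBUG3) range:
    #   log_debug(msg)           -> level 7
    #   log_debug(msg, level=3)  -> level 9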
def set_log_level(self, level):
self._logger.set_log_level(level)
def get_log_level(self):
return self._logger.get_log_level()
def clear_log_level(self):
self._logger.clear_user_log_level()
def _log(self, level, message):
self._logger.log_message(level, message, self['client_ip'],
self['user_id'], self['module'],
self['method'], self['call_id'])
def provenance(self):
callbackURL = os.environ.get('SDK_CALLBACK_URL')
if callbackURL:
# OK, there's a callback server from which we can get provenance
arg_hash = {'method': 'CallbackServer.get_provenance',
'params': [],
'version': '1.1',
'id': str(_random.random())[2:]
}
body = json.dumps(arg_hash)
response = _requests.post(callbackURL, data=body,
timeout=60)
response.encoding = 'utf-8'
if response.status_code == 500:
if ('content-type' in response.headers and
response.headers['content-type'] ==
'application/json'):
err = response.json()
if 'error' in err:
raise ServerError(**err['error'])
else:
raise ServerError('Unknown', 0, response.text)
else:
raise ServerError('Unknown', 0, response.text)
if not response.ok:
response.raise_for_status()
resp = response.json()
if 'result' not in resp:
raise ServerError('Unknown', 0,
'An unknown server error occurred')
return resp['result'][0]
else:
return self.get('provenance')
class ServerError(Exception):
'''
The call returned an error. Fields:
name - the name of the error.
code - the error code.
message - a human readable error message.
data - the server side stacktrace.
'''
def __init__(self, name, code, message, data=None, error=None):
super(Exception, self).__init__(message)
self.name = name
self.code = code
self.message = message if message else ''
self.data = data or error or ''
# data = JSON RPC 2.0, error = 1.1
def __str__(self):
return self.name + ': ' + str(self.code) + '. ' + self.message + \
'\n' + self.data
def getIPAddress(environ):
xFF = environ.get('HTTP_X_FORWARDED_FOR')
realIP = environ.get('HTTP_X_REAL_IP')
trustXHeaders = config is None or \
config.get('dont_trust_x_ip_headers') != 'true'
if (trustXHeaders):
if (xFF):
return xFF.split(',')[0].strip()
if (realIP):
return realIP.strip()
return environ.get('REMOTE_ADDR')
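# Precedence implemented above: unless the config sets dont_trust_x_ip_headers
# to 'true', the first X-Forwarded-For entry wins, then X-Real-IP, and finally
# the socket-level REMOTE_ADDR.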
class Application(object):
# Wrap the wsgi handler in a class definition so that we can
# do some initialization and avoid regenerating stuff over
# and over
def logcallback(self):
self.serverlog.set_log_file(self.userlog.get_log_file())
def log(self, level, context, message):
self.serverlog.log_message(level, message, context['client_ip'],
context['user_id'], context['module'],
context['method'], context['call_id'])
def __init__(self):
submod = get_service_name() or 'kb_gblocks'
self.userlog = log.log(
submod, ip_address=True, authuser=True, module=True, method=True,
call_id=True, changecallback=self.logcallback,
config=get_config_file())
self.serverlog = log.log(
submod, ip_address=True, authuser=True, module=True, method=True,
call_id=True, logfile=self.userlog.get_log_file())
self.serverlog.set_log_level(6)
self.rpc_service = JSONRPCServiceCustom()
self.method_authentication = dict()
self.rpc_service.add(impl_kb_gblocks.run_Gblocks,
name='kb_gblocks.run_Gblocks',
types=[dict])
self.method_authentication['kb_gblocks.run_Gblocks'] = 'required' # noqa
self.rpc_service.add(impl_kb_gblocks.status,
name='kb_gblocks.status',
types=[dict])
authurl = config.get(AUTH) if config else None
self.auth_client = _KBaseAuth(authurl)
def __call__(self, environ, start_response):
# Context object, equivalent to the perl impl CallContext
ctx = MethodContext(self.userlog)
ctx['client_ip'] = getIPAddress(environ)
status = '500 Internal Server Error'
try:
body_size = int(environ.get('CONTENT_LENGTH', 0))
except (ValueError):
body_size = 0
if environ['REQUEST_METHOD'] == 'OPTIONS':
# we basically do nothing and just return headers
status = '200 OK'
rpc_result = ""
else:
request_body = environ['wsgi.input'].read(body_size)
try:
req = json.loads(request_body)
except ValueError as ve:
err = {'error': {'code': -32700,
'name': "Parse error",
'message': str(ve),
}
}
rpc_result = self.process_error(err, ctx, {'version': '1.1'})
else:
ctx['module'], ctx['method'] = req['method'].split('.')
ctx['call_id'] = req['id']
ctx['rpc_context'] = {
'call_stack': [{'time': self.now_in_utc(),
'method': req['method']}
]
}
prov_action = {'service': ctx['module'],
'method': ctx['method'],
'method_params': req['params']
}
ctx['provenance'] = [prov_action]
try:
token = environ.get('HTTP_AUTHORIZATION')
# parse out the method being requested and check if it
# has an authentication requirement
method_name = req['method']
auth_req = self.method_authentication.get(
method_name, 'none')
if auth_req != 'none':
if token is None and auth_req == 'required':
err = JSONServerError()
err.data = (
'Authentication required for ' +
'kb_gblocks ' +
'but no authentication header was passed')
raise err
elif token is None and auth_req == 'optional':
pass
else:
try:
user = self.auth_client.get_user(token)
ctx['user_id'] = user
ctx['authenticated'] = 1
ctx['token'] = token
except Exception as e:
if auth_req == 'required':
err = JSONServerError()
err.data = \
"Token validation failed: %s" % e
raise err
if (environ.get('HTTP_X_FORWARDED_FOR')):
self.log(log.INFO, ctx, 'X-Forwarded-For: ' +
environ.get('HTTP_X_FORWARDED_FOR'))
self.log(log.INFO, ctx, 'start method')
rpc_result = self.rpc_service.call(ctx, req)
self.log(log.INFO, ctx, 'end method')
status = '200 OK'
except JSONRPCError as jre:
err = {'error': {'code': jre.code,
'name': jre.message,
'message': jre.data
}
}
trace = jre.trace if hasattr(jre, 'trace') else None
rpc_result = self.process_error(err, ctx, req, trace)
except Exception:
err = {'error': {'code': 0,
'name': 'Unexpected Server Error',
'message': 'An unexpected server error ' +
'occurred',
}
}
rpc_result = self.process_error(err, ctx, req,
traceback.format_exc())
# print('Request method was %s\n' % environ['REQUEST_METHOD'])
# print('Environment dictionary is:\n%s\n' % pprint.pformat(environ))
# print('Request body was: %s' % request_body)
# print('Result from the method call is:\n%s\n' % \
# pprint.pformat(rpc_result))
if rpc_result:
response_body = rpc_result
else:
response_body = ''
response_headers = [
('Access-Control-Allow-Origin', '*'),
('Access-Control-Allow-Headers', environ.get(
'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')),
('content-type', 'application/json'),
('content-length', str(len(response_body)))]
start_response(status, response_headers)
return [response_body.encode('utf8')]
def process_error(self, error, context, request, trace=None):
if trace:
self.log(log.ERR, context, trace.split('\n')[0:-1])
if 'id' in request:
error['id'] = request['id']
if 'version' in request:
error['version'] = request['version']
e = error['error'].get('error')
if not e:
error['error']['error'] = trace
elif 'jsonrpc' in request:
error['jsonrpc'] = request['jsonrpc']
error['error']['data'] = trace
else:
error['version'] = '1.0'
error['error']['error'] = trace
return json.dumps(error)
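    # Error shapes produced above, depending on the request version
    # (illustrative):
    #   1.1 request -> {'version': '1.1', 'id': ..., 'error': {..., 'error': trace}}
    #   2.0 request -> {'jsonrpc': '2.0', 'id': ..., 'error': {..., 'data': trace}}
    #   otherwise   -> {'version': '1.0', 'id': ..., 'error': {..., 'error': trace}}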
def now_in_utc(self):
# noqa Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone @IgnorePep8
dtnow = datetime.datetime.now()
dtutcnow = datetime.datetime.utcnow()
delta = dtnow - dtutcnow
hh, mm = divmod((delta.days * 24 * 60 * 60 + delta.seconds + 30) // 60,
60)
return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm)
application = Application()
# This is the uwsgi application dictionary. On startup uwsgi will look
# for this dict and pull its configuration from here.
# This simply lists where to "mount" the application in the URL path
#
# This uwsgi module "magically" appears when running the app within
# uwsgi and is not available otherwise, so wrap an exception handler
# around it
#
# To run this server in uwsgi with 4 workers listening on port 9999 use:
# uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_
# To run using the single-threaded Python BaseHTTP server, listening
# on port 9999 by default, execute this file directly
#
try:
import uwsgi
# Before we do anything with the application, see if the
# configs specify patching all std routines to be asynch
# *ONLY* use this if you are going to wrap the service in
# a wsgi container that has enabled gevent, such as
# uwsgi with the --gevent option
if config is not None and config.get('gevent_monkeypatch_all', False):
print("Monkeypatching std libraries for async")
from gevent import monkey
monkey.patch_all()
uwsgi.applications = {'': application}
except ImportError:
# Not available outside of wsgi, ignore
pass
_proc = None
def start_server(host='localhost', port=0, newprocess=False):
'''
By default, will start the server on localhost on a system assigned port
    in the main thread. Execution of the main thread will stay in the server
main loop until interrupted. To run the server in a separate process, and
thus allow the stop_server method to be called, set newprocess = True. This
will also allow returning of the port number.'''
global _proc
if _proc:
raise RuntimeError('server is already running')
httpd = make_server(host, port, application)
port = httpd.server_address[1]
print("Listening on port %s" % port)
if newprocess:
_proc = Process(target=httpd.serve_forever)
_proc.daemon = True
_proc.start()
else:
httpd.serve_forever()
return port
def stop_server():
global _proc
_proc.terminate()
_proc = None
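# Minimal usage sketch for the helpers above (illustrative):
#   port = start_server(newprocess=True)  # spawns the server, returns the port
#   ... issue JSON-RPC requests against http://localhost:<port> ...
#   stop_server()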
def process_async_cli(input_file_path, output_file_path, token):
exit_code = 0
with open(input_file_path) as data_file:
req = json.load(data_file)
if 'version' not in req:
req['version'] = '1.1'
if 'id' not in req:
req['id'] = str(_random.random())[2:]
ctx = MethodContext(application.userlog)
if token:
user = application.auth_client.get_user(token)
ctx['user_id'] = user
ctx['authenticated'] = 1
ctx['token'] = token
if 'context' in req:
ctx['rpc_context'] = req['context']
ctx['CLI'] = 1
ctx['module'], ctx['method'] = req['method'].split('.')
prov_action = {'service': ctx['module'], 'method': ctx['method'],
'method_params': req['params']}
ctx['provenance'] = [prov_action]
resp = None
try:
resp = application.rpc_service.call_py(ctx, req)
except JSONRPCError as jre:
trace = jre.trace if hasattr(jre, 'trace') else None
resp = {'id': req['id'],
'version': req['version'],
'error': {'code': jre.code,
'name': jre.message,
'message': jre.data,
'error': trace}
}
except Exception:
trace = traceback.format_exc()
resp = {'id': req['id'],
'version': req['version'],
'error': {'code': 0,
'name': 'Unexpected Server Error',
'message': 'An unexpected server error occurred',
'error': trace}
}
if 'error' in resp:
exit_code = 500
with open(output_file_path, "w") as f:
f.write(json.dumps(resp, cls=JSONObjectEncoder))
return exit_code
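# Illustrative invocation of the async CLI path (this mirrors the __main__
# block below): python <this file> input.json output.json [token-or-file]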
if __name__ == "__main__":
if (len(sys.argv) >= 3 and len(sys.argv) <= 4 and
os.path.isfile(sys.argv[1])):
token = None
if len(sys.argv) == 4:
if os.path.isfile(sys.argv[3]):
with open(sys.argv[3]) as token_file:
token = token_file.read()
else:
token = sys.argv[3]
sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token))
try:
opts, args = getopt(sys.argv[1:], "", ["port=", "host="])
except GetoptError as err:
# print help information and exit:
print(str(err)) # will print something like "option -a not recognized"
sys.exit(2)
port = 9999
host = 'localhost'
for o, a in opts:
if o == '--port':
port = int(a)
elif o == '--host':
host = a
print("Host set to %s" % host)
else:
assert False, "unhandled option"
start_server(host=host, port=port)
# print("Listening on port %s" % port)
# httpd = make_server( host, port, application)
#
# httpd.serve_forever()
|
|
# subprocess - Subprocesses with accessible I/O streams
#
# For more information about this module, see PEP 324.
#
# Copyright (c) 2003-2004 by Peter Astrand <astrand@lysator.liu.se>
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of the
# author not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
r"""subprocess - Subprocesses with accessible I/O streams
This module allows you to spawn processes, connect to their
input/output/error pipes, and obtain their return codes. This module
intends to replace several other, older modules and functions, like:
os.system
os.spawn*
os.popen*
popen2.*
commands.*
Information about how the subprocess module can be used to replace these
modules and functions can be found below.
Using the subprocess module
===========================
This module defines one class called Popen:
class Popen(args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
Arguments are:
args should be a string, or a sequence of program arguments. The
program to execute is normally the first item in the args sequence or
string, but can be explicitly set by using the executable argument.
On UNIX, with shell=False (default): In this case, the Popen class
uses os.execvp() to execute the child program. args should normally
be a sequence. A string will be treated as a sequence with the string
as the only item (the program to execute).
On UNIX, with shell=True: If args is a string, it specifies the
command string to execute through the shell. If args is a sequence,
the first item specifies the command string, and any additional items
will be treated as additional shell arguments.
On Windows: the Popen class uses CreateProcess() to execute the child
program, which operates on strings. If args is a sequence, it will be
converted to a string using the list2cmdline method. Please note that
not all MS Windows applications interpret the command line the same
way: The list2cmdline is designed for applications using the same
rules as the MS C runtime.
bufsize, if given, has the same meaning as the corresponding argument
to the built-in open() function: 0 means unbuffered, 1 means line
buffered, any other positive value means use a buffer of
(approximately) that size. A negative bufsize means to use the system
default, which usually means fully buffered. The default value for
bufsize is 0 (unbuffered).
stdin, stdout and stderr specify the executed programs' standard
input, standard output and standard error file handles, respectively.
Valid values are PIPE, an existing file descriptor (a positive
integer), an existing file object, and None. PIPE indicates that a
new pipe to the child should be created. With None, no redirection
will occur; the child's file handles will be inherited from the
parent. Additionally, stderr can be STDOUT, which indicates that the
stderr data from the applications should be captured into the same
file handle as for stdout.
If preexec_fn is set to a callable object, this object will be called
in the child process just before the child is executed.
If close_fds is true, all file descriptors except 0, 1 and 2 will be
closed before the child process is executed.
if shell is true, the specified command will be executed through the
shell.
If cwd is not None, the current directory will be changed to cwd
before the child is executed.
If env is not None, it defines the environment variables for the new
process.
If universal_newlines is true, the file objects stdout and stderr are
opened as text files, but lines may be terminated by any of '\n',
the Unix end-of-line convention, '\r', the Macintosh convention or
'\r\n', the Windows convention. All of these external representations
are seen as '\n' by the Python program. Note: This feature is only
available if Python is built with universal newline support (the
default). Also, the newlines attribute of the file objects stdout,
stdin and stderr are not updated by the communicate() method.
The startupinfo and creationflags, if given, will be passed to the
underlying CreateProcess() function. They can specify things such as
appearance of the main window and priority for the new process.
(Windows only)
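For example, to run a program with both its standard input and standard
output redirected through pipes (illustrative):
    p = Popen(["cat"], stdin=PIPE, stdout=PIPE)
    output = p.communicate("some data")[0]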
This module also defines two shortcut functions:
call(*args, **kwargs):
Run command with arguments. Wait for command to complete, then
return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
Exceptions
----------
Exceptions raised in the child process, before the new program has
started to execute, will be re-raised in the parent. Additionally,
the exception object will have one extra attribute called
'child_traceback', which is a string containing traceback information
from the child's point of view.
The most common exception raised is OSError. This occurs, for
example, when trying to execute a non-existent file. Applications
should prepare for OSErrors.
A ValueError will be raised if Popen is called with invalid arguments.
Security
--------
Unlike some other popen functions, this implementation will never call
/bin/sh implicitly. This means that all characters, including shell
metacharacters, can safely be passed to child processes.
Popen objects
=============
Instances of the Popen class have the following methods:
poll()
Check if child process has terminated. Returns returncode
attribute.
wait()
Wait for child process to terminate. Returns returncode attribute.
communicate(input=None)
Interact with process: Send data to stdin. Read data from stdout
and stderr, until end-of-file is reached. Wait for process to
    terminate. The optional input argument should be a string to be
sent to the child process, or None, if no data should be sent to
the child.
communicate() returns a tuple (stdout, stderr).
Note: The data read is buffered in memory, so do not use this
method if the data size is large or unlimited.
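    Example (illustrative):
        output, errors = Popen(["ls", "-l"], stdout=PIPE).communicate()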
The following attributes are also available:
stdin
If the stdin argument is PIPE, this attribute is a file object
that provides input to the child process. Otherwise, it is None.
stdout
If the stdout argument is PIPE, this attribute is a file object
that provides output from the child process. Otherwise, it is
None.
stderr
If the stderr argument is PIPE, this attribute is file object that
provides error output from the child process. Otherwise, it is
None.
pid
The process ID of the child process.
returncode
The child return code. A None value indicates that the process
hasn't terminated yet. A negative value -N indicates that the
child was terminated by signal N (UNIX only).
Replacing older functions with the subprocess module
====================================================
In this section, "a ==> b" means that b can be used as a replacement
for a.
Note: All functions in this section fail (more or less) silently if
the executed program cannot be found; this module raises an OSError
exception.
In the following examples, we assume that the subprocess module is
imported with "from subprocess import *".
Replacing /bin/sh shell backquote
---------------------------------
output=`mycmd myarg`
==>
output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
Replacing shell pipe line
-------------------------
output=`dmesg | grep hda`
==>
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
output = p2.communicate()[0]
Replacing os.system()
---------------------
sts = os.system("mycmd" + " myarg")
==>
p = Popen("mycmd" + " myarg", shell=True)
sts = os.waitpid(p.pid, 0)
Note:
* Calling the program through the shell is usually not required.
* It's easier to look at the returncode attribute than the
exitstatus.
A more real-world example would look like this:
try:
retcode = call("mycmd" + " myarg", shell=True)
if retcode < 0:
print >>sys.stderr, "Child was terminated by signal", -retcode
else:
print >>sys.stderr, "Child returned", retcode
except OSError, e:
print >>sys.stderr, "Execution failed:", e
Replacing os.spawn*
-------------------
P_NOWAIT example:
pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
==>
pid = Popen(["/bin/mycmd", "myarg"]).pid
P_WAIT example:
retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
==>
retcode = call(["/bin/mycmd", "myarg"])
Vector example:
os.spawnvp(os.P_NOWAIT, path, args)
==>
Popen([path] + args[1:])
Environment example:
os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
==>
Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
Replacing os.popen*
-------------------
pipe = os.popen(cmd, mode='r', bufsize)
==>
pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout
pipe = os.popen(cmd, mode='w', bufsize)
==>
pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin
(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize)
==>
p = Popen(cmd, shell=True, bufsize=bufsize,
stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdin, child_stdout) = (p.stdin, p.stdout)
(child_stdin,
child_stdout,
child_stderr) = os.popen3(cmd, mode, bufsize)
==>
p = Popen(cmd, shell=True, bufsize=bufsize,
stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
(child_stdin,
child_stdout,
child_stderr) = (p.stdin, p.stdout, p.stderr)
(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize)
==>
p = Popen(cmd, shell=True, bufsize=bufsize,
stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout)
Replacing popen2.*
------------------
Note: If the cmd argument to popen2 functions is a string, the command
is executed through /bin/sh. If it is a list, the command is directly
executed.
(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode)
==>
p = Popen(["somestring"], shell=True, bufsize=bufsize
stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdout, child_stdin) = (p.stdout, p.stdin)
(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode)
==>
p = Popen(["mycmd", "myarg"], bufsize=bufsize,
stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdout, child_stdin) = (p.stdout, p.stdin)
The popen2.Popen3 and popen2.Popen4 classes basically work as subprocess.Popen,
except that:
* subprocess.Popen raises an exception if the execution fails
* the capturestderr argument is replaced with the stderr argument.
* stdin=PIPE and stdout=PIPE must be specified.
* popen2 closes all filedescriptors by default, but you have to specify
close_fds=True with subprocess.Popen.
"""
import sys
mswindows = (sys.platform == "win32")
import os
import types
import traceback
if mswindows:
import threading
import msvcrt
if 0: # <-- change this to use pywin32 instead of the _subprocess driver
import pywintypes
from win32api import GetStdHandle, STD_INPUT_HANDLE, \
STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
from win32api import GetCurrentProcess, DuplicateHandle, \
GetModuleFileName, GetVersion
from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
from win32pipe import CreatePipe
from win32process import CreateProcess, STARTUPINFO, \
GetExitCodeProcess, STARTF_USESTDHANDLES, \
STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
else:
from _subprocess import *
class STARTUPINFO:
dwFlags = 0
hStdInput = None
hStdOutput = None
hStdError = None
class pywintypes:
error = IOError
else:
import select
import errno
import fcntl
import pickle
__all__ = ["Popen", "PIPE", "STDOUT", "call"]
try:
MAXFD = os.sysconf("SC_OPEN_MAX")
except:
MAXFD = 256
# True/False does not exist on 2.2.0
try:
False
except NameError:
False = 0
True = 1
_active = []
def _cleanup():
for inst in _active[:]:
inst.poll()
PIPE = -1
STDOUT = -2
def call(*args, **kwargs):
"""Run command with arguments. Wait for command to complete, then
return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
"""
return Popen(*args, **kwargs).wait()
def list2cmdline(seq):
"""
Translate a sequence of arguments into a command line
string, using the same rules as the MS C runtime:
1) Arguments are delimited by white space, which is either a
space or a tab.
2) A string surrounded by double quotation marks is
interpreted as a single argument, regardless of white space
contained within. A quoted string can be embedded in an
argument.
3) A double quotation mark preceded by a backslash is
interpreted as a literal double quotation mark.
4) Backslashes are interpreted literally, unless they
immediately precede a double quotation mark.
5) If backslashes immediately precede a double quotation mark,
every pair of backslashes is interpreted as a literal
backslash. If the number of backslashes is odd, the last
backslash escapes the next double quotation mark as
described in rule 3.
"""
# See
# http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
result = []
needquote = False
for arg in seq:
bs_buf = []
# Add a space to separate this argument from the others
if result:
result.append(' ')
needquote = (" " in arg) or ("\t" in arg)
if needquote:
result.append('"')
for c in arg:
if c == '\\':
# Don't know if we need to double yet.
bs_buf.append(c)
elif c == '"':
                # Double backslashes.
result.append('\\' * len(bs_buf)*2)
bs_buf = []
result.append('\\"')
else:
# Normal char
if bs_buf:
result.extend(bs_buf)
bs_buf = []
result.append(c)
        # Add remaining backslashes, if any.
if bs_buf:
result.extend(bs_buf)
if needquote:
result.extend(bs_buf)
result.append('"')
return ''.join(result)
class Popen(object):
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
"""Create new Popen instance."""
_cleanup()
if not isinstance(bufsize, (int, long)):
raise TypeError("bufsize must be an integer")
if mswindows:
if preexec_fn is not None:
raise ValueError("preexec_fn is not supported on Windows "
"platforms")
if close_fds:
raise ValueError("close_fds is not supported on Windows "
"platforms")
else:
# POSIX
if startupinfo is not None:
raise ValueError("startupinfo is only supported on Windows "
"platforms")
if creationflags != 0:
raise ValueError("creationflags is only supported on Windows "
"platforms")
self.stdin = None
self.stdout = None
self.stderr = None
self.pid = None
self.returncode = None
self.universal_newlines = universal_newlines
# Input and output objects. The general principle is like
# this:
#
# Parent Child
# ------ -----
# p2cwrite ---stdin---> p2cread
# c2pread <--stdout--- c2pwrite
# errread <--stderr--- errwrite
#
# On POSIX, the child objects are file descriptors. On
# Windows, these are Windows file handles. The parent objects
# are file descriptors on both platforms. The parent objects
# are None when not using PIPEs. The child objects are None
# when not redirecting.
(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite) = self._get_handles(stdin, stdout, stderr)
self._execute_child(args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
if p2cwrite:
self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
if c2pread:
if universal_newlines:
self.stdout = os.fdopen(c2pread, 'rU', bufsize)
else:
self.stdout = os.fdopen(c2pread, 'rb', bufsize)
if errread:
if universal_newlines:
self.stderr = os.fdopen(errread, 'rU', bufsize)
else:
self.stderr = os.fdopen(errread, 'rb', bufsize)
_active.append(self)
def _translate_newlines(self, data):
data = data.replace("\r\n", "\n")
data = data.replace("\r", "\n")
return data
if mswindows:
#
# Windows methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tupel with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
if stdin == None and stdout == None and stderr == None:
return (None, None, None, None, None, None)
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin == None:
p2cread = GetStdHandle(STD_INPUT_HANDLE)
elif stdin == PIPE:
p2cread, p2cwrite = CreatePipe(None, 0)
# Detach and turn into fd
p2cwrite = p2cwrite.Detach()
p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
elif type(stdin) == types.IntType:
p2cread = msvcrt.get_osfhandle(stdin)
else:
# Assuming file-like object
p2cread = msvcrt.get_osfhandle(stdin.fileno())
p2cread = self._make_inheritable(p2cread)
if stdout == None:
c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
elif stdout == PIPE:
c2pread, c2pwrite = CreatePipe(None, 0)
# Detach and turn into fd
c2pread = c2pread.Detach()
c2pread = msvcrt.open_osfhandle(c2pread, 0)
elif type(stdout) == types.IntType:
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
# Assuming file-like object
c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
c2pwrite = self._make_inheritable(c2pwrite)
if stderr == None:
errwrite = GetStdHandle(STD_ERROR_HANDLE)
elif stderr == PIPE:
errread, errwrite = CreatePipe(None, 0)
# Detach and turn into fd
errread = errread.Detach()
errread = msvcrt.open_osfhandle(errread, 0)
elif stderr == STDOUT:
errwrite = c2pwrite
elif type(stderr) == types.IntType:
errwrite = msvcrt.get_osfhandle(stderr)
else:
# Assuming file-like object
errwrite = msvcrt.get_osfhandle(stderr.fileno())
errwrite = self._make_inheritable(errwrite)
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _make_inheritable(self, handle):
"""Return a duplicate of handle, which is inheritable"""
return DuplicateHandle(GetCurrentProcess(), handle,
GetCurrentProcess(), 0, 1,
DUPLICATE_SAME_ACCESS)
def _find_w9xpopen(self):
"""Find and return absolut path to w9xpopen.exe"""
w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
# Eeek - file-not-found - possibly an embedding
# situation - see if we can locate it in sys.exec_prefix
w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
raise RuntimeError("Cannot locate w9xpopen.exe, which is "
"needed for Popen to work with your "
"shell or platform.")
return w9xpopen
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Execute program (MS Windows version)"""
if not isinstance(args, types.StringTypes):
args = list2cmdline(args)
# Process startup details
default_startupinfo = STARTUPINFO()
if startupinfo == None:
startupinfo = default_startupinfo
if not None in (p2cread, c2pwrite, errwrite):
startupinfo.dwFlags |= STARTF_USESTDHANDLES
startupinfo.hStdInput = p2cread
startupinfo.hStdOutput = c2pwrite
startupinfo.hStdError = errwrite
if shell:
default_startupinfo.dwFlags |= STARTF_USESHOWWINDOW
default_startupinfo.wShowWindow = SW_HIDE
comspec = os.environ.get("COMSPEC", "cmd.exe")
args = comspec + " /c " + args
if (GetVersion() >= 0x80000000L or
os.path.basename(comspec).lower() == "command.com"):
# Win9x, or using command.com on NT. We need to
# use the w9xpopen intermediate program. For more
# information, see KB Q150956
# (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
w9xpopen = self._find_w9xpopen()
args = '"%s" %s' % (w9xpopen, args)
# Not passing CREATE_NEW_CONSOLE has been known to
# cause random failures on win9x. Specifically a
# dialog: "Your program accessed mem currently in
# use at xxx" and a hopeful warning about the
                    # stability of your system. The cost is that Ctrl+C won't
# kill children.
creationflags |= CREATE_NEW_CONSOLE
# Start the process
try:
hp, ht, pid, tid = CreateProcess(executable, args,
# no special security
None, None,
# must inherit handles to pass std
# handles
1,
creationflags,
env,
cwd,
startupinfo)
except pywintypes.error, e:
# Translate pywintypes.error to WindowsError, which is
# a subclass of OSError. FIXME: We should really
# translate errno using _sys_errlist (or simliar), but
# how can this be done from Python?
raise WindowsError(*e.args)
# Retain the process handle, but close the thread handle
self._handle = hp
self.pid = pid
ht.Close()
# Child is launched. Close the parent's copy of those pipe
# handles that only the child should have open. You need
# to make sure that no handles to the write end of the
# output pipe are maintained in this process or else the
# pipe will not close when the child process exits and the
# ReadFile will hang.
if p2cread != None:
p2cread.Close()
if c2pwrite != None:
c2pwrite.Close()
if errwrite != None:
errwrite.Close()
def poll(self):
"""Check if child process has terminated. Returns returncode
attribute."""
if self.returncode == None:
if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
self.returncode = GetExitCodeProcess(self._handle)
_active.remove(self)
return self.returncode
def wait(self):
"""Wait for child process to terminate. Returns returncode
attribute."""
if self.returncode == None:
obj = WaitForSingleObject(self._handle, INFINITE)
self.returncode = GetExitCodeProcess(self._handle)
_active.remove(self)
return self.returncode
def _readerthread(self, fh, buffer):
buffer.append(fh.read())
def communicate(self, input=None):
"""Interact with process: Send data to stdin. Read data from
stdout and stderr, until end-of-file is reached. Wait for
process to terminate. The optional input argument should be a
string to be sent to the child process, or None, if no data
should be sent to the child.
communicate() returns a tuple (stdout, stderr)."""
stdout = None # Return
stderr = None # Return
if self.stdout:
stdout = []
stdout_thread = threading.Thread(target=self._readerthread,
args=(self.stdout, stdout))
stdout_thread.setDaemon(True)
stdout_thread.start()
if self.stderr:
stderr = []
stderr_thread = threading.Thread(target=self._readerthread,
args=(self.stderr, stderr))
stderr_thread.setDaemon(True)
stderr_thread.start()
if self.stdin:
if input != None:
self.stdin.write(input)
self.stdin.close()
if self.stdout:
stdout_thread.join()
if self.stderr:
stderr_thread.join()
# All data exchanged. Translate lists into strings.
if stdout != None:
stdout = stdout[0]
if stderr != None:
stderr = stderr[0]
# Translate newlines, if requested. We cannot let the file
# object do the translation: It is based on stdio, which is
# impossible to combine with select (unless forcing no
# buffering).
if self.universal_newlines and hasattr(open, 'newlines'):
if stdout:
stdout = self._translate_newlines(stdout)
if stderr:
stderr = self._translate_newlines(stderr)
self.wait()
return (stdout, stderr)
else:
#
# POSIX methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tupel with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin == None:
pass
elif stdin == PIPE:
p2cread, p2cwrite = os.pipe()
elif type(stdin) == types.IntType:
p2cread = stdin
else:
# Assuming file-like object
p2cread = stdin.fileno()
if stdout == None:
pass
elif stdout == PIPE:
c2pread, c2pwrite = os.pipe()
elif type(stdout) == types.IntType:
c2pwrite = stdout
else:
# Assuming file-like object
c2pwrite = stdout.fileno()
if stderr == None:
pass
elif stderr == PIPE:
errread, errwrite = os.pipe()
elif stderr == STDOUT:
errwrite = c2pwrite
elif type(stderr) == types.IntType:
errwrite = stderr
else:
# Assuming file-like object
errwrite = stderr.fileno()
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _set_cloexec_flag(self, fd):
try:
cloexec_flag = fcntl.FD_CLOEXEC
except AttributeError:
cloexec_flag = 1
old = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
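        # The close-on-exec flag set here is what makes the error pipe in
        # _execute_child below work: a successful exec() closes errpipe_write
        # automatically, so the parent reads EOF; on failure the child writes
        # the pickled exception through the still-open descriptor.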
def _close_fds(self, but):
for i in range(3, MAXFD):
if i == but:
continue
try:
os.close(i)
except:
pass
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Execute program (POSIX version)"""
if isinstance(args, types.StringTypes):
args = [args]
if shell:
args = ["/bin/sh", "-c"] + args
if executable == None:
executable = args[0]
# For transferring possible exec failure from child to parent
# The first char specifies the exception type: 0 means
# OSError, 1 means some other error.
errpipe_read, errpipe_write = os.pipe()
self._set_cloexec_flag(errpipe_write)
self.pid = os.fork()
if self.pid == 0:
# Child
try:
# Close parent's pipe ends
if p2cwrite:
os.close(p2cwrite)
if c2pread:
os.close(c2pread)
if errread:
os.close(errread)
os.close(errpipe_read)
# Dup fds for child
if p2cread:
os.dup2(p2cread, 0)
if c2pwrite:
os.dup2(c2pwrite, 1)
if errwrite:
os.dup2(errwrite, 2)
                    # Close pipe fds. Make sure we don't close the same
# fd more than once.
if p2cread:
os.close(p2cread)
if c2pwrite and c2pwrite not in (p2cread,):
os.close(c2pwrite)
if errwrite and errwrite not in (p2cread, c2pwrite):
os.close(errwrite)
# Close all other fds, if asked for
if close_fds:
self._close_fds(but=errpipe_write)
if cwd != None:
os.chdir(cwd)
if preexec_fn:
apply(preexec_fn)
if env == None:
os.execvp(executable, args)
else:
os.execvpe(executable, args, env)
except:
exc_type, exc_value, tb = sys.exc_info()
# Save the traceback and attach it to the exception object
exc_lines = traceback.format_exception(exc_type,
exc_value,
tb)
exc_value.child_traceback = ''.join(exc_lines)
os.write(errpipe_write, pickle.dumps(exc_value))
# This exitcode won't be reported to applications, so it
# really doesn't matter what we return.
os._exit(255)
# Parent
os.close(errpipe_write)
if p2cread and p2cwrite:
os.close(p2cread)
if c2pwrite and c2pread:
os.close(c2pwrite)
if errwrite and errread:
os.close(errwrite)
# Wait for exec to fail or succeed; possibly raising exception
data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB
os.close(errpipe_read)
if data != "":
os.waitpid(self.pid, 0)
child_exception = pickle.loads(data)
raise child_exception
def _handle_exitstatus(self, sts):
if os.WIFSIGNALED(sts):
self.returncode = -os.WTERMSIG(sts)
elif os.WIFEXITED(sts):
self.returncode = os.WEXITSTATUS(sts)
else:
# Should never happen
raise RuntimeError("Unknown child exit status!")
_active.remove(self)
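        # Examples of the mapping above: a child killed by SIGKILL (signal 9)
        # yields returncode == -9, while a normal exit(3) yields
        # returncode == 3.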
def poll(self):
"""Check if child process has terminated. Returns returncode
attribute."""
if self.returncode == None:
try:
pid, sts = os.waitpid(self.pid, os.WNOHANG)
if pid == self.pid:
self._handle_exitstatus(sts)
except os.error:
pass
return self.returncode
def wait(self):
"""Wait for child process to terminate. Returns returncode
attribute."""
if self.returncode == None:
pid, sts = os.waitpid(self.pid, 0)
self._handle_exitstatus(sts)
return self.returncode
def communicate(self, input=None):
"""Interact with process: Send data to stdin. Read data from
stdout and stderr, until end-of-file is reached. Wait for
process to terminate. The optional input argument should be a
string to be sent to the child process, or None, if no data
should be sent to the child.
communicate() returns a tuple (stdout, stderr)."""
read_set = []
write_set = []
stdout = None # Return
stderr = None # Return
if self.stdin:
# Flush stdio buffer. This might block, if the user has
# been writing to .stdin in an uncontrolled fashion.
self.stdin.flush()
if input:
write_set.append(self.stdin)
else:
self.stdin.close()
if self.stdout:
read_set.append(self.stdout)
stdout = []
if self.stderr:
read_set.append(self.stderr)
stderr = []
while read_set or write_set:
rlist, wlist, xlist = select.select(read_set, write_set, [])
if self.stdin in wlist:
# When select has indicated that the file is writable,
                    # we can write up to PIPE_BUF bytes without risk of
# blocking. POSIX defines PIPE_BUF >= 512
bytes_written = os.write(self.stdin.fileno(), input[:512])
input = input[bytes_written:]
if not input:
self.stdin.close()
write_set.remove(self.stdin)
if self.stdout in rlist:
data = os.read(self.stdout.fileno(), 1024)
if data == "":
self.stdout.close()
read_set.remove(self.stdout)
stdout.append(data)
if self.stderr in rlist:
data = os.read(self.stderr.fileno(), 1024)
if data == "":
self.stderr.close()
read_set.remove(self.stderr)
stderr.append(data)
# All data exchanged. Translate lists into strings.
        if stdout is not None:
            stdout = ''.join(stdout)
        if stderr is not None:
            stderr = ''.join(stderr)
# Translate newlines, if requested. We cannot let the file
# object do the translation: It is based on stdio, which is
# impossible to combine with select (unless forcing no
# buffering).
if self.universal_newlines and hasattr(open, 'newlines'):
if stdout:
stdout = self._translate_newlines(stdout)
if stderr:
stderr = self._translate_newlines(stderr)
self.wait()
return (stdout, stderr)
def _demo_posix():
#
# Example 1: Simple redirection: Get process list
#
plist = Popen(["ps"], stdout=PIPE).communicate()[0]
print "Process list:"
print plist
#
# Example 2: Change uid before executing child
#
if os.getuid() == 0:
p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
p.wait()
#
# Example 3: Connecting several subprocesses
#
print "Looking for 'hda'..."
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
print repr(p2.communicate()[0])
#
# Example 4: Catch execution error
#
print
print "Trying a weird file..."
try:
print Popen(["/this/path/does/not/exist"]).communicate()
except OSError, e:
if e.errno == errno.ENOENT:
print "The file didn't exist. I thought so..."
print "Child traceback:"
print e.child_traceback
else:
print "Error", e.errno
else:
print >>sys.stderr, "Gosh. No error."
def _demo_windows():
#
# Example 1: Connecting several subprocesses
#
print "Looking for 'PROMPT' in set output..."
p1 = Popen("set", stdout=PIPE, shell=True)
p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
print repr(p2.communicate()[0])
#
# Example 2: Simple execution of program
#
print "Executing calc..."
p = Popen("calc")
p.wait()
if __name__ == "__main__":
if mswindows:
_demo_windows()
else:
_demo_posix()
|
|
from fontbakery.callable import check
from fontbakery.checkrunner import FAIL, PASS, WARN, INFO
from fontbakery.message import Message
# used to inform get_module_profile whether and how to create a profile
from fontbakery.fonts_profile import profile_factory # NOQA pylint: disable=unused-import
profile_imports = [
('.shared_conditions', ('vmetrics', )),
('.googlefonts_conditions', ('RIBBI_ttFonts', ))
]
@check(
id = 'com.google.fonts/check/family/panose_proportion'
)
def com_google_fonts_check_family_panose_proportion(ttFonts):
"""Fonts have consistent PANOSE proportion?"""
passed = True
proportion = None
missing = False
for ttFont in ttFonts:
if "OS/2" not in ttFont:
missing = True
passed = False
continue
if proportion is None:
proportion = ttFont['OS/2'].panose.bProportion
if proportion != ttFont['OS/2'].panose.bProportion:
passed = False
if missing:
yield FAIL,\
Message("lacks-OS/2",
"One or more fonts lack the required OS/2 table.")
if not passed:
yield FAIL,\
Message("inconsistency",
"PANOSE proportion is not the same across this family."
" In order to fix this, please make sure that"
" the panose.bProportion value is the same"
                          " in the OS/2 table of all the font files of this family.")
else:
yield PASS, "Fonts have consistent PANOSE proportion."
@check(
id = 'com.google.fonts/check/family/panose_familytype'
)
def com_google_fonts_check_family_panose_familytype(ttFonts):
"""Fonts have consistent PANOSE family type?"""
passed = True
familytype = None
missing = False
for ttfont in ttFonts:
if "OS/2" not in ttfont:
passed = False
missing = True
continue
if familytype is None:
familytype = ttfont['OS/2'].panose.bFamilyType
if familytype != ttfont['OS/2'].panose.bFamilyType:
passed = False
if missing:
yield FAIL,\
Message("lacks-OS/2",
"One or more fonts lack the required OS/2 table.")
if not passed:
yield FAIL,\
Message("inconsistency",
"PANOSE family type is not the same across this family."
" In order to fix this, please make sure that"
" the panose.bFamilyType value is the same"
                          " in the OS/2 table of all the font files of this family.")
else:
yield PASS, "Fonts have consistent PANOSE family type."
@check(
id = 'com.google.fonts/check/xavgcharwidth',
conditions = ['is_ttf']
)
def com_google_fonts_check_xavgcharwidth(ttFont):
"""Check if OS/2 xAvgCharWidth is correct."""
if "OS/2" not in ttFont:
yield FAIL,\
Message("lacks-OS/2",
"Required OS/2 table is missing.")
return
current_value = ttFont['OS/2'].xAvgCharWidth
ACCEPTABLE_ERROR = 10 # Width deviation tolerance in font units
# Since version 3, the average is computed using _all_ glyphs in a font.
if ttFont['OS/2'].version >= 3:
calculation_rule = "the average of the widths of all glyphs in the font"
if not ttFont['hmtx'].metrics: # May contain just '.notdef', which is valid.
yield FAIL,\
Message("missing-glyphs",
"CRITICAL: Found no glyph width data in the hmtx table!")
return
width_sum = 0
count = 0
for glyph_id in ttFont['glyf'].glyphs: # At least .notdef must be present.
width = ttFont['hmtx'].metrics[glyph_id][0]
# The OpenType spec doesn't exclude negative widths, but only positive
            # widths seem to be the assumption in the wild?
if width > 0:
count += 1
width_sum += width
expected_value = int(round(width_sum / count))
else: # Version 2 and below only consider lowercase latin glyphs and space.
calculation_rule = ("the weighted average of the widths of the latin"
" lowercase glyphs in the font")
weightFactors = {
'a': 64,
'b': 14,
'c': 27,
'd': 35,
'e': 100,
'f': 20,
'g': 14,
'h': 42,
'i': 63,
'j': 3,
'k': 6,
'l': 35,
'm': 20,
'n': 56,
'o': 56,
'p': 17,
'q': 4,
'r': 49,
's': 56,
't': 71,
'u': 31,
'v': 10,
'w': 18,
'x': 3,
'y': 18,
'z': 2,
'space': 166
}
glyph_order = ttFont.getGlyphOrder()
if not all(character in glyph_order for character in weightFactors):
yield FAIL,\
Message("missing-glyphs",
"Font is missing the required"
" latin lowercase letters and/or space.")
return
width_sum = 0
for glyph_id in weightFactors:
width = ttFont['hmtx'].metrics[glyph_id][0]
width_sum += (width * weightFactors[glyph_id])
expected_value = int(width_sum / 1000.0 + 0.5) # round to closest int
difference = abs(current_value - expected_value)
# We accept matches and off-by-ones due to rounding as correct.
if current_value == expected_value or difference == 1:
yield PASS, "OS/2 xAvgCharWidth value is correct."
elif difference < ACCEPTABLE_ERROR:
yield INFO, (f"OS/2 xAvgCharWidth is {current_value} but it should be"
f" {expected_value} which corresponds to {calculation_rule}."
f" These are similar values, which"
f" may be a symptom of the slightly different"
f" calculation of the xAvgCharWidth value in"
f" font editors. There's further discussion on"
f" this at https://github.com/googlefonts/fontbakery"
f"/issues/1622")
else:
yield WARN, (f"OS/2 xAvgCharWidth is {current_value} but it should be"
f" {expected_value} which corresponds to {calculation_rule}.")
@check(
id = 'com.adobe.fonts/check/fsselection_matches_macstyle',
rationale = """
The bold and italic bits in OS/2.fsSelection must match the bold and italic bits in head.macStyle per the OpenType spec.
"""
)
def com_adobe_fonts_check_fsselection_matches_macstyle(ttFont):
"""Check if OS/2 fsSelection matches head macStyle bold and italic bits."""
# Check both OS/2 and head are present.
missing_tables = False
required = ["OS/2", "head"]
for key in required:
if key not in ttFont:
missing_tables = True
yield FAIL,\
Message(f'lacks-{key}',
f"The '{key}' table is missing.")
if missing_tables:
return
from fontbakery.constants import FsSelection, MacStyle
failed = False
head_bold = (ttFont['head'].macStyle & MacStyle.BOLD) != 0
os2_bold = (ttFont['OS/2'].fsSelection & FsSelection.BOLD) != 0
if head_bold != os2_bold:
failed = True
yield FAIL, "The OS/2.fsSelection and head.macStyle " \
"bold settings do not match."
head_italic = (ttFont['head'].macStyle & MacStyle.ITALIC) != 0
os2_italic = (ttFont['OS/2'].fsSelection & FsSelection.ITALIC) != 0
if head_italic != os2_italic:
failed = True
yield FAIL, "The OS/2.fsSelection and head.macStyle " \
"italic settings do not match."
if not failed:
yield PASS, "The OS/2.fsSelection and head.macStyle " \
"bold and italic settings match."
@check(
id = 'com.adobe.fonts/check/family/bold_italic_unique_for_nameid1',
conditions=['RIBBI_ttFonts'],
rationale = """
Per the OpenType spec: name ID 1 'is used in combination with Font Subfamily name (name ID 2), and should be shared among at most four fonts that differ only in weight or style...
This four-way distinction should also be reflected in the OS/2.fsSelection field, using bits 0 and 5.
"""
)
def com_adobe_fonts_check_family_bold_italic_unique_for_nameid1(RIBBI_ttFonts):
"""Check that OS/2.fsSelection bold & italic settings are unique
for each NameID1"""
from collections import Counter
from fontbakery.utils import get_name_entry_strings
from fontbakery.constants import NameID, FsSelection
failed = False
family_name_and_bold_italic = list()
for ttFont in RIBBI_ttFonts:
names_list = get_name_entry_strings(ttFont, NameID.FONT_FAMILY_NAME)
        # names_list will likely contain multiple entries, e.g. multiple copies
        # of the same name in the same language for different platforms, but
        # also different names in different languages. We use set() below
        # to remove the duplicates and only store the unique family name(s)
        # used for a given font.
names_set = set(names_list)
bold = (ttFont['OS/2'].fsSelection & FsSelection.BOLD) != 0
italic = (ttFont['OS/2'].fsSelection & FsSelection.ITALIC) != 0
bold_italic = 'Bold=%r, Italic=%r' % (bold, italic)
for name in names_set:
family_name_and_bold_italic.append((name, bold_italic,))
counter = Counter(family_name_and_bold_italic)
for (family_name, bold_italic), count in counter.items():
if count > 1:
failed = True
yield FAIL, (f"Family '{family_name}' has {count} fonts"
f" (should be no more than 1) with the"
f" same OS/2.fsSelection bold & italic settings:"
f" {bold_italic}")
if not failed:
yield PASS, ("The OS/2.fsSelection bold & italic settings were unique "
"within each compatible family group.")
@check(
id = 'com.google.fonts/check/code_pages',
rationale = """
At least some programs (such as Word and Sublime Text) under Windows 7 do not recognize fonts unless code page bits are properly set on the ulCodePageRange1 (and/or ulCodePageRange2) fields of the OS/2 table.
More specifically, the fonts are selectable in the font menu, but whichever Windows API these applications use considers them unsuitable for any character set, so anything set in these fonts is rendered with a fallback font of Arial.
This check currently does not identify which code pages should be set. Auto-detecting coverage is not trivial since the OpenType specification leaves the interpretation of whether a given code page is "functional" or not open to the font developer to decide.
So here we simply detect as a FAIL when a given font has no code page declared at all.
"""
)
def com_google_fonts_check_code_pages(ttFont):
"""Check code page character ranges"""
if "OS/2" not in ttFont:
yield FAIL,\
Message("lacks-OS/2",
"The required OS/2 table is missing.")
return
if not hasattr(ttFont['OS/2'], "ulCodePageRange1") or \
not hasattr(ttFont['OS/2'], "ulCodePageRange2") or \
(ttFont['OS/2'].ulCodePageRange1 == 0 and \
ttFont['OS/2'].ulCodePageRange2 == 0):
yield FAIL, ("No code pages defined in the OS/2 table"
" ulCodePageRange1 and CodePageRange2 fields.")
else:
yield PASS, "At least one code page is defined."
|
|
from __future__ import unicode_literals
import logging
import sys
from raven.utils.testutils import TestCase
from raven.utils import six
from raven.base import Client
from raven.handlers.logging import SentryHandler
from raven.utils.stacks import iter_stack_frames
class TempStoreClient(Client):
def __init__(self, servers=None, **kwargs):
self.events = []
super(TempStoreClient, self).__init__(servers=servers, **kwargs)
def is_enabled(self):
return True
def send(self, **kwargs):
self.events.append(kwargs)
class LoggingIntegrationTest(TestCase):
def setUp(self):
self.client = TempStoreClient(include_paths=['tests', 'raven'])
self.handler = SentryHandler(self.client)
def make_record(self, msg, args=(), level=logging.INFO, extra=None, exc_info=None):
record = logging.LogRecord('root', level, __file__, 27, msg, args, exc_info, 'make_record')
if extra:
for key, value in six.iteritems(extra):
record.__dict__[key] = value
return record
def test_logger_basic(self):
record = self.make_record('This is a test error')
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEqual(event['logger'], 'root')
self.assertEqual(event['level'], logging.INFO)
self.assertEqual(event['message'], 'This is a test error')
self.assertFalse('sentry.interfaces.Stacktrace' in event)
self.assertFalse('sentry.interfaces.Exception' in event)
self.assertTrue('sentry.interfaces.Message' in event)
msg = event['sentry.interfaces.Message']
self.assertEqual(msg['message'], 'This is a test error')
self.assertEqual(msg['params'], ())
def test_logger_extra_data(self):
record = self.make_record('This is a test error', extra={'data': {
'url': 'http://example.com',
}})
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
if six.PY3:
expected = "'http://example.com'"
else:
expected = "u'http://example.com'"
self.assertEqual(event['extra']['url'], expected)
def test_logger_exc_info(self):
try:
raise ValueError('This is a test ValueError')
except ValueError:
record = self.make_record('This is a test info with an exception', exc_info=sys.exc_info())
else:
self.fail('Should have raised an exception')
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEqual(event['message'], 'This is a test info with an exception')
self.assertTrue('sentry.interfaces.Stacktrace' in event)
self.assertTrue('sentry.interfaces.Exception' in event)
exc = event['sentry.interfaces.Exception']
self.assertEqual(exc['type'], 'ValueError')
self.assertEqual(exc['value'], 'This is a test ValueError')
self.assertTrue('sentry.interfaces.Message' in event)
msg = event['sentry.interfaces.Message']
self.assertEqual(msg['message'], 'This is a test info with an exception')
self.assertEqual(msg['params'], ())
def test_message_params(self):
record = self.make_record('This is a test of %s', args=('args',))
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEqual(event['message'], 'This is a test of args')
msg = event['sentry.interfaces.Message']
self.assertEqual(msg['message'], 'This is a test of %s')
expected = ("'args'",) if six.PY3 else ("u'args'",)
self.assertEqual(msg['params'], expected)
def test_record_stack(self):
record = self.make_record('This is a test of stacks', extra={'stack': True})
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertTrue('sentry.interfaces.Stacktrace' in event)
frames = event['sentry.interfaces.Stacktrace']['frames']
        self.assertNotEqual(len(frames), 1)
frame = frames[0]
self.assertEqual(frame['module'], 'raven.handlers.logging')
self.assertFalse('sentry.interfaces.Exception' in event)
self.assertTrue('sentry.interfaces.Message' in event)
self.assertEqual(event['culprit'], 'root in make_record')
self.assertEqual(event['message'], 'This is a test of stacks')
def test_no_record_stack(self):
record = self.make_record('This is a test with no stacks', extra={'stack': False})
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEqual(event['message'], 'This is a test with no stacks')
self.assertFalse('sentry.interfaces.Stacktrace' in event)
def test_explicit_stack(self):
record = self.make_record('This is a test of stacks', extra={'stack': iter_stack_frames()})
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
assert 'sentry.interfaces.Stacktrace' in event
assert 'culprit' in event
assert event['culprit'] == 'root in make_record'
self.assertTrue('message' in event, event)
self.assertEqual(event['message'], 'This is a test of stacks')
self.assertFalse('sentry.interfaces.Exception' in event)
self.assertTrue('sentry.interfaces.Message' in event)
msg = event['sentry.interfaces.Message']
self.assertEqual(msg['message'], 'This is a test of stacks')
self.assertEqual(msg['params'], ())
def test_extra_culprit(self):
record = self.make_record('This is a test of stacks', extra={'culprit': 'foo in bar'})
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEqual(event['culprit'], 'foo in bar')
def test_extra_data_as_string(self):
record = self.make_record('Message', extra={'data': 'foo'})
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
expected = "'foo'" if six.PY3 else "u'foo'"
self.assertEqual(event['extra']['data'], expected)
def test_tags(self):
record = self.make_record('Message', extra={'tags': {'foo': 'bar'}})
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
assert event['tags'] == {'foo': 'bar'}
def test_tags_on_error(self):
try:
raise ValueError('This is a test ValueError')
except ValueError:
record = self.make_record('Message', extra={'tags': {'foo': 'bar'}}, exc_info=sys.exc_info())
self.handler.emit(record)
self.assertEqual(len(self.client.events), 1)
event = self.client.events.pop(0)
assert event['tags'] == {'foo': 'bar'}
class LoggingHandlerTest(TestCase):
def test_client_arg(self):
client = TempStoreClient(include_paths=['tests'])
handler = SentryHandler(client)
self.assertEqual(handler.client, client)
def test_client_kwarg(self):
client = TempStoreClient(include_paths=['tests'])
handler = SentryHandler(client=client)
self.assertEqual(handler.client, client)
def test_args_as_servers_and_key(self):
handler = SentryHandler(['http://sentry.local/api/store/'], 'KEY')
self.assertTrue(isinstance(handler.client, Client))
def test_first_arg_as_dsn(self):
handler = SentryHandler('http://public:secret@example.com/1')
self.assertTrue(isinstance(handler.client, Client))
def test_custom_client_class(self):
handler = SentryHandler('http://public:secret@example.com/1', client_cls=TempStoreClient)
        self.assertTrue(isinstance(handler.client, TempStoreClient))
def test_invalid_first_arg_type(self):
self.assertRaises(ValueError, SentryHandler, object)
def test_logging_level_set(self):
handler = SentryHandler('http://public:secret@example.com/1', level="ERROR")
        # XXX: some versions of Python 2.6 seem to pass the string on instead of coercing it
self.assertTrue(handler.level in (logging.ERROR, 'ERROR'))
def test_logging_level_not_set(self):
handler = SentryHandler('http://public:secret@example.com/1')
self.assertEqual(handler.level, logging.NOTSET)
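# A minimal usage sketch (not part of the test suite): wiring the handler
# into the standard-library logging module, assuming a valid Sentry DSN.
#
#   import logging
#   from raven.handlers.logging import SentryHandler
#   logger = logging.getLogger(__name__)
#   logger.addHandler(SentryHandler('http://public:secret@example.com/1'))
#   logger.error('Something broke', exc_info=True)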
|
|
import rethinkdb as r
from mockthink.test.common import assertEqUnordered
from mockthink.test.functional.common import MockTest
def common_join_data():
people_data = [
{'id': 'joe-id', 'name': 'Joe'},
{'id': 'tom-id', 'name': 'Tom'},
{'id': 'arnold-id', 'name': 'Arnold'}
]
job_data = [
{'id': 'lawyer-id', 'name': 'Lawyer'},
{'id': 'nurse-id', 'name': 'Nurse'},
{'id': 'semipro-wombat-id', 'name': 'Semi-Professional Wombat'}
]
employee_data = [
{'id': 'joe-emp-id', 'person': 'joe-id', 'job': 'lawyer-id'},
{'id': 'arnold-emp-id', 'person': 'arnold-id', 'job': 'nurse-id'}
]
data = {
'dbs': {
'jezebel': {
'tables': {
'people': people_data,
'jobs': job_data,
'employees': employee_data
}
}
}
}
return data
class TestEqJoin(MockTest):
@staticmethod
def get_data():
return common_join_data()
def test_eq_join_1(self, conn):
expected = [
{
'left': {
'id': 'joe-emp-id',
'person': 'joe-id',
'job': 'lawyer-id'
},
'right': {
'id': 'joe-id',
'name': 'Joe'
}
},
{
'left': {
'id': 'arnold-emp-id',
'person': 'arnold-id',
'job': 'nurse-id'
},
'right': {
'id': 'arnold-id',
'name': 'Arnold'
}
}
]
result = r.db('jezebel').table('employees').eq_join('person', r.db('jezebel').table('people')).run(conn)
assertEqUnordered(expected, list(result))
class TestInnerJoin(MockTest):
@staticmethod
def get_data():
return common_join_data()
def test_inner_join_1(self, conn):
expected = [
{
'left': {
'id': 'joe-emp-id',
'person': 'joe-id',
'job': 'lawyer-id'
},
'right': {
'id': 'joe-id',
'name': 'Joe'
}
},
{
'left': {
'id': 'arnold-emp-id',
'person': 'arnold-id',
'job': 'nurse-id'
},
'right': {
'id': 'arnold-id',
'name': 'Arnold'
}
}
]
result = r.db('jezebel').table('employees').inner_join(
r.db('jezebel').table('people'),
lambda employee, person: employee['person'] == person['id']
).run(conn)
assertEqUnordered(expected, list(result))
class TestOuterJoin(MockTest):
@staticmethod
def get_data():
people = [
{'id': 'sam-id', 'name': 'Sam'},
{'id': 'miguel-id', 'name': 'Miguel'},
{'id': 'mark-id', 'name': 'Mark'}
]
pets = [
{'id': 'pet1-id', 'name': 'Pet1', 'owner': 'miguel-id'},
{'id': 'pet2-id', 'name': 'Pet2', 'owner': 'mark-id'},
{'id': 'pet3-id', 'name': 'Pet3', 'owner': 'miguel-id'},
]
return {
'dbs': {
'awesomesauce': {
'tables': {
'pets': pets,
'people': people
}
}
}
}
def test_outer_join_1(self, conn):
expected = [
{
'left': {
'id': 'miguel-id',
'name': 'Miguel'
},
'right': {
'id': 'pet1-id',
'name': 'Pet1',
'owner': 'miguel-id'
}
},
{
'left': {
'id': 'miguel-id',
'name': 'Miguel'
},
'right': {
'id': 'pet3-id',
'name': 'Pet3',
'owner': 'miguel-id'
}
},
{
'left': {
'id': 'mark-id',
'name': 'Mark'
},
'right': {
'id': 'pet2-id',
'name': 'Pet2',
'owner': 'mark-id'
}
},
{
'left': {
'id': 'sam-id',
'name': 'Sam'
}
}
]
result = r.db('awesomesauce').table('people').outer_join(
r.db('awesomesauce').table('pets'),
lambda person, pet: pet['owner'] == person['id']
).run(conn)
assertEqUnordered(expected, list(result))
class TestZip(MockTest):
@staticmethod
def get_data():
left = [
{
'id': 'one',
'lname': 'One',
'rval': 'r-one'
},
{
'id': 'two',
'lname': 'Two',
'rval': 'r-two'
}
]
right = [
{
'id': 'r-one',
'rname': 'RightOne'
},
{
'id': 'r-two',
'rname': 'RightTwo'
}
]
return {
'dbs': {
'x': {
'tables': {
'ltab': left,
'rtab': right
}
}
}
}
def test_zip_1(self, conn):
expected = [
{
'id': 'r-one',
'lname': 'One',
'rname': 'RightOne',
'rval': 'r-one'
},
{
'id': 'r-two',
'lname': 'Two',
'rname': 'RightTwo',
'rval': 'r-two'
}
]
result = r.db('x').table('ltab').eq_join(
'rval', r.db('x').table('rtab')
).zip().run(conn)
assertEqUnordered(expected, list(result))
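# Note on the expectation above: eq_join yields {'left': ..., 'right': ...}
# pairs, and zip() merges the right document into the left one, with the
# right-hand fields winning on key collisions. That is why the merged 'id'
# values are the right-table ids ('r-one', 'r-two') rather than the
# left-table ids.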
|
|
from io import BytesIO
import json
import logging
import uuid
import os
import pickle
from celery import uuid as celery_uuid
from celery.result import AsyncResult
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth import login
from django.contrib import messages
from django.core.cache import cache
from django.core.urlresolvers import reverse_lazy
from django.core.files.storage import FileSystemStorage
from django.http import JsonResponse, HttpResponseServerError
from django.http import HttpResponse
from django.http import Http404
from django.shortcuts import render
from django.shortcuts import redirect
from django.shortcuts import get_object_or_404
from django.views.generic import FormView
from django.views.generic.edit import ProcessFormView
from django.views.generic import TemplateView
from django.utils import timezone
import msgpack
from paypal.standard.forms import PayPalPaymentsForm
from configurable_elements.models import get_legend_rows
from disease.files_utils import process_filename
from disease.files_utils import get_genome_data as _get_genome_data
from disease.files_utils import get_genome_dirpath
from disease.files_utils import get_genome_filepath
from .models import CustomizedTag
from .forms import UploadGenomeForm
from .models import AnalyzeDataOrder
from .models import AlleleColor
from .models import SNPMarker
from .models import SNPMarkerArticle
from .tasks import recompute_genome_file
log = logging.getLogger(__name__)
storage = FileSystemStorage()
def upload_progress(request):
"""
Return JSON object with information about the progress of an upload.
"""
progress_id = ''
if 'X-Progress-ID' in request.GET:
progress_id = request.GET['X-Progress-ID']
elif 'X-Progress-ID' in request.META:
progress_id = request.META['X-Progress-ID']
if progress_id:
cache_key = "%s_%s" % (request.META['REMOTE_ADDR'], progress_id)
data = cache.get(cache_key)
        log.debug('PID: %s, Upload progress cache %s', os.getpid(), data)
if data is None:
data = {'length': 1, 'uploaded': 1}
return JsonResponse(data)
else:
return HttpResponseServerError('Server Error: You must provide X-Progress-ID header or query param.')
class GenomeFilePathMixin(object):
def process_filename(self, filename, filename_suffix=None):
return process_filename(filename, filename_suffix)
def get_dirpath(self, user=None):
if user is None:
user = self.request.user
return get_genome_dirpath(user)
def get_filepath(self, filename, user=None):
if user is None:
user = self.request.user
return get_genome_filepath(user, filename)
class JSONResponseMixin(object):
"""
A mixin that can be used to render a JSON response.
"""
def render_to_json_response(self, context, **response_kwargs):
"""
Returns a JSON response, transforming 'context' to make the payload.
"""
return JsonResponse(
self.get_data(context),
**response_kwargs
)
def get_data(self, context):
return context
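# A minimal usage sketch (hypothetical view, for illustration only): combine
# the mixin with a TemplateView and override get_data() to shape the payload,
# mirroring what DisplayGenomeResult does further below.
#
#   class ExampleStatusView(JSONResponseMixin, TemplateView):
#       template_name = 'status.html'
#
#       def get_data(self, context):
#           return {'ok': True}
#
#       def render_to_response(self, context, **response_kwargs):
#           if self.request.is_ajax():
#               return self.render_to_json_response(context)
#           return super().render_to_response(context, **response_kwargs)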
class UploadGenome(GenomeFilePathMixin, FormView):
template_name = 'upload_genome.html'
form_class = UploadGenomeForm
# success_url = reverse_lazy('disease:upload_success')
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def get(self, request, *args, **kwargs):
"""
Handles GET requests and instantiates a blank version of the form.
"""
form = self.get_form()
upload_id = uuid.uuid4()
return self.render_to_response(self.get_context_data(form=form, upload_id=upload_id))
def save_processed_data(self, data):
buffer = BytesIO()
pickle.dump(data, buffer)
filename = self.process_filename(self.request.FILES['file'].name, filename_suffix='_processed')
storage.save(self.get_filepath(filename), buffer)
def get_success_url(self):
return reverse_lazy('disease:upload_success', kwargs={'pk': self.analyze_order_pk})
def form_valid(self, form):
# save file
        # create AnalyzeDataOrder
# raw_filepath = self.get_filepath(raw_filename)
cd = form.cleaned_data
email = cd.get('email', None)
raw_file = cd.get('file', None)
raw_filename = getattr(raw_file, 'name', None)
user_model = get_user_model()
if not self.request.user.is_authenticated():
try:
user = user_model.objects.get(email=email)
except user_model.DoesNotExist: # user doesn't have an account, create one
user = user_model(email=email, username=email)
user.save()
# Dirty hack to allow user login by model
user.backend = 'django.contrib.auth.backends.ModelBackend'
login(self.request, user)
# Dirty hack to fix some parts requiring request.user...
self.request.user = user
else:
user = self.request.user
storage.save(self.get_filepath(raw_filename, user=user), raw_file)
task_id = celery_uuid()
analyze_order = AnalyzeDataOrder(uploaded_filename=raw_filename, user=user, task_uuid=task_id)
if user.is_staff and user.is_active:
log.info('User %s skipping payment due to staff membership', user)
analyze_order.paid = timezone.now()
analyze_order.save()
recompute_genome_file.apply_async(args=(self.get_filepath(raw_filename, user=user),),
task_id=task_id)
# table = process_genoome_data(data)
# file_exists = os.path.isfile(os.path.join(settings.MEDIA_ROOT, self.get_filepath(self.process_filename(raw_filename, filename_suffix='_processed'))))
# if self.request.user.is_authenticated() and not file_exists:
# self.save_processed_data(table)
# ctx = self.get_context_data(form=form, table=table, analyzed=True)
self.analyze_order_pk = analyze_order.pk
return super().form_valid(form)
def allele_description(request, pk):
"""
login_required
user should be able to view only his files
"""
allele = request.GET['allele']
marker = get_object_or_404(SNPMarker, pk=pk)
try:
article = get_object_or_404(SNPMarkerArticle, snp_marker=marker)
except Http404:
return redirect(marker.link)
colours = AlleleColor.objects.filter(snp_marker=marker)
your_allele = colours.get(allele=allele)
ctx = {
'marker': marker,
'article': article,
'colors': colours,
'your_allele': your_allele,
'base_template': 'base.html',
}
if 'ajax' in request.REQUEST:
ctx['base_template'] = 'allele_ajax.html'
return render(request, 'allele_description.html', ctx)
class UploadGenomeSuccessView(TemplateView):
template_name = 'upload_success.html'
def post(self, *args, **kwargs):
return self.get(*args, **kwargs)
def get(self, request, *args, **kwargs):
self.analyze_data_order = AnalyzeDataOrder.objects.get(pk=kwargs['pk'])
user = request.user
if self.analyze_data_order.is_paid or (user.is_staff and user.is_active):
return redirect('{}?file={}'.format(reverse_lazy('disease:browse_genome'),
self.analyze_data_order.uploaded_filename))
return super().get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
kwargs.update(dict(bitpay_checkout_url=settings.BITPAY_API,
analyze_order=self.analyze_data_order,
paypal_form=PayPalPaymentsForm(
initial=self.analyze_data_order.paypal_data(self.request))
))
return super().get_context_data(**kwargs)
class DisplayGenomeResult(JSONResponseMixin, GenomeFilePathMixin, TemplateView):
template_name = 'display_genome_result.html'
def get(self, request, *args, **kwargs):
self.user = self.request.user
if self.is_browsing_via_admin:
self.user = get_user_model().objects.get(pk=int(self.request.GET['pk']))
return super().get(request, *args, **kwargs)
def get_genome_data(self):
filename = self.process_filename(self.request.GET['file'], filename_suffix='_processed')
filepath = self.get_filepath(filename)
data = _get_genome_data(filepath)
data = list(reversed(sorted(data, key=lambda r: r.get('priority', -1))))
return data
@property
def is_admin(self): # TODO use permissions?
return bool(self.request.user.is_staff and self.request.user.is_active)
@property
def is_browsing_via_admin(self):
return bool(('pk' in self.request.GET) and self.is_admin)
def get_filepath(self, filename):
if self.is_browsing_via_admin:
return get_genome_filepath(self.user, filename)
return super().get_filepath(filename)
def get_analyze_data_order(self):
order_kwargs = dict(uploaded_filename=self.request.GET['file'], user=self.user)
try:
analyze_data_order = AnalyzeDataOrder.objects.get(**order_kwargs)
        except AnalyzeDataOrder.DoesNotExist:
            # When browsing via admin there is legitimately no order; return
            # None (get_data() below guards for it) instead of raising
            # UnboundLocalError at the return statement.
            analyze_data_order = None
            if not self.is_browsing_via_admin:
                analyze_data_order = AnalyzeDataOrder(**order_kwargs)
                analyze_data_order.save()
        return analyze_data_order
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx['legend_rows'] = get_legend_rows()
ctx['allele_tags'] = CustomizedTag.objects.filter(show_on_data=True)
ctx['is_admin'] = is_admin = self.is_admin
analyze_data_order = self.get_analyze_data_order()
paid = analyze_data_order.is_paid
job = AsyncResult(analyze_data_order.task_uuid)
ctx['genome_data_url'] = '{}?file={}'.format(reverse_lazy('disease:browse_genome'), self.request.GET['file'])
if self.is_browsing_via_admin:
ctx['genome_data_url'] += '&pk={}'.format(self.user.pk)
ctx['is_job_ready'] = is_job_ready = job.ready()
ctx['is_job_successful'] = is_job_successful = job.successful()
ctx['is_job_failure'] = is_job_failure = job.failed()
if is_job_ready and is_job_successful:
ctx['paid'] = paid
if paid or is_admin:
ctx['table'] = self.get_genome_data()
ctx['bitpay_checkout_url'] = settings.BITPAY_API
ctx['analyze_order'] = analyze_data_order
ctx['pos_data'] = analyze_data_order.posData()
ctx['paypal_form'] = PayPalPaymentsForm(
initial=analyze_data_order.paypal_data(self.request))
elif is_job_ready and is_job_failure:
if not self.request.is_ajax():
                messages.add_message(self.request, settings.DANGER, "An error occurred while processing your genome data. Please contact us for details.")
else:
if not self.request.is_ajax():
                messages.add_message(self.request, messages.INFO, 'Your genome data is being analyzed. Wait a few seconds and try this page again.')
return ctx
    def get_data(self, context):
        analyze_data_order = self.get_analyze_data_order()
        result = []
        is_ready = False
        # get_analyze_data_order() may return None when browsing via admin;
        # guard before touching its attributes so 'paid' and 'job' are never
        # referenced unbound.
        if analyze_data_order is not None:
            paid = analyze_data_order.is_paid
            job = AsyncResult(analyze_data_order.task_uuid)
            is_ready = job.ready()
            if paid or self.is_admin:
                result = self.get_genome_data()
        return {
            'data': result,
            'is_ready': is_ready,
        }
def render_to_response(self, context, **response_kwargs):
if self.request.is_ajax():
return self.render_to_json_response(context)
else:
return super().render_to_response(context, **response_kwargs)
def landing_genome_data(request):
sample_data_filepath = 'disease/samplegenotype'
data = {'data': _get_genome_data(sample_data_filepath)}
data['data'] = list(reversed(sorted(data['data'], key=lambda r: r.get('priority', -1))))
return JsonResponse(data)
class PaymentStatusView(ProcessFormView, TemplateView):
http_method_names = ['post']
def post(self, request, *args, **kwargs):
post_data = self.request.POST
if post_data['status'] in {'paid', 'complete', 'confirmed'}:
posData = json.loads(post_data['posData'])
analyze_order_pk = posData['analyze_order_pk']
user_pk = posData['user_pk']
analyze_order = AnalyzeDataOrder.objects.get(pk=analyze_order_pk)
analyze_order.paid = timezone.now()
analyze_order.save()
return HttpResponse('OK')
|
|
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import _mysql
import itertools
import time
import sys
import collections
import os.path
import simplejson as json
def ConnectToMySQL(host=None, port=3307, user='root', database='', **kwargs):
if not host:
abs_prefix = os.path.join(os.path.dirname(__file__), "../data")
with open(abs_prefix + '/config.json','r') as f:
host = json.load(f)["dbHost"]
db = Connection(host=host, user=user, database=database, **kwargs)
return db
class Connection(object):
def __init__(self, host, user, database):
sys_vars = dict(
character_set_server = "utf8mb4",
collation_server = "utf8mb4_unicode_ci",)
sys_vars["sql_mode"] = "STRICT_ALL_TABLES"
args = dict(db=database, local_infile=1)
from MySQLdb.converters import conversions
args["user"] = user
self.socket = None
pair = host.split(":")
if len(pair) == 2:
args["host"] = pair[0]
args["port"] = int(pair[1])
else:
args["host"] = host
args["port"] = 3306
self.port = args["port"]
args["connect_timeout"] = 10
args["init_command"] = 'set names "utf8mb4" collate "utf8mb4_bin"' + ''.join([', @@%s = "%s"' % t for t in sys_vars.items()])
self._db = None
self._db_args = args
self.encoders = dict([ (k, v) for k, v in conversions.items()
if type(k) is not int ])
self._last_use_time = time.time()
self.reconnect()
self._db.set_character_set("utf8")
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def set_print_queries(self, print_queries):
self.print_queries = print_queries
def close(self):
"""Closes this database connection."""
if getattr(self, "_db", None) is not None:
self._db.close()
self._db = None
def reconnect(self):
"""Closes the existing database connection and re-opens it."""
conn = _mysql.connect(**self._db_args)
if conn is not None:
self.close()
self._db = conn
def query(self, query, *parameters):
self._execute(query, *parameters)
self._result = self._db.use_result()
if self._result is None:
return self._rowcount
fields = zip(*self._result.describe())[0]
rows = list(self._result.fetch_row(0))
ret = SelectResult(fields, rows)
        return ret
def affected_rows(self):
return self._rowcount
def get(self, query, *parameters):
"""Returns the first row returned for the given query."""
rows = self.query(query, *parameters)
if not rows:
return None
elif not isinstance(rows, list):
raise Exception("Query is not a select query")
elif len(rows) > 1:
raise Exception("Multiple rows returned for Database.get() query")
else:
return rows[0]
def execute(self, query, *parameters):
"""Executes the given query, returning the lastrowid from the query."""
return self.execute_lastrowid(query, *parameters)
def execute_lastrowid(self, query, *parameters):
"""Executes the given query, returning the lastrowid from the query."""
self._execute(query, *parameters)
self._result = self._db.store_result()
return self._db.insert_id()
def execute_rowcount(self, query, *parameters):
"""Executes the given query, returning the rowcount from the query."""
self._execute(query, *parameters)
self._result = self._db.store_result()
return self._result.num_rows()
def _execute(self, query, *parameters):
        if parameters:
query = query % tuple([self._db.escape(p, self.encoders) for p in parameters])
if isinstance(query, unicode):
query = query.encode(self._db.character_set_name())
self._db.query(query)
self._rowcount = self._db.affected_rows()
from collections import OrderedDict
class Row(OrderedDict):
"""A dict that allows for object-like property access syntax."""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __hash__(self):
return hash(tuple(sorted(self.iteritems())))
class SelectResult(list):
""" Goal: create a container to hold a sql result that doesn't lose any
information, but is compatable with our current scripts """
def __init__(self, fieldnames, values, format="dict"):
self.fieldnames = fieldnames
self.values = values
self.format = format
def __iter__(self):
if self.format == "dict":
return iter(self.old_format())
else:
return iter(self.values)
def __len__(self):
return len(self.values)
def width(self):
return len(self.fieldnames)
def __getitem__(self, rowId):
if isinstance(rowId, slice):
            return SelectResult(self.fieldnames, self.values[rowId], self.format)
elif self.format == "dict":
return Row(zip(self.fieldnames, self.values[rowId]))
else:
return self.values[rowId]
def __eq__(self, other):
# don't use isinstance here because this class inherits list
        if type(other) == list:
# remain compatible with old tests
return other == self.old_format()
return results_equal(self, other, True)
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return str(self.old_format())
def __repr__(self):
return str(self)
def sort(self):
self.values.sort()
def old_format(self):
return [Row(itertools.izip(self.fieldnames, row)) for row in self.values]
def filter_columns(self, keys):
ikeys = [key for key in keys if isinstance(key, int)]
skeys = [key for key in keys if isinstance(key, str)]
selection = [i for i in range(self.width()) if i in ikeys or self.fieldnames[i] in skeys]
return SelectResult([self.fieldnames[i] for i in selection],
[[value[i] for i in selection] for value in self.values], self.format)
def set_format(self, format):
self.format = format
def format_column(self, value):
if value is None:
return "NULL"
if type(value) == "date":
return value.isoformat()
if type(value) == "datetime":
return value.isoformat()
return str(value)
def format_assoc(self):
return [", ".join(["%s:%s" % (col[0], self.format_column(col[1])) for col in zip(self.fieldnames, row)]) for row in self.values]
def format_table(self, return_list=False):
if len(self) == 0:
ret = ["Empty Set"]
else:
values = [[self.format_column(column) for column in row] for row in self.values]
widths = [max(len(self.fieldnames[i]), max([len(row[i]) for row in values])) for i in xrange(len(self.fieldnames))]
separator = '+' + ''.join(['-' * (width+2) + '+' for width in widths])
format_string = "| " + " | ".join(["{%d:%d}" % (i, widths[i])
for i in range(len(widths))]) + " |"
footer = "%d row%s in set" % (len(values), "" if len(values) == 1 else "s")
ret = [separator]
ret += [format_string.format(*self.fieldnames)]
ret += [separator]
ret += [format_string.format(*row) for row in values]
ret += [separator]
ret += [footer]
if return_list:
return ret
return '\n'.join(ret)
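# A minimal sketch of SelectResult on its own (no database connection needed):
#
#   rows = SelectResult(('id', 'name'), [(1, 'ada'), (2, 'bob')])
#   rows[0]['name']              # 'ada' (rows behave like dicts by default)
#   rows.set_format('list')
#   rows[0]                      # (1, 'ada') (raw tuple in 'list' format)
#   print rows.format_table()    # ASCII table with a "2 rows in set" footer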
|
|
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.core.urlresolvers import resolve, reverse
from account.views import ForgotPasswordView, ResetPasswordView
from mock import patch
from account.emails import SendGrid
from resources.models import Resource
from pairprogram.models import Session, Participant
class IndexViewTest(TestCase):
def setUp(self):
self.client = Client()
User.objects.create_user(
username='lade',
password='password',
)
self.initiator = User.objects.create_user(
username='andela',
password='awesome',
email='andela@andela.com'
)
self.pair_session = Session.objects.create(id=1,
initiator=self.initiator, session_name="SomeRandomSession")
def test_can_reach_index_page(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_right_view_for_index_is_returned(self):
match = resolve('/')
self.assertEqual(match.url_name, 'index')
def test_can_login(self):
response = self.client.post('/login', {
'username': 'lade',
'password': 'password'
})
self.assertEqual(response.status_code, 302)
def test_can_register(self):
response = self.client.post('/register', {
'username': 'lade.o',
'password': 'password',
'password_conf': 'password',
'email': 'olufunmilade.oshodi@andela.com'
})
self.assertEqual(response.status_code, 302)
def test_can_register_and_create_session(self):
response = self.client.post('/register', {
'username': 'lade.o',
'password': 'password',
'password_conf': 'password',
'session_id': 1,
'email': 'olufunmilade.oshodi@andela.com'
})
session_program = Participant.objects.all()
self.assertEqual(len(session_program), 1)
self.assertEqual(response.status_code, 302)
class HomeViewTest(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user(
username='lade',
password='password'
)
self.user.set_password('password')
self.user.save()
self.login = self.client.login(
username='lade', password='password')
def test_can_reach_home_page(self):
self.assertEqual(self.login, True)
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
def test_right_view_for_home_is_returned(self):
match = resolve('/home')
self.assertEqual(match.url_name, 'home')
class SearchViewTest(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user(
username='lade',
password='password'
)
self.user.set_password('password')
self.user.save()
self.login = self.client.login(
username='lade', password='password')
def create_resources(self, text='some more words',
resource_file='resource_file'):
return Resource.objects.create(
text=text, author=self.user, resource_file=resource_file
)
def test_can_reach_search_page(self):
self.assertEqual(self.login, True)
response = self.client.get(reverse('search'))
self.assertEqual(response.status_code, 200)
def test_can_search_based_on_user_or_resource(self):
self.create_resources()
self.create_resources()
url = reverse('search_by', kwargs={'searchby': 'resources'})
url2 = reverse('search_by', kwargs={'searchby': 'users'})
response = self.client.get(url)
response2 = self.client.get(url2)
self.assertEqual(len(response.context['resources']), 2)
self.assertEqual(len(response.context['users']), 1)
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.status_code, 200)
def test_return_no_user_or_response_when_not_resource_is_found(self):
self.create_resources()
self.create_resources()
url = reverse('search_by', kwargs={'searchby': 'resources'})
url2 = reverse('search_by', kwargs={'searchby': 'users'})
response = self.client.get(url + "?q=eaiofaowfjieaowef")
response2 = self.client.get(url2 + "?q=eaiofaowfjieaowef")
self.assertEqual(len(response.context['resources']), 0)
self.assertEqual(len(response.context['users']), 0)
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.status_code, 200)
class ForgotResetTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_forgot_route_resolves_to_correct_view(self):
response = self.client.get('/recovery')
self.assertEqual(
response.resolver_match.func.__name__,
ForgotPasswordView.as_view().__name__)
def test_reset_route_resolves_to_correct_view(self):
response = self.client.get(
'/recovery/ajkzfYba9847DgJ7wbkwAaSbkTjUdawGG998qo3HG8qae83')
self.assertEqual(
response.resolver_match.func.__name__,
ResetPasswordView.as_view().__name__)
class PasswordResetTestCase(TestCase):
def setUp(self):
# create a test client:
self.client = Client()
# register a sample user:
self.user_account = User.objects.create_user(
'inioluwafageyinbo', 'inioluwafageyinbo@gmail.com', 'codango')
self.user_account.first_name = 'Inioluwa'
self.user_account.last_name = 'Fageyinbo'
self.user_account.save()
def test_get_returns_200(self):
response = self.client.get('/recovery')
self.assertEquals(response.status_code, 200)
def test_recovery_email_not_sent_for_unregistered_user(self):
response = self.client.post(
'/recovery', {"email": "fagemaki.iniruto@gmail.com"})
self.assertNotIn('email_status', response.context)
class ProfileViewTestCase(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user(
username='lade',
password='password'
)
self.user.set_password('password')
self.user.save()
self.login = self.client.login(
username='lade', password='password')
def test_can_reach_profile_page(self):
response = self.client.get('/user/lade')
self.assertEqual(response.status_code, 200)
def test_can_reach_profile_edit_page(self):
response = self.client.post('/user/lade/edit',
{'position': 'Software Developer',
'place_of_work': 'Andela',
'first_name': 'Lade',
'last_name': 'Oshodi',
'about': 'I love to Code'})
self.assertEqual(response.status_code, 302)
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
import uuid
from .. import models
class ApiVersionLocalOperations(object):
"""ApiVersionLocalOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: This should appear as a method parameter, use value '2.0'. Constant value: "2.0".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2.0"
self.config = config
def get_method_local_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get method with api-version modeled in the method. pass in
api-version = '2.0' to succeed.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsAzureSpecials.models.ErrorException>`
"""
# Construct URL
url = '/azurespecials/apiVersion/method/string/none/query/local/2.0'
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_method_local_null(
self, api_version=None, custom_headers=None, raw=False, **operation_config):
"""Get method with api-version modeled in the method. pass in
api-version = null to succeed.
:param api_version: This should appear as a method parameter, use
value null, this should result in no serialized parameter
:type api_version: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsAzureSpecials.models.ErrorException>`
"""
# Construct URL
url = '/azurespecials/apiVersion/method/string/none/query/local/null'
# Construct parameters
query_parameters = {}
if api_version is not None:
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_path_local_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get method with api-version modeled in the method. pass in
api-version = '2.0' to succeed.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsAzureSpecials.models.ErrorException>`
"""
# Construct URL
url = '/azurespecials/apiVersion/path/string/none/query/local/2.0'
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_swagger_local_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get method with api-version modeled in the method. pass in
api-version = '2.0' to succeed.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsAzureSpecials.models.ErrorException>`
"""
# Construct URL
url = '/azurespecials/apiVersion/swagger/string/none/query/local/2.0'
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
|
|
"""
Create metrics for Dataverses.
This may be used for APIs, views with visualizations, etc.
"""
from collections import OrderedDict
from django.db import models
from dv_apps.utils.date_helper import get_month_name_abbreviation,\
get_month_name
from dv_apps.dataverses.models import Dataverse, DATAVERSE_TYPE_UNCATEGORIZED
from dv_apps.metrics.stats_util_base import StatsMakerBase, TruncYearMonth
from dv_apps.metrics.stats_result import StatsResult
from dv_apps.dvobjects.models import DVOBJECT_CREATEDATE_ATTR
from dv_apps.harvesting.models import HarvestingDataverseConfig
class StatsMakerDataverses(StatsMakerBase):
"""
Utility class to create stats for Dataverses
"""
def __init__(self, **kwargs):
"""
Start and end dates are optional.
start_date = string in YYYY-MM-DD format
end_date = string in YYYY-MM-DD format
"""
super(StatsMakerDataverses, self).__init__(**kwargs)
# Default to include harvested Dataverses
# Note! To really work, this logic needs to be applied to
# Datasets, Datafiles, etc.
self.include_harvested = kwargs.get('include_harvested', True)
# ----------------------------
# Dataverse counts
# ----------------------------
def get_dataverse_count_published(self):
"""
Return the count of published Dataverses
"""
return self.get_dataverse_count(**self.get_is_published_filter_param())
def get_dataverse_count_unpublished(self):
"""
Return the count of unpublished Dataverses
"""
return self.get_dataverse_count(**self.get_is_NOT_published_filter_param())
def get_dataverse_count(self, **extra_filters):
"""
Return the Dataverse count -- a single number
"""
if self.was_error_found():
return self.get_error_msg_return()
filter_params = self.get_date_filter_params()
if extra_filters:
for k, v in extra_filters.items():
filter_params[k] = v
if self.include_harvested:
q = Dataverse.objects.filter(**filter_params)
else:
            q = Dataverse.objects.filter(**filter_params\
                ).exclude(dvobject__id__in=self.get_harvested_dataverse_ids())
sql_query = str(q.query)
data_dict = OrderedDict()
data_dict['count'] = q.count()
data_dict['count_string'] = "{:,}".format(data_dict['count'])
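        # data_dict now looks like, e.g.:
        #   {'count': 1234, 'count_string': '1,234'}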
return StatsResult.build_success_result(data_dict, sql_query)
# ----------------------------
# Dataverse counts by month
# ----------------------------
def get_dataverse_counts_by_month_unpublished(self):
"""
        Get # of --UNPUBLISHED-- dataverses created each month
"""
return self.get_dataverse_counts_by_month(**self.get_is_NOT_published_filter_param())
def get_dataverse_counts_by_month_published(self):
"""
        Get # of --PUBLISHED-- dataverses created each month
"""
return self.get_dataverse_counts_by_month(**self.get_is_published_filter_param())
def get_dataverse_count_start_point(self, **extra_filters):
"""Get the startpoint when keeping a running total of file downloads"""
start_point_filters = self.get_running_total_base_date_filters()
if start_point_filters is None:
return 0
if extra_filters:
for k, v in extra_filters.items():
start_point_filters[k] = v
        exclude_params = {}
        if not self.include_harvested:
            # only exclude harvested Dataverses when they should be left out
            exclude_params['dvobject__id__in'] = self.get_harvested_dataverse_ids()
return Dataverse.objects.select_related('dvobject').filter(**start_point_filters).exclude(**exclude_params).count()
def get_harvested_dataverse_ids(self):
"""Return the ids of harvested Dataverses"""
        return HarvestingDataverseConfig.objects.values_list(
            'dataverse__id', flat=True).all()
def get_dataverse_counts_by_month(self, date_param=DVOBJECT_CREATEDATE_ATTR, **extra_filters):
"""
Return Dataverse counts by month
"""
# Was an error found earlier?
#
if self.was_error_found():
return self.get_error_msg_return()
# -----------------------------------
# (1) Build query filters
# -----------------------------------
# Exclude records where dates are null
# - e.g. a record may not have a publication date
exclude_params = { '%s__isnull' % date_param : True}
        if not self.include_harvested:
            exclude_params['dvobject__id__in'] = self.get_harvested_dataverse_ids()
# Retrieve the date parameters
#
filter_params = self.get_date_filter_params()
# Add extra filters from kwargs
#
if extra_filters:
for k, v in extra_filters.items():
filter_params[k] = v
# -----------------------------------
# (2) Construct query
# -----------------------------------
        # apply exclude filters and date filters
#
dv_counts_by_month = Dataverse.objects.select_related('dvobject'\
).exclude(**exclude_params\
).filter(**filter_params)
# annotate query adding "month_year" and "count"
#
dv_counts_by_month = dv_counts_by_month.annotate(\
yyyy_mm=TruncYearMonth('%s' % date_param)\
).values('yyyy_mm'\
).annotate(count=models.Count('dvobject_id')\
).values('yyyy_mm', 'count'\
).order_by('%syyyy_mm' % self.time_sort)
# -----------------------------------
# (2a) Get SQL query string
# -----------------------------------
sql_query = str(dv_counts_by_month.query)
# -----------------------------------
# (3) Format results
# -----------------------------------
# hold the running total count
running_total = self.get_dataverse_count_start_point(**extra_filters)
formatted_records = [] # move from a queryset to a []
for d in dv_counts_by_month:
rec_fmt = OrderedDict()
# change the datetime object to a string
rec_fmt['yyyy_mm'] = d['yyyy_mm'].strftime('%Y-%m')
rec_fmt['count'] = d['count']
# running total
running_total += d['count']
rec_fmt['running_total'] = running_total
# d['month_year'] = d['yyyy_mm'].strftime('%Y-%m')
# Add year and month numbers
rec_fmt['year_num'] = d['yyyy_mm'].year
rec_fmt['month_num'] = d['yyyy_mm'].month
# Add month name
            month_name_found, month_name_short = get_month_name_abbreviation(d['yyyy_mm'].month)
            if month_name_found:
                _, rec_fmt['month_name'] = get_month_name(d['yyyy_mm'].month)
                rec_fmt['month_name_short'] = month_name_short
            else:
                # TODO: log the unexpected month number
                pass
# Add formatted record
formatted_records.append(rec_fmt)
data_dict = OrderedDict()
data_dict['record_count'] = len(formatted_records)
data_dict['total_count'] = running_total
data_dict['records'] = formatted_records
return StatsResult.build_success_result(data_dict, sql_query)
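    # Illustrative shape of a single entry in 'records' returned by
    # get_dataverse_counts_by_month above (values are made up):
    #
    #   {'yyyy_mm': '2016-04', 'count': 12, 'running_total': 150,
    #    'year_num': 2016, 'month_num': 4,
    #    'month_name': 'April', 'month_name_short': 'Apr'}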
def get_dataverse_counts_by_type_published(self, exclude_uncategorized=True):
"""Return dataverse counts by 'dataversetype' for published dataverses"""
return self.get_dataverse_counts_by_type(exclude_uncategorized,\
**self.get_is_published_filter_param())
def get_dataverse_counts_by_type_unpublished(self, exclude_uncategorized=True):
"""Return dataverse counts by 'dataversetype' for unpublished dataverses"""
return self.get_dataverse_counts_by_type(exclude_uncategorized,\
**self.get_is_NOT_published_filter_param())
def get_dataverse_counts_by_type(self, exclude_uncategorized=True, **extra_filters):
"""
Return dataverse counts by 'dataversetype'
        Optional handling of uncategorized dataverses:
            - 'exclude_uncategorized=True' (the default) leaves
              "UNCATEGORIZED" dataverses out of the counts
Returns: { "dv_counts_by_type": [
{
"dataversetype": "RESEARCH_PROJECTS",
"type_count": 85,
"total_count": 356,
"percent_string": "23.9%"
},
{
"dataversetype": "TEACHING_COURSES",
"type_count": 10,
"total_count": 356,
"percent_string": "2.8%"
}
... etc
]
}
"""
if self.was_error_found():
return self.get_error_msg_return()
# Retrieve the date parameters
#
filter_params = self.get_date_filter_params(DVOBJECT_CREATEDATE_ATTR)
# Add extra filters
if extra_filters:
for k, v in extra_filters.items():
filter_params[k] = v
if exclude_uncategorized:
exclude_params = dict(dataversetype=DATAVERSE_TYPE_UNCATEGORIZED)
else:
exclude_params = {}
dataverse_counts_by_type = Dataverse.objects.select_related('dvobject'\
).filter(**filter_params\
).exclude(**exclude_params\
).values('dataversetype'\
).order_by('dataversetype'\
).annotate(type_count=models.Count('dataversetype')\
).order_by('-type_count')
# -----------------------------------
# Get SQL query string
# -----------------------------------
sql_query = str(dataverse_counts_by_type.query)
# Count all dataverses
#
        total_count = sum([rec.get('type_count', 0) for rec in dataverse_counts_by_type])
        total_count = total_count + 0.0  # coerce to float so the percentages below divide correctly (Python 2)
# Format the records, adding 'total_count' and 'percent_string' to each one
#
formatted_records = []
for rec in dataverse_counts_by_type:
fmt_dict = OrderedDict()
fmt_dict['dataversetype'] = rec['dataversetype']
fmt_dict['dataversetype_label'] = rec['dataversetype'].replace('_', ' ')
fmt_dict['type_count'] = rec.get('type_count', 0)
if total_count > 0:
float_percent = rec.get('type_count', 0) / total_count
fmt_dict['total_count'] = int(total_count)
fmt_dict['percent_string'] = '{0:.1%}'.format(float_percent)
else:
fmt_dict['total_count'] = 0
fmt_dict['percent_string'] = '0%'
formatted_records.append(fmt_dict)
data_dict = OrderedDict()
data_dict['record_count'] = len(formatted_records)
data_dict['records'] = formatted_records
return StatsResult.build_success_result(data_dict, sql_query)
def get_dataverse_affiliation_counts_published(self):
"""Return published Dataverse counts by affiliation"""
return self.get_dataverse_affiliation_counts(\
**self.get_is_published_filter_param())
def get_dataverse_affiliation_counts_unpublished(self):
"""Return unpublished Dataverse counts by affiliation"""
return self.get_dataverse_affiliation_counts(\
**self.get_is_NOT_published_filter_param())
def get_dataverse_affiliation_counts(self, **extra_filters):
"""
Return Dataverse counts by affiliation
Returns: dv_counts_by_affiliation": [
{
"affiliation": "University of Oxford",
"affiliation_count": 2,
"total_count": 191,
"percent_string": "1.0%"
},
{
"affiliation": "University of Illinois",
"affiliation_count": 1,
"total_count": 191,
"percent_string": "0.5%"
}
...
]
"""
if self.was_error_found():
return self.get_error_msg_return()
# Retrieve the date parameters
#
filter_params = self.get_date_filter_params(DVOBJECT_CREATEDATE_ATTR)
if extra_filters:
for k, v in extra_filters.items():
filter_params[k] = v
dataverse_counts_by_affil = Dataverse.objects.select_related('dvobject'\
).filter(**filter_params\
).values('affiliation'\
).order_by('affiliation'\
).annotate(affiliation_count=models.Count('affiliation')\
).order_by('-affiliation_count')
# -----------------------------------
# Get SQL query string
# -----------------------------------
sql_query = str(dataverse_counts_by_affil.query)
# Count all dataverses
#
        total_count = sum([rec.get('affiliation_count', 0) for rec in dataverse_counts_by_affil])
        total_count = total_count + 0.0  # coerce to float so the percentages below divide correctly (Python 2)
# Format the records, adding 'total_count' and 'percent_string' to each one
#
formatted_records = []
for rec in dataverse_counts_by_affil:
if rec.get('affiliation_count', 0) > 0:
fmt_dict = OrderedDict()
affil_str = rec.get('affiliation', None)
if affil_str is not None:
affil_str = affil_str.encode('utf-8')
fmt_dict['affiliation'] = affil_str
fmt_dict['affiliation_count'] = rec.get('affiliation_count', 0)
if total_count > 0:
float_percent = rec.get('affiliation_count', 0) / total_count
fmt_dict['total_count'] = int(total_count)
fmt_dict['percent_string'] = '{0:.1%}'.format(float_percent)
else:
fmt_dict['total_count'] = 0
fmt_dict['percent_string'] = '0%'
formatted_records.append(fmt_dict)
data_dict = OrderedDict()
data_dict['record_count'] = len(formatted_records)
data_dict['records'] = formatted_records
return StatsResult.build_success_result(data_dict, sql_query)
'''
def get_number_of_datafile_types(self):
"""Return the number of distinct contenttypes found in Datafile objects"""
if self.was_error_found():
return self.get_error_msg_return()
# Retrieve the date parameters
#
filter_params = self.get_date_filter_params(DVOBJECT_CREATEDATE_ATTR)
datafile_counts_by_type = Datafile.objects.select_related('dvobject'\
).filter(**filter_params\
).values('contenttype'\
).distinct().count()
return True, dict(datafile_counts_by_type=datafile_counts_by_type)
'''
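# ---------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original module).
# Assumes a configured Django environment with the dv_apps tables
# populated; the date kwargs follow the __init__ docstring above.
# ---------------------------------------------------------------------
#
#   stats_maker = StatsMakerDataverses(start_date='2016-01-01',
#                                      end_date='2016-12-31')
#   stats_result = stats_maker.get_dataverse_count_published()
#   # stats_result is a StatsResult built by StatsResult.build_success_result,
#   # or an error return if the kwargs failed validation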
|
|
import os,io
import pandas as pd
from shlex import quote
from contextlib import redirect_stdout
from igf_data.utils.fileutils import get_temp_dir,remove_dir,check_file_path,copy_local_file
def run_plotCoverage(bam_files,output_raw_counts,plotcov_stdout,output_plot=None,
blacklist_file=None,thread=1,params_list=None,dry_run=False,
use_ephemeral_space=0):
'''
A function for running Deeptools plotCoverage
:param bam_files: A list of indexed bam files
:param output_raw_counts: Output raw count filepath
:param plotcov_stdout: Output path of plotCoverage stdout logs
:param output_plot: Output plots filepath, default None
:param blacklist_file: Input blacklist region filepath, default None
:param thread: Number of threads to use, default 1
:param params_list: Additional deeptools plotCoverage params as list, default None
:param dry_run: Return Deeptools command list without running it
:param use_ephemeral_space: A toggle for tmp dir settings, default 0
:returns: Deeptools command list
'''
try:
if len(bam_files)==0:
raise ValueError('No bamfiles found to generate coverage plot data')
plotcov_args = ['--bamfiles'] # prepare to add input bams to args
for path in bam_files:
check_file_path(path) # check input bams
plotcov_args.append(quote(path)) # adding input bams
temp_dir = \
get_temp_dir(
use_ephemeral_space=use_ephemeral_space)
temp_output_raw_counts = \
os.path.join(
temp_dir,
os.path.basename(output_raw_counts)) # path for temp raw counts
temp_plotcov_stdout = \
os.path.join(
temp_dir,
        os.path.basename(plotcov_stdout))                       # path for temp stdout log
plotcov_args.\
extend([
"--numberOfProcessors",quote(str(thread)),
"--outRawCounts",temp_output_raw_counts ])
if output_plot is not None:
temp_output_plot = \
os.path.join(
temp_dir,
          os.path.basename(output_plot))                        # path for temp plot file
plotcov_args.extend(["--plotFile",temp_output_plot])
if blacklist_file is not None:
check_file_path(blacklist_file)
plotcov_args.extend(["--blackListFileName",quote(blacklist_file)])
    if isinstance(params_list, list) and \
       len(params_list) > 0:
params_list = [
quote(param)
for param in params_list]
plotcov_args.extend(params_list) # add additional params to the list
if dry_run:
return plotcov_args
from deeptools.plotCoverage import main as plotCoverage_main
f = io.StringIO()
with redirect_stdout(f):
plotCoverage_main(plotcov_args)
stdout_logs = f.getvalue()
with open(temp_plotcov_stdout,'w') as fp:
fp.write(stdout_logs)
copy_local_file(
source_path=temp_plotcov_stdout,
destination_path=plotcov_stdout)
copy_local_file(
source_path=temp_output_raw_counts,
destination_path=output_raw_counts)
if output_plot is not None:
copy_local_file(
source_path=temp_output_plot,
destination_path=output_plot)
remove_dir(temp_dir) # clean up temp dir
plotcov_args.insert(0,'plotCoverage') # fix for deeptools commandline
return plotcov_args
except:
raise
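# Usage sketch (illustrative): build the plotCoverage command list without
# executing deeptools. The bam path below is a placeholder; note that
# check_file_path requires it to exist, so point it at a real indexed bam.
#
#   cmd = run_plotCoverage(
#           bam_files=['/path/to/sample.bam'],
#           output_raw_counts='coverage_counts.tsv',
#           plotcov_stdout='plotcov_stdout.log',
#           thread=4,
#           dry_run=True)
#   # cmd is the argument list that would be handed to deeptools plotCoverage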
def run_bamCoverage(bam_files,output_file,blacklist_file=None,thread=1,dry_run=False,
params_list=("--outFileFormat","bigwig"),use_ephemeral_space=0):
'''
A function for running Deeptools bamCoverage
  :param bam_files: A list of bam files; exactly one file is expected
  :param output_file: Output filepath for the coverage file
:param blacklist_file: Input blacklist region filepath, default None
:param thread: Number of threads to use, default 1
:param dry_run: Return Deeptools command list without running it
  :param params_list: Additional deeptools bamCoverage params as list, default ("--outFileFormat","bigwig")
:param use_ephemeral_space: A toggle for temp dir settings, default 0
:returns: Deeptools command as list
'''
try:
if len(bam_files)==0:
raise ValueError('No bamfiles found to generate coverage data')
if len(bam_files)>1:
raise ValueError(
'Expecting only one bam for bamCoverage tools, found : {0}'.\
format(len(bam_files)))
bamcov_args = ['--bam'] # prepare to add input bams to args
for path in bam_files:
check_file_path(path) # check input bams
bamcov_args.append(quote(path)) # adding input bams
temp_dir = \
get_temp_dir(use_ephemeral_space=use_ephemeral_space)
temp_output = \
os.path.join(
temp_dir,
os.path.basename(output_file))
bamcov_args.\
extend([
"--numberOfProcessors",quote(str(thread)),
"--outFileName",temp_output])
if blacklist_file is not None:
check_file_path(blacklist_file)
bamcov_args.extend(["--blackListFileName",quote(blacklist_file)])
    if isinstance(params_list, (list, tuple)) and \
       len(params_list) > 0:
      params_list = [
        quote(param)
        for param in list(params_list)]
      bamcov_args.extend(params_list)                           # add additional params to the list
if dry_run:
return bamcov_args
from deeptools.bamCoverage import main as bamCoverage_main
bamCoverage_main(bamcov_args) # generate bam coverage file
copy_local_file(
source_path=temp_output,
destination_path=output_file) # copy output file
remove_dir(temp_dir) # clean up temp dir
bamcov_args.insert(0,'bamCoverage') # fix for deeptools commandline
return bamcov_args
except:
raise
def run_plotFingerprint(bam_files,output_raw_counts,output_matrics,output_plot=None,dry_run=False,
blacklist_file=None,thread=1,params_list=None,use_ephemeral_space=0):
'''
A function for running Deeptools plotFingerprint
:param bam_files: A list of indexed bam files
:param output_raw_counts: Output raw count filepath
  :param output_matrics: Output quality metrics filepath
:param output_plot: Output plots filepath, default None
:param blacklist_file: Input blacklist region filepath, default None
:param thread: Number of threads to use, default 1
:param dry_run: Return Deeptools command list without running it
  :param params_list: Additional deeptools plotFingerprint params as list, default None
:param use_ephemeral_space: A toggle for temp dir settings, default 0
:returns: Deeptools command list
'''
try:
if len(bam_files)==0:
raise ValueError('No bamfiles found to generate coverage plot data')
plotFgCov_args = ['--bamfiles'] # prepare to add input bams to args
for path in bam_files:
check_file_path(path) # check input bams
plotFgCov_args.append(quote(path)) # adding input bams
temp_dir = \
get_temp_dir(use_ephemeral_space=use_ephemeral_space)
temp_output_raw_counts = \
os.path.join(
temp_dir,
os.path.basename(output_raw_counts)) # path for temp raw counts
    temp_output_matrics = \
      os.path.join(
        temp_dir,
        os.path.basename(output_matrics))                       # path for temp metrics file
plotFgCov_args.\
extend([
"--numberOfProcessors",quote(str(thread)),
"--outRawCounts",temp_output_raw_counts,
"--outQualityMetrics",temp_output_matrics ])
if output_plot is not None:
temp_output_plot = \
os.path.join(
temp_dir,
          os.path.basename(output_plot))                        # path for temp plot file
plotFgCov_args.extend(["--plotFile",temp_output_plot])
if blacklist_file is not None:
check_file_path(blacklist_file)
plotFgCov_args.extend(["--blackListFileName",quote(blacklist_file)])
    if isinstance(params_list, list) and \
       len(params_list) > 0:
params_list = [
quote(param)
for param in params_list]
plotFgCov_args.extend(params_list) # add additional params to the list
if dry_run:
return plotFgCov_args
from deeptools.plotFingerprint import main as plotFingerprint_main
plotFingerprint_main(plotFgCov_args)
copy_local_file(
source_path=temp_output_raw_counts,
destination_path=output_raw_counts)
copy_local_file(
source_path=temp_output_matrics,
destination_path=output_matrics)
if output_plot is not None:
copy_local_file(
source_path=temp_output_plot,
destination_path=output_plot)
remove_dir(temp_dir)
plotFgCov_args.insert(0,'plotFingerprint') # fix for deeptools commandline
return plotFgCov_args
except:
raise
|
|
"""Float class.
Represents an unbounded float using a widget.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .widget import DOMWidget, register
from IPython.utils.traitlets import Unicode, CFloat, Bool, CaselessStrEnum, Tuple
from IPython.utils.warn import DeprecatedClass
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class _Float(DOMWidget):
value = CFloat(0.0, help="Float value", sync=True)
disabled = Bool(False, help="Enable or disable user changes", sync=True)
description = Unicode(
help="Description of the value this widget represents", sync=True)
class _BoundedFloat(_Float):
max = CFloat(100.0, help="Max value", sync=True)
min = CFloat(0.0, help="Min value", sync=True)
step = CFloat(
0.1, help="Minimum step that the value can take (ignored by some views)", sync=True)
def __init__(self, *pargs, **kwargs):
"""Constructor"""
DOMWidget.__init__(self, *pargs, **kwargs)
self._validate('value', None, self.value)
self.on_trait_change(self._validate, ['value', 'min', 'max'])
def _validate(self, name, old, new):
"""Validate value, max, min."""
if self.min > new or new > self.max:
self.value = min(max(new, self.min), self.max)
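# Clamping sketch (illustrative): assigning an out-of-range value to a
# bounded widget snaps it back into [min, max] via _validate above, e.g.
#
#   w = BoundedFloatText(min=0.0, max=10.0)   # class defined below
#   w.value = 42.0                            # _validate fires; w.value == 10.0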
@register('IPython.FloatText')
class FloatText(_Float):
""" Displays a float value within a textbox. For a textbox in
which the value must be within a specific range, use BoundedFloatText.
Parameters
----------
value : float
value displayed
description : str
description displayed next to the textbox
color : str Unicode color code (eg. '#C13535'), optional
color of the value displayed
"""
_view_name = Unicode('FloatTextView', sync=True)
@register('IPython.BoundedFloatText')
class BoundedFloatText(_BoundedFloat):
""" Displays a float value within a textbox. Value must be within the range specified.
For a textbox in which the value doesn't need to be within a specific range, use FloatText.
Parameters
----------
value : float
value displayed
min : float
minimal value of the range of possible values displayed
max : float
maximal value of the range of possible values displayed
description : str
description displayed next to the textbox
color : str Unicode color code (eg. '#C13535'), optional
color of the value displayed
"""
_view_name = Unicode('FloatTextView', sync=True)
@register('IPython.FloatSlider')
class FloatSlider(_BoundedFloat):
""" Slider/trackbar of floating values with the specified range.
Parameters
----------
value : float
position of the slider
min : float
minimal position of the slider
max : float
maximal position of the slider
step : float
step of the trackbar
description : str
name of the slider
    orientation : {'vertical', 'horizontal'}, optional
default is horizontal
readout : {True, False}, optional
default is True, display the current value of the slider next to it
slider_color : str Unicode color code (eg. '#C13535'), optional
color of the slider
color : str Unicode color code (eg. '#C13535'), optional
color of the value displayed (if readout == True)
"""
_view_name = Unicode('FloatSliderView', sync=True)
orientation = CaselessStrEnum(values=['horizontal', 'vertical'],
default_value='horizontal',
help="Vertical or horizontal.", allow_none=False, sync=True)
_range = Bool(False, help="Display a range selector", sync=True)
readout = Bool(
True, help="Display the current value of the slider next to it.", sync=True)
slider_color = Unicode(sync=True)
@register('IPython.FloatProgress')
class FloatProgress(_BoundedFloat):
""" Displays a progress bar.
Parameters
    ----------
value : float
position within the range of the progress bar
min : float
minimal position of the slider
max : float
maximal position of the slider
step : float
step of the progress bar
description : str
name of the progress bar
bar_style: {'success', 'info', 'warning', 'danger', ''}, optional
color of the progress bar, default is '' (blue)
colors are: 'success'-green, 'info'-light blue, 'warning'-orange, 'danger'-red
"""
_view_name = Unicode('ProgressView', sync=True)
bar_style = CaselessStrEnum(
values=['success', 'info', 'warning', 'danger', ''],
default_value='', allow_none=True, sync=True, help="""Use a
        predefined styling for the progress bar.""")
class _FloatRange(_Float):
value = Tuple(CFloat, CFloat, default_value=(
0.0, 1.0), help="Tuple of (lower, upper) bounds", sync=True)
lower = CFloat(0.0, help="Lower bound", sync=False)
upper = CFloat(1.0, help="Upper bound", sync=False)
def __init__(self, *pargs, **kwargs):
value_given = 'value' in kwargs
lower_given = 'lower' in kwargs
upper_given = 'upper' in kwargs
if value_given and (lower_given or upper_given):
raise ValueError(
"Cannot specify both 'value' and 'lower'/'upper' for range widget")
if lower_given != upper_given:
raise ValueError(
"Must specify both 'lower' and 'upper' for range widget")
DOMWidget.__init__(self, *pargs, **kwargs)
# ensure the traits match, preferring whichever (if any) was given in
# kwargs
if value_given:
self.lower, self.upper = self.value
else:
self.value = (self.lower, self.upper)
self.on_trait_change(self._validate, ['value', 'upper', 'lower'])
def _validate(self, name, old, new):
if name == 'value':
self.lower, self.upper = min(new), max(new)
elif name == 'lower':
self.value = (new, self.value[1])
elif name == 'upper':
self.value = (self.value[0], new)
class _BoundedFloatRange(_FloatRange):
step = CFloat(
1.0, help="Minimum step that the value can take (ignored by some views)", sync=True)
max = CFloat(100.0, help="Max value", sync=True)
min = CFloat(0.0, help="Min value", sync=True)
def __init__(self, *pargs, **kwargs):
any_value_given = 'value' in kwargs or 'upper' in kwargs or 'lower' in kwargs
_FloatRange.__init__(self, *pargs, **kwargs)
# ensure a minimal amount of sanity
if self.min > self.max:
raise ValueError("min must be <= max")
if any_value_given:
# if a value was given, clamp it within (min, max)
self._validate("value", None, self.value)
else:
# otherwise, set it to 25-75% to avoid the handles overlapping
self.value = (0.75 * self.min + 0.25 * self.max,
0.25 * self.min + 0.75 * self.max)
# callback already set for 'value', 'lower', 'upper'
self.on_trait_change(self._validate, ['min', 'max'])
def _validate(self, name, old, new):
if name == "min":
if new > self.max:
raise ValueError("setting min > max")
self.min = new
elif name == "max":
if new < self.min:
raise ValueError("setting max < min")
self.max = new
low, high = self.value
if name == "value":
low, high = min(new), max(new)
elif name == "upper":
if new < self.lower:
raise ValueError("setting upper < lower")
high = new
elif name == "lower":
if new > self.upper:
raise ValueError("setting lower > upper")
low = new
low = max(self.min, min(low, self.max))
high = min(self.max, max(high, self.min))
# determine the order in which we should update the
# lower, upper traits to avoid a temporary inverted overlap
lower_first = high < self.lower
self.value = (low, high)
if lower_first:
self.lower = low
self.upper = high
else:
self.upper = high
self.lower = low
@register('IPython.FloatRangeSlider')
class FloatRangeSlider(_BoundedFloatRange):
""" Slider/trackbar for displaying a floating value range (within the specified range of values).
Parameters
----------
value : float tuple
range of the slider displayed
min : float
minimal position of the slider
max : float
maximal position of the slider
step : float
step of the trackbar
description : str
name of the slider
    orientation : {'vertical', 'horizontal'}, optional
default is horizontal
readout : {True, False}, optional
default is True, display the current value of the slider next to it
slider_color : str Unicode color code (eg. '#C13535'), optional
color of the slider
color : str Unicode color code (eg. '#C13535'), optional
color of the value displayed (if readout == True)
"""
_view_name = Unicode('FloatSliderView', sync=True)
orientation = CaselessStrEnum(values=['horizontal', 'vertical'],
default_value='horizontal', allow_none=False,
help="Vertical or horizontal.", sync=True)
_range = Bool(True, help="Display a range selector", sync=True)
readout = Bool(
True, help="Display the current value of the slider next to it.", sync=True)
slider_color = Unicode(sync=True)
# Remove in IPython 4.0
FloatTextWidget = DeprecatedClass(FloatText, 'FloatTextWidget')
BoundedFloatTextWidget = DeprecatedClass(
BoundedFloatText, 'BoundedFloatTextWidget')
FloatSliderWidget = DeprecatedClass(FloatSlider, 'FloatSliderWidget')
FloatProgressWidget = DeprecatedClass(FloatProgress, 'FloatProgressWidget')
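# Usage sketch (illustrative): typical notebook construction of the widgets
# above; displaying them requires an IPython kernel with widget support.
#
#   slider = FloatSlider(value=5.0, min=0.0, max=10.0, step=0.5,
#                        description='Gain')
#   range_slider = FloatRangeSlider(min=0.0, max=1.0, value=(0.25, 0.75))
#   progress = FloatProgress(value=30.0, min=0.0, max=100.0, bar_style='info')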
|
|
#!/usr/bin/env python
"""Memory Map File Analyser for ARM mbed"""
import sys
import os
import re
import csv
import json
import argparse
from prettytable import PrettyTable
from tools.utils import argparse_filestring_type, \
argparse_lowercase_hyphen_type, argparse_uppercase_type
DEBUG = False
RE_ARMCC = re.compile(
r'^\s+0x(\w{8})\s+0x(\w{8})\s+(\w+)\s+(\w+)\s+(\d+)\s+[*]?.+\s+(.+)$')
RE_IAR = re.compile(
r'^\s+(.+)\s+(zero|const|ro code|inited|uninit)\s'
r'+0x(\w{8})\s+0x(\w+)\s+(.+)\s.+$')
class MemapParser(object):
"""An object that represents parsed results, parses the memory map files,
and writes out different file types of memory results
"""
print_sections = ('.text', '.data', '.bss')
misc_flash_sections = ('.interrupts', '.flash_config')
other_sections = ('.interrupts_ram', '.init', '.ARM.extab',
'.ARM.exidx', '.ARM.attributes', '.eh_frame',
'.init_array', '.fini_array', '.jcr', '.stab',
'.stabstr', '.ARM.exidx', '.ARM')
# sections to print info (generic for all toolchains)
sections = ('.text', '.data', '.bss', '.heap', '.stack')
def __init__(self):
""" General initialization
"""
# list of all modules and their sections
self.modules = dict()
        # sections must be defined in this order so irrelevant ones are filtered out
self.all_sections = self.sections + self.other_sections + \
self.misc_flash_sections + ('unknown', 'OUTPUT')
        # list of all object files and mapping to module names
self.object_to_module = dict()
# Memory usage summary structure
self.mem_summary = dict()
def module_add(self, module_name, size, section):
""" Adds a module / section to the list
Positional arguments:
module_name - name of the module to add
size - the size of the module being added
section - the section the module contributes to
"""
if module_name in self.modules:
self.modules[module_name][section] += size
else:
temp_dic = dict()
for section_idx in self.all_sections:
temp_dic[section_idx] = 0
temp_dic[section] = size
self.modules[module_name] = temp_dic
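    # After a few calls, self.modules resembles (sizes illustrative; every
    # section from self.all_sections is present, most initialized to 0):
    #
    #   {'mbed-os/hal': {'.text': 1024, '.data': 8, '.bss': 0, ...},
    #    'Misc':        {'.text': 512,  '.data': 0, '.bss': 64, ...}}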
def check_new_section_gcc(self, line):
""" Check whether a new section in a map file has been detected (only
applies to gcc)
Positional arguments:
line - the line to check for a new section
"""
for i in self.all_sections:
if line.startswith(i):
                # return the name of the section (assuming it's a known one)
return i
if line.startswith('.'):
            return 'unknown' # all others are classified as unknown
else:
return False # everything else, means no change in section
@staticmethod
def path_object_to_module_name(txt):
""" Parse a path to object file to extract it's module and object data
Positional arguments:
txt - the path to parse the object and module name from
"""
txt = txt.replace('\\', '/')
rex_mbed_os_name = r'^.+mbed-os\/(.+)\/(.+\.o)$'
test_rex_mbed_os_name = re.match(rex_mbed_os_name, txt)
if test_rex_mbed_os_name:
object_name = test_rex_mbed_os_name.group(2)
data = test_rex_mbed_os_name.group(1).split('/')
ndata = len(data)
if ndata == 1:
module_name = data[0]
else:
module_name = data[0] + '/' + data[1]
return [module_name, object_name]
else:
return ['Misc', ""]
def parse_section_gcc(self, line):
""" Parse data from a section of gcc map file
examples:
0x00004308 0x7c ./.build/K64F/GCC_ARM/mbed-os/hal/targets/hal/TARGET_Freescale/TARGET_KPSDK_MCUS/spi_api.o
.text 0x00000608 0x198 ./.build/K64F/GCC_ARM/mbed-os/core/mbed-rtos/rtx/TARGET_CORTEX_M/TARGET_RTOS_M4_M7/TOOLCHAIN_GCC/HAL_CM4.o
Positional arguments:
line - the line to parse a section from
"""
rex_address_len_name = re.compile(
r'^\s+.*0x(\w{8,16})\s+0x(\w+)\s(.+)$')
test_address_len_name = re.match(rex_address_len_name, line)
if test_address_len_name:
if int(test_address_len_name.group(2), 16) == 0: # size == 0
return ["", 0] # no valid entry
else:
m_name, _ = self.path_object_to_module_name(
test_address_len_name.group(3))
m_size = int(test_address_len_name.group(2), 16)
return [m_name, m_size]
else: # special corner case for *fill* sections
# example
# *fill* 0x0000abe4 0x4
rex_address_len = r'^\s+\*fill\*\s+0x(\w{8,16})\s+0x(\w+).*$'
test_address_len = re.match(rex_address_len, line)
if test_address_len:
if int(test_address_len.group(2), 16) == 0: # size == 0
return ["", 0] # no valid entry
else:
m_name = 'Fill'
m_size = int(test_address_len.group(2), 16)
return [m_name, m_size]
else:
return ["", 0] # no valid entry
def parse_map_file_gcc(self, file_desc):
""" Main logic to decode gcc map files
Positional arguments:
file_desc - a stream object to parse as a gcc map file
"""
current_section = 'unknown'
with file_desc as infile:
# Search area to parse
for line in infile:
if line.startswith('Linker script and memory map'):
current_section = "unknown"
break
# Start decoding the map file
for line in infile:
change_section = self.check_new_section_gcc(line)
if change_section == "OUTPUT": # finish parsing file: exit
break
                elif change_section is not False:
current_section = change_section
[module_name, module_size] = self.parse_section_gcc(line)
if module_size == 0 or module_name == "":
pass
else:
self.module_add(module_name, module_size, current_section)
if DEBUG:
print "Line: %s" % line,
print "Module: %s\tSection: %s\tSize: %s" % \
(module_name, current_section, module_size)
raw_input("----------")
def parse_section_armcc(self, line):
""" Parse data from an armcc map file
Examples of armcc map file:
Base_Addr Size Type Attr Idx E Section Name Object
0x00000000 0x00000400 Data RO 11222 RESET startup_MK64F12.o
0x00000410 0x00000008 Code RO 49364 * !!!main c_w.l(__main.o)
Positional arguments:
line - the line to parse the section data from
"""
test_rex_armcc = re.match(RE_ARMCC, line)
if test_rex_armcc:
size = int(test_rex_armcc.group(2), 16)
if test_rex_armcc.group(4) == 'RO':
section = '.text'
else:
if test_rex_armcc.group(3) == 'Data':
section = '.data'
elif test_rex_armcc.group(3) == 'Zero':
section = '.bss'
else:
print "BUG armcc map parser"
raw_input()
# lookup object in dictionary and return module name
object_name = test_rex_armcc.group(6)
if object_name in self.object_to_module:
module_name = self.object_to_module[object_name]
else:
module_name = 'Misc'
return [module_name, size, section]
else:
return ["", 0, ""] # no valid entry
def parse_section_iar(self, line):
""" Parse data from an IAR map file
Examples of IAR map file:
Section Kind Address Size Object
.intvec ro code 0x00000000 0x198 startup_MK64F12.o [15]
.rodata const 0x00000198 0x0 zero_init3.o [133]
.iar.init_table const 0x00008384 0x2c - Linker created -
Initializer bytes const 0x00000198 0xb2 <for P3 s0>
.data inited 0x20000000 0xd4 driverAtmelRFInterface.o [70]
.bss zero 0x20000598 0x318 RTX_Conf_CM.o [4]
.iar.dynexit uninit 0x20001448 0x204 <Block tail>
HEAP uninit 0x20001650 0x10000 <Block tail>
        Positional arguments:
line - the line to parse section data from
"""
test_rex_iar = re.match(RE_IAR, line)
if test_rex_iar:
size = int(test_rex_iar.group(4), 16)
if test_rex_iar.group(2) == 'const' or \
test_rex_iar.group(2) == 'ro code':
section = '.text'
elif test_rex_iar.group(2) == 'zero' or \
test_rex_iar.group(2) == 'uninit':
if test_rex_iar.group(1)[0:4] == 'HEAP':
section = '.heap'
elif test_rex_iar.group(1)[0:6] == 'CSTACK':
section = '.stack'
else:
section = '.bss' # default section
elif test_rex_iar.group(2) == 'inited':
section = '.data'
else:
print "BUG IAR map parser"
raw_input()
# lookup object in dictionary and return module name
object_name = test_rex_iar.group(5)
if object_name in self.object_to_module:
module_name = self.object_to_module[object_name]
else:
module_name = 'Misc'
return [module_name, size, section]
else:
return ["", 0, ""] # no valid entry
def parse_map_file_armcc(self, file_desc):
""" Main logic to decode armc5 map files
Positional arguments:
file_desc - a file like object to parse as an armc5 map file
"""
with file_desc as infile:
# Search area to parse
for line in infile:
if line.startswith(' Base Addr Size'):
break
# Start decoding the map file
for line in infile:
[name, size, section] = self.parse_section_armcc(line)
if size == 0 or name == "" or section == "":
pass
else:
self.module_add(name, size, section)
def parse_map_file_iar(self, file_desc):
""" Main logic to decode IAR map files
Positional arguments:
file_desc - a file like object to parse as an IAR map file
"""
with file_desc as infile:
# Search area to parse
for line in infile:
if line.startswith(' Section '):
break
# Start decoding the map file
for line in infile:
[name, size, section] = self.parse_section_iar(line)
if size == 0 or name == "" or section == "":
pass
else:
self.module_add(name, size, section)
def search_objects(self, path, toolchain):
""" Check whether the specified map file matches with the toolchain.
Searches for object files and creates mapping: object --> module
Positional arguments:
path - the path to an object file
toolchain - the toolchain used to build the object file
"""
path = path.replace('\\', '/')
# check location of map file
rex = r'^(.+\/)' + re.escape(toolchain) + r'\/(.+\.map)$'
test_rex = re.match(rex, path)
if test_rex:
search_path = test_rex.group(1) + toolchain + '/mbed-os/'
else:
            # It looks like this is not an mbed project
# object-to-module mapping cannot be generated
print "Warning: specified toolchain doesn't match with"\
" path to the memory map file."
return
for root, _, obj_files in os.walk(search_path):
for obj_file in obj_files:
if obj_file.endswith(".o"):
module_name, object_name = self.path_object_to_module_name(
os.path.join(root, obj_file))
if object_name in self.object_to_module:
if DEBUG:
print "WARNING: multiple usages of object file: %s"\
% object_name
print " Current: %s" % \
self.object_to_module[object_name]
print " New: %s" % module_name
print " "
else:
self.object_to_module.update({object_name:module_name})
export_formats = ["json", "csv-ci", "table"]
def generate_output(self, export_format, file_output=None):
""" Generates summary of memory map data
Positional arguments:
export_format - the format to dump
Keyword arguments:
        file_output - output file name; writes to stdout when None
"""
try:
if file_output:
file_desc = open(file_output, 'wb')
else:
file_desc = sys.stdout
except IOError as error:
print "I/O error({0}): {1}".format(error.errno, error.strerror)
return False
subtotal = dict()
for k in self.sections:
subtotal[k] = 0
# Calculate misc flash sections
misc_flash_mem = 0
for i in self.modules:
for k in self.misc_flash_sections:
if self.modules[i][k]:
misc_flash_mem += self.modules[i][k]
json_obj = []
for i in sorted(self.modules):
row = []
row.append(i)
for k in self.sections:
subtotal[k] += self.modules[i][k]
for k in self.print_sections:
row.append(self.modules[i][k])
json_obj.append({
"module":i,
"size":{
k:self.modules[i][k] for k in self.print_sections
}
})
summary = {
'summary':{
'static_ram': (subtotal['.data'] + subtotal['.bss']),
'heap': (subtotal['.heap']),
'stack': (subtotal['.stack']),
'total_ram': (subtotal['.data'] + subtotal['.bss'] +
subtotal['.heap']+subtotal['.stack']),
'total_flash': (subtotal['.text'] + subtotal['.data'] +
misc_flash_mem),
}
}
self.mem_summary = json_obj + [summary]
to_call = {'json': self.generate_json,
'csv-ci': self.generate_csv,
'table': self.generate_table}[export_format]
to_call(subtotal, misc_flash_mem, file_desc)
if file_desc is not sys.stdout:
file_desc.close()
def generate_json(self, _, dummy, file_desc):
"""Generate a json file from a memory map
Positional arguments:
        _, dummy - unused here (subtotal and misc flash size are ignored;
                   kept so all generate_* methods share one signature)
file_desc - the file to write out the final report to
"""
file_desc.write(json.dumps(self.mem_summary, indent=4))
file_desc.write('\n')
def generate_csv(self, subtotal, misc_flash_mem, file_desc):
"""Generate a CSV file from a memoy map
Positional arguments:
subtotal - total sizes for each module
misc_flash_mem - size of misc flash sections
file_desc - the file to write out the final report to
"""
csv_writer = csv.writer(file_desc, delimiter=',',
quoting=csv.QUOTE_NONE)
csv_module_section = []
csv_sizes = []
for i in sorted(self.modules):
for k in self.print_sections:
csv_module_section += [i+k]
csv_sizes += [self.modules[i][k]]
csv_module_section += ['static_ram']
csv_sizes += [subtotal['.data']+subtotal['.bss']]
csv_module_section += ['heap']
if subtotal['.heap'] == 0:
csv_sizes += ['unknown']
else:
csv_sizes += [subtotal['.heap']]
csv_module_section += ['stack']
if subtotal['.stack'] == 0:
csv_sizes += ['unknown']
else:
csv_sizes += [subtotal['.stack']]
csv_module_section += ['total_ram']
csv_sizes += [subtotal['.data'] + subtotal['.bss'] +
subtotal['.heap'] + subtotal['.stack']]
csv_module_section += ['total_flash']
csv_sizes += [subtotal['.text']+subtotal['.data']+misc_flash_mem]
csv_writer.writerow(csv_module_section)
csv_writer.writerow(csv_sizes)
def generate_table(self, subtotal, misc_flash_mem, file_desc):
"""Generate a table from a memoy map
Positional arguments:
subtotal - total sizes for each module
misc_flash_mem - size of misc flash sections
file_desc - the file to write out the final report to
"""
# Create table
columns = ['Module']
columns.extend(self.print_sections)
table = PrettyTable(columns)
table.align["Module"] = "l"
        for col in self.print_sections:
            table.align[col] = 'r'
subtotal_row = ['Subtotals']
for k in self.print_sections:
subtotal_row.append(subtotal[k])
table.add_row(subtotal_row)
file_desc.write(table.get_string())
file_desc.write('\n')
if subtotal['.heap'] == 0:
file_desc.write("Allocated Heap: unknown\n")
else:
file_desc.write("Allocated Heap: %s bytes\n" %
str(subtotal['.heap']))
if subtotal['.stack'] == 0:
file_desc.write("Allocated Stack: unknown\n")
else:
file_desc.write("Allocated Stack: %s bytes\n" %
str(subtotal['.stack']))
file_desc.write("Total Static RAM memory (data + bss): %s bytes\n" %
(str(subtotal['.data'] + subtotal['.bss'])))
file_desc.write(
"Total RAM memory (data + bss + heap + stack): %s bytes\n"
% (str(subtotal['.data'] + subtotal['.bss'] + subtotal['.heap'] +
subtotal['.stack'])))
file_desc.write("Total Flash memory (text + data + misc): %s bytes\n" %
(str(subtotal['.text'] + subtotal['.data'] +
misc_flash_mem)))
toolchains = ["ARM", "ARM_STD", "ARM_MICRO", "GCC_ARM", "IAR"]
def parse(self, mapfile, toolchain):
""" Parse and decode map file depending on the toolchain
Positional arguments:
mapfile - the file name of the memory map file
toolchain - the toolchain used to create the file
"""
result = True
try:
with open(mapfile, 'r') as file_input:
if toolchain == "ARM" or toolchain == "ARM_STD" or\
toolchain == "ARM_MICRO":
self.search_objects(os.path.abspath(mapfile), "ARM")
self.parse_map_file_armcc(file_input)
elif toolchain == "GCC_ARM":
self.parse_map_file_gcc(file_input)
elif toolchain == "IAR":
self.search_objects(os.path.abspath(mapfile), toolchain)
self.parse_map_file_iar(file_input)
else:
result = False
except IOError as error:
print "I/O error({0}): {1}".format(error.errno, error.strerror)
result = False
return result
def main():
"""Entry Point"""
version = '0.3.11'
# Parser handling
parser = argparse.ArgumentParser(
description="Memory Map File Analyser for ARM mbed\nversion %s" %
version)
parser.add_argument(
'file', type=argparse_filestring_type, help='memory map file')
parser.add_argument(
'-t', '--toolchain', dest='toolchain',
help='select a toolchain used to build the memory map file (%s)' %
", ".join(MemapParser.toolchains),
required=True,
type=argparse_uppercase_type(MemapParser.toolchains, "toolchain"))
parser.add_argument(
'-o', '--output', help='output file name', required=False)
parser.add_argument(
'-e', '--export', dest='export', required=False, default='table',
type=argparse_lowercase_hyphen_type(MemapParser.export_formats,
'export format'),
help="export format (examples: %s: default)" %
", ".join(MemapParser.export_formats))
parser.add_argument('-v', '--version', action='version', version=version)
# Parse/run command
if len(sys.argv) <= 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
# Create memap object
memap = MemapParser()
# Parse and decode a map file
if args.file and args.toolchain:
    if memap.parse(args.file, args.toolchain) is False:
        sys.exit(1)  # a parse failure should not report success
# Write output in file
    if args.output is not None:
memap.generate_output(args.export, args.output)
else: # Write output in screen
memap.generate_output(args.export)
sys.exit(0)
if __name__ == "__main__":
main()
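# Programmatic usage sketch (illustrative; the map path is a placeholder):
#
#   memap = MemapParser()
#   if memap.parse('BUILD/app.map', 'GCC_ARM'):
#       memap.generate_output('table')             # summary to stdout
#       memap.generate_output('json', 'app.json')  # or write a report file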
|
|
import sublime
import unittest
from unittest.mock import patch, MagicMock
from Terminality.macro import Macro
def file_content(region):
contents = """
Hello, World!
This might be a long file
In which use to test something
Blah blah blah...
"""
return contents[region.begin():region.end()]
MockView = MagicMock(spec=sublime.View)
MockView.substr = MagicMock(side_effect=file_content)
MockView.file_name.return_value = "path/to/file.ext"
MockWindow = MagicMock(spec=sublime.Window)
MockWindow.active_view.return_value = MockView
MockWindow.folders.return_value = ["another/path/to/directory",
"path/to"]
class TestMacroParser(unittest.TestCase):
@patch('sublime.active_window', return_value=MockWindow)
def test_none(self, active_window):
macros = {
"test": None,
"expected": None,
"required": None,
"macros": None
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_empty(self, active_window):
macros = {
"test": "",
"expected": "",
"required": [],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_predefined_macro1(self, active_window):
macros = {
"test": "",
"expected": "",
"required": ["file", "file_name"],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_predefined_macro2(self, active_window):
macros = {
"test": "$file_name",
"expected": None,
"required": ["required", "file_name"],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_predefined_macro3(self, active_window):
macros = {
"test": "$require ; $file",
"expected": " ; path/to/file.ext",
"required": [],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_predefined_macro4(self, active_window):
macros = {
"test": "$parent$file$file_name",
"expected": "path/topath/to/file.extfile.ext",
"required": [],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_predefined_macro5(self, active_window):
macros = {
"test": "$working$$$working_project$$$project",
"expected": "path/to$path/to$another/path/to/directory",
"required": [],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro1(self, active_window):
macros = {
"test": "",
"expected": None,
"required": ["required"],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro2(self, active_window):
macros = {
"test": "",
"expected": None,
"required": ["required"],
"macros": {
"required": []
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro3(self, active_window):
macros = {
"test": "",
"expected": None,
"required": ["required"],
"macros": {
"required": [
1,
[1, 2],
None,
[None, None]
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro4(self, active_window):
macros = {
"test": "",
"expected": "",
"required": ["required"],
"macros": {
"required": [
1,
[1, 2],
None,
[None, None],
"macro_output"
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro5(self, active_window):
macros = {
"test": "$required",
"expected": "",
"required": [],
"macros": {
"required": [
1,
[1, 2],
None,
[None, None]
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro6(self, active_window):
macros = {
"test": "$selection",
"expected": "",
"required": [],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro7(self, active_window):
macros = {
"test": "$selection",
"expected": None,
"required": ["selection"],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro8(self, active_window):
MockView.sel.return_value = [sublime.Region(5, 10)]
macros = {
"test": "$selection",
"expected": "Hello",
"required": [],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro9(self, active_window):
MockView.sel.return_value = [sublime.Region(5, 10)]
macros = {
"test": "$selection",
"expected": "Hello",
"required": ["selection"],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_required_macro10(self, active_window):
macros = {
"test": "",
"expected": None,
"required": [""],
"macros": {}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_recursion_macro(self, active_window):
macros = {
"test": "$required",
"expected": "",
"required": [],
"macros": {
"required": [
"$required"
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_recursion_macro2(self, active_window):
macros = {
"test": "$required",
"expected": "",
"required": [],
"macros": {
"required": [
"$required2"
],
"required2": [
"$required"
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_recursion_macro3(self, active_window):
macros = {
"test": "$required$required2",
"expected": "OutputOutput",
"required": [],
"macros": {
"required": [
"$required2",
"Output"
],
"required2": [
"$required"
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_substring_macro(self, active_window):
macros = {
"test": "$custom;$custom2;$custom3;$custom4",
"expected": ".ext;.ext;.ext;.ext",
"required": [],
"macros": {
"custom": [
"$file",
["-4:"]
],
"custom2": [
"$file_name",
["-4:"]
],
"custom3": [
["$file", "-4:"]
],
"custom4": [
["$file_name", "-4:"]
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
@patch('sublime.active_window', return_value=MockWindow)
def test_regex_macro(self, active_window):
macros = {
"test": "$custom;$custom2;$custom3;$custom4",
"expected": ".ext;.ext;.ext;.ext",
"required": [],
"macros": {
"custom": [
"$file",
["\\.\\w+$"]
],
"custom2": [
"$file_name",
["\\.\\w+$"]
],
"custom3": [
["$file", "\\.\\w+$"]
],
"custom4": [
["$file_name", "\\.\\w+$"]
]
}
}
self.assertEqual(
Macro.parse_macro(
string=macros["test"],
custom_macros=macros["macros"],
required=macros["required"]
),
macros["expected"]
)
|
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import oilpan_gc_times
from telemetry.core import util
from telemetry.results import page_test_results
from telemetry.timeline import model
from telemetry.timeline import slice as slice_data
from telemetry.unittest_util import options_for_unittests
from telemetry.unittest_util import page_test_test_case
from telemetry.page import page as page_module
util.AddDirToPythonPath(util.GetTelemetryDir(), 'third_party', 'mock')
import mock # pylint: disable=import-error
class TestOilpanGCTimesPage(page_module.Page):
def __init__(self, page_set):
super(TestOilpanGCTimesPage, self).__init__(
'file://blank.html', page_set, page_set.base_dir)
def RunPageInteractions(self, action_runner):
with action_runner.CreateGestureInteraction('ScrollAction'):
action_runner.ScrollPage()
class OilpanGCTimesTestData(object):
def __init__(self, thread_name):
self._model = model.TimelineModel()
self._renderer_process = self._model.GetOrCreateProcess(1)
self._renderer_thread = self._renderer_process.GetOrCreateThread(2)
self._renderer_thread.name = thread_name
self._results = page_test_results.PageTestResults()
@property
def results(self):
return self._results
def AddSlice(self, name, timestamp, duration, args):
new_slice = slice_data.Slice(
None,
'category',
name,
timestamp,
duration,
timestamp,
duration,
args)
self._renderer_thread.all_slices.append(new_slice)
return new_slice
def AddAsyncSlice(self, name, timestamp, duration, args):
new_slice = slice_data.Slice(
None,
'category',
name,
timestamp,
duration,
timestamp,
duration,
args)
self._renderer_thread.async_slices.append(new_slice)
return new_slice
def ClearResults(self):
self._results = page_test_results.PageTestResults()
class OilpanGCTimesTest(page_test_test_case.PageTestTestCase):
"""Smoke test for Oilpan GC pause time measurements.
Runs OilpanGCTimes measurement on some simple pages and verifies
that all metrics were added to the results. The test is purely functional,
i.e. it only checks if the metrics are present and non-zero.
"""
_KEY_MARK = 'Heap::collectGarbage'
_KEY_LAZY_SWEEP = 'ThreadHeap::lazySweepPages'
_KEY_COMPLETE_SWEEP = 'ThreadState::completeSweep'
_KEY_COALESCE = 'ThreadHeap::coalesce'
_KEY_MEASURE = 'BlinkGCTimeMeasurement'
def setUp(self):
self._options = options_for_unittests.GetCopy()
# Disable for accessing private API of _OilpanGCTimesBase.
# pylint: disable=protected-access
def testForParsingOldFormat(self):
def getMetric(results, name):
metrics = results.FindAllPageSpecificValuesNamed(name)
self.assertEquals(1, len(metrics))
return metrics[0].GetBuildbotValue()
data = self._GenerateDataForParsingOldFormat()
measurement = oilpan_gc_times._OilpanGCTimesBase()
tab = mock.MagicMock()
with mock.patch(
'measurements.oilpan_gc_times.TimelineModel') as MockTimelineModel:
MockTimelineModel.return_value = data._model
measurement.ValidateAndMeasurePage(None, tab, data.results)
results = data.results
self.assertEquals(7, len(getMetric(results, 'oilpan_coalesce')))
self.assertEquals(3, len(getMetric(results, 'oilpan_precise_mark')))
self.assertEquals(3, len(getMetric(results, 'oilpan_precise_lazy_sweep')))
self.assertEquals(3, len(getMetric(results,
'oilpan_precise_complete_sweep')))
self.assertEquals(1, len(getMetric(results, 'oilpan_conservative_mark')))
self.assertEquals(1, len(getMetric(results,
'oilpan_conservative_lazy_sweep')))
self.assertEquals(1, len(getMetric(results,
'oilpan_conservative_complete_sweep')))
self.assertEquals(2, len(getMetric(results, 'oilpan_forced_mark')))
self.assertEquals(2, len(getMetric(results, 'oilpan_forced_lazy_sweep')))
self.assertEquals(2, len(getMetric(results,
'oilpan_forced_complete_sweep')))
# Disable for accessing private API of _OilpanGCTimesBase.
# pylint: disable=protected-access
def testForParsing(self):
def getMetric(results, name):
metrics = results.FindAllPageSpecificValuesNamed(name)
self.assertEquals(1, len(metrics))
return metrics[0].GetBuildbotValue()
data = self._GenerateDataForParsing()
measurement = oilpan_gc_times._OilpanGCTimesBase()
measurement._timeline_model = data._model
tab = mock.MagicMock()
with mock.patch(
'measurements.oilpan_gc_times.TimelineModel') as MockTimelineModel:
MockTimelineModel.return_value = data._model
measurement.ValidateAndMeasurePage(None, tab, data.results)
results = data.results
self.assertEquals(8, len(getMetric(results, 'oilpan_coalesce')))
self.assertEquals(4, len(getMetric(results, 'oilpan_precise_mark')))
self.assertEquals(4, len(getMetric(results, 'oilpan_precise_lazy_sweep')))
self.assertEquals(4, len(getMetric(results,
'oilpan_precise_complete_sweep')))
self.assertEquals(4, len(getMetric(results, 'oilpan_conservative_mark')))
self.assertEquals(4, len(getMetric(results,
'oilpan_conservative_lazy_sweep')))
self.assertEquals(4, len(getMetric(results,
'oilpan_conservative_complete_sweep')))
self.assertEquals(1, len(getMetric(results, 'oilpan_forced_mark')))
self.assertEquals(1, len(getMetric(results, 'oilpan_forced_lazy_sweep')))
self.assertEquals(1, len(getMetric(results,
'oilpan_forced_complete_sweep')))
self.assertEquals(2, len(getMetric(results, 'oilpan_idle_mark')))
self.assertEquals(2, len(getMetric(results, 'oilpan_idle_lazy_sweep')))
self.assertEquals(2, len(getMetric(results,
'oilpan_idle_complete_sweep')))
def testForSmoothness(self):
ps = self.CreatePageSetFromFileInUnittestDataDir('create_many_objects.html')
measurement = oilpan_gc_times.OilpanGCTimesForSmoothness()
results = self.RunMeasurement(measurement, ps, options=self._options)
self.assertEquals(0, len(results.failures))
precise = results.FindAllPageSpecificValuesNamed('oilpan_precise_mark')
conservative = results.FindAllPageSpecificValuesNamed(
'oilpan_conservative_mark')
self.assertLess(0, len(precise) + len(conservative))
def testForBlinkPerf(self):
ps = self.CreatePageSetFromFileInUnittestDataDir('create_many_objects.html')
measurement = oilpan_gc_times.OilpanGCTimesForBlinkPerf()
results = self.RunMeasurement(measurement, ps, options=self._options)
self.assertEquals(0, len(results.failures))
precise = results.FindAllPageSpecificValuesNamed('oilpan_precise_mark')
conservative = results.FindAllPageSpecificValuesNamed(
'oilpan_conservative_mark')
self.assertLess(0, len(precise) + len(conservative))
def _GenerateDataForEmptyPageSet(self):
page_set = self.CreateEmptyPageSet()
page = TestOilpanGCTimesPage(page_set)
page_set.AddUserStory(page)
data = OilpanGCTimesTestData('CrRendererMain')
# Pretend we are about to run the tests to silence lower level asserts.
data.results.WillRunPage(page)
return data
def _GenerateDataForParsingOldFormat(self):
data = self._GenerateDataForEmptyPageSet()
data.AddSlice(self._KEY_MARK, 1, 1, {'precise': True, 'forced': False})
data.AddSlice(self._KEY_LAZY_SWEEP, 2, 2, {})
data.AddSlice(self._KEY_COALESCE, 4, 3, {})
data.AddSlice(self._KEY_LAZY_SWEEP, 7, 4, {})
data.AddSlice(self._KEY_COALESCE, 11, 5, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 16, 6, {})
data.AddSlice(self._KEY_MARK, 22, 7, {'precise': True, 'forced': False})
data.AddSlice(self._KEY_LAZY_SWEEP, 29, 8, {})
data.AddSlice(self._KEY_COALESCE, 37, 9, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 46, 10, {})
data.AddSlice(self._KEY_MARK, 56, 11, {'precise': False, 'forced': False})
data.AddSlice(self._KEY_LAZY_SWEEP, 67, 12, {})
data.AddSlice(self._KEY_COALESCE, 79, 13, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 92, 14, {})
data.AddSlice(self._KEY_MARK, 106, 15, {'precise': True, 'forced': False})
data.AddSlice(self._KEY_LAZY_SWEEP, 121, 16, {})
data.AddSlice(self._KEY_COALESCE, 137, 17, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 154, 18, {})
data.AddSlice(self._KEY_MARK, 172, 19, {'precise': False, 'forced': True})
data.AddSlice(self._KEY_COALESCE, 191, 20, {})
data.AddSlice(self._KEY_LAZY_SWEEP, 211, 21, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 232, 22, {})
data.AddSlice(self._KEY_MARK, 254, 23, {'precise': True, 'forced': True})
data.AddSlice(self._KEY_COALESCE, 277, 24, {})
data.AddSlice(self._KEY_LAZY_SWEEP, 301, 25, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 326, 26, {})
return data
def _GenerateDataForParsing(self):
data = self._GenerateDataForEmptyPageSet()
data.AddSlice(self._KEY_MARK, 1, 1,
{'lazySweeping': True, 'gcReason': 'ConservativeGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 2, 2, {})
data.AddSlice(self._KEY_COALESCE, 4, 3, {})
data.AddSlice(self._KEY_LAZY_SWEEP, 7, 4, {})
data.AddSlice(self._KEY_COALESCE, 11, 5, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 16, 6, {})
data.AddSlice(self._KEY_MARK, 22, 7,
{'lazySweeping': True, 'gcReason': 'PreciseGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 29, 8, {})
data.AddSlice(self._KEY_COALESCE, 37, 9, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 46, 10, {})
data.AddSlice(self._KEY_MARK, 56, 11,
{'lazySweeping': False, 'gcReason': 'ConservativeGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 67, 12, {})
data.AddSlice(self._KEY_COALESCE, 79, 13, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 92, 14, {})
data.AddSlice(self._KEY_MARK, 106, 15,
{'lazySweeping': False, 'gcReason': 'PreciseGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 121, 16, {})
data.AddSlice(self._KEY_COALESCE, 137, 17, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 154, 18, {})
data.AddSlice(self._KEY_MARK, 172, 19,
{'lazySweeping': False, 'gcReason': 'ForcedGCForTesting'})
data.AddSlice(self._KEY_COALESCE, 191, 20, {})
data.AddSlice(self._KEY_LAZY_SWEEP, 211, 21, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 232, 22, {})
data.AddSlice(self._KEY_MARK, 254, 23,
{'lazySweeping': False, 'gcReason': 'IdleGC'})
data.AddSlice(self._KEY_COALESCE, 277, 24, {})
data.AddSlice(self._KEY_LAZY_SWEEP, 301, 25, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 326, 26, {})
# The following events are covered by the 'BlinkGCTimeMeasurement' async event.
first_measure = data.AddSlice(self._KEY_COALESCE, 352, 27, {})
data.AddSlice(self._KEY_MARK, 380, 28,
{'lazySweeping': True, 'gcReason': 'ConservativeGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 408, 29, {})
data.AddSlice(self._KEY_LAZY_SWEEP, 437, 30, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 467, 31, {})
data.AddSlice(self._KEY_MARK, 498, 32,
{'lazySweeping': True, 'gcReason': 'PreciseGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 530, 33, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 563, 34, {})
data.AddSlice(self._KEY_MARK, 597, 35,
{'lazySweeping': False, 'gcReason': 'ConservativeGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 632, 36, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 667, 37, {})
data.AddSlice(self._KEY_MARK, 704, 38,
{'lazySweeping': False, 'gcReason': 'PreciseGC'})
data.AddSlice(self._KEY_LAZY_SWEEP, 742, 39, {})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 781, 40, {})
data.AddSlice(self._KEY_MARK, 821, 41,
{'lazySweeping': False, 'gcReason': 'ForcedGCForTesting'})
data.AddSlice(self._KEY_COMPLETE_SWEEP, 862, 42, {})
data.AddSlice(self._KEY_MARK, 904, 43,
{'lazySweeping': False, 'gcReason': 'IdleGC'})
last_measure = data.AddSlice(self._KEY_COMPLETE_SWEEP, 947, 44, {})
# Async event
async_dur = last_measure.end - first_measure.start
data.AddAsyncSlice(self._KEY_MEASURE, first_measure.start, async_dur, {})
return data
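# Note on the helper used above (an observation from this test, not a
# documented API): AddSlice appears to take (name, start, duration, args) and
# to return the created slice, since the async 'BlinkGCTimeMeasurement'
# duration is derived from first_measure.start and last_measure.end.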
|
|
from __future__ import print_function
from spectrum import *
from scipy.stats import kurtosis
from sklearn.linear_model import LogisticRegression, LassoCV
from sklearn.metrics import auc, roc_curve
from datetime import datetime
import numpy as np
import matplotlib.pyplot as plt
import csv
import wfdb
data_path = "sample_data/challenge_training_data/"
answers_filename = "sample_data/answers.csv"
features_filename = "sample_data/features.csv"
start_time = 290
end_time = 300
fs = 250.
TRAINING_THRESHOLD = 600
def get_psd(channel_subsig, order, nfft):
channel_subsig = channel_subsig - np.mean(channel_subsig)
ar, rho, ref = arburg(channel_subsig, order)
psd = arma2psd(ar, rho=rho, NFFT=nfft)
# Keep one side of the two-sided spectrum; floor division keeps the slice index an int.
psd = psd[len(psd):len(psd) // 2:-1]
# plt.figure()
# plt.plot(linspace(0, 1, len(psd)), abs(psd)*2./(2.*np.pi))
# plt.title('PSD')
# plt.ylabel('Log of PSD')
# plt.xlabel('Normalized Frequency')
# plt.show()
# print(len(psd))
return psd
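# Note on the bin arithmetic in get_baseline()/get_power() below: with
# fs = 250 Hz the Nyquist frequency is 125 Hz, and both functions map a
# physical frequency f to a PSD bin as int(f * nfft / 125.), i.e. they treat
# the nfft-point grid as spanning 0-125 Hz. For example, with nfft = 1024,
# 40 Hz maps to bin int(40 * 1024 / 125.) == 327.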
def get_baseline(channel_subsig, order=30, nfft=1024):
psd = get_psd(channel_subsig, order, nfft)
numerator_min_freq = int(0 * nfft / 125.)
numerator_max_freq = int(1 * nfft / 125.)
denominator_min_freq = int(0 * nfft / 125.)
denominator_max_freq = int(40 * nfft / 125.)
numerator = sum(psd[numerator_min_freq:numerator_max_freq+1])
denominator = sum(psd[denominator_min_freq:denominator_max_freq+1])
baseline = float(numerator) / denominator
return 1 - baseline
def get_power(channel_subsig, order=30, nfft=1024):
psd = get_psd(channel_subsig, order, nfft)
numerator_min_freq = int(5 * nfft / 125.)
numerator_max_freq = int(15 * nfft / 125.)
denominator_min_freq = int(5 * nfft / 125.)
denominator_max_freq = int(40 * nfft / 125.)
numerator = sum(psd[numerator_min_freq:numerator_max_freq+1])
denominator = sum(psd[denominator_min_freq:denominator_max_freq+1])
power = float(numerator) / denominator
return power
def get_ksqi(channel_subsig):
ksqi = kurtosis(channel_subsig)
if abs(ksqi) >= 25:
return 25
return ksqi
def get_pursqi(channel_subsig):
s = channel_subsig
sd = np.diff(channel_subsig)
sdd = np.zeros(len(channel_subsig))
for i in range(len(channel_subsig)):
if i == 0:
sdd[i] = channel_subsig[2] - 2*channel_subsig[1] + channel_subsig[0]
elif i == len(channel_subsig) - 1:
sdd[i] = channel_subsig[-1] - 2*channel_subsig[-2] + channel_subsig[-3]
else:
sdd[i] = channel_subsig[i+1] - 2*channel_subsig[i] + channel_subsig[i-1]
w0 = (2*np.pi/len(s))*sum(np.square(s)) # 2pi E[s^2]=2pi Rs(0)
w2 = (2*np.pi/len(s))*sum(np.square(sd)) # 2pi Ts^2 E[sd^2],
w4 = (2*np.pi/len(s))*sum(np.square(sdd)) #2pi Ts^4 E[sdd^2]
pursqi = (w2**2)/(w0*w4)
return pursqi
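# A quick sanity check for the purity index above (illustrative, not part of
# the original code): for a pure sinusoid sampled over whole periods,
# E[s'^2]^2 equals E[s^2]*E[s''^2], so pursqi comes out close to 1, while
# broadband noise drives it lower.
#
#   t = np.linspace(0, 2 * np.pi, 1000)
#   get_pursqi(np.sin(5 * t))  # expected to be close to 1.0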
def get_channel_type(channel_name):
channel_types_dict = {}
with open("sample_data/sigtypes", "r") as f:
for line in f:
splitted_line = line.split("\t")
channel = splitted_line[-1].rstrip()
channel_type = splitted_line[0]
if channel_name == channel:
return channel_type
raise Exception("Unknown channel name")
# Return list of channel indices for channels of type channel_type
def get_channels_of_type(channels, channel_type):
channel_indices = np.array([])
for channel_index in range(len(channels)):
channel_name = channels[channel_index]
if channel_type == get_channel_type(channel_name):
channel_indices = np.append(channel_indices, channel_index)
return channel_indices
# x includes sample names --> exclude for classification
# training = sample num < 600
# testing = sample num >= 600
def generate_features(features_filename):
training_x, training_y = [], []
testing_x, testing_y = [], []
with open(features_filename, 'w') as fo:
writer = csv.writer(fo)
writer.writerow(['sample_name', 'is_training', 'is_true', 'baseline', 'dtw', 'psd', 'power', 'ksqi', 'pursqi'])
with open(answers_filename, 'r') as f:
reader = csv.reader(f)
headers = next(reader)  # works under both Python 2 and 3
reader = csv.DictReader(f, fieldnames=headers)
for row in reader:
sample_name = row['sample_name']
sample_number = sample_name[1:-1]
sig, fields = wfdb.rdsamp(data_path + sample_name)
subsig = sig[int(start_time*fs):int(end_time*fs),:]
ecg_channels = get_channels_of_type(fields['signame'], "ECG")
if len(ecg_channels) == 0:
print("NO ECG CHANNELS FOR SAMPLE: {}".format(sample_name))
continue
channel_subsig = subsig[:,int(ecg_channels[0])]
try:
baseline = get_baseline(channel_subsig)
power = get_power(channel_subsig)
ksqi = get_ksqi(channel_subsig)
pursqi = get_pursqi(channel_subsig)
except Exception as e:
print("sample_name: {}\n{}".format(sample_name, e))
continue
if np.isnan([baseline, power, ksqi, pursqi]).any():
print("sample containing nan: {}\n{}".format(sample_name, [baseline, power, ksqi, pursqi]))
continue
if int(sample_number) < TRAINING_THRESHOLD:
is_training = 1
else:
is_training = 0
x_val = [
row['sample_name'],
is_training,
int(row['is_true']),
int(row['baseline_is_classified_true']),
int(row['dtw_is_classified_true']),
baseline,
power,
ksqi,
pursqi
]
writer.writerow(x_val)
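# Note on column naming (derived from the code above): the header's 'baseline'
# and 'dtw' columns hold the 0/1 flags baseline_is_classified_true and
# dtw_is_classified_true, while the header's 'psd' column actually holds the
# raw baseline feature; generate_datasets() below reads the values under
# those header names.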
def generate_datasets(features_filename):
training_x, training_y, testing_x, testing_y = [], [], [], []
with open(features_filename, 'r') as f:
reader = csv.reader(f)
headers = next(reader)  # works under both Python 2 and 3
reader = csv.DictReader(f, fieldnames=headers)
for row in reader:
x_val = [
int(row['baseline']),
int(row['dtw']),
float(row['psd']),
float(row['power']),
float(row['ksqi']),
float(row['pursqi'])
]
y_val = int(row['is_true'])
if int(row['is_training']) == 1 and row['sample_name'][0] == 'v':
training_x.append(x_val)
training_y.append(y_val)
elif row['sample_name'][0] == 'v':
testing_x.append(x_val)
testing_y.append(y_val)
return training_x, training_y, testing_x, testing_y
def get_score(prediction, true):
TP = np.sum([(prediction[i] == 1) and (true[i] == 1) for i in range(len(prediction))])
TN = np.sum([(prediction[i] == 0) and (true[i] == 0) for i in range(len(prediction))])
FP = np.sum([(prediction[i] == 1) and (true[i] == 0) for i in range(len(prediction))])
FN = np.sum([(prediction[i] == 0) and (true[i] == 1) for i in range(len(prediction))])
# print('{} {} {} {}'.format(TP, TN, FP, FN))
numerator = TP + TN
denominator = FP + 5*FN + numerator
return float(numerator) / denominator
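# Example (illustrative): prediction = [1, 0, 1] against true = [1, 1, 1]
# gives TP=2, TN=0, FP=0, FN=1, so the score is (2 + 0) / (0 + 5*1 + 2),
# i.e. 2/7 ~= 0.286. A false negative is penalized five times as heavily as
# a false positive, consistent with the challenge scoring referenced in the
# comments below.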
if __name__ == '__main__':
print("Nothing to do!")
# print("Generating datasets...")
# generate_features(features_filename)
# training_x, training_y, testing_x, testing_y = generate_datasets(features_filename)
# print("{} {}".format(len(training_y), len(testing_y)))
# # start = datetime.now()
# # print("Starting at".format(start))
# # print("Generating datasets...")
# # training_x, training_y, testing_x, testing_y = generate_training_testing()
# print("Running classifier...")
# classifier = LogisticRegression(penalty='l1')
# lasso = LassoCV()
# classifier.fit(training_x, training_y)
# # probability of class 1 (versus 0)
# # predictions_y = classifier.predict_proba(testing_x)[:,1]
# # score = classifier.score(testing_x, testing_y)
# # fpr, tpr, thresholds = roc_curve(testing_y, predictions_y)
# # auc = auc(fpr, tpr)
# # print("auc: {}".format(auc))
# # print("score: {}".format(score)
# # print("fpr: {}".format(fpr), end=" "))
# # print("tpr: {}".format(tpr)
# # plt.figure()
# # plt.title("ROC curve for DTW-only classiifer")
# # plt.xlabel("False positive rate")
# # plt.ylabel("True positive rate")
# # plt.plot(fpr, tpr)
# # plt.show()
# lasso.fit(training_x, training_y)
# predictions_y = lasso.predict(testing_x)
# fpr, tpr, thresholds = roc_curve(testing_y, predictions_y)
# chall_score = list()
# for th in thresholds:
# chall_score.append(get_score([x >= th for x in predictions_y], testing_y))
# auc = auc(fpr, tpr)
# print(classifier.coef_)
# print("auc: {}".format(auc))
# print("score: {}".format(score))
# print("fpr: {}".format(fpr))
# print("tpr: {}".format(tpr))
# plt.figure()
# plt.title("ROC curve for top-level classifier with challenge scores")
# plt.xlabel("False positive rate")
# plt.ylabel("True positive rate")
# plt.plot(fpr, tpr, label='ROC Curve')
# plt.plot(fpr, chall_score, label='Challenge score')
# plt.show()
# # DTW only
# # auc: 0.461675144589
# # score: 0.529166666667
# # Baseline only
# # auc: 0.877012054909
# # score: 0.875
# # Combined
# # auc: 0.910041112118
# # score: 0.841666666667
|
|
# -*- coding: utf-8 -*-
import pytest
from marshmallow import (
Schema,
fields,
pre_dump,
post_dump,
pre_load,
post_load,
validates,
validates_schema,
ValidationError,
)
def test_decorated_processors():
class ExampleSchema(Schema):
"""Includes different ways to invoke decorators and set up methods"""
TAG = 'TAG'
value = fields.Integer(as_string=True)
# Implicit default raw, pre dump, return modified item.
@pre_dump
def increment_value(self, item):
item['value'] += 1
return item
# Implicit default raw, post dump, modify in place.
@post_dump
def add_tag(self, item):
item['value'] = self.TAG + item['value']
# Explicitly raw, post dump, instance method, return modified item.
@post_dump(pass_many=True)
def add_envelope(self, data, many):
key = self.get_envelope_key(many)
return {key: data}
# Explicitly raw, pre load, instance method, return modified item.
@pre_load(pass_many=True)
def remove_envelope(self, data, many):
key = self.get_envelope_key(many)
return data[key]
@staticmethod
def get_envelope_key(many):
return 'data' if many else 'datum'
# Explicitly not raw, pre load, instance method, modify in place.
@pre_load(pass_many=False)
def remove_tag(self, item):
item['value'] = item['value'][len(self.TAG):]
# Explicit default raw, post load, instance method, modify in place.
@post_load()
def decrement_value(self, item):
item['value'] -= 1
schema = ExampleSchema()
# Need to re-create these because the processors will modify in place.
make_item = lambda: {'value': 3}
make_items = lambda: [make_item(), {'value': 5}]
item_dumped = schema.dump(make_item()).data
assert item_dumped == {'datum': {'value': 'TAG4'}}
item_loaded = schema.load(item_dumped).data
assert item_loaded == make_item()
items_dumped = schema.dump(make_items(), many=True).data
assert items_dumped == {'data': [{'value': 'TAG4'}, {'value': 'TAG6'}]}
items_loaded = schema.load(items_dumped, many=True).data
assert items_loaded == make_items()
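# The round trip above demonstrates the processing order: on dump, pre_dump
# runs before field serialization (3 -> 4 -> '4'), then the per-item post_dump
# (tag prepended), then the pass_many post_dump (envelope added); on load, the
# pass_many pre_load unwraps the envelope, the per-item pre_load strips the
# tag, fields deserialize, and post_load decrements back to the original.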
class TestPassOriginal:
def test_pass_original_single_no_mutation(self):
class MySchema(Schema):
foo = fields.Field()
@post_load(pass_original=True)
def post_load(self, data, input_data):
ret = data.copy()
ret['_post_load'] = input_data['sentinel']
return ret
@post_dump(pass_original=True)
def post_dump(self, data, obj):
ret = data.copy()
ret['_post_dump'] = obj['sentinel']
return ret
schema = MySchema()
datum = {'foo': 42, 'sentinel': 24}
item_loaded = schema.load(datum).data
assert item_loaded['foo'] == 42
assert item_loaded['_post_load'] == 24
item_dumped = schema.dump(datum).data
assert item_dumped['foo'] == 42
assert item_dumped['_post_dump'] == 24
def test_pass_original_single_with_mutation(self):
class MySchema(Schema):
foo = fields.Field()
@post_load(pass_original=True)
def post_load(self, data, input_data):
data['_post_load'] = input_data['post_load']
schema = MySchema()
item_loaded = schema.load({'foo': 42, 'post_load': 24}).data
assert item_loaded['foo'] == 42
assert item_loaded['_post_load'] == 24
def test_pass_original_many(self):
class MySchema(Schema):
foo = fields.Field()
@post_load(pass_many=True, pass_original=True)
def post_load(self, data, many, original):
if many:
ret = []
for item, orig_item in zip(data, original):
item['_post_load'] = orig_item['sentinel']
ret.append(item)
else:
ret = data.copy()
ret['_post_load'] = original['sentinel']
return ret
@post_dump(pass_many=True, pass_original=True)
def post_dump(self, data, many, original):
if many:
ret = []
for item, orig_item in zip(data, original):
item['_post_dump'] = orig_item['sentinel']
ret.append(item)
else:
ret = data.copy()
ret['_post_dump'] = original['sentinel']
return ret
schema = MySchema()
data = [{'foo': 42, 'sentinel': 24}, {'foo': 424, 'sentinel': 242}]
items_loaded = schema.load(data, many=True).data
assert items_loaded == [
{'foo': 42, '_post_load': 24},
{'foo': 424, '_post_load': 242},
]
test_values = [e['_post_load'] for e in items_loaded]
assert test_values == [24, 242]
items_dumped = schema.dump(data, many=True).data
assert items_dumped == [
{'foo': 42, '_post_dump': 24},
{'foo': 424, '_post_dump': 242},
]
# Also check load/dump of single item
datum = {'foo': 42, 'sentinel': 24}
item_loaded = schema.load(datum, many=False).data
assert item_loaded == {'foo': 42, '_post_load': 24}
item_dumped = schema.dump(datum, many=False).data
assert item_dumped == {'foo': 42, '_post_dump': 24}
def test_decorated_processor_inheritance():
class ParentSchema(Schema):
@post_dump
def inherited(self, item):
item['inherited'] = 'inherited'
return item
@post_dump
def overridden(self, item):
item['overridden'] = 'base'
return item
@post_dump
def deleted(self, item):
item['deleted'] = 'retained'
return item
class ChildSchema(ParentSchema):
@post_dump
def overridden(self, item):
item['overridden'] = 'overridden'
return item
deleted = None
parent_dumped = ParentSchema().dump({}).data
assert parent_dumped == {
'inherited': 'inherited',
'overridden': 'base',
'deleted': 'retained'
}
child_dumped = ChildSchema().dump({}).data
assert child_dumped == {
'inherited': 'inherited',
'overridden': 'overridden'
}
# https://github.com/marshmallow-code/marshmallow/issues/229#issuecomment-138949436
def test_pre_dump_is_invoked_before_implicit_field_generation():
class Foo(Schema):
field = fields.Integer()
@pre_dump
def hook(s, data):
data['generated_field'] = 7
class Meta:
# Removing generated_field from here drops it from the output
fields = ('field', 'generated_field')
assert Foo().dump({"field": 5}).data == {'field': 5, 'generated_field': 7}
class ValidatesSchema(Schema):
foo = fields.Int()
@validates('foo')
def validate_foo(self, value):
if value != 42:
raise ValidationError('The answer to life the universe and everything.')
class TestValidatesDecorator:
def test_validates_and_strict(self):
class VSchema(Schema):
s = fields.String()
@validates('s')
def validate_string(self, data):
raise ValidationError('nope')
with pytest.raises(ValidationError) as excinfo:
VSchema(strict=True).load({'s': 'bar'})
assert excinfo.value.messages == {'s': ['nope']}
# Regression test for https://github.com/marshmallow-code/marshmallow/issues/350
def test_validates_with_attribute_and_strict(self):
class S1(Schema):
s = fields.String(attribute='string_name')
@validates('s')
def validate_string(self, data):
raise ValidationError('nope')
with pytest.raises(ValidationError) as excinfo:
S1(strict=True).load({'s': 'foo'})
assert excinfo.value.messages == {'s': ['nope']}
with pytest.raises(ValidationError):
S1(strict=True, many=True).load([{'s': 'foo'}])
def test_validates_decorator(self):
schema = ValidatesSchema()
errors = schema.validate({'foo': 41})
assert 'foo' in errors
assert errors['foo'][0] == 'The answer to life the universe and everything.'
errors = schema.validate({'foo': 42})
assert errors == {}
errors = schema.validate([{'foo': 42}, {'foo': 43}], many=True)
assert 'foo' in errors[1]
assert len(errors[1]['foo']) == 1
assert errors[1]['foo'][0] == 'The answer to life the universe and everything.'
errors = schema.validate([{'foo': 42}, {'foo': 42}], many=True)
assert errors == {}
errors = schema.validate({})
assert errors == {}
result, errors = schema.load({'foo': 41})
assert errors
assert result == {}
result, errors = schema.load([{'foo': 42}, {'foo': 43}], many=True)
assert len(result) == 2
assert result[0] == {'foo': 42}
assert result[1] == {}
assert 1 in errors
assert 'foo' in errors[1]
assert errors[1]['foo'] == ['The answer to life the universe and everything.']
def test_field_not_present(self):
class BadSchema(ValidatesSchema):
@validates('bar')
def validate_bar(self, value):
raise ValidationError('Never raised.')
schema = BadSchema()
with pytest.raises(ValueError) as excinfo:
schema.validate({'foo': 42})
assert '"bar" field does not exist.' in str(excinfo)
def test_precedence(self):
class Schema2(ValidatesSchema):
foo = fields.Int(validate=lambda n: n != 42)
bar = fields.Int(validate=lambda n: n == 1)
@validates('bar')
def validate_bar(self, value):
if value != 2:
raise ValidationError('Must be 2')
schema = Schema2()
errors = schema.validate({'foo': 42})
assert 'foo' in errors
assert len(errors['foo']) == 1
assert 'Invalid value.' in errors['foo'][0]
errors = schema.validate({'bar': 3})
assert 'bar' in errors
assert len(errors['bar']) == 1
assert 'Invalid value.' in errors['bar'][0]
errors = schema.validate({'bar': 1})
assert 'bar' in errors
assert len(errors['bar']) == 1
assert errors['bar'][0] == 'Must be 2'
class TestValidatesSchemaDecorator:
def test_validator_nested_many(self):
class NestedSchema(Schema):
foo = fields.Int(required=True)
@validates_schema
def validate_schema(self, data):
raise ValidationError('This will never work', 'foo')
class MySchema(Schema):
nested = fields.Nested(NestedSchema, required=True, many=True)
schema = MySchema()
errors = schema.validate({'nested': [1]})
assert errors
assert 'nested' in errors
assert 0 in errors['nested']
assert '_schema' in errors['nested']
assert 'foo' not in errors['nested']
def test_decorated_validators(self):
class MySchema(Schema):
foo = fields.Int()
bar = fields.Int()
@validates_schema
def validate_schema(self, data):
if data['foo'] <= 3:
raise ValidationError('Must be greater than 3')
@validates_schema(pass_many=True)
def validate_raw(self, data, many):
if many:
assert type(data) is list
if len(data) < 2:
raise ValidationError('Must provide at least 2 items')
@validates_schema
def validate_bar(self, data):
if 'bar' in data and data['bar'] < 0:
raise ValidationError('bar must not be negative', 'bar')
schema = MySchema()
errors = schema.validate({'foo': 3})
assert '_schema' in errors
assert errors['_schema'][0] == 'Must be greater than 3'
errors = schema.validate([{'foo': 4}], many=True)
assert '_schema' in errors
assert len(errors['_schema']) == 1
assert errors['_schema'][0] == 'Must provide at least 2 items'
errors = schema.validate({'foo': 4, 'bar': -1})
assert 'bar' in errors
assert len(errors['bar']) == 1
assert errors['bar'][0] == 'bar must not be negative'
def test_multiple_validators(self):
class MySchema(Schema):
foo = fields.Int()
bar = fields.Int()
@validates_schema
def validate_schema(self, data):
if data['foo'] <= 3:
raise ValidationError('Must be greater than 3')
@validates_schema
def validate_bar(self, data):
if 'bar' in data and data['bar'] < 0:
raise ValidationError('bar must not be negative')
schema = MySchema()
errors = schema.validate({'foo': 3, 'bar': -1})
assert type(errors) is dict
assert '_schema' in errors
assert len(errors['_schema']) == 2
assert 'Must be greater than 3' in errors['_schema']
assert 'bar must not be negative' in errors['_schema']
errors = schema.validate([{'foo': 3, 'bar': -1}, {'foo': 3}], many=True)
assert type(errors) is dict
assert '_schema' in errors[0]
assert len(errors[0]['_schema']) == 2
assert 'Must be greater than 3' in errors[0]['_schema']
assert 'bar must not be negative' in errors[0]['_schema']
assert len(errors[1]['_schema']) == 1
assert 'Must be greater than 3' in errors[1]['_schema']
def test_passing_original_data(self):
class MySchema(Schema):
foo = fields.Int()
bar = fields.Int()
@validates_schema(pass_original=True)
def validate_original(self, data, original_data):
if isinstance(original_data, dict) and isinstance(original_data['foo'], str):
raise ValidationError('foo cannot be a string')
# See https://github.com/marshmallow-code/marshmallow/issues/127
@validates_schema(pass_many=True, pass_original=True)
def check_unknown_fields(self, data, original_data, many):
def check(datum):
for key, val in datum.items():
if key not in self.fields:
raise ValidationError({'code': 'invalid_field'})
if many:
for each in original_data:
check(each)
else:
check(original_data)
schema = MySchema()
errors = schema.validate({'foo': 4, 'baz': 42})
assert '_schema' in errors
assert len(errors['_schema']) == 1
assert errors['_schema'][0] == {'code': 'invalid_field'}
errors = schema.validate({'foo': '4'})
assert '_schema' in errors
assert len(errors['_schema']) == 1
assert errors['_schema'][0] == 'foo cannot be a string'
schema = MySchema()
errors = schema.validate([{'foo': 4, 'baz': 42}], many=True)
assert '_schema' in errors
assert len(errors['_schema']) == 1
assert errors['_schema'][0] == {'code': 'invalid_field'}
# https://github.com/marshmallow-code/marshmallow/issues/273
def test_allow_arbitrary_field_names_in_error(self):
class MySchema(Schema):
foo = fields.Int()
bar = fields.Int()
@validates_schema(pass_original=True)
def strict_fields(self, data, original_data):
for key in original_data:
if key not in self.fields:
raise ValidationError('Unknown field name', key)
schema = MySchema()
errors = schema.validate({'foo': 2, 'baz': 42})
assert 'baz' in errors
assert len(errors['baz']) == 1
assert errors['baz'][0] == 'Unknown field name'
def test_skip_on_field_errors(self):
class MySchema(Schema):
foo = fields.Int(required=True, validate=lambda n: n == 3)
bar = fields.Int(required=True)
@validates_schema(skip_on_field_errors=True)
def validate_schema(self, data):
if data['foo'] != data['bar']:
raise ValidationError('Foo and bar must be equal.')
@validates_schema(skip_on_field_errors=True, pass_many=True)
def validate_many(self, data, many):
if many:
assert type(data) is list
if len(data) < 2:
raise ValidationError('Must provide at least 2 items')
schema = MySchema()
# check that schema errors still occur with no field errors
errors = schema.validate({'foo': 3, 'bar': 4})
assert '_schema' in errors
assert errors['_schema'][0] == 'Foo and bar must be equal.'
errors = schema.validate([{'foo': 3, 'bar': 3}], many=True)
assert '_schema' in errors
assert errors['_schema'][0] == 'Must provide at least 2 items'
# check that schema errors don't occur when field errors do
errors = schema.validate({'foo': 3, 'bar': 'not an int'})
assert 'bar' in errors
assert '_schema' not in errors
errors = schema.validate({'foo': 2, 'bar': 2})
assert 'foo' in errors
assert '_schema' not in errors
errors = schema.validate([{'foo': 3, 'bar': 'not an int'}], many=True)
assert 'bar' in errors[0]
assert '_schema' not in errors
def test_decorator_error_handling():
class ExampleSchema(Schema):
foo = fields.Int()
bar = fields.Int()
@pre_load()
def pre_load_error1(self, item):
if item['foo'] != 0:
return
errors = {
'foo' : ['preloadmsg1',],
'bar' : ['preloadmsg2', 'preloadmsg3'],
}
raise ValidationError(errors)
@pre_load()
def pre_load_error2(self, item):
if item['foo'] != 4:
return
raise ValidationError('preloadmsg1', 'foo')
@pre_load()
def pre_load_error3(self, item):
if item['foo'] != 8:
return
raise ValidationError('preloadmsg1')
@post_load()
def post_load_error1(self, item):
if item['foo'] != 1:
return item
errors = {
'foo' : ['postloadmsg1',],
'bar' : ['postloadmsg2', 'postloadmsg3'],
}
raise ValidationError(errors)
@post_load()
def post_load_error2(self, item):
if item['foo'] != 5:
return item
raise ValidationError('postloadmsg1', 'foo')
@pre_dump()
def pre_dump_error1(self, item):
if item['foo'] != 2:
return
errors = {
'foo' : ['predumpmsg1',],
'bar' : ['predumpmsg2', 'predumpmsg3'],
}
raise ValidationError(errors)
@pre_dump()
def pre_dump_error2(self, item):
if item['foo'] != 6:
return
raise ValidationError('predumpmsg1', 'foo')
@post_dump()
def post_dump_error1(self, item):
if item['foo'] != 3:
return item
errors = {
'foo' : ['postdumpmsg1',],
'bar' : ['postdumpmsg2', 'postdumpmsg3'],
}
raise ValidationError(errors)
@post_dump()
def post_dump_error2(self, item):
if item['foo'] != 7:
return
raise ValidationError('postdumpmsg1', 'foo')
def make_item(foo, bar):
data, errors = schema.load({'foo' : foo, 'bar' : bar})
assert data is not None
assert not errors
return data
schema = ExampleSchema()
data, errors = schema.load({'foo' : 0, 'bar' : 1})
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'preloadmsg1'
assert 'bar' in errors
assert len(errors['bar']) == 2
assert 'preloadmsg2' in errors['bar']
assert 'preloadmsg3' in errors['bar']
data, errors = schema.load({'foo' : 1, 'bar' : 1})
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'postloadmsg1'
assert 'bar' in errors
assert len(errors['bar']) == 2
assert 'postloadmsg2' in errors['bar']
assert 'postloadmsg3' in errors['bar']
data, errors = schema.dump(make_item(2, 1))
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'predumpmsg1'
assert 'bar' in errors
assert len(errors['bar']) == 2
assert 'predumpmsg2' in errors['bar']
assert 'predumpmsg3' in errors['bar']
data, errors = schema.dump(make_item(3, 1))
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'postdumpmsg1'
assert 'bar' in errors
assert len(errors['bar']) == 2
assert 'postdumpmsg2' in errors['bar']
assert 'postdumpmsg3' in errors['bar']
data, errors = schema.load({'foo' : 4, 'bar' : 1})
assert len(errors) == 1
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'preloadmsg1'
data, errors = schema.load({'foo' : 5, 'bar' : 1})
assert len(errors) == 1
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'postloadmsg1'
data, errors = schema.dump(make_item(6, 1))
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'predumpmsg1'
data, errors = schema.dump(make_item(7, 1))
assert 'foo' in errors
assert len(errors['foo']) == 1
assert errors['foo'][0] == 'postdumpmsg1'
data, errors = schema.load({'foo' : 8, 'bar' : 1})
assert len(errors) == 1
assert '_schema' in errors
assert len(errors['_schema']) == 1
assert errors['_schema'][0] == 'preloadmsg1'
@pytest.mark.parametrize(
'decorator',
[
pre_load,
post_load,
]
)
def test_decorator_strict_error_handling_with_load(decorator):
class ExampleSchema(Schema):
@decorator
def raise_value_error(self, item):
raise ValidationError({'foo': 'error'})
schema = ExampleSchema(strict=True)
with pytest.raises(ValidationError) as exc:
schema.load({})
assert exc.value.messages == {'foo': 'error'}
schema.dump(object())
@pytest.mark.parametrize(
'decorator',
[
pre_dump,
post_dump,
]
)
def test_decorator_strict_error_handling_with_dump(decorator):
class ExampleSchema(Schema):
@decorator
def raise_value_error(self, item):
raise ValidationError({'foo': 'error'})
schema = ExampleSchema(strict=True)
with pytest.raises(ValidationError) as exc:
schema.dump(object())
assert exc.value.messages == {'foo': 'error'}
schema.load({})
|
|
# Created By: Eric Mc Sween
# Created On: 2008-05-29
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from collections import MutableSequence, namedtuple
from .base import GUIObject
from .selectable_list import Selectable
# We used to directly subclass list, but it caused problems at some point with deepcopy
class Table(MutableSequence, Selectable):
"""Sortable and selectable sequence of :class:`Row`.
In fact, the Table is very similar to :class:`.SelectableList` in
practice and differs mostly in principle. Their difference lies in the nature of the items
they manage. With the Table, rows usually have many properties, presented in columns, and they
have to subclass :class:`Row`.
Usually used with :class:`~hscommon.gui.column.Column`.
Subclasses :class:`.Selectable`.
"""
def __init__(self):
Selectable.__init__(self)
self._rows = []
self._header = None
self._footer = None
def __delitem__(self, key):
self._rows.__delitem__(key)
if self._header is not None and ((not self) or (self[0] is not self._header)):
self._header = None
if self._footer is not None and ((not self) or (self[-1] is not self._footer)):
self._footer = None
self._check_selection_range()
def __getitem__(self, key):
return self._rows.__getitem__(key)
def __len__(self):
return len(self._rows)
def __setitem__(self, key, value):
self._rows.__setitem__(key, value)
def append(self, item):
"""Appends ``item`` at the end of the table.
If there's a footer, the item is inserted before it.
"""
if self._footer is not None:
self._rows.insert(-1, item)
else:
self._rows.append(item)
def insert(self, index, item):
"""Inserts ``item`` at ``index`` in the table.
If there's a header, will make sure we don't insert before it, and if there's a footer, will
make sure that we don't insert after it.
"""
if (self._header is not None) and (index == 0):
index = 1
if (self._footer is not None) and (index >= len(self)):
index = len(self) - 1
self._rows.insert(index, item)
def remove(self, row):
"""Removes ``row`` from table.
If ``row`` is a header or footer, that header or footer will be set to ``None``.
"""
if row is self._header:
self._header = None
if row is self._footer:
self._footer = None
self._rows.remove(row)
self._check_selection_range()
def sort_by(self, column_name, desc=False):
"""Sort table by ``column_name``.
Sort key for each row is computed from :meth:`Row.sort_key_for_column`.
If ``desc`` is ``True``, sort order is reversed.
If present, header and footer will always be first and last, respectively.
"""
if self._header is not None:
self._rows.pop(0)
if self._footer is not None:
self._rows.pop()
key = lambda row: row.sort_key_for_column(column_name)
self._rows.sort(key=key, reverse=desc)
if self._header is not None:
self._rows.insert(0, self._header)
if self._footer is not None:
self._rows.append(self._footer)
#--- Properties
@property
def footer(self):
"""If set, a row that always stay at the bottom of the table.
:class:`Row`. *get/set*.
When set to something else than ``None``, ``header`` and ``footer`` represent rows that will
always be kept in first and/or last position, regardless of sorting. ``len()`` and indexing
will include them, which means that if there's a header, ``table[0]`` returns it and if
there's a footer, ``table[-1]`` returns it. To make things short, all list-like functions
work with header and footer "on". But things get fuzzy for ``append()`` and ``insert()``
because these will ensure that no "normal" row gets inserted before the header or after the
footer.
Adding and removing footer here and there might seem (and is) hackish, but it's much simpler
than the alternative (when, of course, you need such a feature), which is to override magic
methods and adjust the results. When we do that, there the slice stuff that we have to
implement and it gets quite complex. Moreover, the most frequent operation on a table is
``__getitem__``, and making checks to know whether the key is a header or footer at each
call would make that operation, which is the most used, slower.
"""
return self._footer
@footer.setter
def footer(self, value):
if self._footer is not None:
self._rows.pop()
if value is not None:
self._rows.append(value)
self._footer = value
@property
def header(self):
"""If set, a row that always stay at the bottom of the table.
See :attr:`footer` for details.
"""
return self._header
@header.setter
def header(self, value):
if self._header is not None:
self._rows.pop(0)
if value is not None:
self._rows.insert(0, value)
self._header = value
@property
def row_count(self):
"""Number or rows in the table (without counting header and footer).
*int*. *read-only*.
"""
result = len(self)
if self._footer is not None:
result -= 1
if self._header is not None:
result -= 1
return result
@property
def rows(self):
"""List of rows in the table, excluding header and footer.
List of :class:`Row`. *read-only*.
"""
start = None
end = None
if self._footer is not None:
end = -1
if self._header is not None:
start = 1
return self[start:end]
@property
def selected_row(self):
"""Selected row according to :attr:`Selectable.selected_index`.
:class:`Row`. *get/set*.
When setting this attribute, we look up the index of the row and set the selected index from
there. If the row isn't in the list, selection isn't changed.
"""
return self[self.selected_index] if self.selected_index is not None else None
@selected_row.setter
def selected_row(self, value):
try:
self.selected_index = self.index(value)
except ValueError:
pass
@property
def selected_rows(self):
"""List of selected rows based on :attr:`.selected_indexes`.
List of :class:`Row`. *read-only*.
"""
return [self[index] for index in self.selected_indexes]
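# A minimal sketch of the header/footer semantics described above (plain
# strings work as rows here because only sort_by() requires the Row
# interface):
#
#   t = Table()
#   t.append('a'); t.append('b')
#   t.footer = 'total'    # always last: t[-1] == 'total'
#   t.header = 'title'    # always first: t[0] == 'title'
#   t.append('c')         # lands before the footer
#   # len(t) == 5, t.row_count == 3, t.rows == ['a', 'b', 'c']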
class GUITableView:
"""Expected interface for :class:`GUITable`'s view.
*Not actually used in the code. For documentation purposes only.*
Our view, some kind of table view, is expected to stay in sync with the table's contents by
responding appropriately to all callbacks in this interface.
When in edit mode, the content typed by the user is expected to be sent as soon as possible
to the :class:`Row`.
Whenever the user changes the selection, we expect the view to call :meth:`Table.select`.
"""
def refresh(self):
"""Refreshes the contents of the table widget.
Ensures that the contents of the table widget is synced with the model. This includes
selection.
"""
def start_editing(self):
"""Start editing the currently selected row.
Begin whatever inline editing support that the view supports.
"""
def stop_editing(self):
"""Stop editing if there's an inline editing in effect.
There's no "aborting" implied in this call, so it's appropriate to send whatever the user
has typed and might not have been sent down to the :class:`Row` yet. After you've done that,
stop the editing mechanism.
"""
SortDescriptor = namedtuple('SortDescriptor', 'column desc')
class GUITable(Table, GUIObject):
"""Cross-toolkit GUI-enabled table view.
Represents a UI element presenting the user with a sortable, selectable, possibly editable,
table view.
Behaves like the :class:`Table` which it subclasses, but is more focused on being the presenter
of some model data to its :attr:`.GUIObject.view`. There's a :meth:`refresh`
mechanism which ensures fresh data while preserving sorting order and selection. There's also an
editing mechanism which tracks whether (and which) row is being edited (or added) and
saves/cancels edits when appropriate.
Subclasses :class:`Table` and :class:`.GUIObject`. Expected view:
:class:`GUITableView`.
"""
def __init__(self):
GUIObject.__init__(self)
Table.__init__(self)
#: The row being currently edited by the user. ``None`` if no edit is taking place.
self.edited = None
self._sort_descriptor = None
#--- Virtual
def _do_add(self):
"""(Virtual) Creates a new row, adds it in the table.
Returns ``(row, insert_index)``.
"""
raise NotImplementedError()
def _do_delete(self):
"""(Virtual) Delete the selected rows.
"""
pass
def _fill(self):
"""(Virtual/Required) Fills the table with all the rows that this table is supposed to have.
Called by :meth:`refresh`. Does nothing by default.
"""
pass
def _is_edited_new(self):
"""(Virtual) Returns whether the currently edited row should be considered "new".
This is used in :meth:`cancel_edits` to know whether the cancellation of the edit means a
revert of the row's value or the removal of the row.
By default, always false.
"""
return False
def _restore_selection(self, previous_selection):
"""(Virtual) Restores row selection after a contents-changing operation.
Before each contents changing operation, we store our previously selected indexes because in
many cases, such as in :meth:`refresh`, our selection will be lost. After the operation is
over, we call this method with our previously selected indexes (in ``previous_selection``).
The default behavior is (if we indeed have an empty :attr:`.selected_indexes`) to re-select
``previous_selection``. If it was empty, we select the last row of the table.
This behavior can, of course, be overridden.
"""
if not self.selected_indexes:
if previous_selection:
self.select(previous_selection)
else:
self.select([len(self) - 1])
#--- Public
def add(self):
"""Add a new row in edit mode.
Requires :meth:`_do_add` to be implemented. The newly added row will be selected and in edit
mode.
"""
self.view.stop_editing()
if self.edited is not None:
self.save_edits()
row, insert_index = self._do_add()
self.insert(insert_index, row)
self.select([insert_index])
self.view.refresh()
# We have to set "edited" after calling refresh() because some UIs are trigger-happy
# about calling save_edits() and do so during calls to refresh(). We don't want
# a call to save_edits() during refresh to prematurely mess with our newly added item.
self.edited = row
self.view.start_editing()
def can_edit_cell(self, column_name, row_index):
"""Returns whether the cell at ``row_index`` and ``column_name`` can be edited.
A row is, by default, editable as soon as it has an attribute with the same name as ``column_name``.
If :meth:`Row.can_edit` returns False, the row is not editable at all. You can set
editability of rows at the attribute level with can_edit_* properties.
Mostly just a shortcut to :meth:`Row.can_edit_cell`.
"""
row = self[row_index]
return row.can_edit_cell(column_name)
def cancel_edits(self):
"""Cancels the current edit operation.
If there's an :attr:`edited` row, it will be re-initialized (with :meth:`Row.load`).
"""
if self.edited is None:
return
self.view.stop_editing()
if self._is_edited_new():
previous_selection = self.selected_indexes
self.remove(self.edited)
self._restore_selection(previous_selection)
self._update_selection()
else:
self.edited.load()
self.edited = None
self.view.refresh()
def delete(self):
"""Delete the currently selected rows.
Requires :meth:`_do_delete` for this to have any effect on the model. Cancels editing if
relevant.
"""
self.view.stop_editing()
if self.edited is not None:
self.cancel_edits()
return
if self:
self._do_delete()
def refresh(self, refresh_view=True):
"""Empty the table and re-create its rows.
:meth:`_fill` is called after we've emptied the table to re-create our rows. Previous sort order
will be preserved, regardless of the order in which the rows were filled. If there was any
edit operation taking place, it's cancelled.
:param bool refresh_view: Whether we tell our view to refresh after our refill operation.
Most of the time, it's what we want, but there's some cases where
we don't.
"""
self.cancel_edits()
previous_selection = self.selected_indexes
del self[:]
self._fill()
sd = self._sort_descriptor
if sd is not None:
Table.sort_by(self, column_name=sd.column, desc=sd.desc)
self._restore_selection(previous_selection)
if refresh_view:
self.view.refresh()
def save_edits(self):
"""Commit user edits to the model.
This is done by calling :meth:`Row.save`.
"""
if self.edited is None:
return
row = self.edited
self.edited = None
row.save()
def sort_by(self, column_name, desc=False):
"""Sort table by ``column_name``.
Overrides :meth:`Table.sort_by`. After having performed sorting, calls
:meth:`~.Selectable._update_selection` to give you the chance,
if appropriate, to update your selected indexes according to the selection that you
may keep in your model.
Then, we refresh our view.
"""
Table.sort_by(self, column_name=column_name, desc=desc)
self._sort_descriptor = SortDescriptor(column_name, desc)
self._update_selection()
self.view.refresh()
class Row:
"""Represents a row in a :class:`Table`.
It holds multiple values to be represented through columns. Its role is to prepare data
fetched from model instances into a ready-to-present-in-a-table form. You will do this in
:meth:`load`.
When you do this, you'll put the result into arbitrary attributes, which will later be fetched
by your table for presentation to the user.
You can organize your attributes in whatever way you want, but there's a convention you can
follow if you want to minimize subclassing and use default behavior:
1. Attribute name = column name. If your attribute is ``foobar``, whenever we refer to
``column_name``, you refer to that attribute with the column name ``foobar``.
2. Public attributes are for *formatted* value, that is, user readable strings.
3. Underscore prefix is the unformatted (computable) value. For example, you could have
``_foobar`` at ``42`` and ``foobar`` at ``"42 seconds"`` (what you present to the user).
4. Unformatted values are used for sorting.
5. If your column name is a python keyword, add an underscore suffix (``from_``).
Of course, this is only default behavior. This can be overridden.
"""
def __init__(self, table):
super(Row, self).__init__()
self.table = table
def _edit(self):
if self.table.edited is self:
return
assert self.table.edited is None
self.table.edited = self
#--- Virtual
def can_edit(self):
"""(Virtual) Whether the whole row can be edited.
By default, always returns ``True``. This is for the *whole* row. For individual cells, it's
:meth:`can_edit_cell`.
"""
return True
def load(self):
"""(Virtual/Required) Loads up values from the model to be presented in the table.
Usually, our model instances contain values that are not quite ready for display. If you
have number formatting, display calculations and other whatnots to perform, you do it here
and then you put the result in an arbitrary attribute of the row.
"""
raise NotImplementedError()
def save(self):
"""(Virtual/Required) Saves user edits into your model.
If your table is editable, this is called when the user commits their changes. Usually, these
are typed-in strings or selected indexes. You have to do proper parsing and reference
linking, and save the result into your model.
"""
raise NotImplementedError()
def sort_key_for_column(self, column_name):
"""(Virtual) Return the value that is to be used to sort by column ``column_name``.
By default, looks for an attribute with the same name as ``column_name``, but with an
underscore prefix ("unformatted value"). If there's none, tries without the underscore. If
there's none, raises ``AttributeError``.
"""
try:
return getattr(self, '_' + column_name)
except AttributeError:
return getattr(self, column_name)
#--- Public
def can_edit_cell(self, column_name):
"""Returns whether cell for column ``column_name`` can be edited.
By default, the check is done in several steps:
1. We check whether the whole row can be edited with :meth:`can_edit`. If it can't, the cell
can't either.
2. If the column doesn't exist as an attribute, we can't edit.
3. If we have an attribute ``can_edit_<column_name>``, return that.
4. Check if our attribute is a property. If it's not, it's not editable.
5. If our attribute is in fact a property, check whether the property is "settable" (has a
``fset`` method). The cell is editable only if the property is "settable".
"""
if not self.can_edit():
return False
# '_' is in case column is a python keyword
if not hasattr(self, column_name):
if hasattr(self, column_name + '_'):
column_name = column_name + '_'
else:
return False
if hasattr(self, 'can_edit_' + column_name):
return getattr(self, 'can_edit_' + column_name)
# If the row has a settable property, we can edit the cell
rowclass = self.__class__
prop = getattr(rowclass, column_name, None)
if prop is None:
return False
return bool(getattr(prop, 'fset', None))
def get_cell_value(self, attrname):
"""Get cell value for ``attrname``.
By default, does a simple ``getattr()``, but it is used to allow subclasses to have
alternative value storage mechanisms.
"""
if attrname == 'from':
attrname = 'from_'
return getattr(self, attrname)
def set_cell_value(self, attrname, value):
"""Set cell value to ``value`` for ``attrname``.
By default, does a simple ``setattr()``, but it is used to allow subclasses to have
alternative value storage mechanisms.
"""
if attrname == 'from':
attrname = 'from_'
setattr(self, attrname, value)
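# A minimal sketch of the naming convention described in the Row docstring
# (hypothetical subclass; 'amount' and ``self.model`` are invented for
# illustration):
#
#   class AmountRow(Row):
#       def load(self):
#           self._amount = self.model.amount           # unformatted, used for sorting
#           self.amount = '%d seconds' % self._amount  # formatted, shown to the user
#       def save(self):
#           self.model.amount = self._amount           # write edits back to the model
#
# sort_key_for_column('amount') then picks up the unformatted ``_amount``.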
|
|
# coding=utf-8
from django import forms
from django.contrib.auth.forms import (UserCreationForm, UserChangeForm,
AdminPasswordChangeForm, PasswordChangeForm)
from django.contrib.auth.models import Group, Permission
from django.core.exceptions import PermissionDenied
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.http import HttpResponseRedirect
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.views.decorators.debug import sensitive_post_parameters
from django.forms import ModelMultipleChoiceField
from django.contrib.auth.models import User
from xadmin.layout import Fieldset, Main, Side, Row, FormHelper
from xadmin.sites import site
from xadmin.util import unquote
from xadmin.views import BaseAdminPlugin, ModelFormAdminView, ModelAdminView, CommAdminView, csrf_protect_m
# Fix for an xadmin bug: resolve the user model via get_user_model() so a
# custom AUTH_USER_MODEL is honored (this shadows the auth.User imported above).
from django.contrib.auth import get_user_model
User = get_user_model()
ACTION_NAME = {
'add': _('Can add %s'),
'change': _('Can change %s'),
'edit': _('Can edit %s'),
'delete': _('Can delete %s'),
'view': _('Can view %s'),
}
def get_permission_name(p):
action = p.codename.split('_')[0]
if action in ACTION_NAME:
return ACTION_NAME[action] % str(p.content_type)
else:
return p.name
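# Example (illustrative): a permission with codename 'add_article' splits to
# action 'add', so the label becomes _('Can add %s') % str(content_type);
# codenames whose action is not in ACTION_NAME fall back to the stored name.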
class PermissionModelMultipleChoiceField(ModelMultipleChoiceField):
def label_from_instance(self, p):
return get_permission_name(p)
class GroupAdmin(object):
search_fields = ('name',)
ordering = ('name',)
style_fields = {'permissions': 'm2m_transfer'}
model_icon = 'fa fa-group'
def get_field_attrs(self, db_field, **kwargs):
attrs = super(GroupAdmin, self).get_field_attrs(db_field, **kwargs)
if db_field.name == 'permissions':
attrs['form_class'] = PermissionModelMultipleChoiceField
return attrs
class UserAdmin(object):
change_user_password_template = None
list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff')
list_filter = ('is_staff', 'is_superuser', 'is_active')
search_fields = ('username', 'first_name', 'last_name', 'email')
ordering = ('username',)
style_fields = {'user_permissions': 'm2m_transfer'}
model_icon = 'fa fa-user'
relfield_style = 'fk-ajax'
def get_field_attrs(self, db_field, **kwargs):
attrs = super(UserAdmin, self).get_field_attrs(db_field, **kwargs)
if db_field.name == 'user_permissions':
attrs['form_class'] = PermissionModelMultipleChoiceField
return attrs
def get_model_form(self, **kwargs):
if self.org_obj is None:
self.form = UserCreationForm
else:
self.form = UserChangeForm
return super(UserAdmin, self).get_model_form(**kwargs)
def get_form_layout(self):
if self.org_obj:
self.form_layout = (
Main(
Fieldset('',
'username', 'password',
css_class='unsort no_title'
),
Fieldset(_('Personal info'),
Row('first_name', 'last_name'),
'email'
),
Fieldset(_('Permissions'),
'groups', 'user_permissions'
),
Fieldset(_('Important dates'),
'last_login', 'date_joined'
),
),
Side(
Fieldset(_('Status'),
'is_active', 'is_staff', 'is_superuser',
),
)
)
return super(UserAdmin, self).get_form_layout()
class PermissionAdmin(object):
def show_name(self, p):
return get_permission_name(p)
show_name.short_description = _('Permission Name')
show_name.is_column = True
model_icon = 'fa fa-lock'
list_display = ('show_name', )
site.register(Group, GroupAdmin)
site.register(User, UserAdmin)
site.register(Permission, PermissionAdmin)
class UserFieldPlugin(BaseAdminPlugin):
user_fields = []
def get_field_attrs(self, __, db_field, **kwargs):
if self.user_fields and db_field.name in self.user_fields:
return {'widget': forms.HiddenInput}
return __()
def get_form_datas(self, datas):
if self.user_fields and 'data' in datas:
if hasattr(datas['data'], '_mutable') and not datas['data']._mutable:
datas['data'] = datas['data'].copy()
for f in self.user_fields:
datas['data'][f] = self.user.id
return datas
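# A minimal usage sketch (hypothetical admin options class): with
#
#   class PostAdmin(object):
#       user_fields = ['user']
#
# the 'user' form field is rendered as a HiddenInput and forced to the current
# user's id. The copy() above is needed because Django's QueryDict is
# immutable by default.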
site.register_plugin(UserFieldPlugin, ModelFormAdminView)
class ModelPermissionPlugin(BaseAdminPlugin):
user_can_access_owned_objects_only = False
user_owned_objects_field = 'user'
def queryset(self, qs):
if self.user_can_access_owned_objects_only and \
not self.user.is_superuser:
filters = {self.user_owned_objects_field: self.user}
qs = qs.filter(**filters)
return qs
def get_list_display(self, list_display):
if self.user_can_access_owned_objects_only and \
not self.user.is_superuser and \
self.user_owned_objects_field in list_display:
list_display.remove(self.user_owned_objects_field)
return list_display
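# A minimal usage sketch (hypothetical admin options class; attribute names
# come from the plugin above):
#
#   class ArticleAdmin(object):
#       user_can_access_owned_objects_only = True
#       user_owned_objects_field = 'author'  # FK on the model pointing at the user
#
# Non-superusers would then only see rows whose 'author' is the current user,
# and that column is dropped from their list display.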
site.register_plugin(ModelPermissionPlugin, ModelAdminView)
class AccountMenuPlugin(BaseAdminPlugin):
def block_top_account_menu(self, context, nodes):
return '<li><a href="%s"><i class="fa fa-key"></i> %s</a></li>' % (self.get_admin_url('account_password'), _('Change Password'))
site.register_plugin(AccountMenuPlugin, CommAdminView)
class ChangePasswordView(ModelAdminView):
model = User
change_password_form = AdminPasswordChangeForm
change_user_password_template = None
@csrf_protect_m
def get(self, request, object_id):
if not self.has_change_permission(request):
raise PermissionDenied
self.obj = self.get_object(unquote(object_id))
self.form = self.change_password_form(self.obj)
return self.get_response()
def get_media(self):
media = super(ChangePasswordView, self).get_media()
media = media + self.vendor('xadmin.form.css', 'xadmin.page.form.js') + self.form.media
return media
def get_context(self):
context = super(ChangePasswordView, self).get_context()
helper = FormHelper()
helper.form_tag = False
helper.include_media = False
self.form.helper = helper
context.update({
'title': _('Change password: %s') % escape(unicode(self.obj)),
'form': self.form,
'has_delete_permission': False,
'has_change_permission': True,
'has_view_permission': True,
'original': self.obj,
})
return context
def get_response(self):
return TemplateResponse(self.request, [
self.change_user_password_template or
'xadmin/auth/user/change_password.html'
], self.get_context(), current_app=self.admin_site.name)
@method_decorator(sensitive_post_parameters())
@csrf_protect_m
def post(self, request, object_id):
if not self.has_change_permission(request):
raise PermissionDenied
self.obj = self.get_object(unquote(object_id))
self.form = self.change_password_form(self.obj, request.POST)
if self.form.is_valid():
self.form.save()
self.message_user(_('Password changed successfully.'), 'success')
return HttpResponseRedirect(self.model_admin_url('change', self.obj.pk))
else:
return self.get_response()
class ChangeAccountPasswordView(ChangePasswordView):
change_password_form = PasswordChangeForm
@csrf_protect_m
def get(self, request):
self.obj = self.user
self.form = self.change_password_form(self.obj)
return self.get_response()
def get_context(self):
context = super(ChangeAccountPasswordView, self).get_context()
context.update({
'title': _('Change password'),
'account_view': True,
})
return context
@method_decorator(sensitive_post_parameters())
@csrf_protect_m
def post(self, request):
self.obj = self.user
self.form = self.change_password_form(self.obj, request.POST)
if self.form.is_valid():
self.form.save()
self.message_user(_('Password changed successfully.'), 'success')
return HttpResponseRedirect(self.get_admin_url('index'))
else:
return self.get_response()
site.register_view(r'^users/userprofile/(.+)/password/$',
ChangePasswordView, name='user_change_password')
site.register_view(r'^account/password/$', ChangeAccountPasswordView,
name='account_password')
|
|
# TNC Python interface
# @(#) $Jeannot: tnc.py,v 1.11 2005/01/28 18:27:31 js Exp $
# Copyright (c) 2004-2005, Jean-Sebastien Roy (js@jeannot.org)
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
TNC: A python interface to the TNC non-linear optimizer
TNC is a non-linear optimizer. To use it, you must provide a function to
minimize. The function must take one argument: the list of coordinates where to
evaluate the function; and it must return either a tuple, whose first element is the
value of the function, and whose second argument is the gradient of the function
(as a list of values); or None, to abort the minimization.
"""
from __future__ import division, print_function, absolute_import
from scipy.optimize import moduleTNC, approx_fprime
from .optimize import MemoizeJac, Result, _check_unknown_options
from numpy import inf, array, zeros, asfarray
__all__ = ['fmin_tnc']
MSG_NONE = 0 # No messages
MSG_ITER = 1 # One line per iteration
MSG_INFO = 2 # Informational messages
MSG_VERS = 4 # Version info
MSG_EXIT = 8 # Exit reasons
MSG_ALL = MSG_ITER + MSG_INFO + MSG_VERS + MSG_EXIT
MSGS = {
MSG_NONE : "No messages",
MSG_ITER : "One line per iteration",
MSG_INFO : "Informational messages",
MSG_VERS : "Version info",
MSG_EXIT : "Exit reasons",
MSG_ALL : "All messages"
}
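# The message levels above are independent bit flags, so they can be OR-ed
# together; e.g. MSG_ITER | MSG_EXIT (== 9) requests one line per iteration
# plus the exit reason, and MSG_ALL is simply the sum of all four flags.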
INFEASIBLE = -1  # Infeasible (low > up)
LOCALMINIMUM = 0  # Local minimum reached (|pg| ~= 0)
FCONVERGED = 1  # Converged (|f_n-f_(n-1)| ~= 0)
XCONVERGED = 2  # Converged (|x_n-x_(n-1)| ~= 0)
MAXFUN = 3  # Max. number of function evaluations reached
LSFAIL = 4  # Line search failed
CONSTANT = 5  # All lower bounds are equal to the upper bounds
NOPROGRESS = 6  # Unable to progress
USERABORT = 7  # User requested end of minimization
RCSTRINGS = {
        INFEASIBLE : "Infeasible (low > up)",
        LOCALMINIMUM : "Local minimum reached (|pg| ~= 0)",
        FCONVERGED : "Converged (|f_n-f_(n-1)| ~= 0)",
        XCONVERGED : "Converged (|x_n-x_(n-1)| ~= 0)",
        MAXFUN : "Max. number of function evaluations reached",
        LSFAIL : "Line search failed",
        CONSTANT : "All lower bounds are equal to the upper bounds",
        NOPROGRESS : "Unable to progress",
        USERABORT : "User requested end of minimization"
}
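# A return code from the optimizer can be turned into a human-readable
# message by indexing this dict, e.g. RCSTRINGS[FCONVERGED].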
# Changes to interface made by Travis Oliphant, Apr. 2004 for inclusion in
# SciPy
def fmin_tnc(func, x0, fprime=None, args=(), approx_grad=0,
bounds=None, epsilon=1e-8, scale=None, offset=None,
messages=MSG_ALL, maxCGit=-1, maxfun=None, eta=-1,
stepmx=0, accuracy=0, fmin=0, ftol=-1, xtol=-1, pgtol=-1,
rescale=-1, disp=None, callback=None):
"""
Minimize a function with variables subject to bounds, using
gradient information in a truncated Newton algorithm. This
method wraps a C implementation of the algorithm.
Parameters
----------
func : callable ``func(x, *args)``
Function to minimize. Must do one of:
1. Return f and g, where f is the value of the function and g its
gradient (a list of floats).
        2. Return the function value but supply gradient function
           separately as `fprime`.
3. Return the function value and set ``approx_grad=True``.
If the function returns None, the minimization
is aborted.
x0 : array_like
Initial estimate of minimum.
fprime : callable ``fprime(x, *args)``
Gradient of `func`. If None, then either `func` must return the
function value and the gradient (``f,g = func(x, *args)``)
or `approx_grad` must be True.
args : tuple
Arguments to pass to function.
approx_grad : bool
If true, approximate the gradient numerically.
bounds : list
(min, max) pairs for each element in x0, defining the
bounds on that parameter. Use None or +/-inf for one of
min or max when there is no bound in that direction.
epsilon : float
Used if approx_grad is True. The stepsize in a finite
difference approximation for fprime.
scale : array_like
Scaling factors to apply to each variable. If None, the
factors are up-low for interval bounded variables and
1+|x| for the others. Defaults to None.
    offset : array_like
        Value to subtract from each variable. If None, the
        offsets are (up+low)/2 for interval bounded variables
        and x for the others.
    messages :
        Bit mask used to select messages displayed during
        minimization, with values defined in the MSGS dict.
        Defaults to MSG_ALL.
disp : int
Integer interface to messages. 0 = no message, 5 = all messages
    maxCGit : int
        Maximum number of hessian*vector evaluations per main
        iteration. If maxCGit == 0, the direction chosen is
        -gradient. If maxCGit < 0, maxCGit is set to
        max(1,min(50,n/2)). Defaults to -1.
    maxfun : int
        Maximum number of function evaluations. If None, maxfun is
        set to max(100, 10*len(x0)). Defaults to None.
    eta : float
        Severity of the line search. If < 0 or > 1, set to 0.25.
        Defaults to -1.
stepmx : float
Maximum step for the line search. May be increased during
call. If too small, it will be set to 10.0. Defaults to 0.
accuracy : float
Relative precision for finite difference calculations. If
<= machine_precision, set to sqrt(machine_precision).
Defaults to 0.
fmin : float
Minimum function value estimate. Defaults to 0.
    ftol : float
        Precision goal for the value of f in the stopping criterion.
        If ftol < 0.0, ftol is set to 0.0. Defaults to -1.
xtol : float
Precision goal for the value of x in the stopping
criterion (after applying x scaling factors). If xtol <
0.0, xtol is set to sqrt(machine_precision). Defaults to
-1.
pgtol : float
Precision goal for the value of the projected gradient in
the stopping criterion (after applying x scaling factors).
If pgtol < 0.0, pgtol is set to 1e-2 * sqrt(accuracy).
Setting it to 0.0 is not recommended. Defaults to -1.
rescale : float
Scaling factor (in log10) used to trigger f value
rescaling. If 0, rescale at each iteration. If a large
value, never rescale. If < 0, rescale is set to 1.3.
callback : callable, optional
Called after each iteration, as callback(xk), where xk is the
current parameter vector.
Returns
-------
x : ndarray
The solution.
nfeval : int
The number of function evaluations.
rc : int
Return code as defined in the RCSTRINGS dict.
See also
--------
minimize: Interface to minimization algorithms for multivariate
functions. See the 'TNC' `method` in particular.
Notes
-----
The underlying algorithm is truncated Newton, also called
Newton Conjugate-Gradient. This method differs from
scipy.optimize.fmin_ncg in that
1. It wraps a C implementation of the algorithm
2. It allows each variable to be given an upper and lower bound.
    The algorithm incorporates the bound constraints by determining
    the descent direction as in an unconstrained truncated Newton,
    but never taking a step-size large enough to leave the space
    of feasible x's. The algorithm keeps track of a set of
    currently active constraints, and ignores them when computing
    the minimum allowable step size. (The x's associated with the
    active constraints are kept fixed.) If the maximum allowable
    step size is zero then a new constraint is added. At the end
    of each iteration one of the constraints may be deemed no
    longer active and removed. A constraint is considered
    no longer active if it is currently active
    but the gradient for that variable points inward from the
    constraint. The specific constraint removed is the one
    associated with the variable of largest index whose
    constraint is no longer active.
References
----------
Wright S., Nocedal J. (2006), 'Numerical Optimization'
Nash S.G. (1984), "Newton-Type Minimization Via the Lanczos Method",
SIAM Journal of Numerical Analysis 21, pp. 770-778
"""
# handle fprime/approx_grad
if approx_grad:
fun = func
jac = None
elif fprime is None:
fun = MemoizeJac(func)
jac = fun.derivative
else:
fun = func
jac = fprime
if disp is not None: # disp takes precedence over messages
mesg_num = disp
else:
mesg_num = {0:MSG_NONE, 1:MSG_ITER, 2:MSG_INFO, 3:MSG_VERS,
4:MSG_EXIT, 5:MSG_ALL}.get(messages, MSG_ALL)
# build options
opts = {'eps' : epsilon,
'scale': scale,
'offset': offset,
'mesg_num': mesg_num,
'maxCGit': maxCGit,
'maxiter': maxfun,
'eta': eta,
'stepmx': stepmx,
'accuracy': accuracy,
'minfev': fmin,
'ftol': ftol,
'xtol': xtol,
'gtol': pgtol,
'rescale': rescale,
'disp': False}
res = _minimize_tnc(fun, x0, args, jac, bounds, callback=callback, **opts)
return res['x'], res['nfev'], res['status']
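# A minimal usage sketch (not part of the original interface): minimize the
# convex function f(x) = x0**2 + x1**2 with an analytic gradient, keeping
# both variables inside [-10, 10]. The function and starting point are
# illustrative only.
def _fmin_tnc_usage_sketch():
    def f_and_g(x):
        # value and gradient returned together, as fmin_tnc expects
        f = x[0] ** 2 + x[1] ** 2
        g = [2.0 * x[0], 2.0 * x[1]]
        return f, g
    x, nfeval, rc = fmin_tnc(f_and_g, [3.0, -4.0], bounds=[(-10, 10)] * 2,
                             messages=MSG_NONE)
    # rc indexes into RCSTRINGS for a readable status
    return x, nfeval, RCSTRINGS[rc]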
def _minimize_tnc(fun, x0, args=(), jac=None, bounds=None,
eps=1e-8, scale=None, offset=None, mesg_num=None,
maxCGit=-1, maxiter=None, eta=-1, stepmx=0, accuracy=0,
minfev=0, ftol=-1, xtol=-1, gtol=-1, rescale=-1, disp=False,
callback=None, **unknown_options):
"""
Minimize a scalar function of one or more variables using a truncated
Newton (TNC) algorithm.
Options for the TNC algorithm are:
eps : float
Step size used for numerical approximation of the jacobian.
    scale : list of floats
        Scaling factors to apply to each variable. If None, the
        factors are up-low for interval bounded variables and
        1+|x| for the others. Defaults to None.
    offset : float
        Value to subtract from each variable. If None, the
        offsets are (up+low)/2 for interval bounded variables
        and x for the others.
disp : bool
Set to True to print convergence messages.
    maxCGit : int
        Maximum number of hessian*vector evaluations per main
        iteration. If maxCGit == 0, the direction chosen is
        -gradient. If maxCGit < 0, maxCGit is set to
        max(1,min(50,n/2)). Defaults to -1.
    maxiter : int
        Maximum number of function evaluations. If None, `maxiter` is
        set to max(100, 10*len(x0)). Defaults to None.
    eta : float
        Severity of the line search. If < 0 or > 1, set to 0.25.
        Defaults to -1.
stepmx : float
Maximum step for the line search. May be increased during
call. If too small, it will be set to 10.0. Defaults to 0.
accuracy : float
Relative precision for finite difference calculations. If
<= machine_precision, set to sqrt(machine_precision).
Defaults to 0.
minfev : float
Minimum function value estimate. Defaults to 0.
    ftol : float
        Precision goal for the value of f in the stopping criterion.
        If ftol < 0.0, ftol is set to 0.0. Defaults to -1.
xtol : float
Precision goal for the value of x in the stopping
criterion (after applying x scaling factors). If xtol <
0.0, xtol is set to sqrt(machine_precision). Defaults to
-1.
gtol : float
Precision goal for the value of the projected gradient in
the stopping criterion (after applying x scaling factors).
If gtol < 0.0, gtol is set to 1e-2 * sqrt(accuracy).
Setting it to 0.0 is not recommended. Defaults to -1.
rescale : float
Scaling factor (in log10) used to trigger f value
rescaling. If 0, rescale at each iteration. If a large
value, never rescale. If < 0, rescale is set to 1.3.
This function is called by the `minimize` function with `method=TNC`.
It is not supposed to be called directly.
"""
_check_unknown_options(unknown_options)
epsilon = eps
maxfun = maxiter
fmin = minfev
pgtol = gtol
x0 = asfarray(x0).flatten()
n = len(x0)
if bounds is None:
bounds = [(None,None)] * n
if len(bounds) != n:
raise ValueError('length of x0 != length of bounds')
if mesg_num is not None:
messages = {0:MSG_NONE, 1:MSG_ITER, 2:MSG_INFO, 3:MSG_VERS,
4:MSG_EXIT, 5:MSG_ALL}.get(mesg_num, MSG_ALL)
elif disp:
messages = MSG_ALL
else:
messages = MSG_NONE
if jac is None:
def func_and_grad(x):
f = fun(x, *args)
g = approx_fprime(x, fun, epsilon, *args)
return f, g
else:
def func_and_grad(x):
f = fun(x, *args)
g = jac(x, *args)
return f, g
"""
low, up : the bounds (lists of floats)
if low is None, the lower bounds are removed.
if up is None, the upper bounds are removed.
low and up defaults to None
"""
low = zeros(n)
up = zeros(n)
for i in range(n):
if bounds[i] is None:
l, u = -inf, inf
else:
l,u = bounds[i]
if l is None:
low[i] = -inf
else:
low[i] = l
if u is None:
up[i] = inf
else:
up[i] = u
if scale is None:
scale = array([])
if offset is None:
offset = array([])
if maxfun is None:
maxfun = max(100, 10*len(x0))
rc, nf, nit, x = moduleTNC.minimize(func_and_grad, x0, low, up, scale,
offset, messages, maxCGit, maxfun,
eta, stepmx, accuracy, fmin, ftol,
xtol, pgtol, rescale, callback)
funv, jacv = func_and_grad(x)
return Result(x=x, fun=funv, jac=jacv, nfev=nf, nit=nit, status=rc,
message=RCSTRINGS[rc], success=(-1 < rc < 3))
if __name__ == '__main__':
# Examples for TNC
def example():
print("Example")
# A function to minimize
def function(x):
f = pow(x[0],2.0)+pow(abs(x[1]),3.0)
g = [0,0]
g[0] = 2.0*x[0]
g[1] = 3.0*pow(abs(x[1]),2.0)
if x[1]<0:
g[1] = -g[1]
return f, g
# Optimizer call
        # bounds are (min, max) pairs per variable: x0 in [-10, 10], x1 in [1, 10]
        x, nf, rc = fmin_tnc(function, [-7, 3], bounds=[(-10, 10), (1, 10)])
print("After", nf, "function evaluations, TNC returned:", RCSTRINGS[rc])
print("x =", x)
print("exact value = [0, 1]")
print()
example()
|
|
import sys
import time
import subprocess
from subprocess import Popen
from subprocess import PIPE
import json
import os
import re
import base64, uuid, io, codecs, mimetypes
import shutil
import shlex
import xml.dom.minidom
try:
from urllib2 import Request, urlopen
from urllib2 import URLError, HTTPError
from httplib import HTTPSConnection
except ImportError:
from urllib.request import Request, urlopen
from urllib.error import URLError, HTTPError
from http.client import HTTPSConnection
from xml.dom.minidom import parse
def execCommand(command):
print("Executing " + command)
statementStatus = subprocess.call(command, shell=True)
if statementStatus == 1 :
print("Error executing " + command)
sys.exit("Error executing " + command)
return statementStatus
def deleteExistingApplication(applicationName):
if doesItExist("cf a ", applicationName, 0) :
deleteRequest = "cf delete -f -r " + applicationName
statementStatus = execCommand(deleteRequest)
if statementStatus == 1 :
time.sleep(5) # Delay for 5 seconds
execCommand(deleteRequest)
#check if really gone - POTENTIAL FOR INFINITE LOOP - Let the delete fail
#if doesItExist("cf a ", applicationName, 0) :
# print("Unable to delete an application, trying again : " +deleteRequest)
# time.sleep(5) # Delay for 5 seconds
# deleteExistingApplication(applicationName)
def deleteExistingService(serviceName):
if doesItExist("cf s ", serviceName, 0) :
deleteRequest = "cf delete-service -f " + serviceName
statementStatus = execCommand(deleteRequest)
if statementStatus == 1 :
time.sleep(5) # Delay for 5 seconds
execCommand(deleteRequest)
#check if really gone - POTENTIAL FOR INFINITE LOOP - Let the delete fail
#if doesItExist("cf s ", serviceName, 0) :
# print("Unable to delete an service, trying again: " +deleteRequest)
# deleteExistingService(serviceName)
def doesItExist(command, name, sectionNumber ) :
'''handle duplicates due to similar spellings, avoid using regular expressions'''
result, err, exitcode = call(command)
#print("Result = " + result)
#print("Err = " + err)
#print(exitcode)
rows = result.split('\n')
#print("Rows = ")
#print(rows)
#print(rows.encode('utf-8'))
if name in result:
print(name + " does EXIST")
return True
else:
print(name + " does not EXIST")
return False
#for row in rows:
# existingSection = row.split(" ")[sectionNumber]
# print(name)
# print("section= ")
# print(existingSection)
# if existingSection == name :
# return True
def createService(serviceName, serviceRequest):
print("Create service if it does not exist: " +serviceName)
print(serviceRequest)
if doesItExist("cf s ", serviceName, 0) :
print("Service Intance already exists:" + serviceName)
return None
else:
        statementStatus = subprocess.call(serviceRequest, shell=True)
        if statementStatus == 1 :
            print("Error creating a service: " + serviceName + ", retrying after a delay")
            time.sleep(5) # Delay for 5 seconds
            statementStatus = subprocess.call(serviceRequest, shell=True)
            if statementStatus == 1 :
                print("Error creating a service: " + serviceName)
                sys.exit("Error creating a service instance: " + serviceName)
        else:
            # the create call succeeded; verify the instance is actually visible yet
            if not doesItExist("cf s ", serviceName, 0) :
                time.sleep(5)
                print("Service instance not visible yet, re-checking: " + serviceName)
                createService(serviceName, serviceRequest)
def unbind(applicationName,serviceName):
if doesItExist("cf a ", applicationName, 0) and doesItExist("cf a ", serviceName, 0):
unbindRequest = "cf us " + applicationName + " " + serviceName
print(unbindRequest)
statementStatus = subprocess.call(unbindRequest, shell=True)
if statementStatus == 1 :
print("Error unbinding an application: " + unbindRequest)
time.sleep(5) # Delay for 5 seconds
statementStatus = subprocess.call(unbindRequest, shell=True)
if statementStatus == 1 :
print("Error unbinding an application: " + unbindRequest)
sys.exit("Error unbinding an application instance: " +applicationName + " from " + serviceName)
def call(cmd):
"""Runs the given command locally and returns the output, err and exit_code, handles Pipes."""
if "|" in cmd:
cmd_parts = cmd.split('|')
else:
cmd_parts = []
cmd_parts.append(cmd)
i = 0
p = {}
for cmd_part in cmd_parts:
cmd_part = cmd_part.strip()
if i == 0:
p[i]=Popen(shlex.split(cmd_part),stdin=None, stdout=PIPE, stderr=PIPE)
else:
p[i]=Popen(shlex.split(cmd_part),stdin=p[i-1].stdout, stdout=PIPE, stderr=PIPE)
i = i +1
(output, err) = p[i-1].communicate()
exit_code = p[0].wait()
return str(output).strip(), str(err), exit_code
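# Usage sketch for call() (assumes the `cf` CLI is on PATH; "my-app" is a
# placeholder name): run a piped command locally and inspect the exit code.
def _call_usage_sketch():
    output, err, exit_code = call("cf apps | grep my-app")
    return exit_code == 0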
# checkout submodules
def checkoutSubmodules():
print("Pulling Submodules for " + os.getcwd())
statementStatus = subprocess.call('git submodule init', shell=True)
if statementStatus == 1 :
sys.exit("Error when init submodule ")
statementStatus = subprocess.call('git submodule update --init --remote', shell=True)
if statementStatus == 1 :
sys.exit("Error when updating submodules")
return statementStatus
def updateGitModules(config):
print("CurrentDir " + os.getcwd())
if 'git@' in open('.gitmodules').read():
config.updateGitModules='true'
f1 = open('.gitmodules', 'r')
f2 = open('.gitmodules.script', 'w')
for line in f1:
line = line.replace(':', '/')
line = line.replace('git@', "https://")
f2.write(line)
f1.close()
f2.close()
shutil.copy(".gitmodules", ".gitmodules.bak")
shutil.copy(".gitmodules.script", ".gitmodules")
def restoreGitModules(config):
if ( os.path.isfile(".gitmodules.bak") ):
print("restoring .gitmodules")
shutil.copy(".gitmodules.bak", ".gitmodules")
def buildProject(mavenCommand,projectDir):
statementStatus = subprocess.call(mavenCommand, shell=True)
if statementStatus == 1 :
sys.exit("Error building the project "+projectDir)
return statementStatus
class MultipartFormdataEncoder(object):
def __init__(self):
self.boundary = "FILEBOUNDARY"
self.content_type = 'multipart/form-data; boundary={}'.format(self.boundary)
@classmethod
def u(cls, s):
if sys.hexversion < 0x03000000 and isinstance(s, str):
s = s.decode('utf-8')
if sys.hexversion >= 0x03000000 and isinstance(s, bytes):
s = s.decode('utf-8')
return s
def iter(self, fields, files):
"""
fields is a sequence of (name, value) elements for regular form fields.
files is a sequence of (name, filename, file-type) elements for data to be uploaded as files
Yield body's chunk as bytes
"""
encoder = codecs.getencoder('utf-8')
print(fields)
for (key, value) in fields:
key = self.u(key)
yield encoder('--{}\r\n'.format(self.boundary))
yield encoder(self.u('Content-Disposition: form-data; name="{}"\r\n').format(key))
yield encoder('\r\n')
if isinstance(value, int) or isinstance(value, float):
value = str(value)
yield encoder(self.u(value))
yield encoder('\r\n')
for (key, filename, fpath) in files:
key = self.u(key)
filename = self.u(filename)
yield encoder('--{}\r\n'.format(self.boundary))
yield encoder(self.u('Content-Disposition: form-data; name="{}"; filename="{}"\r\n').format(key, filename))
yield encoder('Content-Type: {}\r\n'.format(mimetypes.guess_type(filename)[0] or 'application/octet-stream'))
yield encoder('\r\n')
with open(fpath,'rb') as fd:
buff = fd.read()
yield (buff, len(buff))
yield encoder('\r\n')
yield encoder('--{}--\r\n'.format(self.boundary))
def encode(self, fields, files):
body = io.BytesIO()
for chunk, chunk_len in self.iter(fields, files):
body.write(chunk)
return self.content_type, body.getvalue()
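# Usage sketch for MultipartFormdataEncoder (field names and the file path
# are placeholders): build a multipart/form-data content type and body from
# one regular form field and one file upload.
def _multipart_usage_sketch():
    encoder = MultipartFormdataEncoder()
    fields = [("description", "nightly build")]
    files = [("artifact", "app.jar", "target/app.jar")]
    content_type, body = encoder.encode(fields, files)
    return content_type, len(body)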
def evaluatePom(config, cfCommand, projectDir):
try :
print("\tevaluate Pom")
curDir=os.getcwd()
print ("\tCurrent Directory = " + os.getcwd())
print ("\tProject Directory = " + projectDir)
os.chdir(projectDir)
print ("\tCurrent Directory = " + os.getcwd())
f = open("pom.xml", 'r')
f1 = f.read()
f.close()
print("\t============================")
artifactIdTemp=re.search(r'<artifactId[^>]*>([^<]+)</artifactId>', f1)
if artifactIdTemp:
print("\t" + artifactIdTemp.group(1))
config.artifactId=artifactIdTemp.group(1)
else:
sys.exit("Error getting artifactId from " + projectDir + "/pom.xml")
versionTemp=re.search(r'<version[^>]*>([^<]+)</version>', f1)
if versionTemp:
print("\t" + versionTemp.group(1))
config.jarVersion=versionTemp.group(1)
else:
sys.exit("Error getting jarVersion from " + projectDir + "/pom.xml")
print("\tArtifactId derived from pom.xml = " + config.artifactId)
print("\tJar Version derived from pom.xml=" + config.jarVersion)
finally:
print ("\tCurrent Directory = " + os.getcwd())
os.chdir(curDir)
print ("\tCurrent Directory = " + os.getcwd())
print ("\txxx")
def getJarFromArtifactory(config, cfCommand, projectDir):
print("\tFast install =" + config.fastinstall)
if config.fastinstall == 'y' :
print("\tretrieve jar from Artifactory")
print("\tartifactory repo=" + config.artifactoryrepo)
print("\tartifactory user =" + config.artifactoryuser)
#print("\tartifactory pass =" + config.artifactorypass)
curDir=os.getcwd()
print ("\tCurrent Directory = " + os.getcwd())
print ("\tProject Directory = " + projectDir)
print('\tmvnsettings=' + config.mvnsettings)
print('\tmavenRepo=' + config.mavenRepo)
evaluatePom(config, cfCommand, projectDir)
print("\tCopying artifacts..")
f = open(config.mvnsettings, 'r')
f1 = f.read()
f.close()
#print(f1)
found = 0
dom = parse(config.mvnsettings)
serverlist = dom.getElementsByTagName("server")
try :
print("\tChdir to " + projectDir + " Current Directory = " + os.getcwd())
os.chdir(projectDir)
print("\tCurrent Directory = " + os.getcwd())
print("")
for aServer in serverlist:
artifactory1 = aServer.getElementsByTagName("id")[0].firstChild.data
artifactoryuser = aServer.getElementsByTagName("username")[0].firstChild.data
artifactorypass = aServer.getElementsByTagName("password")[0].firstChild.data
print( "\tserver id === " + artifactory1 )
repolist = dom.getElementsByTagName("repository")
for aRepo in repolist:
artifactory2 = aRepo.getElementsByTagName("id")[0].firstChild.data
artifactoryrepo = aRepo.getElementsByTagName("url")[0].firstChild.data
print("\tREPOSITORY INFO :looking for=" + artifactory1 + " found=" + artifactory2 + ":" + artifactoryrepo)
if artifactory1 == artifactory2 :
print("\tArtifactory derived from maven settings.xml ==== " + artifactory2)
print("\tArtifactory url from maven settings.xml ==== " + artifactoryrepo)
print("\tArtifactory user derived from maven settings.xml ==== " + artifactoryuser)
#print("Artifactory pass derived from maven settings.xml ==== " + artifactorypass)
if artifactorypass.find("${") == 0 :
print("\tpassword is set to an environment variable that was not found, moving on to next entry")
else:
try:
os.stat("target")
except:
os.mkdir("target")
urlOfJar=artifactoryrepo + "/com/ge/predix/solsvc/" + config.artifactId + "/" + config.jarVersion + "/" + config.artifactId + "-" + config.jarVersion + ".jar"
print("/turlOfJar=" + urlOfJar)
request = Request(urlOfJar)
authString = artifactoryuser + ":" + artifactorypass
base64string = base64.b64encode(bytearray(authString, 'UTF-8')).decode("ascii")
request.add_header("Authorization", "Basic %s" % base64string)
try:
downloadFile="target/" + config.artifactId + "-" + config.jarVersion + ".jar"
print("\tDownloading " + downloadFile)
result = urlopen(request)
with open(downloadFile, "wb") as local_file:
local_file.write(result.read())
print("\tFrom: url: " + artifactoryrepo)
print("\tDownloading DONE")
print("\t============================")
found = 1
break
                            except (HTTPError, URLError) as err:
                                # HTTPError is a subclass of URLError, so a single handler covers both
                                e = sys.exc_info()[1]
                                print("\tNot found in that repo, let's try another. " + urlOfJar + " Error: %s" % e)
                                found = 0
                                continue
if found == 1:
break
finally:
print("\tCurrent Directory = " + os.getcwd())
os.chdir(curDir)
print("\tCurrent Directory = " + os.getcwd())
if found == 0:
sys.exit("\tError copying artifact "+projectDir)
def pushProject(config, appName, cfCommand, projectDir, checkIfExists):
print("****************** Running pushProject for "+ appName + " ******************" )
if checkIfExists == "true" :
#check if really gone
if doesItExist("cf a ", applicationName, 0) :
print(appName + " already exists, skipping push")
return
if config.fastinstall == 'y' :
getJarFromArtifactory(config, cfCommand, projectDir)
statementStatus = cfPush(appName, cfCommand)
return statementStatus
def cfPush(appName, cfCommand):
print("Deploying to CF..., Current Directory = " + os.getcwd())
print(cfCommand)
statementStatus = subprocess.call(cfCommand, shell=True)
if statementStatus == 1 :
sys.exit("Error deploying the project " + appName)
print("Deployment to CF done.")
return statementStatus
def createPredixUAASecurityService(config):
#create UAA instance
uaa_payload_filename = 'uaa_payload.json'
data = {}
data['adminClientSecret'] = config.uaaAdminSecret
#cross-os compatibility requires json to be in a file
with open(uaa_payload_filename, 'w') as outfile:
json.dump(data, outfile)
outfile.close()
uaaJsonrequest = "cf cs "+config.predixUaaService+" "+config.predixUaaServicePlan +" "+config.rmdUaaName+ " -c " + os.getcwd()+'/'+uaa_payload_filename
createService(config.rmdUaaName,uaaJsonrequest)
def getVcapJsonForPredixBoot (config):
print("cf env " + config.predixbootAppName)
predixBootEnv = subprocess.check_output(["cf", "env" ,config.predixbootAppName])
systemProvidedVars=predixBootEnv.decode('utf-8').split('System-Provided:')[1].split('No user-defined env variables have been set')[0]
config.formattedJson = "[" + systemProvidedVars.replace("\n","").replace("'","").replace("}{","},{") + "]"
#print ("formattedJson=" + config.formattedJson)
def addUAAUser(config, userId , password, email,adminToken):
createUserBody = {"userName":"","password":"","emails":[{"value":""}]}
createUserBody["userName"] = userId
createUserBody["password"] = password
createUserBody["emails"][0]['value'] = email
createUserBodyStr = json.dumps(createUserBody)
print(createUserBodyStr)
statementStatusJson = invokeURLJsonResponse(config.UAA_URI+"/Users", {"Content-Type": "application/json", "Authorization": adminToken}, createUserBodyStr, "")
if statementStatusJson.get('error'):
statementStatus = statementStatusJson['error']
statementStatusDesc = statementStatusJson['error_description']
else :
statementStatus = 'success'
#statementStatusDesc = statementStatusJson['id']
    if statementStatus == 'success' or 'scim_resource_already_exists' in statementStatusDesc :
        print(userId + " user is present in UAA")
    else :
        sys.exit("Error adding user " + userId + ": " + statementStatusDesc)
def invokeURLJsonResponse(url, headers, data, method):
responseCode = invokeURL(url, headers, data, method)
return json.loads(open("json_output.txt").read())
def invokeURL(url, headers1, data, method):
request = Request(url, headers=headers1)
if method :
request.get_method=lambda: method
print ("Invoking URL ----" + request.get_full_url())
print ("\tmethod ----" + request.get_method())
print ("\t" + str(request.header_items()))
print ("\tInput data=" + str(data))
responseCode = 0
try:
if data :
result = urlopen(request, data.encode('utf-8'))
else :
result = urlopen(request)
print (request.data)
with open("json_output.txt", "wb") as local_file:
local_file.write(result.read())
print ("\t*******OUTPUT**********" + open("json_output.txt").read())
responseCode = result.getcode()
print ("\tRESPONSE=" + str(responseCode))
print ("\t" + str(result.info()))
    except HTTPError as err:
        # HTTPError must be caught before URLError since it is a subclass;
        # only HTTPError carries an HTTP status code
        if err.code == 409:
            print("Resource found - continue: %s" % err)
            with open("json_output.txt", "wt") as local_file:
                local_file.write(json.dumps({'error': 'Resource found - continue', 'errorCode': err.code, 'error_description': 'Resource found - continue'}))
            print("\t*******OUTPUT**********" + open("json_output.txt").read())
            responseCode = err.code
        elif err.code == 404:
            print("Resource not found - continue with create: %s" % err)
            with open("json_output.txt", "wt") as local_file:
                local_file.write(json.dumps({'error': 'Resource not found - continue', 'errorCode': err.code, 'error_description': 'Resource not found - continue'}))
            print("\t*******OUTPUT**********" + open("json_output.txt").read())
            responseCode = err.code
        else :
            print("Error: %s" % err)
            sys.exit()
    except URLError as err:
        # no HTTP status code available (e.g. connection failure)
        print("Error: %s" % err)
        sys.exit()
print ("\tInvoking URL Complete----" + request.get_full_url())
print ("\tInvoking URL Complete with response code" + str(responseCode))
return responseCode
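# Usage sketch for the two helpers above (URL, headers and token are
# placeholders): invokeURLJsonResponse() performs the request via
# invokeURL(), which writes the raw response to json_output.txt, and then
# parses that file as JSON.
#
#   headers = {"Content-Type": "application/json", "Authorization": adminToken}
#   body = invokeURLJsonResponse(config.UAA_URI + "/Users", headers, "", "GET")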
def createClientIdAndAddUser(config):
# setup the UAA login
adminToken = processUAAClientId(config,config.UAA_URI+"/oauth/clients","POST")
# Add users
print("****************** Adding users ******************")
addUAAUser(config, config.rmdUser1 , config.rmdUser1Pass, config.rmdUser1 + "@gegrctest.com",adminToken)
addUAAUser(config, config.rmdAdmin1 , config.rmdAdmin1Pass, config.rmdAdmin1 + "@gegrctest.com",adminToken)
def createBindPredixACSService(config, rmdAcsName):
acs_payload_filename = 'acs_payload.json'
data = {}
data['trustedIssuerIds'] = config.uaaIssuerId
with open(acs_payload_filename, 'w') as outfile:
json.dump(data, outfile)
outfile.close()
#create UAA instance
acsJsonrequest = "cf cs "+config.predixAcsService+" "+config.predixAcsServicePlan +" "+rmdAcsName+ " -c "+ os.getcwd()+'/'+ acs_payload_filename
print(acsJsonrequest)
statementStatus = subprocess.call(acsJsonrequest, shell=True)
if statementStatus == 1 :
sys.exit("Error creating a uaa service instance")
statementStatus = subprocess.call("cf bs "+config.predixbootAppName +" " + rmdAcsName , shell=True)
if statementStatus == 1 :
sys.exit("Error binding a uaa service instance to boot ")
return statementStatus
def createGroup(config, adminToken,policyGrp):
print("****************** Add Group ******************")
createGroupBody = {"displayName":""}
createGroupBody["displayName"] = policyGrp
createGroupBodyStr = json.dumps(createGroupBody)
print(createGroupBodyStr)
statementStatusJson = invokeURLJsonResponse(config.UAA_URI+"/Groups", {"Content-Type": "application/json", "Authorization": adminToken}, createGroupBodyStr, "")
if statementStatusJson.get('error'):
statementStatus = statementStatusJson['error']
statementStatusDesc = statementStatusJson['error_description']
else :
statementStatus = 'success'
statementStatusDesc = 'success'
    if statementStatus == 'success' or 'scim_resource_exists' in statementStatusDesc :
        print("Success creating or reusing the group")
    else :
        sys.exit("Error adding group on UAA " + statementStatusDesc)
def getGroupOrUserByDisplayName(uri, adminToken):
getResponseJson=invokeURLJsonResponse(uri, {"Content-Type": "application/json", "Authorization": adminToken}, "", "")
found = True
statementStatus = 'success'
    if (getResponseJson.get('totalResults') or 0) <= 0 :
statementStatus = 'not found'
found = False
return found, getResponseJson
def getGroup(config, adminToken ,grpname):
return getGroupOrUserByDisplayName(config.UAA_URI+ "/Groups/?filter=displayName+eq+%22" + grpname + "%22&startIndex=1", adminToken)
def getUserbyDisplayName(config, adminToken ,username):
return getGroupOrUserByDisplayName(config.UAA_URI+ "/Users/?attributes=id%2CuserName&filter=userName+eq+%22" + username + "%22&startIndex=1", adminToken)
def addAdminUserPolicyGroup(config, policyGrp,userName):
adminToken = getTokenFromUAA(config, 1)
if not adminToken :
sys.exit("Error getting admin token from the UAA instance ")
#check Get Group
groupFound,groupJson = getGroup(config, adminToken,policyGrp)
if not groupFound :
createGroup(config,adminToken,policyGrp)
groupFound,groupJson = getGroup(config, adminToken,policyGrp)
userFound,userJson = getUserbyDisplayName(config,adminToken,userName)
if not userFound :
sys.exit(" User is not found in the UAA - error adding member to the group")
members = []
if groupJson.get('resources') :
        grpName = groupJson['resources'][0]
        if grpName.get('members') :
            groupMemberList = grpName.get('members')
            for groupMember in groupMemberList:
                members.insert(0, groupMember['value'])
    members.insert(0, userJson['resources'][0]['id'])
    print('Members to be updated for the group: ' + ','.join(members))
#update Group
groupId = groupJson['resources'][0]['id']
updateGroupBody = { "meta": {}, "schemas": [],"members": [],"id": "","displayName": ""}
updateGroupBody["meta"] = groupJson['resources'][0]['meta']
updateGroupBody["members"] = members
updateGroupBody["displayName"] = groupJson['resources'][0]['displayName']
updateGroupBody["schemas"] = groupJson['resources'][0]['schemas']
updateGroupBody["id"] = groupId
updateGroupBodyStr = json.dumps(updateGroupBody)
uuaGroupURL = config.UAA_URI + "/Groups/"+groupId
statementStatusJson = invokeURLJsonResponse(uuaGroupURL, {"Content-Type": "application/json", "Authorization": "%s" %adminToken, "if-match" : "*", "accept" : "application/json"}, updateGroupBodyStr, "PUT")
if statementStatusJson.get('error'):
statementStatus = statementStatusJson['error']
statementStatusDesc = statementStatusJson['error_description']
else :
statementStatus = 'success'
statementStatusDesc = 'success'
    if statementStatus == 'success' or 'Client already exists' in statementStatusDesc :
        print("Successfully added " + userName + " to the group " + policyGrp)
    else :
        sys.exit("Error adding " + userName + " to the group " + policyGrp + " statementStatusDesc=" + statementStatusDesc)
def updateUserACS(config):
addAdminUserPolicyGroup(config, "acs.policies.read",config.rmdAdmin1)
addAdminUserPolicyGroup(config, "acs.policies.write",config.rmdAdmin1)
addAdminUserPolicyGroup(config, "acs.attributes.read",config.rmdAdmin1)
addAdminUserPolicyGroup(config, "acs.attributes.write",config.rmdAdmin1)
addAdminUserPolicyGroup(config, "acs.policies.read",config.rmdUser1)
addAdminUserPolicyGroup(config, "acs.attributes.read",config.rmdUser1)
def processUAAClientId (config,uuaClientURL,method):
adminToken = getTokenFromUAA(config, 1)
if not adminToken :
sys.exit("Error getting admin token from the UAA instance ")
print(config.clientScope)
print(config.clientScopeList)
createClientIdBody = {"client_id":"","client_secret":"","scope":[],"authorized_grant_types":[],"authorities":[],"autoapprove":["openid"]}
createClientIdBody["client_id"] = config.rmdAppClientId
createClientIdBody["client_secret"] = config.rmdAppSecret
createClientIdBody["scope"] = config.clientScopeList
createClientIdBody["authorized_grant_types"] = config.clientGrantType
createClientIdBody["authorities"] = config.clientAuthoritiesList
createClientIdBodyStr = json.dumps(createClientIdBody)
print("****************** Creating client id ******************")
# check if the client exists
uaaClientResponseJson = invokeURLJsonResponse(config.UAA_URI+"/oauth/clients/"+config.rmdAppClientId, {"Content-Type": "application/json", "Authorization": adminToken}, '', 'GET')
print("reponse from get client "+str(uaaClientResponseJson))
if uaaClientResponseJson.get('error'):
        # failure since the client does not exist, create the client
uaaClientResponseJson = invokeURLJsonResponse(uuaClientURL, {"Content-Type": "application/json", "Authorization": adminToken}, createClientIdBodyStr, method)
if uaaClientResponseJson.get('error'):
statementStatus = uaaClientResponseJson['error']
statementStatusDesc = uaaClientResponseJson['error_description']
else :
statementStatus = 'success'
statementStatusDesc = 'success'
else :
statementStatus = 'success'
statementStatusDesc = 'success'
if statementStatus == 'success' or 'Client already exists' in statementStatusDesc :
print("Success creating or reusing the Client Id")
# setting client details on config
config.clientScopeList=uaaClientResponseJson.get('scope')
config.clientGrantType=uaaClientResponseJson.get('authorized_grant_types')
config.clientAuthoritiesList=uaaClientResponseJson.get('authorities')
else :
sys.exit("Error Processing ClientId on UAA "+statementStatusDesc )
return adminToken
def updateClientIdAuthorities(config):
adminToken = getTokenFromUAA(config, 1)
if not adminToken :
sys.exit("Error getting admin token from the UAA instance ")
print(config.clientScope)
print(config.clientScopeList)
createClientIdBody = {"client_id":"","client_secret":"","scope":[],"authorized_grant_types":[],"authorities":[],"autoapprove":["openid"]}
createClientIdBody["client_id"] = config.rmdAppClientId
createClientIdBody["client_secret"] = config.rmdAppSecret
createClientIdBody["scope"] = config.clientScopeList
createClientIdBody["authorized_grant_types"] = config.clientGrantType
createClientIdBody["authorities"] = config.clientAuthoritiesList
createClientIdBodyStr = json.dumps(createClientIdBody)
print("****************** Updating client id ******************")
uaaClientResponseJson = invokeURLJsonResponse(config.UAA_URI+"/oauth/clients/"+config.rmdAppClientId, {"Content-Type": "application/json", "Authorization": adminToken}, createClientIdBodyStr, "PUT")
if uaaClientResponseJson.get('error'):
statementStatus = uaaClientResponseJson['error']
statementStatusDesc = uaaClientResponseJson['error_description']
else :
statementStatus = 'success'
statementStatusDesc = 'success'
#processUAAClientId(config,config.UAA_URI+"/oauth/clients/"+config.rmdAppClientId,"PUT")
def getTokenFromUAA(config, isAdmin):
realmStr=""
if isAdmin == 1:
realmStr = "admin:"+config.uaaAdminSecret
else :
realmStr = config.rmdAppClientId+":"+config.rmdAppSecret
authKey = base64.b64encode(bytearray(realmStr, 'UTF-8')).decode("ascii")
queryClientCreds= "grant_type=client_credentials"
getClientTokenResponseJson=invokeURLJsonResponse(config.uaaIssuerId + "?" + queryClientCreds, {"Content-Type": "application/x-www-form-urlencoded", "Authorization": "Basic %s" % authKey}, "", "")
print("Client Token is "+getClientTokenResponseJson['token_type']+" "+getClientTokenResponseJson['access_token'])
return (getClientTokenResponseJson['token_type']+" "+getClientTokenResponseJson['access_token'])
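# The token retrieval above follows the OAuth2 client_credentials grant: the
# client id and secret are Base64-encoded into an HTTP Basic Authorization
# header and exchanged at the UAA issuer endpoint for a bearer access token.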
def createRefAppACSPolicyAndSubject(config, acs_zone_header):
    adminUserToken = getTokenFromUAA(config, 0)
    acsJsonResponse = invokeURLJsonResponse(config.ACS_URI+'/v1/policy-set/'+config.acsPolicyName, {"Content-Type": "application/json", "Authorization": "%s" % adminUserToken, "Predix-Zone-Id" : "%s" % acs_zone_header}, "", "GET")
    print("ACS JSON Response " + str(acsJsonResponse))
    if acsJsonResponse.get('error'):
        statementStatusDesc = acsJsonResponse['error_description']
        statementStatus = 'not-found'
    else :
        statementStatus = 'success'
    if('not-found' == statementStatus):
        invokeURL(config.ACS_URI+'/v1/policy-set/'+config.acsPolicyName, {"Content-Type": "application/json", "Authorization": "%s" % adminUserToken, "Predix-Zone-Id" : "%s" % acs_zone_header}, open("./acs/rmd_app_policy.json").read(), "PUT")
        invokeURL(config.ACS_URI+'/v1/subject/' + config.rmdAdmin1, {"Content-Type": "application/json", "Authorization": "%s" % adminUserToken, "Predix-Zone-Id" : "%s" % acs_zone_header}, open("./acs/" + config.rmdAdmin1 + "_role_attribute.json").read(), "PUT")
        invokeURL(config.ACS_URI+'/v1/subject/' + config.rmdUser1, {"Content-Type": "application/json", "Authorization": "%s" % adminUserToken, "Predix-Zone-Id" : "%s" % acs_zone_header}, open("./acs/" + config.rmdUser1 + "_role_attribute.json").read(), "PUT")
def createAsssetInstance(config,rmdPredixAssetName ,predixAssetName ):
getPredixUAAConfigfromVcaps(config)
asset_payload_filename = 'asset_payload.json'
uaaList = [config.uaaIssuerId]
data = {}
data['trustedIssuerIds'] = uaaList
with open(asset_payload_filename, 'w') as outfile:
json.dump(data, outfile)
print(data)
outfile.close()
request = "cf cs "+predixAssetName+" "+config.predixAssetServicePlan +" "+rmdPredixAssetName+ " -c "+os.getcwd()+'/' +asset_payload_filename
print ("Creating Service cmd "+request)
statementStatus = subprocess.call(request, shell=True)
#if statementStatus == 1 :
#sys.exit("Error creating a assset service instance")
def createTimeSeriesInstance(config,rmdPredixTimeSeriesName,predixTimeSeriesName):
timeSeries_payload_filename = 'timeseries_payload.json'
uaaList = [config.uaaIssuerId]
data = {}
data['trustedIssuerIds'] =uaaList
with open(timeSeries_payload_filename, 'w') as outfile:
json.dump(data, outfile)
outfile.close()
tsJsonrequest = "cf cs "+predixTimeSeriesName+" "+config.predixTimeSeriesServicePlan +" "+rmdPredixTimeSeriesName+ " -c "+os.getcwd()+'/'+timeSeries_payload_filename
print ("Creating Service cmd "+tsJsonrequest)
statementStatus = subprocess.call(tsJsonrequest, shell=True)
    if statementStatus == 1 :
        sys.exit("Error creating a time series service instance")
def createAnalyticsRuntimeInstance(config,rmdPredixAnalyticsRuntime, predixAnalyticsRuntime):
print("Creating Analytics runtime instance..")
getPredixUAAConfigfromVcaps(config)
asset_payload_filename = 'asset_payload.json'
uaaList = [config.uaaIssuerId]
data = {}
data['trustedIssuerIds'] = uaaList
with open(asset_payload_filename, 'w') as outfile:
json.dump(data, outfile)
print(data)
outfile.close()
request = "cf cs "+predixAnalyticsRuntime+" "+config.predixAnalyticsRuntimePlan +" "+rmdPredixAnalyticsRuntime+ " -c "+os.getcwd()+'/' +asset_payload_filename
print ("Creating Service cmd "+request)
statementStatus = subprocess.call(request, shell=True)
#if statementStatus == 1 :
#sys.exit("Error creating a assset service instance")
def createAnalyticsCatalogInstance(config,rmdPredixAnalyticsCatalog, predixAnalyticsCatalog):
print("Creating Analytics catalog instance..")
getPredixUAAConfigfromVcaps(config)
asset_payload_filename = 'asset_payload.json'
uaaList = [config.uaaIssuerId]
data = {}
data['trustedIssuerIds'] = uaaList
with open(asset_payload_filename, 'w') as outfile:
json.dump(data, outfile)
print(data)
outfile.close()
request = "cf cs "+predixAnalyticsCatalog+" "+config.predixAnalyticsCatalogPlan +" "+rmdPredixAnalyticsCatalog+ " -c "+os.getcwd()+'/' +asset_payload_filename
print ("Creating Service cmd "+request)
statementStatus = subprocess.call(request, shell=True)
#if statementStatus == 1 :
#sys.exit("Error creating a assset service instance")
def createRabbitMQInstance(config):
print("Creating Rabbit MQ instance..")
request = "cf cs "+config.predixRabbitMQ+" "+config.predixRabbitMQPlan +" "+config.rmdRabbitMQ
print ("Creating Service cmd "+request)
statementStatus = subprocess.call(request, shell=True)
#if statementStatus == 1 :
#sys.exit("Error creating a assset service instance")
def getPredixUAAConfigfromVcaps(config):
if not hasattr(config,'uaaIssuerId') :
getVcapJsonForPredixBoot(config)
d = json.loads(config.formattedJson)
config.uaaIssuerId = d[0]['VCAP_SERVICES'][config.predixUaaService][0]['credentials']['issuerId']
config.UAA_URI = d[0]['VCAP_SERVICES'][config.predixUaaService][0]['credentials']['uri']
uaaZoneHttpHeaderName = d[0]['VCAP_SERVICES'][config.predixUaaService][0]['credentials']['zone']['http-header-name']
uaaZoneHttpHeaderValue = d[0]['VCAP_SERVICES'][config.predixUaaService][0]['credentials']['zone']['http-header-value']
print("****************** UAA configured As ******************")
print ("\n uaaIssuerId = " + config.uaaIssuerId + "\n UAA_URI = " + config.UAA_URI + "\n "+uaaZoneHttpHeaderName+" = " +uaaZoneHttpHeaderValue+"\n")
print("****************** ***************** ******************")
def getPredixACSConfigfromVcaps(config):
if not hasattr(config,'ACS_URI') :
getVcapJsonForPredixBoot(config)
d = json.loads(config.formattedJson)
config.ACS_URI = d[0]['VCAP_SERVICES'][config.predixAcsService][0]['credentials']['uri']
config.acsPredixZoneHeaderName = d[0]['VCAP_SERVICES'][config.predixAcsService][0]['credentials']['zone']['http-header-name']
config.acsPredixZoneHeaderValue = d[0]['VCAP_SERVICES'][config.predixAcsService][0]['credentials']['zone']['http-header-value']
config.acsOauthScope = d[0]['VCAP_SERVICES'][config.predixAcsService][0]['credentials']['zone']['oauth-scope']
def bindService(applicationName , rmdServiceInstanceName):
statementStatus = subprocess.call("cf bs "+applicationName +" " + rmdServiceInstanceName , shell=True)
if statementStatus == 1 :
sys.exit("Error binding a "+rmdServiceInstanceName+" service instance to boot ")
def restageApplication(applicationName):
statementStatus = subprocess.call("cf restage "+applicationName, shell=True)
if statementStatus == 1 :
sys.exit("Error restaging a uaa service instance to boot")
def getAnalyticsRuntimeURLandZone(config):
if not hasattr(config,'ANALYTICRUNTIME_ZONE') :
print("parsing analytics runtime zone and uri from vcap")
analyticsRuntimeUri = ''
analyticsRuntimeZone = ''
d = json.loads(config.formattedJson)
analyticsRuntimeZone = d[0]['VCAP_SERVICES'][config.predixAnalyticsRuntime][0]['credentials']['zone-http-header-value']
analyticsRuntimeUri = d[0]['VCAP_SERVICES'][config.predixAnalyticsRuntime][0]['credentials']['execution_uri']
if "https" in analyticsRuntimeUri:
config.ANALYTICRUNTIME_URI = analyticsRuntimeUri.split('https://')[1].strip()
else :
config.ANALYTICRUNTIME_URI = analyticsRuntimeUri.split('http://')[1].strip()
config.ANALYTICRUNTIME_ZONE = analyticsRuntimeZone
def getAnalyticsCatalogURLandZone(config):
if not hasattr(config,'CATALOG_ZONE') :
catalogUri = ''
catalogZone = ''
d = json.loads(config.formattedJson)
catalogZone = d[0]['VCAP_SERVICES'][config.predixAnalyticsCatalog][0]['credentials']['zone-http-header-value']
catalogUri = d[0]['VCAP_SERVICES'][config.predixAnalyticsCatalog][0]['credentials']['catalog_uri']
if "https" in catalogUri:
config.CATALOG_URI = catalogUri.split('https://')[1].strip()
else :
config.CATALOG_URI = catalogUri.split('http://')[1].strip()
config.CATALOG_ZONE = catalogZone
def getAssetURLandZone(config):
if not hasattr(config,'ASSET_ZONE') :
assetUrl = ''
assetZone =''
d = json.loads(config.formattedJson)
assetZone = d[0]['VCAP_SERVICES'][config.predixAssetService][0]['credentials']['instanceId']
assetUrl = d[0]['VCAP_SERVICES'][config.predixAssetService][0]['credentials']['uri']
config.ASSET_ZONE = assetZone
config.ASSET_URI = assetUrl
def getTimeseriesURLandZone(config):
if not hasattr(config,'TS_ZONE') :
timeseriesUrl = ''
timeseriesZone =''
d = json.loads(config.formattedJson)
timeseriesZone = d[0]['VCAP_SERVICES'][config.predixTimeSeriesService][0]['credentials']['query']['zone-http-header-value']
timeseriesUrl = d[0]['VCAP_SERVICES'][config.predixTimeSeriesService][0]['credentials']['query']['uri']
config.TS_ZONE = timeseriesZone
config.TS_URI = timeseriesUrl
def getClientAuthoritiesforAssetAndTimeSeriesService(config):
d = json.loads(config.formattedJson)
config.assetScopes = config.predixAssetService+".zones."+d[0]['VCAP_SERVICES'][config.predixAssetService][0]['credentials']['instanceId']+".user"
#get Ingest authorities
tsInjest = d[0]['VCAP_SERVICES'][config.predixTimeSeriesService][0]['credentials']['ingest']
config.timeSeriesInjestScopes = tsInjest['zone-token-scopes'][0] +"," + tsInjest['zone-token-scopes'][1]
# get query authorities
tsQuery = d[0]['VCAP_SERVICES'][config.predixTimeSeriesService][0]['credentials']['query']
config.timeSeriesQueryScopes = tsQuery['zone-token-scopes'][0] +"," + tsQuery['zone-token-scopes'][1]
if hasattr(config,'ANALYTICRUNTIME_ZONE') :
config.analyticRuntimeScopes = "analytics.zones." + config.ANALYTICRUNTIME_ZONE + ".user"
#config.catalogScopes = "analytics.zones." + config.CATALOG_ZONE + ".user"
config.clientAuthoritiesList.append(config.assetScopes)
config.clientAuthoritiesList.append(config.timeSeriesInjestScopes)
config.clientAuthoritiesList.append(config.timeSeriesQueryScopes)
if hasattr(config,'analyticRuntimeScopes') :
config.clientAuthoritiesList.append(config.analyticRuntimeScopes)
#config.clientAuthoritiesList.append(config.catalogScopes)
config.clientScopeList.append(config.assetScopes)
config.clientScopeList.append(config.timeSeriesInjestScopes)
config.clientScopeList.append(config.timeSeriesQueryScopes)
if hasattr(config,'analyticRuntimeScopes') :
config.clientScopeList.append(config.analyticRuntimeScopes)
#config.clientScopeList.append(config.catalogScopes)
print ("returning timeseries client zone scopes query -->"+config.timeSeriesQueryScopes + " timeSeriesInjestAuthorities -->"+config.timeSeriesInjestScopes )
def updateUAAUserGroups(config, serviceGroups):
groups = serviceGroups.split(",")
#print (groups)
for group in groups:
#print (group)
        addAdminUserPolicyGroup(config, group, config.rmdAdmin1)
        addAdminUserPolicyGroup(config, group, config.rmdUser1)
def findRedisService(config):
#setup Redis
result = []
process = subprocess.Popen('cf m',
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE )
for line in process.stdout:
result.append(line)
errcode = process.returncode
#print (errcode)
search_redis = config.predixRedis
for line in result:
line1 = line.decode('utf-8')
if(line1.find(search_redis) > -1):
#print(line)
config.predixRedis = line1.split()[0].strip()
print ("Setting Redis config.predixRedis as ")
print (config.predixRedis)
def getAuthorities(config):
if not hasattr(config,'clientAuthoritiesList') :
config.clientAuthoritiesList = list(config.clientAuthorities)
config.clientScopeList = list(config.clientScope)
def updateClientAuthoritiesACS(config):
getPredixACSConfigfromVcaps(config)
# get ACS scopes
config.clientAuthoritiesList.append(config.acsOauthScope)
config.clientScopeList.append(config.acsOauthScope)
    # merge with existing client
config.clientAuthoritiesList = config.clientAuthorities + list(set(config.clientAuthoritiesList) - set(config.clientAuthorities))
config.clientScopeList = config.clientScope + list(set(config.clientScopeList) - set(config.clientScope))
|
|
# -*- coding: utf-8 -*-
"""
Parse, stream, create, sign and verify Bitcoin transactions as Tx structures.
The MIT License (MIT)
Copyright (c) 2013 by Richard Kiss
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import functools
import logging
from ...intbytes import byte_to_int, int_to_bytes
from . import opcodes
from . import ScriptError
from .check_signature import op_checksig, op_checkmultisig
from .flags import (VERIFY_P2SH, VERIFY_DISCOURAGE_UPGRADABLE_NOPS, VERIFY_MINIMALDATA,
VERIFY_SIGPUSHONLY, VERIFY_CHECKLOCKTIMEVERIFY, VERIFY_CLEANSTACK)
from .microcode import MICROCODE_LOOKUP
from .tools import get_opcode, bin_script, bool_from_script_bytes, int_from_script_bytes
logger = logging.getLogger(__name__)
VERIFY_OPS = frozenset((opcodes.OPCODE_TO_INT[s] for s in (
"OP_NUMEQUALVERIFY OP_EQUALVERIFY OP_CHECKSIGVERIFY OP_VERIFY OP_CHECKMULTISIGVERIFY".split())))
INVALID_OPCODE_VALUES = frozenset((opcodes.OPCODE_TO_INT[s] for s in (
"OP_CAT OP_SUBSTR OP_LEFT OP_RIGHT OP_INVERT OP_AND OP_OR OP_XOR OP_2MUL OP_2DIV OP_MUL "
"OP_DIV OP_MOD OP_LSHIFT OP_RSHIFT OP_VERIF OP_VERNOTIF".split())))
NOP_SET = frozenset((opcodes.OPCODE_TO_INT[s] for s in (
"OP_NOP1 OP_NOP3 OP_NOP4 OP_NOP5 OP_NOP6 OP_NOP7 OP_NOP8 OP_NOP9 OP_NOP10".split())))
class Stack(list):
def pop(self, *args, **kwargs):
try:
return super(Stack, self).pop(*args, **kwargs)
except IndexError:
raise ScriptError("pop from empty stack")
def __getitem__(self, *args, **kwargs):
try:
return super(Stack, self).__getitem__(*args, **kwargs)
except IndexError:
raise ScriptError("getitem out of range")
def verify_minimal_data(opcode, data):
ld = len(data)
if ld == 0 and opcode == opcodes.OP_0:
return
if ld == 1:
v = byte_to_int(data[0])
if v == 0x81:
if opcode == opcodes.OP_1NEGATE:
return
elif v == 0 or v > 16:
return
elif v == (opcode - 1 + opcodes.OP_1):
return
if 1 < ld < 0x4c and opcode == ld:
return
if 0x4c <= ld < 256 and opcode == opcodes.OP_PUSHDATA1:
return
if 256 < ld < 65536 and opcode == opcodes.OP_PUSHDATA2:
return
raise ScriptError("not minimal push of %s" % repr(data))
def eval_script(script, signature_for_hash_type_f, lock_time, expected_hash_type=None, stack=[],
disallow_long_scripts=True, traceback_f=None, is_signature=False, flags=0):
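    # note: `stack` has a mutable default; callers such as verify_script()
    # pass an explicit Stack instance, and reusing the shared default list
    # across calls would leak state between evaluations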
altstack = Stack()
if disallow_long_scripts and len(script) > 10000:
return False
pc = 0
begin_code_hash = pc
if_condition_stack = []
op_count = 0
    require_minimal = flags & VERIFY_MINIMALDATA
try:
while pc < len(script):
old_pc = pc
opcode, data, pc = get_opcode(script, pc)
if traceback_f:
traceback_f(old_pc, opcode, data, stack, altstack, if_condition_stack, is_signature)
# deal with if_condition_stack first
all_if_true = functools.reduce(lambda x, y: x and y, if_condition_stack, True)
if opcode > opcodes.OP_16:
op_count += 1
if op_count > 201:
raise ScriptError("script contains too many operations")
if len(stack) + len(altstack) > 1000:
raise ScriptError("stack has > 1000 items")
if opcode in INVALID_OPCODE_VALUES:
raise ScriptError("invalid opcode %s at %d" % (
opcodes.INT_TO_OPCODE.get(opcode, hex(opcode)), pc-1))
if data and len(data) > 520 and disallow_long_scripts:
raise ScriptError("pushing too much data onto stack")
if len(if_condition_stack):
if opcode == opcodes.OP_ELSE:
if_condition_stack[-1] = not if_condition_stack[-1]
continue
if opcode == opcodes.OP_ENDIF:
if_condition_stack.pop()
continue
if not all_if_true and not (opcodes.OP_IF <= opcode <= opcodes.OP_ENDIF):
continue
if opcode in (opcodes.OP_IF, opcodes.OP_NOTIF):
v = False
if all_if_true:
v = bool_from_script_bytes(stack.pop())
if opcode == opcodes.OP_NOTIF:
v = not v
if_condition_stack.append(v)
continue
if opcode > 76 and opcode not in opcodes.INT_TO_OPCODE:
raise ScriptError("invalid opcode %s at %d" % (
opcodes.INT_TO_OPCODE.get(opcode, hex(opcode)), pc-1))
if (flags & VERIFY_DISCOURAGE_UPGRADABLE_NOPS) and opcode in NOP_SET:
raise ScriptError("discouraging nops")
if data is not None:
if require_minimal:
verify_minimal_data(opcode, data)
stack.append(data)
continue
if opcode == opcodes.OP_CODESEPARATOR:
begin_code_hash = pc - 1
continue
if opcode in MICROCODE_LOOKUP:
f = MICROCODE_LOOKUP[opcode]
if f.require_minimal:
f(stack, require_minimal=require_minimal)
else:
f(stack)
if opcode in VERIFY_OPS:
v = bool_from_script_bytes(stack.pop())
if not v:
raise ScriptError("VERIFY failed at %d" % (pc-1))
continue
if opcode == opcodes.OP_TOALTSTACK:
altstack.append(stack.pop())
continue
if opcode == opcodes.OP_FROMALTSTACK:
stack.append(altstack.pop())
continue
if opcode == opcodes.OP_1NEGATE:
stack.append(b'\x81')
continue
if opcode > opcodes.OP_1NEGATE and opcode <= opcodes.OP_16:
stack.append(int_to_bytes(opcode + 1 - opcodes.OP_1))
continue
if opcode in (opcodes.OP_ELSE, opcodes.OP_ENDIF):
raise ScriptError("%s without OP_IF" % opcodes.INT_TO_OPCODE[opcode])
if opcode in (opcodes.OP_CHECKSIG, opcodes.OP_CHECKSIGVERIFY):
# Subset of script starting at the most recent codeseparator
op_checksig(stack, signature_for_hash_type_f, expected_hash_type, script[begin_code_hash:],
flags)
if opcode == opcodes.OP_CHECKSIGVERIFY:
if not bool_from_script_bytes(stack.pop()):
raise ScriptError("VERIFY failed at %d" % (pc-1))
continue
if opcode in (opcodes.OP_CHECKMULTISIG, opcodes.OP_CHECKMULTISIGVERIFY):
# Subset of script starting at the most recent codeseparator
n_ops = op_checkmultisig(
stack, signature_for_hash_type_f, expected_hash_type, script[begin_code_hash:], flags)
op_count += n_ops
if op_count > 201:
raise ScriptError("script contains too many operations")
if opcode == opcodes.OP_CHECKLOCKTIMEVERIFY:
if not (flags & VERIFY_CHECKLOCKTIMEVERIFY):
if (flags & VERIFY_DISCOURAGE_UPGRADABLE_NOPS):
raise ScriptError("discouraging nops")
continue
if lock_time is None:
raise ScriptError("nSequence equal to 0xffffffff")
if len(stack) < 1:
raise ScriptError("empty stack on CHECKLOCKTIMEVERIFY")
max_lock_time = int_from_script_bytes(stack.pop())
if max_lock_time < 0:
raise ScriptError("top stack item negative on CHECKLOCKTIMEVERIFY")
era_max = (max_lock_time >= 500000000)
era_lock_time = (lock_time >= 500000000)
if era_max != era_lock_time:
raise ScriptError("eras differ in CHECKLOCKTIMEVERIFY")
if max_lock_time > lock_time:
raise ScriptError("nLockTime too soon")
continue
            # Only VERIFY ops that were not consumed by an earlier branch (e.g.
            # OP_CHECKMULTISIGVERIFY, which falls through without a "continue")
            # reach this generic VERIFY check.
            if opcode in VERIFY_OPS:
                v = stack.pop()
                if not bool_from_script_bytes(v):
                    raise ScriptError("VERIFY failed at %d" % (pc - 1))
except Exception:
logger.exception("script failed for unknown reason")
raise
if len(if_condition_stack):
raise ScriptError("missing ENDIF")
if len(stack) + len(altstack) > 1000:
raise ScriptError("stack has > 1000 items")
return len(stack) != 0
def check_script_push_only(script):
pc = 0
while pc < len(script):
opcode, data, pc = get_opcode(script, pc)
if opcode > opcodes.OP_16:
raise ScriptError("signature has non-push opcodes")
def is_pay_to_script_hash(script_public_key):
return (len(script_public_key) == 23 and byte_to_int(script_public_key[0]) == opcodes.OP_HASH160 and
byte_to_int(script_public_key[-1]) == opcodes.OP_EQUAL)
def verify_script(script_signature, script_public_key, signature_for_hash_type_f, lock_time,
flags=None, expected_hash_type=None, traceback_f=None):
stack = Stack()
is_p2h = is_pay_to_script_hash(script_public_key)
if flags is None:
flags = VERIFY_P2SH
if flags & VERIFY_SIGPUSHONLY:
check_script_push_only(script_signature)
try:
eval_script(script_signature, signature_for_hash_type_f, lock_time, expected_hash_type,
stack, traceback_f=traceback_f, flags=flags, is_signature=True)
if is_p2h and (flags & VERIFY_P2SH):
signatures, alt_script_public_key = stack[:-1], stack[-1]
alt_script_signature = bin_script(signatures)
eval_script(script_public_key, signature_for_hash_type_f, lock_time, expected_hash_type,
stack, traceback_f=traceback_f, flags=flags, is_signature=False)
except ScriptError:
return False
if is_p2h and bool_from_script_bytes(stack[-1]) and (flags & VERIFY_P2SH):
check_script_push_only(script_signature)
return verify_script(alt_script_signature, alt_script_public_key, signature_for_hash_type_f,
lock_time, flags & ~VERIFY_P2SH, expected_hash_type=expected_hash_type,
traceback_f=traceback_f)
if flags & VERIFY_CLEANSTACK and len(stack) != 1:
raise ScriptError("stack not clean after evaulation")
return len(stack) > 0 and bool_from_script_bytes(stack[-1])
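# Usage sketch with hypothetical names (the transaction machinery is assumed,
# not defined in this module): to validate input `i` of a parsed transaction
# `tx` spending the previous output `prev_out`, one would call roughly
#
#     ok = verify_script(tx.txs_in[i].script, prev_out.script,
#                        tx.signature_for_hash_type_f(i),
#                        lock_time=tx.lock_time, flags=VERIFY_P2SH)
#
# verify_script returns False on any ScriptError; otherwise it reports whether
# the final stack top is truthy (with an extra P2SH pass when applicable).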
|
|
#!/usr/bin/env python
import argparse
import logging
import subprocess
import sys
import os
import re
# Set logging
logging.basicConfig(filename="smrtsv.log", level=logging.DEBUG)
# Set cluster parameters
CLUSTER_SETTINGS = ' -V -cwd -e ./log -o ./log {cluster.params} -w n -S /bin/bash'
CLUSTER_FLAG = ("--drmaa", CLUSTER_SETTINGS, "-w", "60")
# Set up the environment for executing commands
PROCESS_ENV = os.environ.copy()
# Prepend to PROCESS_ENV["PATH"]
INSTALL_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
INSTALL_PATH = [ # List of paths relative to INSTALL_DIR to be added to the environment $PATH
"bin",
"dist/miniconda/envs/python2/bin",
"dist/miniconda/envs/python3/bin",
"dist/miniconda/bin",
"dist/celera/wgs-8.3rc2/Linux-amd64/bin/",
"dist/amos-3.1.0/bin",
"canu/Linux-amd64/bin"
]
PROCESS_ENV_PATH = ":".join([os.path.join(INSTALL_DIR, THIS_PATH) for THIS_PATH in INSTALL_PATH])
if "PATH" in PROCESS_ENV:
PROCESS_ENV["PATH"] = PROCESS_ENV_PATH + ":" + PROCESS_ENV["PATH"]
else:
PROCESS_ENV["PATH"] = PROCESS_ENV_PATH
# Prepend to PROCESS_ENV["LD_LIBRARY_PATH"]
INSTALL_LD_PATH = [
"dist/hdf5/lib"
]
PROCESS_ENV_LD_PATH = ":".join([os.path.join(INSTALL_DIR, THIS_PATH) for THIS_PATH in INSTALL_LD_PATH])
if "LD_LIBRARY_PATH" in PROCESS_ENV:
PROCESS_ENV["LD_LIBRARY_PATH"] = PROCESS_ENV_LD_PATH + ":" + PROCESS_ENV["LD_LIBRARY_PATH"]
else:
PROCESS_ENV["LD_LIBRARY_PATH"] = PROCESS_ENV_LD_PATH
os.environ["LD_LIBRARY_PATH"] = PROCESS_ENV["LD_LIBRARY_PATH"]
# Function definitions
def _get_dist_dir():
dirname, filename = os.path.split(os.path.abspath(__file__))
return dirname
def _run_cmd(args):
"""
Run a command with the proper environment set.
:param args: A tuple of arguments starting with the command name.
:return: Return code or -1 if the process did not complete.
"""
sys.stdout.flush()
p = subprocess.Popen(args, env=PROCESS_ENV)
p.wait()
ret_code = p.returncode
return ret_code if ret_code is not None else -1
def _run_snake_target(args, *cmd):
"""
Run a snakemake target.
:param args: Arguments processed from the command line.
:param cmd: The command to run as a tuple starting with the name of the snakemake target.
:return: Return code from snakemake.
"""
# Use the user-defined cluster config path if one is given. Otherwise, use
# an empty config that comes with the SMRT-SV distribution.
if args.cluster_config is not None:
cluster_config_path = args.cluster_config
else:
cluster_config_path = os.path.join(os.path.dirname(_get_dist_dir()), "cluster.template.json")
    # Set up the snakemake command
prefix = [
"snakemake",
"-T",
"--rerun-incomplete",
"--cluster-config", cluster_config_path,
"--snakefile", os.path.join(os.path.dirname(_get_dist_dir()), "Snakefile"),
"-j", str(args.jobs)
]
if args.dryrun:
prefix.append("-n")
if args.distribute:
prefix.extend(CLUSTER_FLAG)
# Append command
prefix.extend(cmd)
# Append path and ld_path
prefix.extend([
"ld_path=%s" % PROCESS_ENV["LD_LIBRARY_PATH"],
"path=%s" % PROCESS_ENV["PATH"]
])
# Report (verbose)
if args.verbose:
print("Running snakemake command: %s" % " ".join(prefix))
# Run snakemake command
return _run_cmd(prefix)
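# For illustration (paths abbreviated): with --jobs 4 and --distribute, the
# assembled invocation resembles
#
#   snakemake -T --rerun-incomplete --cluster-config <config.json> \
#       --snakefile <SMRT-SV dir>/Snakefile -j 4 --drmaa ' -V -cwd ... ' -w 60 \
#       <target> --config key=value ... ld_path=<LD_LIBRARY_PATH> path=<PATH>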
def index(args):
return _run_snake_target(
args,
"prepare_reference",
"--config",
"reference=%s" % args.reference
)
def align(args):
return _run_snake_target(
args,
"align_reads",
"--config",
"reference=%s" % args.reference,
"reads=%s" % args.reads,
"alignments=%s" % args.alignments,
"alignments_dir=%s" % args.alignments_dir,
"batches=%s" % args.batches,
"threads=%s" % args.threads,
"tmp_dir=%s" % args.tmpdir,
"alignment_parameters=\"%s\"" % args.alignment_parameters
)
def detect(args):
"""
Detect SVs from signatures in read alignments.
"""
# Find candidate regions in alignments.
sys.stdout.write("Searching for candidate regions\n")
command = (
"get_regions",
"--config",
"reference=%s" % args.reference,
"alignments=%s" % args.alignments,
"assembly_window_size=%s" % args.assembly_window_size,
"assembly_window_slide=%s" % args.assembly_window_slide,
"min_length=%s" % args.min_length,
"min_support=%s" % args.min_support,
"max_support=%s" % args.max_support,
"min_coverage=%s" % args.min_coverage,
"max_coverage=%s" % args.max_coverage,
"min_hardstop_support=%s" % args.min_hardstop_support,
"max_candidate_length=%s" % args.max_candidate_length
)
if args.exclude:
command = command + ("regions_to_exclude=%s" % args.exclude,)
if args.candidates:
command = command + ("candidates=%s" % args.candidates,)
return _run_snake_target(args, *command)
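# CLI sketch (file names are placeholders): detection alone can be run as
#
#   smrtsv.py detect reference.fasta alignments.fofn candidates.bed --min_support 5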
def assemble(args):
"""
Assemble candidate regions from raw reads aligned to regions.
"""
# Generate local assemblies across the genome.
sys.stdout.write("Starting local assemblies\n")
base_command = (
"collect_assembly_alignments",
"--config",
"reference=%s" % args.reference,
"alignments=%s" % args.alignments,
"reads=%s" % args.reads,
"tmp_dir=%s" % args.tmpdir,
"alignment_parameters=\"%s\"" % args.alignment_parameters,
"mapping_quality=\"%s\"" % args.mapping_quality,
"minutes_to_delay_jobs=\"%s\"" % args.minutes_to_delay_jobs,
"assembly_log=\"%s\"" % args.assembly_log
)
if args.candidates:
# For each contig/chromosome in the candidates file, submit a separate
# Snakemake command. To do so, first split regions to assemble into one
# file per contig in a temporary directory.
tmpdir = os.path.join(os.getcwd(), "regions_by_contig")
rebuild_regions_by_contig = False
if not args.dryrun and (not os.path.exists(tmpdir) or args.rebuild_regions):
rebuild_regions_by_contig = True
if rebuild_regions_by_contig:
try:
os.mkdir(tmpdir)
except OSError:
pass
previous_contig = None
with open(args.candidates, "r") as fh:
contigs = set()
for line in fh:
contig = line.strip().split()[0]
if previous_contig != contig:
if previous_contig is not None and rebuild_regions_by_contig:
contig_file.close()
previous_contig = contig
contigs.add(contig)
if rebuild_regions_by_contig:
contig_file = open(os.path.join(tmpdir, "%s.bed" % contig), "w")
if rebuild_regions_by_contig:
contig_file.write(line)
if rebuild_regions_by_contig:
contig_file.close()
# Assemble regions per contig creating a single merged BAM for each contig.
local_assembly_basename = os.path.basename(args.assembly_alignments)
local_assemblies = set()
return_code = 0
for contig in contigs:
contig_local_assemblies = os.path.join("local_assemblies", local_assembly_basename.replace(".bam", ".%s.bam" % contig))
local_assemblies.add(contig_local_assemblies)
if os.path.exists(contig_local_assemblies):
sys.stdout.write("Local assemblies already exist for %s\n" % contig)
continue
command = base_command + ("regions_to_assemble=%s" % os.path.join(tmpdir, "%s.bed" % contig),)
command = command + ("assembly_alignments=%s" % contig_local_assemblies,)
sys.stdout.write("Starting local assemblies for %s\n" % contig)
logging.debug("Assembly command: %s", " ".join(command))
return_code = _run_snake_target(args, *command)
if return_code != 0:
break
# If the last command executed successfully, try to merge all local
# assemblies per contig into a single file.
if not args.dryrun and return_code == 0:
if len(local_assemblies) > 1:
return_code = _run_cmd(["samtools", "merge", args.assembly_alignments] + list(local_assemblies))
else:
return_code = _run_cmd(["samtools", "view", "-b", "-o", args.assembly_alignments] + list(local_assemblies))
if return_code == 0:
return_code = _run_cmd(["samtools", "index", args.assembly_alignments])
# Return the last return code.
return return_code
else:
if args.assembly_alignments:
command = base_command + ("assembly_alignments=%s" % args.assembly_alignments,)
logging.debug("Assembly command: %s", " ".join(command))
            return _run_snake_target(args, *command)
def call(args):
# Call SVs, indels, and inversions.
sys.stdout.write("Calling variants\n")
return_code = _run_snake_target(
args,
"call_variants",
"--config",
"reference=%s" % args.reference,
"alignments=%s" % args.alignments,
"local_assembly_alignments=%s" % args.assembly_alignments,
"variants=%s" % args.variants,
"species=\"%s\"" % args.species,
"sample=\"%s\"" % args.sample
)
if return_code != 0:
sys.stderr.write("Failed to call variants\n")
return return_code
def run(args):
# Get default jobs
if "jobs" in args:
default_jobs = args.jobs
else:
default_jobs = 1
# Get the number of jobs for each step
job_step = re.split("\\s*[,;:]\\s*", args.runjobs.strip()) # Split into array
job_step = [job_step[i] if len(job_step) > i else '' for i in range(4)] # Extend to length 4
# Convert each number of jobs to integers
for i in range(4):
if job_step[i] != '':
try:
job_step[i] = int(job_step[i])
except ValueError:
sys.stderr.write("Invalid number of jobs for step %d: Must be an integer: \"%s\"\n" % ((i + 1), job_step[i]))
return 1
else:
job_step[i] = default_jobs
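    # Example: with --jobs 4 and --runjobs "8,,2", job_step becomes
    # [8, 4, 2, 4] for the align, detect, assemble, and call steps.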
# Report the number of jobs for each task
if args.verbose and args.distribute:
print("Jobs per task:")
print("\t* Align: %s" % job_step[0])
print("\t* Detect: %s" % job_step[1])
print("\t* Assemble: %s" % job_step[2])
print("\t* Call: %s" % job_step[3])
# Build reference indices
return_code = index(args)
if return_code != 0:
sys.stderr.write("Failed to index reference\n")
return return_code
# Align
args.jobs = job_step[0]
return_code = align(args)
if return_code != 0:
sys.stderr.write("Failed to align reads\n")
return return_code
# Detect SVs.
args.jobs = job_step[1]
return_code = detect(args)
if return_code != 0:
sys.stderr.write("Failed to identify candidate regions\n")
return return_code
# Run local assemblies.
args.jobs = job_step[2]
return_code = assemble(args)
if return_code != 0:
sys.stderr.write("Failed to generate local assemblies\n")
return return_code
# Call SVs, indels, and inversions.
args.jobs = job_step[3]
return_code = call(args)
if return_code != 0:
sys.stderr.write("Failed to call variants\n")
return return_code
return 0
def genotype(args):
# Genotype SVs.
sys.stdout.write("Genotyping SVs\n")
return_code = _run_snake_target(
args,
"convert_genotypes_to_vcf",
"--config",
"genotyper_config=%s" % args.genotyper_config,
"genotyped_variants=%s" % args.genotyped_variants,
"threads=%s" % args.threads
)
if return_code != 0:
sys.stderr.write("Failed to genotype SVs\n")
return return_code
# Main
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--dryrun", "-n", action="store_true", help="Print commands that will run without running them")
parser.add_argument("--distribute", action="store_true", help="Distribute analysis to Grid Engine-style cluster")
parser.add_argument("--jobs", help="number of jobs to run simultaneously", type=int, default=1)
parser.add_argument("--tmpdir", help="temporary directory to use for distributed jobs", default="/var/tmp")
parser.add_argument("--verbose", "-v", help="print extra runtime information", action="store_true")
parser.add_argument("--cluster_config", help="JSON/YAML file specifying cluster configuration parameters to pass to Snakemake's --cluster-config option")
parser.add_argument("--drmaalib", help="For jobs that are distributed, this is the location to the DRMAA library (libdrmaa.so) installed with Grid Engine. Use this to set DRMAA_LIBRARY_PATH in the environment for pipelined commands. If DRMAA_LIBRARY_PATH is already set in the environment when calling this program, this option is not required.")
subparsers = parser.add_subparsers()
# Index a reference for use by BLASR.
parser_index = subparsers.add_parser("index", help="index a reference sequence for use by BLASR")
parser_index.add_argument("reference", help="FASTA file of reference to index")
parser_index.set_defaults(func=index)
# Align PacBio reads to an indexed reference with BLASR.
parser_align = subparsers.add_parser("align", help="align PacBio reads to an indexed reference with BLASR")
parser_align.add_argument("reference", help="FASTA file of indexed reference with .ctab and .sa in the same directory")
parser_align.add_argument("reads", help="text file with one absolute path to a PacBio reads file (.bax.h5) per line")
parser_align.add_argument("--alignments", help="text file with one absolute path to a BLASR alignments file (.bam) per line", default="alignments.fofn")
parser_align.add_argument("--alignments_dir", help="absolute path of directory for BLASR alignment files", default="alignments")
parser_align.add_argument("--batches", help="number of batches to split input reads into such that there will be one BAM output file per batch", type=int, default=1)
parser_align.add_argument("--threads", help="number of threads to use for each BLASR alignment job", type=int, default=1)
parser_align.add_argument("--alignment_parameters", help="BLASR parameters to use to align raw reads", default="-bestn 2 -maxAnchorsPerPosition 100 -advanceExactMatches 10 -affineAlign -affineOpen 100 -affineExtend 0 -insertion 5 -deletion 5 -extend -maxExtendDropoff 50")
parser_align.set_defaults(func=align)
# Detect SV signatures in BLASR alignments and build sliding windows to assemble.
parser_detector = subparsers.add_parser("detect", help="detect SV signatures in BLASR-aligned reads")
parser_detector.add_argument("reference", help="FASTA file of indexed reference with .ctab and .sa in the same directory")
parser_detector.add_argument("alignments", help="text file with one absolute path to a BLASR raw reads alignments file (.bam) per line")
parser_detector.add_argument("candidates", help="BED file of candidates detected in read alignments")
parser_detector.add_argument("--exclude", help="BED file of regions to exclude from local assembly (e.g., heterochromatic sequences, etc.)")
parser_detector.add_argument("--assembly_window_size", type=int, help="size of reference window for local assemblies", default=60000)
parser_detector.add_argument("--assembly_window_slide", type=int, help="size of reference window slide for local assemblies", default=20000)
parser_detector.add_argument("--min_length", type=int, help="minimum length required for SV candidates", default=50)
parser_detector.add_argument("--min_support", type=int, help="minimum number of supporting reads required to flag a region as an SV candidate", default=5)
parser_detector.add_argument("--max_support", type=int, help="maximum number of supporting reads allowed to flag a region as an SV candidate", default=100)
parser_detector.add_argument("--min_coverage", type=int, help="minimum number of total reads required to flag a region as an SV candidate", default=5)
parser_detector.add_argument("--max_coverage", type=int, help="maximum number of total reads allowed to flag a region as an SV candidate", default=100),
parser_detector.add_argument("--min_hardstop_support", type=int, help="minimum number of reads with hardstops required to flag a region as an SV candidate", default=11)
parser_detector.add_argument("--max_candidate_length", type=int, help="maximum length allowed for an SV candidate region", default=60000)
parser_detector.set_defaults(func=detect)
# Assemble candidate regions and align assemblies back to the reference.
parser_assembler = subparsers.add_parser("assemble", help="assemble candidate regions and align assemblies back to the reference")
parser_assembler.add_argument("reference", help="FASTA file of indexed reference with .ctab and .sa in the same directory")
parser_assembler.add_argument("reads", help="text file with one absolute path to a PacBio reads file (.bax.h5) per line")
parser_assembler.add_argument("alignments", help="text file with one absolute path to a BLASR raw reads alignments file (.bam) per line")
parser_assembler.add_argument("candidates", help="BED file of regions to assemble from raw read alignments")
parser_assembler.add_argument("assembly_alignments", help="BAM file with BLASR alignments of local assemblies against the reference")
parser_assembler.add_argument("--rebuild_regions", action="store_true", help="rebuild subset of regions to assemble")
parser_assembler.add_argument("--alignment_parameters", help="BLASR parameters to use to align local assemblies", default="-affineAlign -affineOpen 8 -affineExtend 0 -bestn 1 -maxMatch 30 -sdpTupleSize 13")
parser_assembler.add_argument("--mapping_quality", type=int, help="minimum mapping quality of raw reads to use for local assembly", default=30)
parser_assembler.add_argument("--minutes_to_delay_jobs", type=int, help="maximum number of minutes to delay local assembly jobs to limit simultaneous I/O on shared storage", default=1)
parser_assembler.add_argument("--assembly_log", help="name of log file for local assemblies", default="assembly.log")
parser_assembler.set_defaults(func=assemble)
# Call SVs and indels from BLASR alignments of local assemblies.
parser_caller = subparsers.add_parser("call", help="call SVs and indels by BLASR alignments of local or whole genome assemblies")
parser_caller.add_argument("reference", help="FASTA file of indexed reference with .ctab and .sa in the same directory")
parser_caller.add_argument("alignments", help="text file with one absolute path to a BLASR raw reads alignments file (.bam) per line")
parser_caller.add_argument("assembly_alignments", help="BAM file with BLASR alignments of local assemblies against the reference")
parser_caller.add_argument("variants", help="VCF of variants called by local assembly alignments")
parser_caller.add_argument("--sample", help="Sample name to use in final variant calls", default="UnnamedSample")
parser_caller.add_argument("--species", help="Common or scientific species name to pass to RepeatMasker", default="human")
parser_caller.set_defaults(func=call)
# Run: Call SVs and indels from BLASR alignments of raw reads.
parser_runner = subparsers.add_parser("run", help="call SVs and indels by local assembly of BLASR-aligned reads")
parser_runner.add_argument("reference", help="FASTA file of indexed reference with .ctab and .sa in the same directory")
parser_runner.add_argument("reads", help="text file with one absolute path to a PacBio reads file (.bax.h5) per line")
parser_runner.add_argument("--variants", help="VCF of variants called by local assembly alignments", default="variants.vcf")
parser_runner.add_argument("--alignments", help="text file with one absolute path to a BLASR raw reads alignments file (.bam) per line", default="alignments.fofn")
parser_runner.add_argument("--alignments_dir", help="absolute path of directory for BLASR alignment files", default="alignments")
parser_runner.add_argument("--candidates", help="BED file of candidates detected in read alignments", default="candidates.bed")
parser_runner.add_argument("--assembly_alignments", help="BAM file with BLASR alignments of local assemblies against the reference", default="local_assembly_alignments.bam")
parser_runner.add_argument("--batches", help="number of batches to split input reads into such that there will be one BAM output file per batch", type=int, default=1)
parser_runner.add_argument("--threads", help="number of threads to use for each BLASR alignment job", type=int, default=1)
parser_runner.add_argument("--exclude", help="BED file of regions to exclude from local assembly (e.g., heterochromatic sequences, etc.)")
parser_runner.add_argument("--assembly_window_size", type=int, help="size of reference window for local assemblies", default=60000)
parser_runner.add_argument("--assembly_window_slide", type=int, help="size of reference window slide for local assemblies", default=30000)
parser_runner.add_argument("--min_length", type=int, help="minimum length required for SV candidates", default=50)
parser_runner.add_argument("--min_support", type=int, help="minimum number of supporting reads required to flag a region as an SV candidate", default=5)
parser_runner.add_argument("--max_support", type=int, help="maximum number of supporting reads allowed to flag a region as an SV candidate", default=100)
parser_runner.add_argument("--min_coverage", type=int, help="minimum number of total reads required to flag a region as an SV candidate", default=5)
parser_runner.add_argument("--max_coverage", type=int, help="maximum number of total reads allowed to flag a region as an SV candidate", default=100),
parser_runner.add_argument("--rebuild_regions", action="store_true", help="rebuild subset of regions to assemble")
parser_runner.add_argument("--refindex", action="store_true", help="Generate a BLASR index on the reference sequence.")
parser_runner.add_argument("--sample", help="Sample name to use in final variant calls", default="UnnamedSample")
parser_runner.add_argument("--species", help="Common or scientific species name to pass to RepeatMasker", default="human")
parser_runner.add_argument("--runjobs", help="A comma-separated list of jobs for each step: align, detect, assemble, and call (in that order). A missing number uses the value set by --jobs (or 1 if --jobs was not set).", default="")
parser_runner.add_argument("--alignment_parameters", help="BLASR parameters to use to align raw reads", default="-bestn 2 -maxAnchorsPerPosition 100 -advanceExactMatches 10 -affineAlign -affineOpen 100 -affineExtend 0 -insertion 5 -deletion 5 -extend -maxExtendDropoff 50")
parser_runner.add_argument("--mapping_quality", type=int, help="minimum mapping quality of raw reads to use for local assembly", default=30)
parser_runner.add_argument("--minutes_to_delay_jobs", type=int, help="maximum number of minutes to delay local assembly jobs to limit simultaneous I/O on shared storage", default=1)
parser_runner.add_argument("--assembly_log", help="name of log file for local assemblies", default="assembly.log")
parser_runner.add_argument("--min_hardstop_support", type=int, help="minimum number of reads with hardstops required to flag a region as an SV candidate", default=11)
parser_runner.add_argument("--max_candidate_length", type=int, help="maximum length allowed for an SV candidate region", default=60000)
parser_runner.set_defaults(func=run)
# Genotype SVs with Illumina reads.
parser_genotyper = subparsers.add_parser("genotype", help="Genotype SVs with Illumina reads")
parser_genotyper.add_argument("genotyper_config", help="JSON configuration file with SV reference paths, samples to genotype as BAMs, and their corresponding references")
parser_genotyper.add_argument("genotyped_variants", help="VCF of SMRT SV variant genotypes for the given sample-level BAMs")
parser_genotyper.add_argument("--threads", help="number of threads to use for each BWA MEM alignment job", type=int, default=1)
parser_genotyper.set_defaults(func=genotype)
args = parser.parse_args()
# Set DRMAA library path
if args.drmaalib is not None:
PROCESS_ENV["DRMAA_LIBRARY_PATH"] = args.drmaalib
elif args.distribute and "DRMAA_LIBRARY_PATH" not in PROCESS_ENV:
sys.stderr.write("WARNING: --distribute is set, but DRMAA_LIBRARY_PATH is not set in the environment or via the --drmaalib option: Searching only in Python's library path for libdrmaa.so\n")
# Report paths if verbose
if args.verbose:
# Print python version
        print('Python version: {0}'.format(re.sub(r'\s*\n\s*', ' - ', sys.version)))
# Print environment
print("PATH:")
for PATH_ELEMENT in PROCESS_ENV["PATH"].split(":"):
print("\t* %s" % PATH_ELEMENT)
print("LD_LIBRARY_PATH:")
for PATH_ELEMENT in PROCESS_ENV["LD_LIBRARY_PATH"].split(":"):
print("\t* %s" % PATH_ELEMENT)
if "DRMAA_LIBRARY_PATH" in PROCESS_ENV:
print("DRMAA_LIBRARY_PATH: %s" % PROCESS_ENV["DRMAA_LIBRARY_PATH"])
else:
print("DRMAA_LIBRARY_PATH: <NOT_SET>\n\t* Not required unless --distribute is set")
# Print arguments
print("Arguments:")
for key in sorted(vars(args).keys()):
print('\t* %s = %s' % (key, getattr(args, key)))
# Flush output
sys.stdout.flush()
# Make a log directory for grid-engine-style error logs if commands are
# being distributed in non-dryrun mode.
if args.distribute and not args.dryrun and not os.path.isdir("log"):
os.mkdir("log")
# Run target command
return_code = args.func(args)
sys.exit(return_code)
|
|
import contextlib
import blinker
import blinker._saferef
import pprint
import copy
import functools
import os
import typing
import textwrap
import ruamel.yaml
from mitmproxy import exceptions
from mitmproxy.utils import typecheck
"""
The base implementation for Options.
"""
unset = object()
class _Option:
__slots__ = ("name", "typespec", "value", "_default", "choices", "help")
def __init__(
self,
name: str,
typespec: type,
default: typing.Any,
help: str,
choices: typing.Optional[typing.Sequence[str]]
) -> None:
typecheck.check_type(name, default, typespec)
self.name = name
self.typespec = typespec
self._default = default
self.value = unset
self.help = textwrap.dedent(help).strip().replace("\n", " ")
self.choices = choices
def __repr__(self):
return "{value} [{type}]".format(value=self.current(), type=self.typespec)
@property
def default(self):
return copy.deepcopy(self._default)
def current(self) -> typing.Any:
if self.value is unset:
v = self.default
else:
v = self.value
return copy.deepcopy(v)
def set(self, value: typing.Any) -> None:
typecheck.check_type(self.name, value, self.typespec)
self.value = value
def reset(self) -> None:
self.value = unset
def has_changed(self) -> bool:
return self.current() != self.default
def __eq__(self, other) -> bool:
for i in self.__slots__:
if getattr(self, i) != getattr(other, i):
return False
return True
def __deepcopy__(self, _):
o = _Option(
self.name, self.typespec, self.default, self.help, self.choices
)
if self.has_changed():
o.value = self.current()
return o
class OptManager:
"""
OptManager is the base class from which Options objects are derived.
.changed is a blinker Signal that triggers whenever options are
updated. If any handler in the chain raises an exceptions.OptionsError
exception, all changes are rolled back, the exception is suppressed,
and the .errored signal is notified.
    OptManager always returns a deep copy of options to ensure that
mutation doesn't change the option state inadvertently.
"""
def __init__(self):
self.__dict__["_options"] = {}
self.__dict__["changed"] = blinker.Signal()
self.__dict__["errored"] = blinker.Signal()
self.__dict__["_processed"] = {}
def add_option(
self,
name: str,
typespec: type,
default: typing.Any,
help: str,
choices: typing.Optional[typing.Sequence[str]] = None
) -> None:
if name in self._options:
raise ValueError("Option %s already exists" % name)
self._options[name] = _Option(name, typespec, default, help, choices)
@contextlib.contextmanager
def rollback(self, updated, reraise=False):
old = copy.deepcopy(self._options)
try:
yield
except exceptions.OptionsError as e:
# Notify error handlers
self.errored.send(self, exc=e)
# Rollback
self.__dict__["_options"] = old
self.changed.send(self, updated=updated)
if reraise:
raise e
def subscribe(self, func, opts):
"""
Subscribe a callable to the .changed signal, but only for a
specified list of options. The callable should accept arguments
(options, updated), and may raise an OptionsError.
The event will automatically be unsubscribed if the callable goes out of scope.
"""
for i in opts:
if i not in self._options:
raise exceptions.OptionsError("No such option: %s" % i)
# We reuse blinker's safe reference functionality to cope with weakrefs
# to bound methods.
func = blinker._saferef.safe_ref(func)
@functools.wraps(func)
def _call(options, updated):
if updated.intersection(set(opts)):
f = func()
if f:
f(options, updated)
else:
self.changed.disconnect(_call)
# Our wrapper function goes out of scope immediately, so we have to set
# weakrefs to false. This means we need to keep our own weakref, and
# clean up the hook when it's gone.
self.changed.connect(_call, weak=False)
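    # Usage sketch (hypothetical option name): react only to "verbosity" changes.
    #
    #     def on_change(options, updated):
    #         print("verbosity is now", options.verbosity)
    #
    #     opts.subscribe(on_change, ["verbosity"])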
def __eq__(self, other):
return self._options == other._options
def __copy__(self):
o = OptManager()
o.__dict__["_options"] = copy.deepcopy(self._options)
return o
def __getattr__(self, attr):
if attr in self._options:
return self._options[attr].current()
else:
raise AttributeError("No such option: %s" % attr)
def __setattr__(self, attr, value):
self.update(**{attr: value})
def keys(self):
return set(self._options.keys())
def __contains__(self, k):
return k in self._options
def reset(self):
"""
Restore defaults for all options.
"""
for o in self._options.values():
o.reset()
self.changed.send(self, updated=set(self._options.keys()))
def update_known(self, **kwargs):
"""
Update and set all known options from kwargs. Returns a dictionary
of unknown options.
"""
known, unknown = {}, {}
for k, v in kwargs.items():
if k in self._options:
known[k] = v
else:
unknown[k] = v
updated = set(known.keys())
if updated:
with self.rollback(updated, reraise=True):
for k, v in known.items():
self._options[k].set(v)
self.changed.send(self, updated=updated)
return unknown
def update(self, **kwargs):
u = self.update_known(**kwargs)
if u:
raise KeyError("Unknown options: %s" % ", ".join(u.keys()))
def setter(self, attr):
"""
Generate a setter for a given attribute. This returns a callable
taking a single argument.
"""
if attr not in self._options:
raise KeyError("No such option: %s" % attr)
def setter(x):
setattr(self, attr, x)
return setter
def toggler(self, attr):
"""
Generate a toggler for a boolean attribute. This returns a callable
that takes no arguments.
"""
if attr not in self._options:
raise KeyError("No such option: %s" % attr)
o = self._options[attr]
if o.typespec != bool:
raise ValueError("Toggler can only be used with boolean options")
def toggle():
setattr(self, attr, not getattr(self, attr))
return toggle
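    # Sketch: for a registered bool option such as "anticache",
    # opts.toggler("anticache") yields a zero-argument callable that flips the
    # value, suitable for binding to a key or menu action.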
def default(self, option: str) -> typing.Any:
return self._options[option].default
def has_changed(self, option):
"""
Has the option changed from the default?
"""
return self._options[option].has_changed()
def merge(self, opts):
"""
        Merge a dict of options into this object. Options with a None
        value are ignored. Lists and tuples are appended to the current
option value.
"""
toset = {}
for k, v in opts.items():
if v is not None:
if isinstance(v, (list, tuple)):
toset[k] = getattr(self, k) + v
else:
toset[k] = v
self.update(**toset)
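    # Example (assuming options with these names and types are registered):
    # with anticomp == False and scripts == [],
    #     opts.merge({"anticomp": True, "scripts": ["a.py"], "mode": None})
    # sets anticomp to True, extends scripts to ["a.py"], and ignores "mode".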
def __repr__(self):
options = pprint.pformat(self._options, indent=4).strip(" {}")
if "\n" in options:
options = "\n " + options + "\n"
return "{mod}.{cls}({{{options}}})".format(
mod=type(self).__module__,
cls=type(self).__name__,
options=options
)
def set(self, *spec):
vals = {}
for i in spec:
vals.update(self._setspec(i))
self.update(**vals)
def parse_setval(self, optname: str, optstr: typing.Optional[str]) -> typing.Any:
"""
Convert a string to a value appropriate for the option type.
"""
if optname not in self._options:
raise exceptions.OptionsError("No such option %s" % optname)
o = self._options[optname]
if o.typespec in (str, typing.Optional[str]):
return optstr
elif o.typespec in (int, typing.Optional[int]):
if optstr:
try:
return int(optstr)
except ValueError:
raise exceptions.OptionsError("Not an integer: %s" % optstr)
elif o.typespec == int:
raise exceptions.OptionsError("Option is required: %s" % optname)
else:
return None
elif o.typespec == bool:
if not optstr or optstr == "true":
return True
elif optstr == "false":
return False
else:
raise exceptions.OptionsError(
"Boolean must be \"true\", \"false\", or have the value " "omitted (a synonym for \"true\")."
)
elif o.typespec == typing.Sequence[str]:
if not optstr:
return []
else:
return getattr(self, optname) + [optstr]
        raise NotImplementedError("Unsupported option type: %s" % o.typespec)
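    # Conversion sketch (assuming registered options of the listed types):
    #     opts.parse_setval("port", "8080")     -> 8080                  (int)
    #     opts.parse_setval("mode", "socks5")   -> "socks5"              (str)
    #     opts.parse_setval("anticache", None)  -> True                  (bool, value omitted)
    #     opts.parse_setval("scripts", "a.py")  -> opts.scripts + ["a.py"]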
def _setspec(self, spec):
d = {}
parts = spec.split("=", maxsplit=1)
if len(parts) == 1:
optname, optval = parts[0], None
else:
optname, optval = parts[0], parts[1]
d[optname] = self.parse_setval(optname, optval)
return d
def make_parser(self, parser, optname, metavar=None, short=None):
o = self._options[optname]
def mkf(l, s):
l = l.replace("_", "-")
f = ["--%s" % l]
if s:
f.append("-" + s)
return f
flags = mkf(optname, short)
if o.typespec == bool:
g = parser.add_mutually_exclusive_group(required=False)
onf = mkf(optname, None)
offf = mkf("no-" + optname, None)
# The short option for a bool goes to whatever is NOT the default
if short:
if o.default:
offf = mkf("no-" + optname, short)
else:
onf = mkf(optname, short)
g.add_argument(
*offf,
action="store_false",
dest=optname,
)
g.add_argument(
*onf,
action="store_true",
dest=optname,
help=o.help
)
parser.set_defaults(**{optname: None})
elif o.typespec in (int, typing.Optional[int]):
parser.add_argument(
*flags,
action="store",
type=int,
dest=optname,
help=o.help,
metavar=metavar,
)
elif o.typespec in (str, typing.Optional[str]):
parser.add_argument(
*flags,
action="store",
type=str,
dest=optname,
help=o.help,
metavar=metavar,
choices=o.choices
)
elif o.typespec == typing.Sequence[str]:
parser.add_argument(
*flags,
action="append",
type=str,
dest=optname,
help=o.help + " May be passed multiple times.",
metavar=metavar,
choices=o.choices,
)
else:
            raise ValueError("Unsupported option type: %s" % o.typespec)
def dump_defaults(opts):
"""
Dumps an annotated file with all options.
"""
# Sort data
s = ruamel.yaml.comments.CommentedMap()
for k in sorted(opts.keys()):
o = opts._options[k]
s[k] = o.default
txt = o.help.strip()
if o.choices:
txt += " Valid values are %s." % ", ".join(repr(c) for c in o.choices)
else:
if o.typespec in (str, int, bool):
t = o.typespec.__name__
elif o.typespec == typing.Optional[str]:
t = "optional str"
elif o.typespec == typing.Sequence[str]:
t = "sequence of str"
else: # pragma: no cover
raise NotImplementedError
txt += " Type %s." % t
txt = "\n".join(textwrap.wrap(txt))
        s.yaml_set_comment_before_after_key(k, before="\n" + txt)
return ruamel.yaml.round_trip_dump(s)
def parse(text):
if not text:
return {}
try:
data = ruamel.yaml.load(text, ruamel.yaml.RoundTripLoader)
except ruamel.yaml.error.YAMLError as v:
if hasattr(v, "problem_mark"):
snip = v.problem_mark.get_snippet()
raise exceptions.OptionsError(
"Config error at line %s:\n%s\n%s" %
(v.problem_mark.line + 1, snip, v.problem)
)
else:
raise exceptions.OptionsError("Could not parse options.")
if isinstance(data, str):
raise exceptions.OptionsError("Config error - no keys found.")
return data
def load(opts, text):
"""
    Load configuration from text, overwriting options already set in
this object. May raise OptionsError if the config file is invalid.
Returns a dictionary of all unknown options.
"""
data = parse(text)
return opts.update_known(**data)
def load_paths(opts, *paths):
"""
Load paths in order. Each path takes precedence over the previous
path. Paths that don't exist are ignored, errors raise an
OptionsError.
Returns a dictionary of unknown options.
"""
ret = {}
for p in paths:
p = os.path.expanduser(p)
if os.path.exists(p) and os.path.isfile(p):
with open(p, "rt", encoding="utf8") as f:
try:
txt = f.read()
except UnicodeDecodeError as e:
raise exceptions.OptionsError(
"Error reading %s: %s" % (p, e)
)
try:
ret.update(load(opts, txt))
except exceptions.OptionsError as e:
raise exceptions.OptionsError(
"Error reading %s: %s" % (p, e)
)
return ret
def serialize(opts, text, defaults=False):
"""
Performs a round-trip serialization. If text is not None, it is
treated as a previous serialization that should be modified
in-place.
- If "defaults" is False, only options with non-default values are
serialized. Default values in text are preserved.
- Unknown options in text are removed.
- Raises OptionsError if text is invalid.
"""
data = parse(text)
for k in opts.keys():
if defaults or opts.has_changed(k):
data[k] = getattr(opts, k)
for k in list(data.keys()):
if k not in opts._options:
del data[k]
return ruamel.yaml.round_trip_dump(data)
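# Round-trip sketch: starting from an earlier dump, changed options are
# rewritten in place and unknown keys are dropped, e.g.
#
#     text = serialize(opts, "")      # emit only non-default values
#     text = serialize(opts, text)    # later: update the same document in-place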
def save(opts, path, defaults=False):
"""
Save to path. If the destination file exists, modify it in-place.
Raises OptionsError if the existing data is corrupt.
"""
if os.path.exists(path) and os.path.isfile(path):
with open(path, "rt", encoding="utf8") as f:
try:
data = f.read()
except UnicodeDecodeError as e:
raise exceptions.OptionsError(
"Error trying to modify %s: %s" % (path, e)
)
else:
data = ""
data = serialize(opts, data, defaults)
with open(path, "wt", encoding="utf8") as f:
f.write(data)
|
|
from typing import Callable, Optional
import bitcoin
from PyQt5 import QtCore
from PyQt5.QtCore import pyqtSlot, Qt, QTimer
from PyQt5.QtGui import QTextDocument
from PyQt5.QtWidgets import QWidget, QLineEdit, QMessageBox, QAction, QApplication, QActionGroup
import dash_utils
import hw_intf
from app_config import MasternodeConfig, DMN_ROLE_OWNER, DMN_ROLE_OPERATOR, DMN_ROLE_VOTING, InputKeyType, AppConfig
from app_defs import DispMessage, AppTextMessageType
from bip44_wallet import Bip44Wallet, BreakFetchTransactionsException
from common import CancelException
from dashd_intf import DashdInterface
from find_coll_tx_dlg import ListCollateralTxsDlg
from thread_fun_dlg import CtrlObject
from ui import ui_masternode_details_wdg
from wnd_utils import WndUtils
class WdgMasternodeDetails(QWidget, ui_masternode_details_wdg.Ui_WdgMasternodeDetails):
name_modified = QtCore.pyqtSignal(object, str)
data_changed = QtCore.pyqtSignal(object)
role_modified = QtCore.pyqtSignal()
label_width_changed = QtCore.pyqtSignal(int)
app_text_message_sent = QtCore.pyqtSignal(int, str, object)
def __init__(self, parent, app_config: AppConfig, dashd_intf: DashdInterface, hw_session: hw_intf.HwSessionInfo):
QWidget.__init__(self, parent)
ui_masternode_details_wdg.Ui_WdgMasternodeDetails.__init__(self)
self.parent = parent
self.app_config = app_config
self.dashd_intf = dashd_intf
self.hw_session = hw_session
        self.masternode = MasternodeConfig()  # temporary object to avoid mutating the global
        # masternode config, since the user can cancel editing
self.updating_ui = False
self.edit_mode = False
self.owner_key_invalid = False
self.operator_key_invalid = False
self.voting_key_invalid = False
self.setupUi(self)
def setupUi(self, widget: QWidget):
ui_masternode_details_wdg.Ui_WdgMasternodeDetails.setupUi(self, self)
WndUtils.set_icon(self.parent, self.btnShowOwnerPrivateKey, 'eye@16px.png')
WndUtils.set_icon(self.parent, self.btnShowOperatorPrivateKey, 'eye@16px.png')
WndUtils.set_icon(self.parent, self.btnShowVotingPrivateKey, 'eye@16px.png')
WndUtils.set_icon(self.parent, self.btnCopyOwnerKey, 'content-copy@16px.png')
WndUtils.set_icon(self.parent, self.btnCopyOperatorKey, 'content-copy@16px.png')
WndUtils.set_icon(self.parent, self.btnCopyVotingKey, 'content-copy@16px.png')
WndUtils.set_icon(self.parent, self.btnCopyProtxHash, 'content-copy@16px.png')
WndUtils.set_icon(self.parent, self.btnShowCollateralPathAddress, 'eye@16px.png')
self.act_view_as_owner_private_key = QAction('View as private key', self)
self.act_view_as_owner_private_key.setData('privkey')
self.act_view_as_owner_private_key.triggered.connect(self.on_owner_view_key_type_changed)
self.act_view_as_owner_public_address = QAction('View as Dash address', self)
self.act_view_as_owner_public_address.setData('address')
self.act_view_as_owner_public_address.triggered.connect(self.on_owner_view_key_type_changed)
self.act_view_as_owner_public_key = QAction('View as public key', self)
self.act_view_as_owner_public_key.setData('pubkey')
self.act_view_as_owner_public_key.triggered.connect(self.on_owner_view_key_type_changed)
self.act_view_as_owner_public_key_hash = QAction('View as public key hash', self)
self.act_view_as_owner_public_key_hash.setData('pubkeyhash')
self.act_view_as_owner_public_key_hash.triggered.connect(self.on_owner_view_key_type_changed)
self.ag_owner_key = QActionGroup(self)
self.act_view_as_owner_private_key.setCheckable(True)
self.act_view_as_owner_public_address.setCheckable(True)
self.act_view_as_owner_public_key.setCheckable(True)
self.act_view_as_owner_public_key_hash.setCheckable(True)
self.act_view_as_owner_private_key.setActionGroup(self.ag_owner_key)
self.act_view_as_owner_public_address.setActionGroup(self.ag_owner_key)
self.act_view_as_owner_public_key.setActionGroup(self.ag_owner_key)
self.act_view_as_owner_public_key_hash.setActionGroup(self.ag_owner_key)
self.btnShowOwnerPrivateKey.addActions(
(self.act_view_as_owner_private_key, self.act_view_as_owner_public_address,
self.act_view_as_owner_public_key, self.act_view_as_owner_public_key_hash))
self.act_view_as_voting_private_key = QAction('View as private key', self)
self.act_view_as_voting_private_key.setData('privkey')
self.act_view_as_voting_private_key.triggered.connect(self.on_voting_view_key_type_changed)
self.act_view_as_voting_public_address = QAction('View as Dash address', self)
self.act_view_as_voting_public_address.setData('address')
self.act_view_as_voting_public_address.triggered.connect(self.on_voting_view_key_type_changed)
self.act_view_as_voting_public_key = QAction('View as public key', self)
self.act_view_as_voting_public_key.setData('pubkey')
self.act_view_as_voting_public_key.triggered.connect(self.on_voting_view_key_type_changed)
self.act_view_as_voting_public_key_hash = QAction('View as public key hash', self)
self.act_view_as_voting_public_key_hash.setData('pubkeyhash')
self.act_view_as_voting_public_key_hash.triggered.connect(self.on_voting_view_key_type_changed)
self.ag_voting_key = QActionGroup(self)
self.act_view_as_voting_private_key.setCheckable(True)
self.act_view_as_voting_public_address.setCheckable(True)
self.act_view_as_voting_public_key.setCheckable(True)
self.act_view_as_voting_public_key_hash.setCheckable(True)
self.act_view_as_voting_private_key.setActionGroup(self.ag_voting_key)
self.act_view_as_voting_public_address.setActionGroup(self.ag_voting_key)
self.act_view_as_voting_public_key.setActionGroup(self.ag_voting_key)
self.act_view_as_voting_public_key_hash.setActionGroup(self.ag_voting_key)
self.btnShowVotingPrivateKey.addActions((self.act_view_as_voting_private_key,
self.act_view_as_voting_public_address,
self.act_view_as_voting_public_key,
self.act_view_as_voting_public_key_hash))
self.act_view_as_operator_private_key = QAction('View as private key', self)
self.act_view_as_operator_private_key.setData('privkey')
self.act_view_as_operator_private_key.triggered.connect(self.on_operator_view_key_type_changed)
self.act_view_as_operator_public_key = QAction('View as public key', self)
self.act_view_as_operator_public_key.setData('pubkey')
self.act_view_as_operator_public_key.triggered.connect(self.on_operator_view_key_type_changed)
self.ag_operator_key = QActionGroup(self)
self.act_view_as_operator_private_key.setCheckable(True)
self.act_view_as_operator_public_key.setCheckable(True)
self.act_view_as_operator_private_key.setActionGroup(self.ag_operator_key)
self.act_view_as_operator_public_key.setActionGroup(self.ag_operator_key)
self.btnShowOperatorPrivateKey.addActions((self.act_view_as_operator_private_key,
self.act_view_as_operator_public_key))
self.update_ui_controls_state()
    def showEvent(self, event):
def apply():
self.update_key_controls_state()
self.lblOwnerKey.fontMetrics()
self.set_buttons_height()
QTimer.singleShot(100, apply)
def set_buttons_height(self):
h = self.edtName.height()
self.btnCopyOwnerKey.setFixedHeight(h)
self.btnShowOwnerPrivateKey.setFixedHeight(h)
self.btnGenerateOwnerPrivateKey.setFixedHeight(h)
self.btnCopyOperatorKey.setFixedHeight(h)
self.btnShowOperatorPrivateKey.setFixedHeight(h)
self.btnGenerateOperatorPrivateKey.setFixedHeight(h)
self.btnCopyVotingKey.setFixedHeight(h)
self.btnShowVotingPrivateKey.setFixedHeight(h)
self.btnGenerateVotingPrivateKey.setFixedHeight(h)
self.btnCopyProtxHash.setFixedHeight(h)
self.btnGetMNDataByIP.setFixedHeight(h)
self.btnShowCollateralPathAddress.setFixedHeight(h)
self.btnBip32PathToAddress.setFixedHeight(h)
self.btnLocateCollateral.setFixedHeight(h)
def update_ui_controls_state(self):
"""Update visibility and enabled/disabled state of the UI controls.
"""
self.lblDMNTxHash.setVisible(self.masternode is not None)
self.edtDMNTxHash.setVisible(self.masternode is not None)
self.btnGetMNDataByIP.setVisible(self.masternode is not None and self.edit_mode)
self.lblCollateral.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.btnLocateCollateral.setVisible(self.masternode is not None and self.edit_mode and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.btnBip32PathToAddress.setVisible(self.masternode is not None and self.edit_mode and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.btnShowCollateralPathAddress.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.edtCollateralAddress.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.lblCollateralPath.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.edtCollateralPath.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.lblOwnerKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.edtOwnerKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.btnShowOwnerPrivateKey.setVisible(self.masternode is not None and
self.edit_mode is False and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.btnCopyOwnerKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0))
self.lblOperatorKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OPERATOR > 0))
self.edtOperatorKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OPERATOR > 0))
self.btnShowOperatorPrivateKey.setVisible(self.masternode is not None and
self.edit_mode is False and
(self.masternode.dmn_user_roles & DMN_ROLE_OPERATOR > 0))
self.btnCopyOperatorKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_OPERATOR > 0))
self.lblVotingKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_VOTING > 0))
self.edtVotingKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_VOTING > 0))
self.btnShowVotingPrivateKey.setVisible(self.masternode is not None and
self.edit_mode is False and
(self.masternode.dmn_user_roles & DMN_ROLE_VOTING > 0))
self.btnCopyVotingKey.setVisible(self.masternode is not None and
(self.masternode.dmn_user_roles & DMN_ROLE_VOTING > 0))
self.act_view_as_owner_private_key.setVisible(self.masternode is not None and
self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE)
self.act_view_as_owner_public_key.setVisible(self.masternode is not None and
self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE)
self.act_view_as_operator_private_key.setVisible(self.masternode is not None and
self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE)
self.act_view_as_voting_private_key.setVisible(self.masternode is not None and
self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE)
self.act_view_as_voting_public_key.setVisible(self.masternode is not None and
self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE)
self.btnGenerateOwnerPrivateKey.setVisible(
self.masternode is not None and self.edit_mode and
self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE and
self.masternode.dmn_user_roles & DMN_ROLE_OWNER > 0)
self.btnGenerateOperatorPrivateKey.setVisible(
self.masternode is not None and self.edit_mode and
self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE and
self.masternode.dmn_user_roles & DMN_ROLE_OPERATOR > 0)
self.btnGenerateVotingPrivateKey.setVisible(
self.masternode is not None and self.edit_mode and
self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE and
self.masternode.dmn_user_roles & DMN_ROLE_VOTING > 0)
self.lblUserRole.setVisible(self.masternode is not None)
self.chbRoleOwner.setVisible(self.masternode is not None)
self.chbRoleOperator.setVisible(self.masternode is not None)
self.chbRoleVoting.setVisible(self.masternode is not None)
self.btnCopyProtxHash.setVisible(self.masternode is not None)
# self.btnFindCollateral.setVisible(self.masternode is not None)
self.lblIP.setVisible(self.masternode is not None)
self.edtIP.setVisible(self.masternode is not None)
self.lblPort.setVisible(self.masternode is not None)
self.edtPort.setVisible(self.masternode is not None)
self.lblName.setVisible(self.masternode is not None)
self.edtName.setVisible(self.masternode is not None)
self.lblCollateralTxHash.setVisible(self.masternode is not None)
self.edtCollateralTxHash.setVisible(self.masternode is not None)
self.lblCollateralTxIndex.setVisible(self.masternode is not None)
self.edtCollateralTxIndex.setVisible(self.masternode is not None)
self.chbRoleVoting.setEnabled(self.edit_mode)
self.chbRoleOperator.setEnabled(self.edit_mode)
self.chbRoleOwner.setEnabled(self.edit_mode)
self.edtName.setReadOnly(self.edit_mode is False)
self.edtIP.setReadOnly(self.edit_mode is False)
self.edtPort.setReadOnly(self.edit_mode is False)
self.edtCollateralAddress.setReadOnly(self.edit_mode is False)
self.edtCollateralPath.setReadOnly(self.edit_mode is False)
self.edtCollateralTxHash.setReadOnly(self.edit_mode is False)
self.edtCollateralTxIndex.setReadOnly(self.edit_mode is False)
self.edtDMNTxHash.setReadOnly(self.edit_mode is False)
self.edtOwnerKey.setReadOnly(self.edit_mode is False)
self.edtOperatorKey.setReadOnly(self.edit_mode is False)
self.edtVotingKey.setReadOnly(self.edit_mode is False)
self.btnGenerateOwnerPrivateKey.setEnabled(self.edit_mode is True)
self.btnGenerateOperatorPrivateKey.setEnabled(self.edit_mode is True)
self.btnGenerateVotingPrivateKey.setEnabled(self.edit_mode is True)
self.btnLocateCollateral.setEnabled(self.edit_mode)
col_btn_visible = self.masternode is not None and (not self.masternode.collateral_tx or
not self.masternode.collateral_address or
not self.masternode.collateral_bip32_path)
self.update_key_controls_state()
def update_dynamic_labels(self):
def style_to_color(style: str) -> str:
if style == 'hl1':
color = 'color:#00802b'
elif style == 'hl2':
color = 'color:#0047b3'
else:
color = ''
return color
def get_label_text(prefix: str, cur_key_type: str, tooltip_anchor: str, group: QActionGroup, style: str,
error_msg: Optional[str] = None):
lbl = '???'
if self.edit_mode:
change_mode = f'<td>(<a href="{tooltip_anchor}">use {tooltip_anchor}</a>)</td>'
else:
a = group.checkedAction()
if a:
cur_key_type = a.data()
change_mode = ''
if cur_key_type == 'privkey':
lbl = prefix + ' private key'
elif cur_key_type == 'address':
lbl = prefix + ' Dash address'
elif cur_key_type == 'pubkey':
lbl = prefix + ' public key'
elif cur_key_type == 'pubkeyhash':
lbl = prefix + ' public key hash'
if error_msg:
err = '<td style="color:red">' + error_msg + '</td>'
else:
err = ''
return f'<table style="float:right;{style_to_color(style)}"><tr><td>{lbl}</td>{change_mode}{err}</tr></table>'
if self.masternode:
style = ''
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
key_type, tooltip_anchor, placeholder_text = ('privkey', 'address', 'Enter the owner private key')
if not self.edit_mode and not self.act_view_as_owner_private_key.isChecked():
style = 'hl2'
else:
key_type, tooltip_anchor, placeholder_text = ('address', 'privkey', 'Enter the owner Dash address')
if not self.edit_mode:
style = 'hl1' if self.act_view_as_owner_public_address.isChecked() else 'hl2'
self.lblOwnerKey.setText(get_label_text(
'Owner', key_type, tooltip_anchor, self.ag_owner_key, style,
'[invalid key format]' if self.owner_key_invalid else ''))
self.edtOwnerKey.setPlaceholderText(placeholder_text)
style = ''
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
key_type, tooltip_anchor, placeholder_text = ('privkey', 'pubkey', 'Enter the operator private key')
if not self.edit_mode and not self.act_view_as_operator_private_key.isChecked():
style = 'hl2'
else:
key_type, tooltip_anchor, placeholder_text = ('pubkey', 'privkey', 'Enter the operator public key')
if not self.edit_mode:
style = 'hl1' if self.act_view_as_operator_public_key.isChecked() else 'hl2'
self.lblOperatorKey.setText(get_label_text(
'Operator', key_type, tooltip_anchor, self.ag_operator_key,
style, '[invalid key format]' if self.operator_key_invalid else ''))
self.edtOperatorKey.setPlaceholderText(placeholder_text)
style = ''
if self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE:
key_type, tooltip_anchor, placeholder_text = ('privkey', 'address', 'Enter the voting private key')
if not self.edit_mode and not self.act_view_as_voting_private_key.isChecked():
style = 'hl2'
else:
key_type, tooltip_anchor, placeholder_text = ('address', 'privkey', 'Enter the voting Dash address')
if not self.edit_mode:
style = 'hl1' if self.act_view_as_voting_public_address.isChecked() else 'hl2'
self.lblVotingKey.setText(get_label_text(
'Voting', key_type, tooltip_anchor, self.ag_voting_key, style,
'[invalid key format]' if self.voting_key_invalid else ''))
self.edtVotingKey.setPlaceholderText(placeholder_text)
self.set_left_label_width(self.get_max_left_label_width())
def update_key_controls_state(self):
self.edtOwnerKey.setEchoMode(QLineEdit.Normal if self.btnShowOwnerPrivateKey.isChecked() or
self.edit_mode else QLineEdit.Password)
self.edtOperatorKey.setEchoMode(QLineEdit.Normal if self.btnShowOperatorPrivateKey.isChecked() or
self.edit_mode else QLineEdit.Password)
self.edtVotingKey.setEchoMode(QLineEdit.Normal if self.btnShowVotingPrivateKey.isChecked() or
self.edit_mode else QLineEdit.Password)
self.update_dynamic_labels()
def masternode_data_to_ui(self, reset_key_view_type: bool = False):
if self.masternode:
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
self.act_view_as_owner_private_key.setChecked(True)
else:
self.act_view_as_owner_public_address.setChecked(True)
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
self.act_view_as_operator_private_key.setChecked(True)
else:
self.act_view_as_operator_public_key.setChecked(True)
if self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE:
self.act_view_as_voting_private_key.setChecked(True)
else:
self.act_view_as_voting_public_address.setChecked(True)
if reset_key_view_type:
self.btnShowOwnerPrivateKey.setChecked(False)
self.btnShowOperatorPrivateKey.setChecked(False)
self.btnShowVotingPrivateKey.setChecked(False)
self.chbRoleOwner.setChecked(self.masternode.dmn_user_roles & DMN_ROLE_OWNER)
self.chbRoleOperator.setChecked(self.masternode.dmn_user_roles & DMN_ROLE_OPERATOR)
self.chbRoleVoting.setChecked(self.masternode.dmn_user_roles & DMN_ROLE_VOTING)
self.edtName.setText(self.masternode.name)
self.edtIP.setText(self.masternode.ip)
self.edtPort.setText(self.masternode.port)
self.edtCollateralAddress.setText(self.masternode.collateral_address)
self.edtCollateralPath.setText(self.masternode.collateral_bip32_path)
self.edtCollateralTxHash.setText(self.masternode.collateral_tx)
self.edtCollateralTxIndex.setText(self.masternode.collateral_tx_index)
self.edtDMNTxHash.setText(self.masternode.dmn_tx_hash)
self.edtOwnerKey.setText(self.get_owner_key_to_display())
self.edtVotingKey.setText(self.get_voting_key_to_display())
self.edtOperatorKey.setText(self.get_operator_key_to_display())
self.updating_ui = False
self.set_buttons_height()
else:
for e in self.findChildren(QLineEdit):
e.setText('')
self.update_ui_controls_state()
def get_owner_key_to_display(self) -> str:
ret = ''
if self.masternode:
if self.edit_mode:
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
ret = self.masternode.dmn_owner_private_key
else:
ret = self.masternode.dmn_owner_address
else:
try:
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
if self.act_view_as_owner_private_key.isChecked():
ret = self.masternode.dmn_owner_private_key
elif self.act_view_as_owner_public_address.isChecked():
if self.masternode.dmn_owner_private_key:
ret = dash_utils.wif_privkey_to_address(self.masternode.dmn_owner_private_key,
self.app_config.dash_network)
elif self.act_view_as_owner_public_key.isChecked():
if self.masternode.dmn_owner_private_key:
ret = dash_utils.wif_privkey_to_pubkey(self.masternode.dmn_owner_private_key)
elif self.act_view_as_owner_public_key_hash.isChecked():
if self.masternode.dmn_owner_private_key:
pubkey = dash_utils.wif_privkey_to_pubkey(self.masternode.dmn_owner_private_key)
pubkey_bin = bytes.fromhex(pubkey)
pub_hash = bitcoin.bin_hash160(pubkey_bin)
ret = pub_hash.hex()
else:
ret = '???'
else:
if self.act_view_as_owner_public_address.isChecked():
ret = self.masternode.dmn_owner_address
elif self.act_view_as_owner_public_key_hash.isChecked():
ret = self.masternode.get_dmn_owner_pubkey_hash()
else:
ret = '???'
except Exception as e:
msg = str(e)
if not msg:
msg = 'Key conversion error.'
WndUtils.error_msg(msg)
return ret
def get_voting_key_to_display(self) -> str:
ret = ''
if self.masternode:
if self.edit_mode:
if self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE:
ret = self.masternode.dmn_voting_private_key
else:
ret = self.masternode.dmn_voting_address
else:
try:
if self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE:
if self.act_view_as_voting_private_key.isChecked():
ret = self.masternode.dmn_voting_private_key
elif self.act_view_as_voting_public_address.isChecked():
if self.masternode.dmn_voting_private_key:
ret = dash_utils.wif_privkey_to_address(self.masternode.dmn_voting_private_key,
self.app_config.dash_network)
elif self.act_view_as_voting_public_key.isChecked():
if self.masternode.dmn_voting_private_key:
ret = dash_utils.wif_privkey_to_pubkey(self.masternode.dmn_voting_private_key)
elif self.act_view_as_voting_public_key_hash.isChecked():
if self.masternode.dmn_voting_private_key:
pubkey = dash_utils.wif_privkey_to_pubkey(self.masternode.dmn_voting_private_key)
pubkey_bin = bytes.fromhex(pubkey)
pub_hash = bitcoin.bin_hash160(pubkey_bin)
ret = pub_hash.hex()
else:
ret = '???'
else:
if self.act_view_as_voting_public_address.isChecked():
ret = self.masternode.dmn_voting_address
elif self.act_view_as_voting_public_key_hash.isChecked():
ret = self.masternode.get_dmn_voting_pubkey_hash()
else:
ret = '???'
except Exception as e:
msg = str(e)
if not msg:
msg = 'Key conversion error.'
WndUtils.error_msg(msg)
return ret
def get_operator_key_to_display(self) -> str:
ret = ''
if self.masternode:
if self.edit_mode:
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
ret = self.masternode.dmn_operator_private_key
else:
ret = self.masternode.dmn_operator_public_key
else:
try:
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
if self.act_view_as_operator_private_key.isChecked():
ret = self.masternode.dmn_operator_private_key
elif self.act_view_as_operator_public_key.isChecked():
ret = self.masternode.get_dmn_operator_pubkey()
else:
ret = '???'
else:
if self.act_view_as_operator_public_key.isChecked():
ret = self.masternode.dmn_operator_public_key
else:
ret = '???'
except Exception as e:
msg = str(e)
if not msg:
msg = 'Key conversion error.'
WndUtils.error_msg(msg)
return ret
@pyqtSlot(str)
def on_lblOwnerKey_linkActivated(self, link):
if self.masternode and self.edit_mode:
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
self.masternode.dmn_owner_key_type = InputKeyType.PUBLIC
self.edtOwnerKey.setText(self.masternode.dmn_owner_address)
self.act_view_as_owner_private_key.setChecked(True)
else:
self.masternode.dmn_owner_key_type = InputKeyType.PRIVATE
self.edtOwnerKey.setText(self.masternode.dmn_owner_private_key)
self.act_view_as_owner_public_address.setChecked(True)
self.on_mn_data_modified()
self.update_ui_controls_state()
@pyqtSlot(str)
def on_lblOperatorKey_linkActivated(self, link):
if self.masternode and self.edit_mode:
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
self.masternode.dmn_operator_key_type = InputKeyType.PUBLIC
self.edtOperatorKey.setText(self.masternode.dmn_operator_public_key)
self.act_view_as_operator_private_key.setChecked(True)
else:
self.masternode.dmn_operator_key_type = InputKeyType.PRIVATE
self.edtOperatorKey.setText(self.masternode.dmn_operator_private_key)
self.act_view_as_operator_public_key.setChecked(True)
self.on_mn_data_modified()
self.update_ui_controls_state()
@pyqtSlot(str)
def on_lblVotingKey_linkActivated(self, link):
if self.masternode and self.edit_mode:
if self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE:
self.masternode.dmn_voting_key_type = InputKeyType.PUBLIC
self.edtVotingKey.setText(self.masternode.dmn_voting_address)
self.act_view_as_voting_private_key.setChecked(True)
else:
self.masternode.dmn_voting_key_type = InputKeyType.PRIVATE
self.edtVotingKey.setText(self.masternode.dmn_voting_private_key)
self.act_view_as_voting_public_address.setChecked(True)
self.on_mn_data_modified()
self.update_ui_controls_state()
@pyqtSlot(str)
def on_lblOwnerKey_linkHovered(self, link):
if link == 'address':
tt = 'Change input type to Dash address'
else:
tt = 'Change input type to private key'
self.lblOwnerKey.setToolTip(tt)
@pyqtSlot(str)
def on_lblOperatorKey_linkHovered(self, link):
if link == 'pub':
tt = 'Change input type to public key'
else:
tt = 'Change input type to private key'
self.lblOperatorKey.setToolTip(tt)
@pyqtSlot(str)
def on_lblVotingKey_linkHovered(self, link):
if link == 'address':
tt = 'Change input type to Dash address'
else:
tt = 'Change input type to private key'
self.lblVotingKey.setToolTip(tt)
def get_max_left_label_width(self):
doc = QTextDocument(self)
doc.setDocumentMargin(0)
doc.setDefaultFont(self.lblOwnerKey.font())
doc.setHtml('Test')
def get_lbl_text_width(lbl):
nonlocal doc
doc.setHtml(lbl.text())
return int(doc.size().width() + 5)
w = max(get_lbl_text_width(self.lblName),
get_lbl_text_width(self.lblIP),
get_lbl_text_width(self.lblCollateral),
get_lbl_text_width(self.lblCollateralTxHash),
get_lbl_text_width(self.lblDMNTxHash),
get_lbl_text_width(self.lblOwnerKey),
get_lbl_text_width(self.lblOperatorKey),
get_lbl_text_width(self.lblVotingKey))
return w
def set_left_label_width(self, width):
if self.lblName.width() != width:
self.label_width_changed.emit(width)
self.lblUserRole.setFixedWidth(width)
self.lblName.setFixedWidth(width)
self.lblIP.setFixedWidth(width)
self.lblCollateral.setFixedWidth(width)
self.lblCollateralTxHash.setFixedWidth(width)
self.lblDMNTxHash.setFixedWidth(width)
self.lblOwnerKey.setFixedWidth(width)
self.lblOperatorKey.setFixedWidth(width)
self.lblVotingKey.setFixedWidth(width)
def set_masternode(self, src_masternode: Optional[MasternodeConfig]):
self.updating_ui = True
if src_masternode:
self.masternode.copy_from(src_masternode)
self.masternode.modified = False
self.validate_keys()
self.masternode_data_to_ui(True)
def get_masternode_data(self, dest_masternode: MasternodeConfig):
"""Copies masternode data from the internal MasternodeConfig object to dest_masternode.
Used to get modified data and pass it to the global MasternodeConfig object.
"""
dest_masternode.copy_from(self.masternode)
def set_edit_mode(self, enabled: bool):
if self.edit_mode != enabled:
self.edit_mode = enabled
            self.masternode_data_to_ui(enabled)
if not self.edit_mode:
self.lblOwnerKey.setToolTip('')
self.lblOperatorKey.setToolTip('')
self.lblVotingKey.setToolTip('')
def is_modified(self) -> bool:
        return bool(self.masternode and self.masternode.modified)
def on_mn_data_modified(self):
if self.masternode and not self.updating_ui:
self.masternode.set_modified()
self.data_changed.emit(self.masternode)
@pyqtSlot(bool)
def on_chbRoleOwner_toggled(self, checked):
if not self.updating_ui:
if checked:
self.masternode.dmn_user_roles |= DMN_ROLE_OWNER
else:
self.masternode.dmn_user_roles &= ~DMN_ROLE_OWNER
self.update_ui_controls_state()
self.on_mn_data_modified()
self.role_modified.emit()
@pyqtSlot(bool)
def on_chbRoleOperator_toggled(self, checked):
if not self.updating_ui:
if checked:
self.masternode.dmn_user_roles |= DMN_ROLE_OPERATOR
else:
self.masternode.dmn_user_roles &= ~DMN_ROLE_OPERATOR
self.update_ui_controls_state()
self.on_mn_data_modified()
self.role_modified.emit()
@pyqtSlot(bool)
def on_chbRoleVoting_toggled(self, checked):
if not self.updating_ui:
if checked:
self.masternode.dmn_user_roles |= DMN_ROLE_VOTING
else:
self.masternode.dmn_user_roles &= ~DMN_ROLE_VOTING
self.update_ui_controls_state()
self.on_mn_data_modified()
self.role_modified.emit()
@pyqtSlot(str)
def on_edtName_textEdited(self, text):
if self.masternode and not self.updating_ui:
self.on_mn_data_modified()
self.masternode.name = text.strip()
self.name_modified.emit(self.masternode, text)
@pyqtSlot(str)
def on_edtIP_textEdited(self, text):
if self.masternode and not self.updating_ui:
self.on_mn_data_modified()
self.masternode.ip = text.strip()
@pyqtSlot(str)
def on_edtPort_textEdited(self, text):
if self.masternode and not self.updating_ui:
self.on_mn_data_modified()
self.masternode.port = text.strip()
@pyqtSlot(str)
def on_edtCollateralAddress_textEdited(self, text):
if self.masternode and not self.updating_ui:
update_ui = ((not text) != (not self.masternode.collateral_address))
self.on_mn_data_modified()
self.masternode.collateral_address = text.strip()
if update_ui:
self.update_ui_controls_state()
@pyqtSlot(str)
def on_edtCollateralPath_textEdited(self, text):
if self.masternode and not self.updating_ui:
update_ui = ((not text) != (not self.masternode.collateral_bip32_path))
self.on_mn_data_modified()
self.masternode.collateral_bip32_path = text.strip()
if update_ui:
self.update_ui_controls_state()
@pyqtSlot(str)
def on_edtCollateralTxHash_textEdited(self, text):
if self.masternode and not self.updating_ui:
update_ui = ((not text) != (not self.masternode.collateral_tx))
self.on_mn_data_modified()
self.masternode.collateral_tx = text.strip()
if update_ui:
self.update_ui_controls_state()
@pyqtSlot(str)
def on_edtCollateralTxIndex_textEdited(self, text):
if self.masternode and not self.updating_ui:
self.on_mn_data_modified()
self.masternode.collateral_tx_index = text.strip()
@pyqtSlot(str)
def on_edtDMNTxHash_textEdited(self, text):
if self.masternode and not self.updating_ui:
self.on_mn_data_modified()
self.masternode.dmn_tx_hash = text.strip()
@pyqtSlot(bool)
def on_btnGetMNDataByIP_clicked(self, _):
if self.masternode and not self.updating_ui:
if not (self.masternode.ip and self.masternode.port):
WndUtils.error_msg('Enter the masternode IP address and TCP port number.')
return
cache_max_age = 500
self.dashd_intf.get_masternodelist('json', data_max_age=cache_max_age, protx_data_max_age=cache_max_age)
mn = self.masternode
updated_fields = []
ip_port = mn.ip + ':' + mn.port
mn_info = self.dashd_intf.masternodes_by_ip_port.get(ip_port)
modified = False
keys_modified = []
if mn_info:
if mn_info.protx:
protx = mn_info.protx
if mn.collateral_address != protx.collateral_address:
updated_fields.append('collateral address')
# self.edtCollateralAddress.setText(protx.collateral_address)
mn.collateral_address = protx.collateral_address
modified = True
if mn.dmn_tx_hash != protx.protx_hash:
updated_fields.append('protx hash')
# self.edtDMNTxHash.setText(protx.protx_hash)
self.masternode.dmn_tx_hash = protx.protx_hash
modified = True
if mn.collateral_tx != protx.collateral_hash or str(mn.collateral_tx_index) != \
str(protx.collateral_index):
updated_fields.append('collateral hash/index')
# self.edtCollateralTxHash.setVisible(protx.collateral_hash)
mn.collateral_tx = protx.collateral_hash
# self.edtCollateralTxIndex.setText(str(protx.collateral_index))
mn.collateral_tx_index = str(protx.collateral_index)
modified = True
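                    # For each role the user holds, fall back to the public
                    # key/address published in the protx data when no key is
                    # configured locally; the warning emitted below reminds the
                    # user that private keys are needed for some features.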
if mn.dmn_user_roles & DMN_ROLE_OWNER > 0 and \
((not mn.dmn_owner_private_key and mn.dmn_owner_key_type == InputKeyType.PRIVATE) or
(not mn.dmn_owner_address and mn.dmn_owner_key_type == InputKeyType.PUBLIC)):
mn.dmn_owner_key_type = InputKeyType.PUBLIC
mn.dmn_owner_address = protx.owner_address
modified = True
keys_modified.append('owner')
if mn.dmn_user_roles & DMN_ROLE_OPERATOR > 0 and \
((not mn.dmn_operator_private_key and mn.dmn_operator_key_type == InputKeyType.PRIVATE) or
(not mn.dmn_operator_public_key and mn.dmn_operator_key_type == InputKeyType.PUBLIC)):
mn.dmn_operator_key_type = InputKeyType.PUBLIC
mn.dmn_operator_public_key = protx.pubkey_operator
modified = True
keys_modified.append('operator')
if mn.dmn_user_roles & DMN_ROLE_VOTING > 0 and \
((not mn.dmn_voting_private_key and mn.dmn_voting_key_type == InputKeyType.PRIVATE) or
(not mn.dmn_voting_address and mn.dmn_voting_key_type == InputKeyType.PUBLIC)):
mn.dmn_voting_key_type = InputKeyType.PUBLIC
mn.dmn_voting_address = protx.voting_address
modified = True
keys_modified.append('voting')
if modified:
self.masternode_data_to_ui()
self.on_mn_data_modified()
self.app_text_message_sent.emit(
DispMessage.OTHER_1, 'The following mn data has been set: ' + ', '.join(updated_fields),
AppTextMessageType.INFO)
if keys_modified:
self.app_text_message_sent.emit(
DispMessage.OTHER_2,
'We\'ve set <b>public</b> keys for ' + ', '.join(keys_modified) +
'. You need to enter <b>private</b> keys instead, to have access to some of the features.',
AppTextMessageType.WARN)
else:
WndUtils.warn_msg(
'Couldn\'t find this masternode in the list of registered deterministic masternodes.')
@pyqtSlot(bool)
def on_btnBip32PathToAddress_clicked(self, checked):
if self.masternode.collateral_bip32_path:
if self.hw_session.connect_hardware_wallet():
try:
addr = hw_intf.get_address(self.hw_session, self.masternode.collateral_bip32_path,
show_display=True)
if addr:
self.masternode.collateral_address = addr.strip()
self.edtCollateralAddress.setText(addr.strip())
self.on_mn_data_modified()
self.update_ui_controls_state()
except CancelException:
pass
@pyqtSlot(bool)
def on_btnShowCollateralPathAddress_clicked(self, checked):
if self.masternode.collateral_bip32_path:
try:
if self.hw_session.connect_hardware_wallet():
                    hw_intf.get_address(
                        self.hw_session, self.masternode.collateral_bip32_path, True,
                        f'Displaying address for the BIP32 path <b>{self.masternode.collateral_bip32_path}</b>.'
                        f'<br>Click the confirmation button on your device.')
except CancelException:
pass
@pyqtSlot(str)
def on_edtOwnerKey_textEdited(self, text):
if self.masternode and not self.updating_ui:
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
self.masternode.dmn_owner_private_key = text.strip()
else:
self.masternode.dmn_owner_address = text.strip()
self.validate_keys()
self.update_dynamic_labels()
self.on_mn_data_modified()
@pyqtSlot(str)
def on_edtOperatorKey_textEdited(self, text):
if self.masternode and not self.updating_ui:
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
self.masternode.dmn_operator_private_key = text.strip()
else:
self.masternode.dmn_operator_public_key = text.strip()
self.validate_keys()
self.update_dynamic_labels()
self.on_mn_data_modified()
@pyqtSlot(str)
def on_edtVotingKey_textEdited(self, text):
if self.masternode and not self.updating_ui:
if self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE:
self.masternode.dmn_voting_private_key = text.strip()
else:
self.masternode.dmn_voting_address = text.strip()
self.validate_keys()
self.update_dynamic_labels()
self.on_mn_data_modified()
def validate_keys(self):
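        # Owner and voting keys are ECDSA keys (WIF private key or Dash address),
        # while the operator key is a BLS key, hence the different validators below.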
self.owner_key_invalid = False
self.operator_key_invalid = False
self.voting_key_invalid = False
if self.masternode:
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
if self.masternode.dmn_owner_private_key:
self.owner_key_invalid = not dash_utils.validate_wif_privkey(self.masternode.dmn_owner_private_key,
self.app_config.dash_network)
else:
if self.masternode.dmn_owner_address:
self.owner_key_invalid = not dash_utils.validate_address(self.masternode.dmn_owner_address,
self.app_config.dash_network)
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
if self.masternode.dmn_operator_private_key:
self.operator_key_invalid = not dash_utils.validate_bls_privkey(
self.masternode.dmn_operator_private_key)
else:
if self.masternode.dmn_operator_public_key:
self.operator_key_invalid = not dash_utils.validate_bls_pubkey(
self.masternode.dmn_operator_public_key)
if self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE:
if self.masternode.dmn_voting_private_key:
self.voting_key_invalid = not dash_utils.validate_wif_privkey(self.masternode.dmn_voting_private_key,
self.app_config.dash_network)
else:
if self.masternode.dmn_voting_address:
self.voting_key_invalid = not dash_utils.validate_address(self.masternode.dmn_voting_address,
self.app_config.dash_network)
def generate_priv_key(self, pk_type: str, edit_control: QLineEdit, compressed: bool):
if edit_control.text():
if WndUtils.query_dlg(
f'This will overwrite the current {pk_type} private key value. Do you really want to proceed?',
buttons=QMessageBox.Yes | QMessageBox.Cancel,
default_button=QMessageBox.Yes, icon=QMessageBox.Warning) != QMessageBox.Yes:
return None
if pk_type == 'operator':
pk = dash_utils.generate_bls_privkey()
else:
pk = dash_utils.generate_wif_privkey(self.app_config.dash_network, compressed=compressed)
edit_control.setText(pk)
return pk
@pyqtSlot(bool)
def on_btnGenerateOwnerPrivateKey_clicked(self, checked):
if self.masternode:
pk = self.generate_priv_key('owner', self.edtOwnerKey, True)
if pk:
self.masternode.dmn_owner_private_key = pk
self.btnShowOwnerPrivateKey.setChecked(True)
self.on_mn_data_modified()
@pyqtSlot(bool)
def on_btnGenerateOperatorPrivateKey_clicked(self, checked):
if self.masternode:
pk = self.generate_priv_key('operator', self.edtOperatorKey, True)
if pk:
self.masternode.dmn_operator_private_key = pk
self.btnShowOperatorPrivateKey.setChecked(True)
self.on_mn_data_modified()
@pyqtSlot(bool)
def on_btnGenerateVotingPrivateKey_clicked(self, checked):
if self.masternode:
pk = self.generate_priv_key('voting', self.edtVotingKey, True)
if pk:
self.masternode.dmn_voting_private_key = pk
self.btnShowVotingPrivateKey.setChecked(True)
self.on_mn_data_modified()
@pyqtSlot(bool)
def on_btnShowOwnerPrivateKey_toggled(self, checked):
self.edtOwnerKey.setEchoMode(QLineEdit.Normal if checked else QLineEdit.Password)
self.update_key_controls_state()
@pyqtSlot(bool)
def on_btnShowOperatorPrivateKey_toggled(self, checked):
self.edtOperatorKey.setEchoMode(QLineEdit.Normal if checked else QLineEdit.Password)
self.update_key_controls_state()
@pyqtSlot(bool)
def on_btnShowVotingPrivateKey_toggled(self, checked):
self.edtVotingKey.setEchoMode(QLineEdit.Normal if checked else QLineEdit.Password)
self.update_key_controls_state()
@pyqtSlot(bool)
def on_btnLocateCollateral_clicked(self, checked):
try:
break_scanning = False
if not self.hw_session.connect_hardware_wallet():
return
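            # do_break_scanning is used as force_close_dlg_callback below: it flags
            # the scan to stop and returns False (don't force-close the dialog);
            # check_break_scanning is polled by the scanning thread.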
def do_break_scanning():
nonlocal break_scanning
break_scanning = True
return False
def check_break_scanning():
nonlocal break_scanning
return break_scanning
def apply_utxo(utxo):
self.masternode.collateral_address = utxo.address
self.edtCollateralAddress.setText(utxo.address)
self.masternode.collateral_bip32_path = utxo.bip32_path
self.edtCollateralPath.setText(utxo.bip32_path)
self.masternode.collateral_tx = utxo.txid
self.edtCollateralTxHash.setText(utxo.txid)
self.masternode.collateral_tx_index = str(utxo.output_index)
self.edtCollateralTxIndex.setText(str(utxo.output_index))
self.update_ui_controls_state()
self.on_mn_data_modified()
bip44_wallet = Bip44Wallet(self.app_config.hw_coin_name, self.hw_session,
self.app_config.db_intf, self.dashd_intf, self.app_config.dash_network)
utxos = WndUtils.run_thread_dialog(
self.get_collateral_tx_address_thread,
(bip44_wallet, check_break_scanning, self.edtCollateralAddress.text()),
True, force_close_dlg_callback=do_break_scanning)
if utxos:
if len(utxos) == 1 and \
(not self.masternode.collateral_address or
(utxos[0].address_obj and self.masternode.collateral_address == utxos[0].address_obj.address)) \
and (not self.masternode.collateral_tx or utxos[0].txid == self.masternode.collateral_tx):
apply_utxo(utxos[0])
return
dlg = ListCollateralTxsDlg(self, self.masternode, self.app_config, False, utxos)
if dlg.exec_():
utxo = dlg.get_selected_utxo()
if utxo:
apply_utxo(utxo)
else:
if utxos is not None:
WndUtils.warn_msg('Couldn\'t find any 1000 Dash UTXO in your wallet.')
except Exception as e:
WndUtils.error_msg(str(e))
def get_collateral_tx_address_thread(self, ctrl: CtrlObject,
bip44_wallet: Bip44Wallet,
check_break_scanning_ext: Callable[[], bool],
src_address: str):
utxos = []
break_scanning = False
txes_cnt = 0
msg = 'Scanning wallet transactions for 1000 Dash UTXOs.<br>' \
              'This may take a while (<a href="break">break</a>)...'
ctrl.dlg_config(dlg_title="Scanning wallet", show_progress_bar=False)
ctrl.display_msg(msg)
def check_break_scanning():
nonlocal break_scanning
if break_scanning:
# stop the scanning process if the dialog finishes or the address/bip32path has been found
raise BreakFetchTransactionsException()
if check_break_scanning_ext is not None and check_break_scanning_ext():
raise BreakFetchTransactionsException()
def fetch_txes_feedback(tx_cnt: int):
nonlocal msg, txes_cnt
txes_cnt += tx_cnt
ctrl.display_msg(msg + '<br><br>' + 'Number of transactions fetched so far: ' + str(txes_cnt))
def on_msg_link_activated(link: str):
nonlocal break_scanning
if link == 'break':
break_scanning = True
lbl = ctrl.get_msg_label_control()
if lbl:
            def setup_label():
lbl.setOpenExternalLinks(False)
lbl.setTextInteractionFlags(lbl.textInteractionFlags() & ~Qt.TextSelectableByMouse)
lbl.linkActivated.connect(on_msg_link_activated)
lbl.repaint()
            WndUtils.call_in_main_thread(setup_label)
try:
bip44_wallet.on_fetch_account_txs_feedback = fetch_txes_feedback
if src_address:
# limit transactions only to the specific address
# addr = bip44_wallet.get_address_item(src_address, False)
addr = bip44_wallet.scan_wallet_for_address(src_address, check_break_scanning,
feedback_fun=fetch_txes_feedback)
if addr and addr.tree_id == bip44_wallet.get_tree_id():
bip44_wallet.fetch_addresses_txs([addr], check_break_scanning)
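                    # 1e11 duffs (satoshis) == 1000 Dash, the masternode collateral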
for utxo in bip44_wallet.list_utxos_for_addresses([addr.id], filter_by_satoshis=int(1e11)):
utxos.append(utxo)
if not utxos:
bip44_wallet.fetch_all_accounts_txs(check_break_scanning)
for utxo in bip44_wallet.list_utxos_for_account(account_id=None, filter_by_satoshis=int(1e11)):
utxos.append(utxo)
except BreakFetchTransactionsException:
return None
return utxos
def on_owner_view_key_type_changed(self):
self.btnShowOwnerPrivateKey.setChecked(True)
self.update_key_controls_state()
self.edtOwnerKey.setText(self.get_owner_key_to_display())
def on_voting_view_key_type_changed(self):
self.btnShowVotingPrivateKey.setChecked(True)
self.update_key_controls_state()
self.edtVotingKey.setText(self.get_voting_key_to_display())
def on_operator_view_key_type_changed(self):
self.btnShowOperatorPrivateKey.setChecked(True)
self.update_key_controls_state()
self.edtOperatorKey.setText(self.get_operator_key_to_display())
@pyqtSlot()
def on_btnCopyOwnerKey_clicked(self):
cl = QApplication.clipboard()
cl.setText(self.edtOwnerKey.text())
@pyqtSlot()
def on_btnCopyVotingKey_clicked(self):
cl = QApplication.clipboard()
cl.setText(self.edtVotingKey.text())
@pyqtSlot()
def on_btnCopyOperatorKey_clicked(self):
cl = QApplication.clipboard()
cl.setText(self.edtOperatorKey.text())
@pyqtSlot()
def on_btnCopyProtxHash_clicked(self):
cl = QApplication.clipboard()
cl.setText(self.edtDMNTxHash.text())
# -*- coding: utf-8 -*-
# This coding header is significant for tests, as the debug view parses files
# searching for such a header to decode the source file content
from __future__ import unicode_literals
import importlib
import inspect
import os
import re
import shutil
import sys
from tempfile import NamedTemporaryFile, mkdtemp, mkstemp
from unittest import skipIf
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.template.base import TemplateDoesNotExist
from django.test import TestCase, RequestFactory, override_settings
from django.test.utils import (
setup_test_template_loader, restore_template_loaders)
from django.utils.encoding import force_text, force_bytes
from django.utils import six
from django.views.debug import ExceptionReporter
from .. import BrokenException, except_args
from ..views import (sensitive_view, non_sensitive_view, paranoid_view,
custom_exception_reporter_filter_view, sensitive_method_view,
sensitive_args_function_caller, sensitive_kwargs_function_caller,
multivalue_dict_key_error)
@override_settings(DEBUG=True, TEMPLATE_DEBUG=True)
class DebugViewTests(TestCase):
urls = "view_tests.urls"
def test_files(self):
response = self.client.get('/raises/')
self.assertEqual(response.status_code, 500)
data = {
'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'),
}
response = self.client.post('/raises/', data)
self.assertContains(response, 'file_data.txt', status_code=500)
self.assertNotContains(response, 'haha', status_code=500)
def test_403(self):
# Ensure no 403.html template exists to test the default case.
setup_test_template_loader({})
try:
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
finally:
restore_template_loaders()
def test_403_template(self):
# Set up a test 403.html template.
setup_test_template_loader(
{'403.html': 'This is a test template for a 403 Forbidden error.'}
)
try:
response = self.client.get('/raises403/')
self.assertContains(response, 'test template', status_code=403)
finally:
restore_template_loaders()
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_view_exceptions(self):
for n in range(len(except_args)):
self.assertRaises(BrokenException, self.client.get,
reverse('view_exception', args=(n,)))
def test_non_l10ned_numeric_ids(self):
"""
        Numeric IDs and the line numbers in fancy traceback context blocks
        shouldn't be localized.
"""
with self.settings(DEBUG=True, USE_L10N=True):
response = self.client.get('/raises500/')
# We look for a HTML fragment of the form
# '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"'
self.assertContains(response, '<div class="context" id="', status_code=500)
match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content)
self.assertFalse(match is None)
id_repr = match.group('id')
self.assertFalse(re.search(b'[^c\d]', id_repr),
"Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." % id_repr)
def test_template_exceptions(self):
for n in range(len(except_args)):
try:
self.client.get(reverse('template_exception', args=(n,)))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertFalse(raising_loc.find('raise BrokenException') == -1,
"Failed to find 'raise BrokenException' in last frame of traceback, instead found: %s" %
raising_loc)
def test_template_loader_postmortem(self):
"""Tests for not existing file"""
template_name = "notfound.html"
with NamedTemporaryFile(prefix=template_name) as tempfile:
tempdir = os.path.dirname(tempfile.name)
template_path = os.path.join(tempdir, template_name)
with override_settings(TEMPLATE_DIRS=(tempdir,)):
response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name}))
self.assertContains(response, "%s (File does not exist)" % template_path, status_code=500, count=1)
@skipIf(sys.platform == "win32", "Python on Windows doesn't have working os.chmod() and os.access().")
def test_template_loader_postmortem_notreadable(self):
"""Tests for not readable file"""
with NamedTemporaryFile() as tempfile:
template_name = tempfile.name
tempdir = os.path.dirname(tempfile.name)
template_path = os.path.join(tempdir, template_name)
os.chmod(template_path, 0o0222)
with override_settings(TEMPLATE_DIRS=(tempdir,)):
response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name}))
self.assertContains(response, "%s (File is not readable)" % template_path, status_code=500, count=1)
def test_template_loader_postmortem_notafile(self):
"""Tests for not being a file"""
try:
template_path = mkdtemp()
template_name = os.path.basename(template_path)
tempdir = os.path.dirname(template_path)
with override_settings(TEMPLATE_DIRS=(tempdir,)):
response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name}))
self.assertContains(response, "%s (Not a file)" % template_path, status_code=500, count=1)
finally:
shutil.rmtree(template_path)
def test_no_template_source_loaders(self):
"""
Make sure if you don't specify a template, the debug view doesn't blow up.
"""
self.assertRaises(TemplateDoesNotExist, self.client.get, '/render_no_template/')
@override_settings(ROOT_URLCONF='view_tests.default_urls')
def test_default_urlconf_template(self):
"""
        Make sure that the default URLconf template is shown instead of
        the technical 404 page, if the user has not altered their
        URLconf yet.
"""
response = self.client.get('/')
self.assertContains(
response,
"<h2>Congratulations on your first Django-powered page.</h2>"
)
@override_settings(ROOT_URLCONF='view_tests.regression_21530_urls')
def test_regression_21530(self):
"""
Regression test for bug #21530.
If the admin app include is replaced with exactly one url
pattern, then the technical 404 template should be displayed.
The bug here was that an AttributeError caused a 500 response.
"""
response = self.client.get('/')
self.assertContains(
response,
"Page not found <span>(404)</span>",
status_code=404
)
class ExceptionReporterTests(TestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ValueError</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_eol_support(self):
"""Test that the ExceptionReporter supports Unix, Windows and Macintosh EOL markers"""
LINES = list('print %d' % i for i in range(1, 6))
reporter = ExceptionReporter(None, None, None, None)
for newline in ['\n', '\r\n', '\r']:
fd, filename = mkstemp(text=False)
os.write(fd, force_bytes(newline.join(LINES) + newline))
os.close(fd)
try:
self.assertEqual(
reporter._get_lines_from_file(filename, 3, 2),
(1, LINES[1:3], LINES[3], LINES[4:])
)
finally:
os.unlink(filename)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
@skipIf(six.PY2, 'Bug manifests on PY3 only')
def test_unfrozen_importlib(self):
"""
importlib is not a frozen app, but its loader thinks it's frozen which
results in an ImportError on Python 3. Refs #21443.
"""
try:
request = self.rf.get('/test_view/')
importlib.import_module('abc.def.invalid.name')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ImportError at /test_view/</h1>', html)
class PlainTextReportTests(TestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError at /test_view/', text)
self.assertIn("Can't find my keys", text)
self.assertIn('Request Method:', text)
self.assertIn('Request URL:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request information:', text)
self.assertNotIn('Request data not supplied', text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError', text)
self.assertIn("Can't find my keys", text)
self.assertNotIn('Request Method:', text)
self.assertNotIn('Request URL:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request data not supplied', text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
reporter.get_traceback_text()
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
reporter.get_traceback_text()
class ExceptionReportTestMixin(object):
# Mixin used in the ExceptionReporterFilterTests and
# AjaxResponseExceptionReporterFilter tests below
breakfast_data = {'sausage-key': 'sausage-value',
'baked-beans-key': 'baked-beans-value',
'hash-brown-key': 'hash-brown-value',
'bacon-key': 'bacon-value'}
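    # The views under test treat the 'sausage' and 'bacon' parameters as
    # sensitive; 'baked-beans' and 'hash-brown' are not (see verify_safe_response).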
def verify_unsafe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
        Asserts that potentially sensitive info is displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
        Asserts that certain sensitive info is not displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, 'baked-beans-value', status_code=500)
self.assertContains(response, 'hash-brown-value', status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, 'sausage-value', status_code=500)
self.assertNotContains(response, 'bacon-value', status_code=500)
def verify_paranoid_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that no variables or POST parameters are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertNotContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
        Asserts that potentially sensitive info is displayed in the email report.
"""
with self.settings(ADMINS=(('Admin', 'admin@fattie-breakie.com'),)):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = force_text(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = force_text(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, body_plain)
self.assertIn(v, body_plain)
self.assertIn(k, body_html)
self.assertIn(v, body_html)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
        Asserts that certain sensitive info is not displayed in the email report.
"""
with self.settings(ADMINS=(('Admin', 'admin@fattie-breakie.com'),)):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = force_text(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = force_text(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertNotIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body_plain)
# Non-sensitive POST parameters' values are shown.
self.assertIn('baked-beans-value', body_plain)
self.assertIn('hash-brown-value', body_plain)
self.assertIn('baked-beans-value', body_html)
self.assertIn('hash-brown-value', body_html)
# Sensitive POST parameters' values are not shown.
self.assertNotIn('sausage-value', body_plain)
self.assertNotIn('bacon-value', body_plain)
self.assertNotIn('sausage-value', body_html)
self.assertNotIn('bacon-value', body_html)
def verify_paranoid_email(self, view):
"""
Asserts that no variables or POST parameters are displayed in the email report.
"""
with self.settings(ADMINS=(('Admin', 'admin@fattie-breakie.com'),)):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body = force_text(email.body)
self.assertNotIn('cooked_eggs', body)
self.assertNotIn('scrambled', body)
self.assertNotIn('sauce', body)
self.assertNotIn('worcestershire', body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body)
# No POST parameters' values are shown.
self.assertNotIn(v, body)
class ExceptionReporterFilterTests(TestCase, ExceptionReportTestMixin):
"""
Ensure that sensitive information can be filtered out of error reports.
Refs #14614.
"""
urls = 'view_tests.urls'
rf = RequestFactory()
def test_non_sensitive_request(self):
"""
        Ensure that everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Ensure that sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_paranoid_request(self):
"""
Ensure that no POST parameters and frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_multivalue_dict_key_error(self):
"""
#21098 -- Ensure that sensitive POST parameters cannot be seen in the
        error reports if request.POST['nonexistent_key'] throws an error.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(multivalue_dict_key_error)
self.verify_unsafe_email(multivalue_dict_key_error)
with self.settings(DEBUG=False):
self.verify_safe_response(multivalue_dict_key_error)
self.verify_safe_email(multivalue_dict_key_error)
def test_custom_exception_reporter_filter(self):
"""
Ensure that it's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
Ensure that the sensitive_variables decorator works with object
methods.
Refs #18379.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_method_view,
check_for_POST_params=False)
self.verify_unsafe_email(sensitive_method_view,
check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_method_view,
check_for_POST_params=False)
self.verify_safe_email(sensitive_method_view,
check_for_POST_params=False)
def test_sensitive_function_arguments(self):
"""
Ensure that sensitive variables don't leak in the sensitive_variables
decorator's frame, when those variables are passed as arguments to the
decorated function.
Refs #19453.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_args_function_caller)
self.verify_unsafe_email(sensitive_args_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False)
def test_sensitive_function_keyword_arguments(self):
"""
Ensure that sensitive variables don't leak in the sensitive_variables
decorator's frame, when those variables are passed as keyword arguments
to the decorated function.
Refs #19453.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_kwargs_function_caller)
self.verify_unsafe_email(sensitive_kwargs_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False)
def test_callable_settings(self):
"""
Callable settings should not be evaluated in the debug page (#21345).
"""
def callable_setting():
return "This should not be displayed"
with self.settings(DEBUG=True, FOOBAR=callable_setting):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_dict_setting_with_non_str_key(self):
"""
A dict setting containing a non-string key should not break the
debug page (#12744).
"""
with self.settings(DEBUG=True, FOOBAR={42: None}):
response = self.client.get('/raises500/')
self.assertContains(response, 'FOOBAR', status_code=500)
def test_sensitive_settings(self):
"""
The debug page should not show some sensitive settings
(password, secret key, ...).
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
with self.settings(DEBUG=True, **{setting: "should not be displayed"}):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
def test_settings_with_sensitive_keys(self):
"""
The debug page should filter out some sensitive information found in
dict settings.
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
FOOBAR = {
setting: "should not be displayed",
'recursive': {setting: "should not be displayed"},
}
with self.settings(DEBUG=True, FOOBAR=FOOBAR):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
class AjaxResponseExceptionReporterFilter(TestCase, ExceptionReportTestMixin):
"""
Ensure that sensitive information can be filtered out of error reports.
Here we specifically test the plain text 500 debug-only error page served
when it has been detected the request was sent by JS code. We don't check
for (non)existence of frames vars in the traceback information section of
the response content because we don't include them in these error pages.
Refs #14614.
"""
rf = RequestFactory(HTTP_X_REQUESTED_WITH='XMLHttpRequest')
def test_non_sensitive_request(self):
"""
        Ensure that request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Ensure that sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_paranoid_request(self):
"""
Ensure that no POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
Ensure that it's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view,
check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view,
check_for_vars=False)
# Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import logging
import re
import functools
# requests, apache2
import requests
# PyGithub, LGPL, Python library for Github API v3, pip install PyGithub
import github
from github import Github
# settings, , load and save settings, internal
import settings
# access_common, , things shared between different component access modules, internal
import access_common
# auth, , authenticate users, internal
import auth
# globalconf, share global arguments between modules, internal
import yotta.lib.globalconf as globalconf
# Constants
_github_url = 'https://api.github.com'
logger = logging.getLogger('access')
## NOTE
## It may be tempting to re-use resources (like Github instances) between
## functions below, however it must be possible to call these functions in
## parallel, so they must not share resources that are stateful and do not
## maintain their state in a threadsafe way
# Internal functions
def _userAuthedWithGithub():
return settings.getProperty('github', 'authtoken')
def _handleAuth(fn):
''' Decorator to re-try API calls after asking the user for authentication. '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
# if yotta is being run noninteractively, then we never retry, but we
# do call auth.authorizeUser, so that a login URL can be displayed:
interactive = globalconf.get('interactive')
if not interactive:
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 401:
auth.authorizeUser(provider='github', interactive=False)
raise
except github.BadCredentialsException:
logger.debug("github: bad credentials")
auth.authorizeUser(provider='github', interactive=False)
raise
except github.UnknownObjectException:
logger.debug("github: unknown object")
# some endpoints return 404 if the user doesn't have access:
if not _userAuthedWithGithub():
logger.info('failed to fetch Github object, try re-authing...')
auth.authorizeUser(provider='github', interactive=False)
raise
else:
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 401:
auth.authorizeUser(provider='github')
return fn(*args, **kwargs)
raise
except github.BadCredentialsException:
logger.debug("github: bad credentials")
auth.authorizeUser(provider='github')
logger.debug('trying with authtoken: %s', settings.getProperty('github', 'authtoken'))
return fn(*args, **kwargs)
except github.UnknownObjectException:
logger.debug("github: unknown object")
# some endpoints return 404 if the user doesn't have access, maybe
# it would be better to prompt for another username and password,
# and store multiple tokens that we can try for each request....
# but for now we assume that if the user is logged in then a 404
# really is a 404
if not _userAuthedWithGithub():
logger.info('failed to fetch Github object, re-trying with authentication...')
auth.authorizeUser(provider='github')
return fn(*args, **kwargs)
raise
return wrapped
@_handleAuth
def _getTags(repo):
''' return a dictionary of {tag: tarball_url}'''
logger.debug('get tags for %s', repo)
g = Github(settings.getProperty('github', 'authtoken'))
repo = g.get_repo(repo)
tags = repo.get_tags()
logger.debug('tags for %s: %s', repo, [t.name for t in tags])
return {t.name: t.tarball_url for t in tags}
def _tarballUrlForBranch(repo, branchname=None):
r = repo.url + u'/tarball'
if branchname:
r += '/' + branchname
return r
@_handleAuth
def _getBranchHeads(repo):
g = Github(settings.getProperty('github', 'authtoken'))
repo = g.get_repo(repo)
branches = repo.get_branches()
return {b.name:_tarballUrlForBranch(repo, b.name) for b in branches}
@_handleAuth
def _getTipArchiveURL(repo):
''' return a string containing a tarball url '''
g = Github(settings.getProperty('github', 'authtoken'))
repo = g.get_repo(repo)
return repo.get_archive_link('tarball')
@_handleAuth
def _getTarball(url, into_directory, cache_key):
'''unpack the specified tarball url into the specified directory'''
try:
access_common.unpackFromCache(cache_key, into_directory)
    except KeyError:
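        # Cache miss: download the tarball (passing the auth token when we have
        # one) and unpack it; the cache_key lets unpackTarballStream populate
        # the cache for next time.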
tok = settings.getProperty('github', 'authtoken')
headers = {}
if tok is not None:
headers['Authorization'] = 'token ' + str(tok)
logger.debug('GET %s', url)
response = requests.get(url, allow_redirects=True, stream=True, headers=headers)
        response.raise_for_status()
        logger.debug('getting file: %s', url)
        logger.debug('headers: %s', response.headers)
        # github doesn't expose hashes of the archives being downloaded,
        # as far as I can tell :(
access_common.unpackTarballStream(
stream = response,
into_directory = into_directory,
hash = {},
cache_key = cache_key
)
def _createCacheKey(*args):
# return a hash of the arguments (converted to strings) that can be used as
# a cache key:
import hashlib
h = hashlib.sha256()
    h.update('this is the _createCacheKey seed'.encode('utf-8'))
    for arg in args:
        h.update(str(arg).encode('utf-8'))
return h.hexdigest()
# API
def deauthorize():
if settings.getProperty('github', 'authtoken'):
settings.setProperty('github', 'authtoken', '')
class GithubComponentVersion(access_common.RemoteVersion):
def __init__(self, semver, tag, url, name, cache_key=None):
# if cache key is None, then we won't cache this version
self.cache_key = cache_key
self.tag = tag
github_spec = re.search('/(repos|codeload.github.com)/([^/]*/[^/]*)/', url).group(2)
super(GithubComponentVersion, self).__init__(
semver, url, name=name, friendly_version=(semver or tag), friendly_source=('GitHub %s' % github_spec)
)
def unpackInto(self, directory):
assert(self.url)
_getTarball(self.url, directory, self.cache_key)
class GithubComponent(access_common.RemoteComponent):
def __init__(self, repo, tag_or_branch=None, semantic_spec=None, name=None):
        logger.debug('create Github component for repo:%s version spec:%s', repo, semantic_spec or tag_or_branch)
self.repo = repo
self.spec = semantic_spec
self.tag_or_branch = tag_or_branch
self.tags = None
self.name = name
@classmethod
def createFromSource(cls, vs, name=None):
        ''' returns a github component for any github url (including
            git+ssh://, git+http://, etc.), or None if this is not a Github URL.
For all of these we use the github api to grab a tarball, because
that's faster.
Normally version will be empty, unless the original url was of the
form: 'owner/repo @version' or 'url://...#version', which can be used
to grab a particular tagged version.
(Note that for github components we ignore the component name - it
doesn't have to match the github module name)
'''
return GithubComponent(vs.location, vs.spec, vs.semantic_spec, name)
def versionSpec(self):
return self.spec
def tagOrBranchSpec(self):
return self.tag_or_branch
def _getTags(self):
if self.tags is None:
try:
self.tags = _getTags(self.repo).items()
except github.UnknownObjectException as e:
raise access_common.Unavailable(
'could not locate github component "%s", either the name is misspelt, you do not have access to it, or it does not exist' % self.repo
)
return self.tags
def availableVersions(self):
''' return a list of Version objects, each with a tarball URL set '''
r = []
for t in self._getTags():
logger.debug("available version tag: %s", t)
# ignore empty tags:
if not len(t[0].strip()):
continue
try:
r.append(GithubComponentVersion(t[0], t[0], url=t[1], name=self.name, cache_key=None))
except ValueError:
logger.debug('invalid version tag: %s', t)
return r
def availableTags(self):
''' return a list of GithubComponentVersion objects for all tags
'''
return [
GithubComponentVersion(
'', t[0], t[1], self.name, cache_key=_createCacheKey('tag', t[0], t[1], self.name)
) for t in self._getTags()
]
def availableBranches(self):
''' return a list of GithubComponentVersion objects for the tip of each branch
'''
return [
GithubComponentVersion(
'', b[0], b[1], self.name, cache_key=None
) for b in _getBranchHeads(self.repo).items()
]
def tipVersion(self):
return GithubComponentVersion(
'', '', _getTipArchiveURL(self.repo), self.name, cache_key=None
)
@classmethod
def remoteType(cls):
return 'github'
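
if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of the original module):
    # 'octocat/Hello-World' is a placeholder repository, and depending on API
    # rate limits a stored GitHub auth token may be required.
    component = GithubComponent('octocat/Hello-World', name='hello-world')
    for v in component.availableTags():
        print(v.tag, v.url)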
|
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
import pyconde.attendees.validators
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('sponsorship', '__first__'),
('conference', '__first__'),
]
operations = [
migrations.CreateModel(
name='DietaryPreference',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=30, verbose_name='Name')),
],
),
migrations.CreateModel(
name='Purchase',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('company_name', models.CharField(max_length=100, verbose_name='Company', blank=True)),
('first_name', models.CharField(max_length=250, verbose_name='First name')),
('last_name', models.CharField(max_length=250, verbose_name='Last name')),
('email', models.EmailField(max_length=254, verbose_name='E-mail')),
('street', models.CharField(max_length=100, verbose_name='Street and house number')),
('zip_code', models.CharField(max_length=20, verbose_name='Zip code')),
('city', models.CharField(max_length=100, verbose_name='City')),
('country', models.CharField(max_length=100, verbose_name='Country')),
('vat_id', models.CharField(max_length=16, verbose_name='VAT-ID', blank=True)),
('date_added', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Date (added)')),
('state', models.CharField(default='incomplete', max_length=25, verbose_name='Status', choices=[('incomplete', 'Purchase incomplete'), ('new', 'new'), ('invoice_created', 'invoice created'), ('payment_received', 'payment received'), ('canceled', 'canceled')])),
('comments', models.TextField(verbose_name='Comments', blank=True)),
('payment_method', models.CharField(default='invoice', max_length=20, verbose_name='Payment method', choices=[('invoice', 'Invoice'), ('creditcard', 'Credit card'), ('elv', 'ELV')])),
('payment_transaction', models.CharField(max_length=255, verbose_name='Transaction ID', blank=True)),
('payment_total', models.FloatField(null=True, verbose_name='Payment total', blank=True)),
('exported', models.BooleanField(default=False, verbose_name='Exported')),
('invoice_number', models.IntegerField(null=True, verbose_name='Invoice number', blank=True)),
('invoice_filename', models.CharField(max_length=255, null=True, verbose_name='Invoice filename', blank=True)),
('conference', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, verbose_name='conference', to='conference.Conference', null=True)),
('user', models.ForeignKey(verbose_name='User', to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'verbose_name': 'Purchase',
'verbose_name_plural': 'Purchases',
},
),
migrations.CreateModel(
name='Ticket',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date_added', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Date (added)')),
('canceled', models.BooleanField(default=False, verbose_name='Canceled')),
],
options={
'ordering': ('ticket_type__tutorial_ticket', 'ticket_type__product_number'),
},
),
migrations.CreateModel(
name='TicketType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('product_number', models.IntegerField(help_text='Will be created when you save the first time.', verbose_name='Product number', blank=True)),
('name', models.CharField(max_length=50, verbose_name='Name')),
('fee', models.FloatField(default=0, verbose_name='Fee')),
('max_purchases', models.PositiveIntegerField(default=0, help_text='0 means no limit', verbose_name='Max. purchases')),
('is_active', models.BooleanField(default=False, verbose_name='Is active')),
('is_on_desk_active', models.BooleanField(default=False, verbose_name='Allow on desk purchase')),
('date_valid_from', models.DateTimeField(verbose_name='Sale start')),
('date_valid_to', models.DateTimeField(verbose_name='Sale end')),
('valid_on', models.DateField(blank=True, null=True, verbose_name='Valid on', validators=[pyconde.attendees.validators.during_conference])),
('tutorial_ticket', models.BooleanField(default=False, verbose_name='Tutorial ticket')),
('remarks', models.TextField(verbose_name='Remarks', blank=True)),
('allow_editing', models.NullBooleanField(verbose_name='Allow editing')),
('editable_fields', models.TextField(verbose_name='Editable fields', blank=True)),
('editable_until', models.DateTimeField(null=True, verbose_name='Editable until', blank=True)),
                ('prevent_invoice', models.BooleanField(default=False, help_text='If checked, a purchase that contains only tickets of ticket types where this is checked will not be sent to the user. This can be useful for e.g. sponsor tickets', verbose_name='Conditionally prevent invoice to user')),
('conference', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, verbose_name='conference', to='conference.Conference', null=True)),
('content_type', models.ForeignKey(verbose_name='Ticket to generate', to='contenttypes.ContentType')),
],
options={
'ordering': ('tutorial_ticket', 'product_number', 'vouchertype_needed'),
'verbose_name': 'Ticket type',
                'verbose_name_plural': 'Ticket types',
},
),
migrations.CreateModel(
name='TShirtSize',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('size', models.CharField(max_length=100, verbose_name='Size')),
('sort', models.IntegerField(default=999, verbose_name='Sort order')),
('conference', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, verbose_name='conference', to='conference.Conference', null=True)),
],
options={
'ordering': ('sort',),
'verbose_name': 'T-Shirt size',
'verbose_name_plural': 'T-Shirt sizes',
},
),
migrations.CreateModel(
name='Voucher',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('code', models.CharField(help_text='Can be left blank, code will be created when you save.', max_length=12, verbose_name='Code', blank=True)),
('remarks', models.CharField(max_length=254, verbose_name='Remarks', blank=True)),
('date_valid', models.DateTimeField(help_text='The voucher is valid until this date', verbose_name='Date (valid)')),
('is_used', models.BooleanField(default=False, verbose_name='Is used')),
],
options={
'verbose_name': 'Voucher',
'verbose_name_plural': 'Vouchers',
},
),
migrations.CreateModel(
name='VoucherType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50, verbose_name='voucher type')),
('conference', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, verbose_name='conference', to='conference.Conference', null=True)),
],
options={
'verbose_name': 'voucher type',
'verbose_name_plural': 'voucher types',
},
),
migrations.CreateModel(
name='SIMCardTicket',
fields=[
('ticket_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='attendees.Ticket')),
('first_name', models.CharField(max_length=250, verbose_name='First name')),
('last_name', models.CharField(max_length=250, verbose_name='Last name')),
('date_of_birth', models.DateField(verbose_name='Date of birth')),
('gender', models.CharField(max_length=6, verbose_name='Gender', choices=[('female', 'female'), ('male', 'male')])),
('hotel_name', models.CharField(help_text='Name of your hotel or host for your stay.', max_length=100, verbose_name='Host', blank=True)),
('email', models.EmailField(max_length=254, verbose_name='E-mail')),
('street', models.CharField(max_length=100, verbose_name='Street and house number of host')),
('zip_code', models.CharField(max_length=20, verbose_name='Zip code of host')),
('city', models.CharField(max_length=100, verbose_name='City of host')),
('country', models.CharField(max_length=100, verbose_name='Country of host')),
('phone', models.CharField(help_text='Please supply the phone number of your hotel or host.', max_length=100, verbose_name='Host phone number')),
('sim_id', models.CharField(help_text='The IMSI of the SIM Card associated with this account.', max_length=20, verbose_name='IMSI', blank=True)),
],
options={
'verbose_name': 'SIM Card',
'verbose_name_plural': 'SIM Cards',
},
bases=('attendees.ticket',),
),
migrations.CreateModel(
name='SupportTicket',
fields=[
('ticket_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='attendees.Ticket')),
],
options={
'verbose_name': 'Support Ticket',
'verbose_name_plural': 'Support Tickets',
},
bases=('attendees.ticket',),
),
migrations.CreateModel(
name='VenueTicket',
fields=[
('ticket_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='attendees.Ticket')),
('first_name', models.CharField(max_length=250, verbose_name='First name', blank=True)),
('last_name', models.CharField(max_length=250, verbose_name='Last name', blank=True)),
('organisation', models.CharField(max_length=100, verbose_name='Organization', blank=True)),
('dietary_preferences', models.ManyToManyField(to='attendees.DietaryPreference', verbose_name='Dietary preferences', blank=True)),
('shirtsize', models.ForeignKey(verbose_name='Desired T-Shirt size', blank=True, to='attendees.TShirtSize', null=True)),
('sponsor', models.ForeignKey(verbose_name='Sponsor', blank=True, to='sponsorship.Sponsor', null=True)),
],
options={
'verbose_name': 'Conference Ticket',
'verbose_name_plural': 'Conference Tickets',
},
bases=('attendees.ticket',),
),
migrations.AddField(
model_name='voucher',
name='type',
field=models.ForeignKey(verbose_name='voucher type', to='attendees.VoucherType', null=True),
),
migrations.AddField(
model_name='tickettype',
name='vouchertype_needed',
field=models.ForeignKey(verbose_name='voucher type needed', blank=True, to='attendees.VoucherType', null=True),
),
migrations.AddField(
model_name='ticket',
name='purchase',
field=models.ForeignKey(to='attendees.Purchase'),
),
migrations.AddField(
model_name='ticket',
name='ticket_type',
field=models.ForeignKey(verbose_name='Ticket type', to='attendees.TicketType'),
),
migrations.AddField(
model_name='ticket',
name='user',
field=models.ForeignKey(related_name='attendees_ticket_tickets', blank=True, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AddField(
model_name='venueticket',
name='voucher',
field=models.ForeignKey(verbose_name='Voucher', blank=True, to='attendees.Voucher', null=True),
),
migrations.AlterUniqueTogether(
name='tickettype',
unique_together=set([('product_number', 'conference')]),
),
]
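
# Applying this initial migration uses Django's standard command (shown as a
# comment; the app label 'attendees' follows from the model references above):
#
#     python manage.py migrate attendees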
|
|
import numpy as _np
import scipy.sparse as _sp
from ._basis_utils import _shuffle_sites
####################################################
# set of helper functions to implement the partial #
# trace of lattice density matrices. They do not #
# have any checks and states are assumed to be #
# in the non-symmetry reduced basis. #
####################################################
def _lattice_partial_trace_pure(psi,sub_sys_A,L,sps,return_rdm="A"):
"""
    This function computes the partial trace of a dense pure state psi over the set of sites sub_sys_A and
    returns the reduced density matrix. Vectorisation is available.
"""
psi_v=_lattice_reshape_pure(psi,sub_sys_A,L,sps)
if return_rdm == "A":
return _np.squeeze(_np.einsum("...ij,...kj->...ik",psi_v,psi_v.conj())),None
elif return_rdm == "B":
return None,_np.squeeze(_np.einsum("...ji,...jk->...ik",psi_v.conj(),psi_v))
elif return_rdm == "both":
return _np.squeeze(_np.einsum("...ij,...kj->...ik",psi_v,psi_v.conj())),_np.squeeze(_np.einsum("...ji,...jk->...ik",psi_v.conj(),psi_v))
def _lattice_partial_trace_mixed(rho,sub_sys_A,L,sps,return_rdm="A"):
"""
    This function computes the partial trace of a set of dense mixed states rho over the set of sites
    sub_sys_A and returns the reduced density matrices. Vectorisation is available.
"""
rho_v=_lattice_reshape_mixed(rho,sub_sys_A,L,sps)
if return_rdm == "A":
return _np.einsum("...jlkl->...jk",rho_v),None
elif return_rdm == "B":
return None,_np.einsum("...ljlk->...jk",rho_v.conj())
elif return_rdm == "both":
return _np.einsum("...jlkl->...jk",rho_v),_np.einsum("...ljlk->...jk",rho_v.conj())
def _lattice_partial_trace_sparse_pure(psi,sub_sys_A,L,sps,return_rdm="A"):
"""
    This function computes the partial trace of a sparse pure state psi over the set of sites sub_sys_A and
    returns the reduced density matrix.
"""
psi=_lattice_reshape_sparse_pure(psi,sub_sys_A,L,sps)
if return_rdm == "A":
return psi.dot(psi.H),None
elif return_rdm == "B":
return None,psi.H.dot(psi)
elif return_rdm == "both":
return psi.dot(psi.H),psi.H.dot(psi)
def _lattice_reshape_pure(psi,sub_sys_A,L,sps):
"""
    This function reshapes the dense pure state psi over the Hilbert space defined by sub_sys_A and its
    complement. Vectorisation is available.
"""
extra_dims = psi.shape[:-1]
n_dims = len(extra_dims)
sub_sys_B = set(range(L))-set(sub_sys_A)
sub_sys_A = tuple(sub_sys_A)
sub_sys_B = tuple(sub_sys_B)
L_A = len(sub_sys_A)
L_B = len(sub_sys_B)
Ns_A = (sps**L_A)
Ns_B = (sps**L_B)
T_tup = sub_sys_A+sub_sys_B
psi_v = _shuffle_sites(sps,T_tup,psi)
psi_v = psi_v.reshape(extra_dims+(Ns_A,Ns_B))
return psi_v
'''
def _lattice_reshape_pure(psi,sub_sys_A,L,sps):
"""
This function reshapes the dense pure state psi over the Hilbert space defined by sub_sys_A and its complement.
Vectorisation available.
"""
extra_dims = psi.shape[:-1]
n_dims = len(extra_dims)
sub_sys_B = set(range(L))-set(sub_sys_A)
sub_sys_A = tuple(sub_sys_A)
sub_sys_B = tuple(sub_sys_B)
L_A = len(sub_sys_A)
L_B = len(sub_sys_B)
Ns_A = (sps**L_A)
Ns_B = (sps**L_B)
T_tup = sub_sys_A+sub_sys_B
T_tup = tuple(range(n_dims)) + tuple(n_dims + s for s in T_tup)
R_tup = extra_dims + tuple(sps for i in range(L))
psi_v = psi.reshape(R_tup) # DM where index is given per site as rho_v[i_1,...,i_L,j_1,...j_L]
psi_v = psi_v.transpose(T_tup) # take transpose to reshuffle indices
psi_v = psi_v.reshape(extra_dims+(Ns_A,Ns_B))
return psi_v
'''
def _lattice_reshape_mixed(rho,sub_sys_A,L,sps):
"""
    This function reshapes the dense mixed state rho over the Hilbert space defined by sub_sys_A and its
    complement. Vectorisation is available.
"""
extra_dims = rho.shape[:-2]
n_dims = len(extra_dims)
sub_sys_B = set(range(L))-set(sub_sys_A)
sub_sys_A = tuple(sub_sys_A)
sub_sys_B = tuple(sub_sys_B)
L_A = len(sub_sys_A)
L_B = len(sub_sys_B)
Ns_A = (sps**L_A)
Ns_B = (sps**L_B)
# T_tup tells numpy how to reshuffle the indices such that when I reshape the array to the
# 4-_tensor rho_{ik,jl} i,j are for sub_sys_A and k,l are for sub_sys_B
# which means I need (sub_sys_A,sub_sys_B,sub_sys_A+L,sub_sys_B+L)
T_tup = sub_sys_A+sub_sys_B
T_tup = tuple(T_tup) + tuple(L+s for s in T_tup)
rho = rho.reshape(extra_dims+(-1,))
rho_v = _shuffle_sites(sps,T_tup,rho)
return rho_v.reshape(extra_dims+(Ns_A,Ns_B,Ns_A,Ns_B))
'''
def _lattice_reshape_mixed(rho,sub_sys_A,L,sps):
"""
This function reshapes the dense mixed state psi over the Hilbert space defined by sub_sys_A and its complement.
Vectorisation available.
"""
extra_dims = rho.shape[:-2]
n_dims = len(extra_dims)
sub_sys_B = set(range(L))-set(sub_sys_A)
sub_sys_A = tuple(sub_sys_A)
sub_sys_B = tuple(sub_sys_B)
L_A = len(sub_sys_A)
L_B = len(sub_sys_B)
Ns_A = (sps**L_A)
Ns_B = (sps**L_B)
# T_tup tells numpy how to reshuffle the indices such that when I reshape the array to the
# 4-_tensor rho_{ik,jl} i,j are for sub_sys_A and k,l are for sub_sys_B
# which means I need (sub_sys_A,sub_sys_B,sub_sys_A+L,sub_sys_B+L)
T_tup = sub_sys_A+sub_sys_B
T_tup = tuple(range(n_dims)) + tuple(s+n_dims for s in T_tup) + tuple(L+n_dims+s for s in T_tup)
R_tup = extra_dims + tuple(sps for i in range(2*L))
rho_v = rho.reshape(R_tup) # DM where index is given per site as rho_v[i_1,...,i_L,j_1,...j_L]
rho_v = rho_v.transpose(T_tup) # take transpose to reshuffle indices
return rho_v.reshape(extra_dims+(Ns_A,Ns_B,Ns_A,Ns_B))
'''
def _lattice_reshape_sparse_pure(psi,sub_sys_A,L,sps):
"""
This function reshapes the sparse pure state psi over the Hilbert space defined by sub_sys_A and its complement.
"""
sub_sys_B = set(range(L))-set(sub_sys_A)
sub_sys_A = tuple(sub_sys_A)
sub_sys_B = tuple(sub_sys_B)
L_A = len(sub_sys_A)
L_B = len(sub_sys_B)
Ns_A = (sps**L_A)
Ns_B = (sps**L_B)
psi = psi.tocoo()
T_tup = sub_sys_A+sub_sys_B
# reshuffle indices for the sub-systems.
# j = sum( j[i]*(sps**i) for i in range(L))
# this reshuffles the j[i] similar to the transpose operation
# on the dense arrays psi_v.transpose(T_tup)
if T_tup != tuple(range(L)):
indx = _np.zeros(psi.col.shape,dtype=psi.col.dtype)
for i_old,i_new in enumerate(T_tup):
indx += ((psi.col//(sps**(L-i_new-1))) % sps)*(sps**(L-i_old-1))
else:
indx = psi.col
# A = _np.array([0,1,2,3,4,5,6,7,8,9,10,11])
# print("make shift way of reshaping array")
# print("A = {}".format(A))
# print("A.reshape((3,4)): \n {}".format(A.reshape((3,4))))
# print("rows: A.reshape((3,4))/4: \n {}".format(A.reshape((3,4))/4))
# print("cols: A.reshape((3,4))%4: \n {}".format(A.reshape((3,4))%4))
psi._shape = (Ns_A,Ns_B)
    psi.row[:] = indx // Ns_B
psi.col[:] = indx % Ns_B
return psi.tocsr()
def _tensor_reshape_pure(psi,sub_sys_A,Ns_l,Ns_r):
extra_dims = psi.shape[:-1]
if sub_sys_A == "left":
return psi.reshape(extra_dims+(Ns_l,Ns_r))
else:
n_dims = len(extra_dims)
T_tup = tuple(range(n_dims))+(n_dims+1,n_dims)
psi_v = psi.reshape(extra_dims+(Ns_l,Ns_r))
return psi_v.transpose(T_tup)
def _tensor_reshape_sparse_pure(psi,sub_sys_A,Ns_l,Ns_r):
psi = psi.tocoo()
    # makeshift way of reshaping the array:
    # j = Ns_r * j_l + j_r
    # j_l = j // Ns_r
    # j_r = j % Ns_r
if sub_sys_A == "left":
psi._shape = (Ns_l,Ns_r)
psi.row[:] = psi.col / Ns_r
psi.col[:] = psi.col % Ns_r
return psi.tocsr()
else:
psi._shape = (Ns_l,Ns_r)
        psi.row[:] = psi.col // Ns_r
        psi.col[:] = psi.col % Ns_r
return psi.T.tocsr()
def _tensor_reshape_mixed(rho,sub_sys_A,Ns_l,Ns_r):
extra_dims = rho.shape[:-2]
if sub_sys_A == "left":
return rho.reshape(extra_dims+(Ns_l,Ns_r,Ns_l,Ns_r))
else:
n_dims = len(extra_dims)
T_tup = tuple(range(n_dims))+(n_dims+1,n_dims)+(n_dims+3,n_dims+2)
rho_v = rho.reshape(extra_dims+(Ns_l,Ns_r,Ns_l,Ns_r))
return rho_v.transpose(T_tup)
def _tensor_partial_trace_pure(psi,sub_sys_A,Ns_l,Ns_r,return_rdm="A"):
psi_v = _tensor_reshape_pure(psi,sub_sys_A,Ns_l,Ns_r)
if return_rdm == "A":
return _np.squeeze(_np.einsum("...ij,...kj->...ik",psi_v,psi_v.conj())),None
elif return_rdm == "B":
return None,_np.squeeze(_np.einsum("...ji,...jk->...ik",psi_v.conj(),psi_v))
elif return_rdm == "both":
return _np.squeeze(_np.einsum("...ij,...kj->...ik",psi_v,psi_v.conj())),_np.squeeze(_np.einsum("...ji,...jk->...ik",psi_v.conj(),psi_v))
def _tensor_partial_trace_sparse_pure(psi,sub_sys_A,Ns_l,Ns_r,return_rdm="A"):
psi = _tensor_reshape_sparse_pure(psi,sub_sys_A,Ns_l,Ns_r)
if return_rdm == "A":
return psi.dot(psi.H),None
elif return_rdm == "B":
return None,psi.H.dot(psi)
elif return_rdm == "both":
return psi.dot(psi.H),psi.H.dot(psi)
def _tensor_partial_trace_mixed(rho,sub_sys_A,Ns_l,Ns_r,return_rdm="A"):
rho_v = _tensor_reshape_mixed(rho,sub_sys_A,Ns_l,Ns_r)
if return_rdm == "A":
return _np.squeeze(_np.einsum("...ijkj->...ik",rho_v)),None
elif return_rdm == "B":
return None,_np.squeeze(_np.einsum("...jijk->...ik",rho_v.conj()))
elif return_rdm == "both":
return _np.squeeze(_np.einsum("...ijkj->...ik",rho_v)),_np.squeeze(_np.einsum("...jijk->...ik",rho_v.conj()))
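
if __name__ == '__main__':
    # Illustrative check (not part of the module; it runs only where the
    # relative import above resolves): the reduced density matrix of either
    # qubit of a Bell state is maximally mixed. The contraction below mirrors
    # what _lattice_partial_trace_pure computes for L=2, sps=2, sub_sys_A=(0,).
    psi = _np.array([1.0, 0.0, 0.0, 1.0]) / _np.sqrt(2.0)  # (|00> + |11>)/sqrt(2)
    psi_v = psi.reshape(2, 2)                               # indices (i_A, i_B)
    rdm_A = _np.einsum("ij,kj->ik", psi_v, psi_v.conj())    # trace out subsystem B
    assert _np.allclose(rdm_A, _np.eye(2) / 2.0)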
|
|
#!/usr/bin/python
#
# Copyright (C) 2007, 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'davidbyttow@google.com (David Byttow)'
import httplib
import logging
import urllib
import urlparse
import http
import oauth
import simplejson
from data import *
from errors import *
from request import *
class ContainerConfig(object):
"""Setup parameters for connecting to a container."""
def __init__(self, oauth_consumer_key=None, oauth_consumer_secret=None,
server_rpc_base=None, server_rest_base=None,
security_token=None,
security_token_param=None,
sign_with_body=False):
"""Constructor for ContainerConfig.
If no oauth parameters are present, then oauth will not be used to sign
requests, and as such, the client connection will most likely not work.
At least one of server_rpc_base or server_rest_base should be specified,
otherwise, all requests will fail. If both are supplied, the container
will attempt to default to rpc and fall back on REST.
"""
self.oauth_consumer_key = oauth_consumer_key
self.oauth_consumer_secret = oauth_consumer_secret
self.server_rpc_base = server_rpc_base
self.server_rest_base = server_rest_base
self.security_token = security_token
self.security_token_param = security_token_param
self.sign_with_body = sign_with_body
if not server_rpc_base and not server_rest_base:
raise ConfigError("Neither 'server_rpc_base' nor 'server_rest_base' set")
class ContainerContext(object):
"""The context for a container connection.
This class manages the connection to a specific container and provides
    methods for fetching common data via either the REST or RPC protocol, depending
on the configuration.
"""
def __init__(self, config, url_fetch=None):
"""Constructor for ContainerContext.
If a UrlFetch implementation is not given, will attempt to construct
the default implementation based on the environment.
Args:
config: The ContainerConfig to use for this connection.
url_fetch: (optional) An implementation of the UrlFetch interface.
"""
self.config = config
if not self.config:
raise ConfigError('Invalid ContainerConfig.')
self.url_fetch = url_fetch or http.get_default_urlfetch()
self.oauth_signature_method = oauth.OAuthSignatureMethod_HMAC_SHA1()
self.oauth_consumer = None
self.allow_rpc = True
if self.config.oauth_consumer_key and self.config.oauth_consumer_secret:
self.oauth_consumer = oauth.OAuthConsumer(
self.config.oauth_consumer_key,
self.config.oauth_consumer_secret)
def set_allow_rpc(self, allowed):
"""Sets if RPC requests are allowed if they are supported."""
self.allow_rpc = allowed
def supports_rpc(self):
"""Tells whether or not the container was setup for RPC protocol.
Returns: bool Is this container using the RPC protocol?
TODO: Figure out what is going wrong with POST body signing, fix and
re-enable this.
"""
return self.allow_rpc and self.config.server_rpc_base is not None
def fetch_person(self, user_id='@me', fields=None):
"""Fetches a person by user id.
Args:
user_id: str The person's container-specific id.
fields: list (optional) List of fields to retrieve.
Returns: A Person object representing the specified user id.
"""
request = FetchPersonRequest(user_id, fields=fields)
return self.send_request(request)
def fetch_friends(self, user_id='@me', fields=None):
"""Fetches the friends of a given user by id.
Args:
user_id: str The person's container-specific id for which to retrieve
friends.
fields: list (optional) List of fields to retrieve.
Returns: A Collection of Person objects.
"""
request = FetchPeopleRequest(user_id, '@friends', fields=fields)
return self.send_request(request)
def send_request(self, request, use_rest=False):
"""Sends the request.
May throw a BadRequestError, BadResponseError or
UnauthorizedRequestError exceptions.
Args:
request: A Request object.
use_rest: bool (optional) If True, will just use the REST protocol.
Returns: The OpenSocial object returned from the container.
"""
if not use_rest and self.supports_rpc():
batch = RequestBatch()
batch.add_request(0, request)
batch.send(self)
response = batch.get(0)
if isinstance(response, Error):
raise response
return response
else:
return self._send_rest_request(request)
def send_request_batch(self, batch, use_rest=False):
"""Send a batch of requests.
Batches are only useful when RPC is supported. Otherwise, all requests
are sent synchronously. May throw a BadRequest, BadResponse or
UnauthorizedRequest exceptions.
Args:
batch: The RequestBatch object.
use_rest: bool (optional) If True, will just use the REST protocol.
"""
if not use_rest and self.supports_rpc():
self._send_rpc_requests(batch)
else:
"""REST protocol does not support batching, so just process each
request individually.
"""
for key, request in batch.requests.iteritems():
try:
result = self._send_rest_request(request)
except Error, e:
result = e
batch._set_data(key, result)
def _send_rest_request(self, request):
http_request = request.make_rest_request(self.config.server_rest_base)
http_response = self._send_http_request(http_request)
json = self._handle_response(http_response)
return request.process_json(json)
def _send_rpc_requests(self, batch):
rpcs = []
id_to_key_map = {}
query_params = {}
"""Build up a list of RPC calls. Also, create a mapping of RPC request id's
to batch keys in order to populate the batch object with the responses.
"""
for key, request in batch.requests.iteritems():
query_params.update(request.get_query_params())
rpc_body = request.get_rpc_body()
rpc_id = rpc_body.get('id')
id_to_key_map[rpc_id] = key
rpcs.append(rpc_body)
http_request = http.Request(self.config.server_rpc_base,
method='POST',
signed_params=query_params,
post_body=rpcs)
http_response = self._send_http_request(http_request)
json = self._handle_response(http_response)
"""Pull out all of the results and insert them into the batch object."""
for response in json:
id = response.get('id')
key = id_to_key_map[id]
if 'error' in response:
code = response.get('error').get('code')
message = response.get('error').get('message')
error = BadResponseError(code, message)
batch._set_data(key, error)
else:
json = response.get('data')
request = batch.requests[key]
batch._set_data(key, request.process_json(json))
def _send_http_request(self, http_request):
if self.config.security_token:
http_request.add_security_token(self.config.security_token,
self.config.security_token_param)
if self.oauth_consumer and self.oauth_signature_method:
http_request.set_body_as_signing_parameter(self.config.sign_with_body)
http_request.sign_request(self.oauth_consumer,
self.oauth_signature_method)
http_response = self.url_fetch.fetch(http_request)
return http_response
def _handle_response(self, http_response):
""" If status code "OK", then we can safely inspect the returned JSON."""
if http_response.status == httplib.OK:
if http.VERBOSE > 0:
logging.info("http_response.content => %s" % http_response.content)
json = simplejson.loads(http_response.content)
# Check for any JSON-RPC 2.0 errors.
if 'error' in json:
code = json.get('error').get('code')
message = json.get('error').get('message')
if code == httplib.UNAUTHORIZED:
raise UnauthorizedRequestError(http_response)
else:
raise BadResponseError(code, message)
return json
else:
raise BadRequestError(http_response)
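
if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of the original module):
    # the endpoint URL, key and secret are placeholder assumptions, and the
    # calls require a reachable OpenSocial container.
    config = ContainerConfig(
        oauth_consumer_key='my-key',
        oauth_consumer_secret='my-secret',
        server_rest_base='http://container.example.com/social/rest/')
    container = ContainerContext(config)
    me = container.fetch_person('@me')
    friends = container.fetch_friends('@me')
    # Batching (most useful when RPC is supported; see send_request_batch above):
    batch = RequestBatch()
    batch.add_request('me', FetchPersonRequest('@me'))
    container.send_request_batch(batch)
    me_again = batch.get('me')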
|
|
# -*- test-case-name: twisted.web.test.test_error -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Exception definitions for L{twisted.web}.
"""
from __future__ import division, absolute_import
try:
from future_builtins import ascii
except ImportError:
pass
__all__ = [
'Error', 'PageRedirect', 'InfiniteRedirection', 'RenderError',
'MissingRenderMethod', 'MissingTemplateLoader', 'UnexposedMethodError',
'UnfilledSlot', 'UnsupportedType', 'FlattenerError',
'RedirectWithNoLocation',
]
from collections import Sequence
from twisted.web._responses import RESPONSES
from twisted.python.compat import unicode, nativeString, intToBytes
def _codeToMessage(code):
"""
Returns the response message corresponding to an HTTP code, or None
if the code is unknown or unrecognized.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example C{http.NOT_FOUND}.
    @return: A string message, or L{None} if the code is unknown
@rtype: L{bytes}
"""
try:
return RESPONSES.get(int(code))
except (ValueError, AttributeError):
return None
class Error(Exception):
"""
A basic HTTP error.
@type status: L{bytes}
@ivar status: Refers to an HTTP status code, for example C{http.NOT_FOUND}.
@type message: L{bytes}
    @ivar message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@ivar response: A complete HTML document for an error page.
"""
def __init__(self, code, message=None, response=None):
"""
Initializes a basic exception.
@type code: L{bytes} or L{int}
@param code: Refers to an HTTP status code (for example, 200) either as
an integer or a bytestring representing such. If no C{message} is
given, C{code} is mapped to a descriptive bytestring that is used
instead.
@type message: L{bytes}
@param message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@param response: A complete HTML document for an error page.
"""
message = message or _codeToMessage(code)
Exception.__init__(self, code, message, response)
if isinstance(code, int):
# If we're given an int, convert it to a bytestring
# downloadPage gives a bytes, Agent gives an int, and it worked by
# accident previously, so just make it keep working.
code = intToBytes(code)
self.status = code
self.message = message
self.response = response
def __str__(self):
return nativeString(self.status + b" " + self.message)
class PageRedirect(Error):
"""
A request resulted in an HTTP redirect.
@type location: L{bytes}
@ivar location: The location of the redirect which was not followed.
"""
def __init__(self, code, message=None, response=None, location=None):
"""
Initializes a page redirect exception.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example
C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to a
descriptive string that is used instead.
@type message: L{bytes}
@param message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@param response: A complete HTML document for an error page.
@type location: L{bytes}
@param location: The location response-header field value. It is an
absolute URI used to redirect the receiver to a location other than
the Request-URI so the request can be completed.
"""
Error.__init__(self, code, message, response)
if self.message and location:
self.message = self.message + b" to " + location
self.location = location
class InfiniteRedirection(Error):
"""
HTTP redirection is occurring endlessly.
@type location: L{bytes}
@ivar location: The first URL in the series of redirections which was
not followed.
"""
def __init__(self, code, message=None, response=None, location=None):
"""
Initializes an infinite redirection exception.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example
C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to a
descriptive string that is used instead.
@type message: L{bytes}
@param message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@param response: A complete HTML document for an error page.
@type location: L{bytes}
@param location: The location response-header field value. It is an
absolute URI used to redirect the receiver to a location other than
the Request-URI so the request can be completed.
"""
Error.__init__(self, code, message, response)
if self.message and location:
self.message = self.message + b" to " + location
self.location = location
class RedirectWithNoLocation(Error):
"""
Exception passed to L{ResponseFailed} if we got a redirect without a
C{Location} header field.
@type uri: L{bytes}
@ivar uri: The URI which failed to give a proper location header
field.
@since: 11.1
"""
def __init__(self, code, message, uri):
"""
Initializes a page redirect exception when no location is given.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example
C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to
a descriptive string that is used instead.
@type message: L{bytes}
@param message: A short error message.
@type uri: L{bytes}
@param uri: The URI which failed to give a proper location header
field.
"""
Error.__init__(self, code, message)
self.message = self.message + b" to " + uri
self.uri = uri
class UnsupportedMethod(Exception):
"""
Raised by a resource when faced with a strange request method.
RFC 2616 (HTTP 1.1) gives us two choices when faced with this situation:
If the type of request is known to us, but not allowed for the requested
resource, respond with NOT_ALLOWED. Otherwise, if the request is something
we don't know how to deal with in any case, respond with NOT_IMPLEMENTED.
When this exception is raised by a Resource's render method, the server
will make the appropriate response.
This exception's first argument MUST be a sequence of the methods the
resource *does* support.
"""
allowedMethods = ()
def __init__(self, allowedMethods, *args):
Exception.__init__(self, allowedMethods, *args)
self.allowedMethods = allowedMethods
if not isinstance(allowedMethods, Sequence):
raise TypeError(
"First argument must be a sequence of supported methods, "
"but my first argument is not a sequence.")
def __str__(self):
return "Expected one of %r" % (self.allowedMethods,)
class SchemeNotSupported(Exception):
"""
The scheme of a URI was not one of the supported values.
"""
class RenderError(Exception):
"""
Base exception class for all errors which can occur during template
rendering.
"""
class MissingRenderMethod(RenderError):
"""
Tried to use a render method which does not exist.
@ivar element: The element which did not have the render method.
@ivar renderName: The name of the renderer which could not be found.
"""
def __init__(self, element, renderName):
RenderError.__init__(self, element, renderName)
self.element = element
self.renderName = renderName
def __repr__(self):
return '%r: %r had no render method named %r' % (
self.__class__.__name__, self.element, self.renderName)
class MissingTemplateLoader(RenderError):
"""
L{MissingTemplateLoader} is raised when trying to render an Element without
a template loader, i.e. a C{loader} attribute.
@ivar element: The Element which did not have a document factory.
"""
def __init__(self, element):
RenderError.__init__(self, element)
self.element = element
def __repr__(self):
return '%r: %r had no loader' % (self.__class__.__name__,
self.element)
class UnexposedMethodError(Exception):
"""
Raised on any attempt to get a method which has not been exposed.
"""
class UnfilledSlot(Exception):
"""
During flattening, a slot with no associated data was encountered.
"""
class UnsupportedType(Exception):
"""
During flattening, an object of a type which cannot be flattened was
encountered.
"""
class FlattenerError(Exception):
"""
An error occurred while flattening an object.
@ivar _roots: A list of the objects on the flattener's stack at the time
        the unflattenable object was encountered. The first element is the
        least deeply nested object and the last element is the most deeply nested.
"""
def __init__(self, exception, roots, traceback):
self._exception = exception
self._roots = roots
self._traceback = traceback
Exception.__init__(self, exception, roots, traceback)
def _formatRoot(self, obj):
"""
Convert an object from C{self._roots} to a string suitable for
inclusion in a render-traceback (like a normal Python traceback, but
can include "frame" source locations which are not in Python source
files).
@param obj: Any object which can be a render step I{root}.
Typically, L{Tag}s, strings, and other simple Python types.
@return: A string representation of C{obj}.
@rtype: L{str}
"""
# There's a circular dependency between this class and 'Tag', although
# only for an isinstance() check.
from twisted.web.template import Tag
if isinstance(obj, (bytes, str, unicode)):
# It's somewhat unlikely that there will ever be a str in the roots
# list. However, something like a MemoryError during a str.replace
# call (eg, replacing " with ") could possibly cause this.
# Likewise, UTF-8 encoding a unicode string to a byte string might
# fail like this.
if len(obj) > 40:
if isinstance(obj, unicode):
ellipsis = u'<...>'
else:
ellipsis = b'<...>'
return ascii(obj[:20] + ellipsis + obj[-20:])
else:
return ascii(obj)
elif isinstance(obj, Tag):
if obj.filename is None:
return 'Tag <' + obj.tagName + '>'
else:
return "File \"%s\", line %d, column %d, in \"%s\"" % (
obj.filename, obj.lineNumber,
obj.columnNumber, obj.tagName)
else:
return ascii(obj)
def __repr__(self):
"""
Present a string representation which includes a template traceback, so
we can tell where this error occurred in the template, as well as in
Python.
"""
# Avoid importing things unnecessarily until we actually need them;
# since this is an 'error' module we should be extra paranoid about
# that.
from traceback import format_list
if self._roots:
roots = ' ' + '\n '.join([
self._formatRoot(r) for r in self._roots]) + '\n'
else:
roots = ''
if self._traceback:
traceback = '\n'.join([
line
for entry in format_list(self._traceback)
for line in entry.splitlines()]) + '\n'
else:
traceback = ''
return (
'Exception while flattening:\n' +
roots + traceback +
self._exception.__class__.__name__ + ': ' +
str(self._exception) + '\n')
def __str__(self):
return repr(self)
class UnsupportedSpecialHeader(Exception):
"""
    An HTTP/2 request was received that contained an HTTP/2 pseudo-header
    field that is not recognised by Twisted.
"""
|
|
"""The tests the for Locative device tracker platform."""
from http import HTTPStatus
from unittest.mock import patch
import pytest
from homeassistant import config_entries, data_entry_flow
from homeassistant.components import locative
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN
from homeassistant.components.locative import DOMAIN, TRACKER_UPDATE
from homeassistant.config import async_process_ha_core_config
from homeassistant.helpers.dispatcher import DATA_DISPATCHER
from homeassistant.setup import async_setup_component
# pylint: disable=redefined-outer-name
@pytest.fixture(autouse=True)
def mock_dev_track(mock_device_tracker_conf):
"""Mock device tracker config loading."""
pass
@pytest.fixture
async def locative_client(loop, hass, hass_client):
"""Locative mock client."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
with patch("homeassistant.components.device_tracker.legacy.update_config"):
return await hass_client()
@pytest.fixture
async def webhook_id(hass, locative_client):
"""Initialize the Geofency component and get the webhook_id."""
await async_process_ha_core_config(
hass,
{"internal_url": "http://example.local:8123"},
)
result = await hass.config_entries.flow.async_init(
"locative", context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM, result
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
return result["result"].data["webhook_id"]
async def test_missing_data(locative_client, webhook_id):
"""Test missing data."""
url = f"/api/webhook/{webhook_id}"
data = {
"latitude": 1.0,
"longitude": 1.1,
"device": "123",
"id": "Home",
"trigger": "enter",
}
# No data
req = await locative_client.post(url)
assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY
# No latitude
copy = data.copy()
del copy["latitude"]
req = await locative_client.post(url, data=copy)
assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY
# No device
copy = data.copy()
del copy["device"]
req = await locative_client.post(url, data=copy)
assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY
# No location
copy = data.copy()
del copy["id"]
req = await locative_client.post(url, data=copy)
assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY
# No trigger
copy = data.copy()
del copy["trigger"]
req = await locative_client.post(url, data=copy)
assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY
# Test message
copy = data.copy()
copy["trigger"] = "test"
req = await locative_client.post(url, data=copy)
assert req.status == HTTPStatus.OK
# Test message, no location
copy = data.copy()
copy["trigger"] = "test"
del copy["id"]
req = await locative_client.post(url, data=copy)
assert req.status == HTTPStatus.OK
# Unknown trigger
copy = data.copy()
copy["trigger"] = "foobar"
req = await locative_client.post(url, data=copy)
assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY
async def test_enter_and_exit(hass, locative_client, webhook_id):
"""Test when there is a known zone."""
url = f"/api/webhook/{webhook_id}"
data = {
"latitude": 40.7855,
"longitude": -111.7367,
"device": "123",
"id": "Home",
"trigger": "enter",
}
# Enter the Home
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state_name = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])
).state
assert state_name == "home"
data["id"] = "HOME"
data["trigger"] = "exit"
# Exit Home
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state_name = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])
).state
assert state_name == "not_home"
data["id"] = "hOmE"
data["trigger"] = "enter"
# Enter Home again
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state_name = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])
).state
assert state_name == "home"
data["trigger"] = "exit"
# Exit Home
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state_name = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])
).state
assert state_name == "not_home"
data["id"] = "work"
data["trigger"] = "enter"
# Enter Work
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state_name = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])
).state
assert state_name == "work"
async def test_exit_after_enter(hass, locative_client, webhook_id):
"""Test when an exit message comes after an enter message."""
url = f"/api/webhook/{webhook_id}"
data = {
"latitude": 40.7855,
"longitude": -111.7367,
"device": "123",
"id": "Home",
"trigger": "enter",
}
# Enter Home
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]))
assert state.state == "home"
data["id"] = "Work"
# Enter Work
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]))
assert state.state == "work"
data["id"] = "Home"
data["trigger"] = "exit"
# Exit Home
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]))
assert state.state == "work"
async def test_exit_first(hass, locative_client, webhook_id):
"""Test when an exit message is sent first on a new device."""
url = f"/api/webhook/{webhook_id}"
data = {
"latitude": 40.7855,
"longitude": -111.7367,
"device": "new_device",
"id": "Home",
"trigger": "exit",
}
# Exit Home
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]))
assert state.state == "not_home"
async def test_two_devices(hass, locative_client, webhook_id):
"""Test updating two different devices."""
url = f"/api/webhook/{webhook_id}"
data_device_1 = {
"latitude": 40.7855,
"longitude": -111.7367,
"device": "device_1",
"id": "Home",
"trigger": "exit",
}
# Exit Home
req = await locative_client.post(url, data=data_device_1)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["device"])
)
assert state.state == "not_home"
# Enter Home
data_device_2 = dict(data_device_1)
data_device_2["device"] = "device_2"
data_device_2["trigger"] = "enter"
req = await locative_client.post(url, data=data_device_2)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_2["device"])
)
assert state.state == "home"
state = hass.states.get(
"{}.{}".format(DEVICE_TRACKER_DOMAIN, data_device_1["device"])
)
assert state.state == "not_home"
@pytest.mark.xfail(
reason="The device_tracker component does not support unloading yet."
)
async def test_load_unload_entry(hass, locative_client, webhook_id):
"""Test that the appropriate dispatch signals are added and removed."""
url = f"/api/webhook/{webhook_id}"
data = {
"latitude": 40.7855,
"longitude": -111.7367,
"device": "new_device",
"id": "Home",
"trigger": "exit",
}
# Exit Home
req = await locative_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTPStatus.OK
state = hass.states.get("{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"]))
assert state.state == "not_home"
assert len(hass.data[DATA_DISPATCHER][TRACKER_UPDATE]) == 1
entry = hass.config_entries.async_entries(DOMAIN)[0]
await locative.async_unload_entry(hass, entry)
await hass.async_block_till_done()
assert not hass.data[DATA_DISPATCHER][TRACKER_UPDATE]
|
|
#!/usr/bin/env python
import rospy;
import math;
import threading;
import numpy as np;
from nav_msgs.srv import GetPlan ,GetPlanRequest;
from nav_msgs.msg import Path , OccupancyGrid, Odometry;
from communication_node.msg import Data_Goal,Data_Map;
from geometry_msgs.msg import Point , PoseStamped
from std_msgs.msg import Bool;
from Algorithms import *;
import roslib;
import actionlib;
from actionlib_msgs.msg import *;
from move_base_msgs.msg import *;
from frontier_search import *
a_star=None;
name_space="robot1";
robot_number=0;
number_of_robots=0;
######################
merged_map_lock=threading.Lock()
merged_map=None;
#####################
map_publisher=None;
goal_publisher=None;
#############
odom_subscriber=None;
robot_x=0;
robot_y=0;
#################
beta=1;
alpha=25;
utility=100;
laser_range=30;
#################
my_server=None;
goals_list=[];
goals_list_lock=threading.Lock();
other_robots_list=[];
#############################
checking_goals_flag=False;
checking_goals_subscriber=None;
checking_goals_publisher=None;
############################
map_pub_frequency=2;
map_pub_counter=0;
###############################
move_client_=None
move_client_goal_=None;
goal_pose=PoseStamped();
current_goal_status = 0 ;  # goal status: PENDING=0, ACTIVE=1, PREEMPTED=2, SUCCEEDED=3, ABORTED=4, REJECTED=5, PREEMPTING=6, RECALLING=7, RECALLED=8, LOST=9
move_base_status_subscriber=None;
#########################
class MyWrapper:
def __init__(self,list_index,robot_name_space):
self.list_index=list_index;
self.robot_name_space=robot_name_space;
self.map_subscriber=rospy.Subscriber("/"+name_space+"/inbox_Map", Data_Map, self.set_Map);
self.goal_subscriber = rospy.Subscriber("/"+name_space+"/inbox_Goal", Data_Goal, self.set_Goal);
def set_Map(self,map_data):
global merged_map_lock;
global merged_map;
merged_map_lock.acquire();
if(merged_map==None):
merged_map_lock.release();
return;
temp_map=np.array([map_data.data.data,merged_map.data]);
merged_map.data=list(np.max(temp_map,axis=0));
merged_map_lock.release();
def set_Goal(self, goal_data):
global goals_list,goals_list_lock;
goals_list_lock.acquire()
if (goal_data.source==self.robot_name_space):
print(name_space,"new goal from",self.robot_name_space);
goals_list[self.list_index]=goal_data.data;
goals_list_lock.release();
################################################
################################################
def frontier_is_new(new_frontier,frontiers_list):
for i in frontiers_list:
if(((new_frontier[0]-i[0]) ** 2 + (new_frontier[1]-i[1]) ** 2)<5):
return False;
return True;
def get_frontiers(map_data):
global robot_x,robot_y;
frontiers=[];
fsc=FrontierSearch(map_data,5,"centroid");
frontiers=fsc.searchFrom(Point(robot_x,robot_y,0.0));
return list(frontiers);
'''
map_width=int( map_data.info.width); #max of x
map_height=int(map_data.info.height);#max of y
map_size=map_height*map_width;
temp_list=[-1,0,1];
for y in range(1,map_height-1):
for x in range(1,map_width-1):
counter=0;
if map_data.data[(y*map_width)+x]==0:
for i in temp_list:
for j in temp_list:
if not(j==i and i==0):
if(map_data.data[(y+i)*map_width+(x+j)]<0):
counter+=1;
if(map_data.data[(y+i)*map_width+(x+j)]>10):
counter=10;
if (counter==3):
temp_x=(x)*map_data.info.resolution+map_data.info.origin.position.x;
temp_y=(y)*map_data.info.resolution+map_data.info.origin.position.y;
if(frontier_is_new([temp_x,temp_y],frontiers)==True):
frontiers.append([temp_x,temp_y]);
print("this is number of fronteirs", len(frontiers))
return list(frontiers);
'''
def compute_frontier_distance(frontiers):
global robot_x,robot_y;
frontier_distances=[];
temp=-2;
temp1=0;
temp2=0;
for i in frontiers:
temp=-2;
while temp==-2:
temp=request(robot_x,robot_y,i.travel_point.x,i.travel_point.y);
if(temp==10000000):
temp1+=1;
else:
temp2+=1;
        frontier_distances.append([i.travel_point.x,i.travel_point.y,utility-beta*temp]);
print(name_space,"no path",temp1 ,"path",temp2)
return list(frontier_distances);
################################################
################################################
def callback_goal_status(data,data2):
    # done_cb for move_base's SimpleActionClient: mark the current goal as finished.
    global current_goal_status;
    current_goal_status=True;
    return;
    # NOTE: the status-array based handling below is unreachable; it is kept
    # from the earlier subscriber-based implementation (see move_base_tools).
    if len(data.status_list)==0 :
        return;
    current_goal_status = data.status_list[len(data.status_list) - 2].status;
def move_base_tools():
global move_client_goal_;
global move_client_;
global name_space;
global move_base_status_subscriber;
move_client_=actionlib.SimpleActionClient("/"+name_space+"/move_base", MoveBaseAction);
move_client_goal_=MoveBaseGoal();
print(name_space,"move base tools are ok")
#move_base_status_subscriber=rospy.Subscriber("/"+name_space+"/move_base/status", GoalStatusArray, callback_goal_status);
################################################
################################################################################################
################################################
def request(sx,sy,gx,gy):
global a_star;
    temp_object=Problem(sx=int(merged_map.info.resolution*(sx-merged_map.info.origin.position.x)),
                        sy=int(merged_map.info.resolution*(sy-merged_map.info.origin.position.y)),
                        gx=int(merged_map.info.resolution*(gx-merged_map.info.origin.position.x)),
                        gy=int(merged_map.info.resolution*(gy-merged_map.info.origin.position.y)),
                        matrix=list(merged_map.data),
                        width=int(merged_map.info.width),
                        height=int(merged_map.info.height));
a_star.problem=temp_object;
path=a_star.Astar_graph();
if path==None:
return 10000000;
return path;
def send_goal(goal_x,goal_y):
global other_robots_list,goal_publisher;
global name_space;
global move_client_;
global move_client_goal_;
global goal_pose;
global my_current_goal;
my_current_goal=Point(goal_x,goal_y,0.0);
for i in other_robots_list:
new_data=Data_Goal();
new_data.source=name_space;
new_data.destination=i.robot_name_space;
new_data.data=my_current_goal;
goal_publisher.publish(new_data);
# set goal
goal_pose.pose.position.x = goal_x;
goal_pose.pose.position.y = goal_y;
goal_pose.pose.orientation.w = 1.0;
goal_pose.pose.orientation.z = 0;
goal_pose.header.frame_id = "/map";
goal_pose.header.stamp = rospy.Time.now();
# send goal
move_client_.cancel_goals_at_and_before_time(rospy.Time.now());
move_client_goal_.target_pose=goal_pose;
rospy.sleep(0.5);
checking_goals_publisher.publish(Bool(False));
#move_client_.send_goal_and_wait(goal=move_client_goal_,execute_timeout = rospy.Duration(300),preempt_timeout = rospy.Duration(1));
move_client_.send_goal(goal=move_client_goal_,done_cb=callback_goal_status);
print(name_space,"sent goal");
goal_pose.header.seq =goal_pose.header.seq+1 ;
################################################
################################################
def map_callback(map_data):
global merged_map,merged_map_lock;
global other_robots_list;
global map_publisher;
    global map_pub_counter,map_pub_frequency;
merged_map_lock.acquire();
if (merged_map==None):
merged_map=map_data;
else:
temp_map2=list(merged_map.data);
merged_map=map_data;
temp_map=np.array([map_data.data,list(temp_map2)]);
merged_map.data=list(np.max(temp_map,axis=0));
merged_map_lock.release();
    if (map_pub_counter==0):
        if map_publisher!=None:
            for i in other_robots_list:
                new_data=Data_Map();
                new_data.source=name_space;
                new_data.destination=i.robot_name_space;
                new_data.data=map_data;
                map_publisher.publish(new_data);
        # Start a new throttle cycle; without this the counter stays at zero
        # and the map is re-published on every callback.
        map_pub_counter+=map_pub_frequnecy;
    elif(map_pub_counter>=10):
        map_pub_counter=0;
    else:
        map_pub_counter+=map_pub_frequnecy;
def odom_callback(odom_data):
global robot_x,robot_y;
robot_x=odom_data.pose.pose.position.x;
robot_y=odom_data.pose.pose.position.y;
################################################
################################################
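# burgard(): main exploration loop, loosely following Burgard et al.'s
# coordinated multi-robot exploration. Each iteration detects frontiers on
# the merged map, penalises frontiers close to other robots' announced goals
# (the alpha term below), then navigates to the cheapest remaining frontier
# and broadcasts it so the other robots can discount it in turn.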
def burgard():
global merged_map_lock;
global merged_map;
global name_space;
global goals_list;
global goals_list_lock;
global alpha;
global checking_goals_publisher,checking_goals_flag;
global current_goal_status;
global goal_publisher,my_current_goal,other_robots_list;
    while(merged_map==None):
        rospy.sleep(0.1);  # avoid busy-waiting for the first map
while not rospy.is_shutdown():
merged_map_lock.acquire();
print(name_space,"going for frointiers")
frontiers=get_frontiers(merged_map);
merged_map_lock.release();
if (len(frontiers)==0):
print(name_space,"no new frontiers");
exit();
#frontiers=compute_frontier_distance(frontiers);
print(name_space,"we have frontiers",len(frontiers));
if (len(frontiers)==0):
print(name_space,"no path to frointiers");
exit();
rate = rospy.Rate(0.5);
        for k in range(int(name_space[-1]),number_of_robots-1):
            while goals_list[k]==None:
                rospy.sleep(0.1);  # wait until the other robots have announced a goal
checking_goals_publisher.publish(Bool(True));
rate = rospy.Rate(0.5);
while(not checking_goals_flag):
rate.sleep();
rospy.sleep(0.5);
for i in range(0,len(frontiers)):
goals_list_lock.acquire();
for j in goals_list:
if(j==None):continue;
temp_distance=math.sqrt( (j.x-frontiers[i].travel_point.x)**2 + (j.y-frontiers[i].travel_point.y)**2);
if(temp_distance<=laser_range):
print(name_space,"before increment",str(frontiers[i].min_distance))
frontiers[i].min_distance+=alpha*(1-temp_distance/laser_range);
print(name_space,"after ",str(frontiers[i].min_distance))
else:
print(name_space," goal out of range ",str(j.x),str(j.y));
goals_list_lock.release();
print(name_space,"sorting");
frontiers.sort(key=lambda node: node.min_distance);
print(name_space,"worst frontier",frontiers[-1].min_distance," best frontier",frontiers[0].min_distance);
print(name_space," goal is ",str(frontiers[0].travel_point.x),str(frontiers[0].travel_point.y));
current_goal_status=False;
send_goal(frontiers[0].travel_point.x,frontiers[0].travel_point.y);
rospy.sleep(3.0);
time_counter=0;
while current_goal_status==False and time_counter<140:
rate.sleep();
time_counter+=2;
for i in other_robots_list:
new_data=Data_Goal();
new_data.source=name_space;
new_data.destination=i.robot_name_space;
new_data.data=my_current_goal;
goal_publisher.publish(new_data);
current_goal_status=False;
def checking_goals_response_callback(input_data):
global checking_goals_flag;
checking_goals_flag=input_data.data;
def main():
global name_space,robot_number,number_of_robots;
global merged_map,goals_list,other_robots_list;
global goal_publisher,a_star;
global map_publisher,odom_subscriber;
global checking_goals_subscriber,checking_goals_publisher;
rospy.init_node("burgard_exploration_node");
a_star=Algorithmes();
name_space = rospy.get_param("namespace", default="robot1");
robot_number=int(name_space[-1]);
number_of_robots=(int(rospy.get_param("number_of_robots", default=1)));
temp_i=0;
for i in range (0,number_of_robots):
if (i==robot_number):continue;
goals_list.append(None);
other_robots_list.append(MyWrapper(list_index=temp_i,robot_name_space="robot"+str(i)));
temp_i+=1;
move_base_tools();
map_subscriber=rospy.Subscriber("/"+name_space+"/map", OccupancyGrid, map_callback);
odom_subscriber=rospy.Subscriber("/"+name_space+"/odom", Odometry, odom_callback);
goal_publisher=rospy.Publisher("/message_server_Goal", Data_Goal,queue_size=15);
map_publisher=rospy.Publisher("/message_server_map", Data_Map,queue_size=15);
checking_goals_subscriber=rospy.Subscriber("/"+name_space+"/checking_goals_response", Bool, checking_goals_response_callback);
checking_goals_publisher=rospy.Publisher("/"+name_space+"/checking_goals_request", Bool,queue_size=15);
burgard();
rospy.spin();
if __name__ == '__main__':
main();
|
|
import mock
import pytest
import hmac
import hashlib
import lxml.etree
from django.utils import timezone
from osf_tests import factories
from website import settings
@pytest.mark.django_db
class TestCrossRefEmailResponse:
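    # Mailgun signs webhook payloads with HMAC-SHA256 over timestamp + token,
    # keyed with the account's API key; make_mailgun_payload reproduces that
    # signature so the view under test accepts the synthetic request.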
def make_mailgun_payload(self, crossref_response):
mailgun_payload = {
'From': ['CrossRef <admin@crossref.org>'],
'To': ['test@test.osf.io'],
'subject': ['CrossRef submission ID: 1390671938'],
'from': ['CrossRef <test-admin@crossref.org>'],
'Date': ['Fri, 27 Apr 2018 11:38:00 -0400 (EDT)'],
'body-plain': [crossref_response.strip()],
'Mime-Version': ['1.0'],
'timestamp': '123',
'recipient': ['test@test.osf.io'],
'sender': ['test-admin@crossref.org'],
'Content-Type': [u'text/plain; charset="UTF-8"'],
'Subject': [u'CrossRef submission ID: 1390671938'],
'token': 'secret'
}
# temporarily override MAILGUN_API_KEY
settings.MAILGUN_API_KEY = 'notsosecret'
data = {
'X-Mailgun-Sscore': 0,
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format(
mailgun_payload['timestamp'],
mailgun_payload['token']
),
digestmod=hashlib.sha256,
).hexdigest(),
}
data.update(mailgun_payload)
data = {
key: value
for key, value in data.iteritems()
if value is not None
}
return data
@pytest.fixture()
def preprint(self):
return factories.PreprintFactory(set_doi=False)
@pytest.fixture()
def error_xml(self, preprint):
return """
<?xml version="1.0" encoding="UTF-8"?>
<doi_batch_diagnostic status="completed" sp="cs3.crossref.org">
<submission_id>1390675109</submission_id>
<batch_id>{}</batch_id>
<record_diagnostic status="Failure">
<doi />
<msg>Error: cvc-complex-type.2.4.a: Invalid content was found starting with element 'program'</msg>
</record_diagnostic>
<batch_data>
<record_count>1</record_count>
<success_count>0</success_count>
<warning_count>0</warning_count>
<failure_count>1</failure_count>
</batch_data>
</doi_batch_diagnostic>
""".format(preprint._id)
@pytest.fixture()
def success_xml(self, preprint):
return """
<?xml version="1.0" encoding="UTF-8"?>
<doi_batch_diagnostic status="completed" sp="cs3.crossref.org">
<submission_id>1390675475</submission_id>
<batch_id>{}</batch_id>
<record_diagnostic status="Success">
<doi>10.31219/FK2OSF.IO/{}</doi>
<msg>Successfully added</msg>
</record_diagnostic>
<batch_data>
<record_count>1</record_count>
<success_count>1</success_count>
<warning_count>0</warning_count>
<failure_count>0</failure_count>
</batch_data>
</doi_batch_diagnostic>
""".format(preprint._id, preprint._id)
@pytest.fixture()
def update_success_xml(self, preprint):
return """
<?xml version="1.0" encoding="UTF-8"?>
<doi_batch_diagnostic status="completed" sp="cs3.crossref.org">
<submission_id>1390757455</submission_id>
<batch_id>{}</batch_id>
<record_diagnostic status="Success">
<doi>10.31219/FK2osf.io/{}</doi>
<msg>Successfully updated</msg>
</record_diagnostic>
<batch_data>
<record_count>1</record_count>
<success_count>1</success_count>
<warning_count>0</warning_count>
<failure_count>0</failure_count>
</batch_data>
</doi_batch_diagnostic>
""".format(preprint._id, preprint._id)
def build_batch_success_xml(self, preprint_list):
preprint_count = len(preprint_list)
base_xml_string = """
<?xml version="1.0" encoding="UTF-8"?>
<doi_batch_diagnostic status="completed" sp="cs3.crossref.org">
<submission_id>1390758391</submission_id>
<batch_id>1528233706</batch_id>
<batch_data>
<record_count>{}</record_count>
<success_count>{}</success_count>
<warning_count>0</warning_count>
<failure_count>0</failure_count>
</batch_data>
</doi_batch_diagnostic>
""".format(preprint_count, preprint_count)
base_xml = lxml.etree.fromstring(base_xml_string.strip())
provider_prefix = preprint_list[0].provider.doi_prefix
for preprint in preprint_list:
record_diagnostic = lxml.etree.Element('record_diagnostic')
record_diagnostic.attrib['status'] = 'Success'
doi = lxml.etree.Element('doi')
doi.text = settings.DOI_FORMAT.format(prefix=provider_prefix, guid=preprint._id)
msg = lxml.etree.Element('msg')
msg.text = 'Successfully added'
record_diagnostic.append(doi)
record_diagnostic.append(msg)
base_xml.append(record_diagnostic)
return lxml.etree.tostring(base_xml, pretty_print=False)
@pytest.fixture()
def url(self):
return '/_/crossref/email/'
def test_wrong_request_context_raises_permission_error(self, app, url, error_xml):
mailgun_response = self.make_mailgun_payload(error_xml)
mailgun_response.pop('signature')
response = app.post(url, mailgun_response, expect_errors=True)
assert response.status_code == 400
def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml):
assert not preprint.get_identifier_value('doi')
with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail:
context_data = self.make_mailgun_payload(crossref_response=error_xml)
app.post(url, context_data)
assert mock_send_mail.called
assert not preprint.get_identifier_value('doi')
def test_success_response_sets_doi(self, app, url, preprint, success_xml):
assert not preprint.get_identifier_value('doi')
with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail:
context_data = self.make_mailgun_payload(crossref_response=success_xml)
app.post(url, context_data)
preprint.reload()
assert not mock_send_mail.called
assert preprint.get_identifier_value('doi')
assert preprint.preprint_doi_created
    def test_update_success_response(self, app, preprint, url, update_success_xml):
        initial_value = 'TempDOIValue'
        preprint.set_identifier_value(category='doi', value=initial_value)
        with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail:
            context_data = self.make_mailgun_payload(crossref_response=update_success_xml)
            app.post(url, context_data)
        preprint.reload()
        assert not mock_send_mail.called
        assert preprint.get_identifier_value(category='doi') != initial_value
    def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, url, update_success_xml):
        preprint.set_identifier_value(category='doi', value='test')
        preprint.preprint_doi_created = timezone.now()
        preprint.save()
        pre_created = preprint.preprint_doi_created
        with mock.patch('framework.auth.views.mails.send_mail'):
            context_data = self.make_mailgun_payload(crossref_response=update_success_xml)
            app.post(url, context_data)
        preprint.reload()
        assert preprint.preprint_doi_created == pre_created
def test_success_batch_response(self, app, url):
provider = factories.PreprintProviderFactory()
provider.doi_prefix = '10.123yeah'
provider.save()
preprint_list = [factories.PreprintFactory(set_doi=False, provider=provider) for _ in range(5)]
xml_response = self.build_batch_success_xml(preprint_list)
context_data = self.make_mailgun_payload(xml_response)
app.post(url, context_data)
for preprint in preprint_list:
assert preprint.get_identifier_value('doi') == settings.DOI_FORMAT.format(prefix=provider.doi_prefix, guid=preprint._id)
    def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint, update_success_xml):
        legacy_value = 'IAmALegacyDOI'
        preprint.set_identifier_value(category='legacy_doi', value=legacy_value)
        with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail:
            context_data = self.make_mailgun_payload(crossref_response=update_success_xml)
            app.post(url, context_data)
        assert not mock_send_mail.called
        assert preprint.identifiers.get(category='legacy_doi').deleted
|
|
# -*- coding: utf-8 -*-
import os
import unittest
import datetime
from decimal import Decimal
from dateutil.relativedelta import relativedelta
import pycountry
import trytond.tests.test_tryton
from trytond.tests.test_tryton import (
POOL, USER, CONTEXT,
ModuleTestCase, with_transaction
)
from trytond.transaction import Transaction
from trytond.exceptions import UserError
if 'DB_NAME' not in os.environ:
os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
os.environ['DB_NAME'] = ':memory:'
class BaseTestCase(ModuleTestCase):
    '''
    Base test case for the sale payment gateway module.
    '''
module = 'sale_payment_gateway'
def setUp(self):
"""
Set up data used in the tests.
this method is called before each test function execution.
"""
self.Currency = POOL.get('currency.currency')
self.Company = POOL.get('company.company')
self.Party = POOL.get('party.party')
self.User = POOL.get('res.user')
self.ProductTemplate = POOL.get('product.template')
self.Uom = POOL.get('product.uom')
self.ProductCategory = POOL.get('product.category')
self.Product = POOL.get('product.product')
self.Country = POOL.get('country.country')
self.Subdivision = POOL.get('country.subdivision')
self.Employee = POOL.get('company.employee')
self.Journal = POOL.get('account.journal')
self.PaymentGateway = POOL.get('payment_gateway.gateway')
self.Sale = POOL.get('sale.sale')
self.SaleLine = POOL.get('sale.line')
self.SalePayment = POOL.get('sale.payment')
self.SaleConfiguration = POOL.get('sale.configuration')
self.Group = POOL.get('res.group')
def _create_fiscal_year(self, date=None, company=None):
"""
Creates a fiscal year and requried sequences
"""
FiscalYear = POOL.get('account.fiscalyear')
Sequence = POOL.get('ir.sequence')
SequenceStrict = POOL.get('ir.sequence.strict')
Company = POOL.get('company.company')
if date is None:
date = datetime.date.today()
if company is None:
company, = Company.search([], limit=1)
invoice_sequence, = SequenceStrict.create([{
'name': '%s' % date.year,
'code': 'account.invoice',
'company': company,
}])
fiscal_year, = FiscalYear.create([{
'name': '%s' % date.year,
'start_date': date + relativedelta(month=1, day=1),
'end_date': date + relativedelta(month=12, day=31),
'company': company,
'post_move_sequence': Sequence.create([{
'name': '%s' % date.year,
'code': 'account.move',
'company': company,
}])[0],
'out_invoice_sequence': invoice_sequence,
'in_invoice_sequence': invoice_sequence,
'out_credit_note_sequence': invoice_sequence,
'in_credit_note_sequence': invoice_sequence,
}])
FiscalYear.create_period([fiscal_year])
return fiscal_year
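    # _create_coa_minimal drives the account.create_chart wizard the same way
    # a user would: open a session, pick the 'Minimal Account Chart' template,
    # then wire up the default receivable/payable properties.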
def _create_coa_minimal(self, company):
"""Create a minimal chart of accounts
"""
AccountTemplate = POOL.get('account.account.template')
Account = POOL.get('account.account')
account_create_chart = POOL.get(
'account.create_chart', type="wizard")
account_template, = AccountTemplate.search(
[('parent', '=', None),
('name', '=', 'Minimal Account Chart')]
)
session_id, _, _ = account_create_chart.create()
create_chart = account_create_chart(session_id)
create_chart.account.account_template = account_template
create_chart.account.company = company
create_chart.transition_create_account()
receivable, = Account.search([
('kind', '=', 'receivable'),
('company', '=', company),
])
payable, = Account.search([
('kind', '=', 'payable'),
('company', '=', company),
])
create_chart.properties.company = company
create_chart.properties.account_receivable = receivable
create_chart.properties.account_payable = payable
create_chart.transition_create_properties()
def _get_account_by_kind(self, kind, company=None, silent=True):
"""Returns an account with given spec
:param kind: receivable/payable/expense/revenue
:param silent: dont raise error if account is not found
"""
Account = POOL.get('account.account')
Company = POOL.get('company.company')
if company is None:
company, = Company.search([], limit=1)
accounts = Account.search([
('kind', '=', kind),
('company', '=', company)
], limit=1)
if not accounts and not silent:
raise Exception("Account not found")
return accounts[0] if accounts else False
def _create_payment_term(self):
"""Create a simple payment term with all advance
"""
PaymentTerm = POOL.get('account.invoice.payment_term')
return PaymentTerm.create([{
'name': 'Direct',
'lines': [('create', [{'type': 'remainder'}])]
}])
def _create_countries(self, count=5):
"""
Create some sample countries and subdivisions
"""
for country in list(pycountry.countries)[0:count]:
countries = self.Country.create([{
'name': country.name,
'code': country.alpha2,
}])
try:
divisions = pycountry.subdivisions.get(
country_code=country.alpha2
)
except KeyError:
pass
else:
for subdivision in list(divisions)[0:count]:
self.Subdivision.create([{
'country': countries[0].id,
'name': subdivision.name,
'code': subdivision.code,
'type': subdivision.type.lower(),
}])
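    # The profile below uses 4111111111111111, a standard Visa test number;
    # the dummy gateway performs no real-world validation or charge.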
def create_payment_profile(self, party, gateway):
"""
Create a payment profile for the party
"""
AddPaymentProfileWizard = POOL.get(
'party.party.payment_profile.add', type='wizard'
)
# create a profile
profile_wiz = AddPaymentProfileWizard(
AddPaymentProfileWizard.create()[0]
)
profile_wiz.card_info.party = party.id
profile_wiz.card_info.address = party.addresses[0].id
profile_wiz.card_info.provider = gateway.provider
profile_wiz.card_info.gateway = gateway
profile_wiz.card_info.owner = party.name
profile_wiz.card_info.number = '4111111111111111'
profile_wiz.card_info.expiry_month = '11'
profile_wiz.card_info.expiry_year = '2018'
profile_wiz.card_info.csc = '353'
with Transaction().set_context(return_profile=True):
return profile_wiz.transition_add()
def setup_defaults(self):
"""Creates default data for testing
"""
self.currency, = self.Currency.create([{
'name': 'US Dollar',
'code': 'USD',
'symbol': '$',
}])
with Transaction().set_context(company=None):
company_party, = self.Party.create([{
'name': 'openlabs'
}])
employee_party, = self.Party.create([{
'name': 'Jim'
}])
self.company, = self.Company.create([{
'party': company_party,
'currency': self.currency,
}])
self.employee, = self.Employee.create([{
'party': employee_party.id,
'company': self.company.id,
}])
self.User.write([self.User(USER)], {
'company': self.company,
'main_company': self.company,
'employees': [('add', [self.employee.id])],
})
        # Write employee separately as employees need to be saved first
self.User.write([self.User(USER)], {
'employee': self.employee.id,
})
CONTEXT.update(self.User.get_preferences(context_only=True))
# Create Fiscal Year
self._create_fiscal_year(company=self.company.id)
# Create Chart of Accounts
self._create_coa_minimal(company=self.company.id)
# Create a payment term
self.payment_term, = self._create_payment_term()
self.cash_journal, = self.Journal.search(
[('type', '=', 'cash')], limit=1
)
self.country, = self.Country.create([{
'name': 'United States of America',
'code': 'US',
}])
self.subdivision, = self.Subdivision.create([{
'country': self.country.id,
'name': 'California',
'code': 'CA',
'type': 'state',
}])
# Create party
self.party, = self.Party.create([{
'name': 'Bruce Wayne',
'addresses': [('create', [{
'name': 'Bruce Wayne',
'city': 'Gotham',
'country': self.country.id,
'subdivision': self.subdivision.id,
}])],
'customer_payment_term': self.payment_term.id,
'account_receivable': self._get_account_by_kind(
'receivable').id,
'contact_mechanisms': [('create', [
{'type': 'mobile', 'value': '8888888888'},
])],
}])
        # Add user to the Sales Administrator group so that they can create payments.
admin_group, = self.Group.search([('name', '=', 'Sales Administrator')])
self.User.write([self.User(USER)], {
'groups': [('add', [admin_group.id])]
})
with Transaction().set_context(use_dummy=True):
self.dummy_gateway, = self.PaymentGateway.create([{
'name': 'Dummy Gateway',
'journal': self.cash_journal.id,
'provider': 'dummy',
'method': 'credit_card',
}])
self.dummy_cc_payment_profile = self.create_payment_profile(
self.party, self.dummy_gateway
)
self.cash_gateway, = self.PaymentGateway.create([{
'name': 'Cash Gateway',
'journal': self.cash_journal.id,
'provider': 'self',
'method': 'manual',
}])
class TestSale(BaseTestCase):
"""Test Sale with Payments
"""
def _create_sale(self, payment_authorize_on, payment_capture_on):
"""Create test sale with provided payment_authorized and payment
capture options.
"""
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.currency,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'company': self.company.id,
'invoice_method': 'manual',
'shipment_method': 'manual',
'payment_authorize_on': payment_authorize_on,
'payment_capture_on': payment_capture_on,
'lines': [('create', [{
'description': 'Some item',
'unit_price': Decimal('200'),
'quantity': 1
}])]
}])
return sale
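    # The two helpers below mirror the sale workflow: move the sale forward
    # (confirm or process), then run process_all_pending_payments so gateway
    # transactions fire according to the sale's authorize/capture settings.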
def _confirm_sale_by_completing_payments(self, sales):
"""Confirm sale and complete payments.
"""
self.Sale.confirm(sales)
self.Sale.process_all_pending_payments()
def _process_sale_by_completing_payments(self, sales):
"""Process sale and complete payments.
"""
self.Sale.process(sales)
self.Sale.process_all_pending_payments()
@with_transaction()
def test_0005_single_payment_CASE1(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment_details = {
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
}
payment_details.update({
'credit_account':
self.SalePayment(
**payment_details).on_change_with_credit_account()
})
payment = self.SalePayment(**payment_details)
payment.save()
self.assertTrue(payment.description.startswith("Paid by Card"))
self.assertTrue(payment.credit_account)
self.assertEqual(
payment.credit_account, self.party.account_receivable)
self.assertEqual(payment.company.id, sale.company.id)
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
# confirm and process the sale, payment will not go
# through because capture and auth is manual.
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0010_single_payment_CASE2(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='sale_confirm',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0013_single_payment_CASE2B(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
Cancel > Draft > Quote > Confirm
===================================
Total Sale Amount | $200
Total Payment Lines | 1
Payment 1 | $200
===================================
        Cancel > Draft (line x 2) > Quote > Confirm
        ===================================
        Total Sale Amount     |   $400
        Total Payment Lines   |   1
        Payment 1             |   $200 (insufficient; confirm raises UserError)
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='sale_confirm',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# ===================================
# Cancel > Draft > Quote > Confirm
# ===================================
        # This is not a standard feature, but some downstream modules allow
        # cancelling and re-drafting orders. Handle that case here too,
        # because real-world orders do get cancelled.
self.Sale.write([sale], {'state': 'draft'})
self.assertEqual(sale.state, 'draft')
# XXX: Clearing the cache because it does not seem to be doing it.
self.Sale.write([sale], {
'untaxed_amount_cache': None,
'tax_amount_cache': None,
'total_amount_cache': None,
})
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# ===========================================
# Cancel > Draft (line x 2 )> Quote > Confirm
# ===========================================
self.Sale.cancel([sale])
self.Sale.draft([sale])
self.Sale.write([sale], {'state': 'draft'})
sale = self.Sale(sale.id)
self.assertEqual(sale.state, 'draft')
# XXX: Clearing the cache because it does not seem to be doing it.
self.Sale.write([sale], {
'untaxed_amount_cache': None,
'tax_amount_cache': None,
'total_amount_cache': None,
})
self.SaleLine.write([sale.lines[0]], {'quantity': 2})
sale = self.Sale(sale.id)
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
with self.assertRaises(UserError):
# Complain, because there is not enough money!
self._confirm_sale_by_completing_payments([sale])
@with_transaction()
def test_0014_single_payment_CASE2C(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
Cancel > Draft (line x 2 )> Quote > Confirm
===================================
Total Sale Amount | $400
Total Payment Lines | 2
Payment 1 | $200
Payment 2 | $200
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='sale_confirm',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# ===========================================
# Cancel > Draft (line x 2 )> Quote > Confirm
# ===========================================
self.Sale.cancel([sale])
self.Sale.draft([sale])
self.Sale.write([sale], {'state': 'draft'})
sale = self.Sale(sale.id)
self.assertEqual(sale.state, 'draft')
# XXX: Clearing the cache because it does not seem to be doing it.
self.Sale.write([sale], {
'untaxed_amount_cache': None,
'tax_amount_cache': None,
'total_amount_cache': None,
})
self.SaleLine.write([sale.lines[0]], {'quantity': 2})
sale = self.Sale(sale.id)
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('400'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('400'))
self.assertEqual(sale.payment_captured, Decimal('400'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0014_single_payment_CASE2D(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
        Cancel > Draft (line x 2) > Quote > Confirm
        ===================================
        Total Sale Amount     |   $400
        Total Payment Lines   |   1 (existing payment expanded)
        Payment 1             |   $400
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='sale_confirm',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# ===========================================
# Cancel > Draft (line x 2 )> Quote > Confirm
# ===========================================
self.Sale.cancel([sale])
self.Sale.draft([sale])
self.Sale.write([sale], {'state': 'draft'})
sale = self.Sale(sale.id)
self.assertEqual(sale.state, 'draft')
# XXX: Clearing the cache because it does not seem to be doing it.
self.Sale.write([sale], {
'untaxed_amount_cache': None,
'tax_amount_cache': None,
'total_amount_cache': None,
})
self.SaleLine.write([sale.lines[0]], {'quantity': 2})
sale = self.Sale(sale.id)
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Expand the payment
self.SalePayment.write([payment], {'amount': Decimal('400')})
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('400'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('400'))
self.assertEqual(sale.payment_captured, Decimal('400'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0015_single_payment_CASE3(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'sale_process'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='sale_process',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
@with_transaction()
def test_0020_single_payment_CASE4(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
@with_transaction()
def test_0022_single_payment_CASE4A(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
        Cancel > Draft (line x 2) > Quote > Confirm
        ===================================
        Total Sale Amount     |   $400
        Total Payment Lines   |   1
        Payment 1             |   $200 (insufficient; confirm raises UserError)
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
# ===================================
# Cancel > Draft > Quote > Confirm
# ===================================
        # This is not a standard feature, but some downstream modules allow
        # cancelling and re-drafting orders. Handle that case here too,
        # because real-world orders do get cancelled.
self.Sale.write([sale], {'state': 'draft'})
self.assertEqual(sale.state, 'draft')
# XXX: Clearing the cache because it does not seem to be doing it.
self.Sale.write([sale], {
'untaxed_amount_cache': None,
'tax_amount_cache': None,
'total_amount_cache': None,
})
self.SaleLine.write([sale.lines[0]], {'quantity': 2})
sale = self.Sale(sale.id)
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
with self.assertRaises(UserError):
# Complain, because there is not enough money!
self._confirm_sale_by_completing_payments([sale])
@with_transaction()
def test_0022_single_payment_CASE4B(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
        Cancel > Draft (line x 2) > Quote > Confirm
        ===================================
        Total Sale Amount     |   $400
        Total Payment Lines   |   2
        Payment 1             |   $200
        Payment 2             |   $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
# ===================================
# Cancel > Draft > Quote > Confirm
# ===================================
        # This is not a standard feature, but some downstream modules allow
        # cancelling and re-drafting orders. Handle that case here too,
        # because real-world orders do get cancelled.
self.Sale.write([sale], {'state': 'draft'})
self.assertEqual(sale.state, 'draft')
# XXX: Clearing the cache because it does not seem to be doing it.
self.Sale.write([sale], {
'untaxed_amount_cache': None,
'tax_amount_cache': None,
'total_amount_cache': None,
})
self.SaleLine.write([sale.lines[0]], {'quantity': 2})
sale = self.Sale(sale.id)
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.total_amount, Decimal('400'))
self.assertEqual(sale.payment_total, Decimal('400'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('400'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('400'))
@with_transaction()
def test_0025_single_payment_CASE5(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='sale_confirm',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
# Create a payment
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
        # No authorized amount because it was captured right after authorization.
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0030_single_payment_CASE6(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'sale_process'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='sale_process',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0035_single_payment_CASE7(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_process'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_process',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('200'))
@with_transaction()
def test_0040_single_payment_CASE8(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_process'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale_config = self.SaleConfiguration(1)
sale_config.payment_authorize_on = 'sale_process'
sale_config.payment_capture_on = 'sale_confirm'
# This is invalid case so it should raise user error.
with self.assertRaises(UserError):
sale_config.save()
@with_transaction()
def test_0045_single_payment_CASE9(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_process'
Payment Capture On: | 'sale_process'
===================================
Total Payment Lines | 1
Payment 1 | $200
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_process',
payment_capture_on='sale_process',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
payment, = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('200'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
        # Authorized amount is zero because the payment was captured
        # immediately afterwards.
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0050_multi_payment_CASE1(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
payment_1, payment_2 = self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertTrue(payment_1.description.startswith("Paid by Cash"))
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
# Confirm and process the sale; payments will not go
# through because capture and auth are manual.
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('100'))
self.assertEqual(sale.payment_collected, Decimal('100'))
self.assertEqual(sale.payment_captured, Decimal('100'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0055_multi_payment_CASE2(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='sale_confirm',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0060_multi_payment_CASE3(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'manual'
Payment Capture On: | 'sale_process'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='manual',
payment_capture_on='sale_process',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0065_multi_payment_CASE4(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('100'))
self.assertEqual(sale.payment_collected, Decimal('100'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('100'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('100'))
self.assertEqual(sale.payment_authorized, Decimal('100'))
@with_transaction()
def test_0070_multi_payment_CASE5(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'sale_confirm'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='sale_confirm',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
# No authorized amount because it was captured after that.
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0075_multi_payment_CASE6(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_confirm'
Payment Capture On: | 'sale_process'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='sale_process',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('100'))
self.assertEqual(sale.payment_collected, Decimal('100'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('100'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0080_multi_payment_CASE7(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_process'
Payment Capture On: | 'manual'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_process',
payment_capture_on='manual',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('100'))
self.assertEqual(sale.payment_authorized, Decimal('100'))
@with_transaction()
def test_0085_multi_payment_CASE8(self):
"""
===================================
Total Sale Amount | $200
Payment Authorize On: | 'sale_process'
Payment Capture On: | 'sale_process'
===================================
Total Payment Lines | 2
Payment 1 (manual) | $100
Payment 2 (cc) | $100
===================================
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_process',
payment_capture_on='sale_process',
)
self.assertEqual(sale.total_amount, Decimal('200'))
self.assertEqual(sale.payment_total, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
self.SalePayment.create([{
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.cash_gateway,
'credit_account': self.party.account_receivable.id,
}, {
'sale': sale.id,
'amount': Decimal('100'),
'gateway': self.dummy_gateway,
'payment_profile': self.dummy_cc_payment_profile.id,
'credit_account': self.party.account_receivable.id,
}])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self.Sale.quote([sale])
self._confirm_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('200'))
self.assertEqual(sale.payment_collected, Decimal('0'))
self.assertEqual(sale.payment_captured, Decimal('0'))
self.assertEqual(sale.payment_authorized, Decimal('0'))
with Transaction().set_context(company=self.company.id):
self._process_sale_by_completing_payments([sale])
self.assertEqual(sale.payment_total, Decimal('200'))
self.assertEqual(sale.payment_available, Decimal('0'))
self.assertEqual(sale.payment_collected, Decimal('200'))
self.assertEqual(sale.payment_captured, Decimal('200'))
# Authorized amount is zero because the payment was captured
# after that.
self.assertEqual(sale.payment_authorized, Decimal('0'))
@with_transaction()
def test_0090_test_duplicate_sale(self):
"""
Test if payment_processing_state is not copied in duplicate sales
"""
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_confirm',
payment_capture_on='sale_process',
)
sale.payment_processing_state = 'waiting_for_capture'
sale.save()
self.assertEqual(
sale.payment_processing_state, 'waiting_for_capture')
new_sales = self.Sale.copy([sale])
self.assertTrue(new_sales)
self.assertEqual(len(new_sales), 1)
self.assertIsNone(new_sales[0].payment_processing_state)
self.assertFalse(new_sales[0].payments)
@with_transaction()
def test_0100_test_sale_payment_wizard(self):
"""
Test the wizard used to create sale payments
"""
SalePayment = POOL.get('sale.payment')
SalePaymentWizard = POOL.get('sale.payment.add', type="wizard")
PaymentProfile = POOL.get('party.payment_profile')
self.setup_defaults()
sale = self._create_sale(
payment_authorize_on='sale_process',
payment_capture_on='sale_process',
)
# Case I: Manual Payment
sale_payment_wizard1 = SalePaymentWizard(
SalePaymentWizard.create()[0]
)
# Test default_payment_info
with Transaction().set_context(active_id=sale.id):
defaults = sale_payment_wizard1.default_payment_info()
self.assertEqual(defaults['sale'], sale.id)
self.assertEqual(defaults['party'], sale.party.id)
self.assertEqual(
defaults['currency_digits'], sale.currency_digits
)
sale_payment_wizard1.payment_info.sale = sale.id
sale_payment_wizard1.payment_info.credit_account = \
sale.party.account_receivable.id
sale_payment_wizard1.payment_info.party = sale.party.id
sale_payment_wizard1.payment_info.gateway = self.cash_gateway.id
sale_payment_wizard1.payment_info.method = self.cash_gateway.method
sale_payment_wizard1.payment_info.amount = 100
sale_payment_wizard1.payment_info.payment_profile = None
sale_payment_wizard1.payment_info.currency_digits = \
sale_payment_wizard1.payment_info.get_currency_digits(
name='currency_digits'
)
sale_payment_wizard1.payment_info.reference = 'Reference-1'
sale_payment_wizard1.payment_info.gift_card = None
with Transaction().set_context(active_id=sale.id):
sale_payment_wizard1.transition_add()
payment1, = SalePayment.search([
('sale', '=', sale.id),
('company', '=', self.company.id),
], limit=1)
self.assertEqual(payment1.amount, 100)
self.assertEqual(payment1.party, sale.party)
self.assertEqual(payment1.method, self.cash_gateway.method)
self.assertEqual(payment1.provider, self.cash_gateway.provider)
self.assertEqual(payment1.reference, 'Reference-1')
# Case II: Credit Card Payment with new payment profile
sale_payment_wizard2 = SalePaymentWizard(
SalePaymentWizard.create()[0]
)
# Test if party has 1 payment profile already created
payment_profiles = PaymentProfile.search([
('party', '=', sale.party.id)
])
self.assertEqual(len(payment_profiles), 1)
sale_payment_wizard2.payment_info.sale = sale.id
sale_payment_wizard2.payment_info.credit_account = \
sale.party.account_receivable.id
sale_payment_wizard2.payment_info.party = sale.party.id
sale_payment_wizard2.payment_info.gateway = self.dummy_gateway.id
sale_payment_wizard2.payment_info.method = \
sale_payment_wizard2.payment_info.get_method()
sale_payment_wizard2.payment_info.use_existing_card = False
sale_payment_wizard2.payment_info.amount = 55
sale_payment_wizard2.payment_info.owner = sale.party.name
sale_payment_wizard2.payment_info.number = '4111111111111111'
sale_payment_wizard2.payment_info.expiry_month = '01'
sale_payment_wizard2.payment_info.expiry_year = '2018'
sale_payment_wizard2.payment_info.csc = '911'
sale_payment_wizard2.payment_info.payment_profile = None
sale_payment_wizard2.payment_info.reference = 'Reference-2'
sale_payment_wizard2.payment_info.gift_card = None
with Transaction().set_context(active_id=sale.id):
sale_payment_wizard2.transition_add()
payment2, = SalePayment.search([
('sale', '=', sale.id),
('amount', '=', 55),
('company', '=', self.company.id),
], limit=1)
self.assertEqual(payment2.method, self.dummy_gateway.method)
self.assertEqual(payment2.provider, self.dummy_gateway.provider)
# Test if new payment profile was created for party
new_payment_profile = PaymentProfile.search([
('party', '=', sale.party.id)
], order=[('id', 'DESC')])
self.assertEqual(len(new_payment_profile), 2)
self.assertEqual(
new_payment_profile[0], payment2.payment_profile
)
# Case III: Credit Card Payment with existing card
sale_payment_wizard3 = SalePaymentWizard(
SalePaymentWizard.create()[0]
)
sale_payment_wizard3.payment_info.sale = sale.id
sale_payment_wizard3.payment_info.credit_account = \
sale.party.account_receivable.id
sale_payment_wizard3.payment_info.party = sale.party.id
sale_payment_wizard3.payment_info.gateway = self.dummy_gateway.id
sale_payment_wizard3.payment_info.method = self.dummy_gateway.method
sale_payment_wizard3.payment_info.use_existing_card = True
sale_payment_wizard3.payment_info.amount = 45
sale_payment_wizard3.payment_info.payment_profile = \
new_payment_profile[0]
sale_payment_wizard3.payment_info.reference = 'Reference-3'
sale_payment_wizard3.payment_info.gift_card = None
with Transaction().set_context(active_id=sale.id):
sale_payment_wizard3.transition_add()
payment3, = SalePayment.search([
('sale', '=', sale.id),
('amount', '=', 45),
('company', '=', self.company.id),
], limit=1)
self.assertEqual(payment3.method, self.dummy_gateway.method)
self.assertEqual(payment3.provider, self.dummy_gateway.provider)
self.assertEqual(
new_payment_profile[0], payment3.payment_profile
)
self.assertEqual(SalePayment.search([], count=True), 3)
# Delete a payment
SalePayment.delete([payment3])
self.assertEqual(SalePayment.search([], count=True), 2)
def suite():
"""
Define suite
"""
test_suite = trytond.tests.test_tryton.suite()
test_suite.addTests(
unittest.TestLoader().loadTestsFromTestCase(TestSale)
)
return test_suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
from typing import Any
from flask.ext.restful import Resource, fields
from flask import request
import inspect
import functools
import re
from . import registry, registered, api_spec_endpoint
from . import html
basestring = str
resource_listing_endpoint = None  # type: Any
def docs(api, apiVersion='0.0', swaggerVersion='1.2',
basePath='http://localhost:5000',
resourcePath='/',
produces=["application/json"],
api_spec_url='/api/spec'):
api_add_resource = api.add_resource
def add_resource(resource, path, *args, **kvargs):
endpoint = swagger_endpoint(resource, path)
# Add a .help.json help url
swagger_path = extract_swagger_path(path)
endpoint_path = "%s_help_json" % resource.__name__
api_add_resource(endpoint, "%s.help.json" % swagger_path,
endpoint=endpoint_path)
# Add a .help.html help url
endpoint_path = "%s_help_html" % resource.__name__
api_add_resource(endpoint, "%s.help.html" % swagger_path,
endpoint=endpoint_path)
register_once(api_add_resource, apiVersion, swaggerVersion, basePath,
resourcePath, produces, api_spec_url)
return api_add_resource(resource, path, *args, **kvargs)
api.add_resource = add_resource
return api
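# Minimal usage sketch (the Flask app and Api names below are illustrative
# assumptions, not part of this module):
#
#   from flask import Flask
#   from flask.ext.restful import Api
#
#   app = Flask(__name__)
#   api = docs(Api(app), apiVersion='0.1', api_spec_url='/api/spec')
#
# Resources added through the wrapped ``api.add_resource`` also get
# ``.help.json`` and ``.help.html`` endpoints next to the swagger spec.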
def register_once(add_resource_func, apiVersion, swaggerVersion, basePath,
resourcePath, produces, endpoint):
global registered
global api_spec_endpoint
global resource_listing_endpoint
if not registered:
registered = True
registry['apiVersion'] = apiVersion
registry['swaggerVersion'] = swaggerVersion
registry['basePath'] = basePath
registry['resourcePath'] = resourcePath
registry['produces'] = produces
add_resource_func(SwaggerRegistry, endpoint, endpoint=endpoint)
api_spec_endpoint = endpoint + '.json'
add_resource_func(SwaggerRegistry, api_spec_endpoint, endpoint=api_spec_endpoint)
ep = endpoint + '.html'
add_resource_func(SwaggerRegistry, ep, endpoint=ep)
resource_listing_endpoint = endpoint + '/_/resource_list.json'
add_resource_func(ResourceLister, resource_listing_endpoint, endpoint=resource_listing_endpoint)
class ResourceLister(Resource):
def get(self):
return {
"apiVersion": registry['apiVersion'],
"swaggerVersion": registry['swaggerVersion'],
"apis": [
{
"path": '/..' * (len(api_spec_endpoint.split('/')) + 1) + api_spec_endpoint,
"description": "Auto generated API docs by flask-restful-swagger"
}
]
}
def swagger_endpoint(resource, path):
endpoint = SwaggerEndpoint(resource, path)
registry['apis'].append(endpoint.__dict__)
class SwaggerResource(Resource):
def get(self):
if request.path.endswith('.help.json'):
return endpoint.__dict__
if request.path.endswith('.help.html'):
return html.render_endpoint(endpoint)
return SwaggerResource
class SwaggerEndpoint(object):
def __init__(self, resource, path):
self.path = extract_swagger_path(path)
path_arguments = extract_path_arguments(path)
self.description = inspect.getdoc(resource)
self.operations = self.extract_operations(resource, path_arguments)
@staticmethod
def extract_operations(resource, path_arguments=[]):
operations = []
for method in resource.methods:
method_impl = resource.__dict__[method.lower()]
op = {
'method': method,
'parameters': path_arguments,
'nickname': 'nickname'
}
op['summary'] = inspect.getdoc(method_impl)
if '__swagger_attr' in method_impl.__dict__:
# This method was annotated with @swagger.operation
decorators = method_impl.__dict__['__swagger_attr']
for att_name, att_value in decorators.items():
if isinstance(att_value, (basestring, int, list)):
if att_name == 'parameters':
op['parameters'] = merge_parameter_list(op['parameters'], att_value)
else:
op[att_name] = att_value
elif isinstance(att_value, object):
op[att_name] = att_value.__name__
operations.append(op)
return operations
def merge_parameter_list(base, override):
base = list(base)
names = [x['name'] for x in base]
for o in override:
if o['name'] in names:
for n, i in enumerate(base):
if i['name'] == o['name']:
base[n] = o
else:
base.append(o)
return base
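# Illustrative behaviour (assumed values): same-named entries in ``override``
# replace those in ``base``; new names are appended:
#
#   merge_parameter_list(
#       [{'name': 'id', 'paramType': 'path'}],
#       [{'name': 'id', 'dataType': 'string'}, {'name': 'q'}])
#   # -> [{'name': 'id', 'dataType': 'string'}, {'name': 'q'}]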
class SwaggerRegistry(Resource):
def get(self):
if request.path.endswith('.html'):
return html.render_homepage(resource_listing_endpoint)
return registry
def operation(**kwargs):
"""
This decorator marks a function as a swagger operation so that we can easily
extract attributes from it.
It saves the decorator's key-values at the function level so we can
extract them later when add_resource is invoked.
"""
def inner(f):
f.__swagger_attr = kwargs
return f
return inner
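# Usage sketch (the resource and attribute values are hypothetical):
#
#   class TodoItem(Resource):
#       @operation(notes='get a todo item by id',
#                  nickname='get',
#                  parameters=[{'name': 'todo_id', 'dataType': 'string',
#                               'paramType': 'path'}])
#       def get(self, todo_id):
#           return {'todo_id': todo_id}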
def model(c=None, *args, **kwargs):
add_model(c)
return c
class _Nested(object):
def __init__(self, klass, **kwargs):
self._nested = kwargs
self._klass = klass
def __call__(self, *args, **kwargs):
return self._klass(*args, **kwargs)
def nested(self):
return self._nested
# wrap _Nested to allow for deferred calling
def nested(klass=None, **kwargs):
if klass:
ret = _Nested(klass)
functools.update_wrapper(ret, klass)
else:
def wrapper(klass):
wrapped = _Nested(klass, **kwargs)
functools.update_wrapper(wrapped, klass)
return wrapped
ret = wrapper
return ret
def add_model(model_class):
models = registry['models']
name = model_class.__name__
model = models[name] = {'id': name}
model['description'] = inspect.getdoc(model_class)
if 'resource_fields' in dir(model_class):
# We take special care when the model class has a resource_fields attribute.
# By convention this attribute specifies what flask-restful would return when
# this model is used as a return value from an HTTP endpoint.
# If the attribute exists, we deduce the swagger model from its content.
properties = model['properties'] = {}
nested = model_class.nested() if isinstance(model_class, _Nested) else {}
for field_name, field_type in model_class.resource_fields.items():
nested_type = nested[field_name] if field_name in nested else None
properties[field_name] = deduce_swagger_type(field_type, nested_type)
properties[field_name]['description'] = getattr(
field_type, 'attribute', '')
elif '__init__' in dir(model_class):
# Alternatively, if a resource_fields does not exist, we deduce the model
# fields from the parameters sent to its __init__ method
# Credits for this snippet go to Robin Walsh
# https://github.com/hobbeswalsh/flask-sillywalk
argspec = inspect.getargspec(model_class.__init__)
argspec.args.remove("self")
defaults = []
required = model['required'] = []
if argspec.defaults:
# zip() returns an iterator on Python 3; materialize it so it can be
# sized and iterated more than once.
defaults = list(zip(argspec.args[-len(argspec.defaults):],
argspec.defaults))
properties = model['properties'] = {}
# args[:-0] would be empty, so only slice when there are defaults
required_args = argspec.args[:-len(defaults)] if defaults else argspec.args
for arg in required_args:
required.append(arg)
# type: string for lack of better knowledge, until we add more metadata
properties[arg] = {'type': 'string'}
for k, v in defaults:
properties[k] = {'type': 'string', "default": v}
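# Sketch of the two conventions add_model understands (hypothetical class):
#
#   @model
#   class TodoItemModel(object):
#       # Option 1: flask-restful resource_fields drive the swagger model.
#       resource_fields = {'todo_id': fields.String,
#                          'priority': fields.Integer}
#
# Without resource_fields, the arguments of __init__ become string-typed
# properties, and those without defaults are listed as required.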
def deduce_swagger_type(python_type_or_object, nested_type=None):
import inspect
if inspect.isclass(python_type_or_object):
predicate = issubclass
else:
predicate = isinstance
if predicate(python_type_or_object, (basestring,
fields.String,
fields.FormattedString,
fields.Url,
int,
fields.Integer,
float,
fields.Float,
fields.Arbitrary,
fields.Fixed,
bool,
fields.Boolean,
fields.DateTime)):
return {'type': deduce_swagger_type_flat(python_type_or_object)}
if predicate(python_type_or_object, (fields.List)):
if inspect.isclass(python_type_or_object):
return {'type': 'array'}
else:
return {'type': 'array',
'items': {
'$ref': deduce_swagger_type_flat(python_type_or_object.container, nested_type)}}
if predicate(python_type_or_object, (fields.Nested)):
return {'type': nested_type}
return {'type': 'null'}
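# For example (assumed inputs): deduce_swagger_type(fields.Integer) yields
# {'type': 'integer'}, while an instance such as fields.List(fields.String)
# takes the array branch and yields
# {'type': 'array', 'items': {'$ref': 'string'}}.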
def deduce_swagger_type_flat(python_type_or_object, nested_type=None):
if nested_type:
return nested_type
import inspect
if inspect.isclass(python_type_or_object):
predicate = issubclass
else:
predicate = isinstance
if predicate(python_type_or_object, (basestring,
fields.String,
fields.FormattedString,
fields.Url)):
return 'string'
if predicate(python_type_or_object, (int,
fields.Integer)):
return 'integer'
if predicate(python_type_or_object, (float,
fields.Float,
fields.Arbitrary,
fields.Fixed)):
return 'number'
if predicate(python_type_or_object, (bool,
fields.Boolean)):
return 'boolean'
if predicate(python_type_or_object, (fields.DateTime,)):
return 'date-time'
def extract_swagger_path(path):
"""
Extracts a swagger type path from the given flask style path.
This /path/<parameter> turns into this /path/{parameter}
And this /<string(length=2):lang_code>/<string:id>/<float:probability>
to this: /{lang_code}/{id}/{probability}
"""
return re.sub('<(?:[^:]+:)?([^>]+)>', '{\\1}', path)
def extract_path_arguments(path):
"""
Extracts a swagger path arguments from the given flask path.
This /path/<parameter> extracts [{name: 'parameter'}]
And this /<string(length=2):lang_code>/<string:id>/<float:probability>
extracts: [
{name: 'lang_code', dataType: 'string'},
{name: 'id', dataType: 'string'}
{name: 'probability', dataType: 'float'}]
"""
# Remove all parentheses
path = re.sub(r'\([^\)]*\)', '', path)
args = re.findall('<([^>]+)>', path)
def split_arg(arg):
spl = arg.split(':')
if len(spl) == 1:
return {'name': spl[0],
'paramType': 'path'}
else:
return {'name': spl[1],
'dataType': spl[0],
'paramType': 'path'}
return [split_arg(arg) for arg in args]
|
|
#
#
# All Rights Reserved.
# Copyright 2011 OpenStack LLC.
# Copyright 2010 Jacob Kaplan-Moss
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
import abc
import six
import contextlib
import hashlib
import os
from paxes_cinder.k2aclient import utils
from paxes_cinder.k2aclient.v1 import v1k2creater
from paxes_cinder.k2aclient.v1 import v1k2loader
from paxes_cinder.k2aclient.v1 import k2web
# from paxes_cinder.k2aclient.k2exclogger import K2ResponseLogger
class Manager(object):
"""
Managers interact with a particular uom element
and provide CRUD operations for them.
"""
resource_class = None
def __init__(self, api):
self.api = api
def _list(self, url, xa=None):
obj_class = self.resource_class
k2resp = self.api.client.get(url, xa=xa)
with self.completion_cache('uuid', obj_class, mode="w"):
return [obj_class().loadAsRoot(self, k2entry, k2resp, False)
for k2entry in k2resp.feed.entries]
@contextlib.contextmanager
def completion_cache(self, cache_type, obj_class, mode):
"""
The completion cache stores items that can be used for bash
autocompletion, like UUIDs.
A resource listing will clear and re-populate the cache.
A resource create will append to the cache.
Delete is not handled because listings are assumed to be performed
often enough to keep the cache reasonably up-to-date.
"""
base_dir = utils.env('K2ACLIENT_UUID_CACHE_DIR',
default="~/.k2aclient")
# NOTE(sirp): Keep separate UUID caches for each username + endpoint
# pair
username = utils.env('K2_USERNAME')
url = utils.env('K2_URL')
uniqifier = hashlib.md5((username + url).encode('utf-8')).hexdigest()
cache_dir = os.path.expanduser(os.path.join(base_dir, uniqifier))
try:
os.makedirs(cache_dir, 0o755)
except OSError:
# NOTE(kiall): This is typically either permission denied while
# attempting to create the directory, or the directory
# already exists. Either way, don't fail.
pass
resource = obj_class.__name__.lower()
filename = "%s-%s-cache" % (resource, cache_type.replace('_', '-'),)
path = os.path.join(cache_dir, filename)
cache_attr = "_%s_cache" % cache_type
try:
setattr(self, cache_attr, open(path, mode))
except IOError:
# NOTE(kiall): This is typically a permission denied while
# attempting to write the cache file.
pass
try:
yield
finally:
cache = getattr(self, cache_attr, None)
if cache:
cache.close()
delattr(self, cache_attr)
def write_to_completion_cache(self, cache_type, val):
cache = getattr(self, "_%s_cache" % cache_type, None)
if cache:
cache.write("%s\n" % val)
def _get(self, url, xag=[], xa=None):
k2resp = self.api.client.get(url, xag=xag, xa=xa)
obj_class = self.resource_class
obj = obj_class().loadAsRoot(self, k2resp.entry, k2resp, True)
return obj
def _refresh(self, service, root, child=None, xa=None):
etag = root.k2resp.headers['etag']
root_type = root.__class__.__name__
root_id = root.id
child_type = None
child_id = None
if child is not None:
etag = child.k2resp.headers['etag']
child_type = child.__class__.__name__
child_id = child.id
k2resp = self.api.client.refresh(service,
etag,
root_type,
root_id,
child_type=child_type,
child_id=child_id,
xa=xa)
obj = None
if k2resp.status != 304:
obj_class = self.resource_class
obj = obj_class().loadAsRoot(self, k2resp.entry, k2resp, True)
return obj
def _create(self, service, root, child=None, xa=None):
root_type = root.__class__.__name__
root_id = None
child_type = None
if child is not None:
element_type = type(child)
root_id = root.id
child_type = child.__class__.__name__
element = v1k2creater.process_root(service,
v1k2creater.Mode.CREATE,
child)
else:
element_type = type(root)
element = v1k2creater.process_root(service,
v1k2creater.Mode.CREATE,
root)
k2resp = self.api.client.create(service,
element,
root_type,
root_id=root_id,
child_type=child_type,
xa=xa)
obj_class = element_type
obj = obj_class().loadAsRoot(self, k2resp.entry, k2resp, True)
return obj
def _update(self, service, root, child=None, xa=None):
etag = root.k2resp.headers['etag']
root_type = root.__class__.__name__
root_id = root.id
child_type = None
child_id = None
if child is not None:
etag = child.k2resp.headers['etag']
element_type = type(child)
child_type = child.__class__.__name__
child_id = child.id
element = v1k2creater.process_root(service,
v1k2creater.Mode.UPDATE,
child)
else:
element_type = type(root)
element = v1k2creater.process_root(service,
v1k2creater.Mode.UPDATE,
root)
k2resp = self.api.client.update(service,
etag,
element,
root_type,
root_id,
child_type=child_type,
child_id=child_id,
xa=xa)
obj_class = element_type
# print "Updated: child_type: >%s<, child_id: >%s" % (child_type,
# child_id)
obj = obj_class().loadAsRoot(self, k2resp.entry, k2resp, True)
return obj
def _delete(self, service, root, child=None, xa=None):
root_type = root.__class__.__name__
root_id = root.id
child_type = None
child_id = None
if child is not None:
child_type = child.__class__.__name__
child_id = child.id
k2resp = self.api.client.delete(service,
root_type,
root_id,
child_type=child_type,
child_id=child_id,
xa=xa)
# krl = K2ResponseLogger("/tmp/testing")
# krl.emit("OK", "DELETE SHOULD WORK", k2resp)
return k2resp
def _deletebyid(self, service, root_type, root_id,
child_type=None, child_id=None, xa=None):
k2resp = self.api.client.delete(service,
root_type,
root_id,
child_type=child_type,
child_id=child_id,
xa=xa)
# krl = K2ResponseLogger("/tmp/testing")
# krl.emit("OK", "DELETE SHOULD WORK", k2resp)
return k2resp
####
# Job methods
def _getjob(self, root, jobname, xa=None):
root_type = root.__class__.__name__
root_id = root.id
url = "/rest/api/uom/%s/%s/do/%s" % (root_type, root_id, jobname,)
k2resp = self.api.client.get(url, xa=xa)
obj = k2web.JobRequest()
v1k2loader.process_root("web", obj, k2resp.entry.element)
obj._k2resp = k2resp
return obj
def _runjob(self, root, child, xa=None):
service = "web"
root_type = root.__class__.__name__
root_id = root.id
element = v1k2creater.process_root(service,
v1k2creater.Mode.UPDATE,
child)
k2resp = self.api.client.runjob(element,
root_type,
root_id=root_id,
xa=xa)
obj = k2web.JobResponse()
v1k2loader.process_root("web", obj, k2resp.entry.element)
obj._k2resp = k2resp
return obj
def _readjob(self, job_id, xa=None):
k2resp = self.api.client.readjob(job_id, xa=xa)
obj = k2web.JobResponse()
v1k2loader.process_root("web", obj, k2resp.entry.element)
obj._k2resp = k2resp
return obj
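# Sketch of a concrete manager built on the CRUD helpers above; the resource
# class and uom URL are illustrative assumptions, not part of this module:
#
#   class ClusterManager(Manager):
#       resource_class = Cluster  # assumed k2a resource class
#
#       def list(self, xa=None):
#           return self._list("/rest/api/uom/Cluster", xa=xa)
#
#       def get(self, cluster_id, xa=None):
#           return self._get("/rest/api/uom/Cluster/%s" % cluster_id, xa=xa)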
class ManagerWithFind(six.with_metaclass(abc.ABCMeta, Manager)):
"""
Like a `Manager`, but with additional `find()`/`findall()` methods.
"""
@abc.abstractmethod
def list(self):
pass
# def find(self, **kwargs):
# """
# Find a single item with attributes matching ``**kwargs``.
#
# This isn't very efficient: it loads the entire list then filters on
# the Python side.
# """
# matches = self.findall(**kwargs)
# num_matches = len(matches)
# if num_matches == 0:
# msg = "No %s matching %s." % (self.resource_class.__name__,
# kwargs)
# raise exceptions.NotFound(msg)
# elif num_matches > 1:
# raise exceptions.NoUniqueMatch
# else:
# return matches[0]
#
# def findall(self, **kwargs):
# """
# Find all items with attributes matching ``**kwargs``.
#
# This isn't very efficient: it loads the entire list then filters on
# the Python side.
# """
# found = []
# searches = kwargs.items()
# for obj in self.list():
# try:
# if all(getattr(obj, attr) == value
# for (attr, value) in searches):
# found.append(obj)
# except AttributeError:
# continue
#
# return found
|
|
"""
The :mod:`sklearn.feature_extraction.image` submodule gathers utilities to
extract features from images.
"""
# Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Olivier Grisel
# Vlad Niculae
# License: BSD 3 clause
from itertools import product
import numbers
import numpy as np
from scipy import sparse
from numpy.lib.stride_tricks import as_strided
from ..utils import check_array, check_random_state
from ..utils.fixes import astype
from ..base import BaseEstimator
__all__ = ['PatchExtractor',
'extract_patches_2d',
'grid_to_graph',
'img_to_graph',
'reconstruct_from_patches_2d']
###############################################################################
# From an image to a graph
def _make_edges_3d(n_x, n_y, n_z=1):
"""Returns a list of edges for a 3D image.
Parameters
----------
n_x : integer
The size of the grid in the x direction.
n_y : integer
The size of the grid in the y direction.
n_z : integer, optional
The size of the grid in the z direction, defaults to 1
"""
vertices = np.arange(n_x * n_y * n_z).reshape((n_x, n_y, n_z))
edges_deep = np.vstack((vertices[:, :, :-1].ravel(),
vertices[:, :, 1:].ravel()))
edges_right = np.vstack((vertices[:, :-1].ravel(),
vertices[:, 1:].ravel()))
edges_down = np.vstack((vertices[:-1].ravel(), vertices[1:].ravel()))
edges = np.hstack((edges_deep, edges_right, edges_down))
return edges
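# Small worked sketch (values follow from the construction above): for a
# 2x2 grid, vertices are numbered 0..3 row-major and the right/down
# connections are
# >>> _make_edges_3d(2, 2)
# array([[0, 2, 0, 1],
#        [1, 3, 2, 3]])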
def _compute_gradient_3d(edges, img):
n_x, n_y, n_z = img.shape
gradient = np.abs(img[edges[0] // (n_y * n_z),
(edges[0] % (n_y * n_z)) // n_z,
(edges[0] % (n_y * n_z)) % n_z] -
img[edges[1] // (n_y * n_z),
(edges[1] % (n_y * n_z)) // n_z,
(edges[1] % (n_y * n_z)) % n_z])
return gradient
# XXX: Why mask the image after computing the weights?
def _mask_edges_weights(mask, edges, weights=None):
"""Apply a mask to edges (weighted or not)"""
inds = np.arange(mask.size)
inds = inds[mask.ravel()]
ind_mask = np.logical_and(np.in1d(edges[0], inds),
np.in1d(edges[1], inds))
edges = edges[:, ind_mask]
if weights is not None:
weights = weights[ind_mask]
if len(edges.ravel()):
maxval = edges.max()
else:
maxval = 0
order = np.searchsorted(np.unique(edges.ravel()), np.arange(maxval + 1))
edges = order[edges]
if weights is None:
return edges
else:
return edges, weights
def _to_graph(n_x, n_y, n_z, mask=None, img=None,
return_as=sparse.coo_matrix, dtype=None):
"""Auxiliary function for img_to_graph and grid_to_graph
"""
edges = _make_edges_3d(n_x, n_y, n_z)
if dtype is None:
if img is None:
dtype = np.int
else:
dtype = img.dtype
if img is not None:
img = np.atleast_3d(img)
weights = _compute_gradient_3d(edges, img)
if mask is not None:
edges, weights = _mask_edges_weights(mask, edges, weights)
diag = img.squeeze()[mask]
else:
diag = img.ravel()
n_voxels = diag.size
else:
if mask is not None:
mask = astype(mask, dtype=np.bool, copy=False)
mask = np.asarray(mask, dtype=np.bool)
edges = _mask_edges_weights(mask, edges)
n_voxels = np.sum(mask)
else:
n_voxels = n_x * n_y * n_z
weights = np.ones(edges.shape[1], dtype=dtype)
diag = np.ones(n_voxels, dtype=dtype)
diag_idx = np.arange(n_voxels)
i_idx = np.hstack((edges[0], edges[1]))
j_idx = np.hstack((edges[1], edges[0]))
graph = sparse.coo_matrix((np.hstack((weights, weights, diag)),
(np.hstack((i_idx, diag_idx)),
np.hstack((j_idx, diag_idx)))),
(n_voxels, n_voxels),
dtype=dtype)
if return_as is np.ndarray:
return graph.toarray()
return return_as(graph)
def img_to_graph(img, mask=None, return_as=sparse.coo_matrix, dtype=None):
"""Graph of the pixel-to-pixel gradient connections
Edges are weighted with the gradient values.
Read more in the :ref:`User Guide <image_feature_extraction>`.
Parameters
----------
img : ndarray, 2D or 3D
2D or 3D image
mask : ndarray of booleans, optional
An optional mask of the image, to consider only part of the
pixels.
return_as : np.ndarray or a sparse matrix class, optional
The class to use to build the returned adjacency matrix.
dtype : None or dtype, optional
The data of the returned sparse matrix. By default it is the
dtype of img
Notes
-----
For scikit-learn versions 0.14.1 and prior, return_as=np.ndarray was
handled by returning a dense np.matrix instance. Going forward, np.ndarray
returns an np.ndarray, as expected.
For compatibility, user code relying on this method should wrap its
calls in ``np.asarray`` to avoid type issues.
"""
img = np.atleast_3d(img)
n_x, n_y, n_z = img.shape
return _to_graph(n_x, n_y, n_z, mask, img, return_as, dtype)
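# Minimal usage sketch (illustrative, not from the original docstring):
# >>> img = np.arange(9, dtype=float).reshape(3, 3)
# >>> img_to_graph(img).shape   # 9 pixels -> 9x9 adjacency matrix
# (9, 9)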
def grid_to_graph(n_x, n_y, n_z=1, mask=None, return_as=sparse.coo_matrix,
dtype=np.int):
"""Graph of the pixel-to-pixel connections
Edges exist if 2 voxels are connected.
Parameters
----------
n_x : int
Dimension in x axis
n_y : int
Dimension in y axis
n_z : int, optional, default 1
Dimension in z axis
mask : ndarray of booleans, optional
An optional mask of the image, to consider only part of the
pixels.
return_as : np.ndarray or a sparse matrix class, optional
The class to use to build the returned adjacency matrix.
dtype : dtype, optional, default int
The data of the returned sparse matrix. By default it is int
Notes
-----
For scikit-learn versions 0.14.1 and prior, return_as=np.ndarray was
handled by returning a dense np.matrix instance. Going forward, np.ndarray
returns an np.ndarray, as expected.
For compatibility, user code relying on this method should wrap its
calls in ``np.asarray`` to avoid type issues.
"""
return _to_graph(n_x, n_y, n_z, mask=mask, return_as=return_as,
dtype=dtype)
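# Small sketch (computed from the edge construction above): a 2x2 grid gives
# a 4x4 adjacency matrix with ones on the diagonal and between 4-connected
# neighbours:
# >>> grid_to_graph(2, 2).toarray()
# array([[1, 1, 1, 0],
#        [1, 1, 0, 1],
#        [1, 0, 1, 1],
#        [0, 1, 1, 1]])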
###############################################################################
# From an image to a set of small image patches
def _compute_n_patches(i_h, i_w, p_h, p_w, max_patches=None):
"""Compute the number of patches that will be extracted in an image.
Read more in the :ref:`User Guide <image_feature_extraction>`.
Parameters
----------
i_h : int
The image height
i_w : int
The image width
p_h : int
The height of a patch
p_w : int
The width of a patch
max_patches : integer or float, optional default is None
The maximum number of patches to extract. If max_patches is a float
between 0 and 1, it is taken to be a proportion of the total number
of patches.
"""
n_h = i_h - p_h + 1
n_w = i_w - p_w + 1
all_patches = n_h * n_w
if max_patches:
if (isinstance(max_patches, (numbers.Integral))
and max_patches < all_patches):
return max_patches
elif (isinstance(max_patches, (numbers.Real))
and 0 < max_patches < 1):
return int(max_patches * all_patches)
else:
raise ValueError("Invalid value for max_patches: %r" % max_patches)
else:
return all_patches
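# For example, a 4x4 image with 2x2 patches has (4 - 2 + 1) ** 2 == 9 patches
# in total; max_patches=0.5 would cap that at int(0.5 * 9) == 4.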
def extract_patches(arr, patch_shape=8, extraction_step=1):
"""Extracts patches of any n-dimensional array in place using strides.
Given an n-dimensional array it will return a 2n-dimensional array with
the first n dimensions indexing patch position and the last n indexing
the patch content. This operation is immediate (O(1)). A reshape
performed on the first n dimensions will cause numpy to copy data, leading
to a list of extracted patches.
Read more in the :ref:`User Guide <image_feature_extraction>`.
Parameters
----------
arr : ndarray
n-dimensional array of which patches are to be extracted
patch_shape : integer or tuple of length arr.ndim
Indicates the shape of the patches to be extracted. If an
integer is given, the shape will be a hypercube of
sidelength given by its value.
extraction_step : integer or tuple of length arr.ndim
Indicates step size at which extraction shall be performed.
If integer is given, then the step is uniform in all dimensions.
Returns
-------
patches : strided ndarray
2n-dimensional array indexing patches on first n dimensions and
containing patches on the last n dimensions. These dimensions
are fake, but this way no data is copied. A simple reshape invokes
a copying operation to obtain a list of patches:
result.reshape([-1] + list(patch_shape))
"""
arr_ndim = arr.ndim
if isinstance(patch_shape, numbers.Number):
patch_shape = tuple([patch_shape] * arr_ndim)
if isinstance(extraction_step, numbers.Number):
extraction_step = tuple([extraction_step] * arr_ndim)
patch_strides = arr.strides
slices = tuple(slice(None, None, st) for st in extraction_step)
indexing_strides = arr[slices].strides
patch_indices_shape = ((np.array(arr.shape) - np.array(patch_shape)) //
np.array(extraction_step)) + 1
shape = tuple(list(patch_indices_shape) + list(patch_shape))
strides = tuple(list(indexing_strides) + list(patch_strides))
patches = as_strided(arr, shape=shape, strides=strides)
return patches
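# Usage sketch (assumed array; the shapes follow from the stride math above):
# >>> a = np.arange(16).reshape(4, 4)
# >>> extract_patches(a, patch_shape=2).shape   # strided view, no copy
# (3, 3, 2, 2)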
def extract_patches_2d(image, patch_size, max_patches=None, random_state=None):
"""Reshape a 2D image into a collection of patches
The resulting patches are allocated in a dedicated array.
Read more in the :ref:`User Guide <image_feature_extraction>`.
Parameters
----------
image : array, shape = (image_height, image_width) or
(image_height, image_width, n_channels)
The original image data. For color images, the last dimension specifies
the channel: an RGB image would have `n_channels=3`.
patch_size : tuple of ints (patch_height, patch_width)
the dimensions of one patch
max_patches : integer or float, optional default is None
The maximum number of patches to extract. If max_patches is a float
between 0 and 1, it is taken to be a proportion of the total number
of patches.
random_state : int, RandomState instance or None, optional (default=None)
Pseudo number generator state used for random sampling to use if
`max_patches` is not None. If int, random_state is the seed used by
the random number generator; If RandomState instance, random_state is
the random number generator; If None, the random number generator is
the RandomState instance used by `np.random`.
Returns
-------
patches : array, shape = (n_patches, patch_height, patch_width) or
(n_patches, patch_height, patch_width, n_channels)
The collection of patches extracted from the image, where `n_patches`
is either `max_patches` or the total number of patches that can be
extracted.
Examples
--------
>>> from sklearn.feature_extraction import image
>>> one_image = np.arange(16).reshape((4, 4))
>>> one_image
array([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11],
[12, 13, 14, 15]])
>>> patches = image.extract_patches_2d(one_image, (2, 2))
>>> print(patches.shape)
(9, 2, 2)
>>> patches[0]
array([[0, 1],
[4, 5]])
>>> patches[1]
array([[1, 2],
[5, 6]])
>>> patches[8]
array([[10, 11],
[14, 15]])
"""
i_h, i_w = image.shape[:2]
p_h, p_w = patch_size
if p_h > i_h:
raise ValueError("Height of the patch should be less than the height"
" of the image.")
if p_w > i_w:
raise ValueError("Width of the patch should be less than the width"
" of the image.")
image = check_array(image, allow_nd=True)
image = image.reshape((i_h, i_w, -1))
n_colors = image.shape[-1]
extracted_patches = extract_patches(image,
patch_shape=(p_h, p_w, n_colors),
extraction_step=1)
n_patches = _compute_n_patches(i_h, i_w, p_h, p_w, max_patches)
if max_patches:
rng = check_random_state(random_state)
i_s = rng.randint(i_h - p_h + 1, size=n_patches)
j_s = rng.randint(i_w - p_w + 1, size=n_patches)
patches = extracted_patches[i_s, j_s, 0]
else:
patches = extracted_patches
patches = patches.reshape(-1, p_h, p_w, n_colors)
# remove the color dimension if useless
if patches.shape[-1] == 1:
return patches.reshape((n_patches, p_h, p_w))
else:
return patches
def reconstruct_from_patches_2d(patches, image_size):
"""Reconstruct the image from all of its patches.
Patches are assumed to overlap and the image is constructed by filling in
the patches from left to right, top to bottom, averaging the overlapping
regions.
Read more in the :ref:`User Guide <image_feature_extraction>`.
Parameters
----------
patches : array, shape = (n_patches, patch_height, patch_width) or
(n_patches, patch_height, patch_width, n_channels)
The complete set of patches. If the patches contain colour information,
channels are indexed along the last dimension: RGB patches would
have `n_channels=3`.
image_size : tuple of ints (image_height, image_width) or
(image_height, image_width, n_channels)
the size of the image that will be reconstructed
Returns
-------
image : array, shape = image_size
the reconstructed image
"""
i_h, i_w = image_size[:2]
p_h, p_w = patches.shape[1:3]
img = np.zeros(image_size)
# compute the dimensions of the patches array
n_h = i_h - p_h + 1
n_w = i_w - p_w + 1
for p, (i, j) in zip(patches, product(range(n_h), range(n_w))):
img[i:i + p_h, j:j + p_w] += p
for i in range(i_h):
for j in range(i_w):
# divide by the amount of overlap
# XXX: is this the most efficient way? memory-wise yes, cpu wise?
img[i, j] /= float(min(i + 1, p_h, i_h - i) *
min(j + 1, p_w, i_w - j))
return img
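# Round-trip sketch (illustrative): keeping every patch and averaging the
# overlaps reproduces the original image exactly:
# >>> one_image = np.arange(16, dtype=float).reshape(4, 4)
# >>> patches = extract_patches_2d(one_image, (2, 2))
# >>> np.allclose(reconstruct_from_patches_2d(patches, (4, 4)), one_image)
# True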
class PatchExtractor(BaseEstimator):
"""Extracts patches from a collection of images
Read more in the :ref:`User Guide <image_feature_extraction>`.
Parameters
----------
patch_size : tuple of ints (patch_height, patch_width)
the dimensions of one patch
max_patches : integer or float, optional default is None
The maximum number of patches per image to extract. If max_patches is a
float in (0, 1), it is taken to mean a proportion of the total number
of patches.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, patch_size=None, max_patches=None, random_state=None):
self.patch_size = patch_size
self.max_patches = max_patches
self.random_state = random_state
def fit(self, X, y=None):
"""Do nothing and return the estimator unchanged
This method is just there to implement the usual API and hence
work in pipelines.
"""
return self
def transform(self, X):
"""Transforms the image samples in X into a matrix of patch data.
Parameters
----------
X : array, shape = (n_samples, image_height, image_width) or
(n_samples, image_height, image_width, n_channels)
Array of images from which to extract patches. For color images,
the last dimension specifies the channel: an RGB image would have
`n_channels=3`.
Returns
-------
patches : array, shape = (n_patches, patch_height, patch_width) or
(n_patches, patch_height, patch_width, n_channels)
The collection of patches extracted from the images, where
`n_patches` is either `n_samples * max_patches` or the total
number of patches that can be extracted.
"""
self.random_state = check_random_state(self.random_state)
n_images, i_h, i_w = X.shape[:3]
X = np.reshape(X, (n_images, i_h, i_w, -1))
n_channels = X.shape[-1]
if self.patch_size is None:
patch_size = i_h // 10, i_w // 10
else:
patch_size = self.patch_size
# compute the dimensions of the patches array
p_h, p_w = patch_size
n_patches = _compute_n_patches(i_h, i_w, p_h, p_w, self.max_patches)
patches_shape = (n_images * n_patches,) + patch_size
if n_channels > 1:
patches_shape += (n_channels,)
# extract the patches
patches = np.empty(patches_shape)
for ii, image in enumerate(X):
patches[ii * n_patches:(ii + 1) * n_patches] = extract_patches_2d(
image, patch_size, self.max_patches, self.random_state)
return patches
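# Minimal usage sketch (assumed shapes and seed, not part of the class):
# >>> rng = np.random.RandomState(0)
# >>> X = rng.rand(5, 16, 16)   # five 16x16 single-channel images
# >>> PatchExtractor(patch_size=(4, 4), max_patches=10,
# ...                random_state=0).transform(X).shape
# (50, 4, 4)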
|
|
# These are the run-once tasks to get AWS ready-for-use, in addition to
# utilities used by our scripts to access AWS.
import os
import time
import boto.ec2
import boto.exception
import boto.manage.cmdshell
import boto.sdb
import pprint
SSH_FOLDER = os.path.expanduser("~/.ssh/")
AWS_REGION = "us-east-1" # US East (Virginia)
def create_keypair(key_name):
"""
Create the public-key crypto pair so we can log in to our new instances.
AWS stores the public key under a name we provide, we need to save the
private key ourselves.
"""
if os.path.isfile(SSH_FOLDER + key_name + ".pem"):
return # Key already created
ec2 = boto.ec2.connect_to_region(AWS_REGION)
key = ec2.create_key_pair(key_name)
key.save(SSH_FOLDER)
def create_security_group(group_name):
"""
Instances are pretty locked down by default. We can assign them to
security groups to give access rights.
"""
ec2 = boto.ec2.connect_to_region(AWS_REGION)
for g in ec2.get_all_security_groups():
if g.name == group_name:
return # We already have this group setup
group = ec2.create_security_group(group_name,
"%s SSH access group" % group_name)
group.authorize("tcp", 22, 22, "0.0.0.0/0") # SSH is on port 22, all IPs
group.authorize("tcp", 80, 80, "0.0.0.0/0")
group.authorize("tcp", 61000, 65000, "0.0.0.0/0")
print "Created new security group"
def launch_instance(tag, key_name, group_name, inst_type, ami_name, user_data,
wait=True, returninfo=None):
"""
Launch a testing instance. Doesn't actually attempt to connect as
it can take quite a while between 'running' and connectability.
"""
ec2 = boto.ec2.connect_to_region(AWS_REGION)
failures = 0
max_failures = 10
while True:
try:
reservation = ec2.run_instances(ami_name,
key_name=key_name,
security_groups=[group_name],
instance_type=inst_type,
user_data=user_data)
break
except Exception as err:
# Failed to get instance; wait 15 seconds and then try again (up to
# 10 total times)
errortext = str(err)
if errortext.find("Not authorized for images") >= 0:
print "**************************************"
print "* Error from AWS suggests that the AMI code in"
print "* CloudSetup.py is deprecated. Please go to"
print "* https://aws.amazon.com/marketplace/ and search for"
print "* \"Ubuntu server lts hvm\", selecting the most recent"
print "* version. Click \"Continue\", \"Manual Launch\","
print "* and then copy the AMI ID for the US East region."
print "* Copy that to the AMI_NAME value in CloudSetup.py"
print "* and re-run."
print "***************************************"
print "* (Full text of error):"
print errortext
print "***************************************"
return None
elif errortext.find("accept terms and subscribe") >= 0:
print "**************************************"
print "* Error from AWS suggests that you have never used this"
print "* AMI before and need to accept its terms and"
print "* subscribe to it. Please follow the link in the below"
print "* error text. Click \"Continue\", \"Manual Launch\","
print "* and \"Accept Terms\". After receiving email"
print "* confirmation, you can re-run the code."
print "**************************************"
print "* (Full text of error):"
print errortext
print "**************************************"
return None
failures += 1
if failures == max_failures:
print "**************************************"
print "* Maximum number of instance launch failures reached."
print "* (Full text of error):"
print errortext
print "**************************************"
return None
print " ** ec2.run_instances failed for tag", tag, "; waiting 15"
print " ** seconds and then trying again..."
time.sleep(15)
time.sleep(5) # Slow things down -- they're never running super fast anyway
instance = reservation.instances[0]
time.sleep(5) # Slow things down -- they're never running super fast anyway
instance.add_tag("tag", tag)
time.sleep(5) # Slow things down -- they're never running super fast anyway
if wait:
print " Instance requested, waiting for 'running' for tag", tag
while instance.state != "running":
print " %s ..." % tag
time.sleep(5)
try:
instance.update()
except boto.exception.EC2ResponseError as e:
print "******************"
print "Error caught in instance.update():"
                print str(e)
print "******************"
print " %s done!" % tag
if returninfo:
returninfo.put(tag)
return instance
def get_instance(tag):
"""
Get instance by tag
"""
ec2 = boto.ec2.connect_to_region(AWS_REGION)
reservations = ec2.get_all_instances()
for res in reservations:
for inst in res.instances:
if "tag" in inst.tags.keys():
if inst.tags["tag"] == tag and inst.state == "running":
#print "Found %s"%tag
return inst
print "Couldn't find instance"
return None
def connect_instance(tag, key_name, user_name):
"""
Connect to a running instance using a tag
"""
inst = get_instance(tag)
cmd = boto.manage.cmdshell.sshclient_from_instance(
inst,
SSH_FOLDER + key_name + ".pem",
user_name=user_name
)
return inst, cmd
def terminate_instance(tag):
inst = get_instance(tag)
inst.terminate()
def add_tag(instance_tag, new_tag_key, new_tag_val):
inst = get_instance(instance_tag)
inst.add_tag(new_tag_key, new_tag_val)
###############################################################################
def setup_sdb_domain(domain_name):
sdb = boto.sdb.connect_to_region(AWS_REGION)
# Only create if it doesn't exist already
try:
dom = sdb.get_domain(domain_name, validate=True)
    except boto.exception.SDBResponseError:
# Doesn't exist yet
dom = sdb.create_domain(domain_name)
return sdb, dom
def delete_sdb_domain(domain_name):
sdb, dom = setup_sdb_domain(domain_name)
sdb.delete_domain(domain_name)
def dump_sdb_domain(domain_name):
pp = pprint.PrettyPrinter(indent=2)
sdb, dom = setup_sdb_domain(domain_name)
rs = dom.select('select * from `' + domain_name + '`')
for j in rs:
pp.pprint(j)
def get_sdb_domain_size(domain_name):
sdb, dom = setup_sdb_domain(domain_name)
rs = dom.select('select count(*) from `' + domain_name + '`')
ct = 0
for res in rs:
ct += int(res[u'Count'])
print "Size of", domain_name, ":", ct
###############################################################################
def setup_s3_bucket(bucket_name):
s3 = boto.s3.connect_to_region(AWS_REGION)
# Only create if it doesn't exist already
try:
bucket = s3.get_bucket(bucket_name, validate=True)
    except boto.exception.S3ResponseError:
# Doesn't exist yet
bucket = s3.create_bucket(bucket_name)
return s3, bucket
def delete_s3_bucket(bucket_name):
s3, bucket = setup_s3_bucket(bucket_name)
# TODO this needs to empty the bucket first
s3.delete_bucket(bucket_name)
def add_file_to_s3_bucket(bucket, filekey, filename):
key = boto.s3.key.Key(bucket)
key.key = filekey + "-" + filename
key.set_contents_from_filename(filename)
def download_s3_bucket(bucket_name, output_folder):
s3, bucket = setup_s3_bucket(bucket_name)
if not os.path.exists(output_folder):
os.mkdir(output_folder)
bucket_list = bucket.list()
for key in bucket_list:
d = os.path.join(output_folder, os.path.split(key.key)[1])
print(os.path.split(key.key)[1])
key.get_contents_to_filename(d)
###############################################################################
# Several cleanup tasks to make starting a cluster less annoying:
def clean_known_hosts():
with open(SSH_FOLDER + "known_hosts", "rU") as fp:
lines = fp.readlines()
filtered = [x for x in lines if x.find("ec2-") != 0]
with open(SSH_FOLDER + "known_hosts", "w") as fp:
for line in filtered:
fp.write(line)
print "Removed", len(lines) - len(filtered), "lines from ~/.ssh/known_hosts"
def get_num_running():
ec2 = boto.ec2.connect_to_region(AWS_REGION)
reservations = ec2.get_all_instances()
num_shutting_down = 0
num_pending_running = 0
num_stop = 0
num_terminate = 0
for res in reservations:
for inst in res.instances:
if inst.state == "shutting-down":
num_shutting_down += 1
elif inst.state in ["pending", "running"]:
num_pending_running += 1
elif inst.state in ["stopping", "stopped"]:
num_stop += 1
elif inst.state == "terminated":
num_terminate += 1
return (num_shutting_down, num_pending_running, num_stop, num_terminate)
def print_num_running():
nr = get_num_running()
print "Number Shutting Down:", nr[0]
print "Number Pending or Running:", nr[1]
print "Number Stopping or Stopped:", nr[2]
print "Number Terminated:", nr[3]
# Wait for all the EC2 nodes that are in shutting-down to reach 'terminated' status.
def wait_for_shutdown():
while True:
n_shut_down, n_pend_run, n_stop, n_terminate = get_num_running()
if n_shut_down == 0:
print "No nodes shutting down"
return
else:
            print n_shut_down, "instance(s) still shutting down and", \
                n_pend_run, "pending/running; waiting"
time.sleep(5.0)
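# A minimal run-once sketch tying the helpers above together. The key name,
# group name, instance type and AMI ID are illustrative placeholders, not
# values this module ships with.
if __name__ == "__main__":
    create_keypair("test-key")
    create_security_group("test-group")
    inst = launch_instance(tag="node-0",
                           key_name="test-key",
                           group_name="test-group",
                           inst_type="t1.micro",
                           ami_name="ami-00000000",  # placeholder AMI ID
                           user_data=None)
    if inst is not None:
        print "Launched instance", inst.id
        print_num_running()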
|
|
# Default Django settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
gettext_noop = lambda s: s
####################
# CORE #
####################
DEBUG = False
TEMPLATE_DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# Whether to use the "Etag" header. This saves bandwidth but adds processing overhead.
USE_ETAGS = False
# People who get code error notifications.
# In the format (('Full Name', 'email@example.com'), ('Full Name', 'anotheremail@example.com'))
ADMINS = ()
# Tuple of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = ()
# Local time zone for this installation. All choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities). When USE_TZ is True, this is
# interpreted as the default user time zone.
TIME_ZONE = 'America/Chicago'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = False
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Languages we provide translations for, out of the box.
LANGUAGES = (
('ar', gettext_noop('Arabic')),
('az', gettext_noop('Azerbaijani')),
('bg', gettext_noop('Bulgarian')),
('bn', gettext_noop('Bengali')),
('bs', gettext_noop('Bosnian')),
('ca', gettext_noop('Catalan')),
('cs', gettext_noop('Czech')),
('cy', gettext_noop('Welsh')),
('da', gettext_noop('Danish')),
('de', gettext_noop('German')),
('el', gettext_noop('Greek')),
('en', gettext_noop('English')),
('en-gb', gettext_noop('British English')),
('eo', gettext_noop('Esperanto')),
('es', gettext_noop('Spanish')),
('es-ar', gettext_noop('Argentinian Spanish')),
('es-mx', gettext_noop('Mexican Spanish')),
('es-ni', gettext_noop('Nicaraguan Spanish')),
('et', gettext_noop('Estonian')),
('eu', gettext_noop('Basque')),
('fa', gettext_noop('Persian')),
('fi', gettext_noop('Finnish')),
('fr', gettext_noop('French')),
('fy-nl', gettext_noop('Frisian')),
('ga', gettext_noop('Irish')),
('gl', gettext_noop('Galician')),
('he', gettext_noop('Hebrew')),
('hi', gettext_noop('Hindi')),
('hr', gettext_noop('Croatian')),
('hu', gettext_noop('Hungarian')),
('id', gettext_noop('Indonesian')),
('is', gettext_noop('Icelandic')),
('it', gettext_noop('Italian')),
('ja', gettext_noop('Japanese')),
('ka', gettext_noop('Georgian')),
('kk', gettext_noop('Kazakh')),
('km', gettext_noop('Khmer')),
('kn', gettext_noop('Kannada')),
('ko', gettext_noop('Korean')),
('lt', gettext_noop('Lithuanian')),
('lv', gettext_noop('Latvian')),
('mk', gettext_noop('Macedonian')),
('ml', gettext_noop('Malayalam')),
('mn', gettext_noop('Mongolian')),
('nb', gettext_noop('Norwegian Bokmal')),
('ne', gettext_noop('Nepali')),
('nl', gettext_noop('Dutch')),
('nn', gettext_noop('Norwegian Nynorsk')),
('pa', gettext_noop('Punjabi')),
('pl', gettext_noop('Polish')),
('pt', gettext_noop('Portuguese')),
('pt-br', gettext_noop('Brazilian Portuguese')),
('ro', gettext_noop('Romanian')),
('ru', gettext_noop('Russian')),
('sk', gettext_noop('Slovak')),
('sl', gettext_noop('Slovenian')),
('sq', gettext_noop('Albanian')),
('sr', gettext_noop('Serbian')),
('sr-latn', gettext_noop('Serbian Latin')),
('sv', gettext_noop('Swedish')),
('sw', gettext_noop('Swahili')),
('ta', gettext_noop('Tamil')),
('te', gettext_noop('Telugu')),
('th', gettext_noop('Thai')),
('tr', gettext_noop('Turkish')),
('tt', gettext_noop('Tatar')),
('uk', gettext_noop('Ukrainian')),
('ur', gettext_noop('Urdu')),
('vi', gettext_noop('Vietnamese')),
('zh-cn', gettext_noop('Simplified Chinese')),
('zh-tw', gettext_noop('Traditional Chinese')),
)
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ("he", "ar", "fa")
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = ()
LANGUAGE_COOKIE_NAME = 'django_language'
# If you set this to True, Django will format dates, numbers and calendars
# according to the current user's locale.
USE_L10N = False
# Not-necessarily-technical managers of the site. They get broken link
# notifications and various other emails.
MANAGERS = ADMINS
# Default content type and charset to use for all HttpResponse objects, if a
# MIME type isn't manually specified. These are used to construct the
# Content-Type header.
DEFAULT_CONTENT_TYPE = 'text/html'
DEFAULT_CHARSET = 'utf-8'
# Encoding of files read from disk (template and initial SQL files).
FILE_CHARSET = 'utf-8'
# Email address that error messages come from.
SERVER_EMAIL = 'root@localhost'
# Whether to send broken-link emails. Deprecated, must be removed in 1.8.
SEND_BROKEN_LINK_EMAILS = False
# Database connection info. If left empty, will default to the dummy backend.
DATABASES = {}
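# Example override (in the module pointed to by DJANGO_SETTINGS_MODULE);
# the database file name below is illustrative:
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.sqlite3',
#         'NAME': 'db.sqlite3',
#     }
# }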
# Classes used to implement DB routing behavior.
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending email.
EMAIL_HOST = 'localhost'
# Port for sending email.
EMAIL_PORT = 25
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
# List of strings representing installed apps.
INSTALLED_APPS = ()
# List of locations of the template source files, in search order.
TEMPLATE_DIRS = ()
# List of callables that know how to import templates from various sources.
# See the comments in django/core/template/loader.py for interface
# documentation.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
# List of processors used by RequestContext to populate the context.
# Each one should be a callable that takes the request object as its
# only parameter and returns a dictionary to add to the context.
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
# 'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
)
# Output to use in template system for invalid (e.g. misspelled) variables.
TEMPLATE_STRING_IF_INVALID = ''
# Default email address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
# Subject-line prefix for email messages sent with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = '[Django] '
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = (
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search')
# )
DISALLOWED_USER_AGENTS = ()
ABSOLUTE_URL_OVERRIDES = {}
# Tuple of strings representing allowed prefixes for the {% ssi %} tag.
# Example: ('/home/html', '/var/www')
ALLOWED_INCLUDE_ROOTS = ()
# If this is an admin settings module, this should be a list of
# settings modules (in the format 'foo.bar.baz') for which this admin
# is an admin.
ADMIN_FOR = ()
# List of compiled regular expression objects representing URLs that need not
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
# import re
# IGNORABLE_404_URLS = (
# re.compile(r'^/apple-touch-icon.*\.png$'),
# re.compile(r'^/favicon.ico$'),
# re.compile(r'^/robots.txt$'),
# re.compile(r'^/phpmyadmin/'),
# re.compile(r'\.(cgi|php|pl)$'),
# )
IGNORABLE_404_URLS = ()
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ''
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = (
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Directory in which upload streamed files will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see http://docs.python.org/lib/os-file-dir.html.
FILE_UPLOAD_PERMISSIONS = None
# Python module path where the user will place custom format definitions.
# The directory this setting points to should contain subdirectories named
# after the locales, each containing a formats.py file
# (e.g. "myproject.locale" for myproject/locale/en/formats.py and so on)
FORMAT_MODULE_PATH = None
# Default formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
# Default formatting for datetime objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = 'N j, Y, P'
# Default formatting for time objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = 'P'
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = 'F Y'
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = 'F j'
# Default short formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = 'm/d/Y'
# Default short formatting for datetime objects.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = 'm/d/Y P'
# Default formats to be used when parsing dates from input boxes, in order.
# See all available format strings here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
# Default formats to be used when parsing times from input boxes, in order.
# See all available format strings here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
# Default formats to be used when parsing dates and times from input boxes,
# in order.
# See all available format strings here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
)
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = '.'
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits grouped together when splitting by THOUSAND_SEPARATOR.
# 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ','
# Do you want to manage transactions manually?
# Hint: you really don't!
TRANSACTIONS_MANAGED = False
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ''
DEFAULT_INDEX_TABLESPACE = ''
# Default X-Frame-Options header value
X_FRAME_OPTIONS = 'SAMEORIGIN'
USE_X_FORWARDED_HOST = False
# The Python dotted path to the WSGI application that Django's internal servers
# (runserver, runfcgi) will use. If `None`, the return value of
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
# behavior as previous versions of Django. Otherwise this should point to an
# actual WSGI application object.
WSGI_APPLICATION = None
# If your Django app is behind a proxy that sets a header to specify secure
# connections, AND that proxy ensures that user-submitted headers with the
# same name are ignored (so that people can't spoof it), set this value to
# a tuple of (header_name, header_value). For any requests that come in with
# that header/value, request.is_secure() will return True.
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
# you may be opening yourself up to a security risk.
SECURE_PROXY_SSL_HEADER = None
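# For example, behind a proxy that sets X-Forwarded-Proto (and strips any
# client-supplied copy of that header), the setting would be:
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')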
##############
# MIDDLEWARE #
##############
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# 'django.middleware.http.ConditionalGetMiddleware',
# 'django.middleware.gzip.GZipMiddleware',
)
############
# SESSIONS #
############
SESSION_CACHE_ALIAS = 'default' # Cache to store session data if using the cache session backend.
SESSION_COOKIE_NAME = 'sessionid' # Cookie name. This can be whatever you want.
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_DOMAIN = None # A string like ".example.com", or None for standard domain cookie.
SESSION_COOKIE_SECURE = False # Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_PATH = '/' # The path of the session cookie.
SESSION_COOKIE_HTTPONLY = True # Whether to use the non-RFC standard httpOnly flag (IE, FF3+, others)
SESSION_SAVE_EVERY_REQUEST = False # Whether to save the session data on every request.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False # Whether a user's session cookie expires when the Web browser is closed.
SESSION_ENGINE = 'django.contrib.sessions.backends.db' # The module to store session data
SESSION_FILE_PATH = None # Directory to store session files if using the file session module. If None, the backend will use a sensible default.
#########
# CACHE #
#########
# The cache backends to use.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
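# Example override using Memcached (the LOCATION address is illustrative):
# CACHES = {
#     'default': {
#         'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#         'LOCATION': '127.0.0.1:11211',
#     }
# }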
CACHE_MIDDLEWARE_KEY_PREFIX = ''
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = 'default'
####################
# COMMENTS #
####################
COMMENTS_ALLOW_PROFANITIES = False
# The profanities that will trigger a validation error in
# CommentDetailsForm.clean_comment. All of these should be in lowercase.
PROFANITIES_LIST = ()
##################
# AUTHENTICATION #
##################
AUTH_USER_MODEL = 'auth.User'
AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',)
LOGIN_URL = '/accounts/login/'
LOGOUT_URL = '/accounts/logout/'
LOGIN_REDIRECT_URL = '/accounts/profile/'
# The number of days a password reset link is valid for
PASSWORD_RESET_TIMEOUT_DAYS = 3
# The first hasher in this list is the preferred algorithm. Any
# password using a different algorithm will be converted automatically
# upon login.
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
'django.contrib.auth.hashers.CryptPasswordHasher',
)
###########
# SIGNING #
###########
SIGNING_BACKEND = 'django.core.signing.TimestampSigner'
########
# CSRF #
########
# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure'
# Settings for CSRF cookie.
CSRF_COOKIE_NAME = 'csrftoken'
CSRF_COOKIE_DOMAIN = None
CSRF_COOKIE_PATH = '/'
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
############
# MESSAGES #
############
# Class to use as messages backend
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.
###########
# LOGGING #
###########
# The callable to use to configure logging
LOGGING_CONFIG = 'django.utils.log.dictConfig'
# Custom logging configuration.
LOGGING = {}
# Default exception reporter filter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER_FILTER = 'django.views.debug.SafeExceptionReporterFilter'
###########
# TESTING #
###########
# The name of the class to use to run the test suite
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
############
# FIXTURES #
############
# The list of directories to search for fixtures
FIXTURE_DIRS = ()
###############
# STATICFILES #
###############
# A list of locations of additional static files
STATICFILES_DIRS = ()
# The default file storage backend used during the build process
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
|
|
"""Here we define the exported functions, types, etc... which need to be
exported through a global C pointer.
Each dictionary contains name -> index pairs.
Whenever you change one index, you break the ABI (and the ABI version number
should be incremented). Whenever you add an item to one of the dicts, the API
needs to be updated in both setup_common.py and by adding an appropriate
entry to cversion.txt (generate the hash via "python cversions.py").
When adding a function, make sure to use the next integer not used as an index
(if you reuse an existing index or skip one, the build will stop and raise an
exception, so mistakes should not go unnoticed).
"""
from __future__ import division, absolute_import, print_function
from code_generators.genapi import StealRef, NonNull
# index, type
multiarray_global_vars = {
'NPY_NUMUSERTYPES': (7, 'int'),
'NPY_DEFAULT_ASSIGN_CASTING': (292, 'NPY_CASTING'),
}
multiarray_scalar_bool_values = {
'_PyArrayScalar_BoolValues': (9,)
}
# index, annotations
# please mark functions that have been checked to not need any annotations
multiarray_types_api = {
'PyBigArray_Type': (1,),
'PyArray_Type': (2,),
'PyArrayDescr_Type': (3,),
'PyArrayFlags_Type': (4,),
'PyArrayIter_Type': (5,),
'PyArrayMultiIter_Type': (6,),
'PyBoolArrType_Type': (8,),
'PyGenericArrType_Type': (10,),
'PyNumberArrType_Type': (11,),
'PyIntegerArrType_Type': (12,),
'PySignedIntegerArrType_Type': (13,),
'PyUnsignedIntegerArrType_Type': (14,),
'PyInexactArrType_Type': (15,),
'PyFloatingArrType_Type': (16,),
'PyComplexFloatingArrType_Type': (17,),
'PyFlexibleArrType_Type': (18,),
'PyCharacterArrType_Type': (19,),
'PyByteArrType_Type': (20,),
'PyShortArrType_Type': (21,),
'PyIntArrType_Type': (22,),
'PyLongArrType_Type': (23,),
'PyLongLongArrType_Type': (24,),
'PyUByteArrType_Type': (25,),
'PyUShortArrType_Type': (26,),
'PyUIntArrType_Type': (27,),
'PyULongArrType_Type': (28,),
'PyULongLongArrType_Type': (29,),
'PyFloatArrType_Type': (30,),
'PyDoubleArrType_Type': (31,),
'PyLongDoubleArrType_Type': (32,),
'PyCFloatArrType_Type': (33,),
'PyCDoubleArrType_Type': (34,),
'PyCLongDoubleArrType_Type': (35,),
'PyObjectArrType_Type': (36,),
'PyStringArrType_Type': (37,),
'PyUnicodeArrType_Type': (38,),
'PyVoidArrType_Type': (39,),
# End 1.5 API
'PyTimeIntegerArrType_Type': (214,),
'PyDatetimeArrType_Type': (215,),
'PyTimedeltaArrType_Type': (216,),
'PyHalfArrType_Type': (217,),
'NpyIter_Type': (218,),
# End 1.6 API
}
#define NPY_NUMUSERTYPES (*(int *)PyArray_API[7])
#define PyBoolArrType_Type (*(PyTypeObject *)PyArray_API[8])
#define _PyArrayScalar_BoolValues ((PyBoolScalarObject *)PyArray_API[9])
multiarray_funcs_api = {
'PyArray_GetNDArrayCVersion': (0,),
'PyArray_SetNumericOps': (40,),
'PyArray_GetNumericOps': (41,),
'PyArray_INCREF': (42,),
'PyArray_XDECREF': (43,),
'PyArray_SetStringFunction': (44,),
'PyArray_DescrFromType': (45,),
'PyArray_TypeObjectFromType': (46,),
'PyArray_Zero': (47,),
'PyArray_One': (48,),
'PyArray_CastToType': (49, StealRef(2), NonNull(2)),
'PyArray_CastTo': (50,),
'PyArray_CastAnyTo': (51,),
'PyArray_CanCastSafely': (52,),
'PyArray_CanCastTo': (53,),
'PyArray_ObjectType': (54,),
'PyArray_DescrFromObject': (55,),
'PyArray_ConvertToCommonType': (56,),
'PyArray_DescrFromScalar': (57,),
'PyArray_DescrFromTypeObject': (58,),
'PyArray_Size': (59,),
'PyArray_Scalar': (60,),
'PyArray_FromScalar': (61, StealRef(2)),
'PyArray_ScalarAsCtype': (62,),
'PyArray_CastScalarToCtype': (63,),
'PyArray_CastScalarDirect': (64,),
'PyArray_ScalarFromObject': (65,),
'PyArray_GetCastFunc': (66,),
'PyArray_FromDims': (67,),
'PyArray_FromDimsAndDataAndDescr': (68, StealRef(3)),
'PyArray_FromAny': (69, StealRef(2)),
'PyArray_EnsureArray': (70, StealRef(1)),
'PyArray_EnsureAnyArray': (71, StealRef(1)),
'PyArray_FromFile': (72,),
'PyArray_FromString': (73,),
'PyArray_FromBuffer': (74,),
'PyArray_FromIter': (75, StealRef(2)),
'PyArray_Return': (76, StealRef(1)),
'PyArray_GetField': (77, StealRef(2), NonNull(2)),
'PyArray_SetField': (78, StealRef(2), NonNull(2)),
'PyArray_Byteswap': (79,),
'PyArray_Resize': (80,),
'PyArray_MoveInto': (81,),
'PyArray_CopyInto': (82,),
'PyArray_CopyAnyInto': (83,),
'PyArray_CopyObject': (84,),
'PyArray_NewCopy': (85, NonNull(1)),
'PyArray_ToList': (86,),
'PyArray_ToString': (87,),
'PyArray_ToFile': (88,),
'PyArray_Dump': (89,),
'PyArray_Dumps': (90,),
'PyArray_ValidType': (91,),
'PyArray_UpdateFlags': (92,),
'PyArray_New': (93, NonNull(1)),
'PyArray_NewFromDescr': (94, StealRef(2), NonNull([1, 2])),
'PyArray_DescrNew': (95,),
'PyArray_DescrNewFromType': (96,),
'PyArray_GetPriority': (97,),
'PyArray_IterNew': (98,),
'PyArray_MultiIterNew': (99,),
'PyArray_PyIntAsInt': (100,),
'PyArray_PyIntAsIntp': (101,),
'PyArray_Broadcast': (102,),
'PyArray_FillObjectArray': (103,),
'PyArray_FillWithScalar': (104,),
'PyArray_CheckStrides': (105,),
'PyArray_DescrNewByteorder': (106,),
'PyArray_IterAllButAxis': (107,),
'PyArray_CheckFromAny': (108, StealRef(2)),
'PyArray_FromArray': (109, StealRef(2)),
'PyArray_FromInterface': (110,),
'PyArray_FromStructInterface': (111,),
'PyArray_FromArrayAttr': (112,),
'PyArray_ScalarKind': (113,),
'PyArray_CanCoerceScalar': (114,),
'PyArray_NewFlagsObject': (115,),
'PyArray_CanCastScalar': (116,),
'PyArray_CompareUCS4': (117,),
'PyArray_RemoveSmallest': (118,),
'PyArray_ElementStrides': (119,),
'PyArray_Item_INCREF': (120,),
'PyArray_Item_XDECREF': (121,),
'PyArray_FieldNames': (122,),
'PyArray_Transpose': (123,),
'PyArray_TakeFrom': (124,),
'PyArray_PutTo': (125,),
'PyArray_PutMask': (126,),
'PyArray_Repeat': (127,),
'PyArray_Choose': (128,),
'PyArray_Sort': (129,),
'PyArray_ArgSort': (130,),
'PyArray_SearchSorted': (131,),
'PyArray_ArgMax': (132,),
'PyArray_ArgMin': (133,),
'PyArray_Reshape': (134,),
'PyArray_Newshape': (135,),
'PyArray_Squeeze': (136,),
'PyArray_View': (137, StealRef(2)),
'PyArray_SwapAxes': (138,),
'PyArray_Max': (139,),
'PyArray_Min': (140,),
'PyArray_Ptp': (141,),
'PyArray_Mean': (142,),
'PyArray_Trace': (143,),
'PyArray_Diagonal': (144,),
'PyArray_Clip': (145,),
'PyArray_Conjugate': (146,),
'PyArray_Nonzero': (147,),
'PyArray_Std': (148,),
'PyArray_Sum': (149,),
'PyArray_CumSum': (150,),
'PyArray_Prod': (151,),
'PyArray_CumProd': (152,),
'PyArray_All': (153,),
'PyArray_Any': (154,),
'PyArray_Compress': (155,),
'PyArray_Flatten': (156,),
'PyArray_Ravel': (157,),
'PyArray_MultiplyList': (158,),
'PyArray_MultiplyIntList': (159,),
'PyArray_GetPtr': (160,),
'PyArray_CompareLists': (161,),
'PyArray_AsCArray': (162, StealRef(5)),
'PyArray_As1D': (163,),
'PyArray_As2D': (164,),
'PyArray_Free': (165,),
'PyArray_Converter': (166,),
'PyArray_IntpFromSequence': (167,),
'PyArray_Concatenate': (168,),
'PyArray_InnerProduct': (169,),
'PyArray_MatrixProduct': (170,),
'PyArray_CopyAndTranspose': (171,),
'PyArray_Correlate': (172,),
'PyArray_TypestrConvert': (173,),
'PyArray_DescrConverter': (174,),
'PyArray_DescrConverter2': (175,),
'PyArray_IntpConverter': (176,),
'PyArray_BufferConverter': (177,),
'PyArray_AxisConverter': (178,),
'PyArray_BoolConverter': (179,),
'PyArray_ByteorderConverter': (180,),
'PyArray_OrderConverter': (181,),
'PyArray_EquivTypes': (182,),
'PyArray_Zeros': (183, StealRef(3)),
'PyArray_Empty': (184, StealRef(3)),
'PyArray_Where': (185,),
'PyArray_Arange': (186,),
'PyArray_ArangeObj': (187,),
'PyArray_SortkindConverter': (188,),
'PyArray_LexSort': (189,),
'PyArray_Round': (190,),
'PyArray_EquivTypenums': (191,),
'PyArray_RegisterDataType': (192,),
'PyArray_RegisterCastFunc': (193,),
'PyArray_RegisterCanCast': (194,),
'PyArray_InitArrFuncs': (195,),
'PyArray_IntTupleFromIntp': (196,),
'PyArray_TypeNumFromName': (197,),
'PyArray_ClipmodeConverter': (198,),
'PyArray_OutputConverter': (199,),
'PyArray_BroadcastToShape': (200,),
'_PyArray_SigintHandler': (201,),
'_PyArray_GetSigintBuf': (202,),
'PyArray_DescrAlignConverter': (203,),
'PyArray_DescrAlignConverter2': (204,),
'PyArray_SearchsideConverter': (205,),
'PyArray_CheckAxis': (206,),
'PyArray_OverflowMultiplyList': (207,),
'PyArray_CompareString': (208,),
'PyArray_MultiIterFromObjects': (209,),
'PyArray_GetEndianness': (210,),
'PyArray_GetNDArrayCFeatureVersion': (211,),
'PyArray_Correlate2': (212,),
'PyArray_NeighborhoodIterNew': (213,),
# End 1.5 API
'PyArray_SetDatetimeParseFunction': (219,),
'PyArray_DatetimeToDatetimeStruct': (220,),
'PyArray_TimedeltaToTimedeltaStruct': (221,),
'PyArray_DatetimeStructToDatetime': (222,),
'PyArray_TimedeltaStructToTimedelta': (223,),
# NDIter API
'NpyIter_New': (224,),
'NpyIter_MultiNew': (225,),
'NpyIter_AdvancedNew': (226,),
'NpyIter_Copy': (227,),
'NpyIter_Deallocate': (228,),
'NpyIter_HasDelayedBufAlloc': (229,),
'NpyIter_HasExternalLoop': (230,),
'NpyIter_EnableExternalLoop': (231,),
'NpyIter_GetInnerStrideArray': (232,),
'NpyIter_GetInnerLoopSizePtr': (233,),
'NpyIter_Reset': (234,),
'NpyIter_ResetBasePointers': (235,),
'NpyIter_ResetToIterIndexRange': (236,),
'NpyIter_GetNDim': (237,),
'NpyIter_GetNOp': (238,),
'NpyIter_GetIterNext': (239,),
'NpyIter_GetIterSize': (240,),
'NpyIter_GetIterIndexRange': (241,),
'NpyIter_GetIterIndex': (242,),
'NpyIter_GotoIterIndex': (243,),
'NpyIter_HasMultiIndex': (244,),
'NpyIter_GetShape': (245,),
'NpyIter_GetGetMultiIndex': (246,),
'NpyIter_GotoMultiIndex': (247,),
'NpyIter_RemoveMultiIndex': (248,),
'NpyIter_HasIndex': (249,),
'NpyIter_IsBuffered': (250,),
'NpyIter_IsGrowInner': (251,),
'NpyIter_GetBufferSize': (252,),
'NpyIter_GetIndexPtr': (253,),
'NpyIter_GotoIndex': (254,),
'NpyIter_GetDataPtrArray': (255,),
'NpyIter_GetDescrArray': (256,),
'NpyIter_GetOperandArray': (257,),
'NpyIter_GetIterView': (258,),
'NpyIter_GetReadFlags': (259,),
'NpyIter_GetWriteFlags': (260,),
'NpyIter_DebugPrint': (261,),
'NpyIter_IterationNeedsAPI': (262,),
'NpyIter_GetInnerFixedStrideArray': (263,),
'NpyIter_RemoveAxis': (264,),
'NpyIter_GetAxisStrideArray': (265,),
'NpyIter_RequiresBuffering': (266,),
'NpyIter_GetInitialDataPtrArray': (267,),
'NpyIter_CreateCompatibleStrides': (268,),
#
'PyArray_CastingConverter': (269,),
'PyArray_CountNonzero': (270,),
'PyArray_PromoteTypes': (271,),
'PyArray_MinScalarType': (272,),
'PyArray_ResultType': (273,),
'PyArray_CanCastArrayTo': (274,),
'PyArray_CanCastTypeTo': (275,),
'PyArray_EinsteinSum': (276,),
'PyArray_NewLikeArray': (277, StealRef(3), NonNull(1)),
'PyArray_GetArrayParamsFromObject': (278,),
'PyArray_ConvertClipmodeSequence': (279,),
'PyArray_MatrixProduct2': (280,),
# End 1.6 API
'NpyIter_IsFirstVisit': (281,),
'PyArray_SetBaseObject': (282, StealRef(2)),
'PyArray_CreateSortedStridePerm': (283,),
'PyArray_RemoveAxesInPlace': (284,),
'PyArray_DebugPrint': (285,),
'PyArray_FailUnlessWriteable': (286,),
'PyArray_SetUpdateIfCopyBase': (287, StealRef(2)),
'PyDataMem_NEW': (288,),
'PyDataMem_FREE': (289,),
'PyDataMem_RENEW': (290,),
'PyDataMem_SetEventHook': (291,),
'PyArray_MapIterSwapAxes': (293,),
'PyArray_MapIterArray': (294,),
'PyArray_MapIterNext': (295,),
# End 1.7 API
'PyArray_Partition': (296,),
'PyArray_ArgPartition': (297,),
'PyArray_SelectkindConverter': (298,),
'PyDataMem_NEW_ZEROED': (299,),
# End 1.8 API
# End 1.9 API
'PyArray_CheckAnyScalarExact': (300, NonNull(1)),
# End 1.10 API
'PyArray_MapIterArrayCopyIfOverlap': (301,),
# End 1.13 API
'PyArray_ResolveWritebackIfCopy': (302,),
'PyArray_SetWritebackIfCopyBase': (303,),
# End 1.14 API
'NpyIter_Close': (304,),
# End 1.15 API
}
ufunc_types_api = {
'PyUFunc_Type': (0,)
}
ufunc_funcs_api = {
'PyUFunc_FromFuncAndData': (1,),
'PyUFunc_RegisterLoopForType': (2,),
'PyUFunc_GenericFunction': (3,),
'PyUFunc_f_f_As_d_d': (4,),
'PyUFunc_d_d': (5,),
'PyUFunc_f_f': (6,),
'PyUFunc_g_g': (7,),
'PyUFunc_F_F_As_D_D': (8,),
'PyUFunc_F_F': (9,),
'PyUFunc_D_D': (10,),
'PyUFunc_G_G': (11,),
'PyUFunc_O_O': (12,),
'PyUFunc_ff_f_As_dd_d': (13,),
'PyUFunc_ff_f': (14,),
'PyUFunc_dd_d': (15,),
'PyUFunc_gg_g': (16,),
'PyUFunc_FF_F_As_DD_D': (17,),
'PyUFunc_DD_D': (18,),
'PyUFunc_FF_F': (19,),
'PyUFunc_GG_G': (20,),
'PyUFunc_OO_O': (21,),
'PyUFunc_O_O_method': (22,),
'PyUFunc_OO_O_method': (23,),
'PyUFunc_On_Om': (24,),
'PyUFunc_GetPyValues': (25,),
'PyUFunc_checkfperr': (26,),
'PyUFunc_clearfperr': (27,),
'PyUFunc_getfperr': (28,),
'PyUFunc_handlefperr': (29,),
'PyUFunc_ReplaceLoopBySignature': (30,),
'PyUFunc_FromFuncAndDataAndSignature': (31,),
'PyUFunc_SetUsesArraysAsData': (32,),
# End 1.5 API
'PyUFunc_e_e': (33,),
'PyUFunc_e_e_As_f_f': (34,),
'PyUFunc_e_e_As_d_d': (35,),
'PyUFunc_ee_e': (36,),
'PyUFunc_ee_e_As_ff_f': (37,),
'PyUFunc_ee_e_As_dd_d': (38,),
# End 1.6 API
'PyUFunc_DefaultTypeResolver': (39,),
'PyUFunc_ValidateCasting': (40,),
# End 1.7 API
'PyUFunc_RegisterLoopForDescr': (41,),
# End 1.8 API
}
# List of all the dicts which define the C API
# XXX: DO NOT CHANGE THE ORDER OF TUPLES BELOW !
multiarray_api = (
multiarray_global_vars,
multiarray_scalar_bool_values,
multiarray_types_api,
multiarray_funcs_api,
)
ufunc_api = (
ufunc_funcs_api,
ufunc_types_api
)
full_api = multiarray_api + ufunc_api
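# A minimal sanity-check sketch (an assumption for illustration, not part of
# the real build machinery): verify that no index is reused within each C API
# table, which is the invariant the module docstring describes.
def _check_unique_indices(api_dicts):
    seen = {}
    for d in api_dicts:
        for name, spec in d.items():
            index = spec[0]
            if index in seen:
                raise ValueError("index %d used by both %s and %s"
                                 % (index, seen[index], name))
            seen[index] = name
if __name__ == '__main__':
    _check_unique_indices(multiarray_api)
    _check_unique_indices(ufunc_api)
    print("all API indices are unique")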
|
|
"""
Test derivatives by comparing analytic to finite difference.
"""
from __future__ import print_function
import unittest
import numpy as np
from openmdao.api import Problem, IndepVarComp
from openmdao.utils.assert_utils import assert_rel_error, assert_check_partials
from CADRE.attitude import Attitude_Angular, Attitude_AngularRates, \
Attitude_Attitude, Attitude_Roll, Attitude_RotationMtx, \
Attitude_RotationMtxRates, Attitude_Sideslip, Attitude_Torque
from CADRE.battery import BatterySOC, BatteryPower, BatteryConstraints
from CADRE.comm import Comm_DataDownloaded, Comm_AntRotation, Comm_AntRotationMtx, \
Comm_BitRate, Comm_Distance, Comm_EarthsSpin, Comm_EarthsSpinMtx, Comm_GainPattern, \
Comm_GSposEarth, Comm_GSposECI, Comm_LOS, Comm_VectorAnt, Comm_VectorBody, \
Comm_VectorECI, Comm_VectorSpherical
from CADRE.orbit import Orbit_Dynamics, Orbit_Initial
from CADRE.parameters import BsplineParameters
from CADRE.power import Power_CellVoltage, Power_SolarPower, Power_Total
from CADRE.reactionwheel import ReactionWheel_Motor, ReactionWheel_Power, \
ReactionWheel_Torque, ReactionWheel_Dynamics
from CADRE.solar import Solar_ExposedArea
from CADRE.sun import Sun_LOS, Sun_PositionBody, Sun_PositionECI, Sun_PositionSpherical
from CADRE.thermal_temperature import ThermalTemperature
NTIME = 5
class TestCADRE(unittest.TestCase):
def setup(self, compname, inputs, state0):
# create instance of component type
try:
comp = eval('%s(NTIME)' % compname)
except TypeError:
try:
comp = eval('%s()' % compname)
except TypeError:
comp = eval('%s(NTIME, 300)' % compname)
# collect metadata for component inputs
prob = Problem(comp)
prob.setup()
prob.final_setup()
self.inputs_dict = {}
for name, meta in prob.model.list_inputs(units=True, out_stream=None):
self.inputs_dict[name.split('.')[-1]] = meta
# create independent vars for each input, initialized with random values
indep = IndepVarComp()
for item in inputs + state0:
shape = self.inputs_dict[item]['value'].shape
units = self.inputs_dict[item]['units']
indep.add_output(item, np.random.random(shape), units=units)
# setup problem for test
self.prob = Problem()
self.prob.model.add_subsystem('indep', indep, promotes=['*'])
self.prob.model.add_subsystem('comp', comp, promotes=['*'])
self.prob.setup()
def compare_derivatives(self, var_in, var_out, rel_error=False):
# check totals
J = self.prob.check_totals(of=var_out, wrt=var_in, out_stream=None)
for outp in var_out:
for inp in var_in:
Jn = J[outp, inp]['J_fd']
Jf = J[outp, inp]['J_fwd']
if rel_error:
diff = np.nan_to_num(abs(Jf - Jn) / Jn)
else:
diff = abs(Jf - Jn)
assert_rel_error(self, diff.max(), 0.0, 1e-3)
# check partials
# FIXME: several components fail check_partials
# partials = self.prob.check_partials()
# partials = self.prob.check_partials(compact_print=True, method='cs')
# partials = self.prob.check_partials(out_stream=None)
# assert_check_partials(partials, atol=1e-3, rtol=1e-3)
def test_Attitude_Angular(self):
compname = 'Attitude_Angular'
inputs = ['O_BI', 'Odot_BI']
outputs = ['w_B']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Attitude_AngularRates(self):
compname = 'Attitude_AngularRates'
inputs = ['w_B']
outputs = ['wdot_B']
state0 = []
self.setup(compname, inputs, state0)
self.prob.model.comp.h = 0.01
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Attitude_Attitude(self):
compname = 'Attitude_Attitude'
inputs = ['r_e2b_I']
outputs = ['O_RI']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Attitude_Roll(self):
compname = 'Attitude_Roll'
inputs = ['Gamma']
outputs = ['O_BR']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Attitude_RotationMtx(self):
compname = 'Attitude_RotationMtx'
inputs = ['O_BR', 'O_RI']
outputs = ['O_BI']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Attitude_RotationMtxRates(self):
compname = 'Attitude_RotationMtxRates'
inputs = ['O_BI']
outputs = ['Odot_BI']
state0 = []
self.setup(compname, inputs, state0)
self.prob.model.comp.h = 0.01
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Attitude_Sideslip(self):
compname = 'Attitude_Sideslip'
inputs = ['r_e2b_I', 'O_BI']
outputs = ['v_e2b_B']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Attitude_Torque(self):
compname = 'Attitude_Torque'
inputs = ['w_B', 'wdot_B']
outputs = ['T_tot']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_BatterySOC(self):
compname = 'BatterySOC'
inputs = ['P_bat', 'temperature']
outputs = ['SOC']
state0 = ['iSOC']
self.setup(compname, inputs, state0)
self.prob.model.comp.h = 0.01
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_BatteryPower(self):
compname = 'BatteryPower'
inputs = ['SOC', 'temperature', 'P_bat']
outputs = ['I_bat']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_BatteryConstraints(self):
compname = 'BatteryConstraints'
inputs = ['I_bat', 'SOC']
outputs = ['ConCh', 'ConDs', 'ConS0', 'ConS1']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_BsplineParameters(self):
compname = 'BsplineParameters'
inputs = ['CP_P_comm', 'CP_gamma', 'CP_Isetpt']
outputs = ['P_comm', 'Gamma', 'Isetpt']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_DataDownloaded(self):
compname = 'Comm_DataDownloaded'
inputs = ['Dr']
outputs = ['Data']
state0 = ['Data0']
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_AntRotation(self):
compname = 'Comm_AntRotation'
inputs = ['antAngle']
outputs = ['q_A']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_AntRotationMtx(self):
compname = 'Comm_AntRotationMtx'
inputs = ['q_A']
outputs = ['O_AB']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_BitRate(self):
compname = 'Comm_BitRate'
inputs = ['P_comm', 'gain', 'GSdist', 'CommLOS']
outputs = ['Dr']
state0 = []
np.random.seed(1001)
self.setup(compname, inputs, state0)
        # These inputs need to be of a certain magnitude so the model doesn't blow up
shape = self.inputs_dict['P_comm']['value'].shape
self.prob['P_comm'] = np.ones(shape)
shape = self.inputs_dict['GSdist']['value'].shape
self.prob['GSdist'] = np.random.random(shape) * 1e3
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_Distance(self):
compname = 'Comm_Distance'
inputs = ['r_b2g_A']
outputs = ['GSdist']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_EarthsSpin(self):
compname = 'Comm_EarthsSpin'
inputs = ['t']
outputs = ['q_E']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_EarthsSpinMtx(self):
compname = 'Comm_EarthsSpinMtx'
inputs = ['q_E']
outputs = ['O_IE']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_GainPattern(self):
compname = 'Comm_GainPattern'
inputs = ['azimuthGS', 'elevationGS']
outputs = ['gain']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_GSposEarth(self):
compname = 'Comm_GSposEarth'
inputs = ['lon', 'lat', 'alt']
outputs = ['r_e2g_E']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_GSposECI(self):
compname = 'Comm_GSposECI'
inputs = ['O_IE', 'r_e2g_E']
outputs = ['r_e2g_I']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_LOS(self):
compname = 'Comm_LOS'
inputs = ['r_b2g_I', 'r_e2g_I']
outputs = ['CommLOS']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_VectorAnt(self):
compname = 'Comm_VectorAnt'
inputs = ['r_b2g_B', 'O_AB']
outputs = ['r_b2g_A']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_VectorBody(self):
compname = 'Comm_VectorBody'
inputs = ['r_b2g_I', 'O_BI']
outputs = ['r_b2g_B']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_VectorECI(self):
compname = 'Comm_VectorECI'
inputs = ['r_e2g_I', 'r_e2b_I']
outputs = ['r_b2g_I']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Comm_VectorSpherical(self):
compname = 'Comm_VectorSpherical'
inputs = ['r_b2g_A']
outputs = ['azimuthGS', 'elevationGS']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Orbit_Dynamics(self):
compname = 'Orbit_Dynamics'
inputs = ['r_e2b_I0']
outputs = ['r_e2b_I']
state0 = []
self.setup(compname, inputs, state0)
self.prob.model.comp.h = 0.01
self.prob['r_e2b_I0'][:3] = np.random.random((3)) * 1e6
self.prob['r_e2b_I0'][3:] = np.random.random((3)) * 1e5
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Orbit_Initial(self):
compname = 'Orbit_Initial'
inputs = ['altPerigee', 'altApogee', 'RAAN', 'Inc', 'argPerigee', 'trueAnomaly']
outputs = ['r_e2b_I0']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Power_CellVoltage(self):
compname = 'Power_CellVoltage'
inputs = ['LOS', 'temperature', 'exposedArea', 'Isetpt']
outputs = ['V_sol']
state0 = []
self.setup(compname, inputs, state0)
shape = self.inputs_dict['temperature']['value'].shape
self.prob['temperature'] = np.random.random(shape) * 40 + 240
shape = self.inputs_dict['exposedArea']['value'].shape
self.prob['exposedArea'] = np.random.random(shape) * 1e-4
shape = self.inputs_dict['Isetpt']['value'].shape
self.prob['Isetpt'] = np.random.random(shape) * 1e-2
self.prob.run_model()
self.compare_derivatives(inputs, outputs, rel_error=True)
def test_Power_SolarPower(self):
compname = 'Power_SolarPower'
inputs = ['V_sol', 'Isetpt']
outputs = ['P_sol']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Power_Total(self):
compname = 'Power_Total'
inputs = ['P_sol', 'P_comm', 'P_RW']
outputs = ['P_bat']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_ReactionWheel_Motor(self):
compname = 'ReactionWheel_Motor'
inputs = ['T_RW', 'w_B', 'w_RW']
outputs = ['T_m']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_ReactionWheel_Power(self):
compname = 'ReactionWheel_Power'
inputs = ['w_RW', 'T_RW']
outputs = ['P_RW']
state0 = []
np.random.seed(1001)
self.setup(compname, inputs, state0)
self.prob['T_RW'] = np.random.random(self.prob['T_RW'].shape) * 1e-1
self.prob['w_RW'] = np.random.random(self.prob['w_RW'].shape) * 1e-1
self.prob.run_model()
self.compare_derivatives(inputs, outputs, rel_error=True)
def test_ReactionWheel_Torque(self):
compname = 'ReactionWheel_Torque'
inputs = ['T_tot']
outputs = ['T_RW']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_ReactionWheel_Dynamics(self):
compname = 'ReactionWheel_Dynamics'
inputs = ['w_B', 'T_RW']
outputs = ['w_RW']
# keep these at zeros
state0 = [] # ['w_RW0']
self.setup(compname, inputs, state0)
self.prob.model.comp.h = 0.01
shape = self.inputs_dict['w_B']['value'].shape
self.prob['w_B'] = np.random.random(shape) * 1e-4
shape = self.inputs_dict['T_RW']['value'].shape
self.prob['T_RW'] = np.random.random(shape) * 1e-9
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Solar_ExposedArea(self):
compname = 'Solar_ExposedArea'
inputs = ['finAngle', 'azimuth', 'elevation']
outputs = ['exposedArea']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Sun_LOS(self):
compname = 'Sun_LOS'
inputs = ['r_e2b_I', 'r_e2s_I']
outputs = ['LOS']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Sun_PositionBody(self):
compname = 'Sun_PositionBody'
inputs = ['O_BI', 'r_e2s_I']
outputs = ['r_e2s_B']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Sun_PositionECI(self):
compname = 'Sun_PositionECI'
inputs = ['t', 'LD']
outputs = ['r_e2s_I']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_Sun_PositionSpherical(self):
compname = 'Sun_PositionSpherical'
inputs = ['r_e2s_B']
outputs = ['azimuth', 'elevation']
state0 = []
self.setup(compname, inputs, state0)
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
def test_ThermalTemperature(self):
compname = 'ThermalTemperature'
inputs = ['exposedArea', 'cellInstd', 'LOS', 'P_comm']
outputs = ['temperature']
state0 = ['T0']
self.setup(compname, inputs, state0)
self.prob.model.comp.h = 0.01
self.prob.run_model()
self.compare_derivatives(inputs+state0, outputs)
if __name__ == "__main__":
unittest.main()
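# To exercise a single check in isolation (assuming this file is saved as
# test_derivatives.py):
#   python -m unittest test_derivatives.TestCADRE.test_Attitude_Angular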
|
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import os
import six
from st2common.content.loader import MetaLoader
from st2common.models.api.action import (ActionAPI, LiveActionAPI, ActionExecutionStateAPI,
RunnerTypeAPI, ActionAliasAPI)
from st2common.models.api.auth import ApiKeyAPI, UserAPI
from st2common.models.api.execution import (ActionExecutionAPI)
from st2common.models.api.policy import (PolicyTypeAPI, PolicyAPI)
from st2common.models.api.rule import (RuleAPI)
from st2common.models.api.rule_enforcement import RuleEnforcementAPI
from st2common.models.api.sensor import SensorTypeAPI
from st2common.models.api.trace import TraceAPI
from st2common.models.api.trigger import (TriggerAPI, TriggerTypeAPI, TriggerInstanceAPI)
from st2common.models.db.action import ActionDB
from st2common.models.db.actionalias import ActionAliasDB
from st2common.models.db.auth import ApiKeyDB, UserDB
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.executionstate import ActionExecutionStateDB
from st2common.models.db.runner import RunnerTypeDB
from st2common.models.db.execution import (ActionExecutionDB)
from st2common.models.db.policy import (PolicyTypeDB, PolicyDB)
from st2common.models.db.rule import RuleDB
from st2common.models.db.rule_enforcement import RuleEnforcementDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.trace import TraceDB
from st2common.models.db.trigger import (TriggerDB, TriggerTypeDB, TriggerInstanceDB)
from st2common.persistence.action import Action
from st2common.persistence.actionalias import ActionAlias
from st2common.persistence.execution import ActionExecution
from st2common.persistence.executionstate import ActionExecutionState
from st2common.persistence.auth import ApiKey, User
from st2common.persistence.liveaction import LiveAction
from st2common.persistence.runner import RunnerType
from st2common.persistence.policy import (PolicyType, Policy)
from st2common.persistence.rule import Rule
from st2common.persistence.rule_enforcement import RuleEnforcement
from st2common.persistence.sensor import SensorType
from st2common.persistence.trace import Trace
from st2common.persistence.trigger import (Trigger, TriggerType, TriggerInstance)
ALLOWED_DB_FIXTURES = ['actions', 'actionstates', 'aliases', 'executions', 'liveactions',
'policies', 'policytypes', 'rules', 'runners', 'sensors',
'triggertypes', 'triggers', 'triggerinstances', 'traces', 'apikeys',
'users', 'enforcements']
ALLOWED_FIXTURES = copy.copy(ALLOWED_DB_FIXTURES)
ALLOWED_FIXTURES.extend(['actionchains', 'workflows'])
FIXTURE_DB_MODEL = {
'actions': ActionDB,
'aliases': ActionAliasDB,
'actionstates': ActionExecutionStateDB,
'apikeys': ApiKeyDB,
'enforcements': RuleEnforcementDB,
'executions': ActionExecutionDB,
'liveactions': LiveActionDB,
'policies': PolicyDB,
'policytypes': PolicyTypeDB,
'rules': RuleDB,
'runners': RunnerTypeDB,
'sensors': SensorTypeDB,
'traces': TraceDB,
'triggertypes': TriggerTypeDB,
'triggers': TriggerDB,
'triggerinstances': TriggerInstanceDB,
'users': UserDB
}
FIXTURE_API_MODEL = {
'actions': ActionAPI,
'aliases': ActionAliasAPI,
'actionstates': ActionExecutionStateAPI,
'apikeys': ApiKeyAPI,
'enforcements': RuleEnforcementAPI,
'executions': ActionExecutionAPI,
'liveactions': LiveActionAPI,
'policies': PolicyAPI,
'policytypes': PolicyTypeAPI,
'rules': RuleAPI,
'runners': RunnerTypeAPI,
'sensors': SensorTypeAPI,
'traces': TraceAPI,
'triggertypes': TriggerTypeAPI,
'triggers': TriggerAPI,
'triggerinstances': TriggerInstanceAPI,
'users': UserAPI
}
FIXTURE_PERSISTENCE_MODEL = {
'actions': Action,
'aliases': ActionAlias,
'actionstates': ActionExecutionState,
'apikeys': ApiKey,
'enforcements': RuleEnforcement,
'executions': ActionExecution,
'liveactions': LiveAction,
'policies': Policy,
'policytypes': PolicyType,
'rules': Rule,
'runners': RunnerType,
'sensors': SensorType,
'traces': Trace,
'triggertypes': TriggerType,
'triggers': Trigger,
'triggerinstances': TriggerInstance,
'users': User
}
def get_fixtures_base_path():
return os.path.join(os.path.dirname(__file__), 'fixtures')
def get_fixtures_packs_base_path():
return os.path.join(os.path.dirname(__file__), 'fixtures/packs')
def get_fixtures_runners_base_path():
return os.path.join(os.path.dirname(__file__), 'fixtures/packs/runners/')
def get_resources_base_path():
return os.path.join(os.path.dirname(__file__), 'resources')
class FixturesLoader(object):
def __init__(self):
self.meta_loader = MetaLoader()
def save_fixtures_to_db(self, fixtures_pack='generic', fixtures_dict=None):
"""
Loads fixtures specified in fixtures_dict into the database
and returns DB models for the fixtures.
fixtures_dict should be of the form:
{
'actions': ['action-1.yaml', 'action-2.yaml'],
'rules': ['rule-1.yaml'],
'liveactions': ['execution-1.yaml']
}
:param fixtures_pack: Name of the pack to load fixtures from.
:type fixtures_pack: ``str``
:param fixtures_dict: Dictionary specifying the fixtures to load for each type.
:type fixtures_dict: ``dict``
:rtype: ``dict``
"""
if fixtures_dict is None:
fixtures_dict = {}
fixtures_pack_path = self._validate_fixtures_pack(fixtures_pack)
self._validate_fixture_dict(fixtures_dict, allowed=ALLOWED_DB_FIXTURES)
db_models = {}
for fixture_type, fixtures in six.iteritems(fixtures_dict):
API_MODEL = FIXTURE_API_MODEL.get(fixture_type, None)
PERSISTENCE_MODEL = FIXTURE_PERSISTENCE_MODEL.get(fixture_type, None)
loaded_fixtures = {}
for fixture in fixtures:
# Guard against copy/paste and similar typos
if fixture in loaded_fixtures:
msg = 'Fixture "%s" is specified twice, probably a typo.' % (fixture)
raise ValueError(msg)
fixture_dict = self.meta_loader.load(
self._get_fixture_file_path_abs(fixtures_pack_path, fixture_type, fixture))
api_model = API_MODEL(**fixture_dict)
db_model = API_MODEL.to_model(api_model)
db_model = PERSISTENCE_MODEL.add_or_update(db_model)
loaded_fixtures[fixture] = db_model
db_models[fixture_type] = loaded_fixtures
return db_models
def load_fixtures(self, fixtures_pack='generic', fixtures_dict=None):
"""
Loads fixtures specified in fixtures_dict. We
simply want to load the meta into dict objects.
fixtures_dict should be of the form:
{
'actionchains': ['actionchain1.yaml', 'actionchain2.yaml'],
'workflows': ['workflow.yaml']
}
:param fixtures_pack: Name of the pack to load fixtures from.
:type fixtures_pack: ``str``
:param fixtures_dict: Dictionary specifying the fixtures to load for each type.
:type fixtures_dict: ``dict``
:rtype: ``dict``
"""
if not fixtures_dict:
return {}
fixtures_pack_path = self._validate_fixtures_pack(fixtures_pack)
self._validate_fixture_dict(fixtures_dict)
all_fixtures = {}
for fixture_type, fixtures in six.iteritems(fixtures_dict):
loaded_fixtures = {}
for fixture in fixtures:
fixture_dict = self.meta_loader.load(
self._get_fixture_file_path_abs(fixtures_pack_path, fixture_type, fixture))
loaded_fixtures[fixture] = fixture_dict
all_fixtures[fixture_type] = loaded_fixtures
return all_fixtures
def load_models(self, fixtures_pack='generic', fixtures_dict=None):
"""
Loads fixtures specified in fixtures_dict as db models. This method must be
used for fixtures that have associated DB models. We simply want to load the
meta as DB models but don't want to save them to db.
fixtures_dict should be of the form:
{
'actions': ['action-1.yaml', 'action-2.yaml'],
'rules': ['rule-1.yaml'],
'liveactions': ['execution-1.yaml']
}
:param fixtures_pack: Name of the pack to load fixtures from.
:type fixtures_pack: ``str``
:param fixtures_dict: Dictionary specifying the fixtures to load for each type.
:type fixtures_dict: ``dict``
:rtype: ``dict``
"""
if not fixtures_dict:
return {}
fixtures_pack_path = self._validate_fixtures_pack(fixtures_pack)
self._validate_fixture_dict(fixtures_dict, allowed=ALLOWED_DB_FIXTURES)
all_fixtures = {}
for fixture_type, fixtures in six.iteritems(fixtures_dict):
API_MODEL = FIXTURE_API_MODEL.get(fixture_type, None)
loaded_models = {}
for fixture in fixtures:
fixture_dict = self.meta_loader.load(
self._get_fixture_file_path_abs(fixtures_pack_path, fixture_type, fixture))
api_model = API_MODEL(**fixture_dict)
db_model = API_MODEL.to_model(api_model)
loaded_models[fixture] = db_model
all_fixtures[fixture_type] = loaded_models
return all_fixtures
def delete_fixtures_from_db(self, fixtures_pack='generic', fixtures_dict=None,
raise_on_fail=False):
"""
Deletes fixtures specified in fixtures_dict from the database.
fixtures_dict should be of the form:
{
'actions': ['action-1.yaml', 'action-2.yaml'],
'rules': ['rule-1.yaml'],
'liveactions': ['execution-1.yaml']
}
:param fixtures_pack: Name of the pack to delete fixtures from.
:type fixtures_pack: ``str``
:param fixtures_dict: Dictionary specifying the fixtures to delete for each type.
:type fixtures_dict: ``dict``
:param raise_on_fail: Optional If True, raises exception if delete fails on any fixture.
:type raise_on_fail: ``boolean``
"""
if not fixtures_dict:
return
fixtures_pack_path = self._validate_fixtures_pack(fixtures_pack)
self._validate_fixture_dict(fixtures_dict)
for fixture_type, fixtures in six.iteritems(fixtures_dict):
API_MODEL = FIXTURE_API_MODEL.get(fixture_type, None)
PERSISTENCE_MODEL = FIXTURE_PERSISTENCE_MODEL.get(fixture_type, None)
for fixture in fixtures:
fixture_dict = self.meta_loader.load(
self._get_fixture_file_path_abs(fixtures_pack_path, fixture_type, fixture))
# Note: once we have a reference mechanism that is consistent for
# every model, we can simply get and delete the object. Until then,
# these model conversions are necessary.
api_model = API_MODEL(**fixture_dict)
db_model = API_MODEL.to_model(api_model)
try:
PERSISTENCE_MODEL.delete(db_model)
except Exception:
if raise_on_fail:
raise
def delete_models_from_db(self, models_dict, raise_on_fail=False):
"""
Deletes models specified in models_dict from the database.
models_dict should be of the form:
{
'actions': [ACTION1, ACTION2],
'rules': [RULE1],
'liveactions': [EXECUTION]
}
:param fixtures_dict: Dictionary specifying the fixtures to delete for each type.
:type fixtures_dict: ``dict``.
:param raise_on_fail: Optional If True, raises exception if delete fails on any model.
:type raise_on_fail: ``boolean``
"""
for model_type, models in six.iteritems(models_dict):
PERSISTENCE_MODEL = FIXTURE_PERSISTENCE_MODEL.get(model_type, None)
for model in models:
try:
PERSISTENCE_MODEL.delete(model)
except Exception:
if raise_on_fail:
raise
def _validate_fixtures_pack(self, fixtures_pack):
fixtures_pack_path = self._get_fixtures_pack_path(fixtures_pack)
if not self._is_fixture_pack_exists(fixtures_pack_path):
raise Exception('Fixtures pack "%s" not found in fixtures path %s.'
% (fixtures_pack, get_fixtures_base_path()))
return fixtures_pack_path
def _validate_fixture_dict(self, fixtures_dict, allowed=ALLOWED_FIXTURES):
fixture_types = fixtures_dict.keys()
for fixture_type in fixture_types:
if fixture_type not in allowed:
raise Exception('Disallowed fixture type: %s' % fixture_type)
def _is_fixture_pack_exists(self, fixtures_pack_path):
return os.path.exists(fixtures_pack_path)
def _get_fixture_file_path_abs(self, fixtures_pack_path, fixtures_type, fixture_name):
return os.path.join(fixtures_pack_path, fixtures_type, fixture_name)
def _get_fixtures_pack_path(self, fixtures_pack_name):
return os.path.join(get_fixtures_base_path(), fixtures_pack_name)
def get_fixture_file_path_abs(self, fixtures_pack, fixtures_type, fixture_name):
return os.path.join(get_fixtures_base_path(), fixtures_pack, fixtures_type, fixture_name)
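# A minimal usage sketch (illustrative only; the pack and file names below are
# hypothetical, and the *_to_db helpers assume a configured database
# connection):
#
#   loader = FixturesLoader()
#   fixtures = {'actions': ['action-1.yaml'], 'rules': ['rule-1.yaml']}
#   models = loader.save_fixtures_to_db(fixtures_pack='generic',
#                                       fixtures_dict=fixtures)
#   action_db = models['actions']['action-1.yaml']
#   loader.delete_fixtures_from_db(fixtures_pack='generic',
#                                  fixtures_dict=fixtures)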
|
|
from __future__ import division
from .eigenframe import EigenFrame
from astropy.coordinates import SkyCoord
from astropy.table import Table
from astropy.utils.console import ProgressBar
from astropy.utils.misc import NumpyRNGContext
import healpy as hp
import numpy as np
import numpy.linalg as nl
from scipy.stats import gaussian_kde
from lalinference.bayestar import distance, moc
from functools import partial
from six.moves import copyreg
from six import with_metaclass
__all__ = ('Clustered2DSkyKDE', 'Clustered3DSkyKDE', 'Clustered2Plus1DSkyKDE')
def km_assign(mus, cov, pts):
"""Implements the assignment step in the k-means algorithm. Given a
set of centers, ``mus``, a covariance matrix used to produce a
metric on the space, ``cov``, and a set of points, ``pts`` (shape
``(npts, ndim)``), assigns each point to its nearest center,
returning an array of indices of shape ``(npts,)`` giving the
assignments.
"""
k = mus.shape[0]
n = pts.shape[0]
dists = np.zeros((k, n))
for i, mu in enumerate(mus):
dx = pts - mu
try:
dists[i, :] = np.sum(dx * nl.solve(cov, dx.T).T, axis=1)
except nl.LinAlgError:
dists[i, :] = np.nan
return np.nanargmin(dists, axis=0)
def km_centroids(pts, assign, k):
"""Implements the centroid-update step of the k-means algorithm.
Given a set of points, ``pts``, of shape ``(npts, ndim)``, and an
assignment of each point to a region, ``assign``, and the number
of means, ``k``, returns an array of shape ``(k, ndim)`` giving
the centroid of each region.
"""
mus = np.zeros((k, pts.shape[1]))
for i in range(k):
sel = assign == i
if np.sum(sel) > 0:
mus[i, :] = np.mean(pts[sel, :], axis=0)
else:
mus[i, :] = pts[np.random.randint(pts.shape[0]), :]
return mus
def k_means(pts, k):
"""Implements k-means clustering on the set of points.
:param pts: Array of shape ``(npts, ndim)`` giving the points on
which k-means is to operate.
:param k: Positive integer giving the number of regions.
:return: ``(centroids, assign)``, where ``centroids`` is an ``(k,
ndim)`` array giving the centroid of each region, and ``assign``
is a ``(npts,)`` array of integers between 0 (inclusive) and k
(exclusive) indicating the assignment of each point to a region.
"""
assert pts.shape[0] > k, 'must have more points than means'
cov = np.cov(pts, rowvar=0)
mus = np.random.permutation(pts)[:k, :]
assign = km_assign(mus, cov, pts)
while True:
old_mus = mus
old_assign = assign
mus = km_centroids(pts, assign, k)
assign = km_assign(mus, cov, pts)
if np.all(assign == old_assign):
break
return mus, assign
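# Usage sketch for k_means on synthetic data (illustrative, not part of the
# original module):
#
#   pts = np.random.randn(500, 2)
#   pts[250:] += 5.0                      # second, offset cluster
#   centroids, assign = k_means(pts, 2)
#   assert centroids.shape == (2, 2) and assign.shape == (500,)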
def _cluster(cls, pts, trials, i, seed):
k = i // trials
if k == 0:
raise ValueError('Expected at least one cluster')
try:
if k == 1:
assign = np.zeros(len(pts), dtype=np.intp)
else:
with NumpyRNGContext(i + seed):
_, assign = k_means(pts, k)
obj = cls(pts, assign=assign)
except np.linalg.LinAlgError:
return -np.inf,
else:
return obj.bic, k, obj.kdes
class _mapfunc(object):
def __init__(self, func):
self._func = func
def __call__(self, i_arg):
i, arg = i_arg
return i, self._func(arg)
class ClusteredKDE(object):
def __init__(self, pts, max_k=40, trials=5, assign=None,
multiprocess=False):
self.multiprocess = multiprocess
if assign is None:
print('clustering ...')
# Make sure that each thread gets a different random number state.
# We start by drawing a random integer s in the main thread, and
# then the i'th subprocess will seed itself with the integer i + s.
#
# The seed must be an unsigned 32-bit integer, so if there are n
# threads, then s must be drawn from the interval [0, 2**32 - n).
seed = np.random.randint(0, 2**32 - max_k * trials)
func = partial(_cluster, type(self), pts, trials, seed=seed)
self.bic, self.k, self.kdes = max(
self._map(func, range(trials, (max_k + 1) * trials)),
key=lambda items: items[:2])
else:
# Build KDEs for each cluster, skipping degenerate clusters
self.kdes = []
npts, ndim = pts.shape
self.k = assign.max() + 1
for i in range(self.k):
sel = (assign == i)
cluster_pts = pts[sel, :]
# Equivalent to but faster than len(set(pts))
# FIXME: replace with the following in Numpy >= 1.13.0:
# nuniq = len(np.unique(cluster_pts, axis=0))
nuniq = len(np.unique(
np.ascontiguousarray(cluster_pts).view(
'V{}'.format(ndim * pts.dtype.itemsize))))
# Skip if there are fewer unique points than dimensions
if nuniq <= ndim:
continue
try:
kde = gaussian_kde(cluster_pts.T)
except (np.linalg.LinAlgError, ValueError):
# If there are fewer unique points than degrees of freedom,
# then the KDE will fail because the covariance matrix is
# singular. In that case, don't bother adding that cluster.
pass
else:
self.kdes.append(kde)
# Calculate BIC
# The number of parameters is:
#
# * ndim for each centroid location
#
# * (ndim+1)*ndim/2 Kernel covariances for each cluster
#
# * one weighting factor for the cluster (minus one for the
# overall constraint that the weights must sum to one)
nparams = self.k*ndim + self.k*((ndim+1)*(ndim)/2) + self.k - 1
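# For example, k = 3 clusters in ndim = 2 dimensions gives
# nparams = 3*2 + 3*3 + (3 - 1) = 17.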
with np.errstate(divide='ignore'):
self.bic = (
np.sum(np.log(self.eval_kdes(pts))) -
nparams/2.0*np.log(npts))
def eval_kdes(self, pts):
pts = pts.T
return sum(w * kde(pts) for w, kde in zip(self.weights, self.kdes))
def __call__(self, pts):
return self.eval_kdes(pts)
@property
def weights(self):
"""Get the cluster weights: the fraction of the points within each
cluster."""
w = np.asarray([kde.n for kde in self.kdes])
return w / np.sum(w)
def _map(self, func, items):
# FIXME: ProgressBar.map(..., multiprocess=True) uses imap_unordered,
# but we want the result to come back in order. This should be fixed,
# or at least correctly documented, in Astropy.
if self.multiprocess:
_, result = zip(*sorted(ProgressBar.map(_mapfunc(func),
list(enumerate(items)),
multiprocess=True)))
return list(result)
else:
return ProgressBar.map(func, items, multiprocess=False)
class SkyKDE(ClusteredKDE):
@classmethod
def transform(cls, pts):
"""Override in sub-classes to transform points."""
raise NotImplementedError
def __init__(self, pts, max_k=40, trials=5, assign=None,
multiprocess=False):
if assign is None:
pts = self.transform(pts)
super(SkyKDE, self).__init__(
pts, max_k=max_k, trials=trials, assign=assign,
multiprocess=multiprocess)
def __call__(self, pts):
return super(SkyKDE, self).__call__(self.transform(pts))
def _bayestar_adaptive_grid(self, top_nside=16, rounds=8):
"""Implement of the BAYESTAR adaptive mesh refinement scheme as
described in Section VI of Singer & Price 2016, PRD, 93, 024013
(http://dx.doi.org/10.1103/PhysRevD.93.024013).
FIXME: Consider refactoring BAYESTAR itself to perform the adaptation
step in Python.
"""
top_npix = hp.nside2npix(top_nside)
nrefine = top_npix // 4
cells = zip([0] * nrefine, [top_nside // 2] * nrefine, range(nrefine))
for iround in range(rounds - 1):
print('adaptive refinement round {} of {} ...'.format(
iround + 1, rounds - 1))
cells = sorted(cells, key=lambda p_n_i: p_n_i[0] / p_n_i[1]**2)
new_nside, new_ipix = np.transpose([
(nside * 2, ipix * 4 + i)
for _, nside, ipix in cells[-nrefine:] for i in range(4)])
theta, phi = hp.pix2ang(new_nside, new_ipix, nest=True)
ra = phi
dec = 0.5 * np.pi - theta
p = self(np.column_stack((ra, dec)))
cells[-nrefine:] = zip(p, new_nside, new_ipix)
return cells
def as_healpix(self):
"""Returns a HEALPix multi-order map of the posterior density."""
post, nside, ipix = zip(*self._bayestar_adaptive_grid())
post = np.asarray(list(post))
nside = np.asarray(list(nside))
ipix = np.asarray(list(ipix))
# Make sure that sky map is normalized (it should be already)
post /= np.sum(post * hp.nside2pixarea(nside))
# Convert from NESTED to UNIQ pixel indices
order = np.log2(nside).astype(int)
uniq = moc.nest2uniq(order.astype(np.int8), ipix.astype(np.uint64))
# Done!
return Table([uniq, post], names=['UNIQ', 'PROBDENSITY'])
# We have to put in some hooks to make instances of Clustered2DSkyKDE picklable
# because we dynamically create subclasses with different values of the 'frame'
# class variable. This gets even trickier because we need both the class and
# instance objects to be picklable.
class _Clustered2DSkyKDEMeta(type):
"""Metaclass to make dynamically created subclasses of Clustered2DSkyKDE
picklable."""
def _Clustered2DSkyKDEMeta_pickle(cls):
"""Pickle dynamically created subclasses of Clustered2DSkyKDE."""
return type, (cls.__name__, cls.__bases__, {'frame': cls.frame})
# Register function to pickle subclasses of Clustered2DSkyKDE.
copyreg.pickle(_Clustered2DSkyKDEMeta, _Clustered2DSkyKDEMeta_pickle)
def _Clustered2DSkyKDE_factory(name, frame):
"""Unpickle instances of dynamically created subclasses of
Clustered2DSkyKDE.
FIXME: In Python 3, we could make this a class method of Clustered2DSkyKDE.
Unfortunately, Python 2 is picky about pickling bound class methods."""
new_cls = type(name, (Clustered2DSkyKDE,), {'frame': frame})
return super(Clustered2DSkyKDE, Clustered2DSkyKDE).__new__(new_cls)
class Clustered2DSkyKDE(with_metaclass(_Clustered2DSkyKDEMeta, SkyKDE)):
r"""Represents a kernel-density estimate of a sky-position PDF that has
been decomposed into clusters, using a different kernel for each
cluster.
The estimated PDF is
.. math::
p\left( \vec{\theta} \right) = \sum_{i = 0}^{k-1} \frac{N_i}{N}
\sum_{\vec{x} \in C_i} N\left[\vec{x}, \Sigma_i\right]\left( \vec{\theta}
\right)
where :math:`C_i` is the set of points belonging to cluster
:math:`i`, :math:`N_i` is the number of points in this cluster,
and :math:`\Sigma_i` is the optimally-converging KDE covariance
associated with cluster :math:`i`.
The number of clusters, :math:`k` is chosen to maximize the `BIC
<http://en.wikipedia.org/wiki/Bayesian_information_criterion>`_
for the given set of points being drawn from the clustered KDE.
The points are assigned to clusters using the k-means algorithm,
with a decorrelated metric. The overall clustering behavior is
similar to the well-known `X-Means
<http://www.cs.cmu.edu/~dpelleg/download/xmeans.pdf>`_ algorithm.
"""
frame = None
@classmethod
def transform(cls, pts):
pts = SkyCoord(*pts.T, unit='rad').transform_to(cls.frame).spherical
return np.column_stack((pts.lon.rad, np.sin(pts.lat.rad)))
def __new__(cls, pts, *args, **kwargs):
frame = EigenFrame.for_coords(SkyCoord(*pts.T, unit='rad'))
name = '{:s}_{:x}'.format(cls.__name__, id(frame))
new_cls = type(name, (cls,), {'frame': frame})
return super(Clustered2DSkyKDE, cls).__new__(new_cls)
def __reduce__(self):
"""Pickle instances of dynamically created subclasses of
Clustered2DSkyKDE."""
factory_args = self.__class__.__name__, self.frame
return _Clustered2DSkyKDE_factory, factory_args, self.__dict__
def eval_kdes(self, pts):
base = super(Clustered2DSkyKDE, self).eval_kdes
dphis = (0.0, 2.0*np.pi, -2.0*np.pi)
phi, z = pts.T
return sum(base(np.column_stack((phi+dphi, z))) for dphi in dphis)
class Clustered3DSkyKDE(SkyKDE):
"""Like :class:`Clustered2DSkyKDE`, but clusters in 3D
space. Can compute volumetric posterior density (per cubic Mpc),
and also produce Healpix maps of the mean and standard deviation
of the log-distance."""
@classmethod
def transform(cls, pts):
return SkyCoord(*pts.T, unit='rad').cartesian.xyz.value.T
def __call__(self, pts, distances=False):
"""Given an array of positions in RA, DEC, compute the marginal sky
posterior and optinally the conditional distance parameters."""
func = partial(distance.cartesian_kde_to_moments,
datasets=[_.dataset for _ in self.kdes],
inverse_covariances=[_.inv_cov for _ in self.kdes],
weights=self.weights)
probdensity, mean, std = zip(*self._map(func, self.transform(pts)))
if distances:
mu, sigma, norm = distance.moments_to_parameters(mean, std)
return probdensity, mu, sigma, norm
else:
return probdensity
def posterior_spherical(self, pts):
"""Evaluate the posterior probability density in spherical polar
coordinates, as a function of (ra, dec, distance)."""
return super(Clustered3DSkyKDE, self).__call__(pts)
def as_healpix(self):
"""Returns a HEALPix multi-order map of the posterior density
and conditional distance distribution parameters."""
m = super(Clustered3DSkyKDE, self).as_healpix()
order, ipix = moc.uniq2nest(m['UNIQ'])
nside = 2 ** order.astype(int)
theta, phi = hp.pix2ang(nside, ipix.astype(np.int64), nest=True)
p = np.column_stack((phi, 0.5 * np.pi - theta))
print('evaluating distance layers ...')
_, m['DISTMU'], m['DISTSIGMA'], m['DISTNORM'] = self(p, distances=True)
return m
class Clustered2Plus1DSkyKDE(Clustered3DSkyKDE):
"""A hybrid sky map estimator that uses a 2D clustered KDE for the marginal
distribution as a function of (RA, Dec) and a 3D clustered KDE for the
conditional distance distribution."""
def __init__(self, pts, max_k=40, trials=5, assign=None,
multiprocess=False):
if assign is None:
self.twod = Clustered2DSkyKDE(
pts, max_k=max_k, trials=trials, assign=assign,
multiprocess=multiprocess)
super(Clustered2Plus1DSkyKDE, self).__init__(
pts, max_k=max_k, trials=trials, assign=assign,
multiprocess=multiprocess)
def __call__(self, pts, distances=False):
probdensity = self.twod(pts)
if distances:
base = super(Clustered2Plus1DSkyKDE, self)
_, distmu, distsigma, distnorm = base.__call__(pts, distances=True)
return probdensity, distmu, distsigma, distnorm
else:
return probdensity
def posterior_spherical(self, pts):
"""Evaluate the posterior probability density in spherical polar
coordinates, as a function of (ra, dec, distance)."""
base = super(Clustered2Plus1DSkyKDE, self)
return self(pts) * base.posterior_spherical(pts) / base.__call__(pts)
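# Illustrative sketch of the generic ClusteredKDE on synthetic samples (small
# max_k/trials keep the model-selection loop cheap; this is an example, not
# part of the library API):
#
#   pts = np.concatenate([np.random.randn(200, 2),
#                         np.random.randn(200, 2) + 4.0])
#   kde = ClusteredKDE(pts, max_k=4, trials=2)
#   density = kde(pts)      # weighted sum of the per-cluster KDEs
#   print(kde.k, kde.bic)   # chosen number of clusters and its BIC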
|
|
import json
import logging
import os
import simplejson
from redash.query_runner import *
from redash.settings import parse_boolean
from redash.utils import SimpleJSONEncoder
logger = logging.getLogger(__name__)
ANNOTATE_QUERY = parse_boolean(os.environ.get('ATHENA_ANNOTATE_QUERY', 'true'))
SHOW_EXTRA_SETTINGS = parse_boolean(os.environ.get('ATHENA_SHOW_EXTRA_SETTINGS', 'true'))
OPTIONAL_CREDENTIALS = parse_boolean(os.environ.get('ATHENA_OPTIONAL_CREDENTIALS', 'true'))
try:
import pyathena
import boto3
enabled = True
except ImportError:
enabled = False
_TYPE_MAPPINGS = {
'boolean': TYPE_BOOLEAN,
'tinyint': TYPE_INTEGER,
'smallint': TYPE_INTEGER,
'integer': TYPE_INTEGER,
'bigint': TYPE_INTEGER,
'double': TYPE_FLOAT,
'varchar': TYPE_STRING,
'timestamp': TYPE_DATETIME,
'date': TYPE_DATE,
'varbinary': TYPE_STRING,
'array': TYPE_STRING,
'map': TYPE_STRING,
'row': TYPE_STRING,
'decimal': TYPE_FLOAT,
}
class SimpleFormatter(object):
def format(self, operation, parameters=None):
return operation
class Athena(BaseQueryRunner):
noop_query = 'SELECT 1'
@classmethod
def name(cls):
return "Amazon Athena"
@classmethod
def configuration_schema(cls):
schema = {
'type': 'object',
'properties': {
'region': {
'type': 'string',
'title': 'AWS Region'
},
'aws_access_key': {
'type': 'string',
'title': 'AWS Access Key'
},
'aws_secret_key': {
'type': 'string',
'title': 'AWS Secret Key'
},
's3_staging_dir': {
'type': 'string',
'title': 'S3 Staging (Query Results) Bucket Path'
},
'schema': {
'type': 'string',
'title': 'Schema Name',
'default': 'default'
},
'glue': {
'type': 'boolean',
'title': 'Use Glue Data Catalog',
},
},
'required': ['region', 's3_staging_dir'],
'order': ['region', 'aws_access_key', 'aws_secret_key', 's3_staging_dir', 'schema'],
'secret': ['aws_secret_key']
}
if SHOW_EXTRA_SETTINGS:
schema['properties'].update({
'encryption_option': {
'type': 'string',
'title': 'Encryption Option',
},
'kms_key': {
'type': 'string',
'title': 'KMS Key',
},
})
if not OPTIONAL_CREDENTIALS:
schema['required'] += ['aws_access_key', 'aws_secret_key']
return schema
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return ANNOTATE_QUERY
@classmethod
def type(cls):
return "athena"
def __get_schema_from_glue(self):
client = boto3.client(
'glue',
aws_access_key_id=self.configuration.get('aws_access_key', None),
aws_secret_access_key=self.configuration.get('aws_secret_key', None),
region_name=self.configuration['region']
)
schema = {}
paginator = client.get_paginator('get_tables')
for database in client.get_databases()['DatabaseList']:
iterator = paginator.paginate(DatabaseName=database['Name'])
for table in iterator.search('TableList[]'):
table_name = '%s.%s' % (database['Name'], table['Name'])
if table_name not in schema:
column = [columns['Name'] for columns in table['StorageDescriptor']['Columns']]
schema[table_name] = {'name': table_name, 'columns': column}
for partition in table['PartitionKeys']:
schema[table_name]['columns'].append(partition['Name'])
return schema.values()
def get_schema(self, get_stats=False):
if self.configuration.get('glue', False):
return self.__get_schema_from_glue()
schema = {}
query = """
SELECT table_schema, table_name, column_name
FROM information_schema.columns
WHERE table_schema NOT IN ('information_schema')
"""
results, error = self.run_query(query, None)
if error is not None:
raise Exception("Failed getting schema.")
results = json.loads(results)
for row in results['rows']:
table_name = '{0}.{1}'.format(row['table_schema'], row['table_name'])
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name]['columns'].append(row['column_name'])
return schema.values()
def run_query(self, query, user):
cursor = pyathena.connect(
s3_staging_dir=self.configuration['s3_staging_dir'],
region_name=self.configuration['region'],
aws_access_key_id=self.configuration.get('aws_access_key', None),
aws_secret_access_key=self.configuration.get('aws_secret_key', None),
schema_name=self.configuration.get('schema', 'default'),
encryption_option=self.configuration.get('encryption_option', None),
kms_key=self.configuration.get('kms_key', None),
formatter=SimpleFormatter()).cursor()
try:
cursor.execute(query)
column_tuples = [(i[0], _TYPE_MAPPINGS.get(i[1], None)) for i in cursor.description]
columns = self.fetch_columns(column_tuples)
rows = [dict(zip([c['name'] for c in columns], r)) for r in cursor.fetchall()]
qbytes = None
athena_query_id = None
try:
qbytes = cursor.data_scanned_in_bytes
except AttributeError as e:
logger.debug("Athena Upstream can't get data_scanned_in_bytes: %s", e)
try:
athena_query_id = cursor.query_id
except AttributeError as e:
logger.debug("Athena Upstream can't get query_id: %s", e)
data = {
'columns': columns,
'rows': rows,
'metadata': {
'data_scanned': qbytes,
'athena_query_id': athena_query_id
}
}
json_data = simplejson.dumps(data, ignore_nan=True, cls=SimpleJSONEncoder)
error = None
except KeyboardInterrupt:
if cursor.query_id:
cursor.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as ex:
if cursor.query_id:
cursor.cancel()
error = str(ex)
json_data = None
return json_data, error
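# For reference, a configuration sketch matching configuration_schema() above
# (placeholder values; Redash supplies this dict when the data source is set
# up through the UI):
#
#   {
#       'region': 'us-east-1',                             # required
#       's3_staging_dir': 's3://example-bucket/results/',  # required
#       'schema': 'default',
#       'glue': False,
#   }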
register(Athena)
|
|
# mako/util.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re
import collections
import codecs
import os
from mako import compat
import operator
def update_wrapper(decorated, fn):
decorated.__wrapped__ = fn
decorated.__name__ = fn.__name__
return decorated
class PluginLoader(object):
def __init__(self, group):
self.group = group
self.impls = {}
def load(self, name):
if name in self.impls:
return self.impls[name]()
else:
import pkg_resources
for impl in pkg_resources.iter_entry_points(
self.group,
name):
self.impls[name] = impl.load
return impl.load()
else:
from mako import exceptions
raise exceptions.RuntimeException(
"Can't load plugin %s %s" %
(self.group, name))
def register(self, name, modulepath, objname):
def load():
mod = __import__(modulepath)
for token in modulepath.split(".")[1:]:
mod = getattr(mod, token)
return getattr(mod, objname)
self.impls[name] = load
def verify_directory(dir):
"""create and/or verify a filesystem directory."""
tries = 0
while not os.path.exists(dir):
try:
tries += 1
os.makedirs(dir, compat.octal("0775"))
except OSError:
if tries > 5:
raise
def to_list(x, default=None):
if x is None:
return default
if not isinstance(x, (list, tuple)):
return [x]
else:
return x
class memoized_property(object):
"""A read-only @property that is only evaluated once."""
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
def __get__(self, obj, cls):
if obj is None:
return self
obj.__dict__[self.__name__] = result = self.fget(obj)
return result
class memoized_instancemethod(object):
"""Decorate a method memoize its return value.
Best applied to no-arg methods: memoization is not sensitive to
argument values, and will always return the same value even when
called with different arguments.
"""
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
def __get__(self, obj, cls):
if obj is None:
return self
def oneshot(*args, **kw):
result = self.fget(obj, *args, **kw)
memo = lambda *a, **kw: result
memo.__name__ = self.__name__
memo.__doc__ = self.__doc__
obj.__dict__[self.__name__] = memo
return result
oneshot.__name__ = self.__name__
oneshot.__doc__ = self.__doc__
return oneshot
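# Usage sketch for the two memoizers above (Config and compute() are
# hypothetical, for illustration only):
#
#   class Config(object):
#       @memoized_property
#       def expensive_value(self):
#           return compute()   # runs once; the result replaces the property
#
#       @memoized_instancemethod
#       def expensive_call(self):
#           return compute()   # first call computes, later calls replay it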
class SetLikeDict(dict):
"""a dictionary that has some setlike methods on it"""
def union(self, other):
"""produce a 'union' of this dict and another (at the key level).
values in the second dict take precedence over that of the first"""
x = SetLikeDict(**self)
x.update(other)
return x
class FastEncodingBuffer(object):
"""a very rudimentary buffer that is faster than StringIO,
but doesn't crash on unicode data like cStringIO."""
def __init__(self, encoding=None, errors='strict', as_unicode=False):
self.data = collections.deque()
self.encoding = encoding
if as_unicode:
self.delim = compat.u('')
else:
self.delim = ''
self.as_unicode = as_unicode
self.errors = errors
self.write = self.data.append
def truncate(self):
self.data = collections.deque()
self.write = self.data.append
def getvalue(self):
if self.encoding:
return self.delim.join(self.data).encode(self.encoding,
self.errors)
else:
return self.delim.join(self.data)
class LRUCache(dict):
"""A dictionary-like object that stores a limited number of items,
discarding lesser used items periodically.
This is a rewrite of LRUCache from Myghty to use a periodic timestamp-based
paradigm so that synchronization is not really needed. The size management
is inexact.
"""
class _Item(object):
def __init__(self, key, value):
self.key = key
self.value = value
self.timestamp = compat.time_func()
def __repr__(self):
return repr(self.value)
def __init__(self, capacity, threshold=.5):
self.capacity = capacity
self.threshold = threshold
def __getitem__(self, key):
item = dict.__getitem__(self, key)
item.timestamp = compat.time_func()
return item.value
def values(self):
return [i.value for i in dict.values(self)]
def setdefault(self, key, value):
if key in self:
return self[key]
else:
self[key] = value
return value
def __setitem__(self, key, value):
item = dict.get(self, key)
if item is None:
item = self._Item(key, value)
dict.__setitem__(self, key, item)
else:
item.value = value
self._manage_size()
def _manage_size(self):
while len(self) > self.capacity + self.capacity * self.threshold:
bytime = sorted(dict.values(self),
key=operator.attrgetter('timestamp'), reverse=True)
for item in bytime[self.capacity:]:
try:
del self[item.key]
except KeyError:
# if we couldn't find a key, most likely some other thread
# broke in on us. loop around and try again
break
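# Usage sketch: with capacity=2 and the default threshold of 0.5, pruning
# starts once the dict grows beyond 3 items and evicts the least recently
# used entries down to `capacity` (illustrative only):
#
#   cache = LRUCache(2)
#   for key in 'abcd':
#       cache[key] = key.upper()
#   # after the size-management pass only the 2 most recent keys remain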
# Regexp to match python magic encoding line
_PYTHON_MAGIC_COMMENT_re = re.compile(
r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
re.VERBOSE)
def parse_encoding(fp):
"""Deduce the encoding of a Python source file (binary mode) from magic
comment.
It does this in the same way as the `Python interpreter`__
.. __: http://docs.python.org/ref/encodings.html
The ``fp`` argument should be a seekable file object in binary mode.
"""
pos = fp.tell()
fp.seek(0)
try:
line1 = fp.readline()
has_bom = line1.startswith(codecs.BOM_UTF8)
if has_bom:
line1 = line1[len(codecs.BOM_UTF8):]
m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore'))
if not m:
try:
import parser
parser.suite(line1.decode('ascii', 'ignore'))
except (ImportError, SyntaxError):
# Either it's a real syntax error, in which case the source
# is not valid python source, or line2 is a continuation of
# line1, in which case we don't want to scan line2 for a magic
# comment.
pass
else:
line2 = fp.readline()
m = _PYTHON_MAGIC_COMMENT_re.match(
line2.decode('ascii', 'ignore'))
if has_bom:
if m:
raise SyntaxError("python refuses to compile code with both a UTF8" \
" byte-order-mark and a magic encoding comment")
return 'utf_8'
elif m:
return m.group(1)
else:
return None
finally:
fp.seek(pos)
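# Usage sketch (file-like objects via io.BytesIO):
#
#   import io
#   parse_encoding(io.BytesIO(b"# -*- coding: utf-8 -*-\n"))  # -> 'utf-8'
#   parse_encoding(io.BytesIO(codecs.BOM_UTF8 + b"pass\n"))   # -> 'utf_8'
#   parse_encoding(io.BytesIO(b"pass\n"))                     # -> None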
def sorted_dict_repr(d):
"""repr() a dictionary with the keys in order.
Used by the lexer unit test to compare parse trees based on strings.
"""
keys = list(d.keys())
keys.sort()
return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}"
def restore__ast(_ast):
"""Attempt to restore the required classes to the _ast module if it
appears to be missing them
"""
if hasattr(_ast, 'AST'):
return
_ast.PyCF_ONLY_AST = 2 << 9
m = compile("""\
def foo(): pass
class Bar(object): pass
if False: pass
baz = 'mako'
1 + 2 - 3 * 4 / 5
6 // 7 % 8 << 9 >> 10
11 & 12 ^ 13 | 14
15 and 16 or 17
-baz + (not +18) - ~17
baz and 'foo' or 'bar'
(mako is baz == baz) is not baz != mako
mako > baz < mako >= baz <= mako
mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
_ast.Module = type(m)
for cls in _ast.Module.__mro__:
if cls.__name__ == 'mod':
_ast.mod = cls
elif cls.__name__ == 'AST':
_ast.AST = cls
_ast.FunctionDef = type(m.body[0])
_ast.ClassDef = type(m.body[1])
_ast.If = type(m.body[2])
_ast.Name = type(m.body[3].targets[0])
_ast.Store = type(m.body[3].targets[0].ctx)
_ast.Str = type(m.body[3].value)
_ast.Sub = type(m.body[4].value.op)
_ast.Add = type(m.body[4].value.left.op)
_ast.Div = type(m.body[4].value.right.op)
_ast.Mult = type(m.body[4].value.right.left.op)
_ast.RShift = type(m.body[5].value.op)
_ast.LShift = type(m.body[5].value.left.op)
_ast.Mod = type(m.body[5].value.left.left.op)
_ast.FloorDiv = type(m.body[5].value.left.left.left.op)
_ast.BitOr = type(m.body[6].value.op)
_ast.BitXor = type(m.body[6].value.left.op)
_ast.BitAnd = type(m.body[6].value.left.left.op)
_ast.Or = type(m.body[7].value.op)
_ast.And = type(m.body[7].value.values[0].op)
_ast.Invert = type(m.body[8].value.right.op)
_ast.Not = type(m.body[8].value.left.right.op)
_ast.UAdd = type(m.body[8].value.left.right.operand.op)
_ast.USub = type(m.body[8].value.left.left.op)
_ast.Or = type(m.body[9].value.op)
_ast.And = type(m.body[9].value.values[0].op)
_ast.IsNot = type(m.body[10].value.ops[0])
_ast.NotEq = type(m.body[10].value.ops[1])
_ast.Is = type(m.body[10].value.left.ops[0])
_ast.Eq = type(m.body[10].value.left.ops[1])
_ast.Gt = type(m.body[11].value.ops[0])
_ast.Lt = type(m.body[11].value.ops[1])
_ast.GtE = type(m.body[11].value.ops[2])
_ast.LtE = type(m.body[11].value.ops[3])
_ast.In = type(m.body[12].value.ops[0])
_ast.NotIn = type(m.body[12].value.ops[1])
def read_file(path, mode='rb'):
fp = open(path, mode)
try:
data = fp.read()
return data
finally:
fp.close()
def read_python_file(path):
fp = open(path, "rb")
try:
encoding = parse_encoding(fp)
data = fp.read()
if encoding:
data = data.decode(encoding)
return data
finally:
fp.close()
|
|
"""Test zha lock."""
from unittest.mock import patch
import pytest
import zigpy.profiles.zha
import zigpy.zcl.clusters.closures as closures
import zigpy.zcl.clusters.general as general
import zigpy.zcl.foundation as zcl_f
from homeassistant.components.lock import DOMAIN
from homeassistant.const import STATE_LOCKED, STATE_UNAVAILABLE, STATE_UNLOCKED
from .common import async_enable_traffic, find_entity_id, send_attributes_report
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE
from tests.common import mock_coro
LOCK_DOOR = 0
UNLOCK_DOOR = 1
SET_PIN_CODE = 5
CLEAR_PIN_CODE = 7
SET_USER_STATUS = 9
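# The constants above are the ZCL DoorLock cluster server command IDs that
# the assertions below check against (values per the Zigbee Cluster Library
# specification).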
@pytest.fixture
async def lock(hass, zigpy_device_mock, zha_device_joined_restored):
"""Lock cluster fixture."""
zigpy_device = zigpy_device_mock(
{
1: {
SIG_EP_INPUT: [closures.DoorLock.cluster_id, general.Basic.cluster_id],
SIG_EP_OUTPUT: [],
SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.DOOR_LOCK,
}
},
)
zha_device = await zha_device_joined_restored(zigpy_device)
return zha_device, zigpy_device.endpoints[1].door_lock
async def test_lock(hass, lock):
"""Test zha lock platform."""
zha_device, cluster = lock
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
assert entity_id is not None
assert hass.states.get(entity_id).state == STATE_UNLOCKED
await async_enable_traffic(hass, [zha_device], enabled=False)
# test that the lock was created and that it is unavailable
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, [zha_device])
# test that the state has changed from unavailable to unlocked
assert hass.states.get(entity_id).state == STATE_UNLOCKED
# set state to locked
await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2})
assert hass.states.get(entity_id).state == STATE_LOCKED
# set state to unlocked
await send_attributes_report(hass, cluster, {1: 0, 0: 2, 2: 3})
assert hass.states.get(entity_id).state == STATE_UNLOCKED
# lock from HA
await async_lock(hass, cluster, entity_id)
# unlock from HA
await async_unlock(hass, cluster, entity_id)
# set user code
await async_set_user_code(hass, cluster, entity_id)
# clear user code
await async_clear_user_code(hass, cluster, entity_id)
# enable user code
await async_enable_user_code(hass, cluster, entity_id)
# disable user code
await async_disable_user_code(hass, cluster, entity_id)
async def async_lock(hass, cluster, entity_id):
"""Test lock functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
# lock via UI
await hass.services.async_call(
DOMAIN, "lock", {"entity_id": entity_id}, blocking=True
)
assert cluster.request.call_count == 1
assert cluster.request.call_args[0][0] is False
assert cluster.request.call_args[0][1] == LOCK_DOOR
async def async_unlock(hass, cluster, entity_id):
"""Test lock functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
# unlock via UI
await hass.services.async_call(
DOMAIN, "unlock", {"entity_id": entity_id}, blocking=True
)
assert cluster.request.call_count == 1
assert cluster.request.call_args[0][0] is False
assert cluster.request.call_args[0][1] == UNLOCK_DOOR
async def async_set_user_code(hass, cluster, entity_id):
"""Test set lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
# set lock code via service call
await hass.services.async_call(
"zha",
"set_lock_user_code",
{"entity_id": entity_id, "code_slot": 3, "user_code": "13246579"},
blocking=True,
)
assert cluster.request.call_count == 1
assert cluster.request.call_args[0][0] is False
assert cluster.request.call_args[0][1] == SET_PIN_CODE
assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2
assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled
assert (
cluster.request.call_args[0][5] == closures.DoorLock.UserType.Unrestricted
)
assert cluster.request.call_args[0][6] == "13246579"
async def async_clear_user_code(hass, cluster, entity_id):
"""Test clear lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
# clear lock code via service call
await hass.services.async_call(
"zha",
"clear_lock_user_code",
{
"entity_id": entity_id,
"code_slot": 3,
},
blocking=True,
)
assert cluster.request.call_count == 1
assert cluster.request.call_args[0][0] is False
assert cluster.request.call_args[0][1] == CLEAR_PIN_CODE
assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2
async def async_enable_user_code(hass, cluster, entity_id):
"""Test enable lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
# enable lock code via service call
await hass.services.async_call(
"zha",
"enable_lock_user_code",
{
"entity_id": entity_id,
"code_slot": 3,
},
blocking=True,
)
assert cluster.request.call_count == 1
assert cluster.request.call_args[0][0] is False
assert cluster.request.call_args[0][1] == SET_USER_STATUS
assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2
assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled
async def async_disable_user_code(hass, cluster, entity_id):
"""Test disable lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
# disable lock code via service call
await hass.services.async_call(
"zha",
"disable_lock_user_code",
{
"entity_id": entity_id,
"code_slot": 3,
},
blocking=True,
)
assert cluster.request.call_count == 1
assert cluster.request.call_args[0][0] is False
assert cluster.request.call_args[0][1] == SET_USER_STATUS
assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2
assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Disabled
|
|
from __future__ import print_function
import json
import urlparse
from utils import ProgressBar, Color, Symbol
from pprint import pprint
from settings import Settings
from influxdbclient import InfluxdbClient
import requests
class Query:
DEFAULT_FUNC = "mean"
def __init__(self, measurement, field):
self.func = Query.DEFAULT_FUNC
self.measurement = measurement
self.field = field
self.alias = self.field
def to_json(self, settings):
return {
"dsType": "influxdb",
"measurement": self.measurement,
"select": [[
{"params": [self.field], "type": "field"},
{"params": [], "type": self.func}
]],
"groupBy": [
{"params": ["$interval"], "type": "time"},
{"params": ["null"], "type": "fill"}
],
"resultFormat": "time_series",
"alias": self.alias
}
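# For reference, Query("cpu", "user").to_json(settings) yields a Grafana
# target of roughly this shape ("cpu" and "user" are hypothetical names):
#
#   {"dsType": "influxdb", "measurement": "cpu",
#    "select": [[{"params": ["user"], "type": "field"},
#                {"params": [], "type": "mean"}]],
#    "groupBy": [{"params": ["$interval"], "type": "time"},
#                {"params": ["null"], "type": "fill"}],
#    "resultFormat": "time_series", "alias": "user"}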
class Panel:
def __init__(self, title="", measurement=None):
self.title = title
self.measurement = measurement
self.queries = []
self.fill = 0
self.stack = False
self.leftYAxisLabel = None
self.overrides = []
self.alias_colors = {}
self.thresholds = {}
self.width = 6
self.linewidth = 1
def add_query(self, field):
query = Query(self.measurement, field)
self.queries.append(query)
return query
def sort_queries(self, order):
ordered_keys = order.split()
self.queries.sort(key=lambda x: ordered_keys.index(x.field) if x.field in ordered_keys else len(self.queries))
def process_graph_settings(self, plugin_settings):
if "graph_vlabel" in plugin_settings:
self.leftYAxisLabel = plugin_settings["graph_vlabel"].replace(
"${graph_period}",
plugin_settings.get("graph_period", "second")
)
if "graph_order" in plugin_settings:
self.sort_queries(plugin_settings["graph_order"])
def process_graph_thresholds(self, fields):
"""
@see http://munin-monitoring.org/wiki/fieldname.warning
@see http://munin-monitoring.org/wiki/fieldname.critical
"""
warnings = {fields[field].settings.get("warning") for field in fields if "warning" in fields[field].settings}
criticals = {fields[field].settings.get("critical") for field in fields if "critical" in fields[field].settings}
if len(warnings) > 1 or len(criticals) > 1:
# per-metric thresholds are not supported right now
return
if warnings or criticals:
self.thresholds = {"thresholdLine": False}
# format = min:max
# min threshold not supported by Grafana :(
if criticals:
val = criticals.pop().split(":")
if val[-1]:
self.thresholds["threshold2"] = float(val[-1])
# critical doesn't show up if warning is not set to something
self.thresholds["threshold1"] = self.thresholds["threshold2"]
if warnings:
val = warnings.pop().split(":")
if val[-1]:
self.thresholds["threshold1"] = float(val[-1])
def process_graph_types(self, fields):
"""
Munin processes draw types on a per metric basis whereas Grafana sets the type for
the whole panel. However overrides are possible since https://github.com/grafana/grafana/issues/425
@see http://munin-monitoring.org/wiki/fieldname.draw
"""
draw_list = [(field, fields[field].settings.get("draw", "LINE2")) for field in fields]
hasStack = bool([x for x, y in draw_list if "STACK" in y])
hasArea = bool([x for x, y in draw_list if "AREA" in y])
if hasArea:
self.fill = 5
self.linewidth = 0
if hasStack:
self.stack = True
# build overrides list
self.overrides = []
for field, draw in draw_list:
current = {"alias": field}
# LINE* should be matched
if hasArea and draw.startswith("LINE"):
current["fill"] = 0
# LINE* should be matched *but not* LINESTACK*
if hasStack and draw.startswith("LINE") and not draw.startswith("LINESTACK"):
current["stack"] = False
if draw == 'LINE':
current["linewidth"] = 1
else:
current["linewidth"] = int(draw[-1])/2 # lines appear bigger on Grafana
if len(current) > 1:
self.overrides.append(current)
# colors
self.alias_colors = {field: '#'+fields[field].settings.get("colour") for field in fields if "colour" in fields[field].settings}
def to_json(self, settings):
return {
"title": self.title,
"datasource": settings.influxdb['database'],
"stack": self.stack,
"fill": self.fill,
"type": "graph",
"span": self.width,
"targets": [query.to_json(settings) for query in self.queries],
"tooltip": {
"shared": len(self.queries) > 1,
"value_type": "individual"
},
"legend": {
"show": True,
"values": True,
"min": settings.grafana['show_minmax'],
"max": settings.grafana['show_minmax'],
"current": settings.grafana['show_minmax'],
"total": False,
"avg": settings.grafana['show_minmax'],
"alignAsTable": settings.grafana['show_minmax'],
"rightSide": False
},
"xaxis": {
"show": True
},
"yaxes":[
{"format": "short", "label": None, "logBase": 1},
{"format": "short", "label": None, "logBase": 1}
],
"grid": self.thresholds,
"seriesOverrides": self.overrides,
"aliasColors": self.alias_colors,
"leftYAxisLabel": self.leftYAxisLabel,
"linewidth": self.linewidth,
}
class HeaderPanel(Panel):
def __init__(self, title):
self.title = title
self.content = ""
self.measurement = None
def to_json(self, _):
return {
"title": self.title,
"mode": "html",
"type": "text",
"editable": True,
"span": 12,
"links": [{
"type": "absolute",
"title": "Fork me on GitHub!",
"url": "https://github.com/mvonthron/munin-influxdb",
}],
"content": self.content
}
class Row:
def __init__(self, title=""):
self.title = title
self.panels = []
self.height = "250px"
def add_panel(self, *args, **kwargs):
p = Panel(*args, **kwargs)
self.panels.append(p)
return p
def to_json(self, settings):
self.panels.sort(key=lambda x: x.measurement)
return {
"title": self.title,
"height": self.height,
"panels": [panel.to_json(settings) for panel in self.panels],
"showTitle": len(self.title) > 0
}
class Dashboard:
def __init__(self, settings):
self.title = settings.grafana['title']
self.tags = settings.grafana['tags']
self.rows = []
self.settings = settings
def prompt_setup(self):
setup = self.settings.grafana
print("\nGrafana: Please enter your connection information")
setup['host'] = raw_input(" - host [http://localhost:3000]: ").strip() or "http://localhost:3000"
setup['auth'] = None
setup['filename'] = None
while not GrafanaApi.test_host(setup['host']) and not setup['filename']:
print("\n{0}We couldn't connect to {1}, please try again or leave empty to save to a local file{2}".format(Symbol.WARN_YELLOW, setup['host'], Color.CLEAR))
setup['host'] = raw_input(" - host: ").strip() or ""
if not setup['host']:
setup['filename'] = raw_input(" - local file [/tmp/munin-grafana.json]: ").strip() or "/tmp/munin-grafana.json"
if GrafanaApi.test_host(setup['host']):
while not GrafanaApi.test_auth(setup['host'], setup['auth']):
user = raw_input(" - user [admin]: ").strip() or "admin"
password = InfluxdbClient.ask_password()
setup['auth'] = (user, password)
setup['access'] = None
while setup['access'] not in ("proxy", "direct"):
setup['access'] = raw_input(" - data source access [proxy]/direct: ").strip() or "proxy"
self.title = raw_input(" Dashboard title [{0}]: ".format(self.title)).strip() or self.title
graph_per_row = raw_input(" Number of graphs per row [2]: ").strip() or "2"
setup['graph_per_row'] = int(graph_per_row)
show_minmax = raw_input(" Show min/max/current in legend [y]/n: ").strip() or "y"
setup['show_minmax'] = show_minmax in ("y", "Y")
def add_header(self, settings):
row = Row("")
panel = HeaderPanel("Welcome to your new dashboard!")
content = \
'''
<a href=\"https://github.com/mvonthron/munin-influxdb\"><img style=\"position: absolute; top: 0; right: 0; border: 0;\" src=\"https://camo.githubusercontent.com/365986a132ccd6a44c23a9169022c0b5c890c387/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f72696768745f7265645f6161303030302e706e67\" alt=\"Fork me on GitHub\" data-canonical-src =\"https://s3.amazonaws.com/github/ribbons/forkme_right_red_aa0000.png\"></a>
<p>Thanks for using Munin-InfluxDB and the Grafana generator.</p>
<ul>
<li>Edit the panels so they match your desires by clicking on their titles</li>
<li>You can remove this header through the green menu button on the top right corner of this panel</li>
<li>If all your panels show an "InfluxDB Error" sign, please check the datasource settings (here in Grafana)</li>
<li>Feel free to post your suggestions on the GitHub page</li>
</ul>
'''
panel.content = content.format(**settings.influxdb)
row.panels.append(panel)
self.rows.append(row)
def add_row(self, title=""):
row = Row(title)
self.rows.append(row)
return row
def to_json(self, settings):
return {
"id": None,
"title": self.title,
"tags": self.tags,
"rows": [row.to_json(settings) for row in self.rows],
"timezone": "browser",
"time": {"from": "now-5d", "to": "now"},
}
def save(self, filename=None):
if filename is None:
filename = self.settings.grafana['filename']
with open(filename, "w") as f:
json.dump(self.to_json(self.settings), f)
def upload(self):
api = GrafanaApi(self.settings)
api.create_datasource(self.settings.influxdb['database'], self.settings.influxdb['database'])
return api.create_dashboard(self.to_json(self.settings))
@staticmethod
def generate_simple(title, structure):
"""
Generates a simple dashboard based on the series structure returned by
InfluxdbClient.list_columns().
@return: a Dashboard with one row and panel per series
"""
dashboard = Dashboard(title)
for series in structure:
row = dashboard.add_row()
panel = row.add_panel(series['name'].split(".")[-1], series['name'])
for col in series['fields']:
panel.add_query(col)
return dashboard
def generate(self):
progress_bar = ProgressBar(self.settings.nb_rrd_files)
self.add_header(self.settings)
for domain in self.settings.domains:
for host in self.settings.domains[domain].hosts:
row = self.add_row("{0} / {1}".format(domain, host))
for plugin in self.settings.domains[domain].hosts[host].plugins:
_plugin = self.settings.domains[domain].hosts[host].plugins[plugin]
panel = row.add_panel(_plugin.settings["graph_title"] or plugin, plugin)
for field in _plugin.fields:
query = panel.add_query(field)
if "label" in _plugin.fields[field].settings:
query.alias = _plugin.fields[field].settings["label"]
progress_bar.update()
panel.width = 12//self.settings.grafana['graph_per_row']
panel.process_graph_settings(_plugin.settings)
panel.process_graph_thresholds(_plugin.fields)
panel.process_graph_types(_plugin.fields)
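# Typical end-to-end flow (sketch; assumes a Settings object populated by the
# munin collection step):
#
#   dashboard = Dashboard(settings)
#   dashboard.prompt_setup()   # interactive host/auth/layout questions
#   dashboard.generate()       # one row per domain/host, one panel per plugin
#   dashboard.upload()         # or dashboard.save() to write the JSON locally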
class GrafanaApi:
def __init__(self, config):
# OAuth2 tokens not yet supported
self.auth = config.grafana['auth']
self.host = config.grafana['host'].rstrip('/')
self.config = config
@staticmethod
def test_host(host):
# should return "unauthorized"
r = requests.get(host.rstrip("/") + "/api/org")
return r.status_code == 401
@staticmethod
def test_auth(host, auth):
r = requests.get(host.rstrip("/") + "/api/org", auth=auth)
return r.status_code == 200
def create_datasource(self, name, dbname):
body = {
"name": name,
"database": dbname,
"type": "influxdb",
"url": "http://{0}:{1}".format(self.config.influxdb['host'].rstrip("/"), self.config.influxdb['port']),
"user": self.config.influxdb['user'],
"password": self.config.influxdb['password'],
"access": self.config.grafana['access'],
"basicAuth": False
}
r = requests.post(self.host + "/api/datasources", json=body, auth=self.auth)
return r.ok
def create_dashboard(self, dashboardJson):
r = requests.post(self.host + "/api/dashboards/db", json={"dashboard": dashboardJson}, auth=self.auth)
if r.ok:
return "".join([self.host, "/dashboard/db/", r.json()['slug']])
else:
print(r.json())
r.raise_for_status()
if __name__ == "__main__":
# main for dev/debug purpose only
dashboard = Dashboard("Munin")
dashboard.tags.append("munin")
dashboard.datasource = "munin"
row = dashboard.add_row("Tesla")
panel = row.add_panel("Memory", series="acadis.org.tesla.memory")
panel.datasource = dashboard.datasource
for field in ["apps", "free", "slab", "buffers"]:
panel.add_query(field)
# pprint(dashboard.to_json())
print(json.dumps(dashboard.to_json(),indent=2, separators=(',', ': ')))
# ---
import influxdbclient
client = influxdbclient.InfluxdbClient("...")
client.connect()
dashboard = Dashboard.generate_simple("Munin", client.list_columns())
with open("/tmp/munin-grafana.json", "w") as f:
json.dump(dashboard.to_json(), f, indent=2, separators=(',', ': '))
# with open("../data/config.json") as f:
# conf = json.load(f)
#
# dashboard = Dashboard("Munin dashboard")
# dashboard.generate(conf)
# print(json.dumps(dashboard.to_json(),indent=2, separators=(',', ': ')))
|
|
"""Testing utilities."""
import os
import re
import threading
import functools
from tempfile import NamedTemporaryFile
from numpy import testing
import numpy as np
from ._warnings import expected_warnings
import warnings
from .. import data, io, img_as_uint, img_as_float, img_as_int, img_as_ubyte
SKIP_RE = re.compile(r"(\s*>>>.*?)(\s*)#\s*skip\s+if\s+(.*)$")
def _assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def _assert_greater(a, b, msg=None):
message = "%r is not greater than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
try:
from nose.tools import assert_less
except ImportError:
assert_less = _assert_less
try:
from nose.tools import assert_greater
except ImportError:
assert_greater = _assert_greater
def doctest_skip_parser(func):
""" Decorator replaces custom skip test markup in doctests
Say a function has a docstring::
>>> something # skip if not HAVE_AMODULE
>>> something + else
>>> something # skip if HAVE_BMODULE
This decorator will evaluate the expression after ``skip if``. If this
evaluates to True, then the comment is replaced by ``# doctest: +SKIP``. If
False, then the comment is just removed. The expression is evaluated in the
``globals`` scope of `func`.
For example, if the module global ``HAVE_AMODULE`` is False, and module
global ``HAVE_BMODULE`` is False, the returned function will have docstring::
>>> something # doctest: +SKIP
>>> something + else
>>> something
"""
lines = func.__doc__.split('\n')
new_lines = []
for line in lines:
match = SKIP_RE.match(line)
if match is None:
new_lines.append(line)
continue
code, space, expr = match.groups()
try:
# Works as a function decorator
if eval(expr, func.__globals__):
code = code + space + "# doctest: +SKIP"
except AttributeError:
# Works as a class decorator
if eval(expr, func.__init__.__globals__):
code = code + space + "# doctest: +SKIP"
new_lines.append(code)
func.__doc__ = "\n".join(new_lines)
return func
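# Usage sketch (HAVE_AMODULE is an illustrative module-level flag):
#
#   HAVE_AMODULE = False
#
#   @doctest_skip_parser
#   def func():
#       """
#       >>> something  # skip if not HAVE_AMODULE
#       """
#   # the docstring line now ends with "# doctest: +SKIP"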
def roundtrip(img, plugin, suffix):
"""Save and read an image using a specified plugin"""
if '.' not in suffix:
suffix = '.' + suffix
temp_file = NamedTemporaryFile(suffix=suffix, delete=False)
fname = temp_file.name
temp_file.close()
io.imsave(fname, img, plugin=plugin)
new = io.imread(fname, plugin=plugin)
try:
os.remove(fname)
except Exception:
pass
return new
def color_check(plugin, fmt='png'):
"""Check roundtrip behavior for color images.
All major input types should be handled as ubytes and read
back correctly.
"""
img = img_as_ubyte(data.chelsea())
r1 = roundtrip(img, plugin, fmt)
testing.assert_allclose(img, r1)
img2 = img > 128
r2 = roundtrip(img2, plugin, fmt)
testing.assert_allclose(img2.astype(np.uint8), r2)
img3 = img_as_float(img)
with expected_warnings(['precision loss']):
r3 = roundtrip(img3, plugin, fmt)
testing.assert_allclose(r3, img)
with expected_warnings(['precision loss']):
img4 = img_as_int(img)
if fmt.lower() in ('tif', 'tiff'):
img4 -= 100
with expected_warnings(['sign loss']):
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img4)
else:
with expected_warnings(['sign loss|precision loss']):
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img_as_ubyte(img4))
img5 = img_as_uint(img)
with expected_warnings(['precision loss']):
r5 = roundtrip(img5, plugin, fmt)
testing.assert_allclose(r5, img)
def mono_check(plugin, fmt='png'):
"""Check the roundtrip behavior for images that support most types.
All major input types should be handled.
"""
img = img_as_ubyte(data.moon())
r1 = roundtrip(img, plugin, fmt)
testing.assert_allclose(img, r1)
img2 = img > 128
r2 = roundtrip(img2, plugin, fmt)
testing.assert_allclose(img2.astype(np.uint8), r2)
img3 = img_as_float(img)
with expected_warnings([r'precision|\A\Z']):
r3 = roundtrip(img3, plugin, fmt)
if r3.dtype.kind == 'f':
testing.assert_allclose(img3, r3)
else:
testing.assert_allclose(r3, img_as_uint(img))
with expected_warnings(['precision loss']):
img4 = img_as_int(img)
if fmt.lower() in ('tif', 'tiff'):
img4 -= 100
with expected_warnings([r'sign loss|\A\Z']):
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img4)
else:
with expected_warnings(['precision loss|sign loss']):
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img_as_uint(img4))
img5 = img_as_uint(img)
r5 = roundtrip(img5, plugin, fmt)
testing.assert_allclose(r5, img5)
def setup_test():
"""Default package level setup routine for skimage tests.
Import packages known to raise warnings, and then
force warnings to raise errors.
Also set the random seed to zero.
"""
warnings.simplefilter('default')
from scipy import signal, ndimage, special, optimize, linalg
from scipy.io import loadmat
from skimage import viewer
np.random.seed(0)
warnings.simplefilter('error')
def teardown_test():
"""Default package level teardown routine for skimage tests.
Restore warnings to default behavior
"""
warnings.simplefilter('default')
def test_parallel(num_threads=2):
"""Decorator to run the same function multiple times in parallel.
Parameters
----------
num_threads : int, optional
The number of times the function is run in parallel.
"""
assert num_threads > 0
def wrapper(func):
@functools.wraps(func)
def inner(*args, **kwargs):
threads = []
for i in range(num_threads - 1):
thread = threading.Thread(target=func, args=args, kwargs=kwargs)
threads.append(thread)
for thread in threads:
thread.start()
result = func(*args, **kwargs)
for thread in threads:
thread.join()
return result
return inner
return wrapper
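# Example usage (hedged sketch): run a test body in several threads at once to
# smoke out thread-safety problems; the decorated test below is hypothetical.
#
#   @test_parallel(num_threads=4)
#   def test_shared_state():
#       ...  # exercise code that touches shared state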
if __name__ == '__main__':
color_check('pil')
mono_check('pil')
mono_check('pil', 'bmp')
mono_check('pil', 'tiff')
|
|
#!/usr/bin/env python
# Jay Smith
# jay.smith@fireeye.com
#
########################################################################
# Copyright 2012 Mandiant
# Copyright 2014 FireEye
#
# Mandiant licenses this file to you under the Apache License, Version
# 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
########################################################################
# Traverse a directory, trying to find all exports for all valid PE
# executable files. Computes common shellcode hashes and stores them
# to a sqlite database file for later use, such as in IDA Pro.
import os
import sys
import time
import zlib
import ctypes
import os.path
import sqlite3
try:
import pefile
except ImportError, err:
print "Error while importing pefile module: %s" % str(err)
print "Please make sure it is installed: http://code.google.com/p/pefile/"
sys.exit(1)
# This is a list of interesting DLLs to use if not traversing a directory
INTERESTING_DLLS = [
'kernel32.dll', 'comctl32.dll', 'advapi32.dll', 'comdlg32.dll',
'gdi32.dll', 'msvcrt.dll', 'netapi32.dll', 'ntdll.dll',
'ntoskrnl.exe', 'oleaut32.dll', 'psapi.dll', 'shell32.dll',
'shlwapi.dll', 'srsvc.dll', 'urlmon.dll', 'user32.dll',
'winhttp.dll', 'wininet.dll', 'ws2_32.dll', 'wship6.dll',
'advpack.dll',
]
VERBOSE = False
############################################################
# SQL queries
############################################################
sql_testTableExists='''
SELECT name
FROM sqlite_master
WHERE name=?;
'''
sql_create_tables='''
create table symbol_hashes (
hash_key integer primary key,
hash_val integer,
hash_type integer,
lib_key integer,
symbol_name varchar(256)
);
create table source_libs (
lib_key integer primary key,
lib_name varchar(256)
);
create table hash_types (
hash_type integer primary key,
hash_size integer,
hash_name varchar(256),
hash_code text
);
--Index just the hash vals for when we don't know the hash type
create index idx_hash_val on symbol_hashes (hash_val);
--Index with hash_type prefix for when we know the type we're
-- looking for
create index idx_hash_type_hash_val on symbol_hashes (hash_type, hash_val);
'''
sql_add_hash_type='''
insert into hash_types (
hash_size,
hash_name,
hash_code
) values (?,?,?);
'''
sql_get_hash_type='''
select
hash_type
from hash_types
where hash_name=?;
'''
sql_get_hash_type_hash_size='''
select
hash_type
from hash_types
where hash_name=? and hash_size=?;
'''
sql_add_source_lib='''
insert into source_libs (
lib_name
) values (?);
'''
sql_add_symbol_hash='''
insert into symbol_hashes (
hash_val,
hash_type,
lib_key,
symbol_name
) values (?,?,?,?);
'''
sql_lookup_hash_value='''
select
hash_key,
hash_val,
hash_type,
lib_key,
symbol_name
from symbol_hashes
where hash_val=?;
'''
sql_lookup_hash_value_hash_type='''
select
hash_key,
hash_val,
hash_type,
lib_key,
symbol_name
from symbol_hashes
where hash_val=? and hash_type=?;
'''
sql_find_source_lib_by_name='''
select
lib_key
from source_libs
where lib_name=?;
'''
sql_find_symbol_hash_type_lib_symbol='''
select
hash_key
from symbol_hashes
where hash_val=? and hash_type=? and lib_key=? and symbol_name=?;
'''
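# Example (hedged sketch): resolving an unknown 32-bit hash against a database
# built by this script. ``db_path`` and the hash value are placeholders.
#
#   conn = sqlite3.connect(db_path)
#   rows = conn.execute(sql_lookup_hash_value, (0x0badf00d,)).fetchall()
#   for hash_key, hash_val, hash_type, lib_key, symbol_name in rows:
#       print "%08x -> %s" % (hash_val, symbol_name)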
############################################################
# Start of functions to implement operator primitives
############################################################
ROTATE_BITMASK = {
8 : 0xff,
16 : 0xffff,
32 : 0xffffffff,
64 : 0xffffffffffffffff,
}
def rcr(inVal, numShifts, cb, dataSize=32):
'''rotate carry right instruction emulation'''
if numShifts == 0:
#no shift: value and carry-in are unchanged; match the (value, carry) tuple below
return (inVal, cb & 1)
if (numShifts < 0) or (numShifts > dataSize):
raise ValueError('Bad numShifts')
#make sure carry in bit is only 0 or 1
cb = cb & 1
if (dataSize != 8) and (dataSize != 16) and (dataSize != 32) and (dataSize != 64):
raise ValueError('Bad dataSize')
#or the carry value in there
bitMask = ROTATE_BITMASK[dataSize]
inVal = inVal | (cb << dataSize)
x = (dataSize - numShifts) + 1
res = (inVal >> numShifts) | (inVal << x)
return (bitMask & res, 1 & (res >> dataSize))
def ror(inVal, numShifts, dataSize=32):
'''rotate right instruction emulation'''
if numShifts == 0:
return inVal
if (numShifts < 0) or (numShifts > dataSize):
raise ValueError('Bad numShifts')
if (dataSize != 8) and (dataSize != 16) and (dataSize != 32) and (dataSize != 64):
raise ValueError('Bad dataSize')
bitMask = ROTATE_BITMASK[dataSize]
return bitMask & ((inVal >> numShifts) | (inVal << (dataSize-numShifts)))
def rol(inVal, numShifts, dataSize=32):
'''rotate left instruction emulation'''
if numShifts == 0:
return inVal
if (numShifts < 0) or (numShifts > dataSize):
raise ValueError('Bad numShifts')
if (dataSize != 8) and (dataSize != 16) and (dataSize != 32) and (dataSize != 64):
raise ValueError('Bad dataSize')
bitMask = ROTATE_BITMASK[dataSize]
return bitMask & ((inVal << numShifts) | (inVal >> (dataSize-numShifts)))
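# Quick sanity checks (hedged sketch) for the rotate primitives:
#   ror(1, 1, 32)          == 0x80000000   # LSB rotates into the MSB
#   rol(0x80000000, 1, 32) == 0x00000001   # MSB rotates into the LSB
#   rcr(1, 1, 0, 32)       == (0, 1)       # bit falls into the carry-out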
############################################################
# Start of hash implementations
############################################################
def poisonIvyHash(inStr,fName):
#need a null at the end of the string
if inStr[-1] != '\x00':
inStr = inStr + '\x00'
cx = 0xffff
dx = 0xffff
for b1 in inStr:
bx = 0
ax = ord(b1) ^ (cx & 0xff)
cx = ((cx>>8)&0xff) | ((dx&0xff)<<8)
dx = ((dx>>8)&0xff) | 0x800
while (dx & 0xff00) != 0:
c_in = bx & 1
bx = bx >> 1
ax, c_out = rcr(ax, 1, c_in, 16)
if c_out != 0:
ax = ax ^ 0x8320
bx = bx ^ 0xedb8
dx = (dx&0xff) | (((((dx>>8)&0xff)-1)&0xff)<<8)
cx = cx ^ ax
dx = dx ^ bx
dx = 0xffff & ~dx
cx = 0xffff & ~cx
return 0xffffffff & ((dx<<16) | cx)
pseudocode_poisonIvyHash = '''Too hard to explain.\nString hash function from POISON IVY RAT.\nSee code for information'''
def rol3XorEax(inString,fName):
if inString is None:
return 0
ecx = 0
eax = 0
for i in inString:
eax = eax | ord(i)
ecx = ecx ^ eax
ecx = rol(ecx, 0x3, 32)
ecx += 1
eax = 0xffffffff & (eax << 8)
return ecx
pseudocode_rol3XorEax = '''eax := 0;
ecx := 0;
for c in input_string {
eax := eax | c ;
ecx := ecx ^ eax;
ecx := ROL(ecx, 0x3);
ecx := ecx + 1;
eax := 0xffffffff & (eax << 8);
};
return ecx;
'''
def rol7AddHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = rol(val, 0x7, 32)
val += ord(i)
return val
pseudocode_rol7AddHash32 = '''acc := 0;
for c in input_string {
acc := ROL(acc, 7);
acc := acc + c;
}
'''
def rol5AddHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = rol(val, 0x5, 32)
val += ord(i)
return val
pseudocode_rol5AddHash32 = '''acc := 0;
for c in input_string {
acc := ROL(acc, 5);
acc := acc + c;
}
'''
def ror7AddHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = ror(val, 0x7, 32)
val += ord(i)
return val
pseudocode_ror7AddHash32 = '''acc := 0;
for c in input_string {
acc := ROR(acc, 7);
acc := acc + c;
}
'''
def ror9AddHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = ror(val, 0x9, 32)
val += ord(i)
return val
pseudocode_ror9AddHash32 = '''acc := 0;
for c in input_string {
acc := ROR(acc, 9);
acc := acc + c;
}
'''
def ror11AddHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = ror(val, 0xb, 32)
val += ord(i)
return val
pseudocode_ror11AddHash32 = '''acc := 0;
for c in input_string {
acc := ROR(acc, 11);
acc := acc + c;
}
'''
def ror13AddHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = ror(val, 0xd, 32)
val += ord(i)
return val
pseudocode_ror13AddHash32 = '''acc := 0;
for c in input_string {
acc := ROR(acc, 13);
acc := acc + c;
}
'''
def ror13AddWithNullHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString + "\x00":
val = ror(val, 0xd, 32)
val += ord(i)
return val
pseudocode_ror13AddWithNullHash32 = '''acc := 0;
for c in input_string_with_trailing_NULL {
acc := ROR(acc, 13);
acc := acc + c;
}
'''
def ror13AddHash32Sub1(inString,fName):
'''Same as ror13AddHash32, but subtract 1 afterwards'''
return ror13AddHash32(inString,fName) - 1
pseudocode_ror13AddHash32Sub1 = '''acc := 0;
for c in input_string {
acc := ROR(acc, 13);
acc := acc + c;
}
acc := acc - 1;
'''
def shl7shr19Hash32(inString,fName):
val = 0
for i in inString:
edx = 0xffffffff & (val << 7)
ecx = 0xffffffff & (val >> 0x19)
eax = edx | ecx
t = 0xff & (ord(i) ^ 0xf4)
val = eax ^ t
return val
pseudocode_shl7shr19Hash32 = '''acc := 0;
for c in input_string {
t0 = (acc << 7);
t1 = (acc >> 0x19);
t2 = t0 | t1;
acc = t2 ^ c ^ 0xf4;
}
'''
def sll1AddHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
b = ord(i)
b = 0xff & (b | 0x60)
val = val + b
val = val << 1
val = 0xffffffff & val
return val
pseudocode_sll1AddHash32 = '''acc := 0;
for c in input_string {
acc = acc + (c | 0x60);
acc = acc << 1;
}
'''
def playWith0xedb88320Hash(inString,fName):
esi = 0xFFFFFFFF
for d in inString:
c = ord(d)
for i in range(8):
eax = c
eax ^= esi
b0 = eax & 0xFF
b0 &= 0x01
b0 = -b0
if b0 % 2 == 0: # sbb eax, eax
eax = 0
else:
eax = 0xFFFFFFFF
eax &= 0xedb88320
esi >>= 1
esi ^= eax
c >>= 1
return esi ^ 0xFFFFFFFF
pseudocode_playWith0xedb88320Hash = \
'''Too hard to explain, AND's with 0xedb88320, though.
String hash function from Gatak sample.
See code for information'''
def crc32(inString,fName):
return 0xffffffff & (zlib.crc32(inString))
def ror13AddHash32AddDll(inString,fName):
dllHash = 0
for c in fName:
dllHash = ror(dllHash, 0xd, 32)
if ord(c) < 97:
dllHash = int(dllHash) + ord(c)
else:
dllHash = int(dllHash) + ord(c) - 32
dllHash = ror(dllHash, 0xd, 32)
dllHash = ror(dllHash, 0xd, 32)
dllHash = ror(dllHash, 0xd, 32)
if inString is None:
return 0
val = 0
for i in inString:
val = ror(val, 0xd, 32)
val += ord(i)
val = ror(val, 0xd, 32)
val += dllHash
if val >= 4294967296:
val -= 4294967296
return val
pseudocode_ror13AddHash32AddDll = '''acc := 0;
for c in input_string {
acc := ROR(acc, 13);
acc := acc + c;
}
acc := acc + ror13add(DllName);
'''
def mult21AddHash32(inString,fName):
acc = 0
for i in inString:
acc = 0xffffffff & (acc * 0x21)
acc = 0xffffffff & (acc + ord(i))
return acc
pseudocode_hashMult21 = '''acc := 0;
for c in input_string {
acc := acc * 0x21;
acc := acc + c;
}
'''
def add1505Shl5Hash32(inString,fName):
val = 0x1505
for ch in inString:
val += (val << 5)
val &= 0xFFFFFFFF
val += ord(ch)
val &= 0xFFFFFFFF
return val
pseudocode_add1505Shl5Hash32 = '''val := 0x1505;
for c in input_string {
val := val + (val << 5);
val := val + c;
}
'''
def rol7XorHash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = rol(val, 0x7, 32)
val = val ^ (0xff & ord(i))
return val
pseudocode_rol7XorHash32 = '''acc := 0;
for c in input_string {
acc := ROL(acc, 7);
acc := acc ^ c;
}
'''
def rol7AddXor2Hash32(inString,fName):
if inString is None:
return 0
val = 0
for i in inString:
val = rol(val, 0x7, 32)
val += (ord(i) ^ 2)
return val
pseudocode_rol7AddXor2Hash32 = '''acc := 0;
for c in input_string {
acc := ROL(acc, 7);
acc := acc + (c ^ 2);
}
'''
def dualaccModFFF1Hash(inString,fName):
if inString is None:
return 0
v4, v8 = 0, 1
for ltr in inString:
v8 = (ord(ltr) + v8) % 0x0FFF1
v4 = (v4 + v8) % 0x0FFF1
return (v4 << 0x10)|v8
pseudocode_dualaccModFFF1Hash = '''
acc_1 := 0
acc_2 := 1
for c in input_string {
acc_2 = (acc_2 + c) % 0x0FFF1
acc_1 = (acc_1 + acc_2) % 0x0FFF1
}
return (acc_1 << 0x10) | acc_2
(equivalent to an Adler-32 style checksum, modulus 0xFFF1)
'''
def hash_Carbanak(inString,fName):
a2 = map(ord, inString)
ctr = 0
for i in a2:
ctr = (ctr << 4) + i
if (ctr & 0xF0000000):
ctr = (((ctr & 0xF0000000) >> 24) ^ ctr) & 0x0FFFFFFF
return ctr
pseudocode_hash_Carbanak = '''
acc_1 = 0
for c in input_string:
acc_1 = (acc_1 << 4) + c
if (acc_1 & 0xF0000000):
acc_1 = (((acc_1 & 0xF0000000) >> 24) ^ acc_1) & 0x0FFFFFFF
return acc_1
'''
# The list of tuples of (supported hash name, hash size, pseudo_code)
HASH_TYPES = [
('ror13AddHash32', 32, pseudocode_ror13AddHash32),
('ror13AddWithNullHash32', 32, pseudocode_ror13AddWithNullHash32),
('ror13AddHash32AddDll', 32, pseudocode_ror13AddHash32AddDll),
('poisonIvyHash', 32, pseudocode_poisonIvyHash),
('rol7AddHash32', 32, pseudocode_rol7AddHash32),
('rol5AddHash32', 32, pseudocode_rol5AddHash32),
('rol3XorEax', 32, pseudocode_rol3XorEax),
('ror7AddHash32', 32, pseudocode_ror7AddHash32),
('ror9AddHash32', 32, pseudocode_ror9AddHash32),
('ror11AddHash32', 32, pseudocode_ror11AddHash32),
('ror13AddHash32Sub1', 32, pseudocode_ror13AddHash32Sub1),
('shl7shr19Hash32', 32, pseudocode_shl7shr19Hash32),
('sll1AddHash32', 32, pseudocode_sll1AddHash32),
('playWith0xedb88320Hash', 32, pseudocode_playWith0xedb88320Hash),
('crc32', 32, 'Standard crc32'),
('mult21AddHash32', 32, pseudocode_hashMult21),
('add1505Shl5Hash32', 32, pseudocode_add1505Shl5Hash32),
('rol7XorHash32', 32, pseudocode_rol7XorHash32),
('rol7AddXor2Hash32', 32, pseudocode_rol7AddXor2Hash32),
('dualaccModFFF1Hash', 32, pseudocode_dualaccModFFF1Hash),
('hash_Carbanak', 32, pseudocode_hash_Carbanak),
]
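# Example (hedged sketch): the hash functions share a (symbol_name, file_name)
# signature so they can be called uniformly; most ignore the file name, while
# ror13AddHash32AddDll folds it into the result.
#
#   val = ror13AddHash32('LoadLibraryA', 'kernel32.dll')       # fName ignored
#   val_dll = ror13AddHash32AddDll('LoadLibraryA', 'KERNEL32.DLL')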
############################################################
# Database creator
############################################################
class ShellcodeDbCreator(object):
def __init__(self, dbPath, dirName):
self.dbPath = dbPath
self.dirName = dirName
self.conn = sqlite3.connect(dbPath)
self.initDb()
self.initHashesDict()
def close(self):
self.conn.close()
self.conn = None
def run(self):
#process all the files in the given directory
self.processDir(self.dirName)
def initDb(self):
#check for tables, create if not present
if not self.checkForTable('symbol_hashes'):
cur = self.conn.executescript(sql_create_tables)
self.conn.commit()
#add the known hashtypes
for hashName, hashSize, hashCode in HASH_TYPES:
self.addHashType(hashName, hashSize, hashCode)
def initHashesDict(self):
#The hashes dict will store tuple (hashtype_key, dyn method),
# indexed by name. used to iterate over when processing export names.
self.hashes = {}
for hashName, hashSize, hashCode in HASH_TYPES:
try:
meth = globals()[hashName]
hashType = self.getHashTypeByName(hashName)
self.hashes[hashName] = (hashType, meth)
except KeyError:
print "Could not find method %s" % hashName
def processDir(self, dirName):
for fName in os.listdir(dirName):
filePath = os.path.join(dirName, fName)
if not os.path.isfile(filePath):
#print "Could not find file: %s. Skipping" % fName
continue
try:
peFile = pefile.PE(filePath)
if ((not hasattr(peFile, "DIRECTORY_ENTRY_EXPORT")) or (peFile.DIRECTORY_ENTRY_EXPORT is None)):
if VERBOSE:
print "No exports: %s" % filePath
else:
#add the library to the lib table
print "Processing file %s" % filePath
time1 = time.time()
libKey = self.addSourceLib(fName)
symCount = 0
for sym in peFile.DIRECTORY_ENTRY_EXPORT.symbols:
if sym.name is not None:
symCount += 1
for hashName in self.hashes.keys():
hashType, hashMeth = self.hashes[hashName]
#print "Trying to hash: %s:%s" % (hashName, sym.name)
symHash = hashMeth(sym.name,fName)
#print " Done hashing: %08x:%s" % (symHash, sym.name)
if symHash is not None:
self.addSymbolHash(symHash, hashType, libKey, sym.name)
#commit outstanding transaction
self.conn.commit()
time2 = time.time()
timeDiff = time2 - time1
print "Processed %d export symbols in %.02f seconds: %s" % (symCount, timeDiff, filePath)
except pefile.PEFormatError, err:
if VERBOSE:
print "Skipping non-PE file %s: %s" % (filePath, str(err))
except Exception, err:
if VERBOSE:
print "Skipping %s: %s" % (filePath, str(err))
raise
def addHashType(self, hashName, hashSize, code):
#check if the hashname already exists
cur = self.conn.execute(sql_get_hash_type_hash_size, (hashName, hashSize))
retList = cur.fetchall()
if len(retList) > 0:
return
cur = self.conn.execute(sql_add_hash_type, (hashSize, hashName, code))
self.conn.commit()
if cur is None:
raise RuntimeError("Cursor is None following hash type insert")
if cur.lastrowid is None:
raise RuntimeError("lastrowid is None following hash type insert")
return cur.lastrowid
def getHashTypeByName(self, hashName):
'''
Returns None if the hashName is not found, else returns
the integer hash type key for the requested hash
'''
cur = self.conn.execute(sql_get_hash_type, (hashName, ))
retList = cur.fetchall()
if len(retList) == 0:
return None
elif len(retList) > 1:
print "ERROR: database in odd state. Multiple entries for hash name: %s" % hashName
#always return first entry, even on error
return retList[0][0]
def getSourceLibByName(self, libName):
'''
Returns None if the libName is not found, else returns
the integer key for the requested souce lib.
'''
cur = self.conn.execute(sql_find_source_lib_by_name, (libName, ))
retList = cur.fetchall()
if len(retList) == 0:
return None
elif len(retList) > 1:
print "ERROR: database in odd state. Multiple entries for source lib: %s" % libName
#always return first entry, even on error
return retList[0][0]
def addSourceLib(self, libName):
'''
Adds the given source lib to the db (if not already present) & returns the lib key.
'''
#lookup the library, insert if it doesn't exist
libKey = self.getSourceLibByName(libName)
if libKey is None:
cur = self.conn.execute(sql_add_source_lib, (libName, ))
self.conn.commit()
if cur is None:
raise RuntimeError("Cursor is None following source lib insert")
if cur.lastrowid is None:
raise RuntimeError("lastrowid is None following source lib insert")
return cur.lastrowid
else:
return libKey
def addSymbolHash(self, hashVal, hashType, libKey, symbolName):
'''Note: requires explicit commit afterwards by caller'''
#determine if tuple (hashVal, hashType, libKey, symbolName) already exists or not
#print "Trying to add symbol: %s %s, %s %s, %s %s, %s %s" % (
# type(hashVal), str(hashVal),
# type(hashType), str(hashType),
# type(libKey), str(libKey),
# type(symbolName), str(symbolName))
cur = self.conn.execute(sql_find_symbol_hash_type_lib_symbol,
(hashVal, hashType, libKey, symbolName)
)
retList = cur.fetchall()
if len(retList) == 0:
#insert it now
cur = self.conn.execute(sql_add_symbol_hash,
(hashVal, hashType, libKey, symbolName)
)
if cur is None:
raise RuntimeError("Cursor is None following symbol hash insert")
if cur.lastrowid is None:
raise RuntimeError("lastrowid is None following symbol hash insert")
return cur.lastrowid
else:
#print "Skipping duplicate hash: %08x %08x %08x %s" % (hashVal, hashType, libKey, symbolName)
pass
def checkForTable(self, tableName):
'''
Returns True if the given table name already exists, else returns False.
'''
cur = self.conn.execute(sql_testTableExists, (tableName,))
row = cur.fetchone()
if row is None:
#raise UnpreparedDatabaseException("Missing database table: %s" % tableName)
return False
return True
if __name__ == '__main__':
if len(sys.argv) != 3:
print "python %s <db_path> <dll_dir>" % sys.argv[0]
sys.exit(1)
dbPath = sys.argv[1]
walkPath = sys.argv[2]
hasher = ShellcodeDbCreator(dbPath, walkPath)
hasher.run()
hasher.close()
print "Done with symbol name hashing"
|
|
"""Subset the genome into standard sets of regions surrounding transcripts.
Provides a central place to bin the genome into smaller transcript-based regions
for structural variant calling and prioritization.
"""
import collections
import functools
import itertools
import math
import operator
import os
import numpy as np
import pybedtools
import toolz as tz
from bcbio import utils
from bcbio.bam import ref
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import datadict as dd
from bcbio.provenance import do
from bcbio.variation import bedutils, multi
def calculate_sv_bins(*items):
"""Determine bin sizes and regions to use for samples.
Unified approach to prepare regional bins for coverage calculations across
multiple CNV callers. Splits into target and antitarget regions allowing
callers to take advantage of both. Provides consistent target/anti-target
bin sizes across batches.
Uses callable_regions as the access BED file and mosdepth regions in
variant_regions to estimate depth for bin sizes.
"""
from bcbio.structural import cnvkit
if all(not cnvkit.use_general_sv_bins(utils.to_single_data(x)) for x in items):
return items
items = [utils.to_single_data(x) for x in items]
out = []
for cnv_group in _group_by_cnv_method(multi.group_by_batch(items, False)):
size_calc_fn = MemoizedSizes(cnv_group.region_file, cnv_group.items).get_target_antitarget_bin_sizes
for data in cnv_group.items:
target_bed, anti_bed = cnvkit.targets_w_bins(cnv_group.region_file, cnv_group.access_file, size_calc_fn,
cnv_group.work_dir, data)
if not data.get("regions"):
data["regions"] = {}
data["regions"]["bins"] = {"target": target_bed, "antitarget": anti_bed}
out.append([data])
if len(out) != len(items):
raise AssertionError("Inconsistent samples in and out of SV bin calculation:\nout: %s\nin : %s" %
(sorted([dd.get_sample_name(utils.to_single_data(x)) for x in out]),
sorted([dd.get_sample_name(x) for x in items])))
return out
class MemoizedSizes:
"""Delay calculating sizes unless needed; cache to calculate a single time.
"""
def __init__(self, cnv_file, items):
self.result = None
self.cnv_file = cnv_file
self.items = items
def get_target_antitarget_bin_sizes(self):
if self.result:
return self.result
else:
self.result = self._calc_sizes(self.cnv_file, self.items)
return self.result
def _calc_sizes(self, cnv_file, items):
"""Retrieve target and antitarget bin sizes based on depth.
Similar to CNVkit's do_autobin but tries to have a standard set of
ranges (50bp intervals for target and 10kb intervals for antitarget).
"""
bp_per_bin = 100000 # same target as CNVkit
range_map = {"target": (100, 250), "antitarget": (10000, 1000000)}
target_bps = []
anti_bps = []
checked_beds = set([])
for data in items:
region_bed = tz.get_in(["depth", "variant_regions", "regions"], data)
if region_bed and region_bed not in checked_beds:
for r in pybedtools.BedTool(region_bed).intersect(cnv_file):
if r.stop - r.start > range_map["target"][0]:
target_bps.append(float(r.name))
for r in pybedtools.BedTool(region_bed).intersect(cnv_file, v=True):
if r.stop - r.start > range_map["target"][1]:
anti_bps.append(float(r.name))
checked_beds.add(region_bed)
def scale_in_boundary(raw, round_interval, boundary):
min_val, max_val = boundary
out = int(math.ceil(raw / float(round_interval)) * round_interval)
if out > max_val:
return max_val
elif out < min_val:
return min_val
else:
return out
if target_bps:
raw_target_bin = bp_per_bin / float(np.median(target_bps))
target_bin = scale_in_boundary(raw_target_bin, 50, range_map["target"])
else:
target_bin = range_map["target"][1]
if anti_bps:
raw_anti_bin = bp_per_bin / float(np.median(anti_bps))
anti_bin = scale_in_boundary(raw_anti_bin, 10000, range_map["antitarget"])
else:
anti_bin = range_map["antitarget"][1]
return target_bin, anti_bin
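# Worked example (hedged sketch): with a median per-region depth of 500 in
# variant_regions, raw_target_bin = 100000 / 500 = 200bp; scale_in_boundary
# rounds up to the 50bp interval (still 200) and clamps to (100, 250), so the
# target bin size is 200bp. The antitarget size is computed the same way with
# a 10kb interval and a (10000, 1000000) boundary.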
def _group_by_cnv_method(batches):
"""Group into batches samples with identical CNV/SV approaches.
Allows sharing of background samples across multiple batches,
using all normals from tumor/normal pairs with the same prep method
for background.
"""
CnvGroup = collections.namedtuple("CnvGroup", "items, work_dir, access_file, region_file")
out = []
groups = collections.defaultdict(list)
for batch, items in batches.items():
for data in items:
work_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "structural", "bins", batch))
cnv_file = get_base_cnv_regions(data, work_dir, "transcripts100", include_gene_names=False)
if cnv_file:
break
assert cnv_file, ("Did not find coverage regions for batch %s: %s" %
(batch, " ".join([dd.get_sample_name(d) for d in items])))
groups[(cnv_file, dd.get_prep_method(data))].append((items, data, work_dir))
for (cnv_file, _), cur_group in groups.items():
group_items = functools.reduce(operator.add, [xs[0] for xs in cur_group])
access_file = tz.get_in(["config", "algorithm", "callable_regions"], cur_group[0][1])
out.append(CnvGroup(group_items, cur_group[0][2], access_file, cnv_file))
return out
def calculate_sv_coverage(data):
"""Calculate coverage within bins for downstream CNV calling.
Creates corrected cnr files with log2 ratios and depths.
"""
from bcbio.variation import coverage
from bcbio.structural import annotate, cnvkit
data = utils.to_single_data(data)
if not cnvkit.use_general_sv_bins(data):
return [[data]]
work_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "structural",
dd.get_sample_name(data), "bins"))
out_target_file = os.path.join(work_dir, "%s-target-coverage.cnn" % dd.get_sample_name(data))
out_anti_file = os.path.join(work_dir, "%s-antitarget-coverage.cnn" % dd.get_sample_name(data))
if ((not utils.file_exists(out_target_file) or not utils.file_exists(out_anti_file))
and (dd.get_align_bam(data) or dd.get_work_bam(data))):
# mosdepth
target_cov = coverage.run_mosdepth(data, "target", tz.get_in(["regions", "bins", "target"], data))
anti_cov = coverage.run_mosdepth(data, "antitarget", tz.get_in(["regions", "bins", "antitarget"], data))
target_cov_genes = annotate.add_genes(target_cov.regions, data, max_distance=0)
anti_cov_genes = annotate.add_genes(anti_cov.regions, data, max_distance=0)
out_target_file = _add_log2_depth(target_cov_genes, out_target_file, data)
out_anti_file = _add_log2_depth(anti_cov_genes, out_anti_file, data)
# TODO: Correct for GC bias
if os.path.exists(out_target_file):
data["depth"]["bins"] = {"target": out_target_file, "antitarget": out_anti_file}
return [[data]]
def _add_log2_depth(in_file, out_file, data):
"""Create a CNVkit cnn file with depths
http://cnvkit.readthedocs.io/en/stable/fileformats.html?highlight=cnn#target-and-antitarget-bin-level-coverages-cnn
"""
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
with utils.open_gzipsafe(in_file) as in_handle:
with open(tx_out_file, "w") as out_handle:
out_handle.write("chromosome\tstart\tend\tgene\tlog2\tdepth\n")
for line in in_handle:
parts = line.rstrip().split()
if len(parts) > 4:
chrom, start, end, orig_name, depth, gene_name = parts
depth = float(depth)
log2_depth = math.log(float(depth), 2) if depth else -20.0
out_handle.write("%s\t%s\t%s\t%s\t%.3f\t%.2f\n" %
(chrom, start, end, gene_name, log2_depth, depth))
return out_file
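# Example (hedged sketch): an annotated mosdepth line
#   chr1  1000  1250  r1  420.0  GENE
# becomes the cnn row
#   chr1  1000  1250  GENE  8.714  420.00
# since log2(420) ~= 8.714; zero-depth bins are floored at log2 = -20.0.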
def cnv_normalization(data):
"""Normalize CNV coverage depths by GC, repeat and background.
Provides normalized output based on CNVkit approaches, provides a
point for providing additional methods in the future:
- reference: calculates reference backgrounds from normals and pools
including GC and repeat information
- fix: Uses background to normalize coverage estimations
http://cnvkit.readthedocs.io/en/stable/pipeline.html#fix
"""
pass
# Region retrieval for SV calling
def get_base_cnv_regions(data, work_dir, genome_default="transcripts1e4", include_gene_names=True):
"""Retrieve set of target regions for CNV analysis.
Subsets to extended transcript regions for WGS experiments to avoid
long runtimes.
"""
cov_interval = dd.get_coverage_interval(data)
base_regions = get_sv_bed(data, include_gene_names=include_gene_names)
# if we don't have a configured BED or regions to use for SV calling
if not base_regions:
# For genome calls, subset to regions near genes as targets
if cov_interval == "genome":
base_regions = get_sv_bed(data, genome_default, work_dir, include_gene_names=include_gene_names)
if base_regions:
base_regions = remove_exclude_regions(base_regions, base_regions, [data])
# Finally, default to the defined variant regions
if not base_regions:
base_regions = dd.get_variant_regions(data)
return bedutils.clean_file(base_regions, data)
def remove_exclude_regions(orig_bed, base_file, items, remove_entire_feature=False):
"""Remove centromere and short end regions from an existing BED file of regions to target.
"""
from bcbio.structural import shared as sshared
out_bed = os.path.join("%s-noexclude.bed" % (utils.splitext_plus(base_file)[0]))
if not utils.file_uptodate(out_bed, orig_bed):
exclude_bed = sshared.prepare_exclude_file(items, base_file)
with file_transaction(items[0], out_bed) as tx_out_bed:
pybedtools.BedTool(orig_bed).subtract(pybedtools.BedTool(exclude_bed),
A=remove_entire_feature, nonamecheck=True).saveas(tx_out_bed)
if utils.file_exists(out_bed):
return out_bed
else:
return orig_bed
def get_sv_bed(data, method=None, out_dir=None, include_gene_names=True):
"""Retrieve a BED file of regions for SV and heterogeneity calling using the provided method.
method choices:
- exons: Raw BED file of exon regions
- transcripts: Full collapsed regions with the min and max of each transcript.
- transcriptsXXXX: Collapsed regions around transcripts with a window size of
XXXX.
- A custom BED file of regions
"""
if method is None:
method = tz.get_in(["config", "algorithm", "sv_regions"], data)
gene_file = dd.get_gene_bed(data)
if method and os.path.isfile(method):
return method
elif not gene_file or not method:
return None
elif method == "exons":
return gene_file
elif method.startswith("transcripts"):
window = method.split("transcripts")[-1]
window = int(float(window)) if window else 0
return _collapse_transcripts(gene_file, window, data, out_dir, include_gene_names=include_gene_names)
else:
raise ValueError("Unexpected transcript retrieval method: %s" % method)
def _collapse_transcripts(in_file, window, data, out_dir, include_gene_names=True):
"""Collapse transcripts into min/max coordinates and optionally add windows.
"""
if out_dir is None:
out_dir = os.path.dirname(in_file)
out_file = os.path.join(out_dir,
"%s-transcripts_w%s.bed" % (os.path.splitext(os.path.basename(in_file))[0],
window))
chrom_sizes = {}
for contig in ref.file_contigs(dd.get_ref_file(data), data["config"]):
chrom_sizes[contig.name] = contig.size
if not utils.file_uptodate(out_file, in_file):
with file_transaction(data, out_file) as tx_out_file:
prep_file = "%s-sortprep%s" % os.path.splitext(tx_out_file)
sort_cmd = bedutils.get_sort_cmd()
cmd = "{sort_cmd} -k4,4 -k1,1 {in_file} > {prep_file}"
do.run(cmd.format(**locals()), "Sort BED file by transcript name")
with open(tx_out_file, "w") as out_handle:
# Work around for segmentation fault issue with groupby
# https://github.com/daler/pybedtools/issues/131#issuecomment-89832476
x = pybedtools.BedTool(prep_file)
def gen():
for r in x:
yield r
for name, rs in itertools.groupby(gen(), lambda r: (r.name, r.chrom)):
rs = list(rs)
r = rs[0]
if r.chrom in chrom_sizes:
for gcoords in _group_coords(rs):
min_pos = max(min(gcoords) - window, 0)
max_pos = min(max(gcoords) + window, chrom_sizes[r.chrom])
if include_gene_names:
out_handle.write("%s\t%s\t%s\t%s\n" % (r.chrom, min_pos, max_pos, r.name))
else:
out_handle.write("%s\t%s\t%s\n" % (r.chrom, min_pos, max_pos))
return bedutils.sort_merge(out_file, data)
def _group_coords(rs):
"""Organize coordinate regions into groups for each transcript.
Avoids collapsing very large introns or repetitive genes spread across
the chromosome by limiting the intron size to 100kb for creating a single transcript
"""
max_intron_size = 1e5
coords = []
for r in rs:
coords.append(r.start)
coords.append(r.end)
coord_groups = []
cur_group = []
for coord in sorted(coords):
if not cur_group or coord - cur_group[-1] < max_intron_size:
cur_group.append(coord)
else:
coord_groups.append(cur_group)
cur_group = [coord]
if cur_group:
coord_groups.append(cur_group)
return coord_groups
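# Example (hedged sketch): exon records spanning 1000-1100 and 1200-1300 plus a
# distant copy at 900000-900100 produce two groups,
#   [[1000, 1100, 1200, 1300], [900000, 900100]]
# so the transcript is not collapsed across the >100kb gap.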
|
|
# Copyright 2013-2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from mock import Mock, ANY, call, patch
import six
from six import BytesIO
import time
from threading import Lock
from cassandra.cluster import Cluster, Session
from cassandra.connection import (Connection, HEADER_DIRECTION_TO_CLIENT, ProtocolError,
locally_supported_compressions, ConnectionHeartbeat, _Frame)
from cassandra.marshal import uint8_pack, uint32_pack, int32_pack
from cassandra.protocol import (write_stringmultimap, write_int, write_string,
SupportedMessage, ProtocolHandler)
class ConnectionTest(unittest.TestCase):
def make_connection(self):
c = Connection('1.2.3.4')
c._socket = Mock()
c._socket.send.side_effect = lambda x: len(x)
return c
def make_header_prefix(self, message_class, version=Connection.protocol_version, stream_id=0):
if Connection.protocol_version < 3:
return six.binary_type().join(map(uint8_pack, [
0xff & (HEADER_DIRECTION_TO_CLIENT | version),
0, # flags (compression)
stream_id,
message_class.opcode # opcode
]))
else:
return six.binary_type().join(map(uint8_pack, [
0xff & (HEADER_DIRECTION_TO_CLIENT | version),
0, # flags (compression)
0, # MSB for v3+ stream
stream_id,
message_class.opcode # opcode
]))
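# Example (hedged sketch): for protocol v4 and a SupportedMessage (opcode 0x06)
# on stream 0, the prefix built above is the five bytes
#   0x84 0x00 0x00 0x00 0x06
# where 0x84 = HEADER_DIRECTION_TO_CLIENT | version.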
def make_options_body(self):
options_buf = BytesIO()
write_stringmultimap(options_buf, {
'CQL_VERSION': ['3.0.1'],
'COMPRESSION': []
})
return options_buf.getvalue()
def make_error_body(self, code, msg):
buf = BytesIO()
write_int(buf, code)
write_string(buf, msg)
return buf.getvalue()
def make_msg(self, header, body=b""):
return header + uint32_pack(len(body)) + body
def test_bad_protocol_version(self, *args):
c = self.make_connection()
c._requests = Mock()
c.defunct = Mock()
# read in a SupportedMessage response
header = self.make_header_prefix(SupportedMessage, version=0x7f)
options = self.make_options_body()
message = self.make_msg(header, options)
c._iobuf = BytesIO()
c._iobuf.write(message)
c.process_io_buffer()
# make sure it errored correctly
c.defunct.assert_called_once_with(ANY)
args, kwargs = c.defunct.call_args
self.assertIsInstance(args[0], ProtocolError)
def test_negative_body_length(self, *args):
c = self.make_connection()
c._requests = Mock()
c.defunct = Mock()
# read in a SupportedMessage response
header = self.make_header_prefix(SupportedMessage)
message = header + int32_pack(-13)
c._iobuf = BytesIO()
c._iobuf.write(message)
c.process_io_buffer()
# make sure it errored correctly
c.defunct.assert_called_once_with(ANY)
args, kwargs = c.defunct.call_args
self.assertIsInstance(args[0], ProtocolError)
def test_unsupported_cql_version(self, *args):
c = self.make_connection()
c._requests = {0: (c._handle_options_response, ProtocolHandler.decode_message)}
c.defunct = Mock()
c.cql_version = "3.0.3"
# read in a SupportedMessage response
header = self.make_header_prefix(SupportedMessage)
options_buf = BytesIO()
write_stringmultimap(options_buf, {
'CQL_VERSION': ['7.8.9'],
'COMPRESSION': []
})
options = options_buf.getvalue()
c.process_msg(_Frame(version=4, flags=0, stream=0, opcode=SupportedMessage.opcode, body_offset=9, end_pos=9 + len(options)), options)
# make sure it errored correctly
c.defunct.assert_called_once_with(ANY)
args, kwargs = c.defunct.call_args
self.assertIsInstance(args[0], ProtocolError)
def test_prefer_lz4_compression(self, *args):
c = self.make_connection()
c._requests = {0: (c._handle_options_response, ProtocolHandler.decode_message)}
c.defunct = Mock()
c.cql_version = "3.0.3"
locally_supported_compressions.pop('lz4', None)
locally_supported_compressions.pop('snappy', None)
locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
locally_supported_compressions['snappy'] = ('snappycompress', 'snappydecompress')
# read in a SupportedMessage response
options_buf = BytesIO()
write_stringmultimap(options_buf, {
'CQL_VERSION': ['3.0.3'],
'COMPRESSION': ['snappy', 'lz4']
})
options = options_buf.getvalue()
c.process_msg(_Frame(version=4, flags=0, stream=0, opcode=SupportedMessage.opcode, body_offset=9, end_pos=9 + len(options)), options)
self.assertEqual(c.decompressor, locally_supported_compressions['lz4'][1])
def test_requested_compression_not_available(self, *args):
c = self.make_connection()
c._requests = {0: (c._handle_options_response, ProtocolHandler.decode_message)}
c.defunct = Mock()
# request lz4 compression
c.compression = "lz4"
locally_supported_compressions.pop('lz4', None)
locally_supported_compressions.pop('snappy', None)
locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
locally_supported_compressions['snappy'] = ('snappycompress', 'snappydecompress')
# read in a SupportedMessage response
header = self.make_header_prefix(SupportedMessage)
# the server only supports snappy
options_buf = BytesIO()
write_stringmultimap(options_buf, {
'CQL_VERSION': ['3.0.3'],
'COMPRESSION': ['snappy']
})
options = options_buf.getvalue()
c.process_msg(_Frame(version=4, flags=0, stream=0, opcode=SupportedMessage.opcode, body_offset=9, end_pos=9 + len(options)), options)
# make sure it errored correctly
c.defunct.assert_called_once_with(ANY)
args, kwargs = c.defunct.call_args
self.assertIsInstance(args[0], ProtocolError)
def test_use_requested_compression(self, *args):
c = self.make_connection()
c._requests = {0: (c._handle_options_response, ProtocolHandler.decode_message)}
c.defunct = Mock()
# request snappy compression
c.compression = "snappy"
locally_supported_compressions.pop('lz4', None)
locally_supported_compressions.pop('snappy', None)
locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
locally_supported_compressions['snappy'] = ('snappycompress', 'snappydecompress')
# read in a SupportedMessage response
header = self.make_header_prefix(SupportedMessage)
# the server only supports snappy
options_buf = BytesIO()
write_stringmultimap(options_buf, {
'CQL_VERSION': ['3.0.3'],
'COMPRESSION': ['snappy', 'lz4']
})
options = options_buf.getvalue()
c.process_msg(_Frame(version=4, flags=0, stream=0, opcode=SupportedMessage.opcode, body_offset=9, end_pos=9 + len(options)), options)
self.assertEqual(c.decompressor, locally_supported_compressions['snappy'][1])
def test_disable_compression(self, *args):
c = self.make_connection()
c._requests = {0: (c._handle_options_response, ProtocolHandler.decode_message)}
c.defunct = Mock()
# disable compression
c.compression = False
locally_supported_compressions.pop('lz4', None)
locally_supported_compressions.pop('snappy', None)
locally_supported_compressions['lz4'] = ('lz4compress', 'lz4decompress')
locally_supported_compressions['snappy'] = ('snappycompress', 'snappydecompress')
# read in a SupportedMessage response
header = self.make_header_prefix(SupportedMessage)
# the server only supports snappy
options_buf = BytesIO()
write_stringmultimap(options_buf, {
'CQL_VERSION': ['3.0.3'],
'COMPRESSION': ['snappy', 'lz4']
})
options = options_buf.getvalue()
c.process_msg(_Frame(version=4, flags=0, stream=0, opcode=SupportedMessage.opcode, body_offset=9, end_pos=9 + len(options)), options)
self.assertEqual(c.decompressor, None)
def test_not_implemented(self):
"""
Ensure the following methods raise NotImplementedError. If not, come back and test them.
"""
c = self.make_connection()
self.assertRaises(NotImplementedError, c.close)
def test_set_keyspace_blocking(self):
c = self.make_connection()
self.assertEqual(c.keyspace, None)
c.set_keyspace_blocking(None)
self.assertEqual(c.keyspace, None)
c.keyspace = 'ks'
c.set_keyspace_blocking('ks')
self.assertEqual(c.keyspace, 'ks')
def test_set_connection_class(self):
cluster = Cluster(connection_class='test')
self.assertEqual('test', cluster.connection_class)
@patch('cassandra.connection.ConnectionHeartbeat._raise_if_stopped')
class ConnectionHeartbeatTest(unittest.TestCase):
@staticmethod
def make_get_holders(count):
holders = []
for _ in range(count):
holder = Mock()
holder.get_connections = Mock(return_value=[])
holders.append(holder)
get_holders = Mock(return_value=holders)
return get_holders
def run_heartbeat(self, get_holders_fun, count=2, interval=0.05):
ch = ConnectionHeartbeat(interval, get_holders_fun)
time.sleep(interval * count)
ch.stop()
self.assertTrue(get_holders_fun.call_count)
def test_empty_connections(self, *args):
count = 3
get_holders = self.make_get_holders(1)
self.run_heartbeat(get_holders, count)
self.assertGreaterEqual(get_holders.call_count, count - 1) # lower bound to account for thread spinup time
self.assertLessEqual(get_holders.call_count, count)
holder = get_holders.return_value[0]
holder.get_connections.assert_has_calls([call()] * get_holders.call_count)
def test_idle_non_idle(self, *args):
request_id = 999
# connection.send_msg(OptionsMessage(), connection.get_request_id(), self._options_callback)
def send_msg(msg, req_id, msg_callback):
msg_callback(SupportedMessage([], {}))
idle_connection = Mock(spec=Connection, host='localhost',
max_request_id=127,
lock=Lock(),
in_flight=0, is_idle=True,
is_defunct=False, is_closed=False,
get_request_id=lambda: request_id,
send_msg=Mock(side_effect=send_msg))
non_idle_connection = Mock(spec=Connection, in_flight=0, is_idle=False, is_defunct=False, is_closed=False)
get_holders = self.make_get_holders(1)
holder = get_holders.return_value[0]
holder.get_connections.return_value.append(idle_connection)
holder.get_connections.return_value.append(non_idle_connection)
self.run_heartbeat(get_holders)
holder.get_connections.assert_has_calls([call()] * get_holders.call_count)
self.assertEqual(idle_connection.in_flight, 0)
self.assertEqual(non_idle_connection.in_flight, 0)
idle_connection.send_msg.assert_has_calls([call(ANY, request_id, ANY)] * get_holders.call_count)
self.assertEqual(non_idle_connection.send_msg.call_count, 0)
def test_closed_defunct(self, *args):
get_holders = self.make_get_holders(1)
closed_connection = Mock(spec=Connection, in_flight=0, is_idle=False, is_defunct=False, is_closed=True)
defunct_connection = Mock(spec=Connection, in_flight=0, is_idle=False, is_defunct=True, is_closed=False)
holder = get_holders.return_value[0]
holder.get_connections.return_value.append(closed_connection)
holder.get_connections.return_value.append(defunct_connection)
self.run_heartbeat(get_holders)
holder.get_connections.assert_has_calls([call()] * get_holders.call_count)
self.assertEqual(closed_connection.in_flight, 0)
self.assertEqual(defunct_connection.in_flight, 0)
self.assertEqual(closed_connection.send_msg.call_count, 0)
self.assertEqual(defunct_connection.send_msg.call_count, 0)
def test_no_req_ids(self, *args):
in_flight = 3
get_holders = self.make_get_holders(1)
max_connection = Mock(spec=Connection, host='localhost',
lock=Lock(),
max_request_id=in_flight, in_flight=in_flight,
is_idle=True, is_defunct=False, is_closed=False)
holder = get_holders.return_value[0]
holder.get_connections.return_value.append(max_connection)
self.run_heartbeat(get_holders)
holder.get_connections.assert_has_calls([call()] * get_holders.call_count)
self.assertEqual(max_connection.in_flight, in_flight)
self.assertEqual(max_connection.send_msg.call_count, 0)
max_connection.defunct.assert_has_calls([call(ANY)] * get_holders.call_count)
holder.return_connection.assert_has_calls([call(max_connection)] * get_holders.call_count)
def test_unexpected_response(self, *args):
request_id = 999
get_holders = self.make_get_holders(1)
def send_msg(msg, req_id, msg_callback):
msg_callback(object())
connection = Mock(spec=Connection, host='localhost',
max_request_id=127,
lock=Lock(),
in_flight=0, is_idle=True,
is_defunct=False, is_closed=False,
get_request_id=lambda: request_id,
send_msg=Mock(side_effect=send_msg))
holder = get_holders.return_value[0]
holder.get_connections.return_value.append(connection)
self.run_heartbeat(get_holders)
self.assertEqual(connection.in_flight, get_holders.call_count)
connection.send_msg.assert_has_calls([call(ANY, request_id, ANY)] * get_holders.call_count)
connection.defunct.assert_has_calls([call(ANY)] * get_holders.call_count)
exc = connection.defunct.call_args_list[0][0][0]
self.assertIsInstance(exc, Exception)
self.assertEqual(exc.args, Exception('Connection heartbeat failure').args)
holder.return_connection.assert_has_calls([call(connection)] * get_holders.call_count)
def test_timeout(self, *args):
request_id = 999
get_holders = self.make_get_holders(1)
def send_msg(msg, req_id, msg_callback):
pass
connection = Mock(spec=Connection, host='localhost',
max_request_id=127,
lock=Lock(),
in_flight=0, is_idle=True,
is_defunct=False, is_closed=False,
get_request_id=lambda: request_id,
send_msg=Mock(side_effect=send_msg))
holder = get_holders.return_value[0]
holder.get_connections.return_value.append(connection)
self.run_heartbeat(get_holders)
self.assertEqual(connection.in_flight, get_holders.call_count)
connection.send_msg.assert_has_calls([call(ANY, request_id, ANY)] * get_holders.call_count)
connection.defunct.assert_has_calls([call(ANY)] * get_holders.call_count)
exc = connection.defunct.call_args_list[0][0][0]
self.assertIsInstance(exc, Exception)
self.assertEqual(exc.args, Exception('Connection heartbeat failure').args)
holder.return_connection.assert_has_calls([call(connection)] * get_holders.call_count)
|
|
# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Synthesizer Attention modules."""
from collections.abc import Iterable # pylint: disable=g-importing-member
from absl import logging
from flax import nn
from flax.nn.attention import _CacheEntry
from flax.nn.attention import _make_causal_mask
from flax.nn.attention import Cache
from flax.nn.attention import make_padding_mask
from flax.nn.stochastic import make_rng
import jax
from jax import lax
from jax import random
import jax.numpy as jnp
import numpy as onp
def synthetic_attention(query,
key,
value,
synthetic,
dtype=jnp.float32,
bias=None,
axis=None,
broadcast_dropout=True,
dropout_rng=None,
dropout_rate=0.,
deterministic=False,
precision=None,
ignore_dot_product=False):
"""Computes dot-product attention given query, key, and value.
Supports additional synthetic weights mixture.
This is the core function for applying attention based on
https://arxiv.org/abs/1706.03762. It calculates the attention weights given
query and key and combines the values using the attention weights. This
function supports multi-dimensional inputs.
Args:
query: queries for calculating attention with shape of `[batch_size, dim1,
dim2, ..., dimN, num_heads, mem_channels]`.
key: keys for calculating attention with shape of `[batch_size, dim1, dim2,
..., dimN, num_heads, mem_channels]`.
value: values to be used in attention with shape of `[batch_size, dim1,
dim2,..., dimN, num_heads, value_channels]`.
synthetic: list of synthetic attention weight matrices, each of shape [len, len].
dtype: the dtype of the computation (default: float32)
bias: bias for the attention weights. This can be used for incorporating
autoregressive mask, padding mask, proximity bias.
axis: axes over which the attention is applied.
broadcast_dropout: bool: use a broadcasted dropout along batch dims.
dropout_rng: JAX PRNGKey: to be used for dropout
dropout_rate: dropout rate
deterministic: bool, deterministic or not (to apply dropout)
precision: numerical precision of the computation see `jax.lax.Precision`
for details.
ignore_dot_product: bool, to ignore dot product or not.
Returns:
Output of shape `[bs, dim1, dim2, ..., dimN, num_heads, value_channels]`.
"""
if axis is None:
axis = tuple(range(1, key.ndim - 2))
if not isinstance(axis, Iterable):
axis = (axis,)
if not ignore_dot_product:
assert key.shape[:-1] == value.shape[:-1]
assert (query.shape[0:1] == key.shape[0:1] and
query.shape[-1] == key.shape[-1])
assert key.ndim == query.ndim
assert key.ndim == value.ndim
for ax in axis:
if not (query.ndim >= 3 and 1 <= ax < query.ndim - 2):
raise ValueError('Attention axis must be between the batch '
'axis and the last-two axes.')
depth = query.shape[-1]
n = key.ndim
# batch_dims is <bs, <non-attention dims>, num_heads>
batch_dims = tuple(onp.delete(range(n), axis + (n - 1,)))
# q & k -> (bs, <non-attention dims>, num_heads, <attention dims>, channels)
qk_perm = batch_dims + axis + (n - 1,)
key = key.transpose(qk_perm)
query = query.transpose(qk_perm)
# v -> (bs, <non-attention dims>, num_heads, channels, <attention dims>)
v_perm = batch_dims + (n - 1,) + axis
value = value.transpose(v_perm)
query = query / jnp.sqrt(depth).astype(dtype)
batch_dims_t = tuple(range(len(batch_dims)))
attn_weights = lax.dot_general(
query,
key, (((n - 1,), (n - 1,)), (batch_dims_t, batch_dims_t)),
precision=precision)
else:
n = key.ndim
batch_dims = tuple(onp.delete(range(n), axis + (n - 1,)))
v_perm = batch_dims + (n - 1,) + axis
qk_perm = batch_dims + axis + (n - 1,)
value = value.transpose(v_perm)
batch_dims_t = tuple(range(len(batch_dims)))
attn_weights = 0
if synthetic:
# add synthetic attention
for syn_weights in synthetic:
attn_weights += syn_weights
# apply attention bias: masking, dropout, proximity bias, etc.
if bias is not None:
attn_weights = attn_weights + bias
# normalize the attention weights
norm_dims = tuple(range(attn_weights.ndim - len(axis), attn_weights.ndim))
attn_weights = jax.nn.softmax(attn_weights, axis=norm_dims)
attn_weights = attn_weights.astype(dtype)
# apply dropout
if not deterministic and dropout_rate > 0.:
if dropout_rng is None:
dropout_rng = make_rng()
keep_prob = jax.lax.tie_in(attn_weights, 1.0 - dropout_rate)
if broadcast_dropout:
# dropout is broadcast across the batch+head+non-attention dimension
dropout_dims = attn_weights.shape[-(2 * len(axis)):]
dropout_shape = (tuple([1] * len(batch_dims_t)) + dropout_dims)
keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape)
else:
keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape)
multiplier = (keep.astype(attn_weights.dtype) /
jnp.asarray(keep_prob, dtype=dtype))
attn_weights = attn_weights * multiplier
# compute the new values given the attention weights
wv_contracting_dims = (norm_dims, range(value.ndim - len(axis), value.ndim))
y = lax.dot_general(
attn_weights,
value, (wv_contracting_dims, (batch_dims_t, batch_dims_t)),
precision=precision)
# back to (bs, dim1, dim2, ..., dimN, num_heads, channels)
perm_inv = _invert_perm(qk_perm)
y = y.transpose(perm_inv)
return y
def _invert_perm(perm):
perm_inv = [0] * len(perm)
for i, j in enumerate(perm):
perm_inv[j] = i
return tuple(perm_inv)
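# Example (hedged sketch): pure synthetic attention on toy shapes, skipping the
# dot product and dropout. With value of shape (bs, len, heads, ch) and one
# synthetic matrix already broadcast to (bs, heads, len, len):
#
#   rng = random.PRNGKey(0)
#   value = random.normal(rng, (2, 8, 4, 16))
#   syn = random.normal(rng, (2, 4, 8, 8))
#   out = synthetic_attention(query=value, key=value, value=value,
#                             synthetic=[syn], deterministic=True,
#                             ignore_dot_product=True)
#   # out.shape == (2, 8, 4, 16)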
class SynthesizerAttention(nn.Module):
"""Multi-head Synthesizer Architecture."""
def apply(self,
inputs_q,
inputs_kv,
num_heads,
dtype=jnp.float32,
qkv_features=None,
out_features=None,
attention_axis=None,
causal_mask=False,
padding_mask=None,
key_padding_mask=None,
segmentation=None,
key_segmentation=None,
cache=None,
broadcast_dropout=True,
dropout_rng=None,
dropout_rate=0.,
deterministic=False,
precision=None,
kernel_init=nn.linear.default_kernel_init,
bias_init=nn.initializers.zeros,
bias=True,
max_length=512,
ignore_dot_product=True,
synthesizer_mode='factorized_random',
k=32):
"""Applies multi-head synthesizer attention on the input data.
Projects the inputs into multi-headed query, key, and value vectors,
applies dot-product attention and project the results to an output vector.
This can be used for encoder-decoder attention by specifying both `inputs_q`
and `inputs_kv`, or for self-attention by only specifying `inputs_q` and
setting `inputs_kv` to None.
Args:
inputs_q: input queries of shape `[bs, dim1, dim2, ..., dimN, features]`.
inputs_kv: key/values of shape `[bs, dim1, dim2, ..., dimN, features]`
or None for self-attention, in which case key/values will be derived
from inputs_q.
num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])
should be divisible by the number of heads.
dtype: the dtype of the computation (default: float32)
qkv_features: dimension of the key, query, and value.
out_features: dimension of the last projection
attention_axis: axes over which the attention is applied ( 'None' means
attention over all axes, but batch, heads, and features).
causal_mask: boolean specifying whether to apply a causal mask on the
attention weights. If True, the output at timestep `t` will not depend
on inputs at timesteps strictly greater than `t`.
padding_mask: boolean specifying query tokens that are pad token.
key_padding_mask: boolean specifying key-value tokens that are pad token.
segmentation: segment indices for packed inputs_q data.
key_segmentation: segment indices for packed inputs_kv data.
cache: an instance of `flax.nn.attention.Cache` used for efficient
autoregressive decoding.
broadcast_dropout: bool: use a broadcasted dropout along batch dims.
dropout_rng: JAX PRNGKey: to be used for dropout
dropout_rate: dropout rate
deterministic: bool, deterministic or not (to apply dropout)
precision: numerical precision of the computation see `jax.lax.Precision`
for details.
kernel_init: initializer for the kernel of the Dense layers.
bias_init: initializer for the bias of the Dense layers.
bias: bool: whether pointwise QKVO dense transforms use bias.
max_length: int, the maximum supported sequence length.
ignore_dot_product: bool, to ignore the dot product attention or not.
synthesizer_mode: str, supported modes are 'dense', 'random',
'factorized_random', or combinations such as 'dense+random'.
k: int, rank of the factorized random attention matrices.
Returns:
output of shape `[bs, dim1, dim2, ..., dimN, features]`.
"""
assert causal_mask or not cache, (
'Caching is only supported for causal attention.')
assert inputs_q.ndim == 3
if inputs_kv is None:
inputs_kv = inputs_q
if attention_axis is None:
attention_axis = tuple(range(1, inputs_q.ndim - 1))
features = out_features or inputs_q.shape[-1]
qkv_features = qkv_features or inputs_q.shape[-1]
assert qkv_features % num_heads == 0, (
'Memory dimension must be divisible by number of heads.')
head_dim = qkv_features // num_heads
dense = nn.DenseGeneral.partial(
axis=-1,
features=(num_heads, head_dim),
kernel_init=kernel_init,
bias_init=bias_init,
bias=bias,
precision=precision)
# project inputs_q to multi-headed q/k/v
# dimensions are then [bs, dims..., n_heads, n_features_per_head]
qlength = inputs_q.shape[-2]
kvlength = inputs_kv.shape[-2]
if ignore_dot_product:
value = dense(inputs_kv, dtype=dtype, name='value')
key = value
query = inputs_q
else:
query, key, value = (dense(inputs_q, dtype=dtype, name='query'),
dense(inputs_kv, dtype=dtype, name='key'),
dense(inputs_kv, dtype=dtype, name='value'))
syn_weights_list = []
logging.info(synthesizer_mode)
if 'random' in synthesizer_mode:
if 'factorized_random' in synthesizer_mode:
logging.info('Using factorized random')
rand_syn_weights1 = self.param('random1', (num_heads, max_length, k),
kernel_init)
rand_syn_weights2 = self.param('random2', (num_heads, k, max_length),
kernel_init)
rand_syn_weights1 = rand_syn_weights1[:, :qlength, :]
rand_syn_weights2 = rand_syn_weights2[:, :, :kvlength]
rand_syn_weights = jnp.einsum('hlk,hkn->hln', rand_syn_weights1,
rand_syn_weights2)
rand_syn_weights = jax.lax.broadcast(rand_syn_weights,
(inputs_q.shape[0],))
syn_weights_list.append(rand_syn_weights)
else:
rand_syn_weights = self.param('random',
(num_heads, max_length, max_length),
kernel_init)
rand_syn_weights = rand_syn_weights[:, :qlength, :kvlength]
rand_syn_weights = jax.lax.broadcast(rand_syn_weights,
(inputs_q.shape[0],))
syn_weights_list.append(rand_syn_weights)
if 'dense' in synthesizer_mode:
dense_syn = nn.DenseGeneral.partial(axis=-1,
features=(num_heads, head_dim),
kernel_init=kernel_init,
bias_init=bias_init,
bias=bias,
precision=precision,
name='dense_syn',
dtype=dtype)
# TODO(yitay): Change this to nn.Dense and make sure it works
dense_syn_length = nn.linear.DenseGeneral.partial(axis=-1,
features=max_length,
kernel_init=kernel_init,
bias_init=bias_init,
bias=bias,
precision=precision,
name='dense_syn2',
dtype=dtype)
proj = dense_syn(inputs_q, dtype=dtype, name='dense_syn')
proj = jax.nn.relu(proj)
proj = dense_syn_length(proj, dtype=dtype, name='dense_syn_len')
# TODO(yitay) check if this reshape is needed
dense_syn_weights = proj.reshape((inputs_q.shape[0], num_heads,
qlength, max_length))
dense_syn_weights = dense_syn_weights[:, :, :, :qlength]
syn_weights_list.append(dense_syn_weights)
if cache:
assert isinstance(cache, Cache), 'cache must be an instance of Cache'
if self.is_initializing():
cache.store(onp.array((key.ndim,) + key.shape[-2:], dtype=onp.int32))
else:
cache_entry = cache.retrieve(None)
expected_shape = list(cache_entry.key.shape[:-2])
for attn_dim in attention_axis:
expected_shape[attn_dim] = 1
expected_shape = tuple(expected_shape) + inputs_q.shape[-1:]
if expected_shape != inputs_q.shape:
raise ValueError('Invalid shape provided, '
'expected shape %s instead got %s.' %
(expected_shape, inputs_q.shape))
if not isinstance(cache_entry, _CacheEntry):
raise ValueError('Cache is not initialized.')
cshape = cache_entry.key.shape
indices = [0] * len(cshape)
i = cache_entry.i
attn_size = onp.prod(onp.take(cshape, attention_axis))
for attn_dim in attention_axis:
attn_size //= cshape[attn_dim]
indices[attn_dim] = i // attn_size
i = i % attn_size
key = lax.dynamic_update_slice(cache_entry.key, key, indices)
value = lax.dynamic_update_slice(cache_entry.value, value, indices)
one = jnp.array(1, jnp.uint32)
cache_entry = cache_entry.replace(i=cache_entry.i + one,
key=key,
value=value)
cache.store(cache_entry)
key_padding_mask = jnp.broadcast_to(
(jnp.arange(cshape[1]) < cache_entry.i), cshape[:2])
key_padding_mask = key_padding_mask.astype(jnp.float32)[..., None]
# create attention masks
mask_components = []
if causal_mask:
if cache and not self.is_initializing():
bias_pre_shape = (1,) * (key.ndim - 1)
attn_shape = tuple(onp.take(key.shape, attention_axis))
attn_size = onp.prod(attn_shape)
ii = jnp.arange(attn_size, dtype=jnp.uint32)
mask = ii < cache_entry.i
mask_components.append(mask.reshape(bias_pre_shape + attn_shape))
else:
mask_components.append(_make_causal_mask(key, attention_axis))
if not ignore_dot_product:
if padding_mask is not None:
if key_padding_mask is None:
key_padding_mask = padding_mask
padding_mask = make_padding_mask(
padding_mask_query=padding_mask,
padding_mask_key=key_padding_mask,
query_shape=query.shape,
key_shape=key.shape,
attention_axis=attention_axis)
mask_components.append(padding_mask)
if segmentation is not None:
if key_segmentation is None:
key_segmentation = segmentation
segmentation_mask = make_padding_mask(
padding_mask_query=segmentation,
padding_mask_key=key_segmentation,
query_shape=query.shape,
key_shape=key.shape,
attention_axis=attention_axis,
segmentation_mask=True)
mask_components.append(segmentation_mask)
if mask_components:
attention_mask = mask_components[0]
for component in mask_components[1:]:
attention_mask = jnp.logical_and(attention_mask, component)
# attention mask in the form of attention bias
attention_bias = lax.select(
attention_mask > 0, jnp.full(attention_mask.shape, 0.).astype(dtype),
jnp.full(attention_mask.shape, -1e10).astype(dtype))
else:
attention_bias = None
# apply attention
x = synthetic_attention(
query,
key,
value,
syn_weights_list,
dtype=dtype,
axis=attention_axis,
bias=attention_bias,
precision=precision,
dropout_rng=dropout_rng,
dropout_rate=dropout_rate,
broadcast_dropout=broadcast_dropout,
deterministic=deterministic,
ignore_dot_product=ignore_dot_product)
# back to the original inputs dimensions
out = nn.DenseGeneral(
x,
features=features,
axis=(-2, -1),
kernel_init=kernel_init,
bias_init=bias_init,
bias=bias,
dtype=dtype,
precision=precision,
name='out')
return out
# TODO(flax-dev): Consider refactoring MultiHeadDotProductAttention and moving
# causal_mask and cache support into this class instead.
SynthesizerSelfAttention = SynthesizerAttention.partial(inputs_kv=None)
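# Illustrative usage sketch (not part of the original module). It assumes the
# pre-Linen `flax.nn` Module API, where `Module.init(rng, *inputs, **kwargs)`
# returns `(output, params)`; the shapes and hyperparameters below are
# invented for the example.
def _synthesizer_self_attention_example():
  rng = jax.random.PRNGKey(0)
  x = jnp.ones((2, 16, 64))  # (batch, length, features)
  out, params = SynthesizerSelfAttention.init(
      rng, x, num_heads=4, max_length=16,
      synthesizer_mode='factorized_random')
  assert out.shape == x.shape  # attention preserves the input shape
  return out, params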
|
|
from __future__ import absolute_import
import datetime
import unittest
from django.test import TransactionTestCase
from django.db import connection, DatabaseError, IntegrityError
from django.db.models.fields import IntegerField, TextField, CharField, SlugField
from django.db.models.fields.related import ManyToManyField, ForeignKey
from django.db.transaction import atomic
from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename, UniqueTest
class SchemaTests(TransactionTestCase):
"""
Tests that the schema-alteration code works correctly.
Be aware that these tests are more liable than most to false results,
as sometimes the code to check if a test has worked is almost as complex
as the code it is testing.
"""
available_apps = []
models = [Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename, UniqueTest]
# Utility functions
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
cursor = connection.cursor()
connection.disable_constraint_checking()
table_names = connection.introspection.table_names(cursor)
for model in self.models:
# Remove any M2M tables first
for field in model._meta.local_many_to_many:
with atomic():
tbl = field.rel.through._meta.db_table
if tbl in table_names:
cursor.execute(connection.schema_editor().sql_delete_table % {
"table": connection.ops.quote_name(tbl),
})
table_names.remove(tbl)
# Then remove the main tables
with atomic():
tbl = model._meta.db_table
if tbl in table_names:
cursor.execute(connection.schema_editor().sql_delete_table % {
"table": connection.ops.quote_name(tbl),
})
table_names.remove(tbl)
connection.enable_constraint_checking()
def column_classes(self, model):
cursor = connection.cursor()
columns = dict(
(d[0], (connection.introspection.get_field_type(d[1], d), d))
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
)
# SQLite has a different format for field_type
for name, (type, desc) in columns.items():
if isinstance(type, tuple):
columns[name] = (type[0], desc)
# SQLite also doesn't error properly
if not columns:
raise DatabaseError("Table does not exist (empty pragma)")
return columns
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Check that it's there
list(Author.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Author)
# Check that it's gone
self.assertRaises(
DatabaseError,
lambda: list(Author.objects.all()),
)
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_fk(self):
"Tests that creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Check that initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
new_field = ForeignKey(Tag)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(
Book,
Book._meta.get_field_by_name("author")[0],
new_field,
strict=True,
)
# Make sure the new FK constraint is present
constraints = connection.introspection.get_constraints(connection.cursor(), Book._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["author_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_tag', 'id'))
break
else:
self.fail("No FK constraint for author_id found")
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add an age field as a nullable IntegerField
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
with connection.schema_editor() as editor:
editor.add_field(
Author,
new_field,
)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['age'][0], "IntegerField")
self.assertEqual(columns['age'][1][6], True)
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
# Alter the name field to a TextField
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(
Author,
Author._meta.get_field_by_name("name")[0],
new_field,
strict=True,
)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(columns['name'][1][6], True)
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(
Author,
new_field,
new_field2,
strict=True,
)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(bool(columns['name'][1][6]), False)
def test_rename(self):
"""
Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertNotIn("display_name", columns)
# Alter the name field's name
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(
Author,
Author._meta.get_field_by_name("name")[0],
new_field,
strict=True,
)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['display_name'][0], "CharField")
self.assertNotIn("name", columns)
def test_m2m_create(self):
"""
Tests M2M fields on models during creation
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(BookWithM2M)
# Ensure there is now an m2m table there
columns = self.column_classes(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
def test_m2m(self):
"""
Tests adding/removing M2M fields on models
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = ManyToManyField("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(AuthorWithM2M, "tags")
try:
# Ensure there's no m2m table there
self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
# Add the field
with connection.schema_editor() as editor:
editor.add_field(
AuthorWithM2M,
new_field,
)
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.rel.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(
AuthorWithM2M,
new_field,
)
# Ensure there's no m2m table there
self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
finally:
# Cleanup model states
AuthorWithM2M._meta.local_many_to_many.remove(new_field)
def test_m2m_repoint(self):
"""
Tests repointing M2M fields
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
constraints = connection.introspection.get_constraints(connection.cursor(), BookWithM2M._meta.get_field_by_name("tags")[0].rel.through._meta.db_table)
if connection.features.supports_foreign_keys:
for name, details in constraints.items():
if details['columns'] == ["tagm2mtest_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_tagm2mtest', 'id'))
break
else:
self.fail("No FK constraint for tagm2mtest_id found")
# Repoint the M2M
new_field = ManyToManyField(UniqueTest)
new_field.contribute_to_class(BookWithM2M, "uniques")
try:
with connection.schema_editor() as editor:
editor.alter_field(
BookWithM2M,
BookWithM2M._meta.get_field_by_name("tags")[0],
new_field,
)
# Ensure old M2M is gone
self.assertRaises(DatabaseError, self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through)
# Ensure the new M2M exists and points to UniqueTest
constraints = connection.introspection.get_constraints(connection.cursor(), new_field.rel.through._meta.db_table)
if connection.features.supports_foreign_keys:
for name, details in constraints.items():
if details['columns'] == ["uniquetest_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id'))
break
else:
self.fail("No FK constraint for uniquetest_id found")
finally:
# Cleanup model states
BookWithM2M._meta.local_many_to_many.remove(new_field)
del BookWithM2M._meta._m2m_cache
@unittest.skipUnless(connection.features.supports_check_constraints, "No check constraints")
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the constraint exists
constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
break
else:
self.fail("No check constraint for height found")
# Alter the column to remove it
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(
Author,
Author._meta.get_field_by_name("height")[0],
new_field,
strict=True,
)
constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
self.fail("Check constraint for height found")
# Alter the column to re-add it
with connection.schema_editor() as editor:
editor.alter_field(
Author,
new_field,
Author._meta.get_field_by_name("height")[0],
strict=True,
)
constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
break
else:
self.fail("No check constraint for height found")
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(
Tag,
Tag._meta.get_field_by_name("slug")[0],
new_field,
strict=True,
)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_new_field = SlugField(unique=True)
new_new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(
Tag,
new_field,
new_new_field,
strict=True,
)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(
Tag,
Tag._meta.get_field_by_name("slug")[0],
TagUniqueRename._meta.get_field_by_name("slug2")[0],
strict=True,
)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest,
UniqueTest._meta.unique_together,
[],
)
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest,
[],
UniqueTest._meta.unique_together,
)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure there's no index on the slug/title columns first
self.assertEqual(
False,
any(
c["index"]
for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(
Tag,
[],
[("slug", "title")],
)
# Ensure there is now an index
self.assertEqual(
True,
any(
c["index"]
for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(
Tag,
[("slug", "title")],
[],
)
# Ensure there's no index
self.assertEqual(
False,
any(
c["index"]
for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
def test_create_index_together(self):
"""
Tests creating models with index_together already defined
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
# Ensure there is an index
self.assertEqual(
True,
any(
c["index"]
for c in connection.introspection.get_constraints(connection.cursor(), "schema_tagindexed").values()
if c['columns'] == ["slug", "title"]
),
)
def test_db_table(self):
"""
Tests renaming of the table
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Alter the table
with connection.schema_editor() as editor:
editor.alter_db_table(
Author,
"schema_author",
"schema_otherauthor",
)
# Ensure the table is there afterwards
Author._meta.db_table = "schema_otherauthor"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Alter the table again
with connection.schema_editor() as editor:
editor.alter_db_table(
Author,
"schema_otherauthor",
"schema_author",
)
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
def test_indexes(self):
"""
Tests creation/altering of indexes
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
)
# Alter to remove the index
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(
Book,
Book._meta.get_field_by_name("title")[0],
new_field,
strict=True,
)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
)
# Alter to re-add the index
with connection.schema_editor() as editor:
editor.alter_field(
Book,
new_field,
Book._meta.get_field_by_name("title")[0],
strict=True,
)
# Ensure the table is there and has the index again
self.assertIn(
"title",
connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
)
# Add a unique column, verify that creates an implicit index
with connection.schema_editor() as editor:
editor.add_field(
Book,
BookWithSlug._meta.get_field_by_name("slug")[0],
)
self.assertIn(
"slug",
connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
)
# Remove the unique, check the index goes with it
new_field2 = CharField(max_length=20, unique=False)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(
BookWithSlug,
BookWithSlug._meta.get_field_by_name("slug")[0],
new_field2,
strict=True,
)
self.assertNotIn(
"slug",
connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
)
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertTrue(
connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table)['id']['primary_key'],
)
# Alter to change the PK
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.remove_field(Tag, Tag._meta.get_field_by_name("id")[0])
editor.alter_field(
Tag,
Tag._meta.get_field_by_name("slug")[0],
new_field,
)
# Ensure the PK changed
self.assertNotIn(
'id',
connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table),
)
self.assertTrue(
connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table)['slug']['primary_key'],
)
def test_context_manager_exit(self):
"""
Ensures transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor() as editor:
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
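# The for/else scans over get_constraints(...) above all follow the same
# pattern; a helper along these lines (illustrative only, the tests inline it
# instead) captures it:
def find_constraint(table, columns, kind):
    """
    Returns (name, details) for the first constraint on `table` covering
    exactly `columns` whose `kind` flag ('foreign_key', 'check', 'index')
    is truthy, or None if no such constraint exists.
    """
    constraints = connection.introspection.get_constraints(
        connection.cursor(), table)
    for name, details in constraints.items():
        if details['columns'] == columns and details[kind]:
            return name, details
    return None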
|
|
"""
Generate allele sequences for pan-class I models.
Additional dependency: biopython
"""
from __future__ import print_function
import sys
import argparse
import numpy
import pandas
import Bio.SeqIO # pylint: disable=import-error
from mhcflurry.common import normalize_allele_name
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
"aligned_fasta",
help="Aligned sequences")
parser.add_argument(
"--recapitulate-sequences",
required=True,
help="CSV giving sequences to recapitulate")
parser.add_argument(
"--differentiate-alleles",
help="File listing alleles to differentiate using additional positions")
parser.add_argument(
"--out-csv",
help="Result file")
def normalize_allele_name_optional(s):
return normalize_allele_name(s, raise_on_error=False)
def run():
args = parser.parse_args(sys.argv[1:])
print(args)
allele_to_sequence = {}
reader = Bio.SeqIO.parse(args.aligned_fasta, "fasta")
for record in reader:
name = record.description.split()[1]
print(record.name, record.description)
allele_to_sequence[name] = str(record.seq)
print("Read %d aligned sequences" % len(allele_to_sequence))
allele_sequences = pandas.Series(allele_to_sequence).to_frame()
allele_sequences.columns = ['aligned']
allele_sequences['aligned'] = allele_sequences['aligned'].str.replace(
"-", "X")
allele_sequences['normalized_allele'] = allele_sequences.index.map(
normalize_allele_name_optional)
allele_sequences = allele_sequences.dropna().set_index("normalized_allele", drop=True)
selected_positions = []
recapitulate_df = pandas.read_csv(args.recapitulate_sequences)
recapitulate_df["normalized_allele"] = recapitulate_df.allele.map(normalize_allele_name_optional)
recapitulate_df = (
recapitulate_df
.dropna()
.drop_duplicates("normalized_allele")
.set_index("normalized_allele", drop=True))
allele_sequences["recapitulate_target"] = recapitulate_df.iloc[:,-1]
print("Sequences in recapitulate CSV that are not in aligned fasta:")
print(recapitulate_df.index[
~recapitulate_df.index.isin(allele_sequences.index)
].tolist())
allele_sequences_with_target = allele_sequences.loc[
~allele_sequences.recapitulate_target.isnull()
]
position_identities = []
target_length = int(
allele_sequences_with_target.recapitulate_target.str.len().max())
for i in range(target_length):
series_i = allele_sequences_with_target.recapitulate_target.str.get(i)
row = []
full_length_sequence_length = int(
allele_sequences_with_target.aligned.str.len().max())
for k in range(full_length_sequence_length):
series_k = allele_sequences_with_target.aligned.str.get(k)
row.append((series_i == series_k).mean())
position_identities.append(row)
position_identities = pandas.DataFrame(numpy.array(position_identities))
selected_positions = position_identities.idxmax(1).tolist()
fractions = position_identities.max(1)
print("Selected positions: ", *selected_positions)
print("Lowest concordance fraction: %0.5f" % fractions.min())
assert fractions.min() > 0.99
allele_sequences["recapitulated"] = allele_sequences.aligned.map(
lambda s: "".join(s[p] for p in selected_positions))
allele_sequences_with_target = allele_sequences.loc[
~allele_sequences.recapitulate_target.isnull()
]
agreement = (
allele_sequences_with_target.recapitulated ==
allele_sequences_with_target.recapitulate_target).mean()
print("Overall agreement: %0.5f" % agreement)
assert agreement > 0.9
# Add additional positions
additional_positions = []
if args.differentiate_alleles:
differentiate_alleles = pandas.read_csv(
args.differentiate_alleles).iloc[:, 0].values
print(
"Read %d alleles to differentiate:" % len(differentiate_alleles),
differentiate_alleles)
to_differentiate = allele_sequences.loc[
allele_sequences.index.isin(differentiate_alleles)
].copy()
print(to_differentiate.shape)
additional_positions = []
# Greedy search, looking ahead 3 positions at a time.
possible_additional_positions = set()
for (_, sub_df) in to_differentiate.groupby("recapitulated"):
if sub_df.aligned.nunique() > 1:
differing = pandas.DataFrame(
dict([(pos, chars) for (pos, chars) in
enumerate(zip(*sub_df.aligned.values)) if
any(c != chars[0] for c in chars) and "X" not in chars])).T
possible_additional_positions.update(differing.index.values)
def disambiguation_score(sequences):
counts = pandas.Series(sequences, copy=False).value_counts()
score = -1 * (counts[counts > 1] - 1).sum()
return score
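# e.g. disambiguation_score(["AA", "AA", "AB"]) == -1: the ambiguous
# class {"AA": 2} contributes (2 - 1) = 1, negated so that fewer
# collisions give a higher (less negative) score.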
possible_additional_positions = sorted(possible_additional_positions)
current_sequences = to_differentiate.recapitulated
while current_sequences.value_counts().max() > 1:
to_differentiate["equivalence_class_size"] = (
current_sequences.map(current_sequences.value_counts())
)
print("Ambiguous alleles", " ".join(
to_differentiate.loc[
to_differentiate.equivalence_class_size > 1
].index))
position1s = []
position2s = []
position3s = []
negative_position1_distances = []
possible_additional_positions_scores = []
position1_scores = []
for position1 in possible_additional_positions:
new_sequence1 = (
current_sequences +
to_differentiate.aligned.str.get(position1))
negative_position1_distance = -1 * min(
abs(position1 - selected) for selected in selected_positions)
position1_score = disambiguation_score(new_sequence1)
for (i, position2) in enumerate(possible_additional_positions):
new_sequence2 = (
new_sequence1 +
to_differentiate.aligned.str.get(position2))
for position3 in possible_additional_positions:
new_sequence3 = (
new_sequence2 +
to_differentiate.aligned.str.get(position3))
score = disambiguation_score(new_sequence3)
position1s.append(position1)
position2s.append(position2)
position3s.append(position3)
possible_additional_positions_scores.append(score)
negative_position1_distances.append(
negative_position1_distance)
position1_scores.append(position1_score)
scores_df = pandas.DataFrame({
"position1": position1s,
"position2": position2s,
"position3": position3s,
"negative_position1_distance": negative_position1_distances,
"tuple_score": possible_additional_positions_scores,
"position1_score": position1_scores,
}).sort_values(
["tuple_score", "position1_score", "negative_position1_distance"],
ascending=False)
print(scores_df)
selected_additional_position = scores_df.iloc[0].position1
print("Selected additional position", selected_additional_position)
additional_positions.append(selected_additional_position)
current_sequences = (
current_sequences +
to_differentiate.aligned.str.get(
selected_additional_position))
possible_additional_positions.remove(selected_additional_position)
additional_positions = sorted(set(additional_positions))
print(
"Selected %d additional positions: " % len(additional_positions),
additional_positions)
extended_selected_positions = sorted(
set(selected_positions).union(set(additional_positions)))
print(
"Extended selected positions (%d)" % len(extended_selected_positions),
*extended_selected_positions)
allele_sequences["sequence"] = allele_sequences.aligned.map(
lambda s: "".join(s[p] for p in extended_selected_positions))
allele_sequences[["sequence"]].to_csv(args.out_csv, index=True)
print("Wrote: %s" % args.out_csv)
if __name__ == '__main__':
run()
|
|
import six
import theano
import theano.tensor as T
from .graph import TreeanoGraph
from .update_deltas import UpdateDeltas
from .variable import VariableWrapper
from .. import utils
class MissingHyperparameter(Exception):
pass
class Network(object):
"""
contains the state of multiple nodes
"""
def __init__(self,
root_node,
override_hyperparameters=None,
default_hyperparameters=None):
self.root_node = root_node
self.node_state = {}
self.update_deltas = UpdateDeltas()
self.override_hyperparameters = dict()
self.default_hyperparameters = dict(
batch_axis=0,
deterministic=False,
monitor=True,
)
if override_hyperparameters is not None:
self.override_hyperparameters.update(override_hyperparameters)
if default_hyperparameters is not None:
self.default_hyperparameters.update(default_hyperparameters)
@property
def is_built(self):
return hasattr(self, "graph")
def build(self):
"""
initialize network state
"""
# make building idempotent
# ---
# this allows building to be lazy. this way, we don't have to
# do all the work until it is needed
# example use case: sequentially applying several transforms to a
# network
if self.is_built:
return
self.graph = TreeanoGraph(self.root_node)
# set node state for each node to be empty
# ---
# order doesn't matter
for node in self.graph.architectural_tree_nodes_root_to_leaves():
node_state = {}
# initialize some parts of node_state
node_state["current_variables"] = {}
node_state["original_variables"] = {}
node_state["additional_data"] = {}
node_state["set_hyperparameters"] = {}
self.node_state[node.name] = node_state
# initialize long range dependencies
# ---
# order doesn't matter
# done before init_state because some nodes need to know their inputs
for node in self.graph.architectural_tree_nodes_root_to_leaves():
node.init_long_range_dependencies(self.relative_network(node))
# initialize state
# ---
# outer nodes have their state initialized
# before inner nodes - this is important for sequential nodes, since
# the first child will depend on the input of the sequential node, and
# we would like to make that dependency explicit
for node in self.graph.architectural_tree_nodes_root_to_leaves():
node.init_state(self.relative_network(node))
# freeze computation graph
# ---
# if a node changes the computation graph while traversing it,
# there is a chance that the relevant nodes have already been processed
# thus being a likely source of error
self.graph.is_mutable = False
# compute and store outputs
# ---
# compute in the order of the computation DAG, so that all
# dependencies have been computed for each node by the time
# computation for the node has to occur
for node in self.graph.computation_graph_nodes_topological():
rel_network = self.relative_network(node)
# get input keys
input_keys = node.get_input_keys(rel_network)
# lookup input variables
inputs = []
for input_key in input_keys:
# find which node our input comes from, and the name of
# the variable containing the input
node_name, from_key = self.graph.input_edge_for_node(node.name,
input_key)
inputs.append(self[node_name].get_variable(from_key))
# store input variables for the node
# ---
# there is no immediate reason to do so, but doing it just in case
# for now
rel_network.store_inputs(dict(zip(input_keys, inputs)))
# compute outputs
output_res = node.compute_output(rel_network, *inputs)
# sanity check to make sure no user accidentally returns a value
# instead of creating a variable
assert output_res is None
# compute updates
# ---
# compute from top (root) to bottom (leaves) so that low levels
# of the tree (ie. more specific update rules) can overwrite / mutate
# the update rules from higher levels of the tree (ie. more general
# update rules)
for node in self.graph.architectural_tree_nodes_root_to_leaves():
node.mutate_update_deltas(self.relative_network(node),
self.update_deltas)
def relative_network(self, node=None):
"""
returns a network relative to a single node
"""
self.build()
if node is None:
node = self.root_node
return RelativeNetwork(self, node)
def __contains__(self, node_name):
"""
sugar for checking if a node name is in the graph
"""
self.build()
return node_name in self.graph.name_to_node
def __getitem__(self, node_name):
"""
sugar for accessing nodes in a graph
"""
self.build()
node = self.graph.name_to_node[node_name]
return self.relative_network(node)
def network_variable(self, query):
"""
converts node names into their corresponding theano variables,
with optional keys of which of the node's outputs to use
eg.
network.network_variable("input")
network.network_variable(("fc1", "W"))
network.network_variable(var) # no-op
"""
if isinstance(query, six.string_types):
node_name = query
from_key = "default"
elif isinstance(query, tuple):
node_name, from_key = query
else:
# this should be a theano variable
return query
return self[node_name].get_variable(from_key).variable
def function(self,
inputs,
outputs=None,
include_updates=False,
updates=None,
givens=None,
**kwargs):
"""
wrapper around theano.function that allows reference node outputs
with strings
example:
network.function(["input_node"], ["fc_node", "loss", ("conv1", "W")])
"""
self.build()
if outputs is None:
outputs = []
assert isinstance(inputs, list)
assert isinstance(outputs, list)
if include_updates:
# combine update_deltas with manually specified updates
if updates is None:
all_deltas = self.update_deltas
else:
extra_updates = UpdateDeltas.from_updates(updates)
all_deltas = self.update_deltas + extra_updates
# convert into format expected by theano.function
updates = all_deltas.to_updates()
transformed_inputs = [self.network_variable(i) for i in inputs]
transformed_outputs = [self.network_variable(i) for i in outputs]
if givens is None:
tmp_givens = []
elif isinstance(givens, dict):
tmp_givens = list(givens.items())
elif isinstance(givens, (list, tuple)):
tmp_givens = list(givens)
transformed_givens = [(self.network_variable(k), v)
for k, v in tmp_givens]
fn = theano.function(inputs=transformed_inputs,
outputs=transformed_outputs,
updates=updates,
givens=transformed_givens,
**kwargs)
return fn
def is_relative(self):
return False
class NoDefaultValue(object):
pass
class RelativeNetwork(object):
"""
network relative to a single node
"""
def __init__(self, network, node):
self._network = network
self._node = node
self._name = node.name
self._state = self._network.node_state[self._name]
def __getattr__(self, name):
"""
by default, behave like the non-relative network
"""
return getattr(self._network, name)
def __getitem__(self, name):
return self._network[name]
def is_relative(self):
return True
def store_inputs(self, inputs):
"""
stores the inputs for the current node
"""
self._state["inputs"] = inputs
def set_data(self, key, value):
# we don't want ambiguity with names, thus don't allow
# the same name as a variable, and also don't allow overwriting
# additional_data
assert key not in self._state["additional_data"]
assert key not in self._state["current_variables"]
self._state["additional_data"][key] = value
def get_data(self, key):
return self._state["additional_data"][key]
def get_variable(self, variable_name):
return self._state["current_variables"][variable_name]
def set_hyperparameter(self, node_name, key, value):
"""
sets a hyperparameter for a child node
"""
if node_name not in self._state["set_hyperparameters"]:
self._state["set_hyperparameters"][node_name] = {}
self._state["set_hyperparameters"][node_name][key] = value
def forward_hyperparameter(self,
node_name,
key,
hyperparameter_keys,
*args,
**kwargs):
"""
forwards a set of hyperparameters to a different node under a different
key
"""
value = self.find_hyperparameter(hyperparameter_keys, *args, **kwargs)
self.set_hyperparameter(node_name, key, value)
def maybe_forward_hyperparameter(self, *args, **kwargs):
"""
forwards hyperparameters to a different node, if given
returns True if set, False otherwise
"""
try:
self.forward_hyperparameter(*args, **kwargs)
return True
except MissingHyperparameter:
return False
def find_hyperparameter(self,
hyperparameter_keys,
default_value=NoDefaultValue):
"""
throws an exception if no default value is given
example:
>>> network.find_hyperparameter(["foo", "bar", "choo"], 42)
the network first looks at override_hyperparameters, then searches the
current node for hyperparameters named "foo", "bar", or "choo" in that
order, then looks at the ancestors of the current node, repeating until
out of nodes. if no ancestor has a hyperparameter for one of the keys,
42 is returned
"""
# return first valid hyperparameter
for val in self.find_hyperparameters(hyperparameter_keys,
default_value):
return val
else:
# otherwise, raise an exception
raise MissingHyperparameter(dict(
hyperparameter_keys=hyperparameter_keys,
))
def find_hyperparameters(self,
hyperparameter_keys,
default_value=NoDefaultValue):
"""
returns generator of all hyperparameters for the given keys
in the order of precedence
"""
# use override_hyperparameters
# ---
# this has highest precedence
for hyperparameter_key in hyperparameter_keys:
if hyperparameter_key in self.override_hyperparameters:
yield self.override_hyperparameters[hyperparameter_key]
# look through hyperparameters of all ancestors
ancestors = list(self.graph.architecture_ancestors(self._name))
# prefer closer nodes over more specific queries
done_ancestors_names = []
for node in [self._node] + ancestors:
# append current node to done ancestor
# ---
# this is done before the loop, so a node can set_hyperparameter
# for itself
done_ancestors_names.append(node.name)
# prepare set_hyperparameters state
node_hps = self.node_state[node.name]["set_hyperparameters"]
for hyperparameter_key in hyperparameter_keys:
# try finding set hyperparameters
for ancestor_name in done_ancestors_names:
try:
yield node_hps[ancestor_name][hyperparameter_key]
except KeyError:
pass
# try finding provided hyperparameters
try:
yield node.get_hyperparameter(self, hyperparameter_key)
except MissingHyperparameter:
pass
# try returning the given default value, if any
if default_value is not NoDefaultValue:
yield default_value
# try global default hyperparameters
# ---
# this has lowest precedence
for hyperparameter_key in hyperparameter_keys:
if hyperparameter_key in self.default_hyperparameters:
yield self.default_hyperparameters[hyperparameter_key]
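# Precedence example (illustrative): with override_hyperparameters
# {"lr": 1.0}, a value of 0.1 set via set_hyperparameter for this node, and
# default_hyperparameters {"lr": 0.01}, find_hyperparameters(["lr"]) yields
# 1.0, then 0.1, then 0.01; find_hyperparameter takes the first (1.0).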
def find_vws_in_subtree(self, tags=None, is_shared=None):
"""
return variable wrappers matching all of the given tags
"""
remaining_vws = [
vw
for name in self.graph.architecture_subtree_names(self._name)
for vw in self[name]._state["current_variables"].values()]
if tags is not None:
tags = set(tags)
# only keep variables where all tags match
remaining_vws = [vw for vw in remaining_vws
if len(tags - vw.tags) == 0]
if is_shared is not None:
remaining_vws = [vw for vw in remaining_vws
if is_shared == vw.is_shared]
return remaining_vws
def find_nodes_in_subtree(self, cls):
"""
return all nodes with the given class
"""
return [node for node in self.graph.architecture_subtree(self._name)
if node.__class__ is cls]
def create_vw(self, name, **kwargs):
"""
creates a new output variable for the current node
"""
# we don't want to overwrite an existing value
assert name not in self._state['current_variables']
assert name not in self._state['original_variables']
# FIXME have a defined name separator
new_name = "%s:%s" % (self._name, name)
# save metadata about the network
kwargs["relative_network"] = self
# create the variable
variable = VariableWrapper(new_name, **kwargs)
# save variable
self._state['current_variables'][name] = variable
self._state['original_variables'][name] = variable
return variable
def copy_vw(self, name, previous_vw, tags=None):
"""
creates a copy of previous_vw under a new name
the main use case for this is for wrapper nodes which just pass
their input as their output
"""
variable = previous_vw.variable
if utils.is_shared_variable(variable):
# creating a "copy"of the variable
# ---
# rationale: shared variables are seen as normal variables
# (so shared variables aren't accidentally owned by multiple nodes)
# ---
# why tensor_copy? theano.compile.view_op doesn't support Rop
variable = T.tensor_copy(variable)
return self.create_vw(
name,
variable=variable,
shape=previous_vw.shape,
tags=tags,
)
def replace_variable(self, name, new_variable):
"""
replaces the given variable for a node in 'current_variables' state
with a new variable
NOTE: this is designed for use with scan, so that non-sequence variables
can be replaced by their sequence versions
"""
assert name in self._state['original_variables']
self._state['current_variables'][name] = new_variable
return new_variable
def forward_input_to(self,
node_name,
previous_to_key="default",
to_key="default",
ignore_no_input=True):
"""
forwards input of current node, if any, to a new node with the
given node_name (presumably a child node)
the main use case for this would be to have the input of a container
be sent to one of its children
"""
input_edge = self.graph.input_edge_for_node(self._name,
to_key=previous_to_key)
# there may not be an input
# (eg. if the wrapper node is holding the input node)
if input_edge is None:
if not ignore_no_input:
raise ValueError("forward_input_to called on node without "
"input key: %s" % previous_to_key)
else:
# ignore the issue and do nothing
pass
else:
name_from, from_key = input_edge
self.graph.add_dependency(name_from,
node_name,
from_key=from_key,
to_key=to_key)
def take_output_from(self,
node_name,
from_key="default",
to_key="default"):
"""
forwards output of a given node (with key from_key) to the current
node (with key to_key)
the main use case for this would be to have the output of a child of
a container node be sent to the container to allow it to propagate
forward in the DAG
"""
self.add_dependency(node_name,
self._name,
from_key=from_key,
to_key=to_key)
def forward_output_to(self,
node_name,
from_key="default",
to_key="default"):
"""
forwards output of the current node (with key from_key) to the given
node (with key to_key)
"""
self.add_dependency(self._name,
node_name,
from_key=from_key,
to_key=to_key)
def add_dependency(self,
from_name,
to_name,
from_key="default",
to_key="default"):
"""
wrapper around self.graph.add_dependency
"""
self.graph.add_dependency(from_name=from_name,
to_name=to_name,
from_key=from_key,
to_key=to_key)
def remove_dependency(self, from_name, to_name):
"""
wrapper around self.graph.remove_dependency
"""
self.graph.remove_dependency(from_name=from_name,
to_name=to_name)
def get_all_input_edges(self):
"""
returns a map from input keys of the current node to the node where
the edge is from
"""
edges = {}
for edge in self.graph.all_input_edges_for_node(self._name):
edge_from, edge_to, datamap = edge
assert edge_to == self._name
to_key = datamap.get("to_key")
if to_key is not None:
edges[to_key] = edge_from
return edges
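# Usage sketch (illustrative; `root_node` stands for any node tree built with
# this library):
#
#   network = Network(root_node)
#   fn = network.function(["input"], ["output", ("fc1", "W")],
#                         include_updates=True)
#
# String queries and (node_name, key) tuples are resolved through
# `network_variable`, so they can be mixed freely with raw theano variables.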
|
|
from __future__ import unicode_literals
from datetime import datetime
from dateutil.tz import tzutc
import boto3
from freezegun import freeze_time
import httpretty
import requests
import sure # noqa
from moto import mock_apigateway
@freeze_time("2015-01-01")
@mock_apigateway
def test_create_and_get_rest_api():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
response = client.get_rest_api(
restApiId=api_id
)
response.pop('ResponseMetadata')
response.should.equal({
'id': api_id,
'name': 'my_api',
'description': 'this is my api',
'createdDate': datetime(2015, 1, 1, tzinfo=tzutc())
})
@mock_apigateway
def test_list_and_delete_apis():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
client.create_rest_api(
name='my_api2',
description='this is my api2',
)
response = client.get_rest_apis()
len(response['items']).should.equal(2)
client.delete_rest_api(
restApiId=api_id
)
response = client.get_rest_apis()
len(response['items']).should.equal(1)
@mock_apigateway
def test_create_resource():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources['items'] if resource['path'] == '/'][0]['id']
root_resource = client.get_resource(
restApiId=api_id,
resourceId=root_id,
)
root_resource.should.equal({
'path': '/',
'id': root_id,
'ResponseMetadata': {'HTTPStatusCode': 200},
'resourceMethods': {
'GET': {}
}
})
response = client.create_resource(
restApiId=api_id,
parentId=root_id,
pathPart='/users',
)
resources = client.get_resources(restApiId=api_id)['items']
len(resources).should.equal(2)
non_root_resource = [resource for resource in resources if resource['path'] != '/'][0]
response = client.delete_resource(
restApiId=api_id,
resourceId=non_root_resource['id']
)
len(client.get_resources(restApiId=api_id)['items']).should.equal(1)
@mock_apigateway
def test_child_resource():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources['items'] if resource['path'] == '/'][0]['id']
response = client.create_resource(
restApiId=api_id,
parentId=root_id,
pathPart='users',
)
users_id = response['id']
response = client.create_resource(
restApiId=api_id,
parentId=users_id,
pathPart='tags',
)
tags_id = response['id']
child_resource = client.get_resource(
restApiId=api_id,
resourceId=tags_id,
)
child_resource.should.equal({
'path': '/users/tags',
'pathPart': 'tags',
'parentId': users_id,
'id': tags_id,
'ResponseMetadata': {'HTTPStatusCode': 200},
'resourceMethods': {'GET': {}},
})
@mock_apigateway
def test_create_method():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources['items'] if resource['path'] == '/'][0]['id']
client.put_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
authorizationType='none',
)
response = client.get_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET'
)
response.should.equal({
'httpMethod': 'GET',
'authorizationType': 'none',
'ResponseMetadata': {'HTTPStatusCode': 200}
})
@mock_apigateway
def test_create_method_response():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources['items'] if resource['path'] == '/'][0]['id']
client.put_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
authorizationType='none',
)
response = client.get_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET'
)
response = client.put_method_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response.should.equal({
'ResponseMetadata': {'HTTPStatusCode': 200},
'statusCode': '200'
})
response = client.get_method_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response.should.equal({
'ResponseMetadata': {'HTTPStatusCode': 200},
'statusCode': '200'
})
response = client.delete_method_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response.should.equal({'ResponseMetadata': {'HTTPStatusCode': 200}})
@mock_apigateway
def test_integrations():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources['items'] if resource['path'] == '/'][0]['id']
client.put_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
authorizationType='none',
)
client.put_method_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response = client.put_integration(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
type='HTTP',
uri='http://httpbin.org/robots.txt',
)
response.should.equal({
'ResponseMetadata': {'HTTPStatusCode': 200},
'httpMethod': 'GET',
'integrationResponses': {
'200': {
'responseTemplates': {
'application/json': None
},
'statusCode': 200
}
},
'type': 'HTTP',
'uri': 'http://httpbin.org/robots.txt'
})
response = client.get_integration(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET'
)
response.should.equal({
'ResponseMetadata': {'HTTPStatusCode': 200},
'httpMethod': 'GET',
'integrationResponses': {
'200': {
'responseTemplates': {
'application/json': None
},
'statusCode': 200
}
},
'type': 'HTTP',
'uri': 'http://httpbin.org/robots.txt'
})
response = client.get_resource(
restApiId=api_id,
resourceId=root_id,
)
response['resourceMethods']['GET']['methodIntegration'].should.equal({
'httpMethod': 'GET',
'integrationResponses': {
'200': {
'responseTemplates': {
'application/json': None
},
'statusCode': 200
}
},
'type': 'HTTP',
'uri': 'http://httpbin.org/robots.txt'
})
client.delete_integration(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET'
)
response = client.get_resource(
restApiId=api_id,
resourceId=root_id,
)
response['resourceMethods']['GET'].shouldnt.contain("methodIntegration")
@mock_apigateway
def test_integration_response():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources['items'] if resource['path'] == '/'][0]['id']
client.put_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
authorizationType='none',
)
client.put_method_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response = client.put_integration(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
type='HTTP',
uri='http://httpbin.org/robots.txt',
)
response = client.put_integration_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
selectionPattern='foobar',
)
response.should.equal({
'statusCode': '200',
'selectionPattern': 'foobar',
'ResponseMetadata': {'HTTPStatusCode': 200},
'responseTemplates': {
'application/json': None
}
})
response = client.get_integration_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response.should.equal({
'statusCode': '200',
'selectionPattern': 'foobar',
'ResponseMetadata': {'HTTPStatusCode': 200},
'responseTemplates': {
'application/json': None
}
})
response = client.get_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
)
response['methodIntegration']['integrationResponses'].should.equal({
'200': {
'responseTemplates': {
'application/json': None
},
'selectionPattern': 'foobar',
'statusCode': '200'
}
})
response = client.delete_integration_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response = client.get_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
)
response['methodIntegration']['integrationResponses'].should.equal({})
@mock_apigateway
def test_deployment():
client = boto3.client('apigateway', region_name='us-west-2')
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
response = client.create_deployment(
restApiId=api_id,
stageName='staging',
)
deployment_id = response['id']
response = client.get_deployment(
restApiId=api_id,
deploymentId=deployment_id,
)
response.should.equal({
'id': deployment_id,
'ResponseMetadata': {'HTTPStatusCode': 200}
})
response = client.get_deployments(
restApiId=api_id,
)
response['items'].should.equal([
{'id': deployment_id}
])
response = client.delete_deployment(
restApiId=api_id,
deploymentId=deployment_id,
)
response = client.get_deployments(
restApiId=api_id,
)
len(response['items']).should.equal(0)
@httpretty.activate
@mock_apigateway
def test_http_proxying_integration():
httpretty.register_uri(
httpretty.GET, "http://httpbin.org/robots.txt", body='a fake response'
)
region_name = 'us-west-2'
client = boto3.client('apigateway', region_name=region_name)
response = client.create_rest_api(
name='my_api',
description='this is my api',
)
api_id = response['id']
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources['items'] if resource['path'] == '/'][0]['id']
client.put_method(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
authorizationType='none',
)
client.put_method_response(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
statusCode='200',
)
response = client.put_integration(
restApiId=api_id,
resourceId=root_id,
httpMethod='GET',
type='HTTP',
uri='http://httpbin.org/robots.txt',
)
stage_name = 'staging'
client.create_deployment(
restApiId=api_id,
stageName=stage_name,
)
deploy_url = "https://{api_id}.execute-api.{region_name}.amazonaws.com/{stage_name}".format(api_id=api_id, region_name=region_name, stage_name=stage_name)
requests.get(deploy_url).content.should.equal(b"a fake response")
|
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-10-08 15:04:08
import os
from six.moves.urllib.parse import urlparse, parse_qs
def connect_database(url):
"""
create database object by url
mysql:
mysql+type://user:passwd@host:port/database
sqlite:
# relative path
sqlite+type:///path/to/database.db
# absolute path
sqlite+type:////path/to/database.db
# memory database
sqlite+type://
mongodb:
mongodb+type://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]
more: http://docs.mongodb.org/manual/reference/connection-string/
sqlalchemy:
sqlalchemy+postgresql+type://user:passwd@host:port/database
sqlalchemy+mysql+mysqlconnector+type://user:passwd@host:port/database
more: http://docs.sqlalchemy.org/en/rel_0_9/core/engines.html
redis:
redis+taskdb://host:port/db
elasticsearch:
elasticsearch+type://host:port/?index=pyspider
couchdb:
couchdb+type://[username:password@]host[:port]
local:
local+projectdb://filepath,filepath
type:
taskdb
projectdb
resultdb
"""
db = _connect_database(url)
db.copy = lambda: _connect_database(url)
return db
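# A minimal usage sketch (illustrative; the path and backend choice below are
# assumptions, not defaults):
#
#     taskdb = connect_database('sqlite+taskdb:///data/task.db')
#     other = taskdb.copy()  # a second, independent connection to the same db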
def _connect_database(url): # NOQA
parsed = urlparse(url)
scheme = parsed.scheme.split('+')
if len(scheme) == 1:
raise Exception('wrong scheme format: %s' % parsed.scheme)
else:
engine, dbtype = scheme[0], scheme[-1]
other_scheme = "+".join(scheme[1:-1])
    if dbtype not in ('taskdb', 'projectdb', 'resultdb'):
        raise LookupError('unknown database type: %s, type should be one of '
                          '["taskdb", "projectdb", "resultdb"]' % dbtype)
    if engine == 'mysql':
        return _connect_mysql(parsed, dbtype)
    elif engine == 'sqlite':
        return _connect_sqlite(parsed, dbtype)
    elif engine == 'mongodb':
        return _connect_mongodb(parsed, dbtype, url)
elif engine == 'sqlalchemy':
return _connect_sqlalchemy(parsed, dbtype, url, other_scheme)
elif engine == 'redis':
if dbtype == 'taskdb':
from .redis.taskdb import TaskDB
return TaskDB(parsed.hostname, parsed.port,
int(parsed.path.strip('/') or 0))
else:
            raise LookupError('not supported dbtype: %s' % dbtype)
elif engine == 'local':
scripts = url.split('//', 1)[1].split(',')
if dbtype == 'projectdb':
from .local.projectdb import ProjectDB
return ProjectDB(scripts)
else:
            raise LookupError('not supported dbtype: %s' % dbtype)
elif engine == 'elasticsearch' or engine == 'es':
return _connect_elasticsearch(parsed, dbtype)
elif engine == 'couchdb':
return _connect_couchdb(parsed, dbtype, url)
else:
raise Exception('unknown engine: %s' % engine)
def _connect_mysql(parsed, dbtype):
    params = {}
    if parsed.username:
        params['user'] = parsed.username
    if parsed.password:
        params['passwd'] = parsed.password
    if parsed.hostname:
        params['host'] = parsed.hostname
    if parsed.port:
        params['port'] = parsed.port
    if parsed.path.strip('/'):
        params['database'] = parsed.path.strip('/')
    if dbtype == 'taskdb':
        from .mysql.taskdb import TaskDB
        return TaskDB(**params)
    elif dbtype == 'projectdb':
        from .mysql.projectdb import ProjectDB
        return ProjectDB(**params)
    elif dbtype == 'resultdb':
        from .mysql.resultdb import ResultDB
        return ResultDB(**params)
    else:
        raise LookupError
def _connect_sqlite(parsed, dbtype):
if parsed.path.startswith('//'):
path = '/' + parsed.path.strip('/')
elif parsed.path.startswith('/'):
path = './' + parsed.path.strip('/')
elif not parsed.path:
path = ':memory:'
else:
raise Exception('error path: %s' % parsed.path)
if dbtype == 'taskdb':
from .sqlite.taskdb import TaskDB
return TaskDB(path)
elif dbtype == 'projectdb':
from .sqlite.projectdb import ProjectDB
return ProjectDB(path)
elif dbtype == 'resultdb':
from .sqlite.resultdb import ResultDB
return ResultDB(path)
else:
raise LookupError
def _connect_mongodb(parsed, dbtype, url):
    url = url.replace(parsed.scheme, 'mongodb')
    params = {}
    if parsed.path.strip('/'):
        params['database'] = parsed.path.strip('/')
    if dbtype == 'taskdb':
        from .mongodb.taskdb import TaskDB
        return TaskDB(url, **params)
    elif dbtype == 'projectdb':
        from .mongodb.projectdb import ProjectDB
        return ProjectDB(url, **params)
    elif dbtype == 'resultdb':
        from .mongodb.resultdb import ResultDB
        return ResultDB(url, **params)
    else:
        raise LookupError
def _connect_sqlalchemy(parsed, dbtype, url, other_scheme):
if not other_scheme:
raise Exception('wrong scheme format: %s' % parsed.scheme)
url = url.replace(parsed.scheme, other_scheme)
if dbtype == 'taskdb':
from .sqlalchemy.taskdb import TaskDB
return TaskDB(url)
elif dbtype == 'projectdb':
from .sqlalchemy.projectdb import ProjectDB
return ProjectDB(url)
elif dbtype == 'resultdb':
from .sqlalchemy.resultdb import ResultDB
return ResultDB(url)
else:
raise LookupError
def _connect_elasticsearch(parsed, dbtype):
    # in Python 2.6, for a URL like "http://host/?query" the query part is not split out
if parsed.path.startswith('/?'):
index = parse_qs(parsed.path[2:])
else:
index = parse_qs(parsed.query)
if 'index' in index and index['index']:
index = index['index'][0]
else:
index = 'pyspider'
if dbtype == 'projectdb':
from .elasticsearch.projectdb import ProjectDB
return ProjectDB([parsed.netloc], index=index)
elif dbtype == 'resultdb':
from .elasticsearch.resultdb import ResultDB
return ResultDB([parsed.netloc], index=index)
elif dbtype == 'taskdb':
from .elasticsearch.taskdb import TaskDB
return TaskDB([parsed.netloc], index=index)
def _connect_couchdb(parsed, dbtype, url):
if os.environ.get('COUCHDB_HTTPS'):
url = "https://" + parsed.netloc + "/"
else:
url = "http://" + parsed.netloc + "/"
params = {}
# default to env, then url, then hard coded
params['username'] = os.environ.get('COUCHDB_USER') or parsed.username
params['password'] = os.environ.get('COUCHDB_PASSWORD') or parsed.password
if dbtype == 'taskdb':
from .couchdb.taskdb import TaskDB
return TaskDB(url, **params)
elif dbtype == 'projectdb':
from .couchdb.projectdb import ProjectDB
return ProjectDB(url, **params)
elif dbtype == 'resultdb':
from .couchdb.resultdb import ResultDB
return ResultDB(url, **params)
else:
raise LookupError
|
|
import pyspheregl.sphere.touch_sphere as touch_lib
import time
import pyspheregl.sphere.sphere as sphere
import numpy as np
import calibration
calib_mode = 'cubic'
class SmoothedTracker(object):
"""
Class for managing a smoothed value
"""
def __init__(self, alpha=0.1, max_jump=1e6):
self.ddelta = 0
self.last_delta = 0
self.rotation_angle = 0
self.last_touched_delta = 0
self.alpha = alpha
self.last = None
self.max_jump = max_jump
self.actual_delta = 0
def reset(self):
self.last = None
self.ddelta = 0
self.last_delta = 0
self.rotation_angle = 0
self.last_touched_delta = 0
self.actual_delta = 0
def update(self, next, touched=True):
        if self.last is None or next is None:
            delta = 0
        else:
            delta = -(next - self.last)
            if abs(delta) >= self.max_jump:
                # clamp sudden jumps to max_jump while preserving direction
                delta = self.max_jump * (delta / abs(delta))
self.last = next
self.ddelta = self.alpha * (delta - self.last_delta) + (1 - self.alpha) * self.ddelta
self.last_delta = delta
ret_val = 0
if not touched:
self.rotation_angle -= self.ddelta
self.rotation_angle *= 0.7
if (abs(self.rotation_angle) < 0.1):
self.rotation_angle = 0
self.last_touched_delta *= 0.91
ret_val = self.last_touched_delta
else:
self.last_touched_delta = delta
self.rotation_angle = 0
if abs(delta) < 0.5:
ret_val = 0
else:
ret_val = (delta * .8) + (self.last_delta * .2)
if abs(self.actual_delta) < abs(ret_val) or touched:
self.actual_delta = 0.8 * self.actual_delta + 0.4 * ret_val
else:
self.actual_delta = 0.995 * self.actual_delta + 0.005 * ret_val
self.actual_delta = np.where(self.actual_delta == np.max(self.actual_delta), self.actual_delta, 0)
return self.actual_delta
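# Illustrative use of SmoothedTracker (the sample values are made up): feed it
# raw angle samples while touched, then pass touched=False after lift-off so
# the accumulated momentum decays instead of tracking:
#
#     tracker = SmoothedTracker(alpha=0.2, max_jump=20)
#     for sample in (10.0, 10.4, 11.1):
#         delta = tracker.update(sample)
#     coast = tracker.update(None, touched=False)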
class Hover(object):
"""
Class representing a potential hover touch
"""
def __init__(self, timeout, move_thresh):
self.x = 0
self.y = 0
self.time_since_last_move = 0.0
self.timeout = timeout
self.move_thresh = move_thresh
self.hovered = False
self.disabled = False
def update(self, dt, x, y):
d = np.sqrt((x - self.x) ** 2 + (y - self.y) ** 2)
if d > self.move_thresh:
self.time_since_last_move = 0.0
else:
self.time_since_last_move += dt
if not self.hovered:
self.x, self.y = x, y
if self.time_since_last_move > self.timeout and not self.disabled:
self.hovered = True
else:
if d > self.move_thresh:
self.hovered = False
self.time_since_last_move = 0.0
self.disabled = True
class HoverHandler(object):
"""
Class to manage potential hover touches
"""
def __init__(self, timeout, move_thresh):
self.fseqs = {}
self.timeout = timeout
self.move_thresh = move_thresh
def hovered(self):
return [i for i in self.fseqs.itervalues() if i.hovered]
def update(self, dt, pts):
for fseq, pt in pts.iteritems():
if fseq in self.fseqs:
lat, lon = calibration.get_calibrated_touch(pt[0], pt[1], calib_mode)
self.fseqs[fseq].update(dt, lat, lon)
else:
self.fseqs[fseq] = Hover(timeout=self.timeout, move_thresh=self.move_thresh)
kills = []
for fseq in self.fseqs:
if fseq not in pts:
kills.append(fseq)
for kill in kills:
del self.fseqs[kill]
class PalmTouch(object):
"""
Class representing potential palm touches
"""
def __init__(self, id, x, y, time_thresh, move_thresh):
self.id = id
self.x = x
self.y = y
self.lifetime = 0
self.valid = False
self.time_thresh = time_thresh
self.move_thresh = move_thresh
def update(self, dt, x, y):
if not self.valid:
self.valid = True
self.lifetime = 0
# TODO track movement?
self.x, self.y = x, y
self.lifetime += dt
if self.lifetime >= self.time_thresh:
self.valid = True
return
class PalmManager(object):
"""
Class to manage palm touch events
"""
def __init__(self, threshold=0.20, min_points=4, time_thresh=0.5, move_thresh=0.5):
self.min_points = min_points
self.threshold = threshold
self.palmed = (False, -1, 0, 0)
self.lifetime = 0
self.points = {}
self.time_thresh = time_thresh
self.move_thresh = move_thresh
self.curid = 0
self.lastpos = (1e6, 1e6)
def reset(self):
self.lifetime = 0
self.palmed = (False, -1, 0, 0)
self.points = {}
self.lastpos = (1e6, 1e6)
def update(self, dt, touches):
if len(touches) < self.min_points:
self.reset()
return
# update state of current touches
for id, touch in touches.iteritems():
lat, lon = calibration.get_calibrated_touch(touch[0], touch[1], calib_mode)
if id in self.points:
self.points[id].update(dt, lat, lon)
else:
self.points[id] = PalmTouch(id, lat, lon, self.time_thresh, self.move_thresh)
# remove any dead touches
removed = []
for id, touch in self.points.iteritems():
if id not in touches:
removed.append(id)
        for r in removed:
            try:
                del self.points[r]
            except KeyError:
                # point already removed; nothing else to do
                pass
sx, sy = 0, 0
coords = []
ids = []
mage = 0
# only want to consider points that have been active for long enough
valid_points = [t for t in self.points.itervalues() if t.valid]
if len(valid_points) < self.min_points:
return
for t in valid_points:
x, y = t.x, t.y
sx += x
sy += y
mage += t.lifetime
coords.append((x, y))
ids.append(t.id)
# calculate centroid plus average age
centroid = (sx / len(valid_points), sy / len(valid_points))
mage /= len(valid_points)
palm_touches = 0
# check if each point is within the threshold distance of the centroid
        for pt in coords:
            # TODO still not perfect in some areas due to wraparound effects;
            # use the wrap-corrected angular difference in the distance
            # (`pt` avoids shadowing the dt time-step argument)
            xdiff = abs(centroid[0] - pt[0])
            if xdiff > np.pi:
                xdiff = (np.pi * 2) - xdiff
            dist = np.sqrt((xdiff ** 2) + ((centroid[1] - pt[1]) ** 2))
            # print(centroid, pt, ids[coords.index(pt)], dist)
            if dist < self.threshold:
                palm_touches += 1
#print('palm points: %d/%d' % (palm_touches, self.min_points))
is_palmed = palm_touches >= self.min_points
if is_palmed:
if sphere.spherical_distance(self.lastpos, centroid) >= self.move_thresh:
self.curid += 1
self.palmed = (is_palmed, self.curid, centroid[0], centroid[1])
self.lastpos = centroid
else:
self.palmed = (is_palmed, self.curid, self.lastpos[0], self.lastpos[1])
class Tap(object):
"""
Class representing potential tap touches
"""
def __init__(self, id, x, y, time_thresh, move_thresh):
self.id = id
self.x = x
self.y = y
self.lifetime = 0
self.valid = True
self.time_thresh = time_thresh
self.move_thresh = move_thresh
def update(self, dt, x, y):
if not self.valid:
return
d = np.sqrt((x - self.x) ** 2 + (y - self.y) ** 2)
if d >= self.move_thresh:
self.valid = False
# print('TAP invalid, moved too far')
return
self.lifetime += dt
if self.lifetime >= self.time_thresh:
self.valid = False
# print('TAP invalid, time exceeded')
return
self.x, self.y = x, y
class TapHandler(object):
"""
Class to manage tap events
"""
def __init__(self, zone, time_thresh, move_thresh):
self.time_thresh = time_thresh
self.move_thresh = move_thresh
self.zone = zone
self.taps = {}
self.events = {}
def update(self, dt, touches):
self.events = {}
for id, touch in touches.iteritems():
lat, lon = calibration.get_calibrated_touch(touch[0], touch[1], calib_mode)
if id in self.taps:
self.taps[id].update(dt, lat, lon)
else:
self.taps[id] = Tap(id, lat, lon, self.time_thresh, self.move_thresh)
removed = []
for id, tap in self.taps.iteritems():
if id not in touches:
removed.append(id)
for r in removed:
if self.taps[r].valid:
self.events[r] = self.taps[r]
del self.taps[r]
class TouchManager(object):
"""
Main class for processing TUIO touch points
"""
def __init__(self):
        # touch zones for the sphere:
        #   main: the torus area
        #   nowplaying: a thin band just above the torus
        #   tokens: a band above that
        #   top: the circular area around the very top of the sphere
self.zones = {'main': (-1.0, 0.6), 'nowplaying': (0.6, 0.75), 'tokens': (0.75, 1.05), 'top': (1.05, 2.0)}
if not touch_lib.is_up():
touch_lib.init(ip="127.0.0.1", fseq=False, zones=self.zones)
touch_lib.add_handler()
touch_lib.start()
print "TouchLib Up"
self.drag_touch = None
self.last_touch = time.clock()
self.rotation = SmoothedTracker(alpha=0.2, max_jump=20)
self.tilt = SmoothedTracker(alpha=0.2, max_jump=20)
self.rotation_delta = 0
self.tilt_delta = 0
self.moved = False
self.palmer = PalmManager()
self.hover = HoverHandler(timeout=0.6, move_thresh=0.02)
self.taps = {}
for z in self.zones.keys():
self.taps[z] = TapHandler(z, time_thresh=0.4, move_thresh=0.02)
self.tap_events = {}
self.rotation_distance, self.tilt_distance = 0, 0
def stop(self):
self.drag_touch = None
self.rotation.update(None, touched=False)
self.tilt.update(None, touched=False)
self.rotation.reset()
self.tilt.reset()
self.rotation_delta, self.tilt_delta = 0, 0
def update_main_other(self, dt, touches):
rotation_delta, tilt_delta = 0, 0
self.moved = False
# If our dominant touch is no longer visible
if self.drag_touch not in touches:
if len(touches.keys()) > 0:
self.drag_touch = touches.keys()[0]
self.rotation.update(None, touched=False)
self.tilt.update(None, touched=False)
else:
self.drag_touch = None
self.rotation.update(None, touched=False)
self.tilt.update(None, touched=False)
# Process current dominant touch
if self.drag_touch in touches:
x, y = touches[self.drag_touch][0], touches[self.drag_touch][1]
lat, lon = calibration.get_calibrated_touch(x, y, calib_mode)
rotation_delta = self.rotation.update(np.degrees(lat))
tilt_delta = self.tilt.update(np.degrees(lon))
else:
if len(touches) == 0:
rotation_delta = self.rotation.update(None, touched=False)
tilt_delta = self.tilt.update(None, touched=False)
else:
rotation_delta = self.rotation.update(None)
tilt_delta = self.tilt.update(None)
# TODO why??
# if isinstance(rotation_delta, np.ndarray):
# if len(rotation_delta.shape) == 0:
# rotation_delta = 0.0
# else:
# rotation_delta = rotation_delta[0]
# if isinstance(tilt_delta, np.ndarray):
# if len(tilt_delta.shape) == 0:
# tilt_delta = 0.0
# else:
# tilt_delta = tilt_delta[0]
self.rotation_delta, self.tilt_delta = np.radians([rotation_delta, tilt_delta])
#print(self.rotation_delta, self.tilt_delta)
self.moved = (self.drag_touch is not None) and (abs(self.rotation_delta) > 0.001 or abs(self.tilt_delta) > 0.001)
if self.moved:
self.rotation_distance += abs(self.rotation_delta)
self.tilt_distance += abs(self.tilt_delta)
else:
self.rotation_distance, self.tilt_distance = 0, 0
def update(self, dt):
self.tap_events = {}
if touch_lib.is_up():
# all touches is a dict of all current touches, zonetouches
# contains the same info but splits them up between the defined
# zones of latitude, so you get a dict of dicts with the top level
# having a key for each zone (e.g. 'main', 'top')
alltouches, zonetouches = touch_lib.get_touches()
# Update Last Touch
if len(alltouches) > 0:
self.last_touch = time.clock()
# handle points according to zones:
# top zone: only interested in taps (to trigger reset state events)
# token zone: only interested in taps??? to delete tokens
# main zone: want to process:
# - taps (for buttons on selected track)
# - palming (to clear playlist)
# - other touches (to rotate/tilt the display)
for z in self.zones:
self.taps[z].update(dt, zonetouches.get(z, {}))
if len(self.taps[z].events) > 0:
self.tap_events[z] = self.taps[z].events
print('Found %d taps in %s' % (len(self.taps[z].events), z))
if len(self.tap_events) > 0:
# if there are any tap events, don't try to process any other
# types of touch event
return
# any non-tap events must come from the main (torus) zone
maintouches = zonetouches.get('main', {})
# check for (in order of priority)
# - palms
# - hovers
# - other touches
self.palmer.update(dt, maintouches)
if self.palmer.palmed[0]:
self.stop()
return
# only process hovers if we have a single point, otherwise
# it's likely to mistake other types of touch for hover events
if len(maintouches) < 2:
self.hover.update(dt, maintouches)
if len(self.hover.hovered()) > 0:
self.stop()
return
self.update_main_other(dt, maintouches)
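# Sketch of a typical driver loop for TouchManager (the pacing here is
# illustrative; in practice dt would come from the render loop):
#
#     manager = TouchManager()
#     last = time.clock()
#     while True:
#         now = time.clock()
#         manager.update(now - last)
#         last = now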
|
|
# Jordi Torrents
# Test for k-cutsets
from operator import itemgetter
from nose.tools import assert_equal, assert_false, assert_true, assert_raises
import networkx as nx
from networkx.algorithms import flow
from networkx.algorithms.connectivity.kcutsets import _is_separating_set
flow_funcs = [
flow.boykov_kolmogorov,
flow.dinitz,
flow.edmonds_karp,
flow.preflow_push,
flow.shortest_augmenting_path,
]
##
## Some nice synthetic graphs
##
def graph_example_1():
G = nx.convert_node_labels_to_integers(nx.grid_graph([5,5]),
label_attribute='labels')
rlabels = nx.get_node_attributes(G, 'labels')
labels = dict((v, k) for k, v in rlabels.items())
for nodes in [(labels[(0,0)], labels[(1,0)]),
(labels[(0,4)], labels[(1,4)]),
(labels[(3,0)], labels[(4,0)]),
(labels[(3,4)], labels[(4,4)]) ]:
new_node = G.order()+1
# Petersen graph is triconnected
P = nx.petersen_graph()
G = nx.disjoint_union(G,P)
# Add two edges between the grid and P
G.add_edge(new_node+1, nodes[0])
G.add_edge(new_node, nodes[1])
# K5 is 4-connected
K = nx.complete_graph(5)
G = nx.disjoint_union(G,K)
# Add three edges between P and K5
G.add_edge(new_node+2,new_node+11)
G.add_edge(new_node+3,new_node+12)
G.add_edge(new_node+4,new_node+13)
# Add another K5 sharing a node
G = nx.disjoint_union(G,K)
nbrs = G[new_node+10]
G.remove_node(new_node+10)
for nbr in nbrs:
G.add_edge(new_node+17, nbr)
G.add_edge(new_node+16, new_node+5)
G.name = 'Example graph for connectivity'
return G
def torrents_and_ferraro_graph():
G = nx.convert_node_labels_to_integers(nx.grid_graph([5,5]),
label_attribute='labels')
rlabels = nx.get_node_attributes(G, 'labels')
labels = dict((v, k) for k, v in rlabels.items())
for nodes in [ (labels[(0,4)], labels[(1,4)]),
(labels[(3,4)], labels[(4,4)]) ]:
new_node = G.order()+1
# Petersen graph is triconnected
P = nx.petersen_graph()
G = nx.disjoint_union(G,P)
# Add two edges between the grid and P
G.add_edge(new_node+1, nodes[0])
G.add_edge(new_node, nodes[1])
# K5 is 4-connected
K = nx.complete_graph(5)
G = nx.disjoint_union(G,K)
# Add three edges between P and K5
G.add_edge(new_node+2,new_node+11)
G.add_edge(new_node+3,new_node+12)
G.add_edge(new_node+4,new_node+13)
# Add another K5 sharing a node
G = nx.disjoint_union(G,K)
nbrs = G[new_node+10]
G.remove_node(new_node+10)
for nbr in nbrs:
G.add_edge(new_node+17, nbr)
        # Commenting this out makes the graph not biconnected!!
        # This mistake made one reviewer very angry :P
G.add_edge(new_node+16, new_node+8)
for nodes in [(labels[(0,0)], labels[(1,0)]),
(labels[(3,0)], labels[(4,0)])]:
new_node = G.order()+1
# Petersen graph is triconnected
P = nx.petersen_graph()
G = nx.disjoint_union(G,P)
# Add two edges between the grid and P
G.add_edge(new_node+1, nodes[0])
G.add_edge(new_node, nodes[1])
# K5 is 4-connected
K = nx.complete_graph(5)
G = nx.disjoint_union(G,K)
# Add three edges between P and K5
G.add_edge(new_node+2,new_node+11)
G.add_edge(new_node+3,new_node+12)
G.add_edge(new_node+4,new_node+13)
# Add another K5 sharing two nodes
G = nx.disjoint_union(G,K)
nbrs = G[new_node+10]
G.remove_node(new_node+10)
for nbr in nbrs:
G.add_edge(new_node+17, nbr)
nbrs2 = G[new_node+9]
G.remove_node(new_node+9)
for nbr in nbrs2:
G.add_edge(new_node+18, nbr)
G.name = 'Example graph for connectivity'
return G
# Helper function
def _check_separating_sets(G):
for Gc in nx.connected_component_subgraphs(G):
if len(Gc) < 3:
continue
for cut in nx.all_node_cuts(Gc):
assert_equal(nx.node_connectivity(Gc), len(cut))
H = Gc.copy()
H.remove_nodes_from(cut)
assert_false(nx.is_connected(H))
def test_torrents_and_ferraro_graph():
G = torrents_and_ferraro_graph()
_check_separating_sets(G)
def test_example_1():
G = graph_example_1()
_check_separating_sets(G)
def test_random_gnp():
G = nx.gnp_random_graph(100, 0.1)
_check_separating_sets(G)
def test_shell():
constructor=[(20,80,0.8),(80,180,0.6)]
G = nx.random_shell_graph(constructor)
_check_separating_sets(G)
def test_configuration():
deg_seq = nx.utils.create_degree_sequence(100,nx.utils.powerlaw_sequence)
G = nx.Graph(nx.configuration_model(deg_seq))
G.remove_edges_from(G.selfloop_edges())
_check_separating_sets(G)
def test_karate():
G = nx.karate_club_graph()
_check_separating_sets(G)
def _generate_no_biconnected(max_attempts=50):
attempts = 0
while True:
G = nx.fast_gnp_random_graph(100,0.0575)
if nx.is_connected(G) and not nx.is_biconnected(G):
attempts = 0
yield G
else:
            if attempts >= max_attempts:
                msg = "Tried %d times: no suitable Graph." % max_attempts
                raise Exception(msg)
else:
attempts += 1
def test_articulation_points():
Ggen = _generate_no_biconnected()
for i in range(2):
G = next(Ggen)
articulation_points = list({a} for a in nx.articulation_points(G))
for cut in nx.all_node_cuts(G):
assert_true(cut in articulation_points)
def test_grid_2d_graph():
# All minimum node cuts of a 2d grid
# are the four pairs of nodes that are
# neighbors of the four corner nodes.
G = nx.grid_2d_graph(5, 5)
solution = [
set([(0, 1), (1, 0)]),
set([(3, 0), (4, 1)]),
set([(3, 4), (4, 3)]),
set([(0, 3), (1, 4)]),
]
for cut in nx.all_node_cuts(G):
assert_true(cut in solution)
def test_disconnected_graph():
G = nx.fast_gnp_random_graph(100, 0.01)
cuts = nx.all_node_cuts(G)
assert_raises(nx.NetworkXError, next, cuts)
def test_alternative_flow_functions():
graph_funcs = [graph_example_1, nx.davis_southern_women_graph]
for graph_func in graph_funcs:
G = graph_func()
for flow_func in flow_funcs:
for cut in nx.all_node_cuts(G, flow_func=flow_func):
assert_equal(nx.node_connectivity(G), len(cut))
H = G.copy()
H.remove_nodes_from(cut)
assert_false(nx.is_connected(H))
def test_is_separating_set_complete_graph():
G = nx.complete_graph(5)
assert_true(_is_separating_set(G, {0, 1, 2, 3}))
def test_is_separating_set():
for i in [5, 10, 15]:
G = nx.star_graph(i)
max_degree_node = max(G, key=G.degree)
assert_true(_is_separating_set(G, {max_degree_node}))
def test_non_repeated_cuts():
# The algorithm was repeating the cut {0, 1} for the giant biconnected
# component of the Karate club graph.
K = nx.karate_club_graph()
G = max(list(nx.biconnected_component_subgraphs(K)), key=len)
solution = [{32, 33}, {2, 33}, {0, 3}, {0, 1}, {29, 33}]
cuts = list(nx.all_node_cuts(G))
if len(solution) != len(cuts):
print(nx.info(G))
print("Solution: {}".format(solution))
print("Result: {}".format(cuts))
assert_true(len(solution) == len(cuts))
for cut in cuts:
assert_true(cut in solution)
def test_cycle_graph():
G = nx.cycle_graph(5)
solution = [{0, 2}, {0, 3}, {1, 3}, {1, 4}, {2, 4}]
cuts = list(nx.all_node_cuts(G))
assert_true(len(solution) == len(cuts))
for cut in cuts:
assert_true(cut in solution)
def test_complete_graph():
G = nx.complete_graph(5)
solution = [
{0, 1, 2, 3},
{0, 1, 2, 4},
{0, 1, 3, 4},
{0, 2, 3, 4},
{1, 2, 3, 4},
]
cuts = list(nx.all_node_cuts(G))
assert_true(len(solution) == len(cuts))
for cut in cuts:
assert_true(cut in solution)
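# Quick interactive check of all_node_cuts (mirrors test_cycle_graph above):
#
#     >>> G = nx.cycle_graph(5)
#     >>> sorted(sorted(cut) for cut in nx.all_node_cuts(G))
#     [[0, 2], [0, 3], [1, 3], [1, 4], [2, 4]]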
|
|
"""The airvisual component."""
from __future__ import annotations
from collections.abc import Mapping
from datetime import timedelta
from math import ceil
from typing import Any
from pyairvisual import CloudAPI, NodeSamba
from pyairvisual.errors import (
AirVisualError,
InvalidKeyError,
KeyExpiredError,
NodeProError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_IP_ADDRESS,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_PASSWORD,
CONF_SHOW_ON_MAP,
CONF_STATE,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import (
aiohttp_client,
config_validation as cv,
entity_registry,
)
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
CONF_CITY,
CONF_COUNTRY,
CONF_GEOGRAPHIES,
CONF_INTEGRATION_TYPE,
DATA_COORDINATOR,
DOMAIN,
INTEGRATION_TYPE_GEOGRAPHY_COORDS,
INTEGRATION_TYPE_GEOGRAPHY_NAME,
INTEGRATION_TYPE_NODE_PRO,
LOGGER,
)
PLATFORMS = ["sensor"]
DATA_LISTENER = "listener"
DEFAULT_ATTRIBUTION = "Data provided by AirVisual"
DEFAULT_NODE_PRO_UPDATE_INTERVAL = timedelta(minutes=1)
CONFIG_SCHEMA = cv.deprecated(DOMAIN)
@callback
def async_get_geography_id(geography_dict: Mapping[str, Any]) -> str:
"""Generate a unique ID from a geography dict."""
if CONF_CITY in geography_dict:
return ", ".join(
(
geography_dict[CONF_CITY],
geography_dict[CONF_STATE],
geography_dict[CONF_COUNTRY],
)
)
return ", ".join(
(str(geography_dict[CONF_LATITUDE]), str(geography_dict[CONF_LONGITUDE]))
)
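# For example (illustrative values): a city-based geography yields an ID like
# "Portland, Oregon, USA", while a coordinate-based one yields "45.52, -122.68".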
@callback
def async_get_cloud_api_update_interval(
hass: HomeAssistant, api_key: str, num_consumers: int
) -> timedelta:
"""Get a leveled scan interval for a particular cloud API key.
This will shift based on the number of active consumers, thus keeping the user
under the monthly API limit.
"""
# Assuming 10,000 calls per month and a "largest possible month" of 31 days; note
# that we give a buffer of 1500 API calls for any drift, restarts, etc.:
minutes_between_api_calls = ceil(num_consumers * 31 * 24 * 60 / 8500)
LOGGER.debug(
"Leveling API key usage (%s): %s consumers, %s minutes between updates",
api_key,
num_consumers,
minutes_between_api_calls,
)
return timedelta(minutes=minutes_between_api_calls)
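# Worked example (illustrative): with 2 consumers, ceil(2 * 31 * 24 * 60 / 8500)
# = ceil(89280 / 8500) = 11, so each consumer polls every 11 minutes; total
# usage is then about 2 * 44640 / 11 ~= 8116 calls per month, under the 8500
# budget.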
@callback
def async_get_cloud_coordinators_by_api_key(
hass: HomeAssistant, api_key: str
) -> list[DataUpdateCoordinator]:
"""Get all DataUpdateCoordinator objects related to a particular API key."""
coordinators = []
for entry_id, coordinator in hass.data[DOMAIN][DATA_COORDINATOR].items():
config_entry = hass.config_entries.async_get_entry(entry_id)
if config_entry and config_entry.data.get(CONF_API_KEY) == api_key:
coordinators.append(coordinator)
return coordinators
@callback
def async_sync_geo_coordinator_update_intervals(
hass: HomeAssistant, api_key: str
) -> None:
"""Sync the update interval for geography-based data coordinators (by API key)."""
coordinators = async_get_cloud_coordinators_by_api_key(hass, api_key)
if not coordinators:
return
update_interval = async_get_cloud_api_update_interval(
hass, api_key, len(coordinators)
)
for coordinator in coordinators:
LOGGER.debug(
"Updating interval for coordinator: %s, %s",
coordinator.name,
update_interval,
)
coordinator.update_interval = update_interval
@callback
def _standardize_geography_config_entry(
hass: HomeAssistant, config_entry: ConfigEntry
) -> None:
"""Ensure that geography config entries have appropriate properties."""
entry_updates = {}
if not config_entry.unique_id:
# If the config entry doesn't already have a unique ID, set one:
entry_updates["unique_id"] = config_entry.data[CONF_API_KEY]
if not config_entry.options:
# If the config entry doesn't already have any options set, set defaults:
entry_updates["options"] = {CONF_SHOW_ON_MAP: True}
if config_entry.data.get(CONF_INTEGRATION_TYPE) not in [
INTEGRATION_TYPE_GEOGRAPHY_COORDS,
INTEGRATION_TYPE_GEOGRAPHY_NAME,
]:
# If the config entry data doesn't contain an integration type that we know
# about, infer it from the data we have:
entry_updates["data"] = {**config_entry.data}
if CONF_CITY in config_entry.data:
entry_updates["data"][
CONF_INTEGRATION_TYPE
] = INTEGRATION_TYPE_GEOGRAPHY_NAME
else:
entry_updates["data"][
CONF_INTEGRATION_TYPE
] = INTEGRATION_TYPE_GEOGRAPHY_COORDS
if not entry_updates:
return
hass.config_entries.async_update_entry(config_entry, **entry_updates)
@callback
def _standardize_node_pro_config_entry(
hass: HomeAssistant, config_entry: ConfigEntry
) -> None:
"""Ensure that Node/Pro config entries have appropriate properties."""
entry_updates: dict[str, Any] = {}
if CONF_INTEGRATION_TYPE not in config_entry.data:
# If the config entry data doesn't contain the integration type, add it:
entry_updates["data"] = {
**config_entry.data,
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO,
}
if not entry_updates:
return
hass.config_entries.async_update_entry(config_entry, **entry_updates)
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up AirVisual as config entry."""
hass.data.setdefault(DOMAIN, {DATA_COORDINATOR: {}, DATA_LISTENER: {}})
if CONF_API_KEY in config_entry.data:
_standardize_geography_config_entry(hass, config_entry)
websession = aiohttp_client.async_get_clientsession(hass)
cloud_api = CloudAPI(config_entry.data[CONF_API_KEY], session=websession)
async def async_update_data() -> dict[str, Any]:
"""Get new data from the API."""
if CONF_CITY in config_entry.data:
api_coro = cloud_api.air_quality.city(
config_entry.data[CONF_CITY],
config_entry.data[CONF_STATE],
config_entry.data[CONF_COUNTRY],
)
else:
api_coro = cloud_api.air_quality.nearest_city(
config_entry.data[CONF_LATITUDE],
config_entry.data[CONF_LONGITUDE],
)
try:
return await api_coro
except (InvalidKeyError, KeyExpiredError) as ex:
raise ConfigEntryAuthFailed from ex
except AirVisualError as err:
raise UpdateFailed(f"Error while retrieving data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
LOGGER,
name=async_get_geography_id(config_entry.data),
# We give a placeholder update interval in order to create the coordinator;
# then, below, we use the coordinator's presence (along with any other
# coordinators using the same API key) to calculate an actual, leveled
# update interval:
update_interval=timedelta(minutes=5),
update_method=async_update_data,
)
# Only geography-based entries have options:
hass.data[DOMAIN][DATA_LISTENER][
config_entry.entry_id
] = config_entry.add_update_listener(async_reload_entry)
else:
# Remove outdated air_quality entities from the entity registry if they exist:
ent_reg = entity_registry.async_get(hass)
for entity_entry in [
e
for e in ent_reg.entities.values()
if e.config_entry_id == config_entry.entry_id
and e.entity_id.startswith("air_quality")
]:
LOGGER.debug(
'Removing deprecated air_quality entity: "%s"', entity_entry.entity_id
)
ent_reg.async_remove(entity_entry.entity_id)
_standardize_node_pro_config_entry(hass, config_entry)
async def async_update_data() -> dict[str, Any]:
"""Get new data from the API."""
try:
async with NodeSamba(
config_entry.data[CONF_IP_ADDRESS], config_entry.data[CONF_PASSWORD]
) as node:
return await node.async_get_latest_measurements()
except NodeProError as err:
raise UpdateFailed(f"Error while retrieving data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
LOGGER,
name="Node/Pro data",
update_interval=DEFAULT_NODE_PRO_UPDATE_INTERVAL,
update_method=async_update_data,
)
await coordinator.async_config_entry_first_refresh()
hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
    # Re-assess the shared update interval now that this coordinator exists:
if CONF_API_KEY in config_entry.data:
async_sync_geo_coordinator_update_intervals(
hass, config_entry.data[CONF_API_KEY]
)
hass.config_entries.async_setup_platforms(config_entry, PLATFORMS)
return True
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate an old config entry."""
version = config_entry.version
LOGGER.debug("Migrating from version %s", version)
# 1 -> 2: One geography per config entry
if version == 1:
version = config_entry.version = 2
# Update the config entry to only include the first geography (there is always
# guaranteed to be at least one):
geographies = list(config_entry.data[CONF_GEOGRAPHIES])
first_geography = geographies.pop(0)
first_id = async_get_geography_id(first_geography)
hass.config_entries.async_update_entry(
config_entry,
unique_id=first_id,
title=f"Cloud API ({first_id})",
data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **first_geography},
)
# For any geographies that remain, create a new config entry for each one:
for geography in geographies:
if CONF_LATITUDE in geography:
source = "geography_by_coords"
else:
source = "geography_by_name"
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": source},
data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **geography},
)
)
LOGGER.info("Migration to version %s successful", version)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload an AirVisual config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(
config_entry, PLATFORMS
)
if unload_ok:
hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id)
remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
remove_listener()
if CONF_API_KEY in config_entry.data:
# Re-calculate the update interval period for any remaining consumers of
# this API key:
async_sync_geo_coordinator_update_intervals(
hass, config_entry.data[CONF_API_KEY]
)
return unload_ok
async def async_reload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Handle an options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
class AirVisualEntity(CoordinatorEntity):
"""Define a generic AirVisual entity."""
def __init__(
self, coordinator: DataUpdateCoordinator, description: EntityDescription
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._attr_extra_state_attributes = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self.entity_description = description
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
@callback
def update() -> None:
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()
self.async_on_remove(self.coordinator.async_add_listener(update))
self.update_from_latest_data()
@callback
def update_from_latest_data(self) -> None:
"""Update the entity from the latest data."""
raise NotImplementedError
|
|
"""
Support for local control of entities by emulating the Philips Hue bridge.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/emulated_hue/
"""
import asyncio
import json
import logging
import voluptuous as vol
from homeassistant import util
from homeassistant.const import (
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.components.http import REQUIREMENTS # NOQA
from homeassistant.components.http import HomeAssistantWSGI
from homeassistant.helpers.deprecation import get_deprecated
import homeassistant.helpers.config_validation as cv
from .hue_api import (
HueUsernameView, HueAllLightsStateView, HueOneLightStateView,
HueOneLightChangeView)
from .upnp import DescriptionXmlView, UPNPResponderThread
DOMAIN = 'emulated_hue'
_LOGGER = logging.getLogger(__name__)
NUMBERS_FILE = 'emulated_hue_ids.json'
CONF_HOST_IP = 'host_ip'
CONF_LISTEN_PORT = 'listen_port'
CONF_ADVERTISE_IP = 'advertise_ip'
CONF_ADVERTISE_PORT = 'advertise_port'
CONF_UPNP_BIND_MULTICAST = 'upnp_bind_multicast'
CONF_OFF_MAPS_TO_ON_DOMAINS = 'off_maps_to_on_domains'
CONF_EXPOSE_BY_DEFAULT = 'expose_by_default'
CONF_EXPOSED_DOMAINS = 'exposed_domains'
CONF_TYPE = 'type'
TYPE_ALEXA = 'alexa'
TYPE_GOOGLE = 'google_home'
DEFAULT_LISTEN_PORT = 8300
DEFAULT_UPNP_BIND_MULTICAST = True
DEFAULT_OFF_MAPS_TO_ON_DOMAINS = ['script', 'scene']
DEFAULT_EXPOSE_BY_DEFAULT = True
DEFAULT_EXPOSED_DOMAINS = [
'switch', 'light', 'group', 'input_boolean', 'media_player', 'fan'
]
DEFAULT_TYPE = TYPE_GOOGLE
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_HOST_IP): cv.string,
vol.Optional(CONF_LISTEN_PORT, default=DEFAULT_LISTEN_PORT): cv.port,
vol.Optional(CONF_ADVERTISE_IP): cv.string,
vol.Optional(CONF_ADVERTISE_PORT): cv.port,
vol.Optional(CONF_UPNP_BIND_MULTICAST): cv.boolean,
vol.Optional(CONF_OFF_MAPS_TO_ON_DOMAINS): cv.ensure_list,
vol.Optional(CONF_EXPOSE_BY_DEFAULT): cv.boolean,
vol.Optional(CONF_EXPOSED_DOMAINS): cv.ensure_list,
vol.Optional(CONF_TYPE, default=DEFAULT_TYPE):
vol.Any(TYPE_ALEXA, TYPE_GOOGLE)
})
}, extra=vol.ALLOW_EXTRA)
ATTR_EMULATED_HUE = 'emulated_hue'
ATTR_EMULATED_HUE_HIDDEN = 'emulated_hue_hidden'
def setup(hass, yaml_config):
"""Activate the emulated_hue component."""
config = Config(hass, yaml_config.get(DOMAIN, {}))
server = HomeAssistantWSGI(
hass,
server_host=config.host_ip_addr,
server_port=config.listen_port,
api_password=None,
ssl_certificate=None,
ssl_key=None,
cors_origins=None,
use_x_forwarded_for=False,
trusted_networks=[],
login_threshold=0,
is_ban_enabled=False
)
server.register_view(DescriptionXmlView(config))
server.register_view(HueUsernameView)
server.register_view(HueAllLightsStateView(config))
server.register_view(HueOneLightStateView(config))
server.register_view(HueOneLightChangeView(config))
upnp_listener = UPNPResponderThread(
config.host_ip_addr, config.listen_port,
config.upnp_bind_multicast, config.advertise_ip,
config.advertise_port)
@asyncio.coroutine
def stop_emulated_hue_bridge(event):
"""Stop the emulated hue bridge."""
upnp_listener.stop()
yield from server.stop()
@asyncio.coroutine
def start_emulated_hue_bridge(event):
"""Start the emulated hue bridge."""
upnp_listener.start()
yield from server.start()
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, stop_emulated_hue_bridge)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_emulated_hue_bridge)
return True
class Config(object):
"""Hold configuration variables for the emulated hue bridge."""
def __init__(self, hass, conf):
"""Initialize the instance."""
self.hass = hass
self.type = conf.get(CONF_TYPE)
self.numbers = None
self.cached_states = {}
if self.type == TYPE_ALEXA:
_LOGGER.warning("Alexa type is deprecated and will be removed in a"
" future version")
# Get the IP address that will be passed to the Echo during discovery
self.host_ip_addr = conf.get(CONF_HOST_IP)
if self.host_ip_addr is None:
self.host_ip_addr = util.get_local_ip()
_LOGGER.warning(
"Listen IP address not specified, auto-detected address is %s",
self.host_ip_addr)
# Get the port that the Hue bridge will listen on
self.listen_port = conf.get(CONF_LISTEN_PORT)
if not isinstance(self.listen_port, int):
self.listen_port = DEFAULT_LISTEN_PORT
_LOGGER.warning(
"Listen port not specified, defaulting to %s",
self.listen_port)
if self.type == TYPE_GOOGLE and self.listen_port != 80:
_LOGGER.warning("When targeting Google Home, listening port has "
"to be port 80")
# Get whether or not UPNP binds to multicast address (239.255.255.250)
# or to the unicast address (host_ip_addr)
self.upnp_bind_multicast = conf.get(
CONF_UPNP_BIND_MULTICAST, DEFAULT_UPNP_BIND_MULTICAST)
# Get domains that cause both "on" and "off" commands to map to "on"
# This is primarily useful for things like scenes or scripts, which
# don't really have a concept of being off
self.off_maps_to_on_domains = conf.get(CONF_OFF_MAPS_TO_ON_DOMAINS)
if not isinstance(self.off_maps_to_on_domains, list):
self.off_maps_to_on_domains = DEFAULT_OFF_MAPS_TO_ON_DOMAINS
# Get whether or not entities should be exposed by default, or if only
# explicitly marked ones will be exposed
self.expose_by_default = conf.get(
CONF_EXPOSE_BY_DEFAULT, DEFAULT_EXPOSE_BY_DEFAULT)
# Get domains that are exposed by default when expose_by_default is
# True
self.exposed_domains = conf.get(
CONF_EXPOSED_DOMAINS, DEFAULT_EXPOSED_DOMAINS)
# Calculated effective advertised IP and port for network isolation
self.advertise_ip = conf.get(
CONF_ADVERTISE_IP) or self.host_ip_addr
self.advertise_port = conf.get(
CONF_ADVERTISE_PORT) or self.listen_port
def entity_id_to_number(self, entity_id):
"""Get a unique number for the entity id."""
if self.type == TYPE_ALEXA:
return entity_id
if self.numbers is None:
self.numbers = self._load_numbers_json()
# Google Home
for number, ent_id in self.numbers.items():
if entity_id == ent_id:
return number
number = '1'
if self.numbers:
number = str(max(int(k) for k in self.numbers) + 1)
self.numbers[number] = entity_id
self._save_numbers_json()
return number
def number_to_entity_id(self, number):
"""Convert unique number to entity id."""
if self.type == TYPE_ALEXA:
return number
if self.numbers is None:
self.numbers = self._load_numbers_json()
# Google Home
assert isinstance(number, str)
return self.numbers.get(number)
def is_entity_exposed(self, entity):
"""Determine if an entity should be exposed on the emulated bridge.
Async friendly.
"""
if entity.attributes.get('view') is not None:
# Ignore entities that are views
return False
domain = entity.domain.lower()
explicit_expose = entity.attributes.get(ATTR_EMULATED_HUE, None)
explicit_hidden = entity.attributes.get(ATTR_EMULATED_HUE_HIDDEN, None)
if explicit_expose is True or explicit_hidden is False:
expose = True
elif explicit_expose is False or explicit_hidden is True:
expose = False
else:
expose = None
get_deprecated(entity.attributes, ATTR_EMULATED_HUE_HIDDEN,
ATTR_EMULATED_HUE, None)
domain_exposed_by_default = \
self.expose_by_default and domain in self.exposed_domains
# Expose an entity if the entity's domain is exposed by default and
# the configuration doesn't explicitly exclude it from being
# exposed, or if the entity is explicitly exposed
is_default_exposed = \
domain_exposed_by_default and expose is not False
return is_default_exposed or expose
def _load_numbers_json(self):
"""Set up helper method to load numbers json."""
try:
with open(self.hass.config.path(NUMBERS_FILE),
encoding='utf-8') as fil:
return json.loads(fil.read())
except (OSError, ValueError) as err:
            # OSError if the file is missing or inaccessible (no permissions);
            # ValueError if the JSON could not be parsed
if not isinstance(err, FileNotFoundError):
_LOGGER.warning("Failed to open %s: %s", NUMBERS_FILE, err)
return {}
def _save_numbers_json(self):
"""Set up helper method to save numbers json."""
try:
with open(self.hass.config.path(NUMBERS_FILE), 'w',
encoding='utf-8') as fil:
fil.write(json.dumps(self.numbers))
except OSError as err:
            # OSError if the file could not be written (e.g. no permissions)
_LOGGER.warning("Failed to write %s: %s", NUMBERS_FILE, err)
|
|
from django.db import models
from django.db.models.fields import Field
from django.db.models import Q
from django.db.models.expressions import F, ExpressionNode
from django.db.models.query import QuerySet, ValuesQuerySet, ValuesListQuerySet
from django.utils.encoding import smart_str, force_unicode
class Mapping(dict):
"""
    Mapping is a simple dict subclass for patching other dicts'
    (mostly **kwargs) keys/values according to its own contents
"""
@staticmethod
def map_dict(d, f_key=None, f_val=None):
def as_is(x):
return x
f_key = f_key or as_is
f_val = f_val or as_is
return dict(zip(map(f_key, d.iterkeys()), map(f_val, d.itervalues())))
def of(self, k):
"""
Returns mapping of k, or k itself, if corresponding mapping does not
exist
"""
return self.get(k, k)
def map_keys(self, d):
return Mapping.map_dict(d, f_key=self.of)
def map_values(self, d):
return Mapping.map_dict(d, f_val=self.of)
def mirror(self):
return Mapping(zip(self.itervalues(), self.iterkeys()))
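# Illustrative example of Mapping (the field names are made up):
#
#     m = Mapping({'name': 'blog_name'})
#     m.map_keys({'name': 'x', 'pk': 1})  # -> {'blog_name': 'x', 'pk': 1}
#     m.mirror().of('blog_name')          # -> 'name'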
class QuerySetWrapper(object):
"""
    QuerySetWrapper wraps an extent's querysets, patching field names in queries
"""
def __init__(self, qs, ExtentClass):
self._qs = qs
self._Extent = ExtentClass
super(QuerySetWrapper, self).__init__()
def _patch_expr(self, e):
"""
        This function recursively patches Django queryset expressions (like Q
        or F), replacing field occurrences with their mappings
"""
mapping = self._Extent._meta.proxies
if isinstance(e, F):
e.name = mapping.of(e.name)
return e
elif isinstance(e, (Q, ExpressionNode)):
e.children = map(self._patch_expr, e.children)
return e
elif isinstance(e, tuple):
return map(self._patch_expr, e)
elif isinstance(e, str):
return mapping.of(e)
else:
return e
def __getattr__(self, name):
mapping = self._Extent._meta.proxies
attr = getattr(self._qs, name)
def qsproxy(*args, **kwargs):
p_kwargs = Mapping.map_dict(kwargs, mapping.of, self._patch_expr)
p_args = map(self._patch_expr, args)
smth = attr(*p_args, **p_kwargs)
if isinstance(smth, ValuesListQuerySet): # subtype of ValuesQuerySet
return smth
elif isinstance(smth, ValuesQuerySet): # subtype of QuerySet
return map(mapping.mirror().map_keys, smth)
elif isinstance(smth, QuerySet): # common case
return QuerySetWrapper(smth, self._Extent)
elif isinstance(smth, self._Extent._meta.model):
return self._Extent(smth)
else:
return smth
if isinstance(attr, (models.base.ModelBase, Extent)):
return attr
elif callable(attr):
return qsproxy
else:
return attr
def __iter__(self):
return (self._Extent(i) for i in self._qs.__iter__())
def iterator(self):
for smth in self._qs.iterator():
if isinstance(smth, QuerySet):
yield QuerySetWrapper(smth, self._Extent)
elif isinstance(smth, self._Extent._meta.model):
yield self._Extent(smth)
else:
yield smth
def __len__(self):
return self._qs.__len__()
def __getitem__(self, k):
item = self._qs.__getitem__(k)
if isinstance(item, QuerySet):
return QuerySetWrapper(item, self._Extent)
else:
return self._Extent(item)
def __repr__(self):
from django.db.models.query import REPR_OUTPUT_SIZE
data = list(map(self._Extent, self._qs[:REPR_OUTPUT_SIZE + 1]))
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return repr(data)
class ExtentConstructor(models.base.ModelBase):
"""
A metaclass for Extents subclasses construction
"""
def __new__(cls, name, bases, attrs):
# we don't need anything to be inherited from ModelBase
return type.__new__(cls, name, bases, attrs)
def wrap(cls, attrname):
attr = getattr(cls, attrname)
inject_name = '%s_%s' % (cls._meta.real_app_label, attrname)
if callable(attr):
def proxy(s, *args, **kwargs):
return attr(cls(s), *args, **kwargs)
cls._meta.model.add_to_class(inject_name, proxy)
elif isinstance(attr, Field):
cls._meta.model.add_to_class(inject_name, attr)
delattr(cls, attrname)
cls._meta.proxies[attrname] = inject_name
elif isinstance(attr, models.Manager):
class ManagerWrapper(models.Manager):
def get_query_set(self):
qs = super(ManagerWrapper, self).get_query_set()
return QuerySetWrapper(qs, cls)
attr.__class__.__bases__ = (ManagerWrapper, )
attr.model = cls._meta.model
attr.model._meta.db_table = cls._meta.model._meta.db_table
def __init__(cls, name, bases, attrs):
# check this to ensure that class construction works with Extent
# subclasses but not with Extent class itself
if '__metaclass__' not in cls.__dict__:
cls._meta = attrs.pop('Meta', type('Meta', (), {}))
# app label is usually a part just before ".models"
cls._meta.real_app_label = attrs['__module__'].split('.')[-2]
cls._meta.proxies = Mapping()
if 'objects' not in attrs:
cls.objects = QuerySetWrapper(cls._meta.model.objects, cls)
cls._default_manager = cls.objects
for k in attrs.iterkeys():
if k != '__module__':
cls.wrap(k)
cls.__realmodule__ = cls.__module__
cls.__realname__ = cls.__name__
# this voodoo magic is for registering extent in django admin site
cls.__module__ = cls._meta.model.__module__
cls.__name__ = cls._meta.model.__name__
cls._meta.model.add_to_class('as_%s_%s' % (cls._meta.real_app_label,
cls.__realname__), lambda s: cls(s))
for i in ('app_label', 'module_name', 'get_add_permission',
'get_change_permission', 'get_delete_permission',
'verbose_name_plural', 'ordering', 'pk', 'get_field',
'verbose_name', 'get_ordered_objects', 'object_name',
'get_field_by_name', 'fields', 'many_to_many', 'proxy',
'verbose_name_raw'):
if not hasattr(cls._meta, i):
setattr(cls._meta, i, getattr(cls._meta.model._meta, i))
super(ExtentConstructor, cls).__init__(name, bases, attrs)
class Extent(object):
"""
    Extent is the main and only class in this package intended for external
    use. It is the base class for model extents, and it contains all the
    machinery needed to stand in for the original model class in all
    (I hope) possible cases.
"""
__metaclass__ = ExtentConstructor
def __getattr__(self, name):
"""
Proxying attribute queries to the actual model instance
"""
return getattr(self._instance, self._meta.proxies.of(name))
def __init__(self, *args, **kwargs):
if len(args) == 1 and not kwargs and isinstance(args[0], models.Model):
instance = args[0]
else:
mapping = self._meta.proxies
instance = self._meta.model(*args, **mapping.map_keys(kwargs))
self.__dict__['_instance'] = instance
def __setattr__(self, name, value):
name = self._meta.proxies.of(name)
if hasattr(self._instance, name):
setattr(self._instance, name, value)
else:
self.__dict__[name] = value
def __repr__(self):
"""
copy-pasted from models.Model.__repr__
"""
try:
u = unicode(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return smart_str(u'<%s: %s>' % (self.__class__.__realname__, u))
def __str__(self):
if '__unicode__' in self.__dict__:
return force_unicode(self).encode('utf-8')
elif hasattr(self._meta.model, '__unicode__'):
return force_unicode(self._instance).encode('utf-8')
return '%s object' % self.__class__.__realname__
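# A minimal sketch of the intended usage (the model and field below are made
# up). The injected field is stored on the model as '<app_label>_rating' and
# lookups are rewritten through _meta.proxies:
#
#     class ArticleExtent(Extent):
#         rating = models.IntegerField(default=0)
#
#         class Meta:
#             model = Article
#
#     ArticleExtent.objects.filter(rating=5)  # yields wrapped ArticleExtents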
|
|
# Copyright 2011 OpenStack Foundation
# Copyright 2011 - 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import itertools
import time
import eventlet
import greenlet
from oslo.config import cfg
import six
from ceilometer.openstack.common import excutils
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import importutils
from ceilometer.openstack.common import jsonutils
from ceilometer.openstack.common import log as logging
from ceilometer.openstack.common.rpc import amqp as rpc_amqp
from ceilometer.openstack.common.rpc import common as rpc_common
qpid_codec = importutils.try_import("qpid.codec010")
qpid_messaging = importutils.try_import("qpid.messaging")
qpid_exceptions = importutils.try_import("qpid.messaging.exceptions")
LOG = logging.getLogger(__name__)
qpid_opts = [
cfg.StrOpt('qpid_hostname',
default='localhost',
help='Qpid broker hostname'),
cfg.IntOpt('qpid_port',
default=5672,
help='Qpid broker port'),
cfg.ListOpt('qpid_hosts',
default=['$qpid_hostname:$qpid_port'],
help='Qpid HA cluster host:port pairs'),
cfg.StrOpt('qpid_username',
default='',
help='Username for qpid connection'),
cfg.StrOpt('qpid_password',
default='',
help='Password for qpid connection',
secret=True),
cfg.StrOpt('qpid_sasl_mechanisms',
default='',
help='Space separated list of SASL mechanisms to use for auth'),
cfg.IntOpt('qpid_heartbeat',
default=60,
help='Seconds between connection keepalive heartbeats'),
cfg.StrOpt('qpid_protocol',
default='tcp',
help="Transport to use, either 'tcp' or 'ssl'"),
cfg.BoolOpt('qpid_tcp_nodelay',
default=True,
help='Disable Nagle algorithm'),
# NOTE(russellb) If any additional versions are added (beyond 1 and 2),
# this file could probably use some additional refactoring so that the
# differences between each version are split into different classes.
cfg.IntOpt('qpid_topology_version',
default=1,
help="The qpid topology version to use. Version 1 is what "
"was originally used by impl_qpid. Version 2 includes "
"some backwards-incompatible changes that allow broker "
"federation to work. Users should update to version 2 "
"when they are able to take everything down, as it "
"requires a clean break."),
]
cfg.CONF.register_opts(qpid_opts)
JSON_CONTENT_TYPE = 'application/json; charset=utf8'
def raise_invalid_topology_version(conf):
msg = (_("Invalid value for qpid_topology_version: %d") %
conf.qpid_topology_version)
LOG.error(msg)
raise Exception(msg)
class ConsumerBase(object):
"""Consumer base class."""
def __init__(self, conf, session, callback, node_name, node_opts,
link_name, link_opts):
"""Declare a queue on an amqp session.
'session' is the amqp session to use
'callback' is the callback to call when messages are received
'node_name' is the first part of the Qpid address string, before ';'
'node_opts' will be applied to the "x-declare" section of "node"
in the address string.
'link_name' goes into the "name" field of the "link" in the address
string
'link_opts' will be applied to the "x-declare" section of "link"
in the address string.
"""
self.callback = callback
self.receiver = None
self.session = None
if conf.qpid_topology_version == 1:
addr_opts = {
"create": "always",
"node": {
"type": "topic",
"x-declare": {
"durable": True,
"auto-delete": True,
},
},
"link": {
"durable": True,
"x-declare": {
"durable": False,
"auto-delete": True,
"exclusive": False,
},
},
}
addr_opts["node"]["x-declare"].update(node_opts)
elif conf.qpid_topology_version == 2:
addr_opts = {
"link": {
"x-declare": {
"auto-delete": True,
"exclusive": False,
},
},
}
else:
            raise_invalid_topology_version(conf)
addr_opts["link"]["x-declare"].update(link_opts)
if link_name:
addr_opts["link"]["name"] = link_name
self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts))
self.connect(session)
def connect(self, session):
"""Declare the receiver on connect."""
self._declare_receiver(session)
def reconnect(self, session):
"""Re-declare the receiver after a qpid reconnect."""
self._declare_receiver(session)
def _declare_receiver(self, session):
self.session = session
self.receiver = session.receiver(self.address)
self.receiver.capacity = 1
def _unpack_json_msg(self, msg):
"""Load the JSON data in msg if msg.content_type indicates that it
is necessary. Put the loaded data back into msg.content and
update msg.content_type appropriately.
A Qpid Message containing a dict will have a content_type of
'amqp/map', whereas one containing a string that needs to be converted
back from JSON will have a content_type of JSON_CONTENT_TYPE.
:param msg: a Qpid Message object
:returns: None
"""
if msg.content_type == JSON_CONTENT_TYPE:
msg.content = jsonutils.loads(msg.content)
msg.content_type = 'amqp/map'
def consume(self):
"""Fetch the message and pass it to the callback object."""
message = self.receiver.fetch()
try:
self._unpack_json_msg(message)
msg = rpc_common.deserialize_msg(message.content)
self.callback(msg)
except Exception:
LOG.exception(_("Failed to process message... skipping it."))
finally:
# TODO(sandy): Need support for optional ack_on_error.
self.session.acknowledge(message)
def get_receiver(self):
return self.receiver
def get_node_name(self):
return self.address.split(';')[0]
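# Illustrative sketch (added for exposition, not part of the original driver):
# the shape of the Qpid address string ConsumerBase builds for topology
# version 1. The stdlib json module stands in for jsonutils here, and the
# node name is made up.
def _example_topology_v1_address():
    import json
    addr_opts = {
        "create": "always",
        "node": {"type": "topic",
                 "x-declare": {"durable": True, "auto-delete": True}},
        "link": {"durable": True,
                 "x-declare": {"durable": False, "auto-delete": True,
                               "exclusive": False}},
    }
    address = "%s ; %s" % ("openstack/my_topic", json.dumps(addr_opts))
    # get_node_name() recovers the part before the ';'
    assert address.split(';')[0].strip() == "openstack/my_topic"
    return address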
class DirectConsumer(ConsumerBase):
"""Queue/consumer class for 'direct'."""
def __init__(self, conf, session, msg_id, callback):
"""Init a 'direct' queue.
'session' is the amqp session to use
'msg_id' is the msg_id to listen on
'callback' is the callback to call when messages are received
"""
link_opts = {
"auto-delete": conf.amqp_auto_delete,
"exclusive": True,
"durable": conf.amqp_durable_queues,
}
if conf.qpid_topology_version == 1:
node_name = "%s/%s" % (msg_id, msg_id)
node_opts = {"type": "direct"}
link_name = msg_id
elif conf.qpid_topology_version == 2:
node_name = "amq.direct/%s" % msg_id
node_opts = {}
link_name = None
else:
            raise_invalid_topology_version(conf)
super(DirectConsumer, self).__init__(conf, session, callback,
node_name, node_opts, link_name,
link_opts)
class TopicConsumer(ConsumerBase):
"""Consumer class for 'topic'."""
def __init__(self, conf, session, topic, callback, name=None,
exchange_name=None):
"""Init a 'topic' queue.
:param session: the amqp session to use
:param topic: is the topic to listen on
:paramtype topic: str
:param callback: the callback to call when messages are received
:param name: optional queue name, defaults to topic
"""
exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf)
link_opts = {
"auto-delete": conf.amqp_auto_delete,
"durable": conf.amqp_durable_queues,
}
if conf.qpid_topology_version == 1:
node_name = "%s/%s" % (exchange_name, topic)
elif conf.qpid_topology_version == 2:
node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic)
else:
            raise_invalid_topology_version(conf)
super(TopicConsumer, self).__init__(conf, session, callback, node_name,
{}, name or topic, link_opts)
class FanoutConsumer(ConsumerBase):
"""Consumer class for 'fanout'."""
def __init__(self, conf, session, topic, callback):
"""Init a 'fanout' queue.
'session' is the amqp session to use
'topic' is the topic to listen on
'callback' is the callback to call when messages are received
"""
self.conf = conf
link_opts = {"exclusive": True}
if conf.qpid_topology_version == 1:
node_name = "%s_fanout" % topic
node_opts = {"durable": False, "type": "fanout"}
elif conf.qpid_topology_version == 2:
node_name = "amq.topic/fanout/%s" % topic
node_opts = {}
else:
            raise_invalid_topology_version(conf)
super(FanoutConsumer, self).__init__(conf, session, callback,
node_name, node_opts, None,
link_opts)
class Publisher(object):
"""Base Publisher class."""
def __init__(self, conf, session, node_name, node_opts=None):
"""Init the Publisher class with the exchange_name, routing_key,
and other options
"""
self.sender = None
self.session = session
if conf.qpid_topology_version == 1:
addr_opts = {
"create": "always",
"node": {
"type": "topic",
"x-declare": {
"durable": False,
# auto-delete isn't implemented for exchanges in qpid,
# but put in here anyway
"auto-delete": True,
},
},
}
if node_opts:
addr_opts["node"]["x-declare"].update(node_opts)
self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts))
elif conf.qpid_topology_version == 2:
self.address = node_name
else:
            raise_invalid_topology_version(conf)
self.reconnect(session)
def reconnect(self, session):
"""Re-establish the Sender after a reconnection."""
self.sender = session.sender(self.address)
def _pack_json_msg(self, msg):
"""Qpid cannot serialize dicts containing strings longer than 65535
characters. This function dumps the message content to a JSON
string, which Qpid is able to handle.
:param msg: May be either a Qpid Message object or a bare dict.
:returns: A Qpid Message with its content field JSON encoded.
"""
try:
msg.content = jsonutils.dumps(msg.content)
except AttributeError:
# Need to have a Qpid message so we can set the content_type.
msg = qpid_messaging.Message(jsonutils.dumps(msg))
msg.content_type = JSON_CONTENT_TYPE
return msg
def send(self, msg):
"""Send a message."""
try:
# Check if Qpid can encode the message
check_msg = msg
if not hasattr(check_msg, 'content_type'):
check_msg = qpid_messaging.Message(msg)
content_type = check_msg.content_type
enc, dec = qpid_messaging.message.get_codec(content_type)
enc(check_msg.content)
except qpid_codec.CodecException:
# This means the message couldn't be serialized as a dict.
msg = self._pack_json_msg(msg)
self.sender.send(msg)
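# Minimal standalone sketch (added for exposition) of the workaround that
# _pack_json_msg implements: qpid's 'amqp/map' codec cannot encode strings
# longer than 65535 characters (the limit noted in _pack_json_msg), so
# oversized payloads are shipped as a JSON string plus JSON_CONTENT_TYPE.
# Uses only the stdlib; a dict payload with Python 3 str values is assumed.
def _example_json_fallback(payload):
    import json
    max_amqp_map_str = 65535
    too_big = any(isinstance(v, str) and len(v) > max_amqp_map_str
                  for v in payload.values())
    if too_big:
        return json.dumps(payload), JSON_CONTENT_TYPE
    return payload, 'amqp/map'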
class DirectPublisher(Publisher):
"""Publisher class for 'direct'."""
def __init__(self, conf, session, msg_id):
"""Init a 'direct' publisher."""
if conf.qpid_topology_version == 1:
node_name = msg_id
node_opts = {"type": "direct"}
elif conf.qpid_topology_version == 2:
node_name = "amq.direct/%s" % msg_id
node_opts = {}
else:
            raise_invalid_topology_version(conf)
super(DirectPublisher, self).__init__(conf, session, node_name,
node_opts)
class TopicPublisher(Publisher):
"""Publisher class for 'topic'."""
def __init__(self, conf, session, topic):
"""Init a 'topic' publisher.
"""
exchange_name = rpc_amqp.get_control_exchange(conf)
if conf.qpid_topology_version == 1:
node_name = "%s/%s" % (exchange_name, topic)
elif conf.qpid_topology_version == 2:
node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic)
else:
            raise_invalid_topology_version(conf)
super(TopicPublisher, self).__init__(conf, session, node_name)
class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'."""
def __init__(self, conf, session, topic):
"""Init a 'fanout' publisher.
"""
if conf.qpid_topology_version == 1:
node_name = "%s_fanout" % topic
node_opts = {"type": "fanout"}
elif conf.qpid_topology_version == 2:
node_name = "amq.topic/fanout/%s" % topic
node_opts = {}
else:
            raise_invalid_topology_version(conf)
super(FanoutPublisher, self).__init__(conf, session, node_name,
node_opts)
class NotifyPublisher(Publisher):
"""Publisher class for notifications."""
def __init__(self, conf, session, topic):
"""Init a 'topic' publisher.
"""
exchange_name = rpc_amqp.get_control_exchange(conf)
node_opts = {"durable": True}
if conf.qpid_topology_version == 1:
node_name = "%s/%s" % (exchange_name, topic)
elif conf.qpid_topology_version == 2:
node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic)
else:
            raise_invalid_topology_version(conf)
super(NotifyPublisher, self).__init__(conf, session, node_name,
node_opts)
class Connection(object):
"""Connection object."""
pool = None
def __init__(self, conf, server_params=None):
if not qpid_messaging:
raise ImportError("Failed to import qpid.messaging")
self.session = None
self.consumers = {}
self.consumer_thread = None
self.proxy_callbacks = []
self.conf = conf
if server_params and 'hostname' in server_params:
# NOTE(russellb) This enables support for cast_to_server.
server_params['qpid_hosts'] = [
'%s:%d' % (server_params['hostname'],
server_params.get('port', 5672))
]
params = {
'qpid_hosts': self.conf.qpid_hosts,
'username': self.conf.qpid_username,
'password': self.conf.qpid_password,
}
params.update(server_params or {})
self.brokers = params['qpid_hosts']
self.username = params['username']
self.password = params['password']
self.connection_create(self.brokers[0])
self.reconnect()
def connection_create(self, broker):
# Create the connection - this does not open the connection
self.connection = qpid_messaging.Connection(broker)
# Check if flags are set and if so set them for the connection
# before we call open
self.connection.username = self.username
self.connection.password = self.password
self.connection.sasl_mechanisms = self.conf.qpid_sasl_mechanisms
# Reconnection is done by self.reconnect()
self.connection.reconnect = False
self.connection.heartbeat = self.conf.qpid_heartbeat
self.connection.transport = self.conf.qpid_protocol
self.connection.tcp_nodelay = self.conf.qpid_tcp_nodelay
def _register_consumer(self, consumer):
self.consumers[str(consumer.get_receiver())] = consumer
def _lookup_consumer(self, receiver):
return self.consumers[str(receiver)]
def reconnect(self):
"""Handles reconnecting and re-establishing sessions and queues."""
attempt = 0
delay = 1
while True:
# Close the session if necessary
if self.connection.opened():
try:
self.connection.close()
except qpid_exceptions.ConnectionError:
pass
broker = self.brokers[attempt % len(self.brokers)]
attempt += 1
try:
self.connection_create(broker)
self.connection.open()
except qpid_exceptions.ConnectionError as e:
msg_dict = dict(e=e, delay=delay)
msg = _("Unable to connect to AMQP server: %(e)s. "
"Sleeping %(delay)s seconds") % msg_dict
LOG.error(msg)
time.sleep(delay)
delay = min(2 * delay, 60)
else:
LOG.info(_('Connected to AMQP server on %s'), broker)
break
self.session = self.connection.session()
if self.consumers:
consumers = self.consumers
self.consumers = {}
for consumer in six.itervalues(consumers):
consumer.reconnect(self.session)
self._register_consumer(consumer)
LOG.debug(_("Re-established AMQP queues"))
def ensure(self, error_callback, method, *args, **kwargs):
while True:
try:
return method(*args, **kwargs)
except (qpid_exceptions.Empty,
qpid_exceptions.ConnectionError) as e:
if error_callback:
error_callback(e)
self.reconnect()
def close(self):
"""Close/release this connection."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
try:
self.connection.close()
except Exception:
# NOTE(dripton) Logging exceptions that happen during cleanup just
# causes confusion; there's really nothing useful we can do with
# them.
pass
self.connection = None
def reset(self):
"""Reset a connection so it can be used again."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.session.close()
self.session = self.connection.session()
self.consumers = {}
def declare_consumer(self, consumer_cls, topic, callback):
"""Create a Consumer using the class that was passed in and
add it to our list of consumers
"""
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info)
def _declare_consumer():
consumer = consumer_cls(self.conf, self.session, topic, callback)
self._register_consumer(consumer)
return consumer
return self.ensure(_connect_error, _declare_consumer)
def iterconsume(self, limit=None, timeout=None):
"""Return an iterator that will consume from all queues/consumers."""
def _error_callback(exc):
if isinstance(exc, qpid_exceptions.Empty):
LOG.debug(_('Timed out waiting for RPC response: %s') %
str(exc))
raise rpc_common.Timeout()
else:
LOG.exception(_('Failed to consume message from queue: %s') %
str(exc))
def _consume():
nxt_receiver = self.session.next_receiver(timeout=timeout)
try:
self._lookup_consumer(nxt_receiver).consume()
except Exception:
LOG.exception(_("Error processing message. Skipping it."))
for iteration in itertools.count(0):
if limit and iteration >= limit:
                return  # PEP 479: end the generator instead of raising StopIteration
yield self.ensure(_error_callback, _consume)
def cancel_consumer_thread(self):
"""Cancel a consumer thread."""
if self.consumer_thread is not None:
self.consumer_thread.kill()
try:
self.consumer_thread.wait()
except greenlet.GreenletExit:
pass
self.consumer_thread = None
def wait_on_proxy_callbacks(self):
"""Wait for all proxy callback threads to exit."""
for proxy_cb in self.proxy_callbacks:
proxy_cb.wait()
def publisher_send(self, cls, topic, msg):
"""Send to a publisher based on the publisher class."""
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info)
def _publisher_send():
publisher = cls(self.conf, self.session, topic)
publisher.send(msg)
return self.ensure(_connect_error, _publisher_send)
def declare_direct_consumer(self, topic, callback):
"""Create a 'direct' queue.
In nova's use, this is generally a msg_id queue used for
responses for call/multicall
"""
self.declare_consumer(DirectConsumer, topic, callback)
def declare_topic_consumer(self, topic, callback=None, queue_name=None,
exchange_name=None):
"""Create a 'topic' consumer."""
self.declare_consumer(functools.partial(TopicConsumer,
name=queue_name,
exchange_name=exchange_name,
),
topic, callback)
def declare_fanout_consumer(self, topic, callback):
"""Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
def direct_send(self, msg_id, msg):
"""Send a 'direct' message."""
self.publisher_send(DirectPublisher, msg_id, msg)
def topic_send(self, topic, msg, timeout=None):
"""Send a 'topic' message."""
#
# We want to create a message with attributes, e.g. a TTL. We
# don't really need to keep 'msg' in its JSON format any longer
# so let's create an actual qpid message here and get some
# value-add on the go.
#
# WARNING: Request timeout happens to be in the same units as
# qpid's TTL (seconds). If this changes in the future, then this
# will need to be altered accordingly.
#
qpid_message = qpid_messaging.Message(content=msg, ttl=timeout)
self.publisher_send(TopicPublisher, topic, qpid_message)
def fanout_send(self, topic, msg):
"""Send a 'fanout' message."""
self.publisher_send(FanoutPublisher, topic, msg)
def notify_send(self, topic, msg, **kwargs):
"""Send a notify message on a topic."""
self.publisher_send(NotifyPublisher, topic, msg)
def consume(self, limit=None):
"""Consume from all queues/consumers."""
it = self.iterconsume(limit=limit)
while True:
try:
six.next(it)
except StopIteration:
return
def consume_in_thread(self):
"""Consumer from all queues/consumers in a greenthread."""
@excutils.forever_retry_uncaught_exceptions
def _consumer_thread():
try:
self.consume()
except greenlet.GreenletExit:
return
if self.consumer_thread is None:
self.consumer_thread = eventlet.spawn(_consumer_thread)
return self.consumer_thread
def create_consumer(self, topic, proxy, fanout=False):
"""Create a consumer that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
if fanout:
consumer = FanoutConsumer(self.conf, self.session, topic, proxy_cb)
else:
consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb)
self._register_consumer(consumer)
return consumer
def create_worker(self, topic, proxy, pool_name):
"""Create a worker that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb,
name=pool_name)
self._register_consumer(consumer)
return consumer
def join_consumer_pool(self, callback, pool_name, topic,
exchange_name=None, ack_on_error=True):
"""Register as a member of a group of consumers for a given topic from
the specified exchange.
Exactly one member of a given pool will receive each message.
A message will be delivered to multiple pools, if more than
one is created.
"""
callback_wrapper = rpc_amqp.CallbackWrapper(
conf=self.conf,
callback=callback,
connection_pool=rpc_amqp.get_connection_pool(self.conf,
Connection),
wait_for_consumers=not ack_on_error
)
self.proxy_callbacks.append(callback_wrapper)
consumer = TopicConsumer(conf=self.conf,
session=self.session,
topic=topic,
callback=callback_wrapper,
name=pool_name,
exchange_name=exchange_name)
self._register_consumer(consumer)
return consumer
def create_connection(conf, new=True):
"""Create a connection."""
return rpc_amqp.create_connection(
conf, new,
rpc_amqp.get_connection_pool(conf, Connection))
def multicall(conf, context, topic, msg, timeout=None):
"""Make a call that returns multiple times."""
return rpc_amqp.multicall(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def call(conf, context, topic, msg, timeout=None):
"""Sends a message on a topic and wait for a response."""
return rpc_amqp.call(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def cast(conf, context, topic, msg):
"""Sends a message on a topic without waiting for a response."""
return rpc_amqp.cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast(conf, context, topic, msg):
"""Sends a message on a fanout exchange without waiting for a response."""
return rpc_amqp.fanout_cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a topic to a specific server."""
return rpc_amqp.cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a fanout exchange to a specific server."""
return rpc_amqp.fanout_cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def notify(conf, context, topic, msg, envelope):
"""Sends a notification event on a topic."""
return rpc_amqp.notify(conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection),
envelope)
def cleanup():
return rpc_amqp.cleanup(Connection.pool)
|
|
md_icons = {
'md-3d-rotation':
u"\uf000",
'md-accessibility':
u"\uf001",
'md-account-balance':
u"\uf002",
'md-account-balance-wallet':
u"\uf003",
'md-account-box':
u"\uf004",
'md-account-child':
u"\uf005",
'md-account-circle':
u"\uf006",
'md-add-shopping-cart':
u"\uf007",
'md-alarm':
u"\uf008",
'md-alarm-add':
u"\uf009",
'md-alarm-off':
u"\uf00a",
'md-alarm-on':
u"\uf00b",
'md-android':
u"\uf00c",
'md-announcement':
u"\uf00d",
'md-aspect-ratio':
u"\uf00e",
'md-assessment':
u"\uf00f",
'md-assignment':
u"\uf010",
'md-assignment-ind':
u"\uf011",
'md-assignment-late':
u"\uf012",
'md-assignment-return':
u"\uf013",
'md-assignment-returned':
u"\uf014",
'md-assignment-turned-in':
u"\uf015",
'md-autorenew':
u"\uf016",
'md-backup':
u"\uf017",
'md-book':
u"\uf018",
'md-bookmark':
u"\uf019",
'md-bookmark-outline':
u"\uf01a",
'md-bug-report':
u"\uf01b",
'md-cached':
u"\uf01c",
'md-class':
u"\uf01d",
'md-credit-card':
u"\uf01e",
'md-dashboard':
u"\uf01f",
'md-delete':
u"\uf020",
'md-description':
u"\uf021",
'md-dns':
u"\uf022",
'md-done':
u"\uf023",
'md-done-all':
u"\uf024",
'md-event':
u"\uf025",
'md-exit-to-app':
u"\uf026",
'md-explore':
u"\uf027",
'md-extension':
u"\uf028",
'md-face-unlock':
u"\uf029",
'md-favorite':
u"\uf02a",
'md-favorite-outline':
u"\uf02b",
'md-find-in-page':
u"\uf02c",
'md-find-replace':
u"\uf02d",
'md-flip-to-back':
u"\uf02e",
'md-flip-to-front':
u"\uf02f",
'md-get-app':
u"\uf030",
'md-grade':
u"\uf031",
'md-group-work':
u"\uf032",
'md-help':
u"\uf033",
'md-highlight-remove':
u"\uf034",
'md-history':
u"\uf035",
'md-home':
u"\uf036",
'md-https':
u"\uf037",
'md-info':
u"\uf038",
'md-info-outline':
u"\uf039",
'md-input':
u"\uf03a",
'md-invert-colors':
u"\uf03b",
'md-label':
u"\uf03c",
'md-label-outline':
u"\uf03d",
'md-language':
u"\uf03e",
'md-launch':
u"\uf03f",
'md-list':
u"\uf040",
'md-lock':
u"\uf041",
'md-lock-open':
u"\uf042",
'md-lock-outline':
u"\uf043",
'md-loyalty':
u"\uf044",
'md-markunread-mailbox':
u"\uf045",
'md-note-add':
u"\uf046",
'md-open-in-browser':
u"\uf047",
'md-open-in-new':
u"\uf048",
'md-open-with':
u"\uf049",
'md-pageview':
u"\uf04a",
'md-payment':
u"\uf04b",
'md-perm-camera-mic':
u"\uf04c",
'md-perm-contact-cal':
u"\uf04d",
'md-perm-data-setting':
u"\uf04e",
'md-perm-device-info':
u"\uf04f",
'md-perm-identity':
u"\uf050",
'md-perm-media':
u"\uf051",
'md-perm-phone-msg':
u"\uf052",
'md-perm-scan-wifi':
u"\uf053",
'md-picture-in-picture':
u"\uf054",
'md-polymer':
u"\uf055",
'md-print':
u"\uf056",
'md-query-builder':
u"\uf057",
'md-question-answer':
u"\uf058",
'md-receipt':
u"\uf059",
'md-redeem':
u"\uf05a",
'md-report-problem':
u"\uf05b",
'md-restore':
u"\uf05c",
'md-room':
u"\uf05d",
'md-schedule':
u"\uf05e",
'md-search':
u"\uf05f",
'md-settings':
u"\uf060",
'md-settings-applications':
u"\uf061",
'md-settings-backup-restore':
u"\uf062",
'md-settings-bluetooth':
u"\uf063",
'md-settings-cell':
u"\uf064",
'md-settings-display':
u"\uf065",
'md-settings-ethernet':
u"\uf066",
'md-settings-input-antenna':
u"\uf067",
'md-settings-input-component':
u"\uf068",
'md-settings-input-composite':
u"\uf069",
'md-settings-input-hdmi':
u"\uf06a",
'md-settings-input-svideo':
u"\uf06b",
'md-settings-overscan':
u"\uf06c",
'md-settings-phone':
u"\uf06d",
'md-settings-power':
u"\uf06e",
'md-settings-remote':
u"\uf06f",
'md-settings-voice':
u"\uf070",
'md-shop':
u"\uf071",
'md-shopping-basket':
u"\uf072",
'md-shopping-cart':
u"\uf073",
'md-shop-two':
u"\uf074",
'md-speaker-notes':
u"\uf075",
'md-spellcheck':
u"\uf076",
'md-star-rate':
u"\uf077",
'md-stars':
u"\uf078",
'md-store':
u"\uf079",
'md-subject':
u"\uf07a",
'md-swap-horiz':
u"\uf07b",
'md-swap-vert':
u"\uf07c",
'md-swap-vert-circle':
u"\uf07d",
'md-system-update-tv':
u"\uf07e",
'md-tab':
u"\uf07f",
'md-tab-unselected':
u"\uf080",
'md-theaters':
u"\uf081",
'md-thumb-down':
u"\uf082",
'md-thumbs-up-down':
u"\uf083",
'md-thumb-up':
u"\uf084",
'md-toc':
u"\uf085",
'md-today':
u"\uf086",
'md-track-changes':
u"\uf087",
'md-translate':
u"\uf088",
'md-trending-down':
u"\uf089",
'md-trending-neutral':
u"\uf08a",
'md-trending-up':
u"\uf08b",
'md-turned-in':
u"\uf08c",
'md-turned-in-not':
u"\uf08d",
'md-verified-user':
u"\uf08e",
'md-view-agenda':
u"\uf08f",
'md-view-array':
u"\uf090",
'md-view-carousel':
u"\uf091",
'md-view-column':
u"\uf092",
'md-view-day':
u"\uf093",
'md-view-headline':
u"\uf094",
'md-view-list':
u"\uf095",
'md-view-module':
u"\uf096",
'md-view-quilt':
u"\uf097",
'md-view-stream':
u"\uf098",
'md-view-week':
u"\uf099",
'md-visibility':
u"\uf09a",
'md-visibility-off':
u"\uf09b",
'md-wallet-giftcard':
u"\uf09c",
'md-wallet-membership':
u"\uf09d",
'md-wallet-travel':
u"\uf09e",
'md-work':
u"\uf09f",
'md-error':
u"\uf0a0",
'md-warning':
u"\uf0a1",
'md-album':
u"\uf0a2",
'md-av-timer':
u"\uf0a3",
'md-closed-caption':
u"\uf0a4",
'md-equalizer':
u"\uf0a5",
'md-explicit':
u"\uf0a6",
'md-fast-forward':
u"\uf0a7",
'md-fast-rewind':
u"\uf0a8",
'md-games':
u"\uf0a9",
'md-hearing':
u"\uf0aa",
'md-high-quality':
u"\uf0ab",
'md-loop':
u"\uf0ac",
'md-mic':
u"\uf0ad",
'md-mic-none':
u"\uf0ae",
'md-mic-off':
u"\uf0af",
'md-movie':
u"\uf0b0",
'md-my-library-add':
u"\uf0b1",
'md-my-library-books':
u"\uf0b2",
'md-my-library-music':
u"\uf0b3",
'md-new-releases':
u"\uf0b4",
'md-not-interested':
u"\uf0b5",
'md-pause':
u"\uf0b6",
'md-pause-circle-fill':
u"\uf0b7",
'md-pause-circle-outline':
u"\uf0b8",
'md-play-arrow':
u"\uf0b9",
'md-play-circle-fill':
u"\uf0ba",
'md-play-circle-outline':
u"\uf0bb",
'md-playlist-add':
u"\uf0bc",
'md-play-shopping-bag':
u"\uf0bd",
'md-queue':
u"\uf0be",
'md-queue-music':
u"\uf0bf",
'md-radio':
u"\uf0c0",
'md-recent-actors':
u"\uf0c1",
'md-repeat':
u"\uf0c2",
'md-repeat-one':
u"\uf0c3",
'md-replay':
u"\uf0c4",
'md-shuffle':
u"\uf0c5",
'md-skip-next':
u"\uf0c6",
'md-skip-previous':
u"\uf0c7",
'md-snooze':
u"\uf0c8",
'md-stop':
u"\uf0c9",
'md-subtitles':
u"\uf0ca",
'md-surround-sound':
u"\uf0cb",
'md-videocam':
u"\uf0cc",
'md-videocam-off':
u"\uf0cd",
'md-video-collection':
u"\uf0ce",
'md-volume-down':
u"\uf0cf",
'md-volume-mute':
u"\uf0d0",
'md-volume-off':
u"\uf0d1",
'md-volume-up':
u"\uf0d2",
'md-web':
u"\uf0d3",
'md-business':
u"\uf0d4",
'md-call':
u"\uf0d5",
'md-call-end':
u"\uf0d6",
'md-call-made':
u"\uf0d7",
'md-call-merge':
u"\uf0d8",
'md-call-missed':
u"\uf0d9",
'md-call-received':
u"\uf0da",
'md-call-split':
u"\uf0db",
'md-chat':
u"\uf0dc",
'md-clear-all':
u"\uf0dd",
'md-comment':
u"\uf0de",
'md-contacts':
u"\uf0df",
'md-dialer-sip':
u"\uf0e0",
'md-dialpad':
u"\uf0e1",
'md-dnd-on':
u"\uf0e2",
'md-email':
u"\uf0e3",
'md-forum':
u"\uf0e4",
'md-import-export':
u"\uf0e5",
'md-invert-colors-off':
u"\uf0e6",
'md-invert-colors-on':
u"\uf0e7",
'md-live-help':
u"\uf0e8",
'md-location-off':
u"\uf0e9",
'md-location-on':
u"\uf0ea",
'md-message':
u"\uf0eb",
'md-messenger':
u"\uf0ec",
'md-no-sim':
u"\uf0ed",
'md-phone':
u"\uf0ee",
'md-portable-wifi-off':
u"\uf0ef",
'md-quick-contacts-dialer':
u"\uf0f0",
'md-quick-contacts-mail':
u"\uf0f1",
'md-ring-volume':
u"\uf0f2",
'md-stay-current-landscape':
u"\uf0f3",
'md-stay-current-portrait':
u"\uf0f4",
'md-stay-primary-landscape':
u"\uf0f5",
'md-stay-primary-portrait':
u"\uf0f6",
'md-swap-calls':
u"\uf0f7",
'md-textsms':
u"\uf0f8",
'md-voicemail':
u"\uf0f9",
'md-vpn-key':
u"\uf0fa",
'md-add':
u"\uf0fb",
'md-add-box':
u"\uf0fc",
'md-add-circle':
u"\uf0fd",
'md-add-circle-outline':
u"\uf0fe",
'md-archive':
u"\uf0ff",
'md-backspace':
u"\uf100",
'md-block':
u"\uf101",
'md-clear':
u"\uf102",
'md-content-copy':
u"\uf103",
'md-content-cut':
u"\uf104",
'md-content-paste':
u"\uf105",
'md-create':
u"\uf106",
'md-drafts':
u"\uf107",
'md-filter-list':
u"\uf108",
'md-flag':
u"\uf109",
'md-forward':
u"\uf10a",
'md-gesture':
u"\uf10b",
'md-inbox':
u"\uf10c",
'md-link':
u"\uf10d",
'md-mail':
u"\uf10e",
'md-markunread':
u"\uf10f",
'md-redo':
u"\uf110",
'md-remove':
u"\uf111",
'md-remove-circle':
u"\uf112",
'md-remove-circle-outline':
u"\uf113",
'md-reply':
u"\uf114",
'md-reply-all':
u"\uf115",
'md-report':
u"\uf116",
'md-save':
u"\uf117",
'md-select-all':
u"\uf118",
'md-send':
u"\uf119",
'md-sort':
u"\uf11a",
'md-text-format':
u"\uf11b",
'md-undo':
u"\uf11c",
'md-access-alarm':
u"\uf11d",
'md-access-alarms':
u"\uf11e",
'md-access-time':
u"\uf11f",
'md-add-alarm':
u"\uf120",
'md-airplanemode-off':
u"\uf121",
'md-airplanemode-on':
u"\uf122",
'md-battery-20':
u"\uf123",
'md-battery-30':
u"\uf124",
'md-battery-50':
u"\uf125",
'md-battery-60':
u"\uf126",
'md-battery-80':
u"\uf127",
'md-battery-90':
u"\uf128",
'md-battery-alert':
u"\uf129",
'md-battery-charging-20':
u"\uf12a",
'md-battery-charging-30':
u"\uf12b",
'md-battery-charging-50':
u"\uf12c",
'md-battery-charging-60':
u"\uf12d",
'md-battery-charging-80':
u"\uf12e",
'md-battery-charging-90':
u"\uf12f",
'md-battery-charging-full':
u"\uf130",
'md-battery-full':
u"\uf131",
'md-battery-std':
u"\uf132",
'md-battery-unknown':
u"\uf133",
'md-bluetooth':
u"\uf134",
'md-bluetooth-connected':
u"\uf135",
'md-bluetooth-disabled':
u"\uf136",
'md-bluetooth-searching':
u"\uf137",
'md-brightness-auto':
u"\uf138",
'md-brightness-high':
u"\uf139",
'md-brightness-low':
u"\uf13a",
'md-brightness-medium':
u"\uf13b",
'md-data-usage':
u"\uf13c",
'md-developer-mode':
u"\uf13d",
'md-devices':
u"\uf13e",
'md-dvr':
u"\uf13f",
'md-gps-fixed':
u"\uf140",
'md-gps-not-fixed':
u"\uf141",
'md-gps-off':
u"\uf142",
'md-location-disabled':
u"\uf143",
'md-location-searching':
u"\uf144",
'md-multitrack-audio':
u"\uf145",
'md-network-cell':
u"\uf146",
'md-network-wifi':
u"\uf147",
'md-nfc':
u"\uf148",
'md-now-wallpaper':
u"\uf149",
'md-now-widgets':
u"\uf14a",
'md-screen-lock-landscape':
u"\uf14b",
'md-screen-lock-portrait':
u"\uf14c",
'md-screen-lock-rotation':
u"\uf14d",
'md-screen-rotation':
u"\uf14e",
'md-sd-storage':
u"\uf14f",
'md-settings-system-daydream':
u"\uf150",
'md-signal-cellular-0-bar':
u"\uf151",
'md-signal-cellular-1-bar':
u"\uf152",
'md-signal-cellular-2-bar':
u"\uf153",
'md-signal-cellular-3-bar':
u"\uf154",
'md-signal-cellular-4-bar':
u"\uf155",
'md-signal-cellular-connected-no-internet-0-bar':
u"\uf156",
'md-signal-cellular-connected-no-internet-1-bar':
u"\uf157",
'md-signal-cellular-connected-no-internet-2-bar':
u"\uf158",
'md-signal-cellular-connected-no-internet-3-bar':
u"\uf159",
'md-signal-cellular-connected-no-internet-4-bar':
u"\uf15a",
'md-signal-cellular-no-sim':
u"\uf15b",
'md-signal-cellular-null':
u"\uf15c",
'md-signal-cellular-off':
u"\uf15d",
'md-signal-wifi-0-bar':
u"\uf15e",
'md-signal-wifi-1-bar':
u"\uf15f",
'md-signal-wifi-2-bar':
u"\uf160",
'md-signal-wifi-3-bar':
u"\uf161",
'md-signal-wifi-4-bar':
u"\uf162",
'md-signal-wifi-off':
u"\uf163",
'md-storage':
u"\uf164",
'md-usb':
u"\uf165",
'md-wifi-lock':
u"\uf166",
'md-wifi-tethering':
u"\uf167",
'md-attach-file':
u"\uf168",
'md-attach-money':
u"\uf169",
'md-border-all':
u"\uf16a",
'md-border-bottom':
u"\uf16b",
'md-border-clear':
u"\uf16c",
'md-border-color':
u"\uf16d",
'md-border-horizontal':
u"\uf16e",
'md-border-inner':
u"\uf16f",
'md-border-left':
u"\uf170",
'md-border-outer':
u"\uf171",
'md-border-right':
u"\uf172",
'md-border-style':
u"\uf173",
'md-border-top':
u"\uf174",
'md-border-vertical':
u"\uf175",
'md-format-align-center':
u"\uf176",
'md-format-align-justify':
u"\uf177",
'md-format-align-left':
u"\uf178",
'md-format-align-right':
u"\uf179",
'md-format-bold':
u"\uf17a",
'md-format-clear':
u"\uf17b",
'md-format-color-fill':
u"\uf17c",
'md-format-color-reset':
u"\uf17d",
'md-format-color-text':
u"\uf17e",
'md-format-indent-decrease':
u"\uf17f",
'md-format-indent-increase':
u"\uf180",
'md-format-italic':
u"\uf181",
'md-format-line-spacing':
u"\uf182",
'md-format-list-bulleted':
u"\uf183",
'md-format-list-numbered':
u"\uf184",
'md-format-paint':
u"\uf185",
'md-format-quote':
u"\uf186",
'md-format-size':
u"\uf187",
'md-format-strikethrough':
u"\uf188",
'md-format-textdirection-l-to-r':
u"\uf189",
'md-format-textdirection-r-to-l':
u"\uf18a",
'md-format-underline':
u"\uf18b",
'md-functions':
u"\uf18c",
'md-insert-chart':
u"\uf18d",
'md-insert-comment':
u"\uf18e",
'md-insert-drive-file':
u"\uf18f",
'md-insert-emoticon':
u"\uf190",
'md-insert-invitation':
u"\uf191",
'md-insert-link':
u"\uf192",
'md-insert-photo':
u"\uf193",
'md-merge-type':
u"\uf194",
'md-mode-comment':
u"\uf195",
'md-mode-edit':
u"\uf196",
'md-publish':
u"\uf197",
'md-vertical-align-bottom':
u"\uf198",
'md-vertical-align-center':
u"\uf199",
'md-vertical-align-top':
u"\uf19a",
'md-wrap-text':
u"\uf19b",
'md-attachment':
u"\uf19c",
'md-cloud':
u"\uf19d",
'md-cloud-circle':
u"\uf19e",
'md-cloud-done':
u"\uf19f",
'md-cloud-download':
u"\uf1a0",
'md-cloud-off':
u"\uf1a1",
'md-cloud-queue':
u"\uf1a2",
'md-cloud-upload':
u"\uf1a3",
'md-file-download':
u"\uf1a4",
'md-file-upload':
u"\uf1a5",
'md-folder':
u"\uf1a6",
'md-folder-open':
u"\uf1a7",
'md-folder-shared':
u"\uf1a8",
'md-cast':
u"\uf1a9",
'md-cast-connected':
u"\uf1aa",
'md-computer':
u"\uf1ab",
'md-desktop-mac':
u"\uf1ac",
'md-desktop-windows':
u"\uf1ad",
'md-dock':
u"\uf1ae",
'md-gamepad':
u"\uf1af",
'md-headset':
u"\uf1b0",
'md-headset-mic':
u"\uf1b1",
'md-keyboard':
u"\uf1b2",
'md-keyboard-alt':
u"\uf1b3",
'md-keyboard-arrow-down':
u"\uf1b4",
'md-keyboard-arrow-left':
u"\uf1b5",
'md-keyboard-arrow-right':
u"\uf1b6",
'md-keyboard-arrow-up':
u"\uf1b7",
'md-keyboard-backspace':
u"\uf1b8",
'md-keyboard-capslock':
u"\uf1b9",
'md-keyboard-control':
u"\uf1ba",
'md-keyboard-hide':
u"\uf1bb",
'md-keyboard-return':
u"\uf1bc",
'md-keyboard-tab':
u"\uf1bd",
'md-keyboard-voice':
u"\uf1be",
'md-laptop':
u"\uf1bf",
'md-laptop-chromebook':
u"\uf1c0",
'md-laptop-mac':
u"\uf1c1",
'md-laptop-windows':
u"\uf1c2",
'md-memory':
u"\uf1c3",
'md-mouse':
u"\uf1c4",
'md-phone-android':
u"\uf1c5",
'md-phone-iphone':
u"\uf1c6",
'md-phonelink':
u"\uf1c7",
'md-phonelink-off':
u"\uf1c8",
'md-security':
u"\uf1c9",
'md-sim-card':
u"\uf1ca",
'md-smartphone':
u"\uf1cb",
'md-speaker':
u"\uf1cc",
'md-tablet':
u"\uf1cd",
'md-tablet-android':
u"\uf1ce",
'md-tablet-mac':
u"\uf1cf",
'md-tv':
u"\uf1d0",
'md-watch':
u"\uf1d1",
'md-add-to-photos':
u"\uf1d2",
'md-adjust':
u"\uf1d3",
'md-assistant-photo':
u"\uf1d4",
'md-audiotrack':
u"\uf1d5",
'md-blur-circular':
u"\uf1d6",
'md-blur-linear':
u"\uf1d7",
'md-blur-off':
u"\uf1d8",
'md-blur-on':
u"\uf1d9",
'md-brightness-1':
u"\uf1da",
'md-brightness-2':
u"\uf1db",
'md-brightness-3':
u"\uf1dc",
'md-brightness-4':
u"\uf1dd",
'md-brightness-5':
u"\uf1de",
'md-brightness-6':
u"\uf1df",
'md-brightness-7':
u"\uf1e0",
'md-brush':
u"\uf1e1",
'md-camera':
u"\uf1e2",
'md-camera-alt':
u"\uf1e3",
'md-camera-front':
u"\uf1e4",
'md-camera-rear':
u"\uf1e5",
'md-camera-roll':
u"\uf1e6",
'md-center-focus-strong':
u"\uf1e7",
'md-center-focus-weak':
u"\uf1e8",
'md-collections':
u"\uf1e9",
'md-colorize':
u"\uf1ea",
'md-color-lens':
u"\uf1eb",
'md-compare':
u"\uf1ec",
'md-control-point':
u"\uf1ed",
'md-control-point-duplicate':
u"\uf1ee",
'md-crop':
u"\uf1ef",
'md-crop-3-2':
u"\uf1f0",
'md-crop-5-4':
u"\uf1f1",
'md-crop-7-5':
u"\uf1f2",
'md-crop-16-9':
u"\uf1f3",
'md-crop-din':
u"\uf1f4",
'md-crop-free':
u"\uf1f5",
'md-crop-landscape':
u"\uf1f6",
'md-crop-original':
u"\uf1f7",
'md-crop-portrait':
u"\uf1f8",
'md-crop-square':
u"\uf1f9",
'md-dehaze':
u"\uf1fa",
'md-details':
u"\uf1fb",
'md-edit':
u"\uf1fc",
'md-exposure':
u"\uf1fd",
'md-exposure-minus-1':
u"\uf1fe",
'md-exposure-minus-2':
u"\uf1ff",
'md-exposure-zero':
u"\uf200",
'md-exposure-plus-1':
u"\uf201",
'md-exposure-plus-2':
u"\uf202",
'md-filter':
u"\uf203",
'md-filter-1':
u"\uf204",
'md-filter-2':
u"\uf205",
'md-filter-3':
u"\uf206",
'md-filter-4':
u"\uf207",
'md-filter-5':
u"\uf208",
'md-filter-6':
u"\uf209",
'md-filter-7':
u"\uf20a",
'md-filter-8':
u"\uf20b",
'md-filter-9':
u"\uf20c",
'md-filter-9-plus':
u"\uf20d",
'md-filter-b-and-w':
u"\uf20e",
'md-filter-center-focus':
u"\uf20f",
'md-filter-drama':
u"\uf210",
'md-filter-frames':
u"\uf211",
'md-filter-hdr':
u"\uf212",
'md-filter-none':
u"\uf213",
'md-filter-tilt-shift':
u"\uf214",
'md-filter-vintage':
u"\uf215",
'md-flare':
u"\uf216",
'md-flash-auto':
u"\uf217",
'md-flash-off':
u"\uf218",
'md-flash-on':
u"\uf219",
'md-flip':
u"\uf21a",
'md-gradient':
u"\uf21b",
'md-grain':
u"\uf21c",
'md-grid-off':
u"\uf21d",
'md-grid-on':
u"\uf21e",
'md-hdr-off':
u"\uf21f",
'md-hdr-on':
u"\uf220",
'md-hdr-strong':
u"\uf221",
'md-hdr-weak':
u"\uf222",
'md-healing':
u"\uf223",
'md-image':
u"\uf224",
'md-image-aspect-ratio':
u"\uf225",
'md-iso':
u"\uf226",
'md-landscape':
u"\uf227",
'md-leak-add':
u"\uf228",
'md-leak-remove':
u"\uf229",
'md-lens':
u"\uf22a",
'md-looks':
u"\uf22b",
'md-looks-1':
u"\uf22c",
'md-looks-2':
u"\uf22d",
'md-looks-3':
u"\uf22e",
'md-looks-4':
u"\uf22f",
'md-looks-5':
u"\uf230",
'md-looks-6':
u"\uf231",
'md-loupe':
u"\uf232",
'md-movie-creation':
u"\uf233",
'md-nature':
u"\uf234",
'md-nature-people':
u"\uf235",
'md-navigate-before':
u"\uf236",
'md-navigate-next':
u"\uf237",
'md-palette':
u"\uf238",
'md-panorama':
u"\uf239",
'md-panorama-fisheye':
u"\uf23a",
'md-panorama-horizontal':
u"\uf23b",
'md-panorama-vertical':
u"\uf23c",
'md-panorama-wide-angle':
u"\uf23d",
'md-photo':
u"\uf23e",
'md-photo-album':
u"\uf23f",
'md-photo-camera':
u"\uf240",
'md-photo-library':
u"\uf241",
'md-portrait':
u"\uf242",
'md-remove-red-eye':
u"\uf243",
'md-rotate-left':
u"\uf244",
'md-rotate-right':
u"\uf245",
'md-slideshow':
u"\uf246",
'md-straighten':
u"\uf247",
'md-style':
u"\uf248",
'md-switch-camera':
u"\uf249",
'md-switch-video':
u"\uf24a",
'md-tag-faces':
u"\uf24b",
'md-texture':
u"\uf24c",
'md-timelapse':
u"\uf24d",
'md-timer':
u"\uf24e",
'md-timer-3':
u"\uf24f",
'md-timer-10':
u"\uf250",
'md-timer-auto':
u"\uf251",
'md-timer-off':
u"\uf252",
'md-tonality':
u"\uf253",
'md-transform':
u"\uf254",
'md-tune':
u"\uf255",
'md-wb-auto':
u"\uf256",
'md-wb-cloudy':
u"\uf257",
'md-wb-incandescent':
u"\uf258",
'md-wb-irradescent':
u"\uf259",
'md-wb-sunny':
u"\uf25a",
'md-beenhere':
u"\uf25b",
'md-directions':
u"\uf25c",
'md-directions-bike':
u"\uf25d",
'md-directions-bus':
u"\uf25e",
'md-directions-car':
u"\uf25f",
'md-directions-ferry':
u"\uf260",
'md-directions-subway':
u"\uf261",
'md-directions-train':
u"\uf262",
'md-directions-transit':
u"\uf263",
'md-directions-walk':
u"\uf264",
'md-flight':
u"\uf265",
'md-hotel':
u"\uf266",
'md-layers':
u"\uf267",
'md-layers-clear':
u"\uf268",
'md-local-airport':
u"\uf269",
'md-local-atm':
u"\uf26a",
'md-local-attraction':
u"\uf26b",
'md-local-bar':
u"\uf26c",
'md-local-cafe':
u"\uf26d",
'md-local-car-wash':
u"\uf26e",
'md-local-convenience-store':
u"\uf26f",
'md-local-drink':
u"\uf270",
'md-local-florist':
u"\uf271",
'md-local-gas-station':
u"\uf272",
'md-local-grocery-store':
u"\uf273",
'md-local-hospital':
u"\uf274",
'md-local-hotel':
u"\uf275",
'md-local-laundry-service':
u"\uf276",
'md-local-library':
u"\uf277",
'md-local-mall':
u"\uf278",
'md-local-movies':
u"\uf279",
'md-local-offer':
u"\uf27a",
'md-local-parking':
u"\uf27b",
'md-local-pharmacy':
u"\uf27c",
'md-local-phone':
u"\uf27d",
'md-local-pizza':
u"\uf27e",
'md-local-play':
u"\uf27f",
'md-local-post-office':
u"\uf280",
'md-local-print-shop':
u"\uf281",
'md-local-restaurant':
u"\uf282",
'md-local-see':
u"\uf283",
'md-local-shipping':
u"\uf284",
'md-local-taxi':
u"\uf285",
'md-location-history':
u"\uf286",
'md-map':
u"\uf287",
'md-my-location':
u"\uf288",
'md-navigation':
u"\uf289",
'md-pin-drop':
u"\uf28a",
'md-place':
u"\uf28b",
'md-rate-review':
u"\uf28c",
'md-restaurant-menu':
u"\uf28d",
'md-satellite':
u"\uf28e",
'md-store-mall-directory':
u"\uf28f",
'md-terrain':
u"\uf290",
'md-traffic':
u"\uf291",
'md-apps':
u"\uf292",
'md-cancel':
u"\uf293",
'md-arrow-drop-down-circle':
u"\uf294",
'md-arrow-drop-down':
u"\uf295",
'md-arrow-drop-up':
u"\uf296",
'md-arrow-back':
u"\uf297",
'md-arrow-forward':
u"\uf298",
'md-check':
u"\uf299",
'md-close':
u"\uf29a",
'md-chevron-left':
u"\uf29b",
'md-chevron-right':
u"\uf29c",
'md-expand-less':
u"\uf29d",
'md-expand-more':
u"\uf29e",
'md-fullscreen':
u"\uf29f",
'md-fullscreen-exit':
u"\uf2a0",
'md-menu':
u"\uf2a1",
'md-more-horiz':
u"\uf2a2",
'md-more-vert':
u"\uf2a3",
'md-refresh':
u"\uf2a4",
'md-unfold-less':
u"\uf2a5",
'md-unfold-more':
u"\uf2a6",
'md-adb':
u"\uf2a7",
'md-bluetooth-audio':
u"\uf2a8",
'md-disc-full':
u"\uf2a9",
'md-dnd-forwardslash':
u"\uf2aa",
'md-do-not-disturb':
u"\uf2ab",
'md-drive-eta':
u"\uf2ac",
'md-event-available':
u"\uf2ad",
'md-event-busy':
u"\uf2ae",
'md-event-note':
u"\uf2af",
'md-folder-special':
u"\uf2b0",
'md-mms':
u"\uf2b1",
'md-more':
u"\uf2b2",
'md-network-locked':
u"\uf2b3",
'md-phone-bluetooth-speaker':
u"\uf2b4",
'md-phone-forwarded':
u"\uf2b5",
'md-phone-in-talk':
u"\uf2b6",
'md-phone-locked':
u"\uf2b7",
'md-phone-missed':
u"\uf2b8",
'md-phone-paused':
u"\uf2b9",
'md-play-download':
u"\uf2ba",
'md-play-install':
u"\uf2bb",
'md-sd-card':
u"\uf2bc",
'md-sim-card-alert':
u"\uf2bd",
'md-sms':
u"\uf2be",
'md-sms-failed':
u"\uf2bf",
'md-sync':
u"\uf2c0",
'md-sync-disabled':
u"\uf2c1",
'md-sync-problem':
u"\uf2c2",
'md-system-update':
u"\uf2c3",
'md-tap-and-play':
u"\uf2c4",
'md-time-to-leave':
u"\uf2c5",
'md-vibration':
u"\uf2c6",
'md-voice-chat':
u"\uf2c7",
'md-vpn-lock':
u"\uf2c8",
'md-cake':
u"\uf2c9",
'md-domain':
u"\uf2ca",
'md-location-city':
u"\uf2cb",
'md-mood':
u"\uf2cc",
'md-notifications-none':
u"\uf2cd",
'md-notifications':
u"\uf2ce",
'md-notifications-off':
u"\uf2cf",
'md-notifications-on':
u"\uf2d0",
'md-notifications-paused':
u"\uf2d1",
'md-pages':
u"\uf2d2",
'md-party-mode':
u"\uf2d3",
'md-group':
u"\uf2d4",
'md-group-add':
u"\uf2d5",
'md-people':
u"\uf2d6",
'md-people-outline':
u"\uf2d7",
'md-person':
u"\uf2d8",
'md-person-add':
u"\uf2d9",
'md-person-outline':
u"\uf2da",
'md-plus-one':
u"\uf2db",
'md-poll':
u"\uf2dc",
'md-public':
u"\uf2dd",
'md-school':
u"\uf2de",
'md-share':
u"\uf2df",
'md-whatshot':
u"\uf2e0",
'md-check-box':
u"\uf2e1",
'md-check-box-outline-blank':
u"\uf2e2",
'md-radio-button-off':
u"\uf2e3",
'md-radio-button-on':
u"\uf2e4",
'md-star':
u"\uf2e5",
'md-star-half':
u"\uf2e6",
'md-star-outline':
u"\uf2e7",
}
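# Tiny usage sketch (added for exposition): looking up an icon's private-use-
# area codepoint, e.g. for rendering a glyph with the icon font.
def _example_icon_lookup(name='md-alarm'):
    glyph = md_icons[name]
    return glyph, 'U+%04X' % ord(glyph)  # (u'\uf008', 'U+F008')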
|
|
# -*- coding: utf-8 -*-
"""The old AMQP result backend, deprecated and replaced by the RPC backend."""
from __future__ import absolute_import, unicode_literals
import socket
from collections import deque
from operator import itemgetter
from kombu import Consumer, Exchange, Producer, Queue
from celery import states
from celery.exceptions import TimeoutError
from celery.five import monotonic, range
from celery.utils import deprecated
from celery.utils.log import get_logger
from .base import BaseBackend
__all__ = ('BacklogLimitExceeded', 'AMQPBackend')
logger = get_logger(__name__)
class BacklogLimitExceeded(Exception):
"""Too much state history to fast-forward."""
def repair_uuid(s):
    # Historically the dashes in UUIDs are removed from AMQ entity names,
    # but there's no known reason to do so. Hopefully we'll be able to fix
    # this in v4.0.
return '%s-%s-%s-%s-%s' % (s[:8], s[8:12], s[12:16], s[16:20], s[20:])
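# Worked example (added for illustration): repair_uuid() rebuilds the
# canonical dashed form from the 32-character hex string that rkey() produces.
def _example_repair_uuid():
    import uuid
    u = uuid.uuid4()
    assert repair_uuid(u.hex) == str(u)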
class NoCacheQueue(Queue):
can_cache_declaration = False
class AMQPBackend(BaseBackend):
"""The AMQP result backend.
Deprecated: Please use the RPC backend or a persistent backend.
"""
Exchange = Exchange
Queue = NoCacheQueue
Consumer = Consumer
Producer = Producer
BacklogLimitExceeded = BacklogLimitExceeded
persistent = True
supports_autoexpire = True
supports_native_join = True
retry_policy = {
'max_retries': 20,
'interval_start': 0,
'interval_step': 1,
'interval_max': 1,
}
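    # Reading of the schedule above (annotation added for clarity): publishing
    # is retried up to 20 times, starting immediately (interval_start=0) and
    # backing off by 1 second per retry (interval_step) to a cap of 1 second
    # between attempts (interval_max), roughly 19 seconds of retrying overall.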
def __init__(self, app, connection=None, exchange=None, exchange_type=None,
persistent=None, serializer=None, auto_delete=True, **kwargs):
deprecated.warn(
'The AMQP result backend', deprecation='4.0', removal='5.0',
alternative='Please use RPC backend or a persistent backend.')
super(AMQPBackend, self).__init__(app, **kwargs)
conf = self.app.conf
self._connection = connection
self.persistent = self.prepare_persistent(persistent)
self.delivery_mode = 2 if self.persistent else 1
exchange = exchange or conf.result_exchange
exchange_type = exchange_type or conf.result_exchange_type
self.exchange = self._create_exchange(
exchange, exchange_type, self.delivery_mode,
)
self.serializer = serializer or conf.result_serializer
self.auto_delete = auto_delete
def _create_exchange(self, name, type='direct', delivery_mode=2):
return self.Exchange(name=name,
type=type,
delivery_mode=delivery_mode,
durable=self.persistent,
auto_delete=False)
def _create_binding(self, task_id):
name = self.rkey(task_id)
return self.Queue(
name=name,
exchange=self.exchange,
routing_key=name,
durable=self.persistent,
auto_delete=self.auto_delete,
expires=self.expires,
)
def revive(self, channel):
pass
def rkey(self, task_id):
return task_id.replace('-', '')
def destination_for(self, task_id, request):
if request:
return self.rkey(task_id), request.correlation_id or task_id
return self.rkey(task_id), task_id
def store_result(self, task_id, result, state,
traceback=None, request=None, **kwargs):
"""Send task return value and state."""
routing_key, correlation_id = self.destination_for(task_id, request)
if not routing_key:
return
with self.app.amqp.producer_pool.acquire(block=True) as producer:
producer.publish(
{'task_id': task_id, 'status': state,
'result': self.encode_result(result, state),
'traceback': traceback,
'children': self.current_task_children(request)},
exchange=self.exchange,
routing_key=routing_key,
correlation_id=correlation_id,
serializer=self.serializer,
retry=True, retry_policy=self.retry_policy,
declare=self.on_reply_declare(task_id),
delivery_mode=self.delivery_mode,
)
return result
def on_reply_declare(self, task_id):
return [self._create_binding(task_id)]
def wait_for(self, task_id, timeout=None, cache=True,
no_ack=True, on_interval=None,
READY_STATES=states.READY_STATES,
PROPAGATE_STATES=states.PROPAGATE_STATES,
**kwargs):
cached_meta = self._cache.get(task_id)
if cache and cached_meta and \
cached_meta['status'] in READY_STATES:
return cached_meta
else:
try:
return self.consume(task_id, timeout=timeout, no_ack=no_ack,
on_interval=on_interval)
except socket.timeout:
raise TimeoutError('The operation timed out.')
def get_task_meta(self, task_id, backlog_limit=1000):
# Polling and using basic_get
with self.app.pool.acquire_channel(block=True) as (_, channel):
binding = self._create_binding(task_id)(channel)
binding.declare()
prev = latest = acc = None
for i in range(backlog_limit): # spool ffwd
acc = binding.get(
accept=self.accept, no_ack=False,
)
if not acc: # no more messages
break
if acc.payload['task_id'] == task_id:
prev, latest = latest, acc
if prev:
# backends are not expected to keep history,
# so we delete everything except the most recent state.
prev.ack()
prev = None
else:
raise self.BacklogLimitExceeded(task_id)
if latest:
payload = self._cache[task_id] = self.meta_from_decoded(
latest.payload)
latest.requeue()
return payload
else:
# no new state, use previous
try:
return self._cache[task_id]
except KeyError:
# result probably pending.
return {'status': states.PENDING, 'result': None}
poll = get_task_meta # XXX compat
def drain_events(self, connection, consumer,
timeout=None, on_interval=None, now=monotonic, wait=None):
wait = wait or connection.drain_events
results = {}
def callback(meta, message):
if meta['status'] in states.READY_STATES:
results[meta['task_id']] = self.meta_from_decoded(meta)
consumer.callbacks[:] = [callback]
time_start = now()
while 1:
# Total time spent may exceed a single call to wait()
if timeout and now() - time_start >= timeout:
raise socket.timeout()
try:
wait(timeout=1)
except socket.timeout:
pass
if on_interval:
on_interval()
if results: # got event on the wanted channel.
break
self._cache.update(results)
return results
def consume(self, task_id, timeout=None, no_ack=True, on_interval=None):
wait = self.drain_events
with self.app.pool.acquire_channel(block=True) as (conn, channel):
binding = self._create_binding(task_id)
with self.Consumer(channel, binding,
no_ack=no_ack, accept=self.accept) as consumer:
while 1:
try:
return wait(
conn, consumer, timeout, on_interval)[task_id]
except KeyError:
continue
def _many_bindings(self, ids):
return [self._create_binding(task_id) for task_id in ids]
def get_many(self, task_ids, timeout=None, no_ack=True,
on_message=None, on_interval=None,
now=monotonic, getfields=itemgetter('status', 'task_id'),
READY_STATES=states.READY_STATES,
PROPAGATE_STATES=states.PROPAGATE_STATES, **kwargs):
with self.app.pool.acquire_channel(block=True) as (conn, channel):
ids = set(task_ids)
cached_ids = set()
mark_cached = cached_ids.add
for task_id in ids:
try:
cached = self._cache[task_id]
except KeyError:
pass
else:
if cached['status'] in READY_STATES:
yield task_id, cached
mark_cached(task_id)
ids.difference_update(cached_ids)
results = deque()
push_result = results.append
push_cache = self._cache.__setitem__
decode_result = self.meta_from_decoded
def _on_message(message):
body = decode_result(message.decode())
if on_message is not None:
on_message(body)
state, uid = getfields(body)
if state in READY_STATES:
                if uid in task_ids:
                    push_result(body)
                else:
                    push_cache(uid, body)
bindings = self._many_bindings(task_ids)
with self.Consumer(channel, bindings, on_message=_on_message,
accept=self.accept, no_ack=no_ack):
wait = conn.drain_events
popleft = results.popleft
while ids:
wait(timeout=timeout)
while results:
state = popleft()
task_id = state['task_id']
ids.discard(task_id)
push_cache(task_id, state)
yield task_id, state
if on_interval:
on_interval()
def reload_task_result(self, task_id):
raise NotImplementedError(
'reload_task_result is not supported by this backend.')
def reload_group_result(self, task_id):
"""Reload group result, even if it has been previously fetched."""
raise NotImplementedError(
'reload_group_result is not supported by this backend.')
def save_group(self, group_id, result):
raise NotImplementedError(
'save_group is not supported by this backend.')
def restore_group(self, group_id, cache=True):
raise NotImplementedError(
'restore_group is not supported by this backend.')
def delete_group(self, group_id):
raise NotImplementedError(
'delete_group is not supported by this backend.')
    def __reduce__(self, args=(), kwargs=None):
        # copy to avoid mutating a shared default dict across calls
        kwargs = dict(kwargs or {})
        kwargs.update(
connection=self._connection,
exchange=self.exchange.name,
exchange_type=self.exchange.type,
persistent=self.persistent,
serializer=self.serializer,
auto_delete=self.auto_delete,
expires=self.expires,
)
return super(AMQPBackend, self).__reduce__(args, kwargs)
def as_uri(self, include_password=True):
return 'amqp://'
|
|
# coding=utf-8
from bisect import bisect_left, bisect_right
from collections import namedtuple, defaultdict
from datetime import datetime, timezone
from functools import total_ordering
from itertools import groupby, tee
from operator import itemgetter
import random
from intervaltree import IntervalTree, Interval
#
# Some tricky sorting bullshit
#
class _LeastValue(object):
"""
Instances of this class always sort before any other object, and are only equivalent
to other LeastValue instances.
"""
__slots__ = ()
_instance = None
# def __new__(cls, *args, **kwargs):
# global LeastValue
# LeastValue = LeastValue or super(_LeastValue, cls).__new__(cls, *args)
# return LeastValue
    def __lt__(self, other):
        # strictly less than everything except another LeastValue, so that
        # __lt__ stays consistent with __eq__
        return type(other) is not _LeastValue

    def __le__(self, other):
        return True
def __eq__(self, other):
return type(other) is _LeastValue
__ge__ = __eq__
def __gt__(self, other):
return False
def __str__(self):
return "<Least value>"
def __repr__(self):
return "LeastValue"
__hash__ = object.__hash__
# some pickling singleton trickery, since there is only ever reason to have one of these
@staticmethod
def _provider():
return LeastValue
def __reduce_ex__(self, *args, **kwargs):
return _LeastValue._provider, ()
LeastValue = _LeastValue()
class _GreatestValue(object):
"""
    Instances of this class always sort after any other object, and are only equivalent
to other GreatestValue instances.
"""
__slots__ = ()
    def __gt__(self, other):
        # strictly greater than everything except another GreatestValue, so
        # that __gt__ stays consistent with __eq__
        return type(other) is not _GreatestValue

    def __ge__(self, other):
        return True
def __eq__(self, other):
return type(other) is _GreatestValue
__le__ = __eq__
def __lt__(self, other):
return False
def __str__(self):
return "<Greatest value>"
def __repr__(self):
return "GreatestValue"
__hash__ = object.__hash__
# TODO: __add__ ?
# some pickling singleton trickery, since there is only ever reason to have one of these
@staticmethod
def _provider():
return GreatestValue
def __reduce_ex__(self, *args, **kwargs):
return _GreatestValue._provider, ()
GreatestValue = _GreatestValue()
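# Quick demonstration (added for exposition): the sentinels bound any sortable
# sequence, which makes them handy stand-ins for "no lower/upper limit".
def _example_sentinels():
    assert sorted([3, GreatestValue, LeastValue]) == [LeastValue, 3,
                                                      GreatestValue]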
class JustBefore(object):
"""
`JustBefore(x)` sorts exactly the same as `x`, except it is always less than x rather than equal.
"""
__slots__ = ('wrap',)
def __init__(self, wrap):
self.wrap = wrap
def __gt__(self, other):
return self.wrap > other
__ge__ = __gt__
def __eq__(self, other):
return type(other) is JustBefore and other.wrap == self.wrap
def __le__(self, other):
return self.wrap <= other
__lt__ = __le__
def __hash__(self):
return hash((self.wrap, JustBefore))
def __repr__(self):
return "JustBefore({})".format(repr(self.wrap))
class JustAfter(object):
"""
`JustAfter(x)` sorts exactly the same as `x`, except it is always greater than x rather than equal.
"""
__slots__ = ('wrap',)
def __init__(self, wrap):
self.wrap = wrap
def __gt__(self, other):
return self.wrap >= other
__ge__ = __gt__
def __eq__(self, other):
return type(other) is JustAfter and other.wrap == self.wrap
def __le__(self, other):
return self.wrap < other
__lt__ = __le__
def __hash__(self):
return hash((self.wrap, JustAfter))
def __repr__(self):
return "JustAfter({})".format(repr(self.wrap))
#
# End tricky sorting bullshit
#
class _SentinelCls(object):
__slots__ = ()
def __repr__(self):
return '_SENTINEL'
@staticmethod
def _provider():
return _SENTINEL
def __reduce_ex__(self, *args, **kwargs):
return _SentinelCls._provider, ()
_SENTINEL = _SentinelCls()
def now():
return datetime.now(timezone.utc)
def interval_overlap(a, b):
"""
:type a: Interval
:type b: Interval
:rtype: Interval
    :return: An interval representing the overlapping portion of a and b, with the value of a.
"""
# if a.data != b.data:
# return Interval(a.begin, a.begin)
# else:
return Interval(max(a.begin, b.begin), min(a.end, b.end), a.data)
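# Worked example (added for illustration): the overlap of [0, 10) and [5, 20)
# is [5, 10), carrying the first interval's data.
def _example_interval_overlap():
    a = Interval(0, 10, 'a')
    b = Interval(5, 20, 'b')
    assert interval_overlap(a, b) == Interval(5, 10, 'a')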
def interval_tree_intersection(tree, intervals):
"""
:type tree: IntervalTree
:type intervals: collections.Iterable[Interval]
:rtype: collections.Iterable[Interval]
    :return: An iterable of intervals covering every overlap between an interval
        in the tree and one of the given intervals where both carry the same
        data value.
"""
for iv in intervals:
for touching in tree[iv]:
if iv.data == touching.data:
yield interval_overlap(iv, touching)
def merge_interval_overlaps(intervals, open_ended=()):
"""
    Merge a set of intervals so that a minimum number of intervals remain: any
    group of intervals with the same data value that covers a single contiguous
    range is replaced with one interval over that range.
open_ended may be omitted or a dictionary of key:begin pairs representing open-ended intervals (intervals
that have a beginning but no set end.)
If it is included, it may be modified to merge intervals into it.
A new iterable of intervals is returned. Due to the algorithm used, the iterable of intervals produced will be
first grouped by key (in arbitrary key-order), then sorted by interval position.
"""
# pull intervals into bins sorted by key
by_key = defaultdict(list)
for iv in intervals:
by_key[iv.data].append(iv)
# render merged intervals
for key, bucket in by_key.items():
bucket.sort(key=itemgetter(0)) # sort intervals by begin
if key in open_ended:
first = bucket[0].begin # first and last keep track of the bounds of the current run
if first >= open_ended[key]:
continue # all in open-ended; skip the rest
last = bucket[0].end
if last >= open_ended[key]:
open_ended[key] = first # merge with this interval
continue # skip the rest
for iv in bucket[1:]:
if iv.begin <= last: # new interval overlaps the current run
last = max(last, iv.end) # merge
if last >= open_ended[key]: # check for collision with open interval
open_ended[key] = first # merge with open-ended
break # all intervals after this would also merge with the open-ended
else: # current run ends
yield Interval(begin=first, end=last, data=key) # yield accumulated interval
first = iv.begin # start new run
if first >= open_ended[key]:
break # all intervals from here on out will merge silently with open interval
last = iv.end
if last >= open_ended[key]:
open_ended[key] = first # merge with open-ended
break # skip the rest
else: # did not merge with an open-ended interval
yield Interval(begin=first, end=last, data=key) # yield the last run
# This is much simpler. Understand this first; the above just keeps checking open-ended
# whenever first or last are set, and is otherwise identical.
else: # no open-ended interval for this key
first = bucket[0].begin # first and last keep track of the bounds of the current run
last = bucket[0].end
for iv in bucket[1:]:
if iv.begin <= last: # new interval overlaps the current run
last = max(last, iv.end) # merge
else: # current run ends
yield Interval(begin=first, end=last, data=key) # yield accumulated interval
first = iv.begin # start new run
last = iv.end
yield Interval(begin=first, end=last, data=key) # yield the last run
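# Usage sketch (added for exposition, hypothetical values): two overlapping
# intervals with the same data value collapse into one; the output is grouped
# by key in arbitrary order, hence the sorted() call.
def _example_merge_interval_overlaps():
    ivs = [Interval(0, 5, 'a'), Interval(3, 9, 'a'), Interval(9, 12, 'b')]
    merged = sorted(merge_interval_overlaps(ivs))
    assert merged == [Interval(0, 9, 'a'), Interval(9, 12, 'b')]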
class _ViewMixin(object):
"""Mixin for set operators on dictionary views"""
__slots__ = ()
    def __iter__(self):
        # concrete views supply iteration; this mixin only adds set operators
        raise NotImplementedError
def __and__(self, other):
result = set()
for x in self:
if x in other:
result.add(x)
return result
def __xor__(self, other):
result = set()
for x in self:
if x not in other:
result.add(x)
for x in other:
if x not in self:
result.add(x)
return result
def __or__(self, other):
result = set(self)
result.update(other)
return result
# Sample = namedtuple("Sample", ["time", "value"])
@total_ordering
class Sample(namedtuple('Sample', 'point value')):
"""Sample(time, value)"""
__slots__ = ()
def __lt__(self, other):
if not isinstance(other, tuple):
raise TypeError
return self[0] < other[0]
def __le__(self, other):
return self < other or self == other
def __ge__(self, other):
return self > other or self == other
def __gt__(self, other):
if not isinstance(other, tuple):
raise TypeError
return self[0] > other[0]
def __hash__(self):
return tuple.__hash__(self)
def __reduce_ex__(self, *args, **kwargs):
return Sample, tuple(self)
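# Small sketch (added for exposition): Sample orders by its point alone, so
# two samples at the same point compare as neither less nor greater.
def _example_sample_ordering():
    assert Sample(1, 'x') < Sample(2, 'a')
    assert not Sample(2, 'x') < Sample(2, 'a')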
class SampledValue(object):
"""
    Holds a timeline of observations for a single value. The structure allows any
    number of consecutive entries with the same value, so you can keep track of the
    known values of a variable that may change unobserved, where the gaps between
    observations matter.
This is much more useful as a history of observations than as a canonical history.
"""
__slots__ = ('history',)
def __init__(self, samples=(), *, initial_value=None, time=None):
if initial_value is None and time is None:
self.history = sorted(samples)
else:
if samples:
raise ValueError("Both initial value and samples were provided")
self.history = [Sample(point=time if time is not None else now(), value=initial_value)]
def __len__(self):
return len(self.history)
def __iter__(self):
return iter(self.history)
def __reversed__(self):
return reversed(self.history)
def sort(self):
self.history.sort()
def all_values(self):
result = set()
for entry in self.history:
result.add(entry.value)
return result
def get(self, *, time=None, default=None):
if not self.history: # history is empty
if default is not None:
return default
raise KeyError
if time is None:
# fetch the current value
return self.history[-1].value
else:
# get the index of the last sample before or at time
index = bisect_right(self.history, (time,)) - 1
if index < 0: # no samples this far back
if default is not None:
return default
raise KeyError
return self.history[index].value
def set(self, value, *, time=None):
if time is None:
self.history.append(Sample(point=now(), value=value))
else:
# find the first sample at >= time
index = bisect_left(self.history, (time,))
entry = Sample(point=time, value=value)
# again, we must check if there is an entry at that exact time
if index < len(self.history) and self.history[index].point == time:
self.history[index] = entry
else:
self.history.insert(index, entry)
def begin(self):
return self.history[0].point
def time_slice(self, begin, end):
"""
Return an iterable over all the intervals intersecting the given half-open interval from begin to end,
chopped to fit within it
"""
if begin is None or end is None:
raise ValueError("Both the beginning and end of the interval must be included")
if begin >= end or not self.history:
return
# get index of first sample at or before begin
start_index = max(0, bisect_right(self.history, (begin,)) - 1)
def important_values(): # yields only the first entry for groups of consecutive values in history
for key, values in groupby(self.history[start_index:], key=itemgetter(1)):
yield next(values)
i1, i2 = tee(important_values())
last_end = next(i2)
for a, b in zip(i1, i2):
yield Interval(begin=max(begin, a.point), end=min(b.point, end), data=a.value)
last_end = b
if end <= b.point:
break
else: # end is after the end of our history list or we have only one entry
if last_end.point != end: # try not to yield null intervals
yield Interval(begin=last_end.point, end=end, data=last_end.value)
def intervals(self, end_time=GreatestValue):
if not self.history:
return iter(())
return self.time_slice(self.begin(), end_time)
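# A minimal usage sketch for SampledValue (integer time points stand in for
# the values produced by now()):
#   v = SampledValue(initial_value='red', time=0)
#   v.set('blue', time=5)
#   v.get(time=3)   # -> 'red'   (last sample at or before t=3)
#   v.get(time=5)   # -> 'blue'
#   v.get()         # -> 'blue'  (current value)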
class IntervalMapping(object):
__slots__ = ('ivs',)
def __init__(self, intervals=()):
self.ivs = sorted(iv for iv in intervals if not iv.is_null())
def begin(self):
return self.ivs[0].begin if self.ivs else 0
def first_value(self):
"""Return the value of the first interval, or None if empty"""
if not self.ivs:
return None
return self.ivs[0].data
def end(self):
return self.ivs[-1].end if self.ivs else 0
def last_value(self):
"""Return the value of the last interval, or None if empty"""
if not self.ivs:
return None
return self.ivs[-1].data
def clear(self):
self.ivs.clear()
def copy(self):
result = IntervalMapping()
result.ivs = self.ivs.copy()
return result
def _position_index(self, position):
"""Return the index of the last interval with a beginning <= position"""
lo = 0
hi = len(self.ivs)
# copied from bisect_right, because we only care about .begin
while lo < hi:
mid = (lo + hi) // 2
if position < self.ivs[mid].begin:
hi = mid
else:
lo = mid + 1
return lo - 1
def _end_position_index(self, position):
"""Return the index of the last interval with a beginning < position"""
lo = 0
hi = len(self.ivs)
# copied from bisect_left, because we only care about .begin
while lo < hi:
mid = (lo + hi) // 2
if self.ivs[mid].begin < position:
lo = mid + 1
else:
hi = mid
return lo - 1
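    # For example, with intervals beginning at [0, 10, 20]:
    #   _position_index(10)     -> 1   (last interval with begin <= 10)
    #   _end_position_index(10) -> 0   (last interval with begin < 10)
    #   _position_index(-5)     -> -1  (position is before every interval)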
def _contains_point(self, position):
index = self._position_index(position)
return index >= 0 and position < self.ivs[index].end
def _overlaps_range(self, begin, end):
begin_index = self._position_index(begin)
end_index = self._end_position_index(end)
return begin_index != end_index or (begin_index >= 0 and begin < self.ivs[begin_index].end)
def _value_at_position(self, position):
index = self._position_index(position)
if index >= 0:
iv = self.ivs[index]
if position < iv.end:
return iv.data
raise KeyError
def _slice(self, begin, end):
if begin is None:
begin = LeastValue
if end is None:
end = GreatestValue
if end <= begin:
return
begin_index = self._position_index(begin)
end_index = self._end_position_index(end)
if begin_index >= 0: # begins somewhere in the middle
iv = self.ivs[begin_index]
if begin < iv.end: # slice touches this interval
yield Interval(begin, min(iv.end, end), iv.data)
if end_index == begin_index: # ends on the same of our intervals; we're done
return
yield from self.ivs[begin_index + 1:end_index]
iv = self.ivs[end_index]
if iv.begin < end: # must more than simply touch on end
yield Interval(iv.begin, min(iv.end, end), iv.data)
def _set_range(self, begin, end, wrapped_value):
if begin is None:
begin = LeastValue
if end is None:
end = GreatestValue
if end <= begin:
raise KeyError("Invalid interval: {}".format((begin, end))) # invalid interval
# inclusive first index of the intervals we plan to overwrite
begin_index = self._position_index(begin)
# will be the exclusive last index of the intervals we plan to overwrite
end_index = self._end_position_index(end)
replacement = []
# check beginning
# print("begin_index", begin_index)
if begin_index < 0: # before first interval
# print("begin index was <0")
begin_index = 0
elif begin_index < len(self.ivs):
if begin < self.ivs[begin_index].end: # begins inside this interval, needs to be sliced or covered
iv = self.ivs[begin_index]
# print("inside interval", iv)
if begin == iv.begin or (iv.data,) == wrapped_value:
begin = iv.begin # cover this interval
# print("covering beginning")
else:
# slice this interval; cannot be in-place in case we are insetting
replacement.append(Interval(iv.begin, begin, iv.data))
# print("slicing beginning", iv, replacement[0])
else: # after this interval; don't touch it
begin_index += 1
# print("beginning is after interval")
# check end
# print("end_index", end_index)
if end_index < 0:
# print("end index was <0")
end_index = 0
elif end_index < len(self.ivs):
iv = self.ivs[end_index]
# check if it ends inside this interval, and thus needs to be sliced or covered
if end < iv.end:
# print("inside interval", iv)
if end == iv.end or (iv.data,) == wrapped_value:
end = iv.end # cover this interval
end_index += 1
# print("covering end")
else:
# slice this interval in-place, and do not cover it
self.ivs[end_index] = Interval(end, iv.end, iv.data)
# print("slicing end", self.ivs[end_index])
else: # ends after this interval; cover it completely
end_index += 1
# print("ending is after interval")
# modify the list
        if wrapped_value:  # () means delete the range; (value,) means set it
            replacement.append(Interval(begin, end, wrapped_value[0]))
# print("intervals being covered", slice(begin_index, end_index), self.ivs[begin_index:end_index])
# print("replacing with", replacement)
self.ivs[begin_index:end_index] = replacement
def __contains__(self, key):
if type(key) is Interval:
return self._overlaps_range(key.begin, key.end)
else:
return self._contains_point(key)
def envelops(self, interval):
"""Return True if there is an interval in this mapping that completely encloses the given one.
Raise ValueError if the interval given is null."""
if type(interval) is not Interval:
raise TypeError
if interval.is_null():
raise ValueError
begin_index = self._position_index(interval.begin)
end_index = self._end_position_index(interval.end)
return (
begin_index >= 0 and
begin_index == end_index and
interval.end <= self.ivs[begin_index].end
)
def is_continuous_over_interval(self, begin, end):
"""Return True if there are no values in the half-open interval [begin, end) that are un-mapped"""
begin_index = self._position_index(begin)
end_index = self._position_index(end)
# begin and end must be mapped
if begin_index < 0 or self.ivs[begin_index].end <= begin or self.ivs[end_index].end < end:
return False
# each pair of adjacent intervals in the series from begin to end must share the same end/begin value
for i in range(begin_index, end_index):
if self.ivs[i].end != self.ivs[i + 1].begin:
return False
return True
def __getitem__(self, key):
if type(key) in (slice, Interval):
a, b = (key.start, key.stop) if type(key) is slice else (key.begin, key.end)
return self._slice(a, b)
else:
return self._value_at_position(key)
def interval_at_position(self, position):
"""Return the interval that contains position, or None."""
index = self._position_index(position)
if index < 0:
return None
iv = self.ivs[index]
return iv if position < iv.end else None
def interval_at_or_after(self, position):
"""Return the first interval >= position or None."""
index = self._position_index(position)
        if index < 0 or self.ivs[index].end <= position:
index += 1
return None if index >= len(self.ivs) else self.ivs[index]
def interval_at_or_before(self, position):
"""Return the last interval <= position or None."""
index = self._position_index(position)
return None if index < 0 else self.ivs[index]
def next_interval_after(self, position_or_interval):
"""Return the first interval > position or None."""
if type(position_or_interval) is Interval:
index = self._end_position_index(position_or_interval.end) + 1
else:
index = self._position_index(position_or_interval) + 1
return None if index >= len(self.ivs) else self.ivs[index]
def next_interval_before(self, position_or_interval):
"""Return the last interval < position or None."""
if type(position_or_interval) is Interval:
point = position_or_interval.begin
else:
point = position_or_interval
index = self._position_index(point)
        if index >= 0 and point < self.ivs[index].end:
index -= 1
return None if index < 0 else self.ivs[index]
def __setitem__(self, key, value):
if type(key) not in (slice, Interval):
raise TypeError("IntervalMapping must be modified with index that are Intervals or slices")
a, b = (key.start, key.stop) if type(key) is slice else (key.begin, key.end)
self._set_range(a, b, (value,))
def apply(self, interval):
"""Apply this interval to the mapping. If the interval is null, raises a KeyError."""
if type(interval) is not Interval:
raise TypeError
self._set_range(interval.begin, interval.end, (interval.data,))
def __delitem__(self, key):
if type(key) not in (slice, Interval):
raise TypeError("IntervalMapping must be modified with index that are Intervals or slices")
a, b = (key.start, key.stop) if type(key) is slice else (key.begin, key.end)
self._set_range(a, b, ())
def __len__(self):
return len(self.ivs)
def __iter__(self):
return iter(self.ivs)
def __reversed__(self):
return reversed(self.ivs)
def __repr__(self):
if self.ivs:
return "IntervalMapping({})".format(self.ivs)
else:
return "IntervalMapping()"
class HistorySet(object):
__slots__ = ('current', 'history')
def __init__(self, values=(), *, time=None):
time = time if time is not None else now()
self.current = {v: time for v in values}
self.history = IntervalTree()
@staticmethod
def from_intervals(intervals):
result = HistorySet()
for iv in intervals:
            result.add_interval(iv)
        return result
def add_interval(self, iv):
if iv.end is GreatestValue:
self.current[iv.data] = iv.begin
else:
if iv.data in self.current and self.current[iv.data] <= iv.end:
del self.current[iv.data]
self.history.add(iv)
def refine_history(self):
"""
Scrub the internal IntervalTree history so that there are a minimum number of intervals.
Any multiplicity of intervals with the same data value that covers a single contiguous range will
be replaced with a single interval over that range.
This is an expensive operation, both in time and memory, that should only be performed when the
history is being modified carelessly, such as naively merging with the history from another HistorySet
or adding and removing elements out of chronological order.
Behavior for the HistorySet should be identical before and after calling refine_history(), but may be
slightly faster and consume less memory afterwards. The only change will be that it should no longer
return incorrect values for the effective added date of currently contained items after merging with
history intervals.
"""
self.history = IntervalTree(merge_interval_overlaps(self.history, self.current))
def __getitem__(self, index):
if type(index) is slice:
if index.step is not None:
raise ValueError("Slice indexing is used for intervals, which do not have a step.")
iv = Interval(index.start, index.stop)
result = {x.data for x in self.history[iv]}
result.update(x[0] for x in self.current.items() if iv.overlaps(Interval(begin=x[1], end=None)))
else:
result = {x.data for x in self.history[index]}
result.update(item_ for item_, time_ in self.current.items() if time_ <= index)
return result
def time_slice(self, begin, end):
"""
Return an iterable over all the intervals intersecting the given half-open interval from begin to end,
chopped to fit within it
"""
if begin is None or end is None:
raise ValueError("Both the beginning and end of the interval must be included")
if end <= begin:
raise ValueError("begin must be < end")
for iv in self.history[begin:end]:
yield Interval(begin=max(iv.begin, begin), end=min(iv.end, end), data=iv.data)
for value, added in self.current.items():
if added < end:
yield Interval(begin=added, end=end, data=value)
def intervals(self):
"""
Return an iterator over all the intervals in this set. Currently contained values have intervals
ending with a GreatestValue object.
"""
yield from self.history
end = GreatestValue
for value, begin in self.current.items():
yield Interval(begin=begin, end=end, data=value)
def all_values(self):
result = self.copy()
for old in self.history:
result.add(old.data)
return result
def item_added_time(self, value):
return self.current[value]
def ordered_by_addition(self, *, time=None):
if time is None:
result = list(self.current.items())
else:
result = [(x.begin, x.data) for x in self.history[time]]
result.extend((added, item) for item, added in self.current.items() if added <= time)
result.sort(key=itemgetter(0))
return [x[1] for x in result]
def add(self, value, *, time=None):
time = time if time is not None else now()
if value not in self.current or self.current[value] > time:
self.current[value] = time
def remove(self, value, *, time=None):
self.history.addi(self.current.pop(value), time if time is not None else now(), value)
def discard(self, value, *, time=None):
if value in self.current:
self.remove(value, time=time)
def copy(self, *, time=None):
if time is None:
return set(self.current)
else:
return self[time]
def members_in_interval(self, begin, end):
return self[begin:end]
def clear(self, *, time=None):
time = time if time is not None else now()
for item in self.current.items():
self.history.addi(item[1], time, item[0])
self.current.clear()
def union(self, *others):
result = self.copy()
result.update(*others)
return result
def difference(self, *others):
result = self.copy()
result.difference_update(*others)
return result
def symmetric_difference(self, other):
result = self.copy()
result.symmetric_difference_update(other)
return result
def intersection(self, *others):
result = self.copy()
result.intersection_update(*others)
return result
def update(self, *others, time=None):
time = time if time is not None else now()
for other in others:
for value in other:
self.add(value, time=time)
def difference_update(self, *others, time=None):
time = time if time is not None else now()
for other in others:
for value in other:
self.discard(value, time=time)
def symmetric_difference_update(self, other, *, time=None):
time = time if time is not None else now()
for value in other:
if value in self.current:
self.remove(value, time=time)
else:
self.add(value, time=time)
def intersection_update(self, *others, time=None):
time = time if time is not None else now()
toss = self.difference(*others)
for value in toss:
self.discard(value, time=time)
def pop(self, *, time=None):
time = time if time is not None else now()
item = self.current.popitem()
self.history.addi(item[1], time, item[0])
return item[0]
def isdisjoint(self, other):
# noinspection PyUnresolvedReferences
return self.current.keys().isdisjoint(other)
    def issubset(self, other):
        return self.current.keys() <= set(other)
    def issuperset(self, other):
        return self.current.keys() >= set(other)
def __iter__(self):
return iter(self.current)
def __len__(self):
return len(self.current)
def __eq__(self, other):
if isinstance(other, (set, frozenset)):
return self.current.keys() == other
elif isinstance(other, HistorySet):
return self.current.keys() == other.current.keys()
return False
    def __lt__(self, other):
        return self.issubset(other) and self != other
    def __gt__(self, other):
        return self.issuperset(other) and self != other
def __contains__(self, item):
return item in self.current
__le__ = issubset
__ge__ = issuperset
__or__ = union
__and__ = intersection
__sub__ = difference
__xor__ = symmetric_difference
__ior__ = update
__iand__ = intersection_update
__isub__ = difference_update
__ixor__ = symmetric_difference_update
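# A minimal usage sketch for HistorySet (integer time points stand in for
# the values produced by now()):
#   s = HistorySet(['a'], time=0)
#   s.add('b', time=5)
#   s.remove('a', time=10)   # 'a' is archived as Interval(0, 10, 'a')
#   'a' in s                 # -> False (current membership only)
#   s[7]                     # -> {'a', 'b'}, the members at time 7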
class HistoryDict(object):
__slots__ = ('d', '_len')
def __init__(self):
self.d = {}
self._len = 0
def set(self, key, value, *, time=None):
time = time if time is not None else now()
if key in self.d:
self.d[key].set(value, time=time)
else:
self.d[key] = SampledValue(initial_value=value, time=time)
self._len += 1
def __setitem__(self, key, value):
self.set(key, value)
def setdefault(self, key, default=None, *, time=None):
time = time if time is not None else now()
result = self._fetch(key, time)
if result is _SENTINEL:
self.set(key, default, time=time)
return default
else:
return result
def update(self, other, *, time=None):
time = time if time is not None else now()
if isinstance(other, (dict, HistoryDict)):
update_from = other.items()
else:
update_from = other
for item in update_from:
self.set(item[0], item[1], time=time)
def _fetch(self, key, time):
if key in self.d:
return self.d[key].get(time=time, default=_SENTINEL)
else:
return _SENTINEL
def get(self, key, default=None, *, time=None):
time = time if time is not None else now()
result = self._fetch(key, time)
return default if result is _SENTINEL else result
def __getitem__(self, item):
result = self._fetch(item, now())
if result is _SENTINEL:
raise KeyError
else:
return result
def copy(self, *, time=None):
# no need to normalize time from None, fetching with None for current is faster
return {k: v for k, v in self.d.items() if v.get(time=time, default=_SENTINEL) is not _SENTINEL}
def __len__(self):
return self._len
def delete(self, key, *, time=None):
time = time if time is not None else now()
if key not in self.d:
raise KeyError
if self.d[key].get(time=time, default=_SENTINEL) is _SENTINEL:
raise KeyError
self.d[key].set(_SENTINEL, time=time)
self._len -= 1
def __delitem__(self, key):
self.delete(key)
def clear(self, *, time=None):
time = time if time is not None else now()
for value in self.d.values():
value.set(_SENTINEL, time=time)
self._len = 0
def contains(self, key, *, time=None):
return self._fetch(key, time) is not _SENTINEL
def __contains__(self, item):
return self.contains(item)
def pop(self, key, *, time=None):
time = time if time is not None else now()
result = self.get(key, time=time)
self.delete(key, time=time)
return result
def popitem(self, *, time=None):
if self._len == 0:
raise KeyError
time = time if time is not None else now()
key = next(self)
        value = self.get(key, time=time)
self.delete(key, time=time)
return key, value
def __iter__(self):
for key in self.d:
if self._fetch(key, None) is not _SENTINEL:
yield key
def __eq__(self, other):
if len(other) != len(self):
return False
if not isinstance(other, (dict, HistoryDict)):
return False
for key, other_value in other.items():
self_value = self._fetch(key, None)
if self_value != other_value:
return False
return True
def all_keys(self):
return self.d.keys()
def all_items(self):
for key, value in self.d.items():
for past_value in value.all_values():
if past_value is not _SENTINEL:
yield (key, past_value)
def all_values_of(self, key):
for past_value in self.d[key].all_values():
if past_value is not _SENTINEL:
yield past_value
def items(self):
class HistoryDictItems(_ViewMixin):
__slots__ = ('hd',)
def __init__(self, hd):
self.hd = hd
def __contains__(self, item):
if not isinstance(item, tuple) or len(item) != 2:
return False
if item[0] in self.hd:
return self.hd[item[0]] == item[1]
else:
return False
def __iter__(self):
time = now()
for key in self.hd:
yield (key, self.hd.get(key, time=time))
return HistoryDictItems(self)
def keys(self):
class HistoryDictKeys(_ViewMixin):
__slots__ = ('hd',)
def __init__(self, hd):
self.hd = hd
def __contains__(self, item):
return item in self.hd
def __iter__(self):
yield from self.hd
return HistoryDictKeys(self)
def values(self):
class HistoryDictValues(object):
__slots__ = ('hd',)
def __init__(self, hd):
self.hd = hd
def __contains__(self, item):
return item in iter(self)
            def __iter__(self):
                for key in self.hd:
                    yield self.hd[key]
return HistoryDictValues(self)
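# A minimal usage sketch for HistoryDict (integer time points stand in for
# the values produced by now()):
#   d = HistoryDict()
#   d.set('color', 'red', time=0)
#   d.set('color', 'blue', time=5)
#   d.get('color', time=3)       # -> 'red'
#   d.get('color', time=6)       # -> 'blue'
#   d.delete('color', time=8)
#   d.contains('color', time=6)  # -> True; len(d) is now 0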
class WeightedSet(object):
"""
    A fast O(log n) mapping that holds values with weights. Can be used to select random values from a mutable
    weighted set; negative weights are not supported.
Populating many values costs O(n log n)
Iterating over all the values and their weights costs O(n)
Importing/exporting values with summed weights, with export_heap()/import_heap(), costs O(n)
Modifying, adding, or deleting a single weight costs O(log n)
Choosing a single weighted-random value from the set costs O(log n)
Querying the sum of weights in the set costs O(1)
Querying a single weight in the set costs O(1)
Add or set a weight with `weights[key] = w`
Get a weight with `weights[key]`
Remove a weight with `del weights[key]`
Increase a weight by 1 with `weights.modify(key)`
Increase or decrease a weight by a different value with `weights.modify(key, delta)`
Get the sum of all weights in the set with `weights.sum()`
Get the number of weights in the set with `len(weights)`
Get a weighted random choice from the set with `weights.choice()`
Get an un-weighted random choice from the set with `weights.unweighted_choice()`
    Get a random choice using your own random number generator with `weights.choose(your_random_in_range(weights.sum()))`
"""
# modeled after http://stackoverflow.com/a/2149533/3088947 once I knew to look for a sum-heap implementation
# In this version we are starting our indices at 0, so the functions are:
# left child: (i << 1) + 1
# right child: (i << 1) + 2
# parent: (i - 1) >> 1
class Node(object):
__slots__ = ('v', 'w', 'tw')
def __init__(self, value, weight, total_weight=0):
self.v = value
self.w = weight
self.tw = total_weight
def __init__(self, items=()):
self.dict = {} # tracks where in the heap each value is
self.heap = [] # stores the nodes of the sum-tree
for key, weight in items:
self.modify(key, weight)
@staticmethod
def import_heap(heap_data):
result = WeightedSet()
result.__setstate__(heap_data)
return result
def export_heap(self):
return self.__getstate__()
def copy(self):
return WeightedSet.import_heap(self.export_heap())
    def __setstate__(self, heap_data):
        # pickle bypasses __init__, so (re)create the containers here
        self.dict = {}
        self.heap = []
        for i, (v, w, tw) in enumerate(heap_data):
            self.heap.append(WeightedSet.Node(v, w, tw))
            self.dict[v] = i
def __getstate__(self):
return [(node.v, node.w, node.tw) for node in self.heap]
def __iter__(self):
"""Iterate over (value, weight) pairs in the set, with tuples"""
for node in self.heap:
yield node.v, node.w
def _propagate(self, i, delta):
while i >= 0:
self.heap[i].tw += delta
i = (i - 1) >> 1
def modify(self, key, delta=1):
if key in self.dict:
i = self.dict[key]
self.heap[i].w += delta
else:
i = len(self.heap)
self.heap.append(WeightedSet.Node(key, delta))
self.dict[key] = i
self._propagate(i, delta)
def __getitem__(self, key):
        return self.heap[self.dict[key]].w
def __setitem__(self, key, value):
self.modify(key, value - self[key])
def __delitem__(self, key):
# get this key's index in the heap
# and delete this key from the dict
i = self.dict.pop(key)
# remove weight from heap at this key's location
self._propagate(i, -self.heap[i].w)
# if this is the last node, just delete it
if i == len(self.heap) - 1:
self.heap.pop()
else:
# otherwise, get the last node from heap and put it here
replacement = self.heap.pop()
replacement.tw = self.heap[i].tw
self.heap[i] = replacement
# re-propagate its weight
self._propagate(i, replacement.w)
# and change that node's key's value in dict to its new heap location, where the old key was
self.dict[replacement.v] = i
def __len__(self):
"""Return the number of key/weight pairs"""
return len(self.heap)
def sum(self):
"""Return sum of all weights"""
if self:
return self.heap[0].tw
raise IndexError("Empty mapping has no sum")
def choose(self, choose_at):
i = 0 # start driving at the root
while choose_at >= self.heap[i].w: # while we have enough gas to get past node i:
choose_at -= self.heap[i].w # drive past node i
i = (i << 1) + 1 # move to first child
if choose_at >= self.heap[i].tw: # if we have enough gas:
choose_at -= self.heap[i].tw # drive past first child and descendants
i += 1 # move to second child
return self.heap[i].v # out of gas at heap[i]
def choice(self):
return self.choose(self.sum() * random.random())
def pop(self):
val = self.choice()
weight = self[val]
del self[val]
return val, weight
def unweighted_choice(self):
return random.choice(self.heap).v
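# A minimal usage sketch for WeightedSet:
#   ws = WeightedSet([('a', 1), ('b', 3)])
#   ws.sum()        # -> 4
#   ws['b']         # -> 3
#   ws.choose(0.5)  # -> 'a'; deterministic: 0.5 of "gas" runs out inside the root's own weight
#   ws.modify('a')  # bump 'a' by 1; ws.sum() -> 5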
|
|
#!/usr/bin/env python3
# Copyright (c) Klaudisz Staniek.
# See LICENSE for details.
"""
This is an example application demonstrating the event-driven driver capabilities and API usage
"""
from has.manager.manager import Manager
from has.utils.notification import Notification
from threading import Lock, RLock, Condition
from datetime import datetime
import configparser
from tkinter import *
from tkinter import ttk
from tkinter import messagebox
from queue import Queue, Empty
import threading
class NodeInfo:
"""
    This is a class containing the minimal information required to keep track of the nodes
"""
def __init__(self):
self.network_id = None
self.node_id = None
self.value_ids = []
nodes = []
initFailed = True
criticalSection = Lock()
def get_value_obj(value_id):
for node in nodes:
for value in node.value_ids:
if value.id == value_id:
return value
def get_node_info( notification ):
network_id = notification.network_id
node_id = notification.node_id
for node in nodes:
if node.network_id == network_id and node.node_id == node_id:
return node
return None
def OnNotification( notification, context ):
global initFailed
global criticalSection
with criticalSection:
notification_type = notification.type
if notification_type == Notification.Type_DriverReady:
context.on_message("StatusUpdate","Driver Ready")
initFailed = False
elif notification_type == Notification.Type_DriverFailed:
context.on_message("StatusUpdate","Driver Failed")
elif notification_type == Notification.Type_DriverReset:
context.on_message("StatusUpdate","Driver Reset")
elif notification_type == Notification.Type_AllNodesQueried:
context.on_message("StatusUpdate","All Nodes Queried")
elif notification_type == Notification.Type_NodeAdded:
node_info = NodeInfo()
node_info.network_id = notification.network_id
node_info.node_id = notification.node_id
nodes.append(node_info)
context.on_message('NodeAdded', notification)
elif notification_type == Notification.Type_NodeRemoved:
network_id = notification.network_id
node_id = notification.node_id
for node in nodes[:]:
if node_id == node.node_id and network_id == node.network_id:
nodes.remove(node)
del node
context.on_message('NodeRemoved', notification)
break
elif notification_type == Notification.Type_NodeChanged:
context.on_message('NodeChanged', notification)
elif notification_type == Notification.Type_ValueAdded:
#print("Manager: Value Added %s" % (notification.node_id ) )
node_info = get_node_info( notification )
if node_info is not None:
node_info.value_ids.append( notification.value_id )
context.on_message('ValueAdded', notification)
elif notification_type == Notification.Type_ValueChanged:
node_info = get_node_info( notification )
network_id = node_info.network_id
node_id = node_info.node_id
value_id = notification.value_id
value_type = Manager.get_value_type( value_id )
value_id = Manager.get_value_id( value_id )
value = Manager.get_value_as_string( value_id )
units = Manager.get_value_units( value_id )
node_name = Manager.get_node_name( network_id, node_id )
node_location_name = Manager.get_node_location_name( network_id, node_id )
text = "{0} Node {1}: {2} @ {3} changed {4} to {5}".format( str(datetime.today()), node_id, node_name, node_location_name, value_type, value )
context.on_message('ValueChanged', notification)
context.on_message("StatusUpdate", text)
elif notification_type == Notification.Type_NodeQueriesComplete:
node_name = Manager.get_node_name( notification.network_id, notification.node_id )
context.on_message('NodeQueriesComplete', notification)
class HASApp():
def __init__(self, root):
self.queue = Queue()
self.root = root
#self.root.protocol("WM_DELETE_WINDOW", self.callback)
self.tree = ttk.Treeview() #columns=('Node ID', 'type', 'size'), displaycolumns='size')
self.tree.tag_configure('updated', foreground='red')
self.tree.tag_configure('normal', foreground='black')
self.tree.pack(side=TOP, fill=BOTH, expand=Y)
self.status = StringVar()
Label(root, textvariable=self.status).pack()
root.bind('<<open-config-dialog>>', self.config_dialog)
root.createcommand('::tk::mac::ShowPreferences', self.config_dialog)
root.bind('<<close-all-windows>>', self.callback)
root.createcommand('exit', self.callback)
def status_update(self, notification):
self.status.set(str(notification))
    def callback(self, event=None):
if messagebox.askokcancel("Quit", "Do you really wish to quit?"):
self.running = 0
#self.root.quit()
def run(self):
self.running = 1
Manager.add_watcher( OnNotification, self)
Manager.read_config("manager.ini")
#self.thread1 = threading.Thread(target=self.worker_thread)
#self.thread1.start()
self.queue_check()
    def config_dialog(self, event=None):
if messagebox.askokcancel("Quit", "Do you really wish to quit?"):
print("config")
def add_node(self, notification):
item_id = "{0}:{1}".format(notification.network_id, notification.node_id)
if not self.tree.exists(item_id):
text = "Node {0}:".format(notification.node_id)
self.tree.insert("", "end", item_id, text=text)
def node_queries_complete(self, notification):
item_id = "{0}:{1}".format(notification.network_id, notification.node_id)
node_name = Manager.get_node_name( notification.network_id, notification.node_id )
node_location_name = Manager.get_node_location_name( notification.network_id, notification.node_id )
node_type = Manager.get_node_type( notification.network_id, notification.node_id )
#print(node_location_name)
if not self.tree.exists(node_location_name):
self.tree.insert("", "end", node_location_name, text=node_location_name)
text = "{1} (Node:{0}:{2})".format(notification.node_id, node_name, node_type)
self.tree.item(item_id, text=text)
self.tree.move(item_id, node_location_name, "end")
def remove_node(self, notification):
item_id = "{0}:{1}".format(notification.network_id, notification.node_id)
if self.tree.exists(item_id):
self.tree.delete(item_id)
def update_node(self, notification):
item_id = "{0}:{1}".format(notification.network_id, notification.node_id)
        self.tree.item(item_id, tags=('updated',))
self.root.after(10000, self.reset_foreground, item_id)
def add_value(self, notification):
item_id = "{0}:{1}".format(notification.network_id, notification.node_id)
obj_value_id = notification.value_id
value_type = Manager.get_value_type( obj_value_id )
value = Manager.get_value_as_string( obj_value_id )
last_changed = Manager.get_value_last_changed(obj_value_id)
text="{0}={1} ({2})".format(value_type,value,last_changed)
self.tree.insert(item_id,"end", obj_value_id.id, text=text)
def change_value(self, notification):
obj_value_id = notification.value_id
if self.tree.exists(obj_value_id.id):
value_type = Manager.get_value_type( obj_value_id )
value = Manager.get_value_as_string( obj_value_id )
last_changed = Manager.get_value_last_changed(obj_value_id)
text="{0}={1} ({2})".format(value_type,value,last_changed)
            self.tree.item(obj_value_id.id, text=text, tags=('updated',))
self.root.after(10000, self.reset_foreground, obj_value_id.id)
def reset_foreground(self, item):
        self.tree.item(item, tags=('normal',))
def on_message(self, message, notification):
self.queue.put_nowait((message,notification))
def queue_check(self):
while self.queue.qsize():
try:
message, notification = self.queue.get_nowait()
if message == 'NodeAdded':
self.add_node(notification)
elif message == 'ValueAdded':
self.add_value(notification)
elif message == 'ValueChanged':
self.change_value(notification)
elif message == 'NodeRemoved':
self.remove_node(notification)
elif message == 'NodeChanged':
self.update_node(notification)
elif message == 'NodeQueriesComplete':
self.node_queries_complete(notification)
elif message == 'StatusUpdate':
self.status_update(notification)
except Empty:
pass
if not self.running:
Manager.close()
print("Done")
self.root.destroy()
else:
self.root.after(1000, self.queue_check)
def main():
root = Tk(className="Home Automation System")
    root.title("Home Automation System")
width=root.winfo_screenwidth()
height=root.winfo_screenheight()
root.geometry("{0}x{1}".format( width,height ) )
app = HASApp(root)
app.run()
root.mainloop()
if __name__ == "__main__":
main()
exit()
|
|
"""Renewable certificates storage."""
import datetime
import glob
import logging
import re
import shutil
import stat
from typing import Optional
import configobj
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key
import parsedatetime
import pytz
import certbot
from certbot import configuration
from certbot import crypto_util
from certbot import errors
from certbot import interfaces
from certbot import ocsp
from certbot import util
from certbot._internal import cli
from certbot._internal import constants
from certbot._internal import error_handler
from certbot._internal.plugins import disco as plugins_disco
from certbot.compat import filesystem
from certbot.compat import os
from certbot.plugins import common as plugins_common
logger = logging.getLogger(__name__)
ALL_FOUR = ("cert", "privkey", "chain", "fullchain")
README = "README"
CURRENT_VERSION = util.get_strict_version(certbot.__version__)
BASE_PRIVKEY_MODE = 0o600
def renewal_conf_files(config: configuration.NamespaceConfig):
"""Build a list of all renewal configuration files.
:param configuration.NamespaceConfig config: Configuration object
:returns: list of renewal configuration files
:rtype: `list` of `str`
"""
result = glob.glob(os.path.join(config.renewal_configs_dir, "*.conf"))
result.sort()
return result
def renewal_file_for_certname(config, certname):
"""Return /path/to/certname.conf in the renewal conf directory"""
path = os.path.join(config.renewal_configs_dir, "{0}.conf".format(certname))
if not os.path.exists(path):
raise errors.CertStorageError("No certificate found with name {0} (expected "
"{1}).".format(certname, path))
return path
def cert_path_for_cert_name(config: configuration.NamespaceConfig, cert_name: str) -> str:
""" If `--cert-name` was specified, but you need a value for `--cert-path`.
:param configuration.NamespaceConfig config: parsed command line arguments
:param str cert_name: cert name.
"""
cert_name_implied_conf = renewal_file_for_certname(config, cert_name)
return configobj.ConfigObj(
cert_name_implied_conf, encoding='utf-8', default_encoding='utf-8')["fullchain"]
def config_with_defaults(config=None):
"""Merge supplied config, if provided, on top of builtin defaults."""
defaults_copy = configobj.ConfigObj(
constants.RENEWER_DEFAULTS, encoding='utf-8', default_encoding='utf-8')
defaults_copy.merge(config if config is not None else configobj.ConfigObj(
encoding='utf-8', default_encoding='utf-8'))
return defaults_copy
def add_time_interval(base_time, interval, textparser=parsedatetime.Calendar()):
"""Parse the time specified time interval, and add it to the base_time
The interval can be in the English-language format understood by
parsedatetime, e.g., '10 days', '3 weeks', '6 months', '9 hours', or
a sequence of such intervals like '6 months 1 week' or '3 days 12
hours'. If an integer is found with no associated unit, it is
interpreted by default as a number of days.
    :param datetime.datetime base_time: The time the interval is added to.
:param str interval: The time interval to parse.
:returns: The base_time plus the interpretation of the time interval.
:rtype: :class:`datetime.datetime`"""
if interval.strip().isdigit():
interval += " days"
# try to use the same timezone, but fallback to UTC
tzinfo = base_time.tzinfo or pytz.UTC
return textparser.parseDT(interval, base_time, tzinfo=tzinfo)[0]
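# A minimal sketch of add_time_interval (results shown are what parsedatetime
# produces for these inputs):
#   base = datetime.datetime(2021, 1, 1, tzinfo=pytz.UTC)
#   add_time_interval(base, "10")              # -> 2021-01-11 (bare integers are days)
#   add_time_interval(base, "3 weeks 2 days")  # -> 2021-01-24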
def write_renewal_config(o_filename, n_filename, archive_dir, target, relevant_data):
"""Writes a renewal config file with the specified name and values.
:param str o_filename: Absolute path to the previous version of config file
:param str n_filename: Absolute path to the new destination of config file
:param str archive_dir: Absolute path to the archive directory
:param dict target: Maps ALL_FOUR to their symlink paths
:param dict relevant_data: Renewal configuration options to save
:returns: Configuration object for the new config file
:rtype: configobj.ConfigObj
"""
config = configobj.ConfigObj(o_filename, encoding='utf-8', default_encoding='utf-8')
config["version"] = certbot.__version__
config["archive_dir"] = archive_dir
for kind in ALL_FOUR:
config[kind] = target[kind]
if "renewalparams" not in config:
config["renewalparams"] = {}
config.comments["renewalparams"] = ["",
"Options used in "
"the renewal process"]
config["renewalparams"].update(relevant_data)
for k in config["renewalparams"]:
if k not in relevant_data:
del config["renewalparams"][k]
if "renew_before_expiry" not in config:
default_interval = constants.RENEWER_DEFAULTS["renew_before_expiry"]
config.initial_comment = ["renew_before_expiry = " + default_interval]
# TODO: add human-readable comments explaining other available
# parameters
logger.debug("Writing new config %s.", n_filename)
# Ensure that the file exists
with open(n_filename, 'a'):
pass
# Copy permissions from the old version of the file, if it exists.
if os.path.exists(o_filename):
current_permissions = stat.S_IMODE(os.lstat(o_filename).st_mode)
filesystem.chmod(n_filename, current_permissions)
with open(n_filename, "wb") as f:
config.write(outfile=f)
return config
def rename_renewal_config(prev_name, new_name, cli_config):
"""Renames cli_config.certname's config to cli_config.new_certname.
:param .NamespaceConfig cli_config: parsed command line
arguments
"""
prev_filename = renewal_filename_for_lineagename(cli_config, prev_name)
new_filename = renewal_filename_for_lineagename(cli_config, new_name)
if os.path.exists(new_filename):
raise errors.ConfigurationError("The new certificate name "
"is already in use.")
try:
filesystem.replace(prev_filename, new_filename)
except OSError:
raise errors.ConfigurationError("Please specify a valid filename "
"for the new certificate name.")
def update_configuration(lineagename, archive_dir, target, cli_config):
"""Modifies lineagename's config to contain the specified values.
:param str lineagename: Name of the lineage being modified
:param str archive_dir: Absolute path to the archive directory
:param dict target: Maps ALL_FOUR to their symlink paths
:param .NamespaceConfig cli_config: parsed command line
arguments
:returns: Configuration object for the updated config file
:rtype: configobj.ConfigObj
"""
config_filename = renewal_filename_for_lineagename(cli_config, lineagename)
temp_filename = config_filename + ".new"
# If an existing tempfile exists, delete it
if os.path.exists(temp_filename):
os.unlink(temp_filename)
# Save only the config items that are relevant to renewal
values = relevant_values(vars(cli_config.namespace))
write_renewal_config(config_filename, temp_filename, archive_dir, target, values)
filesystem.replace(temp_filename, config_filename)
return configobj.ConfigObj(config_filename, encoding='utf-8', default_encoding='utf-8')
def get_link_target(link):
"""Get an absolute path to the target of link.
:param str link: Path to a symbolic link
:returns: Absolute path to the target of link
:rtype: str
    :raises .CertStorageError: If link does not exist.
"""
try:
target = filesystem.readlink(link)
except OSError:
raise errors.CertStorageError(
"Expected {0} to be a symlink".format(link))
if not os.path.isabs(target):
target = os.path.join(os.path.dirname(link), target)
return os.path.abspath(target)
def _write_live_readme_to(readme_path, is_base_dir=False):
prefix = ""
if is_base_dir:
prefix = "[cert name]/"
with open(readme_path, "w") as f:
logger.debug("Writing README to %s.", readme_path)
f.write("This directory contains your keys and certificates.\n\n"
"`{prefix}privkey.pem` : the private key for your certificate.\n"
"`{prefix}fullchain.pem`: the certificate file used in most server software.\n"
"`{prefix}chain.pem` : used for OCSP stapling in Nginx >=1.3.7.\n"
"`{prefix}cert.pem` : will break many server configurations, and "
"should not be used\n"
" without reading further documentation (see link below).\n\n"
"WARNING: DO NOT MOVE OR RENAME THESE FILES!\n"
" Certbot expects these files to remain in this location in order\n"
" to function properly!\n\n"
"We recommend not moving these files. For more information, see the Certbot\n"
"User Guide at https://certbot.eff.org/docs/using.html#where-are-my-"
"certificates.\n".format(prefix=prefix))
def _relevant(namespaces, option):
"""
Is this option one that could be restored for future renewal purposes?
:param namespaces: plugin namespaces for configuration options
:type namespaces: `list` of `str`
:param str option: the name of the option
:rtype: bool
"""
from certbot._internal import renewal
return (option in renewal.CONFIG_ITEMS or
any(option.startswith(namespace) for namespace in namespaces))
def relevant_values(all_values):
"""Return a new dict containing only items relevant for renewal.
:param dict all_values: The original values.
:returns: A new dictionary containing items that can be used in renewal.
:rtype dict:
"""
plugins = plugins_disco.PluginsRegistry.find_all()
namespaces = [plugins_common.dest_namespace(plugin) for plugin in plugins]
rv = dict(
(option, value)
for option, value in all_values.items()
if _relevant(namespaces, option) and cli.option_was_set(option, value))
# We always save the server value to help with forward compatibility
# and behavioral consistency when versions of Certbot with different
# server defaults are used.
rv["server"] = all_values["server"]
return rv
def lineagename_for_filename(config_filename):
"""Returns the lineagename for a configuration filename.
"""
if not config_filename.endswith(".conf"):
raise errors.CertStorageError(
"renewal config file name must end in .conf")
return os.path.basename(config_filename[:-len(".conf")])
def renewal_filename_for_lineagename(config, lineagename):
"""Returns the lineagename for a configuration filename.
"""
return os.path.join(config.renewal_configs_dir, lineagename) + ".conf"
def _relpath_from_file(archive_dir, from_file):
"""Path to a directory from a file"""
return os.path.relpath(archive_dir, os.path.dirname(from_file))
def full_archive_path(config_obj, cli_config, lineagename):
"""Returns the full archive path for a lineagename
Uses cli_config to determine archive path if not available from config_obj.
:param configobj.ConfigObj config_obj: Renewal conf file contents (can be None)
:param configuration.NamespaceConfig cli_config: Main config file
:param str lineagename: Certificate name
"""
if config_obj and "archive_dir" in config_obj:
return config_obj["archive_dir"]
return os.path.join(cli_config.default_archive_dir, lineagename)
def _full_live_path(cli_config, lineagename):
"""Returns the full default live path for a lineagename"""
return os.path.join(cli_config.live_dir, lineagename)
def delete_files(config, certname):
"""Delete all files related to the certificate.
If some files are not found, ignore them and continue.
"""
renewal_filename = renewal_file_for_certname(config, certname)
# file exists
full_default_archive_dir = full_archive_path(None, config, certname)
full_default_live_dir = _full_live_path(config, certname)
try:
renewal_config = configobj.ConfigObj(
renewal_filename, encoding='utf-8', default_encoding='utf-8')
except configobj.ConfigObjError:
# config is corrupted
logger.error("Could not parse %s. You may wish to manually "
"delete the contents of %s and %s.", renewal_filename,
full_default_live_dir, full_default_archive_dir)
raise errors.CertStorageError(
"error parsing {0}".format(renewal_filename))
finally:
# we couldn't read it, but let's at least delete it
# if this was going to fail, it already would have.
os.remove(renewal_filename)
logger.info("Removed %s", renewal_filename)
# cert files and (hopefully) live directory
# it's not guaranteed that the files are in our default storage
# structure. so, first delete the cert files.
directory_names = set()
for kind in ALL_FOUR:
link = renewal_config.get(kind)
try:
os.remove(link)
logger.debug("Removed %s", link)
except OSError:
logger.debug("Unable to delete %s", link)
directory = os.path.dirname(link)
directory_names.add(directory)
# if all four were in the same directory, and the only thing left
# is the README file (or nothing), delete that directory.
# this will be wrong in very few but some cases.
if len(directory_names) == 1:
# delete the README file
directory = directory_names.pop()
readme_path = os.path.join(directory, README)
try:
os.remove(readme_path)
logger.debug("Removed %s", readme_path)
except OSError:
logger.debug("Unable to delete %s", readme_path)
# if it's now empty, delete the directory
try:
os.rmdir(directory) # only removes empty directories
logger.debug("Removed %s", directory)
except OSError:
logger.debug("Unable to remove %s; may not be empty.", directory)
# archive directory
try:
archive_path = full_archive_path(renewal_config, config, certname)
shutil.rmtree(archive_path)
logger.debug("Removed %s", archive_path)
except OSError:
logger.debug("Unable to remove %s", archive_path)
class RenewableCert(interfaces.RenewableCert):
"""Renewable certificate.
Represents a lineage of certificates that is under the management of
Certbot, indicated by the existence of an associated renewal
configuration file.
Note that the notion of "current version" for a lineage is
maintained on disk in the structure of symbolic links, and is not
explicitly stored in any instance variable in this object. The
RenewableCert object is able to determine information about the
current (or other) version by accessing data on disk, but does not
inherently know any of this information except by examining the
symbolic links as needed. The instance variables mentioned below
point to symlinks that reflect the notion of "current version" of
each managed object, and it is these paths that should be used when
configuring servers to use the certificate managed in a lineage.
These paths are normally within the "live" directory, and their
symlink targets -- the actual cert files -- are normally found
within the "archive" directory.
:ivar str cert: The path to the symlink representing the current
version of the certificate managed by this lineage.
:ivar str privkey: The path to the symlink representing the current
version of the private key managed by this lineage.
:ivar str chain: The path to the symlink representing the current version
of the chain managed by this lineage.
:ivar str fullchain: The path to the symlink representing the
current version of the fullchain (combined chain and cert)
managed by this lineage.
:ivar configobj.ConfigObj configuration: The renewal configuration
options associated with this lineage, obtained from parsing the
renewal configuration file and/or systemwide defaults.
"""
def __init__(self, config_filename, cli_config, update_symlinks=False):
"""Instantiate a RenewableCert object from an existing lineage.
:param str config_filename: the path to the renewal config file
that defines this lineage.
        :param configuration.NamespaceConfig cli_config: parsed command line arguments
:raises .CertStorageError: if the configuration file's name didn't end
in ".conf", or the file is missing or broken.
"""
self.cli_config = cli_config
self._lineagename = lineagename_for_filename(config_filename)
# self.configuration should be used to read parameters that
# may have been chosen based on default values from the
# systemwide renewal configuration; self.configfile should be
# used to make and save changes.
try:
self.configfile = configobj.ConfigObj(
config_filename, encoding='utf-8', default_encoding='utf-8')
except configobj.ConfigObjError:
raise errors.CertStorageError(
"error parsing {0}".format(config_filename))
# TODO: Do we actually use anything from defaults and do we want to
# read further defaults from the systemwide renewal configuration
# file at this stage?
self.configuration = config_with_defaults(self.configfile)
if not all(x in self.configuration for x in ALL_FOUR):
raise errors.CertStorageError(
"renewal config file {0} is missing a required "
"file reference".format(self.configfile))
conf_version = self.configuration.get("version")
if (conf_version is not None and
util.get_strict_version(conf_version) > CURRENT_VERSION):
logger.info(
"Attempting to parse the version %s renewal configuration "
"file found at %s with version %s of Certbot. This might not "
"work.", conf_version, config_filename, certbot.__version__)
self.cert = self.configuration["cert"]
self.privkey = self.configuration["privkey"]
self.chain = self.configuration["chain"]
self.fullchain = self.configuration["fullchain"]
self.live_dir = os.path.dirname(self.cert)
self._fix_symlinks()
if update_symlinks:
self._update_symlinks()
self._check_symlinks()
@property
def key_path(self):
"""Duck type for self.privkey"""
return self.privkey
@property
def cert_path(self):
"""Duck type for self.cert"""
return self.cert
@property
def chain_path(self):
"""Duck type for self.chain"""
return self.chain
@property
def fullchain_path(self):
"""Duck type for self.fullchain"""
return self.fullchain
@property
def lineagename(self):
"""Name given to the certificate lineage.
:rtype: str
"""
return self._lineagename
@property
def target_expiry(self):
"""The current target certificate's expiration datetime
:returns: Expiration datetime of the current target certificate
:rtype: :class:`datetime.datetime`
"""
return crypto_util.notAfter(self.current_target("cert"))
@property
def archive_dir(self):
"""Returns the default or specified archive directory"""
return full_archive_path(self.configuration,
self.cli_config, self.lineagename)
def relative_archive_dir(self, from_file):
"""Returns the default or specified archive directory as a relative path
Used for creating symbolic links.
"""
return _relpath_from_file(self.archive_dir, from_file)
@property
def server(self) -> Optional[str]:
"""Returns the ACME server associated with this certificate"""
return self.configuration["renewalparams"].get("server", None)
@property
def is_test_cert(self) -> bool:
"""Returns true if this is a test cert from a staging server."""
if self.server:
return util.is_staging(self.server)
return False
def _check_symlinks(self):
"""Raises an exception if a symlink doesn't exist"""
for kind in ALL_FOUR:
link = getattr(self, kind)
if not os.path.islink(link):
raise errors.CertStorageError(
"expected {0} to be a symlink".format(link))
target = get_link_target(link)
if not os.path.exists(target):
raise errors.CertStorageError("target {0} of symlink {1} does "
"not exist".format(target, link))
def _update_symlinks(self):
"""Updates symlinks to use archive_dir"""
for kind in ALL_FOUR:
link = getattr(self, kind)
previous_link = get_link_target(link)
new_link = os.path.join(self.relative_archive_dir(link),
os.path.basename(previous_link))
os.unlink(link)
os.symlink(new_link, link)
def _consistent(self):
"""Are the files associated with this lineage self-consistent?
:returns: Whether the files stored in connection with this
lineage appear to be correct and consistent with one
another.
:rtype: bool
"""
# Each element must be referenced with an absolute path
for x in (self.cert, self.privkey, self.chain, self.fullchain):
if not os.path.isabs(x):
logger.debug("Element %s is not referenced with an "
"absolute path.", x)
return False
# Each element must exist and be a symbolic link
for x in (self.cert, self.privkey, self.chain, self.fullchain):
if not os.path.islink(x):
logger.debug("Element %s is not a symbolic link.", x)
return False
for kind in ALL_FOUR:
link = getattr(self, kind)
target = get_link_target(link)
# Each element's link must point within the cert lineage's
# directory within the official archive directory
if not os.path.samefile(os.path.dirname(target), self.archive_dir):
logger.debug("Element's link does not point within the "
"cert lineage's directory within the "
"official archive directory. Link: %s, "
"target directory: %s, "
"archive directory: %s. If you've specified "
"the archive directory in the renewal configuration "
"file, you may need to update links by running "
"certbot update_symlinks.",
link, os.path.dirname(target), self.archive_dir)
return False
# The link must point to a file that exists
if not os.path.exists(target):
logger.debug("Link %s points to file %s that does not exist.",
link, target)
return False
# The link must point to a file that follows the archive
# naming convention
pattern = re.compile(r"^{0}([0-9]+)\.pem$".format(kind))
if not pattern.match(os.path.basename(target)):
logger.debug("%s does not follow the archive naming "
"convention.", target)
return False
# It is NOT required that the link's target be a regular
# file (it may itself be a symlink). But we should probably
# do a recursive check that ultimately the target does
# exist?
# XXX: Additional possible consistency checks (e.g.
# cryptographic validation of the chain being a chain,
# the chain matching the cert, and the cert matching
# the subject key)
# XXX: All four of the targets are in the same directory
# (This check is redundant with the check that they
# are all in the desired directory!)
# len(set(os.path.basename(self.current_target(x)
# for x in ALL_FOUR))) == 1
return True
def _fix(self):
"""Attempt to fix defects or inconsistencies in this lineage.
.. todo:: Currently unimplemented.
"""
# TODO: Figure out what kinds of fixes are possible. For
# example, checking if there is a valid version that
# we can update the symlinks to. (Maybe involve
# parsing keys and certs to see if they exist and
# if a key corresponds to the subject key of a cert?)
# TODO: In general, the symlink-reading functions below are not
# cautious enough about the possibility that links or their
# targets may not exist. (This shouldn't happen, but might
# happen as a result of random tampering by a sysadmin, or
# filesystem errors, or crashes.)
def _previous_symlinks(self):
"""Returns the kind and path of all symlinks used in recovery.
:returns: list of (kind, symlink) tuples
:rtype: list
"""
previous_symlinks = []
for kind in ALL_FOUR:
link_dir = os.path.dirname(getattr(self, kind))
link_base = "previous_{0}.pem".format(kind)
previous_symlinks.append((kind, os.path.join(link_dir, link_base)))
return previous_symlinks
def _fix_symlinks(self):
"""Fixes symlinks in the event of an incomplete version update.
If there is no problem with the current symlinks, this function
has no effect.
"""
previous_symlinks = self._previous_symlinks()
if all(os.path.exists(link[1]) for link in previous_symlinks):
for kind, previous_link in previous_symlinks:
current_link = getattr(self, kind)
if os.path.lexists(current_link):
os.unlink(current_link)
os.symlink(filesystem.readlink(previous_link), current_link)
for _, link in previous_symlinks:
if os.path.exists(link):
os.unlink(link)
def current_target(self, kind):
"""Returns full path to which the specified item currently points.
:param str kind: the lineage member item ("cert", "privkey",
"chain", or "fullchain")
:returns: The path to the current version of the specified
member.
:rtype: str or None
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
link = getattr(self, kind)
if not os.path.exists(link):
logger.debug("Expected symlink %s for %s does not exist.",
link, kind)
return None
return get_link_target(link)
def current_version(self, kind):
"""Returns numerical version of the specified item.
For example, if kind is "chain" and the current chain link
points to a file named "chain7.pem", returns the integer 7.
:param str kind: the lineage member item ("cert", "privkey",
"chain", or "fullchain")
:returns: the current version of the specified member.
:rtype: int
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
pattern = re.compile(r"^{0}([0-9]+)\.pem$".format(kind))
target = self.current_target(kind)
if target is None or not os.path.exists(target):
logger.debug("Current-version target for %s "
"does not exist at %s.", kind, target)
target = ""
matches = pattern.match(os.path.basename(target))
if matches:
return int(matches.groups()[0])
logger.debug("No matches for target %s.", kind)
return None
def version(self, kind, version):
"""The filename that corresponds to the specified version and kind.
.. warning:: The specified version may not exist in this
lineage. There is no guarantee that the file path returned
by this method actually exists.
:param str kind: the lineage member item ("cert", "privkey",
"chain", or "fullchain")
:param int version: the desired version
:returns: The path to the specified version of the specified member.
:rtype: str
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
where = os.path.dirname(self.current_target(kind))
return os.path.join(where, "{0}{1}.pem".format(kind, version))
def available_versions(self, kind):
"""Which alternative versions of the specified kind of item exist?
The archive directory where the current version is stored is
consulted to obtain the list of alternatives.
:param str kind: the lineage member item (
``cert``, ``privkey``, ``chain``, or ``fullchain``)
:returns: all of the version numbers that currently exist
:rtype: `list` of `int`
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
where = os.path.dirname(self.current_target(kind))
files = os.listdir(where)
pattern = re.compile(r"^{0}([0-9]+)\.pem$".format(kind))
matches = [pattern.match(f) for f in files]
return sorted([int(m.groups()[0]) for m in matches if m])
def newest_available_version(self, kind):
"""Newest available version of the specified kind of item?
:param str kind: the lineage member item (``cert``,
``privkey``, ``chain``, or ``fullchain``)
:returns: the newest available version of this member
:rtype: int
"""
return max(self.available_versions(kind))
def latest_common_version(self):
"""Newest version for which all items are available?
:returns: the newest available version for which all members
            (``cert``, ``privkey``, ``chain``, and ``fullchain``) exist
:rtype: int
"""
# TODO: this can raise CertStorageError if there is no version overlap
# (it should probably return None instead)
# TODO: this can raise a spurious AttributeError if the current
# link for any kind is missing (it should probably return None)
versions = [self.available_versions(x) for x in ALL_FOUR]
return max(n for n in versions[0] if all(n in v for v in versions[1:]))
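    # Illustration (hypothetical version lists): if the four members have
    # versions [1, 2, 3], [2, 3], [2, 3, 4] and [3], the expression above
    # returns 3, the largest version present in all four lists.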
def next_free_version(self):
"""Smallest version newer than all full or partial versions?
:returns: the smallest version number that is larger than any
version of any item currently stored in this lineage
:rtype: int
"""
# TODO: consider locking/mutual exclusion between updating processes
        # This isn't self.latest_common_version() + 1 because we don't want
        # to collide with a version that might exist for one file type but
        # not for the others.
return max(self.newest_available_version(x) for x in ALL_FOUR) + 1
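    # Illustration (hypothetical state): if the newest complete version is
    # 3 but a crashed update already wrote privkey4.pem, this returns 5,
    # so the next save cannot collide with the partial version 4.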
def ensure_deployed(self):
"""Make sure we've deployed the latest version.
:returns: False if a change was needed, True otherwise
:rtype: bool
May need to recover from rare interrupted / crashed states."""
if self.has_pending_deployment():
logger.warning("Found a new certificate /archive/ that was not "
"linked to in /live/; fixing...")
self.update_all_links_to(self.latest_common_version())
return False
return True
def has_pending_deployment(self):
"""Is there a later version of all of the managed items?
:returns: ``True`` if there is a complete version of this
lineage with a larger version number than the current
version, and ``False`` otherwise
:rtype: bool
"""
# TODO: consider whether to assume consistency or treat
# inconsistent/consistent versions differently
smallest_current = min(self.current_version(x) for x in ALL_FOUR)
return smallest_current < self.latest_common_version()
def _update_link_to(self, kind, version):
"""Make the specified item point at the specified version.
(Note that this method doesn't verify that the specified version
exists.)
:param str kind: the lineage member item ("cert", "privkey",
"chain", or "fullchain")
:param int version: the desired version
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
link = getattr(self, kind)
filename = "{0}{1}.pem".format(kind, version)
# Relative rather than absolute target directory
target_directory = os.path.dirname(filesystem.readlink(link))
# TODO: it could be safer to make the link first under a temporary
# filename, then unlink the old link, then rename the new link
# to the old link; this ensures that this process is able to
# create symlinks.
# TODO: we might also want to check consistency of related links
# for the other corresponding items
os.unlink(link)
os.symlink(os.path.join(target_directory, filename), link)
def update_all_links_to(self, version):
"""Change all member objects to point to the specified version.
:param int version: the desired version
"""
with error_handler.ErrorHandler(self._fix_symlinks):
previous_links = self._previous_symlinks()
for kind, link in previous_links:
os.symlink(self.current_target(kind), link)
for kind in ALL_FOUR:
self._update_link_to(kind, version)
for _, link in previous_links:
os.unlink(link)
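    # Sketch of the protocol implemented above, paired with _fix_symlinks():
    # first snapshot every current target as previous_*.pem, then repoint
    # the live links, then delete the snapshots. If the process dies
    # mid-way, the complete set of previous_*.pem links allows
    # _fix_symlinks() to roll the live links back to a consistent state.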
def names(self):
"""What are the subject names of this certificate?
:returns: the subject names
:rtype: `list` of `str`
:raises .CertStorageError: if could not find cert file.
"""
target = self.current_target("cert")
if target is None:
raise errors.CertStorageError("could not find the certificate file")
with open(target) as f:
return crypto_util.get_names_from_cert(f.read())
def ocsp_revoked(self, version):
"""Is the specified cert version revoked according to OCSP?
        Returns True if the cert version is declared as revoked
        according to OCSP; if the OCSP status could not be determined,
        False is returned.
:param int version: the desired version number
:returns: True if the certificate is revoked, otherwise, False
:rtype: bool
"""
cert_path = self.version("cert", version)
chain_path = self.version("chain", version)
# While the RevocationChecker should return False if it failed to
# determine the OCSP status, let's ensure we don't crash Certbot by
# catching all exceptions here.
try:
return ocsp.RevocationChecker().ocsp_revoked_by_paths(cert_path,
chain_path)
except Exception as e: # pylint: disable=broad-except
logger.warning(
"An error occurred determining the OCSP status of %s.",
cert_path)
logger.debug(str(e))
return False
def autorenewal_is_enabled(self):
"""Is automatic renewal enabled for this cert?
If autorenew is not specified, defaults to True.
:returns: True if automatic renewal is enabled
:rtype: bool
"""
return ("autorenew" not in self.configuration["renewalparams"] or
self.configuration["renewalparams"].as_bool("autorenew"))
def should_autorenew(self):
"""Should we now try to autorenew the most recent cert version?
This is a policy question and does not only depend on whether
the cert is expired. (This considers whether autorenewal is
enabled, whether the cert is revoked, and whether the time
interval for autorenewal has been reached.)
Note that this examines the numerically most recent cert version,
not the currently deployed version.
:returns: whether an attempt should now be made to autorenew the
most current cert version in this lineage
:rtype: bool
"""
if self.autorenewal_is_enabled():
# Consider whether to attempt to autorenew this cert now
# Renewals on the basis of revocation
if self.ocsp_revoked(self.latest_common_version()):
logger.debug("Should renew, certificate is revoked.")
return True
            # Renew within some period before the expiry time
default_interval = constants.RENEWER_DEFAULTS["renew_before_expiry"]
interval = self.configuration.get("renew_before_expiry", default_interval)
expiry = crypto_util.notAfter(self.version(
"cert", self.latest_common_version()))
now = pytz.UTC.fromutc(datetime.datetime.utcnow())
if expiry < add_time_interval(now, interval):
logger.debug("Should renew, less than %s before certificate "
"expiry %s.", interval,
expiry.strftime("%Y-%m-%d %H:%M:%S %Z"))
return True
return False
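    # Illustration (hypothetical values): with interval "30 days" and a
    # cert expiring on 2021-06-30, the condition expiry < now + interval
    # first holds roughly 30 days before expiry, i.e. from 2021-05-31 on.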
@classmethod
def new_lineage(cls, lineagename, cert, privkey, chain, cli_config):
"""Create a new certificate lineage.
Attempts to create a certificate lineage -- enrolled for
potential future renewal -- with the (suggested) lineage name
lineagename, and the associated cert, privkey, and chain (the
associated fullchain will be created automatically). Optional
configurator and renewalparams record the configuration that was
originally used to obtain this cert, so that it can be reused
later during automated renewal.
Returns a new RenewableCert object referring to the created
lineage. (The actual lineage name, as well as all the relevant
file paths, will be available within this object.)
:param str lineagename: the suggested name for this lineage
(normally the current cert's first subject DNS name)
:param str cert: the initial certificate version in PEM format
:param str privkey: the private key in PEM format
:param str chain: the certificate chain in PEM format
:param .NamespaceConfig cli_config: parsed command line
arguments
        :returns: the newly-created RenewableCert object
        :rtype: :class:`storage.RenewableCert`
"""
# Examine the configuration and find the new lineage's name
for i in (cli_config.renewal_configs_dir, cli_config.default_archive_dir,
cli_config.live_dir):
if not os.path.exists(i):
filesystem.makedirs(i, 0o700)
logger.debug("Creating directory %s.", i)
config_file, config_filename = util.unique_lineage_name(
cli_config.renewal_configs_dir, lineagename)
base_readme_path = os.path.join(cli_config.live_dir, README)
if not os.path.exists(base_readme_path):
_write_live_readme_to(base_readme_path, is_base_dir=True)
# Determine where on disk everything will go
# lineagename will now potentially be modified based on which
# renewal configuration file could actually be created
lineagename = lineagename_for_filename(config_filename)
archive = full_archive_path(None, cli_config, lineagename)
live_dir = _full_live_path(cli_config, lineagename)
if os.path.exists(archive) and (not os.path.isdir(archive) or os.listdir(archive)):
config_file.close()
raise errors.CertStorageError(
"archive directory exists for " + lineagename)
if os.path.exists(live_dir) and (not os.path.isdir(live_dir) or os.listdir(live_dir)):
config_file.close()
raise errors.CertStorageError(
"live directory exists for " + lineagename)
for i in (archive, live_dir):
if not os.path.exists(i):
filesystem.makedirs(i)
logger.debug("Creating directory %s.", i)
# Put the data into the appropriate files on disk
target = {kind: os.path.join(live_dir, kind + ".pem") for kind in ALL_FOUR}
archive_target = {kind: os.path.join(archive, kind + "1.pem") for kind in ALL_FOUR}
for kind in ALL_FOUR:
os.symlink(_relpath_from_file(archive_target[kind], target[kind]), target[kind])
with open(target["cert"], "wb") as f_b:
logger.debug("Writing certificate to %s.", target["cert"])
f_b.write(cert)
with util.safe_open(archive_target["privkey"], "wb", chmod=BASE_PRIVKEY_MODE) as f_a:
logger.debug("Writing private key to %s.", target["privkey"])
f_a.write(privkey)
# XXX: Let's make sure to get the file permissions right here
with open(target["chain"], "wb") as f_b:
logger.debug("Writing chain to %s.", target["chain"])
f_b.write(chain)
with open(target["fullchain"], "wb") as f_b:
# assumes that OpenSSL.crypto.dump_certificate includes
# ending newline character
logger.debug("Writing full chain to %s.", target["fullchain"])
f_b.write(cert + chain)
# Write a README file to the live directory
readme_path = os.path.join(live_dir, README)
_write_live_readme_to(readme_path)
# Document what we've done in a new renewal config file
config_file.close()
# Save only the config items that are relevant to renewal
values = relevant_values(vars(cli_config.namespace))
new_config = write_renewal_config(config_filename, config_filename, archive,
target, values)
return cls(new_config.filename, cli_config)
@property
def private_key_type(self):
"""
        :returns: The type of algorithm for the private key, RSA or ECDSA
:rtype: str
"""
with open(self.configuration["privkey"], "rb") as priv_key_file:
key = load_pem_private_key(
data=priv_key_file.read(),
password=None,
backend=default_backend()
)
if isinstance(key, RSAPrivateKey):
return "RSA"
else:
return "ECDSA"
def save_successor(self, prior_version, new_cert,
new_privkey, new_chain, cli_config):
"""Save new cert and chain as a successor of a prior version.
Returns the new version number that was created.
.. note:: this function does NOT update links to deploy this
version
:param int prior_version: the old version to which this version
is regarded as a successor (used to choose a privkey, if the
key has not changed, but otherwise this information is not
permanently recorded anywhere)
:param bytes new_cert: the new certificate, in PEM format
:param bytes new_privkey: the new private key, in PEM format,
or ``None``, if the private key has not changed
:param bytes new_chain: the new chain, in PEM format
:param .NamespaceConfig cli_config: parsed command line
arguments
:returns: the new version number that was created
:rtype: int
"""
# XXX: assumes official archive location rather than examining links
# XXX: consider using os.open for availability of os.O_EXCL
# XXX: ensure file permissions are correct; also create directories
# if needed (ensuring their permissions are correct)
# Figure out what the new version is and hence where to save things
self.cli_config = cli_config
target_version = self.next_free_version()
target = {kind: os.path.join(self.archive_dir, "{0}{1}.pem".format(kind, target_version))
for kind in ALL_FOUR}
old_privkey = os.path.join(
self.archive_dir, "privkey{0}.pem".format(prior_version))
# Distinguish the cases where the privkey has changed and where it
# has not changed (in the latter case, making an appropriate symlink
# to an earlier privkey version)
if new_privkey is None:
# The behavior below keeps the prior key by creating a new
# symlink to the old key or the target of the old key symlink.
if os.path.islink(old_privkey):
old_privkey = filesystem.readlink(old_privkey)
else:
old_privkey = "privkey{0}.pem".format(prior_version)
logger.debug("Writing symlink to old private key, %s.", old_privkey)
os.symlink(old_privkey, target["privkey"])
else:
with util.safe_open(target["privkey"], "wb", chmod=BASE_PRIVKEY_MODE) as f:
logger.debug("Writing new private key to %s.", target["privkey"])
f.write(new_privkey)
# Preserve gid and (mode & MASK_FOR_PRIVATE_KEY_PERMISSIONS)
# from previous privkey in this lineage.
mode = filesystem.compute_private_key_mode(old_privkey, BASE_PRIVKEY_MODE)
filesystem.copy_ownership_and_apply_mode(
old_privkey, target["privkey"], mode, copy_user=False, copy_group=True)
# Save everything else
with open(target["cert"], "wb") as f:
logger.debug("Writing certificate to %s.", target["cert"])
f.write(new_cert)
with open(target["chain"], "wb") as f:
logger.debug("Writing chain to %s.", target["chain"])
f.write(new_chain)
with open(target["fullchain"], "wb") as f:
logger.debug("Writing full chain to %s.", target["fullchain"])
f.write(new_cert + new_chain)
symlinks = {kind: self.configuration[kind] for kind in ALL_FOUR}
# Update renewal config file
self.configfile = update_configuration(
self.lineagename, self.archive_dir, symlinks, cli_config)
self.configuration = config_with_defaults(self.configfile)
return target_version
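# Usage sketch (hypothetical paths; cli_config is a parsed NamespaceConfig):
# a typical renewal pass over a lineage managed by this class looks like
#     lineage = RenewableCert(
#         "/etc/letsencrypt/renewal/example.com.conf", cli_config)
#     if lineage.should_autorenew():
#         version = lineage.save_successor(
#             lineage.latest_common_version(), new_cert, None,
#             new_chain, cli_config)
#         lineage.update_all_links_to(version)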
|
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import requests
import inspect
from functools import partial
from requests import Response
try:
from requests_futures.sessions import FuturesSession
session = FuturesSession(max_workers=10)
except ImportError:
    session = None
from calvin.utilities.calvinlogger import get_logger
DEFAULT_TIMEOUT = 5
_log = get_logger(__name__)
# PATHS
NODE_PATH = '/node/{}'
NODE = '/node'
NODES = '/nodes'
NODE_ID = '/id'
PEER_SETUP = '/peer_setup'
ACTOR = '/actor'
ACTOR_PATH = '/actor/{}'
ACTORS = '/actors'
ACTOR_DISABLE = '/actor/{}/disable'
ACTOR_MIGRATE = '/actor/{}/migrate'
ACTOR_REPLICATE = '/actor/{}/replicate'
APPLICATION_PATH = '/application/{}'
APPLICATION_MIGRATE = '/application/{}/migrate'
ACTOR_PORT = '/actor/{}/port/{}'
ACTOR_REPORT = '/actor/{}/report'
SET_PORT_PROPERTY = '/set_port_property'
APPLICATIONS = '/applications'
DEPLOY = '/deploy'
CONNECT = '/connect'
DISCONNECT = '/disconnect'
INDEX_PATH_RPL = '/index/{}?root_prefix_level={}'
INDEX_PATH = '/index/{}'
STORAGE_PATH = '/storage/{}'
CSR_REQUEST = '/certificate_authority/certificate_signing_request'
ENROLLMENT_PASSWORD = '/certificate_authority/certificate_enrollment_password/{}'
AUTHENTICATION = '/authentication'
AUTHENTICATION_USERS_DB = '/authentication/users_db'
AUTHENTICATION_GROUPS_DB = '/authentication/groups_db'
PROXY_PEER_ABOLISH = '/proxy/{}/migrate'
def get_runtime(value):
if isinstance(value, basestring):
return RT(value)
else:
return value
class RT(object):
def __init__(self, control_uri):
self.control_uri = control_uri
class RequestHandler(object):
    def __init__(self, verify=None):
        self.future_responses = []
        self.verify = verify
self.credentials = None
    def set_credentials(self, credentials):
        if ('user' in credentials) and ('password' in credentials):
            self.credentials = (credentials['user'], credentials['password'])
        else:
            # Deliberately avoid logging the credential values themselves
            _log.error("Incorrectly formatted credentials supplied")
            self.credentials = None
def check_response(self, response, success=range(200, 207), key=None):
if isinstance(response, Response):
if response.status_code in success:
if response.status_code == "204":
return
if response.headers.get("content-type") == "application/json":
try:
r = json.loads(response.text)
return r if key is None else r[key]
except ValueError:
_log.error("Content-Type is %s, but failed to decode '{}' as json", response.text)
return None
else:
# No content type return the text
return response.text
# When failed raise exception
raise Exception("%d%s" % (response.status_code, ("\n" + repr(response.text)) if response.text else ""))
else:
# We have a async Future just return it
response._calvin_key = key
response._calvin_success = success
self.future_responses.append(response)
return response
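    # Behavior sketch: for a synchronous 2xx Response whose JSON body is
    # {"id": "node-1"}, check_response(r, key="id") returns "node-1". A
    # FuturesSession future is instead tagged with the expected key and
    # success codes, queued on self.future_responses, and returned as-is
    # for later resolution via async_response().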
def _send(self, rt, timeout, send_func, path, data=None):
rt = get_runtime(rt)
_log.debug("Sending request %s, %s, %s", send_func, rt.control_uri + path, json.dumps(data))
if self.verify and data is not None:
return send_func(rt.control_uri + path, timeout=timeout, data=json.dumps(data), auth=self.credentials, verify=self.verify)
elif self.verify and data is None:
return send_func(rt.control_uri + path, timeout=timeout, auth=self.credentials, verify=self.verify)
elif data is not None:
return send_func(rt.control_uri + path, timeout=timeout, data=json.dumps(data), auth=self.credentials)
else:
return send_func(rt.control_uri + path, timeout=timeout, auth=self.credentials)
def _get(self, rt, timeout, async, path, headers="", data=None):
req = session if async else requests
return self._send(rt, timeout, req.get, path, data)
def _post(self, rt, timeout, async, path, data=None):
req = session if async else requests
return self._send(rt, timeout, req.post, path, data)
def _put(self, rt, timeout, async, path, data=None):
req = session if async else requests
return self._send(rt, timeout, req.put, path, data)
def _delete(self, rt, timeout, async, path, data=None):
req = session if async else requests
return self._send(rt, timeout, req.delete, path, data)
def get_node_id(self, rt, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, NODE_ID)
return self.check_response(r, key="id")
def get_node(self, rt, node_id, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, NODE_PATH.format(node_id))
return self.check_response(r)
def quit(self, rt, method=None, timeout=DEFAULT_TIMEOUT, async=False):
if method is None:
r = self._delete(rt, timeout, async, NODE)
else:
r = self._delete(rt, timeout, async, NODE_PATH.format(method))
return self.check_response(r)
def get_nodes(self, rt, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, NODES)
return self.check_response(r)
def peer_setup(self, rt, *peers, **kwargs):
timeout = kwargs.get('timeout', DEFAULT_TIMEOUT)
async = kwargs.get('async', False)
        if not isinstance(peers[0], basestring):
            # peers may also be given as a single list/tuple argument
            peers = peers[0]
data = {'peers': peers}
r = self._post(rt, timeout, async, PEER_SETUP, data)
return self.check_response(r)
def new_actor(self, rt, actor_type, actor_name, credentials=None, timeout=DEFAULT_TIMEOUT, async=False):
data = {
'actor_type': actor_type,
'args': {'name': actor_name},
'deploy_args': {'credentials': credentials} if credentials else None
}
r = self._post(rt, timeout, async, ACTOR, data)
return self.check_response(r, key='actor_id')
def new_actor_wargs(self, rt, actor_type, actor_name, args=None, deploy_args=None, timeout=DEFAULT_TIMEOUT,
async=False, **kwargs):
data = {'actor_type': actor_type, 'deploy_args': deploy_args}
if args is None:
kwargs['name'] = actor_name
data['args'] = kwargs
else:
data['args'] = args
r = self._post(rt, timeout, async, ACTOR, data)
return self.check_response(r, key='actor_id')
def get_actor(self, rt, actor_id, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, ACTOR_PATH.format(actor_id))
return self.check_response(r)
def get_actors(self, rt, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, ACTORS)
return self.check_response(r)
def delete_actor(self, rt, actor_id, timeout=DEFAULT_TIMEOUT, async=False):
r = self._delete(rt, timeout, async, ACTOR_PATH.format(actor_id))
return self.check_response(r)
def connect(self, rt, actor_id, port_name, peer_node_id, peer_actor_id, peer_port_name, timeout=DEFAULT_TIMEOUT,
async=False):
data = {
'actor_id': actor_id,
'port_name': port_name,
'port_dir': 'in',
'peer_node_id': peer_node_id,
'peer_actor_id': peer_actor_id,
'peer_port_name': peer_port_name,
'peer_port_dir': 'out'
}
r = self._post(rt, timeout, async, CONNECT, data)
return self.check_response(r)
def disconnect(self, rt, actor_id=None, port_name=None, port_dir=None, port_id=None, terminate=None,
timeout=DEFAULT_TIMEOUT, async=False):
data = {
'actor_id': actor_id,
'port_name': port_name,
'port_dir': port_dir,
'port_id': port_id,
'terminate': terminate
}
r = self._post(rt, timeout, async, DISCONNECT, data)
return self.check_response(r)
def disable(self, rt, actor_id, timeout=DEFAULT_TIMEOUT, async=False):
path = ACTOR_DISABLE.format(actor_id)
r = self._post(rt, timeout, async, path)
return self.check_response(r)
def migrate(self, rt, actor_id, dst_id, timeout=DEFAULT_TIMEOUT, async=False):
data = {'peer_node_id': dst_id}
path = ACTOR_MIGRATE.format(actor_id)
r = self._post(rt, timeout, async, path, data)
return self.check_response(r)
def replicate(self, rt, replication_id=None, dst_id=None, dereplicate=False, exhaust=False, requirements=None, timeout=DEFAULT_TIMEOUT, async=False):
data = {}
if dst_id:
data['peer_node_id'] = dst_id
if dereplicate:
data['dereplicate'] = dereplicate
if exhaust:
data['exhaust'] = exhaust
if requirements is not None:
data['requirements'] = requirements
if not data:
data = None
path = ACTOR_REPLICATE.format(replication_id)
r = self._post(rt, timeout, async, path, data)
return self.check_response(r)
def migrate_use_req(self, rt, actor_id, requirements, extend=False, move=False, timeout=DEFAULT_TIMEOUT,
async=False):
data = {'requirements': requirements, 'extend': extend, 'move': move}
path = ACTOR_MIGRATE.format(actor_id)
r = self._post(rt, timeout, async, path, data)
return self.check_response(r)
def migrate_app_use_req(self, rt, application_id, deploy_info=None, move=False, timeout=DEFAULT_TIMEOUT,
async=False):
data = {'deploy_info': deploy_info, "move": move}
path = APPLICATION_MIGRATE.format(application_id)
r = self._post(rt, timeout, async, path, data)
return self.check_response(r)
def get_port(self, rt, actor_id, port_id, timeout=DEFAULT_TIMEOUT, async=False):
path = ACTOR_PORT.format(actor_id, port_id)
r = self._get(rt, timeout, async, path)
return self.check_response(r)
def set_port_property(self, rt, actor_id, port_type, port_name, port_property=None, value=None,
port_properties=None, port_id=None,
timeout=DEFAULT_TIMEOUT, async=False):
data = {
'actor_id': actor_id,
'port_type': port_type,
'port_name': port_name,
'port_property': port_property,
'value': value,
'port_properties': port_properties
}
if port_id is not None:
data['port_id'] = port_id
r = self._post(rt, timeout, async, SET_PORT_PROPERTY, data)
return self.check_response(r)
def report(self, rt, actor_id, kwargs=None, timeout=DEFAULT_TIMEOUT, async=False):
path = ACTOR_REPORT.format(actor_id)
if kwargs:
r = self._post(rt, timeout, async, path, kwargs)
else:
r = self._get(rt, timeout, async, path)
return self.check_response(r)
def get_applications(self, rt, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, APPLICATIONS)
return self.check_response(r)
def get_application(self, rt, application_id, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, APPLICATION_PATH.format(application_id))
return self.check_response(r)
def delete_application(self, rt, application_id, timeout=DEFAULT_TIMEOUT, async=False):
r = self._delete(rt, timeout, async, APPLICATION_PATH.format(application_id))
return self.check_response(r)
def deploy_application(self, rt, name, script, deploy_info=None, content=None,
check=True, timeout=DEFAULT_TIMEOUT, async=False):
data = {
"name": name,
"script": script,
"deploy_info": deploy_info,
"check": check
}
if content and 'sign' in content:
data["sec_sign"] = {}
for cert_hash, signature in content['sign'].iteritems():
data["sec_sign"][cert_hash] = signature.encode('hex_codec')
r = self._post(rt, timeout, async, DEPLOY, data)
return self.check_response(r)
def deploy_app_info(self, rt, name, app_info, deploy_info=None, check=True,
timeout=DEFAULT_TIMEOUT, async=False):
data = {
"name": name,
"app_info": app_info,
"deploy_info": deploy_info,
"check": check
}
r = self._post(rt, timeout, async, DEPLOY, data=data)
return self.check_response(r)
def add_index(self, rt, index, value, root_prefix_level=None, timeout=DEFAULT_TIMEOUT, async=False):
data = {'value': value}
if root_prefix_level is not None:
data['root_prefix_level'] = root_prefix_level
path = INDEX_PATH.format(index)
r = self._post(rt, timeout, async, path, data)
return self.check_response(r)
def remove_index(self, rt, index, value, root_prefix_level=None, timeout=DEFAULT_TIMEOUT, async=False):
data = {'value': value}
if root_prefix_level is not None:
data['root_prefix_level'] = root_prefix_level
path = INDEX_PATH.format(index)
r = self._delete(rt, timeout, async, path, data)
return self.check_response(r)
def get_index(self, rt, index, root_prefix_level=None, timeout=DEFAULT_TIMEOUT, async=False):
if root_prefix_level is None:
r = self._get(rt, timeout, async, INDEX_PATH.format(index))
else:
r = self._get(rt, timeout, async, INDEX_PATH_RPL.format(index, root_prefix_level))
return self.check_response(r)
def get_storage(self, rt, key, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, STORAGE_PATH.format(key))
return self.check_response(r)
def set_storage(self, rt, key, value, timeout=DEFAULT_TIMEOUT, async=False):
data = {'value': value}
path = STORAGE_PATH.format(key)
r = self._post(rt, timeout, async, path, data)
return self.check_response(r)
def dump_storage(self, rt, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, "/dumpstorage")
return self.check_response(r)
def async_response(self, response):
try:
self.future_responses.remove(response)
        except Exception as e:
            _log.warning("Async response exception %s", e)
            _log.debug("Async response exception %s", e, exc_info=True)
r = response.result()
return self.check_response(r, response._calvin_success, response._calvin_key)
def async_barrier(self):
fr = self.future_responses[:]
exceptions = []
for r in fr:
try:
self.async_response(r)
except Exception as e:
exceptions.append(e)
if exceptions:
raise Exception(max(exceptions))
def __getattr__(self, name):
if name.startswith("async_"):
func = name[6:]
return partial(getattr(self, func), async=True)
else:
raise AttributeError("Unknown request handler attribute %s" % name)
def sign_csr_request(self, rt, csr, timeout=DEFAULT_TIMEOUT, async=False):
data = {'csr': csr}
r = self._post(rt, timeout, async, CSR_REQUEST, data=data['csr'])
return self.check_response(r)
    def set_enrollment_password(self, rt, node_name, enrollment_password, timeout=DEFAULT_TIMEOUT, async=False):
        data = {'enrollment_password': enrollment_password}
r = self._put(rt, timeout, async, ENROLLMENT_PASSWORD.format(node_name), data=data)
return self.check_response(r)
def get_enrollment_password(self, rt, node_name, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, ENROLLMENT_PASSWORD.format(node_name))
result = self.check_response(r)
if 'enrollment_password' in result:
return result['enrollment_password']
else:
_log.error("Failed to fetch enrollment password")
return None
def get_users_db(self, rt, timeout=DEFAULT_TIMEOUT, async=False):
r = self._get(rt, timeout, async, AUTHENTICATION_USERS_DB)
result = self.check_response(r)
if 'users_db' in result:
return result['users_db']
else:
_log.error("Failed to fetch users_db")
return None
def post_users_db(self, rt, users_db, timeout=DEFAULT_TIMEOUT, async=False):
data = {'users_db': users_db}
r = self._put(rt, timeout, async, AUTHENTICATION_USERS_DB, data=data)
return self.check_response(r)
def abolish_proxy_peer(self, rt, peer_id, timeout=DEFAULT_TIMEOUT, async=False):
r = self._delete(rt, timeout, async, PROXY_PEER_ABOLISH.format(peer_id))
return self.check_response(r)
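# Usage sketch (hypothetical runtime URI; the async_ variants require
# requests_futures): every method also has a non-blocking form generated
# by __getattr__ via the async_ prefix and resolved later with
# async_response() or async_barrier(), e.g.
#     handler = RequestHandler()
#     node_id = handler.get_node_id("http://localhost:5001")
#     future = handler.async_get_node_id("http://localhost:5001")
#     node_id = handler.async_response(future)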
|
|
#!/usr/bin/python
import urwid
import mainview
"""
NOTES
-----
This module builds the widget that allows the user to enter their own SQL query.
It also runs the SQL query and shows a success message if it works.
"""
class Qinfo:
def __init__(self):
self.query_text = None
self.query_status = None
class Tracking:
def __init__(self):
self.start = 0
self.end = 15
def show_runsql(frame, body, user_info):
#used to easily insert a blank line widget
blank = urwid.Divider()
query_info = Qinfo()
# reorganizes row list for a column-oriented view, e.g.
# returns a list of widget lists
def splitTable(allrows):
cols = []
if allrows:
for i in range(0, len(allrows[0])):
col = []
for index, row in enumerate(allrows):
col.append(urwid.Text(str(row[i])))
cols.append(col)
return cols
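    # e.g. splitTable([(1, 'a'), (2, 'b')]) conceptually yields
    # [[Text(u'1'), Text(u'2')], [Text(u'a'), Text(u'b')]]: one list of
    # row widgets per column, ready to be stacked into Piles.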
#do all of the work to show the select query
def show_select_results(data, text):
location = Tracking()
row_length = len(data)
col_length = len(data[0])
widget_lists = splitTable(data) # get a list of a list of widgets
#clear out previous query text
sql_edit.original_widget.set_edit_text(u"")
#generates tables of 15 starting at a certain row
def generate_table(start, end):
columns = [] # empty columns list
            for i in range(0, col_length): # for each column
if widget_lists: # if list not empty
include_list = [] # list to hold just the widgets to make table out of
for y in range(start, end):
include_list.append(widget_lists[i][y])
mypile = urwid.Pile(include_list) # make a Pile with the list of widgets
else:
mypile = urwid.Pile([ # a blank widget to fill up some space
urwid.Text(u""),
])
# make a linebox with the Pile and the columnname
if i == col_length - 1:
mylinebox = urwid.LineBox(mypile)
else:
mylinebox = urwid.LineBox((mypile), rline=' ', trcorner=u'\u2500', brcorner=u'\u2500')
columns.append(mylinebox) # append the linebox to the list of columns
return urwid.Columns(columns)
#signal handler for the more button
def more_btn_press(button):
if location.end < row_length - 15:
#can still render a full set of 15 rows
location.end += 15
location.start += 15
table.original_widget = generate_table(location.start, location.end)
count.set_text([u"Viewing rows ", str(location.start + 1), " - ", str(location.end)])
elif location.end < row_length:
                #last chunk of data to show
location.start = location.end
location.end = row_length
table.original_widget = generate_table(location.start, location.end)
count.set_text([u"Viewing rows ", str(location.start + 1), " - ", str(location.end)])
#signal handler for the less button
def less_btn_press(button):
if location.start >= 15:
location.end = location.start
location.start = location.end - 15
table.original_widget = generate_table(location.start, location.end)
count.set_text([u"Viewing rows ", str(location.start + 1), " - ", str(location.end)])
#more button to show more results
#only show if results are greater than a certain amount
more_btn = urwid.AttrWrap( urwid.Button(u"Next", more_btn_press), 'btnf', 'btn')
more_btn = urwid.Padding(more_btn, width=8)
#less button to go back
less_btn = urwid.AttrWrap( urwid.Button(u"Previous", less_btn_press), 'btnf', 'btn')
less_btn = urwid.Padding(less_btn, width=12)
select_text_1 = urwid.Text(["The results from the following SELECT query are below.", "\n\nQUERY: ", text])
select_text_2 = urwid.Text([u"Total Rows: ", str(row_length)])
count = urwid.Text([u"Viewing rows ", str(location.start + 1), " - ", str(location.end)])
if row_length < 15:
#render just the data available
table = generate_table(0, row_length)
#clear out more and less buttons
more_btn.original_widget = urwid.Text(u"")
less_btn.original_widget = urwid.Text(u"")
else:
table = urwid.WidgetPlaceholder(generate_table(location.start, location.end))
#build out selection results view
select_results.original_widget = urwid.Pile([
select_text_1,
blank,
select_text_2,
count,
blank,
table,
blank,
urwid.Columns([
('fixed', 12, less_btn),
('fixed', 3, urwid.Text(u" ")),
('fixed', 8, more_btn)
])
])
#signal handler for text input, stores input information from user
    def edit_change_event(widget, text):
query_info.query_text = text
#signal handler for the run button
def run_btn_press(button):
#clear out any previous error messages
text_error.original_widget = urwid.AttrWrap( urwid.Text(u""), 'body')
#clear out any previous data results
select_results.original_widget = urwid.Text(u"")
        if query_info.query_text is not None:
#convert string to all uppercase to search for select
query_copy = query_info.query_text.upper()
#identify if query string is a select query
select = False
if 'SELECT' in query_copy:
select = True
#run query
query_info.query_status = user_info.db_obj.runquery(user_info.db_conn, query_info.query_text, select)
            if query_info.query_status['success']:
if select:
if query_info.query_status['data']:
#query was a select query and has data, show select data
show_select_results(query_info.query_status['data'], query_info.query_text)
else:
text_error.original_widget = urwid.AttrWrap( urwid.Text([u" SELECT query did not return any data", "\n QUERY: ", query_info.query_text]), 'error')
else:
#show success message
frame.footer = urwid.AttrWrap(urwid.Text(u" Query executed successfully"), 'header')
#reload main view. this updates tables list if table was created
mainview.show_main_view(frame, body, user_info)
else:
text_error.original_widget = urwid.AttrWrap( urwid.Text(query_info.query_status['data']), 'error')
else:
            text_error.original_widget = urwid.AttrWrap( urwid.Text(u" You must enter a query."), 'error')
#variables to hold text to show user for login view
text_1 = urwid.Text(u"Enter a SQL query to run below:")
text_2 = urwid.Text(u"(The edit box supports multiple lines when you press enter)")
text_error = urwid.AttrMap( urwid.Text(u""), 'body')
    #setting up the edit input widget for the SQL query
sql_edit = urwid.Edit(caption="", edit_text="", multiline=True)
urwid.connect_signal(sql_edit, 'change', edit_change_event)
sql_edit = urwid.AttrWrap(sql_edit, 'btnf', 'btn')
#run button
runsql_btn = urwid.AttrWrap( urwid.Button(u"Run", run_btn_press), 'btnf', 'btn')
#placeholder for any data results from SELECT query
select_results = urwid.WidgetPlaceholder( urwid.Text(u""))
#This is the pile widget that holds all of the main body widgets
runsql = urwid.WidgetPlaceholder(
urwid.Pile([
urwid.Padding(text_error, left=5, width = 50),
blank,
urwid.Padding(text_1, left=5),
urwid.Padding(text_2, left=5),
urwid.Padding(sql_edit, left=2, right=2),
blank,
urwid.Padding(runsql_btn, left=10, width=11),
blank,
urwid.Padding(select_results, left=2)
]))
return runsql
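# Usage sketch (hypothetical frame and user_info objects): the returned
# placeholder is meant to be mounted as the body of an urwid Frame, e.g.
#     body = urwid.WidgetPlaceholder(urwid.Text(u""))
#     frame = urwid.Frame(urwid.Filler(body, valign='top'))
#     body.original_widget = show_runsql(frame, body, user_info)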
|
|
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Test the GeoEligibility class.
"""
from matched_markets.methodology import geoeligibility
import pandas as pd
import unittest
class GeoAssignmentsTest(unittest.TestCase):
def testGeoAssignments(self):
c = {'G0', 'G3', 'G5', 'G6'}
t = {'G1', 'G3', 'G4', 'G6'}
x = {'G2', 'G4', 'G5', 'G6'}
ga = geoeligibility.GeoAssignments(c, t, x)
self.assertEqual(ga.c, c)
self.assertEqual(ga.t, t)
self.assertEqual(ga.x, x)
self.assertEqual(ga.ct, {'G3'})
self.assertEqual(ga.tx, {'G4'})
self.assertEqual(ga.cx, {'G5'})
self.assertEqual(ga.ctx, {'G6'})
class GeoEligibilityTest(unittest.TestCase):
def setUp(self):
"""Set up a valid Geo Eligibility data frame."""
super(GeoEligibilityTest, self).setUp()
geonames = ['G%d' % i for i in range(7)]
df = pd.DataFrame({'geo': geonames, 'control': 0, 'treatment': 0,
'exclude': 0},
columns=['geo', 'control', 'treatment', 'exclude'])
df = df.set_index('geo')
# Add all 7 valid assignments (0, 0, 0 is invalid).
df.loc['G0'] = [1, 0, 0] # Control only.
df.loc['G1'] = [0, 1, 0] # Treatment only.
df.loc['G2'] = [0, 0, 1] # Excluded only.
df.loc['G3'] = [1, 1, 0] # Control or Treatment.
df.loc['G4'] = [0, 1, 1] # Treatment or Excluded.
df.loc['G5'] = [1, 0, 1] # Control or Excluded.
df.loc['G6'] = [1, 1, 1] # Control, Treatment, or Excluded.
self.df = df
# Verify that the above dataframe does not raise errors.
self.obj = geoeligibility.GeoEligibility(df)
def testGeoColumn(self):
"""Checks if the geo column is there (as an index or column)."""
# An index or column 'geo' (case sensitive) must exist.
df = self.df.copy()
df.index.name = 'Geo'
with self.assertRaisesRegex(
ValueError, r'There is no column or index \'geo\''):
geoeligibility.GeoEligibility(df)
df.reset_index(inplace=True)
with self.assertRaisesRegex(
ValueError, r'There is no column or index \'geo\''):
geoeligibility.GeoEligibility(df)
# Column 'geo' is also possible. No error raised.
df = self.df.copy().reset_index()
geoeligibility.GeoEligibility(df)
def testColumnNames(self):
"""Checks if the required columns are available."""
# The required column names are case sensitive.
df = self.df.copy()
new_columns = list(df.columns)
new_columns[0] = 'Control'
df.columns = new_columns
with self.assertRaisesRegex(ValueError, r'Missing column\(s\): control'):
geoeligibility.GeoEligibility(df)
# Required columns must exist.
df = self.df.copy()
del df['exclude']
with self.assertRaisesRegex(ValueError, r'Missing column\(s\): exclude'):
geoeligibility.GeoEligibility(df)
# Other columns are allowed.
df = self.df.copy()
df['newcolumn'] = 1
geoeligibility.GeoEligibility(df)
# Duplicated columns are not allowed.
df = self.df.copy()
df['newcolumn'] = 1
df.columns = ['control', 'treatment', 'exclude', 'control']
with self.assertRaisesRegex(ValueError, r'Duplicate column\(s\): control'):
geoeligibility.GeoEligibility(df)
def testDuplicateGeos(self):
"""Checks if there are any duplicate geos in the geo column."""
df = self.df.copy()
geos = df.index.tolist()
geos[1] = 'G0'
df.index = geos
df.index.name = 'geo'
with self.assertRaisesRegex(
ValueError, r'\'geo\' has duplicate values: G0'):
geoeligibility.GeoEligibility(df)
def testBadValues(self):
"""Checks if there are any illegal values in the value columns."""
# Only zeros and ones are allowed.
df = self.df.copy()
df.loc['G1'] = [1, 0, -1]
with self.assertRaisesRegex(
ValueError, 'GeoEligibility objects must have only values '
'0, 1 in columns control, treatment, exclude'):
geoeligibility.GeoEligibility(df)
# Three zeros is an illegal value.
df.loc['G1'] = [0, 0, 0]
with self.assertRaisesRegex(
ValueError, r'Three zeros found for geo\(s\) G1'):
geoeligibility.GeoEligibility(df)
def testStr(self):
"""Check the string representation."""
self.assertEqual(str(self.obj), 'Geo eligibility matrix with 7 geos')
def testDataProperty(self):
"""Check the data property."""
self.assertEqual(id(self.obj.data), id(self.obj.data))
def testEligibleAssignmentsDefault(self):
"""Test the method get_eligible_assignments, default case."""
ga = self.obj.get_eligible_assignments()
# The sets contain geo IDs.
self.assertEqual(ga.c, {'G0', 'G3', 'G5', 'G6'})
self.assertEqual(ga.t, {'G1', 'G3', 'G4', 'G6'})
self.assertEqual(ga.x, {'G2', 'G4', 'G5', 'G6'})
self.assertEqual(ga.c_fixed, {'G0'})
self.assertEqual(ga.t_fixed, {'G1'})
self.assertEqual(ga.x_fixed, {'G2'})
self.assertEqual(ga.ct, {'G3'})
self.assertEqual(ga.tx, {'G4'})
self.assertEqual(ga.cx, {'G5'})
self.assertEqual(ga.ctx, {'G6'})
self.assertEqual(ga.all, {'G0', 'G1', 'G2', 'G3', 'G4', 'G5', 'G6'})
def testEligibleAssignmentsOnlyIndicesSpecified(self):
"""Test the method get_eligible_assignments, indices=True."""
with self.assertRaisesRegex(
ValueError, '\'geos\' is not specified but indices=True'):
self.obj.get_eligible_assignments(indices=True)
with self.assertRaisesRegex(
ValueError, '\'geos\' is not specified but indices=True'):
self.obj.get_eligible_assignments(geos=None, indices=True)
def testEligibleAssignmentsSubset(self):
"""Test the method get_eligible_assignments, subset specified."""
geos = ['G1', 'G2', 'G3', 'G4']
ga = self.obj.get_eligible_assignments(geos=geos)
# The sets contain geo IDs from the list 'geos'.
self.assertEqual(ga.c, {'G3'})
self.assertEqual(ga.t, {'G1', 'G3', 'G4'})
self.assertEqual(ga.x, {'G2', 'G4'})
self.assertEqual(ga.c_fixed, set())
self.assertEqual(ga.t_fixed, {'G1'})
self.assertEqual(ga.x_fixed, {'G2'})
self.assertEqual(ga.ct, {'G3'})
self.assertEqual(ga.tx, {'G4'})
self.assertEqual(ga.cx, set())
self.assertEqual(ga.ctx, set())
self.assertEqual(ga.all, {'G1', 'G2', 'G3', 'G4'})
def testEligibleAssignmentsSubsetIndices(self):
"""Test the method get_eligible_assignments, given subset + indices."""
geos = ['G4', 'G3', 'G2', 'G1']
ga = self.obj.get_eligible_assignments(geos=geos, indices=True)
# The sets contain indices pointing to the geo IDs in 'geos'.
self.assertEqual(ga.c, {1})
self.assertEqual(ga.t, {0, 1, 3})
self.assertEqual(ga.x, {0, 2})
self.assertEqual(ga.c_fixed, set())
self.assertEqual(ga.t_fixed, {3})
self.assertEqual(ga.x_fixed, {2})
self.assertEqual(ga.ct, {1})
self.assertEqual(ga.tx, {0})
self.assertEqual(ga.cx, set())
self.assertEqual(ga.ctx, set())
self.assertEqual(ga.all, {0, 1, 2, 3})
if __name__ == '__main__':
unittest.main()
|
|
# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from wavefront_api_client.configuration import Configuration
class Event(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'alert_tags': 'list[str]',
'annotations': 'dict(str, str)',
'can_close': 'bool',
'can_delete': 'bool',
'computed_hlps': 'list[SourceLabelPair]',
'created_at': 'int',
'created_epoch_millis': 'int',
'creator_id': 'str',
'creator_type': 'list[str]',
'dimensions': 'dict(str, list[str])',
'end_time': 'int',
'hosts': 'list[str]',
'id': 'str',
'is_ephemeral': 'bool',
'is_user_event': 'bool',
'metrics_used': 'list[str]',
'name': 'str',
'running_state': 'str',
'start_time': 'int',
'summarized_events': 'int',
'table': 'str',
'tags': 'list[str]',
'updated_at': 'int',
'updated_epoch_millis': 'int',
'updater_id': 'str'
}
attribute_map = {
'alert_tags': 'alertTags',
'annotations': 'annotations',
'can_close': 'canClose',
'can_delete': 'canDelete',
'computed_hlps': 'computedHlps',
'created_at': 'createdAt',
'created_epoch_millis': 'createdEpochMillis',
'creator_id': 'creatorId',
'creator_type': 'creatorType',
'dimensions': 'dimensions',
'end_time': 'endTime',
'hosts': 'hosts',
'id': 'id',
'is_ephemeral': 'isEphemeral',
'is_user_event': 'isUserEvent',
'metrics_used': 'metricsUsed',
'name': 'name',
'running_state': 'runningState',
'start_time': 'startTime',
'summarized_events': 'summarizedEvents',
'table': 'table',
'tags': 'tags',
'updated_at': 'updatedAt',
'updated_epoch_millis': 'updatedEpochMillis',
'updater_id': 'updaterId'
}
def __init__(self, alert_tags=None, annotations=None, can_close=None, can_delete=None, computed_hlps=None, created_at=None, created_epoch_millis=None, creator_id=None, creator_type=None, dimensions=None, end_time=None, hosts=None, id=None, is_ephemeral=None, is_user_event=None, metrics_used=None, name=None, running_state=None, start_time=None, summarized_events=None, table=None, tags=None, updated_at=None, updated_epoch_millis=None, updater_id=None, _configuration=None): # noqa: E501
"""Event - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._alert_tags = None
self._annotations = None
self._can_close = None
self._can_delete = None
self._computed_hlps = None
self._created_at = None
self._created_epoch_millis = None
self._creator_id = None
self._creator_type = None
self._dimensions = None
self._end_time = None
self._hosts = None
self._id = None
self._is_ephemeral = None
self._is_user_event = None
self._metrics_used = None
self._name = None
self._running_state = None
self._start_time = None
self._summarized_events = None
self._table = None
self._tags = None
self._updated_at = None
self._updated_epoch_millis = None
self._updater_id = None
self.discriminator = None
if alert_tags is not None:
self.alert_tags = alert_tags
self.annotations = annotations
if can_close is not None:
self.can_close = can_close
if can_delete is not None:
self.can_delete = can_delete
if computed_hlps is not None:
self.computed_hlps = computed_hlps
if created_at is not None:
self.created_at = created_at
if created_epoch_millis is not None:
self.created_epoch_millis = created_epoch_millis
if creator_id is not None:
self.creator_id = creator_id
if creator_type is not None:
self.creator_type = creator_type
if dimensions is not None:
self.dimensions = dimensions
if end_time is not None:
self.end_time = end_time
if hosts is not None:
self.hosts = hosts
if id is not None:
self.id = id
if is_ephemeral is not None:
self.is_ephemeral = is_ephemeral
if is_user_event is not None:
self.is_user_event = is_user_event
if metrics_used is not None:
self.metrics_used = metrics_used
self.name = name
if running_state is not None:
self.running_state = running_state
self.start_time = start_time
if summarized_events is not None:
self.summarized_events = summarized_events
if table is not None:
self.table = table
if tags is not None:
self.tags = tags
if updated_at is not None:
self.updated_at = updated_at
if updated_epoch_millis is not None:
self.updated_epoch_millis = updated_epoch_millis
if updater_id is not None:
self.updater_id = updater_id
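    # Construction sketch (hypothetical values): name, annotations and
    # start_time are the only attributes assigned unconditionally above,
    # so with client-side validation enabled a minimal event is e.g.
    #     Event(name="deploy finished",
    #           annotations={"severity": "info"},
    #           start_time=1600000000000)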
@property
def alert_tags(self):
"""Gets the alert_tags of this Event. # noqa: E501
The list of tags on the alert which created this event. # noqa: E501
:return: The alert_tags of this Event. # noqa: E501
:rtype: list[str]
"""
return self._alert_tags
@alert_tags.setter
def alert_tags(self, alert_tags):
"""Sets the alert_tags of this Event.
The list of tags on the alert which created this event. # noqa: E501
:param alert_tags: The alert_tags of this Event. # noqa: E501
:type: list[str]
"""
self._alert_tags = alert_tags
@property
def annotations(self):
"""Gets the annotations of this Event. # noqa: E501
A string->string map of additional annotations on the event # noqa: E501
:return: The annotations of this Event. # noqa: E501
:rtype: dict(str, str)
"""
return self._annotations
@annotations.setter
def annotations(self, annotations):
"""Sets the annotations of this Event.
A string->string map of additional annotations on the event # noqa: E501
:param annotations: The annotations of this Event. # noqa: E501
:type: dict(str, str)
"""
if self._configuration.client_side_validation and annotations is None:
raise ValueError("Invalid value for `annotations`, must not be `None`") # noqa: E501
self._annotations = annotations
@property
def can_close(self):
"""Gets the can_close of this Event. # noqa: E501
:return: The can_close of this Event. # noqa: E501
:rtype: bool
"""
return self._can_close
@can_close.setter
def can_close(self, can_close):
"""Sets the can_close of this Event.
:param can_close: The can_close of this Event. # noqa: E501
:type: bool
"""
self._can_close = can_close
@property
def can_delete(self):
"""Gets the can_delete of this Event. # noqa: E501
:return: The can_delete of this Event. # noqa: E501
:rtype: bool
"""
return self._can_delete
@can_delete.setter
def can_delete(self, can_delete):
"""Sets the can_delete of this Event.
:param can_delete: The can_delete of this Event. # noqa: E501
:type: bool
"""
self._can_delete = can_delete
@property
def computed_hlps(self):
"""Gets the computed_hlps of this Event. # noqa: E501
All the host/label/tags of the event. # noqa: E501
:return: The computed_hlps of this Event. # noqa: E501
:rtype: list[SourceLabelPair]
"""
return self._computed_hlps
@computed_hlps.setter
def computed_hlps(self, computed_hlps):
"""Sets the computed_hlps of this Event.
All the host/label/tags of the event. # noqa: E501
:param computed_hlps: The computed_hlps of this Event. # noqa: E501
:type: list[SourceLabelPair]
"""
self._computed_hlps = computed_hlps
@property
def created_at(self):
"""Gets the created_at of this Event. # noqa: E501
:return: The created_at of this Event. # noqa: E501
:rtype: int
"""
return self._created_at
@created_at.setter
def created_at(self, created_at):
"""Sets the created_at of this Event.
:param created_at: The created_at of this Event. # noqa: E501
:type: int
"""
self._created_at = created_at
@property
def created_epoch_millis(self):
"""Gets the created_epoch_millis of this Event. # noqa: E501
:return: The created_epoch_millis of this Event. # noqa: E501
:rtype: int
"""
return self._created_epoch_millis
@created_epoch_millis.setter
def created_epoch_millis(self, created_epoch_millis):
"""Sets the created_epoch_millis of this Event.
:param created_epoch_millis: The created_epoch_millis of this Event. # noqa: E501
:type: int
"""
self._created_epoch_millis = created_epoch_millis
@property
def creator_id(self):
"""Gets the creator_id of this Event. # noqa: E501
:return: The creator_id of this Event. # noqa: E501
:rtype: str
"""
return self._creator_id
@creator_id.setter
def creator_id(self, creator_id):
"""Sets the creator_id of this Event.
:param creator_id: The creator_id of this Event. # noqa: E501
:type: str
"""
self._creator_id = creator_id
@property
def creator_type(self):
"""Gets the creator_type of this Event. # noqa: E501
:return: The creator_type of this Event. # noqa: E501
:rtype: list[str]
"""
return self._creator_type
@creator_type.setter
def creator_type(self, creator_type):
"""Sets the creator_type of this Event.
:param creator_type: The creator_type of this Event. # noqa: E501
:type: list[str]
"""
allowed_values = ["USER", "ALERT", "SYSTEM"] # noqa: E501
if (self._configuration.client_side_validation and
not set(creator_type).issubset(set(allowed_values))): # noqa: E501
raise ValueError(
"Invalid values for `creator_type` [{0}], must be a subset of [{1}]" # noqa: E501
.format(", ".join(map(str, set(creator_type) - set(allowed_values))), # noqa: E501
", ".join(map(str, allowed_values)))
)
self._creator_type = creator_type
@property
def dimensions(self):
"""Gets the dimensions of this Event. # noqa: E501
A string-><list of strings> map of additional dimension info on the event # noqa: E501
:return: The dimensions of this Event. # noqa: E501
:rtype: dict(str, list[str])
"""
return self._dimensions
@dimensions.setter
def dimensions(self, dimensions):
"""Sets the dimensions of this Event.
A string-><list of strings> map of additional dimension info on the event # noqa: E501
:param dimensions: The dimensions of this Event. # noqa: E501
:type: dict(str, list[str])
"""
self._dimensions = dimensions
@property
def end_time(self):
"""Gets the end_time of this Event. # noqa: E501
End time of the event, in epoch millis. Set to startTime + 1 for an instantaneous event # noqa: E501
:return: The end_time of this Event. # noqa: E501
:rtype: int
"""
return self._end_time
@end_time.setter
def end_time(self, end_time):
"""Sets the end_time of this Event.
End time of the event, in epoch millis. Set to startTime + 1 for an instantaneous event # noqa: E501
:param end_time: The end_time of this Event. # noqa: E501
:type: int
"""
self._end_time = end_time
@property
def hosts(self):
"""Gets the hosts of this Event. # noqa: E501
A list of sources/hosts affected by the event # noqa: E501
:return: The hosts of this Event. # noqa: E501
:rtype: list[str]
"""
return self._hosts
@hosts.setter
def hosts(self, hosts):
"""Sets the hosts of this Event.
A list of sources/hosts affected by the event # noqa: E501
:param hosts: The hosts of this Event. # noqa: E501
:type: list[str]
"""
self._hosts = hosts
@property
def id(self):
"""Gets the id of this Event. # noqa: E501
:return: The id of this Event. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Event.
:param id: The id of this Event. # noqa: E501
:type: str
"""
self._id = id
@property
def is_ephemeral(self):
"""Gets the is_ephemeral of this Event. # noqa: E501
Whether the event is an artificial event generated by a literal expression or alert backtesting, i.e. not stored in the Wavefront backend # noqa: E501
:return: The is_ephemeral of this Event. # noqa: E501
:rtype: bool
"""
return self._is_ephemeral
@is_ephemeral.setter
def is_ephemeral(self, is_ephemeral):
"""Sets the is_ephemeral of this Event.
Whether the event is an artificial event generated by a literal expression or alert backtesting, i.e. not stored in the Wavefront backend # noqa: E501
:param is_ephemeral: The is_ephemeral of this Event. # noqa: E501
:type: bool
"""
self._is_ephemeral = is_ephemeral
@property
def is_user_event(self):
"""Gets the is_user_event of this Event. # noqa: E501
Whether this event was created by a user, versus the system. Default: system # noqa: E501
:return: The is_user_event of this Event. # noqa: E501
:rtype: bool
"""
return self._is_user_event
@is_user_event.setter
def is_user_event(self, is_user_event):
"""Sets the is_user_event of this Event.
Whether this event was created by a user, versus the system. Default: system # noqa: E501
:param is_user_event: The is_user_event of this Event. # noqa: E501
:type: bool
"""
self._is_user_event = is_user_event
@property
def metrics_used(self):
"""Gets the metrics_used of this Event. # noqa: E501
A list of metrics affected by the event # noqa: E501
:return: The metrics_used of this Event. # noqa: E501
:rtype: list[str]
"""
return self._metrics_used
@metrics_used.setter
def metrics_used(self, metrics_used):
"""Sets the metrics_used of this Event.
A list of metrics affected by the event # noqa: E501
:param metrics_used: The metrics_used of this Event. # noqa: E501
:type: list[str]
"""
self._metrics_used = metrics_used
@property
def name(self):
"""Gets the name of this Event. # noqa: E501
The name of the event. If 'annotations.prettyName' is present, 'name' will be equivalent to that value # noqa: E501
:return: The name of this Event. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Event.
The name of the event. If 'annotations.prettyName' is present, 'name' will be equivalent to that value # noqa: E501
:param name: The name of this Event. # noqa: E501
:type: str
"""
if self._configuration.client_side_validation and name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def running_state(self):
"""Gets the running_state of this Event. # noqa: E501
:return: The running_state of this Event. # noqa: E501
:rtype: str
"""
return self._running_state
@running_state.setter
def running_state(self, running_state):
"""Sets the running_state of this Event.
:param running_state: The running_state of this Event. # noqa: E501
:type: str
"""
allowed_values = ["ONGOING", "PENDING", "ENDED"] # noqa: E501
if (self._configuration.client_side_validation and
running_state not in allowed_values):
raise ValueError(
"Invalid value for `running_state` ({0}), must be one of {1}" # noqa: E501
.format(running_state, allowed_values)
)
self._running_state = running_state
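    # Note: with client-side validation enabled, assigning a value outside
    # the allowed set raises ValueError. A hypothetical example:
    #   event.running_state = 'ONGOING'   # accepted
    #   event.running_state = 'PAUSED'    # raises ValueError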
@property
def start_time(self):
"""Gets the start_time of this Event. # noqa: E501
Start time of the event, in epoch millis. If the JSON value is missing or set to 0, startTime will be set to the current time # noqa: E501
:return: The start_time of this Event. # noqa: E501
:rtype: int
"""
return self._start_time
@start_time.setter
def start_time(self, start_time):
"""Sets the start_time of this Event.
Start time of the event, in epoch millis. If the JSON value is missing or set to 0, startTime will be set to the current time # noqa: E501
:param start_time: The start_time of this Event. # noqa: E501
:type: int
"""
if self._configuration.client_side_validation and start_time is None:
raise ValueError("Invalid value for `start_time`, must not be `None`") # noqa: E501
self._start_time = start_time
@property
def summarized_events(self):
"""Gets the summarized_events of this Event. # noqa: E501
In some event queries, multiple events that occur nearly simultaneously are summarized under a single event. This value specifies the number of events summarized under this one # noqa: E501
:return: The summarized_events of this Event. # noqa: E501
:rtype: int
"""
return self._summarized_events
@summarized_events.setter
def summarized_events(self, summarized_events):
"""Sets the summarized_events of this Event.
In some event queries, multiple events that occur nearly simultaneously are summarized under a single event. This value specifies the number of events summarized under this one # noqa: E501
:param summarized_events: The summarized_events of this Event. # noqa: E501
:type: int
"""
self._summarized_events = summarized_events
@property
def table(self):
"""Gets the table of this Event. # noqa: E501
The customer to which the event belongs # noqa: E501
:return: The table of this Event. # noqa: E501
:rtype: str
"""
return self._table
@table.setter
def table(self, table):
"""Sets the table of this Event.
The customer to which the event belongs # noqa: E501
:param table: The table of this Event. # noqa: E501
:type: str
"""
self._table = table
@property
def tags(self):
"""Gets the tags of this Event. # noqa: E501
A list of event tags # noqa: E501
:return: The tags of this Event. # noqa: E501
:rtype: list[str]
"""
return self._tags
@tags.setter
def tags(self, tags):
"""Sets the tags of this Event.
A list of event tags # noqa: E501
:param tags: The tags of this Event. # noqa: E501
:type: list[str]
"""
self._tags = tags
@property
def updated_at(self):
"""Gets the updated_at of this Event. # noqa: E501
:return: The updated_at of this Event. # noqa: E501
:rtype: int
"""
return self._updated_at
@updated_at.setter
def updated_at(self, updated_at):
"""Sets the updated_at of this Event.
:param updated_at: The updated_at of this Event. # noqa: E501
:type: int
"""
self._updated_at = updated_at
@property
def updated_epoch_millis(self):
"""Gets the updated_epoch_millis of this Event. # noqa: E501
:return: The updated_epoch_millis of this Event. # noqa: E501
:rtype: int
"""
return self._updated_epoch_millis
@updated_epoch_millis.setter
def updated_epoch_millis(self, updated_epoch_millis):
"""Sets the updated_epoch_millis of this Event.
:param updated_epoch_millis: The updated_epoch_millis of this Event. # noqa: E501
:type: int
"""
self._updated_epoch_millis = updated_epoch_millis
@property
def updater_id(self):
"""Gets the updater_id of this Event. # noqa: E501
:return: The updater_id of this Event. # noqa: E501
:rtype: str
"""
return self._updater_id
@updater_id.setter
def updater_id(self, updater_id):
"""Sets the updater_id of this Event.
:param updater_id: The updater_id of this Event. # noqa: E501
:type: str
"""
self._updater_id = updater_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Event, dict):
for key, value in self.items():
result[key] = value
return result
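    # Sketch of the shape to_dict() returns, with hypothetical field values;
    # nested models and containers are converted recursively:
    #   {'name': 'deploy', 'start_time': 1546300800000,
    #    'end_time': 1546300800001, 'tags': ['prod'], ...}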
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Event):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, Event):
return True
return self.to_dict() != other.to_dict()
|
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from datetime import datetime
import numpy as np
import pytest
from pandas.compat import lrange
import pandas as pd
from pandas import DataFrame, Index, Series, Timestamp, date_range
from pandas.tests.frame.common import TestData
import pandas.util.testing as tm
from pandas.util.testing import assert_frame_equal, assert_series_equal
class TestDataFrameConcatCommon(TestData):
def test_concat_multiple_frames_dtypes(self):
# GH 2759
A = DataFrame(data=np.ones((10, 2)), columns=[
'foo', 'bar'], dtype=np.float64)
B = DataFrame(data=np.ones((10, 2)), dtype=np.float32)
results = pd.concat((A, B), axis=1).get_dtype_counts()
expected = Series(dict(float64=2, float32=2))
assert_series_equal(results, expected)
@pytest.mark.parametrize('data', [
pd.date_range('2000', periods=4),
pd.date_range('2000', periods=4, tz="US/Central"),
pd.period_range('2000', periods=4),
pd.timedelta_range(0, periods=4),
])
    def test_combine_datetimelike_udf(self, data):
# https://github.com/pandas-dev/pandas/issues/23079
df = pd.DataFrame({"A": data})
other = df.copy()
df.iloc[1, 0] = None
def combiner(a, b):
return b
result = df.combine(other, combiner)
tm.assert_frame_equal(result, other)
def test_concat_multiple_tzs(self):
# GH 12467
# combining datetime tz-aware and naive DataFrames
ts1 = Timestamp('2015-01-01', tz=None)
ts2 = Timestamp('2015-01-01', tz='UTC')
ts3 = Timestamp('2015-01-01', tz='EST')
df1 = DataFrame(dict(time=[ts1]))
df2 = DataFrame(dict(time=[ts2]))
df3 = DataFrame(dict(time=[ts3]))
results = pd.concat([df1, df2]).reset_index(drop=True)
expected = DataFrame(dict(time=[ts1, ts2]), dtype=object)
assert_frame_equal(results, expected)
results = pd.concat([df1, df3]).reset_index(drop=True)
expected = DataFrame(dict(time=[ts1, ts3]), dtype=object)
assert_frame_equal(results, expected)
results = pd.concat([df2, df3]).reset_index(drop=True)
expected = DataFrame(dict(time=[ts2, ts3]))
assert_frame_equal(results, expected)
@pytest.mark.parametrize(
't1',
[
'2015-01-01',
pytest.param(pd.NaT, marks=pytest.mark.xfail(
reason='GH23037 incorrect dtype when concatenating'))])
def test_concat_tz_NaT(self, t1):
# GH 22796
        # Concatenating tz-aware multicolumn DataFrames
ts1 = Timestamp(t1, tz='UTC')
ts2 = Timestamp('2015-01-01', tz='UTC')
ts3 = Timestamp('2015-01-01', tz='UTC')
df1 = DataFrame([[ts1, ts2]])
df2 = DataFrame([[ts3]])
result = pd.concat([df1, df2])
expected = DataFrame([[ts1, ts2], [ts3, pd.NaT]], index=[0, 0])
assert_frame_equal(result, expected)
def test_concat_tz_not_aligned(self):
# GH 22796
ts = pd.to_datetime([1, 2]).tz_localize("UTC")
a = pd.DataFrame({"A": ts})
b = pd.DataFrame({"A": ts, "B": ts})
result = pd.concat([a, b], sort=True, ignore_index=True)
expected = pd.DataFrame({"A": list(ts) + list(ts),
"B": [pd.NaT, pd.NaT] + list(ts)})
assert_frame_equal(result, expected)
def test_concat_tuple_keys(self):
# GH 14438
df1 = pd.DataFrame(np.ones((2, 2)), columns=list('AB'))
df2 = pd.DataFrame(np.ones((3, 2)) * 2, columns=list('AB'))
results = pd.concat((df1, df2), keys=[('bee', 'bah'), ('bee', 'boo')])
expected = pd.DataFrame(
{'A': {('bee', 'bah', 0): 1.0,
('bee', 'bah', 1): 1.0,
('bee', 'boo', 0): 2.0,
('bee', 'boo', 1): 2.0,
('bee', 'boo', 2): 2.0},
'B': {('bee', 'bah', 0): 1.0,
('bee', 'bah', 1): 1.0,
('bee', 'boo', 0): 2.0,
('bee', 'boo', 1): 2.0,
('bee', 'boo', 2): 2.0}})
assert_frame_equal(results, expected)
def test_append_series_dict(self):
df = DataFrame(np.random.randn(5, 4),
columns=['foo', 'bar', 'baz', 'qux'])
series = df.loc[4]
msg = 'Indexes have overlapping values'
with pytest.raises(ValueError, match=msg):
df.append(series, verify_integrity=True)
series.name = None
msg = 'Can only append a Series if ignore_index=True'
with pytest.raises(TypeError, match=msg):
df.append(series, verify_integrity=True)
result = df.append(series[::-1], ignore_index=True)
expected = df.append(DataFrame({0: series[::-1]}, index=df.columns).T,
ignore_index=True)
assert_frame_equal(result, expected)
# dict
result = df.append(series.to_dict(), ignore_index=True)
assert_frame_equal(result, expected)
result = df.append(series[::-1][:3], ignore_index=True)
expected = df.append(DataFrame({0: series[::-1][:3]}).T,
ignore_index=True, sort=True)
assert_frame_equal(result, expected.loc[:, result.columns])
# can append when name set
row = df.loc[4]
row.name = 5
result = df.append(row)
expected = df.append(df[-1:], ignore_index=True)
assert_frame_equal(result, expected)
def test_append_list_of_series_dicts(self):
df = DataFrame(np.random.randn(5, 4),
columns=['foo', 'bar', 'baz', 'qux'])
dicts = [x.to_dict() for idx, x in df.iterrows()]
result = df.append(dicts, ignore_index=True)
expected = df.append(df, ignore_index=True)
assert_frame_equal(result, expected)
# different columns
dicts = [{'foo': 1, 'bar': 2, 'baz': 3, 'peekaboo': 4},
{'foo': 5, 'bar': 6, 'baz': 7, 'peekaboo': 8}]
result = df.append(dicts, ignore_index=True, sort=True)
expected = df.append(DataFrame(dicts), ignore_index=True, sort=True)
assert_frame_equal(result, expected)
def test_append_empty_dataframe(self):
# Empty df append empty df
df1 = DataFrame([])
df2 = DataFrame([])
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
# Non-empty df append empty df
df1 = DataFrame(np.random.randn(5, 2))
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
# Empty df with columns append empty df
df1 = DataFrame(columns=['bar', 'foo'])
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
# Non-Empty df with columns append empty df
df1 = DataFrame(np.random.randn(5, 2), columns=['bar', 'foo'])
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
def test_append_dtypes(self):
# GH 5754
# row appends of different dtypes (so need to do by-item)
# can sometimes infer the correct type
df1 = DataFrame({'bar': Timestamp('20130101')}, index=lrange(5))
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=lrange(1))
df2 = DataFrame({'bar': 'foo'}, index=lrange(1, 2))
result = df1.append(df2)
expected = DataFrame({'bar': [Timestamp('20130101'), 'foo']})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=lrange(1))
df2 = DataFrame({'bar': np.nan}, index=lrange(1, 2))
result = df1.append(df2)
expected = DataFrame(
{'bar': Series([Timestamp('20130101'), np.nan], dtype='M8[ns]')})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=lrange(1))
df2 = DataFrame({'bar': np.nan}, index=lrange(1, 2), dtype=object)
result = df1.append(df2)
expected = DataFrame(
{'bar': Series([Timestamp('20130101'), np.nan], dtype='M8[ns]')})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': np.nan}, index=lrange(1))
df2 = DataFrame({'bar': Timestamp('20130101')}, index=lrange(1, 2))
result = df1.append(df2)
expected = DataFrame(
{'bar': Series([np.nan, Timestamp('20130101')], dtype='M8[ns]')})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=lrange(1))
df2 = DataFrame({'bar': 1}, index=lrange(1, 2), dtype=object)
result = df1.append(df2)
expected = DataFrame({'bar': Series([Timestamp('20130101'), 1])})
assert_frame_equal(result, expected)
def test_update(self):
df = DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[3.6, 2., np.nan],
[np.nan, np.nan, 7]], index=[1, 3])
df.update(other)
expected = DataFrame([[1.5, np.nan, 3],
[3.6, 2, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 7.]])
assert_frame_equal(df, expected)
def test_update_dtypes(self):
# gh 3016
df = DataFrame([[1., 2., False, True], [4., 5., True, False]],
columns=['A', 'B', 'bool1', 'bool2'])
other = DataFrame([[45, 45]], index=[0], columns=['A', 'B'])
df.update(other)
expected = DataFrame([[45., 45., False, True], [4., 5., True, False]],
columns=['A', 'B', 'bool1', 'bool2'])
assert_frame_equal(df, expected)
def test_update_nooverwrite(self):
df = DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[3.6, 2., np.nan],
[np.nan, np.nan, 7]], index=[1, 3])
df.update(other, overwrite=False)
expected = DataFrame([[1.5, np.nan, 3],
[1.5, 2, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 3.]])
assert_frame_equal(df, expected)
def test_update_filtered(self):
df = DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[3.6, 2., np.nan],
[np.nan, np.nan, 7]], index=[1, 3])
df.update(other, filter_func=lambda x: x > 2)
expected = DataFrame([[1.5, np.nan, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 7.]])
assert_frame_equal(df, expected)
@pytest.mark.parametrize('bad_kwarg, exception, msg', [
# errors must be 'ignore' or 'raise'
({'errors': 'something'}, ValueError, 'The parameter errors must.*'),
({'join': 'inner'}, NotImplementedError, 'Only left join is supported')
])
def test_update_raise_bad_parameter(self, bad_kwarg, exception, msg):
df = DataFrame([[1.5, 1, 3.]])
with pytest.raises(exception, match=msg):
df.update(df, **bad_kwarg)
def test_update_raise_on_overlap(self):
df = DataFrame([[1.5, 1, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[2., np.nan],
[np.nan, 7]], index=[1, 3], columns=[1, 2])
with pytest.raises(ValueError, match="Data overlaps"):
df.update(other, errors='raise')
@pytest.mark.parametrize('raise_conflict', [True, False])
def test_update_deprecation(self, raise_conflict):
df = DataFrame([[1.5, 1, 3.]])
other = DataFrame()
with tm.assert_produces_warning(FutureWarning):
df.update(other, raise_conflict=raise_conflict)
def test_update_from_non_df(self):
d = {'a': Series([1, 2, 3, 4]), 'b': Series([5, 6, 7, 8])}
df = DataFrame(d)
d['a'] = Series([5, 6, 7, 8])
df.update(d)
expected = DataFrame(d)
assert_frame_equal(df, expected)
d = {'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}
df = DataFrame(d)
d['a'] = [5, 6, 7, 8]
df.update(d)
expected = DataFrame(d)
assert_frame_equal(df, expected)
def test_join_str_datetime(self):
str_dates = ['20120209', '20120222']
dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)]
A = DataFrame(str_dates, index=lrange(2), columns=['aa'])
C = DataFrame([[1, 2], [3, 4]], index=str_dates, columns=dt_dates)
tst = A.join(C, on='aa')
assert len(tst.columns) == 3
def test_join_multiindex_leftright(self):
# GH 10741
df1 = (pd.DataFrame([['a', 'x', 0.471780], ['a', 'y', 0.774908],
['a', 'z', 0.563634], ['b', 'x', -0.353756],
['b', 'y', 0.368062], ['b', 'z', -1.721840],
['c', 'x', 1], ['c', 'y', 2], ['c', 'z', 3]],
columns=['first', 'second', 'value1'])
.set_index(['first', 'second']))
df2 = (pd.DataFrame([['a', 10], ['b', 20]],
columns=['first', 'value2'])
.set_index(['first']))
exp = pd.DataFrame([[0.471780, 10], [0.774908, 10], [0.563634, 10],
[-0.353756, 20], [0.368062, 20],
[-1.721840, 20],
[1.000000, np.nan], [2.000000, np.nan],
[3.000000, np.nan]],
index=df1.index, columns=['value1', 'value2'])
# these must be the same results (but columns are flipped)
assert_frame_equal(df1.join(df2, how='left'), exp)
assert_frame_equal(df2.join(df1, how='right'),
exp[['value2', 'value1']])
exp_idx = pd.MultiIndex.from_product([['a', 'b'], ['x', 'y', 'z']],
names=['first', 'second'])
exp = pd.DataFrame([[0.471780, 10], [0.774908, 10], [0.563634, 10],
[-0.353756, 20], [0.368062, 20], [-1.721840, 20]],
index=exp_idx, columns=['value1', 'value2'])
assert_frame_equal(df1.join(df2, how='right'), exp)
assert_frame_equal(df2.join(df1, how='left'),
exp[['value2', 'value1']])
def test_concat_named_keys(self):
# GH 14252
df = pd.DataFrame({'foo': [1, 2], 'bar': [0.1, 0.2]})
index = Index(['a', 'b'], name='baz')
concatted_named_from_keys = pd.concat([df, df], keys=index)
expected_named = pd.DataFrame(
{'foo': [1, 2, 1, 2], 'bar': [0.1, 0.2, 0.1, 0.2]},
index=pd.MultiIndex.from_product((['a', 'b'], [0, 1]),
names=['baz', None]))
assert_frame_equal(concatted_named_from_keys, expected_named)
index_no_name = Index(['a', 'b'], name=None)
concatted_named_from_names = pd.concat(
[df, df], keys=index_no_name, names=['baz'])
assert_frame_equal(concatted_named_from_names, expected_named)
concatted_unnamed = pd.concat([df, df], keys=index_no_name)
expected_unnamed = pd.DataFrame(
{'foo': [1, 2, 1, 2], 'bar': [0.1, 0.2, 0.1, 0.2]},
index=pd.MultiIndex.from_product((['a', 'b'], [0, 1]),
names=[None, None]))
assert_frame_equal(concatted_unnamed, expected_unnamed)
def test_concat_axis_parameter(self):
# GH 14369
df1 = pd.DataFrame({'A': [0.1, 0.2]}, index=range(2))
df2 = pd.DataFrame({'A': [0.3, 0.4]}, index=range(2))
# Index/row/0 DataFrame
expected_index = pd.DataFrame(
{'A': [0.1, 0.2, 0.3, 0.4]}, index=[0, 1, 0, 1])
concatted_index = pd.concat([df1, df2], axis='index')
assert_frame_equal(concatted_index, expected_index)
concatted_row = pd.concat([df1, df2], axis='rows')
assert_frame_equal(concatted_row, expected_index)
concatted_0 = pd.concat([df1, df2], axis=0)
assert_frame_equal(concatted_0, expected_index)
# Columns/1 DataFrame
expected_columns = pd.DataFrame(
[[0.1, 0.3], [0.2, 0.4]], index=[0, 1], columns=['A', 'A'])
concatted_columns = pd.concat([df1, df2], axis='columns')
assert_frame_equal(concatted_columns, expected_columns)
concatted_1 = pd.concat([df1, df2], axis=1)
assert_frame_equal(concatted_1, expected_columns)
series1 = pd.Series([0.1, 0.2])
series2 = pd.Series([0.3, 0.4])
# Index/row/0 Series
expected_index_series = pd.Series(
[0.1, 0.2, 0.3, 0.4], index=[0, 1, 0, 1])
concatted_index_series = pd.concat([series1, series2], axis='index')
assert_series_equal(concatted_index_series, expected_index_series)
concatted_row_series = pd.concat([series1, series2], axis='rows')
assert_series_equal(concatted_row_series, expected_index_series)
concatted_0_series = pd.concat([series1, series2], axis=0)
assert_series_equal(concatted_0_series, expected_index_series)
# Columns/1 Series
expected_columns_series = pd.DataFrame(
[[0.1, 0.3], [0.2, 0.4]], index=[0, 1], columns=[0, 1])
concatted_columns_series = pd.concat(
[series1, series2], axis='columns')
assert_frame_equal(concatted_columns_series, expected_columns_series)
concatted_1_series = pd.concat([series1, series2], axis=1)
assert_frame_equal(concatted_1_series, expected_columns_series)
# Testing ValueError
with pytest.raises(ValueError, match='No axis named'):
pd.concat([series1, series2], axis='something')
def test_concat_numerical_names(self):
# #15262 # #12223
df = pd.DataFrame({'col': range(9)},
dtype='int32',
index=(pd.MultiIndex
.from_product([['A0', 'A1', 'A2'],
['B0', 'B1', 'B2']],
names=[1, 2])))
result = pd.concat((df.iloc[:2, :], df.iloc[-2:, :]))
expected = pd.DataFrame({'col': [0, 1, 7, 8]},
dtype='int32',
index=pd.MultiIndex.from_tuples([('A0', 'B0'),
('A0', 'B1'),
('A2', 'B1'),
('A2', 'B2')],
names=[1, 2]))
tm.assert_frame_equal(result, expected)
def test_concat_astype_dup_col(self):
# gh 23049
df = pd.DataFrame([{'a': 'b'}])
df = pd.concat([df, df], axis=1)
result = df.astype('category')
expected = pd.DataFrame(np.array(["b", "b"]).reshape(1, 2),
columns=["a", "a"]).astype("category")
tm.assert_frame_equal(result, expected)
class TestDataFrameCombineFirst(TestData):
def test_combine_first_mixed(self):
a = Series(['a', 'b'], index=lrange(2))
b = Series(lrange(2), index=lrange(2))
f = DataFrame({'A': a, 'B': b})
a = Series(['a', 'b'], index=lrange(5, 7))
b = Series(lrange(2), index=lrange(5, 7))
g = DataFrame({'A': a, 'B': b})
exp = pd.DataFrame({'A': list('abab'), 'B': [0., 1., 0., 1.]},
index=[0, 1, 5, 6])
combined = f.combine_first(g)
tm.assert_frame_equal(combined, exp)
def test_combine_first(self):
# disjoint
head, tail = self.frame[:5], self.frame[5:]
combined = head.combine_first(tail)
reordered_frame = self.frame.reindex(combined.index)
assert_frame_equal(combined, reordered_frame)
assert tm.equalContents(combined.columns, self.frame.columns)
assert_series_equal(combined['A'], reordered_frame['A'])
# same index
fcopy = self.frame.copy()
fcopy['A'] = 1
del fcopy['C']
fcopy2 = self.frame.copy()
fcopy2['B'] = 0
del fcopy2['D']
combined = fcopy.combine_first(fcopy2)
assert (combined['A'] == 1).all()
assert_series_equal(combined['B'], fcopy['B'])
assert_series_equal(combined['C'], fcopy2['C'])
assert_series_equal(combined['D'], fcopy['D'])
# overlap
head, tail = reordered_frame[:10].copy(), reordered_frame
head['A'] = 1
combined = head.combine_first(tail)
assert (combined['A'][:10] == 1).all()
# reverse overlap
tail['A'][:10] = 0
combined = tail.combine_first(head)
assert (combined['A'][:10] == 0).all()
# no overlap
f = self.frame[:10]
g = self.frame[10:]
combined = f.combine_first(g)
assert_series_equal(combined['A'].reindex(f.index), f['A'])
assert_series_equal(combined['A'].reindex(g.index), g['A'])
# corner cases
comb = self.frame.combine_first(self.empty)
assert_frame_equal(comb, self.frame)
comb = self.empty.combine_first(self.frame)
assert_frame_equal(comb, self.frame)
comb = self.frame.combine_first(DataFrame(index=["faz", "boo"]))
assert "faz" in comb.index
# #2525
df = DataFrame({'a': [1]}, index=[datetime(2012, 1, 1)])
df2 = DataFrame({}, columns=['b'])
result = df.combine_first(df2)
assert 'b' in result
def test_combine_first_mixed_bug(self):
idx = Index(['a', 'b', 'c', 'e'])
ser1 = Series([5.0, -9.0, 4.0, 100.], index=idx)
ser2 = Series(['a', 'b', 'c', 'e'], index=idx)
ser3 = Series([12, 4, 5, 97], index=idx)
frame1 = DataFrame({"col0": ser1,
"col2": ser2,
"col3": ser3})
idx = Index(['a', 'b', 'c', 'f'])
ser1 = Series([5.0, -9.0, 4.0, 100.], index=idx)
ser2 = Series(['a', 'b', 'c', 'f'], index=idx)
ser3 = Series([12, 4, 5, 97], index=idx)
frame2 = DataFrame({"col1": ser1,
"col2": ser2,
"col5": ser3})
combined = frame1.combine_first(frame2)
assert len(combined.columns) == 5
# gh 3016 (same as in update)
df = DataFrame([[1., 2., False, True], [4., 5., True, False]],
columns=['A', 'B', 'bool1', 'bool2'])
other = DataFrame([[45, 45]], index=[0], columns=['A', 'B'])
result = df.combine_first(other)
assert_frame_equal(result, df)
df.loc[0, 'A'] = np.nan
result = df.combine_first(other)
df.loc[0, 'A'] = 45
assert_frame_equal(result, df)
# doc example
df1 = DataFrame({'A': [1., np.nan, 3., 5., np.nan],
'B': [np.nan, 2., 3., np.nan, 6.]})
df2 = DataFrame({'A': [5., 2., 4., np.nan, 3., 7.],
'B': [np.nan, np.nan, 3., 4., 6., 8.]})
result = df1.combine_first(df2)
expected = DataFrame(
{'A': [1, 2, 3, 5, 3, 7.], 'B': [np.nan, 2, 3, 4, 6, 8]})
assert_frame_equal(result, expected)
# GH3552, return object dtype with bools
df1 = DataFrame(
[[np.nan, 3., True], [-4.6, np.nan, True], [np.nan, 7., False]])
df2 = DataFrame(
[[-42.6, np.nan, True], [-5., 1.6, False]], index=[1, 2])
result = df1.combine_first(df2)[2]
expected = Series([True, True, False], name=2)
assert_series_equal(result, expected)
        # GH 3593, converting datetime64[ns] incorrectly
df0 = DataFrame({"a": [datetime(2000, 1, 1),
datetime(2000, 1, 2),
datetime(2000, 1, 3)]})
df1 = DataFrame({"a": [None, None, None]})
df2 = df1.combine_first(df0)
assert_frame_equal(df2, df0)
df2 = df0.combine_first(df1)
assert_frame_equal(df2, df0)
df0 = DataFrame({"a": [datetime(2000, 1, 1),
datetime(2000, 1, 2),
datetime(2000, 1, 3)]})
df1 = DataFrame({"a": [datetime(2000, 1, 2), None, None]})
df2 = df1.combine_first(df0)
result = df0.copy()
result.iloc[0, :] = df1.iloc[0, :]
assert_frame_equal(df2, result)
df2 = df0.combine_first(df1)
assert_frame_equal(df2, df0)
def test_combine_first_align_nan(self):
# GH 7509 (not fixed)
dfa = pd.DataFrame([[pd.Timestamp('2011-01-01'), 2]],
columns=['a', 'b'])
dfb = pd.DataFrame([[4], [5]], columns=['b'])
assert dfa['a'].dtype == 'datetime64[ns]'
assert dfa['b'].dtype == 'int64'
res = dfa.combine_first(dfb)
exp = pd.DataFrame({'a': [pd.Timestamp('2011-01-01'), pd.NaT],
'b': [2., 5.]}, columns=['a', 'b'])
tm.assert_frame_equal(res, exp)
assert res['a'].dtype == 'datetime64[ns]'
# ToDo: this must be int64
assert res['b'].dtype == 'float64'
res = dfa.iloc[:0].combine_first(dfb)
exp = pd.DataFrame({'a': [np.nan, np.nan],
'b': [4, 5]}, columns=['a', 'b'])
tm.assert_frame_equal(res, exp)
# ToDo: this must be datetime64
assert res['a'].dtype == 'float64'
# ToDo: this must be int64
assert res['b'].dtype == 'int64'
def test_combine_first_timezone(self):
# see gh-7630
data1 = pd.to_datetime('20100101 01:01').tz_localize('UTC')
df1 = pd.DataFrame(columns=['UTCdatetime', 'abc'],
data=data1,
index=pd.date_range('20140627', periods=1))
data2 = pd.to_datetime('20121212 12:12').tz_localize('UTC')
df2 = pd.DataFrame(columns=['UTCdatetime', 'xyz'],
data=data2,
index=pd.date_range('20140628', periods=1))
res = df2[['UTCdatetime']].combine_first(df1)
exp = pd.DataFrame({'UTCdatetime': [pd.Timestamp('2010-01-01 01:01',
tz='UTC'),
pd.Timestamp('2012-12-12 12:12',
tz='UTC')],
'abc': [pd.Timestamp('2010-01-01 01:01:00',
tz='UTC'), pd.NaT]},
columns=['UTCdatetime', 'abc'],
index=pd.date_range('20140627', periods=2,
freq='D'))
tm.assert_frame_equal(res, exp)
assert res['UTCdatetime'].dtype == 'datetime64[ns, UTC]'
assert res['abc'].dtype == 'datetime64[ns, UTC]'
# see gh-10567
dts1 = pd.date_range('2015-01-01', '2015-01-05', tz='UTC')
df1 = pd.DataFrame({'DATE': dts1})
dts2 = pd.date_range('2015-01-03', '2015-01-05', tz='UTC')
df2 = pd.DataFrame({'DATE': dts2})
res = df1.combine_first(df2)
tm.assert_frame_equal(res, df1)
assert res['DATE'].dtype == 'datetime64[ns, UTC]'
dts1 = pd.DatetimeIndex(['2011-01-01', 'NaT', '2011-01-03',
'2011-01-04'], tz='US/Eastern')
df1 = pd.DataFrame({'DATE': dts1}, index=[1, 3, 5, 7])
dts2 = pd.DatetimeIndex(['2012-01-01', '2012-01-02',
'2012-01-03'], tz='US/Eastern')
df2 = pd.DataFrame({'DATE': dts2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = pd.DatetimeIndex(['2011-01-01', '2012-01-01', 'NaT',
'2012-01-02', '2011-01-03', '2011-01-04'],
tz='US/Eastern')
exp = pd.DataFrame({'DATE': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
# different tz
dts1 = pd.date_range('2015-01-01', '2015-01-05', tz='US/Eastern')
df1 = pd.DataFrame({'DATE': dts1})
dts2 = pd.date_range('2015-01-03', '2015-01-05')
df2 = pd.DataFrame({'DATE': dts2})
# if df1 doesn't have NaN, keep its dtype
res = df1.combine_first(df2)
tm.assert_frame_equal(res, df1)
assert res['DATE'].dtype == 'datetime64[ns, US/Eastern]'
dts1 = pd.date_range('2015-01-01', '2015-01-02', tz='US/Eastern')
df1 = pd.DataFrame({'DATE': dts1})
dts2 = pd.date_range('2015-01-01', '2015-01-03')
df2 = pd.DataFrame({'DATE': dts2})
res = df1.combine_first(df2)
exp_dts = [pd.Timestamp('2015-01-01', tz='US/Eastern'),
pd.Timestamp('2015-01-02', tz='US/Eastern'),
pd.Timestamp('2015-01-03')]
exp = pd.DataFrame({'DATE': exp_dts})
tm.assert_frame_equal(res, exp)
assert res['DATE'].dtype == 'object'
def test_combine_first_timedelta(self):
data1 = pd.TimedeltaIndex(['1 day', 'NaT', '3 day', '4day'])
df1 = pd.DataFrame({'TD': data1}, index=[1, 3, 5, 7])
data2 = pd.TimedeltaIndex(['10 day', '11 day', '12 day'])
df2 = pd.DataFrame({'TD': data2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = pd.TimedeltaIndex(['1 day', '10 day', 'NaT',
'11 day', '3 day', '4 day'])
exp = pd.DataFrame({'TD': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
assert res['TD'].dtype == 'timedelta64[ns]'
def test_combine_first_period(self):
data1 = pd.PeriodIndex(['2011-01', 'NaT', '2011-03',
'2011-04'], freq='M')
df1 = pd.DataFrame({'P': data1}, index=[1, 3, 5, 7])
data2 = pd.PeriodIndex(['2012-01-01', '2012-02',
'2012-03'], freq='M')
df2 = pd.DataFrame({'P': data2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = pd.PeriodIndex(['2011-01', '2012-01', 'NaT',
'2012-02', '2011-03', '2011-04'],
freq='M')
exp = pd.DataFrame({'P': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
assert res['P'].dtype == data1.dtype
# different freq
dts2 = pd.PeriodIndex(['2012-01-01', '2012-01-02',
'2012-01-03'], freq='D')
df2 = pd.DataFrame({'P': dts2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = [pd.Period('2011-01', freq='M'),
pd.Period('2012-01-01', freq='D'),
pd.NaT,
pd.Period('2012-01-02', freq='D'),
pd.Period('2011-03', freq='M'),
pd.Period('2011-04', freq='M')]
exp = pd.DataFrame({'P': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
assert res['P'].dtype == 'object'
def test_combine_first_int(self):
        # GH14687 - integer series that do not align exactly
df1 = pd.DataFrame({'a': [0, 1, 3, 5]}, dtype='int64')
df2 = pd.DataFrame({'a': [1, 4]}, dtype='int64')
res = df1.combine_first(df2)
tm.assert_frame_equal(res, df1)
assert res['a'].dtype == 'int64'
@pytest.mark.parametrize("val", [1, 1.0])
def test_combine_first_with_asymmetric_other(self, val):
# see gh-20699
df1 = pd.DataFrame({'isNum': [val]})
df2 = pd.DataFrame({'isBool': [True]})
res = df1.combine_first(df2)
exp = pd.DataFrame({'isBool': [True], 'isNum': [val]})
tm.assert_frame_equal(res, exp)
def test_concat_datetime_datetime64_frame(self):
# #2624
rows = []
rows.append([datetime(2010, 1, 1), 1])
rows.append([datetime(2010, 1, 2), 'hi'])
df2_obj = DataFrame.from_records(rows, columns=['date', 'test'])
ind = date_range(start="2000/1/1", freq="D", periods=10)
df1 = DataFrame({'date': ind, 'test': lrange(10)})
# it works!
pd.concat([df1, df2_obj])
class TestDataFrameUpdate(TestData):
def test_update_nan(self):
# #15593 #15617
# test 1
df1 = DataFrame({'A': [1.0, 2, 3], 'B': date_range('2000', periods=3)})
df2 = DataFrame({'A': [None, 2, 3]})
expected = df1.copy()
df1.update(df2, overwrite=False)
tm.assert_frame_equal(df1, expected)
# test 2
df1 = DataFrame({'A': [1.0, None, 3],
'B': date_range('2000', periods=3)})
df2 = DataFrame({'A': [None, 2, 3]})
expected = DataFrame({'A': [1.0, 2, 3],
'B': date_range('2000', periods=3)})
df1.update(df2, overwrite=False)
tm.assert_frame_equal(df1, expected)
|
|
'''
Documentation Builder Environments
'''
import os
import re
import sys
import logging
import subprocess
import traceback
import socket
from datetime import datetime
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from docker import Client
from docker.utils import create_host_config
from docker.errors import APIError as DockerAPIError, DockerException
from rest_framework.renderers import JSONRenderer
from readthedocs.builds.constants import BUILD_STATE_FINISHED
from readthedocs.builds.models import BuildCommandResultMixin
from readthedocs.projects.constants import LOG_TEMPLATE
from readthedocs.api.client import api as api_v1
from readthedocs.restapi.client import api as api_v2
from readthedocs.restapi.serializers import BuildCommandSerializer
from .exceptions import (BuildEnvironmentException, BuildEnvironmentError,
BuildEnvironmentWarning)
from .constants import (DOCKER_SOCKET, DOCKER_VERSION, DOCKER_IMAGE,
DOCKER_LIMITS, DOCKER_TIMEOUT_EXIT_CODE,
DOCKER_OOM_EXIT_CODE, SPHINX_TEMPLATE_DIR,
MKDOCS_TEMPLATE_DIR, DOCKER_HOSTNAME_MAX_LEN)
log = logging.getLogger(__name__)
class BuildCommand(BuildCommandResultMixin):
'''Wrap command execution for execution in build environments
This wraps subprocess commands with some logic to handle exceptions,
logging, and setting up the env for the build command.
    This acts as a mapping of sorts to the API representation of the
    :py:class:`readthedocs.builds.models.BuildCommandResult` model.
:param command: string or array of command parameters
:param cwd: current working path for the command
:param shell: execute command in shell, default=False
:param environment: environment variables to add to environment
:type environment: dict
:param combine_output: combine stdout/stderr, default=True
:param input_data: data to pass in on stdin
:type input_data: str
:param build_env: build environment to use to execute commands
:param bin_path: binary path to add to PATH resolution
:param description: a more grokable description of the command being run
'''
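    # A minimal usage sketch (hypothetical command, paths, and env values):
    #   cmd = BuildCommand(['sphinx-build', '-b', 'html', '.', '_build/html'],
    #                      cwd='/home/docs/checkouts/myproject',
    #                      environment={'LANG': 'en_US.UTF-8'})
    #   cmd.run()
    #   print(cmd.exit_code, cmd.output)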
def __init__(self, command, cwd=None, shell=False, environment=None,
combine_output=True, input_data=None, build_env=None,
bin_path=None, description=None):
self.command = command
self.shell = shell
if cwd is None:
cwd = os.getcwd()
self.cwd = cwd
self.environment = os.environ.copy()
if environment is not None:
assert 'PATH' not in environment, "PATH can't be set"
self.environment.update(environment)
self.combine_output = combine_output
self.input_data = input_data
self.build_env = build_env
self.output = None
self.error = None
self.start_time = None
self.end_time = None
self.bin_path = bin_path
self.description = ''
if description is not None:
self.description = description
self.exit_code = None
def __str__(self):
# TODO do we want to expose the full command here?
output = u''
if self.output is not None:
output = self.output.encode('utf-8')
return '\n'.join([self.get_command(), output])
def run(self):
        '''Set up subprocess and execute command
        STDIN is fed from ``self.input_data`` when it is set, and STDERR is
        combined into STDOUT when ``self.combine_output`` is true; both are
        configured in ``__init__`` rather than passed to this method.
        '''
log.info("Running: '%s' [%s]", self.get_command(), self.cwd)
self.start_time = datetime.utcnow()
stdout = subprocess.PIPE
stderr = subprocess.PIPE
stdin = None
if self.input_data is not None:
stdin = subprocess.PIPE
if self.combine_output:
stderr = subprocess.STDOUT
environment = {}
environment.update(self.environment)
environment['READTHEDOCS'] = 'True'
if self.build_env is not None:
environment['READTHEDOCS_VERSION'] = self.build_env.version.slug
environment['READTHEDOCS_PROJECT'] = self.build_env.project.slug
if 'DJANGO_SETTINGS_MODULE' in environment:
del environment['DJANGO_SETTINGS_MODULE']
if 'PYTHONPATH' in environment:
del environment['PYTHONPATH']
if self.bin_path is not None:
env_paths = environment.get('PATH', '').split(':')
env_paths.insert(0, self.bin_path)
environment['PATH'] = ':'.join(env_paths)
try:
proc = subprocess.Popen(
self.command,
shell=self.shell,
cwd=self.cwd,
stdin=stdin,
stdout=stdout,
stderr=stderr,
env=environment,
)
cmd_input = None
if self.input_data is not None:
cmd_input = self.input_data
cmd_output = proc.communicate(input=cmd_input)
(cmd_stdout, cmd_stderr) = cmd_output
try:
self.output = cmd_stdout.decode('utf-8', 'replace')
except (TypeError, AttributeError):
self.output = None
try:
self.error = cmd_stderr.decode('utf-8', 'replace')
except (TypeError, AttributeError):
self.error = None
self.exit_code = proc.returncode
except OSError:
self.error = traceback.format_exc()
self.output = self.error
self.exit_code = -1
finally:
self.end_time = datetime.utcnow()
def get_command(self):
'''Flatten command'''
if hasattr(self.command, '__iter__') and not isinstance(self.command, str):
return ' '.join(self.command)
else:
return self.command
def save(self):
'''Save this command and result via the API'''
data = {
'build': self.build_env.build.get('id'),
'command': self.get_command(),
'description': self.description,
'output': self.output,
'exit_code': self.exit_code,
'start_time': self.start_time,
'end_time': self.end_time,
}
api_v2.command.post(data)
class DockerBuildCommand(BuildCommand):
'''Create a docker container and run a command inside the container
Build command to execute in docker container
'''
def run(self):
        '''Execute command in existing Docker container
        The command is wrapped by ``get_wrapped_command`` and executed via
        the Docker exec API; output and the exit code are read back from
        the exec call.
        '''
log.info("Running in container %s: '%s' [%s]",
self.build_env.container_id, self.get_command(), self.cwd)
self.start_time = datetime.utcnow()
client = self.build_env.get_client()
try:
exec_cmd = client.exec_create(
container=self.build_env.container_id,
cmd=self.get_wrapped_command(),
stdout=True,
stderr=True
)
output = client.exec_start(exec_id=exec_cmd['Id'], stream=False)
try:
self.output = output.decode('utf-8', 'replace')
except (TypeError, AttributeError):
self.output = ''
cmd_ret = client.exec_inspect(exec_id=exec_cmd['Id'])
self.exit_code = cmd_ret['ExitCode']
                # Docker will exit with a special exit code to signify the
                # command was killed due to memory usage; surface a nicer
                # error message in that case.
if (self.exit_code == DOCKER_OOM_EXIT_CODE and
self.output == 'Killed\n'):
self.output = _('Command killed due to excessive memory '
'consumption\n')
except DockerAPIError:
self.exit_code = -1
            if not self.output:
self.output = _('Command exited abnormally')
finally:
self.end_time = datetime.utcnow()
def get_wrapped_command(self):
"""Escape special bash characters in command to wrap in shell
In order to set the current working path inside a docker container, we
need to wrap the command in a shell call manually. Some characters will
be interpreted as shell characters without escaping, such as: ``pip
install requests<0.8``. This escapes a good majority of those
characters.
"""
bash_escape_re = re.compile(r"([\t\ \!\"\#\$\&\'\(\)\*\:\;\<\>\?\@"
r"\[\\\]\^\`\{\|\}\~])")
prefix = ''
if self.bin_path:
prefix += 'PATH={0}:$PATH '.format(self.bin_path)
return ("/bin/sh -c 'cd {cwd} && {prefix}{cmd}'"
.format(
cwd=self.cwd,
prefix=prefix,
cmd=(' '.join([bash_escape_re.sub(r'\\\1', part)
for part in self.command]))))
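    # Escaping sketch (hypothetical command and path): a command of
    # ['pip', 'install', 'requests<0.8'] with cwd='/home/docs/project'
    # is wrapped roughly as:
    #   /bin/sh -c 'cd /home/docs/project && pip install requests\<0.8'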
class BuildEnvironment(object):
"""Base build environment
Base class for wrapping command execution for build steps. This provides a
context for command execution and reporting, and eventually performs updates
on the build object itself, reporting success/failure, as well as top-level
failures.
:param project: Project that is being built
:param version: Project version that is being built
:param build: Build instance
:param record: Record status of build object
:param environment: shell environment variables
:param report_build_success: update build if successful
"""
def __init__(self, project=None, version=None, build=None, record=True,
environment=None, report_build_success=True):
self.project = project
self.version = version
self.build = build
self.record = record
self.environment = environment or {}
self.report_build_success = report_build_success
self.commands = []
self.failure = None
self.start_time = datetime.utcnow()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
ret = self.handle_exception(exc_type, exc_value, tb)
self.update_build(state=BUILD_STATE_FINISHED)
log.info(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg='Build finished'))
return ret
def handle_exception(self, exc_type, exc_value, _):
"""Exception handling for __enter__ and __exit__
This reports on the exception we're handling and special cases
subclasses of BuildEnvironmentException. For
        :py:class:`BuildEnvironmentWarning`, exit this context gracefully, but
        don't mark the build as a failure. For :py:class:`BuildEnvironmentError`,
exit gracefully, but mark the build as a failure. For all other
exception classes, the build will be marked as a failure and an
exception will bubble up.
"""
if exc_type is not None:
log.error(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg=exc_value),
exc_info=True)
if issubclass(exc_type, BuildEnvironmentWarning):
return True
else:
self.failure = exc_value
if issubclass(exc_type, BuildEnvironmentError):
return True
return False
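    # Outcome summary for the branches above:
    #   BuildEnvironmentWarning -> suppressed, build not marked as failed
    #   BuildEnvironmentError   -> suppressed, build marked as failed
    #   any other exception     -> build marked as failed, exception re-raised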
def run(self, *cmd, **kwargs):
'''Shortcut to run command from environment'''
return self.run_command_class(cls=self.command_class, cmd=cmd, **kwargs)
def run_command_class(self, cls, cmd, **kwargs):
'''Run command from this environment
Use ``cls`` to instantiate a command
:param warn_only: Don't raise an exception on command failure
'''
warn_only = kwargs.pop('warn_only', False)
        # Remove BIN_PATH from the env, and use it as the default bin_path
        # kwarg if one isn't passed in
env_path = self.environment.pop('BIN_PATH', None)
if 'bin_path' not in kwargs and env_path:
kwargs['bin_path'] = env_path
assert 'environment' not in kwargs, "environment can't be passed in via commands."
kwargs['environment'] = self.environment
kwargs['build_env'] = self
build_cmd = cls(cmd, **kwargs)
self.commands.append(build_cmd)
build_cmd.run()
# Save to database
if self.record:
build_cmd.save()
if build_cmd.failed:
msg = u'Command {cmd} failed'.format(cmd=build_cmd.get_command())
if build_cmd.output:
msg += u':\n{out}'.format(out=build_cmd.output)
if warn_only:
log.warn(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg=msg))
else:
raise BuildEnvironmentWarning(msg)
return build_cmd
@property
def successful(self):
'''Is build completed, without top level failures or failing commands'''
return (self.done and self.failure is None and
all(cmd.successful for cmd in self.commands))
@property
def failed(self):
'''Is build completed, but has top level failure or failing commands'''
return (self.done and (
self.failure is not None or
any(cmd.failed for cmd in self.commands)
))
@property
def done(self):
'''Is build in finished state'''
return (self.build is not None and
self.build['state'] == BUILD_STATE_FINISHED)
def update_build(self, state=None):
"""Record a build by hitting the API
This step is skipped if we aren't recording the build, or if we don't
want to record successful builds yet (if we are running setup commands
for the build)
"""
if not self.record or (state == BUILD_STATE_FINISHED and
not self.report_build_success):
return None
self.build['project'] = self.project.pk
self.build['version'] = self.version.pk
self.build['builder'] = socket.gethostname()
self.build['state'] = state
if self.done:
self.build['success'] = self.successful
# TODO drop exit_code and provide a more meaningful UX for error
# reporting
if self.failure and isinstance(self.failure,
BuildEnvironmentException):
self.build['exit_code'] = self.failure.status_code
elif len(self.commands) > 0:
self.build['exit_code'] = max([cmd.exit_code
for cmd in self.commands])
self.build['setup'] = self.build['setup_error'] = ""
self.build['output'] = self.build['error'] = ""
if self.start_time:
build_length = (datetime.utcnow() - self.start_time)
self.build['length'] = build_length.total_seconds()
if self.failure is not None:
self.build['error'] = str(self.failure)
# Attempt to stop unicode errors on build reporting
for key, val in self.build.items():
if isinstance(val, basestring):
self.build[key] = val.decode('utf-8', 'ignore')
try:
            api_v2.build(self.build['id']).put(self.build)
except Exception:
log.error("Unable to post a new build", exc_info=True)
class LocalEnvironment(BuildEnvironment):
'''Local execution environment'''
command_class = BuildCommand
class DockerEnvironment(BuildEnvironment):
'''
Docker build environment, uses docker to contain builds
If :py:data:`settings.DOCKER_ENABLE` is true, build documentation inside a
docker container, instead of the host system, using this build environment
class. The build command creates a docker container from a pre-built image,
defined by :py:data:`settings.DOCKER_IMAGE`. This container is started with
a mount to the project's build path under ``user_builds`` on the host
machine, walling off project builds from reading/writing other projects'
data.
:param docker_socket: Override to Docker socket URI
'''
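    # Lifecycle sketch (hypothetical objects): entering the context creates
    # and starts the container, commands run inside it through
    # DockerBuildCommand, and exiting kills and removes the container:
    #   with DockerEnvironment(project=project, version=version,
    #                          build=build) as env:
    #       env.run('sphinx-build', '-b', 'html', '.', '_build/html',
    #               cwd=project.doc_path)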
command_class = DockerBuildCommand
container_image = DOCKER_IMAGE
container_mem_limit = DOCKER_LIMITS.get('memory')
container_time_limit = DOCKER_LIMITS.get('time')
def __init__(self, *args, **kwargs):
self.docker_socket = kwargs.pop('docker_socket', DOCKER_SOCKET)
super(DockerEnvironment, self).__init__(*args, **kwargs)
self.client = None
self.container = None
self.container_name = slugify(
'build-{build}-project-{project_id}-{project_name}'.format(
build=self.build.get('id'),
project_id=self.project.pk,
project_name=self.project.slug,
)[:DOCKER_HOSTNAME_MAX_LEN]
)
if self.project.container_mem_limit:
self.container_mem_limit = self.project.container_mem_limit
if self.project.container_time_limit:
self.container_time_limit = self.project.container_time_limit
def __enter__(self):
'''Start of environment context'''
log.info('Creating container')
try:
# Test for existing container. We remove any stale containers that
# are no longer running here if there is a collision. If the
# container is still running, this would be a failure of the version
# locking code, so we throw an exception.
state = self.container_state()
if state is not None:
if state.get('Running') is True:
exc = BuildEnvironmentError(
_('A build environment is currently '
'running for this version'))
self.failure = exc
self.update_build(state=BUILD_STATE_FINISHED)
raise exc
else:
log.warn(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg=("Removing stale container {0}"
.format(self.container_id))))
client = self.get_client()
client.remove_container(self.container_id)
except DockerAPIError:
pass
# Create the checkout path if it doesn't exist to avoid Docker creation
if not os.path.exists(self.project.doc_path):
os.makedirs(self.project.doc_path)
try:
self.create_container()
except: # pylint: disable=broad-except
self.__exit__(*sys.exc_info())
raise
return self
def __exit__(self, exc_type, exc_value, tb):
'''End of environment context'''
ret = self.handle_exception(exc_type, exc_value, tb)
# Update buildenv state given any container error states first
self.update_build_from_container_state()
client = self.get_client()
try:
client.kill(self.container_id)
except DockerAPIError:
pass
try:
log.info('Removing container %s', self.container_id)
client.remove_container(self.container_id)
except DockerAPIError:
log.error(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg="Couldn't remove container"),
exc_info=True)
self.container = None
self.update_build(state=BUILD_STATE_FINISHED)
log.info(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg='Build finished'))
return ret
def get_client(self):
'''Create Docker client connection'''
try:
if self.client is None:
self.client = Client(
base_url=self.docker_socket,
version=DOCKER_VERSION,
timeout=None
)
return self.client
except DockerException as e:
log.error(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg=e),
exc_info=True)
raise BuildEnvironmentError('Problem creating build environment')
@property
def container_id(self):
'''Return id of container if it is valid'''
if self.container_name:
return self.container_name
elif self.container:
return self.container.get('Id')
def container_state(self):
'''Get container state'''
client = self.get_client()
try:
info = client.inspect_container(self.container_id)
return info.get('State', {})
except DockerAPIError:
return None
def update_build_from_container_state(self):
'''Update buildenv state from container state
In the case of the parent command exiting before the exec commands
finish and the container is destroyed, or in the case of OOM on the
container, set a failure state and error message explaining the failure
on the buildenv.
'''
state = self.container_state()
if state is not None and state.get('Running') is False:
if state.get('ExitCode') == DOCKER_TIMEOUT_EXIT_CODE:
self.failure = BuildEnvironmentError(
_('Build exited due to time out'))
elif state.get('OOMKilled', False):
self.failure = BuildEnvironmentError(
_('Build exited due to excessive memory consumption'))
elif state.get('Error'):
self.failure = BuildEnvironmentError(
(_('Build exited due to unknown error: {0}')
.format(state.get('Error'))))
def create_container(self):
'''Create docker container'''
client = self.get_client()
image = self.container_image
if self.project.container_image:
image = self.project.container_image
try:
self.container = client.create_container(
image=image,
command=('/bin/sh -c "sleep {time}; exit {exit}"'
.format(time=self.container_time_limit,
exit=DOCKER_TIMEOUT_EXIT_CODE)),
name=self.container_id,
hostname=self.container_id,
host_config=create_host_config(binds={
SPHINX_TEMPLATE_DIR: {
'bind': SPHINX_TEMPLATE_DIR,
'mode': 'ro'
},
MKDOCS_TEMPLATE_DIR: {
'bind': MKDOCS_TEMPLATE_DIR,
'mode': 'ro'
},
self.project.doc_path: {
'bind': self.project.doc_path,
'mode': 'rw'
},
}),
detach=True,
environment=self.environment,
mem_limit=self.container_mem_limit,
)
client.start(container=self.container_id)
except DockerAPIError as e:
log.error(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg=e.explanation),
exc_info=True)
raise BuildEnvironmentError('Build environment creation failed')
|
|
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from __future__ import with_statement
from splinter.driver import ElementAPI
from splinter.element_list import ElementList
class FindElementsTest(object):
def test_finding_by_css(self):
"should find by css"
value = self.browser.find_by_css('h1').value
self.assertEqual('Example Header', value)
def test_finding_by_xpath(self):
"should find elements by xpath"
value = self.browser.find_by_xpath('//h1').value
self.assertEqual('Example Header', value)
def test_finding_by_tag(self):
"should find elements by tag"
value = self.browser.find_by_tag('h1').value
self.assertEqual('Example Header', value)
def test_finding_by_value(self):
"should find elements by value"
value = self.browser.find_by_value('M').value
id = self.browser.find_by_id('gender-m')
self.assertEqual(id.value, value)
def test_finding_by_id(self):
"should find elements by id"
value = self.browser.find_by_id("firstheader").value
self.assertEqual('Example Header', value)
def test_finding_by_name(self):
"should find elements by name"
value = self.browser.find_by_name('query').value
self.assertEqual('default value', value)
def test_finding_all_elements_by_css(self):
"should find elements by css"
value = self.browser.find_by_css('h1')[0].value
self.assertEqual('Example Header', value)
def test_finding_all_elements_by_xpath(self):
"should find elements by xpath"
value = self.browser.find_by_xpath('//h1')[0].value
self.assertEqual('Example Header', value)
def test_finding_all_elements_by_tag(self):
"should find elements by tag"
value = self.browser.find_by_tag('h1')[0].value
self.assertEqual('Example Header', value)
def test_finding_all_elements_by_id(self):
"should find elements by id"
value = self.browser.find_by_id("firstheader")[0].value
self.assertEqual('Example Header', value)
def test_finding_all_elements_by_name(self):
"should find elements by name"
value = self.browser.find_by_name('query')[0].value
self.assertEqual('default value', value)
def test_finding_all_links_by_text(self):
"should find links by text"
link = self.browser.find_link_by_text('Link for Example.com')[0]
self.assertEqual('http://example.com/', link['href'])
def test_finding_all_links_by_href(self):
"should find links by href"
link = self.browser.find_link_by_href('http://example.com/')[0]
self.assertEqual('http://example.com/', link['href'])
def test_finding_all_links_by_partial_href(self):
"should find links by partial href"
link = self.browser.find_link_by_partial_href('example.c')[0]
self.assertEqual('http://example.com/', link['href'])
def test_finding_all_links_by_partial_text(self):
"should find links by partial text"
link = self.browser.find_link_by_partial_text('FOO')[0]
self.assertEqual('http://localhost:5000/foo', link['href'])
def test_finding_last_element_by_css(self):
"should find last element by css"
value = self.browser.find_by_css('h1').last.value
self.assertEqual('Example Last Header', value)
def test_finding_last_element_by_xpath(self):
"should find last element by xpath"
value = self.browser.find_by_xpath('//h1').last.value
self.assertEqual('Example Last Header', value)
def test_finding_last_element_by_tag(self):
"should find last element by tag"
value = self.browser.find_by_tag('h1').last.value
self.assertEqual('Example Last Header', value)
def test_finding_last_element_by_id(self):
"should find last element by id"
value = self.browser.find_by_id("firstheader").last.value
self.assertEqual('Example Header', value)
    def test_last_element_is_same_as_first_element_in_find_by_id(self):
        "first element should be the same as the last element in find_by_id"
        # an HTML page should contain only one element per id
first = self.browser.find_by_id("firstheader").value
last = self.browser.find_by_id("firstheader").last.value
self.assertEqual(first, last)
def test_finding_last_element_by_name(self):
"should find last element by name"
value = self.browser.find_by_name('query').last.value
self.assertEqual('default last value', value)
def test_finding_last_link_by_text(self):
"should find last link by text"
link = self.browser.find_link_by_text('Link for Example.com').last
self.assertEqual('http://example.com/last', link['href'])
def test_finding_last_link_by_href(self):
"should find last link by href"
link = self.browser.find_link_by_href('http://example.com/').last
self.assertEqual('Link for last Example.com', link.text)
def test_finding_link_by_partial_href(self):
"should find links by partial href"
link = self.browser.find_link_by_partial_href('example.c').last
self.assertEqual('Link for last Example.com', link.text)
def test_finding_last_link_by_partial_text(self):
"should find last link by partial text"
link = self.browser.find_link_by_partial_text('FOO').last
self.assertEqual('A wordier (and last) link to FOO', link.text)
def test_finding_element_by_css_using_slice(self):
"should find element by css using slice"
value = self.browser.find_by_css('h1')[-1].value
self.assertEqual('Example Last Header', value)
def test_finding_element_by_xpath_using_slice(self):
"should find element by xpath using slice"
value = self.browser.find_by_xpath('//h1')[-1].value
self.assertEqual('Example Last Header', value)
def test_finding_element_by_tag_using_slice(self):
"should find element by tag using slice"
value = self.browser.find_by_tag('h1')[-1].value
self.assertEqual('Example Last Header', value)
def test_finding_element_by_id_using_slice(self):
"should find element by id using slice"
value = self.browser.find_by_id("firstheader")[-1].value
self.assertEqual('Example Header', value)
    def test_all_elements_are_same_as_first_element_in_find_by_id(self):
        "all elements should be the same as the first element in find_by_id"
        # an HTML page should contain only one element per id
first = self.browser.find_by_id("firstheader").value
some = self.browser.find_by_id("firstheader")[-1].value
self.assertEqual(first, some)
def test_finding_element_by_name_using_slice(self):
"should find element by name using slice"
value = self.browser.find_by_name('query')[-1].value
self.assertEqual('default last value', value)
def test_finding_link_by_text_using_slice(self):
"should find link by text using slice"
link = self.browser.find_link_by_text('Link for Example.com')[-1]
self.assertEqual('http://example.com/last', link['href'])
def test_finding_link_by_href_using_slice(self):
"should find link by href using slice"
link = self.browser.find_link_by_href('http://example.com/')[-1]
self.assertEqual('Link for last Example.com', link.text)
def test_finding_links_by_text(self):
"should find links by text"
link = self.browser.find_link_by_text('Link for Example.com')
self.assertEqual('http://example.com/', link['href'])
def test_finding_links_by_href(self):
"should find links by href"
link = self.browser.find_link_by_href('http://example.com/')
self.assertEqual('http://example.com/', link['href'])
def test_find_by_css_in_element_context(self):
"should find elements by css in element context and should return splinter driver element"
elements = self.browser.find_by_css("#inside")
descendant = elements[0].find_by_css('h2')
self.assertEqual(descendant.text.strip(), 'inside')
assert isinstance(descendant, ElementList)
assert isinstance(descendant[0], ElementAPI)
def test_find_by_xpath_in_element_context(self):
"should find elements by xpath in element context"
elements = self.browser.find_by_css("#inside")
descendant = elements[0].find_by_xpath("//h2")
self.assertEqual(descendant.text.strip(), 'inside')
assert isinstance(descendant, ElementList)
assert isinstance(descendant[0], ElementAPI)
def test_find_by_name_in_element_context(self):
"should find elements by name in element context"
elements = self.browser.find_by_css("#inside")
descendant = elements[0].find_by_name("upload")
self.assertEqual(len(descendant), 1)
assert isinstance(descendant, ElementList)
assert isinstance(descendant[0], ElementAPI)
def test_find_by_tag_in_element_context(self):
"should find elements by tag in element context"
elements = self.browser.find_by_css("#inside")
descendant = elements[0].find_by_tag("input")
self.assertEqual(len(descendant), 1)
assert isinstance(descendant, ElementList)
assert isinstance(descendant[0], ElementAPI)
def test_find_by_id_in_element_context(self):
"should find elements by id in element context"
elements = self.browser.find_by_css("#inside")
descendant = elements[0].find_by_id("visible")
self.assertEqual(len(descendant), 1)
assert isinstance(descendant, ElementList)
assert isinstance(descendant[0], ElementAPI)
def test_find_by_value_in_element_context(self):
"should find elements by value in element context"
elements = self.browser.find_by_css("#inside")
descendant = elements[0].find_by_value("crazy diamond")
self.assertEqual(len(descendant), 1)
assert isinstance(descendant, ElementList)
assert isinstance(descendant[0], ElementAPI)
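# Illustrative sketch (not part of this suite; `browser` is assumed to be a
# configured splinter driver): `.last` and negative indexing address the
# same element of an ElementList.
#
# elements = browser.find_by_css('h1')
# assert elements.last == elements[-1]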
|
|
from SBaaS_base.postgresql_orm_base import *
import json  # used by __repr__json__ below
class data_stage01_quantification_MQResultsTable(Base):
#__table__ = make_table('data_stage01_quantification_mqresultstable')
__tablename__ = 'data_stage01_quantification_mqresultstable'
id = Column(Integer, Sequence('data_stage01_quantification_mqresultstable_id_seq'), primary_key=True)
index_=Column(Integer);
sample_index=Column(Integer);
original_filename=Column(Text);
sample_name=Column(String(100));
sample_id=Column(String(500));
sample_comment=Column(Text);
sample_type=Column(String(20));
acquisition_date_and_time=Column(DateTime);
rack_number=Column(Integer);
plate_number=Column(Integer);
vial_number=Column(Integer);
dilution_factor=Column(Float);
injection_volume=Column(Float);
operator_name=Column(String(100));
acq_method_name=Column(String(100));
is_= Column(Boolean);
component_name=Column(String(500));
component_index=Column(Integer);
component_comment=Column(Text);
is_comment=Column(Text);
mass_info=Column(String(100));
is_mass=Column(String(100));
is_name=Column(String(500));
component_group_name=Column(String(100));
conc_units=Column(String(20));
failed_query=Column(Boolean);
is_failed_query=Column(Boolean);
peak_comment=Column(Text);
is_peak_comment=Column(Text);
actual_concentration=Column(Float);
is_actual_concentration=Column(Float);
concentration_ratio=Column(Float);
expected_rt=Column(Float);
is_expected_rt=Column(Float);
integration_type=Column(String(100));
is_integration_type=Column(String(100));
area=Column(Float);
is_area=Column(Float);
corrected_area=Column(Float);
is_corrected_area=Column(Float);
area_ratio=Column(Float);
height=Column(Float);
is_height=Column(Float);
corrected_height=Column(Float);
is_corrected_height=Column(Float);
height_ratio=Column(Float);
area_2_height=Column(Float);
is_area_2_height=Column(Float);
corrected_area2height=Column(Float);
is_corrected_area2height=Column(Float);
region_height=Column(Float);
is_region_height=Column(Float);
quality=Column(Float);
is_quality=Column(Float);
retention_time=Column(Float);
is_retention_time=Column(Float);
start_time=Column(Float);
is_start_time=Column(Float);
end_time=Column(Float);
is_end_time=Column(Float);
total_width=Column(Float);
is_total_width=Column(Float);
width_at_50=Column(Float);
is_width_at_50=Column(Float);
signal_2_noise=Column(Float);
is_signal_2_noise=Column(Float);
baseline_delta_2_height=Column(Float);
is_baseline_delta_2_height=Column(Float);
modified_=Column(Boolean);
relative_rt=Column(Float);
used_=Column(Boolean);
calculated_concentration=Column(Float);
accuracy_=Column(Float);
comment_=Column(Text);
use_calculated_concentration=Column(Boolean,default=True);
start_time_at_5=Column(Float);
end_time_at_5=Column(Float);
width_at_5=Column(Float);
start_time_at_10=Column(Float);
end_time_at_10=Column(Float);
width_at_10=Column(Float);
slope_of_baseline=Column(Float);
tailing_factor=Column(Float);
asymmetry_factor=Column(Float);
ion_ratio=Column(Float);
expected_ion_ratio=Column(Float);
points_across_baseline=Column(Float);
points_across_half_height=Column(Float);
__table_args__ = (
UniqueConstraint('component_name','sample_name','acquisition_date_and_time'),
)
def __init__(self,
row_dict_I,
):
self.component_name=row_dict_I['component_name'];
self.component_comment=row_dict_I['component_comment'];
self.is_comment=row_dict_I['is_comment'];
self.mass_info=row_dict_I['mass_info'];
self.is_mass=row_dict_I['is_mass'];
self.is_name=row_dict_I['is_name'];
self.component_group_name=row_dict_I['component_group_name'];
self.conc_units=row_dict_I['conc_units'];
self.failed_query=row_dict_I['failed_query'];
self.is_failed_query=row_dict_I['is_failed_query'];
self.peak_comment=row_dict_I['peak_comment'];
self.is_peak_comment=row_dict_I['is_peak_comment'];
self.actual_concentration=row_dict_I['actual_concentration'];
self.is_actual_concentration=row_dict_I['is_actual_concentration'];
self.concentration_ratio=row_dict_I['concentration_ratio'];
self.expected_rt=row_dict_I['expected_rt'];
self.is_expected_rt=row_dict_I['is_expected_rt'];
self.integration_type=row_dict_I['integration_type'];
self.is_integration_type=row_dict_I['is_integration_type'];
self.area=row_dict_I['area'];
self.is_area=row_dict_I['is_area'];
self.corrected_area=row_dict_I['corrected_area'];
self.is_corrected_area=row_dict_I['is_corrected_area'];
self.area_ratio=row_dict_I['area_ratio'];
self.height=row_dict_I['height'];
self.is_height=row_dict_I['is_height'];
self.corrected_height=row_dict_I['corrected_height'];
self.is_corrected_height=row_dict_I['is_corrected_height'];
self.height_ratio=row_dict_I['height_ratio'];
self.area_2_height=row_dict_I['area_2_height'];
self.is_area_2_height=row_dict_I['is_area_2_height'];
self.corrected_area2height=row_dict_I['corrected_area2height'];
self.is_corrected_area2height=row_dict_I['is_corrected_area2height'];
self.region_height=row_dict_I['region_height'];
self.is_region_height=row_dict_I['is_region_height'];
self.quality=row_dict_I['quality'];
self.is_quality=row_dict_I['is_quality'];
self.retention_time=row_dict_I['retention_time'];
self.is_retention_time=row_dict_I['is_retention_time'];
self.start_time=row_dict_I['start_time'];
self.is_start_time=row_dict_I['is_start_time'];
self.end_time=row_dict_I['end_time'];
self.is_end_time=row_dict_I['is_end_time'];
self.total_width=row_dict_I['total_width'];
self.is_total_width=row_dict_I['is_total_width'];
self.width_at_50=row_dict_I['width_at_50'];
self.is_width_at_50=row_dict_I['is_width_at_50'];
self.signal_2_noise=row_dict_I['signal_2_noise'];
self.is_signal_2_noise=row_dict_I['is_signal_2_noise'];
self.baseline_delta_2_height=row_dict_I['baseline_delta_2_height'];
self.is_baseline_delta_2_height=row_dict_I['is_baseline_delta_2_height'];
self.modified_=row_dict_I['modified_'];
self.relative_rt=row_dict_I['relative_rt'];
self.used_=row_dict_I['used_'];
self.calculated_concentration=row_dict_I['calculated_concentration'];
self.accuracy_=row_dict_I['accuracy_'];
self.comment_=row_dict_I['comment_'];
self.use_calculated_concentration=row_dict_I['use_calculated_concentration'];
self.acquisition_date_and_time=row_dict_I['acquisition_date_and_time'];
self.rack_number=row_dict_I['rack_number'];
self.plate_number=row_dict_I['plate_number'];
self.vial_number=row_dict_I['vial_number'];
self.dilution_factor=row_dict_I['dilution_factor'];
self.injection_volume=row_dict_I['injection_volume'];
self.operator_name=row_dict_I['operator_name'];
self.acq_method_name=row_dict_I['acq_method_name'];
self.is_=row_dict_I['is_'];
self.component_index=row_dict_I['component_index'];
self.sample_comment=row_dict_I['sample_comment'];
self.sample_id=row_dict_I['sample_id'];
self.sample_name=row_dict_I['sample_name'];
self.original_filename=row_dict_I['original_filename'];
self.sample_index=row_dict_I['sample_index'];
self.index_=row_dict_I['index_'];
self.sample_type=row_dict_I['sample_type'];
self.start_time_at_5=row_dict_I['start_time_at_5']
self.end_time_at_5=row_dict_I['end_time_at_5']
self.width_at_5=row_dict_I['width_at_5']
self.start_time_at_10=row_dict_I['start_time_at_10']
self.end_time_at_10=row_dict_I['end_time_at_10']
self.width_at_10=row_dict_I['width_at_10']
self.slope_of_baseline=row_dict_I['slope_of_baseline']
self.tailing_factor=row_dict_I['tailing_factor']
self.asymmetry_factor=row_dict_I['asymmetry_factor']
self.ion_ratio=row_dict_I['ion_ratio']
self.expected_ion_ratio=row_dict_I['expected_ion_ratio']
self.points_across_baseline=row_dict_I['points_across_baseline']
self.points_across_half_height=row_dict_I['points_across_half_height']
def __set__row__(self,index__I,sample_index_I,original_filename_I,
sample_name_I,sample_id_I,sample_comment_I,sample_type_I,
acquisition_date_and_time_I,rack_number_I,plate_number_I,
vial_number_I,dilution_factor_I,injection_volume_I,
operator_name_I,acq_method_name_I,is__I,component_name_I,
component_index_I,component_comment_I,is_comment_I,
mass_info_I,is_mass_I,is_name_I,component_group_name_I,
conc_units_I,failed_query_I,is_failed_query_I,peak_comment_I,
is_peak_comment_I,actual_concentration_I,is_actual_concentration_I,
concentration_ratio_I,expected_rt_I,is_expected_rt_I,
integration_type_I,is_integration_type_I,area_I,is_area_I,
corrected_area_I,is_corrected_area_I,area_ratio_I,height_I,
is_height_I,corrected_height_I,is_corrected_height_I,
height_ratio_I,area_2_height_I,is_area_2_height_I,
corrected_area2height_I,is_corrected_area2height_I,
region_height_I,is_region_height_I,quality_I,is_quality_I,
retention_time_I,is_retention_time_I,start_time_I,
is_start_time_I,end_time_I,is_end_time_I,total_width_I,
is_total_width_I,width_at_50_I,is_width_at_50_I,
signal_2_noise_I,is_signal_2_noise_I,baseline_delta_2_height_I,
is_baseline_delta_2_height_I,modified__I,relative_rt_I,used__I,
calculated_concentration_I,accuracy__I,comment__I,use_calculated_concentration_I,start_time_at_5_I,
end_time_at_5_I,
width_at_5_I,
start_time_at_10_I,
end_time_at_10_I,
width_at_10_I,
slope_of_baseline_I,
tailing_factor_I,
asymmetry_factor_I,
ion_ratio_I,
expected_ion_ratio_I,
points_across_baseline_I,
points_across_half_height_I,):
self.index_=index__I;
self.sample_index=sample_index_I;
self.original_filename=original_filename_I;
self.sample_name=sample_name_I;
self.sample_id=sample_id_I;
self.sample_comment=sample_comment_I;
self.sample_type=sample_type_I;
self.acquisition_date_and_time=acquisition_date_and_time_I;
self.rack_number=rack_number_I;
self.plate_number=plate_number_I;
self.vial_number=vial_number_I;
self.dilution_factor=dilution_factor_I;
self.injection_volume=injection_volume_I;
self.operator_name=operator_name_I;
self.acq_method_name=acq_method_name_I;
self.is_=is__I;
self.component_name=component_name_I;
self.component_index=component_index_I;
self.component_comment=component_comment_I;
self.is_comment=is_comment_I;
self.mass_info=mass_info_I;
self.is_mass=is_mass_I;
self.is_name=is_name_I;
self.component_group_name=component_group_name_I;
self.conc_units=conc_units_I;
self.failed_query=failed_query_I;
self.is_failed_query=is_failed_query_I;
self.peak_comment=peak_comment_I;
self.is_peak_comment=is_peak_comment_I;
self.actual_concentration=actual_concentration_I;
self.is_actual_concentration=is_actual_concentration_I;
self.concentration_ratio=concentration_ratio_I;
self.expected_rt=expected_rt_I;
self.is_expected_rt=is_expected_rt_I;
self.integration_type=integration_type_I;
self.is_integration_type=is_integration_type_I;
self.area=area_I;
self.is_area=is_area_I;
self.corrected_area=corrected_area_I;
self.is_corrected_area=is_corrected_area_I;
self.area_ratio=area_ratio_I;
self.height=height_I;
self.is_height=is_height_I;
self.corrected_height=corrected_height_I;
self.is_corrected_height=is_corrected_height_I;
self.height_ratio=height_ratio_I;
self.area_2_height=area_2_height_I;
self.is_area_2_height=is_area_2_height_I;
self.corrected_area2height=corrected_area2height_I;
self.is_corrected_area2height=is_corrected_area2height_I;
self.region_height=region_height_I;
self.is_region_height=is_region_height_I;
self.quality=quality_I;
self.is_quality=is_quality_I;
self.retention_time=retention_time_I;
self.is_retention_time=is_retention_time_I;
self.start_time=start_time_I;
self.is_start_time=is_start_time_I;
self.end_time=end_time_I;
self.is_end_time=is_end_time_I;
self.total_width=total_width_I;
self.is_total_width=is_total_width_I;
self.width_at_50=width_at_50_I;
self.is_width_at_50=is_width_at_50_I;
self.signal_2_noise=signal_2_noise_I;
self.is_signal_2_noise=is_signal_2_noise_I;
self.baseline_delta_2_height=baseline_delta_2_height_I;
self.is_baseline_delta_2_height=is_baseline_delta_2_height_I;
self.modified_=modified__I;
self.relative_rt=relative_rt_I;
self.used_=used__I;
self.calculated_concentration=calculated_concentration_I;
self.accuracy_=accuracy__I;
self.comment_=comment__I;
self.use_calculated_concentration=use_calculated_concentration_I;
self.start_time_at_5=start_time_at_5_I
self.end_time_at_5=end_time_at_5_I
self.width_at_5=width_at_5_I
self.start_time_at_10=start_time_at_10_I
self.end_time_at_10=end_time_at_10_I
self.width_at_10=width_at_10_I
self.slope_of_baseline=slope_of_baseline_I
self.tailing_factor=tailing_factor_I
self.asymmetry_factor=asymmetry_factor_I
self.ion_ratio=ion_ratio_I
self.expected_ion_ratio=expected_ion_ratio_I
self.points_across_baseline=points_across_baseline_I
self.points_across_half_height=points_across_half_height_I
#TODO:
#define relations
#define representation
def __repr__(self):
return "data_stage01_quantification_MQResultsTable %s" % (self.acquisition_date_and_time, self.sample_name,self.component_name)
def __repr__dict__(self):
return {'index_':self.index_,
'sample_index':self.sample_index,
'original_filename':self.original_filename,
'sample_name':self.sample_name,
'sample_id':self.sample_id,
'sample_comment':self.sample_comment,
'sample_type':self.sample_type,
'acquisition_date_and_time':self.acquisition_date_and_time,
'rack_number':self.rack_number,
'plate_number':self.plate_number,
'vial_number':self.vial_number,
'dilution_factor':self.dilution_factor,
'injection_volume':self.injection_volume,
'operator_name':self.operator_name,
'acq_method_name':self.acq_method_name,
'is_':self.is_,
'component_name':self.component_name,
'component_index':self.component_index,
'component_comment':self.component_comment,
'is_comment':self.is_comment,
'mass_info':self.mass_info,
'is_mass':self.is_mass,
'is_name':self.is_name,
'component_group_name':self.component_group_name,
'conc_units':self.conc_units,
'failed_query':self.failed_query,
'is_failed_query':self.is_failed_query,
'peak_comment':self.peak_comment,
'is_peak_comment':self.is_peak_comment,
'actual_concentration':self.actual_concentration,
'is_actual_concentration':self.is_actual_concentration,
'concentration_ratio':self.concentration_ratio,
'expected_rt':self.expected_rt,
'is_expected_rt':self.is_expected_rt,
'integration_type':self.integration_type,
'is_integration_type':self.is_integration_type,
'area':self.area,
'is_area':self.is_area,
'corrected_area':self.corrected_area,
'is_corrected_area':self.is_corrected_area,
'area_ratio':self.area_ratio,
'height':self.height,
'is_height':self.is_height,
'corrected_height':self.corrected_height,
'is_corrected_height':self.is_corrected_height,
'height_ratio':self.height_ratio,
'area_2_height':self.area_2_height,
'is_area_2_height':self.is_area_2_height,
'corrected_area2height':self.corrected_area2height,
'is_corrected_area2height':self.is_corrected_area2height,
'region_height':self.region_height,
'is_region_height':self.is_region_height,
'quality':self.quality,
'is_quality':self.is_quality,
'retention_time':self.retention_time,
'is_retention_time':self.is_retention_time,
'start_time':self.start_time,
'is_start_time':self.is_start_time,
'end_time':self.end_time,
'is_end_time':self.is_end_time,
'total_width':self.total_width,
'is_total_width':self.is_total_width,
'width_at_50':self.width_at_50,
'is_width_at_50':self.is_width_at_50,
'signal_2_noise':self.signal_2_noise,
'is_signal_2_noise':self.is_signal_2_noise,
'baseline_delta_2_height':self.baseline_delta_2_height,
'is_baseline_delta_2_height':self.is_baseline_delta_2_height,
'modified_':self.modified_,
'relative_rt':self.relative_rt,
'used_':self.used_,
'calculated_concentration':self.calculated_concentration,
'accuracy_':self.accuracy_,
'comment_':self.comment_,
'use_calculated_concentration':self.use_calculated_concentration,
'start_time_at_5':self.start_time_at_5,
'end_time_at_5':self.end_time_at_5,
'width_at_5':self.width_at_5,
'start_time_at_10':self.start_time_at_10,
'end_time_at_10':self.end_time_at_10,
'width_at_10':self.width_at_10,
'slope_of_baseline':self.slope_of_baseline,
'tailing_factor':self.tailing_factor,
'asymmetry_factor':self.asymmetry_factor,
'ion_ratio':self.ion_ratio,
'expected_ion_ratio':self.expected_ion_ratio,
'points_across_baseline':self.points_across_baseline,
'points_across_half_height':self.points_across_half_height,
}
def __repr__json__(self):
return json.dumps(self.__repr__dict__())
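# Usage sketch (hypothetical names): a row object is built from one parsed
# MultiQuant export dict and can be serialized back out as JSON.
#
# row_dict = parse_mq_export_row(line)  # assumed helper, one export row
# entry = data_stage01_quantification_MQResultsTable(row_dict)
# session.add(entry)  # SQLAlchemy session provided by SBaaS_base
# log_line = entry.__repr__json__()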
|
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
import numpy.testing as npt
from skbio.util._decorator import experimental
@experimental(as_of="0.4.0")
def mean_and_std(a, axis=None, weights=None, with_mean=True, with_std=True,
ddof=0):
"""Compute the weighted average and standard deviation along the
specified axis.
Parameters
----------
a : array_like
Calculate average and standard deviation of these values.
axis : int, optional
Axis along which the statistics are computed. The default is
to compute them on the flattened array.
weights : array_like, optional
An array of weights associated with the values in `a`. Each
value in `a` contributes to the average according to its
associated weight. The weights array can either be 1-D (in
which case its length must be the size of `a` along the given
axis) or of the same shape as `a`. If `weights=None`, then all
data in `a` are assumed to have a weight equal to one.
with_mean : bool, optional, defaults to True
Compute average if True.
with_std : bool, optional, defaults to True
Compute standard deviation if True.
ddof : int, optional, defaults to 0
Delta degrees of freedom. Variance is calculated by
dividing by `n - ddof` (where `n` is the number of
elements). By default it computes the maximum likelihood
estimator.
Returns
-------
average, std
Return the average and standard deviation along the specified
axis. If either was not requested, `None` is returned instead.
"""
if not (with_mean or with_std):
raise ValueError("Either the mean or the standard deviation needs "
"to be computed.")
a = np.asarray(a)
if weights is None:
avg = a.mean(axis=axis) if with_mean else None
std = a.std(axis=axis, ddof=ddof) if with_std else None
else:
avg = np.average(a, axis=axis, weights=weights)
if with_std:
if axis is None:
variance = np.average((a - avg)**2, weights=weights)
else:
# Make sure that the subtraction to compute variance works for
# multidimensional arrays
a_rolled = np.rollaxis(a, axis)
# Numpy doesn't have a weighted std implementation, but this is
# stable and fast
variance = np.average((a_rolled - avg)**2, axis=0,
weights=weights)
if ddof != 0: # Don't waste time if variance doesn't need scaling
if axis is None:
variance *= a.size / (a.size - ddof)
else:
variance *= a.shape[axis] / (a.shape[axis] - ddof)
std = np.sqrt(variance)
else:
std = None
avg = avg if with_mean else None
return avg, std
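# Illustrative usage sketch (not part of the scikit-bio API): weighted vs.
# unweighted column statistics with `mean_and_std`.
def _demo_mean_and_std():
    a = np.array([[1.0, 2.0], [3.0, 4.0]])
    # Unweighted column statistics: avg == [2., 3.], std == [1., 1.]
    avg, std = mean_and_std(a, axis=0)
    # Weighting the second row twice as heavily pulls the averages toward
    # it: avg_w == [7/3, 10/3]; the std computation is skipped entirely.
    avg_w, _ = mean_and_std(a, axis=0, weights=[1.0, 2.0], with_std=False)
    return avg, std, avg_w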
@experimental(as_of="0.4.0")
def scale(a, weights=None, with_mean=True, with_std=True, ddof=0, copy=True):
"""Scale array by columns to have weighted average 0 and standard
deviation 1.
Parameters
----------
a : array_like
2D array whose columns are standardized according to the
weights.
weights : array_like, optional
Array of weights associated with the columns of `a`. By
default, the scaling is unweighted.
with_mean : bool, optional, defaults to True
Center columns to have 0 weighted mean.
with_std : bool, optional, defaults to True
Scale columns to have unit weighted std.
ddof : int, optional, defaults to 0
If with_std is True, variance is calculated by dividing by `n
- ddof` (where `n` is the number of elements). By default it
computes the maximum likelihood estimator.
copy : bool, optional, defaults to True
If True (the default), standardize a copy of `a` and return it;
if False, standardize `a` in place.
Returns
-------
2D ndarray
Scaled array.
Notes
-----
Wherever std equals 0, it is replaced by 1 in order to avoid
division by zero.
"""
if copy:
a = a.copy()
avg, std = mean_and_std(a, axis=0, weights=weights, with_mean=with_mean,
with_std=with_std, ddof=ddof)
if with_mean:
a -= avg
if with_std:
std[std == 0] = 1.0
a /= std
return a
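# Illustrative usage sketch (not part of the scikit-bio API): after
# `scale`, each column has weighted mean 0 and (ddof=0) std 1.
def _demo_scale():
    x = np.array([[1.0, 10.0], [3.0, 30.0]])
    return scale(x)  # array([[-1., -1.], [ 1.,  1.]])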
@experimental(as_of="0.4.0")
def svd_rank(M_shape, S, tol=None):
"""Matrix rank of `M` given its singular values `S`.
See `np.linalg.matrix_rank` for a rationale on the tolerance
(we're not using that function because it doesn't let us reuse a
precomputed SVD)."""
if tol is None:
tol = S.max() * max(M_shape) * np.finfo(S.dtype).eps
return np.sum(S > tol)
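# Illustrative usage sketch: reuse precomputed singular values to get the
# numerical rank of a rank-deficient matrix.
def _demo_svd_rank():
    m = np.array([[1.0, 2.0], [2.0, 4.0]])  # second row is 2x the first
    s = np.linalg.svd(m, compute_uv=False)
    return svd_rank(m.shape, s)  # == 1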
@experimental(as_of="0.4.0")
def corr(x, y=None):
"""Computes correlation between columns of `x`, or `x` and `y`.
Correlation is covariance of (columnwise) standardized matrices,
so each matrix is first centered and scaled to have variance one,
and then their covariance is computed.
Parameters
----------
x : 2D array_like
Matrix of shape (n, p). Correlation between its columns will
be computed.
y : 2D array_like, optional
Matrix of shape (n, q). If provided, the correlation is
computed between the columns of `x` and the columns of
`y`. Else, it's computed between the columns of `x`.
Returns
-------
correlation
Matrix of computed correlations. Has shape (p, p) if `y` is
not provided, else has shape (p, q).
"""
x = np.asarray(x)
if y is not None:
y = np.asarray(y)
if y.shape[0] != x.shape[0]:
raise ValueError("Both matrices must have the same number of rows")
x, y = scale(x), scale(y)
else:
x = scale(x)
y = x
# Notice that scaling was performed with ddof=0 (dividing by n,
# the default), so now we need to remove it by also using ddof=0
# (dividing by n)
return x.T.dot(y) / x.shape[0]
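# Illustrative usage sketch: perfectly (anti)correlated columns yield +/-1
# entries in the correlation matrix.
def _demo_corr():
    x = np.array([[1.0, 6.0], [2.0, 4.0], [3.0, 2.0]])
    return corr(x)  # array([[ 1., -1.], [-1.,  1.]])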
@experimental(as_of="0.4.0")
def assert_ordination_results_equal(left, right):
"""Assert that ordination results objects are equal.
This is a helper function intended to be used in unit tests that need to
compare ``OrdinationResults`` objects.
For numeric attributes (e.g., eigvals, site, etc.),
``numpy.testing.assert_almost_equal`` is used. Otherwise,
``numpy.testing.assert_equal`` is used for comparisons. An assertion is
in place to ensure the two objects are exactly the same type.
Parameters
----------
left, right : OrdinationResults
Ordination results to be compared for equality.
Raises
------
AssertionError
If the two objects are not equal.
"""
npt.assert_equal(type(left) is type(right), True)
# eigvals should always be present
npt.assert_almost_equal(left.eigvals, right.eigvals)
# these attributes are strings, so can compare directly, even if one or
# both are None
npt.assert_equal(left.species_ids, right.species_ids)
npt.assert_equal(left.site_ids, right.site_ids)
# these attributes need to be checked that they are almost equal, but one
# or both can be None, which npt.assert_almost_equal doesn't like
_assert_optional_numeric_attr_equal(left.species, right.species)
_assert_optional_numeric_attr_equal(left.site, right.site)
_assert_optional_numeric_attr_equal(left.biplot, right.biplot)
_assert_optional_numeric_attr_equal(left.site_constraints,
right.site_constraints)
_assert_optional_numeric_attr_equal(left.proportion_explained,
right.proportion_explained)
def _assert_optional_numeric_attr_equal(left, right):
if left is None or right is None:
npt.assert_equal(left, right)
else:
npt.assert_almost_equal(left, right)
|
|
import unittest
from mock import MagicMock
from steinlib.exceptions import SteinlibParsingException, \
UnrecognizedSectionException
from steinlib.instance import SteinlibInstance
from steinlib.parser import RootSectionParser
from steinlib.section import CommentSectionParser
from steinlib.state import ParsingState
class TestRootSectionParser(unittest.TestCase):
graph = SteinlibInstance()
def test_simple_section_match(self):
simple_section = 'SECTION Foo'
result = RootSectionParser.matches(simple_section,
TestRootSectionParser.graph)
self.assertEqual(len(result.groups()), 1,
'RootSectionParser.matches should return 1 name')
def test_mixed_case_section_match(self):
simple_section = 'sEcTiOn Foo'
result = RootSectionParser.matches(simple_section,
TestRootSectionParser.graph)
self.assertEqual(len(result.groups()), 1,
'RootSectionParser.matches should return 1 name')
def test_section_name_is_correct(self):
simple_section = 'SECTION Foobar'
result = RootSectionParser.matches(simple_section,
TestRootSectionParser.graph)
self.assertEqual(result.groups()[0], 'Foobar')
def test_invalid_empty_section_raises_exception(self):
simple_section = 'SECTION'
with self.assertRaises(SteinlibParsingException):
result = RootSectionParser.matches(
simple_section, TestRootSectionParser.graph)
def test_invalid_multiple_section_names_raises_exception(self):
simple_section = 'section Foo Bar'
with self.assertRaises(SteinlibParsingException):
result = RootSectionParser.matches(
simple_section, TestRootSectionParser.graph)
def test_invalid_section_string_raises_exception(self):
simple_section = 'chapter Foo'
with self.assertRaises(SteinlibParsingException):
result = RootSectionParser.matches(
simple_section, TestRootSectionParser.graph)
def test_right_section_parser_is_returned(self):
comment_section = 'section Comment'
section_parser = RootSectionParser.get_section_parser(
comment_section, TestRootSectionParser.graph)
self.assertEqual(section_parser, CommentSectionParser)
def test_right_section_callback_is_executed(self):
mock_graph = MagicMock()
comment_section = 'section Comment'
section_parser = RootSectionParser.get_section_parser(
comment_section, mock_graph)
# generic callback for all sections
mock_graph.section.assert_called_with('section Comment', ('Comment',))
# specific callback, called only when 'comment' section is found
mock_graph.comment.assert_called_with('section Comment', ('Comment',))
def test_unrecognized_section_parser_raises_exception(self):
comment_section = 'section Bogus'
with self.assertRaises(UnrecognizedSectionException):
section_parser = RootSectionParser.get_section_parser(
comment_section,
TestRootSectionParser.graph)
def test_capture_exception_unrecognized_section(self):
comment_section = 'section Bogus'
try:
section_parser = RootSectionParser.get_section_parser(
comment_section,
TestRootSectionParser.graph)
except UnrecognizedSectionException as ex:
message_snippet = 'Invalid section identifier'
self.assertTrue(
message_snippet in str(ex),
'"%s" should appear in the exception message: %s' % (
message_snippet, str(ex)))
class TestCommentSectionParser(unittest.TestCase):
def setUp(self):
self._mock_graph = MagicMock()
self._sut = RootSectionParser.get_section_parser(
'SECTION Comment', self._mock_graph)
def test_invalid_line_for_comment_section(self):
invalid_line = 'FOO 1 2 3'
with self.assertRaises(SteinlibParsingException):
next_state = self._sut.parse_token(invalid_line, self._mock_graph)
def test_default_next_status_is_returned(self):
name = 'Name "Foo bar"'
next_state = self._sut.parse_token(name, self._mock_graph)
self.assertEqual(
ParsingState.inside_section,
next_state,
'Default next state should be inside_section %s, but got %s.' % (
ParsingState.inside_section, next_state))
def test_end_next_status_is_returned(self):
end = 'END'
next_state = self._sut.parse_token(end, self._mock_graph)
self.assertEqual(
ParsingState.wait_for_section,
next_state,
'Default next state should be %s, but got %s.' % (
ParsingState.wait_for_section, next_state))
def test_single_string_regex_rejects_invalid(self):
invalid_line = 'Remark "Foo bar" MORE STUFF'
with self.assertRaises(SteinlibParsingException):
next_state = self._sut.parse_token(invalid_line, self._mock_graph)
def test_name_callback(self):
name = 'Name "Foo bar"'
next_state = self._sut.parse_token(name, self._mock_graph)
self._mock_graph.comment__name.assert_called_with(name, ['Foo bar'])
def test_creator_callback(self):
creator = 'Creator "Foo bar"'
next_state = self._sut.parse_token(creator, self._mock_graph)
self._mock_graph.comment__creator.assert_called_with(
creator, ['Foo bar'])
def test_remark_callback(self):
remark = 'Remark "Foo bar"'
next_state = self._sut.parse_token(remark, self._mock_graph)
self._mock_graph.comment__remark.assert_called_with(
remark, ['Foo bar'])
def test_problem_callback(self):
problem = 'Problem "Foo bar"'
next_state = self._sut.parse_token(problem, self._mock_graph)
self._mock_graph.comment__problem.assert_called_with(
problem, ['Foo bar'])
class TestCoordinatesSectionParser(unittest.TestCase):
def setUp(self):
self._mock_graph = MagicMock()
self._sut = RootSectionParser.get_section_parser(
'SECTION Coordinates', self._mock_graph)
def test_end_next_status_is_returned(self):
end = 'END'
next_state = self._sut.parse_token(end, self._mock_graph)
self.assertEqual(
ParsingState.wait_for_section,
next_state,
'Default next state should be %s, but got %s.' % (
ParsingState.wait_for_section, next_state))
def test_dd_callback_two_params(self):
dd = 'DD 222 333'
next_state = self._sut.parse_token(dd, self._mock_graph)
self._mock_graph.coordinates__dd.assert_called_with(
dd, [222, 333])
def test_dd_callback_three_params(self):
dd = 'DD 777 888 999'
next_state = self._sut.parse_token(dd, self._mock_graph)
self._mock_graph.coordinates__dd.assert_called_with(
dd, [777, 888, 999])
def test_dd_callback_four_params(self):
dd = 'DD 777 888 999 222'
next_state = self._sut.parse_token(dd, self._mock_graph)
self._mock_graph.coordinates__dd.assert_called_with(
dd, [777, 888, 999, 222])
def test_dd_callback_reject_unexpected_parameters(self):
dd = 'DD 777 abc 999 222'
with self.assertRaises(SteinlibParsingException):
next_state = self._sut.parse_token(dd, self._mock_graph)
class TestGraphSectionParser(unittest.TestCase):
def setUp(self):
self._mock_graph = MagicMock()
self._sut = RootSectionParser.get_section_parser(
'SECTION Graph', self._mock_graph)
def test_end_next_status_is_returned(self):
end = 'END'
next_state = self._sut.parse_token(end, self._mock_graph)
self.assertEqual(
ParsingState.wait_for_section,
next_state,
'Default next state should be %s, but got %s.' % (
ParsingState.wait_for_section, next_state))
def test_obstacles_callback(self):
obstacles = 'Obstacles 555'
next_state = self._sut.parse_token(obstacles, self._mock_graph)
self._mock_graph.graph__obstacles.assert_called_with(
obstacles, [555])
def test_nodes_callback(self):
nodes = 'Nodes 777'
next_state = self._sut.parse_token(nodes, self._mock_graph)
self._mock_graph.graph__nodes.assert_called_with(nodes, [777])
def test_edges_callback(self):
edges = 'Edges 234'
next_state = self._sut.parse_token(edges, self._mock_graph)
self._mock_graph.graph__edges.assert_called_with(edges, [234])
def test_arcs_callback(self):
arcs = 'Arcs 321'
next_state = self._sut.parse_token(arcs, self._mock_graph)
self._mock_graph.graph__arcs.assert_called_with(arcs, [321])
def test_e_callback(self):
e = 'e 234 789 123'
next_state = self._sut.parse_token(e, self._mock_graph)
self._mock_graph.graph__e.assert_called_with(e, [234, 789, 123])
def test_a_callback(self):
a = 'a 234 654 321'
next_state = self._sut.parse_token(a, self._mock_graph)
self._mock_graph.graph__a.assert_called_with(a, [234, 654, 321])
class TestMaximumDegreesSectionParser(unittest.TestCase):
def setUp(self):
self._mock_graph = MagicMock()
self._sut = RootSectionParser.get_section_parser(
'SECTION MaximumDegrees', self._mock_graph)
def test_end_next_status_is_returned(self):
end = 'END'
next_state = self._sut.parse_token(end, self._mock_graph)
self.assertEqual(
ParsingState.wait_for_section,
next_state,
'Default next state should be %s, but got %s.' % (
ParsingState.wait_for_section, next_state))
def test_md_callback(self):
md = 'md 555'
next_state = self._sut.parse_token(md, self._mock_graph)
self._mock_graph.maximum_degrees__md.assert_called_with(md, [555])
class TestPresolveSectionParser(unittest.TestCase):
def setUp(self):
self._mock_graph = MagicMock()
self._sut = RootSectionParser.get_section_parser(
'SECTION Presolve', self._mock_graph)
def test_end_next_status_is_returned(self):
end = 'END'
next_state = self._sut.parse_token(end, self._mock_graph)
self.assertEqual(
ParsingState.wait_for_section,
next_state,
'Default next state should be %s, but got %s.' % (
ParsingState.wait_for_section, next_state))
def test_fixed_callback(self):
fixed = 'fixed 555'
next_state = self._sut.parse_token(fixed, self._mock_graph)
self._mock_graph.presolve__fixed.assert_called_with(fixed, [555])
def test_lower_callback(self):
lower = 'lower 444'
next_state = self._sut.parse_token(lower, self._mock_graph)
self._mock_graph.presolve__lower.assert_called_with(lower, [444])
def test_upper_callback(self):
upper = 'upper 999'
next_state = self._sut.parse_token(upper, self._mock_graph)
self._mock_graph.presolve__upper.assert_called_with(upper, [999])
def test_time_callback(self):
_time = 'time 555'
next_state = self._sut.parse_token(_time, self._mock_graph)
self._mock_graph.presolve__time.assert_called_with(_time, [555])
def test_orgnodes_callback(self):
orgnodes = 'orgnodes 555'
next_state = self._sut.parse_token(orgnodes, self._mock_graph)
self._mock_graph.presolve__orgnodes.assert_called_with(orgnodes, [555])
def test_orgedges_callback(self):
orgedges = 'orgedges 555'
next_state = self._sut.parse_token(orgedges, self._mock_graph)
self._mock_graph.presolve__orgedges.assert_called_with(orgedges, [555])
def test_ea_callback(self):
ea = 'ea 111 222 333 444'
next_state = self._sut.parse_token(ea, self._mock_graph)
self._mock_graph.presolve__ea.assert_called_with(
ea, [111, 222, 333, 444])
def test_ec_callback(self):
ec = 'ec 555 666 777'
next_state = self._sut.parse_token(ec, self._mock_graph)
self._mock_graph.presolve__ec.assert_called_with(ec, [555, 666, 777])
def test_ed_callback(self):
ed = 'ed 555 888 999'
next_state = self._sut.parse_token(ed, self._mock_graph)
self._mock_graph.presolve__ed.assert_called_with(ed, [555, 888, 999])
def test_es_callback(self):
es = 'es 555 666'
next_state = self._sut.parse_token(es, self._mock_graph)
self._mock_graph.presolve__es.assert_called_with(es, [555, 666])
class TestObstaclesSectionParser(unittest.TestCase):
def setUp(self):
self._mock_graph = MagicMock()
self._sut = RootSectionParser.get_section_parser(
'SECTION Obstacles', self._mock_graph)
def test_end_next_status_is_returned(self):
end = 'END'
next_state = self._sut.parse_token(end, self._mock_graph)
self.assertEqual(
ParsingState.wait_for_section,
next_state,
'Default next state should be %s, but got %s.' % (
ParsingState.wait_for_section, next_state))
def test_rr_callback(self):
rr = 'rr 555 444 333 222'
next_state = self._sut.parse_token(rr, self._mock_graph)
self._mock_graph.obstacles__rr.assert_called_with(
rr, [555, 444, 333, 222])
class TestTerminalsSectionParser(unittest.TestCase):
def setUp(self):
self._mock_graph = MagicMock()
self._sut = RootSectionParser.get_section_parser(
'SECTION Terminals', self._mock_graph)
def test_end_next_status_is_returned(self):
end = 'END'
next_state = self._sut.parse_token(end, self._mock_graph)
self.assertEqual(
ParsingState.wait_for_section,
next_state,
'Default next state should be %s, but got %s.' % (
ParsingState.wait_for_section, next_state))
def test_terminals_callback(self):
terminals = 'terminals 444'
next_state = self._sut.parse_token(terminals, self._mock_graph)
self._mock_graph.terminals__terminals.assert_called_with(
terminals, [444])
def test_rootp_callback(self):
rootp = 'rootp 555'
next_state = self._sut.parse_token(rootp, self._mock_graph)
self._mock_graph.terminals__rootp.assert_called_with(rootp, [555])
def test_t_callback(self):
t = 't 333'
next_state = self._sut.parse_token(t, self._mock_graph)
self._mock_graph.terminals__t.assert_called_with(t, [333])
def test_tp_callback(self):
tp = 'tp 888'
next_state = self._sut.parse_token(tp, self._mock_graph)
self._mock_graph.terminals__tp.assert_called_with(tp, [888])
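# Illustrative sketch (not part of this suite; assumes SteinlibInstance
# needs no base-class initialization): the callback pattern these tests
# exercise. A SteinlibInstance subclass receives one callback per parsed
# token, e.g. graph__e(raw_line, [tail, head, weight]).
class _EdgeCollector(SteinlibInstance):
    def __init__(self):
        self.edges = []
    def graph__e(self, raw_string, args):
        # args arrive already converted to ints, per the tests above
        self.edges.append(tuple(args))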
|
|
#!/usr/bin/python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # Must be imported before pyauto
import pyauto
import test_utils
class PasswordTest(pyauto.PyUITest):
"""Tests that passwords work correctly."""
def Debug(self):
"""Test method for experimentation.
This method will not run automatically.
"""
while True:
raw_input('Interact with the browser and hit <enter> to dump passwords. ')
print '*' * 20
import pprint
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(self.GetSavedPasswords())
def _AssertWithinOneSecond(self, time1, time2):
self.assertTrue(abs(time1 - time2) < 1.0,
'Times not within an acceptable range. '
'First was %lf, second was %lf' % (time1, time2))
def _ConstructPasswordDictionary(self, username_value, password_value,
signon_realm, origin_url, username_element,
password_element, action_target,
time=1279650942.0, submit_element='submit',
blacklist=False):
"""Construct a password dictionary with all the required fields."""
return {'username_value': username_value,
'password_value': password_value,
'signon_realm': signon_realm,
'time': time,
'origin_url': origin_url,
'username_element': username_element,
'password_element': password_element,
'submit_element': submit_element,
'action_target': action_target,
'blacklist': blacklist}
def testSavePassword(self):
"""Test saving a password and getting saved passwords."""
password1 = self._ConstructPasswordDictionary(
'user@example.com', 'test.password',
'https://www.example.com/', 'https://www.example.com/login',
'username', 'password', 'https://www.example.com/login/')
self.assertTrue(self.AddSavedPassword(password1))
self.assertEquals(self.GetSavedPasswords(), [password1])
def testRemovePasswords(self):
"""Verify that saved passwords can be removed."""
password1 = self._ConstructPasswordDictionary(
'user1@example.com', 'test1.password',
'https://www.example.com/', 'https://www.example.com/login',
'username1', 'password', 'https://www.example.com/login/')
password2 = self._ConstructPasswordDictionary(
'user2@example.com', 'test2.password',
'https://www.example.com/', 'https://www.example.com/login',
'username2', 'password2', 'https://www.example.com/login/')
self.AddSavedPassword(password1)
self.AddSavedPassword(password2)
self.assertEquals(2, len(self.GetSavedPasswords()))
self.assertEquals([password1, password2], self.GetSavedPasswords())
self.RemoveSavedPassword(password1)
self.assertEquals(1, len(self.GetSavedPasswords()))
self.assertEquals([password2], self.GetSavedPasswords())
self.RemoveSavedPassword(password2)
# TODO: GetSavedPasswords() doesn't return anything when empty.
# http://crbug.com/64603
# self.assertFalse(self.GetSavedPasswords())
def testDisplayAndSavePasswordInfobar(self):
"""Verify password infobar displays and able to save password."""
test_utils.ClearPasswords(self)
url_https = 'https://www.google.com/accounts/'
url_logout = 'https://www.google.com/accounts/Logout'
creds = self.GetPrivateInfo()['test_google_account']
username = creds['username']
password = creds['password']
test_utils.GoogleAccountsLogin(self, username, password)
# Wait until page completes loading.
self.WaitUntil(
lambda: self.GetDOMValue('document.readyState'),
expect_retval='complete')
self.assertTrue(self.WaitForInfobarCount(1),
'Save password infobar did not appear.')
infobar = self.GetBrowserInfo()['windows'][0]['tabs'][0]['infobars']
self.assertEqual(infobar[0]['type'], 'confirm_infobar')
self.PerformActionOnInfobar('accept', infobar_index=0)
self.NavigateToURL(url_logout)
self.AppendTab(pyauto.GURL(url_https)) # New tab to avoid bug 70694
# Wait until accounts page load to detect value in username field.
self.WaitUntil(lambda: self.GetDOMValue('document.readyState', 0, 1),
expect_retval='complete')
test_utils.VerifyGoogleAccountCredsFilled(self, username, password,
tab_index=1, windex=0)
self.ExecuteJavascript('document.getElementById("gaia_loginform").submit();'
'window.domAutomationController.send("done")',
0, 1)
test_utils.ClearPasswords(self)
def testNeverSavePasswords(self):
"""Verify that we don't save passwords and delete saved passwords
for a domain when 'never for this site' is chosen."""
creds1 = self.GetPrivateInfo()['test_google_account']
test_utils.GoogleAccountsLogin(
self, creds1['username'], creds1['password'])
self.assertTrue(self.WaitForInfobarCount(1))
self.PerformActionOnInfobar('accept', infobar_index=0)
self.assertEquals(1, len(self.GetSavedPasswords()))
self.AppendTab(pyauto.GURL(creds1['logout_url']))
creds2 = self.GetPrivateInfo()['test_google_account_2']
test_utils.GoogleAccountsLogin(
self, creds2['username'], creds2['password'], tab_index=1)
self.assertTrue(self.WaitForInfobarCount(1, tab_index=1))
# Selecting 'Never for this site' option on password infobar.
self.PerformActionOnInfobar('cancel', infobar_index=0, tab_index=1)
# TODO: GetSavedPasswords() doesn't return anything when empty.
# http://crbug.com/64603
# self.assertFalse(self.GetSavedPasswords())
# TODO: Check the exceptions list
def testSavedPasswordInTabsAndWindows(self):
"""Verify saved username/password displays in Regular/Incognito Window
and NTP"""
username = 'test'
password = 'test12345'
password_dict = {
u'action_target': u'https://www.google.com/accounts/ServiceLoginAuth',
u'blacklist': False,
u'origin_url': u'https://www.google.com/accounts/ServiceLogin',
u'password_element': u'Passwd',
u'password_value': u'test12345',
u'signon_realm': u'https://www.google.com/',
u'submit_element': u'',
u'time': 1280939865.0,
u'username_element': u'Email',
u'username_value': u'test'}
url = 'https://www.google.com/accounts/ServiceLogin'
self.AddSavedPassword(password_dict)
self.NavigateToURL(url)
test_utils.VerifyGoogleAccountCredsFilled(self, username, password,
tab_index=0, windex=0)
self.AppendTab(pyauto.GURL(url))
test_utils.VerifyGoogleAccountCredsFilled(self, username, password,
tab_index=1, windex=0)
self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW)
self.NavigateToURL(url, 1, 0)
test_utils.VerifyGoogleAccountCredsFilled(self, username, password,
tab_index=0, windex=1)
test_utils.ClearPasswords(self)
def testInfoBarDisappearByNavigatingPage(self):
"""Test that Password infobar is dismissed by navigating to
different page."""
creds = self.GetPrivateInfo()['test_google_account']
# Login to Google a/c
test_utils.GoogleAccountsLogin(self, creds['username'], creds['password'])
# Wait for the infobar to appear
self.assertTrue(self.WaitForInfobarCount(1))
self.assertTrue(self.GetBrowserInfo()['windows'][0]['tabs'][0]['infobars'])
self.NavigateToURL('chrome://history')
self.assertTrue(self.WaitForInfobarCount(0))
# To make sure user is navigated to History page.
self.assertEqual('History', self.GetActiveTabTitle())
self.assertFalse(self.GetBrowserInfo()['windows'][0]['tabs'][0]['infobars'])
def testInfoBarDisappearByReload(self):
"""Test that Password infobar disappears by the page reload."""
creds = self.GetPrivateInfo()['test_google_account']
# Login to Google a/c
test_utils.GoogleAccountsLogin(self, creds['username'], creds['password'])
# Wait for the infobar to appear
self.assertTrue(self.WaitForInfobarCount(1))
self.assertTrue(self.GetBrowserInfo()['windows'][0]['tabs'][0]['infobars'])
self.GetBrowserWindow(0).GetTab(0).Reload()
self.assertTrue(self.WaitForInfobarCount(0))
self.assertFalse(self.GetBrowserInfo()['windows'][0]['tabs'][0]['infobars'])
def testPasswdInfoNotStoredWhenAutocompleteOff(self):
"""Verify that password infobar does not appear when autocomplete is off.
If the password field has autocomplete turned off, then the password infobar
should not offer to save the password info.
"""
password_info = {'Email': 'test@google.com',
'Passwd': 'test12345'}
url = self.GetHttpURLForDataPath(
os.path.join('password', 'password_autocomplete_off_test.html'))
self.NavigateToURL(url)
for key, value in password_info.iteritems():
script = ('document.getElementById("%s").value = "%s"; '
'window.domAutomationController.send("done");') % (key, value)
self.ExecuteJavascript(script, 0, 0)
js_code = """
document.getElementById("loginform").submit();
window.addEventListener("unload", function() {
window.domAutomationController.send("done");
});
"""
self.ExecuteJavascript(js_code, 0, 0)
# Wait until the form is submitted and the page completes loading.
self.WaitUntil(
lambda: self.GetDOMValue('document.readyState'),
expect_retval='complete')
password_infobar = (
self.GetBrowserInfo()['windows'][0]['tabs'][0]['infobars'])
self.assertFalse(password_infobar,
msg='Save password infobar offered to save password info.')
def _SendCharToPopulateField(self, char, tab_index=0, windex=0):
"""Simulate a char being typed into a field.
Args:
char: the char value to be typed into the field.
tab_index: tab index to work on. Defaults to 0 (first tab).
windex: window index to work on. Defaults to 0 (first window).
"""
CHAR_KEYPRESS = ord((char).upper()) # ASCII char key press.
KEY_DOWN_TYPE = 0 # kRawKeyDownType
KEY_UP_TYPE = 3 # kKeyUpType
self.SendWebkitKeyEvent(KEY_DOWN_TYPE, CHAR_KEYPRESS, tab_index, windex)
self.SendWebkitCharEvent(char, tab_index, windex)
self.SendWebkitKeyEvent(KEY_UP_TYPE, CHAR_KEYPRESS, tab_index, windex)
def testClearFetchedCredForNewUserName(self):
"""Verify that the fetched credentials are cleared for a new username.
This test requires sending key events rather than pasting a new username
into the Email field.
"""
user_creds = self._ConstructPasswordDictionary(
'user1@example.com', 'test1.password',
'https://www.google.com/',
'https://www.google.com/accounts/ServiceLogin',
'username', 'password',
'https://www.google.com/accounts/ServiceLogin')
url = 'https://www.google.com/accounts/ServiceLogin'
self.AddSavedPassword(user_creds)
self.NavigateToURL(url)
self.WaitUntil(
lambda: self.GetDOMValue('document.readyState'),
expect_retval='complete')
test_utils.VerifyGoogleAccountCredsFilled(
self, user_creds['username_value'], user_creds['password_value'],
tab_index=0, windex=0)
clear_username_field = (
'document.getElementById("Email").value = ""; '
'window.domAutomationController.send("done");')
set_focus = (
'document.getElementById("Email").focus(); '
'window.domAutomationController.send("done");')
self.ExecuteJavascript(clear_username_field, 0, 0)
self.ExecuteJavascript(set_focus, 0, 0)
self._SendCharToPopulateField('t', tab_index=0, windex=0)
passwd_value = self.GetDOMValue('document.getElementById("Passwd").value')
self.assertFalse(passwd_value,
msg='Password field not empty for new username.')
test_utils.ClearPasswords(self)
if __name__ == '__main__':
pyauto_functional.Main()
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Storage backend for SWIFT"""
from __future__ import absolute_import
import hashlib
import httplib
import math
import urllib
import urlparse
from oslo.config import cfg
from glance.common import auth
from glance.common import exception
from glance.openstack.common import excutils
import glance.openstack.common.log as logging
import glance.store
import glance.store.base
import glance.store.location
try:
import swiftclient
except ImportError:
pass
LOG = logging.getLogger(__name__)
DEFAULT_CONTAINER = 'glance'
DEFAULT_LARGE_OBJECT_SIZE = 5 * 1024 # 5GB
DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 200 # 200M
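# NOTE: despite its name, ONE_MB below is 1000 * 1024 bytes, not 1024 * 1024.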
ONE_MB = 1000 * 1024
swift_opts = [
cfg.BoolOpt('swift_enable_snet', default=False,
help=_('Whether to use ServiceNET to communicate with the '
'Swift storage servers.')),
cfg.StrOpt('swift_store_auth_address',
help=_('The address where the Swift authentication service '
'is listening.')),
cfg.StrOpt('swift_store_user', secret=True,
help=_('The user to authenticate against the Swift '
'authentication service')),
cfg.StrOpt('swift_store_key', secret=True,
help=_('Auth key for the user authenticating against the '
'Swift authentication service.')),
cfg.StrOpt('swift_store_auth_version', default='2',
help=_('Version of the authentication service to use. '
'Valid versions are 2 for keystone and 1 for swauth '
'and rackspace')),
cfg.BoolOpt('swift_store_auth_insecure', default=False,
help=_('If True, swiftclient won\'t check for a valid SSL '
'certificate when authenticating.')),
cfg.StrOpt('swift_store_region',
help=_('The region of the swift endpoint to be used for '
'single tenant. This setting is only necessary if the '
'tenant has multiple swift endpoints.')),
cfg.StrOpt('swift_store_endpoint_type', default='publicURL',
help=_('A string giving the endpoint type of the swift '
'service to use (publicURL, adminURL or internalURL). '
'This setting is only used if swift_store_auth_version '
'is 2.')),
cfg.StrOpt('swift_store_service_type', default='object-store',
help=_('A string giving the service type of the swift service '
'to use. This setting is only used if '
'swift_store_auth_version is 2.')),
cfg.StrOpt('swift_store_container',
default=DEFAULT_CONTAINER,
help=_('Container within the account that Glance should '
'use for storing images in Swift.')),
cfg.IntOpt('swift_store_large_object_size',
default=DEFAULT_LARGE_OBJECT_SIZE,
help=_('The size, in MB, above which Glance will start chunking '
'image files and creating a large object manifest in Swift.')),
cfg.IntOpt('swift_store_large_object_chunk_size',
default=DEFAULT_LARGE_OBJECT_CHUNK_SIZE,
help=_('The amount of data written to a temporary disk buffer '
'during the process of chunking the image file.')),
cfg.BoolOpt('swift_store_create_container_on_put', default=False,
help=_('A boolean value that determines if we create the '
'container if it does not exist.')),
cfg.BoolOpt('swift_store_multi_tenant', default=False,
help=_('If set to True, enables multi-tenant storage '
'mode which causes Glance images to be stored in '
'tenant specific Swift accounts.')),
cfg.ListOpt('swift_store_admin_tenants', default=[],
help=_('A list of tenants that will be granted read/write '
'access on all Swift containers created by Glance in '
'multi-tenant mode.')),
cfg.BoolOpt('swift_store_ssl_compression', default=True,
help=_('If set to False, disables SSL layer compression of '
'https swift requests. Setting to False may improve '
'performance for images which are already in a '
'compressed format, eg qcow2.')),
]
CONF = cfg.CONF
CONF.register_opts(swift_opts)
class StoreLocation(glance.store.location.StoreLocation):
"""
Class describing a Swift URI. A Swift URI can look like any of
the following:
swift://user:pass@authurl.com/container/obj-id
swift://account:user:pass@authurl.com/container/obj-id
swift+http://user:pass@authurl.com/container/obj-id
swift+https://user:pass@authurl.com/container/obj-id
When using multi-tenant a URI might look like this (a storage URL):
swift+https://example.com/container/obj-id
The swift+http:// URIs indicate there is an HTTP authentication URL.
The default for Swift is an HTTPS authentication URL, so swift:// and
swift+https:// are the same...
"""
def process_specs(self):
self.scheme = self.specs.get('scheme', 'swift+https')
self.user = self.specs.get('user')
self.key = self.specs.get('key')
self.auth_or_store_url = self.specs.get('auth_or_store_url')
self.container = self.specs.get('container')
self.obj = self.specs.get('obj')
def _get_credstring(self):
if self.user and self.key:
return '%s:%s@' % (urllib.quote(self.user), urllib.quote(self.key))
return ''
def get_uri(self):
auth_or_store_url = self.auth_or_store_url
if auth_or_store_url.startswith('http://'):
auth_or_store_url = auth_or_store_url[len('http://'):]
elif auth_or_store_url.startswith('https://'):
auth_or_store_url = auth_or_store_url[len('https://'):]
credstring = self._get_credstring()
auth_or_store_url = auth_or_store_url.strip('/')
container = self.container.strip('/')
obj = self.obj.strip('/')
return '%s://%s%s/%s/%s' % (self.scheme, credstring, auth_or_store_url,
container, obj)
def parse_uri(self, uri):
"""
Parse URLs. This method fixes an issue where credentials specified
in the URL are interpreted differently in Python 2.6.1+ than prior
versions of Python. It also deals with the peculiarity that new-style
Swift URIs have where a username can contain a ':', like so:
swift://account:user:pass@authurl.com/container/obj
"""
# Make sure that URIs that contain multiple schemes, such as:
# swift://user:pass@http://authurl.com/v1/container/obj
# are immediately rejected.
if uri.count('://') != 1:
reason = _("URI cannot contain more than one occurrence "
"of a scheme. If you have specified a URI like "
"swift://user:pass@http://authurl.com/v1/container/obj"
", you need to change it to use the "
"swift+http:// scheme, like so: "
"swift+http://user:pass@authurl.com/v1/container/obj")
LOG.debug(_("Invalid store URI: %(reason)s") % locals())
raise exception.BadStoreUri(message=reason)
pieces = urlparse.urlparse(uri)
assert pieces.scheme in ('swift', 'swift+http', 'swift+https')
self.scheme = pieces.scheme
netloc = pieces.netloc
path = pieces.path.lstrip('/')
if netloc != '':
# Python newer than 2.6.1
if '@' in netloc:
creds, netloc = netloc.split('@')
else:
creds = None
else:
# Python 2.6.1 compat
# see lp659445 and Python issue7904
if '@' in path:
creds, path = path.split('@')
else:
creds = None
netloc = path[0:path.find('/')].strip('/')
path = path[path.find('/'):].strip('/')
if creds:
cred_parts = creds.split(':')
if len(cred_parts) != 2:
reason = (_("Badly formed credentials in Swift URI."))
LOG.debug(reason)
raise exception.BadStoreUri()
user, key = cred_parts
self.user = urllib.unquote(user)
self.key = urllib.unquote(key)
else:
self.user = None
self.key = None
path_parts = path.split('/')
try:
self.obj = path_parts.pop()
self.container = path_parts.pop()
if not netloc.startswith('http'):
# push hostname back into the remaining to build full authurl
path_parts.insert(0, netloc)
self.auth_or_store_url = '/'.join(path_parts)
except IndexError:
reason = _("Badly formed Swift URI.")
LOG.debug(reason)
raise exception.BadStoreUri()
@property
def swift_url(self):
"""
Creates a fully-qualified auth url that the Swift client library can
use. The scheme for the auth_url is determined using the scheme
included in the `location` field.
HTTPS is assumed, unless 'swift+http' is specified.
"""
if self.auth_or_store_url.startswith('http'):
return self.auth_or_store_url
else:
if self.scheme in ('swift+https', 'swift'):
auth_scheme = 'https://'
else:
auth_scheme = 'http://'
return ''.join([auth_scheme, self.auth_or_store_url])
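# Usage sketch (values assumed; relies on the base StoreLocation keeping the
# spec dict on `self.specs`, as process_specs() above implies):
#
# loc = StoreLocation({'scheme': 'swift+https', 'user': 'user', 'key': 'pass',
# 'auth_or_store_url': 'authurl.com/v1', 'container': 'glance',
# 'obj': 'obj-id'})
# loc.get_uri()
# -> 'swift+https://user:pass@authurl.com/v1/glance/obj-id'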
def Store(context=None, loc=None):
if (CONF.swift_store_multi_tenant and
(loc is None or loc.store_location.user is None)):
return MultiTenantStore(context, loc)
return SingleTenantStore(context, loc)
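# Selection sketch: CONF.swift_store_multi_tenant picks MultiTenantStore for
# new uploads, but a location that already carries user credentials (an
# old-style single-tenant URI) still gets a SingleTenantStore so the image
# can be read back with the embedded credentials.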
class BaseStore(glance.store.base.Store):
CHUNKSIZE = 65536
def get_schemes(self):
return ('swift+https', 'swift', 'swift+http')
def configure(self):
_obj_size = self._option_get('swift_store_large_object_size')
self.large_object_size = _obj_size * ONE_MB
_chunk_size = self._option_get('swift_store_large_object_chunk_size')
self.large_object_chunk_size = _chunk_size * ONE_MB
self.admin_tenants = CONF.swift_store_admin_tenants
self.region = CONF.swift_store_region
self.service_type = CONF.swift_store_service_type
self.endpoint_type = CONF.swift_store_endpoint_type
self.snet = CONF.swift_enable_snet
self.insecure = CONF.swift_store_auth_insecure
self.ssl_compression = CONF.swift_store_ssl_compression
def get(self, location, connection=None):
location = location.store_location
if not connection:
connection = self.get_connection(location)
try:
resp_headers, resp_body = connection.get_object(
container=location.container, obj=location.obj,
resp_chunk_size=self.CHUNKSIZE)
except swiftclient.ClientException as e:
if e.http_status == httplib.NOT_FOUND:
msg = _("Swift could not find image at URI.")
raise exception.NotFound(msg)
else:
raise
class ResponseIndexable(glance.store.Indexable):
def another(self):
try:
return self.wrapped.next()
except StopIteration:
return ''
length = int(resp_headers.get('content-length', 0))
return (ResponseIndexable(resp_body, length), length)
def get_size(self, location, connection=None):
location = location.store_location
if not connection:
connection = self.get_connection(location)
try:
resp_headers = connection.head_object(
container=location.container, obj=location.obj)
return int(resp_headers.get('content-length', 0))
except Exception:
return 0
def _option_get(self, param):
result = getattr(CONF, param)
if not result:
reason = (_("Could not find %(param)s in configuration "
"options.") % locals())
LOG.error(reason)
raise exception.BadStoreConfiguration(store_name="swift",
reason=reason)
return result
def _delete_stale_chunks(self, connection, container, chunk_list):
for chunk in chunk_list:
LOG.debug(_("Deleting chunk %s") % chunk)
try:
connection.delete_object(container, chunk)
except Exception:
msg = _("Failed to delete orphaned chunk %s/%s")
LOG.exception(msg, container, chunk)
def add(self, image_id, image_file, image_size, connection=None):
location = self.create_location(image_id)
if not connection:
connection = self.get_connection(location)
self._create_container_if_missing(location.container, connection)
LOG.debug(_("Adding image object '%(obj_name)s' "
"to Swift") % dict(obj_name=location.obj))
try:
if image_size > 0 and image_size < self.large_object_size:
# Image size is known, and is less than large_object_size.
# Send to Swift with regular PUT.
obj_etag = connection.put_object(location.container,
location.obj, image_file,
content_length=image_size)
else:
# Write the image into Swift in chunks.
chunk_id = 1
if image_size > 0:
total_chunks = str(int(
math.ceil(float(image_size) /
float(self.large_object_chunk_size))))
else:
# image_size == 0 is when we don't know the size
# of the image. This can occur with older clients
# that don't inspect the payload size.
LOG.debug(_("Cannot determine image size. Adding as a "
"segmented object to Swift."))
total_chunks = '?'
checksum = hashlib.md5()
written_chunks = []
combined_chunks_size = 0
while True:
chunk_size = self.large_object_chunk_size
if image_size == 0:
content_length = None
else:
left = image_size - combined_chunks_size
if left == 0:
break
if chunk_size > left:
chunk_size = left
content_length = chunk_size
chunk_name = "%s-%05d" % (location.obj, chunk_id)
reader = ChunkReader(image_file, checksum, chunk_size)
try:
chunk_etag = connection.put_object(
location.container, chunk_name, reader,
content_length=content_length)
written_chunks.append(chunk_name)
except Exception:
# Delete orphaned segments from swift backend
with excutils.save_and_reraise_exception():
LOG.exception(_("Error during chunked upload to "
"backend, deleting stale chunks"))
self._delete_stale_chunks(connection,
location.container,
written_chunks)
bytes_read = reader.bytes_read
msg = _("Wrote chunk %(chunk_name)s (%(chunk_id)d/"
"%(total_chunks)s) of length %(bytes_read)d "
"to Swift returning MD5 of content: "
"%(chunk_etag)s")
LOG.debug(msg % locals())
if bytes_read == 0:
# Delete the last chunk, because it's of zero size.
# This will happen if size == 0.
LOG.debug(_("Deleting final zero-length chunk"))
connection.delete_object(location.container,
chunk_name)
break
chunk_id += 1
combined_chunks_size += bytes_read
# In the case we have been given an unknown image size,
# set the size to the total size of the combined chunks.
if image_size == 0:
image_size = combined_chunks_size
# Now we write the object manifest and return the
# manifest's etag...
manifest = "%s/%s-" % (location.container, location.obj)
headers = {'ETag': hashlib.md5("").hexdigest(),
'X-Object-Manifest': manifest}
# The ETag returned for the manifest is actually the
# MD5 hash of the concatenated checksums of the strings
# of each chunk...so we ignore this result in favour of
# the MD5 of the entire image file contents, so that
# users can verify the image file contents accordingly
connection.put_object(location.container, location.obj,
None, headers=headers)
obj_etag = checksum.hexdigest()
# NOTE: We return the user and key here! Have to because
# location is used by the API server to return the actual
# image data. We *really* should consider NOT returning
# the location attribute from GET /images/<ID> and
# GET /images/details
return (location.get_uri(), image_size, obj_etag, {})
except swiftclient.ClientException as e:
if e.http_status == httplib.CONFLICT:
raise exception.Duplicate(_("Swift already has an image at "
"this location"))
msg = (_("Failed to add object to Swift.\n"
"Got error from Swift: %(e)s") % locals())
LOG.error(msg)
raise glance.store.BackendException(msg)
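    # Layout sketch for the segmented path above (hypothetical names): an
    # image '42' uploaded in two chunks to container 'glance' ends up as
    #
    #   glance/42-00001, glance/42-00002   <- segment objects
    #   glance/42                          <- zero-byte manifest with header
    #                                         'X-Object-Manifest: glance/42-'
    #
    # This is Swift's dynamic-large-object convention: a GET of the manifest
    # streams every object matching that prefix, in sorted order.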
def delete(self, location, connection=None):
location = location.store_location
if not connection:
connection = self.get_connection(location)
try:
# We request the manifest for the object. If one exists,
# that means the object was uploaded in chunks/segments,
# and we need to delete all the chunks as well as the
# manifest.
manifest = None
try:
headers = connection.head_object(
location.container, location.obj)
manifest = headers.get('x-object-manifest')
except swiftclient.ClientException as e:
if e.http_status != httplib.NOT_FOUND:
raise
if manifest:
# Delete all the chunks before the object manifest itself
obj_container, obj_prefix = manifest.split('/', 1)
segments = connection.get_container(
obj_container, prefix=obj_prefix)[1]
for segment in segments:
# TODO(jaypipes): This would be an easy area to parallelize
# since we're simply sending off parallelizable requests
# to Swift to delete stuff. It's not like we're going to
# be hogging up network or file I/O here...
connection.delete_object(obj_container,
segment['name'])
# Delete object (or, in segmented case, the manifest)
connection.delete_object(location.container, location.obj)
except swiftclient.ClientException as e:
if e.http_status == httplib.NOT_FOUND:
msg = _("Swift could not find image at URI.")
raise exception.NotFound(msg)
else:
raise
def _create_container_if_missing(self, container, connection):
"""
Creates a missing container in Swift if the
``swift_store_create_container_on_put`` option is set.
:param container: Name of container to create
:param connection: Connection to swift service
"""
try:
connection.head_container(container)
except swiftclient.ClientException as e:
if e.http_status == httplib.NOT_FOUND:
if CONF.swift_store_create_container_on_put:
try:
connection.put_container(container)
except swiftclient.ClientException as e:
msg = _("Failed to add container to Swift.\n"
"Got error from Swift: %(e)s") % locals()
raise glance.store.BackendException(msg)
else:
msg = (_("The container %(container)s does not exist in "
"Swift. Please set the "
"swift_store_create_container_on_put option"
"to add container to Swift automatically.") %
locals())
raise glance.store.BackendException(msg)
else:
raise
    def get_connection(self, location):
        raise NotImplementedError()
    def create_location(self, image_id):
        raise NotImplementedError()
class SingleTenantStore(BaseStore):
EXAMPLE_URL = "swift://<USER>:<KEY>@<AUTH_ADDRESS>/<CONTAINER>/<FILE>"
def configure(self):
super(SingleTenantStore, self).configure()
self.auth_version = self._option_get('swift_store_auth_version')
def configure_add(self):
self.auth_address = self._option_get('swift_store_auth_address')
if self.auth_address.startswith('http://'):
self.scheme = 'swift+http'
else:
self.scheme = 'swift+https'
self.container = CONF.swift_store_container
self.user = self._option_get('swift_store_user')
self.key = self._option_get('swift_store_key')
def create_location(self, image_id):
specs = {'scheme': self.scheme,
'container': self.container,
'obj': str(image_id),
'auth_or_store_url': self.auth_address,
'user': self.user,
'key': self.key}
return StoreLocation(specs)
def get_connection(self, location):
if not location.user:
reason = (_("Location is missing user:password information."))
LOG.debug(reason)
raise exception.BadStoreUri(message=reason)
auth_url = location.swift_url
if not auth_url.endswith('/'):
auth_url += '/'
if self.auth_version == '2':
try:
tenant_name, user = location.user.split(':')
except ValueError:
reason = (_("Badly formed tenant:user '%(user)s' in "
"Swift URI") % {'user': location.user})
LOG.debug(reason)
raise exception.BadStoreUri()
else:
tenant_name = None
user = location.user
os_options = {}
if self.region:
os_options['region_name'] = self.region
os_options['endpoint_type'] = self.endpoint_type
os_options['service_type'] = self.service_type
return swiftclient.Connection(
auth_url, user, location.key, insecure=self.insecure,
tenant_name=tenant_name, snet=self.snet,
auth_version=self.auth_version, os_options=os_options,
ssl_compression=self.ssl_compression)
class MultiTenantStore(BaseStore):
EXAMPLE_URL = "swift://<SWIFT_URL>/<CONTAINER>/<FILE>"
def configure_add(self):
self.container = CONF.swift_store_container
if self.context is None:
reason = _("Multi-tenant Swift storage requires a context.")
raise exception.BadStoreConfiguration(store_name="swift",
reason=reason)
if self.context.service_catalog is None:
reason = _("Multi-tenant Swift storage requires "
"a service catalog.")
raise exception.BadStoreConfiguration(store_name="swift",
reason=reason)
self.storage_url = auth.get_endpoint(
self.context.service_catalog, service_type=self.service_type,
endpoint_region=self.region, endpoint_type=self.endpoint_type)
if self.storage_url.startswith('http://'):
self.scheme = 'swift+http'
else:
self.scheme = 'swift+https'
def delete(self, location, connection=None):
if not connection:
connection = self.get_connection(location.store_location)
super(MultiTenantStore, self).delete(location, connection)
connection.delete_container(location.store_location.container)
def set_acls(self, location, public=False, read_tenants=None,
write_tenants=None, connection=None):
location = location.store_location
if not connection:
connection = self.get_connection(location)
if read_tenants is None:
read_tenants = []
if write_tenants is None:
write_tenants = []
headers = {}
if public:
headers['X-Container-Read'] = ".r:*,.rlistings"
elif read_tenants:
headers['X-Container-Read'] = ','.join('%s:*' % i
for i in read_tenants)
else:
headers['X-Container-Read'] = ''
write_tenants.extend(self.admin_tenants)
if write_tenants:
headers['X-Container-Write'] = ','.join('%s:*' % i
for i in write_tenants)
else:
headers['X-Container-Write'] = ''
try:
connection.post_container(location.container, headers=headers)
except swiftclient.ClientException as e:
if e.http_status == httplib.NOT_FOUND:
msg = _("Swift could not find image at URI.")
raise exception.NotFound(msg)
else:
raise
def create_location(self, image_id):
specs = {'scheme': self.scheme,
'container': self.container + '_' + str(image_id),
'obj': str(image_id),
'auth_or_store_url': self.storage_url}
return StoreLocation(specs)
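    # e.g. (hypothetical values): with swift_store_container = 'glance' and
    # image id 'abc123', the image lands in container 'glance_abc123' as
    # object 'abc123' -- one container per image, so set_acls() can grant
    # per-image read/write access.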
def get_connection(self, location):
return swiftclient.Connection(
None, self.context.user, None,
preauthurl=location.swift_url,
preauthtoken=self.context.auth_tok,
tenant_name=self.context.tenant,
auth_version='2', snet=self.snet, insecure=self.insecure,
ssl_compression=self.ssl_compression)
class ChunkReader(object):
def __init__(self, fd, checksum, total):
self.fd = fd
self.checksum = checksum
self.total = total
self.bytes_read = 0
def read(self, i):
left = self.total - self.bytes_read
if i > left:
i = left
result = self.fd.read(i)
self.bytes_read += len(result)
self.checksum.update(result)
return result
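# Usage sketch (hypothetical stream): ChunkReader caps reads at `total`
# bytes, so swiftclient sees EOF at each segment boundary while the shared
# checksum keeps accumulating across all segments of one image:
#
#   checksum = hashlib.md5()
#   reader = ChunkReader(image_file, checksum, 200 * ONE_MB)
#   data = reader.read(65536)  # at most 65536 bytes, never past `total`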
|
|
# coding: utf-8
import hashlib, datetime, sys
import tornado.httpclient
from tornado.web import HTTPError
from tornado.options import options
from apps import BaseRequestHandler
from apps.models import User, Loud
from utils.decorator import authenticated, admin, owner
from utils.mkthings import QDict
class LoudHandler(BaseRequestHandler):
@authenticated
def get(self, lid):
loud = Loud.query.get_by_key(lid)
if loud:
loud_dict = loud.loud_to_dict()
self.render_json(loud_dict)
else:
self.set_status(404)
            self.render_json(self.message("the loud does not exist"))
@authenticated
def post(self, lid):
        # precondition: a user may own at most 3 louds.
loud_count = Loud.query.get_louds().filter(Loud.user.has(User.phone==self.current_user.phone)).count()
if loud_count >= 3:
raise HTTPError(412)
data = self.get_data()
self.wrap_mars_addr(data)
loud = Loud()
loud.user_id = self.current_user.id
if self.current_user.is_admin:
# admin's loud
data['grade'] = 0
loud.from_dict(data)
if loud.save():
self.set_status(201)
self.set_header('Location', loud.get_link())
msg = self.message("Created Success.")
else:
self.set_status(400)
msg = self.message("content,lat,lon,address fields are required.")
        # additional operation: record the user's last position
#self.current_user.last_lat = data['lat']
#self.current_user.last_lon = data['lon']
#self.current_user.save()
self.render_json(msg)
@authenticated
@admin('lid', 'loud')
def delete(self, loud):
loud.block = True
loud.save()
        self.render_json(self.message("Remove Success."))
def get_recipient(self, lid):
return Loud.query.get_by_key(lid)
def wrap_mars_addr(self, data):
        assert 'lat' in data and 'lon' in data, "The data must have location information"
lat, lon = data['lat'], data['lon']
flat, flon = self.e2m(lat, lon)
if flat and flon:
data['flat'], data['flon'] = flat, flon
addr = self.m2addr(flat, flon)
if addr:
                # addr looks like 'China,Zhejiang,Hangzhou,xihuqu#yugu.rd218'
                political, data['address'] = addr.split('#')
def e2m(self, lat, lon):
mars_location_uri = "%s%s" % (options.geo_uri, '/e2m/%f,%f' % (lat, lon))
http = tornado.httpclient.HTTPClient()
try:
rsp = http.fetch(mars_location_uri)
        except tornado.httpclient.HTTPError:
res = None, None
else:
geo = self.dejson(rsp.body)
res = geo.get('lat', None), geo.get('lon', None)
return res
def m2addr(self, lat, lon):
mars_addr_uri = "%s%s" % (options.geo_uri, '/m2addr/%f,%f' % (lat, lon))
http = tornado.httpclient.HTTPClient()
try:
rsp = http.fetch(mars_addr_uri)
        except tornado.httpclient.HTTPError:
res = None
else:
res = rsp.body
return res
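    # Note: 'e2m' is assumed to convert earth (WGS-84) coordinates to the
    # offset 'mars' (GCJ-02) datum used by Chinese map services, and
    # 'm2addr' to reverse-geocode the converted point; both endpoints are
    # expected to live under options.geo_uri.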
class SearchLoudHandler(BaseRequestHandler):
@authenticated
def get(self):
condition = self.get_argument('q')
        try:
            field, value = condition.split(':', 1)
        except ValueError:
            raise HTTPError(400)
handle_q = {
'author': lambda phn: Loud.query\
.get_louds()\
.filter(Loud.user.has(User.phone==phn)),
'position': lambda data: Loud.query\
.get_by_cycle2(*data.split(',')),
'key': lambda data: Loud.query\
.get_by_cycle_key(*data.split(',')),
}
if field in handle_q:
q = QDict(
q=condition,
v=value,
sort=self.get_argument('qs'),
start=int(self.get_argument('st')),
num=int(self.get_argument('qn')),
)
query_louds = handle_q[field](q.v)
gmt_now = datetime.datetime.now() - datetime.timedelta(hours=8)
self.set_header('Last-Modified', gmt_now.strftime('%a, %d %b %Y %H:%M:%S GMT'))
# composite the results collection
total = query_louds.count()
query_dict = {
'q': q.q,
'qs': q.sort,
'st': q.start,
'qn': q.num,
}
loud_collection = {
'louds': [e.loud_to_dict() for e in query_louds.order_by(q.sort).limit(q.num).offset(q.start)],
'total': total,
'link': self.full_uri(query_dict),
}
if q.start + q.num < total:
query_dict['st'] = q.start + q.num
loud_collection['next'] = self.full_uri(query_dict)
if q.start > 0:
query_dict['st'] = max(q.start - q.num, 0)
loud_collection['prev'] = self.full_uri(query_dict)
# make etag prepare
self.cur_louds = loud_collection['louds']
else:
raise HTTPError(400)
self.render_json(loud_collection)
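    # Request sketch (hypothetical values):
    #   GET /search?q=author:13800138000&qs=-updated&st=0&qn=20
    # 'q' is '<field>:<value>'; 'qs' picks the sort order, while 'st'/'qn'
    # page the result window and drive the 'next'/'prev' links above.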
    def compute_etag(self):
        hasher = hashlib.sha1()
        if 'cur_louds' in self.__dict__:
            for loud_id in sorted(loud['id'] for loud in self.cur_louds):
                hasher.update(loud_id)
        return '"%s"' % hasher.hexdigest()
class UpdatedLoudHandler(BaseRequestHandler):
@authenticated
def get(self):
lat = self.get_argument('lat')
lon = self.get_argument('lon')
new_loud_count = Loud.query.cycle_update(lat, lon, self.last_modified_time).count()
if new_loud_count <= 0:
raise HTTPError(304)
self.render_json({'count': new_loud_count})
@property
def last_modified_time(self):
ims = self.request.headers.get('If-Modified-Since', None)
ims_time = datetime.datetime(1970,1,1,0,0)
if ims:
ims_time = datetime.datetime.strptime(ims, '%a, %d %b %Y %H:%M:%S %Z') + datetime.timedelta(hours=8)
return ims_time
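    # e.g. (hypothetical header): 'If-Modified-Since: Wed, 21 Oct 2015
    # 07:28:00 GMT' is parsed as GMT and shifted +8 hours into local (UTC+8)
    # time before cycle_update() compares it against loud timestamps.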
|
|
'''Wrapper for SDL_mixer.h
Generated with:
/usr/bin/ctypesgen -lSDL_mixer /usr/include/SDL/SDL_mixer.h -o sdlmixer.py
Do not modify this file.
'''
__docformat__ = 'restructuredtext'
# Begin preamble
import ctypes, os, sys
from ctypes import *
_int_types = (c_int16, c_int32)
if hasattr(ctypes, 'c_int64'):
# Some builds of ctypes apparently do not have c_int64
# defined; it's a pretty good bet that these builds do not
# have 64-bit pointers.
_int_types += (c_int64,)
for t in _int_types:
if sizeof(t) == sizeof(c_size_t):
c_ptrdiff_t = t
del t
del _int_types
class c_void(Structure):
# c_void_p is a buggy return type, converting to int, so
# POINTER(None) == c_void_p is actually written as
# POINTER(c_void), so it can be treated as a real pointer.
_fields_ = [('dummy', c_int)]
def POINTER(obj):
p = ctypes.POINTER(obj)
# Convert None to a real NULL pointer to work around bugs
# in how ctypes handles None on 64-bit platforms
if not isinstance(p.from_param, classmethod):
def from_param(cls, x):
if x is None:
return cls()
else:
return x
p.from_param = classmethod(from_param)
return p
class UserString:
def __init__(self, seq):
if isinstance(seq, str):
self.data = seq
elif isinstance(seq, UserString):
self.data = seq.data[:]
else:
self.data = str(seq)
def __str__(self): return str(self.data)
def __repr__(self): return repr(self.data)
def __int__(self): return int(self.data)
def __long__(self): return int(self.data)
def __float__(self): return float(self.data)
def __complex__(self): return complex(self.data)
def __hash__(self): return hash(self.data)
def __cmp__(self, string):
if isinstance(string, UserString):
return cmp(self.data, string.data)
else:
return cmp(self.data, string)
def __contains__(self, char):
return char in self.data
def __len__(self): return len(self.data)
def __getitem__(self, index): return self.__class__(self.data[index])
def __getslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
return self.__class__(self.data[start:end])
def __add__(self, other):
if isinstance(other, UserString):
return self.__class__(self.data + other.data)
elif isinstance(other, str):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + str(other))
def __radd__(self, other):
if isinstance(other, str):
return self.__class__(other + self.data)
else:
return self.__class__(str(other) + self.data)
def __mul__(self, n):
return self.__class__(self.data*n)
__rmul__ = __mul__
def __mod__(self, args):
return self.__class__(self.data % args)
# the following methods are defined in alphabetical order:
def capitalize(self): return self.__class__(self.data.capitalize())
def center(self, width, *args):
return self.__class__(self.data.center(width, *args))
def count(self, sub, start=0, end=sys.maxsize):
return self.data.count(sub, start, end)
def decode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.decode(encoding, errors))
else:
return self.__class__(self.data.decode(encoding))
else:
return self.__class__(self.data.decode())
def encode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.encode(encoding, errors))
else:
return self.__class__(self.data.encode(encoding))
else:
return self.__class__(self.data.encode())
def endswith(self, suffix, start=0, end=sys.maxsize):
return self.data.endswith(suffix, start, end)
def expandtabs(self, tabsize=8):
return self.__class__(self.data.expandtabs(tabsize))
def find(self, sub, start=0, end=sys.maxsize):
return self.data.find(sub, start, end)
def index(self, sub, start=0, end=sys.maxsize):
return self.data.index(sub, start, end)
def isalpha(self): return self.data.isalpha()
def isalnum(self): return self.data.isalnum()
def isdecimal(self): return self.data.isdecimal()
def isdigit(self): return self.data.isdigit()
def islower(self): return self.data.islower()
def isnumeric(self): return self.data.isnumeric()
def isspace(self): return self.data.isspace()
def istitle(self): return self.data.istitle()
def isupper(self): return self.data.isupper()
def join(self, seq): return self.data.join(seq)
def ljust(self, width, *args):
return self.__class__(self.data.ljust(width, *args))
def lower(self): return self.__class__(self.data.lower())
def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
def partition(self, sep):
return self.data.partition(sep)
def replace(self, old, new, maxsplit=-1):
return self.__class__(self.data.replace(old, new, maxsplit))
def rfind(self, sub, start=0, end=sys.maxsize):
return self.data.rfind(sub, start, end)
def rindex(self, sub, start=0, end=sys.maxsize):
return self.data.rindex(sub, start, end)
def rjust(self, width, *args):
return self.__class__(self.data.rjust(width, *args))
def rpartition(self, sep):
return self.data.rpartition(sep)
def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
def split(self, sep=None, maxsplit=-1):
return self.data.split(sep, maxsplit)
def rsplit(self, sep=None, maxsplit=-1):
return self.data.rsplit(sep, maxsplit)
def splitlines(self, keepends=0): return self.data.splitlines(keepends)
def startswith(self, prefix, start=0, end=sys.maxsize):
return self.data.startswith(prefix, start, end)
def strip(self, chars=None): return self.__class__(self.data.strip(chars))
def swapcase(self): return self.__class__(self.data.swapcase())
def title(self): return self.__class__(self.data.title())
def translate(self, *args):
return self.__class__(self.data.translate(*args))
def upper(self): return self.__class__(self.data.upper())
def zfill(self, width): return self.__class__(self.data.zfill(width))
class MutableString(UserString):
"""mutable string objects
Python strings are immutable objects. This has the advantage, that
strings may be used as dictionary keys. If this property isn't needed
and you insist on changing string values in place instead, you may cheat
and use MutableString.
But the purpose of this class is an educational one: to prevent
people from inventing their own mutable string class derived
    from UserString and then thereby forget to remove (override) the
__hash__ method inherited from UserString. This would lead to
errors that would be very hard to track down.
A faster and better solution is to rewrite your program using lists."""
def __init__(self, string=""):
self.data = string
def __hash__(self):
raise TypeError("unhashable type (it is mutable)")
def __setitem__(self, index, sub):
if index < 0:
index += len(self.data)
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + sub + self.data[index+1:]
def __delitem__(self, index):
if index < 0:
index += len(self.data)
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + self.data[index+1:]
def __setslice__(self, start, end, sub):
start = max(start, 0); end = max(end, 0)
if isinstance(sub, UserString):
self.data = self.data[:start]+sub.data+self.data[end:]
elif isinstance(sub, str):
self.data = self.data[:start]+sub+self.data[end:]
else:
self.data = self.data[:start]+str(sub)+self.data[end:]
def __delslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
self.data = self.data[:start] + self.data[end:]
def immutable(self):
return UserString(self.data)
def __iadd__(self, other):
if isinstance(other, UserString):
self.data += other.data
elif isinstance(other, str):
self.data += other
else:
self.data += str(other)
return self
def __imul__(self, n):
self.data *= n
return self
class String(MutableString, Union):
_fields_ = [('raw', POINTER(c_char)),
('data', c_char_p)]
def __init__(self, obj=""):
if isinstance(obj, (str, UserString)):
self.data = str(obj)
else:
self.raw = obj
def __len__(self):
return self.data and len(self.data) or 0
def from_param(cls, obj):
# Convert None or 0
if obj is None or obj == 0:
return cls(POINTER(c_char)())
# Convert from String
elif isinstance(obj, String):
return obj
# Convert from str
elif isinstance(obj, str):
return cls(obj)
# Convert from c_char_p
elif isinstance(obj, c_char_p):
return obj
# Convert from POINTER(c_char)
elif isinstance(obj, POINTER(c_char)):
return obj
# Convert from raw pointer
elif isinstance(obj, int):
return cls(cast(obj, POINTER(c_char)))
# Convert from object
else:
return String.from_param(obj._as_parameter_)
from_param = classmethod(from_param)
def ReturnString(obj, func=None, arguments=None):
return String.from_param(obj)
# As of ctypes 1.0, ctypes does not support custom error-checking
# functions on callbacks, nor does it support custom datatypes on
# callbacks, so we must ensure that all callbacks return
# primitive datatypes.
#
# Non-primitive return values wrapped with UNCHECKED won't be
# typechecked, and will be converted to c_void_p.
def UNCHECKED(type):
if (hasattr(type, "_type_") and isinstance(type._type_, str)
and type._type_ != "P"):
return type
else:
return c_void_p
# ctypes doesn't have direct support for variadic functions, so we have to write
# our own wrapper class
class _variadic_function(object):
def __init__(self,func,restype,argtypes):
self.func=func
self.func.restype=restype
self.argtypes=argtypes
def _as_parameter_(self):
# So we can pass this variadic function as a function pointer
return self.func
def __call__(self,*args):
fixed_args=[]
i=0
for argtype in self.argtypes:
# Typecheck what we can
fixed_args.append(argtype.from_param(args[i]))
i+=1
return self.func(*fixed_args+list(args[i:]))
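# Usage sketch (hypothetical call; exact string handling varies by Python
# version): fixed arguments are converted via their declared argtypes, and
# any trailing varargs are passed through untouched, e.g.
#
#   SDL_SetError('unsupported rate: %d', 44100)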
# End preamble
_libs = {}
_libdirs = []
# Begin loader
# ----------------------------------------------------------------------------
# Copyright (c) 2008 David James
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
import os.path, re, sys, glob
import ctypes
import ctypes.util
def _environ_path(name):
if name in os.environ:
return os.environ[name].split(":")
else:
return []
class LibraryLoader(object):
def __init__(self):
self.other_dirs=[]
def load_library(self,libname):
"""Given the name of a library, load it."""
paths = self.getpaths(libname)
for path in paths:
if os.path.exists(path):
return self.load(path)
raise ImportError("%s not found." % libname)
def load(self,path):
"""Given a path to a library, load it."""
try:
# Darwin requires dlopen to be called with mode RTLD_GLOBAL instead
# of the default RTLD_LOCAL. Without this, you end up with
# libraries not being loadable, resulting in "Symbol not found"
# errors
if sys.platform == 'darwin':
return ctypes.CDLL(path, ctypes.RTLD_GLOBAL)
else:
return ctypes.cdll.LoadLibrary(path)
except OSError as e:
raise ImportError(e)
def getpaths(self,libname):
"""Return a list of paths where the library might be found."""
if os.path.isabs(libname):
yield libname
else:
for path in self.getplatformpaths(libname):
yield path
path = ctypes.util.find_library(libname)
if path: yield path
def getplatformpaths(self, libname):
return []
# Darwin (Mac OS X)
class DarwinLibraryLoader(LibraryLoader):
name_formats = ["lib%s.dylib", "lib%s.so", "lib%s.bundle", "%s.dylib",
"%s.so", "%s.bundle", "%s"]
def getplatformpaths(self,libname):
if os.path.pathsep in libname:
names = [libname]
else:
names = [format % libname for format in self.name_formats]
for dir in self.getdirs(libname):
for name in names:
yield os.path.join(dir,name)
def getdirs(self,libname):
'''Implements the dylib search as specified in Apple documentation:
http://developer.apple.com/documentation/DeveloperTools/Conceptual/
DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html
Before commencing the standard search, the method first checks
the bundle's ``Frameworks`` directory if the application is running
within a bundle (OS X .app).
'''
dyld_fallback_library_path = _environ_path("DYLD_FALLBACK_LIBRARY_PATH")
if not dyld_fallback_library_path:
dyld_fallback_library_path = [os.path.expanduser('~/lib'),
'/usr/local/lib', '/usr/lib']
dirs = []
if '/' in libname:
dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
else:
dirs.extend(_environ_path("LD_LIBRARY_PATH"))
dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
dirs.extend(self.other_dirs)
dirs.append(".")
if hasattr(sys, 'frozen') and sys.frozen == 'macosx_app':
dirs.append(os.path.join(
os.environ['RESOURCEPATH'],
'..',
'Frameworks'))
dirs.extend(dyld_fallback_library_path)
return dirs
# Posix
class PosixLibraryLoader(LibraryLoader):
_ld_so_cache = None
def _create_ld_so_cache(self):
# Recreate search path followed by ld.so. This is going to be
# slow to build, and incorrect (ld.so uses ld.so.cache, which may
# not be up-to-date). Used only as fallback for distros without
# /sbin/ldconfig.
#
# We assume the DT_RPATH and DT_RUNPATH binary sections are omitted.
directories = []
for name in ("LD_LIBRARY_PATH",
"SHLIB_PATH", # HPUX
"LIBPATH", # OS/2, AIX
"LIBRARY_PATH", # BE/OS
):
if name in os.environ:
directories.extend(os.environ[name].split(os.pathsep))
directories.extend(self.other_dirs)
directories.append(".")
try: directories.extend([dir.strip() for dir in open('/etc/ld.so.conf')])
except IOError: pass
directories.extend(['/lib', '/usr/lib', '/lib64', '/usr/lib64'])
cache = {}
lib_re = re.compile(r'lib(.*)\.s[ol]')
ext_re = re.compile(r'\.s[ol]$')
for dir in directories:
try:
for path in glob.glob("%s/*.s[ol]*" % dir):
file = os.path.basename(path)
# Index by filename
if file not in cache:
cache[file] = path
# Index by library name
match = lib_re.match(file)
if match:
library = match.group(1)
if library not in cache:
cache[library] = path
except OSError:
pass
self._ld_so_cache = cache
def getplatformpaths(self, libname):
if self._ld_so_cache is None:
self._create_ld_so_cache()
result = self._ld_so_cache.get(libname)
if result: yield result
path = ctypes.util.find_library(libname)
if path: yield os.path.join("/lib",path)
# Windows
class _WindowsLibrary(object):
def __init__(self, path):
self.cdll = ctypes.cdll.LoadLibrary(path)
self.windll = ctypes.windll.LoadLibrary(path)
def __getattr__(self, name):
try: return getattr(self.cdll,name)
except AttributeError:
try: return getattr(self.windll,name)
except AttributeError:
raise
class WindowsLibraryLoader(LibraryLoader):
name_formats = ["%s.dll", "lib%s.dll", "%slib.dll"]
def load_library(self, libname):
try:
result = LibraryLoader.load_library(self, libname)
except ImportError:
result = None
if os.path.sep not in libname:
for name in self.name_formats:
try:
result = getattr(ctypes.cdll, name % libname)
if result:
break
except WindowsError:
result = None
if result is None:
try:
result = getattr(ctypes.cdll, libname)
except WindowsError:
result = None
if result is None:
raise ImportError("%s not found." % libname)
return result
def load(self, path):
return _WindowsLibrary(path)
def getplatformpaths(self, libname):
if os.path.sep not in libname:
for name in self.name_formats:
dll_in_current_dir = os.path.abspath(name % libname)
if os.path.exists(dll_in_current_dir):
yield dll_in_current_dir
path = ctypes.util.find_library(name % libname)
if path:
yield path
# Platform switching
# If your value of sys.platform does not appear in this dict, please contact
# the Ctypesgen maintainers.
loaderclass = {
"darwin": DarwinLibraryLoader,
"cygwin": WindowsLibraryLoader,
"win32": WindowsLibraryLoader
}
loader = loaderclass.get(sys.platform, PosixLibraryLoader)()
def add_library_search_dirs(other_dirs):
loader.other_dirs = other_dirs
load_library = loader.load_library
del loaderclass
# End loader
add_library_search_dirs([])
# Begin libraries
_libs["SDL_mixer"] = load_library("SDL_mixer")
# 1 libraries
# End libraries
# No modules
# /usr/include/bits/types.h: 61
class struct_anon_1(Structure):
pass
struct_anon_1.__slots__ = [
'__val',
]
struct_anon_1._fields_ = [
('__val', c_long * 2),
]
__quad_t = struct_anon_1 # /usr/include/bits/types.h: 61
__off_t = c_long # /usr/include/bits/types.h: 140
__off64_t = __quad_t # /usr/include/bits/types.h: 141
# /usr/include/libio.h: 253
class struct__IO_FILE(Structure):
pass
FILE = struct__IO_FILE # /usr/include/stdio.h: 48
_IO_lock_t = None # /usr/include/libio.h: 162
# /usr/include/libio.h: 168
class struct__IO_marker(Structure):
pass
struct__IO_marker.__slots__ = [
'_next',
'_sbuf',
'_pos',
]
struct__IO_marker._fields_ = [
('_next', POINTER(struct__IO_marker)),
('_sbuf', POINTER(struct__IO_FILE)),
('_pos', c_int),
]
struct__IO_FILE.__slots__ = [
'_flags',
'_IO_read_ptr',
'_IO_read_end',
'_IO_read_base',
'_IO_write_base',
'_IO_write_ptr',
'_IO_write_end',
'_IO_buf_base',
'_IO_buf_end',
'_IO_save_base',
'_IO_backup_base',
'_IO_save_end',
'_markers',
'_chain',
'_fileno',
'_flags2',
'_old_offset',
'_cur_column',
'_vtable_offset',
'_shortbuf',
'_lock',
'_offset',
'__pad1',
'__pad2',
'__pad3',
'__pad4',
'__pad5',
'_mode',
'_unused2',
]
struct__IO_FILE._fields_ = [
('_flags', c_int),
('_IO_read_ptr', String),
('_IO_read_end', String),
('_IO_read_base', String),
('_IO_write_base', String),
('_IO_write_ptr', String),
('_IO_write_end', String),
('_IO_buf_base', String),
('_IO_buf_end', String),
('_IO_save_base', String),
('_IO_backup_base', String),
('_IO_save_end', String),
('_markers', POINTER(struct__IO_marker)),
('_chain', POINTER(struct__IO_FILE)),
('_fileno', c_int),
('_flags2', c_int),
('_old_offset', __off_t),
('_cur_column', c_ushort),
('_vtable_offset', c_char),
('_shortbuf', c_char * 1),
('_lock', POINTER(_IO_lock_t)),
('_offset', __off64_t),
('__pad1', POINTER(None)),
('__pad2', POINTER(None)),
('__pad3', POINTER(None)),
('__pad4', POINTER(None)),
('__pad5', c_size_t),
('_mode', c_int),
('_unused2', c_char * (((15 * sizeof(c_int)) - (4 * sizeof(POINTER(None)))) - sizeof(c_size_t))),
]
Uint8 = c_uint8 # /usr/include/SDL/SDL_stdinc.h: 99
Sint16 = c_int16 # /usr/include/SDL/SDL_stdinc.h: 100
Uint16 = c_uint16 # /usr/include/SDL/SDL_stdinc.h: 101
Uint32 = c_uint32 # /usr/include/SDL/SDL_stdinc.h: 103
# /usr/include/SDL/SDL_error.h: 43
if hasattr(_libs['SDL_mixer'], 'SDL_SetError'):
_func = _libs['SDL_mixer'].SDL_SetError
_restype = None
_argtypes = [String]
SDL_SetError = _variadic_function(_func,_restype,_argtypes)
# /usr/include/SDL/SDL_error.h: 44
if hasattr(_libs['SDL_mixer'], 'SDL_GetError'):
SDL_GetError = _libs['SDL_mixer'].SDL_GetError
SDL_GetError.argtypes = []
if sizeof(c_int) == sizeof(c_void_p):
SDL_GetError.restype = ReturnString
else:
SDL_GetError.restype = String
SDL_GetError.errcheck = ReturnString
# /usr/include/SDL/SDL_rwops.h: 42
class struct_SDL_RWops(Structure):
pass
# /usr/include/SDL/SDL_rwops.h: 78
class struct_anon_30(Structure):
pass
struct_anon_30.__slots__ = [
'autoclose',
'fp',
]
struct_anon_30._fields_ = [
('autoclose', c_int),
('fp', POINTER(FILE)),
]
# /usr/include/SDL/SDL_rwops.h: 83
class struct_anon_31(Structure):
pass
struct_anon_31.__slots__ = [
'base',
'here',
'stop',
]
struct_anon_31._fields_ = [
('base', POINTER(Uint8)),
('here', POINTER(Uint8)),
('stop', POINTER(Uint8)),
]
# /usr/include/SDL/SDL_rwops.h: 88
class struct_anon_32(Structure):
pass
struct_anon_32.__slots__ = [
'data1',
]
struct_anon_32._fields_ = [
('data1', POINTER(None)),
]
# /usr/include/SDL/SDL_rwops.h: 65
class union_anon_33(Union):
pass
union_anon_33.__slots__ = [
'stdio',
'mem',
'unknown',
]
union_anon_33._fields_ = [
('stdio', struct_anon_30),
('mem', struct_anon_31),
('unknown', struct_anon_32),
]
struct_SDL_RWops.__slots__ = [
'seek',
'read',
'write',
'close',
'type',
'hidden',
]
struct_SDL_RWops._fields_ = [
('seek', CFUNCTYPE(UNCHECKED(c_int), POINTER(struct_SDL_RWops), c_int, c_int)),
('read', CFUNCTYPE(UNCHECKED(c_int), POINTER(struct_SDL_RWops), POINTER(None), c_int, c_int)),
('write', CFUNCTYPE(UNCHECKED(c_int), POINTER(struct_SDL_RWops), POINTER(None), c_int, c_int)),
('close', CFUNCTYPE(UNCHECKED(c_int), POINTER(struct_SDL_RWops))),
('type', Uint32),
('hidden', union_anon_33),
]
SDL_RWops = struct_SDL_RWops # /usr/include/SDL/SDL_rwops.h: 93
# /usr/include/SDL/SDL_rwops.h: 99
if hasattr(_libs['SDL_mixer'], 'SDL_RWFromFile'):
SDL_RWFromFile = _libs['SDL_mixer'].SDL_RWFromFile
SDL_RWFromFile.argtypes = [String, String]
SDL_RWFromFile.restype = POINTER(SDL_RWops)
# /usr/include/SDL/SDL_version.h: 51
class struct_SDL_version(Structure):
pass
struct_SDL_version.__slots__ = [
'major',
'minor',
'patch',
]
struct_SDL_version._fields_ = [
('major', Uint8),
('minor', Uint8),
('patch', Uint8),
]
SDL_version = struct_SDL_version # /usr/include/SDL/SDL_version.h: 51
# /usr/include/SDL/SDL_mixer.h: 65
if hasattr(_libs['SDL_mixer'], 'Mix_Linked_Version'):
Mix_Linked_Version = _libs['SDL_mixer'].Mix_Linked_Version
Mix_Linked_Version.argtypes = []
Mix_Linked_Version.restype = POINTER(SDL_version)
enum_anon_35 = c_int # /usr/include/SDL/SDL_mixer.h: 74
MIX_INIT_FLAC = 1 # /usr/include/SDL/SDL_mixer.h: 74
MIX_INIT_MOD = 2 # /usr/include/SDL/SDL_mixer.h: 74
MIX_INIT_MP3 = 4 # /usr/include/SDL/SDL_mixer.h: 74
MIX_INIT_OGG = 8 # /usr/include/SDL/SDL_mixer.h: 74
MIX_INIT_FLUIDSYNTH = 16 # /usr/include/SDL/SDL_mixer.h: 74
MIX_InitFlags = enum_anon_35 # /usr/include/SDL/SDL_mixer.h: 74
# /usr/include/SDL/SDL_mixer.h: 80
if hasattr(_libs['SDL_mixer'], 'Mix_Init'):
Mix_Init = _libs['SDL_mixer'].Mix_Init
Mix_Init.argtypes = [c_int]
Mix_Init.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 83
if hasattr(_libs['SDL_mixer'], 'Mix_Quit'):
Mix_Quit = _libs['SDL_mixer'].Mix_Quit
Mix_Quit.argtypes = []
Mix_Quit.restype = None
# /usr/include/SDL/SDL_mixer.h: 107
class struct_Mix_Chunk(Structure):
pass
struct_Mix_Chunk.__slots__ = [
'allocated',
'abuf',
'alen',
'volume',
]
struct_Mix_Chunk._fields_ = [
('allocated', c_int),
('abuf', POINTER(Uint8)),
('alen', Uint32),
('volume', Uint8),
]
Mix_Chunk = struct_Mix_Chunk # /usr/include/SDL/SDL_mixer.h: 107
enum_anon_36 = c_int # /usr/include/SDL/SDL_mixer.h: 114
MIX_NO_FADING = 0 # /usr/include/SDL/SDL_mixer.h: 114
MIX_FADING_OUT = (MIX_NO_FADING + 1) # /usr/include/SDL/SDL_mixer.h: 114
MIX_FADING_IN = (MIX_FADING_OUT + 1) # /usr/include/SDL/SDL_mixer.h: 114
Mix_Fading = enum_anon_36 # /usr/include/SDL/SDL_mixer.h: 114
enum_anon_37 = c_int # /usr/include/SDL/SDL_mixer.h: 127
MUS_NONE = 0 # /usr/include/SDL/SDL_mixer.h: 127
MUS_CMD = (MUS_NONE + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_WAV = (MUS_CMD + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_MOD = (MUS_WAV + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_MID = (MUS_MOD + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_OGG = (MUS_MID + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_MP3 = (MUS_OGG + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_MP3_MAD = (MUS_MP3 + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_FLAC = (MUS_MP3_MAD + 1) # /usr/include/SDL/SDL_mixer.h: 127
MUS_MODPLUG = (MUS_FLAC + 1) # /usr/include/SDL/SDL_mixer.h: 127
Mix_MusicType = enum_anon_37 # /usr/include/SDL/SDL_mixer.h: 127
# /usr/include/SDL/SDL_mixer.h: 130
class struct__Mix_Music(Structure):
pass
Mix_Music = struct__Mix_Music # /usr/include/SDL/SDL_mixer.h: 130
# /usr/include/SDL/SDL_mixer.h: 133
if hasattr(_libs['SDL_mixer'], 'Mix_OpenAudio'):
Mix_OpenAudio = _libs['SDL_mixer'].Mix_OpenAudio
Mix_OpenAudio.argtypes = [c_int, Uint16, c_int, c_int]
Mix_OpenAudio.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 141
if hasattr(_libs['SDL_mixer'], 'Mix_AllocateChannels'):
Mix_AllocateChannels = _libs['SDL_mixer'].Mix_AllocateChannels
Mix_AllocateChannels.argtypes = [c_int]
Mix_AllocateChannels.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 146
if hasattr(_libs['SDL_mixer'], 'Mix_QuerySpec'):
Mix_QuerySpec = _libs['SDL_mixer'].Mix_QuerySpec
Mix_QuerySpec.argtypes = [POINTER(c_int), POINTER(Uint16), POINTER(c_int)]
Mix_QuerySpec.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 149
if hasattr(_libs['SDL_mixer'], 'Mix_LoadWAV_RW'):
Mix_LoadWAV_RW = _libs['SDL_mixer'].Mix_LoadWAV_RW
Mix_LoadWAV_RW.argtypes = [POINTER(SDL_RWops), c_int]
Mix_LoadWAV_RW.restype = POINTER(Mix_Chunk)
# /usr/include/SDL/SDL_mixer.h: 151
if hasattr(_libs['SDL_mixer'], 'Mix_LoadMUS'):
Mix_LoadMUS = _libs['SDL_mixer'].Mix_LoadMUS
Mix_LoadMUS.argtypes = [String]
Mix_LoadMUS.restype = POINTER(Mix_Music)
# /usr/include/SDL/SDL_mixer.h: 155
if hasattr(_libs['SDL_mixer'], 'Mix_LoadMUS_RW'):
Mix_LoadMUS_RW = _libs['SDL_mixer'].Mix_LoadMUS_RW
Mix_LoadMUS_RW.argtypes = [POINTER(SDL_RWops)]
Mix_LoadMUS_RW.restype = POINTER(Mix_Music)
# /usr/include/SDL/SDL_mixer.h: 158
if hasattr(_libs['SDL_mixer'], 'Mix_LoadMUSType_RW'):
Mix_LoadMUSType_RW = _libs['SDL_mixer'].Mix_LoadMUSType_RW
Mix_LoadMUSType_RW.argtypes = [POINTER(SDL_RWops), Mix_MusicType, c_int]
Mix_LoadMUSType_RW.restype = POINTER(Mix_Music)
# /usr/include/SDL/SDL_mixer.h: 161
if hasattr(_libs['SDL_mixer'], 'Mix_QuickLoad_WAV'):
Mix_QuickLoad_WAV = _libs['SDL_mixer'].Mix_QuickLoad_WAV
Mix_QuickLoad_WAV.argtypes = [POINTER(Uint8)]
Mix_QuickLoad_WAV.restype = POINTER(Mix_Chunk)
# /usr/include/SDL/SDL_mixer.h: 164
if hasattr(_libs['SDL_mixer'], 'Mix_QuickLoad_RAW'):
Mix_QuickLoad_RAW = _libs['SDL_mixer'].Mix_QuickLoad_RAW
Mix_QuickLoad_RAW.argtypes = [POINTER(Uint8), Uint32]
Mix_QuickLoad_RAW.restype = POINTER(Mix_Chunk)
# /usr/include/SDL/SDL_mixer.h: 167
if hasattr(_libs['SDL_mixer'], 'Mix_FreeChunk'):
Mix_FreeChunk = _libs['SDL_mixer'].Mix_FreeChunk
Mix_FreeChunk.argtypes = [POINTER(Mix_Chunk)]
Mix_FreeChunk.restype = None
# /usr/include/SDL/SDL_mixer.h: 168
if hasattr(_libs['SDL_mixer'], 'Mix_FreeMusic'):
Mix_FreeMusic = _libs['SDL_mixer'].Mix_FreeMusic
Mix_FreeMusic.argtypes = [POINTER(Mix_Music)]
Mix_FreeMusic.restype = None
# /usr/include/SDL/SDL_mixer.h: 189
if hasattr(_libs['SDL_mixer'], 'Mix_GetNumChunkDecoders'):
Mix_GetNumChunkDecoders = _libs['SDL_mixer'].Mix_GetNumChunkDecoders
Mix_GetNumChunkDecoders.argtypes = []
Mix_GetNumChunkDecoders.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 190
if hasattr(_libs['SDL_mixer'], 'Mix_GetChunkDecoder'):
Mix_GetChunkDecoder = _libs['SDL_mixer'].Mix_GetChunkDecoder
Mix_GetChunkDecoder.argtypes = [c_int]
if sizeof(c_int) == sizeof(c_void_p):
Mix_GetChunkDecoder.restype = ReturnString
else:
Mix_GetChunkDecoder.restype = String
Mix_GetChunkDecoder.errcheck = ReturnString
# /usr/include/SDL/SDL_mixer.h: 191
if hasattr(_libs['SDL_mixer'], 'Mix_GetNumMusicDecoders'):
Mix_GetNumMusicDecoders = _libs['SDL_mixer'].Mix_GetNumMusicDecoders
Mix_GetNumMusicDecoders.argtypes = []
Mix_GetNumMusicDecoders.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 192
if hasattr(_libs['SDL_mixer'], 'Mix_GetMusicDecoder'):
Mix_GetMusicDecoder = _libs['SDL_mixer'].Mix_GetMusicDecoder
Mix_GetMusicDecoder.argtypes = [c_int]
if sizeof(c_int) == sizeof(c_void_p):
Mix_GetMusicDecoder.restype = ReturnString
else:
Mix_GetMusicDecoder.restype = String
Mix_GetMusicDecoder.errcheck = ReturnString
# /usr/include/SDL/SDL_mixer.h: 197
if hasattr(_libs['SDL_mixer'], 'Mix_GetMusicType'):
Mix_GetMusicType = _libs['SDL_mixer'].Mix_GetMusicType
Mix_GetMusicType.argtypes = [POINTER(Mix_Music)]
Mix_GetMusicType.restype = Mix_MusicType
# /usr/include/SDL/SDL_mixer.h: 203
if hasattr(_libs['SDL_mixer'], 'Mix_SetPostMix'):
Mix_SetPostMix = _libs['SDL_mixer'].Mix_SetPostMix
Mix_SetPostMix.argtypes = [CFUNCTYPE(UNCHECKED(None), POINTER(None), POINTER(Uint8), c_int), POINTER(None)]
Mix_SetPostMix.restype = None
# /usr/include/SDL/SDL_mixer.h: 209
if hasattr(_libs['SDL_mixer'], 'Mix_HookMusic'):
Mix_HookMusic = _libs['SDL_mixer'].Mix_HookMusic
Mix_HookMusic.argtypes = [CFUNCTYPE(UNCHECKED(None), POINTER(None), POINTER(Uint8), c_int), POINTER(None)]
Mix_HookMusic.restype = None
# /usr/include/SDL/SDL_mixer.h: 215
if hasattr(_libs['SDL_mixer'], 'Mix_HookMusicFinished'):
Mix_HookMusicFinished = _libs['SDL_mixer'].Mix_HookMusicFinished
Mix_HookMusicFinished.argtypes = [CFUNCTYPE(UNCHECKED(None), )]
Mix_HookMusicFinished.restype = None
# /usr/include/SDL/SDL_mixer.h: 218
if hasattr(_libs['SDL_mixer'], 'Mix_GetMusicHookData'):
Mix_GetMusicHookData = _libs['SDL_mixer'].Mix_GetMusicHookData
Mix_GetMusicHookData.argtypes = []
Mix_GetMusicHookData.restype = POINTER(None)
# /usr/include/SDL/SDL_mixer.h: 228
if hasattr(_libs['SDL_mixer'], 'Mix_ChannelFinished'):
Mix_ChannelFinished = _libs['SDL_mixer'].Mix_ChannelFinished
Mix_ChannelFinished.argtypes = [CFUNCTYPE(UNCHECKED(None), c_int)]
Mix_ChannelFinished.restype = None
Mix_EffectFunc_t = CFUNCTYPE(UNCHECKED(None), c_int, POINTER(None), c_int, POINTER(None)) # /usr/include/SDL/SDL_mixer.h: 252
Mix_EffectDone_t = CFUNCTYPE(UNCHECKED(None), c_int, POINTER(None)) # /usr/include/SDL/SDL_mixer.h: 263
# /usr/include/SDL/SDL_mixer.h: 312
if hasattr(_libs['SDL_mixer'], 'Mix_RegisterEffect'):
Mix_RegisterEffect = _libs['SDL_mixer'].Mix_RegisterEffect
Mix_RegisterEffect.argtypes = [c_int, Mix_EffectFunc_t, Mix_EffectDone_t, POINTER(None)]
Mix_RegisterEffect.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 324
if hasattr(_libs['SDL_mixer'], 'Mix_UnregisterEffect'):
Mix_UnregisterEffect = _libs['SDL_mixer'].Mix_UnregisterEffect
Mix_UnregisterEffect.argtypes = [c_int, Mix_EffectFunc_t]
Mix_UnregisterEffect.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 338
if hasattr(_libs['SDL_mixer'], 'Mix_UnregisterAllEffects'):
Mix_UnregisterAllEffects = _libs['SDL_mixer'].Mix_UnregisterAllEffects
Mix_UnregisterAllEffects.argtypes = [c_int]
Mix_UnregisterAllEffects.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 378
if hasattr(_libs['SDL_mixer'], 'Mix_SetPanning'):
Mix_SetPanning = _libs['SDL_mixer'].Mix_SetPanning
Mix_SetPanning.argtypes = [c_int, Uint8, Uint8]
Mix_SetPanning.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 418
if hasattr(_libs['SDL_mixer'], 'Mix_SetPosition'):
Mix_SetPosition = _libs['SDL_mixer'].Mix_SetPosition
Mix_SetPosition.argtypes = [c_int, Sint16, Uint8]
Mix_SetPosition.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 448
if hasattr(_libs['SDL_mixer'], 'Mix_SetDistance'):
Mix_SetDistance = _libs['SDL_mixer'].Mix_SetDistance
Mix_SetDistance.argtypes = [c_int, Uint8]
Mix_SetDistance.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 495
if hasattr(_libs['SDL_mixer'], 'Mix_SetReverseStereo'):
Mix_SetReverseStereo = _libs['SDL_mixer'].Mix_SetReverseStereo
Mix_SetReverseStereo.argtypes = [c_int, c_int]
Mix_SetReverseStereo.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 504
if hasattr(_libs['SDL_mixer'], 'Mix_ReserveChannels'):
Mix_ReserveChannels = _libs['SDL_mixer'].Mix_ReserveChannels
Mix_ReserveChannels.argtypes = [c_int]
Mix_ReserveChannels.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 514
if hasattr(_libs['SDL_mixer'], 'Mix_GroupChannel'):
Mix_GroupChannel = _libs['SDL_mixer'].Mix_GroupChannel
Mix_GroupChannel.argtypes = [c_int, c_int]
Mix_GroupChannel.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 516
if hasattr(_libs['SDL_mixer'], 'Mix_GroupChannels'):
Mix_GroupChannels = _libs['SDL_mixer'].Mix_GroupChannels
Mix_GroupChannels.argtypes = [c_int, c_int, c_int]
Mix_GroupChannels.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 520
if hasattr(_libs['SDL_mixer'], 'Mix_GroupAvailable'):
Mix_GroupAvailable = _libs['SDL_mixer'].Mix_GroupAvailable
Mix_GroupAvailable.argtypes = [c_int]
Mix_GroupAvailable.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 524
if hasattr(_libs['SDL_mixer'], 'Mix_GroupCount'):
Mix_GroupCount = _libs['SDL_mixer'].Mix_GroupCount
Mix_GroupCount.argtypes = [c_int]
Mix_GroupCount.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 526
if hasattr(_libs['SDL_mixer'], 'Mix_GroupOldest'):
Mix_GroupOldest = _libs['SDL_mixer'].Mix_GroupOldest
Mix_GroupOldest.argtypes = [c_int]
Mix_GroupOldest.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 528
if hasattr(_libs['SDL_mixer'], 'Mix_GroupNewer'):
Mix_GroupNewer = _libs['SDL_mixer'].Mix_GroupNewer
Mix_GroupNewer.argtypes = [c_int]
Mix_GroupNewer.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 538
if hasattr(_libs['SDL_mixer'], 'Mix_PlayChannelTimed'):
Mix_PlayChannelTimed = _libs['SDL_mixer'].Mix_PlayChannelTimed
Mix_PlayChannelTimed.argtypes = [c_int, POINTER(Mix_Chunk), c_int, c_int]
Mix_PlayChannelTimed.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 539
if hasattr(_libs['SDL_mixer'], 'Mix_PlayMusic'):
Mix_PlayMusic = _libs['SDL_mixer'].Mix_PlayMusic
Mix_PlayMusic.argtypes = [POINTER(Mix_Music), c_int]
Mix_PlayMusic.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 542
if hasattr(_libs['SDL_mixer'], 'Mix_FadeInMusic'):
Mix_FadeInMusic = _libs['SDL_mixer'].Mix_FadeInMusic
Mix_FadeInMusic.argtypes = [POINTER(Mix_Music), c_int, c_int]
Mix_FadeInMusic.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 543
if hasattr(_libs['SDL_mixer'], 'Mix_FadeInMusicPos'):
Mix_FadeInMusicPos = _libs['SDL_mixer'].Mix_FadeInMusicPos
Mix_FadeInMusicPos.argtypes = [POINTER(Mix_Music), c_int, c_int, c_double]
Mix_FadeInMusicPos.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 545
if hasattr(_libs['SDL_mixer'], 'Mix_FadeInChannelTimed'):
Mix_FadeInChannelTimed = _libs['SDL_mixer'].Mix_FadeInChannelTimed
Mix_FadeInChannelTimed.argtypes = [c_int, POINTER(Mix_Chunk), c_int, c_int, c_int]
Mix_FadeInChannelTimed.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 552
if hasattr(_libs['SDL_mixer'], 'Mix_Volume'):
Mix_Volume = _libs['SDL_mixer'].Mix_Volume
Mix_Volume.argtypes = [c_int, c_int]
Mix_Volume.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 553
if hasattr(_libs['SDL_mixer'], 'Mix_VolumeChunk'):
Mix_VolumeChunk = _libs['SDL_mixer'].Mix_VolumeChunk
Mix_VolumeChunk.argtypes = [POINTER(Mix_Chunk), c_int]
Mix_VolumeChunk.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 554
if hasattr(_libs['SDL_mixer'], 'Mix_VolumeMusic'):
Mix_VolumeMusic = _libs['SDL_mixer'].Mix_VolumeMusic
Mix_VolumeMusic.argtypes = [c_int]
Mix_VolumeMusic.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 557
if hasattr(_libs['SDL_mixer'], 'Mix_HaltChannel'):
Mix_HaltChannel = _libs['SDL_mixer'].Mix_HaltChannel
Mix_HaltChannel.argtypes = [c_int]
Mix_HaltChannel.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 558
if hasattr(_libs['SDL_mixer'], 'Mix_HaltGroup'):
Mix_HaltGroup = _libs['SDL_mixer'].Mix_HaltGroup
Mix_HaltGroup.argtypes = [c_int]
Mix_HaltGroup.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 559
if hasattr(_libs['SDL_mixer'], 'Mix_HaltMusic'):
Mix_HaltMusic = _libs['SDL_mixer'].Mix_HaltMusic
Mix_HaltMusic.argtypes = []
Mix_HaltMusic.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 565
if hasattr(_libs['SDL_mixer'], 'Mix_ExpireChannel'):
Mix_ExpireChannel = _libs['SDL_mixer'].Mix_ExpireChannel
Mix_ExpireChannel.argtypes = [c_int, c_int]
Mix_ExpireChannel.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 571
if hasattr(_libs['SDL_mixer'], 'Mix_FadeOutChannel'):
Mix_FadeOutChannel = _libs['SDL_mixer'].Mix_FadeOutChannel
Mix_FadeOutChannel.argtypes = [c_int, c_int]
Mix_FadeOutChannel.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 572
if hasattr(_libs['SDL_mixer'], 'Mix_FadeOutGroup'):
Mix_FadeOutGroup = _libs['SDL_mixer'].Mix_FadeOutGroup
Mix_FadeOutGroup.argtypes = [c_int, c_int]
Mix_FadeOutGroup.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 573
if hasattr(_libs['SDL_mixer'], 'Mix_FadeOutMusic'):
Mix_FadeOutMusic = _libs['SDL_mixer'].Mix_FadeOutMusic
Mix_FadeOutMusic.argtypes = [c_int]
Mix_FadeOutMusic.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 576
if hasattr(_libs['SDL_mixer'], 'Mix_FadingMusic'):
Mix_FadingMusic = _libs['SDL_mixer'].Mix_FadingMusic
Mix_FadingMusic.argtypes = []
Mix_FadingMusic.restype = Mix_Fading
# /usr/include/SDL/SDL_mixer.h: 577
if hasattr(_libs['SDL_mixer'], 'Mix_FadingChannel'):
Mix_FadingChannel = _libs['SDL_mixer'].Mix_FadingChannel
Mix_FadingChannel.argtypes = [c_int]
Mix_FadingChannel.restype = Mix_Fading
# /usr/include/SDL/SDL_mixer.h: 580
if hasattr(_libs['SDL_mixer'], 'Mix_Pause'):
Mix_Pause = _libs['SDL_mixer'].Mix_Pause
Mix_Pause.argtypes = [c_int]
Mix_Pause.restype = None
# /usr/include/SDL/SDL_mixer.h: 581
if hasattr(_libs['SDL_mixer'], 'Mix_Resume'):
Mix_Resume = _libs['SDL_mixer'].Mix_Resume
Mix_Resume.argtypes = [c_int]
Mix_Resume.restype = None
# /usr/include/SDL/SDL_mixer.h: 582
if hasattr(_libs['SDL_mixer'], 'Mix_Paused'):
Mix_Paused = _libs['SDL_mixer'].Mix_Paused
Mix_Paused.argtypes = [c_int]
Mix_Paused.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 585
if hasattr(_libs['SDL_mixer'], 'Mix_PauseMusic'):
Mix_PauseMusic = _libs['SDL_mixer'].Mix_PauseMusic
Mix_PauseMusic.argtypes = []
Mix_PauseMusic.restype = None
# /usr/include/SDL/SDL_mixer.h: 586
if hasattr(_libs['SDL_mixer'], 'Mix_ResumeMusic'):
Mix_ResumeMusic = _libs['SDL_mixer'].Mix_ResumeMusic
Mix_ResumeMusic.argtypes = []
Mix_ResumeMusic.restype = None
# /usr/include/SDL/SDL_mixer.h: 587
if hasattr(_libs['SDL_mixer'], 'Mix_RewindMusic'):
Mix_RewindMusic = _libs['SDL_mixer'].Mix_RewindMusic
Mix_RewindMusic.argtypes = []
Mix_RewindMusic.restype = None
# /usr/include/SDL/SDL_mixer.h: 588
if hasattr(_libs['SDL_mixer'], 'Mix_PausedMusic'):
Mix_PausedMusic = _libs['SDL_mixer'].Mix_PausedMusic
Mix_PausedMusic.argtypes = []
Mix_PausedMusic.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 596
if hasattr(_libs['SDL_mixer'], 'Mix_SetMusicPosition'):
Mix_SetMusicPosition = _libs['SDL_mixer'].Mix_SetMusicPosition
Mix_SetMusicPosition.argtypes = [c_double]
Mix_SetMusicPosition.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 601
if hasattr(_libs['SDL_mixer'], 'Mix_Playing'):
Mix_Playing = _libs['SDL_mixer'].Mix_Playing
Mix_Playing.argtypes = [c_int]
Mix_Playing.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 602
if hasattr(_libs['SDL_mixer'], 'Mix_PlayingMusic'):
Mix_PlayingMusic = _libs['SDL_mixer'].Mix_PlayingMusic
Mix_PlayingMusic.argtypes = []
Mix_PlayingMusic.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 605
if hasattr(_libs['SDL_mixer'], 'Mix_SetMusicCMD'):
Mix_SetMusicCMD = _libs['SDL_mixer'].Mix_SetMusicCMD
Mix_SetMusicCMD.argtypes = [String]
Mix_SetMusicCMD.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 608
if hasattr(_libs['SDL_mixer'], 'Mix_SetSynchroValue'):
Mix_SetSynchroValue = _libs['SDL_mixer'].Mix_SetSynchroValue
Mix_SetSynchroValue.argtypes = [c_int]
Mix_SetSynchroValue.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 609
if hasattr(_libs['SDL_mixer'], 'Mix_GetSynchroValue'):
Mix_GetSynchroValue = _libs['SDL_mixer'].Mix_GetSynchroValue
Mix_GetSynchroValue.argtypes = []
Mix_GetSynchroValue.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 612
if hasattr(_libs['SDL_mixer'], 'Mix_SetSoundFonts'):
Mix_SetSoundFonts = _libs['SDL_mixer'].Mix_SetSoundFonts
Mix_SetSoundFonts.argtypes = [String]
Mix_SetSoundFonts.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 613
if hasattr(_libs['SDL_mixer'], 'Mix_GetSoundFonts'):
Mix_GetSoundFonts = _libs['SDL_mixer'].Mix_GetSoundFonts
Mix_GetSoundFonts.argtypes = []
if sizeof(c_int) == sizeof(c_void_p):
Mix_GetSoundFonts.restype = ReturnString
else:
Mix_GetSoundFonts.restype = String
Mix_GetSoundFonts.errcheck = ReturnString
# /usr/include/SDL/SDL_mixer.h: 614
if hasattr(_libs['SDL_mixer'], 'Mix_EachSoundFont'):
Mix_EachSoundFont = _libs['SDL_mixer'].Mix_EachSoundFont
Mix_EachSoundFont.argtypes = [CFUNCTYPE(UNCHECKED(c_int), String, POINTER(None)), POINTER(None)]
Mix_EachSoundFont.restype = c_int
# /usr/include/SDL/SDL_mixer.h: 619
if hasattr(_libs['SDL_mixer'], 'Mix_GetChunk'):
Mix_GetChunk = _libs['SDL_mixer'].Mix_GetChunk
Mix_GetChunk.argtypes = [c_int]
Mix_GetChunk.restype = POINTER(Mix_Chunk)
# /usr/include/SDL/SDL_mixer.h: 622
if hasattr(_libs['SDL_mixer'], 'Mix_CloseAudio'):
Mix_CloseAudio = _libs['SDL_mixer'].Mix_CloseAudio
Mix_CloseAudio.argtypes = []
Mix_CloseAudio.restype = None
# /usr/include/SDL/SDL_audio.h: 103
try:
AUDIO_S16LSB = 32784
except:
pass
# /usr/include/SDL/SDL_mixer.h: 41
try:
SDL_MIXER_MAJOR_VERSION = 1
except:
pass
# /usr/include/SDL/SDL_mixer.h: 42
try:
SDL_MIXER_MINOR_VERSION = 2
except:
pass
# /usr/include/SDL/SDL_mixer.h: 43
try:
SDL_MIXER_PATCHLEVEL = 12
except:
pass
# /usr/include/SDL/SDL_mixer.h: 56
try:
MIX_MAJOR_VERSION = SDL_MIXER_MAJOR_VERSION
except:
pass
# /usr/include/SDL/SDL_mixer.h: 57
try:
MIX_MINOR_VERSION = SDL_MIXER_MINOR_VERSION
except:
pass
# /usr/include/SDL/SDL_mixer.h: 58
try:
MIX_PATCHLEVEL = SDL_MIXER_PATCHLEVEL
except:
pass
# /usr/include/SDL/SDL_mixer.h: 88
try:
MIX_CHANNELS = 8
except:
pass
# /usr/include/SDL/SDL_mixer.h: 92
try:
MIX_DEFAULT_FREQUENCY = 22050
except:
pass
# /usr/include/SDL/SDL_mixer.h: 94
try:
MIX_DEFAULT_FORMAT = AUDIO_S16LSB
except:
pass
# /usr/include/SDL/SDL_mixer.h: 98
try:
MIX_DEFAULT_CHANNELS = 2
except:
pass
# /usr/include/SDL/SDL_mixer.h: 99
try:
MIX_MAX_VOLUME = 128
except:
pass
# /usr/include/SDL/SDL_mixer.h: 150
def Mix_LoadWAV(file):
return (Mix_LoadWAV_RW ((SDL_RWFromFile (file, c_char_p(b'rb'))), 1))
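# Note: Mix_LoadWAV mirrors the SDL_mixer C macro of the same name: the file
# is opened as an SDL_RWops in binary-read mode and ownership passes to
# Mix_LoadWAV_RW (freesrc=1), so the RWops is freed once the chunk is loaded.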
# /usr/include/SDL/SDL_mixer.h: 233
try:
MIX_CHANNEL_POST = (-2)
except:
pass
# /usr/include/SDL/SDL_mixer.h: 341
try:
MIX_EFFECTSMAXSPEED = 'MIX_EFFECTSMAXSPEED'
except:
pass
# /usr/include/SDL/SDL_mixer.h: 536
def Mix_PlayChannel(channel, chunk, loops):
return (Mix_PlayChannelTimed (channel, chunk, loops, (-1)))
# /usr/include/SDL/SDL_mixer.h: 544
def Mix_FadeInChannel(channel, chunk, loops, ms):
return (Mix_FadeInChannelTimed (channel, chunk, loops, ms, (-1)))
# /usr/include/SDL/SDL_mixer.h: 625
try:
Mix_SetError = SDL_SetError
except:
pass
# /usr/include/SDL/SDL_mixer.h: 626
try:
Mix_GetError = SDL_GetError
except:
pass
Mix_Chunk = struct_Mix_Chunk # /usr/include/SDL/SDL_mixer.h: 107
_Mix_Music = struct__Mix_Music # /usr/include/SDL/SDL_mixer.h: 130
# No inserted files
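# A minimal usage sketch, not part of the generated bindings. It assumes the
# standard SDL_mixer entry points Mix_OpenAudio and Mix_FreeChunk were bound
# earlier in this module, as they are in the header; every other name comes
# from the bindings above, and the function itself is illustrative only.
def _example_play_wav(path):
    """Opens the default mixer, plays one WAV chunk, then shuts down."""
    # path should be a bytes filename, e.g. b'door.wav'.
    if Mix_OpenAudio(MIX_DEFAULT_FREQUENCY, MIX_DEFAULT_FORMAT,
                     MIX_DEFAULT_CHANNELS, 1024) != 0:
        raise RuntimeError(Mix_GetError())
    chunk = Mix_LoadWAV(path)  # wraps Mix_LoadWAV_RW; see the macro above
    if not chunk:
        raise RuntimeError(Mix_GetError())
    channel = Mix_PlayChannel(-1, chunk, 0)  # first free channel, no loops
    while Mix_Playing(channel):
        pass  # a real program would sleep or pump its event loop here
    Mix_FreeChunk(chunk)  # assumed bound earlier, like Mix_OpenAudio
    Mix_CloseAudio()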
from django.core.exceptions import ObjectDoesNotExist
from django.test import TestCase
from django.urls import reverse
from core.models import ValidationScenario
from tests.utils import TestConfiguration
class ValidationScenarioTestCase(TestCase):
python_validation_payload = """def test_record_has_words(record, test_message='record has words'):
return True"""
SCHEMATRON_PAYLOAD = '''<?xml version="1.0" encoding="UTF-8"?>
<schema xmlns="http://purl.oclc.org/dsdl/schematron" xmlns:internet="http://internet.com">
<ns prefix="internet" uri="http://internet.com"/>
    <!-- Required top level Elements for all records -->
<pattern>
<title>Required Elements for Each MODS record</title>
<rule context="root">
<assert test="foo">There must be a foo element</assert>
</rule>
</pattern>
</schema>'''
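    # A Schematron <assert> passes when its test expression holds for the
    # rule context; the parsed-results test below reports this assert as
    # "passed" because the sample record carries a foo element under root.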
def setUp(self):
self.config = TestConfiguration()
self.client.force_login(self.config.user)
def test_create_validation_scenario_get(self):
response = self.client.get(reverse('create_validation_scenario'))
self.assertIn(b'Create new Validation Scenario', response.content)
self.assertNotIn(b'Python Code Snippet', response.content)
def test_create_permitted_python_field_mapper_get(self):
with self.settings(ENABLE_PYTHON='true'):
response = self.client.get(reverse('create_validation_scenario'))
self.assertIn(b'Create new Validation Scenario', response.content)
self.assertIn(b'Python Code Snippet', response.content)
def test_create_validation_scenario_post(self):
post_body = {
'name': 'Test Validate',
'payload': 'Some elasticsearch query',
'validation_type': 'es_query'}
response = self.client.post(reverse('create_validation_scenario'), post_body)
self.assertRedirects(response, reverse('configuration'))
scenario = ValidationScenario.objects.get(name='Test Validate')
self.assertIsNotNone(scenario.id)
scenario_dict = scenario.as_dict()
for item in post_body:
self.assertEqual(scenario_dict[item], post_body[item])
def test_create_python_validation_scenario_post(self):
post_body = {
'name': 'Test Validate',
'payload': 'Some python code',
'validation_type': 'python'}
response = self.client.post(reverse('create_validation_scenario'), post_body)
self.assertIn(b'Select a valid choice', response.content)
def test_create_permitted_python_validation_scenario_post(self):
with self.settings(ENABLE_PYTHON='true'):
post_body = {
'name': 'Test Validate',
'payload': 'Some python code',
'validation_type': 'python'}
response = self.client.post(reverse('create_validation_scenario'), post_body)
self.assertRedirects(response, reverse('configuration'))
scenario = ValidationScenario.objects.get(name='Test Validate')
self.assertIsNotNone(scenario.id)
scenario_dict = scenario.as_dict()
for item in post_body:
self.assertEqual(scenario_dict[item], post_body[item])
def test_create_validation_scenario_invalid(self):
response = self.client.post(reverse('create_validation_scenario'), {})
self.assertIn(b'This field is required.', response.content)
def test_edit_validation_scenario_get(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some elasticsearch query',
validation_type='es_query')
response = self.client.get(reverse('validation_scenario', args=[scenario.id]))
self.assertIn(b'Test Validate', response.content)
def test_edit_python_validation_scenario_get(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some python code',
validation_type='python')
response = self.client.get(reverse('validation_scenario', args=[scenario.id]))
self.assertIn(b'Select a valid choice. python is not one of the available choices', response.content)
def test_edit_permitted_python_validation_scenario_get(self):
with self.settings(ENABLE_PYTHON='true'):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some python code',
validation_type='python')
response = self.client.get(reverse('validation_scenario', args=[scenario.id]))
self.assertNotIn(b'Select a valid choice. python is not one of the available choices', response.content)
def test_edit_validation_scenario_post(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some schematron thing',
validation_type='sch')
response = self.client.post(reverse('validation_scenario', args=[scenario.id]), {
'payload': ValidationScenarioTestCase.SCHEMATRON_PAYLOAD,
'name': scenario.name,
'validation_type': scenario.validation_type
})
self.assertRedirects(response, reverse('configuration'))
scenario = ValidationScenario.objects.get(name='Test Validate')
self.assertIsNotNone(scenario.id)
self.assertEqual(scenario.name, 'Test Validate')
self.assertEqual(scenario.payload, ValidationScenarioTestCase.SCHEMATRON_PAYLOAD)
def test_edit_python_validation_scenario_post(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some python code',
validation_type='python')
response = self.client.post(reverse('validation_scenario', args=[scenario.id]), {
'payload': ValidationScenarioTestCase.python_validation_payload,
'name': scenario.name,
'validation_type': scenario.validation_type
})
self.assertIn(b'Select a valid choice. python is not one of the available choices', response.content)
def test_edit_permitted_python_validation_scenario_post(self):
with self.settings(ENABLE_PYTHON='true'):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some python code',
validation_type='python')
response = self.client.post(reverse('validation_scenario', args=[scenario.id]), {
'payload': ValidationScenarioTestCase.python_validation_payload,
'name': scenario.name,
'validation_type': scenario.validation_type
})
self.assertRedirects(response, reverse('configuration'))
scenario = ValidationScenario.objects.get(name='Test Validate')
self.assertIsNotNone(scenario.id)
self.assertEqual(scenario.name, 'Test Validate')
self.assertEqual(scenario.payload, ValidationScenarioTestCase.python_validation_payload)
def test_edit_validation_scenario_invalid(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some python code',
validation_type='python')
response = self.client.post(reverse('validation_scenario', args=[scenario.id]), {
'payload': ValidationScenarioTestCase.python_validation_payload,
})
self.assertIn(b'This field is required.', response.content)
def test_validation_scenario_delete(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some python code',
validation_type='python')
response = self.client.delete(reverse('delete_validation_scenario', args=[scenario.id]))
self.assertRedirects(response, reverse('configuration'))
with self.assertRaises(ObjectDoesNotExist):
ValidationScenario.objects.get(pk=int(scenario.id))
def test_validation_scenario_delete_nonexistent(self):
response = self.client.delete(reverse('delete_validation_scenario', args=[12345]))
self.assertRedirects(response, reverse('configuration'))
def test_validation_scenario_payload(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some python code',
validation_type='python')
response = self.client.get(reverse('validation_scenario_payload', args=[scenario.id]))
self.assertEqual(b'Some python code', response.content)
def test_validation_scenario_payload_xml(self):
scenario = ValidationScenario.objects.create(name='Test Validate',
payload='Some schematron',
validation_type='sch')
response = self.client.get(reverse('validation_scenario_payload', args=[scenario.id]))
self.assertEqual(b'Some schematron', response.content)
def test_validation_scenario_test(self):
response = self.client.get(reverse('test_validation_scenario'))
self.assertIn(b'Test Validation Scenario', response.content)
self.assertNotIn(b'Python Code Snippet', response.content)
def test_get_test_validation_scenario_python_permitted(self):
with self.settings(ENABLE_PYTHON='true'):
response = self.client.get(reverse('test_validation_scenario'))
self.assertIn(b'Test Validation Scenario', response.content)
self.assertIn(b'Python Code Snippet', response.content)
def test_validation_scenario_test_post_raw(self):
response = self.validation_scenario_test('raw')
        self.assertEqual(response['content-type'], 'text/plain')
self.assertEqual(b'<svrl:schematron-output xmlns:svrl="http://purl.oclc.org/dsdl/svrl" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:schold="http://www.ascc.net/xml/schematron" xmlns:sch="http://www.ascc.net/xml/schematron" xmlns:iso="http://purl.oclc.org/dsdl/schematron" xmlns:internet="http://internet.com" title="" schemaVersion=""><!--  \n\t\t  \n\t\t  \n\t\t --><svrl:ns-prefix-in-attribute-values uri="http://internet.com" prefix="internet"/><svrl:active-pattern name="Required Elements for Each MODS record"/><svrl:fired-rule context="root"/></svrl:schematron-output>',
response.content)
def test_validation_scenario_test_post_parsed(self):
response = self.validation_scenario_test('parsed')
        self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(b'{"fail_count": 0, "passed": ["There must be a foo element"], "failed": [], "total_tests": 1}',
response.content)
def test_validation_scenario_test_post_unrecognized(self):
response = self.validation_scenario_test('other')
self.assertEqual(b'validation results format not recognized', response.content)
def test_post_test_validation_scenario_python(self):
post_body = {
'vs_payload': ValidationScenarioTestCase.python_validation_payload,
'vs_type': 'python',
'db_id': self.config.record.id,
'vs_results_format': 'parsed'
}
response = self.client.post(reverse('test_validation_scenario'), post_body)
self.assertEqual(b'requested invalid type for validation scenario: python', response.content)
def validation_scenario_test(self, results_format):
return self.client.post(reverse('test_validation_scenario'), {
'vs_payload': ValidationScenarioTestCase.SCHEMATRON_PAYLOAD,
'vs_type': 'sch',
'db_id': self.config.record.id,
'vs_results_format': results_format
})
# -*- coding: utf-8 -*-
"""This file contains MRUListEx Windows Registry plugins."""
import abc
import logging
import construct
from plaso.events import windows_events
from plaso.lib import binary
from plaso.parsers import winreg
from plaso.parsers.shared import shell_items
from plaso.parsers.winreg_plugins import interface
class BaseMRUListExPlugin(interface.WindowsRegistryPlugin):
"""Class for common MRUListEx Windows Registry plugin functionality."""
_MRULISTEX_STRUCT = construct.Range(
1, 500, construct.ULInt32(u'entry_number'))
_SOURCE_APPEND = u': MRUListEx'
@abc.abstractmethod
def _ParseMRUListExEntryValue(
self, parser_mediator, key, entry_index, entry_number, **kwargs):
"""Parses the MRUListEx entry value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: the Registry key (instance of dfwinreg.WinRegistryKey) that contains
the MRUListEx value.
entry_index: integer value representing the MRUListEx entry index.
entry_number: integer value representing the entry number.
Returns:
A string containing the value.
"""
def _ParseMRUListExValue(self, key):
"""Parses the MRUListEx value in a given Registry key.
Args:
key: the Registry key (instance of dfwinreg.WinRegistryKey) that contains
the MRUListEx value.
Returns:
A MRUListEx value generator, which returns the MRU index number
and entry value.
"""
mru_list_value = key.GetValueByName(u'MRUListEx')
# The key exists but does not contain a value named "MRUListEx".
if not mru_list_value:
return enumerate([])
try:
mru_list = self._MRULISTEX_STRUCT.parse(mru_list_value.data)
except construct.FieldError:
logging.warning(u'[{0:s}] Unable to parse the MRU key: {1:s}'.format(
self.NAME, key.path))
return enumerate([])
return enumerate(mru_list)
def _ParseMRUListExKey(
self, parser_mediator, key, registry_file_type=None, codepage=u'cp1252'):
"""Extract event objects from a MRUListEx Registry key.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: the Registry key (instance of dfwinreg.WinRegistryKey).
registry_file_type: Optional string containing the Windows Registry file
type, e.g. NTUSER, SOFTWARE. The default is None.
codepage: Optional extended ASCII string codepage. The default is cp1252.
"""
values_dict = {}
for entry_index, entry_number in self._ParseMRUListExValue(key):
# TODO: detect if list ends prematurely.
# MRU lists are terminated with 0xffffffff (-1).
if entry_number == 0xffffffff:
break
value_string = self._ParseMRUListExEntryValue(
parser_mediator, key, entry_index, entry_number, codepage=codepage)
value_text = u'Index: {0:d} [MRU Value {1:d}]'.format(
entry_index + 1, entry_number)
values_dict[value_text] = value_string
event_object = windows_events.WindowsRegistryEvent(
key.last_written_time, key.path, values_dict,
offset=key.offset, registry_file_type=registry_file_type,
source_append=self._SOURCE_APPEND)
parser_mediator.ProduceEvent(event_object)
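# Illustrative sketch, not part of plaso: it shows the MRUListEx wire format
# handled above, a sequence of little-endian 32-bit entry numbers ordered
# most recently used first and terminated by 0xffffffff. The function name
# and sample bytes are hypothetical.
def _example_parse_mrulistex_value():
  """Parses sample MRUListEx value data: entries 1 and 0, then a terminator."""
  sample_data = b'\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff'
  mrulistex_struct = construct.Range(
      1, 500, construct.ULInt32(u'entry_number'))
  # Yields [1, 0, 0xffffffff]; _ParseMRUListExKey stops at the terminator.
  return mrulistex_struct.parse(sample_data)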
class MRUListExStringPlugin(BaseMRUListExPlugin):
"""Windows Registry plugin to parse a string MRUListEx."""
NAME = u'mrulistex_string'
DESCRIPTION = u'Parser for Most Recently Used (MRU) Registry data.'
REG_TYPE = u'any'
REG_VALUES = frozenset([u'MRUListEx', u'0'])
URLS = [
u'http://forensicartifacts.com/2011/02/recentdocs/',
u'https://github.com/libyal/winreg-kb/wiki/MRU-keys']
_STRING_STRUCT = construct.Struct(
u'string_and_shell_item',
construct.RepeatUntil(
lambda obj, ctx: obj == b'\x00\x00', construct.Field(u'string', 2)))
def _ParseMRUListExEntryValue(
self, parser_mediator, key, entry_index, entry_number, **unused_kwargs):
"""Parses the MRUListEx entry value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: the Registry key (instance of dfwinreg.WinRegistryKey) that contains
the MRUListEx value.
entry_index: integer value representing the MRUListEx entry index.
entry_number: integer value representing the entry number.
Returns:
A string containing the value.
"""
value_string = u''
value = key.GetValueByName(u'{0:d}'.format(entry_number))
if value is None:
logging.debug(
u'[{0:s}] Missing MRUListEx entry value: {1:d} in key: {2:s}.'.format(
self.NAME, entry_number, key.path))
elif value.DataIsString():
value_string = value.data
elif value.DataIsBinaryData():
logging.debug((
u'[{0:s}] Non-string MRUListEx entry value: {1:d} parsed as string '
u'in key: {2:s}.').format(self.NAME, entry_number, key.path))
utf16_stream = binary.ByteStreamCopyToUtf16Stream(value.data)
try:
value_string = utf16_stream.decode(u'utf-16-le')
except UnicodeDecodeError as exception:
value_string = binary.HexifyBuffer(utf16_stream)
logging.warning((
u'[{0:s}] Unable to decode UTF-16 stream: {1:s} in MRUListEx entry '
u'value: {2:d} in key: {3:s} with error: {4:s}').format(
self.NAME, value_string, entry_number, key.path, exception))
return value_string
def GetEntries(
self, parser_mediator, registry_key, codepage=u'cp1252',
registry_file_type=None, **kwargs):
"""Extract event objects from a Registry key containing a MRUListEx value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
registry_key: A Windows Registry key (instance of
dfwinreg.WinRegistryKey).
codepage: Optional extended ASCII string codepage. The default is cp1252.
registry_file_type: Optional string containing the Windows Registry file
type, e.g. NTUSER, SOFTWARE. The default is None.
"""
self._ParseMRUListExKey(
parser_mediator, registry_key, codepage=codepage,
registry_file_type=registry_file_type)
def Process(self, parser_mediator, key=None, codepage=u'cp1252', **kwargs):
"""Determine if we can process this Registry key or not.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: Optional Windows Registry key (instance of dfwinreg.WinRegistryKey).
The default is None.
codepage: Optional extended ASCII string codepage. The default is cp1252.
"""
# Prevent this plugin triggering on sub paths of non-string MRUListEx
# values.
if (u'BagMRU' in key.path or u'Explorer\\StreamMRU' in key.path or
u'\\Explorer\\ComDlg32\\OpenSavePidlMRU' in key.path):
return
super(MRUListExStringPlugin, self).Process(
parser_mediator, key=key, codepage=codepage)
class MRUListExShellItemListPlugin(BaseMRUListExPlugin):
"""Windows Registry plugin to parse a shell item list MRUListEx."""
NAME = u'mrulistex_shell_item_list'
DESCRIPTION = u'Parser for Most Recently Used (MRU) Registry data.'
REG_TYPE = u'any'
REG_KEYS = frozenset([
      # Subkeys are named for a file extension (.jpg) or '*'; see GetEntries.
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\ComDlg32\\'
u'OpenSavePidlMRU'),
u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\StreamMRU'])
def _ParseMRUListExEntryValue(
self, parser_mediator, key, entry_index, entry_number, codepage=u'cp1252',
**unused_kwargs):
"""Parses the MRUListEx entry value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: the Registry key (instance of dfwinreg.WinRegistryKey) that contains
the MRUListEx value.
entry_index: integer value representing the MRUListEx entry index.
entry_number: integer value representing the entry number.
codepage: Optional extended ASCII string codepage. The default is cp1252.
Returns:
A string containing the value.
"""
value_string = u''
value = key.GetValueByName(u'{0:d}'.format(entry_number))
if value is None:
logging.debug(
u'[{0:s}] Missing MRUListEx entry value: {1:d} in key: {2:s}.'.format(
self.NAME, entry_number, key.path))
elif not value.DataIsBinaryData():
logging.debug((
u'[{0:s}] Non-binary MRUListEx entry value: {1:d} in key: '
u'{2:s}.').format(self.NAME, entry_number, key.path))
elif value.data:
shell_items_parser = shell_items.ShellItemsParser(key.path)
shell_items_parser.UpdateChainAndParse(
parser_mediator, value.data, None, codepage=codepage)
value_string = u'Shell item path: {0:s}'.format(
shell_items_parser.CopyToPath())
return value_string
def GetEntries(
self, parser_mediator, registry_key, codepage=u'cp1252',
registry_file_type=None, **kwargs):
"""Extract event objects from a Registry key containing a MRUListEx value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
registry_key: A Windows Registry key (instance of
dfwinreg.WinRegistryKey).
registry_file_type: Optional string containing the Windows Registry file
type, e.g. NTUSER, SOFTWARE. The default is None.
codepage: Optional extended ASCII string codepage. The default is cp1252.
"""
if registry_key.name != u'OpenSavePidlMRU':
self._ParseMRUListExKey(
parser_mediator, registry_key, codepage=codepage,
registry_file_type=registry_file_type)
if registry_key.name == u'OpenSavePidlMRU':
# For the OpenSavePidlMRU MRUListEx we also need to parse its subkeys
# since the Registry key path does not support wildcards yet.
for subkey in registry_key.GetSubkeys():
self._ParseMRUListExKey(
parser_mediator, subkey, codepage=codepage,
registry_file_type=registry_file_type)
class MRUListExStringAndShellItemPlugin(BaseMRUListExPlugin):
"""Windows Registry plugin to parse a string and shell item MRUListEx."""
NAME = u'mrulistex_string_and_shell_item'
DESCRIPTION = u'Parser for Most Recently Used (MRU) Registry data.'
REG_TYPE = u'any'
REG_KEYS = frozenset([
u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\RecentDocs'])
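  # The value data begins with a UTF-16 little-endian string, read two bytes
  # at a time up to and including its 0x0000 terminator; construct.Anchor
  # then records the offset at which the trailing shell item begins.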
_STRING_AND_SHELL_ITEM_STRUCT = construct.Struct(
u'string_and_shell_item',
construct.RepeatUntil(
lambda obj, ctx: obj == b'\x00\x00', construct.Field(u'string', 2)),
construct.Anchor(u'shell_item'))
def _ParseMRUListExEntryValue(
self, parser_mediator, key, entry_index, entry_number, codepage=u'cp1252',
**unused_kwargs):
"""Parses the MRUListEx entry value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: the Registry key (instance of dfwinreg.WinRegistryKey) that contains
the MRUListEx value.
entry_index: integer value representing the MRUListEx entry index.
entry_number: integer value representing the entry number.
codepage: Optional extended ASCII string codepage. The default is cp1252.
Returns:
A string containing the value.
"""
value_string = u''
value = key.GetValueByName(u'{0:d}'.format(entry_number))
if value is None:
logging.debug(
u'[{0:s}] Missing MRUListEx entry value: {1:d} in key: {2:s}.'.format(
self.NAME, entry_number, key.path))
elif not value.DataIsBinaryData():
logging.debug((
u'[{0:s}] Non-binary MRUListEx entry value: {1:d} in key: '
u'{2:s}.').format(self.NAME, entry_number, key.path))
elif value.data:
value_struct = self._STRING_AND_SHELL_ITEM_STRUCT.parse(value.data)
try:
# The struct includes the end-of-string character that we need
# to strip off.
path = b''.join(value_struct.string).decode(u'utf16')[:-1]
except UnicodeDecodeError as exception:
logging.warning((
u'[{0:s}] Unable to decode string MRUListEx entry value: {1:d} '
u'in key: {2:s} with error: {3:s}').format(
self.NAME, entry_number, key.path, exception))
path = u''
if path:
shell_item_list_data = value.data[value_struct.shell_item:]
if not shell_item_list_data:
logging.debug((
              u'[{0:s}] Missing shell item in MRUListEx entry value: {1:d} '
              u'in key: {2:s}').format(self.NAME, entry_number, key.path))
value_string = u'Path: {0:s}'.format(path)
else:
shell_items_parser = shell_items.ShellItemsParser(key.path)
shell_items_parser.UpdateChainAndParse(
parser_mediator, shell_item_list_data, None, codepage=codepage)
value_string = u'Path: {0:s}, Shell item: [{1:s}]'.format(
path, shell_items_parser.CopyToPath())
return value_string
def GetEntries(
self, parser_mediator, registry_key, codepage=u'cp1252',
registry_file_type=None, **kwargs):
"""Extract event objects from a Registry key containing a MRUListEx value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
registry_key: A Windows Registry key (instance of
dfwinreg.WinRegistryKey).
registry_file_type: Optional string containing the Windows Registry file
type, e.g. NTUSER, SOFTWARE. The default is None.
codepage: Optional extended ASCII string codepage. The default is cp1252.
"""
self._ParseMRUListExKey(
parser_mediator, registry_key, codepage=codepage,
registry_file_type=registry_file_type)
if registry_key.name == u'RecentDocs':
# For the RecentDocs MRUListEx we also need to parse its subkeys
# since the Registry key path does not support wildcards yet.
for subkey in registry_key.GetSubkeys():
self._ParseMRUListExKey(
parser_mediator, subkey, codepage=codepage,
registry_file_type=registry_file_type)
class MRUListExStringAndShellItemListPlugin(BaseMRUListExPlugin):
"""Windows Registry plugin to parse a string and shell item list MRUListEx."""
NAME = u'mrulistex_string_and_shell_item_list'
DESCRIPTION = u'Parser for Most Recently Used (MRU) Registry data.'
REG_TYPE = u'any'
REG_KEYS = frozenset([
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\ComDlg32\\'
u'LastVisitedPidlMRU')])
_STRING_AND_SHELL_ITEM_LIST_STRUCT = construct.Struct(
u'string_and_shell_item',
construct.RepeatUntil(
lambda obj, ctx: obj == b'\x00\x00', construct.Field(u'string', 2)),
construct.Anchor(u'shell_item_list'))
def _ParseMRUListExEntryValue(
self, parser_mediator, key, entry_index, entry_number, codepage=u'cp1252',
**unused_kwargs):
"""Parses the MRUListEx entry value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: the Registry key (instance of dfwinreg.WinRegistryKey) that contains
the MRUListEx value.
entry_index: integer value representing the MRUListEx entry index.
entry_number: integer value representing the entry number.
codepage: Optional extended ASCII string codepage. The default is cp1252.
Returns:
A string containing the value.
"""
value_string = u''
value = key.GetValueByName(u'{0:d}'.format(entry_number))
if value is None:
logging.debug(
u'[{0:s}] Missing MRUListEx entry value: {1:d} in key: {2:s}.'.format(
self.NAME, entry_number, key.path))
elif not value.DataIsBinaryData():
logging.debug((
u'[{0:s}] Non-binary MRUListEx entry value: {1:d} in key: '
u'{2:s}.').format(self.NAME, entry_number, key.path))
elif value.data:
value_struct = self._STRING_AND_SHELL_ITEM_LIST_STRUCT.parse(value.data)
try:
# The struct includes the end-of-string character that we need
# to strip off.
path = b''.join(value_struct.string).decode(u'utf16')[:-1]
except UnicodeDecodeError as exception:
logging.warning((
u'[{0:s}] Unable to decode string MRUListEx entry value: {1:d} '
u'in key: {2:s} with error: {3:s}').format(
self.NAME, entry_number, key.path, exception))
path = u''
if path:
shell_item_list_data = value.data[value_struct.shell_item_list:]
if not shell_item_list_data:
logging.debug((
              u'[{0:s}] Missing shell item in MRUListEx entry value: {1:d} '
              u'in key: {2:s}').format(self.NAME, entry_number, key.path))
value_string = u'Path: {0:s}'.format(path)
else:
shell_items_parser = shell_items.ShellItemsParser(key.path)
shell_items_parser.UpdateChainAndParse(
parser_mediator, shell_item_list_data, None, codepage=codepage)
value_string = u'Path: {0:s}, Shell item path: {1:s}'.format(
path, shell_items_parser.CopyToPath())
return value_string
def GetEntries(
self, parser_mediator, registry_key, codepage=u'cp1252',
registry_file_type=None, **kwargs):
"""Extract event objects from a Registry key containing a MRUListEx value.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
registry_key: A Windows Registry key (instance of
dfwinreg.WinRegistryKey).
registry_file_type: Optional string containing the Windows Registry file
type, e.g. NTUSER, SOFTWARE. The default is None.
codepage: Optional extended ASCII string codepage. The default is cp1252.
"""
self._ParseMRUListExKey(
parser_mediator, registry_key, registry_file_type=registry_file_type,
codepage=codepage)
winreg.WinRegistryParser.RegisterPlugins([
MRUListExStringPlugin, MRUListExShellItemListPlugin,
MRUListExStringAndShellItemPlugin, MRUListExStringAndShellItemListPlugin])
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training routines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
import six
from tensorflow.python import keras
from tensorflow.python.data.experimental.ops import cardinality
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import callbacks
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import metrics as metrics_module
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
class BatchCounterCallback(callbacks.Callback):
def __init__(self):
self.batch_count = 0
def on_batch_end(self, *args, **kwargs):
self.batch_count += 1
class TestTrainingWithDatasetIterators(keras_parameterized.TestCase):
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_training_and_eval_methods_on_iterators_single_io(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae', metrics_module.CategoricalAccuracy()]
model.compile(optimizer, loss, metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
iterator = dataset_ops.make_one_shot_iterator(dataset)
model.fit(iterator, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(iterator, steps=2, verbose=1)
model.predict(iterator, steps=2)
# Test with validation data
model.fit(iterator,
epochs=1, steps_per_epoch=2, verbose=0,
validation_data=iterator, validation_steps=2)
# Test with validation split
with self.assertRaisesRegexp(
ValueError, '`validation_split` argument is not supported '
'when input `x` is a dataset or a dataset iterator'):
model.fit(iterator,
epochs=1, steps_per_epoch=2, verbose=0,
validation_split=0.5, validation_steps=2)
# Test with sample weight.
sample_weight = np.random.random((10,))
with self.assertRaisesRegexp(
ValueError, '`sample_weight` argument is not supported '
'when input `x` is a dataset or a dataset iterator'):
model.fit(
iterator,
epochs=1,
steps_per_epoch=2,
verbose=0,
sample_weight=sample_weight)
# Test invalid usage
with self.assertRaisesRegexp(ValueError,
'you should not specify a target'):
model.fit(iterator, iterator,
epochs=1, steps_per_epoch=2, verbose=0)
with self.assertRaisesRegexp(
ValueError, 'the `steps_per_epoch` argument'):
model.fit(iterator, epochs=1, verbose=0)
with self.assertRaisesRegexp(ValueError,
'the `steps` argument'):
model.evaluate(iterator, verbose=0)
with self.assertRaisesRegexp(ValueError,
'the `steps` argument'):
model.predict(iterator, verbose=0)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_iterators_running_out_of_data(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae']
model.compile(optimizer, loss, metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(2)
dataset = dataset.batch(10)
iterator = dataset_ops.make_one_shot_iterator(dataset)
with test.mock.patch.object(logging, 'warning') as mock_log:
model.fit(iterator, epochs=1, steps_per_epoch=3, verbose=0)
self.assertRegexpMatches(
str(mock_log.call_args),
'dataset iterator ran out of data')
class TestTrainingWithDataset(keras_parameterized.TestCase):
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_calling_model_on_same_dataset(self):
if ((not testing_utils.should_run_eagerly())
and testing_utils.get_model_type() == 'subclass'
and context.executing_eagerly()):
self.skipTest('b/120673224')
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae']
model.compile(optimizer, loss, metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
# Call fit with validation data
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_training_and_eval_methods_on_dataset(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae', metrics_module.CategoricalAccuracy()]
model.compile(optimizer, loss, metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat() # Infinite dataset.
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset, steps=2, verbose=1)
model.predict(dataset, steps=2)
# Test with validation data
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
# Test with validation split
with self.assertRaisesRegexp(
ValueError, '`validation_split` argument is not supported '
'when input `x` is a dataset or a dataset iterator'):
model.fit(dataset,
epochs=1, steps_per_epoch=2, verbose=0,
validation_split=0.5, validation_steps=2)
# Test with sample weight.
sample_weight = np.random.random((10,))
with self.assertRaisesRegexp(
ValueError, '`sample_weight` argument is not supported '
'when input `x` is a dataset or a dataset iterator'):
model.fit(
dataset,
epochs=1,
steps_per_epoch=2,
verbose=0,
sample_weight=sample_weight)
# Test invalid usage
with self.assertRaisesRegexp(ValueError, 'The `batch_size` argument'
' must not be specified when using dataset'
' as an input.'):
model.fit(dataset, batch_size=10, epochs=1, steps_per_epoch=2,
verbose=0)
with self.assertRaisesRegexp(ValueError, 'The `batch_size` argument'
' must not be specified when using dataset'
' as an input.'):
model.predict(dataset, batch_size=10, steps=2, verbose=0)
with self.assertRaisesRegexp(ValueError, 'The `batch_size` argument'
' must not be specified when using dataset'
' as an input.'):
model.evaluate(dataset, batch_size=10, steps=2, verbose=0)
with self.assertRaisesRegexp(ValueError,
'you should not specify a target'):
model.fit(dataset, dataset,
epochs=1, steps_per_epoch=2, verbose=0)
# With an infinite dataset, `steps_per_epoch`/`steps` argument is required.
with self.assertRaisesRegexp(
ValueError, 'the `steps_per_epoch` argument'):
model.fit(dataset, epochs=1, verbose=0)
with self.assertRaisesRegexp(ValueError,
'the `steps` argument'):
model.evaluate(dataset, verbose=0)
with self.assertRaisesRegexp(ValueError,
'the `steps` argument'):
model.predict(dataset, verbose=0)
# TODO(b/123531973): Include tests using dataset_v1.
@keras_parameterized.run_with_all_model_types(exclude_models='sequential')
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training_and_eval_methods_on_multi_input_output_dataset(self):
input_a = keras.layers.Input(shape=(3,), name='input_1')
input_b = keras.layers.Input(shape=(3,), name='input_2')
dense = keras.layers.Dense(4, name='dense')
dropout = keras.layers.Dropout(0.5, name='dropout')
branch_a = [input_a, dense]
branch_b = [input_b, dense, dropout]
model = testing_utils.get_multi_io_model(branch_a, branch_b)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
input_a_np = np.random.random((10, 3)).astype(dtype=np.float32)
input_b_np = np.random.random((10, 3)).astype(dtype=np.float32)
output_d_np = np.random.random((10, 4)).astype(dtype=np.float32)
output_e_np = np.random.random((10, 4)).astype(dtype=np.float32)
# Test with tuples
dataset_tuple = dataset_ops.Dataset.from_tensor_slices((
(input_a_np, input_b_np), (output_d_np, output_e_np)))
dataset_tuple = dataset_tuple.repeat(100)
dataset_tuple = dataset_tuple.batch(10)
model.fit(dataset_tuple, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset_tuple, steps=2, verbose=1)
predict_dataset_tuple = dataset_ops.Dataset.from_tensor_slices(
(input_a_np, input_b_np))
# TODO(b/123360757): Remove below assertion once predict() supports
# muti-input datasets.
with self.assertRaisesRegexp(ValueError,
'Error when checking model input'):
model.predict(predict_dataset_tuple, steps=1)
# Test with dict
input_dict = {'input_1': input_a_np, 'input_2': input_b_np}
if testing_utils.get_model_type() == 'subclass':
output_dict = {'output_1': output_d_np, 'output_2': output_e_np}
else:
output_dict = {'dense': output_d_np, 'dropout': output_e_np}
dataset_dict = dataset_ops.Dataset.from_tensor_slices((
input_dict, output_dict))
dataset_dict = dataset_dict.repeat(100)
dataset_dict = dataset_dict.batch(10)
model.fit(dataset_dict, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset_dict, steps=2, verbose=1)
predict_dataset_dict = dataset_ops.Dataset.from_tensor_slices(
input_dict)
predict_dataset_dict = predict_dataset_dict.repeat(100)
    predict_dataset_dict = predict_dataset_dict.batch(10)
model.predict(predict_dataset_dict, steps=1)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_dataset_with_sample_weights(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae', metrics_module.CategoricalAccuracy()]
model.compile(optimizer, loss, metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
sample_weights = np.ones((10), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets,
sample_weights))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset, steps=2, verbose=1)
model.predict(dataset, steps=2)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_dataset_with_sparse_labels(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
model.compile(
optimizer,
loss='sparse_categorical_crossentropy',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=10, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
@keras_parameterized.run_all_keras_modes
def test_dataset_fit_correctness(self):
class SumLayer(keras.layers.Layer):
def build(self, _):
self.w = self.add_weight('w', ())
def call(self, inputs):
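        # Multiplying the weight by zero keeps a trainable variable in the
        # computation without changing the output, so compile()/fit() have a
        # parameter to update while the loss depends only on the input sum.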
return keras.backend.sum(inputs) + self.w * 0
model = keras.Sequential([SumLayer(input_shape=(2,))])
model.compile(
'rmsprop', loss='mae', run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((40, 2), dtype=np.float32)
inputs[10:20, :] = 2
inputs[20:30, :] = 1
inputs[30:, :] = 4
targets = np.zeros((40, 1), dtype=np.float32)
# Test correctness with `steps_per_epoch`.
train_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
val_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
history = model.fit(train_dataset,
epochs=2, steps_per_epoch=2, verbose=1,
validation_data=val_dataset, validation_steps=2)
self.assertListEqual(history.history['loss'],
[inputs[:20].sum() / 2, inputs[20:].sum() / 2])
# The validation dataset will be reset at the end of each validation run.
self.assertListEqual(history.history['val_loss'],
[inputs[:20].sum() / 2, inputs[:20].sum() / 2])
# Test correctness with dataset reset.
train_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
val_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
history = model.fit(train_dataset,
epochs=2, verbose=1, validation_data=val_dataset)
self.assertListEqual(history.history['loss'],
[inputs.sum() / 4, inputs.sum() / 4])
self.assertListEqual(history.history['val_loss'],
[inputs.sum() / 4, inputs.sum() / 4])
@tf_test_util.run_deprecated_v1
def test_dataset_input_shape_validation(self):
with self.cached_session():
model = testing_utils.get_small_functional_mlp(1, 4, input_dim=3)
model.compile(optimizer='rmsprop', loss='mse')
# User forgets to batch the dataset
inputs = np.zeros((10, 3))
targets = np.zeros((10, 4))
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
with self.assertRaisesRegexp(
ValueError,
r'expected (.*?) to have shape \(3,\) but got array with shape \(1,\)'
):
model.train_on_batch(dataset)
# Wrong input shape
inputs = np.zeros((10, 5))
targets = np.zeros((10, 4))
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
with self.assertRaisesRegexp(ValueError,
r'expected (.*?) to have shape \(3,\)'):
model.train_on_batch(dataset)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_finite_dataset_known_cardinality_no_steps_arg(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile('rmsprop', 'mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.batch(10)
batch_counter = BatchCounterCallback()
history = model.fit(dataset, epochs=2, verbose=1, callbacks=[batch_counter])
self.assertLen(history.history['loss'], 2)
self.assertEqual(batch_counter.batch_count, 20)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_finite_dataset_unknown_cardinality_no_steps_arg(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile('rmsprop', 'mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.filter(lambda x, y: True).batch(10)
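    # filter() hides the dataset's size from the tf.data runtime, so its
    # cardinality is UNKNOWN and the epoch length has to be discovered while
    # iterating, even though the underlying data is finite.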
self.assertEqual(keras.backend.get_value(cardinality.cardinality(dataset)),
cardinality.UNKNOWN)
batch_counter = BatchCounterCallback()
history = model.fit(dataset, epochs=2, verbose=1, callbacks=[batch_counter])
self.assertLen(history.history['loss'], 2)
self.assertEqual(batch_counter.batch_count, 20)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_finite_dataset_unknown_cardinality_no_step_with_train_and_val(self):
class CaptureStdout(object):
def __enter__(self):
self._stdout = sys.stdout
string_io = six.StringIO()
sys.stdout = string_io
self._stringio = string_io
return self
def __exit__(self, *args):
self.output = self._stringio.getvalue()
sys.stdout = self._stdout
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile(
'rmsprop', 'mse', run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.filter(lambda x, y: True).batch(10)
self.assertEqual(
keras.backend.get_value(cardinality.cardinality(dataset)),
cardinality.UNKNOWN)
batch_counter = BatchCounterCallback()
with CaptureStdout() as capture:
history = model.fit(
dataset,
epochs=2,
callbacks=[batch_counter],
validation_data=dataset.take(3))
lines = capture.output.splitlines()
self.assertIn('1/Unknown', lines[2])
self.assertIn('10/10', lines[-1])
self.assertLen(history.history['loss'], 2)
self.assertEqual(batch_counter.batch_count, 20)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_finite_dataset_unknown_cardinality_out_of_data(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile('rmsprop', 'mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.filter(lambda x, y: True).batch(10)
self.assertEqual(
keras.backend.get_value(cardinality.cardinality(dataset)),
cardinality.UNKNOWN)
batch_counter = BatchCounterCallback()
with test.mock.patch.object(logging, 'warning') as mock_log:
# steps_per_epoch (200) is greater than the dataset size (100). As this is
# unexpected, training will stop and not make it to the second epoch.
history = model.fit(
dataset,
epochs=2,
verbose=1,
callbacks=[batch_counter],
steps_per_epoch=200)
self.assertIn(
'Your dataset ran out of data; interrupting training. '
'Make sure that your dataset can generate at least '
'`steps_per_epoch * epochs` batches (in this case, 400 batches). '
'You may need to use the repeat() function when '
'building your dataset.', str(mock_log.call_args))
self.assertLen(history.history['loss'], 1)
self.assertEqual(batch_counter.batch_count, 10)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
class TestMetricsWithDatasetIterators(keras_parameterized.TestCase):
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_metrics_correctness_with_iterator(self):
layers = [
keras.layers.Dense(8, activation='relu', input_dim=4,
kernel_initializer='ones'),
keras.layers.Dense(1, activation='sigmoid', kernel_initializer='ones')
]
model = testing_utils.get_model_from_layers(layers, (4,))
model.compile(
loss='binary_crossentropy',
metrics=['accuracy', metrics_module.BinaryAccuracy()],
optimizer='rmsprop',
run_eagerly=testing_utils.should_run_eagerly())
np.random.seed(123)
x = np.random.randint(10, size=(100, 4)).astype(np.float32)
y = np.random.randint(2, size=(100, 1)).astype(np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
dataset = dataset.batch(10)
iterator = dataset_ops.make_one_shot_iterator(dataset)
outs = model.evaluate(iterator, steps=10)
self.assertEqual(np.around(outs[1], decimals=1), 0.5)
self.assertEqual(np.around(outs[2], decimals=1), 0.5)
y = np.zeros((100, 1), dtype=np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
iterator = dataset_ops.make_one_shot_iterator(dataset)
outs = model.evaluate(iterator, steps=10)
self.assertEqual(outs[1], 0.)
self.assertEqual(outs[2], 0.)
if __name__ == '__main__':
test.main()
from direct.directnotify import DirectNotifyGlobal
from otp.avatar import DistributedAvatarAI
from toontown.battle import BattleExperienceAI
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import ToontownBattleGlobals
from toontown.toon import InventoryBase
from toontown.battle import DistributedBattleFinalAI
from toontown.building import SuitPlannerInteriorAI
from toontown.battle import BattleBase
from pandac.PandaModules import *
import SuitDNA
import random
AllBossCogs = []
class DistributedBossCogAI(DistributedAvatarAI.DistributedAvatarAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBossCogAI')
def __init__(self, air, dept):
DistributedAvatarAI.DistributedAvatarAI.__init__(self, air)
self.dept = dept
self.dna = SuitDNA.SuitDNA()
self.dna.newBossCog(self.dept)
self.deptIndex = SuitDNA.suitDepts.index(self.dept)
self.resetBattleCounters()
self.looseToons = []
self.involvedToons = []
self.toonsA = []
self.toonsB = []
self.nearToons = []
self.suitsA = []
self.activeSuitsA = []
self.suitsB = []
self.activeSuitsB = []
self.reserveSuits = []
self.barrier = None
self.keyStates = ['BattleOne',
'BattleTwo',
'BattleThree',
'Victory']
self.bossDamage = 0
self.battleThreeStart = 0
self.battleThreeDuration = 1800
self.attackCode = None
self.attackAvId = 0
self.hitCount = 0
AllBossCogs.append(self)
return
def delete(self):
self.ignoreAll()
if self in AllBossCogs:
i = AllBossCogs.index(self)
del AllBossCogs[i]
return DistributedAvatarAI.DistributedAvatarAI.delete(self)
def getDNAString(self):
return self.dna.makeNetString()
def avatarEnter(self):
avId = self.air.getAvatarIdFromSender()
self.addToon(avId)
def avatarExit(self):
avId = self.air.getAvatarIdFromSender()
self.removeToon(avId)
def avatarNearEnter(self):
avId = self.air.getAvatarIdFromSender()
if avId not in self.nearToons:
self.nearToons.append(avId)
def avatarNearExit(self):
avId = self.air.getAvatarIdFromSender()
try:
self.nearToons.remove(avId)
except:
pass
def __handleUnexpectedExit(self, avId):
self.removeToon(avId)
def addToon(self, avId):
if avId not in self.looseToons and avId not in self.involvedToons:
self.looseToons.append(avId)
event = self.air.getAvatarExitEvent(avId)
self.acceptOnce(event, self.__handleUnexpectedExit, extraArgs=[avId])
def removeToon(self, avId):
resendIds = 0
try:
self.looseToons.remove(avId)
except:
pass
try:
self.involvedToons.remove(avId)
resendIds = 1
except:
pass
try:
self.toonsA.remove(avId)
except:
pass
try:
self.toonsB.remove(avId)
except:
pass
try:
self.nearToons.remove(avId)
except:
pass
event = self.air.getAvatarExitEvent(avId)
self.ignore(event)
if not self.hasToons():
taskMgr.doMethodLater(10, self.__bossDone, self.uniqueName('BossDone'))
def __bossDone(self, task):
self.b_setState('Off')
messenger.send(self.uniqueName('BossDone'))
self.ignoreAll()
def hasToons(self):
return self.looseToons or self.involvedToons
def hasToonsAlive(self):
alive = 0
for toonId in self.involvedToons:
toon = self.air.doId2do.get(toonId)
if toon:
hp = toon.getHp()
if hp > 0:
alive = 1
return alive
def sendBattleIds(self):
self.sendUpdate('setBattleIds', [self.battleNumber, self.battleAId, self.battleBId])
def sendToonIds(self):
self.sendUpdate('setToonIds', [self.involvedToons, self.toonsA, self.toonsB])
def damageToon(self, toon, deduction):
toon.takeDamage(deduction)
if toon.getHp() <= 0:
self.sendUpdate('toonDied', [toon.doId])
empty = InventoryBase.InventoryBase(toon)
toon.b_setInventory(empty.makeNetString())
self.removeToon(toon.doId)
def healToon(self, toon, increment):
toon.toonUp(increment)
def d_setBattleExperience(self):
self.sendUpdate('setBattleExperience', self.getBattleExperience())
def getBattleExperience(self):
result = BattleExperienceAI.getBattleExperience(8, self.involvedToons, self.toonExp, self.toonSkillPtsGained, self.toonOrigQuests, self.toonItems, self.toonOrigMerits, self.toonMerits, self.toonParts, self.suitsKilled, self.helpfulToons)
return result
def b_setArenaSide(self, arenaSide):
self.setArenaSide(arenaSide)
self.d_setArenaSide(arenaSide)
def setArenaSide(self, arenaSide):
self.arenaSide = arenaSide
def d_setArenaSide(self, arenaSide):
self.sendUpdate('setArenaSide', [arenaSide])
def b_setState(self, state):
self.setState(state)
self.d_setState(state)
def d_setState(self, state):
self.sendUpdate('setState', [state])
def setState(self, state):
self.demand(state)
if self.air:
if state in self.keyStates:
self.air.writeServerEvent('bossBattle', self.doId, '%s|%s|%s|%s' % (self.dept,
state,
self.involvedToons,
self.formatReward()))
def getState(self):
return self.state
def formatReward(self):
return 'unspecified'
def enterOff(self):
self.resetBattles()
self.resetToons()
self.resetBattleCounters()
def exitOff(self):
pass
def enterWaitForToons(self):
self.acceptNewToons()
self.barrier = self.beginBarrier('WaitForToons', self.involvedToons, 5, self.__doneWaitForToons)
def __doneWaitForToons(self, toons):
self.b_setState('Elevator')
def exitWaitForToons(self):
self.ignoreBarrier(self.barrier)
def enterElevator(self):
if self.notify.getDebug():
for toonId in self.involvedToons:
toon = simbase.air.doId2do.get(toonId)
if toon:
self.notify.debug('%s. involved toon %s, %s/%s' % (self.doId,
toonId,
toon.getHp(),
toon.getMaxHp()))
self.resetBattles()
self.barrier = self.beginBarrier('Elevator', self.involvedToons, 30, self.__doneElevator)
def __doneElevator(self, avIds):
self.b_setState('Introduction')
def exitElevator(self):
self.ignoreBarrier(self.barrier)
def enterIntroduction(self):
self.resetBattles()
self.arenaSide = None
self.makeBattleOneBattles()
self.barrier = self.beginBarrier('Introduction', self.involvedToons, 45, self.doneIntroduction)
return
def doneIntroduction(self, avIds):
self.b_setState('BattleOne')
def exitIntroduction(self):
self.ignoreBarrier(self.barrier)
for toonId in self.involvedToons:
toon = simbase.air.doId2do.get(toonId)
if toon:
toon.b_setCogIndex(-1)
def enterBattleOne(self):
if self.battleA:
self.battleA.startBattle(self.toonsA, self.suitsA)
if self.battleB:
self.battleB.startBattle(self.toonsB, self.suitsB)
def exitBattleOne(self):
self.resetBattles()
def enterReward(self):
self.resetBattles()
self.barrier = self.beginBarrier('Reward', self.involvedToons, BattleBase.BUILDING_REWARD_TIMEOUT, self.__doneReward)
def __doneReward(self, avIds):
self.b_setState('Epilogue')
def exitReward(self):
pass
def enterEpilogue(self):
pass
def exitEpilogue(self):
pass
def enterFrolic(self):
self.resetBattles()
def exitFrolic(self):
pass
def resetBattleCounters(self):
self.battleNumber = 0
self.battleA = None
self.battleAId = 0
self.battleB = None
self.battleBId = 0
self.arenaSide = None
self.toonSkillPtsGained = {}
self.toonExp = {}
self.toonOrigQuests = {}
self.toonItems = {}
self.toonOrigMerits = {}
self.toonMerits = {}
self.toonParts = {}
self.suitsKilled = []
self.helpfulToons = []
return
def resetBattles(self):
sendReset = 0
if self.battleA:
self.battleA.requestDelete()
self.battleA = None
self.battleAId = 0
sendReset = 1
if self.battleB:
self.battleB.requestDelete()
self.battleB = None
self.battleBId = 0
sendReset = 1
for suit in self.suitsA + self.suitsB:
suit.requestDelete()
for suit, joinChance in self.reserveSuits:
suit.requestDelete()
self.suitsA = []
self.activeSuitsA = []
self.suitsB = []
self.activeSuitsB = []
self.reserveSuits = []
self.battleNumber = 0
if sendReset:
self.sendBattleIds()
return
def resetToons(self):
if self.toonsA or self.toonsB:
self.looseToons = self.looseToons + self.involvedToons
self.involvedToons = []
self.toonsA = []
self.toonsB = []
self.sendToonIds()
def divideToons(self):
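        # Split up to eight involved toons between the two battles. With
        # fewer than four toons, side B gets half rounded down; otherwise the
        # odd toon (if any) lands on a random side, e.g. 7 toons split 4/3 or
        # 3/4. Toons beyond the first eight stay loose.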
toons = self.involvedToons[:]
random.shuffle(toons)
numToons = min(len(toons), 8)
        if numToons < 4:
            numToonsB = numToons // 2
        else:
            numToonsB = (numToons + random.choice([0, 1])) // 2
self.toonsA = toons[numToonsB:numToons]
self.toonsB = toons[:numToonsB]
self.looseToons += toons[numToons:]
self.sendToonIds()
def acceptNewToons(self):
sourceToons = self.looseToons
self.looseToons = []
for toonId in sourceToons:
toon = self.air.doId2do.get(toonId)
if toon and not toon.ghostMode:
self.involvedToons.append(toonId)
else:
self.looseToons.append(toonId)
for avId in self.involvedToons:
toon = self.air.doId2do.get(avId)
if toon:
p = []
for t in ToontownBattleGlobals.Tracks:
p.append(toon.experience.getExp(t))
self.toonExp[avId] = p
self.toonOrigMerits[avId] = toon.cogMerits[:]
self.divideToons()
def initializeBattles(self, battleNumber, bossCogPosHpr):
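        # Rebuild both sides of the arena: spawn a fresh suit wave per side,
        # and create a battle object only for sides that actually have toons;
        # an empty side sends its active suits straight to the reserves.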
self.resetBattles()
if not self.involvedToons:
self.notify.warning('initializeBattles: no toons!')
return
self.battleNumber = battleNumber
suitHandles = self.generateSuits(battleNumber)
self.suitsA = suitHandles['activeSuits']
self.activeSuitsA = self.suitsA[:]
self.reserveSuits = suitHandles['reserveSuits']
suitHandles = self.generateSuits(battleNumber)
self.suitsB = suitHandles['activeSuits']
self.activeSuitsB = self.suitsB[:]
self.reserveSuits += suitHandles['reserveSuits']
if self.toonsA:
self.battleA = self.makeBattle(bossCogPosHpr, ToontownGlobals.BossCogBattleAPosHpr, self.handleRoundADone, self.handleBattleADone, battleNumber, 0)
self.battleAId = self.battleA.doId
else:
self.moveSuits(self.activeSuitsA)
self.suitsA = []
self.activeSuitsA = []
            if self.arenaSide is None:
self.b_setArenaSide(0)
if self.toonsB:
self.battleB = self.makeBattle(bossCogPosHpr, ToontownGlobals.BossCogBattleBPosHpr, self.handleRoundBDone, self.handleBattleBDone, battleNumber, 1)
self.battleBId = self.battleB.doId
else:
self.moveSuits(self.activeSuitsB)
self.suitsB = []
self.activeSuitsB = []
            if self.arenaSide is None:
self.b_setArenaSide(1)
self.sendBattleIds()
return
def makeBattle(self, bossCogPosHpr, battlePosHpr, roundCallback, finishCallback, battleNumber, battleSide):
battle = DistributedBattleFinalAI.DistributedBattleFinalAI(self.air, self, roundCallback, finishCallback, battleSide)
self.setBattlePos(battle, bossCogPosHpr, battlePosHpr)
battle.suitsKilled = self.suitsKilled
battle.battleCalc.toonSkillPtsGained = self.toonSkillPtsGained
battle.toonExp = self.toonExp
battle.toonOrigQuests = self.toonOrigQuests
battle.toonItems = self.toonItems
battle.toonOrigMerits = self.toonOrigMerits
battle.toonMerits = self.toonMerits
battle.toonParts = self.toonParts
battle.helpfulToons = self.helpfulToons
mult = ToontownBattleGlobals.getBossBattleCreditMultiplier(battleNumber)
battle.battleCalc.setSkillCreditMultiplier(mult)
battle.generateWithRequired(self.zoneId)
return battle
def setBattlePos(self, battle, cogPosHpr, battlePosHpr):
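        # Derive the battle's world-space position (and the suits' initial
        # position, one unit along +Y in front of it) by composing throwaway
        # scene graph nodes relative to the boss.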
bossNode = NodePath('bossNode')
bossNode.setPosHpr(*cogPosHpr)
battleNode = bossNode.attachNewNode('battleNode')
battleNode.setPosHpr(*battlePosHpr)
suitNode = battleNode.attachNewNode('suitNode')
suitNode.setPos(0, 1, 0)
battle.pos = battleNode.getPos(NodePath())
battle.initialSuitPos = suitNode.getPos(NodePath())
def moveSuits(self, active):
for suit in active:
self.reserveSuits.append((suit, 0))
def handleRoundADone(self, toonIds, totalHp, deadSuits):
if self.battleA:
self.handleRoundDone(self.battleA, self.suitsA, self.activeSuitsA, toonIds, totalHp, deadSuits)
def handleRoundBDone(self, toonIds, totalHp, deadSuits):
if self.battleB:
self.handleRoundDone(self.battleB, self.suitsB, self.activeSuitsB, toonIds, totalHp, deadSuits)
def handleBattleADone(self, zoneId, toonIds):
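        # Battle A has ended; if battle B is finished too and living toons
        # remain, advance the boss to the post-battle state.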
if self.battleA:
self.battleA.requestDelete()
self.battleA = None
self.battleAId = 0
self.sendBattleIds()
        if self.arenaSide is None:
self.b_setArenaSide(0)
if not self.battleB and self.hasToons() and self.hasToonsAlive():
self.b_setState(self.postBattleState)
return
def handleBattleBDone(self, zoneId, toonIds):
if self.battleB:
self.battleB.requestDelete()
self.battleB = None
self.battleBId = 0
self.sendBattleIds()
        if self.arenaSide is None:
self.b_setArenaSide(1)
if not self.battleA and self.hasToons() and self.hasToonsAlive():
self.b_setState(self.postBattleState)
return
def invokeSuitPlanner(self, buildingCode, skelecog):
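        # Generate one floor's worth of cogs via the interior suit planner
        # (ignoring invasions), optionally turning every active and reserve
        # suit into a skelecog.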
planner = SuitPlannerInteriorAI.SuitPlannerInteriorAI(1, buildingCode, self.dna.dept, self.zoneId)
planner.respectInvasions = 0
suits = planner.genFloorSuits(0)
if skelecog:
for suit in suits['activeSuits']:
suit.b_setSkelecog(1)
for reserve in suits['reserveSuits']:
suit = reserve[0]
suit.b_setSkelecog(1)
return suits
def generateSuits(self, battleNumber):
        raise StandardError('generateSuits unimplemented')
def handleRoundDone(self, battle, suits, activeSuits, toonIds, totalHp, deadSuits):
totalMaxHp = 0
for suit in suits:
totalMaxHp += suit.maxHP
for suit in deadSuits:
activeSuits.remove(suit)
joinedReserves = []
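        # Reserve suits join once the wave has lost at least their configured
        # percentage of total HP (info[1]) and fewer than four suits remain.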
if len(self.reserveSuits) > 0 and len(activeSuits) < 4:
            # float() guards against Python 2 integer division truncating the
            # damage fraction to zero.
            hpPercent = 100 - float(totalHp) / totalMaxHp * 100.0
for info in self.reserveSuits:
if info[1] <= hpPercent and len(activeSuits) < 4:
suits.append(info[0])
activeSuits.append(info[0])
joinedReserves.append(info)
for info in joinedReserves:
self.reserveSuits.remove(info)
battle.resume(joinedReserves)
def getBattleThreeTime(self):
elapsed = globalClock.getFrameTime() - self.battleThreeStart
t1 = elapsed / float(self.battleThreeDuration)
return t1
def progressValue(self, fromValue, toValue):
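        # Interpolate from fromValue to toValue using overall battle progress:
        # the larger of damage dealt (t0) and elapsed time (t1), clamped to 1.
        # e.g. progressValue(20, 5) yields 20 at the start and 5 at the end.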
t0 = float(self.bossDamage) / float(self.bossMaxDamage)
elapsed = globalClock.getFrameTime() - self.battleThreeStart
t1 = elapsed / float(self.battleThreeDuration)
t = max(t0, t1)
return fromValue + (toValue - fromValue) * min(t, 1)
def progressRandomValue(self, fromValue, toValue, radius = 0.2):
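        # Same interpolation as progressValue, but jittered by up to +/-radius;
        # the jitter is widest at mid-battle (t = 0.5) and vanishes at the ends.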
t = self.progressValue(0, 1)
radius = radius * (1.0 - abs(t - 0.5) * 2.0)
t += radius * random.uniform(-1, 1)
t = max(min(t, 1.0), 0.0)
return fromValue + (toValue - fromValue) * t
    def reportToonHealth(self):
        if self.notify.getDebug():
            status = ''
            for toonId in self.involvedToons:
                toon = self.air.doId2do.get(toonId)
                if toon:
                    status += ', %s (%s/%s)' % (toonId, toon.getHp(), toon.getMaxHp())
            self.notify.debug('%s.toons = %s' % (self.doId, status[2:]))
def getDamageMultiplier(self):
return 1.0
def zapToon(self, x, y, z, h, p, r, bpx, bpy, attackCode, timestamp):
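        # A client reports its toon was hit by the boss: validate the sender,
        # apply table-driven damage scaled by getDamageMultiplier(), and, for
        # electric-fence hits, counter with a swat when the bounce position
        # (bpx, bpy appear to be boss-relative coordinates) is off to one side.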
avId = self.air.getAvatarIdFromSender()
if not self.validate(avId, avId in self.involvedToons, 'zapToon from unknown avatar'):
return
if attackCode == ToontownGlobals.BossCogLawyerAttack and self.dna.dept != 'l':
self.notify.warning('got lawyer attack but not in CJ boss battle')
return
toon = simbase.air.doId2do.get(avId)
if toon:
self.d_showZapToon(avId, x, y, z, h, p, r, attackCode, timestamp)
damage = ToontownGlobals.BossCogDamageLevels.get(attackCode)
            if damage is None:
self.notify.warning('No damage listed for attack code %s' % attackCode)
damage = 5
damage *= self.getDamageMultiplier()
self.damageToon(toon, damage)
currState = self.getCurrentOrNextState()
if attackCode == ToontownGlobals.BossCogElectricFence and (currState == 'RollToBattleTwo' or currState == 'BattleThree'):
if bpy < 0 and abs(bpx / bpy) > 0.5:
if bpx < 0:
self.b_setAttackCode(ToontownGlobals.BossCogSwatRight)
else:
self.b_setAttackCode(ToontownGlobals.BossCogSwatLeft)
return
def d_showZapToon(self, avId, x, y, z, h, p, r, attackCode, timestamp):
self.sendUpdate('showZapToon', [avId,
x,
y,
z,
h,
p,
r,
attackCode,
timestamp])
def b_setAttackCode(self, attackCode, avId = 0):
self.d_setAttackCode(attackCode, avId)
self.setAttackCode(attackCode, avId)
def setAttackCode(self, attackCode, avId = 0):
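        # Record the attack and schedule the follow-up: dizzy recovery shrinks
        # from 20s to 5s as the battle progresses, while the slow directed
        # attack gets up to 10 extra seconds early on.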
self.attackCode = attackCode
self.attackAvId = avId
if attackCode == ToontownGlobals.BossCogDizzy or attackCode == ToontownGlobals.BossCogDizzyNow:
delayTime = self.progressValue(20, 5)
self.hitCount = 0
elif attackCode == ToontownGlobals.BossCogSlowDirectedAttack:
delayTime = ToontownGlobals.BossCogAttackTimes.get(attackCode)
delayTime += self.progressValue(10, 0)
else:
delayTime = ToontownGlobals.BossCogAttackTimes.get(attackCode)
        if delayTime is None:
return
self.waitForNextAttack(delayTime)
return
def d_setAttackCode(self, attackCode, avId = 0):
self.sendUpdate('setAttackCode', [attackCode, avId])
def waitForNextAttack(self, delayTime):
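        # Only BattleThree has free-form boss attacks; replace any pending
        # NextAttack task so at most one is ever scheduled.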
currState = self.getCurrentOrNextState()
if currState == 'BattleThree':
taskName = self.uniqueName('NextAttack')
taskMgr.remove(taskName)
taskMgr.doMethodLater(delayTime, self.doNextAttack, taskName)
def stopAttacks(self):
taskName = self.uniqueName('NextAttack')
taskMgr.remove(taskName)
def doNextAttack(self, task):
self.b_setAttackCode(ToontownGlobals.BossCogNoAttack)

# traceback: embedded copy of the Python 2 standard-library traceback module.
import linecache
import sys
import types
__all__ = ['extract_stack',
'extract_tb',
'format_exception',
'format_exception_only',
'format_list',
'format_stack',
'format_tb',
'print_exc',
'format_exc',
'print_exception',
'print_last',
'print_stack',
'print_tb',
'tb_lineno']
def _print(file, str = '', terminator = '\n'):
file.write(str + terminator)
def print_list(extracted_list, file = None):
if file is None:
file = sys.stderr
for filename, lineno, name, line in extracted_list:
_print(file, ' File "%s", line %d, in %s' % (filename, lineno, name))
if line:
_print(file, ' %s' % line.strip())
return
def format_list(extracted_list):
list = []
for filename, lineno, name, line in extracted_list:
item = ' File "%s", line %d, in %s\n' % (filename, lineno, name)
if line:
item = item + ' %s\n' % line.strip()
list.append(item)
return list
def print_tb(tb, limit = None, file = None):
if file is None:
file = sys.stderr
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
n = 0
while tb is not None and (limit is None or n < limit):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
_print(file, ' File "%s", line %d, in %s' % (filename, lineno, name))
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
_print(file, ' ' + line.strip())
tb = tb.tb_next
n = n + 1
return
def format_tb(tb, limit = None):
return format_list(extract_tb(tb, limit))
def extract_tb(tb, limit = None):
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while tb is not None and (limit is None or n < limit):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
list.append((filename,
lineno,
name,
line))
tb = tb.tb_next
n = n + 1
return list
def print_exception(etype, value, tb, limit = None, file = None):
if file is None:
file = sys.stderr
if tb:
_print(file, 'Traceback (most recent call last):')
print_tb(tb, limit, file)
lines = format_exception_only(etype, value)
for line in lines:
_print(file, line, '')
return
def format_exception(etype, value, tb, limit = None):
if tb:
list = ['Traceback (most recent call last):\n']
list = list + format_tb(tb, limit)
else:
list = []
list = list + format_exception_only(etype, value)
return list
def format_exception_only(etype, value):
if isinstance(etype, BaseException) or isinstance(etype, types.InstanceType) or etype is None or type(etype) is str:
return [_format_final_exc_line(etype, value)]
else:
stype = etype.__name__
if not issubclass(etype, SyntaxError):
return [_format_final_exc_line(stype, value)]
lines = []
try:
msg, (filename, lineno, offset, badline) = value.args
except Exception:
pass
else:
filename = filename or '<string>'
lines.append(' File "%s", line %d\n' % (filename, lineno))
if badline is not None:
lines.append(' %s\n' % badline.strip())
if offset is not None:
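                    # Blank out everything before the error column, keeping
                    # tabs and other whitespace so the caret lines up with
                    # the offending character.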
caretspace = badline.rstrip('\n')
offset = min(len(caretspace), offset) - 1
caretspace = caretspace[:offset].lstrip()
caretspace = (c.isspace() and c or ' ' for c in caretspace)
lines.append(' %s^\n' % ''.join(caretspace))
value = msg
lines.append(_format_final_exc_line(stype, value))
return lines
def _format_final_exc_line(etype, value):
valuestr = _some_str(value)
if value is None or not valuestr:
line = '%s\n' % etype
else:
line = '%s: %s\n' % (etype, valuestr)
return line
def _some_str(value):
try:
return str(value)
except Exception:
pass
try:
value = unicode(value)
return value.encode('ascii', 'backslashreplace')
except Exception:
pass
return '<unprintable %s object>' % type(value).__name__
def print_exc(limit = None, file = None):
if file is None:
file = sys.stderr
try:
etype, value, tb = sys.exc_info()
print_exception(etype, value, tb, limit, file)
finally:
etype = value = tb = None
return
def format_exc(limit = None):
try:
etype, value, tb = sys.exc_info()
return ''.join(format_exception(etype, value, tb, limit))
finally:
etype = value = tb = None
return
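# Minimal usage sketch for format_exc (names here are illustrative):
#
#     try:
#         1 / 0
#     except ZeroDivisionError:
#         text = format_exc()
#     # text now holds the familiar 'Traceback (most recent call last):' dump.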
def print_last(limit = None, file = None):
if not hasattr(sys, 'last_type'):
raise ValueError('no last exception')
if file is None:
file = sys.stderr
print_exception(sys.last_type, sys.last_value, sys.last_traceback, limit, file)
return
def print_stack(f = None, limit = None, file = None):
if f is None:
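        # Raise and immediately catch a throwaway exception so the traceback
        # machinery hands us the caller's frame.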
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
print_list(extract_stack(f, limit), file)
return
def format_stack(f = None, limit = None):
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
return format_list(extract_stack(f, limit))
def extract_stack(f = None, limit = None):
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while f is not None and (limit is None or n < limit):
lineno = f.f_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
list.append((filename,
lineno,
name,
line))
f = f.f_back
n = n + 1
list.reverse()
return list
def tb_lineno(tb):
return tb.tb_lineno