#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from pyecs import *
from pyecs.application import main,profile,onHotswap
from pyecs.components import *
import pyecs
import pyecs.application
from collections import defaultdict
from testing import *
from funcy import partial
import mock
class TestApplication():
@mock.patch('pyecs.application.Application')
def test_main1(self,mocked_Application):
main("__main__")
mocked_Application.assert_called_once_with()
@mock.patch('pyecs.application.Application')
def test_main2(self,mocked_Application):
main("anything-else")
mocked_Application.assert_not_called()
@mock.patch('cProfile.Profile')
def test_profile(self,mocked_Profile):
operation = mock.MagicMock()
filename = "foobar"
profile(operation,filename)
operation.assert_called_once_with()
mocked_Profile.assert_called_once_with()
mocked_Profile.return_value.enable.assert_called_once_with()
mocked_Profile.return_value.disable.assert_called_once_with()
mocked_Profile.return_value.dump_stats.assert_called_once_with(filename)
@mock.patch('pyecs.application.Application.spin')
@mock.patch('pyecs.application.Entity')
def test___init___autospin_default(self, mocked_Entity, mocked_spin):
a = Application()
assert Application.instance == a
assert a.done == False
assert hasattr(a,"entity") == True
mocked_spin.assert_called_once_with()
mocked_Entity.assert_called_once_with()
mocked_Entity.return_value.fire_callbacks.assert_called_once_with("setup")
@mock.patch('pyecs.application.Application.spin')
@mock.patch('pyecs.application.Entity')
def test___init___auto_spin_True(self, mocked_Entity, mocked_spin):
a = Application(auto_spin=True)
assert Application.instance == a
assert a.done == False
assert hasattr(a,"entity") == True
mocked_spin.assert_called_once_with()
mocked_Entity.assert_called_once_with()
mocked_Entity.return_value.fire_callbacks.assert_called_once_with("setup")
@mock.patch('pyecs.application.Application.spin')
@mock.patch('pyecs.application.Entity')
def test___init___auto_spin_False(self, mocked_Entity, mocked_spin):
a = Application(auto_spin=False)
assert Application.instance == a
assert a.done == False
assert hasattr(a,"entity") == True
mocked_spin.assert_not_called()
mocked_Entity.assert_called_once_with()
mocked_Entity.return_value.fire_callbacks.assert_called_once_with("setup")
@mock.patch('pyecs.application.Entity')
def test_setup_main_entity(self, mocked_Entity):
a = Application(auto_spin=False)
mocked_Entity.reset_mock()
delattr(a, "entity")
a.setup_main_entity()
mocked_Entity.assert_called_once_with()
assert hasattr(a, "entity")
def test_register_events(self):
a = Application(auto_spin=False)
a.setup_main_entity() # generate new self.entity
assert len(a.entity.callbacks["quit"])==0
a.register_events()
assert len(a.entity.callbacks["quit"])==1
def test_on_quit(self):
a = Application(auto_spin=False)
a.done = False
a.entity.fire_callbacks("quit",None)
assert a.done == True
@mock.patch('pyecs.application.Entity.fire_callbacks')
def test_update(self, mocked_fire_callbacks):
a = Application(auto_spin=False)
dt = 1
mocked_fire_callbacks.reset_mock()
a.update(dt)
mocked_fire_callbacks.assert_called_once_with("update",dt)
@mock.patch('pyecs.application.Entity.fire_callbacks')
def test_start(self, mocked_fire_callbacks):
a = Application(auto_spin=False)
mocked_fire_callbacks.reset_mock()
a.start()
mocked_fire_callbacks.assert_called_once_with("start")
@mock.patch('pyecs.application.Entity.fire_callbacks')
def test_quit(self, mocked_fire_callbacks):
a = Application(auto_spin=False)
mocked_fire_callbacks.reset_mock()
a.quit()
mocked_fire_callbacks.assert_called_once_with("quit",None)
@mock.patch('pyecs.application.Entity.fire_callbacks')
def test_on_hotswap(self, mocked_fire_callbacks):
a = Application(auto_spin=False)
mocked_fire_callbacks.reset_mock()
a.on_hotswap()
mocked_fire_callbacks.assert_called_once_with("hotswap")
@mock.patch('pyecs.application.time',new_callable=lambda:partial(lambda it:0.5*it.next(),generateNaturalIntegers()))
def test_update_dt_0_5(self, mocked_time):
a = Application(auto_spin=False)
def update(dt):
assert dt == 0.5
update.called = True
a.entity.fire_callbacks("quit",None)
update.called = False
a.entity.register_callback("update", update)
a.spin()
assert update.called == True
@mock.patch('pyecs.application.time',new_callable=lambda:partial(lambda it:it.next(),generateNaturalIntegers()))
def test_update_dt_1(self, mocked_time):
a = Application(auto_spin=False)
def update(dt):
assert dt == 1
update.called = True
a.entity.fire_callbacks("quit",None)
update.called = False
a.entity.register_callback("update", update)
a.spin()
assert update.called == True
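# test_update_onHotswap patches the module-level onHotswap hook; the assumption
# (implied by the assertions below) is that spin() checks onHotswap.notify each
# iteration and fires the "hotswap" callbacks when the flag is set.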
@mock.patch('pyecs.application.onHotswap')
def test_update_onHotswap(self, mocked_onHotswap):
a = Application(auto_spin=False)
def update(dt):
update.called = True
mocked_onHotswap.notify = True
assert mocked_onHotswap.notify == True
# if update.called > 10:
# a.entity.fire_callbacks("quit",None)
def hotswap():
hotswap.called = True
a.entity.fire_callbacks("quit",None)
update.called = False
hotswap.called = False
a.entity.register_callback("update", update)
a.entity.register_callback("hotswap", hotswap)
a.spin()
assert update.called == True
assert hotswap.called == True
def test_update_start_quit(self):
a = Application(auto_spin=False)
def update(dt):
a.entity.fire_callbacks("quit",None)
def start():
start.called = True
def quit(event):
quit.called = True
a.entity.register_callback("update", update)
a.entity.register_callback("start", start)
a.entity.register_callback("quit", quit)
start.called = False
quit.called = False
a.spin()
assert start.called == True
assert quit.called == True
def test_onHotswap(self):
onHotswap.notify = False
onHotswap()
assert onHotswap.notify == True
"""Test TrueType font subsetting & embedding code.
This test uses a sample font (Vera.ttf) taken from Bitstream, which is called Bitstream
Vera Sans Roman and is covered under the license in ../fonts/bitstream-vera-license.txt.
"""
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation, NearTestCase
if __name__=='__main__':
setOutDir(__name__)
import unittest
from reportlab.pdfgen.canvas import Canvas
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.pdfdoc import PDFDocument, PDFError
from reportlab.pdfbase.ttfonts import TTFont, TTFontFace, TTFontFile, TTFOpenFile, \
TTFontParser, TTFontMaker, TTFError, \
makeToUnicodeCMap, \
FF_SYMBOLIC, FF_NONSYMBOLIC, \
calcChecksum, add32
from reportlab import rl_config
from reportlab.lib.utils import getBytesIO, isPy3, uniChr, int2Byte
def utf8(code):
"Convert a given UCS character index into UTF-8"
return uniChr(code).encode('utf8')
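# Helper: renders npages pages, each drawing a 32x32 grid of the code points
# 0..1023 (shifted by p*alter on page p) in both the Vera and VeraBI faces,
# which exercises subset generation across multiple pages.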
def _simple_subset_generation(fn,npages,alter=0):
c = Canvas(outputfile(fn))
c.setFont('Helvetica', 30)
c.drawString(100,700, 'Unicode TrueType Font Test %d pages' % npages)
# Draw a table of Unicode characters
for p in range(npages):
for fontName in ('Vera','VeraBI'):
c.setFont(fontName, 10)
for i in range(32):
for j in range(32):
ch = utf8(i * 32 + j+p*alter)
c.drawString(80 + j * 13 + int(j / 16.0) * 4, 600 - i * 13 - int(i / 8.0) * 8, ch)
c.showPage()
c.save()
class TTFontsTestCase(unittest.TestCase):
"Make documents with TrueType fonts"
def testTTF(self):
"Test PDF generation with TrueType fonts"
pdfmetrics.registerFont(TTFont("Vera", "Vera.ttf"))
pdfmetrics.registerFont(TTFont("VeraBI", "VeraBI.ttf"))
_simple_subset_generation('test_pdfbase_ttfonts1.pdf',1)
_simple_subset_generation('test_pdfbase_ttfonts3.pdf',3)
_simple_subset_generation('test_pdfbase_ttfonts35.pdf',3,5)
# Do it twice with the same font object
c = Canvas(outputfile('test_pdfbase_ttfontsadditional.pdf'))
# Draw a table of Unicode characters
c.setFont('Vera', 10)
c.drawString(100, 700, b'Hello, ' + utf8(0xffee))
c.save()
class TTFontFileTestCase(NearTestCase):
"Tests TTFontFile, TTFontParser and TTFontMaker classes"
def testFontFileFailures(self):
"Tests TTFontFile constructor error checks"
self.assertRaises(TTFError, TTFontFile, "nonexistent file")
self.assertRaises(TTFError, TTFontFile, getBytesIO(b""))
self.assertRaises(TTFError, TTFontFile, getBytesIO(b"invalid signature"))
self.assertRaises(TTFError, TTFontFile, getBytesIO(b"OTTO - OpenType not supported yet"))
self.assertRaises(TTFError, TTFontFile, getBytesIO(b"\0\1\0\0"))
def testFontFileReads(self):
"Tests TTFontParset.read_xxx"
class FakeTTFontFile(TTFontParser):
def __init__(self, data):
self._ttf_data = data
self._pos = 0
ttf = FakeTTFontFile(b"\x81\x02\x03\x04" b"\x85\x06" b"ABCD" b"\x7F\xFF" b"\x80\x00" b"\xFF\xFF")
self.assertEquals(ttf.read_ulong(), 0x81020304) # big-endian
self.assertEquals(ttf._pos, 4)
self.assertEquals(ttf.read_ushort(), 0x8506)
self.assertEquals(ttf._pos, 6)
self.assertEquals(ttf.read_tag(), 'ABCD')
self.assertEquals(ttf._pos, 10)
self.assertEquals(ttf.read_short(), 0x7FFF)
self.assertEquals(ttf.read_short(), -0x8000)
self.assertEquals(ttf.read_short(), -1)
def testFontFile(self):
"Tests TTFontFile and TTF parsing code"
ttf = TTFontFile("Vera.ttf")
self.assertEquals(ttf.name, b"BitstreamVeraSans-Roman")
self.assertEquals(ttf.flags, FF_SYMBOLIC)
self.assertEquals(ttf.italicAngle, 0.0)
self.assertNear(ttf.ascent,759.765625)
self.assertNear(ttf.descent,-240.234375)
self.assertEquals(ttf.capHeight, 759.765625)
self.assertNear(ttf.bbox, [-183.10546875, -235.83984375, 1287.109375, 928.22265625])
self.assertEquals(ttf.stemV, 87)
self.assertEquals(ttf.defaultWidth, 600.09765625)
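# add32 is expected to behave as 32-bit wrap-around addition (mod 2**32); the
# boundary cases below check both signed-overflow directions.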
def testAdd32(self):
"Test add32"
self.assertEquals(add32(10, -6), 4)
self.assertEquals(add32(6, -10), -4&0xFFFFFFFF)
self.assertEquals(add32(0x80000000, -1), 0x7FFFFFFF)
self.assertEquals(add32(0x7FFFFFFF, 1), 0x80000000)
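# A TrueType table checksum is the sum of the data read as big-endian 32-bit
# words, zero-padded to a multiple of 4 bytes and truncated to 32 bits: e.g.
# b"\x81" pads to 81 00 00 00, and the multi-word cases wrap modulo 2**32.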
def testChecksum(self):
"Test calcChecksum function"
self.assertEquals(calcChecksum(b""), 0)
self.assertEquals(calcChecksum(b"\1"), 0x01000000)
self.assertEquals(calcChecksum(b"\x01\x02\x03\x04\x10\x20\x30\x40"), 0x11223344)
self.assertEquals(calcChecksum(b"\x81"), 0x81000000)
self.assertEquals(calcChecksum(b"\x81\x02"), 0x81020000)
self.assertEquals(calcChecksum(b"\x81\x02\x03"), 0x81020300)
self.assertEquals(calcChecksum(b"\x81\x02\x03\x04"), 0x81020304)
self.assertEquals(calcChecksum(b"\x81\x02\x03\x04\x05"), 0x86020304)
self.assertEquals(calcChecksum(b"\x41\x02\x03\x04\xD0\x20\x30\x40"), 0x11223344)
self.assertEquals(calcChecksum(b"\xD1\x02\x03\x04\x40\x20\x30\x40"), 0x11223344)
self.assertEquals(calcChecksum(b"\x81\x02\x03\x04\x90\x20\x30\x40"), 0x11223344)
self.assertEquals(calcChecksum(b"\x7F\xFF\xFF\xFF\x00\x00\x00\x01"), 0x80000000)
def testFontFileChecksum(self):
"Tests TTFontFile and TTF parsing code"
F = TTFOpenFile("Vera.ttf")[1].read()
TTFontFile(getBytesIO(F), validate=1) # should not fail
F1 = F[:12345] + b"\xFF" + F[12346:] # change one byte
self.assertRaises(TTFError, TTFontFile, getBytesIO(F1), validate=1)
F1 = F[:8] + b"\xFF" + F[9:] # change one byte
self.assertRaises(TTFError, TTFontFile, getBytesIO(F1), validate=1)
def testSubsetting(self):
"Tests TTFontFile and TTF parsing code"
ttf = TTFontFile("Vera.ttf")
subset = ttf.makeSubset([0x41, 0x42])
subset = TTFontFile(getBytesIO(subset), 0)
for tag in ('cmap', 'head', 'hhea', 'hmtx', 'maxp', 'name', 'OS/2',
'post', 'cvt ', 'fpgm', 'glyf', 'loca', 'prep'):
self.assert_(subset.get_table(tag))
subset.seek_table('loca')
for n in range(4):
pos = subset.read_ushort() # this is actually offset / 2
self.failIf(pos % 2 != 0, "glyph %d at +%d should be long aligned" % (n, pos * 2))
self.assertEquals(subset.name, b"BitstreamVeraSans-Roman")
self.assertEquals(subset.flags, FF_SYMBOLIC)
self.assertEquals(subset.italicAngle, 0.0)
self.assertNear(subset.ascent,759.765625)
self.assertNear(subset.descent,-240.234375)
self.assertEquals(subset.capHeight, 759.765625)
self.assertNear(subset.bbox, [-183.10546875, -235.83984375, 1287.109375, 928.22265625])
self.assertEquals(subset.stemV, 87)
def testFontMaker(self):
"Tests TTFontMaker class"
ttf = TTFontMaker()
ttf.add("ABCD", b"xyzzy")
ttf.add("QUUX", b"123")
ttf.add("head", b"12345678xxxx")
stm = ttf.makeStream()
ttf = TTFontParser(getBytesIO(stm), 0)
self.assertEquals(ttf.get_table("ABCD"), b"xyzzy")
self.assertEquals(ttf.get_table("QUUX"), b"123")
class TTFontFaceTestCase(unittest.TestCase):
"Tests TTFontFace class"
def testAddSubsetObjects(self):
"Tests TTFontFace.addSubsetObjects"
face = TTFontFace("Vera.ttf")
doc = PDFDocument()
fontDescriptor = face.addSubsetObjects(doc, "TestFont", [ 0x78, 0x2017 ])
fontDescriptor = doc.idToObject[fontDescriptor.name].dict
self.assertEquals(fontDescriptor['Type'], '/FontDescriptor')
self.assertEquals(fontDescriptor['Ascent'], face.ascent)
self.assertEquals(fontDescriptor['CapHeight'], face.capHeight)
self.assertEquals(fontDescriptor['Descent'], face.descent)
self.assertEquals(fontDescriptor['Flags'], (face.flags & ~FF_NONSYMBOLIC) | FF_SYMBOLIC)
self.assertEquals(fontDescriptor['FontName'], "/TestFont")
self.assertEquals(fontDescriptor['FontBBox'].sequence, face.bbox)
self.assertEquals(fontDescriptor['ItalicAngle'], face.italicAngle)
self.assertEquals(fontDescriptor['StemV'], face.stemV)
fontFile = fontDescriptor['FontFile2']
fontFile = doc.idToObject[fontFile.name]
self.assert_(fontFile.content != "")
class TTFontTestCase(NearTestCase):
"Tests TTFont class"
def testStringWidth(self):
"Test TTFont.stringWidth"
font = TTFont("Vera", "Vera.ttf")
self.assert_(font.stringWidth("test", 10) > 0)
width = font.stringWidth(utf8(0x2260) * 2, 1000)
expected = font.face.getCharWidth(0x2260) * 2
self.assertNear(width,expected)
def testSplitString(self):
"Tests TTFont.splitString"
doc = PDFDocument()
font = TTFont("Vera", "Vera.ttf")
text = b"".join(utf8(i) for i in range(511))
allchars = b"".join(int2Byte(i) for i in range(256))
nospace = allchars[:32] + allchars[33:]
chunks = [(0, allchars), (1, nospace)]
self.assertEquals(font.splitString(text, doc), chunks)
# Do it twice
self.assertEquals(font.splitString(text, doc), chunks)
text = b"".join(utf8(i) for i in range(510, -1, -1))
allchars = b"".join(int2Byte(i) for i in range(255, -1, -1))
nospace = allchars[:223] + allchars[224:]
chunks = [(1, nospace), (0, allchars)]
self.assertEquals(font.splitString(text, doc), chunks)
def testSplitStringSpaces(self):
# In order for justification (word spacing) to work, the space
# glyph must have a code 32, and no other character should have
# that code in any subset, or word spacing will be applied to it.
doc = PDFDocument()
font = TTFont("Vera", "Vera.ttf")
text = b"".join(utf8(i) for i in range(512, -1, -1))
chunks = font.splitString(text, doc)
state = font.state[doc]
self.assertEquals(state.assignments[32], 32)
self.assertEquals(state.subsets[0][32], 32)
self.assertEquals(state.subsets[1][32], 32)
def testSubsetInternalName(self):
"Tests TTFont.getSubsetInternalName"
doc = PDFDocument()
font = TTFont("Vera", "Vera.ttf")
# Actually generate some subsets
text = b"".join(utf8(i) for i in range(513))
font.splitString(text, doc)
self.assertRaises(IndexError, font.getSubsetInternalName, -1, doc)
self.assertRaises(IndexError, font.getSubsetInternalName, 3, doc)
self.assertEquals(font.getSubsetInternalName(0, doc), "/F1+0")
self.assertEquals(font.getSubsetInternalName(1, doc), "/F1+1")
self.assertEquals(font.getSubsetInternalName(2, doc), "/F1+2")
self.assertEquals(doc.delayedFonts, [font])
def testAddObjectsEmpty(self):
"TTFont.addObjects should not fail when no characters were used"
font = TTFont("Vera", "Vera.ttf")
doc = PDFDocument()
font.addObjects(doc)
def no_longer_testAddObjectsResets(self):
"Test that TTFont.addObjects resets the font"
# Actually generate some subsets
doc = PDFDocument()
font = TTFont("Vera", "Vera.ttf")
font.splitString('a', doc) # create some subset
doc = PDFDocument()
font.addObjects(doc)
self.assertEquals(font.frozen, 0)
self.assertEquals(font.nextCode, 0)
self.assertEquals(font.subsets, [])
self.assertEquals(font.assignments, {})
font.splitString('ba', doc) # should work
def testParallelConstruction(self):
"Test that TTFont can be used for different documents at the same time"
ttfAsciiReadable = rl_config.ttfAsciiReadable
try:
rl_config.ttfAsciiReadable = 1
doc1 = PDFDocument()
doc2 = PDFDocument()
font = TTFont("Vera", "Vera.ttf")
self.assertEquals(font.splitString('hello ', doc1), [(0, b'hello ')])
self.assertEquals(font.splitString('hello ', doc2), [(0, b'hello ')])
self.assertEquals(font.splitString(u'\u0410\u0411'.encode('UTF-8'), doc1), [(0, b'\x80\x81')])
self.assertEquals(font.splitString(u'\u0412'.encode('UTF-8'), doc2), [(0, b'\x80')])
font.addObjects(doc1)
self.assertEquals(font.splitString(u'\u0413'.encode('UTF-8'), doc2), [(0, b'\x81')])
font.addObjects(doc2)
finally:
rl_config.ttfAsciiReadable = ttfAsciiReadable
def testAddObjects(self):
"Test TTFont.addObjects"
# Actually generate some subsets
ttfAsciiReadable = rl_config.ttfAsciiReadable
try:
rl_config.ttfAsciiReadable = 1
doc = PDFDocument()
font = TTFont("Vera", "Vera.ttf")
font.splitString('a', doc) # create some subset
internalName = font.getSubsetInternalName(0, doc)[1:]
font.addObjects(doc)
pdfFont = doc.idToObject[internalName]
self.assertEquals(doc.idToObject['BasicFonts'].dict[internalName], pdfFont)
self.assertEquals(pdfFont.Name, internalName)
self.assertEquals(pdfFont.BaseFont, "AAAAAA+BitstreamVeraSans-Roman")
self.assertEquals(pdfFont.FirstChar, 0)
self.assertEquals(pdfFont.LastChar, 127)
self.assertEquals(len(pdfFont.Widths.sequence), 128)
toUnicode = doc.idToObject[pdfFont.ToUnicode.name]
self.assert_(toUnicode.content != "")
fontDescriptor = doc.idToObject[pdfFont.FontDescriptor.name]
self.assertEquals(fontDescriptor.dict['Type'], '/FontDescriptor')
finally:
rl_config.ttfAsciiReadable = ttfAsciiReadable
def testMakeToUnicodeCMap(self):
"Test makeToUnicodeCMap"
self.assertEquals(makeToUnicodeCMap("TestFont", [ 0x1234, 0x4321, 0x4242 ]),
"""/CIDInit /ProcSet findresource begin
12 dict begin
begincmap
/CIDSystemInfo
<< /Registry (TestFont)
/Ordering (TestFont)
/Supplement 0
>> def
/CMapName /TestFont def
/CMapType 2 def
1 begincodespacerange
<00> <02>
endcodespacerange
3 beginbfchar
<00> <1234>
<01> <4321>
<02> <4242>
endbfchar
endcmap
CMapName currentdict /CMap defineresource pop
end
end""")
def makeSuite():
suite = makeSuiteForClasses(
TTFontsTestCase,
TTFontFileTestCase,
TTFontFaceTestCase,
TTFontTestCase)
return suite
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
printLocation()
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, workspace
from hypothesis import given
from hypothesis import strategies as st
import caffe2.python.hypothesis_test_util as hu
import numpy as np
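# Hypothesis strategy: draw (data_size, num_examples, ranges_per_example), then
# generate an int32 array of shape [num_examples, ranges_per_example, 2] holding
# border pairs in [0, data_size], plus a flat data array of length data_size.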
def batched_boarders_and_data(
data_min_size=5, data_max_size=10,
examples_min_number=1, examples_max_number=4,
example_min_size=1, example_max_size=3,
dtype=np.float32, elements=None):
dims_ = st.tuples(
st.integers(min_value=data_min_size,
max_value=data_max_size),
st.integers(min_value=examples_min_number,
max_value=examples_max_number),
st.integers(min_value=example_min_size,
max_value=example_max_size),
)
return dims_.flatmap(
lambda dims: st.tuples(
hu.arrays(
[dims[1], dims[2], 2], dtype=np.int32,
elements=st.integers(min_value=0, max_value=dims[0])
),
hu.arrays([dims[0]], dtype, elements)
))
@st.composite
def _tensor_splits(draw):
lengths = draw(st.lists(st.integers(1, 5), min_size=1, max_size=10))
batch_size = draw(st.integers(1, 5))
element_pairs = [
(batch, r) for batch in range(batch_size) for r in range(len(lengths))
]
perm = draw(st.permutations(element_pairs))
perm = perm[:-1] # skip one range
ranges = [[(0, 0)] * len(lengths) for _ in range(batch_size)]
offset = 0
for pair in perm:
ranges[pair[0]][pair[1]] = (offset, lengths[pair[1]])
offset += lengths[pair[1]]
data = draw(st.lists(
st.floats(min_value=-1.0, max_value=1.0),
min_size=offset,
max_size=offset
))
key = draw(st.permutations(range(offset)))
return (
np.array(data).astype(np.float32), np.array(ranges),
np.array(lengths), np.array(key).astype(np.int64)
)
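# Reference implementation for the GatherRanges op: for each example, concatenate
# data[start:start + length] for every (start, length) range and record the total
# gathered length per example.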
def gather_ranges(data, ranges):
lengths = []
output = []
for example_ranges in ranges:
length = 0
for rng in example_ranges:
assert len(rng) == 2
output.extend(data[rng[0]:rng[0] + rng[1]])
length += rng[1]
lengths.append(length)
return output, lengths
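# Reference for GatherRangesToDense: one dense output per range index, stacking
# the gathered segment of each batch example; an empty range contributes a zero
# row of the expected length.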
def gather_ranges_to_dense(data, ranges, lengths):
outputs = []
assert len(ranges)
batch_size = len(ranges)
assert len(ranges[0])
num_ranges = len(ranges[0])
assert ranges.shape[2] == 2
for i in range(num_ranges):
out = []
for j in range(batch_size):
start, length = ranges[j][i]
if not length:
out.append([0] * lengths[i])
else:
assert length == lengths[i]
out.append(data[start:start + length])
outputs.append(np.array(out))
return outputs
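# As gather_ranges_to_dense, except each gathered segment is reordered by its
# corresponding key values before being stacked.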
def gather_ranges_to_dense_with_key(data, ranges, key, lengths):
outputs = []
assert len(ranges)
batch_size = len(ranges)
assert len(ranges[0])
num_ranges = len(ranges[0])
assert ranges.shape[2] == 2
for i in range(num_ranges):
out = []
for j in range(batch_size):
start, length = ranges[j][i]
if not length:
out.append([0] * lengths[i])
else:
assert length == lengths[i]
key_data_list = zip(
key[start:start + length],
data[start:start + length])
sorted_key_data_list = sorted(key_data_list, key=lambda x: x[0])
sorted_data = [d for (k, d) in sorted_key_data_list]
out.append(sorted_data)
outputs.append(np.array(out))
return outputs
class TestGatherRanges(hu.HypothesisTestCase):
@given(boarders_and_data=batched_boarders_and_data(), **hu.gcs_cpu_only)
def test_gather_ranges(self, boarders_and_data, gc, dc):
boarders, data = boarders_and_data
def boarders_to_range(boarders):
assert len(boarders) == 2
boarders = sorted(boarders)
return [boarders[0], boarders[1] - boarders[0]]
ranges = np.apply_along_axis(boarders_to_range, 2, boarders)
self.assertReferenceChecks(
device_option=gc,
op=core.CreateOperator("GatherRanges",
["data", "ranges"],
["output", "lengths"]),
inputs=[data, ranges],
reference=gather_ranges,
)
@given(tensor_splits=_tensor_splits(), **hu.gcs_cpu_only)
def test_gather_ranges_split(self, tensor_splits, gc, dc):
data, ranges, lengths, _ = tensor_splits
self.assertReferenceChecks(
device_option=gc,
op=core.CreateOperator(
"GatherRangesToDense",
['data', 'ranges'],
['X_{}'.format(i) for i in range(len(lengths))],
lengths=lengths
),
inputs=[data, ranges, lengths],
reference=gather_ranges_to_dense
)
@given(tensor_splits=_tensor_splits(), **hu.gcs_cpu_only)
def test_gather_ranges_with_key_split(self, tensor_splits, gc, dc):
data, ranges, lengths, key = tensor_splits
self.assertReferenceChecks(
device_option=gc,
op=core.CreateOperator(
"GatherRangesToDense",
['data', 'ranges', 'key'],
['X_{}'.format(i) for i in range(len(lengths))],
lengths=lengths
),
inputs=[data, ranges, key, lengths],
reference=gather_ranges_to_dense_with_key
)
def test_shape_and_type_inference(self):
with hu.temp_workspace("shape_type_inf_int32"):
net = core.Net('test_net')
net.ConstantFill(
[], "ranges", shape=[3, 5, 2], dtype=core.DataType.INT32,
)
net.ConstantFill(
[], "values", shape=[64], dtype=core.DataType.INT64,
)
net.GatherRanges(['values', 'ranges'], ['values_output', 'lengths_output'])
(shapes, types) = workspace.InferShapesAndTypes([net], {})
self.assertEqual(shapes["values_output"], [64])
self.assertEqual(types["values_output"], core.DataType.INT64)
self.assertEqual(shapes["lengths_output"], [3])
self.assertEqual(types["lengths_output"], core.DataType.INT32)
if __name__ == "__main__":
import unittest
unittest.main()
import json
import re
from django.utils.translation import ugettext as _
from ide.models.files import ResourceFile, ResourceIdentifier
__author__ = 'katharine'
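# The generators below emit a waf wscript as one big template string. The only
# substitution is the {{jshint}} placeholder, filled with a Python literal via
# str.replace() rather than str.format(), so the ${SRC}/${TGT} and '{}' braces
# inside the template survive untouched, e.g. "if {{jshint}} and hint is not
# None:" becomes "if True and hint is not None:".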
def generate_wscript_file_sdk2(project, for_export=False):
jshint = project.app_jshint
wscript = """
#
# This file is the default set of rules to compile a Pebble project.
#
# Feel free to customize this to your needs.
#
import os.path
try:
from sh import CommandNotFound, jshint, cat, ErrorReturnCode_2
hint = jshint
except (ImportError, CommandNotFound):
hint = None
top = '.'
out = 'build'
def options(ctx):
ctx.load('pebble_sdk')
def configure(ctx):
ctx.load('pebble_sdk')
global hint
if hint is not None:
hint = hint.bake(['--config', 'pebble-jshintrc'])
def build(ctx):
if {{jshint}} and hint is not None:
try:
hint([node.abspath() for node in ctx.path.ant_glob("src/**/*.js")], _tty_out=False) # no tty because there are none in the cloudpebble sandbox.
except ErrorReturnCode_2 as e:
ctx.fatal("\\nJavaScript linting failed (you can disable this in Project Settings):\\n" + e.stdout)
# Concatenate all our JS files (but not recursively), and only if any JS exists in the first place.
ctx.path.make_node('src/js/').mkdir()
js_paths = ctx.path.ant_glob(['src/*.js', 'src/**/*.js'])
if js_paths:
ctx(rule='cat ${SRC} > ${TGT}', source=js_paths, target='pebble-js-app.js')
has_js = True
else:
has_js = False
ctx.load('pebble_sdk')
ctx.pbl_program(source=ctx.path.ant_glob('src/**/*.c'),
target='pebble-app.elf')
if os.path.exists('worker_src'):
ctx.pbl_worker(source=ctx.path.ant_glob('worker_src/**/*.c'),
target='pebble-worker.elf')
ctx.pbl_bundle(elf='pebble-app.elf',
worker_elf='pebble-worker.elf',
js='pebble-js-app.js' if has_js else [])
else:
ctx.pbl_bundle(elf='pebble-app.elf',
js='pebble-js-app.js' if has_js else [])
"""
return wscript.replace('{{jshint}}', 'True' if jshint and not for_export else 'False')
def generate_wscript_file_sdk3(project, for_export):
jshint = project.app_jshint
wscript = """
#
# This file is the default set of rules to compile a Pebble project.
#
# Feel free to customize this to your needs.
#
import os.path
try:
from sh import CommandNotFound, jshint, cat, ErrorReturnCode_2
hint = jshint
except (ImportError, CommandNotFound):
hint = None
top = '.'
out = 'build'
def options(ctx):
ctx.load('pebble_sdk')
def configure(ctx):
ctx.load('pebble_sdk')
def build(ctx):
if {{jshint}} and hint is not None:
try:
hint([node.abspath() for node in ctx.path.ant_glob("src/**/*.js")], _tty_out=False) # no tty because there are none in the cloudpebble sandbox.
except ErrorReturnCode_2 as e:
ctx.fatal("\\nJavaScript linting failed (you can disable this in Project Settings):\\n" + e.stdout)
# Concatenate all our JS files (but not recursively), and only if any JS exists in the first place.
ctx.path.make_node('src/js/').mkdir()
js_paths = ctx.path.ant_glob(['src/*.js', 'src/**/*.js'])
if js_paths:
ctx(rule='cat ${SRC} > ${TGT}', source=js_paths, target='pebble-js-app.js')
has_js = True
else:
has_js = False
ctx.load('pebble_sdk')
build_worker = os.path.exists('worker_src')
binaries = []
for p in ctx.env.TARGET_PLATFORMS:
ctx.set_env(ctx.all_envs[p])
ctx.set_group(ctx.env.PLATFORM_NAME)
app_elf='{}/pebble-app.elf'.format(p)
ctx.pbl_program(source=ctx.path.ant_glob('src/**/*.c'),
target=app_elf)
if build_worker:
worker_elf='{}/pebble-worker.elf'.format(p)
binaries.append({'platform': p, 'app_elf': app_elf, 'worker_elf': worker_elf})
ctx.pbl_worker(source=ctx.path.ant_glob('worker_src/**/*.c'),
target=worker_elf)
else:
binaries.append({'platform': p, 'app_elf': app_elf})
ctx.set_group('bundle')
ctx.pbl_bundle(binaries=binaries, js='pebble-js-app.js' if has_js else [])
"""
return wscript.replace('{{jshint}}', 'True' if jshint and not for_export else 'False')
def generate_wscript_file(project, for_export=False):
if project.sdk_version == '2':
return generate_wscript_file_sdk2(project, for_export)
elif project.sdk_version == '3':
return generate_wscript_file_sdk3(project, for_export)
def generate_jshint_file(project):
return """
/*
* Example jshint configuration file for Pebble development.
*
* Check out the full documentation at http://www.jshint.com/docs/options/
*/
{
// Declares the existence of the globals available in PebbleKit JS.
"globals": {
"Pebble": true,
"console": true,
"XMLHttpRequest": true,
"navigator": true, // For navigator.geolocation
"localStorage": true,
"setTimeout": true,
"setInterval": true,
"Int8Array": true,
"Uint8Array": true,
"Uint8ClampedArray": true,
"Int16Array": true,
"Uint16Array": true,
"Int32Array": true,
"Uint32Array": true,
"Float32Array": true,
"Float64Array": true
},
// Do not mess with standard JavaScript objects (Array, Date, etc)
"freeze": true,
// Do not use eval! Keep this warning turned on (ie: false)
"evil": false,
/*
* The options below are more style/developer dependent.
* Customize to your liking.
*/
// All variables should be in camelcase - too specific for CloudPebble builds to fail
// "camelcase": true,
// Do not allow blocks without { } - too specific for CloudPebble builds to fail.
// "curly": true,
// Prohibits the use of immediate function invocations without wrapping them in parentheses
"immed": true,
// Don't enforce indentation, because it's not worth failing builds over
// (especially given our somewhat lacklustre support for it)
"indent": false,
// Do not use a variable before it's defined
"latedef": "nofunc",
// Spot undefined variables
"undef": "true",
// Spot unused variables
"unused": "true"
}
"""
def generate_manifest(project, resources):
if project.project_type == 'native':
if project.sdk_version == '2':
return generate_v2_manifest(project, resources)
else:
return generate_v3_manifest(project, resources)
elif project.project_type == 'pebblejs':
return generate_pebblejs_manifest(project, resources)
elif project.project_type == 'simplyjs':
return generate_simplyjs_manifest(project)
else:
raise Exception(_("Unknown project type %s") % project.project_type)
def generate_v2_manifest(project, resources):
return dict_to_pretty_json(generate_v2_manifest_dict(project, resources))
def generate_v3_manifest(project, resources):
return dict_to_pretty_json(generate_v3_manifest_dict(project, resources))
def generate_v2_manifest_dict(project, resources):
manifest = {
'uuid': str(project.app_uuid),
'shortName': project.app_short_name,
'longName': project.app_long_name,
'companyName': project.app_company_name,
'versionLabel': project.app_version_label,
'versionCode': 1,
'watchapp': {
'watchface': project.app_is_watchface
},
'appKeys': json.loads(project.app_keys),
'resources': generate_resource_dict(project, resources),
'capabilities': project.app_capabilities.split(','),
'projectType': 'native',
'sdkVersion': "2",
}
return manifest
def generate_v3_manifest_dict(project, resources):
# Just extend the v2 one.
manifest = generate_v2_manifest_dict(project, resources)
if project.app_platforms:
manifest['targetPlatforms'] = project.app_platform_list
manifest['sdkVersion'] = "3"
del manifest['versionCode']
return manifest
def generate_manifest_dict(project, resources):
if project.project_type == 'native':
if project.sdk_version == '2':
return generate_v2_manifest_dict(project, resources)
else:
return generate_v3_manifest_dict(project, resources)
elif project.project_type == 'simplyjs':
return generate_simplyjs_manifest_dict(project)
elif project.project_type == 'pebblejs':
return generate_pebblejs_manifest_dict(project, resources)
else:
raise Exception(_("Unknown project type %s") % project.project_type)
def generate_resource_map(project, resources):
return dict_to_pretty_json(generate_resource_dict(project, resources))
def dict_to_pretty_json(d):
return json.dumps(d, indent=4, separators=(',', ': '), sort_keys=True) + "\n"
def generate_resource_dict(project, resources):
if project.project_type == 'native':
return generate_v2_resource_dict(resources)
elif project.project_type == 'simplyjs':
return generate_simplyjs_resource_dict()
elif project.project_type == 'pebblejs':
return generate_pebblejs_resource_dict(resources)
else:
raise Exception(_("Unknown project type %s") % project.project_type)
def generate_v2_resource_dict(resources):
resource_map = {'media': []}
for resource in resources:
for resource_id in resource.get_identifiers():
d = {
'type': resource.kind,
'file': resource.root_path,
'name': resource_id.resource_id,
}
if resource_id.character_regex:
d['characterRegex'] = resource_id.character_regex
if resource_id.tracking:
d['trackingAdjust'] = resource_id.tracking
if resource.is_menu_icon:
d['menuIcon'] = True
if resource_id.compatibility is not None:
d['compatibility'] = resource_id.compatibility
resource_map['media'].append(d)
return resource_map
def generate_simplyjs_resource_dict():
return {
"media": [
{
"menuIcon": True,
"type": "png",
"name": "IMAGE_MENU_ICON",
"file": "images/menu_icon.png"
}, {
"type": "png",
"name": "IMAGE_LOGO_SPLASH",
"file": "images/logo_splash.png"
}, {
"type": "font",
"name": "MONO_FONT_14",
"file": "fonts/UbuntuMono-Regular.ttf"
}
]
}
def generate_pebblejs_resource_dict(resources):
media = [
{
"menuIcon": True, # This must be the first entry; we adjust it later.
"type": "png",
"name": "IMAGE_MENU_ICON",
"file": "images/menu_icon.png"
}, {
"type": "png",
"name": "IMAGE_LOGO_SPLASH",
"file": "images/logo_splash.png"
}, {
"type": "png",
"name": "IMAGE_TILE_SPLASH",
"file": "images/tile_splash.png"
}, {
"type": "font",
"name": "MONO_FONT_14",
"file": "fonts/UbuntuMono-Regular.ttf"
}
]
for resource in resources:
if resource.kind != 'png':
continue
d = {
'type': resource.kind,
'file': resource.root_path,
'name': re.sub(r'[^A-Z0-9_]', '_', resource.root_path.upper()),
}
if resource.is_menu_icon:
d['menuIcon'] = True
del media[0]['menuIcon']
media.append(d)
return {
'media': media
}
def generate_simplyjs_manifest(project):
return dict_to_pretty_json(generate_simplyjs_manifest_dict(project))
def generate_simplyjs_manifest_dict(project):
manifest = {
"uuid": project.app_uuid,
"shortName": project.app_short_name,
"longName": project.app_long_name,
"companyName": project.app_company_name,
"versionLabel": project.app_version_label,
"versionCode": 1,
"capabilities": project.app_capabilities.split(','),
"watchapp": {
"watchface": project.app_is_watchface
},
"appKeys": {},
"resources": generate_simplyjs_resource_dict(),
"projectType": "simplyjs"
}
return manifest
def generate_pebblejs_manifest(project, resources):
return dict_to_pretty_json(generate_pebblejs_manifest_dict(project, resources))
def generate_pebblejs_manifest_dict(project, resources):
manifest = {
"uuid": project.app_uuid,
"shortName": project.app_short_name,
"longName": project.app_long_name,
"companyName": project.app_company_name,
"versionLabel": project.app_version_label,
"capabilities": project.app_capabilities.split(','),
"versionCode": 1,
"watchapp": {
"watchface": project.app_is_watchface
},
"appKeys": {},
"resources": generate_pebblejs_resource_dict(resources),
"projectType": "pebblejs",
"sdkVersion": "3",
}
return manifest
#
# [The "BSD license"]
# Copyright (c) 2012 Terence Parr
# Copyright (c) 2012 Sam Harwell
# Copyright (c) 2014 Eric Vergnaud
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# An ATN transition between any two ATN states. Subclasses define
# atom, set, epsilon, action, predicate, rule transitions.
#
# <p>This is a one way link. It emanates from a state (usually via a list of
# transitions) and has a target state.</p>
#
# <p>Since we never have to change the ATN transitions once we construct it,
# we can fix these transitions as specific classes. The DFA transitions
# on the other hand need to update the labels as it adds transitions to
# the states. We'll use the term Edge for the DFA to distinguish them from
# ATN transitions.</p>
#
from __builtin__ import unicode
from antlr4.IntervalSet import IntervalSet, Interval
from antlr4.Token import Token
from antlr4.atn.SemanticContext import Predicate, PrecedencePredicate
class Transition (object):
# constants for serialization
EPSILON = 1
RANGE = 2
RULE = 3
PREDICATE = 4 # e.g., {isType(input.LT(1))}?
ATOM = 5
ACTION = 6
SET = 7 # ~(A|B) or ~atom, wildcard, which convert to next 2
NOT_SET = 8
WILDCARD = 9
PRECEDENCE = 10
serializationNames = [
u"INVALID",
u"EPSILON",
u"RANGE",
u"RULE",
u"PREDICATE",
u"ATOM",
u"ACTION",
u"SET",
u"NOT_SET",
u"WILDCARD",
u"PRECEDENCE"
]
serializationTypes = dict()
def __init__(self, target):
# The target of this transition.
if target is None:
raise Exception("target cannot be null.")
self.target = target
# Are we epsilon, action, sempred?
self.isEpsilon = False
self.label = None
def __str__(self):
return unicode(self)
# TODO: make all transitions sets? no, should remove set edges
class AtomTransition(Transition):
def __init__(self, target, label):
super(AtomTransition, self).__init__(target)
self.label_ = label # The token type or character value; or, signifies special label.
self.label = self.makeLabel()
self.serializationType = self.ATOM
def makeLabel(self):
s = IntervalSet()
s.addOne(self.label_)
return s
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return self.label_ == symbol
def __unicode__(self):
return unicode(self.label_)
class RuleTransition(Transition):
def __init__(self, ruleStart, ruleIndex, precedence, followState):
super(RuleTransition, self).__init__(ruleStart)
self.ruleIndex = ruleIndex # ptr to the rule definition object for this rule ref
self.precedence = precedence
self.followState = followState # what node to begin computations following ref to rule
self.serializationType = self.RULE
self.isEpsilon = True
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return False
class EpsilonTransition(Transition):
def __init__(self, target, outermostPrecedenceReturn=-1):
super(EpsilonTransition, self).__init__(target)
self.serializationType = self.EPSILON
self.isEpsilon = True
self.outermostPrecedenceReturn = outermostPrecedenceReturn
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return False
def __unicode__(self):
return "epsilon"
class RangeTransition(Transition):
def __init__(self, target, start, stop):
super(RangeTransition, self).__init__(target)
self.serializationType = self.RANGE
self.start = start
self.stop = stop
self.label = self.makeLabel()
def makeLabel(self):
s = IntervalSet()
s.addRange(Interval(self.start, self.stop + 1))
return s
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return symbol >= self.start and symbol <= self.stop
def __unicode__(self):
return "'" + chr(self.start) + "'..'" + chr(self.stop) + "'"
class AbstractPredicateTransition(Transition):
def __init__(self, target):
super(AbstractPredicateTransition, self).__init__(target)
class PredicateTransition(AbstractPredicateTransition):
def __init__(self, target, ruleIndex, predIndex, isCtxDependent):
super(PredicateTransition, self).__init__(target)
self.serializationType = self.PREDICATE
self.ruleIndex = ruleIndex
self.predIndex = predIndex
self.isCtxDependent = isCtxDependent # e.g., $i ref in pred
self.isEpsilon = True
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return False
def getPredicate(self):
return Predicate(self.ruleIndex, self.predIndex, self.isCtxDependent)
def __unicode__(self):
return u"pred_" + unicode(self.ruleIndex) + u":" + unicode(self.predIndex)
class ActionTransition(Transition):
def __init__(self, target, ruleIndex, actionIndex=-1, isCtxDependent=False):
super(ActionTransition, self).__init__(target)
self.serializationType = self.ACTION
self.ruleIndex = ruleIndex
self.actionIndex = actionIndex
self.isCtxDependent = isCtxDependent # e.g., $i ref in pred
self.isEpsilon = True
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return False
def __unicode__(self):
return u"action_" + unicode(self.ruleIndex) + u":" + unicode(self.actionIndex)
# A transition containing a set of values.
class SetTransition(Transition):
def __init__(self, target, set):
super(SetTransition, self).__init__(target)
self.serializationType = self.SET
if set is not None:
self.label = set
else:
self.label = IntervalSet()
self.label.addRange(Interval(Token.INVALID_TYPE, Token.INVALID_TYPE + 1))
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return symbol in self.label
def __unicode__(self):
return unicode(self.label)
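# NotSetTransition inverts its parent SetTransition: it matches any symbol that
# lies within [minVocabSymbol, maxVocabSymbol] but is not in the label set.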
class NotSetTransition(SetTransition):
def __init__(self, target, set):
super(NotSetTransition, self).__init__(target, set)
self.serializationType = self.NOT_SET
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return symbol >= minVocabSymbol \
and symbol <= maxVocabSymbol \
and not super(type(self), self).matches(symbol, minVocabSymbol, maxVocabSymbol)
def __unicode__(self):
return u'~' + super(type(self), self).__unicode__()
class WildcardTransition(Transition):
def __init__(self, target):
super(WildcardTransition, self).__init__(target)
self.serializationType = self.WILDCARD
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return symbol >= minVocabSymbol and symbol <= maxVocabSymbol
def __unicode__(self):
return u"."
class PrecedencePredicateTransition(AbstractPredicateTransition):
def __init__(self, target, precedence):
super(PrecedencePredicateTransition, self).__init__(target)
self.serializationType = self.PRECEDENCE
self.precedence = precedence
self.isEpsilon = True
def matches( self, symbol, minVocabSymbol, maxVocabSymbol):
return False
def getPredicate(self):
return PrecedencePredicate(self.precedence)
def __unicode__(self):
return self.precedence + " >= _p"
Transition.serializationTypes = {
EpsilonTransition: Transition.EPSILON,
RangeTransition: Transition.RANGE,
RuleTransition: Transition.RULE,
PredicateTransition: Transition.PREDICATE,
AtomTransition: Transition.ATOM,
ActionTransition: Transition.ACTION,
SetTransition: Transition.SET,
NotSetTransition: Transition.NOT_SET,
WildcardTransition: Transition.WILDCARD,
PrecedencePredicateTransition: Transition.PRECEDENCE
}
# pylint: disable=I0011,W0613,W0201,W0212,E1101,E1103
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
from mock import MagicMock
from ..data import ComponentID, Component, Data
from ..subset import Subset
from .. import parse
class TestParse(object):
def test_re_matches_valid_names(self):
reg = parse.TAG_RE
valid = ['{a}', '{ a }', '{A}', '{a }', '{ a}',
'{a_}', '{abc_1}', '{_abc_1}', '{1}', '{1_}']
invalid = ['', '{}', '{a b}']
for v in valid:
assert reg.match(v) is not None
for i in invalid:
assert reg.match(i) is None
def test_group(self):
reg = parse.TAG_RE
assert reg.match('{a}').group('tag') == 'a'
assert reg.match('{ a }').group('tag') == 'a'
assert reg.match('{ A }').group('tag') == 'A'
assert reg.match('{ Abc_ }').group('tag') == 'Abc_'
def test_reference_list(self):
cmd = '{a} - {b} + {c}'
refs = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
expected = set([1, 2, 3])
result = set(parse._reference_list(cmd, refs))
assert expected == result
def test_reference_list_invalid_cmd(self):
with pytest.raises(KeyError) as exc:
parse._reference_list('{a}', {})
assert exc.value.args[0] == ("Tags from command not in "
"reference mapping")
def test_dereference(self):
c1 = ComponentID('c1')
c2 = ComponentID('c2')
s1 = Subset(None, label='s1')
s2 = Subset(None, label='s2')
refs = dict([('c1', c1), ('c2', c2), ('s1', s1), ('s2', s2)])
cmd = '({c1} > 10) and {s1}'
expected = ('(data[references["c1"], __view] > 10) and '
'references["s1"].to_mask(__view)')
result = parse._dereference(cmd, refs)
assert expected == result
def test_validate(self):
ref = {'a': 1, 'b': 2}
parse._validate('{a} + {b}', ref)
parse._validate('{a}', ref)
parse._validate('3 + 4', ref)
with pytest.raises(TypeError) as exc:
parse._validate('{c}', ref)
assert exc.value.args[0] == ("Tag c not in reference mapping: "
"['a', 'b']")
def test_ensure_only_component_references(self):
ref = {'a': 1, 'b': ComponentID('b')}
F = parse._ensure_only_component_references
F('{b} + 5', ref)
with pytest.raises(TypeError) as exc:
F('{b} + {a}', ref)
assert exc.value.args[0] == ("Reference to a, which is not a "
"ComponentID")
with pytest.raises(TypeError) as exc:
F('{b} + {d}', ref)
assert exc.value.args[0] == ("Reference to d, which is not a "
"ComponentID")
class TestParsedCommand(object):
def test_evaluate_component(self):
data = MagicMock()
c1 = ComponentID('c1')
data.__getitem__.return_value = 5
cmd = '{comp1} * 5'
refs = {'comp1': c1}
pc = parse.ParsedCommand(cmd, refs)
assert pc.evaluate(data) == 25
data.__getitem__.assert_called_once_with((c1, None))
def test_evaluate_subset(self):
sub = MagicMock(spec_set=Subset)
sub2 = MagicMock(spec_set=Subset)
sub.to_mask.return_value = 3
sub2.to_mask.return_value = 4
cmd = '{s1} and {s2}'
refs = {'s1': sub, 's2': sub2}
pc = parse.ParsedCommand(cmd, refs)
assert pc.evaluate(None) == (3 and 4)
def test_evaluate_function(self):
data = MagicMock()
c1 = ComponentID('c1')
data.__getitem__.return_value = 5
cmd = 'max({comp1}, 100)'
refs = {'comp1': c1}
pc = parse.ParsedCommand(cmd, refs)
assert pc.evaluate(data) == 100
data.__getitem__.assert_called_once_with((c1, None))
class TestParsedComponentLink(object):
def make_link(self):
data = Data()
comp = Component(np.array([1, 2, 3]))
c1 = ComponentID('c1')
c2 = ComponentID('c2')
data.add_component(comp, c1)
cmd = '{comp1} * 100'
refs = {'comp1': c1}
pc = parse.ParsedCommand(cmd, refs)
cl = parse.ParsedComponentLink(c2, pc)
data.add_component_link(cl)
return data, c2
def test(self):
data, cid = self.make_link()
result = data[cid]
expected = np.array([100, 200, 300])
np.testing.assert_array_equal(result, expected)
def test_not_identity(self):
# regression test
d = Data(x=[1, 2, 3])
c2 = ComponentID('c2')
cmd = '{x}'
refs = {'x': d.id['x']}
pc = parse.ParsedCommand(cmd, refs)
link = parse.ParsedComponentLink(c2, pc)
assert not link.identity
def test_slice(self):
data, cid = self.make_link()
result = data[cid, ::2]
np.testing.assert_array_equal(result, [100, 300])
def test_save_load(self):
from .test_state import clone
d = Data(x=[1, 2, 3])
c2 = ComponentID('c2')
cmd = '{x} + 1'
refs = {'x': d.id['x']}
pc = parse.ParsedCommand(cmd, refs)
link = parse.ParsedComponentLink(c2, pc)
d.add_component_link(link)
d2 = clone(d)
np.testing.assert_array_equal(d2['c2'], [2, 3, 4])
class TestParsedSubsetState(object):
def setup_method(self, method):
data = Data(g=[2, 4, 6, 8])
s1 = data.new_subset()
s2 = data.new_subset()
s1.subset_state = np.array([1, 1, 1, 0], dtype=bool)
s2.subset_state = np.array([0, 1, 1, 1], dtype=bool)
self.refs = {'s1': s1, 's2': s2, 'g': data.id['g']}
self.data = data
def test_two_subset(self):
cmd = '{s1} & {s2}'
s = self.data.new_subset()
p = parse.ParsedCommand(cmd, self.refs)
state = parse.ParsedSubsetState(p)
s.subset_state = state
result = s.to_mask()
expected = np.array([0, 1, 1, 0], dtype=bool)
np.testing.assert_array_equal(result, expected)
def test_two_subset_and_component(self):
cmd = '{s1} & {s2} & ({g} < 6)'
s = self.data.new_subset()
p = parse.ParsedCommand(cmd, self.refs)
state = parse.ParsedSubsetState(p)
s.subset_state = state
result = s.to_mask()
expected = np.array([0, 1, 0, 0], dtype=bool)
np.testing.assert_array_equal(result, expected)
from __future__ import absolute_import, unicode_literals
import logging
from mopidy import models
from mopidy.audio import PlaybackState
from mopidy.compat import urllib
from mopidy.internal import deprecation, validation
from mopidy.core import triggers
from mopidy.core import listener
logger = logging.getLogger(__name__)
class PlaybackController(object):
pykka_traversable = True
def __init__(self, audio, backends, core):
# TODO: these should be internal
self.backends = backends
self.core = core
self._audio = audio
self._stream_title = None
self._state = PlaybackState.STOPPED
self._current_tl_track = None
self._pending_tl_track = None
self._last_position = None
self._previous = False
if self._audio:
self._audio.set_about_to_finish_callback(
self._on_about_to_finish_callback)
def _get_backend(self, tl_track):
if tl_track is None:
return None
uri_scheme = urllib.parse.urlparse(tl_track.track.uri).scheme
return self.backends.with_playback.get(uri_scheme, None)
# Properties
def get_current_tl_track(self):
"""Get the currently playing or selected track.
Returns a :class:`mopidy.models.TlTrack` or :class:`None`.
"""
return self._current_tl_track
def _set_current_tl_track(self, value):
"""Set the currently playing or selected track.
*Internal:* This is only for use by Mopidy's test suite.
"""
self._current_tl_track = value
current_tl_track = deprecation.deprecated_property(get_current_tl_track)
"""
.. deprecated:: 1.0
Use :meth:`get_current_tl_track` instead.
"""
def get_current_track(self):
"""
Get the currently playing or selected track.
Extracted from :meth:`get_current_tl_track` for convenience.
Returns a :class:`mopidy.models.Track` or :class:`None`.
"""
return getattr(self.get_current_tl_track(), 'track', None)
current_track = deprecation.deprecated_property(get_current_track)
"""
.. deprecated:: 1.0
Use :meth:`get_current_track` instead.
"""
def get_current_tlid(self):
"""
Get the currently playing or selected TLID.
Extracted from :meth:`get_current_tl_track` for convenience.
Returns a :class:`int` or :class:`None`.
.. versionadded:: 1.1
"""
return getattr(self.get_current_tl_track(), 'tlid', None)
def get_stream_title(self):
"""Get the current stream title or :class:`None`."""
return self._stream_title
def get_state(self):
"""Get The playback state."""
return self._state
def set_state(self, new_state):
"""Set the playback state.
Must be :attr:`PLAYING`, :attr:`PAUSED`, or :attr:`STOPPED`.
Possible states and transitions:
.. digraph:: state_transitions
"STOPPED" -> "PLAYING" [ label="play" ]
"STOPPED" -> "PAUSED" [ label="pause" ]
"PLAYING" -> "STOPPED" [ label="stop" ]
"PLAYING" -> "PAUSED" [ label="pause" ]
"PLAYING" -> "PLAYING" [ label="play" ]
"PAUSED" -> "PLAYING" [ label="resume" ]
"PAUSED" -> "STOPPED" [ label="stop" ]
"""
validation.check_choice(new_state, validation.PLAYBACK_STATES)
(old_state, self._state) = (self.get_state(), new_state)
logger.debug('Changing state: %s -> %s', old_state, new_state)
triggers.trigger_playback_state_changed(old_state, new_state)
# self._trigger_playback_state_changed(old_state, new_state)
state = deprecation.deprecated_property(get_state, set_state)
"""
.. deprecated:: 1.0
Use :meth:`get_state` and :meth:`set_state` instead.
"""
def get_time_position(self):
"""Get time position in milliseconds."""
backend = self._get_backend(self.get_current_tl_track())
if backend:
return backend.playback.get_time_position().get()
else:
return 0
time_position = deprecation.deprecated_property(get_time_position)
"""
.. deprecated:: 1.0
Use :meth:`get_time_position` instead.
"""
def get_volume(self):
"""
.. deprecated:: 1.0
Use :meth:`core.mixer.get_volume()
<mopidy.core.MixerController.get_volume>` instead.
"""
deprecation.warn('core.playback.get_volume')
return self.core.mixer.get_volume()
def set_volume(self, volume):
"""
.. deprecated:: 1.0
Use :meth:`core.mixer.set_volume()
<mopidy.core.MixerController.set_volume>` instead.
"""
deprecation.warn('core.playback.set_volume')
return self.core.mixer.set_volume(volume)
volume = deprecation.deprecated_property(get_volume, set_volume)
"""
.. deprecated:: 1.0
Use :meth:`core.mixer.get_volume()
<mopidy.core.MixerController.get_volume>` and
:meth:`core.mixer.set_volume()
<mopidy.core.MixerController.set_volume>` instead.
"""
def get_mute(self):
"""
.. deprecated:: 1.0
Use :meth:`core.mixer.get_mute()
<mopidy.core.MixerController.get_mute>` instead.
"""
deprecation.warn('core.playback.get_mute')
return self.core.mixer.get_mute()
def set_mute(self, mute):
"""
.. deprecated:: 1.0
Use :meth:`core.mixer.set_mute()
<mopidy.core.MixerController.set_mute>` instead.
"""
deprecation.warn('core.playback.set_mute')
return self.core.mixer.set_mute(mute)
mute = deprecation.deprecated_property(get_mute, set_mute)
"""
.. deprecated:: 1.0
Use :meth:`core.mixer.get_mute()
<mopidy.core.MixerController.get_mute>` and
:meth:`core.mixer.set_mute()
<mopidy.core.MixerController.set_mute>` instead.
"""
# Methods
def _on_end_of_stream(self):
self.set_state(PlaybackState.STOPPED)
if self._current_tl_track:
triggers.trigger_track_playback_ended(self.core, self.get_time_position(), self.get_current_tl_track(),
self._previous)
self._previous = False
self._set_current_tl_track(None)
def _on_stream_changed(self, uri):
if self._last_position is None:
position = self.get_time_position()
else:
# This code path handles the stop() case; uri should be None.
position, self._last_position = self._last_position, None
triggers.trigger_track_playback_ended(self.core, self.get_time_position(), self.get_current_tl_track(), self._previous)
self._previous = False
self._stream_title = None
if self._pending_tl_track:
self._set_current_tl_track(self._pending_tl_track)
self._pending_tl_track = None
self.set_state(PlaybackState.PLAYING)
triggers.trigger_track_playback_started(self.core, self.get_current_tl_track())
def _on_about_to_finish_callback(self):
"""Callback that performs a blocking actor call to the real callback.
This is passed to audio, which is allowed to call this code from the
audio thread. We pass execution into the core actor to ensure that
there is no unsafe access of state in core. This must block until
we get a response.
"""
self.core.actor_ref.ask({
'command': 'pykka_call', 'args': tuple(), 'kwargs': {},
'attr_path': ('playback', '_on_about_to_finish'),
})
def _on_about_to_finish(self):
if self._state == PlaybackState.STOPPED:
return
# TODO: check that we always have a current track
original_tl_track = self.get_current_tl_track()
next_tl_track = self.core.tracklist.eot_track(original_tl_track)
# TODO: only set pending if we have a backend that can play it?
# TODO: skip tracks that don't have a backend?
self._pending_tl_track = next_tl_track
backend = self._get_backend(next_tl_track)
if backend:
backend.playback.change_track(next_tl_track.track).get()
def _on_tracklist_change(self):
"""
Tell the playback controller that the current playlist has changed.
Used by :class:`mopidy.core.TracklistController`.
"""
if not self.core.tracklist.tl_tracks:
self.stop()
self._set_current_tl_track(None)
elif self.get_current_tl_track() not in self.core.tracklist.tl_tracks:
self._set_current_tl_track(None)
def next(self):
"""
Change to the next track.
The current playback state will be kept. If it was playing, playing
will continue. If it was paused, it will still be paused, etc.
"""
state = self.get_state()
current = self._pending_tl_track or self._current_tl_track
while current:
pending = self.core.tracklist.next_track(current)
if self._change(pending, state):
break
else:
self.core.tracklist._mark_unplayable(pending)
# TODO: this could be needed to prevent a loop in rare cases
# if current == pending:
# break
current = pending
# TODO return result?
def pause(self):
"""Pause playback."""
backend = self._get_backend(self.get_current_tl_track())
if not backend or backend.playback.pause().get():
# TODO: switch to:
# backend.track(pause)
# wait for state change?
self.set_state(PlaybackState.PAUSED)
triggers.trigger_track_playback_paused(self.get_time_position(), self.get_current_tl_track(),
self.get_current_track())
def play(self, tl_track=None, tlid=None):
"""
Play the given track, or if the given tl_track and tlid is
:class:`None`, play the currently active track.
Note that the track **must** already be in the tracklist.
:param tl_track: track to play
:type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
:param tlid: TLID of the track to play
:type tlid: :class:`int` or :class:`None`
"""
if sum(o is not None for o in [tl_track, tlid]) > 1:
raise ValueError('At most one of "tl_track" and "tlid" may be set')
tl_track is None or validation.check_instance(tl_track, models.TlTrack)
tlid is None or validation.check_integer(tlid, min=1)
if tl_track:
deprecation.warn('core.playback.play:tl_track_kwarg', pending=True)
if tl_track is None and tlid is not None:
for tl_track in self.core.tracklist.get_tl_tracks():
if tl_track.tlid == tlid:
break
else:
tl_track = None
if tl_track is not None:
# TODO: allow from outside tracklist, would make sense given refs?
assert tl_track in self.core.tracklist.get_tl_tracks()
elif tl_track is None and self.get_state() == PlaybackState.PAUSED:
self.resume()
return
current = self._pending_tl_track or self._current_tl_track
pending = tl_track or current or self.core.tracklist.next_track(None)
while pending:
# TODO: should we consume unplayable tracks in this loop?
if self._change(pending, PlaybackState.PLAYING):
break
else:
self.core.tracklist._mark_unplayable(pending)
current = pending
pending = self.core.tracklist.next_track(current)
# TODO return result?
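    # Usage sketch (assumes a core actor proxy named `core`, e.g. from a
    # Mopidy frontend):
    #
    #     core.playback.play().get()        # resume/continue the current track
    #     core.playback.play(tlid=3).get()  # play the tracklist entry with TLID 3
    #
    # Passing both tl_track and tlid raises ValueError, as enforced above.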
def _change(self, pending_tl_track, state):
self._pending_tl_track = pending_tl_track
if not pending_tl_track:
self.stop()
self._on_end_of_stream() # pretend an EOS happened for cleanup
return True
backend = self._get_backend(pending_tl_track)
if not backend:
return False
backend.playback.prepare_change()
if not backend.playback.change_track(pending_tl_track.track).get():
return False # TODO: test for this path
if state == PlaybackState.PLAYING:
try:
return backend.playback.play().get()
except TypeError:
# TODO: check by binding against underlying play method using
# inspect and otherwise re-raise?
logger.error('%s needs to be updated to work with this '
'version of Mopidy.', backend)
return False
elif state == PlaybackState.PAUSED:
return backend.playback.pause().get()
elif state == PlaybackState.STOPPED:
# TODO: emit some event now?
self._current_tl_track = self._pending_tl_track
self._pending_tl_track = None
return True
raise Exception('Unknown state: %s' % state)
def previous(self):
"""
Change to the previous track.
The current playback state will be kept. If it was playing, playing
will continue. If it was paused, it will still be paused, etc.
"""
self._previous = True
state = self.get_state()
current = self._pending_tl_track or self._current_tl_track
while current:
pending = self.core.tracklist.previous_track(current)
if self._change(pending, state):
break
else:
self.core.tracklist._mark_unplayable(pending)
# TODO: this could be needed to prevent a loop in rare cases
# if current == pending:
# break
current = pending
# TODO: no return value?
def resume(self):
"""If paused, resume playing the current track."""
if self.get_state() != PlaybackState.PAUSED:
return
backend = self._get_backend(self.get_current_tl_track())
if backend and backend.playback.resume().get():
self.set_state(PlaybackState.PLAYING)
# TODO: trigger via gst messages
triggers.trigger_track_playback_resumed(self.get_time_position(), self.get_current_tl_track(),
self.get_current_track())
# TODO: switch to:
# backend.resume()
# wait for state change?
def seek(self, time_position):
"""
Seeks to time position given in milliseconds.
:param time_position: time position in milliseconds
:type time_position: int
:rtype: :class:`True` if successful, else :class:`False`
"""
# TODO: seek needs to take pending tracks into account :(
validation.check_integer(time_position)
if time_position < 0:
logger.debug(
'Client seeked to negative position. Seeking to zero.')
time_position = 0
if not self.core.tracklist.tracks:
return False
if self.get_state() == PlaybackState.STOPPED:
self.play()
# TODO: uncomment once we have tests for this. Should fix seek after
# about to finish doing wrong track.
# if self._current_tl_track and self._pending_tl_track:
# self.play(self._current_tl_track)
# We need to prefer the still playing track, but if nothing is playing
# we fall back to the pending one.
tl_track = self._current_tl_track or self._pending_tl_track
if tl_track and tl_track.track.length is None:
return False
if time_position < 0:
time_position = 0
elif time_position > tl_track.track.length:
# TODO: gstreamer will trigger a about to finish for us, use that?
self.next()
return True
backend = self._get_backend(self.get_current_tl_track())
if not backend:
return False
success = backend.playback.seek(time_position).get()
if success:
triggers.trigger_seeked(time_position)
return success
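    # Sketch: with the same hypothetical `core` proxy, seeking past the track
    # length, e.g. core.playback.seek(10 ** 9).get() on a three-minute track,
    # hits the branch above that calls next() and returns True.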
def stop(self):
"""Stop playing."""
if self.get_state() != PlaybackState.STOPPED:
self._last_position = self.get_time_position()
backend = self._get_backend(self.get_current_tl_track())
if not backend or backend.playback.stop().get():
self.set_state(PlaybackState.STOPPED)
|
|
from flask import render_template, current_app, request, \
flash, redirect, url_for, jsonify
from flask_security import roles_accepted, current_user, \
utils as security_utils
import uuid
from . import admin
from .forms import UserForm
from .forms import data_required, match_pass
from pgscm import sqla, user_datastore
from pgscm.db import models
from pgscm import const as c
from pgscm.utils import __, DeleteForm, check_role, is_region_role
crud_role = c.ONLY_ADMIN_ROLE
@admin.route('/vi/quan-tri', endpoint='index_vi')
@admin.route('/en/admin', endpoint='index_en')
@roles_accepted(*c.ONLY_ADMIN_ROLE)
def index():
return render_template('admin/index.html')
@admin.route('/vi/quan-tri/nguoi-dung', endpoint='users_vi',
methods=['GET', 'POST'])
@admin.route('/en/admin/users', endpoint='users_en', methods=['GET', 'POST'])
@roles_accepted(*c.ONLY_ADMIN_ROLE)
def users():
form = UserForm()
dform = DeleteForm()
form.roles.choices = [(r.name, r.description) for r in
models.Role.query.order_by(
models.Role.name.asc()).all()]
if current_app.config['AJAX_CALL_ENABLED']:
form.associate_group_id.choices = []
associate_group_id = current_user.associate_group_id
if associate_group_id and is_region_role():
form.associate_group_id.choices = [
(ag.id, ag.name) for ag in
models.AssociateGroup.query.filter_by(
id=associate_group_id).all()]
return render_template('admin/user.html', form=form, dform=dform)
else:
province_id = current_user.province_id
if province_id and is_region_role():
us = models.User.query.filter_by(
province_id=province_id).all()
form.province_id.choices = [
(p.province_id, p.type + " " + p.name) for p in
models.Province.query.filter_by(province_id=province_id).all()]
else:
us = models.User.query.all()
form.province_id.choices = []
# form create or edit submit
if request.method == 'POST' and form.data['submit']:
if not check_role(crud_role):
return redirect(url_for(request.endpoint))
form.province_id.choices = [(form.province_id.data,
form.province_id.label.text)]
# edit user
if form.id.data:
# remove required validator in fields pass and confirm
# when form is edit form
setattr(form.password, 'validators', [match_pass])
setattr(form.confirm, 'validators', [])
if form.validate_on_submit():
edit_user = user_datastore.find_user(id=form.id.data)
if form.old_password.data:
if not security_utils.verify_and_update_password(
form.old_password.data, edit_user):
flash(str(__('Old password is wrong!')), 'error')
# TODO: fix return to keep current state of form
return redirect(url_for(request.endpoint))
else:
edit_user.password = security_utils.hash_password(
form.password.data)
temp_user = sqla.session.query(models.User) \
.filter_by(email=form.email.data).all()
if not check_user_email(temp_user, edit_user.email):
                    form.email.errors.append(
                        __('The email already exists!'))
                    flash(str(__('The email already exists!')), 'error')
else:
edit_user.email = form.email.data
edit_user.fullname = form.fullname.data
if form.province_id.data != edit_user.province_id:
edit_user.province = models.Province.query \
.filter_by(province_id=form.province_id.data) \
.one()
for new_role in form.roles.data:
role_is_added = False
for r in edit_user.roles:
if new_role == r.name:
role_is_added = True
break
if not role_is_added:
user_datastore.add_role_to_user(
edit_user.email, new_role)
temp_roles = list(edit_user.roles)
for old_role in temp_roles:
if old_role.name not in form.roles.data:
user_datastore.remove_role_from_user(
edit_user.email, old_role.name)
user_datastore.put(edit_user)
                    for i, user in enumerate(us):
                        if user.id == edit_user.id:
                            us[i] = edit_user
                            break
flash(str(__('Update user success!')), 'success')
return redirect(url_for(request.endpoint))
else:
flash(str(__('The form is not validated!')), 'error')
# add user
else:
setattr(form.password, 'validators', [data_required,
match_pass])
setattr(form.confirm, 'validators', [data_required])
form.id.data = str(uuid.uuid4())
if form.validate_on_submit():
if not user_datastore.find_user(email=form.email.data):
province = models.Province.query.filter_by(
province_id=form.province_id.data).one()
user_datastore.create_user(id=form.id.data,
email=form.email.data, fullname=form.fullname.data,
province=province, password=security_utils
.hash_password(form.password.data))
sqla.session.commit()
for role in form.roles.data:
user_datastore.add_role_to_user(
form.email.data, role)
sqla.session.commit()
flash(str(__('Add user success!')), 'success')
return redirect(url_for(request.endpoint))
else:
                    form.email.errors.append(
                        __('The email already exists!'))
                    flash(str(__('The email already exists!')), 'error')
else:
flash(str(__('The form is not validated!')), 'error')
# form delete submit
if request.method == 'POST' and dform.data['submit_del']:
if not check_role(crud_role):
return redirect(url_for(request.endpoint))
elif dform.validate_on_submit():
del_user = user_datastore.find_user(id=dform.id.data)
user_datastore.delete_user(del_user)
sqla.session.commit()
flash(str(__('Delete user success!')), 'success')
return redirect(url_for(request.endpoint))
return render_template('admin/user.html', us=us,
form=form, dform=dform)
# user_list_result: list of users returned by querying for the email
# submitted in the form
def check_user_email(user_list_result, edit_user_email):
    # email is not registered yet
    if not len(user_list_result):
        return True
    # email belongs to the user being edited
    elif len(user_list_result) == 1 and \
            user_list_result[0].email == edit_user_email:
        return True
    # email is already registered to another user
    else:
        return False
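# Illustrative sketch (not wired into the views above): the three cases of
# check_user_email, using a minimal stand-in for the User model.
def _check_user_email_demo():
    class _User(object):
        def __init__(self, email):
            self.email = email
    assert check_user_email([], 'a@b.c')                    # not registered yet
    assert check_user_email([_User('a@b.c')], 'a@b.c')      # edit user's own email
    assert not check_user_email([_User('x@y.z')], 'a@b.c')  # taken by another user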
@admin.route('/vi/them-nguoi-dung', endpoint='add_user_vi', methods=['POST'])
@admin.route('/en/add-user', endpoint='add_user_en', methods=['POST'])
@roles_accepted(*c.ONLY_ADMIN_ROLE)
def add_user():
form = UserForm()
form.roles.choices = [(r.name, r.description) for r in
models.Role.query.order_by(models.Role.name.asc()).all()]
form.associate_group_id.choices = [(form.associate_group_id.data,
form.associate_group_id.label.text)]
setattr(form.password, 'validators', [data_required, match_pass])
setattr(form.confirm, 'validators', [data_required])
form.id.data = str(uuid.uuid4())
if form.validate_on_submit():
if not user_datastore.find_user(email=form.email.data):
associate_group = models.AssociateGroup.query.filter_by(
id=form.associate_group_id.data).one()
user_datastore.create_user(
id=form.id.data, email=form.email.data,
fullname=form.fullname.data, associate_group=associate_group,
password=security_utils.hash_password(form.password.data))
sqla.session.commit()
for role in form.roles.data:
user_datastore.add_role_to_user(
form.email.data, role)
sqla.session.commit()
return jsonify(is_success=True,
message=str(__('Add user success!')))
else:
            form.email.errors.append(
                __('The email already exists!'))
            return jsonify(is_success=False,
                           message=str(__('The email already exists!')))
else:
error_message = ""
if form.errors.keys():
for k in form.errors.keys():
for mes in form.errors[k]:
error_message += mes
else:
            error_message = str(__('The form is not validated!'))
return jsonify(is_success=False,
message=error_message)
@admin.route('/vi/sua-nguoi-dung', endpoint='edit_user_vi', methods=['PUT'])
@admin.route('/en/edit-user', endpoint='edit_user_en', methods=['PUT'])
@roles_accepted(*c.ONLY_ADMIN_ROLE)
def edit_user():
form = UserForm()
form.roles.choices = [(r.name, r.description) for r in
models.Role.query.order_by(
models.Role.name.asc()).all()]
form.associate_group_id.choices = [(form.associate_group_id.data,
form.associate_group_id.label.text)]
setattr(form.password, 'validators', [match_pass])
setattr(form.confirm, 'validators', [])
if form.validate_on_submit():
edit_user = user_datastore.find_user(id=form.id.data)
if form.old_password.data:
if not security_utils.verify_and_update_password(
form.old_password.data, edit_user):
return jsonify(is_success=False,
message=str(__('Old password is wrong!')))
else:
edit_user.password = security_utils.hash_password(
form.password.data)
temp_user = sqla.session.query(models.User) \
.filter_by(email=form.email.data).all()
if not check_user_email(temp_user, edit_user.email):
            form.email.errors.append(__('The email already exists!'))
            return jsonify(is_success=False,
                           message=str(__('The email already exists!')))
else:
edit_user.email = form.email.data
edit_user.fullname = form.fullname.data
if form.associate_group_id.data != edit_user.associate_group_id:
edit_user.associate_group = models.AssociateGroup.query \
.filter_by(id=form.associate_group_id.data) \
.one()
for new_role in form.roles.data:
role_is_added = False
for r in edit_user.roles:
if new_role == r.name:
role_is_added = True
break
if not role_is_added:
user_datastore.add_role_to_user(
edit_user.email, new_role)
temp_roles = list(edit_user.roles)
for old_role in temp_roles:
if old_role.name not in form.roles.data:
user_datastore.remove_role_from_user(
edit_user.email, old_role.name)
user_datastore.put(edit_user)
return jsonify(is_success=True,
message=str(__('Update user success!')))
else:
        return jsonify(is_success=False,
                       message=str(__('The form is not validated!')))
@admin.route('/vi/xoa-nguoi-dung', endpoint='delete_user_vi',
methods=['DELETE'])
@admin.route('/en/delete-user', endpoint='delete_user_en', methods=['DELETE'])
@roles_accepted(*c.ONLY_ADMIN_ROLE)
def delete_user():
dform = DeleteForm()
if dform.validate_on_submit():
del_user = user_datastore.find_user(id=dform.id.data)
user_datastore.delete_user(del_user)
sqla.session.commit()
return jsonify(is_success=True,
message=str(__('Delete user success!')))
else:
        return jsonify(is_success=False,
                       message=str(__('The form is not validated!')))
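# Response contract of the AJAX endpoints above (illustrative): each handler
# returns JSON of the shape {"is_success": <bool>, "message": <localized str>},
# e.g. {"is_success": true, "message": "Delete user success!"}.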
|
|
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# coding=utf-8
from __future__ import absolute_import
import octoprint.plugin
from octoprint.util import RepeatedTimer
from easyprocess import EasyProcess
class TemperatureFailsafe(octoprint.plugin.AssetPlugin,
octoprint.plugin.SettingsPlugin,
octoprint.plugin.ShutdownPlugin,
octoprint.plugin.StartupPlugin,
octoprint.plugin.TemplatePlugin):
def __init__(self):
self._checkTempTimer = None
def _restartTimer(self):
# stop the timer
if self._checkTempTimer:
self._logger.debug(u"Stopping Timer...")
self._checkTempTimer.cancel()
self._checkTempTimer = None
# start a new timer
interval = self._settings.get_int(['interval'])
if self._settings.get_boolean(['enabled']) and interval:
self._logger.debug(u"Starting Timer...")
self._checkTempTimer = RepeatedTimer(interval, self.CheckTemps, None, None, True)
self._checkTempTimer.start()
def _executeFailsafe(self, env):
# cancel the print and shut down all the heaters
# TODO: Wrap in a try/except block to make sure the shell command still runs.
if self._printer.is_operational():
            if self._printer.is_printing() and self._settings.get_boolean(['cancel_print']):
self._logger.error(u"Canceling print")
# TODO: verify this is async
self._printer.cancel_print()
            if self._settings.get_boolean(['disable_heaters']):
self._logger.error(u"Disabling heaters")
for k in self._printer.get_current_temperatures().keys():
# TODO: verify this is async
self._printer.set_temperature(k, 0)
# execute the shell command
cmd = self._settings.get(['command'])
timeout = self._settings.get_int(['read_timeout'])
# 0 implies no timeout
if timeout == 0:
timeout = None
if cmd:
self._logger.info(u"Executing Shell Command: %r" % (cmd,))
p = EasyProcess(cmd, env=env).call(timeout=timeout)
# TODO: perform a octoprint popup with the shell command response
self._logger.info(u"Exit Code from Shell Command: %r" % (p.return_code,))
self._logger.info(u"Response from Shell Command: %r" % (p.stdout,))
def CheckTemps(self):
temps = self._printer.get_current_temperatures()
self._logger.debug(u"CheckTemps(): %r" % (temps,))
if not temps:
self._logger.debug(u"No Temperature Data")
return
for k in temps.keys():
# example dictionary from octoprint
# {
# 'bed': {'actual': 0.9, 'target': 0.0, 'offset': 0},
# 'tool0': {'actual': 0.0, 'target': 0.0, 'offset': 0},
# 'tool1': {'actual': 0.0, 'target': 0.0, 'offset': 0}
# }
if k == 'bed':
threshold_high = self._settings.get_int(['bed'])
threshold_low = self._settings.get_int(['bed_low'])
elif k.startswith('tool'):
threshold_high = self._settings.get_int(['hotend'])
threshold_low = self._settings.get_int(['hotend_low'])
else:
continue
violation = False
errmsg = u"TemperatureFailSafe violation, heater: {heater}: {temp}C {exp} {threshold}C"
if threshold_high and temps[k]['actual'] > threshold_high:
errmsg = errmsg.format(heater=k, temp=temps[k]['actual'], exp=">", threshold=threshold_high)
violation = True
# only check the low thresholds if we are currently printing, or else ignore it
if self._printer.is_printing() and threshold_low and temps[k]['actual'] < threshold_low:
errmsg = errmsg.format(heater=k, temp=temps[k]['actual'], exp="<", threshold=threshold_low)
violation = True
if violation:
# alert the user
self._logger.error(errmsg)
self._plugin_manager.send_plugin_message(__plugin_name__, dict(type="popup", msg=errmsg))
env = {}
env["TEMPERATURE_FAILSAFE_FAULT_TOOL"] = str(k)
env["TEMPERATURE_FAILSAFE_FAULT_HIGH_THRESHOLD"] = str(threshold_high)
env["TEMPERATURE_FAILSAFE_FAULT_LOW_THRESHOLD"] = str(threshold_low)
# place the temperatures into an environment dictionary to pass to the remote program
for t in temps.keys():
env["TEMPERATURE_FAILSAFE_%s_ACTUAL" % t.upper()] = str(temps[t]['actual'])
env["TEMPERATURE_FAILSAFE_%s_TARGET" % t.upper()] = str(temps[t]['target'])
self._executeFailsafe(env)
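    # Illustrative environment handed to the user's failsafe command for a
    # hotend fault (values hypothetical):
    #   TEMPERATURE_FAILSAFE_FAULT_TOOL=tool0
    #   TEMPERATURE_FAILSAFE_FAULT_HIGH_THRESHOLD=280
    #   TEMPERATURE_FAILSAFE_FAULT_LOW_THRESHOLD=0
    #   TEMPERATURE_FAILSAFE_TOOL0_ACTUAL=285.0
    #   TEMPERATURE_FAILSAFE_TOOL0_TARGET=210.0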
##-- StartupPlugin hooks
def on_after_startup(self):
self._logger.info(u"Starting up...")
self._restartTimer()
##-- ShutdownPlugin hooks
def on_shutdown(self):
self._logger.info(u"Shutting down...")
        # RepeatedTimer runs as a daemon thread, so it won't block process
        # exit; no explicit cancellation is needed here.
##-- AssetPlugin hooks
def get_assets(self):
return dict(js=["js/Temperaturefailsafe.js"])
##~~ SettingsPlugin mixin
def get_settings_version(self):
return 1
def get_template_configs(self):
return [
dict(type="settings", name="Temperature Failsafe", custom_bindings=False)
]
def get_settings_defaults(self):
return dict(
enabled=False,
interval=5,
read_timeout=5,
bed=0,
bed_low=0,
hotend=0,
hotend_low=0,
command=None,
cancel_print=True,
disable_heaters=True
)
def on_settings_initialized(self):
self._logger.debug(u"TemperatureFailsafe on_settings_initialized()")
self._restartTimer()
def on_settings_save(self, data):
# make sure we don't get negative values
        for k in ('bed', 'bed_low', 'hotend', 'hotend_low', 'read_timeout', 'interval'):
            if data.get(k):
                data[k] = max(0, int(data[k]))
self._logger.debug(u"TemperatureFailsafe on_settings_save(%r)" % (data,))
octoprint.plugin.SettingsPlugin.on_settings_save(self, data)
self._restartTimer()
##~~ Softwareupdate hook
def get_update_information(self):
return dict(
emergencyaction=dict(
displayName="Temperature Failsafe Plugin",
displayVersion=self._plugin_version,
# version check: github repository
type="github_release",
user="google",
repo="OctoPrint-TemperatureFailsafe",
current=self._plugin_version,
# update method: pip
pip="https://github.com/google/OctoPrint-TemperatureFailsafe/archive/{target_version}.zip"
)
)
__plugin_name__ = "TemperatureFailsafe"
__plugin_pythoncompat__ = ">=2.7,<4"
def __plugin_load__():
global __plugin_implementation__
__plugin_implementation__ = TemperatureFailsafe()
global __plugin_hooks__
__plugin_hooks__ = {
"octoprint.plugin.softwareupdate.check_config": __plugin_implementation__.get_update_information,
}
|
|
#! /usr/bin/python
import os
import sys
import subprocess
import json
import argparse
import io
import time
from flask import Flask, json, Response, render_template, make_response, request
RestIP="127.0.0.1"
RestPort=8182
DBName="Cassandra-Netmap"
app = Flask(__name__)
def log_error(txt):
    # logging helper used by the REST endpoints below
    print '%s' % (txt)
## File Fetch ##
@app.route('/ui/img/<filename>', methods=['GET'])
@app.route('/img/<filename>', methods=['GET'])
@app.route('/css/<filename>', methods=['GET'])
@app.route('/js/models/<filename>', methods=['GET'])
@app.route('/js/views/<filename>', methods=['GET'])
@app.route('/js/<filename>', methods=['GET'])
@app.route('/lib/<filename>', methods=['GET'])
@app.route('/', methods=['GET'])
@app.route('/<filename>', methods=['GET'])
@app.route('/tpl/<filename>', methods=['GET'])
@app.route('/ons-demo/<filename>', methods=['GET'])
@app.route('/ons-demo/js/<filename>', methods=['GET'])
@app.route('/ons-demo/css/<filename>', methods=['GET'])
@app.route('/ons-demo/assets/<filename>', methods=['GET'])
@app.route('/ons-demo/data/<filename>', methods=['GET'])
def return_file(filename="index.html"):
if request.path == "/":
fullpath = "./index.html"
else:
fullpath = str(request.path)[1:]
response = make_response(open(fullpath).read())
suffix = fullpath.split(".")[-1]
if suffix == "html" or suffix == "htm":
response.headers["Content-type"] = "text/html"
elif suffix == "js":
response.headers["Content-type"] = "application/javascript"
elif suffix == "css":
response.headers["Content-type"] = "text/css"
elif suffix == "png":
response.headers["Content-type"] = "image/png"
return response
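# Note: building fullpath straight from request.path serves any file readable
# by the process (no traversal guard). A hardened sketch would let Flask do
# the lookup and content-type handling instead, e.g.:
#
#     from flask import send_from_directory
#     return send_from_directory('.', filename)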
## PROXY API (allows development where the webui is served from someplace other than the controller)##
ONOS_GUI3_HOST="http://gui3.onlab.us:8080"
ONOS_LOCAL_HOST="http://localhost:8080"  # for Amazon EC2
@app.route("/wm/onos/topology/switches")
def switches():
    if request.args.get('proxy') is None:
host = ONOS_LOCAL_HOST
else:
host = ONOS_GUI3_HOST
try:
command = "curl -s %s/wm/onos/topology/switches" % (host)
print command
result = os.popen(command).read()
except:
print "REST IF has issue"
        sys.exit(1)
resp = Response(result, status=200, mimetype='application/json')
return resp
@app.route("/wm/onos/topology/links")
def links():
    if request.args.get('proxy') is None:
host = ONOS_LOCAL_HOST
else:
host = ONOS_GUI3_HOST
try:
command = "curl -s %s/wm/onos/topology/links" % (host)
print command
result = os.popen(command).read()
except:
print "REST IF has issue"
        sys.exit(1)
resp = Response(result, status=200, mimetype='application/json')
return resp
@app.route("/wm/onos/flows/getall/json")
def flows():
    if request.args.get('proxy') is None:
host = ONOS_LOCAL_HOST
else:
host = ONOS_GUI3_HOST
try:
command = "curl -s %s/wm/onos/flows/getall/json" % (host)
print command
result = os.popen(command).read()
except:
print "REST IF has issue"
        sys.exit(1)
resp = Response(result, status=200, mimetype='application/json')
return resp
@app.route("/wm/onos/registry/controllers/json")
def registry_controllers():
    if request.args.get('proxy') is None:
host = ONOS_LOCAL_HOST
else:
host = ONOS_GUI3_HOST
try:
command = "curl -s %s/wm/onos/registry/controllers/json" % (host)
print command
result = os.popen(command).read()
except:
print "REST IF has issue"
        sys.exit(1)
resp = Response(result, status=200, mimetype='application/json')
return resp
@app.route("/wm/onos/registry/switches/json")
def registry_switches():
    if request.args.get('proxy') is None:
host = ONOS_LOCAL_HOST
else:
host = ONOS_GUI3_HOST
try:
command = "curl -s %s/wm/onos/registry/switches/json" % (host)
print command
result = os.popen(command).read()
except:
print "REST IF has issue"
        sys.exit(1)
resp = Response(result, status=200, mimetype='application/json')
return resp
## REST API ##
#@app.route("/wm/onos/linkdiscovery/links/json")
#def links():
# global links_
# js = json.dumps(links_)
# resp = Response(js, status=200, mimetype='application/json')
# return resp
#@app.route("/wm/floodlight/core/controller/switches/json")
#def switches():
# global switches_
# js = json.dumps(switches_)
# resp = Response(js, status=200, mimetype='application/json')
# return resp
@app.route("/wm/floodlight/device/")
def devices():
try:
command = "curl -s http://%s:%s/graphs/%s/vertices?key=type\&value=device" % (RestIP, RestPort, DBName)
result = os.popen(command).read()
parsedResult = json.loads(result)['results']
except:
log_error("REST IF has issue")
        sys.exit(1)
devices_ = []
for v in parsedResult:
if v['type'] == "device":
dl_addr = v['dl_addr']
nw_addr = v['nw_addr']
vertex = v['_id']
mac = []
mac.append(dl_addr)
ip = []
ip.append(nw_addr)
device = {}
            device['entityClass']="DefaultEntityClass"
device['mac']=mac
device['ipv4']=ip
device['vlan']=[]
device['lastSeen']=0
            attachpoints = []
try:
command = "curl -s http://%s:%s/graphs/%s/vertices/%d/in" % (RestIP, RestPort, DBName, vertex)
result = os.popen(command).read()
parsedResult = json.loads(result)['results']
except:
log_error("REST IF has issue")
                sys.exit(1)
port = parsedResult[0]['number']
vertex = parsedResult[0]['_id']
dpid = portid_to_switch_dpid(vertex)
attachpoint = {}
attachpoint['port']=port
attachpoint['switchDPID']=dpid
attachpoints.append(attachpoint)
device['attachmentPoint']=attachpoints
devices_.append(device)
print devices_
js = json.dumps(devices_)
resp = Response(js, status=200, mimetype='application/json')
return resp
#{"entityClass":"DefaultEntityClass","mac":["7c:d1:c3:e0:8c:a3"],"ipv4":["192.168.2.102","10.1.10.35"],"vlan":[],"attachmentPoint":[{"port":13,"switchDPID":"00:01:00:12:e2:78:32:44","errorStatus":null}],"lastSeen":1357333593496}
## return fake stat for now
@app.route("/wm/floodlight/core/switch/<switchId>/<statType>/json")
def switch_stat(switchId, statType):
if statType == "desc":
desc=[{"length":1056,"serialNumber":"None","manufacturerDescription":"Nicira Networks, Inc.","hardwareDescription":"Open vSwitch","softwareDescription":"1.4.0+build0","datapathDescription":"None"}]
ret = {}
ret[switchId]=desc
elif statType == "aggregate":
aggr = {"packetCount":0,"byteCount":0,"flowCount":0}
ret = {}
ret[switchId]=aggr
else:
ret = {}
js = json.dumps(ret)
resp = Response(js, status=200, mimetype='application/json')
return resp
@app.route("/wm/floodlight/core/controller/switches/json")
def query_switch():
try:
command = "curl -s http://%s:%s/graphs/%s/vertices?key=type\&value=switch" % (RestIP, RestPort, DBName)
result = os.popen(command).read()
parsedResult = json.loads(result)['results']
except:
log_error("REST IF has issue")
        sys.exit(1)
switches_ = []
for v in parsedResult:
if v['type'] == "switch":
            dpid = str(v['dpid'])  # strip surrounding quotation marks
sw = {}
sw['dpid']=dpid
switches_.append(sw)
print switches_
js = json.dumps(switches_)
resp = Response(js, status=200, mimetype='application/json')
return resp
@app.route("/wm/onos/linkdiscovery/links/json")
def query_links():
try:
command = "curl -s http://%s:%s/graphs/%s/vertices?key=type\&value=port" % (RestIP, RestPort, DBName)
result = os.popen(command).read()
parsedResult = json.loads(result)['results']
except:
log_error("REST IF has issue")
        sys.exit(1)
sport = []
switches_ = []
for v in parsedResult:
srcport = v['_id']
try:
command = "curl -s http://%s:%s/graphs/%s/vertices/%d/out?_label=link" % (RestIP, RestPort, DBName, srcport)
result = os.popen(command).read()
linkResults = json.loads(result)['results']
except:
log_error("REST IF has issue")
            sys.exit(1)
for p in linkResults:
dstport = p['_id']
(sport, sdpid) = get_port_switch(srcport)
(dport, ddpid) = get_port_switch(dstport)
link = {}
link["src-switch"]=sdpid
link["src-port"]=sport
link["src-port-state"]=0
link["dst-switch"]=ddpid
link["dst-port"]=dport
link["dst-port-state"]=0
link["type"]="internal"
switches_.append(link)
print switches_
js = json.dumps(switches_)
resp = Response(js, status=200, mimetype='application/json')
return resp
def get_port_switch(vertex):
try:
command = "curl -s http://%s:%s/graphs/%s/vertices/%d" % (RestIP, RestPort, DBName, vertex)
result = os.popen(command).read()
parsedResult = json.loads(result)['results']
except:
log_error("REST IF has issue")
        sys.exit(1)
port_number = parsedResult['number']
vertex_id = parsedResult['_id']
switch_dpid = portid_to_switch_dpid(vertex_id)
return (port_number, switch_dpid)
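# e.g. get_port_switch(42) -> (3, "00:00:00:00:00:00:00:01"): the port number
# and the DPID of the switch that owns the port vertex (values illustrative).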
def portid_to_switch_dpid(vertex):
try:
command = "curl -s http://%s:%s/graphs/%s/vertices/%d/in" % (RestIP, RestPort, DBName, vertex)
result = os.popen(command).read()
parsedResult = json.loads(result)['results']
except:
log_error("REST IF has issue")
        sys.exit(1)
    sw_dpid = None
    for v in parsedResult:
        if v['type'] == "switch":
            sw_dpid = v['dpid']
            break
    return sw_dpid
def id_to_dpid(vertex):
try:
command = "curl -s http://%s:%s/graphs/%s/vertices/%d" % (RestIP, RestPort, DBName, vertex)
result = os.popen(command).read()
parsedResult = json.loads(result)['results']
except:
log_error("REST IF has issue")
        sys.exit(1)
if parsedResult['type'] != "switch":
print "not a switch vertex"
        sys.exit(1)
else:
sw_dpid = parsedResult['dpid']
return sw_dpid
if __name__ == "__main__":
app.debug = True
app.run(threaded=True, host="0.0.0.0", port=9000)
# query_switch()
# query_links()
# devices()
|
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_configured_feature_column_info1014_all_of
except ImportError:
bt_configured_feature_column_info1014_all_of = sys.modules[
"onshape_client.oas.models.bt_configured_feature_column_info1014_all_of"
]
try:
from onshape_client.oas.models import bt_parameter_spec_reference_blob1367
except ImportError:
bt_parameter_spec_reference_blob1367 = sys.modules[
"onshape_client.oas.models.bt_parameter_spec_reference_blob1367"
]
try:
from onshape_client.oas.models import bt_parameter_visibility_condition177
except ImportError:
bt_parameter_visibility_condition177 = sys.modules[
"onshape_client.oas.models.bt_parameter_visibility_condition177"
]
try:
from onshape_client.oas.models import btm_parameter1
except ImportError:
btm_parameter1 = sys.modules["onshape_client.oas.models.btm_parameter1"]
class BTParameterSpecReferenceCADImport1792(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and, for var_name, this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and, for var_name, this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("ui_hints",): {
"OPPOSITE_DIRECTION": "OPPOSITE_DIRECTION",
"ALWAYS_HIDDEN": "ALWAYS_HIDDEN",
"SHOW_CREATE_SELECTION": "SHOW_CREATE_SELECTION",
"CONTROL_VISIBILITY": "CONTROL_VISIBILITY",
"NO_PREVIEW_PROVIDED": "NO_PREVIEW_PROVIDED",
"REMEMBER_PREVIOUS_VALUE": "REMEMBER_PREVIOUS_VALUE",
"DISPLAY_SHORT": "DISPLAY_SHORT",
"ALLOW_FEATURE_SELECTION": "ALLOW_FEATURE_SELECTION",
"MATE_CONNECTOR_AXIS_TYPE": "MATE_CONNECTOR_AXIS_TYPE",
"PRIMARY_AXIS": "PRIMARY_AXIS",
"SHOW_EXPRESSION": "SHOW_EXPRESSION",
"OPPOSITE_DIRECTION_CIRCULAR": "OPPOSITE_DIRECTION_CIRCULAR",
"SHOW_LABEL": "SHOW_LABEL",
"HORIZONTAL_ENUM": "HORIZONTAL_ENUM",
"UNCONFIGURABLE": "UNCONFIGURABLE",
"MATCH_LAST_ARRAY_ITEM": "MATCH_LAST_ARRAY_ITEM",
"COLLAPSE_ARRAY_ITEMS": "COLLAPSE_ARRAY_ITEMS",
"INITIAL_FOCUS_ON_EDIT": "INITIAL_FOCUS_ON_EDIT",
"INITIAL_FOCUS": "INITIAL_FOCUS",
"DISPLAY_CURRENT_VALUE_ONLY": "DISPLAY_CURRENT_VALUE_ONLY",
"READ_ONLY": "READ_ONLY",
"PREVENT_CREATING_NEW_MATE_CONNECTORS": "PREVENT_CREATING_NEW_MATE_CONNECTORS",
"FIRST_IN_ROW": "FIRST_IN_ROW",
"ALLOW_QUERY_ORDER": "ALLOW_QUERY_ORDER",
"PREVENT_ARRAY_REORDER": "PREVENT_ARRAY_REORDER",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
        of type self; this ensures that we don't create a cyclic import.
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"additional_localized_strings": (int,), # noqa: E501
"column_name": (str,), # noqa: E501
"default_value": (btm_parameter1.BTMParameter1,), # noqa: E501
"icon_uri": (str,), # noqa: E501
"localizable_name": (str,), # noqa: E501
"localized_name": (str,), # noqa: E501
"parameter_id": (str,), # noqa: E501
"parameter_name": (str,), # noqa: E501
"strings_to_localize": ([str],), # noqa: E501
"ui_hint": (str,), # noqa: E501
"ui_hints": ([str],), # noqa: E501
"visibility_condition": (
bt_parameter_visibility_condition177.BTParameterVisibilityCondition177,
), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"bt_type": "btType", # noqa: E501
"additional_localized_strings": "additionalLocalizedStrings", # noqa: E501
"column_name": "columnName", # noqa: E501
"default_value": "defaultValue", # noqa: E501
"icon_uri": "iconUri", # noqa: E501
"localizable_name": "localizableName", # noqa: E501
"localized_name": "localizedName", # noqa: E501
"parameter_id": "parameterId", # noqa: E501
"parameter_name": "parameterName", # noqa: E501
"strings_to_localize": "stringsToLocalize", # noqa: E501
"ui_hint": "uiHint", # noqa: E501
"ui_hints": "uiHints", # noqa: E501
"visibility_condition": "visibilityCondition", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""bt_parameter_spec_reference_cad_import1792.BTParameterSpecReferenceCADImport1792 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
bt_type (str): [optional] # noqa: E501
additional_localized_strings (int): [optional] # noqa: E501
column_name (str): [optional] # noqa: E501
default_value (btm_parameter1.BTMParameter1): [optional] # noqa: E501
icon_uri (str): [optional] # noqa: E501
localizable_name (str): [optional] # noqa: E501
localized_name (str): [optional] # noqa: E501
parameter_id (str): [optional] # noqa: E501
parameter_name (str): [optional] # noqa: E501
strings_to_localize ([str]): [optional] # noqa: E501
ui_hint (str): [optional] # noqa: E501
ui_hints ([str]): [optional] # noqa: E501
visibility_condition (bt_parameter_visibility_condition177.BTParameterVisibilityCondition177): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
constant_args = {
"_check_type": _check_type,
"_path_to_item": _path_to_item,
"_from_server": _from_server,
"_configuration": _configuration,
}
required_args = {}
# remove args whose value is Null because they are unset
required_arg_names = list(required_args.keys())
for required_arg_name in required_arg_names:
if required_args[required_arg_name] is nulltype.Null:
del required_args[required_arg_name]
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in six.iteritems(kwargs):
if (
var_name in unused_args
and self._configuration is not None
and self._configuration.discard_unknown_keys
and not self._additional_properties_model_instances
):
# discard variable.
continue
setattr(self, var_name, var_value)
@staticmethod
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
"anyOf": [],
"allOf": [
bt_configured_feature_column_info1014_all_of.BTConfiguredFeatureColumnInfo1014AllOf,
bt_parameter_spec_reference_blob1367.BTParameterSpecReferenceBlob1367,
],
"oneOf": [],
}
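# Usage sketch (hypothetical values; keyword names follow openapi_types above):
#
#     spec = BTParameterSpecReferenceCADImport1792(
#         parameter_id="myParam",
#         ui_hints=["READ_ONLY", "SHOW_LABEL"],
#     )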
|
|
from typing import Callable, Tuple, Optional, List, Dict, Any, TYPE_CHECKING
from ray.rllib.env.external_env import ExternalEnv
from ray.rllib.env.external_multi_agent_env import ExternalMultiAgentEnv
from ray.rllib.env.multi_agent_env import MultiAgentEnv
from ray.rllib.env.vector_env import VectorEnv
from ray.rllib.utils.annotations import override, PublicAPI
from ray.rllib.utils.typing import AgentID, EnvID, EnvType, MultiAgentDict, \
MultiEnvDict, PartialTrainerConfigDict
if TYPE_CHECKING:
from ray.rllib.models.preprocessors import Preprocessor
ASYNC_RESET_RETURN = "async_reset_return"
@PublicAPI
class BaseEnv:
"""The lowest-level env interface used by RLlib for sampling.
BaseEnv models multiple agents executing asynchronously in multiple
environments. A call to poll() returns observations from ready agents
keyed by their environment and agent ids, and actions for those agents
can be sent back via send_actions().
All other env types can be adapted to BaseEnv. RLlib handles these
conversions internally in RolloutWorker, for example:
gym.Env => rllib.VectorEnv => rllib.BaseEnv
rllib.MultiAgentEnv => rllib.BaseEnv
rllib.ExternalEnv => rllib.BaseEnv
Attributes:
action_space (gym.Space): Action space. This must be defined for
single-agent envs. Multi-agent envs can set this to None.
observation_space (gym.Space): Observation space. This must be defined
for single-agent envs. Multi-agent envs can set this to None.
Examples:
>>> env = MyBaseEnv()
>>> obs, rewards, dones, infos, off_policy_actions = env.poll()
>>> print(obs)
{
"env_0": {
"car_0": [2.4, 1.6],
"car_1": [3.4, -3.2],
},
"env_1": {
"car_0": [8.0, 4.1],
},
"env_2": {
"car_0": [2.3, 3.3],
"car_1": [1.4, -0.2],
"car_3": [1.2, 0.1],
},
}
>>> env.send_actions(
actions={
"env_0": {
"car_0": 0,
"car_1": 1,
}, ...
})
>>> obs, rewards, dones, infos, off_policy_actions = env.poll()
>>> print(obs)
{
"env_0": {
"car_0": [4.1, 1.7],
"car_1": [3.2, -4.2],
}, ...
}
>>> print(dones)
{
"env_0": {
"__all__": False,
"car_0": False,
"car_1": True,
}, ...
}
"""
@staticmethod
def to_base_env(
env: EnvType,
make_env: Callable[[int], EnvType] = None,
num_envs: int = 1,
remote_envs: bool = False,
remote_env_batch_wait_ms: int = 0,
policy_config: PartialTrainerConfigDict = None,
) -> "BaseEnv":
"""Wraps any env type as needed to expose the async interface."""
from ray.rllib.env.remote_vector_env import RemoteVectorEnv
if remote_envs and num_envs == 1:
raise ValueError(
"Remote envs only make sense to use if num_envs > 1 "
"(i.e. vectorization is enabled).")
if not isinstance(env, BaseEnv):
if isinstance(env, MultiAgentEnv):
if remote_envs:
env = RemoteVectorEnv(
make_env,
num_envs,
multiagent=True,
remote_env_batch_wait_ms=remote_env_batch_wait_ms)
else:
env = _MultiAgentEnvToBaseEnv(
make_env=make_env,
existing_envs=[env],
num_envs=num_envs)
elif isinstance(env, ExternalEnv):
if num_envs != 1:
raise ValueError(
"External(MultiAgent)Env does not currently support "
"num_envs > 1. One way of solving this would be to "
"treat your Env as a MultiAgentEnv hosting only one "
"type of agent but with several copies.")
env = _ExternalEnvToBaseEnv(env)
elif isinstance(env, VectorEnv):
env = _VectorEnvToBaseEnv(env)
else:
if remote_envs:
env = RemoteVectorEnv(
make_env,
num_envs,
multiagent=False,
remote_env_batch_wait_ms=remote_env_batch_wait_ms)
else:
env = VectorEnv.wrap(
make_env=make_env,
existing_envs=[env],
num_envs=num_envs,
action_space=env.action_space,
observation_space=env.observation_space,
policy_config=policy_config,
)
env = _VectorEnvToBaseEnv(env)
assert isinstance(env, BaseEnv), env
return env
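    # Sketch: wrapping a plain single-agent env (assumes gym is installed and
    # "CartPole-v1" is registered):
    #
    #     import gym
    #     base_env = BaseEnv.to_base_env(
    #         gym.make("CartPole-v1"),
    #         make_env=lambda i: gym.make("CartPole-v1"),
    #         num_envs=2)
    #
    # following the gym.Env => VectorEnv => BaseEnv chain described above.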
@PublicAPI
def poll(self) -> Tuple[MultiEnvDict, MultiEnvDict, MultiEnvDict,
MultiEnvDict, MultiEnvDict]:
"""Returns observations from ready agents.
The returns are two-level dicts mapping from env_id to a dict of
agent_id to values. The number of agents and envs can vary over time.
        Returns:
obs (dict): New observations for each ready agent.
            rewards (dict): Reward values for each ready agent. If the
                episode has just started, the value will be None.
dones (dict): Done values for each ready agent. The special key
"__all__" is used to indicate env termination.
infos (dict): Info values for each ready agent.
off_policy_actions (dict): Agents may take off-policy actions. When
that happens, there will be an entry in this dict that contains
the taken action. There is no need to send_actions() for agents
that have already chosen off-policy actions.
"""
raise NotImplementedError
@PublicAPI
def send_actions(self, action_dict: MultiEnvDict) -> None:
"""Called to send actions back to running agents in this env.
Actions should be sent for each ready agent that returned observations
in the previous poll() call.
Args:
action_dict (dict): Actions values keyed by env_id and agent_id.
"""
raise NotImplementedError
@PublicAPI
def try_reset(self,
env_id: Optional[EnvID] = None) -> Optional[MultiAgentDict]:
"""Attempt to reset the sub-env with the given id or all sub-envs.
If the environment does not support synchronous reset, None can be
returned here.
Args:
env_id (Optional[int]): The sub-env ID if applicable. If None,
reset the entire Env (i.e. all sub-envs).
Returns:
            Optional[MultiAgentDict]: The reset (multi-agent) observation
                dict, or None if reset is not supported.
"""
return None
@PublicAPI
def get_unwrapped(self) -> List[EnvType]:
"""Return a reference to the underlying gym envs, if any.
Returns:
envs (list): Underlying gym envs or [].
"""
return []
@PublicAPI
def try_render(self, env_id: Optional[EnvID] = None) -> None:
"""Tries to render the environment.
Args:
env_id (Optional[int]): The sub-env ID if applicable. If None,
renders the entire Env (i.e. all sub-envs).
"""
# By default, do nothing.
pass
@PublicAPI
def stop(self) -> None:
"""Releases all resources used."""
for env in self.get_unwrapped():
if hasattr(env, "close"):
env.close()
# Fixed agent identifier when there is only the single agent in the env
_DUMMY_AGENT_ID = "agent0"
def _with_dummy_agent_id(env_id_to_values: Dict[EnvID, Any],
dummy_id: "AgentID" = _DUMMY_AGENT_ID
) -> MultiEnvDict:
return {k: {dummy_id: v} for (k, v) in env_id_to_values.items()}
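# e.g. _with_dummy_agent_id({0: 1.5, 1: -0.5})
#      -> {0: {"agent0": 1.5}, 1: {"agent0": -0.5}}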
class _ExternalEnvToBaseEnv(BaseEnv):
"""Internal adapter of ExternalEnv to BaseEnv."""
def __init__(self,
external_env: ExternalEnv,
preprocessor: "Preprocessor" = None):
self.external_env = external_env
self.prep = preprocessor
self.multiagent = issubclass(type(external_env), ExternalMultiAgentEnv)
self.action_space = external_env.action_space
if preprocessor:
self.observation_space = preprocessor.observation_space
else:
self.observation_space = external_env.observation_space
external_env.start()
@override(BaseEnv)
def poll(self) -> Tuple[MultiEnvDict, MultiEnvDict, MultiEnvDict,
MultiEnvDict, MultiEnvDict]:
with self.external_env._results_avail_condition:
results = self._poll()
while len(results[0]) == 0:
self.external_env._results_avail_condition.wait()
results = self._poll()
if not self.external_env.isAlive():
raise Exception("Serving thread has stopped.")
limit = self.external_env._max_concurrent_episodes
assert len(results[0]) < limit, \
("Too many concurrent episodes, were some leaked? This "
"ExternalEnv was created with max_concurrent={}".format(limit))
return results
@override(BaseEnv)
def send_actions(self, action_dict: MultiEnvDict) -> None:
if self.multiagent:
for env_id, actions in action_dict.items():
self.external_env._episodes[env_id].action_queue.put(actions)
else:
for env_id, action in action_dict.items():
self.external_env._episodes[env_id].action_queue.put(
action[_DUMMY_AGENT_ID])
def _poll(self) -> Tuple[MultiEnvDict, MultiEnvDict, MultiEnvDict,
MultiEnvDict, MultiEnvDict]:
all_obs, all_rewards, all_dones, all_infos = {}, {}, {}, {}
off_policy_actions = {}
for eid, episode in self.external_env._episodes.copy().items():
data = episode.get_data()
cur_done = episode.cur_done_dict[
"__all__"] if self.multiagent else episode.cur_done
if cur_done:
del self.external_env._episodes[eid]
if data:
if self.prep:
all_obs[eid] = self.prep.transform(data["obs"])
else:
all_obs[eid] = data["obs"]
all_rewards[eid] = data["reward"]
all_dones[eid] = data["done"]
all_infos[eid] = data["info"]
if "off_policy_action" in data:
off_policy_actions[eid] = data["off_policy_action"]
if self.multiagent:
            # Ensure a consistent set of keys; rely on all_obs having all
            # possible keys for now.
for eid, eid_dict in all_obs.items():
for agent_id in eid_dict.keys():
def fix(d, zero_val):
if agent_id not in d[eid]:
d[eid][agent_id] = zero_val
fix(all_rewards, 0.0)
fix(all_dones, False)
fix(all_infos, {})
return (all_obs, all_rewards, all_dones, all_infos,
off_policy_actions)
else:
return _with_dummy_agent_id(all_obs), \
_with_dummy_agent_id(all_rewards), \
_with_dummy_agent_id(all_dones, "__all__"), \
_with_dummy_agent_id(all_infos), \
_with_dummy_agent_id(off_policy_actions)
class _VectorEnvToBaseEnv(BaseEnv):
"""Internal adapter of VectorEnv to BaseEnv.
We assume the caller will always send the full vector of actions in each
call to send_actions(), and that they call reset_at() on all completed
environments before calling send_actions().
"""
def __init__(self, vector_env: VectorEnv):
self.vector_env = vector_env
self.action_space = vector_env.action_space
self.observation_space = vector_env.observation_space
self.num_envs = vector_env.num_envs
self.new_obs = None # lazily initialized
self.cur_rewards = [None for _ in range(self.num_envs)]
self.cur_dones = [False for _ in range(self.num_envs)]
self.cur_infos = [None for _ in range(self.num_envs)]
@override(BaseEnv)
def poll(self) -> Tuple[MultiEnvDict, MultiEnvDict, MultiEnvDict,
MultiEnvDict, MultiEnvDict]:
if self.new_obs is None:
self.new_obs = self.vector_env.vector_reset()
new_obs = dict(enumerate(self.new_obs))
rewards = dict(enumerate(self.cur_rewards))
dones = dict(enumerate(self.cur_dones))
infos = dict(enumerate(self.cur_infos))
self.new_obs = []
self.cur_rewards = []
self.cur_dones = []
self.cur_infos = []
return _with_dummy_agent_id(new_obs), \
_with_dummy_agent_id(rewards), \
_with_dummy_agent_id(dones, "__all__"), \
_with_dummy_agent_id(infos), {}
@override(BaseEnv)
def send_actions(self, action_dict: MultiEnvDict) -> None:
action_vector = [None] * self.num_envs
for i in range(self.num_envs):
action_vector[i] = action_dict[i][_DUMMY_AGENT_ID]
self.new_obs, self.cur_rewards, self.cur_dones, self.cur_infos = \
self.vector_env.vector_step(action_vector)
@override(BaseEnv)
def try_reset(self, env_id: Optional[EnvID] = None) -> MultiAgentDict:
assert env_id is None or isinstance(env_id, int)
return {_DUMMY_AGENT_ID: self.vector_env.reset_at(env_id)}
@override(BaseEnv)
def get_unwrapped(self) -> List[EnvType]:
return self.vector_env.get_unwrapped()
@override(BaseEnv)
def try_render(self, env_id: Optional[EnvID] = None) -> None:
assert env_id is None or isinstance(env_id, int)
return self.vector_env.try_render_at(env_id)
class _MultiAgentEnvToBaseEnv(BaseEnv):
"""Internal adapter of MultiAgentEnv to BaseEnv.
This also supports vectorization if num_envs > 1.
"""
def __init__(self, make_env: Callable[[int], EnvType],
existing_envs: List[MultiAgentEnv], num_envs: int):
"""Wrap existing multi-agent envs.
Args:
make_env (func|None): Factory that produces a new multiagent env.
Must be defined if the number of existing envs is less than
num_envs.
existing_envs (list): List of existing multiagent envs.
num_envs (int): Desired num multiagent envs to keep total.
"""
self.make_env = make_env
self.envs = existing_envs
self.num_envs = num_envs
self.dones = set()
while len(self.envs) < self.num_envs:
self.envs.append(self.make_env(len(self.envs)))
for env in self.envs:
assert isinstance(env, MultiAgentEnv)
self.env_states = [_MultiAgentEnvState(env) for env in self.envs]
@override(BaseEnv)
def poll(self) -> Tuple[MultiEnvDict, MultiEnvDict, MultiEnvDict,
MultiEnvDict, MultiEnvDict]:
obs, rewards, dones, infos = {}, {}, {}, {}
for i, env_state in enumerate(self.env_states):
obs[i], rewards[i], dones[i], infos[i] = env_state.poll()
return obs, rewards, dones, infos, {}
@override(BaseEnv)
def send_actions(self, action_dict: MultiEnvDict) -> None:
for env_id, agent_dict in action_dict.items():
if env_id in self.dones:
raise ValueError("Env {} is already done".format(env_id))
env = self.envs[env_id]
obs, rewards, dones, infos = env.step(agent_dict)
assert isinstance(obs, dict), "Not a multi-agent obs"
assert isinstance(rewards, dict), "Not a multi-agent reward"
assert isinstance(dones, dict), "Not a multi-agent return"
assert isinstance(infos, dict), "Not a multi-agent info"
if set(infos).difference(set(obs)):
raise ValueError("Key set for infos must be a subset of obs: "
"{} vs {}".format(infos.keys(), obs.keys()))
if "__all__" not in dones:
raise ValueError(
"In multi-agent environments, '__all__': True|False must "
"be included in the 'done' dict: got {}.".format(dones))
if dones["__all__"]:
self.dones.add(env_id)
self.env_states[env_id].observe(obs, rewards, dones, infos)
@override(BaseEnv)
def try_reset(self,
env_id: Optional[EnvID] = None) -> Optional[MultiAgentDict]:
obs = self.env_states[env_id].reset()
assert isinstance(obs, dict), "Not a multi-agent obs"
if obs is not None and env_id in self.dones:
self.dones.remove(env_id)
return obs
@override(BaseEnv)
def get_unwrapped(self) -> List[EnvType]:
return [state.env for state in self.env_states]
@override(BaseEnv)
def try_render(self, env_id: Optional[EnvID] = None) -> None:
if env_id is None:
env_id = 0
assert isinstance(env_id, int)
return self.envs[env_id].render()
class _MultiAgentEnvState:
def __init__(self, env: MultiAgentEnv):
assert isinstance(env, MultiAgentEnv)
self.env = env
self.initialized = False
def poll(self) -> Tuple[MultiAgentDict, MultiAgentDict, MultiAgentDict,
MultiAgentDict, MultiAgentDict]:
if not self.initialized:
self.reset()
self.initialized = True
observations = self.last_obs
rewards = {}
dones = {"__all__": self.last_dones["__all__"]}
infos = {}
# If episode is done, release everything we have.
if dones["__all__"]:
rewards = self.last_rewards
self.last_rewards = {}
dones = self.last_dones
self.last_dones = {}
self.last_obs = {}
        # Only release rewards/dones/infos for agents whose observations
        # we have.
else:
for ag in observations.keys():
if ag in self.last_rewards:
rewards[ag] = self.last_rewards[ag]
del self.last_rewards[ag]
if ag in self.last_dones:
dones[ag] = self.last_dones[ag]
del self.last_dones[ag]
self.last_dones["__all__"] = False
self.last_infos = {}
return observations, rewards, dones, infos
def observe(self, obs: MultiAgentDict, rewards: MultiAgentDict,
dones: MultiAgentDict, infos: MultiAgentDict):
self.last_obs = obs
for ag, r in rewards.items():
if ag in self.last_rewards:
self.last_rewards[ag] += r
else:
self.last_rewards[ag] = r
for ag, d in dones.items():
if ag in self.last_dones:
self.last_dones[ag] = self.last_dones[ag] or d
else:
self.last_dones[ag] = d
self.last_infos = infos
def reset(self) -> MultiAgentDict:
self.last_obs = self.env.reset()
self.last_rewards = {}
self.last_dones = {"__all__": False}
self.last_infos = {}
return self.last_obs
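# Driver-loop sketch tying the adapters together (assumes `env` is any BaseEnv
# and `policy` maps a single observation to an action):
#
#     while True:
#         obs, rewards, dones, infos, off_policy = env.poll()
#         actions = {env_id: {agent_id: policy(ob)
#                             for agent_id, ob in agent_obs.items()}
#                    for env_id, agent_obs in obs.items()}
#         env.send_actions(actions)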
|
|
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Manager orchestrates the overall process of running layout tests.
This includes finding tests to run, reading the test expectations,
starting the required helper servers, deciding the order and way to
run the tests, retrying failed tests and collecting the test results,
including crash logs, and mismatches with expectations.
The Manager object has a constructor and one main method called run.
"""
import json
import logging
import random
import sys
import time
from webkitpy.common.net.file_uploader import FileUploader
from webkitpy.layout_tests.controllers.layout_test_finder import LayoutTestFinder
from webkitpy.layout_tests.controllers.layout_test_runner import LayoutTestRunner
from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter
from webkitpy.layout_tests.layout_package import json_results_generator
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.models.test_input import TestInput
from webkitpy.tool import grammar
_log = logging.getLogger(__name__)
# Builder base URL where we have the archived test results.
BUILDER_BASE_URL = "http://build.chromium.org/buildbot/layout_test_results/"
TestExpectations = test_expectations.TestExpectations
class Manager(object):
"""A class for managing running a series of layout tests."""
def __init__(self, port, options, printer):
"""Initialize test runner data structures.
Args:
port: An object implementing platform-specific functionality.
options: An options argument which contains command line options.
printer: A Printer object to record updates to.
"""
self._port = port
self._filesystem = port.host.filesystem
self._options = options
self._printer = printer
self._expectations = None
self.HTTP_SUBDIR = 'http' + port.TEST_PATH_SEPARATOR
self.INSPECTOR_SUBDIR = 'inspector' + port.TEST_PATH_SEPARATOR
self.PERF_SUBDIR = 'perf'
self.WEBSOCKET_SUBDIR = 'websocket' + port.TEST_PATH_SEPARATOR
self.LAYOUT_TESTS_DIRECTORY = 'LayoutTests'
self.ARCHIVED_RESULTS_LIMIT = 25
self._http_server_started = False
self._wptserve_started = False
self._websockets_server_started = False
self._results_directory = self._port.results_directory()
self._finder = LayoutTestFinder(self._port, self._options)
self._runner = LayoutTestRunner(self._options, self._port, self._printer, self._results_directory, self._test_is_slow)
def run(self, args):
"""Run the tests and return a RunDetails object with the results."""
start_time = time.time()
self._printer.write_update("Collecting tests ...")
running_all_tests = False
try:
paths, test_names, running_all_tests = self._collect_tests(args)
except IOError:
# This is raised if --test-list doesn't exist
return test_run_results.RunDetails(exit_code=test_run_results.NO_TESTS_EXIT_STATUS)
self._printer.write_update("Parsing expectations ...")
self._expectations = test_expectations.TestExpectations(self._port, test_names)
tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)
self._printer.print_found(len(test_names), len(tests_to_run), self._options.repeat_each, self._options.iterations)
# Check to make sure we're not skipping every test.
if not tests_to_run:
_log.critical('No tests to run.')
return test_run_results.RunDetails(exit_code=test_run_results.NO_TESTS_EXIT_STATUS)
exit_code = self._set_up_run(tests_to_run)
if exit_code:
return test_run_results.RunDetails(exit_code=exit_code)
# Don't retry failures if an explicit list of tests was passed in.
if self._options.retry_failures is None:
should_retry_failures = len(paths) < len(test_names)
else:
should_retry_failures = self._options.retry_failures
enabled_pixel_tests_in_retry = False
try:
self._start_servers(tests_to_run)
num_workers = self._port.num_workers(int(self._options.child_processes))
initial_results = self._run_tests(
tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations,
num_workers)
            # Don't retry failures when the run was interrupted by the user or hit the failures-limit exception.
should_retry_failures = should_retry_failures and not (
initial_results.interrupted or initial_results.keyboard_interrupted)
tests_to_retry = self._tests_to_retry(initial_results)
all_retry_results = []
if should_retry_failures and tests_to_retry:
enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed()
for retry_attempt in xrange(1, self._options.num_retries + 1):
if not tests_to_retry:
break
_log.info('')
_log.info('Retrying %s, attempt %d of %d...' %
(grammar.pluralize('unexpected failure', len(tests_to_retry)),
retry_attempt, self._options.num_retries))
retry_results = self._run_tests(tests_to_retry,
tests_to_skip=set(),
repeat_each=1,
iterations=1,
num_workers=num_workers,
retry_attempt=retry_attempt)
all_retry_results.append(retry_results)
tests_to_retry = self._tests_to_retry(retry_results)
if enabled_pixel_tests_in_retry:
self._options.pixel_tests = False
finally:
self._stop_servers()
self._clean_up_run()
# Some crash logs can take a long time to be written out so look
# for new logs after the test run finishes.
self._printer.write_update("looking for new crash logs")
self._look_for_new_crash_logs(initial_results, start_time)
for retry_attempt_results in all_retry_results:
self._look_for_new_crash_logs(retry_attempt_results, start_time)
_log.debug("summarizing results")
summarized_full_results = test_run_results.summarize_results(
self._port, self._expectations, initial_results, all_retry_results,
enabled_pixel_tests_in_retry)
summarized_failing_results = test_run_results.summarize_results(
self._port, self._expectations, initial_results, all_retry_results,
enabled_pixel_tests_in_retry, only_include_failing=True)
exit_code = summarized_failing_results['num_regressions']
if exit_code > test_run_results.MAX_FAILURES_EXIT_STATUS:
_log.warning('num regressions (%d) exceeds max exit status (%d)' %
(exit_code, test_run_results.MAX_FAILURES_EXIT_STATUS))
exit_code = test_run_results.MAX_FAILURES_EXIT_STATUS
if not self._options.dry_run:
self._write_json_files(summarized_full_results, summarized_failing_results, initial_results, running_all_tests)
if self._options.write_full_results_to:
self._filesystem.copyfile(self._filesystem.join(self._results_directory, "full_results.json"),
self._options.write_full_results_to)
self._upload_json_files()
results_path = self._filesystem.join(self._results_directory, "results.html")
self._copy_results_html_file(results_path)
if initial_results.keyboard_interrupted:
exit_code = test_run_results.INTERRUPTED_EXIT_STATUS
else:
if initial_results.interrupted:
exit_code = test_run_results.EARLY_EXIT_STATUS
if self._options.show_results and (exit_code or (self._options.full_results_html and initial_results.total_failures)):
self._port.show_results_html_file(results_path)
self._printer.print_results(time.time() - start_time, initial_results, summarized_failing_results)
return test_run_results.RunDetails(
exit_code, summarized_full_results, summarized_failing_results,
initial_results, all_retry_results, enabled_pixel_tests_in_retry)
def _collect_tests(self, args):
return self._finder.find_tests(args, test_list=self._options.test_list,
fastest_percentile=self._options.fastest)
def _is_http_test(self, test):
return (
test.startswith(self.HTTP_SUBDIR) or
self._is_websocket_test(test) or
self._port.TEST_PATH_SEPARATOR + self.HTTP_SUBDIR in test
)
def _is_inspector_test(self, test):
return self.INSPECTOR_SUBDIR in test
def _is_websocket_test(self, test):
if self._port.is_wpt_enabled() and self._port.is_wpt_test(test):
return False
return self.WEBSOCKET_SUBDIR in test
def _http_tests(self, test_names):
return set(test for test in test_names if self._is_http_test(test))
def _is_perf_test(self, test):
return self.PERF_SUBDIR == test or (self.PERF_SUBDIR + self._port.TEST_PATH_SEPARATOR) in test
def _prepare_lists(self, paths, test_names):
tests_to_skip = self._finder.skip_tests(paths, test_names, self._expectations, self._http_tests(test_names))
tests_to_run = [test for test in test_names if test not in tests_to_skip]
if not tests_to_run:
return tests_to_run, tests_to_skip
# Create a sorted list of test files so the subset chunk,
# if used, contains alphabetically consecutive tests.
if self._options.order == 'natural':
tests_to_run.sort(key=self._port.test_key)
elif self._options.order == 'random':
random.shuffle(tests_to_run)
elif self._options.order == 'random-seeded':
rnd = random.Random()
rnd.seed(4) # http://xkcd.com/221/
rnd.shuffle(tests_to_run)
tests_to_run, tests_in_other_chunks = self._finder.split_into_chunks(tests_to_run)
self._expectations.add_extra_skipped_tests(tests_in_other_chunks)
tests_to_skip.update(tests_in_other_chunks)
return tests_to_run, tests_to_skip
def _test_input_for_file(self, test_file):
return TestInput(test_file,
self._options.slow_time_out_ms if self._test_is_slow(test_file) else self._options.time_out_ms,
self._test_requires_lock(test_file),
should_add_missing_baselines=(self._options.new_test_results and not self._test_is_expected_missing(test_file)))
def _test_requires_lock(self, test_file):
"""Return True if the test needs to be locked when running multiple
instances of this test runner.
Perf tests are locked because heavy load caused by running other
tests in parallel might cause some of them to time out.
"""
return self._is_http_test(test_file) or self._is_perf_test(test_file)
def _test_is_expected_missing(self, test_file):
expectations = self._expectations.model().get_expectations(test_file)
return test_expectations.MISSING in expectations or test_expectations.NEEDS_REBASELINE in expectations or test_expectations.NEEDS_MANUAL_REBASELINE in expectations
def _test_is_slow(self, test_file):
return test_expectations.SLOW in self._expectations.model().get_expectations(test_file)
def _needs_servers(self, test_names):
return any(self._test_requires_lock(test_name) for test_name in test_names)
def _rename_results_folder(self):
try:
timestamp = time.strftime(
"%Y-%m-%d-%H-%M-%S", time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html"))))
except (IOError, OSError), e:
            # results.html may not have been generated in the previous run if the
            # test run was interrupted before testing even started. In that case,
            # don't archive the folder; simply overwrite it with the new results.
import errno
if e.errno == errno.EEXIST or e.errno == errno.ENOENT:
self._printer.write_update("No results.html file found in previous run, skipping it.")
return None
archived_name = ''.join((self._filesystem.basename(self._results_directory), "_", timestamp))
archived_path = self._filesystem.join(self._filesystem.dirname(self._results_directory), archived_name)
self._filesystem.move(self._results_directory, archived_path)
def _delete_dirs(self, dir_list):
for dir in dir_list:
self._filesystem.rmtree(dir)
def _limit_archived_results_count(self):
results_directory_path = self._filesystem.dirname(self._results_directory)
file_list = self._filesystem.listdir(results_directory_path)
results_directories = []
for dir in file_list:
file_path = self._filesystem.join(results_directory_path, dir)
if self._filesystem.isdir(file_path) and self._results_directory in file_path:
results_directories.append(file_path)
results_directories.sort(key=lambda x: self._filesystem.mtime(x))
self._printer.write_update("Clobbering excess archived results in %s" % results_directory_path)
self._delete_dirs(results_directories[:-self.ARCHIVED_RESULTS_LIMIT])
def _set_up_run(self, test_names):
self._printer.write_update("Checking build ...")
if self._options.build:
exit_code = self._port.check_build(self._needs_servers(test_names), self._printer)
if exit_code:
_log.error("Build check failed")
return exit_code
# This must be started before we check the system dependencies,
# since the helper may do things to make the setup correct.
if self._options.pixel_tests:
self._printer.write_update("Starting pixel test helper ...")
self._port.start_helper()
# Check that the system dependencies (themes, fonts, ...) are correct.
if not self._options.nocheck_sys_deps:
self._printer.write_update("Checking system dependencies ...")
exit_code = self._port.check_sys_deps(self._needs_servers(test_names))
if exit_code:
self._port.stop_helper()
return exit_code
if self._options.clobber_old_results:
self._clobber_old_results()
elif self._filesystem.exists(self._results_directory):
self._limit_archived_results_count()
# Rename the existing results folder for archiving.
self._rename_results_folder()
# Create the output directory if it doesn't already exist.
self._port.host.filesystem.maybe_make_directory(self._results_directory)
self._port.setup_test_run()
return test_run_results.OK_EXIT_STATUS
def _run_tests(self, tests_to_run, tests_to_skip, repeat_each, iterations,
num_workers, retry_attempt=0):
test_inputs = []
for _ in xrange(iterations):
for test in tests_to_run:
for _ in xrange(repeat_each):
test_inputs.append(self._test_input_for_file(test))
return self._runner.run_tests(self._expectations, test_inputs,
tests_to_skip, num_workers, retry_attempt)
def _start_servers(self, tests_to_run):
if self._port.is_wpt_enabled() and any(self._port.is_wpt_test(test) for test in tests_to_run):
self._printer.write_update('Starting WPTServe ...')
self._port.start_wptserve()
self._wptserve_started = True
if self._port.requires_http_server() or any((self._is_http_test(test) or self._is_inspector_test(test)) for test in tests_to_run):
self._printer.write_update('Starting HTTP server ...')
self._port.start_http_server(additional_dirs={}, number_of_drivers=self._options.max_locked_shards)
self._http_server_started = True
if any(self._is_websocket_test(test) for test in tests_to_run):
self._printer.write_update('Starting WebSocket server ...')
self._port.start_websocket_server()
self._websockets_server_started = True
def _stop_servers(self):
if self._wptserve_started:
self._printer.write_update('Stopping WPTServe ...')
self._wptserve_started = False
self._port.stop_wptserve()
if self._http_server_started:
self._printer.write_update('Stopping HTTP server ...')
self._http_server_started = False
self._port.stop_http_server()
if self._websockets_server_started:
self._printer.write_update('Stopping WebSocket server ...')
self._websockets_server_started = False
self._port.stop_websocket_server()
def _clean_up_run(self):
_log.debug("Flushing stdout")
sys.stdout.flush()
_log.debug("Flushing stderr")
sys.stderr.flush()
_log.debug("Stopping helper")
self._port.stop_helper()
_log.debug("Cleaning up port")
self._port.clean_up_test_run()
def _force_pixel_tests_if_needed(self):
if self._options.pixel_tests:
return False
_log.debug("Restarting helper")
self._port.stop_helper()
self._options.pixel_tests = True
self._port.start_helper()
return True
def _look_for_new_crash_logs(self, run_results, start_time):
"""Looks for and writes new crash logs, at the end of the test run.
Since crash logs can take a long time to be written out if the system is
under stress, do a second pass at the end of the test run.
Args:
run_results: The results of the test run.
start_time: Time the tests started at. We're looking for crash
logs after that time.
"""
crashed_processes = []
for test, result in run_results.unexpected_results_by_name.iteritems():
if (result.type != test_expectations.CRASH):
continue
for failure in result.failures:
if not isinstance(failure, test_failures.FailureCrash):
continue
if failure.has_log:
continue
crashed_processes.append([test, failure.process_name, failure.pid])
sample_files = self._port.look_for_new_samples(crashed_processes, start_time)
if sample_files:
for test, sample_file in sample_files.iteritems():
writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test)
writer.copy_sample_file(sample_file)
crash_logs = self._port.look_for_new_crash_logs(crashed_processes, start_time)
if crash_logs:
for test, crash_log in crash_logs.iteritems():
writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test)
writer.write_crash_log(crash_log)
def _clobber_old_results(self):
dir_above_results_path = self._filesystem.dirname(self._results_directory)
self._printer.write_update("Clobbering old results in %s" % dir_above_results_path)
if not self._filesystem.exists(dir_above_results_path):
return
file_list = self._filesystem.listdir(dir_above_results_path)
results_directories = []
for dir in file_list:
file_path = self._filesystem.join(dir_above_results_path, dir)
if self._filesystem.isdir(file_path) and self._results_directory in file_path:
results_directories.append(file_path)
self._delete_dirs(results_directories)
# Port specific clean-up.
self._port.clobber_old_port_specific_results()
def _tests_to_retry(self, run_results):
# TODO(ojan): This should also check that result.type != test_expectations.MISSING since retrying missing expectations is silly.
# But that's a bit tricky since we only consider the last retry attempt for the count of unexpected regressions.
return [result.test_name for result in run_results.unexpected_results_by_name.values() if result.type != test_expectations.PASS]
def _write_json_files(self, summarized_full_results, summarized_failing_results, initial_results, running_all_tests):
_log.debug("Writing JSON files in %s." % self._results_directory)
# FIXME: Upload stats.json to the server and delete times_ms.
times_trie = json_results_generator.test_timings_trie(initial_results.results_by_name.values())
times_json_path = self._filesystem.join(self._results_directory, "times_ms.json")
json_results_generator.write_json(self._filesystem, times_trie, times_json_path)
# Save out the times data so we can use it for --fastest in the future.
if running_all_tests:
bot_test_times_path = self._port.bot_test_times_path()
self._filesystem.maybe_make_directory(self._filesystem.dirname(bot_test_times_path))
json_results_generator.write_json(self._filesystem, times_trie, bot_test_times_path)
stats_trie = self._stats_trie(initial_results)
stats_path = self._filesystem.join(self._results_directory, "stats.json")
self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))
full_results_path = self._filesystem.join(self._results_directory, "full_results.json")
json_results_generator.write_json(self._filesystem, summarized_full_results, full_results_path)
full_results_path = self._filesystem.join(self._results_directory, "failing_results.json")
# We write failing_results.json out as jsonp because we need to load it
# from a file url for results.html and Chromium doesn't allow that.
json_results_generator.write_json(self._filesystem, summarized_failing_results, full_results_path, callback="ADD_RESULTS")
if self._options.json_test_results:
json_results_generator.write_json(self._filesystem, summarized_failing_results, self._options.json_test_results)
_log.debug("Finished writing JSON files.")
def _upload_json_files(self):
if not self._options.test_results_server:
return
if not self._options.master_name:
_log.error("--test-results-server was set, but --master-name was not. Not uploading JSON files.")
return
_log.debug("Uploading JSON files for builder: %s", self._options.builder_name)
attrs = [("builder", self._options.builder_name),
("testtype", self._options.step_name),
("master", self._options.master_name)]
files = [(file, self._filesystem.join(self._results_directory, file))
for file in ["failing_results.json", "full_results.json", "times_ms.json"]]
url = "http://%s/testfile/upload" % self._options.test_results_server
# Set uploading timeout in case appengine server is having problems.
        # 120 seconds is more than enough to upload test results.
uploader = FileUploader(url, 120)
try:
response = uploader.upload_as_multipart_form_data(self._filesystem, files, attrs)
if response:
if response.code == 200:
_log.debug("JSON uploaded.")
else:
_log.debug("JSON upload failed, %d: '%s'" % (response.code, response.read()))
else:
_log.error("JSON upload failed; no response returned")
except Exception, err:
_log.error("Upload failed: %s" % err)
def _copy_results_html_file(self, destination_path):
base_dir = self._port.path_from_webkit_base('LayoutTests', 'fast', 'harness')
results_file = self._filesystem.join(base_dir, 'results.html')
# Note that the results.html template file won't exist when we're using a MockFileSystem during unit tests,
# so make sure it exists before we try to copy it.
if self._filesystem.exists(results_file):
self._filesystem.copyfile(results_file, destination_path)
def _stats_trie(self, initial_results):
def _worker_number(worker_name):
return int(worker_name.split('/')[1]) if worker_name else -1
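        # e.g. _worker_number('worker/3') == 3; results without a worker
        # name map to -1.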
stats = {}
for result in initial_results.results_by_name.values():
if result.type != test_expectations.SKIP:
stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(
result.test_run_time * 1000), int(result.total_run_time * 1000))}
stats_trie = {}
for name, value in stats.iteritems():
json_results_generator.add_path_to_trie(name, value, stats_trie)
return stats_trie
|
|
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = 'wesc+api@google.com (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from models import ConflictException
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import BooleanMessage
from models import Conference
from models import ConferenceForm
from models import ConferenceForms
from models import ConferenceQueryForm
from models import ConferenceQueryForms
from models import TeeShirtSize
from models import StringMessage
from utils import getUserId
from settings import WEB_CLIENT_ID
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
DEFAULTS = {
"city": "Default City",
"maxAttendees": 0,
"seatsAvailable": 0,
"topics": [ "Default", "Topic" ],
}
OPERATORS = {
'EQ': '=',
'GT': '>',
'GTEQ': '>=',
'LT': '<',
'LTEQ': '<=',
'NE': '!='
}
FIELDS = {
'CITY': 'city',
'TOPIC': 'topics',
'MONTH': 'month',
'MAX_ATTENDEES': 'maxAttendees',
}
CONF_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1),
)
CONF_POST_REQUEST = endpoints.ResourceContainer(
ConferenceForm,
websafeConferenceKey=messages.StringField(1),
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api(name='conference', version='v1',
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Conference objects - - - - - - - - - - - - - - - - -
def _copyConferenceToForm(self, conf, displayName):
"""Copy relevant fields from Conference to ConferenceForm."""
cf = ConferenceForm()
for field in cf.all_fields():
if hasattr(conf, field.name):
# convert Date to date string; just copy others
if field.name.endswith('Date'):
setattr(cf, field.name, str(getattr(conf, field.name)))
else:
setattr(cf, field.name, getattr(conf, field.name))
elif field.name == "websafeKey":
setattr(cf, field.name, conf.key.urlsafe())
if displayName:
setattr(cf, 'organizerDisplayName', displayName)
cf.check_initialized()
return cf
def _createConferenceObject(self, request):
"""Create or update Conference object, returning ConferenceForm/request."""
# preload necessary data items
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
if not request.name:
raise endpoints.BadRequestException("Conference 'name' field required")
# copy ConferenceForm/ProtoRPC Message into dict
data = {field.name: getattr(request, field.name) for field in request.all_fields()}
del data['websafeKey']
del data['organizerDisplayName']
# add default values for those missing (both data model & outbound Message)
for df in DEFAULTS:
if data[df] in (None, []):
data[df] = DEFAULTS[df]
setattr(request, df, DEFAULTS[df])
# convert dates from strings to Date objects; set month based on start_date
if data['startDate']:
data['startDate'] = datetime.strptime(data['startDate'][:10], "%Y-%m-%d").date()
data['month'] = data['startDate'].month
else:
data['month'] = 0
if data['endDate']:
data['endDate'] = datetime.strptime(data['endDate'][:10], "%Y-%m-%d").date()
# set seatsAvailable to be same as maxAttendees on creation
if data["maxAttendees"] > 0:
data["seatsAvailable"] = data["maxAttendees"]
        # generate a Profile key from the user ID, allocate a Conference ID
        # with that Profile key as parent, then build the Conference key
p_key = ndb.Key(Profile, user_id)
c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
c_key = ndb.Key(Conference, c_id, parent=p_key)
data['key'] = c_key
data['organizerUserId'] = request.organizerUserId = user_id
# create Conference, send email to organizer confirming
# creation of Conference & return (modified) ConferenceForm
Conference(**data).put()
taskqueue.add(params = {'email': user.email(),
'conferenceInfo': repr(request)},
url = '/tasks/send_confirmation_email'
)
        # TODO 2 (done above): confirmation email sending task added to queue
return request
@ndb.transactional()
def _updateConferenceObject(self, request):
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# copy ConferenceForm/ProtoRPC Message into dict
data = {field.name: getattr(request, field.name) for field in request.all_fields()}
# update existing conference
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
# check that conference exists
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
# check that user is owner
if user_id != conf.organizerUserId:
raise endpoints.ForbiddenException(
'Only the owner can update the conference.')
# Not getting all the fields, so don't create a new object; just
# copy relevant fields from ConferenceForm to Conference object
for field in request.all_fields():
data = getattr(request, field.name)
# only copy fields where we get data
if data not in (None, []):
# special handling for dates (convert string to Date)
if field.name in ('startDate', 'endDate'):
data = datetime.strptime(data, "%Y-%m-%d").date()
if field.name == 'startDate':
conf.month = data.month
# write to Conference object
setattr(conf, field.name, data)
conf.put()
prof = ndb.Key(Profile, user_id).get()
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(ConferenceForm, ConferenceForm, path='conference',
http_method='POST', name='createConference')
def createConference(self, request):
"""Create new conference."""
return self._createConferenceObject(request)
@endpoints.method(CONF_POST_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='PUT', name='updateConference')
def updateConference(self, request):
"""Update conference w/provided fields & return w/updated info."""
return self._updateConferenceObject(request)
@endpoints.method(CONF_GET_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='GET', name='getConference')
def getConference(self, request):
"""Return requested conference (by websafeConferenceKey)."""
# get Conference object from request; bail if not found
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
prof = conf.key.parent().get()
# return ConferenceForm
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='getConferencesCreated',
http_method='POST', name='getConferencesCreated')
def getConferencesCreated(self, request):
"""Return conferences created by user."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# create ancestor query for all key matches for this user
confs = Conference.query(ancestor=ndb.Key(Profile, user_id))
prof = ndb.Key(Profile, user_id).get()
# return set of ConferenceForm objects per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(conf, getattr(prof, 'displayName')) for conf in confs]
)
def _getQuery(self, request):
"""Return formatted query from the submitted filters."""
q = Conference.query()
inequality_filter, filters = self._formatFilters(request.filters)
# If exists, sort on inequality filter first
if not inequality_filter:
q = q.order(Conference.name)
else:
q = q.order(ndb.GenericProperty(inequality_filter))
q = q.order(Conference.name)
for filtr in filters:
if filtr["field"] in ["month", "maxAttendees"]:
filtr["value"] = int(filtr["value"])
formatted_query = ndb.query.FilterNode(filtr["field"], filtr["operator"], filtr["value"])
q = q.filter(formatted_query)
return q
def _formatFilters(self, filters):
"""Parse, check validity and format user supplied filters."""
formatted_filters = []
inequality_field = None
for f in filters:
filtr = {field.name: getattr(f, field.name) for field in f.all_fields()}
try:
filtr["field"] = FIELDS[filtr["field"]]
filtr["operator"] = OPERATORS[filtr["operator"]]
except KeyError:
raise endpoints.BadRequestException("Filter contains invalid field or operator.")
# Every operation except "=" is an inequality
if filtr["operator"] != "=":
# check if inequality operation has been used in previous filters
# disallow the filter if inequality was performed on a different field before
# track the field on which the inequality operation is performed
if inequality_field and inequality_field != filtr["field"]:
raise endpoints.BadRequestException("Inequality filter is allowed on only one field.")
else:
inequality_field = filtr["field"]
formatted_filters.append(filtr)
return (inequality_field, formatted_filters)
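    # Worked example (illustrative values only): given submitted filters
    #   [{"field": "CITY", "operator": "EQ", "value": "London"},
    #    {"field": "MAX_ATTENDEES", "operator": "GT", "value": "10"}]
    # _formatFilters maps names through FIELDS and OPERATORS and returns
    #   ("maxAttendees",
    #    [{"field": "city", "operator": "=", "value": "London"},
    #     {"field": "maxAttendees", "operator": ">", "value": "10"}]).
    # A second inequality on a different field raises BadRequestException,
    # since Datastore allows inequality filters on only one property.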
@endpoints.method(ConferenceQueryForms, ConferenceForms,
path='queryConferences',
http_method='POST',
name='queryConferences')
def queryConferences(self, request):
"""Query for conferences."""
conferences = self._getQuery(request)
# need to fetch organiser displayName from profiles
# get all keys and use get_multi for speed
organisers = [(ndb.Key(Profile, conf.organizerUserId)) for conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return individual ConferenceForm object per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(conf, names[conf.organizerUserId]) for conf in \
conferences]
)
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
def _copyProfileToForm(self, prof):
"""Copy relevant fields from Profile to ProfileForm."""
# copy relevant fields from Profile to ProfileForm
pf = ProfileForm()
for field in pf.all_fields():
if hasattr(prof, field.name):
# convert t-shirt string to Enum; just copy others
if field.name == 'teeShirtSize':
setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
else:
setattr(pf, field.name, getattr(prof, field.name))
pf.check_initialized()
return pf
def _getProfileFromUser(self):
"""Return user Profile from datastore, creating new one if non-existent."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# get Profile from datastore
user_id = getUserId(user)
p_key = ndb.Key(Profile, user_id)
profile = p_key.get()
# create new Profile if not there
if not profile:
profile = Profile(
key = p_key,
displayName = user.nickname(),
mainEmail= user.email(),
teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
)
profile.put()
return profile # return Profile
def _doProfile(self, save_request=None):
"""Get user Profile and return to user, possibly updating it first."""
# get user Profile
prof = self._getProfileFromUser()
        # if called via saveProfile(), process user-modifiable fields
if save_request:
for field in ('displayName', 'teeShirtSize'):
if hasattr(save_request, field):
val = getattr(save_request, field)
if val:
setattr(prof, field, str(val))
#if field == 'teeShirtSize':
# setattr(prof, field, str(val).upper())
#else:
# setattr(prof, field, val)
prof.put()
# return ProfileForm
return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
"""Return user profile."""
return self._doProfile()
@endpoints.method(ProfileMiniForm, ProfileForm,
path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
"""Update & return user profile."""
return self._doProfile(request)
# - - - Registration - - - - - - - - - - - - - - - - - - - -
@ndb.transactional(xg=True)
def _conferenceRegistration(self, request, reg=True):
"""Register or unregister user for selected conference."""
retval = None
prof = self._getProfileFromUser() # get user Profile
# check if conf exists given websafeConfKey
# get conference; check that it exists
wsck = request.websafeConferenceKey
conf = ndb.Key(urlsafe=wsck).get()
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % wsck)
# register
if reg:
            # check if user is already registered; otherwise add them
if wsck in prof.conferenceKeysToAttend:
raise ConflictException(
"You have already registered for this conference")
# check if seats avail
if conf.seatsAvailable <= 0:
raise ConflictException(
"There are no seats available.")
# register user, take away one seat
prof.conferenceKeysToAttend.append(wsck)
conf.seatsAvailable -= 1
retval = True
# unregister
else:
# check if user already registered
if wsck in prof.conferenceKeysToAttend:
# unregister user, add back one seat
prof.conferenceKeysToAttend.remove(wsck)
conf.seatsAvailable += 1
retval = True
else:
retval = False
# write things back to the datastore & return
prof.put()
conf.put()
return BooleanMessage(data=retval)
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='conferences/attending',
http_method='GET', name='getConferencesToAttend')
def getConferencesToAttend(self, request):
"""Get list of conferences that user has registered for."""
prof = self._getProfileFromUser() # get user Profile
conf_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend]
conferences = ndb.get_multi(conf_keys)
# get organizers
organisers = [ndb.Key(Profile, conf.organizerUserId) for conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return set of ConferenceForm objects per Conference
return ConferenceForms(items=[self._copyConferenceToForm(conf, names[conf.organizerUserId])\
for conf in conferences]
)
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='POST', name='registerForConference')
def registerForConference(self, request):
"""Register user for selected conference."""
return self._conferenceRegistration(request)
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='DELETE', name='unregisterFromConference')
def unregisterFromConference(self, request):
"""Unregister user for selected conference."""
return self._conferenceRegistration(request, reg=False)
# - - - Announcements - - - - - - - - - - - - - - - - - - - -
@staticmethod
def _cacheAnnouncement():
"""Create Announcement & assign to memcache; used by
memcache cron job & putAnnouncement().
"""
confs = Conference.query(ndb.AND(
Conference.seatsAvailable <= 5,
Conference.seatsAvailable > 0)
).fetch(projection=[Conference.name])
if confs:
# If there are almost sold out conferences,
# format announcement and set it in memcache
announcement = '%s %s' % (
'Last chance to attend! The following conferences '
'are nearly sold out:',
', '.join(conf.name for conf in confs))
memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
else:
            # If no conferences are nearly sold out,
# delete the memcache announcements entry
announcement = ""
memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
return announcement
@endpoints.method(message_types.VoidMessage, StringMessage,
path='conference/announcement/get',
http_method='GET', name='getAnnouncement')
def getAnnouncement(self, request):
"""Return Announcement from memcache."""
# TODO 1
# return an existing announcement from Memcache or an empty string.
announcement = memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY)
if not announcement:
announcement = ""
return StringMessage(data=announcement)
api = endpoints.api_server([ConferenceApi]) # register API
|
|
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.biography_v30_rc2 import BiographyV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30_rc2 import LastModifiedDateV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.name_v30_rc2 import NameV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.other_names_v30_rc2 import OtherNamesV30Rc2 # noqa: F401,E501
class PersonalDetailsV30Rc2(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'last_modified_date': 'LastModifiedDateV30Rc2',
'name': 'NameV30Rc2',
'other_names': 'OtherNamesV30Rc2',
'biography': 'BiographyV30Rc2',
'path': 'str'
}
attribute_map = {
'last_modified_date': 'last-modified-date',
'name': 'name',
'other_names': 'other-names',
'biography': 'biography',
'path': 'path'
}
def __init__(self, last_modified_date=None, name=None, other_names=None, biography=None, path=None): # noqa: E501
"""PersonalDetailsV30Rc2 - a model defined in Swagger""" # noqa: E501
self._last_modified_date = None
self._name = None
self._other_names = None
self._biography = None
self._path = None
self.discriminator = None
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if name is not None:
self.name = name
if other_names is not None:
self.other_names = other_names
if biography is not None:
self.biography = biography
if path is not None:
self.path = path
@property
def last_modified_date(self):
"""Gets the last_modified_date of this PersonalDetailsV30Rc2. # noqa: E501
:return: The last_modified_date of this PersonalDetailsV30Rc2. # noqa: E501
:rtype: LastModifiedDateV30Rc2
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this PersonalDetailsV30Rc2.
:param last_modified_date: The last_modified_date of this PersonalDetailsV30Rc2. # noqa: E501
:type: LastModifiedDateV30Rc2
"""
self._last_modified_date = last_modified_date
@property
def name(self):
"""Gets the name of this PersonalDetailsV30Rc2. # noqa: E501
:return: The name of this PersonalDetailsV30Rc2. # noqa: E501
:rtype: NameV30Rc2
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this PersonalDetailsV30Rc2.
:param name: The name of this PersonalDetailsV30Rc2. # noqa: E501
:type: NameV30Rc2
"""
self._name = name
@property
def other_names(self):
"""Gets the other_names of this PersonalDetailsV30Rc2. # noqa: E501
:return: The other_names of this PersonalDetailsV30Rc2. # noqa: E501
:rtype: OtherNamesV30Rc2
"""
return self._other_names
@other_names.setter
def other_names(self, other_names):
"""Sets the other_names of this PersonalDetailsV30Rc2.
:param other_names: The other_names of this PersonalDetailsV30Rc2. # noqa: E501
:type: OtherNamesV30Rc2
"""
self._other_names = other_names
@property
def biography(self):
"""Gets the biography of this PersonalDetailsV30Rc2. # noqa: E501
:return: The biography of this PersonalDetailsV30Rc2. # noqa: E501
:rtype: BiographyV30Rc2
"""
return self._biography
@biography.setter
def biography(self, biography):
"""Sets the biography of this PersonalDetailsV30Rc2.
:param biography: The biography of this PersonalDetailsV30Rc2. # noqa: E501
:type: BiographyV30Rc2
"""
self._biography = biography
@property
def path(self):
"""Gets the path of this PersonalDetailsV30Rc2. # noqa: E501
:return: The path of this PersonalDetailsV30Rc2. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this PersonalDetailsV30Rc2.
:param path: The path of this PersonalDetailsV30Rc2. # noqa: E501
:type: str
"""
self._path = path
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PersonalDetailsV30Rc2, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PersonalDetailsV30Rc2):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
|
from __future__ import division, print_function, absolute_import
import numpy as np
import amitgroup as ag
# If skimage is available, the image returned will be wrapped
# in the Image class. This is nice since it will be automatically
# displayed in an IPython notebook.
try:
from skimage.io import Image
except ImportError:
def Image(x):
return x
class ImageGrid:
"""
An image grid used for combining equally-sized intensity images into a
single larger image.
Parameters
----------
data : ndarray, ndim in [2, 3, 4]
The last two axes should be spatial dimensions of an intensity patch.
The rest are used to index them. If `ndim` is 2, then a single image is
shown. If `ndim` is 3, then `rows` and `cols` will determine its
layout. If `ndim` is 4, then `rows` and `cols` will be ignored and the
        grid will be laid out according to its first two axes instead. If data
is set to None, then an empty image grid will be initialized. In this
case, rows and cols are both required.
rows, cols : int or None
The number of rows and columns for the grid. If both are None, the
minimal square grid that holds all images will be used. If one is
specified, the other will adapt to hold all images. If both are
        specified, then it is possible that the grid will be vacuous or that
some images will be omitted.
shape : tuple
        Shape of each grid image. Only use if ``data`` is set to `None`,
since it should otherwise be inferred from the data.
border_color : float or np.ndarray of length 3
Specify the border color as an array of length 3 (RGB). If a scalar is
given, it will be interpreted as the grayscale value.
border_width :
        Border width in pixels. If you rescale the image, the border will be
rescaled with it.
cmap, vmin, vmax, vsym :
See `ImageGrid.set_image`.
global_bounds : bool
If this is set to True and either `vmin` or `vmax` is not
specified, it will infer it globally for the data. If `vsym` is
True, the global bounds will be symmetric around zero. If it is set
to False, it determines range per image, which would be the
equivalent of calling `set_image` manually with `vmin`, `vmax` and
`vsym` set the same.
Examples
--------
>>> import amitgroup as ag
>>> import numpy as np
>>> import matplotlib.pylab as plt
>>> from matplotlib.pylab import cm
>>> rs = np.random.RandomState(0)
Let's generate a set of 100 8x8 image patches.
>>> shape = (100, 8, 8)
>>> data = np.arange(np.prod(shape)).reshape(shape)
>>> data += rs.uniform(0, np.prod(shape), size=shape)
Creating the image grid:
>>> grid = ag.plot.ImageGrid(data, cmap=cm.hsv)
>>> img = grid.scaled_image(scale=5)
>>> plt.imshow(img)
>>> plt.show()
If you are working in an IPython notebook, you can display
``img`` simply by adding it to the end of a cell.
"""
def __init__(self, data=None, rows=None, cols=None, shape=None,
border_color=1, border_width=1, cmap=None, vmin=None,
vmax=None, vsym=False, global_bounds=True):
assert data is None or np.ndim(data) in (2, 3, 4)
if data is not None:
data = np.asanyarray(data)
if data is None:
assert rows is not None and cols is not None, \
"Must specify rows and cols if no data is specified"
shape = shape
elif data.ndim == 2:
N = 1
rows = 1
cols = 1
data = data[np.newaxis]
shape = data.shape[1:3]
elif data.ndim == 3:
N = data.shape[0]
if rows is None and cols is None:
cols = int(np.ceil(np.sqrt(N)))
rows = int(np.ceil(N / cols))
elif rows is None:
rows = int(np.ceil(N / cols))
elif cols is None:
cols = int(np.ceil(N / rows))
shape = data.shape[1:3]
elif data.ndim == 4:
assert rows is None and cols is None
rows = data.shape[0]
cols = data.shape[1]
data = data.reshape((-1,) + data.shape[2:])
N = data.shape[0]
shape = data.shape[1:3]
self._border_color = self._prepare_color(border_color)
self._rows = rows
self._cols = cols
self._shape = shape
self._border = border_width
b = self._border
self._fullsize = (b + (shape[0] + b) * self._rows,
b + (shape[1] + b) * self._cols)
self._data = np.ones(self._fullsize + (3,), dtype=np.float64)
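        # Worked example: with rows=2, cols=3, shape=(8, 8) and
        # border_width=1, the canvas is 1 + (8 + 1) * 2 = 19 pixels tall and
        # 1 + (8 + 1) * 3 = 28 pixels wide, initialized to white (ones).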
if global_bounds:
if vmin is None:
vmin = np.nanmin(data)
if vmax is None:
vmax = np.nanmax(data)
if vsym:
mx = max(abs(vmin), abs(vmax))
vmin = -mx
vmax = mx
# Populate with data
for i in range(min(N, rows * cols)):
self.set_image(data[i], i // cols, i % cols,
cmap=cmap, vmin=vmin, vmax=vmax, vsym=vsym)
@classmethod
def _prepare_color(self, color):
if color is None:
return np.array([1.0, 1.0, 1.0])
elif isinstance(color, (int, float)):
return np.array([color]*3)
else:
return np.array(color)
@classmethod
def fromarray(cls, *args, **kwargs):
ag.info('Deprecation warning: Use ImageGrid(...) instead of '
'ImageGrid.fromarray(...)')
return cls(*args, **kwargs)
@property
def image(self):
"""
Returns the image as a skimage.io.Image class.
"""
return Image(self._data)
def set_image(self, image, row, col, cmap=None, vmin=None, vmax=None,
vsym=False):
"""
Sets the data for a single window.
Parameters
----------
image : ndarray, ndim=2
The shape should be the same as the `shape` specified when
constructing the image grid.
row, col : int
The zero-index of the row and column to set.
cmap : cmap (from matplotlib.pylab.cm)
The color palette to use. Default is grayscale.
vmin, vmax : numerical or None
Defines the range of the color palette. None, which is default,
takes the range of the data.
vsym : bool
If True, this means that the color palette will always be centered
around 0. Even if you have specified both `vmin` and `vmax`, this
will override that and extend the shorter one. Good practice is to
            specify neither `vmin` nor `vmax`, or only `vmax`, together with this
option.
"""
import matplotlib as mpl
import matplotlib.pylab as plt
from amitgroup.plot.resample import resample_and_arrange_image
if cmap is None:
cmap = plt.cm.gray
if vmin is None:
vmin = np.nanmin(image)
if vmax is None:
vmax = np.nanmax(image)
if vsym and -vmin != vmax:
mx = max(abs(vmin), abs(vmax))
vmin = -mx
vmax = mx
if vmin == vmax:
diff = 1
else:
diff = vmax - vmin
image_indices = np.clip((image - vmin) / diff, 0, 1) * 255
image_indices = image_indices.astype(np.uint8)
nan_mask = np.isnan(image).astype(np.uint8)
lut = mpl.colors.makeMappingArray(256, cmap)
rgb = resample_and_arrange_image(image_indices, nan_mask, self._shape,
lut)
x0 = row * (self._shape[0] + self._border)
x1 = (row + 1) * (self._shape[0] + self._border) + self._border
y0 = col * (self._shape[1] + self._border)
y1 = (col + 1) * (self._shape[1] + self._border) + self._border
self._data[x0:x1, y0:y1] = self._border_color
anchor = (self._border + row * (self._shape[0] + self._border),
self._border + col * (self._shape[1] + self._border))
selection = [slice(anchor[0], anchor[0] + rgb.shape[0]),
slice(anchor[1], anchor[1] + rgb.shape[1])]
nan_data = np.isnan(rgb)
rgb[nan_data] = 0.0
self._data[selection] = (rgb * ~nan_data +
self._border_color * nan_data)
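    # Illustrative call (hedged): place a patch at grid position (0, 1) with
    # a palette centered on zero; values outside [vmin, vmax] are clipped by
    # the normalization above, and NaN pixels take the border color.
    #
    #   grid.set_image(patch, row=0, col=1, vsym=True)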
def highlight(self, col=None, row=None, color=None):
# TODO: This function is not done yet and needs more work
bw = self._border
M = np.ones(tuple(np.add(self._shape, 2 * bw)) + (1,), dtype=np.bool)
M[bw:-bw, bw:-bw] = 0
def setup_axis(axis, count):
if axis is None:
return list(range(count))
elif isinstance(axis, int):
return [axis]
else:
return axis
# TODO: This is temporary
cols = [col] * self._rows
rows = list(range(self._rows))
color = self._prepare_color(color)
for c, r in zip(cols, rows):
r0 = (self._border + self._shape[0]) * r
c0 = (self._border + self._shape[1]) * c
sel = [slice(r0, r0+M.shape[0]), slice(c0, c0+M.shape[1])]
self._data[sel] = M * color + ~M * self._data[sel]
def scaled_image(self, scale=1):
"""
        Returns a nearest-neighbor upscaled version of the image.
Parameters
----------
scale : int
Upscaling using nearest neighbor, e.g. a scale of 5 will make each
pixel a 5x5 rectangle in the output.
Returns
-------
scaled_image : skimage.io.Image, (height, width, 3)
Returns a scaled up RGB image. If you do not have scikit-image, it
will be returned as a regular Numpy array. The benefit of wrapping
            it in `Image` is that it will be automatically displayed in an
IPython notebook, without having to issue any drawing calls.
"""
if scale == 1:
return self._data
else:
from skimage.transform import resize
data = resize(self._data, tuple([self._data.shape[i] * scale
for i in range(2)]), order=0)
return Image(data)
def save(self, path, scale=1):
"""
Save the image to file.
Parameters
----------
path : str
Output path.
scale : int
Upscaling using nearest neighbor, e.g. a scale of 5 will make each
pixel a 5x5 rectangle in the output.
"""
data = self.scaled_image(scale)
ag.image.save(path, data)
def __repr__(self):
return 'ImageGrid(rows={rows}, cols={cols}, shape={shape})'.format(
rows=self._rows,
cols=self._cols,
shape=self._shape)
|
|
import os
from django import forms
from django.conf import settings
import commonware.log
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from amo.utils import slugify, slug_validator
from happyforms import Form, ModelForm
from translations.widgets import TranslationTextInput, TranslationTextarea
from users.models import UserProfile
from .models import Collection, CollectionUser
from . import tasks
privacy_choices = (
(False, _lazy(u'Only I can view this collection.')),
(True, _lazy(u'Anybody can view this collection.')))
apps = (('', None),) + tuple((a.id, a.pretty) for a in amo.APP_USAGE)
collection_types = ((k, v) for k, v in amo.COLLECTION_CHOICES.iteritems()
if k not in (amo.COLLECTION_ANONYMOUS, amo.COLLECTION_RECOMMENDED))
log = commonware.log.getLogger('z.collections')
class AdminForm(Form):
application = forms.TypedChoiceField(choices=apps, required=False,
empty_value=None, coerce=int)
type = forms.TypedChoiceField(choices=collection_types, required=False,
coerce=int)
def save(self, collection):
collection.type = self.cleaned_data['type']
collection.application_id = self.cleaned_data['application']
collection.save()
class AddonsForm(Form):
"""This form is related to adding addons to a collection."""
addon = forms.CharField(widget=forms.MultipleHiddenInput, required=False)
addon_comment = forms.CharField(widget=forms.MultipleHiddenInput,
required=False)
def clean_addon(self):
addons = []
for a in self.data.getlist('addon'):
try:
addons.append(int(a))
except ValueError:
pass
return addons
def clean_addon_comment(self):
fields = 'addon', 'addon_comment'
rv = {}
for addon, comment in zip(*map(self.data.getlist, fields)):
try:
rv[int(addon)] = comment
except ValueError:
pass
return rv
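    # Worked example (illustrative form data): with
    #   addon         = ['12', 'x', '7']
    #   addon_comment = ['a', 'b', 'c']
    # clean_addon_comment() pairs the lists positionally and returns
    # {12: 'a', 7: 'c'}; the non-integer id 'x' is skipped, mirroring the
    # ValueError handling in clean_addon() above.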
def save(self, collection):
collection.set_addons(self.cleaned_data['addon'],
self.cleaned_data['addon_comment'])
class ContributorsForm(Form):
"""This form is related to adding contributors to a collection."""
contributor = forms.CharField(widget=forms.MultipleHiddenInput,
required=False)
new_owner = forms.IntegerField(widget=forms.HiddenInput, required=False)
def clean_new_owner(self):
new_owner = self.cleaned_data['new_owner']
if new_owner:
return UserProfile.objects.get(pk=new_owner)
def clean_contributor(self):
contributor_ids = self.data.getlist('contributor')
return UserProfile.objects.filter(pk__in=contributor_ids)
def save(self, collection):
collection.collectionuser_set.all().delete()
for user in self.cleaned_data['contributor']:
CollectionUser(collection=collection, user=user).save()
log.info('%s was added to Collection %s' % (user.username,
collection.id))
new_owner = self.cleaned_data['new_owner']
if new_owner:
old_owner = collection.author
collection.author = new_owner
cu, created = CollectionUser.objects.get_or_create(
collection=collection, user=old_owner)
if created:
cu.save()
collection.save()
# New owner is no longer a contributor.
collection.collectionuser_set.filter(user=new_owner).delete()
log.info('%s now owns Collection %s' % (new_owner.username,
collection.id))
class CollectionForm(ModelForm):
name = forms.CharField(
label=_lazy(u'Give your collection a name.'),
widget=TranslationTextInput,
)
slug = forms.CharField(label=_lazy(u'URL:'))
description = forms.CharField(
label=_lazy(u'Describe your collection.'),
widget=TranslationTextarea(attrs={'rows': 3}),
max_length=200,
required=False)
listed = forms.ChoiceField(
label=_lazy(u'Privacy:'),
widget=forms.RadioSelect,
choices=privacy_choices,
initial=True,
)
icon = forms.FileField(label=_lazy(u'Icon'),
required=False)
def __init__(self, *args, **kw):
super(CollectionForm, self).__init__(*args, **kw)
# You can't edit the slugs for the special types.
if (self.instance and
self.instance.type in amo.COLLECTION_SPECIAL_SLUGS):
del self.fields['slug']
def clean_description(self):
description = self.cleaned_data['description']
if description.strip() == '':
description = None
return description
def clean_slug(self):
slug = slugify(self.cleaned_data['slug'])
slug_validator(slug)
if self.instance and self.instance.slug == slug:
return slug
author = self.initial['author']
if author.collections.filter(slug=slug).count():
raise forms.ValidationError(
_('This url is already in use by another collection'))
return slug
def clean_icon(self):
icon = self.cleaned_data['icon']
if not icon:
return
if icon.content_type not in ('image/png', 'image/jpeg'):
raise forms.ValidationError(
_('Icons must be either PNG or JPG.'))
if icon.size > settings.MAX_ICON_UPLOAD_SIZE:
raise forms.ValidationError(
_('Please use images smaller than %dMB.' %
(settings.MAX_ICON_UPLOAD_SIZE / 1024 / 1024 - 1)))
return icon
def save(self, default_locale=None):
c = super(CollectionForm, self).save(commit=False)
c.author = self.initial['author']
c.application_id = self.initial['application_id']
icon = self.cleaned_data.get('icon')
if default_locale:
c.default_locale = default_locale
if icon:
c.icontype = 'image/png'
c.save()
if icon:
dirname = c.get_img_dir()
destination = os.path.join(dirname, '%d.png' % c.id)
tmp_destination = os.path.join(dirname,
'%d.png__unconverted' % c.id)
if not os.path.exists(dirname):
os.mkdir(dirname)
            fh = open(tmp_destination, 'wb')  # binary mode for image data
for chunk in icon.chunks():
fh.write(chunk)
fh.close()
tasks.resize_icon.delay(tmp_destination, destination,
set_modified_on=[c])
return c
class Meta:
model = Collection
fields = ('name', 'slug', 'description', 'listed')
|
|
from __future__ import print_function
from __future__ import absolute_import
import numpy as np
import sys
import json
import pandas as pd
import re
import glob as glob
import os
from collections import defaultdict
from xml.etree import cElementTree as ET
import metatlas.metatlas_objects as metob
from metatlas.helpers import metatlas_get_data_helper_fun as ma_data
from metatlas.helpers import dill2plots as dp
import six
from six.moves import map
try:
six.string_types
except NameError: # python3
six.string_types = str
BATCH_FILE_PATH = '/global/common/software/m2650/mzmine_parameters/batch_files/'
BINARY_PATH = '/global/common/software/m2650/mzmine_parameters/MZmine'
#copy files here to keep I/O off low-performance filesystems
DATA_PATH = '/global/cscratch1/sd/bpb/raw_data'
# We don't need to request haswell on the genepool partition of Cori,
# so the '#SBATCH -C haswell' line is omitted from the header below.
SLURM_HEADER = """#!/bin/bash
#SBATCH -N 1 -c 64
#SBATCH --exclusive
#SBATCH --error="slurm.err"
#SBATCH --output="slurm.out"
#SBATCH --qos=genepool
#SBATCH -A pkscell
#SBATCH -t 00:30:00
#SBATCH -L project
export MPLBACKEND="agg"
export HDF5_USE_FILE_LOCKING=FALSE
# Cori-specific: limits glibc malloc to one arena instead of one per thread
export MALLOC_ARENA_MAX=1
#module load java
# echo every command and terminate script if there is an error
set -ex
env | grep -i java | sort
# to see the resources a job used:
# sacct -j <job_id> --format jobidraw,jobname,maxrss,maxvmsize --unit G
"""
def make_task_and_job(params):  # params bundles basedir, basename, polarity, files
if not os.path.exists(params['basedir']):
os.mkdir(params['basedir'])
xml_str = get_batch_file_template()
d = xml_to_dict(xml_str)
    # Initialize the task and give it values from the user-supplied form
task = metob.MZMineTask()
task.polarity = params['polarity']
task.lcmsruns = params['files']
task.min_peak_duration = params['min_peak_duration']
task.max_peak_duration = params['max_peak_duration']
task.rt_tol_perfile = params['rt_tol_perfile']
task.rt_tol_multifile = params['rt_tol_multifile']
task.min_peak_height = params['min_peak_height']
task.ms1_noise_level = params['ms1_noise_level']
task.ms2_noise_level = params['ms2_noise_level']
task.mz_tolerance = params['mz_tolerance']
task.peak_to_valley_ratio = params['peak_to_valley_ratio']
task.min_rt = params['min_rt']
task.max_rt = params['max_rt']
task.representative_isotope = params['representative_isotope']
task.remove_isotopes = params['remove_isotopes']
task.min_peaks_in_row = params['min_peaks_in_row']
task.peak_with_msms = params['peak_with_msms']
task.chromatographic_threshold = params['chromatographic_threshold']
task.search_for_minimum_rt_range = params['search_for_minimum_rt_range']
task.minimum_relative_height = params['minimum_relative_height']
task.mz_range_scan_pairing = params['mz_range_scan_pairing']
task.rt_range_scan_pairing = params['rt_range_scan_pairing']
task.gapfill_intensity_tolerance = params['gapfill_intensity_tolerance']
task.output_csv_height = os.path.join(params['basedir'],'%s_%s_peak_height.csv'%(params['basename'],task.polarity))
task.output_csv_area = os.path.join(params['basedir'],'%s_%s_peak_area.csv'%(params['basename'],task.polarity))
task.output_workspace = os.path.join(params['basedir'],'%s_%s.mzmine'%(params['basename'],task.polarity))
task.output_mgf = os.path.join(params['basedir'],'%s_%s.mgf'%(params['basename'],task.polarity))
task.input_xml = os.path.join(params['basedir'],'logs','%s_%s.xml'%(params['basename'],task.polarity))
task.mzmine_launcher = get_latest_mzmine_binary(version=params['mzmine_version'])
new_d = replace_files(d,params['files'])
new_d = configure_crop_filter(new_d,task.polarity,params['files'],min_rt=task.min_rt,max_rt=task.max_rt)
new_d = configure_mass_detection(new_d,task.ms1_noise_level,task.ms2_noise_level)
new_d = configure_chromatogram_builder(new_d,task.min_peak_duration,task.min_peak_height,task.mz_tolerance)
new_d = configure_peak_deconvolution(new_d,
task.min_peak_height,
task.minimum_relative_height,
task.search_for_minimum_rt_range,
task.chromatographic_threshold,
task.peak_to_valley_ratio,
task.min_peak_duration,
task.max_peak_duration)
new_d = configure_isotope_search(new_d,
task.mz_tolerance,
task.rt_tol_perfile,
task.representative_isotope,
task.remove_isotopes)
new_d = configure_join_aligner(new_d,task.mz_tolerance,task.rt_tol_multifile)
new_d = configure_gap_filling(new_d,task.mz_tolerance,task.rt_tol_multifile,task.gapfill_intensity_tolerance)
new_d = configure_rows_filter(new_d,task.min_peaks_in_row,task.peak_with_msms)
new_d = configure_output(new_d,
task.output_csv_height,
task.output_csv_area,
task.output_workspace,
task.output_mgf)
t = dict_to_etree(new_d)
indent_tree(t)
xml_batch_str = tree_to_xml(t,filename=task.input_xml)
job_runner = '%s %s'%(task.mzmine_launcher,task.input_xml)
return job_runner
def create_job_script(m):
"""
This is the first function that runs when a user initializes a new untargeted workflow
"""
#setup directories
if not os.path.isdir(m['basedir']):
os.mkdir(m['basedir'])
dirs_to_make = ['job_scripts','logs','intermediate_results','%s_%s'%(m['basename'],m['polarity'])]
for d in dirs_to_make:
if not os.path.isdir(os.path.join(m['basedir'],d)):
os.mkdir(os.path.join(m['basedir'],d))
job_cmd = make_task_and_job(m)#['basedir'],m['basename'],m['polarity'],m['files'])
sbatch_file_name = os.path.join(m['basedir'],'job_scripts','%s_%s.sbatch'%(m['basename'],m['polarity']))
denovo_sbatch_file_name = os.path.join(m['basedir'],'job_scripts','%s_%s_denovo.sbatch'%(m['basename'],m['polarity']))
err_file_name = os.path.join(m['basedir'],'logs','%s_%s.err'%(m['basename'],m['polarity']))
out_file_name = os.path.join(m['basedir'],'logs','%s_%s.out'%(m['basename'],m['polarity']))
# job_cmd_filtered = make_targeted_mzmine_job(m['basedir'],m['basename'],m['polarity'],m['files'])
params_filename = os.path.join(m['basedir'],'logs','%s_%s_params.json'%(m['basename'],m['polarity']))
new_params_filename = os.path.join(m['basedir'],'logs','%s_%s_params-used.json'%(m['basename'],m['polarity']))
copy_params_command = "cp '%s' '%s'"%(params_filename,new_params_filename)
with open(sbatch_file_name,'w') as fid:
fid.write('%s\n'%SLURM_HEADER.replace('slurm.err',err_file_name).replace('slurm.out',out_file_name))
fid.write('%s\n'%copy_params_command)
fid.write('%s\n'%job_cmd)
# bad_words = ['qos', '-p','-C','-L','-t','-N']
# bad_time = '#SBATCH -t 24:00:00'
# good_time = '#SBATCH -t 24:00:00\n'
# bad_node = '-N 1 -c 64'
# good_node = '#SBATCH -N 1 -c 64\n'
# with open(sbatch_file_name) as oldfile, open(denovo_sbatch_file_name, 'w') as newfile:
# for line in oldfile:
# if not any(bad_word in line for bad_word in bad_words):
# newfile.write(line)
# if bad_time in line:
# newfile.write(good_time)
# if bad_node in line:
# newfile.write(good_node)
# newfile.write('#SBATCH --mem=494G\n')
return sbatch_file_name
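# A minimal sketch of driving the workflow above (the parameter values are
# illustrative assumptions; see make_task_and_job for the full set of keys the
# params dict must contain):
#
#   params = {'basedir': '/tmp/untargeted', 'basename': 'demo',
#             'polarity': 'positive', 'files': ['run1.mzML', 'run2.mzML'],
#             'mzmine_version': 'most_recent', ...}
#   sbatch_file = create_job_script(params)
#   # then submit with: sbatch <sbatch_file>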
#####################################################
#####################################################
######## mzmine setup scripts ########
#####################################################
#####################################################
def remove_duplicate_files(files):
    """Return the mzml_file paths from |files|, keeping only the first run seen for each name."""
    file_names = []
    unique_files = []
    for f in files:
        if f.name not in file_names:
            unique_files.append(f.mzml_file)
            file_names.append(f.name)
    return unique_files
def get_files(groups,filename_substring,file_filters,is_group=False):
"""
if is_group is False, gets files from the experiment/folder name and filters with file_filters
if is_group is True, gets files from the metatlas group name and filters with file_filters
"""
for i,g in enumerate(groups):
        if is_group:
            # get files as a metatlas group; use a distinct name so the
            # |groups| argument being iterated is not shadowed
            metatlas_groups = dp.select_groups_for_analysis(name = g,do_print=False,
                                            most_recent = True,
                                            remove_empty = True,
                                            include_list = [], exclude_list = file_filters)#['QC','Blank'])
            new_files = []
            for each_g in metatlas_groups:
                for f in each_g.items:
                    new_files.append(f)
new_files.append(f)
else:
new_files = metob.retrieve('Lcmsruns',experiment=g,name=filename_substring,username='*')
if i == 0:
all_files = new_files
else:
all_files.extend(new_files)
if len(new_files) == 0:
print('##### %s has ZERO files!'%g)
if len(file_filters) > 0:
for i,ff in enumerate(file_filters):
if i == 0:
files = [f for f in all_files if not ff in f.name]
else:
files = [f for f in files if not ff in f.name]
else:
files = all_files
files = remove_duplicate_files(files)
return files
def make_targeted_mzmine_job(basedir,basename,polarity,files):
if not os.path.exists(basedir):
os.mkdir(basedir)
xml_str = get_targeted_batch_file_template()
d = xml_to_dict(xml_str)
task = metob.MZMineTask()
task.polarity = polarity
task.lcmsruns = files
new_d = replace_files(d,files)
project_name = '%s_%s'%(basename,task.polarity)
task.output_workspace = os.path.join(basedir,project_name,'%s_%s.mzmine'%(basename,task.polarity))
task.input_xml = os.path.join(basedir,'logs','%s_%s_filtered.xml'%(basename,task.polarity))
task.mzmine_launcher = get_latest_mzmine_binary()
# new_d = configure_crop_filter(new_d,task.polarity,files)
# new_d = configure_targeted_peak_detection(new_d,peak_list_filename,intensity_tolerance=1e-4,noise_level=1e4,mz_tolerance=20,rt_tolerance=0.5)
new_d = configure_workspace_output(new_d,task.output_workspace)
t = dict_to_etree(new_d)
indent_tree(t)
xml_batch_str = tree_to_xml(t,filename=task.input_xml)
job_runner = '%s %s'%(task.mzmine_launcher,task.input_xml)
return job_runner
def configure_targeted_peak_detection(new_d,peak_list_filename,intensity_tolerance=1e-4,noise_level=1e4,mz_tolerance=20,rt_tolerance=0.5):
"""
Name suffix: Suffix to be added to the peak list name.
Peak list file: Path of the csv file containing the list of peaks to be detected. The csv file should have three columns.
The first column should contain the expected M/Z, the second column the expected RT and the third the peak name. Each peak should be in a different row.
Field separator: Character(s) used to separate fields in the peak list file.
Ignore first line: Check to ignore the first line of peak list file.
Intensity tolerance: This value sets the maximum allowed deviation from expected shape of a peak in chromatographic direction.
Noise level: The minimum intensity level for a data point to be considered part of a chromatogram. All data points below this intensity level are ignored.
MZ Tolerance: Maximum allowed m/z difference to find the peak
RT tolerance: Maximum allowed retention time difference to find the peak
"""
# Set the noise floor
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'TargetedPeakDetectionModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Peak list file' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%s'%peak_list_filename
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'm/z tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['ppmtolerance'] = '%.3f'%(mz_tolerance)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Intensity tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.6f'%(intensity_tolerance)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Noise level' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.6f'%(noise_level)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Retention time tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.6f'%(rt_tolerance)
return new_d
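# All of the configure_* helpers below share one lookup idiom: scan the list of
# batch steps for the one whose '@method' contains a module name, then scan that
# step's parameters for the one whose '@name' matches. A minimal illustrative
# helper capturing the idiom (the functions below inline it instead of calling
# this):
def _first_index(items, key, substring):
    """Index of the first dict in |items| whose value at |key| contains |substring|."""
    return [i for i, d in enumerate(items) if substring in d[key]][0]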
def configure_crop_filter(new_d,polarity,files,min_rt=0.01,max_rt=100,fps_string='FPS'):
"""
"""
# identify the element for this change
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'CropFilterModule' in d['@method']][0]
# Set the filter string
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Raw data files' in d['@name']][0]
if any([fps_string in f for f in files]):
new_d['batch']['batchstep'][idx]['parameter'][idx2]['name_pattern'] = '*FPS*'
else:
new_d['batch']['batchstep'][idx]['parameter'][idx2]['name_pattern'] = '*'
# Set the polarity
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Scans' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['polarity'] = polarity.upper()
#set the rt min and rt max use the same idx2 as polarity
new_d['batch']['batchstep'][idx]['parameter'][idx2]['retention_time'] = {'max':'%.4f'%max_rt,'min':'%.4f'%min_rt}
# new_d['batch']['batchstep'][idx]['parameter'][idx2]['ms_level'] = '1-2'
return new_d
def configure_mass_detection(new_d,ms1_noise_level=1e4,ms2_noise_level=1e2):
"""
"""
# Find the module
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'MassDetectionModule' in d['@method']]
#The first idx will be for MS1 and the second will be for MS2
# Set the MS1 attributes
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[0]]['parameter']) if 'Mass detector' in d['@name']][0]
idx3 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[0]]['parameter'][idx2]['module']) if 'Centroid' in d['@name']][0]
new_d['batch']['batchstep'][idx[0]]['parameter'][idx2]['module'][idx3]['parameter']['#text'] = '%.2f'%(ms1_noise_level)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[0]]['parameter']) if 'Scans' in d['@name']][0]
new_d['batch']['batchstep'][idx[0]]['parameter'][idx2]['ms_level'] = '1'
# Set the MS2 attributes
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[1]]['parameter']) if 'Mass detector' in d['@name']][0]
idx3 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[1]]['parameter'][idx2]['module']) if 'Centroid' in d['@name']][0]
new_d['batch']['batchstep'][idx[1]]['parameter'][idx2]['module'][idx3]['parameter']['#text'] = '%.2f'%(ms2_noise_level)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[1]]['parameter']) if 'Scans' in d['@name']][0]
new_d['batch']['batchstep'][idx[1]]['parameter'][idx2]['ms_level'] = '2'
return new_d
def configure_chromatogram_builder(new_d,min_peak_duration,min_peak_height,mz_tolerance):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'ChromatogramBuilderModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Min time span' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.3f'%(min_peak_duration)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Min height' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.3f'%(min_peak_height)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'm/z tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['ppmtolerance'] = '%.3f'%(mz_tolerance)
return new_d
def configure_peak_deconvolution(new_d,min_peak_height,minimum_relative_height,search_for_minimum_rt_range,chromatographic_threshold,min_sn_ratio,min_peak_duration,max_peak_duration):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'DeconvolutionModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Algorithm' in d['@name']][0]
idx3 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter'][idx2]['module']) if 'Local minimum search' in d['@name']][0]
idx4 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter']) if 'Chromatographic threshold' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter'][idx4]['#text'] = '%.3f'%chromatographic_threshold
idx4 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter']) if 'Search minimum in RT range (min)' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter'][idx4]['#text'] = '%.3f'%search_for_minimum_rt_range
idx4 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter']) if 'Minimum relative height' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter'][idx4]['#text'] = '%.3f'%minimum_relative_height
idx4 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter']) if 'Minimum absolute height' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter'][idx4]['#text'] = '%.3f'%min_peak_height
idx4 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter']) if 'Min ratio of peak top/edge' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter'][idx4]['#text'] = '%.3f'%min_sn_ratio
idx4 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter']) if 'Peak duration range (min)' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter'][idx4]['min'] = '%.3f'%min_peak_duration
new_d['batch']['batchstep'][idx]['parameter'][idx2]['module'][idx3]['parameter'][idx4]['max'] = '%.3f'%max_peak_duration
return new_d
def configure_isotope_search(new_d,mz_tolerance,rt_tol_perfile,representative_isotope,remove_isotopes):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'Isotope' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'm/z tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['ppmtolerance'] = '%.3f'%(mz_tolerance)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Retention time tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.3f'%(rt_tol_perfile)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Representative isotope' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%s'%(representative_isotope)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Remove original peaklist' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%s'%(str(remove_isotopes).lower())
return new_d
def configure_join_aligner(new_d,mz_tolerance,rt_tol_multifile):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'JoinAlignerModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'm/z tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['ppmtolerance'] = '%.3f'%(mz_tolerance)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Retention time tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.3f'%(rt_tol_multifile)
return new_d
def configure_rows_filter(new_d,min_peaks_in_row,peak_with_msms):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'RowsFilterModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Minimum peaks in a row' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%d'%min_peaks_in_row
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Minimum peaks in an isotope pattern' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%d'%min_peaks_in_row
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Keep only peaks with MS2 scan (GNPS)' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%d'%peak_with_msms
return new_d
def configure_duplicate_filter(new_d,mz_tolerance,rt_tol_perfile):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'DuplicateFilterModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'm/z tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['ppmtolerance'] = '%.3f'%(mz_tolerance)
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'RT tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = '%.3f'%(rt_tol_perfile)
return new_d
def configure_gap_filling(new_d,mz_tolerance,rt_tol_multifile,gapfill_intensity_tolerance):
    """
    Set the m/z tolerance for the gap-filling PeakFinderModule step. The argument
    order matches the call in make_task_and_job; rt_tol_multifile and
    gapfill_intensity_tolerance are accepted but not currently applied here.
    """
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'gapfilling.peakfinder.PeakFinderModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'm/z tolerance' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['ppmtolerance'] = '%.3f'%(mz_tolerance)
return new_d
def configure_output(new_d,output_csv_height,output_csv_area,output_workspace,output_mgf):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'CSVExportModule' in d['@method']]
#the first will be height the second will be area
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[0]]['parameter']) if 'Filename' in d['@name']][0]
new_d['batch']['batchstep'][idx[0]]['parameter'][idx2]['#text'] = output_csv_height
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx[1]]['parameter']) if 'Filename' in d['@name']][0]
new_d['batch']['batchstep'][idx[1]]['parameter'][idx2]['#text'] = output_csv_area
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'GNPSExportModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Filename' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = output_mgf
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'ProjectSaveAsModule' in d['@method']][0]
new_d['batch']['batchstep'][idx]['parameter']['#text'] = output_workspace
return new_d
def configure_csv_output(new_d,output_csv):
"""
"""
idx = [i for i,d in enumerate(new_d['batch']['batchstep']) if 'CSVExportModule' in d['@method']][0]
idx2 = [i for i,d in enumerate(new_d['batch']['batchstep'][idx]['parameter']) if 'Filename' in d['@name']][0]
new_d['batch']['batchstep'][idx]['parameter'][idx2]['#text'] = output_csv
return new_d
def indent_tree(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent_tree(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def get_targeted_batch_file_template(loc='do_not_change_batch_file_targeted_peak_list.xml'):
"""
return string of text from the template batch file
"""
with open(os.path.join(BATCH_FILE_PATH,loc),'r') as fid:
file_text = fid.read()
return file_text
def get_batch_file_template(loc='bootcamp_template_batch_file.xml'):
"""
return string of text from the template batch file
"""
with open(os.path.join(BATCH_FILE_PATH,loc),'r') as fid:
file_text = fid.read()
return file_text
def get_latest_mzmine_binary(system='Cori',version='most_recent'):
"""
Returns the path to the mzmine launch script.
    Default is most recent. Alternatively, specify the folder containing the version you want,
    for example:
    version='MZmine-2.23'
    will use the launch script in that folder.
    # to fetch a specific release instead of the latest:
    wget $(curl -s https://api.github.com/repos/mzmine/mzmine2/releases/v2.33 | grep 'browser_' | cut -d\" -f4) -O mzmine_latest.zip
# To setup the most recent mzmine binary, follow these steps
cd /project/projectdirs/metatlas/projects/mzmine_parameters/MZmine
wget $(curl -s https://api.github.com/repos/mzmine/mzmine2/releases/latest | grep 'browser_' | cut -d\" -f4) -O mzmine_latest.zip
unzip mzmine_latest.zip
# change directories into latest mzmine download
# cd MZmine-XXXX
cp ../MZmine-2.24/startMZmine_NERSC_* .
cd /project/projectdirs/metatlas/projects/
chgrp -R metatlas mzmine_parameters
chmod -R 770 mzmine_parameters
"""
mzmine_versions = glob.glob(os.path.join(BINARY_PATH,'*' + os.path.sep))
if version == 'most_recent':
most_recent = sorted([os.path.basename(m) for m in mzmine_versions if 'MZmine-' in m])[-1]
else:
most_recent = [m.split(os.path.sep)[-2] for m in mzmine_versions if version in m][-1]
launch_script = os.path.join(os.path.join(BINARY_PATH,most_recent),'startMZmine_NERSC_Headless_%s.sh'%system)
if os.path.isfile(launch_script):
return launch_script
else:
print('See the docstring, the launch script seems to be missing.')
def replace_files(d,file_list):
"""
Replace files for mzmine task
Inputs:
d: an xml derived dictionary of batch commands
file_list: a list of full paths to mzML files
Outputs:
d: an xml derived dict with new files in it
"""
for i,step in enumerate(d['batch']['batchstep']):
if 'RawDataImportModule' in step['@method']:
d['batch']['batchstep'][i]['parameter']['file'] = file_list
return d
def tree_to_xml(t,filename=None):
"""
"""
xml_str = ET.tostring(t)
if filename:
with open(filename,'w') as fid:
fid.write(xml_str)
return xml_str
def dict_to_etree(d):
"""
    Convert a python dictionary to an ElementTree element.
http://stackoverflow.com/questions/7684333/converting-xml-to-dictionary-using-elementtree
Example:
from collections import defaultdict
from xml.etree import cElementTree as ET
try:
basestring
except NameError: # python3
basestring = str
#d is a python dictionary
ET.tostring(dict_to_etree(d))
"""
def _to_etree(d, root):
if not d:
pass
elif isinstance(d, six.string_types):
root.text = d
elif isinstance(d, dict):
for k,v in d.items():
assert isinstance(k, six.string_types)
if k.startswith('#'):
assert k == '#text' and isinstance(v, six.string_types)
root.text = v
elif k.startswith('@'):
assert isinstance(v, six.string_types)
root.set(k[1:], v)
elif isinstance(v, list):
for e in v:
_to_etree(e, ET.SubElement(root, k))
else:
_to_etree(v, ET.SubElement(root, k))
else: assert d == 'invalid type', (type(d), d)
assert isinstance(d, dict) and len(d) == 1
tag, body = next(iter(d.items()))
node = ET.Element(tag)
_to_etree(body, node)
return node
def xml_to_dict(xml_str):
"""
Convert an xml file into a python dictionary.
http://stackoverflow.com/questions/7684333/converting-xml-to-dictionary-using-elementtree
Example:
from xml.etree import cElementTree as ET
filename = '/global/homes/b/bpb/batch_params/xmlfile.xml'
with open(filename,'r') as fid:
xml_str = fid.read()
d = xml_to_dict(xml_str)
"""
t = ET.XML(xml_str)
d = etree_to_dict(t)
return d
def etree_to_dict(t):
"""
Convert an xml tree into a python dictionary.
http://stackoverflow.com/questions/7684333/converting-xml-to-dictionary-using-elementtree
"""
d = {t.tag: {} if t.attrib else None}
children = list(t)
if children:
dd = defaultdict(list)
for dc in map(etree_to_dict, children):
for k, v in six.iteritems(dc):
dd[k].append(v)
d = {t.tag: {k:v[0] if len(v) == 1 else v for k, v in six.iteritems(dd)}}
if t.attrib:
d[t.tag].update(('@' + k, v) for k, v in six.iteritems(t.attrib))
if t.text:
text = t.text.strip()
if children or t.attrib:
if text:
d[t.tag]['#text'] = text
else:
d[t.tag] = text
return d
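# Round-trip sketch for the three converters above (the XML snippet is an
# illustrative stand-in for a real MZmine batch file):
if __name__ == '__main__':
    example_xml = ('<batch><batchstep method="DemoModule">'
                   '<parameter name="x">1</parameter></batchstep></batch>')
    example_dict = xml_to_dict(example_xml)
    print(tree_to_xml(dict_to_etree(example_dict)))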
|
|
from django import forms
from django.forms.formsets import BaseFormSet
from ietf.doc.models import *
from ietf.name.models import IntendedStdLevelName
from ietf.group.models import Group
from ietf.secr.utils.ams_utils import get_base, get_revision
from ietf.secr.groups.forms import RoleForm, get_person
import datetime
import re
from os.path import splitext
# ---------------------------------------------
# Select Choices
# ---------------------------------------------
WITHDRAW_CHOICES = (('ietf','Withdraw by IETF'),('author','Withdraw by Author'))
# ---------------------------------------------
# Custom Fields
# ---------------------------------------------
class DocumentField(forms.FileField):
'''A validating document upload field'''
def __init__(self, unique=False, *args, **kwargs):
self.extension = kwargs.pop('extension')
self.filename = kwargs.pop('filename')
self.rev = kwargs.pop('rev')
super(DocumentField, self).__init__(*args, **kwargs)
def clean(self, data, initial=None):
file = super(DocumentField, self).clean(data,initial)
if file:
# validate general file format
m = re.search(r'.*-\d{2}\.(txt|pdf|ps|xml)', file.name)
if not m:
raise forms.ValidationError('File name must be in the form base-NN.[txt|pdf|ps|xml]')
# ensure file extension is correct
            base,ext = splitext(file.name)  # only splitext is imported above, not os
if ext != self.extension:
raise forms.ValidationError('Incorrect file extension: %s' % ext)
# if this isn't a brand new submission we need to do some extra validations
if self.filename:
# validate filename
if base[:-3] != self.filename:
raise forms.ValidationError, "Filename: %s doesn't match Draft filename." % base[:-3]
# validate revision
next_revision = str(int(self.rev)+1).zfill(2)
if base[-2:] != next_revision:
raise forms.ValidationError, "Expected revision # %s" % (next_revision)
return file
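# For example (illustrative values): with self.filename='draft-foo-bar' and
# self.rev='02', an upload named 'draft-foo-bar-03.txt' passes, while
# 'draft-foo-bar-02.txt' fails the revision check and 'draft-other-03.txt'
# fails the filename check.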
class GroupModelChoiceField(forms.ModelChoiceField):
'''
Custom ModelChoiceField sets queryset to include all active workgroups and the
individual submission group, none. Displays group acronyms as choices. Call it without the
queryset argument, for example:
group = GroupModelChoiceField(required=True)
'''
def __init__(self, *args, **kwargs):
kwargs['queryset'] = Group.objects.filter(type__in=('wg','individ'),state__in=('bof','proposed','active')).order_by('acronym')
super(GroupModelChoiceField, self).__init__(*args, **kwargs)
def label_from_instance(self, obj):
return obj.acronym
class AliasModelChoiceField(forms.ModelChoiceField):
'''
Custom ModelChoiceField, just uses Alias name in the select choices as opposed to the
more confusing alias -> doc format used by DocAlias.__unicode__
'''
def label_from_instance(self, obj):
return obj.name
# ---------------------------------------------
# Forms
# ---------------------------------------------
class AddModelForm(forms.ModelForm):
start_date = forms.DateField()
group = GroupModelChoiceField(required=True,help_text='Use group "none" for Individual Submissions')
class Meta:
model = Document
fields = ('title','group','stream','start_date','pages','abstract','internal_comments')
# use this method to set attrs which keeps other meta info from model.
def __init__(self, *args, **kwargs):
super(AddModelForm, self).__init__(*args, **kwargs)
self.fields['title'].label='Document Name'
self.fields['title'].widget=forms.Textarea()
self.fields['start_date'].initial=datetime.date.today
self.fields['pages'].label='Number of Pages'
self.fields['internal_comments'].label='Comments'
class AuthorForm(forms.Form):
'''
The generic javascript for populating the email list based on the name selected expects to
see an id_email field
'''
person = forms.CharField(max_length=50,widget=forms.TextInput(attrs={'class':'name-autocomplete'}),help_text="To see a list of people type the first name, or last name, or both.")
email = forms.CharField(widget=forms.Select(),help_text="Select an email")
# check for id within parenthesis to ensure name was selected from the list
def clean_person(self):
person = self.cleaned_data.get('person', '')
m = re.search(r'(\d+)', person)
if person and not m:
raise forms.ValidationError("You must select an entry from the list!")
# return person object
return get_person(person)
# check that email exists and return the Email object
def clean_email(self):
email = self.cleaned_data['email']
try:
obj = Email.objects.get(address=email)
        except Email.DoesNotExist:
raise forms.ValidationError("Email address not found!")
# return email object
return obj
class EditModelForm(forms.ModelForm):
#expiration_date = forms.DateField(required=False)
state = forms.ModelChoiceField(queryset=State.objects.filter(type='draft'),empty_label=None)
iesg_state = forms.ModelChoiceField(queryset=State.objects.filter(type='draft-iesg'),required=False)
group = GroupModelChoiceField(required=True)
review_by_rfc_editor = forms.BooleanField(required=False)
shepherd = forms.CharField(max_length=100,widget=forms.TextInput(attrs={'class':'name-autocomplete'}),help_text="To see a list of people type the first name, or last name, or both.",required=False)
class Meta:
model = Document
fields = ('title','group','ad','shepherd','notify','stream','review_by_rfc_editor','name','rev','pages','intended_std_level','abstract','internal_comments')
# use this method to set attrs which keeps other meta info from model.
def __init__(self, *args, **kwargs):
super(EditModelForm, self).__init__(*args, **kwargs)
self.fields['ad'].queryset = Person.objects.filter(role__name='ad')
self.fields['title'].label='Document Name'
self.fields['title'].widget=forms.Textarea()
self.fields['rev'].widget.attrs['size'] = 2
self.fields['abstract'].widget.attrs['cols'] = 72
self.initial['state'] = self.instance.get_state()
self.initial['iesg_state'] = self.instance.get_state('draft-iesg')
if self.instance.shepherd:
self.initial['shepherd'] = "%s - (%s)" % (self.instance.shepherd.name, self.instance.shepherd.id)
# setup special fields
if self.instance:
# setup replaced
self.fields['review_by_rfc_editor'].initial = bool(self.instance.tags.filter(slug='rfc-rev'))
def save(self, force_insert=False, force_update=False, commit=True):
m = super(EditModelForm, self).save(commit=False)
state = self.cleaned_data['state']
iesg_state = self.cleaned_data['iesg_state']
if 'state' in self.changed_data:
m.set_state(state)
        # note: we're not sending notices here -- is this desired?
if 'iesg_state' in self.changed_data:
            if iesg_state is None:
m.unset_state('draft-iesg')
else:
m.set_state(iesg_state)
if 'review_by_rfc_editor' in self.changed_data:
if self.cleaned_data.get('review_by_rfc_editor',''):
m.tags.add('rfc-rev')
else:
m.tags.remove('rfc-rev')
m.time = datetime.datetime.now()
# handle replaced by
if commit:
m.save()
return m
# field must contain filename of existing draft
def clean_replaced_by(self):
name = self.cleaned_data.get('replaced_by', '')
if name and not InternetDraft.objects.filter(filename=name):
raise forms.ValidationError("ERROR: Draft does not exist")
return name
# check for id within parenthesis to ensure name was selected from the list
def clean_shepherd(self):
person = self.cleaned_data.get('shepherd', '')
m = re.search(r'(\d+)', person)
if person and not m:
raise forms.ValidationError("You must select an entry from the list!")
# return person object
return get_person(person)
def clean(self):
super(EditModelForm, self).clean()
cleaned_data = self.cleaned_data
"""
expiration_date = cleaned_data.get('expiration_date','')
status = cleaned_data.get('status','')
replaced = cleaned_data.get('replaced',False)
replaced_by = cleaned_data.get('replaced_by','')
replaced_status_object = IDStatus.objects.get(status_id=5)
expired_status_object = IDStatus.objects.get(status_id=2)
# this condition seems to be valid
#if expiration_date and status != expired_status_object:
# raise forms.ValidationError('Expiration Date set but status is %s' % (status))
if status == expired_status_object and not expiration_date:
raise forms.ValidationError('Status is Expired but Expirated Date is not set')
if replaced and status != replaced_status_object:
raise forms.ValidationError('You have checked Replaced but status is %s' % (status))
if replaced and not replaced_by:
raise forms.ValidationError('You have checked Replaced but Replaced By field is empty')
"""
return cleaned_data
class EmailForm(forms.Form):
# max_lengths come from db limits, cc is not limited
to = forms.CharField(max_length=255)
cc = forms.CharField(required=False)
subject = forms.CharField(max_length=255)
body = forms.CharField(widget=forms.Textarea())
class ExtendForm(forms.Form):
expiration_date = forms.DateField()
class ReplaceForm(forms.Form):
replaced = AliasModelChoiceField(DocAlias.objects.none(),empty_label=None,help_text='This document may have more than one alias. Be sure to select the correct alias to replace.')
replaced_by = forms.CharField(max_length=100,help_text='Enter the filename of the Draft which replaces this one.')
def __init__(self, *args, **kwargs):
self.draft = kwargs.pop('draft')
super(ReplaceForm, self).__init__(*args, **kwargs)
self.fields['replaced'].queryset = DocAlias.objects.filter(document=self.draft)
# field must contain filename of existing draft
def clean_replaced_by(self):
name = self.cleaned_data.get('replaced_by', '')
try:
doc = Document.objects.get(name=name)
except Document.DoesNotExist:
raise forms.ValidationError("ERROR: Draft does not exist: %s" % name)
if name == self.draft.name:
raise forms.ValidationError("ERROR: A draft can't replace itself")
return doc
class BaseRevisionModelForm(forms.ModelForm):
class Meta:
model = Document
fields = ('title','pages','abstract')
class RevisionModelForm(forms.ModelForm):
class Meta:
model = Document
fields = ('title','pages','abstract')
# use this method to set attrs which keeps other meta info from model.
def __init__(self, *args, **kwargs):
super(RevisionModelForm, self).__init__(*args, **kwargs)
self.fields['title'].label='Document Name'
self.fields['title'].widget=forms.Textarea()
self.fields['pages'].label='Number of Pages'
class RfcModelForm(forms.ModelForm):
rfc_number = forms.IntegerField()
rfc_published_date = forms.DateField(initial=datetime.datetime.now)
group = GroupModelChoiceField(required=True)
class Meta:
model = Document
fields = ('title','group','pages','std_level','internal_comments')
# use this method to set attrs which keeps other meta info from model.
def __init__(self, *args, **kwargs):
super(RfcModelForm, self).__init__(*args, **kwargs)
self.fields['title'].widget = forms.Textarea()
self.fields['std_level'].required = True
def save(self, force_insert=False, force_update=False, commit=True):
obj = super(RfcModelForm, self).save(commit=False)
# create DocAlias
DocAlias.objects.create(document=self.instance,name="rfc%d" % self.cleaned_data['rfc_number'])
if commit:
obj.save()
return obj
def clean_rfc_number(self):
rfc_number = self.cleaned_data['rfc_number']
if DocAlias.objects.filter(name='rfc' + str(rfc_number)):
raise forms.ValidationError("RFC %d already exists" % rfc_number)
return rfc_number
class RfcObsoletesForm(forms.Form):
relation = forms.ModelChoiceField(queryset=DocRelationshipName.objects.filter(slug__in=('updates','obs')),required=False)
rfc = forms.IntegerField(required=False)
# ensure that RFC exists
def clean_rfc(self):
rfc = self.cleaned_data.get('rfc','')
if rfc:
if not Document.objects.filter(docalias__name="rfc%s" % rfc):
raise forms.ValidationError("RFC does not exist")
return rfc
def clean(self):
super(RfcObsoletesForm, self).clean()
cleaned_data = self.cleaned_data
relation = cleaned_data.get('relation','')
rfc = cleaned_data.get('rfc','')
if (relation and not rfc) or (rfc and not relation):
raise forms.ValidationError('You must select a relation and enter RFC #')
return cleaned_data
class SearchForm(forms.Form):
intended_std_level = forms.ModelChoiceField(queryset=IntendedStdLevelName.objects,label="Intended Status",required=False)
document_title = forms.CharField(max_length=80,label='Document Title',required=False)
group = forms.CharField(max_length=12,required=False)
filename = forms.CharField(max_length=80,required=False)
state = forms.ModelChoiceField(queryset=State.objects.filter(type='draft'),required=False)
revision_date_start = forms.DateField(label='Revision Date (start)',required=False)
revision_date_end = forms.DateField(label='Revision Date (end)',required=False)
class UploadForm(forms.Form):
txt = DocumentField(label=u'.txt format', required=True,extension='.txt',filename=None,rev=None)
xml = DocumentField(label=u'.xml format', required=False,extension='.xml',filename=None,rev=None)
pdf = DocumentField(label=u'.pdf format', required=False,extension='.pdf',filename=None,rev=None)
ps = DocumentField(label=u'.ps format', required=False,extension='.ps',filename=None,rev=None)
def __init__(self, *args, **kwargs):
if 'draft' in kwargs:
self.draft = kwargs.pop('draft')
else:
self.draft = None
super(UploadForm, self).__init__(*args, **kwargs)
if self.draft:
for field in self.fields.itervalues():
field.filename = self.draft.name
field.rev = self.draft.rev
def clean(self):
# Checks that all files have the same base
if any(self.errors):
# Don't bother validating unless each field is valid on its own
return
txt = self.cleaned_data['txt']
xml = self.cleaned_data['xml']
pdf = self.cleaned_data['pdf']
ps = self.cleaned_data['ps']
# we only need to do these validations for new drafts
if not self.draft:
names = []
for file in (txt,xml,pdf,ps):
if file:
base = splitext(file.name)[0]
if base not in names:
names.append(base)
if len(names) > 1:
raise forms.ValidationError, "All files must have the same base name"
# ensure that the basename is unique
base = splitext(txt.name)[0]
if Document.objects.filter(name=base[:-3]):
raise forms.ValidationError, "This doucment filename already exists: %s" % base[:-3]
# ensure that rev is 00
if base[-2:] != '00':
raise forms.ValidationError, "New Drafts must start with 00 revision number."
return self.cleaned_data
class WithdrawForm(forms.Form):
type = forms.CharField(widget=forms.Select(choices=WITHDRAW_CHOICES),help_text='Select which type of withdraw to perform')
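# A minimal usage sketch (hypothetical view code; the request handling is
# illustrative): UploadForm validates a brand-new submission when no draft is
# passed, and validates filename/revision against an existing draft otherwise.
#
#   form = UploadForm(request.POST, request.FILES, draft=draft)
#   if form.is_valid():
#       txt_file = form.cleaned_data['txt']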
|
|
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
"""Json serializable properties."""
from google.appengine.ext import ndb
from google.appengine.api import datastore_errors
from components import utils
__all__ = [
'BytesSerializable',
'BytesSerializableProperty',
'JsonSerializable',
'JsonSerializableProperty',
'READABLE',
'SerializableModelMixin',
'WRITABLE',
'register_converter',
]
# Access to a protected member _XX of a client class - pylint: disable=W0212
# Method could be a function - pylint: disable=R0201
# Use field when converting entity to a serializable dict.
READABLE = 1 << 0
# Use field when converting entity from a serializable dict.
WRITABLE = 1 << 1
### Private stuff.
_rich_to_simple_converters = []
_simple_to_rich_converters = []
# Properties with values that look exactly the same in to_dict() and
# in to_serializable_dict() representations.
_SIMPLE_PROPERTIES = (
ndb.BlobProperty,
ndb.BooleanProperty,
ndb.FloatProperty,
ndb.IntegerProperty,
ndb.JsonProperty,
ndb.PickleProperty,
ndb.StringProperty,
ndb.TextProperty,
)
def _register_simple_converters():
noop = lambda _prop, x: x
for simple_prop_cls in _SIMPLE_PROPERTIES:
register_converter(
property_cls=simple_prop_cls,
include_subclasses=False,
rich_to_simple=noop,
simple_to_rich=noop)
class _ModelDictConverter(object):
"""Uses |property_converters| to recursively convert dictionary values.
  Works by simultaneously walking over the dict and the entity's structure. The dict
is used for actual values, entity is used for typing information.
Used for conversion in both directions: rich-typed dict to serializable dict
and vice versa. The difference is in |property_converters| used.
For example when converting in 'rich-typed to serializable dict' direction,
|property_converters| contains functions that take rich types (e.g. datetime)
and produce simple types (e.g int with timestamp). For reverse direction,
|property_converters| contain functions that perform reverse conversion
(e.g. int timestamp -> datetime).
"""
def __init__(self, property_converters, field_mode_predicate):
"""Args:
property_converters: sequence of tuples that define how to handle various
NDB property classes.
field_mode_predicate: callable that will be used to decide what properties
to use during conversion. It is called with single integer argument
|mode| which is a value from entity.serializable_properties dictionary
that correspond to property being considered. If |field_mode_predicate|
returns True, then property will be used, otherwise it will be silently
ignored during conversion (i.e. resulting dict will not have it even
if it was present in incoming dict).
Each property converter tuple has 3 components:
* ndb.Property subclass this converter applies to.
* Boolean: True to apply converter to all subclasses or False only to
this specific class.
* Actual converter: function(property instance, from type) -> to type.
For instance when converting rich-typed dict to serializable dict, converter
for DateTimeProperty will be defined as:
(
ndb.DateTimeProperty,
False,
lambda(ndb.DateTimeProperty instance, datetime) -> integer
)
"""
self.property_converters = property_converters
self.field_mode_predicate = field_mode_predicate
def convert_dict(self, model_cls, model_dict):
"""Returns new dictionary with values converted using |property_converters|.
Args:
model_cls: ndb.Model subclass that acts as a schema with type information,
its model_cls._properties will be used as a source of typing information
for corresponding keys in |model_dict|.
model_dict: dictionary that has same structure as entity defined by
model_cls. Its values will be passed through appropriate property
converters to get values in returned dict.
Returns:
New dictionary that structurally is a subset of |model_dict|, but with
values of different type (defined by |property_converters|).
"""
if not isinstance(model_dict, dict):
raise ValueError(
'Expecting a dict, got \'%s\' instead' % type(model_dict).__name__)
allowed_properties = self.get_allowed_properties(model_cls)
result = {}
for key, value in model_dict.iteritems():
if allowed_properties is None or key in allowed_properties:
result[key] = self.convert_property(model_cls._properties[key], value)
return result
def convert_property(self, prop, value):
"""Converts value of a single key.
Args:
prop: instance of ndb.Property subclass that defines typing information.
      value: incoming property value to transform.
Returns:
Transformed property value that should be used in resulting dictionary.
Uses |prop| and |property_converters| to figure out how to perform the
conversion.
"""
if prop._repeated:
# Do not allow None here. NDB doesn't accept None as a valid value for
# repeated property in populate(...) or entity constructor.
if not isinstance(value, (list, tuple)):
raise ValueError(
'Expecting a list or tuple for \'%s\', got \'%s\' instead' % (
prop._name, type(value).__name__))
converter = self.get_property_converter(prop)
return [converter(prop, x) for x in value]
# For singular properties pass None as is.
if value is None:
return None
converter = self.get_property_converter(prop)
return converter(prop, value)
def get_allowed_properties(self, model_cls):
"""Returns a set of property names to consider when converting a dictionary.
When working with StructuredProperty based on regular ndb.Model, export all
fields. Otherwise use model_cls.serializable_properties and
self.field_mode_predicate to figure out set of properties to use.
Return value of None means all defined properties should be used.
"""
assert issubclass(model_cls, ndb.Model)
assert not issubclass(model_cls, ndb.Expando), 'Expando is not supported'
if not issubclass(model_cls, SerializableModelMixin):
return None
if model_cls.serializable_properties is None:
return None
return set(
field for field, mode in model_cls.serializable_properties.iteritems()
if self.field_mode_predicate(mode))
def get_property_converter(self, prop):
"""Returns callable that can convert values corresponding to ndb property.
Args:
prop: instance of ndb.Property subclass that defines typing information.
Returns:
      Callable (property instance, incoming value) -> converted value.
"""
# For structured properties, recursively call convert_dict.
if isinstance(prop, (ndb.StructuredProperty, ndb.LocalStructuredProperty)):
return lambda prop, x: self.convert_dict(prop._modelclass, x)
# For other properties consult the registry of converters.
for prop_cls, include_subclasses, conv in self.property_converters:
if (include_subclasses and isinstance(prop, prop_cls) or
not include_subclasses and type(prop) == prop_cls):
return conv
# Give up.
raise TypeError('Don\'t know how to work with %s' % type(prop).__name__)
### Public API.
class SerializableModelMixin(object):
"""Mixing for entity that can convert itself to/from serializable dictionary.
A serializable dictionary trivially can be converted to/from JSON, XML, YAML,
etc. via standard serializers (e.g json.dump and json.load).
A serializable dictionary is a dictionary with string keys and values that are
* Scalar types: int, long, float.
* String types: str, unicode.
* Sequences: list, tuple.
    * Other serializable dictionaries.
"""
  # Dictionary: property name -> bit mask with READABLE and/or WRITABLE flags.
# It defines what properties to use when convert an entity to or from
# serializable dict. See doc strings for 'to_serializable_dict' and
# 'convert_serializable_dict' for more details.
# Default is None, which means that all defined properties are readable
# and writable.
serializable_properties = None
def to_serializable_dict(self, with_id_as=None, exclude=None):
"""Converts this entity to a serializable dictionary.
Operates only on properties that have READABLE flag set in
|serializable_properties|. All other entity properties are effectively
invisible.
Args:
with_id_as: name of the optional dict key to put entity's string_id() to.
exclude: list of fields to exclude from the dict.
"""
# TODO(vadimsh): Add 'include' and 'exclude' support when needed.
conv = _ModelDictConverter(
property_converters=_rich_to_simple_converters,
field_mode_predicate=lambda mode: bool(mode & READABLE))
serializable_dict = conv.convert_dict(
self.__class__, self.to_dict(exclude=exclude))
if with_id_as:
assert isinstance(with_id_as, basestring)
serializable_dict[with_id_as] = self.key.string_id()
return serializable_dict
@classmethod
def from_serializable_dict(cls, serializable_dict, **props):
"""Makes an entity with properties from |serializable_dict| and |props|.
Properties from |serializable_dict| are converted from simple types to
rich types first (e.g. int -> DateTimeProperty). See doc string for
'convert_serializable_dict' method for more details.
Properties from |props| are passed to entity constructor as is. Values in
|props| override values from |serializable_dict|.
Raises ValueError if types or structure of |serializable_dict| doesn't match
entity schema.
"""
try:
all_props = cls.convert_serializable_dict(serializable_dict)
all_props.update(props)
return cls(**all_props)
except datastore_errors.BadValueError as e:
raise ValueError(e)
@classmethod
def convert_serializable_dict(cls, serializable_dict):
"""Converts a serializable dictionary to dictionary with rich-typed values.
It can then be used in entity constructor or in 'populate' method. This
method works as reverse of to_serializable_dict, in particular if all
fields are readable and writable the following holds:
ent = Entity(...)
assert ent == Entity(
**Entity.convert_serializable_dict(ent.to_serializable_dict()))
Operates only on properties that have WRITABLE flag set in
|serializable_properties|. All other keys from |serializable_dict|
(i.e. ones that don't match any entity properties at all or ones that match
properties not explicitly marked as WRITABLE) are silently ignored.
"""
conv = _ModelDictConverter(
property_converters=_simple_to_rich_converters,
field_mode_predicate=lambda mode: bool(mode & WRITABLE))
return conv.convert_dict(cls, serializable_dict)
class BytesSerializable(object):
"""Interface that defines to_bytes() and from_bytes() methods.
Objects that implement this interface know how to serialize/deserialize
themselves to/from bytes array (represented by 'str').
"""
def to_bytes(self):
"""Serialize this object to byte array."""
raise NotImplementedError()
@classmethod
def from_bytes(cls, byte_buf):
"""Deserialize byte array into new instance of the class."""
raise NotImplementedError()
class JsonSerializable(object):
"""Interface that defines to_jsonish() and from_jsonish() methods.
Value is 'jsonish' if it can be converted to JSON with standard json.dump.
Objects that implement this interface know how to convert themselves to/from
jsonish values (usually dicts but not necessarily).
"""
def to_jsonish(self):
"""Convert this object to jsonish value."""
raise NotImplementedError()
@classmethod
def from_jsonish(cls, obj):
"""Given jsonish value convert it to new instance of the class."""
raise NotImplementedError()
class BytesSerializableProperty(ndb.BlobProperty):
"""BlobProperty that uses values's to_bytes/from_bytes methods.
Property will use to_bytes() to serialize an object before storing it in
DB and from_bytes() when fetching it from DB and validating.
Usage:
class MyValue(BytesSerializable):
...
class MyValueProperty(BytesSerializableProperty):
_value_type = MyValue
class Model(ndb.Model):
my_value = MyValueProperty()
"""
# Should be set in subclasses to some BytesSerializable subclass that this
# property class will represent.
_value_type = None
def _validate(self, value):
if not isinstance(value, self._value_type):
raise TypeError(
'Expecting %s, got %r' % (self._value_type.__name__, value))
def _to_base_type(self, value):
result = value.to_bytes()
assert isinstance(result, str)
return result
def _from_base_type(self, value):
assert isinstance(value, str)
result = self._value_type.from_bytes(value)
assert isinstance(result, self._value_type)
return result
class JsonSerializableProperty(ndb.JsonProperty):
"""JsonProperty that uses values's to_jsonish/from_jsonish methods.
Property will use to_jsonish() to convert an object to simple JSONish value
before storing it in DB as JSON and from_jsonish() when fetching it from
DB and validating.
Usage:
class MyValue(JsonSerializable):
...
class MyValueProperty(JsonSerializableProperty):
_value_type = MyValue
class Model(ndb.Model):
my_value = MyValueProperty()
"""
# Should be set in subclasses to some JsonSerializable subclass that this
# property class will represent.
_value_type = None
def _validate(self, value):
if not isinstance(value, self._value_type):
raise TypeError(
'Expecting %s, got %r' % (self._value_type.__name__, value))
def _to_base_type(self, value):
return value.to_jsonish()
def _from_base_type(self, value):
return self._value_type.from_jsonish(value)
def register_converter(
property_cls, include_subclasses, rich_to_simple, simple_to_rich):
"""Register a pair of functions that can convert some ndb.Property subclass.
  Used by SerializableModelMixin to convert entities to
  serializable dicts and vice versa.
Args:
property_cls: ndb.Property subclass.
include_subclasses: True to apply this converter to all subclasses as well.
rich_to_simple: function that converts property's value type to some simple
type: rich_to_simple(property_instance, property_value) -> simple_value.
simple_to_rich: function that converts some simple type to property's value
type: simple_to_rich(property_instance, simple_value) -> property_value.
"""
assert issubclass(property_cls, ndb.Property)
_rich_to_simple_converters.append(
(property_cls, include_subclasses, rich_to_simple))
_simple_to_rich_converters.append(
(property_cls, include_subclasses, simple_to_rich))
### Function calls.
_register_simple_converters()
# TODO(vadimsh): Add ndb.DateProperty if needed.
register_converter(
property_cls=ndb.DateTimeProperty,
include_subclasses=False,
rich_to_simple=lambda _prop, x: utils.datetime_to_timestamp(x),
simple_to_rich=lambda _prop, x: utils.timestamp_to_datetime(x))
# Handles all property classes inherited from JsonSerializableProperty.
register_converter(
property_cls=JsonSerializableProperty,
include_subclasses=True,
rich_to_simple=lambda prop, value: value.to_jsonish(),
simple_to_rich=lambda prop, value: prop._value_type.from_jsonish(value))
# Handles all property classes inherited from BytesSerializableProperty.
register_converter(
property_cls=BytesSerializableProperty,
include_subclasses=True,
rich_to_simple=lambda prop, value: value.to_bytes(),
simple_to_rich=lambda prop, value: prop._value_type.from_bytes(value))
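# A minimal usage sketch (the Point model is illustrative, not part of this
# module): only READABLE fields appear in to_serializable_dict() and only
# WRITABLE fields are honored by from_serializable_dict().
#
#   class Point(ndb.Model, SerializableModelMixin):
#     serializable_properties = {
#       'x': READABLE | WRITABLE,
#       'y': READABLE,
#     }
#     x = ndb.IntegerProperty()
#     y = ndb.IntegerProperty()
#
#   Point(x=1, y=2).to_serializable_dict()          # -> {'x': 1, 'y': 2}
#   Point.from_serializable_dict({'x': 3, 'y': 9})  # 'y' silently ignored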
|
|
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# these are system modules
import numpy
import sys
# these are my local ones
from env import gidgetConfigVars
import tsvIO
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
NA_VALUE = -999999
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def cleanUpName(aName):
bName = ''
aName = aName.upper()
## ii = aName.find(" - Homo sapiens (human)")
ii = aName.find(" - HOMO SAPIENS (HUMAN)")
if (ii >= 0):
aName = aName[:ii]
aName = aName.strip()
ii = aName.find("(")
while (ii >= 0):
jj = aName.find(")", ii)
aName = aName[:ii] + aName[jj + 1:]
ii = aName.find("(")
aName = aName.strip()
ii = aName.find("<")
while (ii >= 0):
jj = aName.find(">", ii)
aName = aName[:ii] + aName[jj + 1:]
ii = aName.find("<")
aName = aName.strip()
for ii in range(len(aName)):
if (aName[ii] == ','):
continue
elif (aName[ii] == '('):
bName += '_'
elif (aName[ii] == ')'):
bName += '_'
elif (aName[ii] == '-'):
bName += '_'
elif (aName[ii] == '/'):
bName += '_'
elif (aName[ii] == ';'):
bName += '_'
elif (aName[ii] == '&'):
continue
elif (aName[ii] == '#'):
continue
elif (aName[ii] == ' '):
bName += '_'
else:
bName += aName[ii].upper()
ii = bName.find("__")
while (ii >= 0):
print " ", ii, bName
bName = bName[:ii] + bName[ii + 1:]
print " ", bName
ii = bName.find("__")
return (bName)
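# For example:
#   cleanUpName("Wnt signaling (canonical) - Homo sapiens (human)")
# uppercases, drops the species suffix and the parenthesized text, and maps
# separators to underscores, returning "WNT_SIGNALING".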
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def readPathways():
    fh = open(
        gidgetConfigVars['TCGAFMP_BIOINFORMATICS_REFERENCES'] + "/nci_pid/only_NCI_Nature_ver4.tab", 'r')
pwDict = {}
for aLine in fh:
aLine = aLine.strip()
aLine = aLine.upper()
tokenList = aLine.split('\t')
if (len(tokenList) != 3):
continue
if (tokenList[0] == "pathway"):
continue
longPathwayName = tokenList[0]
shortPathwayName = tokenList[1]
geneTokens = tokenList[2].strip()
geneList = geneTokens.split(',')
geneList.sort()
        if (len(geneList) > 0):
            # drop leading empty tokens; guard the length so an all-empty
            # list cannot trigger an IndexError
            while (len(geneList) > 0 and geneList[0] == ''):
                geneList = geneList[1:]
        if (len(geneList) == 0):
            continue
pathwayName = cleanUpName(shortPathwayName)
if (pathwayName not in pwDict.keys()):
# print " adding pathway %s (%d) " % ( pathwayName, len(geneList) )
pwDict[pathwayName] = geneList
else:
if (len(pwDict[pathwayName]) < len(geneList)):
# print " substituting shorter list of genes for %s (%d) " % (
# pathwayName, len(geneList) )
pwDict[pathwayName] = geneList
# else:
# print " NOT substituing list for %s " % pathwayName
fh.close()
print " "
print " have pathway dictionary with %d pathways " % len(pwDict)
print " --> now looking for duplicate pathways ... "
pwList = pwDict.keys()
pwList.sort()
delList = []
pairDict = {}
for ii in range(len(pwList) - 1):
iiName = pwList[ii]
iiLen = len(pwDict[iiName])
for jj in range(ii + 1, len(pwList)):
jjName = pwList[jj]
jjLen = len(pwDict[jjName])
if (jjLen != iiLen):
continue
if (pwDict[iiName] == pwDict[jjName]):
print "\n\n SAME !!! "
print iiName, iiLen
print pwDict[iiName]
print jjName, jjLen
print pwDict[jjName]
iiSplit = iiName.split('__')
jjSplit = jjName.split('__')
if (iiSplit[1] <= jjSplit[1]):
pairNames = (iiSplit[1], jjSplit[1])
else:
pairNames = (jjSplit[1], iiSplit[1])
if (pairNames in pairDict.keys()):
pairDict[pairNames] += 1
else:
pairDict[pairNames] = 1
if (iiSplit[1] == jjSplit[1]):
if (len(iiName) <= len(jjName)):
delList += [jjName]
else:
delList += [iiName]
else:
if (iiSplit[1] == "NCI-NATURE"):
delList += [jjName]
elif (jjSplit[1] == "NCI-NATURE"):
delList += [iiName]
elif (iiSplit[1] == "PID"):
delList += [jjName]
elif (jjSplit[1] == "PID"):
delList += [iiName]
elif (iiSplit[1] == "KEGG"):
delList += [jjName]
elif (jjSplit[1] == "KEGG"):
delList += [iiName]
elif (iiSplit[1] == "PWCOMMONS"):
delList += [jjName]
elif (jjSplit[1] == "PWCOMMONS"):
delList += [iiName]
elif (iiSplit[1] == "REACTOME"):
delList += [jjName]
elif (jjSplit[1] == "REACTOME"):
delList += [iiName]
elif (iiSplit[1] == "WIKIPATHWAYS"):
delList += [jjName]
elif (jjSplit[1] == "WIKIPATHWAYS"):
delList += [iiName]
elif (iiSplit[1] == "WIKIPW"):
delList += [jjName]
elif (jjSplit[1] == "WIKIPW"):
delList += [iiName]
elif (iiSplit[1] == "SMPDB"):
delList += [jjName]
elif (jjSplit[1] == "SMPDB"):
delList += [iiName]
elif (iiSplit[1] == "HUMANCYC"):
delList += [jjName]
elif (jjSplit[1] == "HUMANCYC"):
delList += [iiName]
else:
sys.exit(-1)
    for aName in delList:
        try:
            del pwDict[aName]
        except KeyError:
            # the same name may have been added to delList more than once
            pass
print " "
print " returning pathway dictionary with %d pathways " % len(pwDict)
print " "
for aKey in pairDict.keys():
print aKey, pairDict[aKey]
print " "
print " "
return (pwDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def setFeatBits(rowLabels, featPrefix, doesContain, notContain):
numSet = 0
numRow = len(rowLabels)
bitVec = numpy.zeros(numRow, dtype=numpy.bool)
for iR in range(numRow):
if (featPrefix != ""):
if (not rowLabels[iR].startswith(featPrefix)):
continue
if (doesContain != ""):
if (rowLabels[iR].find(doesContain) < 0):
continue
if (notContain != ""):
if (rowLabels[iR].find(notContain) >= 0):
continue
bitVec[iR] = 1
numSet += 1
print featPrefix, doesContain, notContain, numRow, numSet
if (numSet == 0):
print " numSet=0 ... this is probably a problem ... "
# sys.exit(-1)
return (bitVec)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# B:GNAB:ADAM7:chr8:24298509:24384483:+:y_n_somatic y_n y_del
# --> B:GNAB:ADAM7:chr8:24298509:24384483:+:y_del_somatic
def makeNewFeatureName(curFeatName, oldString, newString):
i1 = curFeatName.find(oldString)
    if (i1 < 0 or len(oldString) < 2):
        print " ERROR in makeNewFeatureName ???? ", curFeatName, oldString, newString
        # nothing to substitute: return the name unchanged rather than
        # slicing with a negative index below
        return (curFeatName)
i2 = i1 + len(oldString)
newFeatName = curFeatName[:i1] + newString + curFeatName[i2:]
# print curFeatName, oldString, newString, newFeatName
return (newFeatName)
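# Worked example, taken from the comment above (illustration only):
#   makeNewFeatureName("B:GNAB:ADAM7:chr8:24298509:24384483:+:y_n_somatic",
#                      "y_n", "y_del")
#       --> "B:GNAB:ADAM7:chr8:24298509:24384483:+:y_del_somatic"
# Only the first occurrence of oldString is replaced.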
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def chooseCountThreshold(dataD):
rowLabels = dataD['rowLabels']
dMat = dataD['dataMatrix']
numBits = 0
for ii in range(len(rowLabels)):
if (numBits > 0):
continue
if (rowLabels[ii].find("B:GNAB:TP53:") >= 0):
for jj in range(len(dMat[ii])):
if (dMat[ii][jj] == 0):
numBits += 1
elif (dMat[ii][jj] == 1):
numBits += 1
print " number of bits found for TP53 mutation feature: ", numBits
countThreshold = int(numBits / 11) - 1
return (countThreshold)
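# The threshold works out to roughly 9% of the usable TP53 calls, e.g.
# (illustrative numbers): 440 samples with a 0/1 TP53 mutation status give
#   countThreshold = int(440 / 11) - 1 = 39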
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def pathwayGEXP(dataD, pathways={}):
print " "
print " ************************************************************* "
print " ************************************************************* "
print " "
print " in pathwayGEXP ... "
# check that the input feature matrix looks ok ...
try:
numRow = len(dataD['rowLabels'])
numCol = len(dataD['colLabels'])
rowLabels = dataD['rowLabels']
print " %d rows x %d columns " % (numRow, numCol)
# print rowLabels[:5]
# print rowLabels[-5:]
except:
print " ERROR in pathwayGEXP ??? bad data ??? "
return (dataD)
if (len(pathways) == 0):
print " no pathway information ??? "
sys.exit(-1)
print " "
print " total number of pathways : ", len(pathways)
print " "
pathwayList = pathways.keys()
pathwayList.sort()
numPW = len(pathways)
newNameVec = [0] * (numPW)
newDataMat = [0] * (numPW)
dMat = dataD['dataMatrix']
kFeat = 0
for aPathway in pathwayList:
if (1):
numON = 0
newFeatName = "N:GEXP:" + aPathway + "::::"
# first make sure we don't already have a feature with this name
# ...
stopNow = 0
for iRow in range(numRow):
if (newFeatName == rowLabels[iRow]):
stopNow = 1
if (stopNow):
continue
print " tentative new feature #%d ... <%s> " % (kFeat, newFeatName)
newNameVec[kFeat] = newFeatName
newDataMat[kFeat] = numpy.zeros(numCol)
# initialize to all NAs ...
for iCol in range(numCol):
newDataMat[kFeat][iCol] = NA_VALUE
if (0):
print " "
print " "
print aPathway, newFeatName
print len(pathways[aPathway]), pathways[aPathway]
for iR in range(numRow):
# if ( iR%1000 == 0 ): print iR, numRow
if (1):
gexpLabel = rowLabels[iR]
if (not gexpLabel.startswith("N:GEXP:")):
continue
try:
gexpTokens = gexpLabel.split(':')
gexpGene = gexpTokens[2].upper()
except:
print " FAILED to parse GEXP feature name ??? ", gexpLabel
continue
if (gexpGene in pathways[aPathway]):
for iCol in range(numCol):
if (dMat[iR][iCol] != "NA"):
if (dMat[iR][iCol] != NA_VALUE):
if (newDataMat[kFeat][iCol] == NA_VALUE):
newDataMat[kFeat][
iCol] = dMat[iR][iCol]
else:
newDataMat[kFeat][
iCol] += dMat[iR][iCol]
if (1):
kFeat += 1
print " --> keeping this feature ... ", kFeat, newFeatName
# else:
# print " --> NOT keeping this feature ... ", newFeatName, numON,
# min_numON
numNewFeat = kFeat
print " "
print " --> number of new features : ", numNewFeat
print len(newDataMat), len(newDataMat[0])
# now we need to append these new features to the input data matrix
newRowLabels = [0] * (numRow + numNewFeat)
newMatrix = [0] * (numRow + numNewFeat)
for iR in range(numRow):
newRowLabels[iR] = rowLabels[iR]
newMatrix[iR] = dMat[iR]
for iR in range(numNewFeat):
newRowLabels[iR + numRow] = newNameVec[iR]
newMatrix[iR + numRow] = newDataMat[iR]
dataD['rowLabels'] = newRowLabels
dataD['dataMatrix'] = newMatrix
print " "
print " --> finished with pathwayGEXP ... "
print " "
return (dataD)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
if __name__ == "__main__":
if (1):
if (len(sys.argv) == 3):
inFile = sys.argv[1]
outFile = sys.argv[2]
else:
print " "
print " Usage: %s <input TSV file> <output TSV file> "
print " "
print " ERROR -- bad command line arguments "
sys.exit(-1)
print " "
print " Running : %s %s %s " % (sys.argv[0], sys.argv[1], sys.argv[2])
print " "
# read in the input feature matrix first, just in case there
# actually isn't one yet available ...
testD = tsvIO.readTSV(inFile)
try:
print len(testD['rowLabels']), len(testD['colLabels'])
except:
print " --> invalid / missing input feature matrix "
sys.exit(-1)
# and then pathway level mutation features
if (1):
pwDict = readPathways()
newD = pathwayGEXP(testD, pwDict)
testD = newD
# and finally write it out ...
tsvIO.writeTSV_dataMatrix(testD, 0, 0, outFile)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
|
|
import copy
import plistlib
import uuid
from datetime import datetime
from django.http import HttpResponse
from django.test import TestCase
from django.utils.crypto import get_random_string
from zentral.contrib.inventory.models import EnrollmentSecret, MetaBusinessUnit
from zentral.contrib.mdm.models import (Artifact, ArtifactType, ArtifactVersion,
Blueprint, BlueprintArtifact,
Channel, DeviceArtifact,
DEPEnrollment, DEPEnrollmentSession,
DEPOrganization, DEPToken, DEPVirtualServer,
EnrolledDevice, EnrolledUser,
Platform, Profile, PushCertificate,
UserArtifact)
from zentral.contrib.mdm.commands.device_configured import DeviceConfigured
from zentral.contrib.mdm.commands.device_information import DeviceInformation
from zentral.contrib.mdm.commands.install_profile import InstallProfile
from zentral.contrib.mdm.commands.remove_profile import RemoveProfile
from zentral.contrib.mdm.commands.utils import (_finish_dep_enrollment_configuration,
_install_artifacts,
_get_next_queued_command,
_remove_artifacts)
PROFILE_TEMPLATE = {
'PayloadContent': [{
'PayloadType': 'com.apple.dock',
'PayloadDescription': 'Dock Payload',
'PayloadDisplayName': 'Dock',
'PayloadVersion': 1,
'orientation': 'right'
}],
'PayloadType': 'Configuration',
    'PayloadDescription': 'Superb unbeatable profile!!!',
'PayloadDisplayName': 'Test User Profile with Dock',
'PayloadVersion': 1,
'PayloadOrganization': 'Zentral',
'PayloadScope': 'User',
}
def build_profile(
payload_display_name=None,
payload_description=None,
payload_identifier=None,
payload_uuid=None,
channel=Channel.Device
):
if payload_uuid is None:
payload_uuid = str(uuid.uuid4()).upper()
if payload_identifier is None:
payload_identifier = f"io.zentral.test.{payload_uuid}"
profile = copy.deepcopy(PROFILE_TEMPLATE)
profile["PayloadIdentifier"] = payload_identifier
profile["PayloadUUID"] = payload_uuid
profile["PayloadDisplayName"] = payload_display_name or get_random_string(16)
profile["PayloadDescription"] = payload_description or get_random_string(32)
profile["PayloadScope"] = "System" if channel == Channel.Device else "User"
payload = profile["PayloadContent"][0]
payload["PayloadIdentifier"] = f"{payload_identifier}.0"
payload["PayloadUUID"] = str(uuid.uuid4()).upper()
return plistlib.dumps(profile)
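# Minimal usage sketch for the helper above (illustrative only):
#   raw = build_profile(channel=Channel.User)
#   plistlib.loads(raw)["PayloadScope"]   # == "User"
# The tests below exercise build_profile() through the Profile model instead.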
class TestMDMCommands(TestCase):
@classmethod
def setUpTestData(cls):
cls.meta_business_unit = MetaBusinessUnit.objects.create(name=get_random_string(32))
push_certificate = PushCertificate.objects.create(
name=get_random_string(64),
topic=get_random_string(256),
not_before=datetime(2000, 1, 1),
not_after=datetime(2050, 1, 1),
certificate=get_random_string(64).encode("utf-8"),
private_key=get_random_string(64).encode("utf-8")
)
cls.blueprint1 = Blueprint.objects.create(name=get_random_string(32))
# Enrolled devices / user
cls.enrolled_device_no_blueprint = EnrolledDevice.objects.create(
push_certificate=push_certificate,
serial_number=get_random_string(64),
platform="macOS",
udid=get_random_string(36),
token=get_random_string(32).encode("utf-8"),
push_magic=get_random_string(73),
unlock_token=get_random_string(32).encode("utf-8")
)
cls.enrolled_device = EnrolledDevice.objects.create(
push_certificate=push_certificate,
serial_number=get_random_string(64),
platform="macOS",
blueprint=cls.blueprint1,
udid=get_random_string(36),
token=get_random_string(32).encode("utf-8"),
push_magic=get_random_string(73),
unlock_token=get_random_string(32).encode("utf-8")
)
cls.enrolled_user = EnrolledUser.objects.create(
enrolled_device=cls.enrolled_device,
user_id=str(uuid.uuid4()).upper(),
long_name=get_random_string(),
short_name=get_random_string(),
token=get_random_string().encode("utf-8"),
)
cls.enrolled_device_awaiting_configuration = EnrolledDevice.objects.create(
push_certificate=push_certificate,
serial_number=get_random_string(64),
platform="macOS",
blueprint=cls.blueprint1,
awaiting_configuration=True,
udid=get_random_string(36),
token=get_random_string(32).encode("utf-8"),
push_magic=get_random_string(73),
unlock_token=get_random_string(32).encode("utf-8")
)
# DEP enrollment
dep_organization = DEPOrganization.objects.create(
identifier=get_random_string(128),
admin_id="{}@zentral.io".format(get_random_string()),
name=get_random_string(),
email="{}@zentral.io".format(get_random_string()),
phone=get_random_string(),
address=get_random_string(),
type=DEPOrganization.ORG,
version=DEPOrganization.V2
)
dep_token = DEPToken.objects.create(
certificate=get_random_string().encode("utf-8"),
private_key=get_random_string().encode("utf-8"),
)
dep_virtual_server = DEPVirtualServer.objects.create(
name=get_random_string(),
uuid=uuid.uuid4(),
organization=dep_organization,
token=dep_token
)
dep_enrollment = DEPEnrollment.objects.create(
uuid=uuid.uuid4(),
virtual_server=dep_virtual_server,
push_certificate=push_certificate,
blueprint=cls.blueprint1,
enrollment_secret=EnrollmentSecret.objects.create(meta_business_unit=cls.meta_business_unit),
skip_setup_items=[p for p, _ in DEPEnrollment.SKIPPABLE_SETUP_PANE_CHOICES],
name=get_random_string()
)
cls.dep_enrollment_session = DEPEnrollmentSession.objects.create_from_dep_enrollment(
dep_enrollment, cls.enrolled_device.serial_number, cls.enrolled_device.udid
)
es_request = EnrollmentSecret.objects.verify(
"dep_enrollment_session",
cls.dep_enrollment_session.enrollment_secret.secret,
user_agent=get_random_string(), public_ip_address="127.0.0.1"
)
cls.dep_enrollment_session.set_scep_verified_status(es_request)
cls.dep_enrollment_session.set_authenticated_status(cls.enrolled_device)
cls.dep_enrollment_session.set_completed_status(cls.enrolled_device)
def _force_artifact(
self,
version_count=1,
artifact_type=ArtifactType.Profile,
channel=Channel.Device,
platforms=None,
install_before_setup_assistant=False,
auto_update=True,
priority=0
):
if platforms is None:
platforms = Platform.all_values()
artifact = Artifact.objects.create(
name=get_random_string(32),
type=artifact_type.name,
channel=channel.name,
platforms=platforms
)
artifact_versions = []
        payload_identifier = None
for version in range(version_count, 0, -1):
artifact_version = ArtifactVersion.objects.create(artifact=artifact, version=version)
artifact_versions.append(artifact_version)
if artifact_type == ArtifactType.Profile:
if payload_identifier is None:
payload_identifier = "{}.{}.{}".format(get_random_string(2),
get_random_string(4),
str(uuid.uuid4()).upper())
payload_uuid = str(uuid.uuid4()).upper()
payload_display_name = get_random_string(16)
payload_description = get_random_string(32)
Profile.objects.create(
artifact_version=artifact_version,
source=build_profile(
payload_display_name=payload_display_name,
payload_description=payload_description,
payload_identifier=payload_identifier,
payload_uuid=payload_uuid,
channel=channel
),
payload_identifier=payload_identifier,
payload_uuid=payload_uuid,
payload_display_name=payload_display_name,
payload_description=payload_description
)
return artifact, artifact_versions
def _force_blueprint_artifact(
self,
version_count=1,
artifact_type=ArtifactType.Profile,
channel=Channel.Device,
platforms=None,
install_before_setup_assistant=False,
auto_update=True,
priority=0,
blueprint=None
):
artifact, artifact_versions = self._force_artifact(
version_count,
artifact_type,
channel,
platforms,
install_before_setup_assistant,
auto_update,
priority
)
BlueprintArtifact.objects.create(
blueprint=blueprint or self.blueprint1,
artifact=artifact,
install_before_setup_assistant=install_before_setup_assistant,
auto_update=auto_update,
priority=priority,
)
return artifact, artifact_versions
def _force_target_artifact_version(self, target, artifact_version):
kwargs = {"artifact_version__artifact": artifact_version.artifact,
"defaults": {"artifact_version": artifact_version}}
if isinstance(target, EnrolledDevice):
model = DeviceArtifact
kwargs["enrolled_device"] = target
else:
model = UserArtifact
kwargs["enrolled_user"] = target
return model.objects.update_or_create(**kwargs)[0]
    def test_no_next_queued_command(self):
self.assertIsNone(_get_next_queued_command(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
def test_device_information(self):
command = DeviceInformation.create_for_device(self.enrolled_device)
self.assertEqual(command.enrolled_device, self.enrolled_device)
self.assertIsNotNone(command.db_command.time)
self.assertIsNone(_get_next_queued_command(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
def test_queue_device_information(self):
command = DeviceInformation.create_for_device(self.enrolled_device, queue=True)
self.assertIsNone(command.db_command.time)
fetched_command = _get_next_queued_command(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
)
self.assertEqual(command, fetched_command)
self.assertIsNone(_get_next_queued_command(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device_no_blueprint,
None
))
self.assertIsNone(_get_next_queued_command(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
))
def test_device_configured(self):
self.enrolled_device_awaiting_configuration.refresh_from_db()
self.assertIsNone(_finish_dep_enrollment_configuration(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
command = _finish_dep_enrollment_configuration(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device_awaiting_configuration,
None
)
self.assertIsInstance(command, DeviceConfigured)
self.assertEqual(command.channel, Channel.Device)
self.assertIsNotNone(command.db_command.time)
self.assertIsNone(command.db_command.result_time)
self.assertTrue(self.enrolled_device_awaiting_configuration.awaiting_configuration)
command.process_response({"Status": "Acknowledged"}, self.dep_enrollment_session, self.meta_business_unit)
command.db_command.refresh_from_db()
self.assertEqual(command.db_command.status, "Acknowledged")
self.assertIsNotNone(command.db_command.result_time)
self.enrolled_device_awaiting_configuration.refresh_from_db()
self.assertFalse(self.enrolled_device_awaiting_configuration.awaiting_configuration)
def test_no_device_profile(self):
self.assertIsNone(_install_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
def test_install_device_profile(self):
artifact, artifact_versions = self._force_blueprint_artifact()
command = _install_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
)
self.assertIsInstance(command, InstallProfile)
self.assertEqual(command.channel, Channel.Device)
self.assertEqual(command.db_command.artifact_version, artifact_versions[0])
http_response = command.build_http_response(self.dep_enrollment_session)
self.assertIsInstance(http_response, HttpResponse)
self.assertIsNone(_install_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
))
qs = DeviceArtifact.objects.filter(enrolled_device=self.enrolled_device)
self.assertEqual(qs.count(), 0)
command.process_response({"Status": "Acknowledged"}, self.dep_enrollment_session, self.meta_business_unit)
self.assertEqual(qs.count(), 1)
self.assertEqual(qs.filter(artifact_version=artifact_versions[0]).count(), 1)
def test_no_install_device_profile_previous_error(self):
artifact, artifact_versions = self._force_blueprint_artifact()
command = _install_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
)
command.process_response({"Status": "Error", "ErrorChain": [{"un": 1}]},
self.dep_enrollment_session, self.meta_business_unit)
self.assertIsNone(_install_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
def test_install_user_profile(self):
artifact, artifact_versions = self._force_blueprint_artifact(channel=Channel.User)
command = _install_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
)
self.assertIsInstance(command, InstallProfile)
self.assertEqual(command.channel, Channel.User)
self.assertEqual(command.db_command.artifact_version, artifact_versions[0])
http_response = command.build_http_response(self.dep_enrollment_session)
self.assertIsInstance(http_response, HttpResponse)
self.assertIsNone(_install_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
qs = UserArtifact.objects.filter(enrolled_user=self.enrolled_user)
self.assertEqual(qs.count(), 0)
command.process_response({"Status": "Acknowledged"}, self.dep_enrollment_session, self.meta_business_unit)
self.assertEqual(qs.count(), 1)
self.assertEqual(qs.filter(artifact_version=artifact_versions[0]).count(), 1)
def test_no_install_user_profile_previous_error(self):
artifact, artifact_versions = self._force_blueprint_artifact(channel=Channel.User)
command = _install_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
)
command.process_response({"Status": "Error", "ErrorChain": [{"un": 1}]},
self.dep_enrollment_session, self.meta_business_unit)
self.assertIsNone(_install_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
))
def test_remove_device_profile(self):
artifact, artifact_versions = self._force_artifact()
self.assertIsNone(_remove_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
device_artifact = self._force_target_artifact_version(self.enrolled_device, artifact_versions[0])
command = _remove_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
)
self.assertIsInstance(command, RemoveProfile)
self.assertEqual(command.channel, Channel.Device)
self.assertEqual(command.db_command.artifact_version, artifact_versions[0])
http_response = command.build_http_response(self.dep_enrollment_session)
self.assertIsInstance(http_response, HttpResponse)
self.assertIsNone(_remove_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
))
qs = DeviceArtifact.objects.filter(enrolled_device=self.enrolled_device)
self.assertEqual(qs.count(), 1)
self.assertEqual(qs.first(), device_artifact)
command.process_response({"Status": "Acknowledged"}, self.dep_enrollment_session, self.meta_business_unit)
self.assertEqual(qs.count(), 0)
def test_no_remove_device_profile_previous_error(self):
artifact, artifact_versions = self._force_artifact()
self._force_target_artifact_version(self.enrolled_device, artifact_versions[0])
command = _remove_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
)
command.process_response({"Status": "Error", "ErrorChain": [{"un": 1}]},
self.dep_enrollment_session, self.meta_business_unit)
self.assertIsNone(_remove_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
def test_remove_user_profile(self):
artifact, artifact_versions = self._force_artifact()
self.assertIsNone(_remove_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
))
user_artifact = self._force_target_artifact_version(self.enrolled_user, artifact_versions[0])
command = _remove_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
)
self.assertIsInstance(command, RemoveProfile)
self.assertEqual(command.channel, Channel.User)
self.assertEqual(command.db_command.artifact_version, artifact_versions[0])
http_response = command.build_http_response(self.dep_enrollment_session)
self.assertIsInstance(http_response, HttpResponse)
self.assertIsNone(_remove_artifacts(
Channel.Device,
self.dep_enrollment_session,
self.enrolled_device,
None
))
qs = UserArtifact.objects.filter(enrolled_user=self.enrolled_user)
self.assertEqual(qs.count(), 1)
self.assertEqual(qs.first(), user_artifact)
command.process_response({"Status": "Acknowledged"}, self.dep_enrollment_session, self.meta_business_unit)
self.assertEqual(qs.count(), 0)
def test_no_remove_user_profile_previous_error(self):
artifact, artifact_versions = self._force_artifact()
self._force_target_artifact_version(self.enrolled_user, artifact_versions[0])
command = _remove_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
)
command.process_response({"Status": "Error", "ErrorChain": [{"un": 1}]},
self.dep_enrollment_session, self.meta_business_unit)
self.assertIsNone(_remove_artifacts(
Channel.User,
self.dep_enrollment_session,
self.enrolled_device,
self.enrolled_user
))
|
|
""" Part of weight_app
:copyright: (c) 2012 by Andreas Madsack.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, Response, request, abort, redirect, flash, \
url_for, render_template
from jinja2 import TemplateNotFound
from flask.ext.login import login_required, login_user, logout_user, \
current_user
from forms import LoginForm, ProfileForm, WeightForm, ScaleForm
import datetime
from main import db, DbUser
from utils import fitbit_push
weight_pages = Blueprint('weight_app', __name__,
template_folder='templates')
@weight_pages.route('/favicon.ico')
def favicon():
abort(404)
@weight_pages.route('/')
def index():
return render_template('index.html')
@weight_pages.route('/about')
def about():
return render_template('about.html')
@weight_pages.route("/login", methods=["GET", "POST"])
def login():
form = LoginForm()
if form.validate_on_submit():
username = request.form['username']
password = request.form['password']
from models import User
        u1 = User.query.get(username)
        # LoginForm.validate_on_submit() is assumed to have verified the
        # credentials; guard against the user disappearing in the meantime.
        if u1 is not None and login_user(DbUser(u1.username)):
flash("You have logged in", "info")
next = request.args.get('next')
return redirect(next or url_for('.index'))
return render_template('login.html',
form=form)
@weight_pages.route('/logout')
def logout():
logout_user()
flash('You have logged out', "info")
return(redirect(url_for('.login')))
@weight_pages.route("/profile", methods=["GET", "POST"])
@login_required
def profile():
from models import User, Scale
u1 = User.query.get(current_user._user)
form = ProfileForm(obj=u1)
form.default_scale.choices = [(g.name, g.name)
for g in Scale.query.order_by('name')]
form.default_scale.choices.insert(0, ("", "Select..."))
if form.validate_on_submit():
if 'firstname' in request.form:
u1.firstname = request.form['firstname']
if 'lastname' in request.form:
u1.lastname = request.form['lastname']
if 'email' in request.form:
u1.email = request.form['email']
if 'password' in request.form:
u1.set_password(request.form['password'])
if 'default_scale' in request.form:
u1.default_scale_name = request.form['default_scale']
db.session.add(u1)
db.session.commit()
flash('Data saved', 'info')
if u1.default_scale_name:
form.default_scale.data = u1.default_scale_name
return render_template('profile.html',
form=form)
@weight_pages.route("/weight/")
@weight_pages.route("/weight/<wid>/", methods=["GET","POST"])
@login_required
def weight(wid=None):
from models import Weight, Scale, User
import math
if not wid and 'wid' in request.args:
wid = request.args.get('wid')
if wid:
# edit weight
elem = Weight.query.get(wid)
# get min/max for buttons
x = Weight.query.order_by(Weight.wdate.desc()).limit(20).all()
if x:
wmin = int(math.floor(min([i.weight for i in x])) - 1)
wmax = int(math.ceil(max([i.weight for i in x])) + 2)
else:
wmin=70
wmax=75
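        # Illustrative example: if the last 20 weights span 72.4 .. 74.9,
        # then wmin = floor(72.4) - 1 = 71 and wmax = ceil(74.9) + 2 = 77,
        # so range(wmin, wmax) renders buttons 71..76.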
if elem:
# is this weight from logged_in user? or is user admin?
if elem.user_username == current_user._user or \
current_user._user == 'admin':
form = WeightForm(obj=elem)
else:
# unauthorized
abort(401)
else:
# add
form = WeightForm()
# get scales list
form.scale_name.choices = [(g.name, g.name)
for g in Scale.query.order_by('name')]
form.scale_name.choices.insert(0, ("", "Select..."))
if form.validate_on_submit():
if not elem:
elem = Weight(weight=request.form['weight'])
if 'weight' in request.form:
elem.weight = request.form['weight']
if 'wdate' in request.form:
elem.wdate = datetime.datetime.strptime(request.form['wdate'],
'%Y-%m-%d')
if 'scale_name' in request.form:
elem.scale_name = request.form['scale_name']
elem.user_username = current_user._user
db.session.add(elem)
db.session.commit()
flash('Data saved [%s with %s]' % (elem.wdate, elem.weight),
'info')
# write to fitbitapi
# u1 = User.query.get(current_user._user)
# if u1.fitbit_user_key is not None and \
# u1.fitbit_user_secret is not None:
# fitbit_push(u1, elem.wdate, elem.weight)
# flash('Data pushed to fitbit', 'info')
if elem:
if elem.scale_name:
form.scale_name.data = elem.scale_name
else:
u1 = User.query.get(current_user._user)
if u1.default_scale_name:
form.scale_name.data = u1.default_scale_name
return render_template('weight_edit.html',
form=form,
wrange=range(wmin,wmax),)
else:
# show table of weights
page = request.args.get('page', '')
if page.isdigit():
page = int(page)
else:
page = 1
elements = Weight.query.order_by('wdate desc').filter_by(
user_username=unicode(current_user._user)).paginate(
page, per_page=10)
return render_template('weight_list.html',
elements=elements.items,
paginate=elements,
show_comment=False,)
@weight_pages.route("/scale/")
@weight_pages.route("/scale/<sid>/", methods=["GET","POST"])
@login_required
def scale(sid=None):
from models import Scale
if not sid and 'sid' in request.args:
sid = request.args.get('sid')
if sid:
        # edit scale
elem = Scale.query.get(sid)
if elem:
form = ScaleForm(obj=elem)
else:
# add
form = ScaleForm()
if form.validate_on_submit():
if not elem:
elem = Scale(name=request.form['name'])
if 'name' in request.form:
elem.name = request.form['name']
if 'owner' in request.form:
elem.owner = request.form['owner']
if 'model' in request.form:
elem.model = request.form['model']
if 'comment' in request.form:
elem.comment = request.form['comment']
db.session.add(elem)
db.session.commit()
flash('Data saved', 'info')
return render_template('scale_edit.html',
form=form,)
else:
        # show table of scales
page = request.args.get('page', '')
if page.isdigit():
page = int(page)
else:
page = 1
elements = Scale.query.order_by('name').paginate(
page, per_page=10)
return render_template('scale_list.html',
elements=elements.items,
paginate=elements,)
|
|
import calendar
from datetime import datetime, timedelta
import json
import pytz
import requests
import time
import urllib.parse
from ditto import TITLE, VERSION
from .models import Account, Album, Artist, Scrobble, Track
from .utils import slugify_name
from ..core.utils import datetime_now
LASTFM_API_ENDPOINT = "http://ws.audioscrobbler.com/2.0/"
class FetchError(Exception):
pass
class ScrobblesFetcher(object):
"""
Fetches scrobbles from the API for one Account.
    Use like:
fetcher = ScrobblesFetcher(account)
And then one of these ('recent' is the default):
results = fetcher.fetch(fetch_type='recent')
results = fetcher.fetch(fetch_type='all')
results = fetcher.fetch(fetch_type='days', days=3)
"""
# How many scrobbles do we fetch per page of results?
items_per_page = 200
def __init__(self, account):
# Will be an Account object, passed into init()
self.account = None
# We'll set this to a datetime if we're fetching scrobbles since x.
self.min_datetime = None
self.page_number = 1
self.total_pages = 1
self.results_count = 0
# What we'll return:
self.return_value = {"fetched": 0}
if isinstance(account, Account):
self.return_value["account"] = str(account)
else:
raise ValueError("An Account object is required")
if account.has_credentials():
self.account = account
else:
self.return_value["success"] = False
self.return_value["messages"] = ["Account has no API credentials"]
def fetch(self, fetch_type="recent", days=None):
"""
Fetch and save scrobbles.
Keyword arguments:
fetch_type -- 'all', 'days' or 'recent'. The latter will fetch
scrobbles since the most recent Scrobble we already have.
days -- if fetch_type is 'days', this should be an integer.
Returns a dict like:
{'success': True, 'account': 'gyford', 'fetched': 47,}
Or:
{'success': False, 'account': 'gyford', 'messages': ['Oops..',],}
"""
if self.account and self.account.is_active is False:
self.return_value["success"] = False
self.return_value["messages"] = [
"The Account %s is currently marked as inactive."
% self.account.username
]
return self.return_value
valid_fetch_types = ["all", "days", "recent"]
if fetch_type not in valid_fetch_types:
raise ValueError(
"fetch_type should be one of %s" % ", ".join(valid_fetch_types)
)
if fetch_type == "days":
try:
test = days + 1 # noqa: F841
except TypeError:
raise ValueError("days argument should be an integer")
self.min_datetime = datetime_now() - timedelta(days=days)
elif fetch_type == "recent":
try:
scrobble = Scrobble.objects.latest("post_time")
self.min_datetime = scrobble.post_time
except Scrobble.DoesNotExist:
pass
self._fetch_pages()
if self._not_failed():
self.return_value["success"] = True
self.return_value["fetched"] = self.results_count
return self.return_value
def _fetch_pages(self):
while self.page_number <= self.total_pages and self._not_failed():
self._fetch_page()
self.page_number += 1
time.sleep(0.5)
def _fetch_page(self):
"""
Fetch a single page of results.
Uses the value of self.page_number.
"""
fetch_time = datetime_now()
try:
results = self._send_request()
except FetchError as e:
self.return_value["success"] = False
self.return_value["messages"] = [str(e)]
return
for scrobble in results:
if "date" in scrobble:
# Don't save nowplaying scrobbles, that have no 'date'.
self._save_scrobble(scrobble, fetch_time)
self.results_count += 1
return
    def _not_failed(self):
        """Has everything gone smoothly so far? i.e., no failure registered?"""
if "success" not in self.return_value or self.return_value["success"] is True:
return True
else:
return False
def _api_method(self):
"The name of the API method."
return "user.getrecenttracks"
def _api_args(self):
"Returns a dict of args for the API call."
args = {
"user": self.account.username,
"api_key": self.account.api_key,
"format": "json",
"method": self._api_method(),
"page": self.page_number,
"limit": self.items_per_page,
}
if self.min_datetime:
# Turn our datetime object into a unix timestamp:
args["from"] = calendar.timegm(self.min_datetime.timetuple())
return args
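    # calendar.timegm() treats the datetime as UTC, e.g. (a sketch):
    #   calendar.timegm(datetime(2016, 1, 1).timetuple()) == 1451606400
    # which is the unix timestamp format the 'from' argument expects.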
def _send_request(self):
"""
Send a request to the Last.fm API.
Raises FetchError if something goes wrong.
Returns a list of results if all goes well.
"""
query_string = urllib.parse.urlencode(self._api_args())
url = "{}?{}".format(LASTFM_API_ENDPOINT, query_string)
try:
response = requests.get(
url, headers={"User-Agent": "Mozilla/5.0 (%s v%s)" % (TITLE, VERSION)}
)
response.raise_for_status() # Raises an exception on HTTP error.
except requests.exceptions.RequestException as e:
raise FetchError(
"Error when fetching Scrobbles (page %s): %s"
% (self.page_number, str(e))
)
response.encoding = "utf-8"
results = json.loads(response.text)
if "error" in results:
raise FetchError(
"Error %s when fetching Scrobbles (page %s): %s"
% (results["error"], self.page_number, results["message"])
)
# Set total number of pages first time round:
attr = results["recenttracks"]["@attr"]
if self.page_number == 1 and "totalPages" in attr:
self.total_pages = int(attr["totalPages"])
return results["recenttracks"]["track"]
def _save_scrobble(self, scrobble, fetch_time):
"""
Saves/updates a scrobble.
Arguments:
scrobble -- A dict of data from the Last.fm API.
fetch_time -- Datetime of when the data was fetched.
"""
artist_slug, track_slug = self._get_slugs(scrobble["url"])
artist, created = Artist.objects.update_or_create(
slug=artist_slug.lower(),
defaults={
"name": scrobble["artist"]["#text"],
"original_slug": artist_slug,
"mbid": scrobble["artist"]["mbid"], # Might be "".
},
)
track, created = Track.objects.update_or_create(
slug=track_slug.lower(),
artist=artist,
defaults={
"name": scrobble["name"],
"original_slug": track_slug,
"mbid": scrobble["mbid"], # Might be "".
},
)
if scrobble["album"]["#text"] == "":
album = None
else:
# The API data doesn't provide a URL/slug for the album, so
# we make our own:
album_slug = slugify_name(scrobble["album"]["#text"])
album, created = Album.objects.update_or_create(
slug=album_slug.lower(),
artist=artist,
defaults={
"name": scrobble["album"]["#text"],
"original_slug": album_slug,
"mbid": scrobble["album"]["mbid"], # Might be "".
},
)
# Unixtime to datetime object:
scrobble_time = datetime.utcfromtimestamp(int(scrobble["date"]["uts"])).replace(
tzinfo=pytz.utc
)
scrobble_obj, created = Scrobble.objects.update_or_create(
account=self.account,
track=track,
post_time=scrobble_time,
defaults={
"artist": artist,
"raw": json.dumps(scrobble),
"fetch_time": fetch_time,
"album": album,
},
)
return scrobble_obj
def _get_slugs(self, scrobble_url):
"""
Get the artist and track slugs from a scrobble's URL.
The scrobble's URL is also the Track's URL.
scrobble_url is like 'https://www.last.fm/music/Artist/_/Track'
returns two strings, artist_slug and track_slug.
"""
url = scrobble_url.rstrip("/")
# Need to replace semicolons as urlparse() treats them (legitimately)
# as alternatives to '&' as a query string separator, and so omits
# anything after them.
url = url.replace(";", "%3B")
# www.last.fm/music/Artist/_/Track':
url_path = urllib.parse.urlparse(url).path
path_parts = url_path.split("/")
artist_slug = path_parts[-3] # 'Artist'
track_slug = path_parts[-1] # 'Track'
# Put those naughty semicolons back in:
artist_slug = artist_slug.replace("%3B", ";")
track_slug = track_slug.replace("%3B", ";")
return artist_slug, track_slug
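    # Example (a sketch): for 'https://www.last.fm/music/Artist/_/Track'
    # this returns ('Artist', 'Track'); a literal semicolon in either slug
    # survives thanks to the temporary %3B substitution above.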
class ScrobblesMultiAccountFetcher(object):
"""
For fetching Scrobbles for ALL or ONE account(s).
Usage example:
results = ScrobblesMultiAccountFetcher().fetch(fetch_type='recent')
Or:
results = ScrobblesMultiAccountFetcher(username='bob').fetch(fetch_type='recent')
results will be a list of dicts containing info about what was fetched (or
went wrong) for each account.
"""
# Will be a list of Account objects.
accounts = []
def __init__(self, username=None):
"""
Gets all of the Accounts, or the single Account specified.
username -- If username is set, we only use that Account, if active.
If it's not set, we use all active Accounts.
"""
self.return_value = []
if username is None:
# Get all active Accounts.
self.accounts = list(Account.objects.filter(is_active=True))
if len(self.accounts) == 0:
raise FetchError("No active Accounts were found to fetch.")
else:
# Find the Account associated with username.
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise FetchError(
"There is no Account with the username '%s'" % username
)
if account.is_active is False:
raise FetchError(
"The Account with the username '%s' is marked as inactive."
% username
)
self.accounts = [account]
def fetch(self, **kwargs):
for account in self.accounts:
self.return_value.append(ScrobblesFetcher(account).fetch(**kwargs))
return self.return_value
|
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# Database Module
# --------------------
from __future__ import unicode_literals
import MySQLdb
from MySQLdb.times import DateTimeDeltaType
from markdown2 import UnicodeWithAttrs
import warnings
import datetime
import frappe
import frappe.defaults
import re
import frappe.model.meta
from frappe.utils import now, get_datetime, cstr
from frappe import _
from types import StringType, UnicodeType
class Database:
"""
    Open a database connection with the given parameters. If use_default is True, use the
    login details from `conf.py`. This is called by the request handler and is accessible using
    the `db` global variable. The `sql` method is also global, to run queries.
"""
def __init__(self, host=None, user=None, password=None, ac_name=None, use_default = 0):
self.host = host or frappe.conf.db_host or 'localhost'
self.user = user or frappe.conf.db_name
self._conn = None
if ac_name:
self.user = self.get_db_login(ac_name) or frappe.conf.db_name
if use_default:
self.user = frappe.conf.db_name
self.transaction_writes = 0
self.auto_commit_on_many_writes = 0
self.password = password or frappe.conf.db_password
self.value_cache = {}
def get_db_login(self, ac_name):
return ac_name
def connect(self):
"""Connects to a database as set in `site_config.json`."""
warnings.filterwarnings('ignore', category=MySQLdb.Warning)
self._conn = MySQLdb.connect(user=self.user, host=self.host, passwd=self.password,
use_unicode=True, charset='utf8')
self._conn.converter[246]=float
self._conn.converter[12]=get_datetime
self._conn.encoders[UnicodeWithAttrs] = self._conn.encoders[UnicodeType]
self._conn.encoders[DateTimeDeltaType] = self._conn.encoders[StringType]
MYSQL_OPTION_MULTI_STATEMENTS_OFF = 1
self._conn.set_server_option(MYSQL_OPTION_MULTI_STATEMENTS_OFF)
self._cursor = self._conn.cursor()
if self.user != 'root':
self.use(self.user)
frappe.local.rollback_observers = []
def use(self, db_name):
"""`USE` db_name."""
self._conn.select_db(db_name)
self.cur_db_name = db_name
def validate_query(self, q):
"""Throw exception for dangerous queries: `ALTER`, `DROP`, `TRUNCATE` if not `Administrator`."""
cmd = q.strip().lower().split()[0]
if cmd in ['alter', 'drop', 'truncate'] and frappe.session.user != 'Administrator':
frappe.throw(_("Not permitted"), frappe.PermissionError)
def sql(self, query, values=(), as_dict = 0, as_list = 0, formatted = 0,
debug=0, ignore_ddl=0, as_utf8=0, auto_commit=0, update=None):
"""Execute a SQL query and fetch all rows.
:param query: SQL query.
:param values: List / dict of values to be escaped and substituted in the query.
:param as_dict: Return as a dictionary.
:param as_list: Always return as a list.
:param formatted: Format values like date etc.
:param debug: Print query and `EXPLAIN` in debug log.
:param ignore_ddl: Catch exception if table, column missing.
:param as_utf8: Encode values as UTF 8.
:param auto_commit: Commit after executing the query.
:param update: Update this dict to all rows (if returned `as_dict`).
Examples:
# return customer names as dicts
frappe.db.sql("select name from tabCustomer", as_dict=True)
# return names beginning with a
frappe.db.sql("select name from tabCustomer where name like %s", "a%")
# values as dict
frappe.db.sql("select name from tabCustomer where name like %(name)s and owner=%(owner)s",
{"name": "a%", "owner":"test@example.com"})
"""
if not self._conn:
self.connect()
# in transaction validations
self.check_transaction_status(query)
# autocommit
if auto_commit: self.commit()
# execute
try:
if values!=():
if isinstance(values, dict):
values = dict(values)
# MySQL-python==1.2.5 hack!
if not isinstance(values, (dict, tuple, list)):
values = (values,)
if debug:
try:
self.explain_query(query, values)
frappe.errprint(query % values)
except TypeError:
frappe.errprint([query, values])
if (frappe.conf.get("logging") or False)==2:
frappe.log("<<<< query")
frappe.log(query)
frappe.log("with values:")
frappe.log(values)
frappe.log(">>>>")
self._cursor.execute(query, values)
else:
if debug:
self.explain_query(query)
frappe.errprint(query)
if (frappe.conf.get("logging") or False)==2:
frappe.log("<<<< query")
frappe.log(query)
frappe.log(">>>>")
self._cursor.execute(query)
except Exception, e:
# ignore data definition errors
if ignore_ddl and e.args[0] in (1146,1054,1091):
pass
else:
raise
if auto_commit: self.commit()
# scrub output if required
if as_dict:
ret = self.fetch_as_dict(formatted, as_utf8)
if update:
for r in ret:
r.update(update)
return ret
elif as_list:
return self.convert_to_lists(self._cursor.fetchall(), formatted, as_utf8)
elif as_utf8:
return self.convert_to_lists(self._cursor.fetchall(), formatted, as_utf8)
else:
return self._cursor.fetchall()
def explain_query(self, query, values=None):
"""Print `EXPLAIN` in error log."""
try:
frappe.errprint("--- query explain ---")
if values is None:
self._cursor.execute("explain " + query)
else:
self._cursor.execute("explain " + query, values)
import json
frappe.errprint(json.dumps(self.fetch_as_dict(), indent=1))
frappe.errprint("--- query explain end ---")
except:
frappe.errprint("error in query explain")
def sql_list(self, query, values=(), debug=False):
"""Return data as list of single elements (first column).
Example:
# doctypes = ["DocType", "DocField", "User", ...]
doctypes = frappe.db.sql_list("select name from DocType")
"""
return [r[0] for r in self.sql(query, values, debug=debug)]
def sql_ddl(self, query, values=(), debug=False):
"""Commit and execute a query. DDL (Data Definition Language) queries that alter schema
autocommit in MariaDB."""
self.commit()
        self.sql(query, values, debug=debug)
    def check_transaction_status(self, query):
        """Raises exception if more than 200,000 `INSERT`, `UPDATE` queries are
        executed in one transaction. This is to ensure that writes are always flushed, otherwise this
        could cause the system to hang."""
if self.transaction_writes and \
query and query.strip().split()[0].lower() in ['start', 'alter', 'drop', 'create', "begin", "truncate"]:
raise Exception, 'This statement can cause implicit commit'
if query and query.strip().lower() in ('commit', 'rollback'):
self.transaction_writes = 0
if query[:6].lower() in ('update', 'insert', 'delete'):
self.transaction_writes += 1
if self.transaction_writes > 200000:
if self.auto_commit_on_many_writes:
frappe.db.commit()
else:
frappe.throw(_("Too many writes in one request. Please send smaller requests"), frappe.ValidationError)
def fetch_as_dict(self, formatted=0, as_utf8=0):
"""Internal. Converts results to dict."""
result = self._cursor.fetchall()
ret = []
needs_formatting = self.needs_formatting(result, formatted)
for r in result:
row_dict = frappe._dict({})
for i in range(len(r)):
if needs_formatting:
val = self.convert_to_simple_type(r[i], formatted)
else:
val = r[i]
if as_utf8 and type(val) is unicode:
val = val.encode('utf-8')
row_dict[self._cursor.description[i][0]] = val
ret.append(row_dict)
return ret
def needs_formatting(self, result, formatted):
"""Returns true if the first row in the result has a Date, Datetime, Long Int."""
if result and result[0]:
for v in result[0]:
if isinstance(v, (datetime.date, datetime.timedelta, datetime.datetime, long)):
return True
if formatted and isinstance(v, (int, float)):
return True
return False
def get_description(self):
"""Returns result metadata."""
return self._cursor.description
    def convert_to_simple_type(self, v, formatted=0):
        """Format date, time, longint values."""
        # NOTE: this early return leaves the legacy formatting code below
        # unreachable; the dead code is retained for reference.
        return v
from frappe.utils import formatdate, fmt_money
if isinstance(v, (datetime.date, datetime.timedelta, datetime.datetime, long)):
if isinstance(v, datetime.date):
v = unicode(v)
if formatted:
v = formatdate(v)
# time
elif isinstance(v, (datetime.timedelta, datetime.datetime)):
v = unicode(v)
# long
elif isinstance(v, long):
v=int(v)
# convert to strings... (if formatted)
if formatted:
if isinstance(v, float):
v=fmt_money(v)
elif isinstance(v, int):
v = unicode(v)
return v
def convert_to_lists(self, res, formatted=0, as_utf8=0):
"""Convert tuple output to lists (internal)."""
nres = []
needs_formatting = self.needs_formatting(res, formatted)
for r in res:
nr = []
for c in r:
if needs_formatting:
val = self.convert_to_simple_type(c, formatted)
else:
val = c
if as_utf8 and type(val) is unicode:
val = val.encode('utf-8')
nr.append(val)
nres.append(nr)
return nres
def convert_to_utf8(self, res, formatted=0):
"""Encode result as UTF-8."""
nres = []
for r in res:
nr = []
for c in r:
if type(c) is unicode:
c = c.encode('utf-8')
nr.append(self.convert_to_simple_type(c, formatted))
nres.append(nr)
return nres
def build_conditions(self, filters):
"""Convert filters sent as dict, lists to SQL conditions. filter's key
is passed by map function, build conditions like:
* ifnull(`fieldname`, default_value) = %(fieldname)s
* `fieldname` [=, !=, >, >=, <, <=] %(fieldname)s
"""
conditions = []
values = {}
def _build_condition(key):
"""
filter's key is passed by map function
build conditions like:
* ifnull(`fieldname`, default_value) = %(fieldname)s
* `fieldname` [=, !=, >, >=, <, <=] %(fieldname)s
"""
_operator = "="
_rhs = " %(" + key + ")s"
value = filters.get(key)
values[key] = value
if isinstance(value, (list, tuple)):
                # value is a tuple like ("!=", 0)
_operator = value[0]
values[key] = value[1]
if isinstance(value[1], (tuple, list)):
# value is a list in tuple ("in", ("A", "B"))
inner_list = []
for i, v in enumerate(value[1]):
inner_key = "{0}_{1}".format(key, i)
values[inner_key] = v
inner_list.append("%({0})s".format(inner_key))
_rhs = " ({0})".format(", ".join(inner_list))
del values[key]
if _operator not in ["=", "!=", ">", ">=", "<", "<=", "like", "in", "not in", "not like"]:
_operator = "="
if "[" in key:
split_key = key.split("[")
condition = "ifnull(`" + split_key[0] + "`, " + split_key[1][:-1] + ") " \
+ _operator + _rhs
else:
condition = "`" + key + "` " + _operator + _rhs
conditions.append(condition)
if isinstance(filters, basestring):
filters = { "name": filters }
for f in filters:
_build_condition(f)
return " and ".join(conditions), values
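    # Worked example (a sketch; dict ordering may vary):
    #   build_conditions({"status": "Open", "age": (">", 30)})
    # returns
    #   ('`status` = %(status)s and `age` > %(age)s',
    #    {'status': 'Open', 'age': 30})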
def get(self, doctype, filters=None, as_dict=True, cache=False):
"""Returns `get_value` with fieldname='*'"""
return self.get_value(doctype, filters, "*", as_dict=as_dict, cache=cache)
def get_value(self, doctype, filters=None, fieldname="name", ignore=None, as_dict=False, debug=False, cache=False):
"""Returns a document property or list of properties.
:param doctype: DocType name.
:param filters: Filters like `{"x":"y"}` or name of the document. `None` if Single DocType.
:param fieldname: Column name.
:param ignore: Don't raise exception if table, column is missing.
:param as_dict: Return values as dict.
:param debug: Print query in error log.
Example:
# return first customer starting with a
frappe.db.get_value("Customer", {"name": ("like a%")})
# return last login of **User** `test@example.com`
frappe.db.get_value("User", "test@example.com", "last_login")
last_login, last_ip = frappe.db.get_value("User", "test@example.com",
["last_login", "last_ip"])
# returns default date_format
frappe.db.get_value("System Settings", None, "date_format")
"""
ret = self.get_values(doctype, filters, fieldname, ignore, as_dict, debug, cache=cache)
return ((len(ret[0]) > 1 or as_dict) and ret[0] or ret[0][0]) if ret else None
def get_values(self, doctype, filters=None, fieldname="name", ignore=None, as_dict=False,
debug=False, order_by=None, update=None, cache=False):
"""Returns multiple document properties.
:param doctype: DocType name.
:param filters: Filters like `{"x":"y"}` or name of the document.
:param fieldname: Column name.
:param ignore: Don't raise exception if table, column is missing.
:param as_dict: Return values as dict.
:param debug: Print query in error log.
Example:
# return first customer starting with a
customers = frappe.db.get_values("Customer", {"name": ("like a%")})
# return last login of **User** `test@example.com`
user = frappe.db.get_values("User", "test@example.com", "*")[0]
"""
out = None
if cache and isinstance(filters, basestring) and \
(doctype, filters, fieldname) in self.value_cache:
return self.value_cache[(doctype, filters, fieldname)]
if isinstance(filters, list):
out = self._get_value_for_many_names(doctype, filters, fieldname, debug=debug)
else:
fields = fieldname
if fieldname!="*":
if isinstance(fieldname, basestring):
fields = [fieldname]
else:
fields = fieldname
if (filters is not None) and (filters!=doctype or doctype=="DocType"):
try:
out = self._get_values_from_table(fields, filters, doctype, as_dict, debug, order_by, update)
except Exception, e:
if ignore and e.args[0] in (1146, 1054):
# table or column not found, return None
out = None
elif (not ignore) and e.args[0]==1146:
# table not found, look in singles
out = self.get_values_from_single(fields, filters, doctype, as_dict, debug, update)
else:
raise
else:
out = self.get_values_from_single(fields, filters, doctype, as_dict, debug, update)
if cache and isinstance(filters, basestring):
self.value_cache[(doctype, filters, fieldname)] = out
return out
def get_values_from_single(self, fields, filters, doctype, as_dict=False, debug=False, update=None):
"""Get values from `tabSingles` (Single DocTypes) (internal).
:param fields: List of fields,
:param filters: Filters (dict).
:param doctype: DocType name.
"""
# TODO
# if not frappe.model.meta.is_single(doctype):
# raise frappe.DoesNotExistError("DocType", doctype)
if fields=="*" or isinstance(filters, dict):
# check if single doc matches with filters
values = self.get_singles_dict(doctype)
if isinstance(filters, dict):
for key, value in filters.items():
if values.get(key) != value:
return []
if as_dict:
return values and [values] or []
if isinstance(fields, list):
return [map(lambda d: values.get(d), fields)]
else:
r = self.sql("""select field, value
from tabSingles where field in (%s) and doctype=%s""" \
% (', '.join(['%s'] * len(fields)), '%s'),
tuple(fields) + (doctype,), as_dict=False, debug=debug)
if as_dict:
if r:
r = frappe._dict(r)
if update:
r.update(update)
return [r]
else:
return []
else:
return r and [[i[1] for i in r]] or []
def get_singles_dict(self, doctype):
"""Get Single DocType as dict."""
return frappe._dict(self.sql("""select field, value from
tabSingles where doctype=%s""", doctype))
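    # tabSingles keeps one (doctype, field, value) row per field, so e.g. a
    # row ('System Settings', 'date_format', 'dd-mm-yyyy') makes
    # get_singles_dict("System Settings") return {'date_format': 'dd-mm-yyyy'}
    # (illustrative values).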
def get_all(self, *args, **kwargs):
return frappe.get_all(*args, **kwargs)
def get_list(self, *args, **kwargs):
return frappe.get_list(*args, **kwargs)
def get_single_value(self, doctype, fieldname, cache=False):
"""Get property of Single DocType. Cache locally by default"""
value = self.value_cache.setdefault(doctype, {}).get(fieldname)
if value:
return value
val = self.sql("""select value from
tabSingles where doctype=%s and field=%s""", (doctype, fieldname))
val = val[0][0] if val else None
if val=="0" or val=="1":
# check type
val = int(val)
self.value_cache[doctype][fieldname] = val
return val
def get_singles_value(self, *args, **kwargs):
"""Alias for get_single_value"""
return self.get_single_value(*args, **kwargs)
def _get_values_from_table(self, fields, filters, doctype, as_dict, debug, order_by=None, update=None):
fl = []
if isinstance(fields, (list, tuple)):
for f in fields:
if "(" in f or " as " in f: # function
fl.append(f)
else:
fl.append("`" + f + "`")
fl = ", ".join(fl)
else:
fl = fields
if fields=="*":
as_dict = True
conditions, values = self.build_conditions(filters)
order_by = ("order by " + order_by) if order_by else ""
r = self.sql("select {0} from `tab{1}` where {2} {3}".format(fl, doctype,
conditions, order_by), values, as_dict=as_dict, debug=debug, update=update)
return r
def _get_value_for_many_names(self, doctype, names, field, debug=False):
names = filter(None, names)
if names:
return dict(self.sql("select name, `%s` from `tab%s` where name in (%s)" \
% (field, doctype, ", ".join(["%s"]*len(names))), names, debug=debug))
else:
return {}
def update(self, *args, **kwargs):
"""Update multiple values. Alias for `set_value`."""
return self.set_value(*args, **kwargs)
def set_value(self, dt, dn, field, val, modified=None, modified_by=None,
update_modified=True, debug=False):
"""Set a single value in the database, do not call the ORM triggers
but update the modified timestamp (unless specified not to).
**Warning:** this function will not call Document events and should be avoided in normal cases.
:param dt: DocType name.
:param dn: Document name.
:param field: Property / field name.
        :param val: Value to be updated.
:param modified: Use this as the `modified` timestamp.
:param modified_by: Set this user as `modified_by`.
:param update_modified: default True. Set as false, if you don't want to update the timestamp.
:param debug: Print the query in the developer / js console.
"""
if not modified:
modified = now()
if not modified_by:
modified_by = frappe.session.user
if dn and dt!=dn:
conditions, values = self.build_conditions(dn)
values.update({"val": val, "modified": modified, "modified_by": modified_by})
if update_modified:
self.sql("""update `tab{0}` set `{1}`=%(val)s, modified=%(modified)s, modified_by=%(modified_by)s where
{2}""".format(dt, field, conditions), values, debug=debug)
else:
self.sql("""update `tab{0}` set `{1}`=%(val)s where
{2}""".format(dt, field, conditions), values, debug=debug)
else:
self.sql("delete from tabSingles where field=%s and doctype=%s", (field, dt))
self.sql("insert into tabSingles(doctype, field, value) values (%s, %s, %s)",
(dt, field, val), debug=debug)
if update_modified and (field not in ("modified", "modified_by")):
self.set_value(dt, dn, "modified", modified)
self.set_value(dt, dn, "modified_by", modified_by)
if dt in self.value_cache:
del self.value_cache[dt]
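    # Usage sketch (illustrative values): update one column without firing
    # Document events, bumping `modified` as a side effect:
    #   frappe.db.set_value("User", "test@example.com", "last_login", now())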
def set(self, doc, field, val):
"""Set value in document. **Avoid**"""
doc.db_set(field, val)
def touch(self, doctype, docname):
"""Update the modified timestamp of this document."""
from frappe.utils import now
modified = now()
frappe.db.sql("""update `tab{doctype}` set `modified`=%s
where name=%s""".format(doctype=doctype), (modified, docname))
return modified
    def set_temp(self, value):
        """Set a temporary value and return a key."""
key = frappe.generate_hash()
frappe.cache().hset("temp", key, value)
return key
    def get_temp(self, key):
        """Return the temporary value and delete it."""
return frappe.cache().hget("temp", key)
def set_global(self, key, val, user='__global'):
"""Save a global key value. Global values will be automatically set if they match fieldname."""
self.set_default(key, val, user)
def get_global(self, key, user='__global'):
"""Returns a global key value."""
return self.get_default(key, user)
def set_default(self, key, val, parent="__default", parenttype=None):
"""Sets a global / user default value."""
frappe.defaults.set_default(key, val, parent, parenttype)
def add_default(self, key, val, parent="__default", parenttype=None):
"""Append a default value for a key, there can be multiple default values for a particular key."""
frappe.defaults.add_default(key, val, parent, parenttype)
	def get_default(self, key, parent="__default"):
		"""Returns the first default value if multiple are set, else the single value."""
		d = frappe.defaults.get_defaults(parent).get(key)
		return d[0] if isinstance(d, list) else d
def get_defaults_as_list(self, key, parent="__default"):
"""Returns default values as a list."""
d = frappe.defaults.get_default(key, parent)
		return [d] if isinstance(d, basestring) else d
def get_defaults(self, key=None, parent="__default"):
"""Get all defaults"""
if key:
return frappe.defaults.get_defaults(parent).get(key)
else:
return frappe.defaults.get_defaults(parent)
def begin(self):
pass
#self.sql("start transaction")
def commit(self):
"""Commit current transaction. Calls SQL `COMMIT`."""
self.sql("commit")
frappe.local.rollback_observers = []
def rollback(self):
"""`ROLLBACK` current transaction."""
self.sql("rollback")
self.begin()
for obj in frappe.local.rollback_observers:
if hasattr(obj, "on_rollback"):
obj.on_rollback()
frappe.local.rollback_observers = []
	def field_exists(self, dt, fn):
		"""Return true if field exists."""
		return self.sql("select name from tabDocField where fieldname=%s and parent=%s", (fn, dt))
def table_exists(self, tablename):
"""Returns True if table exists."""
return ("tab" + tablename) in self.get_tables()
def get_tables(self):
return [d[0] for d in self.sql("show tables")]
def a_row_exists(self, doctype):
"""Returns True if atleast one row exists."""
return self.sql("select name from `tab{doctype}` limit 1".format(doctype=doctype))
def exists(self, dt, dn=None):
"""Returns true if document exists.
:param dt: DocType name.
:param dn: Document name or filter dict."""
if isinstance(dt, basestring):
if dt!="DocType" and dt==dn:
return True # single always exists (!)
try:
return self.get_value(dt, dn, "name")
except:
return None
elif isinstance(dt, dict) and dt.get('doctype'):
try:
conditions = []
for d in dt:
if d == 'doctype': continue
					conditions.append('`%s` = "%s"' % (d, cstr(dt[d]).replace('"', '\\"')))
return self.sql('select name from `tab%s` where %s' % \
(dt['doctype'], " and ".join(conditions)))
except:
return None
def count(self, dt, filters=None, debug=False):
"""Returns `COUNT(*)` for given DocType and filters."""
if filters:
conditions, filters = self.build_conditions(filters)
return frappe.db.sql("""select count(*)
from `tab%s` where %s""" % (dt, conditions), filters, debug=debug)[0][0]
else:
return frappe.db.sql("""select count(*)
from `tab%s`""" % (dt,))[0][0]
def get_creation_count(self, doctype, minutes):
"""Get count of records created in the last x minutes"""
from frappe.utils import now_datetime
from dateutil.relativedelta import relativedelta
return frappe.db.sql("""select count(name) from `tab{doctype}`
where creation >= %s""".format(doctype=doctype),
now_datetime() - relativedelta(minutes=minutes))[0][0]
def get_table_columns(self, doctype):
"""Returns list of column names from given doctype."""
return [r[0] for r in self.sql("DESC `tab%s`" % doctype)]
def has_column(self, doctype, column):
"""Returns True if column exists in database."""
return column in self.get_table_columns(doctype)
def add_index(self, doctype, fields, index_name=None):
"""Creates an index with given fields if not already created.
Index name will be `fieldname1_fieldname2_index`"""
if not index_name:
index_name = "_".join(fields) + "_index"
		# strip any index length qualifier, e.g. "(10)", from the index name
index_name = re.sub(r"\s*\([^)]+\)\s*", r"", index_name)
if not frappe.db.sql("""show index from `tab%s` where Key_name="%s" """ % (doctype, index_name)):
frappe.db.commit()
frappe.db.sql("""alter table `tab%s`
add index `%s`(%s)""" % (doctype, index_name, ", ".join(fields)))
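	# e.g. add_index("Note", ["title(10)", "owner"]) (illustrative doctype)
	# creates index `title_owner_index` on `tabNote`; the "(10)" length
	# qualifier stays in the column list but is stripped from the index name.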
def add_unique(self, doctype, fields, constraint_name=None):
if isinstance(fields, basestring):
fields = [fields]
if not constraint_name:
constraint_name = "unique_" + "_".join(fields)
if not frappe.db.sql("""select CONSTRAINT_NAME from information_schema.TABLE_CONSTRAINTS
where table_name=%s and constraint_type='UNIQUE' and CONSTRAINT_NAME=%s""",
('tab' + doctype, constraint_name)):
frappe.db.commit()
frappe.db.sql("""alter table `tab%s`
add unique `%s`(%s)""" % (doctype, constraint_name, ", ".join(fields)))
def close(self):
"""Close database connection."""
if self._conn:
self._cursor.close()
self._conn.close()
self._cursor = None
self._conn = None
def escape(self, s, percent=True):
"""Excape quotes and percent in given string."""
if isinstance(s, unicode):
s = (s or "").encode("utf-8")
s = unicode(MySQLdb.escape_string(s), "utf-8").replace("`", "\\`")
		# NOTE: % escaping is kept separate because it should only be done when
		# using the LIKE operator, or when a python format string is used to
		# build a query that already contains a %s placeholder,
		# for example: sql("select name from `tabUser` where name=%s and {0}".format(conditions), something)
		# percent defaults to True, as this is the most frequent use case;
		# ideally we shouldn't have to use escape() at all and should strive to pass values via the values argument of sql
if percent:
s = s.replace("%", "%%")
return s
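# Illustrative behaviour of the escape() method above:
#     escape("50% off") == "50%% off"               # % doubled by default
#     escape("50% off", percent=False) == "50% off"
#     escape('it"s')                                 # quotes are backslash-escaped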
from unittest import TestCase
from ..config import Config
from ..state import JsonState, MongodbState
from pymongo import MongoClient
import os
from random import choice
USER1 = '29:1SPw4GoUNGtDmuYex45S13g-1zgri1qp43uA345yjSFc'
USER2 = '29:1SPw4GoUNGtDmuYex45S13g-1zgri1qp43uA547yjSFc'
CHANNEL1 = 'skype'
CHANNEL2 = 'teams'
CONVERSATION1 = "29:123w4GoUNGtDmuYexhNS13g-1zgri1qp43uA3A0yjSFc"
CONVERSATION2 = "29:567w4GoUNGtDmuYexhNS13g-1zgri1qp43uA3A0yjSFc"
BOT = "28:5e21d7a8-d1b5-4534-a63d-f521712f5a64"
MESSAGE = {"text": "image", "type": "message", "timestamp": "2017-07-13T12:34:48.338Z", "id": "149993456252",
"channelId": "skype", "serviceUrl": "https://smba.trafficmanager.net/apis/",
"from": {"id": "29:1SPw4GoUNGtDmuYex45S13g-1zgri1qp43uA345yjSFc", "name": "Matthew Brown"},
"conversation": {"id": "29:123w4GoUNGtDmuYexhNS13g-1zgri1qp43uA3A0yjSFc"},
"recipient": {"id": "28:5e21d7a8-d1b5-4534-a63d-f521712f5a64", "name": "TestPythonBotFramework"},
"entities": [{"locale": "en-US", "country": "AU", "platform": "Web", "type": "clientInfo"}],
"channelData": {"text": "image"}}
NAME_VALUES1 = {'name': 'Sally'}
NAME_VALUES2 = {'name': 'Rachel'}
AGE_VALUES1 = {'age': 34}
AGE_VALUES2 = {'age': 24}
CAR_VALUES1 = {'car': 'truck'}
CAR_VALUES2 = {'car': 'train'}
MULTI_NAMES1 = {'car': 'truck', 'name': 'Sally'}
MULTI_NAMES2 = {'car': 'corolla', 'name': 'Sam'}
MULTI_AGES1 = {'age': 45, 'house_age': 73}
MULTI_AGES2 = {'age': 23, 'house_age': 45}
MULTI_CARS1 = {'car': 'truck', 'type': 'mack'}
MULTI_CARS2 = {'car': 'jeep', 'type': '4wd'}
DELETE_CAR = {'car': None}
DELETE_AGE = {'age': None}
DELETE_NAME = {'name': None}
DELETE_MULTI_NAMES = {'car': None, 'name': None}
DELETE_MULTI_AGES = {'age': None, 'house_age': None}
DELETE_MULTI_CARS = {'car': None, 'type': None}
DELETED_NAME1 = {'car': 'truck'}
DELETED_NAME2 = {'car': 'corolla'}
DELETED_CAR1 = {'type': 'mack'}
DELETED_CAR2 = {'type': '4wd'}
DELETED_AGE1 = {'house_age': 73}
DELETED_AGE2 = {'house_age': 45}
def merge_two_dicts(x, y):
"""Given two dicts, merge them into a new dict as a shallow copy."""
z = x.copy()
z.update(y)
return z
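# Example: merge_two_dicts({'a': 1}, {'a': 2, 'b': 3}) == {'a': 2, 'b': 3};
# values from ``y`` win on key collisions.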
class JsonStateTestCase(TestCase):
def setUp(self):
self._remove_files()
self.state = JsonState(state_file='teststate.json', conversation_file='testconversation.json')
    def tearDown(self):
        self._remove_files()
def _remove_files(self):
try:
os.remove(os.getcwd() + '/teststate.json')
except OSError:
pass
try:
os.remove(os.getcwd() + '/testconversation.json')
except OSError:
pass
def test_set_key_1(self):
self.assertEqual(self.state.set_user_data_on_channel(NAME_VALUES1, channel=CHANNEL1, user_id=USER1),
NAME_VALUES1)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), NAME_VALUES1)
def test_set_key_2(self):
self.assertEqual(self.state.set_conversation_data_on_channel(NAME_VALUES2, channel=CHANNEL1,
conversation_id=CONVERSATION1), NAME_VALUES2)
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
NAME_VALUES2)
def test_set_key_3(self):
self.assertEqual(
self.state.set_private_conversation_data_on_channel(AGE_VALUES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), AGE_VALUES1)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
AGE_VALUES1)
def test_set_key_4(self):
self.assertEqual(self.state.set_user_data(AGE_VALUES2, user_id=USER1), AGE_VALUES2)
self.assertEqual(self.state.get_user_data(user_id=USER1), AGE_VALUES2)
def test_set_key_5(self):
self.assertEqual(self.state.set_channel_data(CAR_VALUES1, channel=CHANNEL1), CAR_VALUES1)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), CAR_VALUES1)
def test_update_key1(self):
self.state.set_user_data_on_channel(NAME_VALUES1, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(NAME_VALUES2, channel=CHANNEL1, user_id=USER1),
NAME_VALUES2)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), NAME_VALUES2)
def test_update_key2(self):
self.state.set_conversation_data_on_channel(NAME_VALUES2, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(NAME_VALUES1, channel=CHANNEL1,
conversation_id=CONVERSATION1), NAME_VALUES1)
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
NAME_VALUES1)
def test_update_key3(self):
self.state.set_private_conversation_data_on_channel(AGE_VALUES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(AGE_VALUES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), AGE_VALUES2)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
AGE_VALUES2)
def test_update_key4(self):
self.state.set_user_data(AGE_VALUES2, user_id=USER1)
self.assertEqual(self.state.set_user_data(AGE_VALUES1, user_id=USER1), AGE_VALUES1)
self.assertEqual(self.state.get_user_data(user_id=USER1), AGE_VALUES1)
def test_update_key5(self):
self.state.set_channel_data(CAR_VALUES1, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(CAR_VALUES2, channel=CHANNEL1), CAR_VALUES2)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), CAR_VALUES2)
def test_set_additional_key1(self):
self.state.set_user_data_on_channel(NAME_VALUES1, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(CAR_VALUES2, channel=CHANNEL1, user_id=USER1),
merge_two_dicts(NAME_VALUES1, CAR_VALUES2))
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1),
merge_two_dicts(NAME_VALUES1, CAR_VALUES2))
def test_set_additional_key2(self):
self.state.set_conversation_data_on_channel(NAME_VALUES2, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(CAR_VALUES2, channel=CHANNEL1,
conversation_id=CONVERSATION1),
merge_two_dicts(NAME_VALUES2, CAR_VALUES2))
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
merge_two_dicts(NAME_VALUES2, CAR_VALUES2))
def test_set_additional_key3(self):
self.state.set_private_conversation_data_on_channel(AGE_VALUES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(CAR_VALUES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
merge_two_dicts(AGE_VALUES1, CAR_VALUES2))
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
merge_two_dicts(AGE_VALUES1, CAR_VALUES2))
def test_set_additional_key4(self):
self.state.set_user_data(AGE_VALUES2, user_id=USER1)
self.assertEqual(self.state.set_user_data(CAR_VALUES2, user_id=USER1),
merge_two_dicts(AGE_VALUES2, CAR_VALUES2))
self.assertEqual(self.state.get_user_data(user_id=USER1), merge_two_dicts(AGE_VALUES2, CAR_VALUES2))
def test_set_additional_key5(self):
self.state.set_channel_data(CAR_VALUES1, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(CAR_VALUES2, channel=CHANNEL1),
merge_two_dicts(CAR_VALUES1, CAR_VALUES2))
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), merge_two_dicts(CAR_VALUES1, CAR_VALUES2))
def test_set_multi_key1(self):
self.assertEqual(self.state.set_user_data_on_channel(MULTI_NAMES1, channel=CHANNEL1, user_id=USER1),
MULTI_NAMES1)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), MULTI_NAMES1)
def test_set_multi_key2(self):
self.assertEqual(self.state.set_conversation_data_on_channel(MULTI_NAMES2, channel=CHANNEL1,
conversation_id=CONVERSATION1), MULTI_NAMES2)
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
MULTI_NAMES2)
def test_set_multi_key3(self):
self.assertEqual(
self.state.set_private_conversation_data_on_channel(MULTI_AGES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), MULTI_AGES1)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
MULTI_AGES1)
def test_set_multi_key4(self):
self.assertEqual(self.state.set_user_data(MULTI_AGES2, user_id=USER1), MULTI_AGES2)
self.assertEqual(self.state.get_user_data(user_id=USER1), MULTI_AGES2)
def test_set_multi_key5(self):
self.assertEqual(self.state.set_channel_data(MULTI_CARS1, channel=CHANNEL1), MULTI_CARS1)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), MULTI_CARS1)
def test_update_multi_key1(self):
self.state.set_user_data_on_channel(MULTI_NAMES1, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(MULTI_NAMES2, channel=CHANNEL1, user_id=USER1),
MULTI_NAMES2)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), MULTI_NAMES2)
def test_update_multi_key2(self):
self.state.set_conversation_data_on_channel(MULTI_NAMES2, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(MULTI_NAMES1, channel=CHANNEL1,
conversation_id=CONVERSATION1), MULTI_NAMES1)
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
MULTI_NAMES1)
def test_update_multi_key3(self):
self.state.set_private_conversation_data_on_channel(MULTI_AGES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(MULTI_AGES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), MULTI_AGES2)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
MULTI_AGES2)
def test_update_multi_key4(self):
self.state.set_user_data(MULTI_AGES2, user_id=USER1)
self.assertEqual(self.state.set_user_data(MULTI_AGES1, user_id=USER1), MULTI_AGES1)
self.assertEqual(self.state.get_user_data(user_id=USER1), MULTI_AGES1)
def test_update_multi_key5(self):
self.state.set_channel_data(MULTI_CARS1, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(MULTI_CARS2, channel=CHANNEL1), MULTI_CARS2)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), MULTI_CARS2)
def test_set_additional_multi_key1(self):
self.state.set_user_data_on_channel(MULTI_NAMES1, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(MULTI_AGES1, channel=CHANNEL1, user_id=USER1),
merge_two_dicts(MULTI_NAMES1, MULTI_AGES1))
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1),
merge_two_dicts(MULTI_NAMES1, MULTI_AGES1))
def test_set_additional_multi_key2(self):
self.state.set_conversation_data_on_channel(MULTI_NAMES2, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(MULTI_AGES1, channel=CHANNEL1,
conversation_id=CONVERSATION1),
merge_two_dicts(MULTI_NAMES2, MULTI_AGES1))
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
merge_two_dicts(MULTI_NAMES2, MULTI_AGES1))
def test_set_additional_multi_key3(self):
self.state.set_private_conversation_data_on_channel(MULTI_AGES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(MULTI_CARS1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
merge_two_dicts(MULTI_AGES1, MULTI_CARS1))
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
merge_two_dicts(MULTI_AGES1, MULTI_CARS1))
def test_set_additional_multi_key4(self):
self.state.set_user_data(MULTI_AGES2, user_id=USER1)
self.assertEqual(self.state.set_user_data(MULTI_CARS1, user_id=USER1),
merge_two_dicts(MULTI_AGES2, MULTI_CARS1))
self.assertEqual(self.state.get_user_data(user_id=USER1), merge_two_dicts(MULTI_AGES2, MULTI_CARS1))
def test_set_additional_multi_key5(self):
self.state.set_channel_data(MULTI_CARS1, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(MULTI_NAMES1, channel=CHANNEL1),
merge_two_dicts(MULTI_CARS1, MULTI_NAMES1))
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), merge_two_dicts(MULTI_CARS1, MULTI_NAMES1))
def test_get_none1(self):
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
def test_get_none2(self):
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
def test_get_none3(self):
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
def test_get_none4(self):
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
def test_get_none5(self):
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_set_same_value1(self):
self.state.set_user_data_on_channel(NAME_VALUES2, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(NAME_VALUES2, channel=CHANNEL1, user_id=USER1),
NAME_VALUES2)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), NAME_VALUES2)
def test_set_same_value2(self):
self.state.set_conversation_data_on_channel(NAME_VALUES1, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(NAME_VALUES1, channel=CHANNEL1,
conversation_id=CONVERSATION1), NAME_VALUES1)
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
NAME_VALUES1)
def test_set_same_value3(self):
self.state.set_private_conversation_data_on_channel(AGE_VALUES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(AGE_VALUES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), AGE_VALUES2)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
AGE_VALUES2)
def test_set_same_value4(self):
self.state.set_user_data(AGE_VALUES1, user_id=USER1)
self.assertEqual(self.state.set_user_data(AGE_VALUES1, user_id=USER1), AGE_VALUES1)
self.assertEqual(self.state.get_user_data(user_id=USER1), AGE_VALUES1)
def test_set_same_value5(self):
self.state.set_channel_data(CAR_VALUES2, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(CAR_VALUES2, channel=CHANNEL1), CAR_VALUES2)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), CAR_VALUES2)
def test_delete_key1(self):
self.state.set_user_data_on_channel(NAME_VALUES2, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(DELETE_NAME, channel=CHANNEL1, user_id=USER1), {})
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
def test_delete_key2(self):
self.state.set_conversation_data_on_channel(NAME_VALUES1, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(DELETE_NAME, channel=CHANNEL1,
conversation_id=CONVERSATION1), {})
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
def test_delete_key3(self):
self.state.set_private_conversation_data_on_channel(AGE_VALUES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(DELETE_AGE, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
def test_delete_key4(self):
self.state.set_user_data(AGE_VALUES1, user_id=USER1)
self.assertEqual(self.state.set_user_data(DELETE_AGE, user_id=USER1), {})
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
def test_delete_key5(self):
self.state.set_channel_data(CAR_VALUES2, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(DELETE_CAR, channel=CHANNEL1), {})
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_delete_some_key1(self):
self.state.set_user_data_on_channel(MULTI_NAMES1, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(DELETE_NAME, channel=CHANNEL1, user_id=USER1),
DELETED_NAME1)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), DELETED_NAME1)
def test_delete_some_key2(self):
self.state.set_conversation_data_on_channel(MULTI_NAMES2, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(DELETE_NAME, channel=CHANNEL1,
conversation_id=CONVERSATION1), DELETED_NAME2)
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
DELETED_NAME2)
def test_delete_some_key3(self):
self.state.set_private_conversation_data_on_channel(MULTI_AGES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(DELETE_AGE, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), DELETED_AGE1)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
DELETED_AGE1)
def test_delete_some_key4(self):
self.state.set_user_data(MULTI_AGES2, user_id=USER1)
self.assertEqual(self.state.set_user_data(DELETE_AGE, user_id=USER1), DELETED_AGE2)
self.assertEqual(self.state.get_user_data(user_id=USER1), DELETED_AGE2)
def test_delete_some_key5(self):
self.state.set_channel_data(MULTI_CARS1, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(DELETE_CAR, channel=CHANNEL1), DELETED_CAR1)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), DELETED_CAR1)
def test_delete_multi_key1(self):
self.state.set_user_data_on_channel(MULTI_NAMES1, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.set_user_data_on_channel(DELETE_MULTI_NAMES, channel=CHANNEL1, user_id=USER1),
{})
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
def test_delete_multi_key2(self):
self.state.set_conversation_data_on_channel(MULTI_NAMES2, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(self.state.set_conversation_data_on_channel(DELETE_MULTI_NAMES, channel=CHANNEL1,
conversation_id=CONVERSATION1), {})
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
def test_delete_multi_key3(self):
self.state.set_private_conversation_data_on_channel(MULTI_AGES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(DELETE_MULTI_AGES, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
def test_delete_multi_key4(self):
self.state.set_user_data(MULTI_AGES2, user_id=USER1)
self.assertEqual(self.state.set_user_data(DELETE_MULTI_AGES, user_id=USER1), {})
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
def test_delete_multi_key5(self):
self.state.set_channel_data(MULTI_CARS1, channel=CHANNEL1)
self.assertEqual(self.state.set_channel_data(DELETE_MULTI_CARS, channel=CHANNEL1), {})
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_delete_none1(self):
self.assertEqual(self.state.set_user_data_on_channel(DELETE_MULTI_NAMES, channel=CHANNEL1, user_id=USER1),
{})
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
def test_delete_none2(self):
self.assertEqual(self.state.set_conversation_data_on_channel(DELETE_MULTI_NAMES, channel=CHANNEL1,
conversation_id=CONVERSATION1), {})
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
def test_delete_none3(self):
self.assertEqual(
self.state.set_private_conversation_data_on_channel(DELETE_MULTI_AGES, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
def test_delete_none4(self):
self.assertEqual(self.state.set_user_data(DELETE_MULTI_AGES, user_id=USER1), {})
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
def test_delete_none5(self):
self.assertEqual(self.state.set_channel_data(DELETE_MULTI_CARS, channel=CHANNEL1), {})
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_delete_set1(self):
self.state.set_user_data_on_channel(NAME_VALUES2, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.delete_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
def test_delete_set2(self):
self.state.set_conversation_data_on_channel(NAME_VALUES1, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(
self.state.delete_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
def test_delete_set3(self):
self.state.set_private_conversation_data_on_channel(AGE_VALUES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(self.state.delete_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
def test_delete_set4(self):
self.state.set_user_data(AGE_VALUES1, user_id=USER1)
self.assertEqual(self.state.delete_user_data(user_id=USER1), {})
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
def test_delete_set5(self):
self.state.set_channel_data(CAR_VALUES2, channel=CHANNEL1)
self.assertEqual(self.state.delete_channel_data(channel=CHANNEL1), {})
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_delete_multi_set1(self):
self.state.set_user_data_on_channel(MULTI_NAMES1, channel=CHANNEL1, user_id=USER1)
self.assertEqual(self.state.delete_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
def test_delete_multi_set2(self):
self.state.set_conversation_data_on_channel(MULTI_NAMES2, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.assertEqual(
self.state.delete_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
self.assertEqual(
self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
def test_delete_multi_set3(self):
self.state.set_private_conversation_data_on_channel(MULTI_AGES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.assertEqual(self.state.delete_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
def test_delete_multi_set4(self):
self.state.set_user_data(MULTI_AGES2, user_id=USER1)
self.assertEqual(self.state.delete_user_data(user_id=USER1), {})
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
def test_delete_multi_set5(self):
self.state.set_channel_data(MULTI_CARS1, channel=CHANNEL1)
self.assertEqual(self.state.delete_channel_data(channel=CHANNEL1), {})
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_all(self):
self.assertEqual(self.state.set_user_data_on_channel(NAME_VALUES1, channel=CHANNEL1, user_id=USER1),
NAME_VALUES1)
self.assertEqual(
self.state.set_conversation_data_on_channel(NAME_VALUES2, channel=CHANNEL1, conversation_id=CONVERSATION1),
NAME_VALUES2)
self.assertEqual(
self.state.set_private_conversation_data_on_channel(AGE_VALUES1, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), AGE_VALUES1)
self.assertEqual(self.state.set_user_data(AGE_VALUES2, user_id=USER1), AGE_VALUES2)
self.assertEqual(self.state.set_channel_data(CAR_VALUES1, channel=CHANNEL1), CAR_VALUES1)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), NAME_VALUES1)
self.assertEqual(self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
NAME_VALUES2)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
AGE_VALUES1)
self.assertEqual(self.state.get_user_data(user_id=USER1), AGE_VALUES2)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), CAR_VALUES1)
self.assertEqual(self.state.set_user_data_on_channel(DELETE_NAME, channel=CHANNEL1, user_id=USER1), {})
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
self.assertEqual(
self.state.set_conversation_data_on_channel(DELETE_NAME, channel=CHANNEL1, conversation_id=CONVERSATION1),
{})
self.assertEqual(self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
{})
self.assertEqual(
self.state.set_private_conversation_data_on_channel(DELETE_AGE, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.set_user_data(DELETE_AGE, user_id=USER1), {})
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
self.assertEqual(self.state.set_channel_data(DELETE_CAR, channel=CHANNEL1), {})
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_all_delete(self):
self.state.set_user_data_on_channel(NAME_VALUES2, channel=CHANNEL1, user_id=USER1)
self.state.set_conversation_data_on_channel(NAME_VALUES1, channel=CHANNEL1, conversation_id=CONVERSATION1)
self.state.set_private_conversation_data_on_channel(AGE_VALUES2, channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1)
self.state.set_user_data(AGE_VALUES1, user_id=USER1)
self.state.set_channel_data(CAR_VALUES2, channel=CHANNEL1)
self.assertEqual(self.state.delete_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), {})
self.assertEqual(
self.state.delete_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
{})
self.assertEqual(self.state.delete_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1), {})
self.assertEqual(self.state.delete_user_data(user_id=USER1), {})
self.assertEqual(self.state.get_user_data(user_id=USER1), {})
self.assertEqual(self.state.delete_channel_data(channel=CHANNEL1), {})
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), {})
def test_fill(self):
self.assertEqual(self.state.set_user_data_on_channel(NAME_VALUES1, fill=MESSAGE), NAME_VALUES1)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=USER1), NAME_VALUES1)
self.assertEqual(self.state.set_conversation_data_on_channel(NAME_VALUES2, fill=MESSAGE), NAME_VALUES2)
self.assertEqual(self.state.get_conversation_data_on_channel(channel=CHANNEL1, conversation_id=CONVERSATION1),
NAME_VALUES2)
self.assertEqual(self.state.set_private_conversation_data_on_channel(AGE_VALUES1, fill=MESSAGE), AGE_VALUES1)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=USER1,
conversation_id=CONVERSATION1),
AGE_VALUES1)
self.assertEqual(self.state.set_user_data(AGE_VALUES2, fill=MESSAGE), AGE_VALUES2)
self.assertEqual(self.state.get_user_data(user_id=USER1), AGE_VALUES2)
self.assertEqual(self.state.set_channel_data(CAR_VALUES1, fill=MESSAGE), CAR_VALUES1)
self.assertEqual(self.state.get_channel_data(channel=CHANNEL1), CAR_VALUES1)
def test_fill_bot(self):
self.assertEqual(self.state.set_user_data_on_channel(NAME_VALUES1, fill=MESSAGE, bot=True), NAME_VALUES1)
self.assertEqual(self.state.get_user_data_on_channel(channel=CHANNEL1, user_id=BOT), NAME_VALUES1)
self.assertEqual(self.state.set_private_conversation_data_on_channel(AGE_VALUES1, fill=MESSAGE, bot=True),
AGE_VALUES1)
self.assertEqual(self.state.get_private_conversation_data_on_channel(channel=CHANNEL1, user_id=BOT,
conversation_id=CONVERSATION1),
AGE_VALUES1)
self.assertEqual(self.state.set_user_data(AGE_VALUES2, fill=MESSAGE, bot=True), AGE_VALUES2)
self.assertEqual(self.state.get_user_data(user_id=BOT), AGE_VALUES2)
def _get_activity(self, id, conversation_id=None, type='ReplyToActivity', text=True):
conversation_id = id if conversation_id is None else conversation_id
activity = {
'type': type,
'conversation_id': '{}'.format(conversation_id),
'activity': {'id': 'asdf', 'type': type},
'url_parameters': {},
'response': {'info': 'success'},
}
if type in ['received', 'ReplyToActivity', 'SendToConversation']:
if text:
activity['activity']['text'] = 'message text - {}'.format(id)
if type == 'received':
activity['activity']['type'] = 'message'
else:
if type == 'received':
activity['activity']['type'] = choice(
['contactRelationUpdate', 'conversationUpdate', 'deleteUserData', 'ping', 'typing',
'endOfConversation'])
response_activity = activity.copy()
response_activity['_id'] = id
return activity, response_activity
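    # _get_activity(1) returns the activity payload to save plus the expected
    # stored copy, which is identical except for an added '_id': 1 field.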
def test_add_conversation(self):
activity, response_activity = self._get_activity(1)
self.state.save_activity(activity)
self.assertEqual(self.state.get_activities(), [response_activity])
def test_add_multiple_conversations(self):
response_activities = []
for n in range(1, 15):
activity, response_activity = self._get_activity(n)
self.state.save_activity(activity)
response_activities.append(response_activity)
self.assertEqual(self.state.get_activities(), response_activities[-10:])
self.assertEqual(len(self.state.get_activities()), 10)
def test_add_multiple_conversations_with_argument(self):
response_activities = []
for n in range(1, 15):
activity, response_activity = self._get_activity(n)
self.state.save_activity(activity)
response_activities.append(response_activity)
self.assertEqual(self.state.get_activities(count=5), response_activities[-5:])
self.assertEqual(len(self.state.get_activities(count=5)), 5)
self.assertEqual(self.state.get_activities(count=15), response_activities[-15:])
self.assertEqual(len(self.state.get_activities(count=-1)), 14)
self.assertEqual(len(self.state.get_activities(count=100)), 14)
def test_maximum_stored(self):
for n in range(1, 60):
activity, __ = self._get_activity(n)
self.state.save_activity(activity)
self.assertEqual(len(self.state.get_activities(count=-1)), 50)
def test_get_conversation_id(self):
response_activities = {}
combined_response = []
simple_combined_response = []
multi = 0
for conversation_id in ['conv1', 'conv2']:
response_activities[conversation_id] = []
for n in range(1, 4):
n += multi * 3
activity, response_activity = self._get_activity(n, conversation_id)
self.state.save_activity(activity)
response_activities[conversation_id].append(response_activity)
combined_response.append(response_activity)
simple_combined_response.append(response_activity['activity']['text'])
multi += 1
self.assertEqual(self.state.get_activities(), combined_response, combined_response)
self.assertEqual(self.state.get_activities(simple=True), simple_combined_response)
self.assertEqual(len(self.state.get_activities()), 6)
self.assertEqual(self.state.get_activities(conversation_id='conv1'), response_activities['conv1'])
self.assertEqual(self.state.get_activities(conversation_id='conv2'), response_activities['conv2'])
self.assertEqual(len(self.state.get_activities(conversation_id='conv1')), 3)
self.assertEqual(len(self.state.get_activities(conversation_id='conv2')), 3)
def test_get_conversation_id_limit(self):
response_activities = {}
simple_response_activities = {}
combined_response = []
simple_combined_response = []
multi = 0
for conversation_id in ['conv1', 'conv2']:
response_activities[conversation_id] = []
simple_response_activities[conversation_id] = []
for n in range(1, 31):
n += multi * 30
activity, response_activity = self._get_activity(n, conversation_id)
self.state.save_activity(activity)
response_activities[conversation_id].append(response_activity)
simple_response_activities[conversation_id].append(response_activity['activity']['text'])
combined_response.append(response_activity)
simple_combined_response.append(response_activity['activity']['text'])
multi += 1
self.assertEqual(self.state.get_activities(), combined_response[-10:])
self.assertEqual(self.state.get_activities(simple=True), simple_combined_response[-10:])
self.assertEqual(len(self.state.get_activities()), 10)
self.assertEqual(self.state.get_activities(conversation_id='conv1'), response_activities['conv1'][-10:])
self.assertEqual(self.state.get_activities(conversation_id='conv2'), response_activities['conv2'][-10:])
self.assertEqual(self.state.get_activities(conversation_id='conv1', simple=True), simple_response_activities['conv1'][-10:])
self.assertEqual(self.state.get_activities(conversation_id='conv2', simple=True), simple_response_activities['conv2'][-10:])
self.assertEqual(len(self.state.get_activities(conversation_id='conv1')), 10)
self.assertEqual(len(self.state.get_activities(conversation_id='conv2')), 10)
def test_get_conversation_id_alternate(self):
response_activities = {}
combined_response = []
response_activities['conv1'] = []
response_activities['conv2'] = []
for n in range(1, 61, 2):
multi = 0
for conversation_id in ['conv1', 'conv2']:
n += multi
activity, response_activity = self._get_activity(n, conversation_id)
self.state.save_activity(activity)
response_activities[conversation_id].append(response_activity)
combined_response.append(response_activity)
multi += 1
self.assertEqual(self.state.get_activities(), combined_response[-10:])
self.assertEqual(len(self.state.get_activities()), 10)
self.assertEqual(self.state.get_activities(conversation_id='conv1'), response_activities['conv1'][-10:])
self.assertEqual(self.state.get_activities(conversation_id='conv2'), response_activities['conv2'][-10:])
self.assertEqual(self.state.get_activities(conversation_id='conv3'), [])
self.assertEqual(len(self.state.get_activities(conversation_id='conv1')), 10)
self.assertEqual(len(self.state.get_activities(conversation_id='conv2')), 10)
self.assertEqual(len(self.state.get_activities(conversation_id='conv3')), 0)
# Test limits
self.assertEqual(len(self.state.get_activities(count=50, conversation_id='conv2')), 25)
self.assertEqual(len(self.state.get_activities(count=5, conversation_id='conv2')), 5)
def test_get_conversation_id_different_types(self):
values = ['received', 'ReplyToActivity', 'SendToConversation', 'DeleteActivity', 'CreateConversation', 'GetConversationMembers',
'GetActivityMembers']
get_type = self._get_type(values)
simple_response_activities = {}
simple_combined_response = []
simple_response_activities['conv1'] = []
simple_response_activities['conv2'] = []
for n in range(1, 61, 2):
multi = 0
for conversation_id in ['conv1', 'conv2']:
n += multi
activity, response_activity = self._get_activity(n, conversation_id, type=next(get_type))
self.state.save_activity(activity)
if 'text' in response_activity['activity']:
simple_response_activities[conversation_id].append(response_activity['activity']['text'])
if 'text' in response_activity['activity']:
simple_combined_response.append(response_activity['activity']['text'])
multi += 1
self.assertEqual(len(self.state.get_activities(simple=True)), 10)
self.assertEqual(self.state.get_activities(simple=True), simple_combined_response[-10:])
self.assertEqual(len(self.state.get_activities(conversation_id='conv1', simple=True)), 10)
self.assertEqual(len(self.state.get_activities(conversation_id='conv2', simple=True)), 10)
self.assertEqual(len(self.state.get_activities(conversation_id='conv3', simple=True)), 0)
self.assertEqual(self.state.get_activities(conversation_id='conv1', simple=True), simple_response_activities['conv1'][-10:])
self.assertEqual(self.state.get_activities(conversation_id='conv2', simple=True), simple_response_activities['conv2'][-10:])
self.assertEqual(self.state.get_activities(conversation_id='conv3', simple=True), [])
# Test limits
self.assertEqual(len(self.state.get_activities(count=50, conversation_id='conv2', simple=True)), 10)
self.assertEqual(len(self.state.get_activities(count=5, conversation_id='conv2', simple=True)), 5)
@staticmethod
def _get_type(values):
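        # Round-robin generator: yields values[0], values[1], ..., then wraps
        # around indefinitely.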
position = 0
while True:
            actual_position = position % len(values)
yield values[actual_position]
position += 1
class MongodbStateTestCase(JsonStateTestCase):
def setUp(self):
self.config = Config(os.getcwd() + '/microsoftbotframework/tests/test_files/mongodb_test_config.yaml')
self._drop_database()
self.state = MongodbState(self.config, database='testmongodbstate')
def tearDown(self):
        # drop the test database to clear any stored values
self._drop_database()
def _drop_database(self):
        # drop the whole test database
mongodb_uri = self.config.get_config(None, 'URI', root='mongodb')
client = MongoClient(mongodb_uri)
client.drop_database('testmongodbstate')
def test_get_next_id(self):
self.assertEqual(self.state._get_last_id(), 0)
for n in range(1, 6):
self.assertEqual(self.state._get_next_id(), n)
self.assertEqual(self.state._get_last_id(), n)
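# To run both suites (assuming the repository layout implied by the config path
# above; MongodbStateTestCase additionally needs a reachable MongoDB instance):
#
#     python -m unittest discover microsoftbotframework/tests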
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2016, MIT Probabilistic Computing Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import math
import numpy
import bayeslite.core as core
import bayeslite.stats as stats
from bayeslite.exception import BQLError
from bayeslite.sqlite3_util import sqlite3_quote_name
from bayeslite.math_util import ieee_exp
from bayeslite.math_util import logmeanexp
from bayeslite.math_util import logavgexp_weighted
from bayeslite.util import casefold
def bayesdb_install_bql(db, cookie):
def function(name, nargs, fn):
db.createscalarfunction(name, (lambda *args: fn(cookie, *args)), nargs)
function("bql_column_correlation", 5, bql_column_correlation)
function("bql_column_correlation_pvalue", 5, bql_column_correlation_pvalue)
function("bql_column_dependence_probability", 5,
bql_column_dependence_probability)
function("bql_column_mutual_information", -1, bql_column_mutual_information)
function("bql_column_value_probability", -1, bql_column_value_probability)
function("bql_rand", 0, bql_rand)
function("bql_row_similarity", 6, bql_row_similarity)
function("bql_row_predictive_relevance", -1, bql_row_predictive_relevance)
function("bql_row_column_predictive_probability", 6,
bql_row_column_predictive_probability)
function("bql_predict", 7, bql_predict)
function("bql_predict_confidence", 6, bql_predict_confidence)
function("bql_json_get", 2, bql_json_get)
function("bql_pdf_joint", -1, bql_pdf_joint)
### BayesDB column functions
def bql_variable_stattypes_and_data(bdb, population_id, colno0, colno1):
st0 = core.bayesdb_variable_stattype(bdb, population_id, None, colno0)
st1 = core.bayesdb_variable_stattype(bdb, population_id, None, colno1)
table_name = core.bayesdb_population_table(bdb, population_id)
qt = sqlite3_quote_name(table_name)
varname0 = core.bayesdb_variable_name(bdb, population_id, None, colno0)
varname1 = core.bayesdb_variable_name(bdb, population_id, None, colno1)
qvn0 = sqlite3_quote_name(varname0)
qvn1 = sqlite3_quote_name(varname1)
data_sql = '''
SELECT %s, %s FROM %s WHERE %s IS NOT NULL AND %s IS NOT NULL
''' % (qvn0, qvn1, qt, qvn0, qvn1)
data = bdb.sql_execute(data_sql).fetchall()
data0 = [row[0] for row in data]
data1 = [row[1] for row in data]
return (st0, st1, data0, data1)
# Two-column function: CORRELATION [OF <col0> WITH <col1>]
def bql_column_correlation(bdb, population_id, generator_id, _modelnos,
colno0, colno1):
if colno0 < 0:
varname = core.bayesdb_variable_name(bdb, population_id,
generator_id, colno0)
raise BQLError(bdb, 'No correlation for latent variable: %r'
% (varname,))
if colno1 < 0:
varname = core.bayesdb_variable_name(bdb, population_id,
generator_id, colno1)
raise BQLError(bdb, 'No correlation for latent variable: %r'
% (varname,))
(st0, st1, data0, data1) = bql_variable_stattypes_and_data(bdb,
population_id, colno0, colno1)
if (st0, st1) not in correlation_methods:
raise NotImplementedError('No correlation method for %s/%s.'
% (st0, st1))
return correlation_methods[st0, st1](data0, data1)
# Two-column function: CORRELATION PVALUE [OF <col0> WITH <col1>]
def bql_column_correlation_pvalue(bdb, population_id, generator_id, _modelnos,
colno0, colno1):
if colno0 < 0:
varname = core.bayesdb_variable_name(bdb, population_id,
generator_id, colno0)
raise BQLError(bdb, 'No correlation p-value for latent variable: %r'
% (varname,))
if colno1 < 0:
varname = core.bayesdb_variable_name(bdb, population_id,
generator_id, colno1)
raise BQLError(bdb, 'No correlation p-value for latent variable: %r'
% (varname,))
(st0, st1, data0, data1) = bql_variable_stattypes_and_data(
bdb, population_id, colno0, colno1)
if (st0, st1) not in correlation_p_methods:
raise NotImplementedError(
'No correlation pvalue method for %s/%s.' % (st0, st1))
return correlation_p_methods[st0, st1](data0, data1)
def correlation_pearsonr2(data0, data1):
r = stats.pearsonr(data0, data1)
return r**2
def correlation_p_pearsonr2(data0, data1):
n = len(data0)
assert n == len(data1)
if n <= 2:
return float('NaN')
r = stats.pearsonr(data0, data1)
if math.isnan(r):
return float('NaN')
    if r == 1. or r == -1.:
return 0.
# Compute observed t statistic.
t = r * math.sqrt((n - 2)/(1 - r**2))
# Compute p-value for two-sided t-test.
return 2 * stats.t_cdf(-abs(t), n - 2)
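# Worked example: with n = 5 and r = 0.9, t = 0.9 * sqrt(3 / 0.19) ~= 3.58 and
# the two-sided p-value is 2 * stats.t_cdf(-3.58, 3).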
def correlation_cramerphi(data0, data1):
# Compute observed chi^2 statistic.
chi2, n0, n1 = cramerphi_chi2(data0, data1)
if math.isnan(chi2):
return float('NaN')
n = len(data0)
assert n == len(data1)
# Compute observed correlation.
return math.sqrt(chi2 / (n * (min(n0, n1) - 1)))
def correlation_p_cramerphi(data0, data1):
# Compute observed chi^2 statistic.
chi2, n0, n1 = cramerphi_chi2(data0, data1)
if math.isnan(chi2):
return float('NaN')
# Compute p-value for chi^2 test of independence.
df = (n0 - 1)*(n1 - 1)
if df <= 0:
return float('NaN')
return stats.chi2_sf(chi2, df)
def cramerphi_chi2(data0, data1):
n = len(data0)
assert n == len(data1)
if n == 0:
return float('NaN'), 0, 0
index0 = dict((x, i) for i, x in enumerate(sorted(set(data0))))
index1 = dict((x, i) for i, x in enumerate(sorted(set(data1))))
data0 = numpy.array([index0[d] for d in data0])
data1 = numpy.array([index1[d] for d in data1])
assert data0.ndim == 1
assert data1.ndim == 1
unique0 = numpy.unique(data0)
unique1 = numpy.unique(data1)
n0 = len(unique0)
n1 = len(unique1)
min_levels = min(n0, n1)
if min_levels == 1:
# No variation in at least one column, so no notion of
# correlation.
return float('NaN'), n0, n1
ct = numpy.zeros((n0, n1), dtype=int)
for i0, x0 in enumerate(unique0):
for i1, x1 in enumerate(unique1):
matches0 = numpy.array(data0 == x0, dtype=int)
matches1 = numpy.array(data1 == x1, dtype=int)
ct[i0][i1] = numpy.dot(matches0, matches1)
# Compute observed chi^2 statistic.
chi2 = stats.chi2_contingency(ct)
return chi2, n0, n1
def correlation_anovar2(data_group, data_y):
# Compute observed F-test statistic.
F, n_groups = anovar2(data_group, data_y)
if math.isnan(F):
return float('NaN')
n = len(data_group)
assert n == len(data_y)
# Compute observed correlation.
return 1 - 1/(1 + F*(float(n_groups - 1) / float(n - n_groups)))
def correlation_p_anovar2(data_group, data_y):
# Compute observed F-test statistic.
F, n_groups = anovar2(data_group, data_y)
if math.isnan(F):
return float('NaN')
n = len(data_group)
assert n == len(data_y)
# Compute p-value for F-test.
return stats.f_sf(F, n_groups - 1, n - n_groups)
def anovar2(data_group, data_y):
n = len(data_group)
assert n == len(data_y)
group_index = {}
for x in data_group:
if x not in group_index:
group_index[x] = len(group_index)
n_groups = len(group_index)
if n_groups == 0:
# No data, so no notion of correlation.
return float('NaN'), n_groups
if n_groups == n:
# No variation in any group, so no notion of correlation.
return float('NaN'), n_groups
if n_groups == 1:
# Only one group means we can draw no information from the
# choice of group, so no notion of correlation.
return float('NaN'), n_groups
groups = [None] * n_groups
for i in xrange(n_groups):
groups[i] = []
for x, y in zip(data_group, data_y):
groups[group_index[x]].append(y)
# Compute observed F-test statistic.
F = stats.f_oneway(groups)
return F, n_groups
def correlation_anovar2_dc(discrete_data, continuous_data):
return correlation_anovar2(discrete_data, continuous_data)
def correlation_anovar2_cd(continuous_data, discrete_data):
return correlation_anovar2(discrete_data, continuous_data)
def correlation_p_anovar2_dc(discrete_data, continuous_data):
return correlation_p_anovar2(discrete_data, continuous_data)
def correlation_p_anovar2_cd(continuous_data, discrete_data):
return correlation_p_anovar2(discrete_data, continuous_data)
correlation_methods = {}
correlation_p_methods = {}
def define_correlation(stattype0, stattype1, method):
assert casefold(stattype0) == stattype0
assert casefold(stattype1) == stattype1
assert (stattype0, stattype1) not in correlation_methods
correlation_methods[stattype0, stattype1] = method
def define_correlation_p(stattype0, stattype1, method):
assert casefold(stattype0) == stattype0
assert casefold(stattype1) == stattype1
assert (stattype0, stattype1) not in correlation_p_methods
correlation_p_methods[stattype0, stattype1] = method
define_correlation('nominal', 'cyclic', correlation_anovar2_dc)
define_correlation('nominal', 'nominal', correlation_cramerphi)
define_correlation('nominal', 'numerical', correlation_anovar2_dc)
define_correlation_p('nominal', 'cyclic', correlation_p_anovar2_dc)
define_correlation_p('nominal', 'nominal', correlation_p_cramerphi)
define_correlation_p('nominal', 'numerical', correlation_p_anovar2_dc)
define_correlation('numerical', 'cyclic', correlation_pearsonr2)
define_correlation('numerical', 'nominal', correlation_anovar2_cd)
define_correlation('numerical', 'numerical', correlation_pearsonr2)
define_correlation_p('numerical', 'cyclic', correlation_p_pearsonr2)
define_correlation_p('numerical', 'nominal', correlation_p_anovar2_cd)
define_correlation_p('numerical', 'numerical', correlation_p_pearsonr2)
# XXX Pretend CYCLIC is NUMERICAL for the purposes of correlation. To
# do this properly we ought to implement a standard statistical notion
# of circular/linear correlation, as noted in Github issue #146
# <https://github.com/probcomp/bayeslite/issues/146>.
define_correlation('cyclic', 'cyclic', correlation_pearsonr2)
define_correlation('cyclic', 'nominal', correlation_anovar2_cd)
define_correlation('cyclic', 'numerical', correlation_pearsonr2)
define_correlation_p('cyclic', 'cyclic', correlation_p_pearsonr2)
define_correlation_p('cyclic', 'nominal', correlation_p_anovar2_cd)
define_correlation_p('cyclic', 'numerical', correlation_p_pearsonr2)
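# Dispatch example: correlation_methods['nominal', 'numerical'] is
# correlation_anovar2_dc, so CORRELATION OF a nominal column WITH a numerical
# one runs a one-way ANOVA and reports R^2 = 1 - 1/(1 + F*(k - 1)/(n - k)),
# where k is the number of groups.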
# Two-column function: DEPENDENCE PROBABILITY [OF <col0> WITH <col1>]
def bql_column_dependence_probability(
bdb, population_id, generator_id, modelnos, colno0, colno1):
modelnos = _retrieve_modelnos(modelnos)
def generator_depprob(generator_id):
backend = core.bayesdb_generator_backend(bdb, generator_id)
depprob_list = backend.column_dependence_probability(
bdb, generator_id, modelnos, colno0, colno1)
return stats.arithmetic_mean(depprob_list)
generator_ids = _retrieve_generator_ids(bdb, population_id, generator_id)
depprobs = map(generator_depprob, generator_ids)
return stats.arithmetic_mean(depprobs)
# Two-column function: MUTUAL INFORMATION [OF <col0> WITH <col1>]
def bql_column_mutual_information(
bdb, population_id, generator_id, modelnos, colnos0, colnos1,
numsamples, *constraint_args):
colnos0 = json.loads(colnos0)
colnos1 = json.loads(colnos1)
modelnos = _retrieve_modelnos(modelnos)
mutinfs = _bql_column_mutual_information(
bdb, population_id, generator_id, modelnos, colnos0, colnos1,
numsamples, *constraint_args)
# XXX This integral of the CMI returned by each model of all generators in
# the population is wrong! At least, it does not directly correspond to
# any meaningful probabilistic quantity, other than literally the mean CMI
# averaged over all population models.
return stats.arithmetic_mean([stats.arithmetic_mean(m) for m in mutinfs])
def _bql_column_mutual_information(
bdb, population_id, generator_id, modelnos, colnos0, colnos1,
numsamples, *constraint_args):
if len(constraint_args) % 2 == 1:
        raise ValueError('Odd constraint arguments: %s.' % (constraint_args,))
constraints = zip(constraint_args[::2], constraint_args[1::2]) \
if constraint_args else None
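    # constraint_args alternates column numbers and values, e.g.
    # (3, 'x', 7, 2.5) becomes constraints [(3, 'x'), (7, 2.5)].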
def generator_mutinf(generator_id):
backend = core.bayesdb_generator_backend(bdb, generator_id)
return backend.column_mutual_information(
bdb, generator_id, modelnos, colnos0, colnos1,
constraints=constraints, numsamples=numsamples)
generator_ids = _retrieve_generator_ids(bdb, population_id, generator_id)
mutinfs = map(generator_mutinf, generator_ids)
return mutinfs
# One-column function: PROBABILITY DENSITY OF <col>=<value> GIVEN <constraints>
def bql_column_value_probability(
bdb, population_id, generator_id, modelnos, colno, value,
*constraint_args):
modelnos = _retrieve_modelnos(modelnos)
constraints = []
i = 0
while i < len(constraint_args):
if i + 1 == len(constraint_args):
raise ValueError(
'Odd constraint arguments: %s' % (constraint_args,))
constraint_colno = constraint_args[i]
constraint_value = constraint_args[i + 1]
constraints.append((constraint_colno, constraint_value))
i += 2
targets = [(colno, value)]
logp = _bql_logpdf(bdb, population_id, generator_id, modelnos, targets,
constraints)
return ieee_exp(logp)
# XXX This is silly. We should return log densities, not densities.
# This is Github issue #360:
# https://github.com/probcomp/bayeslite/issues/360
def bql_pdf_joint(bdb, population_id, generator_id, modelnos, *args):
modelnos = _retrieve_modelnos(modelnos)
i = 0
targets = []
while i < len(args):
if args[i] is None:
i += 1
break
if i + 1 == len(args):
raise ValueError('Missing logpdf target value: %r' % (args[i],))
t_colno = args[i]
t_value = args[i + 1]
targets.append((t_colno, t_value))
i += 2
constraints = []
while i < len(args):
if i + 1 == len(args):
raise ValueError('Missing logpdf constraint value: %r' %
(args[i],))
c_colno = args[i]
c_value = args[i + 1]
constraints.append((c_colno, c_value))
i += 2
logp = _bql_logpdf(bdb, population_id, generator_id, modelnos, targets,
constraints)
return ieee_exp(logp)
def _bql_logpdf(bdb, population_id, generator_id, modelnos, targets,
constraints):
# P(T | C) = \sum_M P(T, M | C)
# = \sum_M P(T | C, M) P(M | C)
# = \sum_M P(T | C, M) P(M) P(C | M) / P(C)
# = \sum_M P(T | C, M) P(M) P(C | M) / \sum_M' P(C, M')
# = \sum_M P(T | C, M) P(M) P(C | M) / \sum_M' P(C | M') P(M')
#
# For a generator M, logpdf(M) computes P(T | C, M), and
# loglikelihood(M) computes P(C | M). For now, we weigh each
# generator uniformly; eventually, we ought to allow the user to
# specify a prior weight (XXX and update some kind of posterior
# weight?).
rowid, constraints = _retrieve_rowid_constraints(
bdb, population_id, constraints)
def logpdf(generator_id, backend):
return backend.logpdf_joint(
bdb, generator_id, modelnos, rowid, targets, constraints)
def loglikelihood(generator_id, backend):
if not constraints:
return 0
return backend.logpdf_joint(
bdb, generator_id, modelnos, rowid, constraints, [])
generator_ids = _retrieve_generator_ids(bdb, population_id, generator_id)
backends = [
core.bayesdb_generator_backend(bdb, g)
for g in generator_ids
]
loglikelihoods = map(loglikelihood, generator_ids, backends)
logpdfs = map(logpdf, generator_ids, backends)
return logavgexp_weighted(loglikelihoods, logpdfs)
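# A minimal sketch (illustrative, not part of this module's API) of the
# model-averaging identity above, assuming logavgexp_weighted(w, v) returns
# log(sum_i exp(w_i + v_i) / sum_i exp(w_i)):
#
#     import math
#     def logsumexp_sketch(xs):
#         m = max(xs)
#         return m + math.log(sum(math.exp(x - m) for x in xs))
#     def logavgexp_weighted_sketch(log_weights, log_values):
#         joint = [w + v for w, v in zip(log_weights, log_values)]
#         return logsumexp_sketch(joint) - logsumexp_sketch(log_weights)
#
# With uniform weights (no constraints, so all w_i are 0), this reduces to
# the plain log-mean-exp of the per-generator log densities.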
### BayesDB row functions
# Row function: SIMILARITY TO <target_row> IN THE CONTEXT OF <column>
def bql_row_similarity(
bdb, population_id, generator_id, modelnos, rowid, target_rowid, colno):
if target_rowid is None:
raise BQLError(bdb, 'No such target row for SIMILARITY')
modelnos = _retrieve_modelnos(modelnos)
def generator_similarity(generator_id):
backend = core.bayesdb_generator_backend(bdb, generator_id)
# XXX Change [colno] to colno by updating BayesDB_Backend.
similarity_list = backend.row_similarity(
bdb, generator_id, modelnos, rowid, target_rowid, [colno])
return stats.arithmetic_mean(similarity_list)
generator_ids = _retrieve_generator_ids(bdb, population_id, generator_id)
similarities = map(generator_similarity, generator_ids)
return stats.arithmetic_mean(similarities)
# Row function: PREDICTIVE RELEVANCE TO (<target_row>)
# [<AND HYPOTHETICAL ROWS WITH VALUES ((...))] IN THE CONTEXT OF <column>
def bql_row_predictive_relevance(
bdb, population_id, generator_id, modelnos, rowid_target, rowid_query,
colno, *constraint_args):
if rowid_target is None:
        raise BQLError(bdb, 'No such target row for PREDICTIVE RELEVANCE')
rowid_query = json.loads(rowid_query)
modelnos = _retrieve_modelnos(modelnos)
# Build the list of hypothetical values.
# Each sequence of values is separated by None to demarcate between rows.
splits = [-1] + [i for i, x in enumerate(constraint_args) if x is None]
assert splits[-1] == len(constraint_args) - 1
rows_list = [
constraint_args[splits[i]+1:splits[i+1]]
for i in range(len(splits)-1)
]
assert all(len(row)%2 == 0 for row in rows_list)
hypotheticals = [zip(row[::2], row[1::2]) for row in rows_list]
if len(rowid_query) == 0 and len(hypotheticals) == 0:
raise BQLError(bdb, 'No matching rows for PREDICTIVE RELEVANCE.')
def generator_similarity(generator_id):
backend = core.bayesdb_generator_backend(bdb, generator_id)
return backend.predictive_relevance(
bdb, generator_id, modelnos, rowid_target, rowid_query,
hypotheticals, colno)
generator_ids = _retrieve_generator_ids(bdb, population_id, generator_id)
sims = map(generator_similarity, generator_ids)
return stats.arithmetic_mean([stats.arithmetic_mean(s) for s in sims])
# Row function: PREDICTIVE PROBABILITY OF <targets> [GIVEN <constraints>]
def bql_row_column_predictive_probability(
bdb, population_id, generator_id, modelnos, rowid, targets,
constraints):
targets = json.loads(targets)
constraints = json.loads(constraints)
modelnos = _retrieve_modelnos(modelnos)
# Build the constraints and query from rowid, using a fresh rowid.
fresh_rowid = core.bayesdb_population_fresh_row_id(bdb, population_id)
def retrieve_values(colnos):
values = [
core.bayesdb_population_cell_value(bdb, population_id, rowid, colno)
for colno in colnos
]
        return [(c, v) for (c, v) in zip(colnos, values) if v is not None]
cgpm_targets = retrieve_values(targets)
# If all targets have NULL values, return None.
if len(cgpm_targets) == 0:
return None
cgpm_constraints = retrieve_values(constraints)
def generator_predprob(generator_id):
backend = core.bayesdb_generator_backend(bdb, generator_id)
return backend.logpdf_joint(
bdb, generator_id, modelnos, fresh_rowid, cgpm_targets,
cgpm_constraints)
generator_ids = _retrieve_generator_ids(bdb, population_id, generator_id)
predprobs = map(generator_predprob, generator_ids)
r = logmeanexp(predprobs)
return ieee_exp(r)
### Predict and simulate
def bql_predict(
bdb, population_id, generator_id, modelnos, rowid, colno, threshold,
numsamples):
# XXX Randomly sample 1 generator from the population, until we figure out
# how to aggregate imputations across different hypotheses.
modelnos = _retrieve_modelnos(modelnos)
if generator_id is None:
generator_ids = core.bayesdb_population_generators(bdb, population_id)
index = bdb.np_prng.randint(0, high=len(generator_ids))
generator_id = generator_ids[index]
backend = core.bayesdb_generator_backend(bdb, generator_id)
return backend.predict(
bdb, generator_id, modelnos, rowid, colno, threshold,
numsamples=numsamples)
def bql_predict_confidence(
bdb, population_id, generator_id, modelnos, rowid, colno, numsamples):
# XXX Do real imputation here!
# XXX Randomly sample 1 generator from the population, until we figure out
# how to aggregate imputations across different hypotheses.
if generator_id is None:
generator_ids = core.bayesdb_population_generators(bdb, population_id)
index = bdb.np_prng.randint(0, high=len(generator_ids))
generator_id = generator_ids[index]
modelnos = _retrieve_modelnos(modelnos)
backend = core.bayesdb_generator_backend(bdb, generator_id)
value, confidence = backend.predict_confidence(
bdb, generator_id, modelnos, rowid, colno, numsamples=numsamples)
# XXX Whattakludge!
return json.dumps({'value': value, 'confidence': confidence})
# XXX Whattakludge!
def bql_json_get(bdb, blob, key):
return json.loads(blob)[key]
def bayesdb_simulate(
bdb, population_id, generator_id, modelnos, constraints, colnos,
numpredictions=1, accuracy=None):
"""Simulate rows from a generative model, subject to constraints.
Returns a list of `numpredictions` tuples, with a value for each
column specified in the list `colnos`, conditioned on the
constraints in the list `constraints` of tuples ``(colno,
value)``.
The results are simulated from the predictive distribution on
fresh rows.
"""
modelnos = _retrieve_modelnos(modelnos)
rowid, constraints = _retrieve_rowid_constraints(
bdb, population_id, constraints)
def loglikelihood(generator_id, backend):
if not constraints:
return 0
return backend.logpdf_joint(
bdb, generator_id, modelnos, rowid, constraints, [])
def simulate(generator_id, backend, n):
return backend.simulate_joint(
bdb, generator_id, modelnos, rowid, colnos, constraints,
num_samples=n, accuracy=accuracy)
generator_ids = _retrieve_generator_ids(bdb, population_id, generator_id)
backends = [
core.bayesdb_generator_backend(bdb, generator_id)
for generator_id in generator_ids
]
if len(generator_ids) > 1:
loglikelihoods = map(loglikelihood, generator_ids, backends)
likelihoods = map(math.exp, loglikelihoods)
total_likelihood = sum(likelihoods)
if total_likelihood == 0:
# XXX Show the constraints with symbolic names.
raise BQLError(bdb, 'Impossible constraints: %r' % (constraints,))
probabilities = [
likelihood / total_likelihood
for likelihood in likelihoods
]
countses = bdb.np_prng.multinomial(
numpredictions, probabilities, size=1)
counts = countses[0]
elif len(generator_ids) == 1:
counts = [numpredictions]
else:
counts = []
rowses = map(simulate, generator_ids, backends, counts)
all_rows = [row for rows in rowses for row in rows]
assert all(isinstance(row, (tuple, list)) for row in all_rows)
return all_rows
### Seeded random number generation
def bql_rand(bdb):
return bdb.np_prng.uniform()
### Helper functions
def _retrieve_rowid_constraints(bdb, population_id, constraints):
rowid = core.bayesdb_population_fresh_row_id(bdb, population_id)
if constraints:
user_rowid = [
v for c, v in constraints
if c in core.bayesdb_rowid_tokens(bdb)
]
if len(user_rowid) == 1:
rowid = user_rowid[0]
elif len(user_rowid) > 1:
raise BQLError(bdb, 'Multiple rowids given: %s.' % (constraints,))
constraints = [
(c, v) for c, v in constraints
if c not in core.bayesdb_rowid_tokens(bdb)
]
return rowid, constraints
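# Illustrative example (hypothetical values): if bayesdb_rowid_tokens yields
# {'rowid'}, then constraints [('rowid', 5), (3, 'x')] resolve to rowid 5
# with remaining constraints [(3, 'x')].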
def _retrieve_generator_ids(bdb, population_id, generator_id):
if generator_id is None:
return core.bayesdb_population_generators(bdb, population_id)
return [generator_id]
def _retrieve_modelnos(modelnos):
return None if modelnos is None else json.loads(modelnos)
|
|
"""
CIECAM02 Colour Appearance Model
================================
Defines the *CIECAM02* colour appearance model objects:
- :class:`colour.appearance.InductionFactors_CIECAM02`
- :attr:`colour.VIEWING_CONDITIONS_CIECAM02`
- :class:`colour.CAM_Specification_CIECAM02`
- :func:`colour.XYZ_to_CIECAM02`
- :func:`colour.CIECAM02_to_XYZ`
References
----------
- :cite:`Fairchild2004c` : Fairchild, M. D. (2004). CIECAM02. In Color
Appearance Models (2nd ed., pp. 289-301). Wiley. ISBN:978-0-470-01216-1
- :cite:`InternationalElectrotechnicalCommission1999a` : International
Electrotechnical Commission. (1999). IEC 61966-2-1:1999 - Multimedia
systems and equipment - Colour measurement and management - Part 2-1:
Colour management - Default RGB colour space - sRGB (p. 51).
https://webstore.iec.ch/publication/6169
- :cite:`Luo2013` : Luo, Ming Ronnier, & Li, C. (2013). CIECAM02 and Its
Recent Developments. In C. Fernandez-Maloigne (Ed.), Advanced Color Image
Processing and Analysis (pp. 19-58). Springer New York.
doi:10.1007/978-1-4419-6190-7
- :cite:`Moroneya` : Moroney, N., Fairchild, M. D., Hunt, R. W. G., Li, C.,
Luo, M. R., & Newman, T. (2002). The CIECAM02 color appearance model. Color
and Imaging Conference, 1, 23-27.
- :cite:`Wikipedia2007a` : CIECAM02. (2007). Retrieved August 14, 2010, from
  http://en.wikipedia.org/wiki/CIECAM02
"""
from __future__ import annotations
import numpy as np
from collections import namedtuple
from dataclasses import astuple, dataclass, field
from colour.algebra import matrix_dot, spow, vector_dot
from colour.adaptation import CAT_CAT02
from colour.appearance.hunt import (
MATRIX_HPE_TO_XYZ,
MATRIX_XYZ_TO_HPE,
luminance_level_adaptation_factor,
)
from colour.colorimetry import CCS_ILLUMINANTS
from colour.constants import EPSILON
from colour.hints import (
ArrayLike,
Boolean,
Dict,
FloatingOrArrayLike,
FloatingOrNDArray,
NDArray,
Optional,
Tuple,
)
from colour.models import xy_to_XYZ
from colour.utilities import (
CaseInsensitiveMapping,
MixinDataclassArithmetic,
as_float,
as_float_array,
as_int_array,
from_range_degrees,
from_range_100,
has_only_nan,
ones,
to_domain_100,
to_domain_degrees,
tsplit,
tstack,
zeros,
)
from colour.utilities.documentation import (
DocstringDict,
is_documentation_building,
)
__author__ = "Colour Developers"
__copyright__ = "Copyright 2013 Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"CAT_INVERSE_CAT02",
"InductionFactors_CIECAM02",
"VIEWING_CONDITIONS_CIECAM02",
"HUE_DATA_FOR_HUE_QUADRATURE",
"CAM_KWARGS_CIECAM02_sRGB",
"CAM_Specification_CIECAM02",
"XYZ_to_CIECAM02",
"CIECAM02_to_XYZ",
"chromatic_induction_factors",
"base_exponential_non_linearity",
"viewing_condition_dependent_parameters",
"degree_of_adaptation",
"full_chromatic_adaptation_forward",
"full_chromatic_adaptation_inverse",
"RGB_to_rgb",
"rgb_to_RGB",
"post_adaptation_non_linear_response_compression_forward",
"post_adaptation_non_linear_response_compression_inverse",
"opponent_colour_dimensions_forward",
"opponent_colour_dimensions_inverse",
"hue_angle",
"hue_quadrature",
"eccentricity_factor",
"achromatic_response_forward",
"achromatic_response_inverse",
"lightness_correlate",
"brightness_correlate",
"temporary_magnitude_quantity_forward",
"temporary_magnitude_quantity_inverse",
"chroma_correlate",
"colourfulness_correlate",
"saturation_correlate",
"P",
"matrix_post_adaptation_non_linear_response_compression",
]
CAT_INVERSE_CAT02: NDArray = np.linalg.inv(CAT_CAT02)
"""Inverse CAT02 chromatic adaptation transform."""
class InductionFactors_CIECAM02(
namedtuple("InductionFactors_CIECAM02", ("F", "c", "N_c"))
):
"""
*CIECAM02* colour appearance model induction factors.
Parameters
----------
F
Maximum degree of adaptation :math:`F`.
c
Exponential non-linearity :math:`c`.
N_c
Chromatic induction factor :math:`N_c`.
References
----------
:cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
:cite:`Wikipedia2007a`
"""
VIEWING_CONDITIONS_CIECAM02: CaseInsensitiveMapping = CaseInsensitiveMapping(
{
"Average": InductionFactors_CIECAM02(1, 0.69, 1),
"Dim": InductionFactors_CIECAM02(0.9, 0.59, 0.9),
"Dark": InductionFactors_CIECAM02(0.8, 0.525, 0.8),
}
)
VIEWING_CONDITIONS_CIECAM02.__doc__ = """
Reference *CIECAM02* colour appearance model viewing conditions.
References
----------
:cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
:cite:`Wikipedia2007a`
"""
HUE_DATA_FOR_HUE_QUADRATURE: Dict = {
"h_i": np.array([20.14, 90.00, 164.25, 237.53, 380.14]),
"e_i": np.array([0.8, 0.7, 1.0, 1.2, 0.8]),
"H_i": np.array([0.0, 100.0, 200.0, 300.0, 400.0]),
}
CAM_KWARGS_CIECAM02_sRGB: Dict = {
"XYZ_w": xy_to_XYZ(
CCS_ILLUMINANTS["CIE 1931 2 Degree Standard Observer"]["D65"]
)
* 100,
"L_A": 64 / np.pi * 0.2,
"Y_b": 20,
"surround": VIEWING_CONDITIONS_CIECAM02["Average"],
}
if is_documentation_building(): # pragma: no cover
CAM_KWARGS_CIECAM02_sRGB = DocstringDict(CAM_KWARGS_CIECAM02_sRGB)
CAM_KWARGS_CIECAM02_sRGB.__doc__ = """
Default parameter values for the *CIECAM02* colour appearance model usage in
the context of *sRGB*.
References
----------
:cite:`Fairchild2004c`, :cite:`InternationalElectrotechnicalCommission1999a`,
:cite:`Luo2013`, :cite:`Moroneya`, :cite:`Wikipedia2007a`
"""
@dataclass
class CAM_Specification_CIECAM02(MixinDataclassArithmetic):
"""
Define the *CIECAM02* colour appearance model specification.
Parameters
----------
J
Correlate of *Lightness* :math:`J`.
C
Correlate of *chroma* :math:`C`.
h
*Hue* angle :math:`h` in degrees.
s
Correlate of *saturation* :math:`s`.
Q
Correlate of *brightness* :math:`Q`.
M
Correlate of *colourfulness* :math:`M`.
H
*Hue* :math:`h` quadrature :math:`H`.
HC
*Hue* :math:`h` composition :math:`H^C`.
References
----------
:cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
:cite:`Wikipedia2007a`
"""
J: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
C: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
h: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
s: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
Q: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
M: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
H: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
HC: Optional[FloatingOrNDArray] = field(default_factory=lambda: None)
def XYZ_to_CIECAM02(
XYZ: ArrayLike,
XYZ_w: ArrayLike,
L_A: FloatingOrArrayLike,
Y_b: FloatingOrArrayLike,
surround: InductionFactors_CIECAM02 = VIEWING_CONDITIONS_CIECAM02[
"Average"
],
discount_illuminant: Boolean = False,
) -> CAM_Specification_CIECAM02:
"""
Compute the *CIECAM02* colour appearance model correlates from given
*CIE XYZ* tristimulus values.
Parameters
----------
XYZ
*CIE XYZ* tristimulus values of test sample / stimulus.
XYZ_w
*CIE XYZ* tristimulus values of reference white.
L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2` (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b
        Luminous factor of background :math:`Y_b` such that
        :math:`Y_b = 100 \\cdot L_b / L_w` where :math:`L_w` is the luminance
        of the light source and :math:`L_b` is the luminance of the
        background. For viewing images, :math:`Y_b` can be the average
        :math:`Y` value for the pixels in the entire image; frequently, a
        :math:`Y` value of 20, approximating an :math:`L^*` of 50, is used.
surround
Surround viewing conditions induction factors.
discount_illuminant
Truth value indicating if the illuminant should be discounted.
Returns
-------
:class:`colour.CAM_Specification_CIECAM02`
*CIECAM02* colour appearance model specification.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``XYZ`` | [0, 100] | [0, 1] |
+------------+-----------------------+---------------+
| ``XYZ_w`` | [0, 100] | [0, 1] |
+------------+-----------------------+---------------+
+----------------------------------+-----------------------\
+---------------+
| **Range** | **Scale - Reference** \
| **Scale - 1** |
+==================================+=======================\
+===============+
| ``CAM_Specification_CIECAM02.J`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.C`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.h`` | [0, 360] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.s`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.Q`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.M`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.H`` | [0, 400] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
References
----------
:cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
:cite:`Wikipedia2007a`
Examples
--------
>>> XYZ = np.array([19.01, 20.00, 21.78])
>>> XYZ_w = np.array([95.05, 100.00, 108.88])
>>> L_A = 318.31
>>> Y_b = 20.0
>>> surround = VIEWING_CONDITIONS_CIECAM02['Average']
>>> XYZ_to_CIECAM02(XYZ, XYZ_w, L_A, Y_b, surround) # doctest: +ELLIPSIS
CAM_Specification_CIECAM02(J=41.7310911..., C=0.1047077..., \
h=219.0484326..., s=2.3603053..., Q=195.3713259..., M=0.1088421..., \
H=278.0607358..., HC=None)
"""
XYZ = to_domain_100(XYZ)
XYZ_w = to_domain_100(XYZ_w)
_X_w, Y_w, _Z_w = tsplit(XYZ_w)
L_A = as_float_array(L_A)
Y_b = as_float_array(Y_b)
n, F_L, N_bb, N_cb, z = viewing_condition_dependent_parameters(
Y_b, Y_w, L_A
)
# Converting *CIE XYZ* tristimulus values to *CMCCAT2000* transform
# sharpened *RGB* values.
RGB = vector_dot(CAT_CAT02, XYZ)
RGB_w = vector_dot(CAT_CAT02, XYZ_w)
# Computing degree of adaptation :math:`D`.
D = (
degree_of_adaptation(surround.F, L_A)
if not discount_illuminant
else ones(L_A.shape)
)
# Computing full chromatic adaptation.
RGB_c = full_chromatic_adaptation_forward(RGB, RGB_w, Y_w, D)
RGB_wc = full_chromatic_adaptation_forward(RGB_w, RGB_w, Y_w, D)
# Converting to *Hunt-Pointer-Estevez* colourspace.
RGB_p = RGB_to_rgb(RGB_c)
RGB_pw = RGB_to_rgb(RGB_wc)
# Applying forward post-adaptation non-linear response compression.
RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_p, F_L)
RGB_aw = post_adaptation_non_linear_response_compression_forward(
RGB_pw, F_L
)
# Converting to preliminary cartesian coordinates.
a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))
# Computing the *hue* angle :math:`h`.
h = hue_angle(a, b)
# Computing hue :math:`h` quadrature :math:`H`.
H = hue_quadrature(h)
# TODO: Compute hue composition.
# Computing eccentricity factor *e_t*.
e_t = eccentricity_factor(h)
# Computing achromatic responses for the stimulus and the whitepoint.
A = achromatic_response_forward(RGB_a, N_bb)
A_w = achromatic_response_forward(RGB_aw, N_bb)
# Computing the correlate of *Lightness* :math:`J`.
J = lightness_correlate(A, A_w, surround.c, z)
# Computing the correlate of *brightness* :math:`Q`.
Q = brightness_correlate(surround.c, J, A_w, F_L)
# Computing the correlate of *chroma* :math:`C`.
C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)
# Computing the correlate of *colourfulness* :math:`M`.
M = colourfulness_correlate(C, F_L)
# Computing the correlate of *saturation* :math:`s`.
s = saturation_correlate(M, Q)
return CAM_Specification_CIECAM02(
as_float(from_range_100(J)),
as_float(from_range_100(C)),
as_float(from_range_degrees(h)),
as_float(from_range_100(s)),
as_float(from_range_100(Q)),
as_float(from_range_100(M)),
as_float(from_range_degrees(H, 400)),
None,
)
def CIECAM02_to_XYZ(
specification: CAM_Specification_CIECAM02,
XYZ_w: ArrayLike,
L_A: FloatingOrArrayLike,
Y_b: FloatingOrArrayLike,
surround: InductionFactors_CIECAM02 = VIEWING_CONDITIONS_CIECAM02[
"Average"
],
discount_illuminant: Boolean = False,
) -> NDArray:
"""
Convert from *CIECAM02* specification to *CIE XYZ* tristimulus values.
Parameters
----------
specification
*CIECAM02* colour appearance model specification. Correlate of
*Lightness* :math:`J`, correlate of *chroma* :math:`C` or correlate of
*colourfulness* :math:`M` and *hue* angle :math:`h` in degrees must be
specified, e.g. :math:`JCh` or :math:`JMh`.
XYZ_w
*CIE XYZ* tristimulus values of reference white.
L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2` (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b
        Luminous factor of background :math:`Y_b` such that
        :math:`Y_b = 100 \\cdot L_b / L_w` where :math:`L_w` is the luminance
        of the light source and :math:`L_b` is the luminance of the
        background. For viewing images, :math:`Y_b` can be the average
        :math:`Y` value for the pixels in the entire image; frequently, a
        :math:`Y` value of 20, approximating an :math:`L^*` of 50, is used.
surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.
Returns
-------
:class:`numpy.ndarray`
*CIE XYZ* tristimulus values.
Raises
------
ValueError
        If neither the *C* nor the *M* correlate is defined in the
        ``CAM_Specification_CIECAM02`` argument.
Notes
-----
+----------------------------------+-----------------------\
+---------------+
| **Domain** | **Scale - Reference** \
| **Scale - 1** |
+==================================+=======================\
+===============+
| ``CAM_Specification_CIECAM02.J`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.C`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.h`` | [0, 360] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.s`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.Q`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``CAM_Specification_CIECAM02.M`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
    | ``CAM_Specification_CIECAM02.H`` | [0, 400]                \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
| ``XYZ_w`` | [0, 100] \
| [0, 1] |
+----------------------------------+-----------------------\
+---------------+
+-----------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+===========+=======================+===============+
| ``XYZ`` | [0, 100] | [0, 1] |
+-----------+-----------------------+---------------+
References
----------
:cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
:cite:`Wikipedia2007a`
Examples
--------
>>> specification = CAM_Specification_CIECAM02(J=41.731091132513917,
... C=0.104707757171031,
... h=219.048432658311780)
>>> XYZ_w = np.array([95.05, 100.00, 108.88])
>>> L_A = 318.31
>>> Y_b = 20.0
>>> CIECAM02_to_XYZ(specification, XYZ_w, L_A, Y_b) # doctest: +ELLIPSIS
array([ 19.01..., 20... , 21.78...])
"""
J, C, h, _s, _Q, M, _H, _HC = astuple(specification)
J = to_domain_100(J)
C = to_domain_100(C)
h = to_domain_degrees(h)
M = to_domain_100(M)
L_A = as_float_array(L_A)
XYZ_w = to_domain_100(XYZ_w)
_X_w, Y_w, _Z_w = tsplit(XYZ_w)
n, F_L, N_bb, N_cb, z = viewing_condition_dependent_parameters(
Y_b, Y_w, L_A
)
if has_only_nan(C) and not has_only_nan(M):
C = M / spow(F_L, 0.25)
elif has_only_nan(C):
raise ValueError(
'Either "C" or "M" correlate must be defined in '
'the "CAM_Specification_CIECAM02" argument!'
)
# Converting *CIE XYZ* tristimulus values to *CMCCAT2000* transform
# sharpened *RGB* values.
RGB_w = vector_dot(CAT_CAT02, XYZ_w)
# Computing degree of adaptation :math:`D`.
D = (
degree_of_adaptation(surround.F, L_A)
if not discount_illuminant
else ones(L_A.shape)
)
# Computing full chromatic adaptation.
RGB_wc = full_chromatic_adaptation_forward(RGB_w, RGB_w, Y_w, D)
# Converting to *Hunt-Pointer-Estevez* colourspace.
RGB_pw = RGB_to_rgb(RGB_wc)
# Applying post-adaptation non-linear response compression.
RGB_aw = post_adaptation_non_linear_response_compression_forward(
RGB_pw, F_L
)
# Computing achromatic response for the whitepoint.
A_w = achromatic_response_forward(RGB_aw, N_bb)
# Computing temporary magnitude quantity :math:`t`.
t = temporary_magnitude_quantity_inverse(C, J, n)
# Computing eccentricity factor *e_t*.
e_t = eccentricity_factor(h)
# Computing achromatic response :math:`A` for the stimulus.
A = achromatic_response_inverse(A_w, J, surround.c, z)
# Computing *P_1* to *P_3*.
P_n = P(surround.N_c, N_cb, e_t, t, A, N_bb)
_P_1, P_2, _P_3 = tsplit(P_n)
# Computing opponent colour dimensions :math:`a` and :math:`b`.
a, b = tsplit(opponent_colour_dimensions_inverse(P_n, h))
# Applying post-adaptation non-linear response compression matrix.
RGB_a = matrix_post_adaptation_non_linear_response_compression(P_2, a, b)
# Applying inverse post-adaptation non-linear response compression.
RGB_p = post_adaptation_non_linear_response_compression_inverse(RGB_a, F_L)
# Converting to *Hunt-Pointer-Estevez* colourspace.
RGB_c = rgb_to_RGB(RGB_p)
# Applying inverse full chromatic adaptation.
RGB = full_chromatic_adaptation_inverse(RGB_c, RGB_w, Y_w, D)
# Converting *CMCCAT2000* transform sharpened *RGB* values to *CIE XYZ*
# tristimulus values.
XYZ = vector_dot(CAT_INVERSE_CAT02, RGB)
return from_range_100(XYZ)
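# Round-trip sketch (illustrative, reusing the values from the Examples
# above): XYZ_to_CIECAM02 and CIECAM02_to_XYZ are mutual inverses, so a
# specification converted back yields the original tristimulus values up to
# floating point error:
#
#     spec = XYZ_to_CIECAM02(XYZ, XYZ_w, L_A, Y_b)
#     np.testing.assert_allclose(
#         CIECAM02_to_XYZ(spec, XYZ_w, L_A, Y_b), XYZ, rtol=1e-7, atol=1e-7)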
def chromatic_induction_factors(n: FloatingOrArrayLike) -> NDArray:
"""
Return the chromatic induction factors :math:`N_{bb}` and :math:`N_{cb}`.
Parameters
----------
n
Function of the luminance factor of the background :math:`n`.
Returns
-------
:class:`numpy.ndarray`
Chromatic induction factors :math:`N_{bb}` and :math:`N_{cb}`.
Examples
--------
>>> chromatic_induction_factors(0.2) # doctest: +ELLIPSIS
array([ 1.000304, 1.000304])
"""
n = as_float_array(n)
N_bb = N_cb = as_float(0.725) * spow(1 / n, 0.2)
N_bbcb = tstack([N_bb, N_cb])
return N_bbcb
def base_exponential_non_linearity(
n: FloatingOrArrayLike,
) -> FloatingOrNDArray:
"""
    Return the base exponential non-linearity :math:`z`.
Parameters
----------
n
Function of the luminance factor of the background :math:`n`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Base exponential non-linearity :math:`z`.
Examples
--------
>>> base_exponential_non_linearity(0.2) # doctest: +ELLIPSIS
1.9272135...
"""
n = as_float_array(n)
z = 1.48 + np.sqrt(n)
return z
def viewing_condition_dependent_parameters(
Y_b: FloatingOrArrayLike,
Y_w: FloatingOrArrayLike,
L_A: FloatingOrArrayLike,
) -> Tuple[
FloatingOrNDArray,
FloatingOrNDArray,
FloatingOrNDArray,
FloatingOrNDArray,
FloatingOrNDArray,
]:
"""
Return the viewing condition dependent parameters.
Parameters
----------
Y_b
Adapting field *Y* tristimulus value :math:`Y_b`.
Y_w
Whitepoint *Y* tristimulus value :math:`Y_w`.
L_A
Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.
Returns
-------
:class:`tuple`
Viewing condition dependent parameters.
Examples
--------
>>> viewing_condition_dependent_parameters(20.0, 100.0, 318.31)
... # doctest: +ELLIPSIS
(0.2000000..., 1.1675444..., 1.0003040..., 1.0003040..., 1.9272135...)
"""
Y_b = as_float_array(Y_b)
Y_w = as_float_array(Y_w)
n = Y_b / Y_w
F_L = luminance_level_adaptation_factor(L_A)
N_bb, N_cb = tsplit(chromatic_induction_factors(n))
z = base_exponential_non_linearity(n)
return n, F_L, N_bb, N_cb, z
def degree_of_adaptation(
F: FloatingOrArrayLike, L_A: FloatingOrArrayLike
) -> FloatingOrNDArray:
"""
Return the degree of adaptation :math:`D` from given surround maximum
degree of adaptation :math:`F` and adapting field *luminance* :math:`L_A`
in :math:`cd/m^2`.
Parameters
----------
F
Surround maximum degree of adaptation :math:`F`.
L_A
Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Degree of adaptation :math:`D`.
Examples
--------
>>> degree_of_adaptation(1.0, 318.31) # doctest: +ELLIPSIS
0.9944687...
"""
F = as_float_array(F)
L_A = as_float_array(L_A)
D = F * (1 - (1 / 3.6) * np.exp((-L_A - 42) / 92))
return D
def full_chromatic_adaptation_forward(
RGB: ArrayLike,
RGB_w: ArrayLike,
Y_w: FloatingOrArrayLike,
D: FloatingOrArrayLike,
) -> NDArray:
"""
Apply full chromatic adaptation to given *CMCCAT2000* transform sharpened
*RGB* array using given *CMCCAT2000* transform sharpened whitepoint
*RGB_w* array.
Parameters
----------
RGB
*CMCCAT2000* transform sharpened *RGB* array.
RGB_w
*CMCCAT2000* transform sharpened whitepoint *RGB_w* array.
Y_w
Whitepoint *Y* tristimulus value :math:`Y_w`.
D
Degree of adaptation :math:`D`.
Returns
-------
:class:`numpy.ndarray`
Adapted *RGB* array.
Examples
--------
>>> RGB = np.array([18.985456, 20.707422, 21.747482])
>>> RGB_w = np.array([94.930528, 103.536988, 108.717742])
>>> Y_w = 100.0
>>> D = 0.994468780088
>>> full_chromatic_adaptation_forward(RGB, RGB_w, Y_w, D)
... # doctest: +ELLIPSIS
array([ 19.9937078..., 20.0039363..., 20.0132638...])
"""
RGB = as_float_array(RGB)
RGB_w = as_float_array(RGB_w)
Y_w = as_float_array(Y_w)
D = as_float_array(D)
RGB_c = (
(Y_w[..., np.newaxis] * D[..., np.newaxis] / RGB_w)
+ 1
- D[..., np.newaxis]
) * RGB
return RGB_c
def full_chromatic_adaptation_inverse(
RGB: ArrayLike,
RGB_w: ArrayLike,
Y_w: FloatingOrArrayLike,
D: FloatingOrArrayLike,
) -> NDArray:
"""
Revert full chromatic adaptation of given *CMCCAT2000* transform sharpened
*RGB* array using given *CMCCAT2000* transform sharpened whitepoint
*RGB_w* array.
Parameters
----------
RGB
*CMCCAT2000* transform sharpened *RGB* array.
RGB_w
*CMCCAT2000* transform sharpened whitepoint *RGB_w* array.
Y_w
Whitepoint *Y* tristimulus value :math:`Y_w`.
D
Degree of adaptation :math:`D`.
Returns
-------
:class:`numpy.ndarray`
Adapted *RGB* array.
Examples
--------
>>> RGB = np.array([19.99370783, 20.00393634, 20.01326387])
>>> RGB_w = np.array([94.930528, 103.536988, 108.717742])
>>> Y_w = 100.0
>>> D = 0.994468780088
>>> full_chromatic_adaptation_inverse(RGB, RGB_w, Y_w, D)
array([ 18.985456, 20.707422, 21.747482])
"""
RGB = as_float_array(RGB)
RGB_w = as_float_array(RGB_w)
Y_w = as_float_array(Y_w)
D = as_float_array(D)
RGB_c = RGB / (
Y_w[..., np.newaxis] * (D[..., np.newaxis] / RGB_w)
+ 1
- D[..., np.newaxis]
)
return RGB_c
def RGB_to_rgb(RGB: ArrayLike) -> NDArray:
"""
Convert given *RGB* array to *Hunt-Pointer-Estevez*
:math:`\\rho\\gamma\\beta` colourspace.
Parameters
----------
RGB
*RGB* array.
Returns
-------
:class:`numpy.ndarray`
*Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta` colourspace array.
Examples
--------
>>> RGB = np.array([19.99370783, 20.00393634, 20.01326387])
>>> RGB_to_rgb(RGB) # doctest: +ELLIPSIS
array([ 19.9969397..., 20.0018612..., 20.0135053...])
"""
rgb = vector_dot(matrix_dot(MATRIX_XYZ_TO_HPE, CAT_INVERSE_CAT02), RGB)
return rgb
def rgb_to_RGB(rgb: ArrayLike) -> NDArray:
"""
Convert given *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta`
colourspace array to *RGB* array.
Parameters
----------
rgb
*Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta` colourspace array.
Returns
-------
:class:`numpy.ndarray`
*RGB* array.
Examples
--------
>>> rgb = np.array([19.99693975, 20.00186123, 20.01350530])
>>> rgb_to_RGB(rgb) # doctest: +ELLIPSIS
array([ 19.9937078..., 20.0039363..., 20.0132638...])
"""
RGB = vector_dot(matrix_dot(CAT_CAT02, MATRIX_HPE_TO_XYZ), rgb)
return RGB
def post_adaptation_non_linear_response_compression_forward(
RGB: ArrayLike, F_L: FloatingOrArrayLike
) -> NDArray:
"""
Return given *CMCCAT2000* transform sharpened *RGB* array with post
adaptation non-linear response compression.
Parameters
----------
RGB
*CMCCAT2000* transform sharpened *RGB* array.
F_L
*Luminance* level adaptation factor :math:`F_L`.
Returns
-------
:class:`numpy.ndarray`
Compressed *CMCCAT2000* transform sharpened *RGB* array.
Notes
-----
- This definition implements negative values handling as per
:cite:`Luo2013`.
Examples
--------
>>> RGB = np.array([19.99693975, 20.00186123, 20.01350530])
>>> F_L = 1.16754446415
>>> post_adaptation_non_linear_response_compression_forward(RGB, F_L)
... # doctest: +ELLIPSIS
array([ 7.9463202..., 7.9471152..., 7.9489959...])
"""
RGB = as_float_array(RGB)
F_L = as_float_array(F_L)
F_L_RGB = spow(F_L[..., np.newaxis] * np.absolute(RGB) / 100, 0.42)
RGB_c = (400 * np.sign(RGB) * F_L_RGB) / (27.13 + F_L_RGB) + 0.1
return RGB_c
def post_adaptation_non_linear_response_compression_inverse(
RGB: ArrayLike, F_L: FloatingOrArrayLike
) -> NDArray:
"""
Return given *CMCCAT2000* transform sharpened *RGB* array without post
adaptation non-linear response compression.
Parameters
----------
RGB
*CMCCAT2000* transform sharpened *RGB* array.
F_L
*Luminance* level adaptation factor :math:`F_L`.
Returns
-------
:class:`numpy.ndarray`
Uncompressed *CMCCAT2000* transform sharpened *RGB* array.
Examples
--------
>>> RGB = np.array([7.94632020, 7.94711528, 7.94899595])
>>> F_L = 1.16754446415
>>> post_adaptation_non_linear_response_compression_inverse(RGB, F_L)
... # doctest: +ELLIPSIS
array([ 19.9969397..., 20.0018612..., 20.0135052...])
"""
RGB = as_float_array(RGB)
F_L = as_float_array(F_L)
RGB_p = (
np.sign(RGB - 0.1)
* (100 / F_L[..., np.newaxis])
* spow(
(27.13 * np.absolute(RGB - 0.1)) / (400 - np.absolute(RGB - 0.1)),
1 / 0.42,
)
)
return RGB_p
def opponent_colour_dimensions_forward(RGB: ArrayLike) -> NDArray:
"""
Return opponent colour dimensions from given compressed *CMCCAT2000*
transform sharpened *RGB* array for forward *CIECAM02* implementation.
Parameters
----------
RGB
Compressed *CMCCAT2000* transform sharpened *RGB* array.
Returns
-------
:class:`numpy.ndarray`
Opponent colour dimensions.
Examples
--------
>>> RGB = np.array([7.94632020, 7.94711528, 7.94899595])
>>> opponent_colour_dimensions_forward(RGB) # doctest: +ELLIPSIS
array([-0.0006241..., -0.0005062...])
"""
R, G, B = tsplit(RGB)
a = R - 12 * G / 11 + B / 11
b = (R + G - 2 * B) / 9
ab = tstack([a, b])
return ab
def opponent_colour_dimensions_inverse(
P_n: ArrayLike, h: FloatingOrArrayLike
) -> NDArray:
"""
Return opponent colour dimensions from given points :math:`P_n` and hue
:math:`h` in degrees for inverse *CIECAM02* implementation.
Parameters
----------
P_n
Points :math:`P_n`.
h
Hue :math:`h` in degrees.
Returns
-------
:class:`numpy.ndarray`
Opponent colour dimensions.
Notes
-----
- This definition implements negative values handling as per
:cite:`Luo2013`.
Examples
--------
>>> P_n = np.array([30162.89081534, 24.23720547, 1.05000000])
>>> h = -140.95156734
>>> opponent_colour_dimensions_inverse(P_n, h) # doctest: +ELLIPSIS
array([-0.0006241..., -0.0005062...])
"""
P_1, P_2, P_3 = tsplit(P_n)
hr = np.radians(h)
sin_hr = np.sin(hr)
cos_hr = np.cos(hr)
P_4 = P_1 / sin_hr
P_5 = P_1 / cos_hr
n = P_2 * (2 + P_3) * (460 / 1403)
a = zeros(hr.shape)
b = zeros(hr.shape)
b = np.where(
np.isfinite(P_1) * np.abs(sin_hr) >= np.abs(cos_hr),
(
n
/ (
P_4
+ (2 + P_3) * (220 / 1403) * (cos_hr / sin_hr)
- (27 / 1403)
+ P_3 * (6300 / 1403)
)
),
b,
)
a = np.where(
np.isfinite(P_1) * np.abs(sin_hr) >= np.abs(cos_hr),
b * (cos_hr / sin_hr),
a,
)
a = np.where(
np.isfinite(P_1) * np.abs(sin_hr) < np.abs(cos_hr),
(
n
/ (
P_5
+ (2 + P_3) * (220 / 1403)
- ((27 / 1403) - P_3 * (6300 / 1403)) * (sin_hr / cos_hr)
)
),
a,
)
b = np.where(
np.isfinite(P_1) * np.abs(sin_hr) < np.abs(cos_hr),
a * (sin_hr / cos_hr),
b,
)
ab = tstack([a, b])
return ab
def hue_angle(
a: FloatingOrArrayLike, b: FloatingOrArrayLike
) -> FloatingOrNDArray:
"""
Return the *hue* angle :math:`h` in degrees.
Parameters
----------
a
Opponent colour dimension :math:`a`.
b
Opponent colour dimension :math:`b`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Hue* angle :math:`h` in degrees.
Examples
--------
>>> a = -0.000624112068243
>>> b = -0.000506270106773
>>> hue_angle(a, b) # doctest: +ELLIPSIS
219.0484326...
"""
a = as_float_array(a)
b = as_float_array(b)
h = np.degrees(np.arctan2(b, a)) % 360
return as_float(h)
def hue_quadrature(h: FloatingOrArrayLike) -> FloatingOrNDArray:
"""
Return the hue quadrature from given hue :math:`h` angle in degrees.
Parameters
----------
h
Hue :math:`h` angle in degrees.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Hue quadrature.
Examples
--------
>>> hue_quadrature(219.0484326582719) # doctest: +ELLIPSIS
278.0607358...
"""
h = as_float_array(h)
h_i = HUE_DATA_FOR_HUE_QUADRATURE["h_i"]
e_i = HUE_DATA_FOR_HUE_QUADRATURE["e_i"]
H_i = HUE_DATA_FOR_HUE_QUADRATURE["H_i"]
# *np.searchsorted* returns an erroneous index if a *nan* is used as input.
h[np.asarray(np.isnan(h))] = 0
i = as_int_array(np.searchsorted(h_i, h, side="left") - 1)
h_ii = h_i[i]
e_ii = e_i[i]
H_ii = H_i[i]
h_ii1 = h_i[i + 1]
e_ii1 = e_i[i + 1]
H = H_ii + (
(100 * (h - h_ii) / e_ii) / ((h - h_ii) / e_ii + (h_ii1 - h) / e_ii1)
)
H = np.where(
h < 20.14,
385.9 + (14.1 * h / 0.856) / (h / 0.856 + (20.14 - h) / 0.8),
H,
)
H = np.where(
h >= 237.53,
H_ii
+ (
(85.9 * (h - h_ii) / e_ii)
/ ((h - h_ii) / e_ii + (360 - h) / 0.856)
),
H,
)
return as_float(H)
def eccentricity_factor(h: FloatingOrArrayLike) -> FloatingOrNDArray:
"""
Return the eccentricity factor :math:`e_t` from given hue :math:`h` angle
in degrees for forward *CIECAM02* implementation.
Parameters
----------
h
Hue :math:`h` angle in degrees.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Eccentricity factor :math:`e_t`.
Examples
--------
>>> eccentricity_factor(-140.951567342) # doctest: +ELLIPSIS
1.1740054...
"""
h = as_float_array(h)
e_t = 1 / 4 * (np.cos(2 + h * np.pi / 180) + 3.8)
return e_t
def achromatic_response_forward(
RGB: ArrayLike, N_bb: FloatingOrArrayLike
) -> FloatingOrNDArray:
"""
Return the achromatic response :math:`A` from given compressed
*CMCCAT2000* transform sharpened *RGB* array and :math:`N_{bb}` chromatic
induction factor for forward *CIECAM02* implementation.
Parameters
----------
RGB
Compressed *CMCCAT2000* transform sharpened *RGB* array.
N_bb
Chromatic induction factor :math:`N_{bb}`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Achromatic response :math:`A`.
Examples
--------
>>> RGB = np.array([7.94632020, 7.94711528, 7.94899595])
>>> N_bb = 1.000304004559381
>>> achromatic_response_forward(RGB, N_bb) # doctest: +ELLIPSIS
23.9394809...
"""
R, G, B = tsplit(RGB)
A = (2 * R + G + (1 / 20) * B - 0.305) * N_bb
return A
def achromatic_response_inverse(
A_w: FloatingOrArrayLike,
J: FloatingOrArrayLike,
c: FloatingOrArrayLike,
z: FloatingOrArrayLike,
) -> FloatingOrNDArray:
"""
Return the achromatic response :math:`A` from given achromatic response
:math:`A_w` for the whitepoint, *Lightness* correlate :math:`J`, surround
exponential non-linearity :math:`c` and base exponential non-linearity
:math:`z` for inverse *CIECAM02* implementation.
Parameters
----------
A_w
Achromatic response :math:`A_w` for the whitepoint.
J
*Lightness* correlate :math:`J`.
c
Surround exponential non-linearity :math:`c`.
z
Base exponential non-linearity :math:`z`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Achromatic response :math:`A`.
Examples
--------
>>> A_w = 46.1882087914
>>> J = 41.73109113251392
>>> c = 0.69
>>> z = 1.927213595499958
>>> achromatic_response_inverse(A_w, J, c, z) # doctest: +ELLIPSIS
23.9394809...
"""
A_w = as_float_array(A_w)
J = as_float_array(J)
c = as_float_array(c)
z = as_float_array(z)
A = A_w * spow(J / 100, 1 / (c * z))
return A
def lightness_correlate(
A: FloatingOrArrayLike,
A_w: FloatingOrArrayLike,
c: FloatingOrArrayLike,
z: FloatingOrArrayLike,
) -> FloatingOrNDArray:
"""
Return the *Lightness* correlate :math:`J`.
Parameters
----------
A
Achromatic response :math:`A` for the stimulus.
A_w
Achromatic response :math:`A_w` for the whitepoint.
c
Surround exponential non-linearity :math:`c`.
z
Base exponential non-linearity :math:`z`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Lightness* correlate :math:`J`.
Examples
--------
>>> A = 23.9394809667
>>> A_w = 46.1882087914
>>> c = 0.69
>>> z = 1.9272135955
>>> lightness_correlate(A, A_w, c, z) # doctest: +ELLIPSIS
41.7310911...
"""
A = as_float_array(A)
A_w = as_float_array(A_w)
c = as_float_array(c)
z = as_float_array(z)
J = 100 * spow(A / A_w, c * z)
return J
def brightness_correlate(
c: FloatingOrArrayLike,
J: FloatingOrArrayLike,
A_w: FloatingOrArrayLike,
F_L: FloatingOrArrayLike,
) -> FloatingOrNDArray:
"""
Return the *brightness* correlate :math:`Q`.
Parameters
----------
c
Surround exponential non-linearity :math:`c`.
J
*Lightness* correlate :math:`J`.
A_w
Achromatic response :math:`A_w` for the whitepoint.
F_L
*Luminance* level adaptation factor :math:`F_L`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Brightness* correlate :math:`Q`.
Examples
--------
>>> c = 0.69
>>> J = 41.7310911325
>>> A_w = 46.1882087914
>>> F_L = 1.16754446415
>>> brightness_correlate(c, J, A_w, F_L) # doctest: +ELLIPSIS
195.3713259...
"""
c = as_float_array(c)
J = as_float_array(J)
A_w = as_float_array(A_w)
F_L = as_float_array(F_L)
Q = (4 / c) * np.sqrt(J / 100) * (A_w + 4) * spow(F_L, 0.25)
return Q
def temporary_magnitude_quantity_forward(
N_c: FloatingOrArrayLike,
N_cb: FloatingOrArrayLike,
e_t: FloatingOrArrayLike,
a: FloatingOrArrayLike,
b: FloatingOrArrayLike,
RGB_a: ArrayLike,
) -> FloatingOrNDArray:
"""
    Return the temporary magnitude quantity :math:`t` for forward *CIECAM02*
    implementation.
Parameters
----------
N_c
Surround chromatic induction factor :math:`N_{c}`.
N_cb
Chromatic induction factor :math:`N_{cb}`.
e_t
Eccentricity factor :math:`e_t`.
a
Opponent colour dimension :math:`a`.
b
Opponent colour dimension :math:`b`.
RGB_a
Compressed stimulus *CMCCAT2000* transform sharpened *RGB* array.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Temporary magnitude quantity :math:`t`.
Examples
--------
>>> N_c = 1.0
>>> N_cb = 1.00030400456
>>> e_t = 1.174005472851914
>>> a = -0.000624112068243
>>> b = -0.000506270106773
>>> RGB_a = np.array([7.94632020, 7.94711528, 7.94899595])
>>> temporary_magnitude_quantity_forward(N_c, N_cb, e_t, a, b, RGB_a)
... # doctest: +ELLIPSIS
0.1497462...
"""
N_c = as_float_array(N_c)
N_cb = as_float_array(N_cb)
e_t = as_float_array(e_t)
a = as_float_array(a)
b = as_float_array(b)
Ra, Ga, Ba = tsplit(RGB_a)
t = (
((50000 / 13) * N_c * N_cb)
* (e_t * spow(a**2 + b**2, 0.5))
/ (Ra + Ga + 21 * Ba / 20)
)
return t
def temporary_magnitude_quantity_inverse(
C: FloatingOrArrayLike, J: FloatingOrArrayLike, n: FloatingOrArrayLike
) -> FloatingOrNDArray:
"""
    Return the temporary magnitude quantity :math:`t` for inverse *CIECAM02*
    implementation.
Parameters
----------
C
*Chroma* correlate :math:`C`.
J
*Lightness* correlate :math:`J`.
n
Function of the luminance factor of the background :math:`n`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Temporary magnitude quantity :math:`t`.
Notes
-----
- This definition implements negative values handling as per
:cite:`Luo2013`.
Examples
--------
>>> C = 68.8364136888275
>>> J = 41.749268505999
>>> n = 0.2
>>> temporary_magnitude_quantity_inverse(C, J, n) # doctest: +ELLIPSIS
202.3873619...
"""
C = as_float_array(C)
J = np.maximum(J, EPSILON)
n = as_float_array(n)
t = spow(C / (np.sqrt(J / 100) * spow(1.64 - 0.29**n, 0.73)), 1 / 0.9)
return t
def chroma_correlate(
J: FloatingOrArrayLike,
n: FloatingOrArrayLike,
N_c: FloatingOrArrayLike,
N_cb: FloatingOrArrayLike,
e_t: FloatingOrArrayLike,
a: FloatingOrArrayLike,
b: FloatingOrArrayLike,
RGB_a: ArrayLike,
) -> FloatingOrNDArray:
"""
Return the *chroma* correlate :math:`C`.
Parameters
----------
J
*Lightness* correlate :math:`J`.
n
Function of the luminance factor of the background :math:`n`.
N_c
Surround chromatic induction factor :math:`N_{c}`.
N_cb
Chromatic induction factor :math:`N_{cb}`.
e_t
Eccentricity factor :math:`e_t`.
a
Opponent colour dimension :math:`a`.
b
Opponent colour dimension :math:`b`.
RGB_a
Compressed stimulus *CMCCAT2000* transform sharpened *RGB* array.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Chroma* correlate :math:`C`.
Examples
--------
>>> J = 41.7310911325
>>> n = 0.2
>>> N_c = 1.0
>>> N_cb = 1.00030400456
>>> e_t = 1.17400547285
>>> a = -0.000624112068243
>>> b = -0.000506270106773
>>> RGB_a = np.array([7.94632020, 7.94711528, 7.94899595])
>>> chroma_correlate(J, n, N_c, N_cb, e_t, a, b, RGB_a)
... # doctest: +ELLIPSIS
0.1047077...
"""
J = as_float_array(J)
n = as_float_array(n)
t = temporary_magnitude_quantity_forward(N_c, N_cb, e_t, a, b, RGB_a)
C = spow(t, 0.9) * spow(J / 100, 0.5) * spow(1.64 - 0.29**n, 0.73)
return C
def colourfulness_correlate(
C: FloatingOrArrayLike, F_L: FloatingOrArrayLike
) -> FloatingOrNDArray:
"""
Return the *colourfulness* correlate :math:`M`.
Parameters
----------
C
*Chroma* correlate :math:`C`.
F_L
*Luminance* level adaptation factor :math:`F_L`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Colourfulness* correlate :math:`M`.
Examples
--------
>>> C = 0.104707757171
>>> F_L = 1.16754446415
>>> colourfulness_correlate(C, F_L) # doctest: +ELLIPSIS
0.1088421...
"""
C = as_float_array(C)
F_L = as_float_array(F_L)
M = C * spow(F_L, 0.25)
return M
def saturation_correlate(
M: FloatingOrArrayLike, Q: FloatingOrArrayLike
) -> FloatingOrNDArray:
"""
Return the *saturation* correlate :math:`s`.
Parameters
----------
M
*Colourfulness* correlate :math:`M`.
Q
        *Brightness* correlate :math:`Q`.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Saturation* correlate :math:`s`.
Examples
--------
>>> M = 0.108842175669
>>> Q = 195.371325966
>>> saturation_correlate(M, Q) # doctest: +ELLIPSIS
2.3603053...
"""
M = as_float_array(M)
Q = as_float_array(Q)
s = 100 * spow(M / Q, 0.5)
return s
def P(
N_c: FloatingOrArrayLike,
N_cb: FloatingOrArrayLike,
e_t: FloatingOrArrayLike,
t: FloatingOrArrayLike,
A: FloatingOrArrayLike,
N_bb: FloatingOrArrayLike,
) -> NDArray:
"""
Return the points :math:`P_1`, :math:`P_2` and :math:`P_3`.
Parameters
----------
N_c
Surround chromatic induction factor :math:`N_{c}`.
N_cb
Chromatic induction factor :math:`N_{cb}`.
e_t
Eccentricity factor :math:`e_t`.
t
Temporary magnitude quantity :math:`t`.
A
Achromatic response :math:`A` for the stimulus.
N_bb
Chromatic induction factor :math:`N_{bb}`.
Returns
-------
:class:`numpy.ndarray`
Points :math:`P`.
Examples
--------
>>> N_c = 1.0
>>> N_cb = 1.00030400456
>>> e_t = 1.174005472851914
>>> t = 0.149746202921
>>> A = 23.9394809667
>>> N_bb = 1.00030400456
>>> P(N_c, N_cb, e_t, t, A, N_bb) # doctest: +ELLIPSIS
array([ 3.0162890...e+04, 2.4237205...e+01, 1.0500000...e+00])
"""
N_c = as_float_array(N_c)
N_cb = as_float_array(N_cb)
e_t = as_float_array(e_t)
t = as_float_array(t)
A = as_float_array(A)
N_bb = as_float_array(N_bb)
P_1 = ((50000 / 13) * N_c * N_cb * e_t) / t
P_2 = A / N_bb + 0.305
P_3 = ones(P_1.shape) * (21 / 20)
P_n = tstack([P_1, P_2, P_3])
return P_n
def matrix_post_adaptation_non_linear_response_compression(
P_2: FloatingOrArrayLike, a: FloatingOrArrayLike, b: FloatingOrArrayLike
) -> NDArray:
"""
Apply the post-adaptation non-linear-response compression matrix.
Parameters
----------
P_2
Point :math:`P_2`.
a
Opponent colour dimension :math:`a`.
b
Opponent colour dimension :math:`b`.
Returns
-------
:class:`numpy.ndarray`
        Compressed *CMCCAT2000* transform sharpened *RGB* array.
Examples
--------
>>> P_2 = 24.2372054671
>>> a = -0.000624112068243
>>> b = -0.000506270106773
>>> matrix_post_adaptation_non_linear_response_compression(P_2, a, b)
... # doctest: +ELLIPSIS
array([ 7.9463202..., 7.9471152..., 7.9489959...])
"""
P_2 = as_float_array(P_2)
a = as_float_array(a)
b = as_float_array(b)
RGB_a = (
vector_dot(
[
[460, 451, 288],
[460, -891, -261],
[460, -220, -6300],
],
tstack([P_2, a, b]),
)
/ 1403
)
return RGB_a
|
|
import ly
from ly import document
from ly import rhythm
from ly import pitch
from ly import lex
import math
import numpy
import re
#TODO: FIX CHORD/RHYTHM PARSER, IMPLEMENT HEADER AND CHORD PROCESSING
#==GLOBAL VARIABLES============================================================
BAR_LENGTH = 1.0 #This should eventually be set from header (time signature) parsing
#==PREPROCESSOR HELPER FUNCTIONS===============================================
#Takes a .ly file (specifically in the format defined by the Open Real Book).
#Cuts the file into individual songs and then again into metadata, chords,
#and notes/rhythms. Returns them as an array of tuples.
def file_splitter(file):
songs_array = [] #Organizes songs into arrays of arrays in this format:
#([metadata][chords][notes/rhythms])
song = ([],[],[])
while True:
line = file.readline()
#This if block determines what part of the song we are in and adds
#the file line by line into the appropriate part of the song tuple
        #if we reached the end of the file, add our current song and stop
if line == "":
songs_array.append(song)
break
#else if we started a new song, add our current song and continue
elif line == "\\bookpart {\n": #IS THIS OVERLY SPECIFIC? TURN INTO REGEX
songs_array.append(song)
song = ([],[],[])
#else if we entered the meta data part of the song, then read in song
#title
elif line == "\\markup {\n":
rx = r" *(\\fill-line)"
while (line != "}\n"):
line = file.readline()
if re.match(rx, line, re.I):
song[0].append(line)
#else if we reached the chord part of the song, then read in all lines
#to chord part of tuple until we reach the end of chordmode
elif line == "\\chordmode {\n":
while (line != "}\n"):
line = file.readline()
song[1].append(line)
        #else if we are in the rhythm/notes part of the song, then read in all
#lines to rhythm part of tuple, unless it is meta data (go to meta data)
elif line == "{\n":
rx = r" *\\((tempo)|(time)|(key))"
while (line != "}\n"):
line = file.readline()
if re.match(rx, line, re.I):
song[0].append(line)
else:
song[2].append(line)
#Remove the first, empty song and return array
songs_array.pop(0)
return songs_array
#Takes any part of the array and puts into a temporary .ly file to be read by
#lilypond functions
def reformat_to_lily(line_array):
file = open("tempFile.ly","w")
for i in range(len(line_array)):
file.write(line_array[i])
file.close()
return open("tempFile.ly","r")
#Takes a list of LilyPond rhythm objects, explicitly defines the length of
#the notes, and separates each bar into sublists. Also allows for cases of
#tuplets (currently only 3/2 tuplets), which is why we're bringing in pitch.
#Returns an array of arrays of note lengths.
def make_explicit(r,p):
#print len(r), len(p)
#print r,p
tempList = []
prevLength = 0
barCounter = 0.0
explicitRhythm = []
for i in xrange(len(r)):
rx = r"\\tuplet"
        #If this is actually a tuplet, then we need to override where the
        #notes are placed
if (re.match(rx, p[i], re.I)): #THIS ONLY HANDLES 3/2
p.pop(i)
tempList.extend([3,3,3])
barCounter += 0.5
#If the length of the note is not explicitly given, then assume it's the
#previous note's length
elif (r[i] == ''):
tempList.append(prevLength)
barCounter += (1.0/prevLength)
#Else, the note length is explicitly given, so set the prevLength to
#this length
else:
prevLength = int(r[i].strip('.'))
tempList.append(prevLength)
barCounter += (1.0/prevLength)
#If the bar has now been filled, then start on a new bar
if (barCounter >= BAR_LENGTH):
barCounter = 0.0
explicitRhythm.append(tempList)
tempList = []
#if (barCounter > BAR_LENGTH and __debug__):
# raise ValueError('INVALID NOTE LENGTHS')
return explicitRhythm, p
#This is the main preprocessor, it takes an array of pitches and rhythms from a
#.ly file, cleans up/removes garbage and returns 2 arrays: pitches and rhythms
def rhythm_parser(song): #THIS IS THE UGLIEST CODE I'VE EVER WRITTEN WILL FIX
#print song
#Loads a lilypond file into a ly document object
reformat_to_lily(song[2])
    d = ly.document.Document.load("tempFile.ly")
cursor = ly.document.Cursor(d)
#Returns a list of the length of each note
r = ly.rhythm.rhythm_extract(cursor)
p = ly.pitch.PitchIterator(cursor).pitches()
pitches_temp = []
pitches = []
rhythms = []
chords = []
#THIS REGEX COULD BE BROKEN
rx = r"([a-g]|[r])|\\tuplet" #Will only accept notes a-g with optional
#' character or a tuplet or a rest
#Bring in the individual pitches or rests of the file
for item in ly.rhythm.music_items(cursor,True,True):
pitches_temp.append(item.tokens)
temp_pitches = ["".join(tokens) for tokens in pitches_temp]
#Sanitize pitches: only bring in stuff determined by rx
for item in temp_pitches:
if re.match(rx, item, re.I):
pitches.append(item)
try:
rhythm,pitches = make_explicit(r,pitches)
except ValueError as e:
print(e)
raise
return rhythm, pitches
#==MAIN HELPER FUNCTIONS=======================================================
#Take in a lilypond list of rhythm objects and turn into a vector of rhythms
#that can be read in by our neural nets
def rhythm_to_vector(r,p):
#Base case 1: sublist is empty, in which case don't do anything
if len(r) == 0:
return
#Base case 2: sublist is a singleton, then just return a singleton
elif len(r) == 1:
return [(1,r[0],p.pop(0))]
#Otherwise chunk list into multiple sublists based on the first prime that
#evenly divides the list and recurse
else:
length = len(r)
prime = first_prime(length)
l = [(prime,None,None)]
chunks = numpy.array_split(r,prime)#Split list by the found prime
for i in xrange(len(chunks)):
l += rhythm_to_vector(chunks[i],p)
return l
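#Illustrative trace (hypothetical input): a bar of four quarter notes
#r = [4,4,4,4] with p = ['c','d','e','f'] splits by first_prime(4) == 2
#twice, yielding
#[(2,None,None),
# (2,None,None), (1,4,'c'), (1,4,'d'),
# (2,None,None), (1,4,'e'), (1,4,'f')]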
#Find the smallest prime (up to 101) that evenly divides length
def first_prime(length):
    for i in range(2, 102):
        if is_prime(i) and (length % i) == 0:
            return i
#Trial division up to sqrt(n); numbers below 2 are not prime
def is_prime(n):
    if n < 2:
        return False
    return all(n % i for i in xrange(2, int(math.sqrt(n)) + 1))
#==MAIN========================================================================
if __name__ == '__main__':
#Replace this file with "../../test/resources/realbook.ly" to test out the
#entire real book
f = open("../../test/resources/test5.ly")
songs = file_splitter(f)
rhythm,pitches = rhythm_parser(songs[0])
#v is the final vector of pitches and rhythms
v = []
#Find the size of all of the notes by going into the array of arrays
#SHOULD PROBABLY REPLACE THIS WITH A RETURN VARIABLE FROM RHYTHM_PARSER
rhythm_length = 0
for i in xrange(len(rhythm)):
rhythm_length += len(rhythm[i])
    #The main processing function call
    for i in xrange(len(rhythm)):
v.append(rhythm_to_vector(rhythm[i],pitches))
if __debug__:
print(v)
print("Working")
|
|
# Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2.0, as
# published by the Free Software Foundation.
#
# This program is also distributed with certain software (including
# but not limited to OpenSSL) that is licensed under separate terms,
# as designated in a particular file or component or in included license
# documentation. The authors of MySQL hereby grant you an
# additional permission to link the program and your derivative works
# with the separately licensed software that they have included with
# MySQL.
#
# Without limiting anything contained in the foregoing, this file,
# which is part of MySQL Connector/Python, is also subject to the
# Universal FOSS Exception, version 1.0, a copy of which can be found at
# http://oss.oracle.com/licenses/universal-foss-exception.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License, version 2.0, for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Module implementing low-level socket communication with MySQL servers.
"""
from collections import deque
import socket
import struct
import sys
import zlib
try:
import ssl
except ImportError:
# If import fails, we don't have SSL support.
pass
from . import constants, errors
from .catch23 import PY2, init_bytearray, struct_unpack
def _strioerror(err):
"""Reformat the IOError error message
This function reformats the IOError error message.
"""
if not err.errno:
return str(err)
return '{errno} {strerr}'.format(errno=err.errno, strerr=err.strerror)
def _prepare_packets(buf, pktnr):
"""Prepare a packet for sending to the MySQL server"""
pkts = []
pllen = len(buf)
maxpktlen = constants.MAX_PACKET_LENGTH
while pllen > maxpktlen:
pkts.append(b'\xff\xff\xff' + struct.pack('<B', pktnr)
+ buf[:maxpktlen])
buf = buf[maxpktlen:]
pllen = len(buf)
pktnr = pktnr + 1
pkts.append(struct.pack('<I', pllen)[0:3]
+ struct.pack('<B', pktnr) + buf)
return pkts
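# Illustration (our note, not part of the original module): a payload one
# byte longer than MAX_PACKET_LENGTH is split into a maximum-length packet
# followed by a 1-byte packet, with consecutive sequence numbers:
#   pkts = _prepare_packets(b'x' * (constants.MAX_PACKET_LENGTH + 1), 0)
#   pkts[0][:4] == b'\xff\xff\xff\x00'  # length 0xffffff, packet number 0
#   pkts[1][:4] == b'\x01\x00\x00\x01'  # length 0x000001, packet number 1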
class BaseMySQLSocket(object):
"""Base class for MySQL socket communication
    This class should not be used directly but subclassed, overriding at
    least the open_connection() method. Examples of subclasses are
mysql.connector.network.MySQLTCPSocket
mysql.connector.network.MySQLUnixSocket
"""
def __init__(self):
self.sock = None # holds the socket connection
self._connection_timeout = None
self._packet_number = -1
self._compressed_packet_number = -1
self._packet_queue = deque()
self.recvsize = 8192
@property
def next_packet_number(self):
"""Increments the packet number"""
self._packet_number = self._packet_number + 1
if self._packet_number > 255:
self._packet_number = 0
return self._packet_number
@property
def next_compressed_packet_number(self):
"""Increments the compressed packet number"""
self._compressed_packet_number = self._compressed_packet_number + 1
if self._compressed_packet_number > 255:
self._compressed_packet_number = 0
return self._compressed_packet_number
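    # Note (ours): MySQL sequence numbers occupy a single byte on the wire,
    # hence both counters above wrap back to 0 after 255.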
def open_connection(self):
"""Open the socket"""
raise NotImplementedError
def get_address(self):
"""Get the location of the socket"""
raise NotImplementedError
def shutdown(self):
"""Shut down the socket before closing it"""
try:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
del self._packet_queue
except (socket.error, AttributeError):
pass
def close_connection(self):
"""Close the socket"""
try:
self.sock.close()
del self._packet_queue
except (socket.error, AttributeError):
pass
def __del__(self):
self.shutdown()
def send_plain(self, buf, packet_number=None,
compressed_packet_number=None):
"""Send packets to the MySQL server"""
if packet_number is None:
self.next_packet_number # pylint: disable=W0104
else:
self._packet_number = packet_number
packets = _prepare_packets(buf, self._packet_number)
for packet in packets:
try:
if PY2:
self.sock.sendall(buffer(packet)) # pylint: disable=E0602
else:
self.sock.sendall(packet)
except IOError as err:
raise errors.OperationalError(
errno=2055, values=(self.get_address(), _strioerror(err)))
except AttributeError:
raise errors.OperationalError(errno=2006)
send = send_plain
def send_compressed(self, buf, packet_number=None,
compressed_packet_number=None):
"""Send compressed packets to the MySQL server"""
if packet_number is None:
self.next_packet_number # pylint: disable=W0104
else:
self._packet_number = packet_number
if compressed_packet_number is None:
self.next_compressed_packet_number # pylint: disable=W0104
else:
self._compressed_packet_number = compressed_packet_number
pktnr = self._packet_number
pllen = len(buf)
zpkts = []
maxpktlen = constants.MAX_PACKET_LENGTH
if pllen > maxpktlen:
pkts = _prepare_packets(buf, pktnr)
if PY2:
tmpbuf = bytearray()
for pkt in pkts:
tmpbuf += pkt
tmpbuf = buffer(tmpbuf) # pylint: disable=E0602
else:
tmpbuf = b''.join(pkts)
del pkts
zbuf = zlib.compress(tmpbuf[:16384])
header = (struct.pack('<I', len(zbuf))[0:3]
+ struct.pack('<B', self._compressed_packet_number)
+ b'\x00\x40\x00')
if PY2:
header = buffer(header) # pylint: disable=E0602
zpkts.append(header + zbuf)
tmpbuf = tmpbuf[16384:]
pllen = len(tmpbuf)
self.next_compressed_packet_number # pylint: disable=W0104
while pllen > maxpktlen:
zbuf = zlib.compress(tmpbuf[:maxpktlen])
header = (struct.pack('<I', len(zbuf))[0:3]
+ struct.pack('<B', self._compressed_packet_number)
+ b'\xff\xff\xff')
if PY2:
header = buffer(header) # pylint: disable=E0602
zpkts.append(header + zbuf)
tmpbuf = tmpbuf[maxpktlen:]
pllen = len(tmpbuf)
self.next_compressed_packet_number # pylint: disable=W0104
if tmpbuf:
zbuf = zlib.compress(tmpbuf)
header = (struct.pack('<I', len(zbuf))[0:3]
+ struct.pack('<B', self._compressed_packet_number)
+ struct.pack('<I', pllen)[0:3])
if PY2:
header = buffer(header) # pylint: disable=E0602
zpkts.append(header + zbuf)
del tmpbuf
else:
pkt = (struct.pack('<I', pllen)[0:3] +
struct.pack('<B', pktnr) + buf)
if PY2:
pkt = buffer(pkt) # pylint: disable=E0602
pllen = len(pkt)
if pllen > 50:
zbuf = zlib.compress(pkt)
zpkts.append(struct.pack('<I', len(zbuf))[0:3]
+ struct.pack('<B', self._compressed_packet_number)
+ struct.pack('<I', pllen)[0:3]
+ zbuf)
else:
header = (struct.pack('<I', pllen)[0:3]
+ struct.pack('<B', self._compressed_packet_number)
+ struct.pack('<I', 0)[0:3])
if PY2:
header = buffer(header) # pylint: disable=E0602
zpkts.append(header + pkt)
for zip_packet in zpkts:
try:
self.sock.sendall(zip_packet)
except IOError as err:
raise errors.OperationalError(
errno=2055, values=(self.get_address(), _strioerror(err)))
except AttributeError:
raise errors.OperationalError(errno=2006)
def recv_plain(self):
"""Receive packets from the MySQL server"""
try:
# Read the header of the MySQL packet, 4 bytes
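            # Header layout (our note): bytes 0-2 hold the little-endian
            # payload length, byte 3 holds the sequence number.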
packet = bytearray(b'')
packet_len = 0
while packet_len < 4:
chunk = self.sock.recv(4 - packet_len)
if not chunk:
raise errors.InterfaceError(errno=2013)
packet += chunk
packet_len = len(packet)
# Save the packet number and payload length
self._packet_number = packet[3]
if PY2:
payload_len = struct.unpack_from(
"<I",
buffer(packet[0:3] + b'\x00'))[0] # pylint: disable=E0602
else:
payload_len = struct.unpack("<I", packet[0:3] + b'\x00')[0]
# Read the payload
rest = payload_len
packet.extend(bytearray(payload_len))
packet_view = memoryview(packet) # pylint: disable=E0602
packet_view = packet_view[4:]
while rest:
read = self.sock.recv_into(packet_view, rest)
if read == 0 and rest > 0:
raise errors.InterfaceError(errno=2013)
packet_view = packet_view[read:]
rest -= read
return packet
except IOError as err:
raise errors.OperationalError(
errno=2055, values=(self.get_address(), _strioerror(err)))
def recv_py26_plain(self):
"""Receive packets from the MySQL server"""
try:
# Read the header of the MySQL packet, 4 bytes
header = bytearray(b'')
header_len = 0
while header_len < 4:
chunk = self.sock.recv(4 - header_len)
if not chunk:
raise errors.InterfaceError(errno=2013)
header += chunk
header_len = len(header)
# Save the packet number and payload length
self._packet_number = header[3]
payload_len = struct_unpack("<I", header[0:3] + b'\x00')[0]
# Read the payload
rest = payload_len
payload = init_bytearray(b'')
while rest > 0:
chunk = self.sock.recv(rest)
if not chunk:
raise errors.InterfaceError(errno=2013)
payload += chunk
rest = payload_len - len(payload)
return header + payload
except IOError as err:
raise errors.OperationalError(
errno=2055, values=(self.get_address(), _strioerror(err)))
if sys.version_info[0:2] == (2, 6):
recv = recv_py26_plain
recv_plain = recv_py26_plain
else:
recv = recv_plain
def _split_zipped_payload(self, packet_bunch):
"""Split compressed payload"""
while packet_bunch:
if PY2:
payload_length = struct.unpack_from(
"<I",
packet_bunch[0:3] + b'\x00')[0] # pylint: disable=E0602
else:
payload_length = struct.unpack("<I", packet_bunch[0:3] + b'\x00')[0]
self._packet_queue.append(packet_bunch[0:payload_length + 4])
packet_bunch = packet_bunch[payload_length + 4:]
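    # Compressed-protocol framing (our note): each compressed packet begins
    # with a 7-byte header -- 3 bytes little-endian compressed length, 1 byte
    # sequence number, and 3 bytes little-endian uncompressed length, where 0
    # means the payload was sent uncompressed.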
def recv_compressed(self):
"""Receive compressed packets from the MySQL server"""
try:
pkt = self._packet_queue.popleft()
self._packet_number = pkt[3]
return pkt
except IndexError:
pass
header = bytearray(b'')
packets = []
try:
abyte = self.sock.recv(1)
while abyte and len(header) < 7:
header += abyte
abyte = self.sock.recv(1)
while header:
if len(header) < 7:
raise errors.InterfaceError(errno=2013)
# Get length of compressed packet
zip_payload_length = struct_unpack("<I",
header[0:3] + b'\x00')[0]
self._compressed_packet_number = header[3]
# Get payload length before compression
payload_length = struct_unpack("<I", header[4:7] + b'\x00')[0]
zip_payload = init_bytearray(abyte)
while len(zip_payload) < zip_payload_length:
chunk = self.sock.recv(zip_payload_length
- len(zip_payload))
if not chunk:
raise errors.InterfaceError(errno=2013)
zip_payload = zip_payload + chunk
# Payload was not compressed
if payload_length == 0:
self._split_zipped_payload(zip_payload)
pkt = self._packet_queue.popleft()
self._packet_number = pkt[3]
return pkt
packets.append((payload_length, zip_payload))
if zip_payload_length <= 16384:
# We received the full compressed packet
break
# Get next compressed packet
header = init_bytearray(b'')
abyte = self.sock.recv(1)
while abyte and len(header) < 7:
header += abyte
abyte = self.sock.recv(1)
except IOError as err:
raise errors.OperationalError(
errno=2055, values=(self.get_address(), _strioerror(err)))
# Compressed packet can contain more than 1 MySQL packets
# We decompress and make one so we can split it up
tmp = init_bytearray(b'')
for payload_length, payload in packets:
# payload_length can not be 0; this was previously handled
if PY2:
tmp += zlib.decompress(buffer(payload)) # pylint: disable=E0602
else:
tmp += zlib.decompress(payload)
self._split_zipped_payload(tmp)
del tmp
try:
pkt = self._packet_queue.popleft()
self._packet_number = pkt[3]
return pkt
except IndexError:
pass
def set_connection_timeout(self, timeout):
"""Set the connection timeout"""
self._connection_timeout = timeout
# pylint: disable=C0103,E1101
def switch_to_ssl(self, ca, cert, key, verify_cert=False,
verify_identity=False, cipher=None, ssl_version=None):
"""Switch the socket to use SSL"""
if not self.sock:
raise errors.InterfaceError(errno=2048)
try:
if verify_cert:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
if ssl_version is None:
self.sock = ssl.wrap_socket(
self.sock, keyfile=key, certfile=cert, ca_certs=ca,
cert_reqs=cert_reqs, do_handshake_on_connect=False,
ciphers=cipher)
else:
self.sock = ssl.wrap_socket(
self.sock, keyfile=key, certfile=cert, ca_certs=ca,
cert_reqs=cert_reqs, do_handshake_on_connect=False,
ssl_version=ssl_version, ciphers=cipher)
self.sock.do_handshake()
if verify_identity:
ssl.match_hostname(self.sock.getpeercert(), self.server_host)
except NameError:
raise errors.NotSupportedError(
"Python installation has no SSL support")
except (ssl.SSLError, IOError) as err:
raise errors.InterfaceError(
errno=2055, values=(self.get_address(), _strioerror(err)))
except ssl.CertificateError as err:
raise errors.InterfaceError(str(err))
except NotImplementedError as err:
raise errors.InterfaceError(str(err))
# pylint: enable=C0103,E1101
class MySQLUnixSocket(BaseMySQLSocket):
"""MySQL socket class using UNIX sockets
Opens a connection through the UNIX socket of the MySQL Server.
"""
def __init__(self, unix_socket='/tmp/mysql.sock'):
super(MySQLUnixSocket, self).__init__()
self.unix_socket = unix_socket
def get_address(self):
return self.unix_socket
def open_connection(self):
try:
self.sock = socket.socket(socket.AF_UNIX, # pylint: disable=E1101
socket.SOCK_STREAM)
self.sock.settimeout(self._connection_timeout)
self.sock.connect(self.unix_socket)
except IOError as err:
raise errors.InterfaceError(
errno=2002, values=(self.get_address(), _strioerror(err)))
except Exception as err:
raise errors.InterfaceError(str(err))
class MySQLTCPSocket(BaseMySQLSocket):
"""MySQL socket class using TCP/IP
Opens a TCP/IP connection to the MySQL Server.
"""
def __init__(self, host='127.0.0.1', port=3306, force_ipv6=False):
super(MySQLTCPSocket, self).__init__()
self.server_host = host
self.server_port = port
self.force_ipv6 = force_ipv6
self._family = 0
def get_address(self):
return "{0}:{1}".format(self.server_host, self.server_port)
def open_connection(self):
"""Open the TCP/IP connection to the MySQL server
"""
# Get address information
addrinfo = [None] * 5
try:
addrinfos = socket.getaddrinfo(self.server_host,
self.server_port,
0, socket.SOCK_STREAM,
socket.SOL_TCP)
            # If there are multiple results, we favor IPv4 unless IPv6 was forced.
for info in addrinfos:
if self.force_ipv6 and info[0] == socket.AF_INET6:
addrinfo = info
break
elif info[0] == socket.AF_INET:
addrinfo = info
break
if self.force_ipv6 and addrinfo[0] is None:
raise errors.InterfaceError(
"No IPv6 address found for {0}".format(self.server_host))
if addrinfo[0] is None:
addrinfo = addrinfos[0]
except IOError as err:
raise errors.InterfaceError(
errno=2003, values=(self.get_address(), _strioerror(err)))
else:
(self._family, socktype, proto, _, sockaddr) = addrinfo
            # Instantiate the socket and connect
try:
self.sock = socket.socket(self._family, socktype, proto)
self.sock.settimeout(self._connection_timeout)
self.sock.connect(sockaddr)
except IOError as err:
raise errors.InterfaceError(
errno=2003, values=(self.get_address(), _strioerror(err)))
except Exception as err:
raise errors.OperationalError(str(err))
|
|
# -*- coding: UTF-8 -*-
"""
literature
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
"""
__version__ = '4.0'
content = {
'book_headline': [
'<#book_pseudoscience_author#>','<#book_pseudoscience_title#>','<#book_pseudoscientific#>',
'<#book_title_front#>',
],
'book_section': ['Books', 'Books', 'Books', 'Books', 'Books', 'Books',
'Books', 'Books', 'Books', 'Books', 'Books', 'Books',
'New books', 'Literature', 'Authors', 'E-books', 'E-books', 'E-books', 'Published',
'New published','My library','Publishing','From the library',
],
'book_ankeiler':['<#^,book_phylosophy_title#>',
],
'book_pseudosci_adj': [
'eternal ',
'golden ',
'inspiring ',
'inspired ',
'negative ',
'positive ',
'divine ',
'universal ',
'liberal ',
'spirited ',
'early ',
'late ',
'',
'',
'',
'',
'',
'',
'',
],
'book_pseudoscience_author': [
'<#sci_titles_px#> <#names_last_absurdlyBritish#>, <#sci_titles_sx#>',
'<#names_last_absurdlyBritish#>, <#university_dept#>',
'<#name#>',
'<#name#>, <#sci_titles_px#> at <#university_dept#>',
],
'book_pseudoscience_title': [
'"<#book_pseudoscientific#>" by <#book_pseudoscience_author#>',
'"<#book_pseudoscientific#>, <#num_roman#>" by <#book_pseudoscience_author#>',
],
'book_pseudoscientific': [
'<#^,lit_figures#> and <#^,lit_figures#>',
'<#^,lit_figures#> as seen by <#book_pseudosci_adj#><#sci_pseudo#>',
'<#^,sci_pseudo#>, <#^,lit_figures#> and <#^,lit_figures#>',
'<#^,lit_figures#> and <#sci_disciplines#>',
'<#^,lit_figures#> in the history of <#sci_disciplines#>',
'<#^,book_pseudosci_adj#><#sci_pseudo#> and <#sci_disciplines#>',
'<#^,book_pseudosci_adj#><#sci_pseudo#> as <#sci_disciplines#>',
'The role of <#sci_disciplines#> in <#sci_pseudo#>',
'The influence of <#book_pseudosci_adj#><#sci_pseudo#> on <#sci_disciplines#>',
'<#^,book_pseudosci_adj#><#sci_pseudo#> and <#^,lit_figures#>',
'The role of <#sci_pseudo#> in <#sci_pseudo#>',
'<#^,sci_pseudo#> and <#sci_pseudo#> <#book_pseudoscientific_sx#>',
'<#book_pseudoscientific#>',
],
'book_pseudoscientific_sx': [
'(<-randint(1700, 1800)->-<-randint(1801, 1900)->)',
],
'book_title': [
'"<#state_of_mind#> and <#state_of_mind#>" by Jane Austen',
],
'lit_figures': [
'goya',
'rembrandt',
'elgreco',
'turner',
'constable',
'vangogh',
'renoir',
'seurat',
'rubens',
'klimt',
'monet',
'lautrec',
'matisse',
'mondrian',
'ruscha',
'reinhardt',
'malevich',
'dali',
'magritte',
'ensor',
'bach',
'handel',
'brahms',
'beethoven',
'bartok',
'chopin',
'delibes',
'debussy',
'mahler',
'copland',
'marais',
'forqueray',
'lully',
'couperin',
'grieg',
'wagner',
'tarantino',
'truffaut',
'vansant',
'lumiere',
'spielberg',
'cronenberg',
'lucas',
'bunuel',
'bergman',
'bronte',
'austen',
'kafka',
'chekov',
'beckett',
'camus',
'turgenev',
'james',
'hemingway',
'fitzgerald',
'wright',
'ellison',
'fourier',
'euler',
'mandelbrot',
'copernicus',
'galileo',
'einstein',
'tesla',
'whitney',
'edison',
'planck',
'leonardo',
'pythagoras',
'bohr',
'newton',
'archimedes',
'coulomb',
'fahrenheit',
'faraday',
'fermi',
'feynman',
'hawking',
'geiger',
'curie',
'kelvin',
'ohm',
'penzias',
'roentgen',
'volta',
],
'lit_mythology': [
'zeus',
'apollo',
'hermes',
'athena',
'achilles',
'antigone',
'aphrodite',
'anubis',
'arachne',
'asgard',
'atlantis',
'atlas',
'aurora',
'cadmus',
'calliope',
'calypso',
'centaur',
'charon',
'charybdis',
'cronus',
'cupid',
'cyclops',
'daedalus',
'daphne',
'demeter',
'diana',
'dido',
'electra',
'erato',
'europa',
'euridyce',
'euterpe',
'flora',
'ganymede',
'gorgon',
'hades',
'hector',
'hera',
'heracles',
'hermes',
'hyperion',
'icarus',
'janus',
'jocasta',
'leda',
'lucian',
'medea',
'minerva',
'narcissus',
'nestor',
'odin',
'odysseus',
'oedipus',
'orcus',
'osiris',
'paris',
'perseus',
'phaedra',
'poseidon',
'proteus',
'psyche',
'pyramus',
'satyr',
'scylla',
'sisyphus',
'sphinx',
'styx',
'tantalus',
'tereus',
'thalia',
'thor',
'thoth',
'titan',
'triton',
'ulysses',
'urania',
'vulcan',
],
'state_of_mind': [
'Pride',
'Prejudice',
        'Stubbornness',
'Anger',
'Fury',
'Depression',
'Mild Amusement',
'Consolation',
],
}
|
|
"""Amazon SQS Transport.
Amazon SQS transport module for Kombu. This package implements an AMQP-like
interface on top of Amazon's SQS service, with the goal of being optimized for
high performance and reliability.
The default settings for this module are focused on high performance in
task queue situations where tasks are small, idempotent and run very fast.
SQS Features supported by this transport:
Long Polling:
https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-long-polling.html
Long polling is enabled by setting the `wait_time_seconds` transport
    option to a number > 1. Amazon supports up to 20 seconds. This transport
    enables long polling by default with a wait time of 10 seconds.
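    For example (an illustrative value, ours; SQS allows at most 20 seconds):
        transport_options = {'wait_time_seconds': 15}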
Batch API Actions:
https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-batch-api.html
The default behavior of the SQS Channel.drain_events() method is to
request up to the 'prefetch_count' messages on every request to SQS.
These messages are stored locally in a deque object and passed back
to the Transport until the deque is empty, before triggering a new
API call to Amazon.
This behavior dramatically speeds up the rate that you can pull tasks
from SQS when you have short-running tasks (or a large number of workers).
When a Celery worker has multiple queues to monitor, it will pull down
up to 'prefetch_count' messages from queueA and work on them all before
    moving on to queueB. If queueB is empty, it will wait until
'polling_interval' expires before moving back and checking on queueA.
Other Features supported by this transport:
Predefined Queues:
The default behavior of this transport is to use a single AWS credential
pair in order to manage all SQS queues (e.g. listing queues, creating
queues, polling queues, deleting messages).
    If it is preferable for your environment to use multiple AWS credentials,
    you can use the 'predefined_queues' setting inside the 'transport_options'
    map. This setting allows you to specify the SQS queue URL and AWS
    credentials for each of your queues. For example, if you have two queues
    (which both already exist in AWS) you can tell this transport about them
    as follows:
transport_options = {
'predefined_queues': {
'queue-1': {
'url': 'https://sqs.us-east-1.amazonaws.com/xxx/aaa',
'access_key_id': 'a',
'secret_access_key': 'b',
},
'queue-2': {
'url': 'https://sqs.us-east-1.amazonaws.com/xxx/bbb',
'access_key_id': 'c',
'secret_access_key': 'd',
},
}
}
""" # noqa: E501
from __future__ import absolute_import, unicode_literals
import base64
import socket
import string
import uuid
from vine import transform, ensure_promise, promise
from kombu.asynchronous import get_event_loop
from kombu.asynchronous.aws.ext import boto3, exceptions
from kombu.asynchronous.aws.sqs.connection import AsyncSQSConnection
from kombu.asynchronous.aws.sqs.message import AsyncMessage
from kombu.five import Empty, range, string_t, text_t
from kombu.log import get_logger
from kombu.utils import scheduling
from kombu.utils.encoding import bytes_to_str, safe_str
from kombu.utils.json import loads, dumps
from kombu.utils.objects import cached_property
from . import virtual
logger = get_logger(__name__)
# dots are replaced by dash, dash remains dash, all other punctuation
# replaced by underscore.
CHARS_REPLACE_TABLE = {
ord(c): 0x5f for c in string.punctuation if c not in '-_.'
}
CHARS_REPLACE_TABLE[0x2e] = 0x2d # '.' -> '-'
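# Illustration (ours): applied via unicode.translate, the table maps '.' to
# '-' and all other punctuation except '-' and '_' to '_', e.g.
#   u'celery.tasks!high'.translate(CHARS_REPLACE_TABLE) == u'celery-tasks_high'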
#: SQS bulk get supports a maximum of 10 messages at a time.
SQS_MAX_MESSAGES = 10
def maybe_int(x):
"""Try to convert x' to int, or return x' if that fails."""
try:
return int(x)
except ValueError:
return x
class UndefinedQueueException(Exception):
"""Predefined queues are being used and an undefined queue was used."""
class Channel(virtual.Channel):
"""SQS Channel."""
default_region = 'us-east-1'
default_visibility_timeout = 1800 # 30 minutes.
default_wait_time_seconds = 10 # up to 20 seconds max
domain_format = 'kombu%(vhost)s'
_asynsqs = None
_predefined_queue_async_clients = {} # A client for each predefined queue
_sqs = None
_predefined_queue_clients = {} # A client for each predefined queue
_queue_cache = {}
_noack_queues = set()
def __init__(self, *args, **kwargs):
if boto3 is None:
raise ImportError('boto3 is not installed')
super(Channel, self).__init__(*args, **kwargs)
# SQS blows up if you try to create a new queue when one already
# exists but with a different visibility_timeout. This prepopulates
# the queue_cache to protect us from recreating
# queues that are known to already exist.
self._update_queue_cache(self.queue_name_prefix)
self.hub = kwargs.get('hub') or get_event_loop()
def _update_queue_cache(self, queue_name_prefix):
if self.predefined_queues:
for queue_name, q in self.predefined_queues.items():
self._queue_cache[queue_name] = q['url']
return
resp = self.sqs().list_queues(QueueNamePrefix=queue_name_prefix)
for url in resp.get('QueueUrls', []):
queue_name = url.split('/')[-1]
self._queue_cache[queue_name] = url
def basic_consume(self, queue, no_ack, *args, **kwargs):
if no_ack:
self._noack_queues.add(queue)
if self.hub:
self._loop1(queue)
return super(Channel, self).basic_consume(
queue, no_ack, *args, **kwargs
)
def basic_cancel(self, consumer_tag):
if consumer_tag in self._consumers:
queue = self._tag_to_queue[consumer_tag]
self._noack_queues.discard(queue)
return super(Channel, self).basic_cancel(consumer_tag)
def drain_events(self, timeout=None, callback=None, **kwargs):
"""Return a single payload message from one of our queues.
Raises:
Queue.Empty: if no messages available.
"""
# If we're not allowed to consume or have no consumers, raise Empty
if not self._consumers or not self.qos.can_consume():
raise Empty()
# At this point, go and get more messages from SQS
self._poll(self.cycle, callback, timeout=timeout)
def _reset_cycle(self):
"""Reset the consume cycle.
Returns:
FairCycle: object that points to our _get_bulk() method
rather than the standard _get() method. This allows for
multiple messages to be returned at once from SQS (
based on the prefetch limit).
"""
self._cycle = scheduling.FairCycle(
self._get_bulk, self._active_queues, Empty,
)
def entity_name(self, name, table=CHARS_REPLACE_TABLE):
"""Format AMQP queue name into a legal SQS queue name."""
if name.endswith('.fifo'):
partial = name[:-len('.fifo')]
partial = text_t(safe_str(partial)).translate(table)
return partial + '.fifo'
else:
return text_t(safe_str(name)).translate(table)
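    # Illustration (ours): entity_name('proj.tasks') -> 'proj-tasks', while
    # entity_name('proj.tasks.fifo') -> 'proj-tasks.fifo' keeps the suffix.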
def canonical_queue_name(self, queue_name):
return self.entity_name(self.queue_name_prefix + queue_name)
def _new_queue(self, queue, **kwargs):
"""Ensure a queue with given name exists in SQS."""
if not isinstance(queue, string_t):
return queue
# Translate to SQS name for consistency with initial
# _queue_cache population.
queue = self.canonical_queue_name(queue)
# The SQS ListQueues method only returns 1000 queues. When you have
# so many queues, it's possible that the queue you are looking for is
# not cached. In this case, we could update the cache with the exact
# queue name first.
if queue not in self._queue_cache:
self._update_queue_cache(queue)
try:
return self._queue_cache[queue]
except KeyError:
if self.predefined_queues:
raise UndefinedQueueException((
"Queue with name '{}' must be "
"defined in 'predefined_queues'."
).format(queue))
attributes = {'VisibilityTimeout': str(self.visibility_timeout)}
if queue.endswith('.fifo'):
attributes['FifoQueue'] = 'true'
resp = self._create_queue(queue, attributes)
self._queue_cache[queue] = resp['QueueUrl']
return resp['QueueUrl']
def _create_queue(self, queue_name, attributes):
"""Create an SQS queue with a given name and nominal attributes."""
# Allow specifying additional boto create_queue Attributes
# via transport options
if self.predefined_queues:
return None
attributes.update(
self.transport_options.get('sqs-creation-attributes') or {},
)
return self.sqs(queue=queue_name).create_queue(
QueueName=queue_name,
Attributes=attributes,
)
def _delete(self, queue, *args, **kwargs):
"""Delete queue by name."""
if self.predefined_queues:
return
super(Channel, self)._delete(queue)
self._queue_cache.pop(queue, None)
def _put(self, queue, message, **kwargs):
"""Put message onto queue."""
q_url = self._new_queue(queue)
kwargs = {'QueueUrl': q_url,
'MessageBody': AsyncMessage().encode(dumps(message))}
if queue.endswith('.fifo'):
if 'MessageGroupId' in message['properties']:
kwargs['MessageGroupId'] = \
message['properties']['MessageGroupId']
else:
kwargs['MessageGroupId'] = 'default'
if 'MessageDeduplicationId' in message['properties']:
kwargs['MessageDeduplicationId'] = \
message['properties']['MessageDeduplicationId']
else:
kwargs['MessageDeduplicationId'] = str(uuid.uuid4())
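            # Note (ours): without explicit properties we fall back to a
            # single 'default' message group and a random UUID, so every
            # message is treated as unique for deduplication purposes.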
c = self.sqs(queue=self.canonical_queue_name(queue))
if message.get('redelivered'):
c.change_message_visibility(
QueueUrl=q_url,
ReceiptHandle=message['properties']['delivery_tag'],
VisibilityTimeout=0
)
else:
c.send_message(**kwargs)
def _message_to_python(self, message, queue_name, queue):
try:
body = base64.b64decode(message['Body'].encode())
except TypeError:
body = message['Body'].encode()
payload = loads(bytes_to_str(body))
if queue_name in self._noack_queues:
queue = self._new_queue(queue_name)
self.asynsqs(queue=queue_name).delete_message(
queue,
message['ReceiptHandle'],
)
else:
try:
properties = payload['properties']
delivery_info = payload['properties']['delivery_info']
except KeyError:
# json message not sent by kombu?
delivery_info = {}
properties = {'delivery_info': delivery_info}
payload.update({
'body': bytes_to_str(body),
'properties': properties,
})
# set delivery tag to SQS receipt handle
delivery_info.update({
'sqs_message': message, 'sqs_queue': queue,
})
properties['delivery_tag'] = message['ReceiptHandle']
return payload
def _messages_to_python(self, messages, queue):
"""Convert a list of SQS Message objects into Payloads.
This method handles converting SQS Message objects into
Payloads, and appropriately updating the queue depending on
the 'ack' settings for that queue.
Arguments:
messages (SQSMessage): A list of SQS Message objects.
queue (str): Name representing the queue they came from.
Returns:
List: A list of Payload objects
"""
q = self._new_queue(queue)
return [self._message_to_python(m, queue, q) for m in messages]
def _get_bulk(self, queue,
max_if_unlimited=SQS_MAX_MESSAGES, callback=None):
"""Try to retrieve multiple messages off ``queue``.
Where :meth:`_get` returns a single Payload object, this method
returns a list of Payload objects. The number of objects returned
is determined by the total number of messages available in the queue
and the number of messages the QoS object allows (based on the
prefetch_count).
Note:
Ignores QoS limits so caller is responsible for checking
that we are allowed to consume at least one message from the
queue. get_bulk will then ask QoS for an estimate of
the number of extra messages that we can consume.
Arguments:
queue (str): The queue name to pull from.
Returns:
List[Message]
"""
# drain_events calls `can_consume` first, consuming
# a token, so we know that we are allowed to consume at least
# one message.
# Note: ignoring max_messages for SQS with boto3
max_count = self._get_message_estimate()
if max_count:
q_url = self._new_queue(queue)
resp = self.sqs(queue=queue).receive_message(
QueueUrl=q_url, MaxNumberOfMessages=max_count,
WaitTimeSeconds=self.wait_time_seconds)
if resp.get('Messages'):
for m in resp['Messages']:
m['Body'] = AsyncMessage(body=m['Body']).decode()
for msg in self._messages_to_python(resp['Messages'], queue):
self.connection._deliver(msg, queue)
return
raise Empty()
def _get(self, queue):
"""Try to retrieve a single message off ``queue``."""
q_url = self._new_queue(queue)
resp = self.sqs(queue=queue).receive_message(
QueueUrl=q_url, MaxNumberOfMessages=1,
WaitTimeSeconds=self.wait_time_seconds)
if resp.get('Messages'):
body = AsyncMessage(body=resp['Messages'][0]['Body']).decode()
resp['Messages'][0]['Body'] = body
return self._messages_to_python(resp['Messages'], queue)[0]
raise Empty()
def _loop1(self, queue, _=None):
self.hub.call_soon(self._schedule_queue, queue)
def _schedule_queue(self, queue):
if queue in self._active_queues:
if self.qos.can_consume():
self._get_bulk_async(
queue, callback=promise(self._loop1, (queue,)),
)
else:
self._loop1(queue)
def _get_message_estimate(self, max_if_unlimited=SQS_MAX_MESSAGES):
maxcount = self.qos.can_consume_max_estimate()
return min(
max_if_unlimited if maxcount is None else max(maxcount, 1),
max_if_unlimited,
)
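    # Note (ours): the expression above clamps the QoS prefetch estimate to
    # the range [1, SQS_MAX_MESSAGES]; an unlimited estimate (None) requests
    # the SQS batch maximum of 10 messages.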
def _get_bulk_async(self, queue,
max_if_unlimited=SQS_MAX_MESSAGES, callback=None):
maxcount = self._get_message_estimate()
if maxcount:
return self._get_async(queue, maxcount, callback=callback)
# Not allowed to consume, make sure to notify callback..
callback = ensure_promise(callback)
callback([])
return callback
def _get_async(self, queue, count=1, callback=None):
q = self._new_queue(queue)
qname = self.canonical_queue_name(queue)
return self._get_from_sqs(
qname, count=count, connection=self.asynsqs(queue=qname),
callback=transform(self._on_messages_ready, callback, q, queue),
)
def _on_messages_ready(self, queue, qname, messages):
if 'Messages' in messages and messages['Messages']:
callbacks = self.connection._callbacks
for msg in messages['Messages']:
msg_parsed = self._message_to_python(msg, qname, queue)
callbacks[qname](msg_parsed)
def _get_from_sqs(self, queue,
count=1, connection=None, callback=None):
"""Retrieve and handle messages from SQS.
Uses long polling and returns :class:`~vine.promises.promise`.
"""
connection = connection if connection is not None else queue.connection
if self.predefined_queues:
if queue not in self._queue_cache:
raise UndefinedQueueException((
"Queue with name '{}' must be defined in "
"'predefined_queues'."
).format(queue))
queue_url = self._queue_cache[queue]
else:
queue_url = connection.get_queue_url(queue)
return connection.receive_message(
queue, queue_url, number_messages=count,
wait_time_seconds=self.wait_time_seconds,
callback=callback,
)
def _restore(self, message,
unwanted_delivery_info=('sqs_message', 'sqs_queue')):
for unwanted_key in unwanted_delivery_info:
# Remove objects that aren't JSON serializable (Issue #1108).
message.delivery_info.pop(unwanted_key, None)
return super(Channel, self)._restore(message)
def basic_ack(self, delivery_tag, multiple=False):
try:
message = self.qos.get(delivery_tag).delivery_info
sqs_message = message['sqs_message']
except KeyError:
pass
else:
queue = None
if 'routing_key' in message:
queue = self.canonical_queue_name(message['routing_key'])
self.sqs(queue=queue).delete_message(
QueueUrl=message['sqs_queue'],
ReceiptHandle=sqs_message['ReceiptHandle'],
)
super(Channel, self).basic_ack(delivery_tag)
def _size(self, queue):
"""Return the number of messages in a queue."""
url = self._new_queue(queue)
c = self.sqs(queue=self.canonical_queue_name(queue))
resp = c.get_queue_attributes(
QueueUrl=url,
AttributeNames=['ApproximateNumberOfMessages'])
return int(resp['Attributes']['ApproximateNumberOfMessages'])
def _purge(self, queue):
"""Delete all current messages in a queue."""
q = self._new_queue(queue)
# SQS is slow at registering messages, so run for a few
# iterations to ensure messages are detected and deleted.
size = 0
for i in range(10):
size += int(self._size(queue))
if not size:
break
self.sqs(queue=queue).purge_queue(QueueUrl=q)
return size
def close(self):
super(Channel, self).close()
# if self._asynsqs:
# try:
# self.asynsqs().close()
# except AttributeError as exc: # FIXME ???
# if "can't set attribute" not in str(exc):
# raise
def new_sqs_client(self, region, access_key_id, secret_access_key):
session = boto3.session.Session(
region_name=region,
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
)
is_secure = self.is_secure if self.is_secure is not None else True
client_kwargs = {
'use_ssl': is_secure
}
if self.endpoint_url is not None:
client_kwargs['endpoint_url'] = self.endpoint_url
return session.client('sqs', **client_kwargs)
def sqs(self, queue=None):
if queue is not None and self.predefined_queues:
if queue in self._predefined_queue_clients:
return self._predefined_queue_clients[queue]
if queue not in self.predefined_queues:
raise UndefinedQueueException((
"Queue with name '{}' must be defined in "
"'predefined_queues'."
).format(queue))
q = self.predefined_queues[queue]
c = self._predefined_queue_clients[queue] = self.new_sqs_client(
region=q.get('region', self.region),
access_key_id=q['access_key_id'] or self.conninfo.userid,
secret_access_key=q['secret_access_key'] or self.conninfo.password, # noqa: E501
)
return c
if self._sqs is not None:
return self._sqs
c = self._sqs = self.new_sqs_client(
region=self.region,
access_key_id=self.conninfo.userid,
secret_access_key=self.conninfo.password,
)
return c
def asynsqs(self, queue=None):
if queue is not None and self.predefined_queues:
if queue in self._predefined_queue_async_clients:
return self._predefined_queue_async_clients[queue]
if queue not in self.predefined_queues:
raise UndefinedQueueException((
"Queue with name '{}' must be defined in "
"'predefined_queues'."
).format(queue))
q = self.predefined_queues[queue]
c = self._predefined_queue_async_clients[queue] = AsyncSQSConnection( # noqa: E501
sqs_connection=self.sqs(queue=queue),
region=q.get('region', self.region)
)
return c
if self._asynsqs is not None:
return self._asynsqs
c = self._asynsqs = AsyncSQSConnection(
sqs_connection=self.sqs(queue=queue),
region=self.region
)
return c
@property
def conninfo(self):
return self.connection.client
@property
def transport_options(self):
return self.connection.client.transport_options
@cached_property
def visibility_timeout(self):
return (self.transport_options.get('visibility_timeout') or
self.default_visibility_timeout)
@cached_property
def predefined_queues(self):
"""Map of queue_name to predefined queue settings."""
return self.transport_options.get('predefined_queues', None)
@cached_property
def queue_name_prefix(self):
return self.transport_options.get('queue_name_prefix', '')
@cached_property
def supports_fanout(self):
return False
@cached_property
def region(self):
return (self.transport_options.get('region') or
boto3.Session().region_name or
self.default_region)
@cached_property
def regioninfo(self):
return self.transport_options.get('regioninfo')
@cached_property
def is_secure(self):
return self.transport_options.get('is_secure')
@cached_property
def port(self):
return self.transport_options.get('port')
@cached_property
def endpoint_url(self):
if self.conninfo.hostname is not None:
scheme = 'https' if self.is_secure else 'http'
if self.conninfo.port is not None:
port = ':{}'.format(self.conninfo.port)
else:
port = ''
return '{}://{}{}'.format(
scheme,
self.conninfo.hostname,
port
)
@cached_property
def wait_time_seconds(self):
return self.transport_options.get('wait_time_seconds',
self.default_wait_time_seconds)
class Transport(virtual.Transport):
"""SQS Transport.
Additional queue attributes can be supplied to SQS during queue
creation by passing an ``sqs-creation-attributes`` key in
transport_options. ``sqs-creation-attributes`` must be a dict whose
key-value pairs correspond with Attributes in the
`CreateQueue SQS API`_.
For example, to have SQS queues created with server-side encryption
enabled using the default Amazon Managed Customer Master Key, you
can set ``KmsMasterKeyId`` Attribute. When the queue is initially
created by Kombu, encryption will be enabled.
.. code-block:: python
from kombu.transport.SQS import Transport
transport = Transport(
...,
transport_options={
'sqs-creation-attributes': {
'KmsMasterKeyId': 'alias/aws/sqs',
},
}
)
.. _CreateQueue SQS API: https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_CreateQueue.html#API_CreateQueue_RequestParameters
""" # noqa: E501
Channel = Channel
polling_interval = 1
wait_time_seconds = 0
default_port = None
connection_errors = (
virtual.Transport.connection_errors +
(exceptions.BotoCoreError, socket.error)
)
channel_errors = (
virtual.Transport.channel_errors + (exceptions.BotoCoreError,)
)
driver_type = 'sqs'
driver_name = 'sqs'
implements = virtual.Transport.implements.extend(
asynchronous=True,
exchange_type=frozenset(['direct']),
)
@property
def default_connection_params(self):
return {'port': self.default_port}
|
|
"""
Comsystem command module.
Comm commands are OOC commands and intended to be made available to
the Player at all times (they go into the PlayerCmdSet). So we
make sure to homogenize self.caller to always be the player object
for easy handling.
"""
from django.conf import settings
from evennia.comms.models import ChannelDB, Msg
#from evennia.comms import irc, imc2, rss
from evennia.players.models import PlayerDB
from evennia.players import bots
from evennia.comms.channelhandler import CHANNELHANDLER
from evennia.utils import create, utils, prettytable, evtable
from evennia.utils.utils import make_iter
from evennia.commands.default.muxcommand import MuxCommand, MuxPlayerCommand
# limit symbol import for API
__all__ = ("CmdAddCom", "CmdDelCom", "CmdAllCom",
"CmdChannels", "CmdCdestroy", "CmdCBoot", "CmdCemit",
"CmdCWho", "CmdChannelCreate", "CmdClock", "CmdCdesc",
"CmdPage", "CmdIRC2Chan", "CmdRSS2Chan")#, "CmdIMC2Chan", "CmdIMCInfo",
#"CmdIMCTell")
_DEFAULT_WIDTH = settings.CLIENT_DEFAULT_WIDTH
def find_channel(caller, channelname, silent=False, noaliases=False):
"""
Helper function for searching for a single channel with
some error handling.
"""
channels = ChannelDB.objects.channel_search(channelname)
if not channels:
if not noaliases:
channels = [chan for chan in ChannelDB.objects.get_all_channels()
if channelname in chan.aliases.all()]
if channels:
return channels[0]
if not silent:
caller.msg("Channel '%s' not found." % channelname)
return None
elif len(channels) > 1:
matches = ", ".join(["%s(%s)" % (chan.key, chan.id) for chan in channels])
if not silent:
caller.msg("Multiple channels match (be more specific): \n%s" % matches)
return None
return channels[0]
class CmdAddCom(MuxPlayerCommand):
"""
add a channel alias and/or subscribe to a channel
Usage:
addcom [alias=] <channel>
Joins a given channel. If alias is given, this will allow you to
refer to the channel by this alias rather than the full channel
name. Subsequent calls of this command can be used to add multiple
aliases to an already joined channel.
"""
key = "addcom"
aliases = ["aliaschan", "chanalias"]
help_category = "Comms"
locks = "cmd:not pperm(channel_banned)"
def func(self):
"Implement the command"
caller = self.caller
args = self.args
player = caller
if not args:
self.msg("Usage: addcom [alias =] channelname.")
return
if self.rhs:
# rhs holds the channelname
channelname = self.rhs
alias = self.lhs
else:
channelname = self.args
alias = None
channel = find_channel(caller, channelname)
if not channel:
# we use the custom search method to handle errors.
return
# check permissions
if not channel.access(player, 'listen'):
self.msg("%s: You are not allowed to listen to this channel." % channel.key)
return
string = ""
if not channel.has_connection(player):
# we want to connect as well.
if not channel.connect(player):
# if this would have returned True, the player is connected
self.msg("%s: You are not allowed to join this channel." % channel.key)
return
else:
string += "You now listen to the channel %s. " % channel.key
else:
string += "You are already connected to channel %s." % channel.key
if alias:
# create a nick and add it to the caller.
caller.nicks.add(alias, channel.key, category="channel")
string += " You can now refer to the channel %s with the alias '%s'."
self.msg(string % (channel.key, alias))
else:
string += " No alias added."
self.msg(string)
class CmdDelCom(MuxPlayerCommand):
"""
remove a channel alias and/or unsubscribe from channel
Usage:
delcom <alias or channel>
If the full channel name is given, unsubscribe from the
channel. If an alias is given, remove the alias but don't
unsubscribe.
"""
key = "delcom"
aliases = ["delaliaschan", "delchanalias"]
help_category = "Comms"
locks = "cmd:not perm(channel_banned)"
def func(self):
"Implementing the command. "
caller = self.caller
player = caller
if not self.args:
self.msg("Usage: delcom <alias or channel>")
return
ostring = self.args.lower()
channel = find_channel(caller, ostring, silent=True, noaliases=True)
if channel:
# we have given a channel name - unsubscribe
if not channel.has_connection(player):
self.msg("You are not listening to that channel.")
return
chkey = channel.key.lower()
# find all nicks linked to this channel and delete them
for nick in [nick for nick in make_iter(caller.nicks.get(category="channel", return_obj=True))
if nick and nick.strvalue.lower() == chkey]:
nick.delete()
disconnect = channel.disconnect(player)
if disconnect:
self.msg("You stop listening to channel '%s'. Eventual aliases were removed." % channel.key)
return
else:
# we are removing a channel nick
channame = caller.nicks.get(key=ostring, category="channel")
channel = find_channel(caller, channame, silent=True)
if not channel:
self.msg("No channel with alias '%s' was found." % ostring)
else:
if caller.nicks.get(ostring, category="channel"):
caller.nicks.remove(ostring, category="channel")
self.msg("Your alias '%s' for channel %s was cleared." % (ostring, channel.key))
else:
self.msg("You had no such alias defined for this channel.")
class CmdAllCom(MuxPlayerCommand):
"""
perform admin operations on all channels
Usage:
allcom [on | off | who | destroy]
Allows the user to universally turn off or on all channels they are on,
as well as perform a 'who' for all channels they are on. Destroy deletes
all channels that you control.
Without argument, works like comlist.
"""
key = "allcom"
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"Runs the function"
caller = self.caller
args = self.args
if not args:
caller.execute_cmd("@channels")
self.msg("(Usage: allcom on | off | who | destroy)")
return
if args == "on":
# get names of all channels available to listen to
# and activate them all
channels = [chan for chan in ChannelDB.objects.get_all_channels()
if chan.access(caller, 'listen')]
for channel in channels:
caller.execute_cmd("addcom %s" % channel.key)
elif args == "off":
#get names all subscribed channels and disconnect from them all
channels = ChannelDB.objects.get_subscriptions(caller)
for channel in channels:
caller.execute_cmd("delcom %s" % channel.key)
elif args == "destroy":
# destroy all channels you control
channels = [chan for chan in ChannelDB.objects.get_all_channels()
if chan.access(caller, 'control')]
for channel in channels:
caller.execute_cmd("@cdestroy %s" % channel.key)
elif args == "who":
# run a who, listing the subscribers on visible channels.
string = "\n{CChannel subscriptions{n"
channels = [chan for chan in ChannelDB.objects.get_all_channels()
if chan.access(caller, 'listen')]
if not channels:
string += "No channels."
for channel in channels:
string += "\n{w%s:{n\n" % channel.key
subs = channel.db_subscriptions.all()
if subs:
string += " " + ", ".join([player.key for player in subs])
else:
string += " <None>"
self.msg(string.strip())
else:
# wrong input
self.msg("Usage: allcom on | off | who | clear")
class CmdChannels(MuxPlayerCommand):
"""
list all channels available to you
Usage:
@channels
@clist
comlist
Lists all channels available to you, whether you listen to them or not.
Use 'comlist' to only view your current channel subscriptions.
Use addcom/delcom to join and leave channels
"""
key = "@channels"
aliases = ["@clist", "channels", "comlist", "chanlist", "channellist", "all channels"]
help_category = "Comms"
locks = "cmd: not pperm(channel_banned)"
def func(self):
"Implement function"
caller = self.caller
# all channels we have available to listen to
channels = [chan for chan in ChannelDB.objects.get_all_channels()
if chan.access(caller, 'listen')]
#print channels
if not channels:
self.msg("No channels available.")
return
# all channel we are already subscribed to
subs = ChannelDB.objects.get_subscriptions(caller)
#print subs
if self.cmdstring == "comlist":
# just display the subscribed channels with no extra info
comtable = evtable.EvTable("{wchannel{n", "{wmy aliases{n", "{wdescription{n", align="l", maxwidth=_DEFAULT_WIDTH)
#comtable = prettytable.PrettyTable(["{wchannel", "{wmy aliases", "{wdescription"])
for chan in subs:
clower = chan.key.lower()
nicks = caller.nicks.get(category="channel")
comtable.add_row(*["%s%s" % (chan.key, chan.aliases.all() and
"(%s)" % ",".join(chan.aliases.all()) or ""),
"%s".join(nick for nick in make_iter(nicks)
if nick and nick.lower() == clower),
chan.db.desc])
caller.msg("\n{wChannel subscriptions{n (use {w@channels{n to list all, {waddcom{n/{wdelcom{n to sub/unsub):{n\n%s" % comtable)
else:
# full listing (of channels caller is able to listen to)
comtable = evtable.EvTable("{wsub{n", "{wchannel{n", "{wmy aliases{n", "{wlocks{n", "{wdescription{n", maxwidth=_DEFAULT_WIDTH)
#comtable = prettytable.PrettyTable(["{wsub", "{wchannel", "{wmy aliases", "{wlocks", "{wdescription"])
for chan in channels:
clower = chan.key.lower()
nicks = caller.nicks.get(category="channel")
nicks = nicks or []
comtable.add_row(*[chan in subs and "{gYes{n" or "{rNo{n",
"%s%s" % (chan.key, chan.aliases.all() and
"(%s)" % ",".join(chan.aliases.all()) or ""),
"%s".join(nick for nick in make_iter(nicks)
if nick.lower() == clower),
str(chan.locks),
chan.db.desc])
caller.msg("\n{wAvailable channels{n (use {wcomlist{n,{waddcom{n and {wdelcom{n to manage subscriptions):\n%s" % comtable)
class CmdCdestroy(MuxPlayerCommand):
"""
destroy a channel you created
Usage:
@cdestroy <channel>
Destroys a channel that you control.
"""
key = "@cdestroy"
help_category = "Comms"
locks = "cmd: not pperm(channel_banned)"
def func(self):
"Destroy objects cleanly."
caller = self.caller
if not self.args:
self.msg("Usage: @cdestroy <channelname>")
return
channel = find_channel(caller, self.args)
if not channel:
self.msg("Could not find channel %s." % self.args)
return
if not channel.access(caller, 'control'):
self.msg("You are not allowed to do that.")
return
channel_key = channel.key
message = "%s is being destroyed. Make sure to change your aliases." % channel_key
msgobj = create.create_message(caller, message, channel)
channel.msg(msgobj)
channel.delete()
CHANNELHANDLER.update()
self.msg("Channel '%s' was destroyed." % channel_key)
class CmdCBoot(MuxPlayerCommand):
"""
kick a player from a channel you control
Usage:
@cboot[/quiet] <channel> = <player> [:reason]
Switches:
quiet - don't notify the channel
Kicks a player or object from a channel you control.
"""
key = "@cboot"
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"implement the function"
if not self.args or not self.rhs:
string = "Usage: @cboot[/quiet] <channel> = <player> [:reason]"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
reason = ""
if ":" in self.rhs:
playername, reason = self.rhs.rsplit(":", 1)
searchstring = playername.lstrip('*')
else:
searchstring = self.rhs.lstrip('*')
player = self.caller.search(searchstring, player=True)
if not player:
return
if reason:
reason = " (reason: %s)" % reason
if not channel.access(self.caller, "control"):
string = "You don't control this channel."
self.msg(string)
return
        if player not in channel.db_subscriptions.all():
string = "Player %s is not connected to channel %s." % (player.key, channel.key)
self.msg(string)
return
if not "quiet" in self.switches:
string = "%s boots %s from channel.%s" % (self.caller, player.key, reason)
channel.msg(string)
# find all player's nicks linked to this channel and delete them
for nick in [nick for nick in
player.character.nicks.get(category="channel") or []
                     if nick.db_real.lower() == channel.key.lower()]:
nick.delete()
# disconnect player
channel.disconnect(player)
CHANNELHANDLER.update()
class CmdCemit(MuxPlayerCommand):
"""
send an admin message to a channel you control
Usage:
@cemit[/switches] <channel> = <message>
Switches:
sendername - attach the sender's name before the message
quiet - don't echo the message back to sender
Allows the user to broadcast a message over a channel as long as
they control it. It does not show the user's name unless they
provide the /sendername switch.
"""
key = "@cemit"
aliases = ["@cmsg"]
locks = "cmd: not pperm(channel_banned) and pperm(Players)"
help_category = "Comms"
def func(self):
"Implement function"
if not self.args or not self.rhs:
string = "Usage: @cemit[/switches] <channel> = <message>"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
if not channel.access(self.caller, "control"):
string = "You don't control this channel."
self.msg(string)
return
message = self.rhs
if "sendername" in self.switches:
message = "%s: %s" % (self.key, message)
channel.msg(message)
if not "quiet" in self.switches:
string = "Sent to channel %s: %s" % (channel.key, message)
self.msg(string)
class CmdCWho(MuxPlayerCommand):
"""
show who is listening to a channel
Usage:
@cwho <channel>
List who is connected to a given channel you have access to.
"""
key = "@cwho"
locks = "cmd: not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"implement function"
if not self.args:
string = "Usage: @cwho <channel>"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
if not channel.access(self.caller, "listen"):
string = "You can't access this channel."
self.msg(string)
return
string = "\n{CChannel subscriptions{n"
string += "\n{w%s:{n\n" % channel.key
subs = channel.db_subscriptions.all()
if subs:
string += " " + ", ".join([player.key for player in subs])
else:
string += " <None>"
self.msg(string.strip())
class CmdChannelCreate(MuxPlayerCommand):
"""
create a new channel
Usage:
@ccreate <new channel>[;alias;alias...] = description
Creates a new channel owned by you.
"""
key = "@ccreate"
aliases = "channelcreate"
locks = "cmd:not pperm(channel_banned) and pperm(Players)"
help_category = "Comms"
def func(self):
"Implement the command"
caller = self.caller
if not self.args:
self.msg("Usage @ccreate <channelname>[;alias;alias..] = description")
return
description = ""
if self.rhs:
description = self.rhs
lhs = self.lhs
channame = lhs
aliases = None
if ';' in lhs:
channame, aliases = [part.strip().lower()
for part in lhs.split(';', 1) if part.strip()]
aliases = [alias.strip().lower()
for alias in aliases.split(';') if alias.strip()]
channel = ChannelDB.objects.channel_search(channame)
if channel:
self.msg("A channel with that name already exists.")
return
# Create and set the channel up
lockstring = "send:all();listen:all();control:id(%s)" % caller.id
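        # Note (ours): this default lockstring lets anyone send and listen
        # while only the creating player (matched by dbref id) controls it.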
new_chan = create.create_channel(channame,
aliases,
description,
locks=lockstring)
new_chan.connect(caller)
CHANNELHANDLER.update()
self.msg("Created channel %s and connected to it." % new_chan.key)
class CmdClock(MuxPlayerCommand):
"""
change channel locks of a channel you control
Usage:
@clock <channel> [= <lockstring>]
Changes the lock access restrictions of a channel. If no
lockstring was given, view the current lock definitions.
"""
key = "@clock"
locks = "cmd:not pperm(channel_banned)"
aliases = ["@clock"]
help_category = "Comms"
def func(self):
"run the function"
if not self.args:
string = "Usage: @clock channel [= lockstring]"
self.msg(string)
return
channel = find_channel(self.caller, self.lhs)
if not channel:
return
if not self.rhs:
# no =, so just view the current locks
string = "Current locks on %s:" % channel.key
string = "%s\n %s" % (string, channel.locks)
self.msg(string)
return
# we want to add/change a lock.
if not channel.access(self.caller, "control"):
string = "You don't control this channel."
self.msg(string)
return
# Try to add the lock
channel.locks.add(self.rhs)
string = "Lock(s) applied. "
string += "Current locks on %s:" % channel.key
string = "%s\n %s" % (string, channel.locks)
self.msg(string)
class CmdCdesc(MuxPlayerCommand):
"""
describe a channel you control
Usage:
@cdesc <channel> = <description>
Changes the description of the channel as shown in
channel lists.
"""
key = "@cdesc"
locks = "cmd:not pperm(channel_banned)"
help_category = "Comms"
def func(self):
"Implement command"
caller = self.caller
if not self.rhs:
self.msg("Usage: @cdesc <channel> = <description>")
return
channel = find_channel(caller, self.lhs)
if not channel:
self.msg("Channel '%s' not found." % self.lhs)
return
#check permissions
if not channel.access(caller, 'control'):
self.msg("You cannot admin this channel.")
return
# set the description
channel.db.desc = self.rhs
channel.save()
self.msg("Description of channel '%s' set to '%s'." % (channel.key,
self.rhs))
class CmdPage(MuxPlayerCommand):
"""
send a private message to another player
Usage:
page[/switches] [<player>,<player>,... = <message>]
tell ''
page <number>
Switch:
last - shows who you last messaged
list - show your last <number> of tells/pages (default)
Send a message to target user (if online). If no
argument is given, you will get a list of your latest messages.
"""
key = "page"
aliases = ['tell']
locks = "cmd:not pperm(page_banned)"
help_category = "Comms"
def func(self):
"Implement function using the Msg methods"
# this is a MuxPlayerCommand, which means caller will be a Player.
caller = self.caller
# get the messages we've sent (not to channels)
pages_we_sent = Msg.objects.get_messages_by_sender(caller,
exclude_channel_messages=True)
# get last messages we've got
pages_we_got = Msg.objects.get_messages_by_receiver(caller)
if 'last' in self.switches:
if pages_we_sent:
recv = ",".join(obj.key for obj in pages_we_sent[-1].receivers)
self.msg("You last paged {c%s{n:%s" % (recv,
pages_we_sent[-1].message))
return
else:
self.msg("You haven't paged anyone yet.")
return
if not self.args or not self.rhs:
pages = pages_we_sent + pages_we_got
pages.sort(lambda x, y: cmp(x.date_sent, y.date_sent))
number = 5
if self.args:
try:
number = int(self.args)
except ValueError:
self.msg("Usage: tell [<player> = msg]")
return
if len(pages) > number:
lastpages = pages[-number:]
else:
lastpages = pages
template = "{w%s{n {c%s{n to {c%s{n: %s"
lastpages = "\n ".join(template %
(utils.datetime_format(page.date_sent),
",".join(obj.key for obj in page.senders),
"{n,{c ".join([obj.name for obj in page.receivers]),
page.message) for page in lastpages)
if lastpages:
string = "Your latest pages:\n %s" % lastpages
else:
string = "You haven't paged anyone yet."
self.msg(string)
return
# We are sending. Build a list of targets
if not self.lhs:
# If there are no targets, then set the targets
# to the last person we paged.
if pages_we_sent:
receivers = pages_we_sent[-1].receivers
else:
self.msg("Who do you want to page?")
return
else:
receivers = self.lhslist
recobjs = []
for receiver in set(receivers):
if isinstance(receiver, basestring):
pobj = caller.search(receiver)
elif hasattr(receiver, 'character'):
pobj = receiver
else:
self.msg("Who do you want to page?")
return
if pobj:
recobjs.append(pobj)
if not recobjs:
self.msg("Noone found to page.")
return
header = "{wPlayer{n {c%s{n {wpages:{n" % caller.key
message = self.rhs
# if message begins with a :, we assume it is a 'page-pose'
if message.startswith(":"):
message = "%s %s" % (caller.key, message.strip(':').strip())
# create the persistent message object
create.create_message(caller, message,
receivers=recobjs)
# tell the players they got a message.
received = []
rstrings = []
for pobj in recobjs:
if not pobj.access(caller, 'msg'):
rstrings.append("You are not allowed to page %s." % pobj)
continue
pobj.msg("%s %s" % (header, message))
if hasattr(pobj, 'sessions') and not pobj.sessions:
received.append("{C%s{n" % pobj.name)
rstrings.append("%s is offline. They will see your message if they list their pages later." % received[-1])
else:
received.append("{c%s{n" % pobj.name)
if rstrings:
self.msg("\n".join(rstrings))
self.msg("You paged %s with: '%s'." % (", ".join(received), message))
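
# --- illustrative sketch (not part of the command set) ---
# A standalone rendering of the "page-pose" rule used in CmdPage above:
# a body starting with ':' becomes an emote prefixed by the sender's
# name. The helper function and names are hypothetical.
def _render_page_body(sender_key, message):
    """Return the message body as it would be relayed to receivers."""
    if message.startswith(":"):
        message = "%s %s" % (sender_key, message.strip(':').strip())
    return message

assert _render_page_body("Anna", ":waves") == "Anna waves"
assert _render_page_body("Anna", "hello") == "hello"
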


class CmdIRC2Chan(MuxCommand):
    """
    link an evennia channel to an external IRC channel

    Usage:
      @irc2chan[/switches] <evennia_channel> = <ircnetwork> <port> <#irchannel> <botname>
      @irc2chan/list
      @irc2chan/delete botname|#dbid

    Switches:
      /delete - this will delete the bot and remove the irc connection
        to the channel.
      /remove - "
      /disconnect - "
      /list - show all irc<->evennia mappings

    Example:
      @irc2chan myircchan = irc.dalnet.net 6667 myevennia-channel evennia-bot

    This creates an IRC bot that connects to a given IRC network and channel.
    It will relay everything said in the evennia channel to the IRC channel
    and vice versa. The bot will automatically connect at server start, so
    this command need only be given once. The /disconnect switch will
    permanently delete the bot. To only temporarily deactivate it, use the
    {w@services{n command instead.
    """
    key = "@irc2chan"
    locks = "cmd:serversetting(IRC_ENABLED) and pperm(Immortals)"
    help_category = "Comms"

def func(self):
"Setup the irc-channel mapping"
if not settings.IRC_ENABLED:
string = """IRC is not enabled. You need to activate it in game/settings.py."""
self.msg(string)
return
if 'list' in self.switches:
# show all connections
ircbots = [bot for bot in PlayerDB.objects.filter(db_is_bot=True, username__startswith="ircbot-")]
if ircbots:
from evennia.utils.evtable import EvTable
table = EvTable("{wdbid{n", "{wbotname{n", "{wev-channel{n", "{wirc-channel{n", maxwidth=_DEFAULT_WIDTH)
for ircbot in ircbots:
ircinfo = "%s (%s:%s)" % (ircbot.db.irc_channel, ircbot.db.irc_network, ircbot.db.irc_port)
table.add_row(ircbot.id, ircbot.db.irc_botname, ircbot.db.ev_channel, ircinfo)
self.caller.msg(table)
else:
self.msg("No irc bots found.")
return
if('disconnect' in self.switches or 'remove' in self.switches or
'delete' in self.switches):
botname = "ircbot-%s" % self.lhs
matches = PlayerDB.objects.filter(db_is_bot=True, username=botname)
dbref = utils.dbref(self.lhs)
if not matches and dbref:
# try dbref match
matches = PlayerDB.objects.filter(db_is_bot=True, id=dbref)
if matches:
matches[0].delete()
self.msg("IRC connection destroyed.")
else:
self.msg("IRC connection/bot could not be removed, does it exist?")
return
if not self.args or not self.rhs:
string = "Usage: @irc2chan[/switches] <evennia_channel> = <ircnetwork> <port> <#irchannel> <botname>"
self.msg(string)
return
channel = self.lhs
self.rhs = self.rhs.replace('#', ' ') # to avoid Python comment issues
try:
irc_network, irc_port, irc_channel, irc_botname = \
[part.strip() for part in self.rhs.split(None, 3)]
irc_channel = "#%s" % irc_channel
except Exception:
string = "IRC bot definition '%s' is not valid." % self.rhs
self.msg(string)
return
botname = "ircbot-%s" % irc_botname
# create a new bot
bot = PlayerDB.objects.filter(username__iexact=botname)
if bot:
# re-use an existing bot
bot = bot[0]
if not bot.is_bot:
self.msg("Player '%s' already exists and is not a bot." % botname)
return
else:
bot = create.create_player(botname, None, None, typeclass=bots.IRCBot)
bot.start(ev_channel=channel, irc_botname=irc_botname, irc_channel=irc_channel,
irc_network=irc_network, irc_port=irc_port)
self.msg("Connection created. Starting IRC bot.")
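
# --- illustrative sketch (not part of the command set) ---
# How CmdIRC2Chan parses its right-hand side above: '#' is temporarily
# replaced by a space, the string is split into exactly four fields, and
# the '#' is restored on the channel name. The helper is hypothetical.
def _parse_irc_rhs(rhs):
    """Return (network, port, '#channel', botname) from an @irc2chan rhs."""
    rhs = rhs.replace('#', ' ')
    network, port, channel, botname = [part.strip() for part in rhs.split(None, 3)]
    return network, port, "#%s" % channel, botname

assert _parse_irc_rhs("irc.dalnet.net 6667 #mychan mybot") == \
    ("irc.dalnet.net", "6667", "#mychan", "mybot")
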


# RSS connection
class CmdRSS2Chan(MuxCommand):
    """
    link an evennia channel to an external RSS feed

    Usage:
      @rss2chan[/switches] <evennia_channel> = <rss_url>

    Switches:
      /disconnect - this will stop the feed and remove the connection to the
        channel.
      /remove - "
      /list - show all rss->evennia mappings

    Example:
      @rss2chan rsschan = http://code.google.com/feeds/p/evennia/updates/basic

    This creates an RSS reader that connects to a given RSS feed url. Updates
    will be echoed as a title and news link to the given channel. The rate of
    updating is set with the RSS_UPDATE_INTERVAL variable in settings (default
    is every 10 minutes).

    When disconnecting you need to supply both the channel and url again so as
    to identify the connection uniquely.
    """
    key = "@rss2chan"
    locks = "cmd:serversetting(RSS_ENABLED) and pperm(Immortals)"
    help_category = "Comms"

def func(self):
"Setup the rss-channel mapping"
# checking we have all we need
if not settings.RSS_ENABLED:
string = """RSS is not enabled. You need to activate it in game/settings.py."""
self.msg(string)
return
try:
import feedparser
feedparser # to avoid checker error of not being used
except ImportError:
string = ("RSS requires python-feedparser (https://pypi.python.org/pypi/feedparser). "
"Install before continuing.")
self.msg(string)
return
if 'list' in self.switches:
# show all connections
rssbots = [bot for bot in PlayerDB.objects.filter(db_is_bot=True, username__startswith="rssbot-")]
if rssbots:
from evennia.utils.evtable import EvTable
table = EvTable("{wdbid{n", "{wupdate rate{n", "{wev-channel",
"{wRSS feed URL{n", border="cells", maxwidth=_DEFAULT_WIDTH)
for rssbot in rssbots:
table.add_row(rssbot.id, rssbot.db.rss_rate, rssbot.db.ev_channel, rssbot.db.rss_url)
self.caller.msg(table)
else:
self.msg("No rss bots found.")
return
if('disconnect' in self.switches or 'remove' in self.switches or
'delete' in self.switches):
botname = "rssbot-%s" % self.lhs
matches = PlayerDB.objects.filter(db_is_bot=True, db_key=botname)
if not matches:
# try dbref match
matches = PlayerDB.objects.filter(db_is_bot=True, id=self.args.lstrip("#"))
if matches:
matches[0].delete()
self.msg("RSS connection destroyed.")
else:
self.msg("RSS connection/bot could not be removed, does it exist?")
return
if not self.args or not self.rhs:
string = "Usage: @rss2chan[/switches] <evennia_channel> = <rss url>"
self.msg(string)
return
channel = self.lhs
url = self.rhs
botname = "rssbot-%s" % url
# create a new bot
bot = PlayerDB.objects.filter(username__iexact=botname)
if bot:
# re-use existing bot
bot = bot[0]
if not bot.is_bot:
self.msg("Player '%s' already exists and is not a bot." % botname)
return
else:
bot = create.create_player(botname, None, None, typeclass=bots.RSSBot)
bot.start(ev_channel=channel, rss_url=url, rss_rate=10)
self.msg("RSS reporter created. Fetching RSS.")
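
# --- illustrative sketch (not part of the command set) ---
# What the bot created above boils down to on each poll, reduced to the
# feedparser calls; the helper and the URL below are hypothetical.
def _poll_feed(url):
    """Return (title, link) pairs for the current entries of a feed."""
    import feedparser  # same optional dependency CmdRSS2Chan checks for
    feed = feedparser.parse(url)
    return [(entry.title, entry.link) for entry in feed.entries]

# for title, link in _poll_feed("http://example.com/feed.rss"):
#     print("%s (%s)" % (title, link))
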
#class CmdIMC2Chan(MuxCommand):
# """
# link an evennia channel to an external IMC2 channel
#
# Usage:
# @imc2chan[/switches] <evennia_channel> = <imc2_channel>
#
# Switches:
#    /disconnect - this clears the imc2 connection to the channel.
# /remove - "
# /list - show all imc2<->evennia mappings
#
# Example:
# @imc2chan myimcchan = ievennia
#
# Connect an existing evennia channel to a channel on an IMC2
# network. The network contact information is defined in settings and
# should already be accessed at this point. Use @imcchanlist to see
# available IMC channels.
#
# """
#
# key = "@imc2chan"
# locks = "cmd:serversetting(IMC2_ENABLED) and pperm(Immortals)"
# help_category = "Comms"
#
# def func(self):
# "Setup the imc-channel mapping"
#
# if not settings.IMC2_ENABLED:
# string = """IMC is not enabled. You need to activate it in game/settings.py."""
# self.msg(string)
# return
#
# if 'list' in self.switches:
# # show all connections
# connections = ExternalChannelConnection.objects.filter(db_external_key__startswith='imc2_')
# if connections:
# table = prettytable.PrettyTable(["Evennia channel", "IMC channel"])
# for conn in connections:
# table.add_row([conn.channel.key, conn.external_config])
# string = "{wIMC connections:{n\n%s" % table
# self.msg(string)
# else:
# self.msg("No connections found.")
# return
#
# if not self.args or not self.rhs:
# string = "Usage: @imc2chan[/switches] <evennia_channel> = <imc2_channel>"
# self.msg(string)
# return
#
# channel = self.lhs
# imc2_channel = self.rhs
#
# if('disconnect' in self.switches or 'remove' in self.switches or
# 'delete' in self.switches):
# # we don't search for channels before this since we want
# # to clear the link also if the channel no longer exists.
# ok = imc2.delete_connection(channel, imc2_channel)
# if not ok:
# self.msg("IMC2 connection could not be removed, does it exist?")
# else:
# self.msg("IMC2 connection destroyed.")
# return
#
# # actually get the channel object
# channel = find_channel(self.caller, channel)
# if not channel:
# return
#
# ok = imc2.create_connection(channel, imc2_channel)
# if not ok:
# self.msg("The connection %s <-> %s already exists." % (channel.key, imc2_channel))
# return
# self.msg("Created connection channel %s <-> IMC channel %s." % (channel.key, imc2_channel))
#
#
#class CmdIMCInfo(MuxCommand):
# """
# get various IMC2 information
#
# Usage:
# @imcinfo[/switches]
# @imcchanlist - list imc2 channels
# @imclist - list connected muds
# @imcwhois <playername> - whois info about a remote player
#
# Switches for @imcinfo:
# channels - as @imcchanlist (default)
# games or muds - as @imclist
# whois - as @imcwhois (requires an additional argument)
# update - force an update of all lists
#
# Shows lists of games or channels on the IMC2 network.
# """
#
# key = "@imcinfo"
# aliases = ["@imcchanlist", "@imclist", "@imcwhois"]
# locks = "cmd: serversetting(IMC2_ENABLED) and pperm(Wizards)"
# help_category = "Comms"
#
# def func(self):
# "Run the command"
#
# if not settings.IMC2_ENABLED:
# string = """IMC is not enabled. You need to activate it in game/settings.py."""
# self.msg(string)
# return
#
# if "update" in self.switches:
# # update the lists
# import time
# from evennia.comms.imc2lib import imc2_packets as pck
# from evennia.comms.imc2 import IMC2_MUDLIST, IMC2_CHANLIST, IMC2_CLIENT
# # update connected muds
# IMC2_CLIENT.send_packet(pck.IMC2PacketKeepAliveRequest())
# # prune inactive muds
# for name, mudinfo in IMC2_MUDLIST.mud_list.items():
# if time.time() - mudinfo.last_updated > 3599:
# del IMC2_MUDLIST.mud_list[name]
# # update channel list
# IMC2_CLIENT.send_packet(pck.IMC2PacketIceRefresh())
# self.msg("IMC2 lists were re-synced.")
#
# elif("games" in self.switches or "muds" in self.switches
# or self.cmdstring == "@imclist"):
# # list muds
# from evennia.comms.imc2 import IMC2_MUDLIST
#
# muds = IMC2_MUDLIST.get_mud_list()
# networks = set(mud.networkname for mud in muds)
# string = ""
# nmuds = 0
# for network in networks:
# table = prettytable.PrettyTable(["Name", "Url", "Host", "Port"])
# for mud in (mud for mud in muds if mud.networkname == network):
# nmuds += 1
# table.add_row([mud.name, mud.url, mud.host, mud.port])
# string += "\n{wMuds registered on %s:{n\n%s" % (network, table)
# string += "\n %i Muds found." % nmuds
# self.msg(string)
#
# elif "whois" in self.switches or self.cmdstring == "@imcwhois":
# # find out about a player
# if not self.args:
# self.msg("Usage: @imcwhois <playername>")
# return
# from evennia.comms.imc2 import IMC2_CLIENT
# self.msg("Sending IMC whois request. If you receive no response, no matches were found.")
# IMC2_CLIENT.msg_imc2(None,
# from_obj=self.caller,
# packet_type="imcwhois",
# target=self.args)
#
# elif(not self.switches or "channels" in self.switches or
# self.cmdstring == "@imcchanlist"):
# # show channels
# from evennia.comms.imc2 import IMC2_CHANLIST, IMC2_CLIENT
#
# channels = IMC2_CHANLIST.get_channel_list()
# string = ""
# nchans = 0
# table = prettytable.PrettyTable(["Full name", "Name", "Owner", "Perm", "Policy"])
# for chan in channels:
# nchans += 1
# table.add_row([chan.name, chan.localname, chan.owner,
# chan.level, chan.policy])
# string += "\n{wChannels on %s:{n\n%s" % (IMC2_CLIENT.factory.network, table)
# string += "\n%i Channels found." % nchans
# self.msg(string)
# else:
# # no valid inputs
# string = "Usage: imcinfo|imcchanlist|imclist"
# self.msg(string)
#
#
## unclear if this is working ...
#class CmdIMCTell(MuxCommand):
# """
# send a page to a remote IMC player
#
# Usage:
# imctell User@MUD = <msg>
# imcpage "
#
# Sends a page to a user on a remote MUD, connected
# over IMC2.
# """
#
# key = "imctell"
# aliases = ["imcpage", "imc2tell", "imc2page"]
# locks = "cmd: serversetting(IMC2_ENABLED)"
# help_category = "Comms"
#
# def func(self):
# "Send tell across IMC"
#
# if not settings.IMC2_ENABLED:
# string = """IMC is not enabled. You need to activate it in game/settings.py."""
# self.msg(string)
# return
#
# from evennia.comms.imc2 import IMC2_CLIENT
#
# if not self.args or not '@' in self.lhs or not self.rhs:
# string = "Usage: imctell User@Mud = <msg>"
# self.msg(string)
# return
# target, destination = self.lhs.split("@", 1)
# message = self.rhs.strip()
# data = {"target":target, "destination":destination}
#
# # send to imc2
# IMC2_CLIENT.msg_imc2(message, from_obj=self.caller, packet_type="imctell", **data)
#
# self.msg("You paged {c%s@%s{n (over IMC): '%s'." % (target, destination, message))
#
#
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from pyface.qt.QtCore import Qt
from pyface.qt.QtGui import QColor, QWidget, QLabel
from pyface.qt.QtGui import (
QGraphicsView,
QGraphicsScene,
QBrush,
QPen,
QRadialGradient,
QVBoxLayout,
)
from traits.api import HasTraits, Int, Callable, Str, List
from traitsui.basic_editor_factory import BasicEditorFactory
from traitsui.qt4.editor import Editor
# ============= standard library imports ========================
# ============= local library imports ==========================
COLORS = ["red", "yellow", "green", "black"]
QT_COLORS = [QColor(ci) for ci in COLORS]


class LED(HasTraits):
    state = Int

    def set_state(self, v):
        # accept either a color name from COLORS or a direct index/bool
        if isinstance(v, str):
            self.state = COLORS.index(v)
        elif isinstance(v, (bool, int)):
            self.state = int(v)


class ButtonLED(LED):
    callable = Callable

    def on_action(self):
        self.callable()


def change_intensity(color, fac):
    """Return a copy of `color` with each RGB channel scaled by `fac`
    and clamped to 255."""
    rgb = [color.red(), color.green(), color.blue()]
    rgb = [min(int(round(c * fac, 0)), 255) for c in rgb]
    return QColor(*rgb)
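
# --- illustrative usage (values are assumptions) ---
# Halving pure red yields a darker red:
#     change_intensity(QColor('red'), 0.5)   # -> QColor(128, 0, 0)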


def get_gradient(c, cx, cy, rad):
    """Return a QBrush with a radial gradient centered at (cx, cy),
    fading from white at the center to color `c` at radius `rad`."""
    gradient = QRadialGradient(cx, cy, rad)
    gradient.setColorAt(0, Qt.white)
    gradient.setColorAt(1, c)
    return QBrush(gradient)


class LEDGraphicsView(QGraphicsView):
def __init__(self, rad, scene, *args, **kw):
super(LEDGraphicsView, self).__init__(*args, **kw)
self.setStyleSheet("border: 0px")
self.setMaximumWidth(rad + 15)
self.setMaximumHeight(rad + 15)
self.setScene(scene)


DIAMETER_SCALAR = 1.75


class _LEDEditor(Editor):
    colors = List

    def __init__(self, *args, **kw):
        super(_LEDEditor, self).__init__(*args, **kw)
        self._led_ellipse = None

    def init(self, parent):
        """Create the LED scene, its gradient-filled ellipse and the
        containing widget."""
rad = self.factory.radius
if not rad:
rad = 20
if self.control is None:
scene = QGraphicsScene()
# system background color
scene.setBackgroundBrush(QBrush(QColor(237, 237, 237)))
x, y = 10, 10
cx = x + rad / DIAMETER_SCALAR
cy = y + rad / DIAMETER_SCALAR
self.colors = [QColor(ci) for ci in self.factory.colors]
brush = get_gradient(self.colors[self.value], cx, cy, rad / 2)
pen = QPen()
pen.setWidth(0)
self._led_ellipse = scene.addEllipse(x, y, rad, rad, pen=pen, brush=brush)
ctrl = LEDGraphicsView(rad, scene)
layout = QVBoxLayout()
layout.addWidget(ctrl)
layout.setAlignment(ctrl, Qt.AlignHCenter)
if self.factory.label:
txt = QLabel(self.factory.label)
layout.addWidget(txt)
layout.setAlignment(txt, Qt.AlignHCenter)
self.control = QWidget()
self.control.setLayout(layout)

    def update_editor(self):
        """Refresh the ellipse's gradient brush to match the current
        trait value."""
if self.control is not None:
rect = self._led_ellipse.rect()
x = rect.x()
y = rect.y()
r = rect.width()
x += r / DIAMETER_SCALAR
y += r / DIAMETER_SCALAR
self._led_ellipse.setBrush(
get_gradient(self.colors[self.value], x, y, r / 2)
)


class LEDEditor(BasicEditorFactory):
    """Traits UI editor factory for a colored LED status indicator."""
klass = _LEDEditor
radius = Int(20)
label = Str
colors = List(["red", "yellow", "green", "black"])
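
# --- illustrative sketch (not part of the original module) ---
# A minimal, hypothetical Traits UI view wiring LEDEditor to an Int
# trait; run this module directly to see the LED.
if __name__ == '__main__':
    from traitsui.api import View, Item

    class _Demo(HasTraits):
        state = Int(0)  # index into LEDEditor.colors

        traits_view = View(
            Item('state',
                 editor=LEDEditor(colors=['red', 'green']),
                 show_label=False))

    _Demo().configure_traits()
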
# ============= EOF ====================================
# class qtLED(QLabel):
# _state = False
#
# def __init__(self, parent, obj, state):
# '''
#
# '''
# super(qtLED, self).__init__()
#
# self._blink = 0
# self.blink = False
#
#
#
#
# self._obj = obj
# s = self._obj.shape
# if s == 'circle':
# self.ascii_led = '''
# 000000-----000000
# 0000---------0000
# 000-----------000
# 00-----XXX-----00
# 0----XXXXXXX----0
# 0---XXXXXXXXX---0
# ----XXXXXXXXX----
# ---XXXXXXXXXXX---
# ---XXXXXXXXXXX---
# ---XXXXXXXXXXX---
# ----XXXXXXXXX----
# 0---XXXXXXXXX---0
# 0----XXXXXXX----0
# 00-----XXX-----00
# 000-----------000
# 0000---------0000
# 000000-----000000
# '''.strip()
# else:
# self.ascii_led = '''
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXX
# '''.strip()
#
# self.set_state(state)
#
#
# # def OnMotion(self, event):
# # print 'exception', event
# #
# # def OnLeft(self, event):
# # if self._state:
# # self.set_state(0)
# # else:
# # self.set_state(2)
# #
# # if self._obj is not None:
# # self._obj.on_action()
# #
# # def GetValue(self):
# # return self._state
#
# def setText(self, v):
# pass
#
# # def SetValue(self, v):
# # if isinstance(v, int):
# # self._state = v
# #
# # def OnTimer(self, event):
# # '''
# #
# # '''
# # if self.blink:
# # if self._blink % 3 == 0:
# # self._set_led_color(0, color=change_intensity(WX_COLORS[self._state], 0.5))
# # else:
# # self._set_led_color(self._state)
# #
# # self._blink += 1
# # if self._blink >= 100:
# # self._blink = 0
#
# def set_state(self, s):
# '''
#
# '''
# self.blink = False
# # use negative values for blinking
# if s < 0:
# self.blink = True
# # self.timer.Start(200)
# else:
# pass
# # self.timer.Stop()
#
# s = abs(s)
#
# self._state = s
# self._set_led_color(s)
#
#
# def _set_image(self, color1, color2):
# xpm = ['17 17 3 1', # width height ncolors chars_per_pixel
# '0 c None',
# 'X c {}'.format(color1.name()),
# '- c {}'.format(color2.name())
# ]
# xpm += [s.strip() for s in self.ascii_led.splitlines()]
#
# def _update():
# qim = QImage(xpm)
# pix = QPixmap.fromImage(qim)
# self.setPixmap(pix)
#
# invoke_in_main_thread(_update)
#
# def _set_led_color(self, state, color=None):
# '''
#
# '''
# if color is not None:
# color1 = color
# color2 = color
# else:
# # base_color = WX_COLORS[state]
# base_color = QT_COLORS[state]
# color1 = base_color
# color2 = change_intensity(base_color, 0.5)
#
# self._set_image(color1, color2)
# -*- coding: utf-8 -*-
#
# SelfTest/Cipher/AES.py: Self-test for the AES cipher
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Cipher.AES"""
from Crypto.Util.py3compat import *
from binascii import hexlify
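
# --- illustrative check (not part of the original test vectors) ---
# Each (plaintext, ciphertext, key, ...) entry below can be verified
# directly against the cipher; a minimal check of the first FIPS 197
# vector using the standard ECB API:
from binascii import unhexlify
from Crypto.Cipher import AES

_cipher = AES.new(unhexlify('000102030405060708090a0b0c0d0e0f'), AES.MODE_ECB)
assert hexlify(_cipher.encrypt(unhexlify('00112233445566778899aabbccddeeff'))) == \
    b'69c4e0d86a7b0430d8cdb78070b4c55a'
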
# This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples.
test_data = [
# FIPS PUB 197 test vectors
# http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf
('00112233445566778899aabbccddeeff', '69c4e0d86a7b0430d8cdb78070b4c55a',
'000102030405060708090a0b0c0d0e0f', 'FIPS 197 C.1 (AES-128)'),
('00112233445566778899aabbccddeeff', 'dda97ca4864cdfe06eaf70a0ec0d7191',
'000102030405060708090a0b0c0d0e0f1011121314151617',
'FIPS 197 C.2 (AES-192)'),
('00112233445566778899aabbccddeeff', '8ea2b7ca516745bfeafc49904b496089',
'000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f',
'FIPS 197 C.3 (AES-256)'),
# Rijndael128 test vectors
# Downloaded 2008-09-13 from
# http://www.iaik.tugraz.at/Research/krypto/AES/old/~rijmen/rijndael/testvalues.tar.gz
# ecb_tbl.txt, KEYSIZE=128
('506812a45f08c889b97f5980038b8359', 'd8f532538289ef7d06b506a4fd5be9c9',
'00010203050607080a0b0c0d0f101112',
'ecb-tbl-128: I=1'),
('5c6d71ca30de8b8b00549984d2ec7d4b', '59ab30f4d4ee6e4ff9907ef65b1fb68c',
'14151617191a1b1c1e1f202123242526',
'ecb-tbl-128: I=2'),
('53f3f4c64f8616e4e7c56199f48f21f6', 'bf1ed2fcb2af3fd41443b56d85025cb1',
'28292a2b2d2e2f30323334353738393a',
'ecb-tbl-128: I=3'),
('a1eb65a3487165fb0f1c27ff9959f703', '7316632d5c32233edcb0780560eae8b2',
'3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-128: I=4'),
('3553ecf0b1739558b08e350a98a39bfa', '408c073e3e2538072b72625e68b8364b',
'50515253555657585a5b5c5d5f606162',
'ecb-tbl-128: I=5'),
('67429969490b9711ae2b01dc497afde8', 'e1f94dfa776597beaca262f2f6366fea',
'64656667696a6b6c6e6f707173747576',
'ecb-tbl-128: I=6'),
('93385c1f2aec8bed192f5a8e161dd508', 'f29e986c6a1c27d7b29ffd7ee92b75f1',
'78797a7b7d7e7f80828384858788898a',
'ecb-tbl-128: I=7'),
('b5bf946be19beb8db3983b5f4c6e8ddb', '131c886a57f8c2e713aba6955e2b55b5',
'8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-128: I=8'),
('41321ee10e21bd907227c4450ff42324', 'd2ab7662df9b8c740210e5eeb61c199d',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2',
'ecb-tbl-128: I=9'),
('00a82f59c91c8486d12c0a80124f6089', '14c10554b2859c484cab5869bbe7c470',
'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6',
'ecb-tbl-128: I=10'),
('7ce0fd076754691b4bbd9faf8a1372fe', 'db4d498f0a49cf55445d502c1f9ab3b5',
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da',
'ecb-tbl-128: I=11'),
('23605a8243d07764541bc5ad355b3129', '6d96fef7d66590a77a77bb2056667f7f',
'dcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-128: I=12'),
('12a8cfa23ea764fd876232b4e842bc44', '316fb68edba736c53e78477bf913725c',
'f0f1f2f3f5f6f7f8fafbfcfdfe010002',
'ecb-tbl-128: I=13'),
('bcaf32415e8308b3723e5fdd853ccc80', '6936f2b93af8397fd3a771fc011c8c37',
'04050607090a0b0c0e0f101113141516',
'ecb-tbl-128: I=14'),
('89afae685d801ad747ace91fc49adde0', 'f3f92f7a9c59179c1fcc2c2ba0b082cd',
'2c2d2e2f31323334363738393b3c3d3e',
'ecb-tbl-128: I=15'),
('f521d07b484357c4a69e76124a634216', '6a95ea659ee3889158e7a9152ff04ebc',
'40414243454647484a4b4c4d4f505152',
'ecb-tbl-128: I=16'),
('3e23b3bc065bcc152407e23896d77783', '1959338344e945670678a5d432c90b93',
'54555657595a5b5c5e5f606163646566',
'ecb-tbl-128: I=17'),
('79f0fba002be1744670e7e99290d8f52', 'e49bddd2369b83ee66e6c75a1161b394',
'68696a6b6d6e6f70727374757778797a',
'ecb-tbl-128: I=18'),
('da23fe9d5bd63e1d72e3dafbe21a6c2a', 'd3388f19057ff704b70784164a74867d',
'7c7d7e7f81828384868788898b8c8d8e',
'ecb-tbl-128: I=19'),
('e3f5698ba90b6a022efd7db2c7e6c823', '23aa03e2d5e4cd24f3217e596480d1e1',
'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6',
'ecb-tbl-128: I=20'),
('bdc2691d4f1b73d2700679c3bcbf9c6e', 'c84113d68b666ab2a50a8bdb222e91b9',
'e0e1e2e3e5e6e7e8eaebecedeff0f1f2',
'ecb-tbl-128: I=21'),
('ba74e02093217ee1ba1b42bd5624349a', 'ac02403981cd4340b507963db65cb7b6',
'08090a0b0d0e0f10121314151718191a',
'ecb-tbl-128: I=22'),
('b5c593b5851c57fbf8b3f57715e8f680', '8d1299236223359474011f6bf5088414',
'6c6d6e6f71727374767778797b7c7d7e',
'ecb-tbl-128: I=23'),
('3da9bd9cec072381788f9387c3bbf4ee', '5a1d6ab8605505f7977e55b9a54d9b90',
'80818283858687888a8b8c8d8f909192',
'ecb-tbl-128: I=24'),
('4197f3051121702ab65d316b3c637374', '72e9c2d519cf555e4208805aabe3b258',
'94959697999a9b9c9e9fa0a1a3a4a5a6',
'ecb-tbl-128: I=25'),
('9f46c62ec4f6ee3f6e8c62554bc48ab7', 'a8f3e81c4a23a39ef4d745dffe026e80',
'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba',
'ecb-tbl-128: I=26'),
('0220673fe9e699a4ebc8e0dbeb6979c8', '546f646449d31458f9eb4ef5483aee6c',
'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce',
'ecb-tbl-128: I=27'),
('b2b99171337ded9bc8c2c23ff6f18867', '4dbe4bc84ac797c0ee4efb7f1a07401c',
'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2',
'ecb-tbl-128: I=28'),
('a7facf4e301e984e5efeefd645b23505', '25e10bfb411bbd4d625ac8795c8ca3b3',
'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6',
'ecb-tbl-128: I=29'),
('f7c762e4a9819160fd7acfb6c4eedcdd', '315637405054ec803614e43def177579',
'f8f9fafbfdfefe00020304050708090a',
'ecb-tbl-128: I=30'),
('9b64fc21ea08709f4915436faa70f1be', '60c5bc8a1410247295c6386c59e572a8',
'0c0d0e0f11121314161718191b1c1d1e',
'ecb-tbl-128: I=31'),
('52af2c3de07ee6777f55a4abfc100b3f', '01366fc8ca52dfe055d6a00a76471ba6',
'20212223252627282a2b2c2d2f303132',
'ecb-tbl-128: I=32'),
('2fca001224386c57aa3f968cbe2c816f', 'ecc46595516ec612449c3f581e7d42ff',
'34353637393a3b3c3e3f404143444546',
'ecb-tbl-128: I=33'),
('4149c73658a4a9c564342755ee2c132f', '6b7ffe4c602a154b06ee9c7dab5331c9',
'48494a4b4d4e4f50525354555758595a',
'ecb-tbl-128: I=34'),
('af60005a00a1772f7c07a48a923c23d2', '7da234c14039a240dd02dd0fbf84eb67',
'5c5d5e5f61626364666768696b6c6d6e',
'ecb-tbl-128: I=35'),
('6fccbc28363759914b6f0280afaf20c6', 'c7dc217d9e3604ffe7e91f080ecd5a3a',
'70717273757677787a7b7c7d7f808182',
'ecb-tbl-128: I=36'),
('7d82a43ddf4fefa2fc5947499884d386', '37785901863f5c81260ea41e7580cda5',
'84858687898a8b8c8e8f909193949596',
'ecb-tbl-128: I=37'),
('5d5a990eaab9093afe4ce254dfa49ef9', 'a07b9338e92ed105e6ad720fccce9fe4',
'98999a9b9d9e9fa0a2a3a4a5a7a8a9aa',
'ecb-tbl-128: I=38'),
('4cd1e2fd3f4434b553aae453f0ed1a02', 'ae0fb9722418cc21a7da816bbc61322c',
'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe',
'ecb-tbl-128: I=39'),
('5a2c9a9641d4299125fa1b9363104b5e', 'c826a193080ff91ffb21f71d3373c877',
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2',
'ecb-tbl-128: I=40'),
('b517fe34c0fa217d341740bfd4fe8dd4', '1181b11b0e494e8d8b0aa6b1d5ac2c48',
'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6',
'ecb-tbl-128: I=41'),
('014baf2278a69d331d5180103643e99a', '6743c3d1519ab4f2cd9a78ab09a511bd',
'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa',
'ecb-tbl-128: I=42'),
('b529bd8164f20d0aa443d4932116841c', 'dc55c076d52bacdf2eefd952946a439d',
'fcfdfeff01020304060708090b0c0d0e',
'ecb-tbl-128: I=43'),
('2e596dcbb2f33d4216a1176d5bd1e456', '711b17b590ffc72b5c8e342b601e8003',
'10111213151617181a1b1c1d1f202122',
'ecb-tbl-128: I=44'),
('7274a1ea2b7ee2424e9a0e4673689143', '19983bb0950783a537e1339f4aa21c75',
'24252627292a2b2c2e2f303133343536',
'ecb-tbl-128: I=45'),
('ae20020bd4f13e9d90140bee3b5d26af', '3ba7762e15554169c0f4fa39164c410c',
'38393a3b3d3e3f40424344454748494a',
'ecb-tbl-128: I=46'),
('baac065da7ac26e855e79c8849d75a02', 'a0564c41245afca7af8aa2e0e588ea89',
'4c4d4e4f51525354565758595b5c5d5e',
'ecb-tbl-128: I=47'),
('7c917d8d1d45fab9e2540e28832540cc', '5e36a42a2e099f54ae85ecd92e2381ed',
'60616263656667686a6b6c6d6f707172',
'ecb-tbl-128: I=48'),
('bde6f89e16daadb0e847a2a614566a91', '770036f878cd0f6ca2268172f106f2fe',
'74757677797a7b7c7e7f808183848586',
'ecb-tbl-128: I=49'),
('c9de163725f1f5be44ebb1db51d07fbc', '7e4e03908b716116443ccf7c94e7c259',
'88898a8b8d8e8f90929394959798999a',
'ecb-tbl-128: I=50'),
('3af57a58f0c07dffa669572b521e2b92', '482735a48c30613a242dd494c7f9185d',
'9c9d9e9fa1a2a3a4a6a7a8a9abacadae',
'ecb-tbl-128: I=51'),
('3d5ebac306dde4604f1b4fbbbfcdae55', 'b4c0f6c9d4d7079addf9369fc081061d',
'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2',
'ecb-tbl-128: I=52'),
('c2dfa91bceb76a1183c995020ac0b556', 'd5810fe0509ac53edcd74f89962e6270',
'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6',
'ecb-tbl-128: I=53'),
('c70f54305885e9a0746d01ec56c8596b', '03f17a16b3f91848269ecdd38ebb2165',
'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea',
'ecb-tbl-128: I=54'),
('c4f81b610e98012ce000182050c0c2b2', 'da1248c3180348bad4a93b4d9856c9df',
'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe',
'ecb-tbl-128: I=55'),
('eaab86b1d02a95d7404eff67489f97d4', '3d10d7b63f3452c06cdf6cce18be0c2c',
'00010203050607080a0b0c0d0f101112',
'ecb-tbl-128: I=56'),
('7c55bdb40b88870b52bec3738de82886', '4ab823e7477dfddc0e6789018fcb6258',
'14151617191a1b1c1e1f202123242526',
'ecb-tbl-128: I=57'),
('ba6eaa88371ff0a3bd875e3f2a975ce0', 'e6478ba56a77e70cfdaa5c843abde30e',
'28292a2b2d2e2f30323334353738393a',
'ecb-tbl-128: I=58'),
('08059130c4c24bd30cf0575e4e0373dc', '1673064895fbeaf7f09c5429ff75772d',
'3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-128: I=59'),
('9a8eab004ef53093dfcf96f57e7eda82', '4488033ae9f2efd0ca9383bfca1a94e9',
'50515253555657585a5b5c5d5f606162',
'ecb-tbl-128: I=60'),
('0745b589e2400c25f117b1d796c28129', '978f3b8c8f9d6f46626cac3c0bcb9217',
'64656667696a6b6c6e6f707173747576',
'ecb-tbl-128: I=61'),
('2f1777781216cec3f044f134b1b92bbe', 'e08c8a7e582e15e5527f1d9e2eecb236',
'78797a7b7d7e7f80828384858788898a',
'ecb-tbl-128: I=62'),
('353a779ffc541b3a3805d90ce17580fc', 'cec155b76ac5ffda4cf4f9ca91e49a7a',
'8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-128: I=63'),
('1a1eae4415cefcf08c4ac1c8f68bea8f', 'd5ac7165763225dd2a38cdc6862c29ad',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2',
'ecb-tbl-128: I=64'),
('e6e7e4e5b0b3b2b5d4d5aaab16111013', '03680fe19f7ce7275452020be70e8204',
'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6',
'ecb-tbl-128: I=65'),
('f8f9fafbfbf8f9e677767170efe0e1e2', '461df740c9781c388e94bb861ceb54f6',
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da',
'ecb-tbl-128: I=66'),
('63626160a1a2a3a445444b4a75727370', '451bd60367f96483042742219786a074',
'dcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-128: I=67'),
('717073720605040b2d2c2b2a05fafbf9', 'e4dfa42671a02e57ef173b85c0ea9f2b',
'f0f1f2f3f5f6f7f8fafbfcfdfe010002',
'ecb-tbl-128: I=68'),
('78797a7beae9e8ef3736292891969794', 'ed11b89e76274282227d854700a78b9e',
'04050607090a0b0c0e0f101113141516',
'ecb-tbl-128: I=69'),
('838281803231300fdddcdbdaa0afaead', '433946eaa51ea47af33895f2b90b3b75',
'18191a1b1d1e1f20222324252728292a',
'ecb-tbl-128: I=70'),
('18191a1bbfbcbdba75747b7a7f78797a', '6bc6d616a5d7d0284a5910ab35022528',
'2c2d2e2f31323334363738393b3c3d3e',
'ecb-tbl-128: I=71'),
('848586879b989996a3a2a5a4849b9a99', 'd2a920ecfe919d354b5f49eae9719c98',
'40414243454647484a4b4c4d4f505152',
'ecb-tbl-128: I=72'),
('0001020322212027cacbf4f551565754', '3a061b17f6a92885efbd0676985b373d',
'54555657595a5b5c5e5f606163646566',
'ecb-tbl-128: I=73'),
('cecfcccdafacadb2515057564a454447', 'fadeec16e33ea2f4688499d157e20d8f',
'68696a6b6d6e6f70727374757778797a',
'ecb-tbl-128: I=74'),
('92939091cdcecfc813121d1c80878685', '5cdefede59601aa3c3cda36fa6b1fa13',
'7c7d7e7f81828384868788898b8c8d8e',
'ecb-tbl-128: I=75'),
('d2d3d0d16f6c6d6259585f5ed1eeefec', '9574b00039844d92ebba7ee8719265f8',
'90919293959697989a9b9c9d9fa0a1a2',
'ecb-tbl-128: I=76'),
('acadaeaf878485820f0e1110d5d2d3d0', '9a9cf33758671787e5006928188643fa',
'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6',
'ecb-tbl-128: I=77'),
('9091929364676619e6e7e0e1757a7b78', '2cddd634c846ba66bb46cbfea4a674f9',
'b8b9babbbdbebfc0c2c3c4c5c7c8c9ca',
'ecb-tbl-128: I=78'),
('babbb8b98a89888f74757a7b92959497', 'd28bae029393c3e7e26e9fafbbb4b98f',
'cccdcecfd1d2d3d4d6d7d8d9dbdcddde',
'ecb-tbl-128: I=79'),
('8d8c8f8e6e6d6c633b3a3d3ccad5d4d7', 'ec27529b1bee0a9ab6a0d73ebc82e9b7',
'e0e1e2e3e5e6e7e8eaebecedeff0f1f2',
'ecb-tbl-128: I=80'),
('86878485010203040808f7f767606162', '3cb25c09472aff6ee7e2b47ccd7ccb17',
'f4f5f6f7f9fafbfcfefe010103040506',
'ecb-tbl-128: I=81'),
('8e8f8c8d656667788a8b8c8d010e0f0c', 'dee33103a7283370d725e44ca38f8fe5',
'08090a0b0d0e0f10121314151718191a',
'ecb-tbl-128: I=82'),
('c8c9cacb858687807a7b7475e7e0e1e2', '27f9bcd1aac64bffc11e7815702c1a69',
'1c1d1e1f21222324262728292b2c2d2e',
'ecb-tbl-128: I=83'),
('6d6c6f6e5053525d8c8d8a8badd2d3d0', '5df534ffad4ed0749a9988e9849d0021',
'30313233353637383a3b3c3d3f404142',
'ecb-tbl-128: I=84'),
('28292a2b393a3b3c0607181903040506', 'a48bee75db04fb60ca2b80f752a8421b',
'44454647494a4b4c4e4f505153545556',
'ecb-tbl-128: I=85'),
('a5a4a7a6b0b3b28ddbdadddcbdb2b3b0', '024c8cf70bc86ee5ce03678cb7af45f9',
'58595a5b5d5e5f60626364656768696a',
'ecb-tbl-128: I=86'),
('323330316467666130313e3f2c2b2a29', '3c19ac0f8a3a3862ce577831301e166b',
'6c6d6e6f71727374767778797b7c7d7e',
'ecb-tbl-128: I=87'),
('27262524080b0a05171611100b141516', 'c5e355b796a57421d59ca6be82e73bca',
'80818283858687888a8b8c8d8f909192',
'ecb-tbl-128: I=88'),
('040506074142434435340b0aa3a4a5a6', 'd94033276417abfb05a69d15b6e386e2',
'94959697999a9b9c9e9fa0a1a3a4a5a6',
'ecb-tbl-128: I=89'),
('242526271112130c61606766bdb2b3b0', '24b36559ea3a9b9b958fe6da3e5b8d85',
'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba',
'ecb-tbl-128: I=90'),
('4b4a4948252627209e9f9091cec9c8cb', '20fd4feaa0e8bf0cce7861d74ef4cb72',
'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce',
'ecb-tbl-128: I=91'),
('68696a6b6665646b9f9e9998d9e6e7e4', '350e20d5174277b9ec314c501570a11d',
'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2',
'ecb-tbl-128: I=92'),
('34353637c5c6c7c0f0f1eeef7c7b7a79', '87a29d61b7c604d238fe73045a7efd57',
'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6',
'ecb-tbl-128: I=93'),
('32333031c2c1c13f0d0c0b0a050a0b08', '2c3164c1cc7d0064816bdc0faa362c52',
'f8f9fafbfdfefe00020304050708090a',
'ecb-tbl-128: I=94'),
('cdcccfcebebdbcbbabaaa5a4181f1e1d', '195fe5e8a05a2ed594f6e4400eee10b3',
'0c0d0e0f11121314161718191b1c1d1e',
'ecb-tbl-128: I=95'),
('212023223635343ba0a1a6a7445b5a59', 'e4663df19b9a21a5a284c2bd7f905025',
'20212223252627282a2b2c2d2f303132',
'ecb-tbl-128: I=96'),
('0e0f0c0da8abaaad2f2e515002050407', '21b88714cfb4e2a933bd281a2c4743fd',
'34353637393a3b3c3e3f404143444546',
'ecb-tbl-128: I=97'),
('070605042a2928378e8f8889bdb2b3b0', 'cbfc3980d704fd0fc54378ab84e17870',
'48494a4b4d4e4f50525354555758595a',
'ecb-tbl-128: I=98'),
('cbcac9c893909196a9a8a7a6a5a2a3a0', 'bc5144baa48bdeb8b63e22e03da418ef',
'5c5d5e5f61626364666768696b6c6d6e',
'ecb-tbl-128: I=99'),
('80818283c1c2c3cc9c9d9a9b0cf3f2f1', '5a1dbaef1ee2984b8395da3bdffa3ccc',
'70717273757677787a7b7c7d7f808182',
'ecb-tbl-128: I=100'),
('1213101125262720fafbe4e5b1b6b7b4', 'f0b11cd0729dfcc80cec903d97159574',
'84858687898a8b8c8e8f909193949596',
'ecb-tbl-128: I=101'),
('7f7e7d7c3033320d97969190222d2c2f', '9f95314acfddc6d1914b7f19a9cc8209',
'98999a9b9d9e9fa0a2a3a4a5a7a8a9aa',
'ecb-tbl-128: I=102'),
('4e4f4c4d484b4a4d81808f8e53545556', '595736f6f0f70914a94e9e007f022519',
'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe',
'ecb-tbl-128: I=103'),
('dcdddedfb0b3b2bd15141312a1bebfbc', '1f19f57892cae586fcdfb4c694deb183',
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2',
'ecb-tbl-128: I=104'),
('93929190282b2a2dc4c5fafb92959497', '540700ee1f6f3dab0b3eddf6caee1ef5',
'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6',
'ecb-tbl-128: I=105'),
('f5f4f7f6c4c7c6d9373631307e717073', '14a342a91019a331687a2254e6626ca2',
'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa',
'ecb-tbl-128: I=106'),
('93929190b6b5b4b364656a6b05020300', '7b25f3c3b2eea18d743ef283140f29ff',
'fcfdfeff01020304060708090b0c0d0e',
'ecb-tbl-128: I=107'),
('babbb8b90d0e0f00a4a5a2a3043b3a39', '46c2587d66e5e6fa7f7ca6411ad28047',
'10111213151617181a1b1c1d1f202122',
'ecb-tbl-128: I=108'),
('d8d9dadb7f7c7d7a10110e0f787f7e7d', '09470e72229d954ed5ee73886dfeeba9',
'24252627292a2b2c2e2f303133343536',
'ecb-tbl-128: I=109'),
('fefffcfdefeced923b3a3d3c6768696a', 'd77c03de92d4d0d79ef8d4824ef365eb',
'38393a3b3d3e3f40424344454748494a',
'ecb-tbl-128: I=110'),
('d6d7d4d58a89888f96979899a5a2a3a0', '1d190219f290e0f1715d152d41a23593',
'4c4d4e4f51525354565758595b5c5d5e',
'ecb-tbl-128: I=111'),
('18191a1ba8abaaa5303136379b848586', 'a2cd332ce3a0818769616292e87f757b',
'60616263656667686a6b6c6d6f707172',
'ecb-tbl-128: I=112'),
('6b6a6968a4a7a6a1d6d72829b0b7b6b5', 'd54afa6ce60fbf9341a3690e21385102',
'74757677797a7b7c7e7f808183848586',
'ecb-tbl-128: I=113'),
('000102038a89889755545352a6a9a8ab', '06e5c364ded628a3f5e05e613e356f46',
'88898a8b8d8e8f90929394959798999a',
'ecb-tbl-128: I=114'),
('2d2c2f2eb3b0b1b6b6b7b8b9f2f5f4f7', 'eae63c0e62556dac85d221099896355a',
'9c9d9e9fa1a2a3a4a6a7a8a9abacadae',
'ecb-tbl-128: I=115'),
('979695943536373856575051e09f9e9d', '1fed060e2c6fc93ee764403a889985a2',
'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2',
'ecb-tbl-128: I=116'),
('a4a5a6a7989b9a9db1b0afae7a7d7c7f', 'c25235c1a30fdec1c7cb5c5737b2a588',
'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6',
'ecb-tbl-128: I=117'),
('c1c0c3c2686b6a55a8a9aeafeae5e4e7', '796dbef95147d4d30873ad8b7b92efc0',
'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea',
'ecb-tbl-128: I=118'),
('c1c0c3c2141716118c8d828364636261', 'cbcf0fb34d98d0bd5c22ce37211a46bf',
'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe',
'ecb-tbl-128: I=119'),
('93929190cccfcec196979091e0fffefd', '94b44da6466126cafa7c7fd09063fc24',
'00010203050607080a0b0c0d0f101112',
'ecb-tbl-128: I=120'),
('b4b5b6b7f9fafbfc25241b1a6e69686b', 'd78c5b5ebf9b4dbda6ae506c5074c8fe',
'14151617191a1b1c1e1f202123242526',
'ecb-tbl-128: I=121'),
('868784850704051ac7c6c1c08788898a', '6c27444c27204b043812cf8cf95f9769',
'28292a2b2d2e2f30323334353738393a',
'ecb-tbl-128: I=122'),
('f4f5f6f7aaa9a8affdfcf3f277707172', 'be94524ee5a2aa50bba8b75f4c0aebcf',
'3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-128: I=123'),
('d3d2d1d00605040bc3c2c5c43e010003', 'a0aeaae91ba9f31f51aeb3588cf3a39e',
'50515253555657585a5b5c5d5f606162',
'ecb-tbl-128: I=124'),
('73727170424140476a6b74750d0a0b08', '275297779c28266ef9fe4c6a13c08488',
'64656667696a6b6c6e6f707173747576',
'ecb-tbl-128: I=125'),
('c2c3c0c10a0908f754555253a1aeafac', '86523d92bb8672cb01cf4a77fd725882',
'78797a7b7d7e7f80828384858788898a',
'ecb-tbl-128: I=126'),
('6d6c6f6ef8fbfafd82838c8df8fffefd', '4b8327640e9f33322a04dd96fcbf9a36',
'8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-128: I=127'),
('f5f4f7f684878689a6a7a0a1d2cdcccf', 'ce52af650d088ca559425223f4d32694',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2',
'ecb-tbl-128: I=128'),
# ecb_tbl.txt, KEYSIZE=192
('2d33eef2c0430a8a9ebf45e809c40bb6', 'dff4945e0336df4c1c56bc700eff837f',
'00010203050607080a0b0c0d0f10111214151617191a1b1c',
'ecb-tbl-192: I=1'),
('6aa375d1fa155a61fb72353e0a5a8756', 'b6fddef4752765e347d5d2dc196d1252',
'1e1f20212324252628292a2b2d2e2f30323334353738393a',
'ecb-tbl-192: I=2'),
('bc3736518b9490dcb8ed60eb26758ed4', 'd23684e3d963b3afcf1a114aca90cbd6',
'3c3d3e3f41424344464748494b4c4d4e5051525355565758',
'ecb-tbl-192: I=3'),
('aa214402b46cffb9f761ec11263a311e', '3a7ac027753e2a18c2ceab9e17c11fd0',
'5a5b5c5d5f60616264656667696a6b6c6e6f707173747576',
'ecb-tbl-192: I=4'),
('02aea86e572eeab66b2c3af5e9a46fd6', '8f6786bd007528ba26603c1601cdd0d8',
'78797a7b7d7e7f80828384858788898a8c8d8e8f91929394',
'ecb-tbl-192: I=5'),
('e2aef6acc33b965c4fa1f91c75ff6f36', 'd17d073b01e71502e28b47ab551168b3',
'969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2',
'ecb-tbl-192: I=6'),
('0659df46427162b9434865dd9499f91d', 'a469da517119fab95876f41d06d40ffa',
'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0',
'ecb-tbl-192: I=7'),
('49a44239c748feb456f59c276a5658df', '6091aa3b695c11f5c0b6ad26d3d862ff',
'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-192: I=8'),
('66208f6e9d04525bdedb2733b6a6be37', '70f9e67f9f8df1294131662dc6e69364',
'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c',
'ecb-tbl-192: I=9'),
('3393f8dfc729c97f5480b950bc9666b0', 'd154dcafad8b207fa5cbc95e9996b559',
'0e0f10111314151618191a1b1d1e1f20222324252728292a',
'ecb-tbl-192: I=10'),
('606834c8ce063f3234cf1145325dbd71', '4934d541e8b46fa339c805a7aeb9e5da',
'2c2d2e2f31323334363738393b3c3d3e4041424345464748',
'ecb-tbl-192: I=11'),
('fec1c04f529bbd17d8cecfcc4718b17f', '62564c738f3efe186e1a127a0c4d3c61',
'4a4b4c4d4f50515254555657595a5b5c5e5f606163646566',
'ecb-tbl-192: I=12'),
('32df99b431ed5dc5acf8caf6dc6ce475', '07805aa043986eb23693e23bef8f3438',
'68696a6b6d6e6f70727374757778797a7c7d7e7f81828384',
'ecb-tbl-192: I=13'),
('7fdc2b746f3f665296943b83710d1f82', 'df0b4931038bade848dee3b4b85aa44b',
'868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2',
'ecb-tbl-192: I=14'),
('8fba1510a3c5b87e2eaa3f7a91455ca2', '592d5fded76582e4143c65099309477c',
'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6b8b9babbbdbebfc0',
'ecb-tbl-192: I=15'),
('2c9b468b1c2eed92578d41b0716b223b', 'c9b8d6545580d3dfbcdd09b954ed4e92',
'c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde',
'ecb-tbl-192: I=16'),
('0a2bbf0efc6bc0034f8a03433fca1b1a', '5dccd5d6eb7c1b42acb008201df707a0',
'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfc',
'ecb-tbl-192: I=17'),
('25260e1f31f4104d387222e70632504b', 'a2a91682ffeb6ed1d34340946829e6f9',
'fefe01010304050608090a0b0d0e0f10121314151718191a',
'ecb-tbl-192: I=18'),
('c527d25a49f08a5228d338642ae65137', 'e45d185b797000348d9267960a68435d',
'1c1d1e1f21222324262728292b2c2d2e3031323335363738',
'ecb-tbl-192: I=19'),
('3b49fc081432f5890d0e3d87e884a69e', '45e060dae5901cda8089e10d4f4c246b',
'3a3b3c3d3f40414244454647494a4b4c4e4f505153545556',
'ecb-tbl-192: I=20'),
('d173f9ed1e57597e166931df2754a083', 'f6951afacc0079a369c71fdcff45df50',
'58595a5b5d5e5f60626364656768696a6c6d6e6f71727374',
'ecb-tbl-192: I=21'),
('8c2b7cafa5afe7f13562daeae1adede0', '9e95e00f351d5b3ac3d0e22e626ddad6',
'767778797b7c7d7e80818283858687888a8b8c8d8f909192',
'ecb-tbl-192: I=22'),
('aaf4ec8c1a815aeb826cab741339532c', '9cb566ff26d92dad083b51fdc18c173c',
'94959697999a9b9c9e9fa0a1a3a4a5a6a8a9aaabadaeafb0',
'ecb-tbl-192: I=23'),
('40be8c5d9108e663f38f1a2395279ecf', 'c9c82766176a9b228eb9a974a010b4fb',
'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebec',
'ecb-tbl-192: I=24'),
('0c8ad9bc32d43e04716753aa4cfbe351', 'd8e26aa02945881d5137f1c1e1386e88',
'2a2b2c2d2f30313234353637393a3b3c3e3f404143444546',
'ecb-tbl-192: I=25'),
('1407b1d5f87d63357c8dc7ebbaebbfee', 'c0e024ccd68ff5ffa4d139c355a77c55',
'48494a4b4d4e4f50525354555758595a5c5d5e5f61626364',
'ecb-tbl-192: I=26'),
('e62734d1ae3378c4549e939e6f123416', '0b18b3d16f491619da338640df391d43',
'84858687898a8b8c8e8f90919394959698999a9b9d9e9fa0',
'ecb-tbl-192: I=27'),
('5a752cff2a176db1a1de77f2d2cdee41', 'dbe09ac8f66027bf20cb6e434f252efc',
'a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe',
'ecb-tbl-192: I=28'),
('a9c8c3a4eabedc80c64730ddd018cd88', '6d04e5e43c5b9cbe05feb9606b6480fe',
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdc',
'ecb-tbl-192: I=29'),
('ee9b3dbbdb86180072130834d305999a', 'dd1d6553b96be526d9fee0fbd7176866',
'1a1b1c1d1f20212224252627292a2b2c2e2f303133343536',
'ecb-tbl-192: I=30'),
('a7fa8c3586b8ebde7568ead6f634a879', '0260ca7e3f979fd015b0dd4690e16d2a',
'38393a3b3d3e3f40424344454748494a4c4d4e4f51525354',
'ecb-tbl-192: I=31'),
('37e0f4a87f127d45ac936fe7ad88c10a', '9893734de10edcc8a67c3b110b8b8cc6',
'929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae',
'ecb-tbl-192: I=32'),
('3f77d8b5d92bac148e4e46f697a535c5', '93b30b750516b2d18808d710c2ee84ef',
'464748494b4c4d4e50515253555657585a5b5c5d5f606162',
'ecb-tbl-192: I=33'),
('d25ebb686c40f7e2c4da1014936571ca', '16f65fa47be3cb5e6dfe7c6c37016c0e',
'828384858788898a8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-192: I=34'),
('4f1c769d1e5b0552c7eca84dea26a549', 'f3847210d5391e2360608e5acb560581',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbc',
'ecb-tbl-192: I=35'),
('8548e2f882d7584d0fafc54372b6633a', '8754462cd223366d0753913e6af2643d',
'bebfc0c1c3c4c5c6c8c9cacbcdcecfd0d2d3d4d5d7d8d9da',
'ecb-tbl-192: I=36'),
('87d7a336cb476f177cd2a51af2a62cdf', '1ea20617468d1b806a1fd58145462017',
'dcdddedfe1e2e3e4e6e7e8e9ebecedeef0f1f2f3f5f6f7f8',
'ecb-tbl-192: I=37'),
('03b1feac668c4e485c1065dfc22b44ee', '3b155d927355d737c6be9dda60136e2e',
'fafbfcfdfe01000204050607090a0b0c0e0f101113141516',
'ecb-tbl-192: I=38'),
('bda15e66819fa72d653a6866aa287962', '26144f7b66daa91b6333dbd3850502b3',
'18191a1b1d1e1f20222324252728292a2c2d2e2f31323334',
'ecb-tbl-192: I=39'),
('4d0c7a0d2505b80bf8b62ceb12467f0a', 'e4f9a4ab52ced8134c649bf319ebcc90',
'363738393b3c3d3e40414243454647484a4b4c4d4f505152',
'ecb-tbl-192: I=40'),
('626d34c9429b37211330986466b94e5f', 'b9ddd29ac6128a6cab121e34a4c62b36',
'54555657595a5b5c5e5f60616364656668696a6b6d6e6f70',
'ecb-tbl-192: I=41'),
('333c3e6bf00656b088a17e5ff0e7f60a', '6fcddad898f2ce4eff51294f5eaaf5c9',
'727374757778797a7c7d7e7f81828384868788898b8c8d8e',
'ecb-tbl-192: I=42'),
('687ed0cdc0d2a2bc8c466d05ef9d2891', 'c9a6fe2bf4028080bea6f7fc417bd7e3',
'90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabac',
'ecb-tbl-192: I=43'),
('487830e78cc56c1693e64b2a6660c7b6', '6a2026846d8609d60f298a9c0673127f',
'aeafb0b1b3b4b5b6b8b9babbbdbebfc0c2c3c4c5c7c8c9ca',
'ecb-tbl-192: I=44'),
('7a48d6b7b52b29392aa2072a32b66160', '2cb25c005e26efea44336c4c97a4240b',
'cccdcecfd1d2d3d4d6d7d8d9dbdcdddee0e1e2e3e5e6e7e8',
'ecb-tbl-192: I=45'),
('907320e64c8c5314d10f8d7a11c8618d', '496967ab8680ddd73d09a0e4c7dcc8aa',
'eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506',
'ecb-tbl-192: I=46'),
('b561f2ca2d6e65a4a98341f3ed9ff533', 'd5af94de93487d1f3a8c577cb84a66a4',
'08090a0b0d0e0f10121314151718191a1c1d1e1f21222324',
'ecb-tbl-192: I=47'),
('df769380d212792d026f049e2e3e48ef', '84bdac569cae2828705f267cc8376e90',
'262728292b2c2d2e30313233353637383a3b3c3d3f404142',
'ecb-tbl-192: I=48'),
('79f374bc445bdabf8fccb8843d6054c6', 'f7401dda5ad5ab712b7eb5d10c6f99b6',
'44454647494a4b4c4e4f50515354555658595a5b5d5e5f60',
'ecb-tbl-192: I=49'),
('4e02f1242fa56b05c68dbae8fe44c9d6', '1c9d54318539ebd4c3b5b7e37bf119f0',
'626364656768696a6c6d6e6f71727374767778797b7c7d7e',
'ecb-tbl-192: I=50'),
('cf73c93cbff57ac635a6f4ad2a4a1545', 'aca572d65fb2764cffd4a6eca090ea0d',
'80818283858687888a8b8c8d8f90919294959697999a9b9c',
'ecb-tbl-192: I=51'),
('9923548e2875750725b886566784c625', '36d9c627b8c2a886a10ccb36eae3dfbb',
'9e9fa0a1a3a4a5a6a8a9aaabadaeafb0b2b3b4b5b7b8b9ba',
'ecb-tbl-192: I=52'),
('4888336b723a022c9545320f836a4207', '010edbf5981e143a81d646e597a4a568',
'bcbdbebfc1c2c3c4c6c7c8c9cbcccdced0d1d2d3d5d6d7d8',
'ecb-tbl-192: I=53'),
('f84d9a5561b0608b1160dee000c41ba8', '8db44d538dc20cc2f40f3067fd298e60',
'dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6',
'ecb-tbl-192: I=54'),
('c23192a0418e30a19b45ae3e3625bf22', '930eb53bc71e6ac4b82972bdcd5aafb3',
'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314',
'ecb-tbl-192: I=55'),
('b84e0690b28b0025381ad82a15e501a7', '6c42a81edcbc9517ccd89c30c95597b4',
'161718191b1c1d1e20212223252627282a2b2c2d2f303132',
'ecb-tbl-192: I=56'),
('acef5e5c108876c4f06269f865b8f0b0', 'da389847ad06df19d76ee119c71e1dd3',
'34353637393a3b3c3e3f40414344454648494a4b4d4e4f50',
'ecb-tbl-192: I=57'),
('0f1b3603e0f5ddea4548246153a5e064', 'e018fdae13d3118f9a5d1a647a3f0462',
'525354555758595a5c5d5e5f61626364666768696b6c6d6e',
'ecb-tbl-192: I=58'),
('fbb63893450d42b58c6d88cd3c1809e3', '2aa65db36264239d3846180fabdfad20',
'70717273757677787a7b7c7d7f80818284858687898a8b8c',
'ecb-tbl-192: I=59'),
('4bef736df150259dae0c91354e8a5f92', '1472163e9a4f780f1ceb44b07ecf4fdb',
'8e8f90919394959698999a9b9d9e9fa0a2a3a4a5a7a8a9aa',
'ecb-tbl-192: I=60'),
('7d2d46242056ef13d3c3fc93c128f4c7', 'c8273fdc8f3a9f72e91097614b62397c',
'acadaeafb1b2b3b4b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8',
'ecb-tbl-192: I=61'),
('e9c1ba2df415657a256edb33934680fd', '66c8427dcd733aaf7b3470cb7d976e3f',
'cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6',
'ecb-tbl-192: I=62'),
('e23ee277b0aa0a1dfb81f7527c3514f1', '146131cb17f1424d4f8da91e6f80c1d0',
'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304',
'ecb-tbl-192: I=63'),
('3e7445b0b63caaf75e4a911e12106b4c', '2610d0ad83659081ae085266a88770dc',
'060708090b0c0d0e10111213151617181a1b1c1d1f202122',
'ecb-tbl-192: I=64'),
('767774752023222544455a5be6e1e0e3', '38a2b5a974b0575c5d733917fb0d4570',
'24252627292a2b2c2e2f30313334353638393a3b3d3e3f40',
'ecb-tbl-192: I=65'),
('72737475717e7f7ce9e8ebea696a6b6c', 'e21d401ebc60de20d6c486e4f39a588b',
'424344454748494a4c4d4e4f51525354565758595b5c5d5e',
'ecb-tbl-192: I=66'),
('dfdedddc25262728c9c8cfcef1eeefec', 'e51d5f88c670b079c0ca1f0c2c4405a2',
'60616263656667686a6b6c6d6f70717274757677797a7b7c',
'ecb-tbl-192: I=67'),
('fffe0100707776755f5e5d5c7675746b', '246a94788a642fb3d1b823c8762380c8',
'7e7f80818384858688898a8b8d8e8f90929394959798999a',
'ecb-tbl-192: I=68'),
('e0e1e2e3424140479f9e9190292e2f2c', 'b80c391c5c41a4c3b30c68e0e3d7550f',
'9c9d9e9fa1a2a3a4a6a7a8a9abacadaeb0b1b2b3b5b6b7b8',
'ecb-tbl-192: I=69'),
('2120272690efeeed3b3a39384e4d4c4b', 'b77c4754fc64eb9a1154a9af0bb1f21c',
'babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6',
'ecb-tbl-192: I=70'),
('ecedeeef5350516ea1a0a7a6a3acadae', 'fb554de520d159a06bf219fc7f34a02f',
'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4',
'ecb-tbl-192: I=71'),
('32333c3d25222320e9e8ebeacecdccc3', 'a89fba152d76b4927beed160ddb76c57',
'f6f7f8f9fbfcfdfe00010203050607080a0b0c0d0f101112',
'ecb-tbl-192: I=72'),
('40414243626160678a8bb4b511161714', '5676eab4a98d2e8473b3f3d46424247c',
'14151617191a1b1c1e1f20212324252628292a2b2d2e2f30',
'ecb-tbl-192: I=73'),
('94959293f5fafbf81f1e1d1c7c7f7e79', '4e8f068bd7ede52a639036ec86c33568',
'323334353738393a3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-192: I=74'),
('bebfbcbd191a1b14cfcec9c8546b6a69', 'f0193c4d7aff1791ee4c07eb4a1824fc',
'50515253555657585a5b5c5d5f60616264656667696a6b6c',
'ecb-tbl-192: I=75'),
('2c2d3233898e8f8cbbbab9b8333031ce', 'ac8686eeca9ba761afe82d67b928c33f',
'6e6f70717374757678797a7b7d7e7f80828384858788898a',
'ecb-tbl-192: I=76'),
('84858687bfbcbdba37363938fdfafbf8', '5faf8573e33b145b6a369cd3606ab2c9',
'8c8d8e8f91929394969798999b9c9d9ea0a1a2a3a5a6a7a8',
'ecb-tbl-192: I=77'),
('828384857669686b909192930b08090e', '31587e9944ab1c16b844ecad0df2e7da',
'aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6',
'ecb-tbl-192: I=78'),
('bebfbcbd9695948b707176779e919093', 'd017fecd91148aba37f6f3068aa67d8a',
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4',
'ecb-tbl-192: I=79'),
('8b8a85846067666521202322d0d3d2dd', '788ef2f021a73cba2794b616078a8500',
'e6e7e8e9ebecedeef0f1f2f3f5f6f7f8fafbfcfdfe010002',
'ecb-tbl-192: I=80'),
('76777475f1f2f3f4f8f9e6e777707172', '5d1ef20dced6bcbc12131ac7c54788aa',
'04050607090a0b0c0e0f10111314151618191a1b1d1e1f20',
'ecb-tbl-192: I=81'),
('a4a5a2a34f404142b4b5b6b727242522', 'b3c8cf961faf9ea05fdde6d1e4d8f663',
'222324252728292a2c2d2e2f31323334363738393b3c3d3e',
'ecb-tbl-192: I=82'),
('94959697e1e2e3ec16171011839c9d9e', '143075c70605861c7fac6526199e459f',
'40414243454647484a4b4c4d4f50515254555657595a5b5c',
'ecb-tbl-192: I=83'),
('03023d3c06010003dedfdcddfffcfde2', 'a5ae12eade9a87268d898bfc8fc0252a',
'5e5f60616364656668696a6b6d6e6f70727374757778797a',
'ecb-tbl-192: I=84'),
('10111213f1f2f3f4cecfc0c1dbdcddde', '0924f7cf2e877a4819f5244a360dcea9',
'7c7d7e7f81828384868788898b8c8d8e9091929395969798',
'ecb-tbl-192: I=85'),
('67666160724d4c4f1d1c1f1e73707176', '3d9e9635afcc3e291cc7ab3f27d1c99a',
'9a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6',
'ecb-tbl-192: I=86'),
('e6e7e4e5a8abaad584858283909f9e9d', '9d80feebf87510e2b8fb98bb54fd788c',
'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4',
'ecb-tbl-192: I=87'),
('71707f7e565150537d7c7f7e6162636c', '5f9d1a082a1a37985f174002eca01309',
'd6d7d8d9dbdcdddee0e1e2e3e5e6e7e8eaebecedeff0f1f2',
'ecb-tbl-192: I=88'),
('64656667212223245555aaaa03040506', 'a390ebb1d1403930184a44b4876646e4',
'f4f5f6f7f9fafbfcfefe01010304050608090a0b0d0e0f10',
'ecb-tbl-192: I=89'),
('9e9f9899aba4a5a6cfcecdcc2b28292e', '700fe918981c3195bb6c4bcb46b74e29',
'121314151718191a1c1d1e1f21222324262728292b2c2d2e',
'ecb-tbl-192: I=90'),
('c7c6c5c4d1d2d3dc626364653a454447', '907984406f7bf2d17fb1eb15b673d747',
'30313233353637383a3b3c3d3f40414244454647494a4b4c',
'ecb-tbl-192: I=91'),
('f6f7e8e9e0e7e6e51d1c1f1e5b585966', 'c32a956dcfc875c2ac7c7cc8b8cc26e1',
'4e4f50515354555658595a5b5d5e5f60626364656768696a',
'ecb-tbl-192: I=92'),
('bcbdbebf5d5e5f5868696667f4f3f2f1', '02646e2ebfa9b820cf8424e9b9b6eb51',
'6c6d6e6f71727374767778797b7c7d7e8081828385868788',
'ecb-tbl-192: I=93'),
('40414647b0afaead9b9a99989b98999e', '621fda3a5bbd54c6d3c685816bd4ead8',
'8a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6',
'ecb-tbl-192: I=94'),
('69686b6a0201001f0f0e0908b4bbbab9', 'd4e216040426dfaf18b152469bc5ac2f',
'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4',
'ecb-tbl-192: I=95'),
('c7c6c9c8d8dfdedd5a5b5859bebdbcb3', '9d0635b9d33b6cdbd71f5d246ea17cc8',
'c6c7c8c9cbcccdced0d1d2d3d5d6d7d8dadbdcdddfe0e1e2',
'ecb-tbl-192: I=96'),
('dedfdcdd787b7a7dfffee1e0b2b5b4b7', '10abad1bd9bae5448808765583a2cc1a',
'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6f8f9fafbfdfefe00',
'ecb-tbl-192: I=97'),
('4d4c4b4a606f6e6dd0d1d2d3fbf8f9fe', '6891889e16544e355ff65a793c39c9a8',
'020304050708090a0c0d0e0f11121314161718191b1c1d1e',
'ecb-tbl-192: I=98'),
('b7b6b5b4d7d4d5dae5e4e3e2e1fefffc', 'cc735582e68072c163cd9ddf46b91279',
'20212223252627282a2b2c2d2f30313234353637393a3b3c',
'ecb-tbl-192: I=99'),
('cecfb0b1f7f0f1f2aeafacad3e3d3c23', 'c5c68b9aeeb7f878df578efa562f9574',
'3e3f40414344454648494a4b4d4e4f50525354555758595a',
'ecb-tbl-192: I=100'),
('cacbc8c9cdcecfc812131c1d494e4f4c', '5f4764395a667a47d73452955d0d2ce8',
'5c5d5e5f61626364666768696b6c6d6e7071727375767778',
'ecb-tbl-192: I=101'),
('9d9c9b9ad22d2c2fb1b0b3b20c0f0e09', '701448331f66106cefddf1eb8267c357',
'7a7b7c7d7f80818284858687898a8b8c8e8f909193949596',
'ecb-tbl-192: I=102'),
('7a7b787964676659959493924f404142', 'cb3ee56d2e14b4e1941666f13379d657',
'98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4',
'ecb-tbl-192: I=103'),
('aaaba4a5cec9c8cb1f1e1d1caba8a9a6', '9fe16efd18ab6e1981191851fedb0764',
'b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8cacbcccdcfd0d1d2',
'ecb-tbl-192: I=104'),
('93929190282b2a2dc4c5fafb92959497', '3dc9ba24e1b223589b147adceb4c8e48',
'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6e8e9eaebedeeeff0',
'ecb-tbl-192: I=105'),
('efeee9e8ded1d0d339383b3a888b8a8d', '1c333032682e7d4de5e5afc05c3e483c',
'f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e',
'ecb-tbl-192: I=106'),
('7f7e7d7ca2a1a0af78797e7f112e2f2c', 'd593cc99a95afef7e92038e05a59d00a',
'10111213151617181a1b1c1d1f20212224252627292a2b2c',
'ecb-tbl-192: I=107'),
('84859a9b2b2c2d2e868784852625245b', '51e7f96f53b4353923452c222134e1ec',
'2e2f30313334353638393a3b3d3e3f40424344454748494a',
'ecb-tbl-192: I=108'),
('b0b1b2b3070405026869666710171615', '4075b357a1a2b473400c3b25f32f81a4',
'4c4d4e4f51525354565758595b5c5d5e6061626365666768',
'ecb-tbl-192: I=109'),
('acadaaabbda2a3a00d0c0f0e595a5b5c', '302e341a3ebcd74f0d55f61714570284',
'6a6b6c6d6f70717274757677797a7b7c7e7f808183848586',
'ecb-tbl-192: I=110'),
('121310115655544b5253545569666764', '57abdd8231280da01c5042b78cf76522',
'88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4',
'ecb-tbl-192: I=111'),
('dedfd0d166616063eaebe8e94142434c', '17f9ea7eea17ac1adf0e190fef799e92',
'a6a7a8a9abacadaeb0b1b2b3b5b6b7b8babbbcbdbfc0c1c2',
'ecb-tbl-192: I=112'),
('dbdad9d81417161166677879e0e7e6e5', '2e1bdd563dd87ee5c338dd6d098d0a7a',
'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6d8d9dadbdddedfe0',
'ecb-tbl-192: I=113'),
('6a6b6c6de0efeeed2b2a2928c0c3c2c5', 'eb869996e6f8bfb2bfdd9e0c4504dbb2',
'e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe',
'ecb-tbl-192: I=114'),
('b1b0b3b21714151a1a1b1c1d5649484b', 'c2e01549e9decf317468b3e018c61ba8',
'00010203050607080a0b0c0d0f10111214151617191a1b1c',
'ecb-tbl-192: I=115'),
('39380706a3a4a5a6c4c5c6c77271706f', '8da875d033c01dd463b244a1770f4a22',
'1e1f20212324252628292a2b2d2e2f30323334353738393a',
'ecb-tbl-192: I=116'),
('5c5d5e5f1013121539383736e2e5e4e7', '8ba0dcf3a186844f026d022f8839d696',
'3c3d3e3f41424344464748494b4c4d4e5051525355565758',
'ecb-tbl-192: I=117'),
('43424544ead5d4d72e2f2c2d64676661', 'e9691ff9a6cc6970e51670a0fd5b88c1',
'5a5b5c5d5f60616264656667696a6b6c6e6f707173747576',
'ecb-tbl-192: I=118'),
('55545756989b9a65f8f9feff18171615', 'f2baec06faeed30f88ee63ba081a6e5b',
'78797a7b7d7e7f80828384858788898a8c8d8e8f91929394',
'ecb-tbl-192: I=119'),
('05040b0a525554573c3d3e3f4a494847', '9c39d4c459ae5753394d6094adc21e78',
'969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2',
'ecb-tbl-192: I=120'),
('14151617595a5b5c8584fbfa8e89888b', '6345b532a11904502ea43ba99c6bd2b2',
'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0',
'ecb-tbl-192: I=121'),
('7c7d7a7bfdf2f3f029282b2a51525354', '5ffae3061a95172e4070cedce1e428c8',
'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-192: I=122'),
('38393a3b1e1d1c1341404746c23d3c3e', '0a4566be4cdf9adce5dec865b5ab34cd',
'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c',
'ecb-tbl-192: I=123'),
('8d8c939240474645818083827c7f7e41', 'ca17fcce79b7404f2559b22928f126fb',
'0e0f10111314151618191a1b1d1e1f20222324252728292a',
'ecb-tbl-192: I=124'),
('3b3a39381a19181f32333c3d45424340', '97ca39b849ed73a6470a97c821d82f58',
'2c2d2e2f31323334363738393b3c3d3e4041424345464748',
'ecb-tbl-192: I=125'),
('f0f1f6f738272625828380817f7c7d7a', '8198cb06bc684c6d3e9b7989428dcf7a',
'4a4b4c4d4f50515254555657595a5b5c5e5f606163646566',
'ecb-tbl-192: I=126'),
('89888b8a0407061966676061141b1a19', 'f53c464c705ee0f28d9a4c59374928bd',
'68696a6b6d6e6f70727374757778797a7c7d7e7f81828384',
'ecb-tbl-192: I=127'),
('d3d2dddcaaadacaf9c9d9e9fe8ebeae5', '9adb3d4cca559bb98c3e2ed73dbf1154',
'868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2',
'ecb-tbl-192: I=128'),
# ecb_tbl.txt, KEYSIZE=256
('834eadfccac7e1b30664b1aba44815ab', '1946dabf6a03a2a2c3d0b05080aed6fc',
'00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526',
'ecb-tbl-256: I=1'),
('d9dc4dba3021b05d67c0518f72b62bf1', '5ed301d747d3cc715445ebdec62f2fb4',
'28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-256: I=2'),
('a291d86301a4a739f7392173aa3c604c', '6585c8f43d13a6beab6419fc5935b9d0',
'50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576',
'ecb-tbl-256: I=3'),
('4264b2696498de4df79788a9f83e9390', '2a5b56a596680fcc0e05f5e0f151ecae',
'78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-256: I=4'),
('ee9932b3721804d5a83ef5949245b6f6', 'f5d6ff414fd2c6181494d20c37f2b8c4',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6',
'ecb-tbl-256: I=5'),
('e6248f55c5fdcbca9cbbb01c88a2ea77', '85399c01f59fffb5204f19f8482f00b8',
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-256: I=6'),
('b8358e41b9dff65fd461d55a99266247', '92097b4c88a041ddf98144bc8d22e8e7',
'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516',
'ecb-tbl-256: I=7'),
('f0e2d72260af58e21e015ab3a4c0d906', '89bd5b73b356ab412aef9f76cea2d65c',
'18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e',
'ecb-tbl-256: I=8'),
('475b8b823ce8893db3c44a9f2a379ff7', '2536969093c55ff9454692f2fac2f530',
'40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566',
'ecb-tbl-256: I=9'),
('688f5281945812862f5f3076cf80412f', '07fc76a872843f3f6e0081ee9396d637',
'68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e',
'ecb-tbl-256: I=10'),
('08d1d2bc750af553365d35e75afaceaa', 'e38ba8ec2aa741358dcc93e8f141c491',
'90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6',
'ecb-tbl-256: I=11'),
('8707121f47cc3efceca5f9a8474950a1', 'd028ee23e4a89075d0b03e868d7d3a42',
'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde',
'ecb-tbl-256: I=12'),
('e51aa0b135dba566939c3b6359a980c5', '8cd9423dfc459e547155c5d1d522e540',
'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506',
'ecb-tbl-256: I=13'),
('069a007fc76a459f98baf917fedf9521', '080e9517eb1677719acf728086040ae3',
'08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e',
'ecb-tbl-256: I=14'),
('726165c1723fbcf6c026d7d00b091027', '7c1700211a3991fc0ecded0ab3e576b0',
'30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556',
'ecb-tbl-256: I=15'),
('d7c544de91d55cfcde1f84ca382200ce', 'dabcbcc855839251db51e224fbe87435',
'58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e',
'ecb-tbl-256: I=16'),
('fed3c9a161b9b5b2bd611b41dc9da357', '68d56fad0406947a4dd27a7448c10f1d',
'80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6',
'ecb-tbl-256: I=17'),
('4f634cdc6551043409f30b635832cf82', 'da9a11479844d1ffee24bbf3719a9925',
'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce',
'ecb-tbl-256: I=18'),
('109ce98db0dfb36734d9f3394711b4e6', '5e4ba572f8d23e738da9b05ba24b8d81',
'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6',
'ecb-tbl-256: I=19'),
('4ea6dfaba2d8a02ffdffa89835987242', 'a115a2065d667e3f0b883837a6e903f8',
'70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596',
'ecb-tbl-256: I=20'),
('5ae094f54af58e6e3cdbf976dac6d9ef', '3e9e90dc33eac2437d86ad30b137e66e',
'98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe',
'ecb-tbl-256: I=21'),
('764d8e8e0f29926dbe5122e66354fdbe', '01ce82d8fbcdae824cb3c48e495c3692',
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6',
'ecb-tbl-256: I=22'),
('3f0418f888cdf29a982bf6b75410d6a9', '0c9cff163ce936faaf083cfd3dea3117',
'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e',
'ecb-tbl-256: I=23'),
('e4a3e7cb12cdd56aa4a75197a9530220', '5131ba9bd48f2bba85560680df504b52',
'10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536',
'ecb-tbl-256: I=24'),
('211677684aac1ec1a160f44c4ebf3f26', '9dc503bbf09823aec8a977a5ad26ccb2',
'38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e',
'ecb-tbl-256: I=25'),
('d21e439ff749ac8f18d6d4b105e03895', '9a6db0c0862e506a9e397225884041d7',
'60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586',
'ecb-tbl-256: I=26'),
('d9f6ff44646c4725bd4c0103ff5552a7', '430bf9570804185e1ab6365fc6a6860c',
'88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae',
'ecb-tbl-256: I=27'),
('0b1256c2a00b976250cfc5b0c37ed382', '3525ebc02f4886e6a5a3762813e8ce8a',
'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6',
'ecb-tbl-256: I=28'),
('b056447ffc6dc4523a36cc2e972a3a79', '07fa265c763779cce224c7bad671027b',
'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe',
'ecb-tbl-256: I=29'),
('5e25ca78f0de55802524d38da3fe4456', 'e8b72b4e8be243438c9fff1f0e205872',
'00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526',
'ecb-tbl-256: I=30'),
('a5bcf4728fa5eaad8567c0dc24675f83', '109d4f999a0e11ace1f05e6b22cbcb50',
'28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-256: I=31'),
('814e59f97ed84646b78b2ca022e9ca43', '45a5e8d4c3ed58403ff08d68a0cc4029',
'50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576',
'ecb-tbl-256: I=32'),
('15478beec58f4775c7a7f5d4395514d7', '196865964db3d417b6bd4d586bcb7634',
'78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-256: I=33'),
('253548ffca461c67c8cbc78cd59f4756', '60436ad45ac7d30d99195f815d98d2ae',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6',
'ecb-tbl-256: I=34'),
('fd7ad8d73b9b0f8cc41600640f503d65', 'bb07a23f0b61014b197620c185e2cd75',
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-256: I=35'),
('06199de52c6cbf8af954cd65830bcd56', '5bc0b2850129c854423aff0751fe343b',
'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516',
'ecb-tbl-256: I=36'),
('f17c4ffe48e44c61bd891e257e725794', '7541a78f96738e6417d2a24bd2beca40',
'18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e',
'ecb-tbl-256: I=37'),
('9a5b4a402a3e8a59be6bf5cd8154f029', 'b0a303054412882e464591f1546c5b9e',
'40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566',
'ecb-tbl-256: I=38'),
('79bd40b91a7e07dc939d441782ae6b17', '778c06d8a355eeee214fcea14b4e0eef',
'68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e',
'ecb-tbl-256: I=39'),
('d8ceaaf8976e5fbe1012d8c84f323799', '09614206d15cbace63227d06db6beebb',
'90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6',
'ecb-tbl-256: I=40'),
('3316e2751e2e388b083da23dd6ac3fbe', '41b97fb20e427a9fdbbb358d9262255d',
'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde',
'ecb-tbl-256: I=41'),
('8b7cfbe37de7dca793521819242c5816', 'c1940f703d845f957652c2d64abd7adf',
'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506',
'ecb-tbl-256: I=42'),
('f23f033c0eebf8ec55752662fd58ce68', 'd2d44fcdae5332343366db297efcf21b',
'08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e',
'ecb-tbl-256: I=43'),
('59eb34f6c8bdbacc5fc6ad73a59a1301', 'ea8196b79dbe167b6aa9896e287eed2b',
'30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556',
'ecb-tbl-256: I=44'),
('dcde8b6bd5cf7cc22d9505e3ce81261a', 'd6b0b0c4ba6c7dbe5ed467a1e3f06c2d',
'58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e',
'ecb-tbl-256: I=45'),
('e33cf7e524fed781e7042ff9f4b35dc7', 'ec51eb295250c22c2fb01816fb72bcae',
'80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6',
'ecb-tbl-256: I=46'),
('27963c8facdf73062867d164df6d064c', 'aded6630a07ce9c7408a155d3bd0d36f',
'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce',
'ecb-tbl-256: I=47'),
('77b1ce386b551b995f2f2a1da994eef8', '697c9245b9937f32f5d1c82319f0363a',
'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6',
'ecb-tbl-256: I=48'),
('f083388b013679efcf0bb9b15d52ae5c', 'aad5ad50c6262aaec30541a1b7b5b19c',
'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e',
'ecb-tbl-256: I=49'),
('c5009e0dab55db0abdb636f2600290c8', '7d34b893855341ec625bd6875ac18c0d',
'20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546',
'ecb-tbl-256: I=50'),
('7804881e26cd532d8514d3683f00f1b9', '7ef05105440f83862f5d780e88f02b41',
'48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e',
'ecb-tbl-256: I=51'),
('46cddcd73d1eb53e675ca012870a92a3', 'c377c06403382061af2c9c93a8e70df6',
'70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596',
'ecb-tbl-256: I=52'),
('a9fb44062bb07fe130a8e8299eacb1ab', '1dbdb3ffdc052dacc83318853abc6de5',
'98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe',
'ecb-tbl-256: I=53'),
('2b6ff8d7a5cc3a28a22d5a6f221af26b', '69a6eab00432517d0bf483c91c0963c7',
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6',
'ecb-tbl-256: I=54'),
('1a9527c29b8add4b0e3e656dbb2af8b4', '0797f41dc217c80446e1d514bd6ab197',
'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e',
'ecb-tbl-256: I=55'),
('7f99cf2c75244df015eb4b0c1050aeae', '9dfd76575902a637c01343c58e011a03',
'10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536',
'ecb-tbl-256: I=56'),
('e84ff85b0d9454071909c1381646c4ed', 'acf4328ae78f34b9fa9b459747cc2658',
'38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e',
'ecb-tbl-256: I=57'),
('89afd40f99521280d5399b12404f6db4', 'b0479aea12bac4fe2384cf98995150c6',
'60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586',
'ecb-tbl-256: I=58'),
('a09ef32dbc5119a35ab7fa38656f0329', '9dd52789efe3ffb99f33b3da5030109a',
'88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae',
'ecb-tbl-256: I=59'),
('61773457f068c376c7829b93e696e716', 'abbb755e4621ef8f1214c19f649fb9fd',
'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6',
'ecb-tbl-256: I=60'),
('a34f0cae726cce41dd498747d891b967', 'da27fb8174357bce2bed0e7354f380f9',
'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe',
'ecb-tbl-256: I=61'),
('856f59496c7388ee2d2b1a27b7697847', 'c59a0663f0993838f6e5856593bdc5ef',
'00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526',
'ecb-tbl-256: I=62'),
('cb090c593ef7720bd95908fb93b49df4', 'ed60b264b5213e831607a99c0ce5e57e',
'28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-256: I=63'),
('a0ac75cd2f1923d460fc4d457ad95baf', 'e50548746846f3eb77b8c520640884ed',
'50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576',
'ecb-tbl-256: I=64'),
('2a2b282974777689e8e9eeef525d5c5f', '28282cc7d21d6a2923641e52d188ef0c',
'78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-256: I=65'),
('909192939390919e0f0e09089788898a', '0dfa5b02abb18e5a815305216d6d4f8e',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6',
'ecb-tbl-256: I=66'),
('777675748d8e8f907170777649464744', '7359635c0eecefe31d673395fb46fb99',
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-256: I=67'),
('717073720605040b2d2c2b2a05fafbf9', '73c679f7d5aef2745c9737bb4c47fb36',
'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516',
'ecb-tbl-256: I=68'),
('64656667fefdfcc31b1a1d1ca5aaaba8', 'b192bd472a4d2eafb786e97458967626',
'18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e',
'ecb-tbl-256: I=69'),
('dbdad9d86a696867b5b4b3b2c8d7d6d5', '0ec327f6c8a2b147598ca3fde61dc6a4',
'40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566',
'ecb-tbl-256: I=70'),
('5c5d5e5fe3e0e1fe31303736333c3d3e', 'fc418eb3c41b859b38d4b6f646629729',
'68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e',
'ecb-tbl-256: I=71'),
('545556574b48494673727574546b6a69', '30249e5ac282b1c981ea64b609f3a154',
'90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6',
'ecb-tbl-256: I=72'),
('ecedeeefc6c5c4bb56575051f5fafbf8', '5e6e08646d12150776bb43c2d78a9703',
'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde',
'ecb-tbl-256: I=73'),
('464744452724252ac9c8cfced2cdcccf', 'faeb3d5de652cd3447dceb343f30394a',
'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506',
'ecb-tbl-256: I=74'),
('e6e7e4e54142435c878681801c131211', 'a8e88706823f6993ef80d05c1c7b2cf0',
'08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e',
'ecb-tbl-256: I=75'),
('72737071cfcccdc2f9f8fffe710e0f0c', '8ced86677e6e00a1a1b15968f2d3cce6',
'30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556',
'ecb-tbl-256: I=76'),
('505152537370714ec3c2c5c4010e0f0c', '9fc7c23858be03bdebb84e90db6786a9',
'58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e',
'ecb-tbl-256: I=77'),
('a8a9aaab5c5f5e51aeafa8a93d222320', 'b4fbd65b33f70d8cf7f1111ac4649c36',
'80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6',
'ecb-tbl-256: I=78'),
('dedfdcddf6f5f4eb10111617fef1f0f3', 'c5c32d5ed03c4b53cc8c1bd0ef0dbbf6',
'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce',
'ecb-tbl-256: I=79'),
('bdbcbfbe5e5d5c530b0a0d0cfac5c4c7', 'd1a7f03b773e5c212464b63709c6a891',
'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6',
'ecb-tbl-256: I=80'),
('8a8b8889050606f8f4f5f2f3636c6d6e', '6b7161d8745947ac6950438ea138d028',
'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e',
'ecb-tbl-256: I=81'),
('a6a7a4a54d4e4f40b2b3b4b539262724', 'fd47a9f7e366ee7a09bc508b00460661',
'20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546',
'ecb-tbl-256: I=82'),
('9c9d9e9fe9eaebf40e0f08099b949596', '00d40b003dc3a0d9310b659b98c7e416',
'48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e',
'ecb-tbl-256: I=83'),
('2d2c2f2e1013121dcccdcacbed121310', 'eea4c79dcc8e2bda691f20ac48be0717',
'70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596',
'ecb-tbl-256: I=84'),
('f4f5f6f7edeeefd0eaebecedf7f8f9fa', 'e78f43b11c204403e5751f89d05a2509',
'98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe',
'ecb-tbl-256: I=85'),
('3d3c3f3e282b2a2573727574150a0b08', 'd0f0e3d1f1244bb979931e38dd1786ef',
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6',
'ecb-tbl-256: I=86'),
('b6b7b4b5f8fbfae5b4b5b2b3a0afaead', '042e639dc4e1e4dde7b75b749ea6f765',
'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e',
'ecb-tbl-256: I=87'),
('b7b6b5b4989b9a95878681809ba4a5a6', 'bc032fdd0efe29503a980a7d07ab46a8',
'10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536',
'ecb-tbl-256: I=88'),
('a8a9aaabe5e6e798e9e8efee4748494a', '0c93ac949c0da6446effb86183b6c910',
'38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e',
'ecb-tbl-256: I=89'),
('ecedeeefd9dadbd4b9b8bfbe657a7b78', 'e0d343e14da75c917b4a5cec4810d7c2',
'60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586',
'ecb-tbl-256: I=90'),
('7f7e7d7c696a6b74cacbcccd929d9c9f', '0eafb821748408279b937b626792e619',
'88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae',
'ecb-tbl-256: I=91'),
('08090a0b0605040bfffef9f8b9c6c7c4', 'fa1ac6e02d23b106a1fef18b274a553f',
'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6',
'ecb-tbl-256: I=92'),
('08090a0bf1f2f3ccfcfdfafb68676665', '0dadfe019cd12368075507df33c1a1e9',
'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe',
'ecb-tbl-256: I=93'),
('cacbc8c93a393837050403020d121310', '3a0879b414465d9ffbaf86b33a63a1b9',
'00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526',
'ecb-tbl-256: I=94'),
('e9e8ebea8281809f8f8e8988343b3a39', '62199fadc76d0be1805d3ba0b7d914bf',
'28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-256: I=95'),
('515053524645444bd0d1d6d7340b0a09', '1b06d6c5d333e742730130cf78e719b4',
'50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576',
'ecb-tbl-256: I=96'),
('42434041ecefee1193929594c6c9c8cb', 'f1f848824c32e9dcdcbf21580f069329',
'78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e',
'ecb-tbl-256: I=97'),
('efeeedecc2c1c0cf76777071455a5b58', '1a09050cbd684f784d8e965e0782f28a',
'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6',
'ecb-tbl-256: I=98'),
('5f5e5d5c3f3c3d221d1c1b1a19161714', '79c2969e7ded2ba7d088f3f320692360',
'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee',
'ecb-tbl-256: I=99'),
('000102034142434c1c1d1a1b8d727371', '091a658a2f7444c16accb669450c7b63',
'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516',
'ecb-tbl-256: I=100'),
('8e8f8c8db1b2b38c56575051050a0b08', '97c1e3a72cca65fa977d5ed0e8a7bbfc',
'18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e',
'ecb-tbl-256: I=101'),
('a7a6a5a4e8ebeae57f7e7978cad5d4d7', '70c430c6db9a17828937305a2df91a2a',
'40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566',
'ecb-tbl-256: I=102'),
('8a8b888994979689454443429f909192', '629553457fbe2479098571c7c903fde8',
'68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e',
'ecb-tbl-256: I=103'),
('8c8d8e8fe0e3e2ed45444342f1cecfcc', 'a25b25a61f612669e7d91265c7d476ba',
'90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6',
'ecb-tbl-256: I=104'),
('fffefdfc4c4f4e31d8d9dedfb6b9b8bb', 'eb7e4e49b8ae0f024570dda293254fed',
'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde',
'ecb-tbl-256: I=105'),
('fdfcfffecccfcec12f2e29286679787b', '38fe15d61cca84516e924adce5014f67',
'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506',
'ecb-tbl-256: I=106'),
('67666564bab9b8a77071767719161714', '3ad208492249108c9f3ebeb167ad0583',
'08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e',
'ecb-tbl-256: I=107'),
('9a9b98992d2e2f2084858283245b5a59', '299ba9f9bf5ab05c3580fc26edd1ed12',
'30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556',
'ecb-tbl-256: I=108'),
('a4a5a6a70b0809365c5d5a5b2c232221', '19dc705b857a60fb07717b2ea5717781',
'58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e',
'ecb-tbl-256: I=109'),
('464744455754555af3f2f5f4afb0b1b2', 'ffc8aeb885b5efcad06b6dbebf92e76b',
'80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6',
'ecb-tbl-256: I=110'),
('323330317675746b7273747549464744', 'f58900c5e0b385253ff2546250a0142b',
'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce',
'ecb-tbl-256: I=111'),
('a8a9aaab181b1a15808186872b141516', '2ee67b56280bc462429cee6e3370cbc1',
'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6',
'ecb-tbl-256: I=112'),
('e7e6e5e4202323ddaaabacad343b3a39', '20db650a9c8e9a84ab4d25f7edc8f03f',
'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e',
'ecb-tbl-256: I=113'),
('a8a9aaab2221202fedecebea1e010003', '3c36da169525cf818843805f25b78ae5',
'20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546',
'ecb-tbl-256: I=114'),
('f9f8fbfa5f5c5d42424344450e010003', '9a781d960db9e45e37779042fea51922',
'48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e',
'ecb-tbl-256: I=115'),
('57565554f5f6f7f89697909120dfdedd', '6560395ec269c672a3c288226efdba77',
'70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596',
'ecb-tbl-256: I=116'),
('f8f9fafbcccfcef1dddcdbda0e010003', '8c772b7a189ac544453d5916ebb27b9a',
'98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe',
'ecb-tbl-256: I=117'),
('d9d8dbda7073727d80818687c2dddcdf', '77ca5468cc48e843d05f78eed9d6578f',
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6',
'ecb-tbl-256: I=118'),
('c5c4c7c6080b0a1588898e8f68676665', '72cdcc71dc82c60d4429c9e2d8195baa',
'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e',
'ecb-tbl-256: I=119'),
('83828180dcdfded186878081f0cfcecd', '8080d68ce60e94b40b5b8b69eeb35afa',
'10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536',
'ecb-tbl-256: I=120'),
('98999a9bdddedfa079787f7e0a050407', '44222d3cde299c04369d58ac0eba1e8e',
'38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e',
'ecb-tbl-256: I=121'),
('cecfcccd4f4c4d429f9e9998dfc0c1c2', '9b8721b0a8dfc691c5bc5885dbfcb27a',
'60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586',
'ecb-tbl-256: I=122'),
('404142436665647b29282f2eaba4a5a6', '0dc015ce9a3a3414b5e62ec643384183',
'88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae',
'ecb-tbl-256: I=123'),
('33323130e6e5e4eb23222524dea1a0a3', '705715448a8da412025ce38345c2a148',
'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6',
'ecb-tbl-256: I=124'),
('cfcecdccf6f5f4cbe6e7e0e199969794', 'c32b5b0b6fbae165266c569f4b6ecf0b',
'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe',
'ecb-tbl-256: I=125'),
('babbb8b97271707fdcdddadb29363734', '4dca6c75192a01ddca9476af2a521e87',
'00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526',
'ecb-tbl-256: I=126'),
('c9c8cbca4447465926272021545b5a59', '058691e627ecbc36ac07b6db423bd698',
'28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e',
'ecb-tbl-256: I=127'),
('050407067477767956575051221d1c1f', '7444527095838fe080fc2bcdd30847eb',
'50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576',
'ecb-tbl-256: I=128'),
# NIST SP 800-38A test vectors, 2001 edition. Appendix F.
('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+
'30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710',
'3ad77bb40d7a3660a89ecaf32466ef97'+'f5d3d58503b9699de785895a96fdbaaf'+
'43b1cd7f598ece23881b00e3ed030688'+'7b0c785e27e8ad3f8223207104725dd4',
'2b7e151628aed2a6abf7158809cf4f3c',
'NIST 800-38A, F.1.1, ECB and AES-128'),
('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+
'30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710',
'bd334f1d6e45f25ff712a214571fa5cc'+'974104846d0ad3ad7734ecb3ecee4eef'+
'ef7afd2270e2e60adce0ba2face6444e'+'9a4b41ba738d6c72fb16691603c18e0e',
'8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b',
'NIST 800-38A, F.1.3, ECB and AES-192'),
('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+
'30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710',
'f3eed1bdb5d2a03c064b5a7e3db181f8'+'591ccb10d410ed26dc5ba74a31362870'+
'b6ed21b99ca6f4f9f153e7b1beafed1d'+'23304b7a39f9f3ff067d8d8f9e24ecc7',
'603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4',
 'NIST 800-38A, F.1.5, ECB and AES-256'),
]
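# Each test_data entry above is a (plaintext, ciphertext, key, description)
# tuple, with plaintext, ciphertext and key given as hex strings.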
def get_tests(config={}):
from Crypto.Cipher import AES
from Crypto.Cipher.AES import _raw_cpuid_lib
from .common import make_block_tests
tests = make_block_tests(AES, "AES", test_data, {'use_aesni': False})
if _raw_cpuid_lib.have_aes_ni():
# Run tests with AES-NI instructions if they are available.
tests += make_block_tests(AES, "AESNI", test_data, {'use_aesni': True})
else:
print("Skipping AESNI tests")
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
|
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Performance test for a peer review system.
WARNING! Use this script to load-test Course Builder. This is a very
dangerous feature; be careful, because anyone can impersonate a super user of
your Course Builder instance. Use it only if you have to perform specific
load testing.
Keep in mind:
- when repeatedly running tests and creating new test namespaces,
flush memcache
Here is how to run:
- update /controllers/sites.py and enable CAN_IMPERSONATE
- navigate to the root directory of the app
- run a command line by typing:
python tests/integration/load_test.py \
--thread_count=5 \
--start_uid=1 \
https://mycourse.appspot.com
If you use http instead of https, your tests will fail because your requests
will instantly be redirected (which can confound GET vs POST, for example).
"""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import argparse
import cookielib
import json
import logging
import random
import re
import sys
import threading
import time
import urllib
import urllib2
# The unit id for the peer review assignment in the default course.
LEGACY_REVIEW_UNIT_ID = 'ReviewAssessmentExample'
# command line arguments parser
PARSER = argparse.ArgumentParser()
PARSER.add_argument(
'base_url', help=('Base URL of the course you want to test'), type=str)
PARSER.add_argument(
'--start_uid',
help='Initial value for unique thread identifier.', default=1, type=int)
PARSER.add_argument(
'--thread_count',
help='Number of concurrent threads for executing the test.',
default=1, type=int)
PARSER.add_argument(
'--iteration_count',
help='Number of iterations for executing the test. Each thread of each '
'iteration acts as a unique user with the uid equal to:'
'start_uid + thread_count * iteration_index.',
default=1, type=int)
def assert_contains(needle, haystack):
if needle not in haystack:
        raise Exception('Expected to find term: %s\n%s' % (needle, haystack))
def assert_does_not_contain(needle, haystack):
if needle in haystack:
        raise Exception(
            'Did not expect to find term: %s\n%s' % (needle, haystack))
def assert_equals(expected, actual):
if expected != actual:
        raise Exception('Expected equality of %s and %s.' % (expected, actual))
class WebSession(object):
"""A class that allows navigation of web pages keeping cookie session."""
PROGRESS_LOCK = threading.Lock()
MAX_RETRIES = 3
RETRY_SLEEP_SEC = 3
GET_COUNT = 0
POST_COUNT = 0
RETRY_COUNT = 0
PROGRESS_BATCH = 10
RESPONSE_TIME_HISTOGRAM = [0, 0, 0, 0, 0, 0]
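    # Buckets count responses taking >30s, >15s, >7s, >3s, >1s and <=1s
    # respectively; run_all() logs the matching SLA legend.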
def __init__(self, uid, common_headers=None):
if common_headers is None:
common_headers = {}
self.uid = uid
self.common_headers = common_headers
self.cj = cookielib.CookieJar()
self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
@classmethod
def increment_duration_bucket(cls, index):
cls.RESPONSE_TIME_HISTOGRAM[index] += 1
@classmethod
def update_duration(cls, duration):
if duration > 30:
cls.increment_duration_bucket(0)
elif duration > 15:
cls.increment_duration_bucket(1)
elif duration > 7:
cls.increment_duration_bucket(2)
elif duration > 3:
cls.increment_duration_bucket(3)
elif duration > 1:
cls.increment_duration_bucket(4)
else:
cls.increment_duration_bucket(5)
@classmethod
def log_progress(cls, force=False):
        update = ((cls.GET_COUNT + cls.POST_COUNT) % cls.PROGRESS_BATCH == 0)
if update or force:
logging.info(
'GET/POST:[%s, %s], RETRIES:[%s], SLA:%s',
cls.GET_COUNT, cls.POST_COUNT, cls.RETRY_COUNT,
cls.RESPONSE_TIME_HISTOGRAM)
def get_cookie_value(self, name):
for cookie in self.cj:
if cookie.name == name:
return cookie.value
return None
def is_soft_error(self, http_error):
"""Checks if HTTPError is due to starvation of frontend instances."""
body = http_error.fp.read()
# this is the text specific to the front end instance starvation, which
# is a retriable error for both GET and POST; normal HTTP error 500 has
# this specific text '<h1>500 Internal Server Error</h1>'
if http_error.code == 500 and '<h1>Error: Server Error</h1>' in body:
return True
logging.error(
'Non-retriable HTTP %s error:\n%s', http_error.code, body)
return False
def open(self, request, hint):
"""Executes any HTTP request."""
start_time = time.time()
try:
try_count = 0
while True:
try:
return self.opener.open(request)
except urllib2.HTTPError as he:
if (
try_count < WebSession.MAX_RETRIES and
self.is_soft_error(he)):
try_count += 1
with WebSession.PROGRESS_LOCK:
WebSession.RETRY_COUNT += 1
time.sleep(WebSession.RETRY_SLEEP_SEC)
continue
raise he
except Exception as e:
logging.info(
'Error in session %s executing: %s', self.uid, hint)
raise e
finally:
with WebSession.PROGRESS_LOCK:
self.update_duration(time.time() - start_time)
def get(self, url, expected_code=200):
"""HTTP GET."""
with WebSession.PROGRESS_LOCK:
WebSession.GET_COUNT += 1
self.log_progress()
request = urllib2.Request(url)
for key, value in self.common_headers.items():
request.add_header(key, value)
response = self.open(request, 'GET %s' % url)
assert_equals(expected_code, response.code)
return response.read()
def post(self, url, args_dict, expected_code=200):
"""HTTP POST."""
with WebSession.PROGRESS_LOCK:
WebSession.POST_COUNT += 1
self.log_progress()
data = None
if args_dict:
data = urllib.urlencode(args_dict)
request = urllib2.Request(url, data)
for key, value in self.common_headers.items():
request.add_header(key, value)
response = self.open(request, 'POST %s' % url)
assert_equals(expected_code, response.code)
return response.read()
class TaskThread(threading.Thread):
"""Runs a task in a separate thread."""
def __init__(self, func, name=None):
super(TaskThread, self).__init__()
self.func = func
self.exception = None
self.name = name
@classmethod
def start_all_tasks(cls, tasks):
"""Starts all tasks."""
for task in tasks:
task.start()
@classmethod
def check_all_tasks(cls, tasks):
"""Checks results of all tasks; fails on the first exception found."""
failed_count = 0
for task in tasks:
while True:
# Timeouts should happen after 30 seconds.
task.join(30)
if task.isAlive():
logging.info('Still waiting for: %s.', task.name)
continue
else:
break
if task.exception:
failed_count += 1
if failed_count:
            raise Exception('Tasks failed: %s' % failed_count)
@classmethod
def execute_task_list(cls, tasks):
"""Starts all tasks and checks the results."""
cls.start_all_tasks(tasks)
cls.check_all_tasks(tasks)
def run(self):
try:
self.func()
except Exception as e: # pylint: disable-msg=broad-except
            logging.error('Error in %s: %s', self.name, e)
            self.exception = e  # recorded so check_all_tasks() sees the failure
            self.exc_info = sys.exc_info()
            raise self.exc_info[1], None, self.exc_info[2]
class LoadTest(object):
"""Parent for all load tests."""
def __init__(self, base_url, uid):
self.uid = uid
self.host = base_url
# this is an impersonation identity for the actor thread
self.email = 'load_test_bot_%s@example.com' % self.uid
self.name = 'Load Test Bot #%s' % self.uid
# begin web session
impersonate_header = {
'email': self.email, 'user_id': u'impersonation-%s' % self.uid}
self.session = WebSession(
uid=uid,
common_headers={'Gcb-Impersonate': json.dumps(impersonate_header)})
def get_hidden_field(self, name, body):
# The "\s*" denotes arbitrary whitespace; sometimes, this tag is split
# across multiple lines in the HTML.
# pylint: disable-msg=anomalous-backslash-in-string
reg = re.compile(
'<input type="hidden" name="%s"\s* value="([^"]*)">' % name)
# pylint: enable-msg=anomalous-backslash-in-string
return reg.search(body).group(1)
def register_if_has_to(self):
"""Performs student registration action."""
body = self.session.get('%s/' % self.host)
assert_contains('Logout', body)
if 'href="register"' not in body:
body = self.session.get('%s/student/home' % self.host)
assert_contains(self.email, body)
assert_contains(self.name, body)
return False
body = self.session.get('%s/register' % self.host)
xsrf_token = self.get_hidden_field('xsrf_token', body)
data = {'xsrf_token': xsrf_token, 'form01': self.name}
body = self.session.post('%s/register' % self.host, data)
body = self.session.get('%s/' % self.host)
assert_contains('Logout', body)
assert_does_not_contain('href="register"', body)
return True
class PeerReviewLoadTest(LoadTest):
"""A peer review load test."""
def run(self):
self.register_if_has_to()
self.submit_peer_review_assessment_if_possible()
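        # Keep requesting and submitting reviews until this bot has
        # completed two of them.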
while self.count_completed_reviews() < 2:
self.request_and_do_a_review()
def get_js_var(self, name, body):
reg = re.compile('%s = \'([^\']*)\';\n' % name)
return reg.search(body).group(1)
def get_draft_review_url(self, body):
"""Returns the URL of a draft review on the review dashboard."""
# The "\s*" denotes arbitrary whitespace; sometimes, this tag is split
# across multiple lines in the HTML.
# pylint: disable-msg=anomalous-backslash-in-string
reg = re.compile(
'<a href="([^"]*)">Assignment [0-9]+</a>\s*\(Draft\)')
# pylint: enable-msg=anomalous-backslash-in-string
result = reg.search(body)
if result is None:
return None
return result.group(1)
def submit_peer_review_assessment_if_possible(self):
"""Submits the peer review assessment."""
body = self.session.get(
'%s/assessment?name=%s' % (self.host, LEGACY_REVIEW_UNIT_ID))
assert_contains('You may only submit this assignment once', body)
if 'Submitted assignment' in body:
# The assignment was already submitted.
return True
assessment_xsrf_token = self.get_js_var('assessmentXsrfToken', body)
answers = [
{'index': 0, 'type': 'regex',
'value': 'Answer 0 by %s' % self.email},
{'index': 1, 'type': 'choices', 'value': self.uid},
{'index': 2, 'type': 'regex',
'value': 'Answer 2 by %s' % self.email},
]
data = {
'answers': json.dumps(answers),
'assessment_type': LEGACY_REVIEW_UNIT_ID,
'score': 0,
'xsrf_token': assessment_xsrf_token,
}
body = self.session.post('%s/answer' % self.host, data)
assert_contains('Review peer assignments', body)
return True
def request_and_do_a_review(self):
"""Request a new review, wait for it to be granted, then submit it."""
review_dashboard_url = (
'%s/reviewdashboard?unit=%s' % (self.host, LEGACY_REVIEW_UNIT_ID))
completed = False
while not completed:
# Get peer review dashboard and inspect it.
body = self.session.get(review_dashboard_url)
assert_contains('Assignments for your review', body)
assert_contains('Review a new assignment', body)
# Pick first pending review if any or ask for a new review.
draft_review_url = self.get_draft_review_url(body)
if draft_review_url: # There is a pending review. Choose it.
body = self.session.get(
'%s/%s' % (self.host, draft_review_url))
else: # Request a new assignment to review.
assert_contains('xsrf_token', body)
xsrf_token = self.get_hidden_field('xsrf_token', body)
data = {
'unit_id': LEGACY_REVIEW_UNIT_ID,
'xsrf_token': xsrf_token,
}
body = self.session.post(review_dashboard_url, data)
# It is possible that we fail to get a new review because the
# old one is now visible, but was not yet visible when we asked
# for the dashboard page.
if (
'You must complete all assigned reviews before you '
'can request a new one.' in body):
continue
            # It is possible that no submissions are available for review yet.
# Wait for a while until they become available on the dashboard
# page.
if 'Back to the review dashboard' not in body:
assert_contains('Assignments for your review', body)
# Sleep for a random number of seconds between 1 and 4.
time.sleep(1.0 + random.random() * 3.0)
continue
# Submit the review.
review_xsrf_token = self.get_js_var('assessmentXsrfToken', body)
answers = [
{'index': 0, 'type': 'choices', 'value': 0},
{'index': 1, 'type': 'regex',
'value': 'Review 0 by %s' % self.email},
]
data = {
'answers': json.dumps(answers),
'assessment_type': None,
'is_draft': 'false',
'key': self.get_js_var('assessmentGlobals.key', body),
'score': 0,
'unit_id': LEGACY_REVIEW_UNIT_ID,
'xsrf_token': review_xsrf_token,
}
body = self.session.post('%s/review' % self.host, data)
assert_contains('Your review has been submitted', body)
return True
def count_completed_reviews(self):
"""Counts the number of reviews that the actor has completed."""
review_dashboard_url = (
'%s/reviewdashboard?unit=%s' % (self.host, LEGACY_REVIEW_UNIT_ID))
body = self.session.get(review_dashboard_url)
num_completed = body.count('(Completed)')
return num_completed
class WelcomeNotificationLoadTest(LoadTest):
"""Tests registration confirmation notifications.
You must enable notifications in the target course for this test to be
meaningful. You must also swap the test class that's instantiated in
run_all, below.
"""
def run(self):
self.register_if_has_to()
def run_all(args):
"""Runs test scenario in multiple threads."""
if args.thread_count < 1 or args.thread_count > 256:
raise Exception('Please use between 1 and 256 threads.')
start_time = time.time()
logging.info('Started testing: %s', args.base_url)
logging.info('base_url: %s', args.base_url)
logging.info('start_uid: %s', args.start_uid)
logging.info('thread_count: %s', args.thread_count)
logging.info('iteration_count: %s', args.iteration_count)
logging.info('SLAs are [>30s, >15s, >7s, >3s, >1s, <1s]')
try:
for iteration_index in range(0, args.iteration_count):
logging.info('Started iteration: %s', iteration_index)
tasks = []
WebSession.PROGRESS_BATCH = args.thread_count
for index in range(0, args.thread_count):
test = PeerReviewLoadTest(
args.base_url,
(
args.start_uid +
iteration_index * args.thread_count +
index))
task = TaskThread(
test.run, name='PeerReviewLoadTest-%s' % index)
tasks.append(task)
try:
TaskThread.execute_task_list(tasks)
except Exception as e:
logging.info('Failed iteration: %s', iteration_index)
raise e
finally:
WebSession.log_progress(force=True)
logging.info('Done! Duration (s): %s', time.time() - start_time)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
run_all(PARSER.parse_args())
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import datetime as _datetime
import functools as _functools
import hashlib as _hashlib
import logging as _logging
import os as _os
import pprint as _pprint
import re as _re
import sched as _sched
import sys as _sys
import threading as _threading
import time as _time
import traceback as _traceback
import urllib as _urllib
import uuid as _uuid
from tornado.httpserver import HTTPServer as _HTTPServer
from tornado.ioloop import IOLoop as _IOLoop
from tornado.wsgi import WSGIContainer as _WSGIContainer
from urllib.parse import quote_plus as _url_escape
from urllib.parse import unquote_plus as _url_unescape
from xml.sax.saxutils import escape as _xml_escape
from xml.sax.saxutils import unescape as _xml_unescape
_log = _logging.getLogger("brbn")
_xhtml = "application/xhtml+xml; charset=utf-8"
_text = "text/plain"
_content_types_by_extension = {
".css": "text/css",
".pdf": "application/pdf",
".png": "image/png",
".html": _xhtml,
".jpeg": "image/jpeg",
".jpg": "image/jpeg",
".js": "application/javascript",
".json": "application/json",
".svg": "image/svg+xml",
".txt": _text,
".woff": "application/font-woff",
}
_page_template = """<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>{title}</title>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<link rel="stylesheet" href="/app.css" type="text/css"/>
<link rel="icon" href="" type="image/png"/>
<script src="/app.js" type="application/javascript" defer="defer"></script>
</head>
<body>
<div id="-head">
<div id="-head-content">
{head}
</div>
</div>
<div id="-body">
<div id="-body-content">
{body}
</div>
</div>
<div id="-foot">
<div id="-foot-content">
{foot}
</div>
</div>
</body>
</html>"""
_head_template = """{global_navigation}
{path_navigation}"""
_foot_template = """ """
def url_escape(string):
if string is None:
return
return _url_escape(string)
def url_unescape(string):
if string is None:
return
return _url_unescape(string)
_extra_entities = {
'"': """,
"'": "'",
"/": "/",
}
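# Marks a render method as producing trusted markup, so Template leaves its
# output unescaped (see Template._bind).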
def xml(meth):
meth._xml = True
return meth
def xml_escape(string):
if string is None:
return
return _xml_escape(string, _extra_entities)
def xml_unescape(string):
if string is None:
return
return _xml_unescape(string)
def compute_etag(content):
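    # A short content validator: the first 8 hex digits of the SHA-1 digest.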
return _hashlib.sha1(content).hexdigest()[:8]
def find_content_type(path, default=_text):
name, ext = _os.path.splitext(path)
return _content_types_by_extension.get(ext, default)
def _format_repr(obj, *args):
cls = obj.__class__.__name__
strings = [str(x) for x in args]
return "{}({})".format(cls, ",".join(strings))
class Error(Exception):
pass
class Application:
def __init__(self, home=None):
self._home = home
self._brbn_home = None
self._resources = dict()
self._root_resource = None
self._error_page = _ErrorPage(self)
self._sessions_by_id = dict()
self._session_expire_thread = _SessionExpireThread(self)
self.debug = "BRBN_DEBUG" in _os.environ
def __repr__(self):
return _format_repr(self, self.home)
@property
def spec(self):
return "{}:{}".format(self.__module__, self.__class__.__name__)
@property
def home(self):
return self._home
@property
def brbn_home(self):
return self._brbn_home
@property
def resources(self):
return self._resources
@property
def root_resource(self):
return self._root_resource
@root_resource.setter
def root_resource(self, resource):
assert isinstance(resource, Resource), resource
self._root_resource = resource
def load(self):
_log.info("Loading {}".format(self))
if self.brbn_home is not None:
brbn_files_dir = _os.path.join(self.brbn_home, "files")
self._add_files(brbn_files_dir)
if self.home is not None:
app_files_dir = _os.path.join(self.home, "files")
self._add_files(app_files_dir)
for path, resource in sorted(self.resources.items()):
resource.load()
def _add_files(self, files_dir):
if not _os.path.isdir(files_dir):
return
_log.debug("Loading files under {}".format(files_dir))
for root, dirs, files in _os.walk(files_dir):
for name in files:
fs_path = _os.path.join(root, name)
path = fs_path[len(files_dir):]
File(self, path, fs_path)
def init(self):
_log.info("Initializing {}".format(self))
if self.root_resource is None:
index = self.resources.get("/index.html")
if index is not None:
self.resources["/"] = index
self.root_resource = index
if self.root_resource is None:
raise Error("I can't find a root resource")
for path, resource in sorted(self.resources.items()):
resource.init()
def start(self):
_log.info("Starting {}".format(self))
self._session_expire_thread.start()
def __call__(self, env, start_response):
request = Request(self, env, start_response)
try:
return self._do_call(request)
except Exception as e:
_log.exception("Unexpected error")
return request.respond_unexpected_error(e)
def _do_call(self, request):
try:
request._load()
except _RequestError as e:
_log.exception("Request error")
return request.respond_error(e)
_log.debug("Receiving {}".format(request))
try:
return self.receive_request(request)
except _RequestError as e:
_log.exception("Request error")
return request.respond_error(e)
def receive_request(self, request):
try:
resource = self.resources[request.path]
except KeyError:
return request.respond_not_found()
request._resource = resource
return resource.receive_request(request)
class Request:
def __init__(self, app, env, start_response):
self._app = app
self._env = env
self._start_response = start_response
self._parameters = None
self._response_headers = list()
self._session = None
self._resource = None
def __repr__(self):
return _format_repr(self, self.path)
@property
def app(self):
return self._app
@property
def env(self):
return self._env
@property
def parameters(self):
return self._parameters
@property
def response_headers(self):
return self._response_headers
@property
def session(self):
return self._session
@property
def resource(self):
return self._resource
def _load(self):
self._parameters = self._parse_query_string()
session_id = self._parse_session_cookie()
if session_id is None:
self._session = Session(self.app)
else:
try:
self._session = self.app._sessions_by_id[session_id]
except KeyError:
self._session = Session(self.app)
self.session._touched = _datetime.datetime.now()
def _parse_query_string(self):
query_string = None
if self.method == "GET":
query_string = self.env["QUERY_STRING"]
elif self.method == "POST":
content_type = self.env["CONTENT_TYPE"]
assert content_type == "application/x-www-form-urlencoded"
length = int(self.env["CONTENT_LENGTH"])
query_string = self.env["wsgi.input"].read(length)
if not query_string:
return {}
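        # parse_qs positional arguments: keep_blank_values=False,
        # strict_parsing=True.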
try:
return _urllib.parse.parse_qs(query_string, False, True)
except ValueError:
raise _RequestError("I can't parse the query string '{}'".format
(query_string))
def _parse_session_cookie(self):
try:
cookie_string = self.env["HTTP_COOKIE"]
except KeyError:
return
for crumb in cookie_string.split(";"):
name, value = crumb.split("=", 1)
name = name.strip()
if name == "session":
return value.strip()
@property
def method(self):
return self.env["REQUEST_METHOD"]
@property
def path(self):
return self.env["PATH_INFO"]
def get(self, name, default=None):
try:
return self.parameters[name][0]
except KeyError:
return default
except IndexError:
return default
def require(self, name):
try:
return self.parameters[name][0]
except KeyError:
raise _RequestError("Parameter '{}' is missing".format(name))
except IndexError:
raise _RequestError("Parameter '{}' has no values".format(name))
def is_modified(self, server_etag):
client_etag = self.env.get("HTTP_IF_NONE_MATCH")
if client_etag is not None and server_etag is not None:
client_etag = client_etag[1:-1] # Strip quotes
return client_etag != server_etag
return True
def add_response_header(self, name, value):
self.response_headers.append((name, str(value)))
def respond(self, status, content=None, content_type=None):
        csp = "default-src 'self'"  # CSP is "directive value"; no colon
sts = "max-age=31536000"
self.add_response_header("Content-Security-Policy", csp)
self.add_response_header("Strict-Transport-Security", sts)
if self.session is not None:
# value = "session={}; Path=/; Secure; HttpOnly".format(self.session._id)
value = "session={}; Path=/; HttpOnly".format(self.session._id)
self.add_response_header("Set-Cookie", value)
if content is None:
self.add_response_header("Content-Length", 0)
self._start_response(status, self.response_headers)
return (b"",)
if isinstance(content, str):
content = content.encode("utf-8")
assert isinstance(content, bytes), type(content)
assert content_type is not None
content_length = len(content)
self.add_response_header("Content-Length", content_length)
self.add_response_header("Content-Type", content_type)
self._start_response(status, self.response_headers)
return (content,)
def respond_ok(self, content, content_type):
return self.respond("200 OK", content, content_type)
def respond_redirect(self, location):
self.add_response_header("Location", location)
return self.respond("303 See Other")
def respond_not_modified(self):
return self.respond("304 Not Modified")
def respond_not_found(self):
self.error_status = "404 Not Found"
self.error_title = "Not found!"
self.error_message = "I can't find a page or file for path '{}'" \
.format(self.path)
return self.app._error_page.send_response(self)
def respond_error(self, error):
self.error_status = "500 Internal Server Error"
self.error_title = "Error!"
self.error_message = str(error)
return self.app._error_page.send_response(self)
def respond_unexpected_error(self, exception):
try:
return self._do_respond_unexpected_error(exception)
except:
return self._respond_unexpected_error_fallback()
def _do_respond_unexpected_error(self, exception):
self.error_status = "500 Internal Server Error"
self.error_title = "Error!"
self.error_message = "Yikes! An unexpected problem: {}" \
.format(str(exception))
return self.app._error_page.send_response(self)
def _respond_unexpected_error_fallback(self):
content = _traceback.format_exc()
return self.respond("500 Internal Server Error", content, _text)
class _RequestError(Exception):
pass
class Resource:
def __init__(self, app, path):
self._app = app
self._path = path
self._content_type = find_content_type(path)
self.app.resources[self.path] = self
def __repr__(self):
return _format_repr(self, self.path)
@property
def app(self):
return self._app
@property
def path(self):
return self._path
def load(self):
_log.info("Loading {}".format(self))
def init(self):
_log.info("Initializing {}".format(self))
def get_content_type(self, request):
return self._content_type
def get_etag(self, request):
pass
def get_href(self, request, **params):
if not params:
return self.path
query_vars = list()
for name, value in sorted(params.items()):
query_vars.append("{}={}".format(url_escape(name), url_escape(value)))
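        # Parameters are joined with ';' rather than '&'.  Older Pythons'
        # parse_qs split on both by default; since the 3.10-era security
        # change the default separator is '&' only, so round-tripping these
        # hrefs through _parse_query_string may need separator=';' there.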
query_vars = ";".join(query_vars)
return "{}?{}".format(self.path, query_vars)
def get_title(self, request):
return self.path
def get_link(self, request, **params):
href = self.get_href(request, **params)
title = self.get_title(request)
return "<a href=\"{}\">{}</a>".format(href, xml_escape(title))
def receive_request(self, request):
self.process(request)
return self.send_response(request)
def send_response(self, request):
etag = self.get_etag(request)
if etag is not None:
if not request.is_modified(etag):
return request.respond_not_modified()
request.add_response_header("ETag", "\"{}\"".format(etag))
content = self.render(request)
content_type = self.get_content_type(request)
return request.respond_ok(content, content_type)
def process(self, request):
pass
def render(self, request):
raise NotImplementedError()
class File(Resource):
def __init__(self, app, path, fs_path):
super().__init__(app, path)
self._fs_path = fs_path
self._content = None
self._etag = None
def get_etag(self, request):
return self._etag
def load(self):
super().load()
with open(self._fs_path, "rb") as f:
self._content = f.read()
self._etag = compute_etag(self._content)
def process(self, request):
max_age = 120
if self.app.debug:
self.load()
max_age = 0
request.add_response_header("Cache-Control", "max-age={}".format(max_age))
def render(self, request):
return self._content
class Page(Resource):
def __init__(self, app, path, body_template):
super().__init__(app, path)
self._content_type = _xhtml
self._page_template = Template(_page_template, self)
self._head_template = Template(_head_template, self)
self._body_template = Template(body_template, self)
self._foot_template = Template(_foot_template, self)
@xml
def render(self, request):
return self._page_template.render(request)
@xml
def render_head(self, request):
return self._head_template.render(request)
@xml
def render_body(self, request):
return self._body_template.render(request)
@xml
def render_foot(self, request):
return self._foot_template.render(request)
def render_title(self, request):
return self.get_title(request)
@xml
def render_path_navigation(self, request):
links = self.get_path_links(request)
items = ["<li>{}</li>".format(x) for x in links]
items = "".join(items)
return "<ul id=\"-path-navigation\">{}</ul>".format(items)
def get_path_links(self, request):
if self is self.app.root_resource:
return [self.get_title(request)]
return [self.app.root_resource.get_link(request), self.get_title(request)]
@xml
def render_global_navigation(self, request):
return "<ul id=\"-global-navigation\"></ul>"
class Template:
@staticmethod
def _render_escaped(func):
@_functools.wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
if result is None:
return ""
return xml_escape(result)
return wrapper
@staticmethod
def _render_unescaped(func):
@_functools.wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
if result is None:
return ""
return result
return wrapper
def __init__(self, string, object):
self._string = string
self._object = object
self._elements = self._bind()
def __repr__(self):
return _format_repr(self)
def _bind(self):
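        # Split the template on "{name}" tokens.  A token whose name matches
        # a render_<name> method on the bound object becomes a callable
        # element; its output is XML-escaped unless the method is marked
        # with @xml.  Anything else is kept as literal text.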
elems = list()
tokens = _re.split("({.+?})", self._string)
for token in tokens:
if token.startswith("{") and token.endswith("}"):
meth_name = "render_{}".format(token[1:-1])
meth = getattr(self._object, meth_name, None)
if meth is not None:
assert callable(meth), meth_name
if hasattr(meth, "_xml"):
meth = self._render_unescaped(meth)
else:
meth = self._render_escaped(meth)
elems.append(meth)
continue
elems.append(token)
return elems
def render(self, request):
out = list()
for elem in self._elements:
if callable(elem):
elem = elem(request)
out.append(elem)
return "".join(out)
class FilePage(Page):
def __init__(self, app, path, file_path):
super().__init__(app, path, "{file_content}")
self._file_path = file_path
@xml
def render_file_content(self, request):
file = self.app.resources[self._file_path]
return file.render(request).decode()
class _AppInfoPage(Page):
template = """
<h1>{title}</h1>
<h2>Resources</h2>
{resources}
"""
def __init__(self, app, path):
super().__init__(app, path, self.template)
def get_title(self, request):
return "App info"
@xml
def render_resources(self, request):
items = list()
for path, resource in sorted(self.app.resources.items()):
items.append(resource.get_link(request))
items = "".join(["<li>{}</li>".format(x) for x in items])
return "<ul>{}</ul>".format(items)
class _RequestInfoPage(Page):
def __init__(self, app, path):
super().__init__(app, path, "{request_info}")
self._request_info = _RequestInfo()
def get_title(self, request):
return "Request info"
@xml
def render_request_info(self, request):
return self._request_info.render(request)
class _ErrorPage(Page):
template = """
<h1>{title}</h1>
<p>{message}</p>
<div class="hidden">{request_info}</div>
"""
def __init__(self, app):
super().__init__(app, "/error", self.template)
self._request_info = _RequestInfo()
def get_title(self, request):
return "Error!"
def render_title(self, request):
return request.error_title
def render_message(self, request):
return request.error_message
@xml
def render_request_info(self, request):
return self._request_info.render(request)
def send_response(self, request):
status = request.error_status
content = self.render(request)
content_type = self.get_content_type(request)
return request.respond(status, content, content_type)
class _RequestInfo(Template):
template = """
<h2>Traceback</h2>
{traceback}
<h2>Request</h2>
{request}
<h2>Application</h2>
{application}
<h2>System</h2>
{system}
"""
def __init__(self):
super().__init__(self.template, self)
def _render_attributes(self, attrs):
lines = list()
if isinstance(attrs, dict):
attrs = sorted(attrs.items())
for name, value in attrs:
value = _pprint.pformat(value)
value = value.replace("\n", "\n{}".format(" " * 24))
lines.append("{:22} {}".format(name, value))
return "<pre>{}</pre>".format(xml_escape("\n".join(lines)))
@xml
def render_traceback(self, request):
if _sys.exc_info()[1] is None:
return "<p>None</p>"
traceback = _traceback.format_exc()
return "<pre>{}</pre>".format(xml_escape(traceback))
@xml
def render_request(self, request):
attrs = (
("request.app", request.app),
("request.method", request.method),
("request.path", request.path),
("request.parameters", request.parameters),
("request.session", request.session),
("request.resource", request.resource),
)
return self._render_attributes(attrs)
@xml
def render_application(self, request):
attrs = (
("app.spec", request.app.spec),
("app.home", request.app.home),
("app.brbn_home", request.app.brbn_home),
("app.resources", request.app.resources),
("app.root_resource", request.app.root_resource),
)
return self._render_attributes(attrs)
@xml
def render_system(self, request):
attrs = (
("sys.argv", _sys.argv),
("sys.executable", _sys.executable),
("sys.path", _sys.path),
("sys.version", _sys.version),
("sys.platform", _sys.platform),
)
return self._render_attributes(attrs)
class Session:
def __init__(self, app):
self._app = app
self._id = str(_uuid.uuid4())
self._touched = _datetime.datetime.now()
self.app._sessions_by_id[self._id] = self
def __repr__(self):
return _format_repr(self, self._id[:8])
@property
def app(self):
return self._app
class _SessionExpireThread(_threading.Thread):
def __init__(self, app):
super().__init__()
self.app = app
self.daemon = True
self.scheduler = _sched.scheduler()
def run(self):
self.expire_sessions()
self.scheduler.run()
def expire_sessions(self):
try:
self.do_expire_sessions()
except:
_log.exception("Failure expiring sessions")
self.scheduler.enter(60, 1, self.expire_sessions)
def do_expire_sessions(self):
when = _datetime.datetime.now() - _datetime.timedelta(hours=1)
count = 0
for session in list(self.app._sessions_by_id.values()):
if session._touched < when:
del self.app._sessions_by_id[session._id]
count += 1
_log.debug("Expired {} client sessions".format(count))
class Server:
def __init__(self, app, port=8000):
self._app = app
self._port = port
self._tornado_server = _HTTPServer(_WSGIContainer(self._app))
def __repr__(self):
return _format_repr(self, self._app, self._port)
def run(self):
_log.info("Starting {}".format(self))
try:
self._tornado_server.listen(self._port)
except OSError as e:
msg = "Cannot listen on port {}: {}".format(self._port, str(e))
raise Error(msg)
_IOLoop.instance().start()
class Hello(Application):
def __init__(self, home):
super().__init__(home)
self.root_resource = _HelloPage(self)
self.app_page = _AppInfoPage(self, "/app")
self.request_page = _RequestInfoPage(self, "/request")
self.explode_page = _ExplodePage(self)
class _HelloPage(FilePage):
def __init__(self, app):
super().__init__(app, "/", "/hello.html.in")
def get_title(self, request):
return "Brbn"
class _ExplodePage(Page):
def __init__(self, app):
super().__init__(app, "/explode", "{explode}")
def get_title(self, request):
return "Explode!"
@xml
def render_explode(self, request):
raise Exception("Exploding!")
|
|
import unittest
from functools import wraps
import jsonstreamer
json_file_name = lambda test_fn: 'tests/json_files/' + test_fn.__name__[5:] + '.json'
def load_test_data(func):
"""loads some json from a file with the same name as the test"""
@wraps(func)
def wrapper(self, *args, **kwargs):
with open(json_file_name(func), encoding='utf-8') as json_file:
json_input = json_file.read()
return func(self, json_input)
return wrapper
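# A small sketch of the naming convention load_test_data relies on: the
# leading 'test_' is stripped from the test method name and the rest becomes
# the JSON file name (paths below are illustrative, derived from
# json_file_name above):
#
#     def test_simple_object(self): ...
#     json_file_name(test_simple_object)  # -> 'tests/json_files/simple_object.json'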
class JSONStreamerTests(unittest.TestCase):
def setUp(self):
self._assertions = []
self._streamer = jsonstreamer.JSONStreamer()
self._streamer.add_catch_all_listener(self._catch_all)
def tearDown(self):
self._streamer.close()
self.assertEqual(len(self._assertions), 0)
def _catch_all(self, event_name, *args):
value = args[0] if args else None
# print('Asserting event_name: {} , value : {}'.format(event_name,value))
try:
e, v = self._assertions.pop(0)
except IndexError:
raise AssertionError('not enough asserts')
self.assertEqual(event_name, e)
self.assertEqual(value, v)
@load_test_data
def test_simple_object(self, json_input):
self._assertions = [('doc_start', None),
('object_start', None),
('key', 'apple'),
('value', 8),
('key', 'banana'),
('value', 'many'),
('object_end', None),
('doc_end', None)]
self._streamer.consume(json_input)
class ObjectStreamerTests(unittest.TestCase):
def setUp(self):
self._assertions = []
self._streamer = jsonstreamer.ObjectStreamer()
self._streamer.add_catch_all_listener(self._catch_all)
def tearDown(self):
self._streamer.close()
self.assertEqual(len(self._assertions), 0)
def _catch_all(self, event_name, *args):
value = args[0] if args else None
# print('\nAsserting event_name: {} , value : {}'.format(event_name, value))
try:
expected_event, expected_value = self._assertions.pop(0)
except IndexError:
raise AssertionError('not enough asserts')
self.assertEqual(event_name, expected_event)
self._assert_value(expected_value, value)
def _assert_value(self, expected_value, value):
if value and isinstance(value, tuple) and len(value) == 2:
if isinstance(value[1], dict):
self.assertDictEqual(value[1], expected_value[1])
self.assertEqual(expected_value[0], value[0])
elif isinstance(value[1], list):
self.assertListEqual(value[1], expected_value[1])
self.assertEqual(expected_value[0], value[0])
else:
self.assertEqual(value, expected_value)
else:
self.assertEqual(value, expected_value)
@load_test_data
def test_nested_dict(self, json_input):
self._assertions = [('object_stream_start', None),
('pair', ('params', {'dependencies': [{'app': 'Example'}]})),
('object_stream_end', None)]
self._streamer.consume(json_input)
@load_test_data
def test_array(self, json_input):
self._assertions = [('array_stream_start', None),
('element', "a"),
('element', 2),
('element', True),
('element', {"apple": "fruit"}),
('array_stream_end', None)]
self._streamer.consume(json_input)
@load_test_data
def test_spl_chars_in_value(self, json_input):
self._assertions = [('object_stream_start', None),
('pair',
('employees',
[
{"first Name": "Jo:hn", "lastName": "Doe,Foe"},
{"firstName": "An\\na", "lastName": "Smith Jack"},
{"firstName": "Peter", "lastName": "Jones"},
True,
745
]
)
),
('object_stream_end', None)]
self._streamer.consume(json_input)
@load_test_data
def test_space_preservation(self, json_input):
self._assertions = [('object_stream_start', None),
('pair', ('between space', ' before space')),
('pair', ('after space ', ' all spaces ')),
('object_stream_end', None)
]
self._streamer.consume(json_input)
@load_test_data
def test_arbit_1(self, json_input):
self._assertions = [('object_stream_start', None),
('pair', ('to', '8743d93a')),
('pair', ('type', 'response')),
('pair', ('payload',
{'request_id': '0f2d9b9c',
'result':
{'type': 'allopathy',
'manufacturer': {'url': 'johnsons.com', 'id': 5, 'name': 'johnsons'},
'name': 'crocin 200 mg',
'brand': 'crocin',
'image_urls': ['http//1example.com/3', 'http//1example.com/2'],
'price': 200.0,
'attributes': [
{'value': 'strip', 'key': 'pack_form', 'display_name': 'pack form'},
{'value': 'tablet', 'key': 'drug_form', 'display_name': 'drug form'},
{'value': '200 mg', 'key': 'strength', 'display_name': 'strength'},
{'value': 'paracetamol', 'key': 'name', 'display_name': 'name'},
{'value': 30, 'key': 'units_in_pack', 'display_name': 'units in pack'}],
'sku_id': 91,
'units_in_pack': 30
}
}
)
),
('pair', ('entity', None)),
('pair', ('pid', '43abc6be')),
('object_stream_end', None)]
self._streamer.consume(json_input)
class ObjectStreamerListenerTests(unittest.TestCase):
def setUp(self):
        self._assertions = []
        self._streamer = jsonstreamer.ObjectStreamer()
def tearDown(self):
self._streamer.close()
self.assertEqual(len(self._assertions), 0)
@load_test_data
def test_on_element(self, json_input):
self._assertions = ["a", 2, True, {"apple": "fruit"}]
def _on_element(value):
try:
expected_value = self._assertions.pop(0)
except IndexError:
raise AssertionError('not enough asserts')
self.assertEqual(expected_value, value)
self._streamer.add_listener('element', _on_element)
self._streamer.consume(json_input)
@load_test_data
def test_on_element_multiple_parses(self, json_input):
self._assertions = ["a", 2, True, {"apple": "fruit"}, ]
def _on_element(value):
try:
expected_value = self._assertions.pop(0)
except IndexError:
raise AssertionError('not enough asserts')
self.assertEqual(expected_value, value)
self._streamer.add_listener('element', _on_element)
self._streamer.consume(json_input[0:8])
self._streamer.consume(json_input[8:])
if __name__ == '__main__':
unittest.main(verbosity=2)
|
|
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import logging
import os
import re
import sys
import webapp2
from google.appengine.ext.webapp import template
from handlers import backroomarthelper
from handlers.common import *
from models import FeaturedArtwork
def artwork_dict(a):
d = dict(
id=a.key().id(),
title=a.title,
byline=a.byline,
imageUri=a.image_url,
thumbUri=a.thumb_url,
detailsUri=a.details_url,
publishDate=date_to_timestamp(a.publish_date),)
if a.attribution:
d['attribution'] = a.attribution
return d
class BaseBackroomHandler(webapp2.RequestHandler):
def handle_exception(self, exception, debug):
# Log the error.
logging.exception(exception)
# Set a custom message.
self.response.write(exception.message)
# If the exception is a HTTPException, use its error code.
# Otherwise use a generic 500 error code.
if isinstance(exception, webapp2.HTTPException):
self.response.set_status(exception.code)
else:
self.response.set_status(500)
class ServiceListHandler(BaseBackroomHandler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(self.render())
def render(self):
start = datetime.date(day=1,
month=int(self.request.get('month')) + 1,
year=int(self.request.get('year')))
start -= datetime.timedelta(weeks=2)
queue = (FeaturedArtwork.all()
.filter('publish_date >=', start)
.order('publish_date')
.fetch(1000))
return json.dumps([artwork_dict(a) for a in queue])
class ServiceAddHandler(BaseBackroomHandler):
def post(self):
artwork_json = json.loads(self.request.get('json'))
publish_date = (datetime.datetime
.utcfromtimestamp(artwork_json['publishDate'] / 1000)
.date())
        if FeaturedArtwork.all().filter('publish_date =', publish_date).get() != None:
webapp2.abort(409, message='Artwork already exists for this date.')
crop_tuple = tuple(float(x) for x in json.loads(self.request.get('crop')))
new_image_url, new_thumb_url = backroomarthelper.maybe_process_image(
artwork_json['imageUri'],
crop_tuple,
publish_date.strftime('%Y%m%d') + ' '
+ artwork_json['title'] + ' '
+ artwork_json['byline'])
if not new_thumb_url and 'thumbUri' in artwork_json:
new_thumb_url = artwork_json['thumbUri']
new_artwork = FeaturedArtwork(
title=artwork_json['title'],
byline=artwork_json['byline'],
attribution=artwork_json['attribution'] if 'attribution' in artwork_json else None,
image_url=new_image_url,
thumb_url=new_thumb_url,
details_url=artwork_json['detailsUri'],
publish_date=publish_date)
new_artwork.save()
self.response.set_status(200)
class ServiceAddFromExternalArtworkUrlHandler(BaseBackroomHandler):
def post(self):
publish_date = (datetime.datetime
.utcfromtimestamp(int(self.request.get('publishDate')) / 1000)
.date())
new_artwork = backroomarthelper.add_art_from_external_details_url(
publish_date,
self.request.get('externalArtworkUrl'))
self.response.set_status(200)
self.response.out.write(json.dumps(artwork_dict(new_artwork)))
class ServiceEditHandler(BaseBackroomHandler):
def post(self):
id = long(self.request.get('id'))
artwork_json = json.loads(self.request.get('json'))
crop_tuple = tuple(float(x) for x in json.loads(self.request.get('crop')))
target_artwork = FeaturedArtwork.get_by_id(id)
if not target_artwork:
webapp2.abort(404)
target_artwork.title = artwork_json['title']
target_artwork.byline = artwork_json['byline']
target_artwork.attribution = artwork_json['attribution'] if 'attribution' in artwork_json else None
new_image_url, new_thumb_url = backroomarthelper.maybe_process_image(
artwork_json['imageUri'],
crop_tuple,
target_artwork.publish_date.strftime('%Y%m%d') + ' '
+ artwork_json['title'] + ' '
+ artwork_json['byline'])
if not new_thumb_url and 'thumbUri' in artwork_json:
new_thumb_url = artwork_json['thumbUri']
target_artwork.image_url = new_image_url
target_artwork.thumb_url = new_thumb_url
target_artwork.details_url = artwork_json['detailsUri']
target_artwork.save()
self.response.set_status(200)
self.response.out.write(json.dumps(artwork_dict(target_artwork)))
class ServiceMoveHandler(BaseBackroomHandler):
def post(self):
id = long(self.request.get('id'))
publish_date = (datetime.datetime
.utcfromtimestamp(long(self.request.get('publishDate')) / 1000)
.date())
target_artwork = FeaturedArtwork.get_by_id(id)
if not target_artwork:
webapp2.abort(404)
# shift other artworks over
self.move_artwork(target_artwork, publish_date, target_artwork.key().id())
self.response.set_status(200)
def move_artwork(self, artwork, publish_date, initial_artwork_id):
# cascade moves
current_artwork_at_date = FeaturedArtwork.all().filter('publish_date =', publish_date).get()
if current_artwork_at_date and current_artwork_at_date.key().id() != initial_artwork_id:
self.move_artwork(current_artwork_at_date, publish_date + datetime.timedelta(hours=24),
initial_artwork_id)
artwork.publish_date = publish_date
artwork.save()
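    # A sketch of the cascade above (dates are illustrative): moving artwork A
    # onto June 1 when artwork B already occupies June 1 first moves B to
    # June 2; if June 2 is occupied by C, C moves to June 3, and so on. The
    # initial_artwork_id check stops the recursion if the chain ever reaches
    # the artwork that started the move.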
class ServiceRemoveHandler(BaseBackroomHandler):
def post(self):
id = long(self.request.get('id'))
target_artwork = FeaturedArtwork.get_by_id(id)
if not target_artwork:
webapp2.abort(404)
target_artwork.delete()
self.response.set_status(200)
class ScheduleHandler(BaseBackroomHandler):
def get(self):
self.response.out.write(self.render())
def render(self):
return template.render(
os.path.join(os.path.dirname(__file__), '../templates/backroom_schedule.html'),
values_with_defaults(dict(
title='Schedule',
)))
app = webapp2.WSGIApplication([
('/backroom/s/list', ServiceListHandler),
('/backroom/s/add', ServiceAddHandler),
('/backroom/s/addfromexternal', ServiceAddFromExternalArtworkUrlHandler),
('/backroom/s/edit', ServiceEditHandler),
('/backroom/s/remove', ServiceRemoveHandler),
('/backroom/s/move', ServiceMoveHandler),
('/backroom/schedule', ScheduleHandler),
],
debug=IS_DEVELOPMENT)
def main():
app.run()
if __name__ == '__main__':
main()
|
|
from PySide.QtCore import *
from PySide.QtGui import *
from PySide.QtUiTools import QUiLoader
from datetime import datetime
import logging
import math
import os
import re
import string
import subprocess
import sys
import unicodedata
import tempfile
import stat
LOG = logging.getLogger('storyTime.utils')
def isFrozen():
"""
Return whether we are frozen via py2exe.
This will affect how we find out where we are located.
"""
return hasattr(sys, 'frozen')
def modulePath(withinPackage=False):
""" Return the program's directory, even when frozen via py2exe. """
if isFrozen():
return os.path.dirname(sys.executable)
else:
dir_ = os.path.dirname(__file__)
if withinPackage:
return dir_
else:
return os.path.dirname(dir_)
def loadUi(path, parent):
""" Load the given ui file relative to this package """
fullPath = os.path.join(modulePath(True), path)
LOG.debug(fullPath)
if not os.path.isfile(fullPath):
raise ValueError('ui file not found: {0}'.format(fullPath))
loader = QUiLoader()
file_ = QFile(fullPath)
file_.open(QFile.ReadOnly)
widget = loader.load(file_, parent)
attachUi(widget, parent)
file_.close()
return widget
def attachUi(widget, parent):
if parent is None:
return
if parent.layout() is None:
layout = QVBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
parent.setLayout(layout)
LOG.debug('adding {0} to {1}'.format(widget, parent))
parent.layout().addWidget(widget)
def enum(*args):
enums = dict(zip(args, range(len(args))))
enums['names'] = args
return type('enum', (), enums)
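# A minimal usage sketch of enum() above (the names are illustrative):
#
#     Colors = enum('RED', 'GREEN', 'BLUE')
#     Colors.RED    # 0
#     Colors.GREEN  # 1
#     Colors.names  # ('RED', 'GREEN', 'BLUE')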
def openDir(dir_):
"""
Open the given directory using the platform appropriate file browser
"""
dir_ = os.path.expanduser(dir_)
if os.path.isfile(dir_):
dir_ = os.path.dirname(dir_)
if os.path.isdir(dir_):
if sys.platform == 'win32':
dir_ = os.path.normpath(dir_)
subprocess.Popen(['explorer.exe', dir_])
elif sys.platform == 'darwin':
subprocess.Popen(['open', dir_])
        elif sys.platform.startswith('linux'):
            # assume a freedesktop environment where xdg-open is available
            subprocess.Popen(['xdg-open', dir_])
def get_latest_version(dirname):
"""Return the latest version of the given filename"""
    # [vV], not [v|V]: a '|' inside a character class matches a literal pipe
    VERS_RE = re.compile(r'(?P<vers>[vV]\d+)')
try:
filename = os.path.join(dirname, os.listdir(dirname)[0])
dir_, base = os.path.split(filename)
        pat = VERS_RE.sub(r'[vV]\d{3}', base.replace('.', r'\.'))
matches = [x for x in os.listdir(dir_) if re.match(pat, x) and os.path.isfile(os.path.join(dir_, x))]
return sorted(matches)[-1]
except:
return None
def timeString():
d = datetime.now()
return d.strftime('%Y%m%d_%H%M%S')
def normalizeFilename(filename):
validChars = "-_.() %s%s" % (string.ascii_letters, string.digits)
clean = unicodedata.normalize('NFKD', unicode(filename)).encode('ASCII', 'ignore')
return ''.join(c for c in clean if c in validChars).replace(' ', '_')
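# Worked example for normalizeFilename (input is illustrative): accented
# characters are decomposed and dropped, then spaces become underscores.
#
#     normalizeFilename(u'r\xe9sum\xe9 (final).txt')  # -> 'resume_(final).txt'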
# time based media functions
TIMECODE_FMT = '{hr:02}:{min:02}:{sec:02}:{frame:02}'
FPS = 24
def getTimecode(frame, fps=FPS, percentage=False, timeCodeFmt=TIMECODE_FMT):
"""
Convert a frame number to a timecode starting at 00:00:00:00
``frame`` -- the frame to convert to a time code
``fps`` -- the frame rate to use for converting
``percentage`` -- if True, will calculate frame digit as a percentage of the fps
``timeCodeFmt`` -- a string representing how to format the timecode.
should have the keys 'hr', 'min', 'sec', 'frame'
"""
decimal = frame % fps
    if percentage:
        # use float division so the percentage is correct under Python 2 too
        decimal = decimal / float(fps) * 100.0
seconds = float(frame) / fps
minutes = seconds / 60.0
hours = int(math.floor(minutes / 60.0))
minutes = int(math.floor(minutes - hours * 60))
seconds = int(math.floor(seconds - minutes * 60))
decimal = int(decimal)
return timeCodeFmt.format(hr=hours, min=minutes, sec=seconds, frame=decimal)
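# A couple of worked examples for getTimecode, assuming the default 24 fps:
#
#     getTimecode(0)                    # '00:00:00:00'
#     getTimecode(25)                   # '00:00:01:01' (1 second + 1 frame)
#     getTimecode(25, percentage=True)  # '00:00:01:04' (1/24 of a second ~ 4%)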
def launchSubprocess(cmd, win_showWindow=False, **kwargs):
'''
Helper function for launching subprocesses between mac and windows
Returns the proc
'''
platform = getOS()
if platform == "windows":
LOG.debug("Subprocess Command: {0}".format(cmd))
if not win_showWindow:
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
kwargs['startupinfo'] = startupinfo
proc = subprocess.Popen(cmd, **kwargs)
elif platform in ['mac', 'linux']:
LOG.debug("Subprocess Command: {0}".format(cmd))
proc = subprocess.Popen(cmd, shell=True, **kwargs)
return proc
def getTmpPath(suffix=""):
'''
Get a temp file name using tempfile.
We only want the filename, so we'll remove the file tempfile creates.
'''
tmpFile = tempfile.mkstemp(suffix=suffix, text=True)[1]
try:
os.remove(tmpFile)
except:
pass
return tmpFile
def launchSubprocessInShell(cmd, keepOpen=False):
'''
Launch the supplied command in a shell window for any OS
'''
platform = getOS()
if platform == 'mac':
ext = ".command"
path = getTmpPath(ext)
content = "{0}".format(cmd, path)
if not keepOpen:
content += "\nexit"
f = open(path, 'w')
f.write(content)
f.close()
st = os.stat(path)
os.chmod(path, st.st_mode | stat.S_IEXEC)
cmd = "open \"{0}\"".format(path)
launchSubprocess(cmd)
LOG.debug("Subprocess Temp File: {0}".format(path))
if platform == 'windows':
ext = ".bat"
path = getTmpPath(ext)
path = os.path.normpath(path)
content = "{0}".format(cmd, path)
if not keepOpen:
content += "\nexit"
f = open(path, 'w')
f.write(content)
f.close()
cmd = "start {0}".format(path)
launchSubprocess(cmd, shell=True)
LOG.debug("Subprocess Temp File: {0}".format(path))
def getOS():
'''
Get the os of the current system in a standard format
'''
if ((sys.platform.lower() == "win32") or (sys.platform.lower() == "win64")):
return "windows"
elif (sys.platform.lower() == "darwin"):
return "mac"
else:
return "linux"
|
|
#!/usr/bin/python
import os
import sys
import asyncio
import tty
import termios
import fcntl
import re
import curses
class Caps:
    def __init__(self):
self.has_vt102 = False
self.type = None
#write = sys.stdout.write
#
#def color_test():
# text="xYz"; # Some test text
#
# write("\n 40m 41m 42m 43m 44m 45m 46m 47m")
#
# for fg in ["m", "1m", "30m", "1;30m", "31m", "1;31m", "32m",
# "1;32m", "33m", "1;33m", "34m", "1;34m", "35m", "1;35m",
# "36m", "1;36m", "37m", "1;37m"]:
# write(" {:>5} \033[{} {} ".format(fg, fg, text))
# for bg in ["40m", "41m", "42m", "43m", "44m", "45m", "46m", "47m"]:
# write(" \033[{}\033[{} {} \033[0m".format(fg, bg, text))
# write("\n")
#
# write("\n")
#
#color_test()
# yield from request_message("\033[c")
queue = asyncio.Queue()
def reader():
while 1:
try:
data = sys.stdin.buffer.read(1)
if len(data) > 0:
                # asyncio.async() is gone in modern Python (async is now a
                # keyword); ensure_future is the equivalent
                asyncio.ensure_future(queue.put(data[0]))
else:
break
except:
# throws on out of data
break
def send(cmd):
"""Sends a command; accepts either string or bytes."""
if isinstance(cmd, str):
cmd = cmd.encode("utf8")
sys.stdout.buffer.write(cmd)
sys.stdout.buffer.flush()
async def identify_test():
# identify test
send("\033[c")
data = await queue.get()
if data != 0o33:
send("got bad byte {:x}\r\n".format(data))
return
else:
caps = Caps()
buf = ""
while 1:
data = await queue.get()
ch = chr(data)
buf += ch
if ch == "c":
                break
if buf == "[?1;2c": # VT100 with Advanced Video Option
caps.type = "VT100"
caps.has_vt102 = False
elif buf == "[?1;0c": # VT101 with No Options
caps.type = "VT101"
caps.has_vt102 = False
elif buf == "[?6c": # VT102
caps.type = "VT102"
caps.has_vt102 = True
else:
m = re.match(r"\[\?6(\d)(?:;\d+)*c", buf)
if m:
# note that we don't care about the parameters
                t = m.group(1)
                if t == "2":
                    caps.type = "VT220"
                    caps.has_vt102 = True
                elif t == "3":
                    caps.type = "VT320"
                    caps.has_vt102 = True
                elif t == "4":
                    caps.type = "VT420"
                    caps.has_vt102 = True
if caps.type:
return caps
else:
send("received unexpected identify response: '{}'\r\n".format(buf))
async def cursor_command_test():
"""
vt100 basics:
tests moving cursor up (A), down (B), left (D), and right one step (C),
direct moves (with H), clear line (K) and clear to bottom (J)
"""
# go home
send("\033[H")
# clear screen
send("\033[J")
# print top line with junk
send("cursor junkstuff\r\n")
# draw a v (default to 1 moves)
send("\033[B\\")
send("\033[B\\")
send("\033[Bv")
send("\033[A/")
send("\033[A/")
# draw a line below it (default to 1 moves)
send("\033[B\033[B\033[B")
send("\033[D\033[D\033[D\033[D\033[D+")
send("\033[C-")
send("\033[C+")
# draw a line above it (default to 1 moves)
send("\033[A\033[A\033[A\033[A\033[D+")
send("\033[D\033[D\033[D-")
send("\033[D\033[D\033[D+")
# direct move, draw some text
send("\033[1;12H testshouldn't see this")
# direct move, clear to eol
send("\033[1;17H\033[K")
# direct move and overwrite
send("\033[1;8Hmove")
# some more moves (multiline/column), two spirals
send("\033[2;30H/") # top left
send("\033[2B\033[D>")
send("\033[2B\033[D\\") # lower left
send("\033[3C^")
send("\033[3C/") # lower right
send("\033[2A\033[D<")
send("\033[2A\033[D\\") # top right
send("\033[5Dv")
# and a jump, then back, with 1's this time
send("\033[4;34H*") # center
send("\033[1A\033[1D|")
send("\033[1A---")
send("\033[1B|")
send("\033[2B\033[1D|")
send("\033[1B\033[4D---")
send("\033[7D---")
send("\033[1A\033[4D|")
send("\033[2A\033[1D|")
send("\033[1A---")
# and the rest of the cross (with a 1C)
send("\033[2B\033[3D---")
send("\033[1C---")
send("\033[1B\033[4D|")
# move cursor to line 7, col 1, write a bunch of junk
send("\033[7;1H")
send("12345678901234567890\r\n")
send("12345678901234567890\r\n")
send("12345678901234567890\r\n")
send("12345678901234567890\r\n")
send("12345678901234567890\r\n")
send("12345678901234567890\r\n")
send("12345678901234567890\r\n")
# move cursor to line 7, col 10, clear below
send("\033[7;10H\033[J")
# move cursor to line 7, col 1
send("\033[7;1H")
send("above should match below:\r\n")
send("cursor move test\r\n")
send("+ - + /---v---\\\r\n")
send("\\ / | | |\r\n")
send(" \\ / >---*---<\r\n")
send(" v | | |\r\n")
send("+ - + \\---^---/\r\n")
# move cursor to line 1, col 30, write some inverted text
send("\033[1;30H\033[7minver\033[0;7mted\033[0m not-inverted")
    # do it on line 8 too (without the mid-word style reset, but still inverted)
send("\033[8;30H\033[7minverted\033[0m not-inverted")
# move back to end of output
send("\033[14;1H")
async def insert_erase_test():
# go home
send("\033[H")
# clear screen
send("\033[J")
# draw some lines
send("inserterasetest\r\n")
send("+v +\r\n")
send(" |||\r\n")
send(" + ^ +\r\n")
# do some inserts (1@)
send("\033[1;7H\033[1@")
send("\033[1;13H\033[1@")
send("\033[2;2H\033[1@-\033[C\033[1@-")
send("\033[B\033[2C\033[3D\033[1@x\033[C\033[1@x")
# do some erases (1P)
send("\033[2;5H\033[1P\033[1P")
# and a couple erases (incl a longer one)
send("\033[2B\033[4D\033[3P")
send("\033[C-\033[C-")
# then a couple multiple inserts
send("\033[4D\033[2@")
send("\033[2A\033[2D\033[2@")
# print some text
send("\033[3Bgood text\r\nbad text\r\nbext")
# delete second line
send("\033[A\033[M")
# go to beginning of line, insert mode "more", then overwrite again
send("\033[4D\033[4hmore \033[4lt")
# insert line before first
send("\033[A\033[1L\033[6Dfirst text")
# move cursor to line 8, col 1
send("\033[8;1H")
send("above should match below:\r\n")
send("insert erase test\r\n")
send(" +-v-+\r\n")
send(" |x|x|\r\n")
send(" +-^-+\r\n")
send("first text\r\n")
send("good text\r\n")
send("more text\r\n")
# move back to end of output
send("\033[15;1H")
async def terminfo_test():
pass # todo
async def keypress():
send("press a key...")
await queue.get()
async def run():
caps = await identify_test()
await cursor_command_test()
await keypress()
await insert_erase_test()
# wrap line for next
send("\r\n")
def main():
# we want raw input, but still to get break chars
old_settings = termios.tcgetattr(sys.stdin)
fd = sys.stdin.fileno()
tty.setcbreak(fd)
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
curses.setupterm()
try:
# Register the file descriptor for read event
loop = asyncio.get_event_loop()
loop.add_reader(sys.stdin, reader)
loop.run_until_complete(run())
loop.close()
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
if __name__ == '__main__':
main()
"""
from tmux' ansicode.txt
Minimum requirements for VT100 emulation:
2) To enter data in VT100 mode, implement the 4 cursor keys and the 4 PF keys.
It must be possible to enter ESC, TAB, BS, DEL, and LF from the keyboard.
[A Sent by the up-cursor key (alternately ESC O A)
[B Sent by the down-cursor key (alternately ESC O B)
[C Sent by the right-cursor key (alternately ESC O C)
[D Sent by the left-cursor key (alternately ESC O D)
OP PF1 key sends ESC O P
OQ PF2 key sends ESC O Q
OR PF3 key sends ESC O R
OS PF4 key sends ESC O S
[c Request for the terminal to identify itself
[?1;0c VT100 with memory for 24 by 80, inverse video character attribute
[?1;2c VT100 capable of 132 column mode, with bold+blink+underline+inverse
3) When doing full-screen editing on a VT100, implement directed erase, the
numeric keypad in applications mode, and the limited scrolling region.
The latter is needed to do insert/delete line functions without rewriting
the screen.
[0J Erase from current position to bottom of screen inclusive
[1J Erase from top of screen to current position inclusive
[2J Erase entire screen (without moving the cursor)
[0K Erase from current position to end of line inclusive
[1K Erase from beginning of line to current position inclusive
[2K Erase entire line (without moving cursor)
[12;24r Set scrolling region to lines 12 thru 24. If a linefeed or an
INDex is received while on line 24, the former line 12 is deleted
and rows 13-24 move up. If a RI (reverse Index) is received while
on line 12, a blank line is inserted there as rows 12-13 move down.
All VT100 compatible terminals (except GIGI) have this feature.
ESC = Set numeric keypad to applications mode
ESC > Set numeric keypad to numbers mode
OA Up-cursor key sends ESC O A after ESC = ESC [ ? 1 h
OB Down-cursor key sends ESC O B " " "
OC Right-cursor key sends ESC O C " " "
OD Left-cursor key sends ESC O D " " "
OM ENTER key sends ESC O M after ESC =
Ol COMMA on keypad sends ESC O l " " (that's lowercase L)
Om MINUS on keypad sends ESC O m " "
Op ZERO on keypad sends ESC O p " "
Oq ONE on keypad sends ESC O q " "
Or TWO on keypad sends ESC O r " "
Os THREE on keypad sends ESC O s " "
Ot FOUR on keypad sends ESC O t " "
Ou FIVE on keypad sends ESC O u " "
Ov SIX on keypad sends ESC O v " "
Ow SEVEN on keypad sends ESC O w " "
Ox EIGHT on keypad sends ESC O x " "
Oy NINE on keypad sends ESC O y " "
4) If the hardware is capable of double width/double height:
#3 Top half of a double-width double-height line
#4 Bottom half of a double-width double-height line
#5 Make line single-width (lines are set this way when cleared by ESC [ J)
#6 Make line double-width normal height (40 or 66 characters)
[0i Print screen (all 24 lines) to the printer
[4i All received data goes to the printer (nothing to the screen)
[5i All received data goes to the screen (nothing to the printer)
"""
#echo 'Press any key to continue...'; read -k1 -s
|
|
#!/usr/bin/env python
import math
import rospy
from geometry_msgs.msg import Twist
from tf2_msgs.msg import TFMessage
from std_msgs.msg import Empty # for land/takeoff/emergency
from std_msgs.msg import Int8
from ardrone_autonomy.msg import Navdata # for receiving navdata feedback
from keyboard_controller import KeyboardController
class DroneStatus(object):
Emergency = 0
Inited = 1
Landed = 2
Flying = 3
Hovering = 4
Test = 5
TakingOff = 6
GotoHover = 7
Landing = 8
Looping = 9
class ObjectTracker(object):
def __init__(self):
self.state = 0
self.ardrone_state = -1
        print('Rescueranger initializing')
self.vision_pose = TFMessage()
self.marker_seen = False
self.pub = rospy.Publisher(
'object_tracker/pref_pos',
Twist,
queue_size=10)
# Subscribe to the /ardrone/navdata topic, of message type navdata, and
# call self.ReceiveNavdata when a message is received
self.subNavdata = rospy.Subscriber(
'/ardrone/navdata',
Navdata,
self.callback_ardrone_navdata)
# Allow the controller to publish to the /ardrone/takeoff, land and
# reset topics
        self.pubLand = rospy.Publisher('/ardrone/land', Empty, queue_size=1)
        self.pubTakeoff = rospy.Publisher('/ardrone/takeoff', Empty, queue_size=1)
        self.pubReset = rospy.Publisher('/ardrone/reset', Empty, queue_size=1)
# Allow the controller to publish to the /cmd_vel topic and thus control
# the drone
self.command = Twist()
self.pubCommand = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
# Keyboard Controller
self.keyboard = KeyboardController()
self.keyboard.tracker = self
print('...')
rospy.init_node('object_tracker', anonymous=True)
rospy.Subscriber(
"/tf",
TFMessage,
self.callback_vision_pose)
# VISP STATE
self.visp_state = -1
rospy.Subscriber(
'/visp_auto_tracker/status',
Int8,
self.callback_visp_state)
self.visp_state_map = {
            'Waiting': 0,  # Not detecting any pattern, just receiving images
'Detect_flash': 1, # Pattern detected
# Model successfully initialized (from wrl & xml files)
'Detect_model': 2,
'Track_model': 3, # Tracking model
# Detecting pattern in a small region around where the pattern was
# last seen
'Redetect_flash': 4,
            'Detect_flash2': 5  # Detecting pattern in the whole frame
}
print('...')
# rospy.Subscriber(
# "/ardrone/predictedPose",
# PoseStamped,
# self.callback_ardrone_prediction)
        print('...initialized\n')
rospy.on_shutdown(self.ardrone_send_land)
def callback_ardrone_navdata(self, navdata):
# Although there is a lot of data in this packet, we're only interested
# in the state at the moment
if self.ardrone_state != navdata.state:
print("Recieved droneState: %d" % navdata.state)
self.ardrone_state = navdata.state
def ardrone_send_takeoff(self):
# Send a takeoff message to the ardrone driver
# Note we only send a takeoff message if the drone is landed - an
# unexpected takeoff is not good!
if self.ardrone_state == DroneStatus.Landed:
self.pubTakeoff.publish(Empty())
def ardrone_send_land(self):
# Send a landing message to the ardrone driver
# Note we send this in all states, landing can do no harm
self.pubLand.publish(Empty())
def ardrone_send_emergency(self):
# Send an emergency (or reset) message to the ardrone driver
self.pubReset.publish(Empty())
print("Publishing reset")
def ardrone_set_xyzy(self, x=0, y=0, z=0, yaw=0):
# Called by the main program to set the current command
self.command.linear.x = x
self.command.linear.y = y
self.command.linear.z = z
self.command.angular.z = yaw
def ardrone_update_rpyz(self, event):
# The previously set command is then sent out periodically if the drone
# is flying
if (
self.ardrone_state == DroneStatus.Flying or
self.ardrone_state == DroneStatus.GotoHover or
self.ardrone_state == DroneStatus.Hovering
):
self.pubCommand.publish(self.command)
    def ardrone_send_command(self, event):
        # Duplicates ardrone_update_rpyz; kept so callers using this name
        # continue to work.
        if (
            self.ardrone_state == DroneStatus.Flying or
            self.ardrone_state == DroneStatus.GotoHover or
            self.ardrone_state == DroneStatus.Hovering
        ):
            self.pubCommand.publish(self.command)
def callback_vision_pose(self, pose):
if pose.transforms[0].child_frame_id == "ardrone_base_frontcam":
if pose.transforms[0].header.frame_id != "ar_marker":
self.marker_seen = False
else:
self.marker_seen = True
self.vision_pose = pose
def callback_visp_state(self, data):
if data.data != self.visp_state:
self.visp_state = data.data
if data.data == 0:
print("ViSP: Not detecting any pattern, just recieving images")
if data.data == 1:
print("ViSP: Pattern detected")
if data.data == 2:
print(
"ViSP: Model successfully initialized (from wrl & xml files)")
if data.data == 3:
print("ViSP: Tracking model")
if data.data == 4:
print(
"ViSP: Detecting pattern in a small region around where the pattern was last seen")
if data.data == 5:
print("ViSP: Detecting pattern in a the whole frame")
# Predicted pose from tum_ardrone/drone_stateestimation, written to ardrone
def callback_ardrone_prediction(self, pose):
self.pred_pose = pose
def run(self):
twist = Twist()
# Publish the estimated waypoint on object_tracker/pref_pos
r = rospy.Rate(100) # in Hz
        # 0=wait, 1=takeoff, 2=hover over marker, 3=search for marker,
        # 4=approach marker, 5=land, 6=do nothing
while not rospy.is_shutdown():
if self.state == 0:
# wait for start command
pass
            if self.state == 1:  # takeoff
                self.ardrone_send_takeoff()
if (
(self.ardrone_state == DroneStatus.Hovering)
):
self.state = 2
print("Hovering")
if self.state == 2: # hover over marker
if self.marker_seen:
vision_x = self.vision_pose.transforms[0].transform.translation.x
vision_y = self.vision_pose.transforms[0].transform.translation.y
vision_z = self.vision_pose.transforms[0].transform.translation.z
q1 = self.vision_pose.transforms[0].transform.rotation.x
q2 = self.vision_pose.transforms[0].transform.rotation.y
q3 = self.vision_pose.transforms[0].transform.rotation.z
q0 = self.vision_pose.transforms[0].transform.rotation.w
yaw = math.atan2(2*(q0*q3 + q1*q2), 1 - 2*(q2**2 + q3**2))
x_goal = 0.0
y_goal = 0.0
z_goal = 0.9
yaw_goal = 0.0
x_err = x_goal - vision_x
y_err = y_goal - vision_y
z_err = z_goal - vision_z
yaw_err = yaw_goal - yaw
if z_err < 0:
x_vel = 0.05
else:
x_vel = -0.05
if x_err < 0:
y_vel = 0.05
else:
y_vel = -0.05
if y_err < 0:
z_vel = -0.1
else:
z_vel = 0.1
if yaw_err < 0:
yaw_vel = -0.1
else:
yaw_vel = 0.1
self.ardrone_set_xyzy(x_vel, y_vel, z_vel, yaw_vel)
print((x_err, y_err, z_err, yaw_err))
else:
self.ardrone_set_xyzy(0, 0, 0, 0)
if self.state == 3: # search for marker
pass
if self.state == 5: # land
if self.ardrone_state == DroneStatus.Landed:
self.state = 0
print("Landed")
else:
self.ardrone_send_land()
if self.state == 6:
pass # do nothing
self.ardrone_update_rpyz(None)
# twist.linear.x = self.visp_pose.pose.position.x + \
# self.pred_pose.pose.position.x
# twist.linear.y = self.visp_pose.pose.position.y + \
# self.pred_pose.pose.position.y
# twist.linear.z = self.visp_pose.pose.position.z + \
# self.pred_pose.pose.position.z
self.pub.publish(twist)
r.sleep()
        print('\n\nRescueranger is terminating.\n')
self.ardrone_send_emergency()
# spin() simply keeps python from exiting until this node is stopped
if __name__ == '__main__':
try:
tracker = ObjectTracker()
tracker.run()
except rospy.ROSInterruptException:
pass
|
|
"""
Contains basic data structures used throughout the rest of Q2MM.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import copy
import logging
import numpy as np
import os
import re
import sys
import constants as co
import filetypes
logger = logging.getLogger(__name__)
# Row of mm3.fld where comments start.
COM_POS_START = 96
# Row where standard 3 columns of parameters appear.
P_1_START = 23
P_1_END = 33
P_2_START = 34
P_2_END = 44
P_3_START = 45
P_3_END = 55
class ParamError(Exception):
pass
class Param(object):
"""
A single parameter.
    :var _allowed_range: Stored as None until first accessed, after which it
      is a two-element list holding the minimum and maximum allowed values.
    :type _allowed_range: None or list of float
:ivar ptype: Parameter type can be one of the following: ae, af, be, bf, df,
imp1, imp2, sb, or q.
:type ptype: string
Attributes
----------
d1 : float
First derivative of parameter with respect to penalty function.
d2 : float
Second derivative of parameter with respect to penalty function.
step : float
Step size used during numerical differentiation.
ptype : {'ae', 'af', 'be', 'bf', 'df', 'imp1', 'imp2', 'sb', 'q'}
value : float
Value of the parameter.
"""
__slots__ = ['_allowed_range', '_step', '_value', 'd1', 'd2', 'ptype',
'simp_var']
def __init__(self, d1=None, d2=None, ptype=None, value=None):
self._allowed_range = None
self._step = None
self._value = None
self.d1 = d1
self.d2 = d2
self.ptype = ptype
self.simp_var = None
self.value = value
def __repr__(self):
return '{}[{}]({:7.4f})'.format(
self.__class__.__name__, self.ptype, self.value)
@property
def allowed_range(self):
"""
        Returns the [minimum, maximum] range of values that this parameter is
        allowed to take. Charges ('q') and torsional parameters ('df') may be
        negative; all other parameter types must be non-negative.
"""
if self._allowed_range is None and self.ptype is not None:
if self.ptype in ['q', 'df']:
self._allowed_range = [-float('inf'), float('inf')]
else:
self._allowed_range = [0., float('inf')]
return self._allowed_range
@property
def step(self):
"""
Returns a float for the current step size that should be used. If
_step is a string, return float(_step) * value. If
_step is a float, simply return that.
Not sure how well the check for a step size of zero works.
"""
if self._step is None:
try:
self._step = co.STEPS[self.ptype]
except KeyError:
logger.warning(
"{} doesn't have a default step size and none "
"provided!".format(self))
raise
if sys.version_info > (3, 0):
if isinstance(self._step, str):
return float(self._step) * self.value
else:
return self._step
else:
if isinstance(self._step, basestring):
return float(self._step) * self.value
else:
return self._step
@step.setter
def step(self, x):
self._step = x
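    # Illustrative example of the step logic above: if co.STEPS['bf'] were the
    # string '0.1', a 'bf' parameter with value 5.0 would get a step of 0.5
    # (relative step), whereas a float entry is used directly (absolute step).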
@property
def value(self):
if self.ptype == 'ae' and self._value > 180.:
self._value = 180. - abs(180 - self._value)
return self._value
@value.setter
def value(self, value):
"""
When you try to give the parameter a value, make sure that's okay.
"""
if self.value_in_range(value):
self._value = value
def value_in_range(self, value):
if self.allowed_range[0] <= value <= self.allowed_range[1]:
return True
else:
raise ParamError(
"{} isn't allowed to have a value of {}! "
"({} <= x <= {})".format(
str(self),
value,
self.allowed_range[0],
self.allowed_range[1]))
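    # A short sketch of how the range check behaves (values are illustrative):
    #
    #     p = Param(ptype='bf', value=4.5)  # fine, 'bf' must be >= 0
    #     p.value = -1.0                    # raises ParamError
    #     q = Param(ptype='q', value=-1.0)  # fine, charges may be negative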
def value_at_limits(self):
# Checks if the parameter is at the limits of
# its allowed range. Should only be run at the
# end of an optimization to warn users they should
# consider whether this is ok.
if self.value == min(self.allowed_range):
logger.warning(
"{} is equal to its lower limit of {}!\nReconsider "
"if you need to adjust limits, initial parameter "
"values, or if your reference data is appropriate.".format(
str(self),
self.value))
if self.value == max(self.allowed_range):
logger.warning(
"{} is equal to its upper limit of {}!\nReconsider "
"if you need to adjust limits, initial parameter "
"values, or if your reference data is appropriate.".format(
str(self),
self.value))
# Need a general index scheme/method/property to compare the equalness of two
# parameters, rather than having to rely on some expression that compares
# mm3_row and mm3_col.
class ParamMM3(Param):
'''
Adds information to Param that is specific to MM3* parameters.
'''
__slots__ = ['atom_labels', 'atom_types', 'mm3_col', 'mm3_row', 'mm3_label']
def __init__(self, atom_labels=None, atom_types=None, mm3_col=None,
mm3_row=None, mm3_label=None,
d1=None, d2=None, ptype=None, value=None):
self.atom_labels = atom_labels
self.atom_types = atom_types
self.mm3_col = mm3_col
self.mm3_row = mm3_row
self.mm3_label = mm3_label
super(ParamMM3, self).__init__(ptype=ptype, value=value)
def __repr__(self):
return '{}[{}][{},{}]({})'.format(
self.__class__.__name__, self.ptype, self.mm3_row, self.mm3_col,
self.value)
def __str__(self):
return '{}[{}][{},{}]({})'.format(
self.__class__.__name__, self.ptype, self.mm3_row, self.mm3_col,
self.value)
class Datum(object):
'''
Class for a reference or calculated data point.
'''
__slots__ = ['_lbl', 'val', 'wht', 'typ', 'com', 'src_1', 'src_2', 'idx_1',
'idx_2', 'atm_1', 'atm_2', 'atm_3', 'atm_4', 'ff_row']
def __init__(self, lbl=None, val=None, wht=None, typ=None, com=None,
src_1=None, src_2=None,
idx_1=None, idx_2=None,
atm_1=None, atm_2=None, atm_3=None, atm_4=None,
ff_row=None):
self._lbl = lbl
self.val = val
self.wht = wht
self.typ = typ
self.com = com
self.src_1 = src_1
self.src_2 = src_2
self.idx_1 = idx_1
self.idx_2 = idx_2
self.atm_1 = atm_1
self.atm_2 = atm_2
self.atm_3 = atm_3
self.atm_4 = atm_4
self.ff_row = ff_row
def __repr__(self):
return '{}({:7.4f})'.format(self.lbl, self.val)
@property
def lbl(self):
if self._lbl is None:
a = self.typ
if self.src_1:
b = re.split('[.]+', self.src_1)[0]
# Why would it ever not have src_1?
else:
b = None
c = '-'.join([str(x) for x in remove_none(self.idx_1, self.idx_2)])
d = '-'.join([str(x) for x in remove_none(
self.atm_1, self.atm_2, self.atm_3, self.atm_4)])
abcd = remove_none(a, b, c, d)
self._lbl = '_'.join(abcd)
return self._lbl
def remove_none(*args):
    return [x for x in args if (x is not None and x != '')]
def datum_sort_key(datum):
'''
Used as the key to sort a list of Datum instances. This should always ensure
that the calculated and reference data points align properly.
'''
return (datum.typ, datum.src_1, datum.src_2, datum.idx_1, datum.idx_2)
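# Typical use of datum_sort_key (a sketch; r_data and c_data are illustrative
# names for reference and calculated data lists):
#
#     r_data.sort(key=datum_sort_key)
#     c_data.sort(key=datum_sort_key)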
class FF(object):
"""
Class for any type of force field.
path - Self explanatory.
data - List of Datum objects.
method - String describing method used to generate this FF.
params - List of Param objects.
score - Float which is the objective function score.
"""
def __init__(self, path=None, data=None, method=None, params=None,
score=None):
self.path = path
self.data = data
self.method = method
self.params = params
self.score = score
def copy_attributes(self, ff):
"""
Copies some general attributes to another force field.
Parameters
----------
ff : `datatypes.FF`
"""
ff.path = self.path
def __repr__(self):
return '{}[{}]({})'.format(
self.__class__.__name__, self.method, self.score)
class TinkerFF(FF):
"""
STUFF TO FILL IN LATER
"""
def __init__(self, path=None, data=None, method=None, params=None,
score=None):
super(TinkerFF, self).__init__(path, data, method, params, score)
self.sub_names = []
self._atom_types = None
self._lines = None
def copy_attributes(self, ff):
"""
Copies some general attributes to another force field.
Parameters
----------
"""
ff.path = self.path
ff.sub_names = self.sub_names
ff._atom_types = self._atom_types
ff._lines = self._lines
@property
def lines(self):
if self._lines is None:
with open(self.path, 'r') as f:
self._lines = f.readlines()
return self._lines
@lines.setter
def lines(self, x):
self._lines = x
def import_ff(self, path=None, sub_search='OPT'):
if path is None:
path = self.path
bonds = ['bond', 'bond3', 'bond4', 'bond5']
pibonds = ['pibond', 'pibond3', 'pibond4', 'pibond5']
angles = ['angle', 'angle3', 'angle4', 'angle5']
torsions = ['torsion', 'torsion4', 'torsion5']
dipoles = ['dipole', 'dipole3', 'dipole4', 'dipole5']
self.params = []
q2mm_sec = False
gather_data = False
self.sub_names = []
with open(path, 'r') as f:
logger.log(15, 'READING: {}'.format(path))
for i, line in enumerate(f):
split = line.split()
if not q2mm_sec and '# Q2MM' in line:
q2mm_sec = True
                elif q2mm_sec and line.startswith('#'):
self.sub_names.append(line[1:])
if 'OPT' in line:
gather_data = True
else:
gather_data = False
if gather_data and split:
if 'atom' == split[0]:
at = split[1]
el = split[2]
des = split[3][1:-1]
atnum = split[4]
mass = split[5]
                        # Still don't know what this column does. I don't even
                        # know if it's valence.
valence = split[6]
if split[0] in bonds:
at = [split[1], split[2]]
self.params.extend((
ParamMM3(atom_types = at,
ptype = 'bf',
mm3_col = 1,
mm3_row = i + 1,
value = float(split[3])),
ParamMM3(atom_types = at,
ptype = 'be',
mm3_col = 2,
mm3_row = i + 1,
value = float(split[4]))))
if split[0] in dipoles:
at = [split[1], split[2]]
self.params.extend((
ParamMM3(atom_types = at,
ptype = 'q',
mm3_col = 1,
mm3_row = i + 1,
value = float(split[3])),
                            # I think this second value is the position of the
                            # dipole along the bond. I've only seen 0.5, which
                            # indicates the dipole is positioned at the center
                            # of the bond.
ParamMM3(atom_types = at,
ptype = 'q_p',
mm3_col = 2,
mm3_row = i + 1,
value = float(split[4]))))
if split[0] in pibonds:
at = [split[1], split[2]]
                        # I'm still not sure how these affect the potential
                        # energy, but I believe they are correcting factors for
                        # atoms in a pi system, with pi_b being for the bond
                        # and pi_t being for torsions.
self.params.extend((
ParamMM3(atom_types = at,
ptype = 'pi_b',
mm3_col = 1,
mm3_row = i + 1,
value = float(split[3])),
ParamMM3(atom_types = at,
ptype = 'pi_t',
mm3_col = 2,
mm3_row = i + 1,
value = float(split[4]))))
if split[0] in angles:
at = [split[1], split[2], split[3]]
                        # TINKER param files might include several equilibrium
                        # bond angles, which are for a central atom with 0, 1,
                        # or 2 additional hydrogens on the central atom.
self.params.extend((
ParamMM3(atom_types = at,
ptype = 'af',
mm3_col = 1,
mm3_row = i + 1,
value = float(split[4])),
ParamMM3(atom_types = at,
ptype = 'ae',
mm3_col = 2,
mm3_row = i + 1,
value = float(split[5]))))
if len(split) == 8:
self.params.extend((
ParamMM3(atom_types = at,
ptype = 'ae',
mm3_col = 3,
mm3_row = i + 1,
value = float(split[6])),
ParamMM3(atom_types = at,
ptype = 'ae',
mm3_col = 4,
mm3_row = i + 1,
value = float(split[7]))))
                        elif len(split) == 7:
                            # A single extra column: append one parameter.
                            # (extend() on a bare ParamMM3 would raise a
                            # TypeError, since Param objects aren't iterable.)
                            self.params.append(
                                ParamMM3(atom_types = at,
                                         ptype = 'ae',
                                         mm3_col = 3,
                                         mm3_row = i + 1,
                                         value = float(split[6])))
if split[0] in torsions:
at = [split[1], split[2], split[3], split[4]]
self.params.extend((
ParamMM3(atom_types = at,
ptype = 't',
mm3_col = 1,
mm3_row = i + 1,
value = float(split[5])),
ParamMM3(atom_types = at,
ptype = 't',
mm3_col = 2,
mm3_row = i + 1,
value = float(split[8])),
ParamMM3(atom_types = at,
ptype = 't',
mm3_col = 3,
mm3_row = i + 1,
value = float(split[11]))))
if 'opbend' == split[0]:
at = [split[1], split[2], split[3], split[4]]
self.params.append(
ParamMM3(atom_types = at,
ptype = 'op_b',
mm3_col = 1,
mm3_row = i + 1,
value = float(split[5])))
if 'vdw' == split[0]:
#The first float is the vdw radius, the second has to do
# with homoatomic well depths and the last is a reduction
# factor for univalent atoms (I don't think we will need
# any of these except for the first one).
at = [split[1]]
self.params.append(
ParamMM3(atom_types = at,
ptype = 'vdw',
mm3_col = 1,
mm3_row = i + 1,
value = float(split[2])))
logger.log(15, ' -- Read {} parameters.'.format(len(self.params)))
def export_ff(self, path=None, params=None, lines=None):
"""
Exports the force field to a file, typically mm3.fld.
"""
if path is None:
path = self.path
if params is None:
params = self.params
if lines is None:
lines = self.lines
for param in params:
logger.log(1, '>>> param: {} param.value: {}'.format(
param, param.value))
line = lines[param.mm3_row - 1]
if abs(param.value) > 999.:
logger.warning(
'Value of {} is too high! Skipping write.'.format(param))
            # Currently this isn't too flexible. The prm file (or at least the
            # parts that are actually being parameterized) has to be formatted
            # correctly. This includes the position of the columns and a space
            # at the end of every line.
elif param.mm3_col == 1:
lines[param.mm3_row - 1] = (line[:30] +
'{:7.3f}'.format(param.value) +
line[37:])
elif param.mm3_col == 2:
lines[param.mm3_row - 1] = (line[:46] +
'{:7.3f}'.format(param.value) +
line[53:])
elif param.mm3_col == 3:
lines[param.mm3_row - 1] = (line[:62] +
'{:7.3f}'.format(param.value) +
line[69:])
elif param.mm3_col == 4:
lines[param.mm3_row - 1] = (line[:78] +
'{:7.3f}'.format(param.value) +
line[85:])
with open(path, 'w') as f:
f.writelines(lines)
logger.log(10, 'WROTE: {}'.format(path))
class MM3(FF):
"""
Class for Schrodinger MM3* force fields (mm3.fld).
Attributes
----------
smiles : list of strings
MM3* SMILES syntax used in a custom parameter section of a
Schrodinger MM3* force field file.
sub_names : list of strings
Strings used to describe each custom parameter section read.
atom_types : list of strings
Atom types derived from the SMILES formula. The smiles
formula may have some integers, but this is strictly atom
types.
lines : list of strings
Every line from the MM3* force field file.
"""
def __init__(self, path=None, data=None, method=None, params=None,
score=None):
super(MM3, self).__init__(path, data, method, params, score)
self.smiles = []
self.sub_names = []
self._atom_types = None
self._lines = None
def copy_attributes(self, ff):
"""
Copies some general attributes to another force field.
Parameters
----------
ff : `datatypes.MM3`
"""
ff.path = self.path
ff.smiles = self.smiles
ff.sub_names = self.sub_names
ff._atom_types = self._atom_types
ff._lines = self._lines
@property
def atom_types(self):
"""
Uses the SMILES-esque substructure definition (located
directly below the substructre's name) to determine
the atom types.
"""
self._atom_types = []
for smiles in self.smiles:
self._atom_types.append(self.convert_smiles_to_types(smiles))
return self._atom_types
@property
def lines(self):
if self._lines is None:
with open(self.path, 'r') as f:
self._lines = f.readlines()
return self._lines
@lines.setter
def lines(self, x):
self._lines = x
def split_smiles(self, smiles):
"""
Uses the MM3* SMILES substructure definition (located directly below the
substructure's name) to determine the atom types.
"""
split_smiles = re.split(co.RE_SPLIT_ATOMS, smiles)
        # The while loop is actually needed: list.remove() only removes the
        # first matching element, so repeated empty strings take repeated
        # passes.
while '' in split_smiles:
split_smiles.remove('')
return split_smiles
def convert_smiles_to_types(self, smiles):
atom_types = self.split_smiles(smiles)
atom_types = self.convert_to_types(atom_types, atom_types)
return atom_types
def convert_to_types(self, atom_labels, atom_types):
"""
Takes a list of atom_labels, which may have digits instead of atom
types, and converts it into a list of solely atom types.
For example,
atom_labels = [1, 2]
atom_types = ["Z0", "P1", "P2"]
would return ["Z0", "P1"].
atom_labels - List of atom labels, which can be strings like C3, H1,
etc. or digits like "1" or 1.
atom_types - List of atom types, which are only strings like C3, H1,
etc.
"""
return [atom_types[int(x) - 1] if x.strip().isdigit() and
x != '00'
else x
for x in atom_labels]
def import_ff(self, path=None, sub_search='OPT'):
"""
Reads parameters from mm3.fld.
"""
if path is None:
path = self.path
self.params = []
self.smiles = []
self.sub_names = []
with open(path, 'r') as f:
logger.log(15, 'READING: {}'.format(path))
section_sub = False
section_smiles = False
section_vdw = False
for i, line in enumerate(f):
# These lines are for parameters.
if not section_sub and sub_search in line \
and line.startswith(' C'):
                    matched = re.match(r'\sC\s+({})\s+'.format(
                        co.RE_SUB), line)
assert matched is not None, \
"[L{}] Can't read substructure name: {}".format(
i + 1, line)
                    if matched:
# Oh good, you found your substructure!
section_sub = True
sub_name = matched.group(1).strip()
self.sub_names.append(sub_name)
logger.log(
15, '[L{}] Start of substructure: {}'.format(
i+1, sub_name))
section_smiles = True
continue
elif section_smiles is True:
                    matched = re.match(
                        r'\s9\s+({})\s'.format(co.RE_SMILES), line)
assert matched is not None, \
"[L{}] Can't read substructure SMILES: {}".format(
i + 1, line)
smiles = matched.group(1)
self.smiles.append(smiles)
logger.log(15, ' -- SMILES: {}'.format(
self.smiles[-1]))
logger.log(15, ' -- Atom types: {}'.format(
' '.join(self.atom_types[-1])))
section_smiles = False
continue
# Marks the end of a substructure.
elif section_sub and line.startswith('-3'):
logger.log(15, '[L{}] End of substructure: {}'.format(
i, self.sub_names[-1]))
section_sub = False
continue
if 'OPT' in line and section_vdw:
logger.log(5, '[L{}] Found Van der Waals:\n{}'.format(
i + 1, line.strip('\n')))
atm = line[2:5]
rad = line[5:15]
eps = line[16:26]
self.params.extend((
ParamMM3(atom_types = atm,
ptype = 'vdwr',
mm3_col = 1,
mm3_row = i + 1,
value = float(rad)),
ParamMM3(atom_types = atm,
ptype = 'vdwe',
mm3_col = 2,
mm3_row = i + 1,
value = float(eps))))
continue
if 'OPT' in line or section_sub:
# Bonds.
if match_mm3_bond(line):
logger.log(
5, '[L{}] Found bond:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
atm_lbls = [line[4:6], line[8:10]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
else:
atm_typs = [line[4:6], line[9:11]]
atm_lbls = atm_typs
comment = line[COM_POS_START:].strip()
self.sub_names.append(comment)
parm_cols = line[P_1_START:P_3_END]
parm_cols = [float(x) for x in parm_cols.split()]
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'be',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[0]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'bf',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[1])))
try:
self.params.append(
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'q',
mm3_col = 3,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[2]))
# Some bonds parameters don't use bond dipoles.
except IndexError:
pass
continue
# Angles.
elif match_mm3_angle(line):
logger.log(
5, '[L{}] Found angle:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
# Do stuff.
atm_lbls = [line[4:6], line[8:10],
line[12:14]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
else:
# Do other method.
atm_typs = [line[4:6], line[9:11],
line[14:16]]
atm_lbls = atm_typs
comment = line[COM_POS_START:].strip()
self.sub_names.append(comment)
parm_cols = line[P_1_START:P_3_END]
parm_cols = [float(x) for x in parm_cols.split()]
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'ae',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[0]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'af',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[1])))
continue
# Stretch-bends.
elif match_mm3_stretch_bend(line):
logger.log(
5, '[L{}] Found stretch-bend:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
# Do stuff.
atm_lbls = [line[4:6], line[8:10],
line[12:14]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
else:
# Do other method.
atm_typs = [line[4:6], line[9:11],
line[14:16]]
atm_lbls = atm_typs
comment = line[COM_POS_START:].strip()
self.sub_names.append(comment)
parm_cols = line[P_1_START:P_3_END]
parm_cols = [float(x) for x in parm_cols.split()]
self.params.append(
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'sb',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[0]))
continue
# Torsions.
elif match_mm3_lower_torsion(line):
logger.log(
5, '[L{}] Found torsion:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
# Do stuff.
atm_lbls = [line[4:6], line[8:10],
line[12:14], line[16:18]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
else:
# Do other method.
atm_typs = [line[4:6], line[9:11],
line[14:16], line[19:21]]
atm_lbls = atm_typs
comment = line[COM_POS_START:].strip()
self.sub_names.append(comment)
parm_cols = line[P_1_START:P_3_END]
parm_cols = [float(x) for x in parm_cols.split()]
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[0]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[1]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 3,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[2])))
continue
# Higher order torsions.
elif match_mm3_higher_torsion(line):
logger.log(
5, '[L{}] Found higher order torsion:\n{}'.format(
i + 1, line.strip('\n')))
# Will break if torsions aren't also looked up.
atm_lbls = self.params[-1].atom_labels
atm_typs = self.params[-1].atom_types
parm_cols = line[P_1_START:P_3_END]
parm_cols = [float(x) for x in parm_cols.split()]
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[0]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[1]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 3,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[2])))
continue
# Improper torsions.
elif match_mm3_improper(line):
logger.log(
5, '[L{}] Found torsion:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
# Do stuff.
atm_lbls = [line[4:6], line[8:10],
line[12:14], line[16:18]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
else:
# Do other method.
atm_typs = [line[4:6], line[9:11],
line[14:16], line[19:21]]
atm_lbls = atm_typs
comment = line[COM_POS_START:].strip()
self.sub_names.append(comment)
parm_cols = line[P_1_START:P_3_END]
parm_cols = [float(x) for x in parm_cols.split()]
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'imp1',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[0]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'imp2',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[1])))
continue
# Bonds.
elif match_mm3_vdw(line):
logger.log(
5, '[L{}] Found vdw:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
atm_lbls = [line[4:6], line[8:10]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
parm_cols = line[P_1_START:P_3_END]
parm_cols = [float(x) for x in parm_cols.split()]
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'vdwr',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[0]),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'vdwfc',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = line[:2],
value = parm_cols[1])))
continue
            # The van der Waals parameters are stored in an annoying way.
if line.startswith('-6'):
section_vdw = True
continue
logger.log(15, ' -- Read {} parameters.'.format(len(self.params)))
def alternate_import_ff(self, path=None, sub_search='OPT'):
"""
        Reads parameters, but is less strict about the file formatting.
"""
if path is None:
path = self.path
self.params = []
self.smiles = []
self.sub_names = []
with open(path, 'r') as f:
logger.log(15, 'READING: {}'.format(path))
section_sub = False
section_smiles = False
section_vdw = False
for i, line in enumerate(f):
cols = line.split()
                # These lines mark the start of a substructure.
if not section_sub and sub_search in line \
and line.startswith(' C'):
                matched = re.match(r'\sC\s+({})\s+'.format(
                    co.RE_SUB), line)
assert matched is not None, \
"[L{}] Can't read substructure name: {}".format(
i + 1, line)
if matched:
# Oh good, you found your substructure!
section_sub = True
sub_name = matched.group(1).strip()
self.sub_names.append(sub_name)
logger.log(
15, '[L{}] Start of substructure: {}'.format(
i+1, sub_name))
section_smiles = True
continue
elif section_smiles is True:
                matched = re.match(
                    r'\s9\s+({})\s'.format(co.RE_SMILES), line)
assert matched is not None, \
"[L{}] Can't read substructure SMILES: {}".format(
i + 1, line)
smiles = matched.group(1)
self.smiles.append(smiles)
logger.log(15, ' -- SMILES: {}'.format(
self.smiles[-1]))
logger.log(15, ' -- Atom types: {}'.format(
' '.join(self.atom_types[-1])))
section_smiles = False
continue
# Marks the end of a substructure.
elif section_sub and line.startswith('-3'):
                logger.log(15, '[L{}] End of substructure: {}'.format(
                    i + 1, self.sub_names[-1]))
section_sub = False
continue
# Not implemented.
# if 'OPT' in line and section_vdw:
# logger.log(5, '[L{}] Found Van der Waals:\n{}'.format(
# i + 1, line.strip('\n')))
# atm = line[2:5]
# rad = line[5:15]
# eps = line[16:26]
# self.params.extend((
# ParamMM3(atom_types = atm,
# ptype = 'vdwr',
# mm3_col = 1,
# mm3_row = i + 1,
# value = float(rad)),
# ParamMM3(atom_types = atm,
# ptype = 'vdwe',
# mm3_col = 2,
# mm3_row = i + 1,
# value = float(eps))))
# continue
if 'OPT' in line or section_sub:
# Bonds.
if match_mm3_bond(line):
logger.log(
5, '[L{}] Found bond:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
atm_lbls = [cols[1], cols[2]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
# Not really implemented.
else:
atm_typs = [cols[1], cols[2]]
atm_lbls = atm_typs
# comment = line[COM_POS_START:].strip()
# self.sub_names.append(comment)
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'be',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[3])),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'bf',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[4]))))
try:
self.params.append(
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'q',
mm3_col = 3,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[5])))
                    # Some bond parameters don't use bond dipoles.
except IndexError:
pass
continue
# Angles.
elif match_mm3_angle(line):
logger.log(
5, '[L{}] Found angle:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
                        # Substructure section: convert atom labels to atom types.
atm_lbls = [cols[1], cols[2], cols[3]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
# Not implemented.
else:
pass
# atm_typs = [line[4:6], line[9:11],
# line[14:16]]
# atm_lbls = atm_typs
# comment = line[COM_POS_START:].strip()
# self.sub_names.append(comment)
# parm_cols = line[P_1_START:P_3_END]
# parm_cols = map(float, parm_cols.split())
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'ae',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[4])),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'af',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[5]))))
continue
# Stretch-bends.
# elif match_mm3_stretch_bend(line):
# logger.log(
# 5, '[L{}] Found stretch-bend:\n{}'.format(
# i + 1, line.strip('\n')))
# if section_sub:
# # Do stuff.
# atm_lbls = [line[4:6], line[8:10],
# line[12:14]]
# atm_typs = self.convert_to_types(
# atm_lbls, self.atom_types[-1])
# else:
# # Do other method.
# atm_typs = [line[4:6], line[9:11],
# line[14:16]]
# atm_lbls = atm_typs
# comment = line[COM_POS_START:].strip()
# self.sub_names.append(comment)
# parm_cols = line[P_1_START:P_3_END]
# parm_cols = map(float, parm_cols.split())
# self.params.append(
# ParamMM3(atom_labels = atm_lbls,
# atom_types = atm_typs,
# ptype = 'sb',
# mm3_col = 1,
# mm3_row = i + 1,
# mm3_label = line[:2],
# value = parm_cols[0]))
# continue
# Torsions.
elif match_mm3_lower_torsion(line):
logger.log(
5, '[L{}] Found torsion:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
                        # Substructure section: convert atom labels to atom types.
atm_lbls = [cols[1], cols[2], cols[3], cols[4]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
else:
pass
# Do other method.
# atm_typs = [line[4:6], line[9:11],
# line[14:16], line[19:21]]
# atm_lbls = atm_typs
# comment = line[COM_POS_START:].strip()
# self.sub_names.append(comment)
# parm_cols = line[P_1_START:P_3_END]
# parm_cols = map(float, parm_cols.split())
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[5])),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[6])),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'df',
mm3_col = 3,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[7]))))
continue
# Higher order torsions.
# elif match_mm3_higher_torsion(line):
# logger.log(
# 5, '[L{}] Found higher order torsion:\n{}'.format(
# i + 1, line.strip('\n')))
# # Will break if torsions aren't also looked up.
# atm_lbls = self.params[-1].atom_labels
# atm_typs = self.params[-1].atom_types
# parm_cols = line[P_1_START:P_3_END]
# parm_cols = map(float, parm_cols.split())
# self.params.extend((
# ParamMM3(atom_labels = atm_lbls,
# atom_types = atm_typs,
# ptype = 'df',
# mm3_col = 1,
# mm3_row = i + 1,
# mm3_label = cols[0],
# value = parm_cols[0]),
# ParamMM3(atom_labels = atm_lbls,
# atom_types = atm_typs,
# ptype = 'df',
# mm3_col = 2,
# mm3_row = i + 1,
# mm3_label = cols[0],
# value = parm_cols[1]),
# ParamMM3(atom_labels = atm_lbls,
# atom_types = atm_typs,
# ptype = 'df',
# mm3_col = 3,
# mm3_row = i + 1,
# mm3_label = cols[0],
# value = parm_cols[2])))
# continue
# Improper torsions.
elif match_mm3_improper(line):
logger.log(
                        5, '[L{}] Found improper torsion:\n{}'.format(
i + 1, line.strip('\n')))
if section_sub:
                        # Substructure section: convert atom labels to atom types.
atm_lbls = [cols[1], cols[2], cols[3], cols[4]]
atm_typs = self.convert_to_types(
atm_lbls, self.atom_types[-1])
else:
pass
# Do other method.
# atm_typs = [line[4:6], line[9:11],
# line[14:16], line[19:21]]
# atm_lbls = atm_typs
# comment = line[COM_POS_START:].strip()
# self.sub_names.append(comment)
# parm_cols = line[P_1_START:P_3_END]
# parm_cols = map(float, parm_cols.split())
self.params.extend((
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'imp1',
mm3_col = 1,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[5])),
ParamMM3(atom_labels = atm_lbls,
atom_types = atm_typs,
ptype = 'imp2',
mm3_col = 2,
mm3_row = i + 1,
mm3_label = cols[0],
value = float(cols[6]))))
continue
            # The van der Waals parameters are stored in an annoying way.
if line.startswith('-6'):
section_vdw = True
continue
logger.log(15, ' -- Read {} parameters.'.format(len(self.params)))
def export_ff(self, path=None, params=None, lines=None):
"""
Exports the force field to a file, typically mm3.fld.
Parameters
----------
path : string
File to be written or overwritten.
params : list of `datatypes.Param` (or subclass)
lines : list of strings
This is what is generated when you read mm3.fld using
readlines().
"""
if path is None:
path = self.path
if params is None:
params = self.params
if lines is None:
lines = self.lines
for param in params:
logger.log(1, '>>> param: {} param.value: {}'.format(
param, param.value))
line = lines[param.mm3_row - 1]
# There are some problems with this. Probably an optimization
# technique gave you these crazy parameter values. Ideally, this
# entire trial FF should be discarded.
# Someday export_ff should raise an exception when these values
        # get too ridiculous, and that exception should be handled by the
# optimization techniques appropriately.
if abs(param.value) > 999.:
logger.warning(
'Value of {} is too high! Skipping write.'.format(param))
elif param.mm3_col == 1:
lines[param.mm3_row - 1] = (line[:P_1_START] +
'{:10.4f}'.format(param.value) +
line[P_1_END:])
elif param.mm3_col == 2:
lines[param.mm3_row - 1] = (line[:P_2_START] +
'{:10.4f}'.format(param.value) +
line[P_2_END:])
elif param.mm3_col == 3:
lines[param.mm3_row - 1] = (line[:P_3_START] +
'{:10.4f}'.format(param.value) +
line[P_3_END:])
with open(path, 'w') as f:
f.writelines(lines)
logger.log(10, 'WROTE: {}'.format(path))
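    # Note on the splice above (illustrative; assumes the P_* column bounds
    # span 10 characters): '{:10.4f}'.format(1.2345) yields '    1.2345',
    # exactly 10 characters, so replacing line[P_1_START:P_1_END] keeps every
    # other column of the mm3.fld line byte-aligned.
    #
    # assert '{:10.4f}'.format(1.2345) == '    1.2345'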
    def alternate_export_ff(self, path=None, params=None):
        """
        Exports the force field without needing to read an existing mm3.fld.
        Not yet implemented; currently a stub.
        """
        lines = []
        for param in params:
            pass
def match_mm3_label(mm3_label):
"""
Makes sure the MM3* label is recognized.
The label is the 1st 2 characters in the line containing the parameter
in a Schrodinger mm3.fld file.
"""
return re.match('[\s5a-z][1-5]', mm3_label)
def match_mm3_vdw(mm3_label):
"""Matches MM3* label for bonds."""
return re.match('[\sa-z]6', mm3_label)
def match_mm3_bond(mm3_label):
    """Matches MM3* label for bonds."""
    return re.match(r'[\sa-z]1', mm3_label)
def match_mm3_angle(mm3_label):
    """Matches MM3* label for angles."""
    return re.match(r'[\sa-z]2', mm3_label)
def match_mm3_stretch_bend(mm3_label):
    """Matches MM3* label for stretch-bends."""
    return re.match(r'[\sa-z]3', mm3_label)
def match_mm3_torsion(mm3_label):
    """Matches MM3* label for all orders of torsional parameters."""
    return re.match(r'[\sa-z]4|54', mm3_label)
def match_mm3_lower_torsion(mm3_label):
    """Matches MM3* label for torsions (1st through 3rd order)."""
    return re.match(r'[\sa-z]4', mm3_label)
def match_mm3_higher_torsion(mm3_label):
    """Matches MM3* label for torsions (4th through 6th order)."""
    return re.match(r'54', mm3_label)
def match_mm3_improper(mm3_label):
    """Matches MM3* label for improper torsions."""
    return re.match(r'[\sa-z]5', mm3_label)
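# Illustrative sketch (not part of the original module): these matchers only
# inspect the two-character MM3* label that starts a parameter line, so
# classifying a line reduces to testing line[:2]. Hypothetical labels:
#
# assert match_mm3_bond(' 1')             # substructure bond
# assert match_mm3_angle('a2')            # angle with a lowercase prefix
# assert match_mm3_higher_torsion('54')   # 4th-6th order torsion terms
# assert not match_mm3_label('99')        # unrecognized label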
def mass_weight_hessian(hess, atoms, reverse=False):
"""
    Mass-weights the Hessian. If reverse is True, it un-mass-weights
the Hessian.
"""
masses = [co.MASSES[x.element] for x in atoms if not x.is_dummy]
changes = []
for mass in masses:
changes.extend([1 / np.sqrt(mass)] * 3)
x, y = hess.shape
for i in range(0, x):
for j in range(0, y):
if reverse:
hess[i, j] = \
hess[i, j] / changes[i] / changes[j]
else:
hess[i, j] = \
hess[i, j] * changes[i] * changes[j]
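# Minimal sketch (FakeAtom is a stand-in, not the project's atom class): mass
# weighting scales H[i, j] by 1 / sqrt(m_i * m_j), so calling the function
# again with reverse=True restores the original Hessian.
#
# class FakeAtom(object):
#     element = 'C'
#     is_dummy = False
# hess = np.ones((3, 3))
# mass_weight_hessian(hess, [FakeAtom()])                # hess /= co.MASSES['C']
# mass_weight_hessian(hess, [FakeAtom()], reverse=True)  # back to all ones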
def mass_weight_eigenvectors(evecs, atoms, reverse=False):
"""
    Mass-weights the eigenvectors. If reverse is True, it un-mass-weights
the eigenvectors.
"""
changes = []
for atom in atoms:
if not atom.is_dummy:
changes.extend([np.sqrt(atom.exact_mass)] * 3)
x, y = evecs.shape
for i in range(0, x):
for j in range(0, y):
if reverse:
evecs[i, j] /= changes[j]
else:
evecs[i, j] *= changes[j]
def replace_minimum(array, value=1):
"""
    Replace the minimum value in an arbitrary NumPy array. Historically,
    the replacement value is either 1 or co.HESSIAN_CONVERSION.
"""
minimum = array.min()
minimum_index = np.where(array == minimum)
assert minimum < 0, 'Minimum of array is not negative!'
# It would be better to address this in a different way. This particular
# data structure just isn't what we want.
array.setflags(write=True)
# Sometimes we use 1, but sometimes we use co.HESSIAN_CONVERSION.
array[minimum_index] = value
logger.log(1, '>>> minimum_index: {}'.format(minimum_index))
logger.log(1, '>>> array:\n{}'.format(array))
logger.log(10, ' -- Replaced minimum in array with {}.'.format(value))
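# Worked example (values illustrative): the negative minimum is replaced in
# place.
#
# arr = np.array([3., -2., 5.])
# replace_minimum(arr)  # arr is now [3., 1., 5.]
# replace_minimum(np.array([4., -1.]), value=co.HESSIAN_CONVERSION)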
def check_mm_dummy(hess, dummy_indices):
"""
Removes dummy atom rows and columns from the Hessian based upon
dummy_indices.
Arguments
---------
hess : np.matrix
dummy_indices : list of integers
Integers correspond to the indices to be removed from the
np.matrix of the Hessian.
Returns
-------
np.matrix
"""
hess = np.delete(hess, dummy_indices, 0)
hess = np.delete(hess, dummy_indices, 1)
logger.log(15, 'Created {} Hessian w/o dummy atoms.'.format(hess.shape))
return hess
def get_dummy_hessian_indices(dummy_indices):
"""
Takes a list of indices for the dummy atoms and returns another list of
integers corresponding to the rows of the eigenvectors to remove
    for those dummy atoms.
Arguments
---------
dummy_indices : list of integers
Indices for the dummy atoms.
Returns
-------
list of integers
"""
hess_dummy_indices = []
for index in dummy_indices:
hess_index = (index - 1) * 3
hess_dummy_indices.append(hess_index)
hess_dummy_indices.append(hess_index + 1)
hess_dummy_indices.append(hess_index + 2)
return hess_dummy_indices
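# Worked example (illustrative): a dummy atom with 1-based index 2 occupies
# Hessian rows/columns 3, 4 and 5 (its x, y and z components), so:
#
# get_dummy_hessian_indices([2])  # -> [3, 4, 5]
# hess = check_mm_dummy(hess, get_dummy_hessian_indices([2]))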
|
|
#===============================================================================
# Copyright 2007 Matt Chaput
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
"""Support functions and classes implementing the KinoSearch-like external sort
merging model. This module does not contain any user-level objects.
"""
import os, struct, tempfile
from marshal import dumps, loads
from heapq import heapify, heapreplace, heappop
from whoosh import structfile
_int_size = struct.calcsize("!i")
# Utility functions
def encode_posting(fieldNum, text, doc, data):
"""Encodes a posting as a string, for sorting."""
return "".join([struct.pack("!i", fieldNum),
text.encode("utf8"),
chr(0),
struct.pack("!i", doc),
dumps(data)
])
def decode_posting(posting):
"""Decodes an encoded posting string into a
(field_number, text, document_number, data) tuple.
"""
field_num = struct.unpack("!i", posting[:_int_size])[0]
zero = posting.find(chr(0), _int_size)
text = posting[_int_size:zero].decode("utf8")
docstart = zero + 1
docend = docstart + _int_size
doc = struct.unpack("!i", posting[docstart:docend])[0]
data = loads(posting[docend:])
return field_num, text, doc, data
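# Round-trip sketch (illustrative values): decode_posting inverts
# encode_posting, and the chr(0) terminator after the text plus the
# big-endian field/doc numbers make the encoded strings sort by
# (field, text, doc).
#
# p = encode_posting(1, u"term", 42, {"freq": 3})
# assert decode_posting(p) == (1, u"term", 42, {"freq": 3})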
def merge(run_readers, max_chunk_size):
# Initialize a list of terms we're "current"ly
# looking at, by taking the first posting from
# each buffer.
#
# The format of the list is
# [("encoded_posting", reader_number), ...]
#
# The list is sorted, and the runs are already
# sorted, so the first term in this list should
# be the absolute "lowest" term.
current = [(r.next(), i) for i, r
in enumerate(run_readers)]
heapify(current)
# The number of active readers (readers with more
    # postings available), initially equal
# to the total number of readers/buffers.
active = len(run_readers)
# Initialize the output buffer, and a variable to
# keep track of the output buffer size. This buffer
# accumulates postings from the various buffers in
# proper sorted order.
output = []
outputBufferSize = 0
while active > 0:
# Get the first ("encoded_posting", reader_number)
# pair and add it to the output buffer.
p, i = current[0]
output.append(p)
outputBufferSize += len(p)
# If the output buffer is full, "flush" it by yielding
# the accumulated postings back to the parent writer
# and clearing the output buffer.
if outputBufferSize > max_chunk_size:
for p in output:
yield decode_posting(p)
output = []
outputBufferSize = 0
# We need to replace the posting we just added to the output
# by getting the next posting from the same buffer.
if run_readers[i] is not None:
# Take the first posting from buffer i and insert it into the
# "current" list in sorted order.
# The current list must always stay sorted, so the first item
# is always the lowest.
p = run_readers[i].next()
if p:
heapreplace(current, (p, i))
else:
heappop(current)
active -= 1
# If there are still terms in the "current" list after all the
# readers are empty, dump them into the output buffer.
if len(current) > 0:
output.extend([p for p, i in current])
# If there's still postings in the output buffer, yield
# them all to the parent writer.
if len(output) > 0:
for p in output:
yield decode_posting(p)
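# For intuition only: ignoring the chunked flushing above, the same k-way
# merge can be written with the standard library (Python 2.6+). This is an
# illustrative equivalent, not the code path PostingPool actually uses:
#
# import heapq
# def simple_merge(run_readers):
#     streams = [iter(r.next, None) for r in run_readers]
#     for posting in heapq.merge(*streams):
#         yield decode_posting(posting)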
# Classes
class RunReader(object):
"""An iterator that yields posting strings from a "run" on disk.
This class buffers the reads to improve efficiency.
"""
def __init__(self, stream, count, buffer_size):
"""
:stream: the file from which to read.
:count: the number of postings in the stream.
:buffer_size: the size (in bytes) of the read buffer to use.
"""
self.stream = stream
self.count = count
self.buffer_size = buffer_size
self.buffer = []
self.pointer = 0
self.finished = False
def close(self):
self.stream.close()
def _fill(self):
# Clears and refills the buffer.
# If this reader is exhausted, do nothing.
if self.finished:
return
# Clear the buffer.
buffer = self.buffer = []
# Reset the index at which the next() method
# reads from the buffer.
self.pointer = 0
# How much we've read so far.
so_far = 0
count = self.count
while so_far < self.buffer_size:
if count <= 0:
break
p = self.stream.read_string()
buffer.append(p)
so_far += len(p)
count -= 1
self.count = count
def __iter__(self):
return self
def next(self):
assert self.pointer <= len(self.buffer)
if self.pointer == len(self.buffer):
self._fill()
# If after refilling the buffer is still empty, we're
# at the end of the file and should stop. Probably this
# should raise StopIteration instead of returning None.
if len(self.buffer) == 0:
self.finished = True
return None
r = self.buffer[self.pointer]
self.pointer += 1
return r
class PostingPool(object):
"""Represents the "pool" of all postings to be sorted. As documents are added,
this object writes out "runs" of sorted encoded postings. When all documents
have been added, this object merge sorts the runs from disk, yielding decoded
postings to the SegmentWriter.
"""
def __init__(self, limit):
"""
:limit: the maximum amount of memory to use at once
for adding postings and the merge sort.
"""
self.limit = limit
self.size = 0
self.postings = []
self.finished = False
self.runs = []
self.count = 0
def add_posting(self, field_num, text, doc, data):
"""Adds a posting to the pool."""
if self.finished:
raise Exception("Can't add postings after you iterate over the pool")
if self.size >= self.limit:
print "Flushing..."
self._flush_run()
posting = encode_posting(field_num, text, doc, data)
self.size += len(posting)
self.postings.append(posting)
self.count += 1
def _flush_run(self):
# Called when the memory buffer (of size self.limit) fills up.
# Sorts the buffer and writes the current buffer to a "run" on disk.
if self.size > 0:
tempfd, tempname = tempfile.mkstemp(".run")
runfile = structfile.StructFile(os.fdopen(tempfd, "w+b"))
self.postings.sort()
for p in self.postings:
runfile.write_string(p)
runfile.flush()
runfile.seek(0)
self.runs.append((runfile, self.count))
print "Flushed run:", self.runs
self.postings = []
self.size = 0
self.count = 0
def __iter__(self):
# Iterating the PostingPool object performs a merge sort of
# the runs that have been written to disk and yields the
# sorted, decoded postings.
if self.finished:
raise Exception("Tried to iterate on PostingPool twice")
run_count = len(self.runs)
if self.postings and run_count == 0:
# Special case: we never accumulated enough postings to flush
# to disk, so the postings are still in memory: just yield
# them from there.
self.postings.sort()
for p in self.postings:
yield decode_posting(p)
return
if not self.postings and run_count == 0:
# No postings at all
return
if self.postings:
self._flush_run()
run_count = len(self.runs)
        # This method does an external merge to yield postings
        # from the (n > 1) runs built up during indexing and
        # merging.
# Divide up the posting pool's memory limit between the
# number of runs plus an output buffer.
max_chunk_size = int(self.limit / (run_count + 1))
run_readers = [RunReader(run_file, count, max_chunk_size)
for run_file, count in self.runs]
for decoded_posting in merge(run_readers, max_chunk_size):
yield decoded_posting
for rr in run_readers:
assert rr.count == 0
rr.close()
# And we're done.
self.finished = True
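# Usage sketch (field/doc numbers are made up): postings may be added in any
# order and come back sorted, spilling to temporary ".run" files whenever the
# in-memory buffer exceeds `limit` bytes.
#
# pool = PostingPool(limit=256 * 1024)
# pool.add_posting(0, u"apple", 3, None)
# pool.add_posting(0, u"aardvark", 7, None)
# for field_num, text, doc, data in pool:
#     print field_num, text, doc  # yields "aardvark" before "apple"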
#class RamPostingPool(object):
# """
# An experimental alternate implementation of PostingPool that
# just keeps everything in memory instead of doing an external
# sort on disk. This is very memory inefficient and, as it turns
# out, not much faster.
# """
#
# def __init__(self):
# self.postings = []
#
# def add_posting(self, field_num, text, doc, data):
# self.postings.append((field_num, text, doc, data))
#
# def __iter__(self):
# return iter(sorted(self.postings))
|
|
# -*- coding: utf-8 -*-
import django
from django.db import connection
from django.db.models import Count
from django.db.models.query_utils import Q
from django.utils import translation
from hvad.test_utils.data import NORMAL, STANDARD
from hvad.test_utils.testcase import HvadTestCase, minimumDjangoVersion
from hvad.test_utils.project.app.models import Normal, AggregateModel, Standard, SimpleRelated
from hvad.test_utils.fixtures import NormalFixture, StandardFixture
class FilterTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_simple_filter(self):
qs = Normal.objects.language('en').filter(shared_field__contains='2')
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.shared_field, NORMAL[2].shared_field)
self.assertEqual(obj.translated_field, NORMAL[2].translated_field['en'])
qs = Normal.objects.language('ja').filter(shared_field__contains='1')
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.shared_field, NORMAL[1].shared_field)
self.assertEqual(obj.translated_field, NORMAL[1].translated_field['ja'])
def test_translated_filter(self):
qs = Normal.objects.language('en').filter(translated_field__contains='English')
self.assertEqual(qs.count(), self.normal_count)
obj1, obj2 = qs
self.assertEqual(obj1.shared_field, NORMAL[1].shared_field)
self.assertEqual(obj1.translated_field, NORMAL[1].translated_field['en'])
self.assertEqual(obj2.shared_field, NORMAL[2].shared_field)
self.assertEqual(obj2.translated_field, NORMAL[2].translated_field['en'])
@minimumDjangoVersion(1, 6)
def test_fallbacks_filter(self):
(Normal.objects.language('en')
.filter(shared_field=NORMAL[1].shared_field)
.delete_translations())
with translation.override('en'):
qs = Normal.objects.language().fallbacks()
with self.assertNumQueries(2):
self.assertEqual(qs.count(), self.normal_count)
self.assertEqual(len(qs), self.normal_count)
with self.assertNumQueries(0):
self.assertCountEqual((obj.pk for obj in qs), tuple(self.normal_id.values()))
self.assertCountEqual((obj.language_code for obj in qs), self.translations)
def test_all_languages_filter(self):
with self.assertNumQueries(2):
qs = Normal.objects.language('all').filter(shared_field__contains='Shared')
self.assertEqual(qs.count(), self.normal_count * len(self.translations))
self.assertCountEqual((obj.shared_field for obj in qs),
(NORMAL[1].shared_field,
NORMAL[2].shared_field) * 2)
self.assertCountEqual((obj.translated_field for obj in qs),
(NORMAL[1].translated_field['en'],
NORMAL[1].translated_field['ja'],
NORMAL[2].translated_field['en'],
NORMAL[2].translated_field['ja']))
with self.assertNumQueries(2):
qs = Normal.objects.language('all').filter(translated_field__contains='English')
self.assertEqual(qs.count(), self.normal_count)
self.assertCountEqual((obj.shared_field for obj in qs),
(NORMAL[1].shared_field,
NORMAL[2].shared_field))
self.assertCountEqual((obj.translated_field for obj in qs),
(NORMAL[1].translated_field['en'],
NORMAL[2].translated_field['en']))
with self.assertNumQueries(2):
qs = Normal.objects.language('all').filter(translated_field__contains='1')
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.shared_field, NORMAL[1].shared_field)
self.assertEqual(obj.translated_field, NORMAL[1].translated_field['en'])
def test_deferred_language_filter(self):
with translation.override('ja'):
qs = Normal.objects.language().filter(translated_field__contains='English')
with translation.override('en'):
self.assertEqual(qs.count(), self.normal_count)
obj1, obj2 = qs
self.assertEqual(obj1.shared_field, NORMAL[1].shared_field)
self.assertEqual(obj1.translated_field, NORMAL[1].translated_field['en'])
self.assertEqual(obj2.shared_field, NORMAL[2].shared_field)
self.assertEqual(obj2.translated_field, NORMAL[2].translated_field['en'])
class ExtraTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_simple_extra(self):
qs = Normal.objects.language('en').extra(select={'test_extra': '2 + 2'})
self.assertEqual(qs.count(), self.normal_count)
self.assertEqual(int(qs[0].test_extra), 4)
class QueryCachingTests(HvadTestCase, NormalFixture):
normal_count = 2
def _try_all_cache_using_methods(self, qs, length):
with self.assertNumQueries(0):
x = 0
for obj in qs: x += 1
self.assertEqual(x, length)
with self.assertNumQueries(0):
qs[0]
with self.assertNumQueries(0):
self.assertEqual(qs.exists(), length != 0)
with self.assertNumQueries(0):
self.assertEqual(qs.count(), length)
with self.assertNumQueries(0):
self.assertEqual(len(qs), length)
with self.assertNumQueries(0):
self.assertEqual(bool(qs), length != 0)
def test_iter_caches(self):
with translation.override('en'):
index = 0
qs = Normal.objects.language().filter(pk=self.normal_id[1])
for obj in qs:
index += 1
self.assertEqual(index, 1)
self._try_all_cache_using_methods(qs, 1)
def test_pickling_caches(self):
import pickle
with translation.override('en'):
qs = Normal.objects.language().filter(pk=self.normal_id[1])
pickle.dumps(qs)
self._try_all_cache_using_methods(qs, 1)
def test_len_caches(self):
with translation.override('en'):
qs = Normal.objects.language().filter(pk=self.normal_id[1])
self.assertEqual(len(qs), 1)
self._try_all_cache_using_methods(qs, 1)
def test_bool_caches(self):
with translation.override('en'):
qs = Normal.objects.language().filter(pk=self.normal_id[1])
self.assertTrue(qs)
self._try_all_cache_using_methods(qs, 1)
class IterTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_simple_iter(self):
with translation.override('en'):
with self.assertNumQueries(1):
for index, obj in enumerate(Normal.objects.language(), 1):
self.assertEqual(obj.shared_field, NORMAL[index].shared_field)
self.assertEqual(obj.translated_field, NORMAL[index].translated_field['en'])
with translation.override('ja'):
with self.assertNumQueries(1):
for index, obj in enumerate(Normal.objects.language(), 1):
self.assertEqual(obj.shared_field, NORMAL[index].shared_field)
self.assertEqual(obj.translated_field, NORMAL[index].translated_field['ja'])
def test_iter_unique_reply(self):
# Make sure .all() only returns unique rows
with translation.override('en'):
self.assertEqual(len(Normal.objects.all()), len(Normal.objects.untranslated()))
def test_iter_deferred_language(self):
with translation.override('en'):
qs = Normal.objects.language()
with translation.override('ja'):
for index, obj in enumerate(qs, 1):
self.assertEqual(obj.shared_field, NORMAL[index].shared_field)
self.assertEqual(obj.translated_field, NORMAL[index].translated_field['ja'])
class UpdateTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_update_shared(self):
NEW_SHARED = 'new shared'
n1 = Normal.objects.language('en').get(pk=self.normal_id[1])
n2 = Normal.objects.language('en').get(pk=self.normal_id[2])
ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
with self.assertNumQueries(1 if connection.features.update_can_self_select else 2):
Normal.objects.language('en').update(shared_field=NEW_SHARED)
new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
self.assertEqual(new1.shared_field, NEW_SHARED)
self.assertEqual(new1.translated_field, n1.translated_field)
self.assertEqual(new2.shared_field, NEW_SHARED)
self.assertEqual(new2.translated_field, n2.translated_field)
newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
self.assertEqual(newja1.shared_field, NEW_SHARED)
self.assertEqual(newja2.shared_field, NEW_SHARED)
self.assertEqual(newja1.translated_field, ja1.translated_field)
self.assertEqual(newja2.translated_field, ja2.translated_field)
def test_update_translated(self):
NEW_TRANSLATED = 'new translated'
n1 = Normal.objects.language('en').get(pk=self.normal_id[1])
n2 = Normal.objects.language('en').get(pk=self.normal_id[2])
ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
with self.assertNumQueries(1):
Normal.objects.language('en').update(translated_field=NEW_TRANSLATED)
new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
self.assertEqual(new1.shared_field, n1.shared_field)
self.assertEqual(new2.shared_field, n2.shared_field)
self.assertEqual(new1.translated_field, NEW_TRANSLATED)
self.assertEqual(new2.translated_field, NEW_TRANSLATED)
        # check it didn't touch Japanese
newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
self.assertEqual(newja1.shared_field, ja1.shared_field)
self.assertEqual(newja2.shared_field, ja2.shared_field)
self.assertEqual(newja1.translated_field, ja1.translated_field)
self.assertEqual(newja2.translated_field, ja2.translated_field)
def test_update_mixed(self):
NEW_SHARED = 'new shared'
NEW_TRANSLATED = 'new translated'
ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
with self.assertNumQueries(2 if connection.features.update_can_self_select else 3):
Normal.objects.language('en').update(
shared_field=NEW_SHARED, translated_field=NEW_TRANSLATED
)
new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
self.assertEqual(new1.shared_field, NEW_SHARED)
self.assertEqual(new1.translated_field, NEW_TRANSLATED)
self.assertEqual(new2.shared_field, NEW_SHARED)
self.assertEqual(new2.translated_field, NEW_TRANSLATED)
newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
self.assertEqual(newja1.shared_field, NEW_SHARED)
self.assertEqual(newja2.shared_field, NEW_SHARED)
        # check it didn't touch Japanese translated fields
self.assertEqual(newja1.translated_field, ja1.translated_field)
self.assertEqual(newja2.translated_field, ja2.translated_field)
def test_update_deferred_language(self):
NEW_TRANSLATED = 'new translated'
n1 = Normal.objects.language('en').get(pk=self.normal_id[1])
n2 = Normal.objects.language('en').get(pk=self.normal_id[2])
ja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
ja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
with translation.override('ja'):
qs = Normal.objects.language()
with translation.override('en'):
with self.assertNumQueries(1):
qs.update(translated_field=NEW_TRANSLATED)
new1 = Normal.objects.language('en').get(pk=self.normal_id[1])
new2 = Normal.objects.language('en').get(pk=self.normal_id[2])
self.assertEqual(new1.shared_field, n1.shared_field)
self.assertEqual(new2.shared_field, n2.shared_field)
self.assertEqual(new1.translated_field, NEW_TRANSLATED)
self.assertEqual(new2.translated_field, NEW_TRANSLATED)
        # check it didn't touch Japanese
newja1 = Normal.objects.language('ja').get(pk=self.normal_id[1])
newja2 = Normal.objects.language('ja').get(pk=self.normal_id[2])
self.assertEqual(newja1.shared_field, ja1.shared_field)
self.assertEqual(newja2.shared_field, ja2.shared_field)
self.assertEqual(newja1.translated_field, ja1.translated_field)
self.assertEqual(newja2.translated_field, ja2.translated_field)
@minimumDjangoVersion(1, 6)
def test_update_fallbacks(self):
        # Test it works - note that it is still not recommended, as the query is
        # much more complicated than it needs to be
qs = Normal.objects.language().fallbacks()
with self.assertNumQueries(1 if connection.features.update_can_self_select else 2):
qs.filter(shared_field=NORMAL[1].shared_field).update(shared_field='updated')
self.assertEqual(Normal.objects.language('ja').get(shared_field='updated').pk, self.normal_id[1])
self.assertEqual(Normal.objects.language('en').get(shared_field='updated').pk, self.normal_id[1])
class ValuesListTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_values_list_translated(self):
values = Normal.objects.language('en').values_list('translated_field', flat=True)
values_list = list(values)
self.assertCountEqual(values_list, [NORMAL[1].translated_field['en'],
NORMAL[2].translated_field['en']])
def test_values_list_shared(self):
values = Normal.objects.language('en').values_list('shared_field', flat=True)
values_list = list(values)
self.assertCountEqual(values_list, [NORMAL[1].shared_field,
NORMAL[2].shared_field])
def test_values_list_mixed(self):
values = Normal.objects.language('en').values_list('shared_field', 'translated_field')
values_list = list(values)
check = [
(NORMAL[1].shared_field, NORMAL[1].translated_field['en']),
(NORMAL[2].shared_field, NORMAL[2].translated_field['en']),
]
self.assertCountEqual(values_list, check)
def test_values_list_deferred_language(self):
with translation.override('ja'):
qs = Normal.objects.language()
with translation.override('en'):
values = qs.values_list('shared_field', 'translated_field')
values_list = list(values)
check = [
(NORMAL[1].shared_field, NORMAL[1].translated_field['en']),
(NORMAL[2].shared_field, NORMAL[2].translated_field['en']),
]
self.assertCountEqual(values_list, check)
class ValuesTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_values_shared(self):
values = Normal.objects.language('en').values('shared_field')
values_list = list(values)
check = [
{'shared_field': NORMAL[1].shared_field},
{'shared_field': NORMAL[2].shared_field},
]
self.assertCountEqual(values_list, check)
def test_values_translated(self):
values = Normal.objects.language('en').values('translated_field')
values_list = list(values)
check = [
{'translated_field': NORMAL[1].translated_field['en']},
{'translated_field': NORMAL[2].translated_field['en']},
]
self.assertCountEqual(values_list, check)
def test_values_mixed(self):
values = Normal.objects.language('en').values('translated_field', 'shared_field')
values_list = list(values)
check = [
{'translated_field': NORMAL[1].translated_field['en'],
'shared_field': NORMAL[1].shared_field},
{'translated_field': NORMAL[2].translated_field['en'],
'shared_field': NORMAL[2].shared_field},
]
self.assertCountEqual(values_list, check)
def test_values_post_language(self):
values = Normal.objects.language().values('shared_field').language('en')
values_list = list(values)
check = [
{'shared_field': NORMAL[1].shared_field},
{'shared_field': NORMAL[2].shared_field},
]
self.assertCountEqual(values_list, check)
def test_values_post_filter(self):
qs = Normal.objects.language('en').values('shared_field')
values = qs.filter(shared_field=NORMAL[1].shared_field)
values_list = list(values)
check = [
{'shared_field': NORMAL[1].shared_field},
]
self.assertCountEqual(values_list, check)
def test_values_deferred_language(self):
with translation.override('ja'):
qs = Normal.objects.language()
with translation.override('en'):
values = qs.values('translated_field')
values_list = list(values)
check = [
{'translated_field': NORMAL[1].translated_field['en']},
{'translated_field': NORMAL[2].translated_field['en']},
]
self.assertCountEqual(values_list, check)
class InBulkTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_empty_in_bulk(self):
with self.assertNumQueries(0):
result = Normal.objects.language('en').in_bulk([])
self.assertEqual(len(result), 0)
def test_in_bulk(self):
pk1, pk2 = self.normal_id[1], self.normal_id[2]
with self.assertNumQueries(1):
result = Normal.objects.language('en').in_bulk([pk1, pk2])
self.assertCountEqual((pk1, pk2), result)
self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['en'])
self.assertEqual(result[pk1].language_code, 'en')
self.assertEqual(result[pk2].shared_field, NORMAL[2].shared_field)
self.assertEqual(result[pk2].translated_field, NORMAL[2].translated_field['en'])
self.assertEqual(result[pk2].language_code, 'en')
def test_untranslated_in_bulk(self):
pk1 = self.normal_id[1]
with translation.override('ja'):
with self.assertNumQueries(2):
result = Normal.objects.untranslated().in_bulk([pk1])
self.assertCountEqual((pk1,), result)
self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['ja'])
self.assertEqual(result[pk1].language_code, 'ja')
@minimumDjangoVersion(1, 6)
def test_fallbacks_in_bulk(self):
(Normal.objects.language('en')
.filter(shared_field=NORMAL[2].shared_field)
.delete_translations())
with self.assertNumQueries(1):
pk1, pk2 = self.normal_id[1], self.normal_id[2]
result = Normal.objects.language('en').fallbacks('de', 'ja').in_bulk([pk1, pk2])
self.assertCountEqual((pk1, pk2), result)
self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['en'])
self.assertEqual(result[pk1].language_code, 'en')
self.assertEqual(result[pk2].shared_field, NORMAL[2].shared_field)
self.assertEqual(result[pk2].translated_field, NORMAL[2].translated_field['ja'])
self.assertEqual(result[pk2].language_code, 'ja')
def test_all_languages_in_bulk(self):
with self.assertRaises(ValueError):
Normal.objects.language('all').in_bulk([self.normal_id[1]])
def test_in_bulk_deferred_language(self):
pk1 = self.normal_id[1]
with translation.override('ja'):
qs = Normal.objects.language()
with translation.override('en'):
result = qs.in_bulk([pk1])
self.assertCountEqual((pk1,), result)
self.assertEqual(result[pk1].shared_field, NORMAL[1].shared_field)
self.assertEqual(result[pk1].translated_field, NORMAL[1].translated_field['en'])
self.assertEqual(result[pk1].language_code, 'en')
class DeleteTests(HvadTestCase, NormalFixture):
normal_count = 2
def test_delete_all(self):
Normal.objects.all().delete()
self.assertEqual(Normal.objects.count(), 0)
self.assertEqual(Normal._meta.translations_model.objects.count(), 0)
def test_delete_translation(self):
self.assertEqual(Normal._meta.translations_model.objects.count(), 4)
Normal.objects.language('en').delete_translations()
self.assertEqual(Normal.objects.untranslated().count(), 2)
self.assertEqual(Normal._meta.translations_model.objects.count(), 2)
Normal.objects.language('ja').delete_translations()
self.assertEqual(Normal.objects.untranslated().count(), 2)
self.assertEqual(Normal._meta.translations_model.objects.count(), 0)
def test_filtered_delete_translation(self):
self.assertEqual(Normal._meta.translations_model.objects.count(), 4)
(Normal.objects.language('en')
.filter(shared_field=NORMAL[1].shared_field)
.delete_translations())
self.assertEqual(Normal.objects.untranslated().count(), 2)
self.assertEqual(Normal._meta.translations_model.objects.count(), 3)
(Normal.objects.language('ja')
.filter(translated_field=NORMAL[2].translated_field['ja'])
.delete_translations())
self.assertEqual(Normal.objects.untranslated().count(), 2)
self.assertEqual(Normal._meta.translations_model.objects.count(), 2)
def test_delete_translation_deferred_language(self):
self.assertEqual(Normal._meta.translations_model.objects.count(), 4)
with translation.override('ja'):
qs = Normal.objects.language()
with translation.override('en'):
qs.delete_translations()
self.assertEqual(Normal.objects.language('ja').count(), 2)
self.assertEqual(Normal.objects.language('en').count(), 0)
@minimumDjangoVersion(1, 6)
def test_delete_fallbacks(self):
qs = Normal.objects.language().fallbacks()
qs.filter(shared_field=NORMAL[1].shared_field).delete()
self.assertEqual(Normal.objects.language('ja').count(), self.normal_count - 1)
self.assertEqual(Normal.objects.language('en').count(), self.normal_count - 1)
class GetTranslationFromInstanceTests(HvadTestCase, NormalFixture):
normal_count = 1
def test_simple(self):
        # get the English instance
        en = Normal.objects.language('en').get()
        # get the Japanese *translations*
        ja_trans = en.translations.get_language('ja')
        # get the Japanese *combined* instance
        ja = Normal.objects.language('ja').get(pk=en.pk)
ja = Normal.objects.language('ja').get(pk=en.pk)
self.assertEqual(en.shared_field, NORMAL[1].shared_field)
self.assertEqual(en.translated_field, NORMAL[1].translated_field['en'])
self.assertRaises(AttributeError, getattr, ja_trans, 'shared_field')
self.assertEqual(ja_trans.translated_field, NORMAL[1].translated_field['ja'])
self.assertEqual(ja.shared_field, NORMAL[1].shared_field)
self.assertEqual(ja.translated_field, NORMAL[1].translated_field['ja'])
class AggregateTests(HvadTestCase):
def test_aggregate(self):
from django.db.models import Avg
# Initial data
AggregateModel.objects.language("en").create(number=10, translated_number=20)
AggregateModel.objects.language("en").create(number=0, translated_number=0)
# Check both the translated and the shared aggregates as arguments
self.assertEqual(AggregateModel.objects.language("en").aggregate(Avg("number")), {'number__avg': 5})
self.assertEqual(AggregateModel.objects.language("en").aggregate(Avg("translated_number")), {'translated_number__avg': 10})
# Check the same calculation, but with keyword arguments
self.assertEqual(AggregateModel.objects.language("en").aggregate(num=Avg("number")), {'num': 5})
self.assertEqual(AggregateModel.objects.language("en").aggregate(tnum=Avg("translated_number")), {'tnum': 10})
class AnnotateTests(HvadTestCase, StandardFixture, NormalFixture):
normal_count = 2
standard_count = 4
def test_annotate(self):
qs = Normal.objects.language('en').annotate(Count('standards'))
self.assertEqual(len(qs), self.normal_count)
self.assertEqual(qs[0].standards__count, 2)
self.assertEqual(qs[1].standards__count, 2)
qs = Normal.objects.language('en').annotate(foo=Count('standards'))
self.assertEqual(len(qs), self.normal_count)
self.assertEqual(qs[0].foo, 2)
self.assertEqual(qs[1].foo, 2)
with self.assertRaises(ValueError):
qs = Normal.objects.language('en').annotate(Count('standards'), standards__count=Count('standards'))
class NotImplementedTests(HvadTestCase):
def test_notimplemented(self):
baseqs = SimpleRelated.objects.language('en')
self.assertRaises(NotImplementedError, baseqs.defer, 'shared_field')
self.assertRaises(NotImplementedError, baseqs.only)
self.assertRaises(NotImplementedError, baseqs.bulk_create, [])
# select_related with no field is not implemented
self.assertRaises(NotImplementedError, baseqs.select_related)
# select_related with language('all') is not implemented
self.assertRaises(NotImplementedError, len, baseqs.language('all').select_related('normal'))
if django.VERSION >= (1, 7):
self.assertRaises(NotImplementedError, baseqs.update_or_create)
class MinimumVersionTests(HvadTestCase):
def test_versions(self):
qs = SimpleRelated.objects.language('en')
if django.VERSION < (1, 6):
self.assertRaises(AttributeError, getattr, qs, 'earliest')
self.assertRaises(AttributeError, getattr, qs, 'datetimes')
if django.VERSION < (1, 7):
self.assertRaises(AttributeError, getattr, qs, 'update_or_create')
class ExcludeTests(HvadTestCase, NormalFixture):
normal_count = 1
    def test_exclude(self):
qs = Normal.objects.language('en').exclude(translated_field=NORMAL[1].translated_field['en'])
self.assertEqual(qs.count(), 0)
@minimumDjangoVersion(1, 6)
def test_fallbacks_exclude(self):
(Normal.objects.language('en')
.filter(shared_field=NORMAL[1].shared_field)
.delete_translations())
qs = (Normal.objects.language('en')
.fallbacks('de', 'ja')
.exclude(shared_field=NORMAL[1].shared_field))
self.assertEqual(qs.count(), 0)
def test_all_languages_exclude(self):
qs = Normal.objects.language('all').exclude(translated_field=NORMAL[1].translated_field['en'])
self.assertEqual(qs.count(), 1)
self.assertEqual(qs[0].translated_field, NORMAL[1].translated_field['ja'])
def test_invalid_all_languages_exclude(self):
with self.assertRaises(ValueError):
Normal.objects.language().exclude(language_code='all')
class ComplexFilterTests(HvadTestCase, StandardFixture, NormalFixture):
normal_count = 2
standard_count = 2
def test_qobject_filter(self):
shared_contains_one = Q(shared_field__contains='1')
shared_contains_two = Q(shared_field__contains='2')
qs = Normal.objects.language('en').filter(shared_contains_two)
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.shared_field, NORMAL[2].shared_field)
self.assertEqual(obj.translated_field, NORMAL[2].translated_field['en'])
qs = (Normal.objects.language('ja').filter(Q(shared_contains_one | shared_contains_two))
.order_by('shared_field'))
self.assertEqual(qs.count(), 2)
obj = qs[0]
self.assertEqual(obj.shared_field, NORMAL[1].shared_field)
self.assertEqual(obj.translated_field, NORMAL[1].translated_field['ja'])
obj = qs[1]
self.assertEqual(obj.shared_field, NORMAL[2].shared_field)
self.assertEqual(obj.translated_field, NORMAL[2].translated_field['ja'])
def test_aware_qobject_filter(self):
from hvad.utils import get_translation_aware_manager
manager = get_translation_aware_manager(Standard)
normal_one = Q(normal_field=STANDARD[1].normal_field)
normal_two = Q(normal_field=STANDARD[2].normal_field)
shared_one = Q(normal__shared_field=NORMAL[STANDARD[1].normal].shared_field)
translated_one_en = Q(normal__translated_field=NORMAL[STANDARD[1].normal].translated_field['en'])
translated_two_en = Q(normal__translated_field=NORMAL[STANDARD[2].normal].translated_field['en'])
# control group test
with translation.override('en'):
qs = manager.filter(shared_one)
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.normal_field, STANDARD[1].normal_field)
# basic Q object test
qs = manager.filter(translated_one_en)
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.normal_field, STANDARD[1].normal_field)
# test various intersection combinations
# use a spurious Q to test the logic of recursion along the way
qs = manager.filter(Q(normal_one & shared_one & translated_one_en))
self.assertEqual(qs.count(), 1)
obj = qs[0]
self.assertEqual(obj.normal_field, STANDARD[1].normal_field)
qs = manager.filter(Q(normal_one & translated_two_en))
self.assertEqual(qs.count(), 0)
qs = manager.filter(Q(shared_one & translated_two_en))
self.assertEqual(qs.count(), 0)
qs = manager.filter(Q(translated_one_en & translated_two_en))
self.assertEqual(qs.count(), 0)
# test various union combinations
qs = manager.filter(Q(normal_one | translated_one_en))
self.assertEqual(qs.count(), 1)
qs = manager.filter(Q(shared_one | translated_one_en))
self.assertEqual(qs.count(), 1)
qs = manager.filter(Q(normal_one | translated_two_en))
self.assertEqual(qs.count(), 2)
qs = manager.filter(Q(shared_one | translated_two_en))
self.assertEqual(qs.count(), 2)
qs = manager.filter(Q(translated_one_en | translated_two_en))
self.assertEqual(qs.count(), 2)
# misc more complex combinations
qs = manager.filter(Q(normal_one & (translated_one_en | translated_two_en)))
self.assertEqual(qs.count(), 1)
qs = manager.filter(Q(normal_two & (translated_one_en | translated_two_en)))
self.assertEqual(qs.count(), 1)
qs = manager.filter(shared_one & ~translated_one_en)
self.assertEqual(qs.count(), 0)
qs = manager.filter(shared_one & ~translated_two_en)
self.assertEqual(qs.count(), 1)
    def test_complex_filter(self):
qs = Normal.objects.language('en').complex_filter({})
self.assertEqual(qs.count(), self.normal_count)
self.assertRaises(NotImplementedError,
Normal.objects.language('en').complex_filter,
Q(shared_field=NORMAL[1].shared_field))
|
|
# Copyright 2019 The KerasTuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tuner for Scikit-learn Models."""
import collections
import inspect
import os
import pickle
import warnings
import numpy as np
import tensorflow as tf
try:
import pandas as pd # pytype: disable=import-error
except ImportError:
pd = None
try:
import sklearn # pytype: disable=import-error
import sklearn.model_selection
import sklearn.pipeline
except ImportError:
sklearn = None
from keras_tuner.engine import base_tuner
def split_data(data, indices):
if isinstance(data, np.ndarray):
return data[indices]
elif pd is not None and isinstance(data, pd.DataFrame):
return data.iloc[indices]
else:
        raise TypeError(
            "Expected data to be a numpy array or pandas DataFrame, "
            "got {}.".format(type(data))
        )
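# Small sketch of the dispatch above (illustrative): numpy arrays use fancy
# indexing while DataFrames go through .iloc, so both return the same rows.
#
# X = np.arange(10).reshape(5, 2)
# split_data(X, [0, 2])                    # rows 0 and 2 of the ndarray
# if pd is not None:
#     split_data(pd.DataFrame(X), [0, 2])  # same rows as a DataFrame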
class SklearnTuner(base_tuner.BaseTuner):
"""Tuner for Scikit-learn Models.
Performs cross-validated hyperparameter search for Scikit-learn models.
Examples:
```python
import keras_tuner as kt
from sklearn import ensemble
from sklearn import datasets
from sklearn import linear_model
from sklearn import metrics
from sklearn import model_selection
def build_model(hp):
model_type = hp.Choice('model_type', ['random_forest', 'ridge'])
if model_type == 'random_forest':
model = ensemble.RandomForestClassifier(
n_estimators=hp.Int('n_estimators', 10, 50, step=10),
max_depth=hp.Int('max_depth', 3, 10))
else:
model = linear_model.RidgeClassifier(
alpha=hp.Float('alpha', 1e-3, 1, sampling='log'))
return model
tuner = kt.tuners.SklearnTuner(
oracle=kt.oracles.BayesianOptimizationOracle(
objective=kt.Objective('score', 'max'),
max_trials=10),
hypermodel=build_model,
scoring=metrics.make_scorer(metrics.accuracy_score),
cv=model_selection.StratifiedKFold(5),
directory='.',
project_name='my_project')
X, y = datasets.load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = model_selection.train_test_split(
X, y, test_size=0.2)
tuner.search(X_train, y_train)
best_model = tuner.get_best_models(num_models=1)[0]
```
Args:
oracle: A `keras_tuner.Oracle` instance. Note that for this `Tuner`,
the `objective` for the `Oracle` should always be set to
`Objective('score', direction='max')`. Also, `Oracle`s that exploit
Neural-Network-specific training (e.g. `Hyperband`) should not be
used with this `Tuner`.
hypermodel: A `HyperModel` instance (or callable that takes
hyperparameters and returns a Model instance).
scoring: An sklearn `scoring` function. For more information, see
`sklearn.metrics.make_scorer`. If not provided, the Model's default
scoring will be used via `model.score`. Note that if you are
searching across different Model families, the default scoring for
these Models will often be different. In this case you should
supply `scoring` here in order to make sure your Models are being
scored on the same metric.
metrics: Additional `sklearn.metrics` functions to monitor during search.
Note that these metrics do not affect the search process.
cv: An `sklearn.model_selection` Splitter class. Used to
determine how samples are split up into groups for
cross-validation.
**kwargs: Keyword arguments relevant to all `Tuner` subclasses. Please
see the docstring for `Tuner`.
"""
def __init__(
self, oracle, hypermodel, scoring=None, metrics=None, cv=None, **kwargs
):
super().__init__(oracle=oracle, hypermodel=hypermodel, **kwargs)
if sklearn is None:
raise ImportError(
"Please install sklearn before using the `SklearnTuner`."
)
self.scoring = scoring
if metrics is None:
metrics = []
if not isinstance(metrics, (list, tuple)):
metrics = [metrics]
self.metrics = metrics
self.cv = cv or sklearn.model_selection.KFold(
5, shuffle=True, random_state=1
)
def search(self, X, y, sample_weight=None, groups=None):
"""Performs hyperparameter search.
Args:
X: See docstring for `model.fit` for the `sklearn` Models being tuned.
y: See docstring for `model.fit` for the `sklearn` Models being tuned.
sample_weight: Optional. See docstring for `model.fit` for the
`sklearn` Models being tuned.
groups: Optional. Required for `sklearn.model_selection` Splitter
classes that split based on group labels (For example, see
`sklearn.model_selection.GroupKFold`).
"""
# Only overridden for the docstring.
return super().search(X, y, sample_weight=sample_weight, groups=groups)
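    # Illustrative sketch (identifiers assumed, not from the original): for
    # group-aware splitters, pass the labels through `groups`, e.g.
    #
    # tuner = SklearnTuner(oracle, build_model,
    #                      cv=sklearn.model_selection.GroupKFold(n_splits=3))
    # tuner.search(X, y, groups=group_labels)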
def run_trial(self, trial, X, y, sample_weight=None, groups=None):
metrics = collections.defaultdict(list)
# For cross-validation methods that expect a `groups` argument.
cv_kwargs = {"groups": groups} if groups is not None else {}
for train_indices, test_indices in self.cv.split(X, y, **cv_kwargs):
X_train = split_data(X, train_indices)
y_train = split_data(y, train_indices)
X_test = split_data(X, test_indices)
y_test = split_data(y, test_indices)
sample_weight_train = (
sample_weight[train_indices] if sample_weight is not None else None
)
model = self.hypermodel.build(trial.hyperparameters)
supports_sw = "sample_weight" in inspect.getfullargspec(model.fit).args
if isinstance(model, sklearn.pipeline.Pipeline) or not supports_sw:
model.fit(X_train, y_train)
else:
model.fit(X_train, y_train, sample_weight=sample_weight_train)
sample_weight_test = (
sample_weight[test_indices] if sample_weight is not None else None
)
if self.scoring is None:
score = model.score(X_test, y_test, sample_weight=sample_weight_test)
else:
score = self.scoring(
model, X_test, y_test, sample_weight=sample_weight_test
)
metrics["score"].append(score)
if self.metrics:
y_test_pred = model.predict(X_test)
for metric in self.metrics:
result = metric(
y_test, y_test_pred, sample_weight=sample_weight_test
)
metrics[metric.__name__].append(result)
trial_metrics = {name: np.mean(values) for name, values in metrics.items()}
self.oracle.update_trial(trial.trial_id, trial_metrics)
self.save_model(trial.trial_id, model)
def save_model(self, trial_id, model, step=0):
fname = os.path.join(self.get_trial_dir(trial_id), "model.pickle")
with tf.io.gfile.GFile(fname, "wb") as f:
pickle.dump(model, f)
def load_model(self, trial):
fname = os.path.join(self.get_trial_dir(trial.trial_id), "model.pickle")
with tf.io.gfile.GFile(fname, "rb") as f:
return pickle.load(f)
class Sklearn(SklearnTuner):
def __init__(self, *args, **kwargs):
warnings.warn(
"The `Sklearn` class is deprecated, please use `SklearnTuner`.",
DeprecationWarning,
)
super().__init__(*args, **kwargs)
|
|
import unittest
from conans.test.tools import TestServer, TestClient
from conans.model.ref import ConanFileReference
import os
from conans.paths import CONANFILE
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from conans.util.files import load
class OnlySourceTest(unittest.TestCase):
def setUp(self):
test_server = TestServer()
self.servers = {"default": test_server}
def _create(self, client, number, version, deps=None, export=True):
files = cpp_hello_conan_files(number, version, deps, build=False, config=False)
client.save(files, clean_first=True)
if export:
client.run("export lasote/stable")
def conan_test_test(self):
'''Checks --build in test command'''
client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
self._create(client, "Hello0", "0.0")
self._create(client, "Hello1", "1.1", ["Hello0/0.0@lasote/stable"])
# Now test out Hello2
self._create(client, "Hello2", "2.2", ["Hello1/1.1@lasote/stable"], export=True)
hello2conanfile = load(os.path.join(client.current_folder, CONANFILE))
client.save({CONANFILE: hello2conanfile})
test_conanfile = '''
from conans.model.conan_file import ConanFile
class DefaultNameConan(ConanFile):
settings = "os", "compiler", "arch"
requires = "Hello2/2.2@lasote/stable"
generators = "cmake"
def test(self):
pass
'''
client.save({"test/%s" % CONANFILE: test_conanfile})
# Should recognize the hello package
# Will fail because Hello0/0.0 and Hello1/1.1 have no built packages
# and by default no packages are built
error = client.run("test", ignore_error=True)
self.assertTrue(error)
self.assertIn('Try to build from sources with "--build Hello0"', client.user_io.out)
# We generate the package for Hello0/0.0
client.run("install Hello0/0.0@lasote/stable --build Hello0")
# Still missing Hello1/1.1
error = client.run("test", ignore_error=True)
self.assertTrue(error)
self.assertIn('Try to build from sources with "--build Hello1"', client.user_io.out)
# We generate the package for Hello1/1.1
client.run("install Hello1/1.1@lasote/stable --build Hello1")
# Now Hello2 should be built and not fail
client.run("test")
self.assertNotIn("Can't find a 'Hello2/2.2@lasote/stable' package", client.user_io.out)
self.assertIn('Hello2/2.2@lasote/stable: WARN: Forced build from source',
client.user_io.out)
# The package is now generated, but it should be built again
client.run("test")
self.assertIn('Hello2/2.2@lasote/stable: WARN: Forced build from source',
client.user_io.out)
def build_policies_update_test(self):
client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
conanfile = """
from conans import ConanFile
class MyPackage(ConanFile):
name = "test"
version = "1.9"
build_policy = 'always'
def source(self):
self.output.info("Getting sources")
def build(self):
self.output.info("Building sources")
def package(self):
self.output.info("Packaging this test package")
"""
files = {CONANFILE: conanfile}
client.save(files, clean_first=True)
client.run("export lasote/stable")
client.run("install test/1.9@lasote/stable")
self.assertIn("Getting sources", client.user_io.out)
self.assertIn("Building sources", client.user_io.out)
self.assertIn("Packaging this test package", client.user_io.out)
self.assertIn("Building package from source as defined by build_policy='always'",
client.user_io.out)
client.run("upload test/1.9@lasote/stable")
def build_policies_in_conanfile_test(self):
client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
files = cpp_hello_conan_files("Hello0", "1.0", [], config=False, build=False)
# --- Build policy to missing ---
files[CONANFILE] = files[CONANFILE].replace("exports = '*'", "exports = '*'\n build_policy = 'missing'")
client.save(files, clean_first=True)
client.run("export lasote/stable")
# Install, it will build automatically if missing (without the --build missing option)
client.run("install Hello0/1.0@lasote/stable -g txt")
self.assertIn("Building", client.user_io.out)
self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out)
# Try to do it again; now we have the package, so no build is done
client.run("install Hello0/1.0@lasote/stable -g txt")
self.assertNotIn("Building", client.user_io.out)
self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out)
# Try now to upload all packages, should not crash because of the "missing" build policy
client.run("upload Hello0/1.0@lasote/stable --all", ignore_error=False)
# --- Build policy to always ---
files[CONANFILE] = files[CONANFILE].replace("build_policy = 'missing'", "build_policy = 'always'")
client.save(files, clean_first=True)
client.run("export lasote/stable")
# Install, it will build automatically if missing (without the --build missing option)
client.run("install Hello0/1.0@lasote/stable -g txt")
self.assertIn("Detected build_policy 'always', trying to remove source folder",
client.user_io.out)
self.assertIn("Building", client.user_io.out)
self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out)
# Try to do it again; we have the package now, but it is built again
client.run("install Hello0/1.0@lasote/stable -g txt")
self.assertIn("Building", client.user_io.out)
self.assertIn("Detected build_policy 'always', trying to remove source folder",
client.user_io.out)
self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out)
# Try now to upload all packages, should crash because of the "always" build policy
client.run("upload Hello0/1.0@lasote/stable --all", ignore_error=True)
self.assertIn("no packages can be uploaded", client.user_io.out)
def reuse_test(self):
client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable")
files = cpp_hello_conan_files("Hello0", "0.1")
files[CONANFILE] = files[CONANFILE].replace("build", "build2")
client.save(files)
client.run("export lasote/stable")
client.run("install %s --build missing" % str(conan_reference))
self.assertTrue(os.path.exists(client.paths.builds(conan_reference)))
self.assertTrue(os.path.exists(client.paths.packages(conan_reference)))
# Upload
client.run("upload %s --all" % str(conan_reference))
# Now from other "computer" install the uploaded conans with same options (nothing)
other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
other_conan.run("install %s --build missing" % str(conan_reference))
self.assertFalse(os.path.exists(other_conan.paths.builds(conan_reference)))
self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference)))
# Now from other "computer" install the uploaded conans, forcing build from source
other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
other_conan.run("install %s --build" % str(conan_reference))
self.assertTrue(os.path.exists(other_conan.paths.builds(conan_reference)))
self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference)))
# Use an invalid pattern and check that it is not built from source
other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
other_conan.run("install %s --build HelloInvalid" % str(conan_reference))
self.assertFalse(os.path.exists(other_conan.paths.builds(conan_reference)))
self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference)))
# Use a valid pattern as well and check that it is built from source
other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
other_conan.run("install %s --build HelloInvalid -b Hello" % str(conan_reference))
self.assertTrue(os.path.exists(other_conan.paths.builds(conan_reference)))
self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference)))
# Now, even if the package is in the local store, check that it is rebuilt
other_conan.run("install %s -b Hello*" % str(conan_reference))
self.assertIn("Copying sources to build folder", other_conan.user_io.out)
other_conan.run("install %s" % str(conan_reference))
self.assertNotIn("Copying sources to build folder", other_conan.user_io.out)
def detect_name_quotes_test(self):
base = '''
from conans import ConanFile
class ConanLib(ConanFile):
name = 'lib'
version = "0.1"
'''
test = '''
from conans import ConanFile
class ConanLib(ConanFile):
requires = "lib/0.1@user/channel"
def build(self):
self.conanfile_directory
def test(self):
pass
'''
files = {"conanfile.py": base,
"test/conanfile.py": test}
client = TestClient()
client.save(files)
client.run("export user/channel")
client.run("test_package")
|
|
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import logging
import netaddr
import re
import time
import xml.etree.ElementTree as ET
from oslo_config import cfg
from oslo_utils import importutils
from networking_cisco.plugins.cisco.cfg_agent import cfg_exceptions as cfg_exc
from networking_cisco.plugins.cisco.cfg_agent.device_drivers import (
devicedriver_api)
from networking_cisco.plugins.cisco.cfg_agent.device_drivers.iosxe import (
cisco_iosxe_snippets as snippets)
from networking_cisco.plugins.cisco.common.htparser import HTParser
from networking_cisco.plugins.cisco.extensions import ha
ncclient = importutils.try_import('ncclient')
manager = importutils.try_import('ncclient.manager')
LOG = logging.getLogger(__name__)
logging.getLogger('ncclient.transport.session').setLevel(logging.INFO)
class IosXeRoutingDriver(devicedriver_api.RoutingDriverBase):
"""Generic IOS XE Routing Driver.
This driver encapsulates the configuration logic via NETCONF protocol to
configure a generic (IOS-XE based) device for implementing
Neutron L3 services. These services include routing, NAT and floating
IPs (as per Neutron terminology).
"""
DEV_NAME_LEN = 14
def __init__(self, **device_params):
try:
self._host_ip = device_params['management_ip_address']
self._host_ssh_port = device_params['protocol_port']
credentials = device_params.get('credentials', {})
self._username = credentials.get('user_name')
self._password = credentials.get('password')
self._timeout = (device_params.get('timeout') or
cfg.CONF.cfg_agent.device_connection_timeout)
self._ncc_connection = None
self._itfcs_enabled = False
except KeyError as e:
LOG.error("Missing device parameter:%s. Aborting "
"IosXeRoutingDriver initialization", e)
raise cfg_exc.InitializationException()
# Public Functions
def router_added(self, ri):
self._create_vrf(ri)
def router_removed(self, ri):
self._remove_vrf(ri)
def internal_network_added(self, ri, port):
self._create_sub_interface(ri, port)
if port.get(ha.HA_INFO) is not None and ri.get(ha.ENABLED, False):
self._add_ha(ri, port)
def internal_network_removed(self, ri, port):
self._remove_sub_interface(port)
def external_gateway_added(self, ri, ext_gw_port):
self._create_sub_interface(ri, ext_gw_port)
# set default route via this network's gateway ip
self._add_default_route(ri, ext_gw_port)
def external_gateway_removed(self, ri, ext_gw_port):
# remove default route via this network's gateway ip
self._remove_default_route(ri, ext_gw_port)
# finally, remove external network sub-interface
self._remove_sub_interface(ext_gw_port)
def enable_internal_network_NAT(self, ri, port, ext_gw_port):
self._add_internal_nw_nat_rules(ri, port, ext_gw_port)
def disable_internal_network_NAT(self, ri, port, ext_gw_port):
self._remove_internal_nw_nat_rules(ri, [port], ext_gw_port)
def floating_ip_added(self, ri, ext_gw_port, floating_ip, fixed_ip):
self._add_floating_ip(ri, floating_ip, fixed_ip)
def floating_ip_removed(self, ri, ext_gw_port, floating_ip, fixed_ip):
self._remove_floating_ip(ri, ext_gw_port, floating_ip, fixed_ip)
def routes_updated(self, ri, action, route):
self._update_routing_table(ri, action, route)
def clear_connection(self):
self._ncc_connection = None
def cleanup_invalid_cfg(self, hd, routers):
# at this point nothing to be done
return
def get_configuration(self):
return self._get_running_config(split=False)
# Internal Functions
def _create_sub_interface(self, ri, port):
vrf_name = self._get_vrf_name(ri)
ip_cidr = port['ip_cidr']
net_mask = netaddr.IPNetwork(ip_cidr).netmask
gateway_ip = ip_cidr.split('/')[0]
sub_interface = self._get_interface_name_from_hosting_port(port)
vlan = self._get_interface_vlan_from_hosting_port(port)
self._do_create_sub_interface(sub_interface, vlan, vrf_name,
gateway_ip, net_mask)
def _remove_sub_interface(self, port):
sub_interface = self._get_interface_name_from_hosting_port(port)
self._do_remove_sub_interface(sub_interface)
def _add_ha(self, ri, port):
func_dict = {
ha.HA_HSRP: self._add_ha_hsrp,
ha.HA_VRRP: self._add_ha_vrrp,
ha.HA_GLBP: self._add_ha_gblp
}
# invoke the right function for the ha type
func_dict[ri[ha.DETAILS][ha.TYPE]](ri, port)
def _add_ha_hsrp(self, ri, port):
priority = ri[ha.DETAILS][ha.PRIORITY]
port_ha_info = port[ha.HA_INFO]
group = port_ha_info['group']
ip = port_ha_info['ha_port']['fixed_ips'][0]['ip_address']
if ip and group and priority:
vrf_name = self._get_vrf_name(ri)
sub_interface = self._get_interface_name_from_hosting_port(port)
self._do_add_ha_hsrp(sub_interface, vrf_name, priority, group, ip)
def _add_ha_vrrp(self, ri, port):
raise NotImplementedError()
def _add_ha_gblp(self, ri, port):
raise NotImplementedError()
def _remove_ha(self, ri, port):
pass
def _get_acl_name_from_vlan(self, vlan):
return "acl_%s" % vlan
def _add_internal_nw_nat_rules(self, ri, port, ext_port):
vrf_name = self._get_vrf_name(ri)
in_vlan = self._get_interface_vlan_from_hosting_port(port)
acl_no = self._get_acl_name_from_vlan(in_vlan)
internal_cidr = port['ip_cidr']
internal_net = netaddr.IPNetwork(internal_cidr).network
net_mask = netaddr.IPNetwork(internal_cidr).hostmask
inner_itfc = self._get_interface_name_from_hosting_port(port)
outer_itfc = self._get_interface_name_from_hosting_port(ext_port)
self._nat_rules_for_internet_access(acl_no, internal_net,
net_mask, inner_itfc,
outer_itfc, vrf_name)
def _remove_internal_nw_nat_rules(self, ri, ports, ext_port):
acls = []
# first disable nat in all inner ports
for port in ports:
in_itfc_name = self._get_interface_name_from_hosting_port(port)
inner_vlan = self._get_interface_vlan_from_hosting_port(port)
acls.append(self._get_acl_name_from_vlan(inner_vlan))
self._remove_interface_nat(in_itfc_name, 'inside')
# wait for two seconds
LOG.debug("Sleep for 2 seconds before clearing NAT rules")
time.sleep(2)
# clear the NAT translation table
self._remove_dyn_nat_translations()
# remove dynamic nat rules and acls
vrf_name = self._get_vrf_name(ri)
ext_itfc_name = self._get_interface_name_from_hosting_port(ext_port)
for acl in acls:
self._remove_dyn_nat_rule(acl, ext_itfc_name, vrf_name)
def _add_default_route(self, ri, ext_gw_port):
ext_gw_ip = ext_gw_port['subnet']['gateway_ip']
if ext_gw_ip:
vrf_name = self._get_vrf_name(ri)
conf_str = snippets.DEFAULT_ROUTE_CFG % (vrf_name, ext_gw_ip)
if not self._cfg_exists(conf_str):
conf_str = snippets.SET_DEFAULT_ROUTE % (vrf_name, ext_gw_ip)
self._edit_running_config(conf_str, 'SET_DEFAULT_ROUTE')
def _remove_default_route(self, ri, ext_gw_port):
ext_gw_ip = ext_gw_port['subnet']['gateway_ip']
if ext_gw_ip:
vrf_name = self._get_vrf_name(ri)
conf_str = snippets.DEFAULT_ROUTE_CFG % (vrf_name, ext_gw_ip)
if self._cfg_exists(conf_str):
conf_str = snippets.REMOVE_DEFAULT_ROUTE % (vrf_name,
ext_gw_ip)
self._edit_running_config(conf_str, 'REMOVE_DEFAULT_ROUTE')
def _add_floating_ip(self, ri, floating_ip, fixed_ip):
vrf_name = self._get_vrf_name(ri)
self._do_add_floating_ip(floating_ip, fixed_ip, vrf_name)
def _remove_floating_ip(self, ri, ext_gw_port, floating_ip, fixed_ip):
vrf_name = self._get_vrf_name(ri)
out_itfc_name = self._get_interface_name_from_hosting_port(ext_gw_port)
# first remove NAT from outer interface
self._remove_interface_nat(out_itfc_name, 'outside')
# clear the NAT translation table
self._remove_dyn_nat_translations()
# remove the floating ip
self._do_remove_floating_ip(floating_ip, fixed_ip, vrf_name)
# enable NAT on outer interface
self._add_interface_nat(out_itfc_name, 'outside')
def _update_routing_table(self, ri, action, route):
vrf_name = self._get_vrf_name(ri)
destination_net = netaddr.IPNetwork(route['destination'])
dest = destination_net.network
dest_mask = destination_net.netmask
next_hop = route['nexthop']
if action == 'replace':
self._add_static_route(dest, dest_mask, next_hop, vrf_name)
elif action == 'delete':
self._remove_static_route(dest, dest_mask, next_hop, vrf_name)
else:
LOG.error('Unknown route command %s', action)
def _create_vrf(self, ri):
vrf_name = self._get_vrf_name(ri)
self._do_create_vrf(vrf_name)
def _remove_vrf(self, ri):
vrf_name = self._get_vrf_name(ri)
self._do_remove_vrf(vrf_name)
def _get_vrf_name(self, ri):
return ri.router_name()[:self.DEV_NAME_LEN]
def _get_connection(self):
"""Make SSH connection to the IOS XE device.
The external ncclient library is used for creating this connection.
This method keeps state of any existing connections and reuses them if
already connected. Also, interfaces (except management) are typically
disabled by default when the device boots. So, if connecting for the
first time, the driver will enable all other interfaces and keep that
status in the `_itfcs_enabled` flag.
"""
try:
if self._ncc_connection and self._ncc_connection.connected:
return self._ncc_connection
else:
# ncclient needs 'name' to be 'csr' in order to communicate
# with the device in the correct way.
self._ncc_connection = manager.connect(
host=self._host_ip, port=self._host_ssh_port,
username=self._username, password=self._password,
device_params={'name': "csr"}, timeout=self._timeout)
if not self._itfcs_enabled:
self._itfcs_enabled = self._enable_itfcs(
self._ncc_connection)
return self._ncc_connection
except Exception as e:
conn_params = {'host': self._host_ip, 'port': self._host_ssh_port,
'user': self._username,
'timeout': self._timeout, 'reason': e.message}
raise cfg_exc.ConnectionException(**conn_params)
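# An illustrative, stand-alone version of the ncclient call made above
# (host and credentials are placeholders):
#
#     from ncclient import manager
#
#     conn = manager.connect(host='192.0.2.1', port=22,
#                            username='admin', password='secret',
#                            device_params={'name': 'csr'}, timeout=30)
#     assert conn.connected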
def _get_interface_name_from_hosting_port(self, port):
vlan = self._get_interface_vlan_from_hosting_port(port)
return 'GigabitEthernet.%s' % vlan
@staticmethod
def _get_interface_vlan_from_hosting_port(port):
return port['hosting_info']['segmentation_id']
def _get_interfaces(self):
"""Get a list of interfaces on this hosting device.
:return: List of the interfaces
"""
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
itfcs_raw = parse.find_lines("^interface GigabitEthernet")
itfcs = [raw_if.strip().split(' ')[1] for raw_if in itfcs_raw]
LOG.debug("Interfaces on hosting device: %s", itfcs)
return itfcs
def _get_interface_ip(self, interface_name):
"""Get the ip address for an interface.
:param interface_name: interface_name as a string
:return: ip address of interface as a string
"""
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
children = parse.find_children("^interface %s" % interface_name)
for line in children:
if 'ip address' in line:
ip_address = line.strip().split(' ')[2]
LOG.debug("IP Address:%s", ip_address)
return ip_address
LOG.warning("Cannot find interface: %s", interface_name)
return None
def _interface_exists(self, interface):
"""Check whether interface exists."""
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
itfcs_raw = parse.find_lines("^interface " + interface)
return len(itfcs_raw) > 0
def _enable_itfcs(self, conn):
"""Enable the interfaces of a IOS XE device.
:param conn: Connection object
:return: True or False
"""
return True
def _get_vrfs(self):
"""Get the current VRFs configured in the device.
:return: A list of vrf names as string
"""
vrfs = []
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
vrfs_raw = parse.find_lines("^vrf definition")
for line in vrfs_raw:
# raw format: ['vrf definition <vrf-name>', ...]
vrf_name = line.strip().split(' ')[2]
vrfs.append(vrf_name)
LOG.info("VRFs:%s", vrfs)
return vrfs
def _get_capabilities(self):
"""Get the servers NETCONF capabilities.
:return: List of server capabilities.
"""
conn = self._get_connection()
capabilities = []
for c in conn.server_capabilities:
capabilities.append(c)
LOG.debug("Server capabilities: %s", capabilities)
return capabilities
def _get_running_config(self, split=True):
"""Get the IOS XE device's current running config.
:return: Current IOS running config as multiline string
"""
conn = self._get_connection()
config = conn.get_config(source="running")
if config:
root = ET.fromstring(config._raw)
running_config = root[0][0]
if split is True:
rgx = re.compile("\r*\n+")
ioscfg = rgx.split(running_config.text)
else:
ioscfg = running_config.text
return ioscfg
def _check_acl(self, acl_no, network, netmask):
"""Check a ACL config exists in the running config.
:param acl_no: access control list (ACL) number
:param network: network which this ACL permits
:param netmask: netmask of the network
:return: True if the ACL is present and matches, False otherwise
"""
exp_cfg_lines = ['ip access-list standard ' + str(acl_no),
' permit ' + str(network) + ' ' + str(netmask)]
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
acls_raw = parse.find_children(exp_cfg_lines[0])
if acls_raw:
if exp_cfg_lines[1] in acls_raw:
return True
LOG.error("Mismatch in ACL configuration for %s", acl_no)
return False
LOG.debug("%s is not present in config", acl_no)
return False
def _cfg_exists(self, cfg_str):
"""Check a partial config string exists in the running config.
:param cfg_str: config string to check
:return : True or False
"""
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
cfg_raw = parse.find_lines("^" + cfg_str)
LOG.debug("_cfg_exists(): Found lines %s", cfg_raw)
return len(cfg_raw) > 0
def _set_interface(self, name, ip_address, mask):
conf_str = snippets.SET_INTC % (name, ip_address, mask)
self._edit_running_config(conf_str, 'SET_INTC')
def _do_create_vrf(self, vrf_name):
conf_str = snippets.CREATE_VRF % vrf_name
self._edit_running_config(conf_str, 'CREATE_VRF')
def _do_remove_vrf(self, vrf_name):
if vrf_name in self._get_vrfs():
conf_str = snippets.REMOVE_VRF % vrf_name
self._edit_running_config(conf_str, 'REMOVE_VRF')
def _do_create_sub_interface(self, sub_interface, vlan_id, vrf_name, ip,
mask):
if vrf_name not in self._get_vrfs():
LOG.error("VRF %s not present", vrf_name)
conf_str = snippets.CREATE_SUBINTERFACE % (sub_interface, vlan_id,
vrf_name, ip, mask)
self._edit_running_config(conf_str, 'CREATE_SUBINTERFACE')
def _do_remove_sub_interface(self, sub_interface):
# optional: verify this is the correct sub_interface
if self._interface_exists(sub_interface):
conf_str = snippets.REMOVE_SUBINTERFACE % sub_interface
self._edit_running_config(conf_str, 'REMOVE_SUBINTERFACE')
def _do_add_ha_hsrp(self, sub_interface, vrf_name, priority, group, ip):
if vrf_name not in self._get_vrfs():
LOG.error("VRF %s not present", vrf_name)
conf_str = snippets.SET_INTC_HSRP % (sub_interface, vrf_name, group,
priority, group, ip)
action = "SET_INTC_HSRP (Group: %s, Priority: % s)" % (group, priority)
self._edit_running_config(conf_str, action)
def _do_remove_ha_hsrp(self, sub_interface, group):
conf_str = snippets.REMOVE_INTC_HSRP % (sub_interface, group)
action = ("REMOVE_INTC_HSRP (subinterface:%s, Group:%s)"
% (sub_interface, group))
self._edit_running_config(conf_str, action)
def _get_interface_cfg(self, interface):
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
return parse.find_children('interface ' + interface)
def _nat_rules_for_internet_access(self, acl_no, network, netmask,
inner_itfc, outer_itfc, vrf_name):
"""Configure the NAT rules for an internal network.
:param acl_no: ACL number of the internal network.
:param network: internal network
:param netmask: netmask of the internal network.
:param inner_itfc: (name of) interface connected to the internal
network
:param outer_itfc: (name of) interface connected to the external
network
:param vrf_name: VRF corresponding to this virtual router
:return: True if configuration succeeded
:raises: networking_cisco.plugins.cisco.cfg_agent.cfg_exceptions.
IOSXEConfigException
"""
pass
def _add_interface_nat(self, itfc_name, itfc_type):
conf_str = snippets.SET_NAT % (itfc_name, itfc_type)
self._edit_running_config(conf_str, 'SET_NAT_' + itfc_type)
def _remove_interface_nat(self, itfc_name, itfc_type):
conf_str = snippets.REMOVE_NAT % (itfc_name, itfc_type)
self._edit_running_config(conf_str, 'REMOVE_NAT_' + itfc_type)
def _remove_dyn_nat_rule(self, acl_no, outer_itfc_name, vrf_name):
conf_str = snippets.SNAT_CFG % (acl_no, outer_itfc_name, vrf_name)
if self._cfg_exists(conf_str):
conf_str = snippets.REMOVE_DYN_SRC_TRL_INTFC % (
acl_no, outer_itfc_name, vrf_name)
self._edit_running_config(conf_str, 'REMOVE_DYN_SRC_TRL_INTFC')
conf_str = snippets.REMOVE_ACL % acl_no
self._edit_running_config(conf_str, 'REMOVE_ACL')
def _remove_dyn_nat_translations(self):
conf_str = snippets.CLEAR_DYN_NAT_TRANS
self._edit_running_config(conf_str, 'CLEAR_DYN_NAT_TRANS')
def _do_add_floating_ip(self, floating_ip, fixed_ip, vrf):
conf_str = snippets.SET_STATIC_SRC_TRL % (fixed_ip, floating_ip, vrf)
self._edit_running_config(conf_str, 'SET_STATIC_SRC_TRL')
def _do_remove_floating_ip(self, floating_ip, fixed_ip, vrf):
conf_str = snippets.REMOVE_STATIC_SRC_TRL % (
fixed_ip, floating_ip, vrf)
self._edit_running_config(conf_str, 'REMOVE_STATIC_SRC_TRL')
def _get_floating_ip_cfg(self):
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
res = parse.find_lines('ip nat inside source static')
return res
def _add_static_route(self, dest, dest_mask, next_hop, vrf):
conf_str = snippets.SET_IP_ROUTE % (vrf, dest, dest_mask, next_hop)
self._edit_running_config(conf_str, 'SET_IP_ROUTE')
def _remove_static_route(self, dest, dest_mask, next_hop, vrf):
conf_str = snippets.REMOVE_IP_ROUTE % (vrf, dest, dest_mask, next_hop)
self._edit_running_config(conf_str, 'REMOVE_IP_ROUTE')
def _get_static_route_cfg(self):
ios_cfg = self._get_running_config()
parse = HTParser(ios_cfg)
return parse.find_lines('ip route')
def caller_name(self, skip=2):
"""
Get a name of a caller in the format module.class.method
`skip` specifies how many levels of stack to skip while getting caller
name. skip=1 means "who calls me", skip=2 "who calls my caller" etc.
An empty string is returned if skipped levels exceed stack height
"""
stack = inspect.stack()
start = 0 + skip
if len(stack) < start + 1:
return ''
parentframe = stack[start][0]
name = []
module = inspect.getmodule(parentframe)
# `module` can be None when the frame is executed directly in a console
# TODO(asr1kteam): consider using __main__
if module:
name.append(module.__name__)
# detect classname
if 'self' in parentframe.f_locals:
# I don't know any way to detect call from the object method
# XXX: there seems to be no way to detect static method call,
# it will be just a function call
name.append(parentframe.f_locals['self'].__class__.__name__)
codename = parentframe.f_code.co_name
if codename != '<module>': # top level usually
name.append(codename) # function or a method
del parentframe
return ".".join(name)
# [ OR ]
# curframe = inspect.currentframe()
# calframe = inspect.getouterframes(curframe, 2)
# return calframe[1][3]
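# An illustrative call chain for caller_name() with the default skip=2:
# if _do_create_vrf() calls _edit_running_config(), which in turn calls
# self.caller_name(), the returned string has the form
# '<module>.IosXeRoutingDriver._do_create_vrf' (the module part depends
# on how this file is imported).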
def _edit_running_config(self, conf_str, snippet):
conn = self._get_connection()
LOG.info("Config generated for [%(device)s] %(snip)s is:%(conf)s "
"caller:%(caller)s",
{'device': self.hosting_device['id'],
'snip': snippet,
'conf': conf_str,
'caller': self.caller_name()})
try:
rpc_obj = conn.edit_config(target='running', config=conf_str)
self._check_response(rpc_obj, snippet, conf_str=conf_str)
except Exception as e:
# Here we catch all exceptions caused by REMOVE_/DELETE_ configs
# to avoid the config agent getting stuck once it hits this
# condition. This is needed since the current ncclient version
# (0.4.2) raises an exception when a configuration attempt is
# rejected by the device (ASR1K router) but provides no details
# about the error message that the device reported. From ncclient
# 0.4.4 onwards the exception also carries the proper error, so
# this code can be changed when the ncclient version is increased.
if re.search(r"REMOVE_|DELETE_", snippet):
LOG.warning("Pass exception for %s", snippet)
pass
elif isinstance(e, ncclient.operations.rpc.RPCError):
e_tag = e.tag
e_type = e.type
params = {'snippet': snippet, 'type': e_type, 'tag': e_tag,
'dev_id': self.hosting_device['id'],
'ip': self._host_ip, 'confstr': conf_str}
raise cfg_exc.IOSXEConfigException(**params)
def _check_response(self, rpc_obj, snippet_name, conf_str=None):
"""This function checks the rpc response object for status.
This function takes as input the response rpc_obj and the snippet name
that was executed. It parses it to see, if the last edit operation was
a success or not.
<?xml version="1.0" encoding="UTF-8"?>
<rpc-reply message-id="urn:uuid:81bf8082-....-b69a-000c29e1b85c"
xmlns="urn:ietf:params:netconf:base:1.0">
<ok />
</rpc-reply>
In case of error, IOS XE device sends a response as follows.
We take the error type and tag.
<?xml version="1.0" encoding="UTF-8"?>
<rpc-reply message-id="urn:uuid:81bf8082-....-b69a-000c29e1b85c"
xmlns="urn:ietf:params:netconf:base:1.0">
<rpc-error>
<error-type>protocol</error-type>
<error-tag>operation-failed</error-tag>
<error-severity>error</error-severity>
</rpc-error>
</rpc-reply>
:return: True if the config operation completed successfully
:raises: networking_cisco.plugins.cisco.cfg_agent.cfg_exceptions.
IOSXEConfigException
"""
LOG.debug("RPCReply for %(snippet_name)s is %(rpc_obj)s",
{'snippet_name': snippet_name, 'rpc_obj': rpc_obj.xml})
xml_str = rpc_obj.xml
if "<ok />" in xml_str:
# LOG.debug("RPCReply for %s is OK", snippet_name)
LOG.info("%s was successfully executed", snippet_name)
return True
# Not Ok, we throw a ConfigurationException
e_type = rpc_obj._root[0][0].text
e_tag = rpc_obj._root[0][1].text
params = {'snippet': snippet_name, 'type': e_type, 'tag': e_tag,
'dev_id': self.hosting_device['id'],
'ip': self._host_ip, 'confstr': conf_str}
raise cfg_exc.IOSXEConfigException(**params)
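# An illustrative, stand-alone sketch of the rpc-reply parsing performed in
# _check_response() above; the XML string is a trimmed-down assumption:
#
#     import xml.etree.ElementTree as ET
#
#     reply = ('<rpc-reply xmlns="urn:ietf:params:netconf:base:1.0">'
#              '<rpc-error><error-type>protocol</error-type>'
#              '<error-tag>operation-failed</error-tag></rpc-error>'
#              '</rpc-reply>')
#     ns = '{urn:ietf:params:netconf:base:1.0}'
#     error = ET.fromstring(reply).find(ns + 'rpc-error')
#     e_type = error.find(ns + 'error-type').text  # 'protocol'
#     e_tag = error.find(ns + 'error-tag').text    # 'operation-failed'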
|
|
"""
HTTP Exception
--------------
This module processes Python exceptions that relate to HTTP exceptions
by defining a set of exceptions, all subclasses of HTTPException.
Each exception, in addition to being a Python exception that can be
raised and caught, is also a WSGI application and ``webob.Response``
object.
This module defines exceptions according to RFC 2068 [1]_ : codes with
100-300 are not really errors; 400's are client errors, and 500's are
server errors. According to the WSGI specification [2]_ , the application
can call ``start_response`` more than once only under two conditions:
(a) the response has not yet been sent, or (b) if the second and
subsequent invocations of ``start_response`` have a valid ``exc_info``
argument obtained from ``sys.exc_info()``. The WSGI specification then
requires the server or gateway to handle the case where content has been
sent and then an exception was encountered.
Exception
HTTPException
HTTPOk
* 200 - HTTPOk
* 201 - HTTPCreated
* 202 - HTTPAccepted
* 203 - HTTPNonAuthoritativeInformation
* 204 - HTTPNoContent
* 205 - HTTPResetContent
* 206 - HTTPPartialContent
HTTPRedirection
* 300 - HTTPMultipleChoices
* 301 - HTTPMovedPermanently
* 302 - HTTPFound
* 303 - HTTPSeeOther
* 304 - HTTPNotModified
* 305 - HTTPUseProxy
* 306 - Unused (not implemented, obviously)
* 307 - HTTPTemporaryRedirect
HTTPError
HTTPClientError
* 400 - HTTPBadRequest
* 401 - HTTPUnauthorized
* 402 - HTTPPaymentRequired
* 403 - HTTPForbidden
* 404 - HTTPNotFound
* 405 - HTTPMethodNotAllowed
* 406 - HTTPNotAcceptable
* 407 - HTTPProxyAuthenticationRequired
* 408 - HTTPRequestTimeout
* 409 - HTTPConflict
* 410 - HTTPGone
* 411 - HTTPLengthRequired
* 412 - HTTPPreconditionFailed
* 413 - HTTPRequestEntityTooLarge
* 414 - HTTPRequestURITooLong
* 415 - HTTPUnsupportedMediaType
* 416 - HTTPRequestRangeNotSatisfiable
* 417 - HTTPExpectationFailed
HTTPServerError
* 500 - HTTPInternalServerError
* 501 - HTTPNotImplemented
* 502 - HTTPBadGateway
* 503 - HTTPServiceUnavailable
* 504 - HTTPGatewayTimeout
* 505 - HTTPVersionNotSupported
Subclass usage notes:
---------------------
The HTTPException class is complicated by 4 factors:
1. The content given to the exception may either be plain-text or
as html-text.
2. The template may want to have string-substitutions taken from
the current ``environ`` or values from incoming headers. This
is especially troublesome due to case sensitivity.
3. The final output may either be text/plain or text/html
mime-type as requested by the client application.
4. Each exception has a default explanation, but those who
raise exceptions may want to provide additional detail.
Subclass attributes and call parameters are designed to provide an easier path
through the complications.
Attributes:
``code``
the HTTP status code for the exception
``title``
remainder of the status line (stuff after the code)
``explanation``
a plain-text explanation of the error message that is
not subject to environment or header substitutions;
it is accessible in the template via %(explanation)s
``detail``
a plain-text message customization that is not subject
to environment or header substitutions; accessible in
the template via %(detail)s
``body_template``
a content fragment (in HTML) used for environment and
header substitution; the default template includes both
the explanation and further detail provided in the
message
Parameters:
``detail``
a plain-text override of the default ``detail``
``headers``
a list of (k,v) header pairs
``comment``
a plain-text additional information which is
usually stripped/hidden for end-users
``body_template``
a string.Template object containing a content fragment in HTML
that frames the explanation and further detail
To override the template (which is HTML content) or the plain-text
explanation, one must subclass the given exception; or customize it
after it has been created. This particular breakdown of a message
into explanation, detail and template allows both the creation of
plain-text and html messages for various clients as well as
error-free substitution of environment variables and headers.
The subclasses of :class:`~_HTTPMove`
(:class:`~HTTPMultipleChoices`, :class:`~HTTPMovedPermanently`,
:class:`~HTTPFound`, :class:`~HTTPSeeOther`, :class:`~HTTPUseProxy` and
:class:`~HTTPTemporaryRedirect`) are redirections that require a ``Location``
field. Reflecting this, these subclasses have two additional keyword arguments:
``location`` and ``add_slash``.
Parameters:
``location``
to set the location immediately
``add_slash``
set to True to redirect to the same URL as the request, except with a
``/`` appended
Relative URLs in the location will be resolved to absolute.
References:
.. [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.5
.. [2] http://www.python.org/peps/pep-0333.html#error-handling
"""
import re
import urlparse
import sys
import types
from string import Template
from webob import Response, Request, html_escape
from webob.util import warn_deprecation
tag_re = re.compile(r'<.*?>', re.S)
br_re = re.compile(r'<br.*?>', re.I|re.S)
comment_re = re.compile(r'<!--|-->')
def no_escape(value):
if value is None:
return ''
if not isinstance(value, basestring):
if hasattr(value, '__unicode__'):
value = unicode(value)
else:
value = str(value)
return value
def strip_tags(value):
value = value.replace('\n', ' ')
value = value.replace('\r', '')
value = br_re.sub('\n', value)
value = comment_re.sub('', value)
value = tag_re.sub('', value)
return value
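# Illustrative behavior of the helpers above:
#     strip_tags('<p>oops</p><br />next<!-- hidden -->')
#     # -> 'oops\nnext hidden '  (tags dropped, <br /> becomes a newline,
#     #     comment delimiters removed but their text kept)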
class HTTPException(Exception):
def __init__(self, message, wsgi_response):
Exception.__init__(self, message)
self.wsgi_response = wsgi_response
def __call__(self, environ, start_response):
return self.wsgi_response(environ, start_response)
# TODO: remove in version 1.3
@property
def exception(self):
warn_deprecation("Raise HTTP exceptions directly", '1.2', 2)
return self
class WSGIHTTPException(Response, HTTPException):
## You should set in subclasses:
# code = 200
# title = 'OK'
# explanation = 'why this happens'
# body_template_obj = Template('response template')
code = None
title = None
explanation = ''
body_template_obj = Template('''\
${explanation}<br /><br />
${detail}
${html_comment}
''')
plain_template_obj = Template('''\
${status}
${body}''')
html_template_obj = Template('''\
<html>
<head>
<title>${status}</title>
</head>
<body>
<h1>${status}</h1>
${body}
</body>
</html>''')
## Set this to True for responses that should have no body
empty_body = False
def __init__(self, detail=None, headers=None, comment=None,
body_template=None, **kw):
Response.__init__(self,
status='%s %s' % (self.code, self.title),
**kw)
Exception.__init__(self, detail)
if headers:
self.headers.extend(headers)
self.detail = detail
self.comment = comment
if body_template is not None:
self.body_template = body_template
self.body_template_obj = Template(body_template)
if self.empty_body:
del self.content_type
del self.content_length
def __str__(self):
return self.detail or self.explanation
def _make_body(self, environ, escape):
args = {
'explanation': escape(self.explanation),
'detail': escape(self.detail or ''),
'comment': escape(self.comment or ''),
}
if self.comment:
args['html_comment'] = '<!-- %s -->' % escape(self.comment)
else:
args['html_comment'] = ''
body_tmpl = self.body_template_obj
if WSGIHTTPException.body_template_obj is not self.body_template_obj:
# Custom template; add headers to args
for k, v in environ.items():
args[k] = escape(v)
for k, v in self.headers.items():
args[k.lower()] = escape(v)
t_obj = self.body_template_obj
return t_obj.substitute(args)
def plain_body(self, environ):
body = self._make_body(environ, no_escape)
body = strip_tags(body)
return self.plain_template_obj.substitute(status=self.status,
title=self.title,
body=body)
def html_body(self, environ):
body = self._make_body(environ, html_escape)
return self.html_template_obj.substitute(status=self.status,
body=body)
def generate_response(self, environ, start_response):
if self.content_length is not None:
del self.content_length
headerlist = list(self.headerlist)
accept = environ.get('HTTP_ACCEPT', '')
if accept and ('html' in accept or '*/*' in accept):
content_type = 'text/html'
body = self.html_body(environ)
else:
content_type = 'text/plain'
body = self.plain_body(environ)
extra_kw = {}
if isinstance(body, unicode):
extra_kw.update(charset='utf-8')
resp = Response(body,
status=self.status,
headerlist=headerlist,
content_type=content_type,
**extra_kw
)
resp.content_type = content_type
return resp(environ, start_response)
def __call__(self, environ, start_response):
if self.body or self.empty_body:
app_iter = Response.__call__(self, environ, start_response)
else:
app_iter = self.generate_response(environ, start_response)
if environ['REQUEST_METHOD'] == 'HEAD':
app_iter = []
return app_iter
@property
def wsgi_response(self):
return self
class HTTPError(WSGIHTTPException):
"""
base class for status codes in the 400's and 500's
This is an exception which indicates that an error has occurred,
and that any work in progress should not be committed. These
typically have status codes in the 400's and 500's.
"""
class HTTPRedirection(WSGIHTTPException):
"""
base class for 300's status code (redirections)
This is an abstract base class for 3xx redirection. It indicates
that further action needs to be taken by the user agent in order
to fulfill the request. It does not necessarily signal an error
condition.
"""
class HTTPOk(WSGIHTTPException):
"""
Base class for the 200's status code (successful responses)
code: 200, title: OK
"""
code = 200
title = 'OK'
############################################################
## 2xx success
############################################################
class HTTPCreated(HTTPOk):
"""
subclass of :class:`~HTTPOk`
This indicates that request has been fulfilled and resulted in a new
resource being created.
code: 201, title: Created
"""
code = 201
title = 'Created'
class HTTPAccepted(HTTPOk):
"""
subclass of :class:`~HTTPOk`
This indicates that the request has been accepted for processing, but the
processing has not been completed.
code: 202, title: Accepted
"""
code = 202
title = 'Accepted'
explanation = 'The request is accepted for processing.'
class HTTPNonAuthoritativeInformation(HTTPOk):
"""
subclass of :class:`~HTTPOk`
This indicates that the returned metainformation in the entity-header is
not the definitive set as available from the origin server, but is
gathered from a local or a third-party copy.
code: 203, title: Non-Authoritative Information
"""
code = 203
title = 'Non-Authoritative Information'
class HTTPNoContent(HTTPOk):
"""
subclass of :class:`~HTTPOk`
This indicates that the server has fulfilled the request but does
not need to return an entity-body, and might want to return updated
metainformation.
code: 204, title: No Content
"""
code = 204
title = 'No Content'
empty_body = True
class HTTPResetContent(HTTPOk):
"""
subclass of :class:`~HTTPOk`
This indicates that the server has fulfilled the request and
the user agent SHOULD reset the document view which caused the
request to be sent.
code: 205, title: Reset Content
"""
code = 205
title = 'Reset Content'
empty_body = True
class HTTPPartialContent(HTTPOk):
"""
subclass of :class:`~HTTPOk`
This indicates that the server has fulfilled the partial GET
request for the resource.
code: 206, title: Partial Content
"""
code = 206
title = 'Partial Content'
############################################################
## 3xx redirection
############################################################
class _HTTPMove(HTTPRedirection):
"""
redirections which require a Location field
Since a 'Location' header is a required attribute of 301, 302, 303,
305 and 307 (but not 304), this base class provides the mechanics to
make this easy.
You can provide a location keyword argument to set the location
immediately. You may also give ``add_slash=True`` if you want to
redirect to the same URL as the request, except with a ``/`` added
to the end.
Relative URLs in the location will be resolved to absolute.
"""
explanation = 'The resource has been moved to'
body_template_obj = Template('''\
${explanation} <a href="${location}">${location}</a>;
you should be redirected automatically.
${detail}
${html_comment}''')
def __init__(self, detail=None, headers=None, comment=None,
body_template=None, location=None, add_slash=False):
super(_HTTPMove, self).__init__(
detail=detail, headers=headers, comment=comment,
body_template=body_template)
if location is not None:
self.location = location
if add_slash:
raise TypeError(
"You can only provide one of the arguments location and add_slash")
self.add_slash = add_slash
def __call__(self, environ, start_response):
req = Request(environ)
if self.add_slash:
url = req.path_url
url += '/'
if req.environ.get('QUERY_STRING'):
url += '?' + req.environ['QUERY_STRING']
self.location = url
self.location = urlparse.urljoin(req.path_url, self.location)
return super(_HTTPMove, self).__call__(
environ, start_response)
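# Illustrative uses of the move subclasses defined below (paths are
# placeholders):
#     raise HTTPFound(location='/new/path')
#     raise HTTPTemporaryRedirect(add_slash=True)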
class HTTPMultipleChoices(_HTTPMove):
"""
subclass of :class:`~_HTTPMove`
This indicates that the requested resource corresponds to any one
of a set of representations, each with its own specific location,
and agent-driven negotiation information is being provided so that
the user can select a preferred representation and redirect its
request to that location.
code: 300, title: Multiple Choices
"""
code = 300
title = 'Multiple Choices'
class HTTPMovedPermanently(_HTTPMove):
"""
subclass of :class:`~_HTTPMove`
This indicates that the requested resource has been assigned a new
permanent URI and any future references to this resource SHOULD use
one of the returned URIs.
code: 301, title: Moved Permanently
"""
code = 301
title = 'Moved Permanently'
class HTTPFound(_HTTPMove):
"""
subclass of :class:`~_HTTPMove`
This indicates that the requested resource resides temporarily under
a different URI.
code: 302, title: Found
"""
code = 302
title = 'Found'
explanation = 'The resource was found at'
# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(_HTTPMove):
"""
subclass of :class:`~_HTTPMove`
This indicates that the response to the request can be found under
a different URI and SHOULD be retrieved using a GET method on that
resource.
code: 303, title: See Other
"""
code = 303
title = 'See Other'
class HTTPNotModified(HTTPRedirection):
"""
subclass of :class:`~HTTPRedirection`
This indicates that if the client has performed a conditional GET
request and access is allowed, but the document has not been
modified, the server SHOULD respond with this status code.
code: 304, title: Not Modified
"""
# TODO: this should include a date or etag header
code = 304
title = 'Not Modified'
empty_body = True
class HTTPUseProxy(_HTTPMove):
"""
subclass of :class:`~_HTTPMove`
This indicates that the requested resource MUST be accessed through
the proxy given by the Location field.
code: 305, title: Use Proxy
"""
# Not a move, but looks a little like one
code = 305
title = 'Use Proxy'
explanation = (
'The resource must be accessed through a proxy located at')
class HTTPTemporaryRedirect(_HTTPMove):
"""
subclass of :class:`~_HTTPMove`
This indicates that the requested resource resides temporarily
under a different URI.
code: 307, title: Temporary Redirect
"""
code = 307
title = 'Temporary Redirect'
############################################################
## 4xx client error
############################################################
class HTTPClientError(HTTPError):
"""
base class for the 400's, where the client is in error
This is an error condition in which the client is presumed to be
in-error. This is an expected problem, and thus is not considered
a bug. A server-side traceback is not warranted. Unless specialized,
this is a '400 Bad Request'
"""
code = 400
title = 'Bad Request'
explanation = ('The server could not comply with the request since\r\n'
'it is either malformed or otherwise incorrect.\r\n')
class HTTPBadRequest(HTTPClientError):
pass
class HTTPUnauthorized(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the request requires user authentication.
code: 401, title: Unauthorized
"""
code = 401
title = 'Unauthorized'
explanation = (
'This server could not verify that you are authorized to\r\n'
'access the document you requested. Either you supplied the\r\n'
'wrong credentials (e.g., bad password), or your browser\r\n'
'does not understand how to supply the credentials required.\r\n')
class HTTPPaymentRequired(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
code: 402, title: Payment Required
"""
code = 402
title = 'Payment Required'
explanation = ('Access was denied for financial reasons.')
class HTTPForbidden(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the server understood the request, but is
refusing to fulfill it.
code: 403, title: Forbidden
"""
code = 403
title = 'Forbidden'
explanation = ('Access was denied to this resource.')
class HTTPNotFound(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the server did not find anything matching the
Request-URI.
code: 404, title: Not Found
"""
code = 404
title = 'Not Found'
explanation = ('The resource could not be found.')
class HTTPMethodNotAllowed(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the method specified in the Request-Line is
not allowed for the resource identified by the Request-URI.
code: 405, title: Method Not Allowed
"""
code = 405
title = 'Method Not Allowed'
# override template since we need an environment variable
body_template_obj = Template('''\
The method ${REQUEST_METHOD} is not allowed for this resource. <br /><br />
${detail}''')
class HTTPNotAcceptable(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates the resource identified by the request is only
capable of generating response entities which have content
characteristics not acceptable according to the accept headers
sent in the request.
code: 406, title: Not Acceptable
"""
code = 406
title = 'Not Acceptable'
# override template since we need an environment variable
body_template_obj = Template('''\
The resource could not be generated that was acceptable to your browser
(content of type ${HTTP_ACCEPT}). <br /><br />
${detail}''')
class HTTPProxyAuthenticationRequired(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This is similar to 401, but indicates that the client must first
authenticate itself with the proxy.
code: 407, title: Proxy Authentication Required
"""
code = 407
title = 'Proxy Authentication Required'
explanation = ('Authentication with a local proxy is needed.')
class HTTPRequestTimeout(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the client did not produce a request within
the time that the server was prepared to wait.
code: 408, title: Request Timeout
"""
code = 408
title = 'Request Timeout'
explanation = ('The server has waited too long for the request to '
'be sent by the client.')
class HTTPConflict(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the request could not be completed due to a
conflict with the current state of the resource.
code: 409, title: Conflict
"""
code = 409
title = 'Conflict'
explanation = ('There was a conflict when trying to complete '
'your request.')
class HTTPGone(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the requested resource is no longer available
at the server and no forwarding address is known.
code: 410, title: Gone
"""
code = 410
title = 'Gone'
explanation = ('This resource is no longer available. No forwarding '
'address is given.')
class HTTPLengthRequired(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the server refuses to accept the request
without a defined Content-Length.
code: 411, title: Length Required
"""
code = 411
title = 'Length Required'
explanation = ('Content-Length header required.')
class HTTPPreconditionFailed(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the precondition given in one or more of the
request-header fields evaluated to false when it was tested on the
server.
code: 412, title: Precondition Failed
"""
code = 412
title = 'Precondition Failed'
explanation = ('Request precondition failed.')
class HTTPRequestEntityTooLarge(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the server is refusing to process a request
because the request entity is larger than the server is willing or
able to process.
code: 413, title: Request Entity Too Large
"""
code = 413
title = 'Request Entity Too Large'
explanation = ('The body of your request was too large for this server.')
class HTTPRequestURITooLong(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the server is refusing to service the request
because the Request-URI is longer than the server is willing to
interpret.
code: 414, title: Request-URI Too Long
"""
code = 414
title = 'Request-URI Too Long'
explanation = ('The request URI was too long for this server.')
class HTTPUnsupportedMediaType(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the server is refusing to service the request
because the entity of the request is in a format not supported by
the requested resource for the requested method.
code: 415, title: Unsupported Media Type
"""
code = 415
title = 'Unsupported Media Type'
# override template since we need an environment variable
body_template_obj = Template('''\
The request media type ${CONTENT_TYPE} is not supported by this server.
<br /><br />
${detail}''')
class HTTPRequestRangeNotSatisfiable(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
The server SHOULD return a response with this status code if a
request included a Range request-header field, and none of the
range-specifier values in this field overlap the current extent
of the selected resource, and the request did not include an
If-Range request-header field.
code: 416, title: Request Range Not Satisfiable
"""
code = 416
title = 'Request Range Not Satisfiable'
explanation = ('The Range requested is not available.')
class HTTPExpectationFailed(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the expectation given in an Expect
request-header field could not be met by this server.
code: 417, title: Expectation Failed
"""
code = 417
title = 'Expectation Failed'
explanation = ('Expectation failed.')
class HTTPUnprocessableEntity(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the server is unable to process the contained
instructions. Only for WebDAV.
code: 422, title: Unprocessable Entity
"""
## Note: from WebDAV
code = 422
title = 'Unprocessable Entity'
explanation = 'Unable to process the contained instructions'
class HTTPLocked(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the resource is locked. Only for WebDAV
code: 423, title: Locked
"""
## Note: from WebDAV
code = 423
title = 'Locked'
explanation = ('The resource is locked')
class HTTPFailedDependency(HTTPClientError):
"""
subclass of :class:`~HTTPClientError`
This indicates that the method could not be performed because the
requested action depended on another action and that action failed.
Only for WebDAV.
code: 424, title: Failed Dependency
"""
## Note: from WebDAV
code = 424
title = 'Failed Dependency'
explanation = ('The method could not be performed because the requested '
'action depended on another action and that action failed')
############################################################
## 5xx Server Error
############################################################
# Response status codes beginning with the digit "5" indicate cases in
# which the server is aware that it has erred or is incapable of
# performing the request. Except when responding to a HEAD request, the
# server SHOULD include an entity containing an explanation of the error
# situation, and whether it is a temporary or permanent condition. User
# agents SHOULD display any included entity to the user. These response
# codes are applicable to any request method.
class HTTPServerError(HTTPError):
"""
base class for the 500's, where the server is in-error
This is an error condition in which the server is presumed to be
in-error. This is usually unexpected, and thus requires a traceback;
ideally, opening a support ticket for the customer. Unless specialized,
this is a '500 Internal Server Error'
"""
code = 500
title = 'Internal Server Error'
explanation = (
'The server has either erred or is incapable of performing\r\n'
'the requested operation.\r\n')
class HTTPInternalServerError(HTTPServerError):
pass
class HTTPNotImplemented(HTTPServerError):
"""
subclass of :class:`~HTTPServerError`
This indicates that the server does not support the functionality
required to fulfill the request.
code: 501, title: Not Implemented
"""
code = 501
title = 'Not Implemented'
body_template_obj = Template('''
The request method ${REQUEST_METHOD} is not implemented for this server. <br /><br />
${detail}''')
class HTTPBadGateway(HTTPServerError):
"""
subclass of :class:`~HTTPServerError`
This indicates that the server, while acting as a gateway or proxy,
received an invalid response from the upstream server it accessed
in attempting to fulfill the request.
code: 502, title: Bad Gateway
"""
code = 502
title = 'Bad Gateway'
explanation = ('Bad gateway.')
class HTTPServiceUnavailable(HTTPServerError):
"""
subclass of :class:`~HTTPServerError`
This indicates that the server is currently unable to handle the
request due to a temporary overloading or maintenance of the server.
code: 503, title: Service Unavailable
"""
code = 503
title = 'Service Unavailable'
explanation = ('The server is currently unavailable. '
'Please try again at a later time.')
class HTTPGatewayTimeout(HTTPServerError):
"""
subclass of :class:`~HTTPServerError`
This indicates that the server, while acting as a gateway or proxy,
did not receive a timely response from the upstream server specified
by the URI (e.g. HTTP, FTP, LDAP) or some other auxiliary server
(e.g. DNS) it needed to access in attempting to complete the request.
code: 504, title: Gateway Timeout
"""
code = 504
title = 'Gateway Timeout'
explanation = ('The gateway has timed out.')
class HTTPVersionNotSupported(HTTPServerError):
"""
subclass of :class:`~HTTPServerError`
This indicates that the server does not support, or refuses to
support, the HTTP protocol version that was used in the request
message.
code: 505, title: HTTP Version Not Supported
"""
code = 505
title = 'HTTP Version Not Supported'
explanation = ('The HTTP version is not supported.')
class HTTPInsufficientStorage(HTTPServerError):
"""
subclass of :class:`~HTTPServerError`
This indicates that the server does not have enough space to save
the resource.
code: 507, title: Insufficient Storage
"""
code = 507
title = 'Insufficient Storage'
explanation = ('There was not enough space to save the resource')
class HTTPExceptionMiddleware(object):
"""
Middleware that catches exceptions in the sub-application. This
does not catch exceptions in the app_iter; only during the initial
calling of the application.
This should be put *very close* to applications that might raise
these exceptions. This should not be applied globally; letting
*expected* exceptions raise through the WSGI stack is dangerous.
"""
def __init__(self, application):
self.application = application
def __call__(self, environ, start_response):
try:
return self.application(environ, start_response)
except HTTPException, exc:
parent_exc_info = sys.exc_info()
def repl_start_response(status, headers, exc_info=None):
if exc_info is None:
exc_info = parent_exc_info
return start_response(status, headers, exc_info)
return exc(environ, repl_start_response)
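# A minimal usage sketch (not part of the original module): wrap a WSGI app
# so that HTTPExceptions raised during the initial call are rendered as
# normal responses. The sample application below is hypothetical.
def _example_failing_app(environ, start_response):
    # The middleware above catches this and turns it into a 503 response.
    raise HTTPServiceUnavailable('down for maintenance')
# wrapped_app = HTTPExceptionMiddleware(_example_failing_app)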
try:
from paste import httpexceptions
except ImportError: # pragma: no cover
# Without Paste we don't need to do this fixup
pass
else: # pragma: no cover
for name in dir(httpexceptions):
obj = globals().get(name)
if (obj and isinstance(obj, type) and issubclass(obj, HTTPException)
and obj is not HTTPException
and obj is not WSGIHTTPException):
obj.__bases__ = obj.__bases__ + (getattr(httpexceptions, name),)
del name, obj, httpexceptions
__all__ = ['HTTPExceptionMiddleware', 'status_map']
status_map={}
for name, value in globals().items():
if (isinstance(value, (type, types.ClassType)) and issubclass(value, HTTPException)
and not name.startswith('_')):
__all__.append(name)
if getattr(value, 'code', None):
status_map[value.code]=value
if hasattr(value, 'explanation'):
value.explanation = ' '.join(value.explanation.strip().split())
del name, value
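# Sketch: status_map maps a numeric status code back to the exception class
# defined above, e.g. when translating an upstream response into an error.
# These checks hold for the classes defined in this module.
assert status_map[502] is HTTPBadGateway
assert status_map[504] is HTTPGatewayTimeout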
|
|
from psphere import ManagedObject, cached_property
class ExtensibleManagedObject(ManagedObject):
_valid_attrs = set(['availableField', 'value'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def availableField(self):
return self._get_dataobject("availableField", True)
@cached_property
def value(self):
return self._get_dataobject("value", True)
class Alarm(ExtensibleManagedObject):
_valid_attrs = set(['info'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def info(self):
return self._get_dataobject("info", False)
class AlarmManager(ManagedObject):
_valid_attrs = set(['defaultExpression', 'description'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def defaultExpression(self):
return self._get_dataobject("defaultExpression", True)
@cached_property
def description(self):
return self._get_dataobject("description", False)
class AuthorizationManager(ManagedObject):
_valid_attrs = set(['description', 'privilegeList', 'roleList'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def description(self):
return self._get_dataobject("description", False)
@cached_property
def privilegeList(self):
return self._get_dataobject("privilegeList", True)
@cached_property
def roleList(self):
return self._get_dataobject("roleList", True)
class ManagedEntity(ExtensibleManagedObject):
_valid_attrs = set(['alarmActionsEnabled', 'configIssue', 'configStatus', 'customValue', 'declaredAlarmState', 'disabledMethod', 'effectiveRole', 'name', 'overallStatus', 'parent', 'permission', 'recentTask', 'tag', 'triggeredAlarmState'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def alarmActionsEnabled(self):
return self._get_dataobject("alarmActionsEnabled", False)
@cached_property
def configIssue(self):
return self._get_dataobject("configIssue", True)
@cached_property
def configStatus(self):
return self._get_dataobject("configStatus", False)
@cached_property
def customValue(self):
return self._get_dataobject("customValue", True)
@cached_property
def declaredAlarmState(self):
return self._get_dataobject("declaredAlarmState", True)
@cached_property
def disabledMethod(self):
return self._get_dataobject("disabledMethod", True)
@cached_property
def effectiveRole(self):
return self._get_dataobject("effectiveRole", True)
@cached_property
def name(self):
return self._get_dataobject("name", False)
@cached_property
def overallStatus(self):
return self._get_dataobject("overallStatus", False)
@cached_property
def parent(self):
return self._get_mor("parent", False)
@cached_property
def permission(self):
return self._get_dataobject("permission", True)
@cached_property
def recentTask(self):
return self._get_mor("recentTask", True)
@cached_property
def tag(self):
return self._get_dataobject("tag", True)
@cached_property
def triggeredAlarmState(self):
return self._get_dataobject("triggeredAlarmState", True)
@classmethod
def all(cls, client, properties=None):
if properties is None:
properties = []
if "name" not in properties:
properties.append("name")
return client.find_entity_views(cls.__name__, properties=properties)
    @classmethod
    def get(cls, client, **kwargs):
        # Pop "properties" out of kwargs so it doesn't end up in the filter.
        properties = kwargs.pop("properties", None)
        if properties is None:
            properties = []
        # Automatically get the name property for every ManagedEntity
        if "name" not in properties:
            properties.append("name")
        # Whatever remains in kwargs is the server-side filter.
        filter = dict(kwargs)
        return client.find_entity_view(cls.__name__,
                                       filter=filter,
                                       properties=properties)
def __cmp__(self, other):
if self.name == other.name:
return 0
if self.name < other.name:
return -1
if self.name > other.name:
return 1
# def __str__(self):
# return self.name
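def _example_entity_lookup(client):
    # Hypothetical usage sketch; "client" is assumed to be a connected
    # psphere Client. get() filters server-side and always fetches "name";
    # all() lists every view of the class.
    vm = VirtualMachine.get(client, name="web01", properties=["runtime"])
    hosts = HostSystem.all(client, properties=["summary"])
    return vm, hosts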
class ComputeResource(ManagedEntity):
_valid_attrs = set(['configurationEx', 'datastore', 'environmentBrowser', 'host', 'network', 'resourcePool', 'summary'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def configurationEx(self):
return self._get_dataobject("configurationEx", False)
@cached_property
def datastore(self):
return self._get_mor("datastore", True)
@cached_property
def environmentBrowser(self):
return self._get_mor("environmentBrowser", False)
@cached_property
def host(self):
return self._get_mor("host", True)
@cached_property
def network(self):
return self._get_mor("network", True)
@cached_property
def resourcePool(self):
return self._get_mor("resourcePool", False)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
class ClusterComputeResource(ComputeResource):
_valid_attrs = set(['actionHistory', 'configuration', 'drsFault', 'drsRecommendation', 'migrationHistory', 'recommendation'])
def __init__(self, mo_ref, client):
ComputeResource.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ComputeResource._valid_attrs)
@cached_property
def actionHistory(self):
return self._get_dataobject("actionHistory", True)
@cached_property
def configuration(self):
return self._get_dataobject("configuration", False)
@cached_property
def drsFault(self):
return self._get_dataobject("drsFault", True)
@cached_property
def drsRecommendation(self):
return self._get_dataobject("drsRecommendation", True)
@cached_property
def migrationHistory(self):
return self._get_dataobject("migrationHistory", True)
@cached_property
def recommendation(self):
return self._get_dataobject("recommendation", True)
class Profile(ManagedObject):
_valid_attrs = set(['complianceStatus', 'config', 'createdTime', 'description', 'entity', 'modifiedTime', 'name'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def complianceStatus(self):
return self._get_dataobject("complianceStatus", False)
@cached_property
def config(self):
return self._get_dataobject("config", False)
@cached_property
def createdTime(self):
return self._get_dataobject("createdTime", False)
@cached_property
def description(self):
return self._get_dataobject("description", False)
@cached_property
def entity(self):
return self._get_mor("entity", True)
@cached_property
def modifiedTime(self):
return self._get_dataobject("modifiedTime", False)
@cached_property
def name(self):
return self._get_dataobject("name", False)
class ClusterProfile(Profile):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
Profile.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, Profile._valid_attrs)
class ProfileManager(ManagedObject):
_valid_attrs = set(['profile'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def profile(self):
return self._get_mor("profile", True)
class ClusterProfileManager(ProfileManager):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ProfileManager.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ProfileManager._valid_attrs)
class View(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class ManagedObjectView(View):
_valid_attrs = set(['view'])
def __init__(self, mo_ref, client):
View.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, View._valid_attrs)
@cached_property
def view(self):
return self._get_mor("view", True)
class ContainerView(ManagedObjectView):
_valid_attrs = set(['container', 'recursive', 'type'])
def __init__(self, mo_ref, client):
ManagedObjectView.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObjectView._valid_attrs)
@cached_property
def container(self):
return self._get_mor("container", False)
@cached_property
def recursive(self):
return self._get_dataobject("recursive", False)
@cached_property
def type(self):
return self._get_dataobject("type", True)
class CustomFieldsManager(ManagedObject):
_valid_attrs = set(['field'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def field(self):
return self._get_dataobject("field", True)
class CustomizationSpecManager(ManagedObject):
_valid_attrs = set(['encryptionKey', 'info'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def encryptionKey(self):
return self._get_dataobject("encryptionKey", True)
@cached_property
def info(self):
return self._get_dataobject("info", True)
class Datacenter(ManagedEntity):
_valid_attrs = set(['datastore', 'datastoreFolder', 'hostFolder', 'network', 'networkFolder', 'vmFolder'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def datastore(self):
return self._get_mor("datastore", True)
@cached_property
def datastoreFolder(self):
return self._get_mor("datastoreFolder", False)
@cached_property
def hostFolder(self):
return self._get_mor("hostFolder", False)
@cached_property
def network(self):
return self._get_mor("network", True)
@cached_property
def networkFolder(self):
return self._get_mor("networkFolder", False)
@cached_property
def vmFolder(self):
return self._get_mor("vmFolder", False)
class Datastore(ManagedEntity):
_valid_attrs = set(['browser', 'capability', 'host', 'info', 'iormConfiguration', 'summary', 'vm'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def browser(self):
return self._get_mor("browser", False)
@cached_property
def capability(self):
return self._get_dataobject("capability", False)
@cached_property
def host(self):
return self._get_dataobject("host", True)
@cached_property
def info(self):
return self._get_dataobject("info", False)
@cached_property
def iormConfiguration(self):
return self._get_dataobject("iormConfiguration", False)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
@cached_property
def vm(self):
return self._get_mor("vm", True)
class DiagnosticManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class Network(ManagedEntity):
_valid_attrs = set(['host', 'name', 'summary', 'vm'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def host(self):
return self._get_mor("host", True)
@cached_property
def name(self):
return self._get_dataobject("name", False)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
@cached_property
def vm(self):
return self._get_mor("vm", True)
class DistributedVirtualPortgroup(Network):
_valid_attrs = set(['config', 'key', 'portKeys'])
def __init__(self, mo_ref, client):
Network.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, Network._valid_attrs)
@cached_property
def config(self):
return self._get_dataobject("config", False)
@cached_property
def key(self):
return self._get_dataobject("key", False)
@cached_property
def portKeys(self):
return self._get_dataobject("portKeys", True)
class DistributedVirtualSwitch(ManagedEntity):
_valid_attrs = set(['capability', 'config', 'networkResourcePool', 'portgroup', 'summary', 'uuid'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def capability(self):
return self._get_dataobject("capability", False)
@cached_property
def config(self):
return self._get_dataobject("config", False)
@cached_property
def networkResourcePool(self):
return self._get_dataobject("networkResourcePool", True)
@cached_property
def portgroup(self):
return self._get_mor("portgroup", True)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
@cached_property
def uuid(self):
return self._get_dataobject("uuid", False)
class DistributedVirtualSwitchManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class EnvironmentBrowser(ManagedObject):
_valid_attrs = set(['datastoreBrowser'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def datastoreBrowser(self):
return self._get_mor("datastoreBrowser", False)
class HistoryCollector(ManagedObject):
_valid_attrs = set(['filter'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def filter(self):
return self._get_dataobject("filter", False)
class EventHistoryCollector(HistoryCollector):
_valid_attrs = set(['latestPage'])
def __init__(self, mo_ref, client):
HistoryCollector.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, HistoryCollector._valid_attrs)
@cached_property
def latestPage(self):
return self._get_dataobject("latestPage", True)
class EventManager(ManagedObject):
_valid_attrs = set(['description', 'latestEvent', 'maxCollector'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def description(self):
return self._get_dataobject("description", False)
@cached_property
def latestEvent(self):
return self._get_dataobject("latestEvent", False)
@cached_property
def maxCollector(self):
return self._get_dataobject("maxCollector", False)
class ExtensionManager(ManagedObject):
_valid_attrs = set(['extensionList'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def extensionList(self):
return self._get_dataobject("extensionList", True)
class FileManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class Folder(ManagedEntity):
_valid_attrs = set(['childEntity', 'childType'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def childEntity(self):
return self._get_mor("childEntity", True)
@cached_property
def childType(self):
return self._get_dataobject("childType", True)
class GuestAuthManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class GuestFileManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class GuestOperationsManager(ManagedObject):
_valid_attrs = set(['authManager', 'fileManager', 'processManager'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def authManager(self):
return self._get_mor("authManager", False)
@cached_property
def fileManager(self):
return self._get_mor("fileManager", False)
@cached_property
def processManager(self):
return self._get_mor("processManager", False)
class GuestProcessManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class HostAuthenticationStore(ManagedObject):
_valid_attrs = set(['info'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def info(self):
return self._get_dataobject("info", False)
class HostDirectoryStore(HostAuthenticationStore):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
HostAuthenticationStore.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, HostAuthenticationStore._valid_attrs)
class HostActiveDirectoryAuthentication(HostDirectoryStore):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
HostDirectoryStore.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, HostDirectoryStore._valid_attrs)
class HostAuthenticationManager(ManagedObject):
_valid_attrs = set(['info', 'supportedStore'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def info(self):
return self._get_dataobject("info", False)
@cached_property
def supportedStore(self):
return self._get_mor("supportedStore", True)
class HostAutoStartManager(ManagedObject):
_valid_attrs = set(['config'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def config(self):
return self._get_dataobject("config", False)
class HostBootDeviceSystem(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class HostCacheConfigurationManager(ManagedObject):
_valid_attrs = set(['cacheConfigurationInfo'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def cacheConfigurationInfo(self):
return self._get_dataobject("cacheConfigurationInfo", True)
class HostCpuSchedulerSystem(ExtensibleManagedObject):
_valid_attrs = set(['hyperthreadInfo'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def hyperthreadInfo(self):
return self._get_dataobject("hyperthreadInfo", False)
class HostDatastoreBrowser(ManagedObject):
_valid_attrs = set(['datastore', 'supportedType'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def datastore(self):
return self._get_mor("datastore", True)
@cached_property
def supportedType(self):
return self._get_dataobject("supportedType", True)
class HostDatastoreSystem(ManagedObject):
_valid_attrs = set(['capabilities', 'datastore'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def capabilities(self):
return self._get_dataobject("capabilities", False)
@cached_property
def datastore(self):
return self._get_mor("datastore", True)
class HostDateTimeSystem(ManagedObject):
_valid_attrs = set(['dateTimeInfo'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def dateTimeInfo(self):
return self._get_dataobject("dateTimeInfo", False)
class HostDiagnosticSystem(ManagedObject):
_valid_attrs = set(['activePartition'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def activePartition(self):
return self._get_dataobject("activePartition", False)
class HostEsxAgentHostManager(ManagedObject):
_valid_attrs = set(['configInfo'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def configInfo(self):
return self._get_dataobject("configInfo", False)
class HostFirewallSystem(ExtensibleManagedObject):
_valid_attrs = set(['firewallInfo'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def firewallInfo(self):
return self._get_dataobject("firewallInfo", False)
class HostFirmwareSystem(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class HostHealthStatusSystem(ManagedObject):
_valid_attrs = set(['runtime'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def runtime(self):
return self._get_dataobject("runtime", False)
class HostImageConfigManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class HostKernelModuleSystem(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class HostLocalAccountManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class HostLocalAuthentication(HostAuthenticationStore):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
HostAuthenticationStore.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, HostAuthenticationStore._valid_attrs)
class HostMemorySystem(ExtensibleManagedObject):
_valid_attrs = set(['consoleReservationInfo', 'virtualMachineReservationInfo'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def consoleReservationInfo(self):
return self._get_dataobject("consoleReservationInfo", False)
@cached_property
def virtualMachineReservationInfo(self):
return self._get_dataobject("virtualMachineReservationInfo", False)
class HostNetworkSystem(ExtensibleManagedObject):
_valid_attrs = set(['capabilities', 'consoleIpRouteConfig', 'dnsConfig', 'ipRouteConfig', 'networkConfig', 'networkInfo', 'offloadCapabilities'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def capabilities(self):
return self._get_dataobject("capabilities", False)
@cached_property
def consoleIpRouteConfig(self):
return self._get_dataobject("consoleIpRouteConfig", False)
@cached_property
def dnsConfig(self):
return self._get_dataobject("dnsConfig", False)
@cached_property
def ipRouteConfig(self):
return self._get_dataobject("ipRouteConfig", False)
@cached_property
def networkConfig(self):
return self._get_dataobject("networkConfig", False)
@cached_property
def networkInfo(self):
return self._get_dataobject("networkInfo", False)
@cached_property
def offloadCapabilities(self):
return self._get_dataobject("offloadCapabilities", False)
class HostPatchManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class HostPciPassthruSystem(ExtensibleManagedObject):
_valid_attrs = set(['pciPassthruInfo'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def pciPassthruInfo(self):
return self._get_dataobject("pciPassthruInfo", True)
class HostPowerSystem(ManagedObject):
_valid_attrs = set(['capability', 'info'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def capability(self):
return self._get_dataobject("capability", False)
@cached_property
def info(self):
return self._get_dataobject("info", False)
class HostProfile(Profile):
_valid_attrs = set(['referenceHost'])
def __init__(self, mo_ref, client):
Profile.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, Profile._valid_attrs)
@cached_property
def referenceHost(self):
return self._get_mor("referenceHost", False)
class HostProfileManager(ProfileManager):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ProfileManager.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ProfileManager._valid_attrs)
class HostServiceSystem(ExtensibleManagedObject):
_valid_attrs = set(['serviceInfo'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def serviceInfo(self):
return self._get_dataobject("serviceInfo", False)
class HostSnmpSystem(ManagedObject):
_valid_attrs = set(['configuration', 'limits'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def configuration(self):
return self._get_dataobject("configuration", False)
@cached_property
def limits(self):
return self._get_dataobject("limits", False)
class HostStorageSystem(ExtensibleManagedObject):
_valid_attrs = set(['fileSystemVolumeInfo', 'multipathStateInfo', 'storageDeviceInfo', 'systemFile'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def fileSystemVolumeInfo(self):
return self._get_dataobject("fileSystemVolumeInfo", False)
@cached_property
def multipathStateInfo(self):
return self._get_dataobject("multipathStateInfo", False)
@cached_property
def storageDeviceInfo(self):
return self._get_dataobject("storageDeviceInfo", False)
@cached_property
def systemFile(self):
return self._get_dataobject("systemFile", True)
class HostSystem(ManagedEntity):
_valid_attrs = set(['capability', 'config', 'configManager', 'datastore', 'datastoreBrowser', 'hardware', 'licensableResource', 'network', 'runtime', 'summary', 'systemResources', 'vm'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def capability(self):
return self._get_dataobject("capability", False)
@cached_property
def config(self):
return self._get_dataobject("config", False)
@cached_property
def configManager(self):
return self._get_dataobject("configManager", False)
@cached_property
def datastore(self):
return self._get_mor("datastore", True)
@cached_property
def datastoreBrowser(self):
return self._get_mor("datastoreBrowser", False)
@cached_property
def licensableResource(self):
return self._get_dataobject("licensableResource", False)
@cached_property
def hardware(self):
return self._get_dataobject("hardware", False)
@cached_property
def network(self):
return self._get_mor("network", True)
@cached_property
def runtime(self):
return self._get_dataobject("runtime", False)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
@cached_property
def systemResources(self):
return self._get_dataobject("systemResources", False)
@cached_property
def vm(self):
return self._get_mor("vm", True)
class HostVirtualNicManager(ExtensibleManagedObject):
_valid_attrs = set(['info'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def info(self):
return self._get_dataobject("info", False)
class HostVMotionSystem(ExtensibleManagedObject):
_valid_attrs = set(['ipConfig', 'netConfig'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def ipConfig(self):
return self._get_dataobject("ipConfig", False)
@cached_property
def netConfig(self):
return self._get_dataobject("netConfig", False)
class HttpNfcLease(ManagedObject):
_valid_attrs = set(['error', 'info', 'initializeProgress', 'state'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def error(self):
return self._get_dataobject("error", False)
@cached_property
def info(self):
return self._get_dataobject("info", False)
@cached_property
def initializeProgress(self):
return self._get_dataobject("initializeProgress", False)
@cached_property
def state(self):
return self._get_dataobject("state", False)
class InventoryView(ManagedObjectView):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObjectView.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObjectView._valid_attrs)
class IpPoolManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class IscsiManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class LicenseAssignmentManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class LicenseManager(ManagedObject):
_valid_attrs = set(['diagnostics', 'evaluation', 'featureInfo', 'licenseAssignmentManager', 'licensedEdition', 'licenses', 'source', 'sourceAvailable'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def diagnostics(self):
return self._get_dataobject("diagnostics", False)
@cached_property
def evaluation(self):
return self._get_dataobject("evaluation", False)
@cached_property
def featureInfo(self):
return self._get_dataobject("featureInfo", True)
@cached_property
def licenseAssignmentManager(self):
return self._get_mor("licenseAssignmentManager", False)
@cached_property
def licensedEdition(self):
return self._get_dataobject("licensedEdition", False)
@cached_property
def licenses(self):
return self._get_dataobject("licenses", True)
@cached_property
def source(self):
return self._get_dataobject("source", False)
@cached_property
def sourceAvailable(self):
return self._get_dataobject("sourceAvailable", False)
class ListView(ManagedObjectView):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObjectView.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObjectView._valid_attrs)
class LocalizationManager(ManagedObject):
_valid_attrs = set(['catalog'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def catalog(self):
return self._get_dataobject("catalog", True)
class OptionManager(ManagedObject):
_valid_attrs = set(['setting', 'supportedOption'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def setting(self):
return self._get_dataobject("setting", True)
@cached_property
def supportedOption(self):
return self._get_dataobject("supportedOption", True)
class OvfManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class PerformanceManager(ManagedObject):
_valid_attrs = set(['description', 'historicalInterval', 'perfCounter'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def description(self):
return self._get_dataobject("description", False)
@cached_property
def historicalInterval(self):
return self._get_dataobject("historicalInterval", True)
@cached_property
def perfCounter(self):
return self._get_dataobject("perfCounter", True)
class ProfileComplianceManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class PropertyCollector(ManagedObject):
_valid_attrs = set(['filter'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def filter(self):
return self._get_mor("filter", True)
class PropertyFilter(ManagedObject):
_valid_attrs = set(['partialUpdates', 'spec'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def partialUpdates(self):
return self._get_dataobject("partialUpdates", False)
@cached_property
def spec(self):
return self._get_dataobject("spec", False)
class ResourcePlanningManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class ResourcePool(ManagedEntity):
_valid_attrs = set(['childConfiguration', 'config', 'owner', 'resourcePool', 'runtime', 'summary', 'vm'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def childConfiguration(self):
return self._get_dataobject("childConfiguration", True)
@cached_property
def config(self):
return self._get_dataobject("config", False)
@cached_property
def owner(self):
return self._get_mor("owner", False)
@cached_property
def resourcePool(self):
return self._get_mor("resourcePool", True)
@cached_property
def runtime(self):
return self._get_dataobject("runtime", False)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
@cached_property
def vm(self):
return self._get_mor("vm", True)
class ScheduledTask(ExtensibleManagedObject):
_valid_attrs = set(['info'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def info(self):
return self._get_dataobject("info", False)
class ScheduledTaskManager(ManagedObject):
_valid_attrs = set(['description', 'scheduledTask'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def description(self):
return self._get_dataobject("description", False)
@cached_property
def scheduledTask(self):
return self._get_mor("scheduledTask", True)
class SearchIndex(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class ServiceInstance(ManagedObject):
_valid_attrs = set(['capability', 'content', 'serverClock'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def capability(self):
return self._get_dataobject("capability", False)
@cached_property
def content(self):
return self._get_dataobject("content", False)
@cached_property
def serverClock(self):
return self._get_dataobject("serverClock", False)
class SessionManager(ManagedObject):
_valid_attrs = set(['currentSession', 'defaultLocale', 'message', 'messageLocaleList', 'sessionList', 'supportedLocaleList'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def currentSession(self):
return self._get_dataobject("currentSession", False)
@cached_property
def defaultLocale(self):
return self._get_dataobject("defaultLocale", False)
@cached_property
def message(self):
return self._get_dataobject("message", False)
@cached_property
def messageLocaleList(self):
return self._get_dataobject("messageLocaleList", True)
@cached_property
def sessionList(self):
return self._get_dataobject("sessionList", True)
@cached_property
def supportedLocaleList(self):
return self._get_dataobject("supportedLocaleList", True)
class StoragePod(Folder):
_valid_attrs = set(['podStorageDrsEntry', 'summary'])
def __init__(self, mo_ref, client):
Folder.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, Folder._valid_attrs)
@cached_property
def podStorageDrsEntry(self):
return self._get_dataobject("podStorageDrsEntry", False)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
class StorageResourceManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class Task(ExtensibleManagedObject):
_valid_attrs = set(['info'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def info(self):
return self._get_dataobject("info", False)
class TaskHistoryCollector(HistoryCollector):
_valid_attrs = set(['latestPage'])
def __init__(self, mo_ref, client):
HistoryCollector.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, HistoryCollector._valid_attrs)
@cached_property
def latestPage(self):
return self._get_dataobject("latestPage", True)
class TaskManager(ManagedObject):
_valid_attrs = set(['description', 'maxCollector', 'recentTask'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def description(self):
return self._get_dataobject("description", False)
@cached_property
def maxCollector(self):
return self._get_dataobject("maxCollector", False)
@cached_property
def recentTask(self):
return self._get_mor("recentTask", True)
class UserDirectory(ManagedObject):
_valid_attrs = set(['domainList'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def domainList(self):
return self._get_dataobject("domainList", True)
class ViewManager(ManagedObject):
_valid_attrs = set(['viewList'])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
@cached_property
def viewList(self):
return self._get_mor("viewList", True)
class VirtualApp(ResourcePool):
_valid_attrs = set(['childLink', 'datastore', 'network', 'parentFolder', 'parentVApp', 'vAppConfig'])
def __init__(self, mo_ref, client):
ResourcePool.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ResourcePool._valid_attrs)
@cached_property
def childLink(self):
return self._get_dataobject("childLink", True)
@cached_property
def datastore(self):
return self._get_mor("datastore", True)
@cached_property
def network(self):
return self._get_mor("network", True)
@cached_property
def parentFolder(self):
return self._get_mor("parentFolder", False)
@cached_property
def parentVApp(self):
return self._get_mor("parentVApp", False)
@cached_property
def vAppConfig(self):
return self._get_dataobject("vAppConfig", False)
class VirtualDiskManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class VirtualizationManager(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class VirtualMachine(ManagedEntity):
_valid_attrs = set(['capability', 'config', 'datastore', 'environmentBrowser', 'guest', 'guestHeartbeatStatus', 'layout', 'layoutEx', 'network', 'parentVApp', 'resourceConfig', 'resourcePool', 'rootSnapshot', 'runtime', 'snapshot', 'storage', 'summary'])
def __init__(self, mo_ref, client):
ManagedEntity.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedEntity._valid_attrs)
@cached_property
def capability(self):
return self._get_dataobject("capability", False)
@cached_property
def config(self):
return self._get_dataobject("config", False)
@cached_property
def datastore(self):
return self._get_mor("datastore", True)
@cached_property
def environmentBrowser(self):
return self._get_mor("environmentBrowser", False)
@cached_property
def guest(self):
return self._get_dataobject("guest", False)
@cached_property
def guestHeartbeatStatus(self):
return self._get_dataobject("guestHeartbeatStatus", False)
@cached_property
def layout(self):
return self._get_dataobject("layout", False)
@cached_property
def layoutEx(self):
return self._get_dataobject("layoutEx", False)
@cached_property
def network(self):
return self._get_mor("network", True)
@cached_property
def parentVApp(self):
return self._get_mor("parentVApp", False)
@cached_property
def resourceConfig(self):
return self._get_dataobject("resourceConfig", False)
@cached_property
def resourcePool(self):
return self._get_mor("resourcePool", False)
@cached_property
def rootSnapshot(self):
return self._get_mor("rootSnapshot", True)
@cached_property
def runtime(self):
return self._get_dataobject("runtime", False)
@cached_property
def snapshot(self):
return self._get_dataobject("snapshot", False)
@cached_property
def storage(self):
return self._get_dataobject("storage", False)
@cached_property
def summary(self):
return self._get_dataobject("summary", False)
class VirtualMachineCompatibilityChecker(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class VirtualMachineProvisioningChecker(ManagedObject):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
ManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ManagedObject._valid_attrs)
class VirtualMachineSnapshot(ExtensibleManagedObject):
_valid_attrs = set(['childSnapshot', 'config'])
def __init__(self, mo_ref, client):
ExtensibleManagedObject.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, ExtensibleManagedObject._valid_attrs)
@cached_property
def childSnapshot(self):
return self._get_mor("childSnapshot", True)
@cached_property
def config(self):
return self._get_dataobject("config", False)
class VmwareDistributedVirtualSwitch(DistributedVirtualSwitch):
_valid_attrs = set([])
def __init__(self, mo_ref, client):
DistributedVirtualSwitch.__init__(self, mo_ref, client)
self._valid_attrs = set.union(self._valid_attrs, DistributedVirtualSwitch._valid_attrs)
classmap = dict((x.__name__, x) for x in (
ExtensibleManagedObject,
Alarm,
AlarmManager,
AuthorizationManager,
ManagedEntity,
ComputeResource,
ClusterComputeResource,
Profile,
ClusterProfile,
ProfileManager,
ClusterProfileManager,
View,
ManagedObjectView,
ContainerView,
CustomFieldsManager,
CustomizationSpecManager,
Datacenter,
Datastore,
DiagnosticManager,
Network,
DistributedVirtualPortgroup,
DistributedVirtualSwitch,
DistributedVirtualSwitchManager,
EnvironmentBrowser,
HistoryCollector,
EventHistoryCollector,
EventManager,
ExtensionManager,
FileManager,
Folder,
GuestAuthManager,
GuestFileManager,
GuestOperationsManager,
GuestProcessManager,
HostAuthenticationStore,
HostDirectoryStore,
HostActiveDirectoryAuthentication,
HostAuthenticationManager,
HostAutoStartManager,
HostBootDeviceSystem,
HostCacheConfigurationManager,
HostCpuSchedulerSystem,
HostDatastoreBrowser,
HostDatastoreSystem,
HostDateTimeSystem,
HostDiagnosticSystem,
HostEsxAgentHostManager,
HostFirewallSystem,
HostFirmwareSystem,
HostHealthStatusSystem,
HostImageConfigManager,
HostKernelModuleSystem,
HostLocalAccountManager,
HostLocalAuthentication,
HostMemorySystem,
HostNetworkSystem,
HostPatchManager,
HostPciPassthruSystem,
HostPowerSystem,
HostProfile,
HostProfileManager,
HostServiceSystem,
HostSnmpSystem,
HostStorageSystem,
HostSystem,
HostVirtualNicManager,
HostVMotionSystem,
HttpNfcLease,
InventoryView,
IpPoolManager,
IscsiManager,
LicenseAssignmentManager,
LicenseManager,
ListView,
LocalizationManager,
OptionManager,
OvfManager,
PerformanceManager,
ProfileComplianceManager,
PropertyCollector,
PropertyFilter,
ResourcePlanningManager,
ResourcePool,
ScheduledTask,
ScheduledTaskManager,
SearchIndex,
ServiceInstance,
SessionManager,
StoragePod,
StorageResourceManager,
Task,
TaskHistoryCollector,
TaskManager,
UserDirectory,
ViewManager,
VirtualApp,
VirtualDiskManager,
VirtualizationManager,
VirtualMachine,
VirtualMachineCompatibilityChecker,
VirtualMachineProvisioningChecker,
VirtualMachineSnapshot,
VmwareDistributedVirtualSwitch
))
def classmapper(name):
return classmap[name]
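def _example_wrap_mo_ref(mo_ref, client):
    # Sketch: classmapper resolves a vSphere type name to the wrapper class
    # defined above. Reading the name from mo_ref._type is an assumption
    # about the managed object reference's shape.
    cls = classmapper(mo_ref._type)
    return cls(mo_ref, client)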
|
|
from django import forms
from django.conf import settings
from django.contrib.auth.forms import SetPasswordForm, AuthenticationForm, \
PasswordResetForm
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.db.models.query import QuerySet
from django.utils.translation import ugettext as _
from jinja2 import Markup as mark_safe
from zerver.lib.actions import do_change_password, user_email_is_unique, \
validate_email_for_realm
from zerver.lib.name_restrictions import is_reserved_subdomain, is_disposable_domain
from zerver.lib.request import JsonableError
from zerver.lib.send_email import send_email, FromAddress
from zerver.lib.users import check_full_name
from zerver.lib.utils import get_subdomain, check_subdomain
from zerver.models import Realm, get_user_profile_by_email, UserProfile, \
get_realm_by_email_domain, get_realm, \
get_unique_open_realm, email_to_domain, email_allowed_for_realm
from zproject.backends import password_auth_enabled
import logging
import re
import DNS
from typing import Any, Callable, List, Optional, Text, Dict
MIT_VALIDATION_ERROR = u'That user does not exist at MIT or is a ' + \
u'<a href="https://ist.mit.edu/email-lists">mailing list</a>. ' + \
u'If you want to sign up an alias for Zulip, ' + \
u'<a href="mailto:support@zulipchat.com">contact us</a>.'
WRONG_SUBDOMAIN_ERROR = "Your Zulip account is not a member of the " + \
"organization associated with this subdomain. " + \
"Please contact %s with any questions!" % (FromAddress.SUPPORT,)
def email_is_not_mit_mailing_list(email):
# type: (Text) -> None
"""Prevent MIT mailing lists from signing up for Zulip"""
if "@mit.edu" in email:
username = email.rsplit("@", 1)[0]
# Check whether the user exists and can get mail.
try:
DNS.dnslookup("%s.pobox.ns.athena.mit.edu" % username, DNS.Type.TXT)
except DNS.Base.ServerError as e:
if e.rcode == DNS.Status.NXDOMAIN:
raise ValidationError(mark_safe(MIT_VALIDATION_ERROR))
else:
raise
class RegistrationForm(forms.Form):
MAX_PASSWORD_LENGTH = 100
full_name = forms.CharField(max_length=UserProfile.MAX_NAME_LENGTH)
# The required-ness of the password field gets overridden if it isn't
# actually required for a realm
password = forms.CharField(widget=forms.PasswordInput, max_length=MAX_PASSWORD_LENGTH)
realm_subdomain = forms.CharField(max_length=Realm.MAX_REALM_SUBDOMAIN_LENGTH, required=False)
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
        # Since the superclass doesn't accept arbitrary extra kwargs, we
        # remove realm_creation from the kwargs dict before initializing.
realm_creation = kwargs['realm_creation']
del kwargs['realm_creation']
super(RegistrationForm, self).__init__(*args, **kwargs)
if settings.TERMS_OF_SERVICE:
self.fields['terms'] = forms.BooleanField(required=True)
self.fields['realm_name'] = forms.CharField(
max_length=Realm.MAX_REALM_NAME_LENGTH,
required=realm_creation)
def clean_full_name(self):
# type: () -> Text
try:
return check_full_name(self.cleaned_data['full_name'])
except JsonableError as e:
raise ValidationError(e.msg)
def clean_realm_subdomain(self):
# type: () -> str
if settings.REALMS_HAVE_SUBDOMAINS:
error_strings = {
'too short': _("Subdomain needs to have length 3 or greater."),
'extremal dash': _("Subdomain cannot start or end with a '-'."),
'bad character': _("Subdomain can only have lowercase letters, numbers, and '-'s."),
'unavailable': _("Subdomain unavailable. Please choose a different one.")}
else:
error_strings = {
'too short': _("Short name needs at least 3 characters."),
'extremal dash': _("Short name cannot start or end with a '-'."),
'bad character': _("Short name can only have lowercase letters, numbers, and '-'s."),
'unavailable': _("Short name unavailable. Please choose a different one.")}
subdomain = self.cleaned_data['realm_subdomain']
if not subdomain:
return ''
if len(subdomain) < 3:
raise ValidationError(error_strings['too short'])
if subdomain[0] == '-' or subdomain[-1] == '-':
raise ValidationError(error_strings['extremal dash'])
if not re.match('^[a-z0-9-]*$', subdomain):
raise ValidationError(error_strings['bad character'])
if is_reserved_subdomain(subdomain) or \
get_realm(subdomain) is not None:
raise ValidationError(error_strings['unavailable'])
return subdomain
class ToSForm(forms.Form):
terms = forms.BooleanField(required=True)
class HomepageForm(forms.Form):
email = forms.EmailField()
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
self.realm = kwargs.pop('realm', None)
self.from_multiuse_invite = kwargs.pop('from_multiuse_invite', False)
super(HomepageForm, self).__init__(*args, **kwargs)
def clean_email(self):
# type: () -> str
"""Returns the email if and only if the user's email address is
allowed to join the realm they are trying to join."""
email = self.cleaned_data['email']
if get_unique_open_realm():
return email
# Otherwise, the user is trying to join a specific realm.
realm = self.realm
from_multiuse_invite = self.from_multiuse_invite
if realm is None and not settings.REALMS_HAVE_SUBDOMAINS:
realm = get_realm_by_email_domain(email)
if realm is None:
if settings.REALMS_HAVE_SUBDOMAINS:
raise ValidationError(_("The organization you are trying to "
"join using {email} does not "
"exist.").format(email=email))
else:
raise ValidationError(_("Your email address, {email}, does not "
"correspond to any existing "
"organization.").format(email=email))
if not from_multiuse_invite and realm.invite_required:
raise ValidationError(_("Please request an invite for {email} "
"from the organization "
"administrator.").format(email=email))
if not email_allowed_for_realm(email, realm):
raise ValidationError(
_("Your email address, {email}, is not in one of the domains "
"that are allowed to register for accounts in this organization.").format(
                      email=email))
validate_email_for_realm(realm, email)
if realm.is_zephyr_mirror_realm:
email_is_not_mit_mailing_list(email)
return email
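def _example_homepage_validation(realm):
    # Hypothetical sketch: clean_email() above decides whether this address
    # may join the given realm ("realm" is assumed to be a Realm instance).
    form = HomepageForm({'email': 'user@example.com'}, realm=realm)
    return form.is_valid()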
def email_is_not_disposable(email):
# type: (Text) -> None
if is_disposable_domain(email_to_domain(email)):
raise ValidationError(_("Please use your real email address."))
class RealmCreationForm(forms.Form):
# This form determines whether users can create a new realm.
email = forms.EmailField(validators=[user_email_is_unique, email_is_not_disposable])
class LoggingSetPasswordForm(SetPasswordForm):
def save(self, commit=True):
# type: (bool) -> UserProfile
do_change_password(self.user, self.cleaned_data['new_password1'],
commit=commit)
return self.user
class ZulipPasswordResetForm(PasswordResetForm):
def get_users(self, email):
# type: (str) -> QuerySet
"""Given an email, return matching user(s) who should receive a reset.
This is modified from the original in that it allows non-bot
users who don't have a usable password to reset their
passwords.
"""
        if not password_auth_enabled():
logging.info("Password reset attempted for %s even though password auth is disabled." % (email,))
return []
result = UserProfile.objects.filter(email__iexact=email, is_active=True,
is_bot=False)
if len(result) == 0:
logging.info("Password reset attempted for %s; no active account." % (email,))
return result
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name=None):
# type: (str, str, Dict[str, Any], str, str, str) -> None
"""
Currently we don't support accounts in multiple subdomains using
a single email address. We override this function so that we do
        not send a reset link to an email address if the reset attempt is
        made from a subdomain that does not match user.realm.subdomain.
Once we start supporting accounts with the same email in
multiple subdomains, we may be able to refactor this function.
A second reason we override this function is so that we can send
the mail through the functions in zerver.lib.send_email, to match
how we send all other mail in the codebase.
"""
user = get_user_profile_by_email(to_email)
attempted_subdomain = get_subdomain(getattr(self, 'request'))
context['attempted_realm'] = False
if not check_subdomain(user.realm.subdomain, attempted_subdomain):
context['attempted_realm'] = get_realm(attempted_subdomain)
send_email('zerver/emails/password_reset', to_user_id=user.id,
from_name="Zulip Account Security",
from_address=FromAddress.NOREPLY, context=context)
def save(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Currently we don't support accounts in multiple subdomains using
        a single email address. We override this function so that we can
inject request parameter in context. This parameter will be used
by send_mail function.
Once we start supporting accounts with the same email in
multiple subdomains, we may be able to delete or refactor this
function.
"""
setattr(self, 'request', kwargs.get('request'))
super(ZulipPasswordResetForm, self).save(*args, **kwargs)
class CreateUserForm(forms.Form):
full_name = forms.CharField(max_length=100)
email = forms.EmailField()
class OurAuthenticationForm(AuthenticationForm):
def clean_username(self):
# type: () -> str
email = self.cleaned_data['username']
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
return email
if user_profile.realm.deactivated:
error_msg = u"""Sorry for the trouble, but %s has been deactivated.
Please contact %s to reactivate this group.""" % (
user_profile.realm.name,
FromAddress.SUPPORT)
raise ValidationError(mark_safe(error_msg))
if not user_profile.is_active and not user_profile.is_mirror_dummy:
error_msg = (
u"Sorry for the trouble, but your account has been deactivated. "
u"Please contact your organization administrator to reactivate it. "
u"If you're not sure who that is, try contacting %s.") % (FromAddress.SUPPORT,)
raise ValidationError(mark_safe(error_msg))
if not check_subdomain(get_subdomain(self.request), user_profile.realm.subdomain):
logging.warning("User %s attempted to password login to wrong subdomain %s" %
(user_profile.email, get_subdomain(self.request)))
raise ValidationError(mark_safe(WRONG_SUBDOMAIN_ERROR))
return email
class MultiEmailField(forms.Field):
def to_python(self, emails):
# type: (Text) -> List[Text]
"""Normalize data to a list of strings."""
if not emails:
return []
return [email.strip() for email in emails.split(',')]
def validate(self, emails):
# type: (List[Text]) -> None
"""Check if value consists only of valid emails."""
super(MultiEmailField, self).validate(emails)
for email in emails:
validate_email(email)
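# Illustrative behavior (a sketch only, not used elsewhere in this module):
# MultiEmailField().to_python("a@example.com, b@example.com") returns
# ["a@example.com", "b@example.com"]; validate() then runs Django's
# validate_email on each entry, raising ValidationError on the first bad one.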
class FindMyTeamForm(forms.Form):
emails = MultiEmailField(
help_text=_("Add up to 10 comma-separated email addresses."))
def clean_emails(self):
# type: () -> List[Text]
emails = self.cleaned_data['emails']
if len(emails) > 10:
raise forms.ValidationError(_("Please enter at most 10 emails."))
return emails
|
|
#!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from pprint import pprint
from time import sleep
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 3)
def setup_network(self, split=False):
self.nodes = start_nodes(3, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test(self):
print "Mining blocks..."
        feeTolerance = Decimal("0.00000002") # if the fee's positive delta is higher than this value the tests fail; a negative delta always fails them
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert_equal(len(dec_tx['vin']) > 0, True) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_equal(len(dec_tx['vin']) > 0, True) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_equal(len(dec_tx['vin']) > 0, True)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(len(dec_tx['vin']) > 0, True)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 5.0:
utx = aUtx
                break
assert_equal(utx!=False, True)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
        # test a fundrawtransaction which will not get a change output #
#####################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 5.0:
utx = aUtx
                break
assert_equal(utx!=False, True)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 1.0:
utx = aUtx
                break
assert_equal(utx!=False, True)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
rawtx = rawtx[:82] + "0100" + rawtx[84:]
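        # (2 hex chars per byte: 2*(4+1+36) = 82, so chars 82-84 hold the
        # scriptSig length byte. "0100" sets length 1 with a single 0x00 byte,
        # which is why the scriptSig hex is asserted to be "00" below.)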
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 1.0:
utx = aUtx
if aUtx['amount'] == 5.0:
utx2 = aUtx
        assert_equal(utx!=False, True)
        assert_equal(utx2!=False, True)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 1.0:
utx = aUtx
if aUtx['amount'] == 5.0:
utx2 = aUtx
        assert_equal(utx!=False, True)
        assert_equal(utx2!=False, True)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
errorString = ""
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        except JSONRPCException as e:
errorString = e.error['message']
assert_equal("Insufficient" in errorString, True);
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
        txId = self.nodes[0].sendmany("", outputs)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
        #compare fee of a 4of5 multisig p2sh transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 1.2 BTC to msig addr
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_nodes(self.nodes)
wait_bitcoinds()
self.nodes = start_nodes(3, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
error = False
try:
            self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
        except JSONRPCException:
            error = True
assert(error)
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
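        # fundrawtransaction only selects inputs and computes change; it needs
        # no private keys, so it succeeds on the locked wallet. Signing below
        # is what actually requires the passphrase.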
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
        # make sure funds are received at node0
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
        txId = self.nodes[1].sendmany("", outputs)
        signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
if __name__ == '__main__':
RawTransactionsTest().main()
|
|
"""Use the EPA crosswalk to connect EPA units to EIA generators and other data.
A major use case for this dataset is to identify subplants within plant_ids,
which are the smallest coherent units for aggregation.
Despite the name, plant_id refers to a legal entity that often contains
multiple distinct power plants, even of different technology or fuel types.
EPA CEMS data combines information from several parts of a power plant:
* emissions from smokestacks
* fuel use from combustors
* electricity production from generators
But smokestacks, combustors, and generators can be connected in
complex, many-to-many relationships. This complexity makes attribution difficult for,
as an example, allocating pollution to energy producers.
Furthermore, heterogeneity within plant_ids makes aggregation
to the parent entity difficult or inappropriate.
But by analyzing the relationships between combustors and generators,
as provided in the EPA/EIA crosswalk, we can identify distinct power plants.
These are the smallest coherent units of aggregation.
In graph analysis terminology, the crosswalk is a list of edges between
nodes (combustors and generators) in a bipartite graph. The networkx python
package provides functions to analyze this edge list and extract
disjoint subgraphs (groups of combustors and generators that are connected to each other).
These are the distinct power plants. To avoid a name collision
with plant_id, we term these collections 'subplants', and identify them with a subplant_id
that is unique within each plant_id. Subplants are thus identified with the composite key
(plant_id, subplant_id).
Through this analysis, we found that 56% of plant_ids contain multiple distinct subplants,
and 11% contain subplants with different technology types, such as
a gas boiler and gas turbine (not in a combined cycle).
Usage Example:
epacems = pudl.output.epacems.epacems(states=['ID']) # small subset for quick test
epa_crosswalk_df = pudl.output.epacems.epa_crosswalk()
filtered_crosswalk = filter_crosswalk(epa_crosswalk_df, epacems)
crosswalk_with_subplant_ids = make_subplant_ids(filtered_crosswalk)
"""
from typing import Union
import dask.dataframe as dd
import networkx as nx
import pandas as pd
def _get_unique_keys(epacems: Union[pd.DataFrame, dd.DataFrame]) -> pd.DataFrame:
"""Get unique unit IDs from CEMS data.
Args:
epacems (Union[pd.DataFrame, dd.DataFrame]): epacems dataset from pudl.output.epacems.epacems
Returns:
pd.DataFrame: unique keys from the epacems dataset
"""
# The purpose of this function is mostly to resolve the
# ambiguity between dask and pandas dataframes
ids = epacems[["plant_id_eia", "unitid", "unit_id_epa"]].drop_duplicates()
if isinstance(epacems, dd.DataFrame):
ids = ids.compute()
return ids
def filter_crosswalk_by_epacems(crosswalk: pd.DataFrame, epacems: Union[pd.DataFrame, dd.DataFrame]) -> pd.DataFrame:
"""Inner join unique CEMS units with the EPA crosswalk.
This is essentially an empirical filter on EPA units. Instead of filtering by construction/retirement dates
in the crosswalk (thus assuming they are accurate), use the presence/absence of CEMS data to filter the units.
Args:
crosswalk (pd.DataFrame): the EPA crosswalk, as from pudl.output.epacems.epa_crosswalk()
        epacems (Union[pd.DataFrame, dd.DataFrame]): emissions data; unique unit keys are derived internally via _get_unique_keys
Returns:
pd.DataFrame: the inner join of the EPA crosswalk and unique epacems units. Adds the global ID column unit_id_epa.
"""
unique_epacems_ids = _get_unique_keys(epacems)
key_map = unique_epacems_ids.merge(
crosswalk,
left_on=["plant_id_eia", "unitid"],
right_on=["CAMD_PLANT_ID", "CAMD_UNIT_ID"],
how="inner",
)
return key_map
def filter_out_unmatched(crosswalk: pd.DataFrame) -> pd.DataFrame:
"""Remove unmatched or excluded (non-exporting) units.
Unmatched rows are limitations of the completeness of the EPA crosswalk itself, not of PUDL.
Args:
crosswalk (pd.DataFrame): the EPA crosswalk, as from pudl.output.epacems.epa_crosswalk()
Returns:
pd.DataFrame: the EPA crosswalk with unmatched units removed
"""
bad = crosswalk["MATCH_TYPE_GEN"].isin({"CAMD Unmatched", "Manual CAMD Excluded"})
return crosswalk.loc[~bad].copy()
def filter_out_boiler_rows(crosswalk: pd.DataFrame) -> pd.DataFrame:
"""Remove rows that represent graph edges between generators and boilers.
Args:
crosswalk (pd.DataFrame): the EPA crosswalk, as from pudl.output.epacems.epa_crosswalk()
Returns:
pd.DataFrame: the EPA crosswalk with boiler rows (many/one-to-many) removed
"""
crosswalk = crosswalk.drop_duplicates(
subset=["CAMD_PLANT_ID", "CAMD_UNIT_ID", "EIA_GENERATOR_ID"])
return crosswalk
def _prep_for_networkx(crosswalk: pd.DataFrame) -> pd.DataFrame:
"""Make surrogate keys for combustors and generators.
Args:
crosswalk (pd.DataFrame): EPA crosswalk, as from pudl.output.epacems.epa_crosswalk()
Returns:
pd.DataFrame: copy of EPA crosswalk with new surrogate ID columns 'combustor_id' and 'generator_id'
"""
prepped = crosswalk.copy()
# networkx can't handle composite keys, so make surrogates
prepped["combustor_id"] = prepped.groupby(
by=["CAMD_PLANT_ID", "CAMD_UNIT_ID"]).ngroup()
# node IDs can't overlap so add (max + 1)
prepped["generator_id"] = (
prepped.groupby(by=["CAMD_PLANT_ID", "EIA_GENERATOR_ID"]).ngroup()
+ prepped["combustor_id"].max()
+ 1
)
return prepped
def _subplant_ids_from_prepped_crosswalk(prepped: pd.DataFrame) -> pd.DataFrame:
"""Use networkx graph analysis to create global subplant IDs from a preprocessed crosswalk edge list.
Args:
prepped (pd.DataFrame): an EPA crosswalk that has passed through _prep_for_networkx()
Returns:
pd.DataFrame: copy of EPA crosswalk plus new column 'global_subplant_id'
"""
graph = nx.from_pandas_edgelist(
prepped,
source="combustor_id",
target="generator_id",
edge_attr=True,
)
for i, node_set in enumerate(nx.connected_components(graph)):
subgraph = graph.subgraph(node_set)
assert nx.algorithms.bipartite.is_bipartite(
subgraph
), f"non-bipartite: i={i}, node_set={node_set}"
nx.set_edge_attributes(subgraph, name="global_subplant_id", values=i)
return nx.to_pandas_edgelist(graph)
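# A minimal sketch (hypothetical helper, not part of the public API) of the
# grouping logic above: combustors that share a generator collapse into one
# connected component (one subplant), while an unconnected pair gets its own.
def _demo_subplant_grouping() -> pd.DataFrame:
    """Toy edge list: expect two components, hence two global subplant IDs."""
    edges = pd.DataFrame({
        "combustor_id": [0, 1, 2],
        "generator_id": [10, 10, 11],  # combustors 0 and 1 share generator 10
    })
    graph = nx.from_pandas_edgelist(edges, source="combustor_id", target="generator_id")
    # connected_components yields node sets like {0, 1, 10} and {2, 11}
    id_map = {
        node: i
        for i, nodes in enumerate(nx.connected_components(graph))
        for node in nodes
    }
    edges["global_subplant_id"] = edges["combustor_id"].map(id_map)
    return edges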
def _convert_global_id_to_composite_id(crosswalk_with_ids: pd.DataFrame) -> pd.DataFrame:
"""Convert global_subplant_id to an equivalent composite key (CAMD_PLANT_ID, subplant_id).
The composite key will be much more stable (though not fully stable!) in time.
The global ID changes if ANY unit or generator changes, whereas the
compound key only changes if units/generators change within that specific plant.
A global ID could also tempt users into using it as a crutch, even though it isn't stable.
A compound key should discourage that behavior.
Args:
crosswalk_with_ids (pd.DataFrame): crosswalk with global_subplant_id, as from _subplant_ids_from_prepped_crosswalk()
Raises:
ValueError: if crosswalk_with_ids has a MultiIndex
Returns:
pd.DataFrame: copy of crosswalk_with_ids with an added column: 'subplant_id'
"""
if isinstance(crosswalk_with_ids.index, pd.MultiIndex):
raise ValueError(
f"Input crosswalk must have single level index. Given levels: {crosswalk_with_ids.index.names}")
reindexed = crosswalk_with_ids.reset_index() # copy
idx_name = crosswalk_with_ids.index.name
if idx_name is None:
# Indices with no name (None) are set to a pandas default name ('index'), which
# could (though probably won't) change.
idx_col = reindexed.columns.symmetric_difference(
crosswalk_with_ids.columns)[0] # get index name
else:
idx_col = idx_name
composite_key: pd.Series = (
reindexed
.groupby('CAMD_PLANT_ID', as_index=False)
.apply(lambda x: x.groupby('global_subplant_id').ngroup())
)
# Recombine. Could use index join but I chose to reindex, sort and assign.
# Errors like mismatched length will raise exceptions, which is good.
# drop the outer group, leave the reindexed row index
composite_key.reset_index(level=0, drop=True, inplace=True)
composite_key.sort_index(inplace=True) # put back in same order as reindexed
reindexed['subplant_id'] = composite_key
# restore original index
reindexed.set_index(idx_col, inplace=True) # restore values
reindexed.index.rename(idx_name, inplace=True) # restore original name
return reindexed
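# Illustration (hypothetical values): global_subplant_id 7 and 8 within
# CAMD_PLANT_ID 55 become subplant_id 0 and 1. Edits to some other plant can
# renumber the global IDs, but the composite keys (55, 0) and (55, 1) stay put.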
def filter_crosswalk(crosswalk: pd.DataFrame, epacems: Union[pd.DataFrame, dd.DataFrame]) -> pd.DataFrame:
"""Remove crosswalk rows that do not correspond to an EIA facility or are duplicated due to many-to-many boiler relationships.
Args:
crosswalk (pd.DataFrame): The EPA/EIA crosswalk, as from pudl.output.epacems.epa_crosswalk()
epacems (Union[pd.DataFrame, dd.DataFrame]): Emissions data. Must contain columns named ["plant_id_eia", "unitid", "unit_id_epa"]
Returns:
pd.DataFrame: A filtered copy of EPA crosswalk
"""
filtered_crosswalk = filter_out_unmatched(crosswalk)
filtered_crosswalk = filter_out_boiler_rows(filtered_crosswalk)
key_map = filter_crosswalk_by_epacems(
filtered_crosswalk, epacems)
return key_map
def make_subplant_ids(crosswalk: pd.DataFrame) -> pd.DataFrame:
"""Identify sub-plants in the EPA/EIA crosswalk graph. Any row filtering should be done before this step.
Usage Example:
epacems = pudl.output.epacems.epacems(states=['ID']) # small subset for quick test
epa_crosswalk_df = pudl.output.epacems.epa_crosswalk()
filtered_crosswalk = filter_crosswalk(epa_crosswalk_df, epacems)
crosswalk_with_subplant_ids = make_subplant_ids(filtered_crosswalk)
Args:
crosswalk (pd.DataFrame): The EPA/EIA crosswalk, as from pudl.output.epacems.epa_crosswalk()
Returns:
pd.DataFrame: An edge list connecting EPA units to EIA generators, with connected pieces issued a subplant_id
"""
edge_list = _prep_for_networkx(crosswalk)
edge_list = _subplant_ids_from_prepped_crosswalk(edge_list)
edge_list = _convert_global_id_to_composite_id(edge_list)
column_order = ["subplant_id"] + list(crosswalk.columns)
return edge_list[column_order] # reorder and drop global_subplant_id
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigip_message_routing_router
short_description: Manages router profiles for message-routing protocols
description:
- Manages router profiles for message-routing protocols.
version_added: "1.0.0"
options:
name:
description:
- Specifies the name of the router profile.
required: True
type: str
description:
description:
- The user-defined description of the router profile.
type: str
type:
description:
- Parameter used to specify the type of the router profile to manage.
- Default setting is C(generic) with more options coming.
type: str
choices:
- generic
default: generic
parent:
description:
- The parent template of this router profile. Once this value has been set, it cannot be changed.
- The default values are set by the system if not specified and they correspond to the router type created,
for example, C(/Common/messagerouter) for C(generic) C(type) and so on.
type: str
ignore_client_port:
description:
- When C(yes), the remote port on clientside connections (connections where the peer connected to the BIG-IP)
is ignored when searching for an existing connection.
type: bool
inherited_traffic_group:
description:
      - When set to C(yes), the C(traffic_group) will be inherited from the containing folder. When not specified,
        the system sets this to C(no) when creating a new router profile.
type: bool
traffic_group:
description:
- Specifies the traffic-group of the router profile.
- Setting the C(traffic_group) to an empty string value C("") will cause the device to inherit from containing
folder, which means the value of C(inherited_traffic_group) on device will be C(yes).
type: str
use_local_connection:
description:
- If C(yes), the router will route a message to an existing connection on the same TMM as the message was
received.
type: bool
max_pending_bytes:
description:
- The maximum number of bytes worth of pending messages that will be held while waiting for a connection to a
peer to be created. Once reached, any additional messages to the peer will be flagged as undeliverable
and returned to the originator.
- The accepted range is between 0 and 4294967295 inclusive.
type: int
max_pending_messages:
description:
- The maximum number of pending messages that will be held while waiting for a connection to a peer to be created.
Once reached, any additional messages to the peer will be flagged as undeliverable and returned
to the originator.
- The accepted range is between 0 and 65535 inclusive.
type: int
max_retries:
description:
      - Sets the maximum number of times a message may be resubmitted for rerouting by the C(MR::retry) iRule command.
- The accepted range is between 0 and 4294967295 inclusive.
type: int
mirror:
description:
- Enables or disables state mirroring. State mirroring can be used to maintain the same state information in the
standby unit that is in the active unit.
type: bool
mirrored_msg_sweeper_interval:
description:
- Specifies the maximum time in milliseconds that a message will be held on the standby device as it waits for
the active device to route the message.
      - Messages on the standby device held for longer than the configurable sweeper interval will be dropped.
- The acceptable range is between 0 and 4294967295 inclusive.
type: int
routes:
description:
- Specifies a list of static routes for the router instance to use.
      - The route must be on the same partition as the router profile.
type: list
elements: str
partition:
description:
- Device partition to create router profile on.
type: str
default: Common
state:
description:
- When C(present), ensures the router profile exists.
- When C(absent), ensures the router profile is removed.
type: str
choices:
- present
- absent
default: present
notes:
- Requires BIG-IP >= 14.0.0
extends_documentation_fragment: f5networks.f5_modules.f5
author:
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a generic router profile
bigip_message_routing_router:
name: foo
max_retries: 10
ignore_client_port: yes
routes:
- /Common/route1
- /Common/route2
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Modify a generic router profile
bigip_message_routing_router:
name: foo
ignore_client_port: no
mirror: yes
mirrored_msg_sweeper_interval: 4000
traffic_group: /Common/traffic-group-2
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Remove a generic router profile
bigip_message_routing_router:
name: foo
state: absent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
description:
description: The user-defined description of the router profile.
returned: changed
type: str
sample: My description
parent:
description: The parent template of this router profile.
returned: changed
type: str
sample: /Common/messagerouter
ignore_client_port:
description: Enables ignoring of the remote port on clientside connections when searching for an existing connection.
returned: changed
type: bool
sample: no
inherited_traffic_group:
description: Specifies if a traffic-group should be inherited from containing folder.
returned: changed
type: bool
sample: yes
traffic_group:
description: The traffic-group of the router profile.
returned: changed
type: str
sample: /Common/traffic-group-1
use_local_connection:
description: Enables routing of messages to an existing connection on the same TMM as the message was received.
returned: changed
type: bool
sample: yes
max_pending_bytes:
description: The maximum number of bytes worth of pending messages that will be held.
returned: changed
type: int
sample: 10000
max_pending_messages:
description: The maximum number of pending messages that will be held.
returned: changed
type: int
sample: 64
max_retries:
  description: The maximum number of times a message may be resubmitted for rerouting.
returned: changed
type: int
sample: 10
mirror:
description: Enables or disables state mirroring.
returned: changed
type: bool
sample: yes
mirrored_msg_sweeper_interval:
description: The maximum time in milliseconds that a message will be held on the standby device.
returned: changed
type: int
sample: 2000
routes:
description: The list of static routes for the router instance to use.
returned: changed
type: list
sample: ['/Common/route1', '/Common/route2']
'''
from datetime import datetime
from distutils.version import LooseVersion
from ansible.module_utils.basic import (
AnsibleModule, env_fallback
)
from ..module_utils.bigip import F5RestClient
from ..module_utils.common import (
F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec, flatten_boolean, fq_name
)
from ..module_utils.compare import (
cmp_str_with_none, cmp_simple_list
)
from ..module_utils.icontrol import tmos_version
from ..module_utils.teem import send_teem
class Parameters(AnsibleF5Parameters):
api_map = {
'defaultsFrom': 'parent',
'useLocalConnection': 'use_local_connection',
'ignoreClientPort': 'ignore_client_port',
'inheritedTrafficGroup': 'inherited_traffic_group',
'maxPendingBytes': 'max_pending_bytes',
'maxPendingMessages': 'max_pending_messages',
'maxRetries': 'max_retries',
'mirroredMessageSweeperInterval': 'mirrored_msg_sweeper_interval',
'trafficGroup': 'traffic_group',
}
api_attributes = [
'description',
'useLocalConnection',
'ignoreClientPort',
'inheritedTrafficGroup',
'maxPendingBytes',
'maxPendingMessages',
'maxRetries',
'mirror',
'mirroredMessageSweeperInterval',
'trafficGroup',
'routes',
'defaultsFrom',
]
returnables = [
'parent',
'description',
'use_local_connection',
'ignore_client_port',
'inherited_traffic_group',
'max_pending_bytes',
'max_pending_messages',
'max_retries',
'mirrored_msg_sweeper_interval',
'traffic_group',
'mirror',
'routes',
]
updatables = [
'description',
'use_local_connection',
'ignore_client_port',
'inherited_traffic_group',
'max_pending_bytes',
'max_pending_messages',
'max_retries',
'mirrored_msg_sweeper_interval',
'traffic_group',
'mirror',
'routes',
'parent',
]
@property
def ignore_client_port(self):
return flatten_boolean(self._values['ignore_client_port'])
@property
def use_local_connection(self):
return flatten_boolean(self._values['use_local_connection'])
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def inherited_traffic_group(self):
result = flatten_boolean(self._values['inherited_traffic_group'])
if result is None:
return None
if result == 'yes':
return 'true'
return 'false'
@property
def mirror(self):
result = flatten_boolean(self._values['mirror'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def max_pending_bytes(self):
if self._values['max_pending_bytes'] is None:
return None
if 0 <= self._values['max_pending_bytes'] <= 4294967295:
return self._values['max_pending_bytes']
raise F5ModuleError(
"Valid 'max_pending_bytes' must be in range 0 - 4294967295 bytes."
)
@property
def max_retries(self):
if self._values['max_retries'] is None:
return None
if 0 <= self._values['max_retries'] <= 4294967295:
return self._values['max_retries']
raise F5ModuleError(
"Valid 'max_retries' must be in range 0 - 4294967295."
)
@property
def max_pending_messages(self):
if self._values['max_pending_messages'] is None:
return None
if 0 <= self._values['max_pending_messages'] <= 65535:
return self._values['max_pending_messages']
raise F5ModuleError(
"Valid 'max_pending_messages' must be in range 0 - 65535 messages."
)
@property
def mirrored_msg_sweeper_interval(self):
if self._values['mirrored_msg_sweeper_interval'] is None:
return None
if 0 <= self._values['mirrored_msg_sweeper_interval'] <= 4294967295:
return self._values['mirrored_msg_sweeper_interval']
raise F5ModuleError(
"Valid 'mirrored_msg_sweeper_interval' must be in range 0 - 4294967295 milliseconds."
)
@property
def routes(self):
if self._values['routes'] is None:
return None
if len(self._values['routes']) == 1 and self._values['routes'][0] == "":
return ""
result = [fq_name(self.partition, peer) for peer in self._values['routes']]
return result
@property
def traffic_group(self):
if self._values['traffic_group'] is None:
return None
if self._values['traffic_group'] == "":
return ""
result = fq_name('Common', self._values['traffic_group'])
return result
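# Note: fq_name (from module_utils.common) is assumed here to prefix the
# partition when a name is not already fully qualified, e.g.
# fq_name('Common', 'traffic-group-2') -> '/Common/traffic-group-2', which is
# how the routes and traffic_group properties above build device paths.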
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
raise
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def mirror(self):
result = flatten_boolean(self._values['mirror'])
return result
@property
def inherited_traffic_group(self):
result = self._values['inherited_traffic_group']
if result == 'true':
return 'yes'
if result == 'false':
return 'no'
return None
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def description(self):
return cmp_str_with_none(self.want.description, self.have.description)
@property
def parent(self):
if self.want.parent is None:
return None
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent router profile cannot be changed."
)
@property
def routes(self):
result = cmp_simple_list(self.want.routes, self.have.routes)
return result
class BaseManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
start = datetime.now().isoformat()
version = tmos_version(self.client)
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
send_teem(start, self.module, version)
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
class GenericModuleManager(BaseManager):
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
errors = [401, 403, 409, 500, 501, 502, 503, 504]
if resp.status in errors or 'code' in response and response['code'] in errors:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return ApiParameters(params=response)
raise F5ModuleError(resp.content)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.kwargs = kwargs
def version_less_than_14(self):
version = tmos_version(self.client)
if LooseVersion(version) < LooseVersion('14.0.0'):
return True
return False
def exec_module(self):
if self.version_less_than_14():
raise F5ModuleError('Message routing is not supported on TMOS version below 14.x')
if self.module.params['type'] == 'generic':
manager = self.get_manager('generic')
else:
raise F5ModuleError(
"Unknown type specified."
)
return manager.exec_module()
def get_manager(self, type):
if type == 'generic':
return GenericModuleManager(**self.kwargs)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
description=dict(),
parent=dict(),
ignore_client_port=dict(type='bool'),
inherited_traffic_group=dict(type='bool'),
use_local_connection=dict(type='bool'),
max_pending_bytes=dict(type='int'),
max_pending_messages=dict(type='int'),
max_retries=dict(type='int'),
mirror=dict(type='bool'),
mirrored_msg_sweeper_interval=dict(type='int'),
routes=dict(
type='list',
elements='str',
),
traffic_group=dict(),
type=dict(
choices=['generic'],
default='generic'
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
state=dict(
default='present',
choices=['present', 'absent']
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
|
"""Stock market information from Alpha Vantage."""
from datetime import timedelta
import logging
import voluptuous as vol
from alpha_vantage.timeseries import TimeSeries
from alpha_vantage.foreignexchange import ForeignExchange
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_CURRENCY, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_CLOSE = "close"
ATTR_HIGH = "high"
ATTR_LOW = "low"
ATTRIBUTION = "Stock market information provided by Alpha Vantage"
CONF_FOREIGN_EXCHANGE = "foreign_exchange"
CONF_FROM = "from"
CONF_SYMBOL = "symbol"
CONF_SYMBOLS = "symbols"
CONF_TO = "to"
ICONS = {
"BTC": "mdi:currency-btc",
"EUR": "mdi:currency-eur",
"GBP": "mdi:currency-gbp",
"INR": "mdi:currency-inr",
"RUB": "mdi:currency-rub",
"TRY": "mdi:currency-try",
"USD": "mdi:currency-usd",
}
SCAN_INTERVAL = timedelta(minutes=5)
SYMBOL_SCHEMA = vol.Schema(
{
vol.Required(CONF_SYMBOL): cv.string,
vol.Optional(CONF_CURRENCY): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
CURRENCY_SCHEMA = vol.Schema(
{
vol.Required(CONF_FROM): cv.string,
vol.Required(CONF_TO): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_FOREIGN_EXCHANGE): vol.All(cv.ensure_list, [CURRENCY_SCHEMA]),
vol.Optional(CONF_SYMBOLS): vol.All(cv.ensure_list, [SYMBOL_SCHEMA]),
}
)
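# Example configuration.yaml entry matching the schema above (values are
# illustrative):
#
# sensor:
#   - platform: alpha_vantage
#     api_key: YOUR_API_KEY
#     symbols:
#       - symbol: GOOGL
#         name: Google
#     foreign_exchange:
#       - from: EUR
#         to: USD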
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Alpha Vantage sensor."""
api_key = config.get(CONF_API_KEY)
symbols = config.get(CONF_SYMBOLS, [])
conversions = config.get(CONF_FOREIGN_EXCHANGE, [])
if not symbols and not conversions:
msg = "No symbols or currencies configured."
hass.components.persistent_notification.create(msg, "Sensor alpha_vantage")
_LOGGER.warning(msg)
return
timeseries = TimeSeries(key=api_key)
dev = []
for symbol in symbols:
try:
_LOGGER.debug("Configuring timeseries for symbols: %s", symbol[CONF_SYMBOL])
timeseries.get_intraday(symbol[CONF_SYMBOL])
except ValueError:
_LOGGER.error("API Key is not valid or symbol '%s' not known", symbol)
dev.append(AlphaVantageSensor(timeseries, symbol))
forex = ForeignExchange(key=api_key)
for conversion in conversions:
from_cur = conversion.get(CONF_FROM)
to_cur = conversion.get(CONF_TO)
try:
_LOGGER.debug("Configuring forex %s - %s", from_cur, to_cur)
forex.get_currency_exchange_rate(from_currency=from_cur, to_currency=to_cur)
except ValueError as error:
_LOGGER.error(
"API Key is not valid or currencies '%s'/'%s' not known",
from_cur,
to_cur,
)
_LOGGER.debug(str(error))
dev.append(AlphaVantageForeignExchange(forex, conversion))
add_entities(dev, True)
_LOGGER.debug("Setup completed")
class AlphaVantageSensor(Entity):
"""Representation of a Alpha Vantage sensor."""
def __init__(self, timeseries, symbol):
"""Initialize the sensor."""
self._symbol = symbol[CONF_SYMBOL]
self._name = symbol.get(CONF_NAME, self._symbol)
self._timeseries = timeseries
self.values = None
self._unit_of_measurement = symbol.get(CONF_CURRENCY, self._symbol)
self._icon = ICONS.get(symbol.get(CONF_CURRENCY, "USD"))
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the sensor."""
return self.values["1. open"]
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self.values is not None:
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_CLOSE: self.values["4. close"],
ATTR_HIGH: self.values["2. high"],
ATTR_LOW: self.values["3. low"],
}
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug("Requesting new data for symbol %s", self._symbol)
all_values, _ = self._timeseries.get_intraday(self._symbol)
self.values = next(iter(all_values.values()))
_LOGGER.debug("Received new values for symbol %s", self._symbol)
class AlphaVantageForeignExchange(Entity):
"""Sensor for foreign exchange rates."""
def __init__(self, foreign_exchange, config):
"""Initialize the sensor."""
self._foreign_exchange = foreign_exchange
self._from_currency = config.get(CONF_FROM)
self._to_currency = config.get(CONF_TO)
if CONF_NAME in config:
self._name = config.get(CONF_NAME)
else:
self._name = f"{self._to_currency}/{self._from_currency}"
self._unit_of_measurement = self._to_currency
        self._icon = ICONS.get(self._from_currency, "mdi:currency-usd")
self.values = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the sensor."""
return round(float(self.values["5. Exchange Rate"]), 4)
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self.values is not None:
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
CONF_FROM: self._from_currency,
CONF_TO: self._to_currency,
}
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug(
"Requesting new data for forex %s - %s",
self._from_currency,
self._to_currency,
)
self.values, _ = self._foreign_exchange.get_currency_exchange_rate(
from_currency=self._from_currency, to_currency=self._to_currency
)
_LOGGER.debug(
"Received new data for forex %s - %s",
self._from_currency,
self._to_currency,
)
|
|
import six.moves.builtins as builtins
from theano.tensor.signal.pool import *
import keras.backend as K
import theano.tensor as T
def m_maxpool2d(x, pool_size, strides=(1, 1), border_mode='valid',
dim_ordering='th'):
if border_mode == 'same':
w_pad = pool_size[0] - 2 if pool_size[0] % 2 == 1 else pool_size[0] - 1
h_pad = pool_size[1] - 2 if pool_size[1] % 2 == 1 else pool_size[1] - 1
padding = (w_pad, h_pad)
elif border_mode == 'valid':
padding = (0, 0)
else:
raise Exception('Invalid border mode: ' + str(border_mode))
if dim_ordering not in {'th', 'tf'}:
raise Exception('Unknown dim_ordering ' + str(dim_ordering))
if dim_ordering == 'tf':
x = x.dimshuffle((0, 3, 1, 2))
pool_out, ind = m_maxpool_2d_op(x, ds=pool_size, st=strides,
ignore_border=True,
padding=padding, )
if border_mode == 'same':
expected_width = (x.shape[2] + strides[0] - 1) // strides[0]
expected_height = (x.shape[3] + strides[1] - 1) // strides[1]
pool_out = pool_out[:, :, :expected_width, :expected_height]
if dim_ordering == 'tf':
pool_out = pool_out.dimshuffle((0, 2, 3, 1))
return pool_out, ind
def m_maxpool_2d_op(input, ds, ignore_border=None, st=None, padding=(0, 0)):
"""Downscale the input by a specified factor
Takes as input a N-D tensor, where N >= 2. It downscales the input image by
the specified factor, by keeping only the maximum value of non-overlapping
patches of size (ds[0],ds[1])
Parameters
----------
input : N-D theano tensor of input images
Input images. Max pooling will be done over the 2 last dimensions.
ds : tuple of length 2
Factor by which to downscale (vertical ds, horizontal ds).
(2,2) will halve the image in each dimension.
ignore_border : bool (default None, will print a warning and set to False)
When True, (5,5) input with ds=(2,2) will generate a (2,2) output.
(3,3) otherwise.
st : tuple of two ints
Stride size, which is the number of shifts over rows/cols to get the
next pool region. If st is None, it is considered equal to ds
(no overlap on pooling regions).
padding : tuple of two ints
(pad_h, pad_w), pad zeros to extend beyond four borders of the
images, pad_h is the size of the top and bottom margins, and
pad_w is the size of the left and right margins.
"""
if input.ndim < 2:
raise NotImplementedError('pool_2d requires a dimension >= 2')
if ignore_border is None:
warnings.warn(
"pool_2d() will have the parameter ignore_border"
" default value changed to True (currently"
" False). To have consistent behavior with all Theano"
" version, explicitly add the parameter ignore_border=True."
" On the GPU, using ignore_border=True is needed to use cuDNN."
" When using ignore_border=False and not using cuDNN, the only"
" GPU combination supported is when"
" `ds == st and padding == (0, 0) and mode == 'max'`."
" Otherwise, the convolution will be executed on CPU.",
stacklevel=2)
ignore_border = False
if input.ndim == 4:
op = MPool(ds, ignore_border, st=st, padding=padding)
output, ind = op(input)
return output, ind
# extract image dimensions
img_shape = input.shape[-2:]
# count the number of "leading" dimensions, store as dmatrix
batch_size = tensor.prod(input.shape[:-2])
batch_size = tensor.shape_padright(batch_size, 1)
# store as 4D tensor with shape: (batch_size,1,height,width)
new_shape = tensor.cast(tensor.join(0, batch_size,
tensor.as_tensor([1]),
img_shape), 'int64')
input_4D = tensor.reshape(input, new_shape, ndim=4)
# downsample mini-batch of images
op = MPool(ds, ignore_border, st=st, padding=padding)
output, ind = op(input_4D)
# restore to original shape
outshp = tensor.join(0, input.shape[:-2], output.shape[-2:])
output = tensor.reshape(output, outshp, ndim=input.ndim)
ind = tensor.reshape(ind, input.shape)
return output, ind
class MPool(Pool):
"""
For N-dimensional tensors, consider that the last two dimensions span
images. This Op downsamples these images by taking the max, sum or average
over different patches.
The constructor takes the max, sum or average of different input patches.
Parameters
----------
ds : list or tuple of two ints
Downsample factor over rows and column.
ds indicates the pool region size.
ignore_border : bool
If ds doesn't divide imgshape, do we include an extra row/col
of partial downsampling (False) or ignore it (True).
st : list or tuple of two ints or None
Stride size, which is the number of shifts over rows/cols to get the
next pool region. If st is None, it is considered equal to ds
(no overlap on pooling regions).
padding: tuple of two ints
(pad_h, pad_w), pad zeros to extend beyond four borders of the images,
pad_h is the size of the top and bottom margins, and pad_w is the size
of the left and right margins.
"""
__props__ = ('ds', 'ignore_border', 'st', 'padding')
def __init__(self, ds, ignore_border, st, padding):
self.mode = 'max'
self.openmp = False
self.ds = ds
self.ignore_border = ignore_border
self.st = st
self.padding = padding
def make_node(self, x):
if x.type.ndim != 4:
raise TypeError()
x = tensor.as_tensor_variable(x)
# If the input dimensions are broadcastable we can have 0 in the output shape
broad = x.broadcastable[:2] + (False, False)
out1 = tensor.TensorType(x.dtype, broad)
out2 = tensor.TensorType(x.dtype, broad)
return gof.Apply(self, [x], [out1(), out2()])
def perform(self, node, inp, out):
x, = inp
z, ind_cell = out
# write the argmax mask through the output storage cell; the original code
# rebound the local name to a fresh array, silently discarding the indices
ind_cell[0] = numpy.zeros_like(x)
ind = ind_cell[0]
if len(x.shape) != 4:
raise NotImplementedError('Pool requires 4D input for now')
z_shape = self.out_shape(x.shape, self.ds, self.ignore_border, self.st,
self.padding)
if (z[0] is None) or (z[0].shape != z_shape):
z[0] = numpy.empty(z_shape, dtype=x.dtype)
zz = z[0]
# number of pooling output rows
pr = zz.shape[-2]
# number of pooling output cols
pc = zz.shape[-1]
ds0, ds1 = self.ds
st0, st1 = self.st
pad_h = self.padding[0]
pad_w = self.padding[1]
img_rows = x.shape[-2] + 2 * pad_h
img_cols = x.shape[-1] + 2 * pad_w
inc_pad = 0
# pad the image
if self.padding != (0, 0):
y = numpy.zeros(
(x.shape[0], x.shape[1], img_rows, img_cols),
dtype=x.dtype)
y[:, :, pad_h:(img_rows - pad_h), pad_w:(img_cols - pad_w)] = x
else:
y = x
for n in xrange(x.shape[0]):
for k in xrange(x.shape[1]):
for r in xrange(pr):
row_st = r * st0
row_end = builtins.min(row_st + ds0, img_rows)
if not inc_pad:
row_st = builtins.max(row_st, self.padding[0])
row_end = builtins.min(row_end, x.shape[-2] + pad_h)
for c in xrange(pc):
col_st = c * st1
col_end = builtins.min(col_st + ds1, img_cols)
if not inc_pad:
col_st = builtins.max(col_st, self.padding[1])
col_end = builtins.min(col_end,
x.shape[-1] + pad_w)
cur_max = y[n, k, row_st, col_st]
max_r, max_c = row_st, col_st
for rr in xrange(row_st, row_end):
for cc in xrange(col_st, col_end):
if y[n, k, rr, cc] > cur_max:
cur_max = y[n, k, rr, cc]
max_r, max_c = rr, cc
zz[n, k, r, c] = cur_max
ind[n, k, max_r, max_c] = 1
def infer_shape(self, node, in_shapes):
shp = self.out_shape(in_shapes[0], self.ds,
self.ignore_border, self.st, self.padding)
return [shp, in_shapes[0]]
def grad(self, inp, grads):
x, = inp
gz, _ = grads
maxout, _ = self(x)
return [MaxPoolGrad(self.ds,
ignore_border=self.ignore_border,
st=self.st, padding=self.padding)(x, maxout, gz)]
def c_code(self, node, name, inp, out, sub):
x, = inp
z, ind = out
fail = sub['fail']
ignore_border = int(self.ignore_border)
ds0, ds1 = self.ds
st0, st1 = self.st
pd0, pd1 = self.padding
ccode = """
int typenum = PyArray_ObjectType((PyObject*)%(x)s, 0);
int z_r, z_c; // shape of the output
int r, c; // shape of the padded_input
if(PyArray_NDIM(%(x)s)!=4)
{
PyErr_SetString(PyExc_ValueError, "x must be a 4d ndarray");
%(fail)s;
}
r = PyArray_DIMS(%(x)s)[2];
c = PyArray_DIMS(%(x)s)[3];
r += %(pd0)s * 2;
c += %(pd1)s * 2;
        // the original tested `&&`, which let one-sided padding through
        if ((%(pd0)s != 0 || %(pd1)s != 0) && !%(ignore_border)s)
{
PyErr_SetString(PyExc_ValueError,
"padding must be (0,0) when ignore border is False");
%(fail)s;
}
if (%(ignore_border)s)
{
// '/' in C is different from '/' in python
if (r - %(ds0)s < 0)
{
z_r = 0;
}
else
{
z_r = (r - %(ds0)s) / %(st0)s + 1;
}
if (c - %(ds1)s < 0)
{
z_c = 0;
}
else
{
z_c = (c - %(ds1)s) / %(st1)s + 1;
}
}
else
{
// decide how many rows the output has
if (%(st0)s >= %(ds0)s)
{
z_r = (r - 1) / %(st0)s + 1;
}
else
{
z_r = std::max(0, (r - 1 - %(ds0)s) / %(st0)s + 1) + 1;
}
// decide how many columns the output has
if (%(st1)s >= %(ds1)s)
{
z_c = (c - 1) / %(st1)s + 1;
}
else
{
z_c = std::max(0, (c - 1 - %(ds1)s) / %(st1)s + 1) + 1;
}
}
// memory allocation of z if necessary
if ((!%(z)s)
|| *PyArray_DIMS(%(z)s)!=4
||(PyArray_DIMS(%(z)s)[0] != PyArray_DIMS(%(x)s)[0])
||(PyArray_DIMS(%(z)s)[1] != PyArray_DIMS(%(x)s)[1])
||(PyArray_DIMS(%(z)s)[2] != z_r)
||(PyArray_DIMS(%(z)s)[3] != z_c)
)
{
if (%(z)s) Py_XDECREF(%(z)s);
npy_intp dims[4] = {0,0,0,0};
dims[0]=PyArray_DIMS(%(x)s)[0];
dims[1]=PyArray_DIMS(%(x)s)[1];
dims[2]=z_r;
dims[3]=z_c;
//TODO: zeros not necessary
%(z)s = (PyArrayObject*) PyArray_ZEROS(4, dims, typenum,0);
}
// memory allocation of ind if necessary
if ((!%(ind)s)
|| *PyArray_DIMS(%(ind)s)!=4
||(PyArray_DIMS(%(ind)s)[0] != PyArray_DIMS(%(x)s)[0])
||(PyArray_DIMS(%(ind)s)[1] != PyArray_DIMS(%(x)s)[1])
||(PyArray_DIMS(%(ind)s)[2] != PyArray_DIMS(%(x)s)[2])
||(PyArray_DIMS(%(ind)s)[3] != PyArray_DIMS(%(x)s)[3])
)
{
if (%(ind)s) Py_XDECREF(%(ind)s);
npy_intp dims[4] = {0,0,0,0};
dims[0]=PyArray_DIMS(%(x)s)[0];
dims[1]=PyArray_DIMS(%(x)s)[1];
dims[2]=PyArray_DIMS(%(x)s)[2];
dims[3]=PyArray_DIMS(%(x)s)[3];
//TODO: zeros not necessary
%(ind)s = (PyArrayObject*) PyArray_ZEROS(4, dims, typenum,0);
}
// used for indexing a pool region inside the input
int r_st, r_end, c_st, c_end;
dtype_%(x)s collector; // temp var for the value in a region
if (z_r && z_c)
{
for(int b=0; b<PyArray_DIMS(%(x)s)[0]; b++){
for(int k=0; k<PyArray_DIMS(%(x)s)[1]; k++){
for(int i=0; i< z_r; i++){
r_st = i * %(st0)s;
r_end = r_st + %(ds0)s;
// skip the padding
r_st = r_st < %(pd0)s ? %(pd0)s : r_st;
r_end = r_end > (r - %(pd0)s) ? r - %(pd0)s : r_end;
// from padded_img space to img space
r_st -= %(pd0)s;
r_end -= %(pd0)s;
// handle the case where no padding, ignore border is True
if (%(ignore_border)s)
{
r_end = r_end > r ? r : r_end;
}
for(int j=0; j<z_c; j++){
c_st = j * %(st1)s;
c_end = c_st + %(ds1)s;
// skip the padding
c_st = c_st < %(pd1)s ? %(pd1)s : c_st;
c_end = c_end > (c - %(pd1)s) ? c - %(pd1)s : c_end;
dtype_%(z)s * z = (
(dtype_%(z)s*)(PyArray_GETPTR4(%(z)s, b, k, i, j)));
// change coordinates from padding_img space into img space
c_st -= %(pd1)s;
c_end -= %(pd1)s;
// handle the case where no padding, ignore border is True
if (%(ignore_border)s)
{
c_end = c_end > c ? c : c_end;
}
// use the first element as the initial value of collector
collector = ((dtype_%(x)s*)(PyArray_GETPTR4(%(x)s,b,k,r_st,c_st)))[0];
int r_max = r_st, c_max = c_st;
// go through the pooled region in the unpadded input
for(int m=r_st; m<r_end; m++)
{
for(int n=c_st; n<c_end; n++)
{
dtype_%(x)s a = ((dtype_%(x)s*)(PyArray_GETPTR4(%(x)s,b,k,m,n)))[0];
if (a > collector) {
collector = a;
r_max = m;
c_max = n;
}
}
}
z[0] = collector;
dtype_%(ind)s * ind = (
(dtype_%(ind)s*)(PyArray_GETPTR4(%(ind)s, b, k, r_max, c_max)));
ind[0] = 1;
}
}
}
}
}
"""
return ccode % locals()
#####################################################################################################
def m_maxpool3d(x, pool_size, strides=(1, 1, 1), border_mode='valid',
dim_ordering='th'):
if border_mode == 'same':
# TODO: add implementation for border_mode="same"
raise Exception('border_mode="same" not supported with Theano.')
elif border_mode == 'valid':
ignore_border = True
padding = (0, 0)
else:
raise Exception('Invalid border mode: ' + str(border_mode))
if dim_ordering not in {'th', 'tf'}:
raise Exception('Unknown dim_ordering ' + str(dim_ordering))
if dim_ordering == 'tf':
x = x.dimshuffle((0, 4, 1, 2, 3))
# TODO: check dimensions manipulations
# pooling over conv_dim2, conv_dim1 (the last two dimensions)
out_shape = x.shape
output, ind1 = m_maxpool_2d_op(input=x,
ds=(pool_size[1], pool_size[2]),
st=(strides[1], strides[2]),
ignore_border=ignore_border,
padding=padding)
# pooling over conv_dim3
pool_out, ind2 = m_maxpool_2d_op(input=output.dimshuffle(0, 1, 4, 3, 2),
ds=(1, pool_size[0]),
st=(1, strides[0]),
ignore_border=ignore_border,
padding=padding)
pool_out = pool_out.dimshuffle(0, 1, 4, 3, 2)
ind2 = ind2.dimshuffle(0, 1, 4, 3, 2)
ind2 = K.resize_volumes(ind2, 1, pool_size[1], pool_size[2], dim_ordering)
padded_ind2 = T.zeros(out_shape)
padded_ind2 = T.set_subtensor(padded_ind2[:, :, :ind2.shape[2], :ind2.shape[3], :ind2.shape[4]], ind2)
ind = padded_ind2 * ind1
if dim_ordering == 'tf':
pool_out = pool_out.dimshuffle((0, 2, 3, 4, 1))
return pool_out, ind
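# Companion sketch for the 3D variant (illustrative; assumes a Theano-backed
# Keras, since m_maxpool3d calls K.resize_volumes). The mask is again defined
# on the input volume, for later unpooling; shapes are arbitrary.
def _demo_m_maxpool3d():
    import numpy as np
    import theano

    x = T.TensorType("float32", (False,) * 5)("x")  # 5D float32 input
    pooled, mask = m_maxpool3d(x, pool_size=(2, 2, 2), strides=(2, 2, 2),
                               border_mode="valid", dim_ordering="th")
    f = theano.function([x], [pooled, mask])
    data = np.random.rand(1, 1, 4, 4, 4).astype("float32")
    return f(data)  # pooled is (1, 1, 2, 2, 2); mask lives on the (4, 4, 4) grid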
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'
}
DOCUMENTATION = r'''
---
module: bigip_vlan
short_description: Manage VLANs on a BIG-IP system
description:
- Manage VLANs on a BIG-IP system.
version_added: "2.2"
options:
description:
description:
- The description to give to the VLAN.
tagged_interfaces:
description:
- Specifies a list of tagged interfaces and trunks that you want to
configure for the VLAN. Use tagged interfaces or trunks when
you want to assign a single interface or trunk to multiple VLANs.
aliases:
- tagged_interface
untagged_interfaces:
description:
- Specifies a list of untagged interfaces and trunks that you want to
configure for the VLAN.
aliases:
- untagged_interface
name:
description:
- The VLAN to manage. If the special VLAN C(ALL) is specified with
the C(state) value of C(absent) then all VLANs will be removed.
required: True
state:
description:
- The state of the VLAN on the system. When C(present), guarantees
that the VLAN exists with the provided attributes. When C(absent),
removes the VLAN from the system.
default: present
choices:
- absent
- present
tag:
description:
- Tag number for the VLAN. The tag number can be any integer between 1
and 4094. The system automatically assigns a tag number if you do not
specify a value.
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
- Requires BIG-IP versions >= 12.0.0
extends_documentation_fragment: f5
requirements:
- f5-sdk
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create VLAN
bigip_vlan:
name: "net1"
password: "secret"
server: "lb.mydomain.com"
user: "admin"
validate_certs: "no"
delegate_to: localhost
- name: Set VLAN tag
bigip_vlan:
name: "net1"
password: "secret"
server: "lb.mydomain.com"
tag: "2345"
user: "admin"
validate_certs: "no"
delegate_to: localhost
- name: Add VLAN 2345 as tagged to interface 1.1
bigip_vlan:
tagged_interface: 1.1
name: "net1"
password: "secret"
server: "lb.mydomain.com"
tag: "2345"
user: "admin"
validate_certs: "no"
delegate_to: localhost
- name: Add VLAN 1234 as tagged to interfaces 1.1 and 1.2
bigip_vlan:
tagged_interfaces:
- 1.1
- 1.2
name: "net1"
password: "secret"
server: "lb.mydomain.com"
tag: "1234"
user: "admin"
validate_certs: "no"
delegate_to: localhost
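# Illustrative variant, not in the original examples: the untagged
# counterpart of the task above, with placeholder interface names.
- name: Add VLAN 1234 as untagged to interfaces 1.3 and 1.4
  bigip_vlan:
    untagged_interfaces:
      - 1.3
      - 1.4
    name: "net1"
    password: "secret"
    server: "lb.mydomain.com"
    tag: "1234"
    user: "admin"
    validate_certs: "no"
  delegate_to: localhost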
'''
RETURN = r'''
description:
description: The description set on the VLAN
returned: changed
type: string
sample: foo VLAN
interfaces:
description: Interfaces that the VLAN is assigned to
returned: changed
type: list
sample: ['1.1','1.2']
name:
description: The name of the VLAN
returned: changed
type: string
sample: net1
partition:
description: The partition that the VLAN was created on
returned: changed
type: string
sample: Common
tag:
description: The ID of the VLAN
returned: changed
type: int
sample: 2345
'''
from ansible.module_utils.f5_utils import AnsibleF5Client
from ansible.module_utils.f5_utils import AnsibleF5Parameters
from ansible.module_utils.f5_utils import HAS_F5SDK
from ansible.module_utils.f5_utils import F5ModuleError
from ansible.module_utils.six import iteritems
from collections import defaultdict
try:
from ansible.module_utils.f5_utils import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
def __init__(self, params=None):
self._values = defaultdict(lambda: None)
if params:
self.update(params=params)
def update(self, params=None):
if params:
for k, v in iteritems(params):
if self.api_map is not None and k in self.api_map:
map_key = self.api_map[k]
else:
map_key = k
# Handle weird API parameters like `dns.proxy.__iter__` by
# using a map provided by the module developer
class_attr = getattr(type(self), map_key, None)
if isinstance(class_attr, property):
# There is a mapped value for the api_map key
if class_attr.fset is None:
# If the mapped value does not have
# an associated setter
self._values[map_key] = v
else:
# The mapped value has a setter
setattr(self, map_key, v)
else:
# If the mapped value is not a @property
self._values[map_key] = v
updatables = [
'tagged_interfaces', 'untagged_interfaces', 'tag',
'description'
]
returnables = [
'description', 'partition', 'name', 'tag', 'interfaces',
'tagged_interfaces', 'untagged_interfaces'
]
api_attributes = [
'description', 'interfaces', 'partition', 'name', 'tag'
]
api_map = {}
@property
def interfaces(self):
tagged = self._values['tagged_interfaces']
untagged = self._values['untagged_interfaces']
if tagged:
return [dict(name=x, tagged=True) for x in tagged]
if untagged:
return [dict(name=x, untagged=True) for x in untagged]
@property
def tagged_interfaces(self):
value = self._values['tagged_interfaces']
if value is None:
return None
ifcs = self._parse_return_ifcs()
for ifc in value:
if ifc not in ifcs:
err = 'The specified interface "%s" was not found' % ifc
raise F5ModuleError(err)
return value
@property
def untagged_interfaces(self):
value = self._values['untagged_interfaces']
if value is None:
return None
ifcs = self._parse_return_ifcs()
for ifc in value:
if ifc not in ifcs:
err = 'The specified interface "%s" was not found' % ifc
raise F5ModuleError(err)
return value
def _get_interfaces_from_device(self):
lst = self.client.api.tm.net.interfaces.get_collection()
return lst
def _parse_return_ifcs(self):
ifclst = self._get_interfaces_from_device()
ifcs = [str(x.name) for x in ifclst]
if not ifcs:
err = 'No interfaces were found'
raise F5ModuleError(err)
return ifcs
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if api_attribute in self.api_map:
result[api_attribute] = getattr(
self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
class ModuleManager(object):
def __init__(self, client):
self.client = client
self.have = None
self.want = Parameters()
self.want.client = self.client
self.want.update(self.client.module.params)
self.changes = Parameters()
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
return result
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Parameters(changed)
def _update_changed_options(self):
changed = {}
for key in Parameters.updatables:
if getattr(self.want, key) is not None:
attr1 = getattr(self.want, key)
attr2 = getattr(self.have, key)
if attr1 != attr2:
changed[key] = attr1
if changed:
self.changes = Parameters(changed)
return True
return False
def _have_interfaces(self, ifcs):
untagged = [str(x.name) for x in ifcs if hasattr(x, 'untagged')]
tagged = [str(x.name) for x in ifcs if hasattr(x, 'tagged')]
if untagged:
self.have.update({'untagged_interfaces': untagged})
if tagged:
self.have.update({'tagged_interfaces': tagged})
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have, ifcs = self.read_current_from_device()
if ifcs:
self._have_interfaces(ifcs)
if not self.should_update():
return False
if self.client.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.client.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the VLAN")
return True
def create(self):
self._set_changed_options()
if self.client.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.net.vlans.vlan.create(**params)
def update_on_device(self):
params = self.want.api_params()
result = self.client.api.tm.net.vlans.vlan.load(
name=self.want.name, partition=self.want.partition
)
result.modify(**params)
def exists(self):
return self.client.api.tm.net.vlans.vlan.exists(
name=self.want.name, partition=self.want.partition
)
def remove_from_device(self):
result = self.client.api.tm.net.vlans.vlan.load(
name=self.want.name, partition=self.want.partition
)
if result:
result.delete()
def read_current_from_device(self):
tmp_res = self.client.api.tm.net.vlans.vlan.load(
name=self.want.name, partition=self.want.partition
)
ifcs = tmp_res.interfaces_s.get_collection()
result = tmp_res.attrs
return Parameters(result), ifcs
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
self.argument_spec = dict(
name=dict(
required=True,
),
tagged_interfaces=dict(
type='list',
aliases=['tagged_interface']
),
untagged_interfaces=dict(
type='list',
aliases=['untagged_interface']
),
description=dict(),
tag=dict(
type='int'
)
)
self.f5_product_name = 'bigip'
def main():
if not HAS_F5SDK:
raise F5ModuleError("The python f5-sdk module is required")
spec = ArgumentSpec()
client = AnsibleF5Client(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
f5_product_name=spec.f5_product_name,
mutually_exclusive=[
['tagged_interfaces', 'untagged_interfaces']
]
)
try:
mm = ModuleManager(client)
results = mm.exec_module()
client.module.exit_json(**results)
except F5ModuleError as e:
client.module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for api module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import gc
import numpy as np
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.pyct import errors
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.autograph.utils import py_func
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.util import tf_inspect
tf = utils.fake_tf()
testing_global_numeric = 2
class TestResource(object):
def __init__(self):
self.x = 3
class ApiTest(test.TestCase):
@test_util.run_deprecated_v1
def test_decorator_recursive(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_decorator_not_recursive(self):
class TestClass(object):
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=False)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_convert_then_do_not_convert_graph(self):
class TestClass(object):
@api.do_not_convert(api.RunMode.GRAPH)
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant((2, 4)), constant_op.constant(1),
constant_op.constant(-2))
self.assertAllEqual((0, 1), self.evaluate(x))
@test_util.run_deprecated_v1
def test_convert_then_do_not_convert_py_func(self):
class TestClass(object):
@api.do_not_convert(
api.RunMode.PY_FUNC, return_dtypes=py_func.MatchDType(1))
def called_member(self, a):
return np.negative(a)
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
y = self.called_member(a)
# set_shape works around while_loop's limitations.
# TODO(mdan): Allow specifying shapes (or ShapeLike) instead.
y.set_shape(a.shape)
x //= y
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant((2, 4)), constant_op.constant(1),
constant_op.constant(-2))
self.assertAllEqual((0, 1), self.evaluate(x))
@test_util.run_deprecated_v1
def test_decorator_calls_decorated(self):
class TestClass(object):
@api.convert()
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_decorator_preserves_argspec(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
called_member_converted = api.convert()(called_member)
tc = TestClass()
self.assertListEqual(
list(tf_inspect.getfullargspec(tc.called_member)),
list(tf_inspect.getfullargspec(tc.called_member_converted)))
@test_util.run_deprecated_v1
def test_convert_call_site_decorator(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= api.converted_call(self.called_member, None,
converter.ConversionOptions(), (a,), {})
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_converted_call_builtin(self):
x = api.converted_call(range, None, converter.ConversionOptions(), (3,), {})
self.assertEqual((0, 1, 2), tuple(x))
def test_converted_call_function(self):
def test_fn(x):
if x < 0:
return -x
return x
x = api.converted_call(test_fn, None, converter.ConversionOptions(),
(constant_op.constant(-1),), {})
self.assertEqual(1, self.evaluate(x))
@test_util.run_v1_only('b/120545219')
def test_converted_call_functools_partial(self):
def test_fn(x, y, z):
if x < 0:
return -x, -y, -z
return x, y, z
x = api.converted_call(
functools.partial(test_fn, constant_op.constant(-1), z=-3), None,
converter.ConversionOptions(), (constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
x = api.converted_call(
functools.partial(
functools.partial(test_fn, constant_op.constant(-1)), z=-3), None,
converter.ConversionOptions(), (constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
def test_converted_call_method_explicit_owner(self):
# TODO(mdan): Implement.
pass
def test_converted_call_method_explicit_super_owner(self):
# TODO(mdan): Implement.
pass
def test_converted_call_method(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc.test_method, None, converter.ConversionOptions(),
(), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_method_as_object_attribute(self):
class AnotherClass(object):
def __init__(self):
self.another_class_attr = constant_op.constant(1)
def method(self):
if self.another_class_attr > 0:
return self.another_class_attr + 1
return self.another_class_attr + 10
class TestClass(object):
def __init__(self, another_obj_method):
self.another_obj_method = another_obj_method
obj = AnotherClass()
tc = TestClass(obj.method)
x = api.converted_call('another_obj_method', tc,
converter.ConversionOptions(), (), {})
self.assertEqual(self.evaluate(x), 2)
def test_converted_call_method_converts_recursively(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def other_method(self):
if self.x < 0:
return -self.x
return self.x
def test_method(self):
return self.other_method()
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc.test_method, None,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_method_by_class(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(TestClass.test_method, None,
converter.ConversionOptions(), (tc,), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_callable_object(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def __call__(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc, None, converter.ConversionOptions(), (), {})
self.assertEqual(1, self.evaluate(x))
@test_util.run_deprecated_v1
def test_converted_call_constructor(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = api.converted_call(TestClass, None, converter.ConversionOptions(),
(constant_op.constant(-1),), {})
# tc is still a TestClass - constructors are whitelisted.
# TODO(b/124016764): Support this use case.
# The error below is specific to the `if` statement not being converted.
with self.assertRaisesRegex(
TypeError, 'Using a `tf.Tensor` as a Python `bool`'):
tc.test_method()
def test_converted_call_already_converted(self):
def f(x):
return x == 0
x = api.converted_call(f, None, converter.ConversionOptions(),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
converted_f = api.to_graph(
f, experimental_optional_features=converter.Feature.ALL)
x = api.converted_call(converted_f, None, converter.ConversionOptions(),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
def test_converted_call_then_already_converted_dynamic(self):
@api.convert()
def g(x):
if x > 0:
return x
else:
return -x
def f(g, x):
return g(x)
x = api.converted_call(f, None, converter.ConversionOptions(),
(g, constant_op.constant(1)), {})
self.assertEqual(self.evaluate(x), 1)
@test_util.run_deprecated_v1
def test_converted_call_no_user_code(self):
def f(x):
return len(x)
opts = converter.ConversionOptions(internal_convert_user_code=False)
# f should not be converted, causing len to error out.
with self.assertRaisesRegexp(Exception,
'object of type \'Tensor\' has no len()'):
api.converted_call(f, None, opts, (constant_op.constant([0]),), {})
# len on the other hand should work fine.
x = api.converted_call(len, None, opts, (constant_op.constant([0]),), {})
# The constant has static shape so the result is a primitive not a Tensor.
self.assertEqual(x, 1)
def test_converted_call_whitelisted_method(self):
opts = converter.ConversionOptions()
model = sequential.Sequential([
core.Dense(2)
])
x = api.converted_call(model.call, None, opts,
(constant_op.constant([[0.0]]),), {'training': True})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual([[0.0, 0.0]], self.evaluate(x))
def test_converted_call_whitelisted_method_via_owner(self):
opts = converter.ConversionOptions()
model = sequential.Sequential([
core.Dense(2)
])
x = api.converted_call('call', model, opts,
(constant_op.constant([[0.0]]),), {'training': True})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual([[0.0, 0.0]], self.evaluate(x))
def test_converted_call_namedtuple(self):
opts = converter.ConversionOptions()
x = api.converted_call(collections.namedtuple, None, opts,
('TestNamedtuple', ('a', 'b')), {})
self.assertTrue(inspect_utils.isnamedtuple(x))
def test_converted_call_namedtuple_via_collections(self):
opts = converter.ConversionOptions()
x = api.converted_call('namedtuple', collections, opts, ('TestNamedtuple',
('a', 'b')), {})
self.assertTrue(inspect_utils.isnamedtuple(x))
def test_converted_call_lambda(self):
opts = converter.ConversionOptions()
l = lambda x: x == 0
x = api.converted_call(l, None, opts, (constant_op.constant(0),), {})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(True, self.evaluate(x))
def test_converted_call_defun_object_method(self):
opts = converter.ConversionOptions()
# pylint:disable=method-hidden
class TestClass(object):
def method(self):
return 1
def prepare(self):
self.method = function.defun(self.method)
# pylint:enable=method-hidden
tc = TestClass()
tc.prepare()
x = api.converted_call(tc.method, None, opts, (), {})
self.assertAllEqual(1, self.evaluate(x))
def assertNoMemoryLeaks(self, f):
object_ids_before = {id(o) for o in gc.get_objects()}
f()
gc.collect()
objects_after = tuple(
o for o in gc.get_objects() if id(o) not in object_ids_before)
self.assertEmpty(
tuple(o for o in objects_after if isinstance(o, TestResource)))
def test_converted_call_no_leaks_via_closure(self):
def test_fn():
res = TestResource()
def f(y):
return res.x + y
opts = converter.ConversionOptions()
api.converted_call(f, None, opts, (1,), {})
self.assertNoMemoryLeaks(test_fn)
def test_converted_call_no_leaks_via_inner_function_closure(self):
def test_fn():
res = TestResource()
def f(y):
def inner_f():
return res.x + y
return inner_f
opts = converter.ConversionOptions()
api.converted_call(f, None, opts, (1,), {})()
self.assertNoMemoryLeaks(test_fn)
def test_to_graph_basic(self):
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x //= 2
return x
compiled_fn = api.to_graph(test_fn)
with tf.Graph().as_default():
x = compiled_fn(constant_op.constant((4, 8)), 4)
self.assertAllEqual(self.evaluate(x), (1, 2))
@test_util.run_deprecated_v1
def test_to_graph_with_defaults(self):
foo = 4
def test_fn(x, s=foo):
while tf.reduce_sum(x) > s:
x //= 2
return x
compiled_fn = api.to_graph(test_fn)
with self.cached_session() as sess:
x = compiled_fn(constant_op.constant([4, 8]))
self.assertListEqual([1, 2], self.evaluate(x).tolist())
def test_to_graph_with_globals(self):
def test_fn(x):
global testing_global_numeric
testing_global_numeric = x + testing_global_numeric
return testing_global_numeric
# TODO(b/122368197)
with self.assertRaisesRegex(
errors.AutoGraphError, 'global keyword is not yet supported'):
api.to_graph(test_fn)
def test_to_graph_with_kwargs_clashing_converted_call(self):
def called_fn(**kwargs):
return kwargs['f'] + kwargs['owner']
def test_fn():
# These arg names intentionally match converted_call's
return called_fn(f=1, owner=2)
compiled_fn = api.to_graph(test_fn)
self.assertEqual(compiled_fn(), 3)
def test_to_graph_with_kwargs_clashing_unconverted_call(self):
@api.do_not_convert()
def called_fn(**kwargs):
return kwargs['f'] + kwargs['owner']
def test_fn():
# These arg names intentionally match _call_unconverted's
return called_fn(f=1, owner=2)
compiled_fn = api.to_graph(test_fn)
self.assertEqual(compiled_fn(), 3)
def test_to_graph_caching(self):
def test_fn(x):
if x > 0:
return x
else:
return -x
converted_functions = tuple(api.to_graph(test_fn) for _ in (-1, 0, 1))
# All outputs are from the same module. We can't use __module__ because
# that's reset when we instantiate the function (see conversion.py).
# TODO(mdan): Can and should we overwrite __module__ instead?
module_names = frozenset(f.ag_module for f in converted_functions)
self.assertEqual(len(module_names), 1)
self.assertNotIn('__main__', module_names)
self.assertEqual(len(frozenset(id(f) for f in converted_functions)), 3)
def test_to_graph_caching_different_options(self):
def called_fn():
pass
def test_fn():
return called_fn()
converted_recursive = api.to_graph(test_fn, recursive=True)
converted_non_recursive = api.to_graph(test_fn, recursive=False)
self.assertNotEqual(converted_recursive.ag_module,
converted_non_recursive.ag_module)
self.assertIn('internal_convert_user_code=True',
tf_inspect.getsource(converted_recursive))
self.assertNotIn('internal_convert_user_code=False',
tf_inspect.getsource(converted_recursive))
self.assertIn('internal_convert_user_code=False',
tf_inspect.getsource(converted_non_recursive))
self.assertNotIn('internal_convert_user_code=True',
tf_inspect.getsource(converted_non_recursive))
def test_to_graph_preserves_bindings(self):
y = 3
def test_fn():
return y
converted = api.to_graph(test_fn)
self.assertEqual(converted(), 3)
y = 7
self.assertEqual(converted(), 7)
def test_to_graph_source_map(self):
def test_fn(y):
return y**2
self.assertTrue(hasattr(api.to_graph(test_fn), 'ag_source_map'))
def test_to_code_basic(self):
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x /= 2
return x
# Just check that the output is parseable Python code.
self.assertIsNotNone(parser.parse_str(api.to_code(test_fn)))
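# Illustrative sketch, not part of the test suite above: the decorator form of
# the conversion API on a free function. `tf` here is the fake module returned
# by utils.fake_tf() at the top of this file, so this mirrors the tests'
# assumptions rather than a real TensorFlow program.
@api.convert()
def _demo_halve_until(x, s):
  while tf.reduce_sum(x) > s:
    x //= 2
  return x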
if __name__ == '__main__':
test.main()
|
|
"""
desitarget.too
==============
Targets of Opportunity.
.. _`moving average`: https://stackoverflow.com/a/14314054
.. _`rolling sum`: https://stackoverflow.com/a/28288535
"""
import os
import numpy as np
from astropy.table import Table
from desiutil.log import get_logger
from desitarget import io
from desitarget.mtl import get_utc_date
# ADM the data model for ToO, similar to that of secondary targets...
from desitarget.secondary import indatamodel
from desitarget.secondary import outdatamodel
# ADM ...but the OVERRIDE column isn't necessary...
indtype = [tup for tup in indatamodel.dtype.descr if "OVERRIDE" not in tup]
outdtype = [tup for tup in outdatamodel.dtype.descr if "OVERRIDE" not in tup]
# ADM ...and some extra columns are necessary.
indatamodel = np.array([], dtype=indtype + [
('CHECKER', '>U7'), ('TOO_TYPE', '>U5'),
('TOO_PRIO', '>U2'), ('OCLAYER', '>U6'),
('MJD_BEGIN', '>f8'), ('MJD_END', '>f8'), ('TOOID', '>i4')])
outdatamodel = np.array([], dtype=outdtype + [
('CHECKER', '>U7'), ('TOO_TYPE', '>U5'),
('TOO_PRIO', '>U2'), ('OCLAYER', '>U6'),
('MJD_BEGIN', '>f8'), ('MJD_END', '>f8'), ('TOOID', '>i4')])
# ADM columns to only include in the output for Main Survey files.
msaddcols = np.array([], dtype=[('TIMESTAMP', 'U25')])
# ADM when using basic or csv ascii writes, specifying the formats of
# ADM float32 columns can make things easier on the eye.
tooformatdict = {"PARALLAX": '%16.8f', 'PMRA': '%16.8f', 'PMDEC': '%16.8f'}
# ADM This RELEASE means Target of Opportunity in TARGETID.
release = 9999
# ADM Constrain how many high-priority ToOs are allowed in a given time.
# ADM dictionary keys are total nights, dictionary vals are total fibers.
# ADM so, e.g., 365: 500 means no more than 500 fibers per year.
# ADM There are separate allocations for tile and fiber overrides.
# ADM Only apply to high-priority ToOs, which can override primaries.
constraints = {"FIBER": {1: 2, 30: 50, 365: 500},
"TILE": {1: 5000, 30: 5000, 365: 10000}}
log = get_logger()
def get_filename(toodir=None, ender="ecsv", outname=False):
"""Construct the input/output ToO filenames (with full directory path).
Parameters
----------
toodir : :class:`str`, optional, defaults to ``None``
The directory to treat as the Targets of Opportunity I/O directory.
If ``None`` then look up from the $TOO_DIR environment variable.
ender : :class:`str`, optional, defaults to "ecsv"
File format (in file name), likely either "ecsv" or "fits".
outname : :class:`bool`, optional, defaults to ``False``
If ``True`` return the output ToO filename. Otherwise return
the input ToO filename.
Returns
-------
:class:`str`
The directory to treat as the Targets of Opportunity I/O directory.
"""
# ADM retrieve the $TOO_DIR variable, if toodir wasn't passed.
tdir = get_too_dir(toodir)
dr = release//1000
# ADM honor the passed ender; hard-coding "ecsv" here meant that asking
# ADM for a .fits output name silently returned a .ecsv path.
fn = io.find_target_files(tdir, flavor="ToO", ender=ender, nohp=True)
if outname:
return fn
# ADM change the name slightly to make this the "input" ledger.
return fn.replace(".{}".format(ender), "-input.{}".format(ender))
def _write_too_files(filename, data, ecsv=True):
"""Write ToO ledgers and files.
Parameters
----------
filename : :class:`str`
Full path to filename to which to write Targets of Opportunity.
data : :class:`~numpy.ndarray` or `~astropy.table.Table`
Table or array of Targets of Opportunity to write.
ecsv : :class:`bool`, optional, defaults to ``True``
If ``True`` then write as a .ecsv file, if ``False`` then write
as a .fits file.
Returns
-------
None
But `data` is written to `filename` with standard ToO formalism.
"""
log.info("Writing ToO file to {}".format(filename))
# ADM grab the standard header.
hdr = _get_too_header()
# ADM create necessary directories, if they don't exist.
os.makedirs(os.path.dirname(filename), exist_ok=True)
# ADM io.write_with_units expects an array, not a Table.
if isinstance(data, Table):
data = data.as_array()
# ADM write the file.
io.write_with_units(filename, data, extname="TOO", header=hdr, ecsv=ecsv)
return
def _get_too_header():
"""Convenience function that returns a standard header for ToO files.
"""
from . import __version__ as desitarget_version
from desiutil import depend
hdr = {}
depend.setdep(hdr, 'desitarget', desitarget_version)
depend.setdep(hdr, 'desitarget-git', io.gitversion())
hdr["RELEASE"] = release
return hdr
def get_too_dir(toodir=None):
"""Convenience function to grab the TOO_DIR environment variable.
Parameters
----------
toodir : :class:`str`, optional, defaults to ``None``
The directory to treat as the Targets of Opportunity I/O directory.
If ``None`` then look up from the $TOO_DIR environment variable.
Returns
-------
:class:`str`
The directory to treat as the Targets of Opportunity I/O directory.
"""
if toodir is None:
toodir = os.environ.get("TOO_DIR")
msg = "Pass toodir or set $TOO_DIR."
if toodir is None:
log.critical(msg)
raise ValueError(msg)
msg = "{} does not exist. Make it or..." .format(toodir) + msg
if not os.path.exists(toodir):
log.critical(msg)
raise ValueError(msg)
return toodir
def make_initial_ledger(toodir=None):
"""Set up the initial ToO ledger with one ersatz observation.
Parameters
----------
toodir : :class:`str`, optional, defaults to ``None``
The directory to treat as the Targets of Opportunity I/O directory.
If ``None`` then look up from the $TOO_DIR environment variable.
Returns
-------
:class:`~astropy.table.Table`
A Table of the initial, example values for the ToO ledger.
The initial (.ecsv) ledger is also written to toodir or $TOO_DIR.
"""
# ADM get the ToO directory (or check it exists).
tdir = get_too_dir(toodir)
# ADM retrieve the file name to which to write.
fn = get_filename(tdir)
# ADM make a single line of the ledger with some indicative values.
data = np.zeros(3, dtype=indatamodel.dtype)
data["RA"] = 359.999999, 101.000001, 201.5
data["DEC"] = -89.999999, -89.999999, -89.999999
data["PMRA"] = 13.554634, 4.364553, 12.734214
data["PMDEC"] = 10.763842, -10.763842, -10.763842
data["REF_EPOCH"] = 2015.5, 2015.5, 2015.5
data["CHECKER"] = "ADM", "AM", "ADM"
data["TOO_TYPE"] = "TILE", "FIBER", "TILE"
data["TOO_PRIO"] = "HI", "LO", "HI"
data["MJD_BEGIN"] = 40811.04166667, 41811.14166667, 42811.14
data["MJD_END"] = 40811.95833333, 41811.85833333, 42811.85
data["OCLAYER"] = "BRIGHT", "DARK", "DARK"
# ADM write out the results.
_write_too_files(fn, data, ecsv=True)
return data
def rolling_sum(array, n):
"""
Sum each value in an array across itself and the next n-1 values.
Parameters
----------
array : :class:`array_like`
A 1-D array of integers.
n : :class:`int`
A window over which to sum.
Returns
-------
:class:`array_like`
Each value in the input summed with the next `n`-1 values in the
array. By definition, the output array will have `n`-1 fewer
indexes than the input array.
Notes
-----
- h/t `moving average`_ and `rolling sum`_ on stack overflow.
"""
# ADM n=0 is meaningless here.
if n == 0:
msg = "n must be 1 or greater to calculate a rolling sum"
log.critical(msg)
raise ValueError(msg)
# ADM a rolling sum can't be calculated if the window exceeds
# ADM the array length.
if n > len(array):
msg = "window exceeds array length, zero-length array will result"
log.warning(msg)
ret = np.cumsum(array)
ret[n:] = ret[n:] - ret[:-n]
return ret[n - 1:]
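# A worked example (illustrative):
#     >>> rolling_sum(np.array([1, 2, 3, 4]), 2)
#     array([3, 5, 7])
# i.e. 1+2, 2+3, 3+4; the output has n-1 fewer entries than the input.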
def all_integers_between_many_limits(ibegin, iend):
"""
Return array of all integers between arrays of start/end limits.
Parameters
----------
ibegin : :class:`array_like`
Array of beginning integers.
iend : :class:`array_like`
Array of ending integers.
Returns
-------
:class:`array_like`
1-D, sorted array of all integers in all [ibegin, iend] ranges.
Ranges are inclusive, so `iend` is included for every pair.
"""
ranges = [np.arange(ib, ie+1) for ib, ie in zip(ibegin, iend)]
return np.sort(np.concatenate(ranges))
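# A worked example (illustrative):
#     >>> all_integers_between_many_limits([1, 5], [3, 6])
#     array([1, 2, 3, 5, 6])
# Overlapping ranges produce repeated integers, which is exactly what
# max_integers_in_interval() below relies on when counting occurrences.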
def max_integers_in_interval(ibegin, iend, narray):
"""
Maximum across [ibegin, iend] ranges totalled over intervals.
Parameters
----------
ibegin : :class:`array_like`
Array of beginning integers.
iend : :class:`array_like`
Array of ending integers.
narray : :class:`array_like` or :class:`int` or :class:`list`
Intervals over which to sum and return the maximum.
Returns
-------
:class:`array_like`
The maximum number of integers across all of the ranges
[`ibegin`, `iend`] in each of the intervals in `narray`.
Notes
-----
- Ranges are inclusive, so `iend` is included for every pair.
- An example use would be: for a series of ranges of days [0, 365],
[180, 456], [90, 565] what is the maximum number of days that
occurs in any rolling 365-day period? The answer would clearly
be 3, in this case, as, say, day 181, occurs in all three ranges.
"""
# ADM grab all integers occurring in all ranges.
iall = all_integers_between_many_limits(ibegin, iend)
# ADM count the occurrence of all the integers between the smallest
# ADM ibegin and the largest iend, include missing entries as zeros.
bins = np.arange(iall.min()-0.5, iall.max()+1.5)
isum, _ = np.histogram(iall, bins)
# ADM in case an integer was passed.
narray = np.atleast_1d(narray)
# ADM n should not be longer than the array-length for a rolling sum.
narray = narray.clip(0, len(isum))
# ADM calculate the maximum of the rolling sum over each n in narray.
maxes = np.array([np.max(rolling_sum(isum, n)) for n in narray])
# ADM return the maximum of the rolling sum.
return maxes
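# A worked example (illustrative): for the ranges [0, 3] and [2, 5] with a
# 2-night window, the densest window is days {2, 3}, where both ranges
# contribute both days:
#     >>> max_integers_in_interval([0, 2], [3, 5], 2)
#     array([4])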
def _check_ledger(inledger):
"""Perform checks that the ledger conforms to requirements.
Parameters
----------
inledger : :class:`~astropy.table.Table`
A Table of input Targets of Opportunity from the ToO ledger.
Returns
-------
None
But a series of checks of the ledger are conducted.
"""
# ADM check that every entry has been vetted by-eye.
checkers = np.array(list(set(inledger["CHECKER"])))
checkergood = np.array([len(checker) > 1 for checker in checkers])
if not np.all(checkergood):
msg = "An entry in the ToO ledger ({}) has not been checked!".format(
checkers[~checkergood])
log.critical(msg)
raise ValueError(msg)
# ADM check that TOO_TYPE's are all either TILE or FIBER.
# ADM and the observing conditions are all either DARK or BRIGHT.
# ADM and the priorities are all LO or HI.
allowed = {"TOO_TYPE": {'FIBER', 'TILE'},
"TOO_PRIO": {'LO', 'HI'},
"OCLAYER": {'BRIGHT', 'DARK'}}
for col in allowed:
if not set(inledger[col]).issubset(allowed[col]):
msg = "Some {} entries in the ToO ledger are not one of {}!".format(
col, allowed[col])
log.critical(msg)
raise ValueError(msg)
# ADM basic check that the dates are formatted correctly.
if np.any(inledger["MJD_BEGIN"] > inledger["MJD_END"]):
msg = "Some MJD_BEGINs are later than their associated MJD_END!"
log.critical(msg)
raise ValueError(msg)
# ADM check that the requested ToOs don't exceed allocations. There
# ADM are different constraints for different types of observations.
for tootype in "FIBER", "TILE":
log.info("Working on ToO observations of type {}".format(tootype))
# ADM only restrict observations at higher-than-primary priority.
ii = (inledger["TOO_TYPE"] == tootype) & (inledger["TOO_PRIO"] == "HI")
# ADM work with discretized days that run from noon until noon
# ADM so each observing night is encompassed by an integer day.
if np.any(ii):
jdbegin = inledger["MJD_BEGIN"][ii]+0.5
jdend = inledger["MJD_END"][ii]+0.5
jdbegin, jdend = jdbegin.astype(int), jdend.astype(int)
# ADM grab the allowed fibers for each interval in nights.
nights = np.array(list(constraints[tootype].keys()))
allowed = np.array(list(constraints[tootype].values()))
# ADM check the total nights covered by the MJD ranges.
fibers = max_integers_in_interval(jdbegin, jdend, nights)
for fiber, night, allow in zip(fibers, nights, allowed):
log.info(
"Max of {} HIP fibers requested over {} nights ({} allowed)"
.format(fiber, night, allow)
)
excess = fibers > allowed
if np.any(excess):
msg = "Allocation exceeded! Number of HIP fibers requested over"
msg += "{} nights is {} (but only {} are allowed)".format(
nights[excess], fibers[excess], allowed[excess])
log.critical(msg)
raise ValueError(msg)
else:
log.info("No fibers requested")
return
def finalize_too(inledger, survey="main"):
"""Add necessary targeting columns to a ToO ledger.
Parameters
----------
inledger : :class:`~astropy.table.Table`
A Table of input Targets of Opportunity from the ToO ledger.
survey : :class:`str`, optional, defaults to ``'main'``
Specifies which target masks yaml file to use for bits, and which
column names to add in the output file. Options are ``'main'``
and ``'svX'`` (where X is 1, 2, 3 etc.) for the main survey and
different iterations of SV, respectively.
Returns
-------
:class:`~astropy.table.Table`
A Table of targets generated from the ToO ledger, with the
necessary columns added for fiberassign to run.
"""
# ADM add extra columns that are just for the Main Survey.
dt = outdatamodel.dtype.descr
if survey == 'main':
dt += msaddcols.dtype.descr
# ADM create the output data table.
outdata = Table(np.zeros(len(inledger), dtype=dt))
# ADM change column names to reflect the survey.
if survey[:2] == "sv":
bitcols = [col for col in outdata.dtype.names if "_TARGET" in col]
for col in bitcols:
outdata.rename_column(col, "{}_{}".format(survey.upper(), col))
# ADM grab the appropriate masks and column names.
from desitarget.targets import main_cmx_or_sv
cols, Mxs, surv = main_cmx_or_sv(outdata, scnd=True)
dcol, bcol, mcol, scol = cols
dMx, bMx, mMx, sMx = Mxs
# ADM add the input columns to the output table.
for col in inledger.dtype.names:
outdata[col] = inledger[col]
# ADM add the output columns.
ntoo = len(outdata)
# ADM assign a TARGETID for each input target.
from desiutil import brick
from desitarget.targets import encode_targetid
bricks = brick.Bricks(bricksize=0.25)
brickid = bricks.brickid(outdata["RA"], outdata["DEC"])
objid = np.arange(ntoo)
targetid = encode_targetid(objid=objid, brickid=brickid, release=release)
outdata["TARGETID"] = targetid
# ADM assign the target bitmasks and observing condition for
# ADM each of the possible observing conditions.
from desitarget.targetmask import obsconditions
outdata[dcol] = dMx["SCND_ANY"]
for oc in set(outdata["OCLAYER"]):
# ADM there are multiple possible priorities.
for prio in set(outdata["TOO_PRIO"]):
ii = (outdata["OCLAYER"] == oc) & (outdata["TOO_PRIO"] == prio)
bitname = "{}_TOO_{}P".format(oc, prio)
outdata[scol][ii] = sMx[bitname]
outdata["PRIORITY_INIT"][ii] = sMx[bitname].priorities["UNOBS"]
outdata["NUMOBS_INIT"][ii] = sMx[bitname].numobs
outdata["OBSCONDITIONS"][ii] = obsconditions.mask(
sMx[bitname].obsconditions)
# ADM assign a SUBPRIORITY.
np.random.seed(616)
outdata["SUBPRIORITY"] = np.random.random(ntoo)
# ADM finally, add a processing timestamp.
# ADM I only remembered to do this starting with the Main Survey.
if survey == 'main':
outdata["TIMESTAMP"] = get_utc_date(survey="main")
return outdata
def ledger_to_targets(toodir=None, survey="main", ecsv=True, outdir=None):
"""Convert a ToO ledger to a file of ToO targets.
Parameters
----------
toodir : :class:`str`, optional, defaults to ``None``
The directory to treat as the Targets of Opportunity I/O directory.
If ``None`` then look up from the $TOO_DIR environment variable.
survey : :class:`str`, optional, defaults to ``'main'``
Specifies which target masks yaml file to use for bits, and which
column names to add in the output file. Options are ``'main'``
and ``'svX'`` (where X is 1, 2, 3 etc.) for the main survey and
different iterations of SV, respectively.
ecsv : :class:`bool`, optional, defaults to ``True``
If ``True`` then write as a .ecsv file, if ``False`` then write
as a .fits file.
outdir : :class:`str`, optional, defaults to ``None``
If passed and not ``None``, then read the input ledger from
`toodir` but write the file of targets to `outdir`.
Returns
-------
:class:`~astropy.table.Table`
A Table of targets generated from the ToO ledger. The output
targets are also written to `toodir` (or $TOO_DIR) or `outdir`.
Notes
-----
- One purpose of this code is to add all of the necessary columns for
fiberassign to run.
- Another purpose is to run some simple checks that the ToO targets
do not exceed allowed specifications.
"""
# ADM get the ToO directory (or check it exists).
tdir = get_too_dir(toodir)
# ADM read in the ToO ledger.
fn = get_filename(tdir)
indata = Table.read(fn, comment='#', format='ascii.basic', guess=False)
# ADM check the ledger conforms to requirements.
_check_ledger(indata)
# ADM add the output targeting columns.
outdata = finalize_too(indata, survey=survey)
# ADM determine the output filename.
# ADM set output format to ecsv if passed, or fits otherwise.
form = "ecsv" if ecsv else "fits"
if outdir is None:
fn = get_filename(tdir, outname=True, ender=form)
else:
fn = get_filename(outdir, outname=True, ender=form)
# ADM write out the results.
_write_too_files(fn, outdata, ecsv=ecsv)
return outdata
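# End-to-end sketch (illustrative; assumes $TOO_DIR, or the passed toodir,
# points at a writeable directory and a desitarget environment is set up):
# seed the example ledger, then convert it to fiberassign-ready ToO targets.
def _demo_too_workflow(toodir=None):
    make_initial_ledger(toodir=toodir)
    return ledger_to_targets(toodir=toodir, survey="main", ecsv=True)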
|
|
# -*- coding: utf-8 -*-
import unittest
import logging
import numpy as np
from .. import alignElastix
from .. import alignSift
from ..alignFFT import alignFFT
from ..alignSimple import alignMin
from ..alignSimple import alignMax
from ..alignSimple import alignCentroid
from ..alignSimple import alignGaussMax
from ..types import transformationType
from . import helper_teststack
from ...utils.cli import getLogger
logger = getLogger(__name__, __file__)
logger.setLevel(logging.DEBUG)
from silx.opencl import ocl
if ocl:
has_ocl_device = ocl.create_context() is not None
else:
has_ocl_device = False
class test_align(unittest.TestCase):
def compare_relativecof(self, cofs, cofrel, msg=None):
for i in range(1, cofs.shape[0]):
cofrelcalc = np.dot(np.linalg.inv(cofs[i - 1, ...]), cofs[i, ...])
np.testing.assert_almost_equal(cofrelcalc, cofrel, decimal=1, err_msg=msg)
def assertAligned(self, outputstack, stackdim):
for stack in outputstack:
# Check whether maximum of each image in the stack
# is on the same location
idx = [slice(None)] * stack.ndim
n = stack.shape[stackdim]
lst1 = []
for i in range(n):
idx[stackdim] = i
lst1.append(np.nanargmax(stack[tuple(idx)]))
lst2 = [lst1[0]] * n
self.assertEqual(lst1, lst2)
def assertAlign(
self, alignclass, transfotype, realistic=False, subpixel=True, inverse=False
):
if (
transfotype == transformationType.translation
and alignclass != alignSift.alignSift
and alignclass != alignElastix.alignElastix
):
lst = [False, True]
else:
lst = [False]
for vector in lst:
for transposed in lst:
if transposed and not vector:
continue
# Prepare dataIO
inputstack, cofrel, stackdim = helper_teststack.data(
transfotype,
vector=vector,
transposed=transposed,
realistic=realistic,
subpixel=subpixel,
inverse=inverse,
)
outputstack = [np.zeros(1, dtype=np.float32)] * len(inputstack)
# References
refdatasetindex = 0
refimageindex = 0 # len(inputstack)//2
# Prepare alignment
o = alignclass(
inputstack,
None,
outputstack,
None,
None,
stackdim=stackdim,
overwrite=True,
plot=False,
transfotype=transfotype,
)
# Check alignment
if vector:
if transposed:
roi = ((1, -3), (0, 1))
else:
roi = ((0, 1), (1, -3))
else:
roi = ((1, -3), (1, -3))
for i in range(8):  # three binary flags below: pad, crop, prealigntransfo
pad = (i & 1) == 1
crop = (i & 2) == 2
prealigntransfo = (i & 4) == 4
if prealigntransfo:
prealigntransfolist = [
o.defaulttransform() for _ in range(o.source.nimages)
]
for transfo in prealigntransfolist:
tx, ty = np.random.randint(-2, 2, 2)
if vector:
if transposed:
tx = 0
else:
ty = 0
transfo.settranslation(tx, ty)
else:
prealigntransfolist = None
# Fixed reference
for redo in False, True:
msg = "Alignment: Pad = {}, Crop = {}, 1D = {}, transposed = {}, prealigntransfo = {}, redo = {}, type = {}".format(
pad,
crop,
vector,
transposed,
prealigntransfo,
redo,
"fixed",
)
# logger.debug(msg)
o.align(
refdatasetindex,
refimageindex=refimageindex,
pad=pad,
crop=crop,
roi=roi,
redo=redo,
prealigntransfo=prealigntransfolist,
)
self.compare_relativecof(
o.absolute_cofs(homography=True, include_pre=True),
cofrel,
msg=msg,
)
self.assertAligned(outputstack, stackdim)
# Pairwise: align on raw
for redo in False, True:
msg = "Alignment: Pad = {}, Crop = {}, 1D = {}, transposed = {}, prealigntransfo = {}, redo = {}, type = {}".format(
pad,
crop,
vector,
transposed,
prealigntransfo,
redo,
"pairwise/raw",
)
# logger.debug(msg)
o.align(
refdatasetindex,
onraw=True,
pad=pad,
crop=crop,
roi=roi,
redo=redo,
prealigntransfo=prealigntransfolist,
)
self.compare_relativecof(
o.absolute_cofs(homography=True, include_pre=True),
cofrel,
msg=msg,
)
self.assertAligned(outputstack, stackdim)
# Pairwise: align on aligned
for redo in False, True:
msg = "Alignment: Pad = {}, Crop = {}, 1D = {}, transposed = {}, prealigntransfo = {}, redo = {}, type = {}".format(
pad,
crop,
vector,
transposed,
prealigntransfo,
redo,
"pairwise",
)
# logger.debug(msg)
o.align(
refdatasetindex,
onraw=False,
pad=pad,
crop=crop,
roi=roi,
redo=redo,
prealigntransfo=prealigntransfolist,
)
self.compare_relativecof(
o.absolute_cofs(homography=True, include_pre=True),
cofrel,
msg=msg,
)
self.assertAligned(outputstack, stackdim)
@unittest.skip("TODO")
def test_fft_internals(self):
# Initialize alignFFT (not important)
inputstack = [np.zeros((2, 2, 2), dtype=np.float32)] * 5
outputstack = [np.zeros(1, dtype=np.float32)] * 5
o = alignFFT(
inputstack,
None,
outputstack,
None,
None,
stackdim=2,
overwrite=True,
transfotype=transformationType.similarity,
)
# Test Fourier related things
img = np.abs(np.fft.fft2(np.arange(7 * 8).reshape((7, 8))))
img2 = np.fft.ifftshift(np.fft.fftshift(img))
np.testing.assert_allclose(img, img2)
# Test log-polar
mx = 10
my = 10
nx = 501
ny = 401
dx = 2 * mx / (nx - 1.0)
dy = 2 * my / (ny - 1.0)
xv, yv = np.meshgrid(np.linspace(-mx, mx, nx), np.linspace(-my, my, ny))
sx = 2.0
sy = 1.0
angle = 0.0
data = [(0.0, 0.0, sx, sy, angle, 1000.0)]
fixed = helper_teststack.gettransformedimage(xv, yv, data, angle=True).reshape(
xv.shape
)
angle = -2 * np.pi / 180
scale = 1.3 # 0.9
sx *= scale
sy *= scale
data = [(0.0, 0.0, sx, sy, angle, 1000.0)]
moving = helper_teststack.gettransformedimage(xv, yv, data, angle=True).reshape(
xv.shape
)
a = scale * np.cos(angle)
b = scale * np.sin(angle)
R = np.array([[a, -b, 0], [b, a, 0], [0, 0, 1]])
T = np.array([[1, 0, mx / dx], [0, 1, my / dy], [0, 0, 1]])
Tinv = np.array([[1, 0, -mx / dx], [0, 1, -my / dy], [0, 0, 1]])
M = np.dot(T, np.dot(R, Tinv))
o.set_reference(fixed)
aligned = o.execute_alignkernel(moving)
np.testing.assert_almost_equal(M, o._transform.getnumpyhomography(), decimal=1)
@unittest.skipIf(alignElastix.sitk is None, "SimpleElastix is not installed")
def test_elastix(self):
types = [transformationType.translation]
for t in types:
self.assertAlign(alignElastix.alignElastix, t)
@unittest.skipIf(alignSift.pyopencl is None, "pyopencl is not installed")
@unittest.skipIf(not has_ocl_device, "no pyopencl device available")
# @unittest.skipIf(True, "temporary disable")
def test_sift(self):
types = [
transformationType.translation,
transformationType.rigid,
transformationType.similarity,
transformationType.affine,
]
# TODO: the others are not that precise
types = [transformationType.translation]
for t in types:
self.assertAlign(alignSift.alignSift, t)
def test_fft(self):
types = [transformationType.translation]
for t in types:
self.assertAlign(alignFFT, t)
def test_min(self):
self.assertAlign(
alignMin,
transformationType.translation,
realistic=True,
subpixel=False,
inverse=True,
)
def test_max(self):
self.assertAlign(alignMax, transformationType.translation, subpixel=False)
def test_centroid(self):
self.assertAlign(alignCentroid, transformationType.translation, subpixel=False)
def test_gaussmax(self):
self.assertAlign(alignGaussMax, transformationType.translation, subpixel=False)
def test_suite():
"""Test suite including all test suites"""
testSuite = unittest.TestSuite()
testSuite.addTest(test_align("test_fft_internals"))
testSuite.addTest(test_align("test_min"))
testSuite.addTest(test_align("test_max"))
testSuite.addTest(test_align("test_centroid"))
testSuite.addTest(test_align("test_gaussmax"))
testSuite.addTest(test_align("test_fft"))
testSuite.addTest(test_align("test_elastix"))
testSuite.addTest(test_align("test_sift"))
return testSuite
if __name__ == "__main__":
import sys
mysuite = test_suite()
runner = unittest.TextTestRunner()
if not runner.run(mysuite).wasSuccessful():
sys.exit(1)
|
|
#!/usr/bin/env python
# Copyright 2008 Orbitz WorldWide
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# This module is an implementation of the Whisper database API
# Here is the basic layout of a whisper data file
#
# File = Header,Data
# Header = Metadata,ArchiveInfo+
# Metadata = aggregationType,maxRetention,xFilesFactor,archiveCount
# ArchiveInfo = Offset,SecondsPerPoint,Points
# Data = Archive+
# Archive = Point+
# Point = timestamp,value
import os, struct, time, operator, itertools
try:
import fcntl
CAN_LOCK = True
except ImportError:
CAN_LOCK = False
LOCK = False
CACHE_HEADERS = False
AUTOFLUSH = False
__headerCache = {}
longFormat = "!L"
longSize = struct.calcsize(longFormat)
floatFormat = "!f"
floatSize = struct.calcsize(floatFormat)
valueFormat = "!d"
valueSize = struct.calcsize(valueFormat)
pointFormat = "!Ld"
pointSize = struct.calcsize(pointFormat)
metadataFormat = "!2LfL"
metadataSize = struct.calcsize(metadataFormat)
archiveInfoFormat = "!3L"
archiveInfoSize = struct.calcsize(archiveInfoFormat)
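# Layout sketch derived from the formats above: the header occupies
# metadataSize (16 bytes, "!2LfL") plus archiveCount * archiveInfoSize
# (12 bytes each, "!3L"); each archive then stores points * pointSize
# (12 bytes per point, "!Ld") of timestamped values.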
aggregationTypeToMethod = dict({
1: 'average',
2: 'sum',
3: 'last',
4: 'max',
5: 'min'
})
aggregationMethodToType = dict([[v,k] for k,v in aggregationTypeToMethod.items()])
aggregationMethods = aggregationTypeToMethod.values()
debug = startBlock = endBlock = lambda *a,**k: None
UnitMultipliers = {
'seconds' : 1,
'minutes' : 60,
'hours' : 3600,
'days' : 86400,
'weeks' : 86400 * 7,
'years' : 86400 * 365
}
def getUnitString(s):
if 'seconds'.startswith(s): return 'seconds'
if 'minutes'.startswith(s): return 'minutes'
if 'hours'.startswith(s): return 'hours'
if 'days'.startswith(s): return 'days'
if 'weeks'.startswith(s): return 'weeks'
if 'years'.startswith(s): return 'years'
raise ValueError("Invalid unit '%s'" % s)
def parseRetentionDef(retentionDef):
import re
(precision, points) = retentionDef.strip().split(':')
if precision.isdigit():
precision = int(precision) * UnitMultipliers[getUnitString('s')]
else:
precision_re = re.compile(r'^(\d+)([a-z]+)$')
match = precision_re.match(precision)
if match:
precision = int(match.group(1)) * UnitMultipliers[getUnitString(match.group(2))]
else:
raise ValueError("Invalid precision specification '%s'" % precision)
if points.isdigit():
points = int(points)
else:
points_re = re.compile(r'^(\d+)([a-z]+)$')
match = points_re.match(points)
if match:
points = int(match.group(1)) * UnitMultipliers[getUnitString(match.group(2))] / precision
else:
raise ValueError("Invalid retention specification '%s'" % points)
return (precision, points)
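# A quick sketch of parseRetentionDef (values follow from UnitMultipliers above):
#   parseRetentionDef('60:1440') -> (60, 1440)    raw seconds : raw points
#   parseRetentionDef('1m:7d')   -> (60, 10080)   7 days of 1-minute points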
class WhisperException(Exception):
"""Base class for whisper exceptions."""
class InvalidConfiguration(WhisperException):
"""Invalid configuration."""
class InvalidAggregationMethod(WhisperException):
"""Invalid aggregation method."""
class InvalidTimeInterval(WhisperException):
"""Invalid time interval."""
class TimestampNotCovered(WhisperException):
"""Timestamp not covered by any archives in this database."""
class CorruptWhisperFile(WhisperException):
def __init__(self, error, path):
Exception.__init__(self, error)
self.error = error
self.path = path
def __repr__(self):
return "<CorruptWhisperFile[%s] %s>" % (self.path, self.error)
def __str__(self):
return "%s (%s)" % (self.error, self.path)
def enableDebug():
global open, debug, startBlock, endBlock
class open(file):
def __init__(self,*args,**kwargs):
file.__init__(self,*args,**kwargs)
self.writeCount = 0
self.readCount = 0
def write(self,data):
self.writeCount += 1
debug('WRITE %d bytes #%d' % (len(data),self.writeCount))
return file.write(self,data)
def read(self,bytes):
self.readCount += 1
debug('READ %d bytes #%d' % (bytes,self.readCount))
return file.read(self,bytes)
def debug(message):
print 'DEBUG :: %s' % message
__timingBlocks = {}
def startBlock(name):
__timingBlocks[name] = time.time()
def endBlock(name):
debug("%s took %.5f seconds" % (name,time.time() - __timingBlocks.pop(name)))
def __readHeader(fh):
info = __headerCache.get(fh.name)
if info:
return info
originalOffset = fh.tell()
fh.seek(0)
packedMetadata = fh.read(metadataSize)
try:
(aggregationType,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata)
except:
raise CorruptWhisperFile("Unable to read header", fh.name)
archives = []
for i in xrange(archiveCount):
packedArchiveInfo = fh.read(archiveInfoSize)
try:
(offset,secondsPerPoint,points) = struct.unpack(archiveInfoFormat,packedArchiveInfo)
except:
raise CorruptWhisperFile("Unable to read archive%d metadata" % i, fh.name)
archiveInfo = {
'offset' : offset,
'secondsPerPoint' : secondsPerPoint,
'points' : points,
'retention' : secondsPerPoint * points,
'size' : points * pointSize,
}
archives.append(archiveInfo)
fh.seek(originalOffset)
info = {
'aggregationMethod' : aggregationTypeToMethod.get(aggregationType, 'average'),
'maxRetention' : maxRetention,
'xFilesFactor' : xff,
'archives' : archives,
}
if CACHE_HEADERS:
__headerCache[fh.name] = info
return info
def setAggregationMethod(path, aggregationMethod):
"""setAggregationMethod(path,aggregationMethod)
path is a string
  aggregationMethod specifies the method to use when propagating data (see ``whisper.aggregationMethods``)
"""
fh = open(path,'r+b')
if LOCK:
fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
packedMetadata = fh.read(metadataSize)
try:
(aggregationType,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata)
except:
raise CorruptWhisperFile("Unable to read header", fh.name)
try:
newAggregationType = struct.pack( longFormat, aggregationMethodToType[aggregationMethod] )
except KeyError:
raise InvalidAggregationMethod("Unrecognized aggregation method: %s" %
aggregationMethod)
fh.seek(0)
fh.write(newAggregationType)
if AUTOFLUSH:
fh.flush()
os.fsync(fh.fileno())
if CACHE_HEADERS and fh.name in __headerCache:
del __headerCache[fh.name]
fh.close()
return aggregationTypeToMethod.get(aggregationType, 'average')
def validateArchiveList(archiveList):
""" Validates an archiveList.
An ArchiveList must:
1. Have at least one archive config. Example: (60, 86400)
2. No archive may be a duplicate of another.
3. Higher precision archives' precision must evenly divide all lower precision archives' precision.
4. Lower precision archives must cover larger time intervals than higher precision archives.
5. Each archive must have at least enough points to consolidate to the next archive
  Raises InvalidConfiguration if any rule is violated; returns None otherwise.
"""
if not archiveList:
raise InvalidConfiguration("You must specify at least one archive configuration!")
archiveList.sort(key=lambda a: a[0]) #sort by precision (secondsPerPoint)
for i,archive in enumerate(archiveList):
if i == len(archiveList) - 1:
break
nextArchive = archiveList[i+1]
if not archive[0] < nextArchive[0]:
raise InvalidConfiguration("A Whisper database may not configured having"
"two archives with the same precision (archive%d: %s, archive%d: %s)" %
(i, archive, i + 1, nextArchive))
if nextArchive[0] % archive[0] != 0:
raise InvalidConfiguration("Higher precision archives' precision "
"must evenly divide all lower precision archives' precision "
"(archive%d: %s, archive%d: %s)" %
(i, archive[0], i + 1, nextArchive[0]))
retention = archive[0] * archive[1]
nextRetention = nextArchive[0] * nextArchive[1]
if not nextRetention > retention:
raise InvalidConfiguration("Lower precision archives must cover "
"larger time intervals than higher precision archives "
"(archive%d: %s seconds, archive%d: %s seconds)" %
(i, archive[1], i + 1, nextArchive[1]))
archivePoints = archive[1]
pointsPerConsolidation = nextArchive[0] / archive[0]
if not archivePoints >= pointsPerConsolidation:
raise InvalidConfiguration("Each archive must have at least enough points "
"to consolidate to the next archive (archive%d consolidates %d of "
"archive%d's points but it has only %d total points)" %
(i + 1, pointsPerConsolidation, i, archivePoints))
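# For example, a configuration that satisfies all five rules above
# (hypothetical values, not taken from this file):
#   validateArchiveList([(60, 1440), (300, 2016), (3600, 8760)])
# i.e. 1-minute points for a day, 5-minute points for a week,
# and hourly points for a year.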
def create(path,archiveList,xFilesFactor=None,aggregationMethod=None,sparse=False):
"""create(path,archiveList,xFilesFactor=0.5,aggregationMethod='average')
path is a string
archiveList is a list of archives, each of which is of the form (secondsPerPoint,numberOfPoints)
xFilesFactor specifies the fraction of data points in a propagation interval that must have known values for a propagation to occur
  aggregationMethod specifies the function to use when propagating data (see ``whisper.aggregationMethods``)
"""
# Set default params
if xFilesFactor is None:
xFilesFactor = 0.5
if aggregationMethod is None:
aggregationMethod = 'average'
#Validate archive configurations...
validateArchiveList(archiveList)
#Looks good, now we create the file and write the header
if os.path.exists(path):
raise InvalidConfiguration("File %s already exists!" % path)
fh = open(path,'wb')
if LOCK:
fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
aggregationType = struct.pack( longFormat, aggregationMethodToType.get(aggregationMethod, 1) )
oldest = sorted([secondsPerPoint * points for secondsPerPoint,points in archiveList])[-1]
maxRetention = struct.pack( longFormat, oldest )
xFilesFactor = struct.pack( floatFormat, float(xFilesFactor) )
archiveCount = struct.pack(longFormat, len(archiveList))
packedMetadata = aggregationType + maxRetention + xFilesFactor + archiveCount
fh.write(packedMetadata)
headerSize = metadataSize + (archiveInfoSize * len(archiveList))
archiveOffsetPointer = headerSize
for secondsPerPoint,points in archiveList:
archiveInfo = struct.pack(archiveInfoFormat, archiveOffsetPointer, secondsPerPoint, points)
fh.write(archiveInfo)
archiveOffsetPointer += (points * pointSize)
if sparse:
    # Seek to the final byte so the single write below extends the file to
    # its full size (header plus all archives).
    fh.seek(archiveOffsetPointer - 1)
fh.write("\0")
else:
# If not creating the file sparsely, then fill the rest of the file with
# zeroes.
remaining = archiveOffsetPointer - headerSize
chunksize = 16384
zeroes = '\x00' * chunksize
while remaining > chunksize:
fh.write(zeroes)
remaining -= chunksize
fh.write(zeroes[:remaining])
if AUTOFLUSH:
fh.flush()
os.fsync(fh.fileno())
fh.close()
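# A minimal end-to-end sketch of this module's API (hypothetical path 'db.wsp'):
#   create('db.wsp', [(60, 1440)])                   # 1-minute points for a day
#   update('db.wsp', 3.14)                           # timestamp defaults to now
#   (timeInfo, values) = fetch('db.wsp', time.time() - 3600)
#   (fromInterval, untilInterval, step) = timeInfo   # step is 60 here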
def __aggregate(aggregationMethod, knownValues):
if aggregationMethod == 'average':
return float(sum(knownValues)) / float(len(knownValues))
elif aggregationMethod == 'sum':
return float(sum(knownValues))
elif aggregationMethod == 'last':
return knownValues[len(knownValues)-1]
elif aggregationMethod == 'max':
return max(knownValues)
elif aggregationMethod == 'min':
return min(knownValues)
else:
raise InvalidAggregationMethod("Unrecognized aggregation method %s" %
aggregationMethod)
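# A quick sketch of __aggregate on a sample list of known values:
#   __aggregate('average', [1.0, 2.0, 6.0]) -> 3.0
#   __aggregate('last',    [1.0, 2.0, 6.0]) -> 6.0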
def __propagate(fh,header,timestamp,higher,lower):
aggregationMethod = header['aggregationMethod']
xff = header['xFilesFactor']
lowerIntervalStart = timestamp - (timestamp % lower['secondsPerPoint'])
lowerIntervalEnd = lowerIntervalStart + lower['secondsPerPoint']
fh.seek(higher['offset'])
packedPoint = fh.read(pointSize)
(higherBaseInterval,higherBaseValue) = struct.unpack(pointFormat,packedPoint)
if higherBaseInterval == 0:
higherFirstOffset = higher['offset']
else:
timeDistance = lowerIntervalStart - higherBaseInterval
pointDistance = timeDistance / higher['secondsPerPoint']
byteDistance = pointDistance * pointSize
higherFirstOffset = higher['offset'] + (byteDistance % higher['size'])
higherPoints = lower['secondsPerPoint'] / higher['secondsPerPoint']
higherSize = higherPoints * pointSize
relativeFirstOffset = higherFirstOffset - higher['offset']
relativeLastOffset = (relativeFirstOffset + higherSize) % higher['size']
higherLastOffset = relativeLastOffset + higher['offset']
fh.seek(higherFirstOffset)
if higherFirstOffset < higherLastOffset: #we don't wrap the archive
seriesString = fh.read(higherLastOffset - higherFirstOffset)
else: #We do wrap the archive
higherEnd = higher['offset'] + higher['size']
seriesString = fh.read(higherEnd - higherFirstOffset)
fh.seek(higher['offset'])
seriesString += fh.read(higherLastOffset - higher['offset'])
#Now we unpack the series data we just read
byteOrder,pointTypes = pointFormat[0],pointFormat[1:]
points = len(seriesString) / pointSize
seriesFormat = byteOrder + (pointTypes * points)
unpackedSeries = struct.unpack(seriesFormat, seriesString)
#And finally we construct a list of values
neighborValues = [None] * points
currentInterval = lowerIntervalStart
step = higher['secondsPerPoint']
for i in xrange(0,len(unpackedSeries),2):
pointTime = unpackedSeries[i]
if pointTime == currentInterval:
neighborValues[i/2] = unpackedSeries[i+1]
currentInterval += step
  #Aggregate a value from neighborValues if we have enough known points
knownValues = [v for v in neighborValues if v is not None]
if not knownValues:
return False
knownPercent = float(len(knownValues)) / float(len(neighborValues))
if knownPercent >= xff: #we have enough data to propagate a value!
aggregateValue = __aggregate(aggregationMethod, knownValues)
myPackedPoint = struct.pack(pointFormat,lowerIntervalStart,aggregateValue)
fh.seek(lower['offset'])
packedPoint = fh.read(pointSize)
(lowerBaseInterval,lowerBaseValue) = struct.unpack(pointFormat,packedPoint)
if lowerBaseInterval == 0: #First propagated update to this lower archive
fh.seek(lower['offset'])
fh.write(myPackedPoint)
else: #Not our first propagated update to this lower archive
timeDistance = lowerIntervalStart - lowerBaseInterval
pointDistance = timeDistance / lower['secondsPerPoint']
byteDistance = pointDistance * pointSize
lowerOffset = lower['offset'] + (byteDistance % lower['size'])
fh.seek(lowerOffset)
fh.write(myPackedPoint)
return True
else:
return False
def update(path,value,timestamp=None):
"""update(path,value,timestamp=None)
path is a string
value is a float
timestamp is either an int or float
"""
value = float(value)
fh = open(path,'r+b')
return file_update(fh, value, timestamp)
def file_update(fh, value, timestamp):
if LOCK:
fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
header = __readHeader(fh)
now = int( time.time() )
if timestamp is None:
timestamp = now
timestamp = int(timestamp)
diff = now - timestamp
if not ((diff < header['maxRetention']) and diff >= 0):
raise TimestampNotCovered("Timestamp not covered by any archives in "
"this database.")
for i,archive in enumerate(header['archives']): #Find the highest-precision archive that covers timestamp
if archive['retention'] < diff: continue
lowerArchives = header['archives'][i+1:] #We'll pass on the update to these lower precision archives later
break
#First we update the highest-precision archive
myInterval = timestamp - (timestamp % archive['secondsPerPoint'])
myPackedPoint = struct.pack(pointFormat,myInterval,value)
fh.seek(archive['offset'])
packedPoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedPoint)
if baseInterval == 0: #This file's first update
fh.seek(archive['offset'])
fh.write(myPackedPoint)
baseInterval,baseValue = myInterval,value
else: #Not our first update
timeDistance = myInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
myOffset = archive['offset'] + (byteDistance % archive['size'])
fh.seek(myOffset)
fh.write(myPackedPoint)
#Now we propagate the update to lower-precision archives
higher = archive
for lower in lowerArchives:
if not __propagate(fh, header, myInterval, higher, lower):
break
higher = lower
if AUTOFLUSH:
fh.flush()
os.fsync(fh.fileno())
fh.close()
def update_many(path,points):
"""update_many(path,points)
path is a string
points is a list of (timestamp,value) points
"""
if not points: return
points = [ (int(t),float(v)) for (t,v) in points]
points.sort(key=lambda p: p[0],reverse=True) #order points by timestamp, newest first
fh = open(path,'r+b')
return file_update_many(fh, points)
def file_update_many(fh, points):
if LOCK:
fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
header = __readHeader(fh)
now = int( time.time() )
archives = iter( header['archives'] )
currentArchive = archives.next()
currentPoints = []
for point in points:
age = now - point[0]
while currentArchive['retention'] < age: #we can't fit any more points in this archive
if currentPoints: #commit all the points we've found that it can fit
currentPoints.reverse() #put points in chronological order
__archive_update_many(fh,header,currentArchive,currentPoints)
currentPoints = []
try:
currentArchive = archives.next()
except StopIteration:
currentArchive = None
break
if not currentArchive:
break #drop remaining points that don't fit in the database
currentPoints.append(point)
if currentArchive and currentPoints: #don't forget to commit after we've checked all the archives
currentPoints.reverse()
__archive_update_many(fh,header,currentArchive,currentPoints)
if AUTOFLUSH:
fh.flush()
os.fsync(fh.fileno())
fh.close()
def __archive_update_many(fh,header,archive,points):
step = archive['secondsPerPoint']
alignedPoints = [ (timestamp - (timestamp % step), value)
for (timestamp,value) in points ]
#Create a packed string for each contiguous sequence of points
packedStrings = []
previousInterval = None
currentString = ""
for (interval,value) in alignedPoints:
if interval == previousInterval: continue
if (not previousInterval) or (interval == previousInterval + step):
currentString += struct.pack(pointFormat,interval,value)
previousInterval = interval
else:
numberOfPoints = len(currentString) / pointSize
startInterval = previousInterval - (step * (numberOfPoints-1))
packedStrings.append( (startInterval,currentString) )
currentString = struct.pack(pointFormat,interval,value)
previousInterval = interval
if currentString:
numberOfPoints = len(currentString) / pointSize
startInterval = previousInterval - (step * (numberOfPoints-1))
packedStrings.append( (startInterval,currentString) )
#Read base point and determine where our writes will start
fh.seek(archive['offset'])
packedBasePoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedBasePoint)
if baseInterval == 0: #This file's first update
baseInterval = packedStrings[0][0] #use our first string as the base, so we start at the start
#Write all of our packed strings in locations determined by the baseInterval
for (interval,packedString) in packedStrings:
timeDistance = interval - baseInterval
pointDistance = timeDistance / step
byteDistance = pointDistance * pointSize
myOffset = archive['offset'] + (byteDistance % archive['size'])
fh.seek(myOffset)
archiveEnd = archive['offset'] + archive['size']
bytesBeyond = (myOffset + len(packedString)) - archiveEnd
if bytesBeyond > 0:
fh.write( packedString[:-bytesBeyond] )
assert fh.tell() == archiveEnd, "archiveEnd=%d fh.tell=%d bytesBeyond=%d len(packedString)=%d" % (archiveEnd,fh.tell(),bytesBeyond,len(packedString))
fh.seek( archive['offset'] )
fh.write( packedString[-bytesBeyond:] ) #safe because it can't exceed the archive (retention checking logic above)
else:
fh.write(packedString)
#Now we propagate the updates to lower-precision archives
higher = archive
lowerArchives = [arc for arc in header['archives'] if arc['secondsPerPoint'] > archive['secondsPerPoint']]
for lower in lowerArchives:
fit = lambda i: i - (i % lower['secondsPerPoint'])
lowerIntervals = [fit(p[0]) for p in alignedPoints]
uniqueLowerIntervals = set(lowerIntervals)
propagateFurther = False
for interval in uniqueLowerIntervals:
if __propagate(fh, header, interval, higher, lower):
propagateFurther = True
if not propagateFurther:
break
higher = lower
def info(path):
"""info(path)
path is a string
"""
fh = open(path,'rb')
info = __readHeader(fh)
fh.close()
return info
def fetch(path,fromTime,untilTime=None):
"""fetch(path,fromTime,untilTime=None)
path is a string
fromTime is an epoch time
untilTime is also an epoch time, but defaults to now
"""
fh = open(path,'rb')
return file_fetch(fh, fromTime, untilTime)
def file_fetch(fh, fromTime, untilTime):
header = __readHeader(fh)
now = int( time.time() )
if untilTime is None:
untilTime = now
fromTime = int(fromTime)
untilTime = int(untilTime)
oldestTime = now - header['maxRetention']
if fromTime < oldestTime:
fromTime = oldestTime
if not (fromTime < untilTime):
raise InvalidTimeInterval("Invalid time interval")
if untilTime > now:
untilTime = now
if untilTime < fromTime:
untilTime = now
diff = now - fromTime
for archive in header['archives']:
if archive['retention'] >= diff:
break
fromInterval = int( fromTime - (fromTime % archive['secondsPerPoint']) ) + archive['secondsPerPoint']
untilInterval = int( untilTime - (untilTime % archive['secondsPerPoint']) ) + archive['secondsPerPoint']
fh.seek(archive['offset'])
packedPoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedPoint)
if baseInterval == 0:
step = archive['secondsPerPoint']
points = (untilInterval - fromInterval) / step
timeInfo = (fromInterval,untilInterval,step)
valueList = [None] * points
return (timeInfo,valueList)
#Determine fromOffset
timeDistance = fromInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
fromOffset = archive['offset'] + (byteDistance % archive['size'])
#Determine untilOffset
timeDistance = untilInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
untilOffset = archive['offset'] + (byteDistance % archive['size'])
#Read all the points in the interval
fh.seek(fromOffset)
if fromOffset < untilOffset: #If we don't wrap around the archive
seriesString = fh.read(untilOffset - fromOffset)
else: #We do wrap around the archive, so we need two reads
archiveEnd = archive['offset'] + archive['size']
seriesString = fh.read(archiveEnd - fromOffset)
fh.seek(archive['offset'])
seriesString += fh.read(untilOffset - archive['offset'])
#Now we unpack the series data we just read (anything faster than unpack?)
byteOrder,pointTypes = pointFormat[0],pointFormat[1:]
points = len(seriesString) / pointSize
seriesFormat = byteOrder + (pointTypes * points)
unpackedSeries = struct.unpack(seriesFormat, seriesString)
#And finally we construct a list of values (optimize this!)
valueList = [None] * points #pre-allocate entire list for speed
currentInterval = fromInterval
step = archive['secondsPerPoint']
for i in xrange(0,len(unpackedSeries),2):
pointTime = unpackedSeries[i]
if pointTime == currentInterval:
pointValue = unpackedSeries[i+1]
valueList[i/2] = pointValue #in-place reassignment is faster than append()
currentInterval += step
fh.close()
timeInfo = (fromInterval,untilInterval,step)
return (timeInfo,valueList)
def merge(path_from, path_to, step=1<<12):
headerFrom = info(path_from)
archives = headerFrom['archives']
archives.sort(key=operator.itemgetter('retention'), reverse=True)
# Start from maxRetention of the oldest file, and skip forward at max 'step'
# points at a time.
fromTime = int(time.time()) - headerFrom['maxRetention']
for archive in archives:
pointsRemaining = archive['points']
while pointsRemaining:
pointsToRead = step
if pointsRemaining < step:
pointsToRead = pointsRemaining
pointsRemaining -= pointsToRead
untilTime = fromTime + (pointsToRead * archive['secondsPerPoint'])
(timeInfo, values) = fetch(path_from, fromTime, untilTime)
(start, end, archive_step) = timeInfo
pointsToWrite = list(itertools.ifilter(
lambda points: points[1] is not None,
itertools.izip(xrange(start, end, archive_step), values)))
pointsToWrite.sort(key=lambda p: p[0],reverse=True) #order points by timestamp, newest first
update_many(path_to, pointsToWrite)
fromTime = untilTime
|
|
import peewee
from datetime import datetime
from bakthat.conf import config, load_config, DATABASE
import hashlib
import json
import sqlite3
import os
import requests
import logging
log = logging.getLogger(__name__)
database = peewee.SqliteDatabase(DATABASE)
class JsonField(peewee.CharField):
"""Custom JSON field."""
def db_value(self, value):
return json.dumps(value)
def python_value(self, value):
try:
return json.loads(value)
except:
return value
class BaseModel(peewee.Model):
class Meta:
database = database
class SyncedModel(peewee.Model):
class Meta:
database = database
class History(BaseModel):
"""History for sync."""
data = JsonField()
ts = peewee.IntegerField(index=True)
action = peewee.CharField(index=True)
model = peewee.CharField(index=True)
pk = peewee.CharField(index=True)
class Meta:
db_table = 'history'
class Backups(SyncedModel):
"""Backups Model."""
backend = peewee.CharField(index=True)
backend_hash = peewee.CharField(index=True, null=True)
backup_date = peewee.IntegerField(index=True)
filename = peewee.TextField(index=True)
is_deleted = peewee.BooleanField()
last_updated = peewee.IntegerField()
metadata = JsonField()
size = peewee.IntegerField()
stored_filename = peewee.TextField(index=True, unique=True)
tags = peewee.CharField()
def __repr__(self):
return "<Backup: {0}>".format(self._data.get("stored_filename"))
@classmethod
def match_filename(cls, filename, destination, **kwargs):
conf = config
if kwargs.get("config"):
conf = load_config(kwargs.get("config"))
profile = conf.get(kwargs.get("profile", "default"))
s3_key = hashlib.sha512(profile.get("access_key") +
profile.get("s3_bucket")).hexdigest()
glacier_key = hashlib.sha512(profile.get("access_key") +
profile.get("glacier_vault")).hexdigest()
try:
fquery = "{0}*".format(filename)
query = Backups.select().where(Backups.filename % fquery |
Backups.stored_filename % fquery,
Backups.backend == destination,
Backups.backend_hash << [s3_key, glacier_key])
query = query.order_by(Backups.backup_date.desc())
return query.get()
except Backups.DoesNotExist:
return
@classmethod
def search(cls, query="", destination="", **kwargs):
conf = config
if kwargs.get("config"):
conf = load_config(kwargs.get("config"))
if not destination:
destination = ["s3", "glacier"]
if isinstance(destination, (str, unicode)):
destination = [destination]
query = "*{0}*".format(query)
wheres = []
if kwargs.get("profile"):
profile = conf.get(kwargs.get("profile"))
s3_key = hashlib.sha512(profile.get("access_key") +
profile.get("s3_bucket")).hexdigest()
glacier_key = hashlib.sha512(profile.get("access_key") +
profile.get("glacier_vault")).hexdigest()
wheres.append(Backups.backend_hash << [s3_key, glacier_key])
wheres.append(Backups.filename % query |
Backups.stored_filename % query)
wheres.append(Backups.backend << destination)
wheres.append(Backups.is_deleted == False)
older_than = kwargs.get("older_than")
if older_than:
wheres.append(Backups.backup_date < older_than)
backup_date = kwargs.get("backup_date")
if backup_date:
wheres.append(Backups.backup_date == backup_date)
last_updated_gt = kwargs.get("last_updated_gt")
if last_updated_gt:
wheres.append(Backups.last_updated >= last_updated_gt)
tags = kwargs.get("tags", [])
if tags:
if isinstance(tags, (str, unicode)):
tags = tags.split()
tags_query = ["Backups.tags % '*{0}*'".format(tag) for tag in tags]
tags_query = eval("({0})".format(" and ".join(tags_query)))
wheres.append(tags_query)
return Backups.select().where(*wheres).order_by(Backups.last_updated.desc())
def set_deleted(self):
self.is_deleted = True
self.last_updated = int(datetime.utcnow().strftime("%s"))
self.save()
def is_encrypted(self):
return self.stored_filename.endswith(".enc") or self.metadata.get("is_enc" == 'beefish')
def is_gpg_encrypted(self):
return self.stored_filename.endswith(".gpg") or self.metadata.get("is_enc" == 'gpg')
def is_gzipped(self):
return self.metadata.get("is_gzipped")
@classmethod
def upsert(cls, **backup):
        stored_filename = backup.get("stored_filename")
        q = Backups.select()
        q = q.where(Backups.stored_filename == stored_filename)
        if q.count():
            # Capture the filename before removing it from the payload so the
            # UPDATE still matches (it previously compared against None).
            del backup["stored_filename"]
            Backups.update(**backup).where(Backups.stored_filename == stored_filename).execute()
else:
Backups.create(**backup)
class Meta:
db_table = 'backups'
class Sync:
pk = 'stored_filename'
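# A minimal usage sketch for the Backups model (hypothetical values):
#   Backups.upsert(stored_filename="doc.tgz.enc", backend="s3",
#                  filename="doc.tgz", backup_date=1370000000, is_deleted=False,
#                  last_updated=1370000000, metadata={}, size=1024, tags="docs")
#   for backup in Backups.search(query="doc", destination="s3"):
#       print backup.stored_filename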
class Config(BaseModel):
"""key => value config store."""
key = peewee.CharField(index=True, unique=True)
value = JsonField()
@classmethod
    def get_key(cls, key, default=None):
try:
return Config.get(Config.key == key).value
except Config.DoesNotExist:
return default
@classmethod
    def set_key(cls, key, value=None):
q = Config.select().where(Config.key == key)
if q.count():
Config.update(value=value).where(Config.key == key).execute()
else:
Config.create(key=key, value=value)
class Meta:
db_table = 'config'
class Inventory(SyncedModel):
"""Filename => archive_id mapping for glacier archives."""
archive_id = peewee.CharField(index=True, unique=True)
filename = peewee.CharField(index=True)
@classmethod
    def get_archive_id(cls, filename):
return Inventory.get(Inventory.filename == filename).archive_id
class Meta:
db_table = 'inventory'
class Sync:
pk = 'filename'
class Jobs(SyncedModel):
"""filename => job_id mapping for glacier archives."""
filename = peewee.CharField(index=True)
job_id = peewee.CharField()
@classmethod
def get_job_id(cls, filename):
"""Try to retrieve the job id for a filename.
:type filename: str
:param filename: Filename
:rtype: str
:return: Job Id for the given filename
"""
try:
return Jobs.get(Jobs.filename == filename).job_id
except Jobs.DoesNotExist:
return
@classmethod
def update_job_id(cls, filename, job_id):
"""Update job_id for the given filename.
:type filename: str
:param filename: Filename
:type job_id: str
:param job_id: New job_id
:return: None
"""
q = Jobs.select().where(Jobs.filename == filename)
if q.count():
Jobs.update(job_id=job_id).where(Jobs.filename == filename).execute()
else:
Jobs.create(filename=filename, job_id=job_id)
class Meta:
db_table = 'jobs'
for table in [Backups, Jobs, Inventory, Config, History]:
if not table.table_exists():
table.create_table()
def backup_sqlite(filename):
"""Backup bakthat SQLite database to file."""
con = sqlite3.connect(DATABASE)
with open(filename, 'w') as f:
for line in con.iterdump():
f.write("{0}\n".format(line))
def restore_sqlite(filename):
"""Restore a dump into bakthat SQLite database."""
con = sqlite3.connect(DATABASE)
con.executescript(open(filename).read())
def switch_from_dt_to_peewee():
if os.path.isfile(os.path.expanduser("~/.bakthat.dt")):
import dumptruck
import time
dt = dumptruck.DumpTruck(dbname=os.path.expanduser("~/.bakthat.dt"), vars_table="config")
for backup in dt.dump("backups"):
try:
backup["tags"] = " ".join(backup.get("tags", []))
Backups.upsert(**backup)
time.sleep(0.1)
except Exception, exc:
print exc
for ivt in dt.dump("inventory"):
try:
Inventory.create(filename=ivt["filename"],
archive_id=ivt["archive_id"])
except Exception, exc:
print exc
os.remove(os.path.expanduser("~/.bakthat.dt"))
switch_from_dt_to_peewee()
|
|
import json
import mimetypes
import re
import sys
import time
from collections import OrderedDict
from http.cookiejar import parse_ns_headers
from pprint import pformat
from typing import Any, List, Optional, Tuple
import requests.auth
RE_COOKIE_SPLIT = re.compile(r', (?=[^ ;]+=)')
Item = Tuple[str, Any]
Items = List[Item]
class JsonDictPreservingDuplicateKeys(OrderedDict):
"""A specialized JSON dict preserving duplicate keys."""
# Python versions prior to 3.8 suffer from an issue with multiple keys with the same name.
# `json.dumps(obj, indent=N, sort_keys=True)` will output sorted keys when they are unique, and
# duplicate keys will be outputted as they were defined in the original data.
# See <https://bugs.python.org/issue23493#msg400929> for the behavior change between Python versions.
SUPPORTS_SORTING = sys.version_info >= (3, 8)
def __init__(self, items: Items):
self._items = items
self._ensure_items_used()
def _ensure_items_used(self) -> None:
"""HACK: Force `json.dumps()` to use `self.items()` instead of an empty dict.
Two JSON encoders are available on CPython: pure-Python (1) and C (2) implementations.
(1) The pure-python implementation will do a simple `if not dict: return '{}'`,
and we could fake that check by implementing the `__bool__()` method.
Source:
- <https://github.com/python/cpython/blob/9d318ad/Lib/json/encoder.py#L334-L336>
(2) On the other hand, the C implementation will do a check on the number of
items contained inside the dict, using a verification on `dict->ma_used`, which
is updated only when an item is added/removed from the dict. For that case,
there is no workaround but to add an item into the dict.
Sources:
- <https://github.com/python/cpython/blob/9d318ad/Modules/_json.c#L1581-L1582>
- <https://github.com/python/cpython/blob/9d318ad/Include/cpython/dictobject.h#L53>
- <https://github.com/python/cpython/blob/9d318ad/Include/cpython/dictobject.h#L17-L18>
To please both implementations, we simply add one item to the dict.
"""
if self._items:
self['__hack__'] = '__hack__'
def items(self) -> Items:
"""Return all items, duplicate ones included.
"""
return self._items
def load_json_preserve_order_and_dupe_keys(s):
return json.loads(s, object_pairs_hook=JsonDictPreservingDuplicateKeys)
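# A quick sketch of the duplicate-key behavior (illustrative input):
#   d = load_json_preserve_order_and_dupe_keys('{"a": 1, "a": 2}')
#   d.items()  # -> [('a', 1), ('a', 2)] -- both occurrences are kept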
def repr_dict(d: dict) -> str:
return pformat(d)
def humanize_bytes(n, precision=2):
# Author: Doug Latornell
# Licence: MIT
# URL: https://code.activestate.com/recipes/577081/
"""Return a humanized string representation of a number of bytes.
>>> humanize_bytes(1)
'1 B'
>>> humanize_bytes(1024, precision=1)
'1.0 kB'
>>> humanize_bytes(1024 * 123, precision=1)
'123.0 kB'
>>> humanize_bytes(1024 * 12342, precision=1)
'12.1 MB'
>>> humanize_bytes(1024 * 12342, precision=2)
'12.05 MB'
>>> humanize_bytes(1024 * 1234, precision=2)
'1.21 MB'
>>> humanize_bytes(1024 * 1234 * 1111, precision=2)
'1.31 GB'
>>> humanize_bytes(1024 * 1234 * 1111, precision=1)
'1.3 GB'
"""
abbrevs = [
(1 << 50, 'PB'),
(1 << 40, 'TB'),
(1 << 30, 'GB'),
(1 << 20, 'MB'),
(1 << 10, 'kB'),
(1, 'B')
]
if n == 1:
return '1 B'
for factor, suffix in abbrevs:
if n >= factor:
break
# noinspection PyUnboundLocalVariable
return f'{n / factor:.{precision}f} {suffix}'
class ExplicitNullAuth(requests.auth.AuthBase):
"""Forces requests to ignore the ``.netrc``.
<https://github.com/psf/requests/issues/2773#issuecomment-174312831>
"""
def __call__(self, r):
return r
def get_content_type(filename):
"""
Return the content type for ``filename`` in format appropriate
for Content-Type headers, or ``None`` if the file type is unknown
to ``mimetypes``.
"""
return mimetypes.guess_type(filename, strict=False)[0]
def split_cookies(cookies):
"""
When ``requests`` stores cookies in ``response.headers['Set-Cookie']``
it concatenates all of them through ``, ``.
This function splits cookies apart being careful to not to
split on ``, `` which may be part of cookie value.
"""
if not cookies:
return []
return RE_COOKIE_SPLIT.split(cookies)
def get_expired_cookies(
cookies: str,
    now: Optional[float] = None
) -> List[dict]:
now = now or time.time()
def is_expired(expires: Optional[float]) -> bool:
return expires is not None and expires <= now
attr_sets: List[Tuple[str, str]] = parse_ns_headers(
split_cookies(cookies)
)
cookies = [
# The first attr name is the cookie name.
dict(attrs[1:], name=attrs[0][0])
for attrs in attr_sets
]
_max_age_to_expires(cookies=cookies, now=now)
return [
{
'name': cookie['name'],
'path': cookie.get('path', '/')
}
for cookie in cookies
if is_expired(expires=cookie.get('expires'))
]
def _max_age_to_expires(cookies, now):
"""
Translate `max-age` into `expires` for Requests to take it into account.
HACK/FIXME: <https://github.com/psf/requests/issues/5743>
"""
for cookie in cookies:
if 'expires' in cookie:
continue
max_age = cookie.get('max-age')
if max_age and max_age.isdigit():
cookie['expires'] = now + float(max_age)
def parse_content_type_header(header):
"""Borrowed from requests."""
tokens = header.split(';')
content_type, params = tokens[0].strip(), tokens[1:]
params_dict = {}
items_to_strip = "\"' "
for param in params:
param = param.strip()
if param:
key, value = param, True
index_of_equals = param.find("=")
if index_of_equals != -1:
key = param[:index_of_equals].strip(items_to_strip)
value = param[index_of_equals + 1:].strip(items_to_strip)
params_dict[key.lower()] = value
return content_type, params_dict
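# A quick sketch (illustrative header value):
#   parse_content_type_header('text/html; charset="UTF-8"')
#   -> ('text/html', {'charset': 'UTF-8'})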
|
|
#!/usr/bin/env python
#The MIT License (MIT)
#Copyright (c) 2016 Massimiliano Patacchiola
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
#MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
#CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
#SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import numpy as np
import cv2
import sys
class HistogramColorClassifier:
"""Classifier for comparing an image I with a model M. The comparison is based on color
    histograms. It includes an implementation of the Histogram Intersection algorithm.
The histogram intersection was proposed by Michael Swain and Dana Ballard
in their paper "Indexing via color histograms".
    Abstract: The color spectrum of multicolored objects provides a robust,
efficient cue for indexing into a large database of models. This paper shows
color histograms to be stable object representations over change in view, and
demonstrates they can differentiate among a large number of objects. It introduces
a technique called Histogram Intersection for matching model and image histograms
and a fast incremental version of Histogram Intersection that allows real-time
indexing into a large database of stored models using standard vision hardware.
Color can also be used to search for the location of an object. An algorithm
called Histogram Backprojection performs this task efficiently in crowded scenes.
"""
def __init__(self, channels=[0, 1, 2], hist_size=[10, 10, 10], hist_range=[0, 256, 0, 256, 0, 256], hist_type='BGR'):
"""Init the classifier.
This class has an internal list containing all the models.
it is possible to append new models. Using the default values
it extracts a 3D BGR color histogram from the image, using
10 bins per channel.
@param channels list where we specify the index of the channel
we want to compute a histogram for. For a grayscale image,
the list would be [0]. For all three (red, green, blue) channels,
the channels list would be [0, 1, 2].
@param hist_size number of bins we want to use when computing a histogram.
It is a list (one value for each channel). Note: the bin sizes can
be different for each channel.
@param hist_range it is the min-max value of the values stored in the histogram.
For three channels can be [0, 256, 0, 256, 0, 256], if there is only one
channel can be [0, 256]
        @param hist_type Convert the input BGR frame to HSV or GRAYSCALE before
        taking the histogram. The HSV representation can give more reliable
        results in situations where light has a strong influence.
        BGR: (default) do not convert the input frame
        HSV: convert to the HSV representation
        GRAY: convert to grayscale
"""
self.channels = channels
self.hist_size = hist_size
self.hist_range = hist_range
self.hist_type = hist_type
self.model_list = list()
self.name_list = list()
def addModelHistogram(self, model_frame, name=''):
"""Add the histogram to internal container. If the name of the object
is already present then replace that histogram with a new one.
@param model_frame the frame to add to the model, its histogram
is obtained and saved in internal list.
@param name a string representing the name of the model.
If nothing is specified then the name will be the index of the element.
"""
if(self.hist_type=='HSV'): model_frame = cv2.cvtColor(model_frame, cv2.COLOR_BGR2HSV)
elif(self.hist_type=='GRAY'): model_frame = cv2.cvtColor(model_frame, cv2.COLOR_BGR2GRAY)
elif(self.hist_type=='RGB'): model_frame = cv2.cvtColor(model_frame, cv2.COLOR_BGR2RGB)
hist = cv2.calcHist([model_frame], self.channels, None, self.hist_size, self.hist_range)
hist = cv2.normalize(hist, hist).flatten()
if name == '': name = str(len(self.model_list))
if name not in self.name_list:
self.model_list.append(hist)
self.name_list.append(name)
else:
for i in range(len(self.name_list)):
if self.name_list[i] == name:
self.model_list[i] = hist
break
def removeModelHistogramByName(self, name):
"""Remove the specific model using the name as index.
@param: name the index of the element to remove
@return: True if the object has been deleted, otherwise False.
"""
if name not in self.name_list:
return False
for i in range(len(self.name_list)):
if self.name_list[i] == name:
del self.name_list[i]
del self.model_list[i]
return True
def returnHistogramComparison(self, hist_1, hist_2, method='intersection'):
"""Return the comparison value of two histograms.
        Comparing a histogram with itself returns 1.
@param hist_1
@param hist_2
@param method the comparison method.
intersection: (default) the histogram intersection (Swain, Ballard)
"""
if cv2.__version__.split(".")[0] == '3':
if(method=="intersection"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.HISTCMP_INTERSECT)
elif(method=="correlation"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.HISTCMP_CORREL)
elif(method=="chisqr"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.HISTCMP_CHISQR)
elif(method=="bhattacharyya"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.HISTCMP_BHATTACHARYYA)
else:
raise ValueError('[DEEPGAZE] color_classification.py: the method specified ' + str(method) + ' is not supported.')
else:
if(method=="intersection"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.cv.CV_COMP_INTERSECT)
elif(method=="correlation"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.cv.CV_COMP_CORREL)
elif(method=="chisqr"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.cv.CV_COMP_CHISQR)
elif(method=="bhattacharyya"):
comparison = cv2.compareHist(hist_1, hist_2, cv2.cv.CV_COMP_BHATTACHARYYA)
else:
raise ValueError('[DEEPGAZE] color_classification.py: the method specified ' + str(method) + ' is not supported.')
return comparison
def returnHistogramComparisonArray(self, image, method='intersection'):
"""Return the comparison array between all the model and the input image.
The highest value represents the best match.
@param image the image to compare
@param method the comparison method.
intersection: (default) the histogram intersection (Swain, Ballard)
        @return a numpy array containing the comparison value for each image-model pair
"""
if(self.hist_type=='HSV'): image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
elif(self.hist_type=='GRAY'): image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
elif(self.hist_type=='RGB'): image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
comparison_array = np.zeros(len(self.model_list))
image_hist = cv2.calcHist([image], self.channels, None, self.hist_size, self.hist_range)
image_hist = cv2.normalize(image_hist, image_hist).flatten()
counter = 0
for model_hist in self.model_list:
comparison_array[counter] = self.returnHistogramComparison(image_hist, model_hist, method=method)
counter += 1
return comparison_array
def returnHistogramComparisonProbability(self, image, method='intersection'):
"""Return the probability distribution of the comparison between
all the model and the input image. The sum of the elements in the output
array sum up to 1.
The highest value represents the best match.
@param image the image to compare
@param method the comparison method.
intersection: (default) the histogram intersection (Swain, Ballard)
        @return a numpy array containing the comparison value for each image-model pair
"""
comparison_array = self.returnHistogramComparisonArray(image=image, method=method)
#comparison_array[comparison_array < 0] = 0 #Remove negative values
comparison_distribution = np.divide(comparison_array, np.sum(comparison_array))
return comparison_distribution
def returnBestMatchIndex(self, image, method='intersection'):
"""Return the index of the best match between the image and the internal models.
@param image the image to compare
@param method the comparison method.
intersection: (default) the histogram intersection (Swain, Ballard)
        @return the index of the best matching model
"""
comparison_array = self.returnHistogramComparisonArray(image, method=method)
return np.argmax(comparison_array)
def returnBestMatchName(self, image, method='intersection'):
"""Return the name of the best match between the image and the internal models.
@param image the image to compare
@param method the comparison method.
intersection: (default) the histogram intersection (Swain, Ballard)
@return a string representing the name of the best matching model
"""
comparison_array = self.returnHistogramComparisonArray(image, method=method)
arg_max = np.argmax(comparison_array)
return self.name_list[arg_max]
def returnNameList(self):
"""Return a list containing all the names stored in the model.
@return: a list containing the name of the models.
"""
return self.name_list
def returnSize(self):
"""Return the number of elements stored.
@return: an integer representing the number of elements stored
"""
return len(self.model_list)
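# A minimal usage sketch (hypothetical image files):
#   classifier = HistogramColorClassifier(hist_type='HSV')
#   classifier.addModelHistogram(cv2.imread('model_01.png'), name='red_cup')
#   classifier.addModelHistogram(cv2.imread('model_02.png'), name='blue_book')
#   print(classifier.returnBestMatchName(cv2.imread('frame.png')))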
|
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import logging
import six
from ..core import Future, BaseFuture, AllFuture, CancelledError
from ..constants import USE_WORKER_TASK_LIST
from ..exceptions import (ActivityTaskFailedError, ActivityTaskTimedOutError, ScheduleActivityTaskFailedError,
ActivityTaskCanceledError, RequestCancelActivityTaskFailedError)
from ..decisions import ScheduleActivityTask, RequestCancelActivityTask
from ..history_events import (ActivityTaskScheduled, ScheduleActivityTaskFailed, ActivityTaskCompleted,
ActivityTaskFailed, ActivityTaskTimedOut, ActivityTaskCancelRequested,
ActivityTaskCanceled, ActivityTaskStarted, RequestCancelActivityTaskFailed)
from .activity_future import ActivityFuture
log = logging.getLogger(__name__)
class ActivityTaskHandler(object):
responds_to = (ActivityTaskScheduled, ActivityTaskCompleted, ActivityTaskFailed, ActivityTaskTimedOut,
ScheduleActivityTaskFailed, ActivityTaskStarted, ActivityTaskCanceled,
ActivityTaskCancelRequested, RequestCancelActivityTaskFailed)
def __init__(self, decider, task_list):
self._decider = decider
self._open_activities = {}
self._schedule_event_to_activity_id = {}
self._open_cancels = {}
self._task_list = task_list
def __del__(self):
log.debug("Closing all activity handlers")
for val in six.itervalues(self._open_activities):
val['handler'].close()
def handle_execute_activity(self, activity_type, decision_dict, args, kwargs):
"""Makes ScheduleActivityTask decision and creates associated future.
:return: activity future
:rtype: awsflow.decider.activity_future.ActivityFuture
"""
activity_id = self._decider.get_next_id()
decision_dict['activity_id'] = activity_id
if decision_dict['task_list']['name'] == USE_WORKER_TASK_LIST:
decision_dict['task_list']['name'] = self._task_list
decision_dict['input'] = activity_type.data_converter.dumps([args, kwargs])
decision = ScheduleActivityTask(**decision_dict)
self._decider._decisions.append(decision)
log.debug("Workflow schedule activity execution: %s, %s, %s, %s",
decision, args, kwargs, activity_id)
# set the future that represents the result of our activity
activity_future = Future()
handler = self._handler_fsm(activity_type, activity_id, activity_future)
six.next(handler) # arm
self._open_activities[activity_id] = {'future': activity_future,
'handler': handler}
return ActivityFuture(activity_future, self, activity_id)
def request_cancel_activity_task_all(self):
"""Makes RequestCancelActivityTask decisions for all open activities.
:return: all futures for cancel requests
:rtype: awsflow.core.future.AllFuture
"""
futures = set()
for activity_id, activity_info in self._open_activities.items():
futures.add(self.request_cancel_activity_task(activity_info['future'], activity_id))
return AllFuture(*futures)
def request_cancel_activity_task(self, activity_future, activity_id):
"""Requests to cancel an activity with the given activity_id.
        If the schedule decision for the activity was not yet sent, its future is set to
CancelledError and a BaseFuture is returned; otherwise, a RequestCancelActivityTask
decision is made, and a future that tracks the request is returned.
:param activity_id: id of the activity to handle
:type activity_id: str
:param activity_future: the calling future; target for cancellation
:type activity_future: awsflow.decider.activity_future.ActivityFuture
:return: cancel future
:rtype: awsflow.core.future.Future
"""
if activity_id in self._open_cancels:
return self._open_cancels[activity_id]['future'] # duplicate request
if self._decider._decisions.delete_decision(ScheduleActivityTask, activity_id):
activity_future.set_exception(CancelledError(
"Activity was cancelled before being scheduled with SWF"))
del self._open_activities[activity_id]
return BaseFuture.with_result(None)
self._decider._decisions.append(RequestCancelActivityTask(activity_id))
cancel_activity_future = Future()
self._open_cancels[activity_id] = {'future': cancel_activity_future}
return cancel_activity_future
def handle_event(self, event):
"""Determines activity id associated with event, then forwards event to appropriate handler."""
activity_id = None
if isinstance(event, ActivityTaskStarted):
return
if isinstance(event, RequestCancelActivityTaskFailed):
self._resolve_cancel_future(event.attributes['activityId'], failed_event=event)
return
if isinstance(event, (ActivityTaskScheduled, ScheduleActivityTaskFailed,
ActivityTaskCancelRequested)):
activity_id = event.attributes['activityId']
elif isinstance(event, (ActivityTaskCompleted, ActivityTaskFailed, ActivityTaskTimedOut,
ActivityTaskCanceled)):
scheduled_event_id = event.attributes['scheduledEventId']
activity_id = self._schedule_event_to_activity_id[scheduled_event_id]
if activity_id is not None:
self._open_activities[activity_id]['handler'].send(event)
else:
log.warn("Tried to handle activity event, but activity_id was None: %r", event)
def _handler_fsm(self, activity_type, activity_id, activity_future):
"""FSM responsible for yielding through events until setting a result on activity_future.
This is also responsible for resolving any open cancel futures.
:param activity_type:
:type activity_type: awsflow.workflow_types.ActivityType
:param activity_id: id of the activity to handle
:type activity_id: int
:param activity_future: to resolve once activity finishes
:type activity_future: awsflow.core.future.Future
:return:
"""
event = (yield)
if isinstance(event, (ActivityTaskScheduled, ScheduleActivityTaskFailed)):
self._decider._decisions.delete_decision(ScheduleActivityTask, activity_id)
        # ActivityTaskScheduled event always precedes ActivityTaskCancelRequested event,
# given that no cancels are sent until the ScheduleActivityTask decision is deleted.
if isinstance(event, (ActivityTaskScheduled, ActivityTaskCancelRequested)):
if isinstance(event, ActivityTaskScheduled):
# maintain mapping of schedule event id to activity id for future lookup
self._schedule_event_to_activity_id[event.id] = activity_id
event = (yield)
if isinstance(event, ActivityTaskCancelRequested):
self._resolve_cancel_future(activity_id)
event = (yield) # do not interrupt actual activity future
if isinstance(event, ActivityTaskCompleted):
result = activity_type.data_converter.loads(
event.attributes['result'])
activity_future.set_result(result)
elif isinstance(event, ActivityTaskFailed):
exception, _traceback = activity_type.data_converter.loads(
event.attributes['details'])
error = ActivityTaskFailedError(
event.id, activity_type, activity_id, cause=exception,
_traceback=_traceback)
activity_future.set_exception(error)
elif isinstance(event, ActivityTaskTimedOut):
error = ActivityTaskTimedOutError(
event.id, activity_type, activity_id,
event.attributes['timeoutType'])
activity_future.set_exception(error)
elif isinstance(event, ActivityTaskCanceled):
exception, _traceback = CancelledError("Activity was cancelled before being picked up by "
"activity worker"), None
if event.attributes.get('details', False):
# parse out exception from activity result
exception, _traceback = activity_type.data_converter.loads(event.attributes['details'])
error = ActivityTaskCanceledError(
event.id, activity_type, activity_id, cause=exception,
latest_cancel_requested_event_id=event.attributes.get('latestCancelRequestedEventId'),
scheduled_event_id=event.attributes.get('scheduledEventId'),
started_event_id=event.attributes.get('startedEventId'),
_traceback=_traceback)
activity_future.set_exception(error)
else:
raise RuntimeError("Unexpected event/state: %s", event)
elif isinstance(event, ScheduleActivityTaskFailed):
# set the exception with a cause
cause = event.attributes['cause']
activity_future.set_exception(
ScheduleActivityTaskFailedError(cause))
else:
raise RuntimeError("Unexpected event/state: %s", event)
del self._open_activities[activity_id] # activity done
if activity_id in self._open_cancels:
self._resolve_cancel_future(activity_id)
def _resolve_cancel_future(self, activity_id, failed_event=None):
"""Resolves a cancel future by setting its result to None or exception if failed_event.
:param activity_id: id of the activity that was to be cancelled
:type activity_id: int
:param failed_event: associated cancel failure event
:type failed_event: awsflow.history_events.RequestCancelActivityTaskFailed
:return:
"""
self._decider._decisions.delete_decision(RequestCancelActivityTask, activity_id)
if activity_id not in self._open_cancels:
return # no future; occurs with cancel all requests
cancel_future = self._open_cancels[activity_id]['future']
if failed_event:
cancel_future.set_exception(RequestCancelActivityTaskFailedError(
failed_event.id, activity_id, failed_event.attributes['cause'],
failed_event.attributes['decisionTaskCompletedEventId']))
else:
cancel_future.set_result(None)
del self._open_cancels[activity_id]
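# Editor's note -- a minimal, self-contained sketch of the "primed generator"
# pattern used by _handler_fsm above (the names below are illustrative and
# not part of awsflow): events are pushed into the coroutine via send(), and
# the coroutine returning signals that the handler is finished.
def _fsm_sketch():
    event = (yield)          # block until the first event arrives
    while event != 'done':
        event = (yield)      # keep consuming until a terminal event

if __name__ == '__main__':
    fsm = _fsm_sketch()
    next(fsm)                # prime the generator to its first yield
    fsm.send('scheduled')
    try:
        fsm.send('done')     # terminal event: the generator returns
    except StopIteration:
        pass                 # StopIteration marks the FSM as finished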
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
quota_group = cfg.OptGroup(
name='quota',
title='Quota Options',
help="""
Quota options allow you to manage quotas in an OpenStack deployment.
""")
quota_opts = [
cfg.IntOpt('instances',
min=-1,
default=10,
deprecated_group='DEFAULT',
deprecated_name='quota_instances',
help="""
The number of instances allowed per project.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('cores',
min=-1,
default=20,
deprecated_group='DEFAULT',
deprecated_name='quota_cores',
help="""
The number of instance cores or vCPUs allowed per project.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('ram',
min=-1,
default=50 * 1024,
deprecated_group='DEFAULT',
deprecated_name='quota_ram',
help="""
The number of megabytes of instance RAM allowed per project.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('floating_ips',
min=-1,
default=10,
deprecated_group='DEFAULT',
deprecated_name='quota_floating_ips',
deprecated_for_removal=True,
deprecated_since='15.0.0',
deprecated_reason="""
nova-network is deprecated, as are any related configuration options.
""",
help="""
The number of floating IPs allowed per project.
Floating IPs are not allocated to instances by default. Users need to select
them from the pool configured by the OpenStack administrator to attach to their
instances.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('fixed_ips',
min=-1,
default=-1,
deprecated_group='DEFAULT',
deprecated_name='quota_fixed_ips',
deprecated_for_removal=True,
deprecated_since='15.0.0',
deprecated_reason="""
nova-network is deprecated, as are any related configuration options.
""",
help="""
The number of fixed IPs allowed per project.
Unlike floating IPs, fixed IPs are allocated dynamically by the network
component when instances boot up. This quota value should be at least the
number of instances allowed.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('metadata_items',
min=-1,
default=128,
deprecated_group='DEFAULT',
deprecated_name='quota_metadata_items',
help="""
The number of metadata items allowed per instance.
Users can associate metadata with an instance during instance creation. This
metadata takes the form of key-value pairs.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('injected_files',
min=-1,
default=5,
deprecated_group='DEFAULT',
deprecated_name='quota_injected_files',
help="""
The number of injected files allowed.
File injection allows users to customize the personality of an instance by
injecting data into it upon boot. Only text file injection is permitted: binary
or ZIP files are not accepted. During file injection, any existing files that
match specified files are renamed to include ``.bak`` extension appended with a
timestamp.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('injected_file_content_bytes',
min=-1,
default=10 * 1024,
deprecated_group='DEFAULT',
deprecated_name='quota_injected_file_content_bytes',
help="""
The number of bytes allowed per injected file.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('injected_file_path_length',
min=-1,
default=255,
deprecated_group='DEFAULT',
deprecated_name='quota_injected_file_path_length',
help="""
The maximum allowed injected file path length.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('security_groups',
min=-1,
default=10,
deprecated_group='DEFAULT',
deprecated_name='quota_security_groups',
deprecated_for_removal=True,
deprecated_since='15.0.0',
deprecated_reason="""
nova-network is deprecated, as are any related configuration options.
""",
help="""
The number of security groups per project.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('security_group_rules',
min=-1,
default=20,
deprecated_group='DEFAULT',
deprecated_name='quota_security_group_rules',
deprecated_for_removal=True,
deprecated_since='15.0.0',
deprecated_reason="""
nova-network is deprecated, as are any related configuration options.
""",
help="""
The number of security rules per security group.
The associated rules in each security group control the traffic to instances in
the group.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('key_pairs',
min=-1,
default=100,
deprecated_group='DEFAULT',
deprecated_name='quota_key_pairs',
help="""
The maximum number of key pairs allowed per user.
Users can create at least one key pair for each project and use the key pair
for multiple instances that belong to that project.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('server_groups',
min=-1,
default=10,
deprecated_group='DEFAULT',
deprecated_name='quota_server_groups',
help="""
The maximum number of server groups per project.
Server groups are used to control the affinity and anti-affinity scheduling
policy for a group of servers or instances. Reducing the quota will not affect
any existing group, but new servers will not be allowed into groups that have
become over quota.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
cfg.IntOpt('server_group_members',
min=-1,
default=10,
deprecated_group='DEFAULT',
deprecated_name='quota_server_group_members',
help="""
The maximum number of servers per server group.
Possible values:
* A positive integer or 0.
* -1 to disable the quota.
"""),
# TODO(stephenfin): This should have a min parameter
cfg.IntOpt('reservation_expire',
default=86400,
deprecated_group='DEFAULT',
help="""
The number of seconds until a reservation expires.
This quota represents the time period for invalidating quota reservations.
"""),
cfg.IntOpt('until_refresh',
min=0,
default=0,
deprecated_group='DEFAULT',
help="""
The count of reservations until usage is refreshed.
This defaults to 0 (off) to avoid additional load, but it is useful to turn on
to help keep quota usage up to date and reduce the impact of out-of-sync usage
issues.
"""),
cfg.IntOpt('max_age',
min=0,
default=0,
deprecated_group='DEFAULT',
help="""
The number of seconds between subsequent usage refreshes.
This defaults to 0 (off) to avoid additional load, but it is useful to turn on
to help keep quota usage up to date and reduce the impact of out-of-sync usage
issues. Note that quotas are not updated by a periodic task; they are
refreshed on a new reservation if max_age has passed since the last
reservation.
"""),
# TODO(pumaranikar): Add a new config to select between the db_driver and
# the no_op driver using stevedore.
cfg.StrOpt('driver',
default='nova.quota.DbQuotaDriver',
deprecated_for_removal=True,
deprecated_since='14.0.0',
deprecated_group='DEFAULT',
deprecated_name='quota_driver',
help="""
The quota enforcer driver.
Provides abstraction for quota checks. Users can configure a specific
driver to use for quota checks.
Possible values:
* nova.quota.DbQuotaDriver (default) or any string representing a fully
qualified class name.
"""),
cfg.BoolOpt('recheck_quota',
default=True,
help="""
Recheck quota after resource creation to prevent allowing quota to be exceeded.
This defaults to True (recheck quota after resource creation) but can be set to
False to avoid additional load if allowing quota to be exceeded because of
racing requests is considered acceptable. For example, when set to False, if a
user makes highly parallel REST API requests to create servers, it will be
possible for them to create more servers than their allowed quota during the
race. If their quota is 10 servers, they might be able to create 50 during the
burst. After the burst, they will not be able to create any more servers but
they will be able to keep their 50 servers until they delete them.
The initial quota check is done before resources are created, so if multiple
parallel requests arrive at the same time, all could pass the quota check and
create resources, potentially exceeding quota. When recheck_quota is True,
quota will be checked a second time after resources have been created and if
the resource is over quota, it will be deleted and OverQuota will be raised,
usually resulting in a 403 response to the REST API user. This makes it
impossible for a user to exceed their quota, with the caveat that a REST API
user may still be rejected with a 403 response in the event of a collision
close to their quota limit, even if the user had enough quota available when
they made the request.
"""),
]
def register_opts(conf):
conf.register_group(quota_group)
conf.register_opts(quota_opts, group=quota_group)
def list_opts():
return {quota_group: quota_opts}
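# A minimal usage sketch (editor's illustration; instantiating a fresh
# ConfigOpts and parsing an empty argv are assumptions for the demo, not
# part of this module): register the group and read a default back.
if __name__ == '__main__':
    conf = cfg.ConfigOpts()
    register_opts(conf)
    conf(args=[])                       # parse an empty command line
    assert conf.quota.instances == 10   # default defined above
    assert conf.quota.ram == 50 * 1024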
|
|
# Modifications copyright (C) 2017, Baidu.com, Inc.
# Copyright 2017 The Apache Software Foundation
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
# This script will generate the implementation of the simple vector functions for the BE.
# These include:
# - Arithmetic functions
# - Binary functions
# - Cast functions
#
# The script (run 'src/common/function/gen_vector_functions.py') outputs:
# - header and implementation for the above functions:
# - src/gen_cpp/opcode/vector_functions.[h/cc]
# - python file that contains the metadata for those functions:
# - src/gen_cpp/generated_vector_functions.py
"""
import os
import string
import sys
filter_binary_op = string.Template("\
bool VectorComputeFunctions::${fn_signature}(\n\
Expr* expr, VectorizedRowBatch* batch) {\n\
int n = batch->size();\n\
if (0 == n) {\n\
return false;\n\
}\n\
int* sel = batch->selected();\n\
Expr* op1 = expr->children()[0];\n\
Expr* op2 = expr->children()[1];\n\
batch->add_column(expr->output_column(), expr->type());\n\
if (expr->is_constant()) {\n\
${native_type1}* val1 = reinterpret_cast<${native_type1}*>(op1->get_value(NULL));\n\
${native_type2}* val2 = reinterpret_cast<${native_type2}*>(op2->get_value(NULL));\n\
if (val1 == NULL || val2 == NULL) return false;\n\
if (!(*val1 ${native_op} *val2)) batch->set_size(0);\n\
} else if (op1->is_constant()) {\n\
${native_type1}* value = reinterpret_cast<${native_type1}*>(op1->get_value(NULL));\n\
if (NULL == value || !op2->evaluate(batch)) return false;\n\
${native_type1}* vector1\n\
= reinterpret_cast<${native_type1}*>(batch->column(op2->output_column())->col_data());\n\
\n\
int new_size = 0;\n\
if (batch->selected_in_use()) {\n\
for (int j = 0; j != n; ++j) {\n\
int i = sel[j];\n\
if (*value ${native_op} vector1[i]) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
batch->set_size(new_size);\n\
} else {\n\
for (int i = 0; i != n; ++i) {\n\
if (*value ${native_op} vector1[i]) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
\n\
if (new_size < n) {\n\
batch->set_size(new_size);\n\
batch->set_selected_in_use(true);\n\
}\n\
}\n\
} else if (op2->is_constant()) {\n\
${native_type2}* value = reinterpret_cast<${native_type2}*>(op2->get_value(NULL));\n\
if (NULL == value || !op1->evaluate(batch)) return false;\n\
${native_type1}* vector1\n\
= reinterpret_cast<${native_type1}*>(batch->column(op1->output_column())->col_data());\n\
\n\
int new_size = 0;\n\
if (batch->selected_in_use()) {\n\
for (int j = 0; j != n; ++j) {\n\
int i = sel[j];\n\
if (vector1[i] ${native_op} *value) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
batch->set_size(new_size);\n\
} else {\n\
for (int i = 0; i != n; ++i) {\n\
if (vector1[i] ${native_op} *value) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
\n\
if (new_size < n) {\n\
batch->set_size(new_size);\n\
batch->set_selected_in_use(true);\n\
}\n\
}\n\
} else {\n\
if (!op1->evaluate(batch) || !op2->evaluate(batch)) return false;\n\
${native_type1}* vector1\n\
= reinterpret_cast<${native_type1}*>(batch->column(op1->output_column())->col_data());\n\
${native_type2}* vector2\n\
= reinterpret_cast<${native_type2}*>(batch->column(op2->output_column())->col_data());\n\
\n\
int new_size = 0;\n\
if (batch->selected_in_use()) {\n\
for (int j = 0; j != n; ++j) {\n\
int i = sel[j];\n\
if (vector1[i] ${native_op} vector2[i]) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
batch->set_size(new_size);\n\
} else {\n\
for (int i = 0; i != n; ++i) {\n\
if (vector1[i] ${native_op} vector2[i]) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
if (new_size < n) {\n\
batch->set_size(new_size);\n\
batch->set_selected_in_use(true);\n\
}\n\
}\n\
}\n\
return true;\n\
}\n\n")
filter_in_op = string.Template("\
bool VectorComputeFunctions::${fn_signature}(\n\
Expr* expr, VectorizedRowBatch* batch) {\n\
int n = batch->size();\n\
if (0 == n) {\n\
return true;\n\
}\n\
batch->add_column(expr->output_column(), expr->type());\n\
int* sel = batch->selected();\n\
int num_children = expr->get_num_children();\n\
Expr* op1 = expr->children()[0];\n\
InPredicate *in_pred = static_cast<InPredicate*>(expr);\n\
\n\
if (op1->is_constant()) {\n\
void* value = op1->get_value(NULL);\n\
if (!in_pred->hybird_set()->find(value)) {\n\
batch->set_size(0);\n\
return true;\n\
}\n\
\n\
if (num_children > 1) {\n\
${native_type1}* v = reinterpret_cast<${native_type1}*>(value);\n\
${native_type1}* vectors[num_children];\n\
for (int i = 1; i < num_children; ++i) {\n\
if (!expr->get_child(i)->evaluate(batch)) return false;\n\
vectors[i] = reinterpret_cast<${native_type1}*>(batch->column(expr->get_child(i)->output_column())->col_data());\n\
}\n\
\n\
int new_size = 0;\n\
if (batch->selected_in_use()) {\n\
for (int j = 0; j != n; ++j) {\n\
int i = sel[j];\n\
for (int k = 1; k < num_children; ++k) {\n\
if (*v == vectors[k][i]) {\n\
sel[new_size++] = i;\n\
break;\n\
}\n\
}\n\
}\n\
batch->set_size(new_size);\n\
} else {\n\
for (int i = 0; i != n; ++i) {\n\
for (int k = 1; k < num_children; ++k) {\n\
if (*v == vectors[k][i]) {\n\
sel[new_size++] = i;\n\
break;\n\
}\n\
}\n\
}\n\
\n\
if (new_size < n) {\n\
batch->set_size(new_size);\n\
batch->set_selected_in_use(true);\n\
}\n\
}\n\
}\n\
} else {\n\
int c1 = op1->evaluate(batch);\n\
DCHECK(c1 >= 0);\n\
${native_type1}* vector1\n\
= reinterpret_cast<${native_type1}*>(batch->column(op1->output_column())->col_data());\n\
if (0 != in_pred->hybird_set()->size()) {\n\
int new_size = 0;\n\
if (batch->selected_in_use()) {\n\
for (int j = 0; j != n; ++j) {\n\
int i = sel[j];\n\
if (in_pred->hybird_set()->find(&vector1[i])) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
batch->set_size(new_size);\n\
} else {\n\
for (int i = 0; i != n; ++i) {\n\
if (in_pred->hybird_set()->find(&vector1[i])) {\n\
sel[new_size++] = i;\n\
}\n\
}\n\
\n\
if (new_size < n) {\n\
batch->set_size(new_size);\n\
batch->set_selected_in_use(true);\n\
}\n\
}\n\
}\n\
\n\
if (num_children > 1) {\n\
${native_type1}* vectors[num_children];\n\
for (int i = 1; i < num_children; ++i) {\n\
if (!expr->get_child(i)->evaluate(batch)) return false;\n\
vectors[i] = reinterpret_cast<${native_type1}*>(batch->column(expr->get_child(i)->output_column())->col_data());\n\
}\n\
\n\
int new_size = 0;\n\
if (batch->selected_in_use()) {\n\
for (int j = 0; j != n; ++j) {\n\
int i = sel[j];\n\
for (int k = 1; k < num_children; ++k) {\n\
if (vector1[i] == vectors[k][i]) {\n\
sel[new_size++] = i;\n\
break;\n\
}\n\
}\n\
}\n\
batch->set_size(new_size);\n\
} else {\n\
for (int i = 0; i != n; ++i) {\n\
for (int k = 1; k < num_children; ++k) {\n\
if (vector1[i] == vectors[k][i]) {\n\
sel[new_size++] = i;\n\
break;\n\
}\n\
}\n\
}\n\
\n\
if (new_size < n) {\n\
batch->set_size(new_size);\n\
batch->set_selected_in_use(true);\n\
}\n\
}\n\
}\n\
}\n\
return true;\n\
}\n\n")
python_template = string.Template("\
['${fn_name}', '${return_type}', [${args}], 'VectorComputeFunctions::${fn_signature}', []], \n")
# Mapping of function to template
templates = {
'Filter_Eq': filter_binary_op,
'Filter_Ne': filter_binary_op,
'Filter_Gt': filter_binary_op,
'Filter_Lt': filter_binary_op,
'Filter_Ge': filter_binary_op,
'Filter_Le': filter_binary_op,
'Filter_In': filter_in_op,
}
# Some aggregate types that are useful for defining functions
types = {
'BOOLEAN': ['BOOLEAN'],
'TINYINT': ['TINYINT'],
'SMALLINT': ['SMALLINT'],
'INT': ['INT'],
'BIGINT': ['BIGINT'],
'LARGEINT': ['LARGEINT'],
'FLOAT': ['FLOAT'],
'DOUBLE': ['DOUBLE'],
'STRING': ['VARCHAR'],
'DATE': ['DATE'],
'DATETIME': ['DATETIME'],
'DECIMAL': ['DECIMAL'],
'NATIVE_INT_TYPES': ['TINYINT', 'SMALLINT', 'INT', 'BIGINT'],
'INT_TYPES': ['TINYINT', 'SMALLINT', 'INT', 'BIGINT', 'LARGEINT'],
'FLOAT_TYPES': ['FLOAT', 'DOUBLE'],
'NUMERIC_TYPES': ['TINYINT', 'SMALLINT', 'INT', 'BIGINT', 'FLOAT', 'DOUBLE'],
'NATIVE_TYPES': ['BOOLEAN', 'TINYINT', 'SMALLINT', 'INT', 'BIGINT', 'FLOAT', 'DOUBLE'],
'STRCAST_TYPES': ['BOOLEAN', 'SMALLINT', 'INT', 'BIGINT', 'FLOAT', 'DOUBLE'],
'ALL_TYPES': ['BOOLEAN', 'TINYINT', 'SMALLINT', 'INT', 'BIGINT', 'LARGEINT', 'FLOAT',\
'DOUBLE', 'VARCHAR', 'DATETIME', 'DECIMAL'],
'MAX_TYPES': ['BIGINT', 'LARGEINT', 'DOUBLE', 'DECIMAL'],
}
# Operation, [ReturnType], [[Args1], [Args2], ... [ArgsN]]
functions = [
# BinaryPredicates
['Filter_Eq', ['BOOLEAN'], [['ALL_TYPES'], ['ALL_TYPES']]],
['Filter_Ne', ['BOOLEAN'], [['ALL_TYPES'], ['ALL_TYPES']]],
['Filter_Gt', ['BOOLEAN'], [['ALL_TYPES'], ['ALL_TYPES']]],
['Filter_Lt', ['BOOLEAN'], [['ALL_TYPES'], ['ALL_TYPES']]],
['Filter_Ge', ['BOOLEAN'], [['ALL_TYPES'], ['ALL_TYPES']]],
['Filter_Le', ['BOOLEAN'], [['ALL_TYPES'], ['ALL_TYPES']]],
# InPredicates
['Filter_In', ['BOOLEAN'], [['ALL_TYPES']]],
]
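# For example (editor's note): Filter_Eq expands element-wise over ALL_TYPES.
# Both argument lists expand to the same 11 concrete types and the return
# type list has length 1, so the generation loop below emits 11 signatures,
# pairing types at matching indexes: Filter_Eq_bool_bool,
# Filter_Eq_char_char, ..., Filter_Eq_DecimalValue_DecimalValue.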
native_types = {
'BOOLEAN': 'bool',
'TINYINT': 'char',
'SMALLINT': 'short',
'INT': 'int',
'BIGINT': 'long',
'LARGEINT': '__int128',
'FLOAT': 'float',
'DOUBLE': 'double',
'VARCHAR': 'StringValue',
'DATE': 'DateTimeValue',
'DATETIME': 'DateTimeValue',
'DECIMAL': 'DecimalValue',
}
# Portable type used in the function implementation
implemented_types = {
'BOOLEAN': 'bool',
'TINYINT': 'int8_t',
'SMALLINT': 'int16_t',
'INT': 'int32_t',
'BIGINT': 'int64_t',
'LARGEINT': '__int128',
'FLOAT': 'float',
'DOUBLE': 'double',
'VARCHAR': 'StringValue',
'DATE': 'DateTimeValue',
'DATETIME': 'DateTimeValue',
'DECIMAL': 'DecimalValue',
}
native_ops = {
'Filter_Eq': '==',
'Filter_Ne': '!=',
'Filter_Gt': '>',
'Filter_Lt': '<',
'Filter_Ge': '>=',
'Filter_Le': '<=',
'Eq': '==',
'Ne': '!=',
'Gt': '>',
'Lt': '<',
'Ge': '>=',
'Le': '<=',
'BITAND': '&',
'BITNOT': '~',
'BITOR': '|',
'BITXOR': '^',
'DIVIDE': '/',
'EQ': '==',
'GT': '>',
'GE': '>=',
'INT_DIVIDE': '/',
'SUBTRACT': '-',
'MOD': '%',
'MULTIPLY': '*',
'LT': '<',
'LE': '<=',
'NE': '!=',
'ADD': '+',
}
native_funcs = {
'EQ': 'Eq',
'LE': 'Le',
'LT': 'Lt',
'NE': 'Ne',
'GE': 'Ge',
'GT': 'Gt',
}
cc_preamble = '\
/***************************************************************************\n\
* \n\
* Copyright (c) 2014 Baidu.com, Inc. All Rights Reserved\n\
* \n\
**************************************************************************/\n\
\n\
// This is a generated file, DO NOT EDIT IT.\n\
// To add new functions, see src/common/function/gen_vector_functions.py\n\
\n\
#include "gen_cpp/opcode/vector-functions.h"\n\
#include "exprs/case_expr.h"\n\
#include "exprs/expr.h"\n\
#include "exprs/in_predicate.h"\n\
#include "runtime/string_value.hpp"\n\
#include "runtime/vectorized_row_batch.h"\n\
#include "util/string_parser.hpp"\n\
#include <boost/lexical_cast.hpp>\n\
\n\
using namespace boost;\n\
using namespace std;\n\
\n\
namespace palo { \n\
\n'
cc_epilogue = '\
}\n'
h_preamble = '\
/***************************************************************************\n\
* \n\
* Copyright (c) 2014 Baidu.com, Inc. All Rights Reserved\n\
* \n\
**************************************************************************/\n\
\n\
// This is a generated file, DO NOT EDIT IT.\n\
// To add new functions, see src/common/function/gen_vector_functions.py\n\
\n\
#ifndef BDG_PALO_OPCODE_VECTOR_FUNCTIONS_H\n\
#define BDG_PALO_OPCODE_VECTOR_FUNCTIONS_H\n\
\n\
namespace palo {\n\
class Expr;\n\
class OpcodeRegistry;\n\
class VectorizedRowBatch;\n\
\n\
class VectorComputeFunctions {\n\
public:\n'
h_epilogue = '\
};\n\
\n\
}\n\
\n\
#endif\n'
python_preamble = '\
#!/usr/bin/env python\n\
# Modifications copyright (C) 2017, Baidu.com, Inc. \n\
# Copyright 2017 The Apache Software Foundation \n\
# \n\
# Licensed to the Apache Software Foundation (ASF) under one \n\
# or more contributor license agreements. See the NOTICE file \n\
# distributed with this work for additional information \n\
# regarding copyright ownership. The ASF licenses this file \n\
# to you under the Apache License, Version 2.0 (the \n\
# "License"); you may not use this file except in compliance \n\
# with the License. You may obtain a copy of the License at \n\
# \n\
# http://www.apache.org/licenses/LICENSE-2.0\n\
# \n\
# Unless required by applicable law or agreed to in writing, software\n\
# distributed under the License is distributed on an "AS IS" BASIS,\n\
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\
# See the License for the specific language governing permissions and\n\
# limitations under the License.\n\
\n\
# This is a generated file, DO NOT EDIT IT.\n\
# To add new functions, see src/common/function/gen_vector_functions.py\n\
\n\
functions = [\n'
python_epilogue = ']'
header_template = string.Template("\
static bool ${fn_signature}(\n\
Expr* e, VectorizedRowBatch* batch);\n")
BE_PATH = "../gen_cpp/opcode/"
if not os.path.exists(BE_PATH):
os.makedirs(BE_PATH)
def initialize_sub(op, return_type, arg_types):
"""
Expand the signature data for template substitution. Returns
a dictionary with all the entries for all the templates used in this script
"""
sub = {}
sub["fn_name"] = op
sub["fn_signature"] = op
sub["return_type"] = return_type
sub["args"] = ""
if op in native_ops:
sub["native_op"] = native_ops[op]
for idx in range(0, len(arg_types)):
arg = arg_types[idx]
sub["fn_signature"] += "_" + native_types[arg]
sub["native_type" + repr(idx + 1)] = implemented_types[arg]
sub["args"] += "'" + arg + "', "
return sub
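# Worked example (editor's illustration): expanding Filter_Eq over two INT
# arguments produces the substitution entries consumed by the templates
# above.
#   initialize_sub('Filter_Eq', 'BOOLEAN', ['INT', 'INT'])
#   => {'fn_name': 'Filter_Eq',
#       'fn_signature': 'Filter_Eq_int_int',
#       'return_type': 'BOOLEAN',
#       'args': "'INT', 'INT', ",
#       'native_op': '==',
#       'native_type1': 'int32_t',
#       'native_type2': 'int32_t'}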
if __name__ == "__main__":
h_file = open(BE_PATH + 'vector-functions.h', 'w')
cc_file = open(BE_PATH + 'vector-functions.cc', 'w')
python_file = open('generated_vector_functions.py', 'w')
h_file.write(h_preamble)
cc_file.write(cc_preamble)
python_file.write(python_preamble)
# Generate functions and headers
for func_data in functions:
op = func_data[0]
# If a specific template has been specified, use that one.
if len(func_data) >= 4:
template = func_data[3]
else:
# Skip functions with no template (shouldn't be auto-generated)
if not op in templates:
continue
template = templates[op]
# Expand all arguments
return_types = []
for ret in func_data[1]:
for t in types[ret]:
return_types.append(t)
signatures = []
for args in func_data[2]:
expanded_arg = []
for arg in args:
for t in types[arg]:
expanded_arg.append(t)
signatures.append(expanded_arg)
# Put arguments into substitution structure
num_functions = 0
for args in signatures:
num_functions = max(num_functions, len(args))
num_functions = max(num_functions, len(return_types))
num_args = len(signatures)
# Validate the input is correct
if len(return_types) != 1 and len(return_types) != num_functions:
print "Invalid Declaration: " + func_data
sys.exit(1)
for args in signatures:
if len(args) != 1 and len(args) != num_functions:
print "Invalid Declaration: " + func_data
sys.exit(1)
# Iterate over every function signature to generate
for i in range(0, num_functions):
if len(return_types) == 1:
return_type = return_types[0]
else:
return_type = return_types[i]
arg_types = []
for j in range(0, num_args):
if len(signatures[j]) == 1:
arg_types.append(signatures[j][0])
else:
arg_types.append(signatures[j][i])
# At this point, 'return_type' is a single type and 'arg_types'
# is a list of single types
sub = initialize_sub(op, return_type, arg_types)
h_file.write(header_template.substitute(sub))
cc_file.write(template.substitute(sub))
python_file.write(python_template.substitute(sub))
h_file.write(h_epilogue)
cc_file.write(cc_epilogue)
python_file.write(python_epilogue)
h_file.close()
cc_file.close()
python_file.close()
|
|
from django.core.exceptions import ValidationError
from rest_framework import exceptions
from rest_framework import serializers as ser
from rest_framework.fields import empty
from rest_framework.exceptions import ValidationError as DRFValidationError
from api.base.exceptions import Conflict, JSONAPIException
from api.base.serializers import (
JSONAPISerializer, IDField, TypeField, HideIfNotWithdrawal, NoneIfWithdrawal,
LinksField, RelationshipField, VersionedDateTimeField, JSONAPIListField,
NodeFileHyperLinkField, WaterbutlerLink, HideIfPreprint,
LinkedNodesRelationshipSerializer,
)
from api.base.utils import absolute_reverse, get_user_auth
from api.base.parsers import NO_DATA_ERROR
from api.nodes.serializers import (
NodeCitationSerializer,
NodeLicenseSerializer,
NodeContributorsSerializer,
NodeStorageProviderSerializer,
NodeContributorsCreateSerializer,
NodeContributorDetailSerializer,
get_license_details,
NodeTagField,
)
from api.base.metrics import MetricsSerializerMixin
from api.taxonomies.serializers import TaxonomizableSerializerMixin
from framework.exceptions import PermissionsError
from website.project import signals as project_signals
from osf.exceptions import NodeStateError
from osf.models import BaseFileNode, Preprint, PreprintProvider, Node, NodeLicense
from osf.utils import permissions as osf_permissions
class PrimaryFileRelationshipField(RelationshipField):
def get_object(self, file_id):
return BaseFileNode.load(file_id)
def to_internal_value(self, data):
file = self.get_object(data)
return {'primary_file': file}
class NodeRelationshipField(RelationshipField):
def get_object(self, node_id):
try:
return Node.load(node_id)
except AttributeError:
raise exceptions.ValidationError(detail='Node not correctly specified.')
def to_internal_value(self, data):
node = self.get_object(data)
return {'node': node}
class PreprintProviderRelationshipField(RelationshipField):
def get_object(self, node_id):
return PreprintProvider.load(node_id)
def to_internal_value(self, data):
provider = self.get_object(data)
return {'provider': provider}
class PreprintLicenseRelationshipField(RelationshipField):
def to_internal_value(self, license_id):
license = NodeLicense.load(license_id)
if license:
return {'license_type': license}
raise exceptions.NotFound('Unable to find specified license.')
class PreprintSerializer(TaxonomizableSerializerMixin, MetricsSerializerMixin, JSONAPISerializer):
filterable_fields = frozenset([
'id',
'date_created',
'date_modified',
'date_published',
'original_publication_date',
'provider',
'is_published',
'subjects',
'reviews_state',
'node_is_public',
])
available_metrics = frozenset([
'downloads',
'views',
])
id = IDField(source='_id', read_only=True)
type = TypeField()
date_created = VersionedDateTimeField(source='created', read_only=True)
date_modified = VersionedDateTimeField(source='modified', read_only=True)
date_published = VersionedDateTimeField(read_only=True)
original_publication_date = VersionedDateTimeField(required=False, allow_null=True)
doi = ser.CharField(source='article_doi', required=False, allow_null=True)
title = ser.CharField(required=True, max_length=512)
description = ser.CharField(required=False, allow_blank=True, allow_null=True)
is_published = NoneIfWithdrawal(ser.BooleanField(required=False))
is_preprint_orphan = NoneIfWithdrawal(ser.BooleanField(read_only=True))
license_record = NodeLicenseSerializer(required=False, source='license')
tags = JSONAPIListField(child=NodeTagField(), required=False)
node_is_public = ser.BooleanField(read_only=True, source='node__is_public', help_text='Is supplementary project public?')
preprint_doi_created = NoneIfWithdrawal(VersionedDateTimeField(read_only=True))
date_withdrawn = VersionedDateTimeField(read_only=True, allow_null=True)
withdrawal_justification = HideIfNotWithdrawal(ser.CharField(required=False, read_only=True, allow_blank=True))
current_user_permissions = ser.SerializerMethodField(
help_text='List of strings representing the permissions '
'for the current user on this preprint.',
)
public = ser.BooleanField(source='is_public', required=False, read_only=True)
contributors = RelationshipField(
related_view='preprints:preprint-contributors',
related_view_kwargs={'preprint_id': '<_id>'},
)
reviews_state = ser.CharField(source='machine_state', read_only=True, max_length=15)
date_last_transitioned = NoneIfWithdrawal(VersionedDateTimeField(read_only=True))
citation = NoneIfWithdrawal(RelationshipField(
related_view='preprints:preprint-citation',
related_view_kwargs={'preprint_id': '<_id>'},
))
identifiers = NoneIfWithdrawal(RelationshipField(
related_view='preprints:identifier-list',
related_view_kwargs={'preprint_id': '<_id>'},
))
node = NoneIfWithdrawal(NodeRelationshipField(
related_view='nodes:node-detail',
related_view_kwargs={'node_id': '<node._id>'},
read_only=False,
many=False,
self_view='preprints:preprint-node-relationship',
self_view_kwargs={'preprint_id': '<_id>'},
))
license = PreprintLicenseRelationshipField(
related_view='licenses:license-detail',
related_view_kwargs={'license_id': '<license.node_license._id>'},
read_only=False,
)
provider = PreprintProviderRelationshipField(
related_view='providers:preprint-providers:preprint-provider-detail',
related_view_kwargs={'provider_id': '<provider._id>'},
read_only=False,
)
files = NoneIfWithdrawal(RelationshipField(
related_view='preprints:preprint-storage-providers',
related_view_kwargs={'preprint_id': '<_id>'},
))
primary_file = NoneIfWithdrawal(PrimaryFileRelationshipField(
related_view='files:file-detail',
related_view_kwargs={'file_id': '<primary_file._id>'},
read_only=False,
))
review_actions = RelationshipField(
related_view='preprints:preprint-review-action-list',
related_view_kwargs={'preprint_id': '<_id>'},
)
requests = NoneIfWithdrawal(RelationshipField(
related_view='preprints:preprint-request-list',
related_view_kwargs={'preprint_id': '<_id>'},
))
links = LinksField(
{
'self': 'get_preprint_url',
'html': 'get_absolute_html_url',
'doi': 'get_article_doi_url',
'preprint_doi': 'get_preprint_doi_url',
},
)
class Meta:
type_ = 'preprints'
@property
def subjects_related_view(self):
# Overrides TaxonomizableSerializerMixin
return 'preprints:preprint-subjects'
@property
def subjects_view_kwargs(self):
# Overrides TaxonomizableSerializerMixin
return {'preprint_id': '<_id>'}
def get_preprint_url(self, obj):
return absolute_reverse('preprints:preprint-detail', kwargs={'preprint_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version']})
def get_absolute_url(self, obj):
return self.get_preprint_url(obj)
def get_article_doi_url(self, obj):
return 'https://doi.org/{}'.format(obj.article_doi) if obj.article_doi else None
def get_current_user_permissions(self, obj):
user = self.context['request'].user
return obj.get_permissions(user)[::-1]
def get_preprint_doi_url(self, obj):
doi = None
doi_identifier = obj.get_identifier('doi')
if doi_identifier:
doi = doi_identifier.value
# if a preprint hasn't been published yet, don't show the DOI prematurely
elif obj.is_published:
client = obj.get_doi_client()
doi = client.build_doi(preprint=obj) if client else None
return 'https://doi.org/{}'.format(doi) if doi else None
def update(self, preprint, validated_data):
assert isinstance(preprint, Preprint), 'You must specify a valid preprint to be updated'
auth = get_user_auth(self.context['request'])
if not preprint.has_permission(auth.user, osf_permissions.WRITE):
raise exceptions.PermissionDenied(detail='User must have admin or write permissions to update a preprint.')
published = validated_data.pop('is_published', None)
if published and preprint.provider.is_reviewed:
raise Conflict('{} uses a moderation workflow, so preprints must be submitted for review instead of published directly. Submit a preprint by creating a `submit` Action at {}'.format(
preprint.provider.name,
absolute_reverse(
'preprints:preprint-review-action-list', kwargs={
'version': self.context['request'].parser_context['kwargs']['version'],
'preprint_id': preprint._id,
},
),
))
save_preprint = False
recently_published = False
primary_file = validated_data.pop('primary_file', None)
if primary_file:
self.set_field(preprint.set_primary_file, primary_file, auth)
save_preprint = True
old_tags = set(preprint.tags.values_list('name', flat=True))
if 'tags' in validated_data:
current_tags = set(validated_data.pop('tags', []))
elif self.partial:
current_tags = set(old_tags)
else:
current_tags = set()
for new_tag in (current_tags - old_tags):
preprint.add_tag(new_tag, auth=auth)
for deleted_tag in (old_tags - current_tags):
preprint.remove_tag(deleted_tag, auth=auth)
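# Illustration (editor's note): with old_tags == {'a', 'b'} and a
# non-partial update supplying tags == ['b', 'c'], the set differences
# above add 'c', remove 'a', and leave 'b' untouched.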
if 'node' in validated_data:
node = validated_data.pop('node', None)
self.set_field(preprint.set_supplemental_node, node, auth)
save_preprint = True
if 'subjects' in validated_data:
subjects = validated_data.pop('subjects', None)
self.update_subjects(preprint, subjects, auth)
save_preprint = True
if 'title' in validated_data:
title = validated_data['title']
self.set_field(preprint.set_title, title, auth)
save_preprint = True
if 'description' in validated_data:
description = validated_data['description']
self.set_field(preprint.set_description, description, auth)
save_preprint = True
if 'article_doi' in validated_data:
preprint.article_doi = validated_data['article_doi']
save_preprint = True
if 'license_type' in validated_data or 'license' in validated_data:
license_details = get_license_details(preprint, validated_data)
self.set_field(preprint.set_preprint_license, license_details, auth)
save_preprint = True
if 'original_publication_date' in validated_data:
preprint.original_publication_date = validated_data['original_publication_date'] or None
save_preprint = True
if published is not None:
if not preprint.primary_file:
raise exceptions.ValidationError(detail='A valid primary_file must be set before publishing a preprint.')
self.set_field(preprint.set_published, published, auth)
save_preprint = True
recently_published = published
preprint.set_privacy('public', log=False, save=True)
if save_preprint:
preprint.save()
if recently_published:
for author in preprint.contributors:
if author != auth.user:
project_signals.contributor_added.send(preprint, contributor=author, auth=auth, email_template='preprint')
return preprint
def set_field(self, func, val, auth, save=False):
try:
func(val, auth)
except PermissionsError as e:
raise exceptions.PermissionDenied(detail=str(e))
except (ValueError, ValidationError, NodeStateError) as e:
raise exceptions.ValidationError(detail=str(e))
class PreprintCreateSerializer(PreprintSerializer):
# Overrides PreprintSerializer to make id nullable, adds `create`
id = IDField(source='_id', required=False, allow_null=True)
def create(self, validated_data):
creator = self.context['request'].user
provider = validated_data.pop('provider', None)
if not provider:
raise exceptions.ValidationError(detail='You must specify a valid provider to create a preprint.')
title = validated_data.pop('title')
description = validated_data.pop('description', '')
preprint = Preprint(provider=provider, title=title, creator=creator, description=description)
preprint.save()
return self.update(preprint, validated_data)
class PreprintCitationSerializer(NodeCitationSerializer):
class Meta:
type_ = 'preprint-citation'
class PreprintContributorsSerializer(NodeContributorsSerializer):
""" Separate from UserSerializer due to necessity to override almost every field as read only
"""
preprint = RelationshipField(
related_view='preprints:preprint-detail',
related_view_kwargs={'preprint_id': '<preprint._id>'},
)
node = HideIfPreprint(RelationshipField(
related_view='nodes:node-detail',
related_view_kwargs={'node_id': '<node._id>'},
))
class Meta:
type_ = 'contributors'
def get_absolute_url(self, obj):
return absolute_reverse(
'preprints:preprint-contributor-detail',
kwargs={
'user_id': obj.user._id,
'preprint_id': self.context['request'].parser_context['kwargs']['preprint_id'],
'version': self.context['request'].parser_context['kwargs']['version'],
},
)
class PreprintContributorsCreateSerializer(NodeContributorsCreateSerializer, PreprintContributorsSerializer):
"""
Overrides PreprintContributorsSerializer to add email, full_name, send_email, and non-required index and users fields.
id and index are redefined because of the two serializers we inherit.
"""
id = IDField(source='_id', required=False, allow_null=True)
index = ser.IntegerField(required=False, source='_order')
email_preferences = ['preprint', 'false']
class PreprintContributorDetailSerializer(NodeContributorDetailSerializer, PreprintContributorsSerializer):
"""
Overrides NodeContributorDetailSerializer to set the preprint instead of the node.
id and index are redefined because of the two serializers we inherit.
"""
id = IDField(required=True, source='_id')
index = ser.IntegerField(required=False, read_only=False, source='_order')
class PreprintStorageProviderSerializer(NodeStorageProviderSerializer):
node = HideIfPreprint(ser.CharField(source='node_id', read_only=True))
preprint = ser.CharField(source='node_id', read_only=True)
files = NodeFileHyperLinkField(
related_view='preprints:preprint-files',
related_view_kwargs={'preprint_id': '<node._id>'},
kind='folder',
never_embed=True,
)
links = LinksField({
'upload': WaterbutlerLink(),
})
class PreprintNodeRelationshipSerializer(LinkedNodesRelationshipSerializer):
data = ser.DictField()
def run_validation(self, data=empty):
"""
Overrides run_validation.
JSONAPIOnetoOneRelationshipParser parses data into {id: None, type: None} if data is null,
which is what this endpoint expects.
"""
if data == {}:
raise JSONAPIException(source={'pointer': '/data'}, detail=NO_DATA_ERROR)
if data.get('type', None) is not None and data.get('id', None) is not None:
raise DRFValidationError({'data': 'Data must be null. This endpoint can only be used to unset the supplemental project.'}, 400)
return data
def make_instance_obj(self, obj):
# Convenience method to format instance based on view's get_object
return {
'data': None,
'self': obj,
}
def update(self, instance, validated_data):
auth = get_user_auth(self.context['request'])
preprint = instance['self']
preprint.unset_supplemental_node(auth=auth)
preprint.save()
return self.make_instance_obj(preprint)
links = LinksField({
'self': 'get_self_url',
})
|
|
#!/usr/bin/env python
# Copyright (C) 2015 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Manage Jenkins plugin module registry.
import logging
import operator
import pkg_resources
import re
import types
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.formatter import deep_format
from jenkins_jobs.local_yaml import Jinja2Loader
__all__ = [
"ModuleRegistry"
]
logger = logging.getLogger(__name__)
class ModuleRegistry(object):
_entry_points_cache = {}
def __init__(self, jjb_config, plugins_list=None):
self.modules = []
self.modules_by_component_type = {}
self.handlers = {}
self.jjb_config = jjb_config
self.masked_warned = {}
if plugins_list is None:
self.plugins_dict = {}
else:
self.plugins_dict = self._get_plugins_info_dict(plugins_list)
for entrypoint in pkg_resources.iter_entry_points(
group='jenkins_jobs.modules'):
Mod = entrypoint.load()
mod = Mod(self)
self.modules.append(mod)
self.modules.sort(key=operator.attrgetter('sequence'))
if mod.component_type is not None:
self.modules_by_component_type[mod.component_type] = entrypoint
@staticmethod
def _get_plugins_info_dict(plugins_list):
def mutate_plugin_info(plugin_info):
"""
We perform mutations on a single member of plugin_info here, then
return a dictionary with the longName and shortName of the plugin
mapped to its plugin info dictionary.
"""
version = plugin_info.get('version', '0')
plugin_info['version'] = re.sub(r'(.*)-(?:SNAPSHOT|BETA).*',
r'\g<1>.preview', version)
aliases = []
for key in ['longName', 'shortName']:
value = plugin_info.get(key, None)
if value is not None:
aliases.append(value)
plugin_info_dict = {}
for name in aliases:
plugin_info_dict[name] = plugin_info
return plugin_info_dict
list_of_dicts = [mutate_plugin_info(v) for v in plugins_list]
plugins_info_dict = {}
for d in list_of_dicts:
plugins_info_dict.update(d)
return plugins_info_dict
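    # Worked example (editor's sketch; the plugin values are illustrative):
    #   _get_plugins_info_dict([{'longName': 'Git plugin',
    #                            'shortName': 'git',
    #                            'version': '3.0.0-SNAPSHOT (private)'}])
    # returns the same mutated info dict keyed under both names:
    #   {'Git plugin': {..., 'version': '3.0.0.preview'},
    #    'git': {..., 'version': '3.0.0.preview'}}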
def get_plugin_info(self, plugin_name):
""" This method is intended to provide information about plugins within
a given module's implementation of Base.gen_xml. The return value is a
dictionary with data obtained directly from a running Jenkins instance.
This allows module authors to differentiate generated XML output based
on information such as specific plugin versions.
:arg string plugin_name: Either the shortName or longName of a plugin
as seen in a query that looks like:
``http://<jenkins-hostname>/pluginManager/api/json?pretty&depth=2``
During a 'test' run, it is possible to override JJB's query to a live
Jenkins instance by passing it a path to a file containing a YAML list
of dictionaries that mimics the plugin properties you want your test
output to reflect::
jenkins-jobs test -p /path/to/plugins-info.yaml
Below is example YAML that might be included in
/path/to/plugins-info.yaml.
.. literalinclude:: /../../tests/cmd/fixtures/plugins-info.yaml
"""
return self.plugins_dict.get(plugin_name, {})
def registerHandler(self, category, name, method):
cat_dict = self.handlers.get(category, {})
if not cat_dict:
self.handlers[category] = cat_dict
cat_dict[name] = method
def getHandler(self, category, name):
return self.handlers[category][name]
@property
def parser_data(self):
return self.__parser_data
def set_parser_data(self, parser_data):
self.__parser_data = parser_data
def dispatch(self, component_type, xml_parent,
component, template_data={}):
"""This is a method that you can call from your implementation of
Base.gen_xml or component. It allows modules to define a type
of component, and benefit from extensibility via Python
entry points and Jenkins Job Builder :ref:`Macros <macro>`.
:arg string component_type: the name of the component
(e.g., `builder`)
:arg Element xml_parent: the parent XML element
:arg component: the component definition -- either a plain name string
or a singleton dict mapping the name to its arguments
:arg dict template_data: values that should be interpolated into
the component definition
See :py:class:`jenkins_jobs.modules.base.Base` for how to register
components of a module.
See the Publishers module for a simple example of how to use
this method.
"""
if component_type not in self.modules_by_component_type:
raise JenkinsJobsException("Unknown component type: "
"'{0}'.".format(component_type))
entry_point = self.modules_by_component_type[component_type]
component_list_type = entry_point.load().component_list_type
if isinstance(component, dict):
# The component is a singleton dictionary of name: dict(args)
name, component_data = next(iter(component.items()))
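# e.g. (editor's note) component == {'shell': {'command': 'make'}}
# yields name == 'shell' and component_data == {'command': 'make'}.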
if template_data or isinstance(component_data, Jinja2Loader):
# Template data contains values that should be interpolated
# into the component definition. To handle Jinja2 templates
# that don't contain any variables, we also deep format those.
try:
component_data = deep_format(
component_data, template_data,
self.jjb_config.yamlparser['allow_empty_variables'])
except Exception:
logging.error(
"Failure formatting component ('%s') data '%s'",
name, component_data)
raise
else:
# The component is a simple string name, eg "run-tests"
name = component
component_data = {}
# Look for a component function defined in an entry point
eps = self._entry_points_cache.get(component_list_type)
if eps is None:
module_eps = []
# auto build entry points by inferring from base component_types
mod = pkg_resources.EntryPoint(
"__all__", entry_point.module_name, dist=entry_point.dist)
Mod = mod.load()
func_eps = [Mod.__dict__.get(a) for a in dir(Mod)
if isinstance(Mod.__dict__.get(a),
types.FunctionType)]
for func_ep in func_eps:
try:
# extract entry point based on docstring
name_line = func_ep.__doc__.split('\n')
if not name_line[0].startswith('yaml:'):
logger.debug("Ignoring '%s' as an entry point" %
name_line)
continue
ep_name = name_line[0].split(' ')[1]
except (AttributeError, IndexError):
# AttributeError: raised when the docstring is None, so
# split cannot be called on it.
# IndexError: raised when name_line contains nothing
# after the 'yaml:' prefix.
logger.debug("Not including func '%s' as an entry point"
% func_ep.__name__)
continue
module_eps.append(
pkg_resources.EntryPoint(
ep_name, entry_point.module_name,
dist=entry_point.dist, attrs=(func_ep.__name__,)))
logger.debug(
"Adding auto EP '%s=%s:%s'" %
(ep_name, entry_point.module_name, func_ep.__name__))
# load from explicitly defined entry points
module_eps.extend(list(pkg_resources.iter_entry_points(
group='jenkins_jobs.{0}'.format(component_list_type))))
eps = {}
for module_ep in module_eps:
if module_ep.name in eps:
raise JenkinsJobsException(
"Duplicate entry point found for component type: "
"'{0}', '{0}',"
"name: '{1}'".format(component_type, name))
eps[module_ep.name] = module_ep
# cache both sets of entry points
self._entry_points_cache[component_list_type] = eps
logger.debug("Cached entry point group %s = %s",
component_list_type, eps)
# check for macro first
component = self.parser_data.get(component_type, {}).get(name)
if component:
if name in eps and name not in self.masked_warned:
self.masked_warned[name] = True
logger.warning(
"You have a macro ('%s') defined for '%s' "
"component type that is masking an inbuilt "
"definition" % (name, component_type))
for b in component[component_list_type]:
# Pass component_data in as template data to this function
# so that if the macro is invoked with arguments,
# the arguments are interpolated into the real defn.
self.dispatch(component_type, xml_parent, b, component_data)
elif name in eps:
func = eps[name].load()
func(self, xml_parent, component_data)
else:
raise JenkinsJobsException("Unknown entry point or macro '{0}' "
"for component type: '{1}'.".
format(name, component_type))
|
|
# Copyright 2019 Verizon Media
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_db import exception as odb_exceptions
from oslo_utils import uuidutils
from octavia.common import constants
import octavia.common.context
from octavia.tests.functional.api.v2 import base
class TestAvailabilityZoneProfiles(base.BaseAPITest):
root_tag = 'availability_zone_profile'
root_tag_list = 'availability_zone_profiles'
root_tag_links = 'availability_zone_profile_links'
def _assert_request_matches_response(self, req, resp, **optionals):
self.assertTrue(uuidutils.is_uuid_like(resp.get('id')))
self.assertEqual(req.get('name'), resp.get('name'))
self.assertEqual(req.get(constants.PROVIDER_NAME),
resp.get(constants.PROVIDER_NAME))
self.assertEqual(req.get(constants.AVAILABILITY_ZONE_DATA),
resp.get(constants.AVAILABILITY_ZONE_DATA))
def test_empty_list(self):
response = self.get(self.AZPS_PATH)
api_list = response.json.get(self.root_tag_list)
self.assertEqual([], api_list)
def test_create(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body)
api_azp = response.json.get(self.root_tag)
self._assert_request_matches_response(az_json, api_azp)
def test_create_with_missing_name(self):
az_json = {constants.PROVIDER_NAME: 'pr1',
constants.AVAILABILITY_ZONE_DATA: '{"x": "y"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=400)
err_msg = ("Invalid input for field/attribute name. Value: "
"'None'. Mandatory field missing.")
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_create_with_missing_provider(self):
az_json = {'name': 'xyz',
constants.AVAILABILITY_ZONE_DATA: '{"x": "y"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=400)
err_msg = ("Invalid input for field/attribute provider_name. "
"Value: 'None'. Mandatory field missing.")
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_create_with_missing_availability_zone_data(self):
az_json = {'name': 'xyz', constants.PROVIDER_NAME: 'pr1'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=400)
err_msg = ("Invalid input for field/attribute availability_zone_data. "
"Value: 'None'. Mandatory field missing.")
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_create_with_empty_availability_zone_data(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body)
api_azp = response.json.get(self.root_tag)
self._assert_request_matches_response(az_json, api_azp)
def test_create_with_long_name(self):
az_json = {'name': 'n' * 256, constants.PROVIDER_NAME: 'test1',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_create_with_long_provider(self):
az_json = {'name': 'name1', constants.PROVIDER_NAME: 'n' * 256,
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_create_with_long_availability_zone_data(self):
az_json = {'name': 'name1', constants.PROVIDER_NAME: 'amp',
constants.AVAILABILITY_ZONE_DATA: 'n' * 4097}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_create_authorized(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.post(self.AZPS_PATH, body)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
api_azp = response.json.get(self.root_tag)
self._assert_request_matches_response(az_json, api_azp)
def test_create_not_authorized(self):
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
az_json = {'name': 'name',
constants.PROVIDER_NAME: 'xyz',
constants.AVAILABILITY_ZONE_DATA: '{"x": "y"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=403)
api_azp = response.json
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual(self.NOT_AUTHORIZED_BODY, api_azp)
def test_create_db_failure(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
with mock.patch(
"octavia.db.repositories.AvailabilityZoneProfileRepository."
"create") as mock_create:
mock_create.side_effect = Exception
self.post(self.AZPS_PATH, body, status=500)
mock_create.side_effect = odb_exceptions.DBDuplicateEntry
self.post(self.AZPS_PATH, body, status=409)
def test_create_with_invalid_json(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{hello: "world"}'}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_get(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
response = self.get(
self.AZP_PATH.format(
azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('name', response.get('name'))
self.assertEqual(azp.get('id'), response.get('id'))
def test_get_one_deleted_id(self):
response = self.get(self.AZP_PATH.format(azp_id=constants.NIL_UUID),
status=404)
self.assertEqual('Availability Zone Profile {} not found.'.format(
constants.NIL_UUID), response.json.get('faultstring'))
def test_get_one_fields_filter(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id')), params={
'fields': ['id', constants.PROVIDER_NAME]}
).json.get(self.root_tag)
self.assertEqual(azp.get('id'), response.get('id'))
self.assertIn(u'id', response)
self.assertIn(constants.PROVIDER_NAME, response)
self.assertNotIn(u'name', response)
self.assertNotIn(constants.AVAILABILITY_ZONE_DATA, response)
def test_get_authorized(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.get(
self.AZP_PATH.format(
azp_id=azp.get('id'))).json.get(self.root_tag)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual('name', response.get('name'))
self.assertEqual(azp.get('id'), response.get('id'))
def test_get_not_authorized(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
self.get(self.AZP_PATH.format(azp_id=azp.get('id')), status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
def test_get_all(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
ref_fp_1 = {u'availability_zone_data': u'{"compute_zone": "my_az_1"}',
u'id': fp1.get('id'), u'name': u'test1',
constants.PROVIDER_NAME: u'noop_driver'}
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
ref_fp_2 = {u'availability_zone_data': u'{"compute_zone": "my_az_1"}',
u'id': fp2.get('id'), u'name': u'test2',
constants.PROVIDER_NAME: u'noop_driver-alt'}
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
response = self.get(self.AZPS_PATH)
api_list = response.json.get(self.root_tag_list)
self.assertEqual(2, len(api_list))
self.assertIn(ref_fp_1, api_list)
self.assertIn(ref_fp_2, api_list)
def test_get_all_fields_filter(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
response = self.get(self.AZPS_PATH, params={
'fields': ['id', 'name']})
api_list = response.json.get(self.root_tag_list)
self.assertEqual(2, len(api_list))
for profile in api_list:
self.assertIn(u'id', profile)
self.assertIn(u'name', profile)
self.assertNotIn(constants.PROVIDER_NAME, profile)
self.assertNotIn(constants.AVAILABILITY_ZONE_DATA, profile)
def test_get_all_authorized(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.get(self.AZPS_PATH)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
api_list = response.json.get(self.root_tag_list)
self.assertEqual(2, len(api_list))
def test_get_all_not_authorized(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
self.get(self.AZPS_PATH, status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
def test_update(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver-alt',
response.get(constants.PROVIDER_NAME))
self.assertEqual('{"hello": "world"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_deleted_id(self):
update_data = {'name': 'fake_profile'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=constants.NIL_UUID),
body, status=404)
self.assertEqual('Availability Zone Profile {} not found.'.format(
constants.NIL_UUID), response.json.get('faultstring'))
def test_update_nothing(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
body = self._build_body({})
self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_name_none(self):
self._test_update_param_none(constants.NAME)
def test_update_provider_name_none(self):
self._test_update_param_none(constants.PROVIDER_NAME)
def test_update_availability_zone_data_none(self):
self._test_update_param_none(constants.AVAILABILITY_ZONE_DATA)
def _test_update_param_none(self, param_name):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
expect_error_msg = ("None is not a valid option for %s" %
param_name)
body = self._build_body({param_name: None})
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body,
status=400)
self.assertEqual(expect_error_msg, response.json['faultstring'])
def test_update_no_availability_zone_data(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver-alt',
response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_authorized(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')),
body)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver-alt',
response.get(constants.PROVIDER_NAME))
self.assertEqual('{"hello": "world"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_not_authorized(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile', constants.PROVIDER_NAME: 'amp',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')),
body, status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_in_use(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
self.create_availability_zone(
'name1', 'description', azp.get('id'), True)
# Test updating provider while in use is not allowed
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body,
status=409)
err_msg = ("Availability Zone Profile {} is in use and cannot be "
"modified.".format(azp.get('id')))
self.assertEqual(err_msg, response.json.get('faultstring'))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
# Test updating availability zone data while in use is not allowed
update_data = {'name': 'the_profile',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body,
status=409)
err_msg = ("Availability Zone Profile {} is in use and cannot be "
"modified.".format(azp.get('id')))
self.assertEqual(err_msg, response.json.get('faultstring'))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
# Test that you can still update the name when in use
update_data = {'name': 'the_profile'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_delete(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.delete(self.AZP_PATH.format(azp_id=azp.get('id')))
response = self.get(self.AZP_PATH.format(
azp_id=azp.get('id')), status=404)
err_msg = "Availability Zone Profile %s not found." % azp.get('id')
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_delete_deleted_id(self):
response = self.delete(self.AZP_PATH.format(azp_id=constants.NIL_UUID),
status=404)
self.assertEqual('Availability Zone Profile {} not found.'.format(
constants.NIL_UUID), response.json.get('faultstring'))
def test_delete_nonexistent_id(self):
response = self.delete(self.AZP_PATH.format(azp_id='bogus_id'),
status=404)
self.assertEqual('Availability Zone Profile bogus_id not found.',
response.json.get('faultstring'))
def test_delete_authorized(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
self.delete(self.AZP_PATH.format(azp_id=azp.get('id')))
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
response = self.get(self.AZP_PATH.format(
azp_id=azp.get('id')), status=404)
err_msg = "Availability Zone Profile %s not found." % azp.get('id')
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_delete_not_authorized(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
response = self.delete(self.AZP_PATH.format(
azp_id=azp.get('id')), status=403)
api_azp = response.json
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual(self.NOT_AUTHORIZED_BODY, api_azp)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test1', response.get('name'))
def test_delete_in_use(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.create_availability_zone(
'name1', 'description', azp.get('id'), True)
response = self.delete(self.AZP_PATH.format(azp_id=azp.get('id')),
status=409)
err_msg = ("Availability Zone Profile {} is in use and cannot be "
"modified.".format(azp.get('id')))
self.assertEqual(err_msg, response.json.get('faultstring'))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test1', response.get('name'))
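# A minimal sketch (hypothetical, not part of the original suite): the
# admin-credential override repeated verbatim in the *_authorized tests above
# could be factored into a reusable context manager along these lines.
import contextlib
@contextlib.contextmanager
def _override_policy_credentials(project_id, roles=('load-balancer_member',)):
    """Patch the request context so policy checks see the given credentials."""
    override_credentials = {
        'service_user_id': None,
        'user_domain_id': None,
        'is_admin_project': True,
        'service_project_domain_id': None,
        'service_project_id': None,
        'roles': list(roles),
        'user_id': None,
        'is_admin': True,
        'service_user_domain_id': None,
        'project_domain_id': None,
        'service_roles': [],
        'project_id': project_id}
    with mock.patch.object(octavia.common.context.Context, 'project_id',
                           project_id):
        with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=override_credentials):
            yield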
|
|
from keras import backend as K
from keras import activations
from overrides import overrides
from .word_alignment import WordAlignmentEntailment
from ...tensors.backend import switch, apply_feed_forward
class DecomposableAttentionEntailment(WordAlignmentEntailment):
"""
This layer is a reimplementation of the entailment algorithm described in
"A Decomposable Attention Model for Natural Language Inference", Parikh et
al., 2016. The algorithm has three main steps:
    (1) Attend: Compute dot products between all pairs of projections of words
        in the hypothesis and the premise, and normalize those dot products to
        align each word in the premise to a phrase in the hypothesis and
        vice-versa. These alignments are then used to summarize the aligned
        phrase in the other sentence as a weighted sum. The initial word
        projections are computed using a feed forward NN, F.
(2) Compare: Pass a concatenation of each word in the premise and the
summary of its aligned phrase in the hypothesis through a feed forward
NN, G, to get a projected comparison. Do the same with the hypothesis
and the aligned phrase from the premise.
(3) Aggregate: Sum over the comparisons to get a single vector each for
premise-hypothesis comparison, and hypothesis-premise comparison. Pass
them through a third feed forward NN (H), to get the entailment
decision.
This layer can take either a tuple (premise, hypothesis) or a concatenation
of them as input.
Input:
- Tuple input: a premise sentence and a hypothesis sentence, both with shape ``(batch_size,
sentence_length, embed_dim)`` and masks of shape ``(batch_size, sentence_length)``
- Single input: a single tensor of shape ``(batch_size, sentence_length * 2, embed_dim)``, with
a mask of shape ``(batch_size, sentence_length * 2)``, which we will split in half to get the
premise and hypothesis sentences.
Output:
- Entailment decisions with the given ``output_dim``.
Parameters
----------
num_hidden_layers: int, optional (default=1)
Number of hidden layers in each of the feed forward neural nets described above.
hidden_layer_width: int, optional (default=50)
Width of each hidden layer in each of the feed forward neural nets described above.
hidden_layer_activation: str, optional (default='relu')
Activation for each hidden layer in each of the feed forward neural nets described above.
final_activation: str, optional (default='softmax')
Activation to use for the final output. Should almost certainly be 'softmax'.
output_dim: int, optional (default=3)
Dimensionality of the final output. If this is the last layer in your model, this needs to
be the same as the number of labels you have.
initializer: str, optional (default='uniform')
Will be passed to ``self.add_weight()`` for each of the weight matrices in the feed forward
neural nets described above.
Notes
-----
premise_length = hypothesis_length = sentence_length below.
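    Examples
    --------
    A minimal usage sketch (illustrative only; it assumes embedded premise and
    hypothesis tensors are produced earlier in the model)::
        entailment_layer = DecomposableAttentionEntailment(output_dim=3)
        entailment_decision = entailment_layer([premise_embedding,
                                                hypothesis_embedding])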
"""
def __init__(self,
num_hidden_layers: int=1,
hidden_layer_width: int=50,
hidden_layer_activation: str='relu',
final_activation: str='softmax',
output_dim: int=3,
initializer: str='uniform',
**kwargs):
self.num_hidden_layers = num_hidden_layers
self.hidden_layer_width = hidden_layer_width
self.hidden_layer_activation = hidden_layer_activation
self.final_activation = final_activation
self.output_dim = output_dim
self.initializer = initializer
# Weights will be initialized in the build method.
self.premise_length = None
self.hypothesis_length = None
self.attend_weights = [] # weights related to F
self.compare_weights = [] # weights related to G
self.aggregate_weights = [] # weights related to H
self.scorer = None
super(DecomposableAttentionEntailment, self).__init__(**kwargs)
@overrides
def build(self, input_shape):
'''
This model has three feed forward NNs (F, G and H in the paper). We assume that all three
NNs have the same hyper-parameters: num_hidden_layers, hidden_layer_width and
hidden_layer_activation. That is, F, G and H have the same structure and activations. Their
actual weights are different, though. H has a separate softmax layer at the end.
'''
super(DecomposableAttentionEntailment, self).build(input_shape)
if isinstance(input_shape, list):
# input_shape is a list containing the shapes of the two inputs.
self.premise_length = input_shape[0][1]
self.hypothesis_length = input_shape[1][1]
# input_dim below is embedding dim for the model in the paper since they feed embedded
# input directly into this layer.
self.input_dim = input_shape[0][-1]
else:
# NOTE: This will probably fail silently later on in this code if your premise and
# hypothesis actually have different lengths.
self.premise_length = self.hypothesis_length = int(input_shape[1] / 2)
self.input_dim = input_shape[-1]
attend_input_dim = self.input_dim
compare_input_dim = 2 * self.input_dim
aggregate_input_dim = self.hidden_layer_width * 2
for i in range(self.num_hidden_layers):
self.attend_weights.append(self.add_weight((attend_input_dim, self.hidden_layer_width),
name='%s_attend_%d' % (self.name, i),
initializer=self.initializer))
self.compare_weights.append(self.add_weight((compare_input_dim, self.hidden_layer_width),
name='%s_compare_%d' % (self.name, i),
initializer=self.initializer))
self.aggregate_weights.append(self.add_weight((aggregate_input_dim, self.hidden_layer_width),
name='%s_aggregate_%d' % (self.name, i),
initializer=self.initializer))
attend_input_dim = self.hidden_layer_width
compare_input_dim = self.hidden_layer_width
aggregate_input_dim = self.hidden_layer_width
self.scorer = self.add_weight((self.hidden_layer_width, self.output_dim),
initializer=self.initializer,
name='%s_score' % self.name)
@overrides
def compute_output_shape(self, input_shape):
        # (batch_size, output_dim)
if isinstance(input_shape, list):
return (input_shape[0][0], self.output_dim)
else:
return (input_shape[0], self.output_dim)
@overrides
def compute_mask(self, inputs, mask=None):
# pylint: disable=unused-argument
return None
@overrides
def call(self, inputs, mask=None):
# premise_length = hypothesis_length in the following lines, but the names are kept separate to keep
# track of the axes being normalized.
        # The inputs can be two different tensors, or a concatenation of them.
        # Hence the conditional below.
        if isinstance(inputs, (list, tuple)):
premise_embedding, hypothesis_embedding = inputs
# (batch_size, premise_length), (batch_size, hypothesis_length)
premise_mask, hypothesis_mask = mask
else:
premise_embedding = inputs[:, :self.premise_length, :]
hypothesis_embedding = inputs[:, self.premise_length:, :]
# (batch_size, premise_length), (batch_size, hypothesis_length)
premise_mask = None if mask is None else mask[:, :self.premise_length]
hypothesis_mask = None if mask is None else mask[:, self.premise_length:]
if premise_mask is not None:
premise_embedding = switch(K.expand_dims(premise_mask), premise_embedding,
K.zeros_like(premise_embedding))
if hypothesis_mask is not None:
hypothesis_embedding = switch(K.expand_dims(hypothesis_mask), hypothesis_embedding,
K.zeros_like(hypothesis_embedding))
activation = activations.get(self.hidden_layer_activation)
# (batch_size, premise_length, hidden_dim)
projected_premise = apply_feed_forward(premise_embedding, self.attend_weights, activation)
# (batch_size, hypothesis_length, hidden_dim)
projected_hypothesis = apply_feed_forward(hypothesis_embedding, self.attend_weights, activation)
## Step 1: Attend
p2h_alignment = self._align(projected_premise, projected_hypothesis, premise_mask, hypothesis_mask)
# beta in the paper (equation 2)
# (batch_size, premise_length, emb_dim)
p2h_attention = self._attend(hypothesis_embedding, p2h_alignment, self.premise_length)
h2p_alignment = self._align(projected_hypothesis, projected_premise, hypothesis_mask, premise_mask)
# alpha in the paper (equation 2)
# (batch_size, hyp_length, emb_dim)
h2p_attention = self._attend(premise_embedding, h2p_alignment, self.hypothesis_length)
## Step 2: Compare
# Equation 3 in the paper.
compared_premise = self._compare(premise_embedding, p2h_attention)
compared_hypothesis = self._compare(hypothesis_embedding, h2p_attention)
## Step 3: Aggregate
# Equations 4 and 5.
# (batch_size, hidden_dim * 2)
aggregated_input = K.concatenate([K.sum(compared_premise, axis=1), K.sum(compared_hypothesis, axis=1)])
# (batch_size, hidden_dim)
input_to_scorer = apply_feed_forward(aggregated_input, self.aggregate_weights, activation)
        # (batch_size, output_dim)
final_activation = activations.get(self.final_activation)
scores = final_activation(K.dot(input_to_scorer, self.scorer))
return scores
def _attend(self, target_embedding, s2t_alignment, source_length):
'''
Takes target embedding, and source-target alignment attention and produces a weighted average of the
target embedding per each source word.
target_embedding: (batch_size, target_length, embed_dim)
s2t_alignment: (batch_size, source_length, target_length)
'''
# We have to explicitly tile tensors below because TF does not broadcast values while performing *.
# (batch_size, source_length, target_length, embed_dim)
tiled_s2t_alignment = K.dot(K.expand_dims(s2t_alignment), K.ones((1, self.input_dim)))
# (batch_size, source_length, target_length, embed_dim)
tiled_target_embedding = K.permute_dimensions(K.dot(K.expand_dims(target_embedding),
K.ones((1, source_length))), (0, 3, 1, 2))
# alpha or beta in the paper depending on whether the source is the premise or hypothesis.
# sum((batch_size, src_length, target_length, embed_dim), axis=2) = (batch_size, src_length, emb_dim)
s2t_attention = K.sum(tiled_s2t_alignment * tiled_target_embedding, axis=2)
return s2t_attention
def _compare(self, source_embedding, s2t_attention):
'''
Takes word embeddings from a sentence, and aggregated representations of words aligned to each of those
words from another sentence, and returns a projection of their concatenation.
source_embedding: (batch_size, source_length, embed_dim)
s2t_attention: (batch_size, source_length, embed_dim)
'''
activation = activations.get(self.hidden_layer_activation)
comparison_input = K.concatenate([source_embedding, s2t_attention])
# Equation 3 in the paper.
compared_representation = apply_feed_forward(comparison_input, self.compare_weights, activation)
return compared_representation
@overrides
def get_config(self):
config = {
'num_hidden_layers': self.num_hidden_layers,
'hidden_layer_width': self.hidden_layer_width,
'hidden_layer_activation': self.hidden_layer_activation,
'final_activation': self.final_activation,
'output_dim': self.output_dim,
'initializer': self.initializer,
}
base_config = super(DecomposableAttentionEntailment, self).get_config()
config.update(base_config)
return config
|
|
from datetime import datetime, date
import tempfile
from netCDF4 import Dataset
import os
import numpy as np
from cdo import Cdo
#from math import *
from ocgis.util.shp_process import ShpProcess
#from ocgis.util.shp_cabinet import ShpCabinetIterator
import ocgis
from flyingpigeon import config
from pywps.Process import WPSProcess
import logging
class VBDProcess(WPSProcess):
"""
    Process for Anopheles gambiae population dynamics
"""
def __init__(self):
WPSProcess.__init__(self,
identifier = "vbd",
title="Vector born diseases",
version = "0.2",
metadata= [
{"title": "Climate Service Center", "href": "http://www.climate-service-center.de/"}
],
abstract="Collection of models to calculate variables related to vector born diseases",
statusSupported=True,
storeSupported=True
)
self.netcdf_file = self.addComplexInput(
identifier="netcdf_file",
title="NetCDF File",
abstract="NetCDF File",
minOccurs=1,
maxOccurs=1000,
maxmegabites=5000,
formats=[{"mimeType":"application/x-netcdf"}],
)
self.tommymodel = self.addLiteralInput(
identifier="tommymodel",
title="Tommy Model",
abstract="Population dynamics model for Anopheles Gambiae select tas, huss, pr, evspsblpot and ps as input data",
default=True,
type=type(False),
minOccurs=0,
maxOccurs=1,
)
self.kamilmodel = self.addLiteralInput(
identifier="kamilmodel",
title="Kamil Model",
abstract="nothing is implemented here so far ... ",
default=False,
type=type(False),
minOccurs=0,
maxOccurs=1,
)
self.output = self.addComplexOutput(
identifier="output",
title="anopheles",
abstract="Calculated population dynamics of adult Anopheles Gambiae ",
formats=[{"mimeType":"application/netcdf"}],
asReference=True,
)
def execute(self):
        self.status.set('starting anopheles ...', 0)
nc_files = self.getInputValues(identifier='netcdf_file')
ocgis.env.DIR_SHPCABINET = config.shapefiles_dir()
ocgis.env.DIR_OUTPUT = os.curdir
ocgis.env.OVERWRITE = True
sc = ocgis.ShpCabinet()
geoms = 'continent'
select_ugid = [1] # UGID for Africa
        self.status.set('got ShpCabinet, selected ugid: %s ...' % select_ugid, 12)
# guess var names of files
for nc_file in nc_files:
ds = Dataset(nc_file)
if "tas" in ds.variables.keys():
file_tas = nc_file
elif "huss" in ds.variables.keys():
file_huss = nc_file
elif "ps" in ds.variables.keys():
file_ps = nc_file
elif "pr" in ds.variables.keys():
file_pr = nc_file
elif "evspsblpot" in ds.variables.keys():
file_evspsblpot = nc_file # Dataset(nc_file , 'r')
else:
                raise Exception('input netcdf file has no variable tas|huss|ps|pr|evspsblpot')
        self.status.set('sorted files to appropriate variable names ... done', 15)
#file_land_sea_mask = self.land_sea_mask.getValue()
#logging.debug('get landseamask ... done')
# build the n4 out variable based on pr
rd = ocgis.RequestDataset(file_pr, 'pr') # time_range=[dt1, dt2]
file_n4 = None
try :
(fp_tar, file_n4) = tempfile.mkstemp(dir=".", suffix='.nc')
            prefix = os.path.splitext(os.path.basename(file_n4))[0]
            ops = ocgis.OcgOperations(dataset=rd, geom=geoms, prefix=prefix,
                                      output_format='nc', select_ugid=select_ugid)
file_n4 = ops.execute()
self.status.set('created N4 outfile : %s ...'% (file_n4), 15)
except Exception as e:
self.status.set('"Something awful happened! Africa polygon subset failed for %s' % (file_n4, e), 15)
logging.exception("Something awful happened! Africa polygon subset failed for %s" % (file_n4), e )
nc_tas = Dataset(file_tas,'r')
nc_pr = Dataset(file_pr,'r')
nc_ps = Dataset(file_ps,'r')
nc_huss = Dataset(file_huss,'r')
nc_evspsblpot = Dataset(file_evspsblpot,'r')
nc_n4 = Dataset(file_n4,'a')
#nc_land_sea_mask = Dataset(file_land_sea_mask,'r')
logging.debug('open all files ... done')
#change attributes und variable name here
# att.put.nc(nc_n4, "n4", "units", "NC_FLOAT", -9e+33)
## read in values
tas = np.squeeze(nc_tas.variables["tas"])
pr = np.squeeze(nc_pr.variables["pr"])
ps = np.squeeze(nc_ps.variables["ps"])
huss = np.squeeze(nc_huss.variables["huss"])
evspsblpot = np.squeeze(nc_evspsblpot.variables["evspsblpot"])
logging.debug('read in all variables ... done')
var_n4 = nc_n4.variables["pr"]
n4 = np.zeros(pr.shape, dtype='f')
logging.debug('opended n4 file ... done')
        # define some constants:
Increase_Ta = 0
#Evaporation (> -8)
Increase_Et = 0
#Rainfall (> -150)
Increase_Rt = 0
#Relative Humidity in (-97,+39)
Increase_RH = 0
## Text
deltaT = 6.08
h0 = 97
AT = 1.79*10**6
lamb = 1.5
m = 1000
De = 37.1
Te = 7.7
Nep = 120
alpha1 = 280.486
alpha2 = 0.025616
#if (abs(deltaT)<4):
#b = 0.89
#else:
b = 0.88
logging.debug('configuration ... done; start main loop, now!')
for x in range(0,tas.shape[1],1): #tas.shape[1]
for y in range(0,tas.shape[2],1): #tas.shape[2]
#if (var_n4[0,x,y] >= 0):
#try:
## get the appropriate values
#RH = hurs[:,x,y] * 100
Ta = tas[:,x,y] -273.15
Rt = pr[:,x,y] * 86400.
                Et = np.fabs(evspsblpot[:,x,y] * 86400.)  # in case evspsblpot is stored as a negative value
# calculation of rel. humidity
e_ = ((ps[:,x,y] * huss[:,x,y])/62.2)
es = 6.1078*10.**(7.5*(tas[:,x,y]-273.16)/(237.3+(tas[:,x,y]-273.16)))
RH = (e_ / es) * 100.
#calulation of water temperature
Tw = Ta + deltaT
## Check for Values out of range
Rt[Rt + Increase_Rt < 0] = 0
                Et[Et + Increase_Et < 0] = 0
RH[RH + Increase_RH < 0] = 0
RH[RH + Increase_RH > 100] = 100
                # create appropriate variables
D = np.zeros(Ta.size)
Vt = np.zeros(Ta.size)
p4 = np.zeros(Ta.size)
ft = np.zeros(Ta.size)
Gc_Ta = np.zeros(Ta.size)
F4 = np.zeros(Ta.size)
N23 = np.zeros(Ta.size)
p_DD = np.zeros(Ta.size)
p_Tw = np.zeros([Ta.size,3])
p_Rt = np.zeros([Ta.size,3])
p_D = np.zeros([Ta.size,3])
G = np.zeros([Ta.size,3])
P = np.zeros([Ta.size,4])
p = np.zeros([Ta.size,4])
d = np.zeros([Ta.size,4])
N = np.zeros([Ta.size,4])
## initialize the model
Vt[0] = 1000.
N[0,0] = N[0,1] = N[0,2] = N[0,3] = 100.
# pdb.set_trace()
for t in range(0, (Ta.size -1) ,1):
#print x, y, t
if (Vt[t] == 0) & (Rt[t] == 0):
Vt[t+1] = 0
else:
Vt[t+1] = (Vt[t] + AT*Rt[t]/1000.)*(1 - 3.*Et[t]/h0* (Vt[0]/(Vt[t]+AT*Rt[t]/1000))**(1./3.))
if(Vt[t+1] <= 0):
Vt[t+1] = 0
if (Vt[t+1] == 0):
D[t+1] = D[t] + 1
else:
D[t+1] = 0
beta2 = 4*10**(-6)*RH**2 - 1.09*10**(-3)*RH - 0.0255
beta1 = -2.32 * 10.**(-4.)* RH**2. + 0.0515*RH + 1.06
beta0 = 1.13*10**(-3)*RH**2 - 0.158*RH - 6.61
p4 = np.exp(-1/(beta2*Ta**2. + beta1*Ta + beta0))
d[:,0] = np.where(Vt != 0, 1.011 + 20.212*(1 + (Tw/12.096)**4.839)**(-1), 1.011 + 20.212*(1 + (Ta/12.096)**4.839)**(-1))
d[:,1] = np.where(Vt != 0, 8.130 + 13.794*(1 + (Tw/20.742)**8.946)**(-1) - d[:,0], 8.130 + 13.794*(1 + (Ta/20.742)**8.946)**(-1) - d[:,0])
d[:,2] = np.where(Vt != 0, 8.560 + 20.654*(1 + (Tw/19.759)**6.827)**(-1) - d[:,1] - d[:,0] , 8.560 + 20.654*(1 + (Ta/19.759)**6.827)**(-1) - d[:,1] - d[:,0])
d[:,3] = -1/np.log(p4)
p_Tw[:,0] = np.where(Vt != 0,np.where((Ta >= 14) & (Ta <= 40),np.exp(-1/d[:,0]),0),np.where((Ta >= 25) & (Ta <= 35),np.exp(-1./d[:,0]),0))
p_Tw[:,1] = np.where(Vt != 0,np.where((Tw >= 18) & (Tw <= 32),np.exp(-1/d[:,1]),0),np.where((Tw >= 18) & (Tw <= 32),np.exp(-1/d[:,1]),0))
p_Tw[:,2] = np.where(Vt != 0,np.where((Tw >= 18) & (Tw <= 32),np.exp(-1/d[:,2]),0),np.where((Tw >= 18) & (Tw <= 32),np.exp(-1/d[:,2]),0))
p_Rt[:,0] = np.exp(-0.0242*Rt)
p_Rt[:,1] = np.exp(-0.0127*Rt)
p_Rt[:,2] = np.exp(-0.00618*Rt)
p_D[:,0] = 2*np.exp(-0.405*D)/(1 + np.exp(-0.405*D))
p_D[:,1] = 2*np.exp(-0.855*D)/(1 + np.exp(-0.855*D))
p_D[:,2] = 2*np.exp(-0.602*D)/(1 + np.exp(-0.602*D))
for t in range(0,Rt.size -1,1): #tas.shape[0]
if(Vt[t] != 0):
p_DD[t] = (b*m/(1000*(N[t,1]+N[t,2])/Vt[t])) * (1 - (lamb**lamb/(lamb +(1000*(N[t,1]+N[t,2])/Vt[t])/m)**lamb))
else:
p_DD[t] = 1
p[t,0]= p_Tw[t,0]*p_Rt[t,0]*p_D[t,0]
p[t,1]= p_Tw[t,1]*p_Rt[t,1]*p_D[t,1]*p_DD[t]
p[t,2]= p_Tw[t,2]*p_Rt[t,2]*p_D[t,2]*p_DD[t]
p[t,3]= p4[t]
for j in range(0,4,1):
P[t,j] = (p[t,j] - p[t,j]**(d[t,j]))/(1. - p[t,j]**d[t,j])
for j in range(0,3,1):
G[t,j] = (1. - p[t,j])/(1. - p[t,j]**d[t,j])*p[t,j]**d[t,j]
ft[t] = 0.518*np.exp(-6.*(N[t,1]/Vt[t] - 0.317)**2.) + 0.192
Gc_Ta[t] = 1. + De/(Ta[t] - Te)
F4[t] = ft[t]*Nep/Gc_Ta[t]
N[t+1,0] = (P[t,0] * N[t,0] + (alpha1 * F4[t]) * N[t,3])
N[t+1,1] = (P[t,1] * N[t,1] + G[t,0] * N[t,0])
N[t+1,2] = (P[t,2] * N[t,2] + G[t,1] * N[t,1])
N[t+1,3] = (P[t,3] * N[t,3] + G[t,2] * N[t,2])
N[np.isnan(N)] = 0
n4[:,x,y] = N[:,3] #p4[t] # p_D[t,2] #N[t,3]
process = (((x+1) * tas.shape[2] + y ) / ((tas.shape[1] * tas.shape[2]) / 100 ))
            logging.debug('Calculation progress: %d %% \r' % process)
#except Exception as e:
#logging.warn('Gridbox not calculated. Error= %s ' % (e))
#else:
# n4[:,x,y] = float('NaN')
# var_n4.assignValue(np.zeros(var_n4.shape))
self.status.set("anopheles done", 100)
self.output.setValue( file_n4 )
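# A minimal standalone sketch (illustrative; it mirrors the relative-humidity
# computation inside VBDProcess.execute above). Vapour pressure follows from
# specific humidity and surface pressure (62.2 = 0.622 * 100, converting Pa to
# hPa), saturation vapour pressure from the Magnus formula, and their ratio
# gives RH in percent, clipped to the physical range as the main loop does.
def _relative_humidity(tas, huss, ps):
    """tas in K, huss in kg/kg, ps in Pa; returns RH in percent."""
    e = (ps * huss) / 62.2  # vapour pressure in hPa
    t_c = tas - 273.16  # temperature in degrees Celsius
    es = 6.1078 * 10.0 ** (7.5 * t_c / (237.3 + t_c))  # saturation pressure, hPa
    return np.clip((e / es) * 100.0, 0.0, 100.0)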
|
|
from __future__ import unicode_literals
import mock
import os
import sys
from tempfile import NamedTemporaryFile
import pytest
import six
from dallinger.config import Configuration
from dallinger.config import get_config, LOCAL_CONFIG
class TestConfigurationUnitTests(object):
def test_register_new_variable(self):
config = Configuration()
config.register("num_participants", int)
config.extend({"num_participants": 1})
config.ready = True
assert config.get("num_participants", 1)
def test_register_duplicate_variable_raises(self):
config = Configuration()
config.register("num_participants", int)
with pytest.raises(KeyError):
config.register("num_participants", int)
def test_register_unknown_type_raises(self):
config = Configuration()
with pytest.raises(TypeError):
config.register("num_participants", object)
def test_type_mismatch_on_assignment(self):
config = get_config()
with pytest.raises(TypeError):
config["base_payment"] = 12
def test_type_mismatch_on_extend(self):
config = Configuration()
config.register("num_participants", int)
with pytest.raises(TypeError):
config.extend({"num_participants": 1.0})
def test_type_mismatch_with_cast_types(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
config.extend({"num_participants": 1.0}, cast_types=True)
assert config.get("num_participants", 1) == 1
def test_type_cast_types_failure_raises(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
with pytest.raises(TypeError):
config.extend({"num_participants": "A NUMBER"}, cast_types=True)
def test_type_casts_follow_file_pointers(self):
config = Configuration()
config.register("data", six.text_type)
config.ready = True
with NamedTemporaryFile() as data_file:
data_file.write("hello".encode("utf-8"))
data_file.flush()
config.extend({"data": "file:" + data_file.name}, cast_types=True)
assert config.get("data") == "hello"
def test_get_before_ready_is_not_possible(self):
config = Configuration()
config.register("num_participants", int)
config.extend({"num_participants": 1})
with pytest.raises(RuntimeError):
config.get("num_participants", 1)
def test_layering_of_configs(self):
config = Configuration()
config.register("num_participants", int)
config.extend({"num_participants": 1})
config.ready = True
assert config.get("num_participants", 1) == 1
config.extend({"num_participants": 2})
assert config.get("num_participants", 1) == 2
def test_setting_unknown_key_is_ignored(self):
config = Configuration()
config.ready = True
config.extend({"num_participants": 1})
config.get("num_participants", None)
def test_setting_value_that_doesnt_validate_fails(self):
config = Configuration()
def is_purple(val):
if val != "purple":
raise ValueError
config.register("fave_colour", six.text_type, validators=[is_purple])
config.ready = True
config.set("fave_colour", "purple")
with pytest.raises(ValueError):
config.set("fave_colour", "red")
def test_setting_by_set(self):
config = Configuration()
config.ready = True
config.set("mode", "live")
def test_setting_by_assignment(self):
config = Configuration()
config.ready = True
config["mode"] = "live"
def test_get_without_default_raises(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
with pytest.raises(KeyError):
config.get("num_participants")
def test_get_has_default_value(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
assert config.get("num_participants", 10) == 10
def test_get_strips_strings(self):
config = Configuration()
config.register("test_string", six.text_type)
config.ready = True
config.extend({"test_string": " something "})
assert config.get("test_string") == "something"
def test_dict_access(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
config.extend({"num_participants": 1})
assert config["num_participants"] == 1
def test_attribute_access(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
config.extend({"num_participants": 1})
assert config.num_participants == 1
def test_attribute_setting(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
config.num_participants = 1
assert config.num_participants == 1
def test_strict_extending_blocks_unknown_keys(self):
config = Configuration()
config.register("num_participants", int)
config.ready = True
with pytest.raises(KeyError):
config.extend({"unknown_key": 1}, strict=True)
def test_setting_values_supports_synonyms(self):
config = Configuration()
config.register("num_participants", int, synonyms={"n"})
config.ready = True
config.extend({"n": 1})
assert config.get("num_participants") == 1
def test_loading_keys_from_config_file(self):
config = Configuration()
config.register("mode", six.text_type)
config.register("num_participants", int, synonyms={"n"})
config.register("deploy_worldwide", bool, synonyms={"worldwide"})
mode_with_trailing_whitespace = "live "
contents = """
[Example Section]
mode = {}
num_participants = 10
worldwide = false
""".format(
mode_with_trailing_whitespace
)
with NamedTemporaryFile() as configfile:
configfile.write(contents.encode("utf-8"))
configfile.flush()
config.load_from_file(configfile.name)
config.ready = True
assert config.get("mode") == "live" # whitespace stripped
assert config.get("num_participants") == 10
assert config.get("deploy_worldwide") is False
def test_loading_keys_from_environment_variables(self):
config = Configuration()
config.register("num_participants", int, synonyms={"n"})
os.environ["num_participants"] = "1"
try:
config.load_from_environment()
finally:
del os.environ["num_participants"]
config.ready = True
assert config.get("num_participants") == 1
def test_docker_image_default(self, experiment_dir):
config = get_config()
config.load()
assert config.get("docker_image_base_name") == "experiment"
@pytest.mark.usefixtures("experiment_dir_merged")
class TestConfigurationIntegrationTests(object):
def test_experiment_defined_parameters(self):
config = get_config()
config.register_extra_parameters()
config.load_from_file(LOCAL_CONFIG)
# From custom module function
assert "custom_parameter" in config.types
# From custom experiment instance method
assert "custom_parameter2" in config.types
assert config.types["custom_parameter"] is int
assert config.types["custom_parameter2"] is bool
def test_reload_config(self):
# replicate the experiment API runner config loading
config = get_config()
config.register_extra_parameters()
config.load_from_file(LOCAL_CONFIG)
config._reset(register_defaults=True)
config.register_extra_parameters()
config.load_from_file(LOCAL_CONFIG)
def test_custom_experiment_module_set_and_retained(self, reset_config):
config = get_config()
config.register_extra_parameters()
assert sys.modules["dallinger_experiment"] is not None
exp_module = mock.Mock()
with mock.patch.dict("sys.modules", dallinger_experiment=exp_module):
config.clear()
config.register_extra_parameters()
assert sys.modules["dallinger_experiment"] is exp_module
def test_write_omits_sensitive_keys_if_filter_sensitive(self, in_tempdir):
config = get_config()
config.set("aws_region", "some region")
config.set("aws_secret_access_key", "foo")
config.ready = True
config.write(filter_sensitive=True)
with open(LOCAL_CONFIG) as txt:
contents = txt.read()
assert "aws_region" in contents
assert "aws_secret_access_key" not in contents
def test_write_includes_all_keys_if_filter_sensitive_false(self, in_tempdir):
config = get_config()
config.set("aws_region", "some region")
config.set("aws_secret_access_key", "foo")
config.ready = True
config.write(filter_sensitive=False)
with open(LOCAL_CONFIG) as txt:
contents = txt.read()
assert "aws_region" in contents
assert "aws_secret_access_key" in contents
def test_write_accepts_alternate_directory(self):
import tempfile
target = os.path.join(tempfile.mkdtemp(), "custom")
os.mkdir(target)
config = get_config()
config.set("aws_region", "some region")
config.ready = True
config.write(directory=target)
with open(os.path.join(target, LOCAL_CONFIG)) as txt:
contents = txt.read()
assert "aws_region" in contents
|
|
"""Base class for sparse matrix formats using compressed storage."""
from __future__ import division, print_function, absolute_import
__all__ = []
from warnings import warn
import operator
import numpy as np
from scipy.lib.six import xrange, zip as izip
from .base import spmatrix, isspmatrix, SparseEfficiencyWarning
from .data import _data_matrix, _minmax_mixin
from .dia import dia_matrix
from . import _sparsetools
from .sputils import upcast, upcast_char, to_native, isdense, isshape, \
getdtype, isscalarlike, isintlike, IndexMixin, get_index_dtype, \
downcast_intp_index, _compat_unique, _compat_bincount
class _cs_matrix(_data_matrix, _minmax_mixin, IndexMixin):
"""base matrix class for compressed row and column oriented matrices"""
def __init__(self, arg1, shape=None, dtype=None, copy=False):
_data_matrix.__init__(self)
if isspmatrix(arg1):
if arg1.format == self.format and copy:
arg1 = arg1.copy()
else:
arg1 = arg1.asformat(self.format)
self._set_self(arg1)
elif isinstance(arg1, tuple):
if isshape(arg1):
# It's a tuple of matrix dimensions (M, N)
# create empty matrix
self.shape = arg1 # spmatrix checks for errors here
M, N = self.shape
idx_dtype = get_index_dtype(maxval=self._swap((M,N))[1])
self.data = np.zeros(0, getdtype(dtype, default=float))
self.indices = np.zeros(0, idx_dtype)
self.indptr = np.zeros(self._swap((M,N))[0] + 1, dtype=idx_dtype)
else:
if len(arg1) == 2:
# (data, ij) format
from .coo import coo_matrix
other = self.__class__(coo_matrix(arg1, shape=shape))
self._set_self(other)
elif len(arg1) == 3:
# (data, indices, indptr) format
(data, indices, indptr) = arg1
idx_dtype = get_index_dtype((indices, indptr), check_contents=True)
self.indices = np.array(indices, copy=copy, dtype=idx_dtype)
self.indptr = np.array(indptr, copy=copy, dtype=idx_dtype)
self.data = np.array(data, copy=copy, dtype=getdtype(dtype, data))
else:
raise ValueError("unrecognized %s_matrix constructor usage" %
self.format)
else:
# must be dense
try:
arg1 = np.asarray(arg1)
            except Exception:
raise ValueError("unrecognized %s_matrix constructor usage" %
self.format)
from .coo import coo_matrix
self._set_self(self.__class__(coo_matrix(arg1, dtype=dtype)))
# Read matrix dimensions given, if any
if shape is not None:
self.shape = shape # spmatrix will check for errors
else:
if self.shape is None:
# shape not already set, try to infer dimensions
try:
major_dim = len(self.indptr) - 1
minor_dim = self.indices.max() + 1
                except Exception:
raise ValueError('unable to infer matrix dimensions')
else:
self.shape = self._swap((major_dim,minor_dim))
if dtype is not None:
self.data = np.asarray(self.data, dtype=dtype)
self.check_format(full_check=False)
def getnnz(self, axis=None):
"""Get the count of explicitly-stored values (nonzeros)
Parameters
----------
axis : None, 0, or 1
Select between the number of values across the whole matrix, in
each column, or in each row.
"""
if axis is None:
return int(self.indptr[-1])
else:
if axis < 0:
axis += 2
axis, _ = self._swap((axis, 1 - axis))
_, N = self._swap(self.shape)
if axis == 0:
return _compat_bincount(downcast_intp_index(self.indices),
minlength=N)
elif axis == 1:
return np.diff(self.indptr)
raise ValueError('axis out of bounds')
nnz = property(fget=getnnz)
def _set_self(self, other, copy=False):
"""take the member variables of other and assign them to self"""
if copy:
other = other.copy()
self.data = other.data
self.indices = other.indices
self.indptr = other.indptr
self.shape = other.shape
def check_format(self, full_check=True):
"""check whether the matrix format is valid
        Parameters
        ----------
        full_check : bool, optional
            If True (default), run a rigorous check, O(N) operations.
            If False, run a basic check, O(1) operations.
        """
# use _swap to determine proper bounds
major_name,minor_name = self._swap(('row','column'))
major_dim,minor_dim = self._swap(self.shape)
# index arrays should have integer data types
if self.indptr.dtype.kind != 'i':
warn("indptr array has non-integer dtype (%s)"
% self.indptr.dtype.name)
if self.indices.dtype.kind != 'i':
warn("indices array has non-integer dtype (%s)"
% self.indices.dtype.name)
idx_dtype = get_index_dtype((self.indptr, self.indices))
self.indptr = np.asarray(self.indptr, dtype=idx_dtype)
self.indices = np.asarray(self.indices, dtype=idx_dtype)
self.data = to_native(self.data)
# check array shapes
if self.data.ndim != 1 or self.indices.ndim != 1 or self.indptr.ndim != 1:
raise ValueError('data, indices, and indptr should be 1-D')
# check index pointer
if (len(self.indptr) != major_dim + 1):
raise ValueError("index pointer size (%d) should be (%d)" %
(len(self.indptr), major_dim + 1))
if (self.indptr[0] != 0):
raise ValueError("index pointer should start with 0")
# check index and data arrays
if (len(self.indices) != len(self.data)):
raise ValueError("indices and data should have the same size")
if (self.indptr[-1] > len(self.indices)):
raise ValueError("Last value of index pointer should be less than "
"the size of index and data arrays")
self.prune()
if full_check:
# check format validity (more expensive)
if self.nnz > 0:
if self.indices.max() >= minor_dim:
raise ValueError("%s index values must be < %d" %
(minor_name,minor_dim))
if self.indices.min() < 0:
raise ValueError("%s index values must be >= 0" %
minor_name)
if np.diff(self.indptr).min() < 0:
raise ValueError("index pointer values must form a "
"non-decreasing sequence")
# if not self.has_sorted_indices():
# warn('Indices were not in sorted order. Sorting indices.')
# self.sort_indices()
# assert(self.has_sorted_indices())
# TODO check for duplicates?
#######################
# Boolean comparisons #
#######################
def _copy_with_const(self, const):
"""Copy data, with all nonzeros replaced with constant for binopt
Adopts the dtype of const to avoid removing sign or magnitude before
comparison.
Warning: does not make a copy of indices and indptr
"""
try:
self.sum_duplicates()
except NotImplementedError:
pass
data = np.empty(self.data.shape, dtype=np.asarray(const).dtype)
data.fill(const)
return self.__class__((data, self.indices, self.indptr), shape=self.shape)
def __eq__(self, other):
# Scalar other.
if isscalarlike(other):
if np.isnan(other):
return self.__class__(self.shape, dtype=np.bool_)
other_arr = self._copy_with_const(other)
res = self._binopt(other_arr,'_ne_')
if other == 0:
warn("Comparing a sparse matrix with 0 using == is inefficient"
", try using != instead.", SparseEfficiencyWarning)
all_true = self.__class__(np.ones(self.shape, dtype=np.bool_))
return all_true - res
else:
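                # nonzero scalar: (self == other) can only hold on the
                # sparsity pattern, so subtract the != mask from it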
sparsity_pattern = self._copy_with_const(True)
return sparsity_pattern - res
# Dense other.
elif isdense(other):
return self.todense() == other
# Sparse other.
elif isspmatrix(other):
warn("Comparing sparse matrices using == is inefficient, try using"
" != instead.", SparseEfficiencyWarning)
#TODO sparse broadcasting
if self.shape != other.shape:
return False
elif self.format != other.format:
other = other.asformat(self.format)
res = self._binopt(other,'_ne_')
all_true = self.__class__(np.ones(self.shape, dtype=np.bool_))
return all_true - res
else:
return False
def __ne__(self, other):
# Scalar other.
if isscalarlike(other):
if np.isnan(other):
warn("Comparing a sparse matrix with nan using != is inefficient",
SparseEfficiencyWarning)
all_true = self.__class__(np.ones(self.shape, dtype=np.bool_))
return all_true
elif other != 0:
warn("Comparing a sparse matrix with a nonzero scalar using !="
" is inefficient, try using == instead.", SparseEfficiencyWarning)
                all_true = self.__class__(np.ones(self.shape, dtype=np.bool_))
res = (self == other)
return all_true - res
else:
other_arr = self._copy_with_const(other)
return self._binopt(other_arr,'_ne_')
# Dense other.
elif isdense(other):
return self.todense() != other
# Sparse other.
elif isspmatrix(other):
#TODO sparse broadcasting
if self.shape != other.shape:
return True
elif self.format != other.format:
other = other.asformat(self.format)
return self._binopt(other,'_ne_')
else:
return True
def _inequality(self, other, op, op_name, bad_scalar_msg):
# Scalar other.
if isscalarlike(other):
if 0 == other and op_name in ('_le_', '_ge_'):
raise NotImplementedError(" >= and <= don't work with 0.")
elif op(0, other):
warn(bad_scalar_msg, SparseEfficiencyWarning)
other_arr = np.empty(self.shape, dtype=np.asarray(other).dtype)
other_arr.fill(other)
other_arr = self.__class__(other_arr)
return self._binopt(other_arr, op_name)
else:
other_arr = self._copy_with_const(other)
return self._binopt(other_arr, op_name)
# Dense other.
elif isdense(other):
return op(self.todense(), other)
# Sparse other.
elif isspmatrix(other):
#TODO sparse broadcasting
if self.shape != other.shape:
raise ValueError("inconsistent shapes")
elif self.format != other.format:
other = other.asformat(self.format)
if op_name not in ('_ge_', '_le_'):
return self._binopt(other, op_name)
warn("Comparing sparse matrices using >= and <= is inefficient, "
"using <, >, or !=, instead.", SparseEfficiencyWarning)
all_true = self.__class__(np.ones(self.shape))
res = self._binopt(other, '_gt_' if op_name == '_le_' else '_lt_')
return all_true - res
else:
raise ValueError("Operands could not be compared.")
def __lt__(self, other):
return self._inequality(other, operator.lt, '_lt_',
"Comparing a sparse matrix with a scalar "
"greater than zero using < is inefficient, "
"try using >= instead.")
def __gt__(self, other):
return self._inequality(other, operator.gt, '_gt_',
"Comparing a sparse matrix with a scalar "
"less than zero using > is inefficient, "
"try using <= instead.")
def __le__(self, other):
return self._inequality(other, operator.le, '_le_',
"Comparing a sparse matrix with a scalar "
"greater than zero using <= is inefficient, "
"try using > instead.")
def __ge__(self,other):
return self._inequality(other, operator.ge, '_ge_',
"Comparing a sparse matrix with a scalar "
"less than zero using >= is inefficient, "
"try using < instead.")
#################################
    # Arithmetic operator overrides #
#################################
def __add__(self,other):
# First check if argument is a scalar
if isscalarlike(other):
if other == 0:
return self.copy()
else: # Now we would add this scalar to every element.
raise NotImplementedError('adding a nonzero scalar to a '
'sparse matrix is not supported')
elif isspmatrix(other):
if (other.shape != self.shape):
raise ValueError("inconsistent shapes")
return self._binopt(other,'_plus_')
elif isdense(other):
# Convert this matrix to a dense matrix and add them
return self.todense() + other
else:
return NotImplemented
def __radd__(self,other):
return self.__add__(other)
def __sub__(self,other):
# First check if argument is a scalar
if isscalarlike(other):
if other == 0:
return self.copy()
            else:  # Now we would subtract this scalar from every element.
                raise NotImplementedError('subtracting a nonzero scalar '
                                          'from a sparse matrix is not supported')
elif isspmatrix(other):
if (other.shape != self.shape):
raise ValueError("inconsistent shapes")
return self._binopt(other,'_minus_')
elif isdense(other):
# Convert this matrix to a dense matrix and subtract them
return self.todense() - other
else:
return NotImplemented
def __rsub__(self,other): # other - self
# note: this can't be replaced by other + (-self) for unsigned types
if isscalarlike(other):
if other == 0:
return -self.copy()
            else:  # Now we would subtract every element from this scalar.
                raise NotImplementedError('subtracting a sparse matrix from a '
                                          'nonzero scalar is not supported')
elif isdense(other):
# Convert this matrix to a dense matrix and subtract them
return other - self.todense()
else:
return NotImplemented
def multiply(self, other):
"""Point-wise multiplication by another matrix, vector, or
scalar.
"""
# Scalar multiplication.
if isscalarlike(other):
return self.__mul__(other)
# Sparse matrix or vector.
if isspmatrix(other):
if self.shape == other.shape:
other = self.__class__(other)
return self._binopt(other, '_elmul_')
# Single element.
elif other.shape == (1,1):
return self.__mul__(other.tocsc().data[0])
elif self.shape == (1,1):
return other.__mul__(self.tocsc().data[0])
# A row times a column.
elif self.shape[1] == other.shape[0] and self.shape[1] == 1:
return self._mul_sparse_matrix(other.tocsc())
elif self.shape[0] == other.shape[1] and self.shape[0] == 1:
return other._mul_sparse_matrix(self.tocsc())
# Row vector times matrix. other is a row.
elif other.shape[0] == 1 and self.shape[1] == other.shape[1]:
other = dia_matrix((other.toarray().ravel(), [0]),
shape=(other.shape[1], other.shape[1]))
return self._mul_sparse_matrix(other)
# self is a row.
elif self.shape[0] == 1 and self.shape[1] == other.shape[1]:
copy = dia_matrix((self.toarray().ravel(), [0]),
shape=(self.shape[1], self.shape[1]))
return other._mul_sparse_matrix(copy)
# Column vector times matrix. other is a column.
elif other.shape[1] == 1 and self.shape[0] == other.shape[0]:
other = dia_matrix((other.toarray().ravel(), [0]),
shape=(other.shape[0], other.shape[0]))
return other._mul_sparse_matrix(self)
# self is a column.
elif self.shape[1] == 1 and self.shape[0] == other.shape[0]:
copy = dia_matrix((self.toarray().ravel(), [0]),
shape=(self.shape[0], self.shape[0]))
return copy._mul_sparse_matrix(other)
else:
raise ValueError("inconsistent shapes")
# Anything else.
return np.multiply(self.todense(), other)
###########################
# Multiplication handlers #
###########################
def _mul_vector(self, other):
M,N = self.shape
# output array
result = np.zeros(M, dtype=upcast_char(self.dtype.char,
other.dtype.char))
# csr_matvec or csc_matvec
fn = getattr(_sparsetools,self.format + '_matvec')
fn(M, N, self.indptr, self.indices, self.data, other, result)
return result
def _mul_multivector(self, other):
M,N = self.shape
n_vecs = other.shape[1] # number of column vectors
result = np.zeros((M,n_vecs), dtype=upcast_char(self.dtype.char,
other.dtype.char))
# csr_matvecs or csc_matvecs
fn = getattr(_sparsetools,self.format + '_matvecs')
fn(M, N, n_vecs, self.indptr, self.indices, self.data, other.ravel(), result.ravel())
return result
def _mul_sparse_matrix(self, other):
M, K1 = self.shape
K2, N = other.shape
major_axis = self._swap((M,N))[0]
other = self.__class__(other) # convert to this format
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=M*N)
indptr = np.empty(major_axis + 1, dtype=idx_dtype)
fn = getattr(_sparsetools, self.format + '_matmat_pass1')
fn(M, N,
np.asarray(self.indptr, dtype=idx_dtype),
np.asarray(self.indices, dtype=idx_dtype),
np.asarray(other.indptr, dtype=idx_dtype),
np.asarray(other.indices, dtype=idx_dtype),
indptr)
nnz = indptr[-1]
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=nnz)
indptr = np.asarray(indptr, dtype=idx_dtype)
indices = np.empty(nnz, dtype=idx_dtype)
data = np.empty(nnz, dtype=upcast(self.dtype, other.dtype))
fn = getattr(_sparsetools, self.format + '_matmat_pass2')
fn(M, N, np.asarray(self.indptr, dtype=idx_dtype),
np.asarray(self.indices, dtype=idx_dtype),
self.data,
np.asarray(other.indptr, dtype=idx_dtype),
np.asarray(other.indices, dtype=idx_dtype),
other.data,
indptr, indices, data)
return self.__class__((data,indices,indptr),shape=(M,N))
def diagonal(self):
"""Returns the main diagonal of the matrix
"""
# TODO support k-th diagonal
fn = getattr(_sparsetools, self.format + "_diagonal")
y = np.empty(min(self.shape), dtype=upcast(self.dtype))
fn(self.shape[0], self.shape[1], self.indptr, self.indices, self.data, y)
return y
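    # Usage sketch: for A = sp.csr_matrix([[1, 2], [3, 4]]), A.diagonal()
    # returns array([1, 4]); for rectangular shapes the result has length
    # min(self.shape).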
#####################
# Other binary ops #
#####################
def _maximum_minimum(self, other, npop, op_name, dense_check):
if isscalarlike(other):
if dense_check(other):
warn("Taking maximum (minimum) with > 0 (< 0) number results to "
"a dense matrix.",
SparseEfficiencyWarning)
other_arr = np.empty(self.shape, dtype=np.asarray(other).dtype)
other_arr.fill(other)
other_arr = self.__class__(other_arr)
return self._binopt(other_arr, op_name)
else:
try:
self.sum_duplicates()
except NotImplementedError:
pass
new_data = npop(self.data, np.asarray(other))
mat = self.__class__((new_data, self.indices, self.indptr),
dtype=new_data.dtype, shape=self.shape)
return mat
elif isdense(other):
return npop(self.todense(), other)
elif isspmatrix(other):
return self._binopt(other, op_name)
else:
raise ValueError("Operands not compatible.")
def maximum(self, other):
return self._maximum_minimum(other, np.maximum, '_maximum_', lambda x: np.asarray(x) > 0)
def minimum(self, other):
return self._maximum_minimum(other, np.minimum, '_minimum_', lambda x: np.asarray(x) < 0)
#####################
# Reduce operations #
#####################
def sum(self, axis=None):
"""Sum the matrix over the given axis. If the axis is None, sum
over both rows and columns, returning a scalar.
"""
# The spmatrix base class already does axis=0 and axis=1 efficiently
# so we only do the case axis=None here
if axis is None:
return self.data.sum()
elif (not hasattr(self, 'blocksize') and
axis in self._swap(((1, -1), (0, 2)))[0]):
# faster than multiplication for large minor axis in CSC/CSR
dtype = self.dtype
if np.issubdtype(dtype, np.bool_):
dtype = np.int_
ret = np.zeros(len(self.indptr) - 1, dtype=dtype)
major_index, value = self._minor_reduce(np.add)
ret[major_index] = value
ret = np.asmatrix(ret)
if axis % 2 == 1:
ret = ret.T
return ret
else:
return spmatrix.sum(self, axis)
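    # Usage sketch (CSR, hypothetical values): the axis=None case sums the
    # data array directly; the axis cases return np.matrix results.
    #
    #   A = sp.csr_matrix([[1, 0], [0, 2]])
    #   A.sum()          # 3
    #   A.sum(axis=0)    # matrix([[1, 2]])
    #   A.sum(axis=1)    # matrix([[1], [2]])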
def _minor_reduce(self, ufunc):
"""Reduce nonzeros with a ufunc over the minor axis when non-empty
Warning: this does not call sum_duplicates()
Returns
-------
major_index : array of ints
Major indices where nonzero
value : array of self.dtype
Reduce result for nonzeros in each major_index
"""
major_index = np.flatnonzero(np.diff(self.indptr))
if self.data.size == 0 and major_index.size == 0:
            # NumPy < 1.8.0 doesn't handle empty arrays in reduceat
value = np.zeros_like(self.data)
else:
value = ufunc.reduceat(self.data,
downcast_intp_index(self.indptr[major_index]))
return major_index, value
#######################
# Getting and Setting #
#######################
def __getitem__(self, key):
if isinstance(key, tuple):
row = key[0]
col = key[1]
# TODO implement CSR[ [1,2,3], X ] with sparse matmat
# TODO make use of sorted indices
if isintlike(row) and isintlike(col):
return self._get_single_element(row,col)
else:
major,minor = self._swap((row,col))
if isintlike(major) and isinstance(minor,slice):
minor_shape = self._swap(self.shape)[1]
start, stop, stride = minor.indices(minor_shape)
out_shape = self._swap((1, stop-start))
return self._get_slice(major, start, stop, stride, out_shape)
elif isinstance(row, slice) or isinstance(col, slice):
return self._get_submatrix(row, col)
else:
raise NotImplementedError
elif isintlike(key):
return self[key, :]
else:
raise IndexError("invalid index")
def __setitem__(self, index, x):
# Process arrays from IndexMixin
i, j = self._unpack_index(index)
i, j = self._index_to_arrays(i, j)
if isspmatrix(x):
x = x.toarray()
# Make x and i into the same shape
x = np.asarray(x, dtype=self.dtype)
x, _ = np.broadcast_arrays(x, i)
if x.shape != i.shape:
raise ValueError("shape mismatch in assignment")
if np.size(x) == 0:
return
i, j = self._swap((i.ravel(), j.ravel()))
self._set_many(i, j, x.ravel())
def _setdiag(self, values, k):
if 0 in self.shape:
return
M, N = self.shape
broadcast = (values.ndim == 0)
if k < 0:
if broadcast:
max_index = min(M + k, N)
else:
max_index = min(M + k, N, len(values))
i = np.arange(max_index, dtype=self.indices.dtype)
j = np.arange(max_index, dtype=self.indices.dtype)
i -= k
else:
if broadcast:
max_index = min(M, N - k)
else:
max_index = min(M, N - k, len(values))
i = np.arange(max_index, dtype=self.indices.dtype)
j = np.arange(max_index, dtype=self.indices.dtype)
j += k
if not broadcast:
values = values[:len(i)]
self[i, j] = values
def _set_many(self, i, j, x):
"""Sets value at each (i, j) to x
Here (i,j) index major and minor respectively.
"""
M, N = self._swap(self.shape)
def check_bounds(indices, bound):
idx = indices.max()
if idx >= bound:
raise IndexError('index (%d) out of range (>= %d)' %
(idx, bound))
idx = indices.min()
if idx < -bound:
raise IndexError('index (%d) out of range (< -%d)' %
(idx, bound))
check_bounds(i, M)
check_bounds(j, N)
i = np.asarray(i, dtype=self.indices.dtype)
j = np.asarray(j, dtype=self.indices.dtype)
n_samples = len(x)
offsets = np.empty(n_samples, dtype=self.indices.dtype)
ret = _sparsetools.csr_sample_offsets(M, N, self.indptr, self.indices,
n_samples, i, j, offsets)
if ret == 1:
# rinse and repeat
self.sum_duplicates()
_sparsetools.csr_sample_offsets(M, N, self.indptr,
self.indices, n_samples, i, j,
offsets)
if -1 not in offsets:
# only affects existing non-zero cells
self.data[offsets] = x
return
else:
warn("Changing the sparsity structure of a %s_matrix is expensive. "
"lil_matrix is more efficient." % self.format,
SparseEfficiencyWarning)
# replace where possible
mask = offsets > -1
self.data[offsets[mask]] = x[mask]
# only insertions remain
mask = ~mask
i = i[mask]
i[i < 0] += M
j = j[mask]
j[j < 0] += N
self._insert_many(i, j, x[mask])
def _insert_many(self, i, j, x):
"""Inserts new nonzero at each (i, j) with value x
Here (i,j) index major and minor respectively.
i, j and x must be non-empty, 1d arrays.
Inserts each major group (e.g. all entries per row) at a time.
Maintains has_sorted_indices property.
Modifies i, j, x in place.
"""
order = np.argsort(i, kind='mergesort') # stable for duplicates
i = i.take(order, mode='clip')
j = j.take(order, mode='clip')
x = x.take(order, mode='clip')
do_sort = self.has_sorted_indices
# Update index data type
idx_dtype = get_index_dtype((self.indices, self.indptr),
maxval=(self.indptr[-1] + x.size))
self.indptr = np.asarray(self.indptr, dtype=idx_dtype)
self.indices = np.asarray(self.indices, dtype=idx_dtype)
i = np.asarray(i, dtype=idx_dtype)
j = np.asarray(j, dtype=idx_dtype)
# Collate old and new in chunks by major index
indices_parts = []
data_parts = []
ui, ui_indptr = _compat_unique(i, return_index=True)
ui_indptr = np.append(ui_indptr, len(j))
new_nnzs = np.diff(ui_indptr)
prev = 0
for c, (ii, js, je) in enumerate(izip(ui, ui_indptr, ui_indptr[1:])):
# old entries
start = self.indptr[prev]
stop = self.indptr[ii]
indices_parts.append(self.indices[start:stop])
data_parts.append(self.data[start:stop])
# handle duplicate j: keep last setting
uj, uj_indptr = _compat_unique(j[js:je][::-1], return_index=True)
if len(uj) == je - js:
indices_parts.append(j[js:je])
data_parts.append(x[js:je])
else:
indices_parts.append(j[js:je][::-1][uj_indptr])
data_parts.append(x[js:je][::-1][uj_indptr])
new_nnzs[c] = len(uj)
prev = ii
# remaining old entries
start = self.indptr[ii]
indices_parts.append(self.indices[start:])
data_parts.append(self.data[start:])
# update attributes
self.indices = np.concatenate(indices_parts)
self.data = np.concatenate(data_parts)
nnzs = np.asarray(np.ediff1d(self.indptr, to_begin=0), dtype=idx_dtype)
nnzs[1:][ui] += new_nnzs
self.indptr = np.cumsum(nnzs, out=nnzs)
if do_sort:
# TODO: only sort where necessary
self.has_sorted_indices = False
self.sort_indices()
self.check_format(full_check=False)
def _get_single_element(self,row,col):
M, N = self.shape
if (row < 0):
row += M
if (col < 0):
col += N
if not (0 <= row < M) or not (0 <= col < N):
raise IndexError("index out of bounds")
major_index, minor_index = self._swap((row,col))
# TODO make use of sorted indices (if present)
start = self.indptr[major_index]
end = self.indptr[major_index+1]
# can use np.add(..., where) from numpy 1.7
return np.compress(minor_index == self.indices[start:end],
self.data[start:end]).sum(dtype=self.dtype)
def _get_slice(self, i, start, stop, stride, shape):
"""Returns a copy of the elements
        [i, start:stop:stride] for row-oriented matrices
        [start:stop:stride, i] for column-oriented matrices
"""
if stride != 1:
raise ValueError("slicing with step != 1 not supported")
if stop <= start:
raise ValueError("slice width must be >= 1")
# TODO make [i,:] faster
# TODO implement [i,x:y:z]
indices = []
for ind in xrange(self.indptr[i], self.indptr[i+1]):
if self.indices[ind] >= start and self.indices[ind] < stop:
indices.append(ind)
index = self.indices[indices] - start
data = self.data[indices]
indptr = np.array([0, len(indices)])
return self.__class__((data, index, indptr), shape=shape,
dtype=self.dtype)
def _get_submatrix(self, slice0, slice1):
"""Return a submatrix of this matrix (new matrix is created)."""
slice0, slice1 = self._swap((slice0,slice1))
shape0, shape1 = self._swap(self.shape)
def _process_slice(sl, num):
if isinstance(sl, slice):
i0, i1 = sl.start, sl.stop
if i0 is None:
i0 = 0
elif i0 < 0:
i0 = num + i0
if i1 is None:
i1 = num
elif i1 < 0:
i1 = num + i1
return i0, i1
elif np.isscalar(sl):
if sl < 0:
sl += num
return sl, sl + 1
else:
return sl[0], sl[1]
def _in_bounds(i0, i1, num):
if not (0 <= i0 < num) or not (0 < i1 <= num) or not (i0 < i1):
raise IndexError("index out of bounds: 0<=%d<%d, 0<=%d<%d, %d<%d" %
(i0, num, i1, num, i0, i1))
i0, i1 = _process_slice(slice0, shape0)
j0, j1 = _process_slice(slice1, shape1)
_in_bounds(i0, i1, shape0)
_in_bounds(j0, j1, shape1)
aux = _sparsetools.get_csr_submatrix(shape0, shape1,
self.indptr, self.indices,
self.data,
i0, i1, j0, j1)
data, indices, indptr = aux[2], aux[1], aux[0]
shape = self._swap((i1 - i0, j1 - j0))
return self.__class__((data, indices, indptr), shape=shape)
######################
# Conversion methods #
######################
def todia(self):
return self.tocoo(copy=False).todia()
def todok(self):
return self.tocoo(copy=False).todok()
def tocoo(self,copy=True):
"""Return a COOrdinate representation of this matrix
When copy=False the index and data arrays are not copied.
"""
major_dim,minor_dim = self._swap(self.shape)
data = self.data
minor_indices = self.indices
if copy:
data = data.copy()
minor_indices = minor_indices.copy()
major_indices = np.empty(len(minor_indices), dtype=self.indices.dtype)
_sparsetools.expandptr(major_dim,self.indptr,major_indices)
row,col = self._swap((major_indices,minor_indices))
from .coo import coo_matrix
return coo_matrix((data,(row,col)), self.shape)
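    # Round-trip sketch: with copy=False the COO view shares .data, e.g.
    #
    #   A = sp.csr_matrix([[0, 1], [2, 0]])
    #   C = A.tocoo(copy=False)
    #   C.row, C.col, C.data    # (array([0, 1]), array([1, 0]), array([1, 2]))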
def toarray(self, order=None, out=None):
"""See the docstring for `spmatrix.toarray`."""
return self.tocoo(copy=False).toarray(order=order, out=out)
##############################################################
# methods that examine or modify the internal data structure #
##############################################################
def eliminate_zeros(self):
"""Remove zero entries from the matrix
This is an *in place* operation
"""
fn = _sparsetools.csr_eliminate_zeros
M,N = self._swap(self.shape)
fn(M, N, self.indptr, self.indices, self.data)
self.prune() # nnz may have changed
def __get_has_canonical_format(self):
"""Determine whether the matrix has sorted indices and no duplicates
Returns
- True: if the above applies
- False: otherwise
        has_canonical_format implies has_sorted_indices, so if the latter flag
        is False, so is the former; if the former is found to be True, the
        latter flag is also set.
"""
# first check to see if result was cached
if not getattr(self, '_has_sorted_indices', True):
# not sorted => not canonical
self._has_canonical_format = False
elif not hasattr(self, '_has_canonical_format'):
fn = _sparsetools.csr_has_canonical_format
self.has_canonical_format = \
fn(len(self.indptr) - 1, self.indptr, self.indices)
return self._has_canonical_format
def __set_has_canonical_format(self, val):
self._has_canonical_format = bool(val)
if val:
self.has_sorted_indices = True
has_canonical_format = property(fget=__get_has_canonical_format,
fset=__set_has_canonical_format)
def sum_duplicates(self):
"""Eliminate duplicate matrix entries by adding them together
        This is an *in place* operation
"""
if self.has_canonical_format:
return
self.sort_indices()
fn = _sparsetools.csr_sum_duplicates
M,N = self._swap(self.shape)
fn(M, N, self.indptr, self.indices, self.data)
self.prune() # nnz may have changed
self.has_canonical_format = True
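    # Sketch: duplicates can exist when the CSR arrays are built by hand;
    # sum_duplicates() collapses them in place.
    #
    #   import numpy as np
    #   A = sp.csr_matrix((np.array([1, 2]), np.array([0, 0]),
    #                      np.array([0, 2])), shape=(1, 2))
    #   A.sum_duplicates()
    #   A.data, A.indices    # (array([3]), array([0]))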
def __get_sorted(self):
"""Determine whether the matrix has sorted indices
Returns
- True: if the indices of the matrix are in sorted order
- False: otherwise
"""
# first check to see if result was cached
if not hasattr(self,'_has_sorted_indices'):
fn = _sparsetools.csr_has_sorted_indices
self._has_sorted_indices = \
fn(len(self.indptr) - 1, self.indptr, self.indices)
return self._has_sorted_indices
def __set_sorted(self, val):
self._has_sorted_indices = bool(val)
has_sorted_indices = property(fget=__get_sorted, fset=__set_sorted)
def sorted_indices(self):
"""Return a copy of this matrix with sorted indices
"""
A = self.copy()
A.sort_indices()
return A
# an alternative that has linear complexity is the following
# although the previous option is typically faster
# return self.toother().toother()
def sort_indices(self):
"""Sort the indices of this matrix *in place*
"""
if not self.has_sorted_indices:
fn = _sparsetools.csr_sort_indices
fn(len(self.indptr) - 1, self.indptr, self.indices, self.data)
self.has_sorted_indices = True
def prune(self):
"""Remove empty space after all non-zero elements.
"""
major_dim = self._swap(self.shape)[0]
if len(self.indptr) != major_dim + 1:
raise ValueError('index pointer has invalid length')
if len(self.indices) < self.nnz:
raise ValueError('indices array has fewer than nnz elements')
if len(self.data) < self.nnz:
raise ValueError('data array has fewer than nnz elements')
self.data = self.data[:self.nnz]
self.indices = self.indices[:self.nnz]
###################
# utility methods #
###################
# needed by _data_matrix
def _with_data(self,data,copy=True):
"""Returns a matrix with the same sparsity structure as self,
but with different data. By default the structure arrays
(i.e. .indptr and .indices) are copied.
"""
if copy:
return self.__class__((data,self.indices.copy(),self.indptr.copy()),
shape=self.shape,dtype=data.dtype)
else:
return self.__class__((data,self.indices,self.indptr),
shape=self.shape,dtype=data.dtype)
def _binopt(self, other, op):
"""apply the binary operation fn to two sparse matrices."""
other = self.__class__(other)
# e.g. csr_plus_csr, csr_minus_csr, etc.
fn = getattr(_sparsetools, self.format + op + self.format)
maxnnz = self.nnz + other.nnz
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=maxnnz)
indptr = np.empty(self.indptr.shape, dtype=idx_dtype)
indices = np.empty(maxnnz, dtype=idx_dtype)
bool_ops = ['_ne_', '_lt_', '_gt_', '_le_', '_ge_']
if op in bool_ops:
data = np.empty(maxnnz, dtype=np.bool_)
else:
data = np.empty(maxnnz, dtype=upcast(self.dtype, other.dtype))
fn(self.shape[0], self.shape[1],
np.asarray(self.indptr, dtype=idx_dtype),
np.asarray(self.indices, dtype=idx_dtype),
self.data,
np.asarray(other.indptr, dtype=idx_dtype),
np.asarray(other.indices, dtype=idx_dtype),
other.data,
indptr, indices, data)
actual_nnz = indptr[-1]
indices = indices[:actual_nnz]
data = data[:actual_nnz]
if actual_nnz < maxnnz // 2:
# too much waste, trim arrays
indices = indices.copy()
data = data.copy()
A = self.__class__((data, indices, indptr), shape=self.shape)
return A
def _divide_sparse(self, other):
"""
Divide this matrix by a second sparse matrix.
"""
if other.shape != self.shape:
raise ValueError('inconsistent shapes')
r = self._binopt(other, '_eldiv_')
if np.issubdtype(r.dtype, np.inexact):
# Eldiv leaves entries outside the combined sparsity
# pattern empty, so they must be filled manually. They are
# always nan, so that the matrix is completely full.
out = np.empty(self.shape, dtype=self.dtype)
out.fill(np.nan)
r = r.tocoo()
out[r.row, r.col] = r.data
out = np.matrix(out)
else:
            # integer types go with nan <-> 0
out = r
return out
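    # Behaviour sketch: entries outside the union of the two sparsity
    # patterns are 0/0, so a float result comes back as a dense np.matrix
    # with nan in those positions, e.g.
    #
    #   A = sp.csr_matrix([[1., 0.], [0., 2.]])
    #   A / A    # matrix([[ 1., nan], [nan,  1.]])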
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import time
from keystoneauth1 import adapter
from keystoneauth1 import discover
from keystoneauth1 import exceptions as ka_exc
from keystoneauth1.identity import v2 as v2_auth
from keystoneauth1.identity import v3 as v3_auth
from keystoneauth1 import session
from oslo_utils import strutils
import six.moves.urllib.parse as urlparse
from . import utils
from . import exc
from .. import monitoring_utils
from .ecl.common.apiclient import auth
from .ecl.common.apiclient import client
from .ecl.common.apiclient import exceptions
def _discover_auth_versions(session, auth_url):
    # discover the API versions the server supports, based on the
    # given URL
v2_auth_url = None
v3_auth_url = None
try:
ks_discover = discover.Discover(session=session, url=auth_url)
v2_auth_url = ks_discover.url_for('2.0')
v3_auth_url = ks_discover.url_for('3.0')
except ka_exc.DiscoveryFailure:
raise
except exceptions.ClientException:
# Identity service may not support discovery. In that case,
# try to determine version from auth_url
url_parts = urlparse.urlparse(auth_url)
(scheme, netloc, path, params, query, fragment) = url_parts
path = path.lower()
if path.startswith('/v3'):
v3_auth_url = auth_url
elif path.startswith('/v2'):
v2_auth_url = auth_url
else:
raise exc.CommandError('Unable to determine the Keystone '
'version to authenticate with '
'using the given auth_url.')
return v2_auth_url, v3_auth_url
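# Fallback sketch (hypothetical endpoint): when discovery is unsupported the
# version is inferred from the URL path, so
#   _discover_auth_versions(sess, 'http://keystone:5000/v3')
# yields (None, 'http://keystone:5000/v3'), a '/v2.0' path fills the v2 slot
# instead, and any other path raises exc.CommandError.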
def _get_keystone_session(**kwargs):
# TODO(fabgia): the heavy lifting here should be really done by Keystone.
# Unfortunately Keystone does not support a richer method to perform
# discovery and return a single viable URL. A bug against Keystone has
# been filed: https://bugs.launchpad.net/python-keystoneclient/+bug/1330677
# first create a Keystone session
cacert = kwargs.pop('cacert', None)
cert = kwargs.pop('cert', None)
key = kwargs.pop('key', None)
insecure = kwargs.pop('insecure', False)
auth_url = kwargs.pop('auth_url', None)
project_id = kwargs.pop('project_id', None)
project_name = kwargs.pop('project_name', None)
token = kwargs['token']
timeout = kwargs.get('timeout')
if insecure:
verify = False
else:
verify = cacert or True
if cert and key:
# passing cert and key together is deprecated in favour of the
# requests lib form of having the cert and key as a tuple
cert = (cert, key)
# create the keystone client session
ks_session = session.Session(verify=verify, cert=cert, timeout=timeout)
v2_auth_url, v3_auth_url = _discover_auth_versions(ks_session, auth_url)
username = kwargs.pop('username', None)
user_id = kwargs.pop('user_id', None)
user_domain_name = kwargs.pop('user_domain_name', None)
user_domain_id = kwargs.pop('user_domain_id', None)
project_domain_name = kwargs.pop('project_domain_name', None)
project_domain_id = kwargs.pop('project_domain_id', None)
use_domain = (user_domain_id or user_domain_name or
project_domain_id or project_domain_name)
use_v3 = v3_auth_url and (use_domain or (not v2_auth_url))
use_v2 = v2_auth_url and not use_domain
if use_v3 and token:
auth = v3_auth.Token(
v3_auth_url,
token=token,
project_name=project_name,
project_id=project_id,
project_domain_name=project_domain_name,
project_domain_id=project_domain_id)
elif use_v2 and token:
auth = v2_auth.Token(
v2_auth_url,
token=token,
tenant_id=project_id,
tenant_name=project_name)
elif use_v3:
        # the auth_url specifies v3,
        # e.g. http://no.where:5000/v3;
        # Keystone will return only v3 as a viable option
auth = v3_auth.Password(
v3_auth_url,
username=username,
password=kwargs.pop('password', None),
user_id=user_id,
user_domain_name=user_domain_name,
user_domain_id=user_domain_id,
project_name=project_name,
project_id=project_id,
project_domain_name=project_domain_name,
project_domain_id=project_domain_id)
elif use_v2:
        # the auth_url specifies v2,
        # e.g. http://no.where:5000/v2.0;
        # Keystone will return only v2 as a viable option
auth = v2_auth.Password(
v2_auth_url,
username,
kwargs.pop('password', None),
tenant_id=project_id,
tenant_name=project_name)
else:
raise exc.CommandError('Unable to determine the Keystone version '
'to authenticate with using the given '
'auth_url.')
ks_session.auth = auth
return ks_session
def _get_endpoint(ks_session, **kwargs):
"""Get an endpoint using the provided keystone session."""
# set service specific endpoint types
endpoint_type = kwargs.get('endpoint_type') or 'publicURL'
service_type = kwargs.get('service_type') or 'monitoring'
endpoint = ks_session.get_endpoint(service_type=service_type,
interface=endpoint_type,
region_name=kwargs.get('region_name'))
return endpoint
class AuthPlugin(auth.BaseAuthPlugin):
opt_names = ['tenant_id', 'region_name', 'auth_token',
'service_type', 'endpoint_type', 'cacert',
'auth_url', 'insecure', 'cert_file', 'key_file',
'cert', 'key', 'tenant_name', 'project_name',
'project_id', 'project_domain_id', 'project_domain_name',
'user_id', 'user_domain_id', 'user_domain_name',
'password', 'username', 'endpoint']
def __init__(self, auth_system=None, **kwargs):
self.opt_names.extend(self.common_opt_names)
super(AuthPlugin, self).__init__(auth_system, **kwargs)
# NOTE(sileht): backward compat
if self.opts.get('auth_token') and not self.opts.get('token'):
self.opts['token'] = self.opts.get('auth_token')
def _do_authenticate(self, http_client):
token = self.opts.get('token')
endpoint = self.opts.get('endpoint')
if not (endpoint and token):
ks_kwargs = self._get_ks_kwargs(http_timeout=http_client.timeout)
ks_session = _get_keystone_session(**ks_kwargs)
if not token:
token = lambda: ks_session.get_token()
if not endpoint:
endpoint = _get_endpoint(ks_session, **ks_kwargs)
self.opts['token'] = token
self.opts['endpoint'] = endpoint
def _get_ks_kwargs(self, http_timeout):
project_id = (self.opts.get('project_id') or
self.opts.get('tenant_id'))
project_name = (self.opts.get('project_name') or
self.opts.get('tenant_name'))
token = self.opts.get('token')
ks_kwargs = {
'username': self.opts.get('username'),
'password': self.opts.get('password'),
'user_id': self.opts.get('user_id'),
'user_domain_id': self.opts.get('user_domain_id'),
'user_domain_name': self.opts.get('user_domain_name'),
'project_id': project_id,
'project_name': project_name,
'project_domain_name': self.opts.get('project_domain_name'),
'project_domain_id': self.opts.get('project_domain_id'),
'auth_url': self.opts.get('auth_url'),
'cacert': self.opts.get('cacert'),
'cert': self.opts.get('cert'),
'key': self.opts.get('key'),
'insecure': strutils.bool_from_string(
self.opts.get('insecure')),
'endpoint_type': self.opts.get('endpoint_type'),
'service_type': self.opts.get('service_type'),
'region_name': self.opts.get('region_name'),
'timeout': http_timeout,
'token': token() if callable(token) else token,
}
return ks_kwargs
def token_and_endpoint(self, endpoint_type, service_type):
token = self.opts.get('token')
if callable(token):
token = token()
return token, self.opts.get('endpoint')
def sufficient_options(self):
"""Check if all required options are present.
:raises: AuthPluginOptionsMissing
"""
has_token = self.opts.get('token')
has_project_domain_or_tenant = (self.opts.get('project_id') or
(self.opts.get('project_name') and
(self.opts.get('user_domain_name') or
self.opts.get('user_domain_id'))) or
(self.opts.get('tenant_id') or
self.opts.get('tenant_name')))
has_credential = (self.opts.get('username')
and has_project_domain_or_tenant
and self.opts.get('password')
and self.opts.get('auth_url'))
missing = not (has_token or has_credential)
if missing:
missing_opts = []
opts = ['token', 'endpoint', 'username', 'password', 'auth_url',
'tenant_id', 'tenant_name']
for opt in opts:
if not self.opts.get(opt):
missing_opts.append(opt)
raise exceptions.AuthPluginOptionsMissing(missing_opts)
def _adjust_kwargs(kwargs):
client_kwargs = {
'username': kwargs.get('os_username'),
'password': kwargs.get('os_password'),
'tenant_id': kwargs.get('os_tenant_id'),
'tenant_name': kwargs.get('os_tenant_name'),
'auth_url': kwargs.get('os_auth_url'),
'region_name': kwargs.get('os_region_name'),
'service_type': kwargs.get('os_service_type'),
'endpoint_type': kwargs.get('os_endpoint_type'),
'insecure': kwargs.get('os_insecure'),
'cacert': kwargs.get('os_cacert'),
'cert_file': kwargs.get('os_cert'),
'key_file': kwargs.get('os_key'),
'token': kwargs.get('os_token') or kwargs.get('os_auth_token'),
'user_domain_name': kwargs.get('os_user_domain_name'),
'user_domain_id': kwargs.get('os_user_domain_id'),
'project_domain_name': kwargs.get('os_project_domain_name'),
'project_domain_id': kwargs.get('os_project_domain_id'),
}
client_kwargs.update(kwargs)
client_kwargs['endpoint_type'] = 'publicURL'
client_kwargs['token'] = (client_kwargs.get('token') or
kwargs.get('token') or
kwargs.get('auth_token'))
timeout = kwargs.get('timeout')
if timeout is not None:
timeout = int(timeout)
if timeout <= 0:
timeout = None
insecure = strutils.bool_from_string(kwargs.get('insecure'))
verify = kwargs.get('verify')
if verify is None:
if insecure:
verify = False
else:
verify = client_kwargs.get('cacert') or True
cert = client_kwargs.get('cert_file')
key = client_kwargs.get('key_file')
if cert and key:
cert = cert, key
client_kwargs.update({'verify': verify, 'cert': cert, 'timeout': timeout})
return client_kwargs
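# Usage sketch (hypothetical credentials): CLI-style os_* keys are mapped to
# plain client kwargs and the transport options are normalized.
#
#   kw = _adjust_kwargs({'os_username': 'demo', 'os_password': 's3cret',
#                        'os_auth_url': 'http://keystone:5000/v3',
#                        'insecure': 'yes'})
#   kw['username']   # 'demo'
#   kw['verify']     # False, because insecure=yes overrides any cacert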
def Client(version, *args, **kwargs):
client_kwargs = _adjust_kwargs(kwargs)
from .v2.client import Client
return Client(*args, **client_kwargs)
# client_kwargs = _adjust_kwargs(kwargs)
# module = utils.import_versioned_module(version, 'client')
# client_class = getattr(module, 'Client')
# return client_class(*args, **client_kwargs)
def get_client(version, **kwargs):
"""Get an authenticated client, based on the credentials in the kwargs.
:param version: the API version to use ('1' or '2')
:param kwargs: keyword args containing credentials, either:
* session: a keystoneauth/keystoneclient session object
* service_type: The default service_type for URL discovery
* service_name: The default service_name for URL discovery
* interface: The default interface for URL discovery
(Default: public)
* region_name: The default region_name for URL discovery
* endpoint_override: Always use this endpoint URL for requests
for this ceiloclient
* auth: An auth plugin to use instead of the session one
* user_agent: The User-Agent string to set
(Default is python-ceilometer-client)
* connect_retries: the maximum number of retries that should be
attempted for connection errors
* logger: A logging object
or (DEPRECATED):
* os_auth_token: (DEPRECATED) pre-existing token to re-use,
use os_token instead
* os_token: pre-existing token to re-use
* ceilometer_url: (DEPRECATED) Ceilometer API endpoint,
use os_endpoint instead
* os_endpoint: Ceilometer API endpoint
or (DEPRECATED):
* os_username: name of user
* os_password: user's password
* os_user_id: user's id
* os_user_domain_id: the domain id of the user
* os_user_domain_name: the domain name of the user
* os_project_id: the user project id
* os_tenant_id: V2 alternative to os_project_id
* os_project_name: the user project name
* os_tenant_name: V2 alternative to os_project_name
* os_project_domain_name: domain name for the user project
* os_project_domain_id: domain id for the user project
* os_auth_url: endpoint to authenticate against
* os_cert|os_cacert: path of CA TLS certificate
* os_key: SSL private key
* os_insecure: allow insecure SSL (no cert verification)
"""
endpoint = kwargs.get('os_endpoint') or kwargs.get('ceilometer_url')
return Client(version, endpoint, **kwargs)
def get_auth_plugin(endpoint, **kwargs):
auth_plugin = AuthPlugin(
auth_url=kwargs.get('auth_url'),
service_type=kwargs.get('service_type'),
token=kwargs.get('token'),
endpoint_type=kwargs.get('endpoint_type'),
insecure=kwargs.get('insecure'),
region_name=kwargs.get('region_name'),
cacert=kwargs.get('cacert'),
tenant_id=kwargs.get('project_id') or kwargs.get('tenant_id'),
endpoint=endpoint,
username=kwargs.get('username'),
password=kwargs.get('password'),
tenant_name=kwargs.get('tenant_name') or kwargs.get('project_name'),
user_domain_name=kwargs.get('user_domain_name'),
user_domain_id=kwargs.get('user_domain_id'),
project_domain_name=kwargs.get('project_domain_name'),
project_domain_id=kwargs.get('project_domain_id')
)
return auth_plugin
LEGACY_OPTS = ('auth_plugin', 'auth_url', 'token', 'insecure', 'cacert',
'tenant_id', 'project_id', 'username', 'password',
'project_name', 'tenant_name',
'user_domain_name', 'user_domain_id',
'project_domain_name', 'project_domain_id',
'key_file', 'cert_file', 'verify', 'timeout', 'cert')
def _construct_http_client(**kwargs):
kwargs = kwargs.copy()
if kwargs.get('session') is not None:
# Drop legacy options
for opt in LEGACY_OPTS:
kwargs.pop(opt, None)
# Drop aodh_endpoint from kwargs
kwargs.pop('aodh_endpoint', None)
return SessionClient(
session=kwargs.pop('session'),
service_type=kwargs.pop('service_type', 'monitoring') or 'monitoring',
interface=kwargs.pop('interface', kwargs.pop('endpoint_type',
'publicURL')),
region_name=kwargs.pop('region_name', None),
user_agent=kwargs.pop('user_agent', 'monitoringclient'),
auth=kwargs.get('auth'),
timings=kwargs.pop('timings', None),
**kwargs)
else:
return client.BaseClient(client.HTTPClient(
auth_plugin=kwargs.get('auth_plugin'),
region_name=kwargs.get('region_name'),
endpoint_type=kwargs.get('endpoint_type'),
original_ip=kwargs.get('original_ip'),
verify=kwargs.get('verify'),
cert=kwargs.get('cert'),
timeout=kwargs.get('timeout'),
timings=kwargs.get('timings'),
keyring_saver=kwargs.get('keyring_saver'),
debug=kwargs.get('debug'),
user_agent=kwargs.get('user_agent'),
http=kwargs.get('http')
))
@contextlib.contextmanager
def record_time(times, enabled, *args):
"""Record the time of a specific action.
:param times: A list of tuples holds time data.
:type times: list
:param enabled: Whether timing is enabled.
:type enabled: bool
:param args: Other data to be stored besides time data, these args
will be joined to a string.
"""
if not enabled:
yield
else:
start = time.time()
yield
end = time.time()
times.append((' '.join(args), start, end))
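# Usage sketch (hypothetical request): the timings collected by SessionClient
# below have exactly this shape.
#
#   times = []
#   with record_time(times, True, 'GET', '/v2/alarms'):
#       pass  # perform the request here
#   times    # [('GET /v2/alarms', <start>, <end>)]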
class SessionClient(adapter.LegacyJsonAdapter):
def __init__(self, *args, **kwargs):
self.times = []
self.timings = kwargs.pop('timings', False)
super(SessionClient, self).__init__(*args, **kwargs)
def request(self, url, method, **kwargs):
kwargs.setdefault('headers', kwargs.get('headers', {}))
# NOTE(sileht): The standard call raises errors from
        # keystoneauth, whereas we need to raise the monitoringclient errors.
raise_exc = kwargs.pop('raise_exc', True)
with record_time(self.times, self.timings, method, url):
resp, body = super(SessionClient, self).request(url,
method,
raise_exc=False,
**kwargs)
if raise_exc and resp.status_code >= 400:
print(monitoring_utils._print_resp_error(resp.json()['error']))
raise exc.from_response(resp, body)
return resp
|
|
# Copyright 2014 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
# author: Steven Czerwinski <czerwin@scalyr.com>
from __future__ import unicode_literals
from __future__ import absolute_import
from scalyr_agent import compat
__author__ = "czerwin@scalyr.com"
import socket
import struct
import io
from scalyr_agent.monitor_utils.server_processors import (
RequestStream,
ConnectionProcessor,
)
from scalyr_agent.monitor_utils.server_processors import (
Int32RequestParser,
LineRequestParser,
)
from scalyr_agent.monitor_utils.server_processors import (
ConnectionIdleTooLong,
RequestSizeExceeded,
)
from scalyr_agent.util import FakeRunState
from scalyr_agent.test_base import ScalyrTestCase
class TestInt32RequestParser(ScalyrTestCase):
def setUp(self):
super(TestInt32RequestParser, self).setUp()
self.__buffer = io.BytesIO()
def test_basic_case(self):
self.assertEquals(self.run_test_case(b"Hi there", 8), b"Hi there")
self.assertEquals(self.run_test_case(b"Hi thereok", 8), b"Hi there")
def test_prefix_not_ready(self):
self.assertIsNone(self.run_test_case(b"Hi there", 8, truncate_size=2))
def test_body_not_ready(self):
self.assertIsNone(self.run_test_case(b"Hi there", 8, truncate_size=8))
def test_request_too_long(self):
self.assertRaises(
RequestSizeExceeded, self.run_test_case, b"Hi there fool again", 18
)
def run_test_case(self, input_string, length_to_send, truncate_size=None):
input_buffer = io.BytesIO()
# 2->TODO struct.pack|unpack in python < 2.7.7 does not allow unicode format string.
input_buffer.write(compat.struct_pack_unicode("!I", length_to_send))
input_buffer.write(input_string)
if truncate_size is not None:
input_buffer.truncate(truncate_size)
input_buffer.seek(truncate_size)
num_bytes = input_buffer.tell()
input_buffer.seek(0)
result = Int32RequestParser(10).parse_request(input_buffer, num_bytes)
if result is None:
self.assertEquals(0, input_buffer.tell())
else:
self.assertEquals(len(result) + struct.calcsize("!I"), input_buffer.tell())
return result
class FakeSocket(object):
"""An object that implements the socket interface necessary for testing RequestStream."""
def __init__(self):
# True if the socket has been closed.
self.__is_closed = False
        # Holds the fake data that is inserted by the test code and will be returned to callers of recv.
self.__stream = io.BytesIO()
def recv(self, max_bytes):
"""Reads data from the socket.
This test object will return the next bytes that were added to this object using
        the add_input method. If not enough bytes are available, a socket timeout is raised.
If the socket is closed, then returns None.
@param max_bytes: The maximum number of bytes to return.
"""
result = self.__stream.read(max_bytes)
if len(result) == 0:
if self.__is_closed:
return None
else:
raise socket.timeout()
return result
def setblocking(self, is_blocking):
"""Set whether or not the socket is blocking.
This is provided just because the code being tested needs to invoke it.
@param is_blocking: Whether or not the socket should be blocking.
"""
pass
def add_input(self, string_input):
"""Adds the specified string to the pending input that will be returned by calls to 'recv' on this
socket.
Testing code can use this to simulate bytes being received.
@param string_input: The input.
"""
# We have to remember our current position in the stream so that we can return here.
original_position = self.__stream.tell()
# Go to the end and insert the bytes.
self.__stream.seek(0, 2)
self.__stream.write(string_input)
# Then return.
self.__stream.seek(original_position)
def close(self):
self.__is_closed = True
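# Usage sketch: FakeSocket mimics the small slice of the socket API that the
# stream code under test needs.
#
#   s = FakeSocket()
#   s.add_input(b"abc")
#   s.recv(2)    # b"ab"
#   s.recv(10)   # b"c"
#   s.recv(1)    # raises socket.timeout (no data, still open)
#   s.close()
#   s.recv(1)    # None (no data, closed)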
class TestRequestStream(ScalyrTestCase):
def setUp(self):
super(TestRequestStream, self).setUp()
self.__fake_socket = FakeSocket()
self.__fake_run_state = FakeRunState()
parser = LineRequestParser(10)
self.__request_stream = RequestStream(
self.__fake_socket,
parser.parse_request,
max_buffer_size=10,
max_request_size=10,
)
def test_basic_case(self):
# Basic case of just a single line.
self.__fake_socket.add_input(b"Hi there\n")
self.assertEquals(self.read_request(), b"Hi there\n")
self.assertEquals(self.total_times_slept(), 1)
self.assertEquals(self.buffer_size(), 0)
self.assertIsNone(self.read_request())
def test_multiple_lines(self):
self.__fake_socket.add_input(b"Hi\nBye\nOk\n")
self.assertEquals(self.read_request(), b"Hi\n")
self.assertEquals(self.buffer_size(), 10)
self.assertEquals(self.read_request(), b"Bye\n")
self.assertEquals(self.read_request(), b"Ok\n")
self.assertEquals(self.total_times_slept(), 1)
self.assertEquals(self.buffer_size(), 0)
self.assertFalse(self.at_end())
self.assertIsNone(self.read_request())
def test_broken_lines(self):
self.__fake_socket.add_input(b"Hi there")
self.assertIsNone(self.read_request())
self.__fake_socket.add_input(b"\n")
self.assertEquals(self.read_request(), b"Hi there\n")
self.assertEquals(self.total_times_slept(), 2)
self.assertEquals(self.buffer_size(), 0)
self.assertFalse(self.at_end())
def test_request_too_long(self):
self.__fake_socket.add_input(b"0123456789")
self.assertRaises(RequestSizeExceeded, self.read_request)
self.assertFalse(self.at_end())
def test_full_compaction(self):
self.__fake_socket.add_input(b"012\n345678")
self.assertEquals(self.read_request(), b"012\n")
self.assertEquals(self.total_times_slept(), 1)
self.assertEquals(self.buffer_size(), 10)
self.assertFalse(self.at_end())
self.assertIsNone(self.read_request())
self.assertEquals(self.buffer_size(), 6)
self.__fake_socket.add_input(b"\n")
self.assertEquals(self.read_request(), b"345678\n")
self.assertEquals(self.total_times_slept(), 3)
self.assertEquals(self.buffer_size(), 0)
def test_close(self):
self.__fake_socket.add_input(b"Hi there\n")
self.__fake_socket.close()
self.assertEquals(self.read_request(), b"Hi there\n")
self.assertEquals(self.total_times_slept(), 1)
self.assertEquals(self.buffer_size(), 0)
self.assertIsNone(self.read_request())
self.assertTrue(self.at_end())
self.assertEquals(self.total_times_slept(), 2)
def read_request(self):
return self.__request_stream.read_request(run_state=self.__fake_run_state)
def total_times_slept(self):
return self.__fake_run_state.total_times_slept
def buffer_size(self):
return self.__request_stream.get_buffer_size()
def at_end(self):
return self.__request_stream.at_end()
class TestLineRequestEOF(ScalyrTestCase):
def setUp(self):
super(TestLineRequestEOF, self).setUp()
self.__fake_socket = FakeSocket()
self.__fake_run_state = FakeRunState()
parser = LineRequestParser(100, eof_as_eol=True)
self.__request_stream = RequestStream(
self.__fake_socket,
parser.parse_request,
max_buffer_size=100,
max_request_size=100,
)
def read_request(self):
return self.__request_stream.read_request(run_state=self.__fake_run_state)
def test_eof_as_eol(self):
self.__fake_socket.add_input(b"Hi there\nGoodbye")
self.assertEquals(self.read_request(), b"Hi there\n")
self.assertEquals(self.read_request(), b"Goodbye")
class TestConnectionHandler(ScalyrTestCase):
def setUp(self):
super(TestConnectionHandler, self).setUp()
self.__fake_socket = FakeSocket()
self.__fake_run_state = FakeRunState()
self.__last_request = None
parser = LineRequestParser(10)
request_stream = RequestStream(
self.__fake_socket,
parser.parse_request,
max_buffer_size=10,
max_request_size=10,
)
self.__fake_handler = ConnectionProcessor(
request_stream, self.execute_request, self.__fake_run_state, 5.0
)
self.__fake_time = 0.0
def test_basic_case(self):
self.__fake_socket.add_input(b"Hi there\n")
self.assertTrue(self.run_single_cycle())
self.assertEquals(self.__last_request, b"Hi there\n")
def test_multiple_requests(self):
self.__fake_socket.add_input(b"Hi there\n")
self.assertTrue(self.run_single_cycle())
self.assertEquals(self.__last_request, b"Hi there\n")
self.advance_time(3.0)
self.__fake_socket.add_input(b"2nd there\n")
self.assertTrue(self.run_single_cycle())
self.assertEquals(self.__last_request, b"2nd there\n")
self.advance_time(3.0)
self.assertTrue(self.run_single_cycle())
self.assertIsNone(self.__last_request)
def test_inactivity_error(self):
self.__fake_socket.add_input(b"Hi there\n")
self.assertTrue(self.run_single_cycle())
self.assertEquals(self.__last_request, b"Hi there\n")
self.advance_time(3.0)
self.assertTrue(self.run_single_cycle())
self.assertIsNone(self.__last_request)
self.advance_time(3.0)
self.assertRaises(ConnectionIdleTooLong, self.run_single_cycle)
def test_run_state_done(self):
self.__fake_socket.add_input(b"Hi there\nOk\n")
self.assertTrue(self.run_single_cycle())
self.assertEquals(self.__last_request, b"Hi there\n")
self.__fake_run_state.stop()
self.assertFalse(self.run_single_cycle())
self.assertIsNone(self.__last_request)
def test_connection_closed(self):
self.__fake_socket.add_input(b"Hi there\n")
self.assertTrue(self.run_single_cycle())
self.assertEquals(self.__last_request, b"Hi there\n")
self.__fake_socket.close()
self.assertFalse(self.run_single_cycle())
self.assertIsNone(self.__last_request)
def execute_request(self, request):
self.__last_request = request
def run_single_cycle(self):
self.__last_request = None
return self.__fake_handler.run_single_cycle(current_time=self.__fake_time)
def advance_time(self, delta):
self.__fake_time += delta
|
|
"""
Support for Xiaomi Yeelight Wifi color bulb.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.yeelight/
"""
import logging
import voluptuous as vol
from homeassistant.util.color import (
color_temperature_mired_to_kelvin as mired_to_kelvin,
color_temperature_kelvin_to_mired as kelvin_to_mired)
from homeassistant.const import CONF_DEVICES, CONF_NAME
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_COLOR_TEMP,
ATTR_FLASH, FLASH_SHORT, FLASH_LONG, ATTR_EFFECT, SUPPORT_BRIGHTNESS,
SUPPORT_COLOR, SUPPORT_TRANSITION, SUPPORT_COLOR_TEMP, SUPPORT_FLASH,
SUPPORT_EFFECT, Light, PLATFORM_SCHEMA, ATTR_ENTITY_ID, DOMAIN)
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
REQUIREMENTS = ['yeelight==0.4.3']
_LOGGER = logging.getLogger(__name__)
LEGACY_DEVICE_TYPE_MAP = {
'color1': 'rgb',
'mono1': 'white',
'strip1': 'strip',
'bslamp1': 'bedside',
'ceiling1': 'ceiling',
}
DEFAULT_NAME = 'Yeelight'
DEFAULT_TRANSITION = 350
CONF_MODEL = 'model'
CONF_TRANSITION = 'transition'
CONF_SAVE_ON_CHANGE = 'save_on_change'
CONF_MODE_MUSIC = 'use_music_mode'
CONF_CUSTOM_EFFECTS = 'custom_effects'
CONF_FLOW_PARAMS = 'flow_params'
DATA_KEY = 'light.yeelight'
ATTR_MODE = 'mode'
ATTR_COUNT = 'count'
ATTR_ACTION = 'action'
ATTR_TRANSITIONS = 'transitions'
ACTION_RECOVER = 'recover'
ACTION_STAY = 'stay'
ACTION_OFF = 'off'
YEELIGHT_RGB_TRANSITION = 'RGBTransition'
YEELIGHT_HSV_TRANSACTION = 'HSVTransition'
YEELIGHT_TEMPERATURE_TRANSACTION = 'TemperatureTransition'
YEELIGHT_SLEEP_TRANSACTION = 'SleepTransition'
YEELIGHT_SERVICE_SCHEMA = vol.Schema({
vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
})
YEELIGHT_FLOW_TRANSITION_SCHEMA = {
vol.Optional(ATTR_COUNT, default=0): cv.positive_int,
vol.Optional(ATTR_ACTION, default=ACTION_RECOVER):
vol.Any(ACTION_RECOVER, ACTION_OFF, ACTION_STAY),
vol.Required(ATTR_TRANSITIONS): [{
vol.Exclusive(YEELIGHT_RGB_TRANSITION, CONF_TRANSITION):
vol.All(cv.ensure_list, [cv.positive_int]),
vol.Exclusive(YEELIGHT_HSV_TRANSACTION, CONF_TRANSITION):
vol.All(cv.ensure_list, [cv.positive_int]),
vol.Exclusive(YEELIGHT_TEMPERATURE_TRANSACTION, CONF_TRANSITION):
vol.All(cv.ensure_list, [cv.positive_int]),
vol.Exclusive(YEELIGHT_SLEEP_TRANSACTION, CONF_TRANSITION):
vol.All(cv.ensure_list, [cv.positive_int]),
}]
}
DEVICE_SCHEMA = vol.Schema({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TRANSITION, default=DEFAULT_TRANSITION): cv.positive_int,
vol.Optional(CONF_MODE_MUSIC, default=False): cv.boolean,
vol.Optional(CONF_SAVE_ON_CHANGE, default=False): cv.boolean,
vol.Optional(CONF_MODEL): cv.string,
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA},
vol.Optional(CONF_CUSTOM_EFFECTS): [{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_FLOW_PARAMS): YEELIGHT_FLOW_TRANSITION_SCHEMA
}]
})
SUPPORT_YEELIGHT = (SUPPORT_BRIGHTNESS |
SUPPORT_TRANSITION |
SUPPORT_FLASH)
SUPPORT_YEELIGHT_WHITE_TEMP = (SUPPORT_YEELIGHT |
SUPPORT_COLOR_TEMP)
SUPPORT_YEELIGHT_RGB = (SUPPORT_YEELIGHT |
SUPPORT_COLOR |
SUPPORT_EFFECT |
SUPPORT_COLOR_TEMP)
EFFECT_DISCO = "Disco"
EFFECT_TEMP = "Slow Temp"
EFFECT_STROBE = "Strobe epilepsy!"
EFFECT_STROBE_COLOR = "Strobe color"
EFFECT_ALARM = "Alarm"
EFFECT_POLICE = "Police"
EFFECT_POLICE2 = "Police2"
EFFECT_CHRISTMAS = "Christmas"
EFFECT_RGB = "RGB"
EFFECT_RANDOM_LOOP = "Random Loop"
EFFECT_FAST_RANDOM_LOOP = "Fast Random Loop"
EFFECT_LSD = "LSD"
EFFECT_SLOWDOWN = "Slowdown"
EFFECT_WHATSAPP = "WhatsApp"
EFFECT_FACEBOOK = "Facebook"
EFFECT_TWITTER = "Twitter"
EFFECT_STOP = "Stop"
YEELIGHT_EFFECT_LIST = [
EFFECT_DISCO,
EFFECT_TEMP,
EFFECT_STROBE,
EFFECT_STROBE_COLOR,
EFFECT_ALARM,
EFFECT_POLICE,
EFFECT_POLICE2,
EFFECT_CHRISTMAS,
EFFECT_RGB,
EFFECT_RANDOM_LOOP,
EFFECT_FAST_RANDOM_LOOP,
EFFECT_LSD,
EFFECT_SLOWDOWN,
EFFECT_WHATSAPP,
EFFECT_FACEBOOK,
EFFECT_TWITTER,
EFFECT_STOP]
SERVICE_SET_MODE = 'yeelight_set_mode'
SERVICE_START_FLOW = 'yeelight_start_flow'
def _cmd(func):
"""Define a wrapper to catch exceptions from the bulb."""
def _wrap(self, *args, **kwargs):
import yeelight
try:
_LOGGER.debug("Calling %s with %s %s", func, args, kwargs)
return func(self, *args, **kwargs)
except yeelight.BulbException as ex:
_LOGGER.error("Error when calling %s: %s", func, ex)
return _wrap
def _parse_custom_effects(effects_config):
effects = {}
for config in effects_config:
params = config[CONF_FLOW_PARAMS]
transitions = YeelightLight.transitions_config_parser(
params[ATTR_TRANSITIONS])
effects[config[CONF_NAME]] = \
{ATTR_COUNT: params[ATTR_COUNT], ATTR_TRANSITIONS: transitions}
return effects
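# Shape sketch of a validated custom-effects entry (hypothetical effect name
# and RGBTransition arguments):
#
#   _parse_custom_effects([{
#       CONF_NAME: 'Sunrise',
#       CONF_FLOW_PARAMS: {
#           ATTR_COUNT: 1,
#           ATTR_TRANSITIONS: [{YEELIGHT_RGB_TRANSITION: [255, 80, 0]}],
#       },
#   }])
#   # -> {'Sunrise': {ATTR_COUNT: 1, ATTR_TRANSITIONS: [RGBTransition(...)]}}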
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Yeelight bulbs."""
from yeelight.enums import PowerMode
if DATA_KEY not in hass.data:
hass.data[DATA_KEY] = {}
lights = []
if discovery_info is not None:
_LOGGER.debug("Adding autodetected %s", discovery_info['hostname'])
device_type = discovery_info['device_type']
legacy_device_type = LEGACY_DEVICE_TYPE_MAP.get(device_type,
device_type)
# Not using hostname, as it seems to vary.
name = "yeelight_%s_%s" % (legacy_device_type,
discovery_info['properties']['mac'])
device = {'name': name, 'ipaddr': discovery_info['host']}
light = YeelightLight(device, DEVICE_SCHEMA({CONF_MODEL: device_type}))
lights.append(light)
hass.data[DATA_KEY][name] = light
else:
for ipaddr, device_config in config[CONF_DEVICES].items():
name = device_config[CONF_NAME]
_LOGGER.debug("Adding configured %s", name)
device = {'name': name, 'ipaddr': ipaddr}
if CONF_CUSTOM_EFFECTS in config:
custom_effects = \
_parse_custom_effects(config[CONF_CUSTOM_EFFECTS])
else:
custom_effects = None
light = YeelightLight(device, device_config,
custom_effects=custom_effects)
lights.append(light)
hass.data[DATA_KEY][name] = light
add_entities(lights, True)
def service_handler(service):
"""Dispatch service calls to target entities."""
params = {key: value for key, value in service.data.items()
if key != ATTR_ENTITY_ID}
entity_ids = service.data.get(ATTR_ENTITY_ID)
target_devices = [dev for dev in hass.data[DATA_KEY].values()
if dev.entity_id in entity_ids]
for target_device in target_devices:
if service.service == SERVICE_SET_MODE:
target_device.set_mode(**params)
elif service.service == SERVICE_START_FLOW:
target_device.start_flow(**params)
service_schema_set_mode = YEELIGHT_SERVICE_SCHEMA.extend({
vol.Required(ATTR_MODE):
vol.In([mode.name.lower() for mode in PowerMode])
})
hass.services.register(
DOMAIN, SERVICE_SET_MODE, service_handler,
schema=service_schema_set_mode)
service_schema_start_flow = YEELIGHT_SERVICE_SCHEMA.extend(
YEELIGHT_FLOW_TRANSITION_SCHEMA
)
hass.services.register(
DOMAIN, SERVICE_START_FLOW, service_handler,
schema=service_schema_start_flow)
class YeelightLight(Light):
"""Representation of a Yeelight light."""
def __init__(self, device, config, custom_effects=None):
"""Initialize the Yeelight light."""
self.config = config
self._name = device['name']
self._ipaddr = device['ipaddr']
self._supported_features = SUPPORT_YEELIGHT
self._available = False
self._bulb_device = None
self._brightness = None
self._color_temp = None
self._is_on = None
self._hs = None
self._model = config.get('model')
self._min_mireds = None
self._max_mireds = None
if custom_effects:
self._custom_effects = custom_effects
else:
self._custom_effects = {}
@property
def available(self) -> bool:
"""Return if bulb is available."""
return self._available
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._supported_features
@property
def effect_list(self):
"""Return the list of supported effects."""
return YEELIGHT_EFFECT_LIST + self.custom_effects_names
@property
def color_temp(self) -> int:
"""Return the color temperature."""
return self._color_temp
@property
def name(self) -> str:
"""Return the name of the device if any."""
return self._name
@property
def is_on(self) -> bool:
"""Return true if device is on."""
return self._is_on
@property
def brightness(self) -> int:
"""Return the brightness of this light between 1..255."""
return self._brightness
@property
def min_mireds(self):
"""Return minimum supported color temperature."""
return self._min_mireds
@property
def max_mireds(self):
"""Return maximum supported color temperature."""
return self._max_mireds
@property
def custom_effects(self):
"""Return dict with custom effects."""
return self._custom_effects
@property
def custom_effects_names(self):
"""Return list with custom effects names."""
return list(self.custom_effects.keys())
def _get_hs_from_properties(self):
rgb = self._properties.get('rgb', None)
color_mode = self._properties.get('color_mode', None)
if not rgb or not color_mode:
return None
color_mode = int(color_mode)
if color_mode == 2: # color temperature
temp_in_k = mired_to_kelvin(self._color_temp)
return color_util.color_temperature_to_hs(temp_in_k)
if color_mode == 3: # hsv
hue = int(self._properties.get('hue'))
sat = int(self._properties.get('sat'))
return (hue / 360 * 65536, sat / 100 * 255)
rgb = int(rgb)
blue = rgb & 0xff
green = (rgb >> 8) & 0xff
red = (rgb >> 16) & 0xff
return color_util.color_RGB_to_hs(red, green, blue)
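    # Decoding sketch: 'rgb' arrives as a packed 24-bit integer, e.g. for
    # 0xFF8000 (orange):
    #
    #   rgb = 0xFF8000
    #   (rgb >> 16) & 0xff, (rgb >> 8) & 0xff, rgb & 0xff   # (255, 128, 0)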
@property
def hs_color(self) -> tuple:
"""Return the color property."""
return self._hs
@property
def _properties(self) -> dict:
if self._bulb_device is None:
return {}
return self._bulb_device.last_properties
# F821: https://github.com/PyCQA/pyflakes/issues/373
@property
def _bulb(self) -> 'yeelight.Bulb': # noqa: F821
import yeelight
if self._bulb_device is None:
try:
self._bulb_device = yeelight.Bulb(self._ipaddr,
model=self._model)
self._bulb_device.get_properties() # force init for type
self._available = True
except yeelight.BulbException as ex:
self._available = False
_LOGGER.error("Failed to connect to bulb %s, %s: %s",
self._ipaddr, self._name, ex)
return self._bulb_device
def set_music_mode(self, mode) -> None:
"""Set the music mode on or off."""
if mode:
self._bulb.start_music()
else:
self._bulb.stop_music()
def update(self) -> None:
"""Update properties from the bulb."""
import yeelight
try:
self._bulb.get_properties()
if self._bulb_device.bulb_type == yeelight.BulbType.Color:
self._supported_features = SUPPORT_YEELIGHT_RGB
elif self._bulb_device.bulb_type == yeelight.BulbType.WhiteTemp:
self._supported_features = SUPPORT_YEELIGHT_WHITE_TEMP
if self._min_mireds is None:
model_specs = self._bulb.get_model_specs()
self._min_mireds = \
kelvin_to_mired(model_specs['color_temp']['max'])
self._max_mireds = \
kelvin_to_mired(model_specs['color_temp']['min'])
self._is_on = self._properties.get('power') == 'on'
bright = self._properties.get('bright', None)
if bright:
self._brightness = round(255 * (int(bright) / 100))
temp_in_k = self._properties.get('ct', None)
if temp_in_k:
self._color_temp = kelvin_to_mired(int(temp_in_k))
self._hs = self._get_hs_from_properties()
self._available = True
except yeelight.BulbException as ex:
if self._available: # just inform once
_LOGGER.error("Unable to update bulb status: %s", ex)
self._available = False
@_cmd
def set_brightness(self, brightness, duration) -> None:
"""Set bulb brightness."""
if brightness:
_LOGGER.debug("Setting brightness: %s", brightness)
self._bulb.set_brightness(brightness / 255 * 100,
duration=duration)
@_cmd
def set_rgb(self, rgb, duration) -> None:
"""Set bulb's color."""
if rgb and self.supported_features & SUPPORT_COLOR:
_LOGGER.debug("Setting RGB: %s", rgb)
self._bulb.set_rgb(rgb[0], rgb[1], rgb[2], duration=duration)
@_cmd
def set_colortemp(self, colortemp, duration) -> None:
"""Set bulb's color temperature."""
if colortemp and self.supported_features & SUPPORT_COLOR_TEMP:
temp_in_k = mired_to_kelvin(colortemp)
_LOGGER.debug("Setting color temp: %s K", temp_in_k)
self._bulb.set_color_temp(temp_in_k, duration=duration)
@_cmd
def set_default(self) -> None:
"""Set current options as default."""
self._bulb.set_default()
@_cmd
def set_flash(self, flash) -> None:
"""Activate flash."""
if flash:
from yeelight import (RGBTransition, SleepTransition, Flow,
BulbException)
if self._bulb.last_properties["color_mode"] != 1:
_LOGGER.error("Flash supported currently only in RGB mode.")
return
transition = int(self.config[CONF_TRANSITION])
if flash == FLASH_LONG:
count = 1
duration = transition * 5
            elif flash == FLASH_SHORT:
count = 1
duration = transition * 2
red, green, blue = color_util.color_hs_to_RGB(*self._hs)
transitions = list()
transitions.append(
RGBTransition(255, 0, 0, brightness=10, duration=duration))
transitions.append(SleepTransition(
duration=transition))
transitions.append(
RGBTransition(red, green, blue, brightness=self.brightness,
duration=duration))
flow = Flow(count=count, transitions=transitions)
try:
self._bulb.start_flow(flow)
except BulbException as ex:
_LOGGER.error("Unable to set flash: %s", ex)
@_cmd
def set_effect(self, effect) -> None:
"""Activate effect."""
if effect:
from yeelight import (Flow, BulbException)
from yeelight.transitions import (disco, temp, strobe, pulse,
strobe_color, alarm, police,
police2, christmas, rgb,
randomloop, lsd, slowdown)
if effect == EFFECT_STOP:
self._bulb.stop_flow()
return
effects_map = {
EFFECT_DISCO: disco,
EFFECT_TEMP: temp,
EFFECT_STROBE: strobe,
EFFECT_STROBE_COLOR: strobe_color,
EFFECT_ALARM: alarm,
EFFECT_POLICE: police,
EFFECT_POLICE2: police2,
EFFECT_CHRISTMAS: christmas,
EFFECT_RGB: rgb,
EFFECT_RANDOM_LOOP: randomloop,
EFFECT_LSD: lsd,
EFFECT_SLOWDOWN: slowdown,
}
if effect in self.custom_effects_names:
flow = Flow(**self.custom_effects[effect])
elif effect in effects_map:
flow = Flow(count=0, transitions=effects_map[effect]())
elif effect == EFFECT_FAST_RANDOM_LOOP:
flow = Flow(count=0, transitions=randomloop(duration=250))
elif effect == EFFECT_WHATSAPP:
flow = Flow(count=2, transitions=pulse(37, 211, 102))
elif effect == EFFECT_FACEBOOK:
flow = Flow(count=2, transitions=pulse(59, 89, 152))
elif effect == EFFECT_TWITTER:
flow = Flow(count=2, transitions=pulse(0, 172, 237))
try:
self._bulb.start_flow(flow)
except BulbException as ex:
_LOGGER.error("Unable to set effect: %s", ex)
def turn_on(self, **kwargs) -> None:
"""Turn the bulb on."""
import yeelight
brightness = kwargs.get(ATTR_BRIGHTNESS)
colortemp = kwargs.get(ATTR_COLOR_TEMP)
hs_color = kwargs.get(ATTR_HS_COLOR)
rgb = color_util.color_hs_to_RGB(*hs_color) if hs_color else None
flash = kwargs.get(ATTR_FLASH)
effect = kwargs.get(ATTR_EFFECT)
duration = int(self.config[CONF_TRANSITION]) # in ms
if ATTR_TRANSITION in kwargs: # passed kwarg overrides config
duration = int(kwargs.get(ATTR_TRANSITION) * 1000) # kwarg in s
try:
self._bulb.turn_on(duration=duration)
except yeelight.BulbException as ex:
_LOGGER.error("Unable to turn the bulb on: %s", ex)
return
if self.config[CONF_MODE_MUSIC] and not self._bulb.music_mode:
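            # Music mode keeps a direct connection to the bulb, which avoids
            # the command rate limit imposed by the regular API.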
try:
self.set_music_mode(self.config[CONF_MODE_MUSIC])
except yeelight.BulbException as ex:
_LOGGER.error("Unable to turn on music mode,"
"consider disabling it: %s", ex)
try:
            # Values are checked for None inside each setter method.
self.set_rgb(rgb, duration)
self.set_colortemp(colortemp, duration)
self.set_brightness(brightness, duration)
self.set_flash(flash)
self.set_effect(effect)
except yeelight.BulbException as ex:
_LOGGER.error("Unable to set bulb properties: %s", ex)
return
# save the current state if we had a manual change.
if self.config[CONF_SAVE_ON_CHANGE] and (brightness
or colortemp
or rgb):
try:
self.set_default()
except yeelight.BulbException as ex:
_LOGGER.error("Unable to set the defaults: %s", ex)
return
def turn_off(self, **kwargs) -> None:
"""Turn off."""
import yeelight
duration = int(self.config[CONF_TRANSITION]) # in ms
if ATTR_TRANSITION in kwargs: # passed kwarg overrides config
duration = int(kwargs.get(ATTR_TRANSITION) * 1000) # kwarg in s
try:
self._bulb.turn_off(duration=duration)
except yeelight.BulbException as ex:
_LOGGER.error("Unable to turn the bulb off: %s", ex)
def set_mode(self, mode: str):
"""Set a power mode."""
import yeelight
try:
self._bulb.set_power_mode(yeelight.enums.PowerMode[mode.upper()])
self.async_schedule_update_ha_state(True)
except yeelight.BulbException as ex:
_LOGGER.error("Unable to set the power mode: %s", ex)
@staticmethod
def transitions_config_parser(transitions):
"""Parse transitions config into initialized objects."""
import yeelight
transition_objects = []
for transition_config in transitions:
transition, params = list(transition_config.items())[0]
transition_objects.append(getattr(yeelight, transition)(*params))
return transition_objects
def start_flow(self, transitions, count=0, action=ACTION_RECOVER):
"""Start flow."""
import yeelight
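        # Flow.actions maps the configured action name to the bulb's behaviour
        # once the flow finishes (e.g. recover the previous state).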
try:
flow = yeelight.Flow(
count=count,
action=yeelight.Flow.actions[action],
transitions=self.transitions_config_parser(transitions))
self._bulb.start_flow(flow)
except yeelight.BulbException as ex:
_LOGGER.error("Unable to set effect: %s", ex)
#!/usr/bin/env python
import os
import platform
import stat
import time
import unittest
import mock
from grr.client import vfs
from grr.lib import action_mocks
from grr.lib import aff4
from grr.lib import client_fixture
from grr.lib import flags
from grr.lib import flow_runner
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.aff4_objects import sequential_collection
# For RegistryFinder pylint: disable=unused-import
from grr.lib.flows.general import registry as _
# pylint: enable=unused-import
from grr.lib.rdfvalues import paths as rdf_paths
from grr.lib.rdfvalues import protodict as rdf_protodict
class WindowsActionTests(test_lib.OSSpecificClientTests):
def setUp(self):
super(WindowsActionTests, self).setUp()
self.win32com = mock.MagicMock()
self.win32com.client = mock.MagicMock()
modules = {
"_winreg":
mock.MagicMock(),
# Requires mocking because exceptions.WindowsError does not exist
"exceptions":
mock.MagicMock(),
"pythoncom":
mock.MagicMock(),
"pywintypes":
mock.MagicMock(),
"win32api":
mock.MagicMock(),
# Necessary to stop the import of client_actions.standard re-populating
# actions.ActionPlugin.classes
("grr.client.client_actions"
".standard"):
mock.MagicMock(),
"win32com":
self.win32com,
"win32com.client":
self.win32com.client,
"win32file":
mock.MagicMock(),
"win32service":
mock.MagicMock(),
"win32serviceutil":
mock.MagicMock(),
"winerror":
mock.MagicMock(),
"wmi":
mock.MagicMock()
}
self.module_patcher = mock.patch.dict("sys.modules", modules)
self.module_patcher.start()
# pylint: disable= g-import-not-at-top
from grr.client.client_actions.windows import windows
# pylint: enable=g-import-not-at-top
self.windows = windows
def tearDown(self):
super(WindowsActionTests, self).tearDown()
self.module_patcher.stop()
@unittest.skipIf(platform.system() == "Darwin", (
"IPv6 address strings are cosmetically slightly different on OS X, "
"and we only expect this parsing code to run on Linux or maybe Windows"))
def testEnumerateInterfaces(self):
# Stub out wmi.WMI().Win32_NetworkAdapterConfiguration(IPEnabled=1)
wmi_object = self.windows.wmi.WMI.return_value
wmi_object.Win32_NetworkAdapterConfiguration.return_value = [
client_fixture.WMIWin32NetworkAdapterConfigurationMock()
]
enumif = self.windows.EnumerateInterfaces()
interface_dict_list = list(enumif.RunNetAdapterWMIQuery())
self.assertEqual(len(interface_dict_list), 1)
interface = interface_dict_list[0]
self.assertEqual(len(interface["addresses"]), 4)
addresses = [x.human_readable_address for x in interface["addresses"]]
self.assertItemsEqual(addresses, [
"192.168.1.20", "ffff::ffff:aaaa:1111:aaaa",
"dddd:0:8888:6666:bbbb:aaaa:eeee:bbbb",
"dddd:0:8888:6666:bbbb:aaaa:ffff:bbbb"
])
def testRunWMI(self):
wmi_obj = self.windows.win32com.client.GetObject.return_value
mock_query_result = mock.MagicMock()
mock_query_result.Properties_ = []
mock_config = client_fixture.WMIWin32NetworkAdapterConfigurationMock
wmi_properties = mock_config.__dict__.iteritems()
for key, value in wmi_properties:
keyval = mock.MagicMock()
keyval.Name, keyval.Value = key, value
mock_query_result.Properties_.append(keyval)
wmi_obj.ExecQuery.return_value = [mock_query_result]
result_list = list(self.windows.RunWMIQuery("select blah"))
self.assertEqual(len(result_list), 1)
result = result_list.pop()
self.assertTrue(isinstance(result, rdf_protodict.Dict))
nest = result["NestingTest"]
self.assertEqual(nest["one"]["two"], [3, 4])
self.assertTrue("Unsupported type" in nest["one"]["broken"])
self.assertTrue(isinstance(nest["one"]["three"], rdf_protodict.Dict))
self.assertEqual(nest["four"], [])
self.assertEqual(nest["five"], "astring")
self.assertEqual(nest["six"], [None, None, ""])
self.assertEqual(nest["seven"], None)
self.assertItemsEqual(nest["rdfvalue"].keys(), ["a"])
self.assertEqual(result["GatewayCostMetric"], [0, 256])
self.assertTrue(isinstance(result["OpaqueObject"], basestring))
self.assertTrue("Unsupported type" in result["OpaqueObject"])
class FakeKeyHandle(object):
def __init__(self, value):
self.value = value.replace("\\", "/")
def __enter__(self):
return self
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
return False
class RegistryFake(test_lib.FakeRegistryVFSHandler):
def OpenKey(self, key, sub_key):
res = "%s/%s" % (key.value, sub_key.replace("\\", "/"))
res = res.rstrip("/")
parts = res.split("/")
for cache_key in [
utils.Join(*[p.lower() for p in parts[:-1]] + parts[-1:]), res.lower()
]:
if not cache_key.startswith("/"):
cache_key = "/" + cache_key
if cache_key in self.cache[self.prefix]:
return FakeKeyHandle(cache_key)
raise IOError()
def QueryValueEx(self, key, value_name):
full_key = os.path.join(key.value.lower(), value_name).rstrip("/")
try:
stat_entry = self.cache[self.prefix][full_key][1]
data = stat_entry.registry_data.GetValue()
if data:
return data, str
except KeyError:
pass
raise IOError()
def QueryInfoKey(self, key):
num_keys = len(self._GetKeys(key))
num_vals = len(self._GetValues(key))
for path in self.cache[self.prefix]:
if path == key.value:
_, stat_entry = self.cache[self.prefix][path]
modification_time = stat_entry.st_mtime
if modification_time:
return num_keys, num_vals, modification_time
modification_time = time.time()
return num_keys, num_vals, modification_time
def EnumKey(self, key, index):
try:
return self._GetKeys(key)[index]
except IndexError:
raise IOError()
def _GetKeys(self, key):
res = []
for path in self.cache[self.prefix]:
if os.path.dirname(path) == key.value:
sub_type, stat_entry = self.cache[self.prefix][path]
if sub_type.__name__ == "VFSDirectory":
res.append(os.path.basename(stat_entry.pathspec.path))
return sorted(res)
def EnumValue(self, key, index):
try:
subkey = self._GetValues(key)[index]
value, value_type = self.QueryValueEx(key, subkey)
return subkey, value, value_type
except IndexError:
raise IOError()
def _GetValues(self, key):
res = []
for path in self.cache[self.prefix]:
if os.path.dirname(path) == key.value:
sub_type, stat_entry = self.cache[self.prefix][path]
if sub_type.__name__ == "VFSFile":
res.append(os.path.basename(stat_entry.pathspec.path))
return sorted(res)
class RegistryVFSTests(test_lib.EmptyActionTest):
def setUp(self):
super(RegistryVFSTests, self).setUp()
modules = {
"_winreg": mock.MagicMock(),
"ctypes": mock.MagicMock(),
"ctypes.wintypes": mock.MagicMock(),
# Requires mocking because exceptions.WindowsError does not exist
"exceptions": mock.MagicMock(),
}
self.module_patcher = mock.patch.dict("sys.modules", modules)
self.module_patcher.start()
# pylint: disable= g-import-not-at-top
from grr.client.vfs_handlers import registry
import exceptions
import _winreg
# pylint: enable=g-import-not-at-top
fixture = RegistryFake()
self.stubber = utils.MultiStubber(
(registry, "KeyHandle", FakeKeyHandle),
(registry, "OpenKey", fixture.OpenKey),
(registry, "QueryValueEx", fixture.QueryValueEx),
(registry, "QueryInfoKey", fixture.QueryInfoKey),
(registry, "EnumValue", fixture.EnumValue),
(registry, "EnumKey", fixture.EnumKey))
self.stubber.Start()
# Add the Registry handler to the vfs.
vfs.VFSInit().Run()
_winreg.HKEY_USERS = "HKEY_USERS"
_winreg.HKEY_LOCAL_MACHINE = "HKEY_LOCAL_MACHINE"
exceptions.WindowsError = IOError
def tearDown(self):
super(RegistryVFSTests, self).tearDown()
self.module_patcher.stop()
self.stubber.Stop()
def testRegistryListing(self):
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.REGISTRY,
path=("/HKEY_USERS/S-1-5-20/Software/Microsoft"
"/Windows/CurrentVersion/Run"))
expected_names = {"MctAdmin": stat.S_IFDIR, "Sidebar": stat.S_IFDIR}
expected_data = [
u"%ProgramFiles%\\Windows Sidebar\\Sidebar.exe /autoRun",
u"%TEMP%\\Sidebar.exe"
]
for f in vfs.VFSOpen(pathspec).ListFiles():
base, name = os.path.split(f.pathspec.CollapsePath())
self.assertEqual(base, pathspec.CollapsePath())
self.assertIn(name, expected_names)
self.assertIn(f.registry_data.GetValue(), expected_data)
def _RunRegistryFinder(self, paths=None):
client_mock = action_mocks.GlobClientMock()
client_id = self.SetupClients(1)[0]
for s in test_lib.TestFlowHelper(
"RegistryFinder",
client_mock,
client_id=client_id,
keys_paths=paths,
conditions=[],
token=self.token):
session_id = s
try:
return list(
aff4.FACTORY.Open(
session_id.Add(flow_runner.RESULTS_SUFFIX),
aff4_type=sequential_collection.GeneralIndexedCollection,
token=self.token))
except aff4.InstantiationError:
return []
def testRegistryFinder(self):
# Listing inside a key gives the values.
results = self._RunRegistryFinder(
["HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest/*"])
self.assertEqual(len(results), 2)
self.assertEqual(
sorted([x.stat_entry.registry_data.GetValue() for x in results]),
["Value1", "Value2"])
# This is a key so we should get back the default value.
results = self._RunRegistryFinder(
["HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest"])
self.assertEqual(len(results), 1)
self.assertEqual(results[0].stat_entry.registry_data.GetValue(),
"DefaultValue")
# The same should work using a wildcard.
results = self._RunRegistryFinder(["HKEY_LOCAL_MACHINE/SOFTWARE/*"])
self.assertTrue(results)
paths = [x.stat_entry.pathspec.path for x in results]
expected_path = u"/HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest"
self.assertIn(expected_path, paths)
idx = paths.index(expected_path)
self.assertEqual(results[idx].stat_entry.registry_data.GetValue(),
"DefaultValue")
def testRegistryMTimes(self):
# Just listing all keys does not generate a full stat entry for each of
# the results.
results = self._RunRegistryFinder(
["HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest/*"])
self.assertEqual(len(results), 2)
for result in results:
st = result.stat_entry
self.assertIsNone(st.st_mtime)
# Explicitly calling RegistryFinder on a value does though.
results = self._RunRegistryFinder([
"HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest/Value1",
"HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest/Value2",
])
self.assertEqual(len(results), 2)
for result in results:
st = result.stat_entry
path = utils.SmartStr(st.aff4path)
if "Value1" in path:
self.assertEqual(st.st_mtime, 110)
elif "Value2" in path:
self.assertEqual(st.st_mtime, 120)
else:
self.fail("Unexpected value: %s" % path)
# For Listdir, the situation is the same. Listing does not yield mtimes.
client_id = self.SetupClients(1)[0]
pb = rdf_paths.PathSpec(
path="/HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest",
pathtype=rdf_paths.PathSpec.PathType.REGISTRY)
output_path = client_id.Add("registry").Add(pb.first.path)
aff4.FACTORY.Delete(output_path, token=self.token)
client_mock = action_mocks.ListDirectoryClientMock()
for _ in test_lib.TestFlowHelper(
"ListDirectory",
client_mock,
client_id=client_id,
pathspec=pb,
token=self.token):
pass
results = list(
aff4.FACTORY.Open(
output_path, token=self.token).OpenChildren())
self.assertEqual(len(results), 2)
for result in results:
st = result.Get(result.Schema.STAT)
self.assertIsNone(st.st_mtime)
def testRecursiveRegistryListing(self):
"""Test our ability to walk over a registry tree."""
pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.REGISTRY)
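    # RecursiveListNames yields (path, subkey_names, value_names) tuples,
    # analogous to os.walk.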
walk_tups_0 = list(vfs.VFSOpen(pathspec).RecursiveListNames())
walk_tups_1 = list(vfs.VFSOpen(pathspec).RecursiveListNames(depth=1))
walk_tups_2 = list(vfs.VFSOpen(pathspec).RecursiveListNames(depth=2))
walk_tups_inf = list(
vfs.VFSOpen(pathspec).RecursiveListNames(depth=float("inf")))
self.assertEqual(walk_tups_0,
[(r"", [r"HKEY_LOCAL_MACHINE", r"HKEY_USERS"], [])])
self.assertEqual(walk_tups_1,
[(r"", [r"HKEY_LOCAL_MACHINE", r"HKEY_USERS"], []),
(r"HKEY_LOCAL_MACHINE", [r"SOFTWARE", r"SYSTEM"], []),
(r"HKEY_USERS", [
r"S-1-5-20",
r"S-1-5-21-2911950750-476812067-1487428992-1001",
r"S-1-5-21-702227000-2140022111-3110739999-1990"
], [])])
self.assertEqual(walk_tups_2, [
(r"", [r"HKEY_LOCAL_MACHINE", r"HKEY_USERS"], []),
(r"HKEY_LOCAL_MACHINE", [r"SOFTWARE", r"SYSTEM"], []),
(r"HKEY_LOCAL_MACHINE\SOFTWARE", [r"ListingTest", r"Microsoft"], []),
(r"HKEY_LOCAL_MACHINE\SYSTEM", [r"ControlSet001", r"Select"], []),
(r"HKEY_USERS", [
r"S-1-5-20", r"S-1-5-21-2911950750-476812067-1487428992-1001",
r"S-1-5-21-702227000-2140022111-3110739999-1990"
], []),
(r"HKEY_USERS\S-1-5-20", [r"Software"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001",
[r"Software"], []),
(r"HKEY_USERS\S-1-5-21-702227000-2140022111-3110739999-1990",
[r"Software"], []),
])
self.assertEqual(
walk_tups_inf,
[(r"", [r"HKEY_LOCAL_MACHINE", r"HKEY_USERS"], []),
(r"HKEY_LOCAL_MACHINE", [r"SOFTWARE", r"SYSTEM"], []),
(r"HKEY_LOCAL_MACHINE\SOFTWARE", [r"ListingTest", r"Microsoft"], []),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\ListingTest", [],
[r"Value1", r"Value2"]), (r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft",
[r"Windows", r"Windows NT"], []),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows",
[r"CurrentVersion"], []),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion", [],
[r"ProgramFilesDir", r"ProgramFilesDir (x86)"]),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT",
[r"CurrentVersion"], []),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion",
[r"ProfileList"], [r"SystemRoot"]),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion"
r"\ProfileList", [
r"S-1-5-21-702227000-2140022111-3110739999-1990",
r"S-1-5-21-702227068-2140022151-3110739409-1000"
], [r"ProfilesDirectory", r"ProgramData"]),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion"
r"\ProfileList\S-1-5-21-702227000-2140022111-3110739999-1990", [],
[r"ProfileImagePath"]),
(r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion"
r"\ProfileList\S-1-5-21-702227068-2140022151-3110739409-1000", [],
[r"ProfileImagePath"]), (r"HKEY_LOCAL_MACHINE\SYSTEM",
[r"ControlSet001", r"Select"], []),
(r"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001", [r"Control"], []),
(r"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control",
[r"Nls", r"Session Manager", r"TimeZoneInformation"], []),
(r"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control\Nls",
[r"CodePage"], []),
(r"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control\Nls\CodePage", [],
[r"ACP"]),
(r"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control\Session Manager",
[r"Environment"], []),
(r"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control\Session Manager"
r"\Environment", [], [r"Path", r"TEMP", r"windir"]),
(r"HKEY_LOCAL_MACHINE\SYSTEM\ControlSet001\Control"
r"\TimeZoneInformation", [], [r"StandardName"]),
(r"HKEY_LOCAL_MACHINE\SYSTEM\Select", [], [r"Current"]),
(r"HKEY_USERS",
[
r"S-1-5-20", r"S-1-5-21-2911950750-476812067-1487428992-1001",
r"S-1-5-21-702227000-2140022111-3110739999-1990"
], []), (r"HKEY_USERS\S-1-5-20", [r"Software"], []),
(r"HKEY_USERS\S-1-5-20\Software", [r"Microsoft"], []),
(r"HKEY_USERS\S-1-5-20\Software\Microsoft", [r"Windows"], []),
(r"HKEY_USERS\S-1-5-20\Software\Microsoft\Windows",
[r"CurrentVersion"], []),
(r"HKEY_USERS\S-1-5-20\Software\Microsoft\Windows\CurrentVersion",
[r"Run"], []),
(r"HKEY_USERS\S-1-5-20\Software\Microsoft\Windows\CurrentVersion\Run",
[], [r"MctAdmin", r"Sidebar"]),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001",
[r"Software"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software",
[r"Microsoft"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software"
r"\Microsoft", [r"Windows"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software"
r"\Microsoft\Windows", [r"CurrentVersion"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software"
r"\Microsoft\Windows\CurrentVersion", [r"Explorer"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software"
r"\Microsoft\Windows\CurrentVersion\Explorer", [r"ComDlg32"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software"
r"\Microsoft\Windows\CurrentVersion\Explorer\ComDlg32",
[r"OpenSavePidlMRU"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software"
r"\Microsoft\Windows\CurrentVersion\Explorer\ComDlg32"
r"\OpenSavePidlMRU", [r"dd"], []),
(r"HKEY_USERS\S-1-5-21-2911950750-476812067-1487428992-1001\Software"
r"\Microsoft\Windows\CurrentVersion\Explorer\ComDlg32\OpenSavePidlMRU"
r"\dd", [], [r"0"]),
(r"HKEY_USERS\S-1-5-21-702227000-2140022111-3110739999-1990",
[r"Software"], []),
(r"HKEY_USERS\S-1-5-21-702227000-2140022111-3110739999-1990\Software",
[r"Microsoft"], []),
(r"HKEY_USERS\S-1-5-21-702227000-2140022111-3110739999-1990\Software"
r"\Microsoft", [r"Windows"], []),
(r"HKEY_USERS\S-1-5-21-702227000-2140022111-3110739999-1990\Software"
r"\Microsoft\Windows", [r"CurrentVersion"], []),
(r"HKEY_USERS\S-1-5-21-702227000-2140022111-3110739999-1990\Software"
r"\Microsoft\Windows\CurrentVersion", [r"Run"], []),
(r"HKEY_USERS\S-1-5-21-702227000-2140022111-3110739999-1990\Software"
r"\Microsoft\Windows\CurrentVersion\Run", [], [r"NothingToSeeHere"])])
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)
import io
import logging
import time
from urllib.parse import parse_qs, urlparse
import requests
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.utils.functional import cached_property
from waldur_core.core import models as core_models
from waldur_core.core import utils as core_utils
from waldur_core.media.utils import guess_image_extension
from waldur_core.structure.backend import ServiceBackend
from waldur_core.structure.models import ServiceSettings
from waldur_core.structure.registry import get_resource_type
from waldur_core.structure.utils import update_pulled_fields
from waldur_mastermind.common.utils import parse_datetime
from waldur_rancher.enums import (
LONGHORN_NAME,
LONGHORN_NAMESPACE,
ClusterRoles,
GlobalRoles,
)
from waldur_rancher.exceptions import NotFound, RancherException
from . import client, models, signals, utils
logger = logging.getLogger(__name__)
class RancherBackend(ServiceBackend):
DEFAULTS = {
'cloud_init_template': '#cloud-config\n'
'packages: \n'
' - curl\n'
'runcmd:\n'
' - curl -fsSL https://get.docker.com -o get-docker.sh; sh get-docker.sh\n'
' - sudo systemctl start docker\n'
' - sudo systemctl enable docker\n'
' - [ sh, -c, "{command}" ]\n',
'default_mtu': 1400,
'private_registry_url': None,
'private_registry_user': None,
'private_registry_password': None,
'management_tenant_access_port': 443,
}
def __init__(self, settings):
"""
:type settings: :class:`waldur_core.structure.models.ServiceSettings`
"""
self.settings = settings
@cached_property
def client(self):
"""
Construct Rancher REST API client using credentials specified in the service settings.
"""
rancher_client = client.RancherClient(self.host, verify_ssl=False)
rancher_client.login(self.settings.username, self.settings.password)
return rancher_client
@cached_property
def host(self):
return self.settings.backend_url.strip('/')
def pull_service_properties(self):
self.pull_clusters()
self.pull_projects()
self.pull_namespaces()
self.pull_catalogs()
self.pull_templates()
self.pull_template_icons()
self.pull_workloads()
self.pull_hpas()
self.pull_apps()
self.pull_ingresses()
self.pull_services()
def pull_clusters(self):
"""
Mark stale clusters as erred.
"""
remote_clusters = self.client.list_clusters()
remote_clusters_map = {item['id']: item for item in remote_clusters}
local_clusters_map = {
cluster.backend_id: cluster
for cluster in models.Cluster.objects.filter(settings=self.settings)
}
stale_ids = set(local_clusters_map.keys()) - set(remote_clusters_map.keys())
# exclude not yet created clusters
stale_clusters = models.Cluster.objects.filter(
settings=self.settings, backend_id__in=stale_ids
).exclude(backend_id='')
stale_clusters.update(
state=models.Cluster.States.ERRED, error_message='Resource is gone.'
)
def get_kubeconfig_file(self, cluster):
return self.client.get_kubeconfig_file(cluster.backend_id)
def create_cluster(self, cluster):
mtu = self.settings.get_option('default_mtu')
private_registry = None
private_registry_url = self.settings.get_option('private_registry_url')
private_registry_user = self.settings.get_option('private_registry_user')
private_registry_password = self.settings.get_option(
'private_registry_password'
)
if private_registry_url and private_registry_user and private_registry_password:
private_registry = {
'url': private_registry_url,
'user': private_registry_user,
'password': private_registry_password,
}
backend_cluster = self.client.create_cluster(
cluster.name, mtu=mtu, private_registry=private_registry
)
self._backend_cluster_to_cluster(backend_cluster, cluster)
        # As the Rancher API is not transactional, give it 2s to write the
        # cluster state to etcd.
time.sleep(2)
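        # The registration token authorizes nodes to join the cluster; the
        # node command is what each VM executes to register itself.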
self.client.create_cluster_registration_token(cluster.backend_id)
cluster.node_command = self.client.get_node_command(cluster.backend_id)
cluster.save()
def delete_cluster(self, cluster):
if cluster.backend_id:
try:
self.client.delete_cluster(cluster.backend_id)
except NotFound:
                logger.debug(
                    'Cluster %s is not present in the backend', cluster.backend_id
                )
cluster.delete()
def delete_node(self, node):
if node.backend_id:
try:
self.client.delete_node(node.backend_id)
except NotFound:
                logger.debug('Node %s is not present in the backend', node.backend_id)
node.delete()
def update_cluster(self, cluster):
backend_cluster = self._cluster_to_backend_cluster(cluster)
self.client.update_cluster(cluster.backend_id, backend_cluster)
def _backend_cluster_to_cluster(self, backend_cluster, cluster):
cluster.backend_id = backend_cluster['id']
cluster.name = backend_cluster['name']
cluster.runtime_state = backend_cluster['state']
def _cluster_to_backend_cluster(self, cluster):
return {'name': cluster.name}
def _backend_node_to_node(self, backend_node):
return {
'backend_id': backend_node['id'],
'name': backend_node['requestedHostname'],
'controlplane_role': backend_node.get('controlPlane', False),
'etcd_role': backend_node.get('etcd', False),
'worker_role': backend_node.get('worker', False),
'runtime_state': backend_node.get('state', ''),
}
def get_nodes_count(self, remote_cluster):
spec = remote_cluster.get('appliedSpec', {})
config = spec.get('rancherKubernetesEngineConfig', {})
backend_nodes = config.get('nodes', [])
return len(backend_nodes)
def get_importable_clusters(self):
remote_clusters = [
{
'type': get_resource_type(models.Cluster),
'name': cluster['name'],
'backend_id': cluster['id'],
'extra': [
{'name': 'Description', 'value': cluster['description']},
{'name': 'Number of nodes', 'value': self.get_nodes_count(cluster)},
{'name': 'Created at', 'value': cluster['created']},
],
}
for cluster in self.client.list_clusters()
if cluster.get('state') == 'active'
]
return self.get_importable_resources(models.Cluster, remote_clusters)
def import_cluster(self, backend_id, project):
backend_cluster = self.client.get_cluster(backend_id)
        if backend_cluster.get('state', '') != models.Cluster.RuntimeStates.ACTIVE:
raise RancherException('Cannot import K8s cluster in non-active state.')
cluster = models.Cluster.objects.create(
backend_id=backend_id,
service_settings=self.settings,
project=project,
state=models.Cluster.States.OK,
runtime_state=backend_cluster['state'],
settings=self.settings,
)
self.pull_cluster(cluster, backend_cluster)
return cluster
def pull_cluster(self, cluster: models.Cluster, backend_cluster=None):
"""
Pull order is important because subsequent objects depend on previous ones.
For example, namespaces and catalogs depend on projects.
"""
self.pull_cluster_details(cluster, backend_cluster)
self.pull_cluster_nodes(cluster)
self.pull_projects_for_cluster(cluster)
self.pull_namespaces_for_cluster(cluster)
self.pull_catalogs_for_cluster(cluster)
self.pull_templates_for_cluster(cluster)
self.pull_cluster_workloads(cluster)
self.pull_cluster_hpas(cluster)
self.pull_cluster_apps(cluster)
self.pull_cluster_ingresses(cluster)
def pull_cluster_details(self, cluster, backend_cluster=None):
backend_cluster = backend_cluster or self.client.get_cluster(cluster.backend_id)
self._backend_cluster_to_cluster(backend_cluster, cluster)
cluster.save()
def pull_cluster_nodes(self, cluster: models.Cluster):
backend_nodes = self.get_cluster_nodes(cluster.backend_id)
for backend_node in backend_nodes:
            # If the node was not requested via Waldur, it is created here.
node, created = models.Node.objects.get_or_create(
name=backend_node['name'],
cluster=cluster,
defaults=dict(
backend_id=backend_node['backend_id'],
controlplane_role=backend_node['controlplane_role'],
etcd_role=backend_node['etcd_role'],
worker_role=backend_node['worker_role'],
),
)
if not node.backend_id:
                # If the node was requested via Waldur but has not been
                # synchronized yet.
node.backend_id = backend_node['backend_id']
node.controlplane_role = backend_node['controlplane_role']
node.etcd_role = backend_node['etcd_role']
node.worker_role = backend_node['worker_role']
node.save()
# Update details in all cases.
self.pull_node(node)
# Update nodes states.
utils.update_cluster_nodes_states(cluster.id)
def check_cluster_nodes(self, cluster):
self.pull_cluster_details(cluster)
if cluster.runtime_state == models.Cluster.RuntimeStates.ACTIVE:
            # No need to change the cluster state here; the executor handles it.
return
for node in cluster.node_set.filter(
Q(controlplane_role=True) | Q(etcd_role=True)
):
controlplane_role = etcd_role = False
if node.instance.state not in [
core_models.StateMixin.States.ERRED,
core_models.StateMixin.States.DELETING,
core_models.StateMixin.States.DELETION_SCHEDULED,
]:
if node.controlplane_role:
controlplane_role = True
if node.etcd_role:
etcd_role = True
if controlplane_role and etcd_role:
                # Return if at least one VM with the 'controlplane' or 'etcd'
                # role exists and is not in an error or deletion state;
                # returning here means the cluster state check is retried later.
return
cluster.error_message = (
            'The cluster is not connected to any non-failed VMs '
            'with the \'controlplane\' or \'etcd\' roles.'
)
cluster.runtime_state = 'error'
cluster.save()
def get_cluster_nodes(self, backend_id):
nodes = self.client.get_cluster_nodes(backend_id)
return [self._backend_node_to_node(node) for node in nodes]
def node_is_active(self, backend_id):
backend_node = self.client.get_node(backend_id)
return backend_node['state'] == models.Node.RuntimeStates.ACTIVE
def pull_node(self, node):
if not node.backend_id:
return
backend_node = self.client.get_node(node.backend_id)
        # Rancher may omit some fields while a node is being created,
        # so tolerate missing values instead of crashing.
def get_backend_node_field(*args):
value = backend_node
for arg in args:
if isinstance(value, dict):
value = value.get(arg)
else:
return
return value
def update_node_field(*args, field):
value = get_backend_node_field(*args)
if value:
setattr(node, field, value)
update_node_field('labels', field='labels')
update_node_field('annotations', field='annotations')
update_node_field('info', 'os', 'dockerVersion', field='docker_version')
update_node_field('info', 'kubernetes', 'kubeletVersion', field='k8s_version')
cpu_allocated = get_backend_node_field('requested', 'cpu')
if cpu_allocated:
            node.cpu_allocated = (
                core_utils.parse_int(cpu_allocated) / 1000
            )  # convert millicores (e.g. 380m) to cores (0.38)
ram_allocated = get_backend_node_field('requested', 'memory')
update_node_field('allocatable', 'cpu', field='cpu_total')
if ram_allocated:
node.ram_allocated = int(
core_utils.parse_int(ram_allocated) / 2 ** 20
            )  # convert to MiB
ram_total = get_backend_node_field('allocatable', 'memory')
if ram_total:
node.ram_total = int(
core_utils.parse_int(ram_total) / 2 ** 20
            )  # convert to MiB
update_node_field('requested', 'pods', field='pods_allocated')
update_node_field('allocatable', 'pods', field='pods_total')
update_node_field('state', field='runtime_state')
return node.save()
def create_user(self, user):
if user.backend_id:
return
password = models.RancherUser.make_random_password()
response = self.client.create_user(
name=user.user.username, username=user.user.username, password=password
)
user_id = response['id']
user.backend_id = user_id
user.save()
self.client.create_global_role(user.backend_id, GlobalRoles.user_base)
signals.rancher_user_created.send(
sender=models.RancherUser, instance=user, password=password,
)
def delete_user(self, user):
if user.backend_id:
self.client.delete_user(user_id=user.backend_id)
user.delete()
def block_user(self, user):
if user.is_active:
self.client.disable_user(user.backend_id)
user.is_active = False
user.save()
def activate_user(self, user):
if not user.is_active:
self.client.enable_user(user.backend_id)
user.is_active = True
user.save()
def get_or_create_cluster_group_role(self, group_id, cluster_id, role):
if not self.client.get_cluster_group_role(group_id, cluster_id, role):
self.client.create_cluster_group_role(group_id, cluster_id, role)
return True
return False
def create_cluster_user_role(self, link):
role = None
if link.role == models.ClusterRole.CLUSTER_OWNER:
role = ClusterRoles.cluster_owner
if link.role == models.ClusterRole.CLUSTER_MEMBER:
role = ClusterRoles.cluster_member
response = self.client.create_cluster_user_role(
link.user.backend_id, link.cluster.backend_id, role
)
link_id = response['id']
link.backend_id = link_id
link.save()
def delete_cluster_role(self, link):
if link.backend_id:
try:
self.client.delete_cluster_role(cluster_role_id=link.backend_id)
except NotFound:
                logger.debug(
                    'Cluster role %s is not present in the backend', link.backend_id
                )
link.delete()
def pull_catalogs_for_cluster(self, cluster: models.Cluster):
self.pull_cluster_catalogs_for_cluster(cluster)
self.pull_project_catalogs_for_cluster(cluster)
def pull_cluster_catalogs_for_cluster(self, cluster):
remote_catalogs = self.client.list_cluster_catalogs(cluster.backend_id)
self.pull_catalogs_for_scope(remote_catalogs, cluster)
def pull_project_catalogs_for_cluster(self, cluster):
for project in models.Project.objects.filter(cluster=cluster):
self.pull_project_catalogs_for_project(project)
def pull_project_catalogs_for_project(self, project):
remote_catalogs = self.client.list_project_catalogs(project.backend_id)
self.pull_catalogs_for_scope(remote_catalogs, project)
def pull_catalogs(self):
self.pull_global_catalogs()
self.pull_cluster_catalogs()
self.pull_project_catalogs()
def pull_global_catalogs(self):
remote_catalogs = self.client.list_global_catalogs()
self.pull_catalogs_for_scope(remote_catalogs, self.settings)
def pull_cluster_catalogs(self):
remote_catalogs = self.client.list_cluster_catalogs()
for cluster in models.Cluster.objects.filter(settings=self.settings):
self.pull_catalogs_for_scope(remote_catalogs, cluster)
def pull_project_catalogs(self):
remote_catalogs = self.client.list_project_catalogs()
for project in models.Project.objects.filter(settings=self.settings):
self.pull_catalogs_for_scope(remote_catalogs, project)
def pull_catalogs_for_scope(self, remote_catalogs, scope):
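        # Generic reconciliation: index remote and local catalogs by backend
        # ID, create the missing ones, update the shared ones and delete the
        # stale ones.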
content_type = ContentType.objects.get_for_model(scope)
local_catalogs = models.Catalog.objects.filter(
content_type=content_type, object_id=scope.id,
)
remote_catalog_map = {
catalog['id']: self.remote_catalog_to_local(catalog, content_type, scope.id)
for catalog in remote_catalogs
}
local_catalog_map = {catalog.backend_id: catalog for catalog in local_catalogs}
remote_catalog_ids = set(remote_catalog_map.keys())
local_catalog_ids = set(local_catalog_map.keys())
stale_catalogs = local_catalog_ids - remote_catalog_ids
new_catalogs = [
remote_catalog_map[catalog_id]
for catalog_id in remote_catalog_ids - local_catalog_ids
]
existing_catalogs = remote_catalog_ids & local_catalog_ids
pulled_fields = {
'name',
'description',
'catalog_url',
'branch',
'commit',
'username',
'password',
'runtime_state',
}
for catalog_id in existing_catalogs:
local_catalog = local_catalog_map[catalog_id]
remote_catalog = remote_catalog_map[catalog_id]
update_pulled_fields(local_catalog, remote_catalog, pulled_fields)
models.Catalog.objects.bulk_create(new_catalogs)
local_catalogs.filter(backend_id__in=stale_catalogs).delete()
def remote_catalog_to_local(self, remote_catalog, content_type, object_id):
return models.Catalog(
content_type=content_type,
object_id=object_id,
backend_id=remote_catalog['id'],
name=remote_catalog['name'],
description=remote_catalog['description'],
created=parse_datetime(remote_catalog['created']),
catalog_url=remote_catalog['url'],
branch=remote_catalog['branch'],
commit=remote_catalog.get('commit', ''),
username=remote_catalog.get('username', ''),
password=remote_catalog.get('password', ''),
runtime_state=remote_catalog['state'],
settings=self.settings,
)
def refresh_catalog(self, catalog):
if isinstance(catalog.scope, ServiceSettings):
return self.client.refresh_global_catalog(catalog.backend_id)
elif isinstance(catalog.scope, models.Cluster):
return self.client.refresh_cluster_catalog(catalog.backend_id)
else:
return self.client.refresh_project_catalog(catalog.backend_id)
def delete_catalog(self, catalog):
try:
if isinstance(catalog.scope, ServiceSettings):
return self.client.delete_global_catalog(catalog.backend_id)
elif isinstance(catalog.scope, models.Cluster):
return self.client.delete_cluster_catalog(catalog.backend_id)
else:
return self.client.delete_project_catalog(catalog.backend_id)
except NotFound:
            logger.debug(
                'Catalog %s is not present in the backend', catalog.backend_id
            )
def get_catalog_spec(self, catalog):
spec = {
'name': catalog.name,
'description': catalog.description,
'url': catalog.catalog_url,
'branch': catalog.branch,
}
if catalog.username:
spec['username'] = catalog.username
if catalog.password:
spec['password'] = catalog.password
return spec
def create_catalog(self, catalog):
spec = self.get_catalog_spec(catalog)
if isinstance(catalog.scope, ServiceSettings):
remote_catalog = self.client.create_global_catalog(spec)
elif isinstance(catalog.scope, models.Cluster):
spec['clusterId'] = catalog.scope.backend_id
remote_catalog = self.client.create_cluster_catalog(spec)
else:
spec['projectId'] = catalog.scope.backend_id
remote_catalog = self.client.create_project_catalog(spec)
catalog.backend_id = remote_catalog['id']
catalog.runtime_state = remote_catalog['state']
catalog.save()
def update_catalog(self, catalog):
spec = self.get_catalog_spec(catalog)
if isinstance(catalog.scope, ServiceSettings):
return self.client.update_global_catalog(catalog.backend_id, spec)
elif isinstance(catalog.scope, models.Cluster):
return self.client.update_cluster_catalog(catalog.backend_id, spec)
else:
return self.client.update_project_catalog(catalog.backend_id, spec)
def pull_projects_for_cluster(self, cluster: models.Cluster):
"""
Pull projects for one cluster. It is used for cluster synchronization.
"""
remote_projects = self.client.list_projects(cluster.backend_id)
local_projects = models.Project.objects.filter(cluster=cluster)
local_clusters = [cluster]
self._pull_projects(local_clusters, local_projects, remote_projects)
def pull_projects(self):
"""
Pull projects for all clusters. It is used for provider synchronization.
"""
remote_projects = self.client.list_projects()
local_projects = models.Project.objects.filter(settings=self.settings)
local_clusters = models.Cluster.objects.filter(settings=self.settings)
self._pull_projects(local_clusters, local_projects, remote_projects)
def _pull_projects(self, local_clusters, local_projects, remote_projects):
"""
This private method pulls projects for given clusters and projects.
"""
local_cluster_map = {cluster.backend_id: cluster for cluster in local_clusters}
remote_project_map = {
project['id']: self.remote_project_to_local(project, local_cluster_map)
for project in remote_projects
}
local_project_map = {project.backend_id: project for project in local_projects}
remote_project_ids = set(remote_project_map.keys())
local_project_ids = set(local_project_map.keys())
stale_projects = local_project_ids - remote_project_ids
new_projects = [
remote_project_map[project_id]
for project_id in remote_project_ids - local_project_ids
]
existing_projects = remote_project_ids & local_project_ids
pulled_fields = {
'name',
'description',
'runtime_state',
'cluster',
}
for project_id in existing_projects:
local_project = local_project_map[project_id]
remote_project = remote_project_map[project_id]
update_pulled_fields(local_project, remote_project, pulled_fields)
models.Project.objects.bulk_create(new_projects)
local_projects.filter(backend_id__in=stale_projects).delete()
def remote_project_to_local(self, remote_project, local_cluster_map):
return models.Project(
backend_id=remote_project['id'],
name=remote_project['name'],
description=remote_project['description'],
created=parse_datetime(remote_project['created']),
runtime_state=remote_project['state'],
cluster=local_cluster_map.get(remote_project['clusterId']),
settings=self.settings,
)
def pull_namespaces(self):
local_clusters = models.Cluster.objects.filter(settings=self.settings)
for cluster in local_clusters:
if cluster.state == models.Cluster.States.OK:
self.pull_namespaces_for_cluster(cluster)
else:
logger.debug(
                    'Skipping namespace pulling for cluster with backend ID %s '
'because otherwise one failed cluster leads to provider failure',
cluster.backend_id,
)
def pull_namespaces_for_cluster(self, cluster: models.Cluster):
remote_namespaces = self.client.list_namespaces(cluster.backend_id)
local_namespaces = models.Namespace.objects.filter(project__cluster=cluster)
local_projects = models.Project.objects.filter(cluster=cluster)
local_project_map = {project.backend_id: project for project in local_projects}
remote_namespace_map = {
namespace['id']: self.remote_namespace_to_local(
namespace, local_project_map
)
for namespace in remote_namespaces
}
local_namespace_map = {
namespace.backend_id: namespace for namespace in local_namespaces
}
remote_namespace_ids = set(remote_namespace_map.keys())
local_namespace_ids = set(local_namespace_map.keys())
stale_namespaces = local_namespace_ids - remote_namespace_ids
new_namespaces = [
remote_namespace_map[namespace_id]
for namespace_id in remote_namespace_ids - local_namespace_ids
]
existing_namespaces = remote_namespace_ids & local_namespace_ids
pulled_fields = {
'name',
'runtime_state',
'project',
}
for namespace_id in existing_namespaces:
local_namespace = local_namespace_map[namespace_id]
remote_namespace = remote_namespace_map[namespace_id]
update_pulled_fields(local_namespace, remote_namespace, pulled_fields)
models.Namespace.objects.bulk_create(new_namespaces)
local_namespaces.filter(backend_id__in=stale_namespaces).delete()
def remote_namespace_to_local(self, remote_namespace, local_project_map):
return models.Namespace(
backend_id=remote_namespace['id'],
name=remote_namespace['name'],
created=parse_datetime(remote_namespace['created']),
runtime_state=remote_namespace['state'],
project=local_project_map.get(remote_namespace['projectId']),
settings=self.settings,
)
def pull_templates_for_cluster(self, cluster: models.Cluster):
remote_templates = self.client.list_templates(cluster.backend_id)
local_templates = models.Template.objects.filter(cluster=cluster)
content_type = ContentType.objects.get_for_model(cluster)
local_catalogs = models.Catalog.objects.filter(
content_type=content_type, object_id=cluster.id
)
local_clusters = [cluster]
local_projects = models.Project.objects.filter(cluster=cluster)
self._pull_templates(
local_templates,
local_catalogs,
local_clusters,
local_projects,
remote_templates,
)
def pull_templates(self):
remote_templates = self.client.list_templates()
local_templates = models.Template.objects.filter(settings=self.settings)
local_catalogs = models.Catalog.objects.filter(settings=self.settings)
local_clusters = models.Cluster.objects.filter(settings=self.settings)
local_projects = models.Project.objects.filter(settings=self.settings)
self._pull_templates(
local_templates,
local_catalogs,
local_clusters,
local_projects,
remote_templates,
)
def _pull_templates(
self,
local_templates,
local_catalogs,
local_clusters,
local_projects,
remote_templates,
):
local_catalog_map = {catalog.backend_id: catalog for catalog in local_catalogs}
local_cluster_map = {cluster.backend_id: cluster for cluster in local_clusters}
local_project_map = {project.backend_id: project for project in local_projects}
local_template_map = {
template.backend_id: template for template in local_templates
}
remote_template_map = {
template['id']: self.remote_template_to_local(
template, local_catalog_map, local_cluster_map, local_project_map
)
for template in remote_templates
}
remote_template_ids = set(remote_template_map.keys())
local_template_ids = set(local_template_map.keys())
stale_templates = local_template_ids - remote_template_ids
new_templates = [
remote_template_map[template_id]
for template_id in remote_template_ids - local_template_ids
]
existing_templates = remote_template_ids & local_template_ids
pulled_fields = {
'name',
'description',
'runtime_state',
'project_url',
'icon_url',
'default_version',
'versions',
'catalog',
'cluster',
'project',
}
for template_id in existing_templates:
local_template = local_template_map[template_id]
remote_template = remote_template_map[template_id]
update_pulled_fields(local_template, remote_template, pulled_fields)
models.Template.objects.bulk_create(new_templates)
local_templates.filter(backend_id__in=stale_templates).delete()
def remote_template_to_local(
self, remote_template, local_catalog_map, local_cluster_map, local_project_map
):
catalog_id = remote_template['catalogId'] or remote_template['clusterCatalogId']
return models.Template(
backend_id=remote_template['id'],
name=remote_template['name'],
description=remote_template['description'],
created=parse_datetime(remote_template['created']),
runtime_state=remote_template['state'],
icon_url=remote_template['links']['icon'],
project_url=remote_template.get('projectURL', ''),
default_version=remote_template['defaultVersion'],
versions=list(remote_template['versionLinks'].keys()),
catalog=local_catalog_map.get(catalog_id),
cluster=local_cluster_map.get(remote_template['clusterId']),
project=local_project_map.get(remote_template['projectId']),
settings=self.settings,
)
def _get_external_template_icon(self, icon_url):
try:
response = requests.get(icon_url, timeout=3)
except requests.RequestException as e:
logger.debug(f"Failed to get {icon_url}: {e}")
return None
status_code = response.status_code
if status_code == requests.codes.ok: # only care about the positive case
return response.content
else:
return None
def pull_template_icons(self):
for template in models.Template.objects.filter(settings=self.settings):
content = self.client.get_template_icon(template.backend_id)
if (
not content
and template.icon_url
                and urlparse(template.icon_url).netloc != urlparse(self.host).netloc
):
# try to download icon from the icon_url field
logger.debug(
'Rancher did not return icon for a Template, trying with external URL'
)
content = self._get_external_template_icon(template.icon_url)
if not content:
# Clear icon field so that default icon would be rendered
template.icon = None
template.save()
continue
extension = guess_image_extension(content)
if not extension:
continue
# Overwrite existing file
if template.icon:
template.icon.delete()
template.icon.save(f'{template.uuid}.{extension}', io.BytesIO(content))
def list_project_secrets(self, project):
return self.client.list_project_secrets(project.backend_id)
def pull_cluster_workloads(self, cluster):
for project in models.Project.objects.filter(cluster=cluster):
self.pull_project_workloads(project)
def pull_workloads(self):
local_clusters = models.Cluster.objects.filter(settings=self.settings)
for cluster in local_clusters:
if cluster.state == models.Cluster.States.OK:
self.pull_cluster_workloads(cluster)
else:
logger.debug(
                    'Skipping workload pulling for cluster with backend ID %s '
'because otherwise one failed cluster leads to provider failure',
cluster.backend_id,
)
def pull_project_workloads(self, project):
remote_workloads = self.client.list_workloads(project.backend_id)
local_workloads = models.Workload.objects.filter(project=project)
local_namespaces = models.Namespace.objects.filter(project=project)
local_namespaces_map = {
namespace.backend_id: namespace for namespace in local_namespaces
}
remote_workload_map = {
workload['id']: self.remote_workload_to_local(
workload, project, local_namespaces_map
)
for workload in remote_workloads
}
local_workload_map = {
workload.backend_id: workload for workload in local_workloads
}
remote_workload_ids = set(remote_workload_map.keys())
local_workload_ids = set(local_workload_map.keys())
stale_workloads = local_workload_ids - remote_workload_ids
new_workloads = [
remote_workload_map[workload_id]
for workload_id in remote_workload_ids - local_workload_ids
]
existing_workloads = remote_workload_ids & local_workload_ids
pulled_fields = {
'name',
'runtime_state',
'scale',
}
for workload_id in existing_workloads:
local_workload = local_workload_map[workload_id]
remote_workload = remote_workload_map[workload_id]
update_pulled_fields(local_workload, remote_workload, pulled_fields)
models.Workload.objects.bulk_create(new_workloads)
local_workloads.filter(backend_id__in=stale_workloads).delete()
def remote_workload_to_local(self, remote_workload, project, local_namespaces_map):
return models.Workload(
backend_id=remote_workload['id'],
name=remote_workload['name'],
created=parse_datetime(remote_workload['created']),
runtime_state=remote_workload['state'],
project=project,
cluster=project.cluster,
settings=self.settings,
namespace=local_namespaces_map.get(remote_workload['namespaceId']),
scale=remote_workload.get('scale', 0),
)
def redeploy_workload(self, workload: models.Workload):
self.client.redeploy_workload(workload.project.backend_id, workload.backend_id)
def delete_workload(self, workload: models.Workload):
self.client.delete_workload(workload.project.backend_id, workload.backend_id)
def get_workload_yaml(self, workload: models.Workload):
return self.client.get_workload_yaml(
workload.project.backend_id, workload.backend_id
)
def put_workload_yaml(self, workload: models.Workload, yaml: str):
return self.client.put_workload_yaml(
workload.project.backend_id, workload.backend_id, yaml
)
def pull_cluster_hpas(self, cluster):
for project in models.Project.objects.filter(cluster=cluster):
self.pull_project_hpas(project)
def pull_hpas(self):
local_clusters = models.Cluster.objects.filter(settings=self.settings)
for cluster in local_clusters:
if cluster.state == models.Cluster.States.OK:
self.pull_cluster_hpas(cluster)
else:
logger.debug(
                    'Skipping HPA pulling for cluster with backend ID %s '
'because otherwise one failed cluster leads to provider failure',
cluster.backend_id,
)
def pull_project_hpas(self, project):
local_workloads = models.Workload.objects.filter(project=project)
local_workloads_map = {
workload.backend_id: workload for workload in local_workloads
}
local_hpas = models.HPA.objects.filter(project=project)
local_hpa_map = {hpa.backend_id: hpa for hpa in local_hpas}
remote_hpas = self.client.list_hpas(project.backend_id)
remote_hpa_map = {
hpa['id']: self.remote_hpa_to_local(hpa, local_workloads_map)
for hpa in remote_hpas
}
remote_hpa_ids = set(remote_hpa_map.keys())
local_hpa_ids = set(local_hpa_map.keys())
stale_hpas = local_hpa_ids - remote_hpa_ids
new_hpas = [remote_hpa_map[hpa_id] for hpa_id in remote_hpa_ids - local_hpa_ids]
existing_hpas = remote_hpa_ids & local_hpa_ids
pulled_fields = {
'name',
'runtime_state',
'current_replicas',
'desired_replicas',
'min_replicas',
'max_replicas',
'metrics',
}
for hpa_id in existing_hpas:
local_hpa = local_hpa_map[hpa_id]
remote_hpa = remote_hpa_map[hpa_id]
update_pulled_fields(local_hpa, remote_hpa, pulled_fields)
models.HPA.objects.bulk_create(new_hpas)
local_hpas.filter(backend_id__in=stale_hpas).delete()
def remote_hpa_to_local(self, remote_hpa, local_workloads_map):
workload = local_workloads_map[remote_hpa['workloadId']]
return models.HPA(
backend_id=remote_hpa['id'],
name=remote_hpa['name'],
created=parse_datetime(remote_hpa['created']),
runtime_state=remote_hpa['state'],
project=workload.project,
cluster=workload.cluster,
settings=self.settings,
namespace=workload.namespace,
current_replicas=remote_hpa['currentReplicas'],
desired_replicas=remote_hpa['desiredReplicas'],
min_replicas=remote_hpa['minReplicas'],
max_replicas=remote_hpa['maxReplicas'],
metrics=remote_hpa['metrics'],
state=models.HPA.States.OK,
)
def create_hpa(self, hpa):
remote_hpa = self.client.create_hpa(
hpa.project.backend_id,
hpa.namespace.backend_id,
hpa.workload.backend_id,
hpa.name,
hpa.description,
hpa.min_replicas,
hpa.max_replicas,
hpa.metrics,
)
hpa.backend_id = remote_hpa['id']
hpa.runtime_state = remote_hpa['state']
hpa.save(update_fields=['backend_id', 'runtime_state'])
def update_hpa(self, hpa):
self.client.update_hpa(
hpa.project.backend_id,
hpa.backend_id,
hpa.namespace.backend_id,
hpa.workload.backend_id,
hpa.name,
hpa.description,
hpa.min_replicas,
hpa.max_replicas,
hpa.metrics,
)
def delete_hpa(self, hpa):
try:
self.client.delete_hpa(hpa.project.backend_id, hpa.backend_id)
except NotFound:
            logger.debug('HPA %s is not present in the backend.', hpa.backend_id)
def get_hpa_yaml(self, hpa: models.HPA):
return self.client.get_hpa_yaml(hpa.project.backend_id, hpa.backend_id)
def put_hpa_yaml(self, hpa: models.HPA, yaml: str):
return self.client.put_hpa_yaml(hpa.project.backend_id, hpa.backend_id, yaml)
def pull_apps(self):
local_clusters = models.Cluster.objects.filter(settings=self.settings)
for cluster in local_clusters:
if cluster.state == models.Cluster.States.OK:
self.pull_cluster_apps(cluster)
else:
logger.debug(
                    'Skipping app pulling for cluster with backend ID %s '
'because otherwise one failed cluster leads to provider failure',
cluster.backend_id,
)
def pull_cluster_apps(self, cluster: models.Cluster):
for project in models.Project.objects.filter(cluster=cluster):
self.pull_project_apps(project)
def pull_project_apps(self, project: models.Project):
local_namespaces = models.Namespace.objects.filter(project=project)
local_namespaces_map = {
namespace.backend_id: namespace for namespace in local_namespaces
}
local_apps = models.Application.objects.filter(rancher_project=project)
local_app_map = {app.backend_id: app for app in local_apps}
remote_apps = self.client.get_project_applications(project.backend_id)
remote_app_map = {
app['id']: self.remote_app_to_local(app, project, local_namespaces_map)
for app in remote_apps
}
remote_app_ids = set(remote_app_map.keys())
local_app_ids = set(local_app_map.keys())
stale_apps = local_app_ids - remote_app_ids
new_apps = [remote_app_map[app_id] for app_id in remote_app_ids - local_app_ids]
existing_apps = remote_app_ids & local_app_ids
pulled_fields = {
'name',
'runtime_state',
'answers',
}
for app_id in existing_apps:
local_app = local_app_map[app_id]
remote_app = remote_app_map[app_id]
update_pulled_fields(local_app, remote_app, pulled_fields)
models.Application.objects.bulk_create(new_apps)
local_apps.filter(backend_id__in=stale_apps).delete()
def remote_app_to_local(self, remote_app, rancher_project, local_namespaces_map):
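        # Rancher stores the app's source catalog, template and version in
        # the query string of its externalId URL; parse them back out here.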
parts = urlparse(remote_app['externalId'])
params = parse_qs(parts.query)
template = models.Template.objects.get(
settings=self.settings,
name=params['template'][0],
catalog__name=params['catalog'][0],
)
return models.Application(
settings=self.settings,
service_settings=rancher_project.cluster.service_settings,
project=rancher_project.cluster.project,
rancher_project=rancher_project,
cluster=rancher_project.cluster,
namespace=local_namespaces_map.get(remote_app['targetNamespace']),
template=template,
name=remote_app['name'],
runtime_state=remote_app['state'],
created=remote_app['created'],
backend_id=remote_app['id'],
answers=remote_app.get('answers', {}),
version=params['version'][0],
)
def create_app(self, app: models.Application):
if not app.namespace.backend_id:
remote_response = self.client.create_namespace(
app.rancher_project.cluster.backend_id,
app.rancher_project.backend_id,
app.namespace.name,
)
app.namespace.backend_id = remote_response['id']
app.namespace.save()
remote_app = self.client.create_application(
app.template.catalog.backend_id,
app.template.name,
app.version,
app.rancher_project.backend_id,
app.namespace.backend_id,
app.name,
app.answers,
)
app.backend_id = remote_app['id']
app.runtime_state = remote_app['state']
app.save()
def check_application_state(self, app):
remote_app = self.client.get_application(
app.rancher_project.backend_id, app.backend_id
)
app.runtime_state = remote_app['state']
app.save()
def delete_app(self, app):
try:
self.client.destroy_application(
app.rancher_project.backend_id, app.backend_id
)
except NotFound:
            logger.debug('App %s is not present in the backend.', app.backend_id)
def install_longhorn_to_cluster(self, cluster):
catalog_name = 'library'
system_project = models.Project.objects.filter(
cluster=cluster, name='System'
).first()
if not system_project:
raise RancherException(
"There is no system project in cluster %s" % cluster.backend_id
)
available_templates = models.Template.objects.filter(
name=LONGHORN_NAME, catalog__name=catalog_name
)
available_templates_count = len(available_templates)
if available_templates_count != 1:
if available_templates_count == 0:
message = "There are no templates with name=%s, catalog.name=%s" % (
LONGHORN_NAME,
catalog_name,
)
else:
message = (
"There are more than one template for name=%s, catalog.name=%s"
% (LONGHORN_NAME, catalog_name)
)
logger.info(message)
raise RancherException(message)
logger.info(
            'Starting Longhorn installation for cluster %s (name=%s, backend_id=%s)',
cluster,
cluster.name,
cluster.backend_id,
)
template = available_templates.first()
try:
namespace = models.Namespace.objects.get(
name=LONGHORN_NAMESPACE, project=system_project
)
except models.Namespace.DoesNotExist:
logger.info(
'Creating namespace %s for cluster %s (name=%s, backend_id=%s)',
LONGHORN_NAMESPACE,
cluster,
cluster.name,
cluster.backend_id,
)
namespace_response = self.client.create_namespace(
cluster.backend_id, system_project.backend_id, LONGHORN_NAMESPACE
)
namespace = models.Namespace.objects.create(
name=LONGHORN_NAMESPACE,
backend_id=namespace_response['id'],
settings=system_project.settings,
project=system_project,
)
logger.info(
'Creating application %s for cluster %s (name=%s, backend_id=%s) in namespace %s (backend_id=%s)',
            LONGHORN_NAME,
cluster,
cluster.name,
cluster.backend_id,
namespace.name,
namespace.backend_id,
)
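        # Longhorn stores several replicas of each volume; cap the replica
        # count by the number of worker nodes (and by the usual default of 3)
        # so that small clusters can still schedule every replica.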
worker_node_count = cluster.node_set.filter(worker_role=True).count()
replica_count = min(3, worker_node_count)
application = self.client.create_application(
catalog_id=template.catalog.backend_id,
template_id=template.name,
version=template.default_version,
project_id=system_project.backend_id,
namespace_id=namespace.backend_id,
name=LONGHORN_NAME,
answers={'persistence.defaultClassReplicaCount': replica_count},
wait=True,
timeout=1200,
)
models.Application.objects.create(
settings=self.settings,
service_settings=cluster.service_settings,
project=cluster.project,
rancher_project=system_project,
cluster=cluster,
namespace=namespace,
template=template,
name=LONGHORN_NAME,
state=models.Application.States.CREATING,
runtime_state=application['state'],
created=application['created'],
backend_id=application['id'],
            answers=application.get('answers', {}),
version=template.default_version,
)
logger.info(
'Application %s for cluster %s (name=%s, backend_id=%s) was created',
application,
cluster,
cluster.name,
cluster.backend_id,
)
def pull_ingresses(self):
local_clusters = models.Cluster.objects.filter(settings=self.settings)
for cluster in local_clusters:
if cluster.state == models.Cluster.States.OK:
self.pull_cluster_ingresses(cluster)
else:
logger.debug(
                    'Skipping ingress pulling for cluster with backend ID %s '
                    'because otherwise one failed cluster leads to provider failure',
cluster.backend_id,
)
def pull_cluster_ingresses(self, cluster: models.Cluster):
for project in models.Project.objects.filter(cluster=cluster):
self.pull_project_ingresses(project)
def pull_project_ingresses(self, project):
remote_ingresses = self.client.list_ingresses(project.backend_id)
local_ingresses = models.Ingress.objects.filter(rancher_project=project)
local_namespaces = models.Namespace.objects.filter(project=project)
local_namespaces_map = {
namespace.backend_id: namespace for namespace in local_namespaces
}
remote_ingress_map = {
ingress['id']: self.remote_ingress_to_local(
ingress, project, local_namespaces_map
)
for ingress in remote_ingresses
}
local_ingress_map = {ingress.backend_id: ingress for ingress in local_ingresses}
remote_ingress_ids = set(remote_ingress_map.keys())
local_ingress_ids = set(local_ingress_map.keys())
stale_ingresses = local_ingress_ids - remote_ingress_ids
new_ingresses = [
remote_ingress_map[ingress_id]
for ingress_id in remote_ingress_ids - local_ingress_ids
]
existing_ingresses = remote_ingress_ids & local_ingress_ids
pulled_fields = {
'name',
'runtime_state',
'rules',
}
for ingress_id in existing_ingresses:
local_ingress = local_ingress_map[ingress_id]
remote_ingress = remote_ingress_map[ingress_id]
update_pulled_fields(local_ingress, remote_ingress, pulled_fields)
models.Ingress.objects.bulk_create(new_ingresses)
local_ingresses.filter(backend_id__in=stale_ingresses).delete()
def remote_ingress_to_local(self, remote_ingress, project, local_namespaces_map):
namespace = local_namespaces_map.get(remote_ingress['namespaceId'])
return models.Ingress(
backend_id=remote_ingress['id'],
name=remote_ingress['name'],
created=parse_datetime(remote_ingress['created']),
runtime_state=remote_ingress['state'],
settings=self.settings,
service_settings=namespace.project.cluster.service_settings,
project=namespace.project.cluster.project,
namespace=namespace,
cluster=namespace.project.cluster,
rancher_project=namespace.project,
rules=remote_ingress['rules'],
state=models.Ingress.States.OK,
)
def get_ingress_yaml(self, ingress: models.Ingress):
return self.client.get_ingress_yaml(
ingress.rancher_project.backend_id, ingress.backend_id
)
def put_ingress_yaml(self, ingress: models.Ingress, yaml: str):
return self.client.put_ingress_yaml(
ingress.rancher_project.backend_id, ingress.backend_id, yaml
)
def delete_ingress(self, ingress: models.Ingress):
return self.client.delete_ingress(
ingress.rancher_project.backend_id, ingress.backend_id
)
def pull_services(self):
local_clusters = models.Cluster.objects.filter(settings=self.settings)
for cluster in local_clusters:
if cluster.state == models.Cluster.States.OK:
self.pull_cluster_services(cluster)
else:
logger.debug(
                    'Skipping service pulling for cluster with backend ID %s '
                    'because otherwise one failed cluster leads to provider failure',
cluster.backend_id,
)
def pull_cluster_services(self, cluster: models.Cluster):
for project in models.Project.objects.filter(cluster=cluster):
self.pull_project_services(project)
def pull_project_services(self, project):
remote_services = self.client.list_services(project.backend_id)
local_services = models.Service.objects.filter(namespace__project=project)
local_namespaces = models.Namespace.objects.filter(project=project)
local_workloads = models.Workload.objects.filter(project=project)
local_namespaces_map = {
namespace.backend_id: namespace for namespace in local_namespaces
}
local_workloads_map = {
workload.backend_id: workload for workload in local_workloads
}
remote_service_map = {service['id']: service for service in remote_services}
local_service_map = {service.backend_id: service for service in local_services}
remote_service_ids = set(remote_service_map.keys())
local_service_ids = set(local_service_map.keys())
stale_services = local_service_ids - remote_service_ids
new_services = [
remote_service_map[service_id]
for service_id in remote_service_ids - local_service_ids
]
existing_services = remote_service_ids & local_service_ids
for service_id in existing_services:
local_service = local_service_map[service_id]
remote_service = remote_service_map[service_id]
update_fields = set()
if remote_service['name'] != local_service.name:
local_service.name = remote_service['name']
update_fields.add('name')
if remote_service['state'] != local_service.runtime_state:
local_service.runtime_state = remote_service['state']
update_fields.add('runtime_state')
if remote_service.get('selector') != local_service.selector:
local_service.selector = remote_service.get('selector')
update_fields.add('selector')
if remote_service['clusterIp'] != local_service.cluster_ip:
local_service.cluster_ip = remote_service['clusterIp']
update_fields.add('cluster_ip')
if update_fields:
local_service.save(update_fields=update_fields)
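            # Reconcile the service's target workloads the same way: unlink
            # workloads that are gone remotely, link newly reported ones.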
local_service_workload_map = {
workload.backend_id: workload
for workload in local_service.target_workloads.all()
}
remote_service_workload_map = {
workload_id: local_workloads_map[workload_id]
for workload_id in remote_service.get('targetWorkloadIds', [])
}
local_service_workload_ids = set(local_service_workload_map.keys())
remote_service_workload_ids = set(remote_service_workload_map.keys())
stale_service_workload_ids = (
local_service_workload_ids - remote_service_workload_ids
)
for workload_id in stale_service_workload_ids:
workload = local_workloads_map[workload_id]
local_service.target_workloads.remove(workload)
new_service_workload_ids = (
remote_service_workload_ids - local_service_workload_ids
)
for workload_id in new_service_workload_ids:
workload = local_workloads_map[workload_id]
local_service.target_workloads.add(workload)
for remote_service in new_services:
namespace = local_namespaces_map.get(remote_service['namespaceId'])
local_service = models.Service(
backend_id=remote_service['id'],
name=remote_service['name'],
created=parse_datetime(remote_service['created']),
runtime_state=remote_service['state'],
settings=self.settings,
service_settings=namespace.project.cluster.service_settings,
project=namespace.project.cluster.project,
namespace=namespace,
cluster_ip=remote_service['clusterIp'],
selector=remote_service.get('selector'),
state=models.Service.States.OK,
)
local_service.save()
workloads = [
local_workloads_map[workload_id]
for workload_id in remote_service.get('targetWorkloadIds', [])
]
local_service.target_workloads.set(workloads)
local_services.filter(backend_id__in=stale_services).delete()
def get_service_yaml(self, service: models.Service):
return self.client.get_service_yaml(
service.namespace.project.backend_id, service.backend_id
)
def put_service_yaml(self, service: models.Service, yaml: str):
return self.client.put_service_yaml(
service.namespace.project.backend_id, service.backend_id, yaml
)
def delete_service(self, service: models.Service):
return self.client.delete_service(
service.namespace.project.backend_id, service.backend_id
)
def import_yaml(
self,
cluster: models.Cluster,
yaml: str,
default_namespace: models.Namespace = None,
namespace: models.Namespace = None,
):
return self.client.import_yaml(
cluster.backend_id,
yaml,
default_namespace and default_namespace.backend_id,
namespace and namespace.backend_id,
)
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=
"""Dataset container."""
__all__ = ['Dataset', 'SimpleDataset', 'ArrayDataset',
'RecordFileDataset']
import os
from ... import recordio, ndarray
class Dataset(object):
"""Abstract dataset class. All datasets should have this interface.
Subclasses need to override `__getitem__`, which returns the i-th
    element, and `__len__`, which returns the total number of elements.
.. note:: An mxnet or numpy array can be directly used as a dataset.
"""
def __getitem__(self, idx):
raise NotImplementedError
def __len__(self):
raise NotImplementedError
def transform(self, fn, lazy=True):
"""Returns a new dataset with each sample transformed by the
transformer function `fn`.
Parameters
----------
fn : callable
A transformer function that takes a sample as input and
returns the transformed sample.
lazy : bool, default True
If False, transforms all samples at once. Otherwise,
transforms each sample on demand. Note that if `fn`
is stochastic, you must set lazy to True or you will
get the same result on all epochs.
Returns
-------
Dataset
The transformed dataset.
"""
trans = _LazyTransformDataset(self, fn)
if lazy:
return trans
return SimpleDataset([i for i in trans])
def transform_first(self, fn, lazy=True):
"""Returns a new dataset with the first element of each sample
transformed by the transformer function `fn`.
This is useful, for example, when you only want to transform data
while keeping label as is.
Parameters
----------
fn : callable
            A transformer function that takes the first element of a sample
as input and returns the transformed element.
lazy : bool, default True
If False, transforms all samples at once. Otherwise,
transforms each sample on demand. Note that if `fn`
is stochastic, you must set lazy to True or you will
get the same result on all epochs.
Returns
-------
Dataset
The transformed dataset.
"""
def base_fn(x, *args):
if args:
return (fn(x),) + args
return fn(x)
return self.transform(base_fn, lazy)
class SimpleDataset(Dataset):
"""Simple Dataset wrapper for lists and arrays.
Parameters
----------
data : dataset-like object
Any object that implements `len()` and `[]`.
"""
def __init__(self, data):
self._data = data
def __len__(self):
return len(self._data)
def __getitem__(self, idx):
return self._data[idx]
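# A minimal usage sketch of Dataset.transform with SimpleDataset
# (illustrative only):
#
#   >>> data = SimpleDataset([1, 2, 3, 4])
#   >>> doubled = data.transform(lambda x: x * 2)  # lazy by default
#   >>> doubled[1]
#   4
#   >>> len(doubled)
#   4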
class _LazyTransformDataset(Dataset):
"""Lazily transformed dataset."""
def __init__(self, data, fn):
self._data = data
self._fn = fn
def __len__(self):
return len(self._data)
def __getitem__(self, idx):
item = self._data[idx]
if isinstance(item, tuple):
return self._fn(*item)
return self._fn(item)
class ArrayDataset(Dataset):
"""A dataset that combines multiple dataset-like objects, e.g.
Datasets, lists, arrays, etc.
The i-th sample is defined as `(x1[i], x2[i], ...)`.
Parameters
----------
*args : one or more dataset-like objects
The data arrays.
"""
def __init__(self, *args):
        assert len(args) > 0, "Need at least 1 array"
self._length = len(args[0])
self._data = []
for i, data in enumerate(args):
            assert len(data) == self._length, \
                "All arrays must have the same length; array[0] has length %d " \
                "while array[%d] has %d." % (self._length, i, len(data))
if isinstance(data, ndarray.NDArray) and len(data.shape) == 1:
data = data.asnumpy()
self._data.append(data)
def __getitem__(self, idx):
if len(self._data) == 1:
return self._data[0][idx]
else:
return tuple(data[idx] for data in self._data)
def __len__(self):
return self._length
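# Sketch of ArrayDataset combining two equal-length containers
# (illustrative only):
#
#   >>> pairs = ArrayDataset([1, 2, 3], ['a', 'b', 'c'])
#   >>> pairs[0]
#   (1, 'a')
#   >>> pairs.transform_first(lambda x: x + 10)[2]
#   (13, 'c')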
class RecordFileDataset(Dataset):
"""A dataset wrapping over a RecordIO (.rec) file.
    Each sample is a string representing the raw content of a record.
Parameters
----------
filename : str
Path to rec file.
"""
def __init__(self, filename):
self.idx_file = os.path.splitext(filename)[0] + '.idx'
self.filename = filename
self._record = recordio.MXIndexedRecordIO(self.idx_file, self.filename, 'r')
def __getitem__(self, idx):
return self._record.read_idx(self._record.keys[idx])
def __len__(self):
return len(self._record.keys)
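# Usage sketch (assumes a pre-built RecordIO pair 'data.rec'/'data.idx'
# exists; the file names here are hypothetical):
#
#   >>> dataset = RecordFileDataset('data.rec')
#   >>> raw = dataset[0]  # raw bytes of the first record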
class _DownloadedDataset(Dataset):
"""Base class for MNIST, cifar10, etc."""
def __init__(self, root, transform):
super(_DownloadedDataset, self).__init__()
self._transform = transform
self._data = None
self._label = None
root = os.path.expanduser(root)
self._root = root
if not os.path.isdir(root):
os.makedirs(root)
self._get_data()
def __getitem__(self, idx):
if self._transform is not None:
return self._transform(self._data[idx], self._label[idx])
return self._data[idx], self._label[idx]
def __len__(self):
return len(self._label)
def _get_data(self):
raise NotImplementedError
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'ProjectCountByMinute.time_spent_total'
db.alter_column(
u'sentry_projectcountbyminute', 'time_spent_total',
self.gf('django.db.models.fields.IntegerField')()
)
# Changing field 'Group.time_spent_total'
db.alter_column(
'sentry_groupedmessage', 'time_spent_total',
self.gf('django.db.models.fields.IntegerField')()
)
# Changing field 'GroupCountByMinute.time_spent_total'
db.alter_column(
'sentry_messagecountbyminute', 'time_spent_total',
self.gf('django.db.models.fields.IntegerField')()
)
# Changing field 'Event.time_spent'
db.alter_column(
'sentry_message',
'time_spent',
self.gf('django.db.models.fields.IntegerField')(null=True)
)
def backwards(self, orm):
# Changing field 'ProjectCountByMinute.time_spent_total'
db.alter_column(
u'sentry_projectcountbyminute', 'time_spent_total',
self.gf('django.db.models.fields.FloatField')()
)
# Changing field 'Group.time_spent_total'
db.alter_column(
'sentry_groupedmessage', 'time_spent_total',
self.gf('django.db.models.fields.FloatField')()
)
# Changing field 'GroupCountByMinute.time_spent_total'
db.alter_column(
'sentry_messagecountbyminute', 'time_spent_total',
self.gf('django.db.models.fields.FloatField')()
)
# Changing field 'Event.time_spent'
db.alter_column(
'sentry_message',
'time_spent',
self.gf('django.db.models.fields.FloatField')(null=True)
)
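    # Frozen ORM state generated by South for this point in the migration
    # history; South uses it to reconstruct the models, so it is not meant
    # to be edited by hand.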
models = {
u'auth.group': {
'Meta': {
'object_name': 'Group'
},
u'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '80'
}),
'permissions': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': u"orm['auth.Permission']",
'symmetrical': 'False',
'blank': 'True'
}
)
},
u'auth.permission': {
'Meta': {
'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')",
'unique_together': "((u'content_type', u'codename'),)",
'object_name': 'Permission'
},
'codename': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'content_type': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['contenttypes.ContentType']"
}
),
u'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '50'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'first_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '30'
})
},
u'contenttypes.contenttype': {
'Meta': {
'ordering': "('name',)",
'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType',
'db_table': "'django_content_type'"
},
'app_label': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
u'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'model': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '100'
})
},
u'sentry.accessgroup': {
'Meta': {
'unique_together': "(('team', 'name'),)",
'object_name': 'AccessGroup'
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': u"orm['sentry.User']",
'symmetrical': 'False'
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': u"orm['sentry.Project']",
'symmetrical': 'False'
}
),
'team':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Team']"
}),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '50'
})
},
u'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Event']",
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']"
}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.User']",
'null': 'True'
}
)
},
u'sentry.alert': {
'Meta': {
'object_name': 'Alert'
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']"
}),
'related_groups': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'related_alerts'",
'symmetrical': 'False',
'through': u"orm['sentry.AlertRelatedGroup']",
'to': u"orm['sentry.Group']"
}
),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
u'sentry.alertrelatedgroup': {
'Meta': {
'unique_together': "(('group', 'alert'),)",
'object_name': 'AlertRelatedGroup'
},
'alert':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Alert']"
}),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
})
},
u'sentry.event': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'"
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'event_set'",
'null': 'True',
'to': u"orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'level': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "'root'",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments':
('django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'null': 'True'
}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'server_name': (
'django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True',
'db_index': 'True'
}
),
'site': (
'django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True',
'db_index': 'True'
}
),
'time_spent': ('django.db.models.fields.IntegerField', [], {
'null': 'True'
})
},
u'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']"
})
},
u'sentry.group': {
'Meta': {
'unique_together': "(('project', 'checksum'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "'root'",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments':
('django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'null': 'True'
}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'times_seen': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
u'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': u"orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': u"orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': u"orm['sentry.User']"
}
)
},
u'sentry.groupcountbyminute': {
'Meta': {
'unique_together': "(('project', 'group', 'date'),)",
'object_name': 'GroupCountByMinute',
'db_table': "'sentry_messagecountbyminute'"
},
'date': ('django.db.models.fields.DateTimeField', [], {
'db_index': 'True'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
u'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
u'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']"
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.User']",
'db_index': 'False'
}
)
},
u'sentry.grouptag': {
'Meta': {
'unique_together': "(('project', 'key', 'value', 'group'),)",
'object_name': 'GroupTag',
'db_table': "'sentry_messagefiltervalue'"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
u'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
u'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.User']",
'unique': 'True'
}
)
},
u'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.pendingteammember': {
'Meta': {
'unique_together': "(('team', 'email'),)",
'object_name': 'PendingTeamMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'pending_member_set'",
'to': u"orm['sentry.Team']"
}
),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '50'
})
},
u'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'),)",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'owner': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_owned_project_set'",
'null': 'True',
'to': u"orm['sentry.User']"
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Team']",
'null': 'True'
}
)
},
u'sentry.projectcountbyminute': {
'Meta': {
'unique_together': "(('project', 'date'),)",
'object_name': 'ProjectCountByMinute'
},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
u'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': u"orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.User']",
'null': 'True'
}
),
'user_added': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'keys_added_set'",
'null': 'True',
'to': u"orm['sentry.User']"
}
)
},
u'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']"
}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.searchdocument': {
'Meta': {
'unique_together': "(('project', 'group'),)",
'object_name': 'SearchDocument'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_changed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']"
}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '1'
})
},
u'sentry.searchtoken': {
'Meta': {
'unique_together': "(('document', 'field', 'token'),)",
'object_name': 'SearchToken'
},
'document': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'token_set'",
'to': u"orm['sentry.SearchDocument']"
}
),
'field':
('django.db.models.fields.CharField', [], {
'default': "'text'",
'max_length': '64'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '1'
}),
'token': ('django.db.models.fields.CharField', [], {
'max_length': '128'
})
},
u'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']"
}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
u'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
u'sentry.team': {
'Meta': {
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'team_memberships'",
'symmetrical': 'False',
'through': u"orm['sentry.TeamMember']",
'to': u"orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'owner':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.User']"
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
})
},
u'sentry.teammember': {
'Meta': {
'unique_together': "(('team', 'user'),)",
'object_name': 'TeamMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': u"orm['sentry.Team']"
}
),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '50'
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_teammember_set'",
'to': u"orm['sentry.User']"
}
)
},
u'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'),)",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.Project']",
'null': 'True'
}
),
'user':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['sentry.User']"
}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry']
|
|
from datetime import date
from django import forms
from django.core.exceptions import FieldError, ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms.models import (modelform_factory, ModelChoiceField,
fields_for_model, construct_instance, ModelFormMetaclass)
from django.utils import unittest
from django.test import TestCase
from models import Person, RealPerson, Triple, FilePathModel, Article, \
Publication, CustomFF, Author, Author1, Homepage, Document, Edition
class ModelMultipleChoiceFieldTests(TestCase):
def test_model_multiple_choice_number_of_queries(self):
"""
Test that ModelMultipleChoiceField does O(1) queries instead of
O(n) (#10156).
"""
persons = [Person.objects.create(name="Person %s" % i) for i in range(30)]
f = forms.ModelMultipleChoiceField(queryset=Person.objects.all())
self.assertNumQueries(1, f.clean, [p.pk for p in persons[1:11:2]])
def test_model_multiple_choice_run_validators(self):
"""
        Test that ModelMultipleChoiceField runs the given validators (#14144).
"""
for i in range(30):
Person.objects.create(name="Person %s" % i)
self._validator_run = False
def my_validator(value):
self._validator_run = True
f = forms.ModelMultipleChoiceField(queryset=Person.objects.all(),
validators=[my_validator])
f.clean([p.pk for p in Person.objects.all()[8:9]])
self.assertTrue(self._validator_run)
class TripleForm(forms.ModelForm):
class Meta:
model = Triple
class UniqueTogetherTests(TestCase):
def test_multiple_field_unique_together(self):
"""
When the same field is involved in multiple unique_together
constraints, we need to make sure we don't remove the data for it
before doing all the validation checking (not just failing after
the first one).
"""
Triple.objects.create(left=1, middle=2, right=3)
form = TripleForm({'left': '1', 'middle': '2', 'right': '3'})
self.assertFalse(form.is_valid())
form = TripleForm({'left': '1', 'middle': '3', 'right': '1'})
self.assertTrue(form.is_valid())
class TripleFormWithCleanOverride(forms.ModelForm):
class Meta:
model = Triple
def clean(self):
        if self.cleaned_data['left'] != self.cleaned_data['right']:
raise forms.ValidationError('Left and right should be equal')
return self.cleaned_data
class OverrideCleanTests(TestCase):
def test_override_clean(self):
"""
Regression for #12596: Calling super from ModelForm.clean() should be
optional.
"""
form = TripleFormWithCleanOverride({'left': 1, 'middle': 2, 'right': 1})
self.assertTrue(form.is_valid())
# form.instance.left will be None if the instance was not constructed
# by form.full_clean().
self.assertEqual(form.instance.left, 1)
# Regression test for #12960.
# Make sure the cleaned_data returned from ModelForm.clean() is applied to the
# model instance.
class PublicationForm(forms.ModelForm):
def clean(self):
self.cleaned_data['title'] = self.cleaned_data['title'].upper()
return self.cleaned_data
class Meta:
model = Publication
class ModelFormCleanTest(TestCase):
def test_model_form_clean_applies_to_model(self):
data = {'title': 'test', 'date_published': '2010-2-25'}
form = PublicationForm(data)
publication = form.save()
self.assertEqual(publication.title, 'TEST')
class FPForm(forms.ModelForm):
class Meta:
model = FilePathModel
class FilePathFieldTests(TestCase):
def test_file_path_field_blank(self):
"""
Regression test for #8842: FilePathField(blank=True)
"""
form = FPForm()
names = [p[1] for p in form['path'].field.choices]
names.sort()
self.assertEqual(names, ['---------', '__init__.py', 'models.py', 'tests.py'])
class ManyToManyCallableInitialTests(TestCase):
def test_callable(self):
"Regression for #10349: A callable can be provided as the initial value for an m2m field"
# Set up a callable initial value
def formfield_for_dbfield(db_field, **kwargs):
if db_field.name == 'publications':
kwargs['initial'] = lambda: Publication.objects.all().order_by('date_published')[:2]
return db_field.formfield(**kwargs)
# Set up some Publications to use as data
book1 = Publication.objects.create(title="First Book", date_published=date(2007,1,1))
book2 = Publication.objects.create(title="Second Book", date_published=date(2008,1,1))
book3 = Publication.objects.create(title="Third Book", date_published=date(2009,1,1))
# Create a ModelForm, instantiate it, and check that the output is as expected
ModelForm = modelform_factory(Article, formfield_callback=formfield_for_dbfield)
form = ModelForm()
self.assertEqual(form.as_ul(), u"""<li><label for="id_headline">Headline:</label> <input id="id_headline" type="text" name="headline" maxlength="100" /></li>
<li><label for="id_publications">Publications:</label> <select multiple="multiple" name="publications" id="id_publications">
<option value="%d" selected="selected">First Book</option>
<option value="%d" selected="selected">Second Book</option>
<option value="%d">Third Book</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>"""
% (book1.pk, book2.pk, book3.pk))
class CFFForm(forms.ModelForm):
class Meta:
model = CustomFF
class CustomFieldSaveTests(TestCase):
def test_save(self):
"Regression for #11149: save_form_data should be called only once"
# It's enough that the form saves without error -- the custom save routine will
# generate an AssertionError if it is called more than once during save.
form = CFFForm(data = {'f': None})
form.save()
class ModelChoiceIteratorTests(TestCase):
def test_len(self):
class Form(forms.ModelForm):
class Meta:
model = Article
fields = ["publications"]
Publication.objects.create(title="Pravda",
date_published=date(1991, 8, 22))
f = Form()
self.assertEqual(len(f.fields["publications"].choices), 1)
class RealPersonForm(forms.ModelForm):
class Meta:
model = RealPerson
class CustomModelFormSaveMethod(TestCase):
def test_string_message(self):
data = {'name': 'anonymous'}
form = RealPersonForm(data)
self.assertEqual(form.is_valid(), False)
self.assertEqual(form.errors['__all__'], ['Please specify a real name.'])
class ModelClassTests(TestCase):
def test_no_model_class(self):
class NoModelModelForm(forms.ModelForm):
pass
self.assertRaises(ValueError, NoModelModelForm)
class OneToOneFieldTests(TestCase):
def test_assignment_of_none(self):
class AuthorForm(forms.ModelForm):
class Meta:
model = Author
fields = ['publication', 'full_name']
publication = Publication.objects.create(title="Pravda",
date_published=date(1991, 8, 22))
author = Author.objects.create(publication=publication, full_name='John Doe')
form = AuthorForm({'publication':u'', 'full_name':'John Doe'}, instance=author)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['publication'], None)
author = form.save()
        # The author object returned from the form still retains the original
        # publication object; that's why we need to retrieve it from the database again.
new_author = Author.objects.get(pk=author.pk)
self.assertEqual(new_author.publication, None)
def test_assignment_of_none_null_false(self):
class AuthorForm(forms.ModelForm):
class Meta:
model = Author1
fields = ['publication', 'full_name']
publication = Publication.objects.create(title="Pravda",
date_published=date(1991, 8, 22))
author = Author1.objects.create(publication=publication, full_name='John Doe')
form = AuthorForm({'publication':u'', 'full_name':'John Doe'}, instance=author)
        self.assertFalse(form.is_valid())
class ModelChoiceForm(forms.Form):
person = ModelChoiceField(Person.objects.all())
class TestTicket11183(TestCase):
def test_11183(self):
form1 = ModelChoiceForm()
field1 = form1.fields['person']
# To allow the widget to change the queryset of field1.widget.choices correctly,
# without affecting other forms, the following must hold:
self.assertTrue(field1 is not ModelChoiceForm.base_fields['person'])
self.assertTrue(field1.widget.choices.field is field1)
class HomepageForm(forms.ModelForm):
class Meta:
model = Homepage
class URLFieldTests(TestCase):
def test_url_on_modelform(self):
"Check basic URL field validation on model forms"
self.assertFalse(HomepageForm({'url': 'foo'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example.'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://com.'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://localhost'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com/foo/bar'}).is_valid())
def test_http_prefixing(self):
"If the http:// prefix is omitted on form input, the field adds it again. (Refs #13613)"
form = HomepageForm({'url': 'example.com'})
form.is_valid()
# self.assertTrue(form.is_valid())
# self.assertEqual(form.cleaned_data['url'], 'http://example.com/')
form = HomepageForm({'url': 'example.com/test'})
form.is_valid()
# self.assertTrue(form.is_valid())
# self.assertEqual(form.cleaned_data['url'], 'http://example.com/test')
class FormFieldCallbackTests(TestCase):
def test_baseform_with_widgets_in_meta(self):
"""Regression for #13095: Using base forms with widgets defined in Meta should not raise errors."""
widget = forms.Textarea()
class BaseForm(forms.ModelForm):
class Meta:
model = Person
widgets = {'name': widget}
Form = modelform_factory(Person, form=BaseForm)
self.assertTrue(Form.base_fields['name'].widget is widget)
def test_custom_callback(self):
"""Test that a custom formfield_callback is used if provided"""
callback_args = []
def callback(db_field, **kwargs):
callback_args.append((db_field, kwargs))
return db_field.formfield(**kwargs)
widget = forms.Textarea()
class BaseForm(forms.ModelForm):
class Meta:
model = Person
widgets = {'name': widget}
_ = modelform_factory(Person, form=BaseForm,
formfield_callback=callback)
id_field, name_field = Person._meta.fields
self.assertEqual(callback_args,
[(id_field, {}), (name_field, {'widget': widget})])
def test_bad_callback(self):
# A bad callback provided by user still gives an error
self.assertRaises(TypeError, modelform_factory, Person,
formfield_callback='not a function or callable')
class InvalidFieldAndFactory(TestCase):
""" Tests for #11905 """
def test_extra_field_model_form(self):
try:
class ExtraPersonForm(forms.ModelForm):
""" ModelForm with an extra field """
age = forms.IntegerField()
class Meta:
model = Person
fields = ('name', 'no-field')
except FieldError, e:
# Make sure the exception contains some reference to the
# field responsible for the problem.
self.assertTrue('no-field' in e.args[0])
else:
self.fail('Invalid "no-field" field not caught')
def test_extra_declared_field_model_form(self):
try:
class ExtraPersonForm(forms.ModelForm):
""" ModelForm with an extra field """
age = forms.IntegerField()
class Meta:
model = Person
fields = ('name', 'age')
except FieldError:
self.fail('Declarative field raised FieldError incorrectly')
def test_extra_field_modelform_factory(self):
self.assertRaises(FieldError, modelform_factory,
Person, fields=['no-field', 'name'])
class DocumentForm(forms.ModelForm):
class Meta:
model = Document
class FileFieldTests(unittest.TestCase):
def test_clean_false(self):
"""
If the ``clean`` method on a non-required FileField receives False as
the data (meaning clear the field value), it returns False, regardless
of the value of ``initial``.
"""
f = forms.FileField(required=False)
self.assertEqual(f.clean(False), False)
self.assertEqual(f.clean(False, 'initial'), False)
def test_clean_false_required(self):
"""
If the ``clean`` method on a required FileField receives False as the
data, it has the same effect as None: initial is returned if non-empty,
otherwise the validation catches the lack of a required value.
"""
f = forms.FileField(required=True)
self.assertEqual(f.clean(False, 'initial'), 'initial')
self.assertRaises(ValidationError, f.clean, False)
def test_full_clear(self):
"""
Integration happy-path test that a model FileField can actually be set
and cleared via a ModelForm.
"""
form = DocumentForm()
self.assertTrue('name="myfile"' in unicode(form))
self.assertTrue('myfile-clear' not in unicode(form))
form = DocumentForm(files={'myfile': SimpleUploadedFile('something.txt', 'content')})
self.assertTrue(form.is_valid())
doc = form.save(commit=False)
self.assertEqual(doc.myfile.name, 'something.txt')
form = DocumentForm(instance=doc)
self.assertTrue('myfile-clear' in unicode(form))
form = DocumentForm(instance=doc, data={'myfile-clear': 'true'})
doc = form.save(commit=False)
self.assertEqual(bool(doc.myfile), False)
def test_clear_and_file_contradiction(self):
"""
If the user submits a new file upload AND checks the clear checkbox,
they get a validation error, and the bound redisplay of the form still
includes the current file and the clear checkbox.
"""
form = DocumentForm(files={'myfile': SimpleUploadedFile('something.txt', 'content')})
self.assertTrue(form.is_valid())
doc = form.save(commit=False)
form = DocumentForm(instance=doc,
files={'myfile': SimpleUploadedFile('something.txt', 'content')},
data={'myfile-clear': 'true'})
        self.assertFalse(form.is_valid())
self.assertEqual(form.errors['myfile'],
[u'Please either submit a file or check the clear checkbox, not both.'])
rendered = unicode(form)
self.assertTrue('something.txt' in rendered)
self.assertTrue('myfile-clear' in rendered)
class EditionForm(forms.ModelForm):
author = forms.ModelChoiceField(queryset=Person.objects.all())
publication = forms.ModelChoiceField(queryset=Publication.objects.all())
edition = forms.IntegerField()
isbn = forms.CharField(max_length=13)
class Meta:
model = Edition
class UniqueErrorsTests(TestCase):
def setUp(self):
self.author1 = Person.objects.create(name=u'Author #1')
self.author2 = Person.objects.create(name=u'Author #2')
self.pub1 = Publication.objects.create(title='Pub #1', date_published=date(2000, 10, 31))
self.pub2 = Publication.objects.create(title='Pub #2', date_published=date(2004, 1, 5))
form = EditionForm(data={'author': self.author1.pk, 'publication': self.pub1.pk, 'edition': 1, 'isbn': '9783161484100'})
form.save()
def test_unique_error_message(self):
form = EditionForm(data={'author': self.author1.pk, 'publication': self.pub2.pk, 'edition': 1, 'isbn': '9783161484100'})
self.assertEqual(form.errors, {'isbn': [u'Edition with this Isbn already exists.']})
def test_unique_together_error_message(self):
form = EditionForm(data={'author': self.author1.pk, 'publication': self.pub1.pk, 'edition': 2, 'isbn': '9783161489999'})
self.assertEqual(form.errors, {'__all__': [u'Edition with this Author and Publication already exists.']})
form = EditionForm(data={'author': self.author2.pk, 'publication': self.pub1.pk, 'edition': 1, 'isbn': '9783161487777'})
self.assertEqual(form.errors, {'__all__': [u'Edition with this Publication and Edition already exists.']})
class EmptyFieldsTestCase(TestCase):
"Tests for fields=() cases as reported in #14119"
class EmptyPersonForm(forms.ModelForm):
class Meta:
model = Person
fields = ()
def test_empty_fields_to_fields_for_model(self):
"An argument of fields=() to fields_for_model should return an empty dictionary"
field_dict = fields_for_model(Person, fields=())
self.assertEqual(len(field_dict), 0)
def test_empty_fields_on_modelform(self):
"No fields on a ModelForm should actually result in no fields"
form = self.EmptyPersonForm()
self.assertEqual(len(form.fields), 0)
def test_empty_fields_to_construct_instance(self):
"No fields should be set on a model instance if construct_instance receives fields=()"
form = modelform_factory(Person)({'name': 'John Doe'})
self.assertTrue(form.is_valid())
instance = construct_instance(form, Person(), fields=())
self.assertEqual(instance.name, '')
class CustomMetaclass(ModelFormMetaclass):
def __new__(cls, name, bases, attrs):
new = super(CustomMetaclass, cls).__new__(cls, name, bases, attrs)
new.base_fields = {}
return new
class CustomMetaclassForm(forms.ModelForm):
__metaclass__ = CustomMetaclass
class CustomMetaclassTestCase(TestCase):
def test_modelform_factory_metaclass(self):
new_cls = modelform_factory(Person, form=CustomMetaclassForm)
self.assertEqual(new_cls.base_fields, {})
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import base64
import logging
from keyring.errors import (PasswordDeleteError)
from keyring.backend import KeyringBackend
from botocore.exceptions import EndpointConnectionError
import boto3facade.s3
import string
import six
import keyring
import sys
from s3keyring.settings import config
# Python2/3 compatibility
if sys.version_info.major == 2:
from base64 import decodestring as base64_decode
from base64 import encodestring as base64_encode
elif sys.version_info.major == 3:
from base64 import decodebytes as base64_decode
from base64 import encodebytes as base64_encode
else:
raise Exception("Invalid Python major version: {}".format(sys.version_info.major))
LEGAL_CHARS = (
getattr(string, 'letters', None) # Python 2.x
or getattr(string, 'ascii_letters') # Python 3.x
) + string.digits + '/_-'
ESCAPE_FMT = "_{:02X}"
logger = logging.getLogger()
logger.setLevel("INFO")
class PasswordGetError(Exception):
"""Raised when there is an error retrieving a password.
"""
pass
class InitError(Exception):
"""Raised when the S3 backend has not been properly initialized
"""
pass
class S3Backed(object):
def __init__(self, config_file=None):
"""Initialize the keyring."""
if config_file:
config.boto_config.config_file = config_file
config.boto_config.load()
self.s3 = boto3facade.s3.S3(config=config.boto_config)
@property
def bucket(self):
return self.s3.resource.Bucket(config.profile["bucket"])
def supported(self):
try:
resp = self.s3.client.list_objects(Bucket=self.bucket.name)
return resp['ResponseMetadata']['HTTPStatusCode'] == 200
        except Exception:
            # any failure (missing credentials, unreachable bucket, ...) means
            # the backend is unusable
return False
@property
def kms_key_id(self):
return config.profile["kms_key_id"]
@property
def use_local_keyring(self):
return config.profile.get("use_local_keyring", "no") == 'yes'
@property
def namespace(self):
"""A namespace is simply a shared S3 prefix across a set of keys."""
try:
return _escape_for_s3(config.profile["namespace"])
except KeyError:
logger.error("No keyring namespace was found: did you run "
"`s3keyring configure`?")
class S3Keyring(S3Backed, KeyringBackend):
"""
    S3Keyring is an S3-based implementation of keyring.
This keyring stores the password directly in S3 and provides methods
which may be overridden by subclasses to support
encryption and decryption. The encrypted payload is stored in base64
format.
"""
def _get_s3_key(self, service, username):
"""The S3 key where the secret will be stored."""
return "{}/{}/{}/secret.b64".format(self.namespace, service, username)
def _get_service_prefix(self, service):
"""Get the S3 prefix for a given service."""
return "{}/{}".format(self.namespace, service)
def get_value(self, *args, **kwargs):
"""An alias of method get_password"""
return self.get_password(*args, **kwargs)
def list_keys(self, service):
"""List the keys associated to a given service."""
prefix = self._get_service_prefix(service)
        # strip the "<prefix>/" head and the trailing "/secret.b64" (11 chars)
        return [x.key[len(prefix)+1:-11] for x
in list(self.bucket.objects.filter(Prefix=prefix))]
def get_password(self, service, username):
"""Read the password from the S3 bucket."""
service = _escape_for_s3(service)
username = _escape_for_s3(username)
# Read the password from S3
prefix = self._get_s3_key(service, username)
try:
values = list(self.bucket.objects.filter(Prefix=prefix))
except EndpointConnectionError:
if self.use_local_keyring:
# Can't connect to S3: fallback to the local keyring
print("WARNING: can't connect to S3, using OS keyring instead",
file=sys.stderr)
return keyring.get_password(service, username)
else:
raise
if len(values) == 0:
# service/username not found
return
if len(values) > 1:
msg = "Ambiguous prefix {prefix} in bucket {bucket}.".format(
prefix=prefix, bucket=self.bucket.name)
raise PasswordGetError(msg)
pwd_base64 = values[0].get()['Body'].read()
pwd = base64_decode(pwd_base64)
return pwd.decode('utf-8')
def set_value(self, *args, **kwargs):
"""An alias for method set_password."""
return self.set_password(*args, **kwargs)
def set_password(self, service, username, password):
"""Write the password in the S3 bucket."""
service = _escape_for_s3(service)
username = _escape_for_s3(username)
pwd_base64 = base64_encode(password.encode('utf-8')).decode()
# Save in S3 using both server and client side encryption
keyname = self._get_s3_key(service, username)
try:
self.bucket.Object(keyname).put(ACL='private', Body=pwd_base64,
ServerSideEncryption='aws:kms',
SSEKMSKeyId=self.kms_key_id)
except EndpointConnectionError:
if self.use_local_keyring:
# Can't connect to S3: fallback to OS keyring
print("WARNING: can't connect to S3, storing in OS keyring",
file=sys.stderr)
keyring.set_password(service, username, password)
else:
raise
def delete_value(self, *args, **kwargs):
"""An alias for delete_password."""
return self.delete_password(*args, **kwargs)
def delete_password(self, service, username):
"""Delete the password for the username of the service."""
service = _escape_for_s3(service)
username = _escape_for_s3(username)
prefix = self._get_s3_key(service, username)
try:
objects = list(self.bucket.objects.filter(Prefix=prefix))
if len(objects) == 0:
msg = ("Password for {service}/{username} not found"
).format(service=service, username=username)
raise PasswordDeleteError(msg)
            elif len(objects) > 1:
                msg = ("Multiple objects in bucket {bucket} match the prefix "
                       "{prefix}.").format(bucket=self.bucket.name,
                                           prefix=prefix)
                raise PasswordDeleteError(msg)
else:
objects[0].delete()
except EndpointConnectionError:
if self.use_local_keyring:
# Can't connect to S3: fallback to OS keyring
print("WARNING: can't connect to S3, deleting from OS keyring",
file=sys.stderr)
else:
raise
# Delete also in the local keyring
if self.use_local_keyring:
try:
keyring.delete_password(service, username)
except PasswordDeleteError:
# It's OK: the password was not available in the local keyring
print("WARNING: {}/{} not found in OS keyring".format(
service, username))
def _escape_char(c):
if isinstance(c, int):
c = six.unichr(c)
return c if c in LEGAL_CHARS else ESCAPE_FMT.format(ord(c))
def _escape_for_s3(value):
return "".join(_escape_char(c) for c in value.encode('utf-8'))
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Nahuel Riva
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
PE directory classes.
"""
__revision__ = "$Id$"
import datatypes
import consts
import datadirs
import excep
import utils
import baseclasses
import dotnet
# typedef struct IMAGE_BOUND_FORWARDER_REF
# {
# DWORD TimeDateStamp;
# WORD OffsetModuleName;
# WORD Reserved;
# }
class ImageBoundForwarderRefEntry(baseclasses.BaseStructClass):
"""ImageBoundForwarderRefEntry object."""
def __init__(self, shouldPack = True):
"""
This class represents an element of type C{IMAGE_BOUND_FORWARDER_REF}.
@see: U{http://msdn.microsoft.com/en-us/magazine/cc301808.aspx}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.timeDateStamp = datatypes.DWORD(0) #: L{DWORD} timeDateStamp.
self.offsetModuleName = datatypes.WORD(0) #: L{WORD} offsetModuleName.
self.reserved = datatypes.WORD(0) #: L{WORD} reserved.
self.moduleName = datatypes.String(shouldPack = False) #: moduleName is metadata, not part of the structure.
self._attrsList = ["timeDateStamp", "offsetModuleName", "reserved", "moduleName"]
def getType(self):
"""Returns L{consts.IMAGE_BOUND_FORWARDER_REF_ENTRY}."""
return consts.IMAGE_BOUND_FORWARDER_REF_ENTRY
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageBoundForwarderRefEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with the corresponding data to generate a new L{ImageBoundForwarderRefEntry} object.
@rtype: L{ImageBoundForwarderRefEntry}
@return: A new L{ImageBoundForwarderRefEntry} object.
"""
boundForwarderEntry = ImageBoundForwarderRefEntry()
boundForwarderEntry.timeDateStamp.value = readDataInstance.readDword()
boundForwarderEntry.offsetModuleName.value = readDataInstance.readWord()
boundForwarderEntry.reserved.value = readDataInstance.readWord()
return boundForwarderEntry
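# A minimal sketch (not part of the original module): a raw
# IMAGE_BOUND_FORWARDER_REF blob fed to the parser above. It assumes
# utils.ReadData accepts a plain byte string; the field values are arbitrary.
def _demoParseBoundForwarderRef():
    import struct
    raw = struct.pack("<IHH", 0x4E6F7721, 0x1A, 0)  # TimeDateStamp, OffsetModuleName, Reserved
    entry = ImageBoundForwarderRefEntry.parse(utils.ReadData(raw))
    return (entry.timeDateStamp.value, entry.offsetModuleName.value)  # (0x4E6F7721, 0x1A)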
class ImageBoundForwarderRef(list):
"""ImageBoundForwarderRef array object."""
def __init__(self, shouldPack = True):
"""
This class is a wrapper over an array of C{IMAGE_BOUND_FORWARDER_REF}.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
list.__init__(self)
self.shouldPack = shouldPack
def __str__(self):
return ''.join([str(x) for x in self if x.shouldPack])
@staticmethod
def parse(readDataInstance, numberOfEntries):
"""
Returns a L{ImageBoundForwarderRef} array where every element is a L{ImageBoundForwarderRefEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with the corresponding data to generate a new L{ImageBoundForwarderRef} object.
@type numberOfEntries: int
@param numberOfEntries: The number of C{IMAGE_BOUND_FORWARDER_REF} entries in the array.
@rtype: L{ImageBoundForwarderRef}
@return: A new L{ImageBoundForwarderRef} object.
@raise DataLengthException: If the L{ReadData} instance has less data than C{NumberOfEntries} * sizeof L{ImageBoundForwarderRefEntry}.
"""
imageBoundForwarderRefsList = ImageBoundForwarderRef()
dLength = len(readDataInstance)
entryLength = ImageBoundForwarderRefEntry().sizeof()
toRead = numberOfEntries * entryLength
if dLength >= toRead:
for i in range(numberOfEntries):
entryData = readDataInstance.read(entryLength)
rd = utils.ReadData(entryData)
imageBoundForwarderRefsList.append(ImageBoundForwarderRefEntry.parse(rd))
else:
raise excep.DataLengthException("Not enough bytes to read.")
return imageBoundForwarderRefsList
class ImageBoundImportDescriptor(list):
"""ImageBoundImportDescriptor object."""
def __init__(self, shouldPack = True):
"""
Array of L{ImageBoundImportDescriptorEntry} objects.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
list.__init__(self)
self.shouldPack = shouldPack
def __str__(self):
return ''.join([str(x) for x in self if x.shouldPack])
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageBoundImportDescriptor} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object containing the data to create a new L{ImageBoundImportDescriptor} object.
@rtype: L{ImageBoundImportDescriptor}
        @return: A new L{ImageBoundImportDescriptor} object.
"""
ibd = ImageBoundImportDescriptor()
entryData = readDataInstance.read(consts.SIZEOF_IMAGE_BOUND_IMPORT_ENTRY32)
readDataInstance.offset = 0
while not utils.allZero(entryData):
prevOffset = readDataInstance.offset
boundEntry = ImageBoundImportDescriptorEntry.parse(readDataInstance)
            # if the parsed entry has forwarder refs, adjust readDataInstance.offset
            # so that it points past the corresponding ImageBoundForwarderRefEntry records.
if boundEntry.numberOfModuleForwarderRefs.value:
readDataInstance.offset = prevOffset + (consts.SIZEOF_IMAGE_BOUND_FORWARDER_REF_ENTRY32 * boundEntry.numberOfModuleForwarderRefs.value)
else:
readDataInstance.offset = prevOffset
ibd.append(boundEntry)
entryData = readDataInstance.read(consts.SIZEOF_IMAGE_BOUND_IMPORT_ENTRY32)
return ibd
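# A minimal sketch (not part of the original module) of the zero-terminated
# array convention handled by ImageBoundImportDescriptor.parse above,
# assuming the usual 8-byte IMAGE_BOUND_IMPORT_DESCRIPTOR entry size. Note
# that the loop above also appends the all-zero terminator it stops on.
def _demoParseBoundImportDescriptor():
    import struct
    entry = struct.pack("<IHH", 0x12345678, 0x20, 0)  # one descriptor, no forwarder refs
    terminator = "\x00" * 8                           # all-zero descriptor ends the array
    return ImageBoundImportDescriptor.parse(utils.ReadData(entry + terminator))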
# typedef struct IMAGE_BOUND_IMPORT_DESCRIPTOR
# {
# DWORD TimeDateStamp;
# WORD OffsetModuleName;
# WORD NumberOfModuleForwarderRefs;
# }
class ImageBoundImportDescriptorEntry(baseclasses.BaseStructClass):
"""ImageBoundImportDescriptorEntry object."""
def __init__(self, shouldPack = True):
"""
This class represents a C{IMAGE_BOUND_IMPORT_DESCRIPTOR} structure.
@see: U{http://msdn.microsoft.com/en-us/magazine/cc301808.aspx}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.timeDateStamp = datatypes.DWORD(0) #: L{DWORD} timeDateStamp.
self.offsetModuleName = datatypes.WORD(0) #: L{WORD} offsetModuleName.
self.numberOfModuleForwarderRefs = datatypes.WORD(0)#: L{WORD} numberOfModuleForwarderRefs.
self.forwarderRefsList = ImageBoundForwarderRef() #: L{ImageBoundForwarderRef} forwarderRefsList.
self.moduleName = datatypes.String(shouldPack = False) #: moduleName is metadata, not part of the structure.
self._attrsList = ["timeDateStamp", "offsetModuleName", "numberOfModuleForwarderRefs", "forwarderRefsList", "moduleName"]
def getType(self):
"""Returns L{consts.IMAGE_BOUND_IMPORT_DESCRIPTOR_ENTRY}"""
return consts.IMAGE_BOUND_IMPORT_DESCRIPTOR_ENTRY
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageBoundImportDescriptorEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object containing data to create a new L{ImageBoundImportDescriptorEntry}.
@rtype: L{ImageBoundImportDescriptorEntry}
        @return: A new L{ImageBoundImportDescriptorEntry} object.
"""
boundEntry = ImageBoundImportDescriptorEntry()
boundEntry.timeDateStamp.value = readDataInstance.readDword()
boundEntry.offsetModuleName.value = readDataInstance.readWord()
boundEntry.numberOfModuleForwarderRefs.value = readDataInstance.readWord()
        numberOfForwarderRefsEntries = boundEntry.numberOfModuleForwarderRefs.value
if numberOfForwarderRefsEntries:
bytesToRead = numberOfForwarderRefsEntries * ImageBoundForwarderRefEntry().sizeof()
rd = utils.ReadData(readDataInstance.read(bytesToRead))
boundEntry.forwarderRefsList = ImageBoundForwarderRef.parse(rd, numberOfForwarderRefsEntries)
return boundEntry
class TLSDirectory(baseclasses.BaseStructClass):
"""TLS directory object."""
def __init__(self, shouldPack = True):
"""
Class representation of a C{IMAGE_TLS_DIRECTORY} structure.
@see: Figure 11 U{http://msdn.microsoft.com/en-us/magazine/bb985996.aspx}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.startAddressOfRawData = datatypes.DWORD(0) #: L{DWORD} startAddressOfRawData.
self.endAddressOfRawData = datatypes.DWORD(0) #: L{DWORD} endAddressOfRawData.
self.addressOfIndex = datatypes.DWORD(0) #: L{DWORD} addressOfIndex.
self.addressOfCallbacks = datatypes.DWORD(0) #: L{DWORD} addressOfCallbacks.
self.sizeOfZeroFill = datatypes.DWORD(0) #: L{DWORD} sizeOfZeroFill.
self.characteristics = datatypes.DWORD(0) #:L{DWORD} characteristics.
self._attrsList = ["startAddressOfRawData", "endAddressOfRawData", "addressOfIndex", "addressOfCallbacks",\
"sizeOfZeroFill", "characteristics"]
def getType(self):
"""Returns L{consts.TLS_DIRECTORY}."""
return consts.TLS_DIRECTORY32
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{TLSDirectory} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object containing data to create a new L{TLSDirectory} object.
@rtype: L{TLSDirectory}
        @return: A new L{TLSDirectory} object.
"""
tlsDir = TLSDirectory()
tlsDir.startAddressOfRawData.value = readDataInstance.readDword()
tlsDir.endAddressOfRawData.value = readDataInstance.readDword()
tlsDir.addressOfIndex.value = readDataInstance.readDword()
tlsDir.addressOfCallbacks.value = readDataInstance.readDword()
tlsDir.sizeOfZeroFill.value = readDataInstance.readDword()
tlsDir.characteristics.value = readDataInstance.readDword()
return tlsDir
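# A minimal sketch (not part of the original module): the six DWORD fields of
# a 32-bit IMAGE_TLS_DIRECTORY packed little-endian and round-tripped through
# parse. The addresses are arbitrary sample values.
def _demoParseTlsDirectory():
    import struct
    raw = struct.pack("<6I", 0x401000, 0x401100, 0x403000, 0x402000, 0, 0)
    tls = TLSDirectory.parse(utils.ReadData(raw))
    return tls.addressOfCallbacks.value  # 0x402000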
class TLSDirectory64(baseclasses.BaseStructClass):
"""TLSDirectory64 object."""
def __init__(self, shouldPack = True):
"""
Class representation of a C{IMAGE_TLS_DIRECTORY} structure in 64 bits systems.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.startAddressOfRawData = datatypes.QWORD(0) #: L{QWORD} startAddressOfRawData.
self.endAddressOfRawData = datatypes.QWORD(0) #: L{QWORD} endAddressOfRawData.
self.addressOfIndex = datatypes.QWORD(0) #: L{QWORD} addressOfIndex.
self.addressOfCallbacks = datatypes.QWORD(0) #: L{QWORD} addressOfCallbacks.
self.sizeOfZeroFill = datatypes.DWORD(0) #: L{DWORD} sizeOfZeroFill.
self.characteristics = datatypes.DWORD(0) #: L{DWORD} characteristics.
self._attrsList = ["startAddressOfRawData", "endAddressOfRawData", "addressOfIndex", "addressOfCallbacks",\
"sizeOfZeroFill", "characteristics"]
def getType(self):
"""Returns L{consts.TLS_DIRECTORY64}."""
return consts.TLS_DIRECTORY64
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{TLSDirectory64} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object containing data to create a new L{TLSDirectory64} object.
@rtype: L{TLSDirectory64}
@return: A new L{TLSDirectory64} object.
"""
tlsDir = TLSDirectory64()
tlsDir.startAddressOfRawData.value = readDataInstance.readQword()
tlsDir.endAddressOfRawData.value = readDataInstance.readQword()
tlsDir.addressOfIndex.value = readDataInstance.readQword()
tlsDir.addressOfCallbacks.value = readDataInstance.readQword()
tlsDir.sizeOfZeroFill.value = readDataInstance.readDword()
tlsDir.characteristics.value = readDataInstance.readDword()
return tlsDir
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms680328%28v=vs.85%29.aspx
class ImageLoadConfigDirectory(baseclasses.BaseStructClass):
"IMAGE_LOAD_CONFIG_DIRECTORY32 object aka CONFIGURATION_DIRECTORY"
def __init__(self, shouldPack = True):
"""
Class representation of a C{IMAGE_LOAD_CONFIG_DIRECTORY32} structure.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.size = datatypes.DWORD()
self.timeDateStamp = datatypes.DWORD()
self.majorVersion = datatypes.WORD()
self.minorVersion = datatypes.WORD()
self.globalFlagsClear = datatypes.DWORD()
self.globalFlagsSet = datatypes.DWORD()
self.criticalSectionDefaultTimeout = datatypes.DWORD()
self.deCommitFreeBlockThreshold = datatypes.DWORD()
self.deCommitTotalFreeThreshold = datatypes.DWORD()
self.lockPrefixTable = datatypes.DWORD() # VA
self.maximumAllocationSize = datatypes.DWORD()
self.virtualMemoryThreshold = datatypes.DWORD()
self.processHeapFlags = datatypes.DWORD()
self.processAffinityMask = datatypes.DWORD()
self.csdVersion = datatypes.WORD()
self.reserved1 = datatypes.WORD()
self.editList = datatypes.DWORD() # VA
self.securityCookie = datatypes.DWORD() # VA
self.SEHandlerTable = datatypes.DWORD() # VA
self.SEHandlerCount = datatypes.DWORD()
# Fields for Control Flow Guard
self.GuardCFCheckFunctionPointer = datatypes.DWORD() # VA
self.Reserved2 = datatypes.DWORD()
self.GuardCFFunctionTable = datatypes.DWORD() # VA
self.GuardCFFunctionCount = datatypes.DWORD()
self.GuardFlags = datatypes.DWORD()
self._attrsList = ["size", "timeDateStamp", "majorVersion", "minorVersion", "globalFlagsClear", "globalFlagsSet", "criticalSectionDefaultTimeout", "deCommitFreeBlockThreshold",\
"deCommitTotalFreeThreshold", "lockPrefixTable", "maximumAllocationSize", "virtualMemoryThreshold", "processHeapFlags", "processAffinityMask", "csdVersion",\
"reserved1", "editList", "securityCookie", "SEHandlerTable","SEHandlerCount", "GuardCFCheckFunctionPointer", "Reserved2", "GuardCFFunctionTable",\
"GuardCFFunctionCount", "GuardFlags"]
def getType(self):
"""Returns L{consts.IMAGE_LOAD_CONFIG_DIRECTORY32}."""
return consts.IMAGE_LOAD_CONFIG_DIRECTORY32
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageLoadConfigDirectory} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object containing data to create a new L{ImageLoadConfigDirectory} object.
@rtype: L{ImageLoadConfigDirectory}
@return: A new L{ImageLoadConfigDirectory} object.
"""
configDir = ImageLoadConfigDirectory()
configDir.size.value = readDataInstance.readDword()
configDir.timeDateStamp.value = readDataInstance.readDword()
configDir.majorVersion.value = readDataInstance.readWord()
configDir.minorVersion.value = readDataInstance.readWord()
configDir.globalFlagsClear.value = readDataInstance.readDword()
configDir.globalFlagsSet.value = readDataInstance.readDword()
configDir.criticalSectionDefaultTimeout.value = readDataInstance.readDword()
configDir.deCommitFreeBlockThreshold.value = readDataInstance.readDword()
configDir.deCommitTotalFreeThreshold.value = readDataInstance.readDword()
configDir.lockPrefixTable.value = readDataInstance.readDword() # VA
configDir.maximumAllocationSize.value = readDataInstance.readDword()
configDir.virtualMemoryThreshold.value = readDataInstance.readDword()
configDir.processHeapFlags.value = readDataInstance.readDword()
configDir.processAffinityMask.value = readDataInstance.readDword()
configDir.csdVersion.value = readDataInstance.readWord()
configDir.reserved1.value = readDataInstance.readWord()
configDir.editList.value = readDataInstance.readDword() # VA
configDir.securityCookie.value = readDataInstance.readDword() # VA
configDir.SEHandlerTable.value = readDataInstance.readDword() # VA
configDir.SEHandlerCount.value = readDataInstance.readDword()
# Fields for Control Flow Guard
configDir.GuardCFCheckFunctionPointer.value = readDataInstance.readDword() # VA
configDir.Reserved2.value = readDataInstance.readDword()
configDir.GuardCFFunctionTable.value = readDataInstance.readDword() # VA
configDir.GuardCFFunctionCount.value = readDataInstance.readDword()
configDir.GuardFlags.value = readDataInstance.readDword()
return configDir
class ImageLoadConfigDirectory64(baseclasses.BaseStructClass):
"IMAGE_LOAD_CONFIG_DIRECTORY64 object"
def __init__(self, shouldPack = True):
"""
Class representation of a C{IMAGE_LOAD_CONFIG_DIRECTORY64} structure in 64 bits systems.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.size = datatypes.DWORD()
self.timeDateStamp = datatypes.DWORD()
self.majorVersion = datatypes.WORD()
self.minorVersion = datatypes.WORD()
self.globalFlagsClear = datatypes.DWORD()
self.globalFlagsSet = datatypes.DWORD()
self.criticalSectionDefaultTimeout = datatypes.DWORD()
self.deCommitFreeBlockThreshold = datatypes.QWORD()
self.deCommitTotalFreeThreshold = datatypes.QWORD()
self.lockPrefixTable = datatypes.QWORD()
self.maximumAllocationSize = datatypes.QWORD()
self.virtualMemoryThreshold = datatypes.QWORD()
self.processAffinityMask = datatypes.QWORD()
self.processHeapFlags = datatypes.DWORD()
self.cdsVersion = datatypes.WORD()
self.reserved1 = datatypes.WORD()
self.editList = datatypes.QWORD()
self.securityCookie = datatypes.QWORD()
self.SEHandlerTable = datatypes.QWORD()
self.SEHandlerCount = datatypes.QWORD()
# Fields for Control Flow Guard
self.GuardCFCheckFunctionPointer = datatypes.QWORD() # VA
self.Reserved2 = datatypes.QWORD()
self.GuardCFFunctionTable = datatypes.QWORD() # VA
self.GuardCFFunctionCount = datatypes.QWORD()
self.GuardFlags = datatypes.QWORD()
self._attrsList = ["size", "timeDateStamp", "majorVersion", "minorVersion", "globalFlagsClear", "globalFlagsSet", "criticalSectionDefaultTimeout", "deCommitFreeBlockThreshold",\
"deCommitTotalFreeThreshold", "lockPrefixTable", "maximumAllocationSize", "virtualMemoryThreshold", "processAffinityMask", "processHeapFlags", "cdsVersion",\
"reserved1", "editList", "securityCookie", "SEHandlerTable", "SEHandlerCount", "GuardCFCheckFunctionPointer", "Reserved2", "GuardCFFunctionTable",\
"GuardCFFunctionCount", "GuardFlags"]
def getType(self):
"""Returns L{consts.IMAGE_LOAD_CONFIG_DIRECTORY64}."""
return consts.IMAGE_LOAD_CONFIG_DIRECTORY64
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageLoadConfigDirectory64} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object containing data to create a new L{ImageLoadConfigDirectory64} object.
@rtype: L{ImageLoadConfigDirectory64}
@return: A new L{ImageLoadConfigDirectory64} object.
"""
configDir = ImageLoadConfigDirectory64()
configDir.size.value = readDataInstance.readDword()
configDir.timeDateStamp.value = readDataInstance.readDword()
configDir.majorVersion.value = readDataInstance.readWord()
configDir.minorVersion.value = readDataInstance.readWord()
configDir.globalFlagsClear.value = readDataInstance.readDword()
configDir.globalFlagsSet.value = readDataInstance.readDword()
configDir.criticalSectionDefaultTimeout.value = readDataInstance.readDword()
configDir.deCommitFreeBlockThreshold.value = readDataInstance.readQword()
configDir.deCommitTotalFreeThreshold.value = readDataInstance.readQword()
configDir.lockPrefixTable.value = readDataInstance.readQword()
configDir.maximumAllocationSize.value = readDataInstance.readQword()
configDir.virtualMemoryThreshold.value = readDataInstance.readQword()
configDir.processAffinityMask.value = readDataInstance.readQword()
configDir.processHeapFlags.value = readDataInstance.readDword()
configDir.cdsVersion.value = readDataInstance.readWord()
configDir.reserved1.value = readDataInstance.readWord()
configDir.editList.value = readDataInstance.readQword()
configDir.securityCookie.value = readDataInstance.readQword()
configDir.SEHandlerTable.value = readDataInstance.readQword()
configDir.SEHandlerCount.value = readDataInstance.readQword()
# Fields for Control Flow Guard
configDir.GuardCFCheckFunctionPointer.value = readDataInstance.readQword() # VA
configDir.Reserved2.value = readDataInstance.readQword()
configDir.GuardCFFunctionTable.value = readDataInstance.readQword() # VA
configDir.GuardCFFunctionCount.value = readDataInstance.readQword()
configDir.GuardFlags.value = readDataInstance.readQword()
return configDir
class ImageBaseRelocationEntry(baseclasses.BaseStructClass):
"""ImageBaseRelocationEntry object."""
def __init__(self, shouldPack = True):
"""
A class representation of a C{IMAGE_BASE_RELOCATION} structure.
@see: U{http://msdn.microsoft.com/en-us/magazine/cc301808.aspx}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.virtualAddress = datatypes.DWORD(0) #: L{DWORD} virtualAddress.
self.sizeOfBlock = datatypes.DWORD(0) #: L{DWORD} sizeOfBlock
self.items = datatypes.Array(datatypes.TYPE_WORD) #: L{Array} items.
self._attrsList = ["virtualAddress", "sizeOfBlock", "items"]
def getType(self):
"""Returns L{consts.IMAGE_BASE_RELOCATION_ENTRY}."""
return consts.IMAGE_BASE_RELOCATION_ENTRY
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageBaseRelocationEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to parse as a L{ImageBaseRelocationEntry} object.
@rtype: L{ImageBaseRelocationEntry}
@return: A new L{ImageBaseRelocationEntry} object.
"""
reloc = ImageBaseRelocationEntry()
reloc.virtualAddress.value = readDataInstance.readDword()
reloc.sizeOfBlock.value = readDataInstance.readDword()
        toRead = (reloc.sizeOfBlock.value - 8) / len(datatypes.WORD(0))  # bytes past the 8-byte header, as WORD-sized items
reloc.items = datatypes.Array.parse(readDataInstance, datatypes.TYPE_WORD, toRead)
return reloc
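# A minimal sketch (not part of the original module) of a relocation block:
# sizeOfBlock covers the 8-byte header plus the trailing WORD items, so a
# block with two typed offsets has sizeOfBlock == 12.
def _demoParseRelocationEntry():
    import struct
    raw = struct.pack("<IIHH", 0x1000, 12, 0x3001, 0x3008)  # page RVA, block size, two typed offsets
    reloc = ImageBaseRelocationEntry.parse(utils.ReadData(raw))
    return reloc.items  # Array holding the two WORD entries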
class ImageBaseRelocation(list):
"""ImageBaseRelocation array."""
pass
class ImageDebugDirectory(baseclasses.BaseStructClass):
"""ImageDebugDirectory object."""
def __init__(self, shouldPack = True):
"""
Class representation of a C{IMAGE_DEBUG_DIRECTORY} structure.
@see: U{http://msdn.microsoft.com/es-es/library/windows/desktop/ms680307%28v=vs.85%29.aspx}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.characteristics = datatypes.DWORD(0) #: L{DWORD} characteristics.
self.timeDateStamp = datatypes.DWORD(0) #: L{DWORD} timeDateStamp.
self.majorVersion = datatypes.WORD(0) #: L{WORD} majorVersion.
self.minorVersion = datatypes.WORD(0) #: L{WORD} minorVersion.
self.type = datatypes.DWORD(0) #: L{DWORD} type.
self.sizeOfData = datatypes.DWORD(0) #: L{DWORD} sizeOfData.
self.addressOfData = datatypes.DWORD(0) #: L{DWORD} addressOfData.
self.pointerToRawData = datatypes.DWORD(0) #: L{DWORD} pointerToRawData.
self._attrsList = ["characteristics", "timeDateStamp", "majorVersion", "minorVersion", "type", "sizeOfData",\
"addressOfData", "pointerToRawData"]
def getType(self):
"""Returns L{consts.IMAGE_DEBUG_DIRECTORY}."""
return consts.IMAGE_DEBUG_DIRECTORY
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageDebugDirectory} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A new L{ReadData} object with data to be parsed as a L{ImageDebugDirectory} object.
@rtype: L{ImageDebugDirectory}
@return: A new L{ImageDebugDirectory} object.
"""
dbgDir = ImageDebugDirectory()
dbgDir.characteristics.value = readDataInstance.readDword()
dbgDir.timeDateStamp.value = readDataInstance.readDword()
dbgDir.majorVersion.value = readDataInstance.readWord()
dbgDir.minorVersion.value = readDataInstance.readWord()
dbgDir.type.value = readDataInstance.readDword()
dbgDir.sizeOfData.value = readDataInstance.readDword()
dbgDir.addressOfData.value = readDataInstance.readDword()
dbgDir.pointerToRawData.value = readDataInstance.readDword()
return dbgDir
class ImageDebugDirectories(list):
"""ImageDebugDirectories object."""
def __init__(self, shouldPack = True):
"""
Array of L{ImageDebugDirectory} objects.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
        list.__init__(self)
        self.shouldPack = shouldPack
    def __str__(self):
        return ''.join([str(x) for x in self if x.shouldPack])
def getType(self):
""""Returns L{consts.IMAGE_DEBUG_DIRECTORIES}."""
return consts.IMAGE_DEBUG_DIRECTORIES
@staticmethod
def parse(readDataInstance, nDebugEntries):
"""
Returns a new L{ImageDebugDirectories} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{ImageDebugDirectories} object.
@type nDebugEntries: int
@param nDebugEntries: Number of L{ImageDebugDirectory} objects in the C{readDataInstance} object.
@rtype: L{ImageDebugDirectories}
@return: A new L{ImageDebugDirectories} object.
@raise DataLengthException: If not enough data to read in the C{readDataInstance} object.
"""
dbgEntries = ImageDebugDirectories()
dataLength = len(readDataInstance)
toRead = nDebugEntries * consts.SIZEOF_IMAGE_DEBUG_ENTRY32
if dataLength >= toRead:
for i in range(nDebugEntries):
dbgEntry = ImageDebugDirectory.parse(readDataInstance)
dbgEntries.append(dbgEntry)
else:
raise excep.DataLengthException("Not enough bytes to read.")
return dbgEntries
class ImageImportDescriptorMetaData(baseclasses.BaseStructClass):
"""ImageImportDescriptorMetaData object."""
def __init__(self, shouldPack = True):
"""
Class used to store metadata from the L{ImageImportDescriptor} object.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.moduleName = datatypes.String("") #: L{String} moduleName.
self.numberOfImports = datatypes.DWORD(0) #: L{DWORD} numberOfImports.
self._attrsList = ["moduleName", "numberOfImports"]
def getType(self):
"""Returns L{consts.IID_METADATA}."""
return consts.IID_METADATA
class ImageImportDescriptorEntry(baseclasses.BaseStructClass):
"""ImageImportDescriptorEntry object."""
def __init__(self, shouldPack = True):
"""
Class representation of a C{IMAGE_IMPORT_DESCRIPTOR} structure.
@see: Figure 5 U{http://msdn.microsoft.com/es-ar/magazine/bb985996%28en-us%29.aspx}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.metaData = ImageImportDescriptorMetaData() #: L{ImageImportDescriptorMetaData} metaData.
self.originalFirstThunk = datatypes.DWORD(0) #: L{DWORD} originalFirstThunk.
self.timeDateStamp = datatypes.DWORD(0) #: L{DWORD} timeDateStamp.
self.forwarderChain = datatypes.DWORD(0) #: L{DWORD} forwarderChain.
self.name = datatypes.DWORD(0) #: L{DWORD} name.
self.firstThunk = datatypes.DWORD(0) #: L{DWORD} firstThunk.
self.iat = ImportAddressTable() #: L{ImportAddressTable} iat.
self._attrsList = ["originalFirstThunk", "timeDateStamp", "forwarderChain", "name", "firstThunk"]
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageImportDescriptorEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{ImageImportDescriptorEntry}.
@rtype: L{ImageImportDescriptorEntry}
@return: A new L{ImageImportDescriptorEntry} object.
"""
iid = ImageImportDescriptorEntry()
iid.originalFirstThunk.value = readDataInstance.readDword()
iid.timeDateStamp.value = readDataInstance.readDword()
iid.forwarderChain.value = readDataInstance.readDword()
iid.name.value = readDataInstance.readDword()
iid.firstThunk.value = readDataInstance.readDword()
return iid
def getType(self):
"""Returns C{consts.IMAGE_IMPORT_DESCRIPTOR_ENTRY}."""
return consts.IMAGE_IMPORT_DESCRIPTOR_ENTRY
class ImageImportDescriptor(list):
"""ImageImportDescriptor object."""
def __init__(self, shouldPack = True):
"""
Array of L{ImageImportDescriptorEntry} objects.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
        list.__init__(self)
        self.shouldPack = shouldPack
def __str__(self):
return ''.join([str(x) for x in self if x.shouldPack])
def getType(self):
"""Returns L{consts.IMAGE_IMPORT_DESCRIPTOR}."""
return consts.IMAGE_IMPORT_DESCRIPTOR
@staticmethod
def parse(readDataInstance, nEntries):
"""
Returns a new L{ImageImportDescriptor} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{ImageImportDescriptor} object.
@type nEntries: int
@param nEntries: The number of L{ImageImportDescriptorEntry} objects in the C{readDataInstance} object.
@rtype: L{ImageImportDescriptor}
@return: A new L{ImageImportDescriptor} object.
@raise DataLengthException: If not enough data to read.
"""
importEntries = ImageImportDescriptor()
dataLength = len(readDataInstance)
toRead = nEntries * consts.SIZEOF_IMAGE_IMPORT_ENTRY32
if dataLength >= toRead:
for i in range(nEntries):
importEntry = ImageImportDescriptorEntry.parse(readDataInstance)
importEntries.append(importEntry)
else:
raise excep.DataLengthException("Not enough bytes to read.")
return importEntries
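# A minimal sketch (not part of the original module): descriptors are plain
# runs of five DWORDs each, assuming the usual 20-byte
# IMAGE_IMPORT_DESCRIPTOR entry size; here a single sample entry is parsed.
def _demoParseImportDescriptor():
    import struct
    raw = struct.pack("<5I", 0x2000, 0, 0, 0x2100, 0x2200)  # OriginalFirstThunk, TimeDateStamp, ForwarderChain, Name, FirstThunk
    iid = ImageImportDescriptor.parse(utils.ReadData(raw), 1)
    return iid[0].name.value  # 0x2100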
class ImportAddressTableEntry(baseclasses.BaseStructClass):
"""ImportAddressTableEntry object."""
def __init__(self, shouldPack = True):
"""
        A helper class aggregating the fields of an import address table entry; it does not map to a single named PE structure.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.firstThunk = datatypes.DWORD(0) #: L{DWORD} firstThunk.
self.originalFirstThunk = datatypes.DWORD(0) #: L{DWORD} originalFirstThunk.
self.hint = datatypes.WORD(0) #: L{WORD} hint.
self.name = datatypes.String("") #: L{String} name.
self._attrsList = ["firstThunk", "originalFirstThunk", "hint", "name"]
def getType(self):
"""Returns L{consts.IMPORT_ADDRESS_TABLE_ENTRY}."""
return consts.IMPORT_ADDRESS_TABLE_ENTRY
class ImportAddressTableEntry64(baseclasses.BaseStructClass):
"""ImportAddressTableEntry64 object."""
def __init__(self, shouldPack = True):
"""
        A helper class aggregating the fields of a 64-bit import address table entry; it does not map to a single named PE structure.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.firstThunk = datatypes.QWORD(0) #: L{QWORD} firstThunk.
self.originalFirstThunk = datatypes.QWORD(0) #: L{QWORD} originalFirstThunk.
self.hint = datatypes.WORD(0) #: L{WORD} hint.
self.name = datatypes.String("") #: L{String} name.
self._attrsList = ["firstThunk", "originalFirstThunk", "hint", "name"]
def getType(self):
"""Returns L{consts.IMPORT_ADDRESS_TABLE_ENTRY64}."""
return consts.IMPORT_ADDRESS_TABLE_ENTRY64
class ImportAddressTable(list):
"""Array of L{ImportAddressTableEntry} objects."""
pass
class ExportTable(list):
"""Array of L{ExportTableEntry} objects."""
pass
class ExportTableEntry(baseclasses.BaseStructClass):
"""ExportTableEntry object."""
def __init__(self, shouldPack = True):
"""
        A helper class aggregating the fields of an export table entry; it does not map to a single named PE structure.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.ordinal = datatypes.DWORD(0) #: L{DWORD} ordinal.
self.functionRva = datatypes.DWORD(0) #: L{DWORD} functionRva.
self.nameOrdinal = datatypes.WORD(0) #: L{WORD} nameOrdinal.
self.nameRva = datatypes.DWORD(0) #: L{DWORD} nameRva.
self.name = datatypes.String("") #: L{String} name.
self._attrsList = ["ordinal", "functionRva", "nameOrdinal", "nameRva", "name"]
def __repr__(self):
return repr((self.ordinal, self.functionRva, self.nameOrdinal, self.nameRva, self.name))
def getType(self):
"""Returns L{consts.EXPORT_TABLE_ENTRY}."""
return consts.EXPORT_TABLE_ENTRY
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ExportTableEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{ExportTableEntry} object.
@rtype: L{ExportTableEntry}
@return: A new L{ExportTableEntry} object.
"""
exportEntry = ExportTableEntry()
exportEntry.functionRva.value = readDataInstance.readDword()
exportEntry.nameOrdinal.value = readDataInstance.readWord()
exportEntry.nameRva.value = readDataInstance.readDword()
exportEntry.name.value = readDataInstance.readString()
return exportEntry
class ImageExportTable(baseclasses.BaseStructClass):
"""ImageExportTable object."""
def __init__(self, shouldPack = True):
"""
Class representation of a C{IMAGE_EXPORT_DIRECTORY} structure.
@see: Figure 2 U{http://msdn.microsoft.com/en-us/magazine/bb985996.aspx}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.exportTable = ExportTable()
self.characteristics = datatypes.DWORD(0) #: L{DWORD} characteristics.
self.timeDateStamp = datatypes.DWORD(0) #: L{DWORD} timeDateStamp.
self.majorVersion = datatypes.WORD(0) #: L{WORD} majorVersion.
self.minorVersion = datatypes.WORD(0) #: L{WORD} minorVersion.
self.name = datatypes.DWORD(0) #: L{DWORD} name.
self.base = datatypes.DWORD(0) #: L{DWORD} base.
self.numberOfFunctions = datatypes.DWORD(0) #: L{DWORD} numberOfFunctions.
self.numberOfNames = datatypes.DWORD(0) #: L{DWORD} numberOfNames.
self.addressOfFunctions = datatypes.DWORD(0) #: L{DWORD} addressOfFunctions.
self.addressOfNames = datatypes.DWORD(0) #: L{DWORD} addressOfNames.
        self.addressOfNameOrdinals = datatypes.DWORD(0) #: L{DWORD} addressOfNameOrdinals.
self._attrsList = ["characteristics", "timeDateStamp", "majorVersion", "minorVersion", "name", "base", "numberOfFunctions",\
"numberOfNames", "addressOfFunctions", "addressOfNames", "addressOfNameOrdinals"]
def getType(self):
"""Returns L{consts.EXPORT_DIRECTORY}."""
return consts.EXPORT_DIRECTORY
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{ImageExportTable} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{ImageExportTable} object.
@rtype: L{ImageExportTable}
@return: A new L{ImageExportTable} object.
"""
et = ImageExportTable()
et.characteristics.value = readDataInstance.readDword()
et.timeDateStamp.value = readDataInstance.readDword()
et.majorVersion.value = readDataInstance.readWord()
et.minorVersion.value = readDataInstance.readWord()
et.name.value = readDataInstance.readDword()
et.base.value = readDataInstance.readDword()
et.numberOfFunctions.value = readDataInstance.readDword()
et.numberOfNames.value = readDataInstance.readDword()
et.addressOfFunctions.value = readDataInstance.readDword()
et.addressOfNames.value = readDataInstance.readDword()
et.addressOfNameOrdinals.value = readDataInstance.readDword()
return et
class NETDirectory(baseclasses.BaseStructClass):
"""NETDirectory object."""
def __init__(self, shouldPack = True):
"""
A class to abstract data from the .NET PE format.
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.directory = NetDirectory() #: L{NetDirectory} directory.
self.netMetaDataHeader = NetMetaDataHeader() #: L{NetMetaDataHeader} netMetaDataHeader.
self.netMetaDataStreams = NetMetaDataStreams() #: L{NetMetaDataStreams} netMetaDataStreams.
self._attrsList = ["directory", "netMetaDataHeader", "netMetaDataStreams"]
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{NETDirectory} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NETDirectory} object.
@rtype: L{NETDirectory}
@return: A new L{NETDirectory} object.
"""
nd = NETDirectory()
nd.directory = NetDirectory.parse(readDataInstance)
nd.netMetaDataHeader = NetMetaDataHeader.parse(readDataInstance)
        nd.netMetaDataStreams = NetMetaDataStreams.parse(readDataInstance, nd.netMetaDataHeader.numberOfStreams.value)
return nd
def getType(self):
"""Returns L{consts.NET_DIRECTORY}."""
return consts.NET_DIRECTORY
class NetDirectory(baseclasses.BaseStructClass):
"""NetDirectory object."""
def __init__(self, shouldPack = True):
"""
A class representation of the C{IMAGE_COR20_HEADER} structure.
@see: U{http://www.ntcore.com/files/dotnetformat.htm}
@type shouldPack: bool
        @param shouldPack: (Optional) If set to C{True}, the object will be packed. If set to C{False}, the object won't be packed.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.cb = datatypes.DWORD(0) #: L{DWORD} cb.
self.majorRuntimeVersion = datatypes.WORD(0) #: L{WORD} majorRuntimeVersion.
self.minorRuntimeVersion = datatypes.WORD(0) #: L{WORD} minorRuntimeVersion.
self.metaData = datadirs.Directory() #: L{Directory} metaData.
self.flags = datatypes.DWORD(0) #: L{DWORD} flags.
self.entryPointToken = datatypes.DWORD(0) #: L{DWORD} entryPointToken.
self.resources = datadirs.Directory() #: L{Directory} resources.
self.strongNameSignature = datadirs.Directory() #: L{Directory} strongNameSignature.
self.codeManagerTable = datadirs.Directory() #: L{Directory} codeManagerTable.
self.vTableFixups = datadirs.Directory() #: L{Directory} vTableFixups.
self.exportAddressTableJumps = datadirs.Directory() #: L{Directory} exportAddressTableJumps.
self.managedNativeHeader = datadirs.Directory() #: L{Directory} managedNativeHeader.
self._attrsList = ["cb","majorRuntimeVersion","minorRuntimeVersion","metaData", \
"flags","entryPointToken","resources","strongNameSignature",\
"codeManagerTable","vTableFixups", "exportAddressTableJumps",\
"managedNativeHeader"]
def getType(self):
"""Returns L{consts.IMAGE_COR20_HEADER}."""
return consts.IMAGE_COR20_HEADER
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{NetDirectory} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NetDirectory} object.
@rtype: L{NetDirectory}
@return: A new L{NetDirectory} object.
"""
nd = NetDirectory()
nd.cb.value = readDataInstance.readDword()
nd.majorRuntimeVersion.value= readDataInstance.readWord()
nd.minorRuntimeVersion.value = readDataInstance.readWord()
nd.metaData.rva.value = readDataInstance.readDword()
nd.metaData.size.value = readDataInstance.readDword()
nd.metaData.name.value = "MetaData"
nd.flags.value = readDataInstance.readDword()
nd.entryPointToken.value = readDataInstance.readDword()
nd.resources.rva.value = readDataInstance.readDword()
nd.resources.size.value = readDataInstance.readDword()
nd.resources.name.value = "Resources"
nd.strongNameSignature.rva.value = readDataInstance.readDword()
nd.strongNameSignature.size.value = readDataInstance.readDword()
nd.strongNameSignature.name.value = "StrongNameSignature"
nd.codeManagerTable.rva.value = readDataInstance.readDword()
nd.codeManagerTable.size.value = readDataInstance.readDword()
nd.codeManagerTable.name.value = "CodeManagerTable"
nd.vTableFixups.rva.value = readDataInstance.readDword()
nd.vTableFixups.size.value = readDataInstance.readDword()
nd.vTableFixups.name.value = "VTableFixups"
nd.exportAddressTableJumps.rva.value = readDataInstance.readDword()
nd.exportAddressTableJumps.size.value = readDataInstance.readDword()
nd.exportAddressTableJumps.name.value = "ExportAddressTableJumps"
nd.managedNativeHeader.rva.value = readDataInstance.readDword()
nd.managedNativeHeader.size.value = readDataInstance.readDword()
nd.managedNativeHeader.name.value = "ManagedNativeHeader"
return nd
class NetMetaDataHeader(baseclasses.BaseStructClass):
"""NetMetaDataHeader object."""
def __init__(self, shouldPack = True):
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.signature = datatypes.DWORD(0) #: L{DWORD} signature.
self.majorVersion = datatypes.WORD(0) #: L{WORD} majorVersion.
self.minorVersion = datatypes.WORD(0) #: L{WORD} minorVersion.
self.reserved = datatypes.DWORD(0) #: L{DWORD} reserved.
self.versionLength = datatypes.DWORD(0) #: L{DWORD} versionLength.
self.versionString = datatypes.AlignedString("") #: L{AlignedString} versionString.
self.flags = datatypes.WORD(0) #: L{WORD} flags.
self.numberOfStreams = datatypes.WORD(0) #: L{WORD} numberOfStreams.
self._attrsList = ["signature","majorVersion","minorVersion","reserved","versionLength","versionString","flags","numberOfStreams"]
def getType(self):
"""Returns L{consts.NET_METADATA_HEADER}."""
return consts.NET_METADATA_HEADER
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{NetMetaDataHeader} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NetMetaDataHeader} object.
@rtype: L{NetMetaDataHeader}
@return: A new L{NetMetaDataHeader} object.
"""
nmh = NetMetaDataHeader()
nmh.signature.value = readDataInstance.readDword()
nmh.majorVersion.value = readDataInstance.readWord()
nmh.minorVersion.value = readDataInstance.readWord()
nmh.reserved.value = readDataInstance.readDword()
nmh.versionLength.value = readDataInstance.readDword()
nmh.versionString.value = readDataInstance.readAlignedString()
nmh.flags.value = readDataInstance.readWord()
nmh.numberOfStreams.value = readDataInstance.readWord()
return nmh
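# A minimal sketch (not part of the original module): .NET metadata headers
# begin with the 'BSJB' signature, which is 0x424A5342 when read as a
# little-endian DWORD; a check like this is a cheap sanity test before
# parsing the stream headers.
def _demoLooksLikeNetMetaData(readDataInstance):
    return readDataInstance.readDword() == 0x424A5342  # 'BSJB'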
class NetMetaDataStreamEntry(baseclasses.BaseStructClass):
"""NetMetaDataStreamEntry object."""
def __init__(self, shouldPack = True):
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.offset = datatypes.DWORD(0) #: L{DWORD} offset.
self.size = datatypes.DWORD(0) #: L{DWORD} size.
# this must be aligned to the next 4-byte boundary
self.name = datatypes.AlignedString("") #: L{AlignedString} name.
# the "info" attribute does not belong to the NETMetaDataStreamEntry struct. It is just a place holder where the
# data for every entry will be stored.
self.info = None
self._attrsList = ["offset", "size", "name", "info"]
def getType(self):
"""Returns L{consts.NET_METADATA_STREAM_ENTRY}."""
return consts.NET_METADATA_STREAM_ENTRY
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{NetMetaDataStreamEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NetMetaDataStreamEntry}.
@rtype: L{NetMetaDataStreamEntry}
@return: A new L{NetMetaDataStreamEntry} object.
"""
n = NetMetaDataStreamEntry()
n.offset.value = readDataInstance.readDword()
n.size.value = readDataInstance.readDword()
n.name.value = readDataInstance.readAlignedString()
return n
class NetMetaDataStreams(dict):
"""NetMetaDataStreams object."""
def __init__(self, shouldPack = True):
        dict.__init__(self)
        self.shouldPack = shouldPack
    def __str__(self):
        # each stream is stored under both its numeric index and its name;
        # pack every entry exactly once, in index order
        return "".join([str(self[k]) for k in sorted(k for k in self if isinstance(k, int)) if self[k].shouldPack])
    def getByNumber(self, number):
        return self.get(number)
def getByName(self, name):
return self.get(name)
def getType(self):
"""Returns L{consts.NET_METADATA_STREAMS}."""
return consts.NET_METADATA_STREAMS
@staticmethod
def parse(readDataInstance, nStreams):
"""
Returns a new L{NetMetaDataStreams} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NetMetaDataStreams} object.
@type nStreams: int
@param nStreams: The number of L{NetMetaDataStreamEntry} objects in the C{readDataInstance} object.
@rtype: L{NetMetaDataStreams}
@return: A new L{NetMetaDataStreams} object.
"""
streams = NetMetaDataStreams()
for i in range(nStreams):
streamEntry = NetMetaDataStreamEntry()
streamEntry.offset.value = readDataInstance.readDword()
streamEntry.size.value = readDataInstance.readDword()
streamEntry.name.value = readDataInstance.readAlignedString()
streams.update({ i: streamEntry, streamEntry.name.value: streamEntry })
return streams
class NetMetaDataTableHeader(baseclasses.BaseStructClass):
"""NetMetaDataTableHeader object."""
def __init__(self, shouldPack = True):
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.reserved_1 = datatypes.DWORD(0) #: L{DWORD} reserved_1.
self.majorVersion = datatypes.BYTE(0) #: L{BYTE} majorVersion.
self.minorVersion = datatypes.BYTE(0) #: L{BYTE} minorVersion.
self.heapOffsetSizes = datatypes.BYTE(0) #: L{BYTE} heapOffsetSizes.
self.reserved_2 = datatypes.BYTE(0) #: L{BYTE} reserved_2.
self.maskValid = datatypes.QWORD(0) #: L{QWORD} maskValid.
self.maskSorted = datatypes.QWORD(0) #: L{QWORD} maskSorted.
self._attrsList = ["reserved_1", "majorVersion", "minorVersion", "heapOffsetSizes", "reserved_2", "maskValid", "maskSorted"]
def getType(self):
"""Returns L{consts.NET_METADATA_TABLE_HEADER}."""
return consts.NET_METADATA_TABLE_HEADER
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{NetMetaDataTableHeader} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NetMetaDataTableHeader} object.
@rtype: L{NetMetaDataTableHeader}
@return: A new L{NetMetaDataTableHeader} object.
"""
th = NetMetaDataTableHeader()
th.reserved_1.value = readDataInstance.readDword()
th.majorVersion.value = readDataInstance.readByte()
th.minorVersion.value = readDataInstance.readByte()
th.heapOffsetSizes.value = readDataInstance.readByte()
th.reserved_2.value = readDataInstance.readByte()
th.maskValid.value = readDataInstance.readQword()
th.maskSorted.value = readDataInstance.readQword()
return th
class NetMetaDataTables(baseclasses.BaseStructClass):
"""NetMetaDataTables object."""
def __init__(self, shouldPack = True):
"""
NetMetaDataTables object.
@todo: Parse every table in this struct and store them in the C{self.tables} attribute.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.netMetaDataTableHeader = NetMetaDataTableHeader() #: L{NetMetaDataTableHeader} netMetaDataTableHeader.
        self.tables = None #: C{dict} tables.
self._attrsList = ["netMetaDataTableHeader", "tables"]
def getType(self):
"""Returns L{consts.NET_METADATA_TABLES}."""
return consts.NET_METADATA_TABLES
@staticmethod
def parse(readDataInstance, netMetaDataStreams):
"""
Returns a new L{NetMetaDataTables} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NetMetaDataTables} object.
@rtype: L{NetMetaDataTables}
@return: A new L{NetMetaDataTables} object.
"""
dt = NetMetaDataTables()
dt.netMetaDataTableHeader = NetMetaDataTableHeader.parse(readDataInstance)
dt.tables = {}
metadataTableDefinitions = dotnet.MetadataTableDefinitions(dt, netMetaDataStreams)
for i in xrange(64):
dt.tables[i] = { "rows": 0 }
if dt.netMetaDataTableHeader.maskValid.value >> i & 1:
dt.tables[i]["rows"] = readDataInstance.readDword()
if i in dotnet.MetadataTableNames:
dt.tables[dotnet.MetadataTableNames[i]] = dt.tables[i]
for i in xrange(64):
dt.tables[i]["data"] = []
for j in range(dt.tables[i]["rows"]):
row = None
if i in metadataTableDefinitions:
row = readDataInstance.readFields(metadataTableDefinitions[i])
dt.tables[i]["data"].append(row)
for i in xrange(64):
if i in dotnet.MetadataTableNames:
dt.tables[dotnet.MetadataTableNames[i]] = dt.tables[i]["data"]
dt.tables[i] = dt.tables[i]["data"]
return dt
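# A minimal sketch (not part of the original module) of the maskValid
# convention handled above: bit i set means table i is present, and exactly
# that many DWORD row counts precede the row data.
def _demoCountValidTables(maskValid):
    return sum(1 for i in xrange(64) if maskValid >> i & 1)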
class NetResources(baseclasses.BaseStructClass):
"""NetResources object."""
def __init__(self, shouldPack = True):
"""
NetResources object.
@todo: Parse every resource in this struct and store them in the C{self.resources} attribute.
"""
baseclasses.BaseStructClass.__init__(self, shouldPack)
self.signature = datatypes.DWORD(0)
self.readerCount = datatypes.DWORD(0)
self.readerTypeLength = datatypes.DWORD(0)
self.version = datatypes.DWORD(0)
self.resourceCount = datatypes.DWORD(0)
self.resourceTypeCount = datatypes.DWORD(0)
self.resourceTypes = None
self.resourceHashes = None
self.resourceNameOffsets = None
self.dataSectionOffset = datatypes.DWORD(0)
self.resourceNames = None
self.resourceOffsets = None
self.info = None
self._attrsList = ["signature", "readerCount", "readerTypeLength", "version", "resourceCount", "resourceTypeCount", "resourceTypes", "resourceHashes", "resourceNameOffsets", "dataSectionOffset", "resourceNames", "resourceOffets", "info"]
def __str__(self):
return str(self.info)
def __repr__(self):
return repr(self.info)
def getType(self):
"""Returns L{consts.NET_RESOURCES}."""
return consts.NET_RESOURCES
@staticmethod
def parse(readDataInstance):
"""
Returns a new L{NetResources} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object with data to be parsed as a L{NetResources} object.
@rtype: L{NetResources}
@return: A new L{NetResources} object.
"""
r = NetResources()
r.signature = readDataInstance.readDword()
if r.signature != 0xbeefcace:
return r
r.readerCount = readDataInstance.readDword()
r.readerTypeLength = readDataInstance.readDword()
r.readerType = utils.ReadData(readDataInstance.read(r.readerTypeLength)).readDotNetBlob()
r.version = readDataInstance.readDword()
r.resourceCount = readDataInstance.readDword()
r.resourceTypeCount = readDataInstance.readDword()
r.resourceTypes = []
for i in xrange(r.resourceTypeCount):
r.resourceTypes.append(readDataInstance.readDotNetBlob())
        # pad to the next 8-byte boundary
        readDataInstance.skipBytes((8 - readDataInstance.tell()) & 0x7)
r.resourceHashes = []
for i in xrange(r.resourceCount):
r.resourceHashes.append(readDataInstance.readDword())
r.resourceNameOffsets = []
for i in xrange(r.resourceCount):
r.resourceNameOffsets.append(readDataInstance.readDword())
r.dataSectionOffset = readDataInstance.readDword()
r.resourceNames = []
r.resourceOffsets = []
base = readDataInstance.tell()
for i in xrange(r.resourceCount):
readDataInstance.setOffset(base + r.resourceNameOffsets[i])
r.resourceNames.append(readDataInstance.readDotNetUnicodeString())
r.resourceOffsets.append(readDataInstance.readDword())
r.info = {}
for i in xrange(r.resourceCount):
readDataInstance.setOffset(r.dataSectionOffset + r.resourceOffsets[i])
r.info[i] = readDataInstance.read(len(readDataInstance))
r.info[r.resourceNames[i]] = r.info[i]
return r
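    # A minimal usage sketch (assuming a pype32-style utils.ReadData over the
    # raw bytes of a .NET resources blob; `raw_bytes` is illustrative):
    #
    #   rd = utils.ReadData(raw_bytes)
    #   res = NetResources.parse(rd)
    #   if res.signature == 0xbeefcace:  # magic value checked in parse()
    #       for name in res.resourceNames:
    #           blob = res.info[name]    # raw resource data, keyed by name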
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for apache_beam.typehints.trivial_inference."""
# pytype: skip-file
from __future__ import absolute_import
import sys
import types
import unittest
from apache_beam.typehints import trivial_inference
from apache_beam.typehints import typehints
global_int = 1
class TrivialInferenceTest(unittest.TestCase):
def assertReturnType(self, expected, f, inputs=(), depth=5):
self.assertEqual(
expected,
trivial_inference.infer_return_type(f, inputs, debug=True, depth=depth))
def testIdentity(self):
self.assertReturnType(int, lambda x: x, [int])
def testIndexing(self):
self.assertReturnType(int, lambda x: x[0], [typehints.Tuple[int, str]])
self.assertReturnType(str, lambda x: x[1], [typehints.Tuple[int, str]])
self.assertReturnType(str, lambda x: x[1], [typehints.List[str]])
def testTuples(self):
self.assertReturnType(
typehints.Tuple[typehints.Tuple[()], int], lambda x: ((), x), [int])
self.assertReturnType(
typehints.Tuple[str, int, float], lambda x: (x, 0, 1.0), [str])
def testGetItem(self):
def reverse(ab):
return ab[-1], ab[0]
self.assertReturnType(
typehints.Tuple[typehints.Any, typehints.Any], reverse, [typehints.Any])
self.assertReturnType(
typehints.Tuple[int, float], reverse, [typehints.Tuple[float, int]])
self.assertReturnType(
typehints.Tuple[int, str], reverse, [typehints.Tuple[str, float, int]])
self.assertReturnType(
typehints.Tuple[int, int], reverse, [typehints.List[int]])
def testGetItemSlice(self):
self.assertReturnType(
typehints.List[int], lambda v: v[::-1], [typehints.List[int]])
self.assertReturnType(
typehints.Tuple[int], lambda v: v[::-1], [typehints.Tuple[int]])
self.assertReturnType(str, lambda v: v[::-1], [str])
self.assertReturnType(typehints.Any, lambda v: v[::-1], [typehints.Any])
self.assertReturnType(typehints.Any, lambda v: v[::-1], [object])
if sys.version_info >= (3, ):
# Test binary_subscr on a slice of a Const. On Py2.7 this will use the
# unsupported opcode SLICE+0.
test_list = ['a', 'b']
self.assertReturnType(typehints.List[str], lambda: test_list[:], [])
def testUnpack(self):
def reverse(a_b):
(a, b) = a_b
return b, a
any_tuple = typehints.Tuple[typehints.Any, typehints.Any]
self.assertReturnType(
typehints.Tuple[int, float], reverse, [typehints.Tuple[float, int]])
self.assertReturnType(
typehints.Tuple[int, int], reverse, [typehints.Tuple[int, ...]])
self.assertReturnType(
typehints.Tuple[int, int], reverse, [typehints.List[int]])
self.assertReturnType(
typehints.Tuple[typehints.Union[int, float, str],
typehints.Union[int, float, str]],
reverse, [typehints.Tuple[int, float, str]])
self.assertReturnType(any_tuple, reverse, [typehints.Any])
self.assertReturnType(
typehints.Tuple[int, float],
reverse, [trivial_inference.Const((1.0, 1))])
self.assertReturnType(
any_tuple, reverse, [trivial_inference.Const((1, 2, 3))])
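  # Note: trivial_inference.Const wraps a concrete value, so the inferencer
  # can use the exact element types of the (1.0, 1) tuple above, while the
  # 3-element constant falls back to Tuple[Any, Any] when unpacked into two
  # names.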
def testNoneReturn(self):
def func(a):
if a == 5:
return a
return None
self.assertReturnType(typehints.Union[int, type(None)], func, [int])
def testSimpleList(self):
self.assertReturnType(
typehints.List[int], lambda xs: [1, 2], [typehints.Tuple[int, ...]])
self.assertReturnType(
typehints.List[typehints.Any],
lambda xs: list(xs), # List is a disallowed builtin
[typehints.Tuple[int, ...]])
def testListComprehension(self):
self.assertReturnType(
typehints.List[int],
lambda xs: [x for x in xs], [typehints.Tuple[int, ...]])
def testTupleListComprehension(self):
self.assertReturnType(
typehints.List[int],
lambda xs: [x for x in xs], [typehints.Tuple[int, int, int]])
self.assertReturnType(
typehints.List[typehints.Union[int, float]],
lambda xs: [x for x in xs], [typehints.Tuple[int, float]])
if sys.version_info[:2] == (3, 5):
# A better result requires implementing the MAKE_CLOSURE opcode.
expected = typehints.Any
else:
expected = typehints.List[typehints.Tuple[str, int]]
self.assertReturnType(
expected,
lambda kvs: [(kvs[0], v) for v in kvs[1]],
[typehints.Tuple[str, typehints.Iterable[int]]])
self.assertReturnType(
typehints.List[typehints.Tuple[str, typehints.Union[str, int], int]],
lambda L: [(a, a or b, b) for a, b in L],
[typehints.Iterable[typehints.Tuple[str, int]]])
def testGenerator(self):
def foo(x, y):
yield x
yield y
self.assertReturnType(typehints.Iterable[int], foo, [int, int])
self.assertReturnType(
typehints.Iterable[typehints.Union[int, float]], foo, [int, float])
def testGeneratorComprehension(self):
self.assertReturnType(
typehints.Iterable[int],
lambda xs: (x for x in xs), [typehints.Tuple[int, ...]])
def testBinOp(self):
self.assertReturnType(int, lambda a, b: a + b, [int, int])
self.assertReturnType(
typehints.Any, lambda a, b: a + b, [int, typehints.Any])
self.assertReturnType(
typehints.List[typehints.Union[int, str]],
lambda a,
b: a + b, [typehints.List[int], typehints.List[str]])
def testCall(self):
f = lambda x, *args: x
self.assertReturnType(
typehints.Tuple[int, float], lambda: (f(1), f(2.0, 3)))
# We could do better here, but this is at least correct.
self.assertReturnType(
typehints.Tuple[int, typehints.Any], lambda: (1, f(x=1.0)))
def testClosure(self):
x = 1
y = 1.0
self.assertReturnType(typehints.Tuple[int, float], lambda: (x, y))
def testGlobals(self):
self.assertReturnType(int, lambda: global_int)
def testBuiltins(self):
self.assertReturnType(int, lambda x: len(x), [typehints.Any])
def testGetAttr(self):
self.assertReturnType(
typehints.Tuple[str, typehints.Any],
lambda: (typehints.__doc__, typehints.fake))
def testMethod(self):
class A(object):
def m(self, x):
return x
self.assertReturnType(int, lambda: A().m(3))
self.assertReturnType(float, lambda: A.m(A(), 3.0))
def testAlwaysReturnsEarly(self):
def some_fn(v):
if v:
return 1
return 2
self.assertReturnType(int, some_fn)
def testDict(self):
self.assertReturnType(
typehints.Dict[typehints.Any, typehints.Any], lambda: {})
def testDictComprehension(self):
fields = []
if sys.version_info >= (3, 6):
expected_type = typehints.Dict[typehints.Any, typehints.Any]
else:
# For Python 2, just ensure it doesn't crash.
expected_type = typehints.Any
self.assertReturnType(
expected_type, lambda row: {f: row[f]
for f in fields}, [typehints.Any])
def testDictComprehensionSimple(self):
self.assertReturnType(
typehints.Dict[str, int], lambda _list: {'a': 1
for _ in _list}, [])
def testDepthFunction(self):
def f(i):
return i
self.assertReturnType(typehints.Any, lambda i: f(i), [int], depth=0)
self.assertReturnType(int, lambda i: f(i), [int], depth=1)
def testDepthMethod(self):
class A(object):
def m(self, x):
return x
self.assertReturnType(typehints.Any, lambda: A().m(3), depth=0)
self.assertReturnType(int, lambda: A().m(3), depth=1)
self.assertReturnType(typehints.Any, lambda: A.m(A(), 3.0), depth=0)
self.assertReturnType(float, lambda: A.m(A(), 3.0), depth=1)
def testBuildTupleUnpackWithCall(self):
# Lambda uses BUILD_TUPLE_UNPACK_WITH_CALL opcode in Python 3.6, 3.7.
def fn(x1, x2, *unused_args):
return x1, x2
self.assertReturnType(
typehints.Tuple[str, float],
lambda x1,
x2,
_list: fn(x1, x2, *_list), [str, float, typehints.List[int]])
# No *args
self.assertReturnType(
typehints.Tuple[str, typehints.List[int]],
lambda x1,
x2,
_list: fn(x1, x2, *_list), [str, typehints.List[int]])
@unittest.skipIf(sys.version_info < (3, 6), 'CALL_FUNCTION_EX is new in 3.6')
def testCallFunctionEx(self):
    # Test when fn arguments are built using BUILD_LIST.
def fn(*args):
return args
self.assertReturnType(
typehints.List[typehints.Union[str, float]],
lambda x1,
x2: fn(*[x1, x2]), [str, float])
@unittest.skipIf(sys.version_info < (3, 6), 'CALL_FUNCTION_EX is new in 3.6')
def testCallFunctionExKwargs(self):
def fn(x1, x2, **unused_kwargs):
return x1, x2
# Keyword args are currently unsupported for CALL_FUNCTION_EX.
self.assertReturnType(
typehints.Any,
lambda x1,
x2,
_dict: fn(x1, x2, **_dict), [str, float, typehints.List[int]])
def testInstanceToType(self):
class MyClass(object):
def method(self):
pass
test_cases = [
(typehints.Dict[str, int], {
'a': 1
}),
(typehints.Dict[str, typehints.Union[str, int]], {
'a': 1, 'b': 'c'
}),
(typehints.Dict[typehints.Any, typehints.Any], {}),
(typehints.Set[str], {'a'}),
(typehints.Set[typehints.Union[str, float]], {'a', 0.4}),
(typehints.Set[typehints.Any], set()),
(typehints.Tuple[int], (1, )),
(typehints.Tuple[int, int, str], (1, 2, '3')),
(typehints.Tuple[()], ()),
(typehints.List[int], [1]),
(typehints.List[typehints.Union[int, str]], [1, 'a']),
(typehints.List[typehints.Any], []),
(type(None), None),
(type(MyClass), MyClass),
(MyClass, MyClass()),
(type(MyClass.method), MyClass.method),
(types.MethodType, MyClass().method),
]
for expected_type, instance in test_cases:
self.assertEqual(
expected_type,
trivial_inference.instance_to_type(instance),
msg=instance)
if __name__ == '__main__':
unittest.main()
|
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
from enum import Enum
from knack.prompting import prompt, prompt_pass
from knack.util import CLIError
from azure.mgmt.datamigration.models import (DataMigrationService,
NameAvailabilityRequest,
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties,
MigrateMySqlAzureDbForMySqlOfflineTaskProperties,
MigrateSqlServerSqlDbTaskProperties,
MigrateSyncCompleteCommandInput,
MigrateSyncCompleteCommandProperties,
MySqlConnectionInfo,
PostgreSqlConnectionInfo,
Project,
ProjectTask,
ServiceSku,
SqlConnectionInfo)
from azure.cli.core.azclierror import RequiredArgumentMissingError
from azure.cli.core.util import sdk_no_wait, get_file_json, shell_safe_json_parse
from azure.cli.command_modules.dms._client_factory import dms_cf_projects
from azure.cli.command_modules.dms.scenario_inputs import (get_migrate_sql_to_sqldb_offline_input,
get_migrate_postgresql_to_azuredbforpostgresql_sync_input,
get_migrate_mysql_to_azuredbformysql_offline_input)
# region Service
def check_service_name_availability(client, service_name, location):
parameters = NameAvailabilityRequest(name=service_name,
type='services')
return client.check_name_availability(location=location,
parameters=parameters)
def create_service(client,
service_name,
resource_group_name,
location,
subnet,
sku_name,
tags=None,
no_wait=False):
parameters = DataMigrationService(location=location,
virtual_subnet_id=subnet,
sku=ServiceSku(name=sku_name),
tags=tags)
return sdk_no_wait(no_wait,
client.begin_create_or_update,
parameters=parameters,
group_name=resource_group_name,
service_name=service_name)
def delete_service(client, service_name, resource_group_name, delete_running_tasks=None, no_wait=False):
return sdk_no_wait(no_wait,
client.begin_delete,
group_name=resource_group_name,
service_name=service_name,
delete_running_tasks=delete_running_tasks)
def list_services(client, resource_group_name=None):
list_func = client.list_by_resource_group(group_name=resource_group_name) \
if resource_group_name else client.list()
return list_func
def start_service(client, service_name, resource_group_name, no_wait=False):
return sdk_no_wait(no_wait,
client.begin_start,
group_name=resource_group_name,
service_name=service_name)
def stop_service(client, service_name, resource_group_name, no_wait=False):
return sdk_no_wait(no_wait,
client.begin_stop,
group_name=resource_group_name,
service_name=service_name)
# endregion
# region Project
def check_project_name_availability(client, resource_group_name, service_name, project_name):
parameters = NameAvailabilityRequest(name=project_name,
type='projects')
return client.check_children_name_availability(group_name=resource_group_name,
service_name=service_name,
parameters=parameters)
def create_or_update_project(client,
project_name,
service_name,
resource_group_name,
location,
source_platform,
target_platform,
tags=None):
"""This implementation eschews the source and target connection details and the database list. This is because this
generally only helps in a GUI context--to guide the user more easily through creating a task. Since this info is
necessary at the Task level, there is no need to include it at the Project level where for CLI it is more of a
useless redundancy."""
# Set inputs to lowercase
source_platform = source_platform.lower()
target_platform = target_platform.lower()
scenario_handled_in_core = core_handles_scenario(source_platform, target_platform)
# Validation: Test scenario eligibility
if not scenario_handled_in_core:
raise CLIError("The provided source-platform, target-platform combination is not appropriate. \n\
Please refer to the help file 'az dms project create -h' for the supported scenarios.")
parameters = Project(location=location,
source_platform=source_platform,
target_platform=target_platform,
tags=tags)
return client.create_or_update(parameters=parameters,
group_name=resource_group_name,
service_name=service_name,
project_name=project_name)
# endregion
# region Task
def check_task_name_availability(client, resource_group_name, service_name, project_name, task_name):
    # The URL to check for tasks needs to look like this:
    # /subscriptions/{subscription}/resourceGroups/{resourcegroup}/providers/Microsoft.DataMigration/services/{service}/projects/{project}/checkNameAvailability?api-version={version} # pylint: disable=line-too-long
    # but check_children_name_availability only builds a URL that checks for projects, so we cheat a little by
    # making the service name include the project portion as well.
parameters = NameAvailabilityRequest(name=task_name,
type='tasks')
return client.check_children_name_availability(group_name=resource_group_name,
service_name=service_name + '/projects/' + project_name,
parameters=parameters)
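# Illustrative example (names are hypothetical): for service 'my-dms' and
# project 'my-proj', the call above effectively checks
# .../services/my-dms/projects/my-proj/checkNameAvailability, because the
# project segment is smuggled in through the service_name argument.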
def create_task(cmd,
client,
resource_group_name,
service_name,
project_name,
task_name,
source_connection_json,
target_connection_json,
database_options_json,
task_type="",
enable_schema_validation=False,
enable_data_integrity_validation=False,
enable_query_analysis_validation=False):
    # Get source and target platforms and set inputs to lowercase
source_platform, target_platform = get_project_platforms(cmd,
project_name=project_name,
service_name=service_name,
resource_group_name=resource_group_name)
task_type = task_type.lower()
scenario_handled_in_core = core_handles_scenario(source_platform,
target_platform,
task_type)
# Validation: Test scenario eligibility
if not scenario_handled_in_core:
raise CLIError("The combination of the provided task-type and the project's \
source-platform and target-platform is not appropriate. \n\
Please refer to the help file 'az dms project task create -h' \
for the supported scenarios.")
source_connection_info, target_connection_info, database_options_json = \
transform_json_inputs(source_connection_json,
source_platform,
target_connection_json,
target_platform,
database_options_json)
task_properties = get_task_migration_properties(database_options_json,
source_platform,
target_platform,
task_type,
source_connection_info,
target_connection_info,
enable_schema_validation,
enable_data_integrity_validation,
enable_query_analysis_validation)
parameters = ProjectTask(properties=task_properties)
return client.create_or_update(group_name=resource_group_name,
service_name=service_name,
project_name=project_name,
task_name=task_name,
parameters=parameters)
def list_tasks(client, resource_group_name, service_name, project_name, task_type=None):
return client.tasks.list(group_name=resource_group_name,
service_name=service_name,
project_name=project_name,
task_type=task_type)
def cutover_sync_task(cmd,
client,
resource_group_name,
service_name,
project_name,
task_name,
object_name):
# If object name is empty, treat this as cutting over the entire online migration.
# Otherwise, for scenarios that support it, just cut over the migration on the specified object.
    # 'input' is a built-in function. Even though we can technically use it, it's not recommended.
# https://stackoverflow.com/questions/20670732/is-input-a-keyword-in-python
source_platform, target_platform = get_project_platforms(cmd,
project_name=project_name,
service_name=service_name,
resource_group_name=resource_group_name)
st = get_scenario_type(source_platform, target_platform, "onlinemigration")
if st in [ScenarioType.mysql_azuremysql_online,
ScenarioType.postgres_azurepostgres_online]:
command_input = MigrateSyncCompleteCommandInput(database_name=object_name)
command_properties_model = MigrateSyncCompleteCommandProperties
else:
raise CLIError("The supplied project's source and target do not support cutting over the migration.")
run_command(client,
command_input,
command_properties_model,
resource_group_name,
service_name,
project_name,
task_name)
# endregion
# region Helper Methods
def run_command(client,
command_input,
command_properties_model,
resource_group_name,
service_name,
project_name,
task_name):
command_properties_params = {'input': command_input}
command_properties = command_properties_model(**command_properties_params)
client.command(group_name=resource_group_name,
service_name=service_name,
project_name=project_name,
task_name=task_name,
parameters=command_properties)
def get_project_platforms(cmd, project_name, service_name, resource_group_name):
client = dms_cf_projects(cmd.cli_ctx)
proj = client.get(group_name=resource_group_name, service_name=service_name, project_name=project_name)
return (proj.source_platform.lower(), proj.target_platform.lower())
def core_handles_scenario(
source_platform,
target_platform,
task_type=""):
# Add scenarios here after migrating them to the core from the extension.
CoreScenarioTypes = [ScenarioType.sql_sqldb_offline,
ScenarioType.postgres_azurepostgres_online,
ScenarioType.mysql_azuremysql_offline]
return get_scenario_type(source_platform, target_platform, task_type) in CoreScenarioTypes
def transform_json_inputs(
source_connection_json,
source_platform,
target_connection_json,
target_platform,
database_options_json):
# Source connection info
source_connection_json = get_file_or_parse_json(source_connection_json, "source-connection-json")
source_connection_info = create_connection(source_connection_json, "Source Database ", source_platform)
# Target connection info
target_connection_json = get_file_or_parse_json(target_connection_json, "target-connection-json")
target_connection_info = create_connection(target_connection_json, "Target Database ", target_platform)
# Database options
database_options_json = get_file_or_parse_json(database_options_json, "database-options-json")
return (source_connection_info, target_connection_info, database_options_json)
def get_file_or_parse_json(value, value_type):
if os.path.exists(value):
return get_file_json(value)
# Test if provided value is a valid json
try:
json_parse = shell_safe_json_parse(value)
    except Exception:
        raise CLIError("The supplied input for '" + value_type + "' is not a valid file path or a valid json object.")
else:
return json_parse
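# A hedged usage sketch: the value may be a path to a JSON file or an inline
# JSON string, e.g.
#
#   get_file_or_parse_json('{"userName": "admin"}', 'source-connection-json')
#
# returns the parsed dict, while an unreadable value raises CLIError.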
def create_connection(connection_info_json, prompt_prefix, typeOfInfo):
user_name = connection_info_json.get('userName', None) or prompt(prompt_prefix + 'Username: ')
password = connection_info_json.get('password', None) or prompt_pass(msg=prompt_prefix + 'Password: ')
if "mysql" in typeOfInfo:
server_name = connection_info_json.get('serverName', None)
if server_name is None:
raise RequiredArgumentMissingError('ServerName cannot be null/empty')
port = connection_info_json.get('port', 3306)
encrypt_connection = connection_info_json.get('encryptConnection', True)
trust_server_certificate = connection_info_json.get('trustServerCertificate', True)
return MySqlConnectionInfo(user_name=user_name,
password=password,
server_name=server_name,
port=port,
encrypt_connection=encrypt_connection,
trust_server_certificate=trust_server_certificate)
if "postgres" in typeOfInfo:
server_name = connection_info_json.get('serverName', None)
database_name = connection_info_json.get('databaseName', "postgres")
port = connection_info_json.get('port', 5432)
trust_server_certificate = connection_info_json.get('trustServerCertificate', False)
encrypt_connection = connection_info_json.get('encryptConnection', True)
return PostgreSqlConnectionInfo(user_name=user_name,
password=password,
server_name=server_name,
database_name=database_name,
port=port,
encrypt_connection=encrypt_connection,
trust_server_certificate=trust_server_certificate)
if "sql" in typeOfInfo:
data_source = connection_info_json.get('dataSource', None)
authentication = connection_info_json.get('authentication', None)
encrypt_connection = connection_info_json.get('encryptConnection', None)
trust_server_certificate = connection_info_json.get('trustServerCertificate', None)
additional_settings = connection_info_json.get('additionalSettings', None)
return SqlConnectionInfo(user_name=user_name,
password=password,
data_source=data_source,
authentication=authentication,
encrypt_connection=encrypt_connection,
trust_server_certificate=trust_server_certificate,
additional_settings=additional_settings)
    # If no match, pass the connection info through
return connection_info_json
def get_task_migration_properties(
database_options_json,
source_platform,
target_platform,
task_type,
source_connection_info,
target_connection_info,
enable_schema_validation,
enable_data_integrity_validation,
enable_query_analysis_validation):
st = get_scenario_type(source_platform, target_platform, task_type)
if st == ScenarioType.sql_sqldb_offline:
TaskProperties = MigrateSqlServerSqlDbTaskProperties
GetInput = get_migrate_sql_to_sqldb_offline_input
elif st == ScenarioType.mysql_azuremysql_offline:
TaskProperties = MigrateMySqlAzureDbForMySqlOfflineTaskProperties
GetInput = get_migrate_mysql_to_azuredbformysql_offline_input
elif st == ScenarioType.postgres_azurepostgres_online:
TaskProperties = MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties
GetInput = get_migrate_postgresql_to_azuredbforpostgresql_sync_input
else:
raise CLIError("The supplied source, target, and task type is not supported for migration.")
return get_task_properties(st,
GetInput,
TaskProperties,
database_options_json,
source_connection_info,
target_connection_info,
enable_schema_validation,
enable_data_integrity_validation,
enable_query_analysis_validation)
def get_task_properties(scenario_type,
input_func,
task_properties_type,
options_json,
source_connection_info,
target_connection_info,
enable_schema_validation,
enable_data_integrity_validation,
enable_query_analysis_validation):
if source_connection_info is None and target_connection_info is None:
task_input = input_func(options_json)
elif scenario_type == ScenarioType.sql_sqldb_offline:
task_input = input_func(
options_json,
source_connection_info,
target_connection_info,
enable_schema_validation,
enable_data_integrity_validation,
enable_query_analysis_validation)
elif scenario_type == ScenarioType.mysql_azuremysql_offline:
task_input = input_func(
options_json,
source_connection_info,
target_connection_info)
else:
task_input = input_func(
options_json,
source_connection_info,
target_connection_info)
task_properties_params = {'input': task_input}
return task_properties_type(**task_properties_params)
def get_scenario_type(source_platform, target_platform, task_type=""):
if source_platform == "sql" and target_platform == "sqldb":
scenario_type = ScenarioType.sql_sqldb_offline if not task_type or "offline" in task_type else \
ScenarioType.unknown
elif source_platform == "mysql" and target_platform == "azuredbformysql":
scenario_type = ScenarioType.mysql_azuremysql_offline if not task_type or "offline" in task_type else \
ScenarioType.unknown
elif source_platform == "postgresql" and target_platform == "azuredbforpostgresql":
scenario_type = ScenarioType.postgres_azurepostgres_online if not task_type or "online" in task_type else \
ScenarioType.unknown
else:
scenario_type = ScenarioType.unknown
return scenario_type
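# Worked examples of the mapping above (platform names as used by the CLI):
#
#   get_scenario_type('sql', 'sqldb', 'offlinemigration')
#       -> ScenarioType.sql_sqldb_offline
#   get_scenario_type('postgresql', 'azuredbforpostgresql', 'onlinemigration')
#       -> ScenarioType.postgres_azurepostgres_online
#   get_scenario_type('mysql', 'sqldb')
#       -> ScenarioType.unknown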
class ScenarioType(Enum):
unknown = 0
# SQL to SQLDB
sql_sqldb_offline = 1
    # MySQL to Azure for MySQL Online
    mysql_azuremysql_online = 21
    # PostgreSQL to Azure for PostgreSQL
postgres_azurepostgres_online = 31
# MySQL to Azure for MySQL Offline
mysql_azuremysql_offline = 22
# endregion
|
|
"""Armis Integration for Cortex XSOAR - Unit Tests file
This file contains the Pytest Tests for the Armis Integration
"""
import json
import pytest
import time
import CommonServerPython
def test_untag_device_success(requests_mock):
from Armis import Client, untag_device_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
requests_mock.delete('https://test.com/api/v1/devices/1/tags/', json={})
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
result = untag_device_command(client, {'device_id': '1', 'tags': 'test-tag'})
assert result == "Successfully Untagged device: 1 with tags: ['test-tag']"
def test_untag_device_failure(requests_mock):
from Armis import Client, untag_device_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
requests_mock.delete('https://test.com/api/v1/devices/1/tags/', json={}, status_code=400)
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
with pytest.raises(CommonServerPython.DemistoException):
untag_device_command(client, {'device_id': '1', 'tags': 'test-tag'})
def test_tag_device(requests_mock):
from Armis import Client, tag_device_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
requests_mock.post('https://test.com/api/v1/devices/1/tags/', json={})
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
result = tag_device_command(client, {'device_id': '1', 'tags': 'test-tag'})
assert result == "Successfully Tagged device: 1 with tags: ['test-tag']"
def test_update_alert_status(requests_mock):
from Armis import Client, update_alert_status_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
requests_mock.patch('https://test.com/api/v1/alerts/1/', json={})
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
args = {'alert_id': '1', 'status': 'UNHANDLED'}
assert update_alert_status_command(client, args) == "Successfully Updated Alert: 1 to status: UNHANDLED"
def test_search_alerts(requests_mock):
from Armis import Client, search_alerts_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
url = 'https://test.com/api/v1/search/?aql='
url += '+'.join([
'in%3Aalerts',
'timeFrame%3A%223+days%22',
'riskLevel%3AHigh%2CMedium',
'status%3AUNHANDLED%2CRESOLVED',
'type%3A%22Policy+Violation%22',
'alertId%3A%281%29',
])
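    # The mocked URL above is just the percent-encoded form of the AQL string
    # 'in:alerts timeFrame:"3 days" riskLevel:High,Medium
    #  status:UNHANDLED,RESOLVED type:"Policy Violation" alertId:(1)'
    # that the command is expected to build from `args` below.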
mock_results = {
'data': {
'results': []
}
}
requests_mock.get(url, json=mock_results)
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
args = {
'severity': 'High,Medium',
'status': 'UNHANDLED,RESOLVED',
'alert_type': 'Policy Violation',
'alert_id': '1',
'max_results': '20',
'time_frame': '3 days',
}
response = search_alerts_command(client, args)
assert response == 'No results found'
example_alerts = [
{
"activityIds": [
19625045,
19625223,
19625984,
19626169,
19626680,
19626818,
19628162,
19628359
],
"activityUUIDs": [
"1-uS23YBAAAC-vCTQOhA",
"7eut23YBAAAC-vCTkOhB",
"Oes13HYBAAAC-vCTcel0",
"T-tU3HYBAAAC-vCTyunu",
"mevb3HYBAAAC-vCT9-nn",
"uev33HYBAAAC-vCTa-mg",
"P-u33XYBAAAC-vCTlOpq",
"SevT3XYBAAAC-vCTA-o_"
],
"alertId": 1,
"connectionIds": [
845993,
846061,
846157,
846308
],
"description": "Smart TV started connection to Corporate Network",
"deviceIds": [
165722,
532
],
"severity": "Medium",
"status": "Unhandled",
"time": "2021-01-07T06:39:13.320893+00:00",
"title": "Smart TV connected to Corporate network",
"type": "System Policy Violation"
}
]
mock_results['data']['results'] = example_alerts
requests_mock.get(url, json=mock_results)
response = search_alerts_command(client, args)
assert response.outputs == example_alerts
def test_search_alerts_by_aql(requests_mock):
from Armis import Client, search_alerts_by_aql_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
url = 'https://test.com/api/v1/search/?aql='
url += '+'.join([
'in%3Aalerts',
'timeFrame%3A%223+days%22',
'riskLevel%3AHigh%2CMedium',
'status%3AUNHANDLED%2CRESOLVED',
'type%3A%22Policy+Violation%22',
])
mock_results = {
'data': {
'results': []
}
}
requests_mock.get(url, json=mock_results)
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
args = {
'aql_string': 'timeFrame:"3 days" riskLevel:High,Medium status:UNHANDLED,RESOLVED type:"Policy Violation"'
}
response = search_alerts_by_aql_command(client, args)
assert response == 'No alerts found'
example_alerts = [
{
"activityIds": [
19625045,
19625223,
19625984,
19626169,
19626680,
19626818,
19628162,
19628359
],
"activityUUIDs": [
"1-uS23YBAAAC-vCTQOhA",
"7eut23YBAAAC-vCTkOhB",
"Oes13HYBAAAC-vCTcel0",
"T-tU3HYBAAAC-vCTyunu",
"mevb3HYBAAAC-vCT9-nn",
"uev33HYBAAAC-vCTa-mg",
"P-u33XYBAAAC-vCTlOpq",
"SevT3XYBAAAC-vCTA-o_"
],
"alertId": 1,
"connectionIds": [
845993,
846061,
846157,
846308
],
"description": "Smart TV started connection to Corporate Network",
"deviceIds": [
165722,
532
],
"severity": "Medium",
"status": "Unhandled",
"time": "2021-01-07T06:39:13.320893+00:00",
"title": "Smart TV connected to Corporate network",
"type": "System Policy Violation"
}
]
mock_results['data']['results'] = example_alerts
requests_mock.get(url, json=mock_results)
response = search_alerts_by_aql_command(client, args)
assert response.outputs == example_alerts
def test_search_devices(requests_mock):
from Armis import Client, search_devices_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
url = 'https://test.com/api/v1/search/?aql=in%3Adevices+timeFrame%3A%223+days%22+deviceId%3A%281%29'
mock_results = {
'data': {
'results': []
}
}
requests_mock.get(url, json=mock_results)
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
args = {
'device_id': '1',
'time_frame': '3 days'
}
response = search_devices_command(client, args)
assert response == 'No devices found'
example_alerts = [
{
"accessSwitch": None,
"category": "Network Equipment",
"dataSources": [
{
"firstSeen": "2021-01-15T03:26:56+00:00",
"lastSeen": "2021-01-16T18:16:32+00:00",
"name": "Meraki",
"types": [
"WLC"
]
}
],
"firstSeen": "2021-01-15T03:26:56+00:00",
"id": 1,
"ipAddress": None,
"ipv6": None,
"lastSeen": "2021-01-16T18:16:32+00:00",
"macAddress": "f8:ca:59:53:91:ce",
"manufacturer": "NetComm Wireless",
"model": "NetComm device",
"name": "Aussie Broadband 0079",
"operatingSystem": None,
"operatingSystemVersion": None,
"riskLevel": 5,
"sensor": {
"name": "win-wap-tom-Upstairs",
"type": "Access Point"
},
"site": {
"location": "51 Longview Court, Thomastown Vic 3074",
"name": "Winslow Workshop - Thomastown"
},
"tags": [
"Access Point",
"Off Network",
"SSID=Aussie Broadband 0079"
],
"type": "Access Point Interface",
"user": "",
"visibility": "Full"
}
]
mock_results['data']['results'] = example_alerts
requests_mock.get(url, json=mock_results)
response = search_devices_command(client, args)
assert response.outputs == example_alerts
def test_search_devices_by_aql(requests_mock):
from Armis import Client, search_devices_by_aql_command
mock_token = {
'data': {
'access_token': 'example',
'expiration_utc': time.ctime(time.time() + 10000)
}
}
requests_mock.post('https://test.com/api/v1/access_token/?secret_key=secret-example', json=mock_token)
url = 'https://test.com/api/v1/search/?aql=in%3Adevices+timeFrame%3A%223+days%22+deviceId%3A%281%29'
mock_results = {
'data': {
'results': []
}
}
requests_mock.get(url, json=mock_results)
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
args = {
'aql_string': 'timeFrame:"3 days" deviceId:(1)'
}
response = search_devices_by_aql_command(client, args)
assert response == 'No devices found'
example_alerts = [
{
"accessSwitch": None,
"category": "Network Equipment",
"dataSources": [
{
"firstSeen": "2021-01-15T03:26:56+00:00",
"lastSeen": "2021-01-16T18:16:32+00:00",
"name": "Meraki",
"types": [
"WLC"
]
}
],
"firstSeen": "2021-01-15T03:26:56+00:00",
"id": 1,
"ipAddress": None,
"ipv6": None,
"lastSeen": "2021-01-16T18:16:32+00:00",
"macAddress": "f8:ca:59:53:91:ce",
"manufacturer": "NetComm Wireless",
"model": "NetComm device",
"name": "Aussie Broadband 0079",
"operatingSystem": None,
"operatingSystemVersion": None,
"riskLevel": 5,
"sensor": {
"name": "win-wap-tom-Upstairs",
"type": "Access Point"
},
"site": {
"location": "51 Longview Court, Thomastown Vic 3074",
"name": "Winslow Workshop - Thomastown"
},
"tags": [
"Access Point",
"Off Network",
"SSID=Aussie Broadband 0079"
],
"type": "Access Point Interface",
"user": "",
"visibility": "Full"
}
]
mock_results['data']['results'] = example_alerts
requests_mock.get(url, json=mock_results)
response = search_devices_by_aql_command(client, args)
assert response.outputs == example_alerts
def test_fetch_incidents_no_duplicates(mocker):
"""
Given:
- 'client': Armis client.
- 'last_run': Last run parameters.
When:
- Performing two consecutive calls to fetch incidents
Then:
- Ensure incident that was already fetched is not fetched again.
"""
from Armis import Client, fetch_incidents
client = Client('secret-example', 'https://test.com/api/v1', verify=False, proxy=False)
last_fetch = '2021-03-09T01:00:00.000001+00:00'
armis_incident = {'time': '2021-03-09T01:00:00.000001+00:00', 'type': 'System Policy Violation'}
response = {
'results': [armis_incident],
'next': 'more data'
}
mocker.patch.object(client, 'search_alerts', return_value=response)
next_run, incidents = fetch_incidents(client, {'last_fetch': last_fetch}, '', 'Low', [], [], '', 1)
assert next_run['last_fetch'] == last_fetch
assert incidents[0]['rawJSON'] == json.dumps(armis_incident)
_, incidents = fetch_incidents(client, next_run, '', 'Low', [], [], '', 1)
assert not incidents
|
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
from datetime import datetime
from parser.box import Box
from parser.navigation_page import NavigationPage
from settings import JAHIA_DATE_FORMAT
from parser.sidebar import Sidebar
import logging
import re
from utils import Utils
class PageContent:
"""
The language specific data of a Page
"""
def __init__(self, page, language, element):
self.element = element
self.page = page
self.site = page.site
self.wp_id = None
self.language = language
# the relative path, e.g. /team.html
self.path = ""
self.vanity_urls = []
self.boxes = []
self.sidebar = Sidebar()
self.last_update = ""
# a list of NavigationPages
self.navigation = []
# the number of occurrences of each tag, e.g. "br" : 10
self.num_tags = {}
self.parse_title()
# last update
self.parse_last_update()
# sidebar
self.parse_sidebar()
# path
self.set_path()
# navigation
self.parse_navigation()
# add to the site PageContents
self.site.pages_content_by_path[self.path] = self
def parse_title(self):
"""
        Pages have a default title, but it can be overridden by another title.
:return:
"""
# For menu title, we have to use default page title
self.menu_title = self.element.getAttribute("jahia:title")
self.title = ""
        # Looking if there is an overridden page title (that will be used only on the page itself). We have to look
        # only in direct children, otherwise there's a risk we get a child page's title.
page_list_list = Utils.get_dom_next_level_children(self.element, "pageTitleListList")
if page_list_list:
self.title = page_list_list[0].getElementsByTagName('pageTitle')
if self.title:
# Can have a value or be empty
self.title = self.title[0].getAttribute("jahia:value")
# If page title is empty (equal to "")
if not self.title:
# We use the menu title as page title
self.title = self.menu_title
def parse_last_update(self):
"""Parse the last update information"""
date = self.element.getAttribute("jcr:lastModified")
try:
if date:
self.last_update = datetime.strptime(date, JAHIA_DATE_FORMAT)
else:
logging.warning(
"%s - parse - For the page id=%s the last update date is empty",
self.site.name, self.page.pid)
except ValueError as e:
logging.error(
"%s - parse - Invalid last update date for page %s : '%s'",
self.site.name, self.page.pid, date)
raise e
def parse_sidebar(self):
""" Parse sidebar """
# search the sidebar in the page xml content
children = self.element.childNodes
for child in children:
if child.nodeName == "extraList":
for extra in child.childNodes:
if extra.ELEMENT_NODE != extra.nodeType:
continue
                    # Skip this box if its ACL is "break"
if extra.getAttribute("jahia:acl") == "break":
continue
multibox = extra.getElementsByTagName("text").length > 1
box = Box(site=self.site, page_content=self, element=extra, multibox=multibox, is_in_sidebar=True)
self.sidebar.boxes.append(box)
nb_boxes = len(self.sidebar.boxes)
# if we don't have boxes in this sidebar we check the parents
if nb_boxes == 0:
parent = self.page.parent
while parent:
                # Check if the parent has content in the current language
if self.language in parent.contents:
sidebar = parent.contents[self.language].sidebar
# we found a sidebar with boxes, we stop
if len(sidebar.boxes) > 0:
self.sidebar = sidebar
break
# otherwise we continue in the hierarchy
parent = parent.parent
def set_path(self):
"""
Set the page path
"""
if self.page.is_homepage():
if "en" == self.language:
self.vanity_urls = ["/index.html"]
else:
self.vanity_urls = ["/index-{}.html".format(self.language)]
else:
# Vanity URL can have the following content :
# one URL ==> '/sciences_donnees$$$true$$$true==='
# many URLs ==> '/sciences_donnees$$$true$$$true===/sciencesdonnees$$$true$$$false==='
# many URLs ==> '/sciences_donnees$$$true$$$false===/sciencesdonnees$$$true$$$false==='
vanity_url = self.element.getAttribute("jahia:urlMappings")
if vanity_url:
# Going through exploded parts
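                # Worked example: '/a$$$true$$$true===/b$$$true$$$false===' splits
                # on '$$$' into ['/a', 'true', 'true===/b', 'true', 'false==='];
                # the regex below strips the true/false/=== noise, leaving the
                # vanity URLs '/a' and '/b'.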
for url in vanity_url.split('$$$'):
# Cleaning content
url = re.sub(r'(true|false)(===)?', '', url)
if url:
self.vanity_urls.append(url)
# By default, we also add the "default" page name because it can also be used even if there are
# vanity URLs defined.
self.vanity_urls.append("/page-{}-{}.html".format(self.page.pid, self.language))
            # If the website has only one language, we also add another way to reach the page: the URL without
            # the language.
            # FIXME: It may also work if the website has more than one language; in that case, the URL without
            # the language points to the default language URL.
if len(self.site.languages) == 1:
# Add if not exists
url_without_lang = "/page-{}.html".format(self.page.pid)
if url_without_lang not in self.vanity_urls:
self.vanity_urls.append(url_without_lang)
        # FIXME: the prefixing part should be done in the exporter
# add the site root_path at the beginning
self.path = self.site.root_path + self.vanity_urls[0]
def parse_navigation(self):
"""Parse the navigation"""
navigation_pages = self.element.getElementsByTagName("navigationPage")
for navigation_page in navigation_pages:
# check if the <navigationPage> belongs to this page
if not self.site.belongs_to(element=navigation_page, page=self.page):
continue
for child in navigation_page.childNodes:
# internal page declared with <jahia:page>
if child.nodeName == "jahia:page":
template = child.getAttribute("jahia:template")
# we don't want the sitemap
if not template == "sitemap":
ref = child.getAttribute("jcr:uuid")
title = child.getAttribute("jahia:title")
self.add_navigation_page(type="internal", ref=ref, title=title)
# internal page declared with <jahia:link>
elif child.nodeName == "jahia:link":
ref = child.getAttribute("jahia:reference")
title = child.getAttribute("jahia:title")
self.add_navigation_page(type="internal", ref=ref, title=title)
# external page
elif child.nodeName == "jahia:url":
ref = child.getAttribute("jahia:value")
title = child.getAttribute("jahia:title")
self.add_navigation_page(type="external", ref=ref, title=title)
def add_navigation_page(self, type, ref, title):
"""Add a NavigationPage with the given info"""
navigation_page = NavigationPage(parent=self, type=type, ref=ref, title=title)
self.navigation.append(navigation_page)
|
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Wsgi helper utilities for trove"""
import math
import re
import time
import traceback
import uuid
import eventlet.wsgi
import jsonschema
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_service import service
from oslo_utils import encodeutils
import paste.urlmap
import webob
import webob.dec
import webob.exc
from trove.common import base_wsgi
from trove.common import cfg
from trove.common import context as rd_context
from trove.common import exception
from trove.common.i18n import _
from trove.common import pastedeploy
from trove.common import utils
CONTEXT_KEY = 'trove.context'
Router = base_wsgi.Router
Debug = base_wsgi.Debug
Middleware = base_wsgi.Middleware
JSONDictSerializer = base_wsgi.JSONDictSerializer
RequestDeserializer = base_wsgi.RequestDeserializer
CONF = cfg.CONF
# Raise the default from 8192 to accommodate large tokens
eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line
eventlet.patcher.monkey_patch(all=False, socket=True)
LOG = logging.getLogger('trove.common.wsgi')
def versioned_urlmap(*args, **kwargs):
urlmap = paste.urlmap.urlmap_factory(*args, **kwargs)
return VersionedURLMap(urlmap)
def launch(app_name, port, paste_config_file, data=None,
host='0.0.0.0', backlog=128, threads=1000, workers=None):
"""Launches a wsgi server based on the passed in paste_config_file.
    Launch provides an easy way to create a paste app from the config
file and launch it via the service launcher. It takes care of
all of the plumbing. The only caveat is that the paste_config_file
must be a file that paste.deploy can find and handle. There is
a helper method in cfg.py that finds files.
Example:
conf_file = CONF.find_file(CONF.api_paste_config)
launcher = wsgi.launch('myapp', CONF.bind_port, conf_file)
launcher.wait()
"""
LOG.debug("Trove started on %s", host)
app = pastedeploy.paste_deploy_app(paste_config_file, app_name, data)
server = base_wsgi.Service(app, port, host=host,
backlog=backlog, threads=threads)
return service.launch(CONF, server, workers)
# Note: taken from Nova
def serializers(**serializers):
"""Attaches serializers to a method.
This decorator associates a dictionary of serializers with a
method. Note that the function attributes are directly
manipulated; the method is not wrapped.
"""
def decorator(func):
if not hasattr(func, 'wsgi_serializers'):
func.wsgi_serializers = {}
func.wsgi_serializers.update(serializers)
return func
return decorator
class TroveMiddleware(Middleware):
# Note: taken from nova
@classmethod
def factory(cls, global_config, **local_config):
"""Used for paste app factories in paste.deploy config files.
Any local configuration (that is, values under the [filter:APPNAME]
section of the paste config) will be passed into the `__init__` method
as kwargs.
A hypothetical configuration would look like:
[filter:analytics]
redis_host = 127.0.0.1
paste.filter_factory = nova.api.analytics:Analytics.factory
which would result in a call to the `Analytics` class as
import nova.api.analytics
analytics.Analytics(app_from_paste, redis_host='127.0.0.1')
You could of course re-implement the `factory` method in subclasses,
but using the kwarg passing it shouldn't be necessary.
"""
def _factory(app):
return cls(app, **local_config)
return _factory
class VersionedURLMap(object):
def __init__(self, urlmap):
self.urlmap = urlmap
def __call__(self, environ, start_response):
req = Request(environ)
if req.url_version is None and req.accept_version is not None:
version = "/v" + req.accept_version
http_exc = webob.exc.HTTPNotAcceptable(_("version not supported"))
app = self.urlmap.get(version, Fault(http_exc))
else:
app = self.urlmap
return app(environ, start_response)
class Router(base_wsgi.Router):
# Original router did not allow for serialization of the 404 error.
# To fix this the _dispatch was modified to use Fault() objects.
@staticmethod
@webob.dec.wsgify
def _dispatch(req):
"""
Called by self._router after matching the incoming request to a route
and putting the information into req.environ. Either returns 404
or the routed WSGI app's response.
"""
match = req.environ['wsgiorg.routing_args'][1]
if not match:
return Fault(webob.exc.HTTPNotFound())
app = match['controller']
return app
class Request(base_wsgi.Request):
@property
def params(self):
return utils.stringify_keys(super(Request, self).params)
def best_match_content_type(self, supported_content_types=None):
"""Determine the most acceptable content-type.
Based on the query extension then the Accept header.
"""
parts = self.path.rsplit('.', 1)
if len(parts) > 1:
format = parts[1]
if format in ['json']:
return 'application/{0}'.format(parts[1])
ctypes = {
'application/vnd.openstack.trove+json': "application/json",
'application/json': "application/json",
}
bm = self.accept.best_match(ctypes.keys())
return ctypes.get(bm, 'application/json')
@utils.cached_property
def accept_version(self):
accept_header = self.headers.get('ACCEPT', "")
        accept_version_re = re.compile(r".*?application/vnd.openstack.trove"
                                       r"(\+.+?)?;"
                                       r"version=(?P<version_no>\d+\.?\d*)")
match = accept_version_re.search(accept_header)
return match.group("version_no") if match else None
@utils.cached_property
def url_version(self):
        versioned_url_re = re.compile(r"/v(?P<version_no>\d+\.?\d*)")
match = versioned_url_re.search(self.path)
return match.group("version_no") if match else None
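    # Illustrative matches for the two properties above: a path such as
    # '/v1.0/instances' yields url_version '1.0', and an Accept header such as
    # 'application/vnd.openstack.trove+json;version=1.0' yields
    # accept_version '1.0'.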
class Result(object):
"""A result whose serialization is compatible with JSON."""
def __init__(self, data, status=200):
self._data = data
self.status = status
def data(self, serialization_type):
"""Return an appropriate serialized type for the body.
serialization_type is not used presently, but may be
in the future, so it stays.
"""
if hasattr(self._data, "data_for_json"):
return self._data.data_for_json()
return self._data
class Resource(base_wsgi.Resource):
def __init__(self, controller, deserializer, serializer,
exception_map=None):
exception_map = exception_map or {}
self.model_exception_map = self._invert_dict_list(exception_map)
super(Resource, self).__init__(controller, deserializer, serializer)
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, request):
return super(Resource, self).__call__(request)
def execute_action(self, action, request, **action_args):
if getattr(self.controller, action, None) is None:
return Fault(webob.exc.HTTPNotFound())
try:
self.controller.validate_request(action, action_args)
result = super(Resource, self).execute_action(
action,
request,
**action_args)
if type(result) is dict:
result = Result(result)
return result
except exception.TroveError as trove_error:
LOG.debug(traceback.format_exc())
LOG.debug("Caught Trove Error %s", trove_error)
httpError = self._get_http_error(trove_error)
LOG.debug("Mapped Error to %s", httpError)
return Fault(httpError(str(trove_error), request=request))
except webob.exc.HTTPError as http_error:
LOG.debug(traceback.format_exc())
return Fault(http_error)
except Exception as error:
exception_uuid = str(uuid.uuid4())
LOG.exception(exception_uuid + ": " + str(error))
return Fault(webob.exc.HTTPInternalServerError(
"Internal Server Error. Please keep this ID to help us "
"figure out what went wrong: (%s)." % exception_uuid,
request=request))
def _get_http_error(self, error):
return self.model_exception_map.get(type(error),
webob.exc.HTTPBadRequest)
def _invert_dict_list(self, exception_dict):
"""Flattens values of keys and inverts keys and values.
Example:
{'x': [1, 2, 3], 'y': [4, 5, 6]} converted to
{1: 'x', 2: 'x', 3: 'x', 4: 'y', 5: 'y', 6: 'y'}
"""
inverted_dict = {}
for key, value_list in exception_dict.items():
for value in value_list:
inverted_dict[value] = key
return inverted_dict
def serialize_response(self, action, action_result, accept):
# If an exception is raised here in the base class, it is swallowed,
# and the action_result is returned as-is. For us, that's bad news -
# we never want that to happen except in the case of webob types.
# So we override the behavior here so we can at least log it.
try:
return super(Resource, self).serialize_response(
action, action_result, accept)
except Exception:
# execute_action either returns the results or a Fault object.
# If action_result is not a Fault then there really was a
# serialization error which we log. Otherwise return the Fault.
if not isinstance(action_result, Fault):
LOG.exception(_("Unserializable result detected."))
raise
return action_result
class Controller(object):
"""Base controller that creates a Resource with default serializers."""
exception_map = {
webob.exc.HTTPUnprocessableEntity: [
exception.UnprocessableEntity,
],
webob.exc.HTTPUnauthorized: [
exception.Forbidden,
exception.SwiftAuthError,
],
webob.exc.HTTPForbidden: [
exception.ReplicaSourceDeleteForbidden,
exception.BackupTooLarge,
exception.ModuleAccessForbidden,
exception.ModuleAppliedToInstance,
exception.PolicyNotAuthorized,
exception.LogAccessForbidden,
],
webob.exc.HTTPBadRequest: [
exception.InvalidModelError,
exception.BadRequest,
exception.CannotResizeToSameSize,
exception.BadValue,
exception.DatabaseAlreadyExists,
exception.UserAlreadyExists,
exception.LocalStorageNotSpecified,
exception.ModuleAlreadyExists,
],
webob.exc.HTTPNotFound: [
exception.NotFound,
exception.ComputeInstanceNotFound,
exception.ModelNotFoundError,
exception.UserNotFound,
exception.DatabaseNotFound,
exception.QuotaResourceUnknown,
exception.BackupFileNotFound,
exception.ClusterNotFound,
exception.DatastoreNotFound,
exception.SwiftNotFound,
exception.ModuleTypeNotFound,
],
webob.exc.HTTPConflict: [
exception.BackupNotCompleteError,
exception.RestoreBackupIntegrityError,
],
webob.exc.HTTPRequestEntityTooLarge: [
exception.OverLimit,
exception.QuotaExceeded,
exception.VolumeQuotaExceeded,
],
webob.exc.HTTPServerError: [
exception.VolumeCreationFailure,
exception.UpdateGuestError,
],
webob.exc.HTTPNotImplemented: [
exception.VolumeNotSupported,
exception.LocalStorageNotSupported,
exception.DatastoreOperationNotSupported,
exception.ClusterInstanceOperationNotSupported,
exception.ClusterDatastoreNotSupported
],
}
schemas = {}
@classmethod
def get_schema(cls, action, body):
LOG.debug("Getting schema for %(name)s:%(action)s",
{'name': cls.__class__.__name__, 'action': action})
if cls.schemas:
matching_schema = cls.schemas.get(action, {})
if matching_schema:
LOG.debug("Found Schema: %s",
matching_schema.get("name", matching_schema))
return matching_schema
@staticmethod
def format_validation_msg(errors):
# format path like object['field1'][i]['subfield2']
messages = []
for error in errors:
path = list(error.path)
f_path = "%s%s" % (path[0],
''.join(['[%r]' % i for i in path[1:]]))
messages.append("%s %s" % (f_path, error.message))
for suberror in sorted(error.context, key=lambda e: e.schema_path):
messages.append(suberror.message)
error_msg = "; ".join(messages)
return "Validation error: %s" % error_msg
def validate_request(self, action, action_args):
body = action_args.get('body', {})
schema = self.get_schema(action, body)
if schema:
validator = jsonschema.Draft4Validator(schema)
if not validator.is_valid(body):
errors = sorted(validator.iter_errors(body),
key=lambda e: e.path)
error_msg = self.format_validation_msg(errors)
LOG.info(error_msg)
raise exception.BadRequest(message=error_msg)
def create_resource(self):
return Resource(
self,
RequestDeserializer(),
TroveResponseSerializer(),
self.exception_map)
def _extract_limits(self, params):
return {key: params[key] for key in params.keys()
if key in ["limit", "marker"]}
class TroveResponseSerializer(base_wsgi.ResponseSerializer):
def serialize_body(self, response, data, content_type, action):
"""Overrides body serialization in base_wsgi.ResponseSerializer.
If the "data" argument is the Result class, its data
method is called and *that* is passed to the superclass implementation
instead of the actual data.
"""
if isinstance(data, Result):
data = data.data(content_type)
super(TroveResponseSerializer, self).serialize_body(
response,
data,
content_type,
action)
def serialize_headers(self, response, data, action):
super(TroveResponseSerializer, self).serialize_headers(
response,
data,
action)
if isinstance(data, Result):
response.status = data.status
class Fault(webob.exc.HTTPException):
"""Error codes for API faults."""
code_wrapper = {
400: webob.exc.HTTPBadRequest,
401: webob.exc.HTTPUnauthorized,
        403: webob.exc.HTTPForbidden,
404: webob.exc.HTTPNotFound,
}
resp_codes = [int(code) for code in code_wrapper.keys()]
def __init__(self, exception):
"""Create a Fault for the given webob.exc.exception."""
self.wrapped_exc = exception
@staticmethod
def _get_error_name(exc):
# Displays a Red Dwarf specific error name instead of a webob exc name.
named_exceptions = {
'HTTPBadRequest': 'badRequest',
'HTTPUnauthorized': 'unauthorized',
'HTTPForbidden': 'forbidden',
'HTTPNotFound': 'itemNotFound',
'HTTPMethodNotAllowed': 'badMethod',
'HTTPRequestEntityTooLarge': 'overLimit',
'HTTPUnsupportedMediaType': 'badMediaType',
'HTTPInternalServerError': 'instanceFault',
'HTTPNotImplemented': 'notImplemented',
'HTTPServiceUnavailable': 'serviceUnavailable',
}
name = exc.__class__.__name__
if name in named_exceptions:
return named_exceptions[name]
# If the exception isn't in our list, at least strip off the
# HTTP from the name, and then drop the case on the first letter.
name = name.split("HTTP").pop()
name = name[:1].lower() + name[1:]
return name
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
"""Generate a WSGI response based on the exception passed to ctor."""
# Replace the body with fault details.
fault_name = Fault._get_error_name(self.wrapped_exc)
fault_data = {
fault_name: {
'code': self.wrapped_exc.status_int,
}
}
if self.wrapped_exc.detail:
fault_data[fault_name]['message'] = self.wrapped_exc.detail
else:
fault_data[fault_name]['message'] = self.wrapped_exc.explanation
content_type = req.best_match_content_type()
serializer = {
'application/json': base_wsgi.JSONDictSerializer(),
}[content_type]
self.wrapped_exc.body = serializer.serialize(fault_data, content_type)
self.wrapped_exc.content_type = content_type
return self.wrapped_exc
class ContextMiddleware(base_wsgi.Middleware):
def __init__(self, application):
self.admin_roles = CONF.admin_roles
super(ContextMiddleware, self).__init__(application)
def _extract_limits(self, params):
return {key: params[key] for key in params.keys()
if key in ["limit", "marker"]}
def process_request(self, request):
service_catalog = None
catalog_header = request.headers.get('X-Service-Catalog', None)
if catalog_header:
try:
service_catalog = jsonutils.loads(catalog_header)
except ValueError:
raise webob.exc.HTTPInternalServerError(
_('Invalid service catalog json.'))
tenant_id = request.headers.get('X-Tenant-Id', None)
auth_token = request.headers["X-Auth-Token"]
user_id = request.headers.get('X-User-ID', None)
roles = request.headers.get('X-Role', '').split(',')
is_admin = False
for role in roles:
if role.lower() in self.admin_roles:
is_admin = True
break
limits = self._extract_limits(request.params)
context = rd_context.TroveContext(auth_token=auth_token,
tenant=tenant_id,
user=user_id,
is_admin=is_admin,
limit=limits.get('limit'),
marker=limits.get('marker'),
service_catalog=service_catalog,
roles=roles)
request.environ[CONTEXT_KEY] = context
@classmethod
def factory(cls, global_config, **local_config):
def _factory(app):
LOG.debug("Created context middleware with config: %s",
local_config)
return cls(app)
return _factory
class FaultWrapper(base_wsgi.Middleware):
"""Calls down the middleware stack, making exceptions into faults."""
@webob.dec.wsgify(RequestClass=base_wsgi.Request)
def __call__(self, req):
try:
resp = req.get_response(self.application)
if resp.status_int in Fault.resp_codes:
for (header, value) in resp._headerlist:
if header == "Content-Type" and \
value == "text/plain; charset=UTF-8":
return Fault(Fault.code_wrapper[resp.status_int]())
return resp
return resp
except Exception as ex:
LOG.exception(_("Caught error: %s."),
encodeutils.exception_to_unicode(ex))
exc = webob.exc.HTTPInternalServerError()
return Fault(exc)
@classmethod
def factory(cls, global_config, **local_config):
def _factory(app):
return cls(app)
return _factory
# ported from Nova
class OverLimitFault(webob.exc.HTTPException):
"""
Rate-limited request response.
"""
def __init__(self, message, details, retry_time):
"""
Initialize new `OverLimitFault` with relevant information.
"""
hdrs = OverLimitFault._retry_after(retry_time)
self.wrapped_exc = webob.exc.HTTPRequestEntityTooLarge(headers=hdrs)
self.content = {"overLimit": {"code": self.wrapped_exc.status_int,
"message": message,
"details": details,
"retryAfter": hdrs['Retry-After'],
},
}
@staticmethod
def _retry_after(retry_time):
delay = int(math.ceil(retry_time - time.time()))
retry_after = delay if delay > 0 else 0
headers = {'Retry-After': '%d' % retry_after}
return headers
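    # For example (illustrative): if time.time() is 100.0, _retry_after(104.2)
    # yields {'Retry-After': '5'} (the delay is rounded up), while any
    # retry_time in the past clamps to {'Retry-After': '0'}.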
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, request):
"""
Return the wrapped exception with a serialized body conforming to our
error format.
"""
content_type = request.best_match_content_type()
serializer = {'application/json': JSONDictSerializer(),
}[content_type]
content = serializer.serialize(self.content)
self.wrapped_exc.body = content
self.wrapped_exc.content_type = content_type
return self.wrapped_exc
class ActionDispatcher(object):
"""Maps method name to local methods through action name."""
def dispatch(self, *args, **kwargs):
"""Find and call local method."""
action = kwargs.pop('action', 'default')
action_method = getattr(self, str(action), self.default)
return action_method(*args, **kwargs)
def default(self, data):
raise NotImplementedError()
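# Illustrative sketch (not part of the original module): a minimal
# ActionDispatcher subclass showing how dispatch() routes on the 'action'
# keyword. The names below are hypothetical.
#
#     class EchoDispatcher(ActionDispatcher):
#         def upper(self, data):
#             return data.upper()
#
#         def default(self, data):
#             return data
#
#     EchoDispatcher().dispatch("hi", action="upper")    # -> "HI"
#     EchoDispatcher().dispatch("hi", action="missing")  # -> "hi" (default)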
class DictSerializer(ActionDispatcher):
"""Default request body serialization."""
def serialize(self, data, action='default'):
return self.dispatch(data, action=action)
def default(self, data):
return ""
class JSONDictSerializer(DictSerializer):
"""Default JSON request body serialization."""
def default(self, data):
return jsonutils.dump_as_bytes(data)
|
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simian network backoff detection module."""
import logging
import platform
import re
import socket
import urlparse
import requests
from simian.mac.client import flight_common
LINUX = 'Linux'
DARWIN = 'Darwin'
PLATFORM = platform.system()
ROUTE = {LINUX: ['/sbin/ip', 'route'], DARWIN: ['/usr/sbin/netstat', '-nr']}
ARP = {LINUX: '/usr/sbin/arp', DARWIN: '/usr/sbin/arp'}
HOST = '/usr/bin/host'
IFCONFIG = '/sbin/ifconfig'
IOS_WAP_DEFAULT_GATEWAY_IP = '172.20.10.1'
IOS_WAP_NETWORK_GATEWAY_SUBNET = '172.20.10/28'
INTERFACE_ANDROID_WAP = 'android_wap'
INTERFACE_WWAN = 'wwan'
INTERFACE_VPN = 'vpn'
BACKOFF_WLANS = frozenset([
'Fly-Fi',
'gogoinflight',
'Telekom_FlyNet',
'United_WiFi',
'United_Wi-Fi',
])
def _GetPlatform():
"""Returns a str like constants LINUX or DARWIN."""
platform_str = platform.system()
assert platform_str in [LINUX, DARWIN]
return platform_str
def GetAllInterfaceNames():
"""Get network interfaces info for this host.
Note that this list may include all types of interfaces
that are not normally interesting to this script, e.g. fw0.
Returns:
list, e.g. ['en0', 'en1', 'fw0', 'eth0']
"""
this_platform = _GetPlatform()
# Note slight difference in regex.
# BSD ifconfig writes "interface_name:\s+"
# while Linux writes "interface_name\s+"
if this_platform == LINUX:
intf_header = re.compile(r'^([a-z]+(?:[0-9]+)?)\s+')
elif this_platform == DARWIN:
intf_header = re.compile(r'^([a-z]+(?:[0-9]+)?):\s+')
  return_code, stdout, stderr = flight_common.Exec(IFCONFIG)
if return_code != 0 or stderr:
return []
interfaces = []
if stdout:
for l in stdout.splitlines(): # pylint: disable=maybe-no-member
m = intf_header.search(str(l))
if m:
interfaces.append(m.group(1))
return interfaces
def GetInterfaceNames(interface_type):
"""Get the network interface names for an interface type.
Args:
interface_type: str, like INTERFACE_* constant
Returns:
list of str, like ['ppp0'] or ['en0', 'en1']
Raises:
ValueError: if interface_type is unknown
PlatformError: if platform is not implemented
"""
this_platform = _GetPlatform()
all_interfaces = GetAllInterfaceNames()
if interface_type == INTERFACE_WWAN:
return [x for x in all_interfaces if x.startswith('ppp')
or x.startswith('bnep')]
elif interface_type == INTERFACE_ANDROID_WAP:
if this_platform == DARWIN:
return [x for x in all_interfaces if x.startswith('en')]
elif this_platform == LINUX:
return [x for x in all_interfaces if x.startswith('wlan')]
elif interface_type == INTERFACE_VPN:
if this_platform in [DARWIN, LINUX]:
return [x for x in all_interfaces if x.endswith('tun0')]
else:
raise ValueError('Unknown Platform: %s' % this_platform)
else:
raise ValueError(interface_type)
def GetNetworkGateway(network):
"""Get the gateway for a network.
Uses "netstat -nr" on Darwin and "ip route" on Linux to read the routing
table.
  It searches for a route whose destination exactly matches the network
  parameter.
Args:
network: str, likely in CIDR format or default gateway,
e.g. "1.2.3/24" or "0.0.0.0"
Returns:
a string like "1.2.3.4" or "link#1" or "01:02:03:04:05:06" or
"dev wlan0", depending on the type of route and platform.
"""
route = ROUTE.get(_GetPlatform(), None)
logging.debug('Route: %s', str(route))
if not route:
return
try:
return_code, stdout, stderr = flight_common.Exec(route)
except OSError:
return_code = None
if return_code != 0 or stderr or not stdout:
return
gateway_pattern = (
r'^%s\s+(via[\s\t])?'
r'([\d\.]+|[0-9a-f:]+|link#\d+|dev [a-z\d]+)[\s\t]+' % network)
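  # Illustrative route-table lines the pattern matches for network='default':
  #   Darwin (netstat -nr):  "default            192.168.0.1    UGSc  en0"
  #   Linux  (ip route):     "default via 192.168.0.1 dev wlan0"
  # In both cases group(2) captures "192.168.0.1".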
gateway = re.search(gateway_pattern, str(stdout), re.MULTILINE)
if gateway:
return gateway.group(2)
return
def GetDefaultGateway():
"""Gets the default gateway.
Returns:
a string like "192.168.0.1" or None if default gateway is unknown.
"""
  if _GetPlatform() in [DARWIN, LINUX]:
    default = 'default'
  else:
    logging.error('Unknown platform %s', _GetPlatform())
    return None
  return GetNetworkGateway(default)
def GetHttpResource(host, path='/', port=80, redir=False):
"""Gets HTTP resource.
Args:
host: str, like "example.com", but not "http://example.com".
path: optional, str, like "/path", default "/".
port: optional, int, default 80.
redir: optional, bool, whether to follow redirects.
Returns:
(int response code, str response body)
(int -1, str error from http exception)
"""
if port != 80:
port_str = ':%d' % port
else:
port_str = ''
url = 'http://%s%s' % (host, port_str)
url = urlparse.urljoin(url, path)
try:
response = requests.get(url, allow_redirects=redir)
code = response.status_code
body = response.text
return code, body
except requests.RequestException as e:
return -1, str(e)
def IsOnWwan():
""""Checks WWAN device connection status.
Note: this may produce false-positives, and may not catch all WWAN
devices. Several Sprint and Verizon devices were tested, all of which
create ppp0 upon connection. However, L2TP VPN also creates ppp0
(Google no longer uses this as of Q2-2010 in favor of SSLVPN). A
stronger check is probably needed at some point.
As of 2011-12-6 OpenVPN interface is tun0 on Linux and Darwin.
Returns:
Boolean. True if WWAN device is active, False otherwise.
"""
wwan_ifaces = GetInterfaceNames(INTERFACE_WWAN)
for wwan_iface in wwan_ifaces:
try:
return_code, unused_out, unused_err = flight_common.Exec(
[IFCONFIG, wwan_iface])
except OSError:
return_code = None
# ifconfig exits with 1 if interface doesn't exist.
if return_code == 0:
return True
return False
def GetNetworkName():
"""Return network name (SSID for WLANs) a device is connected to.
Returns:
name of the matching network name if possible, None otherwise.
"""
this_platform = _GetPlatform()
if this_platform == LINUX:
cmdline = '/usr/bin/nmcli -t -f NAME,DEVICES conn status'
# Ignore "Auto " prefix on automatically connecting networks.
ssid_re = re.compile(r'^(Auto )?([^:]*):.*$')
try:
return_code, out, _ = flight_common.Exec(cmdline)
except OSError:
logging.exception('Error executing nmcli')
return
if out and not return_code:
for l in out.splitlines():
res = ssid_re.match(l)
if res:
return res.groups()[1]
elif this_platform == DARWIN:
cmdline = (
'/System/Library/PrivateFrameworks/Apple80211.framework/Versions/'
'Current/Resources/airport -I | '
'awk \'/ SSID/ {print substr($0, index($0, $2))}\'')
try:
return_code, out, _ = flight_common.Exec(cmdline)
except OSError:
logging.exception('Error executing airport')
return
if out and not return_code:
return out.strip() or None
def IsOnBackoffWLAN():
"""Returns True if on a Backoff WLAN, such as gogoinflight WiFi."""
return GetNetworkName() in BACKOFF_WLANS
def IsOnAndroidWap():
"""Checks if Android WiFi or Bluetooth tethering is connected.
Returns:
Boolean. True if Android tethering is connected, False otherwise.
"""
# ifconfig output looks a little bit different on Darwin vs Linux.
#
# Darwin:
# inet 169.254.135.20 netmask 0xffff0000 broadcast 169.254.255.255
# Linux:
# inet addr:172.26.113.45 Bcast:172.26.115.255 Mask:255.255.252.0
android_wap_match_regex = re.compile(
r'inet[\w\s]*[\s:]+192\.168\.(42|43|44)\.\d{1,3}\s+'
r'.*(?:netmask\s+0xffffff00\s+|Mask:255\.255\.255\.0)')
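  # Illustrative interface lines the regex is meant to match:
  #   Darwin: "inet 192.168.43.17 netmask 0xffffff00 broadcast 192.168.43.255"
  #   Linux:  "inet addr:192.168.43.17  Bcast:192.168.43.255  Mask:255.255.255.0"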
ifaces = GetInterfaceNames(INTERFACE_ANDROID_WAP)
for wifi_iface in ifaces:
# Android tethering uses very specific subnets*, as well as dnsmasq which
# reveals itself via the TXT VERSION.BIND record.
# * 192.168.42.0/24 for wired, 192.168.43.0/24 for WiFi, and
# 192.168.44.0/24 for Bluetooth.
try:
return_code, stdout, stderr = flight_common.Exec([IFCONFIG, wifi_iface])
except OSError:
return_code = None
if return_code != 0 or stderr: # interface was likely not found.
continue
android_wap_match = android_wap_match_regex.search(stdout)
# Look for an interface on 192.168.4[2-4].0/24.
if android_wap_match is not None:
# If the default gateway is not through a likely Android WAN interface,
# tethering may be active but is not likely to be used.
default_gateway = GetDefaultGateway()
logging.debug('Default gateway: %s', str(default_gateway))
default_gateway_prefix = '192.168.%s.' % android_wap_match.group(1)
      if (not default_gateway or
          not default_gateway.startswith(default_gateway_prefix)):
        return False
# IP, netmask, gateway look like Android WAP, so check dnsmasq.
# Request needs to be explicitly top level, as Linux uses
# ndots:2 which would turn VERSION.BIND (without trailing dot) into
# VERSION.BIND.foo.example.com in some cases.
cmd = [HOST, '-W', '5', '-c', 'CHAOS', '-t', 'txt', 'VERSION.BIND.',
default_gateway]
try:
return_code, stdout, unused_err = flight_common.Exec(cmd)
except OSError:
return_code = None
if return_code != 0:
continue
dnsmasq_match = re.search(
r'VERSION\.BIND descriptive text "dnsmasq-.*"', stdout)
if dnsmasq_match is not None:
# IP, netmask and dnsmasq all match Android WAP tethering.
return True
return False
def IsOnIosWap():
"""Checks if the wireless connection is to an iOS WAP tether.
Returns:
Boolean. True if iOS WAP is connected, False otherwise.
"""
# iOS WAP looks like a 172.20.10/28 network. Gateway is
# 172.20.10.1 with TCP port 62078 open.
gateway = GetNetworkGateway(IOS_WAP_NETWORK_GATEWAY_SUBNET)
if not gateway:
return False
ip = GetDefaultGateway()
if not ip:
return False
if ip != IOS_WAP_DEFAULT_GATEWAY_IP:
return False
  sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  try:
    result = sock.connect_ex((ip, 62078))
  finally:
    sock.close()
  return result == 0
def IsOnMifi():
"""Checks if the wireless connection is to a MiFi-like device.
These devices are available from Verizon, Sprint, others, and usually
offer some kind of web access portal that says MiFi or Jetpack as a text
string.
Returns:
Bool, True if the connection is a likely MiFi-like device, False if not.
"""
ip = GetDefaultGateway()
if not ip:
return False
if ip.startswith('192.168.1.'): # Verizon and Sprint devices
http_status, body = GetHttpResource(ip, redir=True)
# MiFi-like devices usually run a http interface. It returns a long http
# response with various easily found "MiFi" or "Jetpack" strings in it
# when loaded. No http auth challenge is issued.
if http_status == 200 and body and ('MiFi' in body or 'Jetpack' in body):
return True
elif ip == '192.168.8.1': # common Huawei gateway
http_status, _ = GetHttpResource(ip, redir=False)
return http_status == 307
return False
|
|
import os
import json
import sys
from pprint import pprint
def azure_login(agent):
try:
agent.shell("az account show")
print("Azure CLI login verified")
except Exception as e:
print("It appears that you are not logged into the azure cli...")
print("Please login to the azure cli : azure login")
print("You will need to follow the instructions, then restart this script")
sys.exit(2)
def azure_set_account(agent):
accs = agent.shell("az account list")
print(accs)
accounts = json.loads(accs)
sub_id = None
while not sub_id:
input_id = input("Please paste the Subscription ID you are using : ")
for account in accounts:
if input_id in account['id']:
sub_id = input_id
print("Using subscription [{0}]".format(sub_id))
agent.shell("az account set -s {0}".format(sub_id))
return sub_id
def azure_create_app(agent,
name,
homepage, # "https://test.com"
identifier, # "https://test.com"
password):
agent.shell("az ad app create --display-name {0} --homepage {1} --identifier-uris {2} --password {3}".format(name,
homepage,
identifier,
password))
def config_get_app_list(agent):
user_apps = {}
creds = load_config(agent)
for app, cred in creds.items():
user_apps[app] = cred['AZURE_CLIENT_ID']
return user_apps
def azure_get_account(agent):
try:
return json.loads(agent.shell("az account show"))
    except Exception:
return None
def azure_get_user(agent):
account = azure_get_account(agent)
return account['user']['name']
def azure_get_subscription_id(agent):
account = azure_get_account(agent)
if account:
return account['id']
return None
def get_azure_config_file_name(agent):
account = azure_get_account(agent)
if account:
return "az.{0}_{1}.conf".format(account['user']['name'], account['name'])
return None
def save_config(creds_dict, agent):
path = "{0}/.azure/".format(os.getenv("HOME"))
filename = get_azure_config_file_name(agent)
if not os.path.isdir(path):
try:
os.mkdir(path)
except OSError as e:
raise Exception("Failed to create the directory [{0}] to save credentials."
" Please either move it or alter the permissions")
with open(path+filename, 'w') as f:
f.write(json.dumps(creds_dict))
f.flush()
def load_config(agent):
path = "{0}/.azure/".format(os.getenv("HOME"))
filename = get_azure_config_file_name(agent)
if filename is None or not os.path.isfile(path+filename):
return {}
with open(path+filename, 'r') as f:
data = f.read()
return json.loads(data)
def azure_get_user_name(agent):
account = azure_get_account(agent)
return account['user']['name']
def azure_show_service_principals(agent):
sps = []
apps = config_get_app_list(agent)
    for app_name, app_id in apps.items():
        sps.append(json.loads(agent.shell("az ad sp show --id {0}".format(app_id))))
return sps
def azure_show_apps(agent):
applications = []
apps = config_get_app_list(agent)
    for app_name, app_id in apps.items():
        applications.append(json.loads(agent.shell("az ad app show --id {0}".format(app_id))))
return applications
def azure_get_service_principals(agent):
return json.loads(agent.shell("az ad sp list"))
def azure_get_apps(agent):
return json.loads(agent.shell("az ad app list"))
def azure_get_role_assignment(agent, object_id):
try:
return json.loads(
agent.shell("az role assignment list --assignee {0}".format(object_id)))[0]
    except Exception:
return None
def azure_get_application(agent, key, value):
for app in azure_get_apps(agent):
if value in app[key]:
return app
return None
def azure_get_service_principal(agent, key, value):
for sp in azure_get_service_principals(agent):
if value in sp[key]:
return sp
return None
def azure_create_service_principal(agent, name):
app = azure_get_application(agent, 'displayName', name)
agent.shell("az ad sp create --id {0}".format(app['appId']))
return azure_get_service_principal(agent, 'displayName', name)
def azure_delete_service_principal(agent, sp_id):
print(agent.shell('az ad sp delete --id {0}'.format(sp_id)))
def azure_create_role_assignment(agent, assignee):
try:
agent.shell("az role assignment create --assignee {0} --role Contributor".format(assignee))
return azure_get_role_assignment(agent, assignee)
except Exception as e:
print("Role assignment failed! ", e)
def azure_delete_role_assignment(agent, assignee):
agent.shell("az role assignment delete --assignee {0}".format(assignee))
def create_cred_elements(agent, app_name, app_hostname, subscription_id, password):
app = azure_get_application(agent, 'displayName', app_name)
if not app:
print("Creating an application for [{0}]".format(app_name))
azure_create_app(agent,
app_name,
"https://{0}.com".format(app_hostname),
"https://{0}.com".format(app_hostname),
password)
else:
print("App exists : {0}".format(app['displayName']))
sp = azure_get_service_principal(agent, 'displayName', app_name)
if not sp:
print("Creating a service principal for [{0}]".format(app_name))
sp = azure_create_service_principal(agent, app_name)
else:
print("Service Principal exists : {0}".format(sp['displayName']))
role = azure_get_role_assignment(agent, sp['objectId'])
if not role:
print("Creating a role assignment for [{0}]".format(app_name))
role = azure_create_role_assignment(agent, sp['objectId'])
print("Created role [{0}] for application [{1}]".format(role['roleDefinitionName'], app_name))
else:
print("Role exists : {0}".format(role['roleDefinitionName']))
if 'Contributor' not in role['roleDefinitionName']:
sys.exit("Error: The service principal role is not set to contributor [{0}]".format(role['roleDefinitionName']))
return get_arm_creds(agent, app_name, subscription_id, password)
def azure_show_cred_elements(agent, app_name):
sp = azure_get_service_principal(agent, 'displayName', app_name)
print("")
pprint(sp, indent=2) if sp else print("No service principal for [{0}]".format(app_name))
print("")
if sp:
role = azure_get_role_assignment(agent, sp['objectId'])
pprint(role, indent=2) if role else print("No role assignment for [{0}]".format(app_name))
print("")
def get_arm_creds(agent, app_name, subscription_id, password):
sp = azure_get_service_principal(agent, 'displayName', app_name)
return {
app_name: {
'AZURE_SUBSCRIPTION_ID': subscription_id,
'AZURE_TENANT_ID': sp['additionalProperties']['appOwnerTenantId'],
'AZURE_CLIENT_ID': sp['appId'],
'AZURE_CLIENT_SECRET': password
}
}
def azure_delete_cred_elements(agent, app_name):
sp = azure_get_service_principal(agent, 'displayName', app_name)
if sp:
role = azure_get_role_assignment(agent, sp['objectId'])
if role:
            print("Removing role assignment for [{0}]".format(sp['objectId']))
            azure_delete_role_assignment(agent, sp['objectId'])
print("Removing service principal for [{0}]".format(sp['objectId']))
azure_delete_service_principal(agent, sp['objectId'])
else:
app = azure_get_application(agent, 'displayName', app_name)
if app:
print("Removing application due to missing service principal for [{0}]".format(app_name))
agent.shell("az ad app delete --objectId {0}".format(app['objectId']))
creds = load_config(agent)
try:
del creds[app_name]
print("Deleting application [{0}] credentials".format(app_name))
save_config(creds,agent)
except KeyError as e:
print("Application [{0}] has already been removed from your credentials".format(app_name))
|
|
from __future__ import absolute_import
import datetime
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from .models import Book, BookSigning
class ArchiveIndexViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'regressiontests.generic_views.urls'
    def _make_books(self, n, base_date):
        for i in range(n):
            Book.objects.create(
                name='Book %d' % i,
                slug='book-%d' % i,
                pages=100+i,
                pubdate=base_date - datetime.timedelta(days=i))
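    # e.g. _make_books(20, base_date=datetime.date.today()) creates one book
    # per day, counting backwards from base_date.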
def test_archive_view(self):
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'], Book.objects.dates('pubdate', 'year')[::-1])
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'generic_views/book_archive.html')
def test_archive_view_context_object_name(self):
res = self.client.get('/dates/books/context_object_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'], Book.objects.dates('pubdate', 'year')[::-1])
self.assertEqual(list(res.context['thingies']), list(Book.objects.all()))
self.assertFalse('latest' in res.context)
self.assertTemplateUsed(res, 'generic_views/book_archive.html')
def test_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 404)
def test_allow_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get('/dates/books/allow_empty/')
self.assertEqual(res.status_code, 200)
        self.assertEqual(list(res.context['date_list']), [])
self.assertTemplateUsed(res, 'generic_views/book_archive.html')
def test_archive_view_template(self):
res = self.client.get('/dates/books/template_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'], Book.objects.dates('pubdate', 'year')[::-1])
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'generic_views/list.html')
def test_archive_view_template_suffix(self):
res = self.client.get('/dates/books/template_name_suffix/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'], Book.objects.dates('pubdate', 'year')[::-1])
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'generic_views/book_detail.html')
def test_archive_view_invalid(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/dates/books/invalid/')
def test_paginated_archive_view(self):
self._make_books(20, base_date=datetime.date.today())
res = self.client.get('/dates/books/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'], Book.objects.dates('pubdate', 'year')[::-1])
self.assertEqual(list(res.context['latest']), list(Book.objects.all()[0:10]))
self.assertTemplateUsed(res, 'generic_views/book_archive.html')
res = self.client.get('/dates/books/paginated/?page=2')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['page_obj'].number, 2)
self.assertEqual(list(res.context['latest']), list(Book.objects.all()[10:20]))
def test_paginated_archive_view_does_not_load_entire_table(self):
# Regression test for #18087
self._make_books(20, base_date=datetime.date.today())
# 1 query for years list + 1 query for books
with self.assertNumQueries(2):
self.client.get('/dates/books/')
# same as above + 1 query to test if books exist
with self.assertNumQueries(3):
self.client.get('/dates/books/paginated/')
def test_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/')
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/')
self.assertEqual(res.status_code, 200)
class YearArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'regressiontests.generic_views.urls'
def test_year_view(self):
res = self.client.get('/dates/books/2008/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.datetime(2008, 10, 1)])
self.assertEqual(res.context['year'], '2008')
self.assertTemplateUsed(res, 'generic_views/book_archive_year.html')
def test_year_view_make_object_list(self):
res = self.client.get('/dates/books/2006/make_object_list/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.datetime(2006, 5, 1)])
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertTemplateUsed(res, 'generic_views/book_archive_year.html')
def test_year_view_empty(self):
res = self.client.get('/dates/books/1999/')
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/1999/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertEqual(list(res.context['book_list']), [])
def test_year_view_allow_future(self):
# Create a new book in the future
year = datetime.date.today().year + 1
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=datetime.date(year, 1, 1))
res = self.client.get('/dates/books/%s/' % year)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/allow_empty/' % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
res = self.client.get('/dates/books/%s/allow_future/' % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.datetime(year, 1, 1)])
def test_year_view_paginated(self):
res = self.client.get('/dates/books/2006/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertTemplateUsed(res, 'generic_views/book_archive_year.html')
def test_year_view_invalid_pattern(self):
res = self.client.get('/dates/books/no_year/')
self.assertEqual(res.status_code, 404)
def test_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/')
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/')
self.assertEqual(res.status_code, 200)
class MonthArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'regressiontests.generic_views.urls'
def test_month_view(self):
res = self.client.get('/dates/books/2008/oct/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/book_archive_month.html')
self.assertEqual(list(res.context['date_list']), [datetime.datetime(2008, 10, 1)])
self.assertEqual(list(res.context['book_list']),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))))
self.assertEqual(res.context['month'], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev months must be valid (#7164)
self.assertEqual(res.context['next_month'], None)
self.assertEqual(res.context['previous_month'], datetime.date(2006, 5, 1))
def test_month_view_allow_empty(self):
# allow_empty = False, empty month
res = self.client.get('/dates/books/2000/jan/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2000/jan/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['month'], datetime.date(2000, 1, 1))
# Since allow_empty=True, next/prev are allowed to be empty months (#7164)
self.assertEqual(res.context['next_month'], datetime.date(2000, 2, 1))
self.assertEqual(res.context['previous_month'], datetime.date(1999, 12, 1))
# allow_empty but not allow_future: next_month should be empty (#7164)
url = datetime.date.today().strftime('/dates/books/%Y/%b/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_month'], None)
def test_month_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60)).replace(day=1)
urlbit = future.strftime('%Y/%b').lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
# allow_future = False, future month
res = self.client.get('/dates/books/%s/' % urlbit)
self.assertEqual(res.status_code, 404)
# allow_future = True, valid future month
res = self.client.get('/dates/books/%s/allow_future/' % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'][0].date(), b.pubdate)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['month'], future)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty months (#7164)
self.assertEqual(res.context['next_month'], None)
self.assertEqual(res.context['previous_month'], datetime.date(2008, 10, 1))
# allow_future, but not allow_empty, with a current month. So next
# should be in the future (yup, #7164, again)
res = self.client.get('/dates/books/2008/oct/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_month'], future)
self.assertEqual(res.context['previous_month'], datetime.date(2006, 5, 1))
def test_month_view_paginated(self):
res = self.client.get('/dates/books/2008/oct/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)))
self.assertTemplateUsed(res, 'generic_views/book_archive_month.html')
def test_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/')
self.assertEqual(res.status_code, 200)
def test_month_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/no_month/')
self.assertEqual(res.status_code, 404)
def test_previous_month_without_content(self):
"Content can exist on any day of the previous month. Refs #14711"
self.pubdate_list = [
datetime.date(2010, month, day)
for month,day in ((9,1), (10,2), (11,3))
]
for pubdate in self.pubdate_list:
name = str(pubdate)
Book.objects.create(name=name, slug=name, pages=100, pubdate=pubdate)
res = self.client.get('/dates/books/2010/nov/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010,10,1))
# The following test demonstrates the bug
res = self.client.get('/dates/books/2010/nov/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010,10,1))
# The bug does not occur here because a Book with pubdate of Sep 1 exists
res = self.client.get('/dates/books/2010/oct/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010,9,1))
def test_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/')
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0, tzinfo=timezone.utc))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/')
self.assertEqual(res.status_code, 200)
class WeekArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'regressiontests.generic_views.urls'
def test_week_view(self):
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/book_archive_week.html')
self.assertEqual(res.context['book_list'][0], Book.objects.get(pubdate=datetime.date(2008, 10, 1)))
self.assertEqual(res.context['week'], datetime.date(2008, 9, 28))
# Since allow_empty=False, next/prev weeks must be valid
self.assertEqual(res.context['next_week'], None)
self.assertEqual(res.context['previous_week'], datetime.date(2006, 4, 30))
def test_week_view_allow_empty(self):
# allow_empty = False, empty week
res = self.client.get('/dates/books/2008/week/12/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2008/week/12/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['week'], datetime.date(2008, 3, 23))
# Since allow_empty=True, next/prev are allowed to be empty weeks
self.assertEqual(res.context['next_week'], datetime.date(2008, 3, 30))
self.assertEqual(res.context['previous_week'], datetime.date(2008, 3, 16))
# allow_empty but not allow_future: next_week should be empty
url = datetime.date.today().strftime('/dates/books/%Y/week/%U/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_week'], None)
def test_week_view_allow_future(self):
# January 7th always falls in week 1, given Python's definition of week numbers
future = datetime.date(datetime.date.today().year + 1, 1, 7)
future_sunday = future - datetime.timedelta(days=(future.weekday() + 1) % 7)
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
res = self.client.get('/dates/books/%s/week/1/' % future.year)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/week/1/allow_future/' % future.year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['week'], future_sunday)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty weeks
self.assertEqual(res.context['next_week'], None)
self.assertEqual(res.context['previous_week'], datetime.date(2008, 9, 28))
# allow_future, but not allow_empty, with a current week. So next
# should be in the future
res = self.client.get('/dates/books/2008/week/39/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_week'], future_sunday)
self.assertEqual(res.context['previous_week'], datetime.date(2006, 4, 30))
def test_week_view_paginated(self):
week_start = datetime.date(2008, 9, 28)
week_end = week_start + datetime.timedelta(days=7)
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)))
self.assertTemplateUsed(res, 'generic_views/book_archive_week.html')
def test_week_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/week/no_week/')
self.assertEqual(res.status_code, 404)
def test_week_start_Monday(self):
# Regression for #14752
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['week'], datetime.date(2008, 9, 28))
res = self.client.get('/dates/books/2008/week/39/monday/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['week'], datetime.date(2008, 9, 29))
def test_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/week/13/')
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/week/13/')
self.assertEqual(res.status_code, 200)
class DayArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'regressiontests.generic_views.urls'
def test_day_view(self):
res = self.client.get('/dates/books/2008/oct/01/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/book_archive_day.html')
self.assertEqual(list(res.context['book_list']),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))))
self.assertEqual(res.context['day'], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev days must be valid.
self.assertEqual(res.context['next_day'], None)
self.assertEqual(res.context['previous_day'], datetime.date(2006, 5, 1))
def test_day_view_allow_empty(self):
# allow_empty = False, empty month
res = self.client.get('/dates/books/2000/jan/1/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2000/jan/1/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['day'], datetime.date(2000, 1, 1))
# Since it's allow empty, next/prev are allowed to be empty months (#7164)
self.assertEqual(res.context['next_day'], datetime.date(2000, 1, 2))
self.assertEqual(res.context['previous_day'], datetime.date(1999, 12, 31))
# allow_empty but not allow_future: next_month should be empty (#7164)
url = datetime.date.today().strftime('/dates/books/%Y/%b/%d/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_day'], None)
def test_day_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60))
urlbit = future.strftime('%Y/%b/%d').lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
# allow_future = False, future month
res = self.client.get('/dates/books/%s/' % urlbit)
self.assertEqual(res.status_code, 404)
# allow_future = True, valid future month
res = self.client.get('/dates/books/%s/allow_future/' % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['day'], future)
# allow_future but not allow_empty, next/prev must be valid
self.assertEqual(res.context['next_day'], None)
self.assertEqual(res.context['previous_day'], datetime.date(2008, 10, 1))
# allow_future, but not allow_empty, with a current month.
res = self.client.get('/dates/books/2008/oct/01/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_day'], future)
self.assertEqual(res.context['previous_day'], datetime.date(2006, 5, 1))
def test_day_view_paginated(self):
res = self.client.get('/dates/books/2008/oct/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10, pubdate__day=1)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10, pubdate__day=1)))
self.assertTemplateUsed(res, 'generic_views/book_archive_day.html')
def test_next_prev_context(self):
res = self.client.get('/dates/books/2008/oct/01/')
self.assertEqual(res.content, "Archive for Oct. 1, 2008. Previous day is May 1, 2006")
def test_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/01/')
self.assertEqual(res.status_code, 200)
def test_day_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/oct/no_day/')
self.assertEqual(res.status_code, 404)
def test_today_view(self):
res = self.client.get('/dates/books/today/')
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/today/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['day'], datetime.date.today())
def test_datetime_day_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_day_view(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) > 2008-04-01T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 404)
class DateDetailViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'regressiontests.generic_views.urls'
def test_date_detail_by_pk(self):
res = self.client.get('/dates/books/2008/oct/01/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Book.objects.get(pk=1))
self.assertEqual(res.context['book'], Book.objects.get(pk=1))
self.assertTemplateUsed(res, 'generic_views/book_detail.html')
def test_date_detail_by_slug(self):
res = self.client.get('/dates/books/2006/may/01/byslug/dreaming-in-code/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], Book.objects.get(slug='dreaming-in-code'))
def test_date_detail_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/01/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], Book.objects.get(pk=1))
def test_date_detail_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60))
urlbit = future.strftime('%Y/%b/%d').lower()
b = Book.objects.create(name="The New New Testement", slug="new-new", pages=600, pubdate=future)
res = self.client.get('/dates/books/%s/new-new/' % urlbit)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/%s/allow_future/' % (urlbit, b.id))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], b)
self.assertTemplateUsed(res, 'generic_views/book_detail.html')
def test_invalid_url(self):
self.assertRaises(AttributeError, self.client.get, "/dates/books/2008/oct/01/nopk/")
def test_get_object_custom_queryset(self):
"""
Ensure that custom querysets are used when provided to
BaseDateDetailView.get_object()
Refs #16918.
"""
res = self.client.get(
'/dates/books/get_object_custom_queryset/2006/may/01/2/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Book.objects.get(pk=2))
self.assertEqual(res.context['book'], Book.objects.get(pk=2))
self.assertTemplateUsed(res, 'generic_views/book_detail.html')
res = self.client.get(
'/dates/books/get_object_custom_queryset/2008/oct/01/1/')
self.assertEqual(res.status_code, 404)
def test_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) > 2008-04-01T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 404)
|
|
import queue
def part1():
class Computer:
def __init__(self):
self.registers = {}
self.last_played = None
self.index = 0
def print(self):
print(self.index, self.registers)
def getIndex(self):
return self.index
def value(self, x):
try:
return int(x)
            except ValueError:
return self.registers.get(x, 0)
# plays a sound with a frequency equal to the value of X.
def snd(self, x):
#print("playing", self.value(x))
self.last_played = self.value(x)
self.index += 1
# sets register X to the value of Y
def set(self, x, y):
self.registers[x] = self.value(y)
self.index += 1
# increases register X by the value of Y
def add(self, x, y):
self.registers[x] = self.value(x) + self.value(y)
self.index += 1
# sets register X to the result of multiplying the value contained in register X by the value of Y.
def mul(self, x, y):
self.registers[x] = self.value(x) * self.value(y)
self.index += 1
# sets register X to the remainder of dividing the value contained in register X by the value of Y (that is, it sets X to the result of X modulo Y)
def mod(self, x, y):
self.registers[x] = self.value(x) % self.value(y)
self.index += 1
# recovers the frequency of the last sound played, but only when the value of X is not zero. (If it is zero, the command does nothing.)
def rcv(self, x):
val = self.value(x)
if val != 0:
print("part1:", self.last_played)
return 1
self.index += 1
# jumps with an offset of the value of Y, but only if the value of X is greater than zero. (An offset of 2 skips the next instruction, an offset of -1 jumps to the previous instruction, and so on.)
def jgz(self, x, y):
xval = self.value(x)
if xval > 0:
self.index += self.value(y)
else:
self.index += 1
computer = Computer()
cmd_map = {
"snd": computer.snd,
"set": computer.set,
"add": computer.add,
"mul": computer.mul,
"mod": computer.mod,
"rcv": computer.rcv,
"jgz": computer.jgz
}
commands = []
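    # Each input line is an opcode plus one or two operands; an operand is
    # either a register name or an integer literal, e.g.:
    #   set a 1
    #   add a 2
    #   jgz a -1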
with open("inputs/day18.txt") as f:
for line in f:
line = line.strip().split()
cmd = line[0]
args = line[1:]
commands.append((cmd_map[cmd], args))
#computer.print()
index = computer.getIndex()
while index >= 0 and index < len(commands):
index = computer.getIndex()
(cmd, args) = commands[index]
r = cmd(*args)
#computer.print()
if r is not None:
break
part1()
def part2():
class Computer:
def __init__(self, filename, id, sendQ, recvQ):
self.id = id
self.registers = {'p': id}
self.last_played = None
self.index = 0
self.sendQ = sendQ
self.recvQ = recvQ
self.deadlock = False
self.send_count = 0
self.parseFile(filename)
def parseFile(self, filename):
cmd_map = {
"snd": self.snd,
"set": self.set,
"add": self.add,
"mul": self.mul,
"mod": self.mod,
"rcv": self.rcv,
"jgz": self.jgz
}
self.commands = []
with open(filename) as f:
for line in f:
line = line.strip().split()
cmd = line[0]
args = line[1:]
self.commands.append((cmd_map[cmd], args))
def process(self):
self.deadlock = True
while self.index >= 0 and self.index < len(self.commands):
(cmd, args) = self.commands[self.index]
r = cmd(*args)
if r == 1: # deadlocked
break
else:
self.deadlock = False
def print(self):
print("id: %d, index: %d" % (self.id, self.index), self.registers)
def getIndex(self):
return self.index
def value(self, x):
try:
return int(x)
            except ValueError:
return self.registers.get(x, 0)
# sends the value of X to the other program.
def snd(self, x):
val = self.value(x)
#print("id %d sending %d" % (self.id, val))
self.sendQ.put(val)
self.index += 1
self.send_count += 1
# sets register X to the value of Y
def set(self, x, y):
#print("id %d set" % self.id)
self.registers[x] = self.value(y)
self.index += 1
# increases register X by the value of Y
def add(self, x, y):
#print("id %d add" % self.id)
self.registers[x] = self.value(x) + self.value(y)
self.index += 1
# sets register X to the result of multiplying the value contained in register X by the value of Y.
def mul(self, x, y):
#print("id %d mul" % self.id)
self.registers[x] = self.value(x) * self.value(y)
self.index += 1
# sets register X to the remainder of dividing the value contained in register X by the value of Y (that is, it sets X to the result of X modulo Y)
def mod(self, x, y):
#print("id %d mod" % self.id)
self.registers[x] = self.value(x) % self.value(y)
self.index += 1
# receives the next value and stores it in register X
def rcv(self, x):
#print("id %d recv" % self.id)
try:
val = self.recvQ.get(block=False)
self.registers[x] = val
self.index += 1
except queue.Empty:
return 1
# jumps with an offset of the value of Y, but only if the value of X is greater than zero. (An offset of 2 skips the next instruction, an offset of -1 jumps to the previous instruction, and so on.)
def jgz(self, x, y):
#print("id %d jgz" % self.id)
xval = self.value(x)
if xval > 0:
self.index += self.value(y)
else:
self.index += 1
q0 = queue.Queue()
q1 = queue.Queue()
filename = "inputs/day18.txt"
p0 = Computer(filename, 0, q1, q0)
p1 = Computer(filename, 1, q0, q1)
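    # Run the two programs round-robin: each process() call executes until
    # its program blocks on an empty receive queue (deadlock stays True) or
    # terminates. When a full round passes with neither side making
    # progress, both queues are dry and the system is deadlocked.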
while not p0.deadlock or not p1.deadlock:
#print("New Round:")
#p0.print()
#p1.print()
p0.process()
p1.process()
print("part2:", p1.send_count)
part2()
|
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import shutil
import tempfile
import warnings
import numpy
import tables
from tables.exceptions import FlavorWarning
from tables.tests import common
from tables.tests.common import allequal
from tables.tests.common import unittest, test_filename
from tables.tests.common import PyTablesTestCase as TestCase
# Check read Tables from pytables version 0.8
class BackCompatTablesTestCase(TestCase):
def test01_readTable(self):
"""Checking backward compatibility of old formats of tables."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test01_readTable..." % self.__class__.__name__)
# Create an instance of an HDF5 Table
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=UserWarning)
h5file = tables.open_file(test_filename(self.h5fname), "r")
try:
table = h5file.get_node("/tuple0")
# Read the 100 records
result = [rec['var2'] for rec in table]
if common.verbose:
print("Nrows in", table._v_pathname, ":", table.nrows)
print("Last record in table ==>", rec)
print("Total selected records in table ==> ", len(result))
self.assertEqual(len(result), 100)
finally:
h5file.close()
@unittest.skipIf(not common.lzo_avail, 'lzo not available')
class Table2_1LZO(BackCompatTablesTestCase):
# pytables 0.8.x versions and after
h5fname = "Table2_1_lzo_nrv2e_shuffle.h5"
@unittest.skipIf(not common.lzo_avail, 'lzo not available')
class Tables_LZO1(BackCompatTablesTestCase):
h5fname = "Tables_lzo1.h5" # files compressed with LZO1
@unittest.skipIf(not common.lzo_avail, 'lzo not available')
class Tables_LZO1_shuffle(BackCompatTablesTestCase):
# files compressed with LZO1 and shuffle
h5fname = "Tables_lzo1_shuffle.h5"
@unittest.skipIf(not common.lzo_avail, 'lzo not available')
class Tables_LZO2(BackCompatTablesTestCase):
h5fname = "Tables_lzo2.h5" # files compressed with LZO2
@unittest.skipIf(not common.lzo_avail, 'lzo not available')
class Tables_LZO2_shuffle(BackCompatTablesTestCase):
# files compressed with LZO2 and shuffle
h5fname = "Tables_lzo2_shuffle.h5"
# Check read attributes from PyTables >= 1.0 properly
class BackCompatAttrsTestCase(common.TestFileMixin, TestCase):
FILENAME = "zerodim-attrs-%s.h5"
def setUp(self):
self.h5fname = test_filename(self.FILENAME % self.format)
super(BackCompatAttrsTestCase, self).setUp()
def test01_readAttr(self):
"""Checking backward compatibility of old formats for attributes."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test01_readAttr..." % self.__class__.__name__)
# Read old formats
a = self.h5file.get_node("/a")
scalar = numpy.array(1, dtype="int32")
vector = numpy.array([1], dtype="int32")
if self.format == "1.3":
self.assertTrue(allequal(a.attrs.arrdim1, vector))
self.assertTrue(allequal(a.attrs.arrscalar, scalar))
self.assertEqual(a.attrs.pythonscalar, 1)
elif self.format == "1.4":
self.assertTrue(allequal(a.attrs.arrdim1, vector))
self.assertTrue(allequal(a.attrs.arrscalar, scalar))
self.assertTrue(allequal(a.attrs.pythonscalar, scalar))
class Attrs_1_3(BackCompatAttrsTestCase):
format = "1.3" # pytables 1.0.x versions and earlier
class Attrs_1_4(BackCompatAttrsTestCase):
format = "1.4" # pytables 1.1.x versions and later
class VLArrayTestCase(common.TestFileMixin, TestCase):
h5fname = test_filename("flavored_vlarrays-format1.6.h5")
def test01_backCompat(self):
"""Checking backward compatibility with old flavors of VLArray."""
# Check that we can read the contents without problems (nor warnings!)
vlarray1 = self.h5file.root.vlarray1
self.assertEqual(vlarray1.flavor, "numeric")
vlarray2 = self.h5file.root.vlarray2
self.assertEqual(vlarray2.flavor, "python")
self.assertEqual(vlarray2[1], [b'5', b'6', b'77'])
# Make sure that 1.x files with TimeXX types continue to be readable
# and that its byteorder is correctly retrieved.
class TimeTestCase(common.TestFileMixin, TestCase):
# Open a PYTABLES_FORMAT_VERSION=1.x file
h5fname = test_filename("time-table-vlarray-1_x.h5")
def test00_table(self):
"""Checking backward compatibility with old TimeXX types (tables)."""
# Check that we can read the contents without problems (nor warnings!)
table = self.h5file.root.table
self.assertEqual(table.byteorder, "little")
def test01_vlarray(self):
"""Checking backward compatibility with old TimeXX types (vlarrays)."""
# Check that we can read the contents without problems (nor warnings!)
vlarray4 = self.h5file.root.vlarray4
self.assertEqual(vlarray4.byteorder, "little")
        vlarray8 = self.h5file.root.vlarray8
self.assertEqual(vlarray8.byteorder, "little")
class OldFlavorsTestCase01(TestCase):
close = False
# numeric
def test01_open(self):
"""Checking opening of (X)Array (old 'numeric' flavor)"""
# Open the HDF5 with old numeric flavor
h5fname = test_filename("oldflavor_numeric.h5")
with tables.open_file(h5fname) as h5file:
# Assert other properties in array
self.assertEqual(h5file.root.array1.flavor, 'numeric')
self.assertEqual(h5file.root.array2.flavor, 'python')
self.assertEqual(h5file.root.carray1.flavor, 'numeric')
self.assertEqual(h5file.root.carray2.flavor, 'python')
self.assertEqual(h5file.root.vlarray1.flavor, 'numeric')
self.assertEqual(h5file.root.vlarray2.flavor, 'python')
def test02_copy(self):
"""Checking (X)Array.copy() method ('numetic' flavor)"""
srcfile = test_filename("oldflavor_numeric.h5")
tmpfile = tempfile.mktemp(".h5")
shutil.copy(srcfile, tmpfile)
try:
# Open the HDF5 with old numeric flavor
with tables.open_file(tmpfile, "r+") as h5file:
# Copy to another location
self.assertWarns(FlavorWarning,
h5file.root.array1.copy, '/', 'array1copy')
h5file.root.array2.copy('/', 'array2copy')
h5file.root.carray1.copy('/', 'carray1copy')
h5file.root.carray2.copy('/', 'carray2copy')
h5file.root.vlarray1.copy('/', 'vlarray1copy')
h5file.root.vlarray2.copy('/', 'vlarray2copy')
if self.close:
h5file.close()
h5file = tables.open_file(tmpfile)
else:
h5file.flush()
# Assert other properties in array
self.assertEqual(h5file.root.array1copy.flavor, 'numeric')
self.assertEqual(h5file.root.array2copy.flavor, 'python')
self.assertEqual(h5file.root.carray1copy.flavor, 'numeric')
self.assertEqual(h5file.root.carray2copy.flavor, 'python')
self.assertEqual(h5file.root.vlarray1copy.flavor, 'numeric')
self.assertEqual(h5file.root.vlarray2copy.flavor, 'python')
finally:
os.remove(tmpfile)
class OldFlavorsTestCase02(TestCase):
close = True
def suite():
theSuite = unittest.TestSuite()
niter = 1
for n in range(niter):
theSuite.addTest(unittest.makeSuite(VLArrayTestCase))
theSuite.addTest(unittest.makeSuite(TimeTestCase))
theSuite.addTest(unittest.makeSuite(OldFlavorsTestCase01))
theSuite.addTest(unittest.makeSuite(OldFlavorsTestCase02))
theSuite.addTest(unittest.makeSuite(Table2_1LZO))
theSuite.addTest(unittest.makeSuite(Tables_LZO1))
theSuite.addTest(unittest.makeSuite(Tables_LZO1_shuffle))
theSuite.addTest(unittest.makeSuite(Tables_LZO2))
theSuite.addTest(unittest.makeSuite(Tables_LZO2_shuffle))
return theSuite
if __name__ == '__main__':
import sys
common.parse_argv(sys.argv)
common.print_versions()
unittest.main(defaultTest='suite')
|
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2020, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import ticker, rc
def _lorentz(freq, inten, x, fwhm):
y = np.zeros(len(x))
for fdx, idx in zip(freq, inten):
y += 1/(2*np.pi)*idx*fwhm/((fdx-x)**2+(0.5*fwhm)**2)
return y
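# A minimal usage sketch for _lorentz with hypothetical stick data: two bands
# at 1000 and 1650 cm^-1 broadened onto a 1 cm^-1 grid. A unit-intensity band
# peaks at 2/(pi*fwhm), ~0.0424 for fwhm=15:
#
#   >>> x = np.arange(900.0, 1800.0, 1.0)
#   >>> y = _lorentz(freq=[1000.0, 1650.0], inten=[1.0, 0.5], x=x, fwhm=15)
#   >>> round(float(y.max()), 4)
#   0.0424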
class PlotVROA:
# variable to keep track of the figure count
# more important when using it outside of a jupyter notebook
_fig_count = 0
# list object that will hold all of the plots that are created
vroa = []
raman = []
def single_vroa(self, vroa, **kwargs):
if not hasattr(vroa, "scatter"):
raise AttributeError("Please compute scatter dataframe")
forward = kwargs.pop('forw', False)
backward = kwargs.pop('back', False)
if not(forward or backward):
raise ValueError("Must set forward (forw) or backward (back) scattering variables to True")
elif forward and backward:
raise ValueError("Can only set forward (forw) or backward (back) scattering variables to True. Both are True.")
if forward:
sct = 'forwardscatter'
elif backward:
sct = 'backscatter'
title = kwargs.pop('title', '')
xlabel = kwargs.pop('xlabel', '')
ylabel = kwargs.pop('ylabel', '')
marker = kwargs.pop('marker', '')
line = kwargs.pop('line', '-')
figsize = kwargs.pop('figsize', (8,8))
dpi = kwargs.pop('dpi', 50)
xrange = kwargs.pop('xrange', None)
yrange = kwargs.pop('yrange', None)
fwhm = kwargs.pop('fwhm', 15)
res = kwargs.pop('res', 1)
grid = kwargs.pop('grid', False)
legend = kwargs.pop('legend', True)
exc_units = kwargs.pop('exc_units', 'nm')
invert_x = kwargs.pop('invert_x', False)
font = kwargs.pop('font', 10)
if not isinstance(figsize, tuple):
raise TypeError("figsize must be a tuple not {}".format(type(figsize)))
rc('font', size=font)
grouped = vroa.scatter.groupby('exc_freq')
exc_freq = vroa.scatter['exc_freq'].drop_duplicates()
for _, val in enumerate(exc_freq):
fig = plt.figure(self._fig_count, figsize=figsize, dpi=dpi)
inten = grouped.get_group(val)[sct].values
freq = grouped.get_group(val)['freq'].values
if xrange is None:
x = np.arange(freq[0]-fwhm*3, freq[-1]+fwhm*3, res)
else:
x = np.arange(xrange[0], xrange[1], res)
y = _lorentz(freq=freq, inten=inten, x=x, fwhm=fwhm)
#y_bar = _lorentz(freq=freq, inten=inten, x=x, fwhm=fwhm)
ax = fig.add_subplot(111)
ax.plot(x,y,marker=marker,linestyle=line,
label=str(val)+' '+exc_units if val != -1 else "unk")
#ax.bar(freq, y_bar*0.5, width=fwhm*0.35)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.set_title(title)
if xrange is not None:
if invert_x:
ax.set_xlim(xrange[1],xrange[0])
else:
ax.set_xlim(xrange)
if yrange is not None:
ax.set_ylim(yrange)
if grid:
ax.grid(grid)
if legend:
ax.legend()
fig.tight_layout()
self._fig_count += 1
self.vroa.append(fig)
def multiple_vroa(self, vroa, **kwargs):
if not hasattr(vroa, "scatter"):
raise AttributeError("Please compute scatter dataframe")
forward = kwargs.pop('forw', False)
backward = kwargs.pop('back', False)
if not(forward or backward):
raise ValueError("Must set forward (forw) or backward (back) scattering variables to True")
elif forward and backward:
raise ValueError("Can only set forward (forw) or backward (back) scattering variables to True. Both are True.")
if forward:
sct = 'forwardscatter'
elif backward:
sct = 'backscatter'
title = kwargs.pop('title', '')
xlabel = kwargs.pop('xlabel', '')
ylabel = kwargs.pop('ylabel', '')
marker = kwargs.pop('marker', '')
line = kwargs.pop('line', '-')
figsize = kwargs.pop('figsize', (8,8))
dpi = kwargs.pop('dpi', 50)
xrange = kwargs.pop('xrange', None)
yrange = kwargs.pop('yrange', None)
fwhm = kwargs.pop('fwhm', 15)
res = kwargs.pop('res', 1)
grid = kwargs.pop('grid', True)
legend = kwargs.pop('legend', True)
exc_units = kwargs.pop('exc_units', 'nm')
normalize = kwargs.pop('normalize', 'all')
invert_x = kwargs.pop('invert_x', False)
font = kwargs.pop('font', 10)
if not isinstance(figsize, tuple):
raise TypeError("figsize must be a tuple not {}".format(type(figsize)))
rc('font', size=font)
grouped = vroa.scatter.groupby('exc_freq')
exc_freq = vroa.scatter['exc_freq'].drop_duplicates().values
fig = plt.figure(self._fig_count, figsize=figsize, dpi=dpi)
ax = fig.add_subplot(111)
norm = []
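# note: 'max' normalization divides by the tallest possible Lorentzian peak
# height, max|I| * 2/(pi*fwhm), i.e. the value _lorentz reaches at x == f0
# for the most intense band (peak overlap neglected)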
if normalize == 'max':
norm = round(abs(vroa.scatter[sct].abs().max())*2/(np.pi*fwhm),4)
for idx, val in enumerate(exc_freq):
inten = grouped.get_group(val)[sct].values
freq = grouped.get_group(val)['freq'].values
if xrange is None:
x = np.arange(freq[0]-fwhm*3, freq[-1]+fwhm*3, res)
else:
x = np.arange(xrange[0], xrange[1], res)
y = _lorentz(freq=freq, inten=inten, x=x, fwhm=fwhm)
if normalize == 'max':
y /= norm
else:
norm.append(round(max(abs(y)),4))
y /= max(abs(y))
y += idx*2
ax.plot(x,y,marker=marker,linestyle=line,
label=str(val)+' '+exc_units if val != -1 else "unk")
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.set_title(title)
if xrange is not None:
if invert_x:
ax.set_xlim(xrange[1],xrange[0])
else:
ax.set_xlim(xrange)
if yrange is not None:
ax.set_ylim(yrange)
if grid:
ax.yaxis.grid(b=grid, which='major', color='k')
ax.yaxis.grid(b=grid, which='minor', color='k', linestyle='-', linewidth=4.0)
ax.xaxis.grid(b=grid)
if legend:
ax.legend(bbox_to_anchor=(1.02,1), loc="upper left")
majors = np.arange(0, len(exc_freq)*2, 2)
minors = majors + 1
minors = np.insert(minors, 0, majors[0]-1)
if normalize == 'max':
norm = np.repeat(norm, len(majors))
ax.yaxis.set_major_locator(ticker.FixedLocator(majors))
ax.yaxis.set_minor_locator(ticker.FixedLocator(minors))
ax.set_yticklabels(['{:4.3E}'.format(n) for n in norm])
fig.tight_layout()
self._fig_count += 1
self.vroa.append(fig)
def single_raman(self, raman, **kwargs):
if not hasattr(raman, "raman"):
raise AttributeError("Please compute raman dataframe")
title = kwargs.pop('title', '')
xlabel = kwargs.pop('xlabel', '')
ylabel = kwargs.pop('ylabel', '')
marker = kwargs.pop('marker', '')
line = kwargs.pop('line', '-')
figsize = kwargs.pop('figsize', (8,8))
dpi = kwargs.pop('dpi', 50)
xrange = kwargs.pop('xrange', None)
yrange = kwargs.pop('yrange', None)
fwhm = kwargs.pop('fwhm', 15)
res = kwargs.pop('res', 1)
grid = kwargs.pop('grid', False)
legend = kwargs.pop('legend', True)
exc_units = kwargs.pop('exc_units', 'nm')
invert_x = kwargs.pop('invert_x', False)
font = kwargs.pop('font', 10)
if not isinstance(figsize, tuple):
raise TypeError("figsize must be a tuple not {}".format(type(figsize)))
rc('font', size=font)
grouped = raman.raman.groupby('exc_freq')
exc_freq = raman.raman['exc_freq'].drop_duplicates()
for _, val in enumerate(exc_freq):
fig = plt.figure(self._fig_count, figsize=figsize, dpi=dpi)
inten = grouped.get_group(val)['raman_int'].values
freq = grouped.get_group(val)['freq'].values
if xrange is None:
x = np.arange(freq[0]-fwhm*3, freq[-1]+fwhm*3, res)
else:
x = np.arange(xrange[0], xrange[1], res)
y = _lorentz(freq=freq, inten=inten, x=x, fwhm=fwhm)
#y_bar = _lorentz(freq=freq, inten=inten, x=x, fwhm=fwhm)
ax = fig.add_subplot(111)
ax.plot(x,y,marker=marker,linestyle=line,
label=str(val)+' '+exc_units if val != -1 else "unk")
#ax.bar(freq, y_bar*0.35, width=fwhm*0.5)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.set_title(title)
if xrange is not None:
if invert_x:
ax.set_xlim(xrange[1],xrange[0])
else:
ax.set_xlim(xrange)
if yrange is not None:
ax.set_ylim(yrange)
if grid:
ax.grid(grid)
if legend:
ax.legend()
self._fig_count += 1
self.raman.append(fig)
|
|
# coding=utf-8
#
# Copyright 2014 Red Hat, Inc.
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A driver wrapping the Ironic API, such that Nova may provision
bare metal resources.
"""
import base64
import gzip
import logging as py_logging
import shutil
import tempfile
import time
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import importutils
import six
from nova.api.metadata import base as instance_metadata
from nova.compute import arch
from nova.compute import hv_type
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_mode
from nova import context as nova_context
from nova import exception
from nova.i18n import _
from nova.i18n import _LE
from nova.i18n import _LI
from nova.i18n import _LW
from nova import objects
from nova.openstack.common import loopingcall
from nova.virt import configdrive
from nova.virt import driver as virt_driver
from nova.virt import firewall
from nova.virt import hardware
from nova.virt.ironic import client_wrapper
from nova.virt.ironic import ironic_states
from nova.virt.ironic import patcher
ironic = None
LOG = logging.getLogger(__name__)
opts = [
cfg.IntOpt('api_version',
default=1,
help='Version of Ironic API service endpoint.'),
cfg.StrOpt('api_endpoint',
help='URL for Ironic API endpoint.'),
cfg.StrOpt('admin_username',
help='Ironic keystone admin name'),
cfg.StrOpt('admin_password',
secret=True,
help='Ironic keystone admin password.'),
cfg.StrOpt('admin_auth_token',
secret=True,
help='Ironic keystone auth token.'),
cfg.StrOpt('admin_url',
help='Keystone public API endpoint.'),
cfg.StrOpt('client_log_level',
deprecated_for_removal=True,
help='Log level override for ironicclient. Set this in '
'order to override the global "default_log_levels", '
'"verbose", and "debug" settings. '
'DEPRECATED: use standard logging configuration.'),
cfg.StrOpt('admin_tenant_name',
help='Ironic keystone tenant name.'),
cfg.IntOpt('api_max_retries',
default=60,
help='How many times to retry when a request conflicts.'),
cfg.IntOpt('api_retry_interval',
default=2,
help='Interval in seconds between retries when a '
'request conflicts.'),
]
ironic_group = cfg.OptGroup(name='ironic',
title='Ironic Options')
CONF = cfg.CONF
CONF.register_group(ironic_group)
CONF.register_opts(opts, ironic_group)
_POWER_STATE_MAP = {
ironic_states.POWER_ON: power_state.RUNNING,
ironic_states.NOSTATE: power_state.NOSTATE,
ironic_states.POWER_OFF: power_state.SHUTDOWN,
}
def map_power_state(state):
try:
return _POWER_STATE_MAP[state]
except KeyError:
LOG.warning(_LW("Power state %s not found."), state)
return power_state.NOSTATE
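# A minimal sketch of map_power_state (values from _POWER_STATE_MAP above);
# unknown states fall back to NOSTATE with a warning:
#
#   >>> map_power_state(ironic_states.POWER_ON) == power_state.RUNNING
#   True
#   >>> map_power_state('no-such-state') == power_state.NOSTATE
#   True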
def _validate_instance_and_node(ironicclient, instance):
"""Get the node associated with the instance.
Check with the Ironic service that this instance is associated with a
node, and return the node.
"""
try:
return ironicclient.call("node.get_by_instance_uuid", instance.uuid)
except ironic.exc.NotFound:
raise exception.InstanceNotFound(instance_id=instance.uuid)
def _get_nodes_supported_instances(cpu_arch=None):
"""Return supported instances for a node."""
if not cpu_arch:
return []
return [(cpu_arch,
hv_type.BAREMETAL,
vm_mode.HVM)]
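# Minimal sketch: without a cpu_arch the node advertises no supported
# instances; with one, a single (arch, baremetal, hvm) tuple is returned:
#
#   >>> _get_nodes_supported_instances()
#   []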
def _log_ironic_polling(what, node, instance):
power_state = (None if node.power_state is None else
'"%s"' % node.power_state)
tgt_power_state = (None if node.target_power_state is None else
'"%s"' % node.target_power_state)
prov_state = (None if node.provision_state is None else
'"%s"' % node.provision_state)
tgt_prov_state = (None if node.target_provision_state is None else
'"%s"' % node.target_provision_state)
LOG.debug('Still waiting for ironic node %(node)s to %(what)s: '
'power_state=%(power_state)s, '
'target_power_state=%(tgt_power_state)s, '
'provision_state=%(prov_state)s, '
'target_provision_state=%(tgt_prov_state)s',
dict(what=what,
node=node.uuid,
power_state=power_state,
tgt_power_state=tgt_power_state,
prov_state=prov_state,
tgt_prov_state=tgt_prov_state),
instance=instance)
class IronicDriver(virt_driver.ComputeDriver):
"""Hypervisor driver for Ironic - bare metal provisioning."""
capabilities = {"has_imagecache": False,
"supports_recreate": False,
"supports_migrate_to_same_host": False}
def __init__(self, virtapi, read_only=False):
super(IronicDriver, self).__init__(virtapi)
global ironic
if ironic is None:
ironic = importutils.import_module('ironicclient')
# NOTE(deva): work around a lack of symbols in the current version.
if not hasattr(ironic, 'exc'):
ironic.exc = importutils.import_module('ironicclient.exc')
if not hasattr(ironic, 'client'):
ironic.client = importutils.import_module(
'ironicclient.client')
self.firewall_driver = firewall.load_driver(
default='nova.virt.firewall.NoopFirewallDriver')
self.node_cache = {}
self.node_cache_time = 0
ironicclient_log_level = CONF.ironic.client_log_level
if ironicclient_log_level:
level = py_logging.getLevelName(ironicclient_log_level)
logger = py_logging.getLogger('ironicclient')
logger.setLevel(level)
self.ironicclient = client_wrapper.IronicClientWrapper()
def _node_resources_unavailable(self, node_obj):
"""Determine whether the node's resources are in an acceptable state.
Determines whether the node's resources should be presented
to Nova for use based on the current power, provision and maintenance
state. This is called after _node_resources_used, so any node that
is not used and not in AVAILABLE should be considered in a 'bad' state,
and unavailable for scheduling. Returns True if unacceptable.
"""
bad_power_states = [
ironic_states.ERROR, ironic_states.NOSTATE]
# keep NOSTATE around for compatibility
good_provision_states = [
ironic_states.AVAILABLE, ironic_states.NOSTATE]
return (node_obj.maintenance or
node_obj.power_state in bad_power_states or
node_obj.provision_state not in good_provision_states)
def _node_resources_used(self, node_obj):
"""Determine whether the node's resources are currently used.
Determines whether the node's resources should be considered used
or not. A node is used when it is either in the process of putting
a new instance on the node, has an instance on the node, or is in
the process of cleaning up from a deleted instance. Returns True if
used.
"""
used_provision_states = [
ironic_states.CLEANING, ironic_states.DEPLOYING,
ironic_states.DEPLOYWAIT, ironic_states.DEPLOYDONE,
ironic_states.ACTIVE, ironic_states.DELETING,
ironic_states.DELETED]
return (node_obj.instance_uuid is not None or
node_obj.provision_state in used_provision_states)
def _node_resource(self, node):
"""Helper method to create resource dict from node stats."""
vcpus = int(node.properties.get('cpus', 0))
memory_mb = int(node.properties.get('memory_mb', 0))
local_gb = int(node.properties.get('local_gb', 0))
raw_cpu_arch = node.properties.get('cpu_arch', None)
try:
cpu_arch = arch.canonicalize(raw_cpu_arch)
except exception.InvalidArchitectureName:
cpu_arch = None
if not cpu_arch:
LOG.warning(_LW("cpu_arch not defined for node '%s'"), node.uuid)
nodes_extra_specs = {}
# NOTE(deva): In Havana and Icehouse, the flavor was required to link
# to an arch-specific deploy kernel and ramdisk pair, and so the flavor
# also had to have extra_specs['cpu_arch'], which was matched against
# the ironic node.properties['cpu_arch'].
# With Juno, the deploy image(s) may be referenced directly by the
# node.driver_info, and a flavor no longer needs to contain any of
# these three extra specs, though the cpu_arch may still be used
# in a heterogeneous environment, if so desired.
# NOTE(dprince): we use the raw cpu_arch here because extra_specs
# filters aren't canonicalized
nodes_extra_specs['cpu_arch'] = raw_cpu_arch
# NOTE(gilliard): To assist with more precise scheduling, if the
# node.properties contains a key 'capabilities', we expect the value
# to be of the form "k1:v1,k2:v2,etc.." which we add directly as
# key/value pairs into the node_extra_specs to be used by the
# ComputeCapabilitiesFilter
capabilities = node.properties.get('capabilities')
if capabilities:
for capability in str(capabilities).split(','):
parts = capability.split(':')
if len(parts) == 2 and parts[0] and parts[1]:
nodes_extra_specs[parts[0]] = parts[1]
else:
LOG.warning(_LW("Ignoring malformed capability '%s'. "
"Format should be 'key:val'."), capability)
vcpus_used = 0
memory_mb_used = 0
local_gb_used = 0
if self._node_resources_used(node):
# Node is in the process of deploying, is deployed, or is in
# the process of cleaning up from a deploy. Report all of its
# resources as in use.
vcpus_used = vcpus
memory_mb_used = memory_mb
local_gb_used = local_gb
elif self._node_resources_unavailable(node):
# The node's current state is such that it should not present any
# of its resources to Nova
vcpus = 0
memory_mb = 0
local_gb = 0
dic = {
'hypervisor_hostname': str(node.uuid),
'hypervisor_type': self._get_hypervisor_type(),
'hypervisor_version': self._get_hypervisor_version(),
# The Ironic driver manages multiple hosts, so there are
# likely many different CPU models in use. As such it is
# impossible to provide any meaningful info on the CPU
# model of the "host"
'cpu_info': None,
'vcpus': vcpus,
'vcpus_used': vcpus_used,
'local_gb': local_gb,
'local_gb_used': local_gb_used,
'disk_available_least': local_gb - local_gb_used,
'memory_mb': memory_mb,
'memory_mb_used': memory_mb_used,
'supported_instances': jsonutils.dumps(
_get_nodes_supported_instances(cpu_arch)),
'stats': jsonutils.dumps(nodes_extra_specs),
}
return dic
def _start_firewall(self, instance, network_info):
self.firewall_driver.setup_basic_filtering(instance, network_info)
self.firewall_driver.prepare_instance_filter(instance, network_info)
self.firewall_driver.apply_instance_filter(instance, network_info)
def _stop_firewall(self, instance, network_info):
self.firewall_driver.unfilter_instance(instance, network_info)
def _add_driver_fields(self, node, instance, image_meta, flavor,
preserve_ephemeral=None):
patch = patcher.create(node).get_deploy_patch(instance,
image_meta,
flavor,
preserve_ephemeral)
# Associate the node with an instance
patch.append({'path': '/instance_uuid', 'op': 'add',
'value': instance.uuid})
try:
self.ironicclient.call('node.update', node.uuid, patch)
except ironic.exc.BadRequest:
msg = (_("Failed to add deploy parameters on node %(node)s "
"when provisioning the instance %(instance)s")
% {'node': node.uuid, 'instance': instance.uuid})
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
def _cleanup_deploy(self, context, node, instance, network_info,
flavor=None):
if flavor is None:
flavor = instance.flavor
patch = patcher.create(node).get_cleanup_patch(instance, network_info,
flavor)
# Unassociate the node
patch.append({'op': 'remove', 'path': '/instance_uuid'})
try:
self.ironicclient.call('node.update', node.uuid, patch)
except ironic.exc.BadRequest:
LOG.error(_LE("Failed to clean up the parameters on node %(node)s "
"when unprovisioning the instance %(instance)s"),
{'node': node.uuid, 'instance': instance.uuid})
reason = (_("Fail to clean up node %s parameters") % node.uuid)
raise exception.InstanceTerminationFailure(reason=reason)
self._unplug_vifs(node, instance, network_info)
self._stop_firewall(instance, network_info)
def _wait_for_active(self, ironicclient, instance):
"""Wait for the node to be marked as ACTIVE in Ironic."""
node = _validate_instance_and_node(ironicclient, instance)
if node.provision_state == ironic_states.ACTIVE:
# job is done
LOG.debug("Ironic node %(node)s is now ACTIVE",
dict(node=node.uuid), instance=instance)
raise loopingcall.LoopingCallDone()
if node.target_provision_state in (ironic_states.DELETED,
ironic_states.AVAILABLE):
# ironic is trying to delete it now
raise exception.InstanceNotFound(instance_id=instance.uuid)
if node.provision_state in (ironic_states.NOSTATE,
ironic_states.AVAILABLE):
# ironic already deleted it
raise exception.InstanceNotFound(instance_id=instance.uuid)
if node.provision_state == ironic_states.DEPLOYFAIL:
# ironic failed to deploy
msg = (_("Failed to provision instance %(inst)s: %(reason)s")
% {'inst': instance.uuid, 'reason': node.last_error})
raise exception.InstanceDeployFailure(msg)
_log_ironic_polling('become ACTIVE', node, instance)
def _wait_for_power_state(self, ironicclient, instance, message):
"""Wait for the node to complete a power state change."""
node = _validate_instance_and_node(ironicclient, instance)
if node.target_power_state == ironic_states.NOSTATE:
raise loopingcall.LoopingCallDone()
_log_ironic_polling(message, node, instance)
def init_host(self, host):
"""Initialize anything that is necessary for the driver to function.
:param host: the hostname of the compute host.
"""
return
def _get_hypervisor_type(self):
"""Get hypervisor type."""
return 'ironic'
def _get_hypervisor_version(self):
"""Returns the version of the Ironic API service endpoint."""
return CONF.ironic.api_version
def instance_exists(self, instance):
"""Checks the existence of an instance.
Checks the existence of an instance. This is an override of the
base method for efficiency.
:param instance: The instance object.
:returns: True if the instance exists. False if not.
"""
try:
_validate_instance_and_node(self.ironicclient, instance)
return True
except exception.InstanceNotFound:
return False
def list_instances(self):
"""Return the names of all the instances provisioned.
:returns: a list of instance names.
"""
# NOTE(lucasagomes): limit == 0 is an indicator to continue
# pagination until there are no more values to be returned.
node_list = self.ironicclient.call("node.list", associated=True,
limit=0)
context = nova_context.get_admin_context()
return [objects.Instance.get_by_uuid(context,
i.instance_uuid).name
for i in node_list]
def list_instance_uuids(self):
"""Return the UUIDs of all the instances provisioned.
:returns: a list of instance UUIDs.
"""
# NOTE(lucasagomes): limit == 0 is an indicator to continue
# pagination until there are no more values to be returned.
node_list = self.ironicclient.call("node.list", associated=True,
limit=0)
return list(n.instance_uuid for n in node_list)
def node_is_available(self, nodename):
"""Confirms a Nova hypervisor node exists in the Ironic inventory.
:param nodename: The UUID of the node.
:returns: True if the node exists, False if not.
"""
# NOTE(comstud): We can cheat and use caching here. This method
# just needs to return True for nodes that exist. It doesn't
# matter if the data is stale. Sure, it's possible that removing
# node from Ironic will cause this method to return True until
# the next call to 'get_available_nodes', but there shouldn't
# be much harm. There's already somewhat of a race.
if not self.node_cache:
# Empty cache, try to populate it.
self._refresh_cache()
if nodename in self.node_cache:
return True
# NOTE(comstud): Fallback and check Ironic. This case should be
# rare.
try:
self.ironicclient.call("node.get", nodename)
return True
except ironic.exc.NotFound:
return False
def _refresh_cache(self):
# NOTE(lucasagomes): limit == 0 is an indicator to continue
# pagination until there are no more values to be returned.
node_list = self.ironicclient.call('node.list', detail=True, limit=0)
node_cache = {}
for node in node_list:
node_cache[node.uuid] = node
self.node_cache = node_cache
self.node_cache_time = time.time()
def get_available_nodes(self, refresh=False):
"""Returns the UUIDs of all nodes in the Ironic inventory.
:param refresh: Boolean value; If True run update first. Ignored by
this driver.
:returns: a list of UUIDs
"""
# NOTE(jroll) we refresh the cache every time this is called
# because it needs to happen in the resource tracker
# periodic task. This task doesn't pass refresh=True,
# unfortunately.
self._refresh_cache()
node_uuids = list(self.node_cache.keys())
LOG.debug("Returning %(num_nodes)s available node(s)",
dict(num_nodes=len(node_uuids)))
return node_uuids
def get_available_resource(self, nodename):
"""Retrieve resource information.
This method is called when nova-compute launches, and
as part of a periodic task that records the results in the DB.
:param nodename: the UUID of the node.
:returns: a dictionary describing resources.
"""
# NOTE(comstud): We can cheat and use caching here. This method is
# only called from a periodic task and right after the above
# get_available_nodes() call is called.
if not self.node_cache:
# Well, it's also called from init_host(), so if we have empty
# cache, let's try to populate it.
self._refresh_cache()
cache_age = time.time() - self.node_cache_time
if nodename in self.node_cache:
LOG.debug("Using cache for node %(node)s, age: %(age)s",
{'node': nodename, 'age': cache_age})
node = self.node_cache[nodename]
else:
LOG.debug("Node %(node)s not found in cache, age: %(age)s",
{'node': nodename, 'age': cache_age})
node = self.ironicclient.call("node.get", nodename)
return self._node_resource(node)
def get_info(self, instance):
"""Get the current state and resource usage for this instance.
If the instance is not found this method returns an InstanceInfo
with NOSTATE and all resources == 0.
:param instance: the instance object.
:returns: an InstanceInfo object
"""
try:
node = _validate_instance_and_node(self.ironicclient, instance)
except exception.InstanceNotFound:
return hardware.InstanceInfo(
state=map_power_state(ironic_states.NOSTATE))
memory_kib = int(node.properties.get('memory_mb', 0)) * 1024
if memory_kib == 0:
LOG.warning(_LW("Warning, memory usage is 0 for "
"%(instance)s on baremetal node %(node)s."),
{'instance': instance.uuid,
'node': instance.node})
num_cpu = node.properties.get('cpus', 0)
if num_cpu == 0:
LOG.warning(_LW("Warning, number of cpus is 0 for "
"%(instance)s on baremetal node %(node)s."),
{'instance': instance.uuid,
'node': instance.node})
return hardware.InstanceInfo(state=map_power_state(node.power_state),
max_mem_kb=memory_kib,
mem_kb=memory_kib,
num_cpu=num_cpu)
def deallocate_networks_on_reschedule(self, instance):
"""Does the driver want networks deallocated on reschedule?
:param instance: the instance object.
:returns: Boolean value. If True deallocate networks on reschedule.
"""
return True
def macs_for_instance(self, instance):
"""List the MAC addresses of an instance.
List of MAC addresses for the node which this instance is
associated with.
:param instance: the instance object.
:return: None, or a set of MAC ids (e.g. set(['12:34:56:78:90:ab'])).
None means 'no constraints', a set means 'these and only these
MAC addresses'.
"""
try:
node = self.ironicclient.call("node.get", instance.node)
except ironic.exc.NotFound:
return None
ports = self.ironicclient.call("node.list_ports", node.uuid)
return set([p.address for p in ports])
def _generate_configdrive(self, instance, node, network_info,
extra_md=None, files=None):
"""Generate a config drive.
:param instance: The instance object.
:param node: The node object.
:param network_info: Instance network information.
:param extra_md: Optional, extra metadata to be added to the
configdrive.
:param files: Optional, a list of paths to files to be added to
the configdrive.
"""
if not extra_md:
extra_md = {}
i_meta = instance_metadata.InstanceMetadata(instance,
content=files, extra_md=extra_md, network_info=network_info)
with tempfile.NamedTemporaryFile() as uncompressed:
try:
with configdrive.ConfigDriveBuilder(instance_md=i_meta) as cdb:
cdb.make_drive(uncompressed.name)
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Creating config drive failed with "
"error: %s"), e, instance=instance)
with tempfile.NamedTemporaryFile() as compressed:
# compress config drive
with gzip.GzipFile(fileobj=compressed, mode='wb') as gzipped:
uncompressed.seek(0)
shutil.copyfileobj(uncompressed, gzipped)
# base64 encode config drive
compressed.seek(0)
return base64.b64encode(compressed.read())
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
"""Deploy an instance.
:param context: The security context.
:param instance: The instance object.
:param image_meta: Image dict returned by nova.image.glance
that defines the image from which to boot this instance.
:param injected_files: User files to inject into instance.
:param admin_password: Administrator password to set in
instance.
:param network_info: Instance network information.
:param block_device_info: Instance block device
information. Ignored by this driver.
"""
# The compute manager is meant to know the node uuid, so missing uuid
# is a significant issue. It may mean we've been passed the wrong data.
node_uuid = instance.get('node')
if not node_uuid:
raise ironic.exc.BadRequest(
_("Ironic node uuid not supplied to "
"driver for instance %s.") % instance.uuid)
node = self.ironicclient.call("node.get", node_uuid)
flavor = instance.flavor
self._add_driver_fields(node, instance, image_meta, flavor)
# NOTE(Shrews): The default ephemeral device needs to be set for
# services (like cloud-init) that depend on it being returned by the
# metadata server. Addresses bug https://launchpad.net/bugs/1324286.
if flavor.ephemeral_gb:
instance.default_ephemeral_device = '/dev/sda1'
instance.save()
# validate we are ready to do the deploy
validate_chk = self.ironicclient.call("node.validate", node_uuid)
if not validate_chk.deploy or not validate_chk.power:
# something is wrong. undo what we have done
self._cleanup_deploy(context, node, instance, network_info,
flavor=flavor)
raise exception.ValidationError(_(
"Ironic node: %(id)s failed to validate."
" (deploy: %(deploy)s, power: %(power)s)")
% {'id': node.uuid,
'deploy': validate_chk.deploy,
'power': validate_chk.power})
# prepare for the deploy
try:
self._plug_vifs(node, instance, network_info)
self._start_firewall(instance, network_info)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error preparing deploy for instance "
"%(instance)s on baremetal node %(node)s."),
{'instance': instance.uuid,
'node': node_uuid})
self._cleanup_deploy(context, node, instance, network_info,
flavor=flavor)
# Config drive
configdrive_value = None
if configdrive.required_by(instance):
extra_md = {}
if admin_password:
extra_md['admin_pass'] = admin_password
configdrive_value = self._generate_configdrive(
instance, node, network_info, extra_md=extra_md,
files=injected_files)
LOG.info(_LI("Config drive for instance %(instance)s on "
"baremetal node %(node)s created."),
{'instance': instance['uuid'], 'node': node_uuid})
# trigger the node deploy
try:
self.ironicclient.call("node.set_provision_state", node_uuid,
ironic_states.ACTIVE,
configdrive=configdrive_value)
except Exception as e:
with excutils.save_and_reraise_exception():
msg = (_LE("Failed to request Ironic to provision instance "
"%(inst)s: %(reason)s"),
{'inst': instance.uuid,
'reason': six.text_type(e)})
LOG.error(msg)
self._cleanup_deploy(context, node, instance, network_info,
flavor=flavor)
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_active,
self.ironicclient,
instance)
try:
timer.start(interval=CONF.ironic.api_retry_interval).wait()
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error deploying instance %(instance)s on "
"baremetal node %(node)s."),
{'instance': instance.uuid,
'node': node_uuid})
self.destroy(context, instance, network_info)
def _unprovision(self, ironicclient, instance, node):
"""This method is called from destroy() to unprovision
already provisioned node after required checks.
"""
try:
ironicclient.call("node.set_provision_state", node.uuid, "deleted")
except Exception as e:
# if the node is already in a deprovisioned state, continue
# This should be fixed in Ironic.
# TODO(deva): This exception should be added to
# python-ironicclient and matched directly,
# rather than via __name__.
if getattr(e, '__name__', None) != 'InstanceDeployFailure':
raise
# using a dict because this is modified in the local method
data = {'tries': 0}
def _wait_for_provision_state():
node = _validate_instance_and_node(ironicclient, instance)
if node.provision_state in (ironic_states.NOSTATE,
ironic_states.CLEANING,
ironic_states.CLEANFAIL,
ironic_states.AVAILABLE):
# From a user standpoint, the node is unprovisioned. If a node
# gets into CLEANFAIL state, it must be fixed in Ironic, but we
# can consider the instance unprovisioned.
LOG.debug("Ironic node %(node)s is in state %(state)s, "
"instance is now unprovisioned.",
dict(node=node.uuid, state=node.provision_state),
instance=instance)
raise loopingcall.LoopingCallDone()
if data['tries'] >= CONF.ironic.api_max_retries:
msg = (_("Error destroying the instance on node %(node)s. "
"Provision state still '%(state)s'.")
% {'state': node.provision_state,
'node': node.uuid})
LOG.error(msg)
raise exception.NovaException(msg)
else:
data['tries'] += 1
_log_ironic_polling('unprovision', node, instance)
# wait for the state transition to finish
timer = loopingcall.FixedIntervalLoopingCall(_wait_for_provision_state)
timer.start(interval=CONF.ironic.api_retry_interval).wait()
def destroy(self, context, instance, network_info,
block_device_info=None, destroy_disks=True, migrate_data=None):
"""Destroy the specified instance, if it can be found.
:param context: The security context.
:param instance: The instance object.
:param network_info: Instance network information.
:param block_device_info: Instance block device
information. Ignored by this driver.
:param destroy_disks: Indicates if disks should be
destroyed. Ignored by this driver.
:param migrate_data: implementation specific params.
Ignored by this driver.
"""
try:
node = _validate_instance_and_node(self.ironicclient, instance)
except exception.InstanceNotFound:
LOG.warning(_LW("Destroy called on non-existing instance %s."),
instance.uuid)
# NOTE(deva): if nova.compute.ComputeManager._delete_instance()
# is called on a non-existing instance, the only way
# to delete it is to return from this method
# without raising any exceptions.
return
if node.provision_state in (ironic_states.ACTIVE,
ironic_states.DEPLOYFAIL,
ironic_states.ERROR,
ironic_states.DEPLOYWAIT):
self._unprovision(self.ironicclient, instance, node)
self._cleanup_deploy(context, node, instance, network_info)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot the specified instance.
NOTE: Ironic does not support soft-off, so this method
always performs a hard-reboot.
NOTE: Unlike the libvirt driver, this method does not delete
and recreate the instance; it preserves local state.
:param context: The security context.
:param instance: The instance object.
:param network_info: Instance network information. Ignored by
this driver.
:param reboot_type: Either a HARD or SOFT reboot. Ignored by
this driver.
:param block_device_info: Info pertaining to attached volumes.
Ignored by this driver.
:param bad_volumes_callback: Function to handle any bad volumes
encountered. Ignored by this driver.
"""
node = _validate_instance_and_node(self.ironicclient, instance)
self.ironicclient.call("node.set_power_state", node.uuid, 'reboot')
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_power_state,
self.ironicclient, instance, 'reboot')
timer.start(interval=CONF.ironic.api_retry_interval).wait()
def power_off(self, instance, timeout=0, retry_interval=0):
"""Power off the specified instance.
NOTE: Ironic does not support soft-off, so this method ignores
timeout and retry_interval parameters.
NOTE: Unlike the libvirt driver, this method does not delete
and recreate the instance; it preserves local state.
:param instance: The instance object.
:param timeout: time to wait for node to shutdown. Ignored by
this driver.
:param retry_interval: How often to signal node while waiting
for it to shutdown. Ignored by this driver.
"""
node = _validate_instance_and_node(self.ironicclient, instance)
self.ironicclient.call("node.set_power_state", node.uuid, 'off')
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_power_state,
self.ironicclient, instance, 'power off')
timer.start(interval=CONF.ironic.api_retry_interval).wait()
def power_on(self, context, instance, network_info,
block_device_info=None):
"""Power on the specified instance.
NOTE: Unlike the libvirt driver, this method does not delete
and recreate the instance; it preserves local state.
:param context: The security context.
:param instance: The instance object.
:param network_info: Instance network information. Ignored by
this driver.
:param block_device_info: Instance block device
information. Ignored by this driver.
"""
node = _validate_instance_and_node(self.ironicclient, instance)
self.ironicclient.call("node.set_power_state", node.uuid, 'on')
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_power_state,
self.ironicclient, instance, 'power on')
timer.start(interval=CONF.ironic.api_retry_interval).wait()
def refresh_security_group_rules(self, security_group_id):
"""Refresh security group rules from data store.
Invoked when security group rules are updated.
:param security_group_id: The security group id.
"""
self.firewall_driver.refresh_security_group_rules(security_group_id)
def refresh_security_group_members(self, security_group_id):
"""Refresh security group members from data store.
Invoked when instances are added/removed to a security group.
:param security_group_id: The security group id.
"""
self.firewall_driver.refresh_security_group_members(security_group_id)
def refresh_provider_fw_rules(self):
"""Triggers a firewall update based on database changes."""
self.firewall_driver.refresh_provider_fw_rules()
def refresh_instance_security_rules(self, instance):
"""Refresh security group rules from data store.
Gets called when an instance gets added to or removed from
the security group the instance is a member of or if the
group gains or loses a rule.
:param instance: The instance object.
"""
self.firewall_driver.refresh_instance_security_rules(instance)
def ensure_filtering_rules_for_instance(self, instance, network_info):
"""Set up filtering rules.
:param instance: The instance object.
:param network_info: Instance network information.
"""
self.firewall_driver.setup_basic_filtering(instance, network_info)
self.firewall_driver.prepare_instance_filter(instance, network_info)
def unfilter_instance(self, instance, network_info):
"""Stop filtering instance.
:param instance: The instance object.
:param network_info: Instance network information.
"""
self.firewall_driver.unfilter_instance(instance, network_info)
def _plug_vifs(self, node, instance, network_info):
# NOTE(PhilDay): Accessing network_info will block if the thread
# it wraps hasn't finished, so do this ahead of time so that we
# don't block while holding the logging lock.
network_info_str = str(network_info)
LOG.debug("plug: instance_uuid=%(uuid)s vif=%(network_info)s",
{'uuid': instance.uuid,
'network_info': network_info_str})
# start by ensuring the ports are clear
self._unplug_vifs(node, instance, network_info)
ports = self.ironicclient.call("node.list_ports", node.uuid)
if len(network_info) > len(ports):
raise exception.VirtualInterfacePlugException(_(
"Ironic node: %(id)s virtual to physical interface count"
" missmatch"
" (Vif count: %(vif_count)d, Pif count: %(pif_count)d)")
% {'id': node.uuid,
'vif_count': len(network_info),
'pif_count': len(ports)})
if len(network_info) > 0:
# not needed if no vifs are defined
for vif, pif in zip(network_info, ports):
# attach what neutron needs directly to the port
port_id = unicode(vif['id'])
patch = [{'op': 'add',
'path': '/extra/vif_port_id',
'value': port_id}]
self.ironicclient.call("port.update", pif.uuid, patch)
def _unplug_vifs(self, node, instance, network_info):
# NOTE(PhilDay): Accessing network_info will block if the thread
# it wraps hasn't finished, so do this ahead of time so that we
# don't block while holding the logging lock.
network_info_str = str(network_info)
LOG.debug("unplug: instance_uuid=%(uuid)s vif=%(network_info)s",
{'uuid': instance.uuid,
'network_info': network_info_str})
if network_info and len(network_info) > 0:
ports = self.ironicclient.call("node.list_ports", node.uuid,
detail=True)
# not needed if no vifs are defined
for vif, pif in zip(network_info, ports):
if 'vif_port_id' in pif.extra:
# we can not attach a dict directly
patch = [{'op': 'remove', 'path': '/extra/vif_port_id'}]
try:
self.ironicclient.call("port.update", pif.uuid, patch)
except ironic.exc.BadRequest:
pass
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks.
:param instance: The instance object.
:param network_info: Instance network information.
"""
node = self.ironicclient.call("node.get", instance.node)
self._plug_vifs(node, instance, network_info)
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks.
:param instance: The instance object.
:param network_info: Instance network information.
"""
node = self.ironicclient.call("node.get", instance.node)
self._unplug_vifs(node, instance, network_info)
def rebuild(self, context, instance, image_meta, injected_files,
admin_password, bdms, detach_block_devices,
attach_block_devices, network_info=None,
recreate=False, block_device_info=None,
preserve_ephemeral=False):
"""Rebuild/redeploy an instance.
This version of rebuild() allows for supporting the option to
preserve the ephemeral partition. We cannot call spawn() from
here because it will attempt to set the instance_uuid value
again, which is not allowed by the Ironic API. It also requires
the instance to not have an 'active' provision state, but we
cannot safely change that. Given that, we implement only the
portions of spawn() we need within rebuild().
:param context: The security context.
:param instance: The instance object.
:param image_meta: Image object returned by nova.image.glance
that defines the image from which to boot this instance. Ignored
by this driver.
:param injected_files: User files to inject into instance. Ignored
by this driver.
:param admin_password: Administrator password to set in
instance. Ignored by this driver.
:param bdms: block-device-mappings to use for rebuild. Ignored
by this driver.
:param detach_block_devices: function to detach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage. Ignored by this driver.
:param attach_block_devices: function to attach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage. Ignored by this driver.
:param network_info: Instance network information. Ignored by
this driver.
:param recreate: Boolean value; if True the instance is
recreated on a new hypervisor - all the cleanup of old state is
skipped. Ignored by this driver.
:param block_device_info: Instance block device
information. Ignored by this driver.
:param preserve_ephemeral: Boolean value; if True the ephemeral
must be preserved on rebuild.
"""
instance.task_state = task_states.REBUILD_SPAWNING
instance.save(expected_task_state=[task_states.REBUILDING])
node_uuid = instance.node
node = self.ironicclient.call("node.get", node_uuid)
self._add_driver_fields(node, instance, image_meta, instance.flavor,
preserve_ephemeral)
# Trigger the node rebuild/redeploy.
try:
self.ironicclient.call("node.set_provision_state",
node_uuid, ironic_states.REBUILD)
except (exception.NovaException, # Retry failed
ironic.exc.InternalServerError, # Validations
ironic.exc.BadRequest) as e: # Maintenance
msg = (_("Failed to request Ironic to rebuild instance "
"%(inst)s: %(reason)s") % {'inst': instance.uuid,
'reason': six.text_type(e)})
raise exception.InstanceDeployFailure(msg)
# Although the target provision state is REBUILD, it will actually go
# to ACTIVE once the redeploy is finished.
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_active,
self.ironicclient,
instance)
timer.start(interval=CONF.ironic.api_retry_interval).wait()
|
|
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =============enthought library imports=======================
# ============= standard library imports ========================
import math
from copy import deepcopy
from numpy import asarray, average, array
from uncertainties import ufloat, umath
# ============= local library imports ==========================
from constants import ArArConstants
from plateau import Plateau
from stats import calculate_weighted_mean
def calculate_F_ratio(m4039, m3739, m3639, pr):
"""
required ratios
(40/39)m
(36/39)m
(37/39)m
"""
atm4036 = 295.5
n = m4039 - atm4036 * m3639 + atm4036 * pr.get('ca3637') * m3739
d = 1 - pr.get('ca3937') * m3739
F = n / d - pr.get('k4039')
return F
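# Hedged worked sketch with hypothetical measured ratios; a plain dict stands
# in for the production-ratio object:
#
#   >>> pr = {'ca3637': 0.00027, 'ca3937': 0.0007, 'k4039': 0.01}
#   >>> round(calculate_F_ratio(10.0, 0.01, 0.001, pr), 4)
#   9.6954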
def calculate_flux(f, age, arar_constants=None):
"""
#rad40: radiogenic 40Ar
#k39: 39Ar from potassium
f: F value rad40Ar/39Ar
age: age of monitor in years
solve age equation for J
"""
# if isinstance(rad40, (list, tuple)):
# rad40 = ufloat(*rad40)
# if isinstance(k39, (list, tuple)):
# k39 = ufloat(*k39)
if isinstance(f, (list, tuple)):
f = ufloat(*f)
if isinstance(age, (list, tuple)):
age = ufloat(*age)
# age = (1 / constants.lambdak) * umath.log(1 + JR)
try:
# r = rad40 / k39
if arar_constants is None:
arar_constants = ArArConstants()
j = (umath.exp(age * arar_constants.lambda_k.nominal_value) - 1) / f
return j.nominal_value, j.std_dev
except ZeroDivisionError:
return 1, 0
# return j
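# Hedged sketch of the J computation above, assuming lambda_k ~ 5.543e-10 1/a
# (a commonly used total 40K decay constant; the real value comes from
# ArArConstants): for a 28.02 Ma monitor with F = 4.0,
#
#   >>> age, f, lk = 28.02e6, 4.0, 5.543e-10
#   >>> round((math.exp(age * lk) - 1) / f, 6)
#   0.003913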
def calculate_decay_time(dc, f):
return math.log(f) / dc
def calculate_decay_factor(dc, segments):
"""
McDougall and Harrison
p.75 equation 3.22
the book suggests using ti == analysis_time - end of irradiation segment_i;
mass spec uses ti == analysis_time - start of irradiation segment_i.
using the start seems more appropriate
"""
a = sum([pi * ti for pi, ti, _ in segments])
b = sum([pi * ((1 - math.exp(-dc * ti)) / (dc * math.exp(dc * dti)))
for pi, ti, dti in segments])
try:
return a / b
except ZeroDivisionError:
return 1.0
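# Hedged worked sketch: a single one-hour segment at unit power, with dti = 0
# for simplicity and a hypothetical dc in 1/hr roughly matching 37Ar
# (half-life ~35.04 d):
#
#   >>> dc = math.log(2) / (35.04 * 24)
#   >>> round(calculate_decay_factor(dc, [(1.0, 1.0, 0.0)]), 6)
#   1.000412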
def abundance_sensitivity_correction(isos, abundance_sensitivity):
s40, s39, s38, s37, s36 = isos
# correct for abundance sensitivity
# assumes symmetric and equal abundant sens for all peaks
n40 = s40 - abundance_sensitivity * (s39 + s39)
n39 = s39 - abundance_sensitivity * (s40 + s38)
n38 = s38 - abundance_sensitivity * (s39 + s37)
n37 = s37 - abundance_sensitivity * (s38 + s36)
n36 = s36 - abundance_sensitivity * (s37 + s37)
return [n40, n39, n38, n37, n36]
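# Hedged sketch with hypothetical signals (40, 39, 38, 37, 36) and an
# abundance sensitivity of 1e-4; the edge peaks (40 and 36) double their
# single available neighbor, per the symmetric/equal assumption above:
#
#   >>> isos = [100.0, 10.0, 1.0, 0.5, 0.05]
#   >>> abundance_sensitivity_correction(isos, 1e-4)[0]
#   99.998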
def apply_fixed_k3739(a39, pr, fixed_k3739):
"""
x=ca37/k39
y=ca37/ca39
T=s39dec_cor
T=ca39+k39
T=ca37/y+ca37/x
ca37=(T*x*y)/(x+y)
"""
x = fixed_k3739
y = 1 / pr.get('ca3937', 1)
ca37 = (a39 * x * y) / (x + y)
ca39 = pr.get('ca3937', 0) * ca37
k39 = a39 - ca39
k37 = x * k39
return ca37, ca39, k37, k39
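# Hedged sketch with a hypothetical production ratio and fixed 37/39 of 0.01;
# the outputs satisfy k37 == fixed_k3739 * k39 and ca39 == ca3937 * ca37:
#
#   >>> ca37, ca39, k37, k39 = apply_fixed_k3739(100.0, {'ca3937': 0.0007}, 0.01)
#   >>> round(k39, 4)
#   99.9993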
def interference_corrections(a40, a39, a38, a37, a36,
production_ratios,
arar_constants=None,
fixed_k3739=False):
if production_ratios is None:
production_ratios = {}
if arar_constants is None:
arar_constants = ArArConstants()
pr = production_ratios
k37 = ufloat(0, 1e-20)
if arar_constants.k3739_mode.lower() == 'normal' and not fixed_k3739:
# iteratively calculate 37, 39
for _ in range(5):
ca37 = a37 - k37
ca39 = pr.get('ca3937', 0) * ca37
k39 = a39 - ca39
k37 = pr.get('k3739', 0) * k39
else:
if not fixed_k3739:
fixed_k3739 = arar_constants.fixed_k3739
ca37, ca39, k37, k39 = apply_fixed_k3739(a39, pr, fixed_k3739)
k38 = pr.get('k3839', 0) * k39
if not arar_constants.allow_negative_ca_correction:
ca37 = max(ufloat(0, 0), ca37)
ca36 = pr.get('ca3637', 0) * ca37
ca38 = pr.get('ca3837', 0) * ca37
return k37, k38, k39, ca36, ca37, ca38, ca39
def calculate_atmospheric(a38, a36, k38, ca38, ca36, decay_time,
production_ratios=None,
arar_constants=None):
"""
McDougall and Harrison
Roddick 1983
Foland 1993
iteratively calculate atm36
"""
if production_ratios is None:
production_ratios = {}
if arar_constants is None:
arar_constants = ArArConstants()
pr = production_ratios
m = pr.get('cl3638', 0) * arar_constants.lambda_Cl36.nominal_value * decay_time
atm36 = ufloat(0, 1e-20)
for _ in range(5):
ar38atm = arar_constants.atm3836.nominal_value * atm36
cl38 = a38 - ar38atm - k38 - ca38
cl36 = cl38 * m
atm36 = a36 - ca36 - cl36
return atm36, cl36
def calculate_F(isotopes,
decay_time,
interferences=None,
arar_constants=None,
fixed_k3739=False):
"""
isotope values already corrected for blank, baseline (background),
ic_factor (discrimination), and ar37/ar39 decay
"""
a40, a39, a38, a37, a36 = isotopes
#a37*=113
if interferences is None:
interferences = {}
if arar_constants is None:
arar_constants = ArArConstants()
#make local copy of interferences
pr = dict(((k, v.__copy__()) for k, v in interferences.iteritems()))
#for k,v in pr.iteritems():
# print k, v
k37, k38, k39, ca36, ca37, ca38, ca39 = interference_corrections(a40, a39, a38, a37, a36,
pr, arar_constants, fixed_k3739)
atm36, cl36 = calculate_atmospheric(a38, a36, k38, ca38, ca36,
decay_time,
pr,
arar_constants)
# calculate radiogenic 40Ar
# don't include error in 40/36
atm40 = atm36 * arar_constants.atm4036.nominal_value
k40 = k39 * pr.get('k4039', 1)
rad40 = a40 - atm40 - k40
try:
f = rad40 / k39
except ZeroDivisionError:
f = ufloat(1.0, 0)
rf = deepcopy(f)
# f = ufloat(f.nominal_value, f.std_dev, tag='F')
non_ar_isotopes = dict(k40=k40,
ca39=ca39,
k38=k38,
ca38=ca38,
k37=k37,
ca37=ca37,
ca36=ca36,
cl36=cl36)
try:
rp = rad40 / a40 * 100
except ZeroDivisionError:
rp = ufloat(0, 0)
computed = dict(rad40=rad40, rad40_percent=rp,
k39=k39, atm40=atm40)
#print 'Ar40', a40-k40, a40, k40
#print 'Ar39', a39-k39, a39, k39
interference_corrected = dict(Ar40=a40 - k40,
Ar39=k39,
Ar38=a38, #- k38 - ca38,
Ar37=a37, #- ca37 - k37,
Ar36=atm36)
##clear errors in irrad
for pp in pr.itervalues():
pp.std_dev = 0
f_wo_irrad = f
return rf, f_wo_irrad, non_ar_isotopes, computed, interference_corrected
def age_equation(j, f,
include_decay_error=False,
arar_constants=None):
if isinstance(j, tuple):
j = ufloat(*j)
elif isinstance(j, str):
j = ufloat(j)
if isinstance(f, tuple):
f = ufloat(*f)
elif isinstance(f, str):
f = ufloat(f)
if arar_constants is None:
arar_constants = ArArConstants()
scalar = float(arar_constants.age_scalar)
lk = arar_constants.lambda_k
if not include_decay_error:
lk = lk.nominal_value
try:
return (lk ** -1 * umath.log(1 + j * f)) / scalar
except (ValueError, TypeError):
return ufloat(0, 0)
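# Hedged sketch of the underlying relation t = lambda_k**-1 * ln(1 + J*F),
# using the same hypothetical lambda_k as above and age_scalar = 1e6 (ages in
# Ma); the real values come from ArArConstants:
#
#   >>> lk, scalar = 5.543e-10, 1e6
#   >>> round((lk ** -1 * math.log(1 + 0.003913 * 4.0)) / scalar, 2)
#   28.02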
#===============================================================================
# non-recursive
#===============================================================================
def calculate_error_F(signals, F, k4039, ca3937, ca3637):
"""
McDougall and Harrison
p92 eq 3.43
"""
m40, m39, m38, m37, m36 = signals
G = m40 / m39
B = m36 / m39
D = m37 / m39
C1 = 295.5
C2 = ca3637.nominal_value
C3 = k4039.nominal_value
C4 = ca3937.nominal_value
ssD = D.std_dev ** 2
ssB = B.std_dev ** 2
ssG = G.std_dev ** 2
G = G.nominal_value
B = B.nominal_value
D = D.nominal_value
ssF = ssG + C1 ** 2 * ssB + ssD * (C4 * G - C1 * C4 * B + C1 * C2) ** 2
return ssF ** 0.5
def calculate_error_t(F, ssF, j, ssJ):
"""
McDougall and Harrison
p92 eq. 3.43
"""
JJ = j * j
FF = F * F
constants = ArArConstants()
ll = constants.lambda_k.nominal_value ** 2
sst = (JJ * ssF + FF * ssJ) / (ll * (1 + F * j) ** 2)
return sst ** 0.5
#============= EOF =====================================
#isochron
# def extract_isochron_xy(analyses):
# ans = [(ai.get_interference_corrected_value('Ar39'),
# ai.get_interference_corrected_value('Ar36'),
# ai.get_interference_corrected_value('Ar40'))
# for ai in analyses]
# a39, a36, a40 = array(ans).T
# # print 'a40',a40
# # print 'a39',a39
# # print 'a36',a36
# try:
# xx = a39 / a40
# yy = a36 / a40
# except ZeroDivisionError:
# return
#
# return xx, yy
#
#
# def calculate_isochron(analyses, reg='NewYork'):
# ref = analyses[0]
# ans = [(ai.get_interference_corrected_value('Ar39'),
# ai.get_interference_corrected_value('Ar36'),
# ai.get_interference_corrected_value('Ar40'))
# for ai in analyses]
#
# a39, a36, a40 = array(ans).T
# try:
# xx = a39 / a40
# yy = a36 / a40
# except ZeroDivisionError:
# return
#
# xs, xerrs = zip(*[(xi.nominal_value, xi.std_dev) for xi in xx])
# ys, yerrs = zip(*[(yi.nominal_value, yi.std_dev) for yi in yy])
#
# xds, xdes = zip(*[(xi.nominal_value, xi.std_dev) for xi in a40])
# yns, ynes = zip(*[(xi.nominal_value, xi.std_dev) for xi in a36])
# xns, xnes = zip(*[(xi.nominal_value, xi.std_dev) for xi in a39])
#
# regx = isochron_regressor(ys, yerrs, xs, xerrs,
# xds,xdes, yns, ynes, xns, xnes)
#
# reg = isochron_regressor(xs, xerrs, ys, yerrs,
# xds, xdes, xns, xnes, yns, ynes,
# reg)
#
# xint = ufloat(regx.get_intercept(), regx.get_intercept_error())
# # xint = ufloat(reg.x_intercept, reg.x_intercept_error)
# try:
# r = xint ** -1
# except ZeroDivisionError:
# r = 0
#
# age = ufloat(0, 0)
# if r > 0:
# age = age_equation((ref.j.nominal_value, 0), r, arar_constants=ref.arar_constants)
# return age, reg, (xs, ys, xerrs, yerrs)
#
#
# def isochron_regressor(xs, xes, ys, yes,
# xds, xdes, xns, xnes, yns, ynes,
# reg='Reed'):
# if reg.lower() in ('newyork', 'new_york'):
# from pychron.core.regression.new_york_regressor import NewYorkRegressor as klass
# else:
# from pychron.core.regression.new_york_regressor import ReedYorkRegressor as klass
# reg = klass(xs=xs, ys=ys,
# xserr=xes, yserr=yes,
# xds=xds, xdes=xdes,
# xns=xns, xnes=xnes,
# yns=yns, ynes=ynes)
# reg.calculate()
# return reg
# #==========================================================================
# # errors mass spec copy
# #==========================================================================
#
# square = lambda x: x * x
#
# Tot40Er = s40er
# Tot39Er = s39er
# Tot38Er = s38er
# Tot37Er = s37er
# Tot36Er = s36er
#
# D = d
# D2 = d * d
# D3 = d * D2
# D4 = d * D3
#
# T40 = s40 / D4
# T39 = s39 / D3
# T38 = s38 / D2
# T37 = s37 / D
# T36 = s36
#
# A4036 = constants.atm4036
# A3836 = constants.atm3836
#
# s = ca3937 * D * T37
# T = ca3637 * D * T37
# G = D3 * T39 - s
# # P = mcl * (ca3837 * D * T37 + A3836 * (T36 - T) - D2 * T38 + k3839 * G)
# R = (-k4039 * G - A4036 * (T36 - T - mcl * (ca3837 * D * T37 + A3836 * (T36 - T) - D2 * T38 + k3839 * G)) + D4 * T40)
# G2 = G * G
#
# er40 = square(D4 * j / G) * square(Tot40Er)
#
# er39 = square((j * (-D3 * k4039 + A4036 * D3 * k3839 * mcl)) / G - (D3 * j * R) / G2) * square(Tot39Er)
#
# er38 = square(A4036 * D2 * j * mcl / G) * square(Tot38Er)
#
# er37 = square((j * (ca3937 * D * k4039 - A4036 *
# (-ca3637 * D - (-A3836 * ca3637 * D + ca3837 * D - ca3937 * D * k3839) * mcl)))
# / G + (ca3937 * D * j * R) / G2) * square(Tot37Er)
#
# er36 = square(A4036 * j * (1 - A3836 * mcl) / G) * square(Tot36Er)
# '''
# square((j * (4 * T40 * D3 - K4039 * (3 * D2 * T39 - Ca3937 * T37)
# - A4036 * (-(Ca3637 * T37) - MCl * (-(A3836 * Ca3637 * T37)
# + Ca3837 * T37 + K3839 * (3 * D2 * T39 - Ca3937 * T37)
# - 2 * D * T38))))
# / (D3 * T39 - s) - (1 * j * (3 * D2 * T39 - Ca3937 * T37)
# * (T40 * D4 - K4039 * (D3 * T39 - s)
# - A4036 * (T36 - T - MCl * (-(T38 * D2) + Ca3837 * T37 * D + A3836 * (T36 - T) + K3839 * (D3 * T39 - s)))))
# / square(D3 * T39 - s)) * square(DiscEr)
# '''
# erD = square((j * (4 * T40 * D3 - k4039 * (3 * D2 * T39 - ca3937 * T37)
# - A4036 * (-(ca3637 * T37) - mcl * (-(A3836 * ca3637 * T37)
# + ca3837 * T37 + k3839 * (3 * D2 * T39 - ca3937 * T37)
# - 2 * D * T38))))
# / (D3 * T39 - s) - (1 * j * (3 * D2 * T39 - ca3937 * T37)
# * (T40 * D4 - k4039 * (D3 * T39 - s)
# - A4036 * (T36 - T - mcl * (-(T38 * D2) + ca3837 * T37 * D + A3836 * (T36 - T) + k3839 * (D3 * T39 - s)))))
# / square(D3 * T39 - s)) * square(der)
#
# er4039 = square(j * (s - D3 * T39) / G) * square(k4039er)
#
# er3937 = square((j * (D * k4039 * T37 - A4036 * D * k3839 * mcl * T37)) / G + (D * j * T37 * R) / G2) * square(ca3937er)
#
# er3637 = square(-((A4036 * j * (-D * T37 + A3836 * D * mcl * T37)) / G)) * square(ca3637er)
#
# erJ = square(R / G) * square(jer)
# JRer = (er40 + er39 + er38 + er37 + er36 + erD + er4039 + er3937 + er3637 + erJ) ** 0.5
# age_err = (1e-6 / constants.lambdak) * JRer / (1 + ar40rad / k39 * j)
##===============================================================================
# # error pychron port
##===============================================================================
# # s = ca3937 * s37
# # T = ca3637 * s37
# # G = s39 - s
# # R = (-k4039 * G - constants.atm4036 * (s36 - T - mcl * (ca3837 * s37 + constants.atm3836 * (s36 - T) - s38 + k3839 * G)) + s40)
# # #ErComp(1) = square(D4 * j / G) * square(Tot40Er)
# # er40 = (d ** 4 * j / G) ** 2 * s40er ** 2
# #
# # #square((j * (-D3 * K4039 + A4036 * D3 * K3839 * MCl)) / G - (D3 * j * R) / G2) * square(Tot39Er)
# # d3 = d ** 3
# # er39 = ((j * (-d3 * k4039 + constants.atm4036 * d3 * k3839 * mcl)) / G - (d3 * j * R) / G ** 2) ** 2 * s39er ** 2
# #
# # #square(A4036 * D2 * j * MCl / G) * square(Tot38Er)
# # er38 = (constants.atm4036 * d * d * j * mcl / G) ** 2 * s38er ** 2
# #
# # #square((j * (Ca3937 * D * K4039 - A4036 *
# # # (-Ca3637 * D - (-A3836 * Ca3637 * D + Ca3837 * D - Ca3937 * D * K3839) * MCl)))
# # # / G + (Ca3937 * D * j * R) / G2) * square(Tot37Er)
# # er37 = ((j * (ca3937 * d * k4039 - constants.atm4036
# # * (-ca3637 * d - (-constants.atm3836 * ca3637 * d + ca3837 * d - ca3937 * d * k3839) * mcl)))
# # / G + (ca3937 * d * j * R) / G ** 2) ** 2 * s37er ** 2
# #
# # #square(A4036 * j * (1 - A3836 * MCl) / G) * square(Tot36Er)
# # er36 = (constants.atm4036 * j * (1 - constants.atm3836 * mcl) / G) ** 2 * s36er ** 2
# #
# # #square((j * (4 * T40 * D3 - K4039 * (3 * D2 * T39 - Ca3937 * T37)
# # # -A4036 * (-(Ca3637 * T37) - MCl * (-(A3836 * Ca3637 * T37)
# # # + Ca3837 * T37 + K3839 * (3 * D2 * T39 - Ca3937 * T37)
# # # - 2 * D * T38))))
# # # / (D3 * T39 - s) - (1 * j * (3 * D2 * T39 - Ca3937 * T37)
# # # * (T40 * D4 - K4039 * (D3 * T39 - s)
# # # - A4036 * (T36 - T - MCl * (-(T38 * D2) + Ca3837 * T37 * D + A3836 * (T36 - T) + K3839 * (D3 * T39 - s)))))
# # # / square(D3 * T39 - s)) * square(DiscEr)
# #
# # erD = ((j * (4 * s40 / d - k4039 * (3 * s39 / d - ca3937 * s37 / d)
# # - constants.atm4036 * (-(ca3637 * s37 / d) - mcl * (-(constants.atm3836 * ca3637 * s37 / d)
# # + ca3837 * s37 / d + k3839 * (3 * s39 / d - ca3937 * s37 / d)
# # - 2 * s38 / d))))
# # / (s39 / d - s) - (1 * j * (3 * s39 / d - ca3937 * s37 / d)
# # * (s40 / d - k4039 * (s40 / d - s)
# # - constants.atm4036 * (s36 - T - mcl * (-(s38 / d) + ca3837 * s37 + constants.atm3836 * (s36 - T) + k3839 * (s39 / d - s)))))
# # / (s39 / d - s) ** 2) ** 2 * der ** 2
# # #square(j * (s - D3 * T39) / G) * square(K4039Er)
# # er4039 = (j * (s - s39 / d) / G) ** 2 * k4039er ** 2
# #
# # #square((j * (D * K4039 * T37 - A4036 * D * K3839 * MCl * T37)) / G + (D * j * T37 * R) / G2) * square(Ca3937Er)
# # er3937 = ((j * (k4039 * s37 - constants.atm4036 * k3839 * mcl * s37)) / G + (j * s37 * R) / G ** 2) ** 2 * ca3937er ** 2
# #
# # #square(-((A4036 * j * (-D * T37 + A3836 * D * MCl * T37)) / G)) * square(Ca3637Er)
# # er3637 = (-((constants.atm4036 * j * (-s37 + constants.atm3836 * mcl * s37)) / G)) ** 2 * ca3637er ** 2
# #
# # #square(R / G) * square(JErLocal)
# # erJ = (R / G) ** 2 * jer ** 2
# # JRer = (er40 + er39 + er38 + er37 + er36 + erD + er4039 + er3937 + er3637 + erJ) ** 0.5
# # age_err = (1e-6 / constants.lambdak) * JRer / (1 + ar40rad / k39 * j)
#
# return age / 1e6, age_err
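# Hedged sketch (not from the original module): both commented error blocks
# above combine independent squared error contributions in quadrature and
# then scale by the 40K decay constant. Assuming hypothetical inputs, that
# final step reduces to the helper below.
def combine_age_error(squared_terms, lambdak, rad4039, j):
    """Return an age error in Ma from already-squared error terms.

    ``squared_terms`` are the er40, er39, ... contributions computed above,
    ``rad4039`` stands for ar40rad / k39 and ``j`` is the irradiation
    parameter; this mirrors
    ``age_err = (1e-6 / lambdak) * JRer / (1 + ar40rad / k39 * j)``.
    """
    jrer = sum(squared_terms) ** 0.5
    return (1e-6 / lambdak) * jrer / (1 + rad4039 * j)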
|
|
from __future__ import unicode_literals
import copy
import logging
from django.utils import six
from django.utils.encoding import force_unicode
from django.utils.six.moves.urllib.parse import quote as urllib_quote
from django.utils.translation import ugettext_lazy as _
from djblets.registries.errors import RegistrationError
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import (SPECIAL_PARAMS,
webapi_login_required,
webapi_request_fields)
from djblets.webapi.errors import PERMISSION_DENIED
from djblets.webapi.resources.base import \
WebAPIResource as DjbletsWebAPIResource
from djblets.webapi.resources.mixins.api_tokens import ResourceAPITokenMixin
from djblets.webapi.resources.mixins.oauth2_tokens import (
ResourceOAuth2TokenMixin)
from djblets.webapi.resources.mixins.queries import APIQueryUtilsMixin
from reviewboard.registries.registry import Registry
from reviewboard.site.models import LocalSite
from reviewboard.site.urlresolvers import local_site_reverse
from reviewboard.webapi.decorators import (webapi_check_local_site,
webapi_check_login_required)
from reviewboard.webapi.models import WebAPIToken
CUSTOM_MIMETYPE_BASE = 'application/vnd.reviewboard.org'
EXTRA_DATA_LEN = len('extra_data.')
PRIVATE_KEY_PREFIX = '__'
class ExtraDataAccessLevel(object):
"""Various access levels for ``extra_data`` fields.
This class consists of constants describing the various access levels for
``extra_data`` keys on :py:class:`~reviewboard.webapi.base.WebAPIResource`
subclasses.
"""
#: The associated extra_data key can be retrieved and updated via the API.
ACCESS_STATE_PUBLIC = 1
#: The associated extra_data key can only be retrieved via the API.
ACCESS_STATE_PUBLIC_READONLY = 2
#: The associated extra_data key cannot be accessed via the API.
ACCESS_STATE_PRIVATE = 3
NOT_CALLABLE = 'not_callable'
class CallbackRegistry(Registry):
item_name = 'callback'
errors = {
NOT_CALLABLE: _(
'Could not register %(item)s: it is not callable.'
),
}
def register(self, item):
"""Register a callback.
Args:
item (callable):
The item to register.
Raises:
djblets.registries.errors.RegistrationError:
Raised if the item is not a callable.
djblets.registries.errors.AlreadyRegisteredError:
Raised if the item is already registered.
"""
self.populate()
if not callable(item):
raise RegistrationError(self.format_error(NOT_CALLABLE,
item=item))
super(CallbackRegistry, self).register(item)
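# A minimal sketch (hypothetical callback and usage) of an ``extra_data``
# access callback. Callbacks are registered on a resource's
# ``extra_data_access_callbacks`` registry; returning ``None`` defers to any
# other registered callbacks, and the default state is
# ``ACCESS_STATE_PUBLIC`` (see ``get_extra_data_field_state`` below).
def _example_hide_secret_keys(key_path):
    if key_path and key_path[-1] == 'secret_key':
        return ExtraDataAccessLevel.ACCESS_STATE_PRIVATE
    return None
# Usage (assuming ``resource`` is a WebAPIResource instance):
#     resource.extra_data_access_callbacks.register(_example_hide_secret_keys)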
class RBResourceMixin(APIQueryUtilsMixin, ResourceAPITokenMixin,
ResourceOAuth2TokenMixin):
"""A mixin for Review Board resources.
This mixin is intended to be used by the base Review Board
:py:class:`WebAPIResource` and in subclasses of resources from other
packages (e.g., Djblets) to specialize them for Review Board.
"""
autogenerate_etags = True
mimetype_vendor = 'reviewboard.org'
api_token_model = WebAPIToken
class WebAPIResource(RBResourceMixin, DjbletsWebAPIResource):
"""A specialization of the Djblets WebAPIResource for Review Board."""
#: An optional set of required features to communicate with this resource.
#:
#: If no features are listed here, the resource will behave normally.
#: However, if one or more features are listed here and are **not**
#: enabled, the resource will return a 403 Forbidden error.
required_features = []
def __init__(self, *args, **kwargs):
super(WebAPIResource, self).__init__(*args, **kwargs)
self.extra_data_access_callbacks = CallbackRegistry()
def has_access_permissions(self, *args, **kwargs):
# By default, raise an exception if this is called. Specific resources
# will have to explicitly override this and opt-in to access.
raise NotImplementedError(
'%s must provide a has_access_permissions method'
% self.__class__.__name__)
def serialize_extra_data_field(self, obj, request=None):
"""Serialize a resource's ``extra_data`` field.
Args:
obj (django.db.models.Model):
The model of a given resource.
request (HttpRequest):
The HTTP request from the client.
Returns:
dict:
                The serialized ``extra_data`` field, or ``None`` if the
                object has no ``extra_data``.
"""
if obj.extra_data is not None:
return self._strip_private_data(obj.extra_data)
return None
@webapi_check_login_required
@webapi_check_local_site
@augment_method_from(DjbletsWebAPIResource)
def get(self, *args, **kwargs):
"""Returns the serialized object for the resource.
This will require login if anonymous access isn't enabled on the
site.
"""
pass
@webapi_check_login_required
@webapi_check_local_site
@webapi_request_fields(
optional=dict({
'counts-only': {
'type': bool,
'description': 'If specified, a single ``count`` field is '
'returned with the number of results, instead '
'of the results themselves.',
},
}, **DjbletsWebAPIResource.get_list.optional_fields),
required=DjbletsWebAPIResource.get_list.required_fields,
allow_unknown=True
)
def get_list(self, request, *args, **kwargs):
"""Returns a list of objects.
This will require login if anonymous access isn't enabled on the
site.
If ``?counts-only=1`` is passed on the URL, then this will return
only a ``count`` field with the number of entries, instead of the
serialized objects.
"""
if self.model and request.GET.get('counts-only', False):
return 200, {
'count': self.get_queryset(request, is_list=True,
*args, **kwargs).count()
}
else:
return self._get_list_impl(request, *args, **kwargs)
@webapi_login_required
@webapi_check_local_site
@augment_method_from(DjbletsWebAPIResource)
def delete(self, *args, **kwargs):
pass
def _get_list_impl(self, request, *args, **kwargs):
"""Actual implementation to return the list of results.
        By default this calls the parent WebAPIResource.get_list, but
        subclasses can override it to provide a more customized
        implementation while still retaining the ?counts-only=1
        functionality.
"""
return super(WebAPIResource, self).get_list(request, *args, **kwargs)
def can_import_extra_data_field(self, obj, field):
"""Returns whether a particular field in extra_data can be imported.
Subclasses can use this to limit which fields are imported by
import_extra_data. By default, all fields can be imported.
"""
return True
def call_method_view(self, request, method, view, *args, **kwargs):
"""Call the given method view.
The default behaviour is to call the given ``view`` passing in all
``args`` and ``kwargs``. However, Review Board allows certain resources
to be disabled by setting the :py:attr:`~required_features` attribute.
If a feature specified in that list is disabled, this method will
return a 403 Forbidden response instead of calling the method view.
In addition, Review Board has token access policies. If the client is
authenticated with an API token, the token's access policies will be
checked before calling the view. If the operation is disallowed, a 403
Forbidden response will be returned.
Only if those two conditions are met will the view actually be called.
Args:
request (django.http.HttpRequest):
The current HTTP request.
method (unicode):
The HTTP method.
view (callable):
The view.
*args (tuple):
Additional positional arguments.
**kwargs (dict):
Additional keyword arguments.
Returns:
WebAPIError or tuple:
Either a 403 Forbidden error or the result of calling the method
view, which will either be a
:py:class:`~djblets.webapi.errors.WebAPIError` or a 2-tuple of the
HTTP status code and a dict indicating the JSON response from the
view.
"""
for feature in self.required_features:
if not feature.is_enabled(request=request):
logging.warning('Disallowing %s for API resource %r because '
'feature %s is not enabled',
method, self, feature.feature_id,
request=request)
return PERMISSION_DENIED
return super(WebAPIResource, self).call_method_view(
request, method, view, *args, **kwargs)
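    # A hedged sketch (hypothetical feature object and resource name) of
    # gating a resource behind a feature flag via ``required_features``, per
    # the behaviour described above:
    #
    #     class ReportsResource(WebAPIResource):    # hypothetical
    #         required_features = [my_feature]      # my_feature: a Feature
    #
    # When ``my_feature.is_enabled(request=request)`` is False, any call
    # into this resource returns PERMISSION_DENIED before the view runs.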
def build_resource_url(self, name, local_site_name=None, request=None,
**kwargs):
"""Build the URL to a resource, factoring in Local Sites.
Args:
name (unicode):
The resource name.
local_site_name (unicode):
The LocalSite name.
request (django.http.HttpRequest):
The HTTP request from the client.
kwargs (dict):
The keyword arguments needed for URL resolution.
Returns:
unicode: The resulting absolute URL to the resource.
"""
url = local_site_reverse(
self._build_named_url(name),
local_site_name=local_site_name,
request=request,
kwargs=kwargs)
if request:
return request.build_absolute_uri(url)
return url
def _get_local_site(self, local_site_name):
if local_site_name:
return LocalSite.objects.get(name=local_site_name)
else:
return None
def _get_form_errors(self, form):
fields = {}
for field in form.errors:
fields[field] = [force_unicode(e) for e in form.errors[field]]
return fields
def import_extra_data(self, obj, extra_data, fields):
for key, value in six.iteritems(fields):
if key.startswith('extra_data.'):
key = key[EXTRA_DATA_LEN:]
if self._should_process_extra_data(key, obj):
if value != '':
if value in ('true', 'True', 'TRUE'):
value = True
elif value in ('false', 'False', 'FALSE'):
value = False
else:
try:
value = int(value)
except ValueError:
try:
value = float(value)
except ValueError:
pass
extra_data[key] = value
elif key in extra_data:
del extra_data[key]
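    # Illustrative sketch (hypothetical values) of the coercion rules in
    # import_extra_data above: string booleans become bools, numeric strings
    # become int/float, other strings pass through, and an empty value
    # deletes the key.
    #
    #     fields = {'extra_data.flag': 'true', 'extra_data.count': '7',
    #               'extra_data.ratio': '0.5', 'extra_data.stale': ''}
    #     resource.import_extra_data(obj, obj.extra_data, fields)
    #     # obj.extra_data == {'flag': True, 'count': 7, 'ratio': 0.5}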
def _should_process_extra_data(self, key, obj):
"""Check if an ``extra_data`` field should be processed.
Args:
key (unicode):
A key for an extra_data field.
obj (django.db.models.Model):
The model of a given resource.
Returns:
bool:
Whether the extra_data field should be processed or not.
"""
return (self.can_import_extra_data_field(obj, key) and
not key.startswith(PRIVATE_KEY_PREFIX) and
self.get_extra_data_field_state((key,)) ==
ExtraDataAccessLevel.ACCESS_STATE_PUBLIC)
def _build_redirect_with_args(self, request, new_url):
"""Builds a redirect URL with existing query string arguments.
This will construct a URL that contains all the query string arguments
provided in this request.
        This will not include the special arguments handled by the base
        WebAPIResource in Djblets. Those are re-added automatically, so
        there's no need to include them twice here.
"""
query_str = '&'.join([
'%s=%s' % (urllib_quote(key), urllib_quote(value))
for key, value in six.iteritems(request.GET)
if key not in SPECIAL_PARAMS
])
if '?' in new_url:
new_url += '&' + query_str
else:
new_url += '?' + query_str
return new_url
def get_extra_data_field_state(self, key_path):
"""Return the state of a registered ``extra_data`` key path.
Example:
.. code-block:: python
resource.extra_data = {
'public': 'foo',
'private': 'secret',
'data': {
'secret_key': 'secret_data',
},
'readonly': 'bar',
}
key_path = ('data', 'secret_key',)
            resource.get_extra_data_field_state(key_path)
            # -> ExtraDataAccessLevel.ACCESS_STATE_PRIVATE, assuming a
            # registered callback marks this key path as private
Args:
key_path (tuple):
                The path of the ``extra_data`` key as a :py:class:`tuple` of
:py:class:`unicode` strings.
Returns:
int:
The access state of the provided key.
"""
for callback in self.extra_data_access_callbacks:
value = callback(key_path)
if value is not None:
return value
return ExtraDataAccessLevel.ACCESS_STATE_PUBLIC
def _strip_private_data(self, extra_data, parent_path=None):
"""Strip private fields from an extra data object.
This function creates a clone of the provided object and traverses it
and any nested dictionaries to remove any private fields.
Args:
extra_data (dict):
The object from which to strip private fields.
parent_path (tuple):
Parent key path leading to provided ``extra_data``
dictionary.
Returns:
dict:
A clone of the ``extra_data`` stripped of its private fields.
"""
clone = copy.copy(extra_data)
for field_name, value in six.iteritems(extra_data):
if parent_path:
path = parent_path + (field_name,)
else:
path = (field_name,)
if (field_name.startswith(PRIVATE_KEY_PREFIX) or
self.get_extra_data_field_state(path) ==
ExtraDataAccessLevel.ACCESS_STATE_PRIVATE):
del clone[field_name]
elif isinstance(value, dict):
clone[field_name] = self._strip_private_data(value, path)
return clone
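    # Illustrative sketch (hypothetical data) of _strip_private_data:
    # private-prefixed keys and callback-marked-private paths are removed
    # recursively, while everything else is kept.
    #
    #     resource._strip_private_data({
    #         '__token': 'hidden',
    #         'public': 'kept',
    #         'nested': {'__secret': 'hidden', 'name': 'kept'},
    #     })
    #     # -> {'public': 'kept', 'nested': {'name': 'kept'}}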
|
|
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Sales: test suites
"""
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User as DjangoUser
from treeio.core.models import User, Group, Perspective, ModuleSetting
from treeio.identities.models import Contact, ContactType
from treeio.sales.models import SaleOrder, Product, OrderedProduct, Subscription, \
SaleStatus, SaleSource, Lead, Opportunity
from treeio.finance.models import Currency
class SalesViewsTest(TestCase):
"Sales functional tests for views"
username = "test"
password = "password"
prepared = False
def setUp(self):
"Initial Setup"
if not self.prepared:
self.group, created = Group.objects.get_or_create(name='test')
duser, created = DjangoUser.objects.get_or_create(
username=self.username)
duser.set_password(self.password)
duser.save()
self.user, created = User.objects.get_or_create(user=duser)
self.user.save()
perspective, created = Perspective.objects.get_or_create(
name='default')
perspective.set_default_user()
perspective.save()
ModuleSetting.set('default_perspective', perspective.id)
self.contact_type = ContactType()
self.contact_type.slug = 'machine'
self.contact_type.name = 'machine'
self.contact_type.save()
self.contact = Contact()
self.contact.contact_type = self.contact_type
self.contact.set_default_user()
self.contact.save()
self.assertNotEquals(self.contact.id, None)
self.status = SaleStatus()
self.status.set_default_user()
self.status.save()
self.assertNotEquals(self.status.id, None)
self.currency = Currency(code="GBP",
name="Pounds",
symbol="L",
is_default=True)
self.currency.save()
self.source = SaleSource()
self.source.set_default_user()
self.source.save()
self.assertNotEquals(self.source.id, None)
self.product = Product(name="Test")
self.product.product_type = 'service'
self.product.active = True
self.product.sell_price = 10
self.product.buy_price = 100
self.product.set_default_user()
self.product.save()
self.assertNotEquals(self.product.id, None)
self.subscription = Subscription()
self.subscription.client = self.contact
self.subscription.set_default_user()
self.subscription.save()
self.assertNotEquals(self.subscription.id, None)
self.lead = Lead()
self.lead.contact_method = 'email'
self.lead.status = self.status
self.lead.contact = self.contact
self.lead.set_default_user()
self.lead.save()
self.assertNotEquals(self.lead.id, None)
self.opportunity = Opportunity()
self.opportunity.lead = self.lead
self.opportunity.contact = self.contact
self.opportunity.status = self.status
self.opportunity.amount = 100
self.opportunity.amount_currency = self.currency
self.opportunity.amount_display = 120
self.opportunity.set_default_user()
self.opportunity.save()
self.assertNotEquals(self.opportunity.id, None)
self.order = SaleOrder(reference="Test")
self.order.opportunity = self.opportunity
self.order.status = self.status
self.order.source = self.source
self.order.currency = self.currency
self.order.total = 0
self.order.total_display = 0
self.order.set_default_user()
self.order.save()
self.assertNotEquals(self.order.id, None)
self.ordered_product = OrderedProduct()
self.ordered_product.product = self.product
self.ordered_product.order = self.order
self.ordered_product.rate = 0
self.ordered_product.subscription = self.subscription
self.ordered_product.set_default_user()
self.ordered_product.save()
self.assertNotEquals(self.ordered_product.id, None)
self.client = Client()
self.prepared = True
######################################
# Testing views when user is logged in
######################################
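    # Hedged note (not part of the original suite): every test below repeats
    # the same login-then-GET pattern; it could be factored into a helper
    # along these lines.
    #
    #     def _get_logged_in(self, url_name, *args):
    #         response = self.client.post('/accounts/login',
    #                                     {'username': self.username,
    #                                      'password': self.password})
    #         self.assertRedirects(response, '/')
    #         return self.client.get(reverse(url_name, args=list(args)))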
def test_index(self):
"Test page with login at /sales/index"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_index'))
self.assertEquals(response.status_code, 200)
def test_index_open(self):
"Test page with login at /sales/open"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_index_open'))
self.assertEquals(response.status_code, 200)
def test_index_assigned(self):
"Test page with login at /sales/index/assigned"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_index_assigned'))
self.assertEquals(response.status_code, 200)
# Orders
def test_order_add(self):
"Test page with login at /sales/order/add"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_order_add'))
self.assertEquals(response.status_code, 200)
def test_order_add_lead(self):
"Test page with login at /sales/order/add/lead/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_order_add_with_lead', args=[self.lead.id]))
self.assertEquals(response.status_code, 200)
def test_order_add_opportunity(self):
"Test page with login at /sales/order/add/opportunity/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_order_add_with_opportunity', args=[self.opportunity.id]))
self.assertEquals(response.status_code, 200)
def test_order_edit(self):
"Test page with login at /sales/order/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_order_edit', args=[self.order.id]))
self.assertEquals(response.status_code, 200)
def test_order_view(self):
"Test page with login at /sales/order/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_order_view', args=[self.order.id]))
self.assertEquals(response.status_code, 200)
def test_order_delete(self):
"Test page with login at /sales/order/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_order_delete', args=[self.order.id]))
self.assertEquals(response.status_code, 200)
def test_order_invoice_view(self):
"Test page with login at /sales/order/invoice/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_order_invoice_view', args=[self.order.id]))
self.assertEquals(response.status_code, 200)
# Products
def test_product_index(self):
"Test page with login at /sales/product/index"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_product_index'))
self.assertEquals(response.status_code, 200)
def test_product_add(self):
"Test page with login at /sales/product/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_product_add'))
self.assertEquals(response.status_code, 200)
def test_product_add_parent(self):
"Test page with login at /sales/product/add"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_product_add', args=[self.product.id]))
self.assertEquals(response.status_code, 200)
def test_product_edit(self):
"Test page with login at /sales/product/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_product_edit', args=[self.product.id]))
self.assertEquals(response.status_code, 200)
def test_product_view(self):
"Test page with login at /sales/product/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_product_view', args=[self.product.id]))
self.assertEquals(response.status_code, 200)
def test_product_delete(self):
"Test page with login at /sales/product/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_product_delete', args=[self.product.id]))
self.assertEquals(response.status_code, 200)
# Settings
def test_settings_view(self):
"Test page with login at /sales/settings/view"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_settings_view'))
self.assertEquals(response.status_code, 200)
def test_settings_edit(self):
"Test page with login at /sales/settings/edit"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_settings_edit'))
self.assertEquals(response.status_code, 200)
# Statuses
def test_status_add(self):
"Test page with login at /sales/status/add"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_status_add'))
self.assertEquals(response.status_code, 200)
def test_status_edit(self):
"Test page with login at /sales/status/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_status_edit', args=[self.status.id]))
self.assertEquals(response.status_code, 200)
def test_status_view(self):
"Test page with login at /sales/status/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_status_view', args=[self.status.id]))
self.assertEquals(response.status_code, 200)
def test_status_delete(self):
"Test page with login at /sales/status/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_status_delete', args=[self.status.id]))
self.assertEquals(response.status_code, 200)
# Subscriptions
def test_subscription_add(self):
"Test page with login at /sales/subscription/add"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_subscription_add'))
self.assertEquals(response.status_code, 200)
def test_subscription_add_product(self):
"Test page with login at /sales/subscription/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_subscription_add_with_product', args=[self.product.id]))
self.assertEquals(response.status_code, 200)
def test_subscription_edit(self):
"Test page with login at /sales/subscription/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_subscription_edit', args=[self.subscription.id]))
self.assertEquals(response.status_code, 200)
def test_subscription_view(self):
"Test page with login at /sales/subscription/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_subscription_view', args=[self.subscription.id]))
self.assertEquals(response.status_code, 200)
def test_subscription_delete(self):
"Test page with login at /sales/subscription/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_subscription_delete', args=[self.subscription.id]))
self.assertEquals(response.status_code, 200)
# Ordered Products
def test_ordered_product_add(self):
"Test page with login at /sales/ordered_product/add/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_ordered_product_add', args=[self.order.id]))
self.assertEquals(response.status_code, 200)
def test_ordered_product_edit(self):
"Test page with login at /sales/ordered_product/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_ordered_product_edit', args=[self.ordered_product.id]))
self.assertEquals(response.status_code, 200)
def test_ordered_product_view(self):
"Test page with login at /sales/ordered_product/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_ordered_product_view', args=[self.ordered_product.id]))
self.assertEquals(response.status_code, 200)
def test_ordered_product_delete(self):
"Test page with login at /sales/ordered_product/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_ordered_product_delete', args=[self.ordered_product.id]))
self.assertEquals(response.status_code, 200)
# Sources
def test_source_add(self):
"Test page with login at /sales/source/add"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_source_add'))
self.assertEquals(response.status_code, 200)
def test_source_edit(self):
"Test page with login at /sales/source/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_source_edit', args=[self.source.id]))
self.assertEquals(response.status_code, 200)
def test_source_view(self):
"Test page with login at /sales/source/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_source_view', args=[self.source.id]))
self.assertEquals(response.status_code, 200)
def test_source_delete(self):
"Test page with login at /sales/source/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_source_delete', args=[self.source.id]))
self.assertEquals(response.status_code, 200)
# Leads
def test_lead_index(self):
"Test page with login at /sales/lead/index"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_lead_index'))
self.assertEquals(response.status_code, 200)
def test_lead_index_assigned(self):
"Test page with login at /sales/lead/index/assigned"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_lead_index_assigned'))
self.assertEquals(response.status_code, 200)
def test_lead_add(self):
"Test page with login at /sales/lead/add"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_lead_add'))
self.assertEquals(response.status_code, 200)
def test_lead_edit(self):
"Test page with login at /sales/lead/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_lead_edit', args=[self.lead.id]))
self.assertEquals(response.status_code, 200)
def test_lead_view(self):
"Test page with login at /sales/lead/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_lead_view', args=[self.lead.id]))
self.assertEquals(response.status_code, 200)
def test_lead_delete(self):
"Test page with login at /sales/lead/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_lead_delete', args=[self.lead.id]))
self.assertEquals(response.status_code, 200)
# Opportunities
def test_opportunity_index(self):
"Test page with login at /sales/opportunity/index"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_opportunity_index'))
self.assertEquals(response.status_code, 200)
def test_opportunity_index_assigned(self):
"Test page with login at /sales/opportunity/index/assigned"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_opportunity_index_assigned'))
self.assertEquals(response.status_code, 200)
def test_opportunity_add(self):
"Test page with login at /sales/opportunity/add"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(reverse('sales_opportunity_add'))
self.assertEquals(response.status_code, 200)
def test_opportunity_add_lead(self):
"Test page with login at /sales/opportunity/add/lead/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_opportunity_add_with_lead', args=[self.lead.id]))
self.assertEquals(response.status_code, 200)
def test_opportunity_edit(self):
"Test page with login at /sales/opportunity/edit/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_opportunity_edit', args=[self.opportunity.id]))
self.assertEquals(response.status_code, 200)
def test_opportunity_view(self):
"Test page with login at /sales/opportunity/view/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_opportunity_view', args=[self.opportunity.id]))
self.assertEquals(response.status_code, 200)
def test_opportunity_delete(self):
"Test page with login at /sales/opportunity/delete/"
response = self.client.post('/accounts/login',
{'username': self.username, 'password': self.password})
self.assertRedirects(response, '/')
response = self.client.get(
reverse('sales_opportunity_delete', args=[self.opportunity.id]))
self.assertEquals(response.status_code, 200)
######################################
# Testing views when user is not logged in
######################################
def test_index_anonymous(self):
"Test index page at /sales/"
response = self.client.get('/sales/')
# Redirects as unauthenticated
self.assertRedirects(response, reverse('user_login'))
def test_index_open_out(self):
"Testing /sales/open"
response = self.client.get(reverse('sales_index_open'))
self.assertRedirects(response, reverse('user_login'))
def test_index_assigned_out(self):
"Testing /sales/index/assigned"
response = self.client.get(reverse('sales_index_assigned'))
self.assertRedirects(response, reverse('user_login'))
# Orders
def test_order_add_out(self):
"Testing /sales/order/add"
response = self.client.get(reverse('sales_order_add'))
self.assertRedirects(response, reverse('user_login'))
def test_order_add_lead_out(self):
"Testing /sales/order/add/lead/"
response = self.client.get(
reverse('sales_order_add_with_lead', args=[self.lead.id]))
self.assertRedirects(response, reverse('user_login'))
def test_order_add_opportunity_out(self):
"Testing /sales/order/add/opportunity/"
response = self.client.get(
reverse('sales_order_add_with_opportunity', args=[self.opportunity.id]))
self.assertRedirects(response, reverse('user_login'))
def test_order_edit_out(self):
"Testing /sales/order/edit/"
response = self.client.get(
reverse('sales_order_edit', args=[self.order.id]))
self.assertRedirects(response, reverse('user_login'))
def test_order_view_out(self):
"Testing /sales/order/view/"
response = self.client.get(
reverse('sales_order_view', args=[self.order.id]))
self.assertRedirects(response, reverse('user_login'))
def test_order_delete_out(self):
"Testing /sales/order/delete/"
response = self.client.get(
reverse('sales_order_delete', args=[self.order.id]))
self.assertRedirects(response, reverse('user_login'))
def test_order_invoice_view_out(self):
"Testing /sales/order/invoice/"
response = self.client.get(
reverse('sales_order_invoice_view', args=[self.order.id]))
self.assertRedirects(response, reverse('user_login'))
# Products
def test_product_index_out(self):
"Testing /sales/product/index"
response = self.client.get(reverse('sales_product_index'))
self.assertRedirects(response, reverse('user_login'))
def test_product_add_out(self):
"Testing /sales/product/add/"
response = self.client.get(reverse('sales_product_add'))
self.assertRedirects(response, reverse('user_login'))
def test_product_add_parent_out(self):
"Testing /sales/product/add"
response = self.client.get(
reverse('sales_product_add', args=[self.product.id]))
self.assertRedirects(response, reverse('user_login'))
def test_product_edit_out(self):
"Testing /sales/product/edit/"
response = self.client.get(
reverse('sales_product_edit', args=[self.product.id]))
self.assertRedirects(response, reverse('user_login'))
def test_product_view_out(self):
"Testing /sales/product/view/"
response = self.client.get(
reverse('sales_product_view', args=[self.product.id]))
self.assertRedirects(response, reverse('user_login'))
def test_product_delete_out(self):
"Testing /sales/product/delete/"
response = self.client.get(
reverse('sales_product_delete', args=[self.product.id]))
self.assertRedirects(response, reverse('user_login'))
# Settings
def test_settings_view_out(self):
"Testing /sales/settings/view"
response = self.client.get(reverse('sales_settings_view'))
self.assertRedirects(response, reverse('user_login'))
def test_settings_edit_out(self):
"Testing /sales/settings/edit"
response = self.client.get(reverse('sales_settings_edit'))
self.assertRedirects(response, reverse('user_login'))
# Statuses
def test_status_add_out(self):
"Testing /sales/status/add"
response = self.client.get(reverse('sales_status_add'))
self.assertRedirects(response, reverse('user_login'))
def test_status_edit_out(self):
"Testing /sales/status/edit/"
response = self.client.get(
reverse('sales_status_edit', args=[self.status.id]))
self.assertRedirects(response, reverse('user_login'))
def test_status_view_out(self):
"Testing /sales/status/view/"
response = self.client.get(
reverse('sales_status_view', args=[self.status.id]))
self.assertRedirects(response, reverse('user_login'))
def test_status_delete_out(self):
"Testing /sales/status/delete/"
response = self.client.get(
reverse('sales_status_delete', args=[self.status.id]))
self.assertRedirects(response, reverse('user_login'))
# Subscriptions
def test_subscription_add_out(self):
"Testing /sales/subscription/add"
response = self.client.get(reverse('sales_subscription_add'))
self.assertRedirects(response, reverse('user_login'))
def test_subscription_add_product_out(self):
"Testing /sales/subscription/add/"
response = self.client.get(
reverse('sales_subscription_add_with_product', args=[self.product.id]))
self.assertRedirects(response, reverse('user_login'))
def test_subscription_edit_out(self):
"Testing /sales/subscription/edit/"
response = self.client.get(
reverse('sales_subscription_edit', args=[self.subscription.id]))
self.assertRedirects(response, reverse('user_login'))
def test_subscription_view_out(self):
"Testing /sales/subscription/view/"
response = self.client.get(
reverse('sales_subscription_view', args=[self.subscription.id]))
self.assertRedirects(response, reverse('user_login'))
def test_subscription_delete_out(self):
"Testing /sales/subscription/delete/"
response = self.client.get(
reverse('sales_subscription_delete', args=[self.subscription.id]))
self.assertRedirects(response, reverse('user_login'))
# Ordered Products
def test_ordered_product_add_out(self):
"Testing /sales/ordered_product/add/"
response = self.client.get(
reverse('sales_ordered_product_add', args=[self.order.id]))
self.assertRedirects(response, reverse('user_login'))
def test_ordered_product_edit_out(self):
"Testing /sales/ordered_product/edit/"
response = self.client.get(
reverse('sales_ordered_product_edit', args=[self.ordered_product.id]))
self.assertRedirects(response, reverse('user_login'))
def test_ordered_product_view_out(self):
"Testing /sales/ordered_product/view/"
response = self.client.get(
reverse('sales_ordered_product_view', args=[self.ordered_product.id]))
self.assertRedirects(response, reverse('user_login'))
def test_ordered_product_delete_out(self):
"Testing /sales/ordered_product/delete/"
response = self.client.get(
reverse('sales_ordered_product_delete', args=[self.ordered_product.id]))
self.assertRedirects(response, reverse('user_login'))
# Sources
def test_source_add_out(self):
"Testing /sales/source/add"
response = self.client.get(reverse('sales_source_add'))
self.assertRedirects(response, reverse('user_login'))
def test_source_edit_out(self):
"Testing /sales/source/edit/"
response = self.client.get(
reverse('sales_source_edit', args=[self.source.id]))
self.assertRedirects(response, reverse('user_login'))
def test_source_view_out(self):
"Testing /sales/source/view/"
response = self.client.get(
reverse('sales_source_view', args=[self.source.id]))
self.assertRedirects(response, reverse('user_login'))
def test_source_delete_out(self):
"Testing /sales/source/delete/"
response = self.client.get(
reverse('sales_source_delete', args=[self.source.id]))
self.assertRedirects(response, reverse('user_login'))
# Leads
def test_lead_index_out(self):
"Testing /sales/lead/index"
response = self.client.get(reverse('sales_lead_index'))
self.assertRedirects(response, reverse('user_login'))
def test_lead_index_assigned_out(self):
"Testing /sales/lead/index/assigned"
response = self.client.get(reverse('sales_lead_index_assigned'))
self.assertRedirects(response, reverse('user_login'))
def test_lead_add_out(self):
"Testing /sales/lead/add"
response = self.client.get(reverse('sales_lead_add'))
self.assertRedirects(response, reverse('user_login'))
def test_lead_edit_out(self):
"Testing /sales/lead/edit/"
response = self.client.get(
reverse('sales_lead_edit', args=[self.lead.id]))
self.assertRedirects(response, reverse('user_login'))
def test_lead_view_out(self):
"Testing /sales/lead/view/"
response = self.client.get(
reverse('sales_lead_view', args=[self.lead.id]))
self.assertRedirects(response, reverse('user_login'))
def test_lead_delete_out(self):
"Testing /sales/lead/delete/"
response = self.client.get(
reverse('sales_lead_delete', args=[self.lead.id]))
self.assertRedirects(response, reverse('user_login'))
# Opportunities
def test_opportunity_index_out(self):
"Testing /sales/opportunity/index/"
response = self.client.get(reverse('sales_opportunity_index'))
self.assertRedirects(response, reverse('user_login'))
def test_opportunity_index_assigned_out(self):
"Testing /sales/opportunity/index/assigned/"
response = self.client.get(reverse('sales_opportunity_index_assigned'))
self.assertRedirects(response, reverse('user_login'))
def test_opportunity_add_out(self):
"Testing /sales/opportunity/add/"
response = self.client.get(reverse('sales_opportunity_add'))
self.assertRedirects(response, reverse('user_login'))
def test_opportunity_add_lead_out(self):
"Testing /sales/opportunity/add/lead/"
response = self.client.get(
reverse('sales_opportunity_add_with_lead', args=[self.lead.id]))
self.assertRedirects(response, reverse('user_login'))
def test_opportunity_edit_out(self):
"Testing /sales/opportunity/edit/"
response = self.client.get(
reverse('sales_opportunity_edit', args=[self.opportunity.id]))
self.assertRedirects(response, reverse('user_login'))
def test_opportunity_view_out(self):
"Testing /sales/opportunity/view/"
response = self.client.get(
reverse('sales_opportunity_view', args=[self.opportunity.id]))
self.assertRedirects(response, reverse('user_login'))
def test_opportunity_delete_out(self):
"Testing /sales/opportunity/delete/"
response = self.client.get(
reverse('sales_opportunity_delete', args=[self.opportunity.id]))
self.assertRedirects(response, reverse('user_login'))
|
|
#!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
# Copyright 2012 Google Inc. All Rights Reserved.
"""Test the cron_view interface."""
from grr.gui import runtests_test
from grr.lib import cron
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import test_lib
class TestCronView(test_lib.GRRSeleniumTest):
"""Test the Cron view GUI."""
def setUp(self):
super(TestCronView, self).setUp()
with self.ACLChecksDisabled():
cron.ScheduleSystemCronFlows(token=self.token)
cron.CRON_MANAGER.RunOnce(token=self.token)
def testCronView(self):
self.Open("/")
self.WaitUntil(self.IsElementPresent, "client_query")
self.Click("css=a[grrtarget=ManageCron]")
# Table should contain Last Run
self.WaitUntil(self.IsTextPresent, "Last Run")
# Table should contain system cron jobs
self.WaitUntil(self.IsTextPresent, "GRRVersionBreakDown")
self.WaitUntil(self.IsTextPresent, "LastAccessStats")
self.WaitUntil(self.IsTextPresent, "OSBreakDown")
# Select a Cron.
self.Click("css=td:contains('OSBreakDown')")
# Check that there's one flow in the list.
self.WaitUntil(self.IsElementPresent,
"css=#main_bottomPane td:contains('OSBreakDown')")
def testMessageIsShownWhenNoCronJobSelected(self):
self.Open("/")
self.WaitUntil(self.IsElementPresent, "client_query")
self.Click("css=a[grrtarget=ManageCron]")
self.WaitUntil(self.IsTextPresent,
"Please select a cron job to see the details.")
def testShowsCronJobDetailsOnClick(self):
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
self.Click("css=td:contains('OSBreakDown')")
# Tabs should appear in the bottom pane
self.WaitUntil(self.IsElementPresent, "css=#main_bottomPane #Details")
self.WaitUntil(self.IsElementPresent, "css=#main_bottomPane #Flows")
self.WaitUntil(self.IsTextPresent, "CURRENT_FLOW_URN")
self.WaitUntil(self.IsTextPresent, "FLOW_NAME")
self.WaitUntil(self.IsTextPresent, "FLOW_ARGS")
# Click on "Flows" tab
self.Click("css=#main_bottomPane #Flows")
# Click on the first flow and wait for flow details panel to appear.
self.Click("css=#main_bottomPane td:contains('OSBreakDown')")
self.WaitUntil(self.IsTextPresent, "FLOW_STATE")
self.WaitUntil(self.IsTextPresent, "next_states")
self.WaitUntil(self.IsTextPresent, "outstanding_requests")
# Close the panel.
self.Click("css=#main_bottomPane .panel button.close")
self.WaitUntilNot(self.IsTextPresent, "FLOW_STATE")
self.WaitUntilNot(self.IsTextPresent, "next_states")
self.WaitUntilNot(self.IsTextPresent, "outstanding_requests")
def testToolbarStateForDisabledCronJob(self):
with self.ACLChecksDisabled():
cron.CRON_MANAGER.DisableJob(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
self.Click("css=td:contains('OSBreakDown')")
self.assertTrue(self.IsElementPresent(
"css=button[name=EnableCronJob]:not([disabled])"))
self.assertTrue(self.IsElementPresent(
"css=button[name=DisableCronJob][disabled]"))
self.assertTrue(self.IsElementPresent(
"css=button[name=DeleteCronJob]:not([disabled])"))
def testToolbarStateForEnabledCronJob(self):
with self.ACLChecksDisabled():
cron.CRON_MANAGER.EnableJob(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
self.Click("css=td:contains('OSBreakDown')")
self.assertTrue(self.IsElementPresent(
"css=button[name=EnableCronJob][disabled]"))
self.assertTrue(self.IsElementPresent(
"css=button[name=DisableCronJob]:not([disabled])"))
self.assertTrue(self.IsElementPresent(
"css=button[name=DeleteCronJob]:not([disabled])"))
def testEnableCronJob(self):
with self.ACLChecksDisabled():
cron.CRON_MANAGER.DisableJob(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
self.Click("css=td:contains('OSBreakDown')")
# Click on Enable button and check that dialog appears.
self.Click("css=button[name=EnableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to enable this cron job?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsTextPresent, "Create a new approval")
self.Click("css=#acl_dialog button[name=Close]")
# Wait for dialog to disappear.
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
with self.ACLChecksDisabled():
self.GrantCronJobApproval(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
# Click on Enable button and check that dialog appears.
self.Click("css=button[name=EnableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to enable this cron job?")
# Click on "Proceed" and wait for success label to appear.
# Also check that "Proceed" button gets disabled.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Cron job was enabled successfully!")
self.assertTrue(self.IsElementPresent("css=button[name=Proceed][disabled]"))
# Click on "Cancel" and check that dialog disappears.
self.Click("css=button[name=Cancel]")
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
# View should be refreshed automatically.
self.WaitUntil(self.IsTextPresent, "OSBreakDown")
self.WaitUntil(self.IsElementPresent,
"css=tr:contains('OSBreakDown') *[state=enabled]")
def testDisableCronJob(self):
with self.ACLChecksDisabled():
cron.CRON_MANAGER.EnableJob(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
self.Click("css=td:contains('OSBreakDown')")
    # Click on Disable button and check that dialog appears.
self.Click("css=button[name=DisableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to disable this cron job?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsTextPresent, "Create a new approval")
self.Click("css=#acl_dialog button[name=Close]")
# Wait for dialog to disappear.
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
with self.ACLChecksDisabled():
self.GrantCronJobApproval(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
# Click on Disable button and check that dialog appears.
self.Click("css=button[name=DisableCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to disable this cron job?")
# Click on "Proceed" and wait for success label to appear.
# Also check that "Proceed" button gets disabled.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Cron job was disabled successfully!")
self.assertTrue(self.IsElementPresent("css=button[name=Proceed][disabled]"))
# Click on "Cancel" and check that dialog disappears.
self.Click("css=button[name=Cancel]")
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
# View should be refreshed automatically.
self.WaitUntil(self.IsTextPresent, "OSBreakDown")
self.WaitUntil(self.IsElementPresent,
"css=tr:contains('OSBreakDown') *[state=disabled]")
def testDeleteCronJob(self):
with self.ACLChecksDisabled():
cron.CRON_MANAGER.EnableJob(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
self.Click("css=td:contains('OSBreakDown')")
    # Click on Delete button and check that dialog appears.
self.Click("css=button[name=DeleteCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to delete this cron job?")
# Click on "Proceed" and wait for authorization dialog to appear.
self.Click("css=button[name=Proceed]")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsTextPresent, "Create a new approval")
self.Click("css=#acl_dialog button[name=Close]")
# Wait for dialog to disappear.
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
with self.ACLChecksDisabled():
self.GrantCronJobApproval(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))
    # Click on Delete button and check that dialog appears.
self.Click("css=button[name=DeleteCronJob]")
self.WaitUntil(self.IsTextPresent,
"Are you sure you want to delete this cron job?")
# Click on "Proceed" and wait for success label to appear.
# Also check that "Proceed" button gets disabled.
self.Click("css=button[name=Proceed]")
self.WaitUntil(self.IsTextPresent, "Cron job was deleted successfully!")
self.assertTrue(self.IsElementPresent("css=button[name=Proceed][disabled]"))
# Click on "Cancel" and check that dialog disappears.
self.Click("css=button[name=Cancel]")
self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop")
# View should be refreshed automatically.
self.WaitUntil(self.IsElementPresent,
"css=#main_topPane td:contains('GRRVersionBreakDown')")
self.WaitUntilNot(self.IsElementPresent,
"css=#main_topPane td:contains('OSBreakDown')")
def testHuntSchedulingWorksCorrectly(self):
self.Open("/")
self.Click("css=a[grrtarget=ManageCron]")
self.Click("css=button[name=ScheduleHuntCronJob]")
self.WaitUntil(self.IsTextPresent, "What to run?")
# Click on Filesystem item in flows list
self.WaitUntil(self.IsElementPresent, "css=#_Filesystem > ins.jstree-icon")
self.Click("css=#_Filesystem > ins.jstree-icon")
# Click on DownloadDirectory item in Filesystem flows list
self.WaitUntil(self.IsElementPresent,
"link=DownloadDirectory")
self.Click("link=DownloadDirectory")
# Wait for flow configuration form to be rendered (just wait for first
# input field).
self.WaitUntil(self.IsElementPresent,
"css=.Wizard .HuntFormBody input[name=pathspec_path]")
# Change "path", "pathtype", "depth" and "ignore_errors" values
self.Type("css=.Wizard .HuntFormBody input[name=pathspec_path]", "/tmp")
self.Select("css=.Wizard .HuntFormBody select[name=pathspec_pathtype]",
"TSK")
self.Type("css=.Wizard .HuntFormBody input[name=depth]", "42")
self.Click("css=.Wizard .HuntFormBody input[name=ignore_errors]")
# Click on "Next" button
self.Click("css=.Wizard input.Next")
self.WaitUntil(self.IsTextPresent, "Output Processing")
# Configure the hunt to use a collection and also send an email on results.
self.Select("css=.Wizard .Rule:nth-of-type(1) select[name=output_type]",
"Send an email")
self.Type("css=.Wizard .Rule:nth-of-type(1) input[name=email]",
"test@grrserver.com")
self.Click("css=.Wizard input[value='Add another output plugin']")
self.Select("css=.Wizard .Rule:nth-of-type(2) select[name=output_type]",
"Store results in a collection")
# Click on "Next" button
self.Click("css=.Wizard input.Next")
self.WaitUntil(self.IsTextPresent, "Where to run?")
# Create 3 foreman rules
self.WaitUntil(
self.IsElementPresent,
"css=.Wizard .Rule:nth-of-type(1) select[name=rule_type]")
self.Select("css=.Wizard .Rule:nth-of-type(1) select[name=rule_type]",
"Regular expression match")
self.Type("css=.Wizard .Rule:nth-of-type(1) input[name=attribute_name]",
"System")
self.Type("css=.Wizard .Rule:nth-of-type(1) input[name=attribute_regex]",
"Linux")
self.Click("css=.Wizard input[value='Add Rule']")
self.Select("css=.Wizard .Rule:nth-of-type(2) select[name=rule_type]",
"Integer comparison")
self.Type("css=.Wizard .Rule:nth-of-type(2) input[name=attribute_name]",
"Clock")
self.Select("css=.Wizard .Rule:nth-of-type(2) select[name=operator]",
"GREATER_THAN")
self.Type("css=.Wizard .Rule:nth-of-type(2) input[name=value]",
"1336650631137737")
self.Click("css=.Wizard input[value='Add Rule']")
self.Select("css=.Wizard .Rule:nth-of-type(3) select[name=rule_type]",
"Mac OS X systems")
# Click on "Next" button
self.Click("css=.Wizard input.Next")
self.WaitUntil(self.IsTextPresent, "When to run?")
# Select daily periodicity
self.Select("css=.Wizard select[name=periodicity]", "Daily")
# Click on "Next" button
self.Click("css=.Wizard input.Next")
self.WaitUntil(self.IsTextPresent, "Review")
# Check that the arguments summary is present.
self.assertTrue(self.IsTextPresent("Settings"))
self.assertTrue(self.IsTextPresent("pathspec"))
self.assertTrue(self.IsTextPresent("/tmp"))
self.assertTrue(self.IsTextPresent("depth"))
self.assertTrue(self.IsTextPresent("42"))
# Check that output plugins are shown.
self.assertTrue(self.IsTextPresent("Send an email"))
self.assertTrue(self.IsTextPresent("test@grrserver.com"))
self.assertTrue(self.IsTextPresent("Store results in a collection."))
# Check that rules summary is present.
self.assertTrue(self.IsTextPresent("Rules"))
self.assertTrue(self.IsTextPresent("regex_rules"))
self.assertTrue(self.IsTextPresent("actions"))
# Check that periodicity information is present in the review.
self.assertTrue(self.IsTextPresent("Hunt Periodicity"))
self.assertTrue(self.IsTextPresent("Hunt will run daily."))
# Click on "Schedule" button
self.Click("css=.Wizard input.Next")
# This should be rejected now and a form request is made.
self.WaitUntil(self.IsTextPresent,
"Create a new approval request")
# Close the window and check that cron job object was created.
self.Click("css=#acl_dialog button[name=Close]")
# Select newly created cron job.
self.Click("css=td:contains('cron/Hunt_DownloadDirectory_')")
# Check that correct details are displayed in cron job details tab.
self.WaitUntil(self.IsTextPresent, "CreateAndRunGenericHuntFlow")
self.WaitUntil(self.IsTextPresent, "FLOW_ARGS")
self.assertTrue(self.IsTextPresent("Settings"))
self.assertTrue(self.IsTextPresent("pathspec"))
self.assertTrue(self.IsTextPresent("/tmp"))
self.assertTrue(self.IsTextPresent("depth"))
self.assertTrue(self.IsTextPresent("42"))
def main(argv):
# Run the full test suite
runtests_test.SeleniumTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
|