code stringlengths 281 23.7M |
|---|
class MenuButton(JsonDeserializable, JsonSerializable, Dictionaryable):
    """
    Base class for the Telegram ``MenuButton`` family.

    :meth:`de_json` dispatches on the ``type`` field of the incoming JSON and
    builds the matching concrete subclass.  Serialization is implemented by
    the subclasses, so :meth:`to_json` / :meth:`to_dict` raise here.
    """

    @classmethod  # first parameter is `cls` -- this was clearly meant to be a classmethod
    def de_json(cls, json_string):
        """Deserialize *json_string* into the matching MenuButton subclass.

        :param json_string: JSON string or dict (or None).
        :return: a concrete MenuButton subclass instance, or None when
            *json_string* is None.
        :raises KeyError: if the ``type`` field is not a known button type.
        """
        if json_string is None:
            return None
        obj = cls.check_json(json_string)
        # Dispatch table on the 'type' discriminator.  Renamed from `map`
        # to avoid shadowing the builtin.
        subclasses = {
            'commands': MenuButtonCommands,
            'web_app': MenuButtonWebApp,
            'default': MenuButtonDefault,
        }
        return subclasses[obj['type']](**obj)

    def to_json(self):
        # Concrete subclasses provide the serialization.
        raise NotImplementedError

    def to_dict(self):
        # Concrete subclasses provide the serialization.
        raise NotImplementedError
class TestComparisons(unittest.TestCase):
    """Compare the project's path classes against the stdlib ``pathlib`` ones."""

    def test_instance(self):
        """isinstance relationships hold one way but not the other."""
        path_a = pathlib.Path('wcmatch')
        path_b = pypathlib.Path('wcmatch')
        self.assertIsInstance(path_a, pathlib.Path)
        self.assertIsInstance(path_a, pypathlib.Path)
        self.assertNotIsInstance(path_b, pathlib.Path)
        self.assertIsInstance(path_b, pypathlib.Path)

    def test_equal(self):
        """Equality is determined by the path value, not the class."""
        path_a = pathlib.Path('wcmatch')
        path_b = pypathlib.Path('wcmatch')
        path_c = pathlib.Path('docs')
        self.assertEqual(path_a, path_b)
        self.assertNotEqual(path_a, path_c)
        self.assertNotEqual(path_c, path_b)

    def test_pure_equal(self):
        """Pure Windows and POSIX paths never compare equal to each other."""
        win_a = pathlib.PureWindowsPath('wcmatch')
        posix_a = pathlib.PurePosixPath('wcmatch')
        win_b = pypathlib.PureWindowsPath('wcmatch')
        posix_b = pypathlib.PurePosixPath('wcmatch')
        self.assertNotEqual(win_a, posix_a)
        self.assertNotEqual(win_b, posix_b)
        self.assertEqual(win_a, win_b)
        self.assertEqual(posix_a, posix_b)

    def test_flavour_equal(self):
        """Pure and concrete paths of the same flavour all compare equal."""
        pure_a = pathlib.PurePath('wcmatch')
        concrete_a = pathlib.Path('wcmatch')
        pure_b = pypathlib.PurePath('wcmatch')
        concrete_b = pypathlib.Path('wcmatch')
        self.assertEqual(pure_a, concrete_a)
        self.assertEqual(pure_b, concrete_b)
        self.assertEqual(pure_a, pure_b)
        self.assertEqual(concrete_a, concrete_b)
        self.assertEqual(pure_a, concrete_b)
        self.assertEqual(concrete_a, pure_b)

    def test_pickle(self):
        """Pickling round-trips preserve the exact class."""
        pure = pathlib.PurePath('wcmatch')
        concrete = pathlib.Path('wcmatch')
        pure_copy = pickle.loads(pickle.dumps(pure))
        concrete_copy = pickle.loads(pickle.dumps(concrete))
        self.assertEqual(type(pure), type(pure_copy))
        self.assertEqual(type(concrete), type(concrete_copy))
        self.assertNotEqual(type(pure), type(concrete))
        self.assertNotEqual(type(pure_copy), type(concrete_copy))
class OptionSeriesLollipopOnpoint(Options):
    """Generated option class for lollipop-series ``onPoint`` settings.

    NOTE(review): the ``@property`` / ``@id.setter`` decorators were lost;
    without them the second ``def id`` silently replaced the first.  They
    are restored here, matching the accessor pattern of the other generated
    option classes.
    """

    @property
    def connectorOptions(self) -> 'OptionSeriesLollipopOnpointConnectoroptions':
        """Sub-options for the connector line."""
        return self._config_sub_data('connectorOptions', OptionSeriesLollipopOnpointConnectoroptions)

    @property
    def id(self):
        """Id of the linked point/series; defaults to None."""
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def position(self) -> 'OptionSeriesLollipopOnpointPosition':
        """Sub-options controlling the position relative to the point."""
        return self._config_sub_data('position', OptionSeriesLollipopOnpointPosition)
class TestOFPActionCopyTtlIn(unittest.TestCase):
    """Round-trip tests for the OFPAT_COPY_TTL_IN action header."""

    # Expected wire-format constants for a COPY_TTL_IN action header.
    type_ = ofproto.OFPAT_COPY_TTL_IN
    len_ = ofproto.OFP_ACTION_HEADER_SIZE
    fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
    buf = pack(fmt, type_, len_)
    c = OFPActionCopyTtlIn()

    def test_parser(self):
        """Parsing the packed header yields the expected length and type."""
        parsed = self.c.parser(self.buf, 0)
        eq_(parsed.len, self.len_)
        eq_(parsed.type, self.type_)

    def test_serialize(self):
        """Serializing produces a header with the expected type and length."""
        out = bytearray()
        self.c.serialize(out, 0)
        fields = struct.unpack(self.fmt, six.binary_type(out))
        eq_(fields[0], self.type_)
        eq_(fields[1], self.len_)
def test_numbered_duplicates_returns_correct_items():
    """_numbered_duplicates must only rename duplicates, never add or drop items."""
    words = ['one', 'two', 'three', 'four']
    for _ in range(1000):
        picked = random.choices(words, k=7)
        renamed = _numbered_duplicates(picked)
        # Strip the "_<n>" suffix that _numbered_duplicates appends, then the
        # multiset of names must be unchanged.
        restored = [item.rsplit('_', 1)[0] for item in renamed]
        assert Counter(picked) == Counter(restored)
class stackoverflow_question__test_case(unittest.TestCase):
    """Regression test for a Stack Overflow question about benedict.search."""

    def test_stackoverflow_question_(self):
        from benedict import benedict as bdict
        payload = {
            'ResponseMetadata': {'NOT IMPORTANT'},
            'hasMoreResults': True,
            'marker': '{"NOT IMPORTANT"}',
            'pipelineIdList': [
                {'id': 'df-0001', 'name': 'Blue'},
                {'id': 'df-0002', 'name': 'Orange'},
                {'id': 'df-0003', 'name': 'Green'},
                {'id': 'df-0004', 'name': 'Red'},
                {'id': 'df-0005', 'name': 'Purple'},
            ],
        }
        d = bdict(payload)
        # Case-insensitive exact match against values only.
        hits = d.search('red', in_keys=False, in_values=True, exact=True, case_sensitive=False)
        self.assertEqual(1, len(hits))
        self.assertEqual(({'id': 'df-0004', 'name': 'Red'}, 'name', 'Red'), hits[0])
def parse_property_inetnum(block: bytes):
    """Extract the network of a whois text block as CIDR notation.

    :param block: raw whois block (bytes).
    :return: for an ``inetnum`` IPv4 *range*, the list of CIDRs covering it
        (via ``iprange_to_cidrs``); for direct ``inetnum``/``inet6num``/
        ``route``/``route6`` CIDR fields, the CIDR as bytes (short IPv4
        forms like ``10.1/16`` are zero-padded to ``10.1.0.0/16``); None
        when nothing matches.
    """
    # IPv4 range form: "inetnum: a.b.c.d - e.f.g.h" -> list of CIDRs.
    match = re.findall(
        rb'^inetnum:[\s]*((?:\d{1,3}\.){3}\d{1,3})[\s]*-[\s]*((?:\d{1,3}\.){3}\d{1,3})',
        block, re.MULTILINE)
    if match:
        ip_start = match[0][0].decode('utf-8')
        ip_end = match[0][1].decode('utf-8')
        return iprange_to_cidrs(ip_start, ip_end)
    # Direct CIDR forms, most specific first.  `pad` holds the octets to
    # append when the IPv4 address is written with fewer than four of them.
    patterns = (
        (rb'^inetnum:[\s]*((?:\d{1,3}\.){3}\d{1,3}/\d+)', b''),
        (rb'^inetnum:[\s]*((?:\d{1,3}\.){2}\d{1,3}/\d+)', b'.0'),
        (rb'^inetnum:[\s]*((?:\d{1,3}\.){1}\d{1,3}/\d+)', b'.0.0'),
        (rb'^inet6num:[\s]*([0-9a-fA-F:\/]{1,43})', b''),
        (rb'^route:[\s]*((?:\d{1,3}\.){3}\d{1,3}/\d{1,2})', b''),
        (rb'^route6:[\s]*([0-9a-fA-F:\/]{1,43})', b''),
    )
    for pattern, pad in patterns:
        match = re.findall(pattern, block, re.MULTILINE)
        if match:
            if not pad:
                return match[0]
            prefix, length = match[0].split(b'/')
            return prefix + pad + b'/' + length
    return None
class Status(object):
    """Swagger-generated model of a vehicle status.

    Attributes are exposed through properties backed by ``_``-prefixed
    fields.  ``links`` is the only required attribute: its setter rejects
    ``None``.

    NOTE(review): the ``@property``/``@<name>.setter`` decorators were
    missing, and three were corrupted into stray ``_state.setter``-style
    fragments; they are restored here so each getter/setter pair works as a
    property instead of the later ``def`` silently replacing the earlier.
    """

    # attribute name -> swagger type (drives to_dict()).
    swagger_types = {'embedded': 'StatusEmbedded', 'links': 'StatusLinks', 'battery': 'Battery', 'doors_state': 'DoorsState', 'energy': 'list[Energy]', 'environment': 'Environment', 'ignition': 'Ignition', 'kinetic': 'Kinetic', 'last_position': 'Position', 'preconditionning': 'Preconditioning', 'privacy': 'Privacy', 'safety': 'Safety', 'service': 'ServiceType', 'timed_odometer': 'VehicleOdometer'}
    # attribute name -> JSON field name.
    attribute_map = {'embedded': '_embedded', 'links': '_links', 'battery': 'battery', 'doors_state': 'doorsState', 'energy': 'energy', 'environment': 'environment', 'ignition': 'ignition', 'kinetic': 'kinetic', 'last_position': 'lastPosition', 'preconditionning': 'preconditioning', 'privacy': 'privacy', 'safety': 'safety', 'service': 'service', 'timed_odometer': 'odometer'}

    def __init__(self, embedded=None, links=None, battery=None, doors_state=None, energy=None, environment=None, ignition=None, kinetic=None, last_position=None, preconditionning=None, privacy=None, safety=None, service=None, timed_odometer=None):
        """Initialize the model; optional values are assigned only when not None.

        :raises ValueError: if `links` is None (required attribute).
        """
        self._embedded = None
        self._links = None
        self._battery = None
        self._doors_state = None
        self._energy = None
        self._environment = None
        self._ignition = None
        self._kinetic = None
        self._last_position = None
        self._preconditionning = None
        self._privacy = None
        self._safety = None
        self._service = None
        self._timed_odometer = None
        self.discriminator = None
        if embedded is not None:
            self.embedded = embedded
        self.links = links  # required -- setter raises on None
        if battery is not None:
            self.battery = battery
        if doors_state is not None:
            self.doors_state = doors_state
        if energy is not None:
            self.energy = energy
        if environment is not None:
            self.environment = environment
        if ignition is not None:
            self.ignition = ignition
        if kinetic is not None:
            self.kinetic = kinetic
        if last_position is not None:
            self.last_position = last_position
        if preconditionning is not None:
            self.preconditionning = preconditionning
        if privacy is not None:
            self.privacy = privacy
        if safety is not None:
            self.safety = safety
        if service is not None:
            self.service = service
        if timed_odometer is not None:
            self.timed_odometer = timed_odometer

    @property
    def embedded(self):
        """Embedded sub-resources of this Status."""
        return self._embedded

    @embedded.setter
    def embedded(self, embedded):
        self._embedded = embedded

    @property
    def links(self):
        """Hypermedia links of this Status (required)."""
        return self._links

    @links.setter
    def links(self, links):
        if links is None:
            raise ValueError('Invalid value for `links`, must not be `None`')
        self._links = links

    @property
    def battery(self):
        """Battery state of this Status."""
        return self._battery

    @battery.setter
    def battery(self, battery):
        self._battery = battery

    @property
    def doors_state(self):
        """Doors state of this Status."""
        return self._doors_state

    @doors_state.setter
    def doors_state(self, doors_state):
        self._doors_state = doors_state

    @property
    def energy(self):
        """List of energy readings of this Status."""
        return self._energy

    @energy.setter
    def energy(self, energy):
        self._energy = energy

    @property
    def environment(self):
        """Environment readings of this Status."""
        return self._environment

    @environment.setter
    def environment(self, environment):
        self._environment = environment

    @property
    def ignition(self):
        """Ignition state of this Status."""
        return self._ignition

    @ignition.setter
    def ignition(self, ignition):
        self._ignition = ignition

    @property
    def kinetic(self):
        """Kinetic state of this Status."""
        return self._kinetic

    @kinetic.setter
    def kinetic(self, kinetic):
        self._kinetic = kinetic

    @property
    def last_position(self):
        """Last known position of this Status."""
        return self._last_position

    @last_position.setter
    def last_position(self, last_position):
        self._last_position = last_position

    @property
    def preconditionning(self):
        """Preconditioning state of this Status (spelling kept for API compat)."""
        return self._preconditionning

    @preconditionning.setter
    def preconditionning(self, preconditionning):
        self._preconditionning = preconditionning

    @property
    def privacy(self):
        """Privacy settings of this Status."""
        return self._privacy

    @privacy.setter
    def privacy(self, privacy):
        self._privacy = privacy

    @property
    def safety(self):
        """Safety state of this Status."""
        return self._safety

    @safety.setter
    def safety(self, safety):
        self._safety = safety

    @property
    def service(self):
        """Service type of this Status."""
        return self._service

    @service.setter
    def service(self, service):
        self._service = service

    @property
    def timed_odometer(self):
        """Timed odometer reading of this Status."""
        return self._timed_odometer

    @timed_odometer.setter
    def timed_odometer(self, timed_odometer):
        self._timed_odometer = timed_odometer

    def to_dict(self):
        """Return the model's properties as a dict, recursing into sub-models."""
        result = {}
        # Python 3 (file uses f-strings): plain .items() replaces six.iteritems.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, 'to_dict') else x for x in value]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, 'to_dict') else v
                                for k, v in value.items()}
            else:
                result[attr] = value
        if issubclass(Status, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        """Models are equal when all their attributes are equal."""
        if not isinstance(other, Status):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TestSoupContains(util.TestCase):
    """Tests for the ``:-soup-contains()`` pseudo-class selector."""

    MARKUP = '\n    <body>\n    <div id="1">\n    Testing\n    <span id="2"> that </span>\n    contains works.\n    </div>\n    </body>\n    '

    def test_contains(self):
        self.assert_selector(self.MARKUP, 'body span:-soup-contains(that)', ['2'], flags=util.HTML)

    def test_contains_quoted_with_space(self):
        self.assert_selector(self.MARKUP, 'body span:-soup-contains(" that ")', ['2'], flags=util.HTML)

    def test_contains_quoted_without_space(self):
        self.assert_selector(self.MARKUP, 'body :-soup-contains( "Testing" )', ['1'], flags=util.HTML)

    def test_contains_quoted_with_escaped_newline(self):
        self.assert_selector(self.MARKUP, 'body :-soup-contains("Test\\\ning")', ['1'], flags=util.HTML)

    def test_contains_quoted_with_escaped_newline_with_carriage_return(self):
        self.assert_selector(self.MARKUP, 'body :-soup-contains("Test\\\r\ning")', ['1'], flags=util.HTML)

    def test_contains_list(self):
        # First argument does not match; the second one does.
        self.assert_selector(self.MARKUP, 'body span:-soup-contains("does not exist", "that")', ['2'], flags=util.HTML)

    def test_contains_multiple(self):
        self.assert_selector(self.MARKUP, 'body span:-soup-contains("th"):-soup-contains("at")', ['2'], flags=util.HTML)

    def test_contains_multiple_not_match(self):
        self.assert_selector(self.MARKUP, 'body span:not(:-soup-contains("does not exist")):-soup-contains("that")', ['2'], flags=util.HTML)

    def test_contains_multiple_not_no_match(self):
        self.assert_selector(self.MARKUP, 'body span:not(:-soup-contains("that")):-soup-contains("that")', [], flags=util.HTML)

    def test_contains_with_descendants(self):
        # Text in a descendant also counts for the ancestor.
        self.assert_selector(self.MARKUP, 'body :-soup-contains(" that ")', ['1', '2'], flags=util.HTML)

    def test_contains_bad(self):
        self.assert_selector(self.MARKUP, 'body :-soup-contains(bad)', [], flags=util.HTML)

    def test_contains_escapes(self):
        markup = '\n    <body>\n    <div id="1">Testing<span id="2">\n    that</span>contains works.</div>\n    </body>\n    '
        self.assert_selector(markup, 'body span:-soup-contains("\\0a that")', ['2'], flags=util.HTML)

    def test_contains_cdata_html5(self):
        markup = '\n    <body><div id="1">Testing that <span id="2"><![CDATA[that]]></span>contains works.</div></body>\n    '
        self.assert_selector(markup, 'body *:-soup-contains("that")', ['1'], flags=util.HTML5)

    def test_contains_cdata_py_html(self):
        markup = '\n    <body><div id="1">Testing that <span id="2"><![CDATA[that]]></span>contains works.</div></body>\n    '
        self.assert_selector(markup, 'body *:-soup-contains("that")', ['1'], flags=util.PYHTML)

    # NOTE(review): a decorator was corrupted to a bare `_no_lxml` fragment
    # here (likely a "skip when lxml is unavailable" guard); the broken
    # fragment is removed -- restore the project's original guard decorator.
    def test_contains_cdata_lxml_html(self):
        from lxml import etree
        libxml_ver = etree.LIBXML_VERSION
        markup = '\n    <body><div id="1">Testing that <span id="2"><![CDATA[that]]></span>contains works.</div></body>\n    '
        # libxml >= 2.9.11 exposes CDATA text, so the span matches as well.
        results = ['1', '2'] if libxml_ver >= (2, 9, 11) else ['1']
        self.assert_selector(markup, 'body *:-soup-contains("that")', results, flags=util.LXML_HTML)

    def test_contains_cdata_xhtml(self):
        markup = '\n    <div id="1">Testing that <span id="2"><![CDATA[that]]></span>contains works.</div>\n    '
        self.assert_selector(self.wrap_xhtml(markup), 'body *:-soup-contains("that")', ['1', '2'], flags=util.XHTML)

    def test_contains_cdata_xml(self):
        markup = '\n    <div id="1">Testing that <span id="2"><![CDATA[that]]></span>contains works.</div>\n    '
        self.assert_selector(markup, '*:-soup-contains("that")', ['1', '2'], flags=util.XML)

    def test_contains_iframe(self):
        # In HTML mode, iframe content is not part of the parent document text.
        markup = '\n    <div id="1">\n    <p>Testing text</p>\n    <iframe>\n    <html><body>\n    <span id="2">iframe</span>\n    </body></html>\n    </iframe>\n    </div>\n    '
        self.assert_selector(markup, 'div:-soup-contains("iframe")', [], flags=util.PYHTML)
        self.assert_selector(markup, 'div:-soup-contains("text")', ['1'], flags=util.PYHTML)
        self.assert_selector(markup, 'span:-soup-contains("iframe")', ['2'], flags=util.PYHTML)

    def test_contains_iframe_xml(self):
        # In XML mode there is no iframe special-casing, so the div matches.
        markup = '\n    <div id="1">\n    <p>Testing text</p>\n    <iframe>\n    <html><body>\n    <span id="2">iframe</span>\n    </body></html>\n    </iframe>\n    </div>\n    '
        self.assert_selector(markup, 'div:-soup-contains("iframe")', ['1'], flags=util.XML)
        self.assert_selector(markup, 'div:-soup-contains("text")', ['1'], flags=util.XML)
        self.assert_selector(markup, 'span:-soup-contains("iframe")', ['2'], flags=util.XML)

    def test_contains_warn(self):
        """The legacy `:contains()` alias must emit a FutureWarning."""
        sv.purge()
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.assert_selector(self.MARKUP, 'body span:contains(that)', ['2'], flags=util.HTML)
        self.assertTrue(len(w) == 1)
        self.assertTrue(issubclass(w[-1].category, FutureWarning))
def find_volume_mappers():
    """Return the names of instantiable tvtk volume-mapper classes,
    excluding a short list of known-problematic ones."""
    found = []
    with suppress_vtk_warnings():
        # Silence VTK warnings globally while probing classes.
        probe = tvtk.Object()
        probe.global_warning_display = False
        for attr in dir(tvtk):
            if 'Volume' not in attr or 'Mapper' not in attr:
                continue
            try:
                # Only keep classes that can actually be instantiated.
                getattr(tvtk, attr)()
            except TypeError:
                continue
            found.append(attr)
    blacklist = ('VolumeTextureMapper3D', 'VolumeProMapper')
    return [attr for attr in found if attr not in blacklist]
class SequenceTestCase(unittest.TestCase):
    def test_to_xml_method_is_working_properly(self):
        """Build a full Sequence object graph (1 track, 3 clips) and compare
        the to_xml() output against a hand-written reference XML document."""
        # Sequence-level metadata.
        s = Sequence()
        s.duration = 109
        s.name = 'previs_edit_v001'
        s.rate = Rate(timebase='24', ntsc=False)
        s.timecode = '00:00:00:00'
        # Media / video container with one enabled, unlocked track.
        m = Media()
        s.media = m
        v = Video()
        v.width = 1024
        v.height = 778
        m.video = v
        t = Track()
        t.enabled = True
        t.locked = False
        v.tracks.append(t)
        # Clip 1: 'shot2', frames 1-35.
        f = File()
        f.duration = 34
        f.name = 'shot2'
        f.pathurl = 'file:///home/eoyilmaz/maya/projects/default/data/shot2.mov'
        c = Clip()
        c.id = 'shot2'
        c.start = 1
        c.end = 35
        c.name = 'shot2'
        c.enabled = True
        c.duration = 34
        c.in_ = 0
        c.out = 34
        c.file = f
        t.clips.append(c)
        # Clip 2: 'shot', frames 35-65.
        f = File()
        f.duration = 30
        f.name = 'shot'
        f.pathurl = 'file:///home/eoyilmaz/maya/projects/default/data/shot.mov'
        c = Clip()
        c.id = 'shot'
        c.start = 35
        c.end = 65
        c.name = 'shot'
        c.enabled = True
        c.duration = 30
        c.in_ = 0
        c.out = 30
        c.file = f
        t.clips.append(c)
        # Clip 3: 'shot1', frames 65-110.
        f = File()
        f.duration = 45
        f.name = 'shot1'
        f.pathurl = 'file:///home/eoyilmaz/maya/projects/default/data/shot1.mov'
        c = Clip()
        c.id = 'shot1'
        c.start = 65
        c.end = 110
        c.name = 'shot1'
        c.enabled = True
        c.duration = 45
        c.in_ = 0
        c.out = 45
        c.file = f
        t.clips.append(c)
        # Reference output; note pathurls are rendered as file://localhost/...
        expected_xml = '<?xml version="1.0" encoding="UTF-8"?>\n<!DOCTYPE xmeml>\n<xmeml version="5">\n<sequence>\n <duration>109</duration>\n <name>previs_edit_v001</name>\n <rate>\n <timebase>24</timebase>\n <ntsc>FALSE</ntsc>\n </rate>\n <timecode>\n <string>00:00:00:00</string>\n </timecode>\n <media>\n <video>\n <format>\n <samplecharacteristics>\n <width>1024</width>\n <height>778</height>\n </samplecharacteristics>\n </format>\n <track>\n <locked>FALSE</locked>\n <enabled>TRUE</enabled>\n <clipitem id="shot2">\n <end>35</end>\n <name>shot2</name>\n <enabled>True</enabled>\n <start>1</start>\n <in>0</in>\n <duration>34</duration>\n <out>34</out>\n <file id="shot2.mov">\n <duration>34</duration>\n <name>shot2</name>\n <pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov</pathurl>\n </file>\n </clipitem>\n <clipitem id="shot">\n <end>65</end>\n <name>shot</name>\n <enabled>True</enabled>\n <start>35</start>\n <in>0</in>\n <duration>30</duration>\n <out>30</out>\n <file id="shot.mov">\n <duration>30</duration>\n <name>shot</name>\n <pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot.mov</pathurl>\n </file>\n </clipitem>\n <clipitem id="shot1">\n <end>110</end>\n <name>shot1</name>\n <enabled>True</enabled>\n <start>65</start>\n <in>0</in>\n <duration>45</duration>\n <out>45</out>\n <file id="shot1.mov">\n <duration>45</duration>\n <name>shot1</name>\n <pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov</pathurl>\n </file>\n </clipitem>\n </track>\n </video>\n </media>\n</sequence>\n</xmeml>'
        self.assertEqual(expected_xml, s.to_xml())
def test_from_xml_method_is_working_properly(self):
from xml.etree import ElementTree
sequence_node = ElementTree.Element('sequence')
duration_node = ElementTree.SubElement(sequence_node, 'duration')
duration_node.text = '109'
name_node = ElementTree.SubElement(sequence_node, 'name')
name_node.text = 'previs_edit_v001'
rate_node = ElementTree.SubElement(sequence_node, 'rate')
ntsc_node = ElementTree.SubElement(rate_node, 'ntsc')
ntsc_node.text = 'FALSE'
timebase_node = ElementTree.SubElement(rate_node, 'timebase')
timebase_node.text = '24'
timecode_node = ElementTree.SubElement(sequence_node, 'timecode')
string_node = ElementTree.SubElement(timecode_node, 'string')
string_node.text = '00:00:00:00'
media_node = ElementTree.SubElement(sequence_node, 'media')
video_node = ElementTree.SubElement(media_node, 'video')
format_node = ElementTree.SubElement(video_node, 'format')
sc_node = ElementTree.SubElement(format_node, 'samplecharacteristics')
width_node = ElementTree.SubElement(sc_node, 'width')
width_node.text = 1024
height_node = ElementTree.SubElement(sc_node, 'height')
height_node.text = 778
track_node = ElementTree.SubElement(video_node, 'track')
locked_node = ElementTree.SubElement(track_node, 'locked')
locked_node.text = 'FALSE'
enabled_node = ElementTree.SubElement(track_node, 'enabled')
enabled_node.text = 'TRUE'
clip_node = ElementTree.SubElement(track_node, 'clipitem', attrib={'id': 'shot2'})
end_node = ElementTree.SubElement(clip_node, 'end')
end_node.text = '35'
name_node = ElementTree.SubElement(clip_node, 'name')
name_node.text = 'shot2'
enabled_node = ElementTree.SubElement(clip_node, 'enabled')
enabled_node.text = 'True'
start_node = ElementTree.SubElement(clip_node, 'start')
start_node.text = '1'
in_node = ElementTree.SubElement(clip_node, 'in')
in_node.text = '0'
duration_node = ElementTree.SubElement(clip_node, 'duration')
duration_node.text = '34'
out_node = ElementTree.SubElement(clip_node, 'out')
out_node.text = '34'
file_node = ElementTree.SubElement(clip_node, 'file')
duration_node = ElementTree.SubElement(file_node, 'duration')
duration_node.text = '34'
name_node = ElementTree.SubElement(file_node, 'name')
name_node.text = 'shot2'
pathurl_node = ElementTree.SubElement(file_node, 'pathurl')
pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov'
pathurl_node.text = pathurl
clip_node = ElementTree.SubElement(track_node, 'clipitem', attrib={'id': 'shot'})
end_node = ElementTree.SubElement(clip_node, 'end')
end_node.text = '65'
name_node = ElementTree.SubElement(clip_node, 'name')
name_node.text = 'shot'
enabled_node = ElementTree.SubElement(clip_node, 'enabled')
enabled_node.text = 'True'
start_node = ElementTree.SubElement(clip_node, 'start')
start_node.text = '35'
in_node = ElementTree.SubElement(clip_node, 'in')
in_node.text = '0'
duration_node = ElementTree.SubElement(clip_node, 'duration')
duration_node.text = '30'
out_node = ElementTree.SubElement(clip_node, 'out')
out_node.text = '30'
file_node = ElementTree.SubElement(clip_node, 'file')
duration_node = ElementTree.SubElement(file_node, 'duration')
duration_node.text = '30'
name_node = ElementTree.SubElement(file_node, 'name')
name_node.text = 'shot'
pathurl_node = ElementTree.SubElement(file_node, 'pathurl')
pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot.mov'
pathurl_node.text = pathurl
clip_node = ElementTree.SubElement(track_node, 'clipitem', attrib={'id': 'shot1'})
end_node = ElementTree.SubElement(clip_node, 'end')
end_node.text = '110'
name_node = ElementTree.SubElement(clip_node, 'name')
name_node.text = 'shot1'
enabled_node = ElementTree.SubElement(clip_node, 'enabled')
enabled_node.text = 'True'
start_node = ElementTree.SubElement(clip_node, 'start')
start_node.text = '65'
in_node = ElementTree.SubElement(clip_node, 'in')
in_node.text = '0'
duration_node = ElementTree.SubElement(clip_node, 'duration')
duration_node.text = '45'
rate_node = ElementTree.SubElement(clip_node, 'rate')
ntsc_node = ElementTree.SubElement(rate_node, 'ntsc')
ntsc_node.text = 'FALSE'
timebase_node = ElementTree.SubElement(rate_node, 'timebase')
timebase_node.text = '24'
out_node = ElementTree.SubElement(clip_node, 'out')
out_node.text = '45'
file_node = ElementTree.SubElement(clip_node, 'file')
duration_node = ElementTree.SubElement(file_node, 'duration')
duration_node.text = '45'
name_node = ElementTree.SubElement(file_node, 'name')
name_node.text = 'shot1'
pathurl_node = ElementTree.SubElement(file_node, 'pathurl')
pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov'
pathurl_node.text = pathurl
s = Sequence()
s.from_xml(sequence_node)
self.assertEqual(109, s.duration)
self.assertEqual('previs_edit_v001', s.name)
r = s.rate
self.assertEqual(False, r.ntsc)
self.assertEqual('24', r.timebase)
self.assertEqual('00:00:00:00', s.timecode)
m = s.media
v = m.video
self.assertEqual(1024, v.width)
self.assertEqual(778, v.height)
t = v.tracks[0]
self.assertEqual(False, t.locked)
self.assertEqual(True, t.enabled)
c = t.clips[0]
self.assertEqual(35, c.end)
self.assertEqual('shot2', c.name)
self.assertEqual(True, c.enabled)
self.assertEqual(1, c.start)
self.assertEqual(0, c.in_)
self.assertEqual(34, c.duration)
self.assertEqual(34, c.out)
f = c.file
self.assertEqual(34, f.duration)
self.assertEqual('shot2', f.name)
self.assertEqual('file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov', f.pathurl)
c = t.clips[1]
self.assertEqual(65, c.end)
self.assertEqual('shot', c.name)
self.assertEqual(True, c.enabled)
self.assertEqual(35, c.start)
self.assertEqual(0, c.in_)
self.assertEqual(30, c.duration)
self.assertEqual(30, c.out)
f = c.file
self.assertEqual(30, f.duration)
self.assertEqual('shot', f.name)
self.assertEqual('file://localhost/home/eoyilmaz/maya/projects/default/data/shot.mov', f.pathurl)
c = t.clips[2]
self.assertEqual(110, c.end)
self.assertEqual('shot1', c.name)
self.assertEqual(True, c.enabled)
self.assertEqual(65, c.start)
self.assertEqual(0, c.in_)
self.assertEqual(45, c.duration)
self.assertEqual(45, c.out)
f = c.file
self.assertEqual(45, f.duration)
self.assertEqual('shot1', f.name)
self.assertEqual('file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov', f.pathurl)
def test_to_edl_will_raise_RuntimeError_if_no_Media_instance_presents(self):
s = Sequence()
with self.assertRaises(RuntimeError) as cm:
s.to_edl()
self.assertEqual('Can not run Sequence.to_edl() without a Media instance, please add a Media instance to this Sequence instance.', cm.exception.message)
def test_to_edl_method_will_return_a_List_instance(self):
s = Sequence()
m = Media()
s.media = m
result = s.to_edl()
self.assertTrue(isinstance(result, List))
    def test_to_edl_method_is_working_properly(self):
        """Load a sequence through the Maya sequenceManager node and verify
        every field of the three EDL events produced by to_edl().

        NOTE(review): relies on a live `pm.PyNode('sequenceManager1')` and a
        test scene on disk -- an integration-style test.
        """
        sm = pm.PyNode('sequenceManager1')
        sm.set_version('v001')
        xml_path = os.path.abspath('./test_data/test_v001.xml')
        sm.from_xml(xml_path)
        sequence = sm.generate_sequence_structure()
        edl_list = sm.to_edl()
        self.assertTrue(isinstance(edl_list, List))
        self.assertEqual(sequence.name, edl_list.title)
        self.assertEqual(3, len(edl_list.events))
        e1 = edl_list.events[0]
        e2 = edl_list.events[1]
        e3 = edl_list.events[2]
        self.assertTrue(isinstance(e1, Event))
        self.assertTrue(isinstance(e2, Event))
        self.assertTrue(isinstance(e3, Event))
        clips = sequence.media.video.tracks[0].clips
        self.assertTrue(isinstance(clips[0], Clip))
        # Event 1: shot 0010.
        self.assertEqual('000001', e1.num)
        self.assertEqual('SEQ001_HSNI_003_0010_v001', e1.clip_name)
        self.assertEqual('SEQ001_HSNI_003_0010_v001', e1.reel)
        self.assertEqual('V', e1.track)
        self.assertEqual('C', e1.tr_code)
        self.assertEqual('00:00:00:00', e1.src_start_tc)
        self.assertEqual('00:00:01:10', e1.src_end_tc)
        self.assertEqual('00:00:00:01', e1.rec_start_tc)
        self.assertEqual('00:00:01:11', e1.rec_end_tc)
        self.assertEqual('* FROM CLIP NAME: SEQ001_HSNI_003_0010_v001', e1.comments[0])
        self.assertEqual('/tmp/SEQ001_HSNI_003_0010_v001.mov', e1.source_file)
        self.assertEqual('* SOURCE FILE: /tmp/SEQ001_HSNI_003_0010_v001.mov', e1.comments[1])
        # Event 2: shot 0020.
        self.assertEqual('000002', e2.num)
        self.assertEqual('SEQ001_HSNI_003_0020_v001', e2.clip_name)
        self.assertEqual('SEQ001_HSNI_003_0020_v001', e2.reel)
        self.assertEqual('V', e2.track)
        self.assertEqual('C', e2.tr_code)
        self.assertEqual('00:00:00:00', e2.src_start_tc)
        self.assertEqual('00:00:01:07', e2.src_end_tc)
        self.assertEqual('00:00:01:11', e2.rec_start_tc)
        self.assertEqual('00:00:02:18', e2.rec_end_tc)
        self.assertEqual('/tmp/SEQ001_HSNI_003_0020_v001.mov', e2.source_file)
        self.assertEqual('* FROM CLIP NAME: SEQ001_HSNI_003_0020_v001', e2.comments[0])
        self.assertEqual('* SOURCE FILE: /tmp/SEQ001_HSNI_003_0020_v001.mov', e2.comments[1])
        # Event 3: shot 0030.
        self.assertEqual('000003', e3.num)
        self.assertEqual('SEQ001_HSNI_003_0030_v001', e3.clip_name)
        self.assertEqual('SEQ001_HSNI_003_0030_v001', e3.reel)
        self.assertEqual('V', e3.track)
        self.assertEqual('C', e3.tr_code)
        self.assertEqual('00:00:00:00', e3.src_start_tc)
        self.assertEqual('00:00:01:22', e3.src_end_tc)
        self.assertEqual('00:00:02:18', e3.rec_start_tc)
        self.assertEqual('00:00:04:16', e3.rec_end_tc)
        self.assertEqual('/tmp/SEQ001_HSNI_003_0030_v001.mov', e3.source_file)
        self.assertEqual('* FROM CLIP NAME: SEQ001_HSNI_003_0030_v001', e3.comments[0])
        self.assertEqual('* SOURCE FILE: /tmp/SEQ001_HSNI_003_0030_v001.mov', e3.comments[1])
def test_from_edl_method_is_working_properly(self):
from edl import Parser
p = Parser('24')
edl_path = os.path.join(__file__, '/test_data/test_v001.edl')
with open(edl_path) as f:
edl_list = p.parse(f)
r = Rate(timebase='24')
s = Sequence(rate=r)
s.from_edl(edl_list)
self.assertEqual('SEQ001_HSNI_003', s.name)
self.assertEqual(111, s.duration)
r = s.rate
self.assertEqual('24', r.timebase)
self.assertEqual('00:00:00:00', s.timecode)
m = s.media
self.assertTrue(isinstance(m, Media))
v = m.video
self.assertTrue(isinstance(v, Video))
t = v.tracks[0]
self.assertEqual(False, t.locked)
self.assertEqual(True, t.enabled)
clips = t.clips
self.assertEqual(3, len(clips))
clip1 = clips[0]
clip2 = clips[1]
clip3 = clips[2]
self.assertTrue(isinstance(clip1, Clip))
self.assertTrue(isinstance(clip2, Clip))
self.assertTrue(isinstance(clip3, Clip))
self.assertEqual(34, clip1.duration)
self.assertEqual(True, clip1.enabled)
self.assertEqual(35, clip1.end)
self.assertEqual('SEQ001_HSNI_003_0010_v001', clip1.id)
self.assertEqual(10, clip1.in_)
self.assertEqual('SEQ001_HSNI_003_0010_v001', clip1.name)
self.assertEqual(44, clip1.out)
self.assertEqual(1, clip1.start)
self.assertEqual('Video', clip1.type)
f = clip1.file
self.assertTrue(isinstance(f, File))
self.assertEqual(44, f.duration)
self.assertEqual('SEQ001_HSNI_003_0010_v001', f.name)
self.assertEqual('file://localhost/tmp/SEQ001_HSNI_003_0010_v001.mov', f.pathurl)
self.assertEqual(31, clip2.duration)
self.assertEqual(True, clip2.enabled)
self.assertEqual(66, clip2.end)
self.assertEqual('SEQ001_HSNI_003_0020_v001', clip2.id)
self.assertEqual(10, clip2.in_)
self.assertEqual('SEQ001_HSNI_003_0020_v001', clip2.name)
self.assertEqual(41, clip2.out)
self.assertEqual(35, clip2.start)
self.assertEqual('Video', clip2.type)
f = clip2.file
self.assertTrue(isinstance(f, File))
self.assertEqual(41, f.duration)
self.assertEqual('SEQ001_HSNI_003_0020_v001', f.name)
self.assertEqual('file://localhost/tmp/SEQ001_HSNI_003_0020_v001.mov', f.pathurl)
self.assertEqual(46, clip3.duration)
self.assertEqual(True, clip3.enabled)
self.assertEqual(112, clip3.end)
self.assertEqual('SEQ001_HSNI_003_0030_v001', clip3.id)
self.assertEqual(10, clip3.in_)
self.assertEqual('SEQ001_HSNI_003_0030_v001', clip3.name)
self.assertEqual(56, clip3.out)
self.assertEqual(66, clip3.start)
self.assertEqual('Video', clip3.type)
f = clip3.file
self.assertTrue(isinstance(f, File))
self.assertEqual(56, f.duration)
self.assertEqual('SEQ001_HSNI_003_0030_v001', f.name)
self.assertEqual('file://localhost/tmp/SEQ001_HSNI_003_0030_v001.mov', f.pathurl)
    def test_from_edl_method_is_working_properly_with_negative_timecodes(self):
        """Parse an EDL whose first event starts before 00:00:00:00 and verify
        from_edl() produces a negative clip start (-23) while the rest of the
        hierarchy stays consistent."""
        from edl import Parser
        p = Parser('24')
        edl_path = os.path.abspath('./test_data/test_v003.edl')
        with open(edl_path) as f:
            edl_list = p.parse(f)
        r = Rate(timebase='24')
        s = Sequence(rate=r)
        s.from_edl(edl_list)
        self.assertEqual('SEQ001_HSNI_003', s.name)
        self.assertEqual(247, s.duration)
        self.assertEqual('24', s.rate.timebase)
        self.assertEqual('00:00:00:00', s.timecode)
        m = s.media
        self.assertTrue(isinstance(m, Media))
        v = m.video
        self.assertTrue(isinstance(v, Video))
        t = v.tracks[0]
        self.assertEqual(False, t.locked)
        self.assertEqual(True, t.enabled)
        clips = t.clips
        self.assertEqual(3, len(clips))
        clip1 = clips[0]
        clip2 = clips[1]
        clip3 = clips[2]
        self.assertTrue(isinstance(clip1, Clip))
        self.assertTrue(isinstance(clip2, Clip))
        self.assertTrue(isinstance(clip3, Clip))
        # Clip 1: starts before the sequence origin, hence the negative start.
        self.assertEqual(176, clip1.duration)
        self.assertEqual(True, clip1.enabled)
        self.assertEqual(153, clip1.end)
        self.assertEqual('SEQ001_HSNI_003_0010_v001', clip1.id)
        self.assertEqual(15, clip1.in_)
        self.assertEqual('SEQ001_HSNI_003_0010_v001', clip1.name)
        self.assertEqual(191, clip1.out)
        self.assertEqual((- 23), clip1.start)
        self.assertEqual('Video', clip1.type)
        f = clip1.file
        self.assertTrue(isinstance(f, File))
        self.assertEqual(191, f.duration)
        self.assertEqual('SEQ001_HSNI_003_0010_v001', f.name)
        self.assertEqual('file://localhost/tmp/SEQ001_HSNI_003_0010_v001.mov', f.pathurl)
        # Clip 2.
        self.assertEqual(55, clip2.duration)
        self.assertEqual(True, clip2.enabled)
        self.assertEqual(208, clip2.end)
        self.assertEqual('SEQ001_HSNI_003_0020_v001', clip2.id)
        self.assertEqual(45, clip2.in_)
        self.assertEqual('SEQ001_HSNI_003_0020_v001', clip2.name)
        self.assertEqual(100, clip2.out)
        self.assertEqual(153, clip2.start)
        self.assertEqual('Video', clip2.type)
        f = clip2.file
        self.assertTrue(isinstance(f, File))
        self.assertEqual(100, f.duration)
        self.assertEqual('SEQ001_HSNI_003_0020_v001', f.name)
        self.assertEqual('file://localhost/tmp/SEQ001_HSNI_003_0020_v001.mov', f.pathurl)
        # Clip 3: single-frame clip.
        self.assertEqual(1, clip3.duration)
        self.assertEqual(True, clip3.enabled)
        self.assertEqual(224, clip3.end)
        self.assertEqual('SEQ001_HSNI_003_0030_v001', clip3.id)
        self.assertEqual(0, clip3.in_)
        self.assertEqual('SEQ001_HSNI_003_0030_v001', clip3.name)
        self.assertEqual(1, clip3.out)
        self.assertEqual(208, clip3.start)
        self.assertEqual('Video', clip3.type)
        f = clip3.file
        self.assertTrue(isinstance(f, File))
        self.assertEqual(1, f.duration)
        self.assertEqual('SEQ001_HSNI_003_0030_v001', f.name)
        self.assertEqual('file://localhost/tmp/SEQ001_HSNI_003_0030_v001.mov', f.pathurl)
def test_to_metafuze_xml_is_working_properly(self):
    """to_metafuze_xml() should emit one MetaFuze batch-transcode XML document per clip."""
    # Build a 3-clip sequence; clip timings are contiguous (1-35, 35-65, 65-110).
    s = Sequence()
    s.duration = 109
    s.name = 'SEQ001_HSNI_003'
    s.timecode = '00:00:00:00'
    r = Rate()
    s.rate = r
    r.ntsc = False
    r.timebase = '24'
    m = Media()
    s.media = m
    v = Video()
    v.width = 1024
    v.height = 778
    m.video = v
    t = Track()
    t.enabled = True
    t.locked = False
    v.tracks.append(t)
    # Clip 1: 34 frames, source file SEQ001_HSNI_003_0010_v001.mov.
    f = File()
    f.duration = 34
    f.name = 'SEQ001_HSNI_003_0010_v001'
    f.pathurl = 'file://localhost/tmp/SEQ001_HSNI_003_0010_v001.mov'
    c = Clip()
    c.id = '0010'
    c.start = 1
    c.end = 35
    c.name = 'SEQ001_HSNI_003_0010_v001'
    c.enabled = True
    c.duration = 34
    c.in_ = 0
    c.out = 34
    c.file = f
    t.clips.append(c)
    # Clip 2: 30 frames, starts where clip 1 ends.
    f = File()
    f.duration = 30
    f.name = 'SEQ001_HSNI_003_0020_v001'
    f.pathurl = 'file://localhost/tmp/SEQ001_HSNI_003_0020_v001.mov'
    c = Clip()
    c.id = '0020'
    c.start = 35
    c.end = 65
    c.name = 'SEQ001_HSNI_003_0020_v001'
    c.enabled = True
    c.duration = 30
    c.in_ = 0
    c.out = 30
    c.file = f
    t.clips.append(c)
    # Clip 3: 45 frames, starts where clip 2 ends.
    f = File()
    f.duration = 45
    f.name = 'SEQ001_HSNI_003_0030_v001'
    f.pathurl = 'file://localhost/tmp/SEQ001_HSNI_003_0030_v001.mov'
    c = Clip()
    c.id = '0030'
    c.start = 65
    c.end = 110
    c.name = 'SEQ001_HSNI_003_0030_v001'
    c.enabled = True
    c.duration = 45
    c.in_ = 0
    c.out = 45
    c.file = f
    t.clips.append(c)
    # One expected XML document per clip. The <Frames> values (33/29/44) are each
    # clip duration minus one — presumably a last-frame index; confirm upstream.
    # NOTE(review): the xmlns:xsi URL inside these literals looks truncated by
    # extraction ('xmlns:xsi=" xsi:...') — verify against the original file.
    expected_xmls = ['<?xml version=\'1.0\' encoding=\'UTF-8\'?>\n<MetaFuze_BatchTranscode xmlns:xsi=" xsi:noNamespaceSchemaLocation="MetaFuzeBatchTranscode.xsd">\n <Configuration>\n <Local>8</Local>\n <Remote>8</Remote>\n </Configuration>\n <Group>\n <FileList>\n <File>\\tmp\\SEQ001_HSNI_003_0010_v001.mov</File>\n </FileList>\n <Transcode>\n <Version>1.0</Version>\n <File>\\tmp\\SEQ001_HSNI_003_0010_v001.mxf</File>\n <ClipName>SEQ001_HSNI_003_0010_v001</ClipName>\n <ProjectName>SEQ001_HSNI_003</ProjectName>\n <TapeName>SEQ001_HSNI_003_0010_v001</TapeName>\n <TC_Start>00:00:00:00</TC_Start>\n <DropFrame>false</DropFrame>\n <EdgeTC>** TimeCode N/A **</EdgeTC>\n <FilmType>35.4</FilmType>\n <KN_Start>AAAAAAAA-0000+00</KN_Start>\n <Frames>33</Frames>\n <Width>1024</Width>\n <Height>778</Height>\n <PixelRatio>1.0000</PixelRatio>\n <UseFilmInfo>false</UseFilmInfo>\n <UseTapeInfo>true</UseTapeInfo>\n <AudioChannelCount>0</AudioChannelCount>\n <UseMXFAudio>false</UseMXFAudio>\n <UseWAVAudio>false</UseWAVAudio>\n <SrcBitsPerChannel>8</SrcBitsPerChannel>\n <OutputPreset>DNxHD 36</OutputPreset>\n <OutputPreset>\n <Version>2.0</Version>\n <Name>DNxHD 36</Name>\n <ColorModel>YCC 709</ColorModel>\n <BitDepth>8</BitDepth>\n <Format>1080 24p</Format>\n <Compression>DNxHD 36</Compression>\n <Conversion>Letterbox (center)</Conversion>\n <VideoFileType>.mxf</VideoFileType>\n <IsDefault>false</IsDefault>\n </OutputPreset>\n <Eye></Eye>\n <Scene></Scene>\n <Comment></Comment>\n </Transcode>\n </Group>\n</MetaFuze_BatchTranscode>', '<?xml version=\'1.0\' encoding=\'UTF-8\'?>\n<MetaFuze_BatchTranscode xmlns:xsi=" xsi:noNamespaceSchemaLocation="MetaFuzeBatchTranscode.xsd">\n <Configuration>\n <Local>8</Local>\n <Remote>8</Remote>\n </Configuration>\n <Group>\n <FileList>\n <File>\\tmp\\SEQ001_HSNI_003_0020_v001.mov</File>\n </FileList>\n <Transcode>\n <Version>1.0</Version>\n <File>\\tmp\\SEQ001_HSNI_003_0020_v001.mxf</File>\n <ClipName>SEQ001_HSNI_003_0020_v001</ClipName>\n <ProjectName>SEQ001_HSNI_003</ProjectName>\n <TapeName>SEQ001_HSNI_003_0020_v001</TapeName>\n <TC_Start>00:00:00:00</TC_Start>\n <DropFrame>false</DropFrame>\n <EdgeTC>** TimeCode N/A **</EdgeTC>\n <FilmType>35.4</FilmType>\n <KN_Start>AAAAAAAA-0000+00</KN_Start>\n <Frames>29</Frames>\n <Width>1024</Width>\n <Height>778</Height>\n <PixelRatio>1.0000</PixelRatio>\n <UseFilmInfo>false</UseFilmInfo>\n <UseTapeInfo>true</UseTapeInfo>\n <AudioChannelCount>0</AudioChannelCount>\n <UseMXFAudio>false</UseMXFAudio>\n <UseWAVAudio>false</UseWAVAudio>\n <SrcBitsPerChannel>8</SrcBitsPerChannel>\n <OutputPreset>DNxHD 36</OutputPreset>\n <OutputPreset>\n <Version>2.0</Version>\n <Name>DNxHD 36</Name>\n <ColorModel>YCC 709</ColorModel>\n <BitDepth>8</BitDepth>\n <Format>1080 24p</Format>\n <Compression>DNxHD 36</Compression>\n <Conversion>Letterbox (center)</Conversion>\n <VideoFileType>.mxf</VideoFileType>\n <IsDefault>false</IsDefault>\n </OutputPreset>\n <Eye></Eye>\n <Scene></Scene>\n <Comment></Comment>\n </Transcode>\n </Group>\n</MetaFuze_BatchTranscode>', '<?xml version=\'1.0\' encoding=\'UTF-8\'?>\n<MetaFuze_BatchTranscode xmlns:xsi=" xsi:noNamespaceSchemaLocation="MetaFuzeBatchTranscode.xsd">\n <Configuration>\n <Local>8</Local>\n <Remote>8</Remote>\n </Configuration>\n <Group>\n <FileList>\n <File>\\tmp\\SEQ001_HSNI_003_0030_v001.mov</File>\n </FileList>\n <Transcode>\n <Version>1.0</Version>\n <File>\\tmp\\SEQ001_HSNI_003_0030_v001.mxf</File>\n <ClipName>SEQ001_HSNI_003_0030_v001</ClipName>\n <ProjectName>SEQ001_HSNI_003</ProjectName>\n <TapeName>SEQ001_HSNI_003_0030_v001</TapeName>\n <TC_Start>00:00:00:00</TC_Start>\n <DropFrame>false</DropFrame>\n <EdgeTC>** TimeCode N/A **</EdgeTC>\n <FilmType>35.4</FilmType>\n <KN_Start>AAAAAAAA-0000+00</KN_Start>\n <Frames>44</Frames>\n <Width>1024</Width>\n <Height>778</Height>\n <PixelRatio>1.0000</PixelRatio>\n <UseFilmInfo>false</UseFilmInfo>\n <UseTapeInfo>true</UseTapeInfo>\n <AudioChannelCount>0</AudioChannelCount>\n <UseMXFAudio>false</UseMXFAudio>\n <UseWAVAudio>false</UseWAVAudio>\n <SrcBitsPerChannel>8</SrcBitsPerChannel>\n <OutputPreset>DNxHD 36</OutputPreset>\n <OutputPreset>\n <Version>2.0</Version>\n <Name>DNxHD 36</Name>\n <ColorModel>YCC 709</ColorModel>\n <BitDepth>8</BitDepth>\n <Format>1080 24p</Format>\n <Compression>DNxHD 36</Compression>\n <Conversion>Letterbox (center)</Conversion>\n <VideoFileType>.mxf</VideoFileType>\n <IsDefault>false</IsDefault>\n </OutputPreset>\n <Eye></Eye>\n <Scene></Scene>\n <Comment></Comment>\n </Transcode>\n </Group>\n</MetaFuze_BatchTranscode>']
    result = s.to_metafuze_xml()
    # Show full diffs on failure — the expected strings are very long.
    self.maxDiff = None
    self.assertEqual(expected_xmls[0], result[0])
    self.assertEqual(expected_xmls[1], result[1])
    self.assertEqual(expected_xmls[2], result[2])
class OptionSeriesPolygonSonificationContexttracksMappingHighpassFrequency(Options):
    """Highcharts sonification context-track highpass-frequency mapping options.

    NOTE(review): every option below appears as a getter/setter pair; the
    ``@property`` / ``@<name>.setter`` decorators look stripped by extraction —
    as written each second ``def`` shadows the first. Confirm against upstream.
    """

    def mapFunction(self):
        # Getter: configured mapping function (no default).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store *value* as a plain (non-JavaScript) config entry.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: property the mapping applies to (no default).
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (no default).
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (no default).
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: 'within' constraint for the mapping (no default).
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class Navigation():
    """Factory for page navigation components: scroll buttons, nav bars,
    breadcrumbs, side panels, footers and progress indicators.

    NOTE(review): built on an epyk-style framework object; every method wires
    components exposed by ``self.page.ui`` and registers JavaScript via
    ``self.page.js``.
    """

    def __init__(self, ui):
        # Keep the page handle so every factory method can create components
        # and attach client-side listeners to it.
        self.page = ui.page

    def __format_text(self, text: Union[(str, dict)], size: str=None, italic: bool=True) -> str:
        """Return *text* as HTML.

        A one-entry dict becomes ``<b>key</b> value`` where the value is a
        styled inline <div>; a plain string is returned unchanged.
        """
        if isinstance(text, dict):
            sub_title = self.page.ui.div(list(text.values())[0])
            # Rendered inline via .html() below, not managed by the page.
            sub_title.options.managed = False
            if italic:
                sub_title.style.css.italic()
            sub_title.style.css.color = self.page.theme.greys[4]
            sub_title.style.css.text_transform = 'lowercase'
            sub_title.style.css.display = 'inline'
            sub_title.style.css.font_size = (size or self.page.body.style.globals.font.normal((- 3)))
            return ('<b>%s</b> %s' % (list(text.keys())[0], sub_title.html()))
        return text

    def up(self, icon: str='fas fa-arrow-up', top: int=20, right: int=20, bottom: int=None, tooltip: str=None, width: Union[(tuple, int)]=(25, 'px'), height: Union[(tuple, int)]=(25, 'px'), options: dict=None, profile: Union[(bool, dict)]=False):
        """Add a fixed 'scroll to top' icon, shown once the page is scrolled.

        :param icon: icon class name. :param top/right/bottom: fixed offsets (px).
        :param tooltip: optional hover text. :return: the icon component.
        """
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        du = self.page.ui.icon(icon, width=width, height=height, options=options, profile=profile).css({'border': '1px solid black', 'position': 'fixed', 'width': 'none', 'border-radius': '20px', 'padding': '8px', 'right': ('%spx' % right)})
        # Anchor from the top by default; only fall back to bottom when no top given.
        if (top is not None):
            du.style.css.top = top
        else:
            du.style.css.bottom = bottom
        du.style.add_classes.div.background_hover()
        # Only visible after the page has been scrolled more than 50px.
        self.page.js.onReady(self.page.js.window.events.addScrollListener([self.page.js.if_((self.page.js.window.scrollY > 50), [du.dom.show()]).else_(du.dom.hide())]))
        if (tooltip is not None):
            du.tooltip(tooltip)
        # Clicking scrolls back to the top and hides the button.
        du.click([self.page.js.window.scrollUp(), self.page.js.objects.this.hide()])
        html.Html.set_component_skin(du)
        return du

    def down(self, icon: str='fas fa-arrow-down', top: int=20, right: int=20, bottom: int=None, tooltip: str=None, width: Union[(tuple, int)]=(25, 'px'), height: Union[(tuple, int)]=(25, 'px'), options: dict=None, profile: Union[(bool, dict)]=False):
        """Add a fixed 'scroll to bottom' icon, hidden near the page bottom.

        NOTE(review): unlike up(), width is normalised with unit='%' and
        height is passed through unconverted — possibly unintentional; confirm.
        """
        width = Arguments.size(width, unit='%')
        dd = self.page.ui.icon(icon, width=width, height=height, options=options, profile=profile).css({'border': '1px solid black', 'position': 'fixed', 'width': 'none', 'border-radius': '20px', 'padding': '8px', 'right': ('%spx' % right)})
        if (bottom is not None):
            dd.style.css.bottom = bottom
        else:
            dd.style.css.top = top
        dd.style.add_classes.div.background_hover()
        # Hide once the page is within 50px of its maximum scroll position.
        self.page.js.onReady(self.page.js.window.events.addScrollListener([self.page.js.if_((self.page.js.window.scrollY < (self.page.js.window.scrollMaxY - 50)), [dd.dom.show()]).else_(dd.dom.hide())]))
        if (tooltip is not None):
            dd.tooltip(tooltip)
        dd.click([self.page.js.window.scrollTo(), self.page.js.objects.this.hide()])
        html.Html.set_component_skin(dd)
        return dd

    def to(self, y, x: int=None, icon: str='fas fa-map-pin', top: int=20, right: int=20, bottom: Optional[int]=None, tooltip: str=None, width: Union[(tuple, int)]=(25, 'px'), height: Union[(tuple, int)]=(25, 'px'), options: dict=None, profile: Union[(bool, dict)]=False):
        """Add a fixed icon that scrolls to the (x, y) pixel position on click."""
        width = Arguments.size(width, unit='%')
        dd = self.page.ui.icon(icon, width=width, height=height, options=options, profile=profile).css({'border': '1px solid black', 'position': 'fixed', 'width': 'none', 'border-radius': '20px', 'padding': '8px', 'right': ('%spx' % right)})
        if (bottom is not None):
            dd.style.css.bottom = bottom
        else:
            dd.style.css.top = top
        dd.style.add_classes.div.background_hover()
        if (tooltip is not None):
            dd.tooltip(tooltip)
        # Shown only once the page has been scrolled past the target y offset.
        self.page.js.onReady(self.page.js.window.events.addScrollListener([self.page.js.if_((self.page.js.window.scrollY > y), [dd.dom.show()]).else_(dd.dom.hide())]))
        dd.click([self.page.js.window.scrollTo(x=x, y=y), self.page.js.objects.this.hide()])
        html.Html.set_component_skin(dd)
        return dd

    def pin(self, text: str, url: str='#', icon: str='fas fa-map-pin', top: int=20, right: int=20, bottom: int=None, tooltip: str=None, width: Union[(tuple, int)]=(25, 'px'), height: Union[(tuple, int)]=(25, 'px'), options: dict=None, profile: Union[(bool, dict)]=False):
        """Add a fixed pin icon that reveals a link label on hover."""
        width = Arguments.size(width, unit='%')
        dd = self.page.ui.icon(icon, width=width, height=height, options=options, profile=profile)
        h_url = self.page.ui.link(text, url=url)
        div = self.page.ui.div([dd, h_url]).css({'border': '1px solid black', 'position': 'fixed', 'width': 'none', 'border-radius': '30px', 'padding': '10px 15px', 'right': ('%spx' % right), 'background-color': self.page.theme.greys[0]})
        if (bottom is not None):
            div.style.css.bottom = bottom
        else:
            div.style.css.top = top
        div.attr['class'].add('CssDivOnHoverWidth')
        # Link label is hidden until the container is hovered.
        h_url.css({'display': 'none', 'white-space': 'nowrap'})
        div.on('mouseover', [h_url.dom.css({'display': 'inline-block', 'padding-left': '10px'})])
        div.on('mouseout', [h_url.dom.css({'display': 'none', 'padding-left': '0px'})])
        if (tooltip is not None):
            div.tooltip(tooltip)
        html.Html.set_component_skin(div)
        return div

    def scroll(self, progress: int=0, height: Union[(tuple, int)]=(3, 'px'), options: dict=None, profile: Union[(bool, dict)]=False):
        """Add a thin progress bar reflecting the page scroll percentage."""
        height = Arguments.size(height, unit='px')
        p = self.page.ui.sliders.progressbar(progress, height=height, options=options, profile=profile)
        # Keep the bar in sync with the window's scroll percentage.
        self.page.js.onReady(self.page.js.window.events.addScrollListener([p.build(self.page.js.window.scrollPercentage)]))
        html.Html.set_component_skin(p)
        return p

    def indices(self, count: int, selected: int=1, width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(None, 'px'), html_code: str=None, options: dict=None, profile: Union[(bool, dict)]=False):
        """Add a numbered index (pagination-style) component with *count* entries."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'div_css': {'display': 'inline-block', 'margin': '0 2px'}, 'selected': selected}
        # Caller options override the defaults.
        dflt_options.update((options or {}))
        html_indices = html.HtmlContainer.Indices(self.page, count, width, height, html_code, dflt_options, profile)
        html.Html.set_component_skin(html_indices)
        return html_indices

    def more(self, text: str='See more', icon: Optional[Union[(str, bool)]]=None, width: Union[(tuple, int)]=('auto', ''), tooltip: Optional[str]=None, height: Union[(tuple, int)]=(None, 'px'), align: str='left', html_code: Optional[str]=None, profile: Union[(dict, bool)]=None, options: Optional[dict]=None):
        """Add a 'See more' button that counts its own clicks.

        The click count is kept in the component's 'click-count' attribute;
        ``btn.dom.rewind()`` resets it and ``btn.dom.next()`` reads it.
        """
        btn = self.page.ui.buttons.text(text=text, icon=icon, width=width, tooltip=tooltip, height=height, align=align, html_code=html_code, profile=profile, options=options)
        btn.style.css.font_factor((- 3))
        btn.style.css.cursor = 'pointer'
        btn.attr['click-count'] = 0
        def rewind():
            # Reset the stored click counter to zero.
            return btn.dom.setAttribute('click-count', 0)
        def next():
            # Read the current click counter.
            return btn.dom.getAttribute('click-count')
        btn.dom.rewind = rewind
        btn.dom.next = next
        def click(js_funcs, source_event=None, profile=None, on_ready=False):
            # Wrap the caller's handlers so each click first increments the counter.
            if on_ready:
                self.page.body.onReady([btn.dom.events.trigger('click')])
            extra = [self.page.js.objects.new((btn.dom.getAttribute('click-count').toNumber() + 1), 'countClick'), btn.dom.setAttribute('click-count', self.page.js.objects.get('countClick'))]
            return btn.on('click', (extra + js_funcs), profile, source_event)
        # Replace the component's click() with the counting version.
        btn.click = click
        return btn

    def points(self, count: int, selected: int=0, width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(None, 'px'), html_code: str=None, options: dict=None, profile: Union[(dict, bool)]=False) -> html.HtmlContainer.Points:
        """Add an inline row of *count* selectable points (carousel-style dots)."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'div_css': {'display': 'inline-block', 'margin': '0 2px'}, 'selected': selected}
        dflt_options.update((options or {}))
        html_points = html.HtmlContainer.Points(self.page, count, width, height, html_code, dflt_options, profile)
        html.Html.set_component_skin(html_points)
        return html_points

    def dots(self, count: int, selected: int=1, position: str='right', width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(None, 'px'), html_code: str=None, options: dict=None, profile: Union[(dict, bool)]=False):
        """Like points() but floated to *position* instead of displayed inline."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'div_css': {'margin': '2px', 'float': position}, 'selected': selected}
        dflt_options.update((options or {}))
        html_points = html.HtmlContainer.Points(self.page, count, width, height, html_code, dflt_options, profile)
        html.Html.set_component_skin(html_points)
        return html_points

    def path(self, record: List[dict], divider: str=None, width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(None, 'px'), options: dict=None, profile: Union[(dict, bool)]=False):
        """Add a breadcrumb trail from *record* (dicts with 'text' and optional 'url')."""
        if (divider is None):
            divider = self.page.symbols.shapes.BLACK_RIGHT_POINTING_TRIANGLE
        div = self.page.ui.div(width=width, height=height, options=options, profile=profile)
        # All entries but the last are followed by a divider glyph.
        for rec in record[:(- 1)]:
            div += self.page.ui.link(rec['text'], url=rec.get('url', '#')).css({'display': 'inline-block'})
            div += self.page.ui.text(divider).css({'display': 'inline-block', 'margin': '0 5px', 'font-size': self.page.body.style.globals.font.normal((- 2))})
        div += self.page.ui.link(record[(- 1)]['text'], url=record[(- 1)].get('url', '#')).css({'display': 'inline-block'})
        html.Html.set_component_skin(div)
        return div

    def nav(self, logo=None, title: str=None, components=None, width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(40, 'px'), options: dict=None, avatar: bool=False, profile: Union[(dict, bool)]=False) -> html.HtmlMenu.HtmlNavBar:
        """Return the page-wide nav bar, creating it on first call (singleton per page)."""
        comp_id = 'page_nav_bar'
        if (comp_id not in self.page.components):
            nav_bar = self.bar(logo, title, width, height, options, avatar=avatar, html_code=comp_id, profile=profile)
        else:
            # Reuse the existing bar so repeated calls only append components.
            nav_bar = self.page.components[comp_id]
        if (components is not None):
            for component in components:
                nav_bar.add(component)
        html.Html.set_component_skin(nav_bar)
        return nav_bar

    def bar(self, logo=None, title=None, width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(40, 'px'), options=None, html_code=None, avatar: Union[(bool, str)]=False, profile: Union[(dict, bool)]=False) -> html.HtmlMenu.HtmlNavBar:
        """Build a top navigation bar with optional logo, title, scroll status and avatar."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        components = []
        (options, scroll_height) = ((options or {}), (- 5))
        # Default logo dimensions track the bar height minus a 10px margin.
        options['logo_height'] = (((height[0] - 10), height[1]) if ('logo_height' not in options) else Arguments.size(options['logo_height'], unit='px'))
        options['logo_width'] = (((height[0] - 10), height[1]) if ('logo_width' not in options) else Arguments.size(options['logo_width'], unit='px'))
        if (logo is None):
            logo = self.page.ui.icons.epyk(size=options['logo_height'])
            logo.style.css.max_width = 40
            logo.style.css.max_height = 40
            components.append(logo)
        else:
            # A bare value (e.g. a URL string) is wrapped in a background-image div.
            if (not hasattr(logo, 'options')):
                logo_url = logo
                logo = self.page.ui.div(height=options['logo_height'], width=options['logo_width'])
                if logo_url:
                    logo.style.css.background_url(logo_url)
            components.append(logo)
        if (title is not None):
            title = self.__format_text(title, self.page.body.style.globals.font.normal(5), italic=False)
            title = self.page.ui.div(title, height=(100, '%'))
            title.style.css.text_transform = 'uppercase'
            title.style.css.margin_left = 5
            title.style.css.margin_right = 5
            title.style.css.bold()
            components.append(title)
        if options.get('status', False):
            # Optional scroll-progress strip attached under the bar.
            scroll = self.page.ui.navigation.scroll()
            scroll_height = 5
            scroll.style.css.display = 'block'
            scroll.options.managed = False
            scroll.style.css.height = scroll_height
        html_nav = html.HtmlMenu.HtmlNavBar(self.page, components, width=width, height=height, options=options, html_code=html_code, profile=profile)
        if options.get('status', False):
            html_nav.scroll = scroll
        html_nav.logo = logo
        if options.get('scroll', True):
            # Hide the bar when scrolling down, reveal it when scrolling up.
            self.page.body.onReady([self.page.js.number(0, 'window.prevScrollpos')])
            self.page.body.scroll([('var currentScrollPos = window.pageYOffset;\nif (window.prevScrollpos > currentScrollPos) {%(dom)s.style.top = "0"} \nelse {%(dom)s.style.top = "-%(height)spx"};\nwindow.prevScrollpos = currentScrollPos;\n' % {'dom': html_nav.dom.varName, 'height': height[0]})])
        if (logo and (options.get('center') is not None)):
            html_nav.logo.style.css.margin = 'auto'
            html_nav.logo.style.css.display = 'block'
        else:
            html_nav.logo.style.css.margin_right = 20
            html_nav.logo.style.css.display = 'inline-block'
        html_nav.title = title
        html_nav.style.css.line_height = height[0]
        # Push the body content below the fixed bar (plus the status strip if any).
        Defaults_css.BODY_CONTAINER = {'padding-top': (height[0] + scroll_height)}
        self.page.body.style.custom_class({'padding-top': ('%spx' % ((height[0] + 5) + scroll_height))}, 'body', is_class=False)
        html.Html.set_component_skin(html_nav)
        if avatar:
            if isinstance(avatar, bool):
                avatar = ''
            html_nav.avatar = self.page.ui.images.avatar(avatar, width=(height[0] - 10), height=(height[0] - 10), options={'status': False})
            html_nav.avatar.style.css.margin_left = 20
        return html_nav

    def banner(self, image: str='', text: str='', link: str='', width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(None, 'px'), options: dict=None, profile: Union[(dict, bool)]=False):
        """Add a centred banner row with an image, a text and a call-to-action link."""
        div = self.page.ui.div(width=width, height=height, options=options, profile=profile)
        h_image = self.page.ui.img(image)
        h_text = self.page.ui.text(text)
        h_link = self.page.ui.links.button('click', link)
        h_row = self.page.ui.row([h_image, self.page.ui.col([h_text, h_link])])
        (div + h_row)
        div.style.css.background_color = self.page.theme.colors[3]
        div.style.css.color = 'white'
        div.style.css.font_size = self.page.body.style.globals.font.normal(5)
        div.style.css.text_align = 'center'
        div.style.css.padding = '5px 15px'
        html.Html.set_component_skin(div)
        return div

    def footer(self, components=None, width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(80, 'px'), fixed=False, options=None, profile=False) -> html.HtmlMenu.HtmlFooter:
        """Add a page footer; when *fixed*, pad the body so content is not covered."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        component = html.HtmlMenu.HtmlFooter(self.page, components, width=width, height=height, options=options, profile=profile)
        if fixed:
            self.page.body.style.css.padding_bottom = height[0]
        else:
            # Non-fixed footer flows with the document.
            component.style.css.position = None
        html.Html.set_component_skin(component)
        return component

    def side(self, components=None, anchor=None, size=262, position='right', options=None, profile=False, z_index: int=20, overlay: bool=False):
        """Add a slide-in side panel toggled by *anchor* (or an auto-created burger icon).

        :param size: panel width in px. :param position: 'left' or 'right'.
        :param overlay: when True, a dimming overlay is toggled with the panel.
        """
        # With a page template the panel must be fixed, otherwise absolute works.
        position_type = ('absolute' if (self.page.body.template is None) else 'fixed')
        d = self.page.ui.div(components, options=options, profile=profile)
        d.css({'background': self.page.theme.colors[2], 'position': position_type, 'top': 0, 'height': '100%', 'overflow-x': 'hidden', 'width': ('%spx' % size), 'z-index': z_index})
        # Start off-screen: the negative margin equals the panel width.
        if (position == 'left'):
            d.css({'left': 0, 'margin-left': ('-%spx' % size), 'border-right': ('1px solid %s' % self.page.theme.notch()), 'padding': '5px'})
        else:
            d.css({'right': 0, 'margin-right': ('-%spx' % size), 'border-left': ('1px solid %s' % self.page.theme.notch()), 'padding': '5px'})
        self.page.body.style.custom_class({'overflow-x': 'hidden', 'position': 'relative', 'min-height': '100%'}, 'html, body', is_class=False)
        def close():
            # NOTE(review): always animates 'margin-left', even for right-side
            # panels — possibly a latent bug; confirm before relying on it.
            return d.dom.toggle_transition('margin-left', '0px', ('-%spx' % size))
        d.close = close
        if (Defaults_css.BODY_CONTAINER is not None):
            # Keep the panel content below a fixed nav bar, if one was added.
            d.style.padding_top = (Defaults_css.BODY_CONTAINER.get('padding-top', (- 10)) + 10)
        if overlay:
            overlay = self.page.ui.div(width=(100, 'vw'), height=(100, 'vh'))
            overlay.style.css.z_index = (z_index - 1)
            overlay.style.css.left = 0
            overlay.style.css.top = 0
            overlay.style.css.right = 0
            overlay.style.css.display = None
            overlay.style.css.position = 'fixed'
            overlay.style.css.cursor = 'pointer'
            overlay.style.css.background = 'rgba(0, 0, 0, 0.3)'
            overlay_event = [overlay.dom.toggle()]
            d.overlay = overlay
        else:
            overlay_event = []
        html.Html.set_component_skin(d)
        if (anchor is None):
            # No anchor supplied: create a burger icon that toggles the panel.
            if (position == 'left'):
                i = self.page.ui.icon('fas fa-bars').click((overlay_event + [d.dom.toggle_transition('margin-left', '0px', ('-%spx' % size))]))
                i.style.css.float = 'right'
                if (position_type == 'fixed'):
                    i.style.css.position = 'fixed'
                    i.style.css.right = 10
                    i.style.css.top = 5
            else:
                i = self.page.ui.icon('fas fa-bars').click((overlay_event + [d.dom.toggle_transition('margin-right', '0px', ('-%spx' % size))]))
                if (position_type == 'fixed'):
                    i.style.css.position = 'fixed'
                    i.style.css.left = 10
                    i.style.css.top = 10
            i.css({'padding': '5px'})
            if overlay:
                # Clicking the overlay behaves like clicking the burger icon.
                overlay.click([i.dom.events.trigger('click')])
        else:
            if (position == 'left'):
                anchor.click((overlay_event + [d.dom.toggle_transition('margin-left', '0px', ('-%spx' % size))]))
            else:
                anchor.click((overlay_event + [d.dom.toggle_transition('margin-right', '0px', ('-%spx' % size))]))
            if overlay:
                overlay.click([anchor.dom.events.trigger('click')])
        return d

    def pilcrow(self, text: str='', html_code: str=None, options: dict=None, profile: Union[(dict, bool)]=None):
        """Add a pilcrow (¶) anchor that scrolls to its own position on click."""
        p = self.page.ui.div(('%s¶' % text), html_code=html_code, options=options, profile=profile)
        p.style.css.font_size = self.page.body.style.globals.font.normal(5)
        p.style.css.cursor = 'pointer'
        p.click([self.page.js.window.scrollTo(y=self.page.js.objects.this.offsetTop)])
        html.Html.set_component_skin(p)
        return p

    def panel(self, width: Union[(tuple, int)]=(100, '%'), height: Union[(tuple, int)]=(100, '%'), options: dict=None, profile: Union[(dict, bool)]=None, helper: str=None) -> html.HtmlMenu.PanelsBar:
        """Add a panels bar (default position: top)."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='%')
        dfl_options = {'position': 'top'}
        if (options is not None):
            dfl_options.update(options)
        h_drawer = html.HtmlMenu.PanelsBar(self.page, width, height, dfl_options, helper, profile)
        html.Html.set_component_skin(h_drawer)
        return h_drawer

    def shortcut(self, components=None, logo=None, size=(40, 'px'), options=None, profile=None, html_code=None) -> html.HtmlMenu.Shortcut:
        """Add a shortcut bar docked to one edge (default: left).

        Horizontal positions ('top'/'bottom') get full width and *size* height;
        vertical positions get *size* width and full height.
        """
        size = Arguments.size(size, unit='px')
        dfl_options = {'position': 'left'}
        if (options is not None):
            dfl_options.update(options)
        if (dfl_options['position'] in ['top', 'bottom']):
            width = (100, '%')
            height = size
        else:
            width = size
            height = (100, '%')
        h_drawer = html.HtmlMenu.Shortcut(self.page, (components or []), logo, width, height, html_code, dfl_options, profile)
        h_drawer.style.css.padding = '5px 10px'
        html.Html.set_component_skin(h_drawer)
        return h_drawer
def test_traverse_attributes():
    """traverse() must yield only provider-typed attributes, skipping plain values."""
    bar_provider = providers.Object('bar')
    baz_provider = providers.Object('baz')
    factory = providers.Factory(dict)
    # 'foo' is a plain string and therefore must not appear in the traversal.
    factory.add_attributes(foo='foo', bar=bar_provider, baz=baz_provider)
    traversed = list(factory.traverse())
    assert len(traversed) == 2
    assert bar_provider in traversed
    assert baz_provider in traversed
class OptionSeriesCylinderDataDatalabelsTextpath(Options):
    """Highcharts cylinder-series data-label textPath options.

    NOTE(review): the getter/setter pairs below appear to have lost their
    ``@property`` / ``@<name>.setter`` decorators during extraction — as
    written each second ``def`` shadows the first. Confirm against upstream.
    """

    def attributes(self):
        # Getter: SVG attributes applied to the text path (no default).
        return self._config_get(None)

    def attributes(self, value: Any):
        # Setter: store *value* as a plain (non-JavaScript) config entry.
        self._config(value, js_type=False)

    def enabled(self):
        # Getter: whether the text path is enabled (defaults to False).
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def plot_lift_curve2(c_l_dict1, c_l_dict2, plot_range_deg=None):
    """Plot two lift-coefficient curves (prediction vs expectation) over angle of attack.

    Each coefficient dict must provide 'c_l_offset', 'c_l_lin' and 'c_l_stall'.
    The curve blends a linear attached-flow model with a sin(2*alpha) stall model,
    weighted by a sigmoid stall-region factor (``cropped_sym_sigmoid``).

    :param c_l_dict1: coefficients for the 'prediction' curve.
    :param c_l_dict2: coefficients for the 'expectation' curve.
    :param plot_range_deg: [min_deg, max_deg] plot range; defaults to [-100, 100].
        (Was a mutable default argument — fixed to the None-sentinel idiom.)
    """
    if plot_range_deg is None:
        plot_range_deg = [-100, 100]
    # One sample per whole degree across the requested range.
    aoa_deg = np.linspace(plot_range_deg[0], plot_range_deg[1],
                          num=(plot_range_deg[1] - plot_range_deg[0]) + 1)
    aoa_rad = (aoa_deg * math.pi) / 180

    def _blended_c_l(c_l_dict, alpha):
        # Blend attached-flow (linear) and stalled (sin 2*alpha) models; the
        # stall weight ramps up around 15 deg via the project sigmoid helper.
        stall_region = cropped_sym_sigmoid(alpha, x_offset=(15 * math.pi) / 180, scale_fac=30)
        flow_attached_region = 1 - stall_region
        return ((flow_attached_region * c_l_dict['c_l_offset'])
                + (flow_attached_region * c_l_dict['c_l_lin'] * alpha)
                + (stall_region * math.sin(2 * alpha) * c_l_dict['c_l_stall']))

    # cropped_sym_sigmoid may not be vectorised, so evaluate point-wise.
    c_l_vec1 = np.array([_blended_c_l(c_l_dict1, alpha) for alpha in aoa_rad])
    c_l_vec2 = np.array([_blended_c_l(c_l_dict2, alpha) for alpha in aoa_rad])
    plt.plot(aoa_deg, c_l_vec1, label='prediction')
    plt.plot(aoa_deg, c_l_vec2, label='expectation')
    plt.title('Lift coefficient over angle of attack [deg]')
    plt.xlabel('Angle of Attack [deg]')
    plt.ylabel('Lift Coefficient')
def delete_saved_data(ctx, profile_id):
    """Delete every saved data file whose name starts with *profile_id*.

    Prompts for confirmation first; each deletion is logged, indexed into
    Elasticsearch and echoed to the console.
    """
    matches = list(ctx.obj.data_dir.rglob(f'{profile_id}*'))
    if not matches:
        click.echo('[*] No associated saved data found for configuration profile')
        return
    question = f'[*] Do you want to delete the {len(matches)} saved files associated with the configuration profile?'
    if not click.confirm(question, default=True):
        return
    for file in matches:
        file.unlink()
        msg = f'File deleted ({file})'
        LOGGER.info(msg)
        # Record the deletion in the audit index as well.
        index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
        click.secho(f'[*] {msg}', fg='green')
class TestNamedTraitObserverEqualHash(unittest.TestCase):
    """Equality, hashing, slots and repr round-trip of NamedTraitObserver."""

    def test_not_equal_notify(self):
        """Observers differing only in ``notify`` compare unequal."""
        first = NamedTraitObserver(name='foo', notify=True, optional=True)
        second = NamedTraitObserver(name='foo', notify=False, optional=True)
        self.assertNotEqual(first, second)

    def test_not_equal_name(self):
        """Observers differing only in ``name`` compare unequal."""
        first = NamedTraitObserver(name='foo', notify=True, optional=True)
        second = NamedTraitObserver(name='bar', notify=True, optional=True)
        self.assertNotEqual(first, second)

    def test_not_equal_optional(self):
        """Observers differing only in ``optional`` compare unequal."""
        first = NamedTraitObserver(name='foo', notify=True, optional=False)
        second = NamedTraitObserver(name='foo', notify=True, optional=True)
        self.assertNotEqual(first, second)

    def test_equal_observers(self):
        """Identically-configured observers are equal and hash alike."""
        first = NamedTraitObserver(name='foo', notify=True, optional=True)
        second = NamedTraitObserver(name='foo', notify=True, optional=True)
        self.assertEqual(first, second)
        self.assertEqual(hash(first), hash(second))

    def test_not_equal_type(self):
        """An object of a different type never compares equal, even with matching attributes."""
        observer = NamedTraitObserver(name='foo', notify=True, optional=True)
        lookalike = mock.Mock()
        lookalike.name = 'foo'
        lookalike.notify = True
        lookalike.optional = True
        self.assertNotEqual(observer, lookalike)

    def test_slots(self):
        """__slots__ usage: neither __dict__ nor __weakref__ exists on instances."""
        observer = NamedTraitObserver(name='foo', notify=True, optional=True)
        with self.assertRaises(AttributeError):
            observer.__dict__
        with self.assertRaises(AttributeError):
            observer.__weakref__

    def test_eval_repr_roundtrip(self):
        """repr() must be a valid constructor expression for an equal observer."""
        observer = NamedTraitObserver(name='foo', notify=True, optional=True)
        self.assertEqual(eval(repr(observer)), observer)
class TestPatchStorageConfig():
(scope='function')
def url(self, oauth_client: ClientDetail) -> str:
return (V1_URL_PREFIX + STORAGE_CONFIG)
(scope='function')
def payload(self):
return [{'name': 'test destination', 'type': StorageType.s3.value, 'details': {'auth_method': S3AuthMethod.SECRET_KEYS.value, 'bucket': 'some-bucket', 'naming': 'some-filename-convention-enum', 'max_retries': 10}, 'format': 'csv'}]
def test_patch_storage_config_not_authenticated(self, api_client: TestClient, payload, url):
response = api_client.patch(url, headers={}, json=payload)
assert (401 == response.status_code)
def test_patch_storage_config_incorrect_scope(self, api_client: TestClient, payload, url, generate_auth_header):
auth_header = generate_auth_header([STORAGE_READ])
response = api_client.patch(url, headers=auth_header, json=payload)
assert (403 == response.status_code)
def test_patch_storage_config_with_no_key(self, db: Session, api_client: TestClient, payload, url, generate_auth_header):
auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
response = api_client.patch(url, headers=auth_header, json=payload)
assert (200 == response.status_code)
response_body = json.loads(response.text)
assert (response_body['succeeded'][0]['key'] == 'test_destination')
storage_config = db.query(StorageConfig).filter_by(key='test_destination')[0]
storage_config.delete(db)
def test_put_storage_config_with_invalid_key(self, db: Session, api_client: TestClient, payload, url, generate_auth_header):
payload[0]['key'] = '*invalid-key'
auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
response = api_client.patch(url, headers=auth_header, json=payload)
assert (422 == response.status_code)
assert (json.loads(response.text)['detail'][0]['msg'] == "FidesKeys must only contain alphanumeric characters, '.', '_', '<', '>' or '-'. Value provided: *invalid-key")
def test_patch_storage_config_with_key(self, db: Session, api_client: TestClient, payload, url, generate_auth_header):
payload[0]['key'] = 'my_s3_bucket'
auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
response = api_client.patch(url, headers=auth_header, json=payload)
assert (200 == response.status_code)
response_body = json.loads(response.text)
storage_config = db.query(StorageConfig).filter_by(key='my_s3_bucket')[0]
expected_response = {'succeeded': [{'name': 'test destination', 'type': StorageType.s3.value, 'details': {'auth_method': S3AuthMethod.SECRET_KEYS.value, 'bucket': 'some-bucket', 'naming': 'some-filename-convention-enum', 'max_retries': 10}, 'key': 'my_s3_bucket', 'format': 'csv', 'is_default': False}], 'failed': []}
assert (expected_response == response_body)
storage_config.delete(db)
.parametrize('auth_method', [S3AuthMethod.SECRET_KEYS.value, S3AuthMethod.AUTOMATIC.value])
def test_patch_storage_config_with_different_auth_methods(self, db: Session, api_client: TestClient, payload, url, generate_auth_header, auth_method):
payload[0]['key'] = 'my_s3_bucket'
payload[0]['details']['auth_method'] = auth_method
auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
response = api_client.patch(url, headers=auth_header, json=payload)
assert (200 == response.status_code)
response_body = json.loads(response.text)
storage_config = db.query(StorageConfig).filter_by(key='my_s3_bucket')[0]
assert (auth_method == response_body['succeeded'][0]['details']['auth_method'])
storage_config.delete(db)
    def test_patch_config_response_format_not_specified(self, url, db: Session, api_client: TestClient, generate_auth_header):
        """When no 'format' is given, the created config defaults to JSON (on create and update)."""
        key = 'my_s3_upload'
        payload = [{'key': key, 'name': 'my-test-dest', 'type': StorageType.s3.value, 'details': {'auth_method': S3AuthMethod.SECRET_KEYS.value, 'bucket': 'some-bucket', 'naming': 'some-filename-convention-enum', 'max_retries': 10}}]
        auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
        response = api_client.patch(url, headers=auth_header, json=payload)
        assert (response.status_code == 200)
        assert (json.loads(response.text)['succeeded'][0]['format'] == ResponseFormat.json.value)
        # PATCH again against the canonical storage-config URL: an update must also keep the default
        response = api_client.patch((V1_URL_PREFIX + STORAGE_CONFIG), headers=auth_header, json=payload)
        assert (response.status_code == 200)
        assert (json.loads(response.text)['succeeded'][0]['format'] == ResponseFormat.json.value)
        storage_config = StorageConfig.get_by(db=db, field='key', value=key)
        storage_config.delete(db)
    def test_patch_storage_config_missing_detail(self, api_client: TestClient, url, generate_auth_header):
        """Omitting a required detail ('bucket') yields a 422 with a field-level error."""
        auth_header = generate_auth_header([STORAGE_CREATE_OR_UPDATE])
        response = api_client.patch(url, headers=auth_header, json=[{'key': 'my_s3_upload', 'name': 'my-test-dest', 'type': StorageType.s3.value, 'details': {'auth_method': S3AuthMethod.SECRET_KEYS.value, 'naming': 'request_id', 'max_retries': 10}}])
        assert (response.status_code == 422)
        errors = response.json()['detail']
        assert ('details' in errors[0]['loc'])
        assert (errors[0]['msg'] == '["field required (\'bucket\',)"]')
@patch.object(EcsClient, '__init__')
def test_is_deployed_if_no_tasks_should_be_running(client, service):
    """is_deployed() is True when the service wants zero tasks and none are listed.

    The decorator line had been stripped to a bare tuple ``(EcsClient, '__init__')``
    (a no-op expression); restored as ``@patch.object(...)`` so ``client`` is the
    injected mock — TODO confirm against the original test module.
    """
    client.list_tasks.return_value = RESPONSE_LIST_TASKS_0
    action = EcsAction(client, CLUSTER_NAME, SERVICE_NAME)
    service[u'desiredCount'] = 0
    is_deployed = action.is_deployed(service)
    assert (is_deployed is True)
class VLANHost(FaucetHost):
    """Mininet host whose default interface carries one or more 802.1Q VLAN sub-interfaces."""

    # name of the untagged root interface (its addresses are flushed at the end of config())
    intf_root_name = None
    # list of VLAN IDs available to this host
    vlans = None
    # maps a vlan index (or tuple of indices for stacked/QinQ VLANs) to the created interface name(s)
    vlan_intfs = None

    def config(self, vlans=None, **params):
        """Configure the host to own one or more VLAN sub-interfaces.

        vlans: list of VLAN IDs; params may carry 'vlan_intfs' mapping a vlan
        index (int) or tuple of indices (stacked VLANs) to an IP address, or a
        plain 'ip' to assign to every VLAN interface. All `ip` commands are
        collected and issued as a single batch at the end.
        """
        super_config = super().config(**params)
        if (vlans is None):
            vlans = [100]
        self.vlans = vlans
        self.vlan_intfs = {}
        batch_cmds = []
        intf = self.defaultIntf()
        self.intf_root_name = intf.name
        unique_intfs = set()
        def _config_vlan(root_name, full_name, vlan_id):
            # create the VLAN sub-interface on top of root_name and bring it up
            batch_cmds.extend([('link add link %s name %s type vlan id %s' % (root_name, full_name, vlan_id)), ('link set dev %s up' % full_name)])
        def _config_ip(config_full_name, config_ip):
            # queue an address assignment for the given interface
            batch_cmds.extend([('addr add %s dev %s' % (config_ip, config_full_name))])
        if ('vlan_intfs' in params):
            vlan_intfs = params.get('vlan_intfs', {})
            for (vlan_id, ip_addr) in vlan_intfs.items():
                if isinstance(vlan_id, tuple):
                    # stacked VLANs: build intf.vlanA.vlanB..., creating each layer once
                    intf_name = ('%s' % intf.name)
                    for vlan_i in vlan_id:
                        prev_name = intf_name
                        intf_name += ('.%s' % vlan_i)
                        if (intf_name not in unique_intfs):
                            _config_vlan(prev_name, intf_name, vlans[vlan_i])
                            unique_intfs.add(intf_name)
                        self.nameToIntf[intf_name] = intf
                    # the IP goes only on the innermost (final) interface
                    self.vlan_intfs.setdefault(vlan_id, [])
                    self.vlan_intfs[vlan_id].append(intf_name)
                    _config_ip(intf_name, ip_addr)
                else:
                    # single VLAN indexed into the vlans list
                    intf_name = ('%s.%s' % (intf, vlans[vlan_id]))
                    _config_vlan(intf, intf_name, vlans[vlan_id])
                    _config_ip(intf_name, ip_addr)
                    self.nameToIntf[intf_name] = intf
                    self.vlan_intfs[vlan_id] = intf_name
        else:
            # no explicit mapping: one sub-interface per VLAN ID, all sharing params['ip']
            for vlan_id in vlans:
                intf_name = ('%s.%s' % (intf, vlan_id))
                _config_vlan(intf, intf_name, vlan_id)
                _config_ip(intf_name, params['ip'])
                self.nameToIntf[intf_name] = intf
                self.vlan_intfs.setdefault(vlan_id, [])
                self.vlan_intfs[vlan_id].append(intf_name)
            # repoint the default interface name at the last VLAN sub-interface
            intf.name = intf_name
        # drop any addresses left on the untagged root interface
        batch_cmds.extend([('addr flush dev %s' % self.intf_root_name)])
        self.run_ip_batch(batch_cmds)
        return super_config
def test_adposition_complements(logfile):
    """Check each adposition lemma for plausible complement placement.

    Reports an error for lemmas with neither a left/right complement nor a
    possessive reading, and logs a suspicion when the bare ('none') count
    exceeds the best attested complement configuration.
    """
    for (lemma, comps) in adposition_complements.items():
        has_left = ('left' in comps)
        has_right = ('right' in comps)
        has_poss = ('poss' in comps)
        if not (has_left or has_right or has_poss):
            print_error_word_miss_context(lemma, '-1 / +1 COMP or 0 POSS', logfile)
        best = 0
        total = 0
        if has_left:
            left_count = sum(comps['left'].values())
            total += left_count
            best = left_count
        if has_right:
            right_count = sum(comps['right'].values())
            total += right_count
            best = max(best, right_count)
        if has_poss:
            total += comps['poss']
            best = max(best, comps['poss'])
        if (('none' in comps) and (has_right or has_left)):
            if (comps['none'] > best):
                print_suspicion_word_context(lemma, '-1 / +1 COMP or 0 POSS', best, comps['none'], total, logfile)
class ModelAPITestCase(unittest.TestCase):
    """Tests for the compiled Model Python API: run/benchmark/profile, constants, views, errors."""

    def _get_simple_graph_and_output(self, test_name: str, dynamic_shape: bool=False, unsqueeze_output: bool=False) -> Tuple[(Model, Tuple[(torch.Tensor, torch.Tensor)], Tuple[(torch.Tensor, torch.Tensor)])]:
        """Compile input_0.reshape([1]) * input_1 and return (module, inputs, (expected, storage))."""
        target = detect_target()
        input_0 = Tensor(shape=[1], dtype='float16', name='input_0', is_input=True)
        # reshape makes input_0's consumer a view, exercising the alias bookkeeping
        input_0_view = ops.reshape()(input_0, [1])
        input_1 = Tensor(shape=[(IntVar([1, 1]) if dynamic_shape else 1)], dtype='float16', name='input_1', is_input=True)
        output = ops.elementwise(FuncEnum.MUL)(input_0_view, input_1)
        if unsqueeze_output:
            output = ops.unsqueeze(0)(output)
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', test_name)
        in0_pt = torch.randn([1]).cuda().half()
        in1_pt = torch.randn([1]).cuda().half()
        output_pt = torch.mul(in0_pt, in1_pt)
        if unsqueeze_output:
            output_pt = output_pt.unsqueeze(0)
        output_storage = torch.randn(output_pt.shape).cuda().half()
        return (module, (in0_pt, in1_pt), (output_pt, output_storage))
    def test_set_unnamed_input(self):
        """An input tensor without an explicit name can still be set positionally."""
        target = detect_target()
        input_0 = Tensor(shape=[1], dtype='float16', name='input_0', is_input=True)
        input_1 = Tensor(shape=[1], dtype='float16', is_input=True)
        output = ops.elementwise(FuncEnum.SUB)(input_0, input_1)
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', 'test_set_unnamed_input')
        in0_pt = torch.randn([1]).cuda().half()
        in1_pt = torch.randn([1]).cuda().half()
        output_pt = (in0_pt - in1_pt)
        output_storage = torch.empty_like(output_pt)
        module.run_with_tensors([in0_pt, in1_pt], [output_storage])
        self.assertTrue(torch.allclose(output_storage, output_pt))
    def _test_param_name_to_index(self, output_is_view: bool, name: str):
        """Helper: verify input/output name->index maps, optionally with a view output."""
        target = detect_target()
        input_0 = Tensor(shape=[1, 2], dtype='float16', name='input_0', is_input=True)
        input_1 = Tensor(shape=[1, 2], dtype='float16', name='input_1', is_input=True)
        output = ops.elementwise(FuncEnum.SUB)(input_0, input_1)
        if output_is_view:
            output = ops.squeeze(0)(output)
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', name)
        input_name_to_index = module.get_input_name_to_index_map()
        self.assertEqual(input_name_to_index, {'input_0': 0, 'input_1': 1})
        output_name_to_index = module.get_output_name_to_index_map()
        self.assertEqual(output_name_to_index, {'output': 0})
    def test_get_param_name_to_index(self):
        """Name->index maps are correct when the output is not a view."""
        self._test_param_name_to_index(output_is_view=False, name='test_get_param_name_to_index')
    def test_get_param_name_to_index_output_is_view(self):
        """Name->index maps are correct when the output is a view (squeeze)."""
        self._test_param_name_to_index(output_is_view=True, name='test_get_param_name_to_index_output_is_view')
    def test_error_handling_not_enough_inputs_outputs(self):
        """Passing too few inputs or outputs raises RuntimeError."""
        (module, (in0_pt, in1_pt), outputs) = self._get_simple_graph_and_output('test_error_handling_not_enough_inputs_outputs')
        self.assertRaises(RuntimeError, module.run, [], [torch_to_ait_data(outputs[(- 1)])])
        self.assertRaises(RuntimeError, module.run_with_tensors, [in0_pt, in1_pt], [])
    def test_error_handling_null_inputs_outputs(self):
        """Null (0) data pointers in inputs or outputs raise RuntimeError."""
        (module, (in0_pt, in1_pt), outputs) = self._get_simple_graph_and_output('test_error_handling_null_inputs_outputs')
        in0_pt_size = list(in0_pt.size())
        in1_pt_size = list(in1_pt.size())
        self.assertRaises(RuntimeError, module.run, [AITData(0, in0_pt_size, 'float16'), AITData(0, in1_pt_size, 'float16')], [torch_to_ait_data(outputs[(- 1)])])
        self.assertRaises(RuntimeError, module.run, [AITData(in0_pt.data_ptr(), in0_pt_size, 'float16'), AITData(in1_pt.data_ptr(), in1_pt_size, 'float16')], [AITData(0, list(outputs[(- 1)].size()), 'float16')])
    def test_error_handling_wrong_param_dtypes(self):
        """Mismatched dtypes on inputs or outputs raise RuntimeError."""
        (module, (in0_pt, in1_pt), (out_pt, out_ait)) = self._get_simple_graph_and_output('test_error_handling')
        in0_pt_size = list(in0_pt.size())
        in1_pt_size = list(in1_pt.size())
        self.assertRaises(RuntimeError, module.run, [AITData(in0_pt.data_ptr(), in0_pt_size, 'float32'), AITData(in1_pt.data_ptr(), in1_pt_size, 'float32')], [torch_to_ait_data(out_ait)])
        self.assertRaises(RuntimeError, module.run, [torch_to_ait_data(in0_pt), torch_to_ait_data(in1_pt)], [AITData(out_ait.data_ptr(), list(out_ait.size()), 'float32')])
        self.assertRaises(RuntimeError, module.run_with_tensors, [in0_pt, in1_pt.float()], [out_ait])
        self.assertRaises(RuntimeError, module.run_with_tensors, [in0_pt, in1_pt], [out_ait.float()])
    def test_one_input_many_constants(self):
        """Multiple constants bound via set_constant_with_tensor feed the graph correctly."""
        target = detect_target()
        input_0 = Tensor(shape=[1, 2], dtype='float16', name='input_0', is_input=True)
        constant_1 = Tensor(shape=[1, 2], dtype='float16', name='constant_1')
        constant_2 = Tensor(shape=[1, 2], dtype='float16', name='constant_2')
        x = ops.elementwise(FuncEnum.MUL)(input_0, constant_1)
        output = ops.elementwise(FuncEnum.MUL)(x, constant_2)
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', 'test_one_input_many_constants')
        in0_pt = torch.randn((1, 2)).cuda().half()
        const_1_pt = torch.randn((1, 2)).cuda().half()
        const_2_pt = torch.randn((1, 2)).cuda().half()
        module.set_constant_with_tensor('constant_1', const_1_pt)
        module.set_constant_with_tensor('constant_2', const_2_pt)
        output_data = torch.empty([1, 2]).cuda().half()
        module.run_with_tensors([in0_pt], [output_data])
        expected = ((in0_pt * const_1_pt.cuda()) * const_2_pt.cuda())
        self.assertTrue(torch.allclose(output_data, expected))
    def test_get_param_maximum_shape(self):
        """get_output_maximum_shape accepts both an index and a name."""
        for dynamic_shape in (False, True):
            (module, inputs, output_np) = self._get_simple_graph_and_output('test_get_param_maximum_shape', dynamic_shape=dynamic_shape)
            names_to_index = module.get_output_name_to_index_map()
            output_shape = module.get_output_maximum_shape(names_to_index['output'])
            self.assertEqual(output_shape, [1])
            output_shape = module.get_output_maximum_shape('output')
            self.assertEqual(output_shape, [1])
    def test_error_handling_maximum_shape(self):
        """Unknown output names raise ValueError; non-str/int arguments raise TypeError."""
        (module, inputs, output_np) = self._get_simple_graph_and_output('test_get_param_maximum_shape')
        self.assertRaises(ValueError, module.get_output_maximum_shape, 'not_an_output')
        self.assertRaises(TypeError, module.get_output_maximum_shape, [])
    def test_get_param_maximum_shape_output_is_view(self):
        """Maximum shape is reported for a view (unsqueezed) output."""
        # NOTE(review): this reuses the artifact name 'test_get_param_maximum_shape' from another
        # test while compiling a different graph — presumably compile_model rebuilds; confirm.
        for dynamic_shape in (False, True):
            (module, inputs, output_np) = self._get_simple_graph_and_output('test_get_param_maximum_shape', dynamic_shape=dynamic_shape, unsqueeze_output=True)
            names_to_index = module.get_output_name_to_index_map()
            output_shape = module.get_output_maximum_shape(names_to_index['output'])
            self.assertEqual(output_shape, [1, 1])
    def test_dynamic_shape_api(self):
        """A graph with a dynamic batch dim runs correctly at both shape bounds."""
        target = detect_target()
        dynamic_dim = IntVar([1, 10], name='batch_size')
        input_0 = Tensor(shape=[dynamic_dim, 2], dtype='float16', name='input_0', is_input=True)
        input_1 = Tensor(shape=[dynamic_dim, 2], dtype='float16', name='input_1', is_input=True)
        output = ops.elementwise(FuncEnum.MUL)(input_1, input_0)
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', 'dynamic_shape_api')
        for batch_size in (1, 10):
            in0_pt = torch.randn([batch_size, 2]).cuda().half()
            in1_pt = torch.randn([batch_size, 2]).cuda().half()
            output_pt = torch.mul(in0_pt, in1_pt)
            # allocate at the maximum shape; run_with_tensors returns the actual-shaped view
            output_storage = torch.empty(module.get_output_maximum_shape('output')).cuda().half()
            outputs_ait = module.run_with_tensors([in0_pt, in1_pt], [output_storage])
            self.assertTrue(torch.allclose(output_pt, outputs_ait['output']))
    def _test_output_is_alias_of_input(self, view_of_view: bool):
        """Helper: an output that is a reshape (view, or view-of-view) of an input is copied correctly."""
        target = detect_target()
        input_0 = Tensor(shape=[2, 2], dtype='float16', name='input_0', is_input=True)
        output = ops.reshape()(input_0, [4])
        if view_of_view:
            output = ops.reshape()(output, [4, 1])
        output._attrs['is_output'] = True
        output._attrs['name'] = 'output'
        module = compile_model(output, target, './tmp', 'output_is_alias_of_input')
        in0_pt = torch.randn((2, 2)).cuda().half()
        out_shape = ((4, 1) if view_of_view else (4,))
        out_pt = in0_pt.reshape(out_shape)
        out_ait = torch.empty(out_shape).cuda().half()
        module.run_with_tensors([in0_pt], [out_ait])
        self.assertTrue(torch.equal(out_pt, out_ait))
def test_output_is_view_of_input(self):
self._test_output_is_alias_of_input(False)
def test_output_is_view_of_view_of_input(self):
self._test_output_is_alias_of_input(True)
    def test_output_is_input(self):
        """A tensor that is both input and output round-trips and appears in both maps."""
        target = detect_target()
        input_0 = Tensor(shape=[2, 2], dtype='float16', name='input_0', is_input=True, is_output=True)
        module = compile_model(input_0, target, './tmp', 'output_is_input')
        in0_pt = torch.randn((2, 2)).cuda().half()
        out_ait = torch.empty((2, 2)).cuda().half()
        module.run_with_tensors([in0_pt], [out_ait])
        self.assertTrue(torch.equal(out_ait, in0_pt))
        inputs = module.get_input_name_to_index_map()
        self.assertEqual(inputs, {'input_0': 0})
        outputs = module.get_output_name_to_index_map()
        self.assertEqual(outputs, {'input_0': 0})
    def _test_output_is_view_of_constant(self, view_of_view: bool):
        """Helper: an output that is a reshape (view, or view-of-view) of a bound constant."""
        target = detect_target()
        const = Tensor(shape=[2, 2], dtype='float16', name='constant')
        output = ops.reshape()(const, [4])
        if view_of_view:
            output = ops.reshape()(output, [4, 1])
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', 'output_is_view_of_constant')
        const_pt = torch.randn((2, 2)).cuda().half()
        out_shape = ((4, 1) if view_of_view else (4,))
        out_pt = const_pt.reshape(out_shape)
        out_ait = torch.empty(out_shape).cuda().half()
        module.set_constant_with_tensor('constant', const_pt)
        module.run_with_tensors([], [out_ait])
        self.assertTrue(torch.equal(out_ait, out_pt))
def test_output_is_view_of_constant(self):
self._test_output_is_view_of_constant(False)
def test_output_is_view_of_view_of_constant(self):
self._test_output_is_view_of_constant(True)
    def test_output_is_constant(self):
        """A bound constant marked as output is copied out verbatim."""
        target = detect_target()
        const = Tensor(shape=[2, 2], dtype='float16', name='constant', is_output=True)
        module = compile_model(const, target, './tmp', 'output_is_constant')
        const_pt = torch.randn((2, 2)).cuda().half()
        out_ait = torch.empty((2, 2)).cuda().half()
        module.set_constant_with_tensor('constant', const_pt)
        module.run_with_tensors([], [out_ait])
        self.assertTrue(torch.equal(out_ait, const_pt))
    def _test_output_is_view_of_another_output(self, view_of_view: bool):
        """Helper: one output is a reshape view of another output and also feeds a third output."""
        target = detect_target()
        input_0 = Tensor(shape=[2, 2], dtype='float16', name='input_0', is_input=True)
        output = ops.elementwise(FuncEnum.MUL)(input_0, input_0)
        output._attrs['is_output'] = True
        output._attrs['name'] = 'output'
        view = ops.reshape()(output, (4,))
        if view_of_view:
            view = ops.reshape()(view, (4, 1))
        view._attrs['is_output'] = True
        view._attrs['name'] = 'view'
        output1 = ops.elementwise(FuncEnum.MUL)(view, view)
        output1._attrs['is_output'] = True
        output1._attrs['name'] = 'output1'
        outputs = [output, view, output1]
        module = compile_model(outputs, target, './tmp', 'output_is_alias_of_another_output')
        out_shape = ((4, 1) if view_of_view else (4,))
        in0_pt = torch.randn((2, 2)).cuda().half()
        out_pt = (in0_pt * in0_pt)
        view_pt = out_pt.reshape(out_shape)
        out1_pt = (view_pt * view_pt)
        out_ait = torch.empty((2, 2)).cuda().half()
        view_ait = torch.empty(out_shape).cuda().half()
        out1_ait = torch.empty(out_shape).cuda().half()
        module.run_with_tensors([in0_pt], [out_ait, view_ait, out1_ait])
        self.assertTrue(torch.equal(out_pt, out_ait))
        self.assertTrue(torch.equal(view_pt, view_ait))
        self.assertTrue(torch.equal(out1_pt, out1_ait))
def test_output_is_view_of_another_output(self):
self._test_output_is_view_of_another_output(False)
def test_output_is_view_of_view_of_another_output(self):
self._test_output_is_view_of_another_output(True)
    def test_output_is_alias_of_input_and_another_output(self):
        """Two outputs that are both reshape views of the same input are each filled correctly."""
        target = detect_target()
        input_0 = Tensor(shape=[2, 2], dtype='float16', name='input_0', is_input=True)
        view1 = ops.reshape()(input_0, (1, 4))
        view1._attrs['is_output'] = True
        view1._attrs['name'] = 'view1'
        view2 = ops.reshape()(view1, (4,))
        view2._attrs['is_output'] = True
        view2._attrs['name'] = 'view2'
        module = compile_model([view1, view2], target, './tmp', 'output_is_alias_of_another_output')
        in0_pt = torch.randn((2, 2)).cuda().half()
        view1_pt = in0_pt.reshape((1, 4))
        view2_pt = in0_pt.reshape((4,))
        view1_ait = torch.empty((1, 4)).cuda().half()
        view2_ait = torch.empty((4,)).cuda().half()
        module.run_with_tensors([in0_pt], [view1_ait, view2_ait])
        self.assertTrue(torch.equal(view1_pt, view1_ait))
        self.assertTrue(torch.equal(view2_pt, view2_ait))
    def test_benchmark(self):
        """benchmark/benchmark_with_tensors return a positive runtime and correct outputs."""
        (module, (in0, in1), (out_pt, out_ait)) = self._get_simple_graph_and_output('test_benchmark')
        (runtime_ms, _, outputs_ait) = module.benchmark([torch_to_ait_data(in0), torch_to_ait_data(in1)], [torch_to_ait_data(out_ait)])
        self.assertGreater(runtime_ms, 0)
        self.assertTrue(torch.equal(out_pt, out_ait))
        self.assertEqual(outputs_ait, {'output': AITData(out_ait.data_ptr(), [1], 'float16')})
        (runtime_ms, _, tensors) = module.benchmark_with_tensors([in0, in1], [out_ait])
        self.assertGreater(runtime_ms, 0)
        self.assertTrue(torch.equal(out_pt, out_ait))
        self.assertEqual(len(tensors), 1)
        self.assertTrue(torch.equal(tensors['output'], (in0 * in1)))
def test_profile(self):
(module, (in0, in1), (out_pt, out_ait)) = self._get_simple_graph_and_output('test_profile', False, True)
with tempfile.TemporaryDirectory() as tmpdirname:
profile_name = os.path.join(tmpdirname, 'profile.json')
module.profile([torch_to_ait_data(in0), torch_to_ait_data(in1)], [torch_to_ait_data(out_ait)], 20, profile_name)
with open(profile_name) as f:
report = json.load(f)
self.assertTrue(len(report), 1)
for (_, elapsed) in report.items():
self.assertGreater(elapsed['ms_per_iter'], 0)
    def test_get_output_dtype(self):
        """get_output_dtype returns the runtime dtype code for output 0."""
        (module, inputs, output_np) = self._get_simple_graph_and_output('test_get_param_dtype')
        # 1 is presumably the runtime's enum code for float16 — confirm against the dtype enum
        self.assertEqual(module.get_output_dtype(0), 1)
    def test_dynamic_dims_out_of_bounds_error(self):
        """Shapes outside the dynamic-dim bounds raise; an in-bounds shape runs fine."""
        target = detect_target()
        # NOTE(review): bounds are given as [10, 1] — presumably IntVar normalizes ordering; confirm
        batch_size = IntVar([10, 1], name='batch_size')
        input_0 = Tensor(shape=[batch_size, 10], dtype='float16', name='input_0', is_input=True)
        output = ops.elementwise(FuncEnum.MUL)(input_0, input_0)
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', 'test_dynamic_dim_out_of_bounds')
        in0_pt = torch.randn((5, 10)).half().cuda()
        out_pt = torch.empty(module.get_output_maximum_shape('output')).cuda().half()
        self.assertRaises(RuntimeError, module.run, [AITData(in0_pt.data_ptr(), [0, 10], 'float16')], [torch_to_ait_data(out_pt)])
        self.assertRaises(RuntimeError, module.run, [AITData(in0_pt.data_ptr(), [11, 10], 'float16')], [torch_to_ait_data(out_pt)])
        out = module.run_with_tensors([in0_pt], [out_pt])
        self.assertTrue(torch.equal(out['output'], (in0_pt * in0_pt)))
    def test_output_can_be_null_if_lower_bound_size_is_zero(self):
        """Null pointers are accepted when the dynamic lower bound makes tensors empty."""
        target = detect_target()
        dynamic_dim = IntVar([0, 10], name='batch_size')
        input_0 = Tensor(shape=[dynamic_dim, 2], dtype='float16', name='input_0', is_input=True)
        input_1 = Tensor(shape=[dynamic_dim, 2], dtype='float16', name='input_1', is_input=True)
        output = ops.elementwise(FuncEnum.MUL)(input_1, input_0)
        output._attrs['name'] = 'output'
        output._attrs['is_output'] = True
        module = compile_model(output, target, './tmp', 'test_output_can_be_null_if_lower_bound_size_is_zero')
        shape = [0, 2]
        # all pointers are 0 (null) — legal because the actual batch size is 0
        module.run([AITData(0, shape, 'float16'), AITData(0, shape, 'float16')], [AITData(0, [10, 2], 'float16')])
    def test_with_tensors_api_fails_on_cpu_inputs(self):
        """run/benchmark_with_tensors reject CPU tensors on either side with ValueError."""
        (module, (in0, in1), (out_pt, out_ait)) = self._get_simple_graph_and_output('test_fail_on_cpu_inputs')
        self.assertRaises(ValueError, module.run_with_tensors, [in0.cpu(), in1.cpu()], [out_ait])
        self.assertRaises(ValueError, module.run_with_tensors, [in0, in1], [out_ait.cpu()])
        self.assertRaises(ValueError, module.benchmark_with_tensors, [in0.cpu(), in1.cpu()], [out_ait])
        self.assertRaises(ValueError, module.benchmark_with_tensors, [in0, in1], [out_ait.cpu()])
def test_with_tensors_api_fails_on_strided_inputs(self):
target = detect_target()
input_0 = Tensor(shape=[1, 2], dtype='float16', name='input_0', is_input=True)
output = ops.elementwise(FuncEnum.MUL)(input_0, input_0)
output._attrs['name'] = 'output'
output._attrs['is_output'] = True
module = compile_model(output, target, './tmp', 'test_with_tensors_api_fails_on_strided_inputs')
x = torch.randn((1, 1))
in0_pt = x.expand((1, 2))
out_pt = x.expand((1, 2))
self.assertRaises(ValueError, module.run_with_tensors, [in0_pt], [out_pt.contiguous()])
self.assertRaises(ValueError, module.run_with_tensors, [x.contiguous()], [out_pt])
    def _get_graph_three_inputs_three_outputs(self):
        """Helper: build a 3-in/3-out ADD graph and return (module, inputs, storages, expected)."""
        target = detect_target()
        input_0 = Tensor(shape=[1], dtype='float16', name='input_0', is_input=True)
        input_1 = Tensor(shape=[1], dtype='float16', name='input_1', is_input=True)
        input_2 = Tensor(shape=[1], dtype='float16', name='input_2', is_input=True)
        output_0 = ops.elementwise(FuncEnum.ADD)(input_0, input_1)
        output_1 = ops.elementwise(FuncEnum.ADD)(input_1, input_2)
        output_2 = ops.elementwise(FuncEnum.ADD)(input_0, input_2)
        output_0._attrs['name'] = 'output_0'
        output_1._attrs['name'] = 'output_1'
        output_2._attrs['name'] = 'output_2'
        output_0._attrs['is_output'] = True
        output_1._attrs['is_output'] = True
        output_2._attrs['is_output'] = True
        module = compile_model([output_0, output_1, output_2], target, './tmp', 'test_dict_api')
        in0_pt = torch.randn((1,)).cuda().half()
        in1_pt = torch.randn((1,)).cuda().half()
        in2_pt = torch.randn((1,)).cuda().half()
        out0_pt = torch.empty((1,)).cuda().half()
        out1_pt = torch.empty((1,)).cuda().half()
        out2_pt = torch.empty((1,)).cuda().half()
        expected_out0 = (in0_pt + in1_pt)
        expected_out1 = (in1_pt + in2_pt)
        expected_out2 = (in0_pt + in2_pt)
        return (module, (in0_pt, in1_pt, in2_pt), (out0_pt, out1_pt, out2_pt), (expected_out0, expected_out1, expected_out2))
    def test_dict_api(self):
        """run/benchmark accept dict-style (name-keyed) inputs and outputs."""
        (module, (in0_pt, in1_pt, in2_pt), outputs, expected) = self._get_graph_three_inputs_three_outputs()
        (out0_pt, out1_pt, out2_pt) = outputs
        in_args = {'input_0': torch_to_ait_data(in0_pt), 'input_1': torch_to_ait_data(in1_pt), 'input_2': torch_to_ait_data(in2_pt)}
        out_args = {'output_0': torch_to_ait_data(out0_pt), 'output_1': torch_to_ait_data(out1_pt), 'output_2': torch_to_ait_data(out2_pt)}
        module.run(in_args, out_args)
        for (out, expect) in zip(outputs, expected):
            self.assertTrue(torch.equal(out, expect))
            # clear storage so the next call demonstrably refills it
            out.zero_()
        module.benchmark(in_args, out_args)
        in_args_pt = {'input_0': in0_pt, 'input_1': in1_pt, 'input_2': in2_pt}
        out_args_pt = {'output_0': out0_pt, 'output_1': out1_pt, 'output_2': out2_pt}
        module.run_with_tensors(in_args_pt, out_args_pt)
        for (out, expect) in zip(outputs, expected):
            self.assertTrue(torch.equal(out, expect))
            out.zero_()
        module.benchmark_with_tensors(in_args_pt, out_args_pt)
    def test_error_handling_dict_api(self):
        """Dict-style calls reject missing inputs/outputs and unknown output names."""
        (module, (in0_pt, in1_pt, in2_pt), outputs, expected) = self._get_graph_three_inputs_three_outputs()
        (out0_pt, out1_pt, out2_pt) = outputs
        in_args_pt = {'input_0': in0_pt, 'input_1': in1_pt, 'input_2': in2_pt}
        out_args_pt = {'output_0': out0_pt, 'output_1': out1_pt, 'output_2': out2_pt, 'not_an_output': torch.randn((3, 3))}
        self.assertRaises(ValueError, module.run_with_tensors, {}, {})
        self.assertRaises(ValueError, module.run_with_tensors, in_args_pt, {})
        self.assertRaises(ValueError, module.run_with_tensors, in_args_pt, out_args_pt)
    def test_error_handling_model_init(self):
        """compile_model rejects non-positive num_runtimes with ValueError."""
        for num_runtimes in ((- 1), 0):
            target = detect_target()
            input_0 = Tensor(shape=[1], dtype='float16', name='input_0', is_input=True, is_output=True)
            with self.assertRaises(ValueError):
                compile_model(input_0, target, './tmp', 'test_error_handling_model_init', num_runtimes=num_runtimes)
    def test_bind_data_to_tensor_host_data(self):
        """_bind_data rejects wrong-sized host data and accepts matching host/numpy data for every dtype."""
        tensor = Tensor([10, 2], dtype='float16')
        # 10 bytes cannot back a 20-element fp16 tensor (needs 40)
        self.assertRaises(ValueError, tensor._bind_data, _HostConstantTensorData((b'\x00' * 10)))
        for dtype in ('float16', 'float32', 'int32', 'int64'):
            dtype_size = get_dtype_size(dtype)
            data = _HostConstantTensorData(((b'\x00' * 20) * dtype_size), dtype=dtype)
            self.assertEqual(data.size(), len(data.to_bytes()))
            self.assertTrue(all(((x == 0) for x in data.to_bytes())))
            tensor = Tensor([10, 2], dtype=dtype)
            tensor._bind_data(data)
            self.assertIsNotNone(tensor._attrs['data'])
            data_numpy = _NumpyConstantTensorData(np.zeros([10, 2], dtype))
            self.assertEqual(data_numpy.size(), len(data_numpy.to_bytes()))
            self.assertTrue(all(((x == 0) for x in data_numpy.to_bytes())))
            tensor = Tensor([10, 2], dtype=dtype)
            tensor._bind_data(data_numpy)
            self.assertIsNotNone(tensor._attrs['data'])
    def test_bind_torch_tensor_data(self):
        """_bind_data rejects an undersized torch tensor and accepts matching ones per dtype."""
        small_tensor = torch.randn((5, 2)).cuda().half()
        tensor = Tensor([10, 2], dtype='float16')
        self.assertRaises(ValueError, tensor._bind_data, _TorchConstantTensorData(small_tensor))
        dtype_to_torch = {'float16': torch.float16, 'float32': torch.float32, 'int32': torch.int32, 'int64': torch.int64}
        for dtype in dtype_to_torch.keys():
            tensor = torch.ones((10, 2), dtype=dtype_to_torch[dtype]).cuda()
            data = _TorchConstantTensorData(tensor)
            self.assertEqual(data.size(), len(data.to_bytes()))
            # round-trip the serialized bytes through numpy to verify content fidelity
            data_np = np.frombuffer(data.to_bytes(), dtype=dtype).reshape((10, 2))
            np.testing.assert_equal(data_np, tensor.cpu().numpy())
            tensor = Tensor([10, 2], dtype=dtype)
            tensor._bind_data(data)
            self.assertIsNotNone(tensor._attrs['data'])
    def test_constant_tensor_construction_fails_mismatched_dtypes(self):
        """_bind_data rejects data whose dtype (default float32 here) mismatches the fp16 tensor."""
        torch_data = _TorchConstantTensorData(torch.randn((10, 2)).cuda())
        np_data = _NumpyConstantTensorData(np.random.rand(10, 2))
        # NOTE(review): unlike the sibling tests this passes a str, not bytes — presumably only
        # size/dtype are consulted before the dtype-mismatch ValueError fires; confirm.
        host_data = _HostConstantTensorData((('\x00' * 20) * 4), dtype='float32')
        bad_data = (torch_data, np_data, host_data)
        for data in bad_data:
            tensor = Tensor([10, 2], dtype='float16')
            self.assertRaises(ValueError, tensor._bind_data, data)
    def _test_use_constant_tensor(self, make_data: Callable[([torch.Tensor], _ConstantTensorData)], name: str, size: int=3):
        """Helper: bake two constants (via make_data) into the model and verify their product."""
        target = detect_target()
        in0_pt = torch.randn((size,)).half()
        in1_pt = torch.randn((size,)).half()
        in0_data = make_data(in0_pt)
        in0 = Tensor(shape=[size], dtype='float16')
        in0._bind_data(in0_data)
        in1_data = make_data(in1_pt)
        in1 = Tensor(shape=[size], dtype='float16')
        in1._bind_data(in1_data)
        out = ops.elementwise(FuncEnum.MUL)(in0, in1)
        out._attrs['name'] = 'output'
        out._attrs['is_output'] = True
        module = compile_model(out, target, './tmp', name)
        output_ait = torch.randn((size,)).half().cuda()
        # no runtime inputs: both operands were bound as internal constants
        module.run_with_tensors([], [output_ait])
        self.assertTrue(torch.equal(output_ait.cpu(), (in0_pt * in1_pt)))
def test_use_internal_constant_tensors_host(self):
self._test_use_constant_tensor((lambda tensor: _HostConstantTensorData(tensor.cpu().numpy().tobytes())), 'test_use_internal_constant_tensors_host')
def test_use_internal_constant_tensors_gpu(self):
self._test_use_constant_tensor((lambda tensor: _TorchConstantTensorData(tensor)), 'test_use_internal_constant_tensors_gpu')
def test_use_internal_constant_tensors_huge(self):
self._test_use_constant_tensor((lambda tensor: _TorchConstantTensorData(tensor)), 'test_use_internal_constant_tensors_huge', size=int((.0 / 2)))
    def test_run_return_value_dynamic_batch(self):
        """run() reports the inferred (actual) output shapes for two dynamic dims, including size 0."""
        target = detect_target()
        input_0 = Tensor(shape=[IntVar([0, 2], name='out01'), IntVar([0, 2], name='out12')], dtype='float16', name='out0', is_input=True, is_output=True)
        out = ops.elementwise(FuncEnum.MUL)(input_0, input_0)
        out._attrs['name'] = 'out1'
        out._attrs['is_output'] = True
        module = compile_model([input_0, out], target, './tmp', 'test_run_return_value_dynamic_batch')
        for a in range(0, 2):
            for b in range(0, 2):
                in0 = torch.randn([a, b]).cuda().half()
                out0 = torch.empty_like(in0)
                out1 = torch.empty_like(in0)
                expected = {'out0': AITData(out0.data_ptr(), [a, b], 'float16'), 'out1': AITData(out1.data_ptr(), [a, b], 'float16')}
                actual = module.run({'out0': torch_to_ait_data(in0)}, {'out0': torch_to_ait_data(out0), 'out1': torch_to_ait_data(out1)})
                self.assertEqual(expected, actual)
                out_tensors = module.run_with_tensors([in0], [out0, out1])
                self.assertEqual(len(out_tensors), 2)
                self.assertTrue(torch.equal(out_tensors['out0'], in0))
                self.assertTrue(torch.equal(out_tensors['out1'], (in0 * in0)))
    def test_run_return_value_static_shapes(self):
        """run() reports the static output shapes unchanged."""
        target = detect_target()
        input_0 = Tensor(shape=[1, 2, 3, 4], dtype='float16', name='out0', is_input=True, is_output=True)
        out = ops.elementwise(FuncEnum.MUL)(input_0, input_0)
        out._attrs['name'] = 'out1'
        out._attrs['is_output'] = True
        module = compile_model([input_0, out], target, './tmp', 'test_run_return_value_static_shapes')
        in0 = torch.randn([1, 2, 3, 4]).cuda().half()
        out0 = torch.empty_like(in0)
        out1 = torch.empty_like(in0)
        expected = {'out0': AITData(out0.data_ptr(), [1, 2, 3, 4], 'float16'), 'out1': AITData(out1.data_ptr(), [1, 2, 3, 4], 'float16')}
        actual = module.run({'out0': torch_to_ait_data(in0)}, {'out0': torch_to_ait_data(out0), 'out1': torch_to_ait_data(out1)})
        self.assertEqual(expected, actual)
        out_tensors = module.run_with_tensors([in0], [out0, out1])
        self.assertEqual(len(out_tensors), 2)
        self.assertTrue(torch.equal(out_tensors['out0'], in0))
        self.assertTrue(torch.equal(out_tensors['out1'], (in0 * in0)))
    def test_run_return_value_dynamic_second_dim(self):
        """run() reports correct shapes when the dynamic dim is not the leading one."""
        target = detect_target()
        input_0 = Tensor(shape=[10, IntVar([0, 2], name='dim'), 2], dtype='float16', name='out0', is_input=True, is_output=True)
        out = ops.elementwise(FuncEnum.MUL)(input_0, input_0)
        out._attrs['name'] = 'out1'
        out._attrs['is_output'] = True
        module = compile_model([input_0, out], target, './tmp', 'test_run_return_value_dynamic_second_dim')
        for dim in range(0, 2):
            in0 = torch.randn([10, dim, 2]).cuda().half()
            out0 = torch.empty_like(in0)
            out1 = torch.empty_like(in0)
            expected = {'out0': AITData(out0.data_ptr(), [10, dim, 2], 'float16'), 'out1': AITData(out1.data_ptr(), [10, dim, 2], 'float16')}
            actual = module.run({'out0': torch_to_ait_data(in0)}, {'out0': torch_to_ait_data(out0), 'out1': torch_to_ait_data(out1)})
            self.assertEqual(expected, actual)
            out_tensors = module.run_with_tensors([in0], {'out0': out0, 'out1': out1})
            self.assertEqual(len(out_tensors), 2)
            self.assertTrue(torch.equal(out_tensors['out0'], in0))
            self.assertTrue(torch.equal(out_tensors['out1'], (in0 * in0)))
    def test_many_threads_one_stream(self):
        """Concurrent benchmarking from 8 threads on a shared stream completes without error."""
        (module, (in0, in1), (out_pt, out_ait)) = self._get_simple_graph_and_output('test_many_threads_one_stream')
        (runtime_ms, _, _) = module.benchmark_with_tensors([in0, in1], [out_ait], num_threads=8, count=1000)
def test_many_threads_many_streams(self):
(module, (in0, in1), (out_pt, out_ait)) = self._get_simple_graph_and_output('test_benchmark')
(runtime_ms, _, _) = module.benchmark_with_tensors([in0, in1], [out_ait], num_threads=8, count=1000, use_unique_stream_per_thread=True)
    def test_compiled_module_preserves_output_order(self):
        """The output index map follows the order outputs were passed to compile_model, for every permutation."""
        input0 = Tensor(shape=[1], dtype='float16', name='input0', is_input=True)
        output0 = ops.elementwise(FuncEnum.MUL)(input0, input0)
        output0._attrs['is_output'] = True
        output0._attrs['name'] = 'output0'
        output1 = ops.elementwise(FuncEnum.ADD)(input0, input0)
        output1._attrs['is_output'] = True
        output1._attrs['name'] = 'output1'
        output2 = ops.elementwise(FuncEnum.MUL)(output0, output1)
        output2._attrs['is_output'] = True
        output2._attrs['name'] = 'output2'
        test_name = 'test_compiled_module_preserves_output_order'
        for output_ordering in itertools.permutations((output0, output1, output2)):
            target = detect_target()
            with compile_model(output_ordering, target, './tmp', test_name) as module:
                expected_ordering = {tensor._attrs['name']: idx for (idx, tensor) in enumerate(output_ordering)}
                self.assertEqual(module.get_output_name_to_index_map(), expected_ordering)
def test_error_non_output_in_output_tensors_list(self):
    """Passing a non-output tensor (here the graph input) in the output list must fail."""
    input0 = Tensor(shape=[1], dtype='float16', name='input0', is_input=True)
    intermediate = ops.elementwise(FuncEnum.ADD)(input0, input0)
    output0 = ops.elementwise(FuncEnum.MUL)(intermediate, intermediate)
    output0._attrs['is_output'] = True
    output0._attrs['name'] = 'output0'
    with self.assertRaises((KeyError, ValueError)):
        compile_model([input0, output0], detect_target(), './tmp', 'test_error_non_output_in_output_tensors_list')
def test_error_missing_output_in_output_tensors_list(self):
    """Omitting a tensor marked is_output from the output list must fail."""
    input0 = Tensor(shape=[1], dtype='float16', name='input0', is_input=True)
    intermediate = ops.elementwise(FuncEnum.ADD)(input0, input0)
    intermediate._attrs['is_output'] = True
    output0 = ops.elementwise(FuncEnum.MUL)(intermediate, intermediate)
    output0._attrs['is_output'] = True
    output0._attrs['name'] = 'output0'
    with self.assertRaises(ValueError):
        compile_model([output0], detect_target(), './tmp', 'test_error_missing_output_in_output_tensors_list')
def test_error_duplicate_output_in_output_tensors_list(self):
    """Listing the same output tensor twice must fail."""
    input0 = Tensor(shape=[1], dtype='float16', name='input0', is_input=True)
    intermediate = ops.elementwise(FuncEnum.ADD)(input0, input0)
    output0 = ops.elementwise(FuncEnum.MUL)(intermediate, intermediate)
    output0._attrs['is_output'] = True
    output0._attrs['name'] = 'output0'
    with self.assertRaises(ValueError):
        compile_model([output0, output0], detect_target(), './tmp', 'test_error_duplicate_output_in_output_tensors_list')
def test_cannot_use_closed_model(self):
    """Running a module after close() must raise RuntimeError."""
    module, (lhs, rhs), (_, out_storage) = self._get_simple_graph_and_output('test_cannot_use_closed_model')
    module.close()
    with self.assertRaises(RuntimeError):
        module.run_with_tensors([lhs, rhs], [out_storage])
def test_cannot_use_closed_model_context_manager(self):
    """Leaving the module's ``with`` block closes it; later runs must raise."""
    # NOTE(review): presumably reuses the 'test_cannot_use_closed_model'
    # artifact deliberately (identical graph) — confirm.
    module, (lhs, rhs), (_, out_storage) = self._get_simple_graph_and_output('test_cannot_use_closed_model')
    with module as managed:
        pass
    with self.assertRaises(RuntimeError):
        managed.run_with_tensors([lhs, rhs], [out_storage])
def test_run_fails_with_unbound_constants(self):
    """run() must fail until every named constant has been bound, then succeed."""
    target = detect_target()
    const_a = Tensor(shape=[1, 2], dtype='float16', name='constant_1')
    const_b = Tensor(shape=[1, 2], dtype='float16', name='constant_2')
    squared = ops.elementwise(FuncEnum.MUL)(const_a, const_a)
    output = ops.elementwise(FuncEnum.MUL)(squared, const_b)
    output._attrs['name'] = 'output'
    output._attrs['is_output'] = True
    module = compile_model(output, target, './tmp', 'test_run_fails_with_unbound_constants')
    const_a_pt = torch.randn((1, 2)).cuda().half()
    const_b_pt = torch.randn((1, 2)).cuda().half()
    result = torch.empty([1, 2]).cuda().half()
    # Nothing bound yet -> must fail.
    with self.assertRaises(RuntimeError):
        module.run_with_tensors([], [result])
    module.set_constant_with_tensor('constant_1', const_a_pt)
    # One constant still unbound -> must still fail.
    with self.assertRaises(RuntimeError):
        module.run_with_tensors([], [result])
    module.set_constant_with_tensor('constant_2', const_b_pt)
    module.run_with_tensors([], [result])
    expected = (const_a_pt * const_a_pt) * const_b_pt
    self.assertTrue(torch.allclose(result, expected))
def test_set_constant_fails_wrong_dtype(self):
    """set_constant_with_tensor must reject tensors whose dtype differs from the graph's float16."""
    wrong_dtype_tensors = (
        torch.zeros([1, 2]).long().cuda(),
        torch.zeros([1, 2]).int().cuda(),
        torch.zeros([1, 2]).float().cuda(),
    )
    for wrong_tensor in wrong_dtype_tensors:
        # Rebuild the graph for each compilation, as Tensor objects are single-use.
        constant_1 = Tensor(shape=[1, 2], dtype='float16', name='constant_1')
        graph_out = ops.elementwise(FuncEnum.MUL)(constant_1, constant_1)
        graph_out._attrs['name'] = 'output'
        graph_out._attrs['is_output'] = True
        with compile_model(graph_out, detect_target(), './tmp', 'test_set_constant_fails_wrong_dtype') as module:
            with self.assertRaises(RuntimeError):
                module.set_constant_with_tensor('constant_1', wrong_tensor)
def test_set_constant_fails_wrong_shape(self):
    """set_constant_with_tensor must reject tensors whose shape differs from [1, 2]."""
    for wrong_shape in ([2, 2], [3, 4], [0]):
        misshaped = torch.randn(wrong_shape).half().cuda()
        # Rebuild the graph for each compilation, as Tensor objects are single-use.
        constant_1 = Tensor(shape=[1, 2], dtype='float16', name='constant_1')
        graph_out = ops.elementwise(FuncEnum.MUL)(constant_1, constant_1)
        graph_out._attrs['name'] = 'output'
        graph_out._attrs['is_output'] = True
        with compile_model(graph_out, detect_target(), './tmp', 'test_set_constant_fails_wrong_shape') as module:
            with self.assertRaises(RuntimeError):
                module.set_constant_with_tensor('constant_1', misshaped)
def test_null_arguments_error(self):
    """A module whose handle is null must raise RuntimeError instead of crashing."""
    module, (in0_pt, in1_pt), (_, out_storage) = self._get_simple_graph_and_output('test_null_arguments_error')
    old_handle = module.handle
    module.handle = None
    try:
        self.assertRaises(RuntimeError, module.run_with_tensors, [in0_pt, in1_pt], [out_storage])
        self.assertRaises(RuntimeError, module.get_output_dtype, 0)
    finally:
        # Robustness fix: restore the handle even when an assertion fails, so
        # later teardown/close() does not operate on a broken module.
        module.handle = old_handle
def test_memcpy(self):
    """module.memcpy round-trips D2D, D2H and H2D copies, with and without an explicit stream."""
    module, _, _ = self._get_simple_graph_and_output('test_memcpy')
    torch_stream = torch.cuda.Stream().cuda_stream
    for stream_ptr in (None, torch_stream):
        # Device -> device.
        src = torch.randn((3, 2, 1)).half().cuda()
        dst = torch.empty_like(src)
        module.memcpy(dst.data_ptr(), src.data_ptr(), dst.numel() * dst.element_size(), AITemplateMemcpyKind.DeviceToDevice, stream_ptr)
        self.assertTrue(torch.equal(src, dst))
        # Device -> host.
        src = torch.randn((3, 2, 1)).half().cuda()
        dst = torch.empty_like(src).cpu()
        module.memcpy(dst.data_ptr(), src.data_ptr(), dst.numel() * dst.element_size(), AITemplateMemcpyKind.DeviceToHost, stream_ptr)
        self.assertTrue(torch.equal(src.cpu(), dst))
        # Host -> device.
        src = torch.randn((3, 2, 1)).half()
        dst = torch.empty_like(src).cuda()
        module.memcpy(dst.data_ptr(), src.data_ptr(), dst.numel() * dst.element_size(), AITemplateMemcpyKind.HostToDevice, stream_ptr)
        self.assertTrue(torch.equal(src, dst.cpu()))
def test_alloc(self):
    """allocate_gpu_memory / free_gpu_memory can back an output buffer for run().

    Reuses the 'test_memcpy' artifact; the compiled graph is the same simple graph.
    """
    from contextlib import contextmanager

    module, (in0_pt, in1_pt), (out_pt, out_ait) = self._get_simple_graph_and_output('test_memcpy')

    # BUG FIX: alloc_like is consumed via ``with`` below, but a plain generator
    # function is not a context manager — the decorator was missing.
    @contextmanager
    def alloc_like(tensor: torch.Tensor, stream_ptr: Optional[int]):
        assert (tensor.dtype == torch.half)
        nbytes = tensor.numel() * tensor.element_size()
        ptr = module.allocate_gpu_memory(nbytes, stream_ptr)
        try:
            yield AITData(ptr, list(tensor.shape), 'float16'), nbytes
        finally:
            # Always release, even if the body raises.
            module.free_gpu_memory(ptr, stream_ptr)

    torch_stream = torch.cuda.Stream().cuda_stream
    for stream_ptr in (None, torch_stream):
        with alloc_like(out_ait, stream_ptr) as (output, nbytes):
            module.run({'input_0': torch_to_ait_data(in0_pt), 'input_1': torch_to_ait_data(in1_pt)}, {'output': output})
            # ``output`` is an AITData, so .data_ptr is a plain attribute.
            module.memcpy(out_ait.data_ptr(), output.data_ptr, nbytes, AITemplateMemcpyKind.DeviceToDevice, stream_ptr)
            self.assertTrue(torch.equal(out_pt, out_ait))
def test_get_num_runtimes(self):
    """compile_model honours num_runtimes; the default is a single runtime."""
    self.assertEqual(AIT_DEFAULT_NUM_RUNTIMES, 1)
    passthrough = Tensor([1], dtype='float16', is_input=True, is_output=True)
    with compile_model(passthrough, detect_target(), './tmp', 'test_get_num_runtimes_compile_module_default') as module:
        self.assertEqual(module.get_num_runtimes(), 1)
    with compile_model(passthrough, detect_target(), './tmp', 'test_get_num_runtimes_compile_module_custom', num_runtimes=2) as module:
        self.assertEqual(module.get_num_runtimes(), 2)
def test_ait_data_numpy_conversions(self):
    """numpy_to_ait_data / ait_data_to_numpy round-trip values, shape and dtype."""
    passthrough = Tensor([1], dtype='float16', is_input=True, is_output=True)
    with compile_model(passthrough, detect_target(), './tmp', 'test_ait_data_numpy_conversions') as module:
        shape = [1, 2, 3]
        host_ones = np.ones(shape, dtype='float16')
        ait_blob = module.numpy_to_ait_data(host_ones)
        self.assertEqual(ait_blob.dtype, 'float16')
        self.assertEqual(ait_blob.shape, shape)
        np.testing.assert_equal(host_ones, module.ait_data_to_numpy(ait_blob))
        # A tensor already on the GPU can be wrapped in an AITData directly.
        device_ones = torch.ones(shape, dtype=torch.float16).cuda()
        wrapped = AITData(device_ones.data_ptr(), shape, 'float16')
        np.testing.assert_equal(host_ones, module.ait_data_to_numpy(wrapped))
def test_numpy_to_ait_data_manual_free(self):
    """GPU memory returned by numpy_to_ait_data can be released explicitly."""
    passthrough = Tensor([1], dtype='float16', is_input=True, is_output=True)
    with compile_model(passthrough, detect_target(), './tmp', 'test_numpy_to_ait_data_manual_free') as module:
        host = np.ones([1, 2, 3], dtype='float16')
        ait_blob = module.numpy_to_ait_data(host)
        module.free_gpu_memory(ait_blob.data_ptr)
def test_custom_allocator(self):
    """Modules compile and run correctly under both DEFAULT and TRACKING allocators;
    the TRACKING allocator additionally reports a positive byte count."""
    x = Tensor([1], dtype='float16', is_input=True)
    y = (x * x)
    z = (y * y)
    z._attrs['is_output'] = True
    for allocator_kind in (AITemplateAllocatorKind.DEFAULT, AITemplateAllocatorKind.TRACKING):
        # BUG FIX: allocator_kind was hard-coded to TRACKING, so the DEFAULT
        # branch of the loop never actually exercised the default allocator.
        with compile_model(z, detect_target(), './tmp', f'test_custom_allocator_{allocator_kind.value}', allocator_kind=allocator_kind) as module:
            allocator = module.allocator_handle
            self.assertIsNotNone(allocator.value)
            if (allocator_kind == AITemplateAllocatorKind.TRACKING):
                num_bytes = ctypes.c_size_t()
                module.DLL.AITemplateTrackingAllocatorGetNumBytes(allocator, ctypes.byref(num_bytes))
                self.assertGreater(num_bytes.value, 0)
            x_pt = torch.randn(1).half().cuda()
            y_pt = (x_pt * x_pt)
            z_pt = (y_pt * y_pt)
            z_ait = torch.empty_like(x_pt)
            module.run_with_tensors([x_pt], [z_ait])
            self.assertTrue(z_ait.equal(z_pt))
def test_get_constant_names(self):
    """get_constant_names / get_constant_folding_input_names filter correctly by
    the unbound-only and folding-only flags."""
    target = detect_target()
    input_0 = Tensor(shape=[1, 2], dtype='float16', name='input_0', is_input=True)
    constant_0 = Tensor(shape=[1, 2], dtype='float16', name='constant_0')
    constant_1 = Tensor(shape=[1, 2], dtype='float16', name='constant_1')
    constant_2 = Tensor(shape=[1, 2], dtype='float16', name='constant_2')
    constant_3 = Tensor(shape=[1, 2], dtype='float16', name='constant_3')
    constant_4 = Tensor(shape=[1, 2], dtype='float16', name='constant_4')
    # constant_1 and constant_3 are bound at compile time; the rest stay unbound.
    constants = {
        'constant_1': get_random_torch_tensor((1, 2), 'float16'),
        'constant_3': get_random_torch_tensor((1, 2), 'float16'),
    }
    x = ops.elementwise(FuncEnum.MUL)(input_0, constant_0)
    x1 = ops.concatenate()([x, x, constant_1])
    y = ops.concatenate()([constant_2, constant_3, constant_4])
    output = ops.elementwise(FuncEnum.MUL)(x1, y)
    output._attrs['name'] = 'output'
    output._attrs['is_output'] = True
    module = compile_model(output, target, './tmp', 'test_get_constant_names', constants=constants)
    name_cases = (
        (True, False, {'constant_0', 'constant_2', 'constant_4'}),
        (False, False, {'constant_0', 'constant_1', 'constant_2', 'constant_3', 'constant_4'}),
        (True, True, {'constant_2', 'constant_4'}),
        (False, True, {'constant_2', 'constant_3', 'constant_4'}),
    )
    for unbound_only, folding_only, expected in name_cases:
        names = module.get_constant_names(unbound_constants_only=unbound_only, constant_folding_only=folding_only)
        self.assertEqual(set(names), expected)
    folding_cases = (
        (True, {'constant_2', 'constant_4'}),
        (False, {'constant_2', 'constant_3', 'constant_4'}),
    )
    for unbound_only, expected in folding_cases:
        names = module.get_constant_folding_input_names(unbound_constants_only=unbound_only)
        self.assertEqual(set(names), expected)
def test_get_constant_names_with_ait_generated(self):
    """Constants generated by AIT itself (host zero tensors) must not appear in
    get_constant_names — only user-declared constants are reported."""
    target = detect_target()
    input_0 = Tensor(shape=[1, 2], dtype='float16', name='input_0', is_input=True)
    constant_0 = Tensor(shape=[1, 2], dtype='float16', name='constant_0')
    constant_1 = Tensor(shape=[1, 2], dtype='float16', name='constant_1')
    constant_2 = Tensor(shape=[1, 2], dtype='float16', name='constant_2')
    # constant_3 is AIT-generated, so it is expected to be absent below.
    constant_3 = _create_host_zero_tensor(shape=[1, 2], name='constant_3', dtype='float16')
    constant_4 = Tensor(shape=[1, 2], dtype='float16', name='constant_4')
    constants = {'constant_1': get_random_torch_tensor((1, 2), 'float16')}
    x = ops.elementwise(FuncEnum.MUL)(input_0, constant_0)
    x1 = ops.concatenate()([x, x, constant_1])
    y = ops.concatenate()([constant_2, constant_3, constant_4])
    output = ops.elementwise(FuncEnum.MUL)(x1, y)
    output._attrs['name'] = 'output'
    output._attrs['is_output'] = True
    module = compile_model(output, target, './tmp', 'test_get_constant_names_with_ait_generated', constants=constants)
    reported = module.get_constant_names(unbound_constants_only=False, constant_folding_only=False)
    self.assertEqual(set(reported), {'constant_0', 'constant_1', 'constant_2', 'constant_4'})
def test_set_many_constants(self):
    """set_many_constants_with_tensors binds several constants in one call."""
    target = detect_target()
    input_0 = Tensor(shape=[1, 2], dtype='float16', name='input_0', is_input=True)
    constant_1 = Tensor(shape=[1, 2], dtype='float16', name='constant_1')
    constant_2 = Tensor(shape=[1, 2], dtype='float16', name='constant_2')
    x = ops.elementwise(FuncEnum.MUL)(input_0, constant_1)
    output = ops.elementwise(FuncEnum.MUL)(x, constant_2)
    output._attrs['name'] = 'output'
    output._attrs['is_output'] = True
    # BUG FIX: this compiled under 'test_get_constant_names', colliding with the
    # different graph built by that test; use this test's own artifact name.
    module = compile_model(output, target, './tmp', 'test_set_many_constants')
    input_0_pt = torch.randn((1, 2)).cuda().half()
    constant_1_pt = torch.randn((1, 2)).cuda().half()
    constant_2_pt = torch.randn((1, 2)).cuda().half()
    module.set_many_constants_with_tensors({'constant_1': constant_1_pt, 'constant_2': constant_2_pt})
    output_pt = ((input_0_pt * constant_1_pt) * constant_2_pt)
    output_ait = torch.empty_like(input_0_pt)
    module.run_with_tensors([input_0_pt], [output_ait])
    self.assertTrue(torch.equal(output_pt, output_ait))
def test_async_fold_constants(self):
    """fold_constants(sync=False) may run asynchronously; a subsequent run()
    still produces the correct result."""
    target = detect_target()
    input_0 = Tensor(shape=[10000, 2000], dtype='float16', name='input_0', is_input=True)
    constant_1 = Tensor(shape=[10000, 2000], dtype='float16', name='constant_1')
    constant_2 = Tensor(shape=[10000, 2000], dtype='float16', name='constant_2')
    x = ops.elementwise(FuncEnum.MUL)(input_0, constant_1)
    output = ops.elementwise(FuncEnum.MUL)(x, constant_2)
    output._attrs['name'] = 'output'
    output._attrs['is_output'] = True
    # BUG FIX: this compiled under 'test_get_constant_names', colliding with the
    # different (and much smaller) graph built by that test; use its own name.
    module = compile_model(output, target, './tmp', 'test_async_fold_constants')
    input_0_pt = torch.randn((10000, 2000)).cuda().half()
    constant_1_pt = torch.randn((10000, 2000)).cuda().half()
    constant_2_pt = torch.randn((10000, 2000)).cuda().half()
    output_pt = ((input_0_pt * constant_1_pt) * constant_2_pt)
    output_ait = torch.empty_like(input_0_pt)
    module.set_many_constants_with_tensors({'constant_1': constant_1_pt, 'constant_2': constant_2_pt})
    # Kick off folding without waiting; run() below must still see folded values.
    module.fold_constants(sync=False)
    module.run_with_tensors([input_0_pt], [output_ait])
    self.assertTrue(torch.equal(output_pt, output_ait))
def get_capacity_dict_from_df(df_capacity: pd.DataFrame) -> dict[str, Any]:
    """Build a nested {zone: {mode: {datetime, value, source}}} dict from a
    capacity DataFrame indexed by zone.

    NOTE(review): assumes every zone has multiple rows so ``df_capacity.loc[zone]``
    yields a DataFrame; a single-row zone would yield a Series and break
    ``iterrows`` — confirm against the data source.
    """
    capacity_by_zone: dict[str, Any] = {}
    for zone in df_capacity.index.unique():
        zone_rows = df_capacity.loc[zone]
        zone_capacity = {}
        for _, row in zone_rows.iterrows():
            zone_capacity[row['mode']] = {
                'datetime': row['datetime'].strftime('%Y-%m-%d'),
                'value': round(float(row['value']), 0),
                'source': SOURCE,
            }
        capacity_by_zone[zone] = zone_capacity
    return capacity_by_zone
class TestGraphInterface():
    """Behavioral tests for the Graph interface: traversals, edge/node management,
    paths and structural queries.

    NOTE(review): two decorators were restored here. ``nodes`` is consumed as a
    pytest fixture by the test methods, and ``get_easy_graph`` declares no
    ``self`` yet is called via ``self.get_easy_graph()`` — without
    ``@staticmethod`` that call would fail. Confirm against the original file.
    """

    @pytest.fixture
    def nodes(self) -> List[BasicNode]:
        """Fresh list of ten BasicNodes for each test."""
        return [BasicNode(i) for i in range(10)]

    def test_equals(self, nodes):
        """Graphs compare equal iff they have the same nodes and edges."""
        edges = [BasicEdge(nodes[0], nodes[1]), BasicEdge(nodes[1], nodes[2]), BasicEdge(nodes[2], nodes[3]), BasicEdge(nodes[2], nodes[1]), BasicEdge(nodes[1], nodes[4]), BasicEdge(nodes[1], nodes[5]), BasicEdge(nodes[4], nodes[5]), BasicEdge(nodes[4], nodes[6])]
        graph1 = Graph()
        graph1.add_nodes_from(nodes)
        graph1.add_edges_from(edges)
        graph2 = Graph()
        graph2.add_nodes_from(nodes)
        graph2.add_edges_from(edges[1:])
        assert (graph1 != graph2)
        assert (graph2 == graph2.copy())
        graph2.add_edge(edges[0])
        assert (graph1 == graph2)
        graph3 = graph1.copy()
        assert (graph1 == graph2 == graph3)
        graph3.remove_node(nodes[0])
        assert (graph3 != graph1)

    def test_dfs(self, nodes):
        """Depth-first iteration visits children before siblings."""
        graph = Graph()
        graph.add_nodes_from(nodes)
        graph.add_edges_from([BasicEdge(nodes[0], nodes[1]), BasicEdge(nodes[1], nodes[3]), BasicEdge(nodes[1], nodes[4]), BasicEdge(nodes[0], nodes[2]), BasicEdge(nodes[2], nodes[5]), BasicEdge(nodes[5], nodes[2])])
        assert (tuple(graph.iter_depth_first(nodes[0])) == (nodes[0], nodes[1], nodes[3], nodes[4], nodes[2], nodes[5]))
        assert (tuple(graph.iter_depth_first(nodes[2])) == (nodes[2], nodes[5]))

    def test_bfs(self, nodes):
        """Breadth-first iteration visits level by level."""
        graph = Graph()
        graph.add_nodes_from(nodes)
        graph.add_edges_from([BasicEdge(nodes[0], nodes[1]), BasicEdge(nodes[1], nodes[3]), BasicEdge(nodes[1], nodes[4]), BasicEdge(nodes[0], nodes[2]), BasicEdge(nodes[2], nodes[5]), BasicEdge(nodes[5], nodes[2])])
        assert (tuple(graph.iter_breadth_first(nodes[0])) == (nodes[0], nodes[1], nodes[2], nodes[3], nodes[4], nodes[5]))

    def test_postorder(self, nodes):
        """Postorder yields descendants before their ancestors."""
        graph = Graph()
        graph.add_edges_from([BasicEdge(nodes[4], nodes[5]), BasicEdge(nodes[1], nodes[2]), BasicEdge(nodes[0], nodes[2]), BasicEdge(nodes[2], nodes[3]), BasicEdge(nodes[0], nodes[1]), BasicEdge(nodes[2], nodes[4]), BasicEdge(nodes[3], nodes[4])])
        assert (list(graph.iter_postorder()) == [nodes[5], nodes[4], nodes[3], nodes[2], nodes[1], nodes[0]])
        assert (list(graph.iter_postorder(nodes[2])) == [nodes[5], nodes[4], nodes[3], nodes[2]])
        graph.add_edge(BasicEdge(nodes[2], nodes[0]))
        # With the 2<->0 cycle two visit orders are acceptable.
        assert ((list(graph.iter_postorder(nodes[1])) == [nodes[5], nodes[4], nodes[3], nodes[0], nodes[2], nodes[1]]) or (list(graph.iter_postorder(nodes[1])) == [nodes[0], nodes[5], nodes[4], nodes[3], nodes[2], nodes[1]]))
        assert ((list(graph.iter_postorder()) == list(graph.iter_postorder(nodes[0]))) or (list(graph.iter_postorder()) == list(graph.iter_postorder(nodes[1]))) or (list(graph.iter_postorder()) == list(graph.iter_postorder(nodes[2]))))
        graph.add_edge(BasicEdge(nodes[6], nodes[4]))
        assert (list(graph.iter_postorder(nodes[6])) == [nodes[5], nodes[4], nodes[6]])
        assert (list(graph.iter_postorder(nodes[3])) == [nodes[5], nodes[4], nodes[3]])

    def test_topologicalorder_cylic(self, nodes):
        """Topological iteration over a cyclic graph must raise."""
        graph = Graph()
        graph.add_edges_from([BasicEdge(nodes[0], nodes[2]), BasicEdge(nodes[2], nodes[0]), BasicEdge(nodes[2], nodes[3]), BasicEdge(nodes[5], nodes[4]), BasicEdge(nodes[4], nodes[5]), BasicEdge(nodes[4], nodes[3])])
        with pytest.raises(Exception):
            list(graph.iter_topological())

    def test_topologicalorder_acylic(self, nodes):
        """Topological order of an acyclic graph respects all edges."""
        graph = Graph()
        graph.add_edges_from([BasicEdge(nodes[4], nodes[5]), BasicEdge(nodes[1], nodes[2]), BasicEdge(nodes[0], nodes[2]), BasicEdge(nodes[2], nodes[3]), BasicEdge(nodes[0], nodes[1]), BasicEdge(nodes[2], nodes[4]), BasicEdge(nodes[3], nodes[4])])
        assert (list(graph.iter_topological()) == nodes[:6])

    def test_preorder(self, nodes):
        """Preorder yields each node before its descendants."""
        graph = Graph()
        graph.add_edges_from([BasicEdge(nodes[5], nodes[1]), BasicEdge(nodes[1], nodes[0]), BasicEdge(nodes[1], nodes[3]), BasicEdge(nodes[3], nodes[2]), BasicEdge(nodes[3], nodes[4]), BasicEdge(nodes[5], nodes[6]), BasicEdge(nodes[6], nodes[8]), BasicEdge(nodes[8], nodes[7])])
        assert (list(graph.iter_preorder()) == [nodes[5], nodes[1], nodes[0], nodes[3], nodes[2], nodes[4], nodes[6], nodes[8], nodes[7]])

    def test_node_management(self):
        """Adding/removing nodes updates membership, iteration and .nodes."""
        graph = Graph()
        n1 = BasicNode(1)
        assert (n1 not in graph)
        graph.add_node(n1)
        graph.add_node((n2 := BasicNode(2)))
        assert (graph.nodes == (n1, n2) == tuple(graph))
        assert (n1 in graph)
        graph.remove_node(n1)
        assert (graph.nodes == (n2,) == tuple(graph))
        assert ((n1 not in graph) and (n2 in graph))
        graph.remove_nodes_from([n2])
        assert (graph.nodes == tuple() == tuple(graph))

    def test_edge_management(self):
        """Adding/removing edges updates lookup and incidence queries."""
        graph = Graph()
        graph.add_edge((e1 := BasicEdge((n1 := BasicNode(1)), (n2 := BasicNode(2)))))
        graph.add_edge((e2 := BasicEdge((n3 := BasicNode(3)), (n6 := BasicNode(6)))))
        assert (graph.edges == (e1, e2))
        assert (graph.get_edge(n1, n2) == e1)
        assert (graph.get_edge(n1, n3) is None)
        assert (graph.get_in_edges(n6) == (e2,))
        assert (graph.get_out_edges(n1) == (e1,))
        assert (graph.get_incident_edges(n1) == (e1,))
        graph.remove_edge(e2)
        assert (graph.edges == (e1,))

    def test_get_roots(self):
        """Roots are nodes with no incoming edges (isolated nodes included)."""
        graph = Graph()
        graph.add_nodes_from([(n1 := BasicNode(1)), (n2 := BasicNode(2)), (n3 := BasicNode(3))])
        graph.add_edges_from([BasicEdge(n1, n2)])
        assert (graph.get_roots() == (n1, n3))

    def test_get_leaves(self):
        """Leaves are nodes with no outgoing edges; a full cycle has none."""
        graph = Graph()
        graph.add_nodes_from([(n1 := BasicNode(1)), (n2 := BasicNode(2)), (n3 := BasicNode(3))])
        graph.add_edges_from([(edge := BasicEdge(n1, n2)), BasicEdge(n2, n3), BasicEdge(n3, n1)])
        assert (graph.get_leaves() == tuple())
        graph.remove_edge(edge)
        assert (graph.get_leaves() == (n1,))

    def test_order(self):
        """len(graph) tracks the current node count."""
        graph = Graph()
        graph.add_nodes_from([(n1 := BasicNode(1)), BasicNode(2), BasicNode(3)])
        assert (len(graph) == 3)
        graph.remove_node(n1)
        assert (len(graph) == 2)

    @staticmethod
    def get_easy_graph() -> Tuple[(Graph, Tuple[(BasicNode, ...)], Tuple[(BasicEdge, ...)])]:
        """Small shared graph: a 2-cycle n0<->n1, an edge n0->n2, and isolated n3."""
        graph = Graph()
        nodes = ((n0 := BasicNode(0)), (n1 := BasicNode(1)), (n2 := BasicNode(2)), BasicNode(3))
        edges = (BasicEdge(n0, n1), BasicEdge(n1, n0), BasicEdge(n0, n2))
        graph.add_nodes_from(nodes)
        graph.add_edges_from(edges)
        return (graph, nodes, edges)

    def test_degree(self):
        """In/out degrees reflect edge additions and removals."""
        (graph, nodes, edges) = self.get_easy_graph()
        assert ((graph.in_degree(nodes[3]) == 0) and (graph.in_degree(nodes[0]) == 1) and (graph.in_degree(nodes[2]) == 1))
        assert ((graph.out_degree(nodes[3]) == 0) and (graph.out_degree(nodes[1]) == 1) and (graph.out_degree(nodes[0]) == 2))
        graph.remove_edges_from([edges[0], edges[1]])
        assert ((graph.in_degree(nodes[0]) == 0) and (graph.out_degree(nodes[0]) == 1))

    def test_relations(self):
        """Successor/predecessor/adjacency queries, including unknown nodes."""
        (graph, nodes, edges) = self.get_easy_graph()
        assert ((graph.get_successors(nodes[0]) == (nodes[1], nodes[2])) and (graph.get_predecessors(nodes[0]) == (nodes[1],)) and (graph.get_adjacent_nodes(nodes[0]) == (nodes[1], nodes[2])))
        assert ((graph.get_successors(nodes[1]) == (nodes[0],)) and (graph.get_predecessors(nodes[1]) == (nodes[0],)) and (graph.get_adjacent_nodes(nodes[1]) == (nodes[0],)))
        assert ((graph.get_successors(nodes[2]) == tuple()) and (graph.get_predecessors(nodes[2]) == (nodes[0],)) and (graph.get_adjacent_nodes(nodes[2]) == (nodes[0],)))
        assert ((graph.get_successors(nodes[3]) == tuple()) and (graph.get_predecessors(nodes[3]) == tuple()) and (graph.get_adjacent_nodes(nodes[3]) == tuple()))
        n4 = BasicNode(4)
        assert ((graph.get_successors(n4) == tuple()) and (graph.get_predecessors(n4) == tuple()) and (graph.get_adjacent_nodes(n4) == tuple()))

    def test_has_path(self):
        """has_path follows directed reachability in both directions of the cycle."""
        (graph, nodes, edges) = self.get_easy_graph()
        assert (graph.has_path(nodes[0], nodes[1]) and graph.has_path(nodes[1], nodes[0]))
        assert ((not graph.has_path(nodes[0], nodes[3])) and (not graph.has_path(nodes[3], nodes[0])))

    def test_subgraph(self):
        """subgraph keeps exactly the requested nodes and their internal edges."""
        (graph, nodes, edges) = self.get_easy_graph()
        subgraph = graph.subgraph(nodes[:2])
        assert isinstance(subgraph, Graph)
        assert (subgraph.nodes == nodes[:2])
        assert (subgraph.edges == edges[:2])

    def test_is_acyclic(self):
        """Removing one edge of the 2-cycle makes the graph acyclic."""
        (graph, nodes, edges) = self.get_easy_graph()
        assert (not graph.is_acyclic())
        graph.remove_edge(edges[1])
        assert graph.is_acyclic()

    def test_get_paths(self, nodes):
        """get_paths enumerates all simple paths between two nodes."""
        graph = Graph()
        graph.add_edges_from([BasicEdge(nodes[0], nodes[1]), BasicEdge(nodes[0], nodes[2]), BasicEdge(nodes[0], nodes[3]), BasicEdge(nodes[1], nodes[4]), BasicEdge(nodes[2], nodes[0]), BasicEdge(nodes[2], nodes[4])])
        assert (list(graph.get_paths(nodes[0], nodes[4])) == [(nodes[0], nodes[1], nodes[4]), (nodes[0], nodes[2], nodes[4])])

    def test_get_shortest_path(self, nodes):
        """get_shortest_path picks the fewest-hop route, including a direct edge."""
        graph = Graph()
        graph.add_edges_from([BasicEdge(nodes[0], nodes[1]), BasicEdge(nodes[1], nodes[3]), BasicEdge(nodes[3], nodes[6]), BasicEdge(nodes[0], nodes[2]), BasicEdge(nodes[1], nodes[2]), BasicEdge(nodes[2], nodes[4]), BasicEdge(nodes[4], nodes[5]), BasicEdge(nodes[5], nodes[3]), BasicEdge(nodes[5], nodes[6])])
        assert (graph.get_shortest_path(nodes[0], nodes[6]) == (nodes[0], nodes[1], nodes[3], nodes[6]))
        graph.add_edge(BasicEdge(nodes[0], nodes[6]))
        assert (graph.get_shortest_path(nodes[0], nodes[6]) == (nodes[0], nodes[6]))
def add_render_arguments(parser: argparse.ArgumentParser) -> None:
    """Register the map-rendering command-line options on *parser*."""
    parser.add_argument(
        '-i', '--input',
        dest='input_file_names',
        metavar='<path>',
        nargs='*',
        help='input XML file name or names (if not specified, file will be downloaded using the OpenStreetMap API)',
    )
    parser.add_argument(
        '-o', '--output',
        dest='output_file_name',
        metavar='<path>',
        default='out/map.svg',
        help='output SVG file name',
    )
    parser.add_argument(
        '-b', '--boundary-box',
        metavar='<lon1>,<lat1>,<lon2>,<lat2>',
        help='geo boundary box',
    )
    parser.add_argument(
        '--cache',
        help='path for temporary OSM files',
        default='cache',
        metavar='<path>',
    )
    parser.add_argument(
        '-z', '--zoom',
        type=float,
        metavar='<float>',
        help='OSM zoom level',
        default=18.0,
    )
    parser.add_argument(
        '-c', '--coordinates',
        metavar='<latitude>,<longitude>',
        help='coordinates of any location inside the tile',
    )
    parser.add_argument(
        '-s', '--size',
        metavar='<width>,<height>',
        help='resulted image size',
    )
def _upload(package_path: str, package_name: str, version: str) -> None:
    """Upload the binary at *package_path* as *package_name*/*version* via the
    module-level ``onedocker_repo_svc``; log progress before and after."""
    logger.info(f" Starting uploading package {package_name} at '{package_path}', version {version}...")
    logger.info(f'Uploading binary for package {package_name}: {version}')
    onedocker_repo_svc.upload(package_name, version, package_path)
    logger.info(f''' Finished uploading '{package_name}, version {version}'.
''')
def test_abi_deployment_enabled_by_default(network, build):
    """Contract.from_abi persists a deployment record without any opt-in flag."""
    network.connect('mainnet')
    token_address = '0x0bc529c00c6401aef6d220be8c6ea1667f6ad93e'
    abi = build['abi']
    Contract.from_abi('abiTester', token_address, abi)
    assert _get_deployment(token_address) != (None, None)
    # Clean up so other tests do not see the leftover deployment.
    Contract.remove_deployment(token_address)
class Zones(SuperEnum):
    """Latitudinal climate zones; each member carries (id, title, color, map_key, incr)."""
    # Field names for each member tuple: numeric id, display title, RGB color,
    # single-character map key, and an 'incr' factor.
    __keys__ = ['id', 'title', 'color', 'map_key', 'incr']
    # NOTE(review): 'Artic Circle' and 'Nothern Subtropics' look like typos for
    # 'Arctic'/'Northern', but the titles are runtime strings that may be
    # displayed or persisted — confirm before correcting.
    arctic_circle = (1, 'Artic Circle', (150, 150, 250), 'N', 0.6)
    northern_temperate = (2, 'Northern Temperate', (150, 250, 150), 'A', 0.9)
    northern_subtropics = (3, 'Nothern Subtropics', (150, 250, 200), 'B', 0.6)
    northern_tropics = (4, 'Northern Tropics', (230, 150, 150), 'C', 0.3)
    southern_tropics = (5, 'Southern Tropics', (250, 180, 150), 'D', 0.3)
    southern_subtropics = (6, 'Southern Subtropics', (150, 250, 200), 'E', 0.6)
    southern_temperate = (7, 'Southern Temperate', (150, 250, 150), 'F', 0.9)
    antarctic_circle = (8, 'Antarctic Circle', (150, 150, 250), 'S', 0.6)
# FIX: the decorator line was mangled to '.xfail(...)' — restored to the
# pytest marker its arguments (raises=, reason=) unambiguously belong to.
@pytest.mark.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
def test_correlate_chromosomes():
    """hicCorrelate restricted to two chromosomes produces the reference heatmap
    and scatter images (within tolerance)."""
    outfile_heatmap = NamedTemporaryFile(suffix='heatmap.png', prefix='hicexplorer_test', delete=False)
    outfile_scatter = NamedTemporaryFile(suffix='scatter.png', prefix='hicexplorer_test', delete=False)
    args = "--matrices {} {} --labels 'first' 'second' --method spearman --log1p --colorMap jet --outFileNameHeatmap {} --outFileNameScatter {} --chromosomes chrUextra chr3LHet --plotNumbers".format((ROOT + 'hicCorrectMatrix/small_test_matrix_ICEcorrected_chrUextra_chr3LHet.h5'), (ROOT + 'hicCorrectMatrix/small_test_matrix_ICEcorrected_chrUextra_chr3LHet.h5'), outfile_heatmap.name, outfile_scatter.name).split()
    hicCorrelate.main(args)
    res = compare_images(((ROOT + 'hicCorrelate') + '/heatmap_chrom.png'), outfile_heatmap.name, tol=40)
    assert (res is None), res
    res = compare_images(((ROOT + 'hicCorrelate') + '/scatter_chrom.png'), outfile_scatter.name, tol=40)
    assert (res is None), res
    os.remove(outfile_heatmap.name)
    os.remove(outfile_scatter.name)
def _format_string_for_conversion(string_number: str) -> str:
commas_occurences = [match.start() for match in re.finditer('\\,', string_number)]
dot_occurences = [match.start() for match in re.finditer('\\.', string_number)]
if ((len(commas_occurences) > 0) and (len(dot_occurences) > 0)):
index_remove_partt = max(commas_occurences[(len(commas_occurences) - 1)], dot_occurences[(len(dot_occurences) - 1)])
number_part = string_number[:index_remove_partt]
degit_part = string_number[(index_remove_partt + 1):]
number_part = re.sub('[^\\d]', '', number_part)
return f'{number_part}.{degit_part}'
if (len(commas_occurences) > 0):
if (len(commas_occurences) == 1):
return string_number.replace(',', '.')
if (len(dot_occurences) > 0):
if (len(dot_occurences) == 1):
return string_number
return re.sub('[^\\d]', '', string_number) |
# NOTE(review): the two class decorators were mangled to '.asyncio'/'.manager'
# in the source; reconstructed as pytest markers — confirm against the
# project's registered markers before merging.
@pytest.mark.asyncio
@pytest.mark.manager
class TestVerifyUser():
    """UserManager.verify(): token validation failures and success paths."""

    async def test_invalid_token(self, user_manager: UserManagerMock[UserModel]):
        # A token that is not even a JWT must be rejected.
        with pytest.raises(InvalidVerifyToken):
            (await user_manager.verify('foo'))

    async def test_token_expired(self, user_manager: UserManagerMock[UserModel], user: UserModel, verify_token):
        with pytest.raises(InvalidVerifyToken):
            token = verify_token(user_id=user.id, email=user.email, lifetime=(- 1))
            (await user_manager.verify(token))

    async def test_missing_user_id(self, user_manager: UserManagerMock[UserModel], user: UserModel, verify_token):
        with pytest.raises(InvalidVerifyToken):
            token = verify_token(email=user.email)
            (await user_manager.verify(token))

    async def test_missing_user_email(self, user_manager: UserManagerMock[UserModel], user: UserModel, verify_token):
        with pytest.raises(InvalidVerifyToken):
            token = verify_token(user_id=user.id)
            (await user_manager.verify(token))

    async def test_invalid_user_id(self, user_manager: UserManagerMock[UserModel], user: UserModel, verify_token):
        with pytest.raises(InvalidVerifyToken):
            token = verify_token(user_id='foo', email=user.email)
            (await user_manager.verify(token))

    async def test_invalid_email(self, user_manager: UserManagerMock[UserModel], user: UserModel, verify_token):
        with pytest.raises(InvalidVerifyToken):
            token = verify_token(user_id=user.id, email='foo')
            (await user_manager.verify(token))

    async def test_email_id_mismatch(self, user_manager: UserManagerMock[UserModel], user: UserModel, inactive_user: UserModel, verify_token):
        # Token email must belong to the user id encoded in the token.
        with pytest.raises(InvalidVerifyToken):
            token = verify_token(user_id=user.id, email=inactive_user.email)
            (await user_manager.verify(token))

    async def test_verified_user(self, user_manager: UserManagerMock[UserModel], verified_user: UserModel, verify_token):
        # Verifying twice is an error.
        with pytest.raises(UserAlreadyVerified):
            token = verify_token(user_id=verified_user.id, email=verified_user.email)
            (await user_manager.verify(token))

    async def test_inactive_user(self, user_manager: UserManagerMock[UserModel], inactive_user: UserModel, verify_token):
        # Verification succeeds for inactive users and does not activate them.
        token = verify_token(user_id=inactive_user.id, email=inactive_user.email)
        verified_user = (await user_manager.verify(token))
        assert (verified_user.is_verified is True)
        assert (verified_user.is_active is False)

    async def test_active_user(self, user_manager: UserManagerMock[UserModel], user: UserModel, verify_token):
        token = verify_token(user_id=user.id, email=user.email)
        verified_user = (await user_manager.verify(token))
        assert (verified_user.is_verified is True)
        assert (verified_user.is_active is True)
def downgrade():
    """Drop the soft-delete ``deleted_at`` column from every table the matching
    upgrade added it to."""
    soft_delete_tables = (
        'users_events_roles',
        'user_permissions',
        'tickets',
        'ticket_tag',
        'ticket_holders',
        'sponsors',
        'speaker',
        'roles',
        'role_invites',
        'notifications',
        'microlocations',
        'feedback',
        'faq',
        'event_types',
        'event_topics',
        'event_sub_topics',
        'event_invoices',
        'email_notifications',
        'discount_codes',
        'custom_forms',
        'access_codes',
        'tracks',
        'orders',
        'orders_tickets',
    )
    for table_name in soft_delete_tables:
        op.drop_column(table_name, 'deleted_at')
class Invoice(DeleteMixin, QuickbooksPdfDownloadable, QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin, SendMixin, VoidMixin):
    """A QuickBooks Online Invoice transaction.

    Mixins supply delete, PDF download, CRUD, linked-transaction, send and
    void behavior; this class defines the field layout and defaults.
    """
    # Nested JSON objects: attribute name -> class used to deserialize it.
    class_dict = {'DepartmentRef': Ref, 'CurrencyRef': Ref, 'CustomerRef': Ref, 'ClassRef': Ref, 'SalesTermRef': Ref, 'ShipMethodRef': Ref, 'DepositToAccountRef': Ref, 'BillAddr': Address, 'ShipAddr': Address, 'TxnTaxDetail': TxnTaxDetail, 'BillEmail': EmailAddress, 'BillEmailCc': EmailAddress, 'BillEmailBcc': EmailAddress, 'CustomerMemo': CustomerMemo, 'DeliveryInfo': DeliveryInfo, 'RecurDataRef': Ref, 'TaxExemptionRef': Ref, 'MetaData': MetaData}
    # Nested JSON arrays: attribute name -> element class.
    list_dict = {'CustomField': CustomField, 'Line': DetailLine, 'LinkedTxn': LinkedTxn}
    # Line-detail discriminator: payload key -> concrete line class.
    detail_dict = {'SalesItemLineDetail': SalesItemLine, 'SubTotalLineDetail': SubtotalLine, 'DiscountLineDetail': DiscountLine, 'DescriptionOnly': DescriptionOnlyLine, 'GroupLineDetail': GroupLine}
    # Object name used in QBO API endpoints and queries.
    qbo_object_name = 'Invoice'

    def __init__(self):
        """Initialize every invoice field to its default value."""
        super(Invoice, self).__init__()
        # Amounts and payment flags.
        self.Deposit = 0
        self.Balance = 0
        self.AllowIPNPayment = True
        self.AllowOnlineCreditCardPayment = False
        self.AllowOnlineACHPayment = False
        # Scalar document fields.
        self.DocNumber = None
        self.PrivateNote = ''
        self.DueDate = ''
        self.ShipDate = ''
        self.TrackingNum = ''
        self.TotalAmt = ''
        self.TxnDate = ''
        self.ApplyTaxAfterDiscount = False
        self.PrintStatus = 'NotSet'
        self.EmailStatus = 'NotSet'
        self.ExchangeRate = 1
        self.GlobalTaxCalculation = 'TaxExcluded'
        self.InvoiceLink = ''
        self.HomeBalance = 0
        self.HomeTotalAmt = 0
        self.FreeFormAddress = False
        self.EInvoiceStatus = None
        # Nested objects (deserialized via class_dict).
        self.BillAddr = None
        self.ShipAddr = None
        self.BillEmail = None
        self.BillEmailCc = None
        self.BillEmailBcc = None
        self.CustomerRef = None
        self.CurrencyRef = None
        self.CustomerMemo = None
        self.DepartmentRef = None
        self.TxnTaxDetail = None
        self.DeliveryInfo = None
        self.RecurDataRef = None
        self.SalesTermRef = None
        self.ShipMethodRef = None
        self.TaxExemptionRef = None
        self.MetaData = None
        # Nested lists (deserialized via list_dict).
        self.CustomField = []
        self.Line = []
        self.LinkedTxn = []

    def __str__(self):
        # An invoice prints as its total amount.
        return str(self.TotalAmt)

    def to_linked_txn(self):
        """Return a LinkedTxn referencing this invoice (line id 1)."""
        linked_txn = LinkedTxn()
        linked_txn.TxnId = self.Id
        linked_txn.TxnType = 'Invoice'
        linked_txn.TxnLineId = 1
        return linked_txn

    def email_sent(self):
        """Return True when QBO reports the invoice email as already sent."""
        if (self.EmailStatus == 'EmailSent'):
            return True
        return False

    def to_ref(self):
        """Return a Ref (name=DocNumber, value=Id) pointing at this invoice."""
        ref = Ref()
        ref.name = self.DocNumber
        ref.type = self.qbo_object_name
        ref.value = self.Id
        return ref
def test_delitem(fx_asset):
    """Deleting the first frame shrinks the sequence and survives expire()."""
    with Image(filename=str(fx_asset.joinpath('apple.ico'))) as img:
        removed = img.sequence[0]
        del img.sequence[0]
        # State right after deletion.
        assert len(img.sequence) == 3
        assert img.sequence[0] is not removed
        assert img.sequence[0].size == (16, 16)
        expire(img)
        # The same invariants must hold after the instance cache is expired.
        assert len(img.sequence) == 3
        assert img.sequence[0] is not removed
        assert img.sequence[0].size == (16, 16)
        assert img.sequence[1].size == (32, 32)
        assert img.sequence[2].size == (16, 16)
def test_render_with_tooltips() -> None:
    """Rendered SVG carries the tooltip title plus the expected dimensions."""
    run(
        (COMMAND_LINES['render_with_tooltips'] + ['--cache', 'tests/data']),
        (LOG + b'INFO Writing output SVG to out/map.svg...\n'),
    )
    with (OUTPUT_PATH / 'map.svg').open(encoding='utf-8') as output_file:
        svg: Element = ElementTree.parse(output_file).getroot()
    assert len(svg) == 8
    assert len(svg[3][0]) == 1
    assert svg[3][0][0].text == 'natural: tree'
    assert svg.get('width') == '186.0'
    assert svg.get('height') == '198.0'
def test_model_dialogues_keep_terminal_dialogues_option():
    """The keep_terminal_state_dialogues kwarg overrides the class default
    without mutating the shared class-level flag."""
    default_dialogues = DefaultDialogues(name='test', skill_context=Mock())
    assert default_dialogues.is_keep_dialogues_in_terminal_state == DefaultDialogues._keep_terminal_state_dialogues
    for flag in (True, False):
        dialogues = DefaultDialogues(name='test', skill_context=Mock(), keep_terminal_state_dialogues=flag)
        assert dialogues.is_keep_dialogues_in_terminal_state is flag
        # Class-level default stays in sync with the base class.
        assert DefaultDialogues._keep_terminal_state_dialogues == Dialogues._keep_terminal_state_dialogues
def add_QueryServicer_to_server(servicer, server):
    # Generated-style gRPC wiring for the cosmos.mint.v1beta1.Query service:
    # map each RPC name to its unary-unary handler with the protobuf
    # (de)serializers, then register one generic handler on the server.
    rpc_method_handlers = {'Params': grpc.unary_unary_rpc_method_handler(servicer.Params, request_deserializer=cosmos_dot_mint_dot_v1beta1_dot_query__pb2.QueryParamsRequest.FromString, response_serializer=cosmos_dot_mint_dot_v1beta1_dot_query__pb2.QueryParamsResponse.SerializeToString), 'Inflation': grpc.unary_unary_rpc_method_handler(servicer.Inflation, request_deserializer=cosmos_dot_mint_dot_v1beta1_dot_query__pb2.QueryInflationRequest.FromString, response_serializer=cosmos_dot_mint_dot_v1beta1_dot_query__pb2.QueryInflationResponse.SerializeToString), 'AnnualProvisions': grpc.unary_unary_rpc_method_handler(servicer.AnnualProvisions, request_deserializer=cosmos_dot_mint_dot_v1beta1_dot_query__pb2.QueryAnnualProvisionsRequest.FromString, response_serializer=cosmos_dot_mint_dot_v1beta1_dot_query__pb2.QueryAnnualProvisionsResponse.SerializeToString)}
    generic_handler = grpc.method_handlers_generic_handler('cosmos.mint.v1beta1.Query', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
.django_db  # NOTE(review): looks like a stripped `@pytest.mark.` prefix — confirm.
def test_award_endpoint_for_null_recipient_information(client, awards_and_transactions):
    """Award detail endpoint returns the expected recipient payload for award 3."""
    resp = client.get('/api/v2/awards/3/', content_type='application/json')
    assert (resp.status_code == status.HTTP_200_OK)
    # `recipient_without_id_and_name` is a module-level expected value (not visible here).
    assert (json.loads(resp.content.decode('utf-8')).get('recipient') == recipient_without_id_and_name)
def parse_FMT21H(buffer, dex_object, pc_point, offset):
    """Decode a Dalvik 21h-format instruction: one register plus a 16-bit
    literal that represents the high bits of a wider value."""
    (literal,) = struct.unpack_from('H', buffer, 2)
    opcode = int(buffer[0])
    reg = int(buffer[1])
    # NOTE(review): the second byte is compared to 25 here; whether that is
    # intended to be the opcode byte is not clear from this code alone.
    if reg == 25:
        literal_str = ('%d' % literal)
    else:
        literal_str = ('%d0000' % literal)
    return (dex_decode[opcode][4], dex_decode[opcode][1], ('v%d' % reg), literal_str)
class ChatWithDbAutoExecute(BaseChat):
    """Chat scene that generates SQL for a selected database and auto-executes it."""
    chat_scene: str = ChatScene.ChatWithDbExecute.value()

    def __init__(self, chat_param: Dict):
        """Resolve the target DB connection and set up the API caller.

        Raises:
            ValueError: when ``chat_param['select_param']`` (the DB name) is empty.
        """
        chat_mode = ChatScene.ChatWithDbExecute
        self.db_name = chat_param['select_param']
        chat_param['chat_mode'] = chat_mode
        super().__init__(chat_param=chat_param)
        if (not self.db_name):
            # NOTE(review): `.value` is not called here, unlike `.value()` above —
            # the message may interpolate a bound method; confirm upstream.
            raise ValueError(f'{ChatScene.ChatWithDbExecute.value} mode should chose db!')
        with root_tracer.start_span('ChatWithDbAutoExecute.get_connect', metadata={'db_name': self.db_name}):
            self.database = CFG.LOCAL_DB_MANAGE.get_connect(self.db_name)
        self.top_k: int = 50  # max rows/items requested from the model
        self.api_call = ApiCall(display_registry=CFG.command_disply)
    ()  # NOTE(review): stray no-op expression; likely a stripped decorator — confirm.

    async def generate_input_values(self) -> Dict:
        """Build prompt inputs: DB summary (or plain table info) plus the user query."""
        try:
            from dbgpt.rag.summary.db_summary_client import DBSummaryClient
        except ImportError:
            raise ValueError('Could not import DBSummaryClient. ')
        client = DBSummaryClient(system_app=CFG.SYSTEM_APP)
        table_infos = None
        try:
            with root_tracer.start_span('ChatWithDbAutoExecute.get_db_summary'):
                table_infos = (await blocking_func_to_async(self._executor, client.get_db_summary, self.db_name, self.current_user_input, CFG.KNOWLEDGE_SEARCH_TOP_SIZE))
        except Exception as e:
            print(('db summary find error!' + str(e)))
        if (not table_infos):
            # Fall back to a simple table listing when the summary service fails.
            table_infos = (await blocking_func_to_async(self._executor, self.database.table_simple_info))
        input_values = {'db_name': self.db_name, 'user_input': self.current_user_input, 'top_k': str(self.top_k), 'dialect': self.database.dialect, 'table_info': table_infos, 'display_type': self._generate_numbered_list()}
        return input_values

    def stream_plugin_call(self, text):
        # Newlines are flattened before handing the text to the SQL visualizer.
        text = text.replace('\n', ' ')
        print(f'stream_plugin_call:{text}')
        return self.api_call.display_sql_llmvis(text, self.database.run_to_df)

    def do_action(self, prompt_response):
        # Returns the DB runner callable; the caller executes it.
        print(f'do_action:{prompt_response}')
        return self.database.run_to_df
def _start_local_worker(worker_manager: WorkerManagerAdapter, worker_params: ModelWorkerParameters):
    """Build a model worker and register it with the (lazily created) local manager."""
    span_metadata = {
        'run_service': SpanTypeRunName.WORKER_MANAGER,
        'params': _get_dict_from_obj(worker_params),
        'sys_infos': _get_dict_from_obj(get_system_info()),
    }
    with root_tracer.start_span('WorkerManager._start_local_worker', span_type=SpanType.RUN, metadata=span_metadata):
        new_worker = _build_worker(worker_params)
        if not worker_manager.worker_manager:
            # First worker start: create the local model manager on demand.
            worker_manager.worker_manager = _create_local_model_manager(worker_params)
        worker_manager.worker_manager.add_worker(new_worker, worker_params)
.usefixtures('use_tmpdir', 'init_eclrun_config')  # NOTE(review): stripped `@pytest.mark.` prefix — confirm.
.requires_eclipse  # NOTE(review): likewise likely a stripped marker.
def test_run(source_root):
    """Run the SPE1 deck through eclrun and verify the OK/LOG artifacts."""
    shutil.copy(os.path.join(source_root, 'test-data/eclipse/SPE1.DATA'), 'SPE1.DATA')
    econfig = ecl_config.Ecl100Config()
    erun = ecl_run.EclRun('SPE1.DATA', None)
    erun.runEclipse(eclrun_config=ecl_config.EclrunConfig(econfig, '2019.1'))
    ok_path = os.path.join(erun.runPath(), f'{erun.baseName()}.OK')
    log_path = os.path.join(erun.runPath(), f'{erun.baseName()}.LOG')
    # A successful run leaves both the OK marker and a non-empty log.
    assert os.path.isfile(ok_path)
    assert os.path.isfile(log_path)
    assert (os.path.getsize(log_path) > 0)
    errors = erun.parseErrors()
    assert (len(errors) == 0)
def typeof(data: Union[(primitives.JsDataModel, str)], type: Optional[Union[(primitives.JsDataModel, str)]]=None):
    """Build a JavaScript ``typeof`` expression.

    Without *type*, returns the bare ``typeof data`` expression (boolean
    wrapper); with *type*, returns the ``typeof data === type`` comparison.
    """
    data_expr = JsUtils.jsConvertData(data, None)
    if type is not None:
        return JsObjects.JsVoid(('typeof %s === %s' % (data_expr, JsUtils.jsConvertData(type, None))))
    return JsObjects.JsBoolean.JsBoolean(('typeof %s' % data_expr))
class OptionSeriesStreamgraphDataDragdropGuideboxDefault(Options):
    """Style options for the default drag guide box of streamgraph points.

    NOTE(review): each option is defined twice (getter, then setter) — the
    original most likely used property decorators that are missing from this
    dump; confirm against the upstream source.
    """

    def className(self):
        # CSS class name applied to the guide box.
        return self._config_get('highcharts-drag-box-default')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Guide box fill color.
        return self._config_get('rgba(0, 0, 0, 0.1)')

    def color(self, text: str):
        self._config(text, js_type=False)

    def cursor(self):
        # Mouse cursor shown while dragging.
        return self._config_get('move')

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        # Guide box border color.
        return self._config_get('#888')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Border width in pixels.
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        # Stacking order of the guide box.
        return self._config_get(900)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
def test_create_load_object(monkeypatch):
    """_create_load_object uppercases mapped columns, passes boolean fields
    through (None becomes False), and applies per-field transform functions."""
    column_map = {'input_field_1': 'output_field_1', 'input_field_2': 'output_field_2'}
    bool_map = {'input_field_3': 'output_field_3', 'input_field_4': 'output_field_4', 'input_field_5': 'output_field_5'}
    transforms = {'output_field_6': (lambda t: (t['input_field_6'] * 2)), 'output_field_7': (lambda t: 'replacement')}
    row = {'input_field_1': 'this is field 1', 'input_field_2': 'this is field 2', 'input_field_3': 'true', 'input_field_4': 'false', 'input_field_5': None, 'input_field_6': 5, 'input_field_7': 'this is field 7'}
    expected = {'output_field_1': 'THIS IS FIELD 1', 'output_field_2': 'THIS IS FIELD 2', 'output_field_3': 'true', 'output_field_4': 'false', 'output_field_5': False, 'output_field_6': 10, 'output_field_7': 'replacement'}
    mock_cursor(monkeypatch, row)
    assert _create_load_object(row, column_map, bool_map, transforms) == expected
class Geometric(Harmony):
    """Evenly spaced hue harmony: twelve colors stepped around the hue wheel."""

    def __init__(self) -> None:
        self.count = 12

    def harmonize(self, color: 'Color', space: str) -> List['Color']:
        """Return *count* colors whose hues are evenly distributed, converted
        back to the requested space when it differs from the working one."""
        base = self.get_cylinder(color, space)
        target_space = space
        space = base.space()
        hue_channel = base._space.hue_name()
        step = (360.0 / self.count)
        results = []
        hue_offset = step
        for _ in range(self.count - 1):
            # Bind the current offset as a default so each closure keeps its value.
            results.append(base.clone().set(hue_channel, (lambda x, value=hue_offset: adjust_hue(x, value))))
            hue_offset += step
        results.insert(0, base)
        if target_space != space:
            results = [color.new(c.convert(target_space, in_place=True)) for c in results]
        return results
def test_update_instructions_with_redundant(variable_x, variable_v, aliased_variable_y, variable):
    """Renaming SSA variables removes identity assignments made redundant
    when both sides map to the same renamed variable."""
    binary_operation = BinaryOperation(OperationType.plus, [aliased_variable_y[2], aliased_variable_y[3]])
    # Input program (SSA form) spread over three basic blocks.
    instructions = [Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant()])), Assignment(variable_x[3], variable_x[2]), Assignment(variable_v[2], variable_v[1]), Assignment(aliased_variable_y[4], binary_operation), Assignment(aliased_variable_y[5], binary_operation), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [variable_x[3], variable_v[2]])), Assignment(variable_v[2], variable_v[3]), Assignment(variable_x[4], variable_v[2]), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(), variable_x[4]])), Assignment(variable_x[3], variable_x[4]), Assignment(aliased_variable_y[4], aliased_variable_y[5])]
    # Expected output after renaming: trivial self-assignments are gone.
    new_instructions = [Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant()])), Assignment(variable[5], BinaryOperation(OperationType.plus, [variable[3], variable[4]])), Assignment(variable[5], BinaryOperation(OperationType.plus, [variable[3], variable[4]])), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [variable[1], variable[2]])), Assignment(variable[1], variable[2]), Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(), variable[1]]))]
    nodes = [BasicBlock(i) for i in range(10)]
    nodes[0].instructions = instructions[0:4]
    nodes[1].instructions = instructions[4:6]
    nodes[2].instructions = instructions[6:]
    cfg = ControlFlowGraph()
    cfg.add_edges_from([UnconditionalEdge(nodes[0], nodes[1]), UnconditionalEdge(nodes[2], nodes[1])])
    variable_renamer = VariableRenamer(decompiler_task(cfg), InterferenceGraph(cfg))
    # Force a fixed renaming so the expected result is deterministic.
    variable_renamer.renaming_map = {variable_x[2]: variable[1], variable_x[3]: variable[1], variable_x[4]: variable[1], variable_v[1]: variable[2], variable_v[2]: variable[2], variable_v[3]: variable[2], aliased_variable_y[2]: variable[3], aliased_variable_y[3]: variable[4], aliased_variable_y[4]: variable[5], aliased_variable_y[5]: variable[5]}
    variable_renamer.rename()
    assert (((nodes[0].instructions + nodes[1].instructions) + nodes[2].instructions) == new_instructions)
class BaseSQLAFilter(filters.BaseFilter):
    """Base class for SQLAlchemy column filters."""

    def __init__(self, column, name, options=None, data_type=None):
        # Keep the column so subclasses can build query criteria from it.
        super(BaseSQLAFilter, self).__init__(name, options, data_type)
        self.column = column

    def get_column(self, alias):
        """Return the filtered column, resolved against *alias* when given."""
        if alias is None:
            return self.column
        return getattr(alias, self.column.key)

    def apply(self, query, value, alias=None):
        """Apply the filter; *alias* is accepted but unused at this level."""
        return super(BaseSQLAFilter, self).apply(query, value)
class OptionsWidget(Gtk.Widget):
    """Base widget that mirrors the state of an options controller.

    NOTE(review): ``controller`` is defined twice (getter, then setter) — the
    original likely used property decorators that are missing from this dump;
    confirm against upstream before relying on attribute-style access.
    """

    def __init__(self, *args, **kwargs):
        super(OptionsWidget, self).__init__(*args, **kwargs)
        self._controller = None

    def controller(self):
        return self._controller

    def controller(self, controller):
        # Detach signal handlers from any previously attached controller.
        if self._controller:
            self._controller.disconnect(self._options_changed_id)
            self._controller.disconnect(self._current_key_changed_id)
            self._controller.disconnect(self._update_image_changed_id)
        self._controller = controller
        self._options_changed_id = self._controller.connect('notify::options', self._update_options)
        self._current_key_changed_id = self._controller.connect('notify::current-key', self._update_current_key)
        self._update_image_changed_id = self._controller.connect('notify::update-image', self._update_image)
        self._visible_changed_id = self._controller.connect('notify::enabled', self._update_visibility)
        self.update_options()
        self.update_current_key()

    def _update_visibility(self, *args):
        self.set_visible(self._controller.enabled)

    def _update_options(self, *args):
        self.update_options()

    def update_options(self):
        # Hook for subclasses.
        pass

    def _update_current_key(self, *args):
        self.update_current_key()

    def update_current_key(self):
        # BUG FIX: was ``def update_current_key():`` (missing ``self``), which
        # raised TypeError whenever invoked via ``self.update_current_key()``.
        pass

    def _update_image(self, *args):
        self.update_image()

    def update_image(self):
        # Hook for subclasses.
        pass

    def calc_popup_position(self, widget):
        """Return (x, y) for popping *widget* up beside this widget, kept on-monitor."""
        toplevel = self.get_toplevel()
        toplevel.set_type_hint(Gdk.WindowTypeHint.DROPDOWN_MENU)
        (menu_req, pref_req) = widget.get_preferred_size()
        align = widget.get_halign()
        direction = self.get_direction()
        window = self.get_window()
        screen = widget.get_screen()
        monitor_num = screen.get_monitor_at_window(window)
        if (monitor_num < 0):
            monitor_num = 0
        monitor = screen.get_monitor_workarea(monitor_num)
        allocation = self.get_allocation()
        (ret, x, y) = window.get_origin()
        x += allocation.x
        y += allocation.y
        # Right-align the popup when it is narrower than the widget.
        if ((allocation.width - menu_req.width) > 0):
            x += (allocation.width - menu_req.width)
        # Prefer opening downwards; otherwise fall back to upwards.
        if (((y + allocation.height) + menu_req.height) <= (monitor.y + monitor.height)):
            y += allocation.height
        elif ((y - menu_req.height) >= monitor.y):
            y -= menu_req.height
        else:
            y -= menu_req.height
        return (x, y)
.asyncio  # NOTE(review): these bare `.name` lines look like stripped `@pytest.mark.` decorators — confirm.
.manager
class TestCreateUser():
    """Tests for UserManager.create(): duplicates, regular users and the
    `safe` flag's effect on privileged fields."""

    .parametrize('email', ['king.', 'King.'])
    async def test_existing_user(self, email: str, user_manager: UserManagerMock[UserModel]):
        # Creating a user with an existing email must fail before any hook runs.
        user = UserCreate(email=email, password='guinevere')
        with pytest.raises(UserAlreadyExists):
            (await user_manager.create(user))
        assert (user_manager.on_after_register.called is False)

    .parametrize('email', ['', ''])
    async def test_regular_user(self, email: str, user_manager: UserManagerMock[UserModel]):
        user = UserCreate(email=email, password='guinevere')
        created_user = (await user_manager.create(user))
        assert (type(created_user) == UserModel)
        assert (user_manager.on_after_register.called is True)

    .parametrize('safe,result', [(True, False), (False, True)])
    async def test_superuser(self, user_manager: UserManagerMock[UserModel], safe: bool, result: bool):
        # In safe mode the is_superuser request must be ignored.
        user = UserCreate(email='', password='guinevere', is_superuser=True)
        created_user = (await user_manager.create(user, safe))
        assert (type(created_user) == UserModel)
        assert (created_user.is_superuser is result)
        assert (user_manager.on_after_register.called is True)

    .parametrize('safe,result', [(True, True), (False, False)])
    async def test_is_active(self, user_manager: UserManagerMock[UserModel], safe: bool, result: bool):
        # In safe mode the is_active=False request must be ignored.
        user = UserCreate(email='', password='guinevere', is_active=False)
        created_user = (await user_manager.create(user, safe))
        assert (type(created_user) == UserModel)
        assert (created_user.is_active is result)
        assert (user_manager.on_after_register.called is True)
class ethernet(packet_base.PacketBase):
    """Ethernet II header: destination/source MAC plus ethertype.

    NOTE(review): ``parser`` and ``get_packet_type`` take ``cls`` but carry no
    visible ``@classmethod`` decorator here — likely stripped; confirm upstream.
    """
    _PACK_STR = '!6s6sH'  # network order: 6-byte dst, 6-byte src, 2-byte ethertype
    _MIN_LEN = struct.calcsize(_PACK_STR)
    _MIN_PAYLOAD_LEN = 46  # minimum Ethernet payload; shorter frames get zero-padded
    _TYPE = {'ascii': ['src', 'dst']}

    def __init__(self, dst='ff:ff:ff:ff:ff:ff', src='00:00:00:00:00:00', ethertype=ether.ETH_TYPE_IP):
        super(ethernet, self).__init__()
        self.dst = dst
        self.src = src
        self.ethertype = ethertype

    def parser(cls, buf):
        """Parse *buf*: return (header object, next-protocol parser, remaining bytes)."""
        (dst, src, ethertype) = struct.unpack_from(cls._PACK_STR, buf)
        return (cls(addrconv.mac.bin_to_text(dst), addrconv.mac.bin_to_text(src), ethertype), ethernet.get_packet_type(ethertype), buf[ethernet._MIN_LEN:])

    def serialize(self, payload, prev):
        """Pack the header; zero-pads *payload* in place up to the 46-byte minimum."""
        pad_len = (self._MIN_PAYLOAD_LEN - len(payload))
        if (pad_len > 0):
            payload.extend((b'\x00' * pad_len))
        return struct.pack(ethernet._PACK_STR, addrconv.mac.text_to_bin(self.dst), addrconv.mac.text_to_bin(self.src), self.ethertype)

    def get_packet_type(cls, type_):
        """Map an ethertype to its parser class; values at or below the 802.3
        length boundary are folded into the IEEE 802.3 entry."""
        # NOTE(review): reads cls._TYPES — presumably a registry defined on the
        # base class (distinct from _TYPE above); confirm.
        if (type_ <= ether.ETH_TYPE_IEEE802_3):
            type_ = ether.ETH_TYPE_IEEE802_3
        return cls._TYPES.get(type_)
class _MethodProxy():
    """Proxy standing in for a mocked method.

    Attribute access on ``self`` is routed through ``self.__dict__`` directly
    throughout, because ``__getattr__``/``__setattr__``/``__delattr__`` below
    forward to the wrapped value — normal attribute access would recurse.
    """

    def __init__(self, value: Any, callable_value: Optional[Callable]=None) -> None:
        # ``_value`` is the object presented to callers; ``_callable_value`` is
        # what gets invoked on __call__ (defaults to the value itself).
        self.__dict__['_value'] = value
        self.__dict__['_callable_value'] = (callable_value or value)

    def __get__(self, instance: 'StrictMock', owner: Optional[Type['StrictMock']]=None) -> Union[(object, Callable)]:
        # When value and callable coincide, hand back the callable directly;
        # otherwise keep the proxy so attribute forwarding keeps working.
        if (self.__dict__['_value'] is self.__dict__['_callable_value']):
            return self.__dict__['_callable_value']
        else:
            return self

    def __getattr__(self, name: str) -> str:
        return getattr(self.__dict__['_value'], name)

    def __setattr__(self, name: str, value: str) -> None:
        return setattr(self.__dict__['_value'], name, value)

    def __delattr__(self, name: str) -> None:
        return delattr(self.__dict__['_value'], name)

    def __call__(self, *args: Any, **kwargs: Any) -> Optional[Any]:
        return self.__dict__['_callable_value'](*args, **kwargs)

    def __copy__(self) -> '_MethodProxy':
        # Shallow copy: share both wrapped objects.
        return type(self)(callable_value=self.__dict__['_callable_value'], value=self.__dict__['_value'])

    def __deepcopy__(self, memo: Optional[Dict[(Any, Any)]]=None) -> '_MethodProxy':
        if (memo is None):
            memo = {}
        self_copy = type(self)(callable_value=copy.deepcopy(self.__dict__['_callable_value']), value=copy.deepcopy(self.__dict__['_value']))
        # Record the copy so cyclic structures deep-copy correctly.
        memo[id(self)] = self_copy
        return self_copy

    def __repr__(self) -> str:
        return repr(self.__dict__['_value'])
class HTTPException(Exception):
    """Exception carrying an HTTP status code, a detail message and optional headers."""

    def __init__(self, status_code: int, detail: typing.Optional[str]=None, headers: typing.Optional[typing.Dict[(str, str)]]=None) -> None:
        if (detail is None):
            # BUG FIX: this line was truncated ("detail ="). Default the detail
            # to the standard reason phrase for the status code.
            import http  # stdlib; imported locally so this fix is self-contained
            detail = http.HTTPStatus(status_code).phrase
        self.status_code = status_code
        self.detail = detail
        self.headers = headers

    def __str__(self) -> str:
        return f'{self.status_code}: {self.detail}'

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        return f'{class_name}(status_code={self.status_code!r}, detail={self.detail!r})'
def handle_uploaded_file(import_file, request):
    """Dispatch an uploaded spreadsheet to the matching importer by MIME type.

    Raises:
        ImportException: when the content type is not a supported format.
    """
    logger.info('Uploaded file: %s', import_file)
    logger.info('File content type is: %s', import_file.content_type)
    content_type = import_file.content_type
    excel_types = (
        'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        'application/vnd.ms-excel',
        'application/vms.ms-excel',
        'application/msexcel',
    )
    if content_type == 'text/csv':
        return process_csv(import_file)
    if content_type in excel_types:
        importer = Import(request, filename=import_file)
        return importer.process_file()
    if content_type == 'application/vnd.oasis.opendocument.spreadsheet':
        return process_ods(import_file)
    raise ImportException((_('Uploaded file mimetype: "%s" has not been recognized!') % content_type))
class EquilateralTriangleTest(unittest.TestCase):
    """equilateral() is True only when all three sides are equal and non-zero."""

    def test_all_sides_are_equal(self):
        sides = [2, 2, 2]
        self.assertIs(equilateral(sides), True)

    def test_any_side_is_unequal(self):
        sides = [2, 3, 2]
        self.assertIs(equilateral(sides), False)

    def test_no_sides_are_equal(self):
        sides = [5, 4, 6]
        self.assertIs(equilateral(sides), False)

    def test_all_zero_sides_is_not_a_triangle(self):
        sides = [0, 0, 0]
        self.assertIs(equilateral(sides), False)

    def test_sides_may_be_floats(self):
        sides = [0.5, 0.5, 0.5]
        self.assertIs(equilateral(sides), True)
class SubDecl(Decl):
    # NOTE(review): the `-> (Block.same block)` / `-> (Block.env block)` return
    # annotations are not standard Python — this file appears to use a
    # dependent-type DSL processed by a custom checker; confirm before editing.
    block: Block
    var: str

    def new(self):
        # Declares `var` as a fresh void-typed binding.
        return (self.var, 'void()')

    def ok(self, block):
        return block.ok

    def block_same(self) -> (Block.same block):
        return []

    def block_env(self, block) -> (Block.env block):
        # Merge the current environment with the block's procedures.
        return {**self.env, **block.procs}

    def ast_dict(self):
        """Serialize this declaration (and its block) to a plain dict."""
        return {'block': self.block.ast_dict(), 'var': self.var, 'class': self.__class__.__name__}
def planSlantAxis(glyphSetFunc, axisLimits, slants=None, samples=None, glyphs=None, designLimits=None, pins=None, sanitize=False):
    """Plan the `slnt` axis using slant measurement with degree normalization
    and linear interpolation; *slants* defaults to the module's SLANTS."""
    sanitize_func = sanitizeSlant if sanitize else None
    axis_values = SLANTS if slants is None else slants
    return planAxis(
        measureSlant,
        normalizeDegrees,
        interpolateLinear,
        glyphSetFunc,
        'slnt',
        axisLimits,
        values=axis_values,
        samples=samples,
        glyphs=glyphs,
        designLimits=designLimits,
        pins=pins,
        sanitizeFunc=sanitize_func,
    )
def test_multiple_register_pairs_are_handled_correctly():
    """RegisterPairHandling replaces 64-bit register pairs with temporaries,
    splitting them into low (mask) and high (shift) 32-bit halves."""
    cfg = ControlFlowGraph()
    # Variable factories: 32-bit registers/locals keyed by ssa label, and two
    # 64-bit temporaries expected to replace the pairs.
    (eax, ebx, a, b) = ((lambda x, name=name: Variable(name, Integer.int32_t(), ssa_label=x)) for name in ['eax', 'ebx', 'a', 'b'])
    (v0, v1) = ((lambda name=name: Variable(name, Integer.int64_t(), ssa_label=0)) for name in ['loc_0', 'loc_1'])
    cfg.add_node((n1 := BasicBlock(0, [Assignment(RegisterPair(ebx(0), eax(0), Integer.int64_t()), UnaryOperation(OperationType.cast, [a(0)])), Assignment(a(1), eax(0)), Assignment(b(0), ebx(0)), Assignment(RegisterPair(ebx(1), eax(1), Integer.int64_t()), Call(FunctionSymbol('foo', 0), [RegisterPair(ebx(0), eax(0), Integer.int64_t()), a(1)])), Return([ebx(1)])])))
    task = MockTask(cfg)
    RegisterPairHandling().run(task)
    # Either assignment of the two temporaries to the two pairs is acceptable,
    # hence the disjunction over both orderings.
    assert ((n1.instructions == [Assignment(v0(), UnaryOperation(OperationType.cast, [a(0)])), Assignment(eax(0), BinaryOperation(OperationType.bitwise_and, [v0(), lower_register_mask_32.copy()])), Assignment(ebx(0), BinaryOperation(OperationType.right_shift, [v0(), higher_register_mask_32.copy()])), Assignment(a(1), eax(0)), Assignment(b(0), ebx(0)), Assignment(v1(), Call(FunctionSymbol('foo', 0), [v0(), a(1)])), Assignment(eax(1), BinaryOperation(OperationType.bitwise_and, [v1(), lower_register_mask_32.copy()])), Assignment(ebx(1), BinaryOperation(OperationType.right_shift, [v1(), higher_register_mask_32.copy()])), Return([ebx(1)])]) or (n1.instructions == [Assignment(v1(), UnaryOperation(OperationType.cast, [a(0)])), Assignment(eax(0), BinaryOperation(OperationType.bitwise_and, [v1(), lower_register_mask_32.copy()])), Assignment(ebx(0), BinaryOperation(OperationType.right_shift, [v1(), higher_register_mask_32.copy()])), Assignment(a(1), eax(0)), Assignment(b(0), ebx(0)), Assignment(v0(), Call(FunctionSymbol('foo', 0), [v1(), a(1)])), Assignment(eax(1), BinaryOperation(OperationType.bitwise_and, [v0(), lower_register_mask_32.copy()])), Assignment(ebx(1), BinaryOperation(OperationType.right_shift, [v0(), higher_register_mask_32.copy()])), Return([ebx(1)])]))
class LaunchChannel(ChannelInterface):
mtimes = Signal(str, float, dict)
changed_binaries = Signal(str, dict)
launch_nodes = Signal(str, list)
def __init__(self):
    """Create the channel with empty include caches and launch-arg storage."""
    ChannelInterface.__init__(self)
    self._args_lock = threading.RLock()
    # Cached launch args and per-path include caches.
    self._launch_args = {}
    self._cache_file_includes = {}
    self._cache_file_unique_includes = {}
def clear_cache(self, grpc_path=''):
    """Drop cached include info for *grpc_path*; with no path, wipe everything."""
    if not grpc_path:
        self._cache_file_includes.clear()
        self._cache_file_unique_includes.clear()
    else:
        # pop() with a default replaces the original try/del/except pattern.
        self._cache_file_includes.pop(grpc_path, None)
        self._cache_file_unique_includes.pop(grpc_path, None)
    with self._args_lock:
        self._launch_args.clear()
def get_launch_manager(self, uri='localhost:12321'):
    """Return a (LaunchStub, channel) pair for *uri* over an insecure channel."""
    grpc_channel = self.get_insecure_channel(uri)
    stub = lstub.LaunchStub(grpc_channel)
    return (stub, grpc_channel)
def launch_args(self, grpc_path):
    """Return the args recorded for *grpc_path* at load time ({} when unknown)."""
    with self._args_lock:
        return self._launch_args.get(grpc_path, {})
def _print_inc_file(self, indent, linenr, path, exists, inc_files):
    """Recursively log an include tree; '+' marks existing files, '-' missing ones."""
    rospy.loginfo(('%s %.4d\t%s %s' % ((' ' * indent), linenr, ('+' if exists else '-'), path)))
    for (child_line, child_path, child_exists, child_includes) in inc_files:
        self._print_inc_file((indent + 2), child_line, child_path, child_exists, child_includes)
def get_included_files_set(self, grpc_path='grpc://localhost:12321', recursive=True, include_pattern=[], search_in_ext=[]):
    """Return the unique set of files included by *grpc_path*, cached per path."""
    try:
        return self._cache_file_unique_includes[grpc_path]
    except KeyError:
        pass
    rospy.logdebug(('get_included_files_set for %s, recursive: %s' % (grpc_path, recursive)))
    (uri, path) = nmdurl.split(grpc_path, with_scheme=False)
    (lm, channel) = self.get_launch_manager(uri)
    (url, path) = nmdurl.split(grpc_path, with_scheme=True)
    reply = lm.get_included_files_set(path, recursive, {}, include_pattern, search_in_ext)
    # Rewrite server-relative names into full grpc:// URLs before caching.
    included = [nmdurl.join(url, fname) for fname in reply]
    self._cache_file_unique_includes[grpc_path] = included
    self.close_channel(channel, uri)
    return included
def get_included_files(self, grpc_path='grpc://localhost:12321', recursive=True, include_args={}, include_pattern=[], search_in_ext=[]):
    """Yield include entries for *grpc_path*, serving from cache when possible.

    NOTE(review): the dict/list default arguments are mutable and shared
    across calls — confirm callers never mutate them.
    """
    dorequest = False
    try:
        # Cache hit: replay cached entries; skip nested includes when the
        # caller asked for a non-recursive listing.
        for entry in self._cache_file_includes[grpc_path]:
            do_return = True
            if ((not recursive) and (entry.rec_depth != 0)):
                do_return = False
            if do_return:
                rospy.logdebug(('get_included_files from cache: %s, include_args: %s' % (entry.inc_path, entry.args)))
                (yield entry)
    except KeyError:
        dorequest = True
    if dorequest:
        current_path = grpc_path
        try:
            (uri, path) = nmdurl.split(current_path)
            (lm, channel) = self.get_launch_manager(uri)
            rospy.logdebug(('get_included_files for %s, recursive: %s, include_args: %s, pattern: %s, search_in_ext: %s' % (grpc_path, recursive, include_args, include_pattern, search_in_ext)))
            reply = lm.get_included_files(path, recursive, include_args, include_pattern, search_in_ext)
            (url, _) = nmdurl.split(grpc_path, with_scheme=True)
            if recursive:
                if (grpc_path not in self._cache_file_includes):
                    self._cache_file_includes[grpc_path] = []
            for inc_file in reply:
                # Rewrite server-relative paths into full grpc:// URLs.
                entry = inc_file
                entry.path_or_str = nmdurl.join(url, inc_file.path_or_str)
                entry.inc_path = nmdurl.join(url, inc_file.inc_path)
                if recursive:
                    if (current_path not in self._cache_file_includes):
                        self._cache_file_includes[current_path] = []
                    self._cache_file_includes[current_path].append(entry)
                (yield entry)
        except grpc._channel._Rendezvous as grpc_error:
            # A partially filled cache would be wrong after a failed request.
            self.clear_cache(grpc_path)
            self.clear_cache(current_path)
            if (grpc_error.code() == grpc.StatusCode.DEADLINE_EXCEEDED):
                raise exceptions.GrpcTimeout(grpc_path, grpc_error)
            raise
        finally:
            self.close_channel(channel, uri)
def get_interpreted_path(self, grpc_url_or_path='grpc://localhost:12321', text=[]):
    """Yield (grpc path, exists) for each path the server interprets from *text*."""
    (uri, _) = nmdurl.split(grpc_url_or_path)
    (lm, channel) = self.get_launch_manager(uri)
    rospy.logdebug(('get_interpreted_path in text %s' % text))
    reply = lm.get_interpreted_path(text)
    (url, _) = nmdurl.split(grpc_url_or_path, with_scheme=True)
    for (interpreted_path, exists) in reply:
        # Rewrite the server-relative path into a full grpc:// URL.
        (yield (nmdurl.join(url, interpreted_path), exists))
    self.close_channel(channel, uri)
def load_launch(self, grpc_path, masteruri='', host='', package='', launch='', args={}):
    """Load a launch file on the gRPC server, interactively resolving choices.

    Retries until the server accepts the file: a LaunchSelectionRequest picks
    the last offered file, a ParamSelectionRequest is surfaced to the caller
    as LaunchArgsSelectionRequest, and AlreadyOpenException counts as success.

    Returns:
        (grpc launch file path, resolved args dict)
    """
    (uri, path) = nmdurl.split(grpc_path)
    (lm, channel) = self.get_launch_manager(uri)
    myargs = args
    request_args = True
    nexttry = True
    ok = False
    launch_file = ''
    args_res = {}
    while nexttry:
        try:
            rospy.logdebug(('load launch file on gRPC server: %s' % grpc_path))
            (launch_file, args_res) = lm.load_launch(package, launch, path=path, args=myargs, request_args=request_args, masteruri=masteruri, host=host)
            nexttry = False
            ok = True
        except exceptions.LaunchSelectionRequest as lsr:
            # Multiple files matched: retry with the last offered choice.
            rospy.logwarn(('%s\n ...load the last one!' % lsr))
            path = lsr.choices[(- 1)]
        except exceptions.ParamSelectionRequest as psr:
            # Server needs argument values: surface them to the caller.
            rospy.loginfo(('Params requered for: %s' % [('%s:=%s' % (name, value)) for (name, value) in psr.choices.items()]))
            request_args = False
            myargs = psr.choices
            params = {}
            for (name, value) in psr.choices.items():
                params[name] = {':value': value, ':type': 'string'}
            raise LaunchArgsSelectionRequest(grpc_path, params, 'Needs input for args')
        except exceptions.AlreadyOpenException as aoe:
            # Already loaded on the server: treat as success.
            rospy.logwarn(aoe)
            nexttry = False
            ok = True
            launch_file = aoe.path
        except grpc._channel._Rendezvous as grpc_error:
            if (grpc_error.code() == grpc.StatusCode.DEADLINE_EXCEEDED):
                raise exceptions.GrpcTimeout(grpc_path, grpc_error)
            raise
        finally:
            self.close_channel(channel, uri)
    launch_file = nmdurl.join(('grpc://%s' % uri), launch_file)
    rospy.logdebug((' load launch file result - %s: %s' % (('OK' if ok else 'ERR'), launch_file)))
    with self._args_lock:
        # Remember the resolved args for later launch_args() queries.
        rospy.logdebug(('add args after load %s: %s' % (launch_file, args_res)))
        self._launch_args[launch_file] = args_res
    return (launch_file, args_res)
def reload_launch(self, grpc_path, masteruri=''):
    """Ask the server to reload *grpc_path*; return (launch file, changed nodes)."""
    rospy.logdebug(('reload launch %s' % grpc_path))
    (uri, path) = nmdurl.split(grpc_path)
    (lm, channel) = self.get_launch_manager(uri)
    try:
        (launch_files, changed_nodes) = lm.reload_launch(path, masteruri=masteruri)
        reloaded_file = launch_files[0] if launch_files else ''
        return (reloaded_file, list(changed_nodes))
    except grpc._channel._Rendezvous as grpc_error:
        if (grpc_error.code() == grpc.StatusCode.DEADLINE_EXCEEDED):
            raise exceptions.GrpcTimeout(grpc_path, grpc_error)
        raise
    finally:
        self.close_channel(channel, uri)
def unload_launch(self, grpc_path, masteruri=''):
    """Unload *grpc_path* on the server and forget its cached launch args."""
    rospy.logdebug(('unload launch %s' % grpc_path))
    (uri, path) = nmdurl.split(grpc_path)
    (lm, channel) = self.get_launch_manager(uri)
    launch_file = lm.unload_launch(path, masteruri)
    with self._args_lock:
        if launch_file in self._launch_args:
            rospy.logdebug(('delete args after unload %s' % launch_file))
            self._launch_args.pop(launch_file)
    self.close_channel(channel, uri)
    return launch_file
def get_loaded_files_threaded(self, grpc_path='grpc://localhost:12321'):
    """Fetch the server's loaded files in a background thread."""
    thread_name = ('glft_%s' % grpc_path)
    self._threads.start_thread(thread_name, target=self._get_loaded_files_threaded, args=(grpc_path,))
def _get_loaded_files_threaded(self, grpc_path='grpc://localhost:12321'):
    """Worker for get_loaded_files_threaded: cache the launch args of every
    file currently loaded on the server addressed by *grpc_path*."""
    (uri, path) = nmdurl.split(grpc_path)
    rospy.logdebug(('[thread] get loaded_files from %s' % uri))
    channel = None  # BUG FIX: avoid NameError in `finally` if get_launch_manager raises
    try:
        (lm, channel) = self.get_launch_manager(uri)
        result = lm.get_loaded_files()
        (url, _) = nmdurl.split(grpc_path, with_scheme=True)
        for (_package, path, args, _masteruri, _host) in result:
            with self._args_lock:
                gpath = nmdurl.join(url, path)
                rospy.logdebug(('[thread] add args for %s: %s' % (gpath, args)))
                self._launch_args[gpath] = args
    except Exception:
        import traceback
        print(traceback.format_exc())
    finally:
        if channel is not None:
            self.close_channel(channel, uri)
    if hasattr(self, '_threads'):
        self._threads.finished(('glft_%s' % grpc_path))
def get_mtimes_threaded(self, grpc_path='grpc://localhost:12321'):
    """Fetch modification times for *grpc_path* in a background thread."""
    thread_name = ('gmt_%s' % grpc_path)
    self._threads.start_thread(thread_name, target=self._get_mtimes_threaded, args=(grpc_path,))
def _get_mtimes_threaded(self, grpc_path='grpc://localhost:12321'):
    """Worker for get_mtimes_threaded: emit the mtimes signal with the launch
    file's mtime plus the mtimes of all included files."""
    (uri, path) = nmdurl.split(grpc_path)
    rospy.logdebug(('[thread] get mtimes from %s' % uri))
    channel = None  # BUG FIX: avoid NameError in `finally` if get_launch_manager raises
    try:
        (lm, channel) = self.get_launch_manager(uri)
        (rpath, mtime, included_files) = lm.get_mtimes(path)
        (url, _) = nmdurl.split(grpc_path, with_scheme=True)
        self.mtimes.emit(nmdurl.join(url, rpath), mtime, {nmdurl.join(url, pobj.path): pobj.mtime for pobj in included_files})
    except Exception:
        # Best effort: mtime polling failures are silently ignored.
        pass
    finally:
        if channel is not None:
            self.close_channel(channel, uri)
    if hasattr(self, '_threads'):
        self._threads.finished(('gmt_%s' % grpc_path))
def get_changed_binaries_threaded(self, grpc_url='grpc://localhost:12321', nodes=[]):
    """Check *nodes* for changed binaries in a background thread."""
    thread_name = ('gcbt_%s' % grpc_url)
    self._threads.start_thread(thread_name, target=self._get_changed_binaries_threaded, args=(grpc_url, nodes))
def _get_changed_binaries_threaded(self, grpc_url='grpc://localhost:12321', nodes=None):
    """Worker: ask the daemon which of *nodes* have changed binaries.

    Emits the ``changed_binaries`` signal with (grpc_url, result) where
    result is whatever ``lm.get_changed_binaries`` returns.

    :param list nodes: node names to check; defaults to an empty list.
    """
    if nodes is None:
        nodes = []  # bugfix: avoid a shared mutable default argument
    (uri, _path) = nmdurl.split(grpc_url)
    rospy.logdebug('[thread] get changed binaries from %s' % uri)
    channel = None  # ensure the name exists for the finally block
    try:
        (lm, channel) = self.get_launch_manager(uri)
        nodes = lm.get_changed_binaries(nodes)
        self.changed_binaries.emit(grpc_url, nodes)
    except Exception:
        import traceback
        print(traceback.format_exc())
    finally:
        # bugfix: guard against NameError when get_launch_manager() failed
        if channel is not None:
            self.close_channel(channel, uri)
        if hasattr(self, '_threads'):
            self._threads.finished('gcbt_%s' % grpc_url)
def get_nodes(self, grpc_path='grpc://localhost:12321', masteruri=''):
    """Return the launch descriptions reported by the daemon at *grpc_path*.

    :raises grpc.RpcError: on connection problems; the channel is closed
        in every case.
    """
    (uri, _) = nmdurl.split(grpc_path)
    rospy.logdebug('get nodes from %s' % uri)
    (lm, channel) = self.get_launch_manager(uri)
    try:
        return lm.get_nodes(True, masteruri=masteruri)
    except grpc.RpcError as gerr:
        rospy.logdebug('remove connection %s' % uri)
        raise gerr
    finally:
        self.close_channel(channel, uri)
def get_nodes_threaded(self, grpc_path='grpc://localhost:12321', masteruri=''):
    """Fetch launch descriptions asynchronously; result arrives via ``launch_nodes``."""
    thread_name = 'gn_%s_%s' % (grpc_path, masteruri)
    self._threads.start_thread(thread_name, target=self._get_nodes_threaded, args=(grpc_path, masteruri))
def _get_nodes_threaded(self, grpc_path='grpc://localhost:12321', masteruri=''):
    """Worker: fetch launch descriptions and emit them via ``launch_nodes``.

    On failure the ``error`` signal is emitted instead.
    """
    (uri, _) = nmdurl.split(grpc_path)
    rospy.logdebug('[thread] get nodes from %s' % uri)
    (lm, channel) = self.get_launch_manager(uri)
    try:
        descriptions = lm.get_nodes(True, masteruri=masteruri)
        clean_url = nmdurl.nmduri_from_path(grpc_path)
        # rewrite local paths to grpc URLs before publishing the result
        for description in descriptions:
            description.path = nmdurl.join(clean_url, description.path)
        self.launch_nodes.emit(clean_url, descriptions)
    except Exception as err:
        self.error.emit('_get_nodes', grpc_path, masteruri, err)
    finally:
        self.close_channel(channel, uri)
        if hasattr(self, '_threads'):
            self._threads.finished('gn_%s_%s' % (grpc_path, masteruri))
def start_node(self, name, grpc_path='grpc://localhost:12321', masteruri='', reload_global_param=False, loglevel='', logformat='', path='', cmd_prefix=''):
    """Start the node *name* through the daemon at *grpc_path*.

    :param str path: optional binary path passed as ``opt_binary``.
    :raises BinarySelectionRequest: if several binaries match and the user
        has to pick one.
    :raises grpc.RpcError: on connection problems.
    """
    rospy.loginfo('start node: %s with %s' % (name, grpc_path))
    (uri, opt_launch) = nmdurl.split(grpc_path)
    (lm, channel) = self.get_launch_manager(uri)
    try:
        return lm.start_node(name, opt_binary=path, opt_launch=opt_launch, loglevel=loglevel, logformat=logformat, masteruri=masteruri, reload_global_param=reload_global_param, cmd_prefix=cmd_prefix)
    except grpc.RpcError as gerr:
        rospy.logdebug('remove connection %s' % uri)
        raise gerr
    except exceptions.BinarySelectionRequest as bsr:
        rospy.loginfo('Question while start node: %s' % bsr.error)
        binaries = bsr.choices
        raise BinarySelectionRequest(binaries, 'Needs binary selection')
    # cleanup: removed a no-op "except Exception as err: raise err" clause;
    # other exceptions propagate unchanged and the channel still closes.
    finally:
        self.close_channel(channel, uri)
def start_standalone_node(self, grpc_url, package, binary, name, ns, args=None, env=None, masteruri=None, host=None):
    """Start a node outside of any launch file through the remote daemon.

    :param list args: command line arguments (defaults to empty list).
    :param dict env: extra environment variables (defaults to empty dict).
    :raises BinarySelectionRequest: if several binaries match.
    :raises grpc.RpcError: on connection problems.
    """
    rospy.loginfo('start standalone node: %s on %s' % (name, grpc_url))
    (uri, _) = nmdurl.split(grpc_url)
    (lm, channel) = self.get_launch_manager(uri)
    try:
        startcfg = StartConfig(package, binary)
        startcfg.name = name
        startcfg.namespace = ns
        startcfg.fullname = rospy.names.ns_join(ns, name)
        startcfg.prefix = ''
        startcfg.cwd = ''
        # bugfix: `args=[]`/`env={}` were shared mutable default arguments
        startcfg.env = (env if env is not None else {})
        startcfg.remaps = {}
        startcfg.params = {}
        startcfg.clear_params = []
        startcfg.args = (args if args is not None else [])
        startcfg.masteruri = masteruri
        startcfg.host = host
        startcfg.loglevel = ''
        startcfg.logformat = ''
        startcfg.respawn = False
        startcfg.respawn_delay = 30
        startcfg.respawn_max = 0
        startcfg.respawn_min_runtime = 0
        return lm.start_standalone_node(startcfg)
    except grpc.RpcError as err:
        rospy.logdebug('remove connection %s' % uri)
        raise err
    except exceptions.BinarySelectionRequest as bsr:
        rospy.loginfo('Question while start node: %s' % bsr.error)
        raise BinarySelectionRequest(bsr.choices, 'Needs binary selection')
    finally:
        self.close_channel(channel, uri)
def get_start_cfg(self, name, grpc_path='grpc://localhost:12321', masteruri='', reload_global_param=False, loglevel='', logformat=''):
    """Return the start configuration for node *name* from the daemon.

    Exceptions from the remote call propagate unchanged; the grpc channel
    is always closed.
    """
    rospy.logdebug("get start configuration for '%s' from %s" % (name, grpc_path))
    (uri, _) = nmdurl.split(grpc_path)
    (lm, channel) = self.get_launch_manager(uri)
    try:
        return lm.get_start_cfg(name, loglevel=loglevel, logformat=logformat, masteruri=masteruri, reload_global_param=reload_global_param)
    finally:
        # cleanup: removed a no-op "except Exception as err: raise err"
        self.close_channel(channel, uri)
def reset_package_path_threaded(self, grpc_path='grpc://localhost:12321'):
    """Trigger a package path reset on the remote daemon in a worker thread."""
    self._threads.start_thread('rpp_%s' % grpc_path, target=self._reset_package_path, args=(grpc_path,))
def _reset_package_path(self, grpc_path='grpc://localhost:12321'):
    """Worker: ask the daemon to rescan its ROS package path.

    Failures are reported through the ``error`` signal.
    """
    (uri, _) = nmdurl.split(grpc_path)
    rospy.logdebug('[thread] reset package path on %s' % uri)
    (lm, channel) = self.get_launch_manager(uri)
    try:
        lm.reset_package_path()
    except Exception as err:
        self.error.emit('_reset_package_path', grpc_path, '', err)
    finally:
        self.close_channel(channel, uri)
        if hasattr(self, '_threads'):
            self._threads.finished('rpp_%s' % grpc_path)
def metadata_manager_with_teardown(jp_environ):
    """Yield a MetadataManager for the component-catalog schemaspace.

    On teardown, best-effort removal of the test catalog entry created by
    the test; a missing entry is silently ignored.
    """
    manager = MetadataManager(schemaspace=ComponentCatalogs.COMPONENT_CATALOGS_SCHEMASPACE_ID)
    yield manager
    # teardown: drop the catalog if the test left it behind
    try:
        if manager.get(TEST_CATALOG_NAME):
            manager.remove(TEST_CATALOG_NAME)
    except Exception:
        pass
class ConnectionPool():
    """Borg-style PostgreSQL connection pool.

    All instances share ``_shared_state``, so the underlying psycopg2
    ThreadedConnectionPool is created only once per process.
    """
    _shared_state = {}
    _db_max_connections = 50

    def __init__(self):
        # Borg pattern: every instance shares one attribute dict.
        self.__dict__ = self._shared_state
        if hasattr(self, 'connected'):
            return
        print('Database connection pool init!')
        try:
            # first try a local (socket) connection
            self.dbPool = psycopg2.pool.ThreadedConnectionPool(1, self._db_max_connections, dbname=settings.DATABASE_DB_NAME, user=settings.DATABASE_USER, password=settings.DATABASE_PASS)
        except psycopg2.OperationalError:
            # fall back to a TCP connection to the configured host
            self.dbPool = psycopg2.pool.ThreadedConnectionPool(1, self._db_max_connections, host=settings.DATABASE_IP, dbname=settings.DATABASE_DB_NAME, user=settings.DATABASE_USER, password=settings.DATABASE_PASS)
        self.connected = True

    def getconn(self):
        """Borrow a connection from the pool."""
        return self.dbPool.getconn()

    def putconn(self, conn):
        """Return a previously borrowed connection to the pool."""
        self.dbPool.putconn(conn)
def test_get_local_addr_with_socket():
    """get_local_addr reports sockname for INET/INET6/UNIX, None otherwise."""
    ipx = MockTransport({'socket': MockSocket(family=socket.AF_IPX)})
    assert get_local_addr(ipx) is None
    v6 = MockTransport({'socket': MockSocket(family=socket.AF_INET6, sockname=('::1', 123))})
    assert get_local_addr(v6) == ('::1', 123)
    v4 = MockTransport({'socket': MockSocket(family=socket.AF_INET, sockname=('123.45.6.7', 123))})
    assert get_local_addr(v4) == ('123.45.6.7', 123)
    # AF_UNIX does not exist on all platforms (e.g. Windows)
    if hasattr(socket, 'AF_UNIX'):
        unix = MockTransport({'socket': MockSocket(family=socket.AF_UNIX, sockname=('127.0.0.1', 8000))})
        assert get_local_addr(unix) == ('127.0.0.1', 8000)
def get_link_tag(box):
    """Compute drawing primitives for a "link" icon inside *box*.

    The box (x, y, w, h) is shrunk by w/8 on every side; two equal squares
    and a horizontal connector line are laid out inside the shrunken box.

    Returns (square_left, square_right, shift, line_start, line_end) where
    the squares are (x, y, w, h) arrays and the line endpoints are (x, y).
    """
    (x, y, w, h) = box
    shift = w / 8
    sx = x + shift
    sy = y + shift
    sw = w - 2 * shift
    sh = h - 2 * shift
    half = sw / 2
    # vertical position that centers a square of side `half` in the box
    sq_y = sy + (sh - half) / 2
    square_left = np.array((sx, sq_y, half, half))
    square_right = np.array((sx + half, sq_y, half, half))
    mid_y = sy + sh / 2
    line_start = np.array((sx + sw / 4, mid_y))
    line_end = np.array((sx + 3 * sw / 4, mid_y))
    return (square_left, square_right, shift, line_start, line_end)
def _clean_header(header: HeaderType) -> Dict[(str, Any)]:
    """Normalize a header specification into a Swagger-style dict.

    A bare string becomes ``{'description': str}``. The 'type' entry is
    mapped through PY_TYPES; a one-element list/tuple of a known Python
    type becomes an 'array' with typed 'items'; objects exposing
    ``__schema__`` are merged in; anything else is kept as-is.
    """
    if isinstance(header, str):
        header = {'description': header}
    typedef = header.get('type', 'string')
    if typedef in PY_TYPES:
        header['type'] = PY_TYPES[typedef]
    elif isinstance(typedef, (list, tuple)) and len(typedef) == 1 and typedef[0] in PY_TYPES:
        header['type'] = 'array'
        header['items'] = {'type': PY_TYPES[typedef[0]]}
    elif hasattr(typedef, '__schema__'):
        header.update(typedef.__schema__)
    else:
        header['type'] = typedef
    return not_none(header)
class OptionSeriesBubbleLabelStyle(Options):
    # NOTE(review): the same-name method pairs below are presumably
    # @property getters and @<name>.setter setters whose decorators were
    # stripped from this copy — confirm against the upstream file.
    def fontSize(self):
        # Default label font size is '0.8em'.
        return self._config_get('0.8em')
    def fontSize(self, num: float):
        # Set the label font size.
        self._config(num, js_type=False)
    def fontWeight(self):
        # Default label font weight is 'bold'.
        return self._config_get('bold')
    def fontWeight(self, text: str):
        # Set the label font weight.
        self._config(text, js_type=False)
class FileThread(Thread):
    """Background thread that drains ``record.namelist`` into the video writer.

    Each queued image file is read, appended to ``record.videoWriter`` and
    deleted. The loop terminates once the queue is empty and
    ``record.recorded2`` signals that recording has finished.
    """

    def __init__(self):
        super().__init__()

    def run(self):
        while True:
            if record.namelist:
                frame_path = record.namelist.pop(0)
                frame = cv2.imread(frame_path)
                record.videoWriter.write(frame)
                os.remove(frame_path)
            if (not record.namelist) and record.recorded2:
                print('')
                record.videoWriter.release()
                break
def _date_fix(tags):
tags = copy.deepcopy(tags)
for t in ['__date_added', '__last_played', '__modified']:
if (t in tags):
try:
dt = datetime.datetime.fromtimestamp(tags[t])
tags[t] = dt.strftime('%Y-%m-%d %H:%M:%S')
except:
pass
return tags |
# NOTE(review): the leading ".parametrize(...)" line is a stripped
# "@pytest.mark.parametrize" decorator — restore the "@pytest.mark" prefix
# when merging this back.
.parametrize('type_str, encoder_class, decoder_class', (('address', encoding.AddressEncoder, decoding.AddressDecoder), ('bool', encoding.BooleanEncoder, decoding.BooleanDecoder), ('bytes12', encoding.BytesEncoder, decoding.BytesDecoder), ('function', encoding.BytesEncoder, decoding.BytesDecoder), ('bytes', encoding.ByteStringEncoder, decoding.ByteStringDecoder), ('int', encoding.SignedIntegerEncoder, decoding.SignedIntegerDecoder), ('int128', encoding.SignedIntegerEncoder, decoding.SignedIntegerDecoder), ('string', encoding.TextStringEncoder, decoding.StringDecoder), ('uint', encoding.UnsignedIntegerEncoder, decoding.UnsignedIntegerDecoder), ('uint8', encoding.UnsignedIntegerEncoder, decoding.UnsignedIntegerDecoder), ('int[]', encoding.DynamicArrayEncoder, decoding.DynamicArrayDecoder), ('int[2]', encoding.SizedArrayEncoder, decoding.SizedArrayDecoder), ('int[2][]', encoding.DynamicArrayEncoder, decoding.DynamicArrayDecoder), ('int[][2]', encoding.SizedArrayEncoder, decoding.SizedArrayDecoder)))
def test_default_registry_gives_correct_basic_coders(type_str, encoder_class, decoder_class):
    # The default registry must resolve each ABI type string to the
    # expected encoder and decoder classes.
    assert isinstance(default_registry.get_encoder(type_str), encoder_class)
    assert isinstance(default_registry.get_decoder(type_str), decoder_class)
def get_ec2_ondemand_instances_prices(filter_region=None, filter_instance_type=None, filter_instance_type_pattern=None, filter_os_type=None, use_cache=False, cache_class=SimpleResultsCache):
    """Return on-demand EC2 prices aggregated across all supported OS feeds.

    Delegates filtering and caching to :func:`get_ec2_instances_prices`.
    """
    os_urls = [
        INSTANCES_ON_DEMAND_LINUX_URL,
        INSTANCES_ON_DEMAND_RHEL_URL,
        INSTANCES_ON_DEMAND_SLES_URL,
        INSTANCES_ON_DEMAND_WINDOWS_URL,
        INSTANCES_ON_DEMAND_WINSQL_URL,
        INSTANCES_ON_DEMAND_WINSQLWEB_URL,
    ]
    return get_ec2_instances_prices(os_urls, 'ondemand', filter_region, filter_instance_type, filter_instance_type_pattern, filter_os_type, use_cache, cache_class)
class DomoTestClient():
    """Thin REST client used by tests to create/fetch/delete Domo users."""
    def __init__(self, domo_token, domo_connection_config: ConnectionConfig):
        self.domo_secrets = domo_connection_config.secrets
        self.headers = {'Content-Type': 'application/json', 'Authorization': f'Bearer {domo_token}'}
        # NOTE(review): the next line is truncated in this copy — the Domo
        # API base URL literal lost its value and closing quote; restore it
        # from the upstream file.
        self.base_url = f"
    def create_user(self, email_address: str) -> Response:
        # POST /users with a synthetic profile; invitation e-mails suppressed.
        body = {'email': email_address, 'alternateEmail': email_address, 'name': f'test_connector_ethyca', 'phone': faker.phone_number(), 'title': 'Software Engineer', 'role': 'Participant'}
        url = f'{self.base_url}/users?sendInvite=false'
        user_response: Response = requests.post(url=url, json=body, headers=self.headers)
        return user_response
    def get_user(self, user_id: str) -> Response:
        # GET a single user by id.
        url = f'{self.base_url}/users/{user_id}'
        user_response: Response = requests.get(url=url, headers=self.headers)
        return user_response
    def delete_user(self, user_id) -> Response:
        # DELETE a user by id.
        url = f'{self.base_url}/users/{user_id}'
        user_response: Response = requests.delete(url=url, headers=self.headers)
        return user_response
class WebDavXmlUtils():
    """Builders and parsers for WebDAV XML request/response bodies.

    NOTE(review): the methods below take no ``self`` and are invoked as
    ``WebDavXmlUtils.<method>(...)`` — presumably ``@staticmethod``
    decorators were stripped from this copy; confirm against upstream.
    """
    def __init__(self):
        pass
    def parse_get_list_info_response(content):
        """Parse a PROPFIND multistatus into info dicts (props + isdir + path)."""
        try:
            tree = etree.fromstring(content)
            infos = []
            for response in tree.findall('.//{DAV:}response'):
                href_el = next(iter(response.findall('.//{DAV:}href')), None)
                if (href_el is None):
                    # malformed entry without an href: skip it
                    continue
                path = unquote(urlsplit(href_el.text).path)
                info = dict()
                is_dir = (len(response.findall('.//{DAV:}collection')) > 0)
                info = WebDavXmlUtils.get_info_from_response(response)
                info['isdir'] = is_dir
                info['path'] = path
                infos.append(info)
            return infos
        except etree.XMLSyntaxError:
            # unparsable body: report no entries
            return list()
    def parse_get_list_response(content):
        """Parse a PROPFIND multistatus into a list of Urn objects."""
        try:
            tree = etree.fromstring(content)
            urns = []
            for response in tree.findall('.//{DAV:}response'):
                href_el = next(iter(response.findall('.//{DAV:}href')), None)
                if (href_el is None):
                    continue
                href = (Urn.separate + unquote(urlsplit(href_el.text).path))
                is_dir = (len(response.findall('.//{DAV:}collection')) > 0)
                urns.append(Urn(href, is_dir))
            return urns
        except etree.XMLSyntaxError:
            return list()
    def create_free_space_request_content():
        """Build the PROPFIND body asking for quota available/used bytes."""
        root = etree.Element('propfind', xmlns='DAV:')
        prop = etree.SubElement(root, 'prop')
        etree.SubElement(prop, 'quota-available-bytes')
        etree.SubElement(prop, 'quota-used-bytes')
        tree = etree.ElementTree(root)
        return WebDavXmlUtils.etree_to_string(tree)
    def parse_free_space_response(content, hostname):
        """Extract quota-available-bytes; raise MethodNotSupported if absent."""
        try:
            tree = etree.fromstring(content)
            node = tree.find('.//{DAV:}quota-available-bytes')
            if (node is not None):
                return int(node.text)
            else:
                raise MethodNotSupported(name='free', server=hostname)
        except TypeError:
            # int(None): the element had no text content
            raise MethodNotSupported(name='free', server=hostname)
        except etree.XMLSyntaxError:
            return str()
    def get_info_from_response(response):
        """Collect the standard DAV properties of one response element."""
        find_attributes = {'created': './/{DAV:}creationdate', 'name': './/{DAV:}displayname', 'size': './/{DAV:}getcontentlength', 'modified': './/{DAV:}getlastmodified', 'etag': './/{DAV:}getetag', 'content_type': './/{DAV:}getcontenttype'}
        info = dict()
        for (name, value) in find_attributes.items():
            info[name] = response.findtext(value)
        return info
    def parse_info_response(content, path, hostname):
        """Return the property dict of *path* from a PROPFIND response."""
        response = WebDavXmlUtils.extract_response_for_path(content=content, path=path, hostname=hostname)
        return WebDavXmlUtils.get_info_from_response(response)
    def parse_is_dir_response(content, path, hostname):
        """Return True when *path*'s resourcetype contains a DAV collection."""
        response = WebDavXmlUtils.extract_response_for_path(content=content, path=path, hostname=hostname)
        resource_type = response.find('.//{DAV:}resourcetype')
        if (resource_type is None):
            raise MethodNotSupported(name='is_dir', server=hostname)
        dir_type = resource_type.find('{DAV:}collection')
        return (True if (dir_type is not None) else False)
    def create_get_property_request_content(option):
        """Build the PROPFIND body requesting one named property."""
        root = etree.Element('propfind', xmlns='DAV:')
        prop = etree.SubElement(root, 'prop')
        etree.SubElement(prop, option.get('name', ''), xmlns=option.get('namespace', ''))
        tree = etree.ElementTree(root)
        return WebDavXmlUtils.etree_to_string(tree)
    def parse_get_property_response(content, name):
        """Return the text of property *name* from the response body."""
        tree = etree.fromstring(content)
        return tree.xpath('//*[local-name() = $name]', name=name)[0].text
    def create_set_property_batch_request_content(options):
        """Build a PROPPATCH body setting every property in *options*."""
        root_node = etree.Element('propertyupdate', xmlns='DAV:')
        set_node = etree.SubElement(root_node, 'set')
        prop_node = etree.SubElement(set_node, 'prop')
        for option in options:
            opt_node = etree.SubElement(prop_node, option['name'], xmlns=option.get('namespace', ''))
            opt_node.text = option.get('value', '')
        tree = etree.ElementTree(root_node)
        return WebDavXmlUtils.etree_to_string(tree)
    def etree_to_string(tree):
        """Serialize an ElementTree to UTF-8 bytes with an XML declaration."""
        buff = BytesIO()
        tree.write(buff, xml_declaration=True, encoding='UTF-8')
        return buff.getvalue()
    def extract_response_for_path(content, path, hostname):
        """Find the multistatus response whose href matches *path*.

        The href is compared both as-is and with the hostname's path prefix
        stripped. Raises RemoteResourceNotFound when no entry matches and
        MethodNotSupported when the body is not parseable XML.
        """
        prefix = urlparse(hostname).path
        try:
            tree = etree.fromstring(content)
            responses = tree.findall('{DAV:}response')
            n_path = Urn.normalize_path(path)
            for resp in responses:
                href = resp.findtext('{DAV:}href')
                if (Urn.compare_path(n_path, href) is True):
                    return resp
                href_without_prefix = (href[len(prefix):] if href.startswith(prefix) else href)
                if (Urn.compare_path(n_path, href_without_prefix) is True):
                    return resp
            raise RemoteResourceNotFound(path)
        except etree.XMLSyntaxError:
            raise MethodNotSupported(name='is_dir', server=hostname)
# NOTE(review): the leading "(help=...)" line is a stripped "@task(help=...)"
# invoke decorator — restore the "@task" prefix when merging this back.
(help=dict(clean='clear the doc output; start fresh', build='build html docs', show='show the docs in the browser.'))
def docs(ctx, clean=False, build=False, show=False, **kwargs):
    # Invoke task: at least one of --clean/--build/--show must be given.
    if (not (clean or build or show)):
        sys.exit('Task "docs" must be called with --clean, --build or --show')
    if clean:
        sphinx_clean(DOC_BUILD_DIR)
    if build:
        sphinx_build(DOC_DIR, DOC_BUILD_DIR)
    if show:
        sphinx_show(os.path.join(DOC_BUILD_DIR, 'html'))
# NOTE(review): the leading "(params=...)" line is a stripped
# "@pytest.fixture(params=...)" decorator — restore when merging this back.
(params=['foo', '$key', '$value'])
def initquery(request):
    # Build a query on the 'foo' reference ordered by the parametrized key
    # ('$key', '$value', or a child name); returns (query, param).
    ref = db.Reference(path='foo')
    if (request.param == '$key'):
        return (ref.order_by_key(), request.param)
    if (request.param == '$value'):
        return (ref.order_by_value(), request.param)
    return (ref.order_by_child(request.param), request.param)
class OSDisplay(BaseDisplay):
    """Display backend backed by an OpenSesame canvas."""

    def __init__(self, **args):
        # Copy the documentation from the base class. This is cosmetic,
        # so failures are ignored — but no longer via a bare `except:`,
        # which also swallowed KeyboardInterrupt/SystemExit.
        try:
            copy_docstr(BaseDisplay, OSDisplay)
        except Exception:
            pass
        self.experiment = settings.osexperiment
        self.canvas = canvas(self.experiment)
        self.dispsize = self.experiment.resolution()

    def show(self):
        """Flip the current canvas onto the screen; returns its result."""
        return self.canvas.show()

    def show_part(self, rect, screen=None):
        """Partial updates are not supported: shows the full canvas."""
        return self.canvas.show()

    def fill(self, screen=None):
        """Replace the active canvas with *screen*'s canvas, or a fresh one."""
        # idiom fix: `screen != None` -> `screen is not None`
        if screen is not None:
            self.canvas = screen.canvas
        else:
            self.canvas = canvas(self.experiment)

    def close(self):
        """Nothing to release for this backend."""
        pass
class MedicationRequest(ServiceRequestController):
    """Frappe controller for the 'Medication Request' healthcare doctype."""
    def on_update_after_submit(self):
        # Re-check billing limits whenever a submitted request is updated.
        self.validate_invoiced_qty()
    def set_title(self):
        # Keep titles supplied during data import; otherwise derive the
        # title from patient name and medication.
        if (frappe.flags.in_import and self.title):
            return
        self.title = f'{self.patient_name} - {self.medication}'
    def before_insert(self):
        self.calculate_total_dispensable_quantity()
        self.status = 'Draft'
        # Amending a submitted request marks the original as replaced.
        if self.amended_from:
            frappe.db.set_value('Medication Request', self.amended_from, 'status', 'Replaced')
    def set_order_details(self):
        """Fill item, staff role, intent and priority from the Medication doc."""
        if (not self.medication):
            frappe.throw(_('Medication is mandatory to create Medication Request'), title=_('Missing Mandatory Fields'))
        medication = frappe.get_doc('Medication', self.medication)
        self.item_code = medication.get('item')
        if ((not self.staff_role) and medication.get('staff_role')):
            self.staff_role = medication.staff_role
        if (not self.intent):
            self.intent = frappe.db.get_single_value('Healthcare Settings', 'default_intent')
        if (not self.priority):
            self.priority = frappe.db.get_single_value('Healthcare Settings', 'default_priority')
    def calculate_total_dispensable_quantity(self):
        # total = initial quantity + one refill quantity per allowed repeat
        if self.number_of_repeats_allowed:
            self.total_dispensable_quantity = (self.quantity + (self.number_of_repeats_allowed * self.quantity))
        else:
            self.total_dispensable_quantity = self.quantity
    def update_invoice_details(self, qty):
        """Add *qty* to the invoiced count and refresh the billing status.

        A negative *qty* reverses a previous billing.
        """
        qty_invoiced = (self.qty_invoiced + qty)
        if (qty_invoiced == 0):
            status = 'Pending'
        # NOTE(review): the chain below is NOT an elif — when qty_invoiced
        # is 0 it still runs and generally overwrites 'Pending' with
        # 'Partly Invoiced'; confirm whether that is intended.
        if (self.number_of_repeats_allowed and self.total_dispensable_quantity):
            if (qty_invoiced < self.total_dispensable_quantity):
                status = 'Partly Invoiced'
            else:
                status = 'Invoiced'
        elif (qty_invoiced < self.quantity):
            status = 'Partly Invoiced'
        else:
            status = 'Invoiced'
        medication_request_doc = frappe.get_doc('Medication Request', self.name)
        medication_request_doc.qty_invoiced = qty_invoiced
        medication_request_doc.billing_status = status
        medication_request_doc.save(ignore_permissions=True)
    def validate_invoiced_qty(self):
        # Block saving when more than the dispensable total has been billed.
        if (self.qty_invoiced > self.total_dispensable_quantity):
            frappe.throw(_('Maximum billable quantity exceeded by {0}').format(frappe.bold((self.qty_invoiced - self.total_dispensable_quantity))), title=_('Maximum Quantity Exceeded'))
def _fullscreen_to_file(filename: Union[(os.PathLike, str)]) -> None:
    """Take a full-screen screenshot via the DBus screenshot interface.

    :param filename: target file the screenshot service writes to.
    :raises ModuleNotFoundError: when QtDBus is not available.
    """
    if not QtDBus:
        raise ModuleNotFoundError('QtDBUS not available.')
    screenshot_interface = _get_screenshot_interface()
    if screenshot_interface.isValid():
        result = screenshot_interface.call('Screenshot', True, False, filename)
        if result.errorName():
            # bugfix: message was a copy/paste leftover ("Failed move Window!")
            logger.error('Failed to take screenshot!')
            logger.error(result.errorMessage())
    else:
        logger.error('Invalid dbus interface')
def post_upgrade(from_ver, to_ver, bench_path='.'):
    """Regenerate service configuration after a bench version upgrade.

    When the bench is configured for production
    (``restart_supervisor_on_update``), the redis/supervisor/nginx configs
    are rewritten and the user is told how to reload the services.
    """
    from bench.bench import Bench
    from bench.config import redis
    from bench.config.nginx import make_nginx_conf
    from bench.config.supervisor import generate_supervisor_config
    conf = Bench(bench_path).conf
    banner = '-' * 80
    print(banner + f'Your bench was upgraded to version {to_ver}')
    if conf.get('restart_supervisor_on_update'):
        redis.generate_config(bench_path=bench_path)
        generate_supervisor_config(bench_path=bench_path)
        make_nginx_conf(bench_path=bench_path)
        print('As you have setup your bench for production, you will have to reload configuration for nginx and supervisor. To complete the migration, please run the following commands:\nsudo service nginx restart\nsudo supervisorctl reload')
class DataCandle(DataChart):
    # NOTE(review): the paired same-name methods below are presumably
    # @property getters and @<name>.setter setters whose decorators were
    # stripped from this copy — confirm against the upstream file.
    def close(self):
        # Close price of the candle.
        return self._attrs['close']
    def close(self, val):
        self._attrs['close'] = val
    def high(self):
        # High price of the candle.
        return self._attrs['high']
    def high(self, val):
        self._attrs['high'] = val
    def low(self):
        # Low price of the candle.
        return self._attrs['low']
    def low(self, val):
        self._attrs['low'] = val
    def open(self):
        # Open price of the candle.
        return self._attrs['open']
    def open(self, val):
        self._attrs['open'] = val
    def increasing(self):
        # Style options applied to rising candles.
        return self.sub_data('increasing', DataMove)
    def decreasing(self):
        # Style options applied to falling candles.
        return self.sub_data('decreasing', DataMove)
def usage():
    """Print the vthunting command line help to stdout."""
    lines = [
        'usage: vthunting.py [OPTION]',
        ' -h, --help Print this help\n -r, --report Print the VT hunting report\n -s, --slack_report Send the report to a Slack channel\n -e, --email_report Send the report by email\n -t, --telegram_report Send the report to Telegram\n -m, --teams_report Send the report to Microsoft Teams\n -j, --json Print report in json format\n ',
    ]
    print('\n'.join(lines))
class SnmpSecurityLevel(TextualConvention, Integer32):
    # Generated pysnmp TextualConvention for the SnmpSecurityLevel type:
    # an Integer32 restricted to noAuthNoPriv(1) < authNoPriv(2) < authPriv(3).
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec
    subtypeSpec += ConstraintsUnion(SingleValueConstraint(*(1, 2, 3)))
    namedValues = NamedValues(*(('authNoPriv', 2), ('authPriv', 3), ('noAuthNoPriv', 1)))
    # The description text is only attached when the MIB builder was asked
    # to load texts (saves memory otherwise).
    if mibBuilder.loadTexts:
        description = 'A Level of Security at which SNMP messages can be sent or with which operations\nare being processed; in particular, one of: noAuthNoPriv - without\nauthentication and without privacy, authNoPriv - with authentication but\nwithout privacy, authPriv - with authentication and with privacy. These three\nvalues are ordered such that noAuthNoPriv is less than authNoPriv and\nauthNoPriv is less than authPriv.\n'
class CombineTest(unittest.TestCase):
    """Exercise the Event combinators: merge/switch/concat/chain/zip."""

    def test_merge(self):
        first = Event.sequence(array1, interval=0.01)
        second = Event.sequence(array2, interval=0.01).delay(0.001)
        merged = first.merge(second)
        # merged values interleave one element from each source at a time
        expected = [value for pair in zip(array1, array2) for value in pair]
        self.assertEqual(merged.run(), expected)

    def test_switch(self):
        first = Event.sequence(array1, interval=0.01)
        second = Event.sequence(array2, interval=0.01).delay(0.001)
        third = Event.sequence(array3, interval=0.01).delay(0.002)
        switched = first.switch(second, third, second)
        self.assertEqual(switched.run(), [0, 100] + array3)

    def test_concat(self):
        first = Event.sequence(array1, interval=0.02)
        second = Event.sequence(array2, interval=0.02).delay(0.07)
        joined = first.concat(second)
        self.assertEqual(joined.run(), [0, 1, 2, 3, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109])

    def test_chain(self):
        first = Event.sequence(array1, interval=0.01)
        second = Event.sequence(array2, interval=0.01).delay(0.001)
        chained = first.chain(second, first)
        self.assertEqual(chained.run(), array1 + array2 + array1)

    def test_zip(self):
        first = Event.sequence(array1)
        second = Event.sequence(array2).delay(0.001)
        zipped = first.zip(second)
        self.assertEqual(zipped.run(), list(zip(array1, array2)))

    def test_zip_self(self):
        source = Event.sequence(array1)
        zipped = source.zip(source)
        self.assertEqual(zipped.run(), list(zip(array1, array1)))

    def test_ziplatest(self):
        first = Event.sequence([0, 1], interval=0.01)
        second = Event.sequence([2, 3], interval=0.01).delay(0.001)
        latest = first.ziplatest(second)
        self.assertEqual(latest.run(), [(0, Event.NO_VALUE), (0, 2), (1, 2), (1, 3)])
def extractUnderworldtranslateWordpressCom(item):
    """Feed parser for 'underworldtranslate.wordpress.com' release items.

    Returns a release message for recognized tags, None for previews or
    items without volume/chapter info, and False when no tag matched.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestSeriesFrameHist(TestData):
    """Compare eland's Series._hist output against numpy/pandas references."""
    def test_flight_delay_min_hist(self):
        # eland bins/weights must match np.histogram on the pandas data
        pd_flights = self.pd_flights()
        ed_flights = self.ed_flights()
        num_bins = 10
        pd_flightdelaymin = np.histogram(pd_flights['FlightDelayMin'], num_bins)
        pd_bins = pd.DataFrame({'FlightDelayMin': pd_flightdelaymin[1]})
        pd_weights = pd.DataFrame({'FlightDelayMin': pd_flightdelaymin[0]})
        (ed_bins, ed_weights) = ed_flights['FlightDelayMin']._hist(num_bins=num_bins)
        # NOTE(review): debug print left in — consider removing.
        print(pd_bins, ed_bins)
        assert_frame_equal(pd_bins, ed_bins, check_exact=False)
        assert_frame_equal(pd_weights, ed_weights, check_exact=False)
    def test_filtered_hist(self):
        # same comparison, restricted to the FlightDelay == True subset
        pd_flights = self.pd_flights()
        ed_flights = self.ed_flights()
        num_bins = 10
        pd_filteredhist = np.histogram(pd_flights[(pd_flights.FlightDelay == True)].FlightDelayMin, num_bins)
        pd_bins = pd.DataFrame({'FlightDelayMin': pd_filteredhist[1]})
        pd_weights = pd.DataFrame({'FlightDelayMin': pd_filteredhist[0]})
        d = ed_flights[(ed_flights.FlightDelay == True)].FlightDelayMin
        # NOTE(review): debug print left in — consider removing.
        print(d.es_info())
        (ed_bins, ed_weights) = ed_flights[(ed_flights.FlightDelay == True)].FlightDelayMin._hist(num_bins=num_bins)
        assert_frame_equal(pd_bins, ed_bins, check_exact=False)
        assert_frame_equal(pd_weights, ed_weights, check_exact=False)
    def test_invalid_hist(self):
        # hist() on this field is expected to raise ValueError
        with pytest.raises(ValueError):
            assert self.ed_ecommerce()['products.tax_amount'].hist()
def check_old_links(app):
    """Warn when an app hosted on a mainstream forge still links to dead sites.

    Yields one message per metadata field that references gitorious or
    Google Code; yields nothing for apps not hosted on a known forge.
    """
    usual_sites = ['github.com', 'gitlab.com', 'bitbucket.org']
    old_sites = ['gitorious.org', 'code.google.com']
    if not any(site in app.Repo for site in usual_sites):
        return
    for field in ['WebSite', 'SourceCode', 'IssueTracker', 'Changelog']:
        value = app.get(field)
        if any(site in value for site in old_sites):
            yield _("App is in '{repo}' but has a link to {url}").format(repo=app.Repo, url=value)
def test_version_parsing():
    """Every known PE export stamp must parse into a fully populated version."""
    for (stamp, version_string) in version.PE_EXPORT_STAMP_TO_VERSION.items():
        parsed = version.BeaconVersion(version_string)
        assert int(stamp)
        assert parsed.date
        assert parsed.tuple
        assert parsed.version
def test_get_contract_factory_with_valid_escrow_manifest(w3):
    """Deploy SafeSendLib, link it into Escrow, deploy and sanity-check it."""
    manifest = get_ethpm_spec_manifest('escrow', 'v3.json')
    package = w3.pm.get_package_from_manifest(manifest)
    escrow_factory = package.get_contract_factory('Escrow')
    assert escrow_factory.needs_bytecode_linking
    # deploy the library the escrow bytecode links against
    library_factory = package.get_contract_factory('SafeSendLib')
    library_tx = library_factory.constructor().transact()
    library_receipt = w3.eth.wait_for_transaction_receipt(library_tx)
    linked_factory = escrow_factory.link_bytecode({'SafeSendLib': library_receipt.contractAddress})
    assert linked_factory.needs_bytecode_linking is False
    # deploy the linked escrow contract and verify its stored sender
    escrow_tx = linked_factory.constructor(w3.eth.accounts[0]).transact()
    escrow_receipt = w3.eth.wait_for_transaction_receipt(escrow_tx)
    escrow_instance = linked_factory(address=escrow_receipt.contractAddress)
    assert escrow_instance.functions.sender().call() == w3.eth.accounts[0]
def test_nl_tagger_return_char(NLP):
    """Whitespace-only tokens (including CRLF runs) must be tagged SPACE."""
    text = 'hi Aaron,\r\n\r\nHow is your schedule today, I was wondering if you had time for a phone\r\ncall this afternoon?\r\n\r\n\r\n'
    doc = NLP(text)
    for token in (t for t in doc if t.is_space):
        assert token.pos == SPACE
    fourth = doc[3]
    assert fourth.text == '\r\n\r\n'
    assert fourth.is_space
    assert fourth.pos == SPACE
def test_chat(browser, clear_log):
    """End-to-end chat flow over two websocket clients.

    Connects both clients, exchanges direct messages in each direction,
    then verifies that /quit closes each session. Factored the repeated
    type-click-assert sequence into a helper and a loop.
    """

    def send_as(input_sel, button_sel, command):
        # type a command into one client's input box and submit it
        browser.type(input_sel, command)
        browser.slow_click(button_sel)

    # connect WS1
    browser.slow_click('#button1')
    browser.assert_text('Hello, WS1!', 'div.ws1', timeout=5)
    browser.assert_text('WS1 CONNECTED', 'div.sse', timeout=5)
    clear_log()
    # connect WS2
    browser.slow_click('#button2')
    browser.assert_text('Hello, WS2!', 'div.ws2', timeout=5)
    browser.assert_text('WS2 CONNECTED', 'div.sse', timeout=5)
    clear_log()
    # WS1 -> WS2 direct messages
    for payload in ('Apples', 'Oranges', 'Bananas'):
        send_as('#input1', '#button1', '/msg WS2 %s' % payload)
        browser.assert_text('[WS1] %s' % payload, 'div.ws2', timeout=5)
        clear_log()
    # WS2 -> WS1 direct message
    send_as('#input2', '#button2', '/msg WS1 Talk to you later...')
    browser.assert_text('[WS2] Talk to you later...', 'div.ws1', timeout=5)
    clear_log()
    # both clients quit
    send_as('#input1', '#button1', '/quit')
    browser.assert_text('Bye, WS1!', 'div.ws1', timeout=5)
    send_as('#input2', '#button2', '/quit')
    browser.assert_text('Bye, WS2!', 'div.ws2', timeout=5)
def test_feeder_set_hopper(client: TestClient, with_stored_recipe: None):
    """Setting the hopper level must round-trip through the feeder API."""
    from tests.test_database_models import SAMPLE_DEVICE_HID
    hopper_url = f'/api/v1/feeder/{SAMPLE_DEVICE_HID}/hopper'
    set_response = client.post(hopper_url, json={'level': 100})
    assert set_response.status_code == 200
    get_response = client.get(hopper_url)
    assert get_response.status_code == 200
    assert get_response.json()['level'] == 100
class CabinetHingeEdge(BaseEdge):
    """Edge that cuts the eye comb for cabinet hinges along its length.

    ``parts()`` renders the matching hinge pieces to be cut separately.
    The drawing below is order-dependent turtle-style geometry; keep the
    polyline sequences intact.
    """
    char = 'u'
    description = 'Edge with cabinet hinges'
    def __init__(self, boxes, settings=None, top: bool=False, angled: bool=False) -> None:
        """*top* selects the upper counterpart edge, *angled* a 45° lid variant."""
        super().__init__(boxes, settings)
        self.top = top
        self.angled = angled
        # select the edge character among the four u/U/v/V variants
        self.char = 'uUvV'[(bool(top) + (2 * bool(angled)))]
    def startwidth(self) -> float:
        # the angled top edge is inset by one material thickness
        return (self.settings.thickness if (self.top and self.angled) else 0.0)
    def __poly(self):
        """Build the polyline for one hinge block; returns (polyline, width)."""
        n = self.settings.eyes_per_hinge
        p = self.settings.play
        e = self.settings.eye
        t = self.settings.thickness
        spacing = self.settings.spacing
        # adjust the effective eye size for the style/orientation
        if ((self.settings.style == 'outside') and self.angled):
            e = t
        elif (self.angled and (not self.top)):
            e -= t
        if self.top:
            poly = [spacing, 90, (e + p)]
        else:
            poly = [(spacing + p), 90, (e + p), 0]
        # alternate eye openings and teeth along the edge
        for i in range(n):
            if ((i % 2) ^ self.top):
                if (i == 0):
                    poly += [(- 90), (t + (2 * p)), 90]
                else:
                    poly += [90, (t + (2 * p)), 90]
            else:
                poly += [(t - p), (- 90), t, (- 90), (t - p)]
        if ((n % 2) ^ self.top):
            poly += [0, (e + p), 90, (p + spacing)]
        else:
            poly[(- 1):] = [(- 90), (e + p), 90, (0 + spacing)]
        width = ((((t + p) * n) + p) + (2 * spacing))
        return (poly, width)
    def __call__(self, l, **kw):
        """Draw the hinge edge of length *l*, distributing the hinge blocks."""
        n = self.settings.eyes_per_hinge
        p = self.settings.play
        e = self.settings.eye
        t = self.settings.thickness
        hn = self.settings.hinges
        (poly, width) = self.__poly()
        if ((self.settings.style == 'outside') and self.angled):
            e = t
        elif (self.angled and (not self.top)):
            e -= t
        # never draw more hinge blocks than fit into the edge length
        hn = min(hn, int((l // width)))
        if (hn == 1):
            self.edge(((l - width) / 2), tabs=2)
        for j in range(hn):
            # screw holes next to every second eye
            for i in range(n):
                if (not ((i % 2) ^ self.top)):
                    self.rectangularHole((((self.settings.spacing + (0.5 * t)) + p) + (i * (t + p))), (e + (2.5 * t)), t, t)
            self.polyline(*poly)
            if (j < (hn - 1)):
                self.edge(((l - (hn * width)) / (hn - 1)), tabs=2)
        if (hn == 1):
            self.edge(((l - width) / 2), tabs=2)
    def parts(self, move=None) -> None:
        """Render the separate hinge pieces (eyes with bore holes)."""
        (e, b) = (self.settings.eye, self.settings.bore)
        t = self.settings.thickness
        n = (self.settings.eyes_per_hinge * self.settings.hinges)
        pairs = ((n // 2) + (2 * (n % 2)))
        # compute the total drawing area for the move() bookkeeping
        if (self.settings.style == 'outside'):
            th = ((2 * e) + (4 * t))
            tw = (n * (max((3 * t), (2 * e)) + self.boxes.spacing))
        else:
            th = (((4 * e) + (3 * t)) + self.boxes.spacing)
            tw = (max(e, (2 * t)) * pairs)
        if self.move(tw, th, move, True, label='hinges'):
            return
        if (self.settings.style == 'outside'):
            ax = max((t / 2), (e - t))
            self.moveTo((t + ax))
            for i in range(n):
                if self.angled:
                    if (i > (n // 2)):
                        l = ((4 * t) + ax)
                    else:
                        l = ((5 * t) + ax)
                else:
                    l = ((3 * t) + e)
                self.hole(0, e, (b / 2.0))
                da = math.asin(((t - ax) / e))
                dad = math.degrees(da)
                dy = (e * (1 - math.cos(da)))
                self.polyline(0, ((180 - dad), e), 0, ((- 90) + dad), ((dy + l) - e), (90, t))
                self.polyline(0, 90, t, (- 90), t, 90, t, 90, t, (- 90), t, (- 90), t, 90, t, 90, ((ax + t) - e), (- 90), (l - (3 * t)), (90, e))
                self.moveTo(((2 * max(e, (1.5 * t))) + self.boxes.spacing))
            self.move(tw, th, move, label='hinges')
            return
        # inside style: choose the corner contour depending on the eye size
        if (e <= (2 * t)):
            if self.angled:
                corner = [((2 * e) - t), (90, ((2 * t) - e)), 0, (- 90), t, (90, e)]
            else:
                corner = [(2 * e), (90, (2 * t))]
        else:
            a = math.asin(((2 * t) / e))
            ang = math.degrees(a)
            corner = [((e * (1 - math.cos(a))) + (2 * t)), ((- 90) + ang), 0, ((180 - ang), e)]
        self.moveTo(max(e, (2 * t)))
        for i in range(n):
            self.hole(0, e, (b / 2.0))
            self.polyline(*([0, (180, e), 0, (- 90), t, 90, t, (- 90), t, (- 90), t, 90, t, 90, t, (90, t)] + corner))
            self.moveTo(self.boxes.spacing, (((4 * e) + (3 * t)) + self.boxes.spacing), 180)
            if (i % 2):
                self.moveTo(((2 * max(e, (2 * t))) + (2 * self.boxes.spacing)))
        self.move(th, tw, move, label='hinges')
def IKTargetDoRotateQuaternion(kwargs: dict) -> OutgoingMessage:
    """Serialize an 'IKTargetDoRotateQuaternion' command into an OutgoingMessage.

    Required kwargs: ``id`` (int), ``quaternion`` (4 floats, x/y/z/w order
    as indexed), ``duration`` (float).
    Optional kwargs: ``speed_based`` (default True), ``relative``
    (default False).

    :param kwargs: command parameters; missing optional keys are filled
        in place with their defaults.
    :return: the fully written :class:`OutgoingMessage`.
    """
    utility.CheckKwargs(kwargs, ['id', 'quaternion', 'duration'])
    # Fill in defaults for the optional flags (mutates the caller's dict,
    # matching the original behavior).
    kwargs.setdefault('speed_based', True)
    kwargs.setdefault('relative', False)
    msg = OutgoingMessage()
    msg.write_int32(kwargs['id'])
    msg.write_string('IKTargetDoRotateQuaternion')
    # Quaternion components are written individually, in index order.
    for component_index in range(4):
        msg.write_float32(kwargs['quaternion'][component_index])
    msg.write_float32(kwargs['duration'])
    msg.write_bool(kwargs['speed_based'])
    msg.write_bool(kwargs['relative'])
    return msg
def _CopyDebugger(target_dir, target_cpu):
    """Copy Windows debugger DLLs for *target_cpu* into *target_dir*.

    dbghelp.dll is mandatory (raises if the Windows SDK "Debugging Tools
    for Windows" feature is not installed); dbgcore.dll is optional and
    is silently skipped when absent.  No-op when the SDK directory cannot
    be determined.
    """
    win_sdk_dir = SetEnvironmentAndGetSDKDir()
    if not win_sdk_dir:
        return
    # (filename, optional?) pairs to copy from the SDK Debuggers directory.
    for debug_file, is_optional in (('dbghelp.dll', False), ('dbgcore.dll', True)):
        source_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file)
        if not os.path.exists(source_path):
            if not is_optional:
                raise Exception(('%s not found in "%s"\r\nYou must install the "Debugging Tools for Windows" feature from the Windows 10 SDK.' % (debug_file, source_path)))
            continue
        _CopyRuntimeImpl(os.path.join(target_dir, debug_file), source_path)
class OptionPlotoptionsScatter3dSonificationTracks(Options):
    """Sonification track options for scatter3d plots.

    Fix: the duplicated method definitions (``instrument``, ``midiName``,
    ``roundToMusicalNotes``, ``showPlayMarker``, ``type``) shadowed each
    other, leaving the getters unreachable.  They are the standard
    getter/setter property pattern used by these Options classes, so the
    ``@property`` / ``@<name>.setter`` decorators are restored.
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsScatter3dSonificationTracksActivewhen':
        """Sub-options controlling when the track is active."""
        return self._config_sub_data('activeWhen', OptionPlotoptionsScatter3dSonificationTracksActivewhen)

    @property
    def instrument(self):
        """Instrument used by this track; defaults to 'piano'."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsScatter3dSonificationTracksMapping':
        """Sub-options mapping data properties to sound parameters."""
        return self._config_sub_data('mapping', OptionPlotoptionsScatter3dSonificationTracksMapping)

    @property
    def midiName(self):
        """MIDI track name; no default."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsScatter3dSonificationTracksPointgrouping':
        """Sub-options for grouping nearby points."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsScatter3dSonificationTracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """Whether notes snap to musical pitches; defaults to True."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """Whether to show the play marker; defaults to True."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        """Track type; defaults to 'instrument'."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def look_up_cognito_id(event_body):
    """Resolve the Cognito username for the email in *event_body*.

    :param event_body: mapping with at least an ``'email'`` key.
    :return: the Cognito ``Username`` returned by ``admin_get_user``.
    :raises: re-raises whatever ``admin_get_user`` raised, after logging.

    Fix: the original read ``e.response['Error']['Code']`` unconditionally
    inside ``except Exception``, so any non-ClientError exception (e.g. a
    missing USER_POOL_ID env var raising KeyError) triggered an
    AttributeError that masked the real error.  Error-code extraction is
    now defensive; the original exception is always re-raised unchanged.
    """
    print('looking up cognito id...', event_body)
    try:
        response = cognito_client.admin_get_user(UserPoolId=os.environ['USER_POOL_ID'], Username=event_body['email'])
    except Exception as e:
        # Only botocore ClientError carries .response; fall back safely.
        error_code = getattr(e, 'response', {}).get('Error', {}).get('Code')
        if (error_code == 'UserNotFoundException'):
            print(f"Error retrieving Cognito Id: user not found for email {event_body['email']}")
        else:
            print(f"Error retrieving Cognito Id for user {event_body['email']}")
        raise
    return response['Username']
class PreviewWindow(HasTraits):
    """A self-contained Mayavi scene with its own private Engine,
    used to preview sources/modules/filters in isolation.
    """

    # Private engine driving this preview pipeline (lazily created).
    _engine = Instance(Engine)
    # The scene model displayed by the view.
    _scene = Instance(SceneModel, ())

    view = View(Item('_scene', editor=SceneEditor(scene_class=Scene), show_label=False), width=500, height=500)

    def add_source(self, src):
        """Add a data source to the preview engine."""
        self._engine.add_source(src)

    def add_module(self, module):
        """Add a visualization module to the preview engine."""
        self._engine.add_module(module)

    def add_filter(self, filter):
        """Add a filter to the preview engine.

        Fix: previously delegated to ``add_module``, so filters were
        registered as modules; the engine's ``add_filter`` is used now.
        """
        self._engine.add_filter(filter)

    def clear(self):
        """Remove all children of the current scene, suppressing
        intermediate renders while the tree is emptied."""
        self._engine.current_scene.scene.disable_render = True
        self._engine.current_scene.children[:] = []
        self._engine.current_scene.scene.disable_render = False

    def __engine_default(self):
        # Traits default initializer: start a private engine bound to
        # this window's scene model.
        e = Engine()
        e.start()
        e.new_scene(self._scene)
        return e
class Polynomial_GF256_int(Polynomial_GF256_base):
    """Polynomial over GF(256) with coefficients stored as plain ints,
    highest-degree coefficient first (index 0)."""

    def to_exp(self):
        """Convert to the discrete-log (exponent) representation.

        Zero coefficients have no logarithm and map to ``None``.
        """
        return Polynomial_GF256_exp([(LOG[x] if x else None) for x in self])

    def get_str(self):
        """Human-readable polynomial, highest-degree term first."""
        return ' + '.join((f'{coeff}x**{(len(self) - i)}' for (i, coeff) in enumerate(self, 1)))

    def copy_with_increased_degree(self, n):
        """Return a copy multiplied by x**n (appends n zero coefficients)."""
        return Polynomial_GF256_int((self + ([0] * n)))

    def __xor__(self, other):
        """Coefficient-wise GF(256) addition (XOR).

        NOTE(review): ``zip`` truncates to the shorter operand — callers
        presumably align degrees first (see copy_with_increased_degree);
        confirm before relying on mixed-length operands.
        """
        return Polynomial_GF256_int([(x ^ y) for (x, y) in zip(self, other)])

    def discard_leading_zeroes(self):
        """Return a copy with leading (highest-degree) zero coefficients removed.

        Fix: guard against an all-zero polynomial — the original looped
        until ``self`` was empty and then raised IndexError on ``self[0]``;
        now an empty polynomial is returned instead.
        """
        while (self and (self[0] == 0)):
            self = self[1:]
        return Polynomial_GF256_int(self)
def writeSignificantHDF(pOutFileName, pSignificantDataList, pSignificantKeyList, pViewpointObj, pReferencePointsList, pArgs):
    """Write significant interaction data to an HDF5 file.

    Layout: ``/<matrix>/<chromosome>/<gene>`` groups, plus a
    ``/<matrix>/genes/<gene>`` hard link to each gene group.  Gene names
    are de-duplicated per matrix by appending ``_<counter>``.

    :param pOutFileName: output HDF5 path (overwritten).
    :param pSignificantDataList: per-key dict of 10-element records
        (chromosome, start, end, gene, sum, rel. distance, rel.
        interactions, p-value, x-fold, raw target) — inferred from the
        indexing below.
    :param pSignificantKeyList: keys aligned with the data list;
        presumably (matrix_name, ..., gene_name) tuples — key[0] is the
        matrix group, key[2] the gene name.
    :param pViewpointObj: object providing writeInteractionFileHDF5.
    :param pReferencePointsList: reference points aligned by index.
    :param pArgs: parsed CLI arguments; several are stored as attributes.
    """
    significantFileH5Object = h5py.File(pOutFileName, 'w')
    # File-level metadata so downstream tools can validate provenance.
    significantFileH5Object.attrs['type'] = 'significant'
    significantFileH5Object.attrs['version'] = __version__
    significantFileH5Object.attrs['pvalue'] = pArgs.pValue
    # Record which (mutually exclusive) preselection mode was used.
    if (pArgs.xFoldBackground is not None):
        significantFileH5Object.attrs['mode_preselection'] = 'xfold'
        significantFileH5Object.attrs['mode_preselection_value'] = pArgs.xFoldBackground
    elif (pArgs.loosePValue is not None):
        significantFileH5Object.attrs['mode_preselection'] = 'loosePValue'
        significantFileH5Object.attrs['mode_preselection_value'] = pArgs.loosePValue
    else:
        significantFileH5Object.attrs['mode_preselection'] = 'None'
        significantFileH5Object.attrs['mode_preselection_value'] = 'None'
    significantFileH5Object.attrs['range'] = pArgs.range
    significantFileH5Object.attrs['combinationMode'] = pArgs.combinationMode
    significantFileH5Object.attrs['truncateZeroPvalues'] = pArgs.truncateZeroPvalues
    significantFileH5Object.attrs['fixateRange'] = pArgs.fixateRange
    significantFileH5Object.attrs['peakInteractionsThreshold'] = pArgs.peakInteractionsThreshold
    # Per-matrix set of gene-name keys already written (for de-duplication).
    keys_seen = {}
    for (i, (key, data)) in enumerate(zip(pSignificantKeyList, pSignificantDataList)):
        # Skip viewpoints with no significant interactions.
        if (len(data) == 0):
            continue
        chromosome = None
        start_list = []
        end_list = []
        gene_name = None
        sum_of_interactions = None
        relative_distance_list = []
        relative_interactions_list = []
        pvalue_list = []
        xfold_list = []
        raw_target_list = []
        # Transpose the per-record values into column lists.  chromosome,
        # gene_name and sum_of_interactions keep the last record's value —
        # presumably constant across one viewpoint; confirm upstream.
        for datum in data.values():
            chromosome = datum[0]
            start_list.append(datum[1])
            end_list.append(datum[2])
            gene_name = datum[3]
            sum_of_interactions = datum[4]
            relative_distance_list.append(datum[5])
            relative_interactions_list.append(datum[6])
            pvalue_list.append(datum[7])
            xfold_list.append(datum[8])
            raw_target_list.append(datum[9])
        # Get or create the matrix-level group (key[0]).
        if (key[0] not in significantFileH5Object):
            matrixGroup = significantFileH5Object.create_group(key[0])
            keys_seen[key[0]] = set()
        else:
            matrixGroup = significantFileH5Object[key[0]]
        # Get or create the chromosome group under the matrix.
        if (chromosome not in matrixGroup):
            chromosomeObject = matrixGroup.create_group(chromosome)
        else:
            chromosomeObject = matrixGroup[chromosome]
        # Get or create the per-matrix 'genes' index group.
        if ('genes' not in matrixGroup):
            geneGroup = matrixGroup.create_group('genes')
        else:
            geneGroup = matrixGroup['genes']
        # Find a unique gene-name key: first try key[2] as-is, then
        # key[2]_1, key[2]_2, ... until unused.
        success = False
        counter = 0
        while (not success):
            if (counter != 0):
                gene_name_key = ((key[2] + '_') + str(counter))
            else:
                gene_name_key = key[2]
            if (gene_name_key in keys_seen[key[0]]):
                success = False
            else:
                keys_seen[key[0]].add(gene_name_key)
                success = True
            counter += 1
        # Delegate the actual dataset writing to the viewpoint object.
        group_name = pViewpointObj.writeInteractionFileHDF5(chromosomeObject, gene_name_key, [chromosome, start_list, end_list, gene_name, sum_of_interactions, relative_distance_list, relative_interactions_list, pvalue_list, xfold_list, raw_target_list], pReferencePointsList[i])
        # Link the gene group into the 'genes' index; a clash here is
        # logged (best effort) rather than aborting the whole write.
        try:
            geneGroup[group_name] = chromosomeObject[group_name]
        except Exception as exp:
            log.debug('exception {}'.format(str(exp)))
            log.debug('Gene group given: {}'.format(key[2]))
            log.debug('Gene group return: {}'.format(group_name))
    significantFileH5Object.close()
class TestsInit(unittest.TestCase):
    """Tests for transonic transpilation/compilation of the for_test_init module.

    Fixes: ``setUpClass``/``tearDownClass`` were missing the
    ``@classmethod`` decorator, so unittest would call them with no
    arguments and fail before any test ran.  The bare no-op tuple
    ``((not can_import_accelerator()), f'...')`` at the end of
    test_transonified was evidently a stripped ``@unittest.skipIf``
    decorator for ``test_pythranize`` and is restored as such.
    """

    @classmethod
    def setUpClass(cls):
        # Locate the module under test and its expected backend outputs.
        cls.path_for_test = (mpi.Path(__file__).parent / '../_transonic_testing/src/_transonic_testing/for_test_init.py')
        assert cls.path_for_test.exists()
        cls.path_backend = path_backend = ((cls.path_for_test.parent / f'__{backend_default}__') / cls.path_for_test.name)
        cls.path_ext = path_backend.with_name(backend.name_ext_from_path_backend(path_backend))

    @classmethod
    def tearDownClass(cls):
        # Remove any compiled extension artifacts produced by the tests.
        if cls.path_ext.exists():
            cls.path_ext.unlink()
        path_ext = cls.path_ext.with_suffix(ext_suffix)
        if path_ext.exists():
            path_ext.unlink()
        # Make sure the env var does not leak into other test modules.
        try:
            os.environ.pop('TRANSONIC_COMPILE_AT_IMPORT')
        except KeyError:
            pass
        print(mpi.rank, 'end tearDownClass')

    def test_transonified(self):
        """Transpile on rank 0, then reimport and run the transpiled module."""
        print(mpi.rank, 'start test', flush=1)
        # Ensure compile-at-import is OFF for this test.
        try:
            os.environ.pop('TRANSONIC_COMPILE_AT_IMPORT')
        except KeyError:
            pass
        # Force a fresh import of the module under test.
        try:
            del modules[module_name]
        except KeyError:
            pass
        assert (not has_to_compile_at_import())
        print(mpi.rank, 'before if self.path_backend.exists()', flush=1)
        if self.path_backend.exists():
            print(mpi.rank, 'before self.path_backend.unlink()', flush=1)
            self.path_backend.unlink()
        print(mpi.rank, 'before make_backend_file(self.path_for_test)', flush=1)
        # Only rank 0 writes the backend file; others wait at the barrier.
        if (mpi.rank == 0):
            backend.make_backend_file(self.path_for_test)
        print(mpi.rank, 'after make_backend_file(self.path_for_test)', flush=1)
        mpi.barrier()
        from _transonic_testing import for_test_init
        importlib.reload(for_test_init)
        assert self.path_backend.exists()
        assert for_test_init.ts.is_transpiled
        # Smoke-test the transpiled entry points.
        for_test_init.func(1, 3.14)
        for_test_init.func1(1.1, 2.2)
        for_test_init.check_class()

    @unittest.skipIf((not can_import_accelerator()), f'{backend.name} is required for TRANSONIC_COMPILE_AT_IMPORT')
    def test_pythranize(self):
        """With compile-at-import enabled, the module must transpile,
        compile (if the backend needs it) and remain usable throughout."""
        os.environ['TRANSONIC_COMPILE_AT_IMPORT'] = '1'
        # Force a fresh import of the module under test.
        try:
            del modules[module_name]
        except KeyError:
            pass
        assert has_to_compile_at_import()
        # Start from a clean slate: remove backend file and extensions.
        if self.path_backend.exists():
            self.path_backend.unlink()
        if self.path_ext.exists():
            self.path_ext.unlink()
        path_ext = self.path_ext.with_suffix(ext_suffix)
        if path_ext.exists():
            path_ext.unlink()
        from _transonic_testing import for_test_init
        if (not for_test_init.ts.is_compiling):
            importlib.reload(for_test_init)
        assert (module_name in modules), modules
        assert self.path_backend.exists()
        ts = for_test_init.ts
        assert ts.is_transpiled
        if backend.needs_compilation:
            # Compilation runs in the background; it must have started
            # but not yet finished.
            assert ts.is_compiling
            assert (not ts.is_compiled)
        else:
            assert ts.is_compiled
        # Calls must work while compilation is still in flight...
        for_test_init.func(1, 3.14)
        for_test_init.func1(1.1, 2.2)
        # ...and keep working once it completes.
        while (not ts.is_compiled):
            time.sleep(0.1)
            for_test_init.func(1, 3.14)
            for_test_init.func1(1.1, 2.2)
        assert (not ts.is_compiling)
        assert ts.is_compiled
        for_test_init.func(1, 3.14)
        for_test_init.func1(1.1, 2.2)
        for_test_init.check_class()
def get_unit_type():
    """Return the 'Inpatient Rooms' Healthcare Service Unit Type,
    creating a billable hourly-rated one if it does not exist yet."""
    if frappe.db.exists('Healthcare Service Unit Type', 'Inpatient Rooms'):
        return frappe.get_doc('Healthcare Service Unit Type', 'Inpatient Rooms')
    unit_type = frappe.new_doc('Healthcare Service Unit Type')
    # Field values for the new unit type, assigned in the original order.
    field_values = {
        'service_unit_type': 'Inpatient Rooms',
        'inpatient_occupancy': 1,
        'is_billable': 1,
        'item_code': 'Inpatient Rooms',
        'item_group': 'Services',
        'uom': 'Hour',
        'no_of_hours': 1,
        'rate': 4000,
    }
    for field, value in field_values.items():
        setattr(unit_type, field, value)
    unit_type.save()
    return unit_type
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.