code stringlengths 281 23.7M |
|---|
def __get_validated_git_repos__(repos_relative):
    """Clone each repo path in *repos_relative* and ensure every clone is named.

    Falls back to the current directory when *repos_relative* is empty/None.
    Returns the list of cloned repo objects.
    """
    if (not repos_relative):
        # The original fell back to the string '.', which only worked because
        # iterating a one-character string yields that character; make the
        # single-entry fallback explicit.
        repos_relative = ['.']
    repos = []
    for repo in repos_relative:
        cloned_repo = clone.create(repo)
        if (cloned_repo.name is None):  # identity check instead of `== None`
            # Unnamed clone: normalize its location to the git base dir and
            # derive the name from the directory basename.
            cloned_repo.location = basedir.get_basedir_git(cloned_repo.location)
            cloned_repo.name = os.path.basename(cloned_repo.location)
        repos.append(cloned_repo)
    return repos
class Compose(Base):
    """A compose of locked updates for one (release, request) pair.

    NOTE(review): ``content_type``/``security``/``update_summary`` read like
    ``@property`` methods, ``from_dict``/``from_updates`` like
    ``@classmethod``s, and ``update_state_date`` like a SQLAlchemy attribute
    event listener -- the decorators appear to have been stripped from this
    copy; confirm against the original source before changing call sites.
    """
    # Serialization knobs consumed by Base.__json__ (see __json__ below).
    __exclude_columns__ = 'updates'
    __include_extras__ = ('content_type', 'security', 'update_summary')
    __tablename__ = 'composes'
    # Composite primary key: at most one compose per release/request pair.
    release_id = Column(Integer, ForeignKey('releases.id'), primary_key=True, nullable=False)
    request = Column(UpdateRequest.db_type(), primary_key=True, nullable=False)
    # Shadow any inherited surrogate ``id``; the composite key above is used.
    id = None
    # JSON-encoded checkpoint state used to resume an interrupted compose.
    checkpoints = Column(UnicodeText, nullable=False, default='{}')
    error_message = Column(UnicodeText)
    date_created = Column(DateTime, nullable=False, default=datetime.utcnow)
    # Timestamp of the last state change (refreshed by update_state_date).
    state_date = Column(DateTime, nullable=False, default=datetime.utcnow)
    release = relationship('Release', back_populates='composes')
    state = Column(ComposeState.db_type(), nullable=False, default=ComposeState.requested)
    # The locked updates that share this compose's release and request.
    updates = relationship('Update', primaryjoin='and_(foreign(Update.release_id)==Compose.release_id, foreign(Update.request)==Compose.request, Update.locked==True)', passive_deletes=True, order_by='Update.date_submitted', overlaps='release', back_populates='compose')
    def content_type(self):
        """Return the content type of the first update, or None when empty."""
        if self.updates:
            return self.updates[0].content_type
    def from_dict(cls, db, compose):
        """Return the Compose matching a serialized dict's release_id/request."""
        return db.query(cls).filter_by(release_id=compose['release_id'], request=UpdateRequest.from_string(compose['request'])).one()
    def from_updates(cls, updates):
        """Create one Compose per (release, request) pair found in *updates*.

        Skips updates whose request was revoked or that have no builds, and
        raises ValueError when an update mixes build content types.  Every
        update taken into a compose is locked.
        """
        work = {}
        for update in updates:
            if (not update.request):
                log.info('%s request was revoked', update.alias)
                continue
            ctype = None
            if (not update.builds):
                log.info(f'No builds in {update.alias}. Skipping.')
                continue
            for build in update.builds:
                if (ctype is None):
                    ctype = build.type
                elif (ctype is not build.type):
                    raise ValueError(f'Builds of multiple types found in {update.alias}')
            # One compose per release/request combination.
            key = ('%s-%s' % (update.release.name, update.request.value))
            if (key not in work):
                work[key] = cls(request=update.request, release_id=update.release.id, release=update.release)
            update.locked = True
        return list(work.values())
    def security(self):
        """Return True when any update in this compose is a security update."""
        for update in self.updates:
            if (update.type is UpdateType.security):
                return True
        return False
    def update_state_date(target, value, old, initiator):
        """Refresh state_date whenever the state value actually changes."""
        if (value != old):
            target.state_date = datetime.utcnow()
    def update_summary(self):
        """Return [{'alias', 'title'}] entries for the compose's updates."""
        return [{'alias': u.alias, 'title': u.get_title(nvr=True, beautify=True)} for u in self.updates]
    def __json__(self, request=None, exclude=None, include=None, composer=False):
        """Serialize; composer=True trims the payload to what the composer needs."""
        if composer:
            exclude = ('checkpoints', 'error_message', 'date_created', 'state_date', 'release', 'state', 'updates')
            include = ('content_type', 'security')
        return super(Compose, self).__json__(request=request, exclude=exclude, include=include)
    def __lt__(self, other):
        """Order composes: security composes first, then stable requests."""
        if (self.security and (not other.security)):
            return True
        if (other.security and (not self.security)):
            return False
        if ((self.request == UpdateRequest.stable) and (other.request != UpdateRequest.stable)):
            return True
        return False
    def __str__(self):
        return '<Compose: {} {}>'.format(self.release.name, self.request.description)
class _MagiclinkShorthandPattern(InlineProcessor):
    """Inline pattern that expands shorthand magic links.

    Stores default user/repo/labels context and the selected provider;
    an unrecognized provider falls back to the empty string.
    """

    def __init__(self, pattern, md, user, repo, provider, labels):
        if provider in PROVIDER_INFO:
            self.provider = provider
        else:
            self.provider = ''
        self.user = user
        self.repo = repo
        self.labels = labels
        InlineProcessor.__init__(self, pattern, md)
def import_practices(bq_conn, sqlite_conn, dates):
    """Copy the set of active practice codes for *dates* from BigQuery into SQLite.

    A practice is active when it appears in prescribing or statistics data
    between the earliest and latest of *dates*.
    """
    date_start, date_end = min(dates), max(dates)
    logger.info('Querying for active practice codes between %s and %s', date_start, date_end)
    sql = "\n    SELECT DISTINCT practice FROM {hscic}.prescribing_v2\n    WHERE month BETWEEN TIMESTAMP('%(start)s') AND TIMESTAMP('%(end)s')\n    UNION DISTINCT\n    SELECT DISTINCT practice FROM {hscic}.practice_statistics_all_years\n    WHERE month BETWEEN TIMESTAMP('%(start)s') AND TIMESTAMP('%(end)s')\n    ORDER BY practice\n    "
    result = bq_conn.query(sql % {'start': date_start, 'end': date_end})
    practice_codes = [row[0] for row in result.rows]
    logger.info('Writing %s practice codes to SQLite', len(practice_codes))
    # offset is just the row index within the sorted code list.
    sqlite_conn.executemany('INSERT INTO practice (offset, code) VALUES (?, ?)', enumerate(practice_codes))
# NOTE(review): the two leading `.parametrize(...)` lines are a syntax error
# as written -- they look like `@pytest.mark.parametrize` decorators whose
# `@pytest.mark` prefix was stripped; confirm against the original module.
.parametrize('lang', Mnemonic.list_languages())
.parametrize('num_words', [12, 15, 18, 21, 24])
def test_generation(lang, num_words):
    """Generated mnemonics must validate, detect their language, and seed to 64 bytes."""
    m = Mnemonic(lang)
    mnemonic = m.generate(num_words)
    assert m.is_mnemonic_valid(mnemonic)
    if (lang == 'chinese_traditional'):
        # Traditional and simplified wordlists overlap, so detection may only
        # narrow to a 'chinese' family match rather than the exact variant.
        assert ('chinese' in Mnemonic.detect_language(mnemonic))
    else:
        assert (Mnemonic.detect_language(mnemonic) == lang)
    assert (len(Mnemonic.to_seed(mnemonic)) == 64)
def gen_profiler(func_attrs: Dict[(str, Any)], workdir: str, header_files: str, backend_spec) -> list:
    """Render the profiler source for this op and register it.

    Args:
        func_attrs: op attributes; reads 'op', 'name', 'preNmsTop' and the
            dtypes of the first two inputs.
        workdir: directory the profiler sources are written under.
        header_files: header include block passed through to the template.
        backend_spec: backend description (prefix, cub, dtype/index mapping).

    Returns:
        The file pairs produced by add_profiler.  (The original annotated
        ``-> None`` while actually returning this list -- fixed here.)
    """
    op_type = func_attrs['op']
    file_pairs = []
    block_size = 1024
    # Ceiling division with ``//`` instead of int(a / b): exact for large
    # integers, no float round-trip.
    t_size = (func_attrs['preNmsTop'] + block_size - 1) // block_size
    # Half-precision max/min intrinsics are only available on CUDA.
    cuda_hmaxmin = backend_spec.backend_name == 'cuda'
    elem_rois_type = backend_spec.dtype_to_backend_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_scores_type = backend_spec.dtype_to_backend_type(func_attrs['inputs'][1]._attrs['dtype'])
    code = PROFILER_TEMPLATE.render(
        T_SIZE=t_size,
        elem_scores_type=elem_scores_type,
        elem_rois_type=elem_rois_type,
        header_files=header_files,
        kernel=KERNEL_TEMPLATE.render(prefix=backend_spec.prefix, cub=backend_spec.cub, cuda_hmaxmin=cuda_hmaxmin),
        func_signature=FUNC_SIGNATURE.render(func_name=func_attrs['name'], prefix=backend_spec.prefix, index_type=backend_spec.index_type),
    )
    op_name = func_attrs['op']
    add_profiler(file_pairs, workdir, op_type, op_name, code)
    return file_pairs
def extractNiiselin(item):
    """Parse a Niiselin feed item into a release message.

    Returns None for previews or titles with no chapter/volume info, a
    release message when the 'WATTT' tag is present, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def test_get_remote_addr_with_socket():
    """get_remote_addr returns the peername for IP sockets, None for non-IP families."""
    cases = [
        (MockSocket(family=socket.AF_IPX), None),
        (MockSocket(family=socket.AF_INET6, peername=('::1', 123)), ('::1', 123)),
        (MockSocket(family=socket.AF_INET, peername=('123.45.6.7', 123)), ('123.45.6.7', 123)),
    ]
    if hasattr(socket, 'AF_UNIX'):
        # AF_UNIX is not available on every platform (e.g. Windows).
        cases.append((MockSocket(family=socket.AF_UNIX, peername=('127.0.0.1', 8000)), ('127.0.0.1', 8000)))
    for sock, expected in cases:
        addr = get_remote_addr(MockTransport({'socket': sock}))
        if expected is None:
            assert addr is None
        else:
            assert addr == expected
def callTransaction21():
    """Describe Binder transaction code 21 ("call").

    Returns a dict with the transaction code, the ordered parcel reader
    names for its input and output arguments, the method name, and the
    one-way flag.
    """
    descriptor = {
        'code': 21,
        'in': [
            {'callingPkg': 'readString16'},
            {'featureId': 'readString16'},
            {'authority': 'readString16'},
            {'method': 'readString16'},
            {'stringArg': 'readString16'},
            {'extras': 'readBundle'},
        ],
        'out': [
            {'__exception': 'readException'},
            {'__return': 'readBundle'},
        ],
        'name': 'call',
        'oneWay': False,
    }
    return descriptor
def test_safer():
    """Patterns that invoke methods must be rejected when safer=True."""
    tree = PhyloTree('(a,(b,c));', sp_naming_function=(lambda name: name))
    pattern_text = '("node.get_species()=={\'c\'}", node.species=="b")'
    # Without the safety flag the method-calling pattern matches normally.
    unsafe_pattern = tm.TreePattern(pattern_text)
    assert list(unsafe_pattern.search(tree)) == [tree.common_ancestor(['b', 'c'])]
    # With safer=True the same pattern must raise instead of evaluating.
    safe_pattern = tm.TreePattern(pattern_text, safer=True)
    with pytest.raises(ValueError):
        list(safe_pattern.search(tree))
class TestGetFieldNames(TestData):
    """get_field_names must be unaffected by include_scripted_fields and
    must line up with the pandas columns."""

    def _assert_field_names_match(self, ed_frame, pd_frame):
        # Shared check: names with and without scripted fields are equal,
        # and both agree with the pandas frame's columns.
        without_scripted = ed_frame._query_compiler.get_field_names(include_scripted_fields=False)
        with_scripted = ed_frame._query_compiler.get_field_names(include_scripted_fields=True)
        assert without_scripted == with_scripted
        assert_index_equal(pd_frame.columns, pd.Index(without_scripted))

    def test_get_field_names_all(self):
        self._assert_field_names_match(self.ed_flights(), self.pd_flights())

    def test_get_field_names_selected(self):
        columns = ['Carrier', 'AvgTicketPrice']
        self._assert_field_names_match(self.ed_flights()[columns], self.pd_flights()[columns])
class TestDump(unittest.TestCase):
    """Smoke-test each dump writer against the shared `dump` fixture."""

    def _write_and_remove(self, dump_cls, filename):
        # Write the fixture with the given writer class, then clean up.
        dump_cls(dump).write(filename)
        os.remove(filename)

    def test_csv(self):
        self._write_and_remove(CSVDump, 'dump.csv')

    def test_py(self):
        self._write_and_remove(PythonDump, 'dump.py')

    def test_sigrok(self):
        # Sigrok also supports reading, so do a write/read/write round trip.
        filename = 'dump.sr'
        SigrokDump(dump).write(filename)
        SigrokDump(dump).read(filename)
        SigrokDump(dump).write(filename)
        os.remove(filename)

    def test_vcd(self):
        self._write_and_remove(VCDDump, 'dump.vcd')
class OptionPlotoptionsArearangeOnpointConnectoroptions(Options):
    """Highcharts connector options for plotOptions.arearange.onPoint.

    NOTE(review): every name below is defined twice (getter-style, then a
    setter-style overload).  The original generator almost certainly emitted
    ``@property`` / ``@<name>.setter`` decorators that were stripped from
    this copy -- as written, each second def simply overwrites the first.
    Confirm against the generated upstream source.
    """
    def dashstyle(self):
        # Getter: connector dash style (default None).
        return self._config_get(None)
    def dashstyle(self, text: str):
        # Setter: store the dash style as a plain (non-JS) value.
        self._config(text, js_type=False)
    def stroke(self):
        # Getter: connector stroke colour (default None).
        return self._config_get(None)
    def stroke(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        # Getter: connector line width (default 1).
        return self._config_get(1)
    def width(self, num: float):
        self._config(num, js_type=False)
class ReadonlyEditor(BaseReadonlyEditor):
    """wx read-only editor that, when the factory supplies a view, acts like
    a clickable control: hover/press feedback via background colour, and a
    completed click opens the factory view for the edited object."""

    def init(self, parent):
        """Create the control and bind mouse handlers when a view is configured."""
        super().init(parent)
        if (self.factory.view is not None):
            control = self.control
            control.Bind(wx.EVT_ENTER_WINDOW, self._enter_window)
            control.Bind(wx.EVT_LEAVE_WINDOW, self._leave_window)
            control.Bind(wx.EVT_LEFT_DOWN, self._left_down)
            control.Bind(wx.EVT_LEFT_UP, self._left_up)

    def update_editor(self):
        """Push the (possibly password-masked) string value into the control."""
        control = self.control
        new_value = self.str_value
        if (hasattr(self.factory, 'password') and self.factory.password):
            # Mask password values with asterisks of equal length.
            new_value = ('*' * len(new_value))
        # Resizable / fixed-height items are text controls (SetValue);
        # otherwise the control behaves like a label (SetLabel).
        if ((self.item.resizable is True) or (self.item.height != (- 1.0))):
            if (control.GetValue() != new_value):
                control.SetValue(new_value)
                control.SetInsertionPointEnd()
        elif (control.GetLabel() != new_value):
            control.SetLabel(new_value)

    def dispose(self):
        """Unbind the mouse handlers (if they were bound) before disposal."""
        if (self.factory.view is not None):
            control = self.control
            control.Unbind(wx.EVT_ENTER_WINDOW)
            control.Unbind(wx.EVT_LEAVE_WINDOW)
            control.Unbind(wx.EVT_LEFT_DOWN)
            control.Unbind(wx.EVT_LEFT_UP)
        super().dispose()

    def _set_color(self):
        # Reflect the current hover/pressed state in the background colour:
        # outside -> parent background, pressed -> DownColor, hover -> HoverColor.
        control = self.control
        if (not self._in_window):
            color = control.GetParent().GetBackgroundColour()
        elif self._down:
            color = DownColor
        else:
            color = HoverColor
        control.SetBackgroundColour(color)
        control.Refresh()

    def _enter_window(self, event):
        # Mouse entered the control: show hover feedback.
        self._in_window = True
        self._set_color()

    def _leave_window(self, event):
        # Mouse left the control: restore the parent's background.
        self._in_window = False
        self._set_color()

    def _left_down(self, event):
        # Capture the mouse so the matching left-up is seen even if it
        # happens outside the control.
        self.control.CaptureMouse()
        self._down = True
        self._set_color()

    def _left_up(self, event):
        # A press + release while still inside the window counts as a click
        # and opens the configured view.
        self._set_color()
        if (not self._down):
            return
        self.control.ReleaseMouse()
        self._down = False
        if self._in_window:
            self.object.edit_traits(view=self.factory.view, parent=self.control)
class OptionPlotoptionsVariwideSonificationTracksMappingRate(Options):
    """Highcharts rate-mapping options for variwide sonification tracks.

    NOTE(review): every name below is defined twice (getter-style, then a
    setter-style overload); the original generator almost certainly emitted
    ``@property`` / ``@<name>.setter`` decorators that were stripped from
    this copy -- as written, each second def overwrites the first.
    """
    def mapFunction(self):
        # Getter: mapping function (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property the rate maps to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: maximum rate (default None).
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: minimum rate (default None).
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: grouping context for the mapping (default None).
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractFirebirdFictionCom(item):
    """Parse a FirebirdFiction feed item into a release message.

    Returns None for previews or titles with no chapter/volume info, a
    release message for recognized series, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tags = item['tags']
    if 'Haventon Chronicles' in tags and 'Lord of the Wolves' in item['title']:
        # "Lord of the Wolves" entries belong to volume 2 of the series.
        return buildReleaseMessageWithType(item, 'Haventon Chronicles', 2, chp, frag=frag, postfix=postfix, tl_type='oel')
    tagmap = [
        ("Lawgiver's Blade", "Lawgiver's Blade", 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in tags:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_multiprocessing(simple_nlp, texts):
    """pipe(n_process=2) must yield the same docs as single-process calls."""
    # On the plain-CPU backend the run is cheap, so triple the workload.
    if isinstance(get_current_ops(), NumpyOps):
        texts = texts * 3
    expected_docs = [simple_nlp(text) for text in texts]
    piped_docs = simple_nlp.pipe(texts, n_process=2, batch_size=2)
    for doc, expected in zip(piped_docs, expected_docs):
        assert_docs_equal(doc, expected)
def main():
    """Build and launch the LiveQuery chat UI with Gradio."""
    default_key = os.environ.get('OPENAI_API_KEY')
    api_key_input = gr.components.Textbox(lines=1, label='Enter OpenAI API Key', value=default_key, type='password')
    model_selection = gr.components.Dropdown(choices=['gpt-4', 'gpt-3.5-turbo'], label='Select a GPT Model', value='gpt-3.5-turbo')
    user_input = gr.components.Textbox(lines=3, label='Enter your message')
    output_history = gr.outputs.HTML(label='Updated Conversation')
    # The chatbot is created once with the dropdown's initial value.
    chatbot = chat_agent.create_chatbot(model_name=model_selection.value)
    iface = gr.Interface(
        fn=partial(get_response, chatbot),
        inputs=[api_key_input, model_selection, user_input],
        outputs=[output_history],
        title='LiveQuery GPT-4',
        description='A simple chatbot using GPT-4 and Gradio with conversation history',
        allow_flagging='never',
    )
    iface.launch()
class Test_Store():
    """Tests for the in-memory table Store.

    NOTE(review): ``store`` reads like a pytest fixture and the bare
    ``.asyncio`` lines like stripped ``@pytest.mark.asyncio`` decorators --
    as written the ``.asyncio`` lines are a syntax error; confirm against
    the original test module.
    """
    def store(self, *, app):
        # Fresh memory:// store bound to a mocked table.
        return Store(url='memory://', app=app, table=Mock(name='table'))
    def test_clear(self, *, store):
        store.data['foo'] = 1
        store._clear()
        assert (not store.data)
    def test_apply_changelog_batch(self, *, store):
        # Applying one changelog event materializes its key/value in data.
        (event, to_key, to_value) = self.mock_event_to_key_value()
        store.apply_changelog_batch([event], to_key=to_key, to_value=to_value)
        to_key.assert_called_once_with(b'key')
        to_value.assert_called_once_with(b'value')
        assert (store.data[to_key()] == to_value())
    def test_apply_changelog_batch__deletes_key_for_None_value(self, *, store):
        # A None value acts as a tombstone and removes the key.
        self.test_apply_changelog_batch(store=store)
        (event2, to_key, to_value) = self.mock_event_to_key_value(value=None)
        assert (to_key() in store.data)
        store.apply_changelog_batch([event2], to_key=to_key, to_value=to_value)
        assert (to_key() not in store.data)
    def test_apply_changelog_batch__deletes_key_and_reassign_it(self, *, store):
        # set -> delete -> set again in one batch leaves the key present.
        self.test_apply_changelog_batch__deletes_key_for_None_value(store=store)
        events = [self.mock_event(value=value) for value in ('v1', None, 'v2')]
        (to_key, to_value) = self.mock_to_key_value(events[0])
        store.apply_changelog_batch(events, to_key=to_key, to_value=to_value)
        assert (to_key() in store.data)
    def test_apply_changelog_batch__different_partitions(self, *, store):
        # Each key remembers the partition of the event that set it.
        events = [self.mock_event(key=f'key-{i}'.encode(), partition=i) for i in range(2)]
        (to_key, to_value) = self.mock_to_key_value_multi(events)
        store.apply_changelog_batch(events, to_key=to_key, to_value=to_value)
        assert (to_key.call_args_list[0][0][0] in store.data)
        assert (to_key.call_args_list[1][0][0] in store.data)
        assert (store._key_partition.get(to_key.call_args_list[0][0][0]) == 0)
        assert (store._key_partition.get(to_key.call_args_list[1][0][0]) == 1)
    def test_apply_changelog_batch__different_partitions_deletion(self, *, store):
        # Tombstones clear both the data and the key->partition index.
        self.test_apply_changelog_batch__different_partitions(store=store)
        events = [self.mock_event(key=f'key-{i}'.encode(), value=None, partition=i) for i in range(2)]
        (to_key, to_value) = self.mock_to_key_value_multi(events)
        store.apply_changelog_batch(events, to_key=to_key, to_value=to_value)
        assert (not store._key_partition)
        assert (not store.data)
    .asyncio
    async def test_apply_changelog_batch__different_partitions_repartition_single(self, *, store):
        # Recovery keeping only partition 0 drops partition 1's key.
        self.test_apply_changelog_batch__different_partitions(store=store)
        (await store.on_recovery_completed({TP('foo', 0)}, set()))
        assert (len(store.data) == 1)
        assert (len(store._key_partition) == 1)
    .asyncio
    async def test_apply_changelog_batch__different_partitions_repartition_multi(self, *, store):
        # Recovery keeping both partitions keeps both keys.
        self.test_apply_changelog_batch__different_partitions(store=store)
        (await store.on_recovery_completed({TP('foo', 0), TP('foo', 1)}, set()))
        assert (len(store.data) == 2)
        assert (len(store._key_partition) == 2)
    def mock_event_to_key_value(self, key=b'key', value=b'value', partition=0):
        # Convenience: an event plus matching to_key/to_value mocks.
        event = self.mock_event(key=key, value=value, partition=partition)
        (to_key, to_value) = self.mock_to_key_value(event)
        return (event, to_key, to_value)
    def mock_event(self, key=b'key', value=b'value', partition=0):
        # Mock Event whose .message mirrors the key/value/partition.
        event = Mock(name='event', autospec=Event)
        event.key = key
        event.value = value
        event.message.key = key
        event.message.value = value
        event.message.partition = partition
        return event
    def mock_to_key_value(self, event):
        # Mocks that always return this event's key/value.
        to_key = Mock(name='to_key')
        to_key.return_value = event.key
        to_value = Mock(name='to_value')
        to_value.return_value = event.value
        return (to_key, to_value)
    def mock_to_key_value_multi(self, events):
        # Mocks that yield each event's key/value in order.
        to_key = Mock(name='to_key')
        to_key.side_effect = [e.key for e in events]
        to_value = Mock(name='to_value')
        to_value.side_effect = [e.value for e in events]
        return (to_key, to_value)
    def test_persisted_offset(self, *, store):
        assert (store.persisted_offset(TP('foo', 0)) is None)
    def test_reset_state(self, *, store):
        # Memory store reset: just ensure the call does not raise.
        store.reset_state()
def lazy_import():
    """Import the realtime-entry models on demand and publish them into this
    module's globals (presumably to defer the imports past module load --
    confirm against the generated client's conventions)."""
    from fastly.model.realtime_entry_aggregated import RealtimeEntryAggregated
    from fastly.model.realtime_entry_datacenter import RealtimeEntryDatacenter
    from fastly.model.realtime_entry_recorded import RealtimeEntryRecorded
    globals().update(
        RealtimeEntryAggregated=RealtimeEntryAggregated,
        RealtimeEntryDatacenter=RealtimeEntryDatacenter,
        RealtimeEntryRecorded=RealtimeEntryRecorded,
    )
def test_density_based_discret_with_delta(bottom, top, quadratic_density):
    """Larger DELTA_RATIO values must yield strictly fewer density splits."""
    w, e, s, n = -3, 2, -4, 5
    tesseroid = (w, e, s, n, bottom, top)
    split_counts = []
    for delta in (0.001, 0.01, 0.1, 1.0):
        # NOTE(review): mutates a module-level constant and never restores
        # it, which can leak into later tests -- confirm intent.
        harmonica._forward._tesseroid_variable_density.DELTA_RATIO = delta
        split_counts.append(len(_density_based_discretization(tesseroid, quadratic_density)))
    split_counts = np.array(split_counts)
    # Each successive (larger) delta must produce fewer splits than the last.
    assert (split_counts[1:] < split_counts[:-1]).all()
class TestGeneratorsAsciiFields(unittest.TestCase):
    """Tests for the asciidoc field-page generators, driven by two
    hand-built fixture fieldsets ('foo' and a nested 'event')."""
    def setUp(self):
        self.foo_fieldset = self.dummy_fieldset()
        self.event_dummy_nested_fields = self.dummy_nested_event_fieldset()
    def dummy_fieldset(self):
        # Minimal 'foo' fieldset: two fields (one with allowed_values, one
        # beta), reusable locations, and two reused-here nestings.
        return {'description': 'foo', 'fields': {'foo.type': {'dashed_name': 'foo-type', 'description': 'describes the foo', 'example': '2016-05-23T08:05:34.853Z', 'flat_name': 'foo.type', 'level': 'core', 'name': 'type', 'normalize': ['array'], 'short': 'describes the foo', 'ignore_above': 1024, 'type': 'keyword', 'allowed_values': [{'description': 'fluffy foo', 'name': 'fluffy'}, {'description': 'coarse foo', 'name': 'coarse'}]}, 'foo.id': {'beta': 'this is a beta field', 'dashed_name': 'foo-id', 'description': 'Unique ID of the foo.', 'example': 'foo123', 'flat_name': 'foo.id', 'ignore_above': 1024, 'level': 'core', 'name': 'id', 'normalize': [], 'short': 'Unique ID of the foo.', 'type': 'keyword'}}, 'reusable': {'expected': [{'as': 'foo', 'at': 'server', 'full': 'server.foo'}, {'as': 'foo', 'at': 'source', 'full': 'source.foo'}, {'as': 'foo', 'at': 'client', 'full': 'client.foo'}, {'as': 'foo', 'at': 'destination', 'full': 'destination.foo'}], 'top_level': False}, 'reused_here': [{'full': 'foo.as', 'schema_name': 'as', 'short': 'Fields describing an AS'}, {'full': 'foo.file', 'schema_name': 'file', 'short': 'Fields describing files', 'normalize': ['array']}], 'group': 2, 'name': 'foo', 'prefix': 'foo.', 'short': 'Foo fields', 'title': 'Foo', 'type': 'group'}
    def dummy_nested_event_fieldset(self):
        # Nested 'event' fieldset whose categorization fields all carry
        # allowed_values, some of which are marked beta.
        return {'event': {'name': 'event', 'description': 'description', 'type': 'group', 'title': 'Event', 'prefix': 'event.', 'fields': {'event.kind': {'dashed_name': 'event-kind', 'name': 'kind', 'allowed_values': [{'description': 'fluffy foo', 'name': 'fluffy'}, {'description': 'coarse foo', 'name': 'coarse', 'beta': 'beta'}]}, 'event.category': {'dashed_name': 'event-category', 'name': 'category', 'allowed_values': [{'description': 'fluffy foo', 'name': 'fluffy'}, {'description': 'coarse foo', 'name': 'coarse', 'beta': 'beta'}]}, 'event.type': {'dashed_name': 'event-type', 'name': 'type', 'allowed_values': [{'description': 'fluffy foo', 'name': 'fluffy'}, {'description': 'coarse foo', 'name': 'coarse', 'beta': 'beta'}]}, 'event.outcome': {'dashed_name': 'event-outcome', 'name': 'outcome', 'allowed_values': [{'description': 'fluffy foo', 'name': 'fluffy'}, {'description': 'coarse foo', 'name': 'coarse', 'beta': 'beta'}]}}}}
    def test_validate_sort_fieldset(self):
        # Fields come back sorted by name, with allowed_value_names extracted.
        sorted_foo_fields = asciidoc_fields.sort_fields(self.foo_fieldset)
        self.assertIsInstance(sorted_foo_fields, list)
        for field in sorted_foo_fields:
            self.assertIsInstance(field.get('allowed_value_names'), list)
        self.assertFalse(sorted_foo_fields[0]['allowed_value_names'])
        self.assertEqual('id', sorted_foo_fields[0]['name'])
        self.assertEqual('type', sorted_foo_fields[1]['name'])
        self.assertIn('fluffy', sorted_foo_fields[1]['allowed_value_names'])
        self.assertIn('coarse', sorted_foo_fields[1]['allowed_value_names'])
    def test_rendering_fieldset_reuse(self):
        # Reuse locations are rendered sorted by their full dotted path.
        foo_reuse_fields = asciidoc_fields.render_fieldset_reuse_text(self.foo_fieldset)
        expected_sorted_reuse_fields = ('client.foo', 'destination.foo', 'server.foo', 'source.foo')
        self.assertEqual(expected_sorted_reuse_fields, tuple(foo_reuse_fields))
    def test_rendering_fieldset_nesting(self):
        # Each reused_here entry becomes a nesting row with flat_nesting etc.
        foo_nesting_fields = asciidoc_fields.render_nestings_reuse_section(self.foo_fieldset)
        self.assertIsInstance(foo_nesting_fields, list)
        self.assertEqual('foo.as.*', foo_nesting_fields[0]['flat_nesting'])
        self.assertEqual('as', foo_nesting_fields[0]['name'])
        self.assertEqual('Fields describing an AS', foo_nesting_fields[0]['short'])
        self.assertEqual('foo.file.*', foo_nesting_fields[1]['flat_nesting'])
        self.assertEqual('file', foo_nesting_fields[1]['name'])
        self.assertEqual('Fields describing files', foo_nesting_fields[1]['short'])
        self.assertEqual(['array'], foo_nesting_fields[1]['normalize'])
    def test_check_for_usage_doc_true(self):
        usage_files = ['foo.asciidoc']
        foo_name = self.foo_fieldset.get('name')
        self.assertTrue(asciidoc_fields.check_for_usage_doc(foo_name, usage_file_list=usage_files))
    def test_check_for_usage_doc_false(self):
        usage_files = ['notfoo.asciidoc']
        foo_name = self.foo_fieldset.get('name')
        self.assertFalse(asciidoc_fields.check_for_usage_doc(foo_name, usage_file_list=usage_files))
    def test_check_for_page_field_value_rendering(self):
        # Beta allowed_values must surface a 'beta' marker in the rendering.
        rendered_field_values = asciidoc_fields.page_field_values(self.event_dummy_nested_fields)
        self.assertIn('beta', rendered_field_values)
def extractEnXiao(item):
    """Parse an EnXiao feed item into a release message.

    Returns None for previews or titles with no chapter/volume info, a
    release message for recognized series titles, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    title_lower = item['title'].lower()
    titlemap = [
        ('Who Dares Slander My Senior Brother ', 'Who Dares Slander My Senior Brother', 'translated'),
        ('Who Dares Slander My Senior Brother Chapter', 'Who Dares Slander My Senior Brother', 'translated'),
        ('Founder of Diabolism Chapter', 'Founder of Diabolism', 'translated'),
    ]
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in title_lower:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_disabled02_check_disabled_effect(dash_duo, testfile10Mb_csv, testfileWrongType, js_drag_and_drop):
    """Toggling the 'disabled' config must block both button and drag-and-drop
    uploads, and re-enabling must restore them.

    NOTE(review): the XPath predicates below (e.g. ".//input[='checkbox']",
    "//div[='dash-uploader']") are missing the attribute name before '=' --
    most likely '@type'/'@id'/'@class' was stripped from this copy.  As
    written these selectors are invalid XPath; restore them from the
    original test module.
    """
    app = import_app('tests.apps.disabled')
    dash_duo.start_server(app)
    driver = dash_duo._driver
    wait = WebDriverWait(driver, 10)
    upload = dash_duo.find_element('#dash-uploader')
    configs = dash_duo.find_element('#uploader-configs')
    reset_button = dash_duo.find_element('#reset-button')
    check_boxes = configs.find_elements(By.XPATH, ".//input[='checkbox']")
    assert (len(check_boxes) == 2), 'The provided configs for this app should be 2.'
    def upload_test_file_and_validate(upload_component, is_disabled=False, by_dragndrop=False, expect_success=True):
        # Upload the 10 MB CSV either via the input element or a simulated
        # drag-and-drop, then verify success or the expected failure mode.
        upload_input = upload_component.find_element(By.XPATH, ".//input[='dash-uploader-upload']")
        reset_button.click()
        wait.until(EC.text_to_be_present_in_element((By.XPATH, "//div[='dash-uploader']/*/label"), 'Text resetted'))
        if by_dragndrop:
            drag_and_drag_input = driver.execute_script(js_drag_and_drop, upload, 20, 20)
            drag_and_drag_input.send_keys(str(testfile10Mb_csv))
        else:
            if expect_success:
                assert upload_input.is_enabled(), 'The uploading button is expected to be enabled.'
            else:
                # Disabled state: the enabled-assertion itself must fail.
                with pytest.raises(AssertionError) as err_info:
                    assert upload_input.is_enabled(), 'The uploading button is expected to be enabled.'
                return
            upload_input.send_keys(str(testfile10Mb_csv))
        if expect_success:
            wait.until(EC.text_to_be_present_in_element((By.XPATH, "//div[='dash-uploader']/*/label"), 'Completed'))
        else:
            # Disabled drag-and-drop: completion must never appear.
            with pytest.raises(TimeoutException) as err_info:
                wait.until(EC.text_to_be_present_in_element((By.XPATH, "//div[='dash-uploader']/*/label"), 'Completed'))
            return
        # Verify the uploaded file landed intact, then clean up.
        callback_output = dash_duo.find_element('#callback-output')
        uploaded_file = callback_output.find_element(By.XPATH, '//ul').text
        uploaded_file = Path(uploaded_file)
        assert (uploaded_file.name == testfile10Mb_csv.name)
        assert uploaded_file.exists()
        assert (uploaded_file.stat().st_size == testfile10Mb_csv.stat().st_size)
        shutil.rmtree(uploaded_file.parent)
    # Initially enabled: both upload paths must succeed.
    uploader_class = upload.get_attribute('class')
    assert ((uploader_class == 'dash-uploader-default') or (uploader_class == 'dash-uploader-completed')), 'The current uploader class should be "dash-uploader-default" or "dash-uploader-completed".'
    upload_test_file_and_validate(upload, by_dragndrop=False, expect_success=True)
    upload_test_file_and_validate(upload, by_dragndrop=True, expect_success=True)
    # Disable via the first checkbox: both upload paths must fail.
    check_boxes[0].click()
    wait.until(EC.text_to_be_present_in_element((By.XPATH, "//span[='configs-output']"), json.dumps([0])))
    assert ('dash-uploader-disabled' in upload.get_attribute('class').split()), 'The current uploader class should be "dash-uploader-disabled".'
    upload_test_file_and_validate(upload, is_disabled=True, by_dragndrop=False, expect_success=False)
    upload_test_file_and_validate(upload, is_disabled=True, by_dragndrop=True, expect_success=False)
    # Re-enable (and flip the second checkbox): button upload works again,
    # drag-and-drop is still expected to fail in this configuration.
    check_boxes[0].click()
    check_boxes[1].click()
    wait.until(EC.text_to_be_present_in_element((By.XPATH, "//span[='configs-output']"), json.dumps([1])))
    uploader_class_list = upload.get_attribute('class').split()
    assert (('dash-uploader-default' in uploader_class_list) or ('dash-uploader-completed' in uploader_class_list)), 'The current uploader class should be "dash-uploader-default" or "dash-uploader-completed".'
    upload_test_file_and_validate(upload, by_dragndrop=False, expect_success=True)
    upload_test_file_and_validate(upload, by_dragndrop=True, expect_success=False)
class Test(unittest.TestCase):
    """Exercise the hig alert-dialog helpers."""
    def setUp(self):
        # Disable the auto-dismiss delay during tests.
        hig.timeout = 0
    def testCreate(self):
        # Basic Alert construction with and without secondary text.
        d = hig.Alert(transient_for=toplevel, type=Gtk.MessageType.INFO, primary='Hello!')
        self.assertNotEqual(d, None)
        self.runAndDismiss(d)
        d = hig.Alert(transient_for=toplevel, type=Gtk.MessageType.ERROR, primary='Oh no!', secondary='A terrible thing has happened')
        self.runAndDismiss(d)
    def testInformation(self):
        d = hig.InformationAlert(transient_for=toplevel, primary='Your zipper is undone', secondary='This might be considered unsightly.')
        self.runAndDismiss(d)
    def testError(self):
        d = hig.ErrorAlert(transient_for=toplevel, primary="You don't want to do it like that", secondary='Chaos will ensue.')
        self.runAndDismiss(d)
        # ErrorAlert with an extra "fix" button.
        d = hig.ErrorAlert(transient_for=toplevel, primary='Could not destroy universe', secondary='Destructor ray malfunctioned.', fix_button='Try again')
        self.runAndDismiss(d)
    def testConfirm(self):
        d = hig.ConfirmationAlert(transient_for=toplevel, primary='Do you really want to hurt me?', secondary='Do you really want to make me cry?')
        self.runAndDismiss(d)
        # ConfirmationAlert with custom proceed/alternate buttons.
        d = hig.ConfirmationAlert(transient_for=toplevel, primary='Convert sub-meson structure?', secondary='The process is agonizingly painful and could result in permanent damage to the space-time continuum', proceed_button='Convert', alternate_button='Go Fishing')
        self.runAndDismiss(d)
    def runAndDismiss(self, d):
        """Schedule an ACCEPT response, then destroy the dialog.

        NOTE(review): nothing here actually runs the dialog between the
        timeout registration and destroy() -- a `d.run()` call may have been
        lost from this copy; confirm against the original.
        """
        def dismiss():
            d.response(Gtk.ResponseType.ACCEPT)
            return False
        GLib.timeout_add(10, dismiss)
        d.destroy()
    def testPeriodText(self):
        # _periodText picks the largest whole unit and truncates fractions.
        self.assertEqual(hig._periodText((86400 * 3.5)), '3 days')
        self.assertEqual(hig._periodText((3600 * 17.2)), '17 hours')
        self.assertEqual(hig._periodText((60 * 17)), '17 minutes')
        self.assertEqual(hig._periodText(23), '23 seconds')
    def testSaveConfirm(self):
        d = hig.SaveConfirmationAlert(transient_for=toplevel, document_name='Wombat.doc')
        self.runAndDismiss(d)
        # `period` adds "time since last save" wording.
        d = hig.SaveConfirmationAlert(transient_for=toplevel, document_name='Wombat.doc', period=791)
        self.runAndDismiss(d)
    def testMessagePopper(self):
        # MessagePopper mixin: error and question helpers on a mock dialog.
        dd = MockDialog()
        self.assertEqual(0, hig.timeout)
        hig.timeout = 300
        dd.show_error('Hello', 'A catastrophe has occurred')
        dd.ask_question('Eh?', "Speak into t'trumpet!", None)
def test_minimal_renaming_function_arguments_same_color(variable_v, variable_u):
    """Distinct function arguments must keep distinct names after minimal renaming."""
    instructions = [
        Assignment(variable_v[1], BinaryOperation(OperationType.plus, [variable_v[0], variable_u[0]])),
        Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [variable_v[1]])),
    ]
    cfg = ControlFlowGraph()
    cfg.add_node(BasicBlock(0, instructions))
    task = decompiler_task(cfg, None, [Variable('v', Integer.int32_t()), Variable('u', Integer.int32_t())])
    interference_graph = InterferenceGraph()
    interference_graph.add_nodes_from([variable_v[0], variable_v[1], variable_u[0]])
    renamer = MinimalVariableRenamer(task, interference_graph)
    # v and u are both arguments, so they may never collapse to one name.
    assert renamer.renaming_map[variable_v[0]] != renamer.renaming_map[variable_u[0]]
# NOTE(review): the bare `.usefixtures(...)` line is a syntax error as
# written -- it looks like a `@pytest.mark.usefixtures` decorator whose
# `@pytest.mark` prefix was stripped; confirm against the original module.
.usefixtures('copy_poly_case')
def test_that_the_ui_show_no_errors_and_enables_update_for_poly_example(qapp):
    """Opening the poly example must expose all five simulation modes, enabled."""
    args = Mock()
    args.config = 'poly.ert'
    with add_gui_log_handler() as log_handler:
        (gui, *_) = ert.gui.main._start_initial_gui_window(args, log_handler)
        combo_box = get_child(gui, QComboBox, name='Simulation_mode')
        assert (combo_box.count() == 5)
        for i in range(combo_box.count()):
            # Every mode should be selectable (no config errors disable any).
            assert combo_box.model().item(i).isEnabled()
        assert (gui.windowTitle() == 'ERT - poly.ert')
def test_xor():
    """Bitwise XOR between Fxp fixed-point values, across signed/unsigned mixes.

    NOTE(review): val_str/mks_str/xor_str are all empty in this copy, so
    every input reduces to the bare prefix '0b' -- the binary fixture
    strings appear to have been stripped; restore them from the original
    before trusting this test.
    """
    # Signed and unsigned 8-bit (4 fractional) operands.
    x = Fxp(None, True, 8, 4)
    xu = Fxp(None, False, 8, 4)
    y = Fxp(None, True, 8, 4)
    yu = Fxp(None, False, 8, 4)
    # First value/mask pair.
    val_str = ''
    mks_str = ''
    xor_str = ''
    x(('0b' + val_str))
    xu(('0b' + val_str))
    y(('0b' + mks_str))
    yu(('0b' + mks_str))
    # XOR result must be identical regardless of operand signedness, and
    # also when the mask is supplied as a raw number instead of an Fxp.
    assert ((x ^ y).bin() == xor_str)
    assert ((x ^ yu).bin() == xor_str)
    assert ((xu ^ y).bin() == xor_str)
    assert ((xu ^ yu).bin() == xor_str)
    assert ((x ^ utils.str2num(('0b' + mks_str))).bin() == xor_str)
    assert ((xu ^ utils.str2num(('0b' + mks_str))).bin() == xor_str)
    assert ((utils.str2num(('0b' + mks_str)) ^ x).bin() == xor_str)
    assert ((utils.str2num(('0b' + mks_str)) ^ xu).bin() == xor_str)
    # Second value/mask pair, same set of checks.
    val_str = ''
    mks_str = ''
    xor_str = ''
    x(('0b' + val_str))
    xu(('0b' + val_str))
    y(('0b' + mks_str))
    yu(('0b' + mks_str))
    assert ((x ^ y).bin() == xor_str)
    assert ((x ^ yu).bin() == xor_str)
    assert ((xu ^ y).bin() == xor_str)
    assert ((xu ^ yu).bin() == xor_str)
    assert ((x ^ utils.str2num(('0b' + mks_str))).bin() == xor_str)
    assert ((xu ^ utils.str2num(('0b' + mks_str))).bin() == xor_str)
    assert ((utils.str2num(('0b' + mks_str)) ^ x).bin() == xor_str)
    assert ((utils.str2num(('0b' + mks_str)) ^ xu).bin() == xor_str)
class OptionPlotoptionsAreasplineSonificationContexttracksMapping(Options):
    """Highcharts mapping options for areaspline sonification context tracks.

    Most accessors return lazily-created sub-option objects; ``text`` is a
    plain value.  NOTE(review): ``text`` is defined twice (getter-style,
    then setter-style) -- the original generator almost certainly emitted
    ``@property`` / ``@text.setter`` decorators that were stripped from
    this copy.
    """
    def frequency(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsAreasplineSonificationContexttracksMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsAreasplineSonificationContexttracksMappingGapbetweennotes)
    def highpass(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsAreasplineSonificationContexttracksMappingHighpass)
    def lowpass(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsAreasplineSonificationContexttracksMappingLowpass)
    def noteDuration(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsAreasplineSonificationContexttracksMappingNoteduration)
    def pan(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsAreasplineSonificationContexttracksMappingPan)
    def pitch(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsAreasplineSonificationContexttracksMappingPitch)
    def playDelay(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsAreasplineSonificationContexttracksMappingPlaydelay)
    def rate(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsAreasplineSonificationContexttracksMappingRate)
    def text(self):
        # Getter: mapped text value (default None).
        return self._config_get(None)
    def text(self, text: str):
        self._config(text, js_type=False)
    def time(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsAreasplineSonificationContexttracksMappingTime)
    def tremolo(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsAreasplineSonificationContexttracksMappingTremolo)
    def volume(self) -> 'OptionPlotoptionsAreasplineSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsAreasplineSonificationContexttracksMappingVolume)
class CirnoDropCards(GenericAction):
    """Game action: reveal the given cards to all players, then drop them
    from `target` via a nested :class:`DropCards` action.
    """

    def __init__(self, source, target, cards):
        # Plain data holder; the game engine drives apply_action().
        self.source, self.target, self.cards = source, target, cards

    def apply_action(self):
        game = self.game
        # Everyone sees the cards before they are discarded.
        game.players.reveal(self.cards)
        game.process_action(DropCards(self.source, self.target, self.cards))
        return True
class OptionSeriesAreasplineSonificationContexttracksMappingVolume(Options):
    """Generated wrapper for the Highcharts option subtree
    ``series.areaspline.sonification.contextTracks.mapping.volume``.

    NOTE(review): each option is a same-named getter/setter pair — the
    generator emits these with ``@property`` / ``@<name>.setter``
    decorators, which appear stripped here (the second ``def`` shadows the
    first as written). Confirm against the generated original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def main():
    """Check out each selected app's source at every build's revision and
    prepare its srclibs (without building), then print what was set up.
    """
    parser = argparse.ArgumentParser(usage='%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]')
    common.setup_global_opts(parser)
    parser.add_argument('appid', nargs='*', help=_('applicationId with optional versionCode in the form APPID[:VERCODE]'))
    metadata.add_metadata_arguments(parser)
    options = parser.parse_args()
    common.options = options

    # Resolve the requested appids against the metadata store.
    pkgs = common.read_pkg_args(options.appid, True)
    allapps = metadata.read_metadata(pkgs)
    apps = common.read_app_args(options.appid, allapps, True)
    common.read_config(options)

    srclib_dir = os.path.join('build', 'srclib')
    os.makedirs(srclib_dir, exist_ok=True)

    collected_srclibs = []
    for appid, app in apps.items():
        vcs, _unused = common.setup_vcs(app)
        for build in app.get('Builds', []):
            vcs.gotorevision(build.commit, refresh=False)
            # Sync submodule state to what this build declares.
            if build.submodules:
                vcs.initsubmodules()
            else:
                vcs.deinitsubmodules()
            for lib in build.srclibs:
                collected_srclibs.append(common.getsrclib(lib, srclib_dir, prepare=False, build=build))

    print('Set up srclibs:')
    pprint.pprint(collected_srclibs)
class TestIN(unittest.TestCase):
    """India (UTC+05:30): 18:30 UTC is midnight IST, so times at/after it
    belong to the *next* IST day."""

    def test_get_start_of_day(self):
        cases = [
            # exactly midnight IST -> start of the new IST day
            ('2020-12-16 18:30:00+00:00', '2020-12-17 00:00:00+05:30'),
            # one minute earlier -> still the previous IST day
            ('2020-12-16 18:29:00+00:00', '2020-12-16 00:00:00+05:30'),
            ('2020-12-17 00:00:00+00:00', '2020-12-17 00:00:00+05:30'),
        ]
        for given, expected in cases:
            assert get_start_of_day(arrow.get(given).datetime) == arrow.get(expected).datetime
def apply_unifier(x, subst):
    """Apply substitution map `subst` (var name -> term) to term `x`.

    Returns None when `subst` is None (failed unification) or when `x` is
    not a recognized term type; returns `x` unchanged for an empty
    substitution or a constant.
    """
    if subst is None:
        return None
    if len(subst) == 0:
        return x
    if isinstance(x, Const):
        return x
    if isinstance(x, Var):
        # Chase the binding recursively; unbound variables pass through.
        return apply_unifier(subst[x.name], subst) if x.name in subst else x
    if isinstance(x, App):
        return App(x.fname, [apply_unifier(arg, subst) for arg in x.args])
    return None
class op(bpy.types.Operator):
    """TexTools operator: unwrap the mesh as a pipe along the edges
    selected in the 3D viewport ("Edge Peel")."""
    bl_idname = 'uv.textools_unwrap_edge_peel'
    bl_label = 'Edge Peel'
    bl_description = 'Unwrap as a pipe along the edges selected in 3D Space'
    bl_options = {'REGISTER', 'UNDO'}

    # NOTE(review): Blender expects poll() to be a @classmethod; the
    # decorator appears stripped in this source — confirm upstream.
    def poll(cls, context):
        # Require: an active MESH object in Edit mode with edge select enabled.
        if (not bpy.context.active_object):
            return False
        if (bpy.context.active_object.type != 'MESH'):
            return False
        if (bpy.context.active_object.mode != 'EDIT'):
            return False
        if (tuple(bpy.context.scene.tool_settings.mesh_select_mode)[1] == False):
            return False
        return True

    def execute(self, context):
        # Remember UI state so it can be restored after the operator runs.
        selection_mode = bpy.context.scene.tool_settings.uv_select_mode
        is_sync = bpy.context.scene.tool_settings.use_uv_select_sync
        context_override = utilities_ui.GetContextViewUV()
        if (not context_override):
            self.report({'ERROR_INVALID_INPUT'}, 'This tool requires an available UV/Image view')
            return {'CANCELLED'}
        padding = utilities_ui.get_padding()
        # Run the pipe unwrap once per selected object, then normalize and pack.
        utilities_uv.multi_object_loop(unwrap_edges_pipe, self, context, padding)
        bpy.ops.uv.average_islands_scale()
        bpy.ops.uv.pack_islands(rotate=False, margin=padding)
        # If the object lives on a non-default UDIM tile, shift the result there.
        (udim_tile, column, row) = utilities_uv.get_UDIM_tile_coords(bpy.context.active_object)
        if (udim_tile != 1001):
            # temp_override() replaced the positional context-dict API in Blender 3.2.
            if (settings.bversion >= 3.2):
                with bpy.context.temp_override(**context_override):
                    bpy.ops.transform.translate(value=(column, row, 0), mirror=False, use_proportional_edit=False)
            else:
                bpy.ops.transform.translate(context_override, value=(column, row, 0), mirror=False, use_proportional_edit=False)
        if (settings.bversion >= 3.2):
            with bpy.context.temp_override(**context_override):
                bpy.ops.uv.select_mode(type='VERTEX')
        # Restore the user's UV selection mode and sync setting.
        bpy.context.scene.tool_settings.uv_select_mode = selection_mode
        if is_sync:
            bpy.context.scene.tool_settings.use_uv_select_sync = True
        return {'FINISHED'}
def extractWwwDaisytannenbaumCom(item):
    """Parse a feed item from daisytannenbaum.com into a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def handle_bad_request(view_function):
    """Decorator for Django views: convert a raised BadRequestError into a
    rendered HTTP 400 response using the ``500.html`` template.

    NOTE(review): the original body contained a bare ``(view_function)``
    expression — almost certainly a stripped ``@functools.wraps``
    decorator. Restored here so the wrapped view keeps its name/docstring
    (important for URL introspection and debugging).
    """
    from functools import wraps

    @wraps(view_function)
    def wrapper(request, *args, **kwargs):
        try:
            return view_function(request, *args, **kwargs)
        except BadRequestError as e:
            context = {'error_code': 400, 'reason': str(e)}
            return render(request, '500.html', context, status=400)
    return wrapper
class OptionSeriesSunburstSonificationContexttracksMappingTremoloSpeed(Options):
    """Generated wrapper for the Highcharts option subtree
    ``series.sunburst.sonification.contextTracks.mapping.tremolo.speed``.

    NOTE(review): each option is a same-named getter/setter pair — the
    generator emits these with ``@property`` / ``@<name>.setter``
    decorators, which appear stripped here (the second ``def`` shadows the
    first as written). Confirm against the generated original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def _parse_from_to_timestamps(param_from: (datetime or None), param_to: (datetime or None)) -> Tuple[(int, int)]:
from_timestamp = None
to_timestamp = None
if (param_from is not None):
from_timestamp = int((param_from.timestamp() * 1000))
if (param_to is None):
param_to = datetime.utcnow()
if (param_to is not None):
if (param_from is None):
from_timestamp = 1
to_timestamp = int((param_to.timestamp() * 1000))
return (from_timestamp, to_timestamp) |
def patch_function(func, *additional_modules):
    """Return a wrapper that runs `func` with green (monkeypatched) modules
    temporarily installed in sys.modules.

    `additional_modules` is a sequence of (name, module) pairs; when empty,
    the default set of green os/select/socket/thread/time modules is used.
    The originals are saved before the call and restored afterwards, even
    if `func` raises.

    NOTE(review): the wrapper is not decorated with functools.wraps, so
    `patched` does not carry the original function's metadata — confirm
    whether that matters to callers before changing it.
    """
    if (not additional_modules):
        # Default: swap in every green stdlib replacement.
        additional_modules = ((((_green_os_modules() + _green_select_modules()) + _green_socket_modules()) + _green_thread_modules()) + _green_time_modules())
    def patched(*args, **kw):
        saver = SysModulesSaver()
        for (name, mod) in additional_modules:
            saver.save(name)
            sys.modules[name] = mod
        try:
            return func(*args, **kw)
        finally:
            # Always restore the real modules, even on exception.
            saver.restore()
    return patched
class OptionSeriesScatterSonificationTracksMappingFrequency(Options):
    """Generated wrapper for the Highcharts option subtree
    ``series.scatter.sonification.tracks.mapping.frequency``.

    NOTE(review): each option is a same-named getter/setter pair — the
    generator emits these with ``@property`` / ``@<name>.setter``
    decorators, which appear stripped here (the second ``def`` shadows the
    first as written). Confirm against the generated original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsSeriesSonificationTracksMappingRate(Options):
    """Generated wrapper for the Highcharts option subtree
    ``plotOptions.series.sonification.tracks.mapping.rate``.

    NOTE(review): each option is a same-named getter/setter pair — the
    generator emits these with ``@property`` / ``@<name>.setter``
    decorators, which appear stripped here (the second ``def`` shadows the
    first as written). Confirm against the generated original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def run_task(task, year, month, **kwargs):
    """Run an import task for (year, month), recording the outcome in TaskLog.

    Skips the run entirely if a successful log entry already exists for the
    same task and period. On any failure (including KeyboardInterrupt and
    other BaseExceptions) the traceback is logged, Slack is notified, and
    the exception is re-raised.
    """
    already_succeeded = TaskLog.objects.filter(
        year=year, month=month, task_name=task.name, status=TaskLog.SUCCESSFUL
    ).exists()
    if already_succeeded:
        return

    log_entry = TaskLog.objects.create(year=year, month=month, task_name=task.name)
    try:
        task.run(year, month, **kwargs)
        log_entry.mark_succeeded()
    except BaseException:
        import traceback
        log_entry.mark_failed(formatted_tb=traceback.format_exc())
        notify_slack(
            'Importing data for {}_{} has failed when running {}.'.format(year, month, task.name),
            is_error=True,
        )
        raise
class HeartRateMeasurementChrc(Characteristic):
    """GATT Heart Rate Measurement characteristic that emits simulated
    readings over D-Bus PropertiesChanged while notifications are enabled."""
    # Standard Bluetooth SIG UUID for the Heart Rate Measurement characteristic.
    HR_MSRMT_UUID = '00002a37-0000-1000-8000-00805f9b34fb'

    def __init__(self, bus, index, service):
        Characteristic.__init__(self, bus, index, self.HR_MSRMT_UUID, ['notify'], service)
        self.notifying = False
        self.hr_ee_count = 0

    def hr_msrmt_cb(self):
        """Timer callback: build and broadcast one simulated measurement."""
        value = []
        value.append(dbus.Byte(6))
        # Random heart rate between 90 and 130 bpm.
        value.append(dbus.Byte(randint(90, 130)))
        # Every 10th reading, set the Energy Expended flag (bit 3) and
        # append the 16-bit energy value (little-endian).
        if ((self.hr_ee_count % 10) == 0):
            value[0] = dbus.Byte((value[0] | 8))
            value.append(dbus.Byte((self.service.energy_expended & 255)))
            value.append(dbus.Byte(((self.service.energy_expended >> 8) & 255)))
        self.service.energy_expended = min(65535, (self.service.energy_expended + 1))
        self.hr_ee_count += 1
        print(('Updating value: ' + repr(value)))
        self.PropertiesChanged(GATT_CHRC_IFACE, {'Value': value}, [])
        # GLib repeats the timeout while the callback returns True, so
        # returning self.notifying keeps the simulation alive until StopNotify.
        return self.notifying

    def _update_hr_msrmt_simulation(self):
        print('Update HR Measurement Simulation')
        if (not self.notifying):
            return
        # Fire hr_msrmt_cb once per second while notifying.
        GObject.timeout_add(1000, self.hr_msrmt_cb)

    def StartNotify(self):
        if self.notifying:
            print('Already notifying, nothing to do')
            return
        self.notifying = True
        self._update_hr_msrmt_simulation()

    def StopNotify(self):
        if (not self.notifying):
            print('Not notifying, nothing to do')
            return
        self.notifying = False
        self._update_hr_msrmt_simulation()
class Attachment():
    """An email attachment and its wire representation.

    `payload()` produces the dict shape expected by the mail API; note the
    mixed key casing ('Filename', 'Content-type', 'content') is part of
    that wire format and must not be normalized.
    """

    def __init__(self, filename, content_type, content, disposition='attachment'):
        self.filename, self.content_type = filename, content_type
        self.content, self.disposition = content, disposition

    def payload(self):
        return {
            'Filename': self.filename,
            'Content-type': self.content_type,
            'content': self.content,
        }
class OptionSeriesOrganizationSonificationTracksMappingPan(Options):
    """Generated wrapper for the Highcharts option subtree
    ``series.organization.sonification.tracks.mapping.pan``.

    NOTE(review): each option is a same-named getter/setter pair — the
    generator emits these with ``@property`` / ``@<name>.setter``
    decorators, which appear stripped here (the second ``def`` shadows the
    first as written). Confirm against the generated original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class DWARFReader(object):
    """Read DWARF debug info from an ELF binary (via pyelftools) and render
    C-like declarations for functions and typedefs.

    Builds three lookup tables at construction time:
      - offset_lut:   DIE offset -> DIE (every DIE in every CU)
      - function_lut: function name -> DW_TAG_subprogram DIE
      - typedef_lut:  typedef name -> DW_TAG_typedef DIE
    """

    def __init__(self, filename):
        """`filename` is an open binary file object for an ELF file."""
        self._elffile = ELFFile(filename)
        self._dwarf = self._elffile.get_dwarf_info()
        self._build_offset_lut()

    def _build_offset_lut(self):
        """Index every DIE by offset, plus named functions and typedefs."""
        self.offset_lut = {}
        self.function_lut = {}
        self.typedef_lut = {}
        for cu in self._dwarf.iter_CUs():
            for die in cu.iter_DIEs():
                # DIE offsets are file-global, so a duplicate means corrupt input.
                if die.offset in self.offset_lut:
                    raise ValueError('Collision on Symbols Offsets')
                self.offset_lut[die.offset] = die
                if die.tag == 'DW_TAG_subprogram':
                    if 'DW_AT_name' in die.attributes:
                        self.function_lut[die.attributes['DW_AT_name'].value] = die
                elif die.tag == 'DW_TAG_typedef':
                    if 'DW_AT_name' in die.attributes:
                        self.typedef_lut[die.attributes['DW_AT_name'].value] = die

    def get_referenced_die(self, attr_str, die):
        """Follow a reference attribute (e.g. 'DW_AT_type') to its DIE.

        Returns None when the attribute is absent or the target offset is
        unknown. Reference values are CU-relative, hence the cu_offset add.
        """
        try:
            attr = die.attributes[attr_str]
            offset = attr.value + die.cu.cu_offset
            return self.offset_lut[offset]
        except KeyError:
            return None

    def get_type_size(self, die):
        """Size in bytes of the type described by `die`, following
        DW_AT_type references; -1 when no size can be determined."""
        if 'DW_AT_byte_size' in die.attributes:
            return die.attributes['DW_AT_byte_size'].value
        elif 'DW_AT_type' in die.attributes:
            type_die = self.get_referenced_die('DW_AT_type', die)
            return self.get_type_size(type_die)
        else:
            return -1

    def get_type_str(self, die, str_list=None):
        """Recursively render the C-like type string for `die`'s type.

        `str_list` accumulates tokens across recursive calls; the final
        result is the tokens joined with spaces.
        """
        if str_list is None:
            str_list = []
        if 'DW_AT_type' in die.attributes:
            type_die = self.get_referenced_die('DW_AT_type', die)
            if type_die.tag == 'DW_TAG_pointer_type':
                self.get_type_str(type_die, str_list)
                str_list.append('*')
            elif type_die.tag == 'DW_TAG_const_type':
                str_list.append('const')
                self.get_type_str(type_die, str_list)
            elif type_die.tag == 'DW_TAG_volatile_type':
                str_list.append('volatile')
                self.get_type_str(type_die, str_list)
            elif type_die.tag == 'DW_TAG_union_type':
                members = []
                for c in type_die.iter_children():
                    members.append(self.get_type_str(c, []))
                # BUGFIX: the original indexed/joined the loop variable
                # `member_str` (the *last* member's string, so [0] was its
                # first character) instead of the collected `members` list.
                # Joining also covers the single-member case.
                str_list.append('union {%s};' % ','.join(members))
            elif type_die.tag == 'DW_TAG_array_type':
                self.get_type_str(type_die, str_list)
                str_list.append('[]')
            elif type_die.tag == 'DW_TAG_enumeration_type':
                str_list.append('enum')
                self.get_type_str(type_die, str_list)
            elif type_die.tag == 'DW_TAG_subroutine_type':
                params = []
                for c in type_die.iter_children():
                    params.append(self.get_type_str(c, []))
                # Joining covers both the single- and multi-parameter cases.
                str_list.append('(%s)' % ','.join(params))
            else:
                type_name = type_die.attributes['DW_AT_name'].value
                str_list.append(type_name)
        else:
            # No DW_AT_type means the void type.
            str_list.append('void')
        return ' '.join(str_list)

    def get_parameter_dies(self, funct_die):
        """Return the DW_TAG_formal_parameter children of a function DIE."""
        params = []
        for child in funct_die.iter_children():
            if child.tag == 'DW_TAG_formal_parameter':
                params.append(child)
        return params

    def get_ret_type_str(self, funct_die):
        """Render the return type of a function DIE ('void' when absent)."""
        if 'DW_AT_type' in funct_die.attributes:
            ret_type = self.get_type_str(funct_die)
        else:
            ret_type = 'void'
        return ret_type

    def get_function_parameters(self, funct_die):
        """Render a full C-like prototype string for a subprogram DIE.

        Raises TypeError when `funct_die` is not a DW_TAG_subprogram.
        """
        if funct_die.tag != 'DW_TAG_subprogram':
            raise TypeError('funct_die.tag not a of type DW_TAG_subprogram')
        name = funct_die.attributes['DW_AT_name'].value
        params = []
        for child in funct_die.iter_children():
            if child.tag == 'DW_TAG_formal_parameter':
                param_type_str = self.get_type_str(child)
                param_name = child.attributes['DW_AT_name'].value
                params.append(param_type_str + ' ' + param_name)
        ret_type = self.get_ret_type_str(funct_die)
        return '%s %s(%s);' % (ret_type, name, ', '.join(params))

    def get_function_die(self, f_name):
        """Look up a subprogram DIE by function name (KeyError if absent)."""
        return self.function_lut[f_name]

    def get_return_type_die(self, func_die):
        # NOTE(review): unimplemented stub in the original; kept as-is.
        return

    def get_function_prototype(self, f_name):
        """Render the prototype of the named function."""
        f_die = self.function_lut[f_name]
        return self.get_function_parameters(f_die)

    def get_param_name(self, param_die):
        """Name of a formal-parameter DIE."""
        return param_die.attributes['DW_AT_name'].value

    def get_typedef_desc_from_str(self, typedef_str):
        """Describe the named typedef; see get_typedef_desc_from_die."""
        tydef_die = self.typedef_lut[typedef_str]
        return self.get_typedef_desc_from_die(tydef_die)

    def get_enum_str(self, enum_die):
        """Render 'enum {NAME=0x...; ...};' for an enumeration DIE."""
        member_strs = []
        for child in enum_die.iter_children():
            if child.tag == 'DW_TAG_enumerator':
                child_name = child.attributes['DW_AT_name'].value
                value = child.attributes['DW_AT_const_value'].value
                member_strs.append('%s=%s;' % (child_name, hex(value)))
        return 'enum {%s}; ' % ' '.join(member_strs)

    def get_typedef_desc_from_die(self, tydef_die):
        """Return (description string, size) for a typedef DIE.

        Structs are expanded member-by-member with sizes; enums, pointers
        and scalar types get a one-line rendering.
        """
        name = tydef_die.attributes['DW_AT_name'].value
        type_die = self.get_referenced_die('DW_AT_type', tydef_die)
        if 'DW_AT_byte_size' in type_die.attributes:
            size = type_die.attributes['DW_AT_byte_size'].value
        else:
            size = None
        member_strs = []
        if type_die.tag == 'DW_TAG_structure_type':
            for child in type_die.iter_children():
                child_name = child.attributes['DW_AT_name'].value
                child_type_str = self.get_type_str(child)
                c_size = self.get_type_size(child)
                decl_str = '\t%s %s; Size: %i\n' % (child_type_str, child_name, c_size)
                member_strs.append(decl_str)
            ret_str = 'struct %s {\n%s};' % (name, ''.join(member_strs))
        elif type_die.tag == 'DW_TAG_enumeration_type':
            ret_str = self.get_enum_str(type_die)
        elif type_die.tag == 'DW_TAG_pointer_type':
            size = self.get_type_size(type_die)
            ret_str = (self.get_type_str(type_die) + ' * ; Size: ') + str(size)
        else:
            size = self.get_type_size(type_die)
            ret_str = (self.get_type_str(type_die) + 'Size: ') + str(size)
        return (ret_str, size)
class CustomLogicCondition(ConditionInterface, Generic[LOGICCLASS]):
    """Logic-condition wrapper backed by a `World` term graph.

    A condition is represented by a `Variable` (`self._variable`) whose
    definition in the World graph is the actual boolean term; `_condition`
    resolves the variable to that term. Boolean algebra is done via the
    World's bitwise operations on 1-bit values.

    NOTE(review): many methods here (`_condition`, `context`, `operands`,
    `is_true`, `is_false`, ...) are used elsewhere in this class as plain
    attributes (e.g. `self.is_true or self.is_false` and `self.context.define`),
    which only works if they are `@property`s — and `from_dict`/`initialize_*`/
    `disjunction_of` take `cls`, implying `@classmethod`. The decorators
    appear stripped in this source; confirm against upstream before editing.
    """
    def __init__(self, condition: WorldObject, tmp: bool=False):
        # Wrap an existing Variable directly; otherwise mint a fresh
        # variable of the right bit-size and define it as `condition`.
        if isinstance(condition, Variable):
            self._variable = condition
        else:
            self._variable: BaseVariable = condition.world.new_variable(condition.size, tmp)
            self.context.define(self._variable, condition)
    def generate_new_context(cls) -> World:
        """Create a fresh, empty term-graph context."""
        return World()
    def _condition(self) -> WorldObject:
        """The term this condition's variable is defined as (or the
        variable itself when it has no definition, i.e. it is a symbol)."""
        if (term := self.context.get_definition(self._variable)):
            return term
        return self._variable
    def __len__(self) -> int:
        """Number of variable operands across all operations in the term."""
        if isinstance(self._condition, Variable):
            return 1
        count = 0
        for node in self.context.iter_postorder(self._condition):
            if (not isinstance(node, Operation)):
                continue
            count += sum((1 for op in node.operands if isinstance(op, Variable)))
        return count
    def __str__(self) -> str:
        condition = self._condition
        # 1-bit constants print as boolean literals.
        if (isinstance(condition, Constant) and (condition.size == 1)):
            return ('false' if (condition.unsigned == 0) else 'true')
        return str(condition)
    def copy(self) -> LOGICCLASS:
        return self.__class__(self._condition)
    def initialize_symbol(cls, name: str, context: World) -> LOGICCLASS:
        """A free 1-bit variable (a propositional symbol)."""
        return cls(context.variable(name, 1))
    def initialize_true(cls, context: World) -> LOGICCLASS:
        return cls(context.constant(1, 1))
    def initialize_false(cls, context: World) -> LOGICCLASS:
        return cls(context.constant(0, 1))
    def disjunction_of(cls, clauses: Sequence[LOGICCLASS]) -> LOGICCLASS:
        world = clauses[0].context
        return cls(world.bitwise_or(*(clause._condition for clause in clauses)))
    def conjunction_of(cls, clauses: Sequence[LOGICCLASS]) -> LOGICCLASS:
        world = clauses[0].context
        return cls(world.bitwise_and(*(clause._condition for clause in clauses)))
    def __and__(self, other: LOGICCLASS) -> LOGICCLASS:
        return self.__class__(self.context.bitwise_and(self._condition, other._condition))
    def __or__(self, other: LOGICCLASS) -> LOGICCLASS:
        return self.__class__(self.context.bitwise_or(self._condition, other._condition))
    def __invert__(self) -> LOGICCLASS:
        return self.__class__(self._custom_negate(self._condition))
    def _custom_negate(self, condition: WorldObject) -> WorldObject:
        """Negate, collapsing a double negation instead of stacking one."""
        if isinstance(condition, BitwiseNegate):
            return condition.operand
        return self.context.bitwise_negate(condition)
    def context(self) -> World:
        return self._variable.world
    def is_true(self) -> bool:
        return (isinstance(self._condition, Constant) and (self._condition.unsigned != 0))
    def is_false(self) -> bool:
        return (isinstance(self._condition, Constant) and (self._condition.unsigned == 0))
    def is_disjunction(self) -> bool:
        return isinstance(self._condition, BitwiseOr)
    def is_conjunction(self) -> bool:
        return isinstance(self._condition, BitwiseAnd)
    def is_negation(self) -> bool:
        return isinstance(self._condition, BitwiseNegate)
    def operands(self) -> List[LOGICCLASS]:
        return self._get_operands()
    def _get_operands(self, tmp: bool=False) -> List[LOGICCLASS]:
        """Wrap each operand of the top-level operation as a condition."""
        condition = self._condition
        if isinstance(condition, BitVector):
            return []
        assert isinstance(condition, Operation), f'The condition must be an operation.'
        return [self.__class__(operand, tmp) for operand in condition.operands]
    def is_symbol(self) -> bool:
        return self._is_symbol(self._condition)
    def is_literal(self) -> bool:
        return self._is_literal(self._condition)
    def is_disjunction_of_literals(self) -> bool:
        return self._is_disjunction_of_literals(self._condition)
    def is_cnf_form(self) -> bool:
        if (self.is_true or self.is_false or self.is_disjunction_of_literals):
            return True
        return (self.is_conjunction and all((self._is_disjunction_of_literals(clause) for clause in self._condition.operands)))
    def is_equal_to(self, other: LOGICCLASS) -> bool:
        return World.compare(self._condition, other._condition)
    def does_imply(self, other: LOGICCLASS) -> bool:
        """Check implication by simplifying (!self | other) to true.

        Builds a temporary condition in the graph and cleans it up after.
        """
        tmp_condition = self.__class__(self.context.bitwise_or(self._custom_negate(self._condition), other._condition))
        self.context.free_world_condition(tmp_condition._variable)
        tmp_condition._variable.simplify()
        does_imply_value = tmp_condition.is_true
        self.context.cleanup([tmp_condition._variable])
        return does_imply_value
    def to_cnf(self) -> LOGICCLASS:
        """In-place conversion to conjunctive normal form."""
        if self.is_cnf_form:
            return self
        self.context.free_world_condition(self._variable)
        ToCnfVisitor(self._variable)
        return self
    def to_dnf(self) -> LOGICCLASS:
        """Disjunctive-normal-form copy (the original is left untouched)."""
        dnf_form = self.copy()
        self.context.free_world_condition(dnf_form._variable)
        ToDnfVisitor(dnf_form._variable)
        return dnf_form
    def simplify(self) -> LOGICCLASS:
        """Simplify the term in the graph (in place)."""
        if isinstance(self._variable, Variable):
            self.context.free_world_condition(self._variable)
            self._variable.simplify()
        else:
            # Wrap the raw term in a named variable so it can be simplified,
            # then rebind to a fresh temporary variable.
            new_var = self.context.variable(f'Simplify', 1)
            self.context.define(new_var, self._condition)
            self.context.free_world_condition(new_var)
            new_var.simplify()
            self._variable = self.context.new_variable(1, tmp=True)
            self.context.substitute(new_var, self._variable)
        return self
    def get_symbols(self) -> Iterator[LOGICCLASS]:
        for symbol in self._get_symbols(self._condition):
            (yield self.__class__(symbol))
    def get_symbols_as_string(self) -> Iterator[str]:
        for symbol in self._get_symbols(self._condition):
            (yield str(symbol))
    def get_literals(self) -> Iterator[LOGICCLASS]:
        for literal in self._get_literals(self._condition):
            (yield self.__class__(literal))
    def substitute_by_true(self, condition: LOGICCLASS) -> LOGICCLASS:
        """Assuming `condition` holds, simplify self by removing the parts
        it already guarantees (e.g. drop shared conjuncts)."""
        assert (self.context == condition.context), f'The condition must be contained in the same graph.'
        if ((not self.is_true) and (self.is_equal_to(condition) or condition.does_imply(self))):
            self._replace_condition_by_true()
            return self
        self.to_cnf()
        if (self.is_true or self.is_false or self.is_negation or self.is_symbol):
            return self
        condition_operands: List[LOGICCLASS] = condition._get_operands()
        operands: List[LOGICCLASS] = self._get_operands()
        numb_of_arg_expr: int = (len(operands) if self.is_conjunction else 1)
        numb_of_arg_cond: int = (len(condition_operands) if condition.is_conjunction else 1)
        # Nothing to strip when self has no more conjuncts than the premise.
        if (numb_of_arg_expr <= numb_of_arg_cond):
            self.context.cleanup()
            return self
        subexpressions: List[LOGICCLASS] = ([condition] if (numb_of_arg_cond == 1) else condition_operands)
        self._replace_subexpressions_by_true(subexpressions)
        to_remove = [cond._variable for cond in (condition_operands + operands) if (cond._variable != cond._condition)]
        self.context.cleanup(to_remove)
        return self
    def _replace_subexpressions_by_true(self, subexpressions: List[LOGICCLASS]):
        """Remove operands of self that are equivalent to any subexpression."""
        for (sub_expr_1, sub_expr_2) in product(subexpressions, self.operands):
            if sub_expr_1.is_equivalent_to(sub_expr_2):
                relations = self.context.get_relation(self._condition, sub_expr_2._condition)
                for relation in relations:
                    self.context.remove_operand(self._condition, relation.sink)
    def _replace_condition_by_true(self) -> None:
        if self.is_symbol:
            # A symbol cannot be redefined in place; rebind to a new
            # variable defined as the constant true.
            self._variable: BaseVariable = self.context.new_variable(self._condition.size)
            self.context.define(self._variable, self.context.constant(1, 1))
        else:
            self.context.replace(self._condition, self.context.constant(1, 1))
        self.context.cleanup()
    def remove_redundancy(self, condition_handler: ConditionHandler) -> LOGICCLASS:
        """Simplify by temporarily expanding symbols into the real pseudo
        conditions they stand for, simplifying, then folding back."""
        if (self.is_literal or self.is_true or self.is_false):
            return self
        assert isinstance(self._condition, Operation), 'We only remove redundancy for operations'
        real_condition = self._replace_symbols_by_real_conditions(condition_handler)
        self.context.free_world_condition(real_condition._variable)
        real_condition.simplify()
        self.get_logic_condition(real_condition, condition_handler)
        self.context.replace(self._condition, real_condition._condition)
        self.context.cleanup()
        return self
    def get_logic_condition(cls, real_condition: PseudoCustomLogicCondition, condition_handler: ConditionHandler) -> Optional[CustomLogicCondition]:
        """Fold non-logic (comparison) operations in `real_condition` back
        into condition symbols, registering new conditions as needed."""
        context = real_condition.context
        # Everything that is not pure and/or/not logic must become a symbol.
        non_logic_operands = {node for node in context.iter_postorder(real_condition._variable) if (isinstance(node, Operation) and (not isinstance(node, (BitwiseOr, BitwiseAnd, BitwiseNegate))))}
        replacement_dict = dict()
        expression_of_variables: Dict[(Variable, pseudo.Expression)] = dict()
        for symbol in condition_handler:
            replacement_dict[condition_handler.get_z3_condition_of(symbol)._condition] = symbol._condition
            for operand in [op for op in condition_handler.get_condition_of(symbol) if (not isinstance(op, pseudo.Constant))]:
                expression_of_variables[context.variable(real_condition._variable_name_for(operand))] = operand
        for operand in non_logic_operands:
            negated_operand = operand.copy_tree().negate()
            # Match the operand (or its negation) against known conditions.
            for (condition, symbol) in replacement_dict.items():
                if World.compare(condition, operand):
                    context.replace(operand, symbol)
                    break
                if World.compare(condition, negated_operand):
                    context.replace(operand, context.bitwise_negate(symbol))
                    break
            else:
                # No known condition matched: build a fresh pseudo Condition
                # from the operand and register it with the handler.
                new_operands = list()
                for op in operand.operands:
                    if (op in expression_of_variables):
                        new_operands.append(expression_of_variables[op])
                    else:
                        assert isinstance(op, Constant), f'The operand must be a Constant'
                        new_operands.append(pseudo.Constant(op.signed, pseudo.Integer(op.size, signed=True)))
                condition_symbol = condition_handler.add_condition(Condition(real_condition.OPERAND_MAPPING[operand.SYMBOL], new_operands))
                context.replace(operand, condition_symbol._condition)
        return cls(real_condition._variable)
    def _replace_symbols_by_real_conditions(self, condition_handler: ConditionHandler) -> PseudoCustomLogicCondition:
        """Copy self and substitute every symbol by its real condition."""
        copied_condition = PseudoCustomLogicCondition(self._condition)
        self.context.free_world_condition(copied_condition._variable)
        condition_nodes = set(self.context.iter_postorder(copied_condition._variable))
        for symbol in self.get_symbols():
            self._replace_symbol(symbol, condition_handler, condition_nodes)
        return copied_condition
    def _replace_symbol(self, symbol: CustomLogicCondition, condition_handler: ConditionHandler, condition_nodes: Set[WorldObject]):
        """Rewire every in-copy parent of `symbol` to the real condition."""
        world_condition = condition_handler.get_z3_condition_of(symbol)._condition
        world_symbol = symbol._condition
        for parent in [parent for parent in self.context.parent_operation(world_symbol) if (parent in condition_nodes)]:
            for relation in self.context.get_relation(parent, world_symbol):
                index = relation.index
                # Preserve operand position when swapping in the condition.
                self.context.remove_operand(parent, relation.sink)
                self.context.add_operand(parent, world_condition, index)
    def serialize(self) -> str:
        return self._condition.accept(SerializeVisitor())
    def deserialize(cls, data: str, context: World) -> LOGICCLASS:
        return CustomLogicCondition(context.from_string(data))
    def rich_string_representation(self, condition_map: Dict[(LOGICCLASS, pseudo.Condition)]):
        """Render with symbols replaced by their pseudo conditions."""
        return self._rich_string_representation(self._condition, {symbol._condition: condition for (symbol, condition) in condition_map.items()})
    def _is_symbol(self, condition: WorldObject) -> bool:
        # A symbol is an undefined 1-bit variable.
        return (isinstance(condition, Variable) and (condition.size == 1) and (self.context.get_definition(condition) is None))
    def _is_literal(self, condition: WorldObject) -> bool:
        # A literal is a symbol or a negated symbol.
        return (self._is_symbol(condition) or (isinstance(condition, BitwiseNegate) and self._is_symbol(condition.operand)))
    def _is_disjunction_of_literals(self, condition: WorldObject) -> bool:
        if self._is_literal(condition):
            return True
        return (isinstance(condition, BitwiseOr) and all((self._is_literal(operand) for operand in condition.operands)))
    def _get_symbols(self, condition: WorldObject) -> Iterator[Variable]:
        for node in self.context.iter_postorder(condition):
            if self._is_symbol(node):
                (yield node)
    def _get_literals(self, condition: WorldObject) -> Iterator[WorldObject]:
        if self._is_literal(condition):
            (yield condition)
        elif isinstance(condition, (BitwiseOr, BitwiseAnd, BitwiseNegate)):
            for child in condition.operands:
                (yield from self._get_literals(child))
        else:
            assert (isinstance(condition, Constant) and (condition.size == 1)), f'The condition {condition} does not consist of literals.'
    def _rich_string_representation(self, condition: WorldObject, condition_map: Dict[(Variable, pseudo.Condition)]) -> str:
        """Pretty-print `condition`, substituting mapped symbols."""
        if self._is_symbol(condition):
            if (condition in condition_map):
                return str(condition_map[condition])
            return f'{condition}'
        if (isinstance(condition, Constant) and (condition.size == 1)):
            return ('false' if (condition.unsigned == 0) else 'true')
        if isinstance(condition, BitwiseNegate):
            original_condition = condition.operand
            if (original_condition in condition_map):
                return str(condition_map[original_condition].negate())
            return f'!{self._rich_string_representation(original_condition, condition_map)}'
        if isinstance(condition, (BitwiseOr, BitwiseAnd)):
            operands = condition.operands
            symbol = ('|' if isinstance(condition, BitwiseOr) else '&')
            if (len(operands) == 1):
                return self._rich_string_representation(operands[0], condition_map)
            return (('(' + f' {symbol} '.join([f'{self._rich_string_representation(operand, condition_map)}' for operand in operands])) + ')')
        return f'{condition}'
    def _variable_name_for(expression: pseudo.Expression) -> str:
        """Stable graph-variable name for a pseudo expression."""
        if isinstance(expression, pseudo.Variable):
            return f'{expression},{expression.ssa_name}'
        return f'{expression},{[str(var.ssa_name) for var in expression.requirements]}'
    # Maps World comparison-operation symbols to pseudo OperationTypes.
    OPERAND_MAPPING = {'==': pseudo.OperationType.equal, '!=': pseudo.OperationType.not_equal, 's<=': pseudo.OperationType.less_or_equal, 'u<=': pseudo.OperationType.less_or_equal_us, 's>': pseudo.OperationType.greater, 'u>': pseudo.OperationType.greater_us, 's<': pseudo.OperationType.less, 'u<': pseudo.OperationType.less_us, 's>=': pseudo.OperationType.greater_or_equal, 'u>=': pseudo.OperationType.greater_or_equal_us}
class TestDistanceSpecificCases(util.ColorAsserts, unittest.TestCase):
    """Delta-E edge cases: repeated calls are stable, and methods that are
    tied to a specific color space reject an incompatible `space` argument."""
    def test_delta_e_alternate_calls(self):
        # Same inputs twice must give identical distances (determinism).
        self.assertCompare(Color('red').delta_e('blue', method='2000'), Color('red').delta_e('blue', method='2000'))
    def test_delta_e_alternate_calls_with_params(self):
        # Determinism must also hold when extra method parameters are passed.
        self.assertCompare(Color('red').delta_e('blue', method='hyab', space='din99o'), Color('red').delta_e('blue', method='hyab', space='din99o'))
    def test_hyab_bad_space(self):
        # HyAB requires a Lab-like space; 'lch' must be rejected.
        with self.assertRaises(ValueError):
            Color('red').delta_e('orange', method='hyab', space='lch')
    def test_76_bad_space(self):
        with self.assertRaises(ValueError):
            Color('red').delta_e('orange', method='76', space='oklab')
    def test_94_bad_space(self):
        with self.assertRaises(ValueError):
            Color('red').delta_e('orange', method='94', space='oklab')
    def test_2000_bad_space(self):
        with self.assertRaises(ValueError):
            Color('red').delta_e('orange', method='2000', space='oklab')
    def test_cmc_bad_space(self):
        with self.assertRaises(ValueError):
            Color('red').delta_e('orange', method='cmc', space='oklab')
def antivirus_quarantine(data, fos):
    """Push the antivirus quarantine configuration to the device via *fos*."""
    vdom = data['vdom']
    # Normalize the payload: flatten multi-value lists, then convert key
    # underscores to the hyphenated form the API expects.
    payload = flatten_multilists_attributes(data['antivirus_quarantine'])
    payload = underscore_to_hyphen(filter_antivirus_quarantine_data(payload))
    return fos.set('antivirus', 'quarantine', data=payload, vdom=vdom)
def rldecode(data):
    """Decode run-length encoded data (PDF RunLengthDecode / PackBits style).

    Encoding: a length byte L followed by payload.
      * 0 <= L <= 127: the next L+1 bytes are copied literally.
      * 129 <= L <= 255: the next single byte is repeated 257-L times.
      * L == 128: end-of-data marker; decoding stops.

    :param data: bytes-like encoded input.
    :returns: decoded ``bytes``.
    """
    # bytearray avoids the quadratic cost of repeated bytes concatenation.
    decoded = bytearray()
    i = 0
    while i < len(data):
        length = data[i]
        if length == 128:
            # EOD marker; any trailing bytes are ignored.
            break
        if length < 128:
            # Literal run of length+1 bytes.
            decoded += data[i + 1:i + 2 + length]
            i += length + 2
        else:
            # Replicate the next byte 257-length times.
            decoded += data[i + 1:i + 2] * (257 - length)
            i += 2
    return bytes(decoded)
def process_youtube(args):
    """Run the YouTube pipeline: load -> (optional) dedup -> summarize -> rank -> push.

    :param args: parsed CLI args; uses data_folder, run_id, dedup, targets.
    :returns: stats object from ``op.createStats``.
    """
    print('')
    print(f'# Process Youtube, dedup: {args.dedup}')
    print('')
    op = OperatorYoutube()
    data = op.readFromJson(args.data_folder, args.run_id, 'youtube.json')
    # Fixed: dropped the dead `data_deduped = data` pre-assignment — both
    # branches below always assign it.
    if utils.str2bool(args.dedup):
        data_deduped = op.dedup(data, target='toread')
    else:
        data_deduped = list(data.values())
    data_summarized = op.summarize(data_deduped)
    data_ranked = op.rank(data_summarized)
    targets = args.targets.split(',')
    pushed_stats = op.push(data_ranked, targets)
    return op.createStats('YouTube', '', data, data_deduped=data_deduped, data_summarized=data_summarized, data_ranked=data_ranked, pushed_stats=pushed_stats)
def test_dpa_initialize_raises_exception_f_max_data_value_is_not_in_0_1():
    """_initialize must reject intermediate data whose values exceed [0, 1]."""
    distinguisher = scared.DPADistinguisher()
    data = np.random.randint(0, 255, (500, 16), dtype='uint8')
    traces = np.random.randint(0, 255, (500, 16), dtype='uint8')
    with pytest.raises(ValueError):
        distinguisher._initialize(traces, data)
class MockChrootsLogic(object):
    """Query and CRUD helpers for ``models.MockChroot`` rows.

    NOTE(review): methods take ``cls`` as the first parameter but no
    ``@classmethod`` decorators are visible in this chunk; they appear to be
    intended as classmethods — confirm against the full file.
    """

    def get(cls, os_release, os_version, arch, active_only=False, noarch=False):
        """Return a query for chroots matching (os_release, os_version[, arch]).

        When ``noarch`` is set and ``arch`` is falsy, match on release/version only.
        """
        if noarch and (not arch):
            query = models.MockChroot.query.filter(
                (models.MockChroot.os_release == os_release),
                (models.MockChroot.os_version == os_version))
        else:
            query = models.MockChroot.query.filter(
                (models.MockChroot.os_release == os_release),
                (models.MockChroot.os_version == os_version),
                (models.MockChroot.arch == arch))
        if active_only:
            query = query.filter(models.MockChroot.is_active)
        return query

    def get_from_name(cls, chroot_name, active_only=False, noarch=False):
        """Look up chroots by a name like ``fedora-38-x86_64``."""
        name_tuple = cls.tuple_from_name(chroot_name, noarch=noarch)
        return cls.get(name_tuple[0], name_tuple[1], name_tuple[2], active_only=active_only, noarch=noarch)

    def get_multiple(cls, active_only=False):
        """Return a query over all chroots, optionally only the active ones."""
        query = models.MockChroot.query
        if active_only:
            # Fixed: filter on the column directly instead of `== True`,
            # consistent with the active_only filter in get().
            query = query.filter(models.MockChroot.is_active)
        return query

    def add(cls, name):
        """Create a new chroot from its name; raise DuplicateException if it exists."""
        name_tuple = cls.tuple_from_name(name)
        if cls.get(*name_tuple).first():
            raise exceptions.DuplicateException('Mock chroot with this name already exists.')
        new_chroot = models.MockChroot(os_release=name_tuple[0], os_version=name_tuple[1], arch=name_tuple[2])
        cls.new(new_chroot)
        return new_chroot

    def active_names(cls):
        """Names of all active chroots."""
        return [ch.name for ch in cls.get_multiple(active_only=True).all()]

    def active_names_with_comments(cls):
        """(name, comment) pairs of all active chroots."""
        return [(ch.name, ch.comment) for ch in cls.get_multiple(active_only=True).all()]

    def new(cls, mock_chroot):
        """Stage a new chroot row in the session (no commit here)."""
        db.session.add(mock_chroot)

    def edit_by_name(cls, name, is_active):
        """Toggle a chroot's active flag; raise NotFoundException if missing."""
        name_tuple = cls.tuple_from_name(name)
        mock_chroot = cls.get(*name_tuple).first()
        if not mock_chroot:
            raise exceptions.NotFoundException("Mock chroot with this name doesn't exist.")
        mock_chroot.is_active = is_active
        if is_active:
            # Re-activation means the final prunerepo pass must run again.
            mock_chroot.final_prunerepo_done = False
        cls.update(mock_chroot)
        return mock_chroot

    def update(cls, mock_chroot):
        """Stage an updated chroot row in the session (no commit here)."""
        db.session.add(mock_chroot)

    def delete_by_name(cls, name):
        """Delete a chroot by name; raise NotFoundException if missing."""
        name_tuple = cls.tuple_from_name(name)
        mock_chroot = cls.get(*name_tuple).first()
        if not mock_chroot:
            raise exceptions.NotFoundException("Mock chroot with this name doesn't exist.")
        cls.delete(mock_chroot)

    def delete(cls, mock_chroot):
        """Stage deletion of a chroot row (no commit here)."""
        db.session.delete(mock_chroot)

    def tuple_from_name(cls, name, noarch=False):
        """Split ``osrelease-version[-arch]`` into a 3-tuple.

        With ``noarch`` the arch component may be omitted and is returned as
        None. Raises MalformedArgumentException on an unparseable name.
        """
        split_name = name.rsplit('-', 1) if noarch else name.rsplit('-', 2)
        valid = False
        if noarch and (len(split_name) in [2, 3]):
            valid = True
        if (not noarch) and (len(split_name) == 3):
            valid = True
        if not valid:
            raise MalformedArgumentException('Chroot identification is not valid')
        if noarch and (len(split_name) == 2):
            split_name.append(None)
        return tuple(split_name)

    def prunerepo_finished(cls, chroots_pruned):
        """Mark inactive pruned chroots as finally pruned; commits the session."""
        for chroot_name in chroots_pruned:
            chroot = cls.get_from_name(chroot_name).one()
            if not chroot.is_active:
                chroot.final_prunerepo_done = True
        db.session.commit()
        return True

    def chroots_prunerepo_status(cls):
        """Map chroot name -> {'active': bool, 'final_prunerepo_done': bool}."""
        query = models.MockChroot.query
        chroots = {}
        for chroot in query:
            chroots[chroot.name] = {'active': bool(chroot.is_active), 'final_prunerepo_done': bool(chroot.final_prunerepo_done)}
        return chroots
def USER_MESSAGE(goal, current_dir):
    """Build the user prompt asking the model for runnable Python code for *goal*."""
    # The template interpolates module-level USERNAME / OPERATING_SYSTEM /
    # PYTHON_VERSION constants plus the per-call goal and working directory.
    prompt = f'''(USER: {USERNAME})
(DIRECTORY: {current_dir})
Write {OPERATING_SYSTEM} python {PYTHON_VERSION} code so I can achieve my goal by running my code. Do not explain anything. Return only the code. My goal: [{goal}]. Don't forget to print the final result. '''
    return prompt
class IsUserTask(DcBasePermission):
    """Permit access when the requester owns the task (user or owner id) or is
    an admin of the task's datacenter."""

    def has_permission(self, request, view, args, kwargs):
        task_id = kwargs.get('task_id')
        if not (task_id and is_valid_task_id(task_id)):
            return False
        user_id, owner_id, dc_id = user_owner_dc_ids_from_task_id(task_id)
        if not user_id or not owner_id:
            return False
        try:
            dc_id = int(dc_id)
        except ValueError:
            return False
        try:
            dc = Dc.objects.get_by_id(dc_id)
        except Dc.DoesNotExist:
            return False
        # Datacenter admins may touch any task in their DC.
        if request.user.is_admin(request, dc=dc):
            return True
        request_user_id = str(request.user.id)
        return request_user_id in (user_id, owner_id)
def extractButterflyscurseStream(item):
    """Map a feed item onto a release message via the tag table.

    Returns None for previews / items without chapter info, a release message
    on a tag match, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('QTF Record', 'Quick Transmigration Cannon Fodders Record of Counterattacks', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CargoBuilder(BuilderBase):
    """Builds a cargo (Rust) project inside the getdeps build tree.

    The source checkout is copied into ``<build_dir>/source`` so that a
    generated cargo config and ``[patch]`` table entries can redirect git
    dependencies to vendored or locally-built copies without modifying the
    original source directory.
    """

    def __init__(self, build_opts: 'BuildOptions', ctx, manifest, src_dir, build_dir, inst_dir, build_doc, workspace_dir, manifests_to_build, loader, cargo_config_file) -> None:
        super(CargoBuilder, self).__init__(build_opts, ctx, manifest, src_dir, build_dir, inst_dir)
        self.build_doc = build_doc
        self.ws_dir = workspace_dir
        # Comma-separated manifest list from the project manifest;
        # None/empty means "build the whole workspace".
        self.manifests_to_build = manifests_to_build and manifests_to_build.split(',')
        self.loader = loader
        self.cargo_config_file_subdir = cargo_config_file

    def run_cargo(self, install_dirs, operation, args=None) -> None:
        """Run ``cargo <operation>`` in the copied workspace directory."""
        args = args or []
        env = self._compute_env(install_dirs)
        # Allow unstable flags (e.g. --out-dir) on a stable toolchain.
        env['RUSTC_BOOTSTRAP'] = '1'
        env['LIBZ_SYS_STATIC'] = '1'
        cmd = ['cargo', operation, '--workspace', '-j%s' % self.num_jobs] + args
        self._run_cmd(cmd, cwd=self.workspace_dir(), env=env)

    def build_source_dir(self):
        """Location of the copied source tree inside the build directory."""
        return os.path.join(self.build_dir, 'source')

    def workspace_dir(self):
        """Directory holding the workspace Cargo.toml within the copied source."""
        return os.path.join(self.build_source_dir(), self.ws_dir or '')

    def manifest_dir(self, manifest):
        """Resolve a per-manifest subdirectory within the copied source."""
        return os.path.join(self.build_source_dir(), manifest)

    def recreate_dir(self, src, dst) -> None:
        """Replace ``dst`` with a fresh copy of ``src``."""
        if os.path.isdir(dst):
            shutil.rmtree(dst)
        shutil.copytree(src, dst)

    def cargo_config_file(self):
        """Path of the generated cargo config (default ``<build_dir>/.cargo/config``)."""
        base_dir = self.build_dir
        if self.cargo_config_file_subdir:
            return os.path.join(base_dir, self.cargo_config_file_subdir)
        else:
            return os.path.join(base_dir, '.cargo', 'config')

    def _create_cargo_config(self):
        """Write the cargo config (build target dir, git-over-cli, vendored
        sources) and return the dependency->git mapping used for patching."""
        cargo_config_file = self.cargo_config_file()
        cargo_config_dir = os.path.dirname(cargo_config_file)
        if not os.path.isdir(cargo_config_dir):
            os.mkdir(cargo_config_dir)
        print(f'Writing cargo config for {self.manifest.name} to {cargo_config_file}')
        with open(cargo_config_file, 'w+') as f:
            f.write("# Generated by getdeps.py\n[build]\ntarget-dir = '''{}'''\n\n[net]\ngit-fetch-with-cli = true\n\n[profile.dev]\ndebug = false\nincremental = false\n".format(self.build_dir.replace('\\', '\\\\')))
        dep_to_git = self._resolve_dep_to_git()
        # Point cargo at vendored checkouts for deps that supply them.
        for _dep, git_conf in dep_to_git.items():
            if 'cargo_vendored_sources' in git_conf:
                with open(cargo_config_file, 'a') as f:
                    vendored_dir = git_conf['cargo_vendored_sources'].replace('\\', '\\\\')
                    f.write(f'''
[source."{git_conf['repo_url']}"]
directory = "{vendored_dir}"
''')
        if self.build_opts.fbsource_dir:
            # Internal-only vendoring support; absent in OSS checkouts.
            try:
                from .facebook.rust import vendored_crates
                vendored_crates(self.build_opts.fbsource_dir, cargo_config_file)
            except ImportError:
                pass
        return dep_to_git

    def _prepare(self, install_dirs, reconfigure) -> None:
        """Copy the source tree and generate cargo config / workspace patches."""
        build_source_dir = self.build_source_dir()
        self.recreate_dir(self.src_dir, build_source_dir)
        dep_to_git = self._create_cargo_config()
        if self.ws_dir is not None:
            self._patchup_workspace(dep_to_git)

    def _build(self, install_dirs, reconfigure) -> None:
        """Build (workspace or listed manifests) and install binaries + source."""
        build_source_dir = self.build_source_dir()
        if self.manifests_to_build is None:
            self.run_cargo(install_dirs, 'build', ['--out-dir', os.path.join(self.inst_dir, 'bin'), '-Zunstable-options'])
        else:
            for manifest in self.manifests_to_build:
                self.run_cargo(install_dirs, 'build', ['--out-dir', os.path.join(self.inst_dir, 'bin'), '-Zunstable-options', '--manifest-path', self.manifest_dir(manifest)])
        # Downstream cargo builds consume this project as vendored source.
        self.recreate_dir(build_source_dir, os.path.join(self.inst_dir, 'source'))

    def run_tests(self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot) -> None:
        """Run ``cargo test`` (and optionally ``cargo doc``); extra scheduling
        parameters are accepted for interface compatibility but unused here."""
        if test_filter:
            args = ['--', test_filter]
        else:
            args = []
        if self.manifests_to_build is None:
            self.run_cargo(install_dirs, 'test', args)
            if self.build_doc:
                self.run_cargo(install_dirs, 'doc', ['--no-deps'])
        else:
            for manifest in self.manifests_to_build:
                margs = ['--manifest-path', self.manifest_dir(manifest)]
                self.run_cargo(install_dirs, 'test', args + margs)
                if self.build_doc:
                    self.run_cargo(install_dirs, 'doc', ['--no-deps'] + margs)

    def _patchup_workspace(self, dep_to_git) -> None:
        """Append a ``[patch]`` table (and a fake [package] stanza if needed)
        to the workspace Cargo.toml so git deps resolve to local paths."""
        workspace_dir = self.workspace_dir()
        config = self._resolve_config(dep_to_git)
        if config:
            patch_cargo = os.path.join(workspace_dir, 'Cargo.toml')
            print(f'writing patch to {patch_cargo}')
            with open(patch_cargo, 'r+') as f:
                manifest_content = f.read()
                if '[package]' not in manifest_content:
                    # A virtual manifest cannot carry [patch]; give it a
                    # placeholder package pointing its lib at the null device.
                    null_file = '/dev/null'
                    if self.build_opts.is_windows():
                        null_file = 'nul'
                    f.write(f'''
[package]
name = "fake_manifest_of_{self.manifest.name}"
version = "0.0.0"
[lib]
path = "{null_file}"
''')
                else:
                    f.write('\n')
                f.write(config)

    def _resolve_config(self, dep_to_git) -> str:
        """Build the ``[patch]`` table text redirecting required crates from
        their git URLs to local source paths."""
        dep_to_crates = self._resolve_dep_to_crates(self.build_source_dir(), dep_to_git)
        config = []
        git_url_to_crates_and_paths = {}
        for dep_name in sorted(dep_to_git.keys()):
            git_conf = dep_to_git[dep_name]
            req_crates = sorted(dep_to_crates.get(dep_name, []))
            if not req_crates:
                continue
            git_url = git_conf.get('repo_url', None)
            crate_source_map = git_conf['crate_source_map']
            if git_url and crate_source_map:
                crates_to_patch_path = git_url_to_crates_and_paths.get(git_url, {})
                for c in req_crates:
                    if (c in crate_source_map) and (c not in crates_to_patch_path):
                        crates_to_patch_path[c] = crate_source_map[c]
                        print(f'{self.manifest.name}: Patching crate {c} via virtual manifest in {self.workspace_dir()}')
                if crates_to_patch_path:
                    git_url_to_crates_and_paths[git_url] = crates_to_patch_path
        for git_url, crates_to_patch_path in git_url_to_crates_and_paths.items():
            crates_patches = ['{} = {{ path = "{}" }}'.format(crate, crates_to_patch_path[crate].replace('\\', '\\\\')) for crate in sorted(crates_to_patch_path.keys())]
            config.append(f'''
[patch."{git_url}"]
''' + '\n'.join(crates_patches))
        return '\n'.join(config)

    def _resolve_dep_to_git(self):
        """Map each cargo-relevant dependency to its git config, including a
        ``crate_source_map`` of crate name -> local source directory."""
        dependencies = self.manifest.get_dependencies(self.ctx)
        if not dependencies:
            # Fixed: return an empty dict — callers iterate .items(), which
            # would fail on the empty list returned previously.
            return {}
        dep_to_git = {}
        for dep in dependencies:
            dep_manifest = self.loader.load_manifest(dep)
            dep_builder = dep_manifest.get('build', 'builder', ctx=self.ctx)
            dep_cargo_conf = dep_manifest.get_section_as_dict('cargo', self.ctx)
            dep_crate_map = dep_manifest.get_section_as_dict('crate.pathmap', self.ctx)
            # Skip deps that are neither cargo-built nor crate-mapped, and the
            # rust toolchain itself.
            if ((not (dep_crate_map or dep_cargo_conf)) and (dep_builder not in ['cargo'])) or (dep == 'rust'):
                continue
            git_conf = dep_manifest.get_section_as_dict('git', self.ctx)
            if (dep != 'rust') and ('repo_url' not in git_conf):
                raise Exception(f'{dep}: A cargo dependency requires git.repo_url to be defined.')
            if dep_builder == 'cargo':
                dep_source_dir = self.loader.get_project_install_dir(dep_manifest)
                dep_source_dir = os.path.join(dep_source_dir, 'source')
            else:
                fetcher = self.loader.create_fetcher(dep_manifest)
                dep_source_dir = fetcher.get_src_dir()
            crate_source_map = {}
            if dep_crate_map:
                # Explicit crate -> subpath mapping from the manifest.
                for crate, subpath in dep_crate_map.items():
                    if crate not in crate_source_map:
                        if self.build_opts.is_windows():
                            subpath = subpath.replace('/', '\\')
                        crate_path = os.path.join(dep_source_dir, subpath)
                        print(f'{self.manifest.name}: Mapped crate {crate} to dep {dep} dir {crate_path}')
                        crate_source_map[crate] = crate_path
            elif dep_cargo_conf:
                # No explicit map: discover crates by scanning Cargo.toml files.
                search_pattern = re.compile('\\[package\\]\nname = "(.*)"')
                for crate_root, _, files in os.walk(dep_source_dir):
                    if 'Cargo.toml' in files:
                        with open(os.path.join(crate_root, 'Cargo.toml'), 'r') as f:
                            content = f.read()
                            match = search_pattern.search(content)
                            if match:
                                crate = match.group(1)
                                if crate:
                                    print(f'{self.manifest.name}: Discovered crate {crate} in dep {dep} dir {crate_root}')
                                    crate_source_map[crate] = crate_root
            git_conf['crate_source_map'] = crate_source_map
            if (not dep_crate_map) and dep_cargo_conf:
                dep_cargo_dir = self.loader.get_project_build_dir(dep_manifest)
                dep_cargo_dir = os.path.join(dep_cargo_dir, 'source')
                dep_ws_dir = dep_cargo_conf.get('workspace_dir', None)
                if dep_ws_dir:
                    dep_cargo_dir = os.path.join(dep_cargo_dir, dep_ws_dir)
                git_conf['cargo_vendored_sources'] = dep_cargo_dir
            dep_to_git[dep] = git_conf
        return dep_to_git

    def _resolve_dep_to_crates(self, build_source_dir, dep_to_git):
        """Collect, per dependency, the set of crates this project actually
        uses (manifest-declared plus those referenced from Cargo.toml files)."""
        if not dep_to_git:
            return {}
        dep_to_crates = {}
        for name, git_conf in dep_to_git.items():
            crates = git_conf['crate_source_map'].keys()
            if crates:
                dep_to_crates.setdefault(name, set()).update(crates)
        for root, _, files in os.walk(build_source_dir):
            for f in files:
                if f == 'Cargo.toml':
                    more_dep_to_crates = CargoBuilder._extract_crates_used(os.path.join(root, f), dep_to_git)
                    for dep_name, crates in more_dep_to_crates.items():
                        existing_crates = dep_to_crates.get(dep_name, set())
                        for c in crates:
                            if c not in existing_crates:
                                print(f'Patch {self.manifest.name} uses {dep_name} crate {c}')
                                # Fixed: sets have add(), not insert().
                                existing_crates.add(c)
                        # Fixed: key by dep_name; previously this used the
                        # stale loop variable `name` from the loop above.
                        dep_to_crates.setdefault(dep_name, set()).update(existing_crates)
        return dep_to_crates

    @staticmethod
    def _extract_crates_used(cargo_toml_file, dep_to_git):
        """Parse a Cargo.toml and return {dep_name: {crate names}} for every
        git dependency line whose URL matches a known dependency."""
        deps_to_crates = {}
        with open(cargo_toml_file, 'r') as f:
            for line in f.readlines():
                if line.startswith('#') or ('git = ' not in line):
                    continue
                for dep_name, conf in dep_to_git.items():
                    if 'git = "{}"'.format(conf['repo_url']) in line:
                        pkg_template = ' package = "'
                        if pkg_template in line:
                            # Renamed dependency: the crate is the package name.
                            crate_name, _, _ = line.partition(pkg_template)[2].partition('"')
                        else:
                            crate_name, _, _ = line.partition('=')
                        deps_to_crates.setdefault(dep_name, set()).add(crate_name.strip())
        return deps_to_crates

    def _resolve_crate_to_path(self, crate, crate_source_map):
        """Locate the directory whose Cargo.toml declares *crate*; raise if none."""
        search_pattern = '[package]\nname = "{}"'.format(crate)
        for _crate, crate_source_dir in crate_source_map.items():
            for crate_root, _, files in os.walk(crate_source_dir):
                if 'Cargo.toml' in files:
                    with open(os.path.join(crate_root, 'Cargo.toml'), 'r') as f:
                        content = f.read()
                        if search_pattern in content:
                            return crate_root
        raise Exception(f'{self.manifest.name}: Failed to find dep crate {crate} in paths {crate_source_map}')
class TestTachoMotorStopActionValue(ptc.ParameterizedTestCase):
    """Exercise the tacho motor ``stop_action`` attribute for each known value."""

    def _check_stop_action(self, action):
        # Supported actions round-trip through the attribute; unsupported ones
        # raise IOError from the driver.
        motor = self._param['motor']
        if action in self._param['stop_actions']:
            motor.stop_action = action
            self.assertEqual(motor.stop_action, action)
        else:
            with self.assertRaises(IOError):
                motor.stop_action = action

    def test_stop_action_illegal(self):
        with self.assertRaises(IOError):
            self._param['motor'].stop_action = 'ThisShouldNotWork'

    def test_stop_action_coast(self):
        self._check_stop_action('coast')

    def test_stop_action_brake(self):
        self._check_stop_action('brake')

    def test_stop_action_hold(self):
        self._check_stop_action('hold')

    def test_stop_action_after_reset(self):
        # Prefer a non-default action when available, then verify that a reset
        # restores the default (first) stop action.
        index = 1 if len(self._param['stop_actions']) >= 2 else 0
        self._param['motor'].stop_action = self._param['stop_actions'][index]
        self._param['motor'].action = 'reset'
        self.assertEqual(self._param['motor'].stop_action, self._param['stop_actions'][0])
def test_register_serialization():
    """Round-trip a REGISTER TacMessage through Envelope encode/decode."""
    msg = TacMessage(performative=TacMessage.Performative.REGISTER, agent_name='some_agent_name')
    msg.to = 'receiver'
    original = Envelope(to=msg.to, sender='sender', message=msg)
    decoded = Envelope.decode(original.encode())
    assert original.to == decoded.to
    assert original.sender == decoded.sender
    assert original.protocol_specification_id == decoded.protocol_specification_id
    # The decoded envelope still carries the wire form, not the message object.
    assert original.message != decoded.message
    recovered = TacMessage.serializer.decode(decoded.message)
    recovered.to = decoded.to
    recovered.sender = decoded.sender
    assert msg == recovered
def _preprocess(x):
    # Derive fundamental-ratio features in place on ``x`` (a mapping/DataFrame
    # of financial columns: rnd, invcap, capex, ebit, ebitda, ev, debt, equity,
    # grossmargin, ebitdamargin) and return it.
    x['rnd_invcap'] = (x['rnd'] / x['invcap'])
    x['capex_invcap'] = (x['capex'] / x['invcap'])
    x['ebit_invcap'] = (x['ebit'] / x['invcap'])
    x['ev_ebitda'] = (x['ev'] / x['ebitda'])
    x['ev_ebit'] = (x['ev'] / x['ebit'])
    x['debt_equity'] = (x['debt'] / x['equity'])
    x['grossmargin_ebitdamargin'] = (x['grossmargin'] / x['ebitdamargin'])
    # NOTE(review): 'debt_ebit' is computed from ebitda, not ebit — looks like a
    # copy/paste slip, but downstream models may depend on the current value;
    # confirm before changing.
    x['debt_ebit'] = (x['debt'] / x['ebitda'])
    return x
class OptCheck(Options):
    """Check component options: icon classes, disabled flag and status colours.

    Restores the ``@property`` getter/setter pairs implied by the duplicated
    method names; the previous bare ``_check.setter`` / ``_not_check.setter``
    statements raised NameError at class-creation time.
    """

    @property
    def icon_check(self):
        """CSS classes of the icon shown for a checked value."""
        return self._config_get('fas fa-check')

    @icon_check.setter
    def icon_check(self, icon: str):
        self._config(icon)

    @property
    def icon_not_check(self):
        """CSS classes of the icon shown for an unchecked value."""
        return self._config_get('fas fa-times')

    @icon_not_check.setter
    def icon_not_check(self, icon: str):
        self._config(icon)

    @property
    def disable(self):
        """Whether the component is disabled (default False)."""
        return self._config_get(False)

    @disable.setter
    def disable(self, flag: bool):
        self._config(flag)

    @property
    def green(self):
        """Colour used for the positive (checked) state."""
        return self._config_get(self.page.theme.success[1])

    @green.setter
    def green(self, values):
        self._config(values)

    @property
    def red(self):
        """Colour used for the negative (unchecked) state."""
        return self._config_get(self.page.theme.danger.base)

    @red.setter
    def red(self, values):
        self._config(values)
class AsyncioBrokenComponent(AsyncioComponentForTest):
    """Test component that deliberately raises after starting a background
    service, to exercise crash handling in the component runner."""
    name = 'component-test-asyncio-broken'
    endpoint_name = 'component-test-asyncio-broken'
    async def _loop_monitoring_task(self, event_bus: EndpointAPI) -> None:
        # Block forever: an awaited bare Future never completes.
        (await asyncio.Future())
    async def do_run(self, event_bus: EndpointAPI) -> None:
        # Crash while the idle service is still running in the background.
        async with background_asyncio_service(IdleService(event_bus)):
            raise ComponentException('This is a component that crashes after starting a service')
class OptionPlotoptionsTimelineStatesSelectMarker(Options):
    """Marker options for the selected state of timeline series points.

    Restores the ``@property`` getter/setter pairs implied by the duplicated
    method names (the bare setter defs were shadowing the getters).
    """

    @property
    def enabled(self):
        """Whether the point marker is enabled."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        """Distance threshold below which markers are enabled (default 2)."""
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        """Fill colour of the marker."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        """Explicit marker height."""
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        """Marker outline colour (default '#ffffff')."""
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Marker outline width (default 0)."""
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Marker radius (default 4)."""
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        """Explicit marker width."""
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
@_app.on('disconnect')
def test_disconnect():
    """Simulator client disconnect handler.

    Fixed: the bare ``_app.on('disconnect')`` statement was a no-op, so the
    handler was never registered; it must decorate the function.
    """
    global client_connected
    client_connected = False
    # Mirror the disconnect on the Push2 object so the app reacts as if the
    # hardware went away.
    push_object.trigger_action(push2_python.constants.ACTION_MIDI_DISCONNECTED)
    push_object.trigger_action(push2_python.constants.ACTION_DISPLAY_DISCONNECTED)
    logging.info('Simulator client disconnected')
class OptionPlotoptionsPieSonificationTracksMappingHighpassResonance(Options):
    """Highpass-filter resonance mapping options for pie sonification tracks.

    Restores the ``@property`` getter/setter pairs implied by the duplicated
    method names (the bare setter defs were shadowing the getters).
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the data values."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the resonance is mapped to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping operates within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_dependency_resolution_8():
    """Resolution must satisfy dependencies while keeping input order for ties."""
    a0 = Thing('a0.js', [])
    a1 = Thing('a1.js', ['b1.js', 'b2.js'])
    b1 = Thing('b1.js', ['c1.js'])
    b2 = Thing('b2.js', ['d1.js'])
    c1 = Thing('c1.js', ['d1.js'])
    d1 = Thing('d1.js', [])

    def names(things):
        return [t.name for t in things]

    # a0 has no deps, so listing it first keeps it first.
    assert names(solve_dependencies((a0, a1, b1, b2, c1, d1))) == ['a0.js', 'd1.js', 'c1.js', 'b1.js', 'b2.js', 'a1.js']
    # With a1 first, its dependency chain must be emitted before a0.
    assert names(solve_dependencies((a1, a0, b1, b2, c1, d1))) == ['d1.js', 'c1.js', 'b1.js', 'b2.js', 'a1.js', 'a0.js']
    # With b1 first, a0 slots in after b1's chain.
    assert names(solve_dependencies((b1, a0, a1, b2, c1, d1))) == ['d1.js', 'c1.js', 'b1.js', 'a0.js', 'b2.js', 'a1.js']
def _create(data: CollectionIn, user: UserType):
    # Create a collection, its main item, its type, and the creator's ADMIN
    # membership — all inside one transaction so a failure rolls everything back.
    with transaction.atomic():
        # A client-supplied etag is meaningless on creation.
        if (data.item.etag is not None):
            raise ValidationError('bad_etag', 'etag is not null')
        instance = models.Collection(uid=data.item.uid, owner=user)
        try:
            instance.validate_unique()
        except django_exceptions.ValidationError:
            raise ValidationError('unique_uid', 'Collection with this uid already exists', status_code=status.HTTP_409_CONFLICT)
        instance.save()
        # The main item shares the collection's uid; linking it requires a
        # second save because the item needs the saved collection first.
        main_item = models.CollectionItem.objects.create(uid=data.item.uid, version=data.item.version, collection=instance)
        instance.main_item = main_item
        instance.save()
        process_revisions_for_item(main_item, data.item.content)
        (collection_type_obj, _) = models.CollectionType.objects.get_or_create(uid=data.collectionType, owner=user)
        # The creator becomes an ADMIN member holding the (client-encrypted) key.
        models.CollectionMember(collection=instance, stoken=models.Stoken.objects.create(), user=user, accessLevel=models.AccessLevels.ADMIN, encryptionKey=data.collectionKey, collectionType=collection_type_obj).save()
def test_explicit_stack(logger):
    """Logging with an explicit ``stack`` must populate culprit and stacktrace."""
    logger.info('This is a test of stacks', extra={'stack': iter_stack_frames()})
    assert len(logger.client.events) == 1
    event = logger.client.events[ERROR][0]
    assert 'culprit' in event, event
    assert event['culprit'] == 'tests.handlers.logging.logging_tests.test_explicit_stack'
    log = event['log']
    assert 'message' in log, event
    assert log['message'] == 'This is a test of stacks'
    # No exception was raised, so only log data (message + stack) is captured.
    assert 'exception' not in event
    assert 'param_message' in log
    assert log['param_message'] == 'This is a test of stacks'
    assert 'stacktrace' in log
def test_raises_warning_if_no_feature_selected(load_diabetes_dataset):
    """With an unreachable threshold no feature is selected, so fit() must warn."""
    (X, y) = load_diabetes_dataset
    sel = SelectBySingleFeaturePerformance(estimator=DecisionTreeRegressor(random_state=0), scoring='neg_mean_squared_error', cv=2, threshold=10)
    # Fixed: the warning must come from fit() itself; the previous
    # warnings.warn(sel.fit(X, y), UserWarning) re-issued a warning manually,
    # making the assertion pass regardless of fit()'s behaviour.
    with pytest.warns(UserWarning):
        sel.fit(X, y)
def serialize_basemodel_to_literal(basemodel: pydantic.BaseModel, flyteobject_store: BaseModelFlyteObjectStore) -> literals.Literal:
    """Serialize *basemodel* to JSON — offloading Flyte-native field values to
    *flyteobject_store* — and wrap the result in a Flyte Literal."""
    def _encode(obj: Any) -> Union[(str, commons.LiteralObjID)]:
        # Flyte-supported types are stored out of band and replaced by an id.
        if isinstance(obj, commons.PYDANTIC_SUPPORTED_FLYTE_TYPES):
            return flyteobject_store.register_python_object(obj)
        # Everything else falls back to pydantic's own encoder.
        return basemodel.__json_encoder__(obj)
    return make_literal_from_json(basemodel.json(encoder=_encode))
class Wrapper():
    """Adapter exposing a fitted elliptic-envelope estimator through generated
    C code compiled via ``common.CompiledClassifier``.

    The wrapper snapshots the estimator's location, precision and offset, emits
    C code for them, and routes distance/score/predict calls through the
    compiled classifier.
    """

    def __init__(self, estimator, classifier='inline', dtype='float'):
        self.dtype = dtype
        precision = estimator.get_precision()
        self._means = estimator.location_.copy()
        self._precision = precision
        self._offset = estimator.offset_
        if (classifier == 'inline'):
            name = 'my_inline_elliptic'
            func = '{}_predict(values, length)'.format(name)
            code = self.save(name=name)
            self.classifier_ = common.CompiledClassifier(code, name=name, call=func, out_dtype='float')
        else:
            raise ValueError("Unsupported classifier method '{}'".format(classifier))

    def mahalanobis(self, X):
        """Return the compiled classifier's distance values for each row of X.

        Fixed: the previous version also computed the distances in pure Python
        via squared_mahalanobis_distance() and silently discarded the result.
        """
        return numpy.array(self.classifier_.predict(X))

    def score_samples(self, X):
        """Outlier score: the negated distance (higher means more normal)."""
        return (- self.mahalanobis(X))

    def predict(self, X):
        """Classify each sample: 1 for inliers, -1 for outliers."""
        def predict_one(d):
            # A sample is an inlier when its (negated) distance clears the
            # estimator's decision offset.
            dd = ((- d) - self._offset)
            return (1 if (dd > 0) else (- 1))
        distances = self.mahalanobis(X)
        return numpy.array([predict_one(d) for d in distances])

    def save(self, name=None, file=None):
        """Generate the C code for this model; optionally write it to *file*.

        Either *name* or *file* must be given; when only *file* is given the
        model name is derived from the file's basename.
        """
        if (name is None):
            if (file is None):
                raise ValueError('Either name or file must be provided')
            else:
                name = os.path.splitext(os.path.basename(file))[0]
        code = generate_code(self._means, self._precision, self._offset, name=name)
        if file:
            with open(file, 'w') as f:
                f.write(code)
        return code
class TestRuleExecutor(UnitTestWithNamespace):
def _build_workflow_execution(self):
with Workflow(name='workflow', namespace=self.namespace_name) as workflow:
op_1 = Operator(name='op_1')
op_2 = Operator(name='op_2')
op_3 = Operator(name='op_3')
op_4 = Operator(name='op_4')
op_5 = Operator(name='op_5')
op_1.action_on_condition(action=TaskAction.START, condition=SimpleCondition(expect_event_keys=['event_1'], flag=True))
op_2.action_on_condition(action=TaskAction.START, condition=SimpleCondition(expect_event_keys=['event_2'], flag=False))
op_3.action_on_condition(action=TaskAction.STOP, condition=SimpleCondition(expect_event_keys=['event_3'], flag=True))
op_4.action_on_condition(action=TaskAction.RESTART, condition=SimpleCondition(expect_event_keys=['event_4'], flag=True))
op_5.action_on_condition(action=TaskAction.RESTART, condition=StateCondition(expect_event_keys=['event_5']))
workflow_meta = self.metadata_manager.add_workflow(namespace=self.namespace_name, name=workflow.name, content='', workflow_object=cloudpickle.dumps(workflow))
self.metadata_manager.flush()
snapshot_meta = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta.id, workflow_object=workflow_meta.workflow_object, uri='url', signature='')
self.metadata_manager.flush()
workflow_execution_meta = self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta.id, run_type=ExecutionType.MANUAL, snapshot_id=snapshot_meta.id)
self.metadata_manager.flush()
self.metadata_manager.update_workflow_execution(workflow_execution_id=workflow_execution_meta.id, status=WorkflowStatus.RUNNING.value)
self.metadata_manager.flush()
task_execution_meta = self.metadata_manager.add_task_execution(workflow_execution_id=workflow_execution_meta.id, task_name='op_1')
self.metadata_manager.flush()
self.metadata_manager.update_task_execution(task_execution_id=task_execution_meta.id, status=TaskStatus.SUCCESS)
self.metadata_manager.flush()
self.metadata_manager.add_task_execution(workflow_execution_id=workflow_execution_meta.id, task_name='op_3')
self.metadata_manager.flush()
self.metadata_manager.add_task_execution(workflow_execution_id=workflow_execution_meta.id, task_name='op_4')
self.metadata_manager.flush()
def test_execute_workflow_execution_rule(self):
self._build_workflow_execution()
event = Event(key='event_1', value='')
event.namespace = self.namespace_name
result = self.exec_event_on_workflow_execution(event)
self.assertEqual(1, len(result.task_schedule_commands))
self.assertEqual('op_1', result.task_schedule_commands[0].new_task_execution.task_name)
self.assertEqual(2, result.task_schedule_commands[0].new_task_execution.seq_num)
self.assertEqual(None, result.task_schedule_commands[0].current_task_execution)
self.assertEqual(TaskAction.START, result.task_schedule_commands[0].action)
event = Event(key='event_2', value='')
event.namespace = self.namespace_name
result = self.exec_event_on_workflow_execution(event)
self.assertIsNone(result)
event = Event(key='event_3', value='')
event.namespace = self.namespace_name
result = self.exec_event_on_workflow_execution(event)
self.assertEqual(1, len(result.task_schedule_commands))
self.assertEqual('op_3', result.task_schedule_commands[0].current_task_execution.task_name)
self.assertEqual(1, result.task_schedule_commands[0].current_task_execution.seq_num)
self.assertEqual(None, result.task_schedule_commands[0].new_task_execution)
self.assertEqual(TaskAction.STOP, result.task_schedule_commands[0].action)
event = Event(key='event_4', value='')
event.namespace = self.namespace_name
result = self.exec_event_on_workflow_execution(event)
self.assertEqual(1, len(result.task_schedule_commands))
self.assertEqual('op_4', result.task_schedule_commands[0].current_task_execution.task_name)
self.assertEqual(1, result.task_schedule_commands[0].current_task_execution.seq_num)
self.assertEqual('op_4', result.task_schedule_commands[0].new_task_execution.task_name)
self.assertEqual(2, result.task_schedule_commands[0].new_task_execution.seq_num)
self.assertEqual(TaskAction.RESTART, result.task_schedule_commands[0].action)
event = Event(key='event_5', value='')
event.namespace = self.namespace_name
result = self.exec_event_on_workflow_execution(event)
self.assertIsNone(result)
result = self.exec_event_on_workflow_execution(event)
self.assertEqual(1, len(result.task_schedule_commands))
result = self.exec_event_on_workflow_execution(event)
self.assertIsNone(result)
def exec_event_on_workflow_execution(self, event):
rule_extractor = RuleExtractor()
rule_executor = RuleExecutor(metadata_manager=self.metadata_manager)
results = rule_extractor.extract_workflow_execution_rules(event=event)
result = rule_executor.execute_workflow_execution_rule(event=event, rule=results[0])
self.metadata_manager.flush()
return result
def _build_workflow_trigger(self):
with Workflow(name='workflow', namespace=self.namespace_name) as workflow:
op_1 = Operator(name='op_1')
workflow_meta = self.metadata_manager.add_workflow(namespace=self.namespace_name, name=workflow.name, content='', workflow_object=cloudpickle.dumps(workflow))
self.metadata_manager.flush()
snapshot_meta = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta.id, workflow_object=workflow_meta.workflow_object, uri='url', signature='')
self.metadata_manager.flush()
workflow_execution_meta = self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta.id, run_type=ExecutionType.MANUAL, snapshot_id=snapshot_meta.id)
self.metadata_manager.flush()
self.metadata_manager.update_workflow_execution(workflow_execution_id=workflow_execution_meta.id, status=WorkflowStatus.RUNNING.value)
self.metadata_manager.flush()
self.metadata_manager.add_workflow_trigger(workflow_id=workflow_meta.id, rule=cloudpickle.dumps(WorkflowRule(condition=SimpleCondition(expect_event_keys=['event_1'], flag=True))))
self.metadata_manager.flush()
self.metadata_manager.add_workflow_trigger(workflow_id=workflow_meta.id, rule=cloudpickle.dumps(WorkflowRule(condition=SimpleCondition(expect_event_keys=['event_2'], flag=False))))
self.metadata_manager.flush()
self.metadata_manager.add_workflow_trigger(workflow_id=workflow_meta.id, rule=cloudpickle.dumps(WorkflowRule(condition=SimpleCondition(expect_event_keys=['event_3'], flag=True))))
self.metadata_manager.flush()
self.metadata_manager.add_workflow_trigger(workflow_id=workflow_meta.id, rule=cloudpickle.dumps(WorkflowRule(condition=StateCondition(expect_event_keys=['event_4']))))
self.metadata_manager.flush()
def test_execute_workflow_rule(self):
    """Fire events against the triggers built by _build_workflow_trigger and
    check which ones produce an execution result.  event_4 is sent twice:
    its StateCondition only fires on the second occurrence."""
    self._build_workflow_trigger()
    # (event key, whether an execution result is expected)
    scenarios = [
        ('event_1', True),
        ('event_2', False),
        ('event_3', True),
        ('event_4', False),
        ('event_4', True),
    ]
    for event_key, expect_result in scenarios:
        event = Event(key=event_key, value='')
        event.namespace = self.namespace_name
        result = self.exec_event_on_workflow(event)
        if expect_result:
            self.assertIsNotNone(result)
        else:
            self.assertIsNone(result)
def exec_event_on_workflow(self, event):
    """Extract the workflow rules matching *event* and execute the first one.

    Returns whatever the executor produces (None when the rule does not fire).
    """
    extractor = RuleExtractor()
    executor = RuleExecutor(metadata_manager=self.metadata_manager)
    matched_rules = extractor.extract_workflow_rules(event=event)
    outcome = executor.execute_workflow_rule(event=event, rule=matched_rules[0])
    self.metadata_manager.flush()
    return outcome
def cifti_info(filename):
    """Summarize a CIFTI file via ``wb_command -file-information``.

    Parameters
    ----------
    filename : str
        Path of the CIFTI file to inspect.

    Returns
    -------
    dict
        Boolean flags parsed from the command output: ``has_LSurf`` /
        ``has_RSurf`` (set when a ``Structure`` line appears),
        ``maps_to_surf`` and ``maps_to_volume``.  A key is absent when the
        matching line never appears in the output.
    """
    c_info = get_stdout(['wb_command', '-file-information', filename, '-no-map-info'])
    cinfo = {}
    # NOTE(review): splitting on os.linesep assumes wb_command emits the
    # platform line separator — confirm on Windows, where '\n'-only output
    # would not split here.
    for line in c_info.split(os.linesep):
        if 'Structure' in line:
            # `in` already yields a bool — no need for `True if ... else False`.
            cinfo['has_LSurf'] = 'CortexLeft' in line
            cinfo['has_RSurf'] = 'CortexRight' in line
        if 'Maps to Surface' in line:
            cinfo['maps_to_surf'] = 'true' in line
        if 'Maps to Volume' in line:
            cinfo['maps_to_volume'] = 'true' in line
    return cinfo
def prepare_issfile_for_uninstall(filepath, reboot=False):
    """Copy the bundled ``uninstall.iss`` template to *filepath*.

    When *reboot* is true, rewrite the ``BootOption=`` line in the copy so
    the uninstaller reboots (boot option 3).
    """
    template_dir = os.path.join(os.path.dirname(__file__), '../../backend/kiwoom_open_api_plus/data/scripts/')
    template_path = os.path.join(template_dir, 'uninstall.iss')
    shutil.copy(template_path, filepath)
    if not reboot:
        return
    with open(filepath, 'r', encoding=iss_file_encoding) as f:
        lines = f.readlines()
    for index, line in enumerate(lines):
        if line.startswith('BootOption='):
            # Boot option 3 asks the InstallShield engine to reboot.
            lines[index] = 'BootOption={}\n'.format(3)
            break
    with open(filepath, 'w', encoding=iss_file_encoding) as f:
        f.writelines(lines)
class WallTypeTray(_WallMountedBox, _TopEdge):
    """Wall-mounted tray with a configurable grid of inner compartments.

    Renders front/back walls, an optional bottom, inner divider walls and
    trapezoid side walls; the back wall can be taller than the front
    (``back_height``) so the tray hangs on wall-mount hooks.
    """

    def __init__(self) -> None:
        super().__init__()
        self.addSettingsArgs(edges.StackableSettings)
        self.buildArgParser('sx', 'sy', 'h', 'hi', 'outside', 'bottom_edge')
        self.argparser.add_argument('--back_height', action='store', type=float, default=0.0, help='additional height of the back wall')
        self.argparser.add_argument('--radius', action='store', type=float, default=0.0, help='radius for strengthening walls with the hooks')

    def xSlots(self):
        # Finger holes in the bottom for the dividers that run in y direction;
        # one column of holes after each x compartment except the last.
        posx = ((- 0.5) * self.thickness)
        for x in self.sx[:(- 1)]:
            posx += (x + self.thickness)
            posy = 0
            for y in self.sy:
                self.fingerHolesAt(posx, posy, y)
                posy += (y + self.thickness)

    def ySlots(self):
        # Finger holes in the bottom for the dividers that run in x direction.
        # NOTE(review): arguments are passed as (posy, posx, x) — the part is
        # apparently drawn rotated, so the axes are swapped relative to
        # xSlots; confirm against fingerHolesAt before changing.
        posy = ((- 0.5) * self.thickness)
        for y in self.sy[:(- 1)]:
            posy += (y + self.thickness)
            posx = 0
            for x in reversed(self.sx):
                self.fingerHolesAt(posy, posx, x)
                posx += (x + self.thickness)

    def xHoles(self):
        # Finger holes in the front/back walls for the inner dividers
        # (divider height self.hi).
        posx = ((- 0.5) * self.thickness)
        for x in self.sx[:(- 1)]:
            posx += (x + self.thickness)
            self.fingerHolesAt(posx, 0, self.hi)

    def yHoles(self):
        # Finger holes in the side walls for the inner dividers.
        posy = ((- 0.5) * self.thickness)
        for y in self.sy[:(- 1)]:
            posy += (y + self.thickness)
            self.fingerHolesAt(posy, 0, self.hi)

    def render(self):
        """Emit all parts of the tray, stacking them upwards on the sheet."""
        self.generateWallEdges()
        b = self.bottom_edge
        if self.outside:
            # Convert outside measurements to inside ones.
            self.sx = self.adjustSize(self.sx)
            self.sy = self.adjustSize(self.sy)
            self.h = self.adjustSize(self.h, b, e2=False)
            if self.hi:
                self.hi = self.adjustSize(self.hi, b, e2=False)
        # Overall inner footprint including divider wall thicknesses.
        x = (sum(self.sx) + (self.thickness * (len(self.sx) - 1)))
        y = (sum(self.sy) + (self.thickness * (len(self.sy) - 1)))
        h = self.h
        bh = self.back_height
        sameh = (not self.hi)  # NOTE(review): currently unused in this method
        hi = self.hi = (self.hi or h)  # dividers default to full wall height
        t = self.thickness  # NOTE(review): currently unused in this method
        self.ctx.save()
        # Front wall, then the taller back wall (mirrored hole pattern).
        self.rectangularWall(x, h, [b, 'f', 'e', 'f'], callback=[self.xHoles], move='up')
        self.rectangularWall(x, (h + bh), [b, 'C', 'e', 'c'], callback=[self.mirrorX(self.xHoles, x)], move='up')
        if (b != 'e'):
            # Bottom plate with slots for both divider directions.
            self.rectangularWall(x, y, 'ffff', callback=[self.xSlots, self.ySlots], move='up')
        be = ('f' if (b != 'e') else 'e')
        # Dividers running in x direction (slotted halfway to interlock).
        for i in range((len(self.sy) - 1)):
            e = [edges.SlottedEdge(self, self.sx, be), 'f', edges.SlottedEdge(self, self.sx[::(- 1)], 'e', slots=(0.5 * hi)), 'f']
            self.rectangularWall(x, hi, e, move='up')
        # Left and right trapezoid side walls (front height h, back h+bh).
        self.trapezoidSideWall(y, h, (h + bh), [b, 'B', 'e', 'h'], radius=self.radius, callback=[self.yHoles], move='up')
        self.moveTo(0, 8)
        self.trapezoidSideWall(y, (h + bh), h, [b, 'h', 'e', 'b'], radius=self.radius, callback=[self.mirrorX(self.yHoles, y)], move='up')
        self.moveTo(0, 8)
        # Dividers running in y direction.
        for i in range((len(self.sx) - 1)):
            e = [edges.SlottedEdge(self, self.sy, be, slots=(0.5 * hi)), 'f', 'e', 'f']
            self.rectangularWall(y, hi, e, move='up')
def probe_csv(path, probe_size=4096, compression=None, for_is_csv=False, minimum_columns=2, minimum_rows=2):
    """Sniff the CSV dialect of *path*.

    Parameters
    ----------
    path : str
        File to probe.
    probe_size : int
        Number of characters to read for sniffing.
    compression : str or None
        One of None, 'zip', 'gzip', 'bz2', 'lzma'.  An unknown value raises
        KeyError (unchanged behavior).
    for_is_csv : bool
        When true, additionally require a consistent table of at least
        *minimum_columns* columns and *minimum_rows* data rows.

    Returns
    -------
    (dialect, has_header) on success, (None, False) on failure.
    """
    OPENS = {None: open, 'zip': ZipProbe}
    # BUG FIX: gzip/bz2/lzma `.open` default to *binary* mode, where the
    # `encoding=`/`newline=` kwargs used below are rejected.  Wrap them so
    # they open in text mode ('rt') like the plain `open` case.
    try:
        import gzip
        OPENS['gzip'] = lambda p, **kw: gzip.open(p, 'rt', **kw)
    except ImportError:
        pass
    try:
        import bz2
        OPENS['bz2'] = lambda p, **kw: bz2.open(p, 'rt', **kw)
    except ImportError:
        pass
    try:
        import lzma
        OPENS['lzma'] = lambda p, **kw: lzma.open(p, 'rt', **kw)
    except ImportError:
        pass
    _open = OPENS[compression]
    try:
        with _open(path, newline='', encoding='utf-8') as f:
            sample = f.read(probe_size)
        sniffer = csv.Sniffer()
        (dialect, has_header) = (sniffer.sniff(sample), sniffer.has_header(sample))
        LOG.debug('dialect = %s', dialect)
        LOG.debug('has_header = %s', has_header)
        if hasattr(dialect, 'delimiter'):
            LOG.debug("delimiter = '%s'", dialect.delimiter)
        if for_is_csv:
            reader = csv.reader(io.StringIO(sample), dialect)
            if has_header:
                header = next(reader)
                LOG.debug('for_is_csv header %s', header)
                if (len(header) < minimum_columns):
                    return (None, False)
            cnt = 0
            length = None
            for row in reader:
                cnt += 1
                LOG.debug('for_is_csv row %s %s', cnt, row)
                if (length is None):
                    length = len(row)
                if (length != len(row)):
                    # Ragged rows: not a well-formed CSV table.
                    return (None, False)
                if (cnt >= minimum_rows):
                    break
            if (cnt < minimum_rows):
                return (None, False)
        return (dialect, has_header)
    except UnicodeDecodeError:
        return (None, False)
    except csv.Error:
        return (None, False)
class TestApp(unittest.TestCase):
    """Tests for browsepy application-level configuration handling."""
    module = browsepy
    app = browsepy.app

    def test_config(self):
        """A settings file referenced by BROWSEPY_TEST_SETTINGS overrides
        values previously set on the app config."""
        name = None  # defined up front so the finally block is always safe
        try:
            with tempfile.NamedTemporaryFile(delete=False) as f:
                f.write(b'DIRECTORY_DOWNLOADABLE = False\n')
                name = f.name
            os.environ['BROWSEPY_TEST_SETTINGS'] = name
            self.app.config['directory_downloadable'] = True
            self.app.config.from_envvar('BROWSEPY_TEST_SETTINGS')
            self.assertFalse(self.app.config['directory_downloadable'])
        finally:
            # Fix: the original leaked the env var into subsequent tests and
            # referenced `name` even when tempfile creation failed.
            os.environ.pop('BROWSEPY_TEST_SETTINGS', None)
            if name is not None:
                os.remove(name)
def test_config_non_string_types():
    """Arbitrary objects passed as config values are coerced to ``str``."""
    class Stringy(object):
        """Minimal wrapper whose str()/repr() delegate to its content."""
        def __init__(self, content):
            self.content = content

        def __str__(self):
            return str(self.content)

        def __repr__(self):
            return repr(self.content)

    client = TempStoreClient(
        server_url='localhost',
        service_name=Stringy('bar'),
        secret_token=Stringy('bay'),
        metrics_interval='0ms',
    )
    assert isinstance(client.config.secret_token, str)
    assert isinstance(client.config.service_name, str)
    client.close()
# NOTE(review): the next line appears to be residue of a stripped decorator
# or fixture-scope argument — as written it is a bare name expression that
# raises NameError unless `_scope` is defined elsewhere in the module.
# TODO: confirm against the upstream source.
_scope
class TestGetWealth(AEATestCaseEmpty):
    """Test that `aea get-wealth` reports the expected balance."""
    # NOTE(review): this bare string expression looks like the argument of a
    # stripped `@mock.patch('click.echo')` decorator, which would supply the
    # `_echo_mock` parameter below — confirm before relying on this test.
    ('click.echo')

    def test_get_wealth(self, _echo_mock, password_or_none):
        # Generate and register a key (optionally encrypted), then query wealth.
        self.generate_private_key(password=password_or_none)
        self.add_private_key(password=password_or_none)
        self.get_wealth(password=password_or_none)
        # A fresh key has no funds, so the reported wealth must be zero.
        expected_wealth = 0
        _echo_mock.assert_called_with(expected_wealth)
def test_delete_removes_data_from_underlying_db_after_persist(journal_db, memory_db):
    """A delete is buffered in the journal and only reaches the wrapped
    database once persist() is called."""
    key, value = b'1', b'test-a'
    memory_db.set(key, value)
    assert (memory_db.exists(key) is True)
    journal_db.delete(key)
    # The delete is only journaled; the underlying store is untouched so far.
    assert (memory_db.exists(key) is True)
    journal_db.persist()
    assert (memory_db.exists(key) is False)
class override_method():
    """Context manager that temporarily swaps a view's request (and the
    matching action from its action_map) for a clone with a different HTTP
    method, restoring both on exit."""

    def __init__(self, view, request, method):
        self.view, self.request, self.method = view, request, method
        # Remember the current action (may be absent on plain views).
        self.action = getattr(view, 'action', None)

    def __enter__(self):
        replaced = clone_request(self.request, self.method)
        self.view.request = replaced
        mapping = getattr(self.view, 'action_map', {})
        self.view.action = mapping.get(self.method.lower())
        return replaced

    def __exit__(self, *args, **kwarg):
        # Restore the original request and action unconditionally.
        self.view.request, self.view.action = self.request, self.action
class DWARFInfo(object):
    """Navigation of DWARF debug information.

    Wraps the debug section descriptors (each with .stream/.size/.address)
    extracted from a file and exposes parsed CUs, DIEs, line programs, CFI
    entries, location/range lists and pubnames/pubtypes lookup tables.
    """

    def __init__(self, config, debug_info_sec, debug_aranges_sec, debug_abbrev_sec,
                 debug_frame_sec, eh_frame_sec, debug_str_sec, debug_loc_sec,
                 debug_ranges_sec, debug_line_sec, debug_pubtypes_sec,
                 debug_pubnames_sec, debug_addr_sec, debug_str_offsets_sec,
                 debug_line_str_sec, debug_loclists_sec, debug_rnglists_sec,
                 debug_sup_sec, gnu_debugaltlink_sec):
        """Store section descriptors (any may be None if absent from the file)."""
        self.config = config
        self.debug_info_sec = debug_info_sec
        self.debug_aranges_sec = debug_aranges_sec
        self.debug_abbrev_sec = debug_abbrev_sec
        self.debug_frame_sec = debug_frame_sec
        self.eh_frame_sec = eh_frame_sec
        self.debug_str_sec = debug_str_sec
        self.debug_loc_sec = debug_loc_sec
        self.debug_ranges_sec = debug_ranges_sec
        self.debug_line_sec = debug_line_sec
        self.debug_addr_sec = debug_addr_sec
        self.debug_str_offsets_sec = debug_str_offsets_sec
        self.debug_line_str_sec = debug_line_str_sec
        self.debug_pubtypes_sec = debug_pubtypes_sec
        self.debug_pubnames_sec = debug_pubnames_sec
        self.debug_loclists_sec = debug_loclists_sec
        self.debug_rnglists_sec = debug_rnglists_sec
        self.debug_sup_sec = debug_sup_sec
        self.gnu_debugaltlink_sec = gnu_debugaltlink_sec
        self.supplementary_dwarfinfo = None
        # Default structs for parsing section headers; each CU gets its own
        # structs instance once its actual format/version are known.
        self.structs = DWARFStructs(little_endian=self.config.little_endian,
                                    dwarf_format=32,
                                    address_size=self.config.default_address_size)
        # Caches: abbrev tables and line programs keyed by section offset;
        # CUs kept as parallel sorted lists (offsets + parsed objects).
        self._abbrevtable_cache = {}
        self._linetable_cache = {}
        self._cu_cache = []
        self._cu_offsets_map = []

    def has_debug_info(self):
        """Whether this file carries a .debug_info section."""
        return bool(self.debug_info_sec)

    def get_DIE_from_lut_entry(self, lut_entry):
        """Return the DIE for a pubnames/pubtypes lookup-table entry."""
        cu = self.get_CU_at(lut_entry.cu_ofs)
        return self.get_DIE_from_refaddr(lut_entry.die_ofs, cu)

    def get_DIE_from_refaddr(self, refaddr, cu=None):
        """Return the DIE at .debug_info offset *refaddr*; locates the CU first
        unless one is supplied."""
        if (cu is None):
            cu = self.get_CU_containing(refaddr)
        return cu.get_DIE_from_refaddr(refaddr)

    def get_CU_containing(self, refaddr):
        """Return the CU whose extent contains .debug_info offset *refaddr*."""
        # BUG FIX: has_debug_info is a method here; the original passed the
        # bound method itself (always truthy), so the assert could never fire.
        dwarf_assert(self.has_debug_info(), 'CU lookup but no debug info section')
        dwarf_assert((0 <= refaddr < self.debug_info_sec.size),
                     ('refaddr %s beyond .debug_info size' % refaddr))
        # Start scanning from the last cached CU at or before refaddr.
        i = bisect_right(self._cu_offsets_map, refaddr)
        start = (self._cu_offsets_map[(i - 1)] if (i > 0) else 0)
        for cu in self._parse_CUs_iter(start):
            if (cu.cu_offset <= refaddr < (cu.cu_offset + cu.size)):
                return cu
        raise ValueError(('CU for reference address %s not found' % refaddr))

    def get_CU_at(self, offset):
        """Return the CU that starts exactly at .debug_info *offset*."""
        # BUG FIX: call has_debug_info() instead of passing the bound method.
        dwarf_assert(self.has_debug_info(), 'CU lookup but no debug info section')
        dwarf_assert((0 <= offset < self.debug_info_sec.size),
                     ('offset %s beyond .debug_info size' % offset))
        return self._cached_CU_at_offset(offset)

    def iter_CUs(self):
        """Iterate over all CUs in .debug_info, in file order."""
        return self._parse_CUs_iter()

    def get_abbrev_table(self, offset):
        """Return the (cached) abbreviation table at .debug_abbrev *offset*."""
        dwarf_assert((offset < self.debug_abbrev_sec.size),
                     ("Offset '0x%x' to abbrev table out of section bounds" % offset))
        if (offset not in self._abbrevtable_cache):
            self._abbrevtable_cache[offset] = AbbrevTable(
                structs=self.structs, stream=self.debug_abbrev_sec.stream, offset=offset)
        return self._abbrevtable_cache[offset]

    def get_string_from_table(self, offset):
        """Read a NUL-terminated string from .debug_str at *offset*."""
        return parse_cstring_from_stream(self.debug_str_sec.stream, offset)

    def get_string_from_linetable(self, offset):
        """Read a NUL-terminated string from .debug_line_str at *offset*."""
        return parse_cstring_from_stream(self.debug_line_str_sec.stream, offset)

    def line_program_for_CU(self, CU):
        """Return the line program referenced by the CU's DW_AT_stmt_list,
        or None when the CU has no line information."""
        top_DIE = CU.get_top_DIE()
        if ('DW_AT_stmt_list' in top_DIE.attributes):
            return self._parse_line_program_at_offset(
                top_DIE.attributes['DW_AT_stmt_list'].value, CU.structs)
        else:
            return None

    def has_CFI(self):
        """Whether .debug_frame call-frame information is present."""
        return (self.debug_frame_sec is not None)

    def CFI_entries(self):
        """Parse and return the .debug_frame CFI entries."""
        cfi = CallFrameInfo(stream=self.debug_frame_sec.stream,
                            size=self.debug_frame_sec.size,
                            address=self.debug_frame_sec.address,
                            base_structs=self.structs)
        return cfi.get_entries()

    def has_EH_CFI(self):
        """Whether .eh_frame call-frame information is present."""
        return (self.eh_frame_sec is not None)

    def EH_CFI_entries(self):
        """Parse and return the .eh_frame CFI entries."""
        cfi = CallFrameInfo(stream=self.eh_frame_sec.stream,
                            size=self.eh_frame_sec.size,
                            address=self.eh_frame_sec.address,
                            base_structs=self.structs,
                            for_eh_frame=True)
        return cfi.get_entries()

    def get_pubtypes(self):
        """Return the .debug_pubtypes name table, or None if absent."""
        if self.debug_pubtypes_sec:
            return NameLUT(self.debug_pubtypes_sec.stream,
                           self.debug_pubtypes_sec.size, self.structs)
        else:
            return None

    def get_pubnames(self):
        """Return the .debug_pubnames name table, or None if absent."""
        if self.debug_pubnames_sec:
            return NameLUT(self.debug_pubnames_sec.stream,
                           self.debug_pubnames_sec.size, self.structs)
        else:
            return None

    def get_aranges(self):
        """Return the .debug_aranges table, or None if absent."""
        if self.debug_aranges_sec:
            return ARanges(self.debug_aranges_sec.stream,
                           self.debug_aranges_sec.size, self.structs)
        else:
            return None

    def location_lists(self):
        """Return the location-list parser: DWARF v5 .debug_loclists, legacy
        .debug_loc, a pair wrapper when both exist, or None."""
        if (self.debug_loclists_sec and (self.debug_loc_sec is None)):
            return LocationLists(self.debug_loclists_sec.stream, self.structs, 5, self)
        elif (self.debug_loc_sec and (self.debug_loclists_sec is None)):
            return LocationLists(self.debug_loc_sec.stream, self.structs, 4, self)
        elif (self.debug_loc_sec and self.debug_loclists_sec):
            return LocationListsPair(self.debug_loc_sec.stream,
                                     self.debug_loclists_sec.stream, self.structs, self)
        else:
            return None

    def range_lists(self):
        """Return the range-list parser: DWARF v5 .debug_rnglists, legacy
        .debug_ranges, a pair wrapper when both exist, or None."""
        if (self.debug_rnglists_sec and (self.debug_ranges_sec is None)):
            return RangeLists(self.debug_rnglists_sec.stream, self.structs, 5, self)
        elif (self.debug_ranges_sec and (self.debug_rnglists_sec is None)):
            return RangeLists(self.debug_ranges_sec.stream, self.structs, 4, self)
        elif (self.debug_ranges_sec and self.debug_rnglists_sec):
            return RangeListsPair(self.debug_ranges_sec.stream,
                                  self.debug_rnglists_sec.stream, self.structs, self)
        else:
            return None

    def get_addr(self, cu, addr_index):
        """Resolve an indexed address (DW_FORM_addrx etc.) for *cu* via the
        .debug_addr section."""
        if (not self.debug_addr_sec):
            raise DWARFError('The file does not contain a debug_addr section for indirect address access')
        cu_addr_base = _get_base_offset(cu, 'DW_AT_addr_base')
        return struct_parse(cu.structs.Dwarf_target_addr(''),
                            self.debug_addr_sec.stream,
                            (cu_addr_base + (addr_index * cu.header.address_size)))

    def _parse_CUs_iter(self, offset=0):
        """Yield CUs starting at .debug_info *offset*, parsing (and caching)
        lazily as iteration advances."""
        if (self.debug_info_sec is None):
            return
        while (offset < self.debug_info_sec.size):
            cu = self._cached_CU_at_offset(offset)
            # Advance past this CU: unit_length plus the length field itself.
            offset = ((offset + cu['unit_length']) + cu.structs.initial_length_field_size())
            (yield cu)

    def _cached_CU_at_offset(self, offset):
        """Return the CU at *offset*, consulting/updating the sorted cache."""
        i = bisect_right(self._cu_offsets_map, offset)
        if ((i >= 1) and (offset == self._cu_offsets_map[(i - 1)])):
            return self._cu_cache[(i - 1)]
        cu = self._parse_CU_at_offset(offset)
        self._cu_offsets_map.insert(i, offset)
        self._cu_cache.insert(i, cu)
        return cu

    def _parse_CU_at_offset(self, offset):
        """Parse a single CU header at *offset* and build its CompileUnit."""
        # Peek at the initial length to decide 32- vs 64-bit DWARF: the
        # escape value 0xFFFFFFFF announces the 64-bit format (DWARF spec
        # section 7.4).  BUG FIX: the constant was missing in the original
        # (`initial_length == ` — a syntax error).
        initial_length = struct_parse(self.structs.Dwarf_uint32(''),
                                      self.debug_info_sec.stream, offset)
        dwarf_format = (64 if (initial_length == 0xFFFFFFFF) else 32)
        # Temporary structs just to read the header; address size/version are
        # placeholders until the header tells us the real values.
        cu_structs = DWARFStructs(little_endian=self.config.little_endian,
                                  dwarf_format=dwarf_format,
                                  address_size=4, dwarf_version=2)
        cu_header = struct_parse(cu_structs.Dwarf_CU_header,
                                 self.debug_info_sec.stream, offset)
        cu_structs = DWARFStructs(little_endian=self.config.little_endian,
                                  dwarf_format=dwarf_format,
                                  address_size=cu_header['address_size'],
                                  dwarf_version=cu_header['version'])
        cu_die_offset = self.debug_info_sec.stream.tell()
        dwarf_assert(self._is_supported_version(cu_header['version']),
                     ("Expected supported DWARF version. Got '%s'" % cu_header['version']))
        return CompileUnit(header=cu_header, dwarfinfo=self, structs=cu_structs,
                           cu_offset=offset, cu_die_offset=cu_die_offset)

    def _is_supported_version(self, version):
        """DWARF versions 2 through 5 are supported."""
        return (2 <= version <= 5)

    def _parse_line_program_at_offset(self, offset, structs):
        """Parse (and cache) the line program at .debug_line *offset*."""
        if (offset in self._linetable_cache):
            return self._linetable_cache[offset]
        lineprog_header = struct_parse(structs.Dwarf_lineprog_header,
                                       self.debug_line_sec.stream, offset)

        def resolve_strings(self, lineprog_header, format_field, data_field):
            # DWARF v5 headers carry indirect string forms; replace each
            # offset with the resolved string, in place.
            if lineprog_header.get(format_field, False):
                data = lineprog_header[data_field]
                for field in lineprog_header[format_field]:
                    def replace_value(data, content_type, replacer):
                        for entry in data:
                            entry[content_type] = replacer(entry[content_type])
                    if (field.form == 'DW_FORM_line_strp'):
                        replace_value(data, field.content_type, self.get_string_from_linetable)
                    elif (field.form == 'DW_FORM_strp'):
                        replace_value(data, field.content_type, self.get_string_from_table)
                    elif (field.form in ('DW_FORM_strp_sup', 'DW_FORM_GNU_strp_alt')):
                        if self.supplementary_dwarfinfo:
                            # NOTE(review): `get_string_fromtable` is not
                            # defined on this class (cf. get_string_from_table
                            # above) — confirm it exists on the supplementary
                            # dwarfinfo object.
                            replace_value(data, field.content_type,
                                          self.supplementary_dwarfinfo.get_string_fromtable)
                        else:
                            replace_value(data, field.content_type, (lambda x: str(x)))
                    elif (field.form in ('DW_FORM_strp_sup', 'DW_FORM_strx', 'DW_FORM_strx1',
                                         'DW_FORM_strx2', 'DW_FORM_strx3', 'DW_FORM_strx4')):
                        # NOTE: 'DW_FORM_strp_sup' here is unreachable — it is
                        # consumed by the branch above.
                        raise NotImplementedError()

        resolve_strings(self, lineprog_header, 'directory_entry_format', 'directories')
        resolve_strings(self, lineprog_header, 'file_name_entry_format', 'file_names')
        # Normalize v5 directory/file entries into the v2-4 style attributes
        # that LineProgram consumers expect.
        if lineprog_header.get('directories', False):
            lineprog_header.include_directory = tuple((d.DW_LNCT_path for d in lineprog_header.directories))
        if lineprog_header.get('file_names', False):
            lineprog_header.file_entry = tuple((Container(**{
                'name': e.get('DW_LNCT_path'),
                'dir_index': e.get('DW_LNCT_directory_index'),
                'mtime': e.get('DW_LNCT_timestamp'),
                'length': e.get('DW_LNCT_size')}) for e in lineprog_header.file_names))
        end_offset = ((offset + lineprog_header['unit_length']) + structs.initial_length_field_size())
        lineprogram = LineProgram(header=lineprog_header,
                                  stream=self.debug_line_sec.stream,
                                  structs=structs,
                                  program_start_offset=self.debug_line_sec.stream.tell(),
                                  program_end_offset=end_offset)
        self._linetable_cache[offset] = lineprogram
        return lineprogram

    def parse_debugsupinfo(self):
        """Return the supplementary-file name from .debug_sup or
        .gnu_debugaltlink, or None when neither is present."""
        if (self.debug_sup_sec is not None):
            self.debug_sup_sec.stream.seek(0)
            suplink = self.structs.Dwarf_debugsup.parse_stream(self.debug_sup_sec.stream)
            if (suplink.is_supplementary == 0):
                return suplink.sup_filename
        if (self.gnu_debugaltlink_sec is not None):
            self.gnu_debugaltlink_sec.stream.seek(0)
            suplink = self.structs.Dwarf_debugaltlink.parse_stream(self.gnu_debugaltlink_sec.stream)
            return suplink.sup_filename
        return None
def create_folders(path: Path) -> Tuple[str, Path]:
    """Create the asset folder layout for a tutorial and derive its page name.

    Parameters
    ----------
    path : Path
        Path to the tutorial source; its stem (snake_case) names the folder.

    Returns
    -------
    (filename, assets_folder)
        ``filename`` is the CamelCase page name derived from the stem;
        ``assets_folder`` is ``TUTORIALS_DIR/<stem>/assets``.
    """
    tutorial_folder_name = path.stem
    filename = ''.join(token.title() for token in tutorial_folder_name.split('_'))
    tutorial_folder = TUTORIALS_DIR.joinpath(tutorial_folder_name)
    assets_folder = tutorial_folder / 'assets'
    # mkdir(parents=True, exist_ok=True) already creates missing parents and
    # tolerates existing dirs, so the pre-existence checks were redundant;
    # creating the two leaves also creates tutorial_folder and assets_folder.
    (assets_folder / 'img').mkdir(parents=True, exist_ok=True)
    (assets_folder / 'plot_data').mkdir(parents=True, exist_ok=True)
    return (filename, assets_folder)
def _build_log_service(config: Dict[(str, Any)]) -> LogService:
    """Instantiate the LogService declared in the config's dependency section.

    Raises KeyError when the 'dependency' section or its 'LogService' entry
    is missing.
    """
    dependency_section: Optional[Dict[(str, Any)]] = config.get('dependency')
    if (not dependency_section) or ('LogService' not in dependency_section):
        raise KeyError('LogService is absent in the config.')
    service_settings: Dict[(str, Any)] = dependency_section['LogService']
    # Resolve the concrete class by its dotted name, then build it with the
    # declared constructor kwargs.
    service_cls = reflect.get_class(service_settings['class'])
    return service_cls(**service_settings['constructor'])
class MyModel():
    """Toy model whose checkpoint is just a timestamp file keyed by seed."""

    def __init__(self, random_seed: int):
        self.random_seed = random_seed
        log.info('Init my model')

    def save(self, checkpoint_path: str) -> None:
        """Write a checkpoint marker file under *checkpoint_path*."""
        checkpoint_dir = Path(checkpoint_path)
        checkpoint_dir.mkdir(parents=True, exist_ok=True)
        log.info(f'Created dir for checkpoints. dir={checkpoint_dir}')
        # The "checkpoint" content is simply the current timestamp.
        target = checkpoint_dir / f'checkpoint_{self.random_seed}.pt'
        target.write_text(f'{datetime.now()}')
class MockGE():
    """In-process mock of a Grid Engine batch system, backed by SQLite.

    Jobs move through states: 't' (just submitted) -> 'qw' (queued) ->
    'r' (running as a real subprocess) -> 'c' (completed); 'd' marks a job
    for deletion and 'Eqw' a failed start.  The qsub/qstat/qacct/qdel
    methods emulate the corresponding command-line tools.
    """

    def __init__(self, max_jobs=4, qsub_delay=0.0, qacct_delay=15.0, shell='/bin/bash', database_dir=None, debug=False):
        """Open (and if needed initialize) the job database.

        max_jobs: max concurrently running jobs; qsub_delay: seconds a job
        stays in 't' before moving to 'qw'; qacct_delay: seconds after job
        end before qacct reports it; database_dir defaults to ~/.mockGE.
        """
        if debug:
            logging.getLogger().setLevel(logging.DEBUG)
        if (database_dir is None):
            database_dir = os.path.join(self._user_home(), '.mockGE')
        self._database_dir = os.path.abspath(database_dir)
        self._db_file = os.path.join(self._database_dir, 'mockGE.sqlite')
        self._processes = []
        if (not os.path.exists(self._database_dir)):
            os.mkdir(self._database_dir)
        init_db = False
        if (not os.path.exists(self._db_file)):
            init_db = True
        try:
            logging.debug('Connecting to DB')
            self._cx = sqlite3.connect(self._db_file)
            self._cx.row_factory = sqlite3.Row
        except Exception as ex:
            print(('Exception connecting to DB: %s' % ex))
            raise ex
        if init_db:
            logging.debug('Setting up DB')
            self._init_db()
        self._shell = shell
        self._max_jobs = max_jobs
        self._qsub_delay = qsub_delay
        self._qacct_delay = qacct_delay
        # Ensure spawned job processes are killed when the interpreter exits.
        atexit.register(self.stop)

    def stop(self):
        """Kill any still-running job processes (atexit hook)."""
        self._cleanup_processes()

    def _init_db(self):
        """Create the jobs table in a fresh database."""
        sql = '\n        CREATE TABLE jobs (\n        id INTEGER PRIMARY KEY,\n        user CHAR,\n        state CHAR,\n        name VARCHAR,\n        command VARCHAR,\n        working_dir VARCHAR,\n        output_name VARCHAR,\n        nslots INTEGER,\n        queue VARCHAR,\n        join_output CHAR,\n        pid INTEGER,\n        qsub_time FLOAT,\n        start_time FLOAT,\n        end_time FLOAT,\n        exit_code INTEGER\n        )\n        '
        try:
            cu = self._cx.cursor()
            cu.execute(sql)
            self._cx.commit()
        except sqlite3.Error as ex:
            print(('Failed to set up database: %s' % ex))
            raise ex

    def _init_job(self, name, command, working_dir, nslots, queue, output_name, join_output):
        """Insert a new job row in state 't' and return its job id.

        *command* is a list of argv tokens; tokens containing spaces are
        re-quoted before the list is flattened to a single shell string.
        """
        cmd = []
        for arg in command:
            try:
                # index(' ') raises ValueError when there is no space, i.e.
                # only space-containing args get quoted.
                arg.index(' ')
                arg = ('"%s"' % arg)
            except ValueError:
                pass
            cmd.append(arg)
        command = ' '.join(cmd)
        logging.debug(('_init_job: cmd: %s' % cmd))
        try:
            sql = '\n            INSERT INTO jobs (user,state,qsub_time,name,command,working_dir,nslots,queue,output_name,join_output)\n            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n            '
            cu = self._cx.cursor()
            cu.execute(sql, (self._user(), 't', time.time(), name, command, working_dir, nslots, queue, output_name, join_output))
            self._cx.commit()
            return cu.lastrowid
        except Exception as ex:
            logging.error(('qsub failed with exception: %s' % ex))

    def _start_job(self, job_id):
        """Launch the queued job *job_id* as a subprocess.

        Writes a wrapper shell script that runs the command with stdout/
        stderr redirected GE-style (<name>.o<id> / <name>.e<id>) and records
        the exit code in a __exit_code.<id> file; on failure the job is
        marked 'Eqw'.
        """
        sql = '\n        SELECT name,command,nslots,queue,working_dir,output_name,join_output\n        FROM jobs WHERE id==?\n        '
        cu = self._cx.cursor()
        cu.execute(sql, (job_id,))
        job = cu.fetchone()
        name = job['name']
        command = job['command']
        nslots = job['nslots']
        queue = job['queue']
        working_dir = job['working_dir']
        output_name = job['output_name']
        join_output = job['join_output']
        try:
            # Work out the base path for the output files (qsub -o handling).
            if output_name:
                out = os.path.abspath(output_name)
                if os.path.isdir(out):
                    out = os.path.join(out, name)
                elif (not os.path.isabs(out)):
                    out = os.path.join(working_dir, out)
            else:
                out = os.path.join(working_dir, name)
            logging.debug(('Output basename: %s' % out))
            stdout_file = ('%s.o%s' % (out, job_id))
            redirect = ('1>%s' % stdout_file)
            logging.debug(('Stdout: %s' % stdout_file))
            if (join_output == 'y'):
                # qsub -j y: merge stderr into stdout.
                redirect = ('%s 2>&1' % redirect)
            else:
                stderr_file = ('%s.e%s' % (out, job_id))
                redirect = ('%s 2>%s' % (redirect, stderr_file))
                logging.debug(('Stderr: %s' % stderr_file))
            script_file = os.path.join(self._database_dir, ('__job%d.sh' % job_id))
            with io.open(script_file, 'wt') as fp:
                fp.write((u'#!%s\nNSLOTS=%s QUEUE=%s %s %s\nexit_code=$?\necho "$exit_code" 1>%s/__exit_code.%d\n' % (self._shell, nslots, queue, command, redirect, self._database_dir, job_id)))
            # 509 == 0o775 (rwxrwxr-x): make the wrapper script executable.
            os.chmod(script_file, 509)
            process = subprocess.Popen(script_file, cwd=working_dir, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, preexec_fn=os.setpgrp)
            pid = process.pid
            self._processes.append(process)
            sql = "\n            UPDATE jobs SET pid=?,state='r',start_time=?\n            WHERE id=?\n            "
            cu = self._cx.cursor()
            cu.execute(sql, (pid, time.time(), job_id))
            self._cx.commit()
        except Exception as ex:
            logging.debug(("Exception trying to start job '%s'" % job_id))
            logging.debug(('%s' % ex))
            # Mark the job as failed-to-start (GE's 'Eqw' state).
            sql = "\n            UPDATE jobs SET state='Eqw',start_time=?\n            WHERE id=?\n            "
            cu = self._cx.cursor()
            cu.execute(sql, (time.time(), job_id))
            self._cx.commit()

    def update_jobs(self):
        """Advance the job state machine one step.

        Moves 't' jobs to 'qw' after qsub_delay, detects finished 'r' jobs
        (via signal 0) and completes them, kills and completes 'd' jobs, and
        starts 'qw' jobs while fewer than max_jobs are running.
        """
        cu = self._cx.cursor()
        sql = "\n        SELECT id,qsub_time FROM jobs WHERE state=='t'\n        "
        cu.execute(sql)
        jobs = cu.fetchall()
        for job in jobs:
            if ((time.time() - job['qsub_time']) < self._qsub_delay):
                logging.debug("Job %d not ready to go to 'qw'", job['id'])
                continue
            logging.debug(("Setting state to 'qw' for job %d" % job['id']))
            sql = "\n            UPDATE jobs SET state='qw' WHERE id=?\n            "
            cu.execute(sql, (job['id'],))
            self._cx.commit()
        sql = "\n        SELECT id,pid FROM jobs WHERE state=='r'\n        "
        cu.execute(sql)
        jobs = cu.fetchall()
        finished_jobs = []
        for job in jobs:
            job_id = job['id']
            pid = job['pid']
            try:
                logging.debug(('Checking job=%d pid=%d' % (job_id, pid)))
                # Signal 0 only probes whether the process is still alive.
                os.kill(pid, 0)
            except Exception as ex:
                logging.debug(('Exception: %s' % ex))
                finished_jobs.append(job_id)
        logging.debug(('Finished jobs: %s' % finished_jobs))
        for job_id in finished_jobs:
            script_file = os.path.join(self._database_dir, ('__job%d.sh' % job_id))
            if os.path.exists(script_file):
                os.remove(script_file)
            exit_code_file = os.path.join(self._database_dir, ('__exit_code.%d' % job_id))
            if os.path.exists(exit_code_file):
                # Use the exit-code file's ctime as the job end time.
                end_time = os.path.getctime(exit_code_file)
                with io.open(exit_code_file, 'rt') as fp:
                    exit_code = int(fp.read())
                os.remove(exit_code_file)
            else:
                logging.error(('Missing __exit_code file for job %s' % job_id))
                end_time = time.time()
                exit_code = 1
            sql = "\n            UPDATE jobs SET state='c',exit_code=?,end_time=?\n            WHERE id==?\n            "
            logging.debug(('SQL: %s' % sql))
            cu.execute(sql, (exit_code, end_time, job_id))
        if finished_jobs:
            self._cx.commit()
        sql = "\n        SELECT id FROM jobs WHERE state == 'd'\n        "
        cu.execute(sql)
        jobs = cu.fetchall()
        deleted_jobs = [job['id'] for job in jobs]
        for job_id in deleted_jobs:
            sql = '\n            SELECT pid FROM jobs WHERE id==?\n            '
            cu.execute(sql, (job_id,))
            job = cu.fetchone()
            if (job is None):
                continue
            try:
                pid = int(job['pid'])
                os.kill(pid, 9)
            except Exception:
                # Process already gone (or pid was NULL) — nothing to kill.
                pass
            sql = "\n            UPDATE jobs SET state='c' WHERE id=?\n            "
            cu.execute(sql, (job_id,))
            self._cx.commit()
            # Remove the wrapper script and exit-code file, if present.
            for name in (('__job%d.sh' % job_id), ('__exit_code.%d' % job_id)):
                try:
                    os.remove(os.path.join(self._database_dir, name))
                except OSError:
                    pass
            exit_code_file = os.path.join(self._database_dir, ('__exit_code.%d' % job_id))
        sql = "\n        SELECT id FROM jobs WHERE state == 'qw'\n        "
        cu.execute(sql)
        jobs = cu.fetchall()
        waiting_jobs = [job['id'] for job in jobs]
        for job_id in waiting_jobs:
            sql = "\n            SELECT id,pid FROM jobs WHERE state=='r'\n            "
            cu.execute(sql)
            nrunning = len(cu.fetchall())
            if (nrunning < self._max_jobs):
                self._start_job(job_id)
            else:
                break

    def _list_jobs(self, user, state=None):
        """Return non-completed job rows, optionally restricted to *user*
        ('*' or '\\*' means all users).  *state* is currently unused."""
        sql = "\n        SELECT id,name,user,state,qsub_time,start_time,queue FROM jobs WHERE state != 'c'\n        "
        args = []
        if ((user != '\\*') and (user != '*')):
            sql += 'AND user == ?'
            args.append(user)
        cu = self._cx.cursor()
        cu.execute(sql, args)
        return cu.fetchall()

    def _job_info(self, job_id):
        """Return the accounting row for a *completed* job, or None."""
        sql = "\n        SELECT id,name,user,exit_code,qsub_time,start_time,end_time,queue\n        FROM jobs WHERE id==? AND state=='c'\n        "
        cu = self._cx.cursor()
        cu.execute(sql, (job_id,))
        return cu.fetchone()

    def _mark_for_deletion(self, job_id):
        """Move a pending/running job to state 'd' so update_jobs kills it."""
        sql = '\n        SELECT state FROM jobs WHERE id=?\n        '
        cu = self._cx.cursor()
        cu.execute(sql, (job_id,))
        job = cu.fetchone()
        if (job is None):
            return
        state = job['state']
        # Only jobs that are pending, queued, running or stuck can be deleted.
        if (state not in ('t', 'qw', 'r', 'Eqw')):
            return
        new_state = 'd'
        sql = '\n        UPDATE jobs SET state=? WHERE id=?\n        '
        cu = self._cx.cursor()
        cu.execute(sql, (new_state, job_id))
        self._cx.commit()

    def _cleanup_processes(self):
        """Kill every running job process recorded in the DB and all
        subprocesses spawned by this instance."""
        logging.debug('Doing cleanup for exit')
        cu = self._cx.cursor()
        sql = "\n        SELECT id,pid FROM jobs WHERE state=='r'\n        "
        cu.execute(sql)
        jobs = cu.fetchall()
        for job in jobs:
            job_id = job['id']
            pid = job['pid']
            try:
                logging.debug(('Checking job=%d pid=%d' % (job_id, pid)))
                os.kill(pid, 9)
            except Exception as ex:
                # Already dead — ignore.
                pass
        for process in self._processes:
            process.kill()

    def _user(self):
        """Current username."""
        return getpass.getuser()

    def _user_home(self):
        """Home directory of the current user."""
        return os.path.expanduser(('~%s' % self._user()))

    def qsub(self, argv):
        """Emulate `qsub`: parse a subset of its options, enqueue the job
        and print the standard submission message."""
        p = argparse.ArgumentParser()
        p.add_argument('-b', action='store')
        p.add_argument('-V', action='store_true')
        p.add_argument('-N', action='store')
        p.add_argument('-cwd', action='store_true')
        p.add_argument('-wd', action='store')
        p.add_argument('-pe', action='store', nargs=2)
        p.add_argument('-j', action='store')
        p.add_argument('-o', action='store')
        p.add_argument('-e', action='store')
        (args, cmd) = p.parse_known_args(argv)
        logging.debug(('qsub: cmd: %s' % cmd))
        if (len(cmd) == 1):
            # Whole command supplied as one quoted string — split it.
            cmd = cmd[0].split(' ')
        if (args.N is not None):
            name = str(args.N)
        else:
            # Default job name: the executable's basename-ish first token.
            name = cmd[0].split(' ')[0]
        logging.debug(('Name: %s' % name))
        if args.wd:
            working_dir = os.path.abspath(args.wd)
        else:
            working_dir = os.getcwd()
        logging.debug(('Working dir: %s' % working_dir))
        if args.pe:
            # -pe <env> <slots>: only the slot count matters here.
            nslots = args.pe[1]
        else:
            nslots = 1
        queue = 'mock.q'
        if args.o:
            output_name = args.o
        else:
            output_name = ''
        if (args.j == 'y'):
            join_output = 'y'
        else:
            join_output = 'n'
        job_id = self._init_job(name, cmd, working_dir, nslots, queue, output_name, join_output)
        logging.debug(('Created job %s' % job_id))
        print(('Your job %s ("%s") has been submitted' % (job_id, name)))
        self.update_jobs()

    def qstat(self, argv):
        """Emulate `qstat`: print a GE-style table of non-completed jobs."""
        self.update_jobs()
        p = argparse.ArgumentParser()
        p.add_argument('-u', action='store')
        args = p.parse_args(argv)
        user = args.u
        if (user is None):
            user = self._user()
        jobs = self._list_jobs(user=user)
        if (not jobs):
            return
        print('job-ID prior name user state submit/start at queue slots ja-task-ID\n')
        for job in jobs:
            job_id = str(job['id'])
            name = str(job['name'])
            user = str(job['user'])
            state = str(job['state'])
            start_time = job['start_time']
            queue = job['queue']
            if (start_time is None):
                # Not started yet: show the submission time instead.
                start_time = job['qsub_time']
            start_time = datetime.datetime.fromtimestamp(start_time).strftime('%m/%d/%Y %H:%M:%S')
            # Fixed-width columns, truncating and right-padding each field.
            line = []
            line.append(('%s%s' % (job_id[:7], (' ' * (7 - len(job_id))))))
            line.append('0.00001')
            line.append(('%s%s' % (name[:10], (' ' * (10 - len(name))))))
            line.append(('%s%s' % (user[:12], (' ' * (12 - len(user))))))
            line.append(('%s%s' % (state[:5], (' ' * (5 - len(state))))))
            line.append(('%s' % start_time))
            line.append(('%s%s' % (queue[:30], (' ' * (30 - len(queue))))))
            line.append('1')
            print(' '.join(line))

    def qacct(self, argv):
        """Emulate `qacct -j <id>`: print accounting info for a completed job
        (silently nothing until qacct_delay seconds after it ended)."""
        logging.debug('qacct: invoked')
        self.update_jobs()
        p = argparse.ArgumentParser()
        p.add_argument('-j', action='store')
        args = p.parse_args(argv)
        job_id = int(args.j)
        job_info = self._job_info(job_id)
        if (job_info is None):
            logging.debug(('qacct: no info returned for job %s' % job_id))
            sys.stderr.write(('error: job id %s not found\n' % job_id))
            return
        # Real GE's accounting lags behind job completion; emulate that.
        elapsed_since_job_end = (time.time() - job_info[6])
        logging.debug(('qacct: elapsed time: %s' % elapsed_since_job_end))
        if (elapsed_since_job_end < self._qacct_delay):
            return
        job_id = job_info['id']
        name = job_info['name']
        user = job_info['user']
        exit_code = job_info['exit_code']
        qsub_time = datetime.datetime.fromtimestamp(job_info['qsub_time']).strftime('%c')
        start_time = datetime.datetime.fromtimestamp(job_info['start_time']).strftime('%c')
        end_time = datetime.datetime.fromtimestamp(job_info['end_time']).strftime('%c')
        queue = job_info['queue']
        print(('\nqname        %s   \nhostname     node001\ngroup        mygroup    \nowner        %s  \nproject      NONE    \ndepartment   defaultdepartment   \njobname      %s   \njobnumber    %s    \ntaskid       undefined\naccount      sge     \npriority     0      \nqsub_time    %s\nstart_time   %s\nend_time     %s\ngranted_pe   NONE  \nslots        1     \nfailed       0    \nexit_status  %s' % (queue, user, name, job_id, qsub_time, start_time, end_time, exit_code)))

    def qdel(self, argv):
        """Emulate `qdel`: mark the given job ids for deletion."""
        logging.debug('qdel: invoked')
        self.update_jobs()
        p = argparse.ArgumentParser()
        p.add_argument('job_id', action='store', nargs='+')
        args = p.parse_args(argv)
        for job_id in args.job_id:
            job_id = int(job_id)
            self._mark_for_deletion(job_id)
            print(('Job %s has been marked for deletion' % job_id))
def adobe_campaign_dataset_config(db: Session, adobe_campaign_connection_config: ConnectionConfig, adobe_campaign_dataset: Dict[(str, Any)]) -> Generator:
    """Fixture: wire the Adobe Campaign dataset to its connection config.

    Renames the connection config to the dataset's fides_key, creates the
    backing CtlDataset and a DatasetConfig referencing both, yields the
    DatasetConfig for the test, then removes both rows on teardown.
    """
    fides_key = adobe_campaign_dataset['fides_key']
    adobe_campaign_connection_config.name = fides_key
    adobe_campaign_connection_config.key = fides_key
    adobe_campaign_connection_config.save(db=db)
    ctl_dataset = CtlDataset.create_from_dataset_dict(db, adobe_campaign_dataset)
    dataset = DatasetConfig.create(db=db, data={'connection_config_id': adobe_campaign_connection_config.id, 'fides_key': fides_key, 'ctl_dataset_id': ctl_dataset.id})
    (yield dataset)
    # Teardown: delete the DatasetConfig first, then the CtlDataset it references.
    dataset.delete(db=db)
    ctl_dataset.delete(db=db)
class OptionSeriesFunnelSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Accessors for the ``mapping.lowpass.resonance`` sonification options.

    NOTE(review): every option appears twice — a zero-argument reader
    followed by a one-argument writer with the same name.  In generated
    Highcharts wrappers these pairs are normally decorated
    ``@property`` / ``@<name>.setter``; the decorators appear to have been
    stripped here, so as written each writer definition simply shadows its
    reader.  Confirm against the code generator before relying on
    attribute-style access.
    """

    def mapFunction(self):
        # Reader: returns the configured value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Writer: stores the raw value without JS-type serialization.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def extractWeleTranslations(item):
    """Map a WELE feed item to a release message.

    Returns None for previews / items without a chapter or volume, a release
    message when the guid matches a known series, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (guid prefix, canonical series name, translation type)
    series_by_guid = [
        ('/zhan-xian/zhan-xian-chapter-', 'Zhanxian', 'translated'),
        ('/sin-city/sin-city-chapter-', 'Sin City', 'translated'),
        ('/martial-god/martial-god-chapter-', 'Martial God', 'translated'),
        ('/heaven-awakening/heaven-awakening-chapter-', 'Heaven Awakening Path', 'translated'),
    ]
    for prefix, series_name, tl_type in series_by_guid:
        if prefix in item['guid']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class SegmentInfo():
    """Lightweight view over a raw segment record.

    Only fields 0 (base address), 4 (size), 6 (flags) and 7 (type) of the
    record are used.
    """

    def __init__(self, raw_segment, is_dummy=False):
        self.base_addr = raw_segment[0]
        self.size = raw_segment[4]
        self.flags = raw_segment[6]
        self.type = raw_segment[7]
        self.is_dummy = is_dummy

    def __eq__(self, other):
        """Value equality over all attributes.

        Returns NotImplemented for unrelated types (instead of a hard False)
        so Python can try the reflected comparison on the other operand.
        """
        if isinstance(other, self.__class__):
            return self.__dict__ == other.__dict__
        return NotImplemented

    def __ne__(self, other):
        # Delegate to __eq__ and propagate NotImplemented rather than
        # negating it (``not NotImplemented`` would wrongly yield False).
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
def test_slack_message_attachments_limit():
    """Schema keeps attachments up to the builder's cap, drops them beyond it."""
    limit = SlackMessageBuilder._MAX_AMOUNT_OF_ATTACHMENTS
    under_limit = ['attachment'] * (limit - 1)
    at_limit = ['attachment'] * limit
    over_limit = ['attachment'] * (limit + 1)
    assert SlackMessageSchema(attachments=under_limit).attachments == under_limit
    assert SlackMessageSchema(attachments=at_limit).attachments == at_limit
    # Exceeding the cap nulls the attachments instead of truncating them.
    assert SlackMessageSchema(attachments=over_limit).attachments is None
    assert SlackMessageSchema(attachments=[]).attachments == []
def test_log_base_10_plus_user_passes_var_list(df_vartypes):
    """LogTransformer(base='10') on one user-given variable round-trips."""
    transformer = LogTransformer(base='10', variables='Age')
    X = transformer.fit_transform(df_vartypes)
    expected = df_vartypes.copy()
    expected['Age'] = [1.30103, 1.32222, 1.27875, 1.25527]
    # Fitted attributes reflect the user-passed configuration.
    assert transformer.base == '10'
    assert transformer.variables == 'Age'
    assert transformer.variables_ == ['Age']
    assert transformer.n_features_in_ == 5
    pd.testing.assert_frame_equal(X, expected)
    # Inverse transform recovers the original (integer) ages after rounding.
    inverted = transformer.inverse_transform(X)
    inverted['Age'] = inverted['Age'].round().astype('int64')
    pd.testing.assert_frame_equal(inverted, df_vartypes)
class KiwoomOpenApiWNegativeReturnCodeError(KiwoomOpenApiWError):
    """Error raised for negative return codes from the Kiwoom OpenAPI-W.

    Maps each known negative code to a message string and offers helpers to
    validate codes directly or to wrap code-returning callables.
    """

    # Known return codes: 0 means success, negatives are errors.
    OP_ERR_NONE = 0
    OP_ERR_NO_LOGIN = (- 1)
    OP_ERR_LOGIN = (- 100)
    OP_ERR_CONNECT = (- 101)
    OP_ERR_VERSION = (- 102)
    OP_ERR_TRCODE = (- 103)
    OP_ERR_NO_REGOPENAPI = (- 104)
    OP_ERR_SISE_OVERFLOW = (- 200)
    OP_ERR_ORDER_OVERFLOW = (- 201)
    OP_ERR_RQ_WRONG_INPUT = (- 202)
    OP_ERR_ORD_WRONG_INPUT = (- 300)
    OP_ERR_ORD_WRONG_ACCPWD = (- 301)
    OP_ERR_ORD_WRONG_ACCNO = (- 302)
    OP_ERR_ORD_WRONG_QTY200 = (- 303)
    OP_ERR_ORD_WRONG_QTY400 = (- 304)
    # NOTE(review): most of these message strings look empty or garbled —
    # presumably Korean text lost in a transcoding pass. TODO confirm against
    # upstream; runtime strings are left byte-for-byte as found.
    MSG_ERR_NONE = ''
    MSG_ERR_NO_LOGIN = ''
    MSG_ERR_LOGIN = ' ( )'
    MSG_ERR_CONNECT = ' '
    MSG_ERR_VERSION = ' .'
    MSG_ERR_TRCODE = 'TrCode .'
    MSG_ERR_NO_REGOPENAPI = 'OpenAPI '
    MSG_ERR_SISE_OVERFLOW = ''
    MSG_ERR_ORDER_OVERFLOW = ''
    MSG_ERR_RQ_WRONG_INPUT = '(/) '
    MSG_ERR_ORD_WRONG_INPUT = ' '
    MSG_ERR_ORD_WRONG_ACCPWD = ' .'
    MSG_ERR_ORD_WRONG_ACCNO = ' .'
    MSG_ERR_ORD_WRONG_QTY200 = '- 200 '
    MSG_ERR_ORD_WRONG_QTY400 = '- 400 '
    # Lookup table used by get_error_message_by_code / __init__.
    ERROR_MESSAGE_BY_CODE = {OP_ERR_NONE: MSG_ERR_NONE, OP_ERR_NO_LOGIN: MSG_ERR_NO_LOGIN, OP_ERR_LOGIN: MSG_ERR_LOGIN, OP_ERR_CONNECT: MSG_ERR_CONNECT, OP_ERR_VERSION: MSG_ERR_VERSION, OP_ERR_TRCODE: MSG_ERR_TRCODE, OP_ERR_NO_REGOPENAPI: MSG_ERR_NO_REGOPENAPI, OP_ERR_SISE_OVERFLOW: MSG_ERR_SISE_OVERFLOW, OP_ERR_ORDER_OVERFLOW: MSG_ERR_ORDER_OVERFLOW, OP_ERR_RQ_WRONG_INPUT: MSG_ERR_RQ_WRONG_INPUT, OP_ERR_ORD_WRONG_INPUT: MSG_ERR_ORD_WRONG_INPUT, OP_ERR_ORD_WRONG_ACCPWD: MSG_ERR_ORD_WRONG_ACCPWD, OP_ERR_ORD_WRONG_ACCNO: MSG_ERR_ORD_WRONG_ACCNO, OP_ERR_ORD_WRONG_QTY200: MSG_ERR_ORD_WRONG_QTY200, OP_ERR_ORD_WRONG_QTY400: MSG_ERR_ORD_WRONG_QTY400}
    def get_error_message_by_code(cls, code, default=None):
        # Return the message for a known code, or `default` for unknown codes.
        # NOTE(review): takes `cls` — an @classmethod decorator appears to have
        # been stripped from this source; TODO confirm. Same for the three
        # helpers below.
        return cls.ERROR_MESSAGE_BY_CODE.get(code, default)
    def check_code_or_raise(cls, code):
        # Raise for any negative code; pass non-negative codes through.
        if (code < 0):
            raise cls(code)
        return code
    def wrap_to_check_code_or_raise(cls, func):
        # NOTE(review): the bare `(func)` below is a no-op expression — it looks
        # like the remnant of a stripped `@functools.wraps(func)` decorator on
        # `wrapper`; TODO confirm before "fixing".
        (func)
        def wrapper(*args, **kwargs):
            # Validate the wrapped callable's return code on every call.
            return cls.check_code_or_raise(func(*args, **kwargs))
        return wrapper
    def try_or_raise(cls, arg, message=None):
        # Dispatch on argument kind: validate an int code directly, or wrap a
        # callable so its return code is validated on each call.
        # NOTE(review): `message` is accepted but never used here.
        if isinstance(arg, int):
            return cls.check_code_or_raise(arg)
        elif callable(arg):
            return cls.wrap_to_check_code_or_raise(arg)
        else:
            raise TypeError(("Expected 'int' or 'callable' but %s found" % type(arg)))
    def __init__(self, code, message=None):
        # Default the message from the code table when not given explicitly.
        if (message is None):
            message = self.get_error_message_by_code(code)
        super().__init__(message)
        self._code = code
        self._message = message
    def __str__(self):
        return self._message
    def __repr__(self):
        return '{}({!r}, {!r})'.format(self.__class__.__name__, self._code, self._message)
    def code(self):
        # The offending return code.
        # NOTE(review): reads like an @property whose decorator was stripped —
        # TODO confirm.
        return self._code
def get_refractive_index(glass_name):
    """Look up a glass by exact name in the module catalogue.

    Builds the catalogue lazily on first use. Raises ValueError for unknown
    names, suggesting a case-insensitive match when one exists.
    """
    if _glass_catalogue is None:
        _build_glass_catalogue()
    if glass_name not in _glass_catalogue:
        # Exact lookup failed; check whether only the capitalisation differs
        # so the error message can suggest the intended entry.
        wanted = glass_name.lower()
        for candidate in _glass_catalogue:
            if candidate.lower() == wanted:
                raise ValueError('The requested glass "%s" was not found in the catalogue. Did you mean "%s"?' % (glass_name, candidate))
        raise ValueError('The requested glass "%s" was not found in the catalogue.' % glass_name)
    return _glass_catalogue[glass_name]
class ToolboxLayout(QtWidgets.QVBoxLayout):
    """Vertical layout hosting the Houdini toolbox UI: a tab widget with a
    'Generic' tab of general tools and a 'Crowd' tab of crowd tools."""

    def __init__(self, *args, **kwargs):
        super(ToolboxLayout, self).__init__(*args, **kwargs)
        self._setup_ui()

    def _setup_ui(self):
        """Build the tab container and populate both tabs with tool buttons."""
        # Top-level tab container added directly to this layout.
        main_tab_widget = QtWidgets.QTabWidget(self.widget())
        self.addWidget(main_tab_widget)
        # --- 'Generic' tab -------------------------------------------------
        general_tab_widget = QtWidgets.QWidget(self.widget())
        general_tab_vertical_layout = QtWidgets.QVBoxLayout()
        general_tab_widget.setLayout(general_tab_vertical_layout)
        main_tab_widget.addTab(general_tab_widget, 'Generic')
        from anima.ui.utils import create_button
        create_button('Open Version', general_tab_vertical_layout, GeneralTools.version_dialog, callback_kwargs={'mode': 1})
        create_button('Save As Version', general_tab_vertical_layout, GeneralTools.version_dialog, callback_kwargs={'mode': 0})
        create_button('Browse $HIP', general_tab_vertical_layout, GeneralTools.browse_hip)
        create_button('Copy Node Path', general_tab_vertical_layout, GeneralTools.copy_node_path)
        create_button('Range From Shot', general_tab_vertical_layout, GeneralTools.range_from_shot)
        create_button('Update Render Settings', general_tab_vertical_layout, GeneralTools.update_render_settings)

        def export_rsproxy_data_as_json_callback():
            # Run the RSProxy JSON export and report success/failure in a dialog.
            import hou
            try:
                GeneralTools.export_rsproxy_data_as_json()
            except (BaseException, hou.OperationFailed) as e:
                QtWidgets.QMessageBox.critical(main_tab_widget, 'Export', 'Error!<br><br>{}'.format(traceback.format_exc()))
            else:
                QtWidgets.QMessageBox.information(main_tab_widget, 'Export', 'Data has been exported correctly!')
        create_button('Export RSProxy Data As JSON', general_tab_vertical_layout, export_rsproxy_data_as_json_callback)
        # Batch-rename row: search / replace fields plus a 'recurse' checkbox.
        batch_rename_layout = QtWidgets.QHBoxLayout()
        general_tab_vertical_layout.addLayout(batch_rename_layout)
        search_field = QtWidgets.QLineEdit()
        search_field.setPlaceholderText('Search')
        replace_field = QtWidgets.QLineEdit()
        replace_field.setPlaceholderText('Replace')
        replace_in_child_nodes_check_box = QtWidgets.QCheckBox()
        replace_in_child_nodes_check_box.setToolTip('Replace In Child Nodes')
        replace_in_child_nodes_check_box.setChecked(False)
        batch_rename_layout.addWidget(search_field)
        batch_rename_layout.addWidget(replace_field)
        batch_rename_layout.addWidget(replace_in_child_nodes_check_box)

        def search_and_replace_callback():
            # Rename selected nodes using the current field values.
            search_str = search_field.text()
            replace_str = replace_field.text()
            GeneralTools.rename_selected_nodes(search_str, replace_str, replace_in_child_nodes_check_box.isChecked())
        create_button('Search && Replace', batch_rename_layout, search_and_replace_callback)
        create_button('Import Shaders From Maya', general_tab_vertical_layout, GeneralTools.import_shaders_from_maya)
        # NOTE(review): 'Creat' is a typo in this user-visible label; left as-is
        # here because this documentation pass must not alter runtime strings.
        create_button('Creat Focus Plane', general_tab_vertical_layout, GeneralTools.create_focus_plane)
        from anima.dcc.houdini import auxiliary
        create_button('Create A Very Nice Camera Rig', general_tab_vertical_layout, auxiliary.very_nice_camera_rig)
        general_tab_vertical_layout.addStretch()
        # --- 'Crowd' tab ---------------------------------------------------
        crowd_tab_widget = QtWidgets.QWidget(self.widget())
        crowd_tab_vertical_layout = QtWidgets.QVBoxLayout()
        crowd_tab_widget.setLayout(crowd_tab_vertical_layout)
        main_tab_widget.addTab(crowd_tab_widget, 'Crowd')
        from anima.dcc.houdini import crowd_tools
        create_button('Create Bake Setup', crowd_tab_vertical_layout, crowd_tools.create_bake_setup)
        create_button('Create Render Setup', crowd_tab_vertical_layout, crowd_tools.create_render_setup)
        crowd_tab_vertical_layout.addStretch()
def test_proj_bad():
    """The DSL must reject a call whose argument type-shape conflicts with
    the callee's declared type-shape."""
    msg = 'type-shape of calling argument may not equal the required type-shape'
    # NOTE(review): decorators (presumably the DSL's @proc) on `dot` and `proj`
    # appear to have been stripped from this source — TODO confirm; without
    # them these defs are plain Python functions and would not type-check at
    # definition time.
    def dot(m: size, x: R[(1, 1)], y: R[m]):
        huga: R
        pass
    # Defining `proj` with a mismatched argument shape for `dot` must raise.
    with pytest.raises(TypeError, match=msg):
        def proj(n: size, x: R[(100, 10, 1)], y: R[(10, n)]):
            dot(n, x[1], y[0])
def initial_alerts():
    """Fixture data: build pending test, model, and source-freshness alerts.

    Returns a 3-tuple ``(test_alerts, model_alerts, source_freshness_alerts)``
    of schema objects, all with ``suppression_status='pending'``.
    """
    # Four test alerts spread over two models; alert 4 has status 'warn',
    # the rest 'fail'. Note alert 3's tags value is the bare string 'one'
    # (not a JSON list) — presumably intentional for the test; verify.
    test_alerts = [
        PendingTestAlertSchema(id='1', alert_class_id='test_id_1', model_unique_id='elementary.model_id_1', test_unique_id='test_id_1', test_name='test_1', test_created_at='2022-10-10 10:10:10', tags='["one", "two"]', model_meta=dict(owner='["jeff", "john"]'), status='fail', elementary_unique_id='elementary.model_id_1.test_id_1.9cf2f5f6ad.None.generic', detected_at='2022-10-10 10:00:00', database_name='test_db', schema_name='test_schema', table_name='table', suppression_status='pending', test_type='dbt_test', test_sub_type='generic', test_results_description='a mock alert', test_results_query='select * from table', test_short_name='test_1', severity='ERROR'),
        PendingTestAlertSchema(id='2', alert_class_id='test_id_2', model_unique_id='elementary.model_id_1', test_unique_id='test_id_2', test_name='test_2', test_created_at='2022-10-10 09:10:10', tags='["three"]', model_meta=dict(owner='["jeff", "john"]'), status='fail', elementary_unique_id='elementary.model_id_1.test_id_2.9cf2f5f6ad.None.generic', detected_at='2022-10-10 10:00:00', database_name='test_db', schema_name='test_schema', table_name='table', suppression_status='pending', test_type='dbt_test', test_sub_type='generic', test_results_description='a mock alert', test_results_query='select * from table', test_short_name='test_2', severity='ERROR'),
        PendingTestAlertSchema(id='3', alert_class_id='test_id_3', model_unique_id='elementary.model_id_2', test_unique_id='test_id_3', test_name='test_3', test_created_at='2022-10-10 10:10:10', tags='one', model_meta=dict(owner='["john"]'), status='fail', elementary_unique_id='elementary.model_id_1.test_id_3.9cf2f5f6ad.None.generic', detected_at='2022-10-10 10:00:00', database_name='test_db', schema_name='test_schema', table_name='table', suppression_status='pending', test_type='dbt_test', test_sub_type='generic', test_results_description='a mock alert', test_results_query='select * from table', test_short_name='test_3', severity='ERROR'),
        PendingTestAlertSchema(id='4', alert_class_id='test_id_4', model_unique_id='elementary.model_id_2', test_unique_id='test_id_4', test_name='test_4', test_created_at='2022-10-10 09:10:10', tags='["three", "four"]', model_meta=dict(owner='["jeff"]'), status='warn', elementary_unique_id='elementary.model_id_1.test_id_4.9cf2f5f6ad.None.generic', detected_at='2022-10-10 10:00:00', database_name='test_db', schema_name='test_schema', table_name='table', suppression_status='pending', test_type='dbt_test', test_sub_type='generic', test_results_description='a mock alert', test_results_query='select * from table', test_short_name='test_4', severity='ERROR')]
    # Three model alerts with varying suppression intervals and statuses
    # ('error', 'error', 'skipped').
    model_alerts = [
        PendingModelAlertSchema(id='1', alert_class_id='elementary.model_id_1', model_unique_id='elementary.model_id_1', alias='modely', path='my/path', original_path='', materialization='table', message='', full_refresh=False, detected_at='2022-10-10 10:00:00', alert_suppression_interval=0, tags='["one", "two"]', model_meta=dict(owner='["jeff", "john"]'), status='error', database_name='test_db', schema_name='test_schema', suppression_status='pending'),
        PendingModelAlertSchema(id='2', alert_class_id='elementary.model_id_1', model_unique_id='elementary.model_id_1', alias='modely', path='my/path', original_path='', materialization='table', message='', full_refresh=False, detected_at='2022-10-10 09:00:00', alert_suppression_interval=3, tags='["three"]', model_meta=dict(owner='["john"]'), status='error', database_name='test_db', schema_name='test_schema', suppression_status='pending'),
        PendingModelAlertSchema(id='3', alert_class_id='elementary.model_id_2', model_unique_id='elementary.model_id_2', alias='model2', path='my/path2', original_path='', materialization='table', message='', full_refresh=False, detected_at='2022-10-10 08:00:00', alert_suppression_interval=1, tags='["three", "four"]', model_meta=dict(owner='["jeff"]'), status='skipped', database_name='test_db', schema_name='test_schema', suppression_status='pending')]
    # Three source-freshness alerts covering original_status values 'error',
    # 'warn' and 'runtime error' mapped to statuses 'fail', 'warn', 'error'.
    source_freshness_alerts = [
        PendingSourceFreshnessAlertSchema(id='1', source_freshness_execution_id='1', alert_class_id='elementary.model_id_1', model_unique_id='elementary.model_id_1', alias='modely', path='my/path', original_path='', materialization='table', message='', full_refresh=False, detected_at='2022-10-10 10:00:00', alert_suppression_interval=0, tags='["one", "two"]', model_meta=dict(owner='["jeff", "john"]'), original_status='error', status='fail', snapshotted_at='2023-08-15T12:26:06.884065+00:00', max_loaded_at='1969-12-31T00:00:00+00:00', max_loaded_at_time_ago_in_s=.884065, source_name='elementary_integration_tests', identifier='any_type_column_anomalies_validation', error_after='{"count": null, "period": null}', warn_after='{"count": 1, "period": "minute"}', filter='null', error='problemz', database_name='test_db', schema_name='test_schema', suppression_status='pending'),
        PendingSourceFreshnessAlertSchema(id='2', source_freshness_execution_id='2', alert_class_id='elementary.model_id_2', model_unique_id='elementary.model_id_2', alias='modely', path='my/path', original_path='', materialization='table', message='', full_refresh=False, detected_at='2022-10-10 10:00:00', alert_suppression_interval=0, tags='["one", "two"]', model_meta=dict(owner='["jeff", "john"]'), status='warn', original_status='warn', snapshotted_at='2023-08-15T12:26:06.884065+00:00', max_loaded_at='1969-12-31T00:00:00+00:00', max_loaded_at_time_ago_in_s=.884065, source_name='elementary_integration_tests', identifier='any_type_column_anomalies_validation', error_after='{"count": null, "period": null}', warn_after='{"count": 1, "period": "minute"}', filter='null', error='problemz', database_name='test_db', schema_name='test_schema', suppression_status='pending'),
        PendingSourceFreshnessAlertSchema(id='3', source_freshness_execution_id='3', alert_class_id='elementary.model_id_3', model_unique_id='elementary.model_id_3', alias='modely', path='my/path', original_path='', materialization='table', message='', full_refresh=False, detected_at='2022-10-10 10:00:00', alert_suppression_interval=0, tags='["one", "two"]', model_meta=dict(owner='["jeff", "john"]'), original_status='runtime error', status='error', snapshotted_at='2023-08-15T12:26:06.884065+00:00', max_loaded_at='1969-12-31T00:00:00+00:00', max_loaded_at_time_ago_in_s=.884065, source_name='elementary_integration_tests', identifier='any_type_column_anomalies_validation', error_after='{"count": null, "period": null}', warn_after='{"count": 1, "period": "minute"}', filter='null', error='problemz', database_name='test_db', schema_name='test_schema', suppression_status='pending')]
    return (test_alerts, model_alerts, source_freshness_alerts)
class ListOpsTest(unittest.TestCase):
    """Exercism list-ops suite: append, concat, filter, length, map, foldl,
    foldr and reverse, covering empty, single-level and nested inputs."""

    # --- append ---------------------------------------------------------
    def test_append_empty_lists(self):
        self.assertEqual(append([], []), [])
    def test_append_list_to_empty_list(self):
        self.assertEqual(append([], [1, 2, 3, 4]), [1, 2, 3, 4])
    def test_append_empty_list_to_list(self):
        self.assertEqual(append([1, 2, 3, 4], []), [1, 2, 3, 4])
    def test_append_non_empty_lists(self):
        self.assertEqual(append([1, 2], [2, 3, 4, 5]), [1, 2, 2, 3, 4, 5])
    # --- concat (flattens exactly one level) ----------------------------
    def test_concat_empty_list(self):
        self.assertEqual(concat([]), [])
    def test_concat_list_of_lists(self):
        self.assertEqual(concat([[1, 2], [3], [], [4, 5, 6]]), [1, 2, 3, 4, 5, 6])
    def test_concat_list_of_nested_lists(self):
        self.assertEqual(concat([[[1], [2]], [[3]], [[]], [[4, 5, 6]]]), [[1], [2], [3], [], [4, 5, 6]])
    # --- filter / length / map ------------------------------------------
    def test_filter_empty_list(self):
        self.assertEqual(list_ops_filter((lambda x: ((x % 2) == 1)), []), [])
    def test_filter_non_empty_list(self):
        self.assertEqual(list_ops_filter((lambda x: ((x % 2) == 1)), [1, 2, 3, 5]), [1, 3, 5])
    def test_length_empty_list(self):
        self.assertEqual(length([]), 0)
    def test_length_non_empty_list(self):
        self.assertEqual(length([1, 2, 3, 4]), 4)
    def test_map_empty_list(self):
        self.assertEqual(list_ops_map((lambda x: (x + 1)), []), [])
    def test_map_non_empty_list(self):
        self.assertEqual(list_ops_map((lambda x: (x + 1)), [1, 3, 5, 7]), [2, 4, 6, 8])
    # --- folds: direction-dependent cases distinguish foldl from foldr --
    def test_foldl_empty_list(self):
        self.assertEqual(foldl((lambda acc, el: (el * acc)), [], 2), 2)
    def test_foldl_direction_independent_function_applied_to_non_empty_list(self):
        self.assertEqual(foldl((lambda acc, el: (el + acc)), [1, 2, 3, 4], 5), 15)
    def test_foldl_direction_dependent_function_applied_to_non_empty_list(self):
        self.assertEqual(foldl((lambda acc, el: (el / acc)), [1, 2, 3, 4], 24), 64)
    def test_foldr_empty_list(self):
        self.assertEqual(foldr((lambda acc, el: (el * acc)), [], 2), 2)
    def test_foldr_direction_independent_function_applied_to_non_empty_list(self):
        self.assertEqual(foldr((lambda acc, el: (el + acc)), [1, 2, 3, 4], 5), 15)
    def test_foldr_direction_dependent_function_applied_to_non_empty_list(self):
        self.assertEqual(foldr((lambda acc, el: (el / acc)), [1, 2, 3, 4], 24), 9)
    # --- reverse --------------------------------------------------------
    def test_reverse_empty_list(self):
        self.assertEqual(reverse([]), [])
    def test_reverse_non_empty_list(self):
        self.assertEqual(reverse([1, 3, 5, 7]), [7, 5, 3, 1])
    def test_reverse_list_of_lists_is_not_flattened(self):
        self.assertEqual(reverse([[1, 2], [3], [], [4, 5, 6]]), [[4, 5, 6], [], [3], [1, 2]])
    def test_foldr_foldr_add_string(self):
        self.assertEqual(foldr((lambda acc, el: (el + acc)), ['e', 'x', 'e', 'r', 'c', 'i', 's', 'm'], '!'), 'exercism!')
    def test_reverse_reverse_mixed_types(self):
        self.assertEqual(reverse(['xyz', 4.0, 'cat', 1]), [1, 'cat', 4.0, 'xyz'])
def test_unknown_media_type():
    """Rendering media under an unregistered content type yields HTTP 415."""
    class TestResource():
        async def on_get(self, req, resp):
            resp.content_type = 'nope/json'
            resp.media = {'something': True}
            try:
                await resp.render_body()
            except Exception as ex:
                # Only the unsupported-media-type error is expected here; let
                # it propagate so the framework maps it onto the response.
                assert isinstance(ex, errors.HTTPUnsupportedMediaType)
                raise

    client = create_client(TestResource())
    response = client.simulate_get('/')
    assert response.status_code == 415
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.