code stringlengths 281 23.7M |
|---|
class ColumnSharder(FLDataSharder):
    """Shard dataset rows by the value of a configured CSV column.

    The sharding column (``cfg.sharding_col``) may name a column, or — as a
    digit string — give a positional index into the row.
    """

    def __init__(self, **kwargs):
        # Initialize this component's config before the base sharder runs.
        init_self_cfg(self, component_class=__class__, config_class=ColumnSharderConfig, **kwargs)
        super().__init__(**kwargs)

    def shard_for_row(self, csv_row: Dict[Any, Any]) -> List[Any]:
        """Return the shard key(s) for one row: the sharding column's value.

        Tensor values are unwrapped to plain Python scalars so shard keys
        are hashable/comparable.
        """
        unwrapped_colindex = self.cfg.sharding_col
        if unwrapped_colindex.isdigit():
            unwrapped_colindex = int(unwrapped_colindex)
            # Fix: the original assertion message was truncated after the
            # colon; include the offending index and the row width.
            assert unwrapped_colindex < len(csv_row), (
                f'Sharding index out of bounds: {unwrapped_colindex} >= {len(csv_row)}'
            )
        shard_idx = csv_row[unwrapped_colindex]
        if isinstance(shard_idx, torch.Tensor):
            shard_idx = shard_idx.item()
        return [shard_idx]
def create_all_dirs(pathinfo, rootpath='.'):
try:
if (not os.path.exists(rootpath)):
os.mkdir(rootpath)
for info in pathinfo:
d = os.path.join(rootpath, os.path.split(info)[0])
if (not os.path.exists(d)):
os.makedirs(d)
except:
print(sys.exc_info(), 33)
print('!') |
class OptionPlotoptionsLollipopLowmarkerStates(Options):
    """Generated Highcharts wrapper for ``plotOptions.lollipop.lowMarker.states``.

    NOTE(review): in this generated option API these accessors are normally
    ``@property`` getters; no decorators appear here — confirm upstream.
    """

    def hover(self) -> 'OptionPlotoptionsLollipopLowmarkerStatesHover':
        """Sub-options object for the hovered state."""
        return self._config_sub_data('hover', OptionPlotoptionsLollipopLowmarkerStatesHover)

    def normal(self) -> 'OptionPlotoptionsLollipopLowmarkerStatesNormal':
        """Sub-options object for the normal (idle) state."""
        return self._config_sub_data('normal', OptionPlotoptionsLollipopLowmarkerStatesNormal)

    def select(self) -> 'OptionPlotoptionsLollipopLowmarkerStatesSelect':
        """Sub-options object for the selected state."""
        return self._config_sub_data('select', OptionPlotoptionsLollipopLowmarkerStatesSelect)
def make_fproc(func_attrs, layout, dtype='float16'):
    """Build an op-filter callback for *layout* and store the matching
    CUTLASS op instances on ``func_attrs['op_instance']``.
    """

    def fproc(op):
        # Resolve the A/B/C operand layouts once per candidate op.
        a_layout, b_layout, c_layout = layout.cutlass_lib_layouts()
        return default_fproc(
            op=op,
            a_layout=a_layout,
            b_layout=b_layout,
            c_layout=c_layout,
            epilogue_name=func_attrs['epilogue'],
            epilogue2_name=func_attrs['epilogue2'],
            dtype=dtype,
        )

    func_attrs['op_instance'] = extract_config(fproc, func_attrs)
def format_username_openldap(model_fields):
    """Render an OpenLDAP distinguished name from Django model fields.

    Every model field becomes a cleaned ``attr=value`` pair; the pairs are
    comma-joined and suffixed with the configured LDAP search base.
    """
    ldap_fields = convert_model_fields_to_ldap_fields(model_fields)
    pairs = [
        '{attribute_name}={field_value}'.format(
            attribute_name=clean_ldap_name(field_name),
            field_value=clean_ldap_name(field_value),
        )
        for field_name, field_value in ldap_fields.items()
    ]
    return '{user_identifier},{search_base}'.format(
        user_identifier=','.join(pairs),
        search_base=settings.LDAP_AUTH_SEARCH_BASE,
    )
def convert_version(version, app, repodir):
    """Convert one index-v1 package version entry into index-v2 form.

    Builds a ``ver`` dict with file/src/obb entries, a nested ``manifest``
    section, per-version anti-features, and release-channel / whatsNew data
    pulled from the app metadata.

    NOTE(review): indentation was lost upstream; nesting below was
    reconstructed from the data dependencies — confirm against the original.
    """
    ver = {}
    if ('added' in version):
        ver['added'] = convert_datetime(version['added'])
    else:
        ver['added'] = 0
    # Primary APK entry: leading '/' makes the name repo-root-relative.
    ver['file'] = {'name': '/{}'.format(version['apkName']), version['hashType']: version['hash'], 'size': version['size']}
    ipfsCIDv1 = version.get('ipfsCIDv1')
    if ipfsCIDv1:
        ver['file']['ipfsCIDv1'] = ipfsCIDv1
    if ('srcname' in version):
        ver['src'] = common.file_entry(os.path.join(repodir, version['srcname']))
    if ('obbMainFile' in version):
        ver['obbMainFile'] = common.file_entry(os.path.join(repodir, version['obbMainFile']), version['obbMainFileSha256'])
    if ('obbPatchFile' in version):
        ver['obbPatchFile'] = common.file_entry(os.path.join(repodir, version['obbPatchFile']), version['obbPatchFileSha256'])
    # Manifest-derived attributes are grouped under 'manifest'.
    ver['manifest'] = manifest = {}
    for element in ('nativecode', 'versionName', 'maxSdkVersion'):
        if (element in version):
            manifest[element] = version[element]
    if ('versionCode' in version):
        manifest['versionCode'] = version['versionCode']
    if (('features' in version) and version['features']):
        manifest['features'] = features = []
        for feature in version['features']:
            features.append({'name': feature})
    if ('minSdkVersion' in version):
        manifest['usesSdk'] = {}
        manifest['usesSdk']['minSdkVersion'] = version['minSdkVersion']
        if ('targetSdkVersion' in version):
            manifest['usesSdk']['targetSdkVersion'] = version['targetSdkVersion']
        else:
            # Android semantics: an absent targetSdkVersion defaults to minSdkVersion.
            manifest['usesSdk']['targetSdkVersion'] = manifest['usesSdk']['minSdkVersion']
    if ('signer' in version):
        manifest['signer'] = {'sha256': [version['signer']]}
    # Permission lists: rename to camelCase index-v2 keys.
    for element in ('uses-permission', 'uses-permission-sdk-23'):
        en = element.replace('uses-permission', 'usesPermission').replace('-sdk-23', 'Sdk23')
        if ((element in version) and version[element]):
            manifest[en] = []
            for perm in version[element]:
                if perm[1]:
                    # perm is (name, maxSdkVersion); only emit maxSdkVersion when set.
                    manifest[en].append({'name': perm[0], 'maxSdkVersion': perm[1]})
                else:
                    manifest[en].append({'name': perm[0]})
    # App-level anti-features, overridden by per-version ones.
    antiFeatures = app.get('AntiFeatures', {}).copy()
    for (name, descdict) in version.get('antiFeatures', dict()).items():
        antiFeatures[name] = descdict
    if antiFeatures:
        # Sorted keys (and sorted inner dicts) keep the output deterministic.
        ver['antiFeatures'] = {k: dict(sorted(antiFeatures[k].items())) for k in sorted(antiFeatures)}
    if ('versionCode' in version):
        if (version['versionCode'] > app['CurrentVersionCode']):
            # Versions newer than the current release go to the Beta channel.
            ver[RELEASECHANNELS_CONFIG_NAME] = ['Beta']
        for build in app.get('Builds', []):
            if ((build['versionCode'] == version['versionCode']) and ('whatsNew' in build)):
                ver['whatsNew'] = build['whatsNew']
                break
    return ver
def extract_yaml(yaml_files):
    """Parse each file in *yaml_files* with ``yaml.safe_load``.

    Returns the parsed documents in input order.  Read and parse failures
    are reported on stdout and then re-raised unchanged.
    """
    documents = []
    for path in yaml_files:
        try:
            with open(path, 'r') as handle:
                parsed = yaml.safe_load(handle)
        except IOError as e:
            print('Error reading file', path)
            raise e
        except yaml.YAMLError as e:
            print('Error parsing file', path)
            raise e
        except Exception as e:
            print('General error')
            raise e
        documents.append(parsed)
    return documents
def extractBisugotlWordpressCom(item):
    """Map a bisugotl.wordpress.com feed item to a release message.

    Returns ``None`` for previews or items with neither volume nor chapter,
    a built release message when a known tag matches, ``False`` otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestComparisonMode(unittest.TestCase):
    """Exercise trait change notification under the three comparison modes.

    ``none`` fires on every assignment, ``identity`` only when the new object
    is a different object, ``equality`` only when it compares unequal.
    """

    def setUp(self):
        self.a = Foo(name='a')
        self.same_as_a = Foo(name='a')          # equal to a, different object
        self.different_from_a = Foo(name='not a')

    def bar_changed(self, object, trait, old, new):
        """Change handler: record the last notification and count calls."""
        self.changed_object = object
        self.changed_trait = trait
        self.changed_old = old
        self.changed_new = new
        self.changed_count += 1

    def reset_change_tracker(self):
        """Clear the recorded notification state."""
        self.changed_object = None
        self.changed_trait = None
        self.changed_old = None
        self.changed_new = None
        self.changed_count = 0

    def check_tracker(self, object, trait, old, new, count):
        """Assert the tracker saw exactly *count* events, the last as given."""
        self.assertEqual(count, self.changed_count)
        self.assertIs(object, self.changed_object)
        self.assertEqual(trait, self.changed_trait)
        self.assertIs(old, self.changed_old)
        self.assertIs(new, self.changed_new)

    def test_none_first_assignment(self):
        nc = NoneCompare()
        nc.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = nc.bar
        nc.bar = self.a
        self.check_tracker(nc, 'bar', default_value, self.a, 1)

    def test_identity_first_assignment(self):
        ic = IdentityCompare()
        ic.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ic.bar
        ic.bar = self.a
        self.check_tracker(ic, 'bar', default_value, self.a, 1)

    def test_equality_first_assignment(self):
        ec = EqualityCompare()
        ec.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ec.bar
        ec.bar = self.a
        self.check_tracker(ec, 'bar', default_value, self.a, 1)

    def test_none_same_object(self):
        nc = NoneCompare()
        nc.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = nc.bar
        nc.bar = self.a
        self.check_tracker(nc, 'bar', default_value, self.a, 1)
        # comparison_mode none: re-assigning the same object still notifies.
        nc.bar = self.a
        self.check_tracker(nc, 'bar', self.a, self.a, 2)

    def test_identity_same_object(self):
        ic = IdentityCompare()
        ic.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ic.bar
        ic.bar = self.a
        self.check_tracker(ic, 'bar', default_value, self.a, 1)
        # identity compare: same object => no new event (count stays 1).
        ic.bar = self.a
        self.check_tracker(ic, 'bar', default_value, self.a, 1)

    def test_equality_same_object(self):
        ec = EqualityCompare()
        ec.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ec.bar
        ec.bar = self.a
        self.check_tracker(ec, 'bar', default_value, self.a, 1)
        # equality compare: equal value => no new event (count stays 1).
        ec.bar = self.a
        self.check_tracker(ec, 'bar', default_value, self.a, 1)

    def test_none_different_object(self):
        nc = NoneCompare()
        nc.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = nc.bar
        nc.bar = self.a
        self.check_tracker(nc, 'bar', default_value, self.a, 1)
        nc.bar = self.different_from_a
        self.check_tracker(nc, 'bar', self.a, self.different_from_a, 2)

    def test_identity_different_object(self):
        ic = IdentityCompare()
        ic.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ic.bar
        ic.bar = self.a
        self.check_tracker(ic, 'bar', default_value, self.a, 1)
        ic.bar = self.different_from_a
        self.check_tracker(ic, 'bar', self.a, self.different_from_a, 2)

    def test_equality_different_object(self):
        ec = EqualityCompare()
        ec.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ec.bar
        ec.bar = self.a
        self.check_tracker(ec, 'bar', default_value, self.a, 1)
        ec.bar = self.different_from_a
        self.check_tracker(ec, 'bar', self.a, self.different_from_a, 2)

    def test_none_different_object_same_as(self):
        nc = NoneCompare()
        nc.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = nc.bar
        nc.bar = self.a
        self.check_tracker(nc, 'bar', default_value, self.a, 1)
        nc.bar = self.same_as_a
        self.check_tracker(nc, 'bar', self.a, self.same_as_a, 2)

    def test_identity_different_object_same_as(self):
        ic = IdentityCompare()
        ic.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ic.bar
        ic.bar = self.a
        self.check_tracker(ic, 'bar', default_value, self.a, 1)
        # identity compare: equal but distinct object still notifies.
        ic.bar = self.same_as_a
        self.check_tracker(ic, 'bar', self.a, self.same_as_a, 2)

    def test_equality_different_object_same_as(self):
        ec = EqualityCompare()
        ec.on_trait_change(self.bar_changed, 'bar')
        self.reset_change_tracker()
        default_value = ec.bar
        ec.bar = self.a
        self.check_tracker(ec, 'bar', default_value, self.a, 1)
        # equality compare: equal object => no new event (count stays 1).
        ec.bar = self.same_as_a
        self.check_tracker(ec, 'bar', default_value, self.a, 1)

    def test_comparison_mode_none_with_cached_property(self):
        class Model(HasTraits):
            value = Property(depends_on='name')
            name = Str(comparison_mode=ComparisonMode.none)

            # Fix: the bare `_property` expression was a mangled
            # `@cached_property` decorator (it raised NameError at runtime).
            @cached_property
            def _get_value(self):
                return self.trait_names

        instance = Model()
        events = []
        instance.on_trait_change((lambda: events.append(None)), 'value')
        instance.name = 'A'
        events.clear()
        # comparison_mode none: re-assigning the same value fires again.
        instance.name = 'A'
        self.assertEqual(len(events), 1)
def call(cmd, input_file=None, input_text=None, encoding=None):
    """Run *cmd*, optionally feeding it a file's bytes and/or literal input.

    The file content (if any) is written to the process's stdin first,
    followed by *input_text*; the decoded process output is returned.
    """
    process = get_process(cmd)
    if input_file is not None:
        with open(input_file, 'rb') as handle:
            process.stdin.write(handle.read())
    if input_text is not None:
        process.stdin.write(input_text)
    return get_process_output(process, encoding)
class SigningHandler(Handler):
    """Handle SigningMessage responses coming back from the decision maker."""

    SUPPORTED_PROTOCOL = SigningMessage.protocol_id  # protocol this handler serves

    def setup(self) -> None:
        """Set up the handler (nothing to do)."""

    def handle(self, message: Message) -> None:
        """Dispatch an incoming signing message to the matching sub-handler.

        Messages that cannot be matched to a dialogue are treated as
        unidentified; otherwise dispatch on the performative.
        """
        signing_msg = cast(SigningMessage, message)
        signing_dialogues = cast(SigningDialogues, self.context.signing_dialogues)
        signing_dialogue = cast(Optional[SigningDialogue], signing_dialogues.update(signing_msg))
        if (signing_dialogue is None):
            self._handle_unidentified_dialogue(signing_msg)
            return
        if (signing_msg.performative is SigningMessage.Performative.SIGNED_MESSAGE):
            self._handle_signed_message(signing_msg, signing_dialogue)
        elif (signing_msg.performative is SigningMessage.Performative.ERROR):
            self._handle_error(signing_msg, signing_dialogue)
        else:
            self._handle_invalid(signing_msg, signing_dialogue)

    def teardown(self) -> None:
        """Tear down the handler (nothing to do)."""

    def _handle_unidentified_dialogue(self, signing_msg: SigningMessage) -> None:
        """Log a signing message that belongs to no known dialogue."""
        self.context.logger.info(f'received invalid signing message={signing_msg}, unidentified dialogue.')

    def _handle_signed_message(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Store the returned signature on the strategy and mark it ready."""
        self.context.logger.debug(f'received signing message from decision maker, message={signing_msg} in dialogue={signing_dialogue}')
        self.context.logger.info(f'received signing message from decision maker, signature={signing_msg.signed_message.body} stored!')
        strategy = cast(Strategy, self.context.strategy)
        strategy.signature_of_ethereum_address = signing_msg.signed_message.body
        strategy.is_ready_to_register = True

    def _handle_error(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Log a signing error reported by the decision maker."""
        self.context.logger.info(f'transaction signing was not successful. Error_code={signing_msg.error_code} in dialogue={signing_dialogue}')

    def _handle_invalid(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Warn about a performative this handler does not support."""
        self.context.logger.warning(f'cannot handle signing message of performative={signing_msg.performative} in dialogue={signing_dialogue}.')
class BufferedS3Reader(contextlib.AbstractContextManager):
    """Context manager offering seek/read access over a string fetched from S3.

    The object body is downloaded once on ``__enter__`` and then served from
    memory via a cursor.
    """

    def __init__(self, s3_path: pathlib.Path, storage_service: S3StorageService) -> None:
        self.s3_path = s3_path
        self.storage_service = storage_service
        self.data: Optional[str] = None  # populated by __enter__
        self.cursor = 0

    def __enter__(self) -> BufferedS3Reader:
        self.data = self.storage_service.read(str(self.s3_path))
        return self

    def __exit__(self, __exc_type: Optional[Type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[TracebackType]) -> Optional[bool]:
        pass  # nothing to release; data stays available after the block

    def seek(self, idx: int) -> None:
        """Move the cursor to *idx*, clamped to the end of the data."""
        if self.data is None:
            raise ValueError('BufferedS3Reader: data is None')
        self.cursor = min(idx, len(self.data))

    def read(self, chars: int=0) -> str:
        """Return *chars* characters from the cursor, or all remaining if chars <= 0."""
        if self.data is None:
            raise ValueError('BufferedS3Reader: data is None')
        if chars > 0:
            result = self.data[self.cursor:self.cursor + chars]
            self.cursor += chars
        else:
            result = self.data[self.cursor:]
            self.cursor = len(self.data)
        return result

    def copy_to_local(self) -> pathlib.Path:
        """Write the buffered data to a persistent temp file and return its path."""
        with tempfile.NamedTemporaryFile('w', delete=False) as f:
            f.write(str(self.data))
            return pathlib.Path(f.name)
def get_layout_page_with_text_or_graphic_replaced_by_graphic(
    layout_page: LayoutPage,
    semantic_graphic: SemanticGraphic,
    is_only_semantic_graphic_on_page: bool,
    is_replace_overlapping_text: bool
) -> LayoutPage:
    """Return *layout_page* with graphics inside the semantic graphic's
    bounding box replaced by that graphic, optionally also removing the
    text tokens the box overlaps.
    """
    layout_graphic = semantic_graphic.layout_graphic
    assert layout_graphic
    assert layout_graphic.coordinates
    graphic_bounding_box = layout_graphic.coordinates.bounding_box
    if is_only_semantic_graphic_on_page:
        # Sole graphic on the page: attach every page token as its related block.
        all_tokens_block = LayoutBlock.for_tokens(list(layout_page.iter_all_tokens()))
        layout_graphic = layout_graphic._replace(related_block=all_tokens_block)
    surviving_graphics = [
        existing_graphic
        for existing_graphic in layout_page.graphics
        if not is_layout_graphic_within_bounding_box(existing_graphic, bounding_box=graphic_bounding_box)
    ]
    modified_layout_page = layout_page.replace(graphics=surviving_graphics + [layout_graphic])
    if is_replace_overlapping_text:
        remove_overlapping_tokens = functools.partial(
            _remove_tokens_within_bounding_box_flatmap_fn,
            bounding_box=graphic_bounding_box,
        )
        modified_layout_page = (
            modified_layout_page
            .flat_map_layout_tokens(remove_overlapping_tokens)
            .remove_empty_blocks()
        )
    return modified_layout_page
def test_detect_variables_with_na(df_na):
    """With missing_only=True and variables=None, DropMissingData should
    auto-detect the columns that contain NA and drop incomplete rows.

    ``df_na`` is a pytest fixture — presumably a 6-column frame where the
    listed five columns contain NA; confirm against the fixture definition.
    """
    imputer = DropMissingData(missing_only=True, variables=None)
    X_transformed = imputer.fit_transform(df_na)
    # Constructor parameters are stored unchanged.
    assert (imputer.missing_only is True)
    assert (imputer.threshold is None)
    assert (imputer.variables is None)
    # Learned attributes: only columns with NA are selected.
    assert (imputer.variables_ == ['Name', 'City', 'Studies', 'Age', 'Marks'])
    assert (imputer.n_features_in_ == 6)
    # Rows containing NA in the detected columns are dropped; none remain.
    assert (X_transformed.shape == (5, 6))
    assert (X_transformed['Name'].shape[0] == 5)
    assert (X_transformed.isna().sum().sum() == 0)
def send_confirmation_msg(doc):
    """Send the configured appointment-confirmation message for *doc*.

    Does nothing unless 'send_appointment_confirmation' is enabled in the
    Healthcare Settings single doctype.  Send failures are logged and
    surfaced as a non-fatal orange message instead of raising.
    """
    if frappe.db.get_single_value('Healthcare Settings', 'send_appointment_confirmation'):
        message = frappe.db.get_single_value('Healthcare Settings', 'appointment_confirmation_msg')
        try:
            send_message(doc, message)
        except Exception:
            # Best-effort: record the traceback and inform the user.
            frappe.log_error(frappe.get_traceback(), _('Appointment Confirmation Message Not Sent'))
            frappe.msgprint(_('Appointment Confirmation Message Not Sent'), indicator='orange')
def delegatecall(evm: Evm) -> None:
    """Execute the DELEGATECALL opcode.

    Runs the code at ``code_address`` in the caller's own context: the call
    is issued with the current message's value and caller, targeting the
    current contract's storage, and transfers no value (U256(0) in the gas
    calculation).
    """
    # Stack order: gas, code address, then input/output memory ranges.
    gas = Uint(pop(evm.stack))
    code_address = to_address(pop(evm.stack))
    memory_input_start_position = pop(evm.stack)
    memory_input_size = pop(evm.stack)
    memory_output_start_position = pop(evm.stack)
    memory_output_size = pop(evm.stack)
    # Gas: memory expansion for both regions plus the base call cost.
    extend_memory = calculate_gas_extend_memory(evm.memory, [(memory_input_start_position, memory_input_size), (memory_output_start_position, memory_output_size)])
    message_call_gas = calculate_message_call_gas(U256(0), gas, Uint(evm.gas_left), extend_memory.cost, GAS_CALL)
    charge_gas(evm, (message_call_gas.cost + extend_memory.cost))
    # Expand memory (zero-filled) before performing the call.
    evm.memory += (b'\x00' * extend_memory.expand_by)
    # Delegate semantics: keep evm.message.value / caller / current_target.
    generic_call(evm, message_call_gas.stipend, evm.message.value, evm.message.caller, evm.message.current_target, code_address, False, False, memory_input_start_position, memory_input_size, memory_output_start_position, memory_output_size)
    evm.pc += 1
class OptionSeriesHeatmapSonificationContexttracksActivewhen(Options):
    """Generated Highcharts wrapper for ``series.heatmap.sonification.contextTracks.activeWhen``.

    NOTE(review): each getter/setter pair below shares a name; in the
    generated option API these are normally ``@property`` / ``@x.setter``
    pairs — the decorators are absent here, confirm upstream.
    """

    def crossingDown(self):
        """Current crossingDown threshold (None when unset)."""
        return self._config_get(None)

    def crossingDown(self, num: float):
        """Set the value the property must cross downward to activate."""
        self._config(num, js_type=False)

    def crossingUp(self):
        """Current crossingUp threshold (None when unset)."""
        return self._config_get(None)

    def crossingUp(self, num: float):
        """Set the value the property must cross upward to activate."""
        self._config(num, js_type=False)

    def max(self):
        """Current maximum bound (None when unset)."""
        return self._config_get(None)

    def max(self, num: float):
        """Set the maximum property value for the track to be active."""
        self._config(num, js_type=False)

    def min(self):
        """Current minimum bound (None when unset)."""
        return self._config_get(None)

    def min(self, num: float):
        """Set the minimum property value for the track to be active."""
        self._config(num, js_type=False)

    def prop(self):
        """Current point property name (None when unset)."""
        return self._config_get(None)

    def prop(self, text: str):
        """Set the point property the conditions are compared against."""
        self._config(text, js_type=False)
def test_display_callback():
    """The display callback should report the button id taken from the
    triggered-inputs context."""

    def run_callback():
        triggered = {'triggered_inputs': [{'prop_id': 'btn-1-ctx-example.n_clicks'}]}
        context_value.set(AttributeDict(**triggered))
        return display(1, 0, 0)

    output = copy_context().run(run_callback)
    assert output == 'You last clicked button with ID btn-1-ctx-example'
def us_bench_test(freq_min, freq_max, freq_step, vco_freq, bios_filename, bios_timeout=40):
    """Sweep the UltraScale PLL sys_clk divider and check each setting boots.

    For every distinct VCO divider in [freq_min, freq_max) the divider is
    programmed, the resulting sys_clk is measured via a clock counter, the
    SoC is rebooted, and the BIOS UART log is echoed to stdout.

    Args:
        freq_min/freq_max/freq_step: sweep range in Hz.
        vco_freq: PLL VCO frequency in Hz, divided down to sys_clk.
        bios_filename: BIOS image to load into ROM before each run.
        bios_timeout: seconds to capture UART output per frequency.

    NOTE(review): indentation was lost upstream; loop nesting below was
    reconstructed from the control/data flow — confirm against the original.
    """
    import time
    from litex import RemoteClient
    bus = RemoteClient()
    bus.open()
    ctrl = BenchController(bus)
    ctrl.load_rom(bios_filename, delay=0.0001)
    ctrl.reboot()
    uspll = USPLL(bus)
    # Register 8 holds CLKOUT0 ClkReg1 (high/low time fields).
    clkout0_clkreg1 = ClkReg1(uspll.read(8))
    print(('-' * 80))
    print('Running calibration; sys_clk from {:3.3f}MHz to {:3.2f}MHz (step: {:3.2f}MHz)'.format((freq_min / 1000000.0), (freq_max / 1000000.0), (freq_step / 1000000.0)))
    print(('-' * 80))
    print('')
    tested_vco_divs = []
    for clk_freq in range(int(freq_min), int(freq_max), int(freq_step)):
        vco_div = int((vco_freq / clk_freq))
        # Several clk_freq values can map to the same integer divider; test each once.
        if (vco_div in tested_vco_divs):
            continue
        tested_vco_divs.append(vco_div)
        print(('-' * 40))
        print('sys_clk = {}MHz...'.format(((vco_freq / vco_div) / 1000000.0)))
        print(('-' * 40))
        # Split the divider into high/low phases; odd dividers get the extra
        # cycle on the high phase.
        clkout0_clkreg1.high_time = ((vco_div // 2) + (vco_div % 2))
        clkout0_clkreg1.low_time = (vco_div // 2)
        uspll.write(8, clkout0_clkreg1.pack())
        # Measure the achieved sys_clk from the free-running counter.
        duration = 0.5
        start = bus.regs.crg_sys_clk_counter.read()
        time.sleep(duration)
        end = bus.regs.crg_sys_clk_counter.read()
        print('Measured sys_clk: {:3.2f}MHz.'.format(((end - start) / (1000000.0 * duration))))
        print(('-' * 40))
        print('Reboot SoC and get BIOS log...')
        print(('-' * 40))
        ctrl.reboot()
        # Echo the UART crossover FIFO until the timeout expires.
        start = time.time()
        while ((time.time() - start) < bios_timeout):
            if bus.regs.uart_xover_rxfull.read():
                length = 16  # FIFO full: drain a burst
            elif (not bus.regs.uart_xover_rxempty.read()):
                length = 1
            else:
                time.sleep(0.001)
                continue
            for c in bus.read(bus.regs.uart_xover_rxtx.addr, length=length, burst='fixed'):
                print('{:c}'.format(c), end='')
        print('')
    bus.close()
class TestTopicPollVoteView(BaseClientTestCase):
    """Tests for the topic-poll vote view (permissions, voting, re-voting)."""

    # Fix: the bare `(autouse=True)` expression was a mangled
    # `@pytest.fixture(autouse=True)` decorator (a syntax error as-is).
    @pytest.fixture(autouse=True)
    def setup(self):
        """Build a forum, topic, poll with two options, and grant read/vote perms."""
        self.perm_handler = PermissionHandler()
        self.top_level_forum = create_forum()
        self.topic = create_topic(forum=self.top_level_forum, poster=self.user)
        self.post = PostFactory.create(topic=self.topic, poster=self.user)
        self.poll = TopicPollFactory.create(topic=self.topic)
        self.option_1 = TopicPollOptionFactory.create(poll=self.poll)
        self.option_2 = TopicPollOptionFactory.create(poll=self.poll)
        ForumReadTrackFactory.create(forum=self.top_level_forum, user=self.user)
        assign_perm('can_read_forum', self.user, self.top_level_forum)
        assign_perm('can_vote_in_polls', self.user, self.top_level_forum)

    def test_browsing_works(self):
        correct_url = reverse('forum_conversation:topic_poll_vote', kwargs={'pk': self.poll.pk})
        response = self.client.post(correct_url, follow=True)
        assert (response.status_code == 200)

    def test_cannot_be_browed_by_users_who_cannot_vote_in_polls(self):
        remove_perm('can_vote_in_polls', self.user, self.top_level_forum)
        correct_url = reverse('forum_conversation:topic_poll_vote', kwargs={'pk': self.poll.pk})
        response = self.client.post(correct_url, follow=True)
        assert (response.status_code == 403)

    def test_can_be_used_to_vote(self):
        correct_url = reverse('forum_conversation:topic_poll_vote', kwargs={'pk': self.poll.pk})
        post_data = {'options': [self.option_1.pk]}
        response = self.client.post(correct_url, post_data, follow=True)
        assert (response.status_code == 200)
        votes = TopicPollVote.objects.filter(voter=self.user)
        assert (votes.count() == 1)
        assert (votes[0].poll_option == self.option_1)

    def test_can_be_used_to_change_a_vote(self):
        # Allow vote changes, pre-cast a vote for option 2, then switch to 1.
        self.poll.user_changes = True
        self.poll.save()
        TopicPollVoteFactory.create(voter=self.user, poll_option=self.option_2)
        correct_url = reverse('forum_conversation:topic_poll_vote', kwargs={'pk': self.poll.pk})
        post_data = {'options': [self.option_1.pk]}
        response = self.client.post(correct_url, post_data, follow=True)
        assert (response.status_code == 200)
        # The old vote is replaced, not duplicated.
        votes = TopicPollVote.objects.filter(voter=self.user)
        assert (votes.count() == 1)
        assert (votes[0].poll_option == self.option_1)
def get_regression_quality_metrics(regression_quality_report: Report) -> Dict:
    """Pull mean error ('me') and mean absolute error ('mae') out of the
    first metric of an Evidently regression-quality report."""
    report_dict = regression_quality_report.as_dict()
    current = report_dict['metrics'][0]['result']['current']
    return {
        'me': current['mean_error'],
        'mae': current['mean_abs_error'],
    }
def _clean_args_list(args: List[str]) -> List[str]:
ALLOWLIST = ['--disable-logging', '--project-dir', '--profiles-dir', '--defer', '--threads', '--thread', '--state', '--full-refresh', '-s', '--select', '-m', '--models', '--model', '--exclude', '--selector', '--all', 'run', 'dbt', '-v', '--version', '--debug', '--vars', '--var', '--target', 'build', 'clean', 'compile', 'debug', 'deps', 'docs', 'init', 'list', 'parse', 'seed', 'snapshot', 'source', 'test', 'rpc', 'run-operation']
REDACTED = '[REDACTED]'
output = []
for item in args:
if (item in ALLOWLIST):
output.append(item)
else:
output.append(REDACTED)
return output |
def test_unit_status_check_properties():
    """The UnitStatus widget should only change state on an 'ActiveState'
    DBus property change, ignoring unrelated properties."""

    def no_op():
        pass

    widget = qtile_extras.widget.UnitStatus()
    # Stub out drawing so _changed can run without a real bar/window.
    widget.draw = no_op
    assert (widget.state == 'not-found')
    # An unrelated property change must not alter the state.
    widget._changed(None, {'OtherProperty': Variant('s', 'active')}, None)
    assert (widget.state == 'not-found')
    # An ActiveState change updates the widget state.
    widget._changed(None, {'ActiveState': Variant('s', 'active')}, None)
    assert (widget.state == 'active')
def test_load_schema_union_names():
    # Fix: the original second line was a lone quote character — the mangled
    # remains of this docstring, and a syntax error as written.
    """Loading a top-level union schema resolves each named record type."""
    load_schema_dir = join(abspath(dirname(__file__)), 'load_schema_test_15')
    schema_path = join(load_schema_dir, 'A.avsc')
    # _write_hint=False keeps fastavro from injecting __fastavro_parsed markers.
    loaded_schema = fastavro.schema.load_schema(schema_path, _write_hint=False)
    expected_schema = [
        {'name': 'B', 'type': 'record', 'fields': [{'name': 'foo', 'type': 'string'}]},
        {'name': 'C', 'type': 'record', 'fields': [{'name': 'bar', 'type': 'string'}]},
    ]
    assert (loaded_schema == expected_schema)
# Fix: the bare `.network` line was a mangled `@pytest.mark.network`
# decorator (a syntax error as written).
@pytest.mark.network
def test_zenodo_downloader_with_slash_in_fname():
    """DOIDownloader must handle Zenodo archives whose filename contains a slash."""
    with TemporaryDirectory() as local_store:
        base_url = ZENODOURL_W_SLASH + 'santisoler/pooch-test-data-v1.zip'
        downloader = DOIDownloader()
        outfile = os.path.join(local_store, 'test-data.zip')
        downloader(base_url, outfile, None)
        # Unzip post-processing returns the extracted member paths.
        fnames = Unzip()(outfile, action='download', pooch=None)
        (fname,) = [f for f in fnames if 'tiny-data.txt' in f]
        check_tiny_data(fname)
# Fix: the bare `.skipif(...)` line was a mangled `@pytest.mark.skipif(...)`
# decorator (a syntax error as written).
@pytest.mark.skipif(('WEB3_INFURA_PROJECT_ID' not in os.environ), reason='Infura API key unavailable')
def test_registry_uri_backend(backend):
    """A registry URI can be translated (but not resolved) and fetched to a
    content-addressed manifest URI.

    NOTE(review): `valid_uri` looks truncated (address/version fragment) —
    verify against the upstream fixture data.
    """
    valid_uri = 'erc1319://0xDECD360e6d4d979edBcDD59c35feeB:1/.0.0'
    expected_uri = 'ipfs://QmbeVyFLSuEUxiXKwSsEjef6icpdTdA4kGG9BcrJXKNKUW'
    assert (backend.can_translate_uri(valid_uri) is True)
    assert (backend.can_resolve_uri(valid_uri) is False)
    assert (backend.fetch_uri_contents(valid_uri) == expected_uri)
def test_file_type_stats(stats_updater, backend_db):
    """File-type statistics should count firmware containers and inner files
    separately, and respect a vendor match filter."""
    # Empty DB yields empty stats.
    assert (stats_updater.get_file_type_stats() == {'file_types': [], 'firmware_container': []})
    type_analysis = generate_analysis_entry(analysis_result={'mime': 'fw/image'})
    type_analysis_2 = generate_analysis_entry(analysis_result={'mime': 'file/type1'})
    (fw, parent_fo, child_fo) = create_fw_with_parent_and_child()
    fw.vendor = 'foobar'
    fw.processed_analysis['file_type'] = type_analysis
    parent_fo.processed_analysis['file_type'] = type_analysis_2
    child_fo.processed_analysis['file_type'] = generate_analysis_entry(analysis_result={'mime': 'file/type2'})
    backend_db.add_object(fw)
    backend_db.add_object(parent_fo)
    backend_db.add_object(child_fo)
    # A second firmware (different vendor) with one inner file.
    insert_test_fw(backend_db, 'fw1', analysis={'file_type': type_analysis}, vendor='test_vendor')
    insert_test_fo(backend_db, 'fo1', parent_fw='fw1', analysis={'file_type': type_analysis_2})
    stats = stats_updater.get_file_type_stats()
    assert ('file_types' in stats)
    assert ('firmware_container' in stats)
    # Inner files: one 'file/type2', two 'file/type1'; containers: two 'fw/image'.
    assert (stats['file_types'] == [('file/type2', 1), ('file/type1', 2)])
    assert (stats['firmware_container'] == [('fw/image', 2)])
    # Restricting the match to vendor 'foobar' halves the counts accordingly.
    stats_updater.set_match({'vendor': 'foobar'})
    stats = stats_updater.get_file_type_stats()
    assert (stats['firmware_container'] == [('fw/image', 1)]), 'query filter does not work'
    assert (stats['file_types'] == [('file/type1', 1), ('file/type2', 1)])
class OptionPlotoptionsSplineStatesHover(Options):
    """Generated Highcharts wrapper for ``plotOptions.spline.states.hover``.

    NOTE(review): getter/setter pairs below share a name; in the generated
    option API these are normally ``@property`` / ``@x.setter`` pairs — the
    decorators are absent here, confirm upstream.
    """

    def animation(self) -> 'OptionPlotoptionsSplineStatesHoverAnimation':
        """Sub-options controlling the hover-state animation."""
        return self._config_sub_data('animation', OptionPlotoptionsSplineStatesHoverAnimation)

    def enabled(self):
        """Whether the hover state is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Enable or disable the hover state."""
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionPlotoptionsSplineStatesHoverHalo':
        """Sub-options for the halo drawn around the hovered point."""
        return self._config_sub_data('halo', OptionPlotoptionsSplineStatesHoverHalo)

    def lineWidth(self):
        """Absolute line width on hover (None when unset)."""
        return self._config_get(None)

    def lineWidth(self, num: float):
        """Set the absolute line width used on hover."""
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        """Pixels added to the line width on hover (default 1)."""
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        """Set the pixels added to the line width on hover."""
        self._config(num, js_type=False)

    def marker(self) -> 'OptionPlotoptionsSplineStatesHoverMarker':
        """Sub-options for the point marker in the hover state."""
        return self._config_sub_data('marker', OptionPlotoptionsSplineStatesHoverMarker)
def extractSupermeganetWordpressCom(item):
    """Map a supermeganet.wordpress.com feed item to a release message.

    Returns ``None`` for previews or items with neither volume nor chapter,
    a built release message when a known tag matches, ``False`` otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    for tagname, name, tl_type in (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def check_withings_connection():
    """Return the Withings-connection UI fragment for the dashboard.

    Three outcomes: no credentials configured -> empty Div; credentials but
    not yet authorized -> a 'Connect Withings' button linking to the OAuth
    flow; already connected -> a confirmation heading.
    """
    if withings_credentials_supplied:
        if (not withings_connected()):
            # Build the OAuth authorize link from the configured client
            # credentials, redirect URI and requested scopes.
            return html.A(className='col-lg-12', children=[dbc.Button('Connect Withings', id='connect-withings-btton', color='primary', className='mb-2', size='sm')], href=connect_withings_link(WithingsAuth(config.get('withings', 'client_id'), config.get('withings', 'client_secret'), callback_uri=config.get('withings', 'redirect_uri'), scope=(AuthScope.USER_ACTIVITY, AuthScope.USER_METRICS, AuthScope.USER_INFO, AuthScope.USER_SLEEP_EVENTS))))
        else:
            return html.H4('Withings Connected!', className='col-lg-12')
    else:
        return html.Div()
# Fix: three decorators below had lost their '@' prefix, leaving dead
# expressions and an undecorated wrapper (no CORS handling, no wraps, and
# copy_func_kwargs never applied).
@_util.copy_func_kwargs(HttpsOptions)
def on_call(**kwargs) -> _typing.Callable[[_C2], _C2]:
    """Decorator factory for a callable (on-call) HTTPS Cloud Function.

    Accepts HttpsOptions keyword arguments and returns a decorator that wraps
    the user function with CORS handling, optional App Check enforcement, and
    endpoint metadata.
    """
    options = HttpsOptions(**kwargs)

    def on_call_inner_decorator(func: _C2):
        # CORS origins: explicit option wins, otherwise allow all.
        origins: _typing.Any = '*'
        if options.cors is not None and options.cors.cors_origins is not None:
            origins = options.cors.cors_origins
        # App Check enforcement: per-function option overrides the global one.
        enforce_app_check = False
        if options.enforce_app_check is None and _GLOBAL_OPTIONS.enforce_app_check is not None:
            enforce_app_check = _GLOBAL_OPTIONS.enforce_app_check
        elif options.enforce_app_check is not None:
            enforce_app_check = options.enforce_app_check

        @_cross_origin(methods='POST', origins=origins)
        @_functools.wraps(func)
        def on_call_wrapped(request: Request):
            return _on_call_handler(func, request, enforce_app_check)

        # Attach the deployment endpoint metadata to the wrapper.
        _util.set_func_endpoint_attr(on_call_wrapped, options._endpoint(func_name=func.__name__, callable=True))
        return on_call_wrapped

    return on_call_inner_decorator
def test_tesseroid_layer_invalid_surface_reference(dummy_layer):
    """tesseroid_layer must reject surface/reference arrays whose shape does
    not match the layer coordinates."""
    (coordinates, surface, reference, _) = dummy_layer
    # A 1-D array of the wrong size cannot be broadcast onto the layer grid.
    surface_invalid = np.arange(20, dtype=float)
    with pytest.raises(ValueError, match='Invalid surface array with shape'):
        tesseroid_layer(coordinates, surface_invalid, reference)
    reference_invalid = np.zeros(20)
    with pytest.raises(ValueError, match='Invalid reference array with shape'):
        tesseroid_layer(coordinates, surface, reference_invalid)
class ImageLibrary(HasPrivateTraits):
volumes = List(ImageVolume)
catalog = Dict(Str, ImageVolume)
images = Property(List, observe='volumes.items.images')
aliases = Dict()
def image_info(self, image_name):
volume = self.find_volume(image_name)
if (volume is not None):
return volume.catalog.get(image_name)
return None
def image_resource(self, image_name):
if (image_name.find(':') < 0):
image_name = (':%s' % image_name[1:])
volume = self.find_volume(image_name)
if (volume is not None):
return volume.image_resource(image_name)
return None
def find_volume(self, image_name):
(volume_name, file_name) = split_image_name(image_name)
catalog = self.catalog
aliases = self.aliases
while (volume_name not in catalog):
volume_name = aliases.get(volume_name)
if (volume_name is None):
return None
return catalog[volume_name]
def add_volume(self, file_name=None):
if (file_name is None):
file_name = join(get_resource_path(2), 'images')
if isfile(file_name):
volume = self._add_volume(file_name)
if (volume is None):
raise TraitError(("'%s' is not a valid image volume." % file_name))
if (volume.name in self.catalog):
self._duplicate_volume(volume.name)
self.catalog[volume.name] = volume
self.volumes.append(volume)
elif isdir(file_name):
catalog = self.catalog
volumes = self._add_path(file_name)
for volume in volumes:
if (volume.name in catalog):
self._duplicate_volume(volume.name)
catalog[volume.name] = volume
self.volumes.extend(volumes)
else:
raise TraitError(("The add method argument must be None or a file or directory path, but '%s' was specified." % file_name))
def add_path(self, volume_name, path=None):
if (volume_name in self.catalog):
raise TraitError(("The volume name '%s' is already in the image library." % volume_name))
if (path is None):
path = join(get_resource_path(2), 'images')
if (not isdir(path)):
raise TraitError(("The image volume path '%s' does not exist." % path))
image_volume_path = join(path, 'image_volume.py')
if exists(image_volume_path):
volume = get_python_value(read_file(image_volume_path), 'volume')
else:
volume = ImageVolume()
volume.trait_set(name=volume_name, path=path, is_zip_file=False)
if (volume.time_stamp < time_stamp_for(stat(path)[ST_MTIME])):
volume.save()
self.catalog[volume_name] = volume
self.volumes.append(volume)
def extract(self, file_name, image_names):
(volume_name, ext) = splitext(basename(file_name))
if (ext == ''):
file_name += '.zip'
volume = ImageVolume(name=volume_name)
if exists(file_name):
raise TraitError(("The '%s' file already exists." % file_name))
zf = ZipFile(file_name, 'w', ZIP_DEFLATED)
error = True
aliases = set()
keywords = set()
images = []
info = {}
try:
for image_name in set(image_names):
if ((image_name[:1] != '') or (image_name.find(':') < 0)):
raise TraitError(("The image name specified by '%s' is not of the form: :name." % image_name))
(image_volume_name, image_file_name) = split_image_name(image_name)
image_volume = self.find_volume(image_name)
if (image_volume is None):
raise TraitError(("Could not find the image volume specified by '%s'." % image_name))
image_info = image_volume.catalog.get(image_name)
if (image_info is None):
raise TraitError(("Could not find the image specified by '%s'." % image_name))
images.append(image_info)
zf.writestr(image_file_name, image_volume.image_data(image_name))
if (image_volume_name != volume_name):
if (image_volume_name not in aliases):
aliases.add(image_volume_name)
for keyword in image_volume.keywords:
keywords.add(keyword)
volume_info = image_volume.volume_info(image_name)
vinfo = info.get(image_volume_name)
if (vinfo is None):
info[image_volume_name] = vinfo = volume_info.clone()
vinfo.image_names.append(image_name)
images.sort(key=(lambda item: item.image_name))
volume.images = images
volume.aliases = list(aliases)
volume.keywords = list(keywords)
volume.info = list(info.values())
zf.writestr('image_volume.py', volume.image_volume_code)
zf.writestr('image_info.py', volume.images_code)
zf.writestr('license.txt', volume.license_text)
error = False
finally:
zf.close()
if error:
remove(file_name)
def _volumes_default(self):
    """Build the default volume list from the application's 'library'
    directory, the package resource 'library' directory, and any extra
    paths listed in the TRAITS_IMAGES environment variable."""
    volumes = []
    script_library = join(dirname(abspath(sys.argv[0])), 'library')
    if isdir(script_library):
        volumes.extend(self._add_path(script_library))
    volumes.extend(self._add_path(join(get_resource_path(1), 'library')))
    env_paths = environ.get('TRAITS_IMAGES')
    if env_paths is not None:
        # Path-list separator follows platform convention.
        sep = ';' if system() == 'Windows' else ':'
        for entry in env_paths.split(sep):
            volumes.extend(self._add_path(entry))
    return volumes
def _catalog_default(self):
return dict([(volume.name, volume) for volume in self.volumes])
# NOTE(review): the bare '_property' line below looks like a mangled
# decorator (probably '@property' or a cached-property variant) -- confirm
# against the upstream source.
_property
def _get_images(self):
    # Property getter: delegates to the flat, sorted image list.
    return self._get_images_list()
def _get_images_list(self):
images = []
for volume in self.volumes:
images.extend(volume.images)
images.sort(key=(lambda image: image.image_name))
return images
def _add_path(self, path):
    """Scan *path* for '.zip' files and register each one as an image
    volume; return the list of volumes successfully added."""
    added = []
    if isdir(path):
        for entry in listdir(path):
            if splitext(entry)[1] != '.zip':
                continue
            vol = self._add_volume(join(path, entry))
            if vol is not None:
                added.append(vol)
    return added
def _add_volume(self, path):
    """Create an ImageVolume for the zip file at *path*.

    Returns the volume, or None when *path* is not a valid zip file.
    If the zip carries its own 'image_volume.py' manifest, the volume object
    is reconstructed from it; otherwise a fresh ImageVolume is built.
    """
    path = abspath(path)
    if is_zipfile(path):
        zf = FastZipFile(path=path)
        volume_name = splitext(basename(path))[0]
        names = zf.namelist()
        if ('image_volume.py' in names):
            # The zip ships a manifest: evaluate it to rebuild the volume,
            # then rebind it to this file and register its aliases.
            volume = get_python_value(zf.read('image_volume.py'), 'volume')
            volume.name = volume_name
            self._add_aliases(volume)
            volume.path = path
            volume.zip_file = zf
        else:
            volume = ImageVolume(name=volume_name, path=path, zip_file=zf)
        # Refresh the stored manifest when the zip is newer than the
        # volume's recorded time stamp.
        if (volume.time_stamp < time_stamp_for(stat(path)[ST_MTIME])):
            volume.save()
        return volume
    return None
def _add_aliases(self, volume):
aliases = self.aliases
volume_name = volume.name
for vname in volume.aliases:
if ((vname in aliases) and (volume_name != aliases[vname])):
raise TraitError(("Image library error: Attempt to alias '%s' to '%s' when it is already aliased to '%s'" % (vname, volume_name, aliases[volume_name])))
aliases[vname] = volume_name
def _duplicate_volume(self, volume_name):
    """Raise the standard error for a duplicate image volume name."""
    raise TraitError(("Attempted to add an image volume called '%s' when a volume with that name is already defined." % volume_name))
# NOTE(review): the bare tuple below looks like the argument list of a
# mangled decorator (probably '@pytest.mark.parametrize') -- confirm upstream.
('config_type', ['strict'])
def test_missing_envs_strict_mode(config, json_config_file_3):
    """Strict mode must raise when the JSON config references an undefined env var."""
    with open(json_config_file_3, 'w') as file:
        file.write(json.dumps({'section': {'undefined': '${UNDEFINED}'}}))
    # Loading must fail loudly because ${UNDEFINED} has no value set.
    with raises(ValueError, match='Missing required environment variable "UNDEFINED"'):
        config.from_json(json_config_file_3)
def enum_values_changed(values, strfunc=str):
    """Normalize an enumeration specification into UI-ready structures.

    *values* may be a dict (key = logical value, value = displayed value),
    a sequence of values, or a trait/handler describing the legal values.
    Returns (names, mapping, inverse_mapping) where 'names' is the list of
    display strings, 'mapping' maps display string -> value and
    'inverse_mapping' maps value -> display string.
    """
    if isinstance(values, dict):
        data = sorted(
            ((strfunc(v), n) for n, v in values.items()),
            key=lambda pair: pair[0],
        )
        if data:
            # A leading 'prefix:' on the first display string is stripped
            # from every entry.
            prefix_len = data[0][0].find(':') + 1
            if prefix_len > 0:
                data = [(display[prefix_len:], key) for display, key in data]
    elif not isinstance(values, SequenceTypes):
        # Treat *values* as a trait (or handler) describing the legal values.
        handler = values.handler if isinstance(values, CTrait) else values
        if not isinstance(handler, BaseTraitHandler):
            raise TraitError("Invalid value for 'values' specified")
        if handler.is_mapped:
            data = sorted(
                ((strfunc(n), n) for n in handler.map.keys()),
                key=lambda pair: pair[0],
            )
        else:
            data = [(strfunc(v), v) for v in handler.values]
    else:
        data = [(strfunc(v), v) for v in values]
    names = [display for display, _ in data]
    mapping = {}
    inverse_mapping = {}
    for display, value in data:
        mapping[display] = value
        inverse_mapping[value] = display
    return (names, mapping, inverse_mapping)
def clusterise_dscalar_input(data_file, arguments, surf_settings, tmpdir):
    """Label supra- and sub-threshold clusters in *data_file* and merge them
    into a single cluster-label dscalar map inside *tmpdir*.

    Positive clusters (values above --max-threshold) are labelled starting
    at 1; negative clusters (below --min-threshold) continue the numbering
    after the largest positive label.  Returns the combined map's path.
    """
    pcluster_dscalar = os.path.join(tmpdir, 'pclusters.dscalar.nii')
    # Positive clusters, labels starting at 1.
    wb_cifti_clusters(data_file, pcluster_dscalar, surf_settings, arguments['--max-threshold'], arguments['--area-threshold'], less_than=False, starting_label=1)
    pos_clust_data = ciftify.niio.load_concat_cifti_surfaces(pcluster_dscalar)
    # Highest positive label; negative labels continue after it.
    max_pos = int(np.max(pos_clust_data))
    ncluster_dscalar = os.path.join(tmpdir, 'nclusters.dscalar.nii')
    wb_cifti_clusters(data_file, ncluster_dscalar, surf_settings, arguments['--min-threshold'], arguments['--area-threshold'], less_than=True, starting_label=(max_pos + 1))
    # Merge the two label maps by summation (label ranges do not overlap).
    clusters_out = os.path.join(tmpdir, 'clusters.dscalar.nii')
    ciftify.utils.run(['wb_command', '-cifti-math "(x+y)"', clusters_out, '-var', 'x', pcluster_dscalar, '-var', 'y', ncluster_dscalar])
    return clusters_out
# NOTE(review): the line below looks like the tail of a mangled
# '@pytest.mark.parametrize' decorator -- confirm against upstream.
.parametrize('log_name, change_from, change_to', [('Poro', 'CONT', 'DISC'), ('Poro', 'CONT', 'CONT'), ('Facies', 'DISC', 'CONT')])
def test_set_log_type(simple_well, log_name, change_from, change_to):
    """set_logtype() must switch a log between CONT/DISC and be observable via get_logtype()."""
    mywell = simple_well
    assert (mywell.get_logtype(log_name) == change_from)
    mywell.set_logtype(log_name, change_to)
    assert (mywell.get_logtype(log_name) == change_to)
def synthetic_attributes():
    """Class decorator factory.

    The decorated class's __init__ is wrapped so that, before the original
    constructor body runs, every callable attribute of the class carrying a
    'synthetic_mark' attribute is invoked with the new instance — letting
    those methods pre-populate ("synthesize") instance attributes.
    """
    def decorator(cls):
        wrapped_init = cls.__init__

        def __init__(self, *args, **kwargs):
            # Run every marked callable first so synthetic attributes exist
            # before the original constructor executes.
            for attr_name in dir(cls):
                candidate = getattr(cls, attr_name)
                if callable(candidate) and hasattr(candidate, 'synthetic_mark'):
                    candidate(self)
            wrapped_init(self, *args, **kwargs)

        cls.__init__ = __init__
        return cls
    return decorator
class YoutubeWidget(Widget):
    """Widget that embeds a Youtube player inside an iframe (flexx)."""
    DEFAULT_MIN_SIZE = (100, 100)
    source = event.StringProp('oHg5SJYRHA0', settable=True, doc='\n The source of the video represented as the Youtube id.\n ')

    def _create_dom(self):
        # Wrap the iframe in a plain div container.
        global window
        node = window.document.createElement('div')
        self.inode = window.document.createElement('iframe')
        node.appendChild(self.inode)
        return node

    def _update_canvas_size(self, *events):
        # Mirror the widget size onto the iframe via CSS pixel dimensions.
        size = self.size
        if (size[0] or size[1]):
            self.inode.style.width = (size[0] + 'px')
            self.inode.style.height = (size[1] + 'px')

    def __source_changed(self, *events):
        # NOTE(review): the URL literal below is truncated in this copy
        # (unterminated string); restore the Youtube embed base URL from
        # the upstream source.
        base_url = '
        self.inode.src = ((base_url + self.source) + '?autoplay=0')
class Build(Base):
    """A koji build attached to an update (polymorphic base for content types).

    NOTE(review): 'nvr_name', 'nvr_version' and 'nvr_release' are read as
    plain attributes by get_n_v_r(), so upstream they are almost certainly
    '@property'-decorated; the decorators appear stripped in this copy.
    """
    __tablename__ = 'builds'
    # Columns excluded from default serialization.
    __exclude_columns__ = ('id', 'package', 'package_id', 'release', 'testcases', 'update_id', 'update', 'override')
    __get_by__ = ('nvr',)
    nvr = Column(Unicode(100), unique=True, nullable=False)
    signed = Column(Boolean, default=False, nullable=False)
    override = relationship('BuildrootOverride', back_populates='build', uselist=False)
    package_id = Column(Integer, ForeignKey('packages.id'), nullable=False)
    package = relationship('Package', back_populates='builds', lazy='joined', innerjoin=True)
    release_id = Column(Integer, ForeignKey('releases.id'))
    release = relationship('Release', back_populates='builds', lazy=False)
    update_id = Column(Integer, ForeignKey('updates.id'), index=True)
    update = relationship('Update', back_populates='builds')
    testcases = relationship('TestCase', secondary=build_testcase_table, back_populates='builds', order_by='TestCase.name')
    type = Column(ContentType.db_type(), nullable=False)
    __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': ContentType.base}

    def _get_kojiinfo(self):
        # Lazily fetch and cache koji's build info for this NVR.
        if (not hasattr(self, '_kojiinfo')):
            koji_session = buildsys.get_session()
            self._kojiinfo = koji_session.getBuild(self.nvr)
        return self._kojiinfo

    def _get_n_v_r(self):
        # Split 'name-version-release' from the right so dashes in the
        # package name survive.
        return self.nvr.rsplit('-', 2)

    def nvr_name(self):
        # Package-name portion of the NVR.
        return self._get_n_v_r()[0]

    def nvr_version(self):
        # Version portion of the NVR.
        return self._get_n_v_r()[1]

    def nvr_release(self):
        # Release portion of the NVR.
        return self._get_n_v_r()[2]

    def get_n_v_r(self):
        # Relies on nvr_name/nvr_version/nvr_release being properties
        # (see class NOTE above).
        return (self.nvr_name, self.nvr_version, self.nvr_release)

    def get_tags(self, koji=None):
        """Return the names of the koji tags applied to this build."""
        if (not koji):
            koji = buildsys.get_session()
        return [tag['name'] for tag in koji.listTags(self.nvr)]

    def get_owner_name(self):
        # Build owner according to koji.
        return self._get_kojiinfo()['owner_name']

    def get_build_id(self):
        return self._get_kojiinfo()['id']

    def get_task_id(self) -> int:
        # May be None when koji records no task for this build.
        return self._get_kojiinfo().get('task_id')

    def get_changelog(self, timelimit=0, lastupdate=False):
        # Base implementation has no changelog; subclasses may override.
        return ''

    def get_creation_time(self) -> datetime:
        return datetime.fromisoformat(self._get_kojiinfo()['creation_time'])

    def unpush(self, koji, from_side_tag=False):
        """Withdraw this build from testing: drop pending tags and either
        move it back to the candidate tag or untag it entirely when it came
        from a side tag."""
        log.info(('Unpushing %s' % self.nvr))
        release = self.update.release
        for tag in self.get_tags(koji):
            if (tag == release.pending_signing_tag):
                log.info(('Removing %s tag from %s' % (tag, self.nvr)))
                koji.untagBuild(tag, self.nvr)
            if (tag == release.pending_testing_tag):
                log.info(('Removing %s tag from %s' % (tag, self.nvr)))
                koji.untagBuild(tag, self.nvr)
            if (tag == release.pending_stable_tag):
                log.info(('Removing %s tag from %s' % (tag, self.nvr)))
                koji.untagBuild(tag, self.nvr)
            if (tag == release.testing_tag):
                if (not from_side_tag):
                    # Normal flow: return the build to the candidate tag.
                    log.info(f'Moving {self.nvr} from {tag} to {release.candidate_tag}')
                    koji.moveBuild(tag, release.candidate_tag, self.nvr)
                else:
                    log.info(f'Removing {tag} tag from {self.nvr}')
                    koji.untagBuild(tag, self.nvr)
            elif (from_side_tag and (tag == release.candidate_tag)):
                log.info(f'Removing {tag} tag from {self.nvr}')
                koji.untagBuild(tag, self.nvr)

    def is_latest(self) -> bool:
        """Return True unless the release's stable tag already carries a
        newer koji build of the same package."""
        koji_session = buildsys.get_session()
        koji_builds = koji_session.getLatestBuilds(self.update.release.stable_tag, package=self.package.name)
        for koji_build in koji_builds:
            build_creation_time = datetime.fromisoformat(koji_build['creation_time'])
            if (self.get_creation_time() < build_creation_time):
                return False
        return True

    def update_test_cases(self, db):
        """Sync self.testcases with the wiki category
        'Package <external_name> test cases' (no-op unless enabled in config)."""
        if (not config.get('query_wiki_test_cases')):
            return
        start = datetime.utcnow()
        log.debug(f'Querying the wiki for test cases of {self.nvr}')
        try:
            wiki = MediaWiki(config.get('wiki_url'), user_agent=config.get('wiki_user_agent'))
        except Exception as ex:
            raise ExternalCallException(f'Failed to connect to Fedora Wiki: {ex}')
        cat_page = f'Package {self.package.external_name} test cases'

        def list_categorymembers(wiki, cat_page, limit=500):
            # Recursively collect members, descending into subcategories
            # until 'limit' entries have been gathered.
            try:
                response = wiki.categorymembers(cat_page, results=limit, subcategories=True)
            except Exception as ex:
                raise ExternalCallException(f'Failed retrieving testcases from Wiki: {ex}')
            members = [entry for entry in response[0] if (entry != '')]
            if ((len(response[1]) > 0) and (len(members) < limit)):
                for subcat in response[1]:
                    members.extend(list_categorymembers(wiki, subcat, limit=(limit - len(members))))
            log.debug(f'Found the following testcases: {members}')
            return members
        fetched = set(list_categorymembers(wiki, cat_page))
        # NOTE(review): this loop removes from self.testcases while iterating
        # it, which can skip entries -- confirm/fix against upstream.
        for case in self.testcases:
            if (case.name not in fetched):
                self.testcases.remove(case)
                log.debug(f'Removed testcase "{case.name}" from {self.nvr}')
        for test in fetched:
            case = TestCase.get(test)
            if (not case):
                case = TestCase(name=test)
                db.add(case)
                db.flush()
                log.debug(f'Found new testcase "{case.name}" and added to database')
            if (case not in self.testcases):
                self.testcases.append(case)
                log.debug(f'Added testcase "{case.name}" to {self.nvr}')
        db.flush()
        log.debug(f'Finished querying for test cases in {(datetime.utcnow() - start)}')
class ColorFormatter(logging.Formatter):
    """Formatter that wraps each record in an ANSI color matching its level.

    Based on a Stack Overflow answer by user Sergey Pleshakov.
    """

    grey = '\x1b[38;21m'
    yellow = '\x1b[33;21m'
    red = '\x1b[31;21m'
    bold_red = '\x1b[31;1m'
    reset = '\x1b[0m'
    format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    # One fully colorized format string per level.
    FORMATS = {
        logging.DEBUG: grey + format + reset,
        logging.INFO: grey + format + reset,
        logging.WARNING: yellow + format + reset,
        logging.ERROR: red + format + reset,
        logging.CRITICAL: bold_red + format + reset,
    }

    def format(self, record):
        # Delegate to a throwaway Formatter configured for this level's color.
        return logging.Formatter(self.FORMATS.get(record.levelno)).format(record)
def run_pcap_to_features(pcap=None, outdir=False):
    """Exercise the pcap -> csv -> features pipeline inside a scratch directory.

    With *pcap* set, only that capture (copied from ./tests/test_data) is
    processed; otherwise the whole 'tests' tree is used.  When *outdir* is
    truthy the features step writes into the scratch directory itself.
    """
    with tempfile.TemporaryDirectory() as workdir:
        data_copy = os.path.join(workdir, 'test_data')
        shutil.copytree('./tests/test_data', data_copy)
        if pcap:
            pcap_path = os.path.join(data_copy, pcap)
            pcap_csv_path = os.path.join(workdir, pcap + '.csv.gz')
        else:
            tests_copy = os.path.join(workdir, 'tests')
            shutil.copytree('tests', tests_copy)
            pcap_path = tests_copy
            pcap_csv_path = os.path.join(workdir, 'pcap.csv.gz')
        run_pcap_to_csv(pcap_path, pcap_csv_path)
        feature_out = workdir if outdir else os.path.join(workdir, 'combined.csv.gz')
        run_csv_to_features(pcap_csv_path, feature_out)
class Solution(object):
    def countSegments(self, s):
        """Count maximal runs of non-space characters in *s*.

        Only the plain space character ' ' separates segments; any other
        whitespace (tabs, newlines) counts as segment content.
        """
        return len([piece for piece in s.split(' ') if piece])
def FQ(lib):
    """Return the base-field (FQ) class matching the given pairing-library module."""
    pairs = (
        (bn128, bn128_FQ),
        (optimized_bn128, optimized_bn128_FQ),
        (bls12_381, bls12_381_FQ),
        (optimized_bls12_381, optimized_bls12_381_FQ),
    )
    for module, field_cls in pairs:
        if lib == module:
            return field_cls
    raise Exception('Library Not Found')
class PopupColorItem(ft.PopupMenuItem):
    """Popup-menu entry showing a color swatch; selecting it re-themes the page (flet)."""

    def __init__(self, color, name):
        super().__init__()
        # Swatch icon plus the human-readable color name.
        self.content = ft.Row(controls=[ft.Icon(name=ft.icons.COLOR_LENS_OUTLINED, color=color), ft.Text(name)])
        self.on_click = self.seed_color_changed
        # Seed color applied when this item is clicked.
        self.data = color

    async def seed_color_changed(self, e):
        # Apply the same seed to both light and dark themes, then refresh.
        self.page.theme = self.page.dark_theme = ft.theme.Theme(color_scheme_seed=self.data)
        (await self.page.update_async())
class InformationFileNotExist(ErsiliaError):
    """Raised when a model's eos/dest/<model_id>/information.json file is missing."""

    def __init__(self, model_id):
        # Message and hints are passed to the ErsiliaError base for display.
        self.message = 'The eos/dest/{0}/information.json file does not exist.'.format(model_id)
        self.hints = 'Try fetching and serving the model first, and make sure the model is written correctly.'
        super().__init__(self.message, self.hints)
class OptionSeriesHeatmapDataMarkerStates(Options):
    """Accessors for the 'series.heatmap.data.marker.states' option group.

    NOTE(review): upstream these accessors are likely '@property'-decorated;
    the decorators appear stripped in this copy.
    """

    def hover(self) -> 'OptionSeriesHeatmapDataMarkerStatesHover':
        # Lazily create/return the nested 'hover' sub-configuration.
        return self._config_sub_data('hover', OptionSeriesHeatmapDataMarkerStatesHover)

    def normal(self) -> 'OptionSeriesHeatmapDataMarkerStatesNormal':
        # Lazily create/return the nested 'normal' sub-configuration.
        return self._config_sub_data('normal', OptionSeriesHeatmapDataMarkerStatesNormal)

    def select(self) -> 'OptionSeriesHeatmapDataMarkerStatesSelect':
        # Lazily create/return the nested 'select' sub-configuration.
        return self._config_sub_data('select', OptionSeriesHeatmapDataMarkerStatesSelect)
class SimpleFormValidator(FancyValidator):
    """FancyValidator that delegates whole-form validation to a user function.

    The wrapped function is called as func(value_dict, state, validator) and
    may return None/falsy (success), an error string, a field->error dict,
    or an Invalid exception instance.
    """
    __unpackargs__ = ('func',)
    validate_partial_form = False

    def __initargs__(self, new_attrs):
        # Mirror the wrapped function's docstring onto the validator.
        self.__doc__ = getattr(self.func, '__doc__', None)

    def to_python(self, value_dict, state):
        """Run self.func on a copy of the form dict and translate its result
        into the appropriate Invalid exception (or return the dict)."""
        value_dict = value_dict.copy()
        errors = self.func(value_dict, state, self)
        if (not errors):
            return value_dict
        if isinstance(errors, str):
            raise Invalid(errors, value_dict, state)
        if isinstance(errors, dict):
            # Per-field errors: carry them in error_dict for form display.
            raise Invalid(format_compound_error(errors), value_dict, state, error_dict=errors)
        if isinstance(errors, Invalid):
            raise errors
        raise TypeError(('Invalid error value: %r' % errors))
    validate_partial = to_python

    # NOTE(review): 'decorate' takes 'cls' and is almost certainly a
    # '@classmethod' upstream; the decorator appears stripped in this copy.
    def decorate(cls, **kw):
        def decorator(func):
            return cls(func, **kw)
        return decorator
class OptionPlotoptionsPyramidSonificationContexttracksMappingPan(Options):
    """Accessors for 'plotOptions.pyramid.sonification.contextTracks.mapping.pan'.

    NOTE(review): each getter/setter pair below shares a name, so upstream
    these are almost certainly '@property' / '@<name>.setter' pairs whose
    decorators were stripped in this copy; as written, each later def
    shadows the earlier one at class-creation time.
    """

    def mapFunction(self):
        # Getter: no default value.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: no default value.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter: no default value.
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter: no default value.
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter: no default value.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
class OptionSeriesBulletLabelStyle(Options):
    """Accessors for the 'series.bullet.label.style' option group.

    NOTE(review): the duplicated method names suggest stripped
    '@property' / setter decorator pairs; later defs shadow earlier ones.
    """

    def fontSize(self):
        # Getter: defaults to '0.8em'.
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def fontWeight(self):
        # Getter: defaults to 'bold'.
        return self._config_get('bold')

    def fontWeight(self, text: str):
        # Setter.
        self._config(text, js_type=False)
class HtaConfigTestCase(unittest.TestCase):
    """Unit tests for HtaConfig: default paths, construction and dotted-key lookup."""

    def setUp(self) -> None:
        # Use a unique temporary file instead of the old fixed
        # '/tmp/test_config.json' path: the fixed path collides across
        # concurrent runs/users and was never removed after the tests.
        import tempfile
        fd, self.test_config_path = tempfile.mkstemp(suffix='.json')
        self.test_config = {'a': 1, 'b': ['s', 't'], 'c': {'c1': 2, 'c2': {'c21': 10.0}}}
        import os
        with os.fdopen(fd, 'w') as fp:
            json.dump(self.test_config, fp)

    def tearDown(self) -> None:
        # Remove the config file created in setUp (the old code leaked it).
        import os
        try:
            os.remove(self.test_config_path)
        except OSError:
            pass

    def test_get_default_paths(self):
        """There are three default search paths, all ending with the default filename."""
        paths = HtaConfig.get_default_paths()
        self.assertEqual(len(paths), 3, f'expect the default file paths to be 3 but got {len(paths)}')
        self.assertTrue(all(str(path).endswith(DEFAULT_CONFIG_FILENAME) for path in paths))

    def test_constructor_no_config_file(self):
        """No file and no defaults yields an empty config."""
        config = HtaConfig(load_default_paths=False)
        self.assertDictEqual(config.get_config(), {})

    def test_constructor_one_config_file(self):
        """A single explicit file is loaded verbatim."""
        config = HtaConfig(self.test_config_path, load_default_paths=False)
        self.assertEqual(config.get_config(), self.test_config)

    def test_get_config_file_paths(self):
        """The loaded file path is reported back."""
        config = HtaConfig(self.test_config_path, load_default_paths=False)
        paths = config.get_config_file_paths()
        self.assertListEqual(paths, [self.test_config_path])

    def test_get_config_all(self):
        """get_config() with no key returns the full dict."""
        config = HtaConfig(self.test_config_path, load_default_paths=False)
        config_values = config.get_config()
        self.assertDictEqual(config_values, self.test_config)

    def test_get_config_one_level(self):
        """Top-level keys resolve to their raw values."""
        config = HtaConfig(self.test_config_path, load_default_paths=False)
        self.assertEqual(config.get_config('a'), self.test_config['a'])
        self.assertListEqual(config.get_config('b'), self.test_config['b'])
        self.assertDictEqual(config.get_config('c'), self.test_config['c'])

    def test_get_config_multiple_levels(self):
        """Dotted keys traverse nested dicts; missing paths yield None."""
        config = HtaConfig(self.test_config_path, load_default_paths=False)
        self.assertDictEqual(config.get_config('c'), self.test_config['c'])
        self.assertEqual(config.get_config('c.c1'), self.test_config['c']['c1'])
        self.assertEqual(config.get_config('c.c2.c21'), self.test_config['c']['c2']['c21'])
        self.assertIsNone(config.get_config('d'))
        self.assertIsNone(config.get_config('c.c2.c22'))
        self.assertIsNone(config.get_config('c.c1.c3'))

    def test_get_config_default_values(self):
        """The default argument applies only when the key is absent."""
        config = HtaConfig(self.test_config_path, load_default_paths=False)
        self.assertEqual(config.get_config('c', 10), self.test_config['c'])
        self.assertEqual(config.get_config('d', 10), 10)
class Solution():
    def isInterleave(self, s1: str, s2: str, s3: str) -> bool:
        """Return True if *s3* can be formed by interleaving *s1* and *s2*
        while preserving the relative order of each (LeetCode 97).

        Rewritten as the standard O(len(s1)*len(s2)) dynamic program with a
        single rolling row: the original recursive search carried an extra
        'prev' state, contained an unreachable branch
        ((i > len(s1)) and (j > len(s2)) can never hold since indices never
        exceed the string lengths), and risked RecursionError on long inputs.
        """
        n1, n2 = len(s1), len(s2)
        if n1 + n2 != len(s3):
            return False
        # dp[j] == True iff s1[:i] and s2[:j] interleave to form s3[:i+j].
        dp = [False] * (n2 + 1)
        dp[0] = True
        # Row i == 0: only s2 contributes.
        for j in range(1, n2 + 1):
            dp[j] = dp[j - 1] and s2[j - 1] == s3[j - 1]
        for i in range(1, n1 + 1):
            # Column j == 0: only s1 contributes.
            dp[0] = dp[0] and s1[i - 1] == s3[i - 1]
            for j in range(1, n2 + 1):
                take_s1 = dp[j] and s1[i - 1] == s3[i + j - 1]
                take_s2 = dp[j - 1] and s2[j - 1] == s3[i + j - 1]
                dp[j] = take_s1 or take_s2
        return dp[n2]
class ScrolledMessageDialog(wx.Dialog):
    """Simple wx dialog showing a long message in a read-only scrolled text
    control with an OK button, laid out with Layoutf constraints."""

    def __init__(self, parent, msg, caption, pos=wx.DefaultPosition, size=(500, 300)):
        wx.Dialog.__init__(self, parent, (- 1), caption, pos, size)
        (x, y) = pos
        # (-1, -1) means "no explicit position": center on screen instead.
        if ((x == (- 1)) and (y == (- 1))):
            self.CenterOnScreen(wx.BOTH)
        # Read-only, multi-line, horizontally scrollable rich text control.
        text = wx.TextCtrl(self, (- 1), msg, wx.DefaultPosition, wx.DefaultSize, (((wx.TE_READONLY | wx.TE_MULTILINE) | wx.HSCROLL) | wx.TE_RICH2))
        # Fixed-pitch font for the message body.
        font = wx.Font(8, wx.MODERN, wx.NORMAL, wx.NORMAL)
        text.SetStyle(0, len(msg), wx.TextAttr(font=font))
        ok = wx.Button(self, wx.ID_OK, 'OK')
        # Constrain the text to fill the dialog above the OK button.
        text.SetConstraints(Layoutf('t=t5#1;b=t5#2;l=l5#1;r=r5#1', (self, ok)))
        ok.SetConstraints(Layoutf('b=b5#1;x%w50#1;w!80;h!25', (self,)))
        self.SetAutoLayout(1)
        self.Layout()
class TestRelocation(unittest.TestCase):
    """Relocation-table discovery via dynamic segments/sections (pyelftools).

    The three tests previously triplicated the open/iterate/assert body and
    passed vacuously when no dynamic segment/section was found; the shared
    logic now lives in one helper that returns None in that case, which the
    assertions then catch.
    """

    _TEST_DIR = os.path.join('test', 'testfiles_for_unittests')

    def _relocation_table_keys(self, elf_name, container_type, iterate):
        """Open *elf_name* and return the relocation-table names of the first
        dynamic segment/section found (None if there is none)."""
        with open(os.path.join(self._TEST_DIR, elf_name), 'rb') as f:
            elff = ELFFile(f)
            for item in iterate(elff):
                if isinstance(item, container_type):
                    return set(item.get_relocation_tables())
        return None

    def test_dynamic_segment(self):
        """Relocation tables reachable from the PT_DYNAMIC segment."""
        keys = self._relocation_table_keys('x64_bad_sections.elf', DynamicSegment, lambda e: e.iter_segments())
        self.assertEqual(keys, {'JMPREL', 'RELA'})

    def test_dynamic_section(self):
        """Relocation tables reachable from the .dynamic section."""
        keys = self._relocation_table_keys('sample_exe64.elf', DynamicSection, lambda e: e.iter_sections())
        self.assertEqual(keys, {'JMPREL', 'RELA'})

    def test_dynamic_section_solaris(self):
        """Solaris binaries use REL-style relocations."""
        keys = self._relocation_table_keys('exe_solaris32_cc.elf', DynamicSection, lambda e: e.iter_sections())
        self.assertEqual(keys, {'JMPREL', 'REL'})
class HDB(PlatformUtilBase):
    """Host-side device-bridge stand-in: push/pull/delete are local file operations."""

    def __init__(self, device=None, tempdir=None):
        super(HDB, self).__init__(device, tempdir)

    def push(self, src, tgt):
        """Copy *src* to *tgt*: directories are replaced wholesale, small
        files are copied, and large files are symlinked instead."""
        getLogger().info('push {} to {}'.format(src, tgt))
        if src == tgt:
            return
        if os.path.isdir(src):
            # Replace any existing target directory with a fresh copy.
            if os.path.exists(tgt):
                shutil.rmtree(tgt)
            shutil.copytree(src, tgt)
        elif os.stat(src).st_size < COPY_THRESHOLD:
            shutil.copyfile(src, tgt)
            os.chmod(tgt, 0o777)
        elif not os.path.isfile(tgt):
            # Large file: symlink instead of copying.
            getLogger().info('Create symlink between {} and {}'.format(src, tgt))
            os.symlink(src, tgt)

    def pull(self, src, tgt):
        """Copy *src* back to *tgt* and make it world-accessible."""
        getLogger().info('pull {} to {}'.format(src, tgt))
        if src == tgt:
            return
        shutil.copyfile(src, tgt)
        os.chmod(tgt, 0o777)

    def deleteFile(self, file, *args, **kwargs):
        """Remove *file*, whether it is a directory tree or a single file."""
        getLogger().info('delete {}'.format(file))
        remover = shutil.rmtree if os.path.isdir(file) else os.remove
        remover(file)
class Last(Op):
    """Operator that buffers its source's emissions and re-emits only the
    final one when the source completes."""
    __slots__ = ('_last',)

    def __init__(self, source=None):
        Op.__init__(self, source)
        # Sentinel until the source emits at least once.
        self._last = NO_VALUE

    def on_source(self, *args):
        # Remember only the most recent emission.
        self._last = args

    def on_source_done(self, source):
        # NOTE(review): if the source never emitted, self._last is still
        # NO_VALUE and emit(*NO_VALUE) is attempted here -- confirm that
        # NO_VALUE is unpackable/handled upstream.
        self.emit(*self._last)
        Op.on_source_done(self, source)
class TransonicTemporaryJITMethod():
    """Placeholder for a jit-decorated method whose enclosing class has not
    been processed yet; calling it is always an error."""
    __transonic__ = 'jit_method'

    def __init__(self, func, native, xsimd, openmp):
        # Store the wrapped function and its compilation flags.
        self.func = func
        self.native = native
        self.xsimd = xsimd
        self.openmp = openmp

    def __call__(self, self_bis, *args, **kwargs):
        # NOTE(review): the message below looks truncated in this copy (the
        # decorator name after 'with ' is missing) -- confirm upstream.
        raise RuntimeError('Did you forget to decorate a class using methods decorated with transonic? Please decorate it with ')
class CaretSmartProcessor(util.PatternSequenceProcessor):
    """Inline processor for 'smart' caret syntax: ins/sup combinations.

    The pattern order matters: combined double-token forms are tried before
    the single-token 'ins' and 'sup' patterns so longer matches win.
    """
    PATTERNS = [util.PatSeqItem(re.compile(SMART_INS_SUP, (re.DOTALL | re.UNICODE)), 'double', 'ins,sup'), util.PatSeqItem(re.compile(SMART_SUP_INS, (re.DOTALL | re.UNICODE)), 'double', 'sup,ins'), util.PatSeqItem(re.compile(SMART_INS_SUP2, (re.DOTALL | re.UNICODE)), 'double', 'ins,sup'), util.PatSeqItem(re.compile(SMART_INS, (re.DOTALL | re.UNICODE)), 'single', 'ins'), util.PatSeqItem(re.compile(SUP, (re.DOTALL | re.UNICODE)), 'single', 'sup')]
def test_required():
    """How requiredness interacts with allow_null/allow_blank/default:
    a plain field errors when absent, while each of those options supplies
    a value instead of an error."""

    def check(field, *, expect_value=None, expect_error=None):
        # Validate an empty payload against a one-field schema and compare
        # either the resulting value dict or the error dict.
        schema = typesystem.Schema(fields={'field': field})
        value, error = schema.validate_or_error({})
        if expect_error is not None:
            assert dict(error) == expect_error
        else:
            assert dict(value) == expect_value

    check(typesystem.Integer(), expect_error={'field': 'This field is required.'})
    check(typesystem.Integer(allow_null=True), expect_value={'field': None})
    check(typesystem.Integer(default=0), expect_value={'field': 0})
    check(typesystem.Integer(allow_null=True, default=0), expect_value={'field': 0})
    check(typesystem.String(), expect_error={'field': 'This field is required.'})
    check(typesystem.String(allow_blank=True), expect_value={'field': ''})
    check(typesystem.String(allow_null=True, allow_blank=True), expect_value={'field': None})
def ee_xx_impulse(res, aniso, off, time):
tau_h = np.sqrt(((mu_0 * (off ** 2)) / (res * time)))
t0 = (tau_h / ((2 * time) * np.sqrt(np.pi)))
t1 = np.exp(((- (tau_h ** 2)) / 4))
t2 = (((tau_h ** 2) / (2 * (aniso ** 2))) + 1)
t3 = np.exp(((- (tau_h ** 2)) / (4 * (aniso ** 2))))
Exx = (((res / ((2 * np.pi) * (off ** 3))) * t0) * ((- t1) + (t2 * t3)))
Exx[(time == 0)] = (res / ((2 * np.pi) * (off ** 3)))
return Exx |
# NOTE(review): the line below looks like the tail of a mangled
# '@pytest.mark.parametrize' decorator -- confirm against upstream.
.parametrize('order_status', ['completed', 'placed'])
def test_stream_get_attendee(db, client, user, jwt, order_status):
    """An attendee with a completed/placed order can read the stream through
    every related endpoint (direct, via microlocation, via includes)."""
    (room, stream, session) = get_room_session_stream(db, name='Test Stream')
    # NOTE(review): the empty email literal looks mangled (an address was
    # probably stripped from this copy) -- confirm upstream.
    email = ''
    user._email = email
    AttendeeOrderSubFactory(event=room.event, order__status=order_status, email=email)
    db.session.commit()
    # Direct stream endpoint.
    response = client.get(f'/v1/video-streams/{stream.id}', content_type='application/vnd.api+json', headers=jwt)
    assert (response.status_code == 200)
    assert (json.loads(response.data)['data']['id'] == str(stream.id))
    assert (json.loads(response.data)['data']['attributes']['name'] == 'Test Stream')
    # Via the microlocation's video-stream relationship.
    response = client.get(f'/v1/microlocations/{room.id}/video-stream', content_type='application/vnd.api+json', headers=jwt)
    assert (response.status_code == 200)
    assert (json.loads(response.data)['data']['attributes']['name'] == 'Test Stream')
    # Included through '?include=video-stream'.
    response = client.get(f'/v1/microlocations/{room.id}?include=video-stream', content_type='application/vnd.api+json', headers=jwt)
    assert (response.status_code == 200)
    assert (json.loads(response.data)['included'][0]['attributes']['name'] == 'Test Stream')
    # Included through the session's nested relationship.
    response = client.get(f'/v1/sessions/{session.id}?include=microlocation.video-stream', content_type='application/vnd.api+json', headers=jwt)
    assert (response.status_code == 200)
    assert (json.loads(response.data)['included'][1]['attributes']['name'] == 'Test Stream')
def test_label_to_hash_normalizes_name_using_ensip15():
    """label_to_hash() must pass each label through ENSIP-15 normalization."""
    normalized_name = ENSNormalizedName([Label('test', [TextToken([102, 111, 111])]), Label('test', [TextToken([101, 116, 104])])])
    # The token codepoints spell 'foo' and 'eth'.
    assert (normalized_name.as_text == 'foo.eth')
    with patch('ens.utils.normalize_name_ensip15') as mock_normalize_name_ensip15:
        for label in normalized_name.labels:
            mock_normalize_name_ensip15.return_value = ENSNormalizedName([label])
            label_to_hash(label.text)
            # Each call must normalize exactly the label text it was given.
            mock_normalize_name_ensip15.assert_called_once_with(label.text)
            mock_normalize_name_ensip15.reset_mock()
    # Sanity check against the known hash of 'foo'.
    assert (label_to_hash('foo').hex() == '0x41b1a0649752af1b28b3dc29a1556eee781e4a4c3a1f7f53f90fa834de098c4d')
def read_endgame_schema(endgame_version: str, warn=False) -> 'dict | None':
    """Load the gzipped Endgame->ECS mapping schema for *endgame_version*.

    Returns the parsed schema dict.  When the file is missing: with
    warn=True a message is printed to stderr and None is returned;
    otherwise FileNotFoundError is raised.
    """
    endgame_schema_path = ((ENDGAME_SCHEMA_DIR / endgame_version) / 'endgame_ecs_mapping.json.gz')
    if (not endgame_schema_path.exists()):
        if warn:
            relative_path = endgame_schema_path.relative_to(ENDGAME_SCHEMA_DIR)
            print(f'Missing file to validate: {relative_path}, skipping', file=sys.stderr)
            # Implicit None: caller is expected to skip validation.
            return
        else:
            raise FileNotFoundError(str(endgame_schema_path))
    schema = json.loads(read_gzip(endgame_schema_path))
    return schema
def render(results, cmdenv, tdb):
    """Print a formatted station table for the given query results.

    Column set depends on verbosity: at quiet >= 2 only station names are
    shown; the heading is suppressed at any quiet level.  Raises
    TradeException when there is nothing to display.
    """
    from ..formatting import RowFormat, ColumnFormat
    if ((not results) or (not results.rows)):
        raise TradeException('No data found')
    # Station column width follows the longest station name.
    longestNamed = max(results.rows, key=(lambda row: len(row.station.name())))
    longestNameLen = len(longestNamed.station.name())
    rowFmt = RowFormat().append(ColumnFormat('Station', '<', longestNameLen, key=(lambda row: row.station.name())))
    if (cmdenv.quiet < 2):
        # Distance column only applies to 'near system' queries.
        if cmdenv.nearSystem:
            rowFmt.addColumn('DistLy', '>', 6, '.2f', key=(lambda row: row.dist))
        rowFmt.append(ColumnFormat('Age/days', '>', '8', '.2f', key=(lambda row: row.age))).append(ColumnFormat('StnLs', '>', '10', key=(lambda row: row.station.distFromStar()))).append(ColumnFormat('Pad', '>', '3', key=(lambda row: TradeDB.padSizes[row.station.maxPadSize]))).append(ColumnFormat('Plt', '>', '3', key=(lambda row: TradeDB.planetStates[row.station.planetary]))).append(ColumnFormat('Flc', '>', '3', key=(lambda row: TradeDB.fleetStates[row.station.fleet]))).append(ColumnFormat('Ody', '>', '3', key=(lambda row: TradeDB.odysseyStates[row.station.odyssey])))
    if (not cmdenv.quiet):
        (heading, underline) = rowFmt.heading()
        print(heading, underline, sep='\n')
    for row in results.rows:
        print(rowFmt.format(row))
class TestEmptyPubtypes(unittest.TestCase):
    """An ELF with an empty .debug_pubtypes section must parse to zero pubtypes."""

    def test_empty_pubtypes(self):
        test_dir = os.path.join('test', 'testfiles_for_unittests')
        with open(os.path.join(test_dir, 'empty_pubtypes', 'main.elf'), 'rb') as f:
            elf = ELFFile(f)
            self.assertEqual(len(elf.get_dwarf_info().get_pubtypes()), 0)
class DockPaneAction(TaskAction):
    """TaskAction whose target object is a dock pane resolved by id from the
    task's window.

    NOTE(review): the bare '_property' line below looks like a mangled
    decorator (likely '@cached_property' or similar) -- confirm upstream.
    """
    # The object the action operates on: the resolved dock pane.
    object = Property(observe='dock_pane')
    dock_pane = Property(Instance(ITaskPane), observe='task')
    dock_pane_id = Str()
    _property

    def _get_dock_pane(self):
        # Resolve the dock pane from the task's window, if available.
        if (self.task and (self.task.window is not None)):
            return self.task.window.get_dock_pane(self.dock_pane_id, self.task)
        return None

    def _get_object(self):
        return self.dock_pane
class MPOLearner(acme.Learner):
def __init__(self, policy_network: networks_lib.FeedForwardNetwork, critic_network: networks_lib.FeedForwardNetwork, dataset: Iterator[reverb.ReplaySample], random_key: jnp.ndarray, policy_optimizer: optax.GradientTransformation, critic_optimizer: optax.GradientTransformation, dual_optimizer: optax.GradientTransformation, discount: float, num_samples: int, action_dim: int, target_policy_update_period: int, target_critic_update_period: int, policy_loss_fn: Optional[losses.MPO]=None, counter: Optional[counting.Counter]=None, logger: Optional[loggers.Logger]=None):
policy_loss_fn: losses.MPO = (policy_loss_fn or losses.MPO(epsilon=0.1, epsilon_penalty=0.001, epsilon_mean=0.001, epsilon_stddev=1e-06, init_log_temperature=1.0, init_log_alpha_mean=1.0, init_log_alpha_stddev=10.0))
def compute_loss(policy_params: networks_lib.Params, mpo_params: losses.MPOParams, critic_params: networks_lib.Params, target_policy_params: networks_lib.Params, target_critic_params: networks_lib.Params, transitions: acme_types.Transition, key: jax_types.PRNGKey):
o_tm1 = transitions.observation
o_t = transitions.next_observation
online_action_distribution = policy_network.apply(policy_params, o_t)
target_action_distribution = policy_network.apply(target_policy_params, o_t)
sampled_actions = target_action_distribution.sample(num_samples, seed=key)
tiled_o_t = utils.tile_nested(o_t, num_samples)
sampled_q_t = jax.vmap(critic_network.apply, (None, 0, 0))(target_critic_params, tiled_o_t, sampled_actions)
q_t = jnp.mean(sampled_q_t, axis=0)
q_tm1 = critic_network.apply(critic_params, o_tm1, transitions.action)
batch_td_learning = jax.vmap(rlax.td_learning)
td_error = batch_td_learning(q_tm1, transitions.reward, (discount * transitions.discount), q_t)
critic_loss = jnp.mean(jnp.square(td_error))
(policy_loss, policy_stats) = policy_loss_fn(mpo_params, online_action_distribution=online_action_distribution, target_action_distribution=target_action_distribution, actions=sampled_actions, q_values=sampled_q_t)
policy_loss = jnp.mean(policy_loss)
return ((policy_loss, critic_loss), policy_stats)
def sgd_step(state: TrainingState, transitions: acme_types.Transition):
(key, random_key) = jax.random.split(state.key)
compute_loss_with_inputs = functools.partial(compute_loss, target_policy_params=state.target_policy_params, target_critic_params=state.target_critic_params, transitions=transitions, key=key)
mpo_params = mpo_losses.clip_mpo_params(state.mpo_params, per_dim_constraining=policy_loss_fn.per_dim_constraining)
((policy_loss_value, critic_loss_value), vjpfun, policy_metrics) = jax.vjp(compute_loss_with_inputs, state.policy_params, mpo_params, state.critic_params, has_aux=True)
(policy_gradients, _, _) = vjpfun((1.0, 0.0))
(_, dual_gradients, _) = vjpfun((1.0, 0.0))
(_, _, critic_gradients) = vjpfun((0.0, 1.0))
(policy_updates, policy_opt_state) = policy_optimizer.update(policy_gradients, state.policy_opt_state)
(critic_updates, critic_opt_state) = critic_optimizer.update(critic_gradients, state.critic_opt_state)
(dual_updates, dual_opt_state) = dual_optimizer.update(dual_gradients, state.dual_opt_state)
policy_params = optax.apply_updates(state.policy_params, policy_updates)
critic_params = optax.apply_updates(state.critic_params, critic_updates)
mpo_params = optax.apply_updates(mpo_params, dual_updates)
steps = (state.steps + 1)
target_policy_params = optax.periodic_update(policy_params, state.target_policy_params, steps, target_policy_update_period)
target_critic_params = optax.periodic_update(critic_params, state.target_critic_params, steps, target_critic_update_period)
new_state = TrainingState(policy_params=policy_params, critic_params=critic_params, mpo_params=mpo_params, target_policy_params=target_policy_params, target_critic_params=target_critic_params, policy_opt_state=policy_opt_state, critic_opt_state=critic_opt_state, dual_opt_state=dual_opt_state, key=random_key, steps=steps)
metrics = {'policy_loss': policy_loss_value, 'critic_loss': critic_loss_value, **policy_metrics._asdict()}
return (new_state, metrics)
self._sgd_step = jax.jit(sgd_step)
self._iterator = dataset
def make_initial_state(key: jax_types.PRNGKey):
    """Build a fresh TrainingState; target nets start as copies of the online nets."""
    policy_key, critic_key, state_key = jax.random.split(key, 3)
    policy_params = policy_network.init(policy_key)
    critic_params = critic_network.init(critic_key)
    mpo_params = policy_loss_fn.init_params(action_dim)
    return TrainingState(
        policy_params=policy_params,
        critic_params=critic_params,
        mpo_params=mpo_params,
        policy_opt_state=policy_optimizer.init(policy_params),
        critic_opt_state=critic_optimizer.init(critic_params),
        dual_opt_state=dual_optimizer.init(mpo_params),
        target_policy_params=policy_params,
        target_critic_params=critic_params,
        key=state_key,
        steps=0)
self._state = make_initial_state(random_key)
self._timestamp = None
self._counter = (counter or counting.Counter())
self._logger = (logger or loggers.make_default_logger('learner', save_data=False, asynchronous=True, serialize_fn=utils.fetch_devicearray))
def step(self):
    """Run one learner step: sample a batch, update parameters, log metrics."""
    sample = next(self._iterator)
    self._state, metrics = self._sgd_step(self._state, sample.data)
    now = time.time()
    # The very first call has no previous timestamp; report zero elapsed time.
    elapsed_time = (now - self._timestamp) if self._timestamp else 0
    self._timestamp = now
    counts = self._counter.increment(steps=1, walltime=elapsed_time)
    self._logger.write({**metrics, **counts})
def get_variables(self, names):
    """Return the requested parameter collections ('policy' and/or 'critic')."""
    available = {
        'policy': self._state.policy_params,
        'critic': self._state.critic_params,
    }
    return [available[name] for name in names]
def restore(self, state: TrainingState) -> None:
    # Replace the learner state wholesale (checkpoint restore).
    self._state = state
def save(self) -> TrainingState:
    # Expose the full learner state for checkpointing.
    return self._state
# NOTE(review): the three lines below look like decorator lines whose leading
# '@' (and, for the two option lines, the 'click.option' callable) was lost in
# extraction — presumably '@_group.command(...)' plus two '@click.option(...)'
# declarations. Confirm against the original CLI module.
_group.command('show-latest-compatible')
('--package', '-p', help='Name of package')
('--stack_version', '-s', required=True, help='Rule stack version')
def show_latest_compatible_version(package: str, stack_version: str) -> None:
    """Print the latest integration package version compatible with *stack_version*."""
    packages_manifest = None
    try:
        packages_manifest = load_integrations_manifests()
    except Exception as e:
        click.echo(f'Error loading integrations manifests: {str(e)}')
        return
    try:
        # optional_minor_and_patch lets callers pass e.g. '8' or '8.3'.
        version = find_latest_compatible_version(package, '', Version.parse(stack_version, optional_minor_and_patch=True), packages_manifest)
        click.echo(f'Compatible integration version={version!r}')
    except Exception as e:
        click.echo(f'Error finding compatible version: {str(e)}')
        return
class StickBugged(commands.Cog):
    """Cog that renders the 'get stick bugged' meme video from a supplied image."""
    __version__ = '0.0.1'
    __author__ = 'flare#0001'

    def format_help_for_context(self, ctx):
        # Append cog version/author to the default help text.
        pre_processed = super().format_help_for_context(ctx)
        return f'''{pre_processed}
Cog Version: {self.__version__}
Author: {self.__author__}'''

    def __init__(self, bot) -> None:
        self.bot = bot
        self._stickbug = StickBug()

    def blocking(self, io, id):
        """Blocking video render; meant to run in an executor (see stick())."""
        io = Image.open(io)
        self._stickbug.image = io
        # Clamp the output resolution to the range [128x72, 1280x720].
        self._stickbug.video_resolution = (max(min(1280, io.width), 128), max(min(720, io.height), 72))
        self._stickbug.lsd_scale = 0.35
        video = self._stickbug.video
        video.write_videofile((str(cog_data_path(self)) + f'/{id}stick.mp4'), threads=1, preset='superfast', verbose=False, logger=None, temp_audiofile=str((cog_data_path(self) / f'{id}stick.mp3')))
        video.close()
        return

    # NOTE(review): the two lines below appear to be decorator lines that lost
    # their '@' prefix in extraction (likely '@commands.max_concurrency(1, ...)'
    # and '@commands.command(aliases=...)'). Confirm against the original cog.
    _concurrency(1, commands.BucketType.default)
    (aliases=['stickbug', 'stickbugged'])
    async def stick(self, ctx, images: Optional[ImageFinder]):
        """Generate and post the stick-bugged video for the given image."""
        if (images is None):
            images = (await ImageFinder().search_for_images(ctx))
        if (not images):
            return (await ctx.send_help())
        # NOTE(review): 'images' is the full search result here, not a single
        # element; downstream code treats it as one image/URL. Confirm whether
        # this should be images[0].
        image = images
        async with ctx.typing():
            io = BytesIO()
            if isinstance(image, discord.Asset):
                (await image.save(io, seek_begin=True))
            else:
                # NOTE(review): 'aio' looks like a mangled
                # 'aiohttp.ClientSession()' expression lost in extraction — confirm.
                async with aio as session:
                    async with session.get(str(image)) as resp:
                        if (resp.status != 200):
                            return (await ctx.send('The picture returned an unknown status code.'))
                        io.write((await resp.read()))
                        io.seek(0)
                        (await asyncio.sleep(0.2))
            # Run the CPU-heavy render off the event loop, with a 5 min cap.
            fake_task = functools.partial(self.blocking, io=io, id=ctx.message.id)
            task = self.bot.loop.run_in_executor(None, fake_task)
            try:
                video_file = (await asyncio.wait_for(task, timeout=300))
            except asyncio.TimeoutError as e:
                log.error('Timeout creating stickbug video', exc_info=e)
                return (await ctx.send('Timeout creating stickbug video.'))
            except Exception:
                log.exception('Error sending stick bugged video')
                return (await ctx.send('An error occured during the creation of the stick bugged video'))
            fp = (cog_data_path(self) / f'{ctx.message.id}stick.mp4')
            file = discord.File(str(fp), filename='stick.mp4')
            try:
                (await ctx.send(files=[file]))
            except Exception as e:
                log.error('Error sending stick bugged video', exc_info=e)
            # Best-effort cleanup of the rendered file.
            try:
                os.remove(fp)
            except Exception as e:
                log.error('Error deleting stick bugged video', exc_info=e)
# NOTE(review): the two lines below appear to be decorator lines with their
# '@' prefix lost in extraction — presumably an '@app.route('/url_api/', ...)'
# registration and something like '@login_required'. Confirm against the
# original module.
('/url_api/', methods=['GET', 'POST'])
_required
def url_api():
    """JSON dispatch endpoint: routes {'op': ..., 'data': ...} to handlers in `ops`."""
    if (not request.json):
        js = {'error': True, 'message': 'This endpoint only accepts JSON POST requests.'}
        resp = jsonify(js)
        # Errors are still returned with HTTP 200; the 'error' flag carries status.
        resp.status_code = 200
        resp.mimetype = 'application/json'
        return resp
    print('API Request!')
    print('session:', g.session)
    print('Request method: ', request.method)
    print('Request json: ', request.json)
    try:
        # Dispatch only known ops; unknown/malformed requests get a stub payload.
        if (('op' in request.json) and ('data' in request.json) and (request.json['op'] in ops)):
            data = ops[request.json['op']](g.session, request.json['data'])
        else:
            data = {'wat': 'wat'}
    except Exception as e:
        print('Failure in processing url api call!')
        traceback.print_exc()
        js = {'error': True, 'message': ('Error: \n%s' % (traceback.format_exc(),))}
        resp = jsonify(js)
        resp.status_code = 200
        resp.mimetype = 'application/json'
        return resp
    response = jsonify(data)
    print('ResponseData: ', data)
    print('Response: ', response)
    response.status_code = 200
    response.mimetype = 'application/json'
    # Persist any changes the handler made, then drop cached ORM state.
    g.session.commit()
    g.session.expire_all()
    return response
class TestAWSUtil(unittest.TestCase):
    """Unit tests for AWS util helpers: dict/list conversion, tag preparation,
    and ECS container-definition validation/splitting."""

    def test_convert_dict_to_list(self):
        expected = [{'Name': 'k1', 'Values': ['v1']}, {'Name': 'k2', 'Values': ['v2']}]
        self.assertEqual(expected, convert_dict_to_list(TEST_DICT, 'Name', 'Values'))
        # An empty mapping converts to an empty list.
        self.assertEqual([], convert_dict_to_list({}, 'Name', 'Values'))

    def test_convert_list_dict(self):
        self.assertEqual(TEST_DICT, convert_list_to_dict(TEST_LIST, 'Name', 'Value'))

    def test_prepare_tags(self):
        self.assertEqual({'tag:k1': 'v1', 'tag:k2': 'v2'}, prepare_tags(TEST_DICT))

    def test_is_container_definition_valid_true(self):
        # Both short task definitions and full ARNs are accepted.
        for definition in (
            'pl-task-fake-business:2#pl-container-fake-business',
            'arn:aws:ecs:us-west-2::task-definition/onedocker-task-shared-us-west-2:1#onedocker-container-shared-us-west-2',
        ):
            self.assertTrue(is_container_definition_valid(definition))

    def test_is_container_definition_valid_false(self):
        # Missing container name, doubled separator, or missing revision are rejected.
        for definition in (
            'pl-task-fake-business:2#',
            'pl-task-fake-business:2##pl-container-fake-business',
            'pl-task-fake-business#pl-container-fake-business',
            'pl-container-fake-business',
        ):
            self.assertFalse(is_container_definition_valid(definition))

    def test_split_container_definition_throw(self):
        with self.assertRaises(InvalidParameterError):
            split_container_definition('pl-task-fake-business:2#')

    def test_split_container_definition(self):
        self.assertEqual(
            ('pl-task-fake-business:2', 'pl-container-fake-business'),
            split_container_definition('pl-task-fake-business:2#pl-container-fake-business'))
class Ledger_KeyStore(Hardware_KeyStore):
    """Keystore backed by a Ledger hardware wallet (message and transaction signing)."""
    hw_type = 'ledger'
    device = 'Ledger'
    handler: Optional['Ledger_Handler']

    def __init__(self, data: Dict[(str, Any)], row: 'MasterKeyRow') -> None:
        Hardware_KeyStore.__init__(self, data, row)
        self.force_watching_only = False
        # True while a signing operation is in flight; give_error() uses it to
        # decide whether to surface errors in the UI.
        self.signing = False
        self.cfg = data.get('cfg', {'mode': 0})

    def to_derivation_data(self) -> Dict[(str, Any)]:
        # Persist the device configuration alongside the base derivation data.
        obj = super().to_derivation_data()
        obj['cfg'] = self.cfg
        return obj

    def get_derivation(self):
        return self.derivation

    def get_client(self):
        # Raw btchip dongle object (low-level API).
        return self.plugin.get_client(self).dongleObject

    def get_client_electrum(self):
        # Wrapped client exposing the Electrum-level API.
        return self.plugin.get_client(self)

    def give_error(self, message, clear_client=False) -> None:
        """Log *message*, surface it to the user (unless mid-signing), then raise."""
        logger.error(message)
        if (not self.signing):
            assert (self.handler is not None)
            self.handler.show_error(message)
        else:
            self.signing = False
        if clear_client:
            self.client = None
        raise Exception(message)

    def set_and_unset_signing(func: Any):
        """Decorator: mark the keystore as signing for the duration of *func*."""
        def wrapper(self, *args, **kwargs):
            try:
                self.signing = True
                return func(self, *args, **kwargs)
            finally:
                self.signing = False
        return wrapper

    def decrypt_message(self, pubkey, message, password):
        raise RuntimeError(_('Encryption and decryption are not supported for {}').format(self.device))

    # NOTE(review): '_and_unset_signing' below (and before sign_transaction /
    # show_address) looks like a decorator line that lost its '@set' prefix in
    # extraction, i.e. '@set_and_unset_signing' — confirm.
    _and_unset_signing
    def sign_message(self, sequence, message, password):
        """Sign *message* with the key at *sequence*, returning a 65-byte
        recoverable signature (header byte + r + s), or b'' on cancel."""
        message = message.encode('utf8')
        message_hash = hashlib.sha256(message).hexdigest().upper()
        client = self.get_client()
        # BIP32 subpath of the signing key, relative to the keystore root.
        address_path = (self.get_derivation()[2:] + '/{:d}/{:d}'.format(*sequence))
        self.handler.show_message(('Signing message ...\r\nMessage hash: ' + message_hash))
        try:
            info = self.get_client().signMessagePrepare(address_path, message)
            pin = ''
            if info['confirmationNeeded']:
                # Device requested 2FA confirmation; an empty pin means cancel.
                pin = self.handler.get_auth(self, info)
                if (not pin):
                    raise UserWarning(_('Cancelled by user'))
                pin = str(pin).encode()
            signature = self.get_client().signMessageSign(pin)
        except BTChipException as e:
            if (e.sw == 27264):  # 0x6A80: incorrect data (unsupported message)
                self.give_error('Unfortunately, this message cannot be signed by the Ledger wallet. Only alphanumerical messages shorter than 140 characters are supported. Please remove any extra characters (tab, carriage return) and retry.')
            elif (e.sw == 27013):  # 0x6985: conditions not satisfied (user denied)
                return b''
            else:
                self.give_error(e, True)
        except UserWarning:
            self.handler.show_error(_('Cancelled by user'))
            return b''
        except Exception as e:
            self.give_error(e, True)
        finally:
            self.handler.finished()
        # Re-pack the DER-encoded (r, s) returned by the device into
        # header + 32-byte r + 32-byte s.
        rLength = signature[3]
        r = signature[4:(4 + rLength)]
        sLength = signature[((4 + rLength) + 1)]
        s = signature[((4 + rLength) + 2):]
        if (rLength == 33):
            r = r[1:]  # strip DER sign-padding byte
        if (sLength == 33):
            s = s[1:]
        return ((bytes([((27 + 4) + (signature[0] & 1))]) + r) + s)

    _and_unset_signing
    def sign_transaction(self, tx: Transaction, password: str, tx_context: TransactionContext) -> None:
        """Sign *tx* in place on the Ledger device.

        Collects this keystore's inputs and paths, registers the transaction
        with the device, asks for on-screen confirmation (or 2FA pin), then
        obtains one signature per matching input.
        """
        if tx.is_complete():
            return
        assert (self.handler is not None)
        client = self.get_client()
        inputs: List[YInput] = []
        inputsPaths = []
        chipInputs = []
        redeemScripts = []
        signatures = []
        changePath = ''
        changeAmount = None
        output = None
        outputAmount = None
        pin = ''
        self.get_client()
        foundP2SHSpend = False
        allSpendsAreP2SH = True
        for txin in tx.inputs:
            foundP2SHSpend = (foundP2SHSpend or (txin.type() == ScriptType.MULTISIG_P2SH))
            allSpendsAreP2SH = (allSpendsAreP2SH and (txin.type() == ScriptType.MULTISIG_P2SH))
            # Find the x_pubkey belonging to this keystore to get its path.
            for (i, x_pubkey) in enumerate(txin.x_pubkeys):
                if self.is_signature_candidate(x_pubkey):
                    txin_xpub_idx = i
                    inputPath = ('%s/%d/%d' % (self.get_derivation()[2:], *x_pubkey.bip32_path()))
                    break
            else:
                self.give_error('No matching x_key for sign_transaction')
            inputs.append(YInput(txin.value, Transaction.get_preimage_script_bytes(txin), txin_xpub_idx, txin.sequence))
            inputsPaths.append(inputPath)
        if (foundP2SHSpend and (not allSpendsAreP2SH)):
            self.give_error('P2SH / regular input mixed in same transaction not supported')
        txOutput = pack_list(tx.outputs, XTxOutput.to_bytes)
        if (not foundP2SHSpend):
            # Identify which output (if any) is our change so the device can
            # treat it specially during confirmation.
            keystore_fingerprint = self.get_fingerprint()
            assert (tx.output_info is not None)
            for (tx_output, output_metadatas) in zip(tx.outputs, tx.output_info):
                info = output_metadatas.get(keystore_fingerprint)
                if ((info is not None) and (len(tx.outputs) != 1)):
                    (key_derivation, xpubs, m) = info
                    key_subpath = compose_chain_string(key_derivation)[1:]
                    changePath = (self.get_derivation()[2:] + key_subpath)
                    changeAmount = tx_output.value
                else:
                    output = classify_tx_output(tx_output)
                    outputAmount = tx_output.value
        self.handler.show_message(_('Confirm Transaction on your Ledger device...'))
        try:
            for (i, utxo) in enumerate(inputs):
                txin = tx.inputs[i]
                sequence = int_to_hex(utxo.sequence, 4)
                prevout_bytes = txin.prevout_bytes()
                value_bytes = (prevout_bytes + pack_le_int64(utxo.value))
                chipInputs.append({'value': value_bytes, 'witness': True, 'sequence': sequence})
                redeemScripts.append(utxo.script_sig)
            inputIndex = 0
            rawTx = tx.serialize()
            self.get_client().enableAlternate2fa(False)
            # First pass registers the transaction shell with the device.
            self.get_client().startUntrustedTransaction(True, inputIndex, chipInputs, redeemScripts[inputIndex])
            outputData = self.get_client().finalizeInputFull(txOutput)
            outputData['outputData'] = txOutput
            transactionOutput = outputData['outputData']
            if outputData['confirmationNeeded']:
                outputData['address'] = cast(ScriptTemplate, output).to_string()
                self.handler.finished()
                # On-device confirmation: an empty pin means the user declined.
                auth_pin = self.handler.get_auth(self, outputData)
                if (not auth_pin):
                    raise UserWarning()
                pin = auth_pin
                self.handler.show_message(_('Confirmed. Signing Transaction...'))
            # Second pass: sign each input individually against the registered tx.
            while (inputIndex < len(inputs)):
                singleInput = [chipInputs[inputIndex]]
                self.get_client().startUntrustedTransaction(False, 0, singleInput, redeemScripts[inputIndex])
                inputSignature = self.get_client().untrustedHashSign(inputsPaths[inputIndex], pin, lockTime=tx.locktime, sighashType=tx.nHashType())
                inputSignature[0] = 48  # force DER SEQUENCE tag (0x30)
                signatures.append(inputSignature)
                inputIndex = (inputIndex + 1)
        except UserWarning:
            self.handler.show_error(_('Cancelled by user'))
            return
        except BTChipException as e:
            if (e.sw == 27013):  # 0x6985: conditions not satisfied (user denied)
                return
            else:
                logger.exception('')
                self.give_error(e, True)
        except Exception as e:
            logger.exception('')
            self.give_error(e, True)
        finally:
            self.handler.finished()
        # Attach each produced signature to its transaction input.
        for (txin, input, signature) in zip(tx.inputs, inputs, signatures):
            txin.signatures[input.txin_xpub_idx] = signature

    _and_unset_signing
    def show_address(self, derivation_subpath: str) -> None:
        """Display the address at *derivation_subpath* on the device screen."""
        client = self.get_client()
        address_path = ((self.get_derivation()[2:] + '/') + derivation_subpath)
        assert (self.handler is not None)
        self.handler.show_message(_('Showing address ...'))
        try:
            client.getWalletPublicKey(address_path, showOnScreen=True)
        except Exception:
            # Best-effort: failures to display are silently ignored.
            pass
        finally:
            self.handler.finished()
def start_north_omf_as_a_service():
    """Fixture factory: returns a helper that creates (and optionally enables)
    a north OMF service on a Fledge instance via its REST API.

    The returned helper POSTs the service configuration to /fledge/service and
    returns the decoded JSON response; it asserts the request succeeded.
    """
    def _start_north_omf_as_a_service(fledge_url, pi_host, pi_port, pi_db='Dianomic', auth_method='basic', pi_user=None, pi_pwd=None, north_plugin='OMF', service_name='NorthReadingsToPI_WebAPI', start=True, naming_scheme='Backward compatibility', default_af_location='fledge/room1/machine1', pi_use_legacy='true'):
        import http.client
        import json
        _enabled = (True if start else False)
        # FIX(review): the original line read 'conn =' with the right-hand side
        # lost in extraction; the subsequent conn.request(...) requires an HTTP
        # connection to the Fledge API host.
        conn = http.client.HTTPConnection(fledge_url)
        # NOTE(review): pi_db is accepted but currently unused in the payload —
        # confirm whether a 'DefaultAFLocation'/'PIDatabase' style field was intended.
        data = {'name': service_name, 'plugin': '{}'.format(north_plugin), 'enabled': _enabled, 'type': 'north', 'config': {'PIServerEndpoint': {'value': 'PI Web API'}, 'PIWebAPIAuthenticationMethod': {'value': auth_method}, 'PIWebAPIUserId': {'value': pi_user}, 'PIWebAPIPassword': {'value': pi_pwd}, 'ServerHostname': {'value': pi_host}, 'ServerPort': {'value': str(pi_port)}, 'compression': {'value': 'true'}, 'DefaultAFLocation': {'value': default_af_location}, 'NamingScheme': {'value': naming_scheme}, 'Legacy': {'value': pi_use_legacy}}}
        conn.request('POST', '/fledge/service', json.dumps(data))
        r = conn.getresponse()
        assert (200 == r.status)
        retval = json.loads(r.read().decode())
        return retval
    return _start_north_omf_as_a_service
def _get_config_value(key: str, default: str='') -> str:
    """Resolve a config value for *key*.

    Precedence: the contents of the file named by env var FILE__<key>
    (stripped), then the env var <key> itself, then *default*. Unreadable
    files are logged and treated as absent.
    """
    file_path = os.environ.get(f'FILE__{key}')
    if file_path is not None:
        try:
            with open(file_path, 'r') as fh:
                return fh.read().strip()
        except IOError as exc:
            logger.error(f'Unable to read value for {key} from {file_path}: {str(exc)}')
    return os.environ.get(key, default)
class Overrides():
    """Tracks overrides applied while computing Hydra's defaults list:
    config-group choice overrides, appends (+group=option), deletions
    (~group[=option]) and plain config-value overrides."""
    override_choices: Dict[(str, Optional[Union[(str, List[str])]])]
    override_metadata: Dict[(str, OverrideMetadata)]
    append_group_defaults: List[GroupDefault]
    config_overrides: List[Override]
    known_choices: Dict[(str, Optional[str])]
    known_choices_per_group: Dict[(str, Set[str])]
    deletions: Dict[(str, Deletion)]

    def __init__(self, repo: IConfigRepository, overrides_list: List[Override]) -> None:
        """Classify each command-line override into the buckets above.

        Raises:
            ConfigCompositionException: on force-add of a config group.
            ValueError: on an override value of an unsupported type.
        """
        self.override_choices = {}
        self.override_metadata = {}
        self.append_group_defaults = []
        self.config_overrides = []
        self.deletions = {}
        self.known_choices = {}
        self.known_choices_per_group = {}
        for override in overrides_list:
            if override.is_sweep_override():
                continue
            is_group = repo.group_exists(override.key_or_group)
            value = override.value()
            is_dict = isinstance(value, dict)
            if (is_dict or (not is_group)):
                # Not a config-group choice: treated as a config value override.
                self.config_overrides.append(override)
            elif override.is_force_add():
                raise ConfigCompositionException(f"force-add of config groups is not supported: '{override.input_line}'")
            elif override.is_delete():
                # Strip the leading '~' from the key element.
                key = override.get_key_element()[1:]
                value = override.value()
                if ((value is not None) and (not isinstance(value, str))):
                    raise ValueError(f'Config group override deletion value must be a string : {override}')
                self.deletions[key] = Deletion(name=value)
            elif (not isinstance(value, (str, list))):
                raise ValueError(f'Config group override must be a string or a list. Got {type(value).__name__}')
            elif override.is_add():
                self.append_group_defaults.append(GroupDefault(group=override.key_or_group, package=override.package, value=value, external_append=True))
            else:
                key = override.get_key_element()
                self.override_choices[key] = value
                self.override_metadata[key] = OverrideMetadata(external_override=True)

    def add_override(self, parent_config_path: str, default: GroupDefault) -> None:
        """Record an 'override' default from inside a config file (first one wins)."""
        assert default.override
        key = default.get_override_key()
        if (key not in self.override_choices):
            self.override_choices[key] = default.value
            self.override_metadata[key] = OverrideMetadata(external_override=False, containing_config_path=parent_config_path, relative_key=default.get_relative_override_key())

    def is_overridden(self, default: InputDefault) -> bool:
        """True when a choice override exists for this group default."""
        if isinstance(default, GroupDefault):
            return (default.get_override_key() in self.override_choices)
        return False

    def override_default_option(self, default: GroupDefault) -> None:
        """Apply the recorded choice override to *default* and mark it used."""
        key = default.get_override_key()
        if (key in self.override_choices):
            if isinstance(default, GroupDefault):
                default.value = self.override_choices[key]
                default.config_name_overridden = True
            self.override_metadata[key].used = True

    def ensure_overrides_used(self) -> None:
        """Raise if any recorded override never matched an entry in the defaults list."""
        for (key, meta) in self.override_metadata.items():
            if (not meta.used):
                # FIX(review): this was `key.split('')[0]`, which always raises
                # ValueError ('empty separator'). Override keys have the form
                # group[@package], so the group is the part before '@'.
                group = key.split('@')[0]
                choices = (self.known_choices_per_group[group] if (group in self.known_choices_per_group) else set())
                if (len(choices) > 1):
                    msg = f'''Could not override '{key}'.
Did you mean to override one of {', '.join(sorted(list(choices)))}?'''
                elif (len(choices) == 1):
                    msg = f'''Could not override '{key}'.
Did you mean to override {copy.copy(choices).pop()}?'''
                elif (len(choices) == 0):
                    msg = f"Could not override '{key}'. No match in the defaults list."
                else:
                    assert False
                if (meta.containing_config_path is not None):
                    msg = f"In '{meta.containing_config_path}': {msg}"
                if meta.external_override:
                    msg += f'''
To append to your default list use +{key}={self.override_choices[key]}'''
                raise ConfigCompositionException(msg)

    def ensure_deletions_used(self) -> None:
        """Raise if any requested deletion never matched an entry in the defaults list."""
        for (key, deletion) in self.deletions.items():
            if (not deletion.used):
                desc = (f'{key}={deletion.name}' if (deletion.name is not None) else key)
                msg = f"Could not delete '{desc}'. No match in the defaults list"
                raise ConfigCompositionException(msg)

    def set_known_choice(self, default: InputDefault) -> None:
        """Register the chosen option for a group; conflicting choices are an error."""
        if isinstance(default, GroupDefault):
            key = default.get_override_key()
            if (key not in self.known_choices):
                self.known_choices[key] = default.get_name()
            else:
                prev = self.known_choices[key]
                if (default.get_name() != prev):
                    raise ConfigCompositionException(f"Multiple values for {key}. To override a value use 'override {key}: {prev}'")
            group = default.get_group_path()
            if (group not in self.known_choices_per_group):
                self.known_choices_per_group[group] = set()
            self.known_choices_per_group[group].add(key)

    def is_deleted(self, default: InputDefault) -> bool:
        """True when this default matches a requested deletion (by group and,
        if the deletion named one, by option name)."""
        if (not isinstance(default, GroupDefault)):
            return False
        key = default.get_override_key()
        if (key in self.deletions):
            deletion = self.deletions[key]
            if (deletion.name is None):
                return True
            else:
                return (deletion.name == default.get_name())
        return False

    def delete(self, default: InputDefault) -> None:
        """Mark *default* deleted and record that the deletion matched."""
        assert isinstance(default, GroupDefault)
        default.deleted = True
        key = default.get_override_key()
        self.deletions[key].used = True
class DoraCheckpointSync(Callback):
    """Lightning callback keeping the Dora XP link history in sync with checkpoints."""

    def __init__(self):
        self.xp = get_xp()

    def on_load_checkpoint(self, trainer, pl_module, checkpoint):
        # Restore the link history that was recorded when the checkpoint was saved.
        self.xp.link.update_history(checkpoint['dora_link_history'])

    def on_save_checkpoint(self, trainer, pl_module, checkpoint):
        # Stamp the checkpoint with enough metadata to identify/resume the XP.
        checkpoint.update({
            'dora_link_history': self.xp.link.history,
            'dora_sig': self.xp.sig,
            'dora_cfg': self.xp.cfg,
        })
        return checkpoint
# NOTE(review): the parenthesized C string below appears to be the payload of a
# stripped '@instr(...)' decorator (Exo maps this proc to the given AVX2
# horizontal-add intrinsic sequence) — confirm against the original Exo source.
('\n    {{\n    __m256 tmp = _mm256_hadd_ps({x_data}, {x_data});\n    tmp = _mm256_hadd_ps(tmp, tmp);\n    __m256 upper_bits = _mm256_castps128_ps256(_mm256_extractf128_ps(tmp, 1));\n    tmp = _mm256_add_ps(tmp, upper_bits);\n    *{result} += _mm256_cvtss_f32(tmp);\n    }}\n    ')
def avx2_assoc_reduce_add_ps(x: ([f32][8] AVX2), result: f32):
    # Accumulate the horizontal sum of an 8-lane AVX2 vector into *result*.
    # The vector must be contiguous (unit stride) for the intrinsic mapping.
    assert (stride(x, 0) == 1)
    for i in seq(0, 8):
        result += x[i]
def fortios_firewall(data, fos, check_mode):
    """Run the firewall proxy-addrgrp task against a FortiOS connection.

    Returns the raw response in check mode, otherwise an Ansible-style tuple
    (is_error, changed, response, diff).
    """
    fos.do_member_operation('firewall', 'proxy-addrgrp')
    if data['firewall_proxy_addrgrp']:
        resp = firewall_proxy_addrgrp(data, fos, check_mode)
    else:
        # No task body supplied — abort the module run.
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_proxy_addrgrp'))
    if check_mode:
        return resp
    succeeded = is_successful_status(resp)
    changed = resp['revision_changed'] if 'revision_changed' in resp else True
    return (not succeeded, succeeded and changed, resp, {})
def render_matrix(jinja2_env, matrix_dir):
    """Render every target listed under '$render' in <matrix_dir>/matrix.yml.

    Best-effort: configuration problems are logged and the affected target is
    skipped rather than aborting the whole run.
    """
    logger.debug('Entered directory: %s.', matrix_dir)
    with open(os.path.join(matrix_dir, 'matrix.yml')) as f:
        try:
            # SECURITY(review): yaml.load without an explicit Loader can run
            # arbitrary constructors on untrusted input; prefer yaml.safe_load
            # if matrix.yml may come from outside the project.
            matrix = yaml.load(f)
        except Exception:
            # Was a bare 'except:'; Exception keeps the best-effort behavior
            # without swallowing SystemExit/KeyboardInterrupt.
            logger.exception('Failed to load matrix from %s', matrix_dir)
            return
    if not matrix:
        # An empty matrix.yml parses to None; nothing to render.
        logger.debug('Empty matrix in %s; nothing to do.', matrix_dir)
        return
    project_name = os.path.basename(matrix_dir)
    logger.debug('[%s] Loaded matrix: %s', project_name, matrix)
    for target in matrix.get('$render', []):
        if (not isinstance(target, str)):
            logger.error('target name needs to be a str, %s(%s) found.', target, type(target))
            continue
        logger.info('[%s] Loading target <%s>...', project_name, target)
        target_cfg = matrix.get(target)
        if (not target_cfg):
            logger.error('[%s] target (%s) configuration not found!', project_name, target)
            continue
        target_dir = os.path.join(matrix_dir, target)
        if (not os.path.isdir(target_dir)):
            os.mkdir(target_dir)
        render_target(jinja2_env, target_dir, project_name, target, target_cfg)
def counter():
    """Decorator factory: counts invocations and passes the running call number
    to the wrapped function.

    NOTE(review): the wrapped function is invoked as func(self, value) — the
    caller's positional/keyword arguments are intentionally dropped and replaced
    by the running count; confirm this matches the call sites.
    """
    def decorator(func):
        # Shared mutable call counter, stored on the decorator closure itself.
        decorator.count = 0
        # NOTE(review): the next two lines look like decorator lines whose '@'
        # prefix (and part of the names) was lost in extraction — possibly
        # '@wraps(func)' and '@synthetic()'. As written, '(func)' is a no-op
        # expression and '_synthetic()' would raise NameError at decoration time.
        (func)
        _synthetic()
        def decorator_inner(self, *args, **kw):
            decorator.count = (decorator.count + 1)
            value = decorator.count
            return func(self, value)
        return decorator_inner
    return decorator
class ACES20651(sRGB):
    """ACES 2065-1 colour space: linear, HDR-capable, converting via XYZ D65."""
    BASE = 'xyz-d65'
    NAME = 'aces2065-1'
    SERIALIZE = ('--aces2065-1',)
    # ACES white point (approximately D60).
    WHITE = (0.32168, 0.33767)
    # 65504 is the largest finite half-precision (float16) value.
    CHANNELS = (Channel('r', 0.0, 65504.0, bound=True), Channel('g', 0.0, 65504.0, bound=True), Channel('b', 0.0, 65504.0, bound=True))
    DYNAMIC_RANGE = 'hdr'

    def to_base(self, coords: Vector) -> Vector:
        """Convert ACES 2065-1 coordinates to XYZ D65."""
        return aces_to_xyz(coords)

    def from_base(self, coords: Vector) -> Vector:
        """Convert XYZ D65 coordinates to ACES 2065-1."""
        return xyz_to_aces(coords)
def extractMoonjellyfishtranslationWordpressCom(item):
    """Map a feed item from moonjellyfishtranslation.wordpress.com to a release
    message; None for skipped items, False when no known tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_to_release = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_to_release.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class FileDownloader():
    """Thin facade over the download handler configured for a given context."""

    def __init__(self, context: str='default'):
        self.download_handles = getDownloadHandles()
        if context not in self.download_handles:
            raise RuntimeError(f'No configuration found for {context}')
        # Instantiate the handler class registered under this context.
        self.downloader = self.download_handles[context]()

    def downloadFile(self, file, blob=None):
        # Delegate straight to the context-specific handler.
        return self.downloader.downloadFile(file, blob=blob)

    def getDownloader(self):
        """Expose the underlying handler instance."""
        return self.downloader
def get_mime_for_text_file(filename: str) -> str:
    """Best-effort MIME/icon type for a text file name.

    Lookup order: special file names, extension map, icon-theme style
    '<prefix>-<suffix>' names, then 'text/plain' as the fallback.
    """
    lowered = filename.lower()
    if lowered in SPECIAL_FILES:
        return SPECIAL_FILES[lowered]
    suffix = Path(filename).suffix.lstrip('.').lower()
    if not suffix:
        return 'text/plain'
    if suffix in EXTENSION_TO_MIME:
        return EXTENSION_TO_MIME[suffix]
    # Try icon-theme style names (e.g. 'text-x-<suffix>') before giving up.
    for prefix in ('text', 'text-x', 'application', 'application-x'):
        candidate = f'{prefix}-{suffix}'
        if candidate in MIME_TO_ICON_PATH:
            return candidate
    return 'text/plain'
def make_suggester(response_parser: Callable, labels: List[str], prompt_path: Path, prompt_example_class: Optional[PromptExample]=None, model: str='text-davinci-003', **kwargs) -> OpenAISuggester:
    """Construct an OpenAISuggester with sensible defaults.

    Credentials are fetched for *model* when not supplied; other settings fall
    back to defaults only when absent from **kwargs.
    """
    if ('openai_api_key' not in kwargs) or ('openai_api_org' not in kwargs):
        (api_key, api_org) = get_api_credentials(model)
        kwargs.setdefault('openai_api_key', api_key)
        kwargs.setdefault('openai_api_org', api_org)
    kwargs.setdefault('max_examples', 0)
    # load_template may do I/O, so only call it when actually needed.
    if 'prompt_template' not in kwargs:
        kwargs['prompt_template'] = load_template(prompt_path)
    kwargs.setdefault('segment', False)
    # FIX(review): the original hard-coded 'text-davinci-003' here, silently
    # ignoring the *model* argument (which was already used for credentials);
    # honor the requested model instead.
    kwargs.setdefault('openai_model', model)
    return OpenAISuggester(response_parser=response_parser, labels=labels, prompt_example_class=prompt_example_class, **kwargs)
class OptionSeriesScatter3dPointEvents(Options):
    """Per-point event callbacks for scatter3d series.

    NOTE(review): each event appears twice — a zero-argument getter followed by
    a one-argument setter. These were almost certainly '@property' /
    '@<name>.setter' pairs whose decorator lines were lost in extraction; as
    written, the second definition shadows the first. Confirm against the
    original generated options module.
    """

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        # Set the 'click' event handler.
        self._config(value, js_type=False)

    def drag(self):
        return self._config_get(None)

    def drag(self, value: Any):
        # Set the 'drag' event handler.
        self._config(value, js_type=False)

    def dragStart(self):
        return self._config_get(None)

    def dragStart(self, value: Any):
        # Set the 'dragStart' event handler.
        self._config(value, js_type=False)

    def drop(self):
        return self._config_get(None)

    def drop(self, value: Any):
        # Set the 'drop' event handler.
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        # Set the 'mouseOut' event handler.
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        # Set the 'mouseOver' event handler.
        self._config(value, js_type=False)

    def remove(self):
        return self._config_get(None)

    def remove(self, value: Any):
        # Set the 'remove' event handler.
        self._config(value, js_type=False)

    def select(self):
        return self._config_get(None)

    def select(self, value: Any):
        # Set the 'select' event handler.
        self._config(value, js_type=False)

    def unselect(self):
        return self._config_get(None)

    def unselect(self, value: Any):
        # Set the 'unselect' event handler.
        self._config(value, js_type=False)

    def update(self):
        return self._config_get(None)

    def update(self, value: Any):
        # Set the 'update' event handler.
        self._config(value, js_type=False)
def get_details(session):
    """Collect deployment/usage details for the dashboard's details view."""
    import json
    from flask_monitoringdashboard import loc
    # The installed dashboard version is stored in a bundled constants file.
    with open(loc() + 'constants.json', 'r') as fh:
        dashboard_version = json.load(fh)['version']
    return {
        'link': config.link,
        'dashboard-version': dashboard_version,
        'config-version': config.version,
        'first-request': get_date_of_first_request(session),
        'first-request-version': get_date_of_first_request_version(session, config.version),
        'total-requests': count_total_requests(session),
    }
class DetectWrongState(hass.Hass):
    """AppDaemon app: when a trigger entity reaches a given state, verify that
    configured entities are in their expected on/off states, correct them where
    possible and notify the user."""

    def initialize(self):
        self.listen_state_handle_list = []
        self.app_switch = self.args['app_switch']
        # Comma-separated entity lists; absent keys mean "nothing to check".
        try:
            self.entities_on = self.args['entities_on'].split(',')
        except KeyError:
            self.entities_on = []
        try:
            self.entities_off = self.args['entities_off'].split(',')
        except KeyError:
            self.entities_off = []
        self.after_sundown = self.args.get('after_sundown')
        self.trigger_entity = self.args['trigger_entity']
        self.trigger_state = self.args['trigger_state']
        self.message = self.args.get('message')
        self.message_off = self.args.get('message_off')
        # NOTE(review): check_entities_should_be_on() references
        # 'self.message_on', which is never initialized here — only
        # 'message'/'message_off' are set. That branch will raise
        # AttributeError; confirm which config key was intended.
        self.message_reed = self.args.get('message_reed')
        self.message_reed_off = self.args.get('message_reed_off')
        self.notify_name = self.args.get('notify_name')
        self.use_alexa = self.args.get('use_alexa')
        self.notifier = self.get_app('Notifier')
        self.listen_state_handle_list.append(self.listen_state(self.state_change, self.trigger_entity))

    def state_change(self, entity, attribute, old, new, kwargs):
        # Only act when the app is enabled and the trigger state is reached.
        if (self.get_state(self.app_switch) == 'on'):
            if ((new != '') and (new == self.trigger_state)):
                # NOTE(review): '(self.after_sundown is not False)' makes this
                # condition True for any truthy after_sundown even before
                # sundown, defeating the sun_down() check — confirm intent
                # (possibly 'self.after_sundown is False' was meant).
                if ((self.after_sundown is None) or ((self.after_sundown and self.sun_down()) or (self.after_sundown is not False))):
                    self.check_entities_should_be_off()
                    self.check_entities_should_be_on()

    def check_entities_should_be_off(self):
        """Turn off (or just report) entities that should be off but are not."""
        off_states = ['off', 'unavailable', 'paused', 'standby']
        for entity in self.entities_off:
            state = self.get_state(entity)
            self.log(f'entity: {entity}')
            if ((state is not None) and (state not in off_states)):
                if self.is_entity_reed_contact(entity):
                    # Reed contacts (doors/windows) cannot be actuated; notify only.
                    message = self.message_reed
                else:
                    self.turn_off(entity)
                    message = self.message
                self.send_notification(message, entity)

    def check_entities_should_be_on(self):
        """Turn on (or just report) entities that should be on but are off."""
        for entity in self.entities_on:
            state = self.get_state(entity)
            if (state == 'off'):
                if self.is_entity_reed_contact(entity):
                    message = self.message_reed_off
                else:
                    self.turn_on(entity)
                    # NOTE(review): 'self.message_on' is never set in
                    # initialize() — see the note there.
                    message = self.message_on
                self.send_notification(message, entity)

    def is_entity_reed_contact(self, entity):
        """True when the entity's device_class marks it as a door/window sensor."""
        reed_types = ['window', 'door', 'garage_door']
        full_state = self.get_state(entity, attribute='all')
        if (full_state is not None):
            attributes = full_state['attributes']
            self.log('full_state: {}'.format(full_state), level='DEBUG')
            if (attributes.get('device_class') in reed_types):
                return True
        return False

    def send_notification(self, message, entity):
        # A None message template means notifications are disabled for this case.
        if (message is not None):
            formatted_message = message.format(self.friendly_name(entity))
            self.log(formatted_message)
            if (self.notify_name is not None):
                self.notifier.notify(self.notify_name, formatted_message, useAlexa=self.use_alexa)

    def terminate(self):
        # Clean up all state listeners when the app is unloaded.
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
class OptionSeriesSunburstLabelStyle(Options):
    """CSS style options for sunburst series labels.

    NOTE(review): each option appears twice — a zero-argument getter (returning
    the default) followed by a one-argument setter. These were almost certainly
    '@property' / '@<name>.setter' pairs whose decorator lines were lost in
    extraction (the second definition shadows the first as written). Confirm
    against the original generated module.
    """

    def fontSize(self):
        # Default font size: '0.8em'.
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        self._config(num, js_type=False)

    def fontWeight(self):
        # Default font weight: 'bold'.
        return self._config_get('bold')

    def fontWeight(self, text: str):
        self._config(text, js_type=False)
def calcEholo_vert(image_coords: np.ndarray, tangents: np.ndarray, thresh: float=0.001):
    """For each image along a path, find a unit-ish vector perpendicular to its
    tangent, chosen consistently along the path.

    Interior images build their perpendicular from the displacements to their
    two neighbours; endpoints (and degenerate, tangent-parallel interiors)
    borrow from an already-resolved neighbour, which is why several sweeps may
    be needed. Finally consecutive vectors are sign-aligned.

    Args:
      image_coords: (nimages, dim) coordinates of the images.
      tangents: (nimages, dim) tangent vector at each image.
      thresh: tolerance deciding when a neighbour displacement counts as
        (anti)parallel to the tangent.

    Returns:
      (tangents_perp, findvertlist): the perpendicular vectors as an array and
      a per-image bool list saying whether one was found. If the sweep cap
      (1000) is hit before convergence, unresolved entries remain None
      (producing an object array) with findvertlist False.
    """
    nimages = len(image_coords)
    tangents_perp = [None for _ in range(nimages)]
    findvertlist = [False for _ in range(nimages)]
    sweep = 0
    while (sweep < 1000):
        sweep += 1
        if all(findvertlist):
            break
        for k in range(nimages):
            if findvertlist[k]:
                continue
            tau = tangents[k]
            if ((k == 0) and findvertlist[1]):
                # First image: project the neighbour's perpendicular off tau.
                # (The original re-tested findvertlist[1] in a redundant nested
                # 'if'; the outer condition already guarantees it.)
                tau_nei = tangents_perp[1]
                a = ((- np.dot(tau_nei, tau)) / np.dot(tau, tau))
                tangents_perp[k] = ((a * tau) + tau_nei)
                findvertlist[k] = True
            elif (k == (nimages - 1)):
                if findvertlist[(- 2)]:
                    tau_nei = tangents_perp[(- 2)]
                    a = ((- np.dot(tau_nei, tau)) / np.dot(tau, tau))
                    tangents_perp[k] = ((a * tau) + tau_nei)
                    findvertlist[k] = True
            else:
                # NOTE(review): when k == 0 and the neighbour is unresolved we
                # fall through to this branch, where image_coords[k - 1] wraps
                # around to the LAST image — confirm this is intended.
                v1 = (image_coords[(k - 1)] - image_coords[k])
                v1 = (v1 / np.linalg.norm(v1))
                v1taudot = np.abs(np.dot(v1, tau))
                v2 = (image_coords[(k + 1)] - image_coords[k])
                v2 = (v2 / np.linalg.norm(v2))
                v2taudot = np.abs(np.dot(v2, tau))
                if (((1.0 - thresh) < v1taudot) and ((1.0 - thresh) < v2taudot)):
                    # Both neighbour displacements are ~parallel to tau:
                    # borrow a resolved neighbour's perpendicular instead.
                    if findvertlist[(k - 1)]:
                        tau_nei = tangents_perp[(k - 1)]
                        a = ((- np.dot(tau_nei, tau)) / np.dot(tau, tau))
                        tangents_perp[k] = ((a * tau) + tau_nei)
                        findvertlist[k] = True
                    elif findvertlist[(k + 1)]:
                        tau_nei = tangents_perp[(k + 1)]
                        a = ((- np.dot(tau_nei, tau)) / np.dot(tau, tau))
                        tangents_perp[k] = ((a * tau) + tau_nei)
                        findvertlist[k] = True
                elif (thresh <= v1taudot):
                    # Combine the two displacements so the tau-component cancels.
                    a = ((- np.dot(v2, tau)) / np.dot(v1, tau))
                    tangents_perp[k] = ((a * v1) + v2)
                    tangents_perp[k] /= np.linalg.norm(tangents_perp[k])
                    findvertlist[k] = True
                elif (thresh <= v2taudot):
                    a = ((- np.dot(v1, tau)) / np.dot(v2, tau))
                    tangents_perp[k] = ((a * v2) + v1)
                    tangents_perp[k] /= np.linalg.norm(tangents_perp[k])
                    findvertlist[k] = True
                else:
                    # Both displacements are already ~perpendicular to tau.
                    tangents_perp[k] = v1
                    findvertlist[k] = True
    # Sign-align consecutive perpendiculars so the field varies continuously.
    for k in range(1, nimages):
        vbefore = tangents_perp[(k - 1)]
        v = tangents_perp[k]
        if (vbefore.dot(v) < 0.0):
            tangents_perp[k] *= (- 1)
    tangents_perp = np.array(tangents_perp)
    return (tangents_perp, findvertlist)
class BaseAttack(UserAction):
    """Basic attack action: *source* attacks *target* for *damage* HP unless grazed."""

    def __init__(self, source, target, damage=1):
        self.source = source
        self.target = target
        self.damage = damage

    def apply_action(self):
        game = self.game
        # Give the target a chance to graze (dodge) first.
        grazed = game.process_action(LaunchGraze(self.target))
        emitter, hit = game.emit_event('attack_aftergraze', (self, not grazed))
        assert emitter is self
        assert hit in (False, True)
        if not hit:
            return False
        game.process_action(Damage(self.source, self.target, amount=self.damage))
        return True

    def is_valid(self):
        # An attack only makes sense against a living target.
        return not self.target.dead
def _combine_results(*results, average=True):
if (not results):
return None
elif (len(results) == 1):
return results[0]
combined = {k: 0 for k in KINDS}
for result in results:
for kind in KINDS:
combined[kind] += result[kind]
if average:
for kind in KINDS:
combined[kind] /= len(results)
return combined |
class AppModule:
    """A mountable application sub-unit grouping routes, templates and
    statics under a common name, URL prefix, hostname, pipeline and
    injectors.

    NOTE(review): this restores the ``@classmethod`` and ``@property``
    decorators that the rendered source had lost. Without them,
    ``module_class.from_module(self, ...)`` shifts every argument by one,
    the ``pipeline``/``injectors`` getters are shadowed by their setters,
    and ``self.pipeline = []`` in ``__init__`` never composes with the
    inherited pipeline.
    """

    @classmethod
    def from_app(
        cls,
        app: App,
        import_name: str,
        name: str,
        template_folder: Optional[str],
        template_path: Optional[str],
        static_folder: Optional[str],
        static_path: Optional[str],
        url_prefix: Optional[str],
        hostname: Optional[str],
        cache: Optional[RouteCacheRule],
        root_path: Optional[str],
        pipeline: List[Pipe],
        injectors: List[Injector],
        opts: Optional[Dict[str, Any]] = None
    ):
        """Build a module mounted directly on *app*."""
        return cls(
            app, name, import_name,
            template_folder=template_folder,
            template_path=template_path,
            static_folder=static_folder,
            static_path=static_path,
            url_prefix=url_prefix,
            hostname=hostname,
            cache=cache,
            root_path=root_path,
            pipeline=pipeline,
            injectors=injectors,
            **(opts or {})
        )

    @classmethod
    def from_module(
        cls,
        appmod: AppModule,
        import_name: str,
        name: str,
        template_folder: Optional[str],
        template_path: Optional[str],
        static_folder: Optional[str],
        static_path: Optional[str],
        url_prefix: Optional[str],
        hostname: Optional[str],
        cache: Optional[RouteCacheRule],
        root_path: Optional[str],
        opts: Optional[Dict[str, Any]] = None
    ):
        """Build a module nested under *appmod*, inheriting its prefix,
        hostname, cache, pipeline and injectors."""
        if '.' in name:
            raise RuntimeError("Nested app modules' names should not contains dots")
        name = appmod.name + '.' + name
        # Normalise the child prefix, then chain it onto the parent's.
        if url_prefix and not url_prefix.startswith('/'):
            url_prefix = '/' + url_prefix
        module_url_prefix = (
            (appmod.url_prefix + (url_prefix or ''))
            if appmod.url_prefix else url_prefix
        )
        hostname = hostname or appmod.hostname
        cache = cache or appmod.cache
        return cls(
            appmod.app, name, import_name,
            template_folder=template_folder,
            template_path=template_path,
            static_folder=static_folder,
            static_path=static_path,
            url_prefix=module_url_prefix,
            hostname=hostname,
            cache=cache,
            root_path=root_path,
            pipeline=appmod.pipeline,
            injectors=appmod.injectors,
            **(opts or {})
        )

    @classmethod
    def from_module_group(
        cls,
        appmodgroup: AppModuleGroup,
        import_name: str,
        name: str,
        template_folder: Optional[str],
        template_path: Optional[str],
        static_folder: Optional[str],
        static_path: Optional[str],
        url_prefix: Optional[str],
        hostname: Optional[str],
        cache: Optional[RouteCacheRule],
        root_path: Optional[str],
        opts: Optional[Dict[str, Any]] = None
    ) -> AppModulesGrouped:
        """Build one nested module per member of *appmodgroup* and regroup."""
        mods = [
            cls.from_module(
                module, import_name, name,
                template_folder=template_folder,
                template_path=template_path,
                static_folder=static_folder,
                static_path=static_path,
                url_prefix=url_prefix,
                hostname=hostname,
                cache=cache,
                root_path=root_path,
                opts=opts
            )
            for module in appmodgroup.modules
        ]
        return AppModulesGrouped(*mods)

    def module(
        self,
        import_name: str,
        name: str,
        template_folder: Optional[str] = None,
        template_path: Optional[str] = None,
        static_folder: Optional[str] = None,
        static_path: Optional[str] = None,
        url_prefix: Optional[str] = None,
        hostname: Optional[str] = None,
        cache: Optional[RouteCacheRule] = None,
        root_path: Optional[str] = None,
        module_class: Optional[Type[AppModule]] = None,
        **kwargs: Any
    ) -> AppModule:
        """Create a child module nested under this one."""
        module_class = module_class or self.__class__
        return module_class.from_module(
            self, import_name, name,
            template_folder=template_folder,
            template_path=template_path,
            static_folder=static_folder,
            static_path=static_path,
            url_prefix=url_prefix,
            hostname=hostname,
            cache=cache,
            root_path=root_path,
            opts=kwargs
        )

    def __init__(
        self,
        app: App,
        name: str,
        import_name: str,
        template_folder: Optional[str] = None,
        template_path: Optional[str] = None,
        static_folder: Optional[str] = None,
        static_path: Optional[str] = None,
        url_prefix: Optional[str] = None,
        hostname: Optional[str] = None,
        cache: Optional[RouteCacheRule] = None,
        root_path: Optional[str] = None,
        pipeline: Optional[List[Pipe]] = None,
        injectors: Optional[List[Injector]] = None,
        **kwargs: Any
    ):
        self.app = app
        self.name = name
        self.import_name = import_name
        # Resolve relative template/static paths against the package root.
        if root_path is None:
            root_path = get_root_path(self.import_name)
        self.root_path = root_path
        self.template_folder = template_folder
        if template_path and not template_path.startswith('/'):
            template_path = os.path.join(self.root_path, template_path)
        self.template_path = template_path
        if static_path and not static_path.startswith('/'):
            static_path = os.path.join(self.root_path, static_path)
        self._static_path = (
            os.path.join(self.app.static_path, static_folder) if static_folder
            else (static_path or self.app.static_path)
        )
        self.url_prefix = url_prefix
        self.hostname = hostname
        self.cache = cache
        self._super_pipeline = pipeline or []
        self._super_injectors = injectors or []
        # These assignments go through the property setters below, composing
        # the module's own pipes/injectors with the inherited ones.
        self.pipeline = []
        self.injectors = []
        self.app._register_module(self)

    @property
    def pipeline(self) -> List[Pipe]:
        return self._pipeline

    @pipeline.setter
    def pipeline(self, pipeline: List[Pipe]):
        # Module pipes always run after the pipes inherited from the parent.
        self._pipeline = self._super_pipeline + pipeline

    @property
    def injectors(self) -> List[Injector]:
        return self._injectors

    @injectors.setter
    def injectors(self, injectors: List[Injector]):
        self._injectors = self._super_injectors + injectors

    def route(
        self,
        paths: Optional[Union[str, List[str]]] = None,
        name: Optional[str] = None,
        template: Optional[str] = None,
        **kwargs
    ) -> RoutingCtx:
        """Register route(s) on the app, scoped by this module's prefix,
        templates, hostname, pipeline, injectors and cache rule."""
        if name is not None and '.' in name:
            raise RuntimeError("App modules' route names should not contains dots")
        name = self.name + '.' + (name or '')
        pipeline = kwargs.get('pipeline', [])
        injectors = kwargs.get('injectors', [])
        if self.pipeline:
            pipeline = self.pipeline + pipeline
        kwargs['pipeline'] = pipeline
        if self.injectors:
            injectors = self.injectors + injectors
        kwargs['injectors'] = injectors
        kwargs['cache'] = kwargs.get('cache', self.cache)
        return self.app.route(
            paths=paths,
            name=name,
            template=template,
            prefix=self.url_prefix,
            template_folder=self.template_folder,
            template_path=self.template_path,
            hostname=self.hostname,
            **kwargs
        )

    def websocket(
        self,
        paths: Optional[Union[str, List[str]]] = None,
        name: Optional[str] = None,
        **kwargs
    ) -> RoutingCtx:
        """Register websocket route(s), scoped by this module's settings."""
        if name is not None and '.' in name:
            raise RuntimeError("App modules' websocket names should not contains dots")
        name = self.name + '.' + (name or '')
        pipeline = kwargs.get('pipeline', [])
        if self.pipeline:
            pipeline = self.pipeline + pipeline
        kwargs['pipeline'] = pipeline
        return self.app.websocket(
            paths=paths,
            name=name,
            prefix=self.url_prefix,
            hostname=self.hostname,
            **kwargs
        )
def download_test_datasets(force: bool):
    """Ensure the local ``datasets`` directory exists, then fetch/verify each
    known test dataset via ``check_dataset`` (``force`` re-downloads)."""
    datasets_path = os.path.abspath('datasets')
    logging.info('Check datasets directory %s', datasets_path)
    if os.path.exists(datasets_path):
        logging.info('Datasets directory already exists')
    else:
        logging.info('Create datasets directory %s', datasets_path)
        os.makedirs(datasets_path)
    names = ('bike_random_forest', 'bike_gradient_boosting', 'kdd_k_neighbors_classifier')
    for name in names:
        check_dataset(force, datasets_path, name)
class DominoesTest(unittest.TestCase):
def test_empty_input_empty_output(self):
input_dominoes = []
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain)
def test_singleton_input_singleton_output(self):
input_dominoes = [(1, 1)]
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain)
def test_singleton_that_can_t_be_chained(self):
input_dominoes = [(1, 2)]
output_chain = can_chain(input_dominoes)
self.refute_correct_chain(input_dominoes, output_chain)
def test_three_elements(self):
input_dominoes = [(1, 2), (3, 1), (2, 3)]
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain)
def test_can_reverse_dominoes(self):
input_dominoes = [(1, 2), (1, 3), (2, 3)]
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain)
def test_can_t_be_chained(self):
input_dominoes = [(1, 2), (4, 1), (2, 3)]
output_chain = can_chain(input_dominoes)
self.refute_correct_chain(input_dominoes, output_chain)
def test_disconnected_simple(self):
input_dominoes = [(1, 1), (2, 2)]
output_chain = can_chain(input_dominoes)
self.refute_correct_chain(input_dominoes, output_chain)
def test_disconnected_double_loop(self):
input_dominoes = [(1, 2), (2, 1), (3, 4), (4, 3)]
output_chain = can_chain(input_dominoes)
self.refute_correct_chain(input_dominoes, output_chain)
def test_disconnected_single_isolated(self):
input_dominoes = [(1, 2), (2, 3), (3, 1), (4, 4)]
output_chain = can_chain(input_dominoes)
self.refute_correct_chain(input_dominoes, output_chain)
def test_need_backtrack(self):
input_dominoes = [(1, 2), (2, 3), (3, 1), (2, 4), (2, 4)]
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain)
def test_separate_loops(self):
input_dominoes = [(1, 2), (2, 3), (3, 1), (1, 1), (2, 2), (3, 3)]
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain)
def test_nine_elements(self):
input_dominoes = [(1, 2), (5, 3), (3, 1), (1, 2), (2, 4), (1, 6), (2, 3), (3, 4), (5, 6)]
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain)
def test_separate_three_domino_loops(self):
input_dominoes = [(1, 2), (2, 3), (3, 1), (4, 5), (5, 6), (6, 4)]
output_chain = can_chain(input_dominoes)
self.refute_correct_chain(input_dominoes, output_chain)
def normalize_dominoes(self, dominoes):
return list(sorted((tuple(sorted(domino)) for domino in dominoes)))
def assert_same_dominoes(self, input_dominoes, output_chain):
msg = 'Dominoes used in the output must be the same as the ones given in the input'
input_normal = self.normalize_dominoes(input_dominoes)
output_normal = self.normalize_dominoes(output_chain)
self.assertEqual(input_normal, output_normal, msg)
def assert_consecutive_dominoes_match(self, output_chain):
for i in range((len(output_chain) - 1)):
msg = 'In chain {}, right end of domino {} ({}) and left end of domino {} ({}) must match'
msg = msg.format(output_chain, i, output_chain[i], (i + 1), output_chain[(i + 1)])
self.assertEqual(output_chain[i][1], output_chain[(i + 1)][0], msg)
def assert_dominoes_at_ends_match(self, output_chain):
msg = 'In chain {}, left end of first domino ({}) and right end of last domino ({}) must match'
msg = msg.format(output_chain, output_chain[0], output_chain[(- 1)])
self.assertEqual(output_chain[0][0], output_chain[(- 1)][1], msg)
def assert_correct_chain(self, input_dominoes, output_chain):
msg = 'There should be a chain for {}'.format(input_dominoes)
self.assertIsNotNone(output_chain, msg)
self.assert_same_dominoes(input_dominoes, output_chain)
if (not any(output_chain)):
return
self.assert_consecutive_dominoes_match(output_chain)
self.assert_dominoes_at_ends_match(output_chain)
def refute_correct_chain(self, input_dominoes, output_chain):
msg = 'There should be no valid chain for {}'.format(input_dominoes)
self.assertIsNone(output_chain, msg) |
class getOptions_result():
    """Thrift-generated result wrapper for the `getOptions` RPC.

    Carries the call's return value in `success` (field id 0), a
    map<string, string>. Generated code: the exact read/write sequence
    below defines the wire format -- do not hand-edit.
    """
    # These class attributes are filled in later by the generated module's
    # fix-up code (thrift_spec etc. are None only at class-creation time).
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None
    def isUnion():
        # Generated marker: this struct is not a Thrift union.
        return False
    def read(self, iprot):
        """Deserialize fields from the protocol `iprot` into `self`."""
        # Fast path 1: C-accelerated decode for the (accelerated) binary protocol.
        if ((isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            self.checkRequired()
            return
        # Fast path 2: C-accelerated decode for the (accelerated) compact protocol.
        if ((isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            self.checkRequired()
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if (ftype == TType.STOP):
                break
            if (fid == 0):
                # Field 0 is `success`: map<string, string>.
                if (ftype == TType.MAP):
                    self.success = {}
                    (_ktype12, _vtype13, _size11) = iprot.readMapBegin()
                    if (_size11 >= 0):
                        # Size known up front: read exactly _size11 entries.
                        for _i15 in six.moves.range(_size11):
                            _key16 = (iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString())
                            _val17 = (iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString())
                            self.success[_key16] = _val17
                    else:
                        # Unknown size: read pairs while the protocol reports more.
                        while iprot.peekMap():
                            _key18 = (iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString())
                            _val19 = (iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString())
                            self.success[_key18] = _val19
                    iprot.readMapEnd()
                else:
                    # Type mismatch: skip the value for forward compatibility.
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.checkRequired()
    def checkRequired(self):
        # A result struct has no required fields; nothing to validate.
        return
    def write(self, oprot):
        """Serialize `self` to the protocol `oprot`."""
        # Fast paths mirror read(): C-accelerated binary / compact encode.
        if ((isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if ((isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        # Slow path: generic encoding; unset (None) fields are omitted.
        oprot.writeStructBegin('getOptions_result')
        if (self.success != None):
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
            for (kiter20, viter21) in self.success.items():
                (oprot.writeString(kiter20.encode('utf-8')) if (UTF8STRINGS and (not isinstance(kiter20, bytes))) else oprot.writeString(kiter20))
                (oprot.writeString(viter21.encode('utf-8')) if (UTF8STRINGS and (not isinstance(viter21, bytes))) else oprot.writeString(viter21))
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def __repr__(self):
        L = []
        padding = (' ' * 4)
        value = pprint.pformat(self.success, indent=0)
        value = padding.join(value.splitlines(True))
        L.append((' success=%s' % value))
        return ('%s(\n%s)' % (self.__class__.__name__, ',\n'.join(L)))
    def __eq__(self, other):
        # Value equality: same class and identical attribute dicts.
        if (not isinstance(other, self.__class__)):
            return False
        return (self.__dict__ == other.__dict__)
    def __ne__(self, other):
        return (not (self == other))
    if (not six.PY2):
        # Defining __eq__ removes the default __hash__ on Python 3;
        # restore identity-based hashing as the generator intends.
        __hash__ = object.__hash__
class OptionXaxisPlotbandsLabel(Options):
    """Text label options for an x-axis plot band.

    Restores the ``@property``/``@<name>.setter`` decorators: without them
    each same-named setter ``def`` silently replaced its getter, leaving
    every getter unreachable.
    """

    @property
    def align(self):
        """Horizontal alignment of the label. Defaults to 'center'."""
        return self._config_get('center')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        """Rotation of the label in degrees. Defaults to 0."""
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def style(self):
        """Styles for the label."""
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def text(self):
        """The label text itself."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def textAlign(self):
        """Alignment of the text within the label."""
        return self._config_get(None)

    @textAlign.setter
    def textAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def useHTML(self):
        """Whether to render the label as HTML. Defaults to False."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        """Vertical alignment of the label. Defaults to 'top'."""
        return self._config_get('top')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """Horizontal position offset."""
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Vertical position offset."""
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def test_root_mount():
    """Mounting Admin at '/' must serve views and admin static files."""
    app = Flask(__name__)
    admin = base.Admin(app, url='/')
    admin.add_view(MockView())
    client = app.test_client()
    response = client.get('/mockview/')
    assert response.data == b'Success!'
    # Static assets must still resolve when the admin is mounted at root.
    with app.test_request_context('/'):
        static_url = url_for('admin.static', filename='bootstrap/bootstrap2/css/bootstrap.css')
        response = client.get(static_url)
        assert response.status_code == 200
@pytest.mark.usefixtures('use_tmpdir')
def test_that_unicode_decode_error_is_localized_first_line():
    """A non-UTF-8 byte on the first line of the config must yield exactly
    one validation error located at line 1.

    The bare ``.usefixtures(...)`` line was a stripped decorator (a syntax
    error as written); restored as ``@pytest.mark.usefixtures``.
    """
    # Write an invalid UTF-8 byte as the very first byte of the file,
    # followed by otherwise-problematic (but decodable) config lines.
    with open('test.ert', 'ab') as f:
        f.write(b'\xff')
        f.write(bytes(dedent('\n QUEUE_OPTION DOCAL MAX_RUNNING 4\n STOP_LONG_RUNNING flase\n NUM_REALIZATIONS not_int\n ENKF_ALPHA not_float\n RUN_TEMPLATE dsajldkald/sdjkahsjka/wqehwqhdsa\n JOB_SCRIPT dnsjklajdlksaljd/dhs7sh/qhwhe\n JOB_SCRIPT non_executable_file\n NUM_REALIZATIONS 1 2 3 4 5\n NUM_REALIZATIONS\n '), 'utf-8'))
    with pytest.raises(ConfigValidationError, match="Unsupported non UTF-8 character 'y' found in file: .*test.ert") as caught_error:
        ErtConfig.from_file('test.ert')
    # Only the encoding error should be reported, pinned to the first line.
    collected_errors = caught_error.value.errors
    assert len(collected_errors) == 1
    assert collected_errors[0].line == 1
class hello_failed_error_msg(error_msg):
    """Generated protocol class for a hello-failed error message.

    Wire layout (big-endian): version(1B) | type(1B) | length(2B) |
    xid(4B) | err_type(2B) | code(2B) | data.

    NOTE(review): pack() joins struct.pack() results with ''.join, i.e.
    Python-2 string semantics; on Python 3 this would mix str and bytes.
    Confirm the target runtime before reusing this code.
    """
    version = 4  # wire protocol version byte (4 corresponds to OpenFlow 1.3)
    type = 1  # message type byte for this message class
    err_type = 0  # error-type field value asserted on unpack
    def __init__(self, xid=None, code=None, data=None):
        # Each field falls back to its generated default when not supplied.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return
    def pack(self):
        """Serialize to the wire format; the length field is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder (index 2)
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        # Total message length includes the header; overwrite the placeholder.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse one message from `reader` and return the populated object.

        NOTE(review): defined without `self` -- presumably carried a
        @staticmethod decorator in the generator's output; invoke via the
        class, e.g. hello_failed_error_msg.unpack(reader).
        """
        obj = hello_failed_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length
        # (4 = bytes already consumed before the length field's region).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 0)
        obj.code = reader.read('!H')[0]
        # Everything after the fixed header is opaque payload data.
        obj.data = str(reader.read_all())
        return obj
    def __eq__(self, other):
        # Value equality over all wire-visible fields (exact type required).
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump onto the pretty-printer `q`."""
        q.text('hello_failed_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Show the symbolic name when the code value is known.
                value_name_map = {0: 'OFPHFC_INCOMPATIBLE', 1: 'OFPHFC_EPERM'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.