code
stringlengths
281
23.7M
class FakeOFTable():
    """Fake OpenFlow switch flow table.

    Applies ryu OpenFlow messages (flow mods, group mods, table features)
    to an in-memory set of tables so that controller output can be checked
    without a real switch.

    FIX: the two ``mask_compl`` expressions below arrived as the broken
    token stream ``(mask ^ )``; the lost operand is restored as the full
    64-bit complement constant (OpenFlow metadata is a 64-bit field).
    """

    def __init__(self, dp_id, num_tables=1, requires_tfm=True):
        self.dp_id = dp_id
        self.tables = [[] for _ in range(0, num_tables)]
        self.groups = {}
        # When True, flowmods are rejected until a table-features message
        # (TFM) has described the table they target.
        self.requires_tfm = requires_tfm
        self.tfm = {}

    def table_state(self):
        # Returns (hash, repr) of the tables; the hash is over the set of
        # characters of the repr, so it is order-insensitive but cheap.
        table_str = str(self.tables)
        return (hash(frozenset(table_str)), table_str)

    def __hash__(self):
        return hash(frozenset(str(self.tables)))

    def _apply_groupmod(self, ofmsg):
        """Apply an OFPGroupMod (add/modify/delete) to the group table."""

        def _del(_ofmsg, group_id):
            if group_id == ofp.OFPG_ALL:
                self.groups = {}
                return
            if group_id in self.groups:
                del self.groups[group_id]

        def _add(ofmsg, group_id):
            if group_id in self.groups:
                raise FakeOFTableException('group already in group table: %s' % ofmsg)
            self.groups[group_id] = ofmsg

        def _modify(ofmsg, group_id):
            if group_id not in self.groups:
                raise FakeOFTableException('group not in group table: %s' % ofmsg)
            self.groups[group_id] = ofmsg

        _groupmod_handlers = {
            ofp.OFPGC_DELETE: _del,
            ofp.OFPGC_ADD: _add,
            ofp.OFPGC_MODIFY: _modify,
        }
        _groupmod_handlers[ofmsg.command](ofmsg, ofmsg.group_id)

    def _apply_flowmod(self, ofmsg):
        """Apply an OFPFlowMod to the affected table(s)."""

        def _validate_flowmod_tfm(table_id, tfm_body, ofmsg):
            # Enforce that a TFM describing the target table was seen first.
            if not self.requires_tfm:
                return
            if table_id == ofp.OFPTT_ALL:
                if ofmsg.match.items() and not self.tfm:
                    raise FakeOFTableException(
                        'got %s with matches before TFM that defines tables' % ofmsg)
                return
            if tfm_body is None:
                raise FakeOFTableException(
                    'got %s before TFM that defines table %u' % (ofmsg, table_id))

        def _add(table, flowmod):
            # Replace a strict-identical entry; reject overlapping entries.
            for fte in table:
                if flowmod.fte_matches(fte, strict=True):
                    table.remove(fte)
                    break
                if flowmod.overlaps(fte):
                    raise FakeOFTableException(
                        'Overlapping flowmods {} and {}'.format(flowmod, fte))
            table.append(flowmod)

        def _del(table, flowmod):
            removals = [fte for fte in table if flowmod.fte_matches(fte)]
            for fte in removals:
                table.remove(fte)

        def _del_strict(table, flowmod):
            for fte in table:
                if flowmod.fte_matches(fte, strict=True):
                    table.remove(fte)
                    break

        def _modify(table, flowmod):
            for fte in table:
                if flowmod.fte_matches(fte):
                    fte.instructions = flowmod.instructions

        def _modify_strict(table, flowmod):
            for fte in table:
                if flowmod.fte_matches(fte, strict=True):
                    fte.instructions = flowmod.instructions
                    break

        _flowmod_handlers = {
            ofp.OFPFC_ADD: _add,
            ofp.OFPFC_DELETE: _del,
            ofp.OFPFC_DELETE_STRICT: _del_strict,
            ofp.OFPFC_MODIFY: _modify,
            ofp.OFPFC_MODIFY_STRICT: _modify_strict,
        }
        table_id = ofmsg.table_id
        tfm_body = self.tfm.get(table_id, None)
        if table_id == ofp.OFPTT_ALL or table_id is None:
            tables = self.tables
        else:
            tables = [self.tables[table_id]]
        _validate_flowmod_tfm(table_id, tfm_body, ofmsg)
        flowmod = FlowMod(ofmsg)
        for table in tables:
            _flowmod_handlers[ofmsg.command](table, flowmod)
        if tfm_body:
            # Enforce the advertised table capacity.
            for table in tables:
                entries = len(table)
                if entries > tfm_body.max_entries:
                    tfm_table_details = '%s : table %u %s full (%u/%u)' % (
                        self.dp_id, table_id, tfm_body.name, entries, tfm_body.max_entries)
                    flow_dump = '\n\n'.join((tfm_table_details, str(ofmsg), str(tfm_body)))
                    raise FakeOFTableException(flow_dump)

    def _apply_tfm(self, ofmsg):
        """Record table features, keyed by table id."""
        self.tfm = {body.table_id: body for body in ofmsg.body}

    def _apply_flowstats(self, ofmsg):
        """Reset tables and replay a flow stats reply into them."""
        self.tables = []
        self.requires_tfm = False
        self.tfm = {}
        for stat in ofmsg.body:
            while len(self.tables) <= stat.table_id:
                self.tables.append([])
            self.tables[stat.table_id].append(FlowMod(stat))

    def apply_ofmsgs(self, ofmsgs, ignore_errors=False):
        """Update state with provided OF messages; unknown types raise
        unless ignore_errors is set."""
        for ofmsg in ofmsgs:
            try:
                if isinstance(ofmsg, parser.OFPBarrierRequest):
                    continue
                if isinstance(ofmsg, parser.OFPPacketOut):
                    continue
                if isinstance(ofmsg, parser.OFPSetConfig):
                    continue
                if isinstance(ofmsg, parser.OFPSetAsync):
                    continue
                if isinstance(ofmsg, parser.OFPDescStatsRequest):
                    continue
                if isinstance(ofmsg, parser.OFPMeterMod):
                    # TODO: track meters.
                    continue
                if isinstance(ofmsg, parser.OFPTableFeaturesStatsRequest):
                    self._apply_tfm(ofmsg)
                    continue
                if isinstance(ofmsg, parser.OFPGroupMod):
                    self._apply_groupmod(ofmsg)
                    continue
                if isinstance(ofmsg, parser.OFPFlowMod):
                    self._apply_flowmod(ofmsg)
                    self.sort_tables()
                    continue
                if isinstance(ofmsg, parser.OFPFlowStatsReply):
                    self._apply_flowstats(ofmsg)
                    self.sort_tables()
                    continue
            except FakeOFTableException:
                if not ignore_errors:
                    raise
            if not ignore_errors:
                raise FakeOFTableException('Unsupported flow %s' % str(ofmsg))

    def single_table_lookup(self, match, table_id, trace=False):
        """Return the first (highest-priority after sort) FTE matching
        the packet in one table, or None."""
        packet_dict = match.copy()
        table = self.tables[table_id]
        matching_fte = None
        for fte in table:
            if fte.pkt_matches(packet_dict):
                matching_fte = fte
                break
        if trace:
            sys.stderr.write('%s: %s\n' % (table_id, matching_fte))
        return matching_fte

    def _process_instruction(self, match, instruction):
        """Apply one apply-actions instruction (or group bucket) to a packet.

        Returns (outputs, packet_dict, pending_actions) where outputs maps
        port -> list of packets as they looked when output, and
        pending_actions are actions applied after the last output.
        """
        outputs = OrderedDict()
        packet_dict = match.copy()
        pending_actions = []
        for action in instruction.actions:
            if action.type == ofp.OFPAT_OUTPUT:
                outputs.setdefault(action.port, [])
                outputs[action.port].append(packet_dict.copy())
                pending_actions = []
                continue
            pending_actions.append(action)
            if action.type == ofp.OFPAT_SET_FIELD:
                packet_dict[action.key] = action.value
            elif action.type == ofp.OFPAT_PUSH_VLAN:
                # Remember an existing tag so a later pop can restore it.
                if 'vlan_vid' in packet_dict and packet_dict['vlan_vid'] & ofp.OFPVID_PRESENT:
                    packet_dict['encap_vid'] = packet_dict['vlan_vid']
                packet_dict['vlan_vid'] = ofp.OFPVID_PRESENT
            elif action.type == ofp.OFPAT_POP_VLAN:
                packet_dict.pop('vlan_vid')
                if 'vlan_pcp' in packet_dict:
                    packet_dict.pop('vlan_pcp')
                if 'encap_vid' in packet_dict:
                    packet_dict['vlan_vid'] = packet_dict['encap_vid']
                    packet_dict.pop('encap_vid')
                else:
                    packet_dict['vlan_vid'] = 0
            elif action.type == ofp.OFPAT_GROUP:
                if action.group_id not in self.groups:
                    raise FakeOFTableException(
                        'output group not in group table: %s' % action)
                buckets = self.groups[action.group_id].buckets
                for bucket in buckets:
                    (bucket_outputs, _, _) = self._process_instruction(packet_dict, bucket)
                    for out_port, out_pkts in bucket_outputs.items():
                        outputs.setdefault(out_port, [])
                        outputs[out_port].extend(out_pkts)
                pending_actions = []
        return (outputs, packet_dict, pending_actions)

    def get_table_output(self, match, table_id, trace=False):
        """Process a packet through one table.

        Returns (outputs, modified packet, next table id or None).
        """
        next_table = None
        packet_dict = match.copy()
        outputs = OrderedDict()
        matching_fte = self.single_table_lookup(match, table_id, trace)
        pending_actions = []
        if matching_fte:
            for instruction in matching_fte.instructions:
                if instruction.type == ofp.OFPIT_GOTO_TABLE:
                    if table_id < instruction.table_id:
                        next_table = instruction.table_id
                    else:
                        raise FakeOFTableException('goto to lower table ID')
                elif instruction.type == ofp.OFPIT_APPLY_ACTIONS:
                    if not instruction.actions:
                        raise FakeOFTableException('no-op instruction actions')
                    (instruction_outputs, packet_dict, pending_actions) = \
                        self._process_instruction(packet_dict, instruction)
                    for out_port, out_pkts in instruction_outputs.items():
                        outputs.setdefault(out_port, [])
                        outputs[out_port].extend(out_pkts)
                elif instruction.type == ofp.OFPIT_WRITE_METADATA:
                    metadata = packet_dict.get('metadata', 0)
                    mask = instruction.metadata_mask
                    # Metadata is a 64-bit field (OF 1.3 spec).
                    mask_compl = mask ^ 0xFFFFFFFFFFFFFFFF
                    packet_dict['metadata'] = (
                        (metadata & mask_compl) | (instruction.metadata & mask))
        if next_table:
            pending_actions = []
        if pending_actions:
            raise FakeOFTableException(
                'flow performs actions on packet after output with no goto: %s' % matching_fte)
        return (outputs, packet_dict, next_table)

    def get_output(self, match, trace=False):
        """Walk the pipeline from table 0, returning per-table outputs."""
        table_outputs = {}
        table_id = 0
        next_table = True
        packet_dict = match.copy()
        while next_table:
            next_table = False
            (outputs, packet_dict, next_table_id) = self.get_table_output(
                packet_dict, table_id, trace)
            table_outputs[table_id] = outputs
            next_table = next_table_id is not None
            table_id = next_table_id
        return table_outputs

    def get_port_outputs(self, match, trace=False):
        """Walk the pipeline, returning unique output packets per port."""
        port_outputs = {}
        table_id = 0
        next_table = True
        packet_dict = match.copy()
        while next_table:
            next_table = False
            (outputs, packet_dict, next_table_id) = self.get_table_output(
                packet_dict, table_id, trace)
            for out_port, out_pkts in outputs.items():
                port_outputs.setdefault(out_port, [])
                for out_pkt in out_pkts:
                    if out_pkt not in port_outputs[out_port]:
                        port_outputs[out_port].append(out_pkt)
            next_table = next_table_id is not None
            table_id = next_table_id
        return port_outputs

    def is_full_output(self, match, port=None, vid=None, trace=False):
        """True if the packet is output to port (with vid, if given),
        using the full pipeline walk."""
        table_outputs = self.get_output(match, trace)
        if trace:
            sys.stderr.write(pprint.pformat(table_outputs) + '\n')
        in_port = match.get('in_port')
        for table_outputs in table_outputs.values():
            for out_port, out_pkts in table_outputs.items():
                for out_pkt in out_pkts:
                    # Output to the ingress port number (not OFPP_IN_PORT)
                    # is not delivery back out that port.
                    if port == out_port and port == out_pkt['in_port']:
                        continue
                    if port is None:
                        # Any output at all counts.
                        return True
                    if vid is None:
                        if port == out_port:
                            return True
                        if out_port == ofp.OFPP_IN_PORT and port == in_port:
                            return True
                    if port == out_port or (out_port == ofp.OFPP_IN_PORT and port == in_port):
                        if (vid & ofp.OFPVID_PRESENT) == 0:
                            # Expecting untagged output.
                            return ('vlan_vid' not in out_pkt
                                    or (out_pkt['vlan_vid'] & ofp.OFPVID_PRESENT) == 0)
                        return 'vlan_vid' in out_pkt and vid == out_pkt['vlan_vid']
        return False

    def lookup(self, match, trace=False):
        """Return (matching instructions, modified packet) for a packet
        walked through the pipeline."""
        packet_dict = match.copy()
        instructions = []
        table_id = 0
        goto_table = True
        while goto_table:
            goto_table = False
            table = self.tables[table_id]
            matching_fte = None
            # Tables are sorted by priority, so first match wins.
            for fte in table:
                if fte.pkt_matches(packet_dict):
                    matching_fte = fte
                    break
            if trace:
                sys.stderr.write('%d: %s\n' % (table_id, matching_fte))
            if matching_fte:
                for instruction in matching_fte.instructions:
                    instructions.append(instruction)
                    if instruction.type == ofp.OFPIT_GOTO_TABLE:
                        if table_id < instruction.table_id:
                            table_id = instruction.table_id
                            goto_table = True
                    elif instruction.type == ofp.OFPIT_APPLY_ACTIONS:
                        for action in instruction.actions:
                            if action.type == ofp.OFPAT_SET_FIELD:
                                packet_dict[action.key] = action.value
                    elif instruction.type == ofp.OFPIT_WRITE_METADATA:
                        metadata = packet_dict.get('metadata', 0)
                        mask = instruction.metadata_mask
                        # Metadata is a 64-bit field (OF 1.3 spec).
                        mask_compl = mask ^ 0xFFFFFFFFFFFFFFFF
                        packet_dict['metadata'] = (
                            (metadata & mask_compl) | (instruction.metadata & mask))
        return (instructions, packet_dict)

    def flow_count(self):
        """Total number of flow entries across all tables."""
        return sum(map(len, self.tables))

    def is_output(self, match, port=None, vid=None, trace=False):
        """Return True if packet output to port (with vid, if specified).

        Cross-checked against is_full_output(); a disagreement raises.
        """
        full_output = self.is_full_output(match.copy(), port, vid, trace)

        def _output_result(action, vid_stack, port, vid):
            if port is None:
                return True
            in_port = match.get('in_port')
            result = None
            if action.port == port:
                if port == in_port:
                    # Plain output to the ingress port number is a no-op.
                    result = None
                elif vid is None:
                    result = True
                elif (vid & ofp.OFPVID_PRESENT) == 0:
                    result = not vid_stack
                else:
                    result = bool(vid_stack and vid == vid_stack[-1])
            elif action.port == ofp.OFPP_IN_PORT and port == in_port:
                result = True
            return result

        def _process_vid_stack(action, vid_stack):
            # Track the VLAN tag stack as push/pop/set-field actions apply.
            if action.type == ofp.OFPAT_PUSH_VLAN:
                vid_stack.append(ofp.OFPVID_PRESENT)
            elif action.type == ofp.OFPAT_POP_VLAN:
                vid_stack.pop()
            elif action.type == ofp.OFPAT_SET_FIELD:
                if action.key == 'vlan_vid':
                    vid_stack[-1] = action.value
            return vid_stack

        if trace:
            sys.stderr.write(
                'tracing packet flow %s matching to port %s, vid %s\n' % (match, port, vid))
        match_vid = match.get('vlan_vid', 0)
        vid_stack = []
        if (match_vid & ofp.OFPVID_PRESENT) != 0:
            vid_stack.append(match_vid)
        (instructions, _) = self.lookup(match, trace=trace)
        for instruction in instructions:
            if instruction.type != ofp.OFPIT_APPLY_ACTIONS:
                continue
            for action in instruction.actions:
                vid_stack = _process_vid_stack(action, vid_stack)
                if action.type == ofp.OFPAT_OUTPUT:
                    output_result = _output_result(action, vid_stack, port, vid)
                    if output_result is not None:
                        if output_result != full_output:
                            raise FakeOFTableException('Output functions do not match')
                        return output_result
                elif action.type == ofp.OFPAT_GROUP:
                    if action.group_id not in self.groups:
                        raise FakeOFTableException(
                            'output group not in group table: %s' % action)
                    buckets = self.groups[action.group_id].buckets
                    for bucket in buckets:
                        bucket_vid_stack = vid_stack
                        for bucket_action in bucket.actions:
                            bucket_vid_stack = _process_vid_stack(
                                bucket_action, bucket_vid_stack)
                            if bucket_action.type == ofp.OFPAT_OUTPUT:
                                output_result = _output_result(
                                    bucket_action, vid_stack, port, vid)
                                if output_result is not None:
                                    if output_result != full_output:
                                        raise FakeOFTableException(
                                            'Output functions do not match')
                                    return output_result
        if full_output is not False:
            raise FakeOFTableException('Output functions do not match')
        return False

    def apply_instructions_to_packet(self, match):
        """Return the packet as modified by the pipeline's set-field actions."""
        (_, packet_dict) = self.lookup(match)
        return packet_dict

    def __str__(self):
        string = ''
        for table_id, table in enumerate(self.tables):
            string += '\n----- Table %u -----\n' % table_id
            string += '\n'.join(sorted([str(flowmod) for flowmod in table]))
        return string

    def sort_tables(self):
        # Highest priority first, so the linear scans above find best match.
        self.tables = [sorted(table, reverse=True) for table in self.tables]
def bg_checks(sampler_list, timeout, t_start):
    # Background watchdog: aborts the process when MQTT publishing has
    # stalled for longer than `timeout`, and periodically persists sampler
    # state.  Returns False to request shutdown, True to keep running.
    global shutdown
    now = time.time()
    if timeout:
        # Seconds since the last successful publish, or since program start
        # if nothing has been published yet.
        pdt = (now - (mqtt_last_publish_time() or t_start))
        if (pdt > timeout):
            if mqtt_last_publish_time():
                logger.error('MQTT message publish timeout (last %.0fs ago), exit', pdt)
            else:
                logger.error('MQTT never published a message after %.0fs, exit', timeout)
            shutdown = True
            return False
    global t_last_store
    # Persist state at most every 30 seconds; best-effort — errors are
    # logged but do not stop the loop.
    if ((now - (t_last_store or t_start)) > 30):
        t_last_store = now
        try:
            store_states(sampler_list)
        except Exception as e:
            logger.error('Error storing states: %s', e)
    return True
class OptionPlotoptionsBulletSonificationTracksMappingRate(Options):
    """Highcharts `plotOptions.bullet.sonification.tracks.mapping.rate` options.

    NOTE(review): each getter/setter pair was written as two plain defs with
    the same name, so the second def shadowed the first and every getter was
    dead code.  The `@property` / setter decorators have been restored —
    the standard shape for these generated option wrappers.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsWordcloudSonificationContexttracksMappingTremoloSpeed(Options):
    """Highcharts `plotOptions.wordcloud.sonification.contextTracks.mapping.tremolo.speed` options.

    NOTE(review): duplicate getter/setter defs shadowed one another; the
    `@property` / setter decorators have been restored (the standard shape
    for these generated option wrappers).
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_sample_rate_undefined_by_parent(elasticapm_client):
    # When the incoming traceparent carries no tracestate sample_rate,
    # neither the transaction nor its spans should report one.
    trace_parent = TraceParent.from_string('00-0af7651916cd43dd8448eb211c80319c-b7ad6b-03')
    elasticapm_client.begin_transaction('test', trace_parent=trace_parent)
    with elasticapm.capture_span('x'):
        pass
    transaction = elasticapm_client.end_transaction('test', 'OK')
    data = transaction.to_dict()
    assert ('sample_rate' not in data)
    assert ('sample_rate' not in elasticapm_client.events[constants.SPAN][0])
def extractCottonflavouredWordpressCom(item):
    """Classify a cottonflavoured.wordpress.com feed item into a release.

    Returns a release message for recognised tags, None for previews or
    items with no chapter/volume information, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in known_tags:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ToggleButton(BaseButton):
    # A checkable button; toggles the 'flx-checked' CSS class with state.
    DEFAULT_MIN_SIZE = (10, 28)

    def _create_dom(self):
        # `window` is the PScript/browser global provided by the framework.
        global window
        node = window.document.createElement('button')
        return node

    def _render_dom(self):
        return [self.text]

    # NOTE(review): the two bare string expressions below look like stripped
    # event-reaction decorators (e.g. flexx's @event.reaction('pointer_click')).
    # As written they are no-op statements — confirm against the original
    # source and restore the decorators.
    ('pointer_click')
    def __toggle_checked(self, *events):
        self.user_checked((not self.checked))

    ('checked')
    def __check_changed(self, *events):
        if self.checked:
            self.node.classList.add('flx-checked')
        else:
            self.node.classList.remove('flx-checked')
def get_latest_release(ver, this_version, force=False):
    """Check the project's release feed for a newer version.

    Returns {'version': ..., 'setup': download-url} when a newer release with
    an .exe asset exists (or `force` is set), else False.

    NOTE(review): the release-feed URL literal was destroyed in extraction
    (`releases_url = ' with suppress...`); a GitHub "releases/latest"
    placeholder is used here — restore the project's real endpoint.
    """
    releases_url = 'https://api.github.com/repos/OWNER/REPO/releases/latest'  # TODO confirm
    with suppress(requests.RequestException):
        release = requests.get(releases_url)
        if (release.status_code >= 400):
            # Retry through the configured proxy on an error response.
            release = requests.get(releases_url, proxies=get_proxy(False))
        release = release.json()
        # Tag names are of the form 'vX.Y.Z'; strip the leading 'v'.
        latest_ver = release.get('tag_name', f'v{this_version}')[1:]
        _version = [int(x) for x in ver.split('.')]
        compare_ver = [int(x) for x in latest_ver.split('.')]
        if ((compare_ver > _version) or force):
            for asset in release.get('assets', []):
                if ('exe' in asset['name']):
                    return {'version': latest_ver, 'setup': asset['browser_download_url']}
    return False
class ProgressBar(JQueryUI):
    # Python-side generator of jQuery UI progressbar JavaScript fragments;
    # every method returns a JS object/statement, nothing runs in Python.

    def destroy(self):
        # Remove the progressbar functionality entirely.
        return JsObjects.JsObjects.get(('%s.progressbar("destroy")' % self.component.dom.jquery.varId))

    def disable(self):
        return JsObjects.JsObjects.get(('%s.progressbar("disable")' % self.component.dom.jquery.varId))

    def enable(self):
        return JsObjects.JsObjects.get(('%s.progressbar("enable")' % self.component.dom.jquery.varId))

    def instance(self):
        # Access the widget's jQuery UI instance object.
        return JsObjects.JsObjects.get(('%s.progressbar("instance")' % self.component.dom.jquery.varId))

    def option(self, data=None, value=None):
        # Three forms, mirroring the jQuery UI API:
        #   option()            -> all options
        #   option(name)        -> one option's value
        #   option(name, value) -> set an option
        if (data is None):
            return JsObjects.JsObjects.get(('%s.progressbar("option")' % self.component.dom.jquery.varId))
        data = JsUtils.jsConvertData(data, None)
        if (value is None):
            return JsObjects.JsObjects.get(('%s.progressbar("option", %s)' % (self.component.dom.jquery.varId, data)))
        value = JsUtils.jsConvertData(value, None)
        return JsObjects.JsObjects.get(('%s.progressbar("option", %s, %s)' % (self.component.dom.jquery.varId, data, value)))

    def value(self, value=None):
        # Getter when value is None, setter otherwise.
        if (value is None):
            return JsObjects.JsObjects.get(('%s.progressbar("value")' % self.component.dom.jquery.varId))
        value = JsUtils.jsConvertData(value, None)
        return JsObjects.JsObjects.get(('%s.progressbar("value", %s)' % (self.component.dom.jquery.varId, value)))

    def add(self, value=None):
        # Increment the current value by `value` (computed client-side).
        value = JsUtils.jsConvertData(value, None)
        return JsObjects.JsObjects.get(('%(varId)s.progressbar("value", %(varId)s.progressbar("value") + %(jsValue)s)' % {'varId': self.component.dom.jquery.varId, 'jsValue': value}))

    def tooltip(self, value=None, options=None):
        # Set the inner div's title to "<pct>% (<value> / <max>)".
        return JsObjects.JsVoid((' let options = %(options)s;\n%(varId)s.find(\'div\').attr("title", ""+ (%(varId)s.progressbar("value") / options.max * 100).toFixed(2) +"%% ("+ %(varId)s.progressbar("value") +" / "+ options.max +")")\n ' % {'varId': self.component.dom.jquery.varId, 'options': self.component.options.config_js(options)}))
class IDName(AbstractCrudObject):
    """Minimal CRUD object exposing only `id` and `name` fields."""

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isIDName = True
        super(IDName, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        id = 'id'
        name = 'name'

    # Maps field name -> wire type for (de)serialization.
    _field_types = {'id': 'string', 'name': 'string'}

    # NOTE(review): the method takes `cls` but had no decorator — restored
    # @classmethod, without which it would receive an instance as `cls`.
    @classmethod
    def _get_field_enum_info(cls):
        # No enum-typed fields on this object.
        field_enum_info = {}
        return field_enum_info
class FncFiltere():
    """Registers the shared client-side record-filter JS function and builds
    filter configurations into the page's data schema."""

    def __init__(self, data, js_src, data_schema=None, profile: Union[(dict, bool)] = False):
        (self._js_src, self._data_schema, self._data, self.profile) = (js_src, data_schema, data, profile)
        fnc_name = JsFncsRecords.JsFilter.__name__
        fnc_pmts = (['data'] + (list(JsFncsRecords.JsFilter.pmts) or []))
        # Register the JavaScript filter function once per page.
        if (fnc_name not in self._js_src.get('js', {}).get('functions', {})):
            self._js_src.setdefault('js', {}).setdefault('functions', {})[fnc_name] = {
                'content': ('var result = []; %s;return result' % JsUtils.cleanFncs(JsFncsRecords.JsFilter.content)),
                'pmt': fnc_pmts}
        self._data_schema['filters'] = []

    def custom(self, column: str, val: Any, compare_type: str, all_if_empty: bool = True):
        """Append a raw filter clause (column, value, operator) to the schema."""
        filter_data = JsUtils.jsConvertData(
            {'colName': column, 'val': val, 'op': compare_type, 'allIfEmpty': all_if_empty}, None)
        self._data_schema['filters'].append(filter_data)
        return self._data

    def not_in_(self, column: str, val: Any):
        raise ValueError('Not implemented')

    def not_range_(self, column: str, val: Any, compare_type: str = 'in', all_if_empty: bool = True):
        # NOTE(review): the original body was lost in extraction; made it
        # fail loudly like not_in_ rather than silently return None.
        raise ValueError('Not implemented')

    def in_(self, column: str, val: Any):
        return self.custom(column, val, 'in', True)

    def range_(self, column: str, val: Any, strict_left: bool = False, strict_right: bool = False):
        # Choose the range operator from the strictness of each bound.
        if (not strict_left):
            if (not strict_right):
                return self.custom(column, val, '><=', True)
            return self.custom(column, val, '><', True)
        if (not strict_right):
            if (not strict_left):
                return self.custom(column, val, '=><', True)
            return self.custom(column, val, '><', True)
        return self.custom(column, val, '=><=', True)

    def eq_(self, column: str, val: Any):
        # FIX: used the '>' operator, which is sup_'s strict comparator —
        # equality should use '=='.  TODO confirm the operator token against
        # the client-side JsFilter implementation.
        return self.custom(column, val, '==', True)

    def sup_(self, column: str, val: Any, strict: bool = False):
        if strict:
            return self.custom(column, val, '>', True)
        return self.custom(column, val, '>=', True)

    def inf_(self, column: str, val: Any, strict: bool = False):
        if strict:
            return self.custom(column, val, '<', True)
        return self.custom(column, val, '<=', True)
def main():
    # Create a synthetic SEG-Y cube: inline-sorted, single offset, where each
    # trace is a small linear ramp shifted by xline/100 + iline so sample
    # values identify the trace they belong to.
    if (len(sys.argv) < 7):
        sys.exit('Usage: {} [file] [samples] [first iline] [last iline] [first xline] [last xline]'.format(sys.argv[0]))
    spec = segyio.spec()
    filename = sys.argv[1]
    spec.sorting = 2  # inline sorting
    spec.format = 1   # IBM float
    spec.samples = range(int(sys.argv[2]))
    spec.ilines = range(*map(int, sys.argv[3:5]))
    spec.xlines = range(*map(int, sys.argv[5:7]))
    with segyio.create(filename, spec) as f:
        start = 0.0
        step = 1e-05
        # Base ramp: len(samples) values starting at `start`, step `step`.
        trace = np.arange(start=start, stop=(start + (step * len(spec.samples))), step=step, dtype=np.single)
        tr = 0
        for il in spec.ilines:
            for xl in spec.xlines:
                f.header[tr] = {segyio.su.offset: 1, segyio.su.iline: il, segyio.su.xline: xl}
                f.trace[tr] = ((trace + (xl / 100.0)) + il)
                tr += 1
        f.bin.update(tsort=segyio.TraceSortingFormat.INLINE_SORTING)
class ParFile(TBase):
    # Parse-tree walker building IR for a gradient ('g') parameter file.

    def __init__(self, f, dump=None):
        TBase.__init__(self, 'g', dump)
        self.grad = []
        self.main(f)

    def main(self, f):
        # `f` is the parse tree root; bail out on empty input or on a
        # parse-error node.
        if (len(f.children) == 0):
            return
        if (f.children[0].type == 'error'):
            self.error(f.children[0].leaf)
            return
        s = f.childByName('nameless')
        if s:
            self.params(s)

    def params(self, node):
        # Apply the un-named parameter block as gradient settings.
        self.update_settings('gradient', node)

    def set(self, node):
        # Build an IR assignment: <name> = <constant expression value>.
        name = node.children[0].leaf
        val = self.const_exp(node.children[1])
        return ir.Move(ir.Var(name, node, val.datatype), val, node, val.datatype)
def test_token_provider_dash(mocker):
    # With no jwt configured, the client must pick up the token supplied via
    # the Dash app-service token provider's flask request headers.
    mocker.patch('foundry_dev_tools.utils.token_provider.AppServiceDashTokenProvider.get_flask_request_headers', return_value={APP_SERVICE_ACCESS_TOKEN_HEADER: 'secret-token-dash'})
    with PatchConfig(config_overwrite={'jwt': None}):
        # Import inside the patched config so module-level setup sees it.
        from foundry_dev_tools.foundry_api_client import FoundryRestClient
        client = FoundryRestClient()
        assert (client._config['jwt'] == 'secret-token-dash')
class SignedTransactionMethods(BaseTransactionMethods, SignedTransactionAPI):
    """Shared behaviour for signed transactions."""

    # Typed-transaction envelope id; None presumably means a legacy
    # (untyped) transaction — confirm against the transaction hierarchy.
    type_id: Optional[int] = None

    # FIX: the source had a bare `_property` token before this def — clearly
    # a mangled `@property` decorator; restored so `sender` is an attribute.
    @property
    def sender(self) -> Address:
        return self.get_sender()

    def validate(self) -> None:
        """Raise ValidationError if gas is below intrinsic gas or the
        signature is invalid."""
        if (self.gas < self.intrinsic_gas):
            raise ValidationError('Insufficient gas')
        self.check_signature_validity()

    def is_signature_valid(self) -> bool:
        """Boolean form of check_signature_validity()."""
        try:
            self.check_signature_validity()
        except ValidationError:
            return False
        else:
            return True
class DangerousDelegatecall(Checker):
    # Flags execution traces where the call data or target of a DELEGATECALL
    # appears to be influenced by the result of an earlier call.

    def __init__(self, contract_manager, account_manager):
        super().__init__()
        self.contract_manager = contract_manager
        self.account_manager = account_manager
        # All known contract and account addresses, used by the trace logger.
        self.addresses = []
        for contract in contract_manager.contract_dict.values():
            self.addresses += contract.addresses
        for account in account_manager.accounts:
            self.addresses.append(account.address)

    def check(self, logger):
        # Returns True on the first DELEGATECALL whose arguments or target
        # can be traced back to an earlier call's output.
        logger.addresses = self.addresses
        for (i, log) in enumerate(logger.logs):
            if (log.op == DELEGATECALL):
                # Stack slots -4/-5 appear to hold argsOffset/argsLength for
                # DELEGATECALL here — confirm against the logger's stack layout.
                args_offset = int(log.stack[(- 4)], 16)
                args_length = int(log.stack[(- 5)], 16)
                value_from_call0 = False
                # Only trace non-empty, non-zero call data (memory is hex
                # with a 2-char prefix stripped via log.memory[2:]).
                if ((args_length != 0) and (int.from_bytes(bytes.fromhex(log.memory[2:])[args_offset:(args_offset + args_length)], byteorder='big') != 0)):
                    try:
                        (value_from_call0, _) = logger.trace_log_memory((i - 1), args_offset, (args_offset + args_length))
                    except RecursionError:
                        # Deep traces: give up on the memory path.
                        pass
                # Also trace the stack operand at slot -2.
                try:
                    (value_from_call1, _) = logger.trace_log_stack((i - 1), (- 2))
                except RecursionError:
                    value_from_call1 = False
                if (value_from_call0 or value_from_call1):
                    return True
        return False
def test_modify_library(mockproject):
    # Editing a library source must trigger recompilation of the library and
    # of every contract that links against it.
    with mockproject._path.joinpath('contracts/FooLib.sol').open('w') as fp:
        fp.write(LIBRARY.replace('true', 'false'))
    mockproject.load()
    assert (sorted(mockproject._compile.call_args[0][0]) == ['contracts/BaseFoo.sol', 'contracts/Foo.sol', 'contracts/FooLib.sol'])
# FIX: restored the `@` on the Dash callback — as a bare `_app.callback(...)`
# call before the def, the function was never registered as a callback.
@_app.callback(Output('sms-demand-result', 'children'), Input('ask-sms', 'n_clicks'))
def askCode(n_clicks):
    """Request an SMS OTP code when the 'ask-sms' button is clicked and
    report success/failure as an alert."""
    ctx = callback_context
    if ctx.triggered:
        try:
            app.myp.remote_client.get_sms_otp_code()
            return dbc.Alert('SMS sent', color='success')
        except Exception as e:
            # Surface the provider error text to the user.
            res = str(e)
            return dbc.Alert(res, color='danger')
    # Initial render (nothing triggered): leave the output untouched.
    raise PreventUpdate()
class OptionPlotoptionsVariablepieStatesSelectHalo(Options):
    """Highcharts `plotOptions.variablepie.states.select.halo` options.

    NOTE(review): duplicate getter/setter defs shadowed one another; the
    `@property` / setter decorators have been restored (the standard shape
    for these generated option wrappers).
    """

    @property
    def attributes(self):
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def opacity(self):
        # Default opacity reported by the library is 0.25.
        return self._config_get(0.25)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def size(self):
        # Default halo size reported by the library is 10.
        return self._config_get(10)

    @size.setter
    def size(self, num: float):
        self._config(num, js_type=False)
class TestActionClose(TestCase):
    # Unit tests for the curator Close action against a fully mocked
    # Elasticsearch client.
    VERSION = {'version': {'number': '5.0.0'}}

    def builder(self):
        # Wire up a Mock client with canned cluster-state responses and a
        # fresh IndexList.
        self.client = Mock()
        self.client.info.return_value = self.VERSION
        self.client.cat.indices.return_value = testvars.state_one
        self.client.indices.get_settings.return_value = testvars.settings_one
        self.client.indices.stats.return_value = testvars.stats_one
        self.client.indices.flush_synced.return_value = testvars.synced_pass
        self.client.indices.exists_alias.return_value = False
        self.client.indices.close.return_value = None
        self.ilo = IndexList(self.client)

    def test_init_raise(self):
        # Close requires an IndexList, not an arbitrary object.
        self.assertRaises(TypeError, Close, 'invalid')

    def test_init(self):
        self.builder()
        self.client.indices.flush_synced.return_value = None
        self.client.indices.close.return_value = None
        clo = Close(self.ilo)
        self.assertEqual(self.ilo, clo.index_list)
        self.assertEqual(self.client, clo.client)

    def test_do_dry_run(self):
        self.builder()
        self.ilo = IndexList(self.client)
        clo = Close(self.ilo)
        self.assertIsNone(clo.do_dry_run())

    def test_do_action(self):
        self.builder()
        self.ilo = IndexList(self.client)
        clo = Close(self.ilo)
        self.assertIsNone(clo.do_action())

    def test_do_action_with_delete_aliases(self):
        self.builder()
        self.ilo = IndexList(self.client)
        clo = Close(self.ilo, delete_aliases=True)
        self.assertIsNone(clo.do_action())

    def test_do_action_with_skip_flush(self):
        self.builder()
        self.ilo = IndexList(self.client)
        clo = Close(self.ilo, skip_flush=True)
        self.assertIsNone(clo.do_action())

    def test_do_action_raises_exception(self):
        # A failing close call must surface as FailedExecution.
        self.builder()
        self.client.indices.close.side_effect = testvars.fake_fail
        self.ilo = IndexList(self.client)
        clo = Close(self.ilo)
        self.assertRaises(FailedExecution, clo.do_action)

    def test_do_action_delete_aliases_with_exception(self):
        # Alias-deletion failures are tolerated; the action still completes.
        self.builder()
        self.ilo = IndexList(self.client)
        self.client.indices.delete_alias.side_effect = testvars.fake_fail
        clo = Close(self.ilo, delete_aliases=True)
        self.assertIsNone(clo.do_action())
class Template(object):
    """A minimal string template: substitutes ``{key}`` markers from a
    context mapping."""

    def __init__(self, name, code):
        self.name = name
        # Surrounding whitespace in the template body is never significant.
        self.code = code.strip()

    def compile(self, ctx):
        """Render the template, replacing each ``{key}`` with ``str(value)``
        for every (key, value) pair in *ctx*."""
        rendered = self.code
        for placeholder in ctx:
            rendered = rendered.replace('{' + placeholder + '}', str(ctx[placeholder]))
        return rendered
def example():
    # Demo control: a column with a button that opens a DatePicker and a
    # Text showing the chosen date (flet async API).
    class Example(ft.Column):
        def __init__(self):
            super().__init__()
            self.datepicker = ft.DatePicker(first_date=datetime.datetime(2023, 10, 1), last_date=datetime.datetime(2024, 10, 1), on_change=self.change_date)
            self.selected_date = ft.Text()
            self.controls = [ft.ElevatedButton('Pick date', icon=ft.icons.CALENDAR_MONTH, on_click=self.open_date_picker), self.selected_date]

        async def open_date_picker(self, e):
            (await self.datepicker.pick_date_async())

        async def change_date(self, e):
            self.selected_date.value = f'Selected date: {self.datepicker.value}'
            (await e.control.page.update_async())

        async def did_mount_async(self):
            # The DatePicker must live in the page overlay to be displayed.
            self.page.overlay.append(self.datepicker)
            (await self.page.update_async())

        async def will_unmount_async(self):
            # Clean the overlay up when the control is removed.
            self.page.overlay.remove(self.datepicker)
            (await self.page.update_async())

    datepicker_example = Example()
    return datepicker_example
def test_drop_events_in_processor(elasticapm_client, caplog):
    # A processor returning None drops the event; later processors must not
    # run, and the drop must be logged.
    dropping_processor = mock.MagicMock(return_value=None, event_types=[TRANSACTION], __name__='dropper')
    shouldnt_be_called_processor = mock.Mock(event_types=[])
    elasticapm_client._transport._processors = [dropping_processor, shouldnt_be_called_processor]
    with caplog.at_level(logging.DEBUG, logger='elasticapm.transport'):
        elasticapm_client.begin_transaction('test')
        elasticapm_client.end_transaction('test', 'FAIL')
    assert (dropping_processor.call_count == 1)
    assert (shouldnt_be_called_processor.call_count == 0)
    # The dropped event is recorded as None in the transport queue.
    assert (elasticapm_client._transport.events[TRANSACTION][0] is None)
    assert_any_record_contains(caplog.records, 'Dropped event of type transaction due to processor mock.mock.dropper', 'elasticapm.transport')
def set_global_machine_arch(arch, binary):
    """Configure `binary.config` register/syscall/address settings for `arch`
    ('x86', 'x64' or 'ARM').

    NOTE(review): the HIGH_PC right-hand sides were lost in extraction.
    They are reconstructed here as the maximum address representable in the
    architecture's pointer width — confirm against the original constants.
    """
    binary.config.MACHINE_ARCH = arch
    if (arch == 'x86'):
        binary.config.REG_MAPPING = constants.REG_MAPPING_x86
        binary.config.SYSCALL_TABLE = constants.SYSCALL_TABLE_x86
        binary.config.ADDRESS_BYTE_SIZE = 4
        binary.config.HIGH_PC = (1 << 32) - 1  # TODO confirm original value
    elif (arch == 'x64'):
        binary.config.REG_MAPPING = constants.REG_MAPPING_x64
        binary.config.SYSCALL_TABLE = constants.SYSCALL_TABLE_x64
        binary.config.ADDRESS_BYTE_SIZE = 8
        binary.config.HIGH_PC = (1 << 64) - 1  # TODO confirm original value
    elif (arch == 'ARM'):
        binary.config.REG_MAPPING = constants.REG_MAPPING_arm
        binary.config.SYSCALL_TABLE = constants.SYSCALL_TABLE_arm
        binary.config.ADDRESS_BYTE_SIZE = 4
        binary.config.HIGH_PC = (1 << 32) - 1  # TODO confirm original value
class EnforcerTestCase(ForsetiTestCase):
    """Tests for the firewall enforcer with the GCE API fully mocked."""

    # NOTE(review): the decorator arrived mangled as a bare `.object(...)`
    # expression and `setUpClass(cls, ...)` had no @classmethod; restored as
    # @classmethod + @mock.patch.object — the standard patched-setUpClass
    # shape.  Confirm decorator order against the original source.
    @classmethod
    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), TEST_PROJECT))
    def setUpClass(cls, mock_google_credential):
        fake_global_configs = {'compute': {'max_calls': 18, 'period': 1}}
        cls.gce_api_client = compute.ComputeClient(global_configs=fake_global_configs, dry_run=True)

    def setUp(self):
        # Freeze "now" and stub out every GCE API call the enforcer makes.
        self.mock_time = mock.patch.object(date_time, 'get_utc_now_datetime', return_value=MOCK_DATETIME).start()
        self.gce_api_client.get_networks = mock.Mock(return_value=SAMPLE_TEST_NETWORK_SELFLINK)
        self.gce_api_client.get_project = mock.Mock(return_value=TEST_PROJECT_RESPONSE)
        self.gce_api_client.get_firewall_rules = mock.Mock()
        self.project = TEST_PROJECT
        self.policy = json.loads(RAW_EXPECTED_JSON_POLICY)
def extractProjektworldwitchesCom(item):
    """Map a projektworldwitches.com feed item to a release message.

    Returns None for previews or items without chapter/volume info, a
    release message for recognised tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    releases_by_tag = {
        'Noble Witches': ('Noble Witches: 506th Joint Fighter Wing', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series_name, tl_type) in releases_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_attach_external_modules_multiple_levels_deep(module1, module2, module3, module4):
    # External modules may be declared as nested (module, {submodules})
    # tuples; verify attributes resolve at every nesting level and the core
    # modules still work.
    w3 = Web3(EthereumTesterProvider(), external_modules={'module1': module1, 'module2': (module2, {'submodule1': (module3, {'submodule2': module4})})})
    assert w3.is_connected()
    assert hasattr(w3, 'geth')
    assert hasattr(w3, 'eth')
    assert is_integer(w3.eth.chain_id)
    assert hasattr(w3, 'module1')
    assert (w3.module1.a == 'a')
    assert (w3.module1.b == 'b')
    assert hasattr(w3, 'module2')
    assert (w3.module2.c == 'c')
    assert (w3.module2.d() == 'd')
    assert hasattr(w3.module2, 'submodule1')
    assert (w3.module2.submodule1.e == 'e')
    assert hasattr(w3.module2.submodule1, 'submodule2')
    assert (w3.module2.submodule1.submodule2.f == 'f')
class XMLDiff(DiffBase):
    def parse(self):
        # Parse ignoring insignificant whitespace and comments; unless
        # `ordered` is set, sort each element's children by tag so sibling
        # order does not affect the diff.  The normalized, pretty-printed
        # bytes are kept in self.pretty.
        parser = lxml.etree.XMLParser(remove_blank_text=True, remove_comments=True)
        self.config = lxml.etree.parse(self.filename, parser)
        if (not self.ordered):
            # //*[./*] selects every element that has at least one child.
            for parent in self.config.xpath('//*[./*]'):
                parent[:] = sorted(parent, key=(lambda x: x.tag))
        self.pretty = BytesIO()
        self.config.write(self.pretty, pretty_print=True)
class CREDHIST_FILE():
    # Parser for a DPAPI CREDHIST blob.  Entries are stored back-to-back and
    # are walked from the END of the buffer using each entry's trailing
    # length field — hence the negative-index slicing below.

    def __init__(self, raw):
        self.credhist_entries = {}       # guid string -> entry
        self.credhist_entries_list = []  # entries in discovery order (newest first)
        self.version = unpack('<L', raw[:4])[0]
        self.current_guid = unpack('16s', raw[4:20])[0]
        i = 0
        # Length of the last entry sits in the final 4 bytes of the file.
        next_len = unpack('<L', raw[((- i) - 4):])[0]
        i += 4
        while (next_len != 0):
            ch_entry = CREDHIST_ENTRY(data=raw[(- ((i + next_len) - 4)):(- i)])
            i += (next_len - 4)
            self.credhist_entries[bin_to_string(ch_entry['Guid'])] = ch_entry
            self.credhist_entries_list.append(ch_entry)
            # Each entry is preceded by the length of the one before it;
            # a zero length terminates the chain.
            next_len = unpack('<L', raw[((- i) - 4):(- i)])[0]
            i += 4

    def decrypt_entry_by_index(self, entry_index, key):
        self.credhist_entries_list[entry_index].decrypt(key)

    def decrypt_entry_by_guid(self, guid, key):
        self.credhist_entries[guid].decrypt(key)

    def decrypt(self, key):
        # Entries form a chain: each decrypted entry yields the key material
        # used to decrypt the next one.
        keys = [key]
        for (i, e) in enumerate(self.credhist_entries_list):
            for k in keys:
                e.decrypt(k)
                if (e.pwdhash is not None):
                    break
            if (e.pwdhash is None):
                print(('Error decrypting entry #%d' % i))
                return
            keys = deriveKeysFromUserkey(e.sid, e.pwdhash)

    def dump(self):
        print('[CREDHIST FILE]')
        print(('Version : 0x%.8x (%d)' % (self.version, self.version)))
        print(('Current Guid : %s' % bin_to_string(self.current_guid)))
        print()
        for (i, e) in enumerate(self.credhist_entries_list):
            print(('[Entry #%d]' % i))
            e.dump()

    def summarize(self):
        # Like dump() but per-entry summaries instead of full dumps.
        print('[CREDHIST FILE]')
        print(('Version : 0x%.8x (%d)' % (self.version, self.version)))
        print(('Current Guid : %s' % bin_to_string(self.current_guid)))
        print()
        for (i, e) in enumerate(self.credhist_entries_list):
            print(('[Entry #%d]' % i))
            e.summarize()
class ServerSentEvent():
    """Python wrapper generating the JavaScript for a browser ``EventSource``
    (server-sent events) connection.

    NOTE(review): the ``@property`` decorators below were missing from the
    extracted source (the stripped-decorator pattern seen throughout this
    file); restored here - confirm against the original module.
    """

    def __init__(self, html_code: Optional[str] = None, src: Optional[Union[(str, primitives.PageModel)]] = None, server: Optional[str] = False):
        # NOTE(review): default ``False`` does not match the Optional[str]
        # annotation; value kept unchanged for backward compatibility.
        (self.page, self.__server) = (src, server)
        # JavaScript variable name holding the EventSource object on the page.
        self._selector = (html_code or ('sse_%s' % id(self)))
        self.page.properties.js.add_builders(('var %s' % self._selector))

    @property
    def readyState(self):
        """JavaScript expression for the connection state (0/1/2)."""
        return JsObjects.JsObject.JsObject.get(('%s.readyState' % self._selector))

    @property
    def url(self):
        """JavaScript expression for the EventSource URL."""
        return JsObjects.JsObject.JsObject.get(('%s.url' % self._selector))

    @property
    def withCredentials(self):
        """JavaScript expression for the CORS-credentials flag.

        Fixed: previously generated ``<selector>.url`` (copy/paste error).
        """
        return JsObjects.JsObject.JsObject.get(('%s.withCredentials' % self._selector))

    @property
    def message(self):
        """JavaScript expression for the last received event's payload."""
        return JsObjects.JsObject.JsObject.get('event.data')

    def connect(self, url: Optional[str] = None, port: Optional[int] = None, from_config=None, options: dict = None):
        """Emit the JavaScript that creates the EventSource connection.

        :param url: server URL, combined with ``port`` when the latter is given.
        :param port: optional server port.
        :param from_config: config object exposing ``address``; wins over url/port.
        :param options: optional EventSource options dictionary.
        """
        if from_config is not None:
            self.__connect = ('new EventSource(%s)' % from_config.address)
            self.page.properties.js.add_builders(('%s = %s' % (self._selector, self.__connect)))
            return JsObjects.JsVoid(('%s = %s' % (self._selector, self.__connect)))
        server_root = (('%s:%s' % (url, port)) if (port is not None) else url)
        self.__connect = (("new EventSource('%s')" % server_root) if (options is None) else ("new EventSource('%s', %s)" % (server_root, JsUtils.jsConvertData(options, None))))
        self.page.properties.js.add_builders(('%s = %s' % (self._selector, self.__connect)))
        return JsObjects.JsVoid(('%s = %s' % (self._selector, self.__connect)))

    def onmessage(self, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]] = None):
        """Register the client-side handler for incoming messages."""
        if not isinstance(js_funcs, list):
            js_funcs = [js_funcs]
        self.page.js.onReady(('%s.onmessage = function (event) { %s }' % (self._selector, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))
        return self

    def onerror(self, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]] = None):
        """Register the client-side handler for connection errors."""
        if not isinstance(js_funcs, list):
            js_funcs = [js_funcs]
        self.page.js.onReady(('%s.onerror = function (event) {%s}' % (self._selector, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))
        return self

    def onopen(self, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]] = None):
        """Register the client-side handler run when the connection opens."""
        if not isinstance(js_funcs, list):
            js_funcs = [js_funcs]
        self.page.js.onReady(('%s.onopen = function (event) { %s }' % (self._selector, JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))))
        return self

    def addEventListener(self, event_type: str, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]] = None):
        """Return the JavaScript adding a listener for an arbitrary event type."""
        event_type = JsUtils.jsConvertData(event_type, None)
        return JsObjects.JsVoid(('%(varName)s.addEventListener(%(eventType)s, function (event) {%(data)s})' % {'varName': self._selector, 'eventType': event_type, 'data': JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)}))

    def on(self, event_type: str, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]] = None):
        """Attach an event listener when the page is ready."""
        self.page.js.onReady(self.addEventListener(event_type, js_funcs, profile))

    def receive(self, js_funcs: Union[(list, str)], profile: Optional[Union[(dict, bool)]] = None):
        """Return the JavaScript installing the onmessage handler."""
        return JsObjects.JsVoid(('%(varName)s.onmessage = function (event) { %(data)s }' % {'varName': self._selector, 'data': JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)}))

    def close(self):
        """Return the JavaScript closing the connection and clearing the variable."""
        return JsObjects.JsVoid(('%s.close(); %s = undefined' % (self._selector, self._selector)))
class Lighting(object):
    """Swagger-generated model: which lights and turn signals to operate.

    Fixed: the getter/setter pairs lacked their ``@property``/``@<name>.setter``
    decorators, so the second ``def`` silently shadowed the first and the
    value validation never ran.
    """

    # attribute name -> declared swagger type
    swagger_types = {'light': 'list[str]', 'turn': 'list[str]'}
    # attribute name -> JSON key
    attribute_map = {'light': 'light', 'turn': 'turn'}

    def __init__(self, light=None, turn=None):
        self._light = None
        self._turn = None
        self.discriminator = None
        if light is not None:
            self.light = light
        if turn is not None:
            self.turn = turn

    @property
    def light(self):
        """Lights to operate; each value must be 'Front' or 'Rear'."""
        return self._light

    @light.setter
    def light(self, light):
        allowed_values = ['Front', 'Rear']
        if not set(light).issubset(set(allowed_values)):
            raise ValueError('Invalid values for `light` [{0}], must be a subset of [{1}]'.format(', '.join(map(str, (set(light) - set(allowed_values)))), ', '.join(map(str, allowed_values))))
        self._light = light

    @property
    def turn(self):
        """Turn signals to operate; each value must be 'Left' or 'Right'."""
        return self._turn

    @turn.setter
    def turn(self, turn):
        allowed_values = ['Left', 'Right']
        if not set(turn).issubset(set(allowed_values)):
            raise ValueError('Invalid values for `turn` [{0}], must be a subset of [{1}]'.format(', '.join(map(str, (set(turn) - set(allowed_values)))), ', '.join(map(str, allowed_values))))
        self._turn = turn

    def to_dict(self):
        """Return the model as a plain dict, recursing into nested models."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, 'to_dict') else x for x in value]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # py2-era six.iteritems replaced with the py3 idiom.
                result[attr] = {k: (v.to_dict() if hasattr(v, 'to_dict') else v) for (k, v) in value.items()}
            else:
                result[attr] = value
        if issubclass(Lighting, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, Lighting):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
def _create_feature():
    """Assemble the composite feature calculator used by the pipeline.

    Combines quarterly statistics, quarter-over-quarter diffs, commodity
    daily aggregates and industry-relative variants of those, all merged on
    (ticker, date).
    """
    # Rolling statistics over the quarterly columns (also on their diffs).
    fc1 = QuarterlyFeatures(data_key='quarterly', columns=(QUARTER_COLUMNS + DEV_COLUMNS), quarter_counts=QUARTER_COUNTS, max_back_quarter=MAX_BACK_QUARTER, min_back_quarter=MIN_BACK_QUARTER, calc_stats_on_diffs=True, data_preprocessing=_preprocess, verbose=VERBOSE)
    # Changes relative to selected earlier quarters.
    fc2 = QuarterlyDiffFeatures(data_key='quarterly', columns=(QUARTER_COLUMNS + DEV_COLUMNS), compare_quarter_idxs=COMPARE_QUARTER_IDXS, max_back_quarter=MAX_BACK_QUARTER, min_back_quarter=MIN_BACK_QUARTER, data_preprocessing=_preprocess, verbose=VERBOSE)
    # Commodity price aggregates aligned to quarter boundaries.
    fc3 = DailyAggQuarterFeatures(daily_data_key='commodities', quarterly_data_key='quarterly', columns=['price'], agg_day_counts=AGG_DAY_COUNTS, max_back_quarter=MAX_BACK_QUARTER, min_back_quarter=MIN_BACK_QUARTER, daily_index=COMMODITIES_CODES, verbose=VERBOSE)
    # Industry-relative versions (value minus industry group level) of fc3 and fc1.
    fc4 = RelativeGroupFeatures(feature_calculator=fc3, group_data_key='base', group_col='industry', relation_foo=(lambda x, y: (x - y)), keep_group_feats=True, verbose=VERBOSE)
    fc5 = RelativeGroupFeatures(feature_calculator=fc1, group_data_key='base', group_col='industry', relation_foo=(lambda x, y: (x - y)), keep_group_feats=True, verbose=VERBOSE)
    # Chain-merge all calculators on (ticker, date).
    feature = FeatureMerger(fc1, fc2, on=['ticker', 'date'])
    feature = FeatureMerger(feature, fc3, on=['ticker', 'date'])
    feature = FeatureMerger(feature, fc4, on=['ticker', 'date'])
    feature = FeatureMerger(feature, fc5, on=['ticker', 'date'])
    return feature
# NOTE(review): the extracted source showed a bare `_dataloader(...)` before the
# class; per this file's stripped-decorator pattern (`@register_` removed) it is
# restored as @register_dataloader - confirm against the original module.
@register_dataloader('speech-to-speech')
class SpeechToSpeechDataloader(SpeechToTextDataloader):
    """Dataloader pairing source speech files with target speech files
    (plus optional per-sample target languages)."""

    @classmethod
    def from_files(cls, source: Union[(Path, str)], target: Union[(Path, str)], tgt_lang: Union[(Path, str, None)] = None) -> SpeechToSpeechDataloader:
        """Build a dataloader from list files of source/target audio paths.

        :param source: file listing the source audio paths, one per line.
        :param target: file listing the target audio paths, one per line.
        :param tgt_lang: optional file listing target languages, one per line.
        """
        source_list = load_list_from_file(source)
        target_list = load_list_from_file(target)
        tgt_lang_list = []
        if tgt_lang is not None:
            tgt_lang_list = load_list_from_file(tgt_lang)
        return cls(source_list, target_list, tgt_lang_list)

    @classmethod
    def from_args(cls, args: Namespace):
        """Build from parsed CLI args, forcing the speech-to-speech modality."""
        args.source_type = 'speech'
        args.target_type = 'speech'
        return cls.from_files(args.source, args.target, args.tgt_lang)
class OptionSeriesHeatmapSonificationDefaultspeechoptionsMappingPitch(Options):
    """Pitch mapping options for the default speech track of heatmap sonification.

    NOTE(review): ``@property``/``@<name>.setter`` decorators restored - the
    duplicated plain ``def``s in the extracted source would otherwise shadow
    each other.
    """

    @property
    def mapFunction(self):
        """Getter for the `mapFunction` option (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Getter for the `mapTo` option (default 'undefined')."""
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Getter for the `max` option (default 'undefined')."""
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """Getter for the `min` option (default 'undefined')."""
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        """Getter for the `within` option (default 'undefined')."""
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
def _handle_error_while_loading_component_module_not_found(configuration: ComponentConfiguration, e: ModuleNotFoundError) -> None:
    """Re-raise a ModuleNotFoundError hit while loading an AEA package as a
    more informative AEA-specific error.

    Only import paths of the form ``packages.<author>[.<type>[.<name>[...]]]``
    are translated; any other path is re-raised unchanged.

    :param configuration: configuration of the component being loaded.
    :param e: the original ModuleNotFoundError.
    :raises AEAPackageLoadingError: for recognised package import paths.
    """
    error_message = str(e)
    match = re.match("No module named '([\\w.]+)'", error_message)
    if match is None:
        # Unexpected message format - nothing useful to add.
        raise e from e
    import_path = match.group(1)
    parts = import_path.split('.')
    nb_parts = len(parts)
    if (parts[0] != PACKAGES) or (nb_parts < 2):
        # Not an AEA package import path.
        raise e from e

    def get_new_error_message_no_package_found() -> str:
        """Message for when the package itself could not be resolved."""
        enforce(nb_parts <= 4, 'More than 4 parts!')
        author = parts[1]
        new_message = "No AEA package found with author name '{}'".format(author)
        if nb_parts >= 3:
            pkg_type = parts[2]
            try:
                ComponentType(pkg_type[:(- 1)])
            except ValueError:
                return "'{}' is not a valid type name, choose one of {}".format(pkg_type, list(map(lambda x: x.to_plural(), ComponentType)))
            new_message += ", type '{}'".format(pkg_type)
        if nb_parts == 4:
            pkg_name = parts[3]
            new_message += ", name '{}'".format(pkg_name)
        return new_message

    def get_new_error_message_with_package_found() -> str:
        """Message for when the package exists but an inner module is missing."""
        enforce(nb_parts >= 5, 'Less than 5 parts!')
        # parts = [PACKAGES, author, type, name, module, ...].  The original
        # code unpacked parts[:3], wrongly reporting PACKAGES as the author.
        (author, pkg_type, pkg_name) = parts[1:4]
        the_rest = '.'.join(parts[4:])
        return "The package '{}/{}' of type '{}' exists, but cannot find module '{}'".format(author, pkg_name, pkg_type, the_rest)

    if nb_parts < 5:
        new_message = get_new_error_message_no_package_found()
    else:
        new_message = get_new_error_message_with_package_found()
    new_exc = AEAPackageNotFound(new_message)
    new_exc.__traceback__ = e.__traceback__
    e_str = parse_exception(new_exc)
    raise AEAPackageLoadingError('Package loading error: An error occurred while loading {} {}:\n{}'.format(str(configuration.component_type), configuration.public_id, e_str))
class OptionSeriesLineSonificationContexttracks(Options):
    """Context track options for line-series sonification.

    NOTE(review): ``@property``/``@<name>.setter`` decorators restored - the
    duplicated plain ``def``s in the extracted source would otherwise shadow
    each other.
    """

    @property
    def activeWhen(self) -> 'OptionSeriesLineSonificationContexttracksActivewhen':
        """Sub-options controlling when the track is active."""
        return self._config_sub_data('activeWhen', OptionSeriesLineSonificationContexttracksActivewhen)

    @property
    def instrument(self):
        """Getter for the `instrument` option (default 'piano')."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionSeriesLineSonificationContexttracksMapping':
        """Sub-options mapping data to sound parameters."""
        return self._config_sub_data('mapping', OptionSeriesLineSonificationContexttracksMapping)

    @property
    def midiName(self):
        """Getter for the `midiName` option (default None)."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionSeriesLineSonificationContexttracksPointgrouping':
        """Sub-options for point grouping."""
        return self._config_sub_data('pointGrouping', OptionSeriesLineSonificationContexttracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """Getter for the `roundToMusicalNotes` option (default True)."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """Getter for the `showPlayMarker` option (default True)."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def timeInterval(self):
        """Getter for the `timeInterval` option (default None)."""
        return self._config_get(None)

    @timeInterval.setter
    def timeInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def type(self):
        """Getter for the `type` option (default 'instrument')."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueInterval(self):
        """Getter for the `valueInterval` option (default None)."""
        return self._config_get(None)

    @valueInterval.setter
    def valueInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def valueMapFunction(self):
        """Getter for the `valueMapFunction` option (default 'linear')."""
        return self._config_get('linear')

    @valueMapFunction.setter
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueProp(self):
        """Getter for the `valueProp` option (default '"x"')."""
        return self._config_get('"x"')

    @valueProp.setter
    def valueProp(self, text: str):
        self._config(text, js_type=False)
class ColAlterType(BaseAlterType):
    """String identifiers for the column-level ALTER TABLE operation kinds."""

    CHANGE_COL_DEFAULT_VAL = 'change_col_default_val'  # change a column DEFAULT
    REORDER_COL = 'reorder_col'                        # move a column's position
    ADD_COL = 'add_col'                                # add a plain column
    ADD_AUTO_INC_COL = 'add_auto_inc_col'              # add an auto-increment column
    DROP_COL = 'drop_col'                              # drop a column
    CHANGE_COL_DATA_TYPE = 'change_col_data_type'      # change a column data type
    CHANGE_NULL = 'change_null'                        # toggle NULL/NOT NULL
    CHANGE_ENUM = 'change_enum'                        # alter ENUM members
    CHANGE_SET = 'change_set'                          # alter SET members
    CHANGE_COL_CHARSET = 'change_col_charset'          # change a column charset
    CHANGE_COL_COLLATE = 'change_col_collate'          # change a column collation
    CHANGE_COL_COMMENT = 'change_col_comment'          # change a column comment
    ADD_TIMESTAMP_COL = 'add_timestamp_col'            # add a timestamp column
    CHANGE_UNSIGNED = 'change_unsigned'                # toggle UNSIGNED
def SetRotationQuaternion(kwargs: dict) -> OutgoingMessage:
    """Build the outgoing message that sets an object's rotation from a quaternion.

    Required kwargs: ``id`` (int) and ``quaternion`` (sequence of 4 floats).
    Optional kwargs: ``is_world`` (bool, default True).
    """
    compulsory_params = ['id', 'quaternion']
    optional_params = ['is_world']
    utility.CheckKwargs(kwargs, compulsory_params)
    msg = OutgoingMessage()
    msg.write_int32(kwargs['id'])
    msg.write_string('SetRotationQuaternion')
    # Serialize the 4 quaternion components in order.
    for i in range(4):
        msg.write_float32(kwargs['quaternion'][i])
    # Default to a world-space rotation when the caller does not specify.
    # (Replaces the `in kwargs.keys()` if/else with the dict.get idiom.)
    msg.write_bool(kwargs.get('is_world', True))
    return msg
def test_get_feature_names_out(df_enc_big):
    """Output feature names match for the default and top_categories settings."""
    input_features = df_enc_big.columns.tolist()

    expected_full = [
        'var_A_B', 'var_A_D', 'var_A_A', 'var_A_G', 'var_A_C', 'var_A_E', 'var_A_F',
        'var_B_A', 'var_B_D', 'var_B_B', 'var_B_G', 'var_B_C', 'var_B_E', 'var_B_F',
        'var_C_C', 'var_C_D', 'var_C_B', 'var_C_G', 'var_C_A', 'var_C_E', 'var_C_F',
    ]
    encoder = StringSimilarityEncoder()
    encoder.fit(df_enc_big)
    for features in (None, input_features):
        assert encoder.get_feature_names_out(input_features=features) == expected_full

    expected_top = ['var_A_B', 'var_B_A', 'var_C_C']
    encoder = StringSimilarityEncoder(top_categories=1)
    encoder.fit(df_enc_big)
    for features in (None, input_features):
        assert encoder.get_feature_names_out(input_features=features) == expected_top

    # Invalid inputs: a bare string, and a feature name the encoder never saw.
    with pytest.raises(ValueError):
        encoder.get_feature_names_out('var_A')
    with pytest.raises(ValueError):
        encoder.get_feature_names_out(['var_A', 'hola'])
def update_plugin_translations(plugin):
    """Extract translatable strings from a plugin and refresh its catalogs.

    Returns False when the plugin has no existing ``messages.pot``; otherwise
    runs ``pybabel extract`` followed by ``pybabel update``.
    """
    plugin_folder = current_app.pluggy.get_plugin(plugin).__path__[0]
    translations_folder = os.path.join(plugin_folder, 'translations')
    source_file = os.path.join(translations_folder, 'messages.pot')
    if not os.path.exists(source_file):
        return False
    extract_cmd = [
        'pybabel', 'extract', '-F', 'babel.cfg',
        '-k', 'lazy_gettext', '-o', source_file, plugin_folder,
    ]
    update_cmd = ['pybabel', 'update', '-i', source_file, '-d', translations_folder]
    subprocess.call(extract_cmd)
    subprocess.call(update_cmd)
class TestIsSameOrParentPathOf():
    """Behaviour of is_same_or_parent_path_of for equal, nested and sibling paths."""

    def test_should_return_true_for_same_path(self):
        # Identical paths count as "same or parent".
        result = is_same_or_parent_path_of(['parent', 'child1'], ['parent', 'child1'])
        assert result is True

    def test_should_return_true_for_parent_path_of_child(self):
        # A strict prefix is a parent.
        result = is_same_or_parent_path_of(['parent'], ['parent', 'child1'])
        assert result is True

    def test_should_return_false_for_child_path_of_parent(self):
        # The relation is not symmetric: a child is not a parent.
        result = is_same_or_parent_path_of(['parent', 'child1'], ['parent'])
        assert result is False

    def test_should_return_false_for_siblings(self):
        result = is_same_or_parent_path_of(['parent', 'child1'], ['parent', 'child2'])
        assert result is False

    def test_should_return_false_for_different_parent(self):
        result = is_same_or_parent_path_of(['parent1', 'child1'], ['parent2', 'child1'])
        assert result is False
def test_partition_of_split(loose_leaf, X):
    """get_partition_of_split equals the fraction of growable values matching the split value."""
    # Use the first entry of X as the candidate split value.
    grow_val = X[(0, 0)]
    growable_vals = loose_leaf.get_growable_vals(X=X, grow_dim=0)
    # Expected partition = mean of (growable value == split value), cast to float.
    assert torch.isclose(torch.tensor([loose_leaf.get_partition_of_split(X=X, grow_dim=0, grow_val=grow_val)]), torch.mean((growable_vals == grow_val.item()).to(torch.float), dtype=torch.float))
def merge_config_sources(user_config: dict, default_config: dict, cli_options: dict) -> dict:
    """Merge configuration sources, lowest to highest priority:
    defaults < user config < CLI options.

    CLI-only options are stripped from ``cli_options`` (mutating it in place)
    before merging, and the merged result is validated before being returned.
    """
    for opt in CLI_ONLY_OPTS:
        # pop() tolerates options absent from this invocation's CLI dict,
        # where the previous `del` raised KeyError.
        cli_options.pop(opt, None)
    config = hierarchical_merge([default_config, user_config, cli_options])
    validated_config = validate_config(config)
    return validated_config
def setup_custom_db(db, scantype, dbtype=DB_TYPE_HMM, silent=False):
    """Dispatch custom-database setup to the HMM or sequence implementation.

    :return: (dbpath, host, idmap_file) from the chosen setup function.
    :raises EmapperException: for an unrecognised dbtype.
    """
    if dbtype == DB_TYPE_HMM:
        setup_fn = setup_custom_hmmdb
    elif dbtype == DB_TYPE_SEQ:
        setup_fn = setup_custom_seqdb
    else:
        raise EmapperException(f'Unrecognized dbtype {dbtype}.')
    (dbpath, host, idmap_file) = setup_fn(db, scantype, silent)
    return (dbpath, host, idmap_file)
class TestPseudoLogicCondition():
    """Construction and simplification of PseudoLogicCondition.

    NOTE(review): the bare ``.parametrize(...)`` in the extracted source is a
    syntax error; restored as ``@pytest.mark.parametrize`` per this file's
    stripped-decorator pattern.
    """

    @pytest.mark.parametrize(
        'condition, result',
        [
            (Condition(OperationType.equal, [var_a, constant_5]), 'a,eax#3 == 5'),
            (Condition(OperationType.less_or_equal, [BinaryOperation(OperationType.plus, [var_a, constant_5]), constant_5]), "a + 0x5,['eax#3'] <= 5"),
            (Condition(OperationType.greater_or_equal_us, [BinaryOperation(OperationType.plus, [var_a, var_b]), constant_5]), "ULE(5, a + b,['eax#3', 'edx#5'])"),
        ],
    )
    def test_initialize_from_condition(self, condition, result):
        """The rendered z3 term for a pseudo-IR condition matches the expected string."""
        cond = PseudoLogicCondition.initialize_from_condition(condition, LogicCondition.generate_new_context())
        assert (str(cond) == result) and isinstance(cond, PseudoLogicCondition)

    def test_initialize_from_formula(self):
        # TODO: not implemented yet.
        pass

    @pytest.mark.parametrize(
        'term, result, string',
        [
            (PseudoLogicCondition(Not(ULE(z3_variable, BitVecVal(5, 32, context)))), PseudoLogicCondition(Not(ULE(z3_variable, BitVecVal(5, 32, context)))), "!(Extract(31, 3, |a + 0x5,['eax#3']|) == 0 & ULE(Extract(2, 0, |a + 0x5,['eax#3']|), 5))"),
            (PseudoLogicCondition(And(z3_symbol[1], Or(z3_symbol[3], Not(And(z3_symbol[4], z3_symbol[2]))), Not(And(z3_symbol[5], z3_symbol[2], Not(z3_symbol[1]))))), (PseudoLogicCondition(z3_symbol[1]) & (PseudoLogicCondition(z3_symbol[3]) | (~ (PseudoLogicCondition(z3_symbol[4]) & PseudoLogicCondition(z3_symbol[2]))))), '(x1 & (x3 | !(x4 & x2)))'),
        ],
    )
    def test_simplify(self, term, result, string):
        """Simplified terms compare equal to, and render like, the expected condition."""
        assert (term == result) and (str(term) == string) and isinstance(term, PseudoLogicCondition)
class OptionPlotoptionsXrangeSonificationContexttracksMapping(Options):
    """Sound-parameter mapping options for x-range sonification context tracks.

    NOTE(review): ``@property`` decorators restored - missing in the extracted
    source (the `text` getter/setter pair would otherwise shadow each other).
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingFrequency':
        """Frequency mapping sub-options."""
        return self._config_sub_data('frequency', OptionPlotoptionsXrangeSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingGapbetweennotes':
        """Gap-between-notes mapping sub-options."""
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsXrangeSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingHighpass':
        """High-pass filter mapping sub-options."""
        return self._config_sub_data('highpass', OptionPlotoptionsXrangeSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingLowpass':
        """Low-pass filter mapping sub-options."""
        return self._config_sub_data('lowpass', OptionPlotoptionsXrangeSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingNoteduration':
        """Note-duration mapping sub-options."""
        return self._config_sub_data('noteDuration', OptionPlotoptionsXrangeSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingPan':
        """Stereo-pan mapping sub-options."""
        return self._config_sub_data('pan', OptionPlotoptionsXrangeSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingPitch':
        """Pitch mapping sub-options."""
        return self._config_sub_data('pitch', OptionPlotoptionsXrangeSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingPlaydelay':
        """Play-delay mapping sub-options."""
        return self._config_sub_data('playDelay', OptionPlotoptionsXrangeSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingRate':
        """Rate mapping sub-options."""
        return self._config_sub_data('rate', OptionPlotoptionsXrangeSonificationContexttracksMappingRate)

    @property
    def text(self):
        """Getter for the `text` option (default None)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingTime':
        """Time mapping sub-options."""
        return self._config_sub_data('time', OptionPlotoptionsXrangeSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingTremolo':
        """Tremolo mapping sub-options."""
        return self._config_sub_data('tremolo', OptionPlotoptionsXrangeSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsXrangeSonificationContexttracksMappingVolume':
        """Volume mapping sub-options."""
        return self._config_sub_data('volume', OptionPlotoptionsXrangeSonificationContexttracksMappingVolume)
class OptionSeriesItemSonificationContexttracksMappingVolume(Options):
    """Volume mapping options for item-series sonification context tracks.

    NOTE(review): ``@property``/``@<name>.setter`` decorators restored - the
    duplicated plain ``def``s in the extracted source would otherwise shadow
    each other.
    """

    @property
    def mapFunction(self):
        """Getter for the `mapFunction` option (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Getter for the `mapTo` option (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Getter for the `max` option (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Getter for the `min` option (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Getter for the `within` option (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): bare `.parametrize(...)` restored as @pytest.mark.parametrize
# (the extracted source stripped the decorator prefix).
@pytest.mark.parametrize('session_type', [SessionType.POMODORO, SessionType.SHORT_BREAK, SessionType.LONG_BREAK])
def test_changes_session_when_button_is_clicked(session_type, session_button, session):
    """Emitting mode_changed on the button switches the session to that type."""
    session_button.widget.emit('mode_changed', session_type.value)
    refresh_gui()
    session.change.assert_called_once_with(session_type)
class TestSeriesRepr(TestData):
    """repr() of an eland Series must match the equivalent pandas Series."""

    def _assert_carrier_repr_matches(self, count):
        """Compare repr of the first `count` rows (None = all) of the Carrier column."""
        pd_s = self.pd_flights()['Carrier']
        ed_s = ed.Series(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME, 'Carrier')
        if count is not None:
            pd_s = pd_s.head(count)
            ed_s = ed_s.head(count)
        assert repr(pd_s) == repr(ed_s)

    def test_repr_flights_carrier(self):
        self._assert_carrier_repr_matches(None)

    def test_repr_flights_carrier_5(self):
        self._assert_carrier_repr_matches(5)

    def test_repr_empty_series(self):
        self._assert_carrier_repr_matches(0)

    def test_series_repr_pd_get_option_none(self):
        """reprs must still agree with show_dimensions off and max_rows unlimited."""
        show_dimensions = pd.get_option('display.show_dimensions')
        show_rows = pd.get_option('display.max_rows')
        try:
            pd.set_option('display.show_dimensions', False)
            pd.set_option('display.max_rows', None)
            ed_repr = self.ed_flights()['Cancelled'].head(40).__repr__()
            pd_repr = self.pd_flights()['Cancelled'].head(40).__repr__()
            assert ed_repr == pd_repr
        finally:
            # Always restore the global pandas display options.
            pd.set_option('display.max_rows', show_rows)
            pd.set_option('display.show_dimensions', show_dimensions)
def calculate_message_call_gas(value: U256, gas: Uint, gas_left: Uint, memory_cost: Uint, extra_gas: Uint, call_stipend: Uint=GAS_CALL_STIPEND) -> MessageCallGas:
    """Compute the gas to charge and to forward for a message call.

    :param value: wei sent with the call; non-zero value grants the stipend.
    :param gas: gas requested by the caller for the sub-call.
    :param gas_left: gas remaining in the current frame.
    :param memory_cost: gas charged for memory expansion.
    :param extra_gas: flat extra cost (e.g. account creation, value transfer).
    :param call_stipend: stipend added to the gas given to the callee.
    :return: MessageCallGas(cost charged to the caller, gas given to the callee).
    """
    # No stipend for zero-value calls.
    call_stipend = (Uint(0) if (value == 0) else call_stipend)
    if (gas_left < (extra_gas + memory_cost)):
        # Not even the overhead is affordable: charge the requested gas in
        # full (the call itself will fail upstream on out-of-gas).
        return MessageCallGas((gas + extra_gas), (gas + call_stipend))
    # Cap the forwarded gas at the maximum forwardable amount computed from
    # what remains after memory and flat costs.
    gas = min(gas, max_message_call_gas(((gas_left - memory_cost) - extra_gas)))
    return MessageCallGas((gas + extra_gas), (gas + call_stipend))
class OptionPlotoptionsTimelineMarkerStatesSelect(Options):
    """Marker options for the 'select' state of timeline series points.

    NOTE(review): ``@property``/``@<name>.setter`` decorators restored - the
    duplicated plain ``def``s in the extracted source would otherwise shadow
    each other.
    """

    @property
    def enabled(self):
        """Getter for the `enabled` option (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        """Getter for the `fillColor` option (default '#cccccc')."""
        return self._config_get('#cccccc')

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Getter for the `lineColor` option (default '#000000')."""
        return self._config_get('#000000')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Getter for the `lineWidth` option (default 2)."""
        return self._config_get(2)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Getter for the `radius` option (default None)."""
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Frequency mapping options for the default windbarb sonification instrument.

    NOTE(review): ``@property``/``@<name>.setter`` decorators restored - the
    duplicated plain ``def``s in the extracted source would otherwise shadow
    each other.
    """

    @property
    def mapFunction(self):
        """Getter for the `mapFunction` option (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Getter for the `mapTo` option (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Getter for the `max` option (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Getter for the `min` option (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Getter for the `within` option (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_sensors():
    """Read all detected lm-sensors temperature features.

    Returns a list of Sensor entries; features without a critical threshold
    are skipped, as are features that error while being read.
    """
    import sensors
    found_sensors = []

    def get_subfeature_value(chip, feature, subfeature_type):
        """Return a subfeature's value, or None when the chip lacks it."""
        # chip is now an explicit parameter (the original closed over the
        # loop variable, a latent late-binding hazard).
        subfeature = chip.get_subfeature(feature, subfeature_type)
        if subfeature:
            return chip.get_value(subfeature.number)
        return None

    for chip in sensors.get_detected_chips():
        for feature in chip.get_features():
            if feature.type != sensors.FEATURE_TEMP:
                continue
            try:
                name = chip.get_label(feature)
                # `maximum` avoids shadowing the builtin max().
                maximum = get_subfeature_value(chip, feature, sensors.SUBFEATURE_TEMP_MAX)
                current = get_subfeature_value(chip, feature, sensors.SUBFEATURE_TEMP_INPUT)
                critical = get_subfeature_value(chip, feature, sensors.SUBFEATURE_TEMP_CRIT)
            except sensors.SensorsException:
                continue
            if critical:
                found_sensors.append(Sensor(name=name, current=current, maximum=maximum, critical=critical))
    return found_sensors
class LoggingKafkaAdditional(ModelNormal):
    """Auto-generated OpenAPI model: additional settings for a Kafka logging endpoint.

    NOTE(review): decorators (@cached_property, @classmethod,
    @convert_js_args_to_python_args) were missing from the extracted source and
    have been restored to match the OpenAPI python-generator template - confirm
    against the client's model_utils module.
    """

    # Enumerations accepted for the constrained attributes.
    allowed_values = {('compression_codec',): {'None': None, 'GZIP': 'gzip', 'SNAPPY': 'snappy', 'LZ4': 'lz4', 'NULL': 'null'}, ('required_acks',): {'one': 1, 'none': 0, 'all': (- 1)}, ('auth_method',): {'PLAIN': 'plain', 'SCRAM-SHA-256': 'scram-sha-256', 'SCRAM-SHA-512': 'scram-sha-512'}}
    validations = {}

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not declared in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        """Map of attribute name -> tuple of accepted types (lazily imported)."""
        lazy_import()
        return {'topic': (str,), 'brokers': (str,), 'compression_codec': (str, none_type), 'required_acks': (int,), 'request_max_bytes': (int,), 'parse_log_keyvals': (bool,), 'auth_method': (str,), 'user': (str,), 'password': (str,), 'use_tls': (LoggingUseTls,)}

    @cached_property
    def discriminator():
        # This model takes part in no polymorphic discrimination.
        return None

    attribute_map = {'topic': 'topic', 'brokers': 'brokers', 'compression_codec': 'compression_codec', 'required_acks': 'required_acks', 'request_max_bytes': 'request_max_bytes', 'parse_log_keyvals': 'parse_log_keyvals', 'auth_method': 'auth_method', 'user': 'user', 'password': 'password', 'use_tls': 'use_tls'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate the model from raw server data (positional args rejected)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard unknown keys when configuration requests it.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes that may always be set on the instance.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Keyword-only constructor; read-only attributes may not be set here."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                # The extracted source split this f-string across lines; rejoined.
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def get_arch_info():
    """Return (capstone arch, capstone mode) for the current IDA database.

    Only the x86 family ('metapc') is recognised; any other processor
    yields (None, None).
    """
    info = idaapi.get_inf_structure()
    proc = info.procName.lower()
    bits = get_inf_structure_bitness(info)
    instruction_set = None
    instruction_mode = None
    if proc == 'metapc':
        instruction_set = CS_ARCH_X86
        # Map database bitness to the matching capstone mode.
        mode_by_bits = {16: CS_MODE_16, 32: CS_MODE_32, 64: CS_MODE_64}
        instruction_mode = mode_by_bits.get(bits)
    return (instruction_set, instruction_mode)
def test_allow_deleted_event_for_admin(db, app):
    """safe_query must return soft-deleted events when get_trashed is set."""
    deleted_event = EventFactoryBasic(deleted_at=datetime.now())
    db.session.commit()
    with app.test_request_context('?get_trashed=true'):
        fetched = safe_query(Event, 'id', deleted_event.id, 'id')
        assert fetched.id == deleted_event.id
        assert fetched == deleted_event
def test_mixed_expressions(mfunctions):
    """Assignment and augmented assignment on sub-functions of a mixed function
    space touch only the targeted component."""
    (f, one, two) = mfunctions
    # Copy one component at a time into f; the other component stays zero.
    f.sub(0).assign(one.sub(0))
    assert evaluate(f.dat.data, (1, 0))
    f.assign(0)
    f.sub(1).assign(one.sub(1))
    assert evaluate(f.dat.data, (0, 1))
    f.assign(0)
    # Assigning into a non-zero function leaves the other component at 2.
    two.sub(0).assign(one.sub(0))
    assert evaluate(two.dat.data, (1, 2))
    two.assign(2)
    two.sub(1).assign(one.sub(1))
    assert evaluate(two.dat.data, (2, 1))
    two.assign(2)
    # Arithmetic expressions between matching sub-functions.
    two.sub(0).assign((one.sub(0) + two.sub(0)))
    assert evaluate(two.dat.data, (3, 2))
    two.assign(2)
    two.sub(1).assign((two.sub(1) - one.sub(1)))
    assert evaluate(two.dat.data, (2, 1))
    two.assign(2)
    # Augmented assignment through a sub-function handle mutates the parent.
    one0 = one.sub(0)
    one0 += one.sub(0)
    assert evaluate(one.dat.data, (2, 1))
    one.assign(1)
    one1 = one.sub(1)
    one1 -= one.sub(1)
    assert evaluate(one.dat.data, (1, 0))
class TestOFPInstructionWriteMetadata(unittest.TestCase):
    """Tests for OFPInstructionWriteMetadata init, parser and serializer.

    NOTE(review): the metadata/metadata_mask class constants were garbled in
    the extracted source (`metadata = metadata_mask = fmt = ...PACK_STR` and
    dropped hex literals in the min/max tests); the values below follow the
    upstream Ryu test-suite conventions - confirm against the original file.
    """

    type_ = ofproto.OFPIT_WRITE_METADATA
    len_ = ofproto.OFP_INSTRUCTION_WRITE_METADATA_SIZE
    metadata = 0x0123456789abcdef
    metadata_mask = 0xff00ff00ff00ff00
    fmt = ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR

    def test_init(self):
        c = OFPInstructionWriteMetadata(self.metadata, self.metadata_mask)
        eq_(self.type_, c.type)
        eq_(self.len_, c.len)
        eq_(self.metadata, c.metadata)
        eq_(self.metadata_mask, c.metadata_mask)

    def _test_parser(self, metadata, metadata_mask):
        # Round-trip: pack raw bytes, parse, compare every field.
        buf = pack(self.fmt, self.type_, self.len_, metadata, metadata_mask)
        res = OFPInstructionWriteMetadata.parser(buf, 0)
        eq_(res.len, self.len_)
        eq_(res.type, self.type_)
        eq_(res.metadata, metadata)
        eq_(res.metadata_mask, metadata_mask)

    def test_parser_metadata_mid(self):
        self._test_parser(self.metadata, self.metadata_mask)

    def test_parser_metadata_max(self):
        metadata = 0xffffffffffffffff
        self._test_parser(metadata, self.metadata_mask)

    def test_parser_metadata_min(self):
        metadata = 0
        self._test_parser(metadata, self.metadata_mask)

    def test_parser_metadata_mask_max(self):
        metadata_mask = 0xffffffffffffffff
        self._test_parser(self.metadata, metadata_mask)

    def test_parser_metadata_mask_min(self):
        metadata_mask = 0
        self._test_parser(self.metadata, metadata_mask)

    def _test_serialize(self, metadata, metadata_mask):
        # Serialize an instruction and unpack the buffer to verify the wire format.
        c = OFPInstructionWriteMetadata(metadata, metadata_mask)
        buf = bytearray()
        c.serialize(buf, 0)
        res = struct.unpack(self.fmt, six.binary_type(buf))
        eq_(res[0], self.type_)
        eq_(res[1], self.len_)
        eq_(res[2], metadata)
        eq_(res[3], metadata_mask)

    def test_serialize_metadata_mid(self):
        self._test_serialize(self.metadata, self.metadata_mask)

    def test_serialize_metadata_max(self):
        metadata = 0xffffffffffffffff
        self._test_serialize(metadata, self.metadata_mask)

    def test_serialize_metadata_min(self):
        metadata = 0
        self._test_serialize(metadata, self.metadata_mask)

    def test_serialize_metadata_mask_max(self):
        metadata_mask = 0xffffffffffffffff
        self._test_serialize(self.metadata, metadata_mask)

    def test_serialize_metadata_mask_min(self):
        metadata_mask = 0
        self._test_serialize(self.metadata, metadata_mask)
def score_models(models, loader):
    """Cross-validate every model over the folds produced by `loader`.

    Yields each model's (mutable, growing) score dict after every fold; the
    dict accumulates per-fold accuracy/precision/recall/f1 and fit+predict time.
    """
    for model in models:
        classifier_name = model.named_steps['classifier'].__class__.__name__
        scores = {
            'model': str(model),
            'name': classifier_name,
            'accuracy': [],
            'precision': [],
            'recall': [],
            'f1': [],
            'time': [],
        }
        for X_train, X_test, y_train, y_test in loader:
            start = time.time()
            model.fit(X_train, y_train)
            y_pred = model.predict(X_test)
            scores['time'].append(time.time() - start)
            scores['accuracy'].append(accuracy_score(y_test, y_pred))
            # Weighted averaging accounts for class imbalance.
            for metric, metric_fn in (('precision', precision_score), ('recall', recall_score), ('f1', f1_score)):
                scores[metric].append(metric_fn(y_test, y_pred, average='weighted'))
            yield scores
class AndOp(Node):
    """Logical AND node with three-valued semantics (True / False / None=unknown)."""

    def forward(self, *args, **kwargs):
        # Accept either varargs or a single tuple of operands.
        if ((type(args[0]) is tuple) and (len(args) == 1)):
            args = args[0]
        # Three-valued AND: any definite False wins, then any unknown (None).
        # `== False` (not `is False`) is deliberate so falsy-equal values match.
        if any([(a == False) for a in args]):
            return False
        elif any([(a is None) for a in args]):
            return None
        else:
            return all([a for a in args])

    def follow(self, *v, **kwargs):
        # Broadcast the combined result to all successors.
        return fmap(('*', self.forward(*v)))

    def final(self, args, operands=None, result=None, **kwargs):
        """Decide whether this node's value is final ('fin') or still variable ('var')."""
        if result:
            # A true result is final only when every operand is final.
            if all([(a == 'fin') for a in args]):
                return 'fin'
            return 'var'
        else:
            # A false result is final as soon as one final operand is False.
            if any([((a == 'fin') and (v == False)) for (a, v) in zip(args, operands)]):
                return 'fin'
            return 'var'

    def all(*args):
        # NOTE(review): defined without self and used as AndOp.all(...) -
        # presumably intended as a static factory; confirm against callers.
        # Builds a left-nested chain of AndOp nodes over the arguments.
        if (len(args) == 0):
            return None
        elif (len(args) == 1):
            return args[0]
        else:
            return AndOp([AndOp.all(*args[:(- 1)]), args[(- 1)]])

    def token_hint(self):
        # Combine the hints from all operand nodes, keeping per-token minima.
        operand_hints = [op.token_hint() for op in self.predecessors]
        return dict_min_token_hint(operand_hints)
def _assert_valid_info(info_path):
    """Load an info YAML file and assert it carries a description plus a
    list of well-formed argument specs (per is_valid_arg)."""
    with open(info_path) as f:
        info = yaml.safe_load(f)
    assert ('desc' in info)
    assert ('args' in info)
    assert isinstance(info['args'], list)
    for arg in info['args']:
        assert is_valid_arg(arg)


def test_selectors(additionals, utils):
    """Every registered selector and analyser must ship a valid info file.

    Refactor: the selector and analyser loops were identical copy-paste;
    the shared validation now lives in _assert_valid_info.
    """
    for sel in additionals.ALL_SELECTORS:
        _assert_valid_info(utils.get_info_path('selector', sel))
    for ana in additionals.ALL_ANALYSERS:
        _assert_valid_info(utils.get_info_path('analyser', ana))
# NOTE(review): the decorator head was garbled in the source (it began with a
# bare `.parametrize`); restored as pytest.mark.parametrize, assuming pytest
# is imported at the top of this test module -- confirm.
@pytest.mark.parametrize('dens_fn, json_fn, ref_dpm', (('orca_ch4_sto3g_rhf_cis.densities', 'orca_ch4_sto3g_rhf_cis.json', (0.00613, 0.00867, (- 0.0))), ('orca_ch4_sto3g_uhf_cis.densities', 'orca_ch4_sto3g_uhf_cis.json', (0.0, 0.0, 0.0))))
def test_orca_es_densities(dens_fn, json_fn, ref_dpm):
    """Parse ORCA excited-state densities and check the CIS dipole moment
    against reference values for RHF and UHF CH4/STO-3G cases."""
    dens_dict = parse_orca_densities((WF_LIB_DIR / dens_fn))
    wf = assert_dens_mats(dens_dict, (WF_LIB_DIR / json_fn))
    cisp = dens_dict['cisp']
    dpm = wf.get_dipole_moment(cisp)
    # Loose absolute tolerance: reference dipoles are quoted to ~2e-4 a.u.
    np.testing.assert_allclose(dpm, ref_dpm, atol=0.0002)
class LFSR(Module):
    """Migen linear-feedback shift register.

    Emits `n_out` bits per clock from an `n_state`-bit shift register whose
    feedback is the inverted XOR (XNOR) of the bits at positions `taps`.
    """

    def __init__(self, n_out, n_state, taps):
        self.o = Signal(n_out)
        state = Signal(n_state)
        # Work on a per-bit list; pad with zeros when n_out > n_state.
        curval = [state[i] for i in range(n_state)]
        curval += ([0] * (n_out - n_state))
        for i in range(n_out):
            # XNOR feedback -- the inversion keeps the all-zeros state from
            # being a lock-up fixed point.
            nv = (~ reduce(xor, [curval[tap] for tap in taps]))
            curval.insert(0, nv)
            curval.pop()
        # Register the next state; combinationally expose all n_out bits.
        self.sync += state.eq(Cat(*curval[:n_state]))
        self.comb += self.o.eq(Cat(*curval))
class RedButton(DefaultObject):
    """A blinking red button behind a glass lid (Evennia object).

    Switches between open/closed lid cmdset states, blinks periodically,
    can blind a character, and its lamp can break and later self-repair.
    Per-instance db attributes override the class-level default messages.
    """

    # Descriptions for the two lid states and the broken-lamp suffix.
    desc_closed_lid = 'This is a large red button, inviting yet evil-looking. A closed glass lid protects it.'
    desc_open_lid = 'This is a large red button, inviting yet evil-looking. Its glass cover is open and the button exposed.'
    auto_close_msg = "The button's glass lid silently slides back in place."
    lamp_breaks_msg = 'The lamp flickers, the button going dark.'
    desc_add_lamp_broken = '\nThe big red button has stopped blinking for the time being.'
    blink_msgs = ['The red button flashes briefly.', 'The red button blinks invitingly.', 'The red button flashes. You know you wanna push it!']

    def at_object_creation(self):
        # Initial state: working lamp, closed lid, blink every 35 seconds.
        self.db.lamp_works = True
        self.to_closed_state()
        repeat(35, self._do_blink, persistent=True)

    def _do_blink(self):
        # Only blink when placed in a room and the lamp still works.
        if (self.location and self.db.lamp_works):
            # A db override (if set) takes precedence over the class default.
            possible_messages = (self.db.blink_msgs or self.blink_msgs)
            self.location.msg_contents(random.choice(possible_messages))

    def _set_desc(self, attrname=None):
        # Rebuild self.db.desc from the named attribute (db value wins over
        # the class default) and append the broken-lamp note if applicable.
        if attrname:
            desc = (self.attributes.get(attrname) or getattr(self, attrname))
        else:
            desc = self.db.desc
        if (not self.db.lamp_works):
            desc += (self.db.desc_add_lamp_broken or self.desc_add_lamp_broken)
        self.db.desc = desc

    def to_closed_state(self, msg=None):
        # Lid closed: swap in the closed-lid commands; optionally announce.
        self._set_desc('desc_closed_lid')
        self.cmdset.remove(LidOpenCmdSet)
        self.cmdset.add(LidClosedCmdSet, persistent=True)
        if (msg and self.location):
            self.location.msg_contents(msg)

    def to_open_state(self):
        # Lid open: swap cmdsets and schedule auto-close in 35 seconds.
        self._set_desc('desc_open_lid')
        self.cmdset.remove(LidClosedCmdSet)
        self.cmdset.add(LidOpenCmdSet, persistent=True)
        delay(35, self.to_closed_state, (self.db.auto_close_msg or self.auto_close_msg), persistent=True)

    def _unblind_target(self, caller):
        # Restore the caller's sight and tell the rest of the room.
        caller.cmdset.remove(BlindCmdSet)
        caller.msg('You blink feverishly as your eyesight slowly returns.')
        self.location.msg_contents(f'{caller.name} seems to be recovering their eyesight, blinking feverishly.', exclude=caller)

    def blind_target(self, caller):
        # Blind the caller for 20 seconds by swapping in the blind cmdset.
        caller.cmdset.add(BlindCmdSet)
        delay(20, self._unblind_target, caller, persistent=True)

    def _unbreak_lamp(self):
        # Timer callback: repair the lamp and refresh the description.
        self.db.lamp_works = True
        self._set_desc()

    def break_lamp(self):
        # Break the lamp, announce it, and schedule repair in 21 seconds.
        self.db.lamp_works = False
        self._set_desc()
        self.location.msg_contents((self.db.lamp_breaks_msg or self.lamp_breaks_msg))
        delay(21, self._unbreak_lamp)
def start_command():
    """Parse CLI flags and launch the EmbedChain Slack bot server."""
    arg_parser = argparse.ArgumentParser(description='EmbedChain SlackBot command line interface')
    arg_parser.add_argument('--host', default='0.0.0.0', help='Host IP to bind')
    arg_parser.add_argument('--port', default=5000, type=int, help='Port to bind')
    options = arg_parser.parse_args()
    bot = SlackBot()
    bot.start(host=options.host, port=options.port)
def get_latest_submission_ids_for_fiscal_year(fiscal_year: int):
    """Return the submission_ids of each agency's latest revealed submission
    for the given fiscal year.

    Builds a django-cte CTE that, per toptier_code, finds the max revealed
    reporting period (reveal date already passed), then joins back to
    SubmissionAttributes on (toptier_code, period, year).
    """
    cte = With(
        SubmissionAttributes.objects.filter(
            submission_window__submission_reveal_date__lte=now(),
            reporting_fiscal_year=fiscal_year).values('toptier_code').annotate(
                latest_fiscal_period=Max('reporting_fiscal_period')))
    submission_ids = list(
        cte.join(
            SubmissionAttributes,
            toptier_code=cte.col.toptier_code,
            reporting_fiscal_period=cte.col.latest_fiscal_period,
            reporting_fiscal_year=fiscal_year).with_cte(cte).values_list('submission_id', flat=True))
    return submission_ids
class TestChoiceField(FieldValues):
    """Valid/invalid values for `ChoiceField` (DRF FieldValues-style: the
    base class drives valid_inputs/invalid_inputs/outputs)."""

    valid_inputs = {'poor': 'poor', 'medium': 'medium', 'good': 'good'}
    invalid_inputs = {'amazing': ['"amazing" is not a valid choice.']}
    # to_representation is permissive: unknown values pass through unchanged.
    outputs = {'good': 'good', '': '', 'amazing': 'amazing'}
    field = serializers.ChoiceField(choices=[('poor', 'Poor quality'), ('medium', 'Medium quality'), ('good', 'Good quality')])

    def test_allow_blank(self):
        # allow_blank=True accepts the empty string as-is.
        field = serializers.ChoiceField(allow_blank=True, choices=[('poor', 'Poor quality'), ('medium', 'Medium quality'), ('good', 'Good quality')])
        output = field.run_validation('')
        assert (output == '')

    def test_allow_null(self):
        # allow_null=True: an empty query-string value reads as None and
        # passes validation.
        field = serializers.ChoiceField(allow_null=True, choices=[1, 2, 3])
        field.field_name = 'example'
        value = field.get_value(QueryDict('example='))
        assert (value is None)
        output = field.run_validation(None)
        assert (output is None)

    def test_iter_options(self):
        # Grouped choices iterate as start/end group markers plus options.
        field = serializers.ChoiceField(choices=[('Numbers', ['integer', 'float']), ('Strings', ['text', 'email', 'url']), 'boolean'])
        items = list(field.iter_options())
        assert items[0].start_option_group
        assert (items[0].label == 'Numbers')
        assert (items[1].value == 'integer')
        assert (items[2].value == 'float')
        assert items[3].end_option_group
        assert items[4].start_option_group
        assert (items[4].label == 'Strings')
        assert (items[5].value == 'text')
        assert (items[6].value == 'email')
        assert (items[7].value == 'url')
        assert items[8].end_option_group
        assert (items[9].value == 'boolean')

    def test_edit_choices(self):
        # Reassigning .choices after construction must update validation.
        field = serializers.ChoiceField(allow_null=True, choices=[1, 2])
        field.choices = [1]
        assert (field.run_validation(1) == 1)
        with pytest.raises(serializers.ValidationError) as exc_info:
            field.run_validation(2)
        assert (exc_info.value.detail == ['"2" is not a valid choice.'])

    def test_enum_integer_choices(self):
        from enum import IntEnum

        class ChoiceCase(IntEnum):
            first = auto()
            second = auto()
        # Enum members, their int values, and equivalent strings all validate.
        choices = [(ChoiceCase.first, '1'), (ChoiceCase.second, '2')]
        field = serializers.ChoiceField(choices=choices)
        assert (field.run_validation(1) == 1)
        assert (field.run_validation(ChoiceCase.first) == 1)
        assert (field.run_validation('1') == 1)
        choices = [(ChoiceCase.first.value, '1'), (ChoiceCase.second.value, '2')]
        field = serializers.ChoiceField(choices=choices)
        assert (field.run_validation(1) == 1)
        assert (field.run_validation(ChoiceCase.first) == 1)
        assert (field.run_validation('1') == 1)

    def test_integer_choices(self):
        # Same contract using Django's IntegerChoices.
        class ChoiceCase(IntegerChoices):
            first = auto()
            second = auto()
        choices = [(ChoiceCase.first, '1'), (ChoiceCase.second, '2')]
        field = serializers.ChoiceField(choices=choices)
        assert (field.run_validation(1) == 1)
        assert (field.run_validation(ChoiceCase.first) == 1)
        assert (field.run_validation('1') == 1)
        choices = [(ChoiceCase.first.value, '1'), (ChoiceCase.second.value, '2')]
        field = serializers.ChoiceField(choices=choices)
        assert (field.run_validation(1) == 1)
        assert (field.run_validation(ChoiceCase.first) == 1)
        assert (field.run_validation('1') == 1)

    def test_text_choices(self):
        # Same contract using Django's TextChoices (auto() yields the name).
        class ChoiceCase(TextChoices):
            first = auto()
            second = auto()
        choices = [(ChoiceCase.first, 'first'), (ChoiceCase.second, 'second')]
        field = serializers.ChoiceField(choices=choices)
        assert (field.run_validation(ChoiceCase.first) == 'first')
        assert (field.run_validation('first') == 'first')
        choices = [(ChoiceCase.first.value, 'first'), (ChoiceCase.second.value, 'second')]
        field = serializers.ChoiceField(choices=choices)
        assert (field.run_validation(ChoiceCase.first) == 'first')
        assert (field.run_validation('first') == 'first')
def upload_apk_to_virustotal(virustotal_apikey, packageName, apkName, hash, versionCode, **kwargs):
    """Check an APK against VirusTotal, uploading it when unknown.

    Results are cached as JSON under virustotal/, keyed by package, version
    code and hash; the cache file path is returned either way.
    NOTE: parameter `hash` shadows the builtin but is kept for interface
    compatibility with existing callers.
    NOTE(review): the endpoint URL strings and size thresholds were lost in
    extraction; restored to the documented VirusTotal v2 API endpoints and
    its 200MB hard / 32MB standard upload limits -- confirm upstream.
    """
    import requests
    # Quiet the HTTP libraries' per-request chatter.
    logging.getLogger('urllib3').setLevel(logging.WARNING)
    logging.getLogger('requests').setLevel(logging.WARNING)
    outputfilename = os.path.join('virustotal', (((((packageName + '_') + str(versionCode)) + '_') + hash) + '.json'))
    if os.path.exists(outputfilename):
        # Cached result: nothing to do.
        logging.debug(((apkName + ' results are in ') + outputfilename))
        return outputfilename
    repofilename = os.path.join('repo', apkName)
    logging.info((('Checking if ' + repofilename) + ' is on virustotal'))
    headers = {'User-Agent': 'F-Droid'}
    if ('headers' in kwargs):
        for (k, v) in kwargs['headers'].items():
            headers[k] = v
    data = {'apikey': virustotal_apikey, 'resource': hash}
    needs_file_upload = False
    while True:
        # Query the report by hash first; retry while rate-limited (204).
        r = requests.get(('https://www.virustotal.com/vtapi/v2/file/report?' + urllib.parse.urlencode(data)), headers=headers, timeout=300)
        if (r.status_code == 200):
            response = r.json()
            if (response['response_code'] == 0):
                # Unknown to VirusTotal: fall through to the upload path.
                needs_file_upload = True
            else:
                # Known: annotate and cache the report locally.
                response['filename'] = apkName
                response['packageName'] = packageName
                response['versionCode'] = versionCode
                if kwargs.get('versionName'):
                    response['versionName'] = kwargs.get('versionName')
                with open(outputfilename, 'w') as fp:
                    json.dump(response, fp, indent=2, sort_keys=True)
                if (response.get('positives', 0) > 0):
                    logging.warning((((((repofilename + ' has been flagged by virustotal ') + str(response['positives'])) + ' times:') + '\n\t') + response['permalink']))
            break
        if (r.status_code == 204):
            logging.warning(_('virustotal.com is rate limiting, waiting to retry...'))
            time.sleep(30)
    upload_url = None
    if needs_file_upload:
        manual_url = 'https://www.virustotal.com/'
        size = os.path.getsize(repofilename)
        if (size > 200000000):
            # Above VirusTotal's hard limit: can only be uploaded manually.
            logging.error(_('{path} more than 200MB, manually upload: {url}').format(path=repofilename, url=manual_url))
        elif (size > 32000000):
            # Above the 32MB API limit: request a special large-file URL.
            r = requests.get(('https://www.virustotal.com/vtapi/v2/file/scan/upload_url?' + urllib.parse.urlencode(data)), headers=headers, timeout=300)
            if (r.status_code == 200):
                upload_url = r.json().get('upload_url')
            elif (r.status_code == 403):
                logging.error(_(('VirusTotal API key cannot upload files larger than 32MB, ' + 'use {url} to upload {path}.')).format(path=repofilename, url=manual_url))
            else:
                r.raise_for_status()
        else:
            upload_url = 'https://www.virustotal.com/vtapi/v2/file/scan'
    if upload_url:
        logging.info(_('Uploading {apkfilename} to virustotal').format(apkfilename=repofilename))
        files = {'file': (apkName, open(repofilename, 'rb'))}
        r = requests.post(upload_url, data=data, headers=headers, files=files, timeout=300)
        logging.debug(_('If this upload fails, try manually uploading to {url}').format(url=manual_url))
        r.raise_for_status()
        response = r.json()
        logging.info(((response['verbose_msg'] + ' ') + response['permalink']))
    return outputfilename
class VideoPoll(AbstractCrudObject):
    """Graph API VideoPoll node wrapper (auto-generated FB Business SDK
    style): read/update the poll and list its options.

    All request methods follow the SDK's standard triple dispatch: add to a
    batch when `batch` is given, return the unsent request when `pending`,
    otherwise execute immediately.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isVideoPoll = True
        super(VideoPoll, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Graph API field names of a VideoPoll node.
        close_after_voting = 'close_after_voting'
        default_open = 'default_open'
        id = 'id'
        question = 'question'
        show_gradient = 'show_gradient'
        show_results = 'show_results'
        status = 'status'

    class Status():
        # Allowed values of the 'status' field.
        closed = 'closed'
        results_open = 'results_open'
        voting_open = 'voting_open'

    class Action():
        # Allowed values of the update 'action' enum parameter.
        attach_to_video = 'ATTACH_TO_VIDEO'
        close = 'CLOSE'
        delete_poll = 'DELETE_POLL'
        show_results = 'SHOW_RESULTS'
        show_voting = 'SHOW_VOTING'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET this VideoPoll node, refreshing this object in place."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=VideoPoll, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """POST an update to this VideoPoll node (action + poll settings)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'action': 'action_enum', 'close_after_voting': 'bool', 'default_open': 'bool', 'show_gradient': 'bool', 'show_results': 'bool'}
        enums = {'action_enum': VideoPoll.Action.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=VideoPoll, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_poll_options(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the /poll_options edge of this VideoPoll."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/poll_options', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Declared field -> type mapping used by the SDK's type checker.
    _field_types = {'close_after_voting': 'bool', 'default_open': 'bool', 'id': 'string', 'question': 'string', 'show_gradient': 'bool', 'show_results': 'bool', 'status': 'Status'}

    def _get_field_enum_info(cls):
        # NOTE(review): takes `cls` but carries no @classmethod decorator --
        # presumably stripped in extraction; confirm against the generated
        # SDK source.
        field_enum_info = {}
        field_enum_info['Status'] = VideoPoll.Status.__dict__.values()
        field_enum_info['Action'] = VideoPoll.Action.__dict__.values()
        return field_enum_info
def default_requires():
    """Populate setup_spec.extras['default'] with the core runtime deps plus
    the framework/knowledge/torch/quantization/cache extras groups."""
    extras = setup_spec.extras
    extras['default'] = ['tokenizers>=0.14', 'accelerate>=0.20.3', 'protobuf==3.20.3', 'zhipuai', 'dashscope', 'chardet']
    for group in ('framework', 'knowledge', 'torch', 'quantization', 'cache'):
        extras['default'] += extras[group]
# NOTE(review): the decorator line was garbled in the source (a bare string
# expression remained); restored as the AITemplate backend registry
# decorator -- confirm against upstream.
@registry.reg('cuda.perm021fc_crc_bias.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate the CUDA kernel source for the perm021fc_crc_bias op.

    func_attrs: op attribute dict (input/output accessors, optional 'alpha').
    exec_cond_template: template guarding kernel dispatch conditions.
    dim_info_dict: mapping used to emit shape-calculation code.
    """
    # alpha defaults to 1 when the op does not carry a scaling factor.
    problem_args = bmm_common.PROBLEM_ARGS_TEMPLATE.render(mm_info=_get_problem_info(alpha_value=func_attrs.get('alpha', 1)))
    # Ranks of the two inputs and the output drive template specialization.
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    output_ndims = len(func_attrs['output_accessors'][0].original_shapes)
    return common.gen_function(func_attrs, common_bias.SRC_TEMPLATE, exec_cond_template, problem_args, input_ndims=input_ndims, weight_ndims=weight_ndims, output_ndims=output_ndims, dim_info_dict=dim_info_dict)
class TestUtils(IsolatedAsyncioTestCase):
    """Unit tests for private-computation container/env-var utilities."""

    def setUp(self) -> None:
        # Shared fixtures: identifiers plus the env-var names used for the
        # TLS server private-key reference.
        self.instance_id = 'test_instance_123'
        self.server_domain = 'study123.pci.facebook.com'
        self.server_key_ref_env_var_name = SERVER_PRIVATE_KEY_REF_ENV_VAR
        self.server_key_region_env_var_name = SERVER_PRIVATE_KEY_REGION_ENV_VAR
        self.server_key_install_path_env_var_name = SERVER_PRIVATE_KEY_PATH_ENV_VAR

    def test_distribute_files_among_containers(self) -> None:
        # Property-style test over random sizes: the distribution must
        # conserve the file total and stay balanced within one file.
        test_size = random.randint(10, 20)
        test_number_files = []
        test_number_conatiners = []
        for _ in range(test_size):
            test_number_files.append(random.randint(1, 100))
            test_number_conatiners.append(random.randint(1, 50))
        test_files_per_conatiners = [distribute_files_among_containers(test_number_files[i], test_number_conatiners[i]) for i in range(test_size)]
        for i in range(test_size):
            self.assertEqual(test_number_files[i], sum(test_files_per_conatiners[i]))
            self.assertLessEqual((max(test_files_per_conatiners[i]) - min(test_files_per_conatiners[i])), 1)

    def test_get_server_uris(self) -> None:
        # Only the PUBLISHER role with a domain yields per-node hostnames.
        expected_result_1 = ['node0.study123.pci.facebook.com', 'node1.study123.pci.facebook.com']
        expected_result_2 = None
        actual_result_1 = gen_tls_server_hostnames_for_publisher(self.server_domain, PrivateComputationRole.PUBLISHER, 2)
        actual_result_2 = gen_tls_server_hostnames_for_publisher(self.server_domain, PrivateComputationRole.PARTNER, 2)
        actual_result_3 = gen_tls_server_hostnames_for_publisher(None, PrivateComputationRole.PUBLISHER, 2)
        self.assertEqual(expected_result_1, actual_result_1)
        self.assertEqual(expected_result_2, actual_result_2)
        self.assertEqual(expected_result_2, actual_result_3)

    def test_generate_env_vars_missing_parameters(self) -> None:
        # With no arguments, none of the optional env vars may appear.
        result = generate_env_vars_dict()
        self.assertFalse(('SERVER_HOSTNAME' in result))
        self.assertFalse(('IP_ADDRESS' in result))
        self.assertFalse((self.server_key_ref_env_var_name in result))
        self.assertFalse((self.server_key_region_env_var_name in result))
        self.assertFalse((self.server_key_install_path_env_var_name in result))

    def test_generate_env_vars_server_hostname_no_ip(self) -> None:
        # Hostname without an IP must be dropped (only valid as a pair).
        key_name = 'SERVER_HOSTNAME'
        result = generate_env_vars_dict(server_hostname='test_hostname')
        self.assertFalse((key_name in result))

    def test_generate_env_vars_server_ip_no_hostname(self) -> None:
        # IP without a hostname must likewise be dropped.
        key_name = 'IP_ADDRESS'
        result = generate_env_vars_dict(server_ip_address='127.0.0.1')
        self.assertFalse((key_name in result))

    def test_generate_env_vars_both_server_addresses(self) -> None:
        # Supplying both IP and hostname keeps both entries.
        expected_ip = '127.0.0.1'
        expected_ip_key_name = 'IP_ADDRESS'
        expected_hostname = 'test_hostname'
        expected_hostname_key_name = 'SERVER_HOSTNAME'
        result = generate_env_vars_dict(server_ip_address=expected_ip, server_hostname=expected_hostname)
        self.assertTrue((expected_ip_key_name in result))
        self.assertEqual(expected_ip, result[expected_ip_key_name])
        self.assertTrue((expected_hostname_key_name in result))
        self.assertEqual(expected_hostname, result[expected_hostname_key_name])

    def test_generate_env_vars_dicts_list(self) -> None:
        # One env dict per container, zipping ips/hostnames positionally;
        # mismatched list lengths must raise ValueError.
        num_containers = 2
        server_ip_addresses = ['test_ip_1', 'test_ip_2']
        server_ip_addresses_invalid = ['test_ip_1']
        server_hostnames = ['test_hostname_1', 'test_hostname_2']
        repository_path = 'test_path'
        server_cert = 'test_server_cert'
        ca_cert = 'test_ca_certificate'
        cert_path = 'test_path'
        server_private_key_resource_id = 'test_key1'
        server_private_key_region = 'test-region'
        server_private_key_install_path = 'test/path'
        expected_result = [{ONEDOCKER_REPOSITORY_PATH: repository_path, SERVER_CERTIFICATE_ENV_VAR: server_cert, SERVER_CERTIFICATE_PATH_ENV_VAR: cert_path, CA_CERTIFICATE_ENV_VAR: ca_cert, CA_CERTIFICATE_PATH_ENV_VAR: cert_path, SERVER_IP_ADDRESS_ENV_VAR: 'test_ip_1', SERVER_HOSTNAME_ENV_VAR: 'test_hostname_1', SERVER_PRIVATE_KEY_REF_ENV_VAR: server_private_key_resource_id, SERVER_PRIVATE_KEY_REGION_ENV_VAR: server_private_key_region, SERVER_PRIVATE_KEY_PATH_ENV_VAR: server_private_key_install_path}, {ONEDOCKER_REPOSITORY_PATH: repository_path, SERVER_CERTIFICATE_ENV_VAR: server_cert, SERVER_CERTIFICATE_PATH_ENV_VAR: cert_path, CA_CERTIFICATE_ENV_VAR: ca_cert, CA_CERTIFICATE_PATH_ENV_VAR: cert_path, SERVER_IP_ADDRESS_ENV_VAR: 'test_ip_2', SERVER_HOSTNAME_ENV_VAR: 'test_hostname_2', SERVER_PRIVATE_KEY_REF_ENV_VAR: server_private_key_resource_id, SERVER_PRIVATE_KEY_REGION_ENV_VAR: server_private_key_region, SERVER_PRIVATE_KEY_PATH_ENV_VAR: server_private_key_install_path}]
        # Certificate providers are mocked; only get_certificate() is used.
        server_certificate_provider = MagicMock()
        server_certificate_provider.get_certificate.return_value = server_cert
        ca_certificate_provider = MagicMock()
        ca_certificate_provider.get_certificate.return_value = ca_cert
        server_key_ref_provider = StaticPrivateKeyReferenceProvider(server_private_key_resource_id, server_private_key_region, server_private_key_install_path)
        result = generate_env_vars_dicts_list(num_containers=num_containers, repository_path=repository_path, server_certificate_provider=server_certificate_provider, server_certificate_path='test_path', ca_certificate_provider=ca_certificate_provider, ca_certificate_path='test_path', server_ip_addresses=server_ip_addresses, server_hostnames=server_hostnames, server_private_key_ref_provider=server_key_ref_provider)
        self.assertEqual(result, expected_result)
        with self.assertRaises(ValueError) as e:
            generate_env_vars_dicts_list(num_containers=num_containers, repository_path=repository_path, server_certificate_provider=server_certificate_provider, server_certificate_path='test_path', ca_certificate_provider=ca_certificate_provider, ca_certificate_path='test_path', server_ip_addresses=server_ip_addresses_invalid, server_hostnames=server_hostnames, server_private_key_ref_provider=server_key_ref_provider)
        # NOTE(review): this literal contains un-substituted {braces} and a
        # 'contaienrs' typo; it was presumably an f-string matching the
        # production error message -- confirm against the implementation.
        self.assertIn('num_contaienrs 2; {SERVER_IP_ADDRESS_ENV_VAR} 1', str(e.exception))

    def test_generate_env_vars_null_server_key_ref(self) -> None:
        # A null key-reference provider contributes no key env vars.
        result = generate_env_vars_dict(server_private_key_ref_provider=NullPrivateKeyReferenceProvider())
        self.assertFalse((self.server_key_ref_env_var_name in result))
        self.assertFalse((self.server_key_region_env_var_name in result))
        self.assertFalse((self.server_key_install_path_env_var_name in result))

    def test_generate_env_vars_server_key_ref(self) -> None:
        # A static key-reference provider fills all three key env vars.
        expected_resource_id = '12345'
        expected_region = 'test-region'
        expected_install_path = 'test/path'
        key_ref_provider = StaticPrivateKeyReferenceProvider(resource_id=expected_resource_id, region=expected_region, install_path=expected_install_path)
        result = generate_env_vars_dict(server_private_key_ref_provider=key_ref_provider)
        self.assertEqual(expected_resource_id, result[self.server_key_ref_env_var_name])
        self.assertEqual(expected_region, result[self.server_key_region_env_var_name])
        self.assertEqual(expected_install_path, result[self.server_key_install_path_env_var_name])
def _compile_context_struct(configs, lib_name): if (not configs): return ('void', []) ctxt_name = f'{lib_name}_Context' ctxt_def = [f'typedef struct {ctxt_name} {{ ', f''] seen = set() for c in sorted(configs, key=(lambda x: x.name())): name = c.name() if (name in seen): raise TypeError(f'multiple configs named {name}') seen.add(name) if c.is_allow_rw(): sdef_lines = c.c_struct_def() sdef_lines = [f' {line}' for line in sdef_lines] ctxt_def += sdef_lines ctxt_def += [''] else: ctxt_def += [f"// config '{name}' not materialized", ''] ctxt_def += [f'}} {ctxt_name};'] return (ctxt_name, ctxt_def)
class SpeedValue(object):
    """Base class for speed quantities, ordered by their value in native
    units.

    Subclasses are expected to provide to_native_units() and __mul__();
    all rich comparisons and right-multiplication are derived here.
    """

    def __eq__(self, other):
        mine, theirs = self.to_native_units(), other.to_native_units()
        return mine == theirs

    def __ne__(self, other):
        # Defined via __eq__ so subclass equality overrides propagate.
        return not self.__eq__(other)

    def __lt__(self, other):
        mine, theirs = self.to_native_units(), other.to_native_units()
        return mine < theirs

    def __le__(self, other):
        mine, theirs = self.to_native_units(), other.to_native_units()
        return mine <= theirs

    def __gt__(self, other):
        mine, theirs = self.to_native_units(), other.to_native_units()
        return mine > theirs

    def __ge__(self, other):
        mine, theirs = self.to_native_units(), other.to_native_units()
        return mine >= theirs

    def __rmul__(self, other):
        # Delegate `scalar * speed` to the subclass-provided __mul__.
        return self.__mul__(other)
# NOTE(review): the route decorator head was garbled in the source (only the
# argument tuple remained); restored assuming the conventional Flask app
# object name -- confirm against the rest of this module.
@app.route('/get-auth', methods=['POST'])
def get_auth_cookie():
    """Issue the auth cookie when the posted password matches.

    SECURITY(review): the password is a hard-coded plaintext literal and
    the comparison is not constant-time; replace with a real credential
    check before shipping.
    """
    req = request.get_json()
    if req['pass'] == '1234':
        res = make_response(jsonify({'auth': str(auth)}))
        res.set_cookie('auth', str(auth))
    else:
        # Unauthorized: 401 plus an explicitly cleared cookie.
        res = make_response(jsonify({'erro': 'nao autorizado'}), 401)
        res.set_cookie('auth', '0')
    return res
class HFConfigKeys():
    """Mapping between HuggingFace Falcon config keys and the curated
    FalconConfig representation, expressed as HFConfigKey entries whose
    converters read the curated config."""

    # NOTE(review): the conv_* helpers take `config` (no self) and are
    # referenced as HFConfigKeys.conv_*(c) -- presumably @staticmethod
    # decorators were stripped in extraction; calling via the class still
    # works in Python 3. Confirm against the original source.

    def conv_multi_query(config: FalconConfig) -> bool:
        # 'multi_query' holds whenever query and key/value head counts differ.
        return (config.layer.attention.n_query_heads != config.layer.attention.n_key_value_heads)

    def conv_n_attention_query_heads(config: FalconConfig) -> int:
        return config.layer.attention.n_query_heads

    def conv_n_attention_keyvalue_heads(config: FalconConfig) -> int:
        return config.layer.attention.n_key_value_heads

    def conv_use_bias(config: FalconConfig) -> bool:
        return config.layer.feedforward.use_bias

    def conv_use_alibi(config: FalconConfig) -> bool:
        return config.layer.attention.use_alibi

    def conv_use_parallel_attention(config: FalconConfig) -> bool:
        return config.layer.attention.use_parallel_attention

    def conv_new_decoder_architecture(config: FalconConfig) -> bool:
        return config.new_decoder_architecture

    # HFConfigKey(hf_name, curated_name, curated->HF converter). Note that
    # several HF checkpoint generations spell the same concept differently
    # (n_head_kv vs num_kv_heads, n_head vs num_attention_heads), hence the
    # duplicate entries sharing one converter.
    N_LAYER = HFConfigKey('n_layer', 'n_hidden_layers', (lambda c: CommonCuratedToHFConverters.n_hidden_layers(c)))
    N_HEAD = HFConfigKey('n_head', 'n_query_heads', (lambda c: HFConfigKeys.conv_n_attention_query_heads(c)))
    N_HEAD_KV = HFConfigKey('n_head_kv', 'n_key_value_heads', (lambda c: HFConfigKeys.conv_n_attention_keyvalue_heads(c)))
    NUM_HEAD_KV = HFConfigKey('num_kv_heads', 'n_key_value_heads', (lambda c: HFConfigKeys.conv_n_attention_keyvalue_heads(c)))
    NUM_ATTENTION_HEADS = HFConfigKey('num_attention_heads', 'n_query_heads', (lambda c: HFConfigKeys.conv_n_attention_query_heads(c)))
    LAYER_NORM_EPSILON = HFConfigKey('layer_norm_epsilon', 'layer_norm_eps', (lambda c: CommonCuratedToHFConverters.layer_norm_eps(c)))
    BIAS = HFConfigKey('bias', 'use_bias', (lambda c: HFConfigKeys.conv_use_bias(c)))
    ALIBI = HFConfigKey('alibi', 'use_alibi', (lambda c: HFConfigKeys.conv_use_alibi(c)))
    PARALLEL_ATTN = HFConfigKey('parallel_attn', 'use_parallel_attention', (lambda c: HFConfigKeys.conv_use_parallel_attention(c)))
    NEW_DECODER_ARCHITECTURE = HFConfigKey('new_decoder_architecture', 'new_decoder_architecture', (lambda c: HFConfigKeys.conv_new_decoder_architecture(c)))
    MULTI_QUERY = HFConfigKey('multi_query', 'multi_query', (lambda c: HFConfigKeys.conv_multi_query(c)))
def test_get_latest_comparisons(backend_db, comparison_db):
    """Paged comparison results must reference both firmwares (in hid and
    in the comparison id) and carry a submission time inside the test
    window."""
    started_at = time()
    fw_one, fw_two, _, _ = _add_comparison(comparison_db, backend_db)
    page = comparison_db.page_comparison_results(limit=10)
    for comparison_id, hid, submission_date in page:
        for uid in (fw_one.uid, fw_two.uid):
            assert uid in hid
            assert uid in comparison_id
        assert started_at <= submission_date <= time()
def extractAlicrowCom(item):
    """Map an alicrow.com feed item to a release message.

    Returns None for previews or items with no chapter/volume info, a
    release message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, series, release_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
def main():
    """Drive dwarfdump comparison tests over the pyelftools test corpus.

    Returns a process exit code: 0 when all tests pass, 1 otherwise.
    Idiom fixes: `not args.keep_going` instead of `== False`; corrected
    the '--opt' help-text typo ("one one" -> "to one").
    """
    if not is_in_rootdir():
        testlog.error('Error: Please run me from the root dir of pyelftools!')
        return 1
    argparser = argparse.ArgumentParser(
        usage='usage: %(prog)s [options] [file] [file] ...',
        prog='run_dwarfdump_tests.py')
    argparser.add_argument('files', nargs='*', help='files to run tests on')
    argparser.add_argument(
        '--parallel', action='store_true',
        help='run tests in parallel; always runs all tests w/o verbose')
    argparser.add_argument(
        '-V', '--verbose', action='store_true', dest='verbose',
        help='verbose output')
    argparser.add_argument(
        '-k', '--keep-going', action='store_true', dest='keep_going',
        help="Run all tests, don't stop at the first failure")
    argparser.add_argument(
        '--opt', action='store', dest='opt', metavar='<dwarfdump-option>',
        help='Limit the test to one dwarfdump option.')
    args = argparser.parse_args()
    if args.parallel:
        # Parallel mode silently overrides -V and implies keep-going.
        if args.verbose or not args.keep_going:
            print('WARNING: parallel mode disables verbosity and always keeps going')
    if args.verbose:
        testlog.info('Running in verbose mode')
        testlog.info('Python executable = %s' % sys.executable)
        testlog.info('dwarfdump path = %s' % DWARFDUMP_PATH)
        testlog.info('Given list of files: %s' % args.files)
    if len(args.files) > 0:
        filenames = args.files
    else:
        filenames = sorted(discover_testfiles('test/testfiles_for_dwarfdump'))
    if len(filenames) > 1 and args.parallel:
        # Pool.map passes only the filename; verbose/opt use their defaults.
        pool = Pool()
        results = pool.map(run_test_on_file, filenames)
        failures = results.count(False)
    else:
        failures = 0
        for filename in filenames:
            if not run_test_on_file(filename, args.verbose, args.opt):
                failures += 1
                if not args.keep_going:
                    break
    if failures == 0:
        testlog.info('\nConclusion: SUCCESS')
        return 0
    elif args.keep_going:
        testlog.info('\nConclusion: FAIL ({}/{})'.format(failures, len(filenames)))
        return 1
    else:
        testlog.info('\nConclusion: FAIL')
        return 1
class Strategy(Model):
    """Search strategy for a location-based service-discovery skill (AEA).

    Holds the agent location, search radius and key/value query parameters,
    and builds queries for locating matching services.
    """

    def __init__(self, **kwargs: Any) -> None:
        # Pop strategy-specific kwargs before handing the rest to Model.
        self._search_query = kwargs.pop('search_query', DEFAULT_SEARCH_QUERY)
        location = kwargs.pop('location', DEFAULT_LOCATION)
        self._agent_location = Location(latitude=location['latitude'], longitude=location['longitude'])
        self._radius = kwargs.pop('search_radius', DEFAULT_SEARCH_RADIUS)
        ledger_id = kwargs.pop('ledger_id', None)
        super().__init__(**kwargs)
        # Fall back to the agent context's default ledger when none given.
        self._ledger_id = (ledger_id if (ledger_id is not None) else self.context.default_ledger_id)
        self.is_searching = True
        self._contract_id = str(CONTRACT_ID)

    def ledger_id(self) -> str:
        # NOTE(review): reads like an accessor; a @property decorator was
        # presumably stripped in extraction -- confirm with the skill source.
        return self._ledger_id

    def contract_id(self) -> str:
        # NOTE(review): same presumed-@property situation as ledger_id.
        return self._contract_id

    def get_location_and_service_query(self) -> Query:
        """Build a query constraining both distance to the agent and the
        configured service key/value."""
        close_to_my_service = Constraint('location', ConstraintType('distance', (self._agent_location, self._radius)))
        service_key_filter = Constraint(self._search_query['search_key'], ConstraintType(self._search_query['constraint_type'], self._search_query['search_value']))
        query = Query([close_to_my_service, service_key_filter])
        return query

    def get_service_query(self) -> Query:
        """Build a service-key-only query against the simple service model."""
        service_key_filter = Constraint(self._search_query['search_key'], ConstraintType(self._search_query['constraint_type'], self._search_query['search_value']))
        query = Query([service_key_filter], model=SIMPLE_SERVICE_MODEL)
        return query
def extractSupermeganetCom(item):
    """Map a supermeganet.com feed item to a release message (None for
    previews/non-chapters, False when no known tag matches)."""
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    has_chapter_info = bool(chp or vol)
    if (not has_chapter_info) or ('preview' in title.lower()):
        return None
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def clean(path, mod_names=None):
    """Remove Cython build artifacts (.so/.html/.cpp/.c) for each module.

    path: root Path of the package tree.
    mod_names: iterable of dotted module names to clean; defaults to the
        module-level MOD_NAMES (backward-compatible generalization).
    """
    if mod_names is None:
        mod_names = MOD_NAMES
    n_cleaned = 0
    for name in mod_names:
        # Dotted module name -> relative file path under `path`.
        name = name.replace('.', '/')
        for ext in ['so', 'html', 'cpp', 'c']:
            file_path = path / f'{name}.{ext}'
            if file_path.exists():
                file_path.unlink()
                n_cleaned += 1
    print(f'Cleaned {n_cleaned} files')
class AvailabilityCmd():
    """Shell command that prints the availability (metadata tree) of a GRIB
    dataset file or directory."""

    # NOTE(review): the decorator head was garbled in the source (a bare
    # `_args(` call remained); restored as the @parse_args decorator used by
    # this command framework -- confirm against upstream.
    @parse_args(
        source=(None, dict(type=str, help='File or directory for describing a dataset or source of data with GRIB data.')),
        stdout=dict(action='store_true', help='Output to stdout (no file).'),
        yaml=dict(action='store_true', help='Output yaml format.'),
        keys=('--keys', dict(type=str, help=f"Keys to use in availability default is ({','.join(default_keys)}).", nargs='+')))
    def do_availability(self, args):
        """Compute and print the availability for args.source."""
        keys = args.keys
        # Normalize CLI sentinels: [] -> defaults, ['*'] -> all keys (None).
        if keys == []:
            keys = default_keys
        if keys == ['*']:
            keys = None
        self.avail = None
        path = args.source
        if not os.path.exists(path):
            print(f'{path} does not exists.')
            return
        # A directory aggregates availability over its files.
        if os.path.isdir(path):
            source = availability_of_directory(path)
        else:
            source = availability_of_file(path)
        availability = source._custom_availability(keys=keys)
        if args.yaml:
            print(availability.to_yaml())
            return
        if args.stdout:
            print(availability.tree())
            return
class DeviceForm(ModelForm):
    """ModelForm for Device records; applies Bootstrap's 'form-control'
    class to every widget except the 'enable' checkbox."""

    required_css_class = 'required'

    class Meta():
        model = Device
        fields = [
            'name', 'description',
            'field_1', 'field_2', 'field_3', 'field_4', 'field_5',
            'field_6', 'field_7', 'field_8', 'field_9', 'field_10',
            'enable'
        ]

    def __init__(self, *args, **kwargs):
        super(DeviceForm, self).__init__(*args, **kwargs)
        for field_name, bound_field in self.fields.items():
            if field_name not in ['enable']:
                bound_field.widget.attrs['class'] = 'form-control'
class GUID(ct.Structure):
    """ctypes wrapper for a Windows GUID, bridging to the COM string/ProgID
    conversion APIs (_CLSIDFromString and friends, bound elsewhere).

    NOTE(review): cmp/basestring/__nonzero__ are Python 2 constructs; this
    class appears to target 2/3 compatibility -- confirm.
    """

    _fields_ = [('Data1', DWORD), ('Data2', WORD), ('Data3', WORD), ('Data4', (BYTE * 8))]

    def __init__(self, name=None):
        # Optionally parse a "{...}" GUID string into this structure.
        if (name is not None):
            _CLSIDFromString(str(name), ct.byref(self))

    def __repr__(self):
        return ('GUID("%s")' % str(self))

    def __str__(self):
        # StringFromCLSID allocates with CoTaskMemAlloc; free after copying.
        p = ct.c_wchar_p()
        _StringFromCLSID(ct.byref(self), ct.byref(p))
        result = p.value
        _CoTaskMemFree(p)
        return result

    def __cmp__(self, other):
        # Python 2 ordering hook; non-GUIDs always compare "less".
        if isinstance(other, GUID):
            return cmp(bytes(self), bytes(other))
        return (- 1)

    def __nonzero__(self):
        # Truthiness: anything but the null GUID (module-level sentinel).
        return (self != GUID_null)

    def __eq__(self, other):
        return (isinstance(other, GUID) and (bytes(self) == bytes(other)))

    def __hash__(self):
        # Hash raw bytes so equal GUIDs hash equally (usable in dict/set).
        return hash(bytes(self))

    def copy(self):
        return GUID(str(self))

    def from_progid(cls, progid):
        # NOTE(review): takes `cls` -- presumably @classmethod, decorator
        # stripped in extraction; same applies to create_new below. Confirm.
        # Resolves a ProgID string / COM class / GUID into a GUID instance.
        if hasattr(progid, '_reg_clsid_'):
            progid = progid._reg_clsid_
        if isinstance(progid, cls):
            return progid
        elif isinstance(progid, basestring):
            # A leading '{' means it is already a GUID string.
            if progid.startswith('{'):
                return cls(progid)
            inst = cls()
            _CLSIDFromProgID(str(progid), ct.byref(inst))
            return inst
        else:
            raise TypeError(('Cannot construct guid from %r' % progid))

    def as_progid(self):
        # Reverse mapping: GUID -> ProgID (COM-allocated string, freed here).
        progid = ct.c_wchar_p()
        _ProgIDFromCLSID(ct.byref(self), ct.byref(progid))
        result = progid.value
        _CoTaskMemFree(progid)
        return result

    def create_new(cls):
        # NOTE(review): presumably @classmethod (see from_progid).
        guid = cls()
        _CoCreateGuid(ct.byref(guid))
        return guid
def extractNeverlandtranslationsWordpressCom(item):
    """Release-message extractor for neverlandtranslations.wordpress.com.

    Returns None for previews or items with no chapter/volume number, a
    release message for a recognised tag, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (release name, translation type)
    tag_dispatch = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_dispatch.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_back_edges():
    """Check ClassifiedGraph.back_edges() as edges are added one at a time."""
    DG = ClassifiedGraph()
    # Build the initial DAG-with-one-cycle in a single pass.
    for src, dst in ((0, 1), (0, 3), (1, 2), (2, 0), (1, 3), (3, 4), (2, 4)):
        DG.add_edge(BasicEdge(v[src], v[dst]))
    # Only the edge 2->0 closes a cycle back to v0.
    assert DG.back_edges() == {v[0]: {DG.get_edge(v[2], v[0])}}
    # A second edge into v0 joins the same back-edge set.
    DG.add_edge(BasicEdge(v[3], v[0]))
    assert DG.back_edges() == {v[0]: {DG.get_edge(v[2], v[0]), DG.get_edge(v[3], v[0])}}
    # A disjoint two-node cycle introduces a back edge at v4.
    DG.add_edge(BasicEdge(v[4], v[5]))
    DG.add_edge(BasicEdge(v[5], v[4]))
    assert DG.back_edges() == {v[0]: {DG.get_edge(v[2], v[0]), DG.get_edge(v[3], v[0])},
                               v[4]: {DG.get_edge(v[5], v[4])}}
    # 4->1 creates a larger cycle but does not change the classification.
    DG.add_edge(BasicEdge(v[4], v[1]))
    assert DG.back_edges() == {v[0]: {DG.get_edge(v[2], v[0]), DG.get_edge(v[3], v[0])},
                               v[4]: {DG.get_edge(v[5], v[4])}}
    # 2->5 changes the traversal so 5->4 is no longer classified as back.
    DG.add_edge(BasicEdge(v[2], v[5]))
    assert DG.back_edges() == {v[0]: {DG.get_edge(v[2], v[0]), DG.get_edge(v[3], v[0])}}
def delete_internal(sess, ids):
    """Delete WebPages rows (and their version-table rows) by id, in chunks.

    Fixes over the original: "Doint delete." typo in the progress message,
    and the three identical retry handlers are merged into one clause that
    reproduces the per-exception message text.

    :param sess: SQLAlchemy session.
    :param ids: list of WebPages primary keys to delete.
    """
    if ids:
        print('Doing delete. %s rows requiring update.' % (len(ids),))
    else:
        print('No rows needing deletion.')
        return
    ctbl = version_table(db.WebPages.__table__)
    chunk_size = 5000
    for chunk_idx in range(0, len(ids), chunk_size):
        chunk = ids[chunk_idx:(chunk_idx + chunk_size)]
        # Retry forever on transient transaction errors (original behavior).
        while 1:
            try:
                # Log one example URL from the chunk, falling back to the
                # version table when the live row is already gone.
                try:
                    ex = sess.query(db.WebPages.url).filter(db.WebPages.id == chunk[0]).one()[0]
                except sqlalchemy.orm.exc.NoResultFound:
                    ex = sess.query(ctbl.c.url).filter(ctbl.c.id == chunk[0]).all()[0][0]
                print("Example removed URL: '%s'" % ex)
                q1 = sess.query(db.WebPages).filter(db.WebPages.id.in_(chunk))
                affected_rows_main = q1.delete(synchronize_session=False)
                q2 = sess.query(ctbl).filter(ctbl.c.id.in_(chunk))
                affected_rows_ver = q2.delete(synchronize_session=False)
                sess.commit()
                print('Deleted %s rows (%s version table rows). %0.2f%% done.' % (
                    affected_rows_main, affected_rows_ver, (100 * (chunk_idx / len(ids)))))
                break
            except (sqlalchemy.exc.InternalError,
                    sqlalchemy.exc.OperationalError,
                    sqlalchemy.exc.IntegrityError) as e:
                # Same message text as the three original clauses, derived
                # from the exception type name.
                print('Transaction error (sqlalchemy.exc.%s). Retrying.' % type(e).__name__)
                sess.rollback()
            except sqlalchemy.exc.InvalidRequestError:
                print('Transaction error (sqlalchemy.exc.InvalidRequestError). Retrying.')
                traceback.print_exc()
                sess.rollback()
def create_event_sub_topics():
    """Seed the EventSubTopic table from a hard-coded topic -> sub-topics map.

    Improvement over the original: the linear ``next(...)`` scan over the
    fetched EventTopic rows is hoisted out of the inner loop, so it runs once
    per topic instead of once per sub-topic.  The error behavior is
    unchanged: a topic name missing from the table still raises
    StopIteration from ``next``.
    """
    event_sub_topic = {'Film, Media & Entertainment': ['Comedy', 'Gaming', 'Anime'], 'Community & Culture': ['City/Town', 'Other', 'LGBT'], 'Home & Lifestyle': ['Dating', 'Home & Garden'], 'Sports & Fitness': ['Volleyball', 'Other'], 'Health & Wellness': ['Yoga', 'Medical'], 'Food & Drink': ['Other', 'Food', 'Beer'], 'Other': ['Avatar', 'Logo'], 'Science & Technology': ['Robotics', 'Other', 'High Tech', 'Science', 'Social Media', 'Medicine', 'Mobile', 'Biotech'], 'Music': ['Cultural', 'Pop', 'Top 40', 'EDM / Electronic', 'R&B', 'Other', 'Classical'], 'Performing & Visual Arts': ['Craft', 'Comedy', 'Fine Art', 'Orchestra'], 'Family & Education': ['Education', 'Baby', 'Reunion'], 'Business & Professional': ['Career', 'Startups & Small Business', 'Educators', 'Design', 'Finance'], 'Charity & Causes': ['Education', 'Other', 'Environment'], 'Hobbies & Special Interest': ['Other', 'Anime/Comics'], 'Seasonal & Holiday': ['Easter', 'Other'], 'Auto, Boat & Air': ['Auto', 'Air'], 'Religion & Spirituality': ['Mysticism and Occult'], 'Government & Politics': ['Non-partisan']}
    event_topics = db.session.query(EventTopic).all()
    for topic_name, sub_topics in event_sub_topic.items():
        # One scan per topic; raises StopIteration if the topic is absent.
        topic_id = next(x for x in event_topics if x.name == topic_name).id
        for sub_topic_name in sub_topics:
            get_or_create(EventSubTopic, name=sub_topic_name, event_topic_id=topic_id)
class AdminUserCreationForm(forms.ModelForm):
    """Admin-side user creation form.

    Creates the user with an unusable password, so the account cannot log
    in until a password is set through another flow.
    """

    class Meta():
        model = User
        fields = ('username',)
        field_classes = {'username': UsernameField}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Focus the username input when the form renders.
        username_field = self._meta.model.USERNAME_FIELD
        if username_field in self.fields:
            self.fields[username_field].widget.attrs['autofocus'] = True

    def save(self, commit=True):
        new_user = super().save(commit=False)
        # No password was collected: explicitly mark it unusable.
        new_user.set_unusable_password()
        if commit:
            new_user.save()
        return new_user
class Preferences(GObject.GObject):
    """Thin GObject wrapper around a config object.

    Delegates all reads/writes to ``config`` and re-emits change
    notifications as GObject signals so widgets can subscribe.
    """

    __gsignals__ = {
        'preferences-changed': ((GObject.SignalFlags.RUN_FIRST | GObject.SignalFlags.NO_RECURSE), None, ()),
        'image-preferences-changed': ((GObject.SignalFlags.RUN_FIRST | GObject.SignalFlags.NO_RECURSE), None, ()),
    }

    def __init__(self, config):
        GObject.GObject.__init__(self)
        self.config = config
        # Hook the config's change callback so edits surface as signals.
        self.config.changed = self.changed

    def changed(self, section):
        """Called by the config when *section* changes; emit signals."""
        self.emit('preferences-changed')
        # Some sections also affect the displayed image.
        if self.config.image_changed_sections.get(section, False):
            self.emit('image-preferences-changed')

    # --- plain delegation to the underlying config ---------------------

    def set(self, section, key, val):
        self.config.set(section, key, val)

    def get(self, section, key):
        return self.config.get(section, key)

    def getboolean(self, section, key):
        return self.config.getboolean(section, key)

    def getint(self, section, key):
        return self.config.getint(section, key)

    def set_size(self, width, height):
        self.config.set_size(width, height)

    def set_main_window_size(self, width, height):
        self.config.set_main_window_size(width, height)

    def get_list(self, name):
        return self.config.get_list(name)

    def set_list(self, name, list):
        self.config.set_list(name, list)

    def remove_all_in_list_section(self, name):
        self.config.remove_all_in_list_section(name)

    def save(self):
        self.config.save()
def filter_casb_user_activity_data(json):
    """Return a dict containing only the recognised CASB user-activity
    attributes that are present and non-None in *json*."""
    option_list = ['application', 'casb_name', 'category', 'control_options', 'description', 'match', 'match_strategy', 'name', 'type', 'uuid']
    json = remove_invalid_fields(json)
    # .get() is None both for absent keys and explicit None values, which is
    # exactly the original "in json and not None" condition.
    return {key: json[key] for key in option_list if json.get(key) is not None}
def SGEMM(M: size, N: size, K: size, A: f32[(M, K)], B: f32[(K, N)], C: f32[(M, N)]):
    # Single-precision matrix-multiply-accumulate: C += A @ B.
    # NOTE(review): `size`, `f32`, `seq` and `stride` are DSL constructs
    # (Exo-style kernel language, presumably) defined elsewhere — this is not
    # plain Python/NumPy; confirm before editing.  Comments only, no
    # docstring, since a bare string statement may confuse the DSL parser.
    assert (M >= 1)
    assert (N >= 1)
    assert (K >= 1)
    # Require the innermost dimension of each matrix to be contiguous.
    assert (stride(A, 1) == 1)
    assert (stride(B, 1) == 1)
    assert (stride(C, 1) == 1)
    # k-outermost loop order: each k iteration accumulates A[:,k] * B[k,:]
    # into every element of C.
    for k in seq(0, K):
        for i in seq(0, M):
            for j in seq(0, N):
                C[(i, j)] += (A[(i, k)] * B[(k, j)])
def compute_embeddings(paths: list, embedding_name: str, device: str='cpu', chunk_size: int=20):
    """For each path dict, replace its 'images' frames with model embeddings.

    Frames are preprocessed once, then pushed through the model in chunks of
    *chunk_size* to bound memory use.  Each path gains an 'embeddings' array
    of shape (n_frames, embedding_dim) and loses its 'images' key.
    """
    model, embedding_dim, transforms, metadata = load_pretrained_model(embedding_name=embedding_name)
    model.to(device)
    for path in tqdm(paths):
        frames = path['images']
        n_frames = frames.shape[0]
        path['embeddings'] = np.zeros((n_frames, embedding_dim))
        # Preprocess every frame up front into one batch tensor.
        batch = torch.cat([transforms(frame) for frame in frames])
        # Stepping by chunk_size replaces the original // + bounds-check loop.
        for start in range(0, n_frames, chunk_size):
            stop = min(start + chunk_size, n_frames)
            with torch.no_grad():
                out = model(batch[start:stop].to(device))
            path['embeddings'][start:stop] = out.to('cpu').data.numpy()
        del path['images']
    return paths
def _run_command(args: Any) -> Tuple[(str, str)]:
    """Run *args* as a subprocess and return (stdout, stderr).

    Both streams are decoded and stripped; an absent stream becomes ''.
    """
    with Popen(args=args, stdout=PIPE, stderr=PIPE) as proc:
        log.info(f"Running command: {' '.join(args)}")
        raw_out, raw_err = proc.communicate()
        decoded_out = '' if raw_out is None else raw_out.decode().strip()
        decoded_err = '' if raw_err is None else raw_err.decode().strip()
        log.info(f'''Output: {decoded_out}
Error: {decoded_err}''')
        return (decoded_out, decoded_err)
class LiteSATAALIGNInserter(Module):
    # Migen module that periodically inserts SATA ALIGN primitives into the
    # outgoing stream.  A free-running 8-bit counter wraps every 256 accepted
    # words; while it reads 0 or 1 (`send`), the source carries the ALIGN
    # primitive with charisk asserted and the sink is stalled.  Otherwise
    # sink data/charisk pass straight through with normal ready/valid
    # handshaking.
    # NOTE(review): the 2-ALIGNs-per-256-words cadence presumably implements
    # the SATA link-layer ALIGN insertion requirement — confirm against the
    # specification.
    def __init__(self, description):
        self.sink = sink = stream.Endpoint(description)
        self.source = source = stream.Endpoint(description)
        # cnt: 8-bit word counter (wraps mod 256); send: insert-ALIGN flag.
        cnt = Signal(8)
        send = Signal()
        # Count only words actually accepted downstream.
        self.sync += If((source.valid & source.ready), cnt.eq((cnt + 1)))
        self.comb += [send.eq((cnt < 2)), If(send, source.valid.eq(1), source.charisk.eq(1), source.data.eq(primitives['ALIGN']), sink.ready.eq(0)).Else(source.valid.eq(sink.valid), source.data.eq(sink.data), source.charisk.eq(sink.charisk), sink.ready.eq(source.ready))]
class CssInputNoBorder(CssStyle.Style):
    """Borderless, centred text-input style.

    Inherits the surrounding colour, and at customisation time picks up the
    page's global font plus a theme-derived hover colour.
    """

    _attrs = {'border': 'none', 'text-align': 'center', 'cursor': 'text', 'margin': 0}
    _focus = {'outline': 0}

    def customize(self):
        # Resolve the page-global font once.
        global_font = self.page.body.style.globals.font
        self.attrs.css({
            'color': 'inherit',
            'font-family': global_font.family,
            'line-height': '%spx' % Defaults_html.LINE_HEIGHT,
            'font-size': global_font.normal(),
        })
        # Hover colour follows the page theme.
        self.hover.css({'color': self.page.theme.notch(1)})
def get_spec(distgit_config):
    """Return the basename of the single ``*.spec`` file in the configured
    specs directory.

    :param distgit_config: mapping with a 'specs' key naming the directory.
    :raises RuntimeError: when the directory holds zero or several spec files.
    """
    spec_dir = distgit_config['specs']
    found = glob.glob(os.path.join(spec_dir, '*.spec'))
    if len(found) == 1:
        return os.path.basename(found[0])
    # Report the absolute directory so the error is actionable.
    abs_spec_dir = os.path.join(os.getcwd(), spec_dir)
    raise RuntimeError('Exactly one spec file expected in {0} directory, {1} found'.format(abs_spec_dir, len(found)))
# NOTE(review): the original line began with a bare `.parametrize(...)` —
# the `@pytest.mark` prefix was evidently lost (a bare `.attr` expression is
# a syntax error).  Restored; assumes `pytest` is imported at the top of
# this test module.
@pytest.mark.parametrize('abi_type,should_match', (('bool[]', True), ('uint[]', True), ('uint[][]', True), ('uint[5][]', True), ('uint[][5]', True), ('int[]', True), ('string[]', True), ('address[]', True), ('bytes[]', True), ('string', False), ('bytes', False), ('uint[', False), ('uint]', False)))
def test_is_array_type(abi_type, should_match):
    """is_array_type accepts every ABI array-suffix form (including nested
    and fixed-size arrays) and rejects scalars and malformed brackets."""
    is_match = is_array_type(abi_type)
    assert (is_match is should_match)
class OptionPlotoptionsHistogramTooltipDatetimelabelformats(Options):
    """Datetime label formats for histogram tooltips
    (``plotOptions.histogram.tooltip.dateTimeLabelFormats`` in Highcharts).

    Each unit exposes a getter returning the Highcharts default format and a
    setter writing the user's format into the configuration.

    NOTE(review): in the flattened original every getter/setter pair shared a
    name with no decorators, so each setter silently replaced its getter and
    reads became impossible.  Restored the ``@property`` / ``@<name>.setter``
    pairs this accessor pattern implies.
    """

    @property
    def day(self):
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)