code
stringlengths
281
23.7M
def test_component_add_bad_dep():
    """Adding a component whose PyPI dependency pins a conflicting version
    of an already-registered package must raise an AEAException."""
    aea_builder = AEABuilder()
    aea_builder.set_name('aea_1')
    aea_builder.add_private_key('fetchai')

    # First component requires something==0.1.0.
    dummy_connection = _make_dummy_connection()
    dummy_connection.configuration.pypi_dependencies = {
        'something': Dependency('something', '==0.1.0')
    }
    aea_builder.add_component_instance(dummy_connection)

    # Second component requires an incompatible something==0.2.0.
    conflicting_protocol = Protocol(
        ProtocolConfig(
            'a_protocol',
            'author',
            '0.1.0',
            protocol_specification_id='some/author:0.1.0',
        ),
        DefaultMessage,
    )
    conflicting_protocol.configuration.pypi_dependencies = {
        'something': Dependency('something', '==0.2.0')
    }

    with pytest.raises(AEAException, match='Conflict on package something: specifier set .*'):
        aea_builder.add_component_instance(conflicting_protocol)
class Migration(migrations.Migration):
    # Creates the NCSOConcession table (one row per drug/pack-size price
    # concession on a given date) and enforces uniqueness on (date, vmpp).

    dependencies = [('dmd', '0002_auto__1443'), ('frontend', '0046_auto__1412')]

    operations = [
        migrations.CreateModel(
            name='NCSOConcession',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(db_index=True)),
                ('drug', models.CharField(max_length=400)),
                ('pack_size', models.CharField(max_length=40)),
                ('price_pence', models.IntegerField()),
                # Nullable: a concession may not yet be matched to a known VMPP.
                ('vmpp', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dmd.VMPP'))
            ]
        ),
        migrations.AlterUniqueTogether(name='ncsoconcession', unique_together=set([('date', 'vmpp')]))
    ]
def test_get_quarter_from_period():
    """get_quarter_from_period maps fiscal periods 2-12 onto quarters 1-4
    and returns None for out-of-range or wrongly-typed input."""
    expected_quarters = {
        2: 1, 3: 1,
        4: 2, 5: 2, 6: 2,
        7: 3, 8: 3, 9: 3,
        10: 4, 11: 4, 12: 4,
    }
    for period, quarter in expected_quarters.items():
        assert fyh.get_quarter_from_period(period) == quarter

    # Anything outside 2..12 — or not an int at all — yields None.
    for bad_period in (1, 13, None, '1', 'a', {'hello': 'there'}):
        assert fyh.get_quarter_from_period(bad_period) is None
def test_existing_mnemonic_bls_withdrawal() -> None:
    # End-to-end CLI check: regenerate validator keystores from a known test
    # mnemonic, then verify exit status, keystore-UUID uniqueness and file
    # permissions.
    my_folder_path = os.path.join(os.getcwd(), 'TESTING_TEMP_FOLDER')
    clean_key_folder(my_folder_path)
    if (not os.path.exists(my_folder_path)):
        os.mkdir(my_folder_path)
    runner = CliRunner()
    # Answers fed to the interactive prompts, in prompt order.
    # NOTE(review): exact prompt-to-answer mapping is not visible here —
    # confirm against the CLI definition ('5' is presumably the key count,
    # given the assertion on 5 distinct UUIDs below).
    inputs = ['TREZOR', 'abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about', '2', '2', '5', 'mainnet', 'MyPassword', 'MyPassword']
    data = '\n'.join(inputs)
    arguments = ['--language', 'english', 'existing-mnemonic', '--folder', my_folder_path, '--mnemonic-password', 'TREZOR']
    result = runner.invoke(cli, arguments, input=data)
    assert (result.exit_code == 0)
    validator_keys_folder_path = os.path.join(my_folder_path, DEFAULT_VALIDATOR_KEYS_FOLDER_NAME)
    (_, _, key_files) = next(os.walk(validator_keys_folder_path))
    # Every generated keystore must carry a distinct UUID.
    all_uuid = [get_uuid(((validator_keys_folder_path + '/') + key_file)) for key_file in key_files if key_file.startswith('keystore')]
    assert (len(set(all_uuid)) == 5)
    if (os.name == 'posix'):
        # Keystores must be read-only for owner/group (mode 0o440).
        for file_name in key_files:
            assert (get_permissions(validator_keys_folder_path, file_name) == '0o440')
    clean_key_folder(my_folder_path)
def filter_firewall_policy46_data(json):
    """Project the (cleaned) firewall_policy46 payload down to the known
    option keys, dropping anything absent or set to None."""
    option_list = ['action', 'comments', 'dstaddr', 'dstintf', 'fixedport', 'ippool', 'logtraffic', 'logtraffic_start', 'name', 'per_ip_shaper', 'permit_any_host', 'policyid', 'poolname', 'schedule', 'service', 'srcaddr', 'srcintf', 'status', 'tcp_mss_receiver', 'tcp_mss_sender', 'traffic_shaper', 'traffic_shaper_reverse', 'uuid']
    json = remove_invalid_fields(json)
    return {
        field: json[field]
        for field in option_list
        if field in json and json[field] is not None
    }
def get_possible_modes() -> set[str]:
    """Parse the frontend's ``modeOrder`` constant out of constants.ts and
    return the set of possible modes, expanding each ``'... storage'`` entry
    into its charge/discharge variants.

    Returns an empty set when the constant cannot be located.
    """
    import ast  # local import: stdlib, used only for the safe literal parse

    modes: set[str] = set()
    with open('web/src/utils/constants.ts', encoding='utf-8') as file_:
        search = re.search('^export const modeOrder = (\\[$.*?^]) as const;$', file_.read(), flags=(re.DOTALL | re.MULTILINE))
    if search is not None:
        # The captured group is a TS array literal of string constants, which
        # is also a valid Python literal. Use ast.literal_eval instead of
        # eval() so a malicious or broken constants.ts cannot execute
        # arbitrary code.
        for mode in ast.literal_eval(search.group(1)):
            if mode.endswith(' storage'):
                modes.update((mode.replace('storage', 'charge'), mode.replace('storage', 'discharge')))
            else:
                modes.add(mode)
    return modes
class MystReferenceResolver(ReferencesResolver):
    # Sphinx post-transform that resolves pending_xref nodes of reftype
    # 'myst': local labels, documents, std-domain objects, other domains,
    # and finally intersphinx inventories.
    default_priority = 9

    def log_warning(self, target: (None | str), msg: str, subtype: MystWarnings, **kwargs: Any):
        """Emit a myst warning unless suppressed by nitpick_ignore settings."""
        # Exact-match suppression via nitpick_ignore.
        if (target and self.config.nitpick_ignore and (('myst', target) in self.config.nitpick_ignore)):
            return
        # Regex suppression via nitpick_ignore_regex (type and target must both match).
        if (target and self.config.nitpick_ignore_regex and any(((re.fullmatch(ignore_type, 'myst') and re.fullmatch(ignore_target, target)) for (ignore_type, ignore_target) in self.config.nitpick_ignore_regex))):
            return
        LOGGER.warning((msg + f' [myst.{subtype.value}]'), type='myst', subtype=subtype.value, **kwargs)

    def run(self, **kwargs: Any) -> None:
        """Resolve every 'myst' pending_xref in the document in place."""
        self.document: document
        for node in findall(self.document)(addnodes.pending_xref):
            if (node['reftype'] != 'myst'):
                continue
            # Document references have their own resolution path.
            if (node['refdomain'] == 'doc'):
                self.resolve_myst_ref_doc(node)
                continue
            newnode = None
            contnode = cast(nodes.TextElement, node[0].deepcopy())
            target = node['reftarget']
            refdoc = node.get('refdoc', self.env.docname)
            search_domains: (None | list[str]) = self.env.config.myst_ref_domains
            try:
                newnode = self.resolve_myst_ref_any(refdoc, node, contnode, search_domains)
            except NoUri:
                newnode = contnode
            # Fall back to intersphinx inventories.
            if (newnode is None):
                newnode = self._resolve_myst_ref_intersphinx(node, contnode, target, search_domains)
            if (newnode is None):
                self.log_warning(target, f"'myst' cross-reference target not found: {target!r}", MystWarnings.XREF_MISSING, location=node)
            # Still emit a link, even when the target was not found.
            if (not newnode):
                newnode = nodes.reference()
                newnode['refid'] = normalizeLink(target)
                newnode.append(node[0].deepcopy())
            # Guarantee the reference has visible content.
            if ((len(newnode.children) == 1) and isinstance(newnode[0], nodes.inline) and (not newnode[0].children)):
                newnode[0].replace_self(nodes.literal(target, target))
            elif (not newnode.children):
                newnode.append(nodes.literal(target, target))
            node.replace_self(newnode)

    def resolve_myst_ref_doc(self, node: pending_xref):
        """Resolve a reference to another document, optionally to a slug
        (heading anchor) within it."""
        from_docname = node.get('refdoc', self.env.docname)
        ref_docname: str = node['reftarget']
        ref_id: (str | None) = node['reftargetid']
        if (ref_docname not in self.env.all_docs):
            self.log_warning(ref_docname, f'Unknown source document {ref_docname!r}', MystWarnings.XREF_MISSING, location=node)
            node.replace_self(node[0].deepcopy())
            return
        targetid = ''
        implicit_text = ''
        inner_classes = ['std', 'std-doc']
        if ref_id:
            # Heading slugs recorded by the myst parser for this document.
            slug_to_section = self.env.metadata[ref_docname].get('myst_slugs', {})
            if (ref_id not in slug_to_section):
                self.log_warning(ref_id, f'local id not found in doc {ref_docname!r}: {ref_id!r}', MystWarnings.XREF_MISSING, location=node)
                targetid = ref_id
            else:
                (_, targetid, implicit_text) = slug_to_section[ref_id]
                inner_classes = ['std', 'std-ref']
        else:
            # No anchor: fall back to the document title as link text.
            implicit_text = clean_astext(self.env.titles[ref_docname])
        if node['refexplicit']:
            caption = node.astext()
            innernode = nodes.inline(caption, '', classes=inner_classes)
            innernode.extend(node[0].children)
        else:
            innernode = nodes.inline(implicit_text, implicit_text, classes=inner_classes)
        assert self.app.builder
        try:
            ref_node = make_refnode(self.app.builder, from_docname, ref_docname, targetid, innernode)
        except NoUri:
            ref_node = innernode
        node.replace_self(ref_node)

    def resolve_myst_ref_any(self, refdoc: str, node: pending_xref, contnode: Element, only_domains: (None | list[str])) -> (Element | None):
        """Resolve the target against every (allowed) domain, warning when
        more than one candidate matches; returns the first match or None."""
        target: str = node['reftarget']
        results: list[tuple[(str, Element)]] = []
        # Local labels / documents first.
        res = self._resolve_ref_nested(node, refdoc)
        if res:
            results.append(('std:ref', res))
        res = self._resolve_doc_nested(node, refdoc)
        if res:
            results.append(('std:doc', res))
        assert self.app.builder
        # std-domain objects (terms are stored lower-cased).
        if ((only_domains is None) or ('std' in only_domains)):
            stddomain = cast(StandardDomain, self.env.get_domain('std'))
            for objtype in stddomain.object_types:
                key = (objtype, target)
                if (objtype == 'term'):
                    key = (objtype, target.lower())
                if (key in stddomain.objects):
                    (docname, labelid) = stddomain.objects[key]
                    domain_role = ('std:' + stddomain.role_for_objtype(objtype))
                    ref_node = make_refnode(self.app.builder, refdoc, docname, labelid, contnode)
                    results.append((domain_role, ref_node))
        # All other domains, via resolve_any_xref or a per-role fallback.
        for domain in self.env.domains.values():
            if (domain.name == 'std'):
                continue
            if ((only_domains is not None) and (domain.name not in only_domains)):
                continue
            try:
                results.extend(domain.resolve_any_xref(self.env, refdoc, self.app.builder, target, node, contnode))
            except NotImplementedError:
                # Only warn for third-party domains; sphinx's own are known.
                if (not getattr(domain, '__module__', '').startswith('sphinx.')):
                    self.log_warning(None, f"Domain '{domain.__module__}::{domain.name}' has not implemented a `resolve_any_xref` method", MystWarnings.LEGACY_DOMAIN, once=True)
                for role in domain.roles:
                    res = domain.resolve_xref(self.env, refdoc, self.app.builder, role, target, node, contnode)
                    if (res and len(res) and isinstance(res[0], nodes.Element)):
                        results.append((f'{domain.name}:{role}', res))
        if (not results):
            return None
        if (len(results) > 1):

            def stringify(name, node):
                reftitle = node.get('reftitle', node.astext())
                return f':{name}:`{reftitle}`'
            candidates = ' or '.join((stringify(name, role) for (name, role) in results))
            self.log_warning(target, f"more than one target found for 'myst' cross-reference {target}: could be {candidates}", MystWarnings.XREF_AMBIGUOUS, location=node)
        # First match wins; tag it with domain/role CSS classes.
        (res_role, newnode) = results[0]
        res_domain = res_role.split(':')[0]
        if ((len(newnode) > 0) and isinstance(newnode[0], nodes.Element)):
            newnode[0]['classes'] = (newnode[0].get('classes', []) + [res_domain, res_role.replace(':', '-')])
        return newnode

    def _resolve_ref_nested(self, node: pending_xref, fromdocname: str, target=None) -> (Element | None):
        """Resolve against std-domain labels/anonlabels (like the :ref: role)."""
        stddomain = cast(StandardDomain, self.env.get_domain('std'))
        target = (target or node['reftarget'].lower())
        if node['refexplicit']:
            (docname, labelid) = stddomain.anonlabels.get(target, ('', ''))
            sectname = node.astext()
            innernode = nodes.inline(sectname, '')
            innernode.extend(node[0].children)
        else:
            (docname, labelid, sectname) = stddomain.labels.get(target, ('', '', ''))
            innernode = nodes.inline(sectname, sectname)
        if (not docname):
            return None
        assert self.app.builder
        return make_refnode(self.app.builder, fromdocname, docname, labelid, innernode)

    def _resolve_doc_nested(self, node: pending_xref, fromdocname: str) -> (Element | None):
        """Resolve against known documents (like the :doc: role)."""
        docname = docname_join(node.get('refdoc', fromdocname), node['reftarget'])
        if (docname not in self.env.all_docs):
            return None
        if node['refexplicit']:
            caption = node.astext()
            innernode = nodes.inline(caption, '', classes=['doc'])
            innernode.extend(node[0].children)
        else:
            caption = clean_astext(self.env.titles[docname])
            innernode = nodes.inline(caption, caption, classes=['doc'])
        assert self.app.builder
        return make_refnode(self.app.builder, fromdocname, docname, '', innernode)

    def _resolve_myst_ref_intersphinx(self, node: nodes.Element, contnode: nodes.Element, target: str, only_domains: (list[str] | None)) -> (None | nodes.reference):
        """Resolve against intersphinx inventories; warn on multiple matches
        (showing at most the first three) and return an external reference."""
        matches = [m for m in inventory.filter_sphinx_inventories(InventoryAdapter(self.env).named_inventory, targets=target) if ((only_domains is None) or (m.domain in only_domains))]
        if (not matches):
            return None
        if (len(matches) > 1):
            show_num = 3
            matches_str = ', '.join([inventory.filter_string(m.inv, m.domain, m.otype, m.name) for m in matches[:show_num]])
            if (len(matches) > show_num):
                matches_str += ', ...'
            self.log_warning(target, f'Multiple matches found for {target!r}: {matches_str}', MystWarnings.IREF_AMBIGUOUS, location=node)
        match = matches[0]
        newnode = nodes.reference('', '', internal=False, refuri=match.loc)
        if ('reftitle' in node):
            newnode['reftitle'] = node['reftitle']
        else:
            newnode['reftitle'] = f'{match.project} {match.version}'.strip()
        # Link text priority: explicit text, inventory text, then the name.
        if node.get('refexplicit'):
            newnode.append(contnode)
        elif match.text:
            newnode.append(contnode.__class__(match.text, match.text, classes=['iref', 'myst']))
        else:
            newnode.append(nodes.literal(match.name, match.name, classes=['iref', 'myst']))
        return newnode
class ImpalaEvents(ABC):
    """Event-handler interface for IMPALA timing and queue statistics.

    NOTE(review): in the flattened source each ``@_epoch_stats(...)``
    decorator lost its ``@`` and every method body was lost; both are
    restored here (decorator pairs precede exactly the method whose
    parameters they name). Each handler aggregates its arguments per epoch
    with ``np.nanmean`` — confirm against the original source.
    """

    @_epoch_stats(np.nanmean, input_name='time')
    @_epoch_stats(np.nanmean, input_name='percent')
    def time_dequeuing_actors(self, time: float, percent: float):
        ...

    @_epoch_stats(np.nanmean, input_name='time')
    @_epoch_stats(np.nanmean, input_name='percent')
    def time_collecting_actors(self, time: float, percent: float):
        ...

    @_epoch_stats(np.nanmean, input_name='time')
    @_epoch_stats(np.nanmean, input_name='percent')
    def time_learner_rollout(self, time: float, percent: float):
        ...

    @_epoch_stats(np.nanmean, input_name='time')
    @_epoch_stats(np.nanmean, input_name='percent')
    def time_loss_computation(self, time: float, percent: float):
        ...

    @_epoch_stats(np.nanmean, input_name='time')
    @_epoch_stats(np.nanmean, input_name='percent')
    def time_backprob(self, time: float, percent: float):
        ...

    @_epoch_stats(np.nanmean, input_name='before')
    @_epoch_stats(np.nanmean, input_name='after')
    def estimated_queue_sizes(self, before: int, after: int):
        ...
class OptionSeriesNetworkgraphSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Configuration wrapper for the sonification lowpass-filter resonance
    mapping options (mapFunction/mapTo/max/min/within).

    NOTE(review): duplicate method names cannot coexist in a class body, so
    each getter/setter pair below was clearly a property whose stripped
    ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def mapFunction(self):
        # Mapping function name; None means unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data property this mapping is bound to.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_always_transact(plugintester, mocker, chain):
    """Without --coverage the chain is never rolled back; with --coverage a
    run triggers exactly one chain.undo call."""
    mocker.spy(chain, 'undo')

    # Plain run: both inner tests pass and no rollback happens.
    outcome = plugintester.runpytest()
    outcome.assert_outcomes(passed=2)
    assert chain.undo.call_count == 0

    # Coverage run: same outcomes, but the chain gets rolled back once.
    outcome = plugintester.runpytest('--coverage')
    outcome.assert_outcomes(passed=2)
    assert chain.undo.call_count == 1
def fetch_cfda_id_title_by_number(cfda_number: str) -> Optional[Tuple[(int, str)]]:
    """Look up the CFDA record for *cfda_number* and return (id, program_title).

    Returns (None, None) — after logging a warning — when no record matches.
    """
    id_col, title_col = 'id', 'program_title'
    row = Cfda.objects.filter(program_number=cfda_number).values(id_col, title_col).first()
    if row:
        return (row[id_col], row[title_col])
    logger.warning('{} not found for cfda_number: {}'.format(','.join([id_col, title_col]), cfda_number))
    return (None, None)
def get_table_stats(dp, waiters, to_user=True):
    """Query OFPTableStats from datapath *dp* and return them wrapped by dpid.

    When *to_user* is true, table ids are translated to user-facing values.
    """
    request = dp.ofproto_parser.OFPTableStatsRequest(dp, 0)
    replies = []
    ofctl_utils.send_stats_request(dp, request, waiters, replies, LOG)

    tables = []
    for reply in replies:
        for entry in reply.body:
            record = entry.to_jsondict()[entry.__class__.__name__]
            if to_user:
                record['table_id'] = UTIL.ofp_table_to_user(entry.table_id)
            tables.append(record)
    return wrap_dpid_dict(dp, tables, to_user)
def deb_kernel(packages, kernel_version, current_version, variant=None):
    # Pick the newest available linux-headers kernel matching kernel_version
    # (and the current/requested variant), or keep current_version when it
    # already matches and no explicit variant was asked for.
    if (current_version.startswith(kernel_version) and (not variant)):
        return current_version
    import re
    kernels = set()
    kernels_add = kernels.add
    # Split "5.10.0-8-amd64"-style strings into numeric part and variant suffix.
    (current_version, current_variant) = re.match('^([0-9-.]+)(-[a-z0-9]+)?$', current_version).groups()
    # NOTE(review): '__omit_place_holder__' presumably marks an omitted
    # Ansible argument — treat it the same as no variant.
    variant = ('-' + (variant if (not ((variant is None) or variant.startswith('__omit_place_holder__'))) else (current_variant or '')).lstrip('-'))
    match = re.compile(('^Package: linux-headers-([a-z0-9-.]+%s)\\s*$' % variant)).match
    for line in packages['stdout'].splitlines():
        line_match = match(line)
        if line_match:
            kernels_add(line_match.group(1))
    versions = {}
    for kernel in kernels:
        version_info = kernel.split('-')
        version = version_info[0]
        build = version_info[1]
        # Sort key: numeric dotted-version components plus the build part.
        # NOTE(review): `build` stays a string, so builds compare lexically
        # ('9' > '10') — confirm whether multi-digit builds matter here.
        versions[kernel] = (list((int(ver) for ver in version.split('.'))) + [build])
    kernels = sorted(versions.keys(), key=versions.get, reverse=True)
    # Newest-first scan for the first kernel matching the requested prefix.
    for kernel in kernels:
        if kernel.startswith(kernel_version):
            return kernel
    raise RuntimeError(('No kernel matching to "%s". Current version: %s. Available kernel versions: %s' % (kernel_version, current_version, ', '.join(reversed(kernels)))))
class Strategy():
    """Monthly momentum-rotation backtest over a basket of symbols using the
    pinkfish (pf) portfolio API.

    Construct with symbols/capital/date range/options, then call run();
    results land in .rlog/.tlog/.dbal/.stats.
    """

    def __init__(self, symbols, capital, start, end, options=default_options):
        self.symbols = symbols
        self.capital = capital
        self.start = start
        self.end = end
        # Copy so callers' option dicts (incl. the shared default) are never mutated.
        self.options = options.copy()
        self.ts = None
        self.rlog = None
        self.tlog = None
        self.dbal = None
        self.stats = None

    def _algo(self):
        """Trading loop: each month rank symbols by momentum and allocate
        equally across the top tier, subject to regime and absolute-momentum
        filters."""
        pf.TradeLog.cash = self.capital
        pf.TradeLog.margin = self.options['margin']
        mom = {}
        weights = {}
        month_count = 0
        for i, row in enumerate(self.ts.itertuples()):
            end_flag = pf.is_last_row(self.ts, i)
            # Choose a (possibly random) lookback, then keep it for that many months.
            if month_count == 0:
                if self.options['lookback'] is None:
                    lookback = random.choice(range(6, 12 + 1))
                else:
                    lookback = self.options['lookback']
                month_count = lookback
            # Rebalance on the first trading day of the month (or the final row).
            if row.first_dotm or end_flag:
                month_count -= 1
                mom_field = 'mom' + str(lookback)
                p = self.portfolio.get_column_values(row, fields=[mom_field])
                for symbol in self.portfolio.symbols:
                    mom[symbol] = p[symbol][mom_field]
                    weights[symbol] = 0
                # Go to cash on the last row or when the regime filter is bearish.
                if end_flag or (self.options['use_regime_filter'] and row.regime < 0):
                    pass
                else:
                    mom = pf.sort_dict(mom, reverse=True)
                    for j in range(self.options['top_tier']):
                        symbol = list(mom.keys())[j]
                        weights[symbol] = 1 / self.options['top_tier']
                    # Absolute momentum: drop anything with negative momentum.
                    if self.options['use_absolute_mom']:
                        for symbol, pct_change in mom.items():
                            if pct_change < 0:
                                weights[symbol] = 0
                self.portfolio.adjust_percents(row, weights)
            self.portfolio.record_daily_balance(row)

    def run(self):
        """Fetch data, compute indicators, execute the algorithm, collect logs/stats."""
        self.portfolio = pf.Portfolio()
        self.ts = self.portfolio.fetch_timeseries(self.symbols, self.start, self.end, fields=['close'], use_cache=self.options['use_cache'], use_adj=self.options['use_adj'])
        # The S&P 500 defines the market regime via a 200-day crossover.
        ts = pf.fetch_timeseries('^GSPC')
        ts = pf.select_tradeperiod(ts, self.start, self.end, use_adj=False)
        self.ts['regime'] = pf.CROSSOVER(ts, timeperiod_fast=1, timeperiod_slow=200, band=3.5)
        self.ts = self.portfolio.calendar(self.ts)
        # Precompute monthly momentum columns mom3..mom18 for every symbol.
        lookbacks = range(3, 18 + 1)
        for lookback in lookbacks:
            # NOTE(review): restored the stripped '@' on this indicator
            # registration decorator — a bare `_indicator(...)` call followed
            # by an otherwise-unused def cannot have been intended.
            @_indicator(self.symbols, 'mom' + str(lookback), 'close')
            def _momentum(ts, input_column=None):
                return pf.MOMENTUM(ts, lookback=lookback, time_frame='monthly', price=input_column, prevday=False)
            self.ts = _momentum(self.ts)
        self.ts, self.start = self.portfolio.finalize_timeseries(self.ts, self.start)
        self.portfolio.init_trade_logs(self.ts)
        self._algo()
        self._get_logs()
        self._get_stats()

    def _get_logs(self):
        """Pull raw/trade/daily-balance logs from the portfolio."""
        self.rlog, self.tlog, self.dbal = self.portfolio.get_logs()

    def _get_stats(self):
        """Compute summary statistics for the completed run."""
        self.stats = pf.stats(self.ts, self.tlog, self.dbal, self.capital)
@_module()
class NaiveProjectionEncoder(nn.Module):
    """Encoder mapping inputs to ``output_size`` features via either an
    embedding lookup (integer inputs) or a linear projection, optionally
    through a small bottleneck ("neck") layer.

    NOTE(review): restored the stripped ``@`` on the ``_module()``
    registration decorator — a bare call immediately preceding the class
    cannot have been intended.
    """

    def __init__(self, input_size, output_size, use_embedding: bool=False, use_neck: bool=False, neck_size: int=8, preprocessing=None):
        super().__init__()
        self.use_embedding = use_embedding
        self.input_size = input_size
        self.output_size = output_size
        # Optional callable applied to inputs before encoding.
        self.preprocessing = preprocessing
        if use_embedding:
            self.embedding = nn.Embedding(input_size, output_size)
        elif use_neck:
            self.projection = nn.Sequential(nn.Linear(input_size, neck_size), nn.Linear(neck_size, output_size))
        else:
            self.projection = nn.Linear(input_size, output_size)
        self.reset_params()

    def reset_params(self):
        """Xavier-init linear layers (zero bias); scaled-normal init embeddings."""
        for m in self.modules():
            if isinstance(m, nn.Linear):
                nn.init.xavier_uniform_(m.weight)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0.0)
            elif isinstance(m, nn.Embedding):
                nn.init.normal_(m.weight, mean=0, std=m.embedding_dim ** -0.5)

    def forward(self, x, *args, **kwargs):
        """Apply optional preprocessing, then embed or project ``x``."""
        if self.preprocessing is not None:
            x = self.preprocessing(x)
        return self.embedding(x) if self.use_embedding else self.projection(x)
def monitor(exit_event, wait):
    # Poll MSRs until exit_event is set, logging throttling causes, core
    # voltage and RAPL power draw roughly every `wait` seconds.
    wait = max(0.1, wait)
    # RAPL energy unit (joules per counter tick), MSR_RAPL_POWER_UNIT bits 8-12.
    rapl_power_unit = (0.5 ** readmsr('MSR_RAPL_POWER_UNIT', from_bit=8, to_bit=12, cpu=0))
    power_plane_msr = {'Package': 'MSR_INTEL_PKG_ENERGY_STATUS', 'Graphics': 'MSR_PP1_ENERGY_STATUS', 'DRAM': 'MSR_DRAM_ENERGY_STATUS'}
    # Per plane: (cumulative energy in J, timestamp); used to derive watts.
    prev_energy = {'Package': ((readmsr('MSR_INTEL_PKG_ENERGY_STATUS', cpu=0) * rapl_power_unit), time()), 'Graphics': ((readmsr('MSR_PP1_ENERGY_STATUS', cpu=0) * rapl_power_unit), time()), 'DRAM': ((readmsr('MSR_DRAM_ENERGY_STATUS', cpu=0) * rapl_power_unit), time())}
    undervolt_values = get_undervolt(convert=True)
    undervolt_output = ' | '.join(('{:s}: {:.2f} mV'.format(plane, undervolt_values[plane]) for plane in VOLTAGE_PLANES))
    log('[D] Undervolt offsets: {:s}'.format(undervolt_output))
    iccmax_values = get_icc_max(convert=True)
    iccmax_output = ' | '.join(('{:s}: {:.2f} A'.format(plane, iccmax_values[plane]) for plane in CURRENT_PLANES))
    log('[D] IccMax: {:s}'.format(iccmax_output))
    log('[D] Realtime monitoring of throttling causes:\n')
    while (not exit_event.is_set()):
        # IA32_THERM_STATUS limitation bits per the offsets map below.
        value = readmsr('IA32_THERM_STATUS', from_bit=0, to_bit=15, cpu=0)
        offsets = {'Thermal': 0, 'Power': 10, 'Current': 12, 'Cross-domain (e.g. GPU)': 14}
        output = ('{:s}: {:s}'.format(cause, (LIM if bool(((value >> offsets[cause]) & 1)) else OK)) for cause in offsets)
        # Core voltage from IA32_PERF_STATUS bits 32-47, converted to mV.
        vcore = ((readmsr('IA32_PERF_STATUS', from_bit=32, to_bit=47, cpu=0) / (2.0 ** 13)) * 1000)
        stats2 = {'VCore': '{:.0f} mV'.format(vcore)}
        total = 0.0
        for power_plane in ('Package', 'Graphics', 'DRAM'):
            energy_j = (readmsr(power_plane_msr[power_plane], cpu=0) * rapl_power_unit)
            now = time()
            # Watts = delta energy / delta time since the previous sample.
            (prev_energy[power_plane], energy_w) = ((energy_j, now), ((energy_j - prev_energy[power_plane][0]) / (now - prev_energy[power_plane][1])))
            stats2[power_plane] = '{:.1f} W'.format(energy_w)
            total += energy_w
        stats2['Total'] = '{:.1f} W'.format(total)
        output2 = ('{:s}: {:s}'.format(label, stats2[label]) for label in stats2)
        # Overwrite the status line in place unless logging to a file.
        # NOTE(review): `args` and `power` are module-level globals here.
        terminator = ('\n' if args.log else '\r')
        log('[{}] {} || {}{}'.format(power['source'], ' - '.join(output), ' - '.join(output2), (' ' * 10)), end=terminator)
        exit_event.wait(wait)
def extractVictoriatranslationBlogspotCom(item):
    """Parse a release feed item: None for previews or unparseable titles,
    a release message for recognized tags, False when no tag matches."""
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None

    tag_map = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_map.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Danmu_layer():
    # Transparent RGBA overlay that renders scrolling "danmu" (bullet
    # comments) distributed over fixed-height horizontal tracks.

    def __init__(self, frame_w, frame_h, danmu_txt):
        # frame_w/frame_h: frame size in pixels; danmu_txt: path to a text
        # file with one comment per line.
        self.frame_w = frame_w
        self.frame_h = frame_h
        self.track_h = 100
        self.track_num = int((self.frame_h / self.track_h))
        self.background = self.create_trans_background()
        # Candidate RGBA colors and scroll speeds; each track picks randomly.
        self.tracks_color = [(255, 255, 255, 255), (255, 0, 0, 255), (0, 255, 0, 255), (0, 0, 255, 255), (255, 255, 0, 255), (0, 255, 255, 255), (255, 0, 255, 255)]
        self.tracks_speed = [2, 4, 6, 8]
        self.track_obj_list = self.create_track_objs()
        self.danmu_txt = danmu_txt

    def create_trans_background(self):
        # Fully transparent canvas the size of the frame.
        return Image.new('RGBA', (self.frame_w, self.frame_h))

    def create_track_objs(self):
        # One Danmu_track per horizontal band, with random color and speed.
        track_obj_list = []
        for i in range(self.track_num):
            color = random.choice(self.tracks_color)
            speed = random.choice(self.tracks_speed)
            new_track = Danmu_track(self.frame_w, self.track_h, 1000, speed, 80, color)
            track_obj_list.append(new_track)
        return track_obj_list

    def danmu_text2list(self):
        # Read the comment file into a list of stripped lines.
        text_list = None
        with open(self.danmu_txt, 'r') as f:
            text_list = f.readlines()
        text_list = [t.strip() for t in text_list]
        return text_list

    def distri_danmu(self):
        # Pad the comment list to a full track_num x cols_num grid and return
        # it transposed, so row i holds the comments assigned to track i.
        danmu_text_list = []
        track_num = self.track_num
        danmu_text = self.danmu_text2list()
        len_danmu_text = len(danmu_text)
        cols_num = math.ceil((len_danmu_text / track_num))
        blank_arr = ['' for i in range(((track_num * cols_num) - len_danmu_text))]
        fixed_danmu_text = np.concatenate((danmu_text, blank_arr))
        indexes_arr = np.arange((track_num * cols_num)).reshape((cols_num, track_num))
        return fixed_danmu_text[indexes_arr.T]

    def compound_tracks(self, frame_index):
        # Render every track at frame_index and paste the strips onto a fresh
        # transparent background, top to bottom.
        background = self.background.copy()
        text_list = self.distri_danmu()
        danmu_img = []
        for track_index in range(self.track_num):
            danmu_img.append(self.track_obj_list[track_index].draw_text(text_list[track_index].tolist(), frame_index))
        for (index, img) in enumerate(danmu_img):
            background.paste(img, (0, (self.track_h * index)))
        return background
class ReadEnum(Read[EnumT]):
    """Reader that decodes one unsigned varint and yields it converted to a
    member of the wrapped enum type."""

    __slots__ = ('enum_type',)

    def __init__(self, enum_type: Type[EnumT]) -> None:
        self.enum_type = enum_type

    def __call__(self, io: IO[bytes]) -> Iterator[EnumT]:
        value = read_unsigned_varint(io)
        try:
            # Enum conversion raises ValueError for unknown values.
            yield self.enum_type(value)
        except ValueError as exc:
            # Chain the cause so callers can see which raw value failed.
            raise IncorrectValueError(f'incorrect value {value} for enum `{self.enum_type!r}`') from exc

    def __repr__(self) -> str:
        return f'{type(self).__name__}({self.enum_type.__name__})'
class OptionPlotoptionsTimelineSonificationTracksMappingHighpassFrequency(Options):
    """Configuration wrapper for the sonification highpass-filter frequency
    mapping options (mapFunction/mapTo/max/min/within).

    NOTE(review): duplicate method names cannot coexist in a class body, so
    each getter/setter pair below was clearly a property whose stripped
    ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def mapFunction(self):
        # Mapping function name; None means unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data property this mapping is bound to.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def compute_com_and_com_vel(pb_client, body_id, indices=None):
    """Mass-weighted center of mass and COM velocity over the given link
    indices (-1 denotes the base); defaults to the base plus every joint link."""
    if indices is None:
        indices = range(-1, pb_client.getNumJoints(body_id))

    mass_sum = 0.0
    weighted_pos = np.zeros(3)
    weighted_vel = np.zeros(3)
    for link in indices:
        link_mass = pb_client.getDynamicsInfo(body_id, link)[0]
        if link == -1:
            # Base link: position/velocity come from the base-state helper.
            pos, _, vel, _ = get_base_pQvw(pb_client, body_id)
        else:
            state = pb_client.getLinkState(body_id, link, computeLinkVelocity=True)
            pos = np.array(state[0])
            vel = np.array(state[6])
        mass_sum += link_mass
        weighted_pos += link_mass * pos
        weighted_vel += link_mass * vel

    return (weighted_pos / mass_sum, weighted_vel / mass_sum)
class StaticRadicli(Radicli):
    """Radicli CLI reconstructed from a pre-generated static JSON description,
    so the command table can be used without importing the real handlers."""

    data: StaticData
    disable: bool
    debug: bool

    def __init__(self, data: StaticData, disable: bool=False, debug: bool=False, converters: ConvertersType=SimpleFrozenDict()) -> None:
        super().__init__(prog=data['prog'], help=data['help'], version=data['version'], extra_key=data['extra_key'])
        # Rebuild command/subcommand objects from their static JSON form.
        self.commands = {name: Command.from_static_json(cmd, converters) for (name, cmd) in data['commands'].items()}
        self.subcommands = {parent: {name: Command.from_static_json(sub, converters) for (name, sub) in subs.items()} for (parent, subs) in data['subcommands'].items()}
        self.data = data
        self.disable = disable
        self.debug = debug
        # Markers printed around runs in debug mode.
        self._debug_start = '===== STATIC ====='
        self._debug_end = '=== END STATIC ==='

    def run(self, args: Optional[List[str]]=None) -> None:
        """Run the CLI unless disabled; wrap output in debug markers if asked."""
        if self.disable:
            return
        if self.debug:
            print(self._debug_start)
        super().run(args)
        if self.debug:
            print(self._debug_end)

    @classmethod
    def load(cls, file_path: Union[(str, Path)], debug: bool=False, disable: bool=False, converters: ConvertersType=SimpleFrozenDict()) -> 'StaticRadicli':
        """Read the static JSON file and construct a StaticRadicli from it.

        Fixed: this alternate constructor takes ``cls`` and calls ``cls(...)``
        but was missing its ``@classmethod`` decorator, so calling it on the
        class would have bound ``file_path``'s argument to ``cls``.
        """
        path = Path(file_path)
        if (not path.exists()) or (not path.is_file()):
            raise ValueError(f'Not a valid file path: {path}')
        with path.open('r', encoding='utf8') as f:
            data = json.load(f)
        return cls(data, disable=disable, debug=debug, converters=converters)
def downgrade():
    # Reverse migration: convert UUID primary keys back to NUMERIC(16),
    # drop the task_ids column and its unique constraint from `user`, and
    # recreate the legacy `sessions` table.
    with op.batch_alter_table('user', schema=None) as batch_op:
        # NOTE(review): constraint name is None — relies on batch mode
        # resolving the unnamed unique constraint (SQLite); confirm on other
        # backends.
        batch_op.drop_constraint(None, type_='unique')
        batch_op.alter_column('id', existing_type=sqlalchemy_utils.types.uuid.UUIDType(), type_=sa.NUMERIC(precision=16), existing_nullable=False)
        batch_op.drop_column('task_ids')
    with op.batch_alter_table('templates', schema=None) as batch_op:
        batch_op.alter_column('id', existing_type=sqlalchemy_utils.types.uuid.UUIDType(), type_=sa.NUMERIC(precision=16), existing_nullable=False)
    with op.batch_alter_table('tasks', schema=None) as batch_op:
        batch_op.alter_column('id', existing_type=sqlalchemy_utils.types.uuid.UUIDType(), type_=sa.NUMERIC(precision=16), existing_nullable=False)
    op.create_table('sessions', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('session_id', sa.VARCHAR(length=255), nullable=True), sa.Column('data', sa.BLOB(), nullable=True), sa.Column('expiry', sa.DATETIME(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('session_id'))
# NOTE(review): the three bare names below look like decorator remnants whose
# '@' and full names were lost in extraction (e.g. @login_required /
# @ajax_required / @require_POST) — confirm against the original source.
_required
_required
_POST
def password_change(request):
    # Validate and apply a password change for the logged-in user, rendering
    # the profile password form (with the form's HTTP status) on failure.
    status = 200
    pform = ChangePasswordForm(request.user, request.POST)
    if pform.is_valid():
        # save() returns an HTTP-style status code; 200 means success.
        status = pform.save(request)
        if (status == 200):
            messages.success(request, _('Your password was successfully changed'))
            return redirect('profile')
    return render(request, 'gui/profile/profile_password_form.html', {'user': request.user, 'pform': pform}, status=status)
class OptionPlotoptionsVectorSonificationTracksMappingPlaydelay(Options):
    """Configuration wrapper for the sonification play-delay mapping options
    (mapFunction/mapTo/max/min/within).

    NOTE(review): duplicate method names cannot coexist in a class body, so
    each getter/setter pair below was clearly a property whose stripped
    ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def mapFunction(self):
        # Mapping function name; None means unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data property this mapping is bound to.
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def set_attributes():
    """Build and parse the fiosynth command line.

    Returns the parsed argparse.Namespace. Fixed several user-facing typos in
    the help text ("Inforamtion" -> "Information", "avaiable" -> "available",
    "Specific the number" -> "Specify the number", missing spaces before
    "(default = )").
    """
    parser = argparse.ArgumentParser(description='FB fio Synthetic Benchmark Suite for storage ver 3.6.0')
    parser.add_argument('-d', action='store', dest='device', type=str, help='(Required) device path for single device target, ALL for alldata devices, or ALLRAID for all mdraid devices', required=True, default='')
    parser.add_argument('-c', action='store', dest='factor', type=float, help='(Optional) specify capacity in TB (default = <device capacity>)', default=(-1.0))
    parser.add_argument('-w', action='store', dest='wklds', type=str, help='(Required) filename for workload suite (default = )', required=True, default='')
    parser.add_argument('-f', action='store', dest='fname', type=str, help='(Required) Results filename (default = )', required=True, default='.')
    parser.add_argument('-r', action='store', dest='dryrun', type=str, help='(Optional) Set to y to do dry run (default = n)', default='n')
    parser.add_argument('-t', action='store', dest='health', type=str, help='(Optional) Enter Health Monitoring Tool Syntax (default = )', default='')
    parser.add_argument('-p', action='store', dest='prep', type=str, help='(Optional) Set to n to skip drive prep, o to prep on first cycle only (default = y)', default='y')
    parser.add_argument('-n', action='store', dest='cycles', type=int, help='(Optional) Specify the number of run cycles (default = 3)', default=(-1))
    parser.add_argument('-g', action='store', dest='getflash', type=str, help='(Optional) Set to y to enable flash configuration logging (default = n)', default='n')
    parser.add_argument('-s', action='append', dest='servers', type=str, help='(Optional) Add a server to the list for client/server mode', default=[])
    parser.add_argument('-l', action='store', dest='server_file', type=str, help='(Optional) Path to a text file with a server name on each line', default='')
    parser.add_argument('-j', action='store', dest='job_scale', type=int, help='(Optional) Scale by jobs (default = 1 job per drive)', default=1)
    parser.add_argument('-x', action='store_true', dest='exitall', help='(Optional) Pass --exitall to fio')
    parser.add_argument('-z', action='store_true', dest='deterministic', help='(Optional) Static file and directory names')
    parser.add_argument('-m', action='store', dest='misc', type=str, help='(Optional) Set a misc variable in a workload suite (default = )', default='')
    parser.add_argument('-e', action='store', dest='expert', type=str, help='(Optional) Pass this string directly to fio command line invocation and attach just before jobfile argument (default = )', default='')
    parser.add_argument('-u', action='store', dest='user', type=str, help='(Optional) The user to login as on the server when running fiosynth in client/server mode (default = root)', default='root')
    parser.add_argument('-y', action='store', dest='tunneling', type=str, help='(Optional) Set to y to perform server/client mode fio via SSH tunnels. (default = n)', default='n')
    parser.add_argument('--waf', action='store_true', dest='calc_waf', help='(Optional) Set to use calculate WAF (Write Amplification Factor) based on OCP SMART / Health Information Extended (Log Identifier C0h) data. OCP compliant devices only. (default = disabled)')
    parser.add_argument('--lm', action='store_true', dest='check_lm', help='(Optional) Set to use OCP Latency Monitor to check max read and max write latencies if targets available. OCP 2.0 compliant devices only. (default = disabled)')
    parser.add_argument('--targets', action='store_true', dest='show_targets', help='(Optional) Set to add performance targets in the output CSV file. Targets are taken from workload file. (default = disabled)')
    parser.add_argument('--scale-by-TB', action='store_true', dest='scale_by_TB', help='(Optional) Set to scale BW and IOPs by device capacity in the output CSV file. Workload must have store_by_TB set to true. (default = disabled)')
    parser.add_argument('-v', action='version', version=parser.description)
    args = parser.parse_args()
    return args
class NameOrTotalTupleCommandHandler(MethodCommandHandler):
    """Handler whose value is either a single namedtuple of totals, or a dict
    of namedtuples keyed by element name; `params` selects element and field.

    Request grammar (as implemented below): params splits into (param, name).
    param '*' (or '*;') selects all fields; name '*' (or '*;') selects all
    elements; a trailing ';' requests joined string output.
    """

    def handle(self, params: str) -> Payload:
        name: Optional[str] = None
        # `split` is a module-level helper -- presumably splits "param;name".
        (param, name) = split(params)
        all_params = ((param == '*') or (param == '*;'))
        params_join = param.endswith(';')
        total = True
        index_join = False
        if (name == '*'):
            # All elements requested, individual (non-total) values.
            total = False
            name = None
        elif (name == '*;'):
            # All elements, joined per index.
            total = False
            index_join = True
            name = None
        elif (name != ''):
            # A specific element by name.
            total = False
        if ((not total) and (name is None) and all_params):
            # '*' for both element and field at once is ambiguous; refuse.
            raise Exception((("Cannot list all elements and parameters at the same '" + params) + "' request"))
        result = self.get_value(total)
        assert (isinstance(result, tuple) or isinstance(result, dict))
        if ((name is None) or (name == '')):
            if all_params:
                # Totals with every field: serialize the namedtuple as a dict.
                assert isinstance(result, tuple)
                return string_from_dict_optionally(result._asdict(), params_join)
            if (not total):
                # One field across all elements of the dict.
                return dict_from_dict_of_namedtupes(result, param, params, index_join)
            assert isinstance(result, tuple)
            if (param in result._fields):
                return getattr(result, param)
            raise Exception(f"Element '{param}' in '{params}' is not supported")
        # A specific element was named: look it up and extract field(s).
        res = result[name]
        if all_params:
            return string_from_dict_optionally(res._asdict(), params_join)
        elif (param in res._fields):
            return getattr(res, param)
        raise Exception(f"Parameter '{param}' in '{params}' is not supported")

    def get_value(self, total: bool) -> Union[(Dict[(str, NamedTuple)], NamedTuple)]:
        """Subclass hook: return a totals namedtuple when `total` is True,
        else a dict mapping element names to namedtuples."""
        raise Exception('Not implemented')
class queue_stats_reply(stats_reply):
    """OpenFlow 1.0 OFPST_QUEUE stats reply message (generated loxi-style code)."""
    version = 1      # OpenFlow wire version 1.0
    type = 17        # OFPT_STATS_REPLY
    stats_type = 5   # OFPST_QUEUE

    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to wire format; the length field is patched in afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # backfill the real message length
        return ''.join(packed)

    def unpack(reader):
        # No `self`/`cls` parameter -- presumably @staticmethod in the original.
        obj = queue_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 1)
        _type = reader.read('!B')[0]
        assert (_type == 17)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's byte range.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 5)
        obj.flags = reader.read('!H')[0]
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.queue_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        # Field-wise equality; xid, flags and entries must all match.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form into pretty-printer `q`."""
        q.text('queue_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class OptionsChat(Options):
    """Options wrapper for a chat component.

    NOTE(review): each getter/setter pair below shares one name; the original
    source almost certainly used @property / @<name>.setter decorators that
    were stripped during extraction. As written, the later def shadows the
    earlier one -- restore the decorators before relying on this copy.
    """

    def dated(self):
        return self._config_get(True)

    def dated(self, flag: bool):
        # Dated chat messages require the moment.js library on the page.
        if flag:
            self.component.jsImports.add('moment')
        self._config(flag)

    def readonly(self):
        return self._config_get(True)

    def readonly(self, flag: bool):
        self._config(flag)

    def markdown(self):
        return self._config_get(False, 'showdown')
    # NOTE(review): stray expression below -- likely residue of a stripped
    # decorator argument such as @packageImport('showdown').
    ('showdown')

    def markdown(self, values):
        # Accept `True` as shorthand for an empty showdown options dict.
        values = ({} if (values is True) else values)
        self._config(values, 'showdown')
class ExaError(Exception):
    """Base Exasol error carrying the connection on which it occurred.

    When the connection has `verbose_error` enabled, str() renders a
    multi-line block with message, DSN, user, schema and session id.
    """

    def __init__(self, connection, message):
        self.connection = connection
        self.message = message
        # Pass only the message to Exception.__init__; the previous code
        # called super().__init__(self, message), which stuffed the exception
        # instance itself into .args as a bogus first element.
        super().__init__(message)

    def get_params_for_print(self):
        """Collect the key/value pairs shown by the verbose __str__ rendering."""
        return {'message': self.message, 'dsn': self.connection.options['dsn'], 'user': self.connection.options['user'], 'schema': self.connection.current_schema(), 'session_id': self.connection.session_id()}

    def __str__(self):
        # Terse form unless the connection asks for verbose errors.
        if (not self.connection.options['verbose_error']):
            return self.message
        params = self.get_params_for_print()
        # Left-align keys to the longest key for a readable block.
        pad_length = max((len(x) for x in params))
        res = ''
        for k in params:
            res += f'''    {k.ljust(pad_length)}  =>  {params[k]}
'''
        return (('\n(\n' + res) + ')\n')
# NOTE(review): the leading '.skipif' / '.integrationtest' lines are pytest
# markers whose '@pytest.mark' prefix was lost during extraction.
.skipif((redis.VERSION < (3,)), reason='pubsub not available as context manager in redis-py 2')
.integrationtest
def test_publish_subscribe(instrument, elasticapm_client, redis_conn):
    """PUBLISH and SUBSCRIBE must each yield a redis db span with destination info."""
    elasticapm_client.begin_transaction('transaction.test')
    with capture_span('test_publish_subscribe', 'test'):
        redis_conn.publish('mykey', 'a')
        with redis_conn.pubsub() as channel:
            channel.subscribe('mykey')
    elasticapm_client.end_transaction('MyView')
    transactions = elasticapm_client.events[TRANSACTION]
    spans = elasticapm_client.spans_for_transaction(transactions[0])
    expected_signatures = {'test_publish_subscribe', 'PUBLISH', 'SUBSCRIBE'}
    assert ({t['name'] for t in spans} == expected_signatures)
    # Spans arrive in start order: PUBLISH, SUBSCRIBE, then the wrapper span.
    assert (spans[0]['name'] == 'PUBLISH')
    assert (spans[0]['type'] == 'db')
    assert (spans[0]['subtype'] == 'redis')
    assert (spans[0]['action'] == 'query')
    assert (spans[0]['context']['destination'] == {'address': os.environ.get('REDIS_HOST', 'localhost'), 'port': int(os.environ.get('REDIS_PORT', 6379)), 'service': {'name': '', 'resource': 'redis', 'type': ''}})
    assert (spans[1]['name'] == 'SUBSCRIBE')
    assert (spans[1]['type'] == 'db')
    assert (spans[1]['subtype'] == 'redis')
    assert (spans[1]['action'] == 'query')
    assert (spans[1]['context']['destination'] == {'address': os.environ.get('REDIS_HOST', 'localhost'), 'port': int(os.environ.get('REDIS_PORT', 6379)), 'service': {'name': '', 'resource': 'redis', 'type': ''}})
    assert (spans[2]['name'] == 'test_publish_subscribe')
    assert (spans[2]['type'] == 'test')
    assert (len(spans) == 3)
# NOTE(review): missing '@' -- presumably a stripped @_HEADS_REGISTRY.register() decorator.
_HEADS_REGISTRY.register()
class StandardROIHeadsWithSubClass(StandardROIHeads):
    """StandardROIHeads extended with an extra per-box subclass classifier."""

    def __init__(self, cfg, input_shape):
        super().__init__(cfg, input_shape)
        self.subclass_on = cfg.MODEL.SUBCLASS.SUBCLASS_ON
        if (not self.subclass_on):
            return
        self.num_subclasses = cfg.MODEL.SUBCLASS.NUM_SUBCLASSES
        # +1 output for the background subclass (index 0).
        self.subclass_head = build_subclass_head(cfg, self.box_head.output_shape.channels, (self.num_subclasses + 1))
        for layer in self.subclass_head:
            nn.init.normal_(layer.weight, std=0.01)
            nn.init.constant_(layer.bias, 0.0)

    def forward(self, images, features, proposals, targets=None):
        """Run the standard heads plus the subclass head.

        Returns (proposals, losses) in training mode and
        (pred_instances, {}) in inference mode.
        """
        if (not self.subclass_on):
            return super().forward(images, features, proposals, targets)
        del images
        if self.training:
            proposals = self.label_and_sample_proposals(proposals, targets)
            for pp_per_im in proposals:
                if (not pp_per_im.has('gt_subclasses')):
                    # Proposals without matched GT fall back to background (0).
                    background_subcls_idx = 0
                    pp_per_im.gt_subclasses = torch.cuda.LongTensor(len(pp_per_im)).fill_(background_subcls_idx)
        del targets
        features_list = [features[f] for f in self.in_features]
        box_features = self.box_pooler(features_list, [x.proposal_boxes for x in proposals])
        box_features = self.box_head(box_features)
        predictions = self.box_predictor(box_features)
        # Flatten per-box features before feeding the subclass classifier.
        box_features = box_features.view(box_features.shape[0], np.prod(box_features.shape[1:]))
        pred_subclass_logits = self.subclass_head(box_features)
        if self.training:
            losses = self.box_predictor.losses(predictions, proposals)
            losses.update(self._forward_mask(features, proposals))
            losses.update(self._forward_keypoint(features, proposals))
            gt_subclasses = cat([p.gt_subclasses for p in proposals], dim=0)
            loss_subclass = F.cross_entropy(pred_subclass_logits, gt_subclasses, reduction='mean')
            losses.update({'loss_subclass': loss_subclass})
            return (proposals, losses)
        else:
            (pred_instances, kept_indices) = self.box_predictor.inference(predictions, proposals)
            pred_instances = self.forward_with_given_boxes(features, pred_instances)
            probs = F.softmax(pred_subclass_logits, dim=(- 1))
            # Attach subclass probabilities for the boxes that survived NMS.
            for (pred_instances_i, kept_indices_i) in zip(pred_instances, kept_indices):
                pred_instances_i.pred_subclass_prob = torch.index_select(probs, dim=0, index=kept_indices_i.to(torch.int64))
            if torch.onnx.is_in_onnx_export():
                assert (len(pred_instances) == 1)
                pred_instances[0].pred_subclass_prob = alias(pred_instances[0].pred_subclass_prob, 'subclass_prob_nms')
            return (pred_instances, {})
class ExtentDialog(HasTraits):
    """Traits dialog with six sliders to edit a filter's (x, y, z) extent."""
    # Data bounds that constrain the slider ranges below.
    data_x_min = Float
    data_x_max = Float
    data_y_min = Float
    data_y_max = Float
    data_z_min = Float
    data_z_max = Float
    # Sliders, each ranging over the corresponding data bounds.
    x_min = Range('data_x_min', 'data_x_max', 'data_x_min')
    x_max = Range('data_x_min', 'data_x_max', 'data_x_max')
    y_min = Range('data_y_min', 'data_y_max', 'data_y_min')
    y_max = Range('data_y_min', 'data_y_max', 'data_y_max')
    z_min = Range('data_z_min', 'data_z_max', 'data_z_min')
    z_max = Range('data_z_min', 'data_z_max', 'data_z_max')
    # The filter object whose `extent` attribute is updated.
    filter = Instance(HasTraits, allow_none=False)

    # NOTE(review): presumably '@on_trait_change(...)'; the '@' was lost in extraction.
    _trait_change('x_min,x_max,y_min,y_max,z_min,z_max')
    def update_extent(self):
        # Push a new extent only when it is non-degenerate on every axis.
        if ((self.filter is not None) and (self.x_min < self.x_max) and (self.y_min < self.y_max) and (self.z_min < self.z_max)):
            self.filter.extent = (self.x_min, self.x_max, self.y_min, self.y_max, self.z_min, self.z_max)

    view = View('x_min', 'x_max', 'y_min', 'y_max', 'z_min', 'z_max', title='Edit extent', resizable=True)
class ShfeTradingCalendarSpider(scrapy.Spider):
    """Crawl SHFE daily pages to extend the saved future trading calendar."""
    name = 'shfe_trading_calendar_spider'
    custom_settings = {}

    def __init__(self, name=None, **kwargs):
        super().__init__(name, **kwargs)
        # Dates already saved on disk; crawling resumes after the last one.
        self.saved_trading_dates = get_trading_calendar(security_type='future', exchange='shfe')
        self.trading_dates = []

    def start_requests(self):
        # Default to Jan 1 of the current year when nothing is saved yet.
        latest_trading_date = '{}0101'.format(datetime.today().year)
        if self.saved_trading_dates:
            latest_trading_date = self.saved_trading_dates[(- 1)]
            latest_trading_date = next_date(latest_trading_date)
        for the_date in pd.date_range(start=latest_trading_date, end=datetime.today()):
            # Skip weekends -- the exchange is closed.
            if ((the_date.weekday() == 5) or (the_date.weekday() == 6)):
                continue
            the_date_str = to_time_str(the_time=the_date, time_fmt='%Y%m%d')
            (yield Request(url=self.get_trading_date_url(the_date=the_date_str), meta={'the_date': the_date_str}, callback=self.download_trading_calendar))

    def download_trading_calendar(self, response):
        # A 200 response means the daily page exists, i.e. a trading day.
        if (response.status == 200):
            self.trading_dates.append(response.meta['the_date'])

    def from_crawler(cls, crawler, *args, **kwargs):
        # Takes `cls` -- presumably decorated with @classmethod originally.
        spider = super(ShfeTradingCalendarSpider, cls).from_crawler(crawler, *args, **kwargs)
        crawler.signals.connect(spider.spider_closed, signal=signals.spider_closed)
        return spider

    def spider_closed(self, spider, reason):
        # Merge newly found dates with the saved ones and rewrite the JSON file.
        if self.trading_dates:
            if self.saved_trading_dates:
                # NOTE(review): append() adds the saved list as a single nested
                # element; extend() looks intended -- confirm against drop_duplicate.
                self.trading_dates.append(self.saved_trading_dates)
            result_list = drop_duplicate(self.trading_dates)
            result_list = sorted(result_list)
            the_path = get_exchange_trading_calendar_path('future', 'shfe')
            with open(the_path, 'w') as outfile:
                json.dump(result_list, outfile)
        spider.logger.info('Spider closed: %s,%s\n', spider.name, reason)

    def get_trading_date_url(self, the_date):
        # NOTE(review): the URL string literal is truncated in this copy of the
        # file (unterminated quote); the original URL was lost in extraction.
        return '
def get_constructor(osm_data: OSMData) -> Constructor:
    """Build a Constructor over a tiny test boundary box and construct its ways."""
    test_box = BoundaryBox(-0.01, -0.01, 0.01, 0.01)
    flinger: MercatorFlinger = MercatorFlinger(test_box, 18, osm_data.equator_length)
    constructor: Constructor = Constructor(osm_data, flinger, SHAPE_EXTRACTOR, CONFIGURATION)
    constructor.construct_ways()
    return constructor
def test_point_gauge_output():
    """Run PointGauges over three time steps and verify the CSV it produces."""
    filename = 'test_gauge_output.csv'
    silent_rm(filename)
    gauges = PointGauges(gauges=((('u0',), ((0, 0, 0), (1, 1, 1))),), fileName=filename)
    run_gauge(gauges, [0.0, 1.0, 2.0])
    expected_names = ['u0 [ 0 0 0]', 'u0 [ 1 1 1]']
    expected_data = np.asarray([[0.0, 0.0, 111.0], [1.0, 0.0, 222.0], [2.0, 0.0, 333.0]])
    # Synchronize all ranks before reading the file back.
    Comm.get().barrier()
    gauge_names, data = parse_gauge_output(filename)
    assert expected_names == gauge_names
    npt.assert_allclose(expected_data, data)
    delete_file(filename)
def strict_delete(self, match, priority=None):
    """Send an OFPFC_DELETE_STRICT for `match` and wait for a barrier.

    `priority` narrows the strict delete to flows at that exact priority.
    """
    msg4 = ofp.message.flow_delete_strict()
    msg4.out_port = ofp.OFPP_NONE
    # Bug fix: the previous chained assignment
    # `msg4.buffer_id = msg4.match = match` set buffer_id to the match
    # object. Use 0xffffffff (OFP_NO_BUFFER) -- flow-mods carry no buffer.
    msg4.buffer_id = 0xffffffff
    msg4.match = match
    if priority is not None:
        msg4.priority = priority
    self.controller.message_send(msg4)
    do_barrier(self.controller)
class LayoutPageCoordinates(NamedTuple):
    """Immutable rectangle located on a specific page of a laid-out document."""
    x: float
    y: float
    width: float
    height: float
    page_number: int = 0

    def from_bounding_box(bounding_box: BoundingBox, page_number: int=0) -> 'LayoutPageCoordinates':
        # No `self`/`cls` parameter -- presumably @staticmethod in the original.
        return LayoutPageCoordinates(x=bounding_box.x, y=bounding_box.y, width=bounding_box.width, height=bounding_box.height, page_number=page_number)

    def bounding_box(self) -> BoundingBox:
        # Page-agnostic view of the same rectangle (likely a @property originally).
        return BoundingBox(x=self.x, y=self.y, width=self.width, height=self.height)

    def __bool__(self) -> bool:
        # Truthy iff the rectangle has non-zero width and height.
        return (not self.is_empty())

    def is_empty(self) -> bool:
        return ((self.width == 0) or (self.height == 0))

    def move_by(self, dx: float=0, dy: float=0) -> 'LayoutPageCoordinates':
        """Return a copy translated by (dx, dy), staying on the same page."""
        return LayoutPageCoordinates(x=(self.x + dx), y=(self.y + dy), width=self.width, height=self.height, page_number=self.page_number)

    def get_merged_with(self, other: 'LayoutPageCoordinates') -> 'LayoutPageCoordinates':
        """Return the smallest rectangle covering self and other (same page only)."""
        assert (self.page_number == other.page_number), 'cannot merge coordinates on different pages'
        x = min(self.x, other.x)
        y = min(self.y, other.y)
        width = (max((self.x + self.width), (other.x + other.width)) - x)
        height = (max((self.y + self.height), (other.y + other.height)) - y)
        return LayoutPageCoordinates(x=x, y=y, width=width, height=height, page_number=self.page_number)
class SyntenyBackend():
    """Base class for synteny-block backends; instances register themselves
    in the module-level `backends` registry via register_backend()."""

    # Global registry of available backend instances, keyed by name.
    backends = {}

    def __init__(self):
        self.target_fasta = None
        self.threads = None
        self.blocks = None

    def make_permutations(self, recipe, blocks, output_dir, overwrite, threads):
        """Run the backend and return a {block_size: permutations_file} mapping."""
        target_genome = recipe['genomes'][recipe['target']]
        self.target_fasta = target_genome.get('fasta')
        self.threads = threads
        self.blocks = blocks
        files = self.run_backend(recipe, output_dir, overwrite)
        # Every requested block size must be accounted for.
        assert sorted(files.keys()) == sorted(blocks)
        return files

    def run_backend(self, _recipe, _output_dir, _overwrite):
        """Subclass hook: produce the per-block-size output files."""
        return None

    def get_target_fasta(self):
        return self.target_fasta

    def infer_block_scale(self, recipe):
        """Classify the target genome as 'small' or 'large' by FASTA file size."""
        fasta_path = recipe['genomes'][recipe['target']].get('fasta')
        if not fasta_path or not os.path.exists(fasta_path):
            raise BackendException('Could not open target FASTA file or it is not specified')
        if os.path.getsize(fasta_path) < config.vals['big_genome_threshold']:
            return 'small'
        return 'large'


def get_available_backends():
    """Return the registry of all registered backends."""
    return SyntenyBackend.backends


def register_backend(name, instance):
    """Register `instance` under the unique `name`."""
    assert name not in SyntenyBackend.backends
    SyntenyBackend.backends[name] = instance
def get_episodes(html, url):
    """Return Episode objects for a Bilibili comic page, oldest episode first."""
    comic_id = int(re.search('mc(\\d+)', url).group(1))
    # Lazily populate the shared detail cache, then consume the entry.
    if comic_id not in comic_detail:
        comic_detail.load(comic_id)
    detail = comic_detail.pop(comic_id)
    episodes = []
    for ep in reversed(detail['data']['ep_list']):
        title = '{} - {}'.format(ep['short_title'], ep['title'])
        episode_url = urljoin(url, '/mc{}/{}?from=manga_detail'.format(comic_id, ep['id']))
        episodes.append(Episode(title, episode_url))
    return episodes
class AclResponseAllOf(ModelNormal):
    """Auto-generated OpenAPI model: ACL response fields (all read-only).

    NOTE(review): the bare '_property' / '_js_args_to_python_args' names below
    are decorators whose '@' prefix (and, for '_from_openapi_data', an
    accompanying @classmethod) was lost during extraction.
    """
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Any JSON-compatible type is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    _property
    def openapi_types():
        # Declared property name -> accepted type tuple.
        return {'service_id': (str,), 'version': (str,), 'id': (str,)}

    _property
    def discriminator():
        return None

    attribute_map = {'service_id': 'service_id', 'version': 'version', 'id': 'id'}
    read_only_vars = {'service_id', 'version', 'id'}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw API data; read-only attributes are allowed here."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys per the client configuration.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Public constructor; rejects assignment to read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys per the client configuration.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptionPlotoptionsWaterfallSonificationTracksMappingGapbetweennotes(Options):
    """Options wrapper for the gap-between-notes sonification mapping.

    NOTE(review): each getter/setter pair below shares a name; the original
    source presumably used @property / @<name>.setter decorators that were
    stripped during extraction -- as written, the later def shadows the earlier.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class PublishItem(ModelNormal):
    """Auto-generated OpenAPI model: an item published to a channel.

    NOTE(review): the bare '_property' / '_js_args_to_python_args' names below
    are decorators whose '@' prefix (and, for '_from_openapi_data', an
    accompanying @classmethod) was lost during extraction.
    """
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Resolve forward references before exposing accepted types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    _property
    def openapi_types():
        lazy_import()
        # Declared property name -> accepted type tuple.
        return {'channel': (str,), 'formats': (PublishItemFormats,), 'id': (str,), 'prev_id': (str,)}

    _property
    def discriminator():
        return None

    # Python attribute name -> JSON wire name.
    attribute_map = {'channel': 'channel', 'formats': 'formats', 'id': 'id', 'prev_id': 'prev-id'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, channel, formats, *args, **kwargs):
        """Instantiate from raw API data; `channel` and `formats` are required."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.channel = channel
        self.formats = formats
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys per the client configuration.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, channel, formats, *args, **kwargs):
        """Public constructor; rejects assignment to read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.channel = channel
        self.formats = formats
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys per the client configuration.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class EnumInstance(object):
    """One value of a runtime-constructed enum; compares by value."""

    def __init__(self, enum, value):
        self.enum = enum
        self.value = value

    def __eq__(self, value):
        # Instances of *different* enums are never equal, even for equal values.
        if (isinstance(value, EnumInstance) and (value.enum is not self.enum)):
            return False
        # Unwrap anything value-like, then compare raw values.
        if hasattr(value, 'value'):
            value = value.value
        return (self.value == value)

    def __ne__(self, value):
        return (self.__eq__(value) is False)

    def __hash__(self):
        return hash((self.enum, self.value))

    def __str__(self):
        # Uses self.name without calling it -- see the note on name() below.
        return '{}.{}'.format(self.enum.name, self.name)

    def __repr__(self):
        return '<{}.{}: {}>'.format(self.enum.name, self.name, self.value)

    def name(self):
        # NOTE(review): __str__/__repr__ access `self.name` as an attribute,
        # so this was presumably decorated with @property originally.
        if (self.value not in self.enum.reverse):
            # Unknown value: synthesize a "<enum>_<value>" placeholder name.
            return '{}_{}'.format(self.enum.name, self.value)
        return self.enum.reverse[self.value]
class TrainingTimeEstimator():
    """Estimates round structure and per-user example counts for a simulated
    federated-training run."""

    def __init__(self, total_users: int, users_per_round: int, epochs: int, training_dist: IDurationDistribution, num_examples: Optional[List[int]]=None):
        self.total_users = total_users
        self.users_per_round = users_per_round
        self.epochs = epochs
        # Rounds needed so each user participates `epochs` times on average.
        self.rounds = int(self.epochs * self.total_users / self.users_per_round)
        self.num_examples: Optional[List[int]] = num_examples
        self.training_dist = training_dist

    def random_select(self):
        """Pick a random example count, or 1 when no counts were provided."""
        if self.num_examples is None:
            return 1
        return random.choice(self.num_examples)
def main():
    """Fit logistic regression on iris classes 1 vs 2 and plot the result."""
    data = load_iris()
    features = data['data']
    targets = data['target']
    # Keep only classes 1 and 2, then relabel them to {0, 1}.
    keep = targets != 0
    features = features[keep]
    targets = targets[keep]
    targets = targets - 1
    X_train, X_test, y_train, y_test = train_test_split(features, targets, test_size=0.33)
    clf = LogisticRegression()
    clf.fit(X_train, y_train)
    # Round probabilities/scores to hard 0/1 predictions.
    predictions = np.rint(clf.predict(X_test))
    accuracy = np.mean(predictions == y_test)
    print('Accuracy:', accuracy)
    Plot().plot_in_2d(features, targets, title='Logistic Regression', accuracy=accuracy, legend_labels=data['target_names'])
def integrate_spherical_annulus_volume(MeshClass, radius=1000, refinement=2):
    """Integrate unity over a radially-extruded spherical annulus and return
    the relative error against the analytic shell volume."""
    layers = 10
    base_mesh = MeshClass(radius=radius, refinement_level=refinement)
    # Total shell thickness is 1/radius, split evenly across the layers.
    layer_height = 1.0 / (radius * layers)
    mesh = ExtrudedMesh(base_mesh, layers, layer_height=layer_height, extrusion_type='radial')
    fs = FunctionSpace(mesh, 'CG', 1, name='fs')
    f = Function(fs)
    f.assign(1)
    # Analytic volume of the shell between radius and radius + 1/radius.
    exact = 4 * pi * ((radius + 1.0 / radius) ** 3 - radius ** 3) / 3
    computed = assemble(f * dx)
    return np.abs(computed - exact) / exact
def _download_file_python(url: str, target_path: Path) -> Path:
    """Stream `url` into `target_path` via a temp file, printing progress.

    The data is written to a NamedTemporaryFile and moved into place on
    success, so `target_path` is never left half-written; the temp file is
    always removed (a no-op after a successful move).
    """
    import shutil
    import tempfile
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        try:
            file_path = temp_file.name
            for (progress, total_size) in _stream_url_data_to_file(url, temp_file.name):
                if total_size:
                    # Total size known: report fractional progress.
                    progress_msg = f'Downloading {url} ... {progress:.2%}'
                else:
                    # Unknown total: report the running amount instead.
                    progress_msg = f'Downloading {url} ... {progress:.2f} MB'
                print(progress_msg, end='\r\n')
            shutil.move(file_path, target_path)
        # The previous `except Exception as error: raise error` was removed:
        # it only re-raised (resetting the traceback context); `finally`
        # already guarantees cleanup on both success and failure.
        finally:
            Path(temp_file.name).unlink(missing_ok=True)
    return target_path
def update_config_file(updates: Dict[(str, Dict[(str, Any)])], config_path_override: str) -> None:
    """Merge `updates` into the config file and echo every applied setting.

    Falls back to the default config location when no override is given.
    """
    config_path = config_path_override or load_file(file_names=[DEFAULT_CONFIG_PATH])
    current_config = load(config_path)
    # Merge section-by-section; unknown sections are added wholesale.
    for key, value in updates.items():
        if key in current_config:
            current_config[key].update(value)
        else:
            current_config.update({key: value})
    with open(config_path, 'w', encoding='utf-8') as config_file:
        dump(current_config, config_file)
    # Report what changed.
    echo(f'Updated {config_path}:')
    for key, value in updates.items():
        for subkey, val in value.items():
            echo(f' Set {key}.{subkey} = {val}')
def hash(x): if isinstance(x, bytes): return hashlib.sha256(x).digest() elif isinstance(x, Point): return hash(x.serialize()) b = b'' for a in x: if isinstance(a, bytes): b += a elif isinstance(a, int): b += a.to_bytes(32, 'little') elif isinstance(a, Point): b += a.serialize() return hash(b)
def test_get_firmware_number(backend_db, common_db):
    """Firmware counts should track inserts and respect query filters."""
    assert common_db.get_firmware_number() == 0
    backend_db.insert_object(TEST_FW)
    assert common_db.get_firmware_number(query={}) == 1
    assert common_db.get_firmware_number(query={'uid': TEST_FW.uid}) == 1
    second_fw = create_test_firmware(bin_path='container/test.7z')
    backend_db.insert_object(second_fw)
    # After the second insert: broad queries match both, uid/sha256 only one.
    expectations = [
        ({}, 2),
        ({'device_class': 'Router'}, 2),
        ({'uid': TEST_FW.uid}, 1),
        ({'sha256': TEST_FW.sha256}, 1),
    ]
    for query, expected in expectations:
        assert common_db.get_firmware_number(query=query) == expected
def test_multisol():
    """Enumerate all short vectors of an LLL-reduced lattice twice (different
    radius/limit settings); both runs must find all 63 solutions of norm 48.

    The 126 shortest vectors come in +/- pairs, so the enumerator reports 63.
    """
    A = make_integer_matrix()
    m = GSO.Mat(A)
    lll_obj = LLL.Reduction(m)
    lll_obj()

    def _check_solutions(solutions):
        # Shared verification: count and per-solution squared norm.
        assert len(solutions) == 63
        for (_, sol) in solutions:
            sol = IntegerMatrix.from_iterable(1, A.nrows, map((lambda x: int(round(x))), sol))
            sol = tuple((sol * A)[0])
            dist = sum(x ** 2 for x in sol)
            assert dist == 48

    # Tight radius, generous solution limit.
    _check_solutions(Enumeration(m, nr_solutions=200).enumerate(0, 27, 48.5, 0))
    # Loose radius, exact solution limit. The original passed the float
    # 126 / 2 where nr_solutions expects an int; use 63 directly.
    _check_solutions(Enumeration(m, nr_solutions=63).enumerate(0, 27, 100.0, 0))
class GetActiveRepositoriesAction(Action):
    """Action that produces the active repositories for a given day."""

    def help_text(cls) -> str:
        # Takes `cls` -- presumably decorated with @classmethod originally.
        return 'remain only active repos: filter and remove duplicates and skip repos in Blacklist'

    def name(cls):
        # Takes `cls` -- presumably decorated with @classmethod originally.
        return 'daily-active-repositories'

    def _execute(self, day: datetime):
        # Delegate to the module-level query helper for the given day.
        return get_daily_active_repositories(date=day)
class TestFlakyMany(AEATestCaseManyFlaky):
    """Verify that the flaky-rerun machinery reruns a failed test exactly once."""

    # NOTE(review): '.flaky' is a decorator whose prefix (e.g. '@pytest.mark')
    # was lost during extraction.
    .flaky(reruns=1)
    def test_fail_on_first_run(self):
        # First attempt: drop a marker file and fail to trigger the rerun.
        # Second attempt: run_count is 2 and the harness has reset the workdir.
        file = os.path.join(self.t, 'test_file')
        if (self.run_count == 1):
            open(file, 'a').close()
            raise AssertionError('Expected error to trigger rerun!')
        assert (self.run_count == 2), 'Should only be rerun once!'
        assert (not os.path.isfile(file)), 'File should not exist'
def extractKeisotsunatranslationWordpressCom(item):
    """Map a feed item to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, series name to report, translation type)
    tagmap = [
        ('my old gong has amnesia', 'my old gong has amnesia', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class StalkerShotAddAnimationOutputOperator(bpy.types.Operator):
    """Blender operator that adds an animation output to a Stalker Shot."""

    bl_label = 'Add Animation Output'
    bl_idname = 'stalker.shot_add_animation_output_op'
    stalker_entity_id = bpy.props.IntProperty(name='stalker_entity_id')
    stalker_entity_name = bpy.props.StringProperty(name='stalker_entity_name')

    def execute(self, context):
        logger.debug('inside %s.execute()' % self.__class__.__name__)
        # Look up the Shot this operator was invoked for.
        shot = Shot.query.get(self.stalker_entity_id)
        logger.debug('shot: %s' % shot)
        return {'FINISHED'}
# NOTE(review): the parenthesized keyword block below is a docstring-
# substitution decorator (e.g. @Substitution(...)) whose '@Substitution'
# prefix was lost during extraction.
(estimator=_estimator_docstring, scoring=_scoring_docstring, cv=_cv_docstring, confirm_variables=_confirm_variables_docstring, variables=_variables_numerical_docstring, missing_values=_missing_values_docstring, variables_=_variables_attribute_docstring, feature_names_in_=_feature_names_in_docstring, n_features_in_=_n_features_in_docstring, fit_transform=_fit_transform_docstring, get_support=_get_support_docstring)
class SmartCorrelatedSelection(BaseSelector):
    """From each group of correlated features, keep one representative chosen
    by `selection_method` and mark the rest for dropping."""

    def __init__(self, variables: Variables=None, method: str='pearson', threshold: float=0.8, missing_values: str='ignore', selection_method: str='missing_values', estimator=None, scoring: str='roc_auc', cv=3, confirm_variables: bool=False):
        # Validate hyper-parameters up front so fit() can rely on them.
        if ((not isinstance(threshold, float)) or (threshold < 0) or (threshold > 1)):
            raise ValueError('threshold must be a float between 0 and 1')
        if (missing_values not in ['raise', 'ignore']):
            raise ValueError("missing_values takes only values 'raise' or 'ignore'.")
        if (selection_method not in ['missing_values', 'cardinality', 'variance', 'model_performance']):
            raise ValueError("selection_method takes only values 'missing_values', 'cardinality', 'variance' or 'model_performance'.")
        if ((selection_method == 'model_performance') and (estimator is None)):
            raise ValueError('Please provide an estimator, e.g., RandomForestClassifier or select another selection_method')
        if ((selection_method == 'missing_values') and (missing_values == 'raise')):
            raise ValueError("To select the variables with least missing values, we need to allow this transformer to contemplate variables with NaN by setting missing_values to 'ignore.")
        super().__init__(confirm_variables)
        self.variables = _check_variables_input_value(variables)
        self.method = method
        self.threshold = threshold
        self.missing_values = missing_values
        self.selection_method = selection_method
        self.estimator = estimator
        self.scoring = scoring
        self.cv = cv

    def fit(self, X: pd.DataFrame, y: pd.Series=None):
        """Find groups of correlated features and pick one survivor per group."""
        X = check_X(X)
        self._confirm_variables(X)
        if (self.variables is None):
            self.variables_ = find_numerical_variables(X)
        else:
            # NOTE(review): passes self.variables_ (set by _confirm_variables?)
            # rather than self.variables -- confirm this is intended.
            self.variables_ = check_numerical_variables(X, self.variables_)
        self._check_variable_number()
        if (self.missing_values == 'raise'):
            _check_contains_na(X, self.variables_)
            _check_contains_inf(X, self.variables_)
        if ((self.selection_method == 'model_performance') and (y is None)):
            raise ValueError('y is needed to fit the transformer')
        self.correlated_feature_sets_ = []
        _correlated_matrix = X[self.variables_].corr(method=self.method)
        _examined_features = set()
        # Greedy grouping: each unexamined feature seeds a group of all
        # still-unexamined features correlated with it above the threshold.
        for feature in _correlated_matrix.columns:
            if (feature not in _examined_features):
                _examined_features.add(feature)
                _temp_set = set([feature])
                _features_to_compare = [f for f in _correlated_matrix.columns if (f not in _examined_features)]
                for f2 in _features_to_compare:
                    if (abs(_correlated_matrix.loc[(f2, feature)]) > self.threshold):
                        _temp_set.add(f2)
                        _examined_features.add(f2)
                if (len(_temp_set) > 1):
                    self.correlated_feature_sets_.append(_temp_set)
        # Features outside any correlated group are kept unconditionally.
        _selected_features = [f for f in X.columns if (f not in set().union(*self.correlated_feature_sets_))]
        if (self.selection_method == 'missing_values'):
            # Keep the feature with the fewest NaNs from each group.
            for feature_group in self.correlated_feature_sets_:
                feature_group = list(feature_group)
                f = X[feature_group].isnull().sum().sort_values(ascending=True).index[0]
                _selected_features.append(f)
        elif (self.selection_method == 'cardinality'):
            # Keep the feature with the most unique values.
            for feature_group in self.correlated_feature_sets_:
                feature_group = list(feature_group)
                f = X[feature_group].nunique().sort_values(ascending=False).index[0]
                _selected_features.append(f)
        elif (self.selection_method == 'variance'):
            # Keep the feature with the largest standard deviation.
            for feature_group in self.correlated_feature_sets_:
                feature_group = list(feature_group)
                f = X[feature_group].std().sort_values(ascending=False).index[0]
                _selected_features.append(f)
        else:
            # model_performance: keep the single feature whose univariate
            # cross-validated score is highest within the group.
            for feature_group in self.correlated_feature_sets_:
                temp_perf = []
                for feature in feature_group:
                    model = cross_validate(self.estimator, X[feature].to_frame(), y, cv=self.cv, return_estimator=False, scoring=self.scoring)
                    temp_perf.append(model['test_score'].mean())
                f = list(feature_group)[temp_perf.index(max(temp_perf))]
                _selected_features.append(f)
        self.features_to_drop_ = [f for f in self.variables_ if (f not in _selected_features)]
        self._get_feature_names_in(X)
        return self
class LinuxNetwork():
    """Linux network helper backed by NetworkManager (via its D-Bus bindings)."""

    def network_services_in_priority_order():
        """Return a NetworkService for every connection that declares an
        'autoconnect-priority', ordered from highest to lowest priority.

        NOTE(review): defined without ``self`` — presumably a stripped
        ``@staticmethod``; confirm against the original decorators.
        """
        def as_uint32(value):
            # NetworkManager exposes the priority as a signed 32-bit int;
            # reinterpret it as unsigned so values sort predictably.
            return int(ctypes.c_uint32(value).value)

        candidates = [
            conn
            for conn in NetworkManager.Settings.ListConnections()
            if 'autoconnect-priority' in conn.GetSettings()['connection']
        ]
        candidates.sort(
            key=lambda conn: as_uint32(conn.GetSettings()['connection']['autoconnect-priority']),
            reverse=True,
        )
        return [NetworkService(conn) for conn in candidates]
# NOTE(review): the leading ``.parametrize`` lines are stripped
# ``@pytest.mark.parametrize`` decorators lost during extraction.
.parametrize('ops', ALL_OPS)
.parametrize('dtype', FLOAT_TYPES)
.parametrize('index_dtype', ['int32', 'uint32'])
def test_gather_add(ops, dtype, index_dtype):
    """gather_add must sum the table rows selected by each row of `indices`."""
    # 4x3 lookup table: row i is [3i, 3i+1, 3i+2].
    table = ops.xp.arange(12, dtype=dtype).reshape(4, 3)
    indices = ops.xp.array([[0, 2], [3, 1], [0, 1]], dtype=index_dtype)
    gathered = ops.gather_add(table, indices)
    # Expected values are elementwise sums of the gathered row pairs,
    # e.g. first row = table[0] + table[2] = [0,1,2] + [6,7,8].
    ops.xp.testing.assert_allclose(gathered, [[6.0, 8.0, 10.0], [12.0, 14.0, 16.0], [3.0, 5.0, 7.0]])
def print_pred(args, screen_name, prediction):
    """Report a bot/legit classification for one account.

    Args:
        args: parsed CLI options; ``args.only`` limits printing to one label
            ('legit' or 'bot'); None prints everything.
        screen_name: account name shown in the output.
        prediction: indexable pair (legit_score, bot_score).

    Returns:
        (label, confidence_percent) regardless of whether a line was printed.
    """
    legit_score = prediction[0]
    bot_score = prediction[1]
    if legit_score > bot_score:
        label = 'legit'
        confidence = legit_score
        # Respect the --only filter before printing.
        if args.only in (label, None):
            print(('%20s legit %f %%' % (screen_name, (legit_score * 100.0))))
    else:
        label = 'bot'
        confidence = bot_score
        if args.only in (label, None):
            print(('%20s bot %f %%' % (screen_name, (bot_score * 100.0))))
    return (label, (confidence * 100.0))
def fortios_extension_controller(data, fos):
    """Dispatch the extension-controller/extender-profile task to FortiOS.

    Returns the 4-tuple consumed by the module runner:
    (failed, changed, raw_response, empty_diff).
    """
    # Register this run as a member operation on the target endpoint.
    fos.do_member_operation('extension-controller', 'extender-profile')
    if data['extension_controller_extender_profile']:
        resp = extension_controller_extender_profile(data, fos)
    else:
        # fail_json() terminates module execution, so `resp` being unbound
        # past this branch is unreachable in practice.
        fos._module.fail_json(msg=('missing task body: %s' % 'extension_controller_extender_profile'))
    # changed: the call succeeded AND the device reported a revision change
    # (assumed True when the response carries no 'revision_changed' key).
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
def plot_line_webgl(viz, env, args):
    """Plot a cubic curve with a very large point count via visdom's WebGL
    line renderer and return the x coordinates.

    Args:
        viz: visdom connection (or anything exposing ``line``).
        env: visdom environment name.
        args: CLI args (unused; kept for a uniform demo signature).
    """
    num_points = 200000
    xs = np.linspace((- 1), 0, num_points)
    ys = xs ** 3
    viz.line(
        X=xs,
        Y=ys,
        opts=dict(title='{} points using WebGL'.format(num_points), webgl=True),
        env=env,
        win='WebGL demo',
    )
    return xs
def _remove_builtins(fake_tb):
    """Strip builtin names from the globals of every frame reachable from a
    fake traceback (walks tb_next for tracebacks and f_back for frames).

    Mutates each frame in place by rebinding ``f_globals`` to a filtered
    dict; only works on fake frame objects whose attributes are writable.
    """
    tb = fake_tb
    while tb:
        frame = tb.tb_frame
        while frame:
            # Rebuild globals without any name that shadows a builtin.
            frame.f_globals = {
                name: value
                for name, value in frame.f_globals.items()
                if name not in dir(builtins)
            }
            frame = frame.f_back
        tb = tb.tb_next
class CssImgH2(CssStyle.Style):
    """Style for image-overlay headings: a child <h2> that fades in on hover."""

    # Base rules for the <h2>: start fully transparent and animate all
    # property changes over 0.2s.
    _attrs = {'opacity': 0, 'transition': 'all 0.2s ease-in-out', 'text-transform': 'uppercase', 'text-align': 'center', 'position': 'relative', 'padding': '10px', 'margin': '20px 0 0 0'}
    # On hover: fade the heading in and reset any vertical translation.
    _hover = {'opacity': 1, 'transform': 'translateY(0px)'}
    # Scope the rules above to child <h2> elements of the styled component.
    _selectors = {'child': 'h2'}

    def customize(self):
        # Use the theme's primary colour so the heading follows the palette.
        self.css({'color': self.page.theme.colors[0]})
def execute(commands, parameters):
    """Run each templated command, echoing it first; exit(1) on any failure.

    Args:
        commands: iterable of command template strings with ``{name}`` fields.
        parameters: mapping used to fill the template fields.

    Note: templates are split on whitespace BEFORE formatting, so a
    substituted value containing spaces stays a single argv element.
    """
    for template in commands:
        click.echo('[EXECUTING] {}'.format(template.format(**parameters)))
        argv = [token.format(**parameters) for token in template.split()]
        try:
            subprocess.check_call(argv)
        except subprocess.CalledProcessError as exc:
            print(exc)
            sys.exit(1)
class OptionSeriesVariwideSonificationTracksMappingVolume(Options):
    """Accessors for series.variwide.sonification.tracks.mapping.volume.

    NOTE(review): each pair of same-named methods below is a getter/setter
    pair whose @property / @<name>.setter decorators appear to have been
    stripped during extraction — as written, each setter def silently
    shadows its getter. Confirm against the generated original.
    """

    def mapFunction(self):
        # Getter: custom mapping function (no default).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as-is, not cast to a JS type.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the volume is mapped to (no default).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range (no default).
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range (no default).
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope within which min/max are computed (no default).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
class OptionSeriesFunnelDataDragdropGuideboxDefault(Options):
    """Accessors for series.funnel.data.dragDrop.guideBox.default styling.

    NOTE(review): each pair of same-named methods below is a getter/setter
    pair whose @property / @<name>.setter decorators appear to have been
    stripped during extraction — as written, each setter def silently
    shadows its getter. Confirm against the generated original.
    """

    def className(self):
        # Getter: CSS class applied to the guide box.
        return self._config_get('highcharts-drag-box-default')

    def className(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def color(self):
        # Getter: guide box fill colour (translucent black by default).
        return self._config_get('rgba(0, 0, 0, 0.1)')

    def color(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def cursor(self):
        # Getter: mouse cursor shown while dragging.
        return self._config_get('move')

    def cursor(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def lineColor(self):
        # Getter: guide box border colour.
        return self._config_get('#888')

    def lineColor(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def lineWidth(self):
        # Getter: guide box border width in pixels.
        return self._config_get(1)

    def lineWidth(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def zIndex(self):
        # Getter: stacking order of the guide box.
        return self._config_get(900)

    def zIndex(self, num: float):
        # Setter.
        self._config(num, js_type=False)
class Context():
    """Snapshot of i3 state (tree, focused container, workspace, managed
    containers) plus helpers to run IPC commands and manipulate X windows."""

    def __init__(self, i3l: Connection, tree: Con, workspace_sequence: Optional[WorkspaceSequence]):
        self.i3l = i3l
        self.tree = tree
        self.focused = tree.find_focused()
        self.workspace = self.focused.workspace()
        # Layout-managed containers on the focused workspace, sorted by window id.
        self.containers = self._sync_containers(self.workspace)
        # Keep the ordering sequence in step with the containers; None disables ordering.
        self.workspace_sequence = (self._sync_workspace_sequence(self.containers, workspace_sequence) if (workspace_sequence is not None) else None)

    def contains_container(self, con_id: int) -> bool:
        """True when `con_id` is one of the tracked layout containers."""
        containers = [container for container in self.containers if (container.id == con_id)]
        return (len(containers) > 0)

    def sorted_containers(self) -> List[Con]:
        """Containers ordered by their recorded sequence position."""
        return sorted(self.containers, key=(lambda container: self.workspace_sequence.get_order(container.id)))

    def workspace_width(self, ratio: float=1.0) -> int:
        """Workspace pixel width scaled by `ratio` (truncated to int)."""
        return int((self.workspace.rect.width * ratio))

    def workspace_height(self, ratio: float=1.0) -> int:
        """Workspace pixel height scaled by `ratio` (truncated to int)."""
        return int((self.workspace.rect.height * ratio))

    def exec(self, payload: str) -> List[CommandReply]:
        """Forward an i3 command string over the IPC connection."""
        return self.i3l.command(payload)

    def send_tick(self, payload: str) -> TickReply:
        """Broadcast an IPC tick event with the given payload."""
        return self.i3l.send_tick(payload)

    def xdo_unmap_window(self, window_id: Optional[int]=None):
        """Hide a window via xdotool; defaults to the focused window."""
        if (window_id is None):
            window_id = self.focused.window
        command = shlex.split(f'xdotool windowunmap {window_id}')
        subprocess.run(command)

    def xdo_map_window(self, rebuild_container: RebuildContainer):
        """Restore a window: resize, move, then map it in one xdotool call."""
        window_id = rebuild_container.window
        command = shlex.split(f'xdotool windowsize {window_id} {rebuild_container.width} {rebuild_container.height} windowmove {window_id} {rebuild_container.x} {rebuild_container.y} windowmap {window_id}')
        subprocess.run(command)

    def resync(self) -> 'Context':
        """Refresh tree/workspace/container state from i3; returns self."""
        self.tree = self.i3l.get_tree()
        focused = self.tree.find_focused()
        workspace = focused.workspace()
        self.containers = self._sync_containers(workspace)
        return self

    # NOTE(review): takes `cls` but carries no @classmethod decorator — the
    # decorator was presumably stripped during extraction (it is invoked as
    # self._sync_containers(workspace) with a single argument).
    def _sync_containers(cls, workspace: Con) -> List[Con]:
        # Collect layout-managed containers, ordered by X window id.
        containers = [container for container in workspace if is_layout_container(container)]
        return sorted(containers, key=(lambda container: container.window))

    # NOTE(review): no `self`/`cls` — presumably a stripped @staticmethod.
    def _sync_workspace_sequence(containers: List[Con], workspace_sequence: WorkspaceSequence) -> WorkspaceSequence:
        # Assign an order to any container the sequence has not seen yet.
        for container in containers:
            if (workspace_sequence.get_order(container.id) is None):
                workspace_sequence.set_order(container)
        return workspace_sequence
class TestSegmentSentimentAnalysisDataClass():
    """Validation tests for SegmentSentimentAnalysisDataClass.

    NOTE(review): the bare ``.parametrize`` expressions below are stripped
    ``@pytest.mark.parametrize`` decorators lost during extraction.
    """

    .parametrize(('kwargs', 'expected'), [_assign_markers_parametrize(segment='Valid segment', sentiment=SentimentEnum.POSITIVE.value, sentiment_rate=0, expected={'sentiment': 'Positive', 'sentiment_rate': 0.0}), _assign_markers_parametrize(segment='Valid segment', sentiment=SentimentEnum.NEGATIVE.value, sentiment_rate=1.0, expected={'sentiment': 'Negative', 'sentiment_rate': 1.0}), _assign_markers_parametrize(segment='Valid segment', sentiment=SentimentEnum.NEUTRAL.value, sentiment_rate=0.578, expected={'sentiment': 'Neutral', 'sentiment_rate': 0.58}), _assign_markers_parametrize(segment='Valid segment', sentiment='neutral', sentiment_rate=None, expected={'sentiment': 'Neutral', 'sentiment_rate': None})], ids=['test_with_sentiment_positive_enum_rate_0', 'test_with_sentiment_negative_enum_rate_1.00', 'test_with_sentiment_neutral_enum_rate_0.578', 'test_with_sentiment_neutral_rate_none'])
    def test_valid_input(self, kwargs, expected):
        """Valid inputs: sentiment is normalized and the rate is rounded."""
        # The segment must round-trip untouched.
        expected['segment'] = kwargs['segment']
        segment_sentiment_class = SegmentSentimentAnalysisDataClass(**kwargs)
        assert (segment_sentiment_class.segment == expected['segment']), 'The value of `segment` must not change during the assignment'
        assert (segment_sentiment_class.sentiment == expected['sentiment']), "The value of `sentiment` must be in ['Positive', 'Negative', 'Neutral']"
        assert (segment_sentiment_class.sentiment_rate == expected['sentiment_rate']), 'The value of `sentiment_rate` must be rounded to the hundredth'

    .parametrize(('kwargs', 'expected'), [_assign_markers_parametrize(segment='Valid segment', sentiment='invalid sentiment', sentiment_rate=0, expected={'raise_type': ValueError, 'raise_message': "Sentiment must be 'Positive' or 'Negative' or 'Neutral'"}), _assign_markers_parametrize(segment='Valid segment', sentiment=1, sentiment_rate=0, expected={'raise_type': TypeError, 'raise_message': 'Sentiment must be a string'}), _assign_markers_parametrize(segment=1, sentiment='Positive', sentiment_rate=0, expected={'raise_type': TypeError, 'raise_message': 'Segment must be a string'}), _assign_markers_parametrize(segment='Valid segment', sentiment=SentimentEnum.POSITIVE.value, sentiment_rate='0', expected={'raise_type': TypeError, 'raise_message': 'Sentiment rate must be a float'})], ids=['test_with_bad_sentiment_format', 'test_with_non_str_sentiment', 'test_with_non_str_segment', 'test_with_non_number_rate'])
    def test_invalid_input(self, kwargs, expected):
        """Invalid inputs must raise the documented error with its message."""
        # ValidationError is accepted alongside the native type because the
        # dataclass may surface errors through its validation layer.
        with pytest.raises((expected['raise_type'], ValidationError), match=expected['raise_message']):
            segment_sentiment_class = SegmentSentimentAnalysisDataClass(**kwargs)
class RandomInstanceCrop(aug.Augmentation):
    """Crop around one randomly chosen (or the first) ground-truth instance,
    with the crop box scaled by a random factor drawn from `crop_scale`."""

    def __init__(self, crop_scale: Tuple[(float, float)]=(0.8, 1.6), fix_instance=False):
        # crop_scale: (min, max) multiplicative range applied to the chosen
        #   instance's box before cropping.
        # fix_instance: always pick annotation 0 instead of a random one
        #   (useful for deterministic tests).
        super().__init__()
        self.crop_scale = crop_scale
        self.fix_instance = fix_instance
        assert (isinstance(crop_scale, (list, tuple)) and (len(crop_scale) == 2)), crop_scale

    def get_transform(self, image: np.ndarray, annotations: List[Any]) -> Transform:
        """Return a CropTransform around one instance, or NoOpTransform when
        there are no usable (non-crowd) annotations."""
        assert isinstance(annotations, (list, tuple)), annotations
        assert all((('bbox' in x) for x in annotations)), annotations
        assert all((('bbox_mode' in x) for x in annotations)), annotations
        image_size = image.shape[:2]
        # Only non-crowd instances can anchor the crop.
        annotations = [x for x in annotations if (x.get('iscrowd', 0) == 0)]
        if (len(annotations) == 0):
            return NoOpTransform()
        if (not self.fix_instance):
            sel_index = np.random.randint(len(annotations))
        else:
            sel_index = 0
        # Mark every other instance as crowd so downstream training ignores it.
        for (idx, instance) in enumerate(annotations):
            if (idx != sel_index):
                instance['iscrowd'] = 1
        instance = annotations[sel_index]
        bbox_xywh = BoxMode.convert(instance['bbox'], instance['bbox_mode'], BoxMode.XYWH_ABS)
        # Grow/shrink the box about its center, then clamp to image bounds.
        scale = np.random.uniform(*self.crop_scale)
        bbox_xywh = bu.scale_bbox_center(bbox_xywh, scale)
        bbox_xywh = bu.clip_box_xywh(bbox_xywh, image_size).int()
        return CropTransform(*bbox_xywh.tolist(), orig_h=image_size[0], orig_w=image_size[1])
class MockPopenHandle(object):
    """Minimal stand-in for a subprocess.Popen handle used in tests.

    Falsy constructor arguments (None, 0, '') fall back to benign defaults:
    return code 0 and placeholder stdout/stderr text.
    """

    def __init__(self, returncode=None, stdout=None, stderr=None):
        self.returncode = returncode if returncode else 0
        self.stdout = stdout if stdout else 'mock stdout'
        self.stderr = stderr if stderr else 'mock stderr'

    def communicate(self):
        """Mimic Popen.communicate(): return (stdout, stderr) as bytes."""
        return (self.stdout.encode(), self.stderr.encode())
def test_lp_default_handling():
    """LaunchPlan.create: required vs default vs fixed input handling.

    NOTE(review): `t1`, `my_wf` and `my_wf2` carry no decorators here —
    presumably stripped @task / @workflow decorators; confirm against the
    original source.
    """

    def t1(a: int) -> NamedTuple('OutputsBC', t1_int_output=int, c=str):
        a = (a + 2)
        return (a, ('world-' + str(a)))

    def my_wf(a: int, b: int) -> (str, str, int, int):
        (x, y) = t1(a=a)
        (u, v) = t1(a=b)
        return (y, v, x, u)

    # No defaults on the workflow: both parameters are required.
    lp = launch_plan.LaunchPlan.create('test1', my_wf)
    assert (len(lp.parameters.parameters) == 2)
    assert lp.parameters.parameters['a'].required
    assert (lp.parameters.parameters['a'].default is None)
    assert lp.parameters.parameters['b'].required
    assert (lp.parameters.parameters['b'].default is None)
    assert (len(lp.fixed_inputs.literals) == 0)

    # default_inputs make the parameter optional but still overridable.
    lp_with_defaults = launch_plan.LaunchPlan.create('test2', my_wf, default_inputs={'a': 3})
    assert (len(lp_with_defaults.parameters.parameters) == 2)
    assert (not lp_with_defaults.parameters.parameters['a'].required)
    assert (lp_with_defaults.parameters.parameters['a'].default == _literal_models.Literal(scalar=_literal_models.Scalar(primitive=_literal_models.Primitive(integer=3))))
    assert (len(lp_with_defaults.fixed_inputs.literals) == 0)

    # fixed_inputs remove the parameter entirely and pin its literal value.
    lp_with_fixed = launch_plan.LaunchPlan.create('test3', my_wf, fixed_inputs={'a': 3})
    assert (len(lp_with_fixed.parameters.parameters) == 1)
    assert (len(lp_with_fixed.fixed_inputs.literals) == 1)
    assert (lp_with_fixed.fixed_inputs.literals['a'] == _literal_models.Literal(scalar=_literal_models.Scalar(primitive=_literal_models.Primitive(integer=3))))

    # Same checks against a workflow with a native Python default (b=42).
    def my_wf2(a: int, b: int=42) -> (str, str, int, int):
        (x, y) = t1(a=a)
        (u, v) = t1(a=b)
        return (y, v, x, u)

    lp = launch_plan.LaunchPlan.create('test4', my_wf2)
    assert (len(lp.parameters.parameters) == 2)
    assert (len(lp.fixed_inputs.literals) == 0)

    lp_with_defaults = launch_plan.LaunchPlan.create('test5', my_wf2, default_inputs={'a': 3})
    assert (len(lp_with_defaults.parameters.parameters) == 2)
    assert (len(lp_with_defaults.fixed_inputs.literals) == 0)
    # Invoking the LP runs the workflow with defaults applied: a=3 -> 5.
    assert (lp_with_defaults(b=3) == ('world-5', 'world-5', 5, 5))

    lp_with_fixed = launch_plan.LaunchPlan.create('test6', my_wf2, fixed_inputs={'a': 3})
    assert (len(lp_with_fixed.parameters.parameters) == 1)
    assert (len(lp_with_fixed.fixed_inputs.literals) == 1)
    assert (lp_with_fixed(b=3) == ('world-5', 'world-5', 5, 5))

    # Fixing `b` drops its Python default from the parameter map as well.
    lp_with_fixed = launch_plan.LaunchPlan.create('test7', my_wf2, fixed_inputs={'b': 3})
    assert (len(lp_with_fixed.parameters.parameters) == 1)
    assert (len(lp_with_fixed.fixed_inputs.literals) == 1)
class table_feature_prop_wildcards(table_feature_prop):
    """OpenFlow table-feature property listing wildcardable match fields.

    Generated loxigen-style serialization class (property type 10,
    OFPTFPT_WILDCARDS in the OpenFlow spec).
    """
    type = 10  # table feature property type identifier

    def __init__(self, oxm_ids=None):
        if (oxm_ids != None):
            self.oxm_ids = oxm_ids
        else:
            self.oxm_ids = []
        return

    def pack(self):
        """Serialize: type (2B) + length (2B, back-patched) + oxm id list."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(loxi.generic_util.pack_list(self.oxm_ids))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        # NOTE(review): ''.join over struct.pack results is Python-2-era
        # generated code; under Python 3 this would need b''.join. Left as-is.
        return ''.join(packed)

    # NOTE(review): no `self` — presumably a stripped @staticmethod.
    def unpack(reader):
        """Deserialize from a buffer reader, validating the type field and
        bounding reads by the encoded length."""
        obj = table_feature_prop_wildcards()
        _type = reader.read('!H')[0]
        assert (_type == 10)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this property's payload.
        reader = orig_reader.slice(_length, 4)
        obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
        return obj

    def __eq__(self, other):
        # Equality: same concrete class and identical oxm id list.
        if (type(self) != type(other)):
            return False
        if (self.oxm_ids != other.oxm_ids):
            return False
        return True

    def pretty_print(self, q):
        # Render via the shared pretty-printer protocol of generated classes.
        q.text('table_feature_prop_wildcards {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('oxm_ids = ')
                q.pp(self.oxm_ids)
            q.breakable()
        q.text('}')
# NOTE(review): the bare ``(scope='module')`` below is presumably a stripped
# ``@pytest.fixture(scope='module')`` decorator lost during extraction.
(scope='module')
def simple_mesh():
    """Yield a 2-level multilevel quadrilateral mesh on [-1,1]x[-1,1]
    together with its node counts (nnx, nny)."""
    nnx = 4
    nny = 4
    x = ((- 1.0), (- 1.0))  # lower-left corner of the domain
    L = (2.0, 2.0)          # domain extents in x and y
    refinementLevels = 2
    nLayersOfOverlap = 1
    parallelPartitioningType = 0
    skipInit = False
    # useC=False forces the pure-Python mesh generation path.
    mlMesh = MeshTools.MultilevelQuadrilateralMesh(nnx, nny, 1, x[0], x[1], 0.0, L[0], L[1], 1.0, refinementLevels, skipInit, nLayersOfOverlap, parallelPartitioningType, useC=False)
    (yield (mlMesh, nnx, nny))
# NOTE(review): the leading ``.parametrize`` lines are stripped
# ``@pytest.mark.parametrize`` decorators lost during extraction.
.parametrize('ops', ALL_OPS)
.parametrize('dtype', FLOAT_TYPES)
def test_backprop_reduce_mean(ops, dtype):
    """backprop_reduce_mean must spread each gradient row over its sequence,
    divided by the sequence length, and reject negative lengths."""
    # Two sequences of lengths 4 and 2: gradient rows [1,2,3] and [4,5,6].
    dX = ops.backprop_reduce_mean(ops.xp.arange(1, 7, dtype=dtype).reshape(2, 3), ops.xp.array([4, 2], dtype='int32'))
    assert (dX.dtype == dtype)
    # First 4 rows get [1,2,3]/4; last 2 rows get [4,5,6]/2.
    ops.xp.testing.assert_allclose(dX, [[0.25, 0.5, 0.75], [0.25, 0.5, 0.75], [0.25, 0.5, 0.75], [0.25, 0.5, 0.75], [2.0, 2.5, 3.0], [2.0, 2.5, 3.0]])
    # Negative lengths are invalid and must raise.
    with pytest.raises(ValueError, match='lengths must be'):
        ops.backprop_reduce_mean(ops.xp.arange(1, 7, dtype=dtype).reshape(2, 3), ops.xp.array([(- 1), 2], dtype='int32'))
class ElyraPropertyList(list):
    """A list of ElyraPropertyListItem with dict-style dedup/merge helpers."""

    def to_dict(self: List[ElyraPropertyListItem], use_prop_as_value: bool=False) -> Dict[(str, str)]:
        """Convert to a dict keyed by each item's dict-entry key.

        Later items overwrite earlier ones with the same key. When
        `use_prop_as_value` is True, the item itself (not its value) is
        stored as the dict value.
        """
        prop_dict = {}
        for prop in self:
            # Skip malformed entries rather than failing the whole conversion.
            if ((prop is None) or (not isinstance(prop, ElyraPropertyListItem))):
                continue
            prop_key = prop.get_key_for_dict_entry()
            if (prop_key is None):
                continue
            prop_value = prop.get_value_for_dict_entry()
            if use_prop_as_value:
                prop_value = prop
            prop_dict[prop_key] = prop_value
        return prop_dict

    def deduplicate(self: ElyraPropertyList) -> ElyraPropertyList:
        """Remove key duplicates; the last occurrence of each key wins."""
        instance_dict = self.to_dict(use_prop_as_value=True)
        return ElyraPropertyList({**instance_dict}.values())

    # NOTE(review): no `self` — presumably a stripped @staticmethod.
    def merge(primary: ElyraPropertyList, secondary: ElyraPropertyList) -> ElyraPropertyList:
        """Combine two lists; entries in `primary` win on key conflicts."""
        primary_dict = primary.to_dict(use_prop_as_value=True)
        secondary_dict = secondary.to_dict(use_prop_as_value=True)
        merged_list = list({**secondary_dict, **primary_dict}.values())
        return ElyraPropertyList(merged_list)

    # NOTE(review): no `self` — presumably a stripped @staticmethod.
    def difference(minuend: ElyraPropertyList, subtrahend: ElyraPropertyList) -> ElyraPropertyList:
        """Return items of `minuend` whose keys are absent from `subtrahend`."""
        subtract_dict = minuend.to_dict(use_prop_as_value=True)
        for key in subtrahend.to_dict().keys():
            if (key in subtract_dict):
                subtract_dict.pop(key)
        return ElyraPropertyList(subtract_dict.values())

    def add_to_execution_object(self, runtime_processor: RuntimePipelineProcessor, execution_object: Any):
        """Delegate to each item to inject itself into the execution object."""
        for item in self:
            if isinstance(item, ElyraPropertyListItem):
                item.add_to_execution_object(runtime_processor=runtime_processor, execution_object=execution_object)
class OptionSeriesVariablepieSonificationDefaultspeechoptionsMappingRate(Options):
    """Accessors for series.variablepie.sonification defaultSpeechOptions
    mapping.rate.

    NOTE(review): each pair of same-named methods below is a getter/setter
    pair whose @property / @<name>.setter decorators appear to have been
    stripped during extraction — as written, each setter def silently
    shadows its getter. Confirm against the generated original.
    """

    def mapFunction(self):
        # Getter: custom mapping function (no default).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as-is, not cast to a JS type.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the speech rate is mapped to (no default).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range (no default).
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range (no default).
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope within which min/max are computed (no default).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
# NOTE(review): ``.skipcomplex`` is presumably a stripped pytest marker
# (``@pytest.mark.skipcomplex``) lost during extraction.
.skipcomplex
def test_p_independence_hgrad(mesh, variant):
    """Krylov iteration counts for the H(grad) Riesz map should stay bounded
    independently of polynomial degree p (degrees 3..5 tested)."""
    family = 'Lagrange'
    # Expected max iterations per solver [fdmstar, facetstar]; looser in 3D.
    expected = ([16, 12] if (mesh.topological_dimension() == 3) else [9, 7])
    # The facet variant only applies when an element variant is requested.
    solvers = ([fdmstar] if (variant is None) else [fdmstar, facetstar])
    for degree in range(3, 6):
        element = FiniteElement(family, cell=mesh.ufl_cell(), degree=degree, variant=variant)
        V = FunctionSpace(mesh, element)
        problem = build_riesz_map(V, grad)
        for (sp, expected_it) in zip(solvers, expected):
            assert (solve_riesz_map(problem, sp) <= expected_it)
# NOTE(review): the bare tuple expression below is presumably a stripped
# ``@mock.patch('bodhi.server.scripts.sar.initialize_db', mock.Mock())``
# class decorator lost during extraction.
('bodhi.server.scripts.sar.initialize_db', mock.Mock())
class TestSar(BasePyTestCase):
    """CLI tests for the `sar` (subject access request) user-data export."""

    def test_invalid_user(self):
        # Unknown user in JSON mode: clean exit with empty output.
        runner = testing.CliRunner()
        r = runner.invoke(sar.get_user_data, [('--username=' + 'invalid_user')])
        assert (r.exit_code == 0)
        assert (r.output == '')

    def test_invalid_user_human_readable(self):
        # Unknown user in human-readable mode: explicit message.
        runner = testing.CliRunner()
        r = runner.invoke(sar.get_user_data, [('--username=' + 'invalid_user'), '--human-readable'])
        assert (r.exit_code == 0)
        assert (r.output == 'User not found.\n')

    def test_valid_user(self):
        # Pin the comment timestamp to "now" so the expected template can be
        # patched with a deterministic value before comparison.
        now = datetime.utcnow()
        now_str = str(now.strftime('%Y-%m-%d %H:%M:%S'))
        comment = self.db.query(models.Comment).all()[0]
        comment.timestamp = now
        self.db.commit()
        expected_output = EXPECTED_JSON_OUTPUT.replace('1984-11-02 00:00:00', now_str, 1)
        expected_output = expected_output.replace('ALIAS', comment.update.alias)
        runner = testing.CliRunner()
        r = runner.invoke(sar.get_user_data, [('--username=' + 'guest')])
        assert (r.exit_code == 0)
        assert (r.output == expected_output)

    # NOTE(review): the bare ``.dict(os.environ, ...)`` below is presumably a
    # stripped ``@mock.patch.dict`` decorator lost during extraction.
    .dict(os.environ, {'SAR_USERNAME': 'guest'})
    def test_valid_user_envvar(self):
        # Same as test_valid_user but the username comes from SAR_USERNAME.
        now = datetime.utcnow()
        now_str = str(now.strftime('%Y-%m-%d %H:%M:%S'))
        comment = self.db.query(models.Comment).all()[0]
        comment.timestamp = now
        self.db.commit()
        expected_output = EXPECTED_JSON_OUTPUT.replace('1984-11-02 00:00:00', now_str, 1)
        expected_output = expected_output.replace('ALIAS', comment.update.alias)
        runner = testing.CliRunner()
        r = runner.invoke(sar.get_user_data)
        assert (r.exit_code == 0)
        assert (r.output == expected_output)

    def test_valid_user_human_readable(self):
        # Human-readable variant of the valid-user export.
        now = datetime.utcnow()
        now_str = str(now.strftime('%Y-%m-%d %H:%M:%S'))
        comment = self.db.query(models.Comment).all()[0]
        comment.timestamp = now
        self.db.commit()
        expected_output = EXPECTED_USER_DATA_OUTPUT.replace('1984-11-02 00:00:00', now_str, 1)
        expected_output = expected_output.format(comment.update.alias, comment.update.alias)
        runner = testing.CliRunner()
        r = runner.invoke(sar.get_user_data, [('--username=' + 'guest'), '--human-readable'])
        assert (r.exit_code == 0)
def naics_test_data(db):
    """Populate award/transaction/NAICS rows used by the NAICS filter tests.

    Creates four award+transaction pairs (two per NAICS code) and the two
    reference NAICS rows their codes resolve to.
    """
    # One AwardSearch per id, each pointing at its same-numbered transaction.
    for award_id in (1, 2, 3, 4):
        baker.make('search.AwardSearch', award_id=award_id, latest_transaction_id=award_id)

    # (transaction_id, award_id, obligation, action_date, fiscal_date, naics)
    transaction_rows = [
        (1, 1, 1, '2020-01-01', '2020-04-01', 'NAICS 1234'),
        (2, 2, 1, '2020-01-02', '2020-04-02', 'NAICS 1234'),
        (3, 3, 2, '2020-01-03', '2020-04-03', 'NAICS 9876'),
        (4, 4, 2, '2020-01-04', '2020-04-04', 'NAICS 9876'),
    ]
    for tx_id, award_id, amount, action_date, fiscal_date, naics in transaction_rows:
        baker.make('search.TransactionSearch', transaction_id=tx_id, award_id=award_id, is_fpds=True, federal_action_obligation=amount, generated_pragmatic_obligation=amount, action_date=action_date, fiscal_action_date=fiscal_date, naics_code=naics)

    # Reference rows resolving the NAICS codes used above.
    for code, description, year in (('NAICS 1234', 'SOURCE NAICS DESC 1234', 1955), ('NAICS 9876', 'SOURCE NAICS DESC 9876', 1985)):
        baker.make('references.NAICS', code=code, description=description, year=year)
class BucketsAclScanner(base_scanner.BaseScanner):
    """Scanner that checks GCS bucket ACLs against the bucket rules engine."""

    def __init__(self, global_configs, scanner_configs, service_config, model_name, snapshot_timestamp, rules):
        """Build the rules engine and its rule book from the rules file."""
        super(BucketsAclScanner, self).__init__(global_configs, scanner_configs, service_config, model_name, snapshot_timestamp, rules)
        self.rules_engine = buckets_rules_engine.BucketsRulesEngine(rules_file_path=self.rules, snapshot_timestamp=self.snapshot_timestamp)
        self.rules_engine.build_rule_book(self.global_configs)

    # NOTE(review): no `self` — presumably a stripped @staticmethod.
    def _flatten_violations(violations):
        """Yield violations flattened into the dict shape the DB layer expects."""
        for violation in violations:
            violation_data = {'role': violation.role, 'entity': violation.entity, 'email': violation.email, 'domain': violation.domain, 'bucket': violation.bucket, 'full_name': violation.full_name, 'project_id': violation.project_id}
            (yield {'full_name': violation.full_name, 'resource_id': violation.resource_id, 'resource_type': violation.resource_type, 'resource_name': violation.resource_name, 'rule_index': violation.rule_index, 'rule_name': violation.rule_name, 'violation_type': violation.violation_type, 'violation_data': violation_data, 'resource_data': violation.resource_data})

    def _output_results(self, all_violations):
        # Persist the flattened violations to the database.
        all_violations = self._flatten_violations(all_violations)
        self._output_results_to_db(all_violations)

    def _find_violations(self, bucket_acls):
        """Run each bucket ACL through the rules engine; collect violations."""
        all_violations = []
        LOGGER.info('Finding bucket acl violations...')
        for bucket_acl in bucket_acls:
            violations = self.rules_engine.find_violations(bucket_acl)
            LOGGER.debug(violations)
            all_violations.extend(violations)
        return all_violations

    def _retrieve(self):
        """Load bucket ACLs from the model's stored gcs_policy resources."""
        model_manager = self.service_config.model_manager
        (scoped_session, data_access) = model_manager.get(self.model_name)
        with scoped_session as session:
            bucket_acls = []
            gcs_policies = [policy for policy in data_access.scanner_iter(session, 'gcs_policy')]
            for gcs_policy in gcs_policies:
                # Policy rows hang off their bucket, which hangs off its project.
                bucket = gcs_policy.parent
                project_id = bucket.parent.name
                acls = json.loads(gcs_policy.data)
                bucket_acls.extend(BucketAccessControls.from_list(project_id=project_id, full_name=bucket.full_name, acls=acls))
            return bucket_acls

    def run(self):
        """Entry point: retrieve ACLs, evaluate rules, store violations."""
        buckets_acls = self._retrieve()
        all_violations = self._find_violations(buckets_acls)
        self._output_results(all_violations)
class TestHTTPStatus():
    """HTTPStatus raised from hooks/responders must short-circuit into a
    normal response without marking the request as failed."""

    def _assert_pass(self, response, status, status_code):
        # Shared success assertions for the /status resource.
        assert (response.status == status)
        assert (response.status_code == status_code)
        assert (response.headers['x-failed'] == 'False')
        assert (response.text == 'Pass')

    def test_raise_status_in_before_hook(self, client):
        # Status raised inside a before-hook still yields a clean 200.
        self._assert_pass(client.simulate_request(path='/status', method='GET'), falcon.HTTP_200, 200)

    def test_raise_status_in_responder(self, client):
        # Status raised inside the responder body itself.
        self._assert_pass(client.simulate_request(path='/status', method='POST'), falcon.HTTP_200, 200)

    def test_raise_status_runs_after_hooks(self, client):
        # After-hooks must still run when a status is raised.
        self._assert_pass(client.simulate_request(path='/status', method='PUT'), falcon.HTTP_200, 200)

    def test_raise_status_survives_after_hooks(self, client):
        # After-hooks must not clobber the raised status (201 here).
        self._assert_pass(client.simulate_request(path='/status', method='DELETE'), falcon.HTTP_201, 201)

    def test_raise_status_empty_body(self, client):
        # A raised status with no body produces an empty response text.
        response = client.simulate_request(path='/status', method='PATCH')
        assert (response.text == '')
def _get_kwargs(*, client: Client, user_id: str, lock: bool) -> Dict[(str, Any)]:
    """Assemble httpx request kwargs for POST {base_url}/admin/users/lock.

    Query parameters whose value is UNSET or None are omitted.
    """
    url = '{}/admin/users/lock'.format(client.base_url)
    headers: Dict[(str, str)] = client.get_headers()
    cookies: Dict[(str, Any)] = client.get_cookies()
    # Collect candidate query parameters, then drop unset/absent values.
    candidates: Dict[(str, Any)] = {'user_id': user_id, 'lock': lock}
    params = {name: value for (name, value) in candidates.items() if ((value is not UNSET) and (value is not None))}
    return {
        'method': 'post',
        'url': url,
        'headers': headers,
        'cookies': cookies,
        'timeout': client.get_timeout(),
        'follow_redirects': client.follow_redirects,
        'params': params,
    }
def test_request_bad_port(django_elasticapm_client):
    """A malformed SERVER_PORT must not produce a request URL in the event."""
    request = WSGIRequest(environ={'wsgi.input': io.BytesIO(), 'REQUEST_METHOD': 'POST', 'SERVER_NAME': 'testserver', 'SERVER_PORT': '${port}', 'CONTENT_TYPE': 'text/html', 'ACCEPT': 'text/html'})
    # Consume part of the body so the request has been partially read.
    request.read(1)
    django_elasticapm_client.capture('Message', message='foo', request=request)
    assert (len(django_elasticapm_client.events[ERROR]) == 1)
    event = django_elasticapm_client.events[ERROR][0]
    assert ('request' in event['context'])
    request = event['context']['request']
    # The bogus '${port}' makes an absolute URL unbuildable, so the request
    # context must omit 'url' rather than carry a broken value.
    assert ('url' not in request)
def run_gauge(p, time_list, nd=3, total_nodes=None):
    """Step a gauge model through `time_list`, re-applying the initial
    conditions at every step and invoking the solver each time.

    Args:
        p: numerical solution driver exposing attachModel()/calculate().
        time_list: time values; the first entry initializes the model.
        nd: spatial dimension passed to gauge_setup.
        total_nodes: optional node count passed to gauge_setup.
    """
    (model, initialConditions) = gauge_setup(nd, total_nodes)
    p.attachModel(model, None)
    # Operate on the finest level of the multilevel model.
    m = model.levelModelList[(- 1)]
    m.setInitialConditions(initialConditions, time_list[0])
    m.timeIntegration.tLast = time_list[0]
    tCount = 0
    p.calculate()
    for t in time_list[1:]:
        tCount += 1
        # Reset the field at each step so the gauges sample known data.
        m.setInitialConditions(initialConditions, t)
        m.timeIntegration.tLast = t
        p.calculate()
# NOTE(review): presumably decorated with @contextlib.contextmanager in the
# original source — the function yields exactly once and cleans up in
# ``finally``; confirm against the original decorators.
def run_spinner(spinner):
    """Context-manager body: start `spinner`, tee stdout/stderr/logging
    through it for the duration of the with-block, then restore everything.

    Skips starting a spinner when one is already active (stderr is already
    a DummyFile) or spinners are globally disabled.
    """
    global ACTIVE_SPINNER
    try:
        if ((not isinstance(sys.stderr, DummyFile)) and SPINNER_ENABLED):
            spinner.start()
            spinner_started = True
            # Redirect stdout/stderr through the spinner so output lines do
            # not clobber the animation.
            save_stdout = sys.stdout
            save_stderr = sys.stderr
            sys.stdout = DummyFile(sys.stdout, spinner)
            sys.stderr = DummyFile(sys.stderr, spinner)
            # Point the 'base' logger at the redirected stdout as well.
            logger = logging.getLogger('base')
            save_handlers = list(logger.handlers)
            logger.handlers.clear()
            logger.addHandler(StreamHandler(stream=sys.stdout))
            ACTIVE_SPINNER = spinner
        else:
            # Nested or disabled: run the block without a new spinner.
            spinner_started = False
        (yield)
    finally:
        # Only undo the redirections we actually installed above.
        if spinner_started:
            spinner._show_cursor()
            spinner.stop()
            sys.stdout = save_stdout
            sys.stderr = save_stderr
            logger.handlers = save_handlers
            ACTIVE_SPINNER = None
class ContextGenerator(object):
    """Produces per-project manifest contexts layered over a shared default."""

    def __init__(self, default_ctx):
        # The shared baseline every project inherits until it is overridden.
        self.default_ctx = ManifestContext(default_ctx)
        # Lazily-created copies holding project-specific overrides.
        self.ctx_by_project = {}

    def set_value_for_project(self, project_name, key, value):
        """Override `key` for one project, cloning the default on first use."""
        ctx = self.ctx_by_project.get(project_name)
        if ctx is None:
            ctx = self.default_ctx.copy()
            self.ctx_by_project[project_name] = ctx
        ctx.set(key, value)

    def set_value_for_all_projects(self, key, value):
        """Set `key` on the default and every already-specialized context."""
        self.default_ctx.set(key, value)
        for ctx in self.ctx_by_project.values():
            ctx.set(key, value)

    def get_context(self, project_name):
        """Return the project's specialized context, or the shared default."""
        return self.ctx_by_project.get(project_name, self.default_ctx)
class OptionSeriesNetworkgraphSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Accessors for series.networkgraph.sonification
    defaultInstrumentOptions.mapping.volume.

    NOTE(review): each pair of same-named methods below is a getter/setter
    pair whose @property / @<name>.setter decorators appear to have been
    stripped during extraction — as written, each setter def silently
    shadows its getter. Confirm against the generated original.
    """

    def mapFunction(self):
        # Getter: custom mapping function (no default).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as-is, not cast to a JS type.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the volume is mapped to (no default).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range (no default).
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range (no default).
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope within which min/max are computed (no default).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
def test_jinja_template_rendering_without_examples(example_text):
    """Zero-shot (no examples) summarization prompt must match the template."""
    nlp = spacy.blank('en')
    doc = nlp.make_doc(example_text)
    # max_n_words caps the summary length mentioned in the rendered prompt.
    llm_ner = make_summarization_task(examples=None, max_n_words=10)
    prompt = list(llm_ner.generate_prompts([doc]))[0]
    assert (prompt.strip() == f""" You are an expert summarization system. Your task is to accept Text as input and summarize the Text in a concise way. The summary must not, under any circumstances, contain more than 10 words. Here is the Text that needs to be summarized: ''' {example_text} ''' Summary:""".strip())
class OnUserRoleCreated(TaskBase):
    """Background task: grant a user every permission of a newly assigned role."""
    __name__ = 'on_user_role_created'

    async def run(self, user_id: str, role_id: str, workspace_id: str):
        """Create one UserPermission per permission on the role.

        Raises:
            ObjectDoesNotExistTaskError: when the role id cannot be resolved.
        """
        workspace = (await self._get_workspace(uuid.UUID(workspace_id)))
        async with self.get_workspace_session(workspace) as session:
            role_repository = RoleRepository(session)
            user_permission_repository = UserPermissionRepository(session)
            role = (await role_repository.get_by_id(uuid.UUID(role_id)))
            if (role is None):
                raise ObjectDoesNotExistTaskError(Role, role_id)
            # Materialize one UserPermission per role permission, recording
            # the originating role so a later revocation can target them.
            user_permissions: list[UserPermission] = []
            for permission in role.permissions:
                user_permissions.append(UserPermission(user_id=uuid.UUID(user_id), permission_id=permission.id, from_role_id=role.id))
            (await user_permission_repository.create_many(user_permissions))
# NOTE(review): the bare call below is presumably a stripped decorator that
# registers this renderer for all derivative-like expression heads; confirm
# against the original source.
_heads([Derivative, RealDerivative, ComplexDerivative, ComplexBranchDerivative, MeromorphicDerivative])
def tex_Derivative(head, args, **kwargs):
    """Render a Derivative(expr, For(var, point[, order])) expression as LaTeX.

    Prefers compact prime notation (f'(x), f''(x), ...) for plain symbolic
    functions of the differentiation variable, subscripted-call notation for
    functions in `subscript_call_latex_table`, and falls back to explicit
    d/dx fraction notation (bracketed and evaluated at `point` when the
    point differs from the variable).
    """
    argstr = [arg.latex(**kwargs) for arg in args]
    assert ((len(args) == 2) and (args[1].head() == For))
    forargs = args[1].args()
    # For(var, point) implies first-order; For(var, point, order) is explicit.
    if (len(forargs) == 2):
        (var, point) = forargs
        order = Expr(1)
    elif (len(forargs) == 3):
        (var, point, order) = forargs
    else:
        raise ValueError
    if (not args[0].is_atom()):
        f = args[0].head()
        # Case 1: f(var) for a plain symbolic function head — use prime
        # notation up to order 3, then the f^{(n)}(x) superscript form.
        if (f.is_symbol() and (f not in (Exp, Sqrt)) and (args[0].args() == (var,))):
            pointstr = point.latex(in_small=True)
            fstr = args[0].head().latex()
            if (order.is_integer() and (order._integer == 0)):
                return ('%s(%s)' % (fstr, pointstr))
            if (order.is_integer() and (order._integer == 1)):
                return ("%s'(%s)" % (fstr, pointstr))
            if (order.is_integer() and (order._integer == 2)):
                return ("%s''(%s)" % (fstr, pointstr))
            if (order.is_integer() and (order._integer == 3)):
                return ("%s'''(%s)" % (fstr, pointstr))
            return ('{%s}^{(%s)}(%s)' % (fstr, order.latex(), pointstr))
        # Case 2: f(param, var) where f renders as a subscripted call, e.g.
        # Bessel-style functions — primes attach to the subscripted symbol.
        if (1 and ((f in subscript_call_latex_table) and (len(args[0].args()) == 2) and (args[0].args()[1] == var))):
            arg0 = args[0].args()[0].latex(in_small=True)
            fstr = subscript_call_latex_table[f]
            pointstr = point.latex(in_small=True)
            if (order.is_integer() and (order._integer == 0)):
                return ('%s_{%s}(%s)' % (fstr, arg0, pointstr))
            if (order.is_integer() and (order._integer == 1)):
                return ("%s'_{%s}(%s)" % (fstr, arg0, pointstr))
            if (order.is_integer() and (order._integer == 2)):
                return ("%s''_{%s}(%s)" % (fstr, arg0, pointstr))
            if (order.is_integer() and (order._integer == 3)):
                return ("%s'''_{%s}(%s)" % (fstr, arg0, pointstr))
            return ('{%s}^{(%s)}_{%s}(%s)' % (fstr, order.latex(), arg0, pointstr))
    # Fallback: explicit d/dx fraction notation.
    varstr = var.latex()
    pointstr = point.latex(in_small=True)
    orderstr = order.latex()
    if (var == point):
        # Derivative taken "at" the variable itself: no evaluation bracket.
        if (order.is_integer() and (order._integer == 1)):
            return ('\\frac{d}{d %s}\\, %s' % (varstr, argstr[0]))
        else:
            return ('\\frac{d^{%s}}{{d %s}^{%s}} %s' % (orderstr, varstr, orderstr, argstr[0]))
    elif (order.is_integer() and (order._integer == 1)):
        # Bracketed form evaluated at var = point.
        return ('\\left[ \\frac{d}{d %s}\\, %s \\right]_{%s = %s}' % (varstr, argstr[0], varstr, pointstr))
    else:
        return ('\\left[ \\frac{d^{%s}}{{d %s}^{%s}} %s \\right]_{%s = %s}' % (orderstr, varstr, orderstr, argstr[0], varstr, pointstr))
class ScanQRTextEdit(ButtonsTextEdit, MessageBoxMixin):
    """Text-edit widget with buttons to load content from a file or QR code."""

    def __init__(self, text='', allow_multi=False):
        # allow_multi: append scanned data on a new line instead of replacing
        # the current contents.
        ButtonsTextEdit.__init__(self, text)
        self.allow_multi = allow_multi
        self.setReadOnly(0)
        self.addButton('file.png', self.file_input, _('Read file'))
        # Pick the icon variant matching the current colour scheme.
        icon = ('qrcode_white.png' if ColorScheme.dark_scheme else 'qrcode.png')
        self.addButton(icon, self.qr_input, _('Read QR code'))

    def file_input(self):
        """Let the user pick a text file and load its contents into the widget."""
        (fileName, __) = QFileDialog.getOpenFileName(self, 'select file')
        if (not fileName):
            return
        try:
            with open(fileName, 'r', encoding='utf-8') as f:
                data = f.read()
        except UnicodeDecodeError as reason:
            # Binary files cannot be decoded as UTF-8 text; tell the user.
            self.show_critical(((_('The selected file appears to be a binary file.') + '\n') + _('Please ensure you only import text files.')), title=_('Not a text file'))
            return
        self.setText(data)

    def qr_input(self, ignore_uris: bool=False):
        """Scan a QR code from the configured camera and insert its payload.

        Returns the scanned data ('' on failure or cancel).
        """
        try:
            data = qrscanner.scan_barcode(app_state.config.get_video_device())
        except Exception as e:
            self.show_error(str(e))
            data = ''
        if (not data):
            data = ''
        if self.allow_multi:
            new_text = ((self.text() + data) + '\n')
        else:
            new_text = data
        # Only pass ignore_uris through when set, to keep the default
        # single-argument setText() behaviour intact.
        if ignore_uris:
            self.setText(new_text, ignore_uris)
        else:
            self.setText(new_text)
        return data

    def contextMenuEvent(self, e):
        # Extend the standard context menu with a QR-scan action.
        m = self.createStandardContextMenu()
        m.addAction(_('Read QR code'), self.qr_input)
        m.exec_(e.globalPos())
def dump_keepalive_packet(packet):
    """Emit a trace-level (level 5) log line describing a keepalive packet.

    Each known subtype gets its own field layout; an unknown subtype is
    reported as a warning because it indicates a bug in the dispatcher.
    """
    # Skip all attribute access and string formatting unless trace logging
    # is actually enabled.
    if logging.getLogger().getEffectiveLevel() > 5:
        return
    subtype = packet.subtype
    if subtype == 'stype_status':
        c = packet.content
        logging.log(5, 'keepalive {} model {} ({}) player {} ip {} mac {} devcnt {} u2 {} u3 {}'.format(subtype, packet.model, packet.device_type, c.player_number, c.ip_addr, c.mac_addr, c.device_count, c.u2, c.u3))
        return
    if subtype == 'stype_ip':
        c = packet.content
        logging.log(5, 'keepalive {} model {} ({}) player {} ip {} mac {} iteration {} assignment {} u2 {}'.format(subtype, packet.model, packet.device_type, c.player_number, c.ip_addr, c.mac_addr, c.iteration, c.player_number_assignment, c.u2))
        return
    if subtype == 'stype_mac':
        c = packet.content
        logging.log(5, 'keepalive {} model {} ({}) mac {} iteration {} u2 {}'.format(subtype, packet.model, packet.device_type, c.mac_addr, c.iteration, c.u2))
        return
    if subtype == 'stype_number':
        c = packet.content
        logging.log(5, 'keepalive {} model {} ({}) proposed_player_number {} iteration {}'.format(subtype, packet.model, packet.device_type, c.proposed_player_number, c.iteration))
        return
    if subtype == 'stype_hello':
        logging.log(5, 'keepalive {} model {} ({}) u2 {}'.format(subtype, packet.model, packet.device_type, packet.content.u2))
        return
    logging.warning('BUG: unhandled packet type {}'.format(subtype))
@pytest.fixture()
def reply_to_message_update_type(reply_to_message):
    """Wrap *reply_to_message* in a ``types.Update`` as its message payload.

    Every other update slot is None, so handlers see a plain message
    update.

    NOTE(review): the original source was garbled here -- the line began
    with a bare ``()`` (almost certainly a mangled ``@pytest.fixture()``
    decorator) and the first positional argument to ``types.Update`` (the
    update_id) was missing, which is a syntax error.  Both are restored
    below; confirm the update_id convention against sibling fixtures.
    """
    update_id = 1  # arbitrary but stable id for test updates
    edited_message = None
    channel_post = None
    edited_channel_post = None
    inline_query = None
    chosen_inline_result = None
    callback_query = None
    shipping_query = None
    pre_checkout_query = None
    poll = None
    poll_answer = None
    my_chat_member = None
    chat_member = None
    chat_join_request = None
    return types.Update(update_id, reply_to_message, edited_message, channel_post, edited_channel_post, inline_query, chosen_inline_result, callback_query, shipping_query, pre_checkout_query, poll, poll_answer, my_chat_member, chat_member, chat_join_request)
def collate_fn(batch: List[Tuple[(Tensor, str)]], gpt2_type: str='distilgpt2', max_length: int=1024) -> Tuple[(Tensor, Tensor, Tensor)]:
    """Collate (feature, captions) pairs into GPT-2 training inputs.

    For each example a random caption is picked, wrapped in the
    tokenizer's BOS/EOS tokens, and batch-encoded with padding and
    truncation.  The numpy feature vectors are stacked into a float
    tensor of shape (batch, 1, -1) to serve as encoder hidden states.

    Returns (encoder_hidden_states, input_ids, attention_mask).
    """
    tokenizer = get_tokenizer(gpt2_type)
    start_tok = tokenizer.bos_token
    end_tok = tokenizer.eos_token
    # One randomly chosen caption per example, delimited by BOS/EOS.
    texts = [f'{start_tok}{random.choice(captions)}{end_tok}' for (_, captions) in batch]
    enc = tokenizer.batch_encode_plus(texts, max_length=max_length, padding=True, truncation=True, return_tensors='pt')
    features = [torch.from_numpy(feat) for (feat, _) in batch]
    hidden = torch.stack(features, dim=0).reshape(len(batch), 1, -1)
    return (hidden.float(), enc['input_ids'], enc['attention_mask'])
class TestPickable:
    """Smoke tests: the search/query model objects must survive pickling."""

    @staticmethod
    def _assert_picklable(obj):
        """Fail the current test if *obj* cannot be serialised with pickle."""
        try:
            pickle.dumps(obj)
        except Exception:
            pytest.fail('Error during pickling.')

    @staticmethod
    def _foobar_model():
        """Build the two-attribute 'foobar' data model shared by the tests."""
        foo = Attribute('foo', int, True, 'a foo attribute.')
        bar = Attribute('bar', str, True, 'a bar attribute.')
        return DataModel('foobar', [foo, bar], 'A foobar data model.')

    def test_pickable_attribute(self):
        self._assert_picklable(Attribute('foo', int, True, 'a foo attribute.'))

    def test_pickable_data_model(self):
        self._assert_picklable(self._foobar_model())

    def test_pickable_description(self):
        description = Description({'foo': 1, 'bar': 'baz'}, data_model=self._foobar_model())
        self._assert_picklable(description)

    def test_pickable_query(self):
        constraint = And([Or([Not(Constraint('foo', ConstraintType('==', 1))), Not(Constraint('bar', ConstraintType('==', 'baz')))]), Constraint('foo', ConstraintType('<', 2))])
        self._assert_picklable(Query([constraint], self._foobar_model()))
def setup_regtest(app_state) -> HeadersRegTestMod:
    """Prepare the regtest network state and return a regtest headers store.

    Retries forever while the regtest node is unreachable; any other
    failure abandons setup and falls through to loading the headers file.

    NOTE(review): the bare ``except Exception: break`` silently swallows
    non-connection errors (e.g. a failed checkpoint calculation) and then
    still calls ``from_file`` below -- presumably deliberate best-effort
    behaviour, but worth confirming.
    """
    while True:
        try:
            # Fund the node-side key, then rebuild headers from a fresh
            # regtest checkpoint.
            regtest_import_privkey_to_node()
            delete_headers_file(app_state.headers_filename())
            (Net._net.CHECKPOINT, Net._net.VERIFICATION_BLOCK_MERKLE_ROOT) = calculate_regtest_checkpoint(Net.MIN_CHECKPOINT_HEIGHT)
            logger.info("using regtest network - miner funds go to: '%s' (not part of this wallet)", Net.REGTEST_P2PKH_ADDRESS)
            break
        except (NewConnectionError, requests.exceptions.ConnectionError) as e:
            # Node not up yet: back off and retry indefinitely.
            sleep_time = 5.0
            logger.error(f'node is offline, retrying in {sleep_time} seconds...')
            time.sleep(sleep_time)
        except Exception:
            break
    return HeadersRegTestMod.from_file(Net.COIN, app_state.headers_filename(), Net.CHECKPOINT)
def update_row(row, result, latitude_column='latitude', longitude_column='longitude', geojson=False, spatialite=False, raw=''):
    """Write a geocoding *result*'s coordinates (and optionally its raw
    payload) into *row*, then return the same row.

    Exactly one output format is used: a GeoJSON Point dict when
    ``geojson`` is set, a SpatiaLite WKT string when ``spatialite`` is
    set, otherwise two plain numeric columns.  When ``raw`` names a
    column, the provider's raw response is stored there as well.
    """
    lon = result.longitude
    lat = result.latitude
    if geojson:
        row[GEOMETRY_COLUMN] = {'type': 'Point', 'coordinates': [lon, lat]}
    elif spatialite:
        # WKT uses longitude-first ordering.
        row[GEOMETRY_COLUMN] = f'POINT ({lon} {lat})'
    else:
        row[longitude_column] = lon
        row[latitude_column] = lat
    if raw:
        row[raw] = result.raw
    return row
class AuditLoggingRulesEngine(bre.BaseRulesEngine):
    """Rules engine that evaluates projects' audit logging configurations."""

    def __init__(self, rules_file_path, snapshot_timestamp=None):
        """
        rules_file_path: path to the rule definitions file.
        snapshot_timestamp: snapshot to evaluate against, or None.
        """
        super(AuditLoggingRulesEngine, self).__init__(rules_file_path=rules_file_path, snapshot_timestamp=snapshot_timestamp)
        # Built lazily on first use (or explicitly via build_rule_book).
        self.rule_book = None

    def build_rule_book(self, global_configs=None):
        """Construct the rule book from the loaded rule definitions."""
        definitions = self._load_rule_definitions()
        self.rule_book = AuditLoggingRuleBook(global_configs, definitions, snapshot_timestamp=self.snapshot_timestamp)

    def find_violations(self, project, audit_config, force_rebuild=False):
        """Return the set of violations for *project*'s *audit_config*,
        building the rule book first if needed or forced."""
        if force_rebuild or (self.rule_book is None):
            self.build_rule_book()
        return set(self.rule_book.find_violations(project, audit_config))

    def add_rules(self, rules):
        """Forward extra *rules* to the rule book, if one exists."""
        if self.rule_book is not None:
            self.rule_book.add_rules(rules)
class OptionSeriesBellcurveStatesSelectHalo(Options):
    """Halo options for the selected state of a bellcurve series.

    NOTE(review): in the original, each getter/setter pair (`attributes`,
    `opacity`, `size`) was defined as two plain methods with the same
    name, so the setter silently shadowed the getter.  The `@property` /
    `@<name>.setter` decorators are restored below, matching the
    `_config_get` / `_config` pattern these bodies clearly implement.
    """

    @property
    def attributes(self):
        """Extra SVG attributes applied to the halo element (default None)."""
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def opacity(self):
        """Opacity of the halo (default 0.25)."""
        return self._config_get(0.25)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def size(self):
        """Size of the halo in pixels (default 10)."""
        return self._config_get(10)

    @size.setter
    def size(self, num: float):
        self._config(num, js_type=False)