code
stringlengths
281
23.7M
class CmdQuell(COMMAND_DEFAULT_CLASS):
    """
    Toggle 'quelling' of Account-level permissions.

    quell   - switch to using the current puppet's (usually lower)
              permissions instead of the Account's own.
    unquell - restore normal Account permissions.

    The quell state is stored in the Account attribute '_quell'.
    """

    key = 'quell'
    aliases = ['unquell']
    locks = 'cmd:pperm(Player)'
    help_category = 'General'
    account_caller = True

    def _recache_locks(self, account):
        """Force lock-cache refresh on the account and, if any, the current puppet."""
        if self.session:
            char = self.session.puppet
            if char:
                char.locks.reset()
        account.locks.reset()

    def func(self):
        """Toggle the '_quell' attribute depending on which alias invoked us."""
        account = self.account
        permstr = (account.is_superuser and '(superuser)') or (
            '(%s)' % ', '.join(account.permissions.all()))
        # Fixed: the original tested `self.cmdstring in ('unquell', 'unquell')`,
        # listing the same string twice; a plain equality check is what was meant.
        if self.cmdstring == 'unquell':
            if not account.attributes.get('_quell'):
                self.msg(f'Already using normal Account permissions {permstr}.')
            else:
                account.attributes.remove('_quell')
                self.msg(f'Account permissions {permstr} restored.')
        else:
            if account.attributes.get('_quell'):
                self.msg(f'Already quelling Account {permstr} permissions.')
                return
            account.attributes.add('_quell', True)
            puppet = self.session.puppet if self.session else None
            if puppet:
                cpermstr = '(%s)' % ', '.join(puppet.permissions.all())
                cpermstr = f"Quelling to current puppet's permissions {cpermstr}."
                cpermstr += f' (Note: If this is higher than Account permissions {permstr}, the lowest of the two will be used.)'
                cpermstr += '\nUse unquell to return to normal permission usage.'
                self.msg(cpermstr)
            else:
                self.msg(f'Quelling Account permissions {permstr}. Use unquell to get them back.')
        # Locks must be re-cached either way so the new permission level takes effect.
        self._recache_locks(account)
class Filter():
    """Relabels commits onto a target branch during an hg-to-git conversion.

    ``args`` is a comma-separated string ``"<branch_name>,<starting_hash>"``.
    A commit belongs to the branch if it is the starting commit or if any of
    its parents was already tagged onto the branch.
    """

    def __init__(self, args):
        fields = args.split(',')
        # Branch name tolerates non-ASCII (replaced); the hash must be pure ASCII.
        self.branch_name = fields[0].encode('ascii', 'replace')
        self.starting_commit_hash = fields[1].encode('ascii', 'strict')
        self.branch_parents = set()

    def commit_message_filter(self, commit_data):
        """Tag ``commit_data`` with the target branch when it belongs to the chain."""
        current_hash = commit_data['hg_hash']
        revision = commit_data['revision']
        parent_revs = commit_data['parents']
        on_branch = current_hash == self.starting_commit_hash
        if not on_branch:
            on_branch = any(parent in self.branch_parents for parent in parent_revs)
        if on_branch:
            self.branch_parents.add(revision)
            commit_data['branch'] = self.branch_name
            sys.stderr.write('\nchanging r%s to branch %r\n' % (revision, self.branch_name))
            sys.stderr.flush()
class Transform():
    """Wraps a user compute function together with its declared Inputs/Outputs.

    ``decorator`` selects the execution flavour: 'spark' (default, function
    returns a pyspark DataFrame), 'pandas' (function receives/returns pandas
    DataFrames), or 'transform' (function writes via TransformOutput objects).
    """

    def __init__(self, compute_func: Callable, outputs: 'dict[str, Output] | None'=None, inputs: 'dict[str, Input] | None'=None, decorator: str='spark'):
        self._compute_func = compute_func
        self.inputs = (inputs or {})
        self.outputs = (outputs or {})
        self._bound_transform = True
        self._type = decorator
        # Validate declared bindings up front so misconfiguration fails at
        # construction time rather than at compute time.
        for (name, tinput) in self.inputs.items():
            if (not isinstance(tinput, Input)):
                raise ValueError(f"Input '{name}' to transform {self} is not a transforms.api.Input")
        for (_, toutput) in self.outputs.items():
            if (not isinstance(toutput, Output)):
                raise ValueError(f"Output '{compute_func.__name__}' of transform {self} is not a transforms.api.Output")
        # The compute function may optionally declare a 'ctx' argument to
        # receive a TransformContext at compute time.
        self._use_context = ('ctx' in inspect.getfullargspec(compute_func).args)

    def __call__(self, *args, **kwargs):
        """Invoke the wrapped compute function, tagging exceptions it raises."""
        try:
            return self._compute_func(*args, **kwargs)
        except Exception as exc:
            # Mark the exception so callers can tell user-code failures apart
            # from framework failures.
            setattr(exc, '__transform_compute_error', True)
            raise

    def compute(self):
        """Dispatch to the flavour-specific compute implementation."""
        if (self._type == 'pandas'):
            return self._compute_pandas()
        if (self._type == 'transform'):
            return self._compute_transform()
        return self._compute_spark()

    def _compute_spark(self) -> 'pyspark.sql.DataFrame':
        """Materialize inputs as Spark DataFrames, run the function, and
        require it to return a Spark DataFrame."""
        kwargs = {name: i.dataframe() for (name, i) in self.inputs.items()}
        if self._use_context:
            kwargs['ctx'] = TransformContext()
        output_df = self(**kwargs)
        if (not isinstance(output_df, pyspark.sql.DataFrame)):
            raise ValueError(f'Expected {self} to return a pyspark.sql.DataFrame, instead got {output_df}')
        return output_df

    def _compute_pandas(self):
        """Materialize inputs as pandas DataFrames (via toPandas), run the
        function, and require it to return a pandas DataFrame."""
        kwargs = {name: i.dataframe().toPandas() for (name, i) in self.inputs.items()}
        if self._use_context:
            kwargs['ctx'] = TransformContext()
        output_df = self(**kwargs)
        import pandas as pd
        if (not isinstance(output_df, pd.DataFrame)):
            raise ValueError(f'Expected {self} to return a pandas.DataFrame, instead got {output_df}')
        return output_df

    def _compute_transform(self):
        """Run in 'transform' style: the function receives TransformInput /
        TransformOutput wrappers (keyed by argument name) and writes results
        itself; the written outputs are returned as DataFrames."""
        inputs = {argument_name: TransformInput(i) for (argument_name, i) in self.inputs.items()}
        outputs = {argument_name: TransformOutput(o, argument_name) for
                   (argument_name, o) in self.outputs.items()}
        kwargs = {**inputs, **outputs}
        if self._use_context:
            kwargs['ctx'] = TransformContext()
        self(**kwargs)
        return {name: i.dataframe() for (name, i) in outputs.items()}
class Quadro():
    """A kanban-style board: an ordered list of columns holding tasks."""

    def __init__(self, colunas=None):
        # Fresh list per instance when no columns are supplied.
        self.colunas = colunas if colunas else []

    def inserir_coluna(self, coluna):
        """Append a column to the board."""
        self.colunas.append(coluna)

    def inserir_tarefa(self, tarefa):
        """Place a new task into the first (backlog) column."""
        primeira = self.colunas[0]
        primeira.insere_tarefa(tarefa)

    def __repr__(self):
        return 'Quadro(colunas={})'.format(self.colunas)

    def mover(self, tarefa):
        """Move a task from the first column to the second, if it is there."""
        origem = self.colunas[0]
        if tarefa in origem:
            origem.remover_tarefas(tarefa)
            self.colunas[1].insere_tarefa(tarefa)
class SwitcherPanel(wxScrolledPanel):
    """Scrolled panel that swaps the visible 'page' widget whenever the
    model's ``selected`` trait changes."""

    def __init__(self, parent, id, model, label=None, cache=True, **kw):
        wxScrolledPanel.__init__(self, parent, id, **kw)
        self.SetupScrolling()
        self.model = model
        self.cache = cache          # when True, pages are reused per index
        self._page_cache = {}       # index -> created page widget
        self.current = None         # page currently displayed, if any
        self._create_widget(model, label)
        # Re-render whenever the model's 'selected' trait changes.
        model.observe(self._on_selected_changed, 'selected')
        return

    def _on_selected_changed(self, event):
        """Trait-change handler: display the newly selected page."""
        selected = event.new
        self._show_page(selected)
        return

    def _create_widget(self, model, label):
        """Build the vertical sizer and show the initial selection, if any."""
        self.sizer = sizer = wx.BoxSizer(wx.VERTICAL)
        self.SetSizer(sizer)
        self.SetAutoLayout(True)
        # -1 appears to be the "no selection" sentinel — only render when
        # something is actually selected.
        if (model.selected != (- 1)):
            self._show_page(model.selected)
            pass
        sizer.Fit(self)

    def _show_page(self, index):
        """Hide/detach the current page (if any) and show the page for ``index``."""
        if (self.current is not None):
            self.current.Show(False)
            self.sizer.Remove(self.current)
        page = self._page_cache.get(index)
        # NOTE(review): when self.cache is False a fresh page is created on
        # every call but is still stored into the cache dict — presumably
        # harmless since the cache is then never read; confirm intent.
        if ((not self.cache) or (page is None)):
            page = self.model.create_page(self, index)
            self._page_cache[index] = page
        self.sizer.Add(page, 1, wx.EXPAND)
        page.Show(True)
        self.current = page
        self.sizer.Layout()
class ZoomAttrs(Options):
    """Chart zoom configuration accessors.

    NOTE(review): the extracted source defined each getter/setter pair as two
    plain same-named methods, so each setter silently shadowed its getter
    (only the setter survived on the class). Restored as @property pairs,
    which the paired `_config_get`/`_config` bodies strongly suggest was the
    intent — TODO confirm against the originating library.
    """

    @property
    def enabled(self):
        """Whether zooming is enabled."""
        return self._config_get()

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag)

    @property
    def mode(self):
        """Zoom mode (e.g. axis direction)."""
        return self._config_get()

    @mode.setter
    def mode(self, value):
        self._config(value)

    @property
    def rangeMin(self) -> ZoomRange:
        """Minimum zoom range (nested sub-options)."""
        return self._config_sub_data('rangeMin', ZoomRange)

    @property
    def rangeMax(self) -> ZoomRange:
        """Maximum zoom range (nested sub-options)."""
        return self._config_sub_data('rangeMax', ZoomRange)

    @property
    def speed(self):
        """Zoom speed factor."""
        return self._config_get()

    @speed.setter
    def speed(self, num: float):
        self._config(num)

    @property
    def threshold(self):
        """Minimal movement before zooming triggers."""
        return self._config_get()

    @threshold.setter
    def threshold(self, num: float):
        self._config(num)
def test_ajax_fk():
    """AjaxSelectField on a peewee ForeignKeyField: loader lookup, form
    rendering, ajax endpoint, and create-view round trip."""
    (app, db, admin) = setup()

    class BaseModel(peewee.Model):
        class Meta():
            database = db

    class Model1(BaseModel):
        test1 = peewee.CharField(max_length=20)
        test2 = peewee.CharField(max_length=20)

        def __str__(self):
            return self.test1

    class Model2(BaseModel):
        model1 = peewee.ForeignKeyField(Model1)

    Model1.create_table()
    Model2.create_table()
    view = CustomModelView(Model2, url='view', form_ajax_refs={'model1': {'fields': ('test1', 'test2')}})
    admin.add_view(view)
    assert (u'model1' in view._form_ajax_refs)
    model = Model1(test1=u'first', test2=u'')
    model.save()
    model2 = Model1(test1=u'foo', test2=u'bar')
    model2.save()
    # Loader: fetch by pk and by search query on the configured fields.
    loader = view._form_ajax_refs[u'model1']
    mdl = loader.get_one(model.id)
    assert (mdl.test1 == model.test1)
    items = loader.get_list(u'fir')
    assert (len(items) == 1)
    assert (items[0].id == model.id)
    items = loader.get_list(u'bar')
    assert (len(items) == 1)
    assert (items[0].test1 == u'foo')
    form = view.create_form()
    assert (form.model1.__class__.__name__ == u'AjaxSelectField')
    with app.test_request_context('/admin/view/'):
        assert (u'value=""' not in form.model1())
        form.model1.data = model
        # Fixed: the original assertion's second `or` operand was a bare
        # non-empty string (always truthy), making the whole assert vacuous.
        # Assert the rendered widget actually carries the data-json payload.
        assert ((u'data-json="[%s, "first"]"' % as_unicode(model.id)) in form.model1())
        assert ((u'value="%s"' % as_unicode(model.id)) in form.model1())
    # Ajax lookup endpoint returns matching [id, repr] pairs.
    client = app.test_client()
    req = client.get(u'/admin/view/ajax/lookup/?name=model1&query=foo')
    assert (req.data == (b'[[%d, "foo"]]' % model2.id))
    # Creating through the admin view persists the FK.
    client.post('/admin/view/new/', data={u'model1': as_unicode(model.id)})
    mdl = Model2.select().first()
    assert (mdl is not None)
    assert (mdl.model1 is not None)
    assert (mdl.model1.id == model.id)
    assert (mdl.model1.test1 == u'first')
class GanttChartRenderer():
    """Renders a DataFrame of task spans as a self-contained HTML Gantt chart."""

    def to_html(self, df: pd.DataFrame, chart_width: Optional[int]=None) -> str:
        """Build a plotly timeline from ``df`` (expects Start/Finish/Name
        columns) and return the figure serialized to HTML."""
        figure = px.timeline(df, x_start='Start', x_end='Finish', y='Name',
                             color='Name', width=chart_width)
        # Tick labels switch units with the zoom level (ms up to hours).
        tick_stops = [
            dict(dtickrange=[None, 1], value='%3f ms'),
            dict(dtickrange=[1, 60], value='%S:%3f s'),
            dict(dtickrange=[60, 3600], value='%M:%S m'),
            dict(dtickrange=[3600, None], value='%H:%M h'),
        ]
        figure.update_xaxes(tickangle=90, rangeslider_visible=True,
                            tickformatstops=tick_stops)
        figure.update_yaxes(showticklabels=False, title='')
        figure.update_layout(autosize=True,
                             legend=dict(orientation='h', y=1.02))
        return figure.to_html()
# NOTE(review): a stray `()` preceded this def in the extracted source —
# almost certainly a stripped decorator (this looks like a nox session,
# i.e. `@nox.session(...)`); restore it from the original project.
def format(session):
    """Auto-format the code base: run unasync, isort, flynt, black and the
    license-header fixer over SOURCE_FILES, then re-run lint."""
    session.install('black', 'isort', 'flynt', 'unasync')
    session.run('python', 'utils/run-unasync.py')
    session.run('isort', '--profile=black', *SOURCE_FILES)
    session.run('flynt', *SOURCE_FILES)
    session.run('black', *SOURCE_FILES)
    session.run('python', 'utils/license-headers.py', 'fix', *SOURCE_FILES)
    # Formatting may fix lint findings; verify immediately.
    lint(session)
def draw(G: nx.Graph, layout_func: Callable, group_by: Hashable, sort_by: Hashable, color_by: Hashable=None, alpha_by: Hashable=None, size_by: Hashable=None, layout_kwargs: Dict=None, encodings_kwargs: Dict=None, rescale_func=rescale, ax=None):
    """Draw graph ``G`` on ``ax`` using ``layout_func`` for node placement.

    Node color / alpha / size are encoded from the node-table columns named
    by ``color_by`` / ``alpha_by`` / ``size_by``. Special ``encodings_kwargs``
    keys ``alpha_bounds``, ``alpha_scale`` and ``size_scale`` are consumed
    here; the rest is forwarded to ``node_glyphs``. Returns the node
    positions computed by ``layout_func``.

    Fixed: the mutable default arguments (``layout_kwargs: Dict={}`` and
    ``encodings_kwargs: Dict={}``) are replaced by ``None`` sentinels so a
    single shared dict is never reused across calls.
    """
    if (ax is None):
        ax = plt.gca()
    layout_kwargs = {} if layout_kwargs is None else layout_kwargs
    # deepcopy so the pops below never mutate the caller's dict.
    encodings_kwargs = {} if encodings_kwargs is None else deepcopy(encodings_kwargs)
    nt = node_table(G)
    pos = layout_func(nt, group_by, sort_by, **layout_kwargs)
    node_color = node_colors(nt, color_by)
    alpha_bounds = encodings_kwargs.pop('alpha_bounds', None)
    alpha = (transparency(nt, alpha_by, alpha_bounds) * encodings_kwargs.pop('alpha_scale', 1))
    size = (node_size(nt, size_by) * encodings_kwargs.pop('size_scale', 1))
    patches = node_glyphs(nt, pos, node_color, alpha, size, **encodings_kwargs)
    for patch in patches:
        ax.add_patch(patch)
    rescale_func(G)
    return pos
class RolloutEvaluator(Evaluator):
    """Evaluates a policy by rolling out ``n_episodes`` episodes in a
    vectorized evaluation environment and reporting epoch statistics."""

    def __init__(self, eval_env: StructuredVectorEnv, n_episodes: int, model_selection: Optional[ModelSelectionBase], deterministic: bool=False):
        self.eval_env = eval_env
        self.n_episodes = n_episodes
        # Optional: receives the mean epoch reward after each evaluation run.
        self.model_selection = model_selection
        self.deterministic = deterministic

    # NOTE(review): a stray `(Evaluator)` token preceded this method in the
    # extracted source — almost certainly a stripped `@override(Evaluator)`
    # decorator; restore it from the original project.
    def evaluate(self, policy: TorchPolicy) -> None:
        """Run evaluation rollouts until enough episodes finished, then
        write epoch stats and update model selection (if configured)."""
        policy.eval()
        n_done_episodes = 0
        observations = self.eval_env.reset()
        self.eval_env.clear_epoch_stats()
        while (n_done_episodes < self.n_episodes):
            sampled_action = policy.compute_action(observations, actor_id=self.eval_env.actor_id(), maze_state=None, deterministic=self.deterministic)
            (observations, rewards, dones, infos) = self.eval_env.step(sampled_action)
            # Multiple sub-envs can finish in the same step.
            n_done_episodes += np.count_nonzero(dones)
        self.eval_env.write_epoch_stats()
        if self.model_selection:
            reward = self.eval_env.get_stats_value(BaseEnvEvents.reward, LogStatsLevel.EPOCH, name='mean')
            self.model_selection.update(reward)
def fetch_exchange(zone_key1: str, zone_key2: str, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list[dict[(str, Any)]]:
    """Scrape the CNDC (Nicaragua) system map and return the live power
    exchange between the two zones.

    Only live data and the HN->NI / CR->NI pairs are supported; anything
    else raises NotImplementedError.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    sorted_zone_keys = '->'.join(sorted([zone_key1, zone_key2]))
    requests_obj = (session or Session())
    response = requests_obj.get(MAP_URL)
    map_html = response.text
    # The page embeds a JS array `var interconexion = [...]` with the
    # interchange values; pull the text between those delimiters.
    interchange_text = extract_text(map_html, 'var interconexion', '];')
    interchange_text = extract_text(interchange_text, '[')
    # Entries are quoted numbers; empty entries default to 0.
    interchange_list = [float((g.replace("'", '') or 0)) for g in interchange_text.split(',')]
    # Sign is flipped so the flow is oriented along the sorted zone-key pair
    # (assumes the map reports flows from NI's perspective — TODO confirm).
    if (sorted_zone_keys == 'HN->NI'):
        flow = ((- 1) * (interchange_list[0] + interchange_list[1]))
    elif (sorted_zone_keys == 'CR->NI'):
        flow = ((- 1) * (interchange_list[2] + interchange_list[3]))
    else:
        raise NotImplementedError('This exchange pair is not implemented')
    exchange_list = ExchangeList(logger)
    exchange_list.append(zoneKey=ZoneKey(sorted_zone_keys), datetime=get_time_from_system_map(map_html), netFlow=flow, source='cndc.org.ni')
    return exchange_list.to_list()
class DigestApp():
    """Mock HTTP app that exercises Digest-authentication challenge flows.

    NOTE(review): this block is corrupted — parameter/return type
    annotations and the response constructors (the `request: ->` fragments
    and `return json=data)` / `return headers=headers)` lines presumably
    read `httpx.Request) -> httpx.Response` and `return httpx.Response(...)`
    in the original) were stripped during extraction, leaving invalid
    syntax. Restore them from the original project; the surrounding logic
    is kept verbatim.
    """

    def __init__(self, algorithm: str='SHA-256', send_response_after_attempt: int=1, qop: str='auth', regenerate_nonce: bool=True) -> None:
        self.algorithm = algorithm
        # Number of 401 challenges to issue before accepting the request.
        self.send_response_after_attempt = send_response_after_attempt
        self.qop = qop
        # When False, a fixed nonce is reused for reproducibility.
        self._regenerate_nonce = regenerate_nonce
        self._response_count = 0

    def __call__(self, request: ->
        # Keep challenging until the configured attempt count is reached.
        if (self._response_count < self.send_response_after_attempt):
            return self.challenge_send(request)
        # Echo back the Authorization header the client finally sent.
        data = {'auth': request.headers.get('Authorization')}
        return json=data)

    def challenge_send(self, request: ->
        self._response_count += 1
        nonce = (hashlib.sha256(os.urandom(8)).hexdigest() if self._regenerate_nonce else 'ee96edced2a0b43e4869e96ebe27563f369c1205a049d06419bb51d8aeddf3d3')
        challenge_data = {'nonce': nonce, 'qop': self.qop, 'opaque': 'ee6378f3ee14ebfd2fff54b70a91a7cf242ab2271380db0e14bda1', 'algorithm': self.algorithm, 'stale': 'FALSE'}
        # Render only fields that have a value (qop/algorithm may be empty).
        challenge_str = ', '.join(('{}="{}"'.format(key, value) for (key, value) in challenge_data.items() if value))
        headers = {'www-authenticate': f'Digest realm=" {challenge_str}'}
        return headers=headers)
class ZabbixAPIAdapterTests(TestCase):
    """Integration tests for the Zabbix monitoring adapter: syncing and
    deleting users, user groups, hostgroups and actions against a live
    Zabbix API (``self.ezx_zapi``) and verifying via raw API queries.

    Fixes applied in review:
    - the generated test email was missing the '@' separator;
    - a duplicate ``user.password`` assignment was removed;
    - deprecated unittest aliases (``assertEquals``,
      ``assertDictContainsSubset``) replaced — both were removed in
      Python 3.12.
    """

    def _create_db_user(self):
        """Create and save a random active API user with a user profile."""
        user = User()
        user.first_name = get_random_string(10)
        user.last_name = get_random_string(10)
        user.api_access = True
        user.is_active = True
        user.is_staff = False
        user.password = get_random_string(10)
        # Fixed: the '@' separator was missing from the generated address.
        user.email = ((get_random_string(10) + '@') + get_random_string(10)) + '.com'
        user.username = ('Test_' + get_random_string(10))
        user.save()
        user.userprofile.alerting_email = user.email
        user.userprofile.alerting_phone = get_random_string(10)
        user.userprofile.alerting_jabber = ('jabber_' + user.email)
        user.userprofile.save()
        return user

    def _create_db_group(self):
        """Create a random Role attached to the test datacenter."""
        group = Role()
        group.name = ('Group_N' + get_random_string(10))
        group.alias = ('Group_A' + get_random_string(10))
        group.save()
        group.dc_set.add(self.dc)
        assert (self.dc in Role.objects.filter(name=group.name).first().dc_set.all())
        return group

    def _create_dc(self):
        """Create a random datacenter owned by the admin user."""
        dc = Dc()
        dc.name = ('DC_' + get_random_string(10))
        dc.alias = dc.name
        dc.owner = User.objects.get(id=settings.ADMIN_USER)
        dc.site = get_random_string(10)
        dc.save()
        return dc

    def setUp(self):
        self.dc = self._create_dc()
        self.zabbix = get_monitoring(self.dc)
        self.ezx_zapi = self.zabbix.ezx.zapi
        self.db_users = []
        self.db_groups = []
        self.hostgroups = []
        self.actions = []
        for x in range(5):
            self.db_users.append(self._create_db_user())
        for x in range(5):
            self.db_groups.append(self._create_db_group())
        for x in range(5):
            self.hostgroups.append(get_random_string(10))
        for x in range(5):
            self.actions.append(get_random_string(10))

    def tearDown(self):
        # Best-effort cleanup: objects may or may not exist in Zabbix
        # depending on which test ran, so MonitoringError is swallowed.
        for user in self.db_users:
            try:
                self.zabbix.user_delete(name=user.username)
            except MonitoringError:
                pass
            user.delete()
        for group in self.db_groups:
            try:
                self.zabbix.user_group_delete(name=group.name)
            except MonitoringError:
                pass
            group.delete()
        for hostgroup in self.hostgroups:
            try:
                self.zabbix.hostgroup_delete(hostgroup)
            except MonitoringError:
                pass
        for action_name in self.actions:
            try:
                self.zabbix.action_delete(action_name)
            except MonitoringError:
                pass
        self.dc.delete()

    def _get_zabbix_user(self, db_user):
        """Fetch the single Zabbix user matching db_user, with media/group details."""
        return self.ezx_zapi.user.get(dict(filter={'alias': db_user.username}, selectMedias='extend', selectUsrgrps='extend', selectMediatypes='extend'))[0]

    def _check_user_media(self, db_user, zabbix_user):
        """Verify the user's Zabbix media entries match the DB profile."""
        zabbix_user_medias = {}
        for media_type in zabbix_user.get('mediatypes', []):
            for media in zabbix_user.get('medias', []):
                if (media_type['mediatypeid'] == media['mediatypeid']):
                    zabbix_user_medias[media_type['description']] = media['sendto']
        for media_type in ZabbixMediaContainer.MEDIA_TYPES:
            wanted_user_media_sendto = ZabbixMediaContainer.extract_sendto_from_user(media_type, db_user)
            media_type_desc = ZabbixMediaContainer.get_media_type_desc(media_type, dc_settings=self.dc.settings)
            if (wanted_user_media_sendto and media_type_desc):
                self.assertEqual(zabbix_user_medias.get(media_type_desc, None), wanted_user_media_sendto, "user's media type={} sendto={} should be in zabbix".format(media_type, wanted_user_media_sendto))
            elif media_type_desc:
                self.assertNotIn(media_type_desc, zabbix_user_medias, 'user should not have media type={} in zabbix'.format(media_type))
            elif wanted_user_media_sendto:
                self.assertNotIn(wanted_user_media_sendto, zabbix_user_medias.values(), 'user should not have media type={} in zabbix'.format(media_type))
            else:
                raise AssertionError

    def _sync_zabbix_user_group(self, db_group, new=False, empty=False):
        """Sync db_group to Zabbix and verify its existence and membership."""
        if new:
            self.assertListEqual(self.ezx_zapi.usergroup.get({'search': {'name': ZabbixUserGroupContainer.user_group_name_factory(self.dc.name, db_group.name)}}), [], 'the group should not exist')
        self.zabbix.user_group_sync(group=db_group)
        zabbix_user_group_ = self.ezx_zapi.usergroup.get({'search': {'name': ZabbixUserGroupContainer.user_group_name_factory(self.dc.name, db_group.name)}, 'selectUsers': 'extend'})
        self.assertEqual(len(zabbix_user_group_), 1, 'the group should be in zabbix by now')
        zabbix_user_group = zabbix_user_group_[0]
        zabbix_user_group_users = {user['alias']: user for user in zabbix_user_group.get('users', [])}
        if empty:
            # assertEquals is a deprecated alias (removed in Python 3.12).
            self.assertEqual(len(zabbix_user_group_users), 0, 'the group should be empty in zabbix')
            return
        for db_user in db_group.user_set.all():
            self.assertIn(db_user.username, zabbix_user_group_users, 'user should be member of the user group in zabbix')
            zabbix_user = self._get_zabbix_user(db_user)
            self.assertEqual(zabbix_user_group_users[db_user.username]['userid'], zabbix_user['userid'])
            self._check_user_media(db_user, zabbix_user)

    def _delete_zabbix_user_group(self, db_group, last_for_all_users=False):
        """Delete the group in Zabbix; optionally verify orphaned users vanished too."""
        self.zabbix.user_group_delete(db_group.name)
        self.assertListEqual(self.ezx_zapi.usergroup.get({'search': {'name': ZabbixUserGroupContainer.user_group_name_factory(self.dc.name, db_group.name)}, 'limit': 1}), [], "the group shouldn't exist anymore")
        if last_for_all_users:
            for db_user in db_group.user_set.all():
                self.assertListEqual(self.ezx_zapi.user.get(dict(filter={'alias': db_user.username})), [], "user shouldn't be in zabbix anymore")

    def _sync_zabbix_user(self, db_user, new=False):
        """Sync db_user to Zabbix and verify its groups and media there."""
        if new:
            self.assertListEqual(self.ezx_zapi.user.get(dict(filter={'alias': db_user.username})), [], "user shouldn't be in zabbix before creation")
        self.zabbix.user_sync(db_user)
        zabbix_user_ = self.ezx_zapi.user.get(dict(filter={'alias': db_user.username}, selectMedias='extend', selectUsrgrps='extend', selectMediatypes='extend'))
        if (not db_user.roles.exists()):
            # Users with no groups must not be created in Zabbix at all.
            self.assertEqual(len(zabbix_user_), 0, 'user without groups should not exist in zabbix')
            return
        self.assertEqual(len(zabbix_user_), 1, 'user should be in zabbix by now')
        zabbix_user = zabbix_user_[0]
        zabbix_user_groups = set((group['name'] for group in zabbix_user.get('usrgrps', [])))
        wanted_zabbix_user_groups = set((ZabbixUserGroupContainer.user_group_name_factory(self.dc.name, db_group.name) for db_group in db_user.roles.all()))
        self.assertSetEqual(zabbix_user_groups, wanted_zabbix_user_groups, "user's groups in zabbix should match groups in DB")
        self._check_user_media(db_user, zabbix_user)

    def _delete_zabbix_user(self, db_user):
        self.zabbix.user_delete(db_user.username)
        self.assertListEqual(self.ezx_zapi.user.get(dict(filter={'alias': db_user.username})), [], "user shouldn't be in zabbix anymore")

    def test_001_create_delete_user_group_empty(self):
        db_group = self.db_groups[0]
        self._sync_zabbix_user_group(db_group, new=True, empty=True)
        self._delete_zabbix_user_group(db_group, last_for_all_users=True)

    def test_002_create_delete_user(self):
        db_group = self.db_groups[0]
        self._sync_zabbix_user_group(db_group, new=True, empty=True)
        db_user = self.db_users[0]
        # First sync: user has no roles, so nothing is created in Zabbix.
        self._sync_zabbix_user(db_user, new=True)
        db_user.roles.add(db_group)
        # Second sync: now the user gets created (still 'new' in Zabbix).
        self._sync_zabbix_user(db_user, new=True)
        self._delete_zabbix_user(db_user)
        self._delete_zabbix_user_group(db_group, last_for_all_users=True)
        db_user.roles.remove(db_group)

    def test_003_create_delete_user_group_nonempty(self):
        db_group = self.db_groups[0]
        self._sync_zabbix_user_group(db_group, new=True, empty=True)
        db_users = self.db_users
        for user in db_users:
            user.roles.add(db_group)
        self._sync_zabbix_user_group(db_group, new=False, empty=False)
        self._delete_zabbix_user_group(db_group, last_for_all_users=True)
        for user in db_users:
            user.roles.remove(db_group)

    def _test_user_group_manipulation(self):
        raise NotImplementedError()

    def _test_user_manipulation(self):
        raise NotImplementedError()

    def _get_new_hostgroup(self, hostgroup_name, dc_bound=True):
        """A hostgroup that was never created must not resolve."""
        if dc_bound:
            dc_name = self.dc.name
        else:
            dc_name = None
        self.assertListEqual(self.ezx_zapi.hostgroup.get({'filter': {'name': ZabbixHostGroupContainer.hostgroup_name_factory(dc_name, hostgroup_name)}}), [], 'hostgroup should not exist in zabbix')
        self.assertRaises(RemoteObjectDoesNotExist, self.zabbix.hostgroup_detail, hostgroup_name, dc_bound=dc_bound)

    def _get_existing_hostgroup(self, hostgroup_name, dc_bound=True):
        if dc_bound:
            dc_name = self.dc.name
        else:
            dc_name = None
        zabbix_hostgroup_ = self.ezx_zapi.hostgroup.get({'filter': {'name': ZabbixHostGroupContainer.hostgroup_name_factory(dc_name, hostgroup_name)}})
        self.assertEqual(len(zabbix_hostgroup_), 1, 'hostgroup should exist in zabbix')
        hostgroup_info = self.zabbix.hostgroup_detail(hostgroup_name, dc_bound=dc_bound)
        self.assertEqual(hostgroup_info['name'], hostgroup_name)

    def _create_new_hostgroup(self, hostgroup_name, dc_bound=True):
        if dc_bound:
            dc_name = self.dc.name
        else:
            dc_name = None
        self.assertListEqual(self.ezx_zapi.hostgroup.get({'filter': {'name': ZabbixHostGroupContainer.hostgroup_name_factory(dc_name, hostgroup_name)}}), [], 'hostgroup should not exist in zabbix')
        hostgroup_info = self.zabbix.hostgroup_create(hostgroup_name, dc_bound=dc_bound)
        zabbix_hostgroup_ = self.ezx_zapi.hostgroup.get({'filter': {'name': ZabbixHostGroupContainer.hostgroup_name_factory(dc_name, hostgroup_name)}})
        self.assertEqual(len(zabbix_hostgroup_), 1, 'hostgroup should exist in zabbix')
        self.assertEqual(hostgroup_info['name'], hostgroup_name)
        return hostgroup_info

    def _create_existing_hostgroup(self, hostgroup_name, dc_bound=True):
        if dc_bound:
            dc_name = self.dc.name
        else:
            dc_name = None
        zabbix_hostgroup_ = self.ezx_zapi.hostgroup.get({'filter': {'name': ZabbixHostGroupContainer.hostgroup_name_factory(dc_name, hostgroup_name)}})
        self.assertEqual(len(zabbix_hostgroup_), 1, 'hostgroup should exist in zabbix')
        self.assertRaises(RemoteObjectAlreadyExists, self.zabbix.hostgroup_create, hostgroup_name, dc_bound=dc_bound)

    def _delete_existing_hostgroup(self, hostgroup_name, dc_bound=True):
        if dc_bound:
            dc_name = self.dc.name
        else:
            dc_name = None
        self.zabbix.hostgroup_delete(hostgroup_name, dc_bound=dc_bound)
        self.assertListEqual(self.ezx_zapi.hostgroup.get({'filter': {'name': ZabbixHostGroupContainer.hostgroup_name_factory(dc_name, hostgroup_name)}}), [], 'hostgroup should not exist in zabbix anymore')

    def _delete_new_hostgroup(self, hostgroup_name, dc_bound=True):
        """Deleting a nonexistent hostgroup must raise."""
        if dc_bound:
            dc_name = self.dc.name
        else:
            dc_name = None
        self.assertListEqual(self.ezx_zapi.hostgroup.get({'filter': {'name': ZabbixHostGroupContainer.hostgroup_name_factory(dc_name, hostgroup_name)}}), [], 'hostgroup should not exist in zabbix anymore')
        self.assertRaises(RemoteObjectDoesNotExist, self.zabbix.hostgroup_delete, hostgroup_name, dc_bound=dc_bound)

    def test_101_create_get_delete_hostgroup(self):
        hostgroup = self.hostgroups[0]
        self._create_new_hostgroup(hostgroup)
        self._get_existing_hostgroup(hostgroup)
        self._delete_existing_hostgroup(hostgroup)
        self._create_new_hostgroup(hostgroup, dc_bound=False)
        self._get_existing_hostgroup(hostgroup, dc_bound=False)
        self._delete_existing_hostgroup(hostgroup, dc_bound=False)

    def test_102_create_delete_existing_hostgroup(self):
        hostgroup = self.hostgroups[0]
        self._create_new_hostgroup(hostgroup)
        self._create_existing_hostgroup(hostgroup)
        self._delete_existing_hostgroup(hostgroup)
        self._create_new_hostgroup(hostgroup, dc_bound=False)
        self._create_existing_hostgroup(hostgroup, dc_bound=False)
        self._delete_existing_hostgroup(hostgroup, dc_bound=False)

    def test_103_delete_new_hostgroup(self):
        hostgroup = self.hostgroups[0]
        self._delete_new_hostgroup(hostgroup)
        self._delete_new_hostgroup(hostgroup, dc_bound=False)

    def test_104_get_new_hostgroup(self):
        hostgroup = self.hostgroups[0]
        self._get_new_hostgroup(hostgroup)
        self._get_new_hostgroup(hostgroup, dc_bound=False)

    def _get_new_action(self, action_name):
        """An action that was never created must not resolve."""
        self.assertListEqual(self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}}), [], 'action should not exist in zabbix')
        self.assertRaises(RemoteObjectDoesNotExist, self.zabbix.action_detail, action_name)

    def _get_existing_action(self, action_name):
        zabbix_action_ = self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}})
        self.assertEqual(len(zabbix_action_), 1, 'action should exist in zabbix')
        action_info = self.zabbix.action_detail(action_name)
        self.assertEqual(action_info['name'], action_name)
        return action_info

    def _create_new_action(self, action_name, action_data, nonexistent_usergroups=False):
        self.assertListEqual(self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}}), [], 'action should not exist in zabbix')
        if nonexistent_usergroups:
            # Creation must fail while the referenced user groups do not exist yet.
            self.assertRaises(RelatedRemoteObjectDoesNotExist, self.zabbix.action_create, action_name, action_data)
            return
        action_info = self.zabbix.action_create(action_name, action_data)
        zabbix_action_ = self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}})
        self.assertEqual(len(zabbix_action_), 1, 'action should exist in zabbix')
        self.assertEqual(action_info['name'], action_name)
        return action_info

    def _create_existing_action(self, action_name, action_data):
        zabbix_action_ = self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}})
        self.assertEqual(len(zabbix_action_), 1, 'action should exist in zabbix')
        self.assertRaises(RemoteObjectAlreadyExists, self.zabbix.action_create, action_name, action_data)

    def _update_new_action(self, action_name, action_data):
        self.assertListEqual(self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}}), [], 'action should not exist in zabbix')
        self.assertRaises(RemoteObjectDoesNotExist, self.zabbix.action_update, action_name, action_data)

    def _update_existing_action(self, action_name, action_data, nonexistent_usergroups=False):
        zabbix_action_ = self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}})
        self.assertEqual(len(zabbix_action_), 1, 'action should exist in zabbix')
        if nonexistent_usergroups:
            self.assertRaises(RelatedRemoteObjectDoesNotExist, self.zabbix.action_update, action_name, action_data)
            return
        action_info = self.zabbix.action_update(action_name, action_data)
        zabbix_action_ = self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}})
        self.assertEqual(len(zabbix_action_), 1, 'action should exist in zabbix')
        self.assertEqual(action_info['name'], action_name)
        return action_info

    def _delete_existing_action(self, action_name):
        self.zabbix.action_delete(action_name)
        self.assertListEqual(self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}}), [], 'action should not exist in zabbix anymore')

    def _delete_new_action(self, action_name):
        self.assertListEqual(self.ezx_zapi.action.get({'filter': {'name': ZabbixActionContainer.action_name_factory(self.dc.name, action_name)}}), [], 'action should not exist in zabbix anymore')
        self.assertRaises(RemoteObjectDoesNotExist, self.zabbix.action_delete, action_name)

    def _initial_action_data(self):
        """Baseline action payload with random message texts."""
        return {'usergroups': [], 'hostgroups': [], 'enabled': True, 'message_subject': get_random_string(10), 'message_text': get_random_string(20), 'recovery_message_enabled': False, 'recovery_message_subject': get_random_string(10), 'recovery_message_text': get_random_string(20)}

    def _prepare_action_data(self, action_data, create_usergroups=True):
        """Sync referenced user groups to Zabbix and replace Role objects by names (mutates action_data)."""
        if create_usergroups:
            for db_group in action_data.get('usergroups', []):
                self.zabbix.user_group_sync(group=db_group)
        if ('usergroups' in action_data):
            action_data['usergroups'] = [db_group.name for db_group in action_data['usergroups']]
        return action_data

    def _cleanup_action_data(self, action_data):
        """Delete the user groups created by _prepare_action_data."""
        for db_group_name in action_data.get('usergroups', []):
            self.zabbix.user_group_delete(db_group_name)
        return action_data

    def test_201_create_get_update_delete_action(self):
        action_name = self.actions[0]
        create_data = dict(self._initial_action_data(), usergroups=self.db_groups[:2], hostgroups=self.hostgroups[:2], enabled=False)
        self._create_new_action(action_name, create_data, nonexistent_usergroups=True)
        self._prepare_action_data(create_data)
        # assertDictContainsSubset was deprecated/removed; assert subset-ness
        # by checking that merging the expected subset changes nothing.
        created = self._create_new_action(action_name, create_data)
        self.assertEqual(created, {**created, **create_data})
        detail = self._get_existing_action(action_name)
        self.assertEqual(detail, {**detail, **create_data})
        update_data = dict(usergroups=self.db_groups[2:], hostgroups=self.hostgroups[2:], enabled=True)
        self._update_existing_action(action_name, update_data, nonexistent_usergroups=True)
        self._prepare_action_data(update_data)
        self._create_existing_action(action_name, dict(self._initial_action_data(), **update_data))
        updated = self._update_existing_action(action_name, update_data)
        self.assertEqual(updated, {**updated, **update_data})
        detail = self._get_existing_action(action_name)
        self.assertEqual(detail, {**detail, **update_data})
        self._delete_existing_action(action_name)
        self._cleanup_action_data(create_data)
        self._cleanup_action_data(update_data)

    def test_202_delete_new_action(self):
        action_name = self.actions[0]
        self._delete_new_action(action_name)

    def test_203_get_new_action(self):
        action_name = self.actions[0]
        self._get_new_action(action_name)

    def test_204_update_new_action(self):
        action_name = self.actions[0]
        self._update_new_action(action_name, self._initial_action_data())
class Repeat(GObject.Object):
    """Hooks the Rhythmbox shell player to optionally repeat the current song.

    A toggle button opens a popover (RepeatPopContainer) where the user picks
    between repeating all tracks or just the current one; this class tracks
    the chosen mode and rewinds on end-of-stream when repeating one song.
    """

    # How the last song change happened.
    SONG_CHANGED_MANUAL = 0
    SONG_CHANGED_EOS = 1

    def __init__(self, shell, toggle_button):
        GObject.Object.__init__(self)
        self.repeat_song = False  # True while "repeat one song" is active
        self.toggle_button = toggle_button
        self.song_changed = self.SONG_CHANGED_MANUAL
        player = shell.props.shell_player
        player.props.player.connect('eos', self.on_gst_player_eos)
        player.connect('playing-song-changed', self.on_song_change)
        # Gtk.Popover.new is missing on older GTK versions; fall back to the
        # plugin's CustomPopover. Widget wiring happens in `finally` so it
        # runs for either popover implementation.
        try:
            popover = Gtk.Popover.new(toggle_button)
        except AttributeError:
            popover = CustomPopover(toggle_button)
        else:
            popover.set_modal(False)
        finally:
            repeat = RepeatPopContainer(popover, toggle_button)
            popover.add(repeat)
            toggle_button.connect('toggled', self._on_toggle, popover, repeat)
            repeat.connect('repeat-type-changed', self._on_repeat_type_changed)
            self._on_repeat_type_changed(repeat, repeat.get_repeat_type())

    def _on_toggle(self, toggle, popover, repeat):
        """Show/hide the popover and sync repeat_song with the toggle state."""
        if toggle.get_active():
            popover.show_all()
            self.repeat_song = (repeat.get_repeat_type() == RepeatPopContainer.ONE_SONG)
        else:
            popover.hide()
            self.repeat_song = False
        self._set_toggle_tooltip(repeat)
        # NOTE(review): leftover debug print — consider removing.
        print('on toggle', self.repeat_song)

    def _set_toggle_tooltip(self, repeat):
        """Update the toggle button tooltip, using the plugin's locale domain."""
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
        if self.toggle_button.get_has_tooltip():
            if (repeat.get_repeat_type() == RepeatPopContainer.ALL_SONGS):
                message = _('Repeat all tracks')
            else:
                message = _('Repeat the current track')
            self.toggle_button.set_tooltip_text(message)
        # Switch back to Rhythmbox's own locale domain.
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.RB)

    def _on_repeat_type_changed(self, repeat, repeat_type):
        """React to the user changing the repeat mode inside the popover."""
        if self.toggle_button.get_active():
            if (repeat_type == RepeatPopContainer.ONE_SONG):
                self.repeat_song = True
            else:
                self.repeat_song = False
        else:
            self.repeat_song = False
        self._set_toggle_tooltip(repeat)
        # NOTE(review): leftover debug print — consider removing.
        print('repeat type changed', self.repeat_song)

    def on_gst_player_eos(self, gst_player, stream_data, early=0):
        # Remember that the upcoming song change was caused by end-of-stream.
        if self.repeat_song:
            self.song_changed = self.SONG_CHANGED_EOS

    def on_song_change(self, player, time):
        # If the change came from EOS while repeating, jump back to the song.
        if (self.song_changed == self.SONG_CHANGED_EOS):
            self.song_changed = self.SONG_CHANGED_MANUAL
            player.do_previous()

    def on_elapsed_change(self, player, time):
        # Alternative repeat path: restart the track shortly before it ends.
        if self.repeat_song:
            duration = player.get_playing_song_duration()
            if (duration > 0):
                if (time >= (duration - 2)):
                    player.set_playing_time(0)
# NOTE(review): the extracted source began with a stray `.django_db` —
# a stripped `@pytest.mark.django_db` decorator. Restore it (and the
# pytest import if missing) from the original project; without it this
# test cannot access the database.
def test_specific_award():
    """Filtering by award_id returns only transaction_2; page 2 is empty."""
    create_dummy_awards()
    baker.make('search.TransactionSearch', **transaction_1)
    baker.make('search.TransactionSearch', **transaction_2)
    baker.make('search.TransactionSearch', **transaction_3)
    test_payload = {'award_id': '2'}
    svs = TransactionViewSet()
    test_params = svs._parse_and_validate_request(test_payload)
    subawards_logic = svs._business_logic(test_params)
    expected_response = [format_response(transaction_2)]
    assert (expected_response == subawards_logic)
    # Requesting a page past the single result must return an empty list.
    test_payload['page'] = 2
    test_params = svs._parse_and_validate_request(test_payload)
    subawards_logic = svs._business_logic(test_params)
    assert ([] == subawards_logic)
def test_context_lines_processor(elasticapm_client):
    """add_context_lines_to_frames should load two lines of context around each
    frame's line number and leave frames without metadata untouched."""
    stacks_dir = os.path.join(os.path.dirname(__file__), '..', 'utils', 'stacks')
    linenos_1 = os.path.join(stacks_dir, 'linenos.py')
    linenos_2 = os.path.join(stacks_dir, 'linenos2.py')
    frames = [
        {'context_metadata': (linenos_1, 3, 2, None, None)},
        {'context_metadata': (linenos_2, 5, 2, None, None)},
        {'context_metadata': (linenos_1, 17, 2, None, None)},
        {'no': 'context'},
    ]
    event = {'exception': {'stacktrace': frames}}
    result = processors.add_context_lines_to_frames(elasticapm_client, event)
    expected = [
        {'pre_context': ['1', '2'], 'context_line': '3', 'post_context': ['4', '5']},
        {'pre_context': ['c', 'd'], 'context_line': 'e', 'post_context': ['f', 'g']},
        {'pre_context': ['15', '16'], 'context_line': '17', 'post_context': ['18', '19']},
        {'no': 'context'},
    ]
    assert result['exception']['stacktrace'] == expected
class PMGPC(PCBase, PMGBase):
    """p-multigrid preconditioner: builds an inner PETSc 'mg' PC on the
    p-coarsened DM and delegates application to it."""

    # Options prefix appended to the outer PC's prefix for the inner PC.
    _prefix = 'pmg_'

    def configure_pmg(self, pc, pdm):
        """Create and configure the inner multigrid PC on p-DM ``pdm``."""
        odm = pc.getDM()
        ppc = PETSc.PC().create(comm=pc.comm)
        ppc.setOptionsPrefix((pc.getOptionsPrefix() + 'pmg_'))
        ppc.setType('mg')
        # Reuse the outer PC's operators.
        ppc.setOperators(*pc.getOperators())
        ppc.setDM(pdm)
        opts = PETSc.Options((pc.getOptionsPrefix() + 'pmg_'))
        # Seed a default h-level count for a coarse-grid 'mg' solver from the
        # DM's refinement level; users may override via options.
        opts['mg_coarse_pc_mg_levels'] = (odm.getRefineLevel() + 1)
        return ppc

    def apply(self, pc, x, y):
        # Delegate to the inner PC built by configure_pmg.
        return self.ppc.apply(x, y)

    def applyTranspose(self, pc, x, y):
        return self.ppc.applyTranspose(x, y)

    def coarsen_bc_value(self, bc, cV):
        # Coarsened boundary conditions are homogeneous.
        return 0
class OptionSeriesFunnel3dSonificationTracksMappingPan(Options):
    """Generated option accessors for funnel3d sonification track pan mapping.

    NOTE(review): every option appears as two same-named methods (getter then
    setter); the @property / @<name>.setter decorators were most likely lost
    in extraction — confirm against the generated source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesFunnelSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Generated option container for the lowpass filter mapping of funnel
    sonification default instrument options; each accessor returns a nested
    sub-options object."""

    def frequency(self) -> 'OptionSeriesFunnelSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        # Nested mapping for the lowpass cutoff frequency.
        return self._config_sub_data('frequency', OptionSeriesFunnelSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesFunnelSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        # Nested mapping for the lowpass resonance.
        return self._config_sub_data('resonance', OptionSeriesFunnelSonificationDefaultinstrumentoptionsMappingLowpassResonance)
def test_capture_headers_config_is_dynamic_for_transactions(client, django_elasticapm_client):
    """Toggling capture_headers at runtime must change whether request headers
    are recorded on subsequent transactions."""
    tracing_middleware = middleware_setting(django.VERSION, ['elasticapm.contrib.django.middleware.TracingMiddleware'])
    # First request with header capture enabled: headers must be present.
    django_elasticapm_client.config.update(version='1', capture_headers=True)
    with override_settings(**tracing_middleware):
        client.post(reverse('elasticapm-no-error'))
    first_transaction = django_elasticapm_client.events[TRANSACTION][0]
    assert first_transaction['context']['request']['headers']
    # Second request after disabling capture: headers must be absent.
    django_elasticapm_client.config.update(version='1', capture_headers=False)
    with override_settings(**tracing_middleware):
        client.post(reverse('elasticapm-no-error'))
    second_transaction = django_elasticapm_client.events[TRANSACTION][1]
    assert 'headers' not in second_transaction['context']['request']
# NOTE(review): the two bare tuples below look like truncated
# "@patch.object(Session, 'client')" / "@patch.object(Session, '__init__')"
# mock decorators lost in extraction — confirm against upstream source.
(Session, 'client')
(Session, '__init__')
def test_client_init(mocked_init, mocked_client):
    """EcsClient must forward its credentials to boto3's Session and open both
    the 'ecs' and 'events' service clients."""
    mocked_init.return_value = None
    EcsClient(u'access_key_id', u'secret_access_key', u'region', u'profile', u'session_token')
    # Session constructed exactly once with the positional args mapped to the
    # boto3 keyword names.
    mocked_init.assert_called_once_with(aws_access_key_id=u'access_key_id', aws_secret_access_key=u'secret_access_key', profile_name=u'profile', region_name=u'region', aws_session_token=u'session_token')
    mocked_client.assert_any_call(u'ecs')
    mocked_client.assert_any_call(u'events')
def FairMarketcapDiffSF1(max_back_quarter: int=None, min_back_quarter: int=None, data_source: Optional[str]=None, pretrained: bool=True, verbose: bool=None) -> Pipeline:
    """Build the fair-marketcap-diff pipeline backed by the SF1 data source.

    Non-None arguments override the module-level defaults. NOTE: the
    overrides mutate module globals, so they also affect later calls.
    When ``pretrained`` is true, the serialized model core is downloaded
    once (cached on disk) and loaded into the pipeline.
    """
    if (data_source is not None):
        global DATA_SOURCE
        DATA_SOURCE = data_source
    if (max_back_quarter is not None):
        global MAX_BACK_QUARTER
        MAX_BACK_QUARTER = max_back_quarter
    if (min_back_quarter is not None):
        global MIN_BACK_QUARTER
        MIN_BACK_QUARTER = min_back_quarter
    if (verbose is not None):
        global VERBOSE
        VERBOSE = verbose
    if (DATA_SOURCE == 'sf1'):
        # Ensure the raw SF1 dataset is present locally before building.
        _check_download_data()
    data = _create_data()
    feature = _create_feature()
    target = _create_target()
    model = _create_model()
    pipeline = Pipeline(feature=feature, target=target, model=model, data=data, out_name=OUT_NAME)
    core_path = '{}/{}.pickle'.format(config['models_path'], OUT_NAME)
    if pretrained:
        # Download the pretrained core only if it is not cached yet.
        if (not os.path.exists(core_path)):
            urlretrieve(URL, core_path)
        pipeline.load_core(core_path)
    return pipeline
# NOTE(review): "(slots=True)" looks like a truncated "@attr.s(slots=True)"
# class decorator lost in extraction; similarly, from_io/read/read_extended/
# from_extended_bytes/nHashType/from_dict take `cls` and were most likely
# @classmethod-decorated — confirm against upstream source.
(slots=True)
class Transaction(Tx):
    """A Bitcoin SV transaction with signing metadata (extended inputs and
    outputs carrying x_pubkeys, thresholds and partial signatures)."""

    # Optional per-output metadata; wallet context (description, prev txs).
    output_info: Optional[List[Dict[(bytes, Any)]]] = attr.ib(default=None)
    context: TransactionContext = attr.ib(default=attr.Factory(TransactionContext))
    # BCH/BSV fork-id sighash flag, OR-ed into the hash type.
    SIGHASH_FORKID = 64

    def from_io(cls, inputs, outputs, locktime=0):
        # Build a version-1 transaction from input/output lists.
        return cls(version=1, inputs=inputs, outputs=outputs.copy(), locktime=locktime)

    def read(cls, read):
        # Deserialize from a stream in standard wire format.
        return cls(read_le_int32(read), read_list(read, XTxInput.read), read_list(read, XTxOutput.read), read_le_uint32(read))

    def read_extended(cls, read):
        # Deserialize inputs in the extended format that includes signing data.
        return cls(read_le_int32(read), read_list(read, XTxInput.read_extended), read_list(read, XTxOutput.read), read_le_uint32(read))

    def to_bytes(self):
        # Serialize to standard wire format.
        return b''.join((pack_le_int32(self.version), pack_list(self.inputs, XTxInput.to_bytes), pack_list(self.outputs, XTxOutput.to_bytes), pack_le_uint32(self.locktime)))

    def from_extended_bytes(cls, raw: bytes) -> 'Transaction':
        return cls.read_extended(BytesIO(raw).read)

    def __str__(self):
        return self.serialize()

    def is_complete(self):
        # Complete when every input has all required signatures.
        return all((txin.is_complete() for txin in self.inputs))

    def update_signatures(self, signatures):
        """Add one DER signature per input, recovering which of each input's
        public keys produced it via recoverable-signature matching."""
        if self.is_complete():
            return
        if (len(self.inputs) != len(signatures)):
            raise RuntimeError('expected {} signatures; got {}'.format(len(self.inputs), len(signatures)))
        for (txin, signature) in zip(self.inputs, signatures):
            # Append the sighash byte to form the full script signature.
            full_sig = (signature + bytes([self.nHashType()]))
            logger.warning(f'Signature: {full_sig.hex()}')
            if (full_sig in txin.signatures):
                continue
            pubkeys = [x_pubkey.to_public_key() for x_pubkey in txin.x_pubkeys]
            pre_hash = self.preimage_hash(txin)
            rec_sig_base = der_signature_to_compact(signature)
            # Try all four recovery ids to find the signing public key.
            for recid in range(4):
                rec_sig = (rec_sig_base + bytes([recid]))
                try:
                    public_key = PublicKey.from_recoverable_signature(rec_sig, pre_hash, None)
                except (InvalidSignatureError, ValueError):
                    continue
                if (public_key in pubkeys):
                    try:
                        public_key.verify_recoverable_signature(rec_sig, pre_hash, None)
                    except Exception:
                        logger.exception('')
                        continue
                    j = pubkeys.index(public_key)
                    logger.debug(f'adding sig {j} {public_key} {full_sig}')
                    txin.signatures[j] = full_sig
                    break

    def get_preimage_script_bytes(self, txin) -> bytes:
        """Return the script to sign against for this input's script type."""
        _type = txin.type()
        if (_type == ScriptType.P2PKH):
            x_pubkey = txin.x_pubkeys[0]
            script = x_pubkey.to_public_key().P2PKH_script()
            return script.to_bytes()
        elif ((_type == ScriptType.MULTISIG_P2SH) or (_type == ScriptType.MULTISIG_BARE)):
            return multisig_script(txin.x_pubkeys, txin.threshold)
        elif (_type == ScriptType.MULTISIG_ACCUMULATOR):
            return AccumulatorMultiSigOutput([v.to_bytes() for v in txin.x_pubkeys], txin.threshold).to_script_bytes()
        elif (_type == ScriptType.P2PK):
            x_pubkey = txin.x_pubkeys[0]
            script = x_pubkey.to_public_key().P2PK_script()
            return script.to_bytes()
        else:
            raise RuntimeError('Unknown txin type', _type)

    def BIP_LI01_sort(self):
        # Deterministic input/output ordering per BIP-LI01.
        self.inputs.sort(key=(lambda txin: txin.prevout_bytes()))
        self.outputs.sort(key=(lambda output: (output.value, output.script_pubkey.to_bytes())))

    def nHashType(cls) -> int:
        # SIGHASH_ALL combined with the fork id flag.
        return (1 | cls.SIGHASH_FORKID)

    def preimage_hash(self, txin):
        # Digest that gets signed for the given input.
        input_index = self.inputs.index(txin)
        script_code = self.get_preimage_script_bytes(txin)
        sighash = SigHash(self.nHashType())
        return self.signature_hash(input_index, txin.value, script_code, sighash=sighash)

    def serialize(self) -> str:
        return self.to_bytes().hex()

    def txid(self) -> Optional[str]:
        # Only a fully-signed transaction has a stable txid.
        if self.is_complete():
            return hash_to_hex_str(self.hash())
        return None

    def input_value(self) -> int:
        return sum((txin.value for txin in self.inputs))

    def output_value(self) -> int:
        return sum((output.value for output in self.outputs))

    def get_fee(self) -> int:
        return (self.input_value() - self.output_value())

    def size(self) -> int:
        # Exact size when complete, otherwise an estimate.
        if self.is_complete():
            return len(self.to_bytes())
        return self.estimated_size()

    def estimated_size(self) -> int:
        # Serialize without inputs, then add per-input size estimates.
        saved_inputs = self.inputs
        self.inputs: List[XTxInput] = []
        size_without_inputs = len(self.to_bytes())
        self.inputs = saved_inputs
        input_size = sum((txin.estimated_size() for txin in self.inputs))
        return (size_without_inputs + input_size)

    def signature_count(self) -> Tuple[(int, int)]:
        # (signatures present, signatures required) over all inputs.
        r = 0
        s = 0
        for txin in self.inputs:
            signatures = txin.signatures_present()
            s += len(signatures)
            r += txin.threshold
        return (s, r)

    def sign(self, keypairs: Dict[(XPublicKey, Tuple[(bytes, bool)])]) -> None:
        """Sign every incomplete input for which a private key is available."""
        assert all((isinstance(key, XPublicKey) for key in keypairs))
        for txin in self.inputs:
            if txin.is_complete():
                continue
            for (j, x_pubkey) in enumerate(txin.x_pubkeys):
                if (x_pubkey in keypairs):
                    logger.debug('adding signature for %s', x_pubkey)
                    (sec, compressed) = keypairs[x_pubkey]
                    txin.signatures[j] = self._sign_txin(txin, sec)
        logger.debug('is_complete %s', self.is_complete())

    def _sign_txin(self, txin: XTxInput, privkey_bytes: bytes) -> bytes:
        # Sign the input's preimage hash and append the sighash byte.
        pre_hash = self.preimage_hash(txin)
        privkey = PrivateKey(privkey_bytes)
        sig = privkey.sign(pre_hash, None)
        return (sig + pack_byte(self.nHashType()))

    def from_dict(cls, data: Dict[(str, Any)]) -> 'Transaction':
        """Rebuild a transaction (and optional signing metadata) from the
        dict format produced by to_dict."""
        version = data.get('version', 0)
        tx = cls.from_hex(data['hex'])
        if (version == 1):
            # Version 1 may carry per-input/per-output signing metadata.
            input_data: Optional[List[Dict[(str, Any)]]] = data.get('inputs')
            if (input_data is not None):
                assert (len(tx.inputs) == len(input_data))
                for (i, txin) in enumerate(tx.inputs):
                    txin.script_type = ScriptType(input_data[i]['script_type'])
                    txin.threshold = int(input_data[i]['threshold'])
                    txin.value = int(input_data[i]['value'])
                    txin.signatures = [bytes.fromhex(v) for v in input_data[i]['signatures']]
                    txin.x_pubkeys = [XPublicKey.from_dict(v) for v in input_data[i]['x_pubkeys']]
            output_data: Optional[List[Dict[(str, Any)]]] = data.get('outputs')
            if (output_data is not None):
                assert (len(tx.outputs) == len(output_data))
                for (i, txout) in enumerate(tx.outputs):
                    txout.script_type = ScriptType(output_data[i]['script_type'])
                    txout.x_pubkeys = [XPublicKey.from_dict(v) for v in output_data[i]['x_pubkeys']]
            if ('description' in data):
                tx.context.description = str(data['description'])
            if ('prev_txs' in data):
                # Parent transactions keyed by their hash.
                for tx_hex in data['prev_txs']:
                    ptx = cls.from_hex(tx_hex)
                    tx.context.prev_txs[ptx.hash()] = ptx
            assert (tx.is_complete() == data['complete']), 'transaction completeness mismatch'
        elif (version == 0):
            assert tx.is_complete(), 'raw transactions must be complete'
        return tx

    def to_dict(self, force_signing_metadata: bool=False) -> Dict[(str, Any)]:
        """Serialize to a dict; signing metadata is included when incomplete
        or when explicitly forced."""
        out: Dict[(str, Any)] = {'version': 1, 'hex': self.to_hex(), 'complete': self.is_complete()}
        if self.context.description:
            out['description'] = self.context.description
        if (force_signing_metadata or (not out['complete'])):
            out['inputs'] = []
            for txin in self.inputs:
                input_entry: Dict[(str, Any)] = {}
                input_entry['script_type'] = txin.script_type
                input_entry['threshold'] = txin.threshold
                input_entry['value'] = txin.value
                input_entry['signatures'] = [sig.hex() for sig in txin.signatures]
                input_entry['x_pubkeys'] = [xpk.to_dict() for xpk in txin.x_pubkeys]
                out['inputs'].append(input_entry)
            # Outputs are only emitted when at least one carries x_pubkeys.
            output_data = []
            if any((len(o.x_pubkeys) for o in self.outputs)):
                for txout in self.outputs:
                    output_entry: Dict[(str, Any)] = {}
                    output_entry['script_type'] = txout.script_type
                    output_entry['x_pubkeys'] = [xpk.to_dict() for xpk in txout.x_pubkeys]
                    output_data.append(output_entry)
            if len(output_data):
                out['outputs'] = output_data
        return out

    def to_format(self, format: TxSerialisationFormat) -> TxSerialisedType:
        # Dispatch on the requested serialization format.
        if (format == TxSerialisationFormat.RAW):
            return self.to_bytes()
        elif (format == TxSerialisationFormat.HEX):
            return self.to_hex()
        elif (format in (TxSerialisationFormat.JSON, TxSerialisationFormat.JSON_WITH_PROOFS)):
            return self.to_dict()
        raise NotImplementedError(f'unhanded format {format}')
def get_subparts(n):
    """Recursively split tree node ``n`` at duplication nodes and return the
    resulting list of cleaned single-copy subtrees.

    NOTE(review): assumes ete3-style nodes (get_children/detach/leaves/
    descendants/delete) and an external ``is_dup`` predicate — confirm.
    """
    subtrees = []
    if is_dup(n):
        # A duplication node itself is discarded; each child subtree is
        # processed independently.
        for ch in n.get_children():
            ch.detach()
            subtrees.extend(get_subparts(ch))
    else:
        # Collect duplication nodes below n (treating them as leaves so the
        # traversal stops at them), then detach them for separate handling.
        to_visit = []
        for _n in n.leaves(is_leaf_fn=is_dup):
            if is_dup(_n):
                to_visit.append(_n)
        for _n in to_visit:
            _n.detach()
        # Remove degenerate nodes left by the detachments: single-child
        # internals, and childless nodes that were never real leaves
        # (no '_leaf' marker).
        freaks = [_n for _n in n.descendants() if ((len(_n.children) == 1) or ((not hasattr(_n, '_leaf')) and (not _n.children)))]
        for s in freaks:
            s.delete(prevent_nondicotomic=True)
        # Skip down through any remaining single-child chain at the root.
        while (len(n.children) == 1):
            n = n.children[0]
        n.detach()
        if ((not n.children) and (not hasattr(n, '_leaf'))):
            # Nothing real left under n; drop it.
            pass
        else:
            subtrees.append(n)
        # Recurse into the duplication nodes that were detached above.
        for _n in to_visit:
            subtrees.extend(get_subparts(_n))
    return subtrees
def get_user_emails(service_config, member_types=None):
    """Return the primary email address of every inventoried member of the
    given types (defaults to G Suite users)."""
    member_types = member_types or ['gsuite_user']
    with service_config.scoped_session() as session:
        inventory_index_id = DataAccess.get_latest_inventory_index_id(session)
        rows = DataAccess.iter(session, inventory_index_id, type_list=member_types)
        return [row.get_resource_data()['primaryEmail'] for row in rows]
def test_validate_with_missing_kfp_component(jp_environ, kubeflow_pipelines_runtime_instance):
    """Validation must fail with a catalog error when a pipeline node references
    a component that does not exist in the catalog."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        source_path = Path(__file__).parent / 'resources' / 'pipelines' / 'kfp_3_node_custom.pipeline'
        broken_path = Path.cwd() / 'foo.pipeline'
        with open(source_path) as source_file:
            pipeline_json = json.load(source_file)
        # Point the first node at a component name that cannot exist.
        first_node = pipeline_json['pipelines'][0]['nodes'][0]
        first_node['op'] = first_node['op'] + 'Missing'
        with open(broken_path, 'w') as broken_file:
            broken_file.write(json.dumps(pipeline_json))
        result = runner.invoke(pipeline, ['validate', str(broken_path), '--runtime-config', kubeflow_pipelines_runtime_instance])
        assert 'Validating pipeline...' in result.output
        assert '[Error][Calculate data hash] - This component was not found in the catalog.' in result.output
        assert result.exit_code != 0
def draw_grid(hex_grid):
    """Render every map view of ``hex_grid`` to PNG files under ../output and
    print a per-territory summary to stdout."""

    # --- per-hex color/label helpers passed to HexGridDraw ---

    def color_heightmap(h):
        # Grayscale from altitude.
        alt = int(h.altitude)
        return (alt, alt, alt)

    def color_terrain(h):
        return h.color_terrain

    def color_rivers(h):
        return h.color_rivers

    def color_temperature_end_year(h):
        return h.color_temperature[0]

    def color_temperature_mid_year(h):
        return h.color_temperature[1]

    def temperature_end_year(h):
        return h.temperature[0]

    def temperature_mid_year(h):
        return h.temperature[1]

    def color_biome(h):
        return h.color_biome

    def color_territories(h):
        return h.color_territories

    def color_satellite(h):
        return h.color_satellite

    def color_features(h):
        # Volcanic features take precedence; featureless hexes are gray.
        if h.has_feature(HexFeature.lava_flow):
            return (200, 100, 0)
        if h.has_feature(HexFeature.volcano):
            return (255, 0, 0)
        if h.has_feature(HexFeature.crater):
            return (255, 255, 0)
        return (200, 200, 200)

    def color_resources(h):
        if (h.resource is not None):
            return h.resource.get('type').color
        return (100, 100, 100)

    def color_zone(h):
        return h.zone.color

    def key_zone(h):
        return h.zone.map_key

    def hex_latitude(h):
        return h.latitude

    def color_pressure_end_year(h):
        return h.color_pressure[0]

    def color_pressure_mid_year(h):
        return h.color_pressure[1]

    def pressure_number_end_year(h):
        return h.pressure[0]

    def pressure_number_mid_year(h):
        return h.pressure[1]

    # NOTE(review): the wind color helpers read h.color_pressure rather than a
    # wind-specific color — confirm whether that is intentional.
    def color_wind_end_year(h):
        return h.color_pressure[0]

    def color_wind_mid_year(h):
        return h.color_pressure[1]

    def wind_display_end_year(h):
        # Arrow glyph for the wind direction, '-' when calm.
        wind = h.wind[0].get('direction')
        if wind:
            return wind.arrow
        return '-'

    def wind_display_mid_year(h):
        wind = h.wind[1].get('direction')
        if wind:
            return wind.arrow
        return '-'

    def color_hex_type(h):
        # Green for land, blue for water.
        if h.is_land:
            return (0, 255, 0)
        return (0, 0, 255)

    def color_geoforms(h):
        for g in GeoformType.list():
            if (h.geoform.type is g):
                return g.color
        return (0, 0, 255)

    # --- render each map variant ---
    HexGridDraw(hex_grid, color_features, '../output/map_features.png', show_coasts=True, rivers=False)
    HexGridDraw(hex_grid, color_heightmap, '../output/map_height.png', rivers=False, show_coasts=True)
    HexGridDraw(hex_grid, color_terrain, '../output/map_terrain.png', rivers=True, show_coasts=True)
    HexGridDraw(hex_grid, color_hex_type, '../output/map_hex_types.png', rivers=True, show_coasts=True)
    HexGridDraw(hex_grid, color_geoforms, '../output/map_geoforms.png', rivers=False, show_coasts=True)
    HexGridDraw(hex_grid, color_rivers, '../output/map_rivers.png', rivers=True, show_coasts=True)
    HexGridDraw(hex_grid, color_temperature_end_year, '../output/map_temp_end_year.png', rivers=False, show_coasts=True)
    HexGridDraw(hex_grid, color_temperature_mid_year, '../output/map_temp_mid_year.png', rivers=False, show_coasts=True)
    HexGridDraw(hex_grid, color_biome, '../output/map_biome.png', rivers=False)
    HexGridDraw(hex_grid, color_territories, '../output/map_territories.png', rivers=False, show_coasts=True, borders=True)
    HexGridDraw(hex_grid, color_satellite, '../output/map_satellite.png')
    HexGridDraw(hex_grid, color_resources, '../output/map_resources.png')
    HexGridDraw(hex_grid, color_zone, '../output/map_zone.png', text_func=key_zone, rivers=False, show_coasts=False)
    HexGridDraw(hex_grid, color_zone, '../output/map_latitude.png', text_func=hex_latitude, rivers=False, show_coasts=False)
    HexGridDraw(hex_grid, color_pressure_end_year, '../output/map_pressure_end_year.png', rivers=False, show_coasts=True)
    HexGridDraw(hex_grid, color_pressure_mid_year, '../output/map_pressure_mid_year.png', rivers=False, show_coasts=True)
    HexGridDraw(hex_grid, color_wind_end_year, '../output/map_wind_end_year.png', text_func=wind_display_end_year, rivers=False, show_coasts=True)
    HexGridDraw(hex_grid, color_wind_mid_year, '../output/map_wind_mid_year.png', text_func=wind_display_mid_year, rivers=False, show_coasts=True)
    # --- per-territory report ---
    for t in hex_grid.territories:
        print('Territory {}:\n\tSize: {}\n\tColor: {}\n\tLandlocked: {}\n\tAverage Temperature: {}\n\tAverage Moisture: {}\n\tNeighbors: {}'.format(t.id, t.size, t.color, t.landlocked, t.avg_temp, t.avg_moisture, t.neighbors))
        print('\tBiomes:')
        for b in t.biomes:
            print('\t - {}: {} - {}%'.format(b.get('biome').title, b.get('count'), round(((b.get('count') / t.size) * 100), 2)))
        print('\tGroups: {}'.format(len(t.groups)))
        for g in t.groups:
            print('\t\tHexes: {}, X: {}, Y: {}'.format(g.get('size'), g.get('x'), g.get('y')))
# NOTE(review): the bare expressions below look like truncated
# "@patch(API + '...')" mock decorators lost in extraction — confirm against
# the upstream test module.
((API + '.get_dataset_identity'), MagicMock(return_value={'dataset_rid': DATASET_RID, 'dataset_path': DATASET_PATH, 'last_transaction_rid': TRANSACTION_RID, 'last_transaction': {'rid': TRANSACTION_RID, 'transaction': {}}}))
((API + '.get_dataset_schema'))
((API + '.list_dataset_files'))
((API + '.is_dataset_in_trash'))
((API + '.get_dataset_last_transaction'))
((API + '.get_dataset_rid'))
def test_fetch_dataset(get_dataset_rid, get_dataset_last_transaction, is_dataset_in_trash, list_dataset_files, get_dataset_schema, mocker):
    """fetch_dataset must download once, then serve repeat fetches from the
    local cache; with a frozen cache the identity is resolved offline."""
    # Spies to observe whether the cached client hit Foundry or the cache.
    from_foundry_and_cache = mocker.spy(CachedFoundryClient, '_download_dataset_and_return_local_path')
    from_cache = mocker.spy(CachedFoundryClient, '_return_local_path_of_cached_dataset')
    online = mocker.spy(CachedFoundryClient, '_get_dataset_identity_online')
    offline = mocker.spy(CachedFoundryClient, '_get_dataset_identity_offline')
    get_dataset_rid.return_value = DATASET_RID
    get_dataset_last_transaction.return_value = {'rid': TRANSACTION_RID}
    is_dataset_in_trash.return_value = False
    # One-column DataFrame that the fake download writes out as parquet.
    df = get_spark_session().createDataFrame([[1]], 'col1:int')
    get_dataset_schema.return_value = {'fieldSchemaList': [{'type': 'INTEGER', 'name': 'col1', 'nullable': True, 'customMetadata': {}}], 'dataFrameReaderClass': 'com.palantir.foundry.spark.input.ParquetDataFrameReader', 'customMetadata': {'format': 'parquet', 'options': {}}}
    list_dataset_files.return_value = ['spark/dataset.parquet']

    def download_dataset_files_mock(self, dataset_rid: str, output_directory: str, files: list, view='master'):
        # Fake the Foundry download by writing the DataFrame locally.
        path = Path(output_directory).joinpath('spark')
        path.mkdir(parents=True, exist_ok=True)
        df.write.format('parquet').option('compression', 'snappy').save(path=os.fspath(path), mode='overwrite')
    from foundry_dev_tools.foundry_api_client import FoundryRestClient
    backup = FoundryRestClient.download_dataset_files
    FoundryRestClient.download_dataset_files = download_dataset_files_mock
    fdt = CachedFoundryClient()
    (path, dataset_identity) = fdt.fetch_dataset(DATASET_PATH, 'master')
    # First fetch: identity resolved online; data downloaded into the cache,
    # stored under <transaction_rid>.parquet.
    assert (path.split(os.sep, maxsplit=(- 1))[(- 1)] == (TRANSACTION_RID + '.parquet'))
    assert (dataset_identity == {'dataset_rid': DATASET_RID, 'last_transaction_rid': TRANSACTION_RID, 'last_transaction': {'rid': TRANSACTION_RID, 'transaction': {}}, 'dataset_path': DATASET_PATH})
    online.assert_called()
    online.reset_mock()
    offline.assert_not_called()
    offline.reset_mock()
    from_foundry_and_cache.assert_called()
    from_cache.assert_not_called()
    from_foundry_and_cache.reset_mock()
    from_cache.reset_mock()
    # Second fetch: served from the local cache, no download.
    (path2, dataset_identity2) = fdt.fetch_dataset(DATASET_PATH, 'master')
    from_foundry_and_cache.assert_not_called()
    from_cache.assert_called()
    online.reset_mock()
    offline.reset_mock()
    assert (path == path2)
    # Frozen cache: identity must be resolved offline.
    fdt = CachedFoundryClient({'transforms_freeze_cache': True})
    (path3, dataset_identity3) = fdt.fetch_dataset(DATASET_PATH, 'master')
    assert (path == path2 == path3)
    online.assert_not_called()
    online.reset_mock()
    offline.assert_called()
    offline.reset_mock()
    # Fetching by rid resolves to the same cached file.
    (path4, dataset_identity4) = fdt.fetch_dataset(DATASET_RID, 'master')
    assert (path == path2 == path3 == path4)
    # Restore the monkeypatched download method.
    FoundryRestClient.download_dataset_files = backup
def test_joined_inheritance():
    """Admin CRUD should work for a joined-table polymorphic child model."""
    app, db, admin = setup()
    with app.app_context():

        class Parent(db.Model):
            id = db.Column(db.Integer, primary_key=True)
            test = db.Column(db.String)
            discriminator = db.Column('type', db.String(50))
            __mapper_args__ = {'polymorphic_on': discriminator}

        class Child(Parent):
            __tablename__ = 'children'
            __mapper_args__ = {'polymorphic_identity': 'child'}
            id = db.Column(db.ForeignKey(Parent.id), primary_key=True)
            name = db.Column(db.String(100))

        db.create_all()
        admin.add_view(CustomModelView(Child, db.session, form_columns=['id', 'test', 'name']))
        client = app.test_client()
        # List view renders.
        assert client.get('/admin/child/').status_code == 200
        # Creating a child redirects back to the list.
        create_resp = client.post('/admin/child/new/', data=dict(id=1, test='foo', name='bar'))
        assert create_resp.status_code == 302
        # Edit view shows both the parent and child column values.
        edit_resp = client.get('/admin/child/edit/?id=1')
        assert edit_resp.status_code == 200
        page = edit_resp.data.decode('utf-8')
        assert 'foo' in page
        assert 'bar' in page
class Rule(object):
    """Audit-logging rule: verifies a project's audit config enables the
    rule's log types for its service and has no unexpected exemptions."""

    # Violation record yielded by find_violations.
    RuleViolation = collections.namedtuple('RuleViolation', ['resource_type', 'resource_id', 'resource_name', 'full_name', 'rule_name', 'rule_index', 'violation_type', 'service', 'log_type', 'unexpected_exemptions', 'resource_data'])

    def __init__(self, rule_name, rule_index, rule):
        # `rule` is the parsed rule dict with 'service', 'log_types' and
        # 'allowed_exemptions' keys.
        self.rule_name = rule_name
        self.rule_index = rule_index
        self.rule = rule

    def find_violations(self, project, audit_config):
        """Yield a RuleViolation for each missing log type or unexpected
        exemption in ``audit_config`` for ``project``."""
        service = self.rule['service']
        for log_type in self.rule['log_types']:
            configs = audit_config.service_configs
            # The log type is satisfied either by the specific service entry
            # or by the catch-all allServices entry.
            if ((log_type not in configs.get(service, {})) and (log_type not in configs.get(IamAuditConfig.ALL_SERVICES, {}))):
                (yield self.RuleViolation(resource_type=project.type, resource_id=project.id, resource_name=project.display_name, full_name=project.full_name, rule_name=self.rule_name, rule_index=self.rule_index, violation_type=VIOLATION_TYPE, service=service, log_type=log_type, unexpected_exemptions=None, resource_data=project.data))
            else:
                # Determine which configured service entries this rule's
                # exemption check applies to.
                if (service == IamAuditConfig.ALL_SERVICES):
                    applicable_services = list(configs.keys())
                elif (IamAuditConfig.ALL_SERVICES in configs):
                    applicable_services = [service, IamAuditConfig.ALL_SERVICES]
                else:
                    applicable_services = [service]
                for applicable_service in applicable_services:
                    # Exemptions present in the config but absent from the
                    # rule's allow-list are violations.
                    unexpected_exemptions = sorted((configs[applicable_service].get(log_type, set()) - self.rule['allowed_exemptions']))
                    if unexpected_exemptions:
                        (yield self.RuleViolation(resource_name=project.display_name, resource_type=project.type, resource_id=project.id, full_name=project.full_name, rule_name=self.rule_name, rule_index=self.rule_index, violation_type=VIOLATION_TYPE, service=applicable_service, log_type=log_type, unexpected_exemptions=tuple(unexpected_exemptions), resource_data=project.data))
class OptionPlotoptionsTimelineSonificationDefaultspeechoptionsMappingVolume(Options):
    """Generated option accessors for timeline sonification default speech
    volume mapping.

    NOTE(review): every option appears as two same-named methods (getter then
    setter); the @property / @<name>.setter decorators were most likely lost
    in extraction — confirm against the generated source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
('This method will be removed in Falcon 4.0.')
def compile_uri_template(template):
    """Compile a URI template into its field names and a matching regex.

    Returns a ``(fields, pattern)`` tuple where ``fields`` is the set of
    template parameter names and ``pattern`` is a compiled, case-insensitive
    regex matching a whole path, capturing each parameter as a named group.

    Raises TypeError for non-string input and ValueError for templates that
    do not start with '/' or contain '//'.
    """
    if not isinstance(template, str):
        raise TypeError('uri_template is not a string')
    if not template.startswith('/'):
        raise ValueError("uri_template must start with '/'")
    if '//' in template:
        raise ValueError("uri_template may not contain '//'")
    # Normalize away a trailing slash, except for the bare root template.
    if template.endswith('/') and template != '/':
        template = template[:(- 1)]
    expression_pattern = '{([a-zA-Z]\\w*)}'
    fields = set(re.findall(expression_pattern, template))
    # Escape regex metacharacters in the literal parts of the template, then
    # turn each {field} expression into a named capture group.
    escaped = re.sub('[\\.\\(\\)\\[\\]\\?\\*\\+\\^\\|]', '\\\\\\g<0>', template)
    pattern = re.sub(expression_pattern, '(?P<\\1>[^/]+)', escaped)
    pattern = '\\A' + pattern + '\\Z'
    return fields, re.compile(pattern, re.IGNORECASE)
def _convert_poll_options(poll_options): if (poll_options is None): return None elif (len(poll_options) == 0): return [] elif isinstance(poll_options[0], str): return poll_options elif isinstance(poll_options[0], types.PollOption): return [option.text for option in poll_options] else: return poll_options
def get_image_size(source: ImageSizeInput) -> ImageSize:
    """Resolve ``source`` to an ImageSize: pass instances through, look up
    preset names, and reject anything else with a TypeError."""
    if isinstance(source, ImageSize):
        return source
    if isinstance(source, str):
        try:
            return IMAGE_SIZE_PRESETS[source]
        except KeyError:
            pass
    raise TypeError(f'Invalid value for ImageSize: {source}')
def fortios_firewall(data, fos, check_mode):
    """Run the firewall internet-service-owner task and map the FortiOS
    response to Ansible's (failed, changed, result, diff) shape."""
    # Propagate the member-operation mode for this endpoint.
    fos.do_member_operation('firewall', 'internet-service-owner')
    if data['firewall_internet_service_owner']:
        resp = firewall_internet_service_owner(data, fos, check_mode)
    else:
        # No task body supplied: abort the module run.
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_internet_service_owner'))
    if check_mode:
        # In check mode the helper already returns the simulated result tuple.
        return resp
    # (failed, changed, raw response, empty diff); 'revision_changed' is
    # honored when present, otherwise a successful call counts as changed.
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
class test(testing.TestCase):

    def test_simple(self):
        # Run the example with 10 elements and compare the solution vector
        # 'u' against a stored compressed reference string (decoded by
        # assertAlmostEqual64 — presumably base64/zlib; confirm in the
        # testing helper).
        args = main(nelems=10)
        self.assertAlmostEqual64(args['u'], '\n eNp9zrENwCAMBEBGYQJ444o2ozAAYgFmYhLEFqxAmye1FUtf+PSy7Jw9J6yoKGiMYsUTrq44kaVKZ7JM\n +lWlDdlymEFXXC2o3H1C8mmzXz5t6OwhPfTDO+2na9+1f7D/teYFdsk5vQ==')
def test_dataclass_complex_transform(two_sample_inputs):
    """Round-tripping MyInput through the type engine must preserve the
    apriori_config field, for single values and for lists."""
    sample_a, sample_b = two_sample_inputs
    ctx = FlyteContextManager.current_context()
    scalar_type = TypeEngine.to_literal_type(MyInput)
    # Two successive python -> literal -> python round trips.
    value = sample_a
    for _ in range(2):
        literal = TypeEngine.to_literal(ctx, value, MyInput, scalar_type)
        assert literal.scalar.generic['apriori_config'] is not None
        value = TypeEngine.to_python_value(ctx, literal, MyInput)
        assert value.apriori_config is not None
    # A list of inputs keeps the field on every element.
    list_type = TypeEngine.to_literal_type(List[MyInput])
    literal_list = TypeEngine.to_literal(ctx, [sample_a, sample_b], List[MyInput], list_type)
    assert literal_list.collection.literals[0].scalar.generic['apriori_config'] is not None
    assert literal_list.collection.literals[1].scalar.generic['apriori_config'] is not None
def build_ryu_args(argv):
    """Translate faucet/gauge CLI arguments into an osken-manager argv list.

    Returns [] after printing the version when --version was requested.
    """
    args = parse_args(argv[1:])
    if args.version:
        print_version()
        return []
    prog = os.path.basename(argv[0])
    ryu_args = []
    # Simple pass-through boolean flags, in a fixed order.
    for flag, enabled in (('--use-stderr', args.use_stderr), ('--use-syslog', args.use_syslog), ('--verbose', args.verbose)):
        if enabled:
            ryu_args.append(flag)
    # Forward ryu_* options as --<name>=<value>, skipping the app list and
    # any config file path that does not exist.
    for arg, val in vars(args).items():
        if not val or not arg.startswith('ryu'):
            continue
        if arg == 'ryu_app_lists':
            continue
        if arg == 'ryu_config_file' and not os.path.isfile(val):
            continue
        option_name = arg.replace('ryu_', '').replace('_', '-')
        ryu_args.append('--%s=%s' % (option_name, val))
    # Choose the application: gauge when requested by flag or program name.
    if args.gauge or os.path.basename(prog) == 'gauge':
        ryu_args.append('faucet.gauge')
    else:
        ryu_args.append('faucet.faucet')
    if args.ryu_app_lists:
        ryu_args.extend(args.ryu_app_lists)
    ryu_args.insert(0, 'osken-manager')
    return ryu_args
def test_dimensions_tuple():
    """A multi-dimensional channel must come back as a 3-element array whose
    rows match the validated reference values."""
    fpath = 'data/chap4-7/iflr/multidimensions-validated.dlis'
    with dlis.load(fpath) as (f, *_):
        frame = f.object('FRAME', 'FRAME-VALIDATE', 10, 0)
        curves = frame.curves()
        channel = curves[0][1]
        assert channel.size == 3
        expected_rows = [(56, 0.0625, 0.0625), (43, 0.0625, 0.0625), (71, 0.5, 0.5)]
        for row_index, expected in enumerate(expected_rows):
            assert channel[row_index] == expected
class TestHCTSerialize(util.ColorAssertsPyTest):
    """Serialization round trips for the HCT color space."""

    # (input color, to_string options, expected serialization)
    COLORS = [('color(--hct 50 30 75 / 0.5)', {}, 'color(--hct 50 30 75 / 0.5)'),
              ('color(--hct 50 30 75)', {'alpha': True}, 'color(--hct 50 30 75 / 1)'),
              ('color(--hct 50 30 75 / 0.5)', {'alpha': False}, 'color(--hct 50 30 75)'),
              ('color(--hct 50 30 none)', {}, 'color(--hct 50 30 0)'),
              ('color(--hct 50 30 none)', {'none': True}, 'color(--hct 50 30 none)'),
              ('color(--hct 50 230 75)', {}, 'color(--hct 50 230 75)'),
              ('color(--hct 50 230 75)', {'fit': False}, 'color(--hct 50 230 75)'),
              ('color(--hct 207.41 -113.36 53.237)', {}, 'color(--hct 27.41 77.618 53.237)')]

    # NOTE(review): the bare ".parametrize" below looks like a truncated
    # "@pytest.mark.parametrize" decorator lost in extraction — confirm.
    .parametrize('color1,options,color2', COLORS)
    def test_colors(self, color1, options, color2):
        # Each input color serialized with the given options must match the
        # expected string exactly.
        self.assertEqual(Color(color1).to_string(**options), color2)
# NOTE(review): the leading ".parametrize" looks like a truncated
# "@pytest.mark.parametrize" decorator lost in extraction — confirm.
.parametrize('oper, expected', [('add', 12), ('sub', 8), ('div', 5), ('mul', 20), ('set', 2)])
def test_oper_single_point_in_polygon(oper, expected):
    """Applying `oper` with value 2 to a point (z=10) inside the polygon must
    yield `expected`, for both algorithm versions."""
    for ver in (1, 2):
        pol = Polygons(SMALL_POLY_INNER)
        poi = Points([(4.0, 4.0, 10.0)])
        poi.operation_polygons(pol, value=2, opname=oper, inside=True, version=ver)
        assert (poi.dataframe[poi.zname].values[0] == expected)
def test_get_variable_descriptions():
    """Annotated input types must carry their TypeAnnotation through
    serialization; unannotated inputs must not."""

    def input_annotation(task_spec, var_name):
        # Annotation attached directly to the named input's type.
        return task_spec.template.interface.inputs[var_name].type.annotation

    x_spec = get_serializable(entity_mapping, serialization_settings, x)
    annotation_a = input_annotation(x_spec, 'a')
    assert isinstance(annotation_a, TypeAnnotation)
    assert annotation_a.annotations['foo'] == {'bar': 1}
    assert input_annotation(x_spec, 'b') is None
    # For a list-typed input the annotation lives on the element type.
    y0_spec = get_serializable(entity_mapping, serialization_settings, y0)
    y0_annotation = y0_spec.template.interface.inputs['a'].type.collection_type.annotation
    assert isinstance(y0_annotation, TypeAnnotation)
    assert y0_annotation.annotations['foo'] == {'bar': 1}
    y1_spec = get_serializable(entity_mapping, serialization_settings, y1)
    y1_annotation = input_annotation(y1_spec, 'a')
    assert isinstance(y1_annotation, TypeAnnotation)
    assert y1_annotation.annotations['foo'] == {'bar': 1}
class MPPanZoom(BaseTool):
    """Multi-pointer tool that switches between pan (one active blob/contact)
    and zoom (two blobs) based on how many contacts are down."""

    pan = Instance(MPPanTool)
    zoom = Instance(MPDragZoom)
    event_state = Enum('normal', 'pan', 'zoom')
    # Blob bookkeeping is shared with (delegated to) the zoom tool.
    _blobs = Delegate('zoom')
    _moves = Delegate('zoom')

    def _dispatch_stateful_event(self, event, suffix):
        """Forward the event to both sub-tools, derive the state from the
        active blob count, and manage blob capture for this tool."""
        self.zoom.dispatch(event, suffix)
        event.handled = False
        self.pan.dispatch(event, suffix)
        if (len(self._blobs) == 2):
            self.event_state = 'zoom'
        elif (len(self._blobs) == 1):
            self.event_state = 'pan'
        elif (len(self._blobs) == 0):
            self.event_state = 'normal'
        else:
            # More than two blobs should be impossible.
            assert (len(self._blobs) <= 2)
        if (suffix == 'blob_up'):
            event.window.release_blob(event.bid)
        elif (suffix == 'blob_down'):
            # Release any prior capture, then capture the new blob for this
            # tool with the current coordinate transform.
            event.window.release_blob(event.bid)
            event.window.capture_blob(self, event.bid, event.net_transform())
            event.handled = True

    def _component_changed(self, old, new):
        # Keep both sub-tools pointed at the same component.
        self.pan.component = new
        self.zoom.component = new

    def _pan_default(self):
        return MPPanTool(self.component)

    def _zoom_default(self):
        return MPDragZoom(self.component)
class PSCCodesMixin():
    """Validation and splitting helpers for the `psc_codes` award filter.

    Accepts either a flat list of PSC codes, or an object with `require` /
    `exclude` arrays-of-arrays (code paths) whose first element must be a
    Tier1 PSC group name.
    """

    underscore_name = 'psc_codes'
    # One to four uppercase alphanumeric characters (full match enforced below).
    validation_pattern = compile('[A-Z0-9]{1,4}')

    # NOTE(review): defined with `cls` but no @classmethod decorator is visible
    # in this excerpt — confirm the decorator upstream.
    def validate_filter_values(cls, filter_values):
        """Raise UnprocessableEntityException unless filter_values is well formed."""
        if isinstance(filter_values, list):
            for code in filter_values:
                if ((not isinstance(code, str)) or (not cls.validation_pattern.fullmatch(code))):
                    raise UnprocessableEntityException(f"PSC codes must be one to four character uppercased alphanumeric strings. Offending code: '{code}'.")
        elif isinstance(filter_values, dict):
            for key in ('require', 'exclude'):
                code_lists = (filter_values.get(key) or [])
                if (not isinstance(code_lists, list)):
                    raise UnprocessableEntityException(f'require and exclude properties must be arrays of arrays.')
                for code_list in code_lists:
                    if (not isinstance(code_list, list)):
                        raise UnprocessableEntityException(f'require and exclude properties must be arrays of arrays.')
                    for (seq, code) in enumerate(code_list):
                        # First element of a path must name a Tier1 PSC group.
                        if ((seq == 0) and (code not in PSC_GROUPS)):
                            raise UnprocessableEntityException(f"Tier1 PSC filter values must be one of: {tuple(PSC_GROUPS)}. Offending code: '{code}'.")
                        elif ((seq > 0) and ((not isinstance(code, str)) or (not cls.validation_pattern.fullmatch(code)))):
                            raise UnprocessableEntityException(f"PSC codes must be one to four character uppercased alphanumeric strings. Offending code: '{code}'.")
        else:
            raise UnprocessableEntityException(f'psc_codes must be an array or object')

    # NOTE(review): also takes `cls` without a visible @classmethod decorator.
    def split_filter_values(cls, filter_values):
        """Normalize filter_values into a (require, exclude) pair of code-path lists."""
        if isinstance(filter_values, list):
            # A flat list means: require each code individually, exclude nothing.
            require = [[f] for f in filter_values]
            exclude = []
        elif isinstance(filter_values, dict):
            require = (filter_values.get('require') or [])
            exclude = (filter_values.get('exclude') or [])
        else:
            raise UnprocessableEntityException(f'psc_codes must be an array or object')
        return (require, exclude)

    # NOTE(review): no `self`/`cls` parameter — presumably a @staticmethod whose
    # decorator was lost in this excerpt; confirm upstream.
    def handle_tier1_names(code_lists):
        """Expand bare Tier1 group names into their configured search terms."""
        expanded_list = []
        for code_list in code_lists:
            if (code_list[0] in PSC_GROUPS):
                if (len(code_list) == 1):
                    # Bare group name: substitute its expanded terms.
                    expanded_list.extend(PSC_GROUPS[code_list[0]]['expanded_terms'])
                else:
                    # Drop the group name, keep the remaining code path.
                    expanded_list.append(code_list[1:])
            else:
                expanded_list.append(code_list)
        return expanded_list
def broadlink_a1_sensors_timer(scheduler, delay, device, mqtt_prefix):
    """Poll a Broadlink A1 sensor and publish its readings over MQTT, then re-arm.

    The timer re-schedules itself *before* polling so a failure never breaks the
    polling chain.  Reads module-level configuration (``cf``) and the MQTT
    client globals (``mqttc``, ``qos``, ``retain``).
    """
    # Re-arm first so any exception below cannot stop the timer chain.
    scheduler.enter(delay, 1, broadlink_a1_sensors_timer, [scheduler, delay, device, mqtt_prefix])
    try:
        text_values = cf.get('broadlink_a1_sensors_text_values', False)
        is_json = cf.get('broadlink_a1_sensors_json', False)
        sensors = (device.check_sensors() if text_values else device.check_sensors_raw())
        if is_json:
            # Publish all readings as a single JSON document.
            topic = (mqtt_prefix + 'sensors')
            value = json.dumps(sensors)
            logging.debug(("Sending A1 sensors '%s' to topic '%s'" % (value, topic)))
            mqttc.publish(topic, value, qos=qos, retain=retain)
        else:
            # Publish one topic per individual sensor reading.
            for name in sensors:
                topic = ((mqtt_prefix + 'sensor/') + name)
                value = str(sensors[name])
                logging.debug(("Sending A1 %s '%s' to topic '%s'" % (name, value, topic)))
                mqttc.publish(topic, value, qos=qos, retain=retain)
    except Exception:
        # Fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception and keep logging the traceback.
        logging.exception('Error')
class TestOFPBarrierRequest(unittest.TestCase):
    """Serialization round-trip test for OFPBarrierRequest."""

    def test_serialize(self):
        """serialize() must fill the header attributes and the wire buffer consistently."""
        c = OFPBarrierRequest(_Datapath)
        c.serialize()
        # Attributes populated by serialize().
        eq_(ofproto.OFP_VERSION, c.version)
        eq_(ofproto.OFPT_BARRIER_REQUEST, c.msg_type)
        eq_(ofproto.OFP_HEADER_SIZE, c.msg_len)
        eq_(0, c.xid)
        # The wire buffer must carry the same header values.
        fmt = ofproto.OFP_HEADER_PACK_STR
        res = unpack(fmt, six.binary_type(c.buf))
        eq_(ofproto.OFP_VERSION, res[0])
        eq_(ofproto.OFPT_BARRIER_REQUEST, res[1])
        eq_(len(c.buf), res[2])
        # Fix: assert the unpacked xid field (res[3]); the original re-checked
        # c.xid, which was already asserted above, leaving the wire xid untested.
        eq_(0, res[3])
def compute_max_saturation(a: float, b: float, lms_to_rgb: Matrix, ok_coeff: List[List[Vector]]) -> float:
    """Maximum saturation (S = C/L) for hue direction (a, b) before leaving the RGB gamut.

    Assumes (a, b) is a normalized Oklab hue vector (a^2 + b^2 == 1) — TODO
    confirm against callers.  Each ``ok_coeff[i][0]`` is a discriminant plane
    selecting which RGB channel clips first; ``ok_coeff[i][1]`` holds the
    polynomial coefficients for that channel; ``lms_to_rgb[i]`` is the matching
    LMS -> RGB matrix row.
    """
    # Select which component (red, green, else blue) hits the gamut edge first.
    if (alg.vdot(ok_coeff[0][0], [a, b]) > 1):
        # Red component.
        (k0, k1, k2, k3, k4) = ok_coeff[0][1]
        (wl, wm, ws) = lms_to_rgb[0]
    elif (alg.vdot(ok_coeff[1][0], [a, b]) > 1):
        # Green component.
        (k0, k1, k2, k3, k4) = ok_coeff[1][1]
        (wl, wm, ws) = lms_to_rgb[1]
    else:
        # Blue component.
        (k0, k1, k2, k3, k4) = ok_coeff[2][1]
        (wl, wm, ws) = lms_to_rgb[2]
    # Rough polynomial approximation of the saturation limit.
    sat = ((((k0 + (k1 * a)) + (k2 * b)) + (k3 * (a ** 2))) + ((k4 * a) * b))
    # Hue-dependent parts of the Oklab -> LMS transform (first column, which
    # multiplies L, is dropped by the [1:] slice).
    k_l = alg.vdot(OKLAB_TO_LMS3[0][1:], [a, b])
    k_m = alg.vdot(OKLAB_TO_LMS3[1][1:], [a, b])
    k_s = alg.vdot(OKLAB_TO_LMS3[2][1:], [a, b])
    # LMS cube-root values at the approximate saturation...
    l_ = (1.0 + (sat * k_l))
    m_ = (1.0 + (sat * k_m))
    s_ = (1.0 + (sat * k_s))
    # ...their cubes (the linear LMS values)...
    l = (l_ ** 3)
    m = (m_ ** 3)
    s = (s_ ** 3)
    # ...and their first and second derivatives with respect to S.
    l_ds = ((3.0 * k_l) * (l_ ** 2))
    m_ds = ((3.0 * k_m) * (m_ ** 2))
    s_ds = ((3.0 * k_s) * (s_ ** 2))
    l_ds2 = ((6.0 * (k_l ** 2)) * l_)
    m_ds2 = ((6.0 * (k_m ** 2)) * m_)
    s_ds2 = ((6.0 * (k_s ** 2)) * s_)
    # One Halley's-method step on f(S) = wl*l + wm*m + ws*s to refine the estimate:
    # S <- S - f*f' / (f'^2 - 0.5*f*f'').
    f = (((wl * l) + (wm * m)) + (ws * s))
    f1 = (((wl * l_ds) + (wm * m_ds)) + (ws * s_ds))
    f2 = (((wl * l_ds2) + (wm * m_ds2)) + (ws * s_ds2))
    sat = (sat - ((f * f1) / ((f1 ** 2) - ((0.5 * f) * f2))))
    return sat
def valve_switch_factory(logger, dp, pipeline, stack_manager):
    """Return the switch/flood manager implementation matching this datapath's config."""
    switch_args = dict(
        logger=logger,
        ports=dp.ports,
        vlans=dp.vlans,
        vlan_table=dp.tables['vlan'],
        vlan_acl_table=dp.tables.get('vlan_acl', None),
        eth_src_table=dp.tables['eth_src'],
        eth_dst_table=dp.tables['eth_dst'],
        eth_dst_hairpin_table=dp.tables.get('eth_dst_hairpin', None),
        flood_table=dp.tables['flood'],
        classification_table=dp.classification_table,
        pipeline=pipeline,
        use_group_table=dp.group_table,
        groups=dp.groups,
        combinatorial_port_flood=dp.combinatorial_port_flood,
        canonical_port_order=dp.canonical_port_order,
        restricted_bcast_arpnd=bool(dp.restricted_bcast_arpnd_ports()),
        has_externals=dp.has_externals,
        learn_ban_timeout=dp.learn_ban_timeout,
        learn_timeout=dp.timeout,
        learn_jitter=dp.learn_jitter,
        cache_update_guard_time=dp.cache_update_guard_time,
        idle_dst=dp.idle_dst,
        dp_high_priority=dp.high_priority,
        dp_highest_priority=dp.highest_priority,
        faucet_dp_mac=dp.faucet_dp_mac,
        drop_spoofed_faucet_mac=dp.drop_spoofed_faucet_mac,
    )
    if not dp.stack:
        # Non-stacking DP: use the flow-removed aware variant when idle timeouts are on.
        chosen = ValveSwitchFlowRemovedManager if dp.use_idle_timeout else ValveSwitchManager
        return chosen(**switch_args)
    # Stacking DP: optionally reflect floods off the stack root.
    if dp.stack.root_flood_reflection:
        chosen = ValveSwitchStackManagerReflection
        logger.info('Using stacking root flood reflection')
    else:
        chosen = ValveSwitchStackManagerNoReflection
        logger.info('Not using stacking root flood reflection')
    switch_args['stack_manager'] = stack_manager
    return chosen(**switch_args)
def downgrade():
    """Revert the ctl_dataset link on datasetconfig and purge auto-migrated rows."""
    # Undo the schema pieces in the reverse order of the upgrade.
    op.drop_constraint('datasetconfig_ctl_dataset_id_fkey', 'datasetconfig', type_='foreignkey')
    op.drop_index(op.f('ix_datasetconfig_ctl_dataset_id'), table_name='datasetconfig')
    op.drop_column('datasetconfig', 'ctl_dataset_id')
    # Delete only rows this migration created, identified by the fides_source tag.
    cleanup_stmt: TextClause = text("DELETE FROM ctl_datasets WHERE meta->>'fides_source'= :automigration_string")
    op.get_bind().execute(cleanup_stmt, {'automigration_string': AUTO_MIGRATED_STRING})
def test_multi_model_roundtrip_bytes():
    """Parameters must survive a to_bytes/from_bytes round trip."""
    model = chain(Maxout(5, 10, nP=2), Maxout(2, 3)).initialize()
    layer_shifts = ((model.layers[0], 1), (model.layers[1], 2))
    # Shift the biases in place so the snapshot holds known non-zero values.
    for layer, shift in layer_shifts:
        bias = layer.get_param('b')
        bias += shift
    snapshot = model.to_bytes()
    # Undo the shifts so a successful restore is observable.
    for layer, shift in layer_shifts:
        bias = layer.get_param('b')
        bias -= shift
    model = model.from_bytes(snapshot)
    assert model.layers[0].get_param('b')[(0, 0)] == 1
    assert model.layers[1].get_param('b')[(0, 0)] == 2
class OptionPlotoptionsAreaSonificationTracksMappingHighpassFrequency(Options):
    """Option wrapper for the highpass-filter frequency mapping of an area
    sonification track.

    NOTE(review): each getter below is immediately shadowed by a same-named
    setter definition, so as written the getters are unreachable; property
    decorators (@property / @<name>.setter) appear to have been lost from this
    excerpt — confirm against the code generator before relying on read access.
    """

    def mapFunction(self):
        # Getter: current mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: mapping function applied to the source value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data point property the parameter maps to.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: data point property the parameter maps to.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)

    def within(self):
        # Getter: the data bounds to map within.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: the data bounds to map within.
        self._config(value, js_type=False)
def install(module: str, options: str=None):
    """Install `module` into Blender's bundled Python with pip, bootstrapping pip if needed.

    `options` is passed through to `pip install` as one extra argument.
    Returns True when the module is importable afterwards (via is_installed).
    """
    executable = None
    try:
        # Locate the Python interpreter shipped inside Blender's prefix.
        for path in glob.glob('{}/bin/python*'.format(sys.exec_prefix)):
            if (os.access(path, os.X_OK) and (not path.lower().endswith('dll'))):
                executable = path
                logging.debug("Blender's Python interpreter: {}".format(executable))
                break
    except Exception as e:
        logging.error(e)
    if (executable is None):
        # Fix: bail out here; the original fell through and passed None as the
        # subprocess executable, raising TypeError instead of failing gracefully.
        logging.error("Unable to locate Blender's Python interpreter")
        return is_installed(module)
    if is_installed('ensurepip'):
        subprocess.call([executable, '-m', 'ensurepip'])
    elif (not is_installed('pip')):
        # Fix: the bootstrap URL literal was truncated/broken in the original.
        url = 'https://bootstrap.pypa.io/get-pip.py'
        filepath = '{}/get-pip.py'.format(os.getcwd())
        try:
            requests = importlib.import_module('requests')
            response = requests.get(url)
            with open(filepath, 'w') as f:
                f.write(response.text)
            subprocess.call([executable, filepath])
        except Exception as e:
            logging.error(e)
        finally:
            # Always clean up the downloaded bootstrap script.
            if os.path.isfile(filepath):
                os.remove(filepath)
    if (not is_installed(module)):
        try:
            if ((options is None) or (options.strip() == '')):
                subprocess.call([executable, '-m', 'pip', 'install', module])
            else:
                subprocess.call([executable, '-m', 'pip', 'install', options, module])
        except Exception as e:
            logging.error(e)
    return is_installed(module)
def _make_rich_rext(text: str, style: StyleType, formatter: RichHelpFormatter) -> Union[(Markdown, Text)]:
    """Render help text as Markdown or highlighted Text per the formatter's config."""
    cleaned = inspect.cleandoc(text)  # dedent/strip like docstring processing
    cfg = formatter.config
    if cfg.use_markdown:
        # Optionally expand :emoji: shortcodes before Markdown rendering.
        source = Emoji.replace(cleaned) if cfg.use_markdown_emoji else cleaned
        return Markdown(source, style=style)
    if cfg.use_rich_markup:
        # Honor rich console markup in the text.
        return formatter.highlighter(Text.from_markup(cleaned, style=style))
    return formatter.highlighter(Text(cleaned, style=style))
class T2CharStringTest(unittest.TestCase):
    """Tests for T2CharString bounds, bytecode (de)compilation, CFF number
    encoding and XML round-trips.

    Several numeric literals in this class were corrupted (syntax errors /
    truncated floats); they have been reconstructed from the CFF nibble and
    16.16 fixed-point encodings asserted alongside them.
    """

    @classmethod  # restored: the method takes `cls` and uses no instance state
    def stringToT2CharString(cls, string):
        """Build a T2CharString from a space-separated token string."""
        return T2CharString(program=stringToProgram(string), private=PrivateDict())

    def test_calcBounds_empty(self):
        cs = self.stringToT2CharString('endchar')
        bounds = cs.calcBounds(None)
        self.assertEqual(bounds, None)

    def test_calcBounds_line(self):
        cs = self.stringToT2CharString('100 100 rmoveto 40 10 rlineto -20 50 rlineto endchar')
        bounds = cs.calcBounds(None)
        self.assertEqual(bounds, (100, 100, 140, 160))

    def test_calcBounds_curve(self):
        cs = self.stringToT2CharString('100 100 rmoveto -50 -150 200 0 -50 150 rrcurveto endchar')
        bounds = cs.calcBounds(None)
        # The x extrema fall at t = (1 +/- sqrt(0.6)) / 2 of the cubic, giving
        # ~91.90525 / ~208.09475 (the original literals were truncated to 91./208.).
        for actual, expected in zip(bounds, (91.90525, (- 12.5), 208.09475, 100)):
            self.assertAlmostEqual(actual, expected, places=4)

    def test_charstring_bytecode_optimization(self):
        """Compile/decompile must canonicalize int-valued floats in both CFF and CFF2."""
        cs = self.stringToT2CharString('100.0 100 rmoveto -50.0 -150 200.5 0.0 -50 150 rrcurveto endchar')
        cs.isCFF2 = False
        cs.private._isCFF2 = False
        cs.compile()
        cs.decompile()
        self.assertEqual(cs.program, [100, 100, 'rmoveto', (- 50), (- 150), 200.5, 0, (- 50), 150, 'rrcurveto', 'endchar'])
        cs2 = self.stringToT2CharString('100.0 rmoveto -50.0 -150 200.5 0.0 -50 150 rrcurveto')
        cs2.isCFF2 = True
        cs2.private._isCFF2 = True
        cs2.compile(isCFF2=True)
        cs2.decompile()
        self.assertEqual(cs2.program, [100, 'rmoveto', (- 50), (- 150), 200.5, 0, (- 50), 150, 'rrcurveto'])

    def test_encodeFloat(self):
        # Each entry: (value, expected CFF real-number nibble encoding).
        testNums = [
            ((- 9.4), '1e e9 a4 ff'),  # reconstructed: original literal was truncated to -9.
            (9.4, '1e 9a 4f'),
            (456.8, '1e 45 6a 8f'),
            (0.0, '1e 0f'),
            ((- 0.0), '1e 0f'),
            (1.0, '1e 1f'),
            ((- 1.0), '1e e1 ff'),
            (9876537.0, '1e 98 76 53 7f'),
            # Reconstructed from their nibble encodings ('1.2345679E09', '.009876537'):
            (1234567890.0, '1e 1a 23 45 67 9b 09 ff'),
            (0.009876537, '1e a0 00 98 76 53 7f'),
            (98765.37, '1e 98 76 5a 37 ff'),
        ]
        for sample in testNums:
            encoded_result = encodeFloat(sample[0])
            self.assertEqual(hexenc(encoded_result), sample[1])
            decoded_result = read_realNumber(None, None, encoded_result, 1)
            self.assertEqual(decoded_result[0], float(('%.8g' % sample[0])))

    def test_encode_decode_fixed(self):
        # Each entry: (value, expected 16.16 fixed encoding, decoded value).
        # The first/third values and the last two decoded values were
        # reconstructed from the hex encodings (e.g. 0x01C8CCCD/65536).
        testNums = [
            ((- 9.399999999999999), 'ff ff f6 99 9a', (- 9.3999939)),
            ((- 9.4), 'ff ff f6 99 9a', (- 9.3999939)),
            (9.399999999999999, 'ff 00 09 66 66', 9.3999939),
            (9.4, 'ff 00 09 66 66', 9.3999939),
            (456.8, 'ff 01 c8 cc cd', 456.80000305),
            ((- 456.8), 'ff fe 37 33 33', (- 456.80000305)),
        ]
        for (value, expected_hex, expected_float) in testNums:
            encoded_result = encodeFixed(value)
            self.assertEqual(hexenc(encoded_result), expected_hex)
            decoded_result = read_fixed1616(None, None, encoded_result, 1)
            self.assertAlmostEqual(decoded_result[0], expected_float)

    def test_toXML(self):
        program = ['107 53.4004 166.199 hstem', '174.6 163.801 vstem', '338.4 142.8 rmoveto', '28 0 21.9 9 15.8 18 15.8 18 7.9 20.79959 0 23.6 rrcurveto', 'endchar']
        cs = self.stringToT2CharString(' '.join(program))
        self.assertEqual(getXML(cs.toXML), program)

    def test_fromXML(self):
        cs = T2CharString()
        for (name, attrs, content) in parseXML(['<CharString name="period"> 338.4 142.8 rmoveto', '  28 0 21.9 9 15.8 18 15.8 18 7.9 20.79959 0 23.6 rrcurveto', '  endchar</CharString>']):
            cs.fromXML(name, attrs, content)
        # First two values reconstructed from 338.4/142.8 via 16.16 rounding
        # (the original literals were lost): 338.4 -> 338.3999939, 142.8 -> 142.8000031.
        expected_program = [338.3999939, 142.8000031, 'rmoveto', 28, 0, 21.8999939, 9, 15.8000031, 18, 15.8000031, 18, 7.8999939, 20.7995911, 0, 23.6000061, 'rrcurveto', 'endchar']
        self.assertEqual(len(cs.program), len(expected_program))
        for (arg, expected_arg) in zip(cs.program, expected_program):
            if isinstance(arg, str):
                self.assertIsInstance(expected_arg, str)
                self.assertEqual(arg, expected_arg)
            else:
                self.assertNotIsInstance(expected_arg, str)
                self.assertAlmostEqual(arg, expected_arg)

    def test_pen_closePath(self):
        # A stroke without an explicit closepath must still close via the pen.
        cs = self.stringToT2CharString('100 100 rmoveto -50 -150 200 0 -50 150 rrcurveto')
        pen = RecordingPen()
        cs.draw(pen)
        self.assertEqual(pen.value[(- 1)], ('closePath', ()))
def test_default_ens_path(tmpdir):
    """The default ENSPATH must equal an explicitly configured 'storage' path, both from file and dict."""
    with tmpdir.as_cwd():
        config_file = 'test.ert'
        with open(config_file, 'w', encoding='utf-8') as handle:
            handle.write('\nNUM_REALIZATIONS 1\n ')
        default_ens_path = ErtConfig.from_file(config_file).ens_path

        # Append the explicit default and re-parse.
        with open(config_file, 'a', encoding='utf-8') as handle:
            handle.write('\nENSPATH storage\n ')
        explicit_ens_path = ErtConfig.from_file(config_file).ens_path

        assert default_ens_path == explicit_ens_path

        # Same check when configuring through a dict.
        config_dict = {
            ConfigKeys.NUM_REALIZATIONS: 1,
            'ENSPATH': os.path.join(os.getcwd(), 'storage'),
        }
        assert ErtConfig.from_dict(config_dict).ens_path == config_dict['ENSPATH']
class PyOuts():
    """Renders a page object to its various output targets (Jupyter, static
    HTML files, jsFiddle/codepen exports, web frameworks, markdown...).

    Holds the page, rendering options and requireJS/Jupyter state shared by
    the different exporters.
    """

    def __init__(self, page: Optional[primitives.PageModel]=None, options: Optional[dict]=None):
        (self.page, self._options) = (page, options)
        # NOTE(review): `html_tmpl` is a local here, not `self.html_tmpl` — the
        # attribute is only set later by jupyterlab()/jupyter()/html(); confirm
        # whether the `self.` prefix was lost.
        (self.excluded_packages, html_tmpl) = (None, HtmlTmplBase.JUPYTERLAB)
        # requireJS payload/attrs and whether we are rendering inside a notebook cell.
        (self.__requireJs, self.__requireJs_attrs, self.__jupyter_cell) = (None, {}, False)

    def _to_html_obj(self, htmlParts: Optional[List[str]]=None, cssParts: Optional[Dict[(str, Any)]]=None, split_js: bool=False):
        """Collect HTML, CSS and JS fragments for every managed component.

        Returns a dict with cssStyle/cssContainer/content/jsFrgsCommon/jsFrgs/
        cssImports/jsImports keys used by the output templates.
        (`split_js` is accepted but not referenced in this body.)
        """
        order_components = list(self.page.components.keys())
        if (htmlParts is None):
            (htmlParts, cssParts) = ([], {})
        # First pass: gather component HTML and class-level CSS.
        for component_id in order_components:
            component = self.page.components[component_id]
            if (component.name == 'Body'):
                cssParts.update(component.style.get_classes_css())
                continue
            if component.options.managed:
                htmlParts.append(component.html())
            # In a notebook cell, release the component so it is not rebuilt twice.
            if self.__jupyter_cell:
                component.options.managed = False
            cssParts.update(component.style.get_classes_css())
        # Collect the page-level JavaScript fragments.
        (onloadParts, onloadPartsCommon) = (list(self.page.properties.js.frgs), {})
        for (data_id, data) in self.page._props.get('data', {}).get('sources', {}).items():
            onloadParts.append(('var data_%s = %s' % (data_id, json.dumps(data))))
        for (k, v) in self.page._props.get('js', {}).get('functions', {}).items():
            pmt = (('(%s)' % ', '.join(list(v['pmt']))) if ('pmt' in v) else '{}')
            onloadParts.append(('function %s%s{%s}' % (k, pmt, v['content'].strip())))
        for (c, d) in self.page._props.get('js', {}).get('configs', {}).items():
            onloadParts.append(str(d))
        for (c, d) in self.page._props.get('js', {}).get('datasets', {}).items():
            onloadParts.append(d)
        for b in self.page._props.get('js', {}).get('builders', []):
            onloadParts.append(b)
        for b in self.page._props.get('js', {}).get('builders_css', []):
            onloadParts.append(b)
        # Second pass: wire up component event handlers (keyboard / mouse).
        for component_id in order_components:
            component = self.page.components[component_id]
            if (component.name == 'Body'):
                for (event, source_funcs) in component._browser_data['keys'].items():
                    for (source, event_funcs) in source_funcs.get_event().items():
                        str_funcs = JsUtils.jsConvertFncs(event_funcs['content'], toStr=True, profile=event_funcs.get('profile', False))
                        onloadParts.append(("%s.addEventListener('%s', function(event){%s})" % (source, event, str_funcs)))
                continue
            onloadParts.extend(component._browser_data['component_ready'])
            for (event, source_funcs) in component._browser_data['mouse'].items():
                for (source, event_funcs) in source_funcs.items():
                    func_args = (['event'] + event_funcs.get('args', []))
                    str_funcs = JsUtils.jsConvertFncs(event_funcs['content'], toStr=True, profile=event_funcs.get('profile', False))
                    if ('sub_items' in event_funcs):
                        # Delegated handler on sub-items (jQuery-style .on()).
                        onloadParts.append(("%s.on('%s', '%s', function(%s){%s})" % (source, event, event_funcs['sub_items'], ', '.join(func_args), str_funcs)))
                    else:
                        onloadParts.append(("%s.%s('%s', function(%s){%s})" % (source, event_funcs.get('fncType', 'addEventListener'), event, ', '.join(func_args), str_funcs)))
            for (event, source_funcs) in component._browser_data['keys'].items():
                for (source, event_funcs) in source_funcs.get_event().items():
                    str_funcs = JsUtils.jsConvertFncs(event_funcs['content'], toStr=True, profile=event_funcs.get('profile', False))
                    onloadParts.append(("%s.addEventListener('%s', function(event){%s})" % (source, event, str_funcs)))
        # Page-level onReady fragments and global events.
        for on_ready_frg in self.page._props.get('js', {}).get('onReady', []):
            onloadParts.append(on_ready_frg)
        for (event, source_funcs) in self.page._props.get('js', {}).get('events', []).items():
            for (source, event_funcs) in source_funcs.get_event().items():
                str_funcs = JsUtils.jsConvertFncs(event_funcs['content'], toStr=True)
                onloadParts.append(("%s.addEventListener('%s', function(event){%s})" % (source, event, str_funcs)))
        if (self.page is not None):
            import_mng = self.page.imports
        else:
            import_mng = Imports.ImportManager(page=self.page)
        # Publish the page constructors/options as an inline base64 JS import.
        self.page.jsLocalImports.add(('data:text/js;base64,%s' % Imports.string_to_base64(JsGlobals.set_global_options(self.page._props.get('js', {}).get('constructors', {}), self.page.properties.js.get_init_options()))))
        # Expose requireJS exports as plain window globals before anything runs.
        onloadParts.insert(0, 'if(window.exports){Object.keys(window.exports).forEach(function(key){window[key] = window.exports[key]})}')
        results = {'cssStyle': ('%s\n%s' % ('\n'.join([v for v in cssParts.values()]), self.page.properties.css.text)), 'cssContainer': ';'.join([('%s:%s' % (k, v)) for (k, v) in self.page._props.get('css', {}).get('container', {}).items()]), 'content': '\n'.join(htmlParts), 'jsFrgsCommon': onloadPartsCommon, 'jsFrgs': ';'.join(onloadParts), 'cssImports': import_mng.cssResolve(self.page.cssImport, self.page.cssLocalImports, excluded=self.excluded_packages), 'jsImports': import_mng.jsResolve(self.page.jsImports, self.page.jsLocalImports, excluded=self.excluded_packages)}
        return results

    def _repr_html_(self):
        """IPython/Jupyter rich display hook: render the page with requireJS paths."""
        if (self.__requireJs is not None):
            results = self.__requireJs
        else:
            results = self._to_html_obj()
        if (self.page is not None):
            import_manager = self.page.imports
        else:
            import_manager = Imports.ImportManager(page=self.page)
        require_js = import_manager.to_requireJs(results, self.excluded_packages)
        lib_paths = []
        for (k, p) in require_js['paths'].items():
            # Local (inline) modules are not added to the requireJS path map.
            if (not k.startswith("'local_")):
                lib_paths.append(("%s: '%s'" % (k, p)))
        results['paths'] = ('{%s}' % ', '.join(lib_paths))
        results['jsFrgs_in_req'] = require_js['jsFrgs']
        if self.__requireJs_attrs:
            results.update(self.__requireJs_attrs)
        results['pageId'] = id(self.page)
        return (self.html_tmpl.strip() % results)

    def jupyterlab(self):
        """Configure the output for JupyterLab and return self (chainable)."""
        self.__jupyter_cell = True
        self.html_tmpl = HtmlTmplBase.JUPYTERLAB
        # Bootstrap clashes with the JupyterLab styling.
        self.excluded_packages = ['bootstrap']
        return self

    def jupyter(self, verbose: bool=False, requireJs: Optional[dict]=None, closure: bool=True, requirejs_path: Optional[dict]=None, requirejs_func: Optional[dict]=None):
        """Configure the output for classic Jupyter notebooks and return self.

        When `closure` is true, a JS snippet is injected to add collapse/re-run
        icons to the output cell. Packages already bundled with the notebook
        server are excluded from the imports.
        """
        if closure:
            self.page._props['js']['onReady'].append(('\nvar outputCell = document.getElementById("result_cell_%(pageId)s").parentNode.parentNode.parentNode.parentNode.getElementsByClassName("output_prompt")[0];\ndocument.getElementById("result_cell_%(pageId)s").parentNode.parentNode.parentNode.parentNode.getElementsByClassName("out_prompt_overlay")[0].display = "none";\nvar icon = outputCell.getElementsByTagName("div")[0];\nif (typeof icon === "undefined"){\n let iconCell = document.createElement(\'div\'); iconCell.innerHTML = "&#8722;"; \n iconCell.style["font-size"] = "25px"; iconCell.style.cursor = "pointer"; iconCell.style.color = "black"; iconCell.style.margin = "5px 0";\n let iconRunCell = document.createElement(\'div\'); iconRunCell.innerHTML = "&nbsp;"; iconRunCell.style.margin = "5px 0";\n iconRunCell.style["font-size"] = "15px"; iconRunCell.style.cursor = "pointer"; iconRunCell.style["line-height"] = "15px";\n iconRunCell.className = "fa-step-forward fa"; iconRunCell.style.color = "black";\n iconRunCell.addEventListener("click", function(){\n this.parentNode.parentNode.parentNode.parentNode.parentNode.getElementsByClassName(\'run_this_cell\')[0].dispatchEvent(new Event(\'click\'))\n });\n iconCell.addEventListener("click", function(){\n if(this.innerText == ""){\n this.parentNode.parentNode.parentNode.parentNode.parentNode.getElementsByClassName(\'input\')[0].style.display = \'none\';\n this.parentNode.parentNode.parentNode.parentNode.parentNode.getElementsByClassName(\'output_prompt\')[0].firstChild.style.display = \'none\';\n this.innerHTML = "&#43;";\n } else {\n this.parentNode.parentNode.parentNode.parentNode.parentNode.getElementsByClassName(\'input\')[0].style.display = \'flex\';\n this.parentNode.parentNode.parentNode.parentNode.parentNode.getElementsByClassName(\'output_prompt\')[0].firstChild.style.display = \'block\';\n this.innerHTML = "&#8722;";}\n });\n outputCell.appendChild(iconRunCell); outputCell.appendChild(iconCell); iconCell.dispatchEvent(new Event(\'click\'));\n}; \n' % {'pageId': id(self.page)}))
        self.html_tmpl = HtmlTmplBase.JUPYTER
        (self.__requireJs, self.__jupyter_cell) = (requireJs, True)
        if (requirejs_path is not None):
            self.__requireJs_attrs['paths'] = requirejs_path
        if (requirejs_func is not None):
            self.__requireJs_attrs['jsFrgs_in_req'] = requirejs_func
        try:
            # Skip packages already shipped with the notebook server.
            import notebook
            self.excluded_packages = []
            nb_path = os.path.split(notebook.__file__)[0]
            for f in os.listdir(os.path.join(nb_path, 'static', 'components')):
                if (f in ['font-awesome', 'moment']):
                    continue
                if verbose:
                    print(('Package already available in Jupyter: %s' % f))
                self.excluded_packages.append(Imports.NOTEBOOK_MAPPING.get(f, f))
        except Exception as err:
            # No notebook package available: fall back to a static exclusion list.
            self.excluded_packages = ['bootstrap', 'jquery', 'moment', 'jqueryui', 'mathjax']
        return self

    def w3cTryIt(self, path: Optional[str]=None, name: Optional[str]=None):
        """Write the page as a single HTML file suitable for the W3Schools Try-It editor.

        Returns the path of the written file (name defaults to a timestamp).
        """
        if (path is None):
            path = os.path.join(os.getcwd(), 'outs', 'w3schools')
        else:
            path = os.path.join(path, 'w3schools')
        if (not os.path.exists(path)):
            os.makedirs(path)
        if (name is None):
            name = int(time.time())
        file_path = os.path.join(path, ('%s.html' % name))
        with open(file_path, 'w') as f:
            f.write(self._repr_html_())
        return file_path

    def codepen(self, path: Optional[str]=None, name: Optional[str]=None):
        """Export the page as separate html/css/js files for CodePen."""
        self.jsfiddle(path, name, framework='codepen')

    def jsfiddle(self, path: Optional[str]=None, name: Optional[str]=None, framework: str='jsfiddle'):
        """Export the page as separate html/css/js files for jsFiddle-like sites.

        Returns the output directory (name defaults to a timestamp).
        """
        if (path is None):
            path = os.path.join(os.getcwd(), 'outs', framework)
        else:
            path = os.path.join(path, framework)
        if (not os.path.exists(path)):
            os.makedirs(path, exist_ok=True)
        if os.path.exists(path):
            if (name is None):
                name = int(time.time())
            results = self._to_html_obj()
            with open(os.path.join(path, ('%s.js' % name)), 'w') as f:
                f.write(results['jsFrgs'])
            with open(os.path.join(path, ('%s.html' % name)), 'w') as f:
                f.write(('%s\n' % results['cssImports']))
                f.write(('%s\n' % results['jsImports']))
                f.write(results['content'])
            with open(os.path.join(path, ('%s.css' % name)), 'w') as f:
                f.write(results['cssStyle'])
        return path

    def html_file(self, path: Optional[str]=None, name: Optional[str]=None, options: Optional[dict]=None, print_paths: bool=False, run_id: Union[(bool, str)]=True):
        """Write the page as a static HTML file, optionally splitting CSS/JS out.

        `run_id` controls the file-name suffix (True: timestamp, False: none,
        otherwise the given value). `options` supports split/minify/static_path/
        css_route/js_route. Returns the HTML file path.
        """
        from epyk import configs
        options = (options or {})
        if (path is None):
            path = os.path.join(os.getcwd(), 'outs')
        if (not os.path.exists(path)):
            os.makedirs(path)
        if (name is None):
            # Derive the file name from the config or the running script name.
            if configs.keys:
                name = self.page.json_config_file
            elif (run_id is True):
                name = ('%s_%s' % (os.path.basename(sys.argv[0])[:(- 3)], int(time.time())))
            elif (run_id is False):
                name = os.path.basename(sys.argv[0])[:(- 3)]
            else:
                name = ('%s_%s' % (os.path.basename(sys.argv[0])[:(- 3)], run_id))
        name = (name if (not name.endswith('.html')) else name[:(- 5)])
        html_file_path = os.path.join(path, ('%s.html' % name))
        htmlParts = []
        cssParts = dict(self.page.body.style.get_classes_css())
        order_components = list(self.page.components.keys())
        for component_id in order_components:
            component = self.page.components[component_id]
            if (component.name == 'Body'):
                continue
            if component.options.managed:
                htmlParts.append(component.html())
            cssParts.update(component.style.get_classes_css())
        body = str(self.page.body.set_content(self.page, '\n'.join(htmlParts)))
        results = self._to_html_obj(htmlParts, cssParts, split_js=(options.get('split', False) in (True, 'js')))
        if options.get('split', False):
            # Write the CSS and/or JS into side files and reference them.
            static_path = path
            static_url = (self.page.imports.static_url or '.')
            if ((options['split'] is True) or (options['split'] == 'css')):
                css_filename = (('%s.min' % name) if options.get('minify', False) else name)
                results['cssImports'] = ('%s\n<link rel="stylesheet" href="%s/%s.css" type="text/css">\n\n' % (results['cssImports'], options.get('css_route', ('%s/css' % static_url)), css_filename))
                if (options.get('static_path') is not None):
                    static_path = os.path.join(path, options.get('static_path'))
                if (not os.path.exists(os.path.join(static_path, 'css'))):
                    os.makedirs(os.path.join(static_path, 'css'))
                css_file_path = os.path.join(static_path, 'css', ('%s.css' % css_filename))
                with open(css_file_path, 'w') as f:
                    if options.get('minify', False):
                        f.write(results['cssStyle'].replace('\n', ''))
                    else:
                        f.write(results['cssStyle'])
                results['cssStyle'] = ''
                if print_paths:
                    print('css', ('file:///%s' % css_file_path.replace('\\', '/')))
            if ((options['split'] is True) or (options['split'] == 'js')):
                js_filename = (('%s.min' % name) if options.get('minify', False) else name)
                body = ('%s\n\n<script language="javascript" type="text/javascript" src="%s/%s.js"></script>' % (body, options.get('js_route', ('%s/js' % static_url)), js_filename))
                if (not os.path.exists(os.path.join(static_path, 'js'))):
                    os.makedirs(os.path.join(static_path, 'js'))
                js_file_path = os.path.join(static_path, 'js', ('%s.js' % js_filename))
                with open(js_file_path, 'w') as f:
                    funcs = [JsLinter.parse(v, minify=options.get('minify', False)) for v in results['jsFrgsCommon'].values()]
                    f.write('\n\n'.join(funcs))
                if print_paths:
                    print('js', ('file:///%s' % js_file_path.replace('\\', '/')))
        # Inline web workers as dedicated script tags.
        for (js_id, wk_content) in self.page._props.get('js', {}).get('workers', {}).items():
            body += ('\n<script id="%s" type="javascript/worker">\n%s\n</script>' % (js_id, wk_content))
        with open(html_file_path, 'w') as f:
            results['body'] = body
            results['header'] = self.page.headers
            f.write((HtmlTmplBase.STATIC_PAGE % results))
        if print_paths:
            print('html', ('file:///%s' % html_file_path.replace('\\', '/')))
        if configs.keys:
            with open(os.path.join(path, ('%s.json' % name)), 'w') as fp:
                fp.write(configs.to_json())
        return html_file_path

    def web(self) -> dict:
        """Return the rendered fragments as a dict for embedding in another web page
        (body wrapped in a div instead of a body tag)."""
        html_parts = []
        css_parts = dict(self.page.body.style.get_classes_css())
        order_components = list(self.page.components.keys())
        for component_id in order_components:
            component = self.page.components[component_id]
            if (component.name == 'Body'):
                continue
            if component.options.managed:
                html_parts.append(component.html())
            css_parts.update(component.style.get_classes_css())
        body = str(self.page.body.set_content(self.page, '\n'.join(html_parts)))
        results = self._to_html_obj(html_parts, css_parts, split_js=True)
        results['body'] = body.replace('<body', '<div').replace('</body>', '</div>')
        return results

    def component(self, selector: str):
        # Placeholder: returns None (not implemented in this excerpt).
        return

    def publish(self, server: str, root_path: str, selector: str, alias: Optional[str]=None, target_folder: str='apps'):
        """Publish the page to a web framework project (Node, Deno, Angular, Vue,
        React or Svelte) and return the server wrapper used."""
        from epyk.web import angular, node, vue, react, deno, svelte
        if (server.upper() == 'NODE'):
            srv = node.Node(root_path, page=self.page)
        elif (server.upper() == 'DENO'):
            srv = deno.Deno(root_path, page=self.page)
        elif (server.upper() == 'ANGULAR'):
            srv = angular.Angular(root_path, page=self.page)
        elif (server.upper() == 'VUE'):
            srv = vue.VueJs(root_path, page=self.page)
        elif (server.upper() == 'REACT'):
            srv = react.React(root_path, page=self.page)
        elif (server.upper() == 'SVELTE'):
            srv = svelte.Svelte(root_path, page=self.page)
        else:
            raise ValueError(('Server type - %s - not recognised [Node, Deno, Angular, Vue, React, Svelte]' % server))
        srv.publish(alias, selector=selector, page=self.page, target_folder=target_folder)
        return srv

    def markdown_file(self, path: Optional[str]=None, name: Optional[str]=None):
        """Write components supporting to_markdown to a markdown (.amd) file.

        Returns the file path (name defaults to a timestamp).
        """
        if (path is None):
            path = os.path.join(os.getcwd(), 'outs', 'markdowns')
        else:
            path = os.path.join(path, 'markdowns')
        if (not os.path.exists(path)):
            os.makedirs(path)
        if os.path.exists(path):
            if (name is None):
                name = ('md_%s.amd' % int(time.time()))
            file_path = os.path.join(path, name)
            with open(file_path, 'w') as f:
                order_components = list(self.page.components.keys())
                for component_id in order_components:
                    component = self.page.components[component_id]
                    if (component.name == 'Body'):
                        continue
                    if hasattr(component, 'to_markdown'):
                        f.write(('%s\n' % component.to_markdown(component.vals)))
            return file_path

    def html(self):
        """Render the whole page as a standalone static HTML string."""
        self.html_tmpl = HtmlTmplBase.STATIC_PAGE
        results = self._to_html_obj()
        if (self.page is not None):
            import_mng = self.page.imports
        else:
            import_mng = Imports.ImportManager(page=self.page)
        require_js = import_mng.to_requireJs(results, self.excluded_packages)
        results['paths'] = ('{%s}' % ', '.join([("%s: '%s'" % (k, p)) for (k, p) in require_js['paths'].items()]))
        results['jsFrgs_in_req'] = require_js['jsFrgs']
        html_parts = []
        css_parts = dict(self.page.body.style.get_classes_css())
        results['cssStyle'] += '\n'.join(list(css_parts.values()))
        order_components = list(self.page.components.keys())
        for component_id in order_components:
            component = self.page.components[component_id]
            if (component.name == 'Body'):
                continue
            if component.options.managed:
                html_parts.append(component.html())
            css_parts.update(component.style.get_classes_css())
        body = str(self.page.body.set_content(self.page, '\n'.join(html_parts)))
        # Inline web workers as dedicated script tags.
        for (js_id, wk_content) in self.page._props.get('js', {}).get('workers', {}).items():
            body += ('\n<script id="%s" type="javascript/worker">\n%s\n</script>' % (js_id, wk_content))
        results['body'] = body
        results['header'] = self.page.headers
        return (self.html_tmpl.strip() % results)

    def browser(self):
        """Return the browser-output helpers bound to this exporter."""
        return OutBrowsers(self)
def check(username, password, scanid, uri):
    """Record a 'Weak Password' finding when the password occurs inside the
    username; otherwise defer to the dictionary-based weak-password check."""
    if password not in username:
        check_weak_password(password, scanid, uri)
        return
    # High-impact finding for this scan/URI; request/response fields not applicable.
    finding = {
        'id': 25,
        'scanid': scanid,
        'url': uri,
        'alert': 'Weak Password',
        'impact': 'High',
        'req_headers': 'NA',
        'req_body': 'NA',
        'res_headers': 'NA',
        'res_body': 'NA',
    }
    dbupdate.insert_record(finding)
def test_thread_events():
    """Events logged off the UI thread must still reach both callback types."""
    _init_events()
    ui_cb = UiCallback()
    normal_cb = NormalCallback()

    def _emit_from_worker():
        # Flag that we are off the UI thread before firing the event.
        on_ui_thread[0] = False
        event.log_event('test', ui_cb, None)

    worker = threading.Thread(target=_emit_from_worker)
    worker.start()
    worker.join()

    assert normal_cb.called is True
    assert normal_cb.on_ui_thread is False
    assert ui_cb.called is True

    ui_cb.destroy()
    normal_cb.destroy()
    _finish_events()
def main():
    """Parse an rtl_433 'ook' pulse capture (global `filen`) and write a .sub file.

    Relies on module globals: filen, verbose, _debug, args, rf_freq, MIN_PULSES,
    and the helpers skip_to_next() and gen_sub() defined elsewhere in the file.
    """
    ook_Headers = [';pulse data']
    pulse_samples = []   # list of parsed packets: {'header': {...}, 'data': [...]}
    dat_sample = None    # packet currently being filled, or None between packets
    if _debug:
        print(f'open {filen}')
    with open(filen, 'r', encoding='utf-8') as fd:
        # The first line must identify the file format.
        # NOTE(review): 'rtl_443' in the message below looks like a typo for
        # 'rtl_433' — user-facing text only, left untouched here.
        header = fd.readline().strip()
        if (header not in ook_Headers):
            print(f"Error: {filen} is not a 'rtl_443 ook' data file")
            sys.exit(1)
        for line in fd:
            # ';end' closes the current packet.
            if line.startswith(';end'):
                if _debug:
                    print('\n\ndat_sample', dat_sample)
                    print('pulse_samples', pulse_samples)
                if verbose:
                    print(f"Adding packet with {file_header['pulses']} pulses")
                dat_sample = None
                continue
            if (dat_sample is None):
                # Start a new packet record; header/data aliases are filled below.
                dat_sample = {}
                dat_sample['header'] = file_header = {}
                dat_sample['data'] = pulse_data = []
                pulse_samples.append(dat_sample)
            if (line.startswith(';ook') or line.startswith(';fsk')):
                a = line[1:].strip().split(None, 2)
                if a[1].isnumeric():
                    if (int(a[1]) < MIN_PULSES):
                        if verbose:
                            print(f'skipping packet with {a[1]} pulses')
                        skip_to_next(fd)
                        # NOTE(review): dat_sample was already appended above and is
                        # neither removed nor reset here, so the skipped packet seems
                        # to remain in pulse_samples — confirm skip_to_next() semantics.
                        continue
                    file_header['pulses'] = int(a[1])
                file_header['modulation'] = a[0]
            if (line[0] == ';'):
                # Any other ';key value' line becomes a header entry.
                a = line[1:].strip().split(None, 1)
                file_header[a[0]] = a[1]
                continue
            # Everything else is pulse data for the current packet.
            pulse_data.append(line.strip())
    print('Total packets in file', len(pulse_samples))
    sub_data = gen_sub(rf_freq, pulse_samples)
    if (_debug or (verbose > 2)):
        pprint.pprint(pulse_samples)
    # Output file: --outfname if given (forcing a .sub suffix), else input name.
    if args.outfname:
        outfilen = args.outfname
        if (not outfilen.endswith('.sub')):
            outfilen += '.sub'
    else:
        outfilen = (os.path.splitext(filen)[0] + '.sub')
    with open(outfilen, 'w', encoding='utf-8') as fd:
        print(sub_data, file=fd)
def example():
    """Build a demo flet Container showing theme inheritance: a child using
    the outer theme, one with an inherited override, and one fully unique."""
    outer_themed = ft.Container(
        content=ft.ElevatedButton('Outside Container theme button'),
        bgcolor=ft.colors.SURFACE_VARIANT,
        padding=20,
        width=300,
    )
    inherited = ft.Container(
        theme=ft.Theme(color_scheme=ft.ColorScheme(primary=ft.colors.PINK)),
        content=ft.ElevatedButton('Inherited theme button'),
        bgcolor=ft.colors.SURFACE_VARIANT,
        padding=20,
        width=300,
    )
    unique = ft.Container(
        theme=ft.Theme(color_scheme_seed=ft.colors.INDIGO),
        theme_mode=ft.ThemeMode.DARK,
        content=ft.ElevatedButton('Unique theme button'),
        bgcolor=ft.colors.SURFACE_VARIANT,
        padding=20,
        width=300,
    )
    root = ft.Container()
    root.theme = ft.Theme(color_scheme_seed=ft.colors.YELLOW)
    root.theme_mode = ft.ThemeMode.LIGHT
    root.content = ft.Column([outer_themed, inherited, unique])
    return root
def test_vectorize_forwarding(golden):
    """A cursor captured before scheduling must forward through the whole
    divide/bind/expand/lift/fission pipeline and match *golden*.

    NOTE(review): ``scal`` is declared without the usual ``@proc`` decorator;
    it looks like the decorator was lost in transit — confirm.
    """
    def scal(n: size, alpha: R, x: [R][n]):
        for i in seq(0, n):
            x[i] = (alpha * x[i])
    # Cursor to the store statement, captured before any rewrites.
    stmt = scal.find('x[_] = _')
    scal1 = divide_loop(scal, 'for i in _:_', 8, ('io', 'ii'), tail='cut')
    scal2 = bind_expr(scal1, [stmt.rhs().lhs()], 'alphaReg')
    scal3 = expand_dim(scal2, 'alphaReg', '8', 'ii')
    scal4 = lift_alloc(scal3, 'alphaReg')
    scal5 = fission(scal4, scal4.find('alphaReg[_] = _').after())
    # The original cursor, forwarded into the final procedure, must match.
    assert (str(scal5.forward(stmt)) == golden)
class TestStateUpdateMessage():
    """Unit tests for the state-update protocol message."""

    def test_message_consistency(self):
        """Well-formed INITIALIZE / APPLY / END messages pass consistency checks."""
        initial_amounts = {'FET': 100}
        initial_quantities = {'a_good': 2}
        fx_params = {'FET': 10.0}
        util_params = {'a_good': 20.0}
        assert StateUpdateMessage(
            performative=StateUpdateMessage.Performative.INITIALIZE,
            amount_by_currency_id=initial_amounts,
            quantities_by_good_id=initial_quantities,
            exchange_params_by_currency_id=fx_params,
            utility_params_by_good_id=util_params,
        )
        delta_amounts = {'FET': 10}
        delta_quantities = {'a_good': 1}
        message = StateUpdateMessage(
            performative=StateUpdateMessage.Performative.APPLY,
            amount_by_currency_id=delta_amounts,
            quantities_by_good_id=delta_quantities,
        )
        assert message._is_consistent()
        assert len(message.valid_performatives) == 3
        message = StateUpdateMessage(performative=StateUpdateMessage.Performative.END)
        assert message._is_consistent()

    def test_message_inconsistency(self):
        """An unknown constructor field must raise ValueError."""
        with pytest.raises(ValueError, match='Field .* is not supported'):
            StateUpdateMessage(
                performative=StateUpdateMessage.Performative.INITIALIZE,
                amount_by_currency_id={'FET': 100},
                quantities_by_good_id={'a_good': 2},
                exchange_params_by_currency_id={'UNKNOWN': 10.0},
                utility_params_by_good_id={'a_good': 20.0},
                non_exists_field='some value',
            )
class ProphetHandler(THBEventHandler):
    """At a player's prepare stage, optionally fires ProphetAction when the
    player has the Prophet skill and confirms the prompt."""

    interested = ['action_apply']

    def handle(self, evt_type, act):
        # Only react when a PrepareStage action is being applied.
        if evt_type != 'action_apply' or not isinstance(act, PrepareStage):
            return act
        player = act.target
        if not player.has_skill(Prophet):
            return act
        game = self.game
        # Ask the player whether to trigger the skill this turn.
        if game.user_input([player], ChooseOptionInputlet(self, (False, True))):
            game.process_action(ProphetAction(player, player))
        return act
def fortios_antivirus(data, fos):
    """Dispatch the antivirus/heuristic task and normalise the response into
    the Ansible result tuple ``(is_error, changed, response, diff)``."""
    fos.do_member_operation('antivirus', 'heuristic')
    if not data['antivirus_heuristic']:
        # fail_json aborts the module run; no response is produced past here.
        fos._module.fail_json(msg=('missing task body: %s' % 'antivirus_heuristic'))
    resp = antivirus_heuristic(data, fos)
    successful = is_successful_status(resp)
    # A successful call counts as "changed" unless FortiOS reports the
    # revision explicitly unchanged.
    changed = successful and (resp['revision_changed'] if 'revision_changed' in resp else True)
    return ((not successful), changed, resp, {})
def getstatusjson(st, globdata):
    """Build a Tasmota-compatible status JSON fragment for command *st*.

    *st* selects the section ('Status', 'StatusPRM', 'StatusNET', 'StatusSNS',
    'StatusSTS', 'StatusTIM', 'StatusMEM', 'StatusFWR'); *globdata* is the
    plugin instance providing switch/device state.  Returns the raw JSON
    fragment as a string (caller assembles the full reply).
    """
    rstr = ''
    if (st == 'Status'):
        rstr += (((('"Status": {"Module": ' + str(globdata.modtype)) + ',"DeviceName":"') + str(Settings.Settings['Name'])) + '",')
        rstr += (('"FriendlyName": ["' + str(globdata.datas[0][1])) + '"')
        # One friendly name per configured relay/switch.
        if (globdata.taskdevicepluginconfig[0] > 1):
            for n in range(1, globdata.taskdevicepluginconfig[0]):
                rstr += ((',"' + str(globdata.datas[n][1])) + '"')
        rstr += '],"Topic": "","ButtonTopic": "0","PowerOnState": 0,"LedState": 0,"SaveData": 1,"SaveState": 1,"ButtonRetain": 0,"PowerRetain": 0,'
        if getswitchstate(0, globdata):
            s = 1
        else:
            s = 0
        rstr += (('"Power": ' + str(s)) + '}')
    elif (st == 'StatusPRM'):
        # Mostly fixed values mimicking a real Tasmota device.
        rstr += '"StatusPRM": {"Baudrate": 115200,"GroupTopic": "","OtaUrl": "","Sleep": 0,"BootCount": 1,"SaveCount": 1,"SaveAddress": "FB000",'
        rstr += (('"Uptime": "' + rpieTime.getuptime(3)) + '"}')
    elif (st == 'StatusNET'):
        # Pick the primary network device, falling back to the secondary.
        try:
            defaultdev = Settings.NetMan.getprimarydevice()
            if (Settings.NetworkDevices[defaultdev].ip == ''):
                defaultdev = (- 1)
        except:
            defaultdev = (- 1)
        if (defaultdev == (- 1)):
            try:
                defaultdev = Settings.NetMan.getsecondarydevice()
            except:
                defaultdev = (- 1)
        rstr += '"StatusNET": {"Webserver": 2,"WifiConfig": 4,'
        rstr += (('"Hostname": "' + str(Settings.Settings['Name'])) + '",')
        rstr += (('"IPAddress": "' + str(Settings.NetworkDevices[defaultdev].ip)) + '",')
        rstr += (('"Gateway": "' + str(Settings.NetworkDevices[defaultdev].gw)) + '",')
        rstr += (('"Subnetmask": "' + str(Settings.NetworkDevices[defaultdev].mask)) + '",')
        dnss = Settings.NetworkDevices[defaultdev].dns.strip().split(' ')
        rstr += (('"DNSServer": "' + str(dnss[0])) + '",')
        rstr += (('"Mac": "' + str(Settings.NetworkDevices[defaultdev].mac).upper()) + '"}')
    elif (st == 'StatusSNS'):
        rstr += (('"StatusSNS": {"Time": "' + datetime.now().strftime('%Y-%m-%dT%H:%M:%S')) + '",')
        # Note: `st` is reused below as the ON/OFF string for each switch.
        if (globdata.taskdevicepluginconfig[0] == 1):
            if (getswitchstate(0, globdata) == 1):
                st = 'ON'
            else:
                st = 'OFF'
            rstr += (('"Switch1": "' + st) + '"')
        else:
            for s in range(int(globdata.taskdevicepluginconfig[0])):
                if (getswitchstate(s, globdata) == 1):
                    st = 'ON'
                else:
                    st = 'OFF'
                rstr += (((('"Switch' + str((s + 1))) + '": "') + st) + '"')
                if (s < (int(globdata.taskdevicepluginconfig[0]) - 1)):
                    rstr += ','
        rstr += '}'
    elif (st == 'StatusSTS'):
        rstr += '"StatusSTS": {"Vcc": 3.1415,'
        rstr += (('"Time": "' + str(datetime.now().strftime('%Y-%m-%dT%H:%M:%S'))) + '",')
        rstr += (('"Uptime": "' + rpieTime.getuptime(3)) + '",')
        if (globdata.taskdevicepluginconfig[0] == 1):
            rstr += '"POWER": "'
            if (getswitchstate(0, globdata) == 1):
                rstr += 'ON'
            else:
                rstr += 'OFF'
            rstr += '",'
        else:
            for s in range(globdata.taskdevicepluginconfig[0]):
                rstr += (('"POWER' + str((s + 1))) + '": "')
                if (getswitchstate(s, globdata) == 1):
                    rstr += 'ON'
                else:
                    rstr += 'OFF'
                rstr += '",'
        rstr += '"Wifi": {"AP": 1,'
        rstr += (('"SSId": "' + str(Settings.NetMan.WifiSSID)) + '",')
        rstr += (('"RSSI": ' + str(OS.get_rssi())) + ',')
        rstr += '"APMac": "","Channel":1}}'
    elif (st == 'StatusTIM'):
        rstr += '"StatusTIM": {'
        rstr += (('"UTC": "' + str(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S'))) + '",')
        rstr += (('"Local": "' + str(datetime.now().strftime('%Y-%m-%dT%H:%M:%S'))) + '",')
        rstr += '"StartDST": "1970-01-01T00:00:00","EndDST": "1970-01-01T00:00:00",'
        rstr += '"Timezone": "+00:00"}'
    elif (st == 'StatusMEM'):
        rstr += '"StatusMEM": {"ProgramSize": 2048,"Free": 2048,"ProgramFlashSize": 4096,"FlashSize": 4096,"FlashMode": 3,'
        rstr += (('"Heap": ' + str(OS.FreeMem())) + '}')
    elif (st == 'StatusFWR'):
        rstr += '"StatusFWR": {"Version": "5.12.0a","Boot": 1,'
        rstr += (('"BuildDateTime": "' + str(globdata.btime)) + '",')
        rstr += (((('"Core": "' + rpieGlobals.PROGNAME) + ' ') + rpieGlobals.PROGVER) + '",')
        rstr += (((('"SDK": "Python ' + sys.version.replace('\n', '')) + ' ') + platform.platform()) + '",')
        cpui = OS.get_cpu()
        # CPU frequency: first token of the reported speed, numeric if possible.
        frarr = str(cpui['speed']).split()
        try:
            fr = str(int(misc.str2num(frarr[0])))
        except:
            fr = str(frarr[0])
        rstr += (((('"CpuFrequency": "' + fr) + '","Hardware":"') + str(cpui['model'])) + '"}')
    return rstr
class TestTyping(TestCase):
    """Generic view classes must be subscriptable (PEP 560 ``__class_getitem__``)
    so they can be used with type parameters, e.g. ``GenericAPIView[Foo]``.

    FIX: the ``skipif`` markers had been mangled to bare ``.skipif(...)``
    expressions (a syntax error); restored ``@pytest.mark.skipif``.
    """

    @pytest.mark.skipif(sys.version_info < (3, 7), reason='subscriptable classes requires Python 3.7 or higher')
    def test_genericview_is_subscriptable(self):
        # Subscripting must be a no-op returning the class itself.
        assert (generics.GenericAPIView is generics.GenericAPIView['foo'])

    @pytest.mark.skipif(sys.version_info < (3, 7), reason='subscriptable classes requires Python 3.7 or higher')
    def test_listview_is_subscriptable(self):
        assert (generics.ListAPIView is generics.ListAPIView['foo'])

    @pytest.mark.skipif(sys.version_info < (3, 7), reason='subscriptable classes requires Python 3.7 or higher')
    def test_instanceview_is_subscriptable(self):
        assert (generics.RetrieveAPIView is generics.RetrieveAPIView['foo'])
def run():
    """Verify the ERT runpath file for the current iteration and, on success,
    drop a RUNPATH_WORKFLOW_<iteration>.OK marker file.

    argv[1] is the runpath file to check, argv[2] the working directory used
    in the expected realization paths.
    """
    runpath_file = sys.argv[1]
    ok_filename = 'RUNPATH_WORKFLOW_{}.OK'
    # A marker left over from iteration 0 means we are now on iteration 1.
    iteration = 1 if os.path.isfile(ok_filename.format(0)) else 0
    curdir = sys.argv[2]
    template = '{iens:03d} {pwd}/poly_out/realization-{iens}/iter-{iter} poly_{iens} {iter:03d}\n'
    expected = ''.join(
        template.format(iens=realization, iter=iteration, pwd=curdir)
        for realization in [1, 2, 4, 8, 16, 32, 64]
    )
    with open(runpath_file, encoding='utf-8') as fh:
        assert fh.read() == expected
    with open(ok_filename.format(iteration), 'w', encoding='utf-8') as fh:
        fh.write(':)')
def _construct_trinity_config_params(args: argparse.Namespace) -> Iterable[Tuple[(str, Union[(int, str, bytes, Path, Tuple[(str, ...)])])]]:
    """Yield ``(key, value)`` pairs for building a TrinityConfig from parsed
    CLI arguments.

    Raises ValueError when a custom genesis is given without ``--data-dir``.
    """
    (yield ('network_id', args.network_id))
    (yield ('trinity_tmp_root_dir', args.trinity_tmp_root_dir))
    if args.trinity_tmp_root_dir:
        # Ephemeral run: random directory name under the system temp dir.
        (yield ('trinity_root_dir', (Path(tempfile.gettempdir()) / Path(_random_symbol_of_length(4)))))
    elif (args.trinity_root_dir is not None):
        (yield ('trinity_root_dir', args.trinity_root_dir))
    if (args.genesis is not None):
        if (args.data_dir is None):
            raise ValueError('When providing a custom genesis, must also provide a data-dir')
        (yield ('genesis_config', args.genesis))
    if (args.data_dir is not None):
        (yield ('data_dir', args.data_dir))
    if (args.nodekey is not None):
        # A key file on disk takes precedence over an inline hex-encoded key.
        if os.path.isfile(args.nodekey):
            (yield ('nodekey_path', args.nodekey))
        else:
            (yield ('nodekey', decode_hex(args.nodekey)))
    if (args.max_peers is not None):
        (yield ('max_peers', args.max_peers))
    elif ('sync_mode' in args):
        # Derive a sensible default from the chosen sync mode.
        (yield ('max_peers', _default_max_peers(args.sync_mode)))
    else:
        (yield ('max_peers', DEFAULT_MAX_PEERS))
    if (args.port is not None):
        (yield ('port', args.port))
    if (args.preferred_nodes is None):
        (yield ('preferred_nodes', tuple()))
    else:
        (yield ('preferred_nodes', tuple(args.preferred_nodes)))
class OptionSeriesTimelineSonificationTracksMappingTremoloSpeed(Options):
    """Highcharts ``tremoloSpeed`` mapping options wrapper.

    FIX: each accessor name was defined twice with no decorators, so the
    setter silently shadowed the getter; restored the ``@property`` /
    ``@<name>.setter`` pairs this generated-options style relies on.
    """

    @property
    def mapFunction(self):
        """Mapping function override for this property (None = unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Name of the point property to map to (None = unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (None = unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (None = unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the mapping is computed within (None = unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_remove_inter():
    """``hicAdjustMatrix --action keep --interIntraHandling inter`` must
    reproduce the reference inter-removed matrix."""
    outfile = NamedTemporaryFile(suffix='.cool', prefix='test_matrix', delete=True)
    outfile.close()
    args = '--matrix {} --outFileName {} --chromosomes 1 2 3 {} --action {} --interIntraHandling {} '.format(
        (ROOT + 'hicAdjustMatrix/gm12878_1_2_3.cool'), outfile.name,
        (ROOT + 'hicAdjustMatrix/remove.bed'), 'keep', 'inter').split()
    compute(hicAdjustMatrix.main, args, 5)
    test = hm.hiCMatrix((ROOT + 'hicAdjustMatrix/inter-removed.cool'))
    new = hm.hiCMatrix(outfile.name)
    # FIX: numpy's assert helpers live in numpy.testing, not on the top-level
    # module — np.assert_almost_equal / np.assert_equal raised AttributeError.
    np.testing.assert_almost_equal(test.matrix.data, new.matrix.data, decimal=5)
    np.testing.assert_equal(test.cut_intervals, new.cut_intervals)
    os.unlink(outfile.name)
def test_asyncio_thread2():
    """Run make_new_loop_and_run concurrently in five worker threads plus the
    main thread; the main thread's loop must not already be running."""
    main_loop = asyncio.get_event_loop()
    assert not main_loop.is_running()
    workers = [threading.Thread(target=make_new_loop_and_run) for _ in range(5)]
    for worker in workers:
        worker.start()
    # Exercise the same code path on the main thread while the workers run.
    make_new_loop_and_run()
    for worker in workers:
        worker.join()
class PlaylistNotebook(SmartNotebook):
    """GTK notebook holding playlist tabs, with persistence (save/load of
    open tabs), a closed-tab history with restore, and tab-bar options."""

    def __init__(self, manager_name, player, hotkey):
        SmartNotebook.__init__(self)
        self.tab_manager = PlaylistManager(manager_name)
        self.manager_name = manager_name
        self.player = player
        self._moving_tab = False      # suppresses close-handling while reordering
        self.tab_history = []         # [(playlist, menu_item)] newest first
        self.history_counter = 90000  # decreasing counter keeps menu items sorted
        # Static entries of the "recently closed tabs" provider menu.
        item = menu.simple_separator('clear-sep', [])
        item.register('playlist-closed-tab-menu', self)
        item = menu.simple_menu_item('clear-history', ['clear-sep'], _('_Clear Tab History'), 'edit-clear-all', self.clear_closed_tabs)
        item.register('playlist-closed-tab-menu', self)
        submenu = menu.ProviderMenu('playlist-closed-tab-menu', self)

        def factory(menu_, parent, context):
            # Build the "Recently Closed Tabs" entry for this notebook only.
            if (self.page_num(parent) == (- 1)):
                return None
            item = Gtk.MenuItem.new_with_mnemonic(_('Recently Closed _Tabs'))
            if (len(self.tab_history) > 0):
                item.set_submenu(submenu)
            else:
                item.set_sensitive(False)
            return item
        item = menu.MenuItem(('%s-tab-history' % manager_name), factory, ['tab-close'])
        item.register('playlist-tab-context-menu')
        self.actions = NotebookActionService(self, 'playlist-notebook-actions')
        # Hotkey restores the most recently closed tab.
        self.accelerator = Accelerator(hotkey, _('Restore closed tab'), (lambda *x: self.restore_closed_tab(0)))
        providers.register('mainwindow-accelerators', self.accelerator)
        self.load_saved_tabs()
        self.tab_placement_map = {'left': Gtk.PositionType.LEFT, 'right': Gtk.PositionType.RIGHT, 'top': Gtk.PositionType.TOP, 'bottom': Gtk.PositionType.BOTTOM}
        self.connect('page-added', self.on_page_added)
        self.connect('page-removed', self.on_page_removed)
        # Apply current option values once, then track changes.
        self.on_option_set('gui_option_set', settings, 'gui/show_tabbar')
        self.on_option_set('gui_option_set', settings, 'gui/tab_placement')
        event.add_ui_callback(self.on_option_set, 'gui_option_set')

    def create_tab_from_playlist(self, playlist):
        """Wrap *playlist* in a page + tab and add it to the notebook."""
        page = PlaylistPage(playlist, self.player)
        tab = NotebookTab(self, page)
        self.add_tab(tab, page)
        return tab

    def create_new_playlist(self):
        """Create a 'Playlist %d' tab using the lowest unused number."""
        seen = []
        default_playlist_name = _('Playlist %d')
        default_name_parts = default_playlist_name.split('%d')
        # Collect numbers already used by default-named tabs.
        for n in range(self.get_n_pages()):
            page = self.get_nth_page(n)
            name = page.get_page_name()
            name_parts = [name[0:len(default_name_parts[0])], name[(len(name) - len(default_name_parts[1])):]]
            if (name_parts == default_name_parts):
                number = name[len(name_parts[0]):(len(name) - len(name_parts[1]))]
                try:
                    number = int(number)
                except ValueError:
                    pass
                else:
                    seen += [number]
        seen.sort()
        # First free positive integer.
        n = 1
        while True:
            if (n not in seen):
                break
            n += 1
        playlist = Playlist((default_playlist_name % n))
        return self.create_tab_from_playlist(playlist)

    def add_default_tab(self):
        return self.create_new_playlist()

    def load_saved_tabs(self):
        """Recreate tabs from the tab manager's saved 'orderN.tag.name' files."""
        names = self.tab_manager.list_playlists()
        if (not names):
            return
        count = (- 1)   # index of the tab tagged 'current'
        count2 = 0      # index of the tab tagged 'playing' (fallback)
        names.sort()
        added_tabs = {}
        name_re = re.compile('^order(?P<tab>\\d+)\\.(?P<tag>[^.]*)\\.(?P<name>.*)$')
        for (i, name) in enumerate(names):
            match = name_re.match(name)
            if ((not match) or (not match.group('tab')) or (not match.group('name'))):
                logger.error('`%r` did not match valid playlist file', name)
                continue
            logger.debug('Adding playlist %d: %s', i, name)
            logger.debug('Tab:%s; Tag:%s; Name:%s', match.group('tab'), match.group('tag'), match.group('name'))
            pl = self.tab_manager.get_playlist(name)
            pl.name = match.group('name')
            if (match.group('tab') not in added_tabs):
                self.create_tab_from_playlist(pl)
                added_tabs[match.group('tab')] = pl
            pl = added_tabs[match.group('tab')]
            if (match.group('tag') == 'current'):
                count = i
                if (self.player.queue.current_playlist is None):
                    self.player.queue.set_current_playlist(pl)
            elif (match.group('tag') == 'playing'):
                count2 = i
                self.player.queue.set_current_playlist(pl)
        # Prefer the 'current' tab, else fall back to the 'playing' one.
        if (count == (- 1)):
            count = count2
        self.set_current_page(count)

    def save_current_tabs(self):
        """Persist all playlist pages, encoding order and current/playing tags
        into the saved name."""
        names = self.tab_manager.list_playlists()
        # Remove the old saved set before writing the new one.
        for name in names:
            logger.debug('Removing tab %s', name)
            self.tab_manager.remove_playlist(name)
        for (n, page) in enumerate(self):
            if (not isinstance(page, PlaylistPage)):
                continue
            tag = ''
            if (page.playlist is self.player.queue.current_playlist):
                tag = 'playing'
            elif (n == self.get_current_page()):
                tag = 'current'
            page.playlist.name = ('order%d.%s.%s' % (n, tag, page.playlist.name))
            logger.debug('Saving tab %r', page.playlist.name)
            try:
                self.tab_manager.save_playlist(page.playlist, True)
            except Exception:
                # Keep saving the remaining tabs even if one fails.
                logger.exception('Error saving tab %r', page.playlist.name)

    def show_current_track(self):
        """Jump to the tab holding the playing playlist and scroll to the
        current track.  Returns True when found (implicitly None otherwise)."""
        for (n, page) in enumerate(self):
            if ((not isinstance(page, PlaylistPage)) and (not isinstance(page, QueuePage))):
                continue
            if (page.playlist is not self.player.queue.current_playlist):
                continue
            self.set_current_page(n)
            if (page.playlist.current_position > (- 1)):
                page.view.scroll_to_cell(page.playlist.current_position)
                page.view.set_cursor(page.playlist.current_position)
            return True

    def on_page_added(self, notebook, child, page_number):
        # With more than one page the tab bar is always shown.
        if (self.get_n_pages() > 1):
            self.set_show_tabs(True)

    def on_page_removed(self, notebook, child, page_number):
        # Back to one page: tab bar visibility follows the user option again.
        if (self.get_n_pages() == 1):
            self.set_show_tabs(settings.get_option('gui/show_tabbar', True))
        if (not self._moving_tab):
            if (settings.get_option('gui/save_closed_tabs', True) and isinstance(child, PlaylistPage)):
                self.save_closed_tab(child.playlist)
            if (not isinstance(child, QueuePage)):
                child.destroy()

    def restore_closed_tab(self, pos=None, playlist=None, item_name=None):
        """Pop an entry from the closed-tab history and reopen it as a tab."""
        ret = self.remove_closed_tab(pos, playlist, item_name)
        if (ret is not None):
            self.create_tab_from_playlist(ret[0])

    def save_closed_tab(self, playlist):
        """Push *playlist* onto the closed-tab history and register its
        dynamically-labelled menu item."""
        # Enforce the history size limit by dropping the oldest entry.
        if (len(self.tab_history) > settings.get_option('gui/max_closed_tabs', 10)):
            self.remove_closed_tab((- 1))
        item_name = ('playlist%05d' % self.history_counter)
        close_time = datetime.now()

        def factory(menu_, parent, context):
            # Label shows the age of the closed tab at menu-open time.
            item = None
            dt = (datetime.now() - close_time)
            if (dt.seconds > 60):
                display_name = _('{playlist_name} ({track_count} tracks, closed {minutes} min ago)').format(playlist_name=playlist.name, track_count=len(playlist), minutes=(dt.seconds // 60))
            else:
                display_name = _('{playlist_name} ({track_count} tracks, closed {seconds} sec ago)').format(playlist_name=playlist.name, track_count=len(playlist), seconds=dt.seconds)
            item = Gtk.ImageMenuItem.new_with_mnemonic(display_name)
            item.set_image(Gtk.Image.new_from_icon_name('music-library', Gtk.IconSize.MENU))
            # The newest entry advertises the restore accelerator.
            if (self.tab_history[0][1].name == item_name):
                (key, mods) = Gtk.accelerator_parse(self.accelerator.keys)
                item.add_accelerator('activate', menu.FAKEACCELGROUP, key, mods, Gtk.AccelFlags.VISIBLE)
            item.connect('activate', (lambda w: self.restore_closed_tab(item_name=item_name)))
            return item
        item = menu.MenuItem(item_name, factory, [])
        providers.register('playlist-closed-tab-menu', item, self)
        self.history_counter -= 1
        self.tab_history.insert(0, (playlist, item))

    def get_closed_tab(self, pos=None, playlist=None, item_name=None):
        """Look up a history entry by position, playlist, or menu item name."""
        if (pos is not None):
            try:
                return self.tab_history[pos]
            except IndexError:
                return None
        elif (playlist is not None):
            for (pl, item) in self.tab_history:
                if (pl == playlist):
                    return (pl, item)
        elif (item_name is not None):
            for (pl, item) in self.tab_history:
                if (item.name == item_name):
                    return (pl, item)
        return None

    def remove_closed_tab(self, pos=None, playlist=None, item_name=None):
        """Remove and return a history entry, unregistering its menu item."""
        ret = self.get_closed_tab(pos, playlist, item_name)
        if (ret is not None):
            self.tab_history.remove(ret)
            providers.unregister('playlist-closed-tab-menu', ret[1], self)
        return ret

    def clear_closed_tabs(self, widget, name, parent, context):
        # Repeatedly removing index 0 drains the whole history.
        for i in range(len(self.tab_history)):
            self.remove_closed_tab(0)

    def focus_tab(self, tab_nr):
        """Switch to tab *tab_nr* (0-based) and give it keyboard focus."""
        if (tab_nr < self.get_n_pages()):
            self.set_current_page(tab_nr)
            self.get_current_tab().focus()

    def select_next_tab(self):
        """Cycle forward one tab, wrapping around."""
        tab_nr = self.get_current_page()
        tab_nr += 1
        tab_nr %= self.get_n_pages()
        self.set_current_page(tab_nr)

    def select_prev_tab(self):
        """Cycle backward one tab, wrapping around."""
        tab_nr = self.get_current_page()
        tab_nr -= 1
        tab_nr %= self.get_n_pages()
        self.set_current_page(tab_nr)

    def on_option_set(self, event, settings, option):
        """React to GUI option changes for tab-bar visibility and placement."""
        if (option == 'gui/show_tabbar'):
            show_tabbar = settings.get_option(option, True)
            # Multiple pages force the tab bar on regardless of the option.
            if ((not show_tabbar) and (self.get_n_pages() > 1)):
                show_tabbar = True
            self.set_show_tabs(show_tabbar)
        if (option == 'gui/tab_placement'):
            tab_placement = settings.get_option(option, 'top')
            self.set_tab_pos(self.tab_placement_map[tab_placement])
def extractClesesiaBlogspot(item):
    """Map a clesesia.blogspot feed item to a release message.

    Returns None for previews/unparseable titles, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol or frag):
        return None
    tagmap = [
        ('I Became Vampire girl when dead in another world',
         'I Became Vampire girl when dead in another world', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Migration(migrations.Migration):
    """Alter ``Event.cropping`` to a free-crop 700x450 ImageRatioField."""

    dependencies = [('manager', '0002_auto__1228')]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='cropping',
            field=image_cropping.fields.ImageRatioField(
                'image',
                '700x450',
                adapt_rotation=False,
                allow_fullsize=False,
                free_crop=True,
                help_text='The image must be 700x450 px. You can crop it here.',
                hide_image_field=False,
                size_warning=True,
                verbose_name='Cropping',
            ),
        )
    ]
# NOTE(review): `_os(*metadata.platforms)` looks like the remnant of a
# stripped decorator on main() (e.g. `@common.requires_os(*metadata.platforms)`
# in this RTA framework) — confirm against the original template.
_os(*metadata.platforms)


def main(username='rta-tester', remote_host=None):
    """Detonate WMI credential brute-force and password-spray activity
    against *remote_host* to generate detectable logon telemetry.

    Returns common.MISSING_REMOTE_HOST when no target is given.
    """
    if (not remote_host):
        common.log('A remote host is required to detonate this RTA', '!')
        return common.MISSING_REMOTE_HOST
    # Make sure the target records logon events before generating them.
    common.enable_logon_auditing(remote_host)
    common.log('Brute forcing login with invalid password against {}'.format(remote_host))
    ps_command = '\n $PW = ConvertTo-SecureString "such-secure-passW0RD!" -AsPlainText -Force\n $CREDS = New-Object System.Management.Automation.PsCredential {username}, $PW\n Invoke-WmiMethod -ComputerName {host} -Class Win32_process -Name create -ArgumentList ipconfig -Credential $CREDS\n '
    command = ['powershell', '-c', ps_command.format(username=username, host=remote_host)]
    # Four attempts with the same (wrong) password = brute force pattern.
    for i in range(4):
        common.execute(command, wait=(i == 3))
        time.sleep(1)
    common.log('Password spraying against {}'.format(remote_host))
    # Five attempts with random usernames = spray pattern.
    for i in range(5):
        random_user = ''.join(random.sample(string.ascii_letters, 10))
        command = ['powershell', '-c', ps_command.format(username=random_user, host=remote_host)]
        common.execute(command, wait=(i == 4))
        time.sleep(2)
class SortedAlertsSchema(BaseModel):
    """Pydantic container splitting pending alerts into send/skip buckets."""

    # Each bucket is a homogeneous list of one pending-alert schema type.
    send: Union[(List[PendingTestAlertSchema], List[PendingModelAlertSchema], List[PendingSourceFreshnessAlertSchema])]
    skip: Union[(List[PendingTestAlertSchema], List[PendingModelAlertSchema], List[PendingSourceFreshnessAlertSchema])]

    class Config():
        # pydantic v1: try every Union member instead of coercing to the first.
        smart_union = True
class TestFileSystemHelpers():
    """Tests for the helperFunctions.file_system utilities.

    FIX: the parametrize marker had been mangled to a bare ``.parametrize(...)``
    expression (a syntax error); restored ``@pytest.mark.parametrize``.
    """

    def setup_method(self):
        # Remember the cwd so tests that chdir can be undone.
        self.current_cwd = os.getcwd()

    def teardown_method(self):
        os.chdir(self.current_cwd)

    @pytest.mark.parametrize('working_directory', [os.getcwd(), '/'])
    def test_check_correct_src_dir(self, working_directory):
        """get_src_dir() must be stable regardless of the working directory."""
        real_src_dir = get_src_dir()
        cwd = os.getcwd()
        try:
            os.chdir(working_directory)
            assert Path(f'{real_src_dir}/helperFunctions/file_system.py').is_file()
            assert (get_src_dir() == real_src_dir), 'same source dir before and after chdir'
        finally:
            os.chdir(cwd)

    def test_get_fact_bin_dir(self):
        bin_dir = get_fact_bin_dir()
        files_in_bin_dir = [os.path.basename(f) for f in get_files_in_dir(bin_dir)]
        assert os.path.isdir(bin_dir)
        assert ('fact_extractor/bin' in bin_dir)
        assert ('untrx' in files_in_bin_dir)

    def test_file_is_zero(self):
        assert file_is_empty(f'{get_test_data_dir()}/zero_byte'), 'file is empty'
        assert (not file_is_empty(f'{get_test_data_dir()}/get_files_test/testfile1')), 'file is not empty'
        assert (not file_is_empty(os.path.join(get_test_data_dir(), 'broken_link'))), 'Broken link is not empty'

    def test_sanitize_file_name(self):
        # Path traversal prefixes must be stripped.
        assert (file_name_sanitize('../../../../a/b/c/d') == 'a/b/c/d'), 'file was not sanitized'
        assert (file_name_sanitize('dir/../../../../a/b/c/d') == 'dir/a/b/c/d'), 'file was not sanitized'

    def test_file_is_zero_broken_link(self):
        assert (not file_is_empty(os.path.join(get_test_data_dir(), 'broken_link'))), 'Broken link is not empty'
# NOTE(review): `_scheduler('exp_lr')` appears to be a registration decorator
# for ExpLR whose leading `@`-prefix was lost in transit (e.g.
# `@register_scheduler('exp_lr')`); confirm against the scheduler registry.
_scheduler('exp_lr')


class ExpLR(Scheduler):
    """Exponential learning-rate decay with a warm-hold period and a floor.

    The base rates are held constant until ``decay_start`` epochs, then
    multiplied by ``decay_rate ** ((epoch - decay_start) / decay_steps)``,
    never dropping below ``min_rate``.
    """

    def __init__(self, optimizer, decay_start, decay_rate, decay_steps, min_rate, last_epoch=(- 1), verbose=False):
        self.decay_start = decay_start  # epochs to hold the base LR
        self.decay_rate = decay_rate    # per-decay_steps multiplicative factor
        self.decay_steps = decay_steps  # epoch span of one decay_rate application
        self.min_rate = min_rate        # lower bound on the multiplier
        super().__init__(optimizer, last_epoch, verbose)

    def get_lr(self):
        # Before the decay window: unchanged base rates.
        if (self.last_epoch < self.decay_start):
            return list(self.base_lrs)
        rate = max(self.min_rate, (self.decay_rate ** ((self.last_epoch - self.decay_start) / self.decay_steps)))
        return [(rate * base_lr) for base_lr in self.base_lrs]
class QuandlCommoditiesData():
    """Loads Quandl commodity price documents from a MongoDB collection."""

    def __init__(self, host: str=load_config()['mongodb_host'], username: str=load_secrets()['mongodb_adminusername'], password: str=load_secrets()['mongodb_adminpassword'], db_name: str='ml_investment'):
        # NOTE(review): these defaults call config/secret loaders at import
        # time (evaluated once); confirm that is intended.
        self.host = host
        self.username = username
        self.password = password
        self.db_name = db_name

    # NOTE(review): `_auto_reconnect(3)` looks like the remnant of a stripped
    # decorator on load() (e.g. `@retry_auto_reconnect(3)`); as written it is
    # a bare call at class-body scope. Confirm against the original source.
    _auto_reconnect(3)

    def load(self, index: List[str]) -> pd.DataFrame:
        """Fetch documents whose ``commodity_code`` is in *index*, sorted by
        date descending, as a DataFrame with a ms-resolution ``date`` column."""
        # Codes are stored with '_' in place of '/'.
        index = [x.replace('/', '_') for x in index]
        with MongoClient(host=self.host, username=self.username, password=self.password) as client:
            cursor = client[self.db_name]['quandl_commodities'].find({'commodity_code': {'$in': index}})
            cursor = cursor.sort('date', pymongo.DESCENDING)
            result = [x for x in cursor]
            result = pd.DataFrame(result)
            result['date'] = result['date'].apply((lambda x: np.datetime64(x, 'ms')))
            return result
class AnkiHabiticaCommon():
    """Shared state for the AnkiHabitica add-on: config, user settings,
    and the rotating file logger."""

    config = {}
    user_settings = {}
    # Placeholder; replaced by a real Logger instance in setupLog().
    log = logging.Logger

    # FIX: the `cls` parameter shows this was written as a classmethod, but
    # the @classmethod decorator was missing, so calling
    # AnkiHabiticaCommon.setupLog() raised TypeError (missing `cls`).
    @classmethod
    def setupLog(cls):
        """Create the AnkiHabitica rotating-file logger next to this module.

        Log level follows ``user_settings['debug']``; an existing log file is
        rolled over (5 backups kept) on every setup.
        """
        cls.log = logging.getLogger('AnkiHabitica')
        if cls.user_settings['debug']:
            cls.log.setLevel(logging.DEBUG)
        else:
            cls.log.setLevel(logging.ERROR)
        logName = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'AnkiHabitica.log')
        # Roll over only when a previous log file already exists.
        roll = os.path.isfile(logName)
        fh = logging.handlers.RotatingFileHandler(logName, backupCount=5)
        if roll:
            fh.doRollover()
        fmt = logging.Formatter('%(asctime)s [%(threadName)14s:%(filename)18s:%(lineno)5s - %(funcName)30s()] %(levelname)8s: %(message)s')
        fh.setFormatter(fmt)
        cls.log.addHandler(fh)

    class settings():
        """Namespace object for runtime settings attributes."""
        pass
class OptionSeriesWindbarbOnpoint(Options):
    """Highcharts windbarb ``onPoint`` options wrapper.

    FIX: ``id`` was defined twice with no decorators (the setter shadowed the
    getter) and the sub-data accessors lacked ``@property``; restored the
    decorator pairs this generated-options style relies on.
    """

    @property
    def connectorOptions(self) -> 'OptionSeriesWindbarbOnpointConnectoroptions':
        """Nested connector options sub-object."""
        return self._config_sub_data('connectorOptions', OptionSeriesWindbarbOnpointConnectoroptions)

    @property
    def id(self):
        """Id of the point to attach to (None = unset)."""
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def position(self) -> 'OptionSeriesWindbarbOnpointPosition':
        """Nested position options sub-object."""
        return self._config_sub_data('position', OptionSeriesWindbarbOnpointPosition)
def extractWhatsawhizzerwebnovelsCom(item):
    """Map a whatsawhizzerwebnovels.com feed item to a release message.

    Returns None for previews, locked/teaser posts, or short bodies; False
    when no known series tag/prefix matches; otherwise the built message.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    badwords = ['isekai no monogatari']
    if any([(bad in item['tags']) for bad in badwords]):
        return None
    # Skip paywalled / teaser posts and near-empty bodies.
    if ('To unlock this content' in item['contents']):
        return None
    if ('This chapter can only be viewed' in item['contents']):
        return None
    if (len(item['contents']) < 1000):
        return None
    # (tag, canonical series name, translation type)
    tagmap = [('Rise of the Midnight King', 'Rise of the Midnight King', 'oel'), ('NTR Crush', 'NTR Crush : I Will Steal Every Girl', 'oel'), ('tales of a seductress', 'tales of a seductress', 'oel'), ('screwed by my boss', 'screwed by my boss', 'oel'), ('Hawtness', 'Hawtness', 'oel'), ('Time and Place', 'The Timefall Saga - Time and Place', 'oel'), ('EPASH', 'Enslaved Pregnant Animalgirl Sister Harem With No NTR', 'oel'), ('Pushing Up Gravestones', 'Pushing Up Gravestones', 'oel'), ('My Dungeon Life', 'My Dungeon Life', 'oel'), ('World of Women', 'World of Women', 'oel'), ('getting lucky', 'getting lucky', 'oel'), ('Tales of an Enchantress', 'Tales of an Enchantress', 'oel'), ('toae', 'Tales of an Enchantress', 'oel'), ('moth', 'The Man of the House', 'oel'), ('Power of Creation', 'Power of Creation', 'oel'), ('std', 'Sex Trafficking for Dummies', 'oel'), ('vampires kiss', 'The Vampires Kiss', 'oel'), ('Requiem to the Stars', 'Requiem to the Stars', 'oel'), ('guy on a spaceship', 'Guy on a Spaceship', 'oel'), ('enslaved', 'Enslaved Sister Harem', 'oel'), ('the last dread pirate', 'The Last Dread Pirate', 'oel'), ('zoo', 'The Zoo', 'oel'), ('apocalypse', 'Its Not Easy Making Money In the Apocalypse', 'oel'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Fallback: match by title prefix when no tag matched.
    chp_prefixes = [('My Dungeon Life Chapter ', 'My Dungeon Life', 'oel'), ('Its Not Easy Making Money In the Apocalypse Chapter ', 'Its Not Easy Making Money In the Apocalypse', 'oel'), ('Sex Trafficking for Dummies', 'Sex Trafficking for Dummies', 'oel')]
    for (prefix, series, tl_type) in chp_prefixes:
        if item['title'].lower().startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesPolygonStatesSelect(Options):
    """Highcharts polygon ``states.select`` options wrapper.

    FIX: paired accessors were defined twice with no decorators (setters
    shadowed getters); restored ``@property`` / ``@<name>.setter`` pairs.
    """

    @property
    def animation(self) -> 'OptionSeriesPolygonStatesSelectAnimation':
        """Nested animation options sub-object."""
        return self._config_sub_data('animation', OptionSeriesPolygonStatesSelectAnimation)

    @property
    def enabled(self):
        """Whether the select state is enabled (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesPolygonStatesSelectHalo':
        """Nested halo options sub-object."""
        return self._config_sub_data('halo', OptionSeriesPolygonStatesSelectHalo)

    @property
    def lineWidth(self):
        """Absolute line width in the select state (None = unset)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Line width increment in the select state (default 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesPolygonStatesSelectMarker':
        """Nested marker options sub-object."""
        return self._config_sub_data('marker', OptionSeriesPolygonStatesSelectMarker)
def extractTrlperishablecharacterepisodeWordpressCom(item):
    """Resolve a feed item from trlperishablecharacterepisode.wordpress.com
    to a release message: None when unparseable/preview, False when no known
    tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # tag -> (series name, translation type); insertion order preserved.
    known_series = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series, tl_type) in known_series.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesArcdiagramSonificationPointgrouping(Options):
    """Highcharts arcdiagram sonification ``pointGrouping`` options wrapper.

    FIX: each accessor name was defined twice with no decorators (setters
    shadowed getters); restored ``@property`` / ``@<name>.setter`` pairs.
    """

    @property
    def algorithm(self):
        """Grouping algorithm (default 'minmax')."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of one group in milliseconds (default 15)."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property the grouping operates on (default 'y')."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class Migration(migrations.Migration):
    """Initial chat schema: ChatMessage and ChatRoom, with message->room FK."""

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='ChatMessage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user', models.CharField(max_length=64)),
                # auto_now keeps this updated on every save; indexed for ordering.
                ('date', models.DateTimeField(auto_now=True, db_index=True)),
                ('text', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='ChatRoom',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # External room identifier, unique per room.
                ('eid', models.CharField(max_length=64, unique=True)),
            ],
        ),
        migrations.AddField(
            model_name='chatmessage',
            name='room',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chat.ChatRoom'),
        ),
    ]
class _EventBuffer(): def __init__(self): self._buffer = [] self._tail = '' def append(self, char): self._buffer.append(char) self._tail += char self._tail = self._tail[(- 4):] def truncate(self): (head, sep, _) = self.buffer_string.rpartition('\n') rem = (head + sep) self._buffer = list(rem) self._tail = rem[(- 4):] def is_end_of_field(self): last_two_chars = self._tail[(- 2):] return ((last_two_chars == '\n\n') or (last_two_chars == '\r\r') or (self._tail == '\r\n\r\n')) def buffer_string(self): return ''.join(self._buffer)
def main():
    """Analyse text corpora with an omorfi FSA and report lexical coverage
    statistics (missing lemmas, cases, comparatives, adposition complements,
    adjective agreements) into log and frequency files."""
    global sent
    a = ArgumentParser()
    a.add_argument('-f', '--fsa', metavar='FSAFILE', required=True,
                   help="HFST's optimised lookup binary data for the transducer to be applied")
    a.add_argument('-i', '--input', metavar='INFILE', type=str, required=True,
                   dest='infile', help='source of analysis data')
    a.add_argument('-m', '--master', metavar='TSVFILE', type=str, required=True,
                   dest='tsvfile', help='source of existing lexical data')
    opts = a.parse_args()
    # NOTE(review): --input is required above, so the glob fallback below is
    # effectively dead; kept as-is — confirm whether --input should be optional.
    if opts.infile:
        test_corpora_files = [opts.infile]
    else:
        test_corpora_files = glob('*.text')
    # One log sink per statistic category.
    lemma_log = open('missing_word_ids.log', 'w')
    case_log = open('missing_nominal_cases.log', 'w')
    comp_log = open('missing_comparatives.log', 'w')
    adposition_log = open('adposition_complements.log', 'w')
    adposition_stats = open('adposition_complements_full.log', 'w')
    adjective_log = open('adjective_agreements.log', 'w')
    omorfi = Omorfi()
    omorfi.load_filename(opts.fsa)
    # Seed the known-lemma table from the master TSV.
    gather_lemmas(open(opts.tsvfile))
    test_corpora = list()
    for test_corpus_file in test_corpora_files:
        try:
            test_corpora.append(open(test_corpus_file))
        except IOError as ioe:
            print('Failed to open corpus ', test_corpus_file, ':', ioe)
    for test_corpus in test_corpora:
        print('lines from', test_corpus.name)
        linen = 0
        for line in test_corpus:
            linen += 1
            if ((linen % 200000) == 0):
                # Periodically rebuild the analyser: works around a memory
                # leak in long-running analysis sessions.
                print(linen, '...! Time to reload everything because memory is leaking very badly indeed!')
                sent = list()
                omorfi = None
                omorfi = Omorfi()
                omorfi.load_filename(opts.fsa)
                gc.collect()
            if ((linen % 1000) == 0):
                # Lightweight progress indicator on the same console line.
                print(linen, '...', end='\r')
            # Pad punctuation with spaces so split() yields clean tokens.
            for punct in '".,:;?!()':
                line = line.replace(punct, ((' ' + punct) + ' '))
            for token in line.split():
                analyses = omorfi.analyse(token)
                add_to_sent(analyses, token)
                stat_word_ids(token, analyses)
                stat_nominal_cases(token, analyses, case_log)
                stat_adjective_comps(token, analyses, comp_log)
    print('Testing statistics')
    test_zero_lemmas(lemma_log)
    test_zero_cases(case_log)
    test_zero_comps(comp_log)
    test_adposition_complements(adposition_log)
    test_adjective_agreements(adjective_log)
    print('Writing accurate statistics')
    print_adposition_stats(adposition_stats)
    print_lemma_stats(open('lemmas.freqs', 'w'))
    print_case_stats(open('cases.freqs', 'w'))
    exit(0)
class OptionPlotoptionsVectorSonificationContexttracks(Options):
    """Accessors for plotOptions.vector.sonification.contextTracks options.

    NOTE(review): each scalar option appears as a getter/setter pair sharing
    one name; the generator presumably emitted ``@property`` /
    ``@<name>.setter`` decorators missing here — as written each second
    ``def`` shadows the first. Confirm against the generated original.
    """

    def activeWhen(self) -> 'OptionPlotoptionsVectorSonificationContexttracksActivewhen':
        # Sub-options object controlling when this context track plays.
        return self._config_sub_data('activeWhen', OptionPlotoptionsVectorSonificationContexttracksActivewhen)

    def instrument(self):
        # Default synth patch is 'piano'.
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionPlotoptionsVectorSonificationContexttracksMapping':
        # Sub-options object mapping data values to sound parameters.
        return self._config_sub_data('mapping', OptionPlotoptionsVectorSonificationContexttracksMapping)

    def midiName(self):
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsVectorSonificationContexttracksPointgrouping':
        # Sub-options object for point-grouping behaviour.
        return self._config_sub_data('pointGrouping', OptionPlotoptionsVectorSonificationContexttracksPointgrouping)

    def roundToMusicalNotes(self):
        # Snap pitches to musical notes by default.
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def timeInterval(self):
        return self._config_get(None)

    def timeInterval(self, num: float):
        self._config(num, js_type=False)

    def type(self):
        # Track type; default 'instrument'.
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)

    def valueInterval(self):
        return self._config_get(None)

    def valueInterval(self, num: float):
        self._config(num, js_type=False)

    def valueMapFunction(self):
        # Default value-to-sound mapping curve is 'linear'.
        return self._config_get('linear')

    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)

    def valueProp(self):
        # Note the quoted default: the JS-side literal string '"x"'.
        return self._config_get('"x"')

    def valueProp(self, text: str):
        self._config(text, js_type=False)
class ImageMessageFactory(MessageFactory):
    """Message factory producing image messages for the test suite.

    With ``large=True`` the fixtures exceed the photo size limit, so the
    message is expected to arrive as a file attachment rather than a photo.
    """

    def __init__(self, large: bool = False):
        self.large = large

    def send_message(self, slave: MockSlaveChannel, chat: Chat,
                     target: Optional[Message] = None) -> Message:
        """Send an image fixture through the mock slave channel."""
        fixture = Path('tests/mocks/large_image_0.png') if self.large \
            else Path('tests/mocks/image.png')
        return slave.send_file_like_message(
            MsgType.Image, fixture, 'image/png', chat, target=target,
            reactions=True, commands=True, substitution=True)

    def compare_message(self, tg_msg: Message, efb_msg: EFBMessage) -> None:
        """Assert the Telegram message matches the EFB message just sent."""
        if self.large:
            # Large images are delivered as files: name and size must match.
            assert tg_msg.file
            assert tg_msg.file.name == efb_msg.filename
            assert tg_msg.file.size == efb_msg.path.stat().st_size
        else:
            assert tg_msg.photo
            assert efb_msg.text in tg_msg.raw_text
        # Reactions are rendered into the message body.
        for reaction in efb_msg.reactions:
            assert reaction in tg_msg.raw_text
        if efb_msg.commands:
            assert tg_msg.button_count == len(efb_msg.commands)

    def edit_message(self, slave: MockSlaveChannel,
                     message: Message) -> Optional[Message]:
        """Edit only the caption/text of a previously sent image message."""
        return slave.edit_file_like_message_text(
            message, reactions=True, commands=True, substitution=True)

    def edit_message_media(self, slave: MockSlaveChannel,
                           message: Message) -> Optional[Message]:
        """Replace the media of a previously sent image message."""
        fixture = Path('tests/mocks/large_image_1.png') if self.large \
            else Path('tests/mocks/image_1.png')
        return slave.edit_file_like_message(
            message, fixture, mime='image/png',
            reactions=True, commands=True, substitution=True)

    def finalize_message(self, tg_msg: Message, efb_msg: EFBMessage):
        """Release the open fixture file handle, if any."""
        file_obj = efb_msg.file
        if file_obj and not file_obj.closed:
            file_obj.close()

    def __str__(self):
        return f'{self.__class__.__name__}(large={self.large})'
def extractNirellavtranslationWordpressCom(item):
    """Feed-item parser for nirellavtranslation.wordpress.com.

    Returns a release message for known tags, ``None`` for previews or
    items lacking chapter/volume info, and ``False`` when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Ignore items without chapter/volume info and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, release_kind in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=release_kind)
    return False
class OptionSeriesVariwideOnpointConnectoroptions(Options):
    """Accessors for series.variwide.onPoint.connectorOptions settings.

    NOTE(review): getter/setter pairs share a name; the upstream generator
    presumably emitted ``@property`` / ``@<name>.setter`` decorators that
    are missing here — as written each second ``def`` shadows the first.
    """

    def dashstyle(self):
        # Connector line dash style; no default (None).
        return self._config_get(None)

    def dashstyle(self, text: str):
        self._config(text, js_type=False)

    def stroke(self):
        # Connector stroke colour; no default (None).
        return self._config_get(None)

    def stroke(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        # Connector line width in pixels; default 1.
        return self._config_get(1)

    def width(self, num: float):
        self._config(num, js_type=False)
class MainWindow(SplitApplicationWindow):
    """Example split window: a file-tree viewer on the left, an embedded
    Python shell on the right with the viewer bound into its namespace."""

    # Left pane occupies 30% of the window width.
    ratio = Float(0.3)

    # Panes are laid out side by side.
    direction = Str('vertical')

    def _create_lhs(self, parent):
        """Create the file-tree viewer and trace all of its trait changes."""
        viewer = FileTreeViewer(
            parent,
            input=os.path.abspath(os.curdir),
            sorter=FileSorter(),
        )
        # Observe every trait on the viewer (match-all predicate).
        viewer.observe(self._on_tree_anytrait_changed,
                       match(lambda name, ctrait: True))
        self._tree_viewer = viewer
        return viewer.control

    def _create_rhs(self, parent):
        """Create the Python shell and expose the tree viewer inside it."""
        shell = PythonShell(parent)
        # Bind the viewer under both a long and a short alias.
        for alias in ('widget', 'w'):
            shell.bind(alias, self._tree_viewer)
        self._python_shell = shell
        return shell.control

    def _on_tree_anytrait_changed(self, event):
        """Print every trait change on the tree viewer for demo purposes."""
        print('trait', event.name, 'value', event.new)
class MyMessage(Message):
    """Test message covering every supported field kind of the Message base.

    NOTE(review): field semantics are defined by the ``Message`` base class
    (not visible here); these annotations only exercise each kind once.
    """

    # Primitive scalar fields.
    int_field: int
    str_field: str
    float_field: float
    bool_field: bool
    bytes_field: bytes
    # Enum-valued fields (int- and str-backed).
    int_enum_field: MyIntEnum
    str_enum_field: MyStrEnum
    # Fixed-length byte field (exactly 10 bytes).
    fixed_bytes_field: BytesType(length=10)
    # Container fields.
    list_field: List[int]
    dict_field: Dict[(str, str)]
    # Nested structured field.
    dataclass_field: MyDataclass
def test_transaction_cancelled_span(elasticapm_client):
    """A cancelled span must not be reported and must not count as
    started or dropped on the enclosing transaction."""
    elasticapm_client.begin_transaction('test_type')
    with elasticapm.capture_span('test') as span:
        span.cancel()
    elasticapm_client.end_transaction('foo')

    # No span events should have been recorded at all.
    recorded_spans = elasticapm_client.events[constants.SPAN]
    assert len(recorded_spans) == 0

    # The transaction's span counters must not include the cancelled span.
    transaction = elasticapm_client.events[constants.TRANSACTION][0]
    span_count = transaction['span_count']
    assert span_count['started'] == 0
    assert span_count['dropped'] == 0
@dataclass(eq=False)
class OSMWay(OSMElement):
    """An OSM way: an ordered node list plus the tags inherited from
    :class:`OSMElement`.

    BUGFIX: the source showed a bare ``(eq=False)`` before the class and no
    decorator on ``from_dict`` even though it uses ``cls`` — the
    ``@dataclass`` and ``@classmethod`` decorators were evidently stripped
    and are restored here.
    """

    # Ordered node objects; a way references nodes by id in raw OSM data.
    nodes: Optional[List[OSMNode]] = field(default_factory=list)
    # Raw 'visible' attribute from the OSM export, if present.
    visible: Optional[str] = None

    @classmethod
    def from_dict(cls, structure: Dict[(str, Any)],
                  nodes: Dict[(int, OSMNode)]) -> 'OSMWay':
        """Build a way from a parsed OSM structure, resolving node ids
        through the ``nodes`` lookup table."""
        return cls(structure['id'], structure.get('tags', {}),
                   [nodes[x] for x in structure['nodes']],
                   visible=structure.get('visible'))

    def is_cycle(self) -> bool:
        """True when the way is closed (first node equals last node)."""
        return (self.nodes[0] == self.nodes[(- 1)])

    def __repr__(self) -> str:
        return f'Way <{self.id_}> {self.nodes}'
# NOTE(review): the source showed an orphaned `_os(*metadata.platforms)` call
# above this function — almost certainly a mangled decorator. Reconstructed as
# the standard RTA platform guard; confirm against sibling scripts.
@common.requires_os(*metadata.platforms)
def main():
    """Simulate payload download: run a curl-masquerading binary so the
    detection rule under test can observe the child process."""
    masquerade = '/tmp/testfile'
    # Copy a benign binary to the masquerade path so it looks like curl.
    common.create_macos_masquerade(masquerade)
    common.log('Launching fake curl commands to download payload')
    common.execute([masquerade, 'childprocess', 'curl', 'portquiz.net'],
                   timeout=5, kill=True)
    # Clean up the temporary masquerade binary.
    common.remove_file(masquerade)
def patch_readme(text='', readme='README.rst',
                 start='devices-list-start', end='devices-list-end'):
    """Return the contents of *readme* with everything between the *start*
    and *end* marker lines replaced by *text*.

    The marker lines themselves are preserved; a blank line is emitted just
    before the end marker. Lines inside the marked region are dropped.
    """
    pieces = []
    skipping = False
    with open(readme, 'r') as source:
        for line in source:
            if end in line:
                # Close the replaced region: blank separator, then keep marker.
                pieces.append('\n')
                skipping = False
            if not skipping:
                pieces.append(line)
            if start in line:
                # Open the region: keep marker, inject replacement, drop rest.
                skipping = True
                pieces.append(text)
    return ''.join(pieces)
def remove_unsupported_code(infile, outfile):
    """Copy *infile* to *outfile* with compiler-specific constructs removed.

    Strips GCC-style ``__attribute__((...))`` annotations and inline
    ``__asm ... );`` statements so the result can be consumed by tools that
    do not understand those extensions.
    """
    patterns = (
        r'__attribute__\s*\(\s*\(\s*\S*\s*\)\s*\)',  # __attribute__((x))
        r'__asm.*\);',                                # inline assembly lines
    )
    with open(infile, 'rt') as source:
        text = source.read()
    for pattern in patterns:
        text = re.sub(pattern, '', text)
    with open(outfile, 'wt') as sink:
        sink.write(text)