code
stringlengths
281
23.7M
def check_logged_in():
    """Abort the pytest run unless the user is authenticated with the Platform.sh CLI."""
    print('\nVerifying logged in to Platform.sh CLI...')
    auth_result = make_sp_call('platform auth:info --quiet', capture_output=True)
    if 'LoginRequiredException' not in auth_result.stderr.decode():
        # Already logged in; nothing else to do.
        return
    # Not logged in: explain how to authenticate, then stop the whole test run.
    parts = [
        '\n----- Error: Not logged in through CLI -----',
        '\nPlease log in to the Platform.sh CLI and then run the integration test.',
        '\n You can log in with the command: platform login',
        '\n-----\n',
    ]
    print(''.join(parts))
    pytest.exit('Please run `platform login` and then run integration tests.')
class QemuExecInstaller(AbstractPluginInstaller):
    """Installer for the qemu-exec plugin.

    Builds the plugin's docker image and downloads MIPS cross-compiled libc
    files used as test fixtures.
    """

    base_path = Path(__file__).resolve().parent

    def install_docker_images(self):
        # Container used to execute foreign-architecture binaries under QEMU.
        self._build_docker_image('fact/qemu-exec:alpine-3.18')

    def install_files(self):
        """Fetch libc6 MIPS cross libraries and install them into the test data dirs."""
        with TemporaryDirectory(dir=str(self.base_path)) as tmp_dir:
            # Skip the download when all four fixture files are already present.
            fixture_paths = [
                f'{self.base_path}/test/data/test_tmp_dir/lib/libc.so.6',
                f'{self.base_path}/test/data/test_tmp_dir/lib/ld.so.1',
                f'{self.base_path}/test/data/test_tmp_dir_2/lib/libc.so.6',
                f'{self.base_path}/test/data/test_tmp_dir_2/lib/ld.so.1',
            ]
            if all(Path(path).exists() for path in fixture_paths):
                return
            # NOTE(review): the original URL literal was corrupted in this copy of
            # the file (it ran into the next statement). Reconstructed from the
            # .deb file name below — TODO confirm against the original source.
            url_libc6_mips = (
                'http://ftp.ubuntu.com/ubuntu/pool/universe/c/cross-toolchain-base-ports/'
                'libc6-mips-cross_2.23-0ubuntu3cross1_all.deb'
            )
            dest_libc6_mips = f'{tmp_dir}/libc6-mips-cross_2.23-0ubuntu3cross1_all.deb'
            urllib.request.urlretrieve(url_libc6_mips, dest_libc6_mips)
            with OperateInDirectory(tmp_dir):
                # Extract the data archive from the Debian package, then unpack it.
                run_cmd_with_logging(f'ar x {dest_libc6_mips} data.tar.xz')
                run_cmd_with_logging(f'tar -xf {tmp_dir}/data.tar.xz -C {tmp_dir}')
            # NOTE(review): the following commands use paths relative to the CWD
            # (expected to be base_path); nesting relative to the original
            # OperateInDirectory block could not be recovered exactly — confirm.
            Path('test/data/test_tmp_dir/lib').mkdir(exist_ok=True, parents=True)
            Path('test/data/test_tmp_dir_2/fact_extracted/lib').mkdir(exist_ok=True, parents=True)
            run_cmd_with_logging(f'cp {tmp_dir}/usr/mips-linux-gnu/lib/libc-2.23.so test/data/test_tmp_dir/lib/libc.so.6')
            run_cmd_with_logging(f'cp {tmp_dir}/usr/mips-linux-gnu/lib/ld-2.23.so test/data/test_tmp_dir/lib/ld.so.1')
            run_cmd_with_logging(f'mv {tmp_dir}/usr/mips-linux-gnu/lib/libc-2.23.so test/data/test_tmp_dir_2/fact_extracted/lib/libc.so.6')
            run_cmd_with_logging(f'mv {tmp_dir}/usr/mips-linux-gnu/lib/ld-2.23.so test/data/test_tmp_dir_2/fact_extracted/lib/ld.so.1')
def render_notebook(*args, **kwargs):
    """Render the module-level ``content`` Markdown string into the notebook DOM.

    Runs in the browser (Pyodide): ``js.document`` is the page document, and
    the rendered HTML is written into the ``__notebook-render`` element.
    Keyword args: ``gamut`` — color gamut for the live color fences
    (defaults to WEBSPACE).
    """
    import markdown
    from pymdownx import slugs, superfences
    from js import document
    gamut = kwargs.get('gamut', WEBSPACE)
    # Markdown source is injected as a module-level ``content`` global.
    text = globals().get('content', '')
    extensions = [
        'markdown.extensions.toc', 'markdown.extensions.smarty', 'pymdownx.betterem',
        'markdown.extensions.attr_list', 'markdown.extensions.tables',
        'markdown.extensions.abbr', 'markdown.extensions.footnotes',
        'pymdownx.superfences', 'pymdownx.highlight', 'pymdownx.inlinehilite',
        'pymdownx.magiclink', 'pymdownx.tilde', 'pymdownx.caret',
        'pymdownx.smartsymbols', 'pymdownx.emoji', 'pymdownx.escapeall',
        'pymdownx.tasklist', 'pymdownx.striphtml', 'pymdownx.snippets',
        'pymdownx.keys', 'pymdownx.saneheaders', 'pymdownx.arithmatex',
        'pymdownx.blocks.admonition', 'pymdownx.blocks.details',
        'pymdownx.blocks.html', 'pymdownx.blocks.definition', 'pymdownx.blocks.tab']
    extension_configs = {
        'markdown.extensions.toc': {'slugify': slugs.slugify(case='lower'), 'permalink': ''},
        'markdown.extensions.smarty': {'smart_quotes': False},
        'pymdownx.arithmatex': {'generic': True, 'block_tag': 'pre'},
        'pymdownx.superfences': {
            'preserve_tabs': True,
            # Custom fences: 'diagram' renders verbatim; 'playground'/'python'/'py'
            # become live, executable color-command blocks.
            'custom_fences': [
                {'name': 'diagram', 'class': 'diagram', 'format': superfences.fence_code_format},
                {'name': 'playground', 'class': 'playground', 'format': color_command_formatter(LIVE_INIT, gamut), 'validator': live_color_command_validator},
                {'name': 'python', 'class': 'highlight', 'format': color_command_formatter(LIVE_INIT, gamut), 'validator': live_color_command_validator},
                {'name': 'py', 'class': 'highlight', 'format': color_command_formatter(LIVE_INIT, gamut), 'validator': live_color_command_validator}]},
        'pymdownx.inlinehilite': {
            # Inline `color` code spans render as live color swatches.
            'custom_inline': [{'name': 'color', 'class': 'color', 'format': color_formatter(LIVE_INIT, gamut)}]},
        'pymdownx.magiclink': {'repo_url_shortener': True, 'repo_url_shorthand': True, 'social_url_shorthand': True, 'user': 'facelessuser', 'repo': 'coloraide'},
        'pymdownx.keys': {'separator': '+'},
        'pymdownx.blocks.tab': {'alternate_style': True},
        'pymdownx.blocks.admonition': {'types': ['new', 'settings', 'note', 'abstract', 'info', 'tip', 'success', 'question', 'warning', 'failure', 'danger', 'bug', 'example', 'quote']},
        'pymdownx.blocks.details': {'types': [
            {'name': 'details-new', 'class': 'new'},
            {'name': 'details-settings', 'class': 'settings'},
            {'name': 'details-note', 'class': 'note'},
            {'name': 'details-abstract', 'class': 'abstract'},
            {'name': 'details-info', 'class': 'info'},
            {'name': 'details-tip', 'class': 'tip'},
            {'name': 'details-success', 'class': 'success'},
            {'name': 'details-question', 'class': 'question'},
            {'name': 'details-warning', 'class': 'warning'},
            {'name': 'details-failure', 'class': 'failure'},
            {'name': 'details-danger', 'class': 'danger'},
            {'name': 'details-bug', 'class': 'bug'},
            {'name': 'details-example', 'class': 'example'},
            {'name': 'details-quote', 'class': 'quote'}]}}
    try:
        html = markdown.markdown(text, extensions=extensions, extension_configs=extension_configs)
    except Exception:
        # Rendering failures leave the output area empty rather than crashing the page.
        html = ''
    content = document.getElementById('__notebook-render')
    content.innerHTML = html
def iter_seq(seq):
    """Run one hmmer scan job described by the packed tuple *seq*.

    Returns a ``(name, elapsed_time, hits, seq_length, None)`` tuple; jobs that
    are skipped, too large, or empty short-circuit with a sentinel hit list and
    elapsed time of -1.
    """
    (seqnum, name, seq, servers, dbtype, evalue_thr, score_thr,
     max_hits, maxseqlen, fixed_Z, skip, cut_ga) = seq
    # Round-robin job distribution over the available servers.
    host, port = servers[seqnum % len(servers)]
    if skip and name in skip:
        return name, -1, ['SKIPPED'], len(seq), None
    if maxseqlen and len(seq) > maxseqlen:
        return name, -1, ['SEQ_TOO_LARGE ' + str(len(seq))], len(seq), None
    if not seq:
        return name, -1, ['NO_SEQ_FOUND'], len(seq), None
    cut_ga_p = ' --cut_ga' if cut_ga == True else ''
    # Strip gap markers ('-' plus the character that follows each one).
    seq = re.sub('-.', '', seq)
    data = '--%s 1%s\n>%s\n%s\n//' % (dbtype, cut_ga_p, name, seq)
    etime, hits = scan_hits(
        data, host, port,
        cut_ga=cut_ga, evalue_thr=evalue_thr, score_thr=score_thr,
        max_hits=max_hits, fixed_Z=fixed_Z)
    return name, etime, hits, len(seq), None
def confirm_use_org_name(org_name):
    """Build the confirmation prompt shown before creating a project under *org_name*."""
    # NOTE(review): the original multi-line layout of this string was lost when
    # the file was collapsed onto one line; reproduced verbatim as found.
    prompt = dedent(f''' --- The Platform.sh CLI requires an organization name when creating a new project. --- When using --automate-all, a project will be created on your behalf. The following organization name was found: {org_name} This organization will be used to create a new project. If this is not okay, enter n to cancel this operation. ''')
    return prompt
# NOTE(review): the bare ``_lxml`` below looks like the residue of a decorator
# (e.g. a "skip unless lxml is installed" marker) whose leading ``@`` was lost
# in this copy of the file — confirm against the original source.
_lxml


def test_simple_xml():
    """CSS selectors on XML soup must match tag names case-sensitively."""
    xml = BeautifulSoup(SIMPLE_XML, 'xml')
    # Exact-case selectors match...
    assert xml.select_one('Envelope')
    assert xml.select_one('Envelope Header')
    assert xml.select_one('Header')
    # ...lower-cased variants must not (XML is case-sensitive, unlike HTML).
    assert (not xml.select_one('envelope'))
    assert (not xml.select_one('envelope header'))
    assert (not xml.select_one('header'))
def _cmd_scatter(args):
    """Handle the ``scatter`` CLI command: plot copy ratios, segments and SNVs."""
    # Optional inputs: bin-level copy ratios, segmentation, and a VCF of
    # heterozygous SNPs for the allele-frequency track.
    cnarr = (read_cna(args.filename, sample_id=args.sample_id) if args.filename else None)
    segarr = (read_cna(args.segment, sample_id=args.sample_id) if args.segment else None)
    varr = load_het_snps(args.vcf, args.sample_id, args.normal_id, args.min_variant_depth, args.zygosity_freq)
    # Forward only the plotting options the user explicitly set.
    scatter_opts = {k: v for (k, v) in (('do_trend', args.trend), ('by_bin', args.by_bin), ('window_width', args.width), ('y_min', args.y_min), ('y_max', args.y_max), ('fig_size', args.fig_size), ('antitarget_marker', args.antitarget_marker), ('segment_color', args.segment_color)) if (v is not None)}
    if args.range_list:
        # Multi-region mode: one PDF page per region in the range list.
        with PdfPages(args.output) as pdf_out:
            for region in tabio.read_auto(args.range_list).coords():
                try:
                    if (args.title is not None):
                        scatter_opts['title'] = f'{args.title} {region.chromosome}'
                    scatter.do_scatter(cnarr, segarr, varr, show_range=region, **scatter_opts)
                except ValueError as exc:
                    # Skip regions that cannot be plotted (e.g. no data points).
                    logging.warning('Not plotting region %r: %s', to_label(region), exc)
                # NOTE(review): original indentation was lost; savefig/close are
                # placed once per region, after the try/except — confirm.
                pdf_out.savefig()
                pyplot.close()
    else:
        # Single-plot mode: optional chromosome/gene selection.
        if (args.title is not None):
            scatter_opts['title'] = args.title
        scatter.do_scatter(cnarr, segarr, varr, args.chromosome, args.gene, **scatter_opts)
        if args.output:
            # Infer the image format from the output file extension.
            oformat = os.path.splitext(args.output)[(- 1)].replace('.', '')
            pyplot.savefig(args.output, format=oformat, bbox_inches='tight')
            logging.info('Wrote %s', args.output)
        else:
            pyplot.show()
class TestExon(unittest.TestCase):
    """Unit tests for the Exon value object and its string representation."""

    def test_exon_no_data(self):
        """Without stats, both optional fields are None and __str__ reflects that."""
        exon = Exon('PSR.hg.1', 'A1BG')
        self.assertEqual(exon.name, 'PSR.hg.1')
        self.assertEqual(exon.gene_symbol, 'A1BG')
        self.assertIsNone(exon.log2_fold_change)
        self.assertIsNone(exon.p_value)
        self.assertEqual(str(exon), 'PSR.hg.1:A1BG:log2FoldChange=None;p-value=None')

    def test_exon_with_data(self):
        """Supplied stats are stored and rendered via %s formatting."""
        # NOTE(review): -0.0 matches the literal found in this copy of the file,
        # which may have been truncated — confirm against the original fixture.
        fold_change = -0.0
        p_val = 0.5347865
        exon = Exon('PSR.hg.1', 'A1BG', log2_fold_change=fold_change, p_value=p_val)
        self.assertEqual(exon.name, 'PSR.hg.1')
        self.assertEqual(exon.gene_symbol, 'A1BG')
        self.assertEqual(exon.log2_fold_change, fold_change)
        self.assertEqual(exon.p_value, p_val)
        expected = 'PSR.hg.1:A1BG:log2FoldChange=%s;p-value=%s' % (float(fold_change), float(p_val))
        self.assertEqual(str(exon), expected)
def downgrade():
    """Drop the privacy-experience tables and their indexes (reverse of upgrade).

    Order matters: history first, then the main table, then the template table,
    mirroring the reverse of creation order.
    """
    for table in ('privacyexperiencehistory', 'privacyexperience', 'privacyexperiencetemplate'):
        op.drop_index(op.f(f'ix_{table}_regions'), table_name=table)
        op.drop_index(op.f(f'ix_{table}_id'), table_name=table)
        op.drop_table(table)
def extractTakemewithyuuHomeBlog(item):
    """Map a release-feed item from this blog to a release message, or None/False."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # No chapter/volume info, or a preview post: not a release.
    if not (chp or vol):
        return None
    if 'preview' in item['title'].lower():
        return None
    # (tag to match, canonical series name, translation type); the empty-tag
    # entry acts as a catch-all for the site's main series.
    tagmap = [
        ('gomene onii-sama', 'Gomen ne, Onii-sama', 'translated'),
        ('', 'Gomen ne, Onii-sama', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def fortios_firewall(data, fos, check_mode):
    """Dispatch the firewall/internet-service-addition task to the FortiOS API.

    Returns ``(failed, changed, response, diff)``; in check mode the raw
    response is returned directly.
    """
    fos.do_member_operation('firewall', 'internet-service-addition')
    if not data['firewall_internet_service_addition']:
        fos._module.fail_json(msg='missing task body: %s' % 'firewall_internet_service_addition')
    else:
        resp = firewall_internet_service_addition(data, fos, check_mode)
    if check_mode:
        return resp
    # 'revision_changed' defaults to True when the API omits it.
    revision_changed = resp['revision_changed'] if 'revision_changed' in resp else True
    return (
        not is_successful_status(resp),
        is_successful_status(resp) and revision_changed,
        resp,
        {},
    )
def vm_deploy(vm, force_stop=False):
    """Queue the deploy command for *vm* on its compute node and return the task.

    With ``force_stop`` the VM is force-stopped first; otherwise it is started.
    Either way the VM's current json is fetched afterwards via ``vmadm get``.
    """
    if force_stop:
        template = 'vmadm stop %s -F >/dev/null 2>/dev/null; vmadm get %s 2>/dev/null'
    else:
        template = 'vmadm start %s >/dev/null 2>/dev/null; vmadm get %s 2>/dev/null'
    cmd = template % (vm.uuid, vm.uuid)
    meta = {
        # Map process outputs onto task-result fields.
        'output': {'returncode': 'returncode', 'stderr': 'message', 'stdout': 'json'},
        # Replace the raw uuid with the hostname in error messages.
        'replace_stderr': ((vm.uuid, vm.hostname),),
        'msg': 'Deploy server',
        'vm_uuid': vm.uuid,
    }
    callback = ('api.vm.base.tasks.vm_deploy_cb', {'vm_uuid': vm.uuid})
    return execute(
        ERIGONES_TASK_USER, None, cmd,
        meta=meta,
        lock='vmadm deploy ' + vm.uuid,
        callback=callback,
        queue=vm.node.fast_queue,
        nolog=True,
        ping_worker=False,
        check_user_tasks=False,
    )
def lazy_import():
    """Load the fastly model classes on demand and expose them as module globals.

    Deferred to avoid circular imports at module load time.
    """
    from fastly.model.historical_ddos import HistoricalDdos
    from fastly.model.historical_ddos_meta import HistoricalDdosMeta
    from fastly.model.platform_ddos_response_all_of import PlatformDdosResponseAllOf
    from fastly.model.platform_ddos_response_data import PlatformDdosResponseData
    # Each class is published under its own name.
    for model in (HistoricalDdos, HistoricalDdosMeta, PlatformDdosResponseAllOf, PlatformDdosResponseData):
        globals()[model.__name__] = model
class Sso(MethodView):
    """SAML2 single-sign-on endpoint (Flask MethodView).

    GET starts the flow by redirecting the browser to the IdP; POST consumes
    the IdP's SAMLResponse, authorises the user and establishes the session.
    """

    def __init__(self):
        # The SAML client is only configured when the app uses saml2 auth;
        # otherwise the handlers below answer 405.
        if (CRACK_CONF['auth']['type'] == 'saml2'):
            self.meta_url = CRACK_CONF['auth']['saml_manifest']
            self.meta_file = CRACK_CONF['auth']['meta_file']
            self.entity_id = CRACK_CONF['auth']['entity_id']
            # Optional IdP group required for access (falsy disables the check).
            self.group = CRACK_CONF['auth']['group']
            self.saml_auth = auth.Saml2(self.meta_url, self.meta_file, self.entity_id)
            self.saml_client = self.saml_auth.s_client()

    def get(self):
        """Start the SSO flow: 302-redirect the user agent to the IdP."""
        if (CRACK_CONF['auth']['type'] == 'saml2'):
            (self.reqid, info) = self.saml_client.prepare_for_authenticate()
            redirect_url = None
            # The prepared request carries the IdP URL in a Location header.
            for (key, value) in info['headers']:
                if (key == 'Location'):
                    redirect_url = value
            response = redirect(redirect_url, code=302)
            return response
        else:
            return (jsonify(ERR_METH_NOT), 405)

    def post(self):
        """Consume the SAMLResponse posted back by the IdP and log the user in."""
        if (not (CRACK_CONF['auth']['type'] == 'saml2')):
            return (jsonify(ERR_METH_NOT), 405)
        saml_resp = request.form['SAMLResponse']
        logger.debug('SAML SSO reponse received:\n {}'.format(saml_resp))
        try:
            saml_parse = self.saml_client.parse_authn_request_response(saml_resp, BINDING_HTTP_POST)
        except sigver.SignatureError as err:
            return ('Invalid Signature', 500)
        except saml2.validate.ResponseLifetimeExceed as err:
            return ('Invalid SAML Request', 500)
        if saml_parse.authn_statement_ok():
            user_info = saml_parse.ava.items()
            groups = []
            # Pull username/email/groups out of the SAML attribute assertions.
            # NOTE(review): ``username`` is only bound when a 'name' attribute is
            # present — a response without one raises NameError below; and the
            # ``else: email = None`` resets email on every non-email attribute,
            # so only an 'email' key in the *last* iteration survives. Confirm
            # whether this matches the IdP's attribute ordering.
            for (key, val) in user_info:
                if ('name' in key):
                    username = val[0]
                if ('email' in key):
                    email = val[0]
                else:
                    email = None
                if (self.group and ('Group' in key)):
                    groups = val
            # Enforce group membership when a required group is configured.
            if self.group:
                if (len(groups) > 0):
                    if (self.group not in groups):
                        logger.debug('User authorised, but not in valid domain group')
                        return ('User is not authorised to use this service', 401)
                else:
                    logger.debug('No groups returned in SAML response')
                    return ('User is not authorised to use this service', 401)
            logger.debug('Authenticated: {}'.format(username))
            # Existing users get a fresh session; new ones are created first.
            user = load_user(username)
            if user:
                crackq.app.session_interface.regenerate(session)
            elif email_check(email):
                create_user(username, email=email)
            else:
                create_user(username)
            user = load_user(username)
            if isinstance(user, User):
                crackq.app.session_interface.regenerate(session)
                login_user(user)
            else:
                logger.error('No user object loaded')
                return (jsonify(ERR_BAD_CREDS), 401)
            return redirect('/')
        else:
            logger.debug('Login error')
            return (jsonify(ERR_BAD_CREDS), 401)
class Endpoint():
    """Finite-state-machine model of a network endpoint.

    Two transition tables are defined: the main
    known/unknown/operating/queued lifecycle, and a parallel coprocessing
    lifecycle. The actual machines are attached externally as
    ``self.machine`` / ``self.copro_machine`` (see machine_trigger /
    copro_machine_trigger), which also provide the event methods such as
    ``self.queue()`` and ``self.unknown()`` used below.
    """

    # Main lifecycle: (event, source state, destination state).
    states = ['known', 'unknown', 'operating', 'queued']
    transitions = [
        endpoint_transit_wrap('operate', 'unknown', 'operating'),
        endpoint_transit_wrap('queue', 'unknown', 'queued'),
        endpoint_transit_wrap('operate', 'known', 'operating'),
        endpoint_transit_wrap('queue', 'known', 'queued'),
        endpoint_transit_wrap('operate', 'queued', 'operating'),
        endpoint_transit_wrap('known', 'known', 'known'),
        endpoint_transit_wrap('unknown', 'known', 'unknown'),
        endpoint_transit_wrap('known', 'unknown', 'known'),
        endpoint_transit_wrap('unknown', 'unknown', 'unknown'),
        endpoint_transit_wrap('known', 'operating', 'known'),
        endpoint_transit_wrap('unknown', 'operating', 'unknown'),
        endpoint_transit_wrap('known', 'queued', 'known'),
        endpoint_transit_wrap('unknown', 'queued', 'unknown')]
    # Coprocessing lifecycle, kept separate from the main one.
    copro_states = ['copro_unknown', 'copro_coprocessing', 'copro_nominal', 'copro_suspicious', 'copro_queued']
    copro_transitions = [
        endpoint_copro_transit_wrap('copro_coprocess', 'copro_unknown', 'copro_coprocessing'),
        endpoint_copro_transit_wrap('copro_queue', 'copro_unknown', 'copro_queued'),
        endpoint_copro_transit_wrap('copro_coprocess', 'copro_queued', 'copro_coprocessing'),
        endpoint_copro_transit_wrap('copro_nominal', 'copro_coprocessing', 'copro_nominal'),
        endpoint_copro_transit_wrap('copro_nominal', 'copro_queued', 'copro_nominal'),
        endpoint_copro_transit_wrap('copro_suspicious', 'copro_coprocessing', 'copro_suspicious'),
        endpoint_copro_transit_wrap('copro_queue', 'copro_nominal', 'copro_queued'),
        endpoint_copro_transit_wrap('copro_coprocess', 'copro_nominal', 'copro_coprocessing'),
        endpoint_copro_transit_wrap('copro_queue', 'copro_suspicious', 'copro_queued'),
        endpoint_copro_transit_wrap('copro_coprocess', 'copro_suspicious', 'copro_coprocessing')]

    def __init__(self, hashed_val):
        # ``hashed_val`` is presumably the digest produced by make_hash below —
        # confirm against callers.
        self.name = hashed_val.strip()
        self.ignore = False
        self.copro_ignore = False
        self.endpoint_data = None
        # Pending / previous transition names for each machine.
        self.p_next_state = None
        self.p_prev_state = None
        self.p_next_copro_state = None
        self.p_prev_copro_state = None
        self.acl_data = []
        self.metadata = {}
        self.state = None
        self.copro_state = None
        # Wall-clock timestamps of last state changes / last observation.
        self.state_time = 0
        self.copro_state_time = 0
        self.observed_time = 0

    def _update_state_time(self, *args, **kwargs):
        # Transition callback: record when the main state last changed.
        self.state_time = time.time()

    def _update_copro_state_time(self, *args, **kwargs):
        # Transition callback for the coprocessing machine.
        self.copro_state_time = time.time()

    def encode(self):
        """Serialize the endpoint to a JSON string (copro p_* fields are not included)."""
        endpoint_d = {'name': self.name, 'state': self.state, 'copro_state': self.copro_state, 'ignore': self.ignore, 'endpoint_data': self.endpoint_data, 'p_next_state': self.p_next_state, 'p_prev_state': self.p_prev_state, 'acl_data': self.acl_data, 'metadata': self.metadata, 'observed_time': self.observed_time}
        return str(json.dumps(endpoint_d))

    def mac_addresses(self):
        # Per-MAC metadata dict; empty when nothing has been recorded yet.
        return self.metadata.get('mac_addresses', {})

    def get_roles_confidences_pcap_labels(self):
        """Return ((top 3 roles), (top 3 confidences), pcap_labels) from MAC metadata.

        Defaults to NO_DATA / '0' placeholders; when several MACs carry
        classifications, the values of the last MAC iterated win.
        """
        top_role = NO_DATA
        second_role = NO_DATA
        third_role = NO_DATA
        top_conf = '0'
        second_conf = '0'
        third_conf = '0'
        pcap_labels = NO_DATA
        for metadata in self.mac_addresses().values():
            classification = metadata.get('classification', {})
            if ('labels' in classification):
                (top_role, second_role, third_role) = classification['labels'][:3]
            if ('confidences' in classification):
                (top_conf, second_conf, third_conf) = classification['confidences'][:3]
            metadata_pcap_labels = metadata.get('pcap_labels', None)
            if metadata_pcap_labels:
                pcap_labels = metadata_pcap_labels
        return ((top_role, second_role, third_role), (top_conf, second_conf, third_conf), pcap_labels)

    def get_ipv4_os(self):
        """Return the short OS name recorded for this endpoint's ipv4, or NO_DATA."""
        if ('ipv4_addresses' in self.metadata):
            ipv4 = self.endpoint_data['ipv4']
            for (ip, ip_metadata) in self.metadata['ipv4_addresses'].items():
                if (ip == ipv4):
                    if ('short_os' in ip_metadata):
                        return ip_metadata['short_os']
        return NO_DATA

    def touch(self):
        # Mark the endpoint as just observed.
        self.observed_time = time.time()

    def observed_timeout(self, timeout):
        # True when the endpoint has not been observed within *timeout* seconds.
        return ((time.time() - self.observed_time) > timeout)

    def state_age(self):
        # Seconds since the last main-state change.
        return (int(time.time()) - self.state_time)

    def state_timeout(self, timeout):
        return (self.state_age() > timeout)

    def copro_state_age(self):
        return (int(time.time()) - self.copro_state_time)

    def copro_state_timeout(self, timeout):
        return (self.copro_state_age() > timeout)

    def queue_next(self, next_state):
        # Remember the transition to fire later, then move to 'queued'.
        self.p_next_state = next_state
        self.queue()

    def machine_trigger(self, state):
        # Fire a named event on the externally-attached transitions machine.
        self.machine.events[state].trigger(self)

    def trigger_next(self):
        # Fire the queued transition (if any) and clear it.
        self.p_prev_state = self.state
        if self.p_next_state:
            self.machine_trigger(self.p_next_state)
            self.p_next_state = None

    def copro_queue_next(self, next_state):
        self.p_next_copro_state = next_state
        self.copro_queue()

    def copro_machine_trigger(self, state):
        self.copro_machine.events[state].trigger(self)

    def copro_trigger_next(self):
        if self.p_next_copro_state:
            self.copro_machine_trigger(self.p_next_copro_state)
            self.p_next_copro_state = None

    def operation_active(self):
        return (self.state == 'operating')

    def operation_requested(self, next_state=None):
        # Defaults to the queued transition when no explicit one is given.
        if (next_state is None):
            next_state = self.p_next_state
        return (next_state == 'operate')

    def force_unknown(self):
        # Drop back to 'unknown' and discard any queued transition.
        self.unknown()
        self.p_next_state = None

    def default(self):
        """Reset a non-ignored endpoint to 'unknown', preserving its intent in p_next_state."""
        if (not self.ignore):
            if (self.state != 'unknown'):
                if (self.state == 'operating'):
                    self.p_next_state = 'operate'
                elif (self.state == 'queued'):
                    self.p_next_state = 'queue'
                elif (self.state == 'known'):
                    self.p_next_state = self.state
                self.unknown()


def make_hash(machine, trunk=False):
    """Return a sha256 hex digest identifying *machine*.

    Hashes the tenant/mac/segment fields; with ``trunk=True`` the ipv4 and
    ipv6 fields are included as well. Missing fields hash as the literal
    string 'missing'.
    """
    h = hashlib.sha256()
    words = ['tenant', 'mac', 'segment']
    if trunk:
        words.append('ipv4')
        words.append('ipv6')
    pre_h = ''.join([str(machine.get(word, 'missing')) for word in words])
    h.update(pre_h.encode('utf-8'))
    post_h = h.hexdigest()
    return post_h
class OptionSeriesDumbbellLabelStyle(Options):
    """Label style options for dumbbell series.

    NOTE(review): in this copy each getter/setter pair shared one bare ``def``
    name, so the second definition silently shadowed the first. Restored the
    intended ``@property`` / ``@<name>.setter`` pairing used by these
    generated Options wrappers — confirm against the generator's output.
    """

    @property
    def fontSize(self):
        # Default label font size.
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def fontWeight(self):
        # Default label font weight.
        return self._config_get('bold')

    @fontWeight.setter
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
def test_header_chain_get_vm_class_for_block_number(base_db, genesis_header):
    """VM_A is active for blocks 0-9 and VM_B from block 10 onwards."""
    chain = ChainForTesting.from_genesis_header(base_db, genesis_header)
    # Every block before the fork height resolves to VM_A.
    for block_number in range(0, 10):
        assert chain.get_vm_class_for_block_number(block_number) is VM_A
    # The fork block and a sample of later heights resolve to VM_B.
    assert chain.get_vm_class_for_block_number(10) is VM_B
    for block_number in range(11, 100, 5):
        assert chain.get_vm_class_for_block_number(block_number) is VM_B
def check_lastfm(force_check=False):
    """Return whether Last.fm search is usable.

    When forced, or when 'lastfm-search' is an active search provider, the
    answer is the live connection status. Otherwise Last.fm search is not
    configured, so True is returned and callers do not block on it.

    Fixes: removed leftover debug ``print`` calls, and dropped the trailing
    ``return False`` branch, which was unreachable (the first two branches
    together cover all cases).
    """
    providers = get_search_providers()
    if force_check or 'lastfm-search' in providers:
        return lastfm_connected()
    # Provider not configured: nothing to check.
    return True
class OptionSeriesLineSonificationTracksMappingPan(Options):
    """Pan-mapping options for line-series sonification tracks.

    NOTE(review): each getter/setter pair below shared one bare ``def`` name,
    so the setter silently shadowed the getter. Restored the standard
    ``@property`` / ``@<name>.setter`` pairing — confirm against the code
    generator's original output.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsItemSonificationContexttracksActivewhen(Options):
    """Activation-condition options for sonification context tracks.

    NOTE(review): each getter/setter pair below shared one bare ``def`` name,
    so the setter silently shadowed the getter. Restored the standard
    ``@property`` / ``@<name>.setter`` pairing — confirm against the code
    generator's original output.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class TextNotesTab(QWidget):
    """Tab listing text notes that are not in the queue, with live search."""

    def __init__(self, parent):
        # Fix: initialize the Qt base class *before* assigning Python
        # attributes — PyQt raises a RuntimeError when attributes are set on a
        # QWidget whose C++ side has not been constructed yet.
        QWidget.__init__(self)
        self.parent = parent
        self.setup_ui()

    def setup_ui(self):
        """Build the label / search bar / note list layout."""
        self.vbox_right = QVBoxLayout()
        r_lbl = QLabel('Text notes, not in Queue')
        r_lbl.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.vbox_right.addWidget(r_lbl)
        # Search bar filters the list as the user types.
        self.search_bar_right = QLineEdit()
        self.search_bar_right.setPlaceholderText('Type to search')
        self.search_bar_right.textChanged.connect(self.search_enter)
        self.vbox_right.addWidget(self.search_bar_right)
        self.t_view_right = NoteList(self)
        self.vbox_right.addWidget(self.t_view_right)
        self.vbox_right.setAlignment(Qt.AlignmentFlag.AlignHCenter)
        self.setLayout(self.vbox_right)

    def refresh(self):
        """Clear the search and reload the unqueued notes from the database."""
        self.search_bar_right.clear()
        self.fill_list(get_text_notes_not_in_queue())

    def fill_list(self, db_list):
        self.t_view_right.fill(db_list)

    def search_enter(self):
        """Filter the list by the search text; blank input shows everything."""
        inp = self.search_bar_right.text()
        if (inp is None) or (len(inp.strip()) == 0):
            self.fill_list(get_text_notes_not_in_queue())
            return
        res = find_unqueued_text_notes(inp)
        self.fill_list(res)
@pytest.mark.integration()
def test_delete_simple_folder(random_file, fsspec_write_test_folder):
    """Deleting a non-empty folder requires recursive=True on FoundryFileSystem.

    NOTE(review): the decorator above appeared as a bare '.integration()' in
    this copy of the file; reconstructed as a pytest mark — confirm against
    the original source.
    """
    random_folder = random_file.get()
    file1 = random_file.get()
    file1_in_folder = f'{random_folder}/{file1}'
    file2 = random_file.get()
    file2_in_folder = f'{random_folder}/{file2}'
    fs = FoundryFileSystem(dataset=fsspec_write_test_folder[0], branch='master')
    # Write two files into the folder inside a single transaction.
    with fs.transaction:
        with fs.open(file1_in_folder, 'w') as f:
            f.write('content')
        with fs.open(file2_in_folder, 'w') as f:
            f.write('content2')
    ls_result = fs.ls(random_folder, detail=False)
    assert (file1_in_folder in ls_result)
    assert (file2_in_folder in ls_result)
    # Non-recursive delete of a folder must fail and leave it in place.
    with pytest.raises(IsADirectoryError):
        fs.rm(random_folder)
    assert fs.exists(random_folder)
    # Recursive delete removes the folder and its contents.
    fs.rm(random_folder, recursive=True)
    assert (not fs.exists(random_folder))
class OptionSeriesHeatmapSonificationDefaultinstrumentoptionsPointgrouping(Options):
    """Point-grouping options for heatmap sonification instruments.

    NOTE(review): each getter/setter pair below shared one bare ``def`` name,
    so the setter silently shadowed the getter. Restored the standard
    ``@property`` / ``@<name>.setter`` pairing — confirm against the code
    generator's original output.
    """

    @property
    def algorithm(self):
        # Default grouping algorithm.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Grouping is on by default.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Default grouping window (milliseconds).
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Property used to decide which point of a group is kept.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def unpackage():
    """Restore a compressed environment archive into the envs directory."""
    envs_dir = getRootDir.getEnvsDir()
    archive = getArg.getArg(0)
    # Guard clauses: argument present, running as root, archive exists.
    if not archive:
        sys.exit(text.unpackageHelper)
    if not isRoot.isRoot():
        sys.exit(text.notRoot)
    if not remove.checkIfExists(archive):
        sys.exit(text.packageDoesntExist)
    # Environment name = archive basename without its extension.
    env = archive.split('/')[-1].split('.')[0]
    if remove.checkIfExists(envs_dir + env):
        sys.exit(text.envAlreadyExists)
    decompress(archive, envs_dir + env)
    print(text.unpackagedSuccessful)
def test_does_not_count_multiple_kicks(paragon_chain):
    """A signer voting to kick the same member twice is only tallied once."""
    head = paragon_chain.get_canonical_head()
    clique = get_clique(paragon_chain)
    snapshot = validate_seal_and_get_snapshot(clique, head)
    assert not snapshot.tallies

    # Alice votes Bob in; both become signers.
    vote_in = make_next_header(paragon_chain, head, ALICE_PK, coinbase=BOB, nonce=NONCE_AUTH)
    snapshot = validate_seal_and_get_snapshot(clique, vote_in)
    assert snapshot.get_sorted_signers() == [ALICE, BOB]

    # The first kick vote is tallied.
    kick_one = make_next_header(paragon_chain, vote_in, ALICE_PK, coinbase=BOB, nonce=NONCE_DROP)
    snapshot = validate_seal_and_get_snapshot(clique, kick_one)
    assert snapshot.get_sorted_signers() == [ALICE, BOB]
    assert snapshot.tallies[BOB].action == VoteAction.KICK
    assert snapshot.tallies[BOB].votes == 1

    # A repeated kick from the same signer must not increase the tally.
    kick_two = make_next_header(paragon_chain, kick_one, ALICE_PK, coinbase=BOB, nonce=NONCE_DROP, difficulty=1)
    snapshot = validate_seal_and_get_snapshot(clique, kick_two)
    assert snapshot.get_sorted_signers() == [ALICE, BOB]
    assert snapshot.tallies[BOB].action == VoteAction.KICK
    assert snapshot.tallies[BOB].votes == 1
def upgrade():
    """Add non-nullable 'autotime' and 'stable_days' columns to 'updates'.

    Each column is added nullable, backfilled with a default, and only then
    tightened to NOT NULL, because existing rows need a value before the
    constraint can hold.
    """
    for name, column_type, default_sql in (
        ('autotime', sa.Boolean(), 'FALSE'),
        ('stable_days', sa.Integer(), '0'),
    ):
        op.add_column('updates', sa.Column(name, column_type))
        op.execute(f'UPDATE updates SET {name}={default_sql}')
        op.alter_column('updates', name, existing_type=column_type, nullable=False)
@pytest.mark.requires_window_manager
@patch('ert.gui.tools.run_analysis.run_analysis_tool.QMessageBox')
@patch('ert.gui.tools.run_analysis.run_analysis_tool.smoother_update')
@patch.object(RunAnalysisTool, '_enable_dialog', new=(lambda self, enable: None))
def test_success(mock_esupdate, mock_msgbox, mock_tool, qtbot):
    """Running the analysis tool reports success via an information dialog.

    NOTE(review): the decorator stack above was mangled in this copy of the
    file (only the target strings survived); reconstructed as pytest mark +
    mock patches so that ``smoother_update`` -> mock_esupdate and
    ``QMessageBox`` -> mock_msgbox (bottom-up order) — confirm against the
    original source.
    """
    mock_tool.run()
    # Wait for the asynchronous run to surface its information dialog.
    qtbot.waitUntil((lambda : (len(mock_msgbox.information.mock_calls) > 0)), timeout=2000)
    mock_msgbox.critical.assert_not_called()
    mock_msgbox.information.assert_called_once_with(None, 'Analysis finished', "Successfully ran analysis for case 'source'.")
    mock_esupdate.assert_called_once()
    mock_tool._dialog.accept.assert_called_once()
def test_bkz_bkz():
    """BKZ reduction must give the same leading basis entry for every float type."""
    for m, n in dimensions:
        # BKZ needs at least a 2x2 lattice.
        if m < 2 or n < 2:
            continue
        A = make_integer_matrix(m, n)
        first_entries = []
        for float_type in float_types:
            B = copy(A)
            gso = GSO.Mat(B, float_type=float_type)
            lll_reduction = LLL.Reduction(gso)
            params = BKZ.Param(block_size=min(m, 40), strategies=BKZ.DEFAULT_STRATEGY)
            BKZ.Reduction(gso, lll_reduction, params)()
            first_entries.append(B[0, 0])
        # All float types must agree on the reduced basis' first entry.
        for entry in first_entries[1:]:
            assert first_entries[0] == entry
def _maybe_add_example_with_dropped_nonviolated_prompt_categories(training_example: TrainingExample, formatted_examples_being_built: List[str], indices_of_all_categories: range, nonviolated_category_indices: List[int], formatter_configs: FormatterConfigs) -> None:
    """Optionally append an augmented copy of the example whose prompt drops
    some categories that were not violated.

    No-op unless the corresponding augmentation flag is enabled in
    *formatter_configs*.
    """
    augmentation = formatter_configs.augmentation_configs
    if not augmentation.should_add_examples_with_dropped_nonviolated_prompt_categories:
        return
    # Choose how many non-violated categories to drop — but never all of them.
    drop_count = random.randint(0, len(nonviolated_category_indices))
    if drop_count == len(indices_of_all_categories):
        drop_count -= 1
    dropped = random.sample(nonviolated_category_indices, drop_count)
    retained = list(set(indices_of_all_categories) - set(dropped))
    formatted_examples_being_built.append(
        _create_formatted_finetuning_example(
            training_example,
            formatter_configs,
            category_indices_to_include_in_llama_guard_prompt=retained,
        )
    )
class TestGetConnectionDataset():
    """API tests for fetching a dataset attached to a connection."""

    def test_get_dataset_not_authenticated(self, dataset_config, connection_config, api_client) -> None:
        # No auth header at all -> 401.
        dataset_url = get_connection_dataset_url(connection_config, dataset_config)
        response = api_client.get(dataset_url, headers={})
        assert (response.status_code == 401)

    def test_get_dataset_wrong_scope(self, dataset_config, connection_config, api_client: TestClient, generate_auth_header) -> None:
        # Authenticated, but without the DATASET_READ scope -> 403.
        dataset_url = get_connection_dataset_url(connection_config, dataset_config)
        auth_header = generate_auth_header(scopes=[DATASET_CREATE_OR_UPDATE])
        response = api_client.get(dataset_url, headers=auth_header)
        assert (response.status_code == 403)

    def test_get_dataset_does_not_exist(self, dataset_config, connection_config, api_client: TestClient, generate_auth_header) -> None:
        # URL built without a dataset -> 404.
        dataset_url = get_connection_dataset_url(connection_config, None)
        auth_header = generate_auth_header(scopes=[DATASET_READ])
        response = api_client.get(dataset_url, headers=auth_header)
        assert (response.status_code == 404)

    def test_get_dataset_invalid_connection_key(self, dataset_config, connection_config, api_client: TestClient, generate_auth_header) -> None:
        dataset_url = get_connection_dataset_url(None, dataset_config)
        # Fix: str.replace returns a new string; the original discarded the
        # result, so the URL was queried unmodified.
        dataset_url = dataset_url.replace(connection_config.key, 'nonexistent_key')
        auth_header = generate_auth_header(scopes=[DATASET_READ])
        response = api_client.get(dataset_url, headers=auth_header)
        assert (response.status_code == 404)

    def test_get_dataset(self, dataset_config, connection_config, api_client: TestClient, generate_auth_header):
        # Happy path: the dataset is returned with its collections.
        dataset_url = get_connection_dataset_url(connection_config, dataset_config)
        auth_header = generate_auth_header(scopes=[DATASET_READ])
        response = api_client.get(dataset_url, headers=auth_header)
        assert (response.status_code == 200)
        response_body = json.loads(response.text)
        assert (response_body['fides_key'] == dataset_config.fides_key)
        assert (len(response_body['collections']) == 1)
def main(args: Optional[List[str]]=None) -> Any:
    """Entry point for the rich-click CLI wrapper.

    Resolves the first argument to either an installed console script or an
    explicit ``module.path:function`` target, rewrites ``sys.argv`` for the
    target, patches click with rich output, and invokes the target function.
    """
    args = (args or sys.argv[1:])
    if ((not args) or (args == ['--help'])):
        # Invoked bare or with --help: show our own usage, not a target's.
        _print_usage()
        _print_help()
        sys.exit(0)
    else:
        script_name = args[0]
        # Map installed console-script names to their entry points.
        scripts = {script.name: script for script in entry_points(group='console_scripts')}
        if (script_name in scripts):
            script = scripts[script_name]
            (module_path, function_name) = script.value.split(':', 1)
            prog = script_name
        elif (':' in script_name):
            # Explicit 'module.path:function' target; prog becomes the top package name.
            (module_path, function_name) = args[0].split(':', 1)
            prog = module_path.split('.', 1)[0]
        else:
            # Unknown script: print a styled error and exit non-zero.
            _print_usage()
            console.print(Panel(Text.from_markup(f'No such script: [bold]{script_name}[/]'), border_style=STYLE_ERRORS_PANEL_BORDER, title=ERRORS_PANEL_TITLE, title_align=ALIGN_ERRORS_PANEL))
            console.print(Padding('Please run [yellow bold]rich-click --help[/] for usage information.', (0, 1)), style='dim')
            sys.exit(1)
    # A literal '--' separator after the script name is swallowed.
    if (len(args) > 1):
        if (args[1] == '--'):
            del args[1]
    # Make the target believe it was invoked directly.
    sys.argv = [prog, *args[1:]]
    patch()
    module = import_module(module_path)
    function = getattr(module, function_name)
    return function()
def main():
    """Ansible module: import, export or remove a Zabbix template.

    ``state`` selects the action:
      * present/import -- import the template file given in ``template``
      * absent         -- delete the template named ``template``
      * export         -- export template ``template`` to ``target``

    NOTE(review): each ``raise AssertionError`` after a ``module.fail_json``
    call is an unreachable guard -- fail_json exits the process; the raise
    only documents that control never continues past it.
    """
    module = AnsibleModule(argument_spec=dict(server_url=dict(required=True, default=None, aliases=['url']), login_user=dict(required=True), login_password=dict(required=True), template=dict(required=True), state=dict(default='present'), timeout=dict(default=10, type='int'), target=dict(required=False), format=dict(required=False, default='xml'), validate_certs=dict(required=False, default=False, type='bool')), supports_check_mode=True)
    if (not ZabbixAPI):
        # The zabbix-api import failed at module load time.
        module.fail_json(msg='Missing required zabbix-api module (check docs or install with: pip install zabbix-api-erigones)')
        raise AssertionError
    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    template = module.params['template']
    state = module.params['state']
    timeout = module.params['timeout']
    target = module.params['target']
    fmt = module.params['format']
    ssl_verify = module.params['validate_certs']
    try:
        zbx = ZabbixAPI(server=server_url, timeout=timeout, ssl_verify=ssl_verify)
        zbx.login(login_user, login_password)
    except Exception as e:
        module.fail_json(msg=('Failed to connect to Zabbix server: %s' % e))
        raise AssertionError
    changed = False
    if (state in ('present', 'import')):
        # For import, ``template`` is a path to the template file on disk.
        if os.path.exists(template):
            template_filename = template
            template_name = get_template_name(module, template_filename)
        else:
            module.fail_json(msg=('%s not found' % template))
            raise AssertionError
    elif (state == 'absent'):
        template_name = template
    elif (state == 'export'):
        template_name = template
        if (target is None):
            module.fail_json(msg=('with state=%s target is required' % state))
            raise AssertionError
    else:
        module.fail_json(msg=("Invalid state: '%s'" % state))
        raise AssertionError
    if module.check_mode:
        # Check mode: report a would-be change without touching the server.
        module.exit_json(changed=True)
    if (state in ('present', 'import')):
        import_template(module, zbx, template_filename, fmt)
        module.exit_json(changed=True)
    # absent/export only do work when the template actually exists.
    if check_template(module, zbx, template_name):
        if (state == 'absent'):
            template_id = get_template_id(module, zbx, template_name)
            remove_template(module, zbx, template_id)
            changed = True
        if (state == 'export'):
            template_id = get_template_id(module, zbx, template_name)
            export_template(module, zbx, template_id, target, fmt)
            changed = True
    module.exit_json(changed=changed)
class TestBootstrapHookHandler():
    """Exercises team.BootstrapHookHandler with stub loader/hook classes."""

    class UnitTestComponentLoader():
        # Minimal stand-in for the real component loader: ``load`` simply
        # hands back whatever was stored in ``registration_function``.
        def __init__(self, root_path, component_entry_point, recurse):
            self.root_path = root_path
            self.component_entry_point = component_entry_point
            self.recurse = recurse
            self.registration_function = None

        def load(self):
            return self.registration_function

    class UnitTestHook():
        # Registers the same callback twice, so one invoke fires it twice.
        def __init__(self, phase='post_install'):
            self.phase = phase
            self.call_counter = 0

        def post_install_hook(self, config_names, variables, **kwargs):
            self.call_counter += variables['increment']

        def register(self, handler):
            handler.register(self.phase, self.post_install_hook)
            handler.register(self.phase, self.post_install_hook)

    def test_loads_module(self):
        descriptor = team.PluginDescriptor('unittest-plugin')
        test_hook = self.UnitTestHook()
        hook_handler = team.BootstrapHookHandler(descriptor, loader_class=self.UnitTestComponentLoader)
        hook_handler.loader.registration_function = test_hook
        hook_handler.load()
        hook_handler.invoke('post_install', variables={'increment': 4})
        # Callback registered twice with increment 4 -> counter ends at 8.
        assert (test_hook.call_counter == (2 * 4))

    def test_cannot_register_for_unknown_phase(self):
        descriptor = team.PluginDescriptor('unittest-plugin')
        test_hook = self.UnitTestHook(phase='this_is_an_unknown_install_phase')
        hook_handler = team.BootstrapHookHandler(descriptor, loader_class=self.UnitTestComponentLoader)
        hook_handler.loader.registration_function = test_hook
        with pytest.raises(exceptions.SystemSetupError) as exc:
            hook_handler.load()
        assert (exc.value.args[0] == "Unknown bootstrap phase [this_is_an_unknown_install_phase]. Valid phases are: ['post_install'].")
def test_load_data(mocker):
    """JSONLoader.load_data is patched to return a canned document; verify the
    loader hands back the doc_id and data payload untouched."""
    source = 'temp.json'
    digest_input = source + ', '.join(['content1', 'content2'])
    expected_doc_id = hashlib.sha256(digest_input.encode()).hexdigest()
    expected_data = [
        {'content': 'content1', 'meta_data': {'url': source}},
        {'content': 'content2', 'meta_data': {'url': source}},
    ]
    mocker.patch(
        'embedchain.loaders.json.JSONLoader.load_data',
        return_value={'doc_id': expected_doc_id, 'data': expected_data},
    )
    result = JSONLoader().load_data(source)
    assert 'doc_id' in result
    assert 'data' in result
    assert result['data'] == expected_data
    assert result['doc_id'] == expected_doc_id
# NOTE(review): the expression below appears truncated by extraction -- it
# reads like the argument list of an HTTP GET route decorator (e.g.
# ``@router.get(`` ... ``)``) whose prefix was lost.  As written it is not
# valid Python; restore the decorator against the original source.
(urls.POLICY_DETAIL, status_code=HTTP_200_OK, response_model=schemas.PolicyResponse, dependencies=[Security(verify_oauth_client, scopes=[scope_registry.POLICY_READ])])
def get_policy(*, policy_key: FidesKey, db: Session=Depends(deps.get_db)) -> schemas.Policy:
    """Return the policy identified by *policy_key*.

    Lookup and not-found handling are delegated to ``get_policy_or_error``
    (presumably raising a 404-style error when missing -- confirm there).
    """
    return get_policy_or_error(db, policy_key)
def dq_a2(m0, m1, m2, o0, o1, o2, n0, n1, n2):
    """Machine-generated (CSE'd symbolic) derivative kernel over three 3-D
    points ``m``, ``n`` and ``o``.

    NOTE(review): auto-generated code -- do not hand-edit the expressions.
    Presumably the gradient of an angle-style quantity built from the
    vectors (n - o) and (m - o); confirm against the code generator before
    relying on the exact semantics.  Returns a flat length-9 float64 array
    (three partial-derivative triples, ordered n, o, m per the expressions).
    """
    # x1,x4,x8 = components of n - o; x2,x5,x9 = components of m - o.
    x0 = (- o0)
    x1 = (n0 + x0)
    x2 = (m0 + x0)
    x3 = (- o1)
    x4 = (n1 + x3)
    x5 = (m1 + x3)
    x6 = (((- x1) * x5) + (x2 * x4))
    x7 = (- o2)
    x8 = (n2 + x7)
    x9 = (m2 + x7)
    x10 = ((x1 * x9) - (x2 * x8))
    x11 = (((- x4) * x9) + (x5 * x8))
    # x12/x13: squared norm and norm of the cross product; x14: dot product.
    x12 = (((x10 ** 2) + (x11 ** 2)) + (x6 ** 2))
    x13 = math.sqrt(x12)
    x14 = (((x1 * x2) + (x4 * x5)) + (x8 * x9))
    x15 = ((x12 + (x14 ** 2)) ** (- 1))
    x16 = (x13 * x15)
    x17 = (2 * n1)
    x18 = ((- 2) * o1)
    x19 = (x17 + x18)
    x20 = ((1 / 2) * x6)
    x21 = (2 * n2)
    x22 = ((- 2) * o2)
    x23 = (x21 + x22)
    x24 = ((1 / 2) * x10)
    x25 = (x13 ** (- 1))
    x26 = (2 * n0)
    x27 = ((- 2) * o0)
    x28 = (x26 + x27)
    x29 = ((1 / 2) * x11)
    x30 = (2 * m1)
    x31 = ((- x17) + x30)
    x32 = (2 * m2)
    x33 = ((- x21) + x32)
    x34 = ((x14 * x15) * x25)
    x35 = (2 * m0)
    x36 = ((- x26) + x35)
    x37 = (x18 + x30)
    x38 = (x22 + x32)
    x39 = (x27 + x35)
    return np.array([(((- x1) * x16) + (((x14 * x15) * x25) * ((x19 * x20) - (x23 * x24)))), ((((x14 * x15) * x25) * (((- x20) * x28) + (x23 * x29))) - (x16 * x4)), ((((x14 * x15) * x25) * (((- x19) * x29) + (x24 * x28))) - (x16 * x8)), (((- x16) * (((- m0) - n0) - x27)) + (x34 * ((x20 * x31) - (x24 * x33)))), (((- x16) * (((- m1) - n1) - x18)) + (x34 * (((- x20) * x36) + (x29 * x33)))), (((- x16) * (((- m2) - n2) - x22)) + (x34 * ((x24 * x36) - (x29 * x31)))), ((((x14 * x15) * x25) * (((- x20) * x37) + (x24 * x38))) - (x16 * x2)), ((((x14 * x15) * x25) * ((x20 * x39) - (x29 * x38))) - (x16 * x5)), ((((x14 * x15) * x25) * (((- x24) * x39) + (x29 * x37))) - (x16 * x9))], dtype=np.float64)
def test_wf1_with_list_of_inputs():
    """Workflow test: a task's string output is combined into a list input
    for a second task.

    NOTE(review): extraction appears to have stripped decorators here -- in
    flytekit tests ``t1``/``t2`` are normally ``@task`` and ``my_wf``/
    ``my_wf2`` ``@workflow``.  Restore them against the original source.
    """

    def t1(a: int) -> typing.NamedTuple('OutputsBC', t1_int_output=int, c=str):
        return ((a + 2), 'world')

    def t2(a: typing.List[str]) -> str:
        return ' '.join(a)

    def my_wf(a: int, b: str) -> (int, str):
        (xx, yy) = t1(a=a)
        d = t2(a=[b, yy])
        return (xx, d)

    x = my_wf(a=5, b='hello')
    assert (x == (7, 'hello world'))

    def my_wf2(a: int, b: str) -> int:
        # Same wiring, but t2's result is deliberately discarded.
        (x, y) = t1(a=a)
        t2(a=[b, y])
        return x

    x = my_wf2(a=5, b='hello')
    assert (x == 7)
    # No leaked Flyte contexts after running the workflows.
    assert (context_manager.FlyteContextManager.size() == 1)
def main(args):
    """Load a BVH clip, flatten its global joint positions per frame, and
    report motion segments detected as peaks of the acceleration norm."""
    clip = bvh.load(args.input_file)
    frame_count = clip.num_frames()
    flat_positions = clip.positions(local=False).reshape(frame_count, -1)
    print('Total length (in s) ', clip.length())
    # Second temporal difference approximates acceleration; its norm spikes
    # at abrupt motion changes.
    accel = np.diff(flat_positions, n=2, axis=0)
    accel_norm = np.linalg.norm(accel, axis=1)
    segments = find_peaks(accel_norm, height=0.075)[0]
    print('Number of detected segments ', len(segments))
    if args.output_plot is not None:
        plot_changepoints(accel_norm, segments, args.output_plot)
def bump_version(v: version.Version, level: str) -> str:
    """Return the PEP 440 version string obtained by bumping *v* at *level*.

    Supported levels: 'major', 'minor', 'patch', 'alpha', 'beta', 'post',
    'dev'.  Pre-release stage letters are compared lexically ('a' < 'b'),
    which the ordering checks below rely on.
    """
    release: List[int] = list(v.release)
    stage: Optional[str]
    pre: Optional[int]
    (stage, pre) = (v.pre if v.pre else (None, None))
    dev: Optional[int] = v.dev
    post: Optional[int] = v.post
    if (level in ('major', 'minor', 'patch')):
        # segments = how many leading release components the level spans.
        segments = 0
        if (level == 'major'):
            segments = 1
        elif (level == 'minor'):
            segments = 2
        elif (level == 'patch'):
            segments = 3
        if ((not any(release[segments:])) and ((stage is not None) or (dev is not None))):
            # Already a pre/dev of the target release (e.g. 2.0.0a1 + major):
            # only strip the tags below, do not bump the number again.
            pass
        else:
            release[(segments - 1)] += 1
            release[segments:] = ([0] * max((len(release) - segments), 0))
        stage = pre = post = dev = None
    elif (level == 'alpha'):
        if (stage is None):
            if (dev is None):
                # Final release -> first alpha of the next patch release.
                release[(- 1)] += 1
                (stage, pre) = ('a', 1)
            # NOTE(review): when stage is None but dev is set, no alpha tag
            # is attached and the version collapses to its release part --
            # confirm this is the intended dev -> alpha transition.
        elif (stage > 'a'):
            # Beta/rc -> alpha belongs to the *next* release.
            release[(- 1)] += 1
            (stage, pre) = ('a', 1)
        elif (stage == 'a'):
            # Bump the alpha number, unless we are on a dev of this alpha.
            if (not dev):
                if (pre is None):
                    pre = 1
                else:
                    pre += 1
        post = dev = None
    elif (level == 'beta'):
        if (stage is None):
            if (dev is None):
                release[(- 1)] += 1
                (stage, pre) = ('b', 1)
            # NOTE(review): same dev-handling caveat as the alpha branch.
        elif (stage > 'b'):
            release[(- 1)] += 1
            (stage, pre) = ('b', 1)
        elif (stage == 'b'):
            if (not dev):
                if (pre is None):
                    pre = 1
                else:
                    pre += 1
        elif (stage < 'b'):
            # Alpha -> first beta of the same release.
            pre = 1
            stage = 'b'
        post = dev = None
    elif (level == 'post'):
        if (post is not None):
            post += 1
        else:
            post = 1
        dev = None
    elif (level == 'dev'):
        if (dev is not None):
            dev += 1
        else:
            # First dev: advance the pre number (if any) or the last release
            # component, then start at dev1.
            if stage:
                pre += 1
            else:
                release[(- 1)] += 1
            dev = 1
    # Re-assemble the canonical PEP 440 string.
    ver = '.'.join((str(i) for i in release))
    if (stage is not None):
        ver += f'{stage}{pre}'
    if (post is not None):
        ver += f'.post{post}'
    if (dev is not None):
        ver += f'.dev{dev}'
    return ver
class OptionSeriesTimelineDragdrop(Options):
    """Highcharts ``series.timeline.dragDrop`` option wrapper.

    NOTE(review): every scalar option below is defined twice -- a getter-style
    ``def`` followed by a setter-style ``def`` of the same name.  The original
    source almost certainly decorates these as ``@property`` /
    ``@<name>.setter``; the decorators were stripped during extraction, so as
    written each second ``def`` shadows the first.  Restore the decorators
    before reusing this code.  Defaults shown are the values passed to
    ``_config_get``.
    """

    def draggableX(self):
        return self._config_get(None)

    def draggableX(self, flag: bool):
        self._config(flag, js_type=False)

    def draggableY(self):
        return self._config_get(None)

    def draggableY(self, flag: bool):
        self._config(flag, js_type=False)

    def dragHandle(self) -> 'OptionSeriesTimelineDragdropDraghandle':
        # Nested options object (sub-data), not a scalar option.
        return self._config_sub_data('dragHandle', OptionSeriesTimelineDragdropDraghandle)

    def dragMaxX(self):
        return self._config_get(None)

    def dragMaxX(self, num: float):
        self._config(num, js_type=False)

    def dragMaxY(self):
        return self._config_get(None)

    def dragMaxY(self, num: float):
        self._config(num, js_type=False)

    def dragMinX(self):
        return self._config_get(None)

    def dragMinX(self, num: float):
        self._config(num, js_type=False)

    def dragMinY(self):
        return self._config_get(None)

    def dragMinY(self, num: float):
        self._config(num, js_type=False)

    def dragPrecisionX(self):
        return self._config_get(0)

    def dragPrecisionX(self, num: float):
        self._config(num, js_type=False)

    def dragPrecisionY(self):
        return self._config_get(0)

    def dragPrecisionY(self, num: float):
        self._config(num, js_type=False)

    def dragSensitivity(self):
        return self._config_get(2)

    def dragSensitivity(self, num: float):
        self._config(num, js_type=False)

    def groupBy(self):
        return self._config_get(None)

    def groupBy(self, text: str):
        self._config(text, js_type=False)

    def guideBox(self) -> 'OptionSeriesTimelineDragdropGuidebox':
        # Nested options object (sub-data), not a scalar option.
        return self._config_sub_data('guideBox', OptionSeriesTimelineDragdropGuidebox)

    def liveRedraw(self):
        return self._config_get(True)

    def liveRedraw(self, flag: bool):
        self._config(flag, js_type=False)
class DecisionStateMachine():
    """Abstract base for decision state machines.

    Each concrete machine tracks one pending decision/command through its
    lifecycle, reacting to workflow history events and reporting its current
    state.  Every method below is abstract -- subclasses must override them.
    """

    def get_decision(self) -> Optional[Command]:
        """Return the command to emit for this decision, if any."""
        raise NotImplementedError

    def cancel(self, immediate_cancellation_callback: Callable) -> bool:
        """Request cancellation; the callback semantics are subclass-defined."""
        raise NotImplementedError

    def handle_started_event(self, event: HistoryEvent):
        """React to the corresponding 'started' history event."""
        raise NotImplementedError

    def handle_cancellation_initiated_event(self):
        """React to cancellation having been initiated."""
        raise NotImplementedError

    def handle_cancellation_event(self):
        """React to the cancellation completing."""
        raise NotImplementedError

    def handle_cancellation_failure_event(self, event: HistoryEvent):
        """React to a failed cancellation attempt."""
        raise NotImplementedError

    def handle_completion_event(self):
        """React to the decision completing."""
        raise NotImplementedError

    def handle_initiation_failed_event(self, event: HistoryEvent):
        """React to the initiation attempt failing."""
        raise NotImplementedError

    def handle_initiated_event(self, event: HistoryEvent):
        """React to the decision having been initiated."""
        raise NotImplementedError

    def handle_decision_task_started_event(self):
        """React to a decision task starting."""
        raise NotImplementedError

    def get_state(self) -> DecisionState:
        """Current lifecycle state of this decision."""
        raise NotImplementedError

    def is_done(self) -> bool:
        """Whether this decision has reached a terminal state."""
        raise NotImplementedError

    def get_id(self) -> DecisionId:
        """Identifier of this decision within the workflow."""
        raise NotImplementedError
class Solution():
    """Conway's Game of Life computed in place.

    Encoding trick: bit 0 of each cell holds the current state, bit 1 holds
    the next state; a final pass shifts bit 1 down into place.
    """

    def gameOfLife(self, board: List[List[int]]) -> None:
        if not board or not board[0]:
            return
        rows, cols = len(board), len(board[0])

        def count_alive(r, c):
            # Count live neighbors (bit 0) among the up-to-8 adjacent cells.
            total = 0
            for dr in (-1, 0, 1):
                for dc in (-1, 0, 1):
                    if dr == 0 and dc == 0:
                        continue
                    nr, nc = r + dr, c + dc
                    if 0 <= nr < rows and 0 <= nc < cols and board[nr][nc] & 1:
                        total += 1
            return total

        # First pass: record each cell's next state in bit 1.
        for r in range(rows):
            for c in range(cols):
                alive = board[r][c] & 1
                neighbors = count_alive(r, c)
                if (alive and neighbors in (2, 3)) or (not alive and neighbors == 3):
                    board[r][c] |= 2

        # Second pass: promote bit 1 to be the new state.
        for r in range(rows):
            for c in range(cols):
                board[r][c] = (board[r][c] & 2) >> 1
class TestMappings(BaseRuleTest):
    """Validates detection rules against recorded true/false-positive data."""

    FP_FILES = get_fp_data_files()

    def evaluate(self, documents, rule, expected, msg):
        # Run the rule over the documents and assert the exact hit count.
        filtered = evaluate(rule, documents)
        self.assertEqual(expected, len(filtered), msg)
        return filtered

    def test_true_positives(self):
        """Each mapped KQL rule must hit its recorded TP data the expected number of times."""
        mismatched_ecs = []
        mappings = load_etc_dump('rule-mapping.yml')
        for rule in self.production_rules:
            # Only KQL query rules participate in the TP mapping checks.
            if ((rule.contents.data.type == 'query') and (rule.contents.data.language == 'kuery')):
                if (rule.id not in mappings):
                    continue
                mapping = mappings[rule.id]
                expected = mapping['count']
                sources = mapping.get('sources')
                rta_file = mapping['rta_name']
                self.assertTrue(sources, 'No sources defined for: {} - {} '.format(rule.id, rule.name))
                msg = 'Expected TP results did not match for: {} - {}'.format(rule.id, rule.name)
                # Merge the per-source data files recorded for this RTA.
                data_files = [get_data_files('true_positives', rta_file).get(s) for s in sources]
                data_file = combine_sources(*data_files)
                results = self.evaluate(data_file, rule, expected, msg)
                # Cross-check that the source data's ECS versions overlap the rule's.
                ecs_versions = set([r.get('ecs', {}).get('version') for r in results])
                rule_ecs = set(rule.metadata.get('ecs_version').copy())
                if (not (ecs_versions & rule_ecs)):
                    msg = '{} - {} ecs_versions ({}) not in source data versions ({})'.format(rule.id, rule.name, ', '.join(rule_ecs), ', '.join(ecs_versions))
                    mismatched_ecs.append(msg)
        if mismatched_ecs:
            # ECS version mismatches are surfaced as warnings, not failures.
            msg = 'Rules detected with source data from ecs versions not listed within the rule: \n{}'.format('\n'.join(mismatched_ecs))
            warnings.warn(msg)

    def test_false_positives(self):
        """No KQL rule may match any recorded false-positive data set."""
        for rule in self.production_rules:
            if ((rule.contents.data.type == 'query') and (rule.contents.data.language == 'kuery')):
                for (fp_name, merged_data) in get_fp_data_files().items():
                    msg = 'Unexpected FP match for: {} - {}, against: {}'.format(rule.id, rule.name, fp_name)
                    # deepcopy: evaluation may mutate the shared fixture data.
                    self.evaluate(copy.deepcopy(merged_data), rule, 0, msg)
class OptionPlotoptionsVectorSonificationDefaultinstrumentoptions(Options):
    """Highcharts ``plotOptions.vector.sonification.defaultInstrumentOptions``
    option wrapper.

    NOTE(review): each scalar option appears twice (getter-style then
    setter-style ``def``).  The original source almost certainly uses
    ``@property`` / ``@<name>.setter``; the decorators were stripped during
    extraction, so each second ``def`` shadows the first as written.  Restore
    the decorators before reusing this code.  Defaults shown are the values
    passed to ``_config_get``.
    """

    def activeWhen(self) -> 'OptionPlotoptionsVectorSonificationDefaultinstrumentoptionsActivewhen':
        # Nested options object (sub-data).
        return self._config_sub_data('activeWhen', OptionPlotoptionsVectorSonificationDefaultinstrumentoptionsActivewhen)

    def instrument(self):
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionPlotoptionsVectorSonificationDefaultinstrumentoptionsMapping':
        # Nested options object (sub-data).
        return self._config_sub_data('mapping', OptionPlotoptionsVectorSonificationDefaultinstrumentoptionsMapping)

    def midiName(self):
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsVectorSonificationDefaultinstrumentoptionsPointgrouping':
        # Nested options object (sub-data).
        return self._config_sub_data('pointGrouping', OptionPlotoptionsVectorSonificationDefaultinstrumentoptionsPointgrouping)

    def roundToMusicalNotes(self):
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
def extractThemtlgWordpressCom(item):
    """Feed-item parser: build a release message when the item's tags match a
    known series; None for previews/unnumbered items, False when no tag hits."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type); insertion order preserved.
    tag_map = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tag, (name, tl_type) in tag_map.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractMochadelighttranslationWordpressCom(item):
    """Feed-item parser: emit a release message for recognised tags; None for
    previews or items with no volume/chapter, False when nothing matches."""
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class SizedArrayEncoder(BaseArrayEncoder):
    """ABI encoder for fixed-size arrays: exactly ``array_size`` elements."""

    array_size = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # A fixed-size array is dynamic iff its element type is dynamic.
        self.is_dynamic = self.item_encoder.is_dynamic

    def validate(self):
        super().validate()
        if self.array_size is None:
            raise ValueError('`array_size` may not be none')

    def validate_value(self, value):
        super().validate_value(value)
        actual = len(value)
        if actual != self.array_size:
            self.invalidate_value(
                value,
                exc=ValueOutOfBounds,
                msg=f'value has {actual} items when {self.array_size} were expected',
            )

    def encode(self, value):
        # Fixed-size arrays carry no length prefix: just the elements.
        return self.encode_elements(value)
class SettingsGroup(db.Model, CRUDMixin):
    """A named group of settings rows (one-to-many to ``Setting``)."""

    __tablename__ = 'settingsgroup'

    # Primary key: short machine-readable identifier of the group.
    key = db.Column(db.String(255), primary_key=True)
    # Human-readable group title.
    name = db.Column(db.String(255), nullable=False)
    description = db.Column(db.Text, nullable=False)
    # Child settings; removed together with the group (delete-orphan cascade).
    settings = db.relationship('Setting', lazy='dynamic', backref='group', cascade='all, delete-orphan')

    def __repr__(self):
        return '<{} {}>'.format(self.__class__.__name__, self.key)
class Slider(Html.Html):
    """jQuery-UI slider component.

    NOTE(review): this snippet has had decorators stripped by extraction --
    ``output`` is defined twice (getter then setter) and ``options``,
    ``style``, ``js`` and ``dom`` read like getters; in the original they are
    almost certainly ``@property`` / ``@output.setter`` pairs.  Restore the
    decorators against the original source before reusing this code.
    Indentation below is reconstructed from semantics and should be verified.
    """

    requirements = ('jqueryui',)
    name = 'Slider'
    _option_cls = OptSliders.OptionsSlider
    is_range = False

    def __init__(self, page: primitives.PageModel, number: int, min_val: float, max_val: float, width: tuple, height: tuple, helper: Optional[str], options: Optional[dict], html_code: Optional[str], profile: Optional[Union[(bool, dict)]], verbose: bool=False):
        # Fold the slider bounds into the component options before base init.
        options.update({'max': max_val, 'min': min_val})
        super(Slider, self).__init__(page, number, html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height}, verbose=verbose)
        if self.options.show_min_max:
            self.style.css.padding = '0 10px'
            self.style.css.margin = '15px 0'
        self.add_helper(helper)
        self.__output = None

    def output(self) -> Html.Html:
        """Companion <span> showing the current value, created lazily and
        attached to the slider handle (presumed @property getter)."""
        if (self.__output is None):
            self.__output = self.page.ui.tags.span(html_code=('out_%s' % self.html_code))
            self.__output.attr['class'].clear()
            self.__output.css({'position': 'relative', 'top': '15px', 'font-size': '14px', 'width': '80px', 'display': 'inline-block', 'text-align': 'center', 'left': '-35px'})
            self.__output.attr['name'] = ('out_%s' % self.html_code)
            self.__output.onReady([("%(jqId)s.find('.ui-slider-handle').append(%(outComp)s)" % {'jqId': self.js.varId, 'outComp': self.__output.js.jquery.varId})])
            self.options.js_tree['out_builder_opts'] = self.__output.options.config_js()
            # Prefer a native JS builder file (env override, then the package's
            # bundled path); fall back to the inline generated builder.
            native_path = os.environ.get('NATIVE_JS_PATH')
            internal_native_path = Path(Path(__file__).resolve().parent, '..', 'js', 'native')
            if (native_path is None):
                native_path = internal_native_path
            native_builder = Path(native_path, ('%s.js' % self.__output.builder_name))
            internal_native_builder = Path(internal_native_path, ('%s.js' % self.__output.builder_name))
            if native_builder.exists():
                self.page.js.customFile(('%s.js' % self.__output.builder_name), path=native_path)
                self.options.js_tree['out_builder_fnc'] = ('%s%s' % (self.__output.builder_name[0].lower(), self.__output.builder_name[1:]))
                self.page.properties.js.add_constructor(self.__output.builder_name, None)
            elif internal_native_builder.exists():
                self.page.js.customFile(('%s.js' % self.__output.builder_name), path=internal_native_builder)
                self.options.js_tree['out_builder_fnc'] = ('%s%s' % (self.__output.builder_name[0].lower(), self.__output.builder_name[1:]))
                self.__output.page.properties.js.add_constructor(self.__output.builder_name, None)
            else:
                self.page.properties.js.add_constructor(self.__output.builder_name, ('function %s(htmlObj, data, options){%s}' % (self.__output.builder_name, self.__output._js__builder__)))
        return self.__output

    def output(self, component: Html.Html):
        """Attach a caller-supplied component as the value display
        (presumed @output.setter -- duplicate name as written)."""
        self.__output = component
        self.options.force_show_current = True
        native_path = os.environ.get('NATIVE_JS_PATH')
        internal_native_path = Path(Path(__file__).resolve().parent, '..', 'js', 'native')
        if (native_path is None):
            native_path = internal_native_path
        native_builder = Path(native_path, ('%s.js' % self.__output.builder_name))
        internal_native_builder = Path(internal_native_path, ('%s.js' % self.__output.builder_name))
        if native_builder.exists():
            self.page.js.customFile(('%s.js' % self.__output.builder_name), path=native_path)
            self.options.js_tree['out_builder_fnc'] = ('%s%s' % (self.__output.builder_name[0].lower(), self.__output.builder_name[1:]))
            self.page.properties.js.add_constructor(self.__output.builder_name, None)
        elif internal_native_builder.exists():
            self.page.js.customFile(('%s.js' % self.__output.builder_name), path=internal_native_builder)
            self.options.js_tree['out_builder_fnc'] = ('%s%s' % (self.__output.builder_name[0].lower(), self.__output.builder_name[1:]))
            self.page.properties.js.add_constructor(self.__output.builder_name, None)
        else:
            self.options.js_tree['out_builder_fnc'] = self.__output.builder_name
        # NOTE(review): the placement of the next two statements relative to
        # the else-branch above is ambiguous in the extracted source -- the
        # inline add_constructor may belong inside the else; verify against
        # the original before relying on this reconstruction.
        self.options.js_tree['out_builder_opts'] = self.__output.options.config_js()
        self.page.properties.js.add_constructor(self.__output.builder_name, ('function %s(htmlObj, data, options){%s}' % (self.__output.builder_name, self.__output._js__builder__)))
        component.attr['name'] = ('out_%s' % self.html_code)

    def options(self) -> OptSliders.OptionsSlider:
        # Presumed @property: typed view over the base options.
        return super().options

    def style(self) -> GrpClsJqueryUI.ClassSlider:
        # Presumed @property: lazily-built CSS style object.
        if (self._styleObj is None):
            self._styleObj = GrpClsJqueryUI.ClassSlider(self)
        return self._styleObj

    def js(self) -> JsQueryUi.Slider:
        # Presumed @property: lazily-built JS API wrapper.
        if (self._js is None):
            self._js = JsQueryUi.Slider(self, page=self.page)
        return self._js

    def change(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, on_ready: bool=False):
        """Register JS callback(s) for the slider's change event; fluent."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        self.options.change(JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))
        return self

    def start(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None):
        """Register JS callback(s) for the slider's start event; fluent."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        self._jsStyles['start'] = ('function(event, ui){%s}' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))
        return self

    def slide(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None):
        """Register JS callback(s) for the slider's slide event; fluent."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        self._jsStyles['slide'] = ('function(event, ui){%s}' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))
        return self

    def stop(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None):
        """Register JS callback(s) for the slider's stop event; fluent."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        self._jsStyles['stop'] = ('function(event, ui){%s}' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile))
        return self

    def dom(self) -> JsHtmlJqueryUI.JsHtmlSlider:
        # Presumed @property: lazily-built DOM wrapper.
        if (self._dom is None):
            self._dom = JsHtmlJqueryUI.JsHtmlSlider(self, page=self.page)
        return self._dom

    # Generated JS builder: applies options/value and, when an output builder
    # function exists on window, refreshes the companion display component.
    _js__builder__ = ('options.value = data; %(jqId)s.slider(options).css(options.css);\nif (typeof options.handler_css !== \'undefined\'){%(jqId)s.find(\'.ui-slider-handle\').css(options.handler_css)}\nif((typeof options.out_builder_fnc !== "undefined") && (typeof window[options.out_builder_fnc] !== "undefined")){\n window[options.out_builder_fnc](document.getElementsByName(\'out_\'+ htmlObj.id)[0], data, options.out_builder_opts); \n}' % {'jqId': JsQuery.decorate_var('htmlObj', convert_var=False)})

    def __str__(self):
        self.page.properties.js.add_builders(self.refresh())
        if ('slide' in self.options.js_tree):
            self.page.properties.js.add_builders(self.js.slide(self._vals))
        if self.options.show_min_max:
            # Variant with min/max labels flanking the slider track.
            return ('<div %(strAttr)s>\n <div style="width:100%%;height:20px">\n <span style="float:left;display:inline-block">%(min)s</span>\n <span style="float:right;display:inline-block">%(max)s</span>\n </div>\n <div id="%(htmlCode)s"></div>\n</div>%(helper)s' % {'strAttr': self.get_attrs(with_id=False), 'min': self.options.min, 'htmlCode': self.htmlCode, 'max': self.options.max, 'helper': self.helper})
        return ('<div %(strAttr)s>\n <div id="%(htmlCode)s"></div>\n</div>%(helper)s' % {'strAttr': self.get_attrs(with_id=False), 'min': self.options.min, 'htmlCode': self.htmlCode, 'max': self.options.max, 'helper': self.helper})
class ClassSelect(GrpCls.ClassHtml):
    """CSS class catalog for the Select component.

    NOTE(review): ``css`` and the ``cls_*`` methods read like lazily-cached
    ``@property`` getters; the decorators appear stripped by extraction --
    restore them against the original source before reusing this code.
    """

    def __init__(self, component: primitives.HtmlModel):
        super(ClassSelect, self).__init__(component)
        # Lazy caches for the individual CSS class objects.
        (self._css_class_dt, self._css_class_dt_ui, self._css_select) = (None, None, None)
        (self._css_select_input, self._css_item_option, self._css_item_options, self._css_item_selected) = (4 * [None])
        (self._css_menu_li, self._css_select_search, self._css_select_menu_hover) = (None, None, None)
        # Register the classes on the component's main / other class lists.
        self.classList['main'].add(self.cls_select)
        self.classList['main'].add(self.cls_select_button)
        self.classList['main'].add(self.cls_select_outline)
        self.classList['other'].add(self.cls_select_input)
        self.classList['main'].add(self.cls_item_option)
        self.classList['main'].add(self.cls_item_options)
        self.classList['other'].add(self.cls_item_selected)

    def css(self) -> AttrSelect:
        # Lazily-built CSS attributes object for this component.
        if (self._css_struct is None):
            self._css_struct = AttrSelect(self.component)
        return self._css_struct

    def cls_item_selected(self) -> Classes.CatalogSelect.CatalogSelect:
        if (self._css_item_selected is None):
            self._css_item_selected = Classes.CatalogSelect.CatalogSelect(self.component.page, self.classList['other'], component=self.component).selected()
        return self._css_item_selected

    def cls_select(self) -> Classes.CatalogSelect.CatalogSelect:
        if (self._css_select is None):
            self._css_select = Classes.CatalogSelect.CatalogSelect(self.component.page, self.classList['main'], component=self.component).base()
        return self._css_select

    def cls_select_button(self) -> Classes.CatalogSelect.CatalogSelect:
        if (self._css_class_dt is None):
            self._css_class_dt = Classes.CatalogSelect.CatalogSelect(self.component.page, self.classList['main'], component=self.component).button()
        return self._css_class_dt

    def cls_select_outline(self) -> Classes.CatalogSelect.CatalogSelect:
        if (self._css_class_dt_ui is None):
            self._css_class_dt_ui = Classes.CatalogSelect.CatalogSelect(self.component.page, self.classList['main'], component=self.component).outline()
        return self._css_class_dt_ui

    def cls_item_options(self) -> Classes.CatalogSelect.CatalogSelect:
        if (self._css_item_options is None):
            self._css_item_options = Classes.CatalogSelect.CatalogSelect(self.component.page, self.classList['other'], component=self.component).option()
        return self._css_item_options

    def cls_item_option(self) -> Classes.CatalogSelect.CatalogSelect:
        if (self._css_item_option is None):
            self._css_item_option = Classes.CatalogSelect.CatalogSelect(self.component.page, self.classList['other'], component=self.component).item()
        return self._css_item_option

    def cls_select_input(self) -> Classes.CatalogSelect.CatalogSelect:
        if (self._css_select_input is None):
            self._css_select_input = Classes.CatalogSelect.CatalogSelect(self.component.page, self.classList['other'], component=self.component).search_box_input()
        return self._css_select_input
# NOTE(review): extraction dropped the leading ``@`` -- this expression is
# the ``@_mock.patch(...)`` decorator for the test below; as written it is a
# no-op statement and the test receives no mock.  Restore the ``@``.
_mock.patch('flytekit.clients.friendly._RawSynchronousFlyteClient.list_projects')
def test_list_projects_paginated(mock_raw_list_projects):
    """The friendly client forwards limit/token into the raw ProjectListRequest."""
    client = _SynchronousFlyteClient(PlatformConfig.for_endpoint('a.b.com', True))
    client.list_projects_paginated(limit=100, token='')
    project_list_request = _project_pb2.ProjectListRequest(limit=100, token='', filters=None, sort_by=None)
    mock_raw_list_projects.assert_called_with(project_list_request=project_list_request)
# NOTE(review): the bare ('model') expression below looks like the remnant of
# a stripped decorator (e.g. a publish-check registration such as
# ``@publisher('model')``).  Restore it against the original source.
('model')
def check_no_namespace(progress_controller=None):
    """Fail the publish if the current Maya scene contains any namespaces.

    :param progress_controller: optional progress reporter; a fresh
        ProgressControllerBase() is created when omitted.
    :raises PublishError: if ``pm.listNamespaces()`` is non-empty.
    """
    if (progress_controller is None):
        progress_controller = ProgressControllerBase()
    if len(pm.listNamespaces()):
        # Mark progress complete even on failure, then abort the publish.
        progress_controller.complete()
        raise PublishError('There should be no <b>Namespaces</b> in a <b>Model</b> scene.')
    progress_controller.complete()
def on_array(default=None):
    """Decorator factory adapting a 1-D array reducer to tolerate NaNs.

    The wrapped function:
      * coerces its first argument to a floating ndarray,
      * drops NaN entries,
      * returns ``nan`` when nothing remains,
      * for a single remaining element returns that element, or *default*
        when *default* is given,
      * otherwise delegates to the wrapped reducer.

    :param default: value returned for length-1 input instead of the element
        itself; ``None`` (the default) means "return the element".
    """
    from functools import wraps

    def outer(f):
        # The original source had a bare no-op ``(f)`` here -- almost
        # certainly a stripped ``@wraps(f)``; restored as a real wraps call
        # so the wrapper keeps f's name/docstring.
        @wraps(f)
        def wrapper(a, **kwargs):
            # np.asfarray was removed in NumPy 2.0; replicate its semantics:
            # keep existing floating dtypes, convert everything else to float64.
            a = np.asarray(a)
            if not np.issubdtype(a.dtype, np.floating):
                a = a.astype(np.float64)
            a = a[(~ np.isnan(a))]
            if (not len(a)):
                return np.nan
            if (len(a) == 1):
                if (default is None):
                    return a[0]
                return default
            return f(a, **kwargs)
        return wrapper
    return outer
def get_valid_jump_destinations(code: bytes) -> Set[Uint]:
    """Collect the offsets of every valid JUMPDEST opcode in *code*.

    Bytes inside a PUSH instruction's immediate data are skipped, so data
    that happens to equal JUMPDEST is not treated as a destination.
    Unrecognised byte values are simply stepped over.
    """
    destinations = set()
    pc = Uint(0)
    code_length = len(code)
    while pc < code_length:
        try:
            op = Ops(code[pc])
        except ValueError:
            # Not a known opcode; advance one byte and keep scanning.
            pc += 1
            continue
        if op == Ops.JUMPDEST:
            destinations.add(pc)
        elif Ops.PUSH1.value <= op.value <= Ops.PUSH32.value:
            # Jump over this PUSH's immediate data bytes.
            pc += (op.value - Ops.PUSH1.value) + 1
        pc += 1
    return destinations
def _get_dbt_packages() -> Iterator[Tuple[(str, Optional[str])]]:
    """Yield (name, version) for every installed dbt plugin, excluding the
    'dbt-core' and 'dbt-fal' distributions themselves."""
    dbt_plugins = importlib_metadata.packages_distributions().get('dbt', [])
    for plugin_name in dbt_plugins:
        # Resolve the distribution first (matches original behavior), then
        # filter out the two deliberately-excluded names.
        dist = importlib_metadata.distribution(plugin_name)
        if plugin_name in ('dbt-core', 'dbt-fal'):
            continue
        yield plugin_name, dist.version
def extractArkMachineTranslations(item):
    """Feed-item parser for Ark machine-translation releases.

    Returns a release message for recognised series, None for previews or
    unnumbered items, False when nothing matches.
    """
    title_lower = item['title'].lower()
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in title_lower:
        return None
    if 'ark volume' in title_lower:
        return buildReleaseMessageWithType(item, 'Ark', vol, chp, frag=frag, postfix=postfix)
    if 'ark the legend volume' in title_lower or 'ATL' in item['tags']:
        return buildReleaseMessageWithType(item, 'Ark The Legend', vol, chp, frag=frag, postfix=postfix)
    if 'lms volume' in title_lower:
        return buildReleaseMessageWithType(item, 'Legendary Moonlight Sculptor', vol, chp, frag=frag, postfix=postfix)
    return False
class FuncCall(Node):
    """AST node for a function call.

    ``name`` is the callee expression; ``args`` the argument-list node (may
    be None for a call with no arguments); ``coord`` the source location.
    """
    __slots__ = ('name', 'args', 'coord', '__weakref__')

    def __init__(self, name, args, coord=None):
        self.name = name
        self.args = args
        self.coord = coord

    def children(self):
        # Report only materialised (non-None) children, in fixed order.
        pairs = (('name', self.name), ('args', self.args))
        return tuple((label, child) for (label, child) in pairs if child is not None)

    attr_names = ()
def download_and_extract(archive, dest_dir, filename_prefix='', clobber=True):
    """Download *archive* (relative to ``emsdk_packages_url``) and unpack it
    into *dest_dir*.

    Returns True on success, False when every download attempt failed (the
    final unzip/untargz result is returned as-is).  When *clobber* is set,
    *dest_dir* is wiped before extraction.  Relies on module-level globals:
    ``emsdk_packages_url``, ``download_dir``, ``KEEP_DOWNLOADS``.
    """
    debug_print((((('download_and_extract(archive=' + archive) + ', dest_dir=') + dest_dir) + ')'))
    url = urljoin(emsdk_packages_url, archive)

    def try_download(url, silent=False):
        # Cached download into download_dir; deletes afterwards unless
        # KEEP_DOWNLOADS is set.
        return download_file(url, download_dir, (not KEEP_DOWNLOADS), filename_prefix, silent=silent)

    success = False
    # Special case: wasm-binaries .xz archives may instead be published as
    # .tbz2 -- quietly try the .xz first, then fall back to the .tbz2 URL.
    if (('wasm-binaries' in archive) and (os.path.splitext(archive)[1] == '.xz')):
        success = try_download(url, silent=True)
        if (not success):
            alt_url = url.replace('.tar.xz', '.tbz2')
            success = try_download(alt_url, silent=True)
            if success:
                url = alt_url
    if (not success):
        # Normal (noisy) download attempt.
        success = try_download(url)
    if (not success):
        return False
    if clobber:
        remove_tree(dest_dir)
    download_target = get_download_target(url, download_dir, filename_prefix)
    # Pick the extractor from the archive extension.
    if archive.endswith('.zip'):
        return unzip(download_target, dest_dir)
    else:
        return untargz(download_target, dest_dir)
class TopologyView(GenericViewSet):
    # Builds a nodes/links graph of network slice subnets and their VNF
    # instances for a topology visualization (echarts-style node dicts).
    #
    # NOTE(review): several 'image:// ' string literals below appear truncated
    # (the data-URI payload was likely lost); preserved verbatim — confirm
    # against the original source before running.
    # NOTE(review): `eval()` on stored 'vnfInstance' text is a code-injection
    # risk if that field can be influenced by external input — consider
    # ast.literal_eval or JSON; flagged only, not changed here.
    queryset = NetworkSliceSubnet.objects.all()
    serializer_class = NetworkSliceSubnetTopologySerializer

    def list(self, request):
        # Build one graph per slice subnet and return them as a JSON list.
        queryset = self.filter_queryset(self.get_queryset())
        serializer = self.serializer_class(queryset, many=True)
        nsinfo_object = serializer.data
        response_list = list()
        link_count = 0
        for element in nsinfo_object:
            response = {'links': [], 'nodes': []}
            if element['nsInfo']:
                # Root node: the slice subnet itself, labelled by name or description.
                if (element['nsInfo']['nsInstanceName'] != None):
                    response['nodes'].append({'id': element['nssiId'], 'name': element['nsInfo']['nsInstanceName'], 'symbolSize': 10, 'symbol': 'roundRect', 'attributes': {'modularity_class': 0}})
                else:
                    response['nodes'].append({'id': element['nssiId'], 'name': element['nsInfo']['nsInstanceDescription'], 'symbolSize': 10, 'symbol': 'roundRect', 'attributes': {'modularity_class': 0}})
                nsinfo = eval(element['nsInfo']['vnfInstance'])
                if ('nsInstanceName' in element['nsInfo']):
                    # Full NFV info: one node per VNF instance, linked to the subnet.
                    for _ in nsinfo:
                        addresses = str()
                        cp_id = str()
                        vnf_state = _['instantiatedVnfInfo']['vnfState']
                        for extCpInfo in _['instantiatedVnfInfo']['extCpInfo']:
                            cp_id = extCpInfo['id']
                            cp_protocol_info = extCpInfo['cpProtocolInfo']
                            ip_over_ethernet = cp_protocol_info[0]['ipOverEthernet']
                            ip_addresses = ip_over_ethernet['ipAddresses']
                            # Only dynamically-assigned addresses are surfaced.
                            if ip_addresses[0]['isDynamic']:
                                addresses = ip_addresses[0]['addresses']
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'triangle', 'attributes': {'modularity_class': 1}, 'address': addresses})
                        response['links'].append({'id': str(link_count), 'source': element['nssiId'], 'target': _['id']})
                        link_count += 1
                else:
                    # Tacker-style payload: nsinfo maps names to ids.
                    print('Tacker Topology')
                    for _ in nsinfo:
                        response['nodes'].append({'id': nsinfo[_], 'name': _, 'instantiationState': None, 'vnfState': None, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': None})
                        response['links'].append({'id': str(link_count), 'source': element['nssiId'], 'target': nsinfo[_]})
                        link_count += 1
            response_list.append(response)
        return response_cors(request.method, JsonResponse(response_list, safe=False))

    def retrieve(self, request, *args, **kwargs):
        # Same graph construction as list(), but for a single slice subnet,
        # with a per-product icon chosen by vnfProductName.
        instance = self.get_object()
        serializer = self.get_serializer(instance)
        nsinfo_object = serializer.data
        response = {'links': [], 'nodes': []}
        link_count = 0
        if nsinfo_object['nsInfo']:
            if (nsinfo_object['nsInfo']['nsInstanceName'] != None):
                response['nodes'].append({'id': nsinfo_object['nssiId'], 'name': nsinfo_object['nsInfo']['nsInstanceName'], 'symbolSize': 10, 'symbol': 'roundRect', 'attributes': {'modularity_class': 0}})
            else:
                response['nodes'].append({'id': nsinfo_object['nssiId'], 'name': nsinfo_object['nsInfo']['nsInstanceDescription'], 'symbolSize': 10, 'symbol': 'roundRect', 'attributes': {'modularity_class': 0}})
            nsinfo = eval(nsinfo_object['nsInfo']['vnfInstance'])
            if ('nsInstanceName' in nsinfo_object['nsInfo']):
                for _ in nsinfo:
                    addresses = str()
                    cp_id = str()
                    vnf_state = _['instantiatedVnfInfo']['vnfState']
                    for extCpInfo in _['instantiatedVnfInfo']['extCpInfo']:
                        cp_id = extCpInfo['id']
                        cp_protocol_info = extCpInfo['cpProtocolInfo']
                        ip_over_ethernet = cp_protocol_info[0]['ipOverEthernet']
                        ip_addresses = ip_over_ethernet['ipAddresses']
                        if ip_addresses[0]['isDynamic']:
                            addresses = ip_addresses[0]['addresses']
                    # Product-specific node icons; every branch builds the same
                    # dict shape, differing only in the (truncated) symbol.
                    if (_['vnfProductName'] == 'upf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'hss'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'amf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'smf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'mongodb'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'webui'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'pcrf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'ausf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'nssf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'udm'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'udr'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'nrf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    elif (_['vnfProductName'] == 'pcf'):
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    else:
                        response['nodes'].append({'id': _['id'], 'name': _['vnfProductName'], 'instantiationState': _['instantiationState'], 'vnfState': vnf_state, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': addresses})
                    response['links'].append({'id': str(link_count), 'source': nsinfo_object['nssiId'], 'target': _['id']})
                    link_count += 1
            else:
                print('Tacker Topology')
                for _ in nsinfo:
                    response['nodes'].append({'id': nsinfo[_], 'name': _, 'instantiationState': None, 'vnfState': None, 'symbolSize': 10, 'symbol': 'image:// 'attributes': {'modularity_class': 1}, 'address': None})
                    response['links'].append({'id': str(link_count), 'source': nsinfo_object['nssiId'], 'target': nsinfo[_]})
                    link_count += 1
        return response_cors(request.method, JsonResponse(response))
def min_length_discard(records, min_length):
    """Yield only the records whose length is at least `min_length`."""
    logging.info('Applying _min_length_discard generator: discarding records shorter than %d.', min_length)
    for record in records:
        if len(record) >= min_length:
            yield record
        else:
            logging.debug('Discarding short sequence: %s, length=%d', record.id, len(record))
def test_liveness_if_else(construct_graph_if_else, variable_u, variable_v, aliased_variables_y, aliased_variables_z):
    """Check the live-in and live-out sets of every node in the if-else CFG."""
    (nodes, cfg) = construct_graph_if_else
    liveness_analysis = LivenessAnalysis(cfg)
    expected_live_in = {
        0: {aliased_variables_y[1], aliased_variables_z[2]},
        1: {variable_u[1], variable_v[1]},
        2: {variable_u[1], variable_v[1]},
        3: {variable_u[4], variable_v[3]},
    }
    expected_live_out = {
        0: {variable_u[1], variable_v[1]},
        1: {variable_u[2], variable_v[2]},
        2: {variable_u[3], variable_v[1]},
        3: set(),
    }
    assert all(liveness_analysis.live_in_of(nodes[index]) == expected for (index, expected) in expected_live_in.items())
    assert all(liveness_analysis.live_out_of(nodes[index]) == expected for (index, expected) in expected_live_out.items())
def main():
    """Run clang-tidy in parallel over every file in a compilation database.

    Parses CLI options, sanity-checks that clang-tidy runs at all, fans the
    matching files out to worker threads, then optionally merges exported
    fixes and applies them with clang-apply-replacements.
    """
    parser = argparse.ArgumentParser(description='Runs clang-tidy over all files in a compilation database. Requires clang-tidy and clang-apply-replacements in $PATH or in your build directory.')
    parser.add_argument('-allow-enabling-alpha-checkers', action='store_true', help='allow alpha checkers from clang-analyzer.')
    parser.add_argument('-clang-tidy-binary', metavar='PATH', help='path to clang-tidy binary')
    parser.add_argument('-clang-apply-replacements-binary', metavar='PATH', help='path to clang-apply-replacements binary')
    parser.add_argument('-checks', default=None, help='checks filter, when not specified, use clang-tidy default')
    config_group = parser.add_mutually_exclusive_group()
    config_group.add_argument('-config', default=None, help='Specifies a configuration in YAML/JSON format: -config="{Checks: \'*\', CheckOptions: {x: y}}" When the value is empty, clang-tidy will attempt to find a file named .clang-tidy for each source file in its parent directories.')
    config_group.add_argument('-config-file', default=None, help='Specify the path of .clang-tidy or custom config file: e.g. -config-file=/some/path/myTidyConfigFile. This option internally works exactly the same way as -config option after reading specified config file. Use either -config-file or -config, not both.')
    parser.add_argument('-header-filter', default=None, help='regular expression matching the names of the headers to output diagnostics from. Diagnostics from the main file of each translation unit are always displayed.')
    parser.add_argument('-line-filter', default=None, help='List of files with line ranges to filter thewarnings.')
    if yaml:
        # Fix export only makes sense when we can serialize replacements.
        parser.add_argument('-export-fixes', metavar='filename', dest='export_fixes', help='Create a yaml file to store suggested fixes in, which can be applied with clang-apply-replacements.')
    parser.add_argument('-j', type=int, default=0, help='number of tidy instances to be run in parallel.')
    parser.add_argument('files', nargs='*', default=['.*'], help='files to be processed (regex on path)')
    parser.add_argument('-fix', action='store_true', help='apply fix-its')
    parser.add_argument('-format', action='store_true', help='Reformat code after applying fixes')
    parser.add_argument('-style', default='file', help='The style of reformat code after applying fixes')
    parser.add_argument('-use-color', type=strtobool, nargs='?', const=True, help="Use colors in diagnostics, overriding clang-tidy's default behavior. This option overrides the 'UseColor' option in .clang-tidy file, if any.")
    parser.add_argument('-p', dest='build_path', help='Path used to read a compile command database.')
    parser.add_argument('-extra-arg', dest='extra_arg', action='append', default=[], help='Additional argument to append to the compiler command line.')
    parser.add_argument('-extra-arg-before', dest='extra_arg_before', action='append', default=[], help='Additional argument to prepend to the compiler command line.')
    parser.add_argument('-quiet', action='store_true', help='Run clang-tidy in quiet mode')
    parser.add_argument('-load', dest='plugins', action='append', default=[], help='Load the specified plugin in clang-tidy.')
    args = parser.parse_args()
    db_path = 'compile_commands.json'
    if args.build_path is not None:
        build_path = args.build_path
    else:
        build_path = find_compilation_database(db_path)
    clang_tidy_binary = find_binary(args.clang_tidy_binary, 'clang-tidy', build_path)
    tmpdir = None
    if args.fix or (yaml and args.export_fixes):
        clang_apply_replacements_binary = find_binary(args.clang_apply_replacements_binary, 'clang-apply-replacements', build_path)
        tmpdir = tempfile.mkdtemp()
    try:
        # Sanity check: make sure clang-tidy actually runs before spawning workers.
        invocation = get_tidy_invocation('', clang_tidy_binary, args.checks, None, build_path, args.header_filter, args.allow_enabling_alpha_checkers, args.extra_arg, args.extra_arg_before, args.quiet, args.config_file, args.config, args.line_filter, args.use_color, args.plugins)
        invocation.append('-list-checks')
        invocation.append('-')
        if args.quiet:
            with open(os.devnull, 'w') as dev_null:
                subprocess.check_call(invocation, stdout=dev_null)
        else:
            subprocess.check_call(invocation)
    except Exception:
        # Fixed: was a bare `except:` that also swallowed SystemExit/KeyboardInterrupt.
        print('Unable to run clang-tidy.', file=sys.stderr)
        sys.exit(1)
    # Fixed: the database file handle was previously leaked (open() never closed).
    with open(os.path.join(build_path, db_path)) as database_file:
        database = json.load(database_file)
    files = set([make_absolute(entry['file'], entry['directory']) for entry in database])
    max_task = args.j
    if max_task == 0:
        max_task = multiprocessing.cpu_count()
    file_name_re = re.compile('|'.join(args.files))
    return_code = 0
    try:
        task_queue = queue.Queue(max_task)
        failed_files = []
        lock = threading.Lock()
        for _ in range(max_task):
            t = threading.Thread(target=run_tidy, args=(args, clang_tidy_binary, tmpdir, build_path, task_queue, lock, failed_files))
            t.daemon = True
            t.start()
        for name in files:
            if file_name_re.search(name):
                task_queue.put(name)
        task_queue.join()
        if len(failed_files):
            return_code = 1
    except KeyboardInterrupt:
        # Daemon threads die with the process; kill the whole group for a fast exit.
        print('\nCtrl-C detected, goodbye.')
        if tmpdir:
            shutil.rmtree(tmpdir)
        os.kill(0, 9)
    if yaml and args.export_fixes:
        print('Writing fixes to ' + args.export_fixes + ' ...')
        try:
            merge_replacement_files(tmpdir, args.export_fixes)
        except Exception:
            # Fixed: was a bare `except:`.
            print('Error exporting fixes.\n', file=sys.stderr)
            traceback.print_exc()
            return_code = 1
    if args.fix:
        print('Applying fixes ...')
        try:
            apply_fixes(args, clang_apply_replacements_binary, tmpdir)
        except Exception:
            # Fixed: was a bare `except:`.
            print('Error applying fixes.\n', file=sys.stderr)
            traceback.print_exc()
            return_code = 1
    if tmpdir:
        shutil.rmtree(tmpdir)
    sys.exit(return_code)
def test_deeply_nested_optional_parameter_groups():
    """Optional nested groups stay None unless one of their flags is passed."""
    default_setup: GrandParent = GrandParent.setup()
    assert default_setup.niece is None
    assert default_setup.nephew is None
    with_niece: GrandParent = GrandParent.setup('--niece.child.name Bob')
    assert with_niece.niece == Parent(child=Child(name='Bob'))
    assert with_niece.nephew is None
class SchemaV1(SchemaAPI):
    """Key builders for the v1 database layout.

    Each method maps an identifier to the byte key under which the record is
    stored. None of them read instance state, so they are all static.
    NOTE(review): the methods had no ``self``/``cls`` parameter and no
    decorator — ``@staticmethod`` restored so instance-level calls also work.
    """

    @staticmethod
    def make_canonical_head_hash_lookup_key() -> bytes:
        return b'v1:canonical_head_hash'

    @staticmethod
    def make_block_number_to_hash_lookup_key(block_number: BlockNumber) -> bytes:
        number_to_hash_key = (b'block-number-to-hash:%d' % block_number)
        return number_to_hash_key

    @staticmethod
    def make_block_hash_to_score_lookup_key(block_hash: Hash32) -> bytes:
        return (b'block-hash-to-score:%s' % block_hash)

    @staticmethod
    def make_header_chain_gaps_lookup_key() -> bytes:
        return b'v1:header_chain_gaps'

    @staticmethod
    def make_chain_gaps_lookup_key() -> bytes:
        return b'v1:chain_gaps'

    @staticmethod
    def make_checkpoint_headers_key() -> bytes:
        return b'v1:checkpoint-header-hashes-list'

    @staticmethod
    def make_transaction_hash_to_block_lookup_key(transaction_hash: Hash32) -> bytes:
        return (b'transaction-hash-to-block:%s' % transaction_hash)

    @staticmethod
    def make_withdrawal_hash_to_block_lookup_key(withdrawal_hash: Hash32) -> bytes:
        return (b'withdrawal-hash-to-block:%s' % withdrawal_hash)
def count_calls_to(utility_name, call_list, call_contains=None):
    """Count commands in `call_list` that start with `utility_name`.

    When `call_contains` is given, a matching command is only counted if it
    also contains that substring.
    """
    count = 0
    for cmd in call_list:
        if cmd.startswith(utility_name):
            # Fixed idiom: `is not None` instead of `!= None`.
            if call_contains is not None:
                if call_contains in cmd:
                    count += 1
            else:
                count += 1
    return count
class solver(object):
    """Base class for iterative solvers.

    Subclasses implement `_pre`, `_algo`, and `_post`; step-size and solution
    updates are delegated to the `accel` acceleration object.
    """

    def __init__(self, step=1.0, accel=None):
        if step < 0:
            raise ValueError('Step should be a positive number.')
        self.step = step
        # Fall back to a no-op accelerator when none is supplied.
        self.accel = accel if accel is not None else acceleration.dummy()

    def pre(self, functions, x0):
        """Initialize solver state from the starting point before iterating."""
        self.sol = np.asarray(x0)
        self.smooth_funs = []
        self.non_smooth_funs = []
        self._pre(functions, self.sol)
        self.accel.pre(functions, self.sol)

    def _pre(self, functions, x0):
        raise NotImplementedError('Class user should define this method.')

    def algo(self, objective, niter):
        """One iteration: accelerator updates solution and step, then `_algo` runs."""
        self.sol[:] = self.accel.update_sol(self, objective, niter)
        self.step = self.accel.update_step(self, objective, niter)
        self._algo()

    def _algo(self):
        raise NotImplementedError('Class user should define this method.')

    def post(self):
        """Tear down solver state after the final iteration."""
        self._post()
        self.accel.post()
        del self.sol, self.smooth_funs, self.non_smooth_funs

    def _post(self):
        raise NotImplementedError('Class user should define this method.')

    def objective(self, x):
        """Evaluate the objective: non-smooth terms first, then smooth terms."""
        return self._objective(x)

    def _objective(self, x):
        smooth_values = [f.eval(x) for f in self.smooth_funs]
        non_smooth_values = [f.eval(x) for f in self.non_smooth_funs]
        return non_smooth_values + smooth_values
def test_tensor_function_zero_with_subset(W):
    """Zeroing with a subset must clear only the selected nodes."""
    f = Function(W)
    assert W.node_set.size > 3
    zeroed_nodes = op2.Subset(W.node_set, [0, 1, 2])
    f.assign(1)
    assert np.allclose(f.dat.data_ro, 1.0)
    f.zero(subset=zeroed_nodes)
    # First three nodes cleared, the rest untouched.
    assert np.allclose(f.dat.data_ro[:3], 0.0)
    assert np.allclose(f.dat.data_ro[3:], 1.0)
def test_sign_and_recover_message_public_key(ethereum_private_key_file):
    """Recovering the public key from a signed message must yield the signer's address."""
    account = EthereumCrypto(ethereum_private_key_file)
    message = b'hello'
    signature = account.sign_message(message=message)
    assert len(signature) > 0, 'The len(signature) must not be 0'
    public_keys = EthereumApi.recover_public_keys_from_message(message=message, signature=signature)
    assert len(public_keys) == 1, 'Wrong number of public keys recovered.'
    recovered_address = EthereumApi.get_address_from_public_key(public_keys[0])
    assert recovered_address == account.address, 'Failed to recover the correct address.'
def test_memoize():
    """Exercise the memoize decorator: caching, distinct keys, cache_clear, __doc__.

    NOTE(review): restored the ``@memoize`` decorator and the ``foo docstring``
    docstring — the call-count asserts, ``cache_clear()``, and the final
    ``__doc__`` assert all require them; confirm ``memoize`` is imported at
    the top of this file.
    """
    @memoize
    def foo(*args, **kwargs):
        """foo docstring"""
        calls.append(None)
        return (args, kwargs)

    calls = []
    # Identical call signature twice -> the body runs only once.
    for x in range(2):
        ret = foo()
        expected = ((), {})
        assert ret == expected
        assert len(calls) == 1
    # A new positional signature is a new cache key.
    for x in range(2):
        ret = foo(1)
        expected = ((1,), {})
        assert ret == expected
        assert len(calls) == 2
    # Keyword arguments participate in the cache key too.
    for x in range(2):
        ret = foo(1, bar=2)
        expected = ((1,), {'bar': 2})
        assert ret == expected
        assert len(calls) == 3
    # Clearing the cache forces a fresh call.
    foo.cache_clear()
    ret = foo()
    expected = ((), {})
    assert ret == expected
    assert len(calls) == 4
    # The decorator must preserve the wrapped function's docstring.
    assert foo.__doc__ == 'foo docstring'
class GuidedDeathEffect(GenericAction):
    """Applies guided-death life loss to every player in `target_list`."""

    def __init__(self, source, target_list):
        self.source = source
        # The action formally targets its own source; the actual victims are
        # enumerated in target_list.
        self.target = source
        self.target_list = target_list

    def apply_action(self):
        game = self.game
        for victim in self.target_list:
            game.process_action(GuidedDeathLifeLost(victim, victim))
        return True
@_converter(acc_ops.avg_pool2d)  # NOTE(review): restored the missing `@` — a bare `_converter(...)` call never registers the function below
def acc_ops_avg_pool2d(target: Target, args: Tuple[(Argument, ...)], kwargs: Dict[(str, Argument)], name: str) -> ConverterOutput:
    """Convert an acc_ops.avg_pool2d node to an AIT avg_pool2d op.

    AIT pooling operates in NHWC, so the input is transposed in and the
    result transposed back to NCHW.

    Raises:
        RuntimeError: if the input is not an AITTensor, or if non-default
            ceil_mode/count_include_pad/divisor_override options are used.
    """
    input_val = ait_nchw2nhwc(kwargs['input'])
    if not isinstance(input_val, AITTensor):
        raise RuntimeError(f'Non-tensor inputs for {name}: {input_val}')
    kernel_size = identical_elem_tuple_to_int(kwargs['kernel_size'])
    # Per the torch contract, stride defaults to kernel_size when not given.
    stride = identical_elem_tuple_to_int(kwargs['stride']) if kwargs['stride'] else kernel_size
    padding = identical_elem_tuple_to_int(kwargs['padding'])
    ceil_mode = kwargs['ceil_mode']
    count_include_pad = kwargs['count_include_pad']
    divisor_override = kwargs['divisor_override']
    if ceil_mode or (not count_include_pad) or divisor_override:
        raise RuntimeError('Non-default ceil_mode/count_include_pad/divisor_override not supported yet')
    result = avg_pool2d(kernel_size=kernel_size, stride=stride, pad=padding)(input_val)
    return ait_nhwc2nchw(result)
def name_replace(records, search_regex, replace_pattern):
    """Apply a regex substitution to each record's id and description.

    When the description begins with the id, the substitution runs on the
    description and the id is re-derived from its first word; otherwise both
    fields are substituted independently.
    """
    pattern = re.compile(search_regex)
    for record in records:
        first_word = record.description.split(None, 1)[0]
        if first_word == record.id:
            record.description = pattern.sub(replace_pattern, record.description)
            record.id = record.description.split(None, 1)[0]
        else:
            record.id = pattern.sub(replace_pattern, record.id)
            record.description = pattern.sub(replace_pattern, record.description)
        yield record
def test_data_act_database_url_and_parts_error_if_inconsistent_none_parts():
    """Each broker DB part that contradicts DATA_BROKER_DATABASE_URL must raise ValidationError."""
    consistent_dict = {
        ENV_CODE_VAR: _UnitTestDbPartsNoneConfig.ENV_CODE,
        'DATABASE_URL': 'postgres://dummy::12345/fresh_new_db_name',
        'DATA_BROKER_DATABASE_URL': 'postgres://broker:-foobar:54321/fresh_new_db_name_broker',
        'BROKER_DB_HOST': 'broker-foobar',
        'BROKER_DB_PORT': '54321',
        'BROKER_DB_NAME': 'fresh_new_db_name_broker',
        'BROKER_DB_USER': 'broker',
        'BROKER_DB_PASSWORD': 'pass',
    }
    mismatched_parts = {
        'BROKER_DB_HOST': 'bad_host',
        'BROKER_DB_PORT': '990099',
        'BROKER_DB_NAME': 'misnamed_db',
        'BROKER_DB_USER': 'fake_user',
        'BROKER_DB_PASSWORD': 'not_your_secret',
    }
    for (part, bad_val) in mismatched_parts.items():
        # Start from a consistent env and corrupt exactly one part.
        test_env = dict(consistent_dict, **{part: bad_val})
        with mock.patch.dict(os.environ, test_env, clear=True):
            with pytest.raises(ValidationError) as exc_info:
                _UnitTestDbPartsNoneConfig(_env_file=None)
        provided = mismatched_parts[part]
        expected = consistent_dict[part]
        if part == 'BROKER_DB_PASSWORD':
            # Passwords are wrapped and masked in the error message.
            provided = SecretStr(provided)
            expected = ('*' * len(expected)) if expected else None
        expected_error = f'Part: {part}, Part Value Provided: {provided}, Value found in DATA_BROKER_DATABASE_URL: {expected}'
        assert exc_info.match(re.escape(expected_error))
def is_item_present_unified(ctx: Context, item_type: str, item_public_id: PublicId) -> bool:
    """Check whether an item is present as a vendor package or — for the
    agent author's own items — as a local (non-vendor) package."""
    in_vendor = is_item_present(ctx.cwd, ctx.agent_config, item_type, item_public_id, is_vendor=True)
    if item_public_id.author == ctx.agent_config.author:
        # The agent's own packages may also live outside `vendor/`.
        return in_vendor or is_item_present(ctx.cwd, ctx.agent_config, item_type, item_public_id, is_vendor=False)
    return in_vendor
class frozenmultiset(TestCase):
    # Unit tests for nutils.types.frozenmultiset: an immutable, hashable
    # multiset that preserves insertion order.

    def test_constructor(self):
        # Building from a tuple or from another frozenmultiset must give the
        # same element counts.  (The inner `item` loop repeats the same
        # whole-multiset assertion; preserved as written.)
        src = ('spam', 'bacon', 'sausage', 'spam')
        for (name, value) in [('tuple', src), ('frozenmultiset', nutils.types.frozenmultiset(src))]:
            with self.subTest(name=name):
                frozen = nutils.types.frozenmultiset(value)
                for item in ('spam', 'bacon', 'sausage'):
                    self.assertEqual({k: tuple(frozen).count(k) for k in set(src)}, {'spam': 2, 'bacon': 1, 'sausage': 1})

    def test_preserve_order(self):
        # Iteration order matches construction order, duplicates included.
        for src in [('spam', 'bacon', 'sausage', 'spam'), ('spam', 'egg', 'spam', 'spam', 'bacon', 'spam')]:
            with self.subTest(src=src):
                self.assertEqual(tuple(nutils.types.frozenmultiset(src)), src)

    def test_and(self):
        # Multiset intersection keeps the minimum multiplicity of each item;
        # checked symmetrically.
        for (l, r, lar) in [[['spam', 'eggs'], ['spam', 'spam', 'eggs'], ['spam', 'eggs']], [['spam'], ['eggs'], []], ([['spam', 'spam']] * 3)]:
            with self.subTest(l=l, r=r, lar=lar):
                self.assertEqual((nutils.types.frozenmultiset(l) & nutils.types.frozenmultiset(r)), nutils.types.frozenmultiset(lar))
            with self.subTest(l=r, r=l, lar=lar):
                self.assertEqual((nutils.types.frozenmultiset(r) & nutils.types.frozenmultiset(l)), nutils.types.frozenmultiset(lar))

    def test_sub(self):
        # Multiset difference subtracts multiplicities, clamping at zero;
        # checked in both directions.
        for (l, r, lmr, rml) in [[['spam', 'eggs'], ['spam', 'spam', 'eggs'], [], ['spam']], [['spam'], ['eggs'], ['spam'], ['eggs']], [['spam'], ['spam'], [], []]]:
            with self.subTest(l=l, r=r, lmr=lmr):
                self.assertEqual((nutils.types.frozenmultiset(l) - nutils.types.frozenmultiset(r)), nutils.types.frozenmultiset(lmr))
            with self.subTest(l=r, r=l, lmr=rml):
                self.assertEqual((nutils.types.frozenmultiset(r) - nutils.types.frozenmultiset(l)), nutils.types.frozenmultiset(rml))

    def test_pickle(self):
        # A pickle round-trip must preserve type and contents.
        src = ('spam', 'bacon', 'sausage', 'spam')
        frozen = pickle.loads(pickle.dumps(nutils.types.frozenmultiset(src)))
        self.assertIsInstance(frozen, nutils.types.frozenmultiset)
        self.assertEqual(frozen, nutils.types.frozenmultiset(src))

    def test_hash(self):
        # The hash must be order-independent.  NOTE(review): the body hashes
        # `src` rather than `perm` on every iteration — presumably intended to
        # be hash(frozenmultiset(perm)); preserved as written.
        src = ('spam', 'bacon', 'sausage', 'spam')
        ref = nutils.types.frozenmultiset(src)
        for perm in itertools.permutations(src):
            with self.subTest(perm=perm):
                self.assertEqual(hash(nutils.types.frozenmultiset(src)), hash(ref))

    def test_nutils_hash(self):
        # The nutils hash is a fixed digest, identical for every permutation.
        for perm in itertools.permutations(('spam', 'bacon', 'sausage', 'spam')):
            with self.subTest(perm=perm):
                frozen = nutils.types.frozenmultiset(perm)
                self.assertEqual(nutils.types.nutils_hash(frozen).hex(), 'f3fd9c6d4741af2eee6308deddcb714c')

    def test_eq(self):
        # Equality is order-independent (multiplicities must match).
        src = ('spam', 'bacon', 'sausage', 'spam')
        ref = nutils.types.frozenmultiset(src)
        for perm in itertools.permutations(src):
            with self.subTest(perm=perm):
                self.assertEqual(nutils.types.frozenmultiset(src), ref)

    def test_contains(self):
        src = ('spam', 'bacon', 'sausage', 'spam')
        frozen = nutils.types.frozenmultiset(src)
        for item in ('spam', 'bacon', 'eggs'):
            with self.subTest(item=item):
                if (item in src):
                    self.assertIn(item, frozen)
                else:
                    self.assertNotIn(item, frozen)

    def test_len(self):
        # Length counts duplicates.
        src = ('spam', 'bacon', 'sausage', 'spam')
        frozen = nutils.types.frozenmultiset(src)
        self.assertEqual(len(frozen), len(src))

    def test_nonzero(self):
        # Truthiness follows emptiness, like builtin containers.
        self.assertTrue(nutils.types.frozenmultiset(['spam', 'eggs']))
        self.assertFalse(nutils.types.frozenmultiset([]))

    def test_add(self):
        # Addition concatenates, preserving order and multiplicity.
        l = nutils.types.frozenmultiset(['spam', 'bacon'])
        r = nutils.types.frozenmultiset(['sausage', 'spam'])
        lpr = nutils.types.frozenmultiset(['spam', 'bacon', 'sausage', 'spam'])
        self.assertEqual((l + r), lpr)

    def test_isdisjoint(self):
        for (l, r, disjoint) in [[['spam', 'eggs'], ['spam', 'spam', 'eggs'], False], [['spam'], ['eggs'], True], [['spam'], ['spam'], False]]:
            with self.subTest(l=l, r=r, disjoint=disjoint):
                self.assertEqual(nutils.types.frozenmultiset(l).isdisjoint(nutils.types.frozenmultiset(r)), disjoint)
class UnderscoreProcessor(util.PatternSequenceProcessor):
    """Emphasis processor for underscore syntax (`_em_`, `__strong__`, and mixed forms)."""

    # Ordered most-specific-first: the combined strong+em patterns must be
    # tried before plain strong or em so the longer delimiter runs win.
    # DOTALL lets emphasis span newlines; UNICODE keeps word matching sane.
    PATTERNS = [util.PatSeqItem(re.compile(UNDER_STRONG_EM, (re.DOTALL | re.UNICODE)), 'double', 'strong,em'), util.PatSeqItem(re.compile(UNDER_EM_STRONG, (re.DOTALL | re.UNICODE)), 'double', 'em,strong'), util.PatSeqItem(re.compile(UNDER_STRONG_EM2, (re.DOTALL | re.UNICODE)), 'double', 'strong,em'), util.PatSeqItem(re.compile(UNDER_STRONG_EM3, (re.DOTALL | re.UNICODE)), 'double2', 'strong,em'), util.PatSeqItem(re.compile(UNDER_STRONG, (re.DOTALL | re.UNICODE)), 'single', 'strong'), util.PatSeqItem(re.compile(UNDER_EM, (re.DOTALL | re.UNICODE)), 'single', 'em')]
class TestMakeDefaultExpiresAt(TestCase):
    """Tests for helpers.make_default_expires_at."""

    def test_disabled(self):
        settings = config.ExpireTimeDefaultSettings(ENABLE=False)
        self.assertIsNone(helpers.make_default_expires_at(settings))

    def test_enabled(self):
        settings = config.ExpireTimeDefaultSettings(ENABLE=True, MINUTES=0, HOURS=0, DAYS=1)
        expected = datetime.utcnow() + timedelta(days=1)
        result = helpers.make_default_expires_at(settings)
        self.assertIsNotNone(result)
        # Compare only down to the hour so test runtime can't cause flakiness.
        self.assertEqual(expected.isoformat(timespec='hours'), result.isoformat(timespec='hours'))
def run(interface, app, host='127.0.0.1', port=8000, loop='auto', loop_opt=False, log_level=None, workers=1, threads=1, threading_mode='workers', backlog=1024, enable_websockets=True, ssl_certfile: Optional[str]=None, ssl_keyfile: Optional[str]=None):
    """Start a Granian server for the (module, attribute) pair in `app`.

    The attribute defaults to 'app' when not given.
    """
    module_path, attribute = app
    app_path = ':'.join([module_path, attribute or 'app'])
    server = Granian(
        app_path,
        address=host,
        port=port,
        interface=interface,
        workers=workers,
        threads=threads,
        pthreads=threads,
        threading_mode=threading_mode,
        loop=loop,
        loop_opt=loop_opt,
        websockets=enable_websockets,
        backlog=backlog,
        log_level=log_level,
        ssl_cert=ssl_certfile,
        ssl_key=ssl_keyfile,
    )
    server.serve()
def fix_line(line, site, filetype):
    """Rewrite a multi-site dump line so it describes exactly one site.

    The input line has the layout::

        <tile> <site_count> { <site_name> <bel_count> { <bel_name> <entry_count> { entry... } } }

    where the per-entry payload depends on `filetype`:
      - 'timings':    6 delay words,
      - 'pins':       4 pin words,
      - 'properties': property name, value count, then that many values.

    The output keeps the tile name, forces the site and bel counts to 1 using
    `site` for both names, and flattens all entries behind a single total
    entry count.  Returns the rebuilt line terminated with a newline.
    """
    assert filetype in ['timings', 'pins', 'properties'], 'Unsupported filetype'
    words = line.split()
    sites_count = int(words[1])
    # Header: tile name, one site, one bel — both named after the target site.
    newline = [words[0], '1', site, '1', site]
    entries = list()
    all_entries = 0
    loc = 2
    # Fixed: the original loop variable was named `site`, shadowing the `site`
    # parameter (harmless only because the parameter was consumed above).
    for _site_idx in range(0, sites_count):
        bels_count = int(words[(loc + 1)])
        loc += 2
        for _bel_idx in range(0, bels_count):
            entries_count = int(words[(loc + 1)])
            loc += 2
            all_entries += entries_count
            for _entry_idx in range(0, entries_count):
                if filetype == 'timings':
                    for _delay_word in range(0, 6):
                        entries.append(words[loc])
                        loc += 1
                elif filetype == 'pins':
                    for _pin_word in range(0, 4):
                        entries.append(words[loc])
                        loc += 1
                elif filetype == 'properties':
                    entries.append(words[loc])
                    loc += 1
                    values_count = int(words[loc])
                    entries.append(words[loc])
                    loc += 1
                    for _value in range(0, values_count):
                        entries.append(words[loc])
                        loc += 1
    newline.append(str(all_entries))
    newline.extend(entries)
    return (' '.join(newline) + '\n')
def get_boolean_from_request(request_parameters: ImmutableMultiDict, name: str) -> bool:
    """Parse a boolean request parameter.

    An absent parameter (or an unusable parameter container) yields False;
    a present value that is not JSON true/false raises ValueError.
    """
    try:
        value = json.loads(request_parameters.get(name, 'false'))
        if not isinstance(value, bool):
            raise TypeError()
    except (AttributeError, KeyError):
        # Missing or malformed parameter container: treat as "not set".
        return False
    except (json.JSONDecodeError, TypeError):
        raise ValueError(f'{name} must be true or false')
    return value
class TestLayoutBlock():
    """Tests for LayoutBlock text tokenization and line grouping."""

    def test_should_parse_text_with_two_tokens(self):
        block = LayoutBlock.for_text('token1 token2', tail_whitespace='\n')
        observed = [(token.text, token.whitespace) for token in block.lines[0].tokens]
        assert observed == [('token1', ' '), ('token2', '\n')]

    def test_should_parse_text_with_punctuation_tokens(self):
        block = LayoutBlock.for_text('token1. token2', tail_whitespace='\n')
        observed = [(token.text, token.whitespace) for token in block.lines[0].tokens]
        assert observed == [('token1', ''), ('.', ' '), ('token2', '\n')]

    def test_should_create_lines_based_on_line_meta(self):
        meta_one = LayoutLineMeta(line_id=1)
        meta_two = LayoutLineMeta(line_id=2)
        first_line_tokens = [LayoutToken(text, line_meta=meta_one) for text in ['token1.1', 'token1.2']]
        second_line_tokens = [LayoutToken(text, line_meta=meta_two) for text in ['token2.1', 'token2.2']]
        block = LayoutBlock.for_tokens(first_line_tokens + second_line_tokens)
        # Tokens sharing a line meta must end up grouped into the same line.
        assert len(block.lines) == 2
        assert block.lines[0].tokens == first_line_tokens
        assert block.lines[1].tokens == second_line_tokens
def CustomEnumEditor(parent, factory, ui, object, name, description, style='custom', **kwargs):
    """Build an enum editor spanning factory.low..factory.high, caching the
    underlying editor factory on `factory` after the first call."""
    if factory._enum is None:
        # Imported lazily and built once; reused for subsequent editors.
        import traitsui.editors.enum_editor as enum_editor
        factory._enum = enum_editor.ToolkitEditorFactory(values=list(range(factory.low, factory.high + 1)), cols=factory.cols)
    if style == 'simple':
        return factory._enum.simple_editor(ui, object, name, description, parent)
    return factory._enum.custom_editor(ui, object, name, description, parent)
class UnaryOp(OperatorInterface):
    """Wrapper around a single tensor method invoked by name.

    `forward` looks the method up on the first positional argument and calls
    it under `torch.no_grad()`; no gradients are produced, so `create_grad`
    and `backward` are no-ops.
    """

    def __init__(self, func_name: str):
        super(UnaryOp, self).__init__()
        # Name of the tensor method to invoke (e.g. 'abs_').
        self.func_name: str = func_name
        # Fixed annotations: the type is torch.Tensor — torch.tensor is the
        # factory function, not a type.
        self.fwd_out: torch.Tensor = None
        self.grad_in: torch.Tensor = None

    def forward(self, *args, **kwargs):
        # args[0] is the target tensor; remaining args/kwargs are forwarded.
        with torch.no_grad():
            getattr(args[0], self.func_name)(*args[1:], **kwargs)

    def create_grad(self):
        pass

    def backward(self):
        pass
@_as_global_kernel_arg.register(CellFacetKernelArg)  # NOTE(review): restored the missing `@` — without it the dispatch registration never attaches to the function below
def _as_global_kernel_arg_cell_facet(_, self):
    """Build the op2 Dat kernel argument describing cell facets.

    Extruded meshes report the facet count of the base mesh's cell; each
    facet carries two values.
    """
    if self._mesh.extruded:
        num_facets = self._mesh._base_mesh.ufl_cell().num_facets()
    else:
        num_facets = self._mesh.ufl_cell().num_facets()
    return op2.DatKernelArg((num_facets, 2))
def test_outlay_calculations(client, awards_and_transactions):
    # End-to-end check of the award endpoint's obligation/outlay rollups,
    # including DEFC breakdowns and downward-adjustment handling.
    defc = baker.make('references.DisasterEmergencyFundCode', code='L')
    # Quarterly submission window already revealed, so its data is visible.
    baker.make('submissions.DABSSubmissionWindowSchedule', submission_fiscal_year=2019, submission_fiscal_month=12, is_quarter=True, submission_reveal_date='2020-04-01', period_start_date='2020-04-01')
    baker.make('submissions.SubmissionAttributes', pk=4, reporting_fiscal_period=12, reporting_fiscal_year=2019, reporting_period_end='2020-06-30', quarter_format_flag=True, is_final_balances_for_fy=True, reporting_period_start='2020-04-01')
    # Award 1: gross outlay 10 plus downward adjustments -1 and -2 -> outlay 7.
    baker.make('awards.FinancialAccountsByAwards', award_id=1, transaction_obligated_amount=10, gross_outlay_amount_by_award_cpe=10, ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe=(- 1), ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe=(- 2), disaster_emergency_fund=defc, submission_id=4)
    # Award 2: all NULL amounts -> total_outlay must come back as None.
    baker.make('awards.FinancialAccountsByAwards', award_id=2, gross_outlay_amount_by_award_cpe=None, ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe=None, ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe=None, submission_id=4)
    # Award 3: two rows, (20+20+20) + (30+30+30) = 150 total outlay.
    baker.make('awards.FinancialAccountsByAwards', award_id=3, gross_outlay_amount_by_award_cpe=20, ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe=20, ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe=20, disaster_emergency_fund=defc, submission_id=4)
    baker.make('awards.FinancialAccountsByAwards', award_id=3, gross_outlay_amount_by_award_cpe=30, ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe=30, ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe=30, submission_id=4)
    # Award 1: DEFC rollups and totals reflect the adjusted outlay (7.0).
    resp = client.get('/api/v2/awards/1/')
    assert (resp.status_code == status.HTTP_200_OK)
    assert (json.loads(resp.content.decode('utf-8'))['account_obligations_by_defc'] == [{'code': 'L', 'amount': 10.0}])
    assert (json.loads(resp.content.decode('utf-8'))['account_outlays_by_defc'] == [{'code': 'L', 'amount': 7.0}])
    assert (json.loads(resp.content.decode('utf-8'))['total_account_obligation'] == 10.0)
    assert (json.loads(resp.content.decode('utf-8'))['total_account_outlay'] == 7.0)
    assert (json.loads(resp.content.decode('utf-8'))['total_outlay'] == 7.0)
    # Award 2: NULL-only amounts propagate as a null total outlay.
    resp = client.get('/api/v2/awards/2/')
    assert (resp.status_code == status.HTTP_200_OK)
    assert (json.loads(resp.content.decode('utf-8'))['total_outlay'] is None)
    # Award 3: amounts are summed across both file C rows.
    resp = client.get('/api/v2/awards/3/')
    assert (resp.status_code == status.HTTP_200_OK)
    assert (json.loads(resp.content.decode('utf-8'))['total_outlay'] == 150)
def test_filter_structures():
    """Filtering structures against an intersecting plane should not raise."""
    shared_medium = SCENE.medium
    box_at_origin = td.Structure(geometry=td.Box(size=(1, 1, 1)), medium=shared_medium)
    box_offset = td.Structure(geometry=td.Box(size=(1, 1, 1), center=(1, 1, 1)), medium=shared_medium)
    # Infinite plane at z = 1.5 that intersects only the offset box.
    cut_plane = td.Box(center=(0, 0, 1.5), size=(td.inf, td.inf, 0))
    SCENE._filter_structures_plane_medium(structures=[box_at_origin, box_offset], plane=cut_plane)
def downgrade():
    """Recreate the 'modules' table removed by the corresponding upgrade."""
    op.create_table(
        'modules',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('ticket_include', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('payment_include', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('donation_include', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='modules_pkey'),
    )
def test_insert_intersecting_cases_before(task):
    # Build a forest where a standalone condition node (var_c == 2) shares a
    # constant with an existing switch case; MissingCaseFinderIntersectingConstants
    # should fold the condition node into the switch.
    condition_handler = ConditionHandler()
    cond_2_symbol = condition_handler.add_condition(Condition(OperationType.equal, [var_c, const[2]]))
    ast = AbstractSyntaxForest(condition_handler=condition_handler)
    root = ast.factory.create_seq_node()
    # Condition node duplicating case 2's constant.
    missing_case = ast.factory.create_condition_node(condition=cond_2_symbol)
    switch = ast.factory.create_switch_node(var_c)
    true_branch = ast.factory.create_true_node()
    case1 = ast.factory.create_case_node(var_c, const[1])
    case2 = ast.factory.create_case_node(var_c, const[2], break_case=True)
    # Three code nodes: [0] under the condition node, [1]/[2] under the cases.
    code_nodes = [ast.factory.create_code_node([Assignment(var_b, BinaryOperation(OperationType.plus, [var_b, const[(i + 1)]]))]) for i in range(3)]
    ast._add_nodes_from((code_nodes + [root, missing_case, switch, case1, case2, true_branch]))
    ast._add_edges_from([(root, missing_case), (root, switch), (missing_case, true_branch), (true_branch, code_nodes[0]), (switch, case1), (switch, case2), (case1, code_nodes[1]), (case2, code_nodes[2])])
    # code_nodes[2] must execute after both other code nodes.
    ast._code_node_reachability_graph.add_reachability_from(((code_nodes[0], code_nodes[2]), (code_nodes[1], code_nodes[2])))
    root.sort_children()
    switch.sort_cases()
    sibling_reachability = ast.get_sibling_reachability_of_children_of(root)
    reachability_graph = SiblingReachabilityGraph(sibling_reachability)
    ast.set_current_root(root)
    mcfic = MissingCaseFinderIntersectingConstants(ast, RestructuringOptions(True, True, 2, LoopBreakOptions.structural_variable), switch, reachability_graph)
    mcfic.insert(CaseNodeCandidate(missing_case, mcfic._get_const_eq_check_expression_of_disjunction(cond_2_symbol), cond_2_symbol))
    # After insertion the root still has two children, the first remaining a
    # condition node with a non-empty true branch.
    assert (isinstance(ast.current_root, SeqNode) and (len(ast.current_root.children) == 2))
    assert (isinstance((cond := ast.current_root.children[0]), ConditionNode) and cond.true_branch_child)
class bdist_wheel(Command):
    """distutils/setuptools command that builds a PEP 427 ``.whl`` archive.

    Performs a pseudo-install into a temp tree, converts egg-info metadata
    to dist-info, writes WHEEL/RECORD files, and zips the result.
    """

    description = 'create a wheel distribution'

    user_options = [
        ('bdist-dir=', 'b', 'temporary directory for creating the distribution'),
        ('plat-name=', 'p', ('platform name to embed in generated filenames (default: %s)' % get_platform())),
        ('keep-temp', 'k', ('keep the pseudo-installation tree around after ' + 'creating the distribution archive')),
        ('dist-dir=', 'd', 'directory to put final built distributions in'),
        ('skip-build', None, 'skip rebuilding everything (for testing/debugging)'),
        ('relative', None, 'build the archive using relative paths(default: false)'),
        ('owner=', 'u', 'Owner name used when creating a tar file [default: current user]'),
        ('group=', 'g', 'Group name used when creating a tar file [default: current group]'),
        ('universal', None, 'make a universal wheel (default: false)'),
        ('python-tag=', None, ('Python implementation compatibility tag (default: py%s)' % get_impl_ver()[0])),
        ('build-number=', None, 'Build number for this particular version. As specified in PEP-0427, this must start with a digit. [default: None]'),
        ('py-limited-api=', None, 'Python tag (cp32|cp33|cpNN) for abi3 wheel tag (default: false)'),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']

    def initialize_options(self):
        """Set every option to its pre-finalization default."""
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = 'zip'
        self.keep_temp = False
        self.dist_dir = None
        self.distinfo_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.python_tag = ('py' + get_impl_ver()[0])
        self.build_number = None
        self.py_limited_api = False
        self.plat_name_supplied = False

    def finalize_options(self):
        """Resolve defaults, validate tags, and read setup.cfg overrides."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'wheel')
        self.data_dir = self.wheel_dist_name + '.data'
        # Remember whether --plat-name was given explicitly (affects get_tag).
        self.plat_name_supplied = self.plat_name is not None
        need_options = ('dist_dir', 'plat_name', 'skip_build')
        self.set_undefined_options('bdist', *zip(need_options, need_options))
        # Pure wheels have no compiled extensions or C libraries.
        self.root_is_pure = not (self.distribution.has_ext_modules()
                                 or self.distribution.has_c_libraries())
        if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)
        # Support the [wheel] universal=1 setting in setup.cfg.
        wheel = self.distribution.get_option_dict('wheel')
        if 'universal' in wheel:
            val = wheel['universal'][1].strip()
            if val.lower() in ('1', 'true', 'yes'):
                self.universal = True
        # PEP 427: the build tag must start with a digit.
        if (self.build_number is not None) and not self.build_number[:1].isdigit():
            raise ValueError('Build tag (build-number) must start with a digit.')

    @property
    def wheel_dist_name(self):
        """Distribution name, version, and optional build tag joined by '-'.

        A property: callers use it as an attribute (e.g. to build data_dir
        and the dist-info directory name).
        """
        components = (safer_name(self.distribution.get_name()),
                      safer_version(self.distribution.get_version()))
        if self.build_number:
            components += (self.build_number,)
        return '-'.join(components)

    def get_tag(self):
        """Return the (impl, abi, platform) compatibility tag triple."""
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = 'any'
        else:
            plat_name = self.plat_name or get_platform()
            # A 32-bit interpreter on a 64-bit linux kernel still reports
            # linux-x86_64; sys.maxsize == 2**31 - 1 identifies 32-bit builds.
            if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647:
                plat_name = 'linux_i686'
        plat_name = plat_name.replace('-', '_').replace('.', '_')
        if self.root_is_pure:
            impl = 'py2.py3' if self.universal else self.python_tag
            tag = (impl, 'none', plat_name)
        else:
            impl_name = get_abbr_impl()
            impl_ver = get_impl_ver()
            impl = impl_name + impl_ver
            # abi3 wheels are only meaningful for CPython 3.x.
            if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'):
                impl = self.py_limited_api
                abi_tag = 'abi3'
            else:
                abi_tag = str(get_abi_tag()).lower()
            tag = (impl, abi_tag, plat_name)
            supported_tags = pep425tags.get_supported(
                supplied_platform=(plat_name if self.plat_name_supplied else None))
            # Sanity checks: don't build a wheel this interpreter can't use.
            if not self.py_limited_api:
                assert tag == supported_tags[0], '%s != %s' % (tag, supported_tags[0])
            assert tag in supported_tags, 'would build wheel with unsupported tag {}'.format(tag)
        return tag

    def get_archive_basename(self):
        """Return the wheel filename stem: name-ver(-build)-impl-abi-plat."""
        (impl_tag, abi_tag, plat_tag) = self.get_tag()
        return '%s-%s-%s-%s' % (self.wheel_dist_name, impl_tag, abi_tag, plat_tag)

    def run(self):
        """Build, pseudo-install, convert metadata, and archive the wheel."""
        build_scripts = self.reinitialize_command('build_scripts')
        # Use a generic interpreter in script shebangs; wheels are relocatable.
        build_scripts.executable = 'python'
        if not self.skip_build:
            self.run_command('build')
        install = self.reinitialize_command('install', reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False
        # Console scripts are generated at install time, not baked in.
        install_scripts = self.reinitialize_command('install_scripts')
        install_scripts.no_ep = True
        # Non-purelib/platlib files land under <name>.data/<key>/.
        for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):
            setattr(install, 'install_' + key, os.path.join(self.data_dir, key))
        basedir_observed = ''
        if os.name == 'nt':
            # win32 barfs if any of these are ''; use the dirname of data_dir.
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))
            self.install_libbase = self.install_lib = basedir_observed
        setattr(install,
                ('install_purelib' if self.root_is_pure else 'install_platlib'),
                basedir_observed)
        logger.info('installing to %s', self.bdist_dir)
        self.run_command('install')
        archive_basename = self.get_archive_basename()
        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(self.bdist_dir,
                                        self._ensure_relative(install.install_base))
        self.set_undefined_options('install_egg_info', ('target', 'egginfo_dir'))
        self.distinfo_dir = os.path.join(self.bdist_dir,
                                         '%s.dist-info' % self.wheel_dist_name)
        self.egg2dist(self.egginfo_dir, self.distinfo_dir)
        self.write_wheelfile(self.distinfo_dir)
        # RECORD must be written last so it covers every installed file.
        self.write_record(self.bdist_dir, self.distinfo_dir)
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)
        wheel_name = archive_wheelfile(pseudoinstall_root, archive_root)
        # Optionally sign the wheel with an external tool.
        if 'WHEEL_TOOL' in os.environ:
            subprocess.call([os.environ['WHEEL_TOOL'], 'sign', wheel_name])
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_wheel', get_python_version(), wheel_name))
        if not self.keep_temp:
            logger.info('removing %s', self.bdist_dir)
            if not self.dry_run:
                rmtree(self.bdist_dir)

    def write_wheelfile(self, wheelfile_base,
                        generator=('bdist_wheel (' + wheel_version + ')')):
        """Write the WHEEL metadata file into *wheelfile_base*."""
        from email.message import Message
        msg = Message()
        msg['Wheel-Version'] = '1.0'
        msg['Generator'] = generator
        msg['Root-Is-Purelib'] = str(self.root_is_pure).lower()
        if self.build_number is not None:
            msg['Build'] = self.build_number
        # One Tag header per expanded (impl, abi, plat) combination.
        (impl_tag, abi_tag, plat_tag) = self.get_tag()
        for impl in impl_tag.split('.'):
            for abi in abi_tag.split('.'):
                for plat in plat_tag.split('.'):
                    msg['Tag'] = '-'.join((impl, abi, plat))
        wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')
        logger.info('creating %s', wheelfile_path)
        with open(wheelfile_path, 'w') as f:
            Generator(f, maxheaderlen=0).flatten(msg)

    def _ensure_relative(self, path):
        """Strip the leading path separator so *path* is drive-relative."""
        (drive, path) = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path

    def license_file(self):
        """Return the license filename from setup.cfg metadata, or None."""
        metadata = self.distribution.get_option_dict('metadata')
        if 'license_file' not in metadata:
            return None
        return metadata['license_file'][1]

    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an egg-info directory/file into a dist-info directory."""
        def adios(p):
            """Remove a file or directory; no-op if it doesn't exist."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        adios(distinfo_path)
        if not os.path.exists(egginfo_path):
            # Help the user diagnose a misnamed egg-info directory.
            import glob
            pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
            possible = glob.glob(pat)
            err = 'Egg metadata expected at %s but not found' % (egginfo_path,)
            if possible:
                alt = os.path.basename(possible[0])
                err += ' (%s found - possible misnamed archive file?)' % (alt,)
            raise ValueError(err)
        if os.path.isfile(egginfo_path):
            # .egg-info is a single PKG-INFO file.
            pkginfo_path = egginfo_path
            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory; copy it minus setuptools internals.
            pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
            shutil.copytree(egginfo_path, distinfo_path,
                            ignore=(lambda x, y: {'PKG-INFO', 'requires.txt',
                                                  'SOURCES.txt', 'not-zip-safe'}))
            # Drop dependency_links.txt if it carries no content.
            dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
            with open(dependency_links_path, 'r') as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)
        write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)
        license = self.license_file()
        if license:
            license_filename = 'LICENSE.txt'
            shutil.copy(license, os.path.join(distinfo_path, license_filename))
        adios(egginfo_path)

    def write_record(self, bdist_dir, distinfo_dir):
        """Write RECORD listing every file with its sha256 digest and size."""
        from .util import urlsafe_b64encode
        record_path = os.path.join(distinfo_dir, 'RECORD')
        record_relpath = os.path.relpath(record_path, bdist_dir)

        def walk():
            """Yield every file under bdist_dir in deterministic order."""
            for (dir, dirs, files) in os.walk(bdist_dir):
                dirs.sort()
                for f in sorted(files):
                    yield os.path.join(dir, f)

        def skip(path):
            """RECORD itself gets no hash/size entry."""
            return path == record_relpath

        with open_for_csv(record_path, 'w+') as record_file:
            writer = csv.writer(record_file)
            for path in walk():
                relpath = os.path.relpath(path, bdist_dir)
                if skip(relpath):
                    hash = ''
                    size = ''
                else:
                    with open(path, 'rb') as f:
                        data = f.read()
                    digest = hashlib.sha256(data).digest()
                    hash = 'sha256=' + native(urlsafe_b64encode(digest))
                    size = len(data)
                # RECORD always uses forward slashes, regardless of OS.
                record_path = os.path.relpath(path, bdist_dir).replace(os.path.sep, '/')
                if isinstance(record_path, bytes):
                    record_path = record_path.decode(sys.getfilesystemencoding()).encode('utf-8')
                writer.writerow((record_path, hash, size))
def log_sites_summary(log, loci, s_vars):
    """Log a formatted summary of informative-site statistics.

    s_vars is a 5-tuple: (total, mean, 95% CI, min, max).
    """
    (s_total, s_mean, s_ci, s_min, s_max) = s_vars
    header = ' Informative Sites summary '
    log.info(header.center(65, '-'))
    summary_lines = (
        '[Sites] loci:\t{:,}'.format(loci),
        '[Sites] total:\t{:,}'.format(s_total),
        '[Sites] mean:\t{:.2f}'.format(s_mean),
        '[Sites] 95% CI:\t{:.2f}'.format(s_ci),
        '[Sites] min:\t{}'.format(s_min),
        '[Sites] max:\t{:,}'.format(s_max),
    )
    for line in summary_lines:
        log.info(line)
def filter_versioned_items(items: t.Iterable[_VersionedObj], constraints: t.Iterable[t.Tuple[(str, _ComparableObj)]], to_version: t.Callable[([_VersionedObj], _ComparableObj)], sort=False) -> t.List[_VersionedObj]:
    """Return the items whose version satisfies every (operator, limit) constraint.

    Comparison failures are logged at debug level and count as non-matches.
    Optionally returns the result sorted by version.
    """
    constraint_list = list(constraints)
    selected = []
    for candidate in items:
        candidate_version = to_version(candidate)
        # Evaluate every constraint (not short-circuited) so each comparison
        # failure is logged.
        outcomes = []
        for (oper_str, version_limit) in constraint_list:
            compare = OPERATORS[oper_str]
            try:
                outcome = compare(candidate_version, version_limit)
            except VersionComparisonError as err:
                log.debug(err)
                outcome = False
            outcomes.append(outcome)
        if all(outcomes):
            selected.append(candidate)
    return sorted(selected, key=to_version) if sort else selected
def evaljs(jscode, whitespace=True, print_result=True, extra_nodejs_args=None):
    """Evaluate JavaScript with Node.js and return its output as a string.

    Small snippets go on the command line (-e, plus -p when print_result);
    snippets over 16 kB are written to a temp file, which only works when
    print_result is False. Raises RuntimeError on Node errors.
    """
    global _eval_count
    cmd = [get_node_exe()]
    if extra_nodejs_args:
        cmd.extend(extra_nodejs_args)
    filename = None
    if len(jscode) > 2 ** 14:
        # Too large for the command line; must route through a temp file.
        if print_result:
            raise RuntimeError('evaljs() wont send more than 16 kB of code over the command line, but cannot use a file unless print_result is False.')
        _eval_count += 1
        fname = 'pscript_%i_%i.js' % (os.getpid(), _eval_count)
        filename = os.path.join(tempfile.gettempdir(), fname)
        with open(filename, 'wb') as f:
            f.write(jscode.encode())
        cmd.extend(['--use_strict', filename])
    else:
        flags = ['-p', '-e'] if print_result else ['-e']
        cmd.extend(['--use_strict'] + flags + [jscode])
    if sys.version_info[0] < 3:
        # Python 2: subprocess wants byte strings.
        cmd = [c.encode('raw_unicode_escape') for c in cmd]
    try:
        res = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except Exception as err:
        # Surface Node's own output (truncated) as the error message.
        err = err.output.decode() if hasattr(err, 'output') else str(err)
        if len(err) > 400:
            err = err[:400] + '...'
        raise RuntimeError(err)
    finally:
        if filename is not None:
            try:
                os.remove(filename)
            except Exception:
                pass
    res = res.decode().rstrip()
    # -p prints the final value; strip a trailing 'undefined' from void code.
    if print_result and res.endswith('undefined'):
        res = res[:(- 9)].rstrip()
    if not whitespace:
        res = res.replace('\n', '').replace('\t', '').replace(' ', '')
    return res
# NOTE(review): the decorator lines above this view were garbled during
# extraction ('_required _required _POST'); presumably auth/permission
# decorators plus require_POST -- restore from version control before use.
_required _required _POST
def nic_settings_form(request, hostname):
    # Handle the NIC settings form POST for a VM; admins get the extended form.
    vm = get_vm(request, hostname)
    if request.user.is_admin(request):
        # Admin form saves without an explicit action.
        action = None
        form = AdminServerNicSettingsForm(request, vm, request.POST, prefix='opt-nic')
    else:
        action = 'update'
        form = ServerNicSettingsForm(request, vm, request.POST, prefix='opt-nic')
    if form.is_valid():
        status = form.save(action=action, args=(vm.hostname, form.cleaned_data['nic_id']))
        if (status == 204):
            # 204: nothing changed; empty response.
            return HttpResponse(None, status=status)
        elif (status in (200, 201)):
            return redirect('vm_details', hostname=vm.hostname)
    # Invalid form (or other status): re-render with errors.
    return render(request, 'gui/vm/nic_settings_form.html', {'nic_settingsform': form, 'vm': vm})
class TestAcceptAnyArg(TestCase):
    """Behaviour of mock_callable(...).for_partial_call with extra args/kwargs."""

    def test_for_partial_call_accepts_all_other_args(self):
        mock = self.mock_callable(sample_module, 'test_function')
        mock.for_partial_call('a').to_return_value(['blah'])
        # The extra positional arg 'b' is ignored by the partial matcher.
        sample_module.test_function('a', 'b')

    def test_for_partial_call_accepts_all_other_kwargs(self):
        mock = self.mock_callable(sample_module, 'test_function')
        mock.for_partial_call('firstarg', 'secondarg', kwarg1='a').to_return_value(['blah'])
        sample_module.test_function('firstarg', 'secondarg', kwarg1='a', kwarg2='x')

    def test_for_partial_call_accepts_all_other_args_and_kwargs(self):
        mock = self.mock_callable(sample_module, 'test_function')
        mock.for_partial_call('firstarg', kwarg1='a').to_return_value(['blah'])
        sample_module.test_function('firstarg', 'xx', kwarg1='a', kwarg2='x')

    def test_for_partial_call_fails_if_no_required_args_are_present(self):
        # A mismatched required positional arg must be rejected.
        with self.assertRaises(mock_callable.UnexpectedCallArguments):
            mock = self.mock_callable(sample_module, 'test_function')
            mock.for_partial_call('firstarg', kwarg1='a').to_return_value(['blah'])
            sample_module.test_function('differentarg', 'alsodifferent', kwarg1='a', kwarg2='x')

    def test_for_partial_call_fails_if_no_required_kwargs_are_present(self):
        # A mismatched required keyword arg must be rejected.
        with self.assertRaises(mock_callable.UnexpectedCallArguments):
            mock = self.mock_callable(sample_module, 'test_function')
            mock.for_partial_call('firstarg', kwarg1='x').to_return_value(['blah'])
            sample_module.test_function('firstarg', 'secondarg', kwarg1='a', kwarg2='x')

    def test_matchers_work_with_for_partial_call(self):
        mock = self.mock_callable(sample_module, 'test_function')
        mock.for_partial_call(matchers.Any(), 'secondarg').to_return_value(['blah'])
        sample_module.test_function('asdasdeas', 'secondarg', kwarg1='a', kwarg2='x')
# NOTE(review): the decorator prefix ('.skipif' is presumably
# '@pytest.mark.skipif') and both download URLs / one sha256 value below were
# truncated during extraction -- restore the full values from version control.
.skipif((not (keras_weights_available and torch_weights_available)), reason='CRAFT weights required.')
def test_pytorch_identical_output():
    # Verify the PyTorch CRAFT port matches the Keras reference model's
    # detector output on a sample image to 4 decimal places.
    import torch
    weights_path_torch = keras_ocr.tools.download_and_verify(url=' filename='craft_mlt_25k.pth', sha256='4a5efbfb48be75e1e2b57f8de3d84f213004b14b85fd4b3748db17')
    weights_path_keras = keras_ocr.tools.download_and_verify(url=' filename='craft_mlt_25k.h5', sha256='7283ce2ff05a0617e9740c316175ff3bacdd7215dbdf1a726890d5099431f899')
    model_keras = keras_ocr.detection.build_keras_model(weights_path=weights_path_keras)
    model_pytorch = keras_ocr.detection.build_torch_model(weights_path=weights_path_torch)
    image = keras_ocr.tools.read('tests/test_image.jpg')
    # Add a batch axis; Keras expects NHWC, PyTorch expects NCHW (transposed below).
    X = keras_ocr.detection.compute_input(image)[(np.newaxis,)]
    y_pred_keras = model_keras.predict(X)
    y_pred_torch = model_pytorch.forward(torch.from_numpy(X.transpose(0, 3, 1, 2)))[0].detach().numpy()
    np.testing.assert_almost_equal(y_pred_keras, y_pred_torch, decimal=4)
def extractFakTranslations(item):
    """Map a FAK Translations feed item to a release message.

    Returns None for previews/unparseable titles, a release message for
    known series tags, and False for unrecognized items.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no parseable numbering and preview posts.
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    tags = item['tags']
    if 'Shrouding the Heavens' in tags or 'STH' in tags:
        return buildReleaseMessageWithType(item, 'Shrouding the Heavens', vol, chp, frag=frag, postfix=postfix)
    if 'KGGD' in tags:
        return buildReleaseMessageWithType(item, 'Killing Grounds of Gods and Devils', vol, chp, frag=frag, postfix=postfix)
    return False
class OptionSeriesPackedbubbleSonificationContexttracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options (mapFunction, mapTo, max, min, within).

    Each option is a property pair delegating to the Options config store.
    As flattened, each getter was immediately shadowed by its same-named
    setter; the @property/@setter decorators are restored here so the
    getters are reachable.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Handler(metaclass=MetaHandler):
    """Base event handler driving an ASGI-style scope/receive/send loop.

    ``on_event`` takes ``cls`` as its first parameter but, as flattened,
    lacked the ``@classmethod`` decorator — restored here so calling it on
    an instance does not bind the instance as ``cls``.
    """

    __slots__ = ['app']

    def __init__(self, app):
        self.app = app

    @classmethod
    def on_event(cls, event: str) -> Callable[([EventHandler], EventHandlerWrapper)]:
        """Decorator registering a function as the handler for *event*."""
        def wrap(f: EventHandler) -> EventHandlerWrapper:
            return EventHandlerWrapper(event, f)
        return wrap

    def get_event_handler(self, event_type: str) -> Union[(EventHandler, EventHandlerWrapper)]:
        """Look up the registered handler for *event_type*; fall back to the missing-event handler."""
        return self._events_handlers_.get(event_type, _event_missing)

    def __call__(self, scope: Scope, receive: Receive, send: Send) -> Awaitable[None]:
        return self.handle_events(scope, receive, send)

    async def handle_events(self, scope: Scope, receive: Receive, send: Send):
        """Run the event loop: each step returns the next looper and event."""
        task: Optional[EventLooper] = _event_looper
        event = None
        while task:
            (task, event) = (await task(self, scope, receive, send, event))
class Okhsl(HSL):
    """Okhsl color space: an HSL-style model built on top of Oklab."""

    # Conversions route through the Oklab space.
    BASE = 'oklab'
    NAME = 'okhsl'
    SERIALIZE = ('--okhsl',)
    # Hue is an angle in degrees; saturation and lightness are bounded [0, 1].
    CHANNELS = (Channel('h', 0.0, 360.0, bound=True, flags=FLG_ANGLE), Channel('s', 0.0, 1.0, bound=True), Channel('l', 0.0, 1.0, bound=True))
    CHANNEL_ALIASES = {'hue': 'h', 'saturation': 's', 'lightness': 'l'}

    def to_base(self, coords: Vector) -> Vector:
        """Convert Okhsl coordinates to Oklab."""
        return okhsl_to_oklab(coords, LMS_TO_SRGBL, SRGBL_COEFF)

    def from_base(self, coords: Vector) -> Vector:
        """Convert Oklab coordinates to Okhsl."""
        return oklab_to_okhsl(coords, LMS_TO_SRGBL, SRGBL_COEFF)
# NOTE(review): the decorator above this class was garbled during extraction
# ('_module()'); presumably a model-registry decorator -- restore from VCS.
_module()
class NsfHifiGAN(pl.LightningModule):
    # Wrapper around an NSF-HiFiGAN generator checkpoint: synthesizes
    # waveforms from mel spectrograms (+ f0) and computes mels from waveforms.

    def __init__(self, checkpoint_path: str='checkpoints/nsf_hifigan/model', config_file: Optional[str]=None, use_natural_log: bool=True, **kwargs):
        super().__init__()
        # Default config lives next to the checkpoint.
        if (config_file is None):
            config_file = (Path(checkpoint_path).parent / 'config.json')
        with open(config_file) as f:
            data = f.read()
        json_config = json.loads(data)
        self.h = AttrDict(json_config)
        self.model = Generator(self.h)
        self.use_natural_log = use_natural_log
        cp_dict = torch.load(checkpoint_path, map_location='cpu')
        # Plain vocoder checkpoints store weights under 'generator';
        # Lightning checkpoints nest them in 'state_dict' with a
        # 'generator.' prefix.
        if ('state_dict' not in cp_dict):
            self.model.load_state_dict(cp_dict['generator'])
        else:
            self.model.load_state_dict({k.replace('generator.', ''): v for (k, v) in cp_dict['state_dict'].items() if k.startswith('generator.')})
        self.model.eval()
        self.model.remove_weight_norm()
        self.mel_transform = PitchAdjustableMelSpectrogram(sample_rate=self.h.sampling_rate, n_fft=self.h.n_fft, win_length=self.h.win_size, hop_length=self.h.hop_size, f_min=self.h.fmin, f_max=self.h.fmax, n_mels=self.h.num_mels)
        # Accept the legacy 'mel_channels' keyword as an alias for 'num_mels'.
        if ('mel_channels' in kwargs):
            kwargs['num_mels'] = kwargs.pop('mel_channels')
        # Any remaining kwargs must match the loaded config exactly.
        for (k, v) in kwargs.items():
            if (getattr(self.h, k, None) != v):
                raise ValueError(f'Incorrect value for {k}: {v}')

    # NOTE(review): decorator garbled during extraction ('_grad()');
    # presumably torch.no_grad() -- restore from VCS.
    _grad()
    def spec2wav(self, mel, f0, key_shift=0):
        # Synthesize a 1-D waveform from a mel spectrogram and f0 curve.
        c = mel[None]
        if ((key_shift is not None) and (key_shift != 0)):
            # Shift pitch by key_shift semitones.
            f0 *= (2 ** (key_shift / 12))
        if (self.use_natural_log is False):
            # 2.30259 ~= ln(10): converts log10-scaled mels to natural log.
            c = (2.30259 * c)
        f0 = f0[None].to(c.dtype)
        y = self.model(c, f0).view((- 1))
        return y

    def device(self):
        # NOTE(review): likely intended as a @property (decorator may have
        # been lost in extraction) -- verify against callers.
        return next(self.model.parameters()).device

    def wav2spec(self, wav_torch, sr=None, key_shift=0, speed=1.0):
        # Compute a range-compressed mel spectrogram from a waveform,
        # resampling to the model's sample rate when needed.
        if (sr is None):
            sr = self.h.sampling_rate
        if (sr != self.h.sampling_rate):
            _wav_torch = librosa.resample(wav_torch.cpu().numpy(), orig_sr=sr, target_sr=self.h.sampling_rate)
            wav_torch = torch.from_numpy(_wav_torch).to(wav_torch.device)
        mel_torch = self.mel_transform(wav_torch, key_shift=key_shift, speed=speed)[0]
        mel_torch = dynamic_range_compression(mel_torch)
        if (self.use_natural_log is False):
            # 0.434294 ~= 1/ln(10): converts natural-log mels back to log10.
            mel_torch = (0.434294 * mel_torch)
        return mel_torch