code
stringlengths
281
23.7M
def create_page(item):
    """Generate an HTML API-reference page for the dotted name ``item``.

    Writes ``<item>.html`` next to this script, then recurses into every
    sub-module and class reported by ``inspect_members``.

    NOTE(review): ``eval(item)`` assumes ``item`` is a dotted name that is
    already importable/resolvable in this module's namespace — confirm, and
    never feed it untrusted input.
    """
    (modules, classes, methods, functions, attributes) = inspect_members(item)
    # Build the breadcrumb trail of links for every parent package level.
    trace = ''
    parts = item.split('.')
    if (len(parts) > 1):
        for (cnt, p) in enumerate(parts[:(- 1)]):
            if (cnt > 0):
                # Intermediate level: link to the cumulative dotted prefix.
                link = ((("<a href='" + '.'.join(parts[:(cnt + 1)])) + ".html'>") + parts[cnt])
            else:
                # Top level: link directly to the root package page.
                link = ((("<a href='" + p) + ".html'>") + parts[cnt])
            trace = (((trace + link) + '</a>') + '.')
    title = ((("<span class='title'>" + trace) + parts[(- 1)]) + '</span>')
    title = (title + '<br />')
    doc = eval(item).__doc__
    if (doc is None):
        doc = ''
    doc = format_doc(doc)
    doc = doc.lstrip()
    doc = doc.replace('\n ', '\n')
    # Extract the version number from CHANGES.md two directories up; an
    # 'upcoming'/'coming up' heading before the version line adds a '+' suffix.
    file_path = os.path.split(os.path.abspath(__file__))[0]
    p = os.path.abspath('{0}/../../CHANGES.md'.format(file_path))
    version_nr = '{0}'
    with open(p) as f:
        for line in f:
            l = line.lower().lstrip()
            if (l.startswith('upcoming') or l.startswith('coming up')):
                version_nr += '+'
            if l.startswith('version'):
                # Slice [8:13] grabs the 'x.y.z' right after 'version '.
                version_nr = version_nr.format(line[8:13])
                break
    # Assemble the full page from the section helpers; `css` is module-level.
    page = '\n<html>\n<head>\n<link rel="shortcut icon" href="favicon.ico" type="image/x-icon" />\n<title>API reference for {0} ({1})</title>\n{2}\n</head>\n<pre>\n{3}\n<span class=\'definition\'>{4}</span><br />\n{5}{6}{7}{8}{9}\n<hr >\n{10}{11}{12}\n</pre>\n</html>'.format(item, version_nr, css, title, doc, create_module_section(modules, item), create_classes_section(classes, item), create_methods_section(methods), create_functions_section(functions), create_attributes_section(attributes), create_classes_details_section(classes, item), create_methods_details_section(methods), create_functions_details_section(functions))
    p = os.path.abspath('{0}/{1}.html'.format(os.path.split(os.path.abspath(__file__))[0], item))
    print('create', p)
    with open(p, 'w') as f:
        f.write(page)
    # Recurse into sub-modules and classes so the whole tree gets pages.
    if modules:
        for m in modules:
            create_page(((item + '.') + m[0]))
    if classes:
        for c in classes:
            create_page(((item + '.') + c[0]))
def consolidate_clusters_unilocal(clusters):
    """Collapse each cluster of signature members into one consolidated record.

    For every cluster the average start/end coordinates are computed from the
    members' source tuples; for clusters with more than one member the
    standard deviations of span length and midpoint position are also
    computed (``stdev`` requires >= 2 samples, hence the guard).

    Returns a list of ``SignatureClusterUniLocal`` objects, one per cluster.
    NOTE(review): assumes ``member.get_source()`` returns ``(seq_id, start,
    end)`` — confirm against the member class.
    """
    consolidated_clusters = []
    for cluster in clusters:
        average_start = (sum([member.get_source()[1] for member in cluster]) / len(cluster))
        average_end = (sum([member.get_source()[2] for member in cluster]) / len(cluster))
        if (len(cluster) > 1):
            # Spread of interval lengths across members.
            std_span = stdev([(member.get_source()[2] - member.get_source()[1]) for member in cluster])
            # Spread of interval midpoints across members.
            std_pos = stdev([((member.get_source()[2] + member.get_source()[1]) / 2) for member in cluster])
        else:
            std_span = None
            std_pos = None
        score = calculate_score(cluster, std_span, std_pos, (average_end - average_start), cluster[0].type)
        consolidated_clusters.append(SignatureClusterUniLocal(cluster[0].get_source()[0], int(round(average_start)), int(round(average_end)), score, len(cluster), cluster, cluster[0].type, std_span, std_pos))
    return consolidated_clusters
class ProductVariant(AbstractObject):
    """API object for a product variant (auto-generated SDK style).

    Mirrors the Field/_field_types pattern used by the other AbstractObject
    subclasses in this SDK; do not hand-edit field lists without updating
    ``_field_types`` to match.
    """

    def __init__(self, api=None):
        super(ProductVariant, self).__init__()
        # Marker attribute used by the SDK to identify the concrete type.
        self._isProductVariant = True
        self._api = api

    class Field(AbstractObject.Field):
        # Field name constants for request/response (de)serialization.
        label = 'label'
        options = 'options'
        product_field = 'product_field'

    # Maps field names to their wire types for the SDK's type coercion.
    _field_types = {'label': 'string', 'options': 'list<string>', 'product_field': 'string'}

    # NOTE(review): takes `cls` but is not decorated @classmethod — this
    # matches the generated-code convention in sibling classes; confirm the
    # SDK calls it unbound before changing.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def get_max(part: str, cannon_id: int, cannon_maxes: dict[(int, dict[(int, int)])]) -> Optional[int]:
    """Look up the recorded maximum for a cannon part, or None if unknown.

    ``cannon_maxes`` maps cannon id -> {part id -> max value}; a missing
    cannon or a missing part both yield ``None``.
    """
    part_key = get_part_id_from_str(part)
    per_cannon = cannon_maxes.get(cannon_id)
    if per_cannon is None:
        return None
    return per_cannon.get(part_key)
def replay_insert_row(old_column_list, new_table_name, delta_table_name, id_col_name, ignore: bool = False) -> str:
    """Build the INSERT..SELECT statement that replays rows from a delta table.

    Args:
        old_column_list: column names to copy (rendered via list_to_col_str).
        new_table_name: destination table name (escaped).
        delta_table_name: source delta table name (escaped).
        id_col_name: primary-key column used in the ``IN %s`` placeholder.
        ignore: when True, emit ``INSERT IGNORE`` to skip duplicate-key rows.
            (Bug fix: the annotation previously said ``str`` for a boolean
            flag with a ``False`` default.)

    Returns:
        The SQL string with a ``%s`` placeholder for the id list.
    """
    ignore = ('IGNORE' if ignore else '')
    return 'INSERT {ignore} INTO `{new}` ({cols})SELECT {cols} FROM `{delta}` FORCE INDEX (PRIMARY) WHERE `{delta}`.`{id_col}` IN %s '.format(**{'ignore': ignore, 'cols': list_to_col_str(old_column_list), 'new': escape(new_table_name), 'delta': escape(delta_table_name), 'id_col': escape(id_col_name)})
class Qu2CuTest:
    """Tests for quadratic-to-cubic curve conversion.

    Fix: the ``@pytest.mark.parametrize`` decorator had been stripped to a
    bare ``.parametrize(...)`` expression (a syntax error); it is restored
    here so the parametrized cases actually run.
    """

    @pytest.mark.parametrize(
        'quadratics, expected, tolerance, cubic_only',
        [
            ([[(0, 0), (0, 1), (2, 1), (2, 0)]],
             [((0, 0), (0, (4 / 3)), (2, (4 / 3)), (2, 0))], 0.1, True),
            ([[(0, 0), (0, 1), (2, 1), (2, 2)]],
             [((0, 0), (0, (4 / 3)), (2, (2 / 3)), (2, 2))], 0.2, True),
            ([[(0, 0), (0, 1), (1, 1)], [(1, 1), (3, 1), (3, 0)]],
             [((0, 0), (0, 1), (1, 1)), ((1, 1), (3, 1), (3, 0))], 0.2, False),
            ([[(0, 0), (0, 1), (1, 1)], [(1, 1), (3, 1), (3, 0)]],
             [((0, 0), (0, (2 / 3)), ((1 / 3), 1), (1, 1)),
              ((1, 1), ((7 / 3), 1), (3, (2 / 3)), (3, 0))], 0.2, True),
        ],
    )
    def test_simple(self, quadratics, expected, tolerance, cubic_only):
        """Convert known quadratics and compare point-by-point (approx)."""
        expected = [tuple(((pytest.approx(p[0]), pytest.approx(p[1])) for p in curve)) for curve in expected]
        c = quadratic_to_curves(quadratics, tolerance, cubic_only)
        assert (c == expected)

    def test_roundtrip(self):
        """curve -> quadratic spline -> curve should reproduce the input."""
        DATADIR = os.path.join(os.path.dirname(__file__), '..', 'cu2qu', 'data')
        with open(os.path.join(DATADIR, 'curves.json'), 'r') as fp:
            curves = json.load(fp)
        tolerance = 1
        splines = [curve_to_quadratic(c, tolerance) for c in curves]
        reconsts = [quadratic_to_curves([spline], tolerance) for spline in splines]
        for (curve, reconst) in zip(curves, reconsts):
            assert (len(reconst) == 1)
            curve = tuple(((pytest.approx(p[0]), pytest.approx(p[1])) for p in curve))
            assert (curve == reconst[0])

    def test_main(self):
        """Smoke-test the CLI and benchmark entry points."""
        qu2cu_main()
        benchmark_main()
def main():
    """Command-line entry point: build/load/flash a LiteX SoC for Arty A7-35."""
    parser = argparse.ArgumentParser(description='LiteX SoC on Arty A7-35')
    parser.add_argument('--build', action='store_true', help='Build bitstream')
    # NOTE(review): help text duplicated from --build; presumably this flag
    # selects single-channel mode — confirm and fix the help string upstream.
    parser.add_argument('--mode-single', action='store_true', help='Build bitstream')
    parser.add_argument('--load', action='store_true', help='Load bitstream')
    parser.add_argument('--flash', action='store_true', help='Flash Bitstream')
    builder_args(parser)
    soc_core_args(parser)
    args = parser.parse_args()
    m = mode.DOUBLE
    if args.mode_single:
        m = mode.SINGLE
    # NOTE(review): sys_clk_freq=.0 passes a zero clock frequency — looks
    # wrong; verify against BaseSoC's expected default.
    soc = BaseSoC(sys_clk_freq=.0, mode=m, **soc_core_argdict(args))
    builder = Builder(soc, **builder_argdict(args))
    builder.build(run=args.build)
    if args.load:
        prog = soc.platform.create_programmer()
        prog.load_bitstream(os.path.join(builder.gateware_dir, (soc.build_name + '.bit')))
    exit()
# Fix: restore the stripped @pytest.mark.parametrize decorator (the source
# contained a bare `.parametrize(...)`, which is a syntax error).
@pytest.mark.parametrize(
    'primary_type, expected',
    (
        ('Mail', 'Mail(Person from,Person to,Person[] cc,string contents)Person(string name,address wallet)'),
        ('Person', 'Person(string name,address wallet)'),
    ),
)
def test_encode_type_eip712_with_array(primary_type, expected, eip712_example_with_array_types):
    """EIP-712 type encoding must include array members and referenced structs."""
    assert (encode_type(primary_type, eip712_example_with_array_types) == expected)
def create_old_measure_value():
    """Seed a historic MeasureValue/MeasureGlobal for the desogestrel measure.

    Runs ``import_measures`` (definitions only) against a patched in-memory
    matrixstore, then attaches values dated 2010-01-01 so tests have
    pre-existing data to exercise cleanup/regeneration paths.
    """
    with patched_global_matrixstore_from_data_factory(build_factory()):
        call_command('import_measures', definitions_only=True, measure='desogestrel')
    m = Measure.objects.get(id='desogestrel')
    m.measurevalue_set.create(month='2010-01-01')
    m.measureglobal_set.create(month='2010-01-01')
def load_es_index(spark: SparkSession, source_df: DataFrame, base_config: dict, index_name: str, routing: str, doc_id: str) -> None:
    """Write ``source_df`` to an Elasticsearch index via the ES-Hadoop JVM API.

    Copies ``base_config`` and overrides the per-index settings (target
    resource, routing field, document-id field) before delegating to
    ``EsSparkSQL.saveToEs`` through the py4j gateway.

    NOTE(review): ``_jmap``/``_jdf``/``_jvm`` are private PySpark bridge
    attributes and may break across Spark versions — confirm the pinned
    Spark version supports them.
    """
    index_config = base_config.copy()
    index_config['es.resource.write'] = index_name
    index_config['es.mapping.routing'] = routing
    index_config['es.mapping.id'] = doc_id
    # Convert the Python dict and DataFrame to their JVM counterparts.
    jvm_es_config_map = source_df._jmap(index_config)
    jvm_data_df = source_df._jdf
    spark.sparkContext._jvm.org.elasticsearch.spark.sql.EsSparkSQL.saveToEs(jvm_data_df, jvm_es_config_map)
class AdCreativeLinkDataCallToActionValue(AbstractObject):
    """API object for a creative call-to-action value (auto-generated SDK style).

    Follows the same Field/_field_types pattern as sibling AbstractObject
    subclasses; keep the field constants and ``_field_types`` in sync.
    """

    def __init__(self, api=None):
        super(AdCreativeLinkDataCallToActionValue, self).__init__()
        # Marker attribute used by the SDK to identify the concrete type.
        self._isAdCreativeLinkDataCallToActionValue = True
        self._api = api

    class Field(AbstractObject.Field):
        # Field name constants for request/response (de)serialization.
        app_destination = 'app_destination'
        app_link = 'app_link'
        application = 'application'
        event_id = 'event_id'
        lead_gen_form_id = 'lead_gen_form_id'
        link = 'link'
        link_caption = 'link_caption'
        link_format = 'link_format'
        page = 'page'
        product_link = 'product_link'
        whatsapp_number = 'whatsapp_number'

    # Maps field names to their wire types for the SDK's type coercion.
    _field_types = {'app_destination': 'string', 'app_link': 'string', 'application': 'string', 'event_id': 'string', 'lead_gen_form_id': 'string', 'link': 'string', 'link_caption': 'string', 'link_format': 'string', 'page': 'string', 'product_link': 'string', 'whatsapp_number': 'string'}

    # NOTE(review): takes `cls` but is not @classmethod — generated-code
    # convention shared with sibling classes; confirm before changing.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def _try_remove_key(ctx: Context, type_: str, connection: bool=False) -> None:
    """Remove a registered private key from the agent configuration.

    Args:
        ctx: the CLI context carrying the loaded agent configuration.
        type_: the key identifier (e.g. ledger id) to remove.
        connection: when True, operate on connection private keys instead of
            agent private keys.

    Raises:
        click.ClickException: if no key with ``type_`` is registered.
    """
    private_keys = (ctx.agent_config.connection_private_key_paths if connection else ctx.agent_config.private_key_paths)
    existing_keys = private_keys.keys()
    if (type_ not in existing_keys):
        raise click.ClickException(f"There is no {('connection ' if connection else '')}key registered with id {type_}.")
    private_keys.delete(type_)
    # Bug fix: the file handle returned by open_file was never closed
    # (resource leak); close it deterministically with a context manager.
    with open_file(os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE), 'w') as fp:
        ctx.agent_loader.dump(ctx.agent_config, fp)
def _read_config_files(paths):
    """Parse config files into ``{section: {key: value}}`` with env expansion.

    Keys keep their original case (``optionxform = str``); each value is
    interpolated against the default dict and then has environment variables
    expanded while those defaults are temporarily exported.
    """
    parser = make_config_parser()
    parser.optionxform = str
    parser.read(paths)
    defaults = _make_defaults_dict()
    with environment(**defaults):
        merged = {
            section: {
                option: expandvars(parser.get(section, option, vars=defaults))
                for option in parser.options(section)
            }
            for section in parser.sections()
        }
    return merged
# Fix: restore the stripped renderer-registration decorator; the source had a
# bare `_renderer(wrap_type=TestLogLoss)` statement which never registered the
# class. NOTE(review): decorator name reconstructed from the library's
# convention — confirm against the evidently codebase.
@default_renderer(wrap_type=TestLogLoss)
class TestLogLossRenderer(TestRenderer):
    """HTML renderer for the log-loss test: adds a box plot detail section."""

    def render_html(self, obj: TestLogLoss) -> TestHtmlInfo:
        info = super().render_html(obj)
        result: ClassificationQualityMetricResult = obj.metric.get_result()
        curr_metrics = result.current.plot_data
        # Reference data is optional; plot only current when it is absent.
        ref_metrics = (None if (result.reference is None) else result.reference.plot_data)
        if (curr_metrics is not None):
            fig = plot_boxes(curr_for_plots=curr_metrics, ref_for_plots=ref_metrics, color_options=self.color_options)
            info.with_details('Logarithmic Loss', plotly_figure(title='', figure=fig))
        return info
def test_s2t_with_tgt_lang(root_path=ROOT_PATH):
    """End-to-end CLI test for the speech-to-text agent with a target-lang file.

    Runs the simuleval CLI against the counter-in-target-language example and
    asserts every logged instance produced the expected Spanish counting
    output.
    """
    args_path = Path.joinpath(root_path, 'examples', 'speech_to_text')
    # The agent resolves relative resources from its own directory.
    os.chdir(args_path)
    with tempfile.TemporaryDirectory() as tmpdirname:
        # Drive the CLI by rewriting sys.argv rather than spawning a process.
        cli.sys.argv[1:] = ['--agent', os.path.join(root_path, 'examples', 'speech_to_text', 'counter_in_tgt_lang_agent.py'), '--user-dir', os.path.join(root_path, 'examples'), '--agent-class', 'agents.CounterInTargetLanguage', '--source-segment-size', '1000', '--source', os.path.join(root_path, 'examples', 'speech_to_text', 'source.txt'), '--target', os.path.join(root_path, 'examples', 'speech_to_text', 'reference/en.txt'), '--output', tmpdirname, '--tgt-lang', os.path.join(root_path, 'examples', 'speech_to_text', 'reference/tgt_lang.txt')]
        cli.main()
        with open(os.path.join(tmpdirname, 'instances.log'), 'r') as f:
            for line in f:
                instance = LogInstance(line.strip())
                assert (instance.prediction == '1 segundos 2 segundos 3 segundos 4 segundos 5 segundos 6 segundos 7 segundos')
class LocalJob(core.Job[R]):
    """Job handle for a locally spawned subprocess.

    State is derived from the underlying ``subprocess.Popen`` poll status;
    the process handle itself is not picklable, so it is parked in the
    module-level ``_PROCESSES`` registry across pickling round-trips.
    """

    def __init__(self, folder: tp.Union[(Path, str)], job_id: str, tasks: tp.Sequence[int]=(0,), process: tp.Optional["subprocess.Popen['bytes']"]=None) -> None:
        super().__init__(folder, job_id, tasks)
        self._cancel_at_deletion = False
        # NOTE(review): self-assignment — presumably forces evaluation of a
        # lazy attribute populated by the base class; confirm upstream intent.
        self._sub_jobs: tp.Sequence['LocalJob[R]'] = self._sub_jobs
        self._process = process
        # Sub-jobs share the same OS process as this parent job.
        for sjob in self._sub_jobs:
            sjob._process = process

    def done(self, force_check: bool=False) -> bool:
        # `force_check` is accepted for interface parity but unused here.
        state = self.get_info()['jobState']
        return (state != 'RUNNING')

    def state(self) -> str:
        # Never raise from a state query; fall back to 'UNKNOWN'.
        try:
            return self.get_info().get('jobState', 'unknown')
        except Exception:
            return 'UNKNOWN'

    def get_info(self, mode: str='force') -> tp.Dict[(str, str)]:
        """Derive jobState from the process poll (or the result file if the
        process handle is gone, e.g. after unpickling in another process)."""
        if (self._process is None):
            state = 'NO PROCESS AND NO RESULT'
            if self.paths.result_pickle.exists():
                state = 'FINISHED'
            return {'jobState': state}
        poll = self._process.poll()
        if (poll is None):
            state = 'RUNNING'
        elif (poll < 0):
            # Negative return code means the process died from a signal.
            state = 'INTERRUPTED'
        else:
            state = 'FINISHED'
        return {'jobState': state}

    def cancel(self, check: bool=True) -> None:
        # SIGINT asks the job to stop gracefully; `check` is unused here.
        if (self._process is not None):
            self._process.send_signal(signal.SIGINT)

    def _interrupt(self) -> None:
        # Sends the environment's USR signal to trigger checkpoint/requeue.
        if (self._process is not None):
            self._process.send_signal(LocalJobEnvironment._usr_sig())

    def __del__(self) -> None:
        # Optionally cancel still-running work when the handle is GC'd, and
        # drop the registry entry once a result has been written.
        if self._cancel_at_deletion:
            if (not (self.get_info().get('jobState') == 'FINISHED')):
                self.cancel(check=False)
        if self.paths.result_pickle.exists():
            _PROCESSES.pop(self.job_id, None)

    def __getstate__(self) -> tp.Any:
        # Popen objects cannot be pickled: stash the handle in _PROCESSES
        # keyed by job id and pickle the rest with _process=None.
        out = dict(self.__dict__)
        out['_process'] = None
        if (self._process is not None):
            _PROCESSES[self.job_id] = self._process
        return out

    def __setstate__(self, state: tp.Any) -> None:
        # Restore the process handle from the registry if this interpreter
        # still owns it; otherwise leave it None.
        self.__dict__.update(state)
        self._process = _PROCESSES.get(self.job_id, None)
class TestDemo(unittest.TestCase):
    """Smoke tests for the Demo UI navigation actions and initialization."""

    def test_demo_previous_button_default(self):
        demo = Demo(model=DemoPath())
        # Bug fix: the guard checks previous_tool but the original body
        # performed next_tool (copy-paste from the test below).
        if get_action_enabled(previous_tool, demo):
            demo.perform(None, previous_tool, None)

    def test_demo_next_button_default(self):
        demo = Demo(model=DemoPath())
        if get_action_enabled(next_tool, demo):
            demo.perform(None, next_tool, None)

    def test_demo_parent_button_default(self):
        demo = Demo(model=DemoPath())
        if get_action_enabled(parent_tool, demo):
            demo.perform(None, parent_tool, None)

    def test_demo_init_set_children(self):
        """init() should select the first child when the model has children."""
        resources = [DemoPath(), DemoPath()]
        model = DemoVirtualDirectory(resources=resources)
        demo = Demo(model=model)
        demo.selected_node = None
        info = UIInfo(ui=UI(handler=Handler()))
        demo.init(info)
        self.assertIs(demo.selected_node, resources[0])

    def test_demo_init_no_children_to_be_set(self):
        """init() should leave selection empty when the model has no children."""
        model = DemoVirtualDirectory(resources=[])
        demo = Demo(model=model)
        demo.selected_node = None
        info = UIInfo(ui=UI(handler=Handler()))
        demo.init(info)
        self.assertIsNone(demo.selected_node)
# Fix: restore the stripped @pytest.mark.django_db decorator (the source had a
# bare `.django_db`, which is a syntax error) so the DB-backed test runs.
@pytest.mark.django_db
def test_extract_hash_from_duns_or_uei_via_duns():
    """An empty DUNS should still resolve to the seeded recipient hash."""
    example_duns = ''
    expected_hash = 'a52a7544-829b-c925-e1ba-d04d3171c09a'
    baker.make('recipient.RecipientLookup', **TEST_RECIPIENT_LOOKUPS[expected_hash])
    recipient_hash = recipients.extract_hash_from_duns_or_uei(example_duns)
    assert (UUID(expected_hash) == recipient_hash)
class CSRFRotationTests(ResolveInfoTestCase):
    """Tests for the csrf_rotation decorator with JWT_CSRF_ROTATION enabled."""

    # Fix: restore the stripped settings-override decorator; the source had a
    # bare `_jwt_settings(JWT_CSRF_ROTATION=True)` statement which never
    # applied the override. NOTE(review): name reconstructed from the
    # django-graphql-jwt convention — confirm.
    @override_jwt_settings(JWT_CSRF_ROTATION=True)
    def test_csrf_rotation(self):
        info_mock = self.info_mock(AnonymousUser())
        decorators.csrf_rotation((lambda cls, root, info, *args, **kwargs: None))(self, None, info_mock)
        # Django 4 renamed the reset flag on the request META.
        if (django.VERSION >= (4,)):
            self.assertTrue(info_mock.context.META['CSRF_COOKIE_NEEDS_UPDATE'])
        else:
            self.assertTrue(info_mock.context.csrf_cookie_needs_reset)
def join_cand_cmte_names(query):
    """Attach candidate and committee names to a totals query.

    Wraps ``query`` in a subquery and LEFT OUTER JOINs the candidate and
    committee history tables on id + cycle, labeling the joined id/name
    columns so downstream serializers can pick them up. Rows without a
    matching history record keep NULL names (outer join).
    """
    query = query.subquery()
    return models.db.session.query(query, models.CandidateHistory.candidate_id.label('candidate_id'), models.CommitteeHistory.committee_id.label('committee_id'), models.CandidateHistory.name.label(CANDIDATE_NAME_LABEL), models.CommitteeHistory.name.label(COMMITTEE_NAME_LABEL)).outerjoin(models.CandidateHistory, sa.and_((query.c.cand_id == models.CandidateHistory.candidate_id), (query.c.cycle == models.CandidateHistory.two_year_period))).outerjoin(models.CommitteeHistory, sa.and_((query.c.cmte_id == models.CommitteeHistory.committee_id), (query.c.cycle == models.CommitteeHistory.cycle)))
class LaunchFilesWidget(QDockWidget):
    """Dock widget listing launch/config files with load/edit/transfer actions.

    Wraps a LaunchListModel behind a sort/filter proxy and wires the toolbar
    buttons, search line and file view signals. Emits the ``*_signal``
    class attributes so the main window can react to user actions.
    (Flattened source: indentation reconstructed conservatively — verify
    nesting of the selection-activated handler against upstream.)
    """

    # Signals consumed by the owning main window.
    load_signal = Signal(str, dict, str)
    load_profile_signal = Signal(str)
    edit_signal = Signal(str)
    transfer_signal = Signal(list)
    save_profile_signal = Signal(str)

    def __init__(self, parent=None):
        QDockWidget.__init__(self, parent)
        self.__current_path = os.path.expanduser('~')
        # Load the Qt Designer UI and inject the custom search line widget.
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui', 'LaunchFilesDockWidget.ui')
        loadUi(ui_file, self, custom_widgets={'EnhancedLineEdit': EnhancedLineEdit})
        self.hostLabel.setVisible(False)
        # Toolbar icons.
        self.ui_button_progress_cancel_cfg.setIcon(nm.settings().icon('crystal_clear_button_close.png'))
        self.ui_button_reload.setIcon(nm.settings().icon('oxygen_view_refresh.png'))
        self.ui_button_edit.setIcon(nm.settings().icon('crystal_clear_edit_launch.png'))
        self.ui_button_new.setIcon(nm.settings().icon('crystal_clear_add.png'))
        self.ui_button_transfer.setIcon(nm.settings().icon('crystal_clear_launch_file_transfer.png'))
        self.ui_button_save_profile.setIcon(nm.settings().icon('crystal_clear_profile_new.png'))
        self.ui_button_load.setIcon(nm.settings().icon('crystal_clear_launch_file.png'))
        self._current_search = ''
        pal = self.palette()
        self._default_color = pal.color(QPalette.Window)
        # Progress queue drives the config progress bar/cancel button.
        self.progress_queue = ProgressQueue(self.ui_frame_progress_cfg, self.ui_bar_progress_cfg, self.ui_button_progress_cancel_cfg, 'Launch File')
        # Model + sort/filter proxy feeding the file view.
        self.launchlist_model = LaunchListModel(progress_queue=self.progress_queue, viewobj=self.ui_file_view)
        self.launchlist_proxy_model = QSortFilterProxyModel(self)
        self.launchlist_proxy_model.setSourceModel(self.launchlist_model)
        self.name_delegate = HTMLDelegate(check_for_ros_names=False, palette=self.palette())
        self.ui_file_view.setItemDelegateForColumn(0, self.name_delegate)
        self.ui_file_view.setModel(self.launchlist_proxy_model)
        self.ui_file_view.setAlternatingRowColors(True)
        self.ui_file_view.activated.connect(self.on_launch_selection_activated)
        self.ui_file_view.setDragDropMode(QAbstractItemView.DragOnly)
        self.ui_file_view.setDragEnabled(True)
        sm = self.ui_file_view.selectionModel()
        sm.selectionChanged.connect(self.on_ui_file_view_selection_changed)
        # Model feedback signals.
        self.launchlist_model.pathlist_handled.connect(self.on_pathlist_handled)
        self.launchlist_model.error_on_path.connect(self.on_error_on_path)
        self.ui_search_line.refresh_signal.connect(self.set_package_filter)
        self.ui_search_line.stop_signal.connect(self.stop)
        # Button wiring.
        self.ui_button_reload.clicked.connect(self.on_reload_clicked)
        self.ui_button_edit.clicked.connect(self.on_edit_xml_clicked)
        self.ui_button_transfer.clicked.connect(self.on_transfer_file_clicked)
        self.ui_button_save_profile.clicked.connect(self.on_save_profile_clicked)
        self.ui_button_load.clicked.connect(self.on_load_xml_clicked)
        # "New" button opens a small menu: create file / create directory.
        self._menu_add = QMenu()
        create_file_action = QAction(nm.settings().icon('crystal_clear_launch_file_new.png'), 'create file', self, statusTip='', triggered=self.on_new_xml_clicked)
        create_dir_action = QAction(nm.settings().icon('crystal_clear_folder.png'), 'create directory', self, statusTip='', triggered=self.on_new_dir_clicked)
        self._menu_add.addAction(create_file_action)
        self._menu_add.addAction(create_dir_action)
        self.ui_button_new.setMenu(self._menu_add)
        self._masteruri2name = {}
        self._reload_timer = None
        self._first_path = self.launchlist_model.current_path

    def stop(self):
        """Abort pending progress tasks, search activity and reload timer."""
        self.progress_queue.stop()
        self.ui_search_line.set_process_active(False)
        self._stop_timer_reload()

    def set_current_master(self, masteruri, mastername):
        """Switch the model to a new master and recolor the dock to match."""
        self.launchlist_model.set_current_master(masteruri, mastername)
        self._masteruri2name[masteruri.rstrip(os.path.sep)] = mastername
        mname = self.path2mastername(self.launchlist_model.current_path)
        if mname:
            color = QColor.fromRgb(nm.settings().host_color(mname, self._default_color.rgb()))
            self._new_color(color)

    def path2mastername(self, grpc_path):
        """Resolve a grpc path to its known master name, or '' if unknown."""
        try:
            muri = nmdurl.masteruri(grpc_path)
            return self._masteruri2name[muri.rstrip(os.path.sep)]
        except Exception as _:
            pass
        return ''

    def on_launch_selection_activated(self, activated):
        """Handle double-click/Enter: load, open profile, or edit the item."""
        selected = self._pathItemsFromIndexes(self.ui_file_view.selectionModel().selectedIndexes(), False)
        mname = self.path2mastername(self.launchlist_model.current_path)
        for item in selected:
            try:
                self.ui_search_line.set_process_active(True)
                lfile = self.launchlist_model.expand_item(item.path, item.id)
                if (lfile is not None):
                    self.ui_search_line.set_process_active(False)
                    if item.is_launch_file():
                        nm.settings().launch_history_add(item.path)
                        self.load_signal.emit(item.path, {}, None)
                    elif item.is_profile_file():
                        nm.settings().launch_history_add(item.path)
                        self.load_profile_signal.emit(item.path)
                    elif item.is_config_file():
                        self.edit_signal.emit(lfile)
                # Update the remote-host banner and dock color after navigation.
                mname = self.path2mastername(self.launchlist_model.current_path)
                self.hostLabel.setText(('Remote <b>%s</b>' % mname))
                if (mname and (self._first_path != self.launchlist_model.current_path)):
                    self.hostLabel.setVisible(True)
                else:
                    self.hostLabel.setVisible(False)
                if mname:
                    color = QColor.fromRgb(nm.settings().host_color(mname, self._default_color.rgb()))
                    self._new_color(color)
            except Exception as e:
                import traceback
                print(traceback.format_exc())
                rospy.logwarn(('Error while load launch file %s: %s' % (item, utf8(e))))
                MessageBox.warning(self, 'Load error', ('Error while load launch file:\n%s' % item.name), ('%s' % utf8(e)))

    def _new_color(self, color):
        # Gradient from the host color to the default window color.
        bg_style_launch_dock = ('QWidget#ui_dock_widget_contents { background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 %s, stop: 0.7 %s);}' % (color.name(), self._default_color.name()))
        self.setStyleSheet(('%s' % bg_style_launch_dock))

    def on_pathlist_handled(self, gpath):
        """Model finished listing a path: stop spinners, update buttons."""
        self.ui_search_line.set_process_active(False)
        # Creating files/dirs is not allowed at the virtual root.
        self.ui_button_new.setEnabled((not self.launchlist_model.is_in_root))
        self._stop_timer_reload()

    def on_error_on_path(self, gpath):
        """Model failed to list a path: stop spinner, schedule retry at root."""
        if ((gpath == self._current_search) or (gpath == self.launchlist_model.current_path)):
            self.ui_search_line.set_process_active(False)
        if self.launchlist_model.is_in_root:
            self._reload_timer = threading.Timer(2.0, nm.nmd().file.list_path_threaded)
            self._reload_timer.start()

    def _stop_timer_reload(self):
        if ((self._reload_timer is not None) and self._reload_timer.is_alive()):
            try:
                self._reload_timer.cancel()
                self._reload_timer = None
            except Exception:
                pass

    def _on_timer_reload_callback(self, event=None):
        # Retry the listing and re-arm the timer for the next attempt.
        nm.nmd().file.list_path_threaded(self.launchlist_model.current_path)
        self._reload_timer = threading.Timer(2.0, nm.nmd().file.list_path_threaded)
        self._reload_timer.start()

    def on_launch_selection_changed(self, selected, deselected):
        # Debug hook only; the real handler is on_ui_file_view_selection_changed.
        print('selection launch changed')

    def load_file(self, path, args={}, masteruri=None):
        """Emit the appropriate load signal for a .launch or .nmprofile path."""
        if (path is not None):
            if path.endswith('.launch'):
                self.load_signal.emit(path, args, masteruri)
            elif path.endswith('.nmprofile'):
                self.load_profile_signal.emit(path)

    def on_ui_file_view_selection_changed(self, selected, deselected):
        """Enable/disable the action buttons to match the selected item type."""
        selected = self._pathItemsFromIndexes(self.ui_file_view.selectionModel().selectedIndexes(), False)
        for item in selected:
            islaunch = item.is_launch_file()
            isconfig = item.is_config_file()
            isprofile = item.is_profile_file()
            self.ui_button_edit.setEnabled((islaunch or isconfig or isprofile))
            self.ui_button_load.setEnabled((islaunch or isprofile))
            self.ui_button_transfer.setEnabled((islaunch or isconfig))

    def set_package_filter(self, text):
        """Apply the search text: absolute paths navigate, else filter packages."""
        if text:
            if text.startswith(os.path.sep):
                self._current_search = nmdurl.join(self.launchlist_model.current_grpc, text)
                self.launchlist_model.set_path(text)
            else:
                self.launchlist_model.show_packages(text)
                self.ui_search_line.set_process_active(False)
        else:
            self.launchlist_model.reload_current_path()

    def on_reload_clicked(self):
        self.launchlist_model.reload_current_path(clear_cache=True)

    def on_edit_xml_clicked(self):
        """Open the editor for each selected (expanded) config file."""
        selected = self._pathItemsFromIndexes(self.ui_file_view.selectionModel().selectedIndexes(), False)
        for item in selected:
            path = self.launchlist_model.expand_item(item.path, item.id)
            if (path is not None):
                self.edit_signal.emit(path)

    def on_new_xml_clicked(self):
        """Create a new launch file in the current directory and start renaming."""
        if (not self.launchlist_model.is_in_root):
            items = self.launchlist_model.add_new_item('new.launch', PathItem.LAUNCH_FILE)
            if items:
                index = self.launchlist_proxy_model.mapFromSource(self.launchlist_model.index(1, 0))
                self.ui_file_view.selectionModel().select(index, QItemSelectionModel.Select)
                self.ui_file_view.setCurrentIndex(index)
                self.ui_file_view.edit(index)

    def on_new_dir_clicked(self):
        """Create a new directory in the current directory and start renaming."""
        if (not self.launchlist_model.is_in_root):
            items = self.launchlist_model.add_new_item('new', PathItem.FOLDER)
            if items:
                index = self.launchlist_proxy_model.mapFromSource(self.launchlist_model.index(1, 0))
                self.ui_file_view.selectionModel().select(index, QItemSelectionModel.Select)
                self.ui_file_view.setCurrentIndex(index)
                self.ui_file_view.edit(index)

    def on_transfer_file_clicked(self):
        """Emit transfer_signal with all selected, resolvable paths."""
        selected = self._pathItemsFromIndexes(self.ui_file_view.selectionModel().selectedIndexes(), False)
        paths = list()
        for item in selected:
            path = self.launchlist_model.expand_item(item.path, item.id)
            if (path is not None):
                paths.append(path)
        if paths:
            self.transfer_signal.emit(paths)

    def on_save_profile_clicked(self):
        # Strip the grpc netloc; the profile is saved relative to the path.
        (_netloc, path) = nmdurl.split(self.launchlist_model.current_path, with_scheme=True)
        self.save_profile_signal.emit(path)

    def on_load_xml_clicked(self):
        """Load every selected launch file and record it in the history."""
        selected = self._pathItemsFromIndexes(self.ui_file_view.selectionModel().selectedIndexes(), False)
        for item in selected:
            path = self.launchlist_model.expand_item(item.path, item.id)
            if (path is not None):
                nm.settings().launch_history_add(item.path)
                self.load_signal.emit(path, {}, None)

    def _pathItemsFromIndexes(self, indexes, recursive=True):
        """Map proxy indexes (column 0 only) back to model PathItem objects."""
        result = []
        for index in indexes:
            if (index.column() == 0):
                model_index = self.launchlist_proxy_model.mapToSource(index)
                item = self.launchlist_model.itemFromIndex(model_index)
                if ((item is not None) and isinstance(item, PathItem)):
                    result.append(item)
        return result

    def keyPressEvent(self, event):
        """Keyboard shortcuts: Del(+Shift) delete, F4 edit, Find/Copy/Paste."""
        key_mod = QApplication.keyboardModifiers()
        # Ignore shortcuts while an inline rename edit is open.
        if (not (self.ui_file_view.state() == QAbstractItemView.EditingState)):
            if ((event == QKeySequence.Delete) or ((event.key() == Qt.Key_Delete) and (key_mod & Qt.ShiftModifier))):
                selected = self._pathItemsFromIndexes(self.ui_file_view.selectionModel().selectedIndexes(), False)
                for item in selected:
                    if (item in nm.settings().launch_history):
                        # Plain delete on a history entry just removes it from history.
                        nm.settings().launch_history_remove(item.path)
                        self.launchlist_model.reload_current_path()
                    elif (not self.launchlist_model.is_in_root):
                        if (key_mod & Qt.ShiftModifier):
                            # Shift+Del really deletes the remote file after confirmation.
                            (rem_uri, rem_path) = nmdurl.split(item.path)
                            host = rem_uri.split(':')
                            result = MessageBox.question(self, 'Delete Question', ('Delete %s\ %s' % (rem_path, host[0])), buttons=(MessageBox.No | MessageBox.Yes))
                            if (result == MessageBox.Yes):
                                try:
                                    nm.nmd().file.delete(item.path)
                                    self.launchlist_model.reload_current_path(clear_cache=True)
                                except Exception as e:
                                    rospy.logwarn(('Error while delete %s: %s' % (item.path, utf8(e))))
                                    MessageBox.warning(self, 'Delete error', ('Error while delete:\n%s' % item.name), ('%s' % utf8(e)))
                        else:
                            MessageBox.information(self, 'Delete Info', 'Use Shift+Del to delete files or directories', buttons=MessageBox.Ok)
            elif ((not key_mod) and (event.key() == Qt.Key_F4) and self.ui_button_edit.isEnabled()):
                self.on_edit_xml_clicked()
            elif (event == QKeySequence.Find):
                self.ui_search_line.setFocus(Qt.ActiveWindowFocusReason)
            elif (event == QKeySequence.Paste):
                self.launchlist_model.paste_from_clipboard()
            elif (event == QKeySequence.Copy):
                selected = self.ui_file_view.selectionModel().selectedIndexes()
                indexes = []
                for s in selected:
                    indexes.append(self.launchlist_proxy_model.mapToSource(s))
                self.launchlist_model.copy_to_clipboard(indexes)
        if (self.ui_search_line.hasFocus() and (event.key() == Qt.Key_Escape)):
            # Escape clears the search and returns focus to the file view.
            self.launchlist_model.reload_current_path()
            self.ui_search_line.setText('')
            self.ui_file_view.setFocus(Qt.ActiveWindowFocusReason)
        # NOTE(review): forwards to keyReleaseEvent from within keyPressEvent —
        # looks intentional in upstream but verify.
        QDockWidget.keyReleaseEvent(self, event)
# Fix: restore the stripped @pytest.mark.parametrize decorator (the source had
# a bare `.parametrize(...)`, which is a syntax error).
@pytest.mark.parametrize('solver_type', ['mg', 'mgmatfree'])
def test_preconditioner_coarsening(solver_type):
    """Richardson + assembled-PC multigrid should solve the scaled Poisson
    problem to tight tolerance on a two-level mesh hierarchy."""
    nlevel = 2
    base = UnitSquareMesh(10, 10)
    mh = MeshHierarchy(base, nlevel)
    mesh = mh[-1]
    V = FunctionSpace(mesh, 'CG', 2)
    R = FunctionSpace(mesh, 'R', 0)
    alpha = Function(R)
    alpha.assign(0.01)
    beta = Function(R)
    beta.assign(100)
    (exact, f) = manufacture_solution(V)
    v = TestFunction(V)
    u = TrialFunction(V)
    # Operator a and a beta-scaled preconditioning operator Jp; the
    # Richardson scale of beta undoes the scaling, exercising PC coarsening.
    a = inner((alpha * grad(u)), grad(v)) * dx
    Jp = inner(((beta * alpha) * grad(u)), grad(v)) * dx
    bcs = DirichletBC(V, 0.0, (1, 2, 3, 4))
    L = inner((alpha * f), v) * dx
    uh = function.Function(V)
    parameters = {'mat_type': 'matfree', 'snes_type': 'ksponly', 'ksp_convergence_test': 'skip', 'ksp_type': 'richardson', 'ksp_max_it': 1, 'ksp_richardson_scale': float(beta), 'pc_type': 'python', 'pc_python_type': 'firedrake.AssembledPC', 'assembled': solver_parameters(solver_type)}
    solve((a == L), uh, bcs=bcs, J=a, Jp=Jp, solver_parameters=parameters)
    assert (norm(assemble((exact - uh))) < 4e-06)
class Solution(object):
    """LeetCode 915: partition an array into two contiguous parts so that
    every element of the left part is <= every element of the right part."""

    def partitionDisjoint(self, A):
        """Return the length of the smallest valid left partition.

        Uses prefix maxima and suffix minima: the earliest cut index ``i``
        where min(A[i+1:]) >= max(A[:i+1]) gives the answer; if no interior
        cut works, the whole array is the left part.
        """
        # Running maximum of A[:k+1] for each k.
        running_max = []
        for value in A:
            prev = running_max[-1] if running_max else value
            running_max.append(prev if prev > value else value)
        # Running minimum of A[k:] for each k (built right-to-left).
        running_min = []
        for value in reversed(A):
            prev = running_min[-1] if running_min else value
            running_min.append(prev if prev < value else value)
        running_min.reverse()
        # First interior cut where the right side never dips below the left max.
        for cut in range(len(A) - 1):
            if running_min[cut + 1] >= running_max[cut]:
                return cut + 1
        return len(A)
def system_read_logs(state, log_files):
    """Read the requested log files from the configured log directory.

    Returns a dict mapping each name in ``log_files`` to its content (via
    ``read_file``) or ``None`` when the file could not be opened; open
    failures are logged rather than raised. ``state`` is accepted for the
    task-handler signature but unused here.
    """
    from core import settings
    log_files_result = {}
    log_path = settings.LOGDIR
    for log in log_files:
        abs_path_name = os.path.join(log_path, log)
        try:
            with open(abs_path_name) as f:
                log_files_result[log] = read_file(f)
        except IOError as exc:
            # Best-effort: record the failure and continue with other files.
            logger.error('Error retrieving log file %s (%s)', abs_path_name, exc)
            log_files_result[log] = None
    return log_files_result
def round_trip_dump(data, stream=None, indent=None, block_seq_indent=None, top_level_colon_align=None, prefix_colon=None, explicit_start=None, explicit_end=None, version=None):
    """Thin pass-through to ``srsly.ruamel_yaml.round_trip_dump``.

    All keyword arguments are forwarded unchanged; the import is deferred so
    the dependency is only loaded when YAML dumping is actually used.
    """
    import srsly.ruamel_yaml
    forwarded = dict(
        stream=stream,
        indent=indent,
        block_seq_indent=block_seq_indent,
        top_level_colon_align=top_level_colon_align,
        prefix_colon=prefix_colon,
        explicit_start=explicit_start,
        explicit_end=explicit_end,
        version=version,
    )
    return srsly.ruamel_yaml.round_trip_dump(data, **forwarded)
def set_weights_inverse_capacity(topology):
    """Assign each edge a weight inversely proportional to its capacity.

    The weight is ``max_capacity / capacity`` so the highest-capacity link
    gets weight 1.0 and thinner links get proportionally larger weights.

    Raises:
        ValueError: if any edge lacks a 'capacity' attribute.
    """
    adjacency = topology.adj
    try:
        capacities = [adjacency[u][v]['capacity'] for (u, v) in topology.edges()]
        max_capacity = float(max(capacities))
    except KeyError:
        raise ValueError('All links must have a capacity attribute')
    for (u, v) in topology.edges():
        adjacency[u][v]['weight'] = max_capacity / adjacency[u][v]['capacity']
def upgrade():
    """Alembic migration: add the ``groups`` table and link events to groups.

    Creates ``groups`` (cascade-deleted with its owning user), adds a
    nullable ``group_id`` FK on ``events`` (SET NULL on group delete), and
    mirrors the column on the ``events_version`` history table.
    """
    op.create_table('groups', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('created_at', sa.DateTime(timezone=True), nullable=True), sa.Column('modified_at', sa.DateTime(timezone=True), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'), sa.PrimaryKeyConstraint('id'))
    op.add_column('events', sa.Column('group_id', sa.Integer(), nullable=True))
    op.create_foreign_key(u'events_group_id_fkey', 'events', 'groups', ['group_id'], ['id'], ondelete='SET NULL')
    # Version table gets the raw column only (no FK on history rows).
    op.add_column('events_version', sa.Column('group_id', sa.Integer(), autoincrement=False, nullable=True))
def replace_method_in_dvspec(method_name_string: str, replacement_method: Callable) -> None:
    """Swap out ``method_name_string`` for ``replacement_method`` in every
    submodule of the ``dvspec`` package (e.g. to monkeypatch spec helpers)."""
    for submodule_info in iter_modules(dvspec.__path__):
        qualified_name = f'{dvspec.__name__}.{submodule_info.name}'
        submodule = importlib.import_module(qualified_name)
        replace_module_method(submodule, method_name_string, replacement_method)
# Fix: restore the stripped registration decorator; the source contained only
# the bare marker string `('cuda.gemm_rrr.gen_function')`, which registers
# nothing. NOTE(review): decorator reconstructed from the backend registry
# convention — confirm the exact registration call upstream.
@registry.reg('cuda.gemm_rrr.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate CUDA source for a row-row-row GEMM op.

    Derives input/weight/output ranks from the op's shape accessors, renders
    the CUTLASS 2.x and 3.x problem-argument templates for the op's dtypes,
    and delegates to the shared ``common.gen_function`` with split-K support
    and a stride-based output address calculator.
    """
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    output_ndims = len(func_attrs['output_accessors'][0].original_shapes)
    backend_spec = CUDASpec()
    elem_input_type = backend_spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_output_type = backend_spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    problem_args = PROBLEM_ARGS_TEMPLATE.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type)
    problem_args_cutlass_3x = PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type)
    return common.gen_function(func_attrs=func_attrs, src_template=common.SRC_TEMPLATE, exec_cond_template=exec_cond_template, problem_args=problem_args, problem_args_cutlass_3x=problem_args_cutlass_3x, input_ndims=input_ndims, weight_ndims=weight_ndims, output_ndims=output_ndims, dim_info_dict=dim_info_dict, support_split_k=True, output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(stride_dim='*b_dim1', output_accessor=func_attrs['output_accessors'][0]))
class IRCRoomOccupant(IRCPerson, RoomOccupant):
    """An IRC person seen as an occupant of a specific room."""

    def __init__(self, mask, room):
        super().__init__(mask)
        self._room = room

    # Fix: restored the stripped @property decorator — RoomOccupant exposes
    # `room` as a property, so a plain method broke `occupant.room` access.
    @property
    def room(self) -> Room:
        return self._room

    def __unicode__(self):
        # Legacy Py2-style accessor kept because __str__ delegates to it.
        return self._nickmask

    def __str__(self):
        return self.__unicode__()

    def __repr__(self):
        return f'<{self.__unicode__()} - {super().__repr__()}>'
def find_all_uncategorized_dataset_fields(existing_datasets: List[Dataset], source_datasets: List[Dataset]) -> Tuple[(List[str], int)]:
    """Collect uncategorized field keys across all source datasets.

    For each source dataset, the matching existing dataset (by name, or None
    when absent) is compared via ``find_uncategorized_dataset_fields``; the
    per-dataset keys are concatenated and the field counts summed.

    Returns:
        A tuple of (all uncategorized field keys, total field count).
    """
    collected_keys: List[str] = []
    total_fields = 0
    for source in source_datasets:
        # First existing dataset with the same name, if any.
        match = None
        for candidate in existing_datasets:
            if candidate.name == source.name:
                match = candidate
                break
        (keys, count) = find_uncategorized_dataset_fields(existing_dataset=match, source_dataset=source)
        total_fields += count
        collected_keys.extend(keys)
    return (collected_keys, total_fields)
class TestMaxValueValidatorValidation(TestCase):
    """Exercise MaxValueValidator(100) enforcement at the serializer and view level."""

    def test_max_value_validation_serializer_success(self):
        # A value under the limit validates cleanly.
        ser = ValidationMaxValueValidatorModelSerializer(data={'number_value': 99})
        assert ser.is_valid()

    def test_max_value_validation_serializer_fails(self):
        # A value over the limit is rejected with the validator's message.
        ser = ValidationMaxValueValidatorModelSerializer(data={'number_value': 101})
        assert not ser.is_valid()
        expected_errors = {'number_value': ['Ensure this value is less than or equal to 100.']}
        assert ser.errors == expected_errors

    def test_max_value_validation_success(self):
        obj = ValidationMaxValueValidatorModel.objects.create(number_value=100)
        request = factory.patch('/{}'.format(obj.pk), {'number_value': 98}, format='json')
        response = UpdateMaxValueValidationModel().as_view()(request, pk=obj.pk).render()
        assert response.status_code == status.HTTP_200_OK

    def test_max_value_validation_fail(self):
        obj = ValidationMaxValueValidatorModel.objects.create(number_value=100)
        request = factory.patch('/{}'.format(obj.pk), {'number_value': 101}, format='json')
        response = UpdateMaxValueValidationModel().as_view()(request, pk=obj.pk).render()
        assert response.content == b'{"number_value":["Ensure this value is less than or equal to 100."]}'
        assert response.status_code == status.HTTP_400_BAD_REQUEST
def set_measure_fields(apps, schema_editor):
    """Data migration: populate the SQL-fragment columns on every Measure.

    Values come from the measure's JSON definition via _get_measure_data,
    with list values flattened to strings by arrays_to_strings.
    """
    Measure = apps.get_model('frontend', 'Measure')
    field_names = (
        'numerator_from',
        'numerator_where',
        'numerator_columns',
        'denominator_from',
        'denominator_where',
        'denominator_columns',
    )
    # iterator() avoids loading the full queryset into memory at once.
    for measure in Measure.objects.all().iterator():
        values = arrays_to_strings(_get_measure_data(measure.id))
        for name in field_names:
            setattr(measure, name, values[name])
        measure.save()
class flow_modify(flow_mod):
    """OFPT_FLOW_MOD message (wire version 6) with command OFPFC_MODIFY.

    NOTE(review): this is loxigen-style generated serialization code; the
    ``''.join`` over ``struct.pack`` output is Python 2 bytes handling, and
    ``unpack`` takes a bare ``reader`` (presumably ``@staticmethod`` in the
    generated original -- the decorator seems lost in this dump; confirm).
    """
    version = 6   # OpenFlow wire protocol version
    type = 14     # message type: OFPT_FLOW_MOD
    _command = 1  # flow-mod command: OFPFC_MODIFY

    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, importance=None, match=None, instructions=None):
        # Every field defaults to a zero value; xid stays None so the
        # connection layer can assign one later.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (cookie_mask != None):
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        if (idle_timeout != None):
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if (hard_timeout != None):
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if (priority != None):
            self.priority = priority
        else:
            self.priority = 0
        if (buffer_id != None):
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if (out_port != None):
            self.out_port = out_port
        else:
            self.out_port = 0
        if (out_group != None):
            self.out_group = out_group
        else:
            self.out_group = 0
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (importance != None):
            self.importance = importance
        else:
            self.importance = 0
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        if (instructions != None):
            self.instructions = instructions
        else:
            self.instructions = []
        return

    def pack(self):
        """Serialize the message; the length field is patched in afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!Q', self.cookie_mask))
        packed.append(struct.pack('!B', self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack('!H', self.idle_timeout))
        packed.append(struct.pack('!H', self.hard_timeout))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!L', self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack('!L', self.out_group))
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!H', self.importance))
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize a flow_modify from `reader`; asserts fixed header fields."""
        obj = flow_modify()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 14)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's length (4 bytes already read
        # before the length field, per the loxi slice convention).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.cookie = reader.read('!Q')[0]
        obj.cookie_mask = reader.read('!Q')[0]
        obj.table_id = reader.read('!B')[0]
        __command = util.unpack_fm_cmd(reader)
        assert (__command == 1)
        obj.idle_timeout = reader.read('!H')[0]
        obj.hard_timeout = reader.read('!H')[0]
        obj.priority = reader.read('!H')[0]
        obj.buffer_id = reader.read('!L')[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read('!L')[0]
        obj.flags = reader.read('!H')[0]
        obj.importance = reader.read('!H')[0]
        obj.match = ofp.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, ofp.instruction.instruction.unpack)
        return obj

    def __eq__(self, other):
        # Field-by-field equality; xid is included.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.cookie != other.cookie):
            return False
        if (self.cookie_mask != other.cookie_mask):
            return False
        if (self.table_id != other.table_id):
            return False
        if (self.idle_timeout != other.idle_timeout):
            return False
        if (self.hard_timeout != other.hard_timeout):
            return False
        if (self.priority != other.priority):
            return False
        if (self.buffer_id != other.buffer_id):
            return False
        if (self.out_port != other.out_port):
            return False
        if (self.out_group != other.out_group):
            return False
        if (self.flags != other.flags):
            return False
        if (self.importance != other.importance):
            return False
        if (self.match != other.match):
            return False
        if (self.instructions != other.instructions):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump of all fields into pretty-printer `q`."""
        q.text('flow_modify {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('cookie_mask = ')
                q.text(('%#x' % self.cookie_mask))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('idle_timeout = ')
                q.text(('%#x' % self.idle_timeout))
                q.text(',')
                q.breakable()
                q.text('hard_timeout = ')
                q.text(('%#x' % self.hard_timeout))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('buffer_id = ')
                q.text(('%#x' % self.buffer_id))
                q.text(',')
                q.breakable()
                q.text('out_port = ')
                q.text(util.pretty_port(self.out_port))
                q.text(',')
                q.breakable()
                q.text('out_group = ')
                q.text(('%#x' % self.out_group))
                q.text(',')
                q.breakable()
                q.text('flags = ')
                # Symbolic names for the OFPFF_* flag bits, for readable output.
                value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_RESET_COUNTS', 8: 'OFPFF_NO_PKT_COUNTS', 16: 'OFPFF_NO_BYT_COUNTS', 128: 'OFPFF_BSN_SEND_IDLE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('importance = ')
                q.text(('%#x' % self.importance))
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
                q.text(',')
                q.breakable()
                q.text('instructions = ')
                q.pp(self.instructions)
            q.breakable()
        q.text('}')
class WorkspaceUser(UUIDModel, CreatedUpdatedAt, MainBase):
    """Association row linking a user to a workspace (membership)."""
    __tablename__ = 'workspace_users'
    # FK to workspaces; the row is deleted when the workspace is deleted.
    workspace_id: Mapped[UUID4] = mapped_column(GUID, ForeignKey(Workspace.id, ondelete='CASCADE'), nullable=False)
    # No FK constraint on user_id -- presumably users live outside this
    # schema/service; confirm before adding one.
    user_id: Mapped[UUID4] = mapped_column(GUID, nullable=False)
    # Eagerly joined so accessing .workspace does not issue a second query.
    workspace: Mapped[Workspace] = relationship('Workspace', back_populates='workspace_users', lazy='joined')

    def __repr__(self) -> str:
        return f'WorkspaceUser(id={self.id}, workspace_id={self.workspace_id}, user_id={self.user_id})'
def _create_directory_for_authentication():
    """Create the directory for the user-authentication database.

    Derives the directory from the configured database URI, creates it with
    sudo and chowns it to the current user.

    Raises:
        InstallationError: if either the mkdir or the chown command fails.
    """
    logging.info('Creating directory for authentication')
    dburi = config.frontend.authentication.user_database
    # Drop the filename, then strip the URI scheme prefix (first 10 chars,
    # e.g. 'sqlite:///') to get a filesystem path -- assumes a sqlite-style
    # URI; confirm if other schemes are possible.
    factauthdir = '/'.join(dburi.split('/')[:(- 1)])[10:]
    mkdir_process = subprocess.run(f'sudo mkdir -p --mode=0744 {factauthdir}', shell=True, stdout=PIPE, stderr=STDOUT, text=True)
    chown_process = subprocess.run(f'sudo chown {os.getuid()}:{os.getgid()} {factauthdir}', shell=True, stdout=PIPE, stderr=STDOUT, text=True)
    if (not all(((process.returncode == 0) for process in (mkdir_process, chown_process)))):
        # BUG FIX: the error message previously joined mkdir_process.stdout
        # twice, dropping the chown output entirely.
        raise InstallationError('Error in creating directory for authentication database.\n{}'.format('\n'.join((mkdir_process.stdout, chown_process.stdout))))
class Settings(WorkFlowSettings):
    """Run settings for a ciftify fMRI workflow, parsed from docopt arguments.

    Validates all inputs up front and exits the process (sys.exit(1)) on any
    problem, so the workflow can assume a consistent configuration.
    """

    def __init__(self, arguments):
        WorkFlowSettings.__init__(self, arguments)
        self.subject = self.__get_subject(arguments)
        self.fmri_label = arguments['<task_label>']
        (self.results_dir, self.log) = self.__set_results_dir()
        (self.func_4D, self.num_TR, self.TR_in_ms) = self.__set_func_4D(arguments['<func.nii.gz>'])
        self.func_ref = self.__get_func_3D(arguments['--func-ref'])
        self.registered_to_this_T1w = self.__get_reg_t1w(arguments['--T1w-anat'])
        self.smoothing = self.__set_smoothing(arguments['--SmoothingFWHM'])
        self.dilate_percent_below = arguments['--DilateBelowPct']
        # Fixed dilation factor; not exposed on the command line.
        self.dilate_factor = 10
        self.diagnostics = self.__set_surf_diagnostics(arguments['--OutputSurfDiagnostics'])
        self.already_atlas_transformed = arguments['--already-in-MNI']
        self.run_flirt = arguments['--FLIRT-to-T1w']
        self.vol_reg = self.__define_volume_registration(arguments)
        self.surf_reg = self.__define_surface_registration(arguments)
        # Collapse the inherited grayordinate-resolution list to its first entry.
        self.grayord_res = self.grayord_res[0]

    def __get_subject(self, arguments):
        # Subject wraps path resolution under the working directory.
        subject_id = arguments['<subject>']
        return Subject(self.work_dir, subject_id)

    def __set_results_dir(self):
        """Create the per-task results directory; refuse to rerun over an existing log."""
        results_dir = os.path.join(self.subject.atlas_space_dir, 'Results', self.fmri_label)
        log = os.path.join(results_dir, 'ciftify_subject_fmri.log')
        if os.path.exists(log):
            logger.error('Subject output already exits.\n To force rerun, delete or rename the logfile:\n\t{}'.format(log))
            sys.exit(1)
        if (not os.path.exists(results_dir)):
            ciftify.utils.make_dir(results_dir)
        return (results_dir, log)

    def __set_func_4D(self, func_4D):
        """Validate the 4D fMRI input; read TR count and TR duration via fslval."""
        if (not os.path.isfile(func_4D)):
            logger.error('fMRI input {} does not exist :(..Exiting'.format(func_4D))
            sys.exit(1)
        num_TR = first_word(get_stdout(['fslval', func_4D, 'dim4']))
        TR_in_ms = first_word(get_stdout(['fslval', func_4D, 'pixdim4']))
        return (func_4D, num_TR, TR_in_ms)

    def __get_reg_t1w(self, anat_arg):
        """Return the optional --T1w-anat path after checking it is a nifti, else None."""
        if anat_arg:
            anat_input = ciftify.meants.NibInput(anat_arg)
            if (not (anat_input.type == 'nifti')):
                logger.critical('--T1w-anat input {} is not a readable nifti file.'.format(anat_arg))
                sys.exit(1)
            return anat_input.path
        else:
            return None

    def __define_volume_registration(self, arguments, method='FSL_fnirt', standard_res='2mm'):
        """Resolve volume-registration config, absolutizing its directory entries."""
        registration_config = WorkFlowSettings.get_config_entry(self, 'registration')
        for key in ['src_dir', 'dest_dir', 'xfms_dir']:
            try:
                subfolders = registration_config[key]
            except KeyError:
                logger.critical('registration config does not contain expectedkey {}'.format(key))
                sys.exit(1)
            registration_config[key] = os.path.join(self.subject.path, subfolders)
        resolution_config = WorkFlowSettings.get_resolution_config(self, method, standard_res)
        registration_config.update(resolution_config)
        return registration_config

    def __define_surface_registration(self, arguments):
        """Map the registration mode to a sphere RegName and verify the sphere exists."""
        surf_mode = ciftify.utils.get_registration_mode(arguments)
        if (surf_mode == 'MSMSulc'):
            RegName = 'MSMSulc'
        elif (surf_mode == 'FS'):
            RegName = 'reg.reg_LR'
        else:
            logger.critical('--reg-name argument must be "FS" or "MSMSulc"')
            sys.exit(1)
        # Only the left-hemisphere sphere is checked; presumably the right one
        # always accompanies it -- confirm.
        L_sphere = os.path.join(self.subject.atlas_native_dir, '{}.L.sphere.{}.native.surf.gii'.format(self.subject.id, RegName))
        if (not os.path.exists(L_sphere)):
            logger.critical('Registration Sphere {} not found'.format(L_sphere))
            sys.exit(1)
        return RegName

    def __get_func_3D(self, func_ref):
        # Wrap the optional functional reference volume argument.
        return ReferenceVolume(func_ref)

    def __set_smoothing(self, smoothing_user_arg):
        # Smoothing converts the FWHM argument into fwhm/sigma values.
        return Smoothing(smoothing_user_arg)

    def __set_surf_diagnostics(self, OutputSurfDiagnostics):
        return DiagnosticSettings(self.results_dir, OutputSurfDiagnostics)

    def get_log_handler(self, formatter):
        """Return a file handler writing INFO-level logs to the per-run logfile."""
        fh = logging.FileHandler(self.log)
        fh.setLevel(logging.INFO)
        fh.setFormatter(formatter)
        return fh

    def print_settings(self):
        """Log a human-readable summary of all resolved settings."""
        logger.info('{}---### Start of User Settings ###---'.format(os.linesep))
        logger.info('Arguments:')
        logger.info('\tInput_fMRI: {}'.format(self.func_4D))
        logger.info('\t\tNumber of TRs: {}'.format(self.num_TR))
        logger.info('\t\tTR(ms): {}'.format(self.TR_in_ms))
        logger.info('\tCIFTIFY_WORKDIR: {}'.format(self.work_dir))
        logger.info('\tSubject: {}'.format(self.subject.id))
        logger.info('\tfMRI Output Label: {}'.format(self.fmri_label))
        logger.info('\t{}'.format(self.func_ref.descript))
        logger.info('\tSurface Registration Sphere: {}'.format(self.surf_reg))
        logger.info('\tT1w intermiadate for registation: {}'.format(self.registered_to_this_T1w))
        if (self.smoothing.sigma > 0):
            logger.info('\tSmoothingFWHM: {}'.format(self.smoothing.fwhm))
            logger.info('\tSmoothing Sigma: {}'.format(self.smoothing.sigma))
        else:
            logger.info('\tNo smoothing will be applied')
        if self.dilate_percent_below:
            logger.info('\tWill fill holes defined as data with intensity below {} percentile'.format(self.dilate_percent_below))
        logger.info('\tMulthreaded subprocesses with use {} threads'.format(self.n_cpus))
        logger.info('{}---### End of User Settings ###---'.format(os.linesep))
        logger.info('\nThe following settings are set by default:')
        logger.info('\tGrayordinatesResolution: {}'.format(self.grayord_res))
        logger.info('\tLowResMesh: {}k'.format(self.low_res))
def get_trees_from_nexus_or_newick(btext, name_newick):
    """Parse tree data that may be either NEXUS or plain Newick.

    Args:
        btext: raw UTF-8 bytes of the tree file.
        name_newick: name to assign when the content is a bare Newick string.

    Returns:
        A list of {'name': ..., 'newick': ...} dicts, one per tree.
    """
    text = btext.decode('utf8').strip()
    try:
        parsed = nexus.get_trees(text)
    except nexus.NexusError:
        # Not NEXUS -- treat the whole text as a single Newick tree.
        return [{'name': name_newick, 'newick': text}]
    return [{'name': label, 'newick': newick} for (label, newick) in parsed.items()]
def test_form_regexes():
    """Sanity-check the bootstrap-image and chroot-denylist form regexes.

    Verifies accepted and rejected samples for REGEX_BOOTSTRAP_IMAGE and
    REGEX_CHROOT_DENYLIST.
    """
    # Accepted bootstrap image references (with/without registry and tag).
    assert re.match(REGEX_BOOTSTRAP_IMAGE, 'fedora:33')
    assert re.match(REGEX_BOOTSTRAP_IMAGE, 'fedora')
    assert re.match(REGEX_BOOTSTRAP_IMAGE, 'registry.fedoraproject.org/fedora:rawhide')
    assert re.match(REGEX_BOOTSTRAP_IMAGE, 'registry.fedoraproject.org/fedora')
    # Transport prefixes are not allowed.
    assert (not re.match(REGEX_BOOTSTRAP_IMAGE, 'docker://example.com/test:30'))
    # Valid chroot-denylist patterns, including wildcards.
    # (FIX: removed a duplicated 'opensuse-leap-15.4-x86_64' entry.)
    items = ['fedora', 'fedora-*-x86_64', 'fedora-*-*', 'fedora-39-x86_64', 'fedora-rawhide-aarch64', 'amazonlinux-2023-aarch64', 'centos-stream+epel-next-9-x86_64', 'openeuler-22.03-x86_64', 'opensuse-leap-15.4-x86_64']
    for item in items:
        assert re.match(REGEX_CHROOT_DENYLIST, item)
    # Characters outside the allowed set are rejected.
    for item in ['fe|ora', '#fedora', 'fedora/39', 'fedora:39']:
        assert (not re.match(REGEX_CHROOT_DENYLIST, item))
def test_stacktraces_have_templates(client, django_elasticapm_client):
    """Template-render spans carry stacktraces that point into the template file."""
    template_debug = (django.VERSION < (1, 9))
    templates_copy = deepcopy(settings.TEMPLATES)
    templates_copy[0]['OPTIONS']['debug'] = template_debug
    middleware = middleware_setting(django.VERSION, ['elasticapm.contrib.django.middleware.TracingMiddleware'])
    with override_settings(TEMPLATE_DEBUG=template_debug, TEMPLATES=templates_copy, **middleware):
        resp = client.get(reverse('render-heavy-template'))
        assert resp.status_code == 200
    transactions = django_elasticapm_client.events[TRANSACTION]
    assert len(transactions) == 1
    assert transactions[0]['result'] == 'HTTP 2xx'
    spans = django_elasticapm_client.events[SPAN]
    assert len(spans) == 2, [s['name'] for s in spans]
    assert {s['name'] for s in spans} == {'list_users.html', 'something_expensive'}
    assert spans[0]['name'] == 'something_expensive'
    # The expensive span's stacktrace must include line 4 of the template.
    template_path = os.path.join('django', 'testapp', 'templates', 'list_users.html')
    template_frame_found = any(
        (frame['lineno'] == 4) and frame['filename'].endswith(template_path)
        for frame in spans[0]['stacktrace'])
    assert template_frame_found, 'Template was not found'
def velx(X, t):
    """Return the x-component of the velocity field at point X = (x, y).

    The field is selected by the module-level ct.problem:
      0 -> uniform unit flow in x.
      1 -> flow pointing toward the center (xc, yc).
      2 -> the radially-gated field coded below.

    `t` is part of the velocity-field interface but unused here.
    """
    if (ct.problem == 0):
        return 1.0
    elif (ct.problem == 1):
        # Distance to the center; +1e-10 guards against division by zero there.
        r = np.sqrt((((X[0] - xc) ** 2) + ((X[1] - yc) ** 2)))
        return ((xc - X[0]) / (r + 1e-10))
    elif (ct.problem == 2):
        r = np.sqrt((((X[0] - xc) ** 2) + ((X[1] - yc) ** 2)))
        # maximum(0, r - 0.1) zeroes the field within radius 0.1 of the center.
        return (((1 - (2 * X[0])) / (r + 1e-10)) * np.maximum(0, (r - 0.1)))
    else:
        # BUG FIX: `raise 'Not implemented'` raises a TypeError in Python 3
        # (strings are not exceptions); raise a proper exception instead.
        raise NotImplementedError('Not implemented')
class _TwoParameterQuestionAnsweringWrapper(_QuestionAnsweringWrapperModule):
    """Wraps a HF question-answering model taking (input_ids, attention_mask)."""

    def __init__(self, model: PreTrainedModel):
        super().__init__(model=model)

    def forward(self, input_ids: Tensor, attention_mask: Tensor) -> Tensor:
        """Run the wrapped model; tuple outputs are stacked into one tensor."""
        output = self._hf_model(input_ids=input_ids, attention_mask=attention_mask)
        if isinstance(output, tuple):
            output = torch.stack(list(output), dim=0)
        return output
class GrpcModellerFactory(object):
    """Builds a GrpcModeller service and registers it on a gRPC server."""

    def __init__(self, config):
        self.config = config

    def create_and_register_service(self, server):
        """Create the modeller service, attach it to `server`, and return it."""
        modeller_api = modeller.Modeller(self.config)
        service = GrpcModeller(modeller_api=modeller_api)
        model_pb2_grpc.add_ModellerServicer_to_server(service, server)
        LOGGER.info('Service %s created and registered', service)
        return service
class Mouse(Input):
    """Expyriment mouse input device wrapping pygame mouse/event handling.

    NOTE(review): several decorators appear to have been lost in this dump --
    the bare `_button_events.setter` / `_motion_events.setter` expressions were
    presumably `@track_button_events.setter` / `@track_motion_events.setter`,
    the paired getter defs and the duplicated `position` defs look like
    properties, and `set_quit_corner_location` / `process_quit_event` /
    `_self_test` take no `self` (presumably @staticmethod). Confirm against
    the original before relying on attribute access semantics here.
    """
    # Corner index (0-3) that triggers the quit gesture, or None when disabled.
    _quit_corner_location = None
    # Size in pixels of the active quit-corner rectangle.
    _corner_rect_size = (30, 30)
    # Timestamps of recent quit-corner clicks (class-level, shared).
    _quit_action_events = []

    def __init__(self, show_cursor=True, track_button_events=None, track_motion_events=None):
        Input.__init__(self)
        # On Android the quit corner is forced on (upper-right, index 1).
        if is_android_running():
            Mouse._quit_corner_location = 1
        if (show_cursor is None):
            show_cursor = defaults.mouse_show_cursor
        if (track_button_events is None):
            track_button_events = defaults.mouse_track_button_events
        if (track_motion_events is None):
            track_motion_events = defaults.mouse_track_motion_events
        if show_cursor:
            # show_cursor() also applies the event-tracking flags.
            self.show_cursor(track_button_events, track_motion_events)
        else:
            self.track_button_events = track_button_events
            self.track_motion_events = track_motion_events

    def set_quit_corner_location(corner, corner_rect_size=(None, None)):
        """Set (or disable with None) the screen corner used for the quit gesture.

        NOTE(review): on an invalid corner, `corner` is set to None *before*
        the warning is printed, so the message always reports 'None' instead
        of the offending value -- looks unintended; confirm.
        """
        if (corner is not None):
            if ((not isinstance(corner, int)) or (corner < 0) or (corner > 3)):
                corner = None
                print('Warning: {} is an unknown corner location. Mouse quit event is deactivated.'.format(corner))
        Mouse._quit_corner_location = corner
        # Silently keep the previous rect size if the new one is not numeric.
        try:
            Mouse._corner_rect_size = (int(corner_rect_size[0]), int(corner_rect_size[1]))
        except Exception:
            pass

    def process_quit_event(click_position=None):
        """Check whether a click in the quit corner should trigger quitting.

        Three clicks in the corner within one second simulate pressing the
        experiment's quit key. Returns the keyboard handler's result, or False.
        """
        if (Mouse._quit_corner_location not in (0, 1, 2, 3)):
            return False
        if (click_position is None):
            # No position supplied: pull it from pending button-down events.
            pos = None
            for event in pygame.event.get(pygame.MOUSEBUTTONDOWN):
                if (event.button > 0):
                    screen_size = _internals.active_exp.screen.surface.get_size()
                    pos = pygame.mouse.get_pos()
                    # Convert pygame (top-left origin) to expyriment (centered) coords.
                    pos = ((pos[0] - (screen_size[0] // 2)), ((- pos[1]) + (screen_size[1] // 2)))
                    break
            if (pos is None):
                return False
            else:
                return Mouse.process_quit_event(click_position=pos)
        # Thresholds depend on which corner is active (0=UL, 1=UR, 2=LR, 3=LL).
        if ((Mouse._quit_corner_location == 0) or (Mouse._quit_corner_location == 3)):
            threshold_x = ((- _internals.active_exp.screen.center_x) + Mouse._corner_rect_size[0])
        else:
            threshold_x = (_internals.active_exp.screen.center_x - Mouse._corner_rect_size[0])
        if ((Mouse._quit_corner_location == 0) or (Mouse._quit_corner_location == 1)):
            threshold_y = (_internals.active_exp.screen.center_y - Mouse._corner_rect_size[1])
        else:
            threshold_y = ((- _internals.active_exp.screen.center_y) + Mouse._corner_rect_size[1])
        if (((Mouse._quit_corner_location == 0) and (click_position[0] < threshold_x) and (click_position[1] > threshold_y)) or ((Mouse._quit_corner_location == 1) and (click_position[0] > threshold_x) and (click_position[1] > threshold_y)) or ((Mouse._quit_corner_location == 2) and (click_position[0] > threshold_x) and (click_position[1] < threshold_y)) or ((Mouse._quit_corner_location == 3) and (click_position[0] < threshold_x) and (click_position[1] < threshold_y))):
            Mouse._quit_action_events.append(get_time())
            if (len(Mouse._quit_action_events) >= 3):
                # Three corner clicks within 1 second -> simulate the quit key.
                diff = (get_time() - Mouse._quit_action_events.pop(0))
                if (diff < 1):
                    simulated_key = pygame.event.Event(pygame.KEYDOWN, {'key': _internals.active_exp.keyboard.get_quit_key()})
                    return _internals.active_exp.keyboard.process_control_keys(key_event=simulated_key)
        return False

    def track_button_events(self):
        # Getter; presumably @property in the original (decorator lost).
        return self._track_button_events
    _button_events.setter
    def track_button_events(self, value):
        # Allow or block pygame button events depending on the flag.
        self._track_button_events = value
        if value:
            pygame.event.set_allowed(pygame.MOUSEBUTTONDOWN)
            pygame.event.set_allowed(pygame.MOUSEBUTTONUP)
        else:
            pygame.event.set_blocked(pygame.MOUSEBUTTONDOWN)
            pygame.event.set_blocked(pygame.MOUSEBUTTONUP)

    def track_motion_events(self):
        # Getter; presumably @property in the original (decorator lost).
        return self._track_motion_events
    _motion_events.setter
    def track_motion_events(self, value):
        # Allow or block pygame motion events depending on the flag.
        self._track_motion_events = value
        if value:
            pygame.event.set_allowed(pygame.MOUSEMOTION)
        else:
            pygame.event.set_blocked(pygame.MOUSEMOTION)

    def pressed_buttons(self):
        """Return the pygame pressed-buttons tuple (pumps the event queue first)."""
        pygame.event.pump()
        return pygame.mouse.get_pressed()

    def is_cursor_visible(self):
        """Return cursor visibility; briefly toggles it to read the state."""
        visible = pygame.mouse.set_visible(False)
        pygame.mouse.set_visible(visible)
        return visible

    def get_last_button_down_event(self, process_quit_event=True):
        """Return the id of the last button-down event (0-based), or None.

        A left click (id 0) in the quit corner may trigger quitting; -1 is
        returned in that case.
        """
        rtn = None
        for event in pygame.event.get(pygame.MOUSEBUTTONDOWN):
            if (event.button > 0):
                rtn = (event.button - 1)
        if (rtn == 0):
            if (process_quit_event and Mouse.process_quit_event(self.position)):
                return (- 1)
        return rtn

    def get_last_button_up_event(self):
        """Return the id of the last button-up event (0-based), or None."""
        rtn = None
        for event in pygame.event.get(pygame.MOUSEBUTTONUP):
            if (event.button > 0):
                rtn = (event.button - 1)
        return rtn

    def check_button_pressed(self, button_number):
        """Return True if the given button is currently held down."""
        # NOTE(review): pressed_buttons is accessed without call parentheses --
        # consistent with it being a @property in the original.
        btns = self.pressed_buttons
        if ((len(btns) >= 1) and (button_number >= 0)):
            return btns[button_number]
        else:
            return False

    def check_wheel(self):
        """Return 'up'/'down' if the wheel was moved (buttons 3/4), else None."""
        evt = self.get_last_button_down_event()
        if (evt == 3):
            return 'up'
        elif (evt == 4):
            return 'down'
        else:
            return None

    def position(self):
        """Current position in expyriment coordinates (origin at screen center)."""
        pygame.event.pump()
        screen_size = _internals.active_exp.screen.surface.get_size()
        pos = pygame.mouse.get_pos()
        return ((pos[0] - (screen_size[0] // 2)), ((- pos[1]) + (screen_size[1] // 2)))
    def position(self, position):
        # Presumably the @position.setter in the original (decorator lost):
        # converts expyriment coordinates back to pygame's top-left origin.
        screen_size = _internals.active_exp.screen.surface.get_size()
        pos = ((position[0] + (screen_size[0] // 2)), ((- position[1]) + (screen_size[1] // 2)))
        pygame.mouse.set_pos(pos)

    def set_cursor(self, size, hotspot, xormasks, andmasks):
        """Set a custom pygame cursor bitmap."""
        return pygame.mouse.set_cursor(size, hotspot, xormasks, andmasks)

    def get_cursor(self):
        """Return the current pygame cursor."""
        return pygame.mouse.get_cursor()

    def clear(self):
        """Drop all pending mouse events from the pygame queue."""
        pygame.event.clear(pygame.MOUSEBUTTONDOWN)
        pygame.event.clear(pygame.MOUSEBUTTONUP)
        pygame.event.clear(pygame.MOUSEMOTION)
        if self._logging:
            _internals.active_exp._event_file_log('Mouse,cleared', 2)

    def wait_event(self, wait_button=True, wait_motion=True, buttons=None, duration=None, wait_for_buttonup=False, callback_function=None, process_control_events=True):
        """Block until a button event and/or motion occurs (or duration elapses).

        Returns (btn_id, motion_occured, position, rt); btn_id may be a
        CallbackQuitEvent when a callback requests quitting.
        """
        if _internals.skip_wait_methods:
            return (None, None, None, None)
        start = get_time()
        self.clear()
        old_pos = pygame.mouse.get_pos()
        btn_id = None
        rt = None
        motion_occured = False
        if (buttons is None):
            buttons = [0, 1, 2, 3, 4]
        else:
            # Accept a single button id or any iterable of ids.
            try:
                buttons = list(buttons)
            except Exception:
                buttons = [buttons]
        while True:
            # Give the user callback a chance to abort the wait.
            if isinstance(callback_function, FunctionType):
                rtn_callback = callback_function()
                if isinstance(rtn_callback, _internals.CallbackQuitEvent):
                    btn_id = rtn_callback
                    rt = int(((get_time() - start) * 1000))
                    break
            if _internals.active_exp.is_initialized:
                rtn_callback = _internals.active_exp._execute_wait_callback()
                if isinstance(rtn_callback, _internals.CallbackQuitEvent):
                    btn_id = rtn_callback
                    rt = int(((get_time() - start) * 1000))
                    break
                if process_control_events:
                    if _internals.active_exp.keyboard.process_control_keys():
                        break
            if wait_motion:
                motion_occured = (old_pos != pygame.mouse.get_pos())
            if wait_button:
                if wait_for_buttonup:
                    btn_id = self.get_last_button_up_event()
                else:
                    btn_id = self.get_last_button_down_event(process_quit_event=process_control_events)
            if (btn_id == (- 1)):
                # Quit gesture consumed the click; report no button.
                btn_id = None
                break
            elif ((btn_id in buttons) or motion_occured):
                rt = int(((get_time() - start) * 1000))
                break
            elif ((duration is not None) and (int(((get_time() - start) * 1000)) >= duration)):
                break
        position_in_expy_coordinates = self.position
        if self._logging:
            _internals.active_exp._event_file_log('Mouse,received,{0}-{1},wait_event'.format(btn_id, motion_occured))
        return (btn_id, motion_occured, position_in_expy_coordinates, rt)

    def wait_press(self, buttons=None, duration=None, wait_for_buttonup=False, callback_function=None, process_control_events=True):
        """Wait for a button press only; returns (btn_id, position, rt)."""
        rtn = self.wait_event(wait_button=True, wait_motion=False, buttons=buttons, duration=duration, wait_for_buttonup=wait_for_buttonup, callback_function=callback_function, process_control_events=process_control_events)
        return (rtn[0], rtn[2], rtn[3])

    def wait_motion(self, duration=None, callback_function=None, process_control_events=True):
        """Wait for mouse motion only; returns (position, rt) or (quit_event, rt)."""
        rtn = self.wait_event(wait_button=False, wait_motion=True, buttons=[], duration=duration, wait_for_buttonup=False, callback_function=callback_function, process_control_events=process_control_events)
        if isinstance(rtn[0], _internals.CallbackQuitEvent):
            return (rtn[0], rtn[3])
        else:
            return (rtn[2], rtn[3])

    def show_cursor(self, track_button_events=True, track_motion_events=False):
        """Make the cursor visible and set the event-tracking flags."""
        pygame.mouse.set_visible(True)
        self.track_button_events = track_button_events
        self.track_motion_events = track_motion_events

    def hide_cursor(self, track_button_events=False, track_motion_events=False):
        """Hide the cursor and set the event-tracking flags."""
        pygame.mouse.set_visible(False)
        self.track_button_events = track_button_events
        self.track_motion_events = track_motion_events

    def _self_test(exp):
        """Interactive test-suite routine: measures polling time and checks buttons."""
        from .. import stimuli
        info = 'This will test how timing accurate your mouse is.\n\n[Press RETURN to continue]'
        stimuli.TextScreen('Mouse test (1)', info).present()
        exp.keyboard.wait(misc.constants.K_RETURN)
        mouse = Mouse()
        go = stimuli.TextLine('Keep on moving...')
        go.preload()
        stimuli.TextLine('Please move the mouse').present()
        mouse.wait_motion()
        go.present()
        exp.clock.reset_stopwatch()
        motion = []
        # Sample motion-event intervals for 200 ms.
        while (exp.clock.stopwatch_time < 200):
            (_pos, rt) = mouse.wait_motion()
            motion.append(rt)
        stimuli.TextLine('Thanks').present()
        # The modal interval is reported as the polling time.
        polling_time = misc.statistics.mode(motion)
        info = 'Your mouse polling time is {0} ms.\n\n[Press RETURN to continue] '.format(polling_time)
        text = stimuli.TextScreen('Results', info)
        text.present()
        exp.keyboard.wait([misc.constants.K_RETURN])
        info = 'This will test if you mouse buttons work.\nPlease press all buttons one after the other to see if the corresponding buttons on the screen light up.\nWhen done, click inside one of the buttons on the screen to end the test.\nIf your mouse buttons do not work, you can quit by pressing q.\n\n[Press RETURN to continue]'
        stimuli.TextScreen('Mouse test (2)', info).present()
        exp.keyboard.wait(misc.constants.K_RETURN)
        # Three on-screen rectangles represent left/middle/right buttons.
        rects = [stimuli.Rectangle(size=[30, 30], position=[(- 50), 0]), stimuli.Rectangle(size=[30, 30], position=[0, 0]), stimuli.Rectangle(size=[30, 30], position=[50, 0])]
        canvas = stimuli.Canvas(size=[350, 500])
        btn = None
        go_on = True
        while go_on:
            canvas.clear_surface()
            for (cnt, r) in enumerate(rects):
                r.unload()
                # Highlight the rectangle matching the last pressed button.
                if (cnt == btn):
                    r.colour = misc.constants.C_YELLOW
                else:
                    r.colour = misc.constants.C_RED
                r.plot(canvas)
            if (btn == 3):
                text = 'Mouse wheel UP'
            elif (btn == 4):
                text = 'Mouse wheel DOWN'
            else:
                text = ''
            stimuli.TextLine(text, position=[0, 50]).plot(canvas)
            canvas.present()
            btn = None
            while (btn is None):
                btn = mouse.get_last_button_down_event()
                if (btn is not None):
                    # A click inside any rectangle ends the test successfully.
                    position = mouse.position
                    for r in rects:
                        if r.overlapping_with_position(position):
                            buttons_work = 1
                            mouse.hide_cursor()
                            go_on = False
                            break
                elif exp.keyboard.check(keys=misc.constants.K_q):
                    # 'q' aborts: buttons are reported as not working.
                    buttons_work = 0
                    mouse.hide_cursor()
                    go_on = False
                    break
        result = {}
        result['testsuite_mouse_polling_time'] = (str(polling_time) + ' ms')
        result['testsuite_mouse_buttons_work'] = buttons_work
        return result
def distance_euclidean(color: 'Color', sample: 'Color', space: str='lab-d65') -> float:
    """Euclidean distance between two colors in the given color space.

    Both colors are converted (without normalization) to `space` and compared
    coordinate-wise; NaN coordinates are resolved by coords(nans=False).
    """
    first = color.convert(space, norm=False).coords(nans=False)
    second = sample.convert(space, norm=False).coords(nans=False)
    total = 0.0
    for (a, b) in zip(first, second):
        total += ((a - b) ** 2.0)
    return math.sqrt(total)
def install_sibelia(prefix):
    """Download, build and install the Sibelia binary into *prefix*.

    Builds in a temporary 'sibelia-build' directory next to the current
    working directory, copies the resulting binary into prefix, then cleans up.

    Returns:
        True on success, False if the required build tools are missing.

    Raises:
        subprocess.CalledProcessError: if any tar/cmake/make step fails.
    """
    if (not test_tools()):
        return False
    initial_dir = os.getcwd()
    tmp_dir = os.path.join(initial_dir, 'sibelia-build')
    # Start from a clean build directory.
    if os.path.isdir(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)
    try:
        os.chdir(tmp_dir)
        print('Downloading source...', file=sys.stderr)
        urlretrieve(SIBELIA_LINK, 'master.tar.gz')
        subprocess.check_call(['tar', '-xf', 'master.tar.gz'])
        os.chdir('Sibelia-master/build')
        srcdir = os.path.join('..', 'src')
        subprocess.check_call(['cmake', srcdir, '-DONLY_SIBELIA=1', ('-DCMAKE_INSTALL_PREFIX=' + tmp_dir)])
        subprocess.check_call(['make'])
        subprocess.check_call(['make', 'install'])
        sibelia_bin_src = os.path.join(tmp_dir, 'bin', 'Sibelia')
        sibelia_bin_dst = os.path.join(initial_dir, prefix, 'Sibelia')
        shutil.copy(sibelia_bin_src, sibelia_bin_dst)
    finally:
        # BUG FIX: restore the caller's working directory even when a build
        # step raises; previously a failure left the process chdir'd into the
        # (possibly deleted) build tree.
        os.chdir(initial_dir)
    shutil.rmtree(tmp_dir)
    return True
class forward_error_correction(bsn_tlv):
    """BSN TLV (type 149) carrying a forward-error-correction mode value.

    NOTE(review): loxigen-style generated serialization code; ``''.join`` over
    ``struct.pack`` output is Python 2 bytes handling, and ``unpack`` takes a
    bare ``reader`` (presumably ``@staticmethod`` originally -- confirm).
    """
    type = 149  # TLV type code

    def __init__(self, value=None):
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return

    def pack(self):
        """Serialize the TLV; the length field is patched in afterwards."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!B', self.value))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize a forward_error_correction TLV; asserts the type code."""
        obj = forward_error_correction()
        _type = reader.read('!H')[0]
        assert (_type == 149)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this TLV's length.
        reader = orig_reader.slice(_length, 4)
        obj.value = reader.read('!B')[0]
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True

    def pretty_print(self, q):
        """Render the value, using symbolic FEC-mode names where known."""
        q.text('forward_error_correction {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                value_name_map = {0: 'OFP_BSN_FORWARD_ERROR_CORRECTION_DEFAULT', 1: 'OFP_BSN_FORWARD_ERROR_CORRECTION_ENABLE', 2: 'OFP_BSN_FORWARD_ERROR_CORRECTION_DISABLE', 3: 'OFP_BSN_FORWARD_ERROR_CORRECTION_ENABLE_FIRE_CODE', 4: 'OFP_BSN_FORWARD_ERROR_CORRECTION_ENABLE_REED_SOLOMON', 5: 'OFP_BSN_FORWARD_ERROR_CORRECTION_ENABLE_REED_SOLOMON544'}
                if (self.value in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.value], self.value)))
                else:
                    q.text(('%#x' % self.value))
            q.breakable()
        q.text('}')
def examples_to_trials(examples, params):
    """Convert recorded optimization examples into hyperopt-style trial dicts.

    NOTE(review): this is Coconut-compiled output (the ``_coconut_match_*``
    scaffolding implements a pattern match for a 'gain' key in ``ex``);
    edit the Coconut source rather than this artifact.
    """
    trials = []
    # Sentinel marking "parameter not present in this example".
    NA = object()
    for (tid, ex) in enumerate(examples):
        # Pattern match: does ex (a Mapping) carry a 'gain' key?
        _coconut_match_to_0 = ex
        _coconut_match_check_0 = False
        _coconut_match_set_name_gain = _coconut_sentinel
        if _coconut.isinstance(_coconut_match_to_0, _coconut.abc.Mapping):
            _coconut_match_temp_4 = _coconut_match_to_0.get('gain', _coconut_sentinel)
            if (_coconut_match_temp_4 is not _coconut_sentinel):
                _coconut_match_set_name_gain = _coconut_match_temp_4
                _coconut_match_check_0 = True
        if _coconut_match_check_0:
            if (_coconut_match_set_name_gain is not _coconut_sentinel):
                gain = _coconut_match_set_name_gain
        # Hyperopt minimizes, so a gain is negated into a loss.
        if _coconut_match_check_0:
            loss = negate_objective(gain)
        else:
            loss = ex['loss']
        result = _coconut.dict((('status', STATUS_OK), ('loss', loss)))
        vals = _coconut.dict()
        idxs = _coconut.dict()
        # Per-parameter values/indices in hyperopt's sparse trial format:
        # missing parameters get empty lists. Converters map recorded values
        # back to hyperopt's internal representation (choice -> index,
        # randrange -> offset from start).
        for (k, v) in get_names_and_features(ex['values'], params, fallback_func=(lambda name, func, *args, **kwargs: NA), converters=_coconut.dict((('choice', (lambda val, choices: choices.index(val))), ('randrange', (lambda val, start, stop, step: (val - start))))), convert_fallback=False):
            vals[k] = ([v] if (v is not NA) else [])
            idxs[k] = ([tid] if (v is not NA) else [])
        misc = _coconut.dict((('tid', tid), ('idxs', idxs), ('vals', vals), ('cmd', None)))
        trials.append(_coconut.dict((('tid', tid), ('result', result), ('misc', misc), ('spec', spec_from_misc(misc)), ('state', JOB_STATE_DONE), ('owner', None), ('book_time', None), ('refresh_time', None), ('exp_key', None))))
    return trials
class GemmKind(enum.Enum):
    """Enumerates the GEMM operation variants this backend distinguishes.

    Member names mirror the op names used elsewhere in the code generator;
    the numeric values are arbitrary (auto()) and only used for identity.
    """
    Gemm = auto()
    GemmPermute = auto()
    BatchGemm = auto()
    BatchGemmPermute = auto()
    SplitKGemm = auto()
    Grouped = auto()
    BatchGemmSoftmaxGemm = auto()
    BatchGemmSoftmaxGemmPermute = auto()
    GemmPermuteM2N3 = auto()
    GemmPermuteM3N2 = auto()
def get_context_menu_stylesheet(text_color, bg_color, selected_color, disabled_text_color=None): if (disabled_text_color is None): disabled_text_color = [int((0.5 * abs((text_color[i] + bg_color[i])))) for i in range(3)] style_dict = {'QMenu': {'color': 'rgb({0},{1},{2})'.format(*text_color), 'background-color': 'rgb({0},{1},{2})'.format(*bg_color), 'border': '1px solid rgba({0},{1},{2},30)'.format(*text_color), 'border-radius': '3px'}, 'QMenu::item': {'padding': '5px 18px 2px', 'background-color': 'transparent'}, 'QMenu::item:selected': {'color': 'rgb({0},{1},{2})'.format(*text_color), 'background-color': 'rgba({0},{1},{2},200)'.format(*selected_color)}, 'QMenu::item:disabled': {'color': 'rgb({0},{1},{2})'.format(*disabled_text_color)}, 'QMenu::separator': {'height': '1px', 'background': 'rgba({0},{1},{2}, 50)'.format(*text_color), 'margin': '4px 8px'}} stylesheet = '' for (css_class, css) in style_dict.items(): style = '{} {{\n'.format(css_class) for (elm_name, elm_val) in css.items(): style += ' {}:{};\n'.format(elm_name, elm_val) style += '}\n' stylesheet += style return stylesheet
def test_wrapped_resource_with_hooks_aware_of_resource(client, wrapped_resource_aware):
    """GET/HEAD/PUT/POST on the wrapped resource succeed and the resource's
    hooks see the response body; PATCH is rejected (405) and OPTIONS returns
    an empty body."""
    client.app.add_route('/wrapped_aware', wrapped_resource_aware)
    expected = 'fluffy and cute'

    get_result = client.simulate_get('/wrapped_aware')
    assert get_result.status_code == 200
    assert get_result.text == expected

    # The hook writes the same body for these methods as well.
    for simulate in (client.simulate_head, client.simulate_put, client.simulate_post):
        response = simulate(path='/wrapped_aware')
        assert response.status_code == 200
        assert wrapped_resource_aware.resp.text == expected

    assert client.simulate_patch('/wrapped_aware').status_code == 405

    options_result = client.simulate_options('/wrapped_aware')
    assert options_result.status_code == 200
    assert not options_result.text
class FilterWidget(Gtk.Grid):
    """A Gtk grid holding a dynamic list of Criterion rows, each paired with
    a 'remove' button, plus (de)serialization of the whole widget state."""

    def __init__(self, criteria):
        super(FilterWidget, self).__init__()
        self.set_column_spacing(10)
        self.set_row_spacing(2)
        self.criteria = criteria
        # Each entry: (criterion widget, remove button, remove-handler id).
        self.rows = []

    def add_criteria_row(self):
        """Append a new Criterion row, seeded from the last row's state."""
        criterion = Criterion(self.criteria)
        criterion.show()
        n = len(self.rows)
        if (n != 0):
            # New rows start as a copy of the previous row's selection.
            criterion.set_state(self.rows[(- 1)][0].get_state())
        remove_btn = Gtk.Button()
        image = Gtk.Image()
        image.set_from_icon_name('list-remove', Gtk.IconSize.BUTTON)
        remove_btn.add(image)
        remove_btn_handler_id = remove_btn.connect('clicked', self.__remove_clicked)
        remove_btn.show_all()
        self.attach(criterion, 0, n, 1, 1)
        self.attach(remove_btn, 1, n, 1, 1)
        self.rows.append((criterion, remove_btn, remove_btn_handler_id))

    def remove_criteria_row(self, row):
        """Remove the row at index *row* from both the grid and self.rows."""
        self.remove_row(row)
        del self.rows[row]

    def __remove_clicked(self, widget):
        # The button's grid row ('top-attach') equals its index in self.rows.
        n = self.child_get_property(widget, 'top-attach')
        self.remove_criteria_row(n)

    def get_state(self):
        """Return the list of per-row criterion states.

        NOTE(review): only state[-1][0] is reversed in place here, while
        set_state() reverses cstate[0] for every row — confirm the intended
        serialized order against the original source.
        """
        state = []
        for row in self.rows:
            state.append(row[0].get_state())
        state[(- 1)][0].reverse()
        return state

    def set_state(self, state):
        """Grow/shrink to len(state) rows, then apply each row's state."""
        n_present = len(self.rows)
        n_required = len(state)
        for i in range(n_present, n_required):
            self.add_criteria_row()
        for i in range(n_present, n_required, (- 1)):
            self.remove_criteria_row((i - 1))
        for (i, cstate) in enumerate(state):
            cstate[0].reverse()
            self.rows[i][0].set_state(cstate)
def test_guess_of_dataclassjsonmixin():
    """A DataClassJSONMixin type survives a to_literal / LiteralsResolver
    round-trip: the value compares equal and keeps its methods."""

    class Foo(DataClassJSONMixin):
        x: int
        y: str
        z: typing.Dict[(str, int)]

        def hello(self):
            ...

    lt = TypeEngine.to_literal_type(Foo)
    foo = Foo(1, 'hello', {'world': 3})
    lv = TypeEngine.to_literal(FlyteContext.current_context(), foo, Foo, lt)
    lit_dict = {'a': lv}
    lr = LiteralsResolver(lit_dict)
    # Round-tripped value compares equal and still exposes its method.
    assert (lr.get('a', Foo) == foo)
    assert (hasattr(lr.get('a', Foo), 'hello') is True)
class TestHTML(util.PluginTestCase):
    """HTML filter plugin test: 'alt' attributes are spell-checked while
    code/pre elements and the configured CSS selectors are ignored."""

    def setup_fs(self):
        """Write the spell-check pipeline config into the temp dir."""
        # Pipeline: scan 'alt' attributes; skip code/pre and the
        # ':matches(...)' selector targets.
        config = self.dedent("\n matrix:\n - name: html\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n attributes:\n - alt\n ignores:\n - ':is(code, pre)'\n - 'span:matches(.some-class, #some-id)'\n ").format(self.tempdir)
        self.mktemp('.html.yml', config, 'utf-8')

    def test_html(self):
        """Only misspellings outside the ignored regions are reported."""
        # Misspellings placed in checked regions (comment, content, alt)...
        bad_comment_words = ['helo', 'begn']
        bad_content_words = ['flga', 'graet']
        bad_attr_words = ['recieve', 'teh']
        bad_words = ((bad_comment_words + bad_content_words) + bad_attr_words)
        # ...mixed with correctly spelled words.
        good_words = ['yes', 'word']
        template = self.dedent('\n <html>\n <head>\n </head>\n <body>\n <!-- {} -->\n <p>{}<code>kjaljw aljwk</code><img src="./image.png" alt="{}"/></p>\n <pre>uouqei euowuw\n </pre>\n <p><span class="some-class">dksj dkjsk</span><span id="some-id">ksjk akjsks</span>\n </body>\n </html>\n ').format('\n'.join((bad_comment_words + good_words)), ' '.join((bad_content_words + good_words)), ' '.join((bad_attr_words + good_words)))
        self.mktemp('test.txt', template, 'utf-8')
        self.assert_spellcheck('.html.yml', bad_words)
def tts(model, mel):
    """Synthesize audio from a mel input with the given model.

    Returns a tuple (waveform, alignment, spectrogram).
    """
    if use_cuda:
        model = model.cuda()
    # NOTE(review): only encoder and postnet are put into eval mode here;
    # confirm whether other submodules should be switched as well.
    model.encoder.eval()
    model.postnet.eval()
    sequence = Variable(torch.from_numpy(mel)).unsqueeze(0)  # add batch dim
    if use_cuda:
        sequence = sequence.cuda()
    (mel_outputs, linear_outputs, alignments, vq_penalty, encoder_penalty, entropy) = model.forward_eval(sequence)
    # Drop the batch dimension and move results to host memory.
    linear_output = linear_outputs[0].cpu().data.numpy()
    spectrogram = audio.denormalize(linear_output)
    alignment = alignments[0].cpu().data.numpy()
    # Invert the (transposed) linear spectrogram back to a waveform.
    waveform = audio.inv_spectrogram(linear_output.T)
    return (waveform, alignment, spectrogram)
# NOTE(review): the '@pytest.mark.' prefix of this decorator was lost in a
# previous conversion, leaving invalid syntax; restored here (pytest is
# assumed to be imported in this test module).
@pytest.mark.parametrize('response,alignment_mode,gold_ents', [('PER: Jacq', 'strict', []), ('PER: Jacq', 'contract', []), ('PER: Jacq', 'expand', [('Jacques', 'PER')]), ('PER: Jean J', 'contract', [('Jean', 'PER')]), ('PER: Jean Jacques, aim', 'strict', [('Jean Jacques', 'PER')]), ('PER: random', 'expand', [])])
def test_ner_alignment(response, alignment_mode, gold_ents):
    """LLM NER responses are aligned to token boundaries according to
    *alignment_mode* and compared against the expected entity spans."""
    text = 'Jean Jacques and Jaime went to the library.'
    labels = 'PER,ORG,LOC'
    llm_ner = make_ner_task_v2(labels=labels, alignment_mode=alignment_mode)
    nlp = spacy.blank('en')
    doc_in = nlp.make_doc(text)
    # Parse the canned LLM response into entity annotations on the doc.
    doc_out = list(llm_ner.parse_responses([doc_in], [response]))[0]
    pred_ents = [(ent.text, ent.label_) for ent in doc_out.ents]
    assert (pred_ents == gold_ents)
def _need_admin(f: Callable[([Admin, Client, T], Any)]) -> Callable[([Admin, Tuple[(Client, T)]], EventHub.StopPropagation)]:
    """Decorator: run *f* only for clients whose player id is in
    self.admins; always stop event propagation.

    NOTE(review): the Callable[...] annotations look like decompiler
    artifacts of Callable[[...], ...] — verify against the original source.
    """

    def wrapper(self: Admin, ev: Tuple[(Client, T)]) -> EventHub.StopPropagation:
        core = self.core
        (u, m) = ev
        # Silently ignore non-admin senders (event is still consumed).
        if (core.auth.pid_of(u) not in self.admins):
            return STOP
        f(self, u, m)
        # NOTE(review): an empty SystemMsg is written back as an ack; the
        # original message text may have been stripped — confirm.
        u.write(msg.SystemMsg(msg=''))
        return STOP
    return wrapper
def test_invalid_destination_file_format(create_input_file, create_output_centerline_file):
    """Invoking the CLI with an unsupported output extension must surface an
    UnsupportedVectorType exception on the result."""
    source_shapefile = create_input_file('polygons', 'shp')
    destination_file = create_output_centerline_file('unknown')

    cli_result = CliRunner().invoke(create_centerlines, [source_shapefile, destination_file])

    assert isinstance(cli_result.exception, UnsupportedVectorType)
class TestUsageWithPatchCallable(testslide.TestCase):
    """Demonstrates mock_callable argument matching with regex matchers."""

    def test_patch_callable(self):
        # Only calls whose args match ('foo...', 'bar...') are mocked.
        self.mock_callable(sample_module, 'test_function').for_call(testslide.matchers.RegexMatches('foo'), testslide.matchers.RegexMatches('bar')).to_return_value(['mocked_response'])
        # Non-matching arguments must raise.
        with self.assertRaises(testslide.mock_callable.UnexpectedCallArguments):
            sample_module.test_function('meh', 'moh')
        # Matching arguments go through the mock without error.
        sample_module.test_function('foo', 'bar')
class ApiEmail(View):
    """POST endpoint that rate-limits and dispatches an email verification
    code, recording each sent mail in the Email model.

    NOTE(review): several user-facing string literals in this view are empty
    or truncated — they appear to have been stripped (likely non-ASCII text)
    in a previous conversion; restore them from the original source.
    """

    def post(self, request: WSGIRequest):
        # code 333 = validation/rate-limit failure; 0 = success.
        res = {'code': 333, 'msg': '!', 'self': None}
        form = EmailForm(request.data)
        if (not form.is_valid()):
            (res['self'], res['msg']) = clean_form(form)
            return JsonResponse(res)
        valid_email_obj = request.session.get('valid_email_obj')
        if valid_email_obj:
            # Rate limit: at most one code per 60 seconds per session.
            time_stamp = valid_email_obj['time_stamp']
            now_stamp = time.time()
            if ((now_stamp - time_stamp) < 60):
                res['msg'] = ''
                return JsonResponse(res)
        # NOTE(review): random.sample('', 6) raises ValueError on an empty
        # population — the sampled alphabet string was evidently lost; confirm.
        valid_email_code = ''.join(random.sample('', 6))
        request.session['valid_email_obj'] = {'code': valid_email_code, 'email': form.cleaned_data['email'], 'time_stamp': time.time()}
        # Send the mail off-thread so the HTTP response is not blocked.
        Thread(target=send_mail, args=('!', f', {valid_email_code}, 5', settings.EMAIL_HOST_USER, [form.cleaned_data.get('email')], False)).start()
        Email.objects.create(email=form.cleaned_data.get('email'), content='')
        res['code'] = 0
        return JsonResponse(res)
def sigmoid_focal_loss_star(inputs: torch.Tensor, targets: torch.Tensor, alpha: float=(- 1), gamma: float=1, reduction: str='none') -> torch.Tensor:
    """Focal Loss* (the alternate form from the RetinaNet paper appendix).

    Args:
        inputs: raw logits.
        targets: binary targets (same shape as *inputs*).
        alpha: class-balancing weight in [0, 1]; negative disables weighting.
        gamma: modulating exponent applied to the shifted logits.
        reduction: 'none' | 'mean' | 'sum'.

    Returns:
        The per-element loss, or its mean/sum depending on *reduction*.
    """
    inputs = inputs.float()
    targets = targets.float()
    # Map targets {0, 1} -> sign {-1, +1} and scale the logits by gamma.
    signs = (2 * targets) - 1
    shifted = gamma * (inputs * signs)
    loss = -F.logsigmoid(shifted) / gamma
    if alpha >= 0:
        # Weight positives by alpha and negatives by (1 - alpha).
        weight = (alpha * targets) + ((1 - alpha) * (1 - targets))
        loss = loss * weight
    if reduction == 'mean':
        return loss.mean()
    if reduction == 'sum':
        return loss.sum()
    return loss
def perform_reuse(fields):
    """Apply all field-reuse operations to the in-memory schema tree.

    Foreign reuses (one schema nested inside another) are applied first, in
    ascending dependency 'order'; self-nestings (a schema nested inside
    itself) are applied last from a snapshot taken before any insertion.
    """
    (foreign_reuses, self_nestings) = order_reuses(fields)
    # Phase 1: foreign reuse, lowest order first.
    for order in sorted(foreign_reuses.keys()):
        for (schema_name, reuse_entries) in foreign_reuses[order].items():
            schema = fields[schema_name]
            for reuse_entry in reuse_entries:
                nest_as = reuse_entry['as']
                destination_schema_name = reuse_entry['full'].split('.')[0]
                destination_schema = fields[destination_schema_name]
                ensure_valid_reuse(schema, destination_schema)
                # The nested copy is marked 'intermediate' and remembers its
                # originating fieldset.
                new_field_details = copy.deepcopy(schema['field_details'])
                new_field_details['name'] = nest_as
                new_field_details['original_fieldset'] = schema_name
                new_field_details['intermediate'] = True
                # Deep copies keep later mutations from leaking across reuses.
                reused_fields = copy.deepcopy(schema['fields'])
                set_original_fieldset(reused_fields, schema_name)
                destination_fields = field_group_at_path(reuse_entry['at'], fields)
                destination_fields[nest_as] = {'field_details': new_field_details, 'fields': reused_fields}
                append_reused_here(schema, reuse_entry, destination_schema)
    # Phase 2: self-nesting, using fields captured before any self-insertion.
    for (schema_name, reuse_entries) in self_nestings.items():
        schema = fields[schema_name]
        ensure_valid_reuse(schema)
        reused_fields = copy.deepcopy(schema['fields'])
        set_original_fieldset(reused_fields, schema_name)
        for reuse_entry in reuse_entries:
            nest_as = reuse_entry['as']
            new_field_details = copy.deepcopy(schema['field_details'])
            new_field_details['name'] = nest_as
            new_field_details['original_fieldset'] = schema_name
            new_field_details['intermediate'] = True
            if (reuse_entry['at'] != schema_name):
                destination_fields = field_group_at_path(reuse_entry['at'], fields)
            else:
                destination_fields = schema['fields']
            destination_fields[nest_as] = {'field_details': new_field_details, 'fields': copy.deepcopy(reused_fields)}
            append_reused_here(schema, reuse_entry, fields[schema_name])
def _simple_lv_subnets_determination(sb_code_parameters, hv_subnet, hv_grid_number, input_path):
    """Resolve the list of lower-voltage subnet names selected by the
    simbench code parameters.

    sb_code_parameters[2] is the LV level token ('' for none, 'HV', or a
    2-letter type prefix); sb_code_parameters[4] selects 'all', a single
    subnet suffix string, or (for 'HV') an int choice.
    """
    if (sb_code_parameters[2] == ''):
        # No lower voltage level requested.
        lv_subnets = []
    elif (sb_code_parameters[2] == 'HV'):
        lv_subnet_list = ['HV1', 'HV2']
        lv_subnets = {'all': lv_subnet_list, 1: ['HV1'], 2: ['HV2']}[sb_code_parameters[4]]
    else:
        # Count load profiles per LV type below the given HV subnet.
        load_data = pd.read_csv(os.path.join(input_path, 'Load.csv'), sep=';')
        lv_types = load_data.loc[load_data.subnet.str.startswith(((hv_subnet + '_') + sb_code_parameters[2]))].profile.value_counts()
        # Keep only profile types whose 2-letter prefix matches the level.
        filtered_lv_types = lv_types[(pd.Series(lv_types.index.str[:2]).str.upper() == sb_code_parameters[2]).values]
        lv_subnet_list = []
        for (type_, number) in filtered_lv_types.items():
            if (type_[:2].upper() == sb_code_parameters[2]):
                if (type_[3:] in _grid_number_dict()[sb_code_parameters[2]].keys()):
                    # Build names like '<LEVEL><grid_nr>.<i + hv_grid_number*100>'.
                    lv_subnet_list += [((sb_code_parameters[2] + str(_grid_number_dict()[sb_code_parameters[2]][type_[3:]])) + ('.%i' % (i + (hv_grid_number * 100)))) for i in range(1, (1 + number))]
        if (sb_code_parameters[4] == 'all'):
            lv_subnets = lv_subnet_list
        elif isinstance(sb_code_parameters[4], str):
            # A single suffix was requested; it must exist in the list.
            if ((sb_code_parameters[2] + sb_code_parameters[4]) not in lv_subnet_list):
                raise ValueError((("'sb_code_parameters[4]' %s is " % sb_code_parameters[4]) + "not in 'lv_subnet_list'."))
            lv_subnets = [(sb_code_parameters[2] + sb_code_parameters[4])]
        else:
            raise ValueError(("'sb_code_parameters[4]' must be a string, e.g. 'all' or 'MV1.1'" + ' (depending on the voltage level).'))
    return lv_subnets
class OptionSeriesBulletSonificationTracksActivewhen(Options):
    """Typed accessors for sonification track 'activeWhen' options
    (crossingDown, crossingUp, max, min, prop).

    NOTE(review): each option is defined twice — a no-arg getter followed by
    a one-arg setter of the same name — with no @property / @<name>.setter
    decorators visible, so at class creation time the setter definition
    shadows the getter. The decorators were probably stripped in a previous
    conversion; confirm against the original source.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the '@pytest.mark.' prefix of this decorator was mangled in a
# previous conversion, leaving invalid syntax; restored here (pytest is
# already used below via pytest.approx).
@pytest.mark.parametrize('coords, ref_energy, ref_norm_forces', [((0.0, 0.0, 0.0), 0.0, 0.0), ((1.0, 0.0, 0.0), 0.0, 0.0), ((2.0, 0.0, 0.0), 4.0, 4.0), ((2.0, 2.0, 0.0), 8.0, (2 * (8.0 ** 0.5))), ((4.0, 0.0, 0.0), 16.0, 8.0)])
def test_harmonic_sphere(coords, ref_energy, ref_norm_forces):
    """Energy and force norm of the 'harmonic_sphere' external potential:
    zero inside the radius, harmonic restraint outside."""
    atoms = ('X',)
    coords = np.array(coords)
    geom = Geometry(atoms, coords)
    potentials = [{'type': 'harmonic_sphere', 'k': 1, 'radius': 1}]
    calc = ExternalPotential(potentials=potentials)
    geom.set_calculator(calc)
    energy = geom.energy
    assert (energy == pytest.approx(ref_energy))
    forces = geom.forces
    norm_forces = np.linalg.norm(forces)
    assert (norm_forces == pytest.approx(ref_norm_forces))
def setup_to_fail():
    """Generator fixture: temporarily weaken PAM password hashing
    (sha512 -> md5) so the check under test fails, restoring the original
    files afterwards."""
    # Back up the pristine PAM configs before mutating them.
    shutil.copy('/etc/pam.d/system-auth', '/etc/pam.d/system-auth.bak')
    shutil.copy('/etc/pam.d/password-auth', '/etc/pam.d/password-auth.bak')
    shellexec("sed -i 's/sha512/md5/' /etc/pam.d/system-auth")
    shellexec("sed -i 's/sha512/md5/' /etc/pam.d/password-auth")
    (yield None)  # hand control to the test body
    # Teardown: restore the backups over the mutated files.
    shutil.move('/etc/pam.d/system-auth.bak', '/etc/pam.d/system-auth')
    shutil.move('/etc/pam.d/password-auth.bak', '/etc/pam.d/password-auth')
# NOTE(review): the expression below looks like a decorator whose '@' prefix
# (and possibly part of its name) was lost in a previous conversion —
# confirm against the original source.
_order_decorator(jwt_required)
def is_owner(f):
    """Decorator: allow staff users or the owner of kwargs['event_id'];
    otherwise raise ForbiddenError."""
    # NOTE(review): the bare '(f)' statement below is likely a stripped
    # '@wraps(f)' decorator — confirm.
    (f)
    def decorated_function(*args, **kwargs):
        user = current_user
        if user.is_staff:
            return f(*args, **kwargs)
        if (('event_id' in kwargs) and user.is_owner(kwargs['event_id'])):
            return f(*args, **kwargs)
        # NOTE(review): the error 'source' pointer is empty — possibly
        # stripped text.
        raise ForbiddenError({'source': ''}, 'Owner access is required')
    return decorated_function
class RSAKey(object):
    """Raw RSA key with PKCS#1 v1.5 sign/verify/encrypt/decrypt helpers.

    SECURITY NOTE(review): hand-rolled PKCS#1 padding and SHA-1 signatures;
    do not extend — prefer a vetted crypto library for new code.
    """

    def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
        # n and e must be supplied together (the public part of the key).
        if ((n and (not e)) or (e and (not n))):
            raise AssertionError()
        self.n = n
        self.e = e
        self.d = d
        self.p = p
        self.q = q
        self.dP = dP
        self.dQ = dQ
        self.qInv = qInv
        # Blinding state for private-key operations (set up lazily).
        self.blinder = 0
        self.unblinder = 0

    def __len__(self):
        """Key length in bits."""
        return numBits(self.n)

    def hasPrivateKey(self):
        return (self.d != 0)

    def hashAndSign(self, bytes):
        """SHA-1 hash *bytes*, add the PKCS#1 DigestInfo prefix, and sign."""
        hashBytes = SHA1(bytearray(bytes))
        prefixedHashBytes = self._addPKCS1SHA1Prefix(hashBytes)
        sigBytes = self.sign(prefixedHashBytes)
        return sigBytes

    def hashAndVerify(self, sigBytes, bytes):
        """Verify against both DigestInfo encodings (with/without NULL)."""
        hashBytes = SHA1(bytearray(bytes))
        prefixedHashBytes1 = self._addPKCS1SHA1Prefix(hashBytes, False)
        prefixedHashBytes2 = self._addPKCS1SHA1Prefix(hashBytes, True)
        result1 = self.verify(sigBytes, prefixedHashBytes1)
        result2 = self.verify(sigBytes, prefixedHashBytes2)
        return (result1 or result2)

    def sign(self, bytes):
        """PKCS#1 type-1 pad *bytes* and apply the private-key operation."""
        if (not self.hasPrivateKey()):
            raise AssertionError()
        paddedBytes = self._addPKCS1Padding(bytes, 1)
        m = bytesToNumber(paddedBytes)
        if (m >= self.n):
            raise ValueError()
        c = self._rawPrivateKeyOp(m)
        sigBytes = numberToByteArray(c, numBytes(self.n))
        return sigBytes

    def verify(self, sigBytes, bytes):
        """Re-pad *bytes* and compare against the decrypted signature."""
        if (len(sigBytes) != numBytes(self.n)):
            return False
        paddedBytes = self._addPKCS1Padding(bytes, 1)
        c = bytesToNumber(sigBytes)
        if (c >= self.n):
            return False
        m = self._rawPublicKeyOp(c)
        checkBytes = numberToByteArray(m, numBytes(self.n))
        return (checkBytes == paddedBytes)

    def encrypt(self, bytes):
        """PKCS#1 type-2 (random) pad and apply the public-key operation."""
        paddedBytes = self._addPKCS1Padding(bytes, 2)
        m = bytesToNumber(paddedBytes)
        if (m >= self.n):
            raise ValueError()
        c = self._rawPublicKeyOp(m)
        encBytes = numberToByteArray(c, numBytes(self.n))
        return encBytes

    def decrypt(self, encBytes):
        """Undo encrypt(); returns None on any padding/size error."""
        if (not self.hasPrivateKey()):
            raise AssertionError()
        if (len(encBytes) != numBytes(self.n)):
            return None
        c = bytesToNumber(encBytes)
        if (c >= self.n):
            return None
        m = self._rawPrivateKeyOp(c)
        decBytes = numberToByteArray(m, numBytes(self.n))
        # Check the 0x00 0x02 type-2 header.
        if ((decBytes[0] != 0) or (decBytes[1] != 2)):
            return None
        # Find the zero separator between padding and payload.
        for x in range(1, (len(decBytes) - 1)):
            if (decBytes[x] == 0):
                break
        else:
            return None
        return decBytes[(x + 1):]

    def _addPKCS1SHA1Prefix(self, bytes, withNULL=True):
        """Prepend the SHA-1 DigestInfo DER prefix (with or without NULL)."""
        if (not withNULL):
            prefixBytes = bytearray([48, 31, 48, 7, 6, 5, 43, 14, 3, 2, 26, 4, 20])
        else:
            prefixBytes = bytearray([48, 33, 48, 9, 6, 5, 43, 14, 3, 2, 26, 5, 0, 4, 20])
        prefixedBytes = (prefixBytes + bytes)
        return prefixedBytes

    def _addPKCS1Padding(self, bytes, blockType):
        """PKCS#1 v1.5 pad: type 1 uses 0xFF bytes, type 2 random nonzero."""
        padLength = (numBytes(self.n) - (len(bytes) + 3))
        if (blockType == 1):
            pad = ([255] * padLength)
        elif (blockType == 2):
            pad = bytearray(0)
            while (len(pad) < padLength):
                # Over-sample, drop zero bytes, trim to the needed length.
                padBytes = getRandomBytes((padLength * 2))
                pad = [b for b in padBytes if (b != 0)]
                pad = pad[:padLength]
        else:
            raise AssertionError()
        padding = bytearray((([0, blockType] + pad) + [0]))
        paddedBytes = (padding + bytes)
        return paddedBytes

    def _rawPrivateKeyOp(self, m):
        """Private-key modular exponentiation with multiplicative blinding."""
        if (not self.blinder):
            self.unblinder = getRandomNumber(2, self.n)
            self.blinder = powMod(invMod(self.unblinder, self.n), self.e, self.n)
        m = ((m * self.blinder) % self.n)
        c = self._rawPrivateKeyOpHelper(m)
        c = ((c * self.unblinder) % self.n)
        # Square the blinding pair so the next call gets fresh factors cheaply.
        self.blinder = ((self.blinder * self.blinder) % self.n)
        self.unblinder = ((self.unblinder * self.unblinder) % self.n)
        return c

    def _rawPrivateKeyOpHelper(self, m):
        # CRT: combine the exponentiations mod p and mod q.
        s1 = powMod(m, self.dP, self.p)
        s2 = powMod(m, self.dQ, self.q)
        h = (((s1 - s2) * self.qInv) % self.p)
        c = (s2 + (self.q * h))
        return c

    def _rawPublicKeyOp(self, c):
        m = powMod(c, self.e, self.n)
        return m

    def acceptsPassword(self):
        return False

    # NOTE(review): defined without 'self' — probably a stripped
    # @staticmethod decorator; confirm against the original source.
    def generate(bits):
        """Generate a fresh RSA key of roughly *bits* bits (e = 65537)."""
        key = RSAKey()
        p = getRandomPrime((bits // 2), False)
        q = getRandomPrime((bits // 2), False)
        t = lcm((p - 1), (q - 1))
        key.n = (p * q)
        key.e = 65537
        key.d = invMod(key.e, t)
        key.p = p
        key.q = q
        key.dP = (key.d % (p - 1))
        key.dQ = (key.d % (q - 1))
        key.qInv = invMod(q, p)
        return key
def _get_long_form(nlp, text, short):
    """Tokenize *text* followed by *short* and resolve the abbreviation's
    long form via find_abbreviation; returns the long-form span or None."""
    doc = nlp(' '.join([text, short]))
    # Token index just past the long form: words in text + words in short + 1.
    long_end = text.count(' ') + short.count(' ') + 1
    long_form = doc[0:long_end]
    # The short form starts after the long form, skipping any '(' tokens.
    short_start = long_end + short.count('(')
    short_form = doc[short_start:short_start + 1]
    abbr = find_abbreviation(long_form, short_form)
    return abbr[0] if abbr is not None else abbr
def __for_method__Myclass__func(self_attr, self_attr2, arg):
    """Machine-generated free-function form of a method Myclass.func
    (self attributes lifted into parameters).

    NOTE(review): every execution path first evaluates a recursive call with
    arg - 1 and there is no terminating base case, so any invocation
    recurses until RecursionError — confirm against the tool that generated
    this code whether that is intentional (e.g. a compiler test fixture).
    """
    if (__for_method__Myclass__func(self_attr, self_attr2, (arg - 1)) < 1):
        return 1
    else:
        a = (__for_method__Myclass__func(self_attr, self_attr2, (arg - 1)) * __for_method__Myclass__func(self_attr, self_attr2, (arg - 1)))
        return ((a + ((self_attr * self_attr2) * arg)) + __for_method__Myclass__func(self_attr, self_attr2, (arg - 1)))
def run(molname, min_rings):
    """Prepare the named test molecule and assert that ring perception finds
    at least *min_rings* rings."""
    filename = filenames[molname]
    mol = Chem.MolFromMolFile(filename, removeHs=False)
    setups = mk_prep.prepare(mol)
    # Exactly one setup is expected for these fixtures.
    assert (len(setups) == 1)
    setup = setups[0]
    print(('\n%s' % molname))
    found_rings = 0
    for ring in setup.rings:
        print(('len=%d, atoms:' % len(ring)), ring)
        found_rings += 1
    print(('Got %d, needed %d' % (found_rings, min_rings)))
    assert (found_rings >= min_rings)
def extractWwwZxzxzxInfo(item):
    """Parse a release item's title into a release message.

    Returns None for previews or titles with no volume/chapter info, a
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag to look for, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_coloring_formatter():
    """ERROR records keep their message text and get the level name wrapped
    in red terminal color codes."""
    record = logging.LogRecord('foo', logging.ERROR, '', 24, TEST_STRING, (), None)
    output = ColoringFormatter(fmt='[%(levelname)s]: %(message)s').format(record)
    assert output.endswith(TEST_STRING)
    assert f'[{TerminalColors.RED}ERROR{TerminalColors.ENDC}]' in output
class _PathAttribute(StringifyMixin, TypeDisp, _Value):
    """Base class for BGP path attributes: a flags/type header followed by a
    length field that is 1 byte normally and 2 bytes when the
    EXTENDED_LENGTH flag bit is set."""

    _PACK_STR = '!BB'  # flags, type
    _PACK_STR_LEN = '!B'  # 1-byte length
    _PACK_STR_EXT_LEN = '!H'  # 2-byte (extended) length
    _ATTR_FLAGS = None  # flag bits a subclass mandates, if any

    def __init__(self, value=None, flags=0, type_=None, length=None):
        if (type_ is None):
            # Derive the wire type code from the concrete subclass.
            type_ = self._rev_lookup_type(self.__class__)
        self.flags = flags
        self.type = type_
        self.length = length
        if (value is not None):
            self.value = value

    def parser(cls, buf):
        """Parse one attribute from *buf*; return (instance, remaining buf).

        NOTE(review): takes 'cls' but carries no visible @classmethod
        decorator — it may have been stripped in a previous conversion.
        """
        (flags, type_) = struct.unpack_from(cls._PACK_STR, six.binary_type(buf))
        rest = buf[struct.calcsize(cls._PACK_STR):]
        # Length field width depends on the EXTENDED_LENGTH flag.
        if ((flags & BGP_ATTR_FLAG_EXTENDED_LENGTH) != 0):
            len_pack_str = cls._PACK_STR_EXT_LEN
        else:
            len_pack_str = cls._PACK_STR_LEN
        (length,) = struct.unpack_from(len_pack_str, six.binary_type(rest))
        rest = rest[struct.calcsize(len_pack_str):]
        value = bytes(rest[:length])
        rest = rest[length:]
        # Dispatch to the subclass registered for this attribute type.
        subcls = cls._lookup_type(type_)
        return (subcls(flags=flags, type_=type_, length=length, **subcls.parse_value(value)), rest)

    def serialize(self):
        """Serialize header + value, choosing normal vs extended length."""
        if (self._ATTR_FLAGS is not None):
            # Force the subclass-defined OPTIONAL/TRANSITIVE bits.
            self.flags = ((self.flags & (~ (BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE))) | self._ATTR_FLAGS)
        value = self.serialize_value()
        self.length = len(value)
        if (self.flags & BGP_ATTR_FLAG_EXTENDED_LENGTH):
            len_pack_str = self._PACK_STR_EXT_LEN
        elif (self.length > 255):
            # Length no longer fits in one byte: switch to extended length.
            self.flags |= BGP_ATTR_FLAG_EXTENDED_LENGTH
            len_pack_str = self._PACK_STR_EXT_LEN
        else:
            self.flags &= (~ BGP_ATTR_FLAG_EXTENDED_LENGTH)
            len_pack_str = self._PACK_STR_LEN
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.flags, self.type)
        msg_pack_into(len_pack_str, buf, len(buf), self.length)
        return (buf + value)
# NOTE(review): the '@pytest.mark.' prefix of this decorator appears to have
# been lost in a previous conversion; restored here (pytest is assumed to be
# imported in this test module).
@pytest.mark.flaky(reruns=MAX_FLAKY_RERUNS)
def test_run_with_default_connection():
    """End-to-end check: 'aea run' on a freshly created agent starts
    processing messages and shuts down cleanly on Ctrl-C."""
    runner = CliRunner()
    agent_name = 'myagent'
    cwd = os.getcwd()
    t = tempfile.mkdtemp()
    shutil.copytree(Path(ROOT_DIR, 'packages'), Path(t, 'packages'))
    os.chdir(t)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'init', '--author', AUTHOR])
    assert (result.exit_code == 0)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', agent_name])
    assert (result.exit_code == 0)
    os.chdir(Path(t, agent_name))
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'generate-key', FetchAICrypto.identifier])
    assert (result.exit_code == 0)
    result = runner.invoke(cli, [*CLI_LOG_OPTION, 'add-key', FetchAICrypto.identifier])
    assert (result.exit_code == 0)
    # Bound before the try so the finally block never sees an unbound name
    # (in the original code, a failure inside PexpectWrapper(...) made the
    # finally block raise NameError).
    process = None
    try:
        process = PexpectWrapper([sys.executable, '-m', 'aea.cli', 'run'], env=os.environ.copy(), maxread=10000, encoding='utf-8', logfile=sys.stdout)
        process.expect('Start processing messages', timeout=10)
        process.control_c()
        process.wait_to_complete(10)
        assert (process.returncode == 0)
    finally:
        if process is not None:
            process.terminate()
            process.wait_to_complete(10)
    os.chdir(cwd)
    # Best-effort cleanup of the temp tree (may fail on Windows file locks).
    try:
        shutil.rmtree(t)
    except (OSError, IOError):
        pass
class TestContent(TestCase):
    """Unit tests for the Content data object."""

    def test_normalize_and_to_dict(self):
        # normalize() renames product_id -> id and flattens the enum value.
        product_id = 'product-1'
        dict_fields = {'quantity': 2, 'item_price': 3.14, 'title': 'title4', 'description': 'description5', 'brand': 'brand6', 'category': 'category7', 'delivery_category': DeliveryCategory.HOME_DELIVERY}
        normalized_fields = dict_fields.copy()
        normalized_fields['id'] = product_id
        normalized_fields['delivery_category'] = dict_fields['delivery_category'].value
        dict_fields['product_id'] = product_id
        content = Content(product_id=dict_fields['product_id'], quantity=dict_fields['quantity'], item_price=dict_fields['item_price'], title=dict_fields['title'], description=dict_fields['description'], brand=dict_fields['brand'], category=dict_fields['category'], delivery_category=dict_fields['delivery_category'])
        self.assertEqual(content.to_dict(), dict_fields)
        self.assertEqual(content.normalize(), normalized_fields)

    def test_equals(self):
        content1 = Content(product_id='product-1', quantity=2, item_price=3.14, title='title4', description='description5', brand='brand6', category='category7')
        content2 = Content(product_id='product-1', quantity=2, item_price=3.14, title='title4', description='description5', brand='brand6', category='category7')
        self.assertTrue((content1 == content2))

    def test_not_equals(self):
        # content2 omits 'description', so equality must fail.
        content1 = Content(product_id='product-1', quantity=2, item_price=3.14, title='title4', description='description5', brand='brand6', category='category7')
        content2 = Content(product_id='product-1', quantity=2, item_price=3.14, title='title4', brand='brand6', category='category7')
        self.assertTrue((content1 != content2))

    def test_delivery_category_is_validated_when_set(self):
        # A plain string is not a DeliveryCategory and must be rejected.
        delivery_category = 'undefined_delivery_category'
        with self.assertRaises(TypeError) as context:
            Content(delivery_category=delivery_category)
        expected_exception_message = ('delivery_category must be of type DeliveryCategory. Passed invalid category: ' + delivery_category)
        self.assertTrue((expected_exception_message in str(context.exception)))
class JSONBaseProvider(BaseProvider):
    """Base class for providers that speak JSON-RPC over some transport."""

    def __init__(self) -> None:
        # Monotonically increasing JSON-RPC request ids.
        self.request_counter = itertools.count()

    def decode_rpc_response(self, raw_response: bytes) -> RPCResponse:
        """Decode raw transport bytes into an RPCResponse dict."""
        text_response = to_text(raw_response)
        return cast(RPCResponse, FriendlyJsonSerde().json_decode(text_response))

    def encode_rpc_request(self, method: RPCEndpoint, params: Any) -> bytes:
        """Encode a JSON-RPC 2.0 request envelope as bytes."""
        rpc_dict = {'jsonrpc': '2.0', 'method': method, 'params': (params or []), 'id': next(self.request_counter)}
        encoded = FriendlyJsonSerde().json_encode(rpc_dict, Web3JsonEncoder)
        return to_bytes(text=encoded)

    def is_connected(self, show_traceback: bool=False) -> bool:
        """Probe the endpoint with web3_clientVersion.

        Returns True when the node responds with a valid jsonrpc 2.0 reply;
        with show_traceback=True, failures raise ProviderConnectionError with
        details instead of returning False.
        """
        try:
            response = self.make_request(RPCEndpoint('web3_clientVersion'), [])
        except OSError as e:
            if show_traceback:
                raise ProviderConnectionError(f'Problem connecting to provider with error: {type(e)}: {e}')
            return False
        if ('error' in response):
            if show_traceback:
                raise ProviderConnectionError(f'Error received from provider: {response}')
            return False
        if (response['jsonrpc'] == '2.0'):
            return True
        else:
            if show_traceback:
                raise ProviderConnectionError(f'Bad jsonrpc version: {response}')
            return False
class UserSchema(UserSchemaPublic):
    """Full (authenticated) JSON-API schema for User, extending the public
    schema with private fields, role flags, billing data, and all
    relationships."""

    class Meta():
        # JSON-API resource configuration.
        type_ = 'user'
        self_view = 'v1.user_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    # --- Profile / social fields ---
    facebook_url = fields.Url(allow_none=True)
    twitter_url = fields.Url(allow_none=True)
    instagram_url = fields.Url(allow_none=True)
    google_plus_url = fields.Url(allow_none=True)
    password = fields.Str(required=True, load_only=True)
    # --- Role and status flags (most are server-controlled, dump_only) ---
    is_super_admin = fields.Boolean(dump_only=True)
    is_admin = fields.Boolean()
    facebook_id = fields.Integer(dump_only=True)
    is_sales_admin = fields.Boolean()
    is_marketer = fields.Boolean()
    is_user_owner = fields.Boolean(dump_only=True)
    is_user_organizer = fields.Boolean(dump_only=True)
    is_user_coorganizer = fields.Boolean(dump_only=True)
    is_user_track_organizer = fields.Boolean(dump_only=True)
    is_user_moderator = fields.Boolean(dump_only=True)
    is_user_registrar = fields.Boolean(dump_only=True)
    is_verified = fields.Boolean()
    is_blocked = fields.Boolean()
    last_accessed_at = fields.DateTime(dump_only=True)
    created_at = fields.DateTime(dump_only=True)
    deleted_at = fields.DateTime(dump_only=True)
    # --- Contact and billing details ---
    details = fields.Str(allow_none=True)
    language_prefrence = fields.Str(allow_none=True)
    contact = fields.Str(allow_none=True)
    billing_contact_name = fields.Str(allow_none=True)
    billing_phone = fields.Str(allow_none=True)
    billing_state = fields.Str(allow_none=True)
    billing_country = fields.Str(allow_none=True)
    billing_tax_info = fields.Str(allow_none=True)
    company = fields.Str(allow_none=True)
    billing_address = fields.Str(allow_none=True)
    billing_city = fields.Str(allow_none=True)
    billing_zip_code = fields.Str(allow_none=True)
    billing_additional_info = fields.Str(allow_none=True)
    is_rocket_chat_registered = fields.Bool(dump_only=True)
    # --- Relationships ---
    notifications = Relationship(self_view='v1.user_notification', self_view_kwargs={'id': '<id>'}, related_view='v1.notification_list', related_view_kwargs={'user_id': '<id>'}, schema='NotificationSchema', many=True, type_='notification')
    feedbacks = Relationship(attribute='feedback', self_view='v1.user_feedback', self_view_kwargs={'id': '<id>'}, related_view='v1.feedback_list', related_view_kwargs={'user_id': '<id>'}, schema='FeedbackSchema', many=True, type_='feedback')
    event_invoices = Relationship(self_view='v1.user_event_invoices', self_view_kwargs={'id': '<id>'}, related_view='v1.event_invoice_list', related_view_kwargs={'user_id': '<id>'}, schema='EventInvoiceSchema', many=True, type_='event-invoice')
    speakers = Relationship(attribute='speaker', self_view='v1.user_speaker', self_view_kwargs={'id': '<id>'}, related_view='v1.speaker_list', related_view_kwargs={'user_id': '<id>'}, schema='SpeakerSchema', many=True, type_='speaker')
    access_codes = Relationship(self_view='v1.user_access_codes', self_view_kwargs={'id': '<id>'}, related_view='v1.access_code_list', related_view_kwargs={'user_id': '<id>'}, schema='AccessCodeSchema', type_='access-codes')
    discount_codes = Relationship(self_view='v1.user_discount_codes', self_view_kwargs={'id': '<id>'}, related_view='v1.discount_code_list', related_view_kwargs={'user_id': '<id>'}, schema='DiscountCodeSchemaPublic', type_='discount-codes')
    email_notifications = Relationship(self_view='v1.user_email_notifications', self_view_kwargs={'id': '<id>'}, related_view='v1.email_notification_list', related_view_kwargs={'user_id': '<id>'}, schema='EmailNotificationSchema', many=True, type_='email-notification')
    alternate_emails = Relationship(self_view='v1.user_emails', self_view_kwargs={'id': '<id>'}, related_view='v1.user_emails_list', related_view_kwargs={'user_id': '<id>'}, schema='UserEmailSchema', many=True, type_='user-emails')
    sessions = Relationship(attribute='session', self_view='v1.user_session', self_view_kwargs={'id': '<id>'}, related_view='v1.session_list', related_view_kwargs={'user_id': '<id>'}, schema='SessionSchema', many=True, type_='session')
    groups = Relationship(self_view='v1.user_group', self_view_kwargs={'id': '<id>'}, related_view='v1.group_list', related_view_kwargs={'user_id': '<id>'}, schema='GroupSchema', many=True, type_='group')
    # Events by role of the user.
    owner_events = Relationship(self_view='v1.user_owner_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', related_view_kwargs={'user_owner_id': '<id>'}, schema='EventSchema', many=True, type_='event')
    organizer_events = Relationship(self_view='v1.user_organizer_events', self_view_kwargs={'id': '<id>'}, related_view_kwargs={'user_organizer_id': '<id>'}, related_view='v1.event_list', schema='EventSchema', many=True, type_='event')
    coorganizer_events = Relationship(self_view='v1.user_coorganizer_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', related_view_kwargs={'user_coorganizer_id': '<id>'}, schema='EventSchema', many=True, type_='event')
    track_organizer_events = Relationship(self_view='v1.user_track_organizer_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', related_view_kwargs={'user_track_organizer_id': '<id>'}, schema='EventSchema', many=True, type_='event')
    registrar_events = Relationship(self_view='v1.user_registrar_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', related_view_kwargs={'user_registrar_id': '<id>'}, schema='EventSchema', many=True, type_='event')
    moderator_events = Relationship(self_view='v1.user_moderator_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', related_view_kwargs={'user_moderator_id': '<id>'}, schema='EventSchema', many=True, type_='event')
    attendees = Relationship(self_view='v1.user_attendees', self_view_kwargs={'id': '<id>'}, related_view='v1.attendee_list', related_view_kwargs={'user_id': '<id>'}, schema='AttendeeSchemaPublic', many=True, type_='attendee')
    events = Relationship(self_view='v1.user_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', related_view_kwargs={'user_id': '<id>'}, schema='EventSchema', many=True, type_='event')
    favourite_events = Relationship(self_view='v1.user_user_favourite_events', self_view_kwargs={'id': '<id>'}, related_view='v1.user_favourite_events_list', related_view_kwargs={'user_id': '<id>'}, schema='UserFavouriteEventSchema', many=True, type_='user-favourite-event')
    favourite_sessions = Relationship(self_view='v1.user_user_favourite_sessions', self_view_kwargs={'id': '<id>'}, related_view='v1.user_favourite_sessions_list', related_view_kwargs={'user_id': '<id>'}, schema='UserFavouriteSessionSchema', many=True, type_='user-favourite-session')
    followed_groups = Relationship(self_view='v1.user_user_follow_groups', self_view_kwargs={'id': '<id>'}, related_view='v1.user_follow_group_list', related_view_kwargs={'user_id': '<id>'}, schema='UserFollowGroupSchema', many=True, type_='user-follow-group')
    orders = Relationship(attribute='orders', self_view='v1.user_orders', self_view_kwargs={'id': '<id>'}, related_view='v1.orders_list', related_view_kwargs={'user_id': '<id>'}, schema='OrderSchema', many=True, type_='order')
    marketer_events = Relationship(self_view='v1.user_marketer_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', schema='EventSchema', type_='event', many=True)
    sales_admin_events = Relationship(self_view='v1.user_sales_admin_events', self_view_kwargs={'id': '<id>'}, related_view='v1.event_list', schema='EventSchema', type_='event', many=True)
class ImageVm(object):
    """Accessor around the designated image-server VM (image repo host).

    NOTE(review): several members below (get_uuid, get_nic_id,
    get_additional_sources, get_imgadm_conf) are defined without 'self',
    and node/ip/has_ip/datasets_dir/repo_name/repo_url/sources are accessed
    like attributes elsewhere in this class — @staticmethod/@property
    decorators were probably stripped in a previous conversion; confirm
    against the original source.
    """

    vm = None  # cached Vm instance; False when unconfigured, None before init

    def __init__(self):
        self.init()

    def init(self, reset=False):
        # NOTE(review): local import, presumably to avoid a circular import
        # at module load time — confirm.
        from vms.models import Vm
        if ((self.vm is None) or reset):
            image_vm_uuid = self.get_uuid()
            if image_vm_uuid:
                vm = Vm.objects.select_related('node').get(uuid=image_vm_uuid)
            else:
                vm = False
            self.vm = vm

    def __nonzero__(self):
        return bool(self.vm)
    __bool__ = __nonzero__  # Python 3 truthiness alias

    def get_uuid():
        return DefaultDc().settings.VMS_IMAGE_VM

    def get_nic_id():
        return DefaultDc().settings.VMS_IMAGE_VM_NIC

    def get_additional_sources():
        return DefaultDc().settings.VMS_IMAGE_SOURCES

    def node(self):
        if self:
            return self.vm.node
        else:
            return None

    def ip(self):
        # Prefer the configured NIC's IP, then the primary active IP, then
        # the first active IP.
        vm_ips_active = self.vm.json_active_get_ips(allowed_ips=False)
        try:
            return vm_ips_active[(self.get_nic_id() - 1)]
        except LookupError:
            try:
                return self.vm.primary_ip_active
            except LookupError:
                return vm_ips_active[0]

    def has_ip(self):
        try:
            return (self.vm and self.ip)
        except LookupError:
            return False

    def datasets_dir(self):
        return settings.VMS_IMAGE_VM_DATASETS_DIR.format(zfs_filesystem=self.vm.json_active['zfs_filesystem'])

    def repo_name(self):
        assert self, 'Image VM does not exist'
        return ('_' + self.vm.hostname.lower())

    def repo_url(self):
        assert self, 'Image VM does not exist'
        # NOTE(review): the URL format string below was corrupted in a
        # previous conversion (unbalanced quote) — restore from the original.
        return (' % self.ip)

    def sources(self):
        src = []
        if self:
            try:
                src = [self.repo_url]
            except LookupError:
                pass
        src.extend(self.get_additional_sources())
        return src

    def get_imgadm_conf(sources):
        # Prepend imgapi entries for our sources to the system-wide list.
        conf = PickleDict(settings.VMS_IMAGE_IMGADM_CONF)
        system_sources = conf.get('sources', [])
        conf['sources'] = ([{'type': 'imgapi', 'url': url} for url in sources] + system_sources)
        return conf
class Email(models.Model):
    """Stores an e-mail address together with free-form message content.

    NOTE(review): every verbose_name is an empty string -- the original
    (probably non-ASCII) labels appear to have been lost in this copy;
    confirm against upstream.
    """
    # Surrogate primary key.
    nid = models.AutoField(primary_key=True)
    # Address of the sender/subscriber.
    email = models.EmailField(verbose_name='')
    # Free-form body/message text.
    content = models.TextField(verbose_name='')
    # auto_now updates the timestamp on *every* save, not only creation
    # (auto_now_add would be creation-only) -- TODO confirm intended.
    create_date = models.DateTimeField(verbose_name='', auto_now=True)

    def __str__(self):
        return self.email

    class Meta():
        verbose_name_plural = ''
def parse_union(components: List[str]) -> SelectionUnion:
    """Parse raw selection strings into a union of intersections.

    Each component may itself contain several union-operator-separated
    specs; those are flattened first, then every resulting spec is split
    on the intersection operator.

    Args:
        components: raw selection strings, each possibly containing
            ``OP_SET_UNION`` separators.

    Returns:
        A SelectionUnion holding one SelectionIntersection per flattened
        spec.
    """
    # Flatten: every component may hold several union-separated specs.
    raw_specs = itertools.chain.from_iterable(
        r.split(OP_SET_UNION) for r in components
    )
    # One intersection per spec; comprehension replaces the manual
    # append loop (same order, same elements).
    return SelectionUnion(
        [SelectionIntersection(spec.split(OP_SET_INTERSECTION)) for spec in raw_specs]
    )
# NOTE(review): the leading ".parametrize(...)" looks like a truncated
# "@pytest.mark.parametrize" decorator -- the "@pytest.mark" prefix appears
# to have been stripped from this copy; restore it when confirming upstream.
.parametrize('dep, lseries, distance, expected', [([1.0], [1], 1.2, [False]), ([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [1, 1, 1, 2, 2, 2], 0.7, [False, False, True, True, False, False]), ([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [1, 1, 1, 2, 3, 2], 1.2, [False, False, True, True, True, True]), ([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [1, 1, 1, 2, 3, 2], 0.49999, [False, False, False, False, False, False]), ([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [1, 1, 1, 2, 3, 2], 0.5, [False, False, True, True, True, True]), ([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [1, 1, 1, 2, np.nan, np.nan], 1.2, [False, False, True, True, False, False])])
def test_well_mask_shoulder_get_bseries_by_distance(dep, lseries, distance, expected):
    """Check that samples within *distance* of a zone-log boundary are masked True."""
    from xtgeo.well._well_oper import _get_bseries_by_distance
    # lseries uses float dtype so NaN (undefined zone log) is representable.
    dep = pd.Series(dep, dtype='float64')
    lseries = pd.Series(lseries, dtype='float64')
    expected = np.array(expected, dtype='bool')
    result = _get_bseries_by_distance(dep, lseries, distance)
    assert (result == expected).all()
# NOTE(review): the bare "(deprecated_in=...)" call below looks like a
# stripped decorator (probably "@deprecated(...)" from the deprecation
# package) -- restore the decorator name when confirming against upstream.
(deprecated_in='3.7.0', removed_in='4.0', details='Use Create or Update instead')
class Install(SubcommandBase):
    """Deprecated subcommand kept for backward compatibility.

    Installs a metadata instance into a schemaspace; superseded by the
    'create' and 'update' subcommands.
    """
    description = "DEPRECATED. Install a metadata instance into a given schemaspace. Use 'create' or 'update' instead."
    subcommand_description = "DEPRECATED. Install a metadata instance into schemaspace '{schemaspace}'."
    # Schemaspace handler class this subcommand operates through.
    schemaspace_base_class = SchemaspaceInstall

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
class TestProvider(TestCase):
    """Tests for the copr-rpmbuild Provider base class.

    NOTE(review): the bare call-expressions above the test methods look
    like stripped "@mock.patch(...)" decorators (their argument lists and
    the injected mock parameters match) -- restore the decorator names
    when confirming against upstream.
    """
    def setUp(self):
        super(TestProvider, self).setUp()
        self.source_json = {}

    ('{0}.open'.format(builtins), new_callable=mock.mock_open())
    ('copr_rpmbuild.providers.base.os.mkdir')
    def test_create_rpmmacros(self, mock_mkdir, mock_open):
        # Build a minimal task and verify the generated ~/.rpmmacros lines.
        task = {'task_id': '123', 'chroot': None, 'project_owner': '', 'project_name': 'copr-dev', 'source_type': BuildSourceEnum.scm}
        macros = macros_for_task(task, self.config)
        provider = Provider(self.source_json, self.config, macros)
        rpmmacros = mock.MagicMock()
        mock_open.return_value = rpmmacros
        provider.create_rpmmacros()
        mock_open.assert_called_with('{0}/.rpmmacros'.format(provider.workdir), 'w')
        # NOTE(review): the second write() literal below appears truncated
        # in this copy (unterminated string swallowing the next call) --
        # broken as written; confirm the full literal upstream.
        calls = [mock.call.__enter__().write('%_disable_source_fetch 0\n'), mock.call.__enter__().write('%__urlhelper_localopts --proto -all,+ mock.call.__enter__().write('%copr_username \n'), mock.call.__enter__().write('%copr_projectname copr-dev\n'), mock.call.__enter__().write('%buildtag .copr123\n'), mock.call.__enter__().write('%vendor Unknown Copr - group \n')]
        rpmmacros.assert_has_calls(calls, any_order=True)

    ('copr_rpmbuild.providers.base.os.mkdir')
    ('copr_rpmbuild.providers.base.Provider.create_rpmmacros')
    def test_workdir_in_workspace(self, _mock_create_rpmmacros, _mock_mkdir):
        # Provider work directories must live inside the configured workspace.
        ws = self.config.get('main', 'workspace')
        provider = Provider(self.source_json, self.config)
        assert (os.path.join(ws, 'workdir-') in provider.workdir)

def test_retry_package():
    """Exercise the retry/backoff decoration of a flaky function.

    NOTE(review): the bare "_exception(...)" call looks like a stripped
    "@backoff.on_exception(...)" decorator for dummy_func -- as written it
    has no effect and dummy_func only succeeds because count_to_pass
    reaches 0 on the first call; confirm against upstream.
    """
    def no_wait_gen():
        # Zero-delay wait generator so the test never sleeps.
        while True:
            (yield 0)
    count_to_pass = 3
    _exception(wait_gen=no_wait_gen, exception=RuntimeError, max_tries=3, jitter=None)
    def dummy_func(some_arg):
        # Fails until the countdown is exhausted, then echoes its argument.
        nonlocal count_to_pass
        count_to_pass -= 1
        if (count_to_pass > 0):
            raise RuntimeError('Throwing an exc')
        return some_arg
    assert (dummy_func(2) == 2)
# NOTE(review): the leading ".parametrize" is presumably a stripped
# "@pytest.mark.parametrize"; the inner functions likely also lost
# flytekit "@task"/"@workflow" decorators -- confirm against upstream.
.parametrize('min_success_ratio, type_t', [(None, int), (1, int), (0.5, typing.Optional[int])])
def test_map_task_min_success_ratio(min_success_ratio, type_t):
    """Map-task smoke test: min_success_ratio < 1 makes results Optional."""
    def some_task1(inputs: int) -> int:
        return inputs

    def my_wf1() -> typing.List[type_t]:
        # Fan out some_task1 over four inputs with the given success ratio.
        return map_task(some_task1, min_success_ratio=min_success_ratio)(inputs=[1, 2, 3, 4])
    my_wf1()
class _CxSetParamFromValue():
    """Code generator for one ``params[...] = value`` assignment line."""

    def __init__(self, param_name, field_value_name):
        # Names are stored verbatim; all formatting happens in src().
        self._param_name = param_name
        self._field_value_name = field_value_name

    def src(self, indentation):
        """Return the generated assignment, indented *indentation* levels."""
        prefix = _TAB_STR * indentation
        return f"{prefix}params['{self._param_name}'] = {self._field_value_name}"
def read_header_in_place(fpatch):
    """Read and validate an in-place patch header from *fpatch*.

    Returns the tuple (compression, memory_size, segment_size,
    shift_size, from_size, to_size).

    Raises Error on a short read or when the patch is not of the
    in-place type.
    """
    header = fpatch.read(1)
    if len(header) != 1:
        raise Error('Failed to read the patch header.')
    patch_type, compression = unpack_header(header)
    if patch_type != PATCH_TYPE_IN_PLACE:
        raise Error('Expected patch type {}, but got {}.'.format(
            PATCH_TYPE_IN_PLACE, patch_type))
    compression = convert_compression(compression)
    # Five variable-length size fields follow, in this fixed order.
    memory_size, segment_size, shift_size, from_size, to_size = (
        unpack_size(fpatch) for _ in range(5))
    return (compression, memory_size, segment_size, shift_size,
            from_size, to_size)
def init_real_reset(ns, Nc, rate_node, RR, real_reset, feedthrough, targets, sync, real_time_factor, simulate_delays, E, scheduler, node):
    """Build the reactive (rx) channels used during a "real" reset.

    Returns (rr_channel, zipped_flags, dispose): rr_channel emits the
    feedthrough channels (or None placeholders when real_reset is False),
    zipped_flags carries the per-channel flag streams, and dispose lists
    subjects/subscriptions the caller must dispose.
    """
    dispose = []
    if real_reset:
        # Sanity check: every feedthrough must run at exactly the reset
        # node's rate, otherwise the zipped streams would drift.
        for i in feedthrough:
            rate_str = ('%s/rate/%s' % (ns, i['address'][(len(ns) + 1):]))
            rate_in = eagerx.utils.utils.get_param_with_blocking(rate_str, node.backend)
            if (not (rate_in == rate_node)):
                raise ValueError(('Rate of the reset node (%s) must be exactly the same as the feedthrough node rate (%s).' % (rate_node, rate_in)))
        # Initialize the feedthrough channels and their flag streams.
        (zipped_channels, zipped_flags) = init_channels(ns, Nc, rate_node, feedthrough, sync, real_time_factor, simulate_delays, E, scheduler, node, is_feedthrough=True)
        # Zip all target messages into one combined signal.
        target_signal = rx.zip(*[t['msg'] for t in targets])
        # Higher-order subject: starts with the real feedthrough channels;
        # once a reset request (RR) combined with the target signal fires,
        # it switches to a None-emitting stream driven by Nc ticks.
        RR_ho = BehaviorSubject(zipped_channels)
        d_RR_ho = RR.pipe(ops.combine_latest(target_signal), ops.map((lambda x: Nc.pipe(ops.map((lambda x: None)), ops.start_with(None))))).subscribe(RR_ho)
        rr_channel = RR_ho.pipe(ops.switch_latest())
        # Caller is responsible for disposing these.
        dispose += [RR_ho, d_RR_ho]
    else:
        # No real reset: no flags ever fire, and the channel simply emits
        # None placeholders (one immediately, then one per Nc tick).
        zipped_flags = rx.never().pipe(ops.start_with({}))
        rr_channel = Nc.pipe(ops.map((lambda x: None)), ops.start_with(None))
    return (rr_channel, zipped_flags, dispose)
class OptionSeriesScatterSonificationTracksMappingNoteduration(Options):
    """Config proxy for series.scatter.sonification.tracks.mapping.noteDuration.

    NOTE(review): each option appears twice -- a no-argument getter
    immediately followed by a setter of the same name. @property /
    @<name>.setter decorators appear to have been stripped from this
    copy; as written, each later def simply shadows the earlier one.
    Confirm against upstream.
    """
    def mapFunction(self):
        # Getter: returns the configured mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stores the mapping function verbatim.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: returns the property/path the track maps to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped note duration.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped note duration.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: the range-limiting option for the mapping.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def run():
    """Emit the fuzzing specimen: a passthrough top module plus per-site params.

    The netlist itself is deliberately trivial; the interesting state is the
    (site_name, isone) pair recorded per tile and written via write_params.
    """
    print('\nmodule top(input di, output do);\n assign do = di;\n ')
    params = {}
    # sorted() accepts any iterable -- no need to materialize a list first
    # (the original wrapped gen_sites() in list() redundantly).
    sites = sorted(gen_sites())
    # Pair every site with one fuzz state bit, keyed by tile.
    for ((tile_name, site_name), isone) in zip(sites, util.gen_fuzz_states(len(sites))):
        params[tile_name] = (site_name, isone)
    print('endmodule')
    write_params(params)
class Text(SVGItem):
    """An SVG ``<text>`` element that can hold multiple ``<tspan>`` children."""

    name = 'SVG Text'
    tag = 'text'

    def __init__(self, page, text, x, y, fill):
        super(Text, self).__init__(page, text)
        self.set_attrs({'x': x, 'y': y, 'fill': fill})
        # Child elements (TSpan lines) rendered inside this <text>.
        self.html_objs = []

    def line(self, text: str, x: float, y: float) -> 'TSpan':
        """Append a TSpan child positioned at (x, y) and return it."""
        span = TSpan(self.page, text, x, y)
        self.html_objs.append(span)
        return span

    def __str__(self):
        # Render children via html() when available, else via str().
        children = ''.join(
            (child.html() if hasattr(child, 'html') else str(child))
            for child in self.html_objs
        )
        attrs = self.get_attrs(css_class_names=self.style.get_classes())
        return '<%s %s>%s%s</%s>' % (self.tag, attrs, self.val, children, self.tag)
def test_run_path_is_deleted(snake_oil_case_storage: ErtConfig, qtbot: QtBot):
    """GUI integration test: starting an ensemble experiment over an existing
    run path offers to delete it, and accepting removes the old files."""
    snake_oil_case = snake_oil_case_storage
    args_mock = Mock()
    args_mock.config = 'snake_oil.ert'
    with StorageService.init_service(ert_config=args_mock.config, project=os.path.abspath(snake_oil_case.ens_path)), open_storage(snake_oil_case.ens_path, mode='w') as storage:
        gui = _setup_main_window(EnKFMain(snake_oil_case), args_mock, GUILogHandler())
        gui.notifier.set_storage(storage)
        # Select the "Ensemble experiment" mode in the simulation panel.
        simulation_panel = gui.findChild(SimulationPanel)
        assert isinstance(simulation_panel, SimulationPanel)
        simulation_mode_combo = simulation_panel.findChild(QComboBox)
        assert isinstance(simulation_mode_combo, QComboBox)
        simulation_mode_combo.setCurrentText(EnsembleExperiment.name())
        start_simulation = simulation_panel.findChild(QWidget, name='start_simulation')
        assert start_simulation
        assert isinstance(start_simulation, QToolButton)
        # Pre-create a file in realization-0/iter-0 so the "delete run
        # path?" dialog is triggered when the experiment starts.
        run_path = Path(snake_oil_case.model_config.runpath_format_string.replace('<IENS>', '0').replace('<ITER>', '0'))
        with open((run_path / 'dummy'), 'w', encoding='utf-8') as dummy_file:
            dummy_file.close()  # NOTE(review): redundant -- the with-block closes it

        def handle_dialog():
            # Dismiss the confirmation QMessageBox once it appears.
            qtbot.waitUntil((lambda : (gui.findChild(QMessageBox) is not None)))
            message_box = gui.findChild(QMessageBox)
            assert message_box
            qtbot.mouseClick(message_box.buttons()[0], Qt.LeftButton)
        # Queue both dialog handlers before clicking start: first the
        # run-path dialog (choose delete), then the generic message box.
        QTimer.singleShot(500, (lambda : handle_run_path_dialog(gui, qtbot, delete_run_path=True)))
        QTimer.singleShot(500, handle_dialog)
        qtbot.mouseClick(start_simulation, Qt.LeftButton)
        qtbot.waitUntil((lambda : (gui.findChild(RunDialog) is not None)))
        run_dialog = gui.findChild(RunDialog)
        qtbot.mouseClick(run_dialog.show_details_button, Qt.LeftButton)
        # Generous timeout: the whole ensemble experiment must finish.
        qtbot.waitUntil(run_dialog.done_button.isVisible, timeout=100000)
        qtbot.waitUntil((lambda : (run_dialog._tab_widget.currentWidget() is not None)))
        qtbot.mouseClick(run_dialog.done_button, Qt.LeftButton)
        # The pre-existing file must have been deleted with the run path.
        assert (not os.path.exists((run_path / dummy_file.name)))
class AbcdInstance(process):
    """AST node representing an ABCD net instantiation.

    Stores the instantiated net, an optional alias, positional/keyword
    arguments, and the source location of the instantiation.
    """
    # Child-node field names (AST convention).
    _fields = ('net', 'asname', 'args', 'keywords', 'starargs', 'kwargs')
    # Non-child attributes (source position).
    _attributes = ('lineno', 'col_offset')

    def __init__(self, net, asname=None, args=(), keywords=(), starargs=None, kwargs=None, lineno=0, col_offset=0, **ARGS):
        # Defaults for args/keywords are immutable tuples instead of the
        # original shared mutable lists (classic mutable-default pitfall);
        # list(...) below still copies any caller-provided sequence, so
        # behavior is unchanged for every caller.
        process.__init__(self, **ARGS)
        self.net = net
        self.asname = asname
        self.args = list(args)
        self.keywords = list(keywords)
        self.starargs = starargs
        self.kwargs = kwargs
        # Coerce positions to int so downstream formatting can rely on it.
        self.lineno = int(lineno)
        self.col_offset = int(col_offset)
def test_instanton_action():
    """Check the instanton action gradient/Hessian against finite differences.

    Builds a 10-image instanton from the AnaPot saddle point, pins the
    analytic action value, then compares the analytic gradient and Hessian
    to numerical finite differences computed by fin_diff.
    """
    geom = AnaPot().get_saddles(i=0, geom_kwargs={'coord_type': 'mwcartesian'})

    def calc_getter():
        # Fresh calculator per image.
        return AnaPot()

    P = 10
    inst = Instanton.from_ts(geom, calc_getter=calc_getter, P=P)
    # Reference value for the analytic action.
    # (Removed unused locals from the original: `calc = geom.calculator`
    # and `dr = 1e-06` were never read.)
    res = inst.action_gradient()
    action = res['action']
    assert (action == pytest.approx(12.09601))
    # Analytic gradient vs. finite differences of the action.
    grad_res = inst.action_gradient()
    gradient = grad_res['gradient']
    coords = inst.coords.copy()
    num_grad = np.zeros_like(coords)
    fin_diff(inst, 'action', 'action', num_grad)
    np.testing.assert_allclose(gradient, num_grad)
    # Analytic Hessian vs. finite differences of the gradient.
    hess_res = inst.action_hessian()
    hessian = hess_res['hessian']
    num_hessian = np.zeros((coords.size, coords.size))
    fin_diff(inst, 'action_gradient', 'gradient', num_hessian)
    np.testing.assert_allclose(hessian, num_hessian, atol=1e-08)
class Superclass_Reference(Name):
    """AST node for a MATLAB superclass reference (``prefix@Reference``)."""

    def __init__(self, t_at, n_prefix, n_reference):
        super().__init__()
        assert isinstance(t_at, MATLAB_Token)
        assert (t_at.kind == 'AT')
        assert isinstance(n_prefix, Name)
        assert isinstance(n_reference, Name)
        # The '@' token anchoring this node's source location.
        self.t_at = t_at
        self.t_at.set_ast(self)
        # Object expression before the '@'.
        self.n_prefix = n_prefix
        self.n_prefix.set_parent(self)
        # Superclass name after the '@'.
        self.n_reference = n_reference
        self.n_reference.set_parent(self)

    def loc(self):
        return self.t_at.location

    def visit(self, parent, function, relation):
        self._visit(parent, function, relation)
        self.n_prefix.visit(self, function, 'Prefix')
        self.n_reference.visit(self, function, 'Reference')
        self._visit_end(parent, function, relation)

    def __str__(self):
        # Bug fix: the original "'%%s' % (a, b)" had no conversion targets
        # for a 2-tuple and raised TypeError at runtime. Render both
        # operands joined by '@', mirroring MATLAB's obj@Superclass syntax.
        return ('%s@%s' % (self.n_prefix, self.n_reference))
class ShellCompleter(Completer):
    """Completer that completes command names, delegating argument
    completion to the matched command's own completer."""

    def __init__(self, command_registry):
        # Bug fix: the original called super(Completer, self).__init__(),
        # which starts the MRO lookup *above* Completer and therefore
        # skips Completer's own __init__ entirely.
        super(ShellCompleter, self).__init__()
        self._command_registry = command_registry

    def get_completions(self, document, complete_event):
        """Yield completions for the first input line only.

        With a command and arguments present, delegate to the command's
        completer over just the argument substring; with only a (partial)
        command, complete against the registry. Later lines get nothing.
        """
        if document.on_first_line:
            cmd_and_args = split_command(document.text_before_cursor)
            if (len(cmd_and_args) > 1):
                (cmd, args) = cmd_and_args
                cmd_instance = self._command_registry.find_command(cmd)
                if (not cmd_instance):
                    return []
                # Re-anchor the cursor position relative to the args substring.
                return cmd_instance.get_completions(cmd, Document(args, ((document.cursor_position - len(document.text)) + len(args))), complete_event)
            else:
                return self._command_registry.get_completions(document, complete_event)
        return []
def replace_with_env_var(value: str, env_variables: dict, default_value: Any=NotSet) -> JSON_TYPES:
    """Substitute an environment-variable placeholder in *value*.

    Non-placeholder strings pass through unchanged. Resolution priority:
    actual env variable > inline default from the placeholder > the
    caller-supplied *default_value*. An optional type suffix in the
    placeholder triggers conversion via convert_value_str_to_type.

    Raises:
        ValueError: when the variable is unset and no default exists.
    """
    result = ENV_VARIABLE_RE.match(value)
    if (not result):
        return value
    # Hoisted: parse the match groups once instead of three times.
    groups = result.groupdict()
    var_name = groups['name']
    type_str = groups['type']
    default = groups['default']
    if (var_name in env_variables):
        var_value = env_variables[var_name]
    elif (default is not None):
        var_value = default
    elif (default_value is not NotSet):
        var_value = default_value
    else:
        raise ValueError(f'`{var_name}` not found in env variables and no default value set! Please ensure a .env file is provided.')
    if (type_str is not None):
        var_value = convert_value_str_to_type(var_value, type_str)
    return var_value
# NOTE(review): the leading ".django_db" looks like a truncated
# "@pytest.mark.django_db" decorator -- the "@pytest.mark" prefix appears
# to have been stripped; restore it when confirming against upstream.
.django_db
def test_complete_queries(client, create_idv_test_data):
    """Smoke-test the IDV activity endpoint for both child types.

    Same award/paging/sorting parameters in both calls; only `type`
    differs. The expected tuples encode the response shape checked by
    _test_post (pagination flags plus the matching contract ids).
    """
    _test_post(client, {'award_id': 1, 'type': 'child_idvs', 'limit': 3, 'page': 1, 'sort': 'description', 'order': 'asc'}, (None, None, 1, False, False, False, 3, 4, 5))
    _test_post(client, {'award_id': 1, 'type': 'child_awards', 'limit': 3, 'page': 1, 'sort': 'description', 'order': 'asc'}, (None, None, 1, False, False, False, 6))
def update_code_style():
    """Copy CODING_STYLE.md from the repo root into the doc source tree."""
    sourcefile = pathjoin(ROOTDIR, 'CODING_STYLE.md')
    targetfile = pathjoin(DOCSRCDIR, 'Coding', 'Evennia-Code-Style.md')
    # Plain read-then-write copy; both files handled in one with-block.
    with open(sourcefile) as src, open(targetfile, 'w') as dst:
        dst.write(src.read())
    print(' -- Updated Evennia-Code-Style.md')
def _get_runtime_info(python=None, full=False):
    """Gather version/build/install metadata for a Python runtime.

    When *python* names an interpreter other than the current one, re-run
    this script there in "_dump" mode and parse the JSON it prints;
    otherwise introspect the current process directly. *full* additionally
    resolves the build's configure arguments.
    """
    if (python and (python != sys.executable)):
        # Delegate to the target interpreter so its own sys/sysconfig are
        # what gets reported.
        argv = [python, __file__, '_dump']
        if full:
            argv.append('--full')
        proc = subprocess.run(argv, text=True, capture_output=True)
        try:
            proc.check_returncode()
        except subprocess.CalledProcessError as exc:
            # Surface the child's stderr before propagating the failure.
            print(exc.stderr)
            raise
        return json.loads(proc.stdout)
    (version, _git, builddate, compiler) = _parse_version(sys.version)
    stdlib = os.path.dirname(os.__file__)
    if (os.path.basename(stdlib) == 'Lib'):
        # In-tree (source checkout) build: stdlib is the repo's Lib/
        # directory and the binary sits next to it.
        base_executable = os.path.join(os.path.dirname(stdlib), 'python')
        if (not os.path.exists(base_executable)):
            raise NotImplementedError(base_executable)
        isdev = True
    else:
        (major, minor, *_) = sys.version_info
        # Installed layout: stdlib lives under <prefix>/<platlibdir>/pythonX.Y.
        if (stdlib == os.path.join(sys.prefix, PLATLIBDIR, f'python{major}.{minor}')):
            base_executable = sys.executable
            isdev = False
        else:
            # Unrecognized layout -- fail loudly rather than guess.
            raise NotImplementedError(stdlib)
    # A virtual environment has prefix != base_prefix.
    isvenv = (sys.prefix != sys.base_prefix)
    git = _resolve_git(_git, isdev)
    info = {'version': version, 'version_str': sys.version, 'hexversion': sys.hexversion, 'apiversion': sys.api_version, 'implementation': sys.implementation.name, 'platform': {'name': sys.platform, 'byteorder': sys.byteorder}, 'build': {'date': builddate, 'compiler': compiler, 'isdev': isdev, 'git': git, 'configure_args': None}, 'install': {'executable': sys.executable, 'prefix': sys.prefix, 'exec_prefix': sys.exec_prefix, 'base_executable': base_executable, 'base_prefix': sys.base_prefix, 'base_exec_prefix': sys.base_exec_prefix, 'stdlib': stdlib, 'isvenv': isvenv}}
    if full:
        # configure_args is only available when the build recorded
        # CONFIG_ARGS in its sysconfig data.
        configvars = sysconfig.get_config_vars()
        configargs = configvars.get('CONFIG_ARGS')
        if (configargs is not None):
            info['build']['configure_args'] = shlex.split(configargs)
    return info