code
stringlengths
281
23.7M
@plotman.plotters.check_SpecificInfo  # NOTE(review): decorator line was garbled to ".check_SpecificInfo"; restored the qualified name. Confirm whether an @attr.frozen/@attr.mutable decorator was also dropped in extraction.
class SpecificInfo:
    """Chia-plotter-specific job state, adaptable to the generic CommonInfo."""

    process_id: typing.Optional[int] = None
    phase: plotman.job.Phase = plotman.job.Phase(known=False)
    started_at: typing.Optional[pendulum.DateTime] = None
    plot_id: str = ''
    buckets: int = 0
    threads: int = 0
    buffer: int = 0
    plot_size: int = 0
    dst_dir: str = ''
    tmp_dir1: str = ''
    tmp_dir2: str = ''
    phase1_duration_raw: float = 0
    phase2_duration_raw: float = 0
    phase3_duration_raw: float = 0
    phase4_duration_raw: float = 0
    total_time_raw: float = 0
    copy_time_raw: float = 0
    filename: str = ''

    def common(self) -> plotman.plotters.CommonInfo:
        """Map chia-specific fields onto the plotter-agnostic CommonInfo."""
        return plotman.plotters.CommonInfo(
            type='chia',
            dstdir=self.dst_dir,
            phase=self.phase,
            tmpdir=self.tmp_dir1,
            tmp2dir=self.tmp_dir2,
            # a plot is complete once a total runtime has been recorded
            completed=(self.total_time_raw > 0),
            started_at=self.started_at,
            plot_id=self.plot_id,
            plot_size=self.plot_size,
            buffer=self.buffer,
            buckets=self.buckets,
            threads=self.threads,
            phase1_duration_raw=self.phase1_duration_raw,
            phase2_duration_raw=self.phase2_duration_raw,
            phase3_duration_raw=self.phase3_duration_raw,
            phase4_duration_raw=self.phase4_duration_raw,
            total_time_raw=self.total_time_raw,
            copy_time_raw=self.copy_time_raw,
            filename=self.filename,
        )
@pytest.fixture(scope='module')  # NOTE(review): decorator name was garbled to "(scope='module')"; restored as a pytest fixture — confirm
def df_enc_binary():
    """Module-scoped fixture: small categorical + numeric DataFrame for encoder tests."""
    df = {
        'var_A': (['A'] * 6) + (['B'] * 10) + (['C'] * 4),
        'var_B': (['A'] * 10) + (['B'] * 6) + (['C'] * 4),
        'var_C': (['AHA'] * 12) + (['UHU'] * 8),
        'var_D': (['OHO'] * 5) + (['EHE'] * 15),
        'var_num': [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0],
    }
    df = pd.DataFrame(df)
    return df
class OptionSeriesHeatmapSonificationPointgrouping(Options):
    """Accessors for series.heatmap.sonification.pointGrouping options.

    NOTE(review): the original had duplicate method names (getter/setter pairs
    with stripped decorators); restored as @property/@<name>.setter pairs.
    """

    @property
    def algorithm(self):
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def get_final_redirect(request: 'pyramid.request.Request'):
    """Pop 'came_from' off the session and redirect there.

    Falls back to the home route when the target is off-site or would loop
    back to the login page.
    """
    home = request.route_path('home')
    destination = request.session.get('came_from', home)
    request.session.pop('came_from', None)
    # open-redirect guard: only same-host targets are honoured
    if not destination.startswith(request.host_url):
        destination = home
    # avoid bouncing straight back to the login page
    if destination.startswith(request.route_url('login')):
        destination = home
    return HTTPFound(location=destination)
def _jagged_idx_to_dense_idx(jagged_idx: int, offsets_list: List[List[int]]) -> List[int]:
    """Translate a flat jagged index into one dense index per nesting level."""
    # the last offset of the innermost level bounds the valid flat indices
    assert jagged_idx < offsets_list[-1][-1]
    dense = []
    # walk from innermost to outermost level, peeling one coordinate each step
    for offsets in reversed(offsets_list):
        offset_idx, offset = _get_preceding_offset_idx(idx=jagged_idx, offsets=offsets)
        dense.append(jagged_idx - offset)
        jagged_idx = offset_idx
    dense.append(jagged_idx)
    dense.reverse()
    return dense
class UTCDateTime(db.TypeDecorator):
    """DateTime column that normalises to UTC.

    sqlite/mysql store naive datetimes; other dialects get tz-aware UTC.
    """

    impl = db.DateTime
    cache_ok = True

    def process_bind_param(self, value, dialect):
        # outbound: strip tzinfo for naive-only backends, else convert to UTC
        if value is None:
            return None
        if dialect.name in ('sqlite', 'mysql'):
            return value.replace(tzinfo=None)
        return value.astimezone(pytz.UTC)

    def process_result_value(self, value, dialect):
        # inbound: re-attach UTC to values read from naive-only backends
        if value is not None and dialect.name in ('sqlite', 'mysql'):
            return value.replace(tzinfo=pytz.UTC)
        return value
def get_item_id_present(agent_config: AgentConfig, item_type: str, item_public_id: PublicId) -> PublicId:
    """Return the registered public id of an item, raising if it is not registered."""
    registered = get_item_public_id_by_author_name(
        agent_config, item_type, item_public_id.author, item_public_id.name
    )
    if registered is None:
        raise AEAEnforceError('Cannot find item.')
    return registered
def test_trend_weights(simple_model):
    """A near-zero weight on an injected outlier must leave the fit unaffected."""
    coords, coefs, data = simple_model
    contaminated = data.copy()
    outlier = contaminated[20, 20] * 50
    contaminated[20, 20] += outlier
    weights = np.ones_like(data)
    weights[20, 20] = 1e-10  # effectively ignore the outlier point
    trend = Trend(degree=1).fit(coords, contaminated, weights)
    npt.assert_allclose(trend.coef_, coefs)
    # the residual at the outlier location is exactly the injected spike
    npt.assert_allclose((contaminated - trend.predict(coords))[20, 20], outlier)
    npt.assert_allclose(trend.predict(coords), data)
class TestAndroidConfigEncoder():
    """Encoding validation for messaging.AndroidConfig.

    NOTE(review): the @pytest.mark.parametrize decorators were stripped to
    bare ".parametrize" in extraction and have been restored — confirm
    against the upstream firebase-admin test suite.
    """

    @pytest.mark.parametrize('data', NON_OBJECT_ARGS)
    def test_invalid_android(self, data):
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=data))
        expected = 'Message.android must be an instance of AndroidConfig class.'
        assert str(excinfo.value) == expected

    @pytest.mark.parametrize('data', NON_STRING_ARGS)
    def test_invalid_collapse_key(self, data):
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(collapse_key=data)))
        assert str(excinfo.value) == 'AndroidConfig.collapse_key must be a string.'

    @pytest.mark.parametrize('data', NON_STRING_ARGS + ['foo'])
    def test_invalid_priority(self, data):
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(priority=data)))
        if isinstance(data, str):
            assert str(excinfo.value) == 'AndroidConfig.priority must be "high" or "normal".'
        else:
            assert str(excinfo.value) == 'AndroidConfig.priority must be a non-empty string.'

    @pytest.mark.parametrize('data', NON_UINT_ARGS)
    def test_invalid_ttl(self, data):
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(ttl=data)))
        if isinstance(data, numbers.Number):
            assert str(excinfo.value) == 'AndroidConfig.ttl must not be negative.'
        else:
            assert str(excinfo.value) == 'AndroidConfig.ttl must be a duration in seconds or an instance of datetime.timedelta.'

    @pytest.mark.parametrize('data', NON_STRING_ARGS)
    def test_invalid_package_name(self, data):
        with pytest.raises(ValueError) as excinfo:
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(restricted_package_name=data)))
        assert str(excinfo.value) == 'AndroidConfig.restricted_package_name must be a string.'

    @pytest.mark.parametrize('data', NON_DICT_ARGS)
    def test_invalid_data(self, data):
        with pytest.raises(ValueError):
            check_encoding(messaging.Message(topic='topic', android=messaging.AndroidConfig(data=data)))

    def test_android_config(self):
        msg = messaging.Message(topic='topic', android=messaging.AndroidConfig(collapse_key='key', restricted_package_name='package', priority='high', ttl=123, data={'k1': 'v1', 'k2': 'v2'}, fcm_options=messaging.AndroidFCMOptions('analytics_label_v1')))
        expected = {'topic': 'topic', 'android': {'collapse_key': 'key', 'restricted_package_name': 'package', 'priority': 'high', 'ttl': '123s', 'data': {'k1': 'v1', 'k2': 'v2'}, 'fcm_options': {'analytics_label': 'analytics_label_v1'}}}
        check_encoding(msg, expected)

    # TODO(review): the fractional-ttl expected strings were garbled to
    # '0.s'/'123.s'; restored to the nanosecond-padded form the Firebase
    # encoder emits — confirm against upstream.
    @pytest.mark.parametrize('ttl', [(0.5, '0.500000000s'), (123, '123s'), (123.45, '123.450000000s'), (datetime.timedelta(days=1, seconds=100), '86500s')])
    def test_android_ttl(self, ttl):
        msg = messaging.Message(topic='topic', android=messaging.AndroidConfig(ttl=ttl[0]))
        expected = {'topic': 'topic', 'android': {'ttl': ttl[1]}}
        check_encoding(msg, expected)
def applyBinning(pDataFrame, pBinSize, pMerge=True):
    """Snap interval starts/ends to pBinSize boundaries; optionally merge overlaps with bedtools."""
    binned = pDataFrame.copy()
    # floor the start and ceil the end to the enclosing bin boundaries
    binned[1] = (pDataFrame[1] / pBinSize).astype(int) * pBinSize
    binned[2] = ((pDataFrame[2] / pBinSize).astype(int) + 1) * pBinSize
    log.debug('pDataFrame_out {}'.format(binned))
    binned = binned.drop_duplicates()
    log.debug('pDataFrame_out {}'.format(binned))
    if not pMerge:
        return binned
    merged = BedTool.from_dataframe(binned)
    merged = merged.merge()
    log.debug('bedtools_data {}'.format(merged.to_dataframe()))
    merged = merged.sort()
    log.debug('bedtools_data {}'.format(merged.to_dataframe()))
    return merged.to_dataframe()
class NestedIterator(object):
    """Flattening iterator over a NestedInteger list (LeetCode 341 style)."""

    def __init__(self, nestedList):
        # explicit stack of [node, next-child-index]; top of stack at the end
        self.stack = [[node, 0] for node in reversed(nestedList)]
        self.num = None  # one-item lookahead buffer filled by hasNext()

    def next(self):
        # serve a buffered value first, if hasNext() already pulled one
        if self.num is not None:
            value, self.num = self.num, None
            return value
        value = None
        while value is None and self.stack:
            node, child_idx = self.stack[-1]
            if node.isInteger():
                value = node.getInteger()
                self.stack.pop()
            else:
                children = node.getList()
                if child_idx >= len(children):
                    # exhausted this nested list
                    self.stack.pop()
                    continue
                # advance the parent's cursor, then descend into the child
                self.stack[-1][1] = child_idx + 1
                self.stack.append([children[child_idx], 0])
        return value

    def hasNext(self):
        # peek by pulling the next value and buffering it
        value = self.next()
        if value is not None:
            self.num = value
        return value is not None
def get_attribution_dataset_info(config: Dict[(str, Any)], dataset_id: str, logger: logging.Logger, graphapi_version: Optional[str]=None, graphapi_domain: Optional[str]=None) -> str:
    """Fetch dataset info (DATASETS_INFORMATION, TARGET_ID) for an attribution dataset via the Graph API."""
    client: BoltGraphAPIClient[BoltPAGraphAPICreateInstanceArgs] = BoltGraphAPIClient(
        config=config,
        logger=logger,
        graphapi_version=graphapi_version,
        graphapi_domain=graphapi_domain,
    )
    response = client.get_attribution_dataset_info(dataset_id, [DATASETS_INFORMATION, TARGET_ID])
    return json.loads(response.text)
def parse_repo_params(repo, supported_keys=None):
    """Extract supported query-string options from a repo URL.

    Numeric values are converted to int; everything else stays a string.
    Defaults to recognising only 'priority'.
    """
    keys = supported_keys or ['priority']
    query = parse_qs(urlparse(repo).query)
    params = {}
    for key, values in query.items():
        if key not in keys:
            continue
        raw = values[0]  # parse_qs returns a list per key; take the first
        params[key] = int(raw) if raw.isnumeric() else raw
    return params
# NOTE(review): decorator was stripped to bare ".parametrize"; restored as
# @pytest.mark.parametrize — confirm against upstream.
@pytest.mark.parametrize('source_string, expected_ids', [
    ('/ert/ee/0/real/1111/forward_model/0', {'real': '1111', 'forward_model': '0'}),
    ('/ert/ee/0/real/1111', {'real': '1111', 'forward_model': None}),
    ('/ert/ee/0/real/1111', {'real': '1111', 'forward_model': None}),
    ('/ert/ee/0/real/1111', {'real': '1111', 'forward_model': None}),
    ('/ert/ee/0', {'real': None, 'forward_model': None}),
])
def test_source_get_ids(source_string, expected_ids):
    """Both id extractors must agree with the expected parse of the source path."""
    assert _get_real_id(source_string) == expected_ids['real']
    assert _get_forward_model_id(source_string) == expected_ids['forward_model']
def set_time_and_presigned_url_process(process_type: str) -> Tuple[(Callable, int, str)]:
    """Pick the URL builder, validity period and bucket for a process type.

    NOTE(review): returns None implicitly for unknown process types, matching
    the original — confirm callers handle that.
    """
    dispatch = {
        PROVIDER_PROCESS: (get_s3_file_url, URL_SHORT_PERIOD, BUCKET),
        USER_PROCESS: (get_cloud_front_file_url, URL_LONG_PERIOD, BUCKET_RESSOURCE),
    }
    return dispatch.get(process_type)
class DcNetworkForm(SerializerForm):
    """Form for attaching a network to a datacenter via the dc_network API call."""

    _api_call = dc_network

    # network picker rendered as a select2 widget
    name = forms.ChoiceField(
        label=_('Network'),
        required=True,
        widget=forms.Select(attrs={'class': 'input-select2 narrow disable_created2'}),
    )

    def __init__(self, request, networks, *args, **kwargs):
        super(DcNetworkForm, self).__init__(request, None, *args, **kwargs)
        # populate choices from the queryset: (name, alias) pairs
        self.fields['name'].choices = networks.values_list('name', 'alias')
def get_global_clear_params(roscfg):
    """Return clear_params entries that are not scoped under any resolved node name."""
    node_names = roscfg.resolved_node_names
    return [
        param
        for param in roscfg.clear_params
        if not any(param.startswith(name) for name in node_names)
    ]
class OnUrlNotValid(Exception):
    """Raised when an image URL fails validation.

    Accepts an optional message as the first argument and an optional
    response-like object (whose statusCode is set to 500) as the second.
    """

    def __init__(self, *args):
        # BUG FIX: the original executed `args[1].statusCode = 500` in the
        # *no-args* branch, which always raised IndexError. The statusCode
        # assignment now happens only when a second argument is provided,
        # which matches the apparent intent.
        self.message = args[0] if args else None
        if len(args) > 1:
            args[1].statusCode = 500

    def __str__(self):
        if self.message:
            return 'OnImageUrlNotValid, {0} '.format(self.message)
        return 'OnImageUrlNotValid has been raised'
def selesnick_dtcwt(size, J, no_grad=False, dev='cuda'):
    """Benchmark helper: 5 forward passes of DTCWTForward2, optional backward.

    Returns the (lowpass, highpass) outputs of the final pass.
    """
    requires_grad = not no_grad
    x = torch.randn(*size, requires_grad=requires_grad).to(dev)
    transform = DTCWTForward2(J=J, mode='symmetric').to(dev)
    for _ in range(5):
        Yl, Yh = transform(x)
    if requires_grad:
        # backprop through the last forward pass only
        Yl.backward(torch.ones_like(Yl))
    return (Yl, Yh)
class TestTaskCheck(TestCase):
    """Unit tests for curator's task_check helper.

    NOTE(review): the numeric literals in PROTO_TASK/GENERIC_TASK were lost in
    extraction ('running_time_in_nanos': , 'id': , …); plausible placeholder
    values restored — confirm against the upstream fixture.
    """

    PROTO_TASK = {
        'node': 'I0ekFjMhSPCQz7FUs1zJOg',
        'description': 'UNIT TEST',
        'running_time_in_nanos': 1637039537721,  # TODO(review): placeholder
        'action': 'indices:data/write/reindex',
        'id': 54510686,  # TODO(review): placeholder
        'start_time_in_millis': 1489695981997,  # TODO(review): placeholder
    }
    GENERIC_TASK = {'task': 'I0ekFjMhSPCQz7FUs1zJOg:54510686'}

    def test_bad_task_id(self):
        client = Mock()
        client.tasks.get.side_effect = FAKE_FAIL
        with pytest.raises(CuratorException, match='Unable to obtain task information for task'):
            task_check(client, 'foo')

    def test_incomplete_task(self):
        client = Mock()
        test_task = {'completed': False, 'task': self.PROTO_TASK, 'response': {'failures': []}}
        client.tasks.get.return_value = test_task
        assert not task_check(client, task_id=self.GENERIC_TASK['task'])

    def test_complete_task(self):
        client = Mock()
        test_task = {'completed': True, 'task': self.PROTO_TASK, 'response': {'failures': []}}
        client.tasks.get.return_value = test_task
        assert task_check(client, task_id=self.GENERIC_TASK['task'])
class OptionSeriesTimelineSonificationTracksMappingLowpassFrequency(Options):
    """Accessors for the lowpass-frequency mapping options.

    NOTE(review): the original had duplicate method names (getter/setter pairs
    with stripped decorators); restored as @property/@<name>.setter pairs.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def signed_integer(entry, option_key='Signed Integer', **kwargs):
    """Parse `entry` as an int, raising a translated ValueError on empty or bad input."""
    if not entry:
        raise ValueError(_('Must enter a whole number for {option_key}!').format(option_key=option_key))
    try:
        return int(entry)
    except ValueError:
        raise ValueError(_("Could not convert '{entry}' to a whole number for {option_key}!").format(entry=entry, option_key=option_key))
def lazy_import():
    """Deferred model imports, published into module globals to break import cycles."""
    from fastly.model.snippet_response_common import SnippetResponseCommon
    from fastly.model.snippet_with_dynamic_number import SnippetWithDynamicNumber
    globals().update(
        SnippetResponseCommon=SnippetResponseCommon,
        SnippetWithDynamicNumber=SnippetWithDynamicNumber,
    )
class OptionPlotoptionsHistogramOnpointPosition(Options):
    """Accessors for plotOptions.histogram.onPoint.position options.

    NOTE(review): the original had duplicate method names (getter/setter pairs
    with stripped decorators); restored as @property/@<name>.setter pairs.
    """

    @property
    def offsetX(self):
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def save_bulk_to_db(items, msg='Saved to db'):
    """Bulk-insert `items` in one transaction; True on commit, False after rollback."""
    try:
        logging.info(msg)
        db.session.bulk_save_objects(items)
        logging.info('added to session')
        db.session.commit()
    except SQLAlchemyError:
        logging.exception('DB Exception!')
        db.session.rollback()
        return False
    return True
class GridPlane(Component):
    """Mayavi component extracting one axis-aligned plane from a structured dataset."""

    __version__ = 0

    # the underlying VTK geometry filter producing the plane
    plane = Instance(tvtk.Object)
    axis = Enum('x', 'y', 'z', desc='specifies the axis normal to the grid plane')
    # plane index along `axis`, bounded by the dataset extents
    position = Range(value=0, low='_low', high='_high', enter_set=True, auto_set=False)
    _low = Int(0)
    _high = Int(10000)
    view = View(Group(Item(name='axis'), Item(name='position', enabled_when='_high > 0')))

    def __get_pure_state__(self):
        # drop non-picklable / recomputed traits from the persisted state
        d = super(GridPlane, self).__get_pure_state__()
        for name in ('plane', '_low', '_high'):
            d.pop(name, None)
        return d

    def __set_pure_state__(self, state):
        state_pickler.set_state(self, state)
        self._position_changed(self.position)

    def setup_pipeline(self):
        # nothing to do until an input is connected
        pass

    def update_pipeline(self):
        """(Re)build the geometry filter for the current input dataset."""
        if len(self.inputs) == 0:
            return
        input = self.inputs[0].get_output_dataset()
        plane = None
        if input.is_a('vtkStructuredGrid'):
            plane = tvtk.StructuredGridGeometryFilter()
        elif input.is_a('vtkStructuredPoints') or input.is_a('vtkImageData'):
            plane = tvtk.ImageDataGeometryFilter()
        elif input.is_a('vtkRectilinearGrid'):
            plane = tvtk.RectilinearGridGeometryFilter()
        else:
            msg = ('The GridPlane component does not support the %s dataset.' % input.class_name)
            error(msg)
            raise TypeError(msg)
        self.configure_connection(plane, self.inputs[0])
        self.plane = plane
        self.plane.update()
        self.outputs = [plane]
        self._update_limits()
        self._update_extents()
        # if the data is flat along some axis, pick that axis automatically
        extents = list(_get_extent(input))
        diff = [(y - x) for (x, y) in zip(extents[::2], extents[1::2])]
        if diff.count(0) > 0:
            self.axis = ['x', 'y', 'z'][diff.index(0)]

    def update_data(self):
        self._update_limits()
        self._update_extents()
        self.data_changed = True

    def has_output_port(self):
        return True

    def get_output_object(self):
        return self.plane.output_port

    def _get_axis_index(self):
        return {'x': 0, 'y': 1, 'z': 2}[self.axis]

    def _update_extents(self):
        # collapse the extent along the chosen axis to the current position
        inp = self.plane.input
        extents = list(_get_extent(inp))
        pos = self.position
        axis = self._get_axis_index()
        extents[2 * axis] = pos
        extents[(2 * axis) + 1] = pos
        try:
            self.plane.set_extent(extents)
        except AttributeError:
            # older tvtk exposes `extent` as a trait rather than a setter
            self.plane.extent = extents

    def _update_limits(self):
        extents = _get_extent(self.plane.input)
        axis = self._get_axis_index()
        pos = min(self.position, extents[(2 * axis) + 1])
        self._high = extents[(2 * axis) + 1]
        return pos

    def _axis_changed(self, val):
        if len(self.inputs) == 0:
            return
        pos = self._update_limits()
        if self.position == pos:
            self._update_extents()
            self.data_changed = True
        else:
            # setting position triggers _position_changed, which updates extents
            self.position = pos

    def _position_changed(self, val):
        if len(self.inputs) == 0:
            return
        self._update_extents()
        self.data_changed = True
def _executable():
    """Entry point: register the app id, wire signals, then run GUI or headless loop."""
    EEGsynth.appid('org.eegsynth.%s.%s' % (name, __version__))
    signal.signal(signal.SIGINT, _stop)
    _setup()
    try:
        global args
        if args.gui:
            app = QApplication(sys.argv)
            app.setWindowIcon(QtGui.QIcon(os.path.join(path, 'doc/figures/logo-128.ico')))
            app.aboutToQuit.connect(_stop)
            # re-install the handler: QApplication construction can replace it
            signal.signal(signal.SIGINT, _stop)
            window = MainWindow()
            window.show()
            # drive the processing loop from the Qt timer at ~10 Hz
            timer = QtCore.QTimer()
            timer.start(100)
            timer.timeout.connect(_loop_once)
            timer.timeout.connect(window.updateLabel)
            _start()
            sys.exit(app.exec_())
        else:
            _start()
    except (SystemExit, KeyboardInterrupt, RuntimeError):
        _stop()
class TestCoprEdit(CoprsTestCase):
    """View test: the edit form must pre-fill the copr's id."""

    # NOTE(review): decorator was garbled to a bare "('u1')"; restored as the
    # TransactionDecorator used throughout the copr test suite — confirm.
    @TransactionDecorator('u1')
    def test_edit_prefills_id(self, f_users, f_coprs, f_db):
        self.db.session.add_all([self.u1, self.c1])
        r = self.test_client.get('/coprs/{0}/{1}/edit/'.format(self.u1.name, self.c1.name))
        assert ('<input hidden id="id" name="id" type="hidden" value="{0}">'.format(self.c1.id).encode('utf-8') in r.data)
def mouse_click_radiobutton_child_in_panel(control, index, delay):
    """Click the radiobutton at `index` among the panel sizer's children."""
    children = control.GetSizer().GetChildren()
    if not 0 <= index < len(children):
        raise IndexError(index)
    target = children[index].GetWindow()
    mouse_click_radiobutton(control=target, delay=delay)
class AppendHeaderResource():
    """Falcon test resource exercising append_header/set_header case-insensitivity."""

    def on_get(self, req, resp):
        # same header appended under three different casings
        for header_name, value in (('X-Things', 'thing-1'), ('X-THINGS', 'thing-2'), ('x-thiNgs', 'thing-3')):
            resp.append_header(header_name, value)

    def on_head(self, req, resp):
        # set first, then append twice under different casings
        resp.set_header('X-things', 'thing-1')
        resp.append_header('X-THINGS', 'thing-2')
        resp.append_header('x-thiNgs', 'thing-3')

    def on_post(self, req, resp):
        resp.append_header('X-Things', 'thing-1')
        expiring_cookie = 'ut_existing_user=1; expires=Mon, 14-Jan-2019 21:20:08 GMT; Max-Age=600; path=/'
        resp.append_header('Set-Cookie', expiring_cookie)
        deleted_cookie = 'partner_source=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT; Max-Age=0'
        resp.append_header('seT-cookie', deleted_cookie)
def setup_hook(config):
    """pbr setup hook: normalise win32 requires, pin the version, patch pbr helpers."""
    metadata = config['metadata']
    if sys.platform == 'win32':
        requires = metadata.get('requires_dist', '').split('\n')
        metadata['requires_dist'] = '\n'.join(requires)
        config['metadata'] = metadata
    metadata['version'] = str(version)
    from pbr import packaging

    def my_get_script_args(*args, **kwargs):
        # delegate to the original implementation kept on the main module
        return _main_module()._orig_get_script_args(*args, **kwargs)

    packaging.override_get_script_args = my_get_script_args
    easy_install.get_script_args = my_get_script_args
    orig_get_version = packaging.get_version

    def my_get_version(package_name, pre_version=None):
        # force our own version for the 'ryu' package, defer otherwise
        if package_name == 'ryu':
            return str(version)
        return orig_get_version(package_name, pre_version)

    packaging.get_version = my_get_version
class JsOperations(JsRecFunc):
    """Record aggregation function emitted as client-side JavaScript.

    Groups records by `keys` and folds `values` using per-column `operations`
    (sum / count, defaulting to count).
    """

    def extendArgs(category, originParams, newCols):
        # NOTE(review): defined without `self` in the original, likely an
        # implicit-staticmethod convention of the framework; preserved as-is.
        if category == 'age':
            originParams[1] = originParams[1] + newCols
            for col in newCols:
                originParams[2][col] = 'sum'
        return originParams

    def extendColumns(jsSchema, params):
        # register the grouping keys and aggregated values on the schema
        if (params[0] is not None) and (params[1] is not None):
            jsSchema['keys'] |= set(params[0])
            jsSchema['values'] |= set(params[1])

    alias = 'aggregation'
    params = ('keys', 'values', 'operations')
    # JavaScript body executed per data set (verbatim from the original)
    value = '\n var temp = {};\n var order = [];\n data.forEach( function(rec) { \n var aggKey = []; keys.forEach(function(k){ aggKey.push( rec[k])}); var newKey = aggKey.join("#"); order.push(newKey);\n if (!(newKey in temp)) {temp[newKey] = {}};\n values.forEach(function(v) {\n if (operations[v] === undefined){ if (!(v in temp[newKey])) {temp[newKey][v] = 1} else {temp[newKey][v] += 1} }\n else if (operations[v] == \'sum\') {if (!(v in temp[newKey])) {temp[newKey][v] = rec[v]} else {temp[newKey][v] += rec[v]}}\n else if (operations[v] == \'count\') {if (!(v in temp[newKey])) {temp[newKey][v] = 1} else {temp[newKey][v] += 1}}\n })}); \n order.forEach(function(label) {\n var rec = {}; var splitKey = label.split("#");\n keys.forEach(function(k, i) {rec[k] = splitKey[i];});\n for(var v in temp[label]) {rec[v] = temp[label][v]};\n result.push(rec)})'
class Zip(Module):
    """Weevely module compressing/decompressing remote files via PHP zip."""

    aliases = ['zip', 'unzip']

    def init(self):
        self.register_info({'author': ['Emilio Pinna'], 'license': 'GPLv3'})
        self.register_vectors([
            PhpFile(payload_path=os.path.join(self.folder, 'php_zip.tpl'), name='php_zip'),
        ])
        self.register_arguments([
            {'name': 'rzip', 'help': 'Remote ZIP file'},
            {'name': 'rfiles', 'help': 'Remote files to compress. If decompressing, set destination folder.', 'nargs': '+'},
            {'name': '--decompress', 'action': 'store_true', 'default': False, 'help': 'Simulate unzip'},
        ])

    def run(self, **kwargs):
        # the vector returns an error string on failure, empty on success
        result_err = self.vectors.get_result(name='php_zip', format_args=self.args)
        if result_err:
            log.warning(result_err)
            return
        return True
def inline_lift_config(gemmini):
    """Inline the v2 kernels and hoist their config calls out of the loop nest."""
    # zero-out of the accumulator
    gemmini = call_eqv(gemmini, 'zero_acc_i32(_, _, _)', zero_acc_i32_v2)
    gemmini = inline(gemmini, 'zero_acc_i32_v2(_, _, _)')
    gemmini = inline_window(gemmini, 'dst = res[_]')
    gemmini = lift_config(gemmini, 'config_zero()')
    # load of A tiles (id1)
    gemmini = call_eqv(gemmini, 'ld_i8_block_id1(_)', ld_i8_block_id1_v2)
    gemmini = inline(gemmini, 'ld_i8_block_id1_v2(_, _, _, _, _)')
    gemmini = inline_window(gemmini, 'src = A[_]')
    gemmini = inline_window(gemmini, 'dst = a[_]')
    gemmini = lift_config(gemmini, 'config_ld_i8_id1()')
    # load of B tiles (id2)
    gemmini = call_eqv(gemmini, 'ld_i8_block_id2(_)', ld_i8_block_id2_v2)
    gemmini = inline(gemmini, 'ld_i8_block_id2_v2(_, _, _, _, _)')
    gemmini = inline_window(gemmini, 'src = B[_]')
    gemmini = inline_window(gemmini, 'dst = b[_]')
    gemmini = lift_config(gemmini, 'config_ld_i8_id2()')
    # accumulate matmul
    gemmini = call_eqv(gemmini, 'matmul_acc_i8(_, _, _, _, _)', matmul_acc_i8_v2)
    gemmini = inline(gemmini, 'matmul_acc_i8_v2(_, _, _, _, _)')
    gemmini = inline_window(gemmini, 'A = a[_]')
    gemmini = inline_window(gemmini, 'B = b[_]')
    gemmini = inline_window(gemmini, 'C = res[_]')
    gemmini = lift_config(gemmini, 'config_matmul()')
    # store of the result tile
    gemmini = call_eqv(gemmini, 'st_acc_i8(_, _, _, _, _, _)', st_acc_i8_v2)
    gemmini = inline(gemmini, 'st_acc_i8_v2(_, _, _, _, _, _)')
    gemmini = inline_window(gemmini, 'src = res[_]')
    gemmini = inline_window(gemmini, 'dst = C[_]')
    gemmini = lift_config(gemmini, 'config_st_acc_i8(_)')
    return gemmini
def get_meter_config(dp, waiters, meter_id=None, to_user=True):
    """Query meter configs from the switch and render them as a JSON-friendly dict."""
    flags = {
        dp.ofproto.OFPMF_KBPS: 'KBPS',
        dp.ofproto.OFPMF_PKTPS: 'PKTPS',
        dp.ofproto.OFPMF_BURST: 'BURST',
        dp.ofproto.OFPMF_STATS: 'STATS',
    }
    if meter_id is None:
        meter_id = dp.ofproto.OFPM_ALL
    else:
        meter_id = UTIL.ofp_meter_from_user(meter_id)
    stats = dp.ofproto_parser.OFPMeterConfigStatsRequest(dp, 0, meter_id)
    msgs = []
    ofctl_utils.send_stats_request(dp, stats, waiters, msgs, LOG)
    configs = []
    for msg in msgs:
        for config in msg.body:
            entry = config.to_jsondict()[config.__class__.__name__]
            bands = []
            for band in config.bands:
                band_dict = band.to_jsondict()[band.__class__.__name__]
                if to_user:
                    # translate the numeric band type to its symbolic name
                    t = UTIL.ofp_meter_band_type_to_user(band.type)
                    band_dict['type'] = t if (t != band.type) else 'UNKNOWN'
                bands.append(band_dict)
            # decode the flag bitmask into names (or raw bits internally)
            flag_list = []
            for bit, label in sorted(flags.items()):
                if bit & config.flags:
                    flag_list.append(label if to_user else bit)
            entry['flags'] = flag_list
            entry['bands'] = bands
            if to_user:
                entry['meter_id'] = UTIL.ofp_meter_to_user(config.meter_id)
            configs.append(entry)
    return wrap_dpid_dict(dp, configs, to_user)
def _output_search_results(item_type: str, results: List[Dict], count: int, page: int) -> None:
    """Echo paged search results, with a pagination footer when more exist."""
    plural = item_type + 's'
    shown = len(results)
    if not results:
        click.echo('No {} found.'.format(plural))
        return
    click.echo('{} found:\n'.format(plural.title()))
    click.echo(format_items(results))
    if count > shown:
        click.echo('{} {} out of {}.\nPage {}'.format(shown, plural, count, page))
def build_master_spec():
    """Assemble a three-component DRAGNN master spec: lookahead -> tagger -> parser."""
    # lookahead: left-to-right shift-only pass over the input words
    lookahead = spec_builder.ComponentSpecBuilder('lookahead')
    lookahead.set_network_unit(name='FeedForwardNetwork', hidden_layer_sizes='256')
    lookahead.set_transition_system(name='shift-only', left_to_right='true')
    lookahead.add_fixed_feature(name='words', fml='input.word', embedding_dim=64)
    lookahead.add_rnn_link(embedding_dim=(-1))
    # tagger: LSTM conditioned on the lookahead representation
    tagger = spec_builder.ComponentSpecBuilder('tagger')
    tagger.set_network_unit(name='wrapped_units.LayerNormBasicLSTMNetwork', hidden_layer_sizes='256')
    tagger.set_transition_system(name='tagger')
    tagger.add_token_link(source=lookahead, fml='input.focus', embedding_dim=64)
    # parser: arc-standard system linked to lookahead, tagger, and itself
    parser = spec_builder.ComponentSpecBuilder('parser')
    parser.set_network_unit(name='FeedForwardNetwork', hidden_layer_sizes='256', layer_norm_hidden='true')
    parser.set_transition_system(name='arc-standard')
    parser.add_token_link(source=lookahead, fml='input.focus', embedding_dim=64)
    parser.add_token_link(source=tagger, fml='input.focus stack.focus stack(1).focus', embedding_dim=64)
    parser.add_fixed_feature(name='labels', embedding_dim=16, fml=' '.join([
        'stack.child(1).label',
        'stack.child(1).sibling(-1).label',
        'stack.child(-1).label',
        'stack.child(-1).sibling(1).label',
        'stack(1).child(1).label',
        'stack(1).child(1).sibling(-1).label',
        'stack(1).child(-1).label',
        'stack(1).child(-1).sibling(1).label',
        'stack.child(2).label',
        'stack.child(-2).label',
        'stack(1).child(2).label',
        'stack(1).child(-2).label',
    ]))
    # recurrent link onto the parser's own stack states
    parser.add_link(source=parser, name='rnn-stack', fml='stack.focus stack(1).focus', source_translator='shift-reduce-step', embedding_dim=64)
    master_spec = spec_pb2.MasterSpec()
    master_spec.component.extend([lookahead.spec, tagger.spec, parser.spec])
    return master_spec
def run_phpstan(file_path):
    """Lint a PHP file with the phpstan docker image; return issue dicts."""
    container_path = '/app/input.php'
    phpstan_p = run_docker_container(
        'ghcr.io/phpstan/phpstan',
        combine_stderr_stdout=False,
        mounts=[Mount(container_path, file_path, type='bind', read_only=True)],
        command='analyse --error-format=json -- input.php',
    )
    linter_output = json.loads(phpstan_p.stdout)
    # no findings at all, or none for our mounted file
    file_reports = linter_output['files']
    if not file_reports or container_path not in file_reports:
        return []
    return [
        {'symbol': 'error', 'line': message['line'], 'column': (-1), 'message': message['message']}
        for message in file_reports[container_path]['messages']
    ]
def get_erpnext_item(integration: str, integration_item_code: str, variant_id: Optional[str]=None, sku: Optional[str]=None, has_variants: Optional[int]=0):
    """Look up the ERPNext Item for an ecommerce item, preferring a SKU match.

    Returns the Item doc, or None when nothing matches.
    """
    item_code = None
    # SKU match takes precedence over the integration-id lookup
    if sku:
        item_code = frappe.db.get_value(
            'Ecommerce Item',
            {'sku': sku, 'integration': integration},
            fieldname='erpnext_item_code',
        )
    if not item_code:
        item_code = get_erpnext_item_code(
            integration, integration_item_code, variant_id=variant_id, has_variants=has_variants
        )
    if item_code:
        return frappe.get_doc('Item', item_code)
class Rofi(Selector):
    """Selector backend driving rofi in dmenu mode."""

    def supported() -> bool:
        # NOTE(review): defined without `self` in the original; preserved as-is
        return is_installed('rofi')

    def name() -> str:
        return 'rofi'

    def show_selection(self, entries: List[Entry], prompt: str, show_help_message: bool, show_folders: bool, keybindings: List[Keybinding], additional_args: List[str]) -> Tuple[(Union[(List[Target], None)], Union[(Action, None)], Union[(Entry, None)])]:
        """Show the entry picker; returns (targets, action, selected entry)."""
        parameters = ['rofi', '-markup-rows', '-dmenu', '-i', '-sort', '-p', prompt, *self.__build_parameters_for_keybindings(keybindings), *additional_args]
        if show_help_message and keybindings:
            parameters.extend(self.__format_keybindings_message(keybindings))
        rofi = run(parameters, input='\n'.join(self.__format_entries(entries, show_folders)), capture_output=True, encoding='utf-8')
        # rofi exit codes: 1 = cancelled, 10+N = custom keybinding N
        if rofi.returncode == 1:
            return (None, Action.CANCEL, None)
        elif rofi.returncode >= 10:
            keybinding = keybindings[rofi.returncode - 10]
            return_action = keybinding.action
            return_targets = keybinding.targets
        else:
            return_action = None
            return_targets = None
        return (return_targets, return_action, self.__parse_formatted_string(rofi.stdout))

    def __format_entries(self, entries: List[Entry], show_folders: bool) -> List[str]:
        max_width = self._calculate_max_width(entries, show_folders)
        return [f'{self._format_folder(it, show_folders)}<b>{it.name}</b>{self.justify(it, max_width, show_folders)} {it.username}' for it in entries]

    def __parse_formatted_string(self, formatted_string: str) -> Entry:
        # NOTE(review): this regex literal was split across lines in extraction;
        # reconstructed as "name, optional padding, closing </b>, username" — confirm.
        match = re.compile('(?:(?P<folder>.+)/)?<b>(?P<name>.*?) *</b>(?P<username>.*)').search(formatted_string)
        return Entry(match.group('name'), match.group('folder'), match.group('username').strip())

    def select_target(self, credentials: Credentials, show_help_message: bool, keybindings: List[Keybinding], additional_args: List[str]) -> Tuple[(Union[(List[Target], None)], Union[(Action, None)])]:
        """Show the per-credential target picker; returns (targets, action)."""
        parameters = ['rofi', '-markup-rows', '-dmenu', '-p', 'Choose target', '-i', *self.__build_parameters_for_keybindings(keybindings), *additional_args]
        if show_help_message and keybindings:
            parameters.extend(self.__format_keybindings_message(keybindings))
        rofi = run(parameters, input='\n'.join(self._format_targets_from_credential(credentials)), capture_output=True, encoding='utf-8')
        if rofi.returncode == 1:
            return (None, Action.CANCEL)
        elif rofi.returncode >= 10:
            action = keybindings[rofi.returncode - 10].action
        else:
            action = None
        return (self._extract_targets(rofi.stdout), action)

    def __build_parameters_for_keybindings(self, keybindings: List[Keybinding]) -> List[str]:
        # rofi maps -kb-custom-N to exit code 9+N
        params = []
        for index, keybinding in enumerate(keybindings):
            params.extend([f'-kb-custom-{1 + index}', keybinding.shortcut])
        return params

    def __format_keybindings_message(self, keybindings: List[Keybinding]):
        return ['-mesg', ' | '.join([f'<b>{keybinding.shortcut}</b>: {self.__format_action_and_targets(keybinding)}' for keybinding in keybindings])]

    def __format_action_and_targets(self, keybinding: Keybinding) -> str:
        if keybinding.targets and (Targets.MENU in keybinding.targets):
            return 'Menu'
        elif keybinding.action == Action.SYNC:
            return 'Sync logins'
        elif keybinding.targets:
            return f"{keybinding.action.value.title()} {', '.join([target.raw for target in keybinding.targets])}"
        else:
            return keybinding.action.value.title()
def test_subset_single_pos_format2_all_None(singlepos2_font):
    """Subsetting a Format-2 SinglePos with all-None values must collapse it to Format 1."""
    font = singlepos2_font
    gpos = font['GPOS'].table
    subtable = gpos.LookupList.Lookup[0].SubTable[0]
    assert subtable.Format == 2
    # blank out every value record and its format
    subtable.Value = [None] * subtable.ValueCount
    subtable.ValueFormat = 0
    assert getXML(subtable.toXML, font) == [
        '<SinglePos Format="2">',
        '  <Coverage>',
        '    <Glyph value="a"/>',
        '    <Glyph value="b"/>',
        '    <Glyph value="c"/>',
        '  </Coverage>',
        '  <ValueFormat value="0"/>',
        '  <!-- ValueCount=3 -->',
        '</SinglePos>',
    ]
    options = subset.Options()
    subsetter = subset.Subsetter(options)
    subsetter.populate(unicodes=[ord('a'), ord('c')])
    subsetter.subset(font)
    assert getXML(font['GPOS'].table.LookupList.Lookup[0].SubTable[0].toXML, font) == [
        '<SinglePos Format="1">',
        '  <Coverage>',
        '    <Glyph value="a"/>',
        '    <Glyph value="c"/>',
        '  </Coverage>',
        '  <ValueFormat value="0"/>',
        '</SinglePos>',
    ]
@app.route('/blind/<engine>')  # NOTE(review): decorator was garbled to "('/blind/<engine>')"; restored assuming a Flask-style `app` — confirm
def blind(engine):
    """Deliberately vulnerable SSTI test endpoint: renders user input through the chosen engine.

    SECURITY: eval()/template rendering of request parameters is intentional
    for this injection test application — never expose outside a lab.
    """
    template = request.values.get('tpl')
    if not template:
        template = '%s'
    injection = request.values.get('inj')
    if engine == 'mako':
        MakoTemplates((template % injection), lookup=mylookup).render()
    elif engine == 'jinja2':
        Jinja2Env.from_string((template % injection)).render()
    elif engine == 'eval':
        eval((template % injection))  # noqa: S307 — intentional for SSTI testing
    elif engine == 'tornado':
        tornado.template.Template((template % injection)).generate()
    # blind endpoint: always return a random word regardless of outcome
    return randomword()
class init_cond(object):
    """Initial condition selector for the transport benchmark problems (ct.problem 0/1/2)."""

    def __init__(self, L):
        self.radius = 0.15
        # problem 1 bodies: slotted disk, cone, and smooth hump centers
        self.xc = 0.5
        self.yc = 0.75
        self.xc2 = 0.5
        self.yc2 = 0.25
        self.xc3 = 0.25
        self.yc3 = 0.5

    def uOfXT(self, x, t):
        if ct.problem == 0:
            # 1D square pulse on [0.3, 0.7]
            if (x[0] >= 0.3) and (x[0] <= 0.7):
                return 1.0
            else:
                return 0.0
        elif ct.problem == 1:
            # Zalesak-style: slotted disk + cone + cosine hump
            r = math.sqrt(((x[0] - self.xc) ** 2) + ((x[1] - self.yc) ** 2))
            r2 = math.sqrt(((x[0] - self.xc2) ** 2) + ((x[1] - self.yc2) ** 2))
            r3 = math.sqrt(((x[0] - self.xc3) ** 2) + ((x[1] - self.yc3) ** 2))
            slit = ((x[0] > (self.xc - 0.025)) and (x[0] < (self.xc + 0.025)) and (x[1] < (self.yc + 0.1125)))
            if (r <= self.radius) and (slit == False):
                return 1.0
            elif (r <= self.radius) and (slit == True):
                return 0.0
            elif r2 <= self.radius:
                return 1.0 - (r2 / self.radius)
            elif r3 <= self.radius:
                return (1.0 + math.cos((math.pi * r3) / self.radius)) / 4.0
            else:
                return 0.0
        elif ct.problem == 2:
            if ct.nd == 1:
                # half-ellipse of half-width a and height b centered at 0.5
                a = 0.15
                b = 1.0
                if fabs(x[0] - 0.5) <= a:
                    return (b / a) * math.sqrt((a ** 2) - ((x[0] - 0.5) ** 2))
                else:
                    return 0.0
class ModelsFetcher(FetcherClient):
    """Fetches dbt model/source/exposure metadata by invoking elementary macros."""

    def get_models_runs(self, days_back: Optional[int] = 7, exclude_elementary_models: bool = False) -> List[ModelRunSchema]:
        """Return model runs from the last *days_back* days."""
        response = self.dbt_runner.run_operation(
            macro_name='elementary_cli.get_models_runs',
            macro_args={'days_back': days_back, 'exclude_elementary': exclude_elementary_models},
        )
        raw_runs = json.loads(response[0]) if response else []
        return [ModelRunSchema(**raw) for raw in raw_runs]

    def get_models(self, exclude_elementary_models: bool = False) -> List[ModelSchema]:
        """Return all dbt models, optionally excluding elementary's own models."""
        response = self.dbt_runner.run_operation(
            macro_name='elementary_cli.get_models',
            macro_args={'exclude_elementary': exclude_elementary_models},
        )
        raw_models = json.loads(response[0]) if response else []
        return [ModelSchema(**raw) for raw in raw_models]

    def get_sources(self) -> List[SourceSchema]:
        """Return all dbt sources."""
        response = self.dbt_runner.run_operation(macro_name='elementary_cli.get_sources')
        raw_sources = json.loads(response[0]) if response else []
        return [SourceSchema(**raw) for raw in raw_sources]

    def get_exposures(self) -> List[ExposureSchema]:
        """Return all dbt exposures, decoding each exposure's raw_queries JSON field."""
        response = self.dbt_runner.run_operation(macro_name='elementary_cli.get_exposures')
        raw_exposures = json.loads(response[0]) if response else []
        decoded = []
        for raw in raw_exposures:
            # raw_queries arrives as a JSON string (or absent) - decode it.
            queries = json.loads(raw['raw_queries']) if raw.get('raw_queries') else None
            decoded.append({**raw, 'raw_queries': queries})
        return [ExposureSchema(**raw) for raw in decoded]

    def get_test_coverages(self) -> List[ModelTestCoverage]:
        """Return per-model test coverage statistics."""
        response = self.dbt_runner.run_operation(macro_name='elementary_cli.get_dbt_models_test_coverage')
        raw_coverages = json.loads(response[0]) if response else []
        return [ModelTestCoverage(**raw) for raw in raw_coverages]
def extractAkashiatranslationsWordpressCom(item):
    """Map a release post from akashiatranslations.wordpress.com to a release message.

    Returns None for previews or untitled posts, a built release message when a
    known series tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag substring, canonical series name, translation type)
    tagmap = [
        ('I became the strongest', 'Hazure Waku no Joutai Ijou Sukirude Saikyou ni Natta Ore ga Subete wo Juurin suru made', 'translated'),
        ('I got stranded', 'Mikai no Wakusei ni Fujichaku Shi ta Kedo Kaere Sou ni Nai node Jingai Harem wo Mezashite mi Masu', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class UpdateView(APIView):
    """system_update API view - triggers an in-place upgrade of the system.

    Runs only from the default datacenter; the task is DC-unbound and guarded
    by a task lock so only one update can run at a time.
    """

    # Task-lock key ensuring a single concurrent system update.
    LOCK = 'system_update'
    dc_bound = False

    def __init__(self, request, data):
        super(UpdateView, self).__init__(request)
        self.data = data

    # NOTE(review): takes ``cls`` - the @classmethod decorator appears to have
    # been stripped from this block; confirm against the original source.
    def is_task_running(cls):
        # A live lock entry means an update task is already in flight.
        return system_update.get_lock(cls.LOCK).exists()

    def put(self):
        """Validate the requested version and enqueue the system_update task."""
        # Updates may only be initiated from the default DC.
        assert self.request.dc.is_default()
        ser = UpdateSerializer(self.request, data=self.data)
        if (not ser.is_valid()):
            return FailureTaskResponse(self.request, ser.errors, dc_bound=False)
        version = ser.data['version']
        from core.version import __version__ as mgmt_version
        # Refuse a no-op update unless explicitly forced.
        if ((version == ('v' + mgmt_version)) and (not ser.data.get('force'))):
            raise PreconditionRequired('System is already up-to-date')
        obj = self.request.dc
        msg = LOG_SYSTEM_UPDATE
        _apiview_ = {'view': 'system_update', 'method': self.request.method, 'version': version}
        meta = {'apiview': _apiview_, 'msg': LOG_SYSTEM_UPDATE}
        task_kwargs = ser.data.copy()
        task_kwargs['dc_id'] = obj.id
        # tidlock=self.LOCK serializes concurrent updates; TG_DC_UNBOUND runs
        # the task outside any particular DC scope.
        (tid, err, res) = system_update.call(self.request, None, (), kwargs=task_kwargs, meta=meta, tg=TG_DC_UNBOUND, tidlock=self.LOCK)
        if err:
            msg = obj = None
        return mgmt_task_response(self.request, tid, err, res, msg=msg, obj=obj, api_view=_apiview_, dc_bound=False, data=self.data, detail_dict=ser.detail_dict(force_full=True))
class Channel():
    """HTTP channel for template-injection testing (tplmap-style).

    Parses the target URL, cookies, GET/POST parameters and headers, records
    every position marked with the injection tag, and replays requests with a
    payload substituted at the currently active injection point.

    NOTE(review): this block appears corrupted by extraction - several string
    literals and one attribute name are missing (flagged inline); recover them
    from the original source before relying on this code.
    """

    def __init__(self, args):
        self.args = args
        # Percent-encode characters that would break URL parsing.
        self.url = self.args.get('url').replace('#', '%23').replace('\\n', '%0A')
        self.base_url = (self.url.split('?')[0] if ('?' in self.url) else self.url)
        self.tag = self.args.get('injection_tag')
        self.data = {}
        self.injs = []    # discovered injection points
        self.inj_idx = 0  # index of the active injection point
        proxy = self.args.get('proxy')
        if proxy:
            # NOTE(review): scheme keys appear stripped - presumably
            # {'http': proxy, 'https': proxy}.
            self.proxies = {' proxy, ' proxy}
        else:
            self.proxies = {}
        self.get_params = {}
        self.post_params = {}
        self.header_params = {}
        self._parse_url()
        self._parse_cookies()
        self._parse_get()
        self._parse_post()
        self._parse_header()
        if (not self.injs):
            # No tag found anywhere: treat every parameter value as injectable.
            self._parse_get(all_injectable=True)
            self._parse_post(all_injectable=True)
            self._parse_header(all_injectable=True)
        self._parse_method()
        urllib3.disable_warnings()

    def _parse_method(self):
        # NOTE(review): the attribute name is missing here ("self. = ...") -
        # presumably the HTTP-method attribute later passed to requests.request().
        if self.args.get('request'):
            self. = self.args.get('request')
        elif self.post_params:
            self. = 'POST'
        else:
            self. = 'GET'

    def _parse_url(self):
        """Record injection tags appearing in the URL path."""
        url_path = urlparse.urlparse(self.url).path
        if (not (self.tag in url_path)):
            return
        url_path_base_index = self.url.find(url_path)
        # NOTE(review): the comprehension yields absolute indices into self.url,
        # yet 'position' adds url_path_base_index again - looks like a double
        # offset; confirm against the original.
        for index in [i for i in range(url_path_base_index, (url_path_base_index + len(url_path))) if (self.url[i] == self.tag)]:
            self.injs.append({'field': 'URL', 'param': 'url', 'position': (url_path_base_index + index)})

    def _parse_cookies(self):
        # Cookies are folded into a single Cookie: header entry.
        cookies = self.args.get('cookies', [])
        if cookies:
            cookie_string = ('Cookie: %s' % ';'.join(cookies))
            if (not self.args.get('headers')):
                self.args['headers'] = []
            self.args['headers'].append(cookie_string)

    def _parse_header(self, all_injectable=False):
        """Collect header parameters and injection points in names/values."""
        for param_value in self.args.get('headers', []):
            if (':' not in param_value):
                continue
            (param, value) = param_value.split(':', 1)
            param = param.strip()
            value = value.strip()
            self.header_params[param] = value
            if (self.tag in param):
                self.injs.append({'field': 'Header', 'part': 'param', 'param': param})
            if ((self.tag in value) or all_injectable):
                self.injs.append({'field': 'Header', 'part': 'value', 'value': value, 'param': param})
        # Fall back to a tool User-Agent when the caller supplied none.
        user_agent = self.args.get('user_agent')
        if (not user_agent):
            user_agent = ('tplmap/%s' % self.args.get('version'))
        if (not ('user-agent' in [p.lower() for p in self.header_params.keys()])):
            self.header_params['User-Agent'] = user_agent

    def _parse_post(self, all_injectable=False):
        """Collect POST parameters and injection points in names/values."""
        if self.args.get('data'):
            params_dict_list = urlparse.parse_qs(self.args.get('data'), keep_blank_values=True)
            for (param, value_list) in params_dict_list.items():
                self.post_params[param] = value_list
                if (self.tag in param):
                    self.injs.append({'field': 'POST', 'part': 'param', 'param': param})
                for (idx, value) in enumerate(value_list):
                    if ((self.tag in value) or all_injectable):
                        self.injs.append({'field': 'POST', 'part': 'value', 'value': value, 'param': param, 'idx': idx})

    def _parse_get(self, all_injectable=False):
        """Collect query-string parameters and injection points in names/values."""
        params_dict_list = urlparse.parse_qs(urlparse.urlsplit(self.url).query, keep_blank_values=True)
        for (param, value_list) in params_dict_list.items():
            self.get_params[param] = value_list
            if (self.tag in param):
                self.injs.append({'field': 'GET', 'part': 'param', 'param': param})
            for (idx, value) in enumerate(value_list):
                if ((self.tag in value) or all_injectable):
                    self.injs.append({'field': 'GET', 'part': 'value', 'param': param, 'value': value, 'idx': idx})

    def req(self, injection):
        """Send one request with *injection* substituted at the active point.

        Works on deep copies so the parsed baseline parameters stay pristine
        between attempts.
        """
        get_params = deepcopy(self.get_params)
        post_params = deepcopy(self.post_params)
        header_params = deepcopy(self.header_params)
        url_params = self.base_url
        inj = deepcopy(self.injs[self.inj_idx])
        if (inj['field'] == 'URL'):
            # Splice the payload into the URL at the recorded tag position.
            position = inj['position']
            url_params = ((self.base_url[:position] + injection) + self.base_url[(position + 1):])
        elif (inj['field'] == 'POST'):
            if (inj.get('part') == 'param'):
                # Inject into the parameter *name*, keeping its value.
                old_value = post_params[inj.get('param')]
                del post_params[inj.get('param')]
                if (self.tag in inj.get('param')):
                    new_param = inj.get('param').replace(self.tag, injection)
                else:
                    new_param = injection
                post_params[new_param] = old_value
            if (inj.get('part') == 'value'):
                if (self.tag in post_params[inj.get('param')][inj.get('idx')]):
                    post_params[inj.get('param')][inj.get('idx')] = post_params[inj.get('param')][inj.get('idx')].replace(self.tag, injection)
                else:
                    post_params[inj.get('param')][inj.get('idx')] = injection
        elif (inj['field'] == 'GET'):
            if (inj.get('part') == 'param'):
                old_value = get_params[inj.get('param')]
                del get_params[inj.get('param')]
                if (self.tag in inj.get('param')):
                    new_param = inj.get('param').replace(self.tag, injection)
                else:
                    new_param = injection
                get_params[new_param] = old_value
            if (inj.get('part') == 'value'):
                if (self.tag in get_params[inj.get('param')][inj.get('idx')]):
                    get_params[inj.get('param')][inj.get('idx')] = get_params[inj.get('param')][inj.get('idx')].replace(self.tag, injection)
                else:
                    get_params[inj.get('param')][inj.get('idx')] = injection
        elif (inj['field'] == 'Header'):
            # Header payloads must not contain CR/LF (header splitting).
            injection = injection.replace('\n', '').replace('\r', '')
            if (inj.get('part') == 'param'):
                # NOTE(review): reads old_value from get_params but deletes from
                # header_params - likely should read header_params; confirm.
                old_value = get_params[inj.get('param')]
                del header_params[inj.get('param')]
                if (self.tag in inj.get('param')):
                    new_param = inj.get('param').replace(self.tag, injection)
                else:
                    new_param = injection
                header_params[new_param] = old_value
            if (inj.get('part') == 'value'):
                if (self.tag in header_params[inj.get('param')]):
                    header_params[inj.get('param')] = header_params[inj.get('param')].replace(self.tag, injection)
                else:
                    header_params[inj.get('param')] = injection
        if (self.tag in self.base_url):
            log.debug(('[URL] %s' % url_params))
        if get_params:
            log.debug(('[GET] %s' % get_params))
        if post_params:
            log.debug(('[POST] %s' % post_params))
        if (len(header_params) > 1):
            log.debug(('[HEDR] %s' % header_params))
        try:
            # NOTE(review): "method=self." is missing its attribute name (and a
            # comma) - presumably the attribute set in _parse_method().
            result = requests.request(method=self. url=url_params, params=get_params, data=post_params, headers=header_params, proxies=self.proxies, verify=False).text
        except requests.exceptions.ConnectionError as e:
            if (e and e[0] and (e[0][0] == 'Connection aborted.')):
                log.info('Error: connection aborted, bad status line.')
                result = None
            else:
                raise
        if utils.config.log_response:
            log.debug(('< %s' % result))
        return result

    def detected(self, technique, detail):
        # Hook invoked when an injection is confirmed; intentionally a no-op here.
        pass
class LienScannerTest(ForsetiTestCase):
    """Unit tests for LienScanner._retrieve()."""

    # NOTE(review): decorator appears truncated to ".object(...)" - presumably
    # "@mock.patch.object(lien_scanner, 'lien_rules_engine', autospec=True)".
    .object(lien_scanner, 'lien_rules_engine', autospec=True)
    def setUp(self, _):
        # Construct the scanner with empty configs; the rules engine is mocked.
        self.scanner = lien_scanner.LienScanner({}, {}, mock.MagicMock(), '', '', '')

    def test_retrieve(self):
        """_retrieve() groups liens under their parent project."""
        mock_data_access = mock.MagicMock()
        mock_data_access.scanner_iter.side_effect = _mock_gcp_resource_iter
        mock_service_config = mock.MagicMock()
        mock_service_config.model_manager = mock.MagicMock()
        mock_service_config.model_manager.get.return_value = (mock.MagicMock(), mock_data_access)
        self.scanner.service_config = mock_service_config
        got = self.scanner._retrieve()
        want = {data.PROJECT: [data.LIEN]}
        self.assertEqual(got, want)
# NOTE(review): the fixture decorator appears truncated to "(scope='function')"
# - presumably "@pytest.fixture(scope='function')".
(scope='function')
def oauth2_client_credentials_connection_config(db: Session, oauth2_client_credentials_configuration) -> Generator:
    """Yield a SaaS ConnectionConfig wired for the OAuth2 client-credentials
    strategy, deleting it from the DB after the test."""
    secrets = {'domain': 'localhost', 'client_id': 'client', 'client_secret': 'secret', 'access_token': 'access', 'refresh_token': 'refresh'}
    # NOTE(review): the 'protocol' value below appears stripped by extraction
    # (likely 'https'); confirm against the original source.
    saas_config = {'fides_key': 'oauth2_client_credentials_connector', 'name': 'OAuth2 Client Credentials Connector', 'type': 'custom', 'description': 'Generic OAuth2 connector for testing', 'version': '0.0.1', 'connector_params': [{'name': item} for item in secrets.keys()], 'client_config': {'protocol': ' 'host': secrets['domain'], 'authentication': {'strategy': 'oauth2_client_credentials', 'configuration': oauth2_client_credentials_configuration}}, 'endpoints': [], 'test_request': {'method': 'GET', 'path': '/test'}}
    fides_key = saas_config['fides_key']
    connection_config = ConnectionConfig.create(db=db, data={'key': fides_key, 'name': fides_key, 'connection_type': ConnectionType.saas, 'access': AccessLevel.write, 'secrets': secrets, 'saas_config': saas_config})
    (yield connection_config)
    # Teardown: remove the config created for this test.
    connection_config.delete(db)
# NOTE(review): decorator appears truncated to ".EventDecorator()" - likely a
# profiling decorator such as "@PETSc.Log.EventDecorator()"; confirm.
.EventDecorator()
def make_scalar_element(mesh, family, degree, vfamily, vdegree):
    """Build a scalar finite element on *mesh*'s cell.

    If *family* is already a FiniteElementBase it is simply rebuilt on the
    mesh cell.  On extruded (tensor-product) cells with vertical family/degree
    given, a TensorProductElement of horizontal x vertical elements is formed;
    otherwise a plain FiniteElement is returned.
    """
    topology = mesh.topology
    cell = topology.ufl_cell()
    if isinstance(family, finat.ufl.FiniteElementBase):
        # Caller handed us an element - just retarget it to this cell.
        return family.reconstruct(cell=cell)
    if (isinstance(cell, ufl.TensorProductCell) and (vfamily is not None) and (vdegree is not None)):
        # Horizontal element on the base cell, vertical element on the interval.
        la = finat.ufl.FiniteElement(family, cell=cell.sub_cells()[0], degree=degree)
        lb = finat.ufl.FiniteElement(vfamily, cell=ufl.interval, degree=vdegree)
        return finat.ufl.TensorProductElement(la, lb)
    else:
        return finat.ufl.FiniteElement(family, cell=cell, degree=degree)
class Tag(models.Model):
    """Generic tag attachable to database models.

    Uniqueness is enforced on (key, category, tagtype, model), so one Tag row
    can be shared by many tagged objects.
    """

    # Tag identifier (the tag's name).
    db_key = models.CharField('key', max_length=255, null=True, help_text='tag identifier', db_index=True)
    # Optional grouping category.
    db_category = models.CharField('category', max_length=64, null=True, blank=True, help_text='tag category', db_index=True)
    # Free-form payload; never used in lookups.
    db_data = models.TextField('data', null=True, blank=True, help_text='optional data field with extra information. This is not searched for.')
    # Name of the model class this tag attaches to.
    db_model = models.CharField('model', max_length=32, null=True, help_text='database model to Tag', db_index=True)
    # Overall flavour of the tag, distinguishing tag subsystems.
    db_tagtype = models.CharField('tagtype', max_length=16, null=True, blank=True, help_text='overall type of Tag', db_index=True)

    class Meta():
        verbose_name = 'Tag'
        unique_together = (('db_key', 'db_category', 'db_tagtype', 'db_model'),)
        index_together = (('db_key', 'db_category', 'db_tagtype', 'db_model'),)

    def __lt__(self, other):
        # Order tags by their string representation (key + category).
        return (str(self) < str(other))

    def __str__(self):
        return str(('<Tag: %s%s>' % (self.db_key, (('(category:%s)' % self.db_category) if self.db_category else ''))))
# NOTE(review): both decorators appear truncated to ".object(...)" -
# presumably "@mock.patch.object(...)"; mocks are injected bottom-up as
# (mock_pull, mock_list_images).
.object(docker_image.DockerImage, 'list_images')
.object(docker.models.images.ImageCollection, 'get')
def test_pull_image_return_existing(mock_pull, mock_list_images):
    """pull_image() returns a container object when the image exists locally."""
    mock_list_images.return_value = ['amazonlinux:2']
    mock_pull.return_value = mock.MagicMock()
    new_docker_image = docker_image.DockerImage()
    container = new_docker_image.pull_image('amazonlinux:2')
    assert (container is not None)
class TopicGroupItem(QStandardItem):
    """Tree item representing a group (namespace) of ROS topics.

    Holds TopicItem children and nested TopicGroupItem children, and provides
    helpers to create, look up, clean up and update the grouped rows.

    NOTE(review): several decorators (@property, @name.setter, @classmethod)
    appear to have been stripped from this block by extraction; they are
    restored here because the rest of the class reads ``self.name`` /
    ``self.is_group`` as attributes and calls ``create_item_list`` on the
    class with three arguments.
    """

    ITEM_TYPE = (Qt.UserRole + 35)

    def __init__(self, name, parent=None, is_group=False):
        # Display text: '{name}' for explicit groups, 'name/' for namespaces.
        dname = '/'
        if is_group:
            dname = ('{%s}' % name)
        elif (name != rospy.names.SEP):
            dname = ('%s/' % name)
        QStandardItem.__init__(self, dname)
        self.parent_item = parent
        self._name = name
        self._is_group = is_group
        self.is_system_group = (name == 'SYSTEM')
        # Set by _mark_groups_to_delete() for flattening in clearup().
        self._clearup_mark_delete = False

    @property
    def name(self):
        """The ROS name of this group."""
        return self._name

    @name.setter
    def name(self, new_name):
        self._name = new_name
        # Keep the displayed text in sync with the new name.
        if self._is_group:
            self.setText((('{' + self._name) + '}'))
        else:
            self.setText((self._name + '/'))

    @property
    def is_group(self):
        return self._is_group

    def get_namespace(self):
        """Return the full namespace of this item, assembled from its ancestors."""
        name = self._name
        if ((type(self) == TopicGroupItem) and self._is_group):
            name = namespace(self._name)
        result = name
        if ((self.parent_item is not None) and (type(self.parent_item) != QStandardItem)):
            result = (normns((self.parent_item.get_namespace() + rospy.names.SEP)) + normns((result + rospy.names.SEP)))
        return normns(result)

    def count_topics(self):
        """Recursively count TopicItem descendants."""
        result = 0
        for i in range(self.rowCount()):
            item = self.child(i)
            if isinstance(item, TopicGroupItem):
                # BUG FIX: previously called item.count_nodes(), which does not
                # exist on TopicGroupItem and raised AttributeError.
                result += item.count_topics()
            elif isinstance(item, TopicItem):
                result += 1
        return result

    def get_topic_items_by_name(self, topic_name, recursive=True):
        """Return the TopicItems whose name equals *topic_name*."""
        result = []
        for i in range(self.rowCount()):
            item = self.child(i)
            if isinstance(item, TopicGroupItem):
                if recursive:
                    result[len(result):] = item.get_topic_items_by_name(topic_name)
            elif (isinstance(item, TopicItem) and (item == topic_name)):
                return [item]
        return result

    def get_topic_items(self, recursive=True):
        """Return all TopicItem descendants (recursing by default)."""
        result = []
        for i in range(self.rowCount()):
            item = self.child(i)
            if isinstance(item, TopicGroupItem):
                if recursive:
                    result[len(result):] = item.get_topic_items()
            elif isinstance(item, TopicItem):
                result.append(item)
        return result

    @classmethod
    def create_item_list(cls, name, parent, is_group):
        """Create the four-column row for a new group item.

        Columns: group item, publisher count, subscriber count, type.
        """
        items = []
        item = TopicGroupItem(name, parent, is_group)
        items.append(item)
        pubItem = QStandardItem()
        items.append(pubItem)
        subItem = QStandardItem()
        items.append(subItem)
        typeItem = QStandardItem()
        items.append(typeItem)
        return items

    def get_group_item(self, group_name, is_group=True, nocreate=False):
        """Return the (sub)group item for *group_name*, creating it on demand.

        Returns None when *nocreate* is set and the group does not exist.
        """
        (lns, rns) = (group_name, '')
        if nm.settings().group_nodes_by_namespace:
            (lns, rns) = lnamespace(group_name)
            if (lns == rospy.names.SEP):
                (lns, rns) = lnamespace(rns)
        if (lns == rospy.names.SEP):
            return self
        for i in range(self.rowCount()):
            item = self.child(i)
            if isinstance(item, TopicGroupItem):
                if (item == lns):
                    if rns:
                        return item.get_group_item(rns, is_group)
                    return item
                elif ((item > lns) and (not nocreate)):
                    # Keep children sorted: insert before the first larger item.
                    items = TopicGroupItem.create_item_list(lns, self, is_group=(is_group and (not rns)))
                    self.insertRow(i, items)
                    if rns:
                        return items[0].get_group_item(rns, is_group)
                    return items[0]
        if nocreate:
            return None
        items = TopicGroupItem.create_item_list(lns, self, is_group=(is_group and (not rns)))
        self.appendRow(items)
        if rns:
            return items[0].get_group_item(rns, is_group)
        return items[0]

    def add_node(self, topic):
        """Insert *topic* under the group matching its namespace."""
        group_item = self
        if nm.settings().group_nodes_by_namespace:
            ns = namespace(topic.name)
            if (ns != rospy.names.SEP):
                group_item = self.get_group_item(ns, False)
        new_item_row = TopicItem.create_item_list(topic, self)
        group_item._add_row(new_item_row)

    def _add_row(self, row):
        self.appendRow(row)
        row[0].parent_item = self
        row[0].update_view()

    def clearup(self, fixed_node_names=None):
        """Remove empty groups and flatten groups left with a single child."""
        self._clearup(fixed_node_names)
        self._mark_groups_to_delete()
        self._remove_marked_groups()

    def _clearup(self, fixed_node_names=None):
        """Recursively drop empty subgroups; returns True if anything was removed."""
        removed = False
        for i in reversed(range(self.rowCount())):
            item = self.child(i)
            if isinstance(item, TopicItem):
                pass
            else:
                removed = (item._clearup(fixed_node_names) or removed)
        if ((self.rowCount() == 0) and (self.parent_item is not None)):
            self.parent_item._remove_group(self.name)
        return removed

    def _remove_group(self, name):
        # Remove the (now empty) child group called *name*.
        for i in range(self.rowCount()):
            item = self.child(i)
            if ((type(item) == TopicGroupItem) and (item == name) and (item.rowCount() == 0)):
                self.removeRow(i)
                return

    def _mark_groups_to_delete(self):
        for i in range(self.rowCount()):
            item = self.child(i)
            if isinstance(item, TopicItem):
                # A group with exactly one topic and no group semantics can be
                # flattened into its parent.
                if (self.rowCount() == 1):
                    if (not self.is_group):
                        if (self.parent_item is not None):
                            self._clearup_mark_delete = True
            else:
                item._mark_groups_to_delete()
                if (self.rowCount() == 1):
                    # Propagate the mark up through single-child chains.
                    self._clearup_mark_delete = item._clearup_mark_delete

    def _remove_marked_groups(self):
        rows2add = []
        for i in reversed(range(self.rowCount())):
            item = self.child(i)
            if isinstance(item, TopicGroupItem):
                if item._clearup_mark_delete:
                    # Pull the lone topic row out of the doomed group, then
                    # delete the group and re-attach the row here.
                    row = self._take_node_row(item)
                    if (row is not None):
                        rows2add.append(row)
                        self.removeRow(i)
                else:
                    item._remove_marked_groups()
        for row in rows2add:
            self._add_row(row)

    def _take_node_row(self, group):
        """Detach and return the single TopicItem row nested under *group*."""
        result = None
        if (group.rowCount() == 1):
            item = group.child(0)
            if isinstance(item, TopicItem):
                result = group.takeRow(0)
            else:
                result = group._take_node_row(item)
        return result

    def remove_node(self, name):
        """Remove the topic called *name*; prune this group if it became empty."""
        removed = False
        for i in range(self.rowCount()):
            item = self.child(i)
            if ((type(item) == TopicItem) and (item == name)):
                self.removeRow(i)
                removed = True
                break
            elif (type(item) == TopicGroupItem):
                removed = item.remove_node(name)
                if removed:
                    break
        if (removed and (self.rowCount() == 0)):
            if (type(self.parent_item) == TopicGroupItem):
                self.parent_item._remove_group(self.name)
        return removed

    def update_topic_view(self, updated_topics, topics):
        """Refresh the view of every topic listed in *updated_topics*."""
        for i in range(self.rowCount()):
            item = self.child(i)
            if (type(item) == TopicItem):
                if (item.topic.name in updated_topics):
                    item.update_view(topics[item.topic.name])
            elif (type(item) == TopicGroupItem):
                item.update_topic_view(updated_topics, topics)

    def index_from_names(self, publisher, subscriber):
        """Return model indexes of topics named in *publisher*/*subscriber*.

        For publishers the publisher-count column (1) is added, for
        subscribers the subscriber-count column (2).
        """
        result = []
        for i in range(self.rowCount()):
            item = self.child(i)
            if (type(item) == TopicGroupItem):
                result[len(result):] = item.index_from_names(publisher, subscriber)
            elif (type(item) == TopicItem):
                if (item.topic.name in publisher):
                    result.append(item.index())
                    result.append(self.child(i, 1).index())
                if (item.topic.name in subscriber):
                    result.append(item.index())
                    result.append(self.child(i, 2).index())
        return result

    def type(self):
        return TopicGroupItem.ITEM_TYPE

    def __eq__(self, item):
        # Compare case-insensitively against a string or another item's name.
        if isstring(item):
            return (self.name.lower() == item.lower())
        elif (not (item is None)):
            return (self.name.lower() == item.name.lower())
        return False

    def __ne__(self, item):
        return (not (self == item))

    def __gt__(self, item):
        # SYSTEM groups always sort to the end of the list.
        if isstring(item):
            if self.is_system_group:
                if (self.name.lower() != item.lower()):
                    return True
            elif (item.lower() == 'system'):
                return False
            return (self.name.lower() > item.lower())
        elif (not (item is None)):
            if item.is_system_group:
                # BUG FIX: previously compared against item.lower(), which does
                # not exist on a TopicGroupItem.
                if (self.name.lower() != item.name.lower()):
                    return True
            # BUG FIX: previously "self.is_syste_group" (typo) - AttributeError.
            elif self.is_system_group:
                return False
            return (self.name.lower() > item.name.lower())
        return False
class JsSum(JsRecFunc):
    """Client-side aggregation: group records by key columns and sum value columns.

    The JavaScript template in ``value`` runs in the browser: it groups the
    ``data`` records by the '#'-joined ``keys`` fields, accumulates each field
    in ``values``, and emits one record per group in first-seen order (or the
    explicit ``xOrder`` if given).
    """

    # NOTE(review): no ``self`` parameter - a @staticmethod decorator appears
    # to have been stripped from this method; confirm against the original.
    def extendColumns(jsSchema, params):
        # Register the key/value column names consumed by the JS snippet.
        if ((params[0] is not None) and (params[1] is not None)):
            jsSchema['keys'] |= set(params[0])
            jsSchema['values'] |= set(params[1])

    # Name under which this aggregation is exposed.
    alias = 'sum'
    # Positional JS argument names for the template below.
    params = ('keys', 'values', 'xOrder')
    value = '\n if ((keys == null) || (values == null)){result = data}\n else{\n var temp = {}; var order = []; \n if (Array.isArray(data)){\n data.forEach( function(rec){ \n var aggKey = []; keys.forEach(function(k){ aggKey.push(rec[k])}); \n var newKey = aggKey.join("#"); if (!(newKey in temp)) {order.push(newKey)};\n if (!(newKey in temp)) {temp[newKey] = {}};\n values.forEach(function(v) {if (!(v in temp[newKey])) {temp[newKey][v] = rec[v]} else {temp[newKey][v] += rec[v]}})})}; \n if(Array.isArray(xOrder)){order = xOrder};\n order.forEach(function(label) {\n var rec = {}; var splitKey = label.split("#");\n keys.forEach(function(k, i) {rec[k] = splitKey[i];});\n for(var v in temp[label]) {rec[v] = temp[label][v]};\n result.push(rec)})}'
def wait_for_socket(ipc_path: str, timeout: int = 30) -> None:
    """Block until the Unix-domain socket at *ipc_path* accepts connections.

    Polls every 10 ms until a probe connection succeeds or *timeout* seconds
    elapse; returns None either way.

    Fixes over the previous version: the probe socket is always closed (it
    leaked on every failed attempt and on success), and the timeout is set
    before connect() so the connect attempt itself is bounded.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        try:
            sock.settimeout(timeout)  # bound the connect attempt itself
            sock.connect(ipc_path)
        except (FileNotFoundError, socket.error):
            time.sleep(0.01)
        else:
            break
        finally:
            sock.close()
def jubt():
    """Scrape a page for the first live href and return it.

    NOTE(review): this block is corrupted by extraction - the source URL
    literal, the fetch expression ("rsp = _") and the returned value
    ("return _") were stripped; recover them from the original before use.
    """
    jubturl = '
    try:
        rsp = _
        # Scan anchor hrefs inside table cells (hex-escaped '<td>' / '<a href=').
        for match in re.finditer('\\x3Ctd\\x3E\\s*\\x3Ca\\s+href\\x3D[\\x22\\x27](?P<url>.*?)[\\x22\\x27]', rsp, (re.IGNORECASE | re.DOTALL)):
            # Keep the first URL that actually responds.
            if url_is_alive(match.group('url')):
                jubturl = match.group('url')
                break
        return _
    except:
        # Best-effort scraper: log and fall back to an empty string.
        xbmc.log(msg=format_exc(), level=xbmc.LOGERROR)
        return ''
def fail_download(download_job, exception, message):
    """Mark *download_job* as failed, recording *message* plus the stack trace.

    Logs the failure, stores "message: traceback" on the job's error_message,
    sets the job status to 'failed' and persists the job.
    """
    write_to_log(message=message, is_error=True, download_job=download_job)
    # traceback.format_exception lost its keyword parameter names (etype=...)
    # in Python 3.10; the positional form works on all supported versions.
    stack_trace = ''.join(
        traceback.format_exception(type(exception), exception, exception.__traceback__)
    )
    download_job.error_message = f'{message}: {stack_trace}'
    download_job.job_status_id = JOB_STATUS_DICT['failed']
    download_job.save()
class KiwoomOpenApiPlusDispatchSignature(KiwoomOpenApiPlusSignature):
    """Signature metadata for the OpenAPI+ COM dispatch (method-call) interface.

    NOTE(review): the @classmethod decorators appear to have been stripped
    from this block by extraction; they are restored here since every method
    takes ``cls`` and the registry is class-level state.
    """

    # Registry of dispatch-function signatures keyed by function name,
    # populated once by _initialize().
    DISPATCH_SIGNATURES_BY_NAME: Dict[str, 'KiwoomOpenApiPlusDispatchSignature'] = {}

    @classmethod
    def from_name(cls, name: str) -> 'KiwoomOpenApiPlusDispatchSignature':
        """Return the signature registered for *name* (KeyError if unknown)."""
        signature = cls.DISPATCH_SIGNATURES_BY_NAME[name]
        return signature

    @classmethod
    def names(cls) -> List[str]:
        """Return all known dispatch function names.

        Falls back to introspecting KiwoomOpenApiPlusDispatchFunctions when the
        registry is empty (e.g. the COM type library was unavailable).
        """
        names = list(cls.DISPATCH_SIGNATURES_BY_NAME.keys())
        if not names:
            # BUG FIX: the fallback result was previously assigned to an unused
            # local (_names) and discarded, so the fallback never took effect.
            names = dir_public(KiwoomOpenApiPlusDispatchFunctions)
        return names

    @classmethod
    def _make_dispatch_signatures_by_name(cls) -> Dict[str, 'KiwoomOpenApiPlusDispatchSignature']:
        """Build the name -> signature mapping from the COM type library."""
        from koapy.backend.kiwoom_open_api_plus.core.KiwoomOpenApiPlusTypeLib import (
            DISPATCH_OLE_ITEM,
        )
        dispatch = DISPATCH_OLE_ITEM
        dispatch_signatures_by_name: Dict[str, 'KiwoomOpenApiPlusDispatchSignature'] = {}
        if dispatch:
            # Keep only ordinary dispatch functions: skip restricted entries
            # and the special enumerator (DISPID_NEWENUM).
            dispatch_funcs = [
                (name, entry)
                for (name, entry) in dispatch.mapFuncs.items()
                if not any([
                    ((entry.desc[9] & pythoncom.FUNCFLAG_FRESTRICTED)
                     and (entry.desc[0] != pythoncom.DISPID_NEWENUM)),
                    (entry.desc[3] != pythoncom.FUNC_DISPATCH),
                    (entry.desc[0] == pythoncom.DISPID_NEWENUM),
                ])
            ]
            for (func_name, entry) in dispatch_funcs:
                signature = cls._from_entry(func_name, entry)
                dispatch_signatures_by_name[func_name] = signature
        return dispatch_signatures_by_name

    @classmethod
    def _initialize(cls):
        """Populate the class-level registry (intended to run once at import)."""
        cls.DISPATCH_SIGNATURES_BY_NAME = cls._make_dispatch_signatures_by_name()
def SetNameBonePosition(kwargs: dict) -> OutgoingMessage:
    """Serialize a SetNameBonePosition command into an OutgoingMessage.

    Required kwargs: id, bone_name, bone_position.
    Optional kwargs: bone_position_y, bone_position_z - each optional float is
    preceded by a presence flag so the receiver knows whether to read it.
    """
    utility.CheckKwargs(kwargs, ['id', 'bone_name', 'bone_position'])
    msg = OutgoingMessage()
    msg.write_int32(kwargs['id'])
    msg.write_string('SetNameBonePosition')
    msg.write_string(kwargs['bone_name'])
    msg.write_float32(kwargs['bone_position'])
    # Optional components: flag first, value only when present.
    for optional_key in ('bone_position_y', 'bone_position_z'):
        present = optional_key in kwargs
        msg.write_bool(present)
        if present:
            msg.write_float32(kwargs[optional_key])
    return msg
# NOTE(review): decorator appears truncated to ".django_db" - presumably
# "@pytest.mark.django_db".
.django_db
def test_federal_account_loans_empty(client, monkeypatch, helpers, generic_account_data, elasticsearch_account_index):
    """Posting def_codes=['A'] against the seeded data yields zero results."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_account_index)
    # Pin "now" so date-dependent filtering is deterministic.
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = helpers.post_for_spending_endpoint(client, url, def_codes=['A'])
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.json()['results']) == 0)
class OptionSeriesNetworkgraphSonificationTracks(Options):
    """Option wrapper for `series.networkgraph.sonification.tracks`.

    NOTE(review): each getter/setter pair below shares one name - the
    @property/@<name>.setter decorators appear to have been stripped from this
    block (the same pattern occurs throughout the file); confirm against the
    original source.
    """

    def activeWhen(self) -> 'OptionSeriesNetworkgraphSonificationTracksActivewhen':
        # Sub-options controlling when this track is active.
        return self._config_sub_data('activeWhen', OptionSeriesNetworkgraphSonificationTracksActivewhen)

    def instrument(self):
        # Instrument preset used to play the track; defaults to 'piano'.
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesNetworkgraphSonificationTracksMapping':
        # Sub-options mapping data values to sound properties.
        return self._config_sub_data('mapping', OptionSeriesNetworkgraphSonificationTracksMapping)

    def midiName(self):
        # Track name written to exported MIDI files; no default.
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesNetworkgraphSonificationTracksPointgrouping':
        # Sub-options for grouping nearby points into one sound.
        return self._config_sub_data('pointGrouping', OptionSeriesNetworkgraphSonificationTracksPointgrouping)

    def roundToMusicalNotes(self):
        # Whether pitches snap to musical notes; defaults to True.
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        # Whether the play marker is shown while sonifying; defaults to True.
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        # Track type; defaults to 'instrument'.
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
def main():
    """CLI entry point: parse global flags, then dispatch to the CliMetLab app."""
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--debug', action='store_true', help='verbose operation (default: quiet)')
    parser.add_argument('cmdline', type=str, metavar='CMD', nargs=argparse.REMAINDER)
    parser.add_argument('-h', '--help', action='store_true', help='show this help message and exit')
    parser.add_argument('-v', '--version', action='store_true', help='show version and exit')
    args = parser.parse_args()

    if args.version:
        from climetlab import __version__
        print(__version__)
        sys.exit()

    if args.help:
        parser.print_help()
        cmdline = ['help']
    else:
        cmdline = args.cmdline

    # Strip the already-consumed global options before the app re-reads argv.
    sys.argv[1:] = cmdline
    logging.basicConfig(level='DEBUG' if args.debug else 'WARN')

    app = CliMetLabApp()
    if cmdline:
        # Single-shot: run one command and propagate its exit status.
        exit_code = app.onecmd(' '.join(cmdline))
        if exit_code:
            sys.exit(exit_code)
    else:
        # No command given: drop into the interactive loop.
        app.cmdloop()
def generate_parameter_assignments(parameters, feature_data, iob_type):
    """Render Verilog parameter assignments guarding each feature.

    For every (feature, parameter) pair, builds an OR of IOSTANDARD (plus
    DRIVE/SLEW where applicable) conditions under which the feature applies to
    *iob_type*, and emits one `.PARAM(...)` assignment per pair.
    """
    is_diff = 'DIFF_' in iob_type
    base_iob_type = iob_type.replace('DIFF_', '')
    assignments = []
    for feature, parameter in sorted(parameters):
        clauses = set()
        for iosettings, features in feature_data.items():
            # Only consider settings matching the (pseudo-)differential flavour.
            if iosettings.is_diff != is_diff:
                continue
            if base_iob_type not in features[feature]:
                continue
            clause = 'IOSTANDARD == "{}"'.format(iosettings.iostandard.upper())
            if base_iob_type in ['O', 'T']:
                # Output features may additionally depend on DRIVE and SLEW.
                if iosettings.drive is not None and 'DRIVE' in feature:
                    clause += ' && DRIVE == {}'.format(iosettings.drive)
                if 'SLEW' in feature:
                    clause += ' && SLEW == "{}"'.format(iosettings.slew)
            clauses.add(clause)
        # Deduplicated, sorted OR of all matching clauses.
        condition_text = ' || \n'.join(' ({})'.format(c) for c in sorted(clauses))
        assignments.append('.{}(\n{}\n)'.format(parameter, condition_text))
    return ',\n'.join(assignments)
def test_field_order():
    """Bit field names are reported ordered by LSB, not by insertion order."""
    reg = Register('REGA', 'Register A')
    # Deliberately added out of LSB order.
    specs = [
        ('bf_a', 'Bit field A', 0, 3),
        ('bf_b', 'Bit field B', 16, 1),
        ('bf_c', 'Bit field C', 5, 6),
        ('bf_d', 'Bit field D', 18, 12),
    ]
    for name, desc, lsb, width in specs:
        reg.add_bitfields(BitField(name, desc, lsb=lsb, width=width))
    assert reg.bitfield_names == ['bf_a', 'bf_c', 'bf_b', 'bf_d']
class Subscriber(ABC):
    """Base class for event subscribers: records EventRecords for later querying."""

    def __init__(self):
        self.events = EventCollection()
        self.reset()

    def get_interfaces(self) -> List[Type[ABC]]:
        """Return the interface classes this subscriber listens to.

        NOTE(review): the body of this method appears lost in extraction
        (likely abstract or docstring-only in the original); confirm.
        """

    def notify_event(self, event: EventRecord):
        """Record an incoming event."""
        self.events.append(event)

    def reset(self):
        """Drop all recorded events."""
        self.events = EventCollection()

    def query_events(self, event_spec: Union[(Callable, Iterable[Callable])]) -> Iterable:
        """Return recorded events matching *event_spec* (one callable or many).

        Asserts that every queried event belongs to an interface returned by
        get_interfaces(), to catch querying for events never subscribed to.
        """
        if (not isinstance(event_spec, Iterable)):
            event_spec = [event_spec]
        for ev in event_spec:
            assert (self._get_class_that_defined_method(ev) in self.get_interfaces()), f'Event {ev} queried, but class not subscribed. Check your get_interfaces() implementation {self.get_interfaces()}.'
        return self.events.query_events(event_spec)

    # NOTE(review): no ``self`` parameter - a @staticmethod decorator appears
    # to have been stripped from this method; confirm against the original.
    def _get_class_that_defined_method(method):
        """Resolve the class that defines *method*.

        (The original docstring's "Source:" URL - a Stack Overflow answer -
        was lost in extraction.)
        """
        if inspect.ismethod(method):
            for cls in inspect.getmro(method.__self__.__class__):
                if (cls.__dict__.get(method.__name__) is method):
                    return cls
            method = method.__func__
        if inspect.isfunction(method):
            cls = getattr(inspect.getmodule(method), method.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0])
            if isinstance(cls, type):
                return cls
        return getattr(method, '__objclass__', None)
def gen_deserialize(out):
    """Emit the C implementation of of_match_deserialize() to *out*.

    Generates a switch over every supported OpenFlow wire version; each case
    wraps the buffer in a version-specific of_match object and converts it to
    the generic of_match_t.
    """
    out.write('\n/**\n * Deserialize a match structure according to the version passed\n * version The version to use for deserialization protocol\n * match Pointer to the structure to fill out\n * octets Pointer to an octets object holding serial buffer\n *\n * Normally the octets object will point to a part of a wire buffer.\n */\n\nint\nof_match_deserialize(of_version_t version, of_match_t *match,\n of_object_t *parent, int offset, int length)\n{\n of_object_t obj;\n\n switch (version) {\n')
    # One case per supported OpenFlow version.
    for version in of_g.of_version_range:
        out.write(('\n case %(ver_name)s:\n of_match_v%(version)d_init(&obj, %(ver_name)s, length, 1);\n of_object_attach(parent, &obj, offset, length);\n OF_TRY(of_match_v%(version)d_to_match(&obj, match));\n break;\n' % dict(version=version, ver_name=of_g.of_version_wire2name[version])))
    out.write('\n default:\n return OF_ERROR_COMPAT;\n }\n\n return OF_ERROR_NONE;\n}\n')
class AlbumArtManager(object):
    """Stores and looks up album art in Rhythmbox's 'album-art' ExtDB store."""

    def __init__(self):
        self.storage = RB.ExtDB(name='album-art')

    # NOTE(review): no ``self`` parameter on the two key builders below -
    # @staticmethod decorators appear to have been stripped; without them,
    # calling self._gen_*_key(a, b) passes the instance as *album*.
    def _gen_storage_key(album, artist):
        key = RB.ExtDBKey.create_storage('album', album)
        key.add_field('artist', artist)
        return key

    def _gen_lookup_key(album, artist):
        key = RB.ExtDBKey.create_lookup('album', album)
        key.add_field('artist', artist)
        return key

    def ensure_art_exists(self, track, size: str='200x200'):
        """Store cover art for *track* unless the store already has it."""
        artists = ', '.join((artist.name for artist in track.artists))
        album_title = (track.albums[0].title if track.albums else NO_ALBUM)
        lookup_key = self._gen_lookup_key(album_title, artists)
        lookup_result = self.storage.lookup(lookup_key)[0]
        if (track.cover_uri and (not lookup_result)):
            # NOTE(review): this f-string is corrupted (its URL prefix was lost
            # in extraction) - presumably built from track.cover_uri with the
            # requested size substituted; confirm against the original.
            uri = f" size)}"
            storage_key = self._gen_storage_key(album_title, artists)
            self.storage.store_uri(storage_key, RB.ExtDBSourceType.SEARCH, uri)
def shell_entry():
    """Entry point for the wxpy command-line shell.

    Logs in the requested bots (if any) and drops into an interactive shell
    with them preloaded; otherwise prints version details or usage help.
    """
    import re
    import logging
    import wxpy

    arg_parser = get_arg_parser()
    args = arg_parser.parse_args()

    if args.bot:
        def get_logging_level():
            # Accept any prefix of a logging level name; fall back to INFO.
            logging_level = args.logging_level.upper()
            for level in ('CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'):
                if level.startswith(logging_level):
                    return getattr(logging, level)
            else:
                return logging.INFO

        logging.basicConfig(level=get_logging_level())

        bots = dict()
        try:
            for name in args.bot:
                # Only simple identifiers are accepted as bot names.
                if not re.match('\\w+$', name):
                    continue
                cache_path = 'wxpy_{}.pkl'.format(name) if args.cache else None
                bots[name] = wxpy.Bot(cache_path=cache_path, console_qr=args.console_qr)
        except KeyboardInterrupt:
            # User aborted the QR-code login: bail out quietly.
            return

        # Shell banner listing the preloaded bot variables.
        banner = 'from . import *\n'
        for k, v in bots.items():
            banner += '{}: {}\n'.format(k, v)

        module_members = dict(inspect.getmembers(wxpy))
        embed(local=dict(module_members, **bots), banner=banner, shell=args.shell)
    elif args.version:
        print(wxpy.version_details)
    else:
        arg_parser.print_help()
def run_pipes(experiment_type):
    """Run acquisition and export concurrently, connected by a one-way pipe.

    The PSLab acquisition process writes into the pipe's send end while the
    export process drains the receive end; both are joined before the pipe
    endpoints are closed.
    """
    receiver, sender = multiprocessing.Pipe(duplex=False)
    workers = [
        multiprocessing.Process(target=get_data_pslab, args=(sender, experiment_type)),
        multiprocessing.Process(target=export, args=(receiver,)),
    ]
    # Start the producer first, then the consumer (same order as before).
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    receiver.close()
    sender.close()
def cmd_show(python, *, fmt='table', full=False):
    """Print the collected interpreter info in the requested format.

    fmt: 'table' pretty-prints the info dict, 'json' dumps it to stdout;
    any other value raises ValueError.
    """
    info = get_python_info(python, full=full)
    if fmt == 'table':
        import pprint
        pprint.pprint(info)
        return
    if fmt == 'json':
        json.dump(info, sys.stdout, indent=4)
        print()
        return
    raise ValueError(f'unsupported fmt {fmt!r}')
@_force_correct_shape  # NOTE(review): source had a bare `_force_correct_shape` line; the '@' appears to have been stripped during extraction — confirm upstream.
def randn(*shape, dtype=np.complex128):
    """Return standard-normal samples of the given shape and dtype.

    Real dtypes draw directly; complex dtypes combine two independent real
    draws as real + 1j*imag (component variance 1 each).

    Raises:
        ValueError: for dtypes outside float32/float64/complex64/complex128.
    """
    def _randn(real_dtype):
        # Draw from N(0, 1) and cast to the requested real precision.
        return np.random.randn(*shape).astype(real_dtype)

    if dtype in (np.float32, np.float64):
        return _randn(dtype)
    if dtype is np.complex64:
        return _randn(np.float32) + 1j * _randn(np.float32)
    if dtype is np.complex128:
        return _randn(np.float64) + 1j * _randn(np.float64)
    # BUG FIX: unsupported dtypes previously fell through and silently
    # returned None; fail loudly instead.
    raise ValueError(f'unsupported dtype: {dtype!r}')
class Sequential_MinAdaptiveModelStep_SS(SO_base):
    """Split operator that advances all models with one shared adaptive dt.

    The system step is the minimum dt proposed by the adaptive
    (FLCBDF-style) step controllers; each outer iteration re-solves the
    whole model sequence at the same target time until `converged`.
    """

    def __init__(self, modelList, system=defaultSystem, stepExact=False):
        from .StepControl import FLCBDF_controller
        SO_base.__init__(self, modelList, system, stepExact)
        # Only models with adaptive time integration participate in dt
        # selection; maxFailures ends up as the last adaptive model's limit.
        self.controllerList = []
        for model in self.modelList:
            if model.levelModelList[(- 1)].timeIntegration.isAdaptive:
                self.controllerList.append(model)
                self.maxFailures = model.stepController.maxSolverFailures
        self.stepFailures = 0
        self.updateAfterModelStep = False

    def converged(self):
        # One pass over the model sequence per system step: converged as
        # soon as at least one sequence iteration has been taken.
        if (self.its > 0):
            self.its = 0
            return True
        else:
            return False

    def stepExact_system(self, tExact):
        """Shrink dt so the system lands (close to) exactly on tExact."""
        # `old` toggles a legacy clipping strategy; the active branch also
        # halves the step when the remaining gap is small, to avoid a tiny
        # final step.
        old = False
        if old:
            if ((self.dt_system > 0.0) and ((self.t_system_last + self.dt_system) >= (tExact * (1.0 - self.stepExactEps)))):
                logEvent(('dt system orig' + str(self.dt_system)), level=5)
                self.dt_system = (tExact - self.t_system_last)
                logEvent(('dt system final' + str(self.dt_system)), level=5)
            elif ((self.dt_system < 0.0) and ((self.t_system_last + self.dt_system) <= (tExact * (1.0 + self.stepExactEps)))):
                self.dt_system = (tExact - self.t_system_last)
        else:
            if (self.dt_system > 0.0):
                if ((self.t_system_last + self.dt_system) >= (tExact * (1.0 - self.stepExactEps))):
                    logEvent(('dt system orig' + str(self.dt_system)), level=5)
                    self.dt_system = (tExact - self.t_system_last)
                    logEvent(('dt system final' + str(self.dt_system)), level=5)
                elif ((tExact - (self.t_system_last + self.dt_system)) < old_div(self.dt_system, 2.0)):
                    # Remaining gap smaller than dt/2: take half the gap now.
                    logEvent(('dt system orig' + str(self.dt_system)), level=5)
                    self.dt_system = old_div((tExact - self.t_system_last), 2.0)
                    logEvent(('dt system final' + str(self.dt_system)), level=5)
            if (self.dt_system < 0.0):
                # Mirror logic for backward-in-time stepping.
                if ((self.t_system_last + self.dt_system) <= (tExact * (1.0 + self.stepExactEps))):
                    self.dt_system = (tExact - self.t_system_last)
                elif ((tExact - (self.t_system_last + self.dt_system)) > old_div(self.dt_system, 2.0)):
                    self.dt_system = old_div((tExact - self.t_system_last), 2.0)
        # Propagate the (possibly clipped) dt to every model controller.
        self.t_system = (self.t_system_last + self.dt_system)
        self.stepSequence = [(self.t_system, m) for m in self.modelList]
        for model in self.modelList:
            model.stepController.dt_model = self.dt_system
            model.stepController.set_dt_allLevels()
            model.stepController.t_model = self.t_system
            model.stepController.setSubsteps([self.t_system])

    def choose_dt_system(self):
        """Pick the next system dt: min over adaptive models, grown 10% if easy."""
        self.dt_system = min([model.stepController.dt_model for model in self.controllerList])
        self.max_its = max([model.solver.solverList[(- 1)].its for model in self.controllerList])
        if (self.max_its < 3):
            # All nonlinear solves were cheap: cautiously grow the step.
            self.dt_system = (self.dt_system_last * 1.1)
        self.dt_system_last = self.dt_system
        self.t_system = (self.t_system_last + self.dt_system)
        self.stepSequence = [(self.t_system, model) for model in self.modelList]
        for model in self.modelList:
            model.stepController.dt_model = self.dt_system
            model.stepController.set_dt_allLevels()

    def initialize_dt_system(self, t0, tOut):
        """Seed the first system step from the per-model controller choices."""
        self.its = 0
        self.t_system_last = t0
        self.dt_system = min([model.stepController.dt_model for model in self.controllerList])
        self.dt_system_last = self.dt_system
        self.t_system = (self.t_system_last + self.dt_system)
        self.stepSequence = [(self.t_system, m) for m in self.modelList]
        for model in self.modelList:
            model.stepController.dt_model = self.dt_system
            model.stepController.t_model = self.t_system
            model.stepController.set_dt_allLevels()
            model.stepController.initializeTimeHistory()
        logEvent(('Initializing time step on system %s to dt = %12.5e' % (self.system.name, self.dt_system)), level=1)
        logEvent(('Initializing step sequence for system %s to %s' % (self.system.name, self.stepSequence)), level=1)

    def retryModelStep_solverFailure(self, model):
        # Defer retry decisions to the sequence level; just record the cause.
        self.failureType = 'solver'
        return False

    def retryModelStep_errorFailure(self, model):
        self.failureType = 'error'
        return False

    def ignoreSequenceStepFailure(self, model):
        # Failures are never silently ignored for this operator.
        return False

    def retrySequence_modelStepFailure(self):
        """Ask every model to retry after a failed sequence; shrink dt if all agree."""
        self.retry = True
        for model in self.modelList:
            if (self.failureType == 'solver'):
                self.retry = (self.retry and model.stepController.retryStep_solverFailure())
            else:
                self.retry = (self.retry and model.stepController.retryStep_errorFailure())
        if self.retry:
            self.stepFailures += 1
        if ((self.stepFailures < self.maxFailures) and self.retry):
            # Re-pick dt (the controllers have reduced their dt_model) and retry.
            self.choose_dt_system()
            return True
        else:
            return False

    def modelStepTaken(self, model, t_stepSequence):
        # A successful model step resets the failure counter; the model exits
        # its stepping loop once it has reached the sequence target time.
        self.stepFailures = 0
        if (model.stepController.t_model >= t_stepSequence):
            self.exitModelStep[model] = True
        model.calculateAuxiliaryQuantitiesAfterStep()

    def sequenceStepTaken(self, model):
        self.stepFailures = 0
        self.exitModelStep[model] = False

    def sequenceTaken(self):
        # Entire sequence done: commit history and let each controller
        # propose its next dt.
        self.its += 1
        for model in self.modelList:
            model.stepController.updateTimeHistory()
            model.stepController.choose_dt_model()
def compile_path(path: str) -> typing.Tuple[(typing.Pattern[str], str, typing.Dict[(str, Convertor[typing.Any])])]:
    """Compile a route path like '/users/{id:int}' into matching machinery.

    Returns (compiled_regex, format_string, {param_name: convertor}).
    Paths not starting with '/' are treated as host patterns, in which case
    any trailing ':port' is excluded from the regex.

    Raises ValueError if a parameter name appears more than once.
    """
    is_host = not path.startswith('/')
    regex_parts = ['^']
    format_parts = []
    duplicates = set()
    convertors = {}
    cursor = 0
    for found in PARAM_REGEX.finditer(path):
        # groups('str') defaults a missing convertor suffix to 'str'.
        name, kind = found.groups('str')
        kind = kind.lstrip(':')
        assert kind in CONVERTOR_TYPES, f"Unknown path convertor '{kind}'"
        convertor = CONVERTOR_TYPES[kind]
        literal = path[cursor:found.start()]
        regex_parts.append(re.escape(literal))
        regex_parts.append(f'(?P<{name}>{convertor.regex})')
        format_parts.append(literal)
        format_parts.append('{%s}' % name)
        if name in convertors:
            duplicates.add(name)
        convertors[name] = convertor
        cursor = found.end()
    if duplicates:
        names = ', '.join(sorted(duplicates))
        ending = 's' if len(duplicates) > 1 else ''
        raise ValueError(f'Duplicated param name{ending} {names} at path {path}')
    if is_host:
        # Host patterns: match the hostname only, ignoring any ':port'.
        hostname = path[cursor:].split(':')[0]
        regex_parts.append(re.escape(hostname) + '$')
    else:
        regex_parts.append(re.escape(path[cursor:]) + '$')
        format_parts.append(path[cursor:])
    return (re.compile(''.join(regex_parts)), ''.join(format_parts), convertors)
def handle_tqdm(values: List[Dict[(str, Union[(int, str, NotebookNode)])]]) -> List[Tuple[(int, str)]]:
    """Collapse a run of tqdm progress outputs into one CellOutput block.

    The entries are ordered by their 'index'; non-empty 'data' fields are
    joined line-by-line and wrapped in an MDX <CellOutput> template. The
    returned list pairs the first entry's index with the rendered block.
    """
    ordered = sorted(values, key=lambda entry: entry['index'])
    first_index = int(ordered[0]['index'])
    body = '\n'.join(str(entry['data']) for entry in ordered if entry['data'])
    rendered = f'''<CellOutput>
{{
  `{body}`
}}
</CellOutput>
'''
    return [(first_index, rendered)]
class FileContainer():
    """Tracks a monitored log file across reads and log rotation.

    Keeps the read position, the inode, and an MD5 hash of the first line so
    that rotation/truncation can be detected when the file is reopened.
    """

    def __init__(self, filename, encoding, tail=False, doOpen=False):
        self.__filename = filename
        # If True, an incomplete trailing line is left unread until its
        # newline arrives (see readline()).
        self.waitForLineEnd = True
        self.setEncoding(encoding)
        self.__tail = tail
        self.__handler = None
        self.__pos = 0
        self.__pos4hash = 0
        self.__hash = ''
        # Re-hash the first line at most every 30 seconds.
        self.__hashNextTime = (time.time() + 30)
        handler = open(filename, 'rb')
        if doOpen:
            # Caller wants the handle kept open; skip the probe below.
            self.__handler = handler
            return
        try:
            stats = os.fstat(handler.fileno())
            self.__ino = stats.st_ino
            if stats.st_size:
                firstLine = handler.readline()
                # Only hash a *complete* first line (one ending in \r/\n).
                if (firstLine != firstLine.rstrip(b'\r\n')):
                    self.__hash = md5sum(firstLine).hexdigest()
                if tail:
                    # Tail mode: start reading at the current end of file.
                    handler.seek(0, 2)
                    self.__pos = handler.tell()
        finally:
            handler.close()
        self.inOperation = tail

    def __hash__(self):
        return hash(self.__filename)

    def __eq__(self, other):
        # Equal by identity or by filename (also comparable to plain strings).
        return ((id(self) == id(other)) or (self.__filename == (other.__filename if isinstance(other, FileContainer) else other)))

    def __repr__(self):
        return ('file-log:' + self.__filename)

    def getFileName(self):
        return self.__filename

    def getFileSize(self):
        # Prefer fstat on the open handle; fall back to a path-based stat.
        h = self.__handler
        if (h is not None):
            stats = os.fstat(h.fileno())
            return stats.st_size
        return os.path.getsize(self.__filename)

    def setEncoding(self, encoding):
        # Validates the codec name early (raises LookupError if unknown).
        codecs.lookup(encoding)
        self.__encoding = encoding

    def getEncoding(self):
        return self.__encoding

    def getHash(self):
        return self.__hash

    def getPos(self):
        return self.__pos

    def setPos(self, value):
        self.__pos = value

    def open(self, forcePos=None):
        """(Re)open the file, detect rotation, and seek to the read position.

        Returns False when there is nothing new to read; True otherwise.
        """
        h = open(self.__filename, 'rb')
        try:
            # Don't leak the descriptor into forked children.
            fd = h.fileno()
            flags = fcntl.fcntl(fd, fcntl.F_GETFD)
            fcntl.fcntl(fd, fcntl.F_SETFD, (flags | fcntl.FD_CLOEXEC))
            myHash = self.__hash
            stats = os.fstat(h.fileno())
            # Rotation suspected if the file shrank or the inode changed.
            rotflg = ((stats.st_size < self.__pos) or (stats.st_ino != self.__ino))
            if (rotflg or (not len(myHash)) or (time.time() > self.__hashNextTime)):
                myHash = ''
                firstLine = h.readline()
                # Hash only a complete first line, and throttle re-hashing.
                if (firstLine != firstLine.rstrip(b'\r\n')):
                    myHash = md5sum(firstLine).hexdigest()
                    self.__hashNextTime = (time.time() + 30)
            elif (stats.st_size == self.__pos):
                myHash = self.__hash
            if (rotflg or (myHash != self.__hash)):
                if (self.__hash != ''):
                    logSys.log(logging.MSG, 'Log rotation detected for %s, reason: %r', self.__filename, (stats.st_size, self.__pos, stats.st_ino, self.__ino, myHash, self.__hash))
                    self.__ino = stats.st_ino
                    self.__pos = 0
                self.__hash = myHash
            if (forcePos is not None):
                self.__pos = forcePos
            elif (stats.st_size <= self.__pos):
                # Nothing new to read.
                return False
            h.seek(self.__pos)
            # Hand ownership of the handle to self; prevent the finally-close.
            self.__handler = h
            h = None
        finally:
            if h:
                h.close()
                h = None
        return True

    def seek(self, offs, endLine=True):
        h = self.__handler
        if (h is None):
            self.open(offs)
            h = self.__handler
        h.seek(offs, 0)
        # Skip the remainder of a line we may have landed in the middle of.
        if (offs and endLine):
            h.readline()
        return h.tell()

    def tell(self):
        return self.__handler.tell()

    # NOTE(review): called below as FileContainer.decode_line(...) with three
    # arguments — upstream this is almost certainly @staticmethod and the
    # decorator was stripped during extraction; the unbound call still works.
    def decode_line(filename, enc, line):
        """Decode a raw line, degrading gracefully on bad characters."""
        try:
            return line.decode(enc, 'strict')
        except (UnicodeDecodeError, UnicodeEncodeError) as e:
            # Common benign case: only the trailing newline bytes failed.
            if ((e.end == len(line)) and (line[e.start] in b'\r\n')):
                return line[0:e.start].decode(enc, 'replace')
            global _decode_line_warn
            # Warn loudly only once per file; afterwards log at a low level.
            lev = 7
            if (not _decode_line_warn.get(filename, 0)):
                lev = logging.WARNING
                _decode_line_warn.set(filename, 1)
            logSys.log(lev, "Error decoding line from '%s' with '%s'.", filename, enc)
            if (logSys.getEffectiveLevel() <= lev):
                logSys.log(lev, 'Consider setting logencoding to appropriate encoding for this jail. Continuing to process line ignoring invalid characters: %r', line)
            line = line.decode(enc, 'replace')
            return line

    def readline(self, complete=True):
        """Read and decode the next line.

        Returns '' if the file is not open, None when no (complete) line is
        available yet, otherwise the line without its trailing newline.
        With complete=True, a line split across buffers is reassembled; if
        the newline has not arrived yet and waitForLineEnd is set, the read
        position is rewound so the partial line is retried later.
        """
        if (self.__handler is None):
            return ''
        b = self.__handler.readline()
        if (not b):
            return None
        bl = len(b)
        r = FileContainer.decode_line(self.getFileName(), self.getEncoding(), b)
        l = r.rstrip('\r\n')
        if complete:
            if (l == r):
                # No newline yet: keep reading until one shows up (or EOF).
                fnd = 0
                while 1:
                    r = self.__handler.readline()
                    if (not r):
                        break
                    b += r
                    bl += len(r)
                    r = FileContainer.decode_line(self.getFileName(), self.getEncoding(), b)
                    e = r.find('\n')
                    if ((e >= 0) and (e != (len(r) - 1))):
                        # Found an interior newline: return the first line and
                        # rewind past what we over-read.
                        (l, r) = (r[0:e], r[0:(e + 1)])
                        r = r.encode(self.getEncoding(), 'replace')
                        self.__handler.seek(((- bl) + len(r)), 1)
                        return l
                    l = r.rstrip('\r\n')
                    if (l != r):
                        return l
                if self.waitForLineEnd:
                    # Still no newline: rewind and signal "nothing yet".
                    self.__handler.seek((- bl), 1)
                    return None
        return l

    def close(self):
        # Remember the position so a later open() resumes where we stopped.
        if (self.__handler is not None):
            self.__pos = self.__handler.tell()
            self.__handler.close()
            self.__handler = None

    def __iter__(self):
        return self

    def __next__(self):
        line = self.readline()
        if (line is None):
            self.close()
            raise StopIteration
        return line
class TextBoxOverlay(AbstractOverlay):
    """Overlay that draws a text label in a rounded box on a component.

    The box is anchored to one of the component's corners via `align`, or
    placed at `alternate_position` when that is set, and is clamped to stay
    inside the component's bounds.
    """

    # The text to display.
    text = Str
    # Font used to render the text.
    font = KivaFont('sans-serif 12')
    # Box fill color; 'transparent' disables the fill.
    bgcolor = ColorTrait('transparent')
    # Optional fill alpha override (None = use bgcolor's own alpha).
    alpha = Union(Float(1.0), None)
    border_color = ColorTrait('dodgerblue')
    text_color = ColorTrait('black')
    border_size = Int(1)
    border_visible = Bool(True)
    # Gap (pixels) between the box and the anchor corner.
    padding = Int(5)
    # 0.0 means no wrapping limit.
    max_text_width = Float(0.0)
    # Corner to anchor to: (u)pper/(l)ower x (l)eft/(r)ight.
    align = Enum('ur', 'ul', 'll', 'lr')
    # Explicit (x, y) placement; overrides `align` when set.
    alternate_position = Any

    def overlay(self, component, gc, view_bounds=None, mode='normal'):
        """Render the text box onto `gc` over `component`."""
        if (not self.visible):
            return
        label = Label(text=self.text, font=self.font, bgcolor='transparent', color=self.text_color, max_width=self.max_text_width, margin=5)
        (width, height) = label.get_width_height(gc)
        # `align` is a two-char code: vertical then horizontal.
        (valign, halign) = self.align
        if self.alternate_position:
            # Offset away from the given point according to the alignment.
            (x, y) = self.alternate_position
            if (valign == 'u'):
                y += self.padding
            else:
                y -= (self.padding + height)
            if (halign == 'r'):
                x += self.padding
            else:
                x -= (self.padding + width)
        else:
            # Anchor inside the chosen component corner, inset by padding.
            if (valign == 'u'):
                y = ((component.y2 - self.padding) - height)
            else:
                y = (component.y + self.padding)
            if (halign == 'r'):
                x = ((component.x2 - self.padding) - width)
            else:
                x = (component.x + self.padding)
        # Clamp the box so it stays within the component's bounds.
        (x_min, y_min, x_max, y_max) = (component.x, component.y, (component.x + component.width), (component.y + component.height))
        if ((x + width) > x_max):
            x = max(x_min, (x_max - width))
        if ((y + height) > y_max):
            y = max(y_min, (y_max - height))
        elif (y < y_min):
            y = y_min
        # Apply the alpha override to a copy of the RGBA fill color.
        color = self.bgcolor_
        if (self.bgcolor != 'transparent'):
            if self.alpha:
                color = list(self.bgcolor_)
                if (len(color) == 4):
                    color[3] = self.alpha
                else:
                    color += [self.alpha]
        with gc:
            gc.translate_ctm(x, y)
            gc.set_line_width(self.border_size)
            gc.set_stroke_color(self.border_color_)
            gc.set_fill_color(color)
            if self.border_visible:
                # Trace a rounded rectangle in local (translated) coordinates.
                x = y = 0
                end_radius = 8.0
                gc.begin_path()
                gc.move_to((x + end_radius), y)
                gc.arc_to((x + width), y, (x + width), (y + end_radius), end_radius)
                gc.arc_to((x + width), (y + height), ((x + width) - end_radius), (y + height), end_radius)
                gc.arc_to(x, (y + height), x, y, end_radius)
                gc.arc_to(x, y, ((x + width) + end_radius), y, end_radius)
                gc.draw_path()
            label.draw(gc)
class TestABCWeighting(object):
    """Checks the analog A/B/C weighting filters against tolerance limits."""

    def test_invalid_params(self):
        # Only 'A', 'B' or 'C' are valid curve names.
        with pytest.raises(ValueError):
            ABC_weighting('D')

    def test_freq_resp(self):
        for curve in {'A', 'B', 'C'}:
            n_points = len(responses[curve])
            freqs = frequencies[:n_points]
            # Tolerance band around the nominal response.
            band_hi = responses[curve] + upper_limits[:n_points]
            band_lo = responses[curve] + lower_limits[:n_points]
            z, p, k = ABC_weighting(curve)
            w, h = signal.freqs_zpk(z, p, k, 2 * pi * freqs)
            levels = 20 * np.log10(abs(h))
            if mpl:
                # Optional visual check when matplotlib is available.
                plt.figure(curve)
                plt.title('{}-weighting limits (Type 0)'.format(curve))
                plt.semilogx(freqs, levels, alpha=0.7, label='analog')
                plt.semilogx(freqs, band_hi, 'r:', alpha=0.7)
                plt.semilogx(freqs, band_lo, 'r:', alpha=0.7)
                plt.grid(True, color='0.7', linestyle='-', which='major')
                plt.grid(True, color='0.9', linestyle='-', which='minor')
                plt.legend()
            assert all(np.less_equal(levels, band_hi))
            assert all(np.greater_equal(levels, band_lo))
def get_hierarchy():
    """Build four netcdf test sources and the xarray merge of all of them.

    Returns (target, a1, a2, b1, b2) where target is the hierarchical merge
    of the per-variable merges of the two time-slices.
    """

    def build(variable, times):
        # All sources share the same dims; only the variable and the
        # forecast_time coordinate values differ.
        return load_source(
            'climetlab-testing',
            kind='netcdf',
            dims=['lat', 'lon', 'forecast_time'],
            variables=[variable],
            coord_values=dict(forecast_time=times),
        )

    a1 = build('a', [1, 3])
    a2 = build('a', [2, 4])
    b1 = build('b', [1, 3])
    b2 = build('b', [2, 4])
    merged_a = xr.merge([a1.to_xarray(), a2.to_xarray()])
    merged_b = xr.merge([b1.to_xarray(), b2.to_xarray()])
    target = xr.merge([merged_a, merged_b])
    return (target, a1, a2, b1, b2)
class OptionPlotoptionsTilemapStatesHoverMarker(Options):
    """Wrapper for Highcharts plotOptions.tilemap.states.hover.marker options.

    NOTE(review): every option below appears as two same-named defs (a
    getter returning the default and a setter writing via _config). Upstream
    these are almost certainly @property / @<name>.setter pairs whose
    decorators were stripped during extraction — as written, the second def
    shadows the first. Confirm against the original source.
    """

    def enabled(self):
        # Default: None (inherit from the series marker settings).
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Default threshold: 2.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker outline color: white.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
class TaskStatusAllMetCondition(Condition):
    """Condition that is met only when every wrapped TaskStatusCondition is met."""

    def __init__(self, condition_list: List[TaskStatusCondition]):
        # Aggregate all sub-conditions' expected event keys so this condition
        # is triggered by any of their events.
        events = []
        for c in condition_list:
            events.extend(c.expect_event_keys)
        super().__init__(expect_event_keys=events)
        self.condition_list = condition_list

    def is_met(self, event: Event, context: Context) -> bool:
        """Return True iff every sub-condition's task has its expected status."""
        namespace = context.workflow.namespace
        workflow_name = context.workflow.name
        # The task name is the suffix of an event key after
        # '<prefix>.<namespace>.<workflow_name>.'. The prefix length is the
        # same for every sub-condition, so compute it once
        # (hoisted out of the loop — it used to be recomputed per iteration).
        prefix = TaskStatusChangedEvent.event_key_prefix()
        index = len(f'{prefix}.{namespace}.{workflow_name}')
        for condition in self.condition_list:
            event_key = condition.expect_event_keys[0]
            task_name = event_key[(index + 1):]
            task_status = context.get_task_status(task_name=task_name)
            if (task_status != condition.expect_status):
                return False
        return True
def test_missing_lat_lon_attributes(tmpdir):
    """load_icgem_gdf must raise IOError when a lat/lon header attribute is missing.

    For each coordinate attribute, write a copy of the sample file whose
    attribute-names header line has that attribute renamed to 'corrupt'.
    """
    source = os.path.join(TEST_DATA_DIR, 'icgem-sample.gdf')
    for attribute in ('longitude', 'latitude'):
        corrupt = str(tmpdir.join('missing_' + attribute + '_attribute.gdf'))
        with open(source) as gdf_file, open(corrupt, 'w') as corrupt_gdf:
            for line in gdf_file:
                # The attribute-names line is the one mentioning both coords.
                if ('longitude' in line) and ('latitude' in line):
                    corrupt_gdf.write(line.replace(attribute, 'corrupt'))
                else:
                    corrupt_gdf.write(line)
        with raises(IOError):
            load_icgem_gdf(corrupt)
class TestDBMigration(unittest.TestCase): def setUp(self) -> None: self.file = 'test.db' self.url = 'sqlite:///{}'.format(self.file) def tearDown(self) -> None: if os.path.exists(self.file): os.remove(self.file) def test_init_db(self): init_db(url=self.url) self.assertTrue(table_exists(url=self.url, table_name='namespace')) self.assertTrue(table_exists(url=self.url, table_name='workflow')) self.assertTrue(table_exists(url=self.url, table_name='workflow_snapshot')) self.assertTrue(table_exists(url=self.url, table_name='workflow_schedule')) self.assertTrue(table_exists(url=self.url, table_name='workflow_event_trigger')) self.assertTrue(table_exists(url=self.url, table_name='workflow_execution')) self.assertTrue(table_exists(url=self.url, table_name='task_execution')) self.assertTrue(table_exists(url=self.url, table_name='workflow_execution_state')) self.assertTrue(table_exists(url=self.url, table_name='workflow_state'))
def _font_viewer(exp):
    """Interactive font browser: cycle fonts/sizes/styles via keyboard or touch.

    Runs inside an initialized expyriment experiment `exp` until the user
    presses return (or touches the center region).
    """
    all_fonts = list(list_fonts().keys())

    def info_screen():
        # Show the key/touch help and wait for any key press.
        stimuli.TextScreen(heading='Expyriment Font Viewer', text='\narrow keys left/right -- Switch font type\narrow keys up/down -- Switch font size\n i -- Switch italic\n b -- Switch bold\n c -- Change text\n h -- Help\n return -- Quit\n\n\n [Touch screen]\nclick left/right side -- Switch font type\nclick up/down side -- Switch font size\nclick center -- Quit\n ', text_font='freemono', text_bold=True, text_justification=0).present()
        exp.keyboard.wait()

    # Pangram plus alphabet sample used when the user enters no custom text.
    default_text = u"The quick brown fox jumps over the lazy dog.\nABCDEFGHIJKLMNOPQRSTUVWXYZ AOU\nabcdefghijklmnopqrstuvwxyz aou\n.:,;eee(*!?')"
    text = default_text
    size = 14
    font_id = 0
    italic = False
    bold = False
    quest = io.TextInput(message='Please enter text: (Keep empty for default text)', length=35)
    mouse = io.Mouse(show_cursor=True)
    # Five invisible touch regions (center, left, right, top, bottom) that
    # mirror the keyboard controls on touch screens.
    bs = ((exp.screen.size[0] // 3.5), (exp.screen.size[1] // 3.5))
    cl = (20, 20, 20)
    rects = [stimuli.Rectangle(size=bs, position=[0, 0], colour=cl), stimuli.Rectangle(size=bs, position=[int(((bs[0] - exp.screen.size[0]) / 2.2)), 0], colour=cl), stimuli.Rectangle(size=bs, position=[int(((exp.screen.size[0] - bs[0]) / 2.2)), 0], colour=cl), stimuli.Rectangle(size=bs, position=[0, int(((bs[1] - exp.screen.size[1]) / 2.2))], colour=cl), stimuli.Rectangle(size=bs, position=[0, int(((exp.screen.size[1] - bs[1]) / 2.2))], colour=cl)]
    # Region index -> equivalent key (same order as `rects`).
    rect_key_mapping = [constants.K_RETURN, constants.K_LEFT, constants.K_RIGHT, constants.K_UP, constants.K_DOWN]
    info_screen()
    while True:
        # Render the sample text with the current font settings.
        font_str = all_fonts[font_id]
        font_description = "font '{0}', size {1}".format(font_str, size)
        if italic:
            font_description += ', italic'
        if bold:
            font_description += ', bold'
        canvas = stimuli.BlankScreen()
        for r in rects:
            r.plot(canvas)
        try:
            stimuli.TextScreen(heading=font_description, text=text, text_font=font_str, text_size=size, text_justification=0, text_italic=italic, text_bold=bold, text_colour=(255, 255, 255)).plot(canvas)
        except Exception:
            # Some fonts cannot render the sample; show an apology instead.
            stimuli.TextLine(text=("Sorry, I can't display the text with " + '{0}'.format(font_description)), text_colour=constants.C_EXPYRIMENT_ORANGE).plot(canvas)
        canvas.present()
        mouse.clear()
        exp.keyboard.clear()
        # Poll until either a key press or a touch inside one of the regions.
        while True:
            key = exp.keyboard.check()
            if (mouse.get_last_button_down_event() is not None):
                for (cnt, r) in enumerate(rects):
                    if r.overlapping_with_position(mouse.position):
                        key = rect_key_mapping[cnt]
                        break
            if (key is not None):
                break
        if (key == constants.K_RETURN):
            break
        elif (key == constants.K_UP):
            size += 2
        elif (key == constants.K_DOWN):
            size -= 2
        elif (key == constants.K_LEFT):
            # Wrap around the font list in both directions.
            font_id -= 1
            if (font_id < 0):
                font_id = (len(all_fonts) - 1)
        elif (key == constants.K_RIGHT):
            font_id += 1
            if (font_id >= len(all_fonts)):
                font_id = 0
        elif (key == constants.K_i):
            italic = (not italic)
        elif (key == constants.K_b):
            bold = (not bold)
        elif (key == constants.K_c):
            # Ask for replacement text; empty input restores the default.
            text = quest.get()
            if (len(text) <= 0):
                text = default_text
        else:
            # Any other key (e.g. 'h'): redisplay the help screen.
            info_screen()
    mouse.hide_cursor()
def combineCsv(csvFolder, fname, dut_list):
    """Merge every per-run CSV in `csvFolder` into 'Combined_Results-<fname>.csv'.

    The first CSV found defines the column layout (job name + stat columns);
    values for each job/stat are collected across all files, combined via
    get_combined_stats(), and written out with a header naming the DUT
    servers in `dut_list`.
    """
    csvName = ('Combined_Results-%s.csv' % fname)
    csvPath = os.path.join(csvFolder, csvName)
    stats = OrderedDict()
    # Remove a stale combined file so the glob below does not pick it up.
    try:
        os.remove(csvPath)
    except OSError:
        pass
    csvList = glob.glob(os.path.join(csvFolder, '*.csv'))
    # Discover the column layout from the first file.
    # BUG FIX: the original opened this file via csv.reader(open(...)) and
    # never closed it; use a context manager instead.
    with open(csvList[0]) as first_fd:
        reader = csv.reader(first_fd)
        col_names = next(reader)
        for row in reader:
            stats[row[0]] = OrderedDict()
            for c in col_names[1:]:
                stats[row[0]][c] = []
    # Accumulate every file's values (including the first, re-read here).
    for c in csvList:
        with open(c) as fd:
            creader = csv.reader(fd)
            next(creader)  # skip the header row
            for row in creader:
                for i in range(1, len(row)):
                    stats[row[0]][col_names[i]].append(row[i])
    combined_stats = get_combined_stats(stats)
    with open(csvPath, 'a') as csv_out:
        writer = csv.writer(csv_out)
        server_list = ';'.join([dut.serverName for dut in dut_list])
        writer.writerow(([fname] + [server_list]))
        stats_headers = combined_stats[list(combined_stats.keys())[0]].keys()
        writer.writerow((['Jobname'] + list(stats_headers)))
        for job in combined_stats.keys():
            row = [job]
            for stat in combined_stats[job].keys():
                row.append(combined_stats[job][stat])
            writer.writerow(row)
# NOTE(review): the bare call below looks like a mangled decorator line —
# the '@' appears to have been stripped during extraction (likely
# `@no_grad()` / `@torch.no_grad()`). Confirm against the original source.
_grad()
def test_two_controlnets_same_name(unet: SD1UNet) -> None:
    # Injecting a second controlnet adapter under an already-used name must
    # be rejected with an AssertionError carrying the duplicate name.
    SD1ControlnetAdapter(unet, name='cnx').inject()
    cn2 = SD1ControlnetAdapter(unet, name='cnx')
    with pytest.raises(AssertionError) as exc:
        cn2.inject()
    assert ('Controlnet named cnx is already injected' in str(exc.value))
def test_unsupported_split_enz(single_cell_roff_grid):
    """xtgeo_zcorn must reject grids whose split_enz holds an unsupported split type."""
    grid = single_cell_roff_grid
    # Split type 2 is not a supported value; one entry per corner of the cell.
    grid.split_enz = bytes([2] * 8)
    grid.zvals = np.ones(16, dtype=np.float32)
    with pytest.raises(ValueError, match='split type'):
        grid.xtgeo_zcorn()
class GTHE4LiteSATAPHY(Module):
    """GTHE4 (UltraScale+) transceiver PHY wrapper for LiteSATA.

    Instantiates a GTHE4_CHANNEL primitive together with TX/RX initialization
    FSMs, DRP access, and clock-domain-crossing glue. Exposes:
      - stream endpoints ``sink``/``source`` (sata_tx/sata_rx domains),
      - SATA OOB signaling (COMINIT/COMWAKE strobes/acks, sys domain),
      - CPLL and user-clock signals for the surrounding clocking logic.

    :param pads: transceiver diff-pair pads (rx_p/rx_n/tx_p/tx_n).
    :param gen: SATA generation, one of 'gen1' | 'gen2' | 'gen3'.
    :param clk_freq: system clock frequency, passed to the init FSMs.
    :param data_width: fabric datapath width, 16 or 32 bits.
    :param tx_buffer_enable / rx_buffer_enable: use the primitive's elastic
        buffers instead of phase alignment.
    :param use_gtgrefclk: source the reference clock from GTGREFCLK (fabric)
        rather than GTREFCLK0.
    """

    def __init__(self, pads, gen, clk_freq, data_width=16, tx_buffer_enable=False, rx_buffer_enable=False, use_gtgrefclk=True):
        # Only 16- and 32-bit datapaths are covered by the parameter tables below.
        assert (data_width in [16, 32])
        self.data_width = data_width

        # --- Control/status signals (sys clock domain) ---
        self.ready = Signal()  # high once TX and RX init FSMs are both done
        self.tx_idle = Signal()
        self.tx_polarity = Signal()
        self.tx_cominit_stb = Signal()
        self.tx_cominit_ack = Signal()
        self.tx_comwake_stb = Signal()
        self.tx_comwake_ack = Signal()
        self.rx_idle = Signal()
        self.rx_cdrhold = Signal()
        self.rx_polarity = Signal()
        self.rx_cominit_stb = Signal()
        self.rx_comwake_stb = Signal()

        # --- Datapath stream endpoints ---
        self.sink = stream.Endpoint(phy_description(data_width))    # TX data in
        self.source = stream.Endpoint(phy_description(data_width))  # RX data out

        # --- CPLL ---
        self.refclk = Signal()
        self.cplllock = Signal()
        self.cpllpd = Signal()
        self.cpllreset = Signal()

        # --- RX 8b/10b status, one bit per datapath byte ---
        self.rxctrl0 = Signal((data_width // 8))
        self.rxctrl1 = Signal((data_width // 8))
        self.rxctrl2 = Signal((data_width // 8))
        self.rxctrl3 = Signal((data_width // 8))
        self.rxcharisk = Signal((data_width // 8))
        self.rxdisperr = Signal((data_width // 8))
        self.rxnotintable = Signal((data_width // 8))
        # NOTE(review): this local `rxdisperr` is driven from rxctrl1 below, but
        # a second, undriven local of the same name is created in the CDC
        # section and that one feeds the MultiReg into self.rxdisperr — looks
        # unintended; confirm against upstream.
        rxdisperr = Signal((data_width // 8))
        self.comb += [self.rxcharisk.eq(self.rxctrl0), rxdisperr.eq(self.rxctrl1)]
        self.rxdata = Signal(data_width)
        self.rxoutclk = Signal()
        self.rxusrclk = Signal()
        self.rxusrclk2 = Signal()
        self.rxcominitdet = Signal()
        self.rxcomwakedet = Signal()

        # --- TX datapath / OOB signals ---
        self.txcharisk = Signal((data_width // 8))
        self.txdata = Signal(data_width)
        self.txoutclk = Signal()
        self.txusrclk = Signal()
        self.txusrclk2 = Signal()
        self.txelecidle = Signal(reset=1)  # electrical idle asserted at reset
        self.txcomfinish = Signal()
        self.txcominit = Signal()
        self.txcomwake = Signal()
        self.rxpd = Signal()
        self.txpd = Signal()

        # Output divider / programmable divider settings per SATA generation.
        div_config = {'gen1': 4, 'gen2': 2, 'gen3': 1}
        rxout_div = div_config[gen]
        txout_div = div_config[gen]
        progdiv = {'gen1': 40.0, 'gen2': 20.0, 'gen3': 10.0}
        # 32-bit datapaths halve the user clock, hence the doubled divider.
        tx_progdiv_cfg = {16: progdiv[gen], 32: (2.0 * progdiv[gen])}[data_width]
        rx_progdiv_cfg = {16: progdiv[gen], 32: (2.0 * progdiv[gen])}[data_width]

        # --- TX/RX initialization FSMs, both gated on CPLL lock ---
        self.submodules.tx_init = tx_init = GTHTXInit(clk_freq, buffer_enable=tx_buffer_enable)
        self.comb += tx_init.plllock.eq(self.cplllock)
        self.comb += self.cpllreset.eq(tx_init.pllreset)
        self.submodules.rx_init = rx_init = GTHRXInit(clk_freq, buffer_enable=rx_buffer_enable)
        self.comb += rx_init.plllock.eq(self.cplllock)
        self.comb += self.ready.eq((tx_init.done & rx_init.done))

        # --- OOB COMINIT/COMWAKE handshaking (sys domain) ---
        self.comb += [self.txelecidle.eq((self.tx_idle | self.txpd)), self.tx_cominit_ack.eq((self.tx_cominit_stb & self.txcomfinish)), self.tx_comwake_ack.eq((self.tx_comwake_stb & self.txcomfinish)), self.rx_cominit_stb.eq(self.rxcominitdet), self.rx_comwake_stb.eq(self.rxcomwakedet)]
        # Turn the level strobes into single-cycle pulses for the primitive.
        self.submodules += _RisingEdge(self.tx_cominit_stb, self.txcominit)
        self.submodules += _RisingEdge(self.tx_comwake_stb, self.txcomwake)

        # --- Stream <-> transceiver datapath (sata_rx / sata_tx domains) ---
        self.sync.sata_rx += [self.source.valid.eq(1), self.source.charisk.eq(self.rxcharisk), self.source.data.eq(self.rxdata)]
        self.sync.sata_tx += [self.txcharisk.eq(self.sink.charisk), self.txdata.eq(self.sink.data), self.sink.ready.eq(1)]

        # --- Clock-domain crossing: sys -> sata_tx ---
        txpd = Signal()
        txelecidle = Signal(reset=1)
        txcominit = Signal()
        txcomwake = Signal()
        self.specials += [MultiReg(self.txpd, txpd, 'sata_tx'), MultiReg(self.txelecidle, txelecidle, 'sata_tx')]
        self.submodules += [_PulseSynchronizer(self.txcominit, 'sys', txcominit, 'sata_tx'), _PulseSynchronizer(self.txcomwake, 'sys', txcomwake, 'sata_tx')]
        # --- Clock-domain crossing: sata_tx -> sys ---
        txcomfinish = Signal()
        self.submodules += _PulseSynchronizer(txcomfinish, 'sata_tx', self.txcomfinish, 'sys')
        # --- Clock-domain crossing: primitive outputs -> sys ---
        rxcominitdet = Signal()
        rxcomwakedet = Signal()
        rxdisperr = Signal((data_width // 8))
        rxnotintable = Signal((data_width // 8))
        self.specials += [MultiReg(rxcominitdet, self.rxcominitdet, 'sys'), MultiReg(rxcomwakedet, self.rxcomwakedet, 'sys'), MultiReg(rxdisperr, self.rxdisperr, 'sys'), MultiReg(rxnotintable, self.rxnotintable, 'sys')]

        # --- Dynamic Reconfiguration Port access (muxed) ---
        self.drp = DRPInterface()
        self.submodules.drp_mux = drp_mux = DRPMux()
        drp_mux.add_interface(self.drp)

        # Placeholder for primitive outputs that are left unconnected.
        class Open(Signal):
            pass

        rxphaligndone = Signal()

        # --- GTHE4_CHANNEL static parameters ---
        # The raw values below appear to come from the Xilinx UltraScale+ GTH
        # wizard output for SATA operation — confirm against the wizard project
        # (see UG576) before editing individual values.
        self.gth_params = dict(p_ACJTAG_DEBUG_MODE=0,
            p_ACJTAG_MODE=0, p_ACJTAG_RESET=0, p_ADAPT_CFG0=4096, p_ADAPT_CFG1=51200, p_ADAPT_CFG2=0, p_ALIGN_COMMA_DOUBLE='FALSE', p_ALIGN_COMMA_ENABLE=1023, p_ALIGN_COMMA_WORD=(2 if (data_width == 16) else 4), p_ALIGN_MCOMMA_DET='TRUE', p_ALIGN_MCOMMA_VALUE=643, p_ALIGN_PCOMMA_DET='TRUE', p_ALIGN_PCOMMA_VALUE=380, p_A_RXOSCALRESET=0, p_A_RXPROGDIVRESET=0, p_A_RXTERMINATION=1, p_A_TXDIFFCTRL=12, p_A_TXPROGDIVRESET=0, p_CAPBYPASS_FORCE=0, p_CBCC_DATA_SOURCE_SEL='DECODED', p_CDR_SWAP_MODE_EN=0, p_CFOK_PWRSVE_EN=1, p_CHAN_BOND_KEEP_ALIGN='FALSE', p_CHAN_BOND_MAX_SKEW=1, p_CHAN_BOND_SEQ_1_1=0, p_CHAN_BOND_SEQ_1_2=0, p_CHAN_BOND_SEQ_1_3=0, p_CHAN_BOND_SEQ_1_4=0, p_CHAN_BOND_SEQ_1_ENABLE=15, p_CHAN_BOND_SEQ_2_1=0, p_CHAN_BOND_SEQ_2_2=0, p_CHAN_BOND_SEQ_2_3=0, p_CHAN_BOND_SEQ_2_4=0, p_CHAN_BOND_SEQ_2_ENABLE=15, p_CHAN_BOND_SEQ_2_USE='FALSE', p_CHAN_BOND_SEQ_LEN=1, p_CH_HSPMUX=15420, p_CKCAL1_CFG_0=49344, p_CKCAL1_CFG_1=20672, p_CKCAL1_CFG_2=10, p_CKCAL1_CFG_3=0, p_CKCAL2_CFG_0=49344, p_CKCAL2_CFG_1=32960, p_CKCAL2_CFG_2=0, p_CKCAL2_CFG_3=0, p_CKCAL2_CFG_4=0, p_CKCAL_RSVD0=(128 if tx_buffer_enable else 0), p_CKCAL_RSVD1=1024, p_CLK_CORRECT_USE='FALSE', p_CLK_COR_KEEP_IDLE='FALSE', p_CLK_COR_MAX_LAT=(20 if (not rx_buffer_enable) else {16: 6, 32: 12}[data_width]), p_CLK_COR_MIN_LAT=(18 if (not rx_buffer_enable) else {16: 4, 32: 8}[data_width]), p_CLK_COR_PRECEDENCE='TRUE', p_CLK_COR_REPEAT_WAIT=0, p_CLK_COR_SEQ_1_1=256, p_CLK_COR_SEQ_1_2=256, p_CLK_COR_SEQ_1_3=256, p_CLK_COR_SEQ_1_4=256, p_CLK_COR_SEQ_1_ENABLE=15, p_CLK_COR_SEQ_2_1=256, p_CLK_COR_SEQ_2_2=256, p_CLK_COR_SEQ_2_3=256, p_CLK_COR_SEQ_2_4=256, p_CLK_COR_SEQ_2_ENABLE=15, p_CLK_COR_SEQ_2_USE='FALSE', p_CLK_COR_SEQ_LEN=1, p_CPLL_CFG0=506, p_CPLL_CFG1=35, p_CPLL_CFG2=2, p_CPLL_CFG3=0, p_CPLL_FBDIV=5, p_CPLL_FBDIV_45=4, p_CPLL_INIT_CFG0=690, p_CPLL_LOCK_CFG=488, p_CPLL_REFCLK_DIV=1, p_CTLE3_OCAP_EXT_CTRL=0, p_CTLE3_OCAP_EXT_EN=0, p_DDI_CTRL=0, p_DDI_REALIGN_WAIT=15, p_DEC_MCOMMA_DETECT='TRUE', p_DEC_PCOMMA_DETECT='TRUE',
            p_DEC_VALID_COMMA_ONLY='TRUE', p_DELAY_ELEC=0, p_DMONITOR_CFG0=0, p_DMONITOR_CFG1=0, p_ES_CLK_PHASE_SEL=0, p_ES_CONTROL=0, p_ES_ERRDET_EN='FALSE', p_ES_EYE_SCAN_EN='FALSE', p_ES_HORZ_OFFSET=0, p_ES_PRESCALE=0, p_ES_QUALIFIER0=0, p_ES_QUALIFIER1=0, p_ES_QUALIFIER2=0, p_ES_QUALIFIER3=0, p_ES_QUALIFIER4=0, p_ES_QUALIFIER5=0, p_ES_QUALIFIER6=0, p_ES_QUALIFIER7=0, p_ES_QUALIFIER8=0, p_ES_QUALIFIER9=0, p_ES_QUAL_MASK0=0, p_ES_QUAL_MASK1=0, p_ES_QUAL_MASK2=0, p_ES_QUAL_MASK3=0, p_ES_QUAL_MASK4=0, p_ES_QUAL_MASK5=0, p_ES_QUAL_MASK6=0, p_ES_QUAL_MASK7=0, p_ES_QUAL_MASK8=0, p_ES_QUAL_MASK9=0, p_ES_SDATA_MASK0=0, p_ES_SDATA_MASK1=0, p_ES_SDATA_MASK2=0, p_ES_SDATA_MASK3=0, p_ES_SDATA_MASK4=0, p_ES_SDATA_MASK5=0, p_ES_SDATA_MASK6=0, p_ES_SDATA_MASK7=0, p_ES_SDATA_MASK8=0, p_ES_SDATA_MASK9=0, p_EYE_SCAN_SWAP_EN=0, p_FTS_DESKEW_SEQ_ENABLE=15, p_FTS_LANE_DESKEW_CFG=15, p_FTS_LANE_DESKEW_EN='FALSE', p_GEARBOX_MODE=0, p_ISCAN_CK_PH_SEL2=0, p_LOCAL_MASTER=1, p_LPBK_BIAS_CTRL=4, p_LPBK_EN_RCAL_B=0, p_LPBK_EXT_RCAL=8, p_LPBK_IND_CTRL0=0, p_LPBK_IND_CTRL1=0, p_LPBK_IND_CTRL2=0, p_LPBK_RG_CTRL=14, p_OOBDIVCTL=1, p_OOB_PWRUP=1, p_PCI3_AUTO_REALIGN='OVR_1K_BLK', p_PCI3_PIPE_RX_ELECIDLE=0, p_PCI3_RX_ASYNC_EBUF_BYPASS=0, p_PCI3_RX_ELECIDLE_EI2_ENABLE=0, p_PCI3_RX_ELECIDLE_H2L_COUNT=0, p_PCI3_RX_ELECIDLE_H2L_DISABLE=0, p_PCI3_RX_ELECIDLE_HI_COUNT=0, p_PCI3_RX_ELECIDLE_LP4_DISABLE=0, p_PCI3_RX_FIFO_DISABLE=0, p_PCIE3_CLK_COR_EMPTY_THRSH=0, p_PCIE3_CLK_COR_FULL_THRSH=16, p_PCIE3_CLK_COR_MAX_LAT=4, p_PCIE3_CLK_COR_MIN_LAT=0, p_PCIE3_CLK_COR_THRSH_TIMER=8, p_PCIE_BUFG_DIV_CTRL=4096, p_PCIE_PLL_SEL_MODE_GEN12=0, p_PCIE_PLL_SEL_MODE_GEN3=3, p_PCIE_PLL_SEL_MODE_GEN4=2, p_PCIE_RXPCS_CFG_GEN3=2725, p_PCIE_RXPMA_CFG=10250, p_PCIE_TXPCS_CFG_GEN3=11428, p_PCIE_TXPMA_CFG=10250, p_PCS_PCIE_EN='FALSE', p_PCS_RSVD0=0, p_PD_TRANS_TIME_FROM_P2=60, p_PD_TRANS_TIME_NONE_P2=25, p_PD_TRANS_TIME_TO_P2=100, p_PREIQ_FREQ_BST=0, p_PROCESS_PAR=2, p_RATE_SW_USE_DRP=1, p_RCLK_SIPO_DLY_ENB=0, p_RCLK_SIPO_INV_EN=0,
            p_RESET_POWERSAVE_DISABLE=0, p_RTX_BUF_CML_CTRL=2, p_RTX_BUF_TERM_CTRL=0, p_RXBUFRESET_TIME=3, p_RXBUF_ADDR_MODE='FAST', p_RXBUF_EIDLE_HI_CNT=8, p_RXBUF_EIDLE_LO_CNT=0, p_RXBUF_EN=('TRUE' if rx_buffer_enable else 'FALSE'), p_RXBUF_RESET_ON_CB_CHANGE='TRUE', p_RXBUF_RESET_ON_COMMAALIGN='FALSE', p_RXBUF_RESET_ON_EIDLE='FALSE', p_RXBUF_RESET_ON_RATE_CHANGE='TRUE', p_RXBUF_THRESH_OVFLW=(0 if (not rx_buffer_enable) else {16: 61, 32: 57}[data_width]), p_RXBUF_THRESH_OVRD=('TRUE' if rx_buffer_enable else 'FALSE'), p_RXBUF_THRESH_UNDFLW=(4 if (not rx_buffer_enable) else {16: 1, 32: 3}[data_width]))
        # RX CDR / equalization parameters; CDR configs are per-generation.
        self.gth_params.update(p_RXCDRFREQRESET_TIME=1, p_RXCDRPHRESET_TIME=1, p_RXCDR_CFG0=3, p_RXCDR_CFG0_GEN3=3, p_RXCDR_CFG1=0, p_RXCDR_CFG1_GEN3=0, p_RXCDR_CFG2=({16: 419, 32: 548}[data_width] if (gen == 'gen1') else ({16: 436, 32: 564}[data_width] if (gen == 'gen2') else {16: 452, 32: 580}[data_width])), p_RXCDR_CFG2_GEN2=({16: 547, 32: 548}[data_width] if (gen == 'gen1') else ({16: 564, 32: 564}[data_width] if (gen == 'gen2') else {16: 580, 32: 580}[data_width])), p_RXCDR_CFG2_GEN3=({16: 547, 32: 548}[data_width] if (gen == 'gen1') else ({16: 564, 32: 564}[data_width] if (gen == 'gen2') else {16: 580, 32: 580}[data_width])), p_RXCDR_CFG2_GEN4=356, p_RXCDR_CFG3=18, p_RXCDR_CFG3_GEN2=18, p_RXCDR_CFG3_GEN3=18, p_RXCDR_CFG3_GEN4=18, p_RXCDR_CFG4=23798, p_RXCDR_CFG4_GEN3=23798, p_RXCDR_CFG5=46187, p_RXCDR_CFG5_GEN3=5227, p_RXCDR_FR_RESET_ON_EIDLE=0, p_RXCDR_HOLD_DURING_EIDLE=0, p_RXCDR_LOCK_CFG0=8705, p_RXCDR_LOCK_CFG1=40959, p_RXCDR_LOCK_CFG2=30659, p_RXCDR_LOCK_CFG3=1, p_RXCDR_LOCK_CFG4=0, p_RXCDR_PH_RESET_ON_EIDLE=0, p_RXCFOK_CFG0=0, p_RXCFOK_CFG1=32789, p_RXCFOK_CFG2=686, p_RXCKCAL1_IQ_LOOP_RST_CFG=4, p_RXCKCAL1_I_LOOP_RST_CFG=4, p_RXCKCAL1_Q_LOOP_RST_CFG=4, p_RXCKCAL2_DX_LOOP_RST_CFG=4, p_RXCKCAL2_D_LOOP_RST_CFG=4, p_RXCKCAL2_S_LOOP_RST_CFG=4, p_RXCKCAL2_X_LOOP_RST_CFG=4, p_RXDFELPMRESET_TIME=15, p_RXDFELPM_KL_CFG0=0, p_RXDFELPM_KL_CFG1=41186, p_RXDFELPM_KL_CFG2=256,
            p_RXDFE_CFG0=2560, p_RXDFE_CFG1=0, p_RXDFE_GC_CFG0=0, p_RXDFE_GC_CFG1=32768, p_RXDFE_GC_CFG2=65504, p_RXDFE_H2_CFG0=0, p_RXDFE_H2_CFG1=2, p_RXDFE_H3_CFG0=0, p_RXDFE_H3_CFG1=32770, p_RXDFE_H4_CFG0=0, p_RXDFE_H4_CFG1=32770, p_RXDFE_H5_CFG0=0, p_RXDFE_H5_CFG1=32770, p_RXDFE_H6_CFG0=0, p_RXDFE_H6_CFG1=32770, p_RXDFE_H7_CFG0=0, p_RXDFE_H7_CFG1=32770, p_RXDFE_H8_CFG0=0, p_RXDFE_H8_CFG1=32770, p_RXDFE_H9_CFG0=0, p_RXDFE_H9_CFG1=32770, p_RXDFE_HA_CFG0=0, p_RXDFE_HA_CFG1=32770, p_RXDFE_HB_CFG0=0, p_RXDFE_HB_CFG1=32770, p_RXDFE_HC_CFG0=0, p_RXDFE_HC_CFG1=32770, p_RXDFE_HD_CFG0=0, p_RXDFE_HD_CFG1=32770, p_RXDFE_HE_CFG0=0, p_RXDFE_HE_CFG1=32770, p_RXDFE_HF_CFG0=0, p_RXDFE_HF_CFG1=32770, p_RXDFE_KH_CFG0=0, p_RXDFE_KH_CFG1=32768, p_RXDFE_KH_CFG2=9747, p_RXDFE_KH_CFG3=16668, p_RXDFE_OS_CFG0=0, p_RXDFE_OS_CFG1=32770, p_RXDFE_PWR_SAVING=1, p_RXDFE_UT_CFG0=0, p_RXDFE_UT_CFG1=3, p_RXDFE_UT_CFG2=0, p_RXDFE_VP_CFG0=0, p_RXDFE_VP_CFG1=32819, p_RXDLY_CFG=16, p_RXDLY_LCFG=48, p_RXELECIDLE_CFG='SIGCFG_4', p_RXGBOX_FIFO_INIT_RD_ADDR=4, p_RXGEARBOX_EN='FALSE', p_RXISCANRESET_TIME=1, p_RXLPM_CFG=0, p_RXLPM_GC_CFG=32768, p_RXLPM_KH_CFG0=0, p_RXLPM_KH_CFG1=2, p_RXLPM_OS_CFG0=0, p_RXLPM_OS_CFG1=32770, p_RXOOB_CFG=6, p_RXOOB_CLK_CFG='PMA', p_RXOSCALRESET_TIME=3, p_RXOUT_DIV=rxout_div, p_RXPCSRESET_TIME=3, p_RXPHBEACON_CFG=0, p_RXPHDLY_CFG=8304, p_RXPHSAMP_CFG=8448, p_RXPHSLIP_CFG=39219, p_RXPH_MONITOR_SEL=0, p_RXPI_AUTO_BW_SEL_BYPASS=0, p_RXPI_CFG0=512, p_RXPI_CFG1=253, p_RXPI_LPM=0, p_RXPI_SEL_LC=0, p_RXPI_STARTCODE=0, p_RXPI_VREFSEL=0, p_RXPMACLK_SEL='DATA', p_RXPMARESET_TIME=3, p_RXPRBS_ERR_LOOPBACK=0, p_RXPRBS_LINKACQ_CNT=15, p_RXREFCLKDIV2_SEL=0, p_RXSLIDE_AUTO_WAIT=7, p_RXSLIDE_MODE='PCS', p_RXSYNC_MULTILANE=0, p_RXSYNC_OVRD=0, p_RXSYNC_SKIP_DA=(1 if (gen == 'gen1') else 0), p_RX_AFE_CM_EN=0, p_RX_BIAS_CFG0=5460, p_RX_BUFFER_CFG=0, p_RX_CAPFF_SARC_ENB=0, p_RX_CLK25_DIV=6, p_RX_CLKMUX_EN=1, p_RX_CLK_SLIP_OVRD=0, p_RX_CM_BUF_CFG=10, p_RX_CM_BUF_PD=0, p_RX_CM_SEL=3, p_RX_CM_TRIM=10,
            p_RX_CTLE3_LPF=255, p_RX_DATA_WIDTH={16: 20, 32: 40}[data_width], p_RX_DDI_SEL=0, p_RX_DEFER_RESET_BUF_EN='TRUE', p_RX_DEGEN_CTRL=3, p_RX_DFELPM_CFG0=6, p_RX_DFELPM_CFG1=1, p_RX_DFELPM_KLKH_AGC_STUP_EN=1, p_RX_DFE_AGC_CFG0=2, p_RX_DFE_AGC_CFG1=4, p_RX_DFE_KL_LPM_KH_CFG0=1, p_RX_DFE_KL_LPM_KH_CFG1=4, p_RX_DFE_KL_LPM_KL_CFG0=1, p_RX_DFE_KL_LPM_KL_CFG1=4, p_RX_DFE_LPM_HOLD_DURING_EIDLE=0, p_RX_DISPERR_SEQ_MATCH='TRUE', p_RX_DIV2_MODE_B=0, p_RX_DIVRESET_TIME=1, p_RX_EN_CTLE_RCAL_B=0, p_RX_EN_HI_LR=1, p_RX_EXT_RL_CTRL=0, p_RX_EYESCAN_VS_CODE=0, p_RX_EYESCAN_VS_NEG_DIR=0, p_RX_EYESCAN_VS_RANGE=0, p_RX_EYESCAN_VS_UT_SIGN=0, p_RX_FABINT_USRCLK_FLOP=0, p_RX_INT_DATAWIDTH={16: 0, 32: 1}[data_width], p_RX_PMA_POWER_SAVE=0, p_RX_PMA_RSV0=0, p_RX_PROGDIV_CFG=rx_progdiv_cfg, p_RX_PROGDIV_RATE=1, p_RX_RESLOAD_CTRL=0, p_RX_RESLOAD_OVRD=0, p_RX_SAMPLE_PERIOD=7, p_RX_SIG_VALID_DLY=11, p_RX_SUM_DFETAPREP_EN=0, p_RX_SUM_IREF_TUNE=4, p_RX_SUM_RESLOAD_CTRL=3, p_RX_SUM_VCMTUNE=6, p_RX_SUM_VCM_OVWR=0, p_RX_SUM_VREF_TUNE=4, p_RX_TUNE_AFE_OS=0, p_RX_VREG_CTRL=5, p_RX_VREG_PDB=1, p_RX_WIDEMODE_CDR=0, p_RX_WIDEMODE_CDR_GEN3=0, p_RX_WIDEMODE_CDR_GEN4=1, p_RX_XCLK_SEL=('RXDES' if rx_buffer_enable else 'RXUSR'), p_RX_XMODE_SEL=0, p_SAMPLE_CLK_PHASE=0, p_SAS_12G_MODE=0, p_SATA_BURST_SEQ_LEN=6, p_SATA_BURST_VAL=4, p_SATA_CPLL_CFG='VCO_3000MHZ', p_SATA_EIDLE_VAL=4, p_SHOW_REALIGN_COMMA='FALSE', p_SIM_DEVICE='ULTRASCALE_PLUS', p_SIM_MODE='FAST', p_SIM_RECEIVER_DETECT_PASS='TRUE', p_SIM_RESET_SPEEDUP='TRUE', p_SIM_TX_EIDLE_DRIVE_LEVEL='Z', p_SRSTMODE=0, p_TAPDLY_SET_TX=0, p_TEMPERATURE_PAR=2, p_TERM_RCAL_CFG=16913, p_TERM_RCAL_OVRD=0, p_TRANS_TIME_RATE=14, p_TST_RSV0=0, p_TST_RSV1=0)
        # TX parameters.
        self.gth_params.update(p_TXBUF_EN=('TRUE' if tx_buffer_enable else 'FALSE'), p_TXBUF_RESET_ON_RATE_CHANGE='TRUE', p_TXDLY_CFG=32784, p_TXDLY_LCFG=48, p_TXDRVBIAS_N=10, p_TXFIFO_ADDR_CFG='LOW', p_TXGBOX_FIFO_INIT_RD_ADDR=4, p_TXGEARBOX_EN='FALSE', p_TXOUT_DIV=txout_div, p_TXPCSRESET_TIME=3, p_TXPHDLY_CFG0=24688,
            p_TXPHDLY_CFG1=(10 if ((gen == 'gen3') and (data_width == 16)) else 15), p_TXPH_CFG=(1827 if (gen == 'gen1') else ({16: 803, 32: 1827}[data_width] if (gen == 'gen2') else 803)), p_TXPH_CFG2=0, p_TXPH_MONITOR_SEL=0, p_TXPI_CFG=991, p_TXPI_CFG0=0, p_TXPI_CFG1=0, p_TXPI_CFG2=0, p_TXPI_CFG3=1, p_TXPI_CFG4=0, p_TXPI_CFG5=0, p_TXPI_GRAY_SEL=0, p_TXPI_INVSTROBE_SEL=0, p_TXPI_LPM=0, p_TXPI_PPM=0, p_TXPI_PPMCLK_SEL='TXUSRCLK2', p_TXPI_PPM_CFG=0, p_TXPI_SYNFREQ_PPM=1, p_TXPI_VREFSEL=0, p_TXPMARESET_TIME=3, p_TXREFCLKDIV2_SEL=0, p_TXSYNC_MULTILANE=0, p_TXSYNC_OVRD=0, p_TXSYNC_SKIP_DA=0, p_TX_CLK25_DIV=6, p_TX_CLKMUX_EN=1, p_TX_DATA_WIDTH={16: 20, 32: 40}[data_width], p_TX_DCC_LOOP_RST_CFG=4, p_TX_DEEMPH0=0, p_TX_DEEMPH1=0, p_TX_DEEMPH2=0, p_TX_DEEMPH3=0, p_TX_DIVRESET_TIME=1, p_TX_DRIVE_MODE='DIRECT', p_TX_DRVMUX_CTRL=2, p_TX_EIDLE_ASSERT_DELAY=4, p_TX_EIDLE_DEASSERT_DELAY=3, p_TX_FABINT_USRCLK_FLOP=0, p_TX_FIFO_BYP_EN=(0 if tx_buffer_enable else 1), p_TX_IDLE_DATA_ZERO=0, p_TX_INT_DATAWIDTH={16: 0, 32: 1}[data_width], p_TX_LOOPBACK_DRIVE_HIZ='FALSE', p_TX_MAINCURSOR_SEL=0, p_TX_MARGIN_FULL_0=95, p_TX_MARGIN_FULL_1=94, p_TX_MARGIN_FULL_2=92, p_TX_MARGIN_FULL_3=90, p_TX_MARGIN_FULL_4=88, p_TX_MARGIN_LOW_0=70, p_TX_MARGIN_LOW_1=69, p_TX_MARGIN_LOW_2=67, p_TX_MARGIN_LOW_3=66, p_TX_MARGIN_LOW_4=64, p_TX_PHICAL_CFG0=0, p_TX_PHICAL_CFG1=32256, p_TX_PHICAL_CFG2=(513 if tx_buffer_enable else 512), p_TX_PI_BIASSET=0, p_TX_PI_IBIAS_MID=0, p_TX_PMADATA_OPT=0, p_TX_PMA_POWER_SAVE=0, p_TX_PMA_RSV0=8, p_TX_PREDRV_CTRL=2, p_TX_PROGCLK_SEL='PREPI', p_TX_PROGDIV_CFG=tx_progdiv_cfg, p_TX_PROGDIV_RATE=1, p_TX_QPI_STATUS_EN=0, p_TX_RXDETECT_CFG=50, p_TX_RXDETECT_REF=4, p_TX_SAMPLE_PERIOD=7, p_TX_SARC_LPBK_ENB=0, p_TX_SW_MEAS=0, p_TX_VREG_CTRL=0, p_TX_VREG_PDB=0, p_TX_VREG_VREFSEL=0, p_TX_XCLK_SEL=('TXOUT' if tx_buffer_enable else 'TXUSR'), p_USB_BOTH_BURST_IDLE=0, p_USB_BURSTMAX_U3WAKE=127, p_USB_BURSTMIN_U3WAKE=99, p_USB_CLK_COR_EQ_EN=0, p_USB_EXT_CNTL=1, p_USB_IDLEMAX_POLLING=699,
            p_USB_IDLEMIN_POLLING=299, p_USB_LFPSPING_BURST=5, p_USB_LFPSPOLLING_BURST=49, p_USB_LFPSPOLLING_IDLE_MS=4, p_USB_LFPSU1EXIT_BURST=29, p_USB_LFPSU2LPEXIT_BURST_MS=99, p_USB_LFPSU3WAKE_BURST_MS=499, p_USB_LFPS_TPERIOD=3, p_USB_LFPS_TPERIOD_ACCURATE=1, p_USB_MODE=0, p_USB_PCIE_ERR_REP_DIS=0, p_USB_PING_SATA_MAX_INIT=21, p_USB_PING_SATA_MIN_INIT=12, p_USB_POLL_SATA_MAX_BURST=8, p_USB_POLL_SATA_MIN_BURST=4, p_USB_RAW_ELEC=0, p_USB_RXIDLE_P0_CTRL=1, p_USB_TXIDLE_TUNE_ENABLE=1, p_USB_U1_SATA_MAX_WAKE=7, p_USB_U1_SATA_MIN_WAKE=4, p_USB_U2_SAS_MAX_COM=64, p_USB_U2_SAS_MIN_COM=36, p_USE_PCS_CLK_PHASE_SEL=0, p_Y_ALL_MODE=0)
        # --- GTHE4_CHANNEL signal connections (i_ = inputs, o_ = outputs) ---
        self.gth_params.update(i_PCSRSVDIN=0, i_GTRSVD=0, i_TSTIN=0, i_GTTXRESETSEL=0, i_GTRXRESETSEL=0, i_RESETOVRD=0, i_DRPADDR=drp_mux.addr, i_DRPCLK=drp_mux.clk, i_DRPDI=drp_mux.di, o_DRPDO=drp_mux.do, i_DRPEN=drp_mux.en, o_DRPRDY=drp_mux.rdy, i_DRPWE=drp_mux.we, i_DRPRST=0, i_CPLLRESET=0, i_CPLLPD=self.cpllreset, o_CPLLLOCK=self.cplllock, i_CPLLLOCKEN=1, i_CPLLREFCLKSEL=(7 if use_gtgrefclk else 1), i_GTGREFCLK=(self.refclk if use_gtgrefclk else 0), i_GTREFCLK0=(0 if use_gtgrefclk else self.refclk), i_QPLL0CLK=0, i_QPLL0REFCLK=0, i_QPLL1CLK=0, i_QPLL1REFCLK=0, i_QPLL0FREQLOCK=0, i_QPLL1FREQLOCK=0, o_TXOUTCLK=self.txoutclk, i_TXSYSCLKSEL=0, i_TXPLLCLKSEL=0, i_TXOUTCLKSEL=(2 if tx_buffer_enable else 5), i_GTTXRESET=tx_init.gtXxreset, o_TXRESETDONE=tx_init.Xxresetdone, i_TXDLYSRESET=tx_init.Xxdlysreset, o_TXDLYSRESETDONE=tx_init.Xxdlysresetdone, o_TXPHALIGNDONE=tx_init.Xxphaligndone, i_TXUSERRDY=tx_init.Xxuserrdy, i_TXSYNCMODE=1, i_TXDLYBYPASS=(1 if tx_buffer_enable else 0), i_TXPHDLYPD=(1 if tx_buffer_enable else 0), i_TX8B10BBYPASS=0, i_TXCTRL0=0, i_TXCTRL1=0, i_TXCTRL2=self.txcharisk, i_TXDATA=self.txdata, i_TXUSRCLK=self.txusrclk, i_TXUSRCLK2=self.txusrclk2, i_RXPD=Replicate(self.rxpd, 2), i_TXPD=Replicate(txpd, 2), i_TXDEEMPH=0, i_TXDIFFCTRL=24, i_TXPRECURSOR=0, i_TXPOSTCURSOR=0, i_TXMAINCURSOR=0, i_TXINHIBIT=0, i_TXPDELECIDLEMODE=0,
            i_TXELECIDLE=txelecidle, i_TXMARGIN=0, i_TXRATE=0, i_TXSWING=0, i_LOOPBACK=0, i_GTRXRESET=rx_init.gtXxreset, o_RXRESETDONE=rx_init.Xxresetdone, i_RXDLYSRESET=rx_init.Xxdlysreset, o_RXDLYSRESETDONE=rx_init.Xxdlysresetdone, i_RXPMARESET=0, o_RXPHALIGNDONE=rxphaligndone, i_RXSYNCALLIN=rxphaligndone, i_RXUSERRDY=rx_init.Xxuserrdy, i_RXSYNCIN=0, i_RXSYNCMODE=1, o_RXSYNCDONE=rx_init.Xxsyncdone, i_RXDLYBYPASS=(1 if rx_buffer_enable else 0), i_RXPHDLYPD=(1 if rx_buffer_enable else 0), i_RXDFEAGCCTRL=1, i_RXDFECFOKFCNUM=13, i_RXDFEXYDEN=1, i_RXLPMEN=1, i_RXRATE=0, i_RXSYSCLKSEL=0, i_RXOUTCLKSEL=2, i_RXPLLCLKSEL=0, o_RXOUTCLK=self.rxoutclk, i_RXUSRCLK=self.rxusrclk, i_RXUSRCLK2=self.rxusrclk2, i_RX8B10BEN=1, i_RXCDRFREQRESET=0, i_RXCDRHOLD=self.rx_cdrhold, o_RXCDRLOCK=Open(), i_RXCDROVRDEN=0, i_RXCDRRESET=0, i_RXOSCALRESET=0, o_RXOSINTDONE=Open(), o_RXOSINTSTARTED=Open(), o_RXOSINTSTROBEDONE=Open(), o_RXOSINTSTROBESTARTED=Open(), o_RXBYTEISALIGNED=Open(), o_RXBYTEREALIGN=Open(), o_RXCOMMADET=Open(), i_RXCOMMADETEN=1, i_RXMCOMMAALIGNEN=1, i_RXPCOMMAALIGNEN=1, i_RXSLIDE=0, o_GTPOWERGOOD=Open(), o_RXCOMSASDET=Open(), o_RXCOMWAKEDET=rxcomwakedet, o_RXCOMINITDET=rxcominitdet, o_RXELECIDLE=Open(), i_RXELECIDLEMODE=0, o_TXCOMFINISH=txcomfinish, i_TXCOMINIT=txcominit, i_TXCOMSAS=0, i_TXCOMWAKE=txcomwake, i_TX8B10BEN=1, o_RXCTRL0=self.rxctrl0, o_RXCTRL1=self.rxctrl1, o_RXCTRL2=self.rxctrl2, o_RXCTRL3=self.rxctrl3, o_RXDATA=self.rxdata, i_TXPOLARITY=self.tx_polarity, i_RXPOLARITY=self.rx_polarity, i_GTHRXP=pads.rx_p, i_GTHRXN=pads.rx_n, o_GTHTXP=pads.tx_p, o_GTHTXN=pads.tx_n)
        self.specials += Instance('GTHE4_CHANNEL', **self.gth_params)
class Solution():
    """Lexicographically smallest string reachable via the given swap pairs.

    Indices connected (transitively) by ``pairs`` form groups whose characters
    may be permuted freely, so each group's characters are sorted and placed
    back onto the group's sorted index positions (disjoint-set union).
    """

    def union(self, a, b):
        # Attach a's root beneath b's root.
        root_a = self.find(a)
        root_b = self.find(b)
        self.parent[root_a] = root_b

    def find(self, a):
        # Iterative find with full path compression (same final parent state
        # as the recursive version: every visited node points at the root).
        root = a
        while self.parent[root] != root:
            root = self.parent[root]
        while self.parent[a] != root:
            self.parent[a], a = root, self.parent[a]
        return root

    def smallestStringWithSwaps(self, s: str, pairs: List[List[int]]) -> str:
        self.parent = list(range(len(s)))
        for a, b in pairs:
            self.union(a, b)
        # Bucket the indices of each connected component by its root.
        clusters = defaultdict(list)
        for i in range(len(s)):
            clusters[self.find(i)].append(i)
        out = [''] * len(s)
        for indices in clusters.values():
            letters = sorted(s[i] for i in indices)
            for pos, ch in zip(sorted(indices), letters):
                out[pos] = ch
        return ''.join(out)
(connection.features.uses_savepoints, "'atomic' requires transactions and savepoints.")  # NOTE(review): remnant of a skipUnless(...) decorator mangled by extraction — confirm upstream


class MultiDBTransactionAPIExceptionTests(TestCase):
    """An APIException must mark BOTH database connections for rollback
    when ATOMIC_REQUESTS is enabled on two databases."""

    databases = '__all__'

    def setUp(self):
        self.view = APIExceptionView.as_view()
        # Force ATOMIC_REQUESTS so each request is wrapped in a transaction
        # on both connections.
        connections.databases['default']['ATOMIC_REQUESTS'] = True
        connections.databases['secondary']['ATOMIC_REQUESTS'] = True

    def tearDown(self):
        connections.databases['default']['ATOMIC_REQUESTS'] = False
        connections.databases['secondary']['ATOMIC_REQUESTS'] = False

    def test_api_exception_rollback_transaction(self):
        request = factory.post('/')
        # One extra query when the backend releases savepoints explicitly.
        num_queries = (4 if connection.features.can_release_savepoints else 3)
        with self.assertNumQueries(num_queries):
            with transaction.atomic(), transaction.atomic(using='secondary'):
                response = self.view(request)
                # Both connections must be flagged for rollback while still
                # inside the atomic blocks.
                assert transaction.get_rollback()
                assert transaction.get_rollback(using='secondary')
        assert (response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR)
        # The rollback means nothing was persisted.
        assert (BasicModel.objects.count() == 0)
def get_default_cl_cwl(memtype, tck):
    """Return default (CL, CWL) latencies for a DRAM type at clock period tck.

    Walks a data-rate-frequency -> (CL, CWL) table in ascending order and
    returns the entry for the lowest speed bin whose clock period fits tck.

    :param memtype: one of 'SDR', 'DDR2', 'DDR3', 'DDR4'.
    :param tck: controller clock period in seconds.
    :returns: (CL, CWL) tuple; CWL is None for SDR.
    :raises ValueError: for an unknown memtype or a tck faster than every bin.
    """
    f_to_cl_cwl = OrderedDict()
    # NOTE(review): the frequency keys below were garbled to `.0` in the
    # original source (every key collapsed to 0.0, so each table held a single
    # entry). They are reconstructed from the standard JEDEC speed bins for the
    # CL/CWL pairs present — confirm against the project's reference table.
    if (memtype == 'SDR'):
        f_to_cl_cwl[100e6] = (2, None)
        f_to_cl_cwl[133e6] = (3, None)
    elif (memtype == 'DDR2'):
        f_to_cl_cwl[400e6] = (3, 2)
        f_to_cl_cwl[533e6] = (4, 3)
        f_to_cl_cwl[677e6] = (5, 4)
        f_to_cl_cwl[800e6] = (6, 5)
        f_to_cl_cwl[1066e6] = (7, 5)
    elif (memtype == 'DDR3'):
        f_to_cl_cwl[800e6] = (6, 5)
        f_to_cl_cwl[1066e6] = (7, 6)
        f_to_cl_cwl[1333e6] = (10, 7)
        f_to_cl_cwl[1600e6] = (11, 8)
        f_to_cl_cwl[1866e6] = (13, 9)
    elif (memtype == 'DDR4'):
        f_to_cl_cwl[1600e6] = (9, 9)
        f_to_cl_cwl[1866e6] = (11, 9)
        f_to_cl_cwl[2133e6] = (13, 10)
        f_to_cl_cwl[2400e6] = (15, 11)
        f_to_cl_cwl[2666e6] = (16, 12)
        f_to_cl_cwl[3200e6] = (18, 14)
    else:
        raise ValueError('unsupported memtype: %s' % memtype)
    for (f, (cl, cwl)) in f_to_cl_cwl.items():
        # DDR memories transfer two words per clock, so data-rate frequency f
        # corresponds to a clock period of 2/f; SDR is one word per clock.
        m = (2 if ('DDR' in memtype) else 1)
        if (tck >= (m / f)):
            return (cl, cwl)
    raise ValueError('tck=%g too fast for any %s speed bin' % (tck, memtype))
def read_input():
    """Parse every openflow_input file and bucket the parsed results by wire version.

    Only versions present in loxi_globals.OFVersions.target_versions are kept.
    """
    by_wire_version = defaultdict(list)
    pattern = '%s/openflow_input/*' % root_dir
    # Skip editor backup files ending in '~'.
    input_paths = [p for p in sorted(glob.glob(pattern)) if not p.endswith('~')]
    for path in input_paths:
        log('Processing struct file: ' + path)
        parsed = process_input_file(path)
        for wire_version in parsed.wire_versions:
            mapped = loxi_globals.OFVersions.from_wire(wire_version)
            if mapped in loxi_globals.OFVersions.target_versions:
                by_wire_version[wire_version].append(parsed)
    return by_wire_version
class RedisCache(BaseCache):
    """Paste cache backed by Redis, with an optional secondary fallback cache.

    Each paste uses three keys: ``<id>__meta``, ``<id>__html`` and
    ``<id>__raw``. Redis errors are logged rather than raised; on reads, a
    miss (or error) falls through to the fallback cache, and fallback hits
    are pushed back into Redis.
    """

    # Async Redis client; created once the app starts serving (handle_lifespan).
    _conn: Redis

    def __init__(self, *, fallback=None, app: (Quart | None)=None, redis_url: (str | None)=None, **kw):
        super().__init__(fallback=fallback)
        self._conn = None
        # Both the app (for the lifespan hook) and the redis URL are required.
        if ((app is None) or (redis_url is None)):
            raise ValueError("'app' or 'redis_url' cannot be None")
        if (Redis is None):
            raise OptionalRequirementMissing('redis requirement must be installed for redis cache')

        # NOTE(review): `_serving` below looks like the remnant of a decorator
        # (probably `@app.while_serving`) mangled by extraction — confirm upstream.
        _serving

        async def handle_lifespan():
            # Connect on startup with up to 6 attempts and a linear backoff,
            # yield for the app's serving lifetime, then close on shutdown.
            logger.info('connecting to redis...')
            self._conn = Redis.from_url(redis_url)
            for attempt in range(1, 7):
                try:
                    (await self._conn.ping())
                    break
                except RedisError as err:
                    logger.warning('failed to connect to redis, attempt %d of 6', attempt)
                    if (attempt >= 6):
                        logger.critical('could not connect to redis')
                        raise CacheException('could not connect to redis') from err
                    (await asyncio.sleep(attempt))
            logger.info('connected to redis')
            (yield)
            logger.info('closing redis connection...')
            (await self._conn.close())
            logger.info('closed redis connection')

    async def push_paste_any(self, paste_id, /, *, meta=None, html=None, raw=None, update_fallback: bool=True):
        # Store whichever parts were provided in a single MSET; optionally
        # mirror the write into the fallback cache.
        to_cache = {}
        if meta:
            to_cache[f'{paste_id}__meta'] = meta.json()
        if html:
            to_cache[f'{paste_id}__html'] = html
        if raw:
            to_cache[f'{paste_id}__raw'] = raw
        try:
            (await self._conn.mset(to_cache))
        except RedisError as err:
            logger.error("failed to connect to redis cache: '%s'", err.args)
        if (self._fallback and update_fallback):
            (await self._fallback.push_paste_any(paste_id, meta=meta, html=html, raw=raw))

    async def get_paste_meta(self, paste_id):
        # Fetch paste metadata; on a miss, consult the fallback and re-prime
        # Redis (without writing back to the fallback again).
        cached = None
        try:
            cached = (await self._conn.get(f'{paste_id}__meta'))
            if cached:
                cached = PasteMeta.parse_raw(cached)
        except RedisError as err:
            logger.error("failed to connect to redis cache: '%s'", err.args)
        if ((cached is None) and self._fallback):
            cached = (await self._fallback.get_paste_meta(paste_id))
            if (cached is not None):
                (await self.push_paste_any(paste_id, meta=cached, update_fallback=False))
        return cached

    async def get_paste_rendered(self, paste_id):
        # Fetch the rendered HTML (stored as bytes, decoded here); fallback
        # behavior mirrors get_paste_meta.
        cached = None
        try:
            cached = (await self._conn.get(f'{paste_id}__html'))
            if cached:
                cached = cached.decode()
        except RedisError as err:
            logger.error("failed to connect to redis cache: '%s'", err.args)
        if ((cached is None) and self._fallback):
            cached = (await self._fallback.get_paste_rendered(paste_id))
            if (cached is not None):
                (await self.push_paste_any(paste_id, html=cached, update_fallback=False))
        return cached

    async def get_paste_raw(self, paste_id):
        # Fetch the raw paste content; fallback behavior mirrors get_paste_meta.
        cached = None
        try:
            cached = (await self._conn.get(f'{paste_id}__raw'))
        except RedisError as err:
            logger.error("failed to connect to redis cache: '%s'", err.args)
        if ((cached is None) and self._fallback):
            cached = (await self._fallback.get_paste_raw(paste_id))
            if (cached is not None):
                (await self.push_paste_any(paste_id, raw=cached, update_fallback=False))
        return cached

    async def remove_paste(self, paste_id: str):
        # Delete all three keys for the paste from Redis. Note that the
        # fallback cache is not purged here.
        try:
            (await self._conn.delete(f'{paste_id}__meta', f'{paste_id}__html', f'{paste_id}__raw'))
        except RedisError as err:
            logger.error("failed to connect to redis cache: '%s'", err.args)
class OptionSeriesSplineSonificationTracksMappingPitch(Options):
    """Pitch-mapping options for spline-series sonification tracks.

    NOTE(review): every option below is defined twice — a zero-argument getter
    followed by a one-argument setter. The duplicate names look like
    ``@property`` / ``@<name>.setter`` decorators stripped during extraction;
    as written, each later def simply shadows the earlier one. Confirm against
    the upstream generated source.
    """

    def mapFunction(self):
        # Default: None (no explicit mapping function configured).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Default: 'y'.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Default: 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Default: 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        # Default: None.
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Default: 'yAxis'.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
class BusinessDocsTestCase(DocsTestCase):
    """Exercises the Business graph-API calls whose responses feed the docs."""

    def test_get_product_catalogs(self):
        biz = Business(DocsDataStore.get('business_id'))
        catalogs = biz.get_product_catalogs()
        self.store_response(catalogs[0])

    def test_get_insights(self):
        biz = Business(DocsDataStore.get('business_id'))
        requested_fields = [
            Insights.Field.campaign_id,
            Insights.Field.unique_clicks,
            Insights.Field.impressions,
        ]
        query_params = {
            'level': Insights.Level.campaign,
            'date_preset': Insights.Preset.yesterday,
        }
        self.store_response(biz.get_insights(fields=requested_fields, params=query_params))
def test_flash_lone_windows_set_to_never_with_existing_window(no_lone_server: FlashServer, window: Window) -> None:
    # With flash-lone-windows disabled, a pre-existing window stays at the
    # dimmed default opacity (0.2) while the server runs, even across a focus
    # change.
    with watching_windows([window]) as watchers:
        with server_running(no_lone_server):
            assert (window.opacity == pytest.approx(0.2))
            change_focus(window)
            assert (window.opacity == pytest.approx(0.2))
        # NOTE(review): original indentation was lost in extraction; the final
        # opacity event (1) presumably comes from server shutdown restoring
        # opacity, so this assert is placed after server_running exits — confirm.
        assert (watchers[0].opacity_events == [1, 0.2, 1])
def testSampleRegexsFactory(name, basedir):
    """Build a test method that replays the sample log file for filter `name`.

    The returned ``testFilter`` reads ``TEST_FILES_DIR/logs/<name>``, where
    special ``#`` header lines carry JSON directives (``failJSON``,
    ``fileOptions``, ``filterOptions``, ``addFILE``) describing how the
    following log lines must (or must not) match the filter's failregexes.
    It finally asserts that every failregex of every instantiated filter was
    exercised by at least one sample.
    """

    def testFilter(self):
        self.assertTrue(os.path.isfile(os.path.join(TEST_FILES_DIR, 'logs', name)), ("No sample log file available for '%s' filter" % name))
        # `filenames` may grow while iterating (addFILE directive), hence the
        # index-based while loop instead of a plain for.
        filenames = [name]
        regexsUsedRe = set()
        commonOpts = {}
        faildata = {}
        i = 0
        while (i < len(filenames)):
            filename = filenames[i]
            i += 1
            logFile = FileContainer(os.path.join(TEST_FILES_DIR, 'logs', filename), 'UTF-8', doOpen=True)
            logFile.waitForLineEnd = False
            ignoreBlock = False
            lnnum = 0
            for line in logFile:
                lnnum += 1
                # Directive lines look like "# failJSON:{...}" etc.
                jsonREMatch = re.match('^#+ ?(failJSON|(?:file|filter)Options|addFILE):(.+)$', line)
                if jsonREMatch:
                    try:
                        faildata = json.loads(jsonREMatch.group(2))
                        if (jsonREMatch.group(1) == 'fileOptions'):
                            # File-wide options merged into every later filterOptions.
                            commonOpts = faildata
                            continue
                        if (jsonREMatch.group(1) == 'filterOptions'):
                            # (Re)build the set of filter instances to test
                            # against; may be a list of option dicts.
                            self._filterTests = []
                            ignoreBlock = False
                            for faildata in (faildata if isinstance(faildata, list) else [faildata]):
                                if commonOpts:
                                    opts = commonOpts.copy()
                                    opts.update(faildata)
                                else:
                                    opts = faildata
                                self.assertTrue(isinstance(opts, dict))
                                # 'test.condition' can disable the whole block.
                                if opts.get('test.condition'):
                                    ignoreBlock = (not eval(opts.get('test.condition')))
                                if (not ignoreBlock):
                                    fltOpts = self._filterOptions(opts)
                                    fltName = opts.get('test.filter-name')
                                    if (not fltName):
                                        fltName = (str(fltOpts) if fltOpts else '')
                                    fltName = (name + fltName)
                                    flt = self._readFilter(fltName, name, basedir, opts=fltOpts)
                                    self._filterTests.append((fltName, flt, opts))
                            continue
                        if (jsonREMatch.group(1) == 'addFILE'):
                            # Queue an additional sample log file.
                            filenames.append(faildata)
                            continue
                    except ValueError as e:
                        raise ValueError(('%s: %s:%i' % (e, logFile.getFileName(), lnnum)))
                    # failJSON: the next line is the log line to test.
                    line = next(logFile)
                elif (ignoreBlock or line.startswith('#') or (not line.strip())):
                    # Comment / blank / ignored line.
                    continue
                else:
                    # Plain log line with no preceding failJSON metadata.
                    faildata = {}
                # A failJSON header inside an ignored block still consumed the
                # following line; skip it here.
                if ignoreBlock:
                    continue
                # No filterOptions seen yet: test against the default filter.
                if (not self._filterTests):
                    fltName = name
                    flt = self._readFilter(fltName, name, basedir, opts=None)
                    self._filterTests = [(fltName, flt, {})]
                line = line.rstrip('\r\n')
                for (fltName, flt, opts) in self._filterTests:
                    # Per-sample constraint may exclude this filter variant.
                    if (faildata.get('constraint') and (not eval(faildata['constraint']))):
                        continue
                    (flt, regexsUsedIdx) = flt
                    regexList = flt.getFailRegex()
                    failregex = (- 1)
                    try:
                        fail = {}
                        if (opts.get('logtype') != 'journal'):
                            ret = flt.processLine(line)
                        else:
                            # Journal entries are fed as (prefix, timestamp, line).
                            if opts.get('test.prefix-line'):
                                line = (opts.get('test.prefix-line') + line)
                            ret = flt.processLine(('', TEST_NOW_STR, line), TEST_NOW)
                        if ret:
                            # Filter out pseudo-failures (no fid / nofail) but
                            # still record their regexes as exercised.
                            found = []
                            for ret in ret:
                                (failregex, fid, fail2banTime, fail) = ret
                                if ((fid is None) or fail.get('nofail')):
                                    regexsUsedIdx.add(failregex)
                                    regexsUsedRe.add(regexList[failregex])
                                    continue
                                found.append(ret)
                            ret = found
                        if (not ret):
                            self.assertFalse(faildata.get('match', False), 'Line not matched when should have')
                            continue
                        self.assertTrue(faildata.get('match', False), "Line matched when shouldn't have")
                        self.assertEqual(len(ret), 1, ('Multiple regexs matched %r' % [x[0] for x in ret]))
                        for ret in ret:
                            (failregex, fid, fail2banTime, fail) = ret
                            # Compare every expected key from the failJSON
                            # metadata against the captured failure fields.
                            for (k, v) in faildata.items():
                                if (k not in ('time', 'match', 'desc', 'constraint')):
                                    fv = fail.get(k, None)
                                    if (fv is None):
                                        # Synthesized fields with no direct capture.
                                        if (k == 'host'):
                                            fv = fid
                                        if (k == 'attempts'):
                                            fv = len(fail.get('matches', {}))
                                    if isinstance(fv, (set, list, dict)):
                                        self.assertSortedEqual(fv, v)
                                        continue
                                    self.assertEqual(fv, v)
                            t = faildata.get('time', None)
                            if (t is not None):
                                # Expected time may or may not carry microseconds.
                                try:
                                    jsonTimeLocal = datetime.datetime.strptime(t, '%Y-%m-%dT%H:%M:%S')
                                except ValueError:
                                    jsonTimeLocal = datetime.datetime.strptime(t, '%Y-%m-%dT%H:%M:%S.%f')
                                jsonTime = time.mktime(jsonTimeLocal.timetuple())
                                jsonTime += (jsonTimeLocal.microsecond / 1000000.0)
                                self.assertEqual(fail2banTime, jsonTime, ('UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)' % (fail2banTime, time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime(fail2banTime)), jsonTime, time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime(jsonTime)), (fail2banTime - jsonTime))))
                            regexsUsedIdx.add(failregex)
                            regexsUsedRe.add(regexList[failregex])
                    except AssertionError as e:
                        # Re-raise with full context: file, line, regex, data.
                        import pprint
                        raise AssertionError(('%s: %s on: %s:%i, line:\n %s\nregex (%s):\n %s\nfaildata: %s\nfail: %s' % (fltName, e, logFile.getFileName(), lnnum, line, failregex, (regexList[failregex] if (failregex != (- 1)) else None), '\n'.join(pprint.pformat(faildata).splitlines()), '\n'.join(pprint.pformat(fail).splitlines()))))
        # Finally: every failregex of every filter must have been exercised.
        for (fltName, flt) in self._filters.items():
            (flt, regexsUsedIdx) = flt
            regexList = flt.getFailRegex()
            for (failRegexIndex, failRegex) in enumerate(regexList):
                self.assertTrue(((failRegexIndex in regexsUsedIdx) or (failRegex in regexsUsedRe)), ('%s: Regex has no samples: %i: %r' % (fltName, failRegexIndex, failRegex)))
    return testFilter
def build_missing_envoy_docker_image(manager: source_manager.SourceManager, envoy_image_tag: str) -> None:
    """Ensure an Envoy docker image exists for *envoy_image_tag*.

    When build options are configured for the Envoy source we always
    (re)build.  Otherwise the local image list is consulted first and the
    build is triggered only if the tagged image is absent.
    """
    have_build_options = manager.have_build_options(
        proto_source.SourceRepository.SourceIdentity.SRCID_ENVOY)
    log.debug(f'Build options exist?: {have_build_options}')

    # With build options present we skip the lookup, so the membership test
    # below always fails and the image is rebuilt unconditionally.
    existing_images = []
    if not have_build_options:
        existing_images = docker_image.DockerImage().list_images()
        log.debug(f'Existing image tags: {existing_images}')

    image_name = generate_envoy_image_name_from_tag(envoy_image_tag)
    if image_name not in existing_images:
        build_envoy_docker_image(manager, envoy_image_tag)
@pytest.mark.usefixtures('use_tmpdir')
def test_that_run_template_replace_symlink_does_not_write_to_source(prior_ensemble):
    """RUN_TEMPLATE output must replace a pre-existing symlink in the run
    path rather than writing through it into the link's target file."""
    # Minimal ERT config with a single RUN_TEMPLATE entry.
    Path('template.tmpl').write_text('I want to replace: <IENS>', encoding='utf-8')
    Path('config.ert').write_text(
        dedent('\n NUM_REALIZATIONS 1\n JOBNAME my_case%d\n RUN_TEMPLATE template.tmpl result.txt\n '),
        encoding='utf-8',
    )
    ert_config = ErtConfig.from_file('config.ert')

    run_context = ensemble_context(prior_ensemble, [True], 0, None, '', 'name_%', 'name')
    run_path = Path(run_context[0].runpath)
    os.makedirs(run_path)

    # result.txt inside the run path is a symlink pointing back at start.txt.
    Path('start.txt').write_text('I dont want to replace in this file', encoding='utf-8')
    os.symlink('start.txt', run_path / 'result.txt')

    create_run_path(run_context, ert_config.substitution_list, ert_config)

    # The run-path file received the substituted content...
    assert (run_path / 'result.txt').read_text(encoding='utf-8') == 'I want to replace: 0'
    # ...while the symlink's original target was left untouched.
    assert Path('start.txt').read_text(encoding='utf-8') == 'I dont want to replace in this file'
class PushThread(QtCore.QThread):
    """Background thread that scrobbles a batch of tracks.

    Emits old-style Qt signals:
      * 'progress' -- per track, with a (timestamp, trackname, artistname,
        albumname, trackmbid) tuple of the just-queued track.
      * 'error'    -- with a formatted traceback string if anything fails.
    """

    def __init__(self, scrobbler, tracks, parent=None):
        QtCore.QThread.__init__(self, parent)
        self.scrobbler = scrobbler
        # Iterable of 7-tuples: (timestamp, trackname, artistname, albumname,
        # trackmbid, artistmbid, albummbid) -- all text, encoded below.
        self.tracks = tracks

    def run(self):
        try:
            for track in self.tracks:
                # The scrobble library appears to expect byte strings --
                # TODO confirm against the installed library version.
                track = [item.encode('utf-8') for item in track]
                (timestamp, trackname, artistname, albumname,
                 trackmbid, artistmbid, albummbid) = track
                self.scrobbler.add_track(
                    scrobble.ScrobbleTrack(timestamp, trackname, artistname,
                                           albumname, trackmbid))
                self.emit(QtCore.SIGNAL('progress'),
                          (timestamp, trackname, artistname, albumname, trackmbid))
                # submit() may ask us to back off; msleep keeps the wait in
                # this worker thread instead of blocking the GUI.
                self.scrobbler.submit(
                    sleep_func=lambda s: self.msleep(int(s * 1000)))
        except Exception:
            import traceback
            # Fix: format_exc() takes an optional `limit` int, not an
            # exception object -- passing `e` raised TypeError on Python 3,
            # replacing the real backtrace with a bogus secondary error.
            backtrace = traceback.format_exc()
            self.emit(QtCore.SIGNAL('error'), backtrace)
class OFPGroupStats(StringifyMixin):
    """One group-statistics entry of an OpenFlow group-stats reply.

    Wire layout: a fixed-size header (OFP_GROUP_STATS_PACK_STR) followed by
    a variable number of bucket counters; `length` spans the whole entry.
    """

    def __init__(self, length=None, group_id=None, ref_count=None,
                 packet_count=None, byte_count=None, duration_sec=None,
                 duration_nsec=None, bucket_stats=None):
        super(OFPGroupStats, self).__init__()
        self.length = length
        self.group_id = group_id
        self.ref_count = ref_count
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        # list of OFPBucketCounter, one per bucket of the group
        self.bucket_stats = bucket_stats

    # Fix: the method takes `cls` and instantiates via `cls(*group)`, so it
    # must be a classmethod; the decorator was missing.
    @classmethod
    def parser(cls, buf, offset):
        """Parse one entry from *buf* at *offset*; return an OFPGroupStats.

        The trailing bucket counters are consumed until `length` bytes of
        the entry (relative to *offset*) are exhausted.
        """
        group = struct.unpack_from(ofproto.OFP_GROUP_STATS_PACK_STR,
                                   buf, offset)
        group_stats = cls(*group)
        group_stats.bucket_stats = []
        # Absolute end of this entry; everything between the fixed header
        # and this point is bucket counters.
        total_len = group_stats.length + offset
        offset += ofproto.OFP_GROUP_STATS_SIZE
        while total_len > offset:
            b = OFPBucketCounter.parser(buf, offset)
            group_stats.bucket_stats.append(b)
            offset += ofproto.OFP_BUCKET_COUNTER_SIZE
        return group_stats