code
stringlengths
281
23.7M
class BaseDummyPen(object):
    """Minimal pen that records every drawing command for later inspection."""

    def __init__(self, *args, **kwargs):
        # Each entry is a (name, positional-args, keyword-args) tuple, in call order.
        self.commands = []

    def __str__(self):
        # Formatting is delegated to the module-level helper.
        return _repr_pen_commands(self.commands)

    def addComponent(self, glyphName, transformation, **kwargs):
        """Record a component reference instead of drawing it."""
        record = ('addComponent', (glyphName, transformation), kwargs)
        self.commands.append(record)
@pytest.mark.unit  # NOTE(review): decorator reconstructed from stripped ".unit" fragment — confirm against upstream
def test_sort_create_update_create() -> None:
    """A resource present only in the manifest is routed to the create bucket."""
    resource_1 = models.DataCategory(organization_fides_key=1, fides_key='some_resource', name='Test resource 1', description='Test Description')
    resource_2 = models.DataCategory(organization_fides_key=1, fides_key='another_system', name='Test System 2', description='Test Description')
    expected_create_result = [resource_2]
    manifest_resource_list = [resource_2]
    server_resource_list = [resource_1]
    create_result, update_result = sort_create_update(manifest_resource_list, server_resource_list)
    assert create_result == expected_create_result
    # Nothing overlaps between manifest and server, so nothing is updated.
    assert update_result == []
@ui_meta(characters.sakuya.Dagger)  # NOTE(review): decorator reconstructed from stripped "_meta(...)" fragment — confirm against upstream
class Dagger():
    """UI metadata for Sakuya's Dagger skill (clickability, validation, FX)."""

    name = ''
    description = '<style=Card.Name></style>,<style=Card.Name></style>'

    def clickable(self):
        me = self.me
        # The skill needs at least one card/shown card/equip to be usable.
        if not (me.cards or me.showncards or me.equips):
            return False
        return self.accept_cards([characters.sakuya.Dagger(me)])

    def is_complete(self, skill):
        assert skill.is_card(characters.sakuya.Dagger)
        cl = skill.associated_cards
        # Exactly one associated card, and it must be an equipment card.
        if (len(cl) != 1) or ('equipment' not in cl[0].category):
            return (False, '!')
        return (True, '!!')

    def is_action_valid(self, sk, tl):
        (rst, reason) = self.is_complete(sk)
        if not rst:
            return (rst, reason)
        # Dagger behaves like an Attack once the skill itself is well-formed.
        return AttackCard().ui_meta.is_action_valid(sk, tl)

    def effect_string(self, act):
        (src, tgt) = (act.source, act.target)
        sk = act.card
        c = sk.associated_cards[0]
        return f'{N.char(src)}{N.card(c)}<style=Skill.Name></style>,{N.char(tgt)}!'

    def sound_effect(self, act):
        return random.choice(['thb-cv-sakuya_dagger1', 'thb-cv-sakuya_dagger2'])
class CountingKubernetesProcessor(KubernetesProcessor):
    """Processor that ignores object contents and only bumps a config counter."""

    aconf: Config
    kind: KubernetesGVK
    key: str

    def __init__(self, aconf: Config, kind: KubernetesGVK, key: str) -> None:
        self.aconf = aconf
        self.kind = kind
        self.key = key

    def kinds(self) -> FrozenSet[KubernetesGVK]:
        # Handles exactly the one GVK it was constructed with.
        return frozenset((self.kind,))

    def _process(self, obj: KubernetesObject) -> None:
        # The object itself is irrelevant; only the occurrence count matters.
        self.aconf.incr_count(self.key)
class BikeuStation(BikeShareStation):
    """One Bike-U share station built from a 'TKStation' feed entry."""

    def __init__(self, info):
        super(BikeuStation, self).__init__()
        self.latitude = float(info['Latitude'])
        self.longitude = float(info['Longitude'])
        self.name = info['Name']
        self.bikes = int(info['TotalAvailableBikes'])
        self.free = int(info['TotalLocks']) - self.bikes
        bike_info = info['Stations']['TKStation'][0]['AvailableBikes']
        bikes = bike_info.get('TKBike', [])
        # BUG FIX: map()/filter() return lazy objects that are ALWAYS truthy,
        # so the original `if bike_uids:` attached an empty 'bike_uids' list
        # even when no identifiers were present.  Materialize the list first.
        bike_uids = [b.get('BikeIdentifier') for b in bikes if b.get('BikeIdentifier')]
        self.extra = {'uid': info['id']}
        if bike_uids:
            self.extra['bike_uids'] = bike_uids
class ReceiverThread(Thread):
    """Daemon thread servicing one token-hub client connection."""

    def __init__(self, client, addr, parent):
        Thread.__init__(self)
        self.daemon = True
        self.client = client
        self.parent = parent

    def run(self):
        data = self.client.recv(1024)
        if data:
            # Hand the payload to the owner, then echo it back with a banner.
            self.parent.data_arrived(data)
            self.client.send(b'XSSer "token-hub" service running... ;-)\n\n')
            self.client.send(b'### INCOMING DATA:\n\n')
            self.client.send(data)
        # NOTE(review): indentation was lost in the source dump; cleanup is
        # assumed to run unconditionally — confirm against upstream.
        self.client.close()
        self.parent.client_finished(self)
class OptionPlotoptionsNetworkgraphSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Accessors for the highpass-frequency mapping configuration.

    NOTE(review): every accessor below is defined twice (a getter form then a
    setter form) with no @property/@name.setter decorators, so the later
    definition silently shadows the earlier one and only the setter variants
    survive on the class.  Presumably decorators were lost upstream — confirm.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
@pytest.fixture(scope='function')  # NOTE(review): "@pytest.fixture" reconstructed from stripped "(scope='function')" fragment — confirm
def served_notice_history_for_tcf_feature(db: Session, fides_user_provided_identity) -> Generator:
    """Yield a ServedNoticeHistory row for TCF feature 2; delete it on teardown."""
    pref_1 = ServedNoticeHistory.create(
        db=db,
        data={
            'acknowledge_mode': False,
            'serving_component': 'tcf_overlay',
            'fides_user_device_provided_identity_id': fides_user_provided_identity.id,
            'feature': 2,
        },
        check_name=False,
    )
    yield pref_1
    pref_1.delete(db)
class TestHNSWMinHashJaccard(TestHNSW):
    """HNSW test suite specialized to MinHash points under Jaccard distance."""

    def _create_random_points(self, high=50, n=100, dim=10):
        # Random integer sets, sketched into 128-permutation MinHashes.
        raw_sets = np.random.randint(0, high, (n, dim))
        return MinHash.bulk(raw_sets, num_perm=128)

    def _create_index(self, minhashes, keys=None):
        index = HNSW(distance_func=minhash_jaccard_distance, m=16, ef_construction=100)
        self._insert_points(index, minhashes, keys)
        return index

    def _search_index(self, index, queries, k=10):
        return super()._search_index_dist(index, queries, minhash_jaccard_distance, k)
def dmenu_pass(command):
    """Return extra dmenu args that hide typed text, or None for non-dmenu launchers."""
    if command != 'dmenu':
        return None
    # The password patch advertises a -P flag in `dmenu -h` usage output.
    try:
        helptext = run(['dmenu', '-h'], capture_output=True, check=False).stderr
        dm_patch = b'P' in helptext
    except FileNotFoundError:
        dm_patch = False
    # Without the patch, fake obscuring by drawing text in the background color.
    color = keepmenu.CONF.get('dmenu_passphrase', 'obscure_color', fallback='#222222')
    if dm_patch:
        return ['-P']
    return ['-nb', color, '-nf', color]
@_register_parser  # NOTE(review): decorator '@' markers reconstructed from the stripped fragment — confirm against upstream
@_set_msg_type(ofproto.OFPT_CONTROLLER_STATUS)
class OFPControllerStatus(MsgBase):
    """OpenFlow OFPT_CONTROLLER_STATUS message wrapper."""

    def __init__(self, datapath, status=None):
        super(OFPControllerStatus, self).__init__(datapath)
        self.status = status

    # BUG FIX: `parser` takes `cls` and is invoked on the class by the message
    # dispatch machinery; it must be a classmethod.
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPControllerStatus, cls).parser(datapath, version, msg_type, msg_len, xid, buf)
        # Decode the status payload that follows the common OpenFlow header.
        msg.status = OFPControllerStatusStats.parser(msg.buf, ofproto.OFP_HEADER_SIZE)
        return msg
@pytest.mark.parametrize('deprecated_args', (
    dict(upward=5000.0, spacing=1),
    dict(upward=5000.0, shape=(6, 6)),
    dict(upward=5000.0, spacing=1, region=((- 4000.0), 0, 5000.0, 7000.0)),
    dict(upward=5000.0, shape=(6, 6), region=((- 4000.0), 0, 5000.0, 7000.0)),
))  # NOTE(review): "@pytest.mark" reconstructed from stripped ".parametrize" fragment — confirm
def test_error_deprecated_args(coordinates_small, data_small, region, deprecated_args):
    """Passing the removed upward/region/shape/spacing args to grid() must raise."""
    eqs = EquivalentSources().fit(coordinates_small, data_small)
    grid_coords = vd.grid_coordinates(region=region, shape=(4, 4), extra_coords=2000.0)
    msg = "The 'upward', 'region', 'shape' and 'spacing' arguments have been"
    with pytest.raises(ValueError, match=msg):
        eqs.grid(coordinates=grid_coords, **deprecated_args)
def test_climb_lanczos():
    """Growing string with a Lanczos climbing image converges in 23 cycles on AnaPot."""
    calc = AnaPot()
    images = calc.get_path(2)
    string_kwargs = {
        'perp_thresh': 0.5,
        'reparam_check': 'rms',
        'climb': True,
        'climb_rms': 0.2,
        'climb_lanczos': True,
        'climb_lanczos_rms': 0.2,
    }
    gs = GrowingString(images, lambda: AnaPot(), **string_kwargs)
    opt = StringOptimizer(gs, keep_last=0, rms_force=0.02, rms_force_only=True)
    opt.run()
    assert opt.is_converged
    # Cycle count is pinned to catch silent optimizer regressions.
    assert opt.cur_cycle == 23
class DropColumnsTestCase(unittest.TestCase):
    """Tests for transform.drop_columns over a tiny three-sequence alignment."""

    def setUp(self):
        self.sequences = [
            SeqRecord(Seq('AAA'), id='s1'),
            SeqRecord(Seq('A-G'), id='s2'),
            SeqRecord(Seq('-A-'), id='s3'),
        ]

    def test_basic(self):
        # Dropping everything from column 1 onward leaves only column 0.
        result = list(transform.drop_columns(self.sequences, [slice(1, None)]))
        self.assertEqual([s.id for s in self.sequences], [s.id for s in result])
        self.assertEqual(['A', 'A', '-'], [str(s.seq) for s in result])

    def test_multi(self):
        # Dropping columns 0 and 2 keeps only the middle column.
        result = list(transform.drop_columns(self.sequences, [slice(0, 1), slice(2, None)]))
        self.assertEqual([s.id for s in self.sequences], [s.id for s in result])
        self.assertEqual(['A', '-', 'A'], [str(s.seq) for s in result])
@mock.patch.object(docker.models.images.ImageCollection, 'list')  # NOTE(review): "@mock.patch" reconstructed from stripped ".object(...)" fragment — confirm
def test_list_images(mock_list_images):
    """list_images flattens the tags of every image returned by the docker SDK."""
    expected_image_tags = ['image:1', 'image:2', 'image:3']
    mock_list_images.return_value = [mock.Mock(tags=[tag]) for tag in expected_image_tags]
    new_docker_image = docker_image.DockerImage()
    images = new_docker_image.list_images()
    assert images == expected_image_tags
class TCNFMDecoder(nn.Module):
    """Temporal-convolutional decoder mapping conditioning signals to synth controls."""

    def __init__(self, n_blocks=2, hidden_channels=64, out_channels=6, kernel_size=3,
                 dilation_base=2, apply_padding=True, deploy_residual=False,
                 input_keys=None, z_size=None, output_complete_controls=True):
        super().__init__()
        # Receptive field of stacked dilated conv blocks:
        # 1 + 2*(k-1) * sum_{i=0}^{n-1} d^i (geometric series in the dilation base).
        dilation_factor = ((dilation_base ** n_blocks) - 1) / (dilation_base - 1)
        self.receptive_field = 1 + (2 * (kernel_size - 1)) * dilation_factor
        # BUG FIX: the log line said "TCNFNDecoder" (typo) instead of the class name.
        print('[INFO] TCNFMDecoder - receptive field is: {}'.format(self.receptive_field))
        self.input_keys = input_keys
        n_keys = len(input_keys)
        self.output_complete_controls = output_complete_controls
        if n_keys == 2:
            in_channels = 2
        elif n_keys == 3:
            # Third key carries a latent vector of z_size channels.
            in_channels = 2 + z_size
        else:
            raise ValueError('Expected 2 or 3 input keys. got: {}'.format(input_keys))
        base = 0
        net = [TCN_block(in_channels, hidden_channels, hidden_channels, kernel_size,
                         dilation=dilation_base ** base, apply_padding=apply_padding,
                         deploy_residual=deploy_residual)]
        if n_blocks > 2:
            for _ in range(n_blocks - 2):
                base += 1
                net.append(TCN_block(hidden_channels, hidden_channels, hidden_channels,
                                     kernel_size, dilation=dilation_base ** base,
                                     apply_padding=apply_padding))
        base += 1
        net.append(TCN_block(hidden_channels, hidden_channels, out_channels, kernel_size,
                             dilation=dilation_base ** base, apply_padding=apply_padding,
                             deploy_residual=deploy_residual, last_block=True))
        self.net = nn.Sequential(*net)

    def forward(self, x):
        # Stack the requested inputs channel-last, then move channels to dim 1 for conv.
        conditioning = torch.cat([x[k] for k in self.input_keys], -1).permute([0, -1, -2])
        ol = self.net(conditioning)
        ol = ol.permute([0, -1, -2])
        if self.output_complete_controls is True:
            # Pass f0 through so downstream synthesis receives a complete control dict.
            synth_params = {'f0_hz': x['f0'], 'ol': ol}
        else:
            synth_params = ol
        return synth_params
class SubmitAsyncSearch(Runner):
    """Submit an async search and stash its id under the operation's name."""

    async def __call__(self, es, params):
        request_params = params.get('request-params', {})
        body = mandatory(params, 'body', self)
        response = await es.async_search.submit(body=body, index=params.get('index'), params=request_params)
        op_name = mandatory(params, 'name', self)
        # Remember the search id so later runners can poll / fetch / delete it.
        CompositeContext.put(op_name, response.get('id'))

    def __repr__(self, *args, **kwargs):
        return 'submit-async-search'
def fetch_subset(ctx, source, dest, fraction=DEFAULT_FRACTION, log=True):
    """Run rdbms-subsetter to copy a fractional sample of `source` into `dest`."""
    pieces = ['rdbms-subsetter {} {} {}'.format(source, dest, fraction)]
    if log:
        pieces.append('--logarithmic')
    # Tables handled elsewhere (full copies / explicit exclusions) are skipped.
    for table in FULL_TABLES + EXCLUDE_TABLES:
        pieces.append('--exclude-table {}'.format(table))
    for table, key in FORCE_INCLUDE:
        pieces.append('--force {}:{}'.format(table, key))
    pieces.append('--config data/subset-config.json')
    pieces.append('--yes')
    ctx.run(' '.join(pieces), echo=True)
def amazon_video_labels_parser(response):
    """Convert an AWS Rekognition video-label response into VideoLabel objects."""
    labels = []
    for entry in response['Labels']:
        detail = entry['Label']
        parents = [p['Name'] for p in detail['Parents'] if p['Name']]
        boxes = []
        for instance in detail['Instances']:
            bb = instance['BoundingBox']
            boxes.append(VideoLabelBoundingBox(
                top=bb.get('Top', 0),
                left=bb.get('Left', 0),
                width=bb.get('Width', 0),
                height=bb.get('Height', 0),
            ))
        # Rekognition reports milliseconds and 0-100 confidence; normalize both.
        videolabel = VideoLabel(
            timestamp=[VideoLabelTimeStamp(start=float(entry['Timestamp']) / 1000.0, end=None)],
            confidence=detail.get('Confidence', 0) / 100,
            name=detail['Name'],
            category=parents,
            bounding_box=boxes,
        )
        labels.append(videolabel)
    return labels
def example():
    """Build an icon button that opens a color-picker dialog and applies the choice."""
    from flet_contrib.color_picker import ColorPicker

    async def open_color_picker(e):
        # Attach the dialog to the page lazily, then show it.
        e.control.page.dialog = d
        d.open = True
        await e.control.page.update_async()

    color_picker = ColorPicker(color='#c8df6f', width=300)
    color_icon = ft.IconButton(icon=ft.icons.BRUSH, on_click=open_color_picker)

    async def change_color(e):
        # Copy the picked color onto the icon and dismiss the dialog.
        color_icon.icon_color = color_picker.color
        d.open = False
        await e.control.page.update_async()

    async def close_dialog(e):
        d.open = False
        await d.update_async()

    d = ft.AlertDialog(
        content=color_picker,
        actions=[ft.TextButton('OK', on_click=change_color), ft.TextButton('Cancel', on_click=close_dialog)],
        actions_alignment=ft.MainAxisAlignment.END,
        on_dismiss=change_color,
    )
    return color_icon
class Range_Expression(Expression):
    """A MATLAB range expression: `first:last` or `first:stride:last`."""

    def __init__(self, n_first, t_first_colon, n_last, t_second_colon=None, n_stride=None):
        super().__init__()
        assert isinstance(n_first, Expression)
        assert isinstance(n_last, Expression)
        assert isinstance(t_first_colon, MATLAB_Token)
        # A second colon token and a stride expression must come together.
        if t_second_colon:
            assert isinstance(t_second_colon, MATLAB_Token)
            assert t_second_colon.kind == 'COLON'
            assert isinstance(n_stride, Expression)
        else:
            assert n_stride is None

        self.t_first_colon = t_first_colon
        self.t_first_colon.set_ast(self)
        self.t_second_colon = t_second_colon
        if self.t_second_colon:
            self.t_second_colon.set_ast(self)

        self.n_first = n_first
        self.n_first.set_parent(self)
        self.n_last = n_last
        self.n_last.set_parent(self)
        self.n_stride = n_stride
        if self.n_stride:
            self.n_stride.set_parent(self)

    def loc(self):
        # The whole range is anchored at its first colon token.
        return self.t_first_colon.location

    def visit(self, parent, function, relation):
        # Children are visited first/stride/last, mirroring textual order.
        self._visit(parent, function, relation)
        self.n_first.visit(self, function, 'First')
        if self.n_stride:
            self.n_stride.visit(self, function, 'Stride')
        self.n_last.visit(self, function, 'Last')
        self._visit_end(parent, function, relation)

    def __str__(self):
        if self.n_stride:
            return '%s:%s:%s' % (self.n_first, self.n_stride, self.n_last)
        return '%s:%s' % (self.n_first, self.n_last)
@pytest.mark.parametrize('range_header, exp_content_range, exp_content', [
    ('bytes=1-3', 'bytes 1-3/16', '123'),
    ('bytes=-3', 'bytes 13-15/16', 'def'),
    ('bytes=8-', 'bytes 8-15/16', '89abcdef'),
    ('words=1-3', None, 'abcdef'),
    ('bytes=15-30', 'bytes 15-15/16', 'f'),
    ('bytes=0-30', 'bytes 0-15/16', 'abcdef'),
    ('bytes=-30', 'bytes 0-15/16', 'abcdef'),
])  # NOTE(review): "@pytest.mark" reconstructed from stripped ".parametrize" fragments — confirm
@pytest.mark.parametrize('use_fallback', [True, False])
def test_range_requests(client, range_header, exp_content_range, exp_content, patch_open, monkeypatch, use_fallback):
    """Range requests to a static route return 206 slices, or 200 for non-byte units."""

    def validate(path):
        # Simulate a missing file so the index.html fallback kicks in.
        if use_fallback and (not path.endswith('index.html')):
            raise OSError(errno.ENOENT, 'File not found')

    patch_open(b'abcdef', validate=validate)
    monkeypatch.setattr('os.path.isfile', lambda file: file.endswith('index.html'))
    client.app.add_static_route('/downloads', '/opt/somesite/downloads', fallback_filename='index.html')
    response = client.simulate_request(path='/downloads/thing.zip', headers={'Range': range_header})
    if exp_content_range is None:
        # Unsupported range unit: full body with a plain 200.
        assert response.status == falcon.HTTP_200
    else:
        assert response.status == falcon.HTTP_206
    assert response.text == exp_content
    assert int(response.headers['Content-Length']) == len(exp_content)
    assert response.headers.get('Content-Range') == exp_content_range
    assert response.headers.get('Accept-Ranges') == 'bytes'
    if use_fallback:
        assert response.headers.get('Content-Type') == 'text/html'
    else:
        assert response.headers.get('Content-Type') in _MIME_ALTERNATIVE['application/zip']
class Constraints():
    """Holds inequality constraints over groups of binary functions."""

    def __init__(self, *args, **kwargs):
        self.binary = kwargs['binary']
        self.constraints = set()
        # Seed with one group covering every function in the binary.
        self.constraints.add(frozenset(self.binary.functions.functions))

    def to_json(self):
        """Serialize each constraint group as a '!=' clause over function ids."""
        query = []
        for group in self.constraints:
            ids = [node.id for node in group]
            query.append(OrderedDict([('cn', '!='), ('n', ids)]))
        return query
class PetersburgVM(ByzantiumVM):
    """Petersburg hard-fork VM configuration."""

    fork = 'petersburg'

    # Fork-specific classes.
    block_class: Type[BlockAPI] = PetersburgBlock
    _state_class: Type[StateAPI] = PetersburgState

    # Fork-specific methods.
    create_header_from_parent = staticmethod(create_petersburg_header_from_parent)
    compute_difficulty = staticmethod(compute_petersburg_difficulty)
    configure_header = configure_petersburg_header
    get_uncle_reward = staticmethod(get_uncle_reward(EIP1234_BLOCK_REWARD))

    # BUG FIX: the method had neither `self` nor @staticmethod, so calling it on
    # an instance raised TypeError; mark it static like its siblings above.
    @staticmethod
    def get_block_reward() -> int:
        return EIP1234_BLOCK_REWARD
def home(request):
    """Render the record list; on POST, attempt a login and redirect home."""
    records = Record.objects.all()
    if request.method != 'POST':
        return render(request, 'home.html', {'records': records})
    username = request.POST['username']
    password = request.POST['password']
    user = authenticate(request, username=username, password=password)
    if user is not None:
        login(request, user)
        messages.success(request, 'You Have Been Logged In!')
    else:
        messages.success(request, 'There Was An Error Logging In, Please Try Again...')
    # Both login outcomes land back on the home page.
    return redirect('home')
class OptionSeriesPieSonificationTracksMappingPlaydelay(Options):
    """Accessors for the play-delay mapping configuration.

    NOTE(review): every accessor below is defined twice (a getter form then a
    setter form) with no @property/@name.setter decorators, so the later
    definition silently shadows the earlier one and only the setter variants
    survive on the class.  Presumably decorators were lost upstream — confirm.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestFastaChromIterator(unittest.TestCase):
    """FastaChromIterator should yield one (name, sequence) pair per chromosome."""

    def test_fastachromiterator(self):
        stream = io.StringIO(fasta_data)
        fasta = FastaChromIterator(fp=stream)
        expected = (
            ('chr2L', 'Cgacaatgcacgacagagga\nagcagCTCAAGATAccttct'),
            ('chr2R', 'CTCAAGATAccttctacaga\nCgacaatgcacgacagagga'),
        )
        seen = 0
        for chrom, expt in zip(fasta, expected):
            seen += 1
            self.assertEqual(chrom, expt)
        # Guard against chromosomes being silently dropped by the iterator.
        self.assertEqual(seen, len(expected))
def test_should_erase_return_data_with_vm_error(computation):
    """A VMError raised inside the computation context must clear return_data."""
    assert computation.get_gas_remaining() == 100
    computation.return_data = b'\x1337'
    # The computation context swallows the VMError and records the failure.
    with computation:
        raise VMError('Triggered VMError for tests')
    assert computation.should_erase_return_data
    assert computation.return_data == b''
_stats()  # NOTE(review): looks like a stripped decorator (original likely `@<...>_stats()`) — confirm against upstream
def compile_model(tensor: Union[(Tensor, List[Tensor])], target: backend.target.Target, workdir: str, test_name: str, profile_devs: List[int]=None, dynamic_profiling_strategy: DynamicProfileStrategy=DynamicProfileStrategy.MAX, dll_name: str='test.so', num_runtimes: int=AIT_DEFAULT_NUM_RUNTIMES, profile_dir: str=None, constants: Optional[Dict[(str, TorchTensor)]]=None, allocator_kind: Optional[AITemplateAllocatorKind]=None, debug_settings: AITDebugSettings=_DEBUG_SETTINGS, do_optimize_graph: bool=True, profile_timeout: int=500) -> Model:
    """Compile the AIT graph rooted at `tensor` into a loadable Model (.so).

    Runs the full pipeline — graph transforms, op profiling, constant folding,
    memory planning, codegen and the native build — unless the AIT_RECOMPILE
    env var is 0, in which case the previously built dll under
    workdir/test_name is simply loaded.
    """
    if (constants is None):
        constants = {}
    # AIT_RECOMPILE=0 skips the whole build and just reloads the existing dll.
    recompile = os.getenv('AIT_RECOMPILE', '1')
    graph = None
    os.makedirs(workdir, exist_ok=True)
    # Commas are not filesystem-safe for the per-test directory name.
    test_name = test_name.replace(',', '_')
    test_dir = os.path.join(workdir, test_name)
    _LOGGER.info(f'Start to compile AIT model. test_dir={test_dir!r}')
    if (profile_dir is None):
        profile_dir = workdir
    if (int(recompile) == 1):
        os.makedirs(test_dir, exist_ok=True)
        with target:
            reset_name_counters()
            # --- graph normalization passes, each dumped for debugging ---
            graph = compiler.transform.toposort(tensor)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'toposort')
            output_tensors = ([tensor] if isinstance(tensor, Tensor) else tensor)
            _validate_tensor_args(graph, output_tensors)
            compiler.transform.bind_constants(graph, constants)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'bind_constants')
            compiler.transform.remove_unused_ops(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'remove_unused_ops')
            compiler.transform.remove_no_ops(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'remove_no_ops')
            compiler.transform.name_graph(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'name_graph')
            if debug_settings.dump_ait_to_py:
                # Optionally serialize the program as Python for inspection.
                dump_program(tensor, debug_settings.dump_ait_to_py)
            compiler.transform.dedup_symbolic_name(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'dedup_symbolic_name')
            compiler.transform.mark_param_tensor(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'mark_param_tensor')
            start_t = datetime.now()
            graph = compiler.transform.optimize_graph(graph, test_dir, optimize=do_optimize_graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'optimize_graph')
            _LOGGER.info(f'optimized graph elapsed time: {elapsed_dt_sec(start_t)}')
            compiler.transform.mark_special_views(graph)
            compiler.transform.refine_graph(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'refine_graph')
            if (profile_devs is None):
                # Fall back to the target's device-selection env var, else device 0.
                device_env = os.getenv(target.dev_select_flag(), None)
                if (device_env is None):
                    profile_devs = [0]
                else:
                    profile_devs = device_env.split(',')
            compiler.transform.profile(graph, profile_dir, profile_devs, dynamic_profiling_strategy, profile_timeout)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'profile')
            start_t = datetime.now()
            constant_folding_workdir = os.path.join(workdir, test_name)
            os.makedirs(constant_folding_workdir, exist_ok=True)
            (graph, constant_folding_file_pairs, constant_folding_inputs) = compiler.transform.constant_folding(graph, workdir, test_name)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'constant_folding')
            _LOGGER.info(f'folded constants elapsed time: {elapsed_dt_sec(start_t)}')
            compiler.transform.dedup_symbolic_name(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'dedup_symbolic_name')
            (max_blob, max_constant_blob, workspace) = compiler.transform.memory_planning(graph)
            _verify_outputs_still_in_graph(graph, output_tensors)
            _mark_isolated_int_vars(graph)
            graph_utils.dump_graph_debug_str_to_file(graph, test_dir, 'memory_planning')
            # --- code generation and native compilation ---
            file_pairs = backend.codegen.gen_function_src(graph, workdir, test_name)
            file_pairs.extend(constant_folding_file_pairs)
            # Transform passes may have replaced tensor objects; re-resolve the
            # requested outputs by name against the final graph.
            new_output_tensor_dict = {tensor._attrs['name']: tensor for tensor in graph if tensor._attrs['is_output']}
            output_tensors = ([tensor] if isinstance(tensor, Tensor) else tensor)
            output_tensors = [new_output_tensor_dict[tensor._attrs['name']] for tensor in output_tensors]
            main_pairs = backend.codegen.gen_library_src(graph, max_blob, max_constant_blob, workspace, workdir, output_tensors, test_name, additional_unbound_constants=constant_folding_inputs, debug_settings=debug_settings)
            file_pairs.extend(main_pairs)
            start_t = datetime.now()
            compile_engine = backend.builder.get_compile_engine()
            compile_engine.make(file_pairs, dll_name, workdir, test_name, debug_settings)
            _LOGGER.info(f'compiled the final .so file elapsed time: {elapsed_dt_sec(start_t)}')
    module = Model(os.path.join(workdir, test_name, dll_name), num_runtimes, allocator_kind)
    # graph is None when recompilation was skipped above.
    module.debug_sorted_graph = graph
    return module
def gpio_commands(cmd):
    """Dispatch a comma-separated GPIO command string (gpio/pwm/pulse/longpulse/tone/rtttl/servo/status).

    Returns a status string from gpios.GPIO_get_status when available,
    True when the command was recognized, or False otherwise.
    """
    res = False
    cmdarr = cmd.split(',')
    cmdarr[0] = cmdarr[0].strip().lower()
    if cmdarr[0] == 'gpio':
        pin = -1
        val = -1
        gi = -1
        logline = ''
        try:
            pin = int(cmdarr[1].strip())
            val = int(cmdarr[2].strip())
        except:
            pin = -1
        if (pin > -1) and (val in [0, 1]):
            logline = 'BCM' + str(pin) + ' set to ' + str(val)
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, logline)
            suc = False
            try:
                suc = True
                gpios.HWPorts.output(pin, val)
                syncvalue(pin, val)
                gi = gpios.GPIO_refresh_status(pin, pstate=val, pluginid=1, pmode='output', logtext=logline)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BCM' + str(pin) + ': ' + str(e))
                suc = False
            if gi > -1:
                return gpios.GPIO_get_status(gi)
        res = True
    elif cmdarr[0] == 'pwm':
        pin = -1
        prop = -1
        gi = -1
        logline = ''
        try:
            pin = int(cmdarr[1].strip())
            prop = int(float(cmdarr[2].strip()))
        except:
            pin = -1
            prop = -1
        fade = 0
        try:
            fade = int(float(cmdarr[3].strip()))
        except:
            fade = 0
        freq = 1000
        try:
            freq = int(float(cmdarr[4].strip()))
        except:
            freq = 1000
        if (pin > -1) and (prop > -1):
            suc = False
            try:
                suc = True
                if fade == 0:
                    gpios.HWPorts.output_pwm(pin, prop, freq)
                    logline = 'BCM' + str(pin) + ' PWM ' + str(prop) + '% ' + str(freq) + 'Hz'
                    misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, logline)
                else:
                    # Fade from the previous duty cycle to the target in `fade` ms,
                    # using <<12 fixed-point arithmetic for the intermediate steps.
                    cs = gpios.GPIO_get_statusid(pin)
                    prev_value = 0
                    try:
                        if cs > -1:
                            if gpios.GPIOStatus[cs]['mode'] == 'pwm':
                                prev_value = int(gpios.GPIOStatus[cs]['state'])
                    except:
                        prev_value = 0
                    step_value = (int(prop - prev_value) << 12) / fade
                    curr_value = int(prev_value) << 12
                    i = fade
                    while i > 0:
                        curr_value += step_value
                        new_value = int(curr_value) >> 12
                        gpios.HWPorts.output_pwm(pin, new_value, freq)
                        time.sleep(0.001)
                        i -= 1
                    gpios.HWPorts.output_pwm(pin, prop, freq)
                gi = gpios.GPIO_refresh_status(pin, pstate=prop, pluginid=1, pmode='pwm', logtext=logline)
                syncpwm(pin, prop)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BCM' + str(pin) + ' PWM ' + str(e))
                suc = False
            if gi > -1:
                return gpios.GPIO_get_status(gi)
        res = True
    elif cmdarr[0] == 'pulse':
        pin = -1
        val = -1
        gi = -1
        logline = ''
        try:
            pin = int(cmdarr[1].strip())
            val = int(float(cmdarr[2].strip()))
        except:
            pin = -1
        dur = 100
        try:
            dur = float(cmdarr[3].strip())
        except:
            dur = 100
        if (pin > -1) and (val in [0, 1]):
            logline = 'BCM' + str(pin) + ': Pulse started'
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, logline)
            try:
                # Drive the pin, hold for `dur` ms, then restore the inverse level.
                syncvalue(pin, val)
                gpios.HWPorts.output(pin, val)
                s = dur / 1000
                time.sleep(s)
                gpios.HWPorts.output(pin, 1 - val)
                syncvalue(pin, 1 - val)
                gi = gpios.GPIO_refresh_status(pin, pstate=(1 - val), pluginid=1, pmode='output', logtext=logline)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BCM' + str(pin) + ': ' + str(e))
                suc = False
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'BCM' + str(pin) + ': Pulse ended')
            if gi > -1:
                return gpios.GPIO_get_status(gi)
        res = True
    elif cmdarr[0] == 'longpulse':
        pin = -1
        val = -1
        gi = -1
        logline = ''
        try:
            pin = int(cmdarr[1].strip())
            val = int(float(cmdarr[2].strip()))
        except:
            pin = -1
        dur = 2
        try:
            dur = float(float(cmdarr[3].strip()))
        except:
            dur = 2
        if (pin > -1) and (val in [0, 1]):
            logline = 'BCM' + str(pin) + ': LongPulse started'
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, logline)
            try:
                gpios.HWPorts.output(pin, val)
                syncvalue(pin, val)
                gi = gpios.GPIO_refresh_status(pin, pstate=val, pluginid=1, pmode='output', logtext=logline)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BCM' + str(pin) + ': ' + str(e))
                suc = False
            # Restore the inverse level asynchronously after `dur` seconds.
            rarr = [pin, 1 - val]
            rpieTime.addsystemtimer(dur, timercb, rarr)
            if gi > -1:
                return gpios.GPIO_get_status(gi)
        res = True
    elif cmdarr[0] == 'tone':
        pin = -1
        freq = -1
        dur = 0
        gi = -1
        logline = ''
        try:
            pin = int(cmdarr[1].strip())
            freq = int(cmdarr[2].strip())
            dur = int(cmdarr[3].strip())
        except:
            pin = -1
            freq = -1
            dur = 0
        if (pin > -1) and (freq > -1) and (dur > 0):
            suc = False
            try:
                suc = True
                logline = 'BCM' + str(pin) + ' ' + str(freq) + 'Hz'
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, logline)
                play_tone(pin, freq, dur)
                gpios.HWPorts.output_pwm(pin, 0, 0)
                gi = gpios.GPIO_refresh_status(pin, pstate=0, pluginid=1, pmode='pwm', logtext=logline)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BCM' + str(pin) + ' Tone ' + str(e))
                suc = False
            if gi > -1:
                return gpios.GPIO_get_status(gi)
        res = True
    elif cmdarr[0] == 'rtttl':
        # The melody itself contains commas, so re-split on ':' boundaries too.
        cmdarr = cmd.replace(':', ',').split(',')
        pin = -1
        gi = -1
        logline = ''
        try:
            pin = int(cmdarr[1].strip())
        except:
            pin = -1
        if pin > -1:
            suc = False
            try:
                sp = cmd.find(':')
                if sp > -1:
                    # Play the ringtone on a background daemon thread.
                    rtproc = threading.Thread(target=play_rtttl, args=(pin, 't' + cmd[sp:]))
                    rtproc.daemon = True
                    rtproc.start()
                    suc = True
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, str(e))
                suc = False
        res = True
    elif cmdarr[0] == 'servo':
        snr = -1
        pin = -1
        pos = -1
        gi = -1
        logline = ''
        try:
            snr = int(cmdarr[1].strip())
            pin = int(cmdarr[2].strip())
            pos = int(float(cmdarr[3].strip()))
        except:
            snr = -1
            pin = -1
            pos = 0
        if (snr > -1) and (pin > -1) and (pos > 0):
            suc = False
            try:
                suc = True
                logline = 'BCM' + str(pin) + ' to servo ' + str(pos) + ' angle'
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, logline)
                setservoangle(pin, pos)
                gi = gpios.GPIO_refresh_status(pin, pstate=0, pluginid=1, pmode='servo', logtext=logline)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BCM' + str(pin) + ' Servo ' + str(e))
                suc = False
            if gi > -1:
                return gpios.GPIO_get_status(gi)
        res = True
    elif cmdarr[0] == 'status':
        pin = -1
        subcmd = ''
        try:
            subcmd = str(cmdarr[1].strip()).lower()
            pin = int(cmdarr[2].strip())
        except:
            # BUG FIX: the original did `print(e)` here, but a bare `except:`
            # does not bind `e`, so any parse failure raised NameError.
            pin = -1
        if (pin > -1) and (subcmd == 'gpio'):
            gi = gpios.GPIO_refresh_status(pin)
            if gi > -1:
                return gpios.GPIO_get_status(gi)
        res = True
    return res
def parse_rpn(rpn) -> Root:
    """Build an expression AST from a space-separated reverse-polish string."""
    from ast import literal_eval
    operands = []
    for token in rpn.split(' '):
        if token == '+':
            right, left = operands.pop(), operands.pop()
            operands.append(Addition(left, right))
        elif token == '*':
            right, left = operands.pop(), operands.pop()
            operands.append(Multiplication(left, right))
        elif token == 'SUM':
            # SUM consumes the whole stack, in push order.
            operands.reverse()
            operands = [Sum(*operands)]
        else:
            value = literal_eval(token)
            assert type(value) in [int, float]
            operands.append(Constant(value))
    assert len(operands) == 1
    return Root(operands[0])
def test_verify_location_message(base_message):
    """verify() accepts float coordinates and rejects wrongly-typed ones."""
    msg = base_message
    msg.type = MsgType.Location
    msg.attributes = LocationAttribute(latitude=0.0, longitude=0.0)
    msg.verify()
    # A string latitude must be rejected and named in the error.
    with pytest.raises(AssertionError) as exec_info:
        msg.attributes = LocationAttribute(latitude='0.0', longitude=1.0)
        msg.verify()
    assert 'Latitude' in exec_info.value.args[0]
    # An integer longitude must be rejected and named in the error.
    with pytest.raises(AssertionError) as exec_info:
        msg.attributes = LocationAttribute(latitude=1.0, longitude=10)
        msg.verify()
    assert 'Longitude' in exec_info.value.args[0]
def remove_from_parent_config(zone_key: ZoneKey):
    """Drop `zone_key` from its parent zone's subZoneNames and re-format the file."""
    parent = zone_key.split('-')[0]
    parent_config_path = ROOT_PATH / f'config/zones/{parent}.yaml'
    if not parent_config_path.exists():
        return
    with YamlFilePatcher(parent_config_path) as f:
        sub_zone_names = f.content['subZoneNames']
        if zone_key in sub_zone_names:
            sub_zone_names.remove(zone_key)
    # Keep the YAML formatted consistently with the rest of the repository.
    run_shell_command(f'npx --config {PRETTIER_CONFIG_PATH} --write {parent_config_path}', cwd=ROOT_PATH)
class ActiveUserSelectorUtils():
    """Stateless helpers for probabilistic active-user selection in FL rounds."""

    # BUG FIX (applies to every method below): these helpers take no `self`,
    # so without @staticmethod any instance-bound call would raise TypeError.

    @staticmethod
    def convert_to_probability(user_utility: torch.Tensor, fraction_with_zero_prob: float, softmax_temperature: float, weights=None) -> torch.Tensor:
        """Softmax user utilities into probabilities, zeroing out the lowest fraction."""
        if weights is None:
            weights = torch.ones(len(user_utility), dtype=torch.float)
        num_to_zero_out = math.floor(fraction_with_zero_prob * len(user_utility))
        sorted_indices = torch.argsort(user_utility, descending=True).tolist()
        unnormalized_probs = torch.exp(softmax_temperature * user_utility) * weights
        if num_to_zero_out > 0:
            # The tail of the descending sort holds the lowest-utility users.
            for i in sorted_indices[-num_to_zero_out:]:
                unnormalized_probs[i] = 0
        tmp_sum = sum(unnormalized_probs.tolist())
        assert tmp_sum > 0
        normalized_probs = unnormalized_probs / tmp_sum
        return normalized_probs

    @staticmethod
    def normalize_by_sample_count(user_utility: torch.Tensor, user_sample_counts: torch.Tensor, averaging_exponent: float) -> torch.Tensor:
        """Damp utilities of users with many samples via counts ** -exponent."""
        sample_averaging_weights = 1 / torch.pow(user_sample_counts, averaging_exponent)
        return sample_averaging_weights * user_utility

    @staticmethod
    def samples_per_user(data_provider: 'IFLDataProvider') -> torch.Tensor:
        """Return a float tensor with the number of training examples per user."""
        counts = [data_provider.get_train_user(u).num_train_examples() for u in data_provider.train_user_ids()]
        return torch.tensor(counts, dtype=torch.float)

    @staticmethod
    def select_users(users_per_round: int, probs: torch.Tensor, fraction_uniformly_random: float, rng: Any) -> List[int]:
        """Pick users: a uniform-random fraction plus the rest drawn by probability."""
        num_total_users = len(probs)
        num_randomly_selected = math.floor(users_per_round * fraction_uniformly_random)
        num_actively_selected = users_per_round - num_randomly_selected
        assert len(torch.nonzero(probs)) >= num_actively_selected
        if num_actively_selected > 0:
            actively_selected_indices = torch.multinomial(probs, num_actively_selected, replacement=False, generator=rng).tolist()
        else:
            actively_selected_indices = []
        if num_randomly_selected > 0:
            # Uniform draw over everyone not already actively selected.
            tmp_probs = torch.tensor([0 if x in actively_selected_indices else 1 for x in range(num_total_users)], dtype=torch.float)
            randomly_selected_indices = torch.multinomial(tmp_probs, num_randomly_selected, replacement=False, generator=rng).tolist()
        else:
            randomly_selected_indices = []
        return actively_selected_indices + randomly_selected_indices

    @staticmethod
    def sample_available_users(users_per_round: int, available_users: List[int], rng: torch.Generator) -> List[int]:
        """Uniformly sample users_per_round users (all of them if fewer exist)."""
        if users_per_round >= len(available_users):
            return copy.copy(available_users)
        selected_indices = torch.multinomial(torch.ones(len(available_users), dtype=torch.float), users_per_round, replacement=False, generator=rng).tolist()
        return [available_users[idx] for idx in selected_indices]
def greet_user():
    """Greet a returning user from stored JSON, or prompt for and store a new one."""
    path = Path('user_info.json')
    user_dict = get_stored_user_info(path)
    if not user_dict:
        # First visit: collect details and persist them for next time.
        user_dict = get_new_user_info(path)
        print(f"We'll remember you when you return, {user_dict['username']}!")
        return
    print(f"Welcome back, {user_dict['username']}!")
    print(f"Hope you've been playing some {user_dict['game']}. ")
    print(f"Have you seen a {user_dict['animal']} recently?")
class BaseTemplateAttack(BasePartitionedAttack):
    """Template attack base: builds per-partition templates from a dedicated
    "building" container, then scores matching traces against them.

    NOTE(review): relies on project types (``BasePartitionedAttack``,
    ``_container.Container``, ``_TemplateBuildAnalysis``) not visible here.
    """

    def __init__(self, container_building, selection_function, reverse_selection_function, model, convergence_step=None, partitions=None, precision='float32'):
        # The discriminant is the identity: template matching scores are
        # already directly comparable, so they are used as-is.
        super().__init__(selection_function=selection_function, model=model, precision=precision, discriminant=(lambda scores: scores), convergence_step=convergence_step)
        self._init_template(partitions=partitions, reverse_selection_function=reverse_selection_function, container_building=container_building)

    def build(self):
        """Run the build analysis on the building container and cache its
        partitions, templates and pooled covariance data on this instance."""
        logger.debug(f'Build analysis object: {self._build_analysis}.')
        self._build_analysis.run(self.container_building)
        self.partitions = self._build_analysis.partitions
        self.templates = self._build_analysis.results
        self.pooled_covariance_inv = self._build_analysis.pooled_covariance_inv
        self.pooled_covariance = self._build_analysis.pooled_covariance
        # Flag consulted before the matching phase may start.
        self.is_build = True

    def _init_template(self, partitions, reverse_selection_function, container_building):
        """Validate the building container and prepare the build analysis."""
        # Normalize/validate partitions via the shared partitioned helper.
        distinguishers.partitioned._set_partitions(self, partitions)
        if (not isinstance(container_building, _container.Container)):
            raise TypeError('TemplateAttack must be instantiated with a `Container` instance for building phase.')
        self.container_building = container_building
        self._build_analysis = _TemplateBuildAnalysis(selection_function=reverse_selection_function, model=self.model, partitions=self.partitions, precision=self.precision)
        self.is_build = False
def process_specimen(fasm_file, params_json):
    """Cross-check IOB parameters decoded from a FASM file against the
    expected parameters recorded in ``params_json``.

    Returns the number of (tile, site) pairs verified. Raises AssertionError
    on any mismatch (this function is a test-harness check, so asserts are
    the intended failure mechanism).
    """
    sites, diff_tiles = create_sites_from_fasm(fasm_file)
    with open(params_json) as f:
        params = json.load(f)
    count = 0
    for p in params['tiles']:
        tile = p['tile']
        for site in p['site'].split(' '):
            # Only the Y parity of the site index matters for IOB pairing.
            site_y = int(site[site.find('Y') + 1:]) % 2
            if generate.skip_broken_tiles(p):
                continue
            site_key = 'IOB_Y{}'.format(site_y)
            if (tile, site_key) not in sites:
                # Nothing decoded for this site: the specimen must be unused.
                assert p['type'] is None, p
                continue
            site_from_fasm = sites[(tile, site_key)]
            if site_y == 0 or tile not in diff_tiles:
                assert p['type'] in site_from_fasm['type'], (tile, site_key, p['type'], site_from_fasm['type'])
            else:
                # The Y1 side of a differential pair carries no type itself.
                assert p['type'] is None, p
            if p['type'] is None:
                continue
            assert 'PULLTYPE' in p, p
            assert 'PULLTYPE' in site_from_fasm, site_from_fasm
            # An empty PULLTYPE means the default of NONE.
            if verilog.unquote(p['PULLTYPE']) == '':
                pulltype = verilog.quote('NONE')
            else:
                pulltype = p['PULLTYPE']
            assert pulltype == site_from_fasm['PULLTYPE'], (tile, site_key, p, site_from_fasm)
            assert 'IOSTANDARDS' in site_from_fasm, (tile, site)
            iostandard = verilog.unquote(p['IOSTANDARD'])
            # DIFF_ variants share the base standard's FASM features.
            if iostandard.startswith('DIFF_'):
                iostandard = iostandard[5:]
            assert iostandard in site_from_fasm['IOSTANDARDS'], (p['IOSTANDARD'], site_from_fasm['IOSTANDARDS'])
            if p['type'] not in ['IBUF', 'IBUFDS']:
                # An empty SLEW means the default of SLOW.
                if verilog.unquote(p['SLEW']) == '':
                    slew = verilog.quote('SLOW')
                else:
                    slew = p['SLEW']
                assert slew == site_from_fasm['SLEW'], (tile, site_key, p, site_from_fasm)
            assert 'DRIVES' not in p, p
            assert 'DRIVES' in site_from_fasm, (tile, site, p['type'], site_from_fasm)
            if p['DRIVE'] is None:
                assert None in site_from_fasm['DRIVES'], (tile, site_key, p['DRIVE'], site_from_fasm['DRIVES'])
            elif p['DRIVE'] == '':
                # Fix: was `p['DRIVE'] is ''` — identity comparison against a
                # str literal is implementation-dependent (SyntaxWarning on
                # CPython >= 3.8); equality is the intended check.
                if None in site_from_fasm['DRIVES']:
                    pass
                else:
                    # Empty DRIVE falls back to the default drive of I12.
                    assert 'I12' in site_from_fasm['DRIVES'], (tile, site_key, p['DRIVE'], site_from_fasm['DRIVES'])
            else:
                assert 'I{}'.format(p['DRIVE']) in site_from_fasm['DRIVES'], (tile, site_key, p['DRIVE'], site_from_fasm['DRIVES'])
            count += 1
    return count
def init(model: Model[(SeqT, SeqT)], X: Optional[SeqT]=None, Y: Optional[SeqT]=None) -> None:
    """Initialize the wrapped array layer, converting the sequence samples to
    arrays first, then copy every dimension the inner layer inferred back
    onto the outer model."""
    inner: Model[(ArrayXd, ArrayXd)] = model.layers[0]
    X_array = None if X is None else _get_array(model, X)
    Y_array = None if Y is None else _get_array(model, Y)
    inner.initialize(X=X_array, Y=Y_array)
    for dim_name in inner.dim_names:
        dim_value = inner.maybe_get_dim(dim_name)
        if dim_value is not None:
            model.set_dim(dim_name, dim_value)
class TestAttribute(util.TestCase):
    """Attribute selector tests: presence, equality (quoting, case, escapes,
    NULL handling), ``~=`` word matching, ``|=`` dash matching, document-type
    dependent behavior, and non-string attribute values.

    NOTE(review): several markup strings contain ``href=" <span ...`` — the
    URL values appear to have been stripped from this copy of the file;
    confirm the fixtures against the original test module.
    """

    # Base document shared by most equality tests (ids 0-6 plus "div"/"pre").
    MARKUP = '\n    <div id="div">\n    <p id="0">Some text <span id="1"> in a paragraph</span>.</p>\n    <a id="2" href=" <span id="3">Direct child</span>\n    <pre id="pre">\n    <span id="4">Child 1</span>\n    <span id="5">Child 2</span>\n    <span id="6">Child 3</span>\n    </pre>\n    </div>\n    '

    # Variant with multi-word `class` attributes for the `~=` tests.
    MARKUP_CONTAINS = '\n    <div id="div">\n    <p id="0" class="test1 test2 test3">Some text <span id="1"> in a paragraph</span>.</p>\n    <a id="2" href=" <span id="3">Direct child</span>\n    <pre id="pre" class="test-a test-b">\n    <span id="4">Child 1</span>\n    <span id="5">Child 2</span>\n    <span id="6">Child 3</span>\n    </pre>\n    </div>\n    '

    # Variant used by the NULL-character tests.
    MARKUP_NULL = '\n    <div id="div">\n    <p id="0">Some text <span id="1"> in a paragraph</span>.</p>\n    <a id="2" href=" <span id="3">Direct child</span>\n    <pre id="pre">\n    <span id="4">Child 1</span>\n    <span id="5">Child 2</span>\n    <span id="6">Child 3</span>\n    </pre>\n    </div>\n    '

    def test_attribute(self):
        """Bare attribute presence selector."""
        self.assert_selector(self.MARKUP, '[href]', ['2'], flags=util.HTML)

    def test_attribute_with_spaces(self):
        """Whitespace inside the brackets is insignificant."""
        self.assert_selector(self.MARKUP, '[ href ]', ['2'], flags=util.HTML)

    def test_multi_attribute(self):
        """Several attribute selectors can be chained on one element."""
        self.assert_selector('\n            <div id="div">\n            <p id="0">Some text <span id="1"> in a paragraph</span>.</p>\n            <a id="2" href=" <span id="3">Direct child</span>\n            <pre id="pre">\n            <span id="4" class="test">Child 1</span>\n            <span id="5" class="test" data-test="test">Child 2</span>\n            <span id="6">Child 3</span>\n            <span id="6">Child 3</span>\n            </pre>\n            </div>\n            ', 'span[id].test[data-test=test]', ['5'], flags=util.HTML)

    def test_attribute_equal_no_quotes(self):
        # `\35` is the CSS escape for the character "5".
        self.assert_selector(self.MARKUP, '[id=\\35]', ['5'], flags=util.HTML)

    def test_attribute_equal_with_quotes(self):
        self.assert_selector(self.MARKUP, "[id='5']", ['5'], flags=util.HTML)

    def test_attribute_equal_with_double_quotes(self):
        self.assert_selector(self.MARKUP, '[id="5"]', ['5'], flags=util.HTML)

    def test_attribute_equal_quotes_and_spaces(self):
        # NOTE(review): this selector spanned a mangled line break in this
        # copy; reconstructed as a single spaced selector — confirm upstream.
        self.assert_selector(self.MARKUP, '[ id = "5" ]', ['5'], flags=util.HTML)

    def test_attribute_equal_case_insensitive_attribute(self):
        """Attribute names are case-insensitive in HTML."""
        self.assert_selector(self.MARKUP, '[ID="5"]', ['5'], flags=util.HTML)

    def test_attribute_bad(self):
        """A different attribute name must not match."""
        self.assert_selector('<span bad="5"></span>', '[ id = "5" ]', [], flags=util.HTML)

    def test_attribute_escaped_newline(self):
        """An escaped newline inside a CSS string is ignored."""
        self.assert_selector(self.MARKUP, '[id="pr\\\ne"]', ['pre'], flags=util.HTML)

    def test_attribute_equal_literal_null(self):
        self.assert_selector(self.MARKUP_NULL, '[id="\x00pre"]', ['pre'], flags=util.HTML)

    def test_attribute_equal_escaped_null(self):
        self.assert_selector(self.MARKUP_NULL, '[id="\\0 pre"]', ['pre'], flags=util.HTML)

    def test_invalid_tag(self):
        """A type selector after an attribute selector is a syntax error."""
        self.assert_raises('[href]p', SelectorSyntaxError)

    def test_malformed(self):
        self.assert_raises('div[attr={}]', SelectorSyntaxError)

    def test_attribute_type_html(self):
        """`type` attribute values compare case-insensitively in HTML."""
        markup = '\n        <html>\n        <body>\n        <div id="div">\n        <p type="TEST" id="0">Some text <span id="1"> in a paragraph</span>.</p>\n        <a type="test" id="2" href=" <span id="3">Direct child</span>\n        <pre id="pre">\n        <span id="4">Child 1</span>\n        <span id="5">Child 2</span>\n        <span id="6">Child 3</span>\n        </pre>\n        </div>\n        </body>\n        </html>\n        '
        self.assert_selector(markup, '[type="test"]', ['0', '2'], flags=util.HTML)

    def test_attribute_type_xml(self):
        """...but case-sensitively in XML."""
        markup = '\n        <html>\n        <body>\n        <div id="div">\n        <p type="TEST" id="0">Some text <span id="1"> in a paragraph</span>.</p>\n        <a type="test" id="2" href=" <span id="3">Direct child</span>\n        <pre id="pre">\n        <span id="4">Child 1</span>\n        <span id="5">Child 2</span>\n        <span id="6">Child 3</span>\n        </pre>\n        </div>\n        </body>\n        </html>\n        '
        self.assert_selector(markup, '[type="test"]', ['2'], flags=util.XML)

    def test_attribute_type_xhtml(self):
        """...and case-sensitively in XHTML as well."""
        markup = '\n        <?xml version="1.0" encoding="UTF-8"?>\n        <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"\n        " <html lang="en" xmlns=" <head>\n        </head>\n        <body>\n        <div id="div">\n        <p type="TEST" id="0">Some text <span id="1"> in a paragraph</span>.</p>\n        <a type="test" id="2" href=" <span id="3">Direct child</span>\n        <pre id="pre">\n        <span id="4">Child 1</span>\n        <span id="5">Child 2</span>\n        <span id="6">Child 3</span>\n        </pre>\n        </div>\n        </body>\n        </html>\n        '
        self.assert_selector(markup, '[type="test"]', ['2'], flags=util.XHTML)

    def test_attribute_start_dash(self):
        """`|=` matches the value or a dash-separated prefix of it."""
        self.assert_selector('\n            <div id="div">\n            <p id="0" lang="en-us">Some text <span id="1"> in a paragraph</span>.</p>\n            <a id="2" href=" <span id="3">Direct child</span>\n            <pre id="pre">\n            <span id="4">Child 1</span>\n            <span id="5">Child 2</span>\n            <span id="6">Child 3</span>\n            </pre>\n            </div>\n            ', '[lang|=en]', ['0'], flags=util.HTML)

    def test_attribute_contains_space_middle(self):
        self.assert_selector(self.MARKUP_CONTAINS, '[class~=test2]', ['0'], flags=util.HTML)

    def test_attribute_contains_space_start(self):
        self.assert_selector(self.MARKUP_CONTAINS, '[class~=test-a]', ['pre'], flags=util.HTML)

    def test_attribute_contains_space_end(self):
        self.assert_selector(self.MARKUP_CONTAINS, '[class~=test-b]', ['pre'], flags=util.HTML)

    def test_attribute_contains_cannot_have_spaces(self):
        """`~=` matches single words only; values with spaces never match."""
        self.assert_selector(self.MARKUP_CONTAINS, '[class~="test1 test2"]', [], flags=util.HTML)

    def test_attribute_contains_cannot_have_empty(self):
        self.assert_selector(self.MARKUP_CONTAINS, '[class~=""]', [], flags=util.HTML)

    def test_attribute_contains_cannot_have_escaped_spaces(self):
        self.assert_selector(self.MARKUP_CONTAINS, '[class~="test1\\ test2"]', [], flags=util.HTML)

    def test_none_inputs(self):
        """A None attribute value still counts as attribute presence."""
        soup = BeautifulSoup('<span>text</span>', 'html.parser')
        soup.span['foo'] = None
        self.assertEqual(len(soup.select('span[foo]')), 1)

    def test_numeric_inputs(self):
        """Numeric attribute values are compared via their string form."""
        soup = BeautifulSoup('<span>text</span>', 'html.parser')
        soup.span['foo'] = 3
        self.assertEqual(len(soup.select('span[foo="3"]')), 1)
        soup.span['foo'] = 3.3
        self.assertEqual(len(soup.select('span[foo="3.3"]')), 1)

    def test_sequence_inputs(self):
        """Sequence values are joined with spaces before comparison."""
        soup = BeautifulSoup('<span>text</span>', 'html.parser')
        soup.span['foo'] = [3, '4']
        self.assertEqual(len(soup.select('span[foo="3 4"]')), 1)

    def test_bytes_inputs(self):
        soup = BeautifulSoup('<span>text</span>', 'html.parser')
        soup.span['foo'] = b'test'
        self.assertEqual(len(soup.select('span[foo="test"]')), 1)

    def test_weird_inputs(self):
        """Unrecognized value types fall back to their repr/str form."""
        soup = BeautifulSoup('<span>text</span>', 'html.parser')
        soup.span['foo'] = {'3': '4'}
        self.assertEqual(len(soup.select('span[foo="{\'3\': \'4\'}"]')), 1)

    def test_nested_sequences(self):
        """Nested sequences are not a valid attribute value."""
        with self.assertRaises(Exception):
            soup = BeautifulSoup('<span>text</span>', 'html.parser')
            soup.span['foo'] = [['1']]
            soup.select("span['foo']")
def to_2d(arr: np.ndarray, axis: int) -> np.ndarray:
    """Reshape ``arr`` to 2-D, keeping ``axis`` as the last dimension.

    All other axes are flattened (in order) into the first dimension, so the
    result has shape ``(arr.size // arr.shape[axis], arr.shape[axis])``.

    Raises:
        ValueError: if ``axis`` is out of range for ``arr``. (Fix: the
            original only checked the upper bound, so a very negative ``axis``
            slipped past validation and failed later inside ``transpose``.)
    """
    arr = np.asarray(arr)
    if axis < 0:
        axis += arr.ndim
    if not (0 <= axis < arr.ndim):
        raise ValueError(f'axis {axis} is out of array axes {arr.ndim}')
    # Move the kept axis to the end, then collapse the leading axes.
    tr_axes = [ax for ax in range(arr.ndim) if ax != axis] + [axis]
    new_shape = (arr.size // arr.shape[axis], arr.shape[axis])
    return arr.transpose(tr_axes).reshape(new_shape)
class OptionPlotoptionsPolygonSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Generated options wrapper for the Highcharts path
    ``plotOptions.polygon.sonification.defaultInstrumentOptions.mapping.frequency``.

    NOTE(review): every option appears as a getter/setter pair sharing one
    name; as plain methods the second definition shadows the first, which
    strongly suggests ``@property`` / ``@<name>.setter`` decorators were lost
    in this copy — confirm against the code generator before relying on the
    getters.
    """

    def mapFunction(self):
        # Getter: configured mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain value, not emitted as JavaScript.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the frequency is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped frequency range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped frequency range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope the min/max mapping applies within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class FaucetUntaggedSameVlanIPv6RouteTest(FaucetUntaggedTest):
    """Integration test: two static IPv6 routes whose gateways live in the
    same untagged VLAN; each host must reach the other host and both FAUCET
    VIPs through the routed path."""

    # VLAN 100 carries two VIPs and one static route per host network.
    CONFIG_GLOBAL = '\nvlans:\n    100:\n        description: "untagged"\n        faucet_vips: ["fc00::10:1/112", "fc00::20:1/112"]\n        routes:\n            - route:\n                ip_dst: "fc00::10:0/112"\n                ip_gw: "fc00::10:2"\n            - route:\n                ip_dst: "fc00::20:0/112"\n                ip_gw: "fc00::20:2"\n'
    # Short ND/resolve timers keep the test fast.
    CONFIG = ('\n        nd_neighbor_timeout: 2\n        max_resolve_backoff_time: 1\n' + CONFIG_BOILER_UNTAGGED)

    def test_untagged(self):
        """Ping in both directions across the routed VLAN."""
        (first_host, second_host) = self.hosts_name_ordered()[:2]
        first_host_ip = ipaddress.ip_interface('fc00::10:2/112')
        first_host_ctrl_ip = ipaddress.ip_address('fc00::10:1')
        second_host_ip = ipaddress.ip_interface('fc00::20:2/112')
        second_host_ctrl_ip = ipaddress.ip_address('fc00::20:1')
        self.add_host_ipv6_address(first_host, first_host_ip)
        self.add_host_ipv6_address(second_host, second_host_ip)
        # Each host routes to the other's network via its local VIP.
        self.add_host_route(first_host, second_host_ip, first_host_ctrl_ip)
        self.add_host_route(second_host, first_host_ip, second_host_ctrl_ip)
        # Wait until FAUCET has installed flows for both host routes.
        self.wait_for_route_as_flow(first_host.MAC(), first_host_ip.network)
        self.wait_for_route_as_flow(second_host.MAC(), second_host_ip.network)
        self.one_ipv6_ping(first_host, second_host_ip.ip)
        self.one_ipv6_ping(first_host, second_host_ctrl_ip)
        self.one_ipv6_ping(second_host, first_host_ip.ip)
        self.one_ipv6_ping(second_host, first_host_ctrl_ip)
class TestOFPActionVlanVid(unittest.TestCase):
    """Unit tests for ``OFPActionVlanVid`` parse/serialize round-trips.

    NOTE(review): the bare ``(AssertionError)`` expressions before the two
    "check" tests look like stripped ``@raises(AssertionError)`` decorators;
    without them those tests do not assert the expected failure — confirm
    against the original test module.
    """

    # Wire-format fixture: action type, length, vlan_vid and 2 pad bytes.
    type_ = {'buf': b'\x00\x01', 'val': ofproto.OFPAT_SET_VLAN_VID}
    len_ = {'buf': b'\x00\x08', 'val': ofproto.OFP_ACTION_VLAN_VID_SIZE}
    vlan_vid = {'buf': b'<\x0e', 'val': 15374}
    zfill = (b'\x00' * 2)
    buf = (((type_['buf'] + len_['buf']) + vlan_vid['buf']) + zfill)
    c = OFPActionVlanVid(vlan_vid['val'])

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        eq_(self.vlan_vid['val'], self.c.vlan_vid)

    def test_parser(self):
        # Parsing the canonical buffer recovers the vlan_vid.
        res = self.c.parser(self.buf, 0)
        eq_(self.vlan_vid['val'], res.vlan_vid)

    (AssertionError)
    def test_parser_check_type(self):
        # A buffer with the wrong action type must be rejected by parser().
        type_ = {'buf': b'\x00\x02', 'val': 2}
        buf = (((type_['buf'] + self.len_['buf']) + self.vlan_vid['buf']) + self.zfill)
        self.c.parser(buf, 0)

    (AssertionError)
    def test_parser_check_len(self):
        # A buffer with the wrong action length must be rejected by parser().
        len_ = {'buf': b'\x00\x07', 'val': 7}
        buf = (((self.type_['buf'] + len_['buf']) + self.vlan_vid['buf']) + self.zfill)
        self.c.parser(buf, 0)

    def test_serialize(self):
        # Serialization must reproduce type, length and vlan_vid.
        buf = bytearray()
        self.c.serialize(buf, 0)
        fmt = ofproto.OFP_ACTION_VLAN_VID_PACK_STR
        res = struct.unpack(fmt, six.binary_type(buf))
        eq_(self.type_['val'], res[0])
        eq_(self.len_['val'], res[1])
        eq_(self.vlan_vid['val'], res[2])
# NOTE(review): the bare string expression below looks like a stripped behave
# `@then('devo ter a seguinte tarefa para fazer')` decorator — confirm
# against the original step file before running.
('devo ter a seguinte tarefa para fazer')
def checar_se_tarefa_esta_para_ser_feita(context):
    """Behave step: assert the task described by the feature table appears in
    the API response (ignoring the server-assigned id)."""
    feature_table = context.table['0']
    tarefa = {}
    tarefa['title'] = feature_table['nome']
    tarefa['description'] = feature_table['descricao']
    # 'estado' arrives as a Python literal string ('True'/'False').
    tarefa['done'] = literal_eval(feature_table['estado'])
    response = context.request.json()
    # Drop the generated id so the dict comparison is stable.
    del response[0]['id']
    assert (tarefa in response), f'{response} {tarefa}'
class ExpandableFieldsSerializerMixinTests(SerializerMixinTestCase):
    """Exercises drf-serializer-extensions expansion: ``expand`` and
    ``expand_id_only`` instructions, nested expansion, depth limits,
    validation toggles, and writable/read-only id fields.

    NOTE(review): the bare ``_settings(REST_FRAMEWORK=...)`` expressions
    before the max-depth tests look like stripped
    ``@override_settings(...)``-style decorators — confirm against the
    original test module.
    """

    def serialize(self, **context):
        # Serialize the fixture owner with the given expansion context.
        return OwnerTestSerializer(self.owner_tyrell, context=context).data

    def expand_instance_id(self, instance):
        # Default "id only" representation is the primary key.
        return instance.pk

    def test_no_expansion(self):
        self.assertDictEqual(self.serialize(), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp)))

    def test_expand_foreign_key(self):
        self.assertDictEqual(self.serialize(expand={'organization'}), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), organization=dict(id=self.organization_ecorp.pk, name=self.organization_ecorp.name)))

    def test_full_expand_one_to_many_key(self):
        self.assertDictEqual(self.serialize(expand={'cars'}), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), cars=[dict(id=self.sku_p100d.pk, variant=self.sku_p100d.variant, model_id=self.expand_instance_id(self.carmodel_model_s))]))

    def test_id_only_expand_one_to_many_key(self):
        self.assertDictEqual(self.serialize(expand_id_only={'cars'}), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), cars=[self.expand_instance_id(self.sku_p100d)]))

    def test_expand_further_info_serializer(self):
        self.assertDictEqual(self.serialize(expand={'identities'}), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), identities=dict(email=self.owner_tyrell.email)))

    def test_multiple_expansion(self):
        self.assertDictEqual(self.serialize(expand={'identities', 'organization', 'cars'}), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), identities=dict(email=self.owner_tyrell.email), organization=dict(id=self.organization_ecorp.pk, name=self.organization_ecorp.name), cars=[dict(id=self.sku_p100d.pk, variant=self.sku_p100d.variant, model_id=self.expand_instance_id(self.carmodel_model_s))]))

    def test_nested_expansion(self):
        # Double-underscore syntax expands the full chain of relations.
        self.assertDictEqual(self.serialize(expand={'cars__model__manufacturer'}), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), cars=[dict(id=self.sku_p100d.pk, variant=self.sku_p100d.variant, model_id=self.expand_instance_id(self.carmodel_model_s), model=dict(id=self.carmodel_model_s.pk, name=self.carmodel_model_s.name, manufacturer_id=self.expand_instance_id(self.manufacturer_tesla), manufacturer=dict(id=self.manufacturer_tesla.pk, name=self.manufacturer_tesla.name)))]))

    def test_nested_expansion_with_standard_fields(self):
        serialized = OwnerWithCarsTestSerializer(self.owner_tyrell, context=dict(expand={'cars__model'}))
        self.assertDictEqual(serialized.data, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, cars=[dict(id=self.sku_p100d.pk, variant=self.sku_p100d.variant, model_id=self.expand_instance_id(self.carmodel_model_s), model=dict(id=self.carmodel_model_s.pk, name=self.carmodel_model_s.name, manufacturer_id=self.expand_instance_id(self.manufacturer_tesla)))]))

    def test_custom_id_source_unexpanded(self):
        self.assertDictEqual(OwnerWithCustomIdSourceTestSerializer(self.owner_tyrell).data, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp)))

    def test_custom_id_source_expanded(self):
        serialized = OwnerWithCustomIdSourceTestSerializer(self.owner_tyrell, context=dict(expand={'organization'})).data
        self.assertDictEqual(serialized, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), organization=dict(id=self.organization_ecorp.pk, name=self.organization_ecorp.name)))

    def test_method_field_serializer_unexpanded(self):
        self.assertDictEqual(OwnerWithMethodFieldTestSerializer(self.owner_tyrell).data, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name))

    def test_method_field_serializer_expanded(self):
        serialized = OwnerWithMethodFieldTestSerializer(self.owner_tyrell, context=dict(expand={'cars'})).data
        self.assertDictEqual(serialized, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, cars=[self.sku_p100d.variant]))

    def test_custom_id_field(self):
        # A get_<field>_id method overrides the id representation.
        class CustomIdSerializer(OwnerTestSerializer):
            def get_organization_id(self, owner):
                return '{0}-{1}'.format(owner.name, owner.organization.name)
        serialized = CustomIdSerializer(self.owner_tyrell).data
        self.assertDictEqual(serialized, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id='{0}-{1}'.format(self.owner_tyrell.name, self.organization_ecorp.name)))

    def test_custom_id_only_field(self):
        # A get_<field>_id_only method overrides the id-only representation.
        class CustomIdSerializer(OwnerTestSerializer):
            def get_cars_id_only(self, owner):
                return [car.variant for car in owner.cars.all()]
        serialized = CustomIdSerializer(self.owner_tyrell, context=dict(expand_id_only={'cars'})).data
        self.assertDictEqual(serialized, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), cars=[self.sku_p100d.variant]))

    def test_custom_source_id_only_field(self):
        # An expandable field may read from a differently-named source.
        class CustomIdSourceSerializer(OwnerTestSerializer):
            class Meta(OwnerTestSerializer.Meta):
                expandable_fields = dict(car_skus=dict(serializer=SkuTestSerializer, many=True, source='cars'))
        serialized = CustomIdSourceSerializer(self.owner_tyrell, context=dict(expand_id_only={'car_skus'})).data
        self.assertDictEqual(serialized, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, car_skus=[self.expand_instance_id(self.sku_p100d)]))

    def test_nested_id_only_expansion(self):
        # id-only on a nested path fully expands intermediate relations.
        self.assertDictEqual(self.serialize(expand_id_only={'cars__model__skus'}), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), cars=[dict(id=self.sku_p100d.pk, variant=self.sku_p100d.variant, model_id=self.expand_instance_id(self.carmodel_model_s), model=dict(id=self.carmodel_model_s.pk, name=self.carmodel_model_s.name, manufacturer_id=self.expand_instance_id(self.manufacturer_tesla), skus=[self.expand_instance_id(self.sku_p100d), self.expand_instance_id(self.sku_70)]))]))

    def test_exceed_default_max_depth(self):
        with self.assertRaises(ValueError):
            self.serialize(expand={'cars__model__manufacturer__models'})

    # NOTE(review): likely a stripped settings-override decorator for the
    # following test (MAX_EXPAND_DEPTH raised to 4).
    _settings(REST_FRAMEWORK=dict(SERIALIZER_EXTENSIONS=dict(MAX_EXPAND_DEPTH=4)))
    def test_max_depth_setting_below_value(self):
        self.serialize(expand={'cars__model__manufacturer__models'})

    # NOTE(review): likely a stripped settings-override decorator for the
    # following test (MAX_EXPAND_DEPTH lowered to 2).
    _settings(REST_FRAMEWORK=dict(SERIALIZER_EXTENSIONS=dict(MAX_EXPAND_DEPTH=2)))
    def test_max_depth_setting_above_value(self):
        with self.assertRaises(ValueError):
            self.serialize(expand={'cars__model__manufacturer__models'})

    def test_one_to_one_field_id_only_expansion(self):
        # One-to-one fields cannot be expanded id-only.
        with self.assertRaises(ValueError):
            self.serialize(expand_id_only={'organization'})

    def test_unmatched_root_field_not_expandable(self):
        with self.assertRaises(ValueError):
            self.serialize(expand={'not_found'})

    def test_unmatched_nested_field_not_expandable(self):
        with self.assertRaises(ValueError):
            self.serialize(expand={'organization__not_found'})

    def test_unmatched_root_field_not_id_expandable(self):
        with self.assertRaises(ValueError):
            self.serialize(expand_id_only={'not_found'})

    def test_unmatched_nested_field_not_id_expandable(self):
        with self.assertRaises(ValueError):
            self.serialize(expand_id_only={'organization__not_found'})

    def test_can_ignore_matching_validation_through_context(self):
        # Unknown instructions are silently skipped when validation is off.
        self.assertDictEqual(self.serialize(expand={'organization', 'not_found'}, validate_expand_instructions=False), dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), organization=dict(id=self.organization_ecorp.pk, name=self.organization_ecorp.name)))

    def test_can_ignore_matching_validation_through_meta(self):
        class IgnoreSerializer(OwnerTestSerializer):
            class Meta(OwnerTestSerializer.Meta):
                validate_expand_instructions = False
        serialized = IgnoreSerializer(self.owner_tyrell, context=dict(expand={'organization', 'not_found'})).data
        self.assertDictEqual(serialized, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), organization=dict(id=self.organization_ecorp.pk, name=self.organization_ecorp.name)))

    def test_can_ignore_matching_validation_through_method(self):
        class IgnoreSerializer(OwnerTestSerializer):
            def get_validate_expand_instructions(self):
                return False
        serialized = IgnoreSerializer(self.owner_tyrell, context=dict(expand={'organization', 'not_found'})).data
        self.assertDictEqual(serialized, dict(id=self.owner_tyrell.pk, name=self.owner_tyrell.name, organization_id=self.expand_instance_id(self.organization_ecorp), organization=dict(id=self.organization_ecorp.pk, name=self.organization_ecorp.name)))

    def test_deserialize_read_only_id_field(self):
        # Read-only id fields are dropped from validated_data.
        serializer = CarModelTestSerializer(data=dict(name='Ka', manufacturer_id=self.expand_instance_id(self.manufacturer_tesla)))
        self.assertTrue(serializer.is_valid())
        self.assertEqual(dict(name='Ka'), serializer.validated_data)

    def test_deserialize_writable_field(self):
        # Writable id fields keep both the raw id and the resolved instance.
        serializer = CarModelWithWritableManufacturerTestSerializer(data=dict(name='Ka', manufacturer_id=self.expand_instance_id(self.manufacturer_tesla)))
        self.assertTrue(serializer.is_valid())
        self.assertEqual(dict(name='Ka', manufacturer_id=self.manufacturer_tesla.pk, manufacturer_id_resolved=self.manufacturer_tesla), serializer.validated_data)

    def test_writable_field_required(self):
        serializer = CarModelWithWritableManufacturerTestSerializer(data=dict(name='Ka'))
        self.assertFalse(serializer.is_valid())
        self.assertTrue(('manufacturer_id' in serializer.errors))

    def test_read_only_field_not_required(self):
        serializer = CarModelTestSerializer(data=dict(name='Ka'))
        self.assertTrue(serializer.is_valid())

    def test_unexpanded_non_model_serializer(self):
        serializer = NonModelTestSerializer({})
        self.assertDictEqual(serializer.data, dict(is_model=False))

    def test_expanded_non_model_serializer_with_non_model_serializer(self):
        serializer = NonModelTestSerializer({}, context=dict(expand={'non_model'}))
        self.assertDictEqual(serializer.data, dict(is_model=False, non_model=dict(is_model=False)))

    def test_expanded_non_model_serializer_with_model_serializer(self):
        serializer = NonModelTestSerializer({}, context=dict(expand={'sku'}))
        self.assertDictEqual(serializer.data, dict(is_model=False, sku=dict(id=self.sku_p100d.pk, variant=self.sku_p100d.variant, model_id=self.expand_instance_id(self.carmodel_model_s))))
def test_multi_model_can_from_dict():
    """A serialized chain restores only into identically-shaped models."""
    original = chain(Maxout(5, 10, nP=2), Maxout(2, 3)).initialize()
    payload = original.to_dict()
    # The source model and a fresh same-shape chain both accept the payload.
    assert original.can_from_dict(payload)
    same_shape = chain(Maxout(5, 10, nP=2), Maxout(2, 3))
    assert same_shape.can_from_dict(payload)
    # A chain whose first layer has a different nP cannot load it.
    mismatched = chain(Maxout(5, 10, nP=3), Maxout(2, 3))
    assert not mismatched.can_from_dict(payload)
class TestFBNetV3MaskRCNNFPNFP32(RCNNBaseTestCases.TemplateTestCase):
    """End-to-end tests for the FBNetV3-G FPN Mask R-CNN config (FP32).

    NOTE(review): the bare ``_parameterized_test_export([...])`` expression
    looks like a stripped parameterization decorator for ``test_export`` —
    confirm against the original test module.
    """

    def setup_custom_test(self):
        super().setup_custom_test()
        # Overlay the FBNetV3-G FPN model config onto the template's config.
        self.cfg.merge_from_file('detectron2go://mask_rcnn_fbnetv3g_fpn.yaml')

    def test_inference(self):
        self._test_inference()

    # Parameters: (predictor_type, whether exported outputs must match).
    _parameterized_test_export([['_ops', False], ['torchscript', True], ['torchscript__ops', False], ['torchscript_int8', False]])
    def test_export(self, predictor_type, compare_match):
        # Some predictor types are unsupported in certain environments.
        _maybe_skip_test(self, predictor_type)
        self._test_export(predictor_type, compare_match=compare_match)
# NOTE(review): the bare `(scope='function')` below is not valid Python on
# its own — it looks like the argument list of a stripped `@pytest.fixture`
# decorator; confirm against the original test module.
(scope='function')
def powerconfig(request, powerwidget):
    """Pytest fixture: build a minimal qtile config whose bar contains a
    single instance of the power widget under test, with kwargs optionally
    injected via indirect parametrization (``request.param``)."""
    class PowerConfig(libqtile.confreader.Config):
        auto_fullscreen = True
        keys = []
        mouse = []
        groups = [libqtile.config.Group('a')]
        layouts = [libqtile.layout.Max()]
        floating_layout = libqtile.resources.default_config.floating_layout
        # One bar at the top holding only the widget under test.
        screens = [libqtile.config.Screen(top=libqtile.bar.Bar([powerwidget(**getattr(request, 'param', dict()))], 50))]
    (yield PowerConfig)
class DailyDBTestDBCase(UnitTestDBBase):
    """DB-backed tests for stalker's ``Daily``: a daily's links connect it to
    the versions that produced them and, through those, to their tasks."""

    def setUp(self):
        """Create statuses, a repo/project, three tasks, four versions and
        four output links; version1 owns links 1-3, version4 owns link 4."""
        super(DailyDBTestDBCase, self).setUp()
        from stalker import Status, StatusList
        # Generic statuses seeded by the default database setup.
        self.status_new = Status.query.filter_by(code='NEW').first()
        self.status_wfd = Status.query.filter_by(code='WFD').first()
        self.status_rts = Status.query.filter_by(code='RTS').first()
        self.status_wip = Status.query.filter_by(code='WIP').first()
        self.status_prev = Status.query.filter_by(code='PREV').first()
        self.status_hrev = Status.query.filter_by(code='HREV').first()
        self.status_drev = Status.query.filter_by(code='DREV').first()
        self.status_cmpl = Status.query.filter_by(code='CMPL').first()
        self.status_open = Status.query.filter_by(code='OPEN').first()
        self.status_cls = Status.query.filter_by(code='CLS').first()
        self.daily_status_list = StatusList.query.filter_by(target_entity_type='Daily').first()
        self.task_status_list = StatusList.query.filter_by(target_entity_type='Task').first()
        from stalker import Repository, Project
        self.test_repo = Repository(name='Test Repository', code='TR')
        from stalker.db.session import DBSession
        DBSession.add(self.test_repo)
        self.test_project = Project(name='Test Project', code='TP', repository=self.test_repo)
        DBSession.add(self.test_project)
        from stalker import Task
        self.test_task1 = Task(name='Test Task 1', project=self.test_project, status_list=self.task_status_list)
        DBSession.add(self.test_task1)
        self.test_task2 = Task(name='Test Task 2', project=self.test_project, status_list=self.task_status_list)
        DBSession.add(self.test_task2)
        self.test_task3 = Task(name='Test Task 3', project=self.test_project, status_list=self.task_status_list)
        DBSession.add(self.test_task3)
        DBSession.commit()
        from stalker import Version
        # Versions 1-3 belong to task1; version 4 belongs to task2.
        self.test_version1 = Version(task=self.test_task1)
        DBSession.add(self.test_version1)
        DBSession.commit()
        self.test_version2 = Version(task=self.test_task1)
        DBSession.add(self.test_version2)
        DBSession.commit()
        self.test_version3 = Version(task=self.test_task1)
        DBSession.add(self.test_version3)
        DBSession.commit()
        self.test_version4 = Version(task=self.test_task2)
        DBSession.add(self.test_version4)
        DBSession.commit()
        from stalker import Link
        self.test_link1 = Link(original_filename='test_render1.jpg')
        self.test_link2 = Link(original_filename='test_render2.jpg')
        self.test_link3 = Link(original_filename='test_render3.jpg')
        self.test_link4 = Link(original_filename='test_render4.jpg')
        DBSession.add_all([self.test_link1, self.test_link2, self.test_link3, self.test_link4])
        # Associate the render links with their producing versions.
        self.test_version1.outputs = [self.test_link1, self.test_link2, self.test_link3]
        self.test_version4.outputs = [self.test_link4]
        DBSession.commit()

    def test_tasks_attribute_will_return_a_list_of_tasks(self):
        """Daily.tasks resolves via links -> versions -> owning tasks."""
        from stalker import Daily
        daily = Daily(name='Test Daily', project=self.test_project, status_list=self.daily_status_list)
        daily.links = [self.test_link1, self.test_link2]
        from stalker.db.session import DBSession
        DBSession.add(daily)
        DBSession.commit()
        assert (daily.tasks == [self.test_task1])

    def test_versions_attribute_will_return_a_list_of_versions(self):
        """Daily.versions resolves to the versions owning the daily's links."""
        from stalker import Daily
        daily = Daily(name='Test Daily', project=self.test_project, status_list=self.daily_status_list)
        daily.links = [self.test_link1, self.test_link2]
        from stalker.db.session import DBSession
        DBSession.add(daily)
        DBSession.commit()
        assert (daily.versions == [self.test_version1])
class IndexMeta(DocumentMeta):
    """Metaclass that attaches an ``Index`` to every Document subclass.

    The first class created with this metaclass is the base document class
    itself, which must not be registered with an index;
    ``_document_initialized`` flips to True after that first creation so all
    later subclasses get one.
    """

    # Becomes True once the base document class has been created.
    _document_initialized = False

    def __new__(cls, name, bases, attrs):
        new_cls = super().__new__(cls, name, bases, attrs)
        if cls._document_initialized:
            # Pop the nested `Index` options class so it does not leak into
            # the document's attributes.
            index_opts = attrs.pop('Index', None)
            index = cls.construct_index(index_opts, bases)
            new_cls._index = index
            index.document(new_cls)
        cls._document_initialized = True
        return new_cls

    @classmethod
    def construct_index(cls, opts, bases):
        """Build an ``Index`` from a nested ``Index`` options class, falling
        back to the nearest base class index (or a nameless default).

        Fix: this is invoked above as ``cls.construct_index(index_opts,
        bases)`` with two arguments, so it must be a ``@classmethod``; as a
        plain three-parameter function that call raised TypeError.
        """
        if (opts is None):
            # Inherit the index from the first base that carries one.
            for b in bases:
                if hasattr(b, '_index'):
                    return b._index
            return Index(name=None)
        i = Index(getattr(opts, 'name', '*'), using=getattr(opts, 'using', 'default'))
        i.settings(**getattr(opts, 'settings', {}))
        i.aliases(**getattr(opts, 'aliases', {}))
        for a in getattr(opts, 'analyzers', ()):
            i.analyzer(a)
        return i
class TestDeletedTickets(PaginationTestCase):
    """Zendesk deleted-ticket workflows: soft delete + restore, and permanent
    deletion, recorded/replayed through cassettes."""

    __test__ = True
    ZenpyType = Ticket
    api_name = 'tickets'
    expected_single_result_type = TicketAudit
    object_kwargs = dict(subject='test', description='test')
    pagination_limit = 10

    def create_objects(self):
        """Create tickets and immediately soft-delete them so the deleted
        tickets API has content; clear the bookkeeping list so teardown does
        not attempt to delete them again."""
        job_status = self.create_multiple_zenpy_objects(5)
        for r in job_status.results:
            self.created_objects.append(Ticket(id=r.id))
        self.delete_method(self.created_objects)
        self.created_objects = []

    def test_delete_and_restore(self):
        """A soft-deleted ticket can be restored and fetched again."""
        cassette_name = '{}'.format(self.generate_cassette_name())
        with self.recorder.use_cassette(cassette_name=cassette_name, serialize_with='prettyjson'):
            ticket_audit = self.create_single_zenpy_object()
            ticket = self.unpack_object(ticket_audit)
            self.delete_method(ticket)
            self.get_api_method('restore')(ticket)
            ticket_restored = self.api(id=ticket.id)
            self.assertIsInstance(ticket_restored, self.ZenpyType)
            self.assertInCache(ticket_restored)
            # Touch every property to exercise lazy API lookups.
            self.recursively_call_properties(ticket_restored)

    def test_permanently_delete(self):
        """A permanently deleted ticket can no longer be restored."""
        cassette_name = '{}'.format(self.generate_cassette_name())
        with self.recorder.use_cassette(cassette_name=cassette_name, serialize_with='prettyjson'):
            ticket_audit = self.create_single_zenpy_object()
            ticket = self.unpack_object(ticket_audit)
            self.delete_method(ticket)
            # Already deleted here; prevent double-deletion in teardown.
            self.created_objects = []
            job_status = self.get_api_method('permanently_delete')(ticket)
            self.wait_for_job_status(job_status)
            with self.assertRaises(RecordNotFoundException):
                self.get_api_method('restore')(ticket)
# NOTE(review): the `.task(taskno=N)` lines inside this class are not valid
# Python as written — they look like stripped `@pytest.mark.task(taskno=N)`
# exercism decorators; confirm against the original test module. They are
# preserved byte-identical below.
class EnumsTest(unittest.TestCase):
    """Exercism log-levels exercise: parsing log prefixes into LogLevel
    members, short-log conversion, aliases and member listing."""

    .task(taskno=1)
    def test_parse_log_level_set_ing(self):
        self.assertIs(parse_log_level('[INF]: File deleted'), LogLevel.INFO, msg='The Log level is incorrect')

    .task(taskno=1)
    def test_parse_log_level_set_wrn(self):
        self.assertIs(parse_log_level('[WRN]: File is being overwritten'), LogLevel.WARNING, msg='The Log level is incorrect')

    .task(taskno=1)
    def test_parse_log_level_set_err(self):
        self.assertIs(parse_log_level('[ERR]: Some Random Log'), LogLevel.ERROR, msg='The Log level is incorrect')

    .task(taskno=2)
    def test_parse_log_level_set_xyz(self):
        # Unrecognized prefixes map to the UNKNOWN member.
        self.assertIs(parse_log_level('[XYZ]: Some Random Log'), LogLevel.UNKNOWN, msg='The Log level is incorrect')

    .task(taskno=3)
    def test_convert_to_short_log_set1(self):
        self.assertEqual(convert_to_short_log(LogLevel.ERROR, 'Stack overflow'), '6:Stack overflow', msg='The converted short log is incorrect')

    .task(taskno=3)
    def test_convert_to_short_log_set2(self):
        self.assertEqual(convert_to_short_log(LogLevel.WARNING, 'This is a warning'), '5:This is a warning', msg='The converted short log is incorrect')

    .task(taskno=4)
    def test_get_warn_alias(self):
        # WARN must alias the WARNING member (identity, not equality).
        self.assertIs(get_warn_alias(), LogLevel.WARN, msg='The warn alias returned is incorrect')

    .task(taskno=5)
    def test_get_members(self):
        self.assertListEqual(get_members(), [('TRACE', 'TRC'), ('DEBUG', 'DBG'), ('INFO', 'INF'), ('WARNING', 'WRN'), ('ERROR', 'ERR'), ('FATAL', 'FTL'), ('UNKNOWN', 'UKN')], msg='The Members list of the enum is incorrect')
def extractTranslationsdrtWordpressCom(item):
    """Classify a release feed entry from this WordPress site.

    Returns a release message for a recognized series tag, None when the
    title has no volume/chapter info (or is a preview), and False when no
    tag matched.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip entries with no parseable numbering and preview posts.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # (feed tag, canonical series name, translation type) in priority order.
    series_map = (
        ('iceblade magician', 'The Iceblade Magician Rules over the World', 'translated'),
        ('wortenia', 'Wortenia Senki', 'translated'),
        ('Wortenia Senki', 'Wortenia Senki', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, release_type in series_map:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
def get_rulesets(ruledir, recurse):
    """Load Ruleset objects from a YAML file or a directory of YAML files.

    :param ruledir: path to a .yaml file or a directory containing them
    :param recurse: when *ruledir* is a directory, walk it recursively
    :returns: list of ruleset.Ruleset built from every extracted YAML doc
    :raises ValueError: if *ruledir* is neither a file nor a directory
    """
    if os.path.isdir(ruledir) and recurse:
        # Recursive walk: collect *.yaml at every depth.
        yaml_files = [y for x in os.walk(ruledir) for y in glob(os.path.join(x[0], '*.yaml'))]
    elif os.path.isdir(ruledir):
        yaml_files = get_files(ruledir, 'yaml')
    elif os.path.isfile(ruledir):
        yaml_files = [ruledir]
    else:
        # Bug fix: previously fell through with yaml_files unbound, raising
        # a confusing NameError below instead of reporting the bad path.
        raise ValueError('No such file or directory: {}'.format(ruledir))
    extracted_files = extract_yaml(yaml_files)
    return [ruleset.Ruleset(extracted_yaml) for extracted_yaml in extracted_files]
class ConfigManager():
    """Builds and holds a ConfigData snapshot from a config entry.

    Options (user-editable) take precedence over the original entry data.
    """
    data: ConfigData
    config_entry: ConfigEntry

    def update(self, config_entry: ConfigEntry):
        """Rebuild self.data from *config_entry*'s data and options."""
        data = config_entry.data
        options = config_entry.options
        result = ConfigData()
        result.name = data.get(CONF_NAME)
        result.host = data.get(CONF_HOST)
        result.port = data.get(CONF_PORT, 80)
        result.ssl = data.get(CONF_SSL, False)
        result.should_store = self._get_config_data_item(CONF_STORE_DATA, options, data, False)
        result.update_interval = self._get_config_data_item(CONF_UPDATE_INTERVAL, options, data, 60)
        result.log_level = self._get_config_data_item(CONF_LOG_LEVEL, options, data, LOG_LEVEL_DEFAULT)
        self.config_entry = config_entry
        self.data = result

    def _get_config_data_item(self, key, options, data, default_value=None):
        """Look up *key* in options first, then data, then the default.

        Bug fix: the method previously lacked the ``self`` parameter, so
        calls via ``self._get_config_data_item(...)`` bound the instance to
        ``key`` and shifted every argument by one position.
        """
        data_result = data.get(key, default_value)
        result = options.get(key, data_result)
        return result
class SOEFConnection(Connection):
    """Connection to the Simple OEF search-and-discovery node."""

    connection_id = PUBLIC_ID
    # Defaults for the post-connect liveness check against the SOEF node.
    DEFAULT_CONNECTION_CHECK_TIMEOUT: float = 15.0
    DEFAULT_CONNECTION_CHECK_MAX_RETRIES: int = 3

    def __init__(self, **kwargs: Any) -> None:
        """Read connection settings from the configuration and build the channel."""
        if (kwargs.get('configuration') is None):
            kwargs['excluded_protocols'] = (kwargs.get('excluded_protocols') or [])
            # NOTE(review): restricted_to_protocols is seeded from the
            # 'excluded_protocols' key -- looks like a copy/paste slip; confirm.
            kwargs['restricted_to_protocols'] = (kwargs.get('excluded_protocols') or [OefSearchMessage.protocol_id])
        super().__init__(**kwargs)
        api_key = cast(str, self.configuration.config.get('api_key'))
        connection_check_timeout = cast(float, self.configuration.config.get('connection_check_timeout', self.DEFAULT_CONNECTION_CHECK_TIMEOUT))
        connection_check_max_retries = cast(int, self.configuration.config.get('connection_check_max_retries', self.DEFAULT_CONNECTION_CHECK_MAX_RETRIES))
        # NOTE(review): the next line is corrupted in this copy of the file
        # (unterminated string key, missing comma) -- restore from upstream.
        is_ = cast(bool, self.configuration.config.get('is_ True))
        soef_addr = cast(str, self.configuration.config.get('soef_addr'))
        soef_port = cast(int, self.configuration.config.get('soef_port'))
        chain_identifier = cast(str, self.configuration.config.get('chain_identifier'))
        token_storage_path = cast(Optional[str], self.configuration.config.get('token_storage_path'))
        # These settings have no sensible default; fail fast when absent.
        not_none_params = {'api_key': api_key, 'soef_addr': soef_addr, 'soef_port': soef_port}
        for (param_name, param_value) in not_none_params.items():
            if (param_value is None):
                raise ValueError(f'{param_name} must be set!')
        self.api_key = api_key
        self.is_ = is_
        self.soef_addr = soef_addr
        self.soef_port = soef_port
        # NOTE(review): missing comma after 'self.is_' -- corrupted as well.
        self.channel = SOEFChannel(self.address, self.api_key, self.is_ self.soef_addr, self.soef_port, data_dir=self.data_dir, chain_identifier=chain_identifier, token_storage_path=token_storage_path, connection_check_timeout=connection_check_timeout, connection_check_max_retries=connection_check_max_retries)

    async def connect(self) -> None:
        """Connect the underlying channel (no-op when already connected)."""
        if self.is_connected:
            return
        with self._connect_context():
            (await self.channel.connect())

    # NOTE(review): accessed as an attribute below ('self.in_queue is None'),
    # so this was presumably decorated with @property -- confirm upstream.
    def in_queue(self) -> Optional[asyncio.Queue]:
        """Return the channel's incoming-envelope queue."""
        return self.channel.in_queue

    async def disconnect(self) -> None:
        """Disconnect the channel and update the connection state."""
        if self.is_disconnected:
            return
        if (self.in_queue is None):
            raise ValueError('In queue not set.')
        self.state = ConnectionStates.disconnecting
        (await self.channel.disconnect())
        self.state = ConnectionStates.disconnected

    async def receive(self, *args: Any, **kwargs: Any) -> Optional['Envelope']:
        """Await the next envelope; return None on shutdown, cancel, or error."""
        try:
            if (self.in_queue is None):
                raise ValueError('In queue not set.')
            envelope = (await self.in_queue.get())
            if (envelope is None):
                # A None sentinel signals channel shutdown.
                self.logger.debug('Received None.')
                return None
            self.logger.debug('Received envelope: {}'.format(envelope))
            return envelope
        except CancelledError:
            self.logger.debug('Receive cancelled.')
            return None
        except Exception as e:
            # Broad catch: receive() must never propagate into the agent loop.
            self.logger.exception(e)
            return None

    async def send(self, envelope: 'Envelope') -> None:
        """Forward an outgoing envelope to the channel; dropped if not connected."""
        if self.is_connected:
            (await self.channel.send(envelope))
class PrettyTable(object):
    """Styled HTML renderer for a pandas DataFrame (notebook display).

    Per-cell, per-header, and corner styles are CellStyle instances; the
    rendered table is produced by ``_repr_html_`` for Jupyter.
    """

    def __init__(self, df, tstyle=None, header_row=False, header_col=True, center=False, rpt_header=0):
        """Initialize styles either fresh or copied from a table style.

        :param df: DataFrame to render
        :param tstyle: optional table style providing default CellStyles
        :param header_row: show the index column on the left
        :param header_col: show the column-name header row on top
        :param center: wrap the table in <center>
        :param rpt_header: repeat the header row every N body rows (0 = never)
        """
        self.df = df
        self.num_rows = df.shape[0]
        self.num_cols = df.shape[1]
        self.header_row = header_row
        self.header_col = header_col
        self.style = tstyle
        self.center = center
        self.rpt_header = rpt_header
        if tstyle is None:
            self.cell_style = CellStyle()
            self.corner_style = CellStyle()
            self.header_row_styles = [CellStyle() for _ in range(self.num_rows)]
            self.header_col_styles = [CellStyle() for _ in range(self.num_cols)]
            self.cell_styles = [[CellStyle() for _ in range(self.num_cols)] for _ in range(self.num_rows)]
        else:
            self.cell_style = tstyle.cell_style
            self.corner_style = tstyle.corner_style
            self.header_row_styles = [tstyle.row_head_style.copy() for _ in range(self.num_rows)]
            self.header_col_styles = [tstyle.col_head_style.copy() for _ in range(self.num_cols)]
            self.cell_styles = [[self.cell_style.copy() for _ in range(self.num_cols)] for _ in range(self.num_rows)]

    def _prepare_style(self, style, format_function, kwargs):
        """Build (or mutate) a CellStyle from keyword args; shared by setters.

        CSS property names use dashes, Python kwargs use underscores.
        Mutates *style* in place (update_* methods rely on this) and returns it.
        """
        if style is None:
            style = CellStyle()
        for key, value in kwargs.items():
            style.set(key.replace('_', '-'), value)
        if format_function is not None:
            style.format_function = format_function
        return style

    def set_cell_style(self, style=None, tuples=None, rows=None, cols=None, format_function=None, **kwargs):
        """Assign a style to explicit (row, col) *tuples* and/or a rows x cols grid.

        When *rows* and *cols* are both None (and no tuples were given),
        nothing is applied -- callers must target something explicitly.
        """
        style = self._prepare_style(style, format_function, kwargs)
        if tuples:
            for (i, j) in tuples:
                self.cell_styles[i][j] = style.copy()
        if (rows is None) and (cols is None):
            return
        if rows is None:
            rows = range(self.num_rows)
        if cols is None:
            cols = range(self.num_cols)
        for i in rows:
            for j in cols:
                self.cell_styles[i][j] = style.copy()

    def set_row_header_style(self, style=None, indices=None, format_function=None, **kwargs):
        """Assign a style to the row-header cells at *indices* (default: all)."""
        style = self._prepare_style(style, format_function, kwargs)
        if indices is None:
            indices = range(self.num_rows)
        for i in indices:
            self.header_row_styles[i] = style.copy()

    def set_col_header_style(self, style=None, indices=None, format_function=None, **kwargs):
        """Assign a style to the column-header cells at *indices* (default: all)."""
        style = self._prepare_style(style, format_function, kwargs)
        if indices is None:
            indices = range(self.num_cols)
        for i in indices:
            self.header_col_styles[i] = style.copy()

    def set_corner_style(self, style=None, format_function=None, **kwargs):
        """Assign a style to the top-left corner cell."""
        self.corner_style = self._prepare_style(style, format_function, kwargs)

    def update_cell_style(self, rows=None, cols=None, format_function=None, **kwargs):
        """Merge kwargs into the existing style of each targeted cell."""
        if rows is None:
            rows = range(self.num_rows)
        if cols is None:
            cols = range(self.num_cols)
        for i in rows:
            for j in cols:
                style = self.cell_styles[i][j]
                self.set_cell_style(style=style, rows=[i], cols=[j], format_function=format_function, **kwargs)

    def update_row_header_style(self, indices=None, format_function=None, **kwargs):
        """Merge kwargs into the existing style of each targeted row header."""
        if indices is None:
            indices = range(self.num_rows)
        for i in indices:
            style = self.header_row_styles[i]
            self.set_row_header_style(style=style, indices=[i], format_function=format_function, **kwargs)

    def update_col_header_style(self, indices=None, format_function=None, **kwargs):
        """Merge kwargs into the existing style of each targeted column header."""
        if indices is None:
            indices = range(self.num_cols)
        for i in indices:
            style = self.header_col_styles[i]
            self.set_col_header_style(style=style, indices=[i], format_function=format_function, **kwargs)

    def update_corner_style(self, format_function=None, **kwargs):
        """Merge kwargs into the existing corner style."""
        style = self.corner_style
        self.set_corner_style(style=style, format_function=format_function, **kwargs)

    def reset_cell_style(self, rows=None, cols=None):
        """Replace targeted cell styles with fresh defaults."""
        if rows is None:
            rows = range(self.num_rows)
        if cols is None:
            cols = range(self.num_cols)
        for i in rows:
            for j in cols:
                self.set_cell_style(style=CellStyle(), rows=[i], cols=[j])

    def reset_row_header_style(self, indices=None):
        """Replace targeted row-header styles with fresh defaults."""
        if indices is None:
            indices = range(self.num_rows)
        for i in indices:
            self.set_row_header_style(style=CellStyle(), indices=[i])

    def reset_col_header_style(self, indices=None):
        """Replace targeted column-header styles with fresh defaults."""
        if indices is None:
            indices = range(self.num_cols)
        for i in indices:
            self.set_col_header_style(style=CellStyle(), indices=[i])

    def reset_corner_style(self):
        """Replace the corner style with a fresh default."""
        self.set_corner_style(style=CellStyle())

    def _header_row_html(self):
        """Render the column-header <tr> (with corner cell when row headers show).

        Extracted because the same markup is emitted at the top of the table
        and again every *rpt_header* rows.
        """
        parts = ['<tr style="%s">' % self.cell_style.css()]
        if self.header_row:
            parts.append('<td style="%s"></td>' % self.corner_style.css())
        for j in range(self.num_cols):
            hstyle = self.header_col_styles[j] if self.header_col_styles is not None else self.cell_style
            parts.append('<td style="%s">' % hstyle.css())
            parts.append(hstyle.column_format(self.df.columns[j]))
            parts.append('</td>')
        parts.append('</tr>')
        return ''.join(parts)

    def _repr_html_(self):
        """Render the DataFrame as a styled HTML table (Jupyter hook)."""
        html = '<table style="%s">' % self.cell_style.css()
        if self.header_col:
            html += self._header_row_html()
        for i in range(self.num_rows):
            html += '<tr style="%s">' % self.cell_style.css()
            if self.header_row:
                rstyle = self.header_row_styles[i] if self.header_row_styles is not None else self.cell_style
                html += '<td style="%s">' % rstyle.css()
                html += rstyle.column_format(self.df.index.values[i])
                html += '</td>'
            for j in range(self.num_cols):
                cstyle = self.cell_styles[i][j] if self.cell_styles[i][j] is not None else self.cell_style
                html += '<td style="%s">' % cstyle.css()
                html += cstyle.column_format(self.df.iloc[(i, j)])
                html += '</td>'
            html += '</tr>'
            # Optionally repeat the header row every rpt_header body rows
            # (but never directly before the closing tag).
            if (self.rpt_header > 0) and ((i + 1) % self.rpt_header == 0) and (i < (self.num_rows - 1)):
                if self.header_col:
                    html += self._header_row_html()
        html += '</table>'
        if self.center:
            return '<center>%s</center>' % html
        return html

    def copy(self):
        """Return a deep copy of this table (styles are copied, df is shared).

        Bug fix: rpt_header was previously dropped by copy(), so copies lost
        their repeating header setting.
        """
        p = PrettyTable(self.df, self.style, self.header_row, self.header_col, center=self.center, rpt_header=self.rpt_header)
        p.header_row_styles = [item.copy() for item in self.header_row_styles]
        p.header_col_styles = [item.copy() for item in self.header_col_styles]
        p.cell_styles = [[self.cell_styles[i][j].copy() for j in range(self.num_cols)] for i in range(self.num_rows)]
        p.corner_style = self.corner_style.copy()
        return p
def test_logout_ise():
    """Logout should surface the server's 500 with no payload."""
    # Queue the canned HTTP responses the client will consume, in order.
    for canned in ('login_response_200', 'logout_response_500', 'api_version_response_200'):
        testutil.add_response(canned)
    client = testutil.get_client()
    result = client.logout()
    # (payload, response) pair: no body, HTTP 500 from the server.
    assert result[0] is None
    assert result[1].status == 500
class TestsAchromatic(util.ColorAsserts, unittest.TestCase):
    """is_achromatic() behavior in the JzCzhz color space."""

    def test_achromatic(self):
        """Grays are achromatic; anything with real chroma is not."""
        # Dark gray, with or without a tiny chroma nudge or a flipped
        # negative chroma + rotated hue, stays achromatic.
        self.assertEqual(Color('#222222').convert('jzczhz').is_achromatic(), True)
        self.assertEqual(Color('#222222').convert('jzczhz').set('cz', (lambda c: (c + 1e-08))).is_achromatic(), True)
        self.assertEqual(Color('#222222').convert('jzczhz').set('cz', (lambda c: (- c))).set('h', (lambda h: (h + 180))).is_achromatic(), True)
        # Out-of-gamut but still gray (equal RGB channels).
        self.assertEqual(Color('srgb', [5.2, 5.2, 5.2]).convert('jzczhz').is_achromatic(), True)
        # Undefined lightness or chroma with zero/NaN chroma: achromatic.
        self.assertEqual(Color('jzczhz', [NaN, 0.0, 270]).is_achromatic(), True)
        self.assertEqual(Color('jzczhz', [0, NaN, 270]).is_achromatic(), True)
        # Nonzero chroma: chromatic.
        self.assertEqual(Color('jzczhz', [0, 0.5, 270]).is_achromatic(), False)
        self.assertEqual(Color('pink').convert('jzczhz').is_achromatic(), False)
        self.assertEqual(Color('jzczhz', [NaN, 0.5, 270]).is_achromatic(), False)
        # NaN chroma with nonzero lightness: chromatic (chroma unknown).
        self.assertEqual(Color('jzczhz', [0.2, NaN, 270]).is_achromatic(), False)
        # Both lightness and chroma undefined: treated as achromatic.
        self.assertEqual(Color('jzczhz', [NaN, NaN, 270]).is_achromatic(), True)
        # Negative lightness with zero chroma: still achromatic.
        self.assertEqual(Color('jzczhz', [(- 0.05), 0, 0]).is_achromatic(), True)
def _test_correct_response_for_recipient_location_state_without_geo_filters(client):
    """State-level recipient-location aggregation with only a time filter.

    Leading underscore: this is a helper invoked by a public test, not
    collected directly by pytest.
    """
    resp = client.post('/api/v2/search/spending_by_geography', content_type='application/json', data=json.dumps({'scope': 'recipient_location', 'geo_layer': 'state', 'filters': {'time_period': [{'start_date': '2018-10-01', 'end_date': '2020-09-30'}]}}))
    # Expected aggregates per state, including per-capita amounts.
    expected_response = {'scope': 'recipient_location', 'geo_layer': 'state', 'results': [{'aggregated_amount': 5500550.0, 'display_name': 'South Carolina', 'per_capita': 5500.55, 'population': 1000, 'shape_code': 'SC'}, {'aggregated_amount': 55000.0, 'display_name': 'Washington', 'per_capita': 5.5, 'population': 10000, 'shape_code': 'WA'}], 'messages': [get_time_period_message()]}
    assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response'
    resp_json = resp.json()
    # Sort by shape code so the comparison is order-independent.
    resp_json['results'].sort(key=_get_shape_code_for_sort)
    assert (resp_json == expected_response)
def test_user_card_management(client, msend):
    """End-to-end card management against the Stripe test API.

    NOTE(review): several literals (emails, card numbers) are empty strings
    in this copy -- they appear to have been sanitized; restore the original
    test fixtures before running.
    """
    # Safety check: only ever run against Stripe test keys.
    assert ('_test_' in settings.STRIPE_PUBLISHABLE_KEY)
    assert ('_test_' in settings.STRIPE_SECRET_KEY)
    # NOTE(review): 'in' performs a substring check here; '==' was probably
    # intended -- confirm.
    assert (stripe.api_key in settings.STRIPE_TEST_SECRET_KEY)
    # New users start on the free plan.
    r = client.post('/register', data={'email': '', 'password': 'uva'})
    assert (r.status_code == 302)
    user = User.query.filter_by(email='').first()
    assert (user.plan == Plan.free)
    # Upgrading with a card token moves the user to the gold plan.
    token = stripe.Token.create(card={'number': '', 'exp_month': '11', 'exp_year': '2026', 'cvc': '123'})['id']
    r = client.post('/account/upgrade', data={'stripeToken': token})
    user = User.query.filter_by(email='').first()
    assert (user.plan == Plan.gold)
    # Adding a second, different card grows the wallet to two cards.
    token = stripe.Token.create(card={'number': '', 'exp_month': '11', 'exp_year': '2021', 'cvc': '345'})['id']
    r = client.post('/card/add', data={'stripeToken': token})
    customer = stripe.Customer.retrieve(user.stripe_id)
    cards = customer.sources.all(object='card').data
    assert (len(cards) == 2)
    # Re-adding an existing card is rejected with a flash message.
    token = stripe.Token.create(card={'number': '', 'exp_month': '11', 'exp_year': '2026', 'cvc': '123'})['id']
    r = client.post('/card/add', data={'stripeToken': token}, follow_redirects=True)
    assert ('That card already exists in your wallet' in r.data.decode('utf-8'))
    # Deleting a card shrinks the wallet back to one.
    r = client.post(('/card/%s/delete' % cards[1].id))
    cards = customer.sources.all(object='card').data
    assert (len(cards) == 1)
    # Clean up the Stripe test customer.
    customer.delete()
class TestCredentialsOffline():
    """Offline tests for the Credentials auth client (all HTTP mocked)."""

    def test_repr(self):
        # repr should identify the class without leaking the secret.
        c = Credentials('id', 'secret')
        assert repr(c).startswith('Credentials(')
        c.close()

    def test_credentials_initialisation(self):
        Credentials(client_id='id', client_secret='secret', redirect_uri='uri').close()

    def test_credentials_only_client_id_mandatory(self):
        # Secret and redirect URI are optional at construction time.
        Credentials('id').close()

    def test_basic_token_with_no_secret_raises(self):
        # Client-credentials flow needs a secret.
        c = Credentials('id')
        with pytest.raises(ValueError):
            c.request_client_token()
        c.close()

    # NOTE(review): corrupted in this copy -- the 'def' line lost its name
    # suffix and '(self):' (presumably
    # 'def test_server_error_raises_http_error(self):'); confirm upstream.
    def test_server_error_raises_ c = Credentials('id', 'secret')
        response = mock_response(500, {})
        c.send = MagicMock(return_value=response)
        with pytest.raises(HTTPError):
            c.request_client_token()
        c.close()

    def test_client_error_with_description(self):
        # 4xx with an OAuth error body still raises HTTPError.
        c = Credentials('id', 'secret')
        error = {'error': 'Bad thing', 'error_description': 'because reasons'}
        response = mock_response(400, error)
        c.send = MagicMock(return_value=response)
        with pytest.raises(HTTPError):
            c.request_client_token()
        c.close()

    def test_client_error_without_description(self):
        c = Credentials('id', 'secret')
        error = {'error': 'Bad thing'}
        response = mock_response(400, error)
        c.send = MagicMock(return_value=response)
        with pytest.raises(HTTPError):
            c.request_client_token()
        c.close()

    def test_user_authorisation_url(self):
        # Scope and state must be carried as query parameters.
        c = Credentials('id', redirect_uri='uri')
        url = c.user_authorisation_url('scope', 'state')
        assert ('scope=scope' in url)
        assert ('state=state' in url)
        c.close()

    def test_user_authorisation_accepts_scope_list(self):
        # A list of scopes is joined with '+' in the URL.
        c = Credentials('id', redirect_uri='uri')
        url = c.user_authorisation_url(['a', 'b'], 'state')
        assert ('scope=a+b' in url)
        c.close()

    def test_request_user_token(self):
        c = Credentials('id', 'secret', 'uri')
        send = MagicMock(return_value=mock_response())
        with patch((cred_module + '.send'), send):
            c.request_user_token('code')
            send.assert_called_once()
        c.close()

    def test_refresh_user_token_uses_old_refresh_if_not_returned(self):
        # If the server omits a refresh token, keep using the old one.
        c = Credentials('id', 'secret')
        token = token_dict()
        token['refresh_token'] = None
        response = mock_response(content=token)
        send = MagicMock(return_value=response)
        with patch((cred_module + '.send'), send):
            refreshed = c.refresh_user_token('refresh')
            assert (refreshed.refresh_token == 'refresh')
        c.close()

    def test_refresh_user_token_refresh_replaced_if_returned(self):
        # A refresh token in the response replaces the old one.
        c = Credentials('id', 'secret')
        token = token_dict()
        response = mock_response(content=token)
        send = MagicMock(return_value=response)
        with patch((cred_module + '.send'), send):
            refreshed = c.refresh_user_token('refresh')
            assert (refreshed.refresh_token == token['refresh_token'])
        c.close()

    def test_pkce_user_authorisation(self):
        c = Credentials('id', redirect_uri='redirect')
        c.pkce_user_authorisation('scope', 'state')
        c.close()

    def test_request_pkce_token(self):
        # Tokens acquired via PKCE must be flagged as such.
        c = Credentials('id')
        c.send = MagicMock(return_value=mock_response())
        token = c.request_pkce_token('scope', 'verifier')
        assert token.uses_pkce
        c.close()

    def test_refresh_pkce_token(self):
        c = Credentials('id')
        c.send = MagicMock(return_value=mock_response())
        token = c.refresh_pkce_token('refresh')
        assert token.uses_pkce
        c.close()

    def test_auto_refresh_client_token(self):
        # refresh() dispatches to request_client_token for app tokens
        # (no refresh token present).
        c = Credentials('id', 'secret')
        token = make_token({'refresh_token': None})
        c.request_client_token = MagicMock(return_value=token)
        c.refresh(token)
        c.request_client_token.assert_called_once()
        c.close()

    def test_auto_refresh_user_token(self):
        # refresh() dispatches to refresh_user_token for non-PKCE user tokens.
        c = Credentials('id', 'secret')
        token = make_token(uses_pkce=False)
        c.refresh_user_token = MagicMock(return_value=token)
        c.refresh(token)
        c.refresh_user_token.assert_called_once()
        c.close()

    def test_auto_refresh_pkce_token(self):
        # refresh() dispatches to refresh_pkce_token for PKCE tokens.
        c = Credentials('id')
        token = make_token(uses_pkce=True)
        c.refresh_pkce_token = MagicMock(return_value=token)
        c.refresh(token)
        c.refresh_pkce_token.assert_called_once()
        c.close()
class StructuredTransforms1DInterfacesRight(TestCase, Common):
    """Right-interface case of the 1D structured transform sequence tests."""

    def setUp(self):
        super().setUp()
        # One interface axis covering indices [0, 3) of a length-9 axis,
        # oriented to the right side.
        axis = nutils.transformseq.IntAxis(0, 3, 9, 0, True)
        self.seq = nutils.transformseq.StructuredTransforms(x1, (axis,), 0)
        # Transforms expected to be present (right edge e0 of each cell).
        self.check = tuple((x1, idx, e0) for idx in (i10, i11, i12))
        # Out-of-range index plus every left-edge (e1) transform: absent.
        absent = [(x1, i13, e0)]
        absent += [(x1, idx, e1) for idx in (i10, i11, i12, i13)]
        self.checkmissing = tuple(absent)
        self.checkrefs = References.uniform(point, 3)
        self.checktodims = 1
        self.checkfromdims = 0
class _MockAsyncCallableDSL(_MockCallableDSL):
    """DSL for mocking async callables; the coroutine analogue of mock_callable."""

    _NAME: str = 'mock_async_callable'

    def __init__(self, target: Union[(str, type)], method: str, caller_frame_info: Traceback, callable_returns_coroutine: bool, allow_private: bool=False, type_validation: bool=True) -> None:
        # Whether the mocked attribute is a plain callable that *returns* a
        # coroutine (as opposed to being a coroutine function itself).
        self._callable_returns_coroutine = callable_returns_coroutine
        super().__init__(target, method, caller_frame_info, allow_private=allow_private, type_validation=type_validation)
        self._allow_coro = True

    def _validate_patch(self) -> None:
        # Delegate validation, identifying ourselves vs the sync variant.
        return super()._validate_patch(name=self._NAME, other_name='mock_callable', coroutine_function=True, callable_returns_coroutine=self._callable_returns_coroutine)

    def _get_callable_mock(self) -> _CallableMock:
        # Async-flavored mock standing in for the original callable.
        return _CallableMock(self._original_target, self._method, self.caller_frame_info, is_async=True, callable_returns_coroutine=self._callable_returns_coroutine, type_validation=self.type_validation)

    def with_implementation(self, func: Callable) -> '_MockAsyncCallableDSL':
        """Replace the mocked callable entirely with *func*."""
        if (not callable(func)):
            raise ValueError('{} must be callable.'.format(func))
        self._add_runner(_AsyncImplementationRunner(self._original_target, self._method, self._original_callable, func))
        return self

    def with_wrapper(self, func: Callable) -> '_MockAsyncCallableDSL':
        """Call *func* with the original callable prepended to the arguments.

        *func* must return a coroutine; otherwise NotACoroutine is raised at
        call time.
        """
        if (not callable(func)):
            raise ValueError('{} must be callable.'.format(func))
        if (not self._original_callable):
            raise ValueError('Can not wrap original callable that does not exist.')

        # NOTE(review): the bare '(func)' line below looks like a stripped
        # decorator (presumably '@functools.wraps(func)') -- confirm upstream.
        (func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            coro = func(self._original_callable, *args, **kwargs)
            if (not _is_coroutine(coro)):
                raise NotACoroutine(f'''Function did not return a coroutine.
{func} must return a coroutine.''')
            return (await coro)
        self._add_runner(_AsyncImplementationRunner(self._original_target, self._method, self._original_callable, wrapper))
        return self

    def to_call_original(self) -> '_MockAsyncCallableDSL':
        """Route calls straight through to the original async callable."""
        if (not self._original_callable):
            raise ValueError('Can not call original callable that does not exist.')
        self._add_runner(_AsyncCallOriginalRunner(self._original_target, self._method, self._original_callable))
        return self
def get_installedversion():
    """Return Kodi's major version via JSON-RPC, defaulting to 14 on failure."""
    raw = xbmc.executeJSONRPC('{ "jsonrpc": "2.0", "method": "Application.GetProperties", "params": {"properties": ["version", "name"]}, "id": 1 }')
    reply = json.loads(raw)
    result = reply.get('result', {})
    if 'version' in result:
        return int(result['version']['major'])
    # Response lacked the expected structure; assume a sensible floor.
    return 14
class CacheDecorator(CacheHashMixin):
    """Decorator that caches a function's result via a CacheHandler.

    Works for both sync functions and coroutine functions; the cache key is
    either the explicit *key*, '<module>.<name>' for no-arg calls, or a hash
    built from the call arguments.
    """

    def __init__(self, handler: CacheHandler, key: Optional[str], duration: Union[(int, str, None)]='default'):
        super().__init__()
        self._cache = handler
        self.key = key  # explicit cache key; falls back to module.qualname
        self.duration = duration
        # Hashing strategies consumed by _build_ctx_key for args/kwargs.
        self.add_strategy('args')
        self.add_strategy('kwargs', self.dict_strategy)

    def _key_from_wrapped(self, f: Callable[(..., Any)]) -> str:
        """Default cache key: '<module>.<function name>'."""
        return ((f.__module__ + '.') + f.__name__)

    def _wrap_sync(self, f: Callable[(..., Any)]) -> Callable[(..., Any)]:
        """Wrap a synchronous function with get-or-set caching."""
        # NOTE(review): the bare '(f)' line below looks like a stripped
        # decorator (presumably '@wraps(f)') -- confirm upstream.
        (f)
        def wrap(*args, **kwargs) -> Any:
            if ((not args) and (not kwargs)):
                key = (self.key or self._key_from_wrapped(f))
            else:
                # Argument-dependent key so distinct calls cache separately.
                key = self._build_ctx_key(args=args, kwargs=kwargs)
            return self._cache.get_or_set(key, (lambda : f(*args, **kwargs)), self.duration)
        return wrap

    def _wrap_loop(self, f: Callable[(..., Awaitable[Any])]) -> Callable[(..., Awaitable[Any])]:
        """Wrap a coroutine function with async get-or-set caching."""
        # NOTE(review): same stripped-decorator pattern as _wrap_sync above.
        (f)
        async def wrap(*args, **kwargs) -> Any:
            if ((not args) and (not kwargs)):
                key = (self.key or self._key_from_wrapped(f))
            else:
                key = self._build_ctx_key(args=args, kwargs=kwargs)
            return (await self._cache.get_or_set_loop(key, (lambda : f(*args, **kwargs)), self.duration))
        return wrap

    def __call__(self, f: Callable[(..., Any)]) -> Callable[(..., Any)]:
        """Decorate *f*, picking the async or sync wrapper as appropriate."""
        rv = (self._wrap_loop(f) if asyncio.iscoroutinefunction(f) else self._wrap_sync(f))
        # Pin the default key now so later calls reuse it.
        if (not self.key):
            self.key = ((f.__module__ + '.') + f.__name__)
        return rv
def get_lpddr5_phy_init_sequence(phy_settings, timing_settings):
    """Build the LPDDR5 mode-register values and the DFI init sequence.

    Returns (init_sequence, mr) where init_sequence is a list of
    (comment, address, bank, command, delay) tuples and mr maps mode-register
    numbers to their encoded values.
    """
    from litedram.phy.lpddr5.basephy import FREQUENCY_RANGES
    from litedram.phy.lpddr5.commands import SpecialCmd, MPC, BankOrganization
    rl = phy_settings.cl
    wl = phy_settings.cwl
    wck_ck_ratio = phy_settings.wck_ck_ratio
    bl = 16  # burst length (currently unused below)
    # ODT / drive-strength settings, with datasheet-style defaults.
    dq_odt = getattr(phy_settings, 'dq_odt', 'RZQ/2')
    # NOTE(review): reads the 'dq_odt' attribute for ca_odt -- looks like a
    # copy/paste slip ('ca_odt' intended); confirm upstream.
    ca_odt = getattr(phy_settings, 'dq_odt', 'RZQ/2')
    pull_down_drive_strength = getattr(phy_settings, 'pull_down_drive_strength', 'RZQ/2')
    soc_odt = getattr(phy_settings, 'soc_odt', 'disable')
    wck_odt = getattr(phy_settings, 'wck_odt', 'disable')
    vref_ca = getattr(phy_settings, 'vref_ca', 34.0)
    vref_dq = getattr(phy_settings, 'vref_dq', 34.0)

    def get_frange():
        # Find the frequency range whose (WL, RL) pair matches the PHY settings.
        for fr in FREQUENCY_RANGES[wck_ck_ratio]:
            fr = fr.for_set(wl_set='A', rl_set=0)
            if ((fr.wl == wl) and (fr.rl == rl)):
                return fr
        raise ValueError
    frange = get_frange()
    # Termination name -> 3-bit register encoding.
    rzq_map = {'disable': 0, 'RZQ/1': 1, 'RZQ/2': 2, 'RZQ/3': 3, 'RZQ/4': 4, 'RZQ/5': 5, 'RZQ/6': 6}

    def get_vref(vref_percent, testing=False):
        # Encode a Vref percentage (10.0%..73.5%, 0.5% steps) into a 7-bit field.
        assert (10.0 <= vref_percent <= 73.5)
        assert (testing or (vref_percent >= 15.0)), f'Vref of {vref_percent:.2f}% (<15%) meant only for testing purpose'
        vref_percent = (round((vref_percent * 2)) / 2)
        reg = int(((vref_percent - 10.0) * 2))
        assert (0 <= reg <= 127)
        return reg
    # Mode-register values; reg() packs (offset, width, value) bit fields.
    mr = {}
    mr[1] = reg([(3, 1, 0), (4, 4, frange.mr)])
    mr[2] = reg([(0, 4, frange.mr), (4, 4, frange.n_wr_op)])
    mr[3] = reg([(0, 3, rzq_map[pull_down_drive_strength]), (3, 2, BankOrganization.B16), (5, 1, 0)])
    mr[10] = reg([(0, 0, 0), (2, 2, 0), (4, 2, 0), (6, 2, 0)])
    mr[11] = reg([(0, 3, rzq_map[dq_odt]), (3, 1, 0), (4, 3, rzq_map[ca_odt])])
    mr[12] = reg([(0, 7, get_vref(vref_ca)), (7, 1, 0)])
    mr[13] = 0
    mr[14] = reg([(0, 7, get_vref(vref_dq)), (7, 1, 0)])
    mr[15] = get_vref(vref_dq)
    mr[17] = reg([(0, 3, rzq_map[soc_odt]), (3, 1, 1), (4, 1, 1), (5, 1, 1), (6, 1, 0), (7, 1, 0)])
    mr[18] = reg([(0, 3, rzq_map[wck_odt]), (3, 1, 0), (4, 1, 0), (6, 1, 0), (7, 1, {2: 1, 4: 0}[wck_ck_ratio])])
    mr[20] = reg([(0, 2, 1)])
    mr[22] = 0
    mr[28] = reg([(0, 1, 0), (1, 1, 0), (2, 2, 1), (5, 1, 0)])

    def cmd_mr(ma):
        # Encode a mode-register write as an init-sequence entry.
        op = mr[ma]
        assert (ma < (2 ** 7)), 'MR address to big: {}'.format(ma)
        assert (op < (2 ** 8)), 'MR opcode to big: {}'.format(op)
        a = op
        ba = ma
        # NOTE(review): 'Load More Register' reads like a typo for
        # 'Load Mode Register' -- confirm before changing (runtime string).
        return ('Load More Register {}'.format(ma), a, ba, cmds['MODE_REGISTER'], 200)

    def ck(sec):
        # Convert a duration in seconds to a clock-cycle count.
        # NOTE(review): fmax = .0 makes every ck() delay 0 -- this looks
        # corrupted (fmax presumably derived from the system clock frequency);
        # restore from upstream before trusting the delays below.
        fmax = .0
        return int(math.ceil((sec * fmax)))
    # Reset/unreset, CS toggle, all MR writes in ascending order, ZQ cal latch.
    init_sequence = [('Assert reset', 0, 0, 'DFII_CONTROL_ODT', ck(0.0002)), ('Release reset', 0, 0, cmds['UNRESET'], (ck(0.002) + 5)), ('Toggle CS', 0, SpecialCmd.NOP, 'DFII_COMMAND_WE|DFII_COMMAND_CS', ck(2e-06)), *[cmd_mr(ma) for ma in sorted(mr.keys())], ('ZQ Calibration latch', MPC.ZQC_LATCH, SpecialCmd.MPC, 'DFII_COMMAND_WE|DFII_COMMAND_CS', max(4, ck(3e-08)))]
    return (init_sequence, mr)
# NOTE(review): '.parametrize(...)' appears to be a stripped
# '@pytest.mark.parametrize(...)' decorator -- confirm upstream.
.parametrize('_estimator, _importance', _estimators_importance)
def test_feature_importances(_estimator, _importance, df_test):
    """feature_importances_ matches the expected values for both selectors."""
    (X, y) = df_test
    # threshold=-100 ensures no feature is filtered out in either direction.
    sel = RecursiveFeatureAddition(_estimator, threshold=(- 100)).fit(X, y)
    # Addition reports importances in descending order.
    _importance.sort(reverse=True)
    assert (list(np.round(sel.feature_importances_.values, 4)) == _importance)
    sel = RecursiveFeatureElimination(_estimator, threshold=(- 100)).fit(X, y)
    # Elimination reports them in ascending order.
    _importance.sort(reverse=False)
    assert (list(np.round(sel.feature_importances_.values, 4)) == _importance)
def stop_task_execution(args):
    """CLI handler: stop one task of a workflow execution.

    Reads the workflow execution id and task name from the parsed *args*,
    forwards the stop request, and reports what was requested.
    """
    task_name = args.task_name
    workflow_execution_id = int(args.workflow_execution_id)
    ops.stop_task_execution(workflow_execution_id=workflow_execution_id, task_name=task_name)
    print(f'Stopping execution of task: {task_name} of workflow execution: {workflow_execution_id}.')
def test_blobValueOf_specificValues():
    """blobValueOf() must round-trip UTF-8 text, including tricky newlines."""
    # Strings with embedded/leading/trailing newlines, empty, and pure-newline runs.
    newline_runs = [('\n' * c) for c in (1, 2, 3, 10)]
    test_values = ['Value without newline', 'Value with \nembedded\n newlines', '\nValue with single enclosing newlines\n', '\n\nValue with double enclosing newlines\n\n', '', *newline_runs]
    for text in test_values:
        print(f'blobValueOf() {text!r}')
        js_buffer = globalThis.Buffer['from'](text, 'utf-8')
        as_blob = js_buffer.blobValueOf()
        as_json = js_buffer.valueOf()
        # JSON form is the Node Buffer serialization: {'type': 'Buffer', 'data': [...]}.
        assert as_json['type'] == 'Buffer'
        expected = bytes(text, 'utf-8')
        assert as_blob == bytes(as_json['data']) == expected
class OptionSonificationGlobalcontexttracksMappingFrequency(Options):
    """Option wrapper for the sonification frequency-mapping configuration.

    NOTE(review): every name below is defined twice (a getter-style form
    followed by a setter-style form). The originals were presumably decorated
    with @property and @<name>.setter, stripped in this copy -- as written,
    the second definition shadows the first; confirm upstream.
    """

    def mapFunction(self):
        # Getter: mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: passed through without JS wrapping.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: target property the value maps to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def value(self):
        # Getter: fixed value overriding the mapping.
        return self._config_get(None)

    def value(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: context the mapping is evaluated within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def sdiv(computation: ComputationAPI) -> None:
    """EVM SDIV opcode: signed division truncating toward zero; x / 0 == 0."""
    raw = computation.stack_pop_ints(2)
    numerator = unsigned_to_signed(raw[0])
    denominator = unsigned_to_signed(raw[1])
    if denominator == 0:
        # EVM semantics: division by zero yields zero rather than raising.
        quotient = 0
    else:
        # Divide magnitudes, then restore the sign (truncation toward zero,
        # unlike Python's floor division on negatives).
        sign = -1 if (numerator * denominator) < 0 else 1
        quotient = sign * (abs(numerator) // abs(denominator))
    computation.stack_push_int(signed_to_unsigned(quotient))
class PedersenMMRProof():
    """Inclusion proof of an item in a Pedersen Merkle Mountain Range.

    The proof is validated eagerly on construction; the zero-knowledge form
    is built lazily on first request and then memoized.
    """

    def __init__(self, root: FQ, position, item: Point, peaks: List[Point], siblings: List[Point]):
        self.root = root
        self.position = position
        self.item = item
        self.peaks = peaks
        self.siblings = siblings
        self.zkp = None  # lazily-built ZK proof cache
        # Reject invalid proofs immediately rather than at use time.
        assert PedersenMMR.inclusion_proof(root, position, item, peaks, siblings)

    def __str__(self):
        text = 'root: {}\n'.format(self.root)
        text += 'position: {}\n'.format(self.position)
        text += 'item: {}\n'.format(self.item)
        text += 'peaks: {}\n'.format(self.peaks)
        text += 'siblings: {}'.format(self.siblings)
        return text

    def zk_proof(self, r: Field, v: Field):
        """Return the ZK inclusion proof for blinding factor *r* and value *v*.

        Built once and cached; later calls return the same proof object.
        """
        if self.zkp is None:
            self.zkp = PedersenMMR.zk_inclusion_proof(self.root, self.position, r, v, self.peaks, self.siblings)
        return self.zkp
# NOTE(review): the bare '()' and option tuples below appear to be stripped
# click decorators (presumably '@click.command()' and '@click.option(...)')
# -- confirm against the original file.
()
('--input', '-i', 'input_', required=True, help='Input path')
('--output', '-o', help='Output path')
('--all', 'all_', is_flag=True, help='Sync all the things?')
('--overwrite', is_flag=True, help='Overwrite local files')
def sync(input_: str, output: str, all_: bool, overwrite: bool) -> None:
    """CLI entry point: sync *input_* to *output* (currently only announces)."""
    print('Syncing')
# NOTE(review): '.parametrize(...)' appears to be a stripped
# '@pytest.mark.parametrize(...)' decorator -- confirm upstream.
# Cases: default Fortran extensions (all 16 match), non-Fortran names
# (no match), then custom extra patterns ('\\.inc' literal, 'inc.*' glob-ish).
.parametrize('input_exts, input_files, matches', [([], ['test.f', 'test.F', 'test.f90', 'test.F90', 'test.f03', 'test.F03', 'test.f18', 'test.F18', 'test.f77', 'test.F77', 'test.f95', 'test.F95', 'test.for', 'test.FOR', 'test.fpp', 'test.FPP'], ([True] * 16)), ([], ['test.ff', 'test.f901', 'test.f90.ff'], [False, False, False]), (['\\.inc'], ['test.inc', 'testinc', 'test.inc2'], [True, False, False]), (['inc.*'], ['test.inc', 'testinc', 'test.inc2'], [True, True, True])])
def test_src_file_exts(input_exts: list[str], input_files: list[str], matches: list[bool]):
    """The generated extension regex matches exactly the expected file names."""
    regex = create_src_file_exts_regex(input_exts)
    results = [bool(regex.search(file)) for file in input_files]
    assert (results == matches)
class TestObserverError(unittest.TestCase):
    """Error reporting of the observe() expression mini-language."""

    def setUp(self):
        # Re-raise handler exceptions so they surface in the tests below.
        push_exception_handler(reraise_exceptions=True)
        self.addCleanup(pop_exception_handler)

    def test_trait_is_not_list(self):
        # list_items() on a scalar trait fails when the trait changes.
        team = Team()
        team.observe((lambda e: None), trait('leader').list_items())
        with self.assertRaises(ValueError) as captured:
            team.leader = Person()
        self.assertIn('Expected a TraitList to be observed', str(captured.exception))

    def test_items_on_a_list_not_observable_by_named_trait(self):
        # List items that are plain strings have no traits to observe.
        team = Team()
        expression = trait('member_names').list_items().trait('does_not_exist')
        team.observe((lambda e: None), expression)
        with self.assertRaises(ValueError) as captured:
            team.member_names = ['Paul']
        self.assertEqual(str(captured.exception), "Trait named 'does_not_exist' not found on 'Paul'.")

    def test_extended_trait_on_any_value(self):
        # Observing into a non-HasTraits value fails at observe() time.
        team = Team()
        team.any_value = 123
        with self.assertRaises(ValueError) as captured:
            team.observe((lambda e: None), trait('any_value').trait('does_not_exist'))
        self.assertEqual(str(captured.exception), "Trait named 'does_not_exist' not found on 123.")

    def test_no_new_trait_added(self):
        # A failed observation must not implicitly create the missing trait.
        team = Team()
        team.observe((lambda e: None), trait('leader').trait('does_not_exist'))
        with self.assertRaises(ValueError):
            team.leader = Person()
        self.assertNotIn('does_not_exist', team.leader.trait_names())
# NOTE(review): the lines above the function look like stripped click
# decorators (presumably '@repo_mgr_cli.command(...)', '@click.option(...)'
# and '@click.pass_context') -- confirm against the original file.
_mgr_cli.command('delete')
('--name', required=True, help='Repository name', type=str)
('--yes', is_flag=True, callback=delete_callback, expose_value=False, prompt='Are you sure you want to delete the repository?')
_context
def _delete(ctx, name):
    """Delete the named snapshot repository; exit 1 if it does not exist."""
    logger = logging.getLogger('curator.repomgrcli._delete')
    client = get_client(ctx)
    try:
        logger.info('Deleting repository %s...', name)
        client.snapshot.delete_repository(name=name)
    except NotFoundError:
        # Missing repository is a hard error for this command.
        logger.error('Unable to delete repository: %s Not Found.', name)
        sys.exit(1)
class TestVTKDocMassager(unittest.TestCase):
    """Tests for ``indenter.VTKDocMassager``.

    The massager rewrites VTK C++-style documentation into Python/traits
    style: ``vtkFoo`` class names lose their prefix (digits get spelled out,
    e.g. ``vtk3DS`` -> ``ThreeDS``), ``CamelCase`` methods/ivars become
    ``snake_case``, and C++ signature lines are stripped or rewritten.

    NOTE(review): whitespace inside the expected-output literals appears to
    have been collapsed by the source extraction (e.g. single spaces where
    the real file likely had indentation) -- verify against upstream before
    relying on exact spacing.
    """

    def test_doc_massage(self):
        """massage() converts class names, methods and ivars in free text."""
        doc = 'This is a test. All VTK classes and vtk classes\nare named like this: vtkActor, vtkLODProperty,\nvtkXMLDataReader, vtk3DSImporter etc. The methods \nof a VTK object are like GetData, GetOutput, \nSetRepresentationToWireframe. Ivars are named like\nSpecularColor, Write3DPropsAsRasterImage etc.'
        ret = 'This is a test. All VTK classes and vtk classes\nare named like this: Actor, LODProperty,\nXMLDataReader, ThreeDSImporter etc. The methods \nof a VTK object are like get_data, get_output, \nset_representation_to_wireframe. Ivars are named like\nspecular_color, write3d_props_as_raster_image etc.'
        dm = indenter.VTKDocMassager()
        self.assertEqual(dm.massage(doc), ret)

    def test_rename_class(self):
        """_rename_class() strips the vtk prefix and spells leading digits."""
        dm = indenter.VTKDocMassager()
        t = 'vtkFooBar vtkXMLDataReader vtk3DSReader vtk2000Bug'
        r = dm._rename_class(t)
        correct = 'FooBar XMLDataReader ThreeDSReader Two000Bug'
        self.assertEqual(r, correct)

    def test_remove_sig(self):
        """_remove_sig() drops the leading signature block, keeping prose."""
        dm = indenter.VTKDocMassager()
        t = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\n Set/Get the output of this reader.\n'
        r = dm._remove_sig(t)
        correct = ' Set/Get the output of this reader.\n'
        self.assertEqual(r, correct)
        # A doc that is nothing but signatures reduces to the empty string.
        t = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\n'
        r = dm._remove_sig(t)
        correct = ''
        self.assertEqual(r, correct)

    def test_class_doc(self):
        """write_class_doc() emits a massaged raw docstring block."""
        dm = indenter.VTKDocMassager()
        indent = indenter.Indent()
        out = cStringIO.StringIO()
        doc = 'vtkLODProperty, vtkXMLDataReader, vtk3DSImporter\nSetRepresentationToWireframe, Write3DPropsAsRasterImage'
        dm.write_class_doc(doc, out, indent)
        out.seek(0)
        ret = out.read()
        correct = ' r"""\n LODProperty, XMLDataReader, ThreeDSImporter\n set_representation_to_wireframe, write3d_props_as_raster_image\n """\n'
        self.assertEqual(ret, correct)
        # An empty doc still yields a (blank) docstring block.
        out = cStringIO.StringIO()
        doc = ''
        dm.write_class_doc(doc, out, indent)
        out.seek(0)
        ret = out.read()
        self.assertEqual(ret, ' r"""\n \n """\n')

    def test_trait_doc(self):
        """write_trait_doc() strips signatures before emitting the docstring."""
        dm = indenter.VTKDocMassager()
        indent = indenter.Indent()
        out = cStringIO.StringIO()
        doc = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\nvtkLODProperty, vtkXMLDataReader, vtk3DSImporter\nSetRepresentationToWireframe, Write3DPropsAsRasterImage'
        dm.write_trait_doc(doc, out, indent)
        out.seek(0)
        ret = out.read()
        correct = ' r"""\n LODProperty, XMLDataReader, ThreeDSImporter\n set_representation_to_wireframe, write3d_props_as_raster_image\n """\n'
        self.assertEqual(ret, correct)
        # Signature-only input produces a blank docstring body.
        out = cStringIO.StringIO()
        doc = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\n'
        dm.write_trait_doc(doc, out, indent)
        out.seek(0)
        ret = out.read()
        self.assertEqual(ret, ' r"""\n \n """\n')

    def test_method_doc(self):
        """write_method_doc() keeps Python signature lines, drops C++ ones."""
        dm = indenter.VTKDocMassager()
        indent = indenter.Indent()
        out = cStringIO.StringIO()
        doc = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\nvtkLODProperty, vtkXMLDataReader, vtk3DSImporter\nSetRepresentationToWireframe, Write3DPropsAsRasterImage'
        dm.write_method_doc(doc, out, indent)
        out.seek(0)
        ret = out.read()
        correct = ' r"""\n V.get_output(int) -> StructuredPoints\n V.get_output() -> StructuredPoints\n\n LODProperty, XMLDataReader, ThreeDSImporter\n set_representation_to_wireframe, write3d_props_as_raster_image\n """\n'
        self.assertEqual(ret, correct)
        # Signature-only doc: only the rewritten Python signatures remain.
        out = cStringIO.StringIO()
        doc = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\n'
        dm.write_method_doc(doc, out, indent)
        out.seek(0)
        ret = out.read()
        correct = ' r"""\n V.get_output(int) -> StructuredPoints\n V.get_output() -> StructuredPoints\n """\n'
        self.assertEqual(ret, correct)
        # LaTeX-style backslashes must survive into a valid raw string
        # (hence the eval() check at the end).
        out = cStringIO.StringIO()
        doc = '\nGetProminentComponentValues(self, comp:int,\n values:vtkVariantArray, uncertainty:float=1.e-6,\n minimumProminence:float=1.e-3) -> None\nC++: virtual void GetProminentComponentValues(int comp,\n vtkVariantArray *values, double uncertainty=1.e-6,\n double minimumProminence=1.e-3)\n\nPopulate the given vtkVariantArray with a set of distinct values.\nIn practice, $N >= \\frac{5}{P}\\mathrm{ln}\\left(\\frac{1}{PU}\\right)$\n'.lstrip()
        dm.write_method_doc(doc, out, indent)
        ret = out.getvalue()
        correct = ' r"""\n GetProminentComponentValues(self, comp:int,\n values:VariantArray, uncertainty:float=1.e-6,\n minimumProminence:float=1.e-3) -> None\n virtual void GetProminentComponentValues(int comp,\n VariantArray *values, double uncertainty=1.e-6,\n double minimumProminence=1.e-3)\n\n Populate the given VariantArray with a set of distinct values.\n In practice, $N >= \\frac{5}{P}\\mathrm{ln}\\left(\\frac{1}{PU}\\right)$\n """\n'
        self.assertEqual(ret, correct)
        self.assertIsInstance(eval(ret), str)

    def test_get_method_doc(self):
        """get_method_doc() returns the massaged doc text (no quoting)."""
        dm = indenter.VTKDocMassager()
        doc = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\nvtkLODProperty, vtkXMLDataReader, vtk3DSImporter\nSetRepresentationToWireframe, Write3DPropsAsRasterImage'
        ret = dm.get_method_doc(doc)
        correct = 'get_output(int) -> StructuredPoints\nget_output() -> StructuredPoints\n\nLODProperty, XMLDataReader, ThreeDSImporter\nset_representation_to_wireframe, write3d_props_as_raster_image'
        self.assertEqual(ret, correct)
        doc = 'V.GetOutput(int) -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput (int idx);\nV.GetOutput() -> vtkStructuredPoints\nC++: vtkStructuredPoints *GetOutput ();\n\n'
        ret = dm.get_method_doc(doc)
        correct = 'get_output(int) -> StructuredPoints\nget_output() -> StructuredPoints\n'
        self.assertEqual(ret, correct)
class Tile(object):
    """In-memory view of one FPGA tile type loaded from its JSON database.

    Wraps the tile-type JSON (wires, sites, pips) behind typed accessors and
    builds lazy lookup tables for pips-by-name and per-wire connectivity.
    """

    def __init__(self, tilename, tile_dbs):
        # tilename: tile name (case-insensitive; upper-cased form must match
        #           the 'tile_type' field of the JSON database).
        # tile_dbs: object whose .tile_type attribute is the path to the
        #           tile-type JSON file.
        self.tilename = tilename
        self.tilename_upper = self.tilename.upper()
        self.tile_dbs = tile_dbs
        self.wires = None
        self.sites = None
        self.pips = None
        self.pips_by_name = {}  # lazily filled by get_pip_by_name()

        def yield_sites(sites):
            # Convert raw site dicts into Site tuples, resolving per-pin
            # wire/timing info where present.
            for site in sites:
                site_pins = []
                for (name, site_pin_info) in site['site_pins'].items():
                    if (site_pin_info is not None):
                        (wire, timing) = get_site_pin_timing(site_pin_info)
                        site_pins.append(SitePin(name=name, wire=wire, timing=timing))
                    else:
                        # Unconnected pin: keep the name, no wire/timing.
                        site_pins.append(SitePin(name=name, wire=None, timing=None))
                (yield Site(name=site['name'], prefix=site['prefix'], type=site['type'], x=site['x_coord'], y=site['y_coord'], site_pins=site_pins))

        def yield_pips(pips):
            # Convert raw pip dicts into Pip tuples; flags arrive as "0"/"1"
            # strings, hence bool(int(...)).
            for (name, pip) in pips.items():
                (yield Pip(name=name, net_to=pip['dst_wire'], net_from=pip['src_wire'], can_invert=bool(int(pip['can_invert'])), is_directional=bool(int(pip['is_directional'])), is_pseudo=bool(int(pip['is_pseudo'])), is_pass_transistor=is_pass_transistor(pip), timing=get_pip_timing(pip.get('src_to_dst')), backward_timing=get_pip_timing(pip.get('dst_to_src'))))

        with OpenSafeFile(self.tile_dbs.tile_type) as f:
            tile_type = json.load(f)
        # Sanity check: the database must describe this tile type.
        assert (self.tilename_upper == tile_type['tile_type'])
        self.wires = get_wires(tile_type['wires'])
        self.sites = tuple(yield_sites(tile_type['sites']))
        self.pips = tuple(yield_pips(tile_type['pips']))
        self.wire_info = {}  # lazily filled by get_wire_info()

    def get_wires(self):
        """Return the tile's wires as loaded from the database."""
        return self.wires

    def get_sites(self):
        """Return the tuple of Site records for this tile type."""
        return self.sites

    def get_pips(self):
        """Return the tuple of Pip records for this tile type."""
        return self.pips

    def get_pip_by_name(self, name):
        """Return the Pip with the given name (builds the index on first use)."""
        if (len(self.pips_by_name) == 0):
            for pip in self.pips:
                self.pips_by_name[pip.name] = pip
        return self.pips_by_name[name]

    def get_wire_info(self, target_wire, allow_pseudo=False):
        """Return WireInfo (connected pips and site pins) for *target_wire*.

        The full wire->WireInfo table is built once on first call; the
        allow_pseudo flag used for that first build decides whether pseudo
        pips are included for every wire.
        """
        if (len(self.wire_info) == 0):
            for wire in self.wires:
                pips = list()
                sites = list()
                # Site pins attached to this wire.
                for site in self.sites:
                    for site_pin in site.site_pins:
                        if (site_pin.wire == wire):
                            sites.append((site.name, site_pin.name))
                # Pips touching this wire on either end (optionally
                # excluding pseudo pips).
                for pip in self.pips:
                    pseudo_filter = ((not pip.is_pseudo) or allow_pseudo)
                    if (((wire == pip.net_to) or (wire == pip.net_from)) and pseudo_filter):
                        pips.append(pip.name)
                assert (wire not in self.wire_info)
                self.wire_info[wire] = WireInfo(pips=pips, sites=sites)
        return self.wire_info[target_wire]

    def get_instance_sites(self, grid_info):
        """Yield Site records with grid-global coordinates/names.

        Translates each tile-local site to its instance name via the grid
        origin; when the computed name is absent from the grid (coordinate
        scheme mismatch), falls back to matching by site type, which must
        then be unique. Finally checks that every grid site was produced.
        """
        site_names = set()
        for site in self.sites:
            site_name = '{}_X{}Y{}'.format(site.prefix, site.x, site.y)
            (origin_x, origin_y) = lib.find_origin_coordinate(site_name, grid_info.sites.keys())
            x = (site.x + origin_x)
            y = (site.y + origin_y)
            site_name = '{}_X{}Y{}'.format(site.prefix, x, y)
            if (site_name not in grid_info.sites):
                # Fallback: locate by type; exactly one candidate allowed.
                type_count = 0
                for (site_name_from_grid, site_type) in grid_info.sites.items():
                    if (site.type == site_type):
                        type_count += 1
                        site_name = site_name_from_grid
                assert (type_count == 1), (site_name, type_count)
            site_names.add(site_name)
            assert (site.type == grid_info.sites[site_name])
            (yield Site(name=site_name, prefix=site.prefix, type=site.type, x=x, y=y, site_pins=site.site_pins))
        # Every site reported by the grid must have been matched.
        assert (site_names == set(grid_info.sites.keys()))
class OptionPlotoptionsAreasplineSonificationContexttracksMappingHighpassFrequency(Options):
    """Highcharts-style option wrapper for the highpass-filter frequency mapping.

    NOTE(review): the extracted source contained duplicate bare ``def``s per
    name (the second silently shadowing the first); they are the standard
    getter/setter pairs and the ``@property`` / ``@<name>.setter`` decorators
    are restored here -- confirm against the original file.
    """

    @property
    def mapFunction(self):
        """How to perform the mapping (presumably a mapping-function name)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The point property to map to (e.g. 'y')."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped output range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped output range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """What data set to map within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def check_prescribing_data_in_bigquery(date, bq_client):
    """Assert that BigQuery holds prescribing rows for the given month.

    Skipped entirely when ``settings.CHECK_DATA_IN_BQ`` is falsy (e.g. in
    environments without BigQuery access).
    """
    if not settings.CHECK_DATA_IN_BQ:
        # Data checks disabled for this environment; nothing to verify.
        return
    sql = '\n SELECT COUNT(*)\n FROM {hscic}.prescribing_v2\n WHERE month = TIMESTAMP("%s")\n ' % (date,)
    query_result = bq_client.query(sql)
    row_count = query_result.rows[0][0]
    assert row_count > 0
def quadrupole3d_21(ax, da, A, bx, db, B, R):
    """Return a (6, 6, 3) array of quadrupole-type 3D Gaussian integrals.

    Appears to be machine-generated code (common-subexpression form, e.g.
    from a sympy ``cse`` pass) for the [d|p] shell pair: ``ax``/``bx`` are
    Gaussian exponents, ``da``/``db`` contraction coefficients, ``A``/``B``
    the two center coordinates and ``R`` the multipole origin --
    presumably; TODO confirm the exact convention against the generator.
    Do not hand-edit the x* chain below; regenerate it instead.
    """
    result = numpy.zeros((6, 6, 3), dtype=float)
    # --- common subexpressions (auto-generated; order matters) ---
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + R[0])
    x4 = (x2 + B[0])
    x5 = (x3 * x4)
    x6 = (2.0 * x5)
    x7 = (x0 + x6)
    x8 = (x3 * x7)
    x9 = (x2 + A[0])
    x10 = (x7 * x9)
    x11 = ((- 2.0) * x1)
    x12 = (x11 + R[0])
    x13 = (x12 + B[0])
    x14 = (3.0 * x0)
    x15 = (x0 * (x12 + A[0]))
    x16 = (x3 * x9)
    x17 = (2.0 * x16)
    x18 = (x0 + x17)
    x19 = (x15 + (x18 * x3))
    x20 = (x4 * x9)
    x21 = (2.0 * x20)
    x22 = (x0 * (((x14 + x17) + x21) + x6))
    x23 = (x0 * x13)
    x24 = (x10 + x23)
    x25 = (2.0 * x24)
    x26 = (x22 + (x25 * x3))
    x27 = 1.
    x28 = ((ax * bx) * x0)
    x29 = (((5. * da) * db) * numpy.exp(((- x28) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x30 = (numpy.sqrt(x0) * x29)
    x31 = (x0 * x30)
    x32 = (x27 * x31)
    x33 = (0. * x32)
    x34 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x35 = (- x34)
    x36 = (x35 + B[1])
    x37 = (x3 ** 2)
    x38 = (x33 * ((x0 * ((x14 + (4.0 * x16)) + (2.0 * x37))) + ((2.0 * x19) * x9)))
    x39 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x40 = (- x39)
    x41 = (x40 + B[2])
    x42 = (x35 + A[1])
    x43 = (0.25 * x31)
    x44 = (x26 * x43)
    x45 = (0.5 * x0)
    x46 = (x36 * x42)
    x47 = ((x0 ** 1.5) * x29)
    x48 = (x47 * (x45 + x46))
    x49 = (0.5 * x19)
    x50 = (x30 * x45)
    x51 = (x19 * x50)
    x52 = (x40 + A[2])
    x53 = (x41 * x52)
    x54 = (x47 * (x45 + x53))
    x55 = ((x42 ** 2) + x45)
    x56 = (x23 + x8)
    x57 = (0. * x27)
    x58 = (x47 * x57)
    x59 = (x56 * x58)
    x60 = ((- 2.0) * x34)
    x61 = (x60 + B[1])
    x62 = (2.0 * x46)
    x63 = ((x0 * (x61 + A[1])) + (x42 * (x0 + x62)))
    x64 = (x37 + x45)
    x65 = (x58 * x64)
    x66 = (0. * x27)
    x67 = (x47 * x66)
    x68 = (x64 * x67)
    x69 = (x50 * x52)
    x70 = (x45 + (x52 ** 2))
    x71 = ((- 2.0) * x39)
    x72 = (x71 + B[2])
    x73 = (2.0 * x53)
    x74 = ((x0 * (x72 + A[2])) + (x52 * (x0 + x73)))
    x75 = (x35 + R[1])
    x76 = (x33 * (x22 + (x25 * x9)))
    x77 = (x36 * x75)
    x78 = (x45 + x77)
    x79 = (x15 + (x18 * x9))
    x80 = (x58 * x79)
    x81 = (0. * x32)
    x82 = (x41 * x81)
    x83 = (x42 * x75)
    x84 = (x45 + x83)
    x85 = (0.5 * x47)
    x86 = (x24 * x85)
    x87 = (x61 + R[1])
    x88 = (x0 * x87)
    x89 = (2.0 * x77)
    x90 = (x0 + x89)
    x91 = (x42 * x90)
    x92 = (x88 + x91)
    x93 = (x16 + x45)
    x94 = (x85 * x93)
    x95 = (x47 * x93)
    x96 = (x0 * ((x60 + A[1]) + R[1]))
    x97 = (2.0 * x83)
    x98 = (x0 + x97)
    x99 = ((x42 * x98) + x96)
    x100 = (x47 * (x45 + x5))
    x101 = (x100 * x57)
    x102 = (x0 * (((x14 + x62) + x89) + x97))
    x103 = (2.0 * x92)
    x104 = (x102 + (x103 * x42))
    x105 = (x3 * x33)
    x106 = (x100 * x66)
    x107 = (x3 * x67)
    x108 = (x3 * x81)
    x109 = (x40 + R[2])
    x110 = (x109 * x81)
    x111 = (x109 * x41)
    x112 = (x111 + x45)
    x113 = (x42 * x50)
    x114 = (x109 * x52)
    x115 = (x114 + x45)
    x116 = (x72 + R[2])
    x117 = (x0 * x116)
    x118 = (2.0 * x111)
    x119 = (x0 + x118)
    x120 = (x119 * x52)
    x121 = (x117 + x120)
    x122 = (x0 * ((x71 + A[2]) + R[2]))
    x123 = (2.0 * x114)
    x124 = (x0 + x123)
    x125 = (x122 + (x124 * x52))
    x126 = (x0 * (((x118 + x123) + x14) + x73))
    x127 = (2.0 * x121)
    x128 = (x126 + (x127 * x52))
    x129 = ((x0 * ((x11 + A[0]) + B[0])) + (x9 * (x0 + x21)))
    x130 = (x75 ** 2)
    x131 = (x130 + x45)
    x132 = (x131 * x58)
    x133 = (x45 + (x9 ** 2))
    x134 = (x75 * x90)
    x135 = (x134 + x88)
    x136 = (x135 * x58)
    x137 = (x133 * x67)
    x138 = ((x75 * x98) + x96)
    x139 = (x20 + x45)
    x140 = (x139 * x85)
    x141 = (x102 + (x103 * x75))
    x142 = (x141 * x43)
    x143 = (x50 * x9)
    x144 = (x139 * x47)
    x145 = (x33 * ((x0 * (((2.0 * x130) + x14) + (4.0 * x83))) + ((2.0 * x138) * x42)))
    x146 = (x4 * x47)
    x147 = (x146 * x66)
    x148 = (x47 * x9)
    x149 = (x109 ** 2)
    x150 = (x149 + x45)
    x151 = (x150 * x58)
    x152 = (x109 * x119)
    x153 = (x117 + x152)
    x154 = (x153 * x58)
    x155 = ((x109 * x124) + x122)
    x156 = ((x109 * x127) + x126)
    x157 = (x156 * x43)
    x158 = (x33 * ((x0 * (((4.0 * x114) + x14) + (2.0 * x149))) + ((2.0 * x155) * x52)))
    # --- assemble the (6, 6, 3) result tensor (auto-generated) ---
    result[(0, 0, 0)] = numpy.sum(((- x33) * ((x0 * ((((2.0 * x10) + (x13 * x14)) + x19) + x8)) + (x26 * x9))))
    result[(0, 0, 1)] = numpy.sum(((- x36) * x38))
    result[(0, 0, 2)] = numpy.sum(((- x38) * x41))
    result[(0, 1, 0)] = numpy.sum(((- x42) * x44))
    result[(0, 1, 1)] = numpy.sum(((- x48) * x49))
    result[(0, 1, 2)] = numpy.sum((((- x41) * x42) * x51))
    result[(0, 2, 0)] = numpy.sum(((- x44) * x52))
    result[(0, 2, 1)] = numpy.sum((((- x36) * x51) * x52))
    result[(0, 2, 2)] = numpy.sum(((- x49) * x54))
    result[(0, 3, 0)] = numpy.sum(((- x55) * x59))
    result[(0, 3, 1)] = numpy.sum(((- x63) * x65))
    result[(0, 3, 2)] = numpy.sum((((- x41) * x55) * x68))
    result[(0, 4, 0)] = numpy.sum((((- x42) * x56) * x69))
    result[(0, 4, 1)] = numpy.sum((((- x48) * x52) * x64))
    result[(0, 4, 2)] = numpy.sum((((- x42) * x54) * x64))
    result[(0, 5, 0)] = numpy.sum(((- x59) * x70))
    result[(0, 5, 1)] = numpy.sum((((- x36) * x68) * x70))
    result[(0, 5, 2)] = numpy.sum(((- x65) * x74))
    result[(1, 0, 0)] = numpy.sum(((- x75) * x76))
    result[(1, 0, 1)] = numpy.sum(((- x78) * x80))
    result[(1, 0, 2)] = numpy.sum((((- x75) * x79) * x82))
    result[(1, 1, 0)] = numpy.sum(((- x84) * x86))
    result[(1, 1, 1)] = numpy.sum(((- x92) * x94))
    result[(1, 1, 2)] = numpy.sum((((- x41) * x84) * x95))
    result[(1, 2, 0)] = numpy.sum((((- x24) * x69) * x75))
    result[(1, 2, 1)] = numpy.sum((((- x52) * x78) * x95))
    result[(1, 2, 2)] = numpy.sum((((- x54) * x75) * x93))
    result[(1, 3, 0)] = numpy.sum(((- x101) * x99))
    result[(1, 3, 1)] = numpy.sum(((- x104) * x105))
    result[(1, 3, 2)] = numpy.sum((((- x3) * x82) * x99))
    result[(1, 4, 0)] = numpy.sum((((- x100) * x52) * x84))
    result[(1, 4, 1)] = numpy.sum((((- x3) * x69) * x92))
    result[(1, 4, 2)] = numpy.sum((((- x3) * x54) * x84))
    result[(1, 5, 0)] = numpy.sum((((- x106) * x70) * x75))
    result[(1, 5, 1)] = numpy.sum((((- x107) * x70) * x78))
    result[(1, 5, 2)] = numpy.sum((((- x108) * x74) * x75))
    result[(2, 0, 0)] = numpy.sum(((- x109) * x76))
    result[(2, 0, 1)] = numpy.sum((((- x110) * x36) * x79))
    result[(2, 0, 2)] = numpy.sum(((- x112) * x80))
    result[(2, 1, 0)] = numpy.sum((((- x109) * x113) * x24))
    result[(2, 1, 1)] = numpy.sum((((- x109) * x48) * x93))
    result[(2, 1, 2)] = numpy.sum((((- x112) * x42) * x95))
    result[(2, 2, 0)] = numpy.sum(((- x115) * x86))
    result[(2, 2, 1)] = numpy.sum((((- x115) * x36) * x95))
    result[(2, 2, 2)] = numpy.sum(((- x121) * x94))
    result[(2, 3, 0)] = numpy.sum((((- x106) * x109) * x55))
    result[(2, 3, 1)] = numpy.sum((((- x108) * x109) * x63))
    result[(2, 3, 2)] = numpy.sum((((- x107) * x112) * x55))
    result[(2, 4, 0)] = numpy.sum((((- x100) * x115) * x42))
    result[(2, 4, 1)] = numpy.sum((((- x115) * x3) * x48))
    result[(2, 4, 2)] = numpy.sum((((- x113) * x121) * x3))
    result[(2, 5, 0)] = numpy.sum(((- x101) * x125))
    result[(2, 5, 1)] = numpy.sum((((- x108) * x125) * x36))
    result[(2, 5, 2)] = numpy.sum(((- x105) * x128))
    result[(3, 0, 0)] = numpy.sum(((- x129) * x132))
    result[(3, 0, 1)] = numpy.sum(((- x133) * x136))
    result[(3, 0, 2)] = numpy.sum((((- x131) * x137) * x41))
    result[(3, 1, 0)] = numpy.sum(((- x138) * x140))
    result[(3, 1, 1)] = numpy.sum(((- x142) * x9))
    result[(3, 1, 2)] = numpy.sum((((- x138) * x143) * x41))
    result[(3, 2, 0)] = numpy.sum((((- x131) * x144) * x52))
    result[(3, 2, 1)] = numpy.sum((((- x135) * x69) * x9))
    result[(3, 2, 2)] = numpy.sum((((- x131) * x54) * x9))
    result[(3, 3, 0)] = numpy.sum(((- x145) * x4))
    result[(3, 3, 1)] = numpy.sum(((- x33) * ((x0 * (((x134 + x138) + (x14 * x87)) + (2.0 * x91))) + (x141 * x42))))
    result[(3, 3, 2)] = numpy.sum(((- x145) * x41))
    result[(3, 4, 0)] = numpy.sum((((- x138) * x4) * x69))
    result[(3, 4, 1)] = numpy.sum(((- x142) * x52))
    result[(3, 4, 2)] = numpy.sum((((- 0.5) * x138) * x54))
    result[(3, 5, 0)] = numpy.sum((((- x131) * x147) * x70))
    result[(3, 5, 1)] = numpy.sum(((- x136) * x70))
    result[(3, 5, 2)] = numpy.sum(((- x132) * x74))
    result[(4, 0, 0)] = numpy.sum((((- x110) * x129) * x75))
    result[(4, 0, 1)] = numpy.sum((((- x109) * x137) * x78))
    result[(4, 0, 2)] = numpy.sum((((- x112) * x137) * x75))
    result[(4, 1, 0)] = numpy.sum((((- x109) * x144) * x84))
    result[(4, 1, 1)] = numpy.sum((((- x109) * x143) * x92))
    result[(4, 1, 2)] = numpy.sum((((- x112) * x148) * x84))
    result[(4, 2, 0)] = numpy.sum((((- x115) * x144) * x75))
    result[(4, 2, 1)] = numpy.sum((((- x115) * x148) * x78))
    result[(4, 2, 2)] = numpy.sum((((- x121) * x143) * x75))
    result[(4, 3, 0)] = numpy.sum((((- x110) * x4) * x99))
    result[(4, 3, 1)] = numpy.sum((((- x104) * x109) * x33))
    result[(4, 3, 2)] = numpy.sum((((- x112) * x58) * x99))
    result[(4, 4, 0)] = numpy.sum((((- x115) * x146) * x84))
    result[(4, 4, 1)] = numpy.sum((((- x115) * x85) * x92))
    result[(4, 4, 2)] = numpy.sum((((- x121) * x84) * x85))
    result[(4, 5, 0)] = numpy.sum(((((- x125) * x4) * x75) * x81))
    result[(4, 5, 1)] = numpy.sum((((- x125) * x58) * x78))
    result[(4, 5, 2)] = numpy.sum((((- x128) * x33) * x75))
    result[(5, 0, 0)] = numpy.sum(((- x129) * x151))
    result[(5, 0, 1)] = numpy.sum((((- x137) * x150) * x36))
    result[(5, 0, 2)] = numpy.sum(((- x133) * x154))
    result[(5, 1, 0)] = numpy.sum((((- x144) * x150) * x42))
    result[(5, 1, 1)] = numpy.sum((((- x150) * x48) * x9))
    result[(5, 1, 2)] = numpy.sum((((- x113) * x153) * x9))
    result[(5, 2, 0)] = numpy.sum(((- x140) * x155))
    result[(5, 2, 1)] = numpy.sum((((- x143) * x155) * x36))
    result[(5, 2, 2)] = numpy.sum(((- x157) * x9))
    result[(5, 3, 0)] = numpy.sum((((- x147) * x150) * x55))
    result[(5, 3, 1)] = numpy.sum(((- x151) * x63))
    result[(5, 3, 2)] = numpy.sum(((- x154) * x55))
    result[(5, 4, 0)] = numpy.sum((((- x113) * x155) * x4))
    result[(5, 4, 1)] = numpy.sum((((- 0.5) * x155) * x48))
    result[(5, 4, 2)] = numpy.sum(((- x157) * x42))
    result[(5, 5, 0)] = numpy.sum(((- x158) * x4))
    result[(5, 5, 1)] = numpy.sum(((- x158) * x36))
    result[(5, 5, 2)] = numpy.sum(((- x33) * ((x0 * ((((x116 * x14) + (2.0 * x120)) + x152) + x155)) + (x156 * x52))))
    return result
def create_suffix_array(suffix_array, data, suffix_array_algorithm):
    """Fill *suffix_array* in place from *data* using the chosen algorithm.

    ``suffix_array_algorithm`` selects the backend: 'sais' or 'divsufsort'.
    Raises ``Error`` for any other name.
    """
    builders = {
        'sais': sais,
        'divsufsort': divsufsort,
    }
    builder = builders.get(suffix_array_algorithm)
    if builder is None:
        raise Error('Bad suffix array algorithm {}.'.format(suffix_array_algorithm))
    builder(data, suffix_array)
class SocialLinkList(ResourceList):
    """Read-only JSON:API list endpoint for an event's social links."""

    def query(self, view_kwargs):
        # Base query over all SocialLink rows, then narrowed to the event
        # identified in the URL kwargs by the shared event_query helper.
        query_ = self.session.query(SocialLink)
        query_ = event_query(query_, view_kwargs)
        return query_

    # Class attributes are defined after query() on purpose: data_layer
    # captures the function object itself as the custom query method.
    view_kwargs = True
    methods = ['GET']  # listing only; no creation through this endpoint
    schema = SocialLinkSchema
    data_layer = {'session': db.session, 'model': SocialLink, 'methods': {'query': query}}
def write_file(features: List[str], expected_records: List[Dict[(str, Any)]]) -> bytes:
    """Serialize *expected_records* into an MCAP byte stream and return it.

    *features* is a list of short feature flags that configure the Writer:
    'rch' repeat channels, 'rsh' repeat schemas, 'ch' chunking, 'st'
    statistics, 'sum' summary offsets (index types come from
    index_type_from_features). Records are replayed in order; channels and
    schemas are registered only once per topic/schema name.
    """
    seen_channels: Set[str] = set()   # topics already registered
    seen_schemas: Set[str] = set()    # schema names already registered
    output = BytesIO()
    writer = Writer(output=output, index_types=index_type_from_features(features), compression=CompressionType.NONE, repeat_channels=('rch' in features), repeat_schemas=('rsh' in features), use_chunking=('ch' in features), use_statistics=('st' in features), use_summary_offsets=('sum' in features), enable_crcs=True, enable_data_crcs=True)
    for line in expected_records:
        # Each expected record is a {'type': ..., 'fields': ...} description
        # that deserialize_record turns back into a concrete record object.
        (type, fields) = (line['type'], line['fields'])
        record = deserialize_record(type, fields)
        if isinstance(record, Header):
            writer.start(record.profile, record.library)
        if isinstance(record, Attachment):
            writer.add_attachment(create_time=record.create_time, log_time=record.log_time, name=record.name, media_type=record.media_type, data=record.data)
        if isinstance(record, Channel):
            # Register each topic at most once; the writer handles repeats.
            if (record.topic not in seen_channels):
                writer.register_channel(schema_id=record.schema_id, topic=record.topic, message_encoding=record.message_encoding, metadata=record.metadata)
                seen_channels.add(record.topic)
        if isinstance(record, Message):
            writer.add_message(channel_id=record.channel_id, log_time=record.log_time, data=record.data, publish_time=record.publish_time, sequence=record.sequence)
        if isinstance(record, Schema):
            # Likewise, register each schema name at most once.
            if (record.name not in seen_schemas):
                writer.register_schema(name=record.name, encoding=record.encoding, data=record.data)
                seen_schemas.add(record.name)
        if isinstance(record, Metadata):
            writer.add_metadata(record.name, record.metadata)
    writer.finish()
    return output.getvalue()
def fat_tree_topology(k):
    """Build a k-ary fat-tree datacenter topology.

    A k-ary fat tree has (k/2)^2 core switches and k pods, each pod holding
    k/2 aggregation and k/2 edge switches; every edge switch serves k/2
    hosts ('leaf' nodes). Raises TypeError for a non-int k and ValueError
    unless k is a positive even integer.

    NOTE(review): uses the networkx 1.x ``topo.node[...]`` attribute access
    API -- confirm the fnss/networkx versions this file targets.
    """
    if (not isinstance(k, int)):
        raise TypeError('k argument must be of int type')
    if ((k < 1) or ((k % 2) == 1)):
        raise ValueError('k must be a positive even integer')
    topo = DatacenterTopology(type='fat_tree')
    topo.name = ('fat_tree_topology(%d)' % k)
    # Core layer: nodes 0 .. n_core-1.
    n_core = ((k // 2) ** 2)
    topo.add_nodes_from([v for v in range(int(n_core))], layer='core', type='switch')
    # Pods: consecutive node IDs, aggregation switches first, then edge
    # switches; full bipartite wiring between the two layers of a pod.
    for pod in range(k):
        aggr_start_node = topo.number_of_nodes()
        aggr_end_node = (aggr_start_node + (k // 2))
        edge_start_node = aggr_end_node
        edge_end_node = (edge_start_node + (k // 2))
        aggr_nodes = range(aggr_start_node, aggr_end_node)
        edge_nodes = range(edge_start_node, edge_end_node)
        topo.add_nodes_from(aggr_nodes, layer='aggregation', type='switch', pod=pod)
        topo.add_nodes_from(edge_nodes, layer='edge', type='switch', pod=pod)
        topo.add_edges_from([(u, v) for u in aggr_nodes for v in edge_nodes], type='aggregation_edge')
    # Core <-> aggregation wiring: core switch i connects to the
    # (i // (k/2))-th aggregation switch of every pod (pods are k nodes
    # apart in the ID space).
    for core_node in range(n_core):
        for pod in range(k):
            aggr_node = ((n_core + (core_node // (k // 2))) + (k * pod))
            topo.add_edge(core_node, aggr_node, type='core_aggregation')
    # Attach k/2 hosts ('leaf') to every edge switch.
    for u in [v for v in topo.nodes() if (topo.node[v]['layer'] == 'edge')]:
        leaf_nodes = range(topo.number_of_nodes(), (topo.number_of_nodes() + (k // 2)))
        topo.add_nodes_from(leaf_nodes, layer='leaf', type='host', pod=topo.node[u]['pod'])
        topo.add_edges_from([(u, v) for v in leaf_nodes], type='edge_leaf')
    return topo
class OptionSeriesTimelineSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Option wrapper for the lowpass-filter mapping of a timeline series.

    NOTE(review): the extracted source showed bare accessor ``def``s; the
    ``@property`` decorators are restored here (these are read-only
    sub-option accessors) -- confirm against the original file.
    """

    @property
    def frequency(self) -> 'OptionSeriesTimelineSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Sub-options mapping the lowpass cutoff frequency."""
        return self._config_sub_data('frequency', OptionSeriesTimelineSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    @property
    def resonance(self) -> 'OptionSeriesTimelineSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Sub-options mapping the lowpass filter resonance."""
        return self._config_sub_data('resonance', OptionSeriesTimelineSonificationDefaultinstrumentoptionsMappingLowpassResonance)
class PortalFS(_utils.FSTree):
    """Filesystem layout of the benchmarking portal rooted at *root*.

    Defaults the root to '~/BENCH' and exposes the jobs sub-tree plus
    convenience accessors for the current job and the job queues.
    """

    def __init__(self, root: Optional[str]=None):
        if (root is None):
            root = '~/BENCH'
        super().__init__(root)
        # Jobs sub-tree anchored at the (expanded) root.
        self.jobs = PortalJobsFS(self.root)

    def currentjob(self) -> str:
        # NOTE(review): reads like a property accessor; a @property decorator
        # may have been lost in extraction -- confirm against upstream.
        return self.jobs.requests.current

    def queues(self) -> queue_mod.JobQueuesFS:
        # NOTE(review): same -- possibly a @property in the original.
        return queue_mod.JobQueuesFS(self.jobs.queues)
def set_widget_style(widget, dark_theme=False):
    """Install a light or dark QPalette on *widget*.

    ``dark_theme`` selects between two fixed role/color tables; the colors
    are applied to the widget's current palette and set back on the widget.
    """
    Palette = QtGui.QPalette
    if dark_theme:
        # (role, color) pairs applied to the normal color groups.
        active_colors = [
            (Palette.Window, QtGui.QColor(53, 53, 53)),
            (Palette.WindowText, QtCore.Qt.white),
            (Palette.Base, QtGui.QColor(25, 25, 25)),
            (Palette.AlternateBase, QtGui.QColor(53, 53, 53)),
            (Palette.ToolTipBase, QtCore.Qt.white),
            (Palette.ToolTipText, QtCore.Qt.white),
            (Palette.Text, QtCore.Qt.white),
            (Palette.Button, QtGui.QColor(53, 53, 53)),
            (Palette.ButtonText, QtCore.Qt.white),
            (Palette.BrightText, QtCore.Qt.red),
            (Palette.Link, QtGui.QColor(42, 130, 218)),
            (Palette.Highlight, QtGui.QColor(42, 130, 218)),
            (Palette.HighlightedText, QtCore.Qt.black),
        ]
        # Overrides for the Disabled color group only.
        disabled_colors = [
            (Palette.Text, QtGui.QColor(QtCore.Qt.darkGray)),
            (Palette.ButtonText, QtGui.QColor(QtCore.Qt.darkGray)),
            (Palette.BrightText, QtGui.QColor(255, 128, 128)),
        ]
    else:
        active_colors = [
            (Palette.Window, QtGui.QColor(239, 239, 239)),
            (Palette.WindowText, QtGui.QColor(0, 0, 0)),
            (Palette.Base, QtGui.QColor(255, 255, 255)),
            (Palette.AlternateBase, QtGui.QColor(247, 247, 247)),
            (Palette.ToolTipBase, QtGui.QColor(255, 255, 220)),
            (Palette.ToolTipText, QtGui.QColor(0, 0, 0)),
            (Palette.Text, QtGui.QColor(0, 0, 0)),
            (Palette.Button, QtGui.QColor(239, 239, 239)),
            (Palette.ButtonText, QtGui.QColor(0, 0, 0)),
            (Palette.BrightText, QtGui.QColor(255, 255, 255)),
            (Palette.Link, QtGui.QColor(0, 0, 255)),
            (Palette.Highlight, QtGui.QColor(48, 140, 198)),
            (Palette.HighlightedText, QtGui.QColor(255, 255, 255)),
        ]
        disabled_colors = [
            (Palette.Text, QtGui.QColor(190, 190, 190)),
            (Palette.ButtonText, QtGui.QColor(190, 190, 190)),
            (Palette.BrightText, QtGui.QColor(216, 216, 216)),
        ]
    palette = widget.palette()
    for role, color in active_colors:
        palette.setColor(role, color)
    for role, color in disabled_colors:
        palette.setColor(Palette.Disabled, role, color)
    widget.setPalette(palette)
class PyperfResultsFile():
    """A pyperf benchmark results file, stored as JSON or gzipped JSON.

    Wraps an absolute filename, its results-root-relative form and the
    root itself, and knows how to read, write, copy and compare results.

    NOTE(review): the extracted source showed bare ``def meth(cls, ...)``
    definitions and attribute-style uses such as ``self.iscompressed``;
    the ``@classmethod`` / ``@property`` decorators were clearly stripped
    and are restored here -- confirm against the original file. The
    literal ``((unknown))`` fragments in two error messages also look like
    extraction artifacts; they are left untouched.
    """

    SUFFIX = '.json'
    COMPRESSED_SUFFIX = '.json.gz'
    COMPRESSOR = gzip  # module providing .open() for the compressed variant
    _SUFFIXES = (SUFFIX, COMPRESSED_SUFFIX)

    @classmethod
    def from_raw(cls, raw: Any) -> 'PyperfResultsFile':
        """Coerce *raw* (an instance or a filename string) to an instance."""
        if (not raw):
            raise ValueError(raw)
        elif isinstance(raw, cls):
            return raw
        elif isinstance(raw, str):
            return cls(raw)
        else:
            raise TypeError(raw)

    @classmethod
    def from_uploadid(cls, uploadid: Any, resultsroot: Optional[str]=None, *, compressed: bool=False) -> 'PyperfResultsFile':
        """Build the canonical results file for an upload ID."""
        uploadid = PyperfUploadID.from_raw(uploadid, fail=True)
        return cls(f'{uploadid}{cls.SUFFIX}', resultsroot, compressed=compressed)

    @classmethod
    def _resolve_filename(cls, filename: str, resultsroot: Optional[str], compressed: Optional[bool]) -> Tuple[str, str, str]:
        """Normalize to (absolute filename, relative filename, resultsroot)."""
        if (not filename):
            raise ValueError('missing filename')
        filename = cls._ensure_suffix(filename, compressed)
        return normalize_results_filename(filename, resultsroot)

    @classmethod
    def _ensure_suffix(cls, filename: str, compressed: Optional[bool]) -> str:
        """Return *filename* with the suffix matching *compressed*.

        ``compressed=None`` keeps whatever suffix the name already has.
        """
        if (not filename.endswith((cls.SUFFIX, cls.COMPRESSED_SUFFIX))):
            raise ValueError(f'unsupported file suffix ((unknown))')
        elif (compressed is None):
            return filename
        elif (compressed == cls._is_compressed(filename)):
            return filename
        else:
            # Swap one suffix for the other.
            if compressed:
                (old, new) = (cls.SUFFIX, cls.COMPRESSED_SUFFIX)
            else:
                (old, new) = (cls.COMPRESSED_SUFFIX, cls.SUFFIX)
            return (filename[:(- len(old))] + new)

    @classmethod
    def _is_compressed(cls, filename: str) -> bool:
        """True if *filename* carries the compressed suffix."""
        return filename.endswith(cls.COMPRESSED_SUFFIX)

    def __init__(self, filename: str, resultsroot: Optional[str]=None, *, compressed: Optional[bool]=None):
        (filename, relfile, resultsroot) = self._resolve_filename(filename, resultsroot, compressed)
        if os.path.isdir(filename):
            raise NotImplementedError(filename)
        self._filename = filename
        self._relfile = relfile
        self._resultsroot = resultsroot

    def __repr__(self):
        return f'{type(self).__name__}({self.filename!r})'

    def __str__(self):
        return self._filename

    def __eq__(self, other):
        # Equality is deliberately unsupported for results files.
        raise NotImplementedError

    @property
    def filename(self) -> str:
        """Absolute path of the results file."""
        return self._filename

    @property
    def relfile(self) -> str:
        """Path relative to the results root."""
        return self._relfile

    @property
    def resultsroot(self) -> Optional[str]:
        """Root directory that relfile is relative to."""
        return self._resultsroot

    @property
    def uploadid(self) -> Optional[PyperfUploadID]:
        """Upload ID parsed from the filename, if it matches the pattern."""
        return PyperfUploadID.from_filename(self.filename)

    @property
    def iscompressed(self) -> bool:
        """True if this file uses the gzip-compressed representation."""
        return self._is_compressed(self._filename)

    def read(self) -> PyperfResults:
        """Load and parse the file, returning PyperfResults."""
        _open = (self.COMPRESSOR.open if self.iscompressed else open)
        with _open(self._filename) as infile:
            text = infile.read()
        if (not text):
            raise RuntimeError(f'{self.filename} is empty')
        data = json.loads(text)
        return PyperfResults(data, self)

    def write(self, results: PyperfResults) -> None:
        """Serialize *results* to this file (gzipped if compressed)."""
        data = results.data
        _open = (self.COMPRESSOR.open if self.iscompressed else open)
        if self.iscompressed:
            # gzip.open('w') yields a binary stream; encode explicitly.
            text = json.dumps(data, indent=2)
            with _open(self._filename, 'w') as outfile:
                outfile.write(text.encode('utf-8'))
        else:
            with _open(self._filename, 'w') as outfile:
                json.dump(data, outfile, indent=2)

    def copy_to(self, filename: str, resultsroot: Optional[str]=None, *, compressed: Optional[bool]=None) -> 'PyperfResultsFile':
        """Copy this file to *filename*, re-encoding if compression differs.

        *filename* may also be another PyperfResultsFile acting as the
        destination. Returns the destination file object.
        """
        copied: PyperfResultsFile
        if isinstance(filename, PyperfResultsFile):
            copied = filename
            if (copied._resultsroot and resultsroot and (resultsroot != copied._resultsroot)):
                raise ValueError(f'resultsroot mismatch ({resultsroot} != {copied._resultsroot})')
        else:
            if (not filename):
                filename = self._filename
            elif os.path.isdir(filename):
                raise NotImplementedError(filename)
            elif ((not resultsroot) and ((not os.path.isabs(filename)) or filename.startswith(self._resultsroot))):
                # Destination stays under our own results root.
                resultsroot = self._resultsroot
            (filename, relfile, resultsroot) = self._resolve_filename(filename, resultsroot, compressed)
            if (filename == self._filename):
                raise ValueError(f'copying to self ((unknown))')
            # Build the destination object without re-running __init__.
            cls = type(self)
            copied = cls.__new__(cls)
            copied._filename = filename
            copied._relfile = relfile
            copied._resultsroot = resultsroot
        if (copied.iscompressed == self.iscompressed):
            if (copied._filename == self._filename):
                pass
            shutil.copyfile(self._filename, copied._filename)
        else:
            # Compression differs: decode and re-encode via read()/write().
            results = self.read()
            copied.write(results)
        return copied

    def compare(self, others: Sequence['PyperfResultsFile']) -> Optional[PyperfComparisons]:
        """Run ``pyperf compare_to`` against *others* and parse the table.

        Returns None (after logging the output) if pyperf exits non-zero.
        """
        optional = []
        if (len(others) == 1):
            # Grouping by speed only makes sense for a pairwise comparison.
            optional.append('--group-by-speed')
        cwd = self._resultsroot
        proc = _utils.run_fg(sys.executable, '-m', 'pyperf', 'compare_to', *optional, '--table', self._relfile, *(o._relfile for o in others), cwd=cwd)
        if proc.returncode:
            logger.warning(proc.stdout)
            return None
        filenames = [self._filename, *(o.filename for o in others)]
        return PyperfComparisons.parse_table(proc.stdout, filenames)
class OptionPlotoptionsAreasplineSonificationTracksMappingNoteduration(Options):
    """Option wrapper for the note-duration mapping of sonification tracks.

    NOTE(review): the extracted source contained duplicate bare ``def``s per
    name (the second silently shadowing the first); they are the standard
    getter/setter pairs and the ``@property`` / ``@<name>.setter`` decorators
    are restored here -- confirm against the original file.
    """

    @property
    def mapFunction(self):
        """How to perform the mapping (presumably a mapping-function name)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The point property to map to (e.g. 'y')."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped output range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped output range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """What data set to map within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesStreamgraphSonificationTracksMappingNoteduration(Options):
    """Option wrapper for the streamgraph sonification ``noteDuration`` mapping.

    Each option is a property with a paired setter.  As written, the duplicate
    plain ``def``s shadowed each other; the ``@property`` / ``@<name>.setter``
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        """Get the ``mapFunction`` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        """Set the ``mapFunction`` option."""
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Get the ``mapTo`` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        """Set the ``mapTo`` option."""
        self._config(text, js_type=False)

    @property
    def max(self):
        """Get the ``max`` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        """Set the ``max`` option."""
        self._config(num, js_type=False)

    @property
    def min(self):
        """Get the ``min`` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        """Set the ``min`` option."""
        self._config(num, js_type=False)

    @property
    def within(self):
        """Get the ``within`` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        """Set the ``within`` option."""
        self._config(value, js_type=False)
# NOTE(review): decorators in this class were mangled (their ``@name`` prefix
# stripped): the leading ``_toolkit([...])`` expression, the bare
# ``(is_wx(), 'Issue ...')`` tuples before each test, and the missing
# decorator on ``report_and_editor`` (which yields and is used in ``with``
# statements).  They are restored below as ``@requires_toolkit``,
# ``@unittest.skipIf`` and ``@contextmanager``; ``requires_toolkit`` and
# ``contextmanager`` must be imported at module top -- confirm against the
# original file's import block.
@requires_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestTabularEditor(BaseTestMixin, UnittestTools, unittest.TestCase):
    """GUI tests for selection/event synchronization of the TabularEditor."""

    def setUp(self):
        BaseTestMixin.setUp(self)

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_tabular_editor_single_selection(self):
        # GUI selection must propagate to selected_row / selected.
        with reraise_exceptions(), \
                self.report_and_editor(get_view()) as (report, editor):
            process_cascade_events()
            people = report.people
            self.assertEqual(report.selected_row, -1)
            self.assertIsNone(report.selected)

            set_selected_single(editor, 1)
            process_cascade_events()
            self.assertEqual(report.selected_row, 1)
            self.assertEqual(report.selected, people[1])

            set_selected_single(editor, 2)
            process_cascade_events()
            self.assertEqual(report.selected_row, 2)
            self.assertEqual(report.selected, people[2])

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_tabular_editor_multi_selection(self):
        # GUI multi-selection must propagate to selected_rows / multi_selected.
        view = get_view(multi_select=True)
        with reraise_exceptions(), \
                self.report_and_editor(view) as (report, editor):
            process_cascade_events()
            people = report.people
            self.assertEqual(report.selected_rows, [])
            self.assertEqual(report.multi_selected, [])

            set_selected_multiple(editor, [0, 1])
            process_cascade_events()
            self.assertEqual(report.selected_rows, [0, 1])
            self.assertEqual(report.multi_selected, people[:2])

            set_selected_multiple(editor, [2])
            process_cascade_events()
            self.assertEqual(report.selected_rows, [2])
            self.assertEqual(report.multi_selected, [people[2]])

            clear_selection(editor)
            process_cascade_events()
            self.assertEqual(report.selected_rows, [])
            self.assertEqual(report.multi_selected, [])

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_tabular_editor_single_selection_changed(self):
        # Model-side changes must propagate to the GUI selection.
        with reraise_exceptions(), \
                self.report_and_editor(get_view()) as (report, editor):
            process_cascade_events()
            people = report.people
            self.assertEqual(get_selected_rows(editor), [])

            report.selected_row = 1
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [1])
            self.assertEqual(report.selected, people[1])

            report.selected = people[2]
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [2])
            self.assertEqual(report.selected_row, 2)

            # Selecting an object not in the list leaves the selection alone.
            report.selected = Person(name='invalid', age=-1)
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [2])
            self.assertEqual(report.selected_row, 2)

            report.selected_row = -1
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [])
            self.assertEqual(report.selected, None)

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_tabular_editor_multi_selection_changed(self):
        view = get_view(multi_select=True)
        with reraise_exceptions(), \
                self.report_and_editor(view) as (report, editor):
            process_cascade_events()
            people = report.people
            self.assertEqual(get_selected_rows(editor), [])

            report.selected_rows = [0, 1]
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [0, 1])
            self.assertEqual(report.multi_selected, people[:2])

            report.multi_selected = [people[2], people[0]]
            process_cascade_events()
            self.assertEqual(sorted(get_selected_rows(editor)), [0, 2])
            self.assertEqual(sorted(report.selected_rows), [0, 2])

            # A list containing an unknown object leaves the selection alone.
            invalid_person = Person(name='invalid', age=-1)
            report.multi_selected = [people[2], invalid_person]
            process_cascade_events()
            self.assertEqual(sorted(get_selected_rows(editor)), [0, 2])
            self.assertEqual(sorted(report.selected_rows), [0, 2])

            report.selected_rows = []
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [])
            self.assertEqual(report.multi_selected, [])

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_tabular_editor_multi_selection_items_changed(self):
        # In-place mutation of the selection lists must also propagate.
        view = get_view(multi_select=True)
        with reraise_exceptions(), \
                self.report_and_editor(view) as (report, editor):
            process_cascade_events()
            people = report.people
            self.assertEqual(get_selected_rows(editor), [])

            report.selected_rows.extend([0, 1])
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [0, 1])
            self.assertEqual(report.multi_selected, people[:2])

            report.selected_rows[1] = 2
            process_cascade_events()
            self.assertEqual(get_selected_rows(editor), [0, 2])
            self.assertEqual(report.multi_selected, people[0:3:2])

            report.multi_selected[0] = people[1]
            process_cascade_events()
            self.assertEqual(sorted(get_selected_rows(editor)), [1, 2])
            self.assertEqual(sorted(report.selected_rows), [1, 2])

            # Assigning an unknown object in-place leaves the selection alone.
            report.multi_selected[0] = Person(name='invalid', age=-1)
            process_cascade_events()
            self.assertEqual(sorted(get_selected_rows(editor)), [1, 2])
            self.assertEqual(sorted(report.selected_rows), [1, 2])

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_selected_reacts_to_model_changes(self):
        # selected and selected_row stay mutually consistent, no GUI pump.
        with self.report_and_editor(get_view()) as (report, editor):
            people = report.people
            self.assertIsNone(report.selected)
            self.assertEqual(report.selected_row, -1)

            report.selected = people[1]
            self.assertEqual(report.selected, people[1])
            self.assertEqual(report.selected_row, 1)

            report.selected = None
            self.assertIsNone(report.selected)
            self.assertEqual(report.selected_row, -1)

            report.selected_row = 0
            self.assertEqual(report.selected, people[0])
            self.assertEqual(report.selected_row, 0)

            report.selected_row = -1
            self.assertIsNone(report.selected)
            self.assertEqual(report.selected_row, -1)

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_event_synchronization(self):
        # Each assignment to the model event fires the editor event once.
        with self.report_and_editor(get_view()) as (report, editor):
            with self.assertTraitChanges(editor, 'refresh', count=1):
                report.refresh = True
            with self.assertTraitChanges(editor, 'refresh', count=1):
                report.refresh = True
            with self.assertTraitChanges(editor, 'update', count=1):
                report.update = True
            with self.assertTraitChanges(editor, 'update', count=1):
                report.update = True

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_adapter_columns_changes(self):
        # Regression test: changing adapter columns must not raise.
        with reraise_exceptions(), \
                self.report_and_editor(get_view()) as (report, editor):
            editor.adapter.columns = [('Name', 'name'), ('Age', 'age')]
            editor.adapter.columns = [('Name', 'name')]
            process_cascade_events()

    @unittest.skipIf(is_wx(), 'Issue enthought/traitsui#752')
    def test_view_column_resized_attribute_error_workaround(self):
        # Regression test: shrinking the column list must not raise.
        with reraise_exceptions(), \
                self.report_and_editor(get_view()) as (_, editor):
            editor.adapter.columns = [('Name', 'name')]

    @contextmanager
    def report_and_editor(self, view):
        """Yield a (report, editor) pair for *view*, tearing down the UI after."""
        report = Report(
            people=[
                Person(name='Theresa', age=60),
                Person(name='Arlene', age=46),
                Person(name='Karen', age=40),
            ]
        )
        with create_ui(report, dict(view=view)) as ui:
            (editor,) = ui.get_editors('people')
            yield report, editor
class ArrayDataModel(AbstractDataModel, HasRequiredTraits):
    """A data model for an at-least-2D numpy array.

    Rows index all but the last axis of the array; columns index the last
    axis.  The change-handler decorators below were mangled in the source
    (bare ``('data')`` etc.); they are restored as ``@observe(...)`` --
    ``observe`` must be imported at module top (confirm against the original
    file's imports).
    """

    # The array being displayed.  Must have at least 2 dimensions.
    data = _AtLeastTwoDArray()

    # The index manager that helps convert toolkit indexes to data view
    # indexes.
    index_manager = Instance(TupleIndexManager, args=())

    # The value type of the label of the top-left corner cell.
    label_header_type = Instance(
        AbstractValueType,
        factory=ConstantValue,
        kw={'text': 'Index'},
        allow_none=False,
    )

    # The value type of the column headers (read-only integers).
    column_header_type = Instance(
        AbstractValueType,
        factory=IntValue,
        kw={'is_editable': False},
        allow_none=False,
    )

    # The value type of the row headers (read-only integers).
    row_header_type = Instance(
        AbstractValueType,
        factory=IntValue,
        kw={'is_editable': False},
        allow_none=False,
    )

    # The value type of the cells in the array.  Required.
    value_type = Instance(AbstractValueType, allow_none=False, required=True)

    def get_column_count(self):
        """Return the number of columns: the size of the last axis."""
        return self.data.shape[-1]

    def can_have_children(self, row):
        """A row has children while it indexes fewer than ndim-1 axes."""
        if len(row) < self.data.ndim - 1:
            return True
        return False

    def get_row_count(self, row):
        """Return the number of child rows of *row* (0 at the deepest level)."""
        if len(row) < self.data.ndim - 1:
            return self.data.shape[len(row)]
        return 0

    def get_value(self, row, column):
        """Return the value for the given row/column index pair.

        Empty row -> column header; empty column -> row header; otherwise the
        array element, or None when the combined index is not a full index.
        """
        if len(row) == 0:
            if len(column) == 0:
                return None
            return column[0]
        elif len(column) == 0:
            return row[-1]
        else:
            index = tuple(row + column)
            if len(index) != self.data.ndim:
                return None
            return self.data[index]

    def can_set_value(self, row, column):
        """Only fully-specified element indices are settable."""
        index = tuple(row + column)
        return len(index) == self.data.ndim

    def set_value(self, row, column, value):
        """Set an array element, firing values_changed; raise otherwise."""
        if self.can_set_value(row, column):
            index = tuple(row + column)
            self.data[index] = value
            self.values_changed = (row, column, row, column)
        else:
            raise DataViewSetError()

    def get_value_type(self, row, column):
        """Return the value type for the given row/column index pair."""
        if len(row) == 0:
            if len(column) == 0:
                return self.label_header_type
            return self.column_header_type
        elif len(column) == 0:
            return self.row_header_type
        elif len(row) < self.data.ndim - 1:
            # Intermediate (grouping) rows display nothing.
            return no_value
        else:
            return self.value_type

    @observe('data')
    def data_updated(self, event):
        """Handle the data array being replaced with a new array."""
        if event.new.shape == event.old.shape:
            if self.data.size > 0:
                self.values_changed = (
                    (0,),
                    (0,),
                    (event.old.shape[0] - 1,),
                    (event.old.shape[-1] - 1,),
                )
        else:
            self.structure_changed = True

    @observe('value_type.updated')
    def value_type_updated(self, event):
        """Handle the value type being updated: refresh all cells."""
        if self.data.size > 0:
            self.values_changed = (
                (0,),
                (0,),
                (self.data.shape[0] - 1,),
                (self.data.shape[-1] - 1,),
            )

    @observe('column_header_type.updated')
    def column_header_type_updated(self, event):
        """Handle the column header type being updated."""
        if self.data.shape[-1] > 0:
            self.values_changed = ((), (0,), (), (self.data.shape[-1] - 1,))

    @observe('row_header_type.updated')
    def value_header_type_updated(self, event):
        """Handle the row header type being updated."""
        if self.data.shape[0] > 0:
            self.values_changed = ((0,), (), (self.data.shape[0] - 1,), ())

    @observe('label_header_type.updated')
    def label_header_type_updated(self, event):
        """Handle the corner label type being updated."""
        self.values_changed = ((), (), (), ())

    def _data_default(self):
        # Imported lazily to avoid a hard numpy dependency at import time.
        from numpy import zeros

        return zeros(shape=(0, 0))
# NOTE(review): the class decorator was mangled to a bare ``(IPythonShell)``
# expression; restored as ``@provides(IPythonShell)`` (``provides`` comes from
# traits.api in the original file -- confirm against its imports).
@provides(IPythonShell)
class PythonShell(MPythonShell, LayoutWidget):
    """The toolkit-specific implementation of an interactive Python shell."""

    # Fired when a command has been executed in the shell.
    command_executed = Event()

    # Fired when a key is pressed in the shell.
    key_pressed = Event(KeyPressedEvent)

    def __init__(self, parent=None, **traits):
        """Create the shell widget.

        Passing ``create`` in *traits* is deprecated; call create() instead.
        """
        create = traits.pop('create', None)
        super().__init__(parent=parent, **traits)
        if create:
            # Deprecated implicit creation path.
            self.create()
            warnings.warn(
                'automatic widget creation is deprecated and will be removed in a future Pyface version, code should not pass the create parameter and should instead call create() explicitly',
                DeprecationWarning,
                stacklevel=2,
            )
        elif create is not None:
            warnings.warn(
                'setting create=False is no longer required',
                DeprecationWarning,
                stacklevel=2,
            )

    def interpreter(self):
        """Return the interpreter object of the underlying console widget."""
        return self.control.interpreter

    def execute_command(self, command, hidden=True):
        """Execute *command* in the shell; hide the input when *hidden*."""
        self.control.execute(command, hidden=hidden)

    def execute_file(self, path, hidden=True):
        """Execute the file at *path* in the shell."""
        self.control.execute_file(path, hidden=hidden)

    def get_history(self):
        """Return (history list, current history index)."""
        return (self.control._history, self.control._history_index)

    def set_history(self, history, history_index):
        """Set the history list and index, clamping an invalid index."""
        if not (0 <= history_index <= len(history)):
            history_index = len(history)
        self.control._set_history(history, history_index)

    def _create_control(self, parent):
        # Build the toolkit widget that backs this shell.
        return PyfacePythonWidget(self, parent)

    def _add_event_listeners(self):
        super()._add_event_listeners()
        self.control.executed.connect(self._on_command_executed)
        self._event_filter.signal.connect(self._on_obj_drop)

    def _remove_event_listeners(self):
        if self.control is not None:
            self.control.executed.disconnect(self._on_command_executed)
            self._event_filter.signal.disconnect(self._on_obj_drop)
            self.control._remove_event_listeners()
        super()._remove_event_listeners()

    def __event_filter_default(self):
        return _DropEventEmitter(self.control)

    def _on_obj_drop(self, obj):
        """Handle an object dropped onto the shell: bind and echo it."""
        name = 'dragged'
        if (hasattr(obj, 'name')
                and isinstance(obj.name, str)
                and len(obj.name) > 0):
            py_name = python_name(obj.name)
            # Only use the derived name if it is a usable identifier; the
            # eval probes that it parses as a bare name.
            try:
                if eval(py_name, {py_name: True}):
                    name = py_name
            except Exception:
                pass
        self.control.interpreter.locals[name] = obj
        self.control.execute(name)
        self.control._control.setFocus()
class GridDirective(SphinxDirective):
    """A grid component, comprising rows of grid-item children."""

    has_content = True
    required_arguments = 0
    # Optional argument: the per-breakpoint column counts for the row.
    optional_arguments = 1
    final_argument_whitespace = True
    option_spec = {
        'gutter': gutter_option,
        'margin': margin_option,
        'padding': padding_option,
        'outline': directives.flag,
        'reverse': directives.flag,
        'class-container': directives.class_option,
        'class-row': directives.class_option,
    }

    def run(self) -> List[nodes.Node]:
        """Build the container/row node tree and parse the content into it."""
        try:
            column_classes = (
                row_columns_option(self.arguments[0]) if self.arguments else []
            )
        except ValueError as exc:
            raise self.error(f'Invalid directive argument: {exc}')
        self.assert_has_content()

        # Assemble the CSS classes for the outer container.
        container_classes = ['sd-container-fluid', 'sd-sphinx-override']
        container_classes += self.options.get('margin', ['sd-mb-4'])
        container_classes += self.options.get('padding', [])
        if 'outline' in self.options:
            container_classes.append('sd-border-1')
        container_classes += self.options.get('class-container', [])
        container = create_component('grid-container', container_classes)
        self.set_source_info(container)

        # Assemble the CSS classes for the row.
        row_classes = ['sd-row'] + column_classes
        row_classes += self.options.get('gutter', [])
        if 'reverse' in self.options:
            row_classes.append('sd-flex-row-reverse')
        row_classes += self.options.get('class-row', [])
        row = create_component('grid-row', row_classes)
        self.set_source_info(row)
        container += row

        # Parse the directive body into the row, then validate its children.
        self.state.nested_parse(self.content, self.content_offset, row)
        for item in row.children:
            if not is_component(item, 'grid-item'):
                LOGGER.warning(
                    f"All children of a 'grid-row' should be 'grid-item' [{WARNING_TYPE}.grid]",
                    location=item,
                    type=WARNING_TYPE,
                    subtype='grid',
                )
                break
        return [container]
def build_file_description(source_file_template, sources):
    """Fetch the description template and substitute per-source file names.

    Lines starting with ``#`` in the fetched template are treated as comments
    and dropped; the remaining text is formatted with a mapping of each
    source's ``source_type`` to its ``file_name``.
    """
    with RetrieveFileFromUri(source_file_template).get_file_object() as f:
        template = ''.join(
            raw_line.decode('utf-8')
            for raw_line in f
            if not raw_line.startswith(b'#')
        )
    substitutions = {source.source_type: source.file_name for source in sources}
    return template.format(**substitutions)
def getModifiers(chart):
    """Return temperament modifiers for the chart.

    The affected factors are the Ascendant, its sign ruler and the Moon;
    the affecting bodies are Mars and Saturn (conjunction, square,
    opposition) and the Sun (conjunction only).  Each non-null result from
    modifierFactor is collected.
    """
    asc = chart.getAngle(const.ASC)
    ruler_id = essential.ruler(asc.sign)

    # Factors that can be modified, paired with their modifier tag.
    factors = [
        [MOD_ASC, asc],
        [MOD_ASC_RULER, chart.getObject(ruler_id)],
        [MOD_MOON, chart.getObject(const.MOON)],
    ]

    # Affecting bodies paired with the aspect angles that count.
    affecting = [
        [chart.getObject(const.MARS), [0, 90, 180]],
        [chart.getObject(const.SATURN), [0, 90, 180]],
        [chart.getObject(const.SUN), [0]],
    ]

    modifiers = []
    for affecting_obj, aspect_angles in affecting:
        for factor, affected_obj in factors:
            modf = modifierFactor(
                chart, factor, affected_obj, affecting_obj, aspect_angles
            )
            if modf:
                modifiers.append(modf)
    return modifiers
class TestNumberOfDuplicatedRows(BaseIntegrityValueTest):
    """Check the number of duplicate rows in the current dataset.

    With a reference dataset, the expected count is the reference's duplicate
    count scaled by the ratio of row counts, within a 10% tolerance; without
    a reference, the expected count is exactly zero.
    """

    name: ClassVar = 'Number of Duplicate Rows'

    def get_condition_from_reference(self, reference: Optional[DatasetSummary]):
        """Derive the test condition from the reference summary, if present."""
        if reference is None:
            return TestValueCondition(eq=0)
        current_rows = self.metric.get_result().current.number_of_rows
        # Scale the reference duplicate count to the current dataset size.
        scale = current_rows / reference.number_of_rows
        expected_duplicates = reference.number_of_duplicated_rows * scale
        return TestValueCondition(
            eq=approx(expected_duplicates, 0.1),
            source=ValueSource.REFERENCE,
        )

    def calculate_value_for_test(self) -> Numeric:
        """Return the current dataset's duplicate-row count."""
        return self.metric.get_result().current.number_of_duplicated_rows

    def get_description(self, value: Numeric) -> str:
        """Return the human-readable result description."""
        return f'The number of duplicate rows is {value}. The test threshold is {self.get_condition()}.'
def get_kube_client(in_cluster: bool = False, config_file: Optional[str] = None) -> client.CoreV1Api:
    """Create a Kubernetes CoreV1Api client.

    When *in_cluster* is true, the in-cluster (service-account) configuration
    is loaded; otherwise a kubeconfig is loaded, either from *config_file* or
    from the default kubeconfig resolution when no file is given.
    """
    if in_cluster:
        config.load_incluster_config()
    elif config_file is None:
        # Fall back to the default kubeconfig lookup.
        config.load_kube_config()
    else:
        config.load_kube_config(config_file=config_file)
    return client.CoreV1Api()