code
stringlengths
281
23.7M
def hpluv_to_luv(hpluv: Vector) -> Vector:
    """Convert an HPLuv color ``(hue, saturation, lightness)`` to Luv.

    Lightness within 1e-07 of 100 is clamped to 100 and lightness below
    1e-08 is clamped to 0.0; in both extremes chroma collapses to zero.
    """
    h, s, l = hpluv
    chroma = 0.0
    if l > 100 - 1e-07:
        # Essentially white: full lightness, no chroma.
        l = 100
    elif l < 1e-08:
        # Essentially black: zero lightness, no chroma.
        l = 0.0
    else:
        # Scale saturation by the largest chroma that stays in gamut at this l.
        chroma = max_safe_chroma_for_l(l) / 100 * s
    a, b = alg.polar_to_rect(chroma, h)
    return [l, a, b]
class PaddedDenseToJaggedTestCase(unittest.TestCase):
    """Tests for the AIT ``padded_dense_to_jagged`` op.

    Builds an AIT graph that converts a padded dense tensor into a jagged
    tensor, adds it to another jagged tensor made from a plain source, and
    compares against a PyTorch reference computed on CUDA.
    """

    def _test_padded_dense_to_jagged(self, jagged_max_shape: List[int], offsets_list: List[List[int]], dtype: str='float16', offsets_dtype: str='int32', use_jagged_space_indexing: bool=False, pass_jagged_int_var_as_total_length: bool=False, test_suffix: str=''):
        """Compile and run one dense->jagged conversion case.

        jagged_max_shape is [batch, seq dims..., inner dims...]; one sequence
        dim per offsets list entry.
        """
        batch_size = jagged_max_shape[0]
        batch_dim = IntVar(values=[1, (batch_size * 2)], name='batch_size')
        # One jagged (sequence) dimension per offsets tensor.
        sequence_shape = jagged_max_shape[1:(1 + len(offsets_list))]
        sequence_dims = [IntImm(value=dim) for dim in sequence_shape]
        inner_shape = jagged_max_shape[(1 + len(offsets_list)):]
        inner_dims = [IntImm(value=dim) for dim in inner_shape]
        # The last offset of the innermost offsets list is the flattened length.
        total_length = offsets_list[(- 1)][(- 1)]
        total_length_dim = IntVar(values=[1, (total_length * 2)], name='total_length')
        jagged_dims = [JaggedDim(min_value=0, max_value=N) for N in sequence_shape]
        offsets_dims = [IntVar(values=[2, (len(offsets) * 2)]) for offsets in offsets_list]
        DENSE = Tensor(shape=[batch_dim, *sequence_dims, *inner_dims], name='dense', dtype=dtype, is_input=True)
        OFFSETS_LIST = [Tensor(shape=[offsets_dim], name=f'offsets{i}', dtype=offsets_dtype, is_input=True) for (i, offsets_dim) in enumerate(offsets_dims)]
        SOURCE = Tensor(shape=[total_length_dim, *inner_dims], name='source', dtype=dtype, is_input=True)
        # A second jagged tensor (all zeros at runtime) to exercise add fusion.
        ANOTHER = ops.make_jagged(batch_dim=batch_dim, jagged_dims=jagged_dims)(source=SOURCE, offsets_list=OFFSETS_LIST)
        total_length_to_pass = total_length_dim
        if pass_jagged_int_var_as_total_length:
            # Exercise passing the JaggedIntVar taken from another jagged tensor.
            total_length_to_pass = ANOTHER._attrs['shape'][0]
        JAGGED = ops.padded_dense_to_jagged(total_length=total_length_to_pass)(x=DENSE, offsets_list=OFFSETS_LIST)
        RESULT = ops.elementwise(FuncEnum.ADD)(JAGGED, ANOTHER)
        RESULT._attrs['name'] = 'result'
        RESULT._attrs['is_output'] = True
        assert (not DENSE.is_jagged())
        assert JAGGED.is_jagged()
        assert ANOTHER.is_jagged()
        assert RESULT.is_jagged()
        model = compile_model([RESULT], detect_target(use_jagged_space_indexing=use_jagged_space_indexing), './tmp', f'test_padded_dense_to_jagged_{test_suffix}')
        torch_offsets_dtype = string_to_torch_dtype(offsets_dtype)
        offsets_pt = {f'offsets{i}': torch.tensor(offsets, dtype=torch_offsets_dtype).cuda() for (i, offsets) in enumerate(offsets_list)}
        dense_pt = get_random_torch_tensor(jagged_max_shape, dtype)
        # PyTorch reference for the dense -> jagged gather.
        result_pt = jagged_utils.dense_to_jagged(dense=dense_pt, offsets_list=list(offsets_pt.values()))
        # Zero source so ANOTHER contributes nothing to the sum.
        source = torch.zeros_like(result_pt)
        result = torch.empty_like(result_pt)
        inputs = {'dense': dense_pt, 'source': source, **offsets_pt}
        model.run_with_tensors(inputs, [result])
        torch.testing.assert_close(result, result_pt)

    # NOTE(review): this bare list of param(...) calls looks like the argument
    # of a lost ``@parameterized.expand([...])`` decorator — restore it before
    # running; as written the params are a dead expression statement.
    ([param(1, 'int32', [4, 3, 8], 'float16', False), param(2, 'int32', [4, 3, 4], 'float16', False), param(3, 'int32', [4, 3, 2], 'float16', False), param(4, 'int32', [4, 3, 1], 'float16', True), param(5, 'int64', [4, 3, 4], 'float32', False), param(6, 'int64', [4, 3, 2], 'float32', False), param(7, 'int64', [4, 3, 1], 'float32', True)])
    def test_padded_dense_to_jagged_single_offsets(self, i, offsets_dtype, jagged_max_shape, dtype, pass_jagged_int_var_as_total_length):
        """Single jagged dimension, both indexing modes."""
        for use_jagged_space_indexing in [False, True]:
            self._test_padded_dense_to_jagged(jagged_max_shape=jagged_max_shape, offsets_list=[[0, 1, 4, 6, 7]], dtype=dtype, offsets_dtype=offsets_dtype, use_jagged_space_indexing=use_jagged_space_indexing, pass_jagged_int_var_as_total_length=pass_jagged_int_var_as_total_length, test_suffix=f'single_offsets_{dtype}_{i}')

    # NOTE(review): same lost-decorator pattern as above.
    ([param(1, 'int32', [3, 4, 5, 150, 3, 8], 'float16'), param(2, 'int32', [3, 4, 5, 150, 1, 4], 'float16'), param(3, 'int32', [3, 4, 5, 150, 3, 2], 'float16'), param(4, 'int32', [3, 4, 5, 150, 1, 1], 'float16'), param(5, 'int64', [3, 4, 5, 150, 1, 4], 'float32'), param(6, 'int64', [3, 4, 5, 150, 3, 2], 'float32'), param(7, 'int64', [3, 4, 5, 150, 3, 1], 'float32')])
    def test_padded_dense_to_jagged_multiple_offsets(self, i, offsets_dtype, jagged_max_shape, dtype):
        """Three nested jagged dimensions, both indexing modes."""
        for use_jagged_space_indexing in [False, True]:
            self._test_padded_dense_to_jagged(jagged_max_shape=jagged_max_shape, offsets_list=[[0, 1, 3, 5], [0, 2, 4, 7, 9, 10], [0, 6, 8, 19, 23, 45, 67, 98, 123, 256, 321]], dtype=dtype, offsets_dtype=offsets_dtype, use_jagged_space_indexing=use_jagged_space_indexing, test_suffix=f'multiple_offsets_{dtype}_{i}')

    def _benchmark_padded_dense_to_jagged(self, B: int, N: int, D: int, dtype: str='float16', offsets_dtype: str='int32', use_jagged_space_indexing: bool=False, test_suffix: str='', num_iters: int=1000):
        """Profile the op at load factors 5%..100% and print runtime/bandwidth."""
        batch_dim = IntVar(values=[1, B], name='batch_size')
        sequence_dim = IntImm(value=N, name='sequence_dim')
        total_length_dim = IntVar(values=[1, (B * N)], name='total_length')
        embedding_dim = IntImm(value=D, name='embedding_dim')
        offsets_dim = IntVar(values=[2, (B + 1)], name='offsets_dim')
        DENSE = Tensor(shape=[batch_dim, sequence_dim, embedding_dim], name='dense', dtype=dtype, is_input=True)
        OFFSETS_LIST = [Tensor(shape=[offsets_dim], name='offsets', dtype=offsets_dtype, is_input=True)]
        JAGGED = ops.padded_dense_to_jagged(total_length=total_length_dim)(x=DENSE, offsets_list=OFFSETS_LIST)
        SOURCE = Tensor(shape=[total_length_dim, embedding_dim], name='source', dtype=dtype, is_input=True)
        ANOTHER = ops.make_jagged(batch_dim=batch_dim, jagged_dims=[JaggedDim(min_value=0, max_value=N)])(source=SOURCE, offsets_list=OFFSETS_LIST)
        RESULT = ops.elementwise(FuncEnum.ADD)(JAGGED, ANOTHER)
        RESULT._attrs['name'] = 'result'
        RESULT._attrs['is_output'] = True
        model = compile_model([RESULT], detect_target(use_jagged_space_indexing=use_jagged_space_indexing), './tmp', f'benchmark_padded_dense_to_jagged_{test_suffix}')
        random.seed(0)
        # Load factor = fraction of the padded space actually occupied.
        load_factors = [(i / 20) for i in range(1, 21)]
        offset_tensors = [jagged_utils.generate_offsets(batch_size=B, max_seq_len=N, load_factor=load_factor, offsets_dtype=offsets_dtype) for load_factor in load_factors]
        results = []
        for (load_factor, offsets_pt) in zip(load_factors, offset_tensors):
            total_length = offsets_pt[(- 1)].item()
            dense_pt = get_random_torch_tensor([B, N, D], dtype)
            inputs = {'dense': dense_pt, 'offsets': offsets_pt}
            outputs = [get_torch_empty_tensor([total_length, D], dtype)]
            source_pt = get_random_torch_tensor([total_length, D], dtype)
            inputs['source'] = source_pt
            with tempfile.NamedTemporaryFile('r') as f:
                model.profile_with_tensors(inputs=inputs, outputs=outputs, num_iters=num_iters, filename=f.name)
                profiling_data = json.loads(f.read())
                padded_dense_to_jagged_records = [profiling_data[func_name] for func_name in profiling_data if func_name.startswith('padded_dense_to_jagged')]
                assert (len(padded_dense_to_jagged_records) == 1)
                runtime_ms = padded_dense_to_jagged_records[0]['ms_per_iter']
            dense_item = (total_length * D)
            jagged_item = (total_length * D)
            size = (2 if (dtype == 'float16') else 4)
            # NOTE(review): multiplying by ``.0`` makes the denominator zero
            # (ZeroDivisionError). This is almost certainly a mangled constant —
            # presumably 10**9 to express GB/s — confirm against upstream.
            bandwidth = (((jagged_item + dense_item) * size) / ((runtime_ms * 0.001) * .0))
            results.append([load_factor, runtime_ms, bandwidth])
        print()
        print(f'B={B!r}, N={N!r}, D={D!r}, dtype={dtype!r}:')
        print()
        for (load_factor, runtime_ms, bandwidth) in results:
            print(f'load factor: {int((load_factor * 100))}%, runtime: {round(runtime_ms, 6)} ms, bandwidth: {round(bandwidth, 3)} GB/s')

    def _test_benchmark_padded_dense_to_jagged(self):
        # NOTE(review): ``isolated_total_length`` is not a parameter of
        # _benchmark_padded_dense_to_jagged above — this call would raise
        # TypeError if invoked; the kwarg looks stale.
        self._benchmark_padded_dense_to_jagged(B=1024, N=260, D=256, dtype='float16', offsets_dtype='int32', use_jagged_space_indexing=False, isolated_total_length=True, test_suffix='benchmark')
class DirectoryReader(Reader):
    """Reader over a directory tree: recursively collects files accepted by
    the configured file filter."""

    def __init__(self, source, path):
        super().__init__(source, path)
        self._content = []
        accept = make_file_filter(self.filter, self.path)
        for top, _dirs, names in os.walk(self.path):
            for name in names:
                candidate = os.path.join(top, name)
                LOG.debug('%s', candidate)
                if accept(candidate):
                    self._content.append(candidate)

    def mutate(self):
        # A directory containing exactly one matching file behaves like that file.
        if len(self._content) == 1:
            return find_reader(self.source, self._content[0])
        return self

    def mutate_source(self):
        # A .zattrs manifest marks the directory as a zarr store.
        if os.path.exists(os.path.join(self.path, '.zattrs')):
            return load_source('zarr', self.path)
        per_file = [
            load_source('file', path=p, filter=self.filter, merger=self.merger)
            for p in sorted(self._content)
        ]
        return load_source('multi', per_file, filter=self.filter, merger=self.merger)

    def save(self, path):
        shutil.copytree(self.path, path)

    def write(self, f):
        raise NotImplementedError()
class MayonaizeShrimpLiveProcessor(HtmlProcessor.HtmlPageProcessor):
    # Processor registration metadata: only handles HTML, at priority 80.
    wanted_mimetypes = ['text/html']
    want_priority = 80
    loggerPath = 'Main.Text.MayonaizeShrimp'

    # NOTE(review): ``self`` is missing from the signature, and the regex
    # literal below is unterminated — the site URL pattern appears to have
    # been stripped during extraction. Restore both before use.
    def wantsUrl(url):
        if re.search('^ url):
            print(("ms Wants url: '%s'" % url))
            return True
        return False

    def preprocessBody(self, soup):
        """Remove white-on-white <span> elements (hidden text) from the page."""
        badspans = soup.find_all('span', style=re.compile('color\\W?:\\W?#ffffff', re.I))
        for bad in badspans:
            bad.decompose()
        return soup
def create_v6flowspec_actions(actions=None):
    """Build IPv6 Flow Spec action communities from an ``actions`` mapping.

    Each key names a Flow Spec action type; the corresponding BGP extended
    community class constructs the wire representation.
    """
    # Imported lazily, matching the surrounding module's convention.
    from ryu.services.protocols.bgp.api.prefix import (
        FLOWSPEC_ACTION_TRAFFIC_RATE,
        FLOWSPEC_ACTION_TRAFFIC_ACTION,
        FLOWSPEC_ACTION_REDIRECT,
        FLOWSPEC_ACTION_TRAFFIC_MARKING,
    )

    supported = {
        FLOWSPEC_ACTION_TRAFFIC_RATE: BGPFlowSpecTrafficRateCommunity,
        FLOWSPEC_ACTION_TRAFFIC_ACTION: BGPFlowSpecTrafficActionCommunity,
        FLOWSPEC_ACTION_REDIRECT: BGPFlowSpecRedirectCommunity,
        FLOWSPEC_ACTION_TRAFFIC_MARKING: BGPFlowSpecTrafficMarkingCommunity,
    }
    return _create_actions(actions, supported)
class HackageBackendtests(DatabaseTestCase):
    # NOTE(review): the class name says "Hackage" but every test exercises
    # backend.StackageBackend — confirm which backend is intended.

    def setUp(self):
        super().setUp()
        create_distro(self.session)
        self.create_project()

    def create_project(self):
        """Seed two projects: one real ('cpphs') and one bogus ('foobar')."""
        # NOTE(review): the ``homepage`` string literals in this class are
        # unterminated — the URLs were stripped during extraction; restore
        # them before running.
        project = models.Project(name='cpphs', homepage=' backend=BACKEND)
        self.session.add(project)
        self.session.commit()
        project = models.Project(name='foobar', homepage=' backend=BACKEND)
        self.session.add(project)
        self.session.commit()

    def test_get_version(self):
        """get_version returns the latest version for a real project and
        raises AnityaPluginException for an unknown one."""
        pid = 1
        project = models.Project.get(self.session, pid)
        exp = '1.20.9.1'
        obs = backend.StackageBackend.get_version(project)
        self.assertEqual(obs, exp)
        pid = 2
        project = models.Project.get(self.session, pid)
        self.assertRaises(AnityaPluginException, backend.StackageBackend.get_version, project)

    def test_get_version_url(self):
        """get_version_url builds the expected package page URL."""
        # NOTE(review): both literals below are unterminated (stripped URLs).
        project = models.Project(name='test', homepage=' backend=BACKEND)
        exp = ' obs = backend.StackageBackend.get_version_url(project)
        self.assertEqual(obs, exp)

    def test_get_versions(self):
        """get_ordered_versions returns the known versions, and raises for
        an unknown project."""
        pid = 1
        project = models.Project.get(self.session, pid)
        exp = ['1.20.9.1']
        obs = backend.StackageBackend.get_ordered_versions(project)
        self.assertEqual(obs, exp)
        pid = 2
        project = models.Project.get(self.session, pid)
        self.assertRaises(AnityaPluginException, backend.StackageBackend.get_version, project)
def expand_file_arguments(argv):
    """Expand response-file arguments of the form ``@filename``.

    Any argument starting with '@' is replaced by the shell-split tokens read
    from the named file; all other arguments pass through unchanged. When at
    least one expansion happened, the effective command line is echoed and the
    new list returned; otherwise the original ``argv`` is returned untouched.
    """
    new_args = []
    expanded = False
    for arg in argv:
        # Bug fix: this previously tested startswith('') — true for every
        # string — so EVERY argument was treated as a response file and
        # open() was attempted on it. Response files are marked with '@'.
        if arg.startswith('@'):
            expanded = True
            with open(arg[1:], 'r') as f:
                for line in f.readlines():
                    new_args += shlex.split(line)
        else:
            new_args.append(arg)
    if expanded:
        print(f"esptool.py {' '.join(new_args)}")
        return new_args
    return argv
def _set_assets_details(app):
    """Read the asset build metadata file and register assets with Jinja.

    Every built output becomes an ``Asset``; outputs of type 'theme' are
    additionally wrapped in a ``ThemeAsset`` ('uchan' is marked the default).
    """
    with open(config.asset_build_meta_file, 'r') as meta_fh:
        build_meta = json.load(meta_fh)

    all_assets = []
    theme_assets = []
    for built_path, details in build_meta['output'].items():
        kind = details['type']
        asset_name = details['name']
        asset_url = _normalize_asset_url(config.asset_url + os.path.basename(built_path))
        asset = Asset(asset_name, kind, asset_url)
        all_assets.append(asset)
        if kind == 'theme':
            is_default = asset_name == 'uchan'
            theme_assets.append(ThemeAsset(asset_name, asset_name, is_default, asset))

    app.jinja_env.globals['assets'] = all_assets
    app.jinja_env.globals['assets_themes'] = theme_assets
class SchedulerServiceStub(object):
    """Client stub for the ai_flow SchedulerService.

    Every RPC here is unary-unary and deserializes its reply as
    ``message__pb2.Response``; only the request message class varies, so the
    stub attributes are generated from a (method name, request class) table.
    """

    def __init__(self, channel):
        """Create one callable attribute per RPC on *channel*."""
        response_parser = message__pb2.Response.FromString
        rpc_table = [
            ('addNamespace', message__pb2.NamespaceProto),
            ('getNamespace', message__pb2.NameRequest),
            ('updateNamespace', message__pb2.NamespaceProto),
            ('listNamespaces', message__pb2.ListRequest),
            ('deleteNamespace', message__pb2.NameRequest),
            ('addWorkflow', message__pb2.WorkflowProto),
            ('updateWorkflow', scheduler__service__pb2.UpdateWorkflowRequest),
            ('getWorkflow', message__pb2.WorkflowIdentifier),
            ('deleteWorkflow', message__pb2.WorkflowIdentifier),
            ('disableWorkflow', message__pb2.WorkflowIdentifier),
            ('enableWorkflow', message__pb2.WorkflowIdentifier),
            ('listWorkflows', scheduler__service__pb2.ListWorkflowsRequest),
            ('addWorkflowSnapshot', message__pb2.WorkflowSnapshotProto),
            ('getWorkflowSnapshot', message__pb2.IdRequest),
            ('listWorkflowSnapshots', scheduler__service__pb2.ListWorkflowItemsRequest),
            ('deleteWorkflowSnapshot', message__pb2.IdRequest),
            ('deleteWorkflowSnapshots', message__pb2.WorkflowIdentifier),
            ('startWorkflowExecution', message__pb2.WorkflowIdentifier),
            ('stopWorkflowExecution', message__pb2.IdRequest),
            ('stopWorkflowExecutions', message__pb2.WorkflowIdentifier),
            ('deleteWorkflowExecution', message__pb2.IdRequest),
            ('getWorkflowExecution', message__pb2.IdRequest),
            ('listWorkflowExecutions', scheduler__service__pb2.ListWorkflowItemsRequest),
            ('startTaskExecution', message__pb2.TaskExecutionIdentifier),
            ('stopTaskExecution', message__pb2.TaskExecutionIdentifier),
            ('getTaskExecution', message__pb2.IdRequest),
            ('listTaskExecutions', scheduler__service__pb2.ListTaskExecutionsRequest),
            ('addWorkflowSchedule', message__pb2.WorkflowScheduleProto),
            ('getWorkflowSchedule', message__pb2.IdRequest),
            ('listWorkflowSchedules', scheduler__service__pb2.ListWorkflowItemsRequest),
            ('deleteWorkflowSchedule', message__pb2.IdRequest),
            ('deleteWorkflowSchedules', message__pb2.WorkflowIdentifier),
            ('pauseWorkflowSchedule', message__pb2.IdRequest),
            ('resumeWorkflowSchedule', message__pb2.IdRequest),
            ('addWorkflowTrigger', message__pb2.WorkflowTriggerProto),
            ('getWorkflowTrigger', message__pb2.IdRequest),
            ('listWorkflowTriggers', scheduler__service__pb2.ListWorkflowItemsRequest),
            ('deleteWorkflowTrigger', message__pb2.IdRequest),
            ('deleteWorkflowTriggers', message__pb2.WorkflowIdentifier),
            ('pauseWorkflowTrigger', message__pb2.IdRequest),
            ('resumeWorkflowTrigger', message__pb2.IdRequest),
        ]
        for method_name, request_cls in rpc_table:
            # Path is '/ai_flow.SchedulerService/<method>' — identical to the
            # generated per-attribute form this table replaces.
            setattr(self, method_name, channel.unary_unary(
                '/ai_flow.SchedulerService/' + method_name,
                request_serializer=request_cls.SerializeToString,
                response_deserializer=response_parser,
            ))
def _get_secret_from_env() -> Tuple[(str, str)]: openai_org = os.environ.get('LMQL_OPENAI_ORG', '') if ('LMQL_OPENAI_SECRET' in os.environ): openai_secret = os.environ['LMQL_OPENAI_SECRET'] elif ('OPENAI_API_KEY' in os.environ): openai_secret = os.environ['OPENAI_API_KEY'] else: raise ValueError('OpenAI API secret not found in env variables') return (openai_secret, openai_org)
class Spiral(Layout):
    """Spiral tiling layout: containers alternate horizontal/vertical splits,
    each level shrinking by ``main_ratio``."""

    def __init__(self, workspace_name: str, params: List[Any]):
        """params: [main_ratio: float = 0.5, screen_direction = INSIDE]."""
        super().__init__(LayoutName.SPIRAL, workspace_name)
        try:
            self.main_ratio = (float(params[0]) if (len(params) > 0) else 0.5)
            self.screen_direction = (ScreenDirection(params[1]) if (len(params) > 1) else ScreenDirection.INSIDE)
        except ValueError:
            # Unparsable parameters: fall back to defaults and warn once.
            self.main_ratio = 0.5
            self.screen_direction = ScreenDirection.INSIDE
            self._warn_wrong_parameters(params)

    def _params(self) -> List[Any]:
        # Serialized form mirrors the constructor's positional params.
        return [self.main_ratio, self.screen_direction.value]

    def anchor_mark(self) -> Optional[str]:
        return self.mark_last()

    def split_direction(self, context: Context) -> Optional[Direction]:
        # Alternate split orientation with container parity.
        return (Direction.HORIZONTAL if ((len(context.containers) % 2) == 0) else Direction.VERTICAL)

    def _update(self, context: Context):
        if ((len(context.containers) % 2) == 1):
            # Odd count: the new container was split vertically.
            if ((self.screen_direction == ScreenDirection.INSIDE) and ((((len(context.containers) - 1) / 2) % 2) == 0)):
                context.exec(f'[con_id="{context.focused.id}"] move up')
            if (len(context.containers) > 1):
                # Height shrinks geometrically with spiral depth.
                ratio = pow((1 - self.main_ratio), ((len(context.containers) - 1) / 2))
                context.exec(f'resize set height {context.workspace_height(ratio)}')
        else:
            # Even count: the new container was split horizontally.
            if ((self.screen_direction == ScreenDirection.INSIDE) and (((len(context.containers) / 2) % 2) == 0)):
                context.exec(f'[con_id="{context.focused.id}"] move left')
            ratio = pow((1 - self.main_ratio), (len(context.containers) / 2))
            context.exec(f'resize set width {context.workspace_width(ratio)}')

    # NOTE(review): takes ``cls`` but carries no @classmethod decorator — the
    # decorator appears to have been lost in extraction; confirm upstream.
    def create(cls, workspace_name: str, params: List[Any]) -> Optional['Layout']:
        return Spiral(workspace_name, params)
def print_help():
    """Print the interactive command menu (set/run/export/list/search/etc.).

    NOTE(review): several help strings below look truncated — e.g.
    '(MUST include )' and ' set user svc-' — text such as an '@domain'
    suffix appears to have been stripped during extraction; restore it.
    """
    print()
    print(f'{log.default}Command Menu{log.reset}')
    print(f'{log.default}set{log.reset} - used to set search parameters for cyphers, double/single quotes not required for any sub-commands')
    print(f'{log.default} sub-commands{log.reset}')
    print(f'{log.default} user{log.reset} - the user to use in user-specific cyphers (MUST include )')
    print(f'{log.default} group{log.reset} - the group to use in group-specific cyphers (MUST include )')
    print(f'{log.default} computer{log.reset} - the computer to use in computer-specific cyphers (SHOULD include .domain.name or )')
    print(f'{log.default} regex{log.reset} - the regex to use in regex-specific cyphers')
    print(f'{log.default} example{log.reset}')
    print(' set user svc-')
    print(' set group domain ')
    print(' set computer dc01.domain.local')
    print(' set regex .*((?i)web).*')
    print(f'{log.default}run{log.reset} - used to run cyphers')
    print(f'{log.default} parameters{log.reset}')
    print(f'{log.default} cypher number{log.reset} - the number of the cypher to run')
    print(f'{log.default} example{log.reset}')
    print(' run 7')
    print(f'{log.default}export{log.reset} - used to export cypher results to txt files')
    print(f'{log.default} parameters{log.reset}')
    print(f'{log.default} cypher number{log.reset} - the number of the cypher to run and then export')
    print(f'{log.default} output filename{log.reset} - the number of the output file, extension not needed')
    print(f'{log.default} example{log.reset}')
    print(f' export 31 results')
    print(f'{log.default}list{log.reset} - used to show a list of cyphers')
    print(f'{log.default} parameters{log.reset}')
    print(f'{log.default} list type{log.reset} - the type of cyphers to list (general, user, group, computer, regex, all)')
    print(f'{log.default} example{log.reset}')
    print(f' list general')
    print(f' list user')
    print(f' list group')
    print(f' list computer')
    print(f' list regex')
    print(f' list all')
    print(f'{log.default}search{log.reset} - used to search the list of cyphers')
    print(f'{log.default} parameters{log.reset}')
    print(f'{log.default} search query{log.reset} - the search string')
    print(f'{log.default} example{log.reset}')
    print(f' search domain admin')
    print(f' search shortest')
    print(f'{log.default}q, quit, exit, stop{log.reset} - used to exit the program')
    print(f'{log.default}clear, cls{log.reset} - used to clear the terminal')
    print(f'{log.default}help, ?{log.reset} - used to display this help menu')
    print()
def test_error_if_denominator_probability_is_zero_2_vars():
    """WoEEncoder.fit must raise ValueError naming the features whose
    categories produce a zero numerator or denominator (var_A, var_C)."""
    data = {
        'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
        'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
        'var_C': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
        'target': [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0],
    }
    frame = pd.DataFrame(data)
    encoder = WoEEncoder(variables=None)
    with pytest.raises(ValueError) as record:
        encoder.fit(frame, frame['target'])
    msg = "During the WoE calculation, some of the categories in the following features contained 0 in the denominator or numerator, and hence the WoE can't be calculated: var_A, var_C."
    assert str(record.value) == msg
class ConfigReader():
    """Facade over ConfigReaderUnshared that can share parsed configs.

    When ``share_config`` is given, the underlying reader is created lazily
    (per config name) and cached in the shared map; otherwise an unshared
    reader is created immediately (or ``use_config`` is adopted as-is).
    """

    def __init__(self, use_config=None, share_config=None, **kwargs):
        self._cfg_share = None
        self._cfg = None
        if (use_config is not None):
            self._cfg = use_config
        if (share_config is not None):
            # Defer reader creation until read() knows the config name.
            self._cfg_share = share_config
            self._cfg_share_kwargs = kwargs
            self._cfg_share_basedir = None
        elif (self._cfg is None):
            self._cfg = ConfigReaderUnshared(**kwargs)

    def setBaseDir(self, basedir):
        if self._cfg:
            self._cfg.setBaseDir(basedir)
        else:
            # Remember the base dir for the lazily-created shared reader.
            self._cfg_share_basedir = basedir

    def getBaseDir(self):
        if self._cfg:
            return self._cfg.getBaseDir()
        else:
            return self._cfg_share_basedir

    def share_config(self):
        return self._cfg_share

    def read(self, name, once=True):
        """Read config ``name``; with ``once`` the cached result is reused."""
        if (not self._cfg):
            self._create_unshared(name)
        if (once and (self._cfg.read_cfg_files is not None)):
            # Already read previously — reuse the cached file list.
            return self._cfg.read_cfg_files
        logSys.info('Loading configs for %s under %s ', name, self._cfg.getBaseDir())
        ret = self._cfg.read(name)
        self._cfg.read_cfg_files = ret
        return ret

    def _create_unshared(self, name=''):
        # Fetch (or create and register) the reader for `name` in shared mode;
        # fall back to a plain unshared reader otherwise.
        if ((not self._cfg) and (self._cfg_share is not None)):
            self._cfg = self._cfg_share.get(name)
            if (not self._cfg):
                self._cfg = ConfigReaderUnshared(share_config=self._cfg_share, **self._cfg_share_kwargs)
                if (self._cfg_share_basedir is not None):
                    self._cfg.setBaseDir(self._cfg_share_basedir)
                self._cfg_share[name] = self._cfg
        else:
            self._cfg = ConfigReaderUnshared(**self._cfg_share_kwargs)

    def sections(self):
        try:
            # Hide internal 'KNOWN/...' bookkeeping sections from callers.
            return (n for n in self._cfg.sections() if (not n.startswith('KNOWN/')))
        except AttributeError:
            return []

    def has_section(self, sec):
        try:
            return self._cfg.has_section(sec)
        except AttributeError:
            return False

    def has_option(self, sec, opt, withDefault=True):
        # withDefault=False checks the raw section dict, ignoring [DEFAULT].
        return (self._cfg.has_option(sec, opt) if withDefault else (opt in self._cfg._sections.get(sec, {})))

    def merge_defaults(self, d):
        self._cfg.get_defaults().update(d)

    def merge_section(self, section, *args, **kwargs):
        try:
            return self._cfg.merge_section(section, *args, **kwargs)
        except AttributeError:
            # No underlying reader yet: behave like a missing section.
            raise NoSectionError(section)

    def options(self, section, withDefault=False):
        try:
            return self._cfg.options(section, withDefault)
        except AttributeError:
            raise NoSectionError(section)

    # NOTE(review): mutable default ``vars={}`` — safe only because it is
    # passed through unmodified; kept as-is for compatibility.
    def get(self, sec, opt, raw=False, vars={}):
        try:
            return self._cfg.get(sec, opt, raw=raw, vars=vars)
        except AttributeError:
            raise NoSectionError(sec)

    def getOptions(self, section, *args, **kwargs):
        try:
            return self._cfg.getOptions(section, *args, **kwargs)
        except AttributeError:
            raise NoSectionError(section)
def convert_to_richtext(apps, schema_editor):
    """Data migration: flatten each ContentPage StreamField body into the
    concatenated source of its 'rich_text' blocks."""
    ContentPage = apps.get_model('home', 'ContentPage')
    for page in ContentPage.objects.all():
        # Only convert pages whose body is still block-structured
        # (raw_text unset); already-converted pages are left alone.
        if (page.body.raw_text is None):
            raw_text = ''.join([child.value.source for child in page.body if (child.block_type == 'rich_text')])
            page.body = raw_text
            page.save()
# NOTE(review): stray token — almost certainly the tail of a decorator
# (e.g. eth_utils ``@to_tuple``) mangled during extraction; restore it.
_tuple

def _compute_probabilities(miner_data: Iterable[MinerData], wait_blocks: int, sample_size: int) -> Iterable[Probability]:
    """Yield, for each observed low-percentile gas price (descending), the
    probability that a transaction at that price is included within
    ``wait_blocks`` blocks, given a sample of ``sample_size`` blocks."""
    miner_data_by_price = tuple(sorted(miner_data, key=operator.attrgetter('low_percentile_gas_price'), reverse=True))
    for idx in range(len(miner_data_by_price)):
        low_percentile_gas_price = miner_data_by_price[idx].low_percentile_gas_price
        # Descending sort => the suffix from idx onward accepts this price.
        num_blocks_accepting_price = sum((m.num_blocks for m in miner_data_by_price[idx:]))
        # Probability a single sampled block does NOT accept the price.
        inv_prob_per_block = ((sample_size - num_blocks_accepting_price) / sample_size)
        # Complement over ``wait_blocks`` independent blocks.
        probability_accepted = (1 - (inv_prob_per_block ** wait_blocks))
        (yield Probability(low_percentile_gas_price, probability_accepted))
def _get_attention(ops, Q, key_transform, X, lengths, is_train):
    """Compute softmax attention of query ``Q`` over keys derived from ``X``.

    Follows the thinc forward/backward convention: returns
    ``(attention, backprop)`` where backprop maps d_attention -> (dQ, dX).
    ``lengths`` delimits the ragged sequences for the per-sequence softmax.
    """
    (K, K_bp) = key_transform(X, is_train=is_train)
    # Scores: K @ Q (Q reshaped to a column vector).
    attention = ops.gemm(K, ops.reshape2f(Q, (- 1), 1))
    # Normalize scores independently within each sequence.
    attention = ops.softmax_sequences(attention, lengths)

    def get_attention_bwd(d_attention):
        # Undo the softmax first, then backprop the two gemm operands.
        d_attention = ops.backprop_softmax_sequences(d_attention, attention, lengths)
        dQ = ops.gemm(K, d_attention, trans1=True)
        dY = ops.xp.outer(d_attention, Q)
        dX = K_bp(dY)
        return (dQ, dX)
    return (attention, get_attention_bwd)
def test_update_web3(deployed_safe_math, w3):
    """update_w3 must return a fresh package bound to the new provider,
    sharing the manifest but with no deployments on the new chain."""
    fresh_w3 = Web3(Web3.EthereumTesterProvider())
    original_package, _ = deployed_safe_math
    assert original_package.w3 is w3
    updated = original_package.update_w3(fresh_w3)
    assert updated.w3 is fresh_w3
    assert original_package is not updated
    assert original_package.manifest == updated.manifest
    # Accessing deployments on the fresh chain must fail validation.
    with pytest.raises(EthPMValidationError, match='Package has no matching URIs on chain.'):
        updated.deployments
def _get_protection_flags(flags): protection = '' modifier = '' if (flags & 1): protection = 'NOACCESS' elif (flags & 2): protection = 'R' elif (flags & 4): protection = 'RW' elif (flags & 8): protection = 'W Copy' elif (flags & 16): protection = 'X' elif (flags & 32): protection = 'RX' elif (flags & 64): protection = 'RWX' elif (flags & 32): protection = 'RWX Copy' if (flags & 1): modifier = 'GUARD' elif (flags & 2): modifier = 'NOCACHE' elif (flags & 4): modifier = 'WRITECOMBINE' return ((protection + ' ') + modifier).strip()
class OptionSeriesHistogramSonificationContexttracksActivewhen(Options):
    """Generated option wrapper: activation thresholds for a sonification
    context track (crossingDown/crossingUp/max/min/prop).

    NOTE(review): each getter/setter pair below shares one name; in this
    generated options style the getter normally carries @property and the
    setter @<name>.setter — the decorators appear to have been lost in
    extraction (as written, each second def silently overwrites the first).
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
def assert_is_canonical_chain(headerdb, headers):
    """Assert that ``headers`` are the canonical chain tip of ``headerdb``
    and that stored difficulty scores accumulate correctly. No-op when
    ``headers`` is empty."""
    if not headers:
        return
    # The final header must be the canonical head.
    assert_headers_eq(headerdb.get_canonical_head(), headers[-1])
    # Each header must be canonical at its own height.
    for hdr in headers:
        assert headerdb.get_canonical_block_header_by_number(hdr.block_number) == hdr
    # Scores accumulate each difficulty on top of the parent header's score.
    parent = headerdb.get_block_header_by_hash(headers[0].parent_hash)
    per_block = tuple(h.difficulty for h in headers)
    expected_scores = tuple(accumulate(operator.add, per_block, parent.difficulty))
    for hdr, want in zip(headers, expected_scores[1:]):
        assert headerdb.get_score(hdr.hash) == want
def main(page: Page):
    """Flet demo: a scrollable body of 100 lines above a fixed yellow footer."""
    body = Column(scroll='auto')
    body.controls.extend(Text(f'Line {i}') for i in range(100))
    page.padding = 0
    page.spacing = 0
    page.horizontal_alignment = 'stretch'
    footer = Row([Container(Text('Footer'), bgcolor='yellow', padding=5, expand=True)])
    page.add(Container(body, padding=10, expand=True), footer)
class OnImageNotFound(Exception):
    """Raised when an image lookup fails.

    Optional positional args:
        args[0]: human-readable message.
        args[1]: response-like object whose ``statusCode`` is forced to 200
                 so the caller can report a soft error.
    """

    def __init__(self, *args):
        self.message = args[0] if args else None
        # BUGFIX: the original indexed args[1] unconditionally, raising
        # IndexError whenever fewer than two arguments were supplied.
        if len(args) > 1:
            args[1].statusCode = 200

    def __str__(self):
        if self.message:
            return 'OnImageNotFound, {0} '.format(self.message)
        return 'OnImageNotFound has been raised'
def extractBluemoontranslationsTumblrCom(item):
    """Feed-item parser for bluemoontranslations.tumblr.com.

    Returns a release message for recognized tags, None for items without
    chapter/volume info or previews, and False for unrecognized tags.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume information and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_parse_schema_rejects_undleclared_name():
    """parse_schema must reject a field whose type was never declared."""
    schema = {
        'type': 'record',
        'name': 'test_parse_schema_rejects_undleclared_name',
        'fields': [{'name': 'left', 'type': 'Thinger'}],
    }
    try:
        fastavro.parse_schema(schema)
    except fastavro.schema.UnknownType as e:
        # The error must carry the offending type name.
        assert 'Thinger' == e.name
    else:
        assert False, 'Never raised'
def example():
    """Build a pie chart whose hovered section gets a thick white border."""
    no_border = ft.BorderSide(0, ft.colors.with_opacity(0, ft.colors.WHITE))
    thick_border = ft.BorderSide(6, ft.colors.WHITE)

    async def on_chart_event(e: ft.PieChartEvent):
        # Highlight only the section under the pointer.
        for idx, section in enumerate(chart.sections):
            section.border_side = thick_border if idx == e.section_index else no_border
        await chart.update_async()

    # (color, radius) for each 25% slice.
    section_specs = [
        (ft.colors.BLUE, 80),
        (ft.colors.YELLOW, 65),
        (ft.colors.PINK, 60),
        (ft.colors.GREEN, 70),
    ]
    chart = ft.PieChart(
        sections=[
            ft.PieChartSection(25, color=color, radius=radius, border_side=no_border)
            for color, radius in section_specs
        ],
        sections_space=1,
        center_space_radius=0,
        on_chart_event=on_chart_event,
        expand=True,
    )
    return chart
class OptionPlotoptionsPackedbubbleStatesHoverHalo(Options):
    # Generated Highcharts option wrapper for
    # plotOptions.packedbubble.states.hover.halo.
    # NOTE(review): each option appears twice — a no-arg getter followed by a
    # one-arg setter with the same name. Upstream these are almost certainly
    # @property / @<name>.setter pairs whose decorators were stripped during
    # extraction; as written, the setter definition shadows the getter.

    def attributes(self):
        # Getter: SVG attributes applied to the halo; no default.
        return self._config_get(None)

    def attributes(self, value: Any):
        # Setter for attributes.
        self._config(value, js_type=False)

    def opacity(self):
        # Getter: halo opacity; defaults to 0.25.
        return self._config_get(0.25)

    def opacity(self, num: float):
        # Setter for opacity.
        self._config(num, js_type=False)

    def size(self):
        # Getter: halo size in pixels beyond the point; defaults to 5.
        return self._config_get(5)

    def size(self, num: float):
        # Setter for size.
        self._config(num, js_type=False)
@cached(TTLCache(1, 1800))  # one cached token, refreshed every 30 minutes
def _build_access_token(api_key: str, secret_key: str) -> str:
    """Fetch a Baidu OAuth access token for the given API key pair.

    NOTE(review): the extracted source was garbled — the decorator callee
    and the URL literal were truncated. Restored as ``@cached(...)``
    (cachetools, matching the dangling ``TTLCache(1, 1800)``) and the
    standard Baidu OAuth token endpoint. Confirm against upstream.
    """
    url = 'https://aip.baidubce.com/oauth/2.0/token'
    params = {
        'grant_type': 'client_credentials',
        'client_id': api_key,
        'client_secret': secret_key,
    }
    res = requests.get(url=url, params=params)
    if res.status_code == 200:
        return res.json().get('access_token')
    # BUGFIX: previously fell through and returned None on failure despite
    # the declared ``-> str``; fail loudly instead of caching a bad token.
    raise RuntimeError(f'failed to obtain access token: HTTP {res.status_code}')
def variable_from_module(module, variable=None, default=None):
    """Retrieve one or more variables from a module by name.

    With *variable* given (a name or iterable of names), fetch each one,
    substituting *default* for missing names. Without it, collect every
    public, non-module attribute. A single hit is returned bare; otherwise
    a list is returned. Returns *default* if the module cannot be imported.
    """
    if not module:
        return default
    mod = mod_import(module)
    if not mod:
        return default
    if variable:
        # Explicit names: missing ones fall back to `default`.
        result = [mod.__dict__.get(name, default) for name in make_iter(variable) if name]
    else:
        # No names given: every public, non-module attribute.
        result = [
            value
            for key, value in mod.__dict__.items()
            if not key.startswith('_') and not ismodule(value)
        ]
    return result[0] if len(result) == 1 else result
def test_from_master_key():
    """A keystore built from an xprv exposes it and derives the matching xpub."""
    xprv = 'xprv9xpBW4EdWnv4PEASBsu3VuPNAcxRiSMXTjAfZ9dkP5FCrKWCacKZBhS3cJVGCegAUNEp1uXXEncSAyro5CaJFwv7wYFcBQrF6MfWYoAXsTw'
    expected_xpub = 'xpub6BoXuZmXMAUMbiEuHuS3s3L6ienv7u5Npx6GMY3MwQnBj7qM89dojVkXTZtbpEvAzxSKAxnnsVDuwSAAvvXHWVncpX46V3LGj5SaKHtNNnc'
    keystore = from_master_key(xprv)
    assert keystore.xprv == xprv
    assert keystore.xpub == expected_xpub
def add_MsgServicer_to_server(servicer, server):
    """Register all cosmwasm.wasm.v1.Msg RPC handlers on *server*."""
    pb = cosmwasm_dot_wasm_dot_v1_dot_tx__pb2
    # (rpc name, request message class, response message class)
    method_specs = (
        ('StoreCode', pb.MsgStoreCode, pb.MsgStoreCodeResponse),
        ('InstantiateContract', pb.MsgInstantiateContract, pb.MsgInstantiateContractResponse),
        ('ExecuteContract', pb.MsgExecuteContract, pb.MsgExecuteContractResponse),
        ('MigrateContract', pb.MsgMigrateContract, pb.MsgMigrateContractResponse),
        ('UpdateAdmin', pb.MsgUpdateAdmin, pb.MsgUpdateAdminResponse),
        ('ClearAdmin', pb.MsgClearAdmin, pb.MsgClearAdminResponse),
    )
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, name),
            request_deserializer=request_cls.FromString,
            response_serializer=response_cls.SerializeToString,
        )
        for name, request_cls, response_cls in method_specs
    }
    generic_handler = grpc.method_handlers_generic_handler('cosmwasm.wasm.v1.Msg', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
def test_bool():
    """Config bool fields accept real bools and common truthy/falsy strings,
    and reject everything else with ValueError."""
    c = Config('testconfig', foo=(True, bool, ''), bar=(False, bool, ''))
    assert c.foo == True
    c.foo = True
    assert c.foo == True
    c.foo = False
    assert c.foo == False
    # Accepted spellings, case-insensitive.
    for word in ('yes', 'on', 'true', 'Yes', 'On', 'TRUE', '1'):
        c.foo = word
        assert c.foo == True
    for word in ('no', 'off', 'fAlse', 'No', 'Off', 'FALSE', '0'):
        c.foo = word
        assert c.foo == False
    # Unrecognized strings are rejected.
    for word in ('none', 'ok', 'bla', 'asdasdasd', 'cancel'):
        with raises(ValueError):
            c.foo = word
    # Non-bool objects are rejected too, even "truthy" ones.
    for bad in (1, 2, [2], None, 0, 0.0, 1.0, []):
        with raises(ValueError):
            c.foo = bad
class TransportLayer(Module):
    """SATA transport-layer model: encodes outgoing FISes into link-layer
    packets and decodes incoming packets back into FIS objects, optionally
    looping decoded FISes straight back out."""

    def __init__(self, link, debug=False, loopback=False):
        self.link = link
        self.debug = debug
        self.loopback = loopback
        self.link.set_transport(self)
        self.command = None
        self.n = None

    def set_command(self, command):
        # Command layer that receives decoded FISes (unless in loopback).
        self.command = command

    def send(self, fis):
        fis.encode()
        packet = LinkTXPacket(fis.packet)
        # Propagate the error-injection marker down to the link layer.
        if hasattr(fis, 'data_error_injection'):
            packet.data_error_injection = True
        self.link.tx_packets.append(packet)
        if self.debug and not self.loopback:
            print_transport(fis, self.n)

    def callback(self, packet):
        # The FIS type lives in the low byte of the first dword.
        fis_type = packet[0] & 255
        plain_fis = {
            fis_types['REG_H2D']: FIS_REG_H2D,
            fis_types['REG_D2H']: FIS_REG_D2H,
            fis_types['DMA_ACTIVATE_D2H']: FIS_DMA_ACTIVATE_D2H,
        }
        if fis_type in plain_fis:
            fis = plain_fis[fis_type](packet)
        elif fis_type == fis_types['DATA']:
            fis = FIS_DATA(packet, direction='H2D')
        else:
            fis = FIS_UNKNOWN(packet, direction='H2D')
        if self.debug:
            print_transport(fis, self.n)
        if self.loopback:
            self.send(fis)
        else:
            self.command.callback(fis)
def test_create_chunks_with_config(chunker, text_splitter_mock, loader_mock, app_id, data_type):
    """Chunks shorter than ``min_chunk_size`` must be dropped."""
    text_splitter_mock.split_text.return_value = ['Chunk 1', 'long chunk']
    loader_mock.load_data.return_value = {
        'data': [{'content': 'Content 1', 'meta_data': {'url': 'URL 1'}}],
        'doc_id': 'DocID',
    }
    config = ChunkerConfig(chunk_size=50, chunk_overlap=0, length_function=len, min_chunk_size=10)
    result = chunker.create_chunks(loader_mock, 'test_src', app_id, config)
    # 'Chunk 1' is 7 chars — below min_chunk_size=10 — so only the long
    # chunk survives.
    assert result['documents'] == ['long chunk']
class SessionsSpeakersLink(SoftDeletionModel):
    """Association row linking a session to one of its speakers."""

    __tablename__ = 'sessions_speakers_links'

    id = db.Column(db.Integer, primary_key=True)
    # Denormalized event reference kept alongside the link.
    event_id = db.Column(db.Integer, nullable=False)
    session_id = db.Column(db.Integer, nullable=False)
    speaker_id = db.Column(db.Integer, nullable=False)

    def __repr__(self):
        pair = (self.session_id, self.speaker_id)
        return '<SSLink {!r}:{!r}>'.format(*pair)
# NOTE(review): reconstructed formatting for a flattened test class; several
# message strings were wrapped across physical lines in the extracted source
# and have been re-joined with a single space. Marker strings ('^'/'~') may
# have had runs of spaces collapsed by the extraction — verify alignment
# against the upstream file.
class Parser(TestCase):
    # Unit tests for expression_v2._Parser: each test builds a pair of
    # helpers bound to one parse entry point and checks both successful
    # parses and the exact syntax-error messages with caret markers.

    def setUp(self):
        super().setUp()
        self.parser = expression_v2._Parser(SerializedOps())

    def mkasserts(self, parse):
        # Build (assertParses, assertRaises) helpers bound to *parse*.
        def assertParses(expression, desired_result, desired_indices, *desired_shape):
            # Each expression is parsed twice: verbatim and surrounded by
            # spaces, to verify whitespace trimming.
            with self.subTest('without-spaces'):
                s_expression = expression_v2._Substring(expression)
                (actual_result, actual_shape, actual_indices, summed_indices) = parse(s_expression)
                self.assertEqual(actual_result, desired_result)
                self.assertEqual(actual_shape, desired_shape)
                self.assertEqual(actual_indices, desired_indices)
            with self.subTest('with-spaces'):
                s_expression = expression_v2._Substring(((' ' + expression) + ' '))
                (actual_result, actual_shape, actual_indices, summed_indices) = parse(s_expression)
                self.assertEqual(actual_result, desired_result)
                self.assertEqual(actual_shape, desired_shape)
                self.assertEqual(actual_indices, desired_indices)

        def assertRaises(message, expression, markers, check_trim=True):
            # The exception message must include the expression and a
            # marker line pointing at the offending characters.
            s_expression = expression_v2._Substring(expression)
            with self.assertRaises(expression_v2.ExpressionSyntaxError) as cm:
                parse(s_expression)
            self.assertEqual(str(cm.exception), ((((message + '\n') + expression) + '\n') + markers))
            if check_trim:
                expression_spaces = ((' ' + expression) + ' ')
                s_expression = expression_v2._Substring(expression_spaces)
                with self.assertRaises(expression_v2.ExpressionSyntaxError) as cm:
                    parse(s_expression)
                self.assertEqual(str(cm.exception), ((((message + '\n') + expression_spaces) + '\n ') + markers))
        return (assertParses, assertRaises)

    def test_parse_expression(self):
        (assertParses, assertRaises) = self.mkasserts(self.parser.parse_expression)
        assertParses('1', '1i', '')
        assertParses('-1 + 2', 'add(neg(1i), 2i)', '')
        assertParses('- 1 + a2_i a2_i + 2', 'add(neg(1i), trace(mul(a2, a2), 0, 1), 2i)', '')
        assertParses('a2_i + a23_ij a3_j + a2_i', 'add(a2, trace(mul(a23, a3), 1, 2), a2)', 'i', 2)
        assertParses('a2_i + a23_ij a3_j + a2_i', 'add(a2, trace(mul(a23, a3), 1, 2), a2)', 'i', 2)
        assertParses('a012_ijk + a021_ikj + a102_jik + a120_jki + a201_kij + a210_kji', 'add(a012, transpose(a021, 0, 2, 1), transpose(a102, 1, 0, 2), transpose(a120, 2, 0, 1), transpose(a201, 1, 2, 0), transpose(a210, 2, 1, 0))', 'ijk', 0, 1, 2)
        assertParses('-2^2', 'add(neg(pow(2i, 2i)))', '')
        assertRaises('Index i of the first term [^] is missing in the third term [~].', 'a2_i + a2_i + 3 + a2_i', '^^^^ ~')
        assertRaises('Index i of the second term [~] is missing in the first term [^].', '1 + a2_i + 3', '^ ~~~~')
        assertRaises('Index i has length 2 in the first term [^] but length 3 in the fourth term [~].', 'a23_ij + a23_ij + a23_ij + a32_ij', '^^^^^^ ~~~~~~')

    def test_parse_fraction(self):
        (assertParses, assertRaises) = self.mkasserts(self.parser.parse_fraction)
        assertParses('1 / 2', 'div(1i, 2i)', '')
        assertParses('2 a2_i / 2 a2_j a2_j', 'div(mul(2i, a2), trace(mul(2i, a2, a2), 0, 1))', 'i', 2)
        assertRaises('Repeated fractions are not allowed. Use parentheses if necessary.', '1 / 2 / 3', '')
        assertRaises('The denominator must have dimension zero.', '1 / a2_i', ' ^^^^')
        assertRaises('Index i occurs more than twice.', 'a2_i / a22_ii', '')
        assertRaises('Index i occurs more than twice.', 'a22_ii / a22_ii', '')

    def test_parse_term(self):
        (assertParses, assertRaises) = self.mkasserts(self.parser.parse_term)
        assertParses('1 a2_i a2_j', 'mul(1i, a2, a2)', 'ij', 2, 2)
        assertParses('a2_i a23_ij a3_j', 'trace(trace(mul(a2, a23, a3), 0, 1), 0, 1)', '')
        assertParses('a2_i a3_j a3_j', 'trace(mul(a2, a3, a3), 1, 2)', 'i', 2)
        assertRaises('Numbers are only allowed at the start of a term.', '1 1', ' ^')
        assertRaises('Index i is assigned to axes with different lengths: 2 and 3.', '1 a2_i a3_i a', '')
        assertRaises('Index i occurs more than twice.', '1 a22_ii a2_i a', '')
        assertRaises('Index i occurs more than twice.', '1 a22_ii a22_ii a', '')

    def test_parse_power_number(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_power(s, allow_number=True)))
        assertParses('1^2', 'pow(1i, 2i)', '')
        assertParses('1^-2', 'pow(1i, -2i)', '')
        assertParses('a2_i^2', 'pow(a2, 2i)', 'i', 2)
        assertRaises('The exponent must have dimension zero.', 'a^(a2_i)', ' ^^^^^^')
        assertRaises('Index i occurs more than twice.', 'a2_i^(a22_ii)', '')
        assertRaises('Index i occurs more than twice.', 'a2_i^(a22_ii)', '')
        assertRaises('Unexpected whitespace before `^`.', 'a ^2', ' ^')
        assertRaises('Unexpected whitespace after `^`.', 'a^ 2', ' ^')
        assertRaises('Expected a number, variable, scope, mean, jump or function call.', '^2', '^')

    def test_parse_power_nonumber(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_power(s, allow_number=False)))
        assertParses('a2_i^2', 'pow(a2, 2i)', 'i', 2)
        assertParses('a23_ij^-2', 'pow(a23, -2i)', 'ij', 2, 3)
        assertRaises('The exponent must have dimension zero.', 'a^(a2_i)', ' ^^^^^^')
        assertRaises('Unexpected whitespace before `^`.', 'a ^2', ' ^')
        assertRaises('Unexpected whitespace after `^`.', 'a^ 2', ' ^')
        assertRaises('Expected a variable, scope, mean, jump or function call.', '^2', '^')
        assertRaises('Expected an int.', 'a^2_i', ' ^^^')
        assertRaises('Expected an int or scoped expression.', 'a^', ' ^')
        assertRaises('Expected an int or scoped expression.', 'a^a2_i', ' ^^^^')
        assertRaises('Repeated powers are not allowed. Use parentheses if necessary.', 'a^a^a', '^^^^^')

    def test_parse_variable(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_item(s, allow_number=False)))
        assertParses('a22_ij', 'a22', 'ij', 2, 2)
        assertParses('a222_iji', 'trace(a222, 0, 2)', 'j', 2)
        assertParses('a2_0', 'get(a2, 0, 0)', '')
        assertParses('a23_1i', 'get(a23, 0, 1)', 'i', 3)
        assertRaises('No such variable: `unknown`.', 'unknown_i', '^^^^^^^')
        assertRaises('Expected 1 index for variable `a2` but got 2.', 'a2_ij', '^^^^^')
        assertRaises('Expected 2 indices for variable `a22` but got 1.', 'a22_i', '^^^^^')
        assertRaises('Index i occurs more than twice.', 'a222_iii', '')
        assertRaises('Index of axis with length 2 out of range.', 'a23_3i', ' ^')
        assertRaises('Symbol `$` is not allowed as index.', 'a234_i$j', ' ^')

    def test_parse_call(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_item(s, allow_number=False)))
        assertParses('f(a2_i + a2_i)', 'call(f, add(a2, a2))', 'i', 2)
        assertParses('f(a2_i (a3_j + a3_j))', 'call(f, mul(a2, scope(add(a3, a3))))', 'ij', 2, 3)
        assertParses('f62_mi(a256_ilm)', 'trace(trace(call(f62, a256), 2, 3), 0, 2)', 'l', 5)
        assertParses('f42_ij(a34_ki)', 'trace(call(f42, a34), 1, 2)', 'kj', 3, 2)
        assertParses('f32_ij(a34_ik)', 'trace(call(f32, a34), 0, 2)', 'kj', 4, 2)
        assertParses('f23_i0(a2_k)', 'get(call(f23, a2), 2, 0)', 'ki', 2, 2)
        assertParses('f23_1j(a2_k)', 'get(call(f23, a2), 1, 1)', 'kj', 2, 3)
        assertRaises('Expected a number, variable, scope, mean, jump or function call.', 'f()', ' ^')
        assertRaises('No such function: `g`.', 'g(a)', '^')
        assertRaises('Index i occurs more than twice.', 'f2_i(a22_ii)', '')
        assertRaises('Index i occurs more than twice.', 'f22_ii(a2_i)', '')
        assertRaises('Index i occurs more than twice.', 'f22_ii(a22_ii)', '')
        assertRaises('Index of axis with length 2 out of range.', 'f2_2(a)', ' ^')
        assertRaises('Expected 2 indices for axes generated by function `f23` but got 1.', 'f23_j(a4_i)', '')
        assertRaises('Symbol `$` is not allowed as index.', 'f234_i$j(a)', ' ^')

    def test_parse_item_number(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_item(s, allow_number=True)))
        assertRaises('Expected a number, variable, scope, mean, jump or function call.', ' ', '^^^', check_trim=False)
        assertRaises('Expected a number, variable, scope, mean, jump or function call.', '1a', '^^')
        assertRaises('Expected a number, variable, scope, mean, jump or function call. Hint: the operators `+`, `-` and `/` must be surrounded by spaces.', '1+a', '^^^')
        assertRaises('Expected a number, variable, scope, mean, jump or function call. Hint: the operators `+`, `-` and `/` must be surrounded by spaces.', '1-a', '^^^')
        assertRaises('Expected a number, variable, scope, mean, jump or function call. Hint: the operators `+`, `-` and `/` must be surrounded by spaces.', '1/a', '^^^')

    def test_parse_item_nonumber(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_item(s, allow_number=False)))
        assertRaises('Expected a variable, scope, mean, jump or function call.', ' ', '^^^', check_trim=False)
        assertRaises('Numbers are only allowed at the start of a term.', '1', '^')
        assertRaises('Numbers are only allowed at the start of a term.', '1a', '^^')
        assertRaises('Expected a variable, scope, mean, jump or function call.', 'f[a]', '^^^^')
        assertRaises('Expected a variable, scope, mean, jump or function call.', 'f{a}', '^^^^')
        assertRaises('Expected a variable, scope, mean, jump or function call.', 'f<a>', '^^^^')
        assertRaises('Expected a variable, scope, mean, jump or function call.', '<a>', '^^^')

    def test_parse_scope(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_item(s, allow_number=False)))
        assertParses('(1)', 'scope(1i)', '')
        assertParses('(1 + a)', 'scope(add(1i, a))', '')
        assertRaises('Unclosed `(`.', '(1', '^ ~', check_trim=False)
        assertRaises('Parenthesis `(` closed by `]`.', '(1]', '^ ~')
        assertRaises('Parenthesis `(` closed by `]`.', '(1])', '^ ~')
        assertRaises('Unexpected symbols after scope.', '(1)spam', ' ^^^^')

    def test_parse_mean(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_item(s, allow_number=False)))
        assertParses('{1 + 2}', 'mean(add(1i, 2i))', '')
        assertParses('{(a2_i)}', 'mean(scope(a2))', 'i', 2)

    def test_parse_jump(self):
        (assertParses, assertRaises) = self.mkasserts((lambda s: self.parser.parse_item(s, allow_number=False)))
        assertParses('[1 + 2]', 'jump(add(1i, 2i))', '')
        assertParses('[(a2_i)]', 'jump(scope(a2))', 'i', 2)

    def test_parse_signed_int(self):
        (assertParses, assertRaises) = self.mkasserts(self.parser.parse_signed_int)
        assertParses('1', '1i', '')
        assertParses('-1', '-1i', '')
        assertRaises('Expected an int.', '', '^', check_trim=False)
        assertRaises('Expected an int.', ' ', '^^^', check_trim=False)
        assertRaises('Expected an int.', 'a', '^')

    def test_parse_unsigned_int(self):
        (assertParses, assertRaises) = self.mkasserts(self.parser.parse_unsigned_int)
        assertParses('1', '1i', '')
        assertParses('2', '2i', '')
        assertParses('34', '34i', '')
        assertRaises('Expected an int.', '', '^', check_trim=False)
        assertRaises('Expected an int.', ' ', '^^^', check_trim=False)
        assertRaises('Expected an int.', 'a', '^')
        assertRaises('Expected an int.', '-1', '^^')

    def test_parse_unsigned_float(self):
        (assertParses, assertRaises) = self.mkasserts(self.parser.parse_unsigned_float)
        assertParses('1', '1.0f', '')
        assertParses('1.0', '1.0f', '')
        assertParses('1.', '1.0f', '')
        assertParses('0.1', '0.1f', '')
        assertParses('1e-1', '0.1f', '')
        assertParses('1.0e-1', '0.1f', '')
        assertParses('.1e-1', '0.01f', '')
        assertRaises('Expected a float.', '', '^', check_trim=False)
        assertRaises('Expected a float.', ' ', '^^^', check_trim=False)
        assertRaises('Expected a float.', 'a', '^')
        assertRaises('Expected a float.', '-1.2', '^^^^')
def find_placeholder_dependencies(cfg: CfgSimple):
    """Fixed-point pass over the CFG mapping nodes to the placeholder
    argument they (transitively) depend on.

    Block nodes seed the map with their own PlaceholderArg arguments;
    assignments inherit the dependency of their expression. Iterates until
    no new entries appear.
    """
    deps = {}
    previous_size = None
    # Repeat until the map stops growing (fixed point).
    while previous_size != len(deps):
        previous_size = len(deps)
        for node in cfg.graph.nodes:
            if isinstance(node, ir.Block):
                # Placeholder args depend on themselves.
                deps.update((arg, arg) for arg in node.args if isinstance(arg, ast.PlaceholderArg))
            if isinstance(node, ir.Assignment) and node.expr in deps:
                deps[node] = deps[node.expr]
    return deps
def action_create(argc, argv):
    """Create a new project directory and render every bundled template."""
    args = parse_args(argv)
    if os.path.exists(args.path):
        log.error('path %s already exists' % args.path)
        quit()
    # --hidden must be a comma separated list of positive layer sizes.
    layer_sizes = [
        n
        for n in (int(tok.strip()) for tok in args.hidden.split(',') if tok.strip() != '')
        if n > 0
    ]
    if len(layer_sizes) < 1:
        log.error('the --hidden argument must be a comma separated list of at least one positive integer')
        quit()
    ctx = {
        'NUM_INPUTS': args.num_inputs,
        'HIDDEN': ', '.join(str(n) for n in layer_sizes),
        'NUM_OUTPUTS': args.num_outputs,
        'BATCH_SIZE': args.batch_size,
        'MAX_EPOCHS': args.max_epochs,
    }
    log.info('initializing project %s with ANN %d(%s)%d ...', args.path, ctx['NUM_INPUTS'], ctx['HIDDEN'], ctx['NUM_OUTPUTS'])
    os.makedirs(args.path, exist_ok=True)
    for tpl in Templates:
        log.info('creating %s', tpl.name)
        with open(os.path.join(args.path, tpl.name), 'wt') as fp:
            fp.write(tpl.compile(ctx))
class ComparerTitle():
    """Pick the best title among candidates produced by several extractors."""

    def find_matches(self, list_title):
        # Every title that agrees with at least one other candidate
        # (one entry per agreeing pair).
        return [first for first, second in itertools.combinations(list_title, 2) if first == second]

    def extract_match(self, list_title_matches):
        # The unique most frequent match, or None when the counts tie
        # (or there are no matches at all).
        counted = [(list_title_matches.count(title), title) for title in set(list_title_matches)]
        if counted and max(counted)[0] != min(counted)[0]:
            return max(counted)[1]
        return None

    def choose_shortest_title(self, list_title):
        # Shortest title wins; min() on (length, title) breaks ties
        # lexicographically.
        return min((len(title), title) for title in list_title)[1]

    def extract(self, item, list_article_candidate):
        """Return the chosen title for *item*, or None without candidates."""
        titles = [cand.title for cand in list_article_candidate if cand.title is not None]
        if not titles:
            return None
        matches = self.find_matches(titles)
        best = self.extract_match(matches)
        if best:
            return best
        if matches:
            # All matches tied: fall back to the shortest distinct match.
            return self.choose_shortest_title(set(matches))
        return self.choose_shortest_title(titles)
def draw_line_round_corners_polygon(surf, p1, p2, c, w):
    """Draw a thick line with rounded ends: a quad body plus two end caps."""
    if p1 == p2:
        # Degenerate segment: just draw the cap.
        pygame.draw.circle(surf, c, p1, round(w / 2))
        return
    start = pygame.math.Vector2(p1)
    end = pygame.math.Vector2(p2)
    direction = (end - start).normalize()
    # Half-width offset perpendicular to the segment.
    offset = (pygame.math.Vector2(-direction.y, direction.x) * w) // 2
    corners = [start + offset, end + offset, end - offset, start - offset]
    pygame.draw.polygon(surf, c, corners)
    pygame.draw.circle(surf, c, p1, round(w / 2))
    pygame.draw.circle(surf, c, p2, round(w / 2))
def test_LevelFormatter():
    """Each level uses its own format string; unlisted levels fall back to '*'."""
    stream = StringIO()
    handler = logging.StreamHandler(stream)
    handler.setFormatter(LevelFormatter(fmt={
        '*': '[%(levelname)s] %(message)s',
        'DEBUG': '%(name)s [%(levelname)s] %(message)s',
        'INFO': '%(message)s',
    }))
    # A unique logger name keeps this test isolated from other tests.
    name = next(unique_logger_name)
    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)
    log.addHandler(handler)
    log.debug('this uses a custom format string')
    log.info('this also uses a custom format string')
    log.warning('this one uses the default format string')
    expected = textwrap.dedent((' %s [DEBUG] this uses a custom format string\n this also uses a custom format string\n [WARNING] this one uses the default format string\n ' % name))
    assert stream.getvalue() == expected
class FlyteScopedSystemException(FlyteScopedException):
    """Wraps an exception raised from Flyte system (platform) code, as
    opposed to user task code."""

    def __init__(self, exc_type, exc_value, exc_tb, **kwargs):
        # 'SYSTEM' tags the error as platform-side rather than user-side.
        super(FlyteScopedSystemException, self).__init__('SYSTEM', exc_type, exc_value, exc_tb, **kwargs)

    @property
    def verbose_message(self):
        # BUGFIX(review): the base class exposes ``verbose_message`` as a
        # property — it is read below without being called — so without
        # @property this override returned a bound method instead of a
        # string. Decorator likely lost in extraction; confirm upstream.
        base_msg = super(FlyteScopedSystemException, self).verbose_message
        base_msg += '\n\nSYSTEM ERROR! Contact platform administrators.'
        return base_msg
# NOTE(review): the line below lost its decorator callee during extraction —
# upstream this is almost certainly a FastAPI route registration such as
# ``@app.get('/{catchall:path}', response_class=Response, tags=['Default'])``.
# Restore the missing ``@<router>.get`` against the original file.
('/{catchall:path}', response_class=Response, tags=['Default'])
def read_other_paths(request: Request) -> Response:
    """Catch-all route: serve static admin-UI files when the path matches
    one, 404 for unknown API paths, and otherwise fall back to the admin
    UI index page (client-side routing)."""
    path = request.path_params['catchall']
    logger.debug(f'Catch all path detected: {path}')
    try:
        path = sanitise_url_path(path)
    except MalisciousUrlException:
        # Path tried to escape the UI directory — serve the index instead.
        return get_admin_index_as_response()
    ui_file = match_route(get_ui_file_map(), path)
    if (not ui_file):
        ui_file = get_path_to_admin_ui_file(path)
    if (ui_file and ui_file.is_file()):
        # Defense in depth: even a matched file must live inside the UI dir.
        if (not path_is_in_ui_directory(ui_file)):
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='Item not found')
        logger.debug("catchall request path '{}' matched static admin UI file: {}", path, ui_file)
        return FileResponse(ui_file)
    # API-prefixed paths that reached the catch-all are invalid routes.
    if path.startswith(API_PREFIX[1:]):
        logger.debug("catchall request path '{}' matched an invalid API route, return 404", path)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='Item not found')
    logger.debug("catchall request path '{}' did not match any admin UI routes, return generic admin UI index", path)
    return get_admin_index_as_response()
def potenial_moves():
    """Recompute the potential move lists for both colours into globVar.

    NOTE(review): the name keeps the original spelling ('potenial') since
    external callers reference it. The white pass does not call
    remove_invalid_moves() while the black pass does — preserved as-is;
    confirm whether that asymmetry is intentional.
    """
    globVar.p_w_moves = []
    globVar.p_b_moves = []
    globVar.p_w_Num = 0
    globVar.p_b_Num = 0
    # White pieces.
    for piece in globVar.w_pieces:
        moves = piece.scan()
        globVar.r_avail = copy.deepcopy(moves)
        globVar.r_avail_Num = len(moves)
        moves = mark_invalid_moves(moves, piece)
        globVar.p_w_moves.extend(moves)
        board.Grid(piece.row, piece.col).piece.selected = False
    resetAvailMoves(globVar.p_w_moves)
    globVar.p_w_Num = len(globVar.p_w_moves)
    # Black pieces.
    for piece in globVar.b_pieces:
        moves = piece.scan()
        globVar.r_avail = copy.deepcopy(moves)
        globVar.r_avail_Num = len(moves)
        moves = mark_invalid_moves(moves, piece)
        moves = remove_invalid_moves(moves)
        globVar.p_b_moves.extend(moves)
        board.Grid(piece.row, piece.col).piece.selected = False
    resetAvailMoves(globVar.p_b_moves)
    globVar.p_b_Num = len(globVar.p_b_moves)
    clearAllOptions()
class GridPlane(Module):
    """Mayavi module that renders a wireframe grid plane for structured
    datasets (image data, structured grids, rectilinear grids)."""

    # Version number for persistence.
    __version__ = 0

    # The grid-plane component that slices the dataset.
    grid_plane = Instance(grid_plane.GridPlane, allow_none=False, record=True)

    # The actor that renders the plane.
    # BUGFIX: was ``allow_non=False`` — a typo Traits silently stores as
    # metadata, leaving the trait nullable; fixed to match ``grid_plane``.
    actor = Instance(Actor, allow_none=False, record=True)

    input_info = PipelineInfo(datasets=['image_data', 'structured_grid', 'rectilinear_grid'], attribute_types=['any'], attributes=['any'])

    view = View(Group(Item(name='grid_plane', style='custom'), Item(name='actor', style='custom'), show_labels=False))

    def setup_pipeline(self):
        """Create the components; called once, before the scene is set."""
        self.grid_plane = grid_plane.GridPlane()
        self.actor = Actor()
        prop = self.actor.property
        # 'w' selects the wireframe representation.
        prop.trait_set(backface_culling=0, frontface_culling=0, representation='w')
        self.actor.mapper.scalar_visibility = 0

    def update_pipeline(self):
        """Re-wire component inputs when the upstream pipeline changes."""
        mm = self.module_manager
        if mm is None:
            return
        self.grid_plane.inputs = [mm.source]
        self.actor.set_lut(mm.scalar_lut_manager.lut)
        self.pipeline_changed = True

    def update_data(self):
        """Propagate a data-changed event downstream."""
        self.data_changed = True

    def _grid_plane_changed(self, old, new):
        # Keep the actor fed by the current grid plane.
        actor = self.actor
        if actor is not None:
            actor.inputs = [new]
        self._change_components(old, new)

    def _actor_changed(self, old, new):
        new.scene = self.scene
        gp = self.grid_plane
        if gp is not None:
            new.inputs = [gp]
        self._change_components(old, new)
def _get_array_filter(field, key, value):
    """Build a SQLAlchemy filter for an ARRAY column from a Mongo-style
    query value.

    Lists and bare scalars become containment checks; dicts support the
    ``$regex``, ``$contains`` and ``$overlap`` operators. Anything else in
    a dict raises QueryConversionException.
    """
    if isinstance(value, list):
        # Bare list: the array must contain every element.
        return field.contains(value)
    if not isinstance(value, dict):
        # Scalar: the array must contain it.
        return field.contains([value])
    if '$regex' in value:
        # Regex matches against the comma-joined string form of the array.
        column = func.array_to_string(field, ',')
        return _dict_key_to_filter(column, key, value)
    if '$contains' in value:
        return field.contains(_to_list(value['$contains']))
    if '$overlap' in value:
        return field.overlap(_to_list(value['$overlap']))
    raise QueryConversionException(f'Unsupported search option for ARRAY field: {value}')
class RaiseExceptionOnRequestMiddleware(ClientMiddleware):
    """Test middleware that raises when a request was pre-marked by an
    earlier middleware, proving the middleware chain actually ran."""

    class MiddlewareProcessedRequest(Exception):
        pass

    def request(self, send_request):
        def handler(request_id, meta, request, message_expiry_in_seconds):
            actions = request.actions
            # Only a strict ``is True`` marker counts as "processed".
            marked = bool(actions) and actions[0].body.get('middleware_was_here') is True
            if marked:
                raise self.MiddlewareProcessedRequest()
            return send_request(request_id, meta, request, message_expiry_in_seconds)
        return handler
class TestApi(unittest.TestCase):
    """Smoke tests for the pyface.action public API module."""

    def test_api_importable(self):
        # Importing the api module must not raise.
        from pyface.action import api

    def test_public_attrs(self):
        from pyface.action import api
        public_names = [name for name in dir(api) if not name.startswith('_')]
        # Every advertised name must resolve to a real object.
        for public_name in public_names:
            with self.subTest(attr=public_name):
                self.assertIsNotNone(getattr(api, public_name, None))
class FleetClient(NamespacedClient):
    """Client for the Elasticsearch Fleet APIs (``/_fleet`` endpoints).

    FIX(review): in the source under review each ``_rewrite_parameters(...)``
    call appeared as a bare expression statement directly above a method —
    clearly decorators whose ``@`` was lost during code generation/round-trip.
    They are restored here; the method bodies are otherwise unchanged.
    """

    @_rewrite_parameters()
    async def global_checkpoints(
        self,
        *,
        index: str,
        checkpoints: t.Optional[t.Sequence[int]] = None,
        error_trace: t.Optional[bool] = None,
        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        human: t.Optional[bool] = None,
        pretty: t.Optional[bool] = None,
        timeout: t.Optional[t.Union['t.Literal[-1]', 't.Literal[0]', str]] = None,
        wait_for_advance: t.Optional[bool] = None,
        wait_for_index: t.Optional[bool] = None,
    ) -> ObjectApiResponse[t.Any]:
        """Return the current global checkpoints for *index*.

        :raises ValueError: if *index* is empty.
        """
        if index in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'index'")
        __path = f'/{_quote(index)}/_fleet/global_checkpoints'
        __query: t.Dict[str, t.Any] = {}
        # Forward every explicitly-set option as a query-string parameter.
        if checkpoints is not None:
            __query['checkpoints'] = checkpoints
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if pretty is not None:
            __query['pretty'] = pretty
        if timeout is not None:
            __query['timeout'] = timeout
        if wait_for_advance is not None:
            __query['wait_for_advance'] = wait_for_advance
        if wait_for_index is not None:
            __query['wait_for_index'] = wait_for_index
        __headers = {'accept': 'application/json'}
        return await self.perform_request('GET', __path, params=__query, headers=__headers)

    @_rewrite_parameters(body_name='searches')
    async def msearch(
        self,
        *,
        searches: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
        body: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
        index: t.Optional[str] = None,
        allow_no_indices: t.Optional[bool] = None,
        allow_partial_search_results: t.Optional[bool] = None,
        ccs_minimize_roundtrips: t.Optional[bool] = None,
        error_trace: t.Optional[bool] = None,
        expand_wildcards: t.Optional[
            t.Union[
                t.Sequence[t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str]],
                t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
            ]
        ] = None,
        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        human: t.Optional[bool] = None,
        ignore_throttled: t.Optional[bool] = None,
        ignore_unavailable: t.Optional[bool] = None,
        max_concurrent_searches: t.Optional[int] = None,
        max_concurrent_shard_requests: t.Optional[int] = None,
        pre_filter_shard_size: t.Optional[int] = None,
        pretty: t.Optional[bool] = None,
        rest_total_hits_as_int: t.Optional[bool] = None,
        search_type: t.Optional[t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]] = None,
        typed_keys: t.Optional[bool] = None,
        wait_for_checkpoints: t.Optional[t.Sequence[int]] = None,
    ) -> ObjectApiResponse[t.Any]:
        """Execute several fleet searches in one request (ndjson body)."""
        # Exactly one of ``searches`` / ``body`` must be provided.
        if searches is None and body is None:
            raise ValueError("Empty value passed for parameters 'searches' and 'body', one of them should be set.")
        elif searches is not None and body is not None:
            raise ValueError("Cannot set both 'searches' and 'body'")
        if index not in SKIP_IN_PATH:
            __path = f'/{_quote(index)}/_fleet/_fleet_msearch'
        else:
            __path = '/_fleet/_fleet_msearch'
        __query: t.Dict[str, t.Any] = {}
        if allow_no_indices is not None:
            __query['allow_no_indices'] = allow_no_indices
        if allow_partial_search_results is not None:
            __query['allow_partial_search_results'] = allow_partial_search_results
        if ccs_minimize_roundtrips is not None:
            __query['ccs_minimize_roundtrips'] = ccs_minimize_roundtrips
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if expand_wildcards is not None:
            __query['expand_wildcards'] = expand_wildcards
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if ignore_throttled is not None:
            __query['ignore_throttled'] = ignore_throttled
        if ignore_unavailable is not None:
            __query['ignore_unavailable'] = ignore_unavailable
        if max_concurrent_searches is not None:
            __query['max_concurrent_searches'] = max_concurrent_searches
        if max_concurrent_shard_requests is not None:
            __query['max_concurrent_shard_requests'] = max_concurrent_shard_requests
        if pre_filter_shard_size is not None:
            __query['pre_filter_shard_size'] = pre_filter_shard_size
        if pretty is not None:
            __query['pretty'] = pretty
        if rest_total_hits_as_int is not None:
            __query['rest_total_hits_as_int'] = rest_total_hits_as_int
        if search_type is not None:
            __query['search_type'] = search_type
        if typed_keys is not None:
            __query['typed_keys'] = typed_keys
        if wait_for_checkpoints is not None:
            __query['wait_for_checkpoints'] = wait_for_checkpoints
        __body = searches if searches is not None else body
        # msearch bodies are newline-delimited JSON.
        __headers = {'accept': 'application/json', 'content-type': 'application/x-ndjson'}
        return await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body)

    @_rewrite_parameters(
        body_fields=('aggregations', 'aggs', 'collapse', 'docvalue_fields', 'explain', 'ext', 'fields', 'from_', 'highlight', 'indices_boost', 'min_score', 'pit', 'post_filter', 'profile', 'query', 'rescore', 'runtime_mappings', 'script_fields', 'search_after', 'seq_no_primary_term', 'size', 'slice', 'sort', 'source', 'stats', 'stored_fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'track_total_hits', 'version'),
        parameter_aliases={'_source': 'source', '_source_excludes': 'source_excludes', '_source_includes': 'source_includes', 'from': 'from_'},
    )
    async def search(
        self,
        *,
        index: str,
        aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
        aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
        allow_no_indices: t.Optional[bool] = None,
        allow_partial_search_results: t.Optional[bool] = None,
        analyze_wildcard: t.Optional[bool] = None,
        analyzer: t.Optional[str] = None,
        batched_reduce_size: t.Optional[int] = None,
        ccs_minimize_roundtrips: t.Optional[bool] = None,
        collapse: t.Optional[t.Mapping[str, t.Any]] = None,
        default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
        df: t.Optional[str] = None,
        docvalue_fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
        error_trace: t.Optional[bool] = None,
        expand_wildcards: t.Optional[
            t.Union[
                t.Sequence[t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str]],
                t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
            ]
        ] = None,
        explain: t.Optional[bool] = None,
        ext: t.Optional[t.Mapping[str, t.Any]] = None,
        fields: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None,
        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        from_: t.Optional[int] = None,
        highlight: t.Optional[t.Mapping[str, t.Any]] = None,
        human: t.Optional[bool] = None,
        ignore_throttled: t.Optional[bool] = None,
        ignore_unavailable: t.Optional[bool] = None,
        indices_boost: t.Optional[t.Sequence[t.Mapping[str, float]]] = None,
        lenient: t.Optional[bool] = None,
        max_concurrent_shard_requests: t.Optional[int] = None,
        min_compatible_shard_node: t.Optional[str] = None,
        min_score: t.Optional[float] = None,
        pit: t.Optional[t.Mapping[str, t.Any]] = None,
        post_filter: t.Optional[t.Mapping[str, t.Any]] = None,
        pre_filter_shard_size: t.Optional[int] = None,
        preference: t.Optional[str] = None,
        pretty: t.Optional[bool] = None,
        profile: t.Optional[bool] = None,
        q: t.Optional[str] = None,
        query: t.Optional[t.Mapping[str, t.Any]] = None,
        request_cache: t.Optional[bool] = None,
        rescore: t.Optional[t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]]] = None,
        rest_total_hits_as_int: t.Optional[bool] = None,
        routing: t.Optional[str] = None,
        runtime_mappings: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
        script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
        scroll: t.Optional[t.Union['t.Literal[-1]', 't.Literal[0]', str]] = None,
        search_after: t.Optional[t.Sequence[t.Union[None, bool, float, int, str, t.Any]]] = None,
        search_type: t.Optional[t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]] = None,
        seq_no_primary_term: t.Optional[bool] = None,
        size: t.Optional[int] = None,
        slice: t.Optional[t.Mapping[str, t.Any]] = None,
        sort: t.Optional[t.Union[t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], t.Union[str, t.Mapping[str, t.Any]]]] = None,
        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
        source_excludes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        source_includes: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        stats: t.Optional[t.Sequence[str]] = None,
        stored_fields: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        suggest: t.Optional[t.Mapping[str, t.Any]] = None,
        suggest_field: t.Optional[str] = None,
        suggest_mode: t.Optional[t.Union["t.Literal['always', 'missing', 'popular']", str]] = None,
        suggest_size: t.Optional[int] = None,
        suggest_text: t.Optional[str] = None,
        terminate_after: t.Optional[int] = None,
        timeout: t.Optional[str] = None,
        track_scores: t.Optional[bool] = None,
        track_total_hits: t.Optional[t.Union[bool, int]] = None,
        typed_keys: t.Optional[bool] = None,
        version: t.Optional[bool] = None,
        wait_for_checkpoints: t.Optional[t.Sequence[int]] = None,
        body: t.Optional[t.Dict[str, t.Any]] = None,
    ) -> ObjectApiResponse[t.Any]:
        """Run a Fleet search, optionally waiting for checkpoints.

        :raises ValueError: if *index* is empty.
        """
        if index in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'index'")
        __path = f'/{_quote(index)}/_fleet/_fleet_search'
        __query: t.Dict[str, t.Any] = {}
        __body: t.Dict[str, t.Any] = body if body is not None else {}
        # ``sort`` values of the form "field:direction" must travel in the
        # query string rather than the request body.
        if sort is not None and (
            (isinstance(sort, str) and ':' in sort)
            or (
                isinstance(sort, (list, tuple))
                and all(isinstance(_x, str) for _x in sort)
                and any(':' in _x for _x in sort)
            )
        ):
            __query['sort'] = sort
            sort = None
        if allow_no_indices is not None:
            __query['allow_no_indices'] = allow_no_indices
        if allow_partial_search_results is not None:
            __query['allow_partial_search_results'] = allow_partial_search_results
        if analyze_wildcard is not None:
            __query['analyze_wildcard'] = analyze_wildcard
        if analyzer is not None:
            __query['analyzer'] = analyzer
        if batched_reduce_size is not None:
            __query['batched_reduce_size'] = batched_reduce_size
        if ccs_minimize_roundtrips is not None:
            __query['ccs_minimize_roundtrips'] = ccs_minimize_roundtrips
        if default_operator is not None:
            __query['default_operator'] = default_operator
        if df is not None:
            __query['df'] = df
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if expand_wildcards is not None:
            __query['expand_wildcards'] = expand_wildcards
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if ignore_throttled is not None:
            __query['ignore_throttled'] = ignore_throttled
        if ignore_unavailable is not None:
            __query['ignore_unavailable'] = ignore_unavailable
        if lenient is not None:
            __query['lenient'] = lenient
        if max_concurrent_shard_requests is not None:
            __query['max_concurrent_shard_requests'] = max_concurrent_shard_requests
        if min_compatible_shard_node is not None:
            __query['min_compatible_shard_node'] = min_compatible_shard_node
        if pre_filter_shard_size is not None:
            __query['pre_filter_shard_size'] = pre_filter_shard_size
        if preference is not None:
            __query['preference'] = preference
        if pretty is not None:
            __query['pretty'] = pretty
        if q is not None:
            __query['q'] = q
        if request_cache is not None:
            __query['request_cache'] = request_cache
        if rest_total_hits_as_int is not None:
            __query['rest_total_hits_as_int'] = rest_total_hits_as_int
        if routing is not None:
            __query['routing'] = routing
        if scroll is not None:
            __query['scroll'] = scroll
        if search_type is not None:
            __query['search_type'] = search_type
        if source_excludes is not None:
            __query['_source_excludes'] = source_excludes
        if source_includes is not None:
            __query['_source_includes'] = source_includes
        if suggest_field is not None:
            __query['suggest_field'] = suggest_field
        if suggest_mode is not None:
            __query['suggest_mode'] = suggest_mode
        if suggest_size is not None:
            __query['suggest_size'] = suggest_size
        if suggest_text is not None:
            __query['suggest_text'] = suggest_text
        if typed_keys is not None:
            __query['typed_keys'] = typed_keys
        if wait_for_checkpoints is not None:
            __query['wait_for_checkpoints'] = wait_for_checkpoints
        # Body fields are only filled from keyword args when no raw ``body``
        # was supplied (a raw body wins wholesale).
        if not __body:
            if aggregations is not None:
                __body['aggregations'] = aggregations
            if aggs is not None:
                __body['aggs'] = aggs
            if collapse is not None:
                __body['collapse'] = collapse
            if docvalue_fields is not None:
                __body['docvalue_fields'] = docvalue_fields
            if explain is not None:
                __body['explain'] = explain
            if ext is not None:
                __body['ext'] = ext
            if fields is not None:
                __body['fields'] = fields
            if from_ is not None:
                __body['from'] = from_
            if highlight is not None:
                __body['highlight'] = highlight
            if indices_boost is not None:
                __body['indices_boost'] = indices_boost
            if min_score is not None:
                __body['min_score'] = min_score
            if pit is not None:
                __body['pit'] = pit
            if post_filter is not None:
                __body['post_filter'] = post_filter
            if profile is not None:
                __body['profile'] = profile
            if query is not None:
                __body['query'] = query
            if rescore is not None:
                __body['rescore'] = rescore
            if runtime_mappings is not None:
                __body['runtime_mappings'] = runtime_mappings
            if script_fields is not None:
                __body['script_fields'] = script_fields
            if search_after is not None:
                __body['search_after'] = search_after
            if seq_no_primary_term is not None:
                __body['seq_no_primary_term'] = seq_no_primary_term
            if size is not None:
                __body['size'] = size
            if slice is not None:
                __body['slice'] = slice
            if sort is not None:
                __body['sort'] = sort
            if source is not None:
                __body['_source'] = source
            if stats is not None:
                __body['stats'] = stats
            if stored_fields is not None:
                __body['stored_fields'] = stored_fields
            if suggest is not None:
                __body['suggest'] = suggest
            if terminate_after is not None:
                __body['terminate_after'] = terminate_after
            if timeout is not None:
                __body['timeout'] = timeout
            if track_scores is not None:
                __body['track_scores'] = track_scores
            if track_total_hits is not None:
                __body['track_total_hits'] = track_total_hits
            if version is not None:
                __body['version'] = version
        # Send no body at all when nothing was set.
        if not __body:
            __body = None
        __headers = {'accept': 'application/json'}
        if __body is not None:
            __headers['content-type'] = 'application/json'
        return await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body)
def map_scores_html(sequence, mapping_dict=emaps.fastq_emoji_map, default_value=':heart_eyes:', mapping_function=emojify, spacer=' '):
    """Render a sequence's Sanger quality scores as spacer-joined emoji markup.

    Each quality character is looked up in *mapping_dict* (falling back to
    *default_value*), passed through *mapping_function*, and the results are
    joined with *spacer*.
    """
    quality_chars = QualityIO._get_sanger_quality_str(sequence)
    rendered = [mapping_function(mapping_dict.get(ch, default_value)) for ch in quality_chars]
    return spacer.join(rendered)
class TestTimeTicks(ByteTester):
    """Encode/decode and Python round-trip tests for the TimeTicks type."""

    def test_decoding(self):
        decoded = t.TimeTicks.decode_raw(b'\n')
        self.assertEqual(decoded, 10)

    def test_encoding(self):
        encoded = bytes(t.TimeTicks(100))
        self.assertBytesEqual(encoded, b'C\x01d')

    def test_conversion_to_python(self):
        delta = timedelta(days=2, hours=4, minutes=28, seconds=20, microseconds=410000)
        self.assertEqual(t.TimeTicks().pythonize(), delta)

    def test_conversion_from_python(self):
        delta = timedelta(days=2, hours=4, minutes=28, seconds=20, microseconds=410000)
        self.assertEqual(t.TimeTicks(delta), t.TimeTicks())

    def test_conversion_symmetry(self):
        delta = timedelta(days=2, hours=4, minutes=28, seconds=20, microseconds=410000)
        self.assertEqual(t.TimeTicks(delta).pythonize(), delta)
class OptionPlotoptionsBubbleSonificationTracksMappingHighpassResonance(Options):
    """Highcharts bubble sonification highpass-filter resonance mapping options.

    FIX(review): the original defined each getter/setter pair as two plain
    methods with the same name, so each setter silently shadowed its getter —
    clearly ``@property`` / ``@<name>.setter`` decorators lost during code
    generation. They are restored here; each accessor body is unchanged.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def reader(source, path):
    """Pick and instantiate a reader for *path*.

    Resolution order:
      1. an explicit ``source.reader`` attribute (callable or registered name),
      2. a ``DirectoryReader`` when *path* is a directory,
      3. magic-byte detection over the registered readers (two passes:
         cheap checks first, then "deeper" checks),
      4. an ``Unknown`` fallback reader.
    """
    assert isinstance(path, str), source
    if hasattr(source, 'reader'):
        reader = source.reader
        LOG.debug('Looking for a reader for %s (%s)', path, reader)
        if callable(reader):
            return reader(source, path)
        if isinstance(reader, str):
            # Registered names use underscores; accept dashed spellings too.
            return _readers()[reader.replace('-', '_')](source, path, None, False)
        raise TypeError(('Provided reader must be a callable or a string, not %s' % type(reader)))
    if os.path.isdir(path):
        # Local import — presumably to avoid a circular import at load time.
        from .directory import DirectoryReader
        return DirectoryReader(source, path).mutate()
    LOG.debug('Reader for %s', path)
    with open(path, 'rb') as f:
        # The first 8 bytes are enough for the magic-number checks below.
        magic = f.read(8)
    LOG.debug('Looking for a reader for %s (%s)', path, magic)
    # Two passes: first let readers match on cheap magic checks alone,
    # then allow the more expensive deeper content checks.
    for deeper_check in (False, True):
        for (name, r) in _readers().items():
            reader = r(source, path, magic, deeper_check)
            if (reader is not None):
                return reader.mutate()
    from .unknown import Unknown
    return Unknown(source, path, magic)
class TextHash():
    """Length-adaptive text fingerprint: simhash for short texts, shingle
    prints for long ones, with a matching near-duplicate comparator.
    """

    # Maximum simhash Hamming distance for two texts to count as duplicates.
    simhash_threshold = 10
    # Minimum ShinglePrint similarity score for two texts to count as duplicates.
    shingleprint_threshold = 0.95

    def __init__(self, text):
        self.text = text
        # NOTE(review): the original had two byte-identical branches for
        # len < 32 and len < 256 (both simhash); they are merged here.
        # ``compare`` also handles a 'textmatch' type that __init__ never
        # produces — possibly the short-text branch was meant to be an
        # exact match; behavior is preserved as written. TODO confirm.
        if len(text) < 256:
            self.hash = {'type': 'simhash', 'value': Simhash(text).value}
        else:
            self.hash = {'type': 'shingleprint', 'value': ShinglePrint(text).features}

    # FIX(review): this and ``compare`` were defined without ``self`` and
    # without ``@staticmethod`` — calls through an instance would have
    # mis-bound the first argument. Marked static (backward compatible for
    # the existing ``TextHash.simhash_distance(...)`` class-level calls).
    @staticmethod
    def simhash_distance(s1, s2):
        """Hamming distance between two 64-bit simhash values."""
        x = (s1 ^ s2) & ((1 << 64) - 1)
        distance = 0
        while x:
            distance += 1
            x &= x - 1  # clear the lowest set bit
        return distance

    @staticmethod
    def compare(h1, h2):
        """Return True if two hash dicts denote (near-)duplicate texts."""
        if (not h1) or (not h2):
            return False
        if h1['type'] != h2['type']:
            return False
        if h1['type'] == 'textmatch':
            return h1['value'] == h2['value']
        if h1['type'] == 'simhash':
            return TextHash.simhash_distance(h1['value'], h2['value']) <= TextHash.simhash_threshold
        # shingleprint (the original's unreachable trailing ``return False``
        # after this full if/elif/else was removed)
        return ShinglePrint.score(h1['value'], h2['value']) >= TextHash.shingleprint_threshold
class TestExtendedLang(util.MdCase):
    """Tests for pymdownx.highlight's ``extend_pygments_lang`` option.

    Registers a ``php-inline`` alias for the ``php`` lexer with
    ``startinline=True`` and checks it is honored by inlinehilite and
    superfences (including case-insensitive language names).
    """

    extension = ['pymdownx.highlight', 'pymdownx.superfences', 'pymdownx.inlinehilite']
    extension_configs = {'pymdownx.highlight': {'extend_pygments_lang': [{'name': 'php-inline', 'lang': 'php', 'options': {'startinline': True}}]}}

    def test_extended_lang_inlinehilite(self):
        """The extended language alias works in inline code spans."""
        self.check_markdown('\n    `#!php-inline $a = array("foo" => 0, "bar" => 1);`\n    ', '\n    <p><code class="highlight"><span class="nv">$a</span> <span class="o">=</span> <span class="k">array</span><span class="p">(</span><span class="s2">&quot;foo&quot;</span> <span class="o">=&gt;</span> <span class="mi">0</span><span class="p">,</span> <span class="s2">&quot;bar&quot;</span> <span class="o">=&gt;</span> <span class="mi">1</span><span class="p">);</span></code></p>\n    ', True)

    def test_extended_lang_superfences(self):
        """The extended language alias works in fenced code blocks."""
        self.check_markdown('\n    ```php-inline\n    $a = array("foo" => 0, "bar" => 1);\n    ```\n    ', '\n    <div class="highlight"><pre><span></span><code><span class="nv">$a</span> <span class="o">=</span> <span class="k">array</span><span class="p">(</span><span class="s2">&quot;foo&quot;</span> <span class="o">=&gt;</span> <span class="mi">0</span><span class="p">,</span> <span class="s2">&quot;bar&quot;</span> <span class="o">=&gt;</span> <span class="mi">1</span><span class="p">);</span>\n    </code></pre></div>\n    ', True)

    def test_extended_lang_case(self):
        """Language-name matching is case-insensitive (``PHP-Inline``)."""
        self.check_markdown('\n    ```PHP-Inline\n    $a = array("foo" => 0, "bar" => 1);\n    ```\n    ', '\n    <div class="highlight"><pre><span></span><code><span class="nv">$a</span> <span class="o">=</span> <span class="k">array</span><span class="p">(</span><span class="s2">&quot;foo&quot;</span> <span class="o">=&gt;</span> <span class="mi">0</span><span class="p">,</span> <span class="s2">&quot;bar&quot;</span> <span class="o">=&gt;</span> <span class="mi">1</span><span class="p">);</span>\n    </code></pre></div>\n    ', True)
def connect_to_pslab(experiment_type, retries=2, retry_delay=1):
    """Connect to the PSLab device, retrying on serial errors.

    The original duplicated the try/except by hand (one retry after a 1 s
    sleep); this generalizes that to a loop with backward-compatible
    defaults reproducing the exact original behaviour.

    :param experiment_type: experiment identifier forwarded to ``get_device``.
    :param retries: total number of connection attempts (default 2).
    :param retry_delay: seconds to sleep between attempts (default 1).
    :return: the device, or ``None`` if it could not be accessed.
    """
    for attempt in range(retries):
        try:
            return get_device(experiment_type)
        except serial.SerialException:
            if attempt < retries - 1:
                time.sleep(retry_delay)
    print('PSLab cannot be accessed.')
    return None
def pod_exec(args: List[str], *, name: str, namespace: str, container: str=None, timeout: float=float('inf')) -> Tuple[(str, str)]:
    """Run *args* inside a pod via the Kubernetes exec websocket API.

    Returns ``(stdout, stderr)`` on success. Raises ``TimeoutExpired`` if the
    stream closed without an error-channel status (treated as a timeout),
    ``CalledProcessError`` for a non-zero exit code, or ``RuntimeError`` for
    any other error-channel status.
    """
    core_v1_api = kubernetes.client.CoreV1Api()
    logger.debug('Running command in pod {}/{}: {}.'.format(namespace, name, ' '.join(args)))
    # Open the exec websocket without preloading so we can pump it ourselves.
    ws_client = stream(core_v1_api.connect_post_namespaced_pod_exec, name, namespace, command=args, container=container, stderr=True, stdin=False, stdout=True, tty=False, _preload_content=False)
    start = time.time()
    # Pump the websocket until it closes or the deadline passes.
    while (ws_client.is_open() and ((time.time() - start) < timeout)):
        ws_client.update(timeout=((timeout - time.time()) + start))
    # Drain whatever arrived on each channel (non-blocking reads).
    stdout_channel = ws_client.read_channel(STDOUT_CHANNEL, timeout=0)
    stderr_channel = ws_client.read_channel(STDERR_CHANNEL, timeout=0)
    error_channel = ws_client.read_channel(ERROR_CHANNEL, timeout=0)
    ws_client.close()
    if (error_channel == ''):
        # No status arrived on the error channel: the loop above must have
        # hit the deadline before the command finished.
        raise TimeoutExpired(cmd=args, timeout=timeout, output=stdout_channel, stderr=stderr_channel)
    else:
        # The error channel carries a JSON V1Status-like object.
        error_channel_object = json.loads(error_channel)
        assert isinstance(error_channel_object, dict)
        assert ('status' in error_channel_object)
        if (error_channel_object['status'] == 'Success'):
            pass
        elif ((error_channel_object['status'] == 'Failure') and ('reason' in error_channel_object) and (error_channel_object['reason'] == 'NonZeroExitCode')):
            # Dig the numeric exit code out of the status causes and surface
            # it like subprocess would.
            assert ('details' in error_channel_object)
            assert ('causes' in error_channel_object['details'])
            assert isinstance(error_channel_object['details']['causes'], list)
            for cause in error_channel_object['details']['causes']:
                assert ('reason' in cause)
                if (cause['reason'] != 'ExitCode'):
                    continue
                assert ('message' in cause)
                raise CalledProcessError(returncode=int(cause['message']), cmd=args, output=stdout_channel, stderr=stderr_channel)
        else:
            raise RuntimeError(f"Unknown stream status: {error_channel_object['status']}/{error_channel_object.get('reason', 'mot-set')}.")
    return (stdout_channel, stderr_channel)
# FIX(review): the two bare tuples preceding this test are lost
# ``@pytest.mark.parametrize`` decorators (without them the test errors at
# collection: fixtures 'config_path'/'config_name' not found). Restored here;
# requires the module's ``pytest`` import — TODO confirm import spelling.
@pytest.mark.parametrize('config_path', ['dir1', 'dir2', os.path.abspath('tests/test_apps/app_with_multiple_config_dirs/dir2')])
@pytest.mark.parametrize('config_name', ['cfg1', 'cfg2'])
def test_config_name_and_path_overrides(tmpdir: Path, config_path: str, config_name: str) -> None:
    """--config-name/--config-path overrides select the right config file."""
    cmd = ['tests/test_apps/app_with_multiple_config_dirs/my_app.py', ('hydra.run.dir=' + str(tmpdir)), 'hydra.job.chdir=True', f'--config-name={config_name}', f'--config-path={config_path}']
    (result, _err) = run_python_script(cmd)
    # Normalize Windows line endings before comparing.
    result = result.replace('\r\n', '\n')
    assert (result == f'{os.path.basename(config_path)}_{config_name}: true')
def test_get_collections(mocker):
    """Qdrant.get_collections should delegate to the underlying client."""
    client_cls = mocker.patch('qdrant_client.QdrantClient', autospec=True)
    # Any env var the wrapper reads resolves to a dummy value.
    mocker.patch('os.getenv', side_effect=(lambda x: 'dummy_value'))
    qdrant = Qdrant(env_file_path='/path/to/your/env/file')
    expected = ['test_collection1', 'test_collection2']
    client_cls.return_value.get_collections.return_value = expected
    actual = qdrant.get_collections()
    client_cls.return_value.get_collections.assert_called_once()
    assert actual == expected
class HtmlOverlayStates():
    """Helpers building the JavaScript calls that show/hide loading and
    error overlay states on a component (backed by ``StateTemplate.js``).
    """

    def _add_resource(self) -> str:
        """Register the StateTemplate.js resource with the page and return
        the JS constructor name.

        Prefers the ``NATIVE_JS_PATH`` environment variable; falls back to
        the library's bundled ``js/native/utils`` directory. Raises
        ``ValueError`` if the file exists in neither location.
        """
        native_path = os.environ.get('NATIVE_JS_PATH')
        js_state_file = 'StateTemplate.js'
        js_state_name = 'stateTemplate'
        internal_native_path = Path(Path(__file__).resolve().parent, '..', '..', 'js', 'native', 'utils')
        if (native_path is None):
            native_path = internal_native_path
        native_builder = Path(native_path, js_state_file)
        internal_native_builder = Path(internal_native_path, js_state_file)
        if native_builder.exists():
            self.page.js.customFile(js_state_file, path=native_path, authorize=True)
            self.page.properties.js.add_constructor(js_state_name, None)
        elif internal_native_builder.exists():
            self.page.js.customFile(js_state_file, path=internal_native_builder, authorize=True)
            self.page.properties.js.add_constructor(js_state_name, None)
        else:
            raise ValueError(('%s does not exist' % js_state_file))
        return js_state_name

    def hide_state(self, component_id: Optional[str]=None):
        """Return the JS snippet hiding the overlay state.

        Targets the component's own container when *component_id* is None,
        otherwise the element with that id.
        """
        self._add_resource()
        if (component_id is None):
            return ('hideState(%s)' % self.dom.container)
        return ('hideState(document.getElementById(%s))' % JsUtils.jsConvertData(component_id, None))

    def state(self, status: bool=True, label: str=None, data: types.JS_DATA_TYPES=None, options: types.OPTION_TYPE=None, css_attrs: types.JS_DATA_TYPES=None, component_id: Optional[str]=None, mode: str='loading'):
        """Return the JS call toggling an overlay state on the component.

        :param status: whether to show (True) or hide the state.
        :param label: optional template text, stored on the matching
            ``templateLoading`` / ``templateError`` option depending on *mode*.
        :param data: values injected into the template (dict or JS data).
        :param options: extra state options; ``templateMode`` is set to *mode*.
        :param css_attrs: CSS overrides; a default background from the theme
            is added when not supplied.
        :param component_id: target element id (defaults to this component).
        :param mode: 'loading' or 'error'.
        """
        js_state_name = self._add_resource()
        if (label is not None):
            if (mode == 'loading'):
                self.options.templateLoading = label
            elif (mode == 'error'):
                self.options.templateError = label
        if isinstance(data, dict):
            # Convert each key/value to JS and assemble an object literal.
            tmp_data = [('%s: %s' % (JsUtils.jsConvertData(k, None), JsUtils.jsConvertData(v, None))) for (k, v) in data.items()]
            js_data = ('{%s}' % ','.join(tmp_data))
        else:
            js_data = JsUtils.jsConvertData(data, None)
        css_attrs = (css_attrs or {})
        # Fill in theme defaults without overriding caller-provided CSS.
        dflt_css_attrs = {'background': self.page.theme.greys[0]}
        for (k, v) in dflt_css_attrs.items():
            if (k not in css_attrs):
                css_attrs[k] = v
        options = (options or {})
        options['templateMode'] = mode
        return ('%s(%s, %s, %s, %s, %s)' % (js_state_name, JsUtils.jsConvertData(status, None), (component_id or self.dom.container), js_data, self.options.config_js(options), JsUtils.jsConvertData(css_attrs, None)))

    def loading(self, status: bool=True, label: str=None, data: types.JS_DATA_TYPES=None, options: types.OPTION_TYPE=None, css_attrs: types.JS_DATA_TYPES=None, component_id: Optional[str]=None):
        """Shortcut for ``state(..., mode='loading')`` with a default label."""
        if ((label is None) and (self.options.templateLoading is None)):
            label = Default_html.TEMPLATE_LOADING_ONE_LINE
        return self.state(status, label, data, options, css_attrs, component_id, mode='loading')

    def error(self, status: bool=True, label: str=None, data: types.JS_DATA_TYPES=None, options: types.OPTION_TYPE=None, css_attrs: types.JS_DATA_TYPES=None, component_id: Optional[str]=None):
        """Shortcut for ``state(..., mode='error')`` with a default label."""
        if ((label is None) and (self.options.templateError is None)):
            label = Default_html.TEMPLATE_ERROR_ONE_LINE
        return self.state(status, label, data, options, css_attrs, component_id, mode='error')
# FIX(review): the bare ``(scope='function')`` preceding this function is a
# syntax error as written — clearly a ``@pytest.fixture(scope='function')``
# decorator whose ``@pytest.fixture`` prefix was lost. Restored here; relies
# on the module's ``pytest`` import — TODO confirm import spelling.
@pytest.fixture(scope='function')
def snowflake_connection_config(db: Session, integration_config: Dict[str, str], snowflake_connection_config_without_secrets: ConnectionConfig) -> Generator:
    """Yield a Snowflake ConnectionConfig populated with real secrets.

    Secrets come from the integration config file, falling back to
    ``SNOWFLAKE_TEST_*`` environment variables; they are only attached when
    every value is present. The config is deleted again on teardown.
    """
    connection_config = snowflake_connection_config_without_secrets
    account_identifier = (integration_config.get('snowflake', {}).get('account_identifier') or os.environ.get('SNOWFLAKE_TEST_ACCOUNT_IDENTIFIER'))
    user_login_name = (integration_config.get('snowflake', {}).get('user_login_name') or os.environ.get('SNOWFLAKE_TEST_USER_LOGIN_NAME'))
    password = (integration_config.get('snowflake', {}).get('password') or os.environ.get('SNOWFLAKE_TEST_PASSWORD'))
    warehouse_name = (integration_config.get('snowflake', {}).get('warehouse_name') or os.environ.get('SNOWFLAKE_TEST_WAREHOUSE_NAME'))
    database_name = (integration_config.get('snowflake', {}).get('database_name') or os.environ.get('SNOWFLAKE_TEST_DATABASE_NAME'))
    schema_name = (integration_config.get('snowflake', {}).get('schema_name') or os.environ.get('SNOWFLAKE_TEST_SCHEMA_NAME'))
    # Attach secrets only when the full credential set is available.
    if all([account_identifier, user_login_name, password, warehouse_name, database_name, schema_name]):
        schema = SnowflakeSchema(account_identifier=account_identifier, user_login_name=user_login_name, password=password, warehouse_name=warehouse_name, database_name=database_name, schema_name=schema_name)
        connection_config.secrets = schema.dict()
        connection_config.save(db=db)
    yield connection_config
    connection_config.delete(db)
def test_bkz_call(block_size=10):
    """Smoke-test BKZ and BKZ2 reduction on random lattices of each dimension."""
    params = fplll_bkz.Param(block_size=block_size, flags=fplll_bkz.VERBOSE | fplll_bkz.GH_BND)
    for reduction_cls in (BKZ, BKZ2):
        for dim in dimensions:
            # Seed per dimension so bases are reproducible across runs.
            FPLLL.set_random_seed(dim)
            original = make_integer_matrix(dim)
            basis = copy(original)
            reduction_cls(basis)(params=params)
def upgrade():
    """Alembic upgrade: move inline ``privacy_declarations`` off
    ``ctl_systems`` into a dedicated ``privacydeclaration`` table.

    Creates the new table and indexes, copies every existing declaration
    across (minting a fresh ``pri_<uuid>`` id per row), then drops the old
    JSON column.
    """
    # New normalized table, one row per privacy declaration, FK'd to its system.
    op.create_table('privacydeclaration', sa.Column('id', sa.String(length=255), nullable=False), sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), sa.Column('name', sa.String(), nullable=True), sa.Column('egress', sa.ARRAY(sa.String()), nullable=True), sa.Column('ingress', sa.ARRAY(sa.String()), nullable=True), sa.Column('data_use', sa.String(), nullable=False), sa.Column('data_categories', sa.ARRAY(sa.String()), nullable=True), sa.Column('data_qualifier', sa.String(), nullable=True), sa.Column('data_subjects', sa.ARRAY(sa.String()), nullable=True), sa.Column('dataset_references', sa.ARRAY(sa.String()), nullable=True), sa.Column('system_id', sa.String(), nullable=False), sa.ForeignKeyConstraint(['system_id'], ['ctl_systems.id']), sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_privacydeclaration_data_use'), 'privacydeclaration', ['data_use'], unique=False)
    op.create_index(op.f('ix_privacydeclaration_id'), 'privacydeclaration', ['id'], unique=False)
    op.create_index(op.f('ix_privacydeclaration_name'), 'privacydeclaration', ['name'], unique=False)
    op.create_index(op.f('ix_privacydeclaration_system_id'), 'privacydeclaration', ['system_id'], unique=False)
    # Data migration: copy the JSON declarations into the new table.
    bind = op.get_bind()
    existing_declarations = bind.execute(text('SELECT id, privacy_declarations FROM ctl_systems;'))
    for row in existing_declarations:
        system_id = row['id']
        old_privacy_declarations = row['privacy_declarations']
        for privacy_declaration in old_privacy_declarations:
            # Mint a fresh primary key for each migrated declaration.
            new_privacy_declaration_id: str = ('pri_' + str(uuid.uuid4()))
            new_data = {**privacy_declaration, 'system_id': system_id, 'id': new_privacy_declaration_id}
            insert_privacy_declarations_query = text('INSERT INTO privacydeclaration (id, name, data_categories, data_qualifier, data_subjects, dataset_references, egress, ingress, system_id, data_use) VALUES (:id, :name, :data_categories, :data_qualifier, :data_subjects, :dataset_references, :egress, :ingress, :system_id, :data_use)')
            bind.execute(insert_privacy_declarations_query, new_data)
    # Old inline column is no longer needed.
    op.drop_column('ctl_systems', 'privacy_declarations')
def parse_turbo_mos(text):
    """Parse a Turbomole ``$scfmo`` section and return the MO coefficient
    matrix with one column per molecular orbital.
    """
    float_20 = make_float_class(exact=20)
    integer = pp.Word(pp.nums)
    comment = pp.Literal('#') + pp.restOfLine
    # One MO entry: "<num> <sym> eigenvalue=<e> nsaos=<n> <coefficients...>"
    mo_header = integer + pp.Word(pp.alphanums) + pp.Literal('eigenvalue=') + float_20 + pp.Literal('nsaos=') + integer
    coefficients = pp.OneOrMore(float_20).setResultsName('mo_coeffs')
    mo_entry = pp.Group(mo_header + coefficients)
    parser = (
        pp.Literal('$scfmo')
        + pp.Literal('scfconv=')
        + pp.Word(pp.nums)
        + pp.Literal('format(4d20.14) ')
        + pp.ZeroOrMore(comment)
        + pp.OneOrMore(mo_entry).setResultsName('mos')
        + pp.Literal('$end')
    )
    parsed = parser.parseString(text)
    # Rows come out per-MO; transpose so each MO is a column.
    return np.array([entry.mo_coeffs.asList() for entry in parsed.mos]).T
def extractHostednovelCom(item):
    """Parse a HostedNovel feed item title into a release message.

    Returns None for previews/unparseable titles, a release message for
    recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def main():
    """Ansible entry point for the FortiOS monitor-fact module.

    Validates that exactly one of 'selector'/'selectors' is given, connects via
    the httpapi socket, runs fortios_monitor_fact once (selector) or once per
    entry (selectors), and exits with the collected results.
    """
    # All monitor endpoints this module can query. The generated module repeats
    # this list verbatim for both the top-level 'selector' option and the
    # per-entry 'selectors[].selector' option — deduplicated here.
    selector_choices = [
        'endpoint-control_profile_xml', 'endpoint-control_record-list', 'endpoint-control_registration_summary',
        'endpoint-control_installer', 'endpoint-control_installer_download', 'endpoint-control_avatar_download',
        'firewall_health', 'firewall_local-in', 'firewall_acl', 'firewall_acl6',
        'firewall_internet-service-match', 'firewall_internet-service-details', 'firewall_policy',
        'firewall_policy6', 'firewall_proxy-policy', 'firewall_policy-lookup', 'firewall_session',
        'firewall_shaper', 'firewall_per-ip-shaper', 'firewall_load-balance', 'firewall_address-fqdns',
        'firewall_address-fqdns6', 'firewall_ippool', 'firewall_address-dynamic', 'firewall_address6-dynamic',
        'fortiview_statistics', 'fortiview_sandbox-file-details', 'geoip_geoip-query', 'ips_rate-based',
        'license_status', 'license_forticare-resellers', 'license_forticare-org-list',
        'log_current-disk-usage', 'log_device_state', 'log_forticloud', 'log_fortianalyzer',
        'log_fortianalyzer-queue', 'log_hourly-disk-usage', 'log_historic-daily-remote-logs', 'log_stats',
        'log_forticloud-report_download', 'log_ips-archive_download', 'log_policy-archive_download',
        'log_av-archive_download', 'log_event', 'registration_forticloud_disclaimer',
        'registration_forticloud_domains', 'router_ipv4', 'router_ipv6', 'router_statistics',
        'router_lookup', 'router_policy', 'router_policy6', 'system_config-revision',
        'system_config-revision_file', 'system_config-revision_info', 'system_current-admins',
        'system_time', 'system_global-resources', 'system_vdom-resource', 'system_dhcp',
        'system_firmware', 'system_firmware_upgrade-paths', 'system_storage', 'system_csf',
        'system_csf_pending-authorizations', 'system_modem', 'system_3g-modem', 'system_resource_usage',
        'system_sniffer', 'system_sniffer_download', 'system_automation-stitch_stats',
        'switch-controller_managed-switch', 'switch-controller_managed-switch_faceplate-xml',
        'switch-controller_managed-switch_dhcp-snooping', 'switch-controller_fsw-firmware',
        'switch-controller_detected-device', 'switch-controller_validate-switch-prefix',
        'system_interface', 'system_interface_dhcp-status', 'system_available-interfaces',
        'system_acquired-dns', 'system_resolve-fqdn', 'system_nat46-ippools', 'system_usb-log',
        'system_ipconf', 'system_fortiguard_server-info', 'system_fortimanager_status',
        'system_fortimanager_backup-summary', 'system_fortimanager_backup-details',
        'system_available-certificates', 'system_certificate_download', 'system_debug_download',
        'system_com-log_update', 'system_com-log_download', 'system_botnet_stat', 'system_botnet',
        'system_botnet-domains', 'system_botnet-domains_stat', 'system_botnet-domains_hits',
        'system_ha-statistics', 'system_ha-history', 'system_ha-checksums', 'system_ha-peer',
        'system_link-monitor', 'system_config_backup', 'system_config_usb-filelist',
        'system_sandbox_stats', 'system_sandbox_status', 'system_sandbox_test-connect',
        'system_object_usage', 'system_object-tagging_usage', 'system_status', 'system_timezone',
        'system_sensor-info', 'system_security-rating', 'system_security-rating_history',
        'system_security-rating_status', 'system_security-rating_lang', 'system_fortiguard-blacklist',
        'system_check-port-availability', 'system_external-resource_entry-list',
        'extender-controller_extender', 'system_sdn-connector_status', 'user_firewall', 'user_banned',
        'user_fortitoken', 'user_detected-device', 'user_device', 'user_device-type',
        'user_device-category', 'user_fsso', 'utm_rating-lookup', 'utm_app-lookup',
        'utm_application-categories', 'utm_antivirus_stats', 'virtual-wan_health-check',
        'virtual-wan_members', 'webfilter_override', 'webfilter_malicious-urls',
        'webfilter_malicious-urls_stat', 'webfilter_category-quota', 'webfilter_fortiguard-categories',
        'webfilter_trusted-urls', 'vpn_ipsec', 'vpn_one-click_members', 'vpn_one-click_status',
        'vpn_ssl', 'vpn_ssl_stats', 'wanopt_history', 'wanopt_webcache', 'wanopt_peer_stats',
        'webproxy_pacfile_download', 'webcache_stats', 'wifi_client', 'wifi_managed_ap',
        'wifi_firmware', 'wifi_ap_status', 'wifi_interfering_ap', 'wifi_euclid', 'wifi_rogue_ap',
        'wifi_spectrum', 'endpoint-control_summary', 'endpoint-control_ems_status',
        'firewall_consolidated-policy', 'firewall_security-policy', 'firewall_uuid-list',
        'firewall_uuid-type-lookup', 'fortiguard_redirect-portal', 'firewall_sdn-connector-filters',
        'fortiview_sandbox-file-list', 'ips_metadata', 'ips_anomaly', 'license_fortianalyzer-status',
        'log_forticloud-report-list', 'log_local-report-list', 'log_local-report_download',
        'network_lldp_neighbors', 'network_lldp_ports', 'network_dns_latency',
        'network_fortiguard_live-services-latency', 'network_ddns_servers', 'network_ddns_lookup',
        'router_lookup-policy', 'system_config-script', 'system_config-sync_status', 'system_vdom-link',
        'switch-controller_managed-switch_transceivers', 'system_interface_poe',
        'system_trusted-cert-authorities', 'system_sandbox_cloud-regions',
        'system_interface_transceivers', 'system_vm-information',
        'system_security-rating_supported-reports', 'nsx_service_status', 'nsx_instance',
        'system_sdn-connector_nsx-security-tags', 'web-ui_custom-language_download',
        'user_collected-email', 'user_info_query', 'user_info_thumbnail', 'utm_blacklisted-certificates',
        'utm_blacklisted-certificates_statistics', 'virtual-wan_interface-log', 'virtual-wan_sla-log',
        'vpn_ocvpn_members', 'vpn_ocvpn_status', 'vpn_ocvpn_meta', 'wifi_network_list',
        'wifi_network_status', 'wifi_region-image', 'azure_application-list',
        'endpoint-control_ems_cert-status', 'endpoint-control_ems_status-summary',
        'fortiguard_service-communication-stats', 'network_reverse-ip-lookup',
        'registration_forticloud_device-status', 'switch-controller_managed-switch_health',
        'switch-controller_managed-switch_cable-status', 'switch-controller_mclag-icl_eligible-peer',
        'system_interface_speed-test-status', 'user_fortitoken-cloud_status', 'wifi_vlan-probe',
        'firewall_ippool_mapping', 'network_arp', 'system_interface-connected-admins-info',
        'system_ntp_status', 'system_config-error-log_download', 'system_running-processes',
        'user_device_query', 'ips_exceed-scan-range', 'firewall_multicast-policy',
        'firewall_multicast-policy6', 'firewall_gtp-statistics', 'firewall_gtp-runtime-statistics',
        'router_bgp_neighbors', 'router_bgp_neighbors6', 'router_bgp_paths', 'router_bgp_paths6',
        'router_ospf_neighbors', 'system_automation-action_stats', 'switch-controller_matched-devices',
        'system_ha-table-checksums', 'system_sandbox_connection', 'system_traffic-history_interface',
        'system_traffic-history_top-applications', 'videofilter_fortiguard-categories',
        'firewall_central-snat-map', 'firewall_dnat', 'ips_hold-signatures',
        'router_bgp_paths-statistics', 'system_lte-modem_status', 'system_global-search',
        'switch-controller_managed-switch_status', 'switch-controller_managed-switch_port-stats',
        'switch-controller_managed-switch_models', 'system_interface_kernel-interfaces',
        'system_config_restore-status', 'wifi_meta', 'wifi_ap_channels', 'wifi_ap-names',
        'firewall_internet-service-reputation', 'firewall_shaper_multi-class-shaper',
        'log_forticloud_connection', 'system_performance_status', 'system_ipam_list',
        'system_ipam_status', 'system_acme-certificate-status', 'system_crash-log_download',
        'user_banned_check', 'user_info_thumbnail-file', 'vpn-certificate_cert-name-available',
        'wifi_unassociated-devices', 'wifi_matched-devices', 'firewall_proxy_sessions', 'firewall_gtp',
        'fortiview_proxy-statistics', 'system_ha-hw-interface', 'user_firewall_count',
        'firewall_internet-service-basic', 'firewall_vip-overlap',
        'switch-controller_managed-switch_port-health', 'switch-controller_managed-switch_tx-rx',
        'firewall_network-service-dynamic', 'system_ipam_utilization', 'system_ha-nonsync-checksums',
        'wifi_station-capability', 'fortiguard_answers', 'ips_session_performance',
        'switch-controller_nac-device_stats', 'switch-controller_isl-lockdown_status',
        'wifi_nac-device_stats', 'firewall_sessions', 'fortiview_realtime-statistics',
        'fortiview_historical-statistics', 'fortiview_realtime-proxy-statistics', 'log_feature-set',
        'forticonverter_eligibility', 'forticonverter_ticket_status', 'forticonverter_sn-list',
        'forticonverter_intf-list', 'forticonverter_custom-operation_status',
        'forticonverter_intf-mapping', 'forticonverter_mgmt-intf', 'forticonverter_notes',
        'forticonverter_download_ready', 'forticonverter_file_download',
        'forticonverter_download_status', 'switch-controller_managed-switch_bios',
        'system_available-interfaces_meta', 'system_central-management_status', 'user_device_stats',
        'casb_saas-application_details', 'switch-controller_mclag-icl_tier-plus-candidates',
        'extension-controller_fortigate', 'extension-controller_lan-extension-vdom-status',
        'user_proxy', 'user_proxy_count']
    fields = {
        'access_token': {'required': False, 'type': 'str', 'no_log': True},
        'vdom': {'required': False, 'type': 'str', 'default': 'root'},
        'enable_log': {'required': False, 'type': 'bool', 'default': False},
        'filters': {'required': False, 'type': 'list', 'elements': 'str'},
        'sorters': {'required': False, 'type': 'list', 'elements': 'str'},
        'formatters': {'required': False, 'type': 'list', 'elements': 'str'},
        'params': {'required': False, 'type': 'dict'},
        'selector': {'required': False, 'type': 'str', 'choices': selector_choices},
        'selectors': {'required': False, 'type': 'list', 'elements': 'dict', 'options': {
            'filters': {'required': False, 'type': 'list', 'elements': 'str'},
            'sorters': {'required': False, 'type': 'list', 'elements': 'str'},
            'formatters': {'required': False, 'type': 'list', 'elements': 'str'},
            'params': {'required': False, 'type': 'dict'},
            'selector': {'required': True, 'type': 'str', 'choices': list(selector_choices)},
        }},
    }
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    # Exactly one of 'selector'/'selectors' must be provided.
    if ((module.params['selector'] and module.params['selectors'])
            or ((not module.params['selector']) and (not module.params['selectors']))):
        module.fail_json(msg='please use selector or selectors in a task.')
    # Never assigned elsewhere in this module; kept for the shared exit logic.
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if 'access_token' in module.params:
            connection.set_option('access_token', module.params['access_token'])
        if 'enable_log' in module.params:
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module)
        if module.params['selector']:
            is_error, has_changed, result = fortios_monitor_fact(module.params, fos)
        else:
            params = module.params
            selectors = params['selectors']
            is_error = False
            has_changed = False
            result = []
            for selector_obj in selectors:
                # Each entry inherits the top-level vdom unless it overrides it.
                per_selector = {'vdom': params.get('vdom')}
                per_selector.update(selector_obj)
                is_error_local, has_changed_local, result_local = fortios_monitor_fact(per_selector, fos)
                is_error = is_error or is_error_local
                has_changed = has_changed or has_changed_local
                result.append(result_local)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    if versions_check_result and versions_check_result['matched'] is False:
        module.warn('Ansible has detected version mismatch between FortOS system and galaxy, see more details by specifying option -vvv')
    if not is_error:
        if versions_check_result and versions_check_result['matched'] is False:
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
        else:
            module.exit_json(changed=has_changed, meta=result)
    elif versions_check_result and versions_check_result['matched'] is False:
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def get_options(server):
    """Probe *server* with an HTTP OPTIONS request and report allowed methods.

    Returns {'allowed': <Allow header value>} on success, or a human-readable
    error string when the server is unreachable or the header is absent.
    """
    try:
        # verify=False / allow_redirects=False: probe the given URL itself,
        # including hosts with self-signed certificates.
        response = requests.options(server, allow_redirects=False, verify=False, timeout=5)
    except (requests.exceptions.ConnectionError,
            requests.exceptions.Timeout,
            requests.exceptions.MissingSchema):
        # BUG FIX: timeout=5 is set above, but ReadTimeout is not a
        # ConnectionError — without catching Timeout the function crashed on
        # slow servers instead of reporting them as unavailable.
        return 'Server {} is not available!'.format(server)
    try:
        return {'allowed': response.headers['Allow']}
    except KeyError:
        return 'Unable to get HTTP methods'
def post_vilar_allocation(h, k, scorer=None, num_steps=0):
    """Allocate beam slots across constraint banks (Post & Vilar-style
    dynamic beam allocation, per the function and helper names).

    Pools the global top-k hypotheses, all deterministic continuations, and the
    single best continuation per predecessor, then re-selects a per-bank top-k
    using bank sizes derived from each hypothesis' constraint count.

    NOTE(review): `h` is a project DataArray of hypotheses; the semantics of
    its methods (separate_by, reshape, flatten, name) are taken on trust here.
    """
    original_shape = h.shape
    # Deterministic (constraint-forced) continuations are always kept.
    (det_h, nondet_h) = h.separate_by(dc.is_deterministic)
    det_h.name(f'constraint_cont_{num_steps}', nopath=True)
    # Best single continuation of each predecessor hypothesis.
    continuations_by_predecessor = h.reshape((lambda s: s.predecessor.id))
    best_cont_per_predecessor = dc.topk(continuations_by_predecessor, 1, scorer=scorer)
    best_cont_per_predecessor = best_cont_per_predecessor.name(f'best_cont_{num_steps}', nopath=True)
    # Global top-k over all hypotheses, restored to the original layout.
    h_top_k = dc.topk(h.flatten(), k, scorer=scorer)
    h_top_k = h_top_k.reshape(*original_shape).name(f'topk_{num_steps}')
    # Union of the three candidate sets, de-duplicated by token sequence.
    # The tie-break lambda appears to prefer deterministic duplicates —
    # confirm against token_unique's contract.
    h_pool = dc.token_unique(((h_top_k + det_h) + best_cont_per_predecessor).flatten(), (lambda s, t: dc.is_deterministic(s)))
    # Histogram: constraint count -> number of pooled hypotheses with that count.
    num_constraints = np.array([s.num_constraints for s in h_pool.unstructured()])
    num_constraints = {det_len: (num_constraints == det_len).sum() for det_len in set(num_constraints.tolist())}
    beam_size_constrained = (1 * k)
    bank_sizes = get_bank_sizes_post_vilar(num_constraints, beam_size_constrained)
    # Re-select top-k within each constraint bank according to its allocation.
    pools_by_const_num = h_pool.separate_by_list((lambda s: s.num_constraints))
    h_filtered = DataArray({})
    for (num_const, seqs) in pools_by_const_num.items():
        seqs_filtered = dc.topk(seqs, bank_sizes[num_const], scorer=scorer).name(f'bank_{num_steps}_{num_const}')
        # Sanity check: every survivor really belongs to this bank.
        assert all([(s.num_constraints == num_const) for s in seqs_filtered.unstructured()])
        h_filtered += seqs_filtered
    h = h_filtered.reshape(*original_shape)
    return h
def main():
    """Segmaker fuzzer entry point: correlate IDELAY primitive parameters
    recorded in params.json with the bits observed in design.bits, emitting
    one site tag per parameter/bit hypothesis.
    """
    segmk = Segmaker('design.bits', verbose=True)
    with open('params.json', 'r') as fp:
        data = json.load(fp)
    idelay_types = ['FIXED', 'VARIABLE', 'VAR_LOAD']
    delay_srcs = ['IDATAIN', 'DATAIN']
    for params in data:
        # Positive and negative IN_USE examples come from two different sites.
        segmk.add_site_tag(params['IDELAY_IN_USE'], 'IN_USE', True)
        segmk.add_site_tag(params['IDELAY_NOT_IN_USE'], 'IN_USE', False)
        loc = verilog.unquote(params['LOC'])
        value = verilog.unquote(params['IDELAY_TYPE'])
        # The *_PIPE variants share the base type's encoding.
        value = value.replace('_PIPE', '')
        add_site_group_zero(segmk, loc, 'IDELAY_TYPE_', idelay_types, 'FIXED', value)
        value = int(params['IDELAY_VALUE'])
        # Tag each of the 5 IDELAY_VALUE bits both asserted and deasserted.
        for i in range(5):
            segmk.add_site_tag(loc, ('IDELAY_VALUE[%01d]' % i), (((value >> i) & 1) != 0))
            segmk.add_site_tag(loc, ('ZIDELAY_VALUE[%01d]' % i), (((value >> i) & 1) == 0))
        value = verilog.unquote(params['DELAY_SRC'])
        for x in delay_srcs:
            segmk.add_site_tag(loc, ('DELAY_SRC_%s' % x), int((value == x)))
        value = verilog.unquote(params['CINVCTRL_SEL'])
        segmk.add_site_tag(loc, 'CINVCTRL_SEL', int((value == 'TRUE')))
        value = verilog.unquote(params['PIPE_SEL'])
        segmk.add_site_tag(loc, 'PIPE_SEL', int((value == 'TRUE')))
        if ('IS_C_INVERTED' in params):
            segmk.add_site_tag(loc, 'IS_C_INVERTED', int(params['IS_C_INVERTED']))
            # ZINV_C is the complement tag of IS_C_INVERTED.
            segmk.add_site_tag(loc, 'ZINV_C', (1 ^ int(params['IS_C_INVERTED'])))
        segmk.add_site_tag(loc, 'IS_DATAIN_INVERTED', int(params['IS_DATAIN_INVERTED']))
        # These parameters are only meaningful when the input buffer is used.
        if params['IBUF_IN_USE']:
            value = verilog.unquote(params['HIGH_PERFORMANCE_MODE'])
            segmk.add_site_tag(loc, 'HIGH_PERFORMANCE_MODE', int((value == 'TRUE')))
            segmk.add_site_tag(loc, 'IS_IDATAIN_INVERTED', int(params['IS_IDATAIN_INVERTED']))
    segmk.compile(bitfilter=bitfilter)
    segmk.write()
class OptionSeriesPackedbubbleStatesHoverMarker(Options):
    """Hover-state marker options for a packedbubble series.

    BUG FIX: each getter/setter pair was defined as two plain methods with the
    same name, so the setter silently shadowed the getter and the getters were
    unreachable. Restored the @property / @<name>.setter pairing implied by the
    _config_get / _config accessor pattern.
    """

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        # Default threshold is 2.
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        # Default line color is white.
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
class AllDynamic():
    """Checks that dynamically defaulted config values resolve as expected."""

    def test_all_dynamic(self, arg_builder):
        config = arg_builder.ConfigDynamicDefaults
        expected = {'x': 235, 'y': 'yarghhh', 'z': [10, 20], 'p': 1, 'q': 'shhh'}
        for attr, value in expected.items():
            assert getattr(config, attr) == value
# NOTE(review): the decorator name was lost in the source (only the bare
# expression ('ecs_deploy.cli.get_client') remained). Restored as @patch,
# which matches the injected get_client parameter — confirm the module
# imports patch (e.g. `from mock import patch`).
@patch('ecs_deploy.cli.get_client')
def test_update_task_empty_docker_label_again(get_client, runner):
    """Updating a docker label to the empty string should create a new
    revision without reporting any label change."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.update, (TASK_DEFINITION_ARN_1, '-d', 'webserver', 'empty', ''))
    assert result.exit_code == 0
    assert not result.exception
    assert u'Update task definition based on: test-task:1' in result.output
    # An empty value is a no-op: nothing is updated or reported as changed.
    assert u'Updating task definition' not in result.output
    assert u'Changed dockerLabel' not in result.output
    assert u'Successfully created revision: 2' in result.output
class ExistingBearerTokenAuthenticator(AbstractAuthenticator):
    """Authenticator that hands back a caller-supplied bearer token as-is."""

    _type = SupportedAuthProviders.EXISTING_BEARER_TOKEN

    def authenticate(self, kf_endpoint: str, runtime_config_name: str, token: str = None) -> Optional[str]:
        """Return *token* unchanged, rejecting empty/whitespace-only values."""
        if not _empty_or_whitespaces_only(token):
            return token
        raise AuthenticationError(
            f"A token/password is required to perform this type of authentication. Update runtime configuration '{runtime_config_name}' and try again.",
            provider=self._type,
        )
def parse_static_data(ghidra_analysis, argument):
    """Resolve each address-like value in *argument* to the static data at
    that address; fall back to a short raw string read, and finally to the
    original value when the memory is unreadable.
    """
    result = []
    for arg in argument:
        addr = ghidra_analysis.flat_api.toAddr(arg)
        static_data = ghidra_analysis.flat_api.getDataAt(addr)
        if (static_data is not None):
            # Defined data: use its display representation, minus surrounding quotes.
            result.append(str(static_data.getDefaultValueRepresentation().strip('"')))
        else:
            try:
                # No defined data: read up to 6 bytes as a NUL-terminated string.
                byte = ghidra_analysis.flat_api.getByte(addr)
                c_string = ''
                for i in range(6):
                    if (byte == 0):
                        break
                    c_string += chr(byte)
                    byte = ghidra_analysis.flat_api.getByte(addr.add((i + 1)))
                # NOTE(review): this branch appends bytes (.encode().strip())
                # while the branch above appends str — confirm callers accept both.
                # NOTE(review): getByte returns a signed Java byte; chr() on a
                # negative value would raise — presumably inputs are ASCII.
                result.append(c_string.encode('utf-8').strip())
            except ghidra.program.model.mem.MemoryAccessException:
                # Unreadable memory: keep the raw argument value.
                result.append(arg)
    return result
def main():
    """Ansible entry point for the FortiOS firewall access-proxy-ssh-client-cert
    CMDB module: build the argument spec from the generated schema, connect via
    the httpapi socket, apply the requested state, and exit with the result.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # Attribute that uniquely identifies an object of this endpoint.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True},
              'enable_log': {'required': False, 'type': 'bool', 'default': False},
              'vdom': {'required': False, 'type': 'str', 'default': 'root'},
              'member_path': {'required': False, 'type': 'str'},
              'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']},
              'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']},
              'firewall_access_proxy_ssh_client_cert': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived sub-options into the spec; the mkey is required.
    for attribute_name in module_spec['options']:
        fields['firewall_access_proxy_ssh_client_cert']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_access_proxy_ssh_client_cert']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Detect playbook options unsupported by the target FortiOS version
        # (surfaced as a warning / extra payload below).
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_access_proxy_ssh_client_cert')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos)
    else:
        # No httpapi connection available.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def build_refined_target_paths(row: Row, query_paths: FieldPathNodeInput) -> List[DetailedPath]:
    """Refine every query path against *row*, flattening multi-match results,
    and return the collected paths ordered shortest-first."""
    found: List[DetailedPath] = []
    for target_path, only in query_paths.items():
        refined = refine_target_path(row, list(target_path.levels), only)
        if not refined:
            continue
        # A list-of-lists means several concrete paths matched one query path.
        if isinstance(refined[0], list):
            found.extend(refined)
        else:
            found.append(refined)
    return sorted(found, key=len)
# NOTE(review): the source shows only the residue '.django_db' here — the
# leading '@pytest.mark' of the marker was lost in extraction. Restored
# (django_db is the pytest-django database-access marker); confirm the module
# imports pytest.
@pytest.mark.django_db
def test_naics_autocomplete_success(client, naics_data):
    """NAICS autocomplete returns an exact-code match and substring matches."""
    # Exact 6-digit code lookup yields exactly one result.
    resp = client.post('/api/v2/autocomplete/naics/', content_type='application/json', data=json.dumps({'search_text': '212112'}))
    assert resp.status_code == status.HTTP_200_OK
    assert len(resp.data['results']) == 1
    assert resp.data['results'][0]['naics_description'] == 'Bituminous Coal Underground Mining'
    # Description substring lookup matches both fixture rows.
    resp = client.post('/api/v2/autocomplete/naics/', content_type='application/json', data=json.dumps({'search_text': 'Mining'}))
    assert resp.status_code == status.HTTP_200_OK
    assert len(resp.data['results']) == 2
# NOTE(review): the next two lines appear to be decorators whose leading
# '@<name>' was lost in extraction — likely '@<group>_handling.command()' and
# "@click.option('--input', type=click.File('rb'))". As written they are bare
# expressions (the second is not valid syntax at all); restore the decorators
# against the original source before using this file.
_handling.command()
('--input', type=click.File('rb'))
def process_optional_file(input: click.File):
    """Echo the contents of the optional --input file in 1 KiB chunks, or a
    notice when no file was given."""
    if (input is None):
        click.echo('no input file given')
    else:
        while True:
            click.echo(f'Reading from {input.name}...')
            chunk = input.read(1024)
            if (not chunk):
                break
            click.echo(chunk)
class Pattern():
    """A string template of literal text interleaved with placeholders.

    RE1 (module-level regex) splits the pattern so that even split indices are
    literal text (wrapped in Constant) and odd indices are placeholder bodies:
    a body containing '|' becomes a Function placeholder, otherwise a plain
    Variable.
    """

    def __init__(self, pattern, ignore_missing_keys=False):
        # If True, substitute() tolerates parameters that match no placeholder
        # instead of raising ValueError.
        self.ignore_missing_keys = ignore_missing_keys
        self.pattern = []    # alternating Constant and Variable/Function parts
        self.variables = []  # just the placeholder parts, in order
        for (i, p) in enumerate(RE1.split(pattern)):
            if ((i % 2) == 0):
                self.pattern.append(Constant(p))
            else:
                if ('|' in p):
                    v = Function(p)
                else:
                    v = Variable(p)
                self.variables.append(v)
                self.pattern.append(v)

    def names(self):
        """Return the sorted, de-duplicated placeholder names."""
        return sorted({v.name for v in self.variables})

    def substitute(self, *args, **kwargs):
        """Expand the pattern with the given parameters.

        Positional dict args and keyword args are merged (later wins). If any
        value is a list, the cartesian product of all values is expanded and a
        list of unique strings is returned; otherwise a single string.
        """
        params = {}
        for a in args:
            params.update(a)
        params.update(kwargs)
        for (k, v) in params.items():
            if isinstance(v, list):
                return self._substitute_many(params)
        return self._substitute_one(params)

    def _substitute_one(self, params):
        # Track which parameters were actually consumed by a placeholder.
        # NOTE(review): assumes Constant parts also expose a .name attribute.
        used = set(params.keys())
        result = []
        for p in self.pattern:
            used.discard(p.name)
            result.append(p.substitute(params))
        if (used and (not self.ignore_missing_keys)):
            raise ValueError('Unused parameter(s): {}'.format(used))
        return ''.join((str(x) for x in result))

    def _substitute_many(self, params):
        # Normalise every value to a list, then substitute each combination in
        # the cartesian product, keeping first-seen order without duplicates.
        for (k, v) in list(params.items()):
            if (not isinstance(v, list)):
                params[k] = [v]
        seen = set()
        result = []
        for n in (dict(zip(params.keys(), x)) for x in itertools.product(*params.values())):
            m = self.substitute(n)
            if (m not in seen):
                seen.add(m)
                result.append(m)
        return result
def _encode(source_path: str, video_format: Format, encoding_backend: BaseEncodingBackend, options: dict) -> None:
    """Encode *source_path* into a temp file with *encoding_backend*, tracking
    progress on *video_format*, then save the result to video_format.file.
    """
    with tempfile.NamedTemporaryFile(suffix='_{name}.{extension}'.format(**options)) as file_handler:
        target_path = file_handler.name
        video_format.reset_progress()
        # The backend yields progress percentages until it is exhausted.
        encoding = encoding_backend.encode(source_path, target_path, options['params'])
        while encoding:
            try:
                progress = next(encoding)
            except StopIteration:
                break
            video_format.update_progress(progress)
        filename = os.path.basename(source_path)
        # BUG FIX: the saved-name format string contained a literal placeholder
        # instead of '{filename}', so the computed filename kwarg was unused.
        # BUG FIX: the encoded file handle was opened without ever being
        # closed; a with-block now guarantees it is released after save().
        with open(target_path, mode='rb') as encoded_file:
            video_format.file.save('{filename}_{name}.{extension}'.format(filename=filename, **options), File(encoded_file))
        video_format.update_progress(100)
def render() -> None:
    """Build the face-analyser/detector option widgets and register them as UI components."""
    # Widget handles are module-level so other UI modules can attach listeners.
    global FACE_ANALYSER_ORDER_DROPDOWN
    global FACE_ANALYSER_AGE_DROPDOWN
    global FACE_ANALYSER_GENDER_DROPDOWN
    global FACE_DETECTOR_SIZE_DROPDOWN
    global FACE_DETECTOR_SCORE_SLIDER
    global FACE_DETECTOR_MODEL_DROPDOWN
    with gradio.Row():
        FACE_ANALYSER_ORDER_DROPDOWN = gradio.Dropdown(label=wording.get('face_analyser_order_dropdown_label'), choices=facefusion.choices.face_analyser_orders, value=facefusion.globals.face_analyser_order)
        # 'none' is prepended as the neutral choice and used when the global is unset.
        FACE_ANALYSER_AGE_DROPDOWN = gradio.Dropdown(label=wording.get('face_analyser_age_dropdown_label'), choices=(['none'] + facefusion.choices.face_analyser_ages), value=(facefusion.globals.face_analyser_age or 'none'))
        FACE_ANALYSER_GENDER_DROPDOWN = gradio.Dropdown(label=wording.get('face_analyser_gender_dropdown_label'), choices=(['none'] + facefusion.choices.face_analyser_genders), value=(facefusion.globals.face_analyser_gender or 'none'))
        FACE_DETECTOR_MODEL_DROPDOWN = gradio.Dropdown(label=wording.get('face_detector_model_dropdown_label'), choices=facefusion.choices.face_detector_models, value=facefusion.globals.face_detector_model)
        FACE_DETECTOR_SIZE_DROPDOWN = gradio.Dropdown(label=wording.get('face_detector_size_dropdown_label'), choices=facefusion.choices.face_detector_sizes, value=facefusion.globals.face_detector_size)
        # Slider step is the gap between the first two range entries; maximum is the last entry.
        FACE_DETECTOR_SCORE_SLIDER = gradio.Slider(label=wording.get('face_detector_score_slider_label'), value=facefusion.globals.face_detector_score, step=(facefusion.choices.face_detector_score_range[1] - facefusion.choices.face_detector_score_range[0]), minimum=facefusion.choices.face_detector_score_range[0], maximum=facefusion.choices.face_detector_score_range[(- 1)])
    register_ui_component('face_analyser_order_dropdown', FACE_ANALYSER_ORDER_DROPDOWN)
    register_ui_component('face_analyser_age_dropdown', FACE_ANALYSER_AGE_DROPDOWN)
    register_ui_component('face_analyser_gender_dropdown', FACE_ANALYSER_GENDER_DROPDOWN)
    register_ui_component('face_detector_model_dropdown', FACE_DETECTOR_MODEL_DROPDOWN)
    register_ui_component('face_detector_size_dropdown', FACE_DETECTOR_SIZE_DROPDOWN)
    register_ui_component('face_detector_score_slider', FACE_DETECTOR_SCORE_SLIDER)
class _AsyncContextManager(): def __init__(self, coro): self._coro = coro self._obj = None async def __aenter__(self): self._obj = (await self._coro) return self._obj async def __aexit__(self, exc_type, exc, tb): (await self._obj.finalize()) self._obj = None
class Riverside(Skill):
    """Active character skill, used by dropping exactly one card."""
    associated_action = RiversideAction
    skill_category = ['character', 'active']
    target = t_OtherOne()
    usage = 'drop'

    def check(self):
        """Valid only with a single card from hand, shown cards or equipment."""
        cards = self.associated_cards
        return (len(cards) == 1 and
                cards[0].resides_in.type in ('cards', 'showncards', 'equips'))
class OptionPlotoptionsBulletSonificationTracksMappingPlaydelay(Options):
    """Accessors for the bullet-series sonification ``mapping.playDelay`` options.

    NOTE(review): every name below is defined twice — a no-argument getter
    followed by a one-argument setter. The original source presumably carried
    ``@property`` / ``@<name>.setter`` decorators that were stripped during
    extraction; as written each setter definition silently shadows its getter.
    Confirm upstream.
    """

    def mapFunction(self):
        # Getter: current value, or None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain value, not a JS expression.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the decorator lines were mangled by extraction — presumably
# "@pytest.fixture()" plus "@pytest.mark.usefixtures('use_tmpdir')". Confirm
# against the original test module.
() .usefixtures('use_tmpdir')
def setup_case(storage):
    """Pytest fixture factory: builds an ERT config, prior ensemble and run path.

    Yields a callable that, given config-file text, writes ``config.ert`` in
    the tmp dir, creates a prior ensemble in *storage*, materialises the run
    path, and returns ``(ert_config, prior_ensemble)``.
    """
    def func(config_text):
        Path('config.ert').write_text(config_text, encoding='utf-8')
        ert_config = ErtConfig.from_file('config.ert')
        # One experiment + ensemble sized from the parsed config.
        prior_ensemble = storage.create_ensemble(storage.create_experiment(responses=ert_config.ensemble_config.response_configuration), name='prior', ensemble_size=ert_config.model_config.num_realizations)
        run_context = ensemble_context(prior_ensemble, [True], 0, None, '', ert_config.model_config.runpath_format_string, 'name')
        create_run_path(run_context, ert_config.substitution_list, ert_config)
        return (ert_config, prior_ensemble)
    (yield func)
class Entities():
    """Common HTML entity constants plus access to the full entity tables.

    NOTE(review): the table accessors (``html4`` … ``exts``) return
    module-level lookup objects; they may originally have been decorated as
    ``@property`` — confirm upstream before calling conventions are assumed.
    """
    # Frequently used entities, mostly as numeric character references.
    word_break = '<wbr>'
    word_break_hyphen = '&shy;'
    non_breaking_space = '&#160;'
    less_than = '&#60;'
    greater_than = '&#62;'
    ampersand = '&#38;'
    double_quotation_mark = '&#34;'
    single_quotation_mark_apostrophe = '&#39;'
    cent = '&#162;'
    pound = '&#163;'
    yen = '&#165;'
    euro = '&#8364;'
    copyright = '&#169;'
    registered_trademark = '&#174;'

    def html4(self):
        # Full HTML4 entity table.
        return EntHtml4

    def utf8(self):
        # UTF-8 entity table.
        return EntUtf8

    def html5_a(self):
        return EntHtml5_A

    def html5_b(self):
        return EntHtml5_B

    def html5_c(self):
        return EntHtml5_C

    def html5_d(self):
        return EntHtml5_D

    def exts(self):
        # Project-specific add-on entities.
        return Defaults.ENTITIES_ADD_ON
class Migration(migrations.Migration):
    """Add a temporary disaster-emergency-fund code text column to financialaccountsbyawards."""
    dependencies = [('awards', '0085_auto__2219'), ('references', '0055_create_new_defc_gtas_column_as_text_field')]
    # Nullable text column; db_column differs from the field name ("_code_temp").
    operations = [migrations.AddField(model_name='financialaccountsbyawards', name='disaster_emergency_fund_temp', field=models.TextField(null=True, db_column='disaster_emergency_fund_code_temp'))]
class TestSendMail():
    """Tests for bodhi.server.mail.send_mail.

    NOTE(review): the decorator lines were mangled by extraction — the
    ``.dict(...)`` fragments are presumably ``@mock.patch.dict(...)`` and the
    bare ``('bodhi...')`` fragments ``@mock.patch(...)``. Confirm upstream.
    """

    .dict('bodhi.server.mail.config', {'exclude_mail': ['', '']})
    ('bodhi.server.mail._send_mail')
    def test_exclude_mail(self, _send_mail):
        # Recipients on the exclude list must not trigger a send.
        mail.send_mail('', '', 'R013X', 'Want a c00l ?')
        assert (_send_mail.call_count == 0)

    .dict('bodhi.server.mail.config', {'smtp_server': 'smtp.fp.o'})
    ('bodhi.server.mail.smtplib.SMTP')
    def test_headers(self, SMTP):
        # Extra headers are injected between To: and X-Bodhi:.
        smtp = SMTP.return_value
        mail.send_mail('', '', 'R013X', 'Want a c00l ?', headers={'Bodhi-Is': 'Great'})
        SMTP.assert_called_once_with('smtp.fp.o')
        smtp.sendmail.assert_called_once_with('', [''], b'From: \r\nTo: \r\nBodhi-Is: Great\r\nX-Bodhi: fedoraproject.org\r\nSubject: R013X\r\n\r\nWant a c00l ?')

    .dict('bodhi.server.mail.config', {'bodhi_email': ''})
    ('bodhi.server.mail._send_mail')
    ('bodhi.server.mail.log.warning')
    def test_no_from_addr(self, warning, _send_mail):
        # A missing from address logs a warning and skips sending.
        mail.send_mail(None, '', 'R013X', 'Want a c00l ?')
        warning.assert_called_once_with('Unable to send mail: bodhi_email not defined in the config')
        assert (_send_mail.call_count == 0)

    .dict('bodhi.server.mail.config', {'smtp_server': 'smtp.fp.o'})
    ('bodhi.server.mail.smtplib.SMTP')
    def test_send(self, SMTP):
        # Happy path: message is assembled and handed to smtplib.
        smtp = SMTP.return_value
        mail.send_mail('', '', 'R013X', 'Want a c00l ?')
        SMTP.assert_called_once_with('smtp.fp.o')
        smtp.sendmail.assert_called_once_with('', [''], b'From: \r\nTo: \r\nX-Bodhi: fedoraproject.org\r\nSubject: R013X\r\n\r\nWant a c00l ?')
class QAgent(Agent):
    """Epsilon-greedy agent driven by a Q-network.

    ``__call__`` returns ``(state, agent_do, new_state)`` DictTensors, where
    ``agent_do`` carries the chosen action and the raw Q-values.
    """

    def __init__(self, model=None, n_actions=None):
        super().__init__()
        self.model = model
        self.n_actions = n_actions

    def update(self, sd):
        """Reload the Q-network weights from a state dict."""
        self.model.load_state_dict(sd)

    def __call__(self, state, observation, agent_info=None, history=None):
        initial = observation['initial_state']
        batch = observation.n_elems()
        if agent_info is None:
            agent_info = DictTensor({'epsilon': torch.zeros(batch)})
        if state is None:
            # First call: every element must be at an episode start.
            assert initial.all()
            step = torch.zeros(batch).long()
        else:
            # Reset the per-element step counter wherever an episode restarts.
            restart = initial.float()
            step = ((restart * torch.zeros(batch)) + ((1 - restart) * state['agent_step'])).long()
        q_values = self.model(observation['frame'])
        greedy = q_values.max(1)[1]
        # Epsilon-greedy mixing: sample random actions, then pick per-element.
        random_act = torch.tensor(np.random.randint(low=0, high=self.n_actions, size=greedy.size()[0]))
        explore = torch.rand(greedy.size()[0]).lt(agent_info['epsilon']).float()
        chosen = ((explore * random_act) + ((1 - explore) * greedy)).long()
        return (DictTensor({'agent_step': step}),
                DictTensor({'action': chosen, 'q': q_values}),
                DictTensor({'agent_step': (step + 1)}))
def parse_arguments(argv=sys.argv):
    """Build the CLI parser, parse ``argv[1:]``, log the chosen options, return them.

    Returns the ``argparse.Namespace`` of parsed options.
    NOTE(review): only DEBUG is ever applied to the logger here — other
    ``--log-level`` choices are accepted but not acted on in this function;
    presumably the caller applies them. Confirm upstream.
    """
    description = 'This script runs tests on the system to check for compliance against the CIS Benchmarks. No changes are made to system files by this script.'
    epilog = f''' Examples: Run with debug enabled: {__file__} --debug Exclude tests from section 1.1 and 1.3.2: {__file__} --exclude 1.1 1.3.2 Include tests only from section 4.1 but exclude tests from section 4.1.1: {__file__} --include 4.1 --exclude 4.1.1 Run only level 1 tests {__file__} --level 1 Run level 1 tests and include some but not all SELinux questions {__file__} --level 1 --include 1.6 --exclude 1.6.1.2 '''
    level_choices = [1, 2]
    log_level_choices = ['DEBUG', 'INFO', 'WARNING', 'CRITICAL']
    output_choices = ['csv', 'json', 'psv', 'text', 'tsv']
    system_type_choices = ['server', 'workstation']
    # NOTE(review): the trailing ')' inside this f-string looks like a typo.
    version_str = f'{os.path.basename(__file__)} {__version__})'
    parser = ArgumentParser(description=description, epilog=epilog, formatter_class=RawTextHelpFormatter)
    # Default 0 means "all levels"; argparse does not validate defaults
    # against choices, so 0 is accepted despite not being in level_choices.
    parser.add_argument('--level', action='store', choices=level_choices, default=0, type=int, help='Run tests for the specified level only')
    parser.add_argument('--include', action='store', nargs='+', dest='includes', help='Space delimited list of tests to include')
    parser.add_argument('--exclude', action='store', nargs='+', dest='excludes', help='Space delimited list of tests to exclude')
    parser.add_argument('-l', '--log-level', action='store', choices=log_level_choices, default='INFO', help='Set log output level')
    parser.add_argument('--debug', action='store_const', const='DEBUG', dest='log_level', help='Run script with debug output turned on. Equivalent to --log-level DEBUG')
    # --nice / --no-nice share the 'nice' dest; the flag pair defaults to True.
    parser.add_argument('--nice', action='store_true', default=True, help='Lower the CPU priority for test execution. This is the default behaviour.')
    parser.add_argument('--no-nice', action='store_false', dest='nice', help='Do not lower CPU priority for test execution. \nThis may make the tests complete faster but at the cost of putting a higher load on the server. Setting this overrides the --nice option.')
    parser.add_argument('--no-colour', '--no-color', action='store_true', help='Disable colouring for STDOUT. Output redirected to a file/pipe is never coloured.')
    parser.add_argument('--system-type', action='store', choices=system_type_choices, default='server', help='Set which test level to reference')
    parser.add_argument('--server', action='store_const', const='server', dest='system_type', help='Use "server" levels to determine which tests to run. Equivalent to --system-type server [Default]')
    parser.add_argument('--workstation', action='store_const', const='workstation', dest='system_type', help='Use "workstation" levels to determine which tests to run. Equivalent to --system-type workstation')
    # Output format: one canonical option plus per-format shorthand flags.
    parser.add_argument('--outformat', action='store', choices=output_choices, default='text', help='Output type for results')
    parser.add_argument('--text', action='store_const', const='text', dest='outformat', help='Output results as text. Equivalent to --output text [default]')
    parser.add_argument('--json', action='store_const', const='json', dest='outformat', help='Output results as json. Equivalent to --output json')
    parser.add_argument('--csv', action='store_const', const='csv', dest='outformat', help='Output results as comma-separated values. Equivalent to --output csv')
    parser.add_argument('--psv', action='store_const', const='psv', dest='outformat', help='Output results as pipe-separated values. Equivalent to --output psv')
    parser.add_argument('--tsv', action='store_const', const='tsv', dest='outformat', help='Output results as tab-separated values. \nEquivalent to --output tsv')
    parser.add_argument('-V', '--version', action='version', version=version_str, help='Print version and exit')
    parser.add_argument('-c', '--config', action='store', help='Location of config file to load')
    args = parser.parse_args(argv[1:])
    logger = logging.getLogger(__name__)
    if (args.log_level == 'DEBUG'):
        logger.setLevel(level=args.log_level)
        logger.debug('Debugging enabled')
    # The remainder only logs what was selected (visible with --debug).
    if args.nice:
        logger.debug('Tests will run with reduced CPU priority')
    if args.no_colour:
        logger.debug('Coloured output will be disabled')
    if args.includes:
        logger.debug(f'Include list is populated "{args.includes}"')
    else:
        logger.debug('Include list is empty')
    if args.excludes:
        logger.debug(f'Exclude list is populated "{args.excludes}"')
    else:
        logger.debug('Exclude list is empty')
    if (args.level == 0):
        logger.debug('Going to run tests from any level')
    elif (args.level == 1):
        logger.debug('Going to run Level 1 tests')
    elif (args.level == 2):
        logger.debug('Going to run Level 2 tests')
    if (args.system_type == 'server'):
        logger.debug('Going to use "server" levels for test determination')
    elif (args.system_type == 'workstation'):
        logger.debug('Going to use "workstation" levels for test determination')
    if (args.outformat == 'text'):
        logger.debug('Going to use "text" outputter')
    elif (args.outformat == 'json'):
        logger.debug('Going to use "json" outputter')
    elif (args.outformat == 'csv'):
        logger.debug('Going to use "csv" outputter')
    return args
class OptionPlotoptionsLollipopSonificationContexttracksMappingNoteduration(Options):
    """Accessors for the lollipop-series sonification ``mapping.noteDuration`` options.

    NOTE(review): every name below is defined twice — a no-argument getter
    followed by a one-argument setter. The original source presumably carried
    ``@property`` / ``@<name>.setter`` decorators that were stripped during
    extraction; as written each setter definition silently shadows its getter.
    Confirm upstream.
    """

    def mapFunction(self):
        # Getter: current value, or None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain value, not a JS expression.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesHistogramSonificationContexttracksMappingHighpassFrequency(Options):
    """Accessors for the histogram-series sonification ``mapping.highpass.frequency`` options.

    NOTE(review): every name below is defined twice — a no-argument getter
    followed by a one-argument setter. The original source presumably carried
    ``@property`` / ``@<name>.setter`` decorators that were stripped during
    extraction; as written each setter definition silently shadows its getter.
    Confirm upstream.
    """

    def mapFunction(self):
        # Getter: current value, or None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain value, not a JS expression.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def test_configure_uses_default_config_if_config_name_is_none(create_test_db, create_pymel, create_maya_env):
    """configure(config_name=None) must fall back to Maya's default ACES setup.

    First applies an explicit Rec.709 config so the fallback is observable,
    then re-configures with None and checks the ACES defaults are restored.
    """
    pm = create_pymel
    from anima.dcc.mayaEnv.render import MayaColorManagementConfigurator
    MayaColorManagementConfigurator.configure(config_name='scene-linear Rec.709-sRGB')
    # NOTE(review): 'cmp' shadows a (py2) builtin name; harmless here.
    cmp = pm.colorManagementPrefs
    assert (cmp(q=1, cmEnabled=1) is True)
    assert (cmp(q=1, configFilePath=1) == '<MAYA_RESOURCES>/OCIO-configs/Maya2022-default/config.ocio')
    assert (cmp(q=1, configFileVersion=1) == '2.0')
    assert (cmp(q=1, displayName=1) == 'sRGB')
    assert (cmp(q=1, outputUseViewTransform=1) is True)
    assert (cmp(q=1, outputTransformName=1) == 'Un-tone-mapped (sRGB)')
    assert (cmp(q=1, renderingSpaceName=1) == 'scene-linear Rec.709-sRGB')
    # viewTransformName query only exists on Maya 2022+.
    if (int(pm.about(v=1)) >= 2022):
        assert (cmp(q=1, viewTransformName=1) == 'Un-tone-mapped (sRGB)')
    # Now the actual behaviour under test: None selects the ACES defaults.
    MayaColorManagementConfigurator.configure(config_name=None)
    assert (cmp(q=1, cmEnabled=1) is True)
    assert (cmp(q=1, configFilePath=1) == '<MAYA_RESOURCES>/OCIO-configs/Maya2022-default/config.ocio')
    assert (cmp(q=1, configFileVersion=1) == '2.0')
    assert (cmp(q=1, displayName=1) == 'sRGB')
    assert (cmp(q=1, outputUseViewTransform=1) is True)
    assert (cmp(q=1, outputTransformName=1) == 'ACES 1.0 SDR-video (sRGB)')
    assert (cmp(q=1, renderingSpaceName=1) == 'ACEScg')
    if (int(pm.about(v=1)) >= 2022):
        assert (cmp(q=1, viewTransformName=1) == 'ACES 1.0 SDR-video (sRGB)')
def main(page: ft.Page):
    """Render an interactive pie chart whose hovered slice gains a white border."""
    no_border = ft.BorderSide(0, ft.colors.with_opacity(0, ft.colors.WHITE))
    highlight = ft.BorderSide(6, ft.colors.WHITE)

    def handle_hover(e: ft.PieChartEvent):
        # Outline only the section currently under the pointer.
        for position, section in enumerate(chart.sections):
            section.border_side = highlight if position == e.section_index else no_border
        chart.update()

    slices = [
        ft.PieChartSection(25, color=ft.colors.BLUE, radius=80, border_side=no_border),
        ft.PieChartSection(25, color=ft.colors.YELLOW, radius=65, border_side=no_border),
        ft.PieChartSection(25, color=ft.colors.PINK, radius=60, border_side=no_border),
        ft.PieChartSection(25, color=ft.colors.GREEN, radius=70, border_side=no_border),
    ]
    chart = ft.PieChart(sections=slices, sections_space=1, center_space_radius=0, on_chart_event=handle_hover, expand=True)
    page.add(chart)
class BaseUser(CreateUpdateDictModel, Generic[models.ID]):
    """Base pydantic schema for a user as exposed by the API."""
    id: models.ID          # primary key; concrete type supplied by models.ID
    email: EmailStr
    is_active: bool = True
    is_superuser: bool = False
    is_verified: bool = False
    # Allow construction from ORM objects under both pydantic major versions.
    if PYDANTIC_V2:
        model_config = ConfigDict(from_attributes=True)
    else:
        class Config():
            # pydantic v1 equivalent of from_attributes.
            orm_mode = True
class TaskTemplate(_common.FlyteIdlEntity):
    """IDL wrapper for a Flyte task template.

    NOTE(review): the accessor methods below (``id`` … ``extended_resources``)
    were presumably decorated with ``@property``, and ``from_flyte_idl`` with
    ``@classmethod``, in the original source — the decorators look stripped by
    extraction. Confirm upstream.
    """

    def __init__(self, id, type, metadata, interface, custom, container=None, task_type_version=0, security_context=None, config=None, k8s_pod=None, sql=None, extended_resources=None):
        """Store the template fields; at most one of container/k8s_pod/sql may be set."""
        if (((container is not None) and (k8s_pod is not None)) or ((container is not None) and (sql is not None)) or ((k8s_pod is not None) and (sql is not None))):
            raise ValueError('At most one of container, k8s_pod or sql can be set')
        self._id = id
        self._type = type
        self._metadata = metadata
        self._interface = interface
        self._custom = custom
        self._container = container
        self._task_type_version = task_type_version
        self._config = config
        self._security_context = security_context
        self._k8s_pod = k8s_pod
        self._sql = sql
        self._extended_resources = extended_resources

    # --- simple accessors over the stored fields ---
    def id(self):
        return self._id

    def type(self):
        return self._type

    def metadata(self):
        return self._metadata

    def interface(self):
        return self._interface

    def custom(self):
        return self._custom

    def task_type_version(self):
        return self._task_type_version

    def container(self):
        return self._container

    def config(self):
        return self._config

    def security_context(self):
        return self._security_context

    def k8s_pod(self):
        return self._k8s_pod

    def sql(self):
        return self._sql

    def extended_resources(self):
        return self._extended_resources

    def to_flyte_idl(self):
        """Serialize this template into its protobuf representation."""
        # custom dict is round-tripped through JSON into a protobuf Struct.
        task_template = _core_task.TaskTemplate(id=self.id.to_flyte_idl(), type=self.type, metadata=self.metadata.to_flyte_idl(), interface=self.interface.to_flyte_idl(), custom=(_json_format.Parse(_json.dumps(self.custom), _struct.Struct()) if self.custom else None), container=(self.container.to_flyte_idl() if self.container else None), task_type_version=self.task_type_version, security_context=(self.security_context.to_flyte_idl() if self.security_context else None), extended_resources=self.extended_resources, config=({k: v for (k, v) in self.config.items()} if (self.config is not None) else None), k8s_pod=(self.k8s_pod.to_flyte_idl() if self.k8s_pod else None), sql=(self.sql.to_flyte_idl() if self.sql else None))
        return task_template

    def from_flyte_idl(cls, pb2_object):
        """Build a TaskTemplate from its protobuf representation.

        NOTE(review): ``custom`` is gated on ``pb2_object`` being truthy rather
        than the ``custom`` field itself — looks suspicious; confirm upstream.
        """
        return cls(id=_identifier.Identifier.from_flyte_idl(pb2_object.id), type=pb2_object.type, metadata=TaskMetadata.from_flyte_idl(pb2_object.metadata), interface=_interface.TypedInterface.from_flyte_idl(pb2_object.interface), custom=(_json_format.MessageToDict(pb2_object.custom) if pb2_object else None), container=(Container.from_flyte_idl(pb2_object.container) if pb2_object.HasField('container') else None), task_type_version=pb2_object.task_type_version, security_context=(_sec.SecurityContext.from_flyte_idl(pb2_object.security_context) if (pb2_object.security_context and (pb2_object.security_context.ByteSize() > 0)) else None), extended_resources=(pb2_object.extended_resources if pb2_object.HasField('extended_resources') else None), config=({k: v for (k, v) in pb2_object.config.items()} if (pb2_object.config is not None) else None), k8s_pod=(K8sPod.from_flyte_idl(pb2_object.k8s_pod) if pb2_object.HasField('k8s_pod') else None), sql=(Sql.from_flyte_idl(pb2_object.sql) if pb2_object.HasField('sql') else None))
class RpcMixin():
    """Mixin providing per-thread RPC job dispatch and response collection.

    Relies on the host class supplying ``self.log`` and ``self.pluginName``.
    NOTE(review): ``rpc_interface`` is accessed elsewhere as an attribute
    (``self.rpc_interface.put_job(...)``), so it was presumably decorated
    ``@property`` in the original source — the decorator looks stripped by
    extraction. Confirm upstream.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Interfaces are keyed per (thread, process) pair.
        self.rpc_interfaces = {}
        self.job_map = {}
        self.job_counter = 0
        self.rpc_timeout_s = (60 * 40)
        self.remote_log = logging.getLogger('Main.RPC.Remote')
        self.check_open_rpc_interface()
        self.log.info('RPC Interface initialized')

    def rpc_interface(self):
        # Lazily create one RPC interface per thread/process combination.
        threadName = threading.current_thread().name
        procName = multiprocessing.current_process().name
        thread_key = '{} - {}'.format(threadName, procName)
        if (thread_key not in self.rpc_interfaces):
            self.rpc_interfaces[thread_key] = common.get_rpyc.RemoteJobInterface('RWP-RPC-Fetcher')
        return self.rpc_interfaces[thread_key]

    def close_rpc_interface(self):
        # Drop the current thread's interface from the registry, if present.
        threadName = threading.current_thread().name
        procName = multiprocessing.current_process().name
        thread_key = '{} - {}'.format(threadName, procName)
        if (thread_key in self.rpc_interfaces):
            self.rpc_interfaces.pop(thread_key)
        else:
            self.log.warning("Closing RPC interface from a thread that hasn't opened it!")

    def put_outbound_raw(self, raw_job):
        """Push a raw job, reopening the interface on transient failures.

        Known-transient exception types retry forever; unknown exceptions
        retry up to five times before re-raising.
        """
        errors = 0
        while 1:
            try:
                self.rpc_interface.put_job(raw_job)
                return
            except TypeError:
                self.check_open_rpc_interface()
            except KeyError:
                self.check_open_rpc_interface()
            except BrokenPipeError:
                self.check_open_rpc_interface()
            except Exception as e:
                self.check_open_rpc_interface()
                errors += 1
                if (errors > 5):
                    raise e

    def put_outbound_fetch_job(self, jobid, joburl):
        """Dispatch a WebRequest.getItem fetch job for *joburl*."""
        self.log.info('Dispatching new fetch job')
        raw_job = buildjob(module='WebRequest', call='getItem', dispatchKey='rwp-rpc-system', jobid=jobid, args=[joburl], kwargs={}, additionalData={'mode': 'fetch'}, postDelay=0)
        self.put_outbound_raw(raw_job)

    # NOTE(review): meta={} and call_kwargs={} are mutable default arguments.
    def put_outbound_callable(self, jobid, serialized, meta={}, call_kwargs={}, early_ack=False, job_unique_id=None):
        """Dispatch a RemoteExec.callCode job carrying *serialized* code."""
        self.log.info('Dispatching new callable job')
        call_kwargs_out = {'code_struct': serialized}
        for (key, value) in call_kwargs.items():
            call_kwargs_out[key] = value
        raw_job = buildjob(module='RemoteExec', call='callCode', dispatchKey='rwp-rpc-system', jobid=jobid, kwargs=call_kwargs_out, additionalData=meta, postDelay=0, early_ack=early_ack, serialize=self.pluginName, unique_id=job_unique_id)
        self.put_outbound_raw(raw_job)

    def process_responses(self):
        """Fetch one response, or None on empty queue / transient errors."""
        try:
            return self.rpc_interface.get_job()
        except queue.Empty:
            return None
        except TypeError:
            self.check_open_rpc_interface()
            return None
        except socket.timeout:
            self.check_open_rpc_interface()
            return None
        except KeyError:
            self.check_open_rpc_interface()
            return None

    def check_open_rpc_interface(self):
        """Probe the interface; on failure close and reopen it."""
        try:
            if self.rpc_interface.check_ok():
                return
        except Exception:
            self.log.error('Failure when probing RPC interface')
            for line in traceback.format_exc().split('\n'):
                self.log.error(line)
        try:
            self.rpc_interface.close()
            self.log.warning('Closed interface due to connection exception.')
        except Exception:
            self.log.error('Failure when closing errored RPC interface')
            for line in traceback.format_exc().split('\n'):
                self.log.error(line)
        self.close_rpc_interface()
        # Accessing rpc_interface again recreates it; check_ok validates it.
        self.rpc_interface.check_ok()

    def put_job(self, remote_cls, call_kwargs=None, meta=None, early_ack=False, job_unique_id=None):
        """Serialize *remote_cls* and dispatch it; returns the new job id.

        With 'drain' on the command line, nothing is dispatched — only the
        interface is validated (reply-consumption mode).
        """
        if (call_kwargs is None):
            call_kwargs = {}
        if (not meta):
            meta = {}
        jid = str(uuid.uuid4())
        if ('drain' in sys.argv):
            print('Consuming replies only')
            self.check_open_rpc_interface()
        else:
            scls = rpc_serialize.serialize_class(remote_cls)
            self.put_outbound_callable(jid, scls, call_kwargs=call_kwargs, meta=meta, early_ack=early_ack, job_unique_id=job_unique_id)
        return jid

    def __blocking_dispatch_call_local(self, remote_cls, call_kwargs, meta=None, expect_partials=False):
        """Execute the job synchronously through the local executor interface."""
        self.log.info('Dispatching new callable job to local executor')
        print('Kwargs:', call_kwargs)
        scls = rpc_serialize.serialize_class(remote_cls)
        call_kwargs_out = {'code_struct': scls}
        for (key, value) in call_kwargs.items():
            call_kwargs_out[key] = value
        # NOTE(review): debug prints of the local executor module left in place.
        print(local_exec)
        print(dir(local_exec))
        jid = self.job_counter
        self.job_counter += 1
        raw_job = buildjob(module='RemoteExec', call='callCode', dispatchKey='rwp-rpc-system', jobid=jid, kwargs=call_kwargs_out, additionalData=meta, postDelay=0, early_ack=False, serialize=self.pluginName, unique_id=None)
        rpc_interface = common.get_rpyc.RemoteFetchInterface()
        rpc_interface.check_ok()
        ret = rpc_interface.dispatch_request(raw_job)
        rpc_interface.close()
        ret['jobid'] = jid
        # NOTE(review): expect_partials is passed positionally here but the
        # visible process_response_items signature has no such parameter —
        # an 'expect_partials' parameter was presumably lost in extraction.
        ret = self.process_response_items([jid], expect_partials, preload_rets=[ret])
        if (not expect_partials):
            ret = next(ret)
        return ret

    def __blocking_dispatch_call_remote(self, remote_cls, call_kwargs, meta=None, expect_partials=False, job_unique_id=None):
        """Dispatch remotely, then block on the response generator."""
        jobid = self.put_job(remote_cls, call_kwargs, meta, job_unique_id=job_unique_id)
        ret = self.process_response_items([jobid], expect_partials)
        if (not expect_partials):
            ret = next(ret)
        return ret

    def blocking_dispatch_call(self, remote_cls, call_kwargs, meta=None, expect_partials=False, local=DO_LOCAL, job_unique_id=None):
        """Route a blocking call to the local or remote dispatcher."""
        if local:
            return self.__blocking_dispatch_call_local(remote_cls=remote_cls, call_kwargs=call_kwargs, meta=meta, expect_partials=expect_partials)
        else:
            return self.__blocking_dispatch_call_remote(remote_cls=remote_cls, call_kwargs=call_kwargs, meta=meta, expect_partials=expect_partials, job_unique_id=job_unique_id)

    def pprint_resp(self, resp):
        """Log a response verbosely, including any remote traceback."""
        if (len(resp) == 2):
            (logmsg, response_data) = resp
            self.print_remote_log(logmsg)
        for line in pprint.pformat(resp).split('\n'):
            self.log.info(line)
        if ('traceback' in resp):
            if isinstance(resp['traceback'], str):
                trace_arr = resp['traceback'].split('\n')
            else:
                trace_arr = resp['traceback']
            for line in trace_arr:
                self.log.error(line)

    def print_remote_log(self, log_lines, debug=False):
        """Re-emit remote log lines at their original severity levels."""
        calls = {'[DEBUG] ->': (self.remote_log.debug if debug else None), '[INFO] ->': self.remote_log.info, '[ERROR] ->': self.remote_log.error, '[CRITICAL] ->': self.remote_log.critical, '[WARNING] ->': self.remote_log.warning}
        for line in log_lines:
            for (key, log_call) in calls.items():
                if ((key in line) and log_call):
                    log_call(line)

    # NOTE(review): preload_rets=[] is a mutable default argument, and callers
    # pass an extra positional argument (expect_partials) — the signature was
    # presumably wider in the original source. The nesting below is a
    # best-effort reconstruction from flattened text; verify against upstream.
    def process_response_items(self, jobids, preload_rets=[], timeout=None):
        """Generator yielding (raw_response, payload) for each awaited job.

        Partial responses refresh the timeout; final responses remove their
        job id from *jobids*. Raises RpcTimeoutError when the countdown
        expires with jobs outstanding.
        """
        self.log.info('Waiting for remote response (preloaded: %s)', (len(preload_rets) if preload_rets else 'None'))
        if (not timeout):
            timeout = self.rpc_timeout_s
        assert isinstance(jobids, list)
        while (timeout or preload_rets):
            timeout -= 1
            if preload_rets:
                self.log.info('Have preloaded item. Using.')
                ret = preload_rets.pop(0)
            else:
                ret = self.process_responses()
            if ret:
                if ('ret' in ret):
                    if (len(ret['ret']) == 2):
                        self.print_remote_log(ret['ret'][0])
                        if (('partial' in ret) and ret['partial']):
                            # Partial result: keep waiting, reset countdown.
                            timeout = self.rpc_timeout_s
                            (yield (ret, ret['ret'][1]))
                        else:
                            (yield (ret, ret['ret'][1]))
                            if (('jobid' in ret) and (ret['jobid'] in jobids)):
                                jobids.remove(ret['jobid'])
                                self.log.info('Last partial received for job %s, %s remaining.', ret['jobid'], len(jobids))
                                if (not jobids):
                                    return
                            elif ('jobid' in ret):
                                self.log.info('Received completed job response from a previous session (%s, waiting for %s, have: %s)?', ret['jobid'], jobids, (ret['jobid'] in jobids))
                            else:
                                self.log.error("Response that's not partial, and yet has no jobid?")
                    else:
                        self.pprint_resp(ret)
                        raise RuntimeError(('Response not of length 2 (%s, %s)!' % (len(ret), (len(ret['ret']) == 2))))
                else:
                    # No 'ret' key: dump the response to disk for post-mortem.
                    with open('rerr-{}.json'.format(time.time()), 'w', encoding='utf-8') as fp:
                        fp.write(json.dumps(ret, indent=4, sort_keys=True))
                    self.pprint_resp(ret)
                    self.log.error('RPC Call has no ret value. Probably encountered a remote exception: %s', ret)
            else:
                time.sleep(1)
                print('\r`fetch_and_flush` sleeping for {} ({} items remaining)\r'.format(str(timeout).rjust(7), len(jobids)), end='', flush=True)
        raise RpcTimeoutError(('No RPC Response within timeout period (%s sec)' % self.rpc_timeout_s))
def upgrade():
    """Recreate the ``connectiontype`` enum with the supported connector values.

    Postgres enums cannot have members removed in place, so the old type is
    renamed, a fresh type is created, the column is cast over, and the old
    type is dropped.
    """
    op.execute('alter type connectiontype rename to connectiontype_old')
    # FIX: the enum literal previously contained a stray "' '" fragment
    # ("'mysql', ' 'snowflake'") which is invalid SQL; the value list is
    # now well-formed.
    op.execute("create type connectiontype as enum('postgres', 'mongodb', 'mysql', 'snowflake', 'redshift', 'mssql')")
    op.execute('alter table connectionconfig alter column connection_type type connectiontype using connection_type::text::connectiontype')
    op.execute('drop type connectiontype_old')
class TenantParams():
    """Bundle of per-tenant objects shared by the test suite.

    NOTE(review): bare annotations create no attributes on their own — a
    decorator such as ``@dataclass`` was presumably stripped during
    extraction. Confirm upstream.
    """
    path_prefix: str                               # URL prefix for this tenant's routes
    tenant: Tenant
    client: Client
    user: User
    login_session: LoginSession
    registration_session_password: RegistrationSession
    registration_session_oauth: RegistrationSession
    session_token: SessionToken
    session_token_token: tuple[str, str]           # (token id, raw token) pair
def write_header(htmlhandle): htmlhandle.write('\n <head>\n <link rel="stylesheet" href=" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">\n <style>\n body{\n background-color: rgba(0, 0, 0, 0.1);\n margin-top: 100px;\n }\n h1 {\n text-indent: 15px\n }\n </style>\n </head>\n ')