code
stringlengths
281
23.7M
class SecondaryEndpoint(BaseEndpoint):
    """Endpoint whose URL template interpolates a single ``id`` value."""

    def __call__(self, id, **kwargs):
        """Build a Url for ``id``, translating ``cursor_pagination`` into page[size].

        ``cursor_pagination=True`` requests the maximum page size (100), any
        other truthy value is used as the page size verbatim, and ``False``
        drops the key entirely; all other kwargs pass through unchanged.
        """
        params = {}
        for key, value in kwargs.items():
            if key != 'cursor_pagination':
                params[key] = value
            elif value is True:
                params['page[size]'] = 100
            elif value is not False:
                params['page[size]'] = value
            # value is False -> cursor pagination disabled; emit nothing.
        return Url(self.endpoint % dict(id=id), params=params)
# NOTE(review): the decorator lines were mangled in this dump to "()" and a
# bare option tuple; the argument list matches click's API, so the original
# was presumably ``@click.command()`` / ``@click.option(...)`` — TODO confirm
# against the project's CLI module and restore:
# @click.command()
# @click.option('--server_config_path', default='./server.gz',
#               help='The path to the previous server.gz file from a previous study.')
def resume(server_config_path: str):
    """Resume a previous study from a server config dumped by an earlier session.

    Args:
        server_config_path: Path to a ``server.gz`` file written by joblib.

    Raises:
        FileNotFoundError: if no file exists at ``server_config_path``.
    """
    try:
        config = joblib.load(server_config_path)
    except FileNotFoundError:
        raise FileNotFoundError(f'''Server config file not found at the path: {server_config_path}
Please set the path to a valid file dumped by a previous session.''')
    server = Server.from_dict(config)
    _safe_search(server)
def test_deserialize_exception_with_traceback():
    """Round-trip an exception through Failure serialization, keeping its traceback."""
    try:
        raise Exception('blah')
    except Exception as ex:
        # Bind outside the except block: `ex` is deleted when the block exits.
        caught = ex
    assert caught.__traceback__ is not None

    failure: Failure = serialize_exception(caught)
    assert isinstance(failure, Failure)

    encoded = failure_to_str(failure)
    assert isinstance(encoded, str)

    decoded: Failure = str_to_failure(encoded)
    assert isinstance(decoded, Failure)
    assert failure.to_dict() == decoded.to_dict()

    restored = deserialize_exception(decoded)
    assert isinstance(restored, Exception)
    assert restored.__traceback__ is not None
    # The formatted tracebacks must survive the round trip verbatim.
    assert traceback.format_tb(caught.__traceback__) == traceback.format_tb(restored.__traceback__)
# Pre-compiled query pattern: /xx/rnde.php?p=<signed int>&f=<int>&m=<digit>
_RNDE_QUERY_RE = re.compile(r'^/xx/rnde\.php\?p=(-?\d+)&f=(\d+)&m=(\d)$')


def parse_rnde_query(s, xor_key=0):
    """Parse a ``/xx/rnde.php`` reporting query string.

    TODO(review): the XOR de-obfuscation key was lost when this file was
    collapsed ("int(m.group(1)) ^ <missing>"); it is exposed here as the
    ``xor_key`` parameter, defaulting to 0 (no de-obfuscation), until the
    original constant is recovered.

    Args:
        s: The raw query path, e.g. '/xx/rnde.php?p=16909060&f=0&m=1'.
        xor_key: Key XORed with the reported integer before decoding it
            as a little-endian signed 32-bit IP address.

    Returns:
        A dict with 'ip_reported', 'reporter_byteorder' and 'flag_ext_scan',
        or {'error': 'PARSING ERROR!'} when ``s`` does not match.
    """
    data = {}
    m = _RNDE_QUERY_RE.search(s)
    if not m:
        data['error'] = 'PARSING ERROR!'
        return data
    packed_ip = (int(m.group(1)) ^ xor_key).to_bytes(4, byteorder='little', signed=True)
    data['ip_reported'] = socket.inet_ntoa(packed_ip)
    # f=0 means the reporter is big-endian; anything else little-endian.
    data['reporter_byteorder'] = 'big' if int(m.group(2)) == 0 else 'little'
    data['flag_ext_scan'] = bool(int(m.group(3)))
    return data
from dataclasses import dataclass  # needed for the restored decorator below


# NOTE(review): the decorator was stripped to "(frozen=True)" in this dump;
# restored as @dataclass(frozen=True), which matches the bare annotated
# fields below — TODO confirm against the original module.
@dataclass(frozen=True)
class QuotedString:
    """A string together with the quote style used to delimit it."""

    text: str
    quote: Quote

    def with_quotes(self) -> str:
        """Return ``text`` wrapped in its quote character, escaping as needed.

        Runs of backslashes immediately preceding a quote character are
        doubled so they remain literal once the quote itself is escaped.
        """
        qc = "'" if self.quote == Quote.single else '"'
        # '\\' + qc rather than the original f'\{qc}': that relied on an
        # invalid escape sequence (SyntaxWarning on modern Python) to produce
        # the same backslash-plus-quote value.
        esc_qc = '\\' + qc
        match = None
        if '\\' in self.text:
            # Append the closing quote so a trailing backslash run matches too.
            text = self.text + qc
            pattern = _ESC_QUOTED_STR[qc]
            match = pattern.search(text)
        if match is None:
            # No backslash-before-quote sequences: a plain replace suffices.
            esc_text = self.text.replace(qc, esc_qc)
            return f'{qc}{esc_text}{qc}'
        tokens = []
        while match is not None:
            (start, stop) = match.span()
            tokens.append(text[0:start])
            # Double every backslash in the run (the match also covers the
            # quote character, hence the -1).
            new_n_backslashes = ((stop - start) - 1) * 2
            tokens.append('\\' * new_n_backslashes)
            if stop < len(text):
                tokens.append(qc)
            text = text[stop:]
            match = pattern.search(text)
        if len(text) > 1:
            # Drop the sentinel quote appended above.
            tokens.append(text[0:-1])
        esc_text = ''.join(tokens).replace(qc, esc_qc)
        return f'{qc}{esc_text}{qc}'
def compare_bkz(classes, matrixf, dimensions, block_sizes, progressive_step_size,
                seed, threads=2, samples=2, tours=1, pickle_jar=None, logger='compare'):
    """Benchmark several BKZ implementations across dimensions and block sizes.

    For every dimension, and every block size not exceeding it, ``samples``
    random lattices are generated (with deterministic per-sample seeds) and
    each class in ``classes`` is scheduled to run on each lattice.  The
    resulting job tree is executed by a Conductor.
    """
    jobs = []
    for dimension in dimensions:
        per_dimension = []
        jobs.append((dimension, per_dimension))
        for block_size in block_sizes:
            if dimension < block_size:
                # A block size larger than the lattice dimension is meaningless.
                continue
            seed_ = seed
            tagged_args = []
            matrix_kwds = matrixf(dimension=dimension, block_size=block_size)
            for _ in range(samples):
                FPLLL.set_random_seed(seed_)
                A = IntegerMatrix.random(dimension, **matrix_kwds)
                for BKZ_ in classes:
                    run_args = (BKZ_, A, block_size, tours, progressive_step_size)
                    tagged_args.append(((BKZ_.__name__, seed_), run_args))
                seed_ += 1
            per_dimension.append((block_size, tagged_args))
    conductor = Conductor(threads=threads, pickle_jar=pickle_jar, logger=logger)
    return conductor(jobs)
def compute_sparsity(pReferencePoints, pViewpointObj, pArgs, pQueue):
    """Worker: compute viewpoint sparsity per reference point, reporting via pQueue.

    Sparsity is the fraction of non-zero entries in the computed viewpoint
    data.  A reference point that is missing, lies on an unknown chromosome,
    or whose viewpoint computation fails yields -1.0.  Any unexpected error
    puts a 'Fail: ...' string on the queue instead of the result list.
    """
    sparsity_list = []
    try:
        chromosome_names = pViewpointObj.hicMatrix.getChrNames()
        for referencePoint in pReferencePoints:
            if referencePoint is None or referencePoint[0] not in chromosome_names:
                sparsity_list.append(-1.0)
                continue
            region_start, region_end, _ = pViewpointObj.calculateViewpointRange(
                referencePoint, (pArgs.fixateRange, pArgs.fixateRange))
            try:
                data_list, _ = pViewpointObj.computeViewpoint(
                    referencePoint, referencePoint[0], region_start, region_end)
                sparsity = np.count_nonzero(data_list) / len(data_list)
            except (TypeError, IndexError):
                # Malformed viewpoint data: record a sentinel instead of failing.
                sparsity = -1.0
            sparsity_list.append(sparsity)
    except Exception as exp:
        # Surface the failure through the queue so the parent process notices.
        pQueue.put('Fail: ' + str(exp))
        return
    pQueue.put(sparsity_list)
# NOTE(review): this chunk comes from a dump in which original source lines
# were collapsed; statements run across the physical line breaks below (e.g.
# "suffix = " continues with "'.'.join(...)"), so no inline comments can be
# added safely and the code is left byte-identical.
# _SkillComponentLoader loads an AEA skill package: it discovers the skill's
# python modules, collects SkillComponent subclasses defined in them, matches
# those classes against the handlers/behaviours/models declared in the
# SkillConfig (by explicit file path first, then by class name), instantiates
# the matched components, and installs them on the Skill object.  Unused,
# non-declared classes trigger a warning.  The matching logic is highly
# order-sensitive, so it is intentionally not reformatted here.
class _SkillComponentLoader(): def __init__(self, configuration: SkillConfig, skill_context: SkillContext, **kwargs: Any): enforce((configuration.directory is not None), 'Configuration not associated to directory.') self.configuration = configuration self.skill_directory = cast(Path, configuration.directory) self.skill_context = skill_context self.kwargs = kwargs self.skill = Skill(self.configuration, self.skill_context, **self.kwargs) self.skill_dotted_path = f'packages.{self.configuration.public_id.author}.skills.{self.configuration.public_id.name}' def load_skill(self) -> Skill: load_aea_package(self.configuration) python_modules: Set[Path] = self._get_python_modules() declared_component_classes: Dict[(_SKILL_COMPONENT_TYPES, Dict[(str, SkillComponentConfiguration)])] = self._get_declared_skill_component_configurations() component_classes_by_path: Dict[(Path, Set[Tuple[(str, Type[SkillComponent])]])] = self._load_component_classes(python_modules) component_loading_items = self._match_class_and_configurations(component_classes_by_path, declared_component_classes) components = self._get_component_instances(component_loading_items) self._update_skill(components) return self.skill def _update_skill(self, components: _ComponentsHelperIndex) -> None: self.skill.handlers.update(cast(Dict[(str, Handler)], components.get(Handler, {}))) self.skill.behaviours.update(cast(Dict[(str, Behaviour)], components.get(Behaviour, {}))) self.skill.models.update(cast(Dict[(str, Model)], components.get(Model, {}))) self.skill._set_models_on_context() def _get_python_modules(self) -> Set[Path]: ignore_regex = '__pycache__*' all_python_modules = self.skill_directory.rglob('*.py') module_paths: Set[Path] = set(map((lambda p: Path(p).relative_to(self.skill_directory)), filter((lambda x: (not re.match(ignore_regex, x.name))), all_python_modules))) return module_paths def _compute_module_dotted_path(cls, module_path: Path) -> str: suffix = 
'.'.join(module_path.with_name(module_path.stem).parts) return suffix def _filter_classes(self, classes: List[Tuple[(str, Type)]]) -> List[Tuple[(str, Type[SkillComponent])]]: filtered_classes = filter((lambda name_and_class: (issubclass(name_and_class[1], SkillComponent) and (not str.startswith(name_and_class[1].__module__, 'aea.')) and (not str.startswith(name_and_class[1].__module__, (self.skill_dotted_path + '.'))))), classes) classes = list(filtered_classes) return cast(List[Tuple[(str, Type[SkillComponent])]], classes) def _load_component_classes(self, module_paths: Set[Path]) -> Dict[(Path, Set[Tuple[(str, Type[SkillComponent])]])]: module_to_classes: Dict[(Path, Set[Tuple[(str, Type[SkillComponent])]])] = {} for module_path in module_paths: self.skill_context.logger.debug(f'Trying to load module {module_path}') module_dotted_path: str = self._compute_module_dotted_path(module_path) component_module: types.ModuleType = load_module(module_dotted_path, (self.skill_directory / module_path)) classes: List[Tuple[(str, Type)]] = inspect.getmembers(component_module, inspect.isclass) filtered_classes: List[Tuple[(str, Type[SkillComponent])]] = self._filter_classes(classes) module_to_classes[module_path] = set(filtered_classes) return module_to_classes def _get_declared_skill_component_configurations(self) -> Dict[(_SKILL_COMPONENT_TYPES, Dict[(str, SkillComponentConfiguration)])]: handlers_by_id = dict(self.configuration.handlers.read_all()) behaviours_by_id = dict(self.configuration.behaviours.read_all()) models_by_id = dict(self.configuration.models.read_all()) result: Dict[(_SKILL_COMPONENT_TYPES, Dict[(str, SkillComponentConfiguration)])] = {} for (component_type, components_by_id) in [(Handler, handlers_by_id), (Behaviour, behaviours_by_id), (Model, models_by_id)]: for (component_id, component_config) in components_by_id.items(): result.setdefault(component_type, {})[component_id] = component_config return result def _get_component_instances(self, 
component_loading_items: List[_SkillComponentLoadingItem]) -> _ComponentsHelperIndex: result: _ComponentsHelperIndex = {} for item in component_loading_items: instance = item.class_(name=item.name, configuration=item.config, skill_context=self.skill_context, **item.config.args) result.setdefault(item.type_, {})[item.name] = instance return result def _get_skill_component_type(cls, skill_component_type: Type[SkillComponent]) -> Type[Union[(Handler, Behaviour, Model)]]: parent_skill_component_types = list(filter((lambda class_: (class_ in (Handler, Behaviour, Model))), skill_component_type.__mro__)) enforce((len(parent_skill_component_types) == 1), f'Class {skill_component_type.__name__} in module {skill_component_type.__module__} is not allowed to inherit from more than one skill component type. Found: {parent_skill_component_types}.') return cast(Type[Union[(Handler, Behaviour, Model)]], parent_skill_component_types[0]) def _match_class_and_configurations(self, component_classes_by_path: Dict[(Path, Set[Tuple[(str, Type[SkillComponent])]])], declared_component_classes: Dict[(_SKILL_COMPONENT_TYPES, Dict[(str, SkillComponentConfiguration)])]) -> List[_SkillComponentLoadingItem]: result: List[_SkillComponentLoadingItem] = [] class_index: Dict[(str, Dict[(_SKILL_COMPONENT_TYPES, Set[Type[SkillComponent]])])] = {} used_classes: Set[Type[SkillComponent]] = set() not_resolved_configurations: Dict[(Tuple[(_SKILL_COMPONENT_TYPES, str)], SkillComponentConfiguration)] = {} for (_path, component_classes) in component_classes_by_path.items(): for (component_classname, _component_class) in component_classes: type_ = self._get_skill_component_type(_component_class) class_index.setdefault(component_classname, {}).setdefault(type_, set()).add(_component_class) for (component_type, by_id) in declared_component_classes.items(): for (component_id, component_config) in by_id.items(): path = component_config.file_path class_name = component_config.class_name if (path is not None): 
classes_in_path = component_classes_by_path[path] component_class_or_none: Optional[Type[SkillComponent]] = next((actual_class for (actual_class_name, actual_class) in classes_in_path if (actual_class_name == class_name)), None) enforce((component_class_or_none is not None), (self._get_error_message_prefix() + f"Cannot find class '{class_name}' for component '{component_id}' of type '{self._type_to_str(component_type)}' of skill '{self.configuration.public_id}' in module {path}")) component_class = cast(Type[SkillComponent], component_class_or_none) actual_component_type = self._get_skill_component_type(component_class) enforce((actual_component_type == component_type), (self._get_error_message_prefix() + f"Found class '{class_name}' for component '{component_id}' of type '{self._type_to_str(component_type)}' of skill '{self.configuration.public_id}' in module {path}, but the expected type was {self._type_to_str(component_type)}, found {self._type_to_str(actual_component_type)} ")) used_classes.add(component_class) result.append(_SkillComponentLoadingItem(component_id, component_config, component_class, component_type)) else: not_resolved_configurations[(component_type, component_id)] = component_config for ((component_type, component_id), component_config) in copy(not_resolved_configurations).items(): class_name = component_config.class_name classes_by_type = class_index.get(class_name, {}) enforce(((class_name in class_index) and (component_type in classes_by_type)), (self._get_error_message_prefix() + f"Cannot find class '{class_name}' for skill component '{component_id}' of type '{self._type_to_str(component_type)}'")) classes = classes_by_type[component_type] not_used_classes = classes.difference(used_classes) enforce((not_used_classes != 0), f"Cannot find class of skill '{self.configuration.public_id}' for component configuration '{component_id}' of type '{self._type_to_str(component_type)}'.") enforce((len(not_used_classes) == 1), 
self._get_error_message_ambiguous_classes(class_name, not_used_classes, component_type, component_id)) not_used_class = list(not_used_classes)[0] result.append(_SkillComponentLoadingItem(component_id, component_config, not_used_class, component_type)) used_classes.add(not_used_class) self._print_warning_message_for_unused_classes(component_classes_by_path, used_classes) return result def _print_warning_message_for_unused_classes(self, component_classes_by_path: Dict[(Path, Set[Tuple[(str, Type[SkillComponent])]])], used_classes: Set[Type[SkillComponent]]) -> None: for (path, set_of_class_name_pairs) in component_classes_by_path.items(): set_of_classes = {pair[1] for pair in set_of_class_name_pairs} set_of_unused_classes = set(filter((lambda x: (x not in used_classes)), set_of_classes)) set_of_unused_classes = set(filter((lambda x: (not str.startswith(x.__module__, 'packages.'))), set_of_unused_classes)) if (len(set_of_unused_classes) == 0): continue for unused_class in set_of_unused_classes: component_type_class = self._get_skill_component_type(unused_class) if (issubclass(unused_class, (Handler, Behaviour)) and cast(Union[(Handler, Behaviour)], unused_class).is_programmatically_defined): continue _print_warning_message_for_non_declared_skill_components(self.skill_context, {unused_class.__name__}, set(), self._type_to_str(component_type_class), str(path)) def _type_to_str(cls, component_type: _SKILL_COMPONENT_TYPES) -> str: return component_type.__name__.lower() def _get_error_message_prefix(self) -> str: return f"Error while loading skill '{self.configuration.public_id}': " def _get_error_message_ambiguous_classes(self, class_name: str, not_used_classes: Set, component_type: _SKILL_COMPONENT_TYPES, component_id: str) -> str: return f"{self._get_error_message_prefix()}found many classes with name '{class_name}' for component '{component_id}' of type '{self._type_to_str(component_type)}' in the following modules: {', '.join([c.__module__ for c in not_used_classes])}"
def main():
    """Benchmark-runner entry point: parse CLI args, then either profile one
    framework or run the benchmark suite and print a results table."""
    frameworks = ['bottle', 'django', 'falcon', 'falcon-ext', 'flask', 'pecan', 'werkzeug']

    parser = argparse.ArgumentParser(description='Falcon benchmark runner')
    parser.add_argument('-b', '--benchmark', type=str, action='append',
                        choices=frameworks, dest='frameworks', nargs='+')
    parser.add_argument('-i', '--iterations', type=int, default=0)
    parser.add_argument('-t', '--trials', type=int, default=10)
    parser.add_argument('-p', '--profile', type=str,
                        choices=['standard', 'verbose', 'vmprof'])
    parser.add_argument('-o', '--profile-output', type=str, default=None)
    parser.add_argument('-m', '--stat-memory', action='store_true')
    args = parser.parse_args()

    if args.stat_memory and heapy is None:
        print('WARNING: Guppy not installed; memory stats are unavailable.\n')

    if args.frameworks:
        # action='append' combined with nargs='+' yields a list of lists; flatten.
        normalized_frameworks = []
        for one_or_many in args.frameworks:
            if isinstance(one_or_many, list):
                normalized_frameworks.extend(one_or_many)
            else:
                normalized_frameworks.append(one_or_many)
        frameworks = normalized_frameworks

    if args.profile:
        # Profiling mode always targets the Cython-extension variant.
        framework = 'falcon-ext'
        if args.profile == 'vmprof':
            profile_vmprof(framework, get_env(framework))
        else:
            profile(framework, get_env(framework),
                    filename=args.profile_output,
                    verbose=(args.profile == 'verbose'))
        print()
        return

    datasets = run(frameworks, args.trials, args.iterations, args.stat_memory)
    if not datasets:
        return

    dataset = sorted(consolidate_datasets(datasets), key=lambda r: r[1])
    # Slowest framework (largest sec/req) is the 1x baseline.
    baseline = dataset[-1][1]

    print('\nResults:\n')
    for i, (name, sec_per_req) in enumerate(dataset):
        req_per_sec = round_to_int(Decimal(1) / sec_per_req)
        us_per_req = sec_per_req * Decimal(10 ** 6)
        factor = round_to_int(baseline / sec_per_req)
        # NOTE(review): this format string was split mid-literal in the dump;
        # rejoined here without an embedded newline — TODO confirm upstream.
        print('{3}. {0:.<20s}{1:.>06d} req/sec or {2: >3.2f} s/req ({4}x)'.format(
            name, req_per_sec, us_per_req, (i + 1), factor))

    if heapy and args.stat_memory:
        print()
        for name, _, heap_diff in datasets[0]:
            title = 'Memory change induced by ' + name
            print()
            print('=' * len(title))
            print(title)
            print('=' * len(title))
            print(heap_diff)
        print()
# NOTE(review): the caching decorators were stripped from this dump — the
# bare "()" markers and the dangling "_client('virtualenv', keep_alive=30,
# requirements=['pyjokes '])" call were presumably decorator applications on
# get_pipe / factorial / regular_function.  Without them the 'computing'
# count assertions (which test result caching keyed on source text — note the
# timestamp appended via the patched inspect.getsource) cannot hold, so the
# code is left byte-identical for reconstruction against the original repo.
def test_cached_function(isolated_client, capsys, monkeypatch): import inspect import time test_stamp = time.time() real_getsource = inspect.getsource def add_timestamp_to_source(func): return (real_getsource(func) + f''' # {test_stamp}''') monkeypatch.setattr(inspect, 'getsource', add_timestamp_to_source) def get_pipe(): import time print('computing') time.sleep(1) return 'pipe' def factorial(n: int) -> int: import math import time print('computing') time.sleep(1) return math.factorial(n) _client('virtualenv', keep_alive=30, requirements=['pyjokes ']) def regular_function(n): if (get_pipe() == 'pipe'): return factorial(n) assert (regular_function(4) == 24) (out, err) = capsys.readouterr() assert (out.count('computing') == 2) assert (regular_function(3) == 6) (out, err) = capsys.readouterr() assert (out.count('computing') == 1) assert (regular_function(4) == 24) (out, err) = capsys.readouterr() assert (out.count('computing') == 0) assert (regular_function(3) == 6) (out, err) = capsys.readouterr() assert (out.count('computing') == 0) assert (regular_function(5) == 120) (out, err) = capsys.readouterr() assert (out.count('computing') == 1)
class OptionSeriesWordcloudSonificationContexttracksMappingPan(Options):
    """Pan-mapping options for wordcloud sonification context tracks.

    NOTE(review): the decorators were stripped in this dump, leaving duplicate
    ``def``s that silently shadow each other (only the last setter-shaped
    definition of each name would survive).  Each getter/setter pair is
    restored as @property / @<name>.setter, the usual shape of these generated
    option wrappers — TODO confirm against the generator template.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the test marker lost its prefix in this dump — "('mopac')"
# was presumably something like @using('mopac') — and the reference values
# "(- 0.)" / "0." appear truncated (an H2O energy/force-norm would not be
# exactly zero).  Left byte-identical pending recovery from the original
# test module; verifies MOPAC energy, force norm and normal-mode frequencies
# for water against reference values.
('mopac') def test_mopac(): geom = geom_loader('lib:h2o.xyz') calc = MOPAC() geom.set_calculator(calc) forces = geom.forces norm = np.linalg.norm(forces) energy = geom.energy assert (energy == pytest.approx((- 0.))) assert (norm == pytest.approx(0.)) (nus, *_) = geom.get_normal_modes() ref = [1370.787877, 2780.770355, 2786.987219] np.testing.assert_allclose(nus, ref, atol=0.01)
class CheckExtensionAction(argparse.Action):
    """argparse action that reports the Nautilus Terminal install state and exits.

    Exits with status 0 when both Nautilus Python and exactly one copy of the
    extension (system-wide XOR per-user) are installed, 1 otherwise.  Must be
    run as a regular (non-root) user.
    """

    def __call__(self, parser, namespace, value, option_string=None):
        if os.getuid() == 0:
            print('E: You must run nautilus-terminal as regular user to perform an installation check.')
            sys.exit(1)

        result = ''
        retcode = 0

        result += '\x1b[1mNautilus Python:\x1b[0m '
        if is_nautilus_python_installed():
            result += '\x1b[1;32mInstalled\x1b[0m\n'
        else:
            retcode = 1
            result += '\x1b[1;31mAbsent\x1b[0m\n'
            result += ' Please install Nautilus Python. Please read the documentation:\n'
            # NOTE(review): the documentation URL literal was stripped when this
            # file was dumped (it left "result += ' result += '", a syntax
            # error); placeholder below — TODO restore the real URL.
            result += ' <see the Nautilus Terminal documentation>\n'

        result += '\x1b[1mNautilus Terminal Extension:\x1b[0m '
        system_installed = is_system_extension_installed()
        user_installed = is_user_extension_installed()
        # Installed exactly once (system-wide XOR per-user) is the healthy state.
        if system_installed != user_installed:
            result += '\x1b[1;32mInstalled\x1b[0m\n'
        elif system_installed and user_installed:
            retcode = 1
            result += '\x1b[1;31mError\x1b[0m\n'
            result += ' Nautilus Terminal extension is installed twice...\n'
            result += ' Please remove one of the installed extentions using one of the following commands:\n'
            result += ' \x1b[1;34mSystem-wide:\x1b[0m sudo nautilus-terminal --uninstall-system\n'
            result += ' \x1b[1;34mCurrent user:\x1b[0m nautilus-terminal --uninstall-user\n'
            result += " NOTE: you may need to replace the 'nautilus-terminal' command by 'python3 -m nautilus_terminal'."
        else:
            retcode = 1
            result += '\x1b[1;31mAbsent\x1b[0m\n'
            result += ' Please install the Nautilus Extension with one of the following commands:\n'
            result += ' \x1b[1;34mSystem-wide:\x1b[0m sudo nautilus-terminal --install-system\n'
            result += ' \x1b[1;34mCurrent user:\x1b[0m nautilus-terminal --install-user\n'
            result += " NOTE: you may need to replace the 'nautilus-terminal' command by 'python3 -m nautilus_terminal'."

        print(result)
        sys.exit(retcode)
class OptionSeriesPictorialSonificationContexttracksMappingTime(Options):
    """Time-mapping options for pictorial sonification context tracks.

    NOTE(review): the decorators were stripped in this dump, leaving duplicate
    ``def``s that silently shadow each other (only the last setter-shaped
    definition of each name would survive).  Each getter/setter pair is
    restored as @property / @<name>.setter, the usual shape of these generated
    option wrappers — TODO confirm against the generator template.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): this Discord cog is heavily damaged by the dump: every URL
# literal was stripped (leaving dangling 'url = f" async with ...' and
# 'url = ' fragments) and the command decorators were reduced to bare
# "(invoke_without_command=True, hidden=True)" / "()" markers (presumably
# @commands.group / @stats.command).  Statements also continue across the
# physical line breaks below, so the code is left byte-identical with only
# this summary added.  The cog tracks YouTube channel stats, Discord member
# counts and Piston API usage, persists a snapshot in ../state.json, and
# reports deltas since the previous run.
class Stats(commands.Cog, name='Stats'): def __init__(self, client): self.client = client self.last_time = self.load_stats() async def cog_check(self, ctx): return self.client.user_is_admin(ctx.author) def load_state(self): with open('../state.json', 'r') as statefile: return json.load(statefile) def load_stats(self): state = self.load_state() stats = state.get('stats', dict()) if (not isinstance(stats, dict)): stats = dict() return stats def save_stats(self, stats): state = self.load_state() state['stats'] = stats with open('../state.json', 'w') as statefile: return json.dump(state, statefile, indent=1) (invoke_without_command=True, hidden=True) async def stats(self, ctx): (await ctx.typing()) url = f" async with self.client.session.get(url) as response: r = (await response.json()) statistics = r['items'][0]['statistics'] subs = int(statistics['subscriberCount']) vids = int(statistics['videoCount']) views = int(statistics['viewCount']) disc_members = ctx.channel.guild.member_count now = datetime.now().isoformat() yesterday = (datetime.now() - timedelta(seconds=((60 * 60) * 24))).isoformat() piston_url = f' async with self.client.session.get(piston_url) as response: piston_count = (await response.json()).get('count', 0) piston_url += '&category=bot' async with self.client.session.get(piston_url) as response: piston_bot_count = (await response.json()).get('count', 0) time_diff = int(((time.time() - self.last_time.get('time', (- 1))) // 60)) disc_diff = (disc_members - self.last_time.get('disc', 0)) subs_diff = (subs - self.last_time.get('subs', 0)) vids_diff = (vids - self.last_time.get('vids', 0)) views_diff = (views - self.last_time.get('views', 0)) piston_diff = (piston_count - self.last_time.get('piston_count', 0)) bot_diff = (piston_bot_count - self.last_time.get('piston_bot_count', 0)) self.last_time = {'time': time.time(), 'disc': disc_members, 'subs': subs, 'vids': vids, 'views': views, 'piston_count': piston_count, 'piston_bot_count': piston_bot_count} 
self.save_stats(self.last_time) response = ['```css', f''' Discord members: [{disc_members}] ''', f"{('+ ' if (disc_diff > 0) else '')}", f"{(str(disc_diff).replace('-', '- ') * bool(disc_diff))}", f''' YouTube subs: [{subs}] ''', f"{('+ ' if (subs_diff > 0) else '')}", f"{(str(subs_diff).replace('-', '- ') * bool(subs_diff))}", f''' YouTube videos: [{vids}] ''', f"{('+ ' if (vids_diff > 0) else '')}", f"{(str(vids_diff).replace('-', '- ') * bool(vids_diff))}", f''' YouTube views: [{views}] {('+ ' if (views_diff > 0) else '')}''', f"{(str(views_diff).replace('-', '- ') * bool(views_diff))}", f''' Piston stats: IRunCode calls/day: [{piston_bot_count}] {('+ ' if (bot_diff > 0) else '')}''', f"{(str(bot_diff).replace('-', '- ') * bool(bot_diff))}", f''' Total calls/day: [{piston_count}] {('+ ' if (piston_diff > 0) else '')}''', f"{(str(piston_diff).replace('-', '- ') * bool(piston_diff))}", f'````last run: ', f"{((str(time_diff) + ' minutes ago') if (time_diff >= 0) else 'N/A')}`"] (await ctx.send(''.join(response))) () async def users(self, ctx, n: typing.Optional[int]=30): (await ctx.typing()) params = {'start': (datetime.utcnow() - timedelta(days=n)).isoformat(), 'limit': 25} url = ' async with self.client.session.get(url, params=params) as response: res = (await response.json()) padding = max([len(i['user']) for i in res]) formatted = [(i['user'].ljust((padding + 2)) + str(i['messages'])) for i in res] (await ctx.send((('```css\n' + '\n'.join(formatted)) + '```'))) () async def channels(self, ctx, user: Member=None, n: typing.Optional[int]=30): (await ctx.typing()) try: params = {'start': (datetime.utcnow() - timedelta(days=n)).isoformat(), 'limit': 25} except OverflowError: return if user: params['discord_id'] = user.id url = ' async with self.client.session.get(url, params=params) as response: res = (await response.json()) padding = max([len(i['channel']) for i in res]) formatted = [(i['channel'].ljust((padding + 2)) + str(i['messages'])) for i in res] (await 
ctx.send((('```css\n' + '\n'.join(formatted)) + '```'))) () async def uptime(self, ctx): uptime = str((datetime.now() - self.client.felix_start)) (await ctx.send(f'```{uptime[:(- 7)]}```'))
def _assert_equal_entries(utest, found, output, count=None):
    """Assert that a found log entry matches the expected ``output`` entry.

    ``count``, when truthy, overrides the expected failure count.  Times are
    compared via MyTime.localtime (with the raw epoch values re-asserted on
    mismatch for a clearer failure message).  The matched-lines field is only
    compared when present and no count override is given; carriage returns
    are normalised away on non-LF platforms and cygwin.
    """
    utest.assertEqual(found[0], output[0])
    utest.assertEqual(found[1], count or output[1])

    found_time = MyTime.localtime(found[2])
    output_time = MyTime.localtime(output[2])
    try:
        utest.assertEqual(found_time, output_time)
    except AssertionError:
        # Include the raw epoch values so the failure message is diagnosable.
        utest.assertEqual((float(found[2]), found_time), (float(output[2]), output_time))

    if len(output) > 3 and count is None:
        if os.linesep != '\n' or sys.platform.startswith('cygwin'):
            def srepr(x):
                return repr(x).replace('\\r', '')
        else:
            srepr = repr
        utest.assertEqual(srepr(found[3]), srepr(output[3]))
def test_get_identity():
    """get_identity() must echo back the identity the repository was built with."""
    identities = (
        proto_source.SourceRepository.SourceIdentity.SRCID_UNSPECIFIED,
        proto_source.SourceRepository.SourceIdentity.SRCID_ENVOY,
        proto_source.SourceRepository.SourceIdentity.SRCID_NIGHTHAWK,
    )
    for source_id in identities:
        repository = proto_source.SourceRepository(identity=source_id)
        tree = source_tree.SourceTree(repository)
        assert tree.get_identity() == source_id
# NOTE(review): the decorator lost its prefix in this dump (".skipif(...)");
# the signature is pytest's skipif marker, so it is restored as
# @pytest.mark.skipif — TODO confirm against the original test module.
@pytest.mark.skipif(not coreapi, reason='coreapi is not installed')
def test_schema_handles_exception():
    """A schema view whose permission class denies access must render a 403."""
    schema_view = get_schema_view(permission_classes=[DenyAllUsingPermissionDenied])
    request = factory.get('/')
    response = schema_view(request)
    response.render()
    assert response.status_code == 403
    assert b'You do not have permission to perform this action.' in response.content
class NesterovAcceleratedGradient():
    """Nesterov accelerated gradient (NAG) optimizer.

    The gradient is evaluated at the look-ahead point
    ``w - momentum * w_updt`` and clipped to [-1, 1] before the momentum
    update is applied.
    """

    def __init__(self, learning_rate=0.001, momentum=0.4):
        self.learning_rate = learning_rate
        self.momentum = momentum
        # Velocity vector; lazily sized to match w on the first update() call.
        self.w_updt = np.array([])

    def update(self, w, grad_func):
        """Return updated weights for ``w`` given a gradient callable.

        Bug fix: the velocity must be initialised to zeros *before* the
        look-ahead gradient is computed — the original evaluated
        ``w - momentum * w_updt`` against the empty initial array, which
        raises a broadcasting error on the first call for any non-empty w.
        """
        if not self.w_updt.any():
            self.w_updt = np.zeros(np.shape(w))
        # Gradient at the look-ahead position, clipped for stability.
        approx_future_grad = np.clip(grad_func(w - self.momentum * self.w_updt), -1, 1)
        self.w_updt = self.momentum * self.w_updt + self.learning_rate * approx_future_grad
        return w - self.w_updt
def test_compiler_errors(solc4source, solc5source):
    """Compiling a source with a mismatched solc version must raise CompilerError."""
    mismatched = (
        (solc4source, '0.5.7'),   # 0.4-style source under a 0.5 compiler
        (solc5source, '0.4.25'),  # 0.5-style source under a 0.4 compiler
    )
    for source, version in mismatched:
        with pytest.raises(CompilerError):
            compiler.compile_and_format({'path.sol': source}, solc_version=version)
def is_boxplot(name, f):
    """Return True if filename ``f`` is a boxplot image belonging to sample ``name``.

    A boxplot file ends in '_boxplot.png' and starts with the sample's root
    name followed by '.' or '_'.  SOLiD-style infixes ('_QV', '_F3_',
    '_F5-BC_') are stripped cumulatively, re-testing the prefix after each.
    """
    if not f.endswith('_boxplot.png'):
        return False
    prefixes = (
        os.path.basename(utils.rootname(name)) + '.',
        os.path.basename(strip_ngs_extensions(name)) + '_',
    )
    candidate = f
    if candidate.startswith(prefixes):
        return True
    # Progressively remove SOLiD naming infixes and re-check the prefix.
    for old, new in (('_QV', ''), ('_F3_', '_'), ('_F5-BC_', '_')):
        candidate = candidate.replace(old, new)
        if candidate.startswith(prefixes):
            return True
    return False
def generateTCF3(iterationsMap, iteration, t):
    """Build an Eiffel TestCaseFinished (TCF3) event linked to its TCT3 trigger."""
    msg = generateGenericMessage('EiffelTestCaseFinishedEvent', t, '1.0.0', 'TCF3', iteration)
    link(msg, iterationsMap[iteration]['TCT3'], 'TEST_CASE_EXECUTION')
    # Verdict passes ~99% of the time; the conclusion is always SUCCESSFUL.
    msg['data']['outcome'] = {
        'verdict': randomizeVerdict(0.99),
        'conclusion': 'SUCCESSFUL',
    }
    return msg
# Date ranges (in days) the API accepts; anything else falls back to the default.
_ALLOWED_DATE_RANGES = (7, 30)
_DEFAULT_DATE_RANGE = 7


def lambda_handler(event, context):
    """API Gateway handler: return the caller's quiz results for a date range.

    The Cognito user id comes from the authorizer claims; the optional JSON
    body may carry ``date_range`` (7 or 30 days, defaulting to 7).  Failures
    in the results service are reported as a 502 response.
    """
    print(event)
    cognito_id = event['requestContext']['authorizer']['claims']['sub']

    date_range = _DEFAULT_DATE_RANGE
    if event['body'] is not None:  # original used "!= None"
        event_body = json.loads(event['body'])
        if event_body['date_range'] in _ALLOWED_DATE_RANGES:
            date_range = event_body['date_range']

    try:
        quiz_results = quiz_results_service.retrieve_quiz_results(cognito_id, date_range)
    except Exception as e:
        # Surface the failure as a 502 instead of letting the lambda crash.
        print(e)
        return api_response.response(502, 'Failed to retrieve quiz results')
    return api_response.response(200, 'Successfully retrieved quiz results', quiz_results)
# NOTE(review): Gtk.TreeStore subclass from a collapsed dump; a statement runs
# across the physical line break below, and the two-level store's invariants
# (row[0]=selected flag, row[1]=name, row[2]=is-group, row[3]=Pango weight;
# top-level rows are categories, children are groups) plus the drag/drop
# overrides are too coupled to GTK iterator semantics to reformat safely, so
# the code is left byte-identical with only this summary added.
class GroupTaggerTreeStore(Gtk.TreeStore, Gtk.TreeDragSource, Gtk.TreeDragDest): def __init__(self): super(GroupTaggerTreeStore, self).__init__(GObject.TYPE_BOOLEAN, GObject.TYPE_STRING, GObject.TYPE_BOOLEAN, GObject.TYPE_INT) self.set_sort_column_id(1, Gtk.SortType.ASCENDING) def add_category(self, category): for row in self: if (row[1] == category): return False self.append(None, [True, category, False, Pango.Weight.BOLD]) return True def add_group(self, group, category=uncategorized, selected=True): for row in self: if (row[1] == category): for chrow in row.iterchildren(): if (chrow[1] == group): row[0] = selected return False self.append(row.iter, [selected, group, True, Pango.Weight.NORMAL]) return True it = self.append(None, [True, category, False, Pango.Weight.BOLD]) self.append(it, [selected, group, True, Pango.Weight.NORMAL]) return True def change_name(self, path, name): old = self[path][1] self[path][1] = name return old def delete_selected_categories(self, paths): categories = {} iters = [self.get_iter(path) for path in paths if (self[path].parent is None)] for i in iters: if (i is not None): groups = [] for ch in self[i].iterchildren(): groups.append(ch[1]) categories[self.get_value(i, 1)] = groups self.remove(i) return categories def delete_selected_groups(self, paths): groups = [] iters = [self.get_iter(path) for path in paths if (self[path].parent is not None)] for i in iters: if (i is not None): groups.append(self.get_value(i, 1)) self.remove(i) return groups def get_category(self, path): if (len(path) == 1): if len(self): return self[path][1] else: return self[(path[0],)][1] def get_category_groups(self, category): return [row[1] for row in self.iter_category(category)] def get_selected_groups(self, paths): return [self[path][1] for path in paths if (self[path].parent is not None)] def get_selected_categories(self, paths): return [self[path][1] for path in paths if (self[path].parent is None)] def is_category(self, path): return (len(path) == 1) 
def iter_active(self): for row in self.iter_group_rows(): if row[0]: (yield row[1]) def iter_category(self, category): for row in self: if (category == row[1]): for chrow in row.iterchildren(): (yield chrow) break def iter_group_rows(self): for row in self: for chrow in row.iterchildren(): (yield chrow) def iter_groups(self): for row in self.iter_group_rows(): (yield row[1]) def load(self, group_categories): for (category, (expanded, groups)) in group_categories.items(): cat = self.append(None, [expanded, category, False, Pango.Weight.BOLD]) for (active, group) in groups: self.append(cat, [active, group, True, Pango.Weight.NORMAL]) def do_row_draggable(self, path): return (self[path].parent is not None) def do_row_drop_possible(self, dest_path, selection_data): (_, _, src_path) = Gtk.tree_get_row_drag_data(selection_data) return ((len(dest_path) == 2) and (src_path[0] != dest_path[0]))
# Creates or replaces the PostgreSQL function upsert_link_raw(), which
# inserts a row into raw_web_pages and, on URL conflict, refreshes
# state/starturl/netloc and takes the minimum of epoch/distance/priority/
# addtime — but only for rows from an older epoch whose state is terminal
# ('complete', 'error' or 'skipped').  The SQL text must stay byte-identical
# (it is executed verbatim and compared against the live database), so only
# this comment is added.
def check_init_func(): with db.session_context() as sess: raw_cur = sess.connection().connection.cursor() cmd = "\n\t\t\tCREATE OR REPLACE FUNCTION upsert_link_raw(\n\t\t\t\t\turl_v text,\n\t\t\t\t\tstarturl_v text,\n\t\t\t\t\tnetloc_v text,\n\t\t\t\t\tdistance_v integer,\n\t\t\t\t\tpriority_v integer,\n\t\t\t\t\taddtime_v timestamp without time zone,\n\t\t\t\t\tstate_v dlstate_enum,\n\t\t\t\t\tupsert_epoch_v integer\n\t\t\t\t\t)\n\t\t\t\tRETURNS VOID AS $$\n\n\t\t\t\tINSERT INTO\n\t\t\t\t\traw_web_pages\n\t\t\t\t\t(url, starturl, netloc, distance, priority, addtime, state, epoch)\n\t\t\t\t-- \t (url, starturl, netloc, distance, priority, addtime, state, epoch)\n\t\t\t\tVALUES\n\t\t\t\t\t( url_v, starturl_v, netloc_v, distance_v, priority_v, addtime_v, state_v, upsert_epoch_v)\n\t\t\t\tON CONFLICT (url) DO\n\t\t\t\t\tUPDATE\n\t\t\t\t\t\tSET\n\t\t\t\t\t\t\tstate = EXCLUDED.state,\n\t\t\t\t\t\t\tstarturl = EXCLUDED.starturl,\n\t\t\t\t\t\t\tnetloc = EXCLUDED.netloc,\n\t\t\t\t\t\t\tepoch = LEAST(EXCLUDED.epoch, raw_web_pages.epoch),\n\t\t\t\t\t\t\t-- Largest distance is 100, but it's not checked\n\t\t\t\t\t\t\tdistance = LEAST(EXCLUDED.distance, raw_web_pages.distance),\n\t\t\t\t\t\t\t-- The lowest priority is 10.\n\t\t\t\t\t\t\tpriority = LEAST(EXCLUDED.priority, raw_web_pages.priority, 10),\n\t\t\t\t\t\t\taddtime = LEAST(EXCLUDED.addtime, raw_web_pages.addtime)\n\t\t\t\t\t\tWHERE\n\t\t\t\t\t\t(\n\t\t\t\t\t\t\t\t(raw_web_pages.epoch IS NULL or raw_web_pages.epoch < upsert_epoch_v)\n\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\traw_web_pages.url = EXCLUDED.url\n\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\t(raw_web_pages.state = 'complete' OR raw_web_pages.state = 'error' OR raw_web_pages.state = 'skipped')\n\t\t\t\t\t\t)\n\t\t\t\t\t;\n\n\t\t\t$$ LANGUAGE SQL;\n\n\t\t" raw_cur.execute(cmd) raw_cur.execute('COMMIT;')
def test_get_faas_data():
    """get_faas_data should merge Azure Functions context fields with the
    WEBSITE_* environment variables into the FaaS metadata dict, including the
    fully-qualified Azure resource id."""
    ctx = mock.Mock(invocation_id='fooid', function_name='fname')
    azure_env = {
        'WEBSITE_OWNER_NAME': '2491fc8e-f7c1-4020-b9c6-fd16+my-resource-group-ARegionShortNamewebspace',
        'WEBSITE_SITE_NAME': 'foo',
        'WEBSITE_RESOURCE_GROUP': 'bar',
    }
    with mock.patch.dict(os.environ, azure_env):
        data = get_faas_data(ctx, True, 'request')
        assert data['coldstart'] is True
        assert data['execution'] == 'fooid'
        assert data['trigger']['type'] == 'request'
        expected_id = (
            '/subscriptions/2491fc8e-f7c1-4020-b9c6-fd16'
            '/resourceGroups/bar/providers/Microsoft.Web/sites/foo/functions/fname'
        )
        assert data['id'] == expected_id
def test_convert_marshmallow_json_schema_to_python_class():
    """Round-trip: dump a dataclass's marshmallow JSON schema, rebuild a dataclass
    from the schema definitions, and verify field access/mutation semantics."""

    class Foo(DataClassJsonMixin):
        x: int
        y: str

    # The cast only satisfies the type checker; .schema() comes from DataClassJsonMixin.
    schema = JSONSchema().dump(typing.cast(DataClassJsonMixin, Foo).schema())
    foo_class = convert_marshmallow_json_schema_to_python_class(schema['definitions'], 'FooSchema')
    foo = foo_class(x=1, y='hello')
    # The generated class must be mutable.
    foo.x = 2
    assert (foo.x == 2)
    assert (foo.y == 'hello')
    # Accessing an undeclared attribute must raise, not return None.
    with pytest.raises(AttributeError):
        _ = foo.c
    assert dataclasses.is_dataclass(foo_class)
def smartquotes(state: StateCore) -> None:
    """Typographer rule: replace straight quotes in inline tokens with smart
    quotes. No-op unless the ``typographer`` option is enabled."""
    if not state.md.options.typographer:
        return
    for tok in state.tokens:
        # Only inline tokens that actually contain a quote character are worth processing.
        has_quote = tok.type == 'inline' and QUOTE_RE.search(tok.content)
        if has_quote and tok.children is not None:
            process_inlines(tok.children, state)
class Timer():
    """Stopwatch usable standalone or as a context manager.

    As a context manager it logs a "starting" message on entry, restarts the
    clock, and on exit logs success or failure depending on whether an
    exception escaped the ``with`` block.

    Fix: ``elapsed``, ``starting_message``, ``success_message`` and
    ``failure_message`` must be properties and ``as_string`` a classmethod --
    the rest of the class accesses them without calling (e.g.
    ``self.as_string(self.elapsed)`` in ``__repr__`` and
    ``self.success_logger(self.starting_message)``), which previously passed
    bound-method objects around and crashed ``__repr__``.
    """

    # Format pieces for days / hours / minutes / seconds / milliseconds.
    _formats = ('{:,} d', '{} h', '{} m', '{} s', '{} ms')

    def __init__(self, message: Optional[str]=None, success_logger: Callable=print, failure_logger: Callable=print):
        self.message = message
        self.success_logger = success_logger
        self.failure_logger = failure_logger
        self.start()

    def __enter__(self):
        self.log_starting_message()
        self.start()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.stop()
        if exc_type is None:
            self.log_success_message()
        else:
            self.log_failure_message()

    def __repr__(self):
        return self.as_string(self.elapsed)

    def start(self):
        """(Re)start the clock and clear any previous stop/elapsed state."""
        self._start = time.perf_counter()
        self._stop = None
        self._elapsed = None

    def stop(self):
        """Freeze the clock; ``elapsed`` is fixed from here on."""
        self._stop = time.perf_counter()
        self._elapsed = timedelta(seconds=(self._stop - self._start))

    def log_starting_message(self):
        if self.message:
            self.success_logger(self.starting_message)

    @property
    def starting_message(self) -> str:
        return '[{}] starting...'.format(self.message)

    def log_success_message(self):
        if self.message:
            self.success_logger(self.success_message)

    @property
    def success_message(self) -> str:
        return '[{}] finished successfully after {}'.format(self.message, self)

    def log_failure_message(self):
        if self.message:
            self.failure_logger(self.failure_message)

    @property
    def failure_message(self) -> str:
        return '[{}] FAILED AFTER {}'.format(self.message, self)

    @property
    def elapsed(self) -> timedelta:
        """Elapsed time: live while running, frozen once :meth:`stop` is called.

        Raises:
            RuntimeError: if the timer was never started.
        """
        if self._start is None:
            raise RuntimeError('Timer has not been started')
        if self._elapsed is None:
            return timedelta(seconds=(time.perf_counter() - self._start))
        return self._elapsed

    def estimated_total_runtime(self, ratio) -> timedelta:
        """Extrapolate total runtime given the completed fraction *ratio* (0..1).

        Once stopped, returns the actual (frozen) elapsed time instead.
        """
        if self._start is None:
            raise RuntimeError('Timer has not been started')
        if self._elapsed is None:
            return timedelta(seconds=((time.perf_counter() - self._start) / ratio))
        return self._elapsed

    def estimated_remaining_runtime(self, ratio) -> timedelta:
        """Remaining-runtime estimate; zero once the timer has been stopped."""
        if self._elapsed is None:
            return max(self.estimated_total_runtime(ratio) - self.elapsed, timedelta())
        return timedelta()

    @classmethod
    def as_string(cls, elapsed: timedelta) -> str:
        """Render a timedelta as e.g. ``'1 d 2 h 3 m 4 s 5 ms'``, omitting zero units."""
        (frac, s) = math.modf(elapsed.total_seconds())
        ms = round(frac * 1000)
        (m, s) = divmod(s, 60)
        (h, m) = divmod(m, 60)
        (d, h) = divmod(h, 24)
        parts = ' '.join(
            fmt.format(value)
            for (fmt, value) in zip(cls._formats, tuple(int(n) for n in (d, h, m, s, ms)))
            if value > 0
        )
        return parts or 'less than a millisecond'
class Empty(AmbassadorTest):
    """Runs Ambassador in a single, deliberately empty namespace and checks that
    it comes up without CRD/Ingress errors.

    NOTE(review): ``variants`` takes ``cls`` but shows no ``@classmethod``
    decorator, and the ``queries``/``check`` section below contains apparently
    mangled ``scheme='...'`` arguments (the scheme value and its closing quote
    look lost), which scrambles quote pairing for the rest of the class. That
    region is reproduced verbatim -- confirm against the original source.
    """

    single_namespace = True
    namespace = 'empty-namespace'
    extra_ports = [8877]

    def init(self):
        if EDGE_STACK:
            self.xfail = 'XFailing for now'
        # Expose the readiness probe port through the pod environment.
        self.manifest_envs += '\n - name: AMBASSADOR_READY_PORT\n value: "8500"\n'

    def variants(cls) -> Generator[(Node, None, None)]:
        (yield cls())

    def manifests(self) -> str:
        # Create the empty namespace before the standard test manifests.
        return (namespace_manifest('empty-namespace') + super().manifests())

    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # Intentionally yields nothing: the point of the test is an empty install.
        (yield from ())

    # Mangled region reproduced verbatim (see class NOTE above).
    def queries(self): (yield Query(self.url('ambassador/v0/diag/?json=true&filter=errors'), phase=2)) (yield Query(self.url('_internal/v0/ping', scheme=' port=8877), expected=403)) (yield Query(self.url('ambassador/v0/check_ready', scheme=' port=8877))) def check(self): errors = (self.results[0].json or []) for (source, error) in errors: if (('could not find' in error) and ('CRD definitions' in error)): assert False, f'Missing CRDs: {error}' if ('Ingress resources' in error): assert False, f'Ingress resource error: {error}'
def test_dashboard(client):
    """Smoke test: the dashboard page loads and shows all of its main sections."""
    response = client.get('/', follow_redirects=True)
    assert response.status_code == 200
    for fragment in (b'Dashboard', b'Statistics', b'Recent Calls', b'Calls per Day'):
        assert fragment in response.data
def format_message(inserted):
    """Build a human-readable summary of a concession-import run.

    ``inserted`` is a list of dicts (keys used: created, vmpp_id,
    supplied_vmpp_id, drug, pack_size, url). Returns the summary string.

    NOTE(review): the triple-quoted f-strings below look like flattened
    multi-line literals -- confirm their exact whitespace upstream.
    """
    created = [i for i in inserted if i['created']]
    # Concessions we could not match to any VMPP at all.
    unmatched = [i for i in inserted if (i['vmpp_id'] is None)]
    # Newly created concessions whose supplied VMPP ID disagreed with our match.
    new_mismatched = [i for i in inserted if ((i['vmpp_id'] is not None) and (i['supplied_vmpp_id'] is not None) and (i['vmpp_id'] != i['supplied_vmpp_id']) and i['created'])]
    msg = f'Fetched {len(inserted)} concessions. '
    if (not created):
        msg += 'Found no new concessions to import.'
    else:
        msg += f'Imported {len(created)} new concessions.'
    if unmatched:
        msg += '\n\nWe could not confirm that the following concessions have correct VMPP IDs:\n'
        for item in unmatched:
            msg += f''' Name: {item['drug']} {item['pack_size']} VMPP: {vmpp_url(item['supplied_vmpp_id'])} From: {item['url']} '''
    if new_mismatched:
        msg += '\n\nThe following concessions were supplied with incorrect VMPP IDs but have been automatically corrected:\n'
        for item in new_mismatched:
            msg += f''' Name: {item['drug']} {item['pack_size']} Supplied VMPP: {vmpp_url(item['supplied_vmpp_id'])} Matched VMPP: {vmpp_url(item['vmpp_id'])} From: {item['url']} '''
    return msg
def count_of_records_to_process_in_delta(config: dict, spark: 'pyspark.sql.SparkSession') -> Tuple[(int, int, int)]:
    """Run the min/max/count SQL for this data type against the delta table.

    Returns ``(count, min_id, max_id)`` and logs how long the query took.
    """
    start = perf_counter()
    # Double quotes are stripped because Spark SQL identifiers don't use them.
    sql_text = obtain_min_max_count_sql(config).replace('"', '')
    row = spark.sql(sql_text).collect()[0].asDict()
    min_id = row['min']
    max_id = row['max']
    count = row['count']
    msg = f"Found {count:,} {config['data_type']} DB records, took {(perf_counter() - start):.2f}s"
    logger.info(format_log(msg, action='Extract'))
    return (count, min_id, max_id)
class RunAEATestCase(TestCase):
    """Mock-based tests for the ``aea run`` CLI entry point.

    NOTE(review): the bare tuple before ``test_run_aea_negative`` looks like a
    stripped ``@mock.patch(...)`` decorator from the original source -- confirm
    upstream; as written it is a no-op expression.
    """

    def test_run_aea_positive_mock(self):
        # Happy path: a prebuilt AEA is returned by _build_aea and run without installing deps.
        ctx = mock.Mock()
        aea = mock.Mock()
        ctx.config = {'skip_consistency_check': True}
        with mock.patch('aea.cli.run._build_aea', return_value=aea):
            run_aea(ctx, ['author/name:0.1.0'], 'env_file', False)

    def test_run_aea_positive_install_deps_mock(self):
        # Same as above, but with install_deps=True; do_install is mocked out.
        ctx = mock.Mock()
        aea = mock.Mock()
        ctx.config = {'skip_consistency_check': True}
        with mock.patch('aea.cli.run.do_install'):
            with mock.patch('aea.cli.run._build_aea', return_value=aea):
                run_aea(ctx, ['author/name:0.1.0'], 'env_file', True)

    ('aea.cli.run._prepare_environment', _raise_click_exception)
    def test_run_aea_negative(self, *mocks):
        # Environment preparation failure must surface as a ClickException.
        ctx = mock.Mock()
        ctx.config = {'skip_consistency_check': True}
        with self.assertRaises(ClickException):
            run_aea(ctx, ['author/name:0.1.0'], 'env_file', False)
# NOTE(review): the leading ``(scope='function')`` looks like a stripped
# ``@pytest.fixture(scope='function')`` decorator -- confirm upstream.
(scope='function')
def attentive_email_connection_config(db: Session) -> Generator:
    """Fixture: a write-access Attentive email ConnectionConfig, deleted on teardown.

    NOTE(review): the email-address secrets appear redacted/empty -- confirm
    the intended test values upstream.
    """
    connection_config = ConnectionConfig.create(db=db, data={'name': 'Attentive', 'key': 'my_email_connection_config', 'connection_type': ConnectionType.attentive, 'access': AccessLevel.write, 'secrets': {'test_email_address': 'processor_', 'recipient_email_address': '', 'advanced_settings': {'identity_types': {'email': True, 'phone_number': False}}, 'third_party_vendor_name': 'Attentive'}})
    (yield connection_config)
    # Teardown: remove the config so the next test starts clean.
    connection_config.delete(db)
def extractRainofsnowCom(item):
    """Parse a rainofsnow.com feed item into a release message.

    Returns None for previews or untitled chapters, a release message for
    recognized groups, and False when no known group tag matches.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # (feed tag, release group name, translation type)
    known_groups = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for (tag, group_name, tl_type) in known_groups:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, group_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def enter():
    """CLI entry point: chroot into a named environment.

    Exits with a help/error message when not root, when no name is given, or
    when the environment does not exist. Supports ``-t`` (apply a template
    first) and ``-c`` (run a single command instead of an interactive shell).
    """
    if not isRoot.isRoot():
        sys.exit(text.notRoot)
    envs_dir = getRootDir.getEnvsDir()
    env_name = getArg.getArg(0)
    if not env_name:
        sys.exit(text.enterHelper)
    template = getFlag.getFlag('-t')
    if template:
        create.executeTemplate(template)
    env_path = envs_dir + env_name
    if not os.path.exists(env_path):
        sys.exit(text.envDoesntExist)
    # Make sure the required filesystems are mounted before entering.
    checkMount(env_path)
    enterChroot(env_path)
    command = getFlag.getFlag('-c')
    if command:
        executeCommand(command=command)
    else:
        executeCommand()
class AuthService():
    """Authentication workflows: swagger login, captcha login, token refresh, logout.

    NOTE(review): several human-facing messages below are empty/placeholder
    strings (``msg=''`` etc.) -- they appear redacted from the original source;
    confirm the intended texts upstream. Also note ``new_token`` and ``logout``
    are defined without ``self`` -- looks like stripped decorators or an
    upstream bug; confirm.
    """

    # NOTE(review): evaluated once at class-definition time, so every login
    # handled by this service records the same timestamp -- presumably this
    # should be computed per request; confirm upstream.
    login_time = timezone.now()

    async def swagger_login(self, *, form_data: OAuth2PasswordRequestForm) -> tuple[(str, User)]:
        """Password login used by the Swagger UI; returns (access_token, user)."""
        async with async_db_session() as db:
            current_user = (await UserDao.get_by_username(db, form_data.username))
            if (not current_user):
                raise errors.NotFoundError(msg='')
            elif (not (await jwt.password_verify((form_data.password + current_user.salt), current_user.password))):
                # Password is salted before verification.
                raise errors.AuthorizationError(msg='')
            elif (not current_user.status):
                # Account disabled.
                raise errors.AuthorizationError(msg=', ')
            (await UserDao.update_login_time(db, form_data.username, self.login_time))
            user = (await UserDao.get(db, current_user.id))
            (access_token, _) = (await jwt.create_access_token(str(user.id), multi_login=user.is_multi_login))
            return (access_token, user)

    async def login(self, *, request: Request, obj: AuthLogin, background_tasks: BackgroundTasks) -> tuple[(str, str, datetime, datetime, User)]:
        """Captcha-protected login.

        Returns (access_token, refresh_token, access_expire, refresh_expire, user).
        On auth/captcha failure a login-log entry is written via a background
        task attached to the raised error; on success the log entry and captcha
        cleanup are queued on ``background_tasks``.
        """
        async with async_db_session() as db:
            try:
                current_user = (await UserDao.get_by_username(db, obj.username))
                if (not current_user):
                    raise errors.NotFoundError(msg='')
                elif (not (await jwt.password_verify((obj.password + current_user.salt), current_user.password))):
                    raise errors.AuthorizationError(msg='')
                elif (not current_user.status):
                    raise errors.AuthorizationError(msg=', ')
                # Captcha is stored in Redis keyed by client IP.
                captcha_code = (await redis_client.get(f'{settings.CAPTCHA_LOGIN_REDIS_PREFIX}:{request.state.ip}'))
                if (not captcha_code):
                    raise errors.AuthorizationError(msg=',')
                if (captcha_code.lower() != obj.captcha.lower()):
                    raise errors.CustomError(error=CustomErrorCode.CAPTCHA_ERROR)
                (await UserDao.update_login_time(db, obj.username, self.login_time))
                user = (await UserDao.get(db, current_user.id))
                (access_token, access_token_expire_time) = (await jwt.create_access_token(str(user.id), multi_login=user.is_multi_login))
                (refresh_token, refresh_token_expire_time) = (await jwt.create_refresh_token(str(user.id), access_token_expire_time, multi_login=user.is_multi_login))
            except errors.NotFoundError as e:
                raise errors.NotFoundError(msg=e.msg)
            except (errors.AuthorizationError, errors.CustomError) as e:
                # Record the failed attempt via a background task attached to the error response.
                err_log_info = dict(db=db, request=request, user=current_user, login_time=self.login_time, status=LoginLogStatusType.fail.value, msg=e.msg)
                task = BackgroundTask(LoginLogService.create, **err_log_info)
                raise errors.AuthorizationError(msg=e.msg, background=task)
            except Exception as e:
                raise e
            else:
                # Success: queue the audit log and consume the captcha.
                log_info = dict(db=db, request=request, user=user, login_time=self.login_time, status=LoginLogStatusType.success.value, msg='')
                background_tasks.add_task(LoginLogService.create, **log_info)
                (await redis_client.delete(f'{settings.CAPTCHA_LOGIN_REDIS_PREFIX}:{request.state.ip}'))
                return (access_token, refresh_token, access_token_expire_time, refresh_token_expire_time, user)

    async def new_token(*, request: Request, refresh_token: str) -> tuple[(str, str, datetime, datetime)]:
        """Exchange a refresh token for a new access/refresh token pair.

        The refresh token's subject must match the authenticated request user.
        """
        user_id = (await jwt.jwt_decode(refresh_token))
        if (request.user.id != user_id):
            raise errors.TokenError(msg=' token ')
        async with async_db_session() as db:
            current_user = (await UserDao.get(db, user_id))
            if (not current_user):
                raise errors.NotFoundError(msg='')
            elif (not current_user.status):
                raise errors.AuthorizationError(msg=',')
            current_token = (await get_token(request))
            (new_access_token, new_refresh_token, new_access_token_expire_time, new_refresh_token_expire_time) = (await jwt.create_new_token(str(current_user.id), current_token, refresh_token, multi_login=current_user.is_multi_login))
            return (new_access_token, new_refresh_token, new_access_token_expire_time, new_refresh_token_expire_time)

    async def logout(*, request: Request) -> None:
        """Invalidate tokens in Redis.

        Multi-login users only drop the current token; single-login users drop
        every token under their prefix.
        """
        token = (await get_token(request))
        if request.user.is_multi_login:
            key = f'{settings.TOKEN_REDIS_PREFIX}:{request.user.id}:{token}'
            (await redis_client.delete(key))
        else:
            prefix = f'{settings.TOKEN_REDIS_PREFIX}:{request.user.id}:'
            (await redis_client.delete_prefix(prefix))
class Test_DecimalField():
    """Unit tests for DecimalField construction, cloning and validation.

    NOTE(review): the bare ``.parametrize(...)`` lines below look like stripped
    ``@pytest.mark.parametrize`` decorators from the original source -- confirm
    upstream.
    """

    def test_init_options(self):
        # max_digits / max_decimal_places must survive both construction and clone().
        assert (DecimalField(max_digits=3).max_digits == 3)
        assert (DecimalField(max_decimal_places=4).max_decimal_places == 4)
        f = DecimalField(max_digits=3, max_decimal_places=4)
        f2 = f.clone()
        assert (f2.max_digits == 3)
        assert (f2.max_decimal_places == 4)
        f3 = DecimalField()
        assert (f3.max_digits is None)
        assert (f3.max_decimal_places is None)
        f4 = f3.clone()
        assert (f4.max_digits is None)
        assert (f4.max_decimal_places is None)

    .parametrize('value', [Decimal('Inf'), Decimal('NaN'), Decimal('sNaN')])
    def test_infinite(self, value):
        # Non-finite decimals must be rejected.
        f = DecimalField(coerce=True, field='foo')
        with pytest.raises(ValidationError):
            raise next(f.validate(value))

    .parametrize('value,places,digits', [(Decimal(4.1), 100, 2), (Decimal(4.1), 100, 2), (Decimal(4.1), None, 2), (Decimal(4.12), 100, None), (Decimal(4.123), 100, None), (4.1234, 100, 2), (Decimal(4.1234), 100, 2), (Decimal(.1234), 100, 100)])
    def test_max_decimal_places__good(self, value, places, digits):
        # Values within the configured limits must validate cleanly.
        f = DecimalField(max_decimal_places=places, max_digits=digits, coerce=True, field='foo')
        d: Decimal = f.prepare_value(value)
        for error in f.validate(d):
            raise error

    .parametrize('value', [Decimal(1.), Decimal(1.12345), Decimal(.12345)])
    def test_max_decimal_places__bad(self, value):
        f = DecimalField(max_decimal_places=4, coerce=True, field='foo')
        with pytest.raises(ValidationError):
            raise next(f.validate(value))

    .parametrize('value', [Decimal(12345.), Decimal(123456.12345), Decimal(.12345)])
    def test_max_digits__bad(self, value):
        f = DecimalField(max_digits=4, coerce=True, field='foo')
        with pytest.raises(ValidationError):
            raise next(f.validate(value))
class Challenge():
    """Deliberately weakened ECDSA-style signing challenge on a random
    precomputed curve.

    The private key is bounded by the largest prime factor of the curve order
    (the intended vulnerability). Signatures are ``(r, s)`` with
    ``r = H(message || Q.x)`` and ``s = k - r*priv (mod n)``.

    Fix: ``hashlib.sha256`` requires bytes; hashing the ``str`` message digest
    input directly raised ``TypeError``. The message is now UTF-8 encoded in
    both ``sign`` and ``verify``.
    """

    def __init__(self):
        (a, b, p, o, G, Go) = random.choice(PRECOMPUTED_CURVES)
        self._a = a
        self._b = b
        self._p = p
        self.curve = Curve('generic curve', p, a, b, Go, G[0], G[1])
        self.curve_order = o
        self.generator_order = Go
        self.generator = self.curve.G
        self.priv = self.gen_private_key()
        self.pub = self.generator * self.priv

    def gen_private_key(self):
        """Pick a private key in [1, min(largest prime factor of n, n)] -- intentionally weak."""
        key_bound = 1
        for prime_factor in factorint(self.curve_order):
            key_bound = max(key_bound, prime_factor)
        return random.randint(1, min(key_bound, self.curve_order))

    def sign(self, message, priv=None):
        """Sign *message* (str); returns (r, s). Retries until r % n != 0 and s != 0."""
        priv = priv or self.priv
        n = self.curve_order
        while True:
            k = random.randint(1, n)
            Q = self.generator * k
            hash_message = message + str(int(Q.x))
            # sha256 requires bytes, not str (previous code raised TypeError here).
            mhash = hashlib.sha256(hash_message.encode())
            r = int(mhash.hexdigest(), 16)
            if (r % n) == 0:
                continue
            s = (k - (r * priv)) % n
            if s != 0:
                return (r, s)

    def verify(self, message, signature):
        """Verify (r, s) against *message*: recompute Q = s*G + r*pub and check r == H(message || Q.x)."""
        (r, s) = signature
        n = self.curve_order
        if (r < 0) or (s < 1) or (s > (n - 1)):
            return False
        Q = (self.generator * s) + (self.pub * r)
        if Q == self.generator.IDENTITY_ELEMENT:
            return False
        hash_message = message + str(int(Q.x))
        mhash = hashlib.sha256(hash_message.encode())
        v = int(mhash.hexdigest(), 16)
        return (v == r)
def test_three_pool_arbitrage(get_transaction_hashes, get_addresses):
    """A three-hop swap cycle (token1 -> token2 -> token3 -> token1) within one
    transaction must be detected as a single arbitrage whose profit is measured
    in the start/end token."""
    block_number = 123
    [transaction_hash] = get_transaction_hashes(1)
    [account_address, first_pool_address, second_pool_address, third_pool_address, first_token_address, second_token_address, third_token_address] = get_addresses(7)
    # Profit of 1 unit of the first token (10 in, 11 out).
    first_token_in_amount = 10
    first_token_out_amount = 11
    second_token_amount = 15
    third_token_amount = 40
    transaction_position = 0
    # Hop 1: account -> pool1 (v2), hop 2: pool1 -> pool2 (v3), hop 3: pool2 -> account (v3).
    # Each swap's to_address feeds the next swap's from_address, closing the cycle.
    swaps = [Swap(abi_name=UNISWAP_V2_PAIR_ABI_NAME, transaction_hash=transaction_hash, transaction_position=transaction_position, protocol=Protocol.uniswap_v2, block_number=block_number, trace_address=[0], contract_address=first_pool_address, from_address=account_address, to_address=second_pool_address, token_in_address=first_token_address, token_in_amount=first_token_in_amount, token_out_address=second_token_address, token_out_amount=second_token_amount), Swap(abi_name=UNISWAP_V3_POOL_ABI_NAME, transaction_hash=transaction_hash, transaction_position=transaction_position, protocol=Protocol.uniswap_v3, block_number=block_number, trace_address=[1], contract_address=second_pool_address, from_address=first_pool_address, to_address=third_pool_address, token_in_address=second_token_address, token_in_amount=second_token_amount, token_out_address=third_token_address, token_out_amount=third_token_amount), Swap(abi_name=UNISWAP_V3_POOL_ABI_NAME, transaction_hash=transaction_hash, transaction_position=transaction_position, protocol=Protocol.uniswap_v3, block_number=block_number, trace_address=[2], contract_address=third_pool_address, from_address=second_pool_address, to_address=account_address, token_in_address=third_token_address, token_in_amount=third_token_amount, token_out_address=first_token_address, token_out_amount=first_token_out_amount)]
    arbitrages = get_arbitrages(swaps)
    assert (len(arbitrages) == 1)
    arbitrage = arbitrages[0]
    assert (arbitrage.swaps == swaps)
    assert (arbitrage.account_address == account_address)
    assert (arbitrage.profit_token_address == first_token_address)
    assert (arbitrage.start_amount == first_token_in_amount)
    assert (arbitrage.end_amount == first_token_out_amount)
    assert (arbitrage.profit_amount == (first_token_out_amount - first_token_in_amount))
def serp_youtube(key, q=None, channelId=None, channelType=None, eventType=None, forContentOwner=None, forDeveloper=None, forMine=None, location=None, locationRadius=None, maxResults=None, onBehalfOfContentOwner=None, order=None, pageToken=None, publishedAfter=None, publishedBefore=None, regionCode=None, relatedToVideoId=None, relevanceLanguage=None, safeSearch=None, topicId=None, type=None, videoCaption=None, videoCategoryId=None, videoDefinition=None, videoDimension=None, videoDuration=None, videoEmbeddable=None, videoLicense=None, videoSyndicated=None, videoType=None):
    """Query the YouTube search API for every combination of the supplied
    parameters and return a single flattened pandas DataFrame, enriched with
    video and channel details.

    Parameter names mirror the YouTube Data API; ``key`` is the API key.
    """
    # locals() captures exactly the keyword arguments -- this must remain the
    # first statement of the function.
    params = locals()
    supplied_params = {k: v for (k, v) in params.items() if params[k]}
    # These parameters are only valid when type="video" per the YouTube API.
    type_vid_params = {'eventType', 'relatedToVideoId', 'videoCaption', 'videoCategoryId', 'videoDefinition', 'videoDimension', 'videoDuration', 'videoEmbeddable', 'videoLicense', 'videoSyndicated', 'videoType', 'forMine', 'forContentOwner'}
    if ((supplied_params.get('type') != 'video') and type_vid_params.intersection(set(supplied_params.keys()))):
        raise ValueError(('You need to set type="video" if you want to set any of the following:' + str(type_vid_params)))
    # Normalize scalars to single-element lists so the product expansion works uniformly.
    for p in supplied_params:
        if isinstance(supplied_params[p], (str, int)):
            supplied_params[p] = [supplied_params[p]]
    # Validate enumerated parameters against the allowed-values table.
    for p in supplied_params:
        if (p in SERP_YTUBE_VALID_VALS):
            if (not set(supplied_params[p]).issubset(SERP_YTUBE_VALID_VALS[p])):
                raise ValueError('Please make sure you provide a valid value for "{}", valid values:\n{}'.format(p, sorted([str(x) for x in SERP_YTUBE_VALID_VALS[p]])))
    # Cartesian product: one API request per parameter combination.
    params_list = _dict_product(supplied_params)
    # NOTE(review): everything below is reproduced verbatim -- the base_url
    # string literal appears mangled (the API URL and its closing quote look
    # lost), which scrambles quote pairing for the remainder of the function.
    # Confirm against the original source before editing.
    base_url = ' responses = [] for param in params_list: param_log = ', '.join([((k + '=') + str(v)) for (k, v) in param.items()]) logging.info(msg=('Requesting: ' + param_log)) resp = requests.get(base_url, params=param) if (resp.status_code >= 400): raise Exception(resp.json()) responses.append(resp) result_df = pd.DataFrame() for (i, resp) in enumerate(responses): snippet_df = pd.DataFrame([x['snippet'] for x in resp.json()['items']]) id_df = pd.DataFrame([x['id'] for x in resp.json()['items']]) if ('channelId' in id_df): id_df = id_df.drop('channelId', axis=1) if ('thumbnails' in snippet_df): thumb_df = json_normalize(snippet_df['thumbnails']) else: thumb_df = pd.DataFrame() page_info = resp.json()['pageInfo'] temp_df = pd.concat([snippet_df, id_df, thumb_df], axis=1).assign(**page_info) temp_df['rank'] = range(1, (len(temp_df) + 1)) if (len(temp_df) == 0): empty_df_cols = ['title', 'description', 'publishedAt', 'channelTitle', 'kind', 'videoId', 'channelId'] temp_df = temp_df.assign(q=[params_list[i]['q']]) temp_df = temp_df.assign(**dict.fromkeys(empty_df_cols)) temp_df = temp_df.assign(**page_info) del params_list[i]['key'] temp_df = temp_df.assign(**params_list[i]) temp_df['nextPageToken'] = resp.json().get('nextPageToken') result_df = pd.concat([result_df, temp_df], sort=False, ignore_index=True) result_df['queryTime'] = datetime.datetime.now(tz=datetime.timezone.utc) result_df['queryTime'] = pd.to_datetime(result_df['queryTime']) specified_cols = ['queryTime', 'rank', 'title', 'description', 'publishedAt', 'channelTitle', 'totalResults', 'kind'] ordered_cols = (list(params_list[i].keys()) + specified_cols) non_ordered = result_df.columns.difference(set(ordered_cols)) final_df = result_df[(ordered_cols + list(non_ordered))] vid_ids = ','.join(final_df['videoId'].dropna()) if vid_ids: vid_details_df = youtube_video_details(vid_ids=vid_ids, key=key) vid_details_df.columns = [('video.' + x) for x in vid_details_df.columns] final_df = pd.merge(final_df, vid_details_df, how='left', left_on='videoId', right_on='video.id') channel_ids = ','.join(final_df['channelId'].dropna()) if channel_ids: channel_details_df = youtube_channel_details(channel_ids=channel_ids, key=key) channel_details_df.columns = [('channel.' + x) for x in channel_details_df.columns] final_df = pd.merge(final_df, channel_details_df, how='left', left_on='channelId', right_on='channel.id') final_df = final_df.drop_duplicates(subset=['videoId']) return final_df.reset_index(drop=True)
class MetaMappingCalendar(MetaMapping):
    """Property mapping for calendar (VEVENT) collections.

    Extends the base mapping with the CalDAV calendar-description property and
    the Apple iCal calendar-color property (converted between int and RGB).
    """
    supported_calendar_component = 'VEVENT'
    # Copy so the base class's table is not mutated in place.
    _mappings = MetaMapping._mappings.copy()
    _mappings.update({'C:calendar-description': ('description', None, None), 'ICAL:calendar-color': ('color', IntToRgb, RgbToInt)})
    # Rebuild the reverse-lookup table to include the additions above.
    MetaMapping._reverse_mapping(_mappings)
class TextSystem(object):
    """End-to-end OCR pipeline: text detection -> optional angle classification
    -> text recognition, with low-confidence results filtered out."""

    def __init__(self, args):
        if (not args.show_log):
            logger.setLevel(logging.INFO)
        self.text_detector = predict_det.TextDetector(args)
        self.text_recognizer = predict_rec.TextRecognizer(args)
        self.use_angle_cls = args.use_angle_cls
        # Recognition results scoring below this threshold are discarded.
        self.drop_score = args.drop_score
        if self.use_angle_cls:
            self.text_classifier = predict_cls.TextClassifier(args)

    def print_draw_crop_rec_res(self, img_crop_list, rec_res):
        """Debug helper: dump each cropped text region to ./output and log its result."""
        bbox_num = len(img_crop_list)
        for bno in range(bbox_num):
            cv2.imwrite(('./output/img_crop_%d.jpg' % bno), img_crop_list[bno])
            # NOTE(review): logger.info with two positional args treats the
            # second as a %-format argument -- possibly unintended; confirm.
            logger.info(bno, rec_res[bno])

    def __call__(self, img, cls=True):
        """Run the full pipeline on *img*.

        Returns (filter_boxes, filter_rec_res) -- boxes and (text, score) pairs
        above drop_score -- or (None, None) when detection finds nothing.
        """
        ori_im = img.copy()
        (dt_boxes, elapse) = self.text_detector(img)
        logger.debug('dt_boxes num : {}, elapse : {}'.format(len(dt_boxes), elapse))
        if (dt_boxes is None):
            return (None, None)
        img_crop_list = []
        # Sort boxes top-to-bottom / left-to-right so results read in order.
        dt_boxes = sorted_boxes(dt_boxes)
        for bno in range(len(dt_boxes)):
            tmp_box = copy.deepcopy(dt_boxes[bno])
            img_crop = get_rotate_crop_image(ori_im, tmp_box)
            img_crop_list.append(img_crop)
        if (self.use_angle_cls and cls):
            # Fix upside-down crops before recognition.
            (img_crop_list, angle_list, elapse) = self.text_classifier(img_crop_list)
            logger.debug('cls num : {}, elapse : {}'.format(len(img_crop_list), elapse))
        (rec_res, elapse) = self.text_recognizer(img_crop_list)
        logger.debug('rec_res num : {}, elapse : {}'.format(len(rec_res), elapse))
        (filter_boxes, filter_rec_res) = ([], [])
        for (box, rec_reuslt) in zip(dt_boxes, rec_res):
            (text, score) = rec_reuslt
            if (score >= self.drop_score):
                filter_boxes.append(box)
                filter_rec_res.append(rec_reuslt)
        return (filter_boxes, filter_rec_res)
def extension_controller_extender(data, fos):
    """Apply the desired state of an extension-controller extender via the
    FortiOS API handle *fos*.

    state 'present'/True creates or updates the object; 'absent' deletes it by
    name; anything else aborts the module with an error.
    """
    vdom = data['vdom']
    state = data['state']
    payload = flatten_multilists_attributes(data['extension_controller_extender'])
    filtered_data = underscore_to_hyphen(filter_extension_controller_extender_data(payload))
    if state == 'present' or state is True:
        return fos.set('extension-controller', 'extender', data=filtered_data, vdom=vdom)
    if state == 'absent':
        return fos.delete('extension-controller', 'extender', mkey=filtered_data['name'], vdom=vdom)
    fos._module.fail_json(msg='state must be present or absent!')
class SecondTask(ExampleTask):
    """Second example task: a multi-tab editor with File/View menus and a toolbar."""

    # Unique Tasks-framework identifier for this task.
    id = 'example.second_task'
    # Human-readable name shown in the UI.
    name = 'Second Multi-Tab Editor'

    menu_bar = SMenuBar(SMenu(TaskAction(name='New', method='new', accelerator='Ctrl+N'), id='File', name='&File'), SMenu(DockPaneToggleGroup(), TaskToggleGroup(), id='View', name='&View'))

    tool_bars = [SToolBar(TaskAction(method='new', tooltip='New file', image=ImageResource('document_new')), image_size=(32, 32))]

    def _default_layout_default(self):
        # Dock the example pane along the bottom edge by default.
        return TaskLayout(bottom=PaneItem('steps.example_pane'))
class OptionSeriesArcdiagramSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Generated Highcharts options wrapper for the sonification frequency mapping.

    NOTE(review): each identically-named method pair below looks like a
    stripped ``@property`` getter / ``@<name>.setter`` pair from the original
    generated source; as written, the second definition shadows the first.
    Confirm upstream.
    """

    def mapFunction(self):
        # Getter: current mapFunction value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the mapping function.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def backup(arg):
    """Create a timestamped copy of a file, or of every file matching a glob.

    Each backup is written next to its original as
    ``<name><YYYY-mm-dd_HHMMSS>``. A non-glob argument that is not a regular
    file is silently ignored.
    """
    target = path.expanduser(arg)
    if '*' in target:
        for match in glob.iglob(target):
            shutil.copyfile(match, match + time.strftime('%Y-%m-%d_%H%M%S'))
        return
    if not is_file(target):
        return
    shutil.copyfile(target, target + time.strftime('%Y-%m-%d_%H%M%S'))
def simple_verbosity_option(logger_: logging.Logger, *names: str, **kwargs: Any) -> Callable:
    """Return a Click decorator adding a verbosity option that sets *logger_*'s level.

    Defaults to ``--verbosity/-v`` with level ``INFO``; any keyword accepted by
    ``click.option`` may be overridden via **kwargs. The option is eager and
    not exposed to the command function; the chosen level is stored in
    ``ctx.meta['verbosity']``.
    """
    if not names:
        names = ('--verbosity', '-v')
    option_defaults = {
        'default': 'INFO',
        'type': click.Choice(LOG_LEVELS, case_sensitive=False),
        'metavar': 'LVL',
        'expose_value': False,
        'help': 'One of {}'.format(', '.join(LOG_LEVELS)),
        'is_eager': True,
    }
    for key, value in option_defaults.items():
        kwargs.setdefault(key, value)

    def decorator(f: Callable) -> Callable:
        def _set_level(ctx: click.Context, param: Any, value: str) -> None:
            # Translate the level name to its numeric value and apply it.
            logger_.setLevel(logging.getLevelName(value))
            ctx.meta['verbosity'] = value
        return click.option(*names, callback=_set_level, **kwargs)(f)

    return decorator
class NotificationTestBase(object):
    """Shared test suite for notification clients.

    Subclasses are expected to provide ``self.client`` (a notification client
    connected to localhost:50051) and ``self.storage`` plus unittest assert
    methods (the class itself does not inherit TestCase here).
    """

    def _prepare_events(self):
        # Seed four events: one in namespace_a, three in namespace_b (one typed),
        # all from the same sender.
        self.client.namespace = 'namespace_a'
        self.client.sender = 'sender'
        self.client.send_event(Event(event_key=EventKey(event_name='key'), message='value1'))
        self.client.namespace = 'namespace_b'
        self.client.send_event(Event(event_key=EventKey(event_name='key', event_type='type_a'), message='value2'))
        self.client.send_event(Event(event_key=EventKey(event_name='key'), message='value3'))
        self.client.send_event(Event(event_key=EventKey(event_name='key2'), message='value3'))

    def test_send_event(self):
        event = self.client.send_event(Event(event_key=EventKey(event_name='key'), message='value1'))
        # Offsets are 1-based and assigned by the server.
        self.assertTrue((event.offset >= 1))

    def test_list_events(self):
        self._prepare_events()
        # Filter by namespace, event_type, sender, and offset range in turn.
        events = self.client.list_events(event_name='key', namespace='namespace_a')
        self.assertEqual(1, len(events))
        events = self.client.list_events('key', namespace='namespace_b')
        self.assertEqual(2, len(events))
        events = self.client.list_events('key', event_type='type_a')
        self.assertEqual(1, len(events))
        events = self.client.list_events('key', sender='sender')
        self.assertEqual(3, len(events))
        events = self.client.list_events('key', sender='invalid')
        self.assertEqual(0, len(events))
        events = self.client.list_events('key', begin_offset=1, end_offset=2)
        self.assertEqual(2, len(events))
        self.assertEqual('value2', events[0].message)
        self.assertEqual('value3', events[1].message)

    def test_count_events(self):
        self._prepare_events()
        # count_events returns (total, per-sender breakdown).
        count = self.client.count_events('key', namespace='namespace_a')
        self.assertEqual(1, count[0])
        count = self.client.count_events('key', namespace='namespace_b')
        self.assertEqual(2, count[0])
        count = self.client.count_events('key', event_type='type_a')
        self.assertEqual(1, count[0])
        count = self.client.count_events('key', sender='sender')
        self.assertEqual(3, count[0])
        self.assertEqual(3, count[1][0].event_count)
        count = self.client.count_events('key', sender='invalid')
        self.assertEqual(0, count[0])
        self.assertEqual([], count[1])

    def test_listen_events(self):
        # Listener registered from event1's offset sees the two later sends,
        # regardless of the namespace the client switches to mid-test.
        event_list = []
        self.client.namespace = 'a'
        self.client.sender = 's'
        try:
            event1 = self.client.send_event(Event(EventKey('key'), message='value1'))
            handle = self.client.register_listener(listener_processor=TestListenerProcessor(event_list), event_keys=[EventKey('key', None)], offset=event1.offset)
            self.client.send_event(Event(EventKey('key'), message='value2'))
            self.client.namespace = None
            self.client.send_event(Event(EventKey('key'), message='value3'))
        finally:
            self.client.unregister_listener(handle)
        self.client.namespace = 'a'
        events = self.client.list_events('key', begin_offset=event1.offset)
        self.assertEqual(2, len(events))
        self.assertEqual(2, len(event_list))

    def test_listen_events_by_event_type(self):
        # A type-filtered listener must only receive events of that type.
        event_list = []
        try:
            handle = self.client.register_listener(listener_processor=TestListenerProcessor(event_list), event_keys=[EventKey(event_name='key', event_type='e')])
            self.client.send_event(Event(EventKey(event_name='key', event_type='e'), 'value2'))
            self.client.send_event(Event(EventKey(event_name='key', event_type='f'), 'value2'))
        finally:
            self.client.unregister_listener(handle)
        self.assertEqual(1, len(event_list))
        self.assertEqual('e', event_list[0].event_key.event_type)

    def test_list_all_events(self):
        self.client.send_event(Event(EventKey('key'), 'value1'))
        # Sleep ensures event2 gets a strictly later create_time than event1.
        time.sleep(1.0)
        event2 = self.client.send_event(Event(EventKey('key'), 'value2'))
        start_time = event2.create_time
        self.client.send_event(Event(EventKey('key'), 'value3'))
        events = self.client.list_all_events(start_time)
        self.assertEqual(2, len(events))

    def test_list_all_events_with_id_range(self):
        event1 = self.client.send_event(Event(EventKey('key'), 'value1'))
        self.client.send_event(Event(EventKey('key'), 'value2'))
        event3 = self.client.send_event(Event(EventKey('key'), 'value3'))
        # Offset range appears half-open on one side here: 2 of 3 events returned.
        events = self.client.list_all_events(start_offset=event1.offset, end_offset=event3.offset)
        self.assertEqual(2, len(events))

    def test_listen_all_events(self):
        event_list = []
        handle = None
        try:
            handle = self.client.register_listener(listener_processor=TestListenerProcessor(event_list))
            self.client.send_event(Event(EventKey('key'), 'value1'))
            self.client.send_event(Event(EventKey('key'), 'value2'))
            self.client.send_event(Event(EventKey('key'), 'value3'))
        finally:
            if (handle is not None):
                self.client.unregister_listener(handle)
        self.assertEqual(3, len(event_list))

    def test_listen_all_events_from_id(self):
        event_list = []
        handle = None
        try:
            event1 = self.client.send_event(Event(EventKey('key'), message='value1'))
            # Starting at event1's offset excludes event1 itself.
            handle = self.client.register_listener(listener_processor=TestListenerProcessor(event_list), offset=event1.offset)
            self.client.send_event(Event(EventKey('key'), 'value2'))
            self.client.send_event(Event(EventKey('key'), 'value3'))
        finally:
            self.client.unregister_listener(handle)
        self.assertEqual(2, len(event_list))

    def test_register_client(self):
        self.assertIsNotNone(self.client.client_id)
        # New registrations receive sequential client ids.
        tmp_client = EmbeddedNotificationClient(server_uri='localhost:50051', namespace=None, sender=None)
        self.assertEqual(1, (tmp_client.client_id - self.client.client_id))

    def test_is_client_exists(self):
        client_id = self.client.client_id
        self.assertIsNotNone(client_id)
        self.assertEqual(True, self.storage.is_client_exists(client_id))

    def test_delete_client(self):
        # Closing the client must remove its registration from storage.
        client_id = self.client.client_id
        self.assertIsNotNone(client_id)
        self.client.close()
        self.assertEqual(False, self.storage.is_client_exists(client_id))

    def test_send_event_idempotence(self):
        # Replaying with an already-used sequence number must be deduplicated.
        event = Event(EventKey('key'), 'value1')
        self.client.send_event(event)
        self.assertEqual(1, self.client.sequence_num_manager.get_sequence_number())
        self.assertEqual(1, len(self.client.list_events(event_name='key')))
        self.client.send_event(event)
        self.assertEqual(2, self.client.sequence_num_manager.get_sequence_number())
        self.assertEqual(2, len(self.client.list_events(event_name='key')))
        # Rewind the local sequence number: the resend must be ignored server-side.
        self.client.sequence_num_manager._seq_num = 1
        self.client.send_event(event)
        self.assertEqual(2, self.client.sequence_num_manager.get_sequence_number())
        self.assertEqual(2, len(self.client.list_events(event_name='key')))

    def test_client_recovery(self):
        # A client recreated with an old initial_seq_num must dedupe the first
        # replayed send, then continue normally.
        event = Event(EventKey('key'), 'value1')
        self.client.send_event(event)
        self.client.send_event(event)
        self.assertEqual(2, self.client.sequence_num_manager.get_sequence_number())
        self.assertEqual(2, len(self.client.list_events(event_name='key')))
        client2 = EmbeddedNotificationClient(server_uri='localhost:50051', namespace=None, sender=None, client_id=self.client.client_id, initial_seq_num=1)
        client2.send_event(event)
        self.assertEqual(2, client2.sequence_num_manager.get_sequence_number())
        self.assertEqual(2, len(client2.list_events(event_name='key')))
        client2.send_event(event)
        self.assertEqual(3, client2.sequence_num_manager.get_sequence_number())
        self.assertEqual(3, len(client2.list_events(event_name='key')))
def test_correlate():
    """Run hicCorrelate on two identical matrices and compare the generated
    heatmap and scatter images against the stored reference images.

    Fix: the temporary output files are now removed in a ``finally`` block,
    so a failing image comparison no longer leaks files into /tmp.
    """
    outfile_heatmap = NamedTemporaryFile(suffix='heatmap.png', prefix='hicexplorer_test', delete=False)
    outfile_scatter = NamedTemporaryFile(suffix='scatter.png', prefix='hicexplorer_test', delete=False)
    # Correlating a matrix with itself: a perfect-correlation baseline.
    matrix = (ROOT + 'hicCorrectMatrix/small_test_matrix_ICEcorrected_chrUextra_chr3LHet.h5')
    args = "--matrices {} {} --labels 'first' 'second' --method spearman --log1p --colorMap jet --outFileNameHeatmap {} --outFileNameScatter {}".format(matrix, matrix, outfile_heatmap.name, outfile_scatter.name).split()
    try:
        hicCorrelate.main(args)
        # tol=40 tolerates minor rendering differences across matplotlib versions
        res = compare_images(((ROOT + 'hicCorrelate') + '/heatmap.png'), outfile_heatmap.name, tol=40)
        assert (res is None), res
        res = compare_images(((ROOT + 'hicCorrelate') + '/scatter.png'), outfile_scatter.name, tol=40)
        assert (res is None), res
    finally:
        # Always clean up, even when an assertion above fails.
        os.remove(outfile_heatmap.name)
        os.remove(outfile_scatter.name)
def visualize_parser(doc: Union[(spacy.tokens.Doc, List[Dict[(str, str)]])], *, title: Optional[str]='Dependency Parse & Part-of-speech tags', key: Optional[str]=None, manual: bool=False, displacy_options: Optional[Dict]=None) -> None:
    """Render an interactive displaCy dependency-parse view in Streamlit.

    Args:
        doc: A spaCy ``Doc``, or (with ``manual=True``) a list of dicts in
            displaCy's "manual" input format.
        title: Header text; suppressed when falsy.
        key: Unique prefix for the Streamlit widget keys so several
            visualizers can coexist on one page.
        manual: Forwarded to ``displacy.render`` for pre-parsed input.
        displacy_options: Extra displaCy render options; the checkbox-driven
            options below win on key collisions.
    """
    if (displacy_options is None):
        displacy_options = dict()
    if title:
        st.header(title)
    if manual:
        # Manual (pre-parsed) input has no ``.sents``, so sentence splitting
        # is disabled and only the "compact" toggle is offered.
        cols = st.columns(1)
        split_sents = False
        options = {'compact': cols[0].checkbox('Compact mode', key=f'{key}_parser_compact')}
    else:
        cols = st.columns(4)
        split_sents = cols[0].checkbox('Split sentences', value=True, key=f'{key}_parser_split_sents')
        options = {'collapse_punct': cols[1].checkbox('Collapse punct', value=True, key=f'{key}_parser_collapse_punct'), 'collapse_phrases': cols[2].checkbox('Collapse phrases', key=f'{key}_parser_collapse_phrases'), 'compact': cols[3].checkbox('Compact mode', key=f'{key}_parser_compact')}
    # Render one figure per sentence when splitting, otherwise the whole doc.
    docs = ([span.as_doc() for span in doc.sents] if split_sents else [doc])
    displacy_options = {**displacy_options, **options}
    for sent in docs:
        html = displacy.render(sent, options=displacy_options, style='dep', manual=manual)
        # Collapse blank lines in the generated markup before embedding it.
        html = html.replace('\n\n', '\n')
        if (split_sents and (len(docs) > 1)):
            # Quote the sentence text above its figure for orientation.
            st.markdown(f'> {sent.text}')
        st.write(get_svg(html), unsafe_allow_html=True)
def test_06_take_control_02(interact):
    """take_control() must keep pumping data between the local TTY and the
    remote channel: a recv timeout on the channel is tolerated, and stdin is
    forwarded until EOF (an empty read) ends the loop.
    """
    with ExitStack() as stack:
        # Neutralise all terminal-mode manipulation so the test runs headless.
        # (Was a side-effect-only list comprehension; a plain loop is the
        # idiomatic form and avoids building a throwaway list.)
        for target in ('termios.tcsetattr', 'termios.tcgetattr', 'tty.setraw', 'tty.setcbreak'):
            stack.enter_context(mock.patch(target))
        select_mock = stack.enter_context(mock.patch('select.select'))
        channel_mock = stack.enter_context(mock.patch.object(interact, 'channel'))
        stdin_mock = stack.enter_context(mock.patch('sys.stdin'))
        # Channel read times out once — must not abort the control loop.
        channel_mock.recv.side_effect = [socket.timeout()]
        # One line of user input, then EOF terminates take_control().
        stdin_mock.read.side_effect = [b'ls -all\n', b'']
        select_mock.side_effect = [[[stdin_mock], [], []], [[stdin_mock, channel_mock], [], []]]
        interact.take_control()
class OptionSeriesOrganizationSonificationDefaultspeechoptionsPointgrouping(Options):
    """Option wrapper for series.organization.sonification.defaultSpeechOptions.pointGrouping.

    NOTE(review): each option below appears as a getter/setter pair sharing a
    name; generated wrappers like this normally decorate getters with
    @property and setters with @<name>.setter. Those decorators are not
    visible here — as written each second ``def`` shadows the first.
    Confirm against the generator template.
    """

    def algorithm(self):
        """Getter: grouping algorithm (default ``'last'``)."""
        return self._config_get('last')

    def algorithm(self, text: str):
        """Setter for the grouping algorithm."""
        self._config(text, js_type=False)

    def enabled(self):
        """Getter: whether point grouping is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter for the enabled flag."""
        self._config(flag, js_type=False)

    def groupTimespan(self):
        """Getter: group timespan in milliseconds (default 15)."""
        return self._config_get(15)

    def groupTimespan(self, num: float):
        """Setter for the group timespan."""
        self._config(num, js_type=False)

    def prop(self):
        """Getter: point property used for grouping (default ``'y'``)."""
        return self._config_get('y')

    def prop(self, text: str):
        """Setter for the grouping property."""
        self._config(text, js_type=False)
def search(searchengine, what, sort, maxresult=20):
    """Query the bundled search-engine plugins and return de-duplicated,
    level-sorted results.

    Args:
        searchengine: plugin module name to use, or 'all' for every engine.
        what: the search phrase.
        sort: sort mode handed through to each engine worker.
        maxresult: maximum number of results requested per engine.

    Returns:
        Result dicts sorted by their 'level' key, de-duplicated by the
        info-hash extracted from each magnet link (first occurrence wins).

    NOTE(review): this module targets Python 2 (``Queue.Queue``); keep py2
    compatibility when editing.
    """
    engines = glob(path.join(path.dirname(__file__), 'engines', '*.py'))
    enginclasslist = []
    for engine in engines:
        engi = path.basename(engine).split('.')[0].strip()
        # Skip helpers/package files: empty names and leading underscores.
        if ((len(engi) == 0) or engi.startswith('_')):
            continue
        if ((searchengine != engi) and (searchengine != 'all')):
            continue
        try:
            # Module 'engines.<engi>' is expected to expose a class named
            # after the module minus its first three characters.
            engine_module = __import__('.'.join(('engines', engi)))
            engine_module = getattr(engine_module, engi)
            engineclass = getattr(engine_module, engi[3:])
            engineclass = engineclass()
            engineclass.getsearchurl()
            enginclasslist.append(engineclass)
        except:
            # Bare except: deliberately ignore any broken engine plugin so a
            # single bad plugin cannot break the whole search.
            pass
    tasklist = Queue.Queue()
    queueResult = Queue.Queue()
    workers = []
    for i in range(10):
        worker = workerSearch(tasklist, queueResult)
        workers.append(worker)
    # Fan one task per (engine, page) out over the worker pool.
    for engineclass in enginclasslist:
        enginlevel = 0
        if (engineclass.page_result_count == 0):
            # 0 means "engine returns everything at once": one page.
            engineclass.page_result_count = maxresult
        pagecount = int((maxresult / engineclass.page_result_count))
        if ((maxresult % engineclass.page_result_count) > 0):
            pagecount = (pagecount + 1)
        for page in range(1, (pagecount + 1)):
            tasklist.put({'engine': engineclass, 'what': what, 'sort': sort, 'page': page, 'enginlevel': enginlevel})
            # Later pages get a higher level, pushing them down in the sort.
            enginlevel = (enginlevel + 1)
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    # De-duplicate by magnet info-hash.
    resultlist = {}
    while (not queueResult.empty()):
        res_dict = queueResult.get_nowait()
        xbmc.log(msg=('rhash:' + str(res_dict['link'])), level=xbmc.LOGERROR)
        rhash = getmagnethash(res_dict['link'])
        if (not (rhash in resultlist)):
            resultlist[rhash] = res_dict
        queueResult.task_done()

    def getlevel(dict):
        # Sort key: the engine/page level assigned above.
        return dict['level']
    return sorted(resultlist.values(), key=getlevel)
def make_axes_innermost(ndim, axes):
    """Build a permutation that moves *axes* to the innermost (last)
    positions, plus the inverse permutation that undoes it.

    Returns a pair ``(forward, inverse)`` of length-*ndim* tuples:
    ``forward`` lists the remaining axes in order followed by *axes*, and
    ``inverse[a]`` gives the position axis ``a`` ends up at.
    """
    inner = list(axes)
    # All axes not being moved, in their original order, then the moved ones.
    forward = [axis for axis in range(ndim) if axis not in inner]
    forward.extend(inner)
    # Invert the permutation: inverse[axis] = its slot in `forward`.
    inverse = [0] * ndim
    for position, axis in enumerate(forward):
        inverse[axis] = position
    return tuple(forward), tuple(inverse)
# NOTE(review): the leading '.parametrize(...)' fragment looks like a
# '@pytest.mark.parametrize' decorator whose prefix was lost in extraction.
.parametrize('do_final_record', (True, False))
def test_journal_db_discard_to_deleted(journal_db, do_final_record):
    """Discarding to a checkpoint must restore exactly the state at that
    checkpoint, including a deletion recorded after it."""
    journal_db[1] = b'original-value'
    checkpoint_created = journal_db.record()
    del journal_db[1]
    checkpoint_deleted = journal_db.record()
    journal_db[1] = b'value-after-delete'
    if do_final_record:
        # An extra open changeset on top must not change discard semantics.
        journal_db.record()
    assert (journal_db[1] == b'value-after-delete')
    journal_db.discard(checkpoint_deleted)
    # At this checkpoint the key had been deleted.
    assert (1 not in journal_db)
    with pytest.raises(KeyError):
        journal_db[1]
    journal_db.discard(checkpoint_created)
    # Back to the state right after the first write.
    assert (journal_db[1] == b'original-value')
class EnCh(hass.Hass):
    """AppDaemon app that periodically checks Home Assistant entities for
    low battery levels, unavailable/unknown state, and stale updates, and
    reports the findings via log, optional notify service, and an optional
    Home Assistant sensor.

    NOTE(review): several log messages below are empty strings — possibly
    stripped text/emoji from extraction; confirm against the original app.
    """

    def lg(self, msg: str, *args: Any, icon: Optional[str]=None, repeat: int=1, **kwargs: Any) -> None:
        """Log *msg* (optionally prefixed by *icon*), *repeat* times."""
        kwargs.setdefault('ascii_encode', False)
        message = f"{(f'{icon} ' if icon else ' ')}{msg}"
        _ = [self.log(message, *args, **kwargs) for _ in range(repeat)]

    async def initialize(self) -> None:
        """Read app args, build self.cfg, and schedule the enabled checks."""
        self.icon = APP_ICON
        if (not py38_or_higher):
            # Warn (loudly) on Python < 3.8; icon_alert is only used here.
            icon_alert = ''
            self.lg('', icon=icon_alert)
            self.lg('')
            self.lg(f"please update to {hl('Python >= 3.8')}! ", icon=icon_alert)
            self.lg('')
            self.lg('', icon=icon_alert)
        if (not py37_or_higher):
            # Hard requirement: refuse to run on Python < 3.7.
            raise ValueError
        self.cfg: Dict[(str, Any)] = dict()
        self.cfg['show_friendly_name'] = bool(self.args.get('show_friendly_name', True))
        self.cfg['init_delay_secs'] = int(self.args.get('initial_delay_secs', INITIAL_DELAY))
        # Normalize the sensor entity id to the 'sensor.' domain.
        hass_sensor: str
        if (hass_sensor := self.args.get('hass_sensor', 'sensor.ench_entities')):
            self.cfg['hass_sensor'] = (hass_sensor if hass_sensor.startswith('sensor.') else f'sensor.{hass_sensor}')
        # Sensor state = total count; attributes hold the per-check lists.
        self.sensor_state: int = 0
        self.sensor_attrs: Dict[(str, Any)] = {check: [] for check in CHECKS}
        self.sensor_attrs.update({'unit_of_measurement': 'Entities', 'should_poll': False})
        if ('notify' in self.args):
            self.cfg['notify'] = self.args.get('notify')
        init_delay = ((await self.datetime()) + timedelta(seconds=self.cfg['init_delay_secs']))
        if ('battery' in self.args):
            config: Dict[(str, Union[(str, int)])] = self.args.get('battery')
            self.cfg['battery'] = dict(interval_min=int(config.get('interval_min', INTERVAL_BATTERY_MIN)), min_level=int(config.get('min_level', BATTERY_MIN_LEVEL)))
            self.choose_notify_recipient('battery', config)
            # Schedule with slight jitter to avoid synchronized bursts.
            (await self.run_every(self.check_battery, init_delay, (self.cfg['battery']['interval_min'] * 60), random_start=(- RANDOMIZE_SEC), random_end=RANDOMIZE_SEC))
        if ('unavailable' in self.args):
            config = self.args.get('unavailable')
            self.cfg['unavailable'] = dict(interval_min=int(config.get('interval_min', INTERVAL_UNAVAILABLE_MIN)), max_unavailable_min=int(config.get('max_unavailable_min', MAX_UNAVAILABLE_MIN)))
            self.choose_notify_recipient('unavailable', config)
            # NOTE(review): unlike the battery schedule above, this
            # run_every is not awaited — confirm whether that is intended.
            self.run_every(self.check_unavailable, ((await self.datetime()) + timedelta(seconds=self.cfg['init_delay_secs'])), (self.cfg['unavailable']['interval_min'] * 60), random_start=(- RANDOMIZE_SEC), random_end=RANDOMIZE_SEC)
        if ('stale' in self.args):
            config = self.args.get('stale', {})
            interval_min = config.get('interval_min', INTERVAL_STALE_MIN)
            max_stale_min = config.get('max_stale_min', MAX_STALE_MIN)
            # Never poll less often than the staleness threshold itself.
            self.cfg['stale'] = dict(interval_min=int(min([interval_min, max_stale_min])), max_stale_min=int(max_stale_min))
            self.cfg['stale']['entities'] = config.get('entities', [])
            self.choose_notify_recipient('stale', config)
            self.run_every(self.check_stale, ((await self.datetime()) + timedelta(seconds=self.cfg['init_delay_secs'])), (self.cfg['stale']['interval_min'] * 60), random_start=(- RANDOMIZE_SEC), random_end=RANDOMIZE_SEC)
        # Merge built-in and user exclude patterns (case-insensitive).
        exclude = set(EXCLUDE)
        exclude.update([e.lower() for e in self.args.get('exclude', set())])
        self.cfg['exclude'] = sorted(list(exclude))
        self.cfg.setdefault('_units', dict(interval_min='min', max_stale_min='min', min_level='%'))
        self.show_info(self.args)

    async def check_battery(self, _: Any) -> None:
        """Find entities whose battery level is at/below the configured minimum."""
        check_config = self.cfg['battery']
        results: List[Tuple[(str, int)]] = []
        self.lg('Checking entities for low battery levels...', icon=APP_ICON, level='DEBUG')
        states = (await self.get_state())
        entities = filter((lambda entity: (not any((fnmatch(entity, pattern) for pattern in self.cfg['exclude'])))), states)
        for entity in sorted(entities):
            battery_level = None
            try:
                # Entity ids mentioning "battery" carry the level as state.
                if (('battery_level' in entity) or ('battery' in entity)):
                    battery_level = int(states[entity]['state'])
                if (not battery_level):
                    # NOTE(review): this loop breaks unconditionally after the
                    # first attribute, so only LEVEL_ATTRIBUTES[0] is ever
                    # consulted (int(None) for missing attrs is swallowed by
                    # the except below). Confirm whether that is intended.
                    for attr in LEVEL_ATTRIBUTES:
                        battery_level = int(states[entity]['attributes'].get(attr))
                        break
            except (TypeError, ValueError):
                # Non-numeric or missing values: treat as "no battery info".
                pass
            if (battery_level and (battery_level <= check_config['min_level'])):
                results.append((entity, battery_level))
                last_updated = (await self.last_update(entity)).time().isoformat(timespec='seconds')
                self.lg(f"{(await self._name(entity))} has low {hl(f'battery {hl(int(battery_level))}')}% | last update: {last_updated}", icon=ICONS['battery'])
        # Per-check notify target overrides nothing; global wins if set.
        notify = (self.cfg.get('notify') or check_config.get('notify'))
        if (notify and results):
            (await self.call_service(str(notify).replace('.', '/'), message=f"{ICONS['battery']} Battery low ({len(results)}): {', '.join([f'{str((await self._name(entity[0], notification=True)))} {entity[1]}%' for entity in results])}"))
        if (('hass_sensor' in self.cfg) and self.cfg['hass_sensor']):
            (await self.update_sensor('battery', [entity[0] for entity in results]))
        self._print_result('battery', [entity[0] for entity in results], 'low battery levels')

    async def check_unavailable(self, _: Any) -> None:
        """Find entities stuck in an unavailable/unknown state too long."""
        check_config = self.cfg['unavailable']
        results: List[str] = []
        self.lg('Checking entities for unavailable/unknown state...', icon=APP_ICON, level='DEBUG')
        entities = filter((lambda entity: (not any((fnmatch(entity, pattern) for pattern in self.cfg['exclude'])))), (await self.get_state()))
        for entity in sorted(entities):
            state = (await self.get_state(entity_id=entity))
            if ((state in BAD_STATES) and (entity not in results)):
                last_update = (await self.last_update(entity))
                now: datetime = (await self.datetime(aware=True))
                unavailable_time: timedelta = (now - last_update)
                max_unavailable_min = timedelta(minutes=self.cfg['unavailable']['max_unavailable_min'])
                # Only report after the grace period has elapsed.
                if (unavailable_time >= max_unavailable_min):
                    results.append(entity)
                    last_updated = (await self.last_update(entity)).time().isoformat(timespec='seconds')
                    self.lg(f'{(await self._name(entity))} is {hl(state)} since {hl(int((unavailable_time.seconds / 60)))}min | last update: {last_updated}', icon=ICONS[state])
        notify = (self.cfg.get('notify') or check_config.get('notify'))
        if (notify and results):
            # NOTE(review): call_service is awaited in check_battery but not
            # here — an un-awaited coroutine may never run. Confirm.
            self.call_service(str(notify).replace('.', '/'), message=f"{APP_ICON} Unavailable entities ({len(results)}): {', '.join([str((await self._name(entity, notification=True))) for entity in results])}")
        if (('hass_sensor' in self.cfg) and self.cfg['hass_sensor']):
            (await self.update_sensor('unavailable', results))
        self._print_result('unavailable', results, 'unavailable/unknown state')

    async def check_stale(self, _: Any) -> None:
        """Find entities whose last_updated timestamp is older than allowed."""
        check_config = self.cfg['stale']
        results: List[str] = []
        self.lg('Checking for stale entities...', icon=APP_ICON, level='DEBUG')
        # An explicit entity list limits the check; otherwise scan everything.
        if self.cfg['stale']['entities']:
            all_entities = self.cfg['stale']['entities']
        else:
            all_entities = (await self.get_state())
        entities = filter((lambda entity: (not any((fnmatch(entity, pattern) for pattern in self.cfg['exclude'])))), all_entities)
        for entity in sorted(entities):
            attr_last_updated = (await self.get_state(entity_id=entity, attribute='last_updated'))
            if (not attr_last_updated):
                self.lg(f"{(await self._name(entity))} has no 'last_updated' attribute \_()_/ ", icon=ICONS['stale'])
                continue
            last_update = self.convert_utc(attr_last_updated)
            last_updated = (await self.last_update(entity)).time().isoformat(timespec='seconds')
            now: datetime = (await self.datetime(aware=True))
            stale_time: timedelta = (now - last_update)
            max_stale_min = timedelta(minutes=self.cfg['stale']['max_stale_min'])
            if (stale_time and (stale_time >= max_stale_min)):
                results.append(entity)
                self.lg(f"{(await self._name(entity))} is {hl(f'stale since {hl(int((stale_time.seconds / 60)))}')}min | last update: {last_updated}", icon=ICONS['stale'])
        notify = (self.cfg.get('notify') or check_config.get('notify'))
        if (notify and results):
            # NOTE(review): not awaited (cf. check_battery) — confirm.
            self.call_service(str(notify).replace('.', '/'), message=f"{APP_ICON} Stalled entities ({len(results)}): {', '.join([str((await self._name(entity, notification=True))) for entity in results])}")
        if (('hass_sensor' in self.cfg) and self.cfg['hass_sensor']):
            (await self.update_sensor('stale', results))
        self._print_result('stale', results, 'stalled updates')

    def choose_notify_recipient(self, check: str, config: Dict[(str, Any)]) -> None:
        """Store a per-check notify target unless a global one is configured."""
        if (('notify' in config) and ('notify' not in self.cfg)):
            self.cfg[check]['notify'] = config['notify']

    async def last_update(self, entity_id: str) -> Any:
        """Return the entity's 'last_updated' attribute as a datetime (UTC-converted)."""
        return self.convert_utc((await self.get_state(entity_id=entity_id, attribute='last_updated')))

    async def _name(self, entity: str, friendly_name: bool=False, notification: bool=False) -> Optional[str]:
        """Format an entity name for output; highlighted except in notifications."""
        name: Optional[str] = None
        if self.cfg['show_friendly_name']:
            name = (await self.friendly_name(entity))
        else:
            name = entity
        if ((notification is False) and name):
            name = hl_entity(name)
        return name

    def _print_result(self, check: str, entities: List[str], reason: str) -> None:
        """Log a one-line summary for a finished check."""
        if entities:
            self.lg(f"{hl(f'{len(entities)} entities')} with {hl(reason)}!", icon=APP_ICON, level='DEBUG')
        else:
            self.lg(f"{hl(f'no entities')} with {hl(reason)}!", icon=APP_ICON)

    async def update_sensor(self, check_name: str, entities: List[str]) -> None:
        """Push the per-check entity list into the summary sensor.

        The sensor state is the total entity count over all checks; state is
        only re-published when the list length for *check_name* changed.
        """
        if (check_name not in CHECKS):
            self.lg(f"Unknown check: {hl(f'no entities')} - {self.cfg['hass_sensor']} not updated!", icon=APP_ICON, level='ERROR')
        if (len(self.sensor_attrs[check_name]) != len(entities)):
            self.sensor_attrs[check_name] = entities
            self.sensor_state = sum([len(self.sensor_attrs[check]) for check in CHECKS])
            self.set_state(self.cfg['hass_sensor'], state=self.sensor_state, attributes=self.sensor_attrs)
            self.lg(f"{hl_entity(self.cfg['hass_sensor'])} -> {hl(self.sensor_state)} | {', '.join([f'{(hl(k) if (k == check_name) else k)}: {hl(len(v))}' for (k, v) in self.sensor_attrs.items() if (type(v) == list)])}", icon=APP_ICON, level='INFO')

    def show_info(self, config: Optional[Dict[(str, Any)]]=None) -> None:
        """Pretty-print the app configuration to the log on startup.

        NOTE(review): pops 'listeners' from self.config, i.e. mutates the
        stored configuration as a side effect.
        """
        if config:
            self.config = config
        if (not self.config):
            self.lg('no configuration available', icon='!!', level='ERROR')
            return
        room = ''
        if ('room' in self.config):
            room = f" - {hl(self.config['room'].capitalize())}"
        self.lg('')
        self.lg(f'{hl(APP_NAME)} v{hl(__version__)}{room}', icon=self.icon)
        self.lg('')
        listeners = self.config.pop('listeners', None)
        for (key, value) in self.config.items():
            # Hide AppDaemon plumbing and private keys from the dump.
            if ((key in ['module', 'class']) or key.startswith('_')):
                continue
            if isinstance(value, list):
                self.print_collection(key, value, 2)
            elif isinstance(value, dict):
                self.print_collection(key, value, 2)
            else:
                self._print_cfg_setting(key, value, 2)
        if listeners:
            self.lg(' event listeners:')
            for listener in sorted(listeners):
                self.lg(f' - {hl(listener)}')
        self.lg('')

    def print_collection(self, key: str, collection: Iterable[Any], indentation: int=2) -> None:
        """Recursively log a list/dict config value with indentation."""
        self.log(f"{(indentation * ' ')}{key}:")
        indentation = (indentation + 2)
        for item in collection:
            indent = (indentation * ' ')
            if isinstance(item, dict):
                # Named sub-sections recurse under their 'name'.
                if ('name' in item):
                    self.print_collection(item.pop('name', ''), item, indentation)
                else:
                    self.log(f'{indent}{hl(pformat(item, compact=True))}')
            elif isinstance(collection, dict):
                self._print_cfg_setting(item, collection[item], indentation)
            else:
                self.log(f'{indent}- {hl(item)}')

    def _print_cfg_setting(self, key: str, value: Union[(int, str)], indentation: int) -> None:
        """Log a single scalar setting, applying configured units/prefixes."""
        unit = prefix = ''
        indent = (indentation * ' ')
        if ((key == 'delay') and isinstance(value, int)):
            # Delays are shown both as m:ss and raw seconds.
            unit = 'min'
            min_value = f'{int((value / 60))}:{int((value % 60)):02d}'
            self.log(f'{indent}{key}: {prefix}{hl(min_value)}{unit} ~ {hl(value)}sec')
        else:
            if (('_units' in self.config) and (key in self.config['_units'])):
                unit = self.config['_units'][key]
            if (('_prefixes' in self.config) and (key in self.config['_prefixes'])):
                prefix = self.config['_prefixes'][key]
            self.log(f'{indent}{key}: {prefix}{hl(value)}{unit}')
class OptionPlotoptionsLineStatesHover(Options):
    """Option wrapper for plotOptions.line.states.hover.

    NOTE(review): options below appear as getter/setter pairs sharing a
    name; generated wrappers normally decorate getters with @property and
    setters with @<name>.setter — those decorators are not visible here, so
    as written each second ``def`` shadows the first. Confirm against the
    generator template.
    """

    def animation(self) -> 'OptionPlotoptionsLineStatesHoverAnimation':
        """Sub-options object controlling the hover animation."""
        return self._config_sub_data('animation', OptionPlotoptionsLineStatesHoverAnimation)

    def enabled(self):
        """Getter: whether the hover state is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter for the enabled flag."""
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionPlotoptionsLineStatesHoverHalo':
        """Sub-options object for the hover halo."""
        return self._config_sub_data('halo', OptionPlotoptionsLineStatesHoverHalo)

    def lineWidth(self):
        """Getter: absolute hover line width (default None)."""
        return self._config_get(None)

    def lineWidth(self, num: float):
        """Setter for the hover line width."""
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        """Getter: pixels added to the line width on hover (default 1)."""
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        """Setter for the hover line-width increment."""
        self._config(num, js_type=False)

    def marker(self) -> 'OptionPlotoptionsLineStatesHoverMarker':
        """Sub-options object for the hover marker."""
        return self._config_sub_data('marker', OptionPlotoptionsLineStatesHoverMarker)
class OptionPlotoptionsColumnrangeSonificationDefaultspeechoptionsPointgrouping(Options):
    """Option wrapper for plotOptions.columnrange.sonification.defaultSpeechOptions.pointGrouping.

    NOTE(review): getter/setter pairs share a name; the usual @property /
    @<name>.setter decorators are not visible here — as written each second
    ``def`` shadows the first. Confirm against the generator template.
    """

    def algorithm(self):
        """Getter: grouping algorithm (default ``'last'``)."""
        return self._config_get('last')

    def algorithm(self, text: str):
        """Setter for the grouping algorithm."""
        self._config(text, js_type=False)

    def enabled(self):
        """Getter: whether point grouping is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter for the enabled flag."""
        self._config(flag, js_type=False)

    def groupTimespan(self):
        """Getter: group timespan in milliseconds (default 15)."""
        return self._config_get(15)

    def groupTimespan(self, num: float):
        """Setter for the group timespan."""
        self._config(num, js_type=False)

    def prop(self):
        """Getter: point property used for grouping (default ``'y'``)."""
        return self._config_get('y')

    def prop(self, text: str):
        """Setter for the grouping property."""
        self._config(text, js_type=False)
def test_no_timestamp_modify(tmp_path):
    """Formatting an already-formatted file must not rewrite it on disk.

    The mtime is pinned to the epoch before calling ``mdformat.file``; if
    the formatter rewrote the unchanged content, the mtime would move.
    """
    target = tmp_path / 'test.md'
    target.write_bytes(b'lol\n')
    epoch = 0
    # Pin both atime and mtime to a known value.
    os.utime(target, (epoch, epoch))
    mdformat.file(target)
    assert os.path.getmtime(target) == epoch
# NOTE(review): the leading '.long_test', '.download', '.skipif(...)' and
# '.skip(...)' fragments look like '@pytest.mark.*' decorators whose
# '@pytest.mark' prefix was lost in extraction.
.long_test
.download
.skipif(NO_MARS, reason='No access to MARS')
.skip(reason='No access to MARS for now (DHS move)')
def test_mars_grib_2():
    """Fetch two surface parameters from MARS and check that splitting on
    'param' yields exactly two fields."""
    s = load_source('mars', param=['2t', 'msl'], levtype='sfc', area=[50, (- 50), 20, 50], grid=[1, 1], date='2012-12-13', split_on='param')
    assert (len(s) == 2)
class RewardWrapper(Wrapper[EnvType], ABC):
    """Abstract env wrapper that transforms the reward of every step.

    Subclasses implement ``reward`` to map the raw step reward to a new one.
    """

    def step(self, action) -> Tuple[(Any, Any, bool, Dict[(Any, Any)])]:
        """Step the wrapped env and pass its reward through ``self.reward``."""
        (observation, reward, done, info) = self.env.step(action)
        return (observation, self.reward(reward), done, info)

    def reward(self, reward: Any) -> Any:
        # NOTE(review): this method has no visible body and the bare
        # '(Wrapper)' fragment below looks like a stripped decorator such as
        # '@override(Wrapper)' (and this method was presumably marked
        # '@abstractmethod' with a docstring body). Confirm against the
        # original module.

    (Wrapper)
    def get_observation_and_action_dicts(self, maze_state: Optional[MazeStateType], maze_action: Optional[MazeActionType], first_step_in_episode: bool) -> Tuple[(Optional[Dict[(Union[(int, str)], Any)]], Optional[Dict[(Union[(int, str)], Any)]])]:
        """Delegate observation/action dict conversion to the wrapped env."""
        return self.env.get_observation_and_action_dicts(maze_state, maze_action, first_step_in_episode)
# NOTE(review): the leading '.parametrize(...)' fragment looks like a
# '@pytest.mark.parametrize' decorator whose prefix was lost in extraction.
.parametrize('method,expected', (('test_endpoint', 'value-a'), ('not_implemented', NotImplementedError)))
def test_result_middleware(w3, method, expected):
    """The result-generator middleware must answer registered endpoints with
    the callback's value and raise for endpoints it does not handle."""
    def _callback(method, params):
        # Echo the first request param back as the RPC result.
        return params[0]
    w3.middleware_onion.add(construct_result_generator_middleware({'test_endpoint': _callback}))
    if (isinstance(expected, type) and issubclass(expected, Exception)):
        # Unregistered methods should surface the expected exception.
        with pytest.raises(expected):
            w3.manager.request_blocking(method, [expected])
    else:
        actual = w3.manager.request_blocking(method, [expected])
        assert (actual == expected)
class BoundedSemaphore(Semaphore):
    """A Semaphore that refuses to be released more times than acquired.

    Mirrors the stdlib ``threading.BoundedSemaphore`` contract: the counter
    may never exceed its initial value.
    """

    def __init__(self, value=1):
        super().__init__(value)
        # Remember the initial counter so release() can enforce the bound.
        self.original_counter = value

    def release(self, blocking=True):
        """Release the semaphore; raise ValueError if it is already full.

        NOTE(review): the check-then-release below is not atomic here; this
        is safe only if the base Semaphore is used cooperatively (e.g. a
        single event loop) or callers serialize release() — confirm against
        the base class's concurrency model.
        """
        if (self.counter >= self.original_counter):
            raise ValueError('Semaphore released too many times')
        return super().release(blocking)
class TestGraphTaskAffectedConsentSystems():
    """Tests for how consent-propagation GraphTasks record per-system status
    (skipped / errored) on the affected privacy preference histories.

    NOTE(review): the bare '()' and "('fides...run_consent_request')"
    fragments look like stripped '@pytest.fixture' and '@mock.patch(...)'
    decorators — confirm against the original test module.
    """

    ()
    def mock_graph_task(self, db, mailchimp_transactional_connection_config_no_secrets, privacy_request_with_consent_policy):
        # Build a GraphTask wired to the Mailchimp Transactional connector.
        task_resources = TaskResources(privacy_request_with_consent_policy, privacy_request_with_consent_policy.policy, [mailchimp_transactional_connection_config_no_secrets], db)
        tn = TraversalNode(generate_node('a', 'b', 'c', 'c2'))
        tn.node.dataset.connection_key = mailchimp_transactional_connection_config_no_secrets.key
        return GraphTask(tn, task_resources)

    ('fides.api.service.connectors.saas_connector.SaaSConnector.run_consent_request')
    def test_skipped_consent_task_for_connector(self, mock_run_consent_request, mock_graph_task, db, privacy_request_with_consent_policy, privacy_preference_history, privacy_preference_history_us_ca_provide):
        """When the connector signals SkippingConsentPropagation, all
        preferences are marked 'skipped' and a skipped ExecutionLog is
        written."""
        # Attach both preference histories to the privacy request under test.
        privacy_preference_history.privacy_request_id = privacy_request_with_consent_policy.id
        privacy_preference_history_us_ca_provide.privacy_request_id = privacy_request_with_consent_policy.id
        privacy_preference_history.save(db)
        privacy_preference_history_us_ca_provide.save(db)
        mock_run_consent_request.side_effect = SkippingConsentPropagation('No preferences are relevant')
        ret = mock_graph_task.consent_request({'email': 'customer-'})
        assert (ret is False)
        db.refresh(privacy_preference_history)
        db.refresh(privacy_preference_history_us_ca_provide)
        assert (privacy_preference_history.affected_system_status == {'mailchimp_transactional_instance': 'skipped'})
        assert (privacy_preference_history_us_ca_provide.affected_system_status == {'mailchimp_transactional_instance': 'skipped'})
        logs = db.query(ExecutionLog).filter((ExecutionLog.privacy_request_id == privacy_request_with_consent_policy.id)).order_by(ExecutionLog.created_at.desc())
        assert (logs.first().status == ExecutionLogStatus.skipped)

    ('fides.api.service.connectors.saas_connector.SaaSConnector.run_consent_request')
    def test_errored_consent_task_for_connector_no_relevant_preferences(self, mock_run_consent_request, mailchimp_transactional_connection_config_no_secrets, mock_graph_task, db, privacy_request_with_consent_policy, privacy_preference_history, privacy_preference_history_us_ca_provide):
        """When the connector raises, only the preference cached as relevant
        is marked 'error'; the irrelevant one stays 'skipped'."""
        privacy_preference_history.privacy_request_id = privacy_request_with_consent_policy.id
        privacy_preference_history_us_ca_provide.privacy_request_id = privacy_request_with_consent_policy.id
        privacy_preference_history.save(db)
        privacy_preference_history_us_ca_provide.save(db)
        mock_run_consent_request.side_effect = BaseException('Request failed')
        # Pre-cache which preferences/identities are relevant so error
        # reporting can distinguish them after the failure.
        cache_initial_status_and_identities_for_consent_reporting(db, privacy_request_with_consent_policy, mailchimp_transactional_connection_config_no_secrets, relevant_preferences=[privacy_preference_history_us_ca_provide], relevant_user_identities={'email': 'customer-'})
        with pytest.raises(BaseException):
            ret = mock_graph_task.consent_request({'email': 'customer-'})
            # NOTE(review): unreachable if the call above raises as expected;
            # indentation in the source is ambiguous — confirm placement.
            assert (ret is False)
        db.refresh(privacy_preference_history)
        db.refresh(privacy_preference_history_us_ca_provide)
        assert (privacy_preference_history.affected_system_status == {'mailchimp_transactional_instance': 'skipped'})
        assert (privacy_preference_history_us_ca_provide.affected_system_status == {'mailchimp_transactional_instance': 'error'})
        logs = db.query(ExecutionLog).filter((ExecutionLog.privacy_request_id == privacy_request_with_consent_policy.id)).order_by(ExecutionLog.created_at.desc())
        assert (logs.first().status == ExecutionLogStatus.error)
class queue_stats_request(stats_request):
    """Queue-statistics request message (wire version 2 with message type 18
    and stats_type 5).

    NOTE(review): this looks like generated serialization code (loxigen
    style) and uses Python-2 ``str`` for packing — joining struct output
    with text padding would fail on Python 3. Presumably regenerated rather
    than hand-edited.
    """
    version = 2
    type = 18
    stats_type = 5

    def __init__(self, xid=None, flags=None, port_no=None, queue_id=None):
        # Unset numeric fields default to 0; xid stays None until assigned.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (port_no != None):
            self.port_no = port_no
        else:
            self.port_no = 0
        if (queue_id != None):
            self.queue_id = queue_id
        else:
            self.queue_id = 0
        return

    def pack(self):
        """Serialize to wire format; the length field (element 2) is patched
        in after all parts have been assembled."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))
        packed.append(util.pack_port_no(self.port_no))
        packed.append(struct.pack('!L', self.queue_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a message from *reader* and return the new object.

        NOTE(review): no ``self`` parameter — this is effectively a static
        factory; a '@staticmethod' decorator was presumably stripped.
        """
        obj = queue_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 2)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 5)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.port_no = util.unpack_port_no(reader)
        obj.queue_id = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        # Field-by-field equality; type must match exactly.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.port_no != other.port_no):
            return False
        if (self.queue_id != other.queue_id):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*."""
        q.text('queue_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('port_no = ')
                q.text(util.pretty_port(self.port_no))
                q.text(',')
                q.breakable()
                q.text('queue_id = ')
                q.text(('%#x' % self.queue_id))
            q.breakable()
        q.text('}')
class OptionPlotoptionsArearangeSonificationTracksPointgrouping(Options):
    """Option wrapper for plotOptions.arearange.sonification.tracks.pointGrouping.

    NOTE(review): getter/setter pairs share a name; the usual @property /
    @<name>.setter decorators are not visible here — as written each second
    ``def`` shadows the first. Confirm against the generator template.
    """

    def algorithm(self):
        """Getter: grouping algorithm (default ``'minmax'``)."""
        return self._config_get('minmax')

    def algorithm(self, text: str):
        """Setter for the grouping algorithm."""
        self._config(text, js_type=False)

    def enabled(self):
        """Getter: whether point grouping is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter for the enabled flag."""
        self._config(flag, js_type=False)

    def groupTimespan(self):
        """Getter: group timespan in milliseconds (default 15)."""
        return self._config_get(15)

    def groupTimespan(self, num: float):
        """Setter for the group timespan."""
        self._config(num, js_type=False)

    def prop(self):
        """Getter: point property used for grouping (default ``'y'``)."""
        return self._config_get('y')

    def prop(self, text: str):
        """Setter for the grouping property."""
        self._config(text, js_type=False)
class _TestGlyph3(_TestGlyph):
    """Test glyph variant whose outline carries explicit identifiers on every
    path and point (used to exercise identifier round-tripping)."""

    def drawPoints(self, pen):
        """Replay the fixed outline into the point pen *pen*."""
        # Contour of two on-curve 'line' points; first point named 'start'.
        pen.beginPath(identifier='abc')
        pen.addPoint((0.0, 0.0), 'line', False, 'start', identifier='0000')
        pen.addPoint((10, 110), 'line', False, None, identifier='0001')
        pen.endPath()
        # Curve contour: two off-curve points, then a smooth on-curve
        # 'curve' point named 'last'.
        pen.beginPath(identifier='pth2')
        pen.addPoint((50.0, 75.0), None, False, None, identifier='0002')
        pen.addPoint((60.0, 50.0), None, False, None, identifier='0003')
        pen.addPoint((50.0, 0.0), 'curve', True, 'last', identifier='0004')
        pen.endPath()
class OptionSeriesHeatmapSonificationTracksMappingVolume(Options):
    """Option wrapper for series.heatmap.sonification.tracks.mapping.volume.

    NOTE(review): getter/setter pairs share a name; the usual @property /
    @<name>.setter decorators are not visible here — as written each second
    ``def`` shadows the first. Confirm against the generator template.
    """

    def mapFunction(self):
        """Getter: mapping function (default None)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Setter for the mapping function."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Getter: point property mapped to volume (default None)."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Setter for the mapped property."""
        self._config(text, js_type=False)

    def max(self):
        """Getter: maximum volume (default None)."""
        return self._config_get(None)

    def max(self, num: float):
        """Setter for the maximum volume."""
        self._config(num, js_type=False)

    def min(self):
        """Getter: minimum volume (default None)."""
        return self._config_get(None)

    def min(self, num: float):
        """Setter for the minimum volume."""
        self._config(num, js_type=False)

    def within(self):
        """Getter: 'within' constraint for the mapping (default None)."""
        return self._config_get(None)

    def within(self, value: Any):
        """Setter for the 'within' constraint."""
        self._config(value, js_type=False)
def test_correct_lag_when_using_freq(df_time):
    """LagFeatures(freq=...) must shift by wall-clock offsets ('1h',
    '15min'), support several frequencies at once, and honor
    drop_original."""
    # Five 15-minute timestamps, so a '1h' lag shifts by 4 rows and a
    # '15min' lag shifts by 1 row.
    date_time = [pd.Timestamp('2020-05-15 12:00:00'), pd.Timestamp('2020-05-15 12:15:00'), pd.Timestamp('2020-05-15 12:30:00'), pd.Timestamp('2020-05-15 12:45:00'), pd.Timestamp('2020-05-15 13:00:00')]
    expected_results = {'ambient_temp': [31.31, 31.51, 32.15, 32.39, 32.62], 'module_temp': [49.18, 49.84, 52.35, 50.63, 49.61], 'irradiation': [0.51, 0.79, 0.65, 0.76, 0.42], 'color': (['blue'] * 5), 'ambient_temp_lag_1h': [np.nan, np.nan, np.nan, np.nan, 31.31], 'module_temp_lag_1h': [np.nan, np.nan, np.nan, np.nan, 49.18], 'irradiation_lag_1h': [np.nan, np.nan, np.nan, np.nan, 0.51], 'ambient_temp_lag_15min': [np.nan, 31.31, 31.51, 32.15, 32.39], 'module_temp_lag_15min': [np.nan, 49.18, 49.84, 52.35, 50.63], 'irradiation_lag_15min': [np.nan, 0.51, 0.79, 0.65, 0.76]}
    expected_results_df = pd.DataFrame(data=expected_results, index=date_time)
    # Single frequency: only the 1h lag columns are produced.
    transformer = LagFeatures(freq='1h')
    df_tr = transformer.fit_transform(df_time)
    assert df_tr.head(5).equals(expected_results_df.drop(['ambient_temp_lag_15min', 'module_temp_lag_15min', 'irradiation_lag_15min'], axis=1))
    # Two frequencies at once: both lag column sets are produced.
    transformer = LagFeatures(freq=['1h', '15min'])
    df_tr = transformer.fit_transform(df_time)
    assert df_tr.head(5).equals(expected_results_df)
    # drop_original removes the numeric source columns but keeps 'color'.
    transformer = LagFeatures(freq=['1h'], drop_original=True)
    df_tr = transformer.fit_transform(df_time)
    assert df_tr.head(5).equals(expected_results_df[['color', 'ambient_temp_lag_1h', 'module_temp_lag_1h', 'irradiation_lag_1h']])
class MeetAllCondition(Condition):
    """A composite Condition that is met only when every child condition is met.

    Per-child results are persisted in a ``ValueState`` (keyed by *name*) as a
    list of booleans, so results from earlier events survive across calls; a
    child whose expected events match the current event is re-evaluated and
    may flip back to False.
    """

    def __init__(self, name: str, conditions: List[Condition]):
        # The composite listens for the union of all children's event keys.
        events = []
        for c in conditions:
            events.extend(c.expect_event_keys)
        super().__init__(expect_event_keys=events)
        self.conditions = conditions
        # State key used to persist the per-child results.
        self.name = name

    def is_met(self, event: Event, context: Context) -> bool:
        state: ValueState = context.get_state(ValueStateDescriptor(name=self.name))
        state_value = state.value()
        if state_value is None:
            # First event: nothing has been met yet.
            state_value = [False] * len(self.conditions)
        # Re-evaluate only the children that care about this event; the rest
        # keep their previously stored result.
        for i, condition in enumerate(self.conditions):
            if match_events(event_keys=condition.expect_event_keys, event=event):
                state_value[i] = bool(condition.is_met(event, context))
        state.update(state_value)
        # Met only when every child has (at some point) been met.
        return all(state_value)
# NOTE(review): the leading '()' and the "('path', ...)" / "('--kernel', ...)"
# fragments look like click decorators ('@click.command()', '@click.argument',
# '@click.option') whose '@click.*' prefixes were lost in extraction.
()
('path', nargs=(- 1), type=click.Path(exists=True, dir_okay=False))
('--kernel', help='The name of the Jupyter kernel to attach to this markdown file.')
def init(path, kernel):
    """Initialize each given markdown file with a MyST header so it can be
    executed with the given Jupyter kernel."""
    # Imported lazily to keep CLI startup fast.
    from jupyter_book.utils import init_myst_file
    for ipath in path:
        init_myst_file(ipath, kernel, verbose=True)
class EditReplyForm(ReplyForm):
    """Form for editing an existing reply; upload choices are restricted to
    attachments of the post being edited."""

    def __init__(self, post_id, *args, **kwargs):
        # NOTE(review): __init__ returns None, so `self.form` is always None
        # here — the assignment looks like a leftover. Confirm no caller
        # relies on the attribute before removing it.
        self.form = super(EditReplyForm, self).__init__(*args, **kwargs)
        # Restrict selectable uploads to those attached to this post.
        self.uploads.choices = generate_choices('Post', id=post_id)

    # Checkbox list of the post's uploads (choices filled per instance above).
    uploads = MultiCheckBoxField('Label', coerce=int)
    submit = SubmitField(lazy_gettext('Edit Reply'), render_kw=form_class_button, validators=[DataRequired()])
class BenjiTestCaseBase(TestCaseBase):
    """Base test case that initializes the Benji I/O and storage factories
    for each test and tears them down afterwards."""

    def setUp(self):
        super().setUp()
        IOFactory.initialize(self.config)
        StorageFactory.initialize(self.config)

    def tearDown(self):
        # Close storage before the base class cleans up its resources.
        StorageFactory.close()
        super().tearDown()

    def benji_open(self, init_database=False, in_memory_database=False):
        """Create (and remember as self.benji) a Benji instance.

        When *init_database* is set, the database is created from scratch
        and also destroyed again on close (``_destroy_database``).
        """
        self.benji = Benji(self.config, init_database=init_database, in_memory_database=in_memory_database, _destroy_database=init_database)
        return self.benji
def get_dep_names(parsed_lockfile: dict, include_devel: bool=True) -> list:
    """Extract dependency names from a parsed poetry lockfile.

    Args:
        parsed_lockfile: Lockfile contents as parsed from TOML; package
            entries live under the top-level ``package`` key.
        include_devel: Also include packages in the ``dev`` category.

    Returns:
        Names of all non-optional packages, in lockfile order.
    """
    dep_names = []
    # Read the 'package' section directly instead of scanning all sections.
    for package in parsed_lockfile.get('package', []):
        # Optional packages (extras) are never installed by default.
        if package['optional']:
            continue
        category = package['category']
        if category == 'main' or (category == 'dev' and include_devel):
            dep_names.append(package['name'])
    return dep_names
def test_runs_before():
    """Node-level ordering: ``runs_before`` and ``>>`` must let a subworkflow
    node gate a downstream task node inside a workflow definition.

    NOTE(review): the bare '()' fragment below looks like a stripped
    decorator (e.g. '@task()'); the helpers here are presumably decorated
    tasks/workflows in the original module.
    """
    def t2(a: str, b: str) -> str:
        return (b + a)

    ()
    def sleep_task(a: int) -> str:
        a = (a + 2)
        return ('world-' + str(a))

    def my_subwf(a: int) -> (typing.List[str], int):
        s = []
        for i in range(a):
            s.append(sleep_task(a=i))
        return (s, 5)

    def my_wf(a: int, b: str) -> (str, typing.List[str], int):
        # Create explicit nodes so ordering can be declared between them.
        subwf_node = create_node(my_subwf, a=a)
        t2_node = create_node(t2, a=b, b=b)
        subwf_node.runs_before(t2_node)
        # '>>' is the operator spelling of runs_before; declaring both is
        # redundant but exercises both forms.
        (subwf_node >> t2_node)
        return (t2_node.o0, subwf_node.o0, subwf_node.o1)

    my_wf(a=5, b='hello')
def extractThesecretgardenWordpressCom(item):
    """Map a feed item to a release message.

    Returns None for preview items or items without chapter/volume info,
    a release message for recognized translator tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Tag -> (release name, translation type); insertion order matters.
    tag_to_release = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_to_release.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CreditMemo(DeleteMixin, QuickbooksTransactionEntity, QuickbooksManagedObject, LinkedTxnMixin):
    """QuickBooks Online CreditMemo transaction entity.

    The *_dict mappings drive (de)serialization by the managed-object
    machinery: class_dict maps JSON keys to single nested objects, list_dict
    maps keys to lists, and detail_dict maps line-detail type names to line
    classes.
    """

    class_dict = {
        'BillAddr': Address,
        'ShipAddr': Address,
        'DepartmentRef': Ref,
        'ClassRef': Ref,
        'CustomerRef': Ref,
        'CurrencyRef': Ref,
        'SalesTermRef': Ref,
        'CustomerMemo': CustomerMemo,
        'BillEmail': EmailAddress,
        'TxnTaxDetail': TxnTaxDetail,
        'PaymentMethodRef': Ref,
        'DepositToAccountRef': Ref
    }

    list_dict = {
        'CustomField': CustomField,
        'Line': DetailLine
    }

    detail_dict = {
        'SalesItemLineDetail': SalesItemLine,
        'SubTotalLineDetail': SubtotalLine,
        'DiscountLineDetail': DiscountLine,
        'DescriptionLineDetail': DescriptionOnlyLine
    }

    qbo_object_name = 'CreditMemo'

    def __init__(self):
        super(CreditMemo, self).__init__()
        # Scalar fields with their default values.
        self.RemainingCredit = 0
        self.ExchangeRate = 0
        self.DocNumber = ''
        self.TxnDate = ''
        self.PrivateNote = ''
        self.TotalAmt = 0
        self.ApplyTaxAfterDiscount = ''
        self.PrintStatus = 'NotSet'
        self.EmailStatus = 'NotSet'
        self.Balance = 0
        self.GlobalTaxCalculation = 'TaxExcluded'
        # Nested objects, populated on read via class_dict above.
        self.BillAddr = None
        self.ShipAddr = None
        self.ClassRef = None
        self.DepartmentRef = None
        self.CustomerRef = None
        self.CurrencyRef = None
        self.CustomerMemo = None
        self.BillEmail = None
        self.TxnTaxDetail = None
        self.SalesTermRef = None
        # List-valued fields, populated via list_dict above.
        self.CustomField = []
        self.Line = []

    def __str__(self):
        return str(self.TotalAmt)

    def to_ref(self):
        """Build a Ref (type + Id) pointing at this credit memo."""
        ref = Ref()
        ref.type = self.qbo_object_name
        ref.value = self.Id
        return ref
# NOTE(review): the fragment below looks like stripped "@pytest.mark.django_db"
# and "@pytest.mark.skip" decorators — restore them from the original source.
.django_db .skip(reason='Test based on pre-databricks loader code. Remove when fully cut over.')
def test_load_source_assistance_by_ids():
    """End-to-end check of the FABS nightly loader for three specific broker ids:
    transactions, award rollup, fiscal years, and monetary aggregates."""
    source_assistance_id_list = [101, 201, 301]
    _assemble_source_assistance_records(source_assistance_id_list)
    call_command('fabs_nightly_loader', '--ids', *source_assistance_id_list)
    # All three broker records should have been loaded as FABS transactions.
    usaspending_transactions = TransactionFABS.objects.all()
    assert (len(usaspending_transactions) == 3)
    tx_fabs_broker_ref_ids = [_.published_fabs_id for _ in usaspending_transactions]
    assert (101 in tx_fabs_broker_ref_ids)
    assert (201 in tx_fabs_broker_ref_ids)
    assert (301 in tx_fabs_broker_ref_ids)
    tx_fabs_broker_ref_id_strings = [_.afa_generated_unique for _ in usaspending_transactions]
    assert ('101' in tx_fabs_broker_ref_id_strings)
    assert ('201' in tx_fabs_broker_ref_id_strings)
    assert ('301' in tx_fabs_broker_ref_id_strings)
    tx_norm_broker_ref_id_strings = [_.transaction.transaction_unique_id for _ in usaspending_transactions]
    assert ('101' in tx_norm_broker_ref_id_strings)
    assert ('201' in tx_norm_broker_ref_id_strings)
    assert ('301' in tx_norm_broker_ref_id_strings)
    # All three transactions should roll up to a single award.
    usaspending_awards = Award.objects.all()
    assert (len(usaspending_awards) == 1)
    new_award = usaspending_awards[0]
    tx_norm_awd = [_.transaction.award for _ in usaspending_transactions]
    assert (usaspending_awards[0] == tx_norm_awd[0] == tx_norm_awd[1] == tx_norm_awd[2])
    tx_norm_awd_ids = [_.transaction.award_id for _ in usaspending_transactions]
    assert (new_award.id == tx_norm_awd_ids[0] == tx_norm_awd_ids[1] == tx_norm_awd_ids[2])
    assert (new_award.transaction_unique_id == 'NONE')
    # Earliest/latest transaction links follow the broker id ordering.
    assert (new_award.latest_transaction.transaction_unique_id == '301')
    assert (new_award.earliest_transaction.transaction_unique_id == '101')
    transactions_by_id = {transaction.published_fabs_id: transaction.transaction for transaction in usaspending_transactions}
    assert (transactions_by_id[101].fiscal_year == 2010)
    assert (transactions_by_id[201].fiscal_year == 2010)
    assert (transactions_by_id[301].fiscal_year == 2011)
    # Award-level amounts aggregated across the three transactions.
    expected_award_amounts = {'non_federal_funding_amount': [], 'total_funding_amount': [], 'total_loan_value': [6000603], 'total_indirect_federal_sharing': [9000603], 'total_obligation': [3000603], 'total_subsidy_cost': []}
    award_values = list(Award.objects.values(*expected_award_amounts))
    for (field, expected_values) in expected_award_amounts.items():
        assert (sorted([val[field] for val in award_values]) == expected_values), f'incorrect value for field: {field}'
    # Per-transaction normalized amounts.
    expected_transaction_normalized_amounts = {'non_federal_funding_amount': [5000101, 5000201, 5000301], 'funding_amount': [6000101, 6000201, 6000301], 'face_value_loan_guarantee': [2000101, 2000201, 2000301], 'indirect_federal_sharing': [3000101, 3000201, 3000301], 'federal_action_obligation': [1000101, 1000201, 1000301], 'original_loan_subsidy_cost': [4000101, 4000201, 4000301]}
    transaction_normalized_values = list(TransactionNormalized.objects.values(*expected_transaction_normalized_amounts))
    for (field, expected_values) in expected_transaction_normalized_amounts.items():
        assert (sorted([val[field] for val in transaction_normalized_values]) == expected_values), f'incorrect value for field: {field}'
    # Per-transaction FABS amounts (total_funding_amount is stored as strings here).
    expected_transaction_fabs_amounts = {'non_federal_funding_amount': [5000101, 5000201, 5000301], 'total_funding_amount': ['6000101', '6000201', '6000301'], 'face_value_loan_guarantee': [2000101, 2000201, 2000301], 'indirect_federal_sharing': [3000101, 3000201, 3000301], 'federal_action_obligation': [1000101, 1000201, 1000301], 'original_loan_subsidy_cost': [4000101, 4000201, 4000301]}
    transaction_fabs_values = list(TransactionFABS.objects.values(*expected_transaction_fabs_amounts))
    for (field, expected_values) in expected_transaction_fabs_amounts.items():
        assert (sorted([val[field] for val in transaction_fabs_values]) == expected_values), f'incorrect value for field: {field}'
class TestAllOF11(unittest.TestCase):
    """Smoke tests over every concrete OpenFlow 1.1 wire class:
    pack/unpack round-trips, message parsing, and show() output."""

    def setUp(self):
        modules = [ofp.action, ofp.message, ofp.common]
        candidates = [
            value
            for module in modules
            for value in module.__dict__.values()
            if isinstance(value, type)
            and issubclass(value, loxi.OFObject)
            and (not hasattr(value, 'subtypes'))
        ]
        # Deterministic order so failures are reproducible from run to run.
        candidates.sort(key=str)
        self.klasses = candidates

    def test_serialization(self):
        expected_failures = []
        for klass in self.klasses:
            def roundtrip():
                obj = klass()
                if hasattr(obj, 'xid'):
                    obj.xid = 42
                packed = obj.pack()
                reparsed = klass.unpack(OFReader(packed))
                self.assertEqual(obj, reparsed)
            if klass in expected_failures:
                self.assertRaises(Exception, roundtrip)
            else:
                roundtrip()

    def test_parse_message(self):
        expected_failures = []
        for klass in self.klasses:
            if not issubclass(klass, ofp.message.message):
                continue
            def parse_roundtrip():
                obj = klass(xid=42)
                packed = obj.pack()
                reparsed = ofp.message.parse_message(packed)
                self.assertEqual(obj, reparsed)
            if klass in expected_failures:
                self.assertRaises(Exception, parse_roundtrip)
            else:
                parse_roundtrip()

    def test_show(self):
        expected_failures = []
        for klass in self.klasses:
            def render():
                obj = klass()
                if hasattr(obj, 'xid'):
                    obj.xid = 42
                obj.show()
            if klass in expected_failures:
                self.assertRaises(Exception, render)
            else:
                render()
# NOTE(review): the original Flask route decorator
# (@app.route('/get_workflow_ports_md', methods=['POST'])) was lost in
# extraction; restore it from the original source.
def get_workflow_ports_md():
    """POST endpoint: return port metadata for the workflow id in the JSON body."""
    payload = request.get_json(True)
    try:
        wf_unique_id = str(payload['wf_unique_id'])
    except Exception as exc:
        return jsonify({'Error in your workflow': str(exc)})
    try:
        ports_meta = fsh.read_workflow_ports_meta_data(wf_unique_id)
    except Exception as exc:
        return jsonify({'Error processing the data': str(exc)})
    return jsonify({'Result': ports_meta})
def fatal(msg, code=1, end='\n'):
    """Print an error message (timestamped when writing to the log file) and exit.

    Writes to args.log when set, otherwise to stderr, then exits with *code*.
    """
    if args.log:
        stream = args.log
        # Millisecond-precision timestamp only for the log file.
        stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:(- 3)]
        line = '{:s}: [E] {:s}'.format(stamp, msg)
    else:
        stream = sys.stderr
        line = '[E] {:s}'.format(msg)
    print(line, file=stream, end=end)
    sys.exit(code)
class NodeType(GivElm):
    """Graph element for a node type; keeps a class-wide counter updated by stat()."""

    # Class-wide count, incremented once per stat() call.
    total = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # 'name' is a required keyword argument.
        self.name = kwargs['name']

    def __repr__(self):
        return '(NodeType {})'.format(self.name)

    def __str__(self):
        return self.__repr__()

    def stat(self):
        super().stat()
        NodeType.total += 1
def assert_valid_fields(transaction_dict):
    """Validate a transaction dict's keys and values.

    Raises:
        TypeError: if required keys are missing, unrecognized keys are present,
            or any value fails its validator.
    """
    provided_keys = set(transaction_dict.keys())
    missing_keys = REQUIRED_TRANSACTION_KEYS.difference(provided_keys)
    if missing_keys:
        raise TypeError(f'Transaction must include these fields: {repr(missing_keys)}')
    superfluous_keys = provided_keys.difference(ALLOWED_TRANSACTION_KEYS)
    if superfluous_keys:
        raise TypeError(f'Transaction must not include unrecognized fields: {repr(superfluous_keys)}')
    # Run each field's validator over the supplied values.
    valid_fields: Dict[(str, bool)] = apply_formatters_to_dict(LEGACY_TRANSACTION_VALID_VALUES, transaction_dict)
    invalid = {key: transaction_dict[key] for (key, valid) in valid_fields.items() if (not valid)}
    if invalid:
        raise TypeError(f'Transaction had invalid fields: {repr(invalid)}')
def trange_years(start, end, years):
    """Return timestamps at every *years*-year boundary covering [start, end].

    Boundaries fall on Jan 1st of years aligned to a grid anchored at 2000.
    """
    dt_start = safe_fromtimestamp(start)
    dt_end = safe_fromtimestamp(end)
    # Offset of the start year within the years-sized grid anchored at 2000.
    offset = ((dt_start.year - 2000) % years)
    if offset < 0:
        offset += years
    cursor = datetime((dt_start.year - offset), 1, 1, 0, 0, 0, 0)
    # Advance to the first grid point at or after the start.
    while cursor < dt_start:
        cursor = _advance_years(cursor, years)
    result = []
    while cursor <= dt_end:
        result.append(dt_to_sec(cursor))
        cursor = _advance_years(cursor, years)
    return result
def handle_images_found_in_markdown(markdown: str, new_img_dir: Path, lib_dir: Path) -> str:
    """Rewrite markdown image links to 'assets/img/<name>' and copy the images there.

    Args:
        markdown: Markdown text to rewrite.
        new_img_dir: Directory the referenced image files are copied into.
        lib_dir: Library root; relative image paths are resolved against
            lib_dir / 'tutorials'.

    Returns:
        The markdown with every image path replaced by 'assets/img/<name>'.
        Returned unchanged when no image links are found.
    """
    markdown_image_pattern = re.compile(r'!\[[^\]]*\]\((.*?)(?="|\))(".*")?\)')
    matches = list(markdown_image_pattern.finditer(markdown))
    if not matches:
        return markdown
    pieces = []
    last_end = 0
    for match in matches:
        # Replace exactly the path captured by group 1.  The previous code
        # re-searched the document for the raw path text (unescaped regex, and
        # against stale offsets once the first replacement changed lengths),
        # which corrupted documents with multiple images; using the match span
        # directly avoids both problems.
        start, end = match.span(1)
        old_path = Path(match.group(1))
        name = old_path.name.strip()
        pieces.append(markdown[last_end:start])
        pieces.append(f'assets/img/{name}')
        last_end = end
        # Copy the image next to the generated page.
        if old_path.exists():
            old_img_path = old_path
        else:
            old_img_path = ((lib_dir / 'tutorials') / old_path).resolve()
        shutil.copy(str(old_img_path), str(new_img_dir / name))
    pieces.append(markdown[last_end:])
    return ''.join(pieces)
def get_segmentation_label_result(layout_document: LayoutDocument, document_context: TrainingDataDocumentContext) -> LayoutDocumentLabelResult:
    """Run the segmentation model over one layout document and wrap the labeled output."""
    segmentation_model = document_context.fulltext_models.segmentation_model
    labeled_data_lists = list(iter_labeled_model_data_list_for_model_and_layout_documents(
        model=segmentation_model,
        model_layout_documents=[layout_document],
        document_context=document_context,
    ))
    # Exactly one list is expected because exactly one document was passed in.
    assert len(labeled_data_lists) == 1
    LOGGER.debug('segmentation_label_model_data_lists: %r', labeled_data_lists)
    return get_layout_document_label_result_for_labeled_model_data_list(
        labeled_model_data_iterable=labeled_data_lists[0],
        layout_document=layout_document,
    )
def set_logging_file(fname: str, filemode: str='w', level: LogValue=DEFAULT_LEVEL) -> None:
    """Redirect log output to *fname*, replacing any previously opened log file.

    Args:
        fname: Path of the log file to open.
        filemode: 'w' to truncate, 'a' to append.
        level: Log level for the new file handler.

    Raises:
        ValueError: If filemode is not exactly 'w' or 'a'.
    """
    # Substring membership ("filemode not in 'wa'") wrongly accepted '' and
    # 'wa'; compare against the explicit set of valid modes instead.
    if (filemode not in ('w', 'a')):
        raise ValueError("filemode must be either 'w' or 'a'")
    if ('file' in log.handlers):
        # Close the previous log file before replacing its handler.
        try:
            log.handlers['file'].file.close()
        except Exception:
            log.warning('Log file could not be closed')
        finally:
            del log.handlers['file']
    try:
        file = open(fname, filemode)
    except Exception:
        # Best-effort: leave logging unchanged if the file cannot be opened.
        log.error(f'File {fname} could not be opened')
        return
    log.handlers['file'] = LogHandler(Console(file=file, force_jupyter=False), level)
def test_deprecated_setting():
    """SETTING_WATERMARKHASH and the deprecated SETTING_INJECT_OPTIONS share raw
    index 36; each parsed config must expose only the name matching its payload."""
    watermark_hash_data = b'\x00$\x00\x03\x00 AAECAwQFBgcICQoLDA0ODw==\x00\x00\x00\x00\x00\x00\x00\x00'
    inject_options_data = b'\x00$\x00\x01\x00\x02\x00\x03'
    beacon1 = beacon.BeaconConfig(watermark_hash_data)
    beacon2 = beacon.BeaconConfig(inject_options_data)
    SETTING_INJECT_OPTIONS = beacon.DeprecatedBeaconSetting.SETTING_INJECT_OPTIONS
    SETTING_WATERMARKHASH = beacon.BeaconSetting.SETTING_WATERMARKHASH
    # Both enum members map to the same raw setting index.
    assert (SETTING_WATERMARKHASH.value == SETTING_INJECT_OPTIONS.value == 36)
    assert beacon1.raw_settings[SETTING_WATERMARKHASH.name]
    assert (beacon1.settings_by_index[36] == b'AAECAwQFBgcICQoLDA0ODw==')
    # A watermark-hash payload must not be reachable under the deprecated name...
    with pytest.raises(KeyError):
        assert beacon1.raw_settings[SETTING_INJECT_OPTIONS.name]
    assert (beacon2.raw_settings[SETTING_INJECT_OPTIONS.name] == 3)
    assert (beacon2.settings_by_index[36] == 3)
    # ...and an inject-options payload must not appear under the current name.
    with pytest.raises(KeyError):
        assert beacon2.raw_settings[SETTING_WATERMARKHASH.name]
class EfuseMacField(EfuseField):
    """Efuse field holding a 6-byte MAC address (factory or custom)."""

    def check_format(self, new_value_str):
        """Parse 'AA:CD:EF:01:02:03' into raw bytes, rejecting malformed or multicast MACs."""
        if new_value_str is None:
            raise esptool.FatalError('Required MAC Address in AA:CD:EF:01:02:03 format!')
        if new_value_str.count(':') != 5:
            raise esptool.FatalError('MAC Address needs to be a 6-byte hexadecimal format separated by colons (:)!')
        hex_digits = new_value_str.replace(':', '')
        if len(hex_digits) != 12:
            raise esptool.FatalError('MAC Address needs to be a 6-byte hexadecimal number (12 hexadecimal characters)!')
        raw_mac = binascii.unhexlify(hex_digits)
        # The least-significant bit of the first octet set means multicast.
        if esptool.util.byte(raw_mac, 0) & 1:
            raise esptool.FatalError('Custom MAC must be a unicast MAC!')
        return raw_mac

    def check(self):
        """Summarize the error/fail state of this field's efuse block."""
        errs, fail = self.parent.get_block_errors(self.block)
        if errs == 0 and not fail:
            status = 'OK'
        else:
            status = ('Block%d has ERRORS:%d FAIL:%d' % (self.block, errs, fail))
        return '(' + status + ')'

    def get(self, from_read=True):
        """Return the MAC formatted with colons, plus the block status."""
        raw_mac = self.get_raw(from_read)
        # CUSTOM_MAC is stored reversed relative to display order.
        if self.name == 'CUSTOM_MAC':
            raw_mac = raw_mac[::(- 1)]
        return '%s %s' % (util.hexify(raw_mac, ':'), self.check())

    def save(self, new_value):
        """Write a custom MAC; writing the factory MAC is rejected."""
        if self.name != 'CUSTOM_MAC':
            raise esptool.FatalError('Writing Factory MAC address is not supported')

        def print_field(e, new_value):
            print(" - '{}' ({}) {} -> {}".format(e.name, e.description, e.get_bitstring(), new_value))

        bitarray_mac = self.convert_to_bitstring(new_value)
        print_field(self, bitarray_mac)
        super(EfuseMacField, self).save(new_value)
def extractKaystlsSite(item):
    """Convert a feed item into a release message.

    Returns None for previews or items without chapter/volume info, a release
    message for known translator tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in known_tags:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
        )
    return False
# NOTE(review): the original @pytest.fixture(scope='function') decorator was
# lost in extraction; restore it from the original source.
def datadog_connection_config(db: session, datadog_config, datadog_secrets) -> Generator:
    """Yield a SaaS ConnectionConfig for Datadog, deleting it after the test."""
    fides_key = datadog_config['fides_key']
    config_data = {
        'key': fides_key,
        'name': fides_key,
        'connection_type': ConnectionType.saas,
        'access': AccessLevel.write,
        'secrets': datadog_secrets,
        'saas_config': datadog_config,
    }
    connection_config = ConnectionConfig.create(db=db, data=config_data)
    yield connection_config
    # Teardown: remove the config so tests stay isolated.
    connection_config.delete(db)
# NOTE(review): the fragments below look like stripped @mock.patch decorators
# (for Util.sdlog.error/warning/info) — restore them from the original source;
# they supply the three mocked-logger arguments in reverse order.
('Util.sdlog.error') ('Util.sdlog.warning') ('Util.sdlog.info')
def test_obtain_lock(mocked_info, mocked_warning, mocked_error):
    """obtain_lock should succeed without logging errors and hold a POSIX write
    lock on a file inside the (patched) lock directory."""
    with TemporaryDirectory() as tmpdir, mock.patch('Util.LOCK_DIRECTORY', tmpdir):
        basename = 'test-obtain-lock.lock'
        pid_str = str(os.getpid())
        lh = util.obtain_lock(basename)
        # No error logged and a lock handle returned.
        assert (not mocked_error.called)
        assert (lh is not None)
        # Confirm via lslocks that this process holds a POSIX write lock.
        lslocks_output = subprocess.check_output(['lslocks', '-n', '-p', pid_str]).decode('utf-8').strip()
        assert ('WRITE' in lslocks_output)
        assert ('POSIX' in lslocks_output)
def func_args_hash_func(target):
    """Build a cache-key function for *target*.

    Keys pair a short hash of the defining module's file path with a hash of
    the (args, kwargs) of each call.
    """
    import hashlib
    import inspect

    source_module = inspect.getmodule(target)
    # Short prefix is enough to namespace keys per source file.
    module_path_hash = hashlib.sha224(bytes(source_module.__file__, 'utf-8')).hexdigest()[:4]

    def _cache_key(args, kwargs):
        hashable_call = (to_hashable(args), to_hashable(kwargs))
        call_hash = hashlib.sha224(bytes(str(hashable_call), 'utf-8')).hexdigest()
        return (module_path_hash, call_hash)

    return _cache_key
def _truncated_normal_sample(key, mean, std, noise_clip=0.3): noise = (jax.random.normal(key=key, shape=mean.shape) * std) noise = jnp.clip(noise, (- noise_clip), noise_clip) sample = (mean + noise) clipped_sample = jnp.clip(sample, (- 1), 1) return ((sample - jax.lax.stop_gradient(sample)) + jax.lax.stop_gradient(clipped_sample))
def _build_submission_queryset_for_derived_fields(submission_closed_period_queryset, column_name):
    """Return an ORM expression for *column_name* gated by closed submission periods.

    With a closed-period filter, the column value is used only when the filter
    matches and NULL (as a Decimal) otherwise; without one, the raw column
    reference is returned.
    """
    if not submission_closed_period_queryset:
        return F(column_name)
    decimal_field = DecimalField(max_digits=23, decimal_places=2)
    return Case(
        When(submission_closed_period_queryset, then=F(column_name)),
        default=Cast(Value(None), DecimalField(max_digits=23, decimal_places=2)),
        output_field=decimal_field,
    )
class GrpcExplainer(explain_pb2_grpc.ExplainServicer):
    """gRPC servicer exposing IAM Explain operations backed by an explainer API."""

    # Invocation-metadata key under which clients pass the model handle.
    HANDLE_KEY = 'handle'

    def _get_handle(self, context):
        """Extract the model handle from the request's invocation metadata."""
        metadata = context.invocation_metadata()
        metadata_dict = {}
        for (key, value) in metadata:
            metadata_dict[key] = value
        return metadata_dict[self.HANDLE_KEY]

    def _determine_is_supported(self):
        """Explain is supported only when the configured inventory root
        resource id refers to an organization."""
        if (not self.explainer.config.inventory_config.root_resource_id):
            LOGGER.exception('root_resource_id is not set in the server config file. Explain will not be supported.')
            return False
        root_resource_id = self.explainer.config.inventory_config.root_resource_id
        if ('organizations' in root_resource_id):
            return True
        return False

    # NOTE(review): this method takes no "self" (possibly a stripped
    # @staticmethod decorator) — as written, every bound call like
    # self._set_not_supported_status(context, reply) passes three arguments to
    # a two-parameter function and raises TypeError. Confirm against the
    # original source.
    def _set_not_supported_status(context, reply):
        context.set_code(StatusCode.FAILED_PRECONDITION)
        context.set_details(FAILED_PRECONDITION_MESSAGE)
        return reply

    def __init__(self, explainer_api):
        super(GrpcExplainer, self).__init__()
        self.explainer = explainer_api
        # Computed once; consulted by every RPC below.
        self.is_supported = self._determine_is_supported()

    def Ping(self, request, _):
        """Liveness check: echo the request data back."""
        return explain_pb2.PingReply(data=request.data)

    # NOTE(review): the bare "_stream" expressions below look like stripped
    # decorator remnants (presumably marking server-streaming RPCs); restore
    # them from the original source.
    _stream
    def ListResources(self, request, context):
        """Stream resources whose type names match the given prefix."""
        reply = explain_pb2.Resource()
        if (not self.is_supported):
            (yield self._set_not_supported_status(context, reply))
        handle = self._get_handle(context)
        resources = self.explainer.list_resources(handle, request.prefix)
        for resource in resources:
            (yield explain_pb2.Resource(full_resource_name=resource.type_name))

    _stream
    def ListGroupMembers(self, request, context):
        """Stream group member names matching the given prefix."""
        reply = explain_pb2.GroupMember()
        if (not self.is_supported):
            (yield self._set_not_supported_status(context, reply))
        handle = self._get_handle(context)
        member_names = self.explainer.list_group_members(handle, request.prefix)
        for member in member_names:
            (yield explain_pb2.GroupMember(member_name=member))

    _stream
    def ListRoles(self, request, context):
        """Stream role names matching the given prefix."""
        reply = explain_pb2.Role()
        if (not self.is_supported):
            (yield self._set_not_supported_status(context, reply))
        handle = self._get_handle(context)
        role_names = self.explainer.list_roles(handle, request.prefix)
        for role in role_names:
            (yield explain_pb2.Role(role_name=role))

    def GetIamPolicy(self, request, context):
        """Return the IAM policy (bindings + etag) for a resource."""
        reply = explain_pb2.GetIamPolicyReply()
        if (not self.is_supported):
            return self._set_not_supported_status(context, reply)
        handle = self._get_handle(context)
        policy = self.explainer.get_iam_policy(handle, request.resource)
        etag = policy['etag']
        bindings = []
        # policy['bindings'] maps role -> member list.
        for (key, value) in policy['bindings'].items():
            binding = explain_pb2.BindingOnResource()
            binding.role = key
            binding.members.extend(value)
            bindings.append(binding)
        reply.resource = request.resource
        reply.policy.bindings.extend(bindings)
        reply.policy.etag = etag
        return reply

    def CheckIamPolicy(self, request, context):
        """Report whether an identity holds a permission on a resource."""
        reply = explain_pb2.CheckIamPolicyReply()
        if (not self.is_supported):
            return self._set_not_supported_status(context, reply)
        handle = self._get_handle(context)
        authorized = self.explainer.check_iam_policy(handle, request.resource, request.permission, request.identity)
        reply.result = authorized
        return reply

    _stream
    def ExplainDenied(self, request, context):
        """Stream binding strategies that would grant the denied access."""
        reply = explain_pb2.BindingStrategy()
        if (not self.is_supported):
            (yield self._set_not_supported_status(context, reply))
        model_name = self._get_handle(context)
        binding_strategies = self.explainer.explain_denied(model_name, request.member, request.resources, request.permissions, request.roles)
        for (overgranting, bindings) in binding_strategies:
            strategy = explain_pb2.BindingStrategy(overgranting=overgranting)
            # Each binding tuple is (role, member, resource).
            strategy.bindings.extend([explain_pb2.Binding(member=b[1], resource=b[2], role=b[0]) for b in bindings])
            (yield strategy)

    def ExplainGranted(self, request, context):
        """Explain why access is granted: bindings, memberships and ancestors."""
        reply = explain_pb2.ExplainGrantedReply()
        if (not self.is_supported):
            return self._set_not_supported_status(context, reply)
        model_name = self._get_handle(context)
        result = self.explainer.explain_granted(model_name, request.member, request.resource, request.role, request.permission)
        (bindings, member_graph, resource_names) = result
        memberships = []
        for (child, parents) in member_graph.items():
            memberships.append(explain_pb2.Membership(member=child, parents=parents))
        reply.memberships.extend(memberships)
        reply.resource_ancestors.extend(resource_names)
        reply.bindings.extend([explain_pb2.Binding(member=member, resource=resource, role=role) for (resource, role, member) in bindings])
        return reply

    _stream
    def GetAccessByPermissions(self, request, context):
        """Stream (role, resource, members) tuples for a role/permission."""
        reply = explain_pb2.Access()
        if (not self.is_supported):
            (yield self._set_not_supported_status(context, reply))
        model_name = self._get_handle(context)
        for (role, resource, members) in self.explainer.get_access_by_permissions(model_name, request.role_name, request.permission_name, request.expand_groups, request.expand_resources):
            (yield explain_pb2.Access(members=members, role=role, resource=resource))

    _stream
    def GetAccessByResources(self, request, context):
        """Stream per-role member access on a single resource."""
        reply = explain_pb2.Access()
        if (not self.is_supported):
            (yield self._set_not_supported_status(context, reply))
        model_name = self._get_handle(context)
        mapping = self.explainer.get_access_by_resources(model_name, request.resource_name, request.permission_names, request.expand_groups)
        for (role, members) in mapping.items():
            access = explain_pb2.Access(role=role, resource=request.resource_name, members=members)
            (yield access)

    _stream
    def GetAccessByMembers(self, request, context):
        """Stream per-role resource access for a single member."""
        reply = explain_pb2.Access()
        if (not self.is_supported):
            (yield self._set_not_supported_status(context, reply))
        model_name = self._get_handle(context)
        for (role, resources) in self.explainer.get_access_by_members(model_name, request.member_name, request.permission_names, request.expand_resources):
            access = explain_pb2.MemberAccess(role=role, resources=resources, member=request.member_name)
            (yield access)

    def GetPermissionsByRoles(self, request, context):
        """Return the permissions contained in the requested roles/prefixes."""
        reply = explain_pb2.GetPermissionsByRolesReply()
        if (not self.is_supported):
            return self._set_not_supported_status(context, reply)
        model_name = self._get_handle(context)
        result = self.explainer.get_permissions_by_roles(model_name, request.role_names, request.role_prefixes)
        # Group permission names under each role name.
        permissions_by_roles_map = defaultdict(list)
        for (role, permission) in result:
            permissions_by_roles_map[role.name].append(permission.name)
        permissions_by_roles_list = []
        for (role, permissions) in permissions_by_roles_map.items():
            permissions_by_roles_list.append(explain_pb2.GetPermissionsByRolesReply.PermissionsByRole(role=role, permissions=permissions))
        reply.permissionsbyroles.extend(permissions_by_roles_list)
        return reply
def fetch_consumption(zone_key: str='IQ', session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> dict:
    """Fetch the latest total electricity consumption for Iraq.

    Raises:
        NotImplementedError: when a historical target_datetime is requested.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    http_session = session or requests.session()
    data, timestamp = fetch_data(http_session)
    return {
        'zoneKey': zone_key,
        'datetime': timestamp,
        'consumption': data['d_1234'],
        'source': DATA_SOURCE,
    }
class DeleteComposableTemplate(Runner):
    """Runner that deletes composable index templates and, optionally, the
    indices matching each template's index pattern."""

    async def __call__(self, es, params):
        templates = mandatory(params, 'templates', self)
        only_if_exists = mandatory(params, 'only-if-exists', self)
        request_params = mandatory(params, 'request-params', self)
        # Count every cluster operation performed for benchmark weighting.
        ops_count = 0
        # Track the cluster's destructive_requires_name setting so it can be
        # restored in the finally block after wildcard index deletion.
        prior_destructive_setting = None
        current_destructive_setting = None
        try:
            for (template_name, delete_matching_indices, index_pattern) in templates:
                if (not only_if_exists):
                    # Unconditional delete; a missing template (404) is ignored.
                    (await es.indices.delete_index_template(name=template_name, params=request_params, ignore=[404]))
                    ops_count += 1
                elif (only_if_exists and (await es.indices.exists_index_template(name=template_name))):
                    self.logger.info('Composable Index template [%s] already exists. Deleting it.', template_name)
                    (await es.indices.delete_index_template(name=template_name, params=request_params))
                    ops_count += 1
                # Index deletion is skipped on serverless unless running as operator.
                if ((not self.serverless_mode) or self.serverless_operator):
                    if (delete_matching_indices and index_pattern):
                        if (current_destructive_setting is None):
                            # Disable destructive_requires_name once, remembering
                            # the prior value for restoration.
                            current_destructive_setting = False
                            prior_destructive_setting = (await set_destructive_requires_name(es, current_destructive_setting))
                            ops_count += 1
                        (await es.indices.delete(index=index_pattern))
                        ops_count += 1
        finally:
            # Restore the original setting only if it was changed above.
            if (current_destructive_setting is not None):
                (await set_destructive_requires_name(es, prior_destructive_setting))
                ops_count += 1
        return {'weight': ops_count, 'unit': 'ops', 'success': True}

    def __repr__(self, *args, **kwargs):
        return 'delete-composable-template'