code stringlengths 281 23.7M |
|---|
def decode_list(t: Type[T]) -> Callable[[List[Any]], List[T]]:
    """Build a decoder that decodes every element of a list as type ``t``.

    Args:
        t: element type; per-item decoding is delegated to ``get_decoding_fn(t)``.

    Returns:
        A function mapping a raw ``list`` to a ``List[T]``.

    Raises:
        TypeError: (from the returned decoder) if the value is not a list.
    """
    decode_item = get_decoding_fn(t)

    def _decode_list(val: List[Any]) -> List[T]:
        # isinstance is the idiomatic type check (the original used
        # `type(val) != list` and raised a bare Exception); TypeError is
        # still caught by any caller handling Exception.
        if not isinstance(val, list):
            raise TypeError(f"The given value='{val}' is not of a valid input")
        return [decode_item(v) for v in val]

    return _decode_list
# Facade over Hydra's config composition and task execution.
# NOTE(review): this dump is flattened -- indentation and decorators (the two
# create_main_* constructors are presumably @classmethods, several helpers
# below presumably @staticmethods) appear stripped; confirm against upstream.
class Hydra():
# Alternate constructor: derive the config search path from the calling
# file/module plus the optional config_path, then delegate.
def create_main_hydra_file_or_module(cls: Type['Hydra'], calling_file: Optional[str], calling_module: Optional[str], config_path: Optional[str], job_name: str) -> 'Hydra':
config_search_path = create_automatic_config_search_path(calling_file, calling_module, config_path)
return Hydra.create_main_hydra2(job_name, config_search_path)
# Alternate constructor from an explicit search path; also registers the
# new instance as the process-wide GlobalHydra singleton.
def create_main_hydra2(cls, task_name: str, config_search_path: ConfigSearchPath) -> 'Hydra':
config_loader: ConfigLoader = ConfigLoaderImpl(config_search_path=config_search_path)
hydra = cls(task_name=task_name, config_loader=config_loader)
from hydra.core.global_hydra import GlobalHydra
GlobalHydra.instance().initialize(hydra)
return hydra
def __init__(self, task_name: str, config_loader: ConfigLoader) -> None:
# Register Hydra globals/resolvers and record the job name.
setup_globals()
self.config_loader = config_loader
JobRuntime().set('name', task_name)
# Best-effort lookup of hydra.mode from a MULTIRUN composition.
# Returns None when composition fails for any reason; the broad
# `except Exception` is deliberate (probe, not validation).
def get_mode(self, config_name: Optional[str], overrides: List[str]) -> Any:
try:
cfg = self.compose_config(config_name=config_name, overrides=overrides, with_log_configuration=False, run_mode=RunMode.MULTIRUN, validate_sweep_overrides=False)
return cfg.hydra.mode
except Exception:
return None
# Compose the config and execute the task function once (single-run mode).
# Fires on_run_start/on_run_end callbacks around run_job.
def run(self, config_name: Optional[str], task_function: TaskFunction, overrides: List[str], with_log_configuration: bool=True) -> JobReturn:
cfg = self.compose_config(config_name=config_name, overrides=overrides, with_log_configuration=with_log_configuration, run_mode=RunMode.RUN)
# Default the mode to RUN; if already set it must agree with RUN.
if (cfg.hydra.mode is None):
cfg.hydra.mode = RunMode.RUN
else:
assert (cfg.hydra.mode == RunMode.RUN)
callbacks = Callbacks(cfg)
callbacks.on_run_start(config=cfg, config_name=config_name)
ret = run_job(hydra_context=HydraContext(config_loader=self.config_loader, callbacks=callbacks), task_function=task_function, config=cfg, job_dir_key='hydra.run.dir', job_subdir_key=None, configure_logging=with_log_configuration)
callbacks.on_run_end(config=cfg, config_name=config_name, job_return=ret)
# NOTE(review): return_value is read and discarded -- presumably so a
# failure captured by run_job surfaces here; confirm against JobReturn.
_ = ret.return_value
return ret
# Compose the config and hand execution to the configured sweeper plugin
# (multirun mode); fires on_multirun_start/on_multirun_end callbacks.
def multirun(self, config_name: Optional[str], task_function: TaskFunction, overrides: List[str], with_log_configuration: bool=True) -> Any:
cfg = self.compose_config(config_name=config_name, overrides=overrides, with_log_configuration=with_log_configuration, run_mode=RunMode.MULTIRUN)
callbacks = Callbacks(cfg)
callbacks.on_multirun_start(config=cfg, config_name=config_name)
sweeper = Plugins.instance().instantiate_sweeper(config=cfg, hydra_context=HydraContext(config_loader=self.config_loader, callbacks=callbacks), task_function=task_function)
# resolve=False keeps interpolations unresolved so the sweeper receives
# the raw task overrides.
task_overrides = OmegaConf.to_container(cfg.hydra.overrides.task, resolve=False)
assert isinstance(task_overrides, list)
ret = sweeper.sweep(arguments=task_overrides)
callbacks.on_multirun_end(config=cfg, config_name=config_name)
return ret
# Deep-copy src_cfg and strip everything except the 'hydra' node, then drop
# the help/hydra_help subtrees.  (Takes no self and is called via self./Hydra.
# elsewhere -- presumably a @staticmethod upstream whose decorator was lost.)
def get_sanitized_hydra_cfg(src_cfg: DictConfig) -> DictConfig:
cfg = copy.deepcopy(src_cfg)
# struct/readonly must be lifted before deleting keys from the config.
with flag_override(cfg, ['struct', 'readonly'], [False, False]):
for key in list(cfg.keys()):
if (key != 'hydra'):
del cfg[key]
with flag_override(cfg.hydra, ['struct', 'readonly'], False):
del cfg.hydra['hydra_help']
del cfg.hydra['help']
return cfg
# Reduce cfg to the requested view: 'job' removes the hydra node in place,
# 'hydra' keeps only the hydra node (via a deep copy), 'all' is unchanged.
def get_sanitized_cfg(self, cfg: DictConfig, cfg_type: str) -> DictConfig:
assert (cfg_type in ['job', 'hydra', 'all'])
if (cfg_type == 'job'):
with flag_override(cfg, ['struct', 'readonly'], [False, False]):
del cfg['hydra']
elif (cfg_type == 'hydra'):
cfg = self.get_sanitized_hydra_cfg(cfg)
return cfg
# Implements `--cfg`: compose the config and print the selected view
# (optionally narrowed to a package path) as YAML to stdout.
# Exits the process with status 1 if the requested package is missing.
def show_cfg(self, config_name: Optional[str], overrides: List[str], cfg_type: str, package: Optional[str], resolve: bool=False) -> None:
cfg = self.compose_config(config_name=config_name, overrides=overrides, run_mode=RunMode.RUN, with_log_configuration=False)
HydraConfig.instance().set_config(cfg)
OmegaConf.set_readonly(cfg.hydra, None)
cfg = self.get_sanitized_cfg(cfg, cfg_type)
# '_global_' is the user-facing spelling for "no package filter".
if (package == '_global_'):
package = None
if (package is None):
ret = cfg
else:
ret = OmegaConf.select(cfg, package)
if (ret is None):
sys.stderr.write(f'''package '{package}' not found in config
''')
sys.exit(1)
# Leaf values are printed directly; containers are dumped as YAML with
# the package path echoed as a YAML comment header.
if (not isinstance(ret, Container)):
print(ret)
else:
if (package is not None):
print(f'# {package}')
if resolve:
OmegaConf.resolve(ret)
sys.stdout.write(OmegaConf.to_yaml(ret))
# Discover all CompletionPlugin implementations and index them by the shell
# they provide.  Raises if two plugins claim the same shell.  (Takes no self
# but is called via self. below -- presumably a @staticmethod upstream.)
def get_shell_to_plugin_map(config_loader: ConfigLoader) -> DefaultDict[(str, List[CompletionPlugin])]:
shell_to_plugin: DefaultDict[(str, List[CompletionPlugin])] = defaultdict(list)
for clazz in Plugins.instance().discover(CompletionPlugin):
assert issubclass(clazz, CompletionPlugin)
plugin = clazz(config_loader)
shell_to_plugin[plugin.provides()].append(plugin)
# Enforce at most one completion plugin per shell.
for (shell, plugins) in shell_to_plugin.items():
if (len(plugins) > 1):
lst = ','.join((type(plugin).__name__ for plugin in plugins))
raise ValueError(f'Multiple plugins installed for {shell} : {lst}')
return shell_to_plugin
# Implements `--shell-completion`: exactly one of install/uninstall/query
# must be present in the overrides (parsed as a dotlist).
def shell_completion(self, config_name: Optional[str], overrides: List[str]) -> None:
subcommands = ['install', 'uninstall', 'query']
arguments = OmegaConf.from_dotlist(overrides)
num_commands = sum((1 for key in subcommands if (key in arguments)))
if (num_commands != 1):
raise ValueError(f'Expecting one subcommand from {subcommands} to be set')
shell_to_plugin = self.get_shell_to_plugin_map(self.config_loader)
# Resolve the plugin for the named shell or fail with the known shells.
def find_plugin(cmd: str) -> CompletionPlugin:
if (cmd not in shell_to_plugin):
lst = '\n'.join((('\t' + x) for x in shell_to_plugin.keys()))
raise ValueError(f'''No completion plugin for '{cmd}' found, available :
{lst}''')
return shell_to_plugin[cmd][0]
if ('install' in arguments):
plugin = find_plugin(arguments.install)
plugin.install()
elif ('uninstall' in arguments):
plugin = find_plugin(arguments.uninstall)
plugin.uninstall()
elif ('query' in arguments):
plugin = find_plugin(arguments.query)
plugin.query(config_name=config_name)
# Render the argparse flag help as plain text; positional args (no option
# strings) are treated as the trailing "Overrides" entry.  (Presumably a
# @staticmethod upstream -- takes no self.)
def format_args_help(args_parser: ArgumentParser) -> str:
s = ''
overrides: Any = None
# NOTE(review): relies on argparse's private _actions list.
for action in args_parser._actions:
if (len(action.option_strings) == 0):
overrides = action
else:
s += f'''{','.join(action.option_strings)} : {action.help}
'''
s += ('Overrides : ' + overrides.help)
return s
# Recursively collect all config group paths (e.g. 'db', 'db/mysql') that
# contain at least one config file.
def list_all_config_groups(self, parent: str='') -> Sequence[str]:
from hydra.core.object_type import ObjectType
groups: List[str] = []
for group in self.config_loader.list_groups(parent):
if (parent == ''):
group_name = group
else:
group_name = f'{parent}/{group}'
files = self.config_loader.get_group_options(group_name, ObjectType.CONFIG)
dirs = self.config_loader.get_group_options(group_name, ObjectType.GROUP)
# Only groups with config files are listed; subgroups are recursed into.
if (len(files) > 0):
groups.append(group_name)
if (len(dirs) > 0):
groups.extend(self.list_all_config_groups(group_name))
return groups
# Render the config groups accepted by `predicate`, either one comma-joined
# line per group (compact) or one indented option per line.
def format_config_groups(self, predicate: Callable[([str], bool)], compact: bool=True) -> str:
groups = [x for x in self.list_all_config_groups() if predicate(x)]
s = ''
for group in sorted(groups):
options = sorted(self.config_loader.get_group_options(group))
if compact:
items = ', '.join(options)
line = f'{group}: {items}'
else:
items = '\n'.join(((' ' + o) for o in options))
line = f'''{group}:
{items}'''
s += (line + '\n')
return s
# Expand the help template, substituting the flag help, hydra/app config
# group listings and the YAML dump of cfg.
def get_help(self, help_cfg: DictConfig, cfg: DictConfig, args_parser: ArgumentParser, resolve: bool) -> str:
s = string.Template(help_cfg.template)
# Predicates splitting config groups into hydra-owned vs. app-owned.
def is_hydra_group(x: str) -> bool:
return (x.startswith('hydra/') or (x == 'hydra'))
def is_not_hydra_group(x: str) -> bool:
return (not is_hydra_group(x))
help_text = s.substitute(FLAGS_HELP=self.format_args_help(args_parser), HYDRA_CONFIG_GROUPS=self.format_config_groups(is_hydra_group), APP_CONFIG_GROUPS=self.format_config_groups(is_not_hydra_group), CONFIG=OmegaConf.to_yaml(cfg, resolve=resolve))
return help_text
# Implements `--hydra-help`: print help rendered from hydra.hydra_help
# against the hydra-only view of the config.
def hydra_help(self, config_name: Optional[str], args_parser: ArgumentParser, args: Any) -> None:
cfg = self.compose_config(config_name=None, overrides=args.overrides, run_mode=RunMode.RUN, with_log_configuration=True)
help_cfg = cfg.hydra.hydra_help
cfg = self.get_sanitized_hydra_cfg(cfg)
help_text = self.get_help(help_cfg, cfg, args_parser, resolve=False)
print(help_text)
# Implements `--help`: print help rendered from hydra.help against the
# job-only (hydra node removed) view of the config.
def app_help(self, config_name: Optional[str], args_parser: ArgumentParser, args: Any) -> None:
cfg = self.compose_config(config_name=config_name, overrides=args.overrides, run_mode=RunMode.RUN, with_log_configuration=True)
HydraConfig.instance().set_config(cfg)
help_cfg = cfg.hydra.help
clean_cfg = copy.deepcopy(cfg)
clean_cfg = self.get_sanitized_cfg(clean_cfg, 'job')
help_text = self.get_help(help_cfg, clean_cfg, args_parser, resolve=args.resolve)
print(help_text)
# Debug-log a header line underlined with `filler`.  (Takes no self and is
# also called as Hydra._log_header -- presumably a @staticmethod upstream.)
def _log_header(header: str, prefix: str='', filler: str='-') -> None:
assert (log is not None)
log.debug((prefix + header))
log.debug((prefix + ''.ljust(len(header), filler)))
# Debug-log only the underline for a previously printed header.
def _log_footer(header: str, prefix: str='', filler: str='-') -> None:
assert (log is not None)
log.debug((prefix + ''.ljust(len(header), filler)))
# Debug-log all installed plugins, grouped by plugin type; anything not
# matching a known type is listed under "Generic plugins".
def _print_plugins(self) -> None:
assert (log is not None)
self._log_header(header='Installed Hydra Plugins', filler='*')
all_plugins = {p.__name__ for p in Plugins.instance().discover()}
for plugin_type in [ConfigSource, CompletionPlugin, Launcher, Sweeper, SearchPathPlugin]:
plugins = Plugins.instance().discover(plugin_type)
if (len(plugins) > 0):
Hydra._log_header(header=f'{plugin_type.__name__}:', prefix='\t')
for plugin in plugins:
log.debug(f' {plugin.__name__}')
# Remove typed plugins so only unclassified ones remain at the end.
if (plugin.__name__ in all_plugins):
all_plugins.remove(plugin.__name__)
if (len(all_plugins) > 0):
Hydra._log_header(header='Generic plugins: ', prefix='\t')
for plugin_name in all_plugins:
log.debug(f' {plugin_name}')
# Debug-log the config search path as a provider/path table, taken from
# hydra.runtime.config_sources of a freshly composed config.
def _print_search_path(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
assert (log is not None)
log.debug('')
self._log_header(header='Config search path', filler='*')
box: List[List[str]] = [['Provider', 'Search path']]
cfg = self.compose_config(config_name=config_name, overrides=overrides, run_mode=run_mode, with_log_configuration=False)
HydraConfig.instance().set_config(cfg)
cfg = self.get_sanitized_cfg(cfg, cfg_type='hydra')
sources = cfg.hydra.runtime.config_sources
for sp in sources:
box.append([sp.provider, f'{sp.schema}://{sp.path}'])
# Column widths are computed over the header row plus all data rows.
(provider_pad, search_path_pad) = get_column_widths(box)
header = '| {} | {} |'.format('Provider'.ljust(provider_pad), 'Search path'.ljust(search_path_pad))
self._log_header(header=header, filler='-')
for source in sources:
log.debug('| {} | {} |'.format(source.provider.ljust(provider_pad), f'{source.schema}://{source.path}'.ljust(search_path_pad)))
self._log_footer(header=header, filler='-')
def _print_plugins_profiling_info(self, top_n: int) -> None:
    """Debug-log a table of the slowest plugin-module import times.

    Args:
        top_n: maximum number of modules to display.
    """
    assert log is not None
    stats = Plugins.instance().get_stats()
    if stats is None:
        return
    items = list(stats.modules_import_time.items())
    # Hide modules that imported in (well under) a millisecond.
    filtered = filter(lambda x: x[1] > 0.0005, items)
    sorted_items = sorted(filtered, key=lambda x: x[1], reverse=True)
    # BUG FIX: the original used max(len(sorted_items), top_n), which made
    # the slice a no-op and printed *every* module whenever more than
    # top_n were measured; min() actually enforces the limit.
    top_n = min(len(sorted_items), top_n)
    box: List[List[str]] = [['Module', 'Sec']]
    for item in sorted_items[0:top_n]:
        box.append([item[0], f'{item[1]:.3f}'])
    padding = get_column_widths(box)
    log.debug('')
    self._log_header(header='Profiling information', filler='*')
    self._log_header(header=f'Total plugins scan time : {stats.total_time:.3f} seconds', filler='-')
    header = f'| {box[0][0].ljust(padding[0])} | {box[0][1].ljust(padding[1])} |'
    self._log_header(header=header, filler='-')
    # Drop the header row so the loop below logs only data rows.
    del box[0]
    for row in box:
        a = row[0].ljust(padding[0])
        b = row[1].ljust(padding[1])
        log.debug(f'| {a} | {b} |')
    self._log_footer(header=header, filler='-')
# Implements `--info config`: log search path, defaults tree, defaults list
# and finally the composed job config (hydra node removed) as YAML.
def _print_config_info(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
assert (log is not None)
self._print_search_path(config_name=config_name, overrides=overrides, run_mode=run_mode)
self._print_defaults_tree(config_name=config_name, overrides=overrides)
self._print_defaults_list(config_name=config_name, overrides=overrides)
# run_and_report formats composition errors for the user instead of
# letting a raw traceback escape.
cfg = run_and_report((lambda : self.compose_config(config_name=config_name, overrides=overrides, run_mode=run_mode, with_log_configuration=False)))
HydraConfig.instance().set_config(cfg)
self._log_header(header='Config', filler='*')
with flag_override(cfg, ['struct', 'readonly'], [False, False]):
del cfg['hydra']
log.info(OmegaConf.to_yaml(cfg))
# Implements `--info defaults`: log the flattened defaults list as a table
# of (config path, package, _self_, parent).
def _print_defaults_list(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
assert (log is not None)
defaults = self.config_loader.compute_defaults_list(config_name=config_name, overrides=overrides, run_mode=run_mode)
box: List[List[str]] = [['Config path', 'Package', '_self_', 'Parent']]
for d in defaults.defaults:
row = [d.config_path, d.package, ('True' if d.is_self else 'False'), d.parent]
# None cells render as empty strings so ljust below never fails.
row = [(x if (x is not None) else '') for x in row]
box.append(row)
padding = get_column_widths(box)
del box[0]
log.debug('')
self._log_header('Defaults List', filler='*')
header = '| {} | {} | {} | {} | '.format('Config path'.ljust(padding[0]), 'Package'.ljust(padding[1]), '_self_'.ljust(padding[2]), 'Parent'.ljust(padding[3]))
self._log_header(header=header, filler='-')
for row in box:
log.debug('| {} | {} | {} | {} |'.format(row[0].ljust(padding[0]), row[1].ljust(padding[1]), row[2].ljust(padding[2]), row[3].ljust(padding[3])))
self._log_footer(header=header, filler='-')
# Dump the full info block, but only when debug logging is enabled
# (i.e. hydra.verbose / -v was requested).
def _print_debug_info(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
assert (log is not None)
if log.isEnabledFor(logging.DEBUG):
self._print_all_info(config_name, overrides, run_mode)
# Central composition entry point used by run/multirun/show_cfg/help.
# When with_log_configuration is True this also configures Hydra's own
# logging and (re)binds the module-level `log` used by the printers above.
def compose_config(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode, with_log_configuration: bool=False, from_shell: bool=True, validate_sweep_overrides: bool=True) -> DictConfig:
cfg = self.config_loader.load_configuration(config_name=config_name, overrides=overrides, run_mode=run_mode, from_shell=from_shell, validate_sweep_overrides=validate_sweep_overrides)
if with_log_configuration:
configure_log(cfg.hydra.hydra_logging, cfg.hydra.verbose)
global log
log = logging.getLogger(__name__)
self._print_debug_info(config_name, overrides, run_mode)
return cfg
# Implements `--info plugins`: installed plugins plus import-time profile.
def _print_plugins_info(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
self._print_plugins()
self._print_plugins_profiling_info(top_n=10)
# Implements `--info all`: version banner, plugins and full config info.
def _print_all_info(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
from .. import __version__
self._log_header(f'Hydra {__version__}', filler='=')
self._print_plugins()
self._print_config_info(config_name, overrides, run_mode)
# Recursively pretty-print a defaults tree, one space of indent per level.
def _print_defaults_tree_impl(self, tree: Union[(DefaultsTreeNode, InputDefault)], indent: int=0) -> None:
assert (log is not None)
from ..core.default_element import GroupDefault, InputDefault, VirtualRoot
# Render a single node: group defaults as "override_key: name" (None
# rendered as the literal 'null'), everything else as its config path.
def to_str(node: InputDefault) -> str:
if isinstance(node, VirtualRoot):
return node.get_config_path()
elif isinstance(node, GroupDefault):
name = node.get_name()
if (name is None):
name = 'null'
return ((node.get_override_key() + ': ') + name)
else:
return node.get_config_path()
pad = (' ' * indent)
if isinstance(tree, DefaultsTreeNode):
node_str = to_str(tree.node)
# Inner nodes get a trailing ':' and their children indented below.
if ((tree.children is not None) and (len(tree.children) > 0)):
log.info(((pad + node_str) + ':'))
for child in tree.children:
self._print_defaults_tree_impl(tree=child, indent=(indent + 1))
else:
log.info((pad + node_str))
else:
assert isinstance(tree, InputDefault)
log.info((pad + to_str(tree)))
# Implements `--info defaults-tree`: compute and print the defaults tree.
def _print_defaults_tree(self, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
assert (log is not None)
defaults = self.config_loader.compute_defaults_list(config_name=config_name, overrides=overrides, run_mode=run_mode)
log.info('')
self._log_header('Defaults Tree', filler='*')
self._print_defaults_tree_impl(defaults.defaults_tree)
# Entry point for `--info <topic>`: dispatch to the matching printer, or
# log a usage error listing the valid topics.
def show_info(self, info: str, config_name: Optional[str], overrides: List[str], run_mode: RunMode=RunMode.RUN) -> None:
options = {'all': self._print_all_info, 'defaults': self._print_defaults_list, 'defaults-tree': self._print_defaults_tree, 'config': self._print_config_info, 'plugins': self._print_plugins_info, 'searchpath': self._print_search_path}
simple_stdout_log_config(level=logging.DEBUG)
global log
log = logging.getLogger(__name__)
if (info not in options):
opts = sorted(options.keys())
log.error(f"Info usage: --info [{'|'.join(opts)}]")
else:
options[info](config_name=config_name, overrides=overrides, run_mode=run_mode)
# Check npm requirements for each Standalone component and copy its assets
# into the app's asset folder.  Returns a dict mapping each component
# selector to its requirement report, plus a 'dependencies' key holding the
# union of all reported dependencies.
# NOTE(review): `folder` defaults to node.ASSET_FOLDER (evaluated at import
# time) and `name` to the literal template string '{selector}'.  In this
# flattened dump it is ambiguous whether the assets_path/to_component lines
# sit inside the loop (per component) or after it (last component only) --
# confirm against the original indentation.
def add_to_app(components: List[Standalone.Component], app_path: str, folder: str=node.ASSET_FOLDER, name: str='{selector}', raise_exception: bool=False) -> dict:
result = {'dependencies': {}}
for component in components:
result[component.selector] = npm.check_component_requirements(component, app_path, raise_exception)
result['dependencies'].update(result[component.selector])
assets_path = Path(app_path, 'src', folder)
assets_path.mkdir(parents=True, exist_ok=True)
to_component(component, name=name, out_path=str(assets_path))
return result
# NOTE(review): the three lines below look like stripped pytest decorators
# (@pytest.mark.external, @pytest.mark.skipif, @pytest.mark.parametrize);
# confirm against the original file.
.external
.skipif((has_openai_key is False), reason='OpenAI API key not available')
.parametrize('cfg_string', ['zeroshot_cfg_string', 'zeroshot_cfg_string_v2_lds', 'fewshot_cfg_string', 'fewshot_cfg_string_v2', 'ext_template_cfg_string'])
# Verify that an NER LLM pipeline loads from each config fixture, that the
# component config round-trips through add_pipe, and that the task's labels
# are exposed consistently on the pipe.
def test_ner_config(cfg_string, request):
cfg_string = request.getfixturevalue(cfg_string)
orig_config = Config().from_str(cfg_string)
nlp = spacy.util.load_model_from_config(orig_config, auto_fill=True)
assert (nlp.pipe_names == ['llm'])
# Re-adding the component from its config (minus 'factory') must work.
component_cfg = dict(orig_config['components']['llm'])
component_cfg.pop('factory')
nlp2 = spacy.blank('en')
nlp2.add_pipe('llm', config=component_cfg)
assert (nlp2.pipe_names == ['llm'])
pipe = nlp.get_pipe('llm')
assert isinstance(pipe, LLMWrapper)
assert isinstance(pipe.task, LLMTask)
labels = orig_config['components']['llm']['task']['labels']
labels = split_labels(labels)
task = pipe.task
assert isinstance(task, LabeledTask)
# Labels from the config must match the task's and the pipe's views.
assert (sorted(task.labels) == sorted(tuple(labels)))
assert (pipe.labels == task.labels)
assert (nlp.pipe_labels['llm'] == list(task.labels))
# Scripted pick-and-place sequence for a robot arm: move through two init
# poses, grip, carry to a drop pose, release, re-home the wrist, move to a
# box pose, grip again and return to the initial pose.
# NOTE(review): relies on module-level globals `mc` (arm controller),
# `init_angles`, `box_angles`, `gripper_on`/`gripper_off`; joint lists have
# 7 entries (tmp[6] below), so this presumably targets a 7-DOF arm --
# confirm against the controller API.
def move():
mc.send_angles(init_angles[0], 50)
time.sleep(3)
mc.send_angles(init_angles[1], 50)
time.sleep(3)
gripper_on()
mc.send_angles([0.0, 26.27, 0.17, (- 72.86), (- 0.17), (- 77.51), 0.0], 50)
time.sleep(3)
mc.send_angles([(- 2.02), 41.74, 0.43, (- 86.13), (- 0.17), (- 46.05), 0.0], 50)
time.sleep(3)
gripper_off()
time.sleep(2)
# Poll until get_angles() returns a non-empty reading (it can briefly
# return an empty list while the controller is busy).
tmp = []
while True:
if (not tmp):
tmp = mc.get_angles()
else:
break
time.sleep(0.5)
# Zero the last joint, keep the base joint, and straighten the rest.
tmp[6] = 0.0
print(tmp)
mc.send_angles([tmp[0], 0, 0, (- 90), (- 0.0), (- 90), tmp[6]], 50)
time.sleep(3)
mc.send_angles(box_angles[3], 50)
time.sleep(4)
gripper_on()
time.sleep(2)
mc.send_angles(init_angles[0], 50)
time.sleep(4)
class IterationBased():
    """Progress tracker for a task running a fixed number of iterations.

    The schedule is ``warmup_iterations`` warmup samples followed by
    ``iterations`` normal samples.  If either count is ``None`` the task
    is treated as unbounded and no total is computed.
    """

    def __init__(self, warmup_iterations, iterations):
        self._warmup_iterations = warmup_iterations
        self._iterations = iterations
        if warmup_iterations is None or iterations is None:
            # No finite schedule can be derived.
            self._total_iterations = None
        else:
            self._total_iterations = self._warmup_iterations + self._iterations
            if self._total_iterations == 0:
                raise exceptions.RallyAssertionError('Operation must run at least for one iteration.')
        self._it = None

    def start(self):
        # Reset the zero-based iteration counter for a new run.
        self._it = 0

    def sample_type(self):
        # Iterations before the warmup boundary are tagged as warmup samples.
        if self._it < self._warmup_iterations:
            return metrics.SampleType.Warmup
        return metrics.SampleType.Normal

    def infinite(self):
        return self._iterations is None

    def percent_completed(self):
        # +1 because the counter is zero-based.
        return (self._it + 1) / self._total_iterations

    def completed(self):
        return self._it >= self._total_iterations

    def next(self):
        self._it += 1

    def __str__(self):
        return 'iteration-count-based'
# Generated Highcharts option wrapper for
# plotOptions.errorbar.sonification.defaultInstrumentOptions.mapping.highpass.
# NOTE(review): these accessors are presumably @property-decorated upstream
# (decorators appear stripped from this dump) -- confirm before use.
class OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingHighpass(Options):
# Lazily create/return the 'frequency' sub-configuration object.
def frequency(self) -> 'OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
return self._config_sub_data('frequency', OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
# Lazily create/return the 'resonance' sub-configuration object.
def resonance(self) -> 'OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingHighpassResonance':
return self._config_sub_data('resonance', OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingHighpassResonance)
# Validate a block header against its parent per Ethereum consensus rules
# (this variant includes the DAO hard-fork extra_data check).
# Each `ensure(cond, InvalidBlock)` raises InvalidBlock when cond is False.
def validate_header(header: Header, parent_header: Header) -> None:
# Timestamps must strictly increase; block numbers must be consecutive.
ensure((header.timestamp > parent_header.timestamp), InvalidBlock)
ensure((header.number == (parent_header.number + 1)), InvalidBlock)
# Gas limit may only drift within the protocol-allowed bound of the parent.
ensure(check_gas_limit(header.gas_limit, parent_header.gas_limit), InvalidBlock)
ensure((len(header.extra_data) <= 32), InvalidBlock)
# Difficulty must match the canonical calculation from the parent.
block_difficulty = calculate_block_difficulty(header.number, header.timestamp, parent_header.timestamp, parent_header.difficulty)
ensure((header.difficulty == block_difficulty), InvalidBlock)
# parent_hash must commit to the RLP-encoded parent header.
block_parent_hash = keccak256(rlp.encode(parent_header))
ensure((header.parent_hash == block_parent_hash), InvalidBlock)
# DAO fork: the 10 blocks starting at the fork block must carry the
# literal b'dao-hard-fork' marker in extra_data.
if ((header.number >= FORK_CRITERIA.block_number) and (header.number < (FORK_CRITERIA.block_number + 10))):
ensure((header.extra_data == b'dao-hard-fork'), InvalidBlock)
validate_proof_of_work(header)
# Generated Highcharts option wrapper for
# series.columnpyramid.sonification.contextTracks.mapping.noteDuration.
# NOTE(review): each name is defined twice (getter then setter); without the
# upstream @property/@x.setter decorators -- which appear stripped from this
# dump -- the second def would shadow the first.  Confirm against upstream.
class OptionSeriesColumnpyramidSonificationContexttracksMappingNoteduration(Options):
# mapFunction getter / setter pair.
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
# mapTo getter / setter pair.
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
# max getter / setter pair.
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
# min getter / setter pair.
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
# within getter / setter pair.
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False)
# Generated Highcharts option wrapper for
# series.polygon.sonification.contextTracks.mapping.time.
# NOTE(review): each name is defined twice (getter then setter); the upstream
# @property/@x.setter decorators appear stripped from this dump -- without
# them the second def shadows the first.  Confirm against upstream.
class OptionSeriesPolygonSonificationContexttracksMappingTime(Options):
# mapFunction getter / setter pair.
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
# mapTo getter / setter pair.
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
# max getter / setter pair.
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
# min getter / setter pair.
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
# within getter / setter pair.
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False)
# Build a throwaway Serializer subclass whose fields mirror serializer_cls,
# with related fields downgraded to CharField and requiredness overridden.
# NOTE(review): uses iteritems (six/py2 compat) and mutates
# Serializer.base_fields -- presumably the serializer metaclass gives each
# subclass its own base_fields dict; if it is inherited by reference this
# would mutate the parent class.  Confirm against the serializer framework.
def create_dummy_serializer(serializer_cls, skip_fields=(), required_fields=()):
class Serializer(s.Serializer):
pass
for (name, field) in iteritems(serializer_cls.base_fields):
# Skip explicitly excluded and read-only fields entirely.
if ((name in skip_fields) or field.read_only):
continue
# Related fields are replaced by plain CharFields; others are copied.
if isinstance(field, s.RelatedField):
new_field = s.CharField()
else:
new_field = deepcopy(field)
# Only fields listed in required_fields stay required.
if (name in required_fields):
new_field.required = True
else:
new_field.required = False
Serializer.base_fields[name] = new_field
return Serializer
# Verify SLURM_JOB_NODELIST expansion: single hosts, comma-separated hosts,
# bracketed numeric lists, ranges, zero-padded numbers and combinations.
# `with_slurm_job_nodelist` is a context manager (defined elsewhere) that
# sets the node list and yields an environment exposing `.hostnames`.
def test_slurm_node_list() -> None:
with with_slurm_job_nodelist('compute-b24') as env:
assert (['compute-b24'] == env.hostnames)
with with_slurm_job_nodelist('compute-a1,compute-b2') as env:
assert (['compute-a1', 'compute-b2'] == env.hostnames)
with with_slurm_job_nodelist('compute-b2[1,2]') as env:
assert (['compute-b21', 'compute-b22'] == env.hostnames)
# Zero-padded entries must keep their padding.
with with_slurm_job_nodelist('compute-b2[011,022]') as env:
assert (['compute-b2011', 'compute-b2022'] == env.hostnames)
with with_slurm_job_nodelist('compute-b2[1-3]') as env:
assert (['compute-b21', 'compute-b22', 'compute-b23'] == env.hostnames)
with with_slurm_job_nodelist('compute-b2[1-3,5,6,8]') as env:
assert (['compute-b21', 'compute-b22', 'compute-b23', 'compute-b25', 'compute-b26', 'compute-b28'] == env.hostnames)
with with_slurm_job_nodelist('compute-b2[1-3,5-6,8]') as env:
assert (['compute-b21', 'compute-b22', 'compute-b23', 'compute-b25', 'compute-b26', 'compute-b28'] == env.hostnames)
# Bracketed group followed by a plain host.
with with_slurm_job_nodelist('compute-b2[1-3,5-6,8],compute-a1') as env:
assert (['compute-b21', 'compute-b22', 'compute-b23', 'compute-b25', 'compute-b26', 'compute-b28', 'compute-a1'] == env.hostnames)
with with_slurm_job_nodelist('compute[042,044]') as env:
assert (['compute042', 'compute044'] == env.hostnames)
with with_slurm_job_nodelist('compute[042-043,045,048-049]') as env:
assert (['compute042', 'compute043', 'compute045', 'compute048', 'compute049'] == env.hostnames)
# CLI handler for `export vcf`: read the segment (and optional bin-level)
# copy-number files, infer/verify the sample's sex, render the calls as VCF
# and write header+body to the output target.
def _cmd_export_vcf(args):
segarr = read_cna(args.segments)
# The bin-level .cnr file is optional and only refines the export.
cnarr = (read_cna(args.cnr) if args.cnr else None)
is_sample_female = verify_sample_sex(segarr, args.sample_sex, args.male_reference, args.diploid_parx_genome)
(header, body) = export.export_vcf(segarr, args.ploidy, args.male_reference, args.diploid_parx_genome, is_sample_female, args.sample_id, cnarr)
write_text(args.output, header, body)
# NOTE(review): the line below looks like a stripped @pytest.mark.django_db
# decorator; confirm against the original file.
.django_db
# Verify that award outlays reported by the overview endpoint sum across
# multiple fiscal years of FABA data (fixtures supplied by the listed
# pytest fixtures; clock pinned late in the year via the helpers).
def test_award_outlays_sum_multiple_years(client, monkeypatch, helpers, defc_codes, basic_ref_data, early_gtas, multi_year_faba):
helpers.patch_datetime_now(monkeypatch, LATE_YEAR, EARLY_MONTH, 25)
helpers.reset_dabs_cache()
resp = client.get(OVERVIEW_URL)
assert (resp.data['spending']['award_outlays'] == Decimal('1.15'))
class Myclass():
    """Toy class holding two integer attributes and a recursive method.

    NOTE(review): ``func`` recurses unconditionally -- the guard condition
    itself calls ``self.func(arg - 1)`` before any base case can apply --
    so every call ultimately exhausts the recursion limit.  The behavior is
    preserved here unchanged.
    """

    attr: int
    attr2: int

    def __init__(self):
        self.attr = 1
        self.attr2 = 2

    def func(self, arg: int):
        # Guard clause in place of the original if/else.
        if self.func(arg - 1) < 1:
            return 1
        product = self.func(arg - 1) * self.func(arg - 1)
        return product + self.attr * self.attr2 * arg + self.func(arg - 1)
# Integration tests for TestSlide's mock_async_callable DSL.
# NOTE(review): the nested context/example functions follow the TestSlide
# DSL (contexts registered via decorators that appear stripped from this
# dump); examples are later retrieved by name via _get_name_to_examples().
class TestMockAsyncCallableIntegration(TestDSLBase):
# An example that awaits the mock passes; one that never awaits it fails
# the and_assert_called_once() assertion.
def test_mock_async_callable_integration(self):
class SomeClass():
async def do_something():
return 'for real'
def fail_top(context):
_context
def fail_sub_context(context):
# Mock is installed but never awaited -> call-count assertion fails.
async def expect_fail(self):
self.mock_async_callable(SomeClass, 'do_something').for_call().to_return_value('mocked').and_assert_called_once()
def pass_top(context):
_context
def pass_sub_context(context):
# Mock is installed and awaited exactly once -> assertion passes.
async def expect_pass(self):
self.mock_async_callable(SomeClass, 'do_something').for_call().to_return_value('mocked').and_assert_called_once()
assert ((await SomeClass.do_something()) == 'mocked')
examples = _get_name_to_examples()
self.run_example(examples['expect pass'])
with self.assertRaisesRegex(AssertionError, 'calls did not match assertion'):
self.run_example(examples['expect fail'])
# A task spawned with create_task but never awaited must fail the run
# with LeftOverActiveTasks.
def test_leaked_tasks_fails_test_run(self):
async def dummy_async_func():
pass
def fail_top(context):
async def spawn_task_but_dont_await(self):
asyncio.create_task(dummy_async_func())
examples = _get_name_to_examples()
with self.assertRaisesRegex(LeftOverActiveTasks, 'Some tasks were started'):
self.run_example(examples['spawn task but dont await'])
# NOTE(review): the three lines below look like stripped pytest decorators
# (@pytest.mark.external, @pytest.mark.skipif, @pytest.mark.parametrize);
# confirm against the original file.
.external
.skipif((has_openai_key is False), reason='OpenAI API key not available')
.parametrize('cfg_string', ['zeroshot_cfg_string', 'fewshot_cfg_string', 'ext_template_cfg_string'])
# Verify a lemmatization LLM pipeline loads from each config fixture and
# that the component config round-trips through add_pipe on a blank model.
def test_lemma_config(cfg_string, request):
cfg_string = request.getfixturevalue(cfg_string)
orig_config = Config().from_str(cfg_string)
nlp = spacy.util.load_model_from_config(orig_config, auto_fill=True)
assert (nlp.pipe_names == ['llm'])
# Re-adding the component from its config (minus 'factory') must work.
component_cfg = dict(orig_config['components']['llm'])
component_cfg.pop('factory')
nlp2 = spacy.blank('en')
nlp2.add_pipe('llm', config=component_cfg)
assert (nlp2.pipe_names == ['llm'])
# Declares one propositional variable per lowercase letter a-z, created in
# groups of three via props() (a space-separated-names factory defined in
# the surrounding framework).
class LowercasePropositions(Vars):
(a, b, c) = props('a b c')
(d, e, f) = props('d e f')
(g, h, i) = props('g h i')
(j, k, l) = props('j k l')
(m, n, o) = props('m n o')
(p, q, r) = props('p q r')
(s, t, u) = props('s t u')
(v, w, x) = props('v w x')
(y, z) = props('y z')
# NOTE(review): the lines below look like stripped click decorators
# (@click.command() followed by @click.option(...) for each flag); confirm
# against the original file.
()
('--config', default='', metavar='FILE', help='path to config file')
('--bench-config', default='', metavar='FILE', help='path to config file')
('--input', multiple=True, help="A list of space separated input images; or a single glob pattern such as 'directory/*.jpg'")
('--output', help='A file or directory to save output visualizations. If not given, will show output in an OpenCV window.')
('--confidence-threshold', type=float, default=0.5, help='Minimum score for instance predictions to be shown')
('--weight', default='', metavar='FILE', help='path to model weights')
('--batch', default=0, help='batch size')
('--display/--no-display', default=False, help='display results')
('--cudagraph/--no-cudagraph', default=False, help='enable CUDA graph')
# CLI entry point: build the detectron2-style config, run the AIT predictor
# end-to-end over the input images in batches, optionally visualize, and
# report average per-image latency and FPS.
def run_model(config, bench_config, input, output, confidence_threshold, weight, batch, display, cudagraph):
cfg = get_cfg_defaults()
cfg.merge_from_file(config)
if (bench_config != ''):
cfg.merge_from_file(bench_config)
# A positive --batch overrides the config's batch size.
if (batch > 0):
cfg.SOLVER.IMS_PER_BATCH = batch
cfg.MODEL.WEIGHTS = weight
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = confidence_threshold
cfg.freeze()
assert (weight != ''), 'export model first: python convert_pt2ait.py model_d2.pkl params_ait.pkl --config configs/faster_rcnn_R_50_DC5.yaml'
demo = Predictor(cfg)
print('run {} end2end'.format(cfg.MODEL.NAME))
cnt = 0
duration = 0
detections = {}
bs = cfg.SOLVER.IMS_PER_BATCH
if input:
# A single argument is treated as a glob pattern.
if (len(input) == 1):
input = glob.glob(os.path.expanduser(input[0]))
assert input, 'The input path(s) was not found'
batch_data = demo.data_loader(input)
print('{} images, run {} batch'.format(len(input), len(batch_data)))
# NOTE(review): the loop variable reuses/shadows the `batch` parameter
# (already consumed above); with no inputs, cnt stays 0 and the final
# division would raise ZeroDivisionError -- confirm intended usage.
for batch in tqdm.tqdm(batch_data, disable=(not output)):
results = demo.run_batch(batch, cudagraph)
detections.update(results)
if display:
demo.visualize(results)
# Benchmark each batch over 10 timed runs.
duration += demo.benchmark(batch['data'], 10, cudagraph)
cnt += 1
duration /= (cnt * bs)
print(f'AIT Detection: Batch size: {bs}, Time per iter: {duration:.2f} ms, FPS: {(1000 / duration):.2f}')
def on_enable():
    """Register the Google reCAPTCHA v2 verification method.

    Reads the site key and secret from the environment, falling back to
    Google's documented public *test* key pair, then registers a
    Recaptcha2Method with the verification service.

    Raises:
        RuntimeError: if either value resolves to an empty string (i.e. the
            environment variable was explicitly set to '').
    """
    sitekey = os.getenv('GOOGLE_CAPTCHA2_SITEKEY', '6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI')
    secret = os.getenv('GOOGLE_CAPTCHA2_SECRET', '6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe')
    if not sitekey or not secret:
        # BUG FIX: the original message was missing a space before
        # "environment" ("...GOOGLE_CAPTCHA2_SECRETenvironment variables.").
        raise RuntimeError('Required keys not found in the environment. Please set the GOOGLE_CAPTCHA2_SITEKEY and GOOGLE_CAPTCHA2_SECRET environment variables.')
    method = Recaptcha2Method(sitekey, secret)
    verification_service.add_method(method)
# Base event wrapping a received Zebra protocol message: the message's
# attributes are re-exposed directly on the event.
class EventZebraBase(event.EventBase):
def __init__(self, zclient, msg):
super(EventZebraBase, self).__init__()
assert isinstance(msg, zebra.ZebraMessage)
# NOTE(review): this aliases (not copies) the message's attribute dict,
# so later mutation of msg is visible on the event -- confirm intended.
self.__dict__ = msg.__dict__
self.zclient = zclient
def __repr__(self):
# Render all public (non-underscore) attributes as key=value pairs.
m = ', '.join([('%s=%r' % (k, v)) for (k, v) in self.__dict__.items() if (not k.startswith('_'))])
return ('%s(%s)' % (self.__class__.__name__, m))
__str__ = __repr__
def set_site_config_nginx_property(site, config, bench_path='.', gen_config=True):
    """Merge ``config`` into a site's config, then optionally regenerate nginx.conf.

    Args:
        site: name of the site; must belong to the bench at ``bench_path``.
        config: mapping of config keys/values to apply to the site.
        bench_path: path of the bench directory (default: current directory).
        gen_config: when True, rewrite the bench's nginx configuration after
            updating the site config.

    Raises:
        ValueError: if ``site`` is not part of the bench.
    """
    # Imports kept local, matching the original (bench modules are costly
    # and this keeps the CLI module import-light).
    from bench.config.nginx import make_nginx_conf
    from bench.bench import Bench

    if site not in Bench(bench_path).sites:
        # ValueError is more precise than the bare Exception raised before;
        # callers handling Exception still catch it.
        raise ValueError('No such site')
    update_site_config(site, config, bench_path=bench_path)
    if gen_config:
        make_nginx_conf(bench_path=bench_path)
def generate_right_ssml_text(text, speaking_rate, speaking_pitch):
    """Wrap ``text`` in a prosody tag carrying rate/pitch percentages.

    Falsy attribute values (None, 0, '') are skipped entirely; when no
    attribute survives, the text is returned unmodified.
    """
    prosody_attrs = {'rate': speaking_rate, 'pitch': speaking_pitch}
    rendered = [f"{key}='{value}%'" for key, value in prosody_attrs.items() if value]
    if not rendered:
        return text
    # Keep the leading space so the string matches the original
    # " rate='..%' pitch='..%'" shape expected downstream.
    attr_string = ' ' + ' '.join(rendered)
    return convert_audio_attr_in_prosody_tag(attr_string, text)
# Adapter mapping one record of the Keolis/Ilevia (Lille) open-data feed
# onto the generic BikeShareStation model.
class KeolisIleviaStation(BikeShareStation):
def __init__(self, fields):
name = fields['nom']
# 'localisation' is a (lat, lon) pair of strings in the feed.
(latitude, longitude) = map(float, fields['localisation'])
bikes = int(fields['nbvelosdispo'])
free = int(fields['nbplacesdispo'])
# Extra metadata: 'online' and 'payment-terminal' are derived from the
# feed's French status/type literals.
extra = {'status': fields['etat'], 'uid': str(fields['libelle']), 'city': fields['commune'], 'address': fields['adresse'], 'last_update': fields['datemiseajour'], 'online': (fields['etat'] == 'EN SERVICE'), 'payment-terminal': (fields['type'] == 'AVEC TPE')}
super(KeolisIleviaStation, self).__init__(name, latitude, longitude, bikes, free, extra)
def resolve_version():
    """Determine the package version.

    Resolution order: a local Git clone, then git-archive substitution
    data, then a previously written version file. Raises RuntimeError if
    none of these is available.
    """
    if os.path.isdir(GIT_DIRECTORY):
        # Working from a Git checkout: compute and persist the version.
        version = git_version()
        print('Computed package version: {}'.format(version))
        print('Writing version to version file {}.'.format(VERSION_FILE))
        write_version_file(*version)
        return version
    if '$' not in ARCHIVE_COMMIT_HASH:
        # A '$' still present would mean the archive placeholder was never
        # substituted; its absence means we run from a git archive.
        version = archive_version()
        print('Archive package version: {}'.format(version))
        print('Writing version to version file {}.'.format(VERSION_FILE))
        write_version_file(*version)
        return version
    if os.path.isfile(VERSION_FILE):
        print('Reading version file {}'.format(VERSION_FILE))
        version = read_version_file()
        print('Package version from version file: {}'.format(version))
        return version
    raise RuntimeError(
        'Unable to determine package version. No local Git clone detected, '
        'and no version file found at {}.'.format(VERSION_FILE)
    )
# NOTE(review): the bare `_cache(maxsize=1)` call looks like a decorator
# that lost its '@' (i.e. `@_cache(maxsize=1)`) — confirm upstream.
_cache(maxsize=1)
def tokenize_css(css: str, start: int=0) -> dict[(str, Any)]:
    """Tokenize a CSS color starting at offset *start* of *css*.

    On success returns a dict with a 'hex' / 'name' / 'func' payload, a
    color-space 'id', and the 'end' offset; returns {} when a recognized
    function form fails to parse.
    """
    tokens = {}
    # 1) Hex literal, e.g. #ff0080.
    m = RE_HEX.match(css, start)
    if m:
        tokens['hex'] = {'start': m.group(1), 'value': m.group(0)}
        tokens['id'] = 'srgb'
        tokens['end'] = m.end()
        return tokens
    # 2) Identifier: either a named color or a function name.
    m = RE_NAME.match(css, start)
    if m:
        if color_names.has_name(m.group(1)):
            tokens['name'] = {'color': m.group(1)}
            tokens['id'] = 'srgb'
            tokens['end'] = m.end()
            return tokens
        func_name = m.group(0).lower()
        m2 = RE_CSS_FUNC.match(func_name)
        if (not m2):
            return {}
        tokens['func'] = {'name': func_name, 'values': [], 'delimiter': ''}
        m = RE_FUNC_START.match(css, m.end())
        if (not m):
            return {}
        delimiter = None
        # color() takes an explicit color-space identifier first.
        if (func_name == 'color'):
            m2 = RE_IDENT.match(css, m.end())
            if (not m2):
                return {}
            delimiter = 'space'
            tokens['func']['delimiter'] = delimiter
            tokens['id'] = m2.group(1)
            m = RE_SPACE.match(css, m2.end())
            if (not m):
                return {}
        # 3) Channel values; a '/' switches to the trailing alpha channel.
        slash = False
        for _ in range(MAX_CHANNELS):
            m2 = RE_CHANNEL.match(css, m.end())
            if (not m2):
                if slash:
                    # A '/' must be followed by one more channel.
                    return {}
                break
            m = m2
            # Regex groups 2/3/4 classify percent / degree / none tokens.
            if m.group(2):
                tokens['func']['values'].append({'type': 'percent', 'value': m.group(0)})
            elif m.group(3):
                tokens['func']['values'].append({'type': 'degree', 'value': m.group(0)})
            elif m.group(4):
                tokens['func']['values'].append({'type': 'none', 'value': m.group(0)})
            else:
                tokens['func']['values'].append({'type': 'number', 'value': m.group(0)})
            if slash:
                # The alpha channel is always the last one.
                break
            if (delimiter is None):
                # First separator decides comma- vs space-delimited syntax.
                m2 = RE_COMMA.match(css, m.end(0))
                if (not m2):
                    m2 = RE_LOOSE_SPACE.match(css, m.end(0))
                    if m2:
                        delimiter = 'space'
                        tokens['func']['delimiter'] = delimiter
                else:
                    delimiter = 'comma'
                    tokens['func']['delimiter'] = delimiter
            elif (delimiter == 'comma'):
                m2 = RE_COMMA.match(css, m.end(0))
                if (not m2):
                    break
            else:
                # Space-delimited: a '/' introduces the alpha channel.
                m2 = RE_SLASH.match(css, m.end(0))
                if m2:
                    slash = True
                else:
                    m2 = RE_LOOSE_SPACE.match(css, m.end(0))
            m = m2
        tokens['func']['slash'] = slash
        m = RE_FUNC_END.match(css, m.end())
        if (not m):
            return {}
        tokens['end'] = m.end()
        # 4) Per-function validation and color-space id assignment.
        if ((func_name == 'color') and (not validate_color(tokens))):
            return {}
        elif func_name.startswith('rgb'):
            tokens['id'] = 'srgb'
            if (not validate_srgb(tokens)):
                return {}
        elif (func_name in ('hsl', 'hsla', 'hwb')):
            tokens['id'] = ('--hwb' if (func_name == 'hwb') else '--hsl')
            if (not validate_cylindrical_srgb(tokens)):
                return {}
        elif (func_name in ('lab', 'oklab')):
            tokens['id'] = ('--' + func_name)
            if (not validate_lab(tokens)):
                return {}
        elif (func_name in ('oklch', 'lch')):
            tokens['id'] = ('--' + func_name)
            if (not validate_lch(tokens)):
                return {}
        return tokens
    # NOTE(review): when RE_NAME does not match at all, control falls off
    # the end and None (not {}) is returned — confirm this is intended.
def extractTheCaptainSLog(item):
    """Map a feed item from The Captain's Log to a release message.

    Returns None for previews/unnumbered items, a release message for the
    known 'WATTT' tag, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_numbering = bool(chp or vol)
    if not has_numbering or 'preview' in item['title'].lower():
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class EmbeddingOpenAI_0x(Embedding):
    """OpenAI embedding backend for the 0.x openai SDK."""

    def __init__(self, model_name='openai'):
        super().__init__(model_name)
        print(f'Initialized EmbeddingOpenAI 0x: {openai.__version__}')

    def dim(self):
        """Dimensionality of text-embedding-ada-002 vectors."""
        return 1536

    def getname(self, start_date, prefix='news'):
        """Collection name such as 'news_embedding__2024_01_01'."""
        return f'{prefix}_embedding__{start_date}'.replace('-', '_')

    def create(self, text: str, model_name='text-embedding-ada-002', num_retries=3):
        """Create an embedding for *text*, retrying on rate-limit/API errors.

        :returns: the embedding vector (list of floats)
        :raises: the last openai error if all ``num_retries`` attempts fail
        """
        api_key = os.getenv('OPENAI_API_KEY')
        for i in range(1, num_retries + 1):
            try:
                emb = openai.Embedding.create(input=[text], api_key=api_key, model=model_name)
                # BUGFIX: return on success — previously the loop kept
                # re-requesting the embedding num_retries times even after
                # a successful call.
                return emb['data'][0]['embedding']
            except openai.error.RateLimitError as e:
                print(f'[ERROR] RateLimit error during embedding ({i}/{num_retries}): {e}')
                if i == num_retries:
                    raise
            except openai.error.APIError as e:
                print(f'[ERROR] Failed during embedding ({i}/{num_retries}): {e}')
                if i == num_retries:
                    raise

    def get_or_create(self, text: str, source='', page_id='', db_client=None, key_ttl=(86400 * 30)):
        """Return a cached embedding for (source, page_id), creating and
        caching it (input truncated to EMBEDDING_MAX_LENGTH chars) on a
        cache miss."""
        client = db_client or DBClient()
        embedding = client.get_milvus_embedding_item_id(source, page_id)
        if not embedding:
            EMBEDDING_MAX_LENGTH = int(os.getenv('EMBEDDING_MAX_LENGTH', 5000))
            embedding = self.create(text[:EMBEDDING_MAX_LENGTH])
            client.set_milvus_embedding_item_id(source, page_id, json.dumps(embedding), expired_time=key_ttl)
        else:
            # Cached value is stored as JSON text; parse it back.
            embedding = utils.fix_and_parse_json(embedding)
        return embedding
class OptionPlotoptionsPictorialLabel(Options):
    """Generated Highcharts options wrapper for pictorial-series data
    labels.

    Each option is declared as a same-named getter/setter pair: getters
    return the Highcharts default via ``_config_get``; setters store a
    value via ``_config``.

    NOTE(review): in this extract the pairs carry no @property /
    @<name>.setter decorators, so as written the second def shadows the
    first — presumably the decorators were lost in extraction; confirm
    against the generator output.
    """

    def boxesToAvoid(self):
        return self._config_get(None)
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)
    def connectorAllowed(self):
        return self._config_get(False)
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)
    def connectorNeighbourDistance(self):
        return self._config_get(24)
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get('undefined')
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def maxFontSize(self):
        return self._config_get(None)
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)
    def minFontSize(self):
        return self._config_get(None)
    def minFontSize(self, num: float):
        self._config(num, js_type=False)
    def onArea(self):
        return self._config_get(None)
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)
    def style(self) -> 'OptionPlotoptionsPictorialLabelStyle':
        # Nested sub-options object (lazily created).
        return self._config_sub_data('style', OptionPlotoptionsPictorialLabelStyle)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
def test_data_transfer_from_broker(load_broker_data):
    """End-to-end check that `transfer_procurement_records --reload-all`
    copies every broker record into the source table and preserves row
    counts, id range, and one known row's full contents."""
    call_command('transfer_procurement_records', '--reload-all')
    table = SourceProcurementTransaction().table_name
    with connections[DEFAULT_DB_ALIAS].cursor() as cursor:
        cursor.execute(f'SELECT COUNT(*) FROM {table}')
        assert (cursor.fetchall()[0][0] == NUMBER_OF_RECORDS)
        id_field = 'detached_award_procurement_id'
        cursor.execute(f'SELECT MIN({id_field}), MAX({id_field}) FROM {table}')
        (min_id, max_id) = cursor.fetchall()[0]
        # NOTE(review): the expected min/max id literals are missing below
        # (apparently lost in extraction) — restore from the fixture data.
        assert (min_id == )
        assert (max_id == )
        cursor.execute(f"SELECT * FROM {table} WHERE detached_award_proc_unique = '12K3_-none-__0_-none-_-none-'")
        # NOTE(review): the first element (the id) of this expected tuple
        # is also missing — restore from the fixture data.
        assert (cursor.fetchall()[0] == (, '12K3_-none-__0_-none-_-none-', 'No', None, '2003-08-15 00:00:00', None, None, None, '12K3', '0', None, '0', None, 'SERVCO PACIFIC INC.', '', '012', 'Department of Agriculture (USDA)', '6395', 'USDA APHIS MRPBS', '12K3', 'ANIMAL AND PLANT HEALTH INSPECTION SERVICE', '0.00', '0.00', None, None, None, 'N: No', None, 'A', 'No', 'N', 'N', 'No', 'NOT APPLICABLE', 'X', None, None, None, None, 'D', 'NOT A BUNDLED REQUIREMENT', None, None, 'SMALL BUSINESS', 'S', 'NOT APPLICABLE EXEMPT FROM CAS', 'X', None, None, None, 'nan', datetime.datetime(2017, 8, 28, 5, 55, 23, 827371), None, None, None, None, None, None, None, None, None, None, None, 'NOT COMPETED', 'C', 'NAN', None, 'X', 'NOT APPLICABLE', Decimal('0'), None, 'NOT APPLICABLE', '999', None, None, None, None, None, 'Transaction does not use GFE/GFP', 'N', None, None, None, None, None, None, None, None, None, None, 'E', 'BPA', None, None, None, None, None, 'Not Applicable', 'X', 'X', 'NOT APPLICABLE', '2003-12-27 00:00:00', '2850 PUKOLOA ST STE 101', None, None, 'HONOLULU', 'HI01', 'UNITED STATES', None, None, None, 'HI', None, '', None, None, None, None, None, 'X', 'Not Applicable', 'N', 'No', None, None, '423110', 'AUTOMOBILE AND OTHER MOTOR VEHICLE MERCHANT WHOLESALERS', 'NONE', 'None', None, '0', '0', '2003-09-15 00:00:00', None, None, None, None, None, 'NOT APPLICABLE', 'X', None, None, '2003-08-15 00:00:00', '', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'TRUCKS AND TRUCK TRACTORS, WHEELED', '2320', None, None, None, 'IDV', None, None, None, None, None, None, None, None, None, None, None, None, None, 'NAN', None, 'NAN', None, None, None, None, None, None, 'B', 'PLAN NOT REQUIRED', None, None, 'Firm Fixed Price', 'J', None, 'NAN', None, None, 'SERVCO PACIFIC INC.', '', 'X', 'NO', 'CONT_IDV__12K3', datetime.datetime(2018, 9, 28, 21, 28, 57), None, None, None, None, None, None, None, None, None, 'uei_id', 
        'ultimate_p_id', False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, None, False, False, None, None, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, None, False, None, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, True, 'D&B'))
def _ambassador_module_onemapper(status_code, body_kind, body_value, content_type=None):
    """Build an Ambassador module config snippet containing a single
    error_response_overrides mapper for *status_code*.

    body_kind selects how *body_value* is rendered: 'text_format_source'
    (a filename), 'json_format' (inserted unquoted), or anything else
    (inserted quoted). An optional content_type line is appended.

    NOTE(review): the YAML fragments below appear with no leading
    indentation in this extract; YAML nesting is indentation-sensitive,
    so confirm the literals against the original source.
    """
    mod = (_ambassador_module_config() + f'''
error_response_overrides:
- on_status_code: "{status_code}"
body:
''')
    if (body_kind == 'text_format_source'):
        mod = (mod + f'''
{body_kind}:
filename: "{body_value}"
''')
    elif (body_kind == 'json_format'):
        # JSON bodies are emitted unquoted so YAML parses them as a map.
        mod = (mod + f'''
{body_kind}: {body_value}
''')
    else:
        mod = (mod + f'''
{body_kind}: "{body_value}"
''')
    if (content_type is not None):
        mod = (mod + f'''
content_type: "{content_type}"
''')
    return mod
def redirect_if_tags_query(view_fn):
    """Decorator: when the request carries a ?tags= query, redirect the
    measure page to the matching "measures for one CCG/practice" URL,
    preserving the query string; otherwise call the wrapped view."""
    import functools

    # BUGFIX: the original had a bare `(view_fn)` no-op expression where a
    # decorator belonged; wraps() preserves the view's name/docstring.
    @functools.wraps(view_fn)
    def wrapper(request, **kwargs):
        if not request.GET.get('tags'):
            return view_fn(request, **kwargs)
        if 'ccg_code' in kwargs:
            url = reverse('measures_for_one_ccg', kwargs=kwargs)
        else:
            url = reverse('measures_for_one_practice', kwargs={'practice_code': kwargs['practice_code']})
        url = '{}?{}'.format(url, request.GET.urlencode())
        return HttpResponseRedirect(url)
    return wrapper
class TestCreateEditor(unittest.TestCase):
    """Tests that File(exists=...) produces an editor with the matching
    file-dialog style."""

    # NOTE(review): bare `_traitsui` looks like a decorator/marker that
    # lost its '@' prefix (e.g. a "requires traitsui" skip) — confirm
    # against the original test module.
    _traitsui

    def test_exists_controls_editor_dialog_style(self):
        # exists=True -> "open an existing file" dialog…
        x = File(exists=True)
        editor = x.create_editor()
        self.assertEqual(editor.dialog_style, 'open')
        # …exists=False -> "save" dialog.
        x = File(exists=False)
        editor = x.create_editor()
        self.assertEqual(editor.dialog_style, 'save')
class ERI(Function):
    """Two-electron repulsion integral over primitive Cartesian Gaussians,
    evaluated symbolically by Obara–Saika-type recursions.

    Arguments to ``eval``: the 12 Cartesian angular momenta of centers
    A, B, C, D (ia..kd), the Boys-function order N, the exponents
    a, b, c, d, and the center coordinates A, B, C, D (array-like).
    """

    # sympy's Function contract requires eval to be a classmethod.
    @classmethod
    def eval(cls, ia, ja, ka, ib, jb, kb, ic, jc, kc, id_, jd, kd, N, a, b, c, d, A, B, C, D):
        ang_moms = np.array((ia, ja, ka, ib, jb, kb, ic, jc, kc, id_, jd, kd), dtype=int)
        ang_moms2d = ang_moms.reshape(-1, 3)
        # Any negative angular momentum terminates the recursion.
        if any(am < 0 for am in ang_moms):
            return 0
        # Gaussian-product quantities for the bra (A,B) and ket (C,D) pairs.
        AB = A - B
        xi = a + b
        P = ((a * A) + (b * B)) / xi
        CD = C - D
        zeta = c + d
        Q = ((c * C) + (d * D)) / zeta
        theta = (xi * zeta) / (xi + zeta)

        def recur(N, *ang_moms):
            # Re-enter ERI with modified angular momenta / Boys order.
            return ERI(*ang_moms, N, a, b, c, d, A, B, C, D)

        def recur_hrr(bra_or_ket, cart_ind):
            # Horizontal recurrence: shift angular momentum from the second
            # center of a pair (B or D) onto the first (A or C).
            if bra_or_ket == 'bra':
                XYZ = AB
                incr_ind = 0
            else:
                XYZ = CD
                incr_ind = 2
            decr_ind = incr_ind + 1
            incr_ang_moms = ang_moms2d.copy()
            incr_ang_moms[incr_ind, cart_ind] += 1
            incr_ang_moms[decr_ind, cart_ind] -= 1
            incr_ang_moms = incr_ang_moms.flatten()
            decr_ang_moms = ang_moms2d.copy()
            decr_ang_moms[decr_ind, cart_ind] -= 1
            decr_ang_moms = decr_ang_moms.flatten()
            return recur(N, *incr_ang_moms) + (XYZ[cart_ind] * recur(N, *decr_ang_moms))

        def recur_vrr(bra_or_ket, cart_ind):
            # Vertical recurrence: only valid once B and D are s-functions.
            assert (ib, jb, kb) == (0, 0, 0)
            assert (id_, jd, kd) == (0, 0, 0)
            if bra_or_ket == 'bra':
                XYZ = P - A
                decr_ind = 0
                decr_also_ind = 2
                exp1, exp2 = zeta, xi
                am1 = (ia, ja, ka)[cart_ind] - 1
                am2 = (ic, jc, kc)[cart_ind]
                sign = -1
            else:
                XYZ = Q - C
                decr_ind = 2
                decr_also_ind = 0
                exp1, exp2 = xi, zeta
                am1 = (ic, jc, kc)[cart_ind] - 1
                am2 = (ia, ja, ka)[cart_ind]
                sign = 1
            decr_ang_moms = ang_moms2d.copy()
            decr_ang_moms[decr_ind, cart_ind] -= 1
            decr_ang_moms = decr_ang_moms.flatten()
            decr2_ang_moms = ang_moms2d.copy()
            decr2_ang_moms[decr_ind, cart_ind] -= 2
            decr2_ang_moms = decr2_ang_moms.flatten()
            decr_also_ang_moms = ang_moms2d.copy()
            decr_also_ang_moms[decr_also_ind, cart_ind] -= 1
            decr_also_ang_moms = decr_also_ang_moms.flatten()
            denom = xi + zeta
            quot = exp1 / denom
            PQ = P - Q
            return (
                (XYZ[cart_ind] * recur(N, *decr_ang_moms))
                + (((sign * quot) * PQ[cart_ind]) * recur(N + 1, *decr_ang_moms))
                + ((am1 / (2 * exp2)) * (recur(N, *decr2_ang_moms) - (quot * recur(N + 1, *decr2_ang_moms))))
                + ((am2 / (2 * denom)) * recur(N + 1, *decr_also_ang_moms))
            )

        if all(am == 0 for am in ang_moms):
            # (ss|ss) base integral:
            #   2*pi^(5/2) / (xi*zeta*sqrt(xi+zeta)) * Kab * Kcd * F_N(theta*RPQ^2)
            RAB2 = AB.dot(AB)
            RCD2 = CD.dot(CD)
            Kab = exp(((-(a * b)) / xi) * RAB2)
            Kcd = exp(((-(c * d)) / zeta) * RCD2)
            PQ = P - Q
            RPQ2 = PQ.dot(PQ)
            # BUGFIX: the prefactor denominator is xi*zeta*sqrt(xi+zeta);
            # it was previously written as (xi*zeta) + sqrt(xi+zeta).
            return ((((2 * (pi ** (5 / 2))) / ((xi * zeta) * sqrt(xi + zeta))) * Kab * Kcd) * boys(N, theta * RPQ2))
        # HRR transfers B/D momentum first; VRR then lowers A/C momentum.
        elif ib > 0:
            return recur_hrr('bra', 0)
        elif jb > 0:
            return recur_hrr('bra', 1)
        elif kb > 0:
            return recur_hrr('bra', 2)
        elif id_ > 0:
            return recur_hrr('ket', 0)
        elif jd > 0:
            return recur_hrr('ket', 1)
        elif kd > 0:
            return recur_hrr('ket', 2)
        elif ia > 0:
            return recur_vrr('bra', 0)
        elif ja > 0:
            return recur_vrr('bra', 1)
        elif ka > 0:
            return recur_vrr('bra', 2)
        elif ic > 0:
            return recur_vrr('ket', 0)
        elif jc > 0:
            return recur_vrr('ket', 1)
        elif kc > 0:
            # BUGFIX: the z-component must recurse on cart_ind=2 (was 1).
            return recur_vrr('ket', 2)
# NOTE(review): `.parametrize(...)` appears to be a
# `@pytest.mark.parametrize(*decode_tuples_args)` decorator whose prefix
# was lost in extraction — confirm against the original test module.
.parametrize(*decode_tuples_args)
def test_tuple_contract_caller_default_with_decode_tuples(tuple_contract_with_decode_tuples, method_input, tuple_output, type_str, namedtuple_repr):
    """caller.method must decode tuple outputs into a namedtuple whose
    value, type string, and repr match the parametrized expectations."""
    result = tuple_contract_with_decode_tuples.caller.method(method_input)
    assert (result == tuple_output)
    assert (str(type(result)) == type_str)
    assert (result.__repr__() == namedtuple_repr)
def extractFroglationXyz(item):
    """Map a froglation.xyz feed item to a release message.

    Returns None for previews, unnumbered items, or blocked series; a
    release message for known tags; False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Manga releases of this series are excluded.
    bad = ['Fukusyu wo Chikatta Shironeko wa Ryuuou no Hiza no ue de Damin wo Musaboru (Manga)']
    if any(blocked in item['tags'] for blocked in bad):
        return None
    tagmap = [('Fukusyu wo Chikatta Shironeko wa Ryuuou no Hiza no ue de Damin wo Musaboru', 'Fukusyu wo Chikatta Shironeko wa Ryuuou no Hiza no ue de Damin wo Musaboru', 'translated'), ('Fukusyu wo Chikatta Shironeko wa Ryuuou no Hiza no ue de Damin wo Musaboru (WN)', 'Fukusyu wo Chikatta Shironeko wa Ryuuou no Hiza no ue de Damin wo Musaboru (WN)', 'translated')]
    for series_tag, series_name, translation_type in tagmap:
        if series_tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
# NOTE(review): `.parametrize(...)` looks like a
# `@pytest.mark.parametrize(...)` decorator whose prefix was lost in
# extraction. Also note the table contains `(('',), 100)` and `(('',), 0)`
# — identical input with conflicting expectations; the original strings
# may have been mangled. Confirm against the original test module.
.parametrize(('words', 'score_expected'), [(('',), 100), ((' ',), 100), (('.uk',), 100), (('to', ''), 80), (('', ''), 100), (('',), 0)])
def test_email_magic_score(ocr_result, words, score_expected):
    """EmailMagic.score should rate the OCR words per the parametrized table."""
    ocr_result.words = [{'text': w} for w in words]
    magic = EmailMagic()
    score = magic.score(ocr_result)
    assert (score == score_expected)
class RelationshipTlsPrivateKey(ModelNormal):
    """Auto-generated OpenAPI model for a TLS-private-key relationship.

    NOTE(review): the bare `_property` and `_js_args_to_python_args`
    lines look like decorators that lost their '@' prefix (e.g.
    `@cached_property` / `@convert_js_args_to_python_args`) during
    extraction — confirm against the generator output.
    """

    # Enum constraints / validators: none for this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        lazy_import()
        return {'tls_private_key': (RelationshipTlsPrivateKeyTlsPrivateKey,)}
    _property
    def discriminator():
        return None
    # JSON key -> python attribute mapping (identity here).
    attribute_map = {'tls_private_key': 'tls_private_key'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw API response data (read-only fields allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configured to do so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes set on every instance (not model properties).
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user-supplied values (read-only fields rejected)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, user construction rejects
            # read-only attributes.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class AbcdNet(decl):
    """AST node for a net declaration: a name, an argument list, and a
    body, plus source-position attributes."""

    _fields = ('name', 'args', 'body')
    _attributes = ('lineno', 'col_offset')

    def __init__(self, name, args, body, lineno=0, col_offset=0, **ARGS):
        decl.__init__(self, **ARGS)
        # Payload fields.
        self.name, self.args, self.body = name, args, body
        # Source coordinates, normalized to ints.
        self.lineno, self.col_offset = int(lineno), int(col_offset)
def extractDeepdreamtranlationsHomeBlog(item):
    """Map a Deep Dream Translations feed item to a release message.

    Returns None for previews/unnumbered items, a release message when a
    known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [("The Scum Shou's Survival Guide", "The Scum Shou's Survival Guide", 'translated'), ('TSSSG', "The Scum Shou's Survival Guide", 'translated'), ('TSWCSS', 'The Strategy of Washing Clean a Slag Shou', 'translated'), ('The Strategy of Washing Clean a Slag Shou', 'The Strategy of Washing Clean a Slag Shou', 'translated'), ("Heaven Official's Blessing", "Heaven Official's Blessing", 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for series_tag, series_name, translation_type in tagmap:
        if series_tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
class BaseProtocol(ProtocolAPI):
    """Base devp2p protocol: maps command types to wire command ids
    relative to this protocol's negotiated id offset.

    NOTE(review): `supports_command` and `as_capability` take `cls` but
    carry no @classmethod decorator in this extract — presumably lost in
    extraction; confirm upstream.
    """

    def __init__(self, transport: TransportAPI, command_id_offset: int, snappy_support: bool) -> None:
        self.logger = get_logger('p2p.protocol.Protocol')
        self.transport = transport
        self.command_id_offset = command_id_offset
        self.snappy_support = snappy_support
        # Forward and reverse lookup tables between command classes and
        # their on-the-wire command ids.
        self.command_id_by_type = {command_type: (command_id_offset + command_type.protocol_command_id) for command_type in self.commands}
        self.command_type_by_id = {command_id: command_type for (command_type, command_id) in self.command_id_by_type.items()}

    def __repr__(self) -> str:
        return ('(%s, %d)' % (self.name, self.version))

    def supports_command(cls, command_type: Type[CommandAPI[Any]]) -> bool:
        return (command_type in cls.commands)

    def as_capability(cls) -> Capability:
        # (name, version) pair as used in the devp2p Hello handshake.
        return (cls.name, cls.version)

    def get_command_type_for_command_id(self, command_id: int) -> Type[CommandAPI[Any]]:
        return self.command_type_by_id[command_id]

    def send(self, command: CommandAPI[Any]) -> None:
        # Encode with this protocol's wire id and negotiated compression.
        message = command.encode(self.command_id_by_type[type(command)], self.snappy_support)
        self.transport.send(message)
def verify_apks(signed_apk, unsigned_apk, tmp_dir, v1_only=None):
    """Verify that unsigned_apk matches signed_apk apart from the
    signature, by copying the signature over and re-verifying.

    :param signed_apk: path to the reference, signed APK
    :param unsigned_apk: path to the locally built, unsigned APK
    :param tmp_dir: scratch directory for the signature-copied APK
    :param v1_only: forwarded to apksigcopier (None = auto-detect)
    :returns: None on success, or an error-description string
    """
    # BUGFIX: check that both input files exist *before* attempting
    # signature verification — previously a missing signed APK surfaced
    # as a misleading "verification of signed APK failed" error.
    if not os.path.isfile(signed_apk):
        return 'can not verify: file does not exists: {}'.format(signed_apk)
    if not os.path.isfile(unsigned_apk):
        return 'can not verify: file does not exists: {}'.format(unsigned_apk)
    if not verify_apk_signature(signed_apk):
        logging.info('...NOT verified - {0}'.format(signed_apk))
        return 'verification of signed APK failed'
    tmp_apk = os.path.join(tmp_dir, 'sigcp_' + os.path.basename(unsigned_apk))
    try:
        apksigcopier.do_copy(signed_apk, unsigned_apk, tmp_apk, v1_only=v1_only, exclude=apksigcopier.exclude_meta)
    except apksigcopier.APKSigCopierError as e:
        logging.info('...NOT verified - {0}'.format(tmp_apk))
        error = 'signature copying failed: {}'.format(str(e))
        # Attach a diff of the two APKs to aid debugging.
        result = compare_apks(signed_apk, unsigned_apk, tmp_dir, os.path.dirname(unsigned_apk))
        if result is not None:
            error += '\nComparing reference APK to unsigned APK...\n' + result
        return error
    if not verify_apk_signature(tmp_apk):
        logging.info('...NOT verified - {0}'.format(tmp_apk))
        error = 'verification of APK with copied signature failed'
        result = compare_apks(signed_apk, tmp_apk, tmp_dir, os.path.dirname(unsigned_apk))
        if result is not None:
            error += '\nComparing reference APK to APK with copied signature...\n' + result
        return error
    logging.info('...successfully verified')
    return None
def test_issue_21_v0_3_8():
    """Regression test (issue #21, v0.3.8): np.inner must work with Fxp
    operands and agree with the plain-list result."""
    a = [1, 2, 3]
    b = [0, 1, 0]
    assert np.inner(a, b) == 2
    na = np.array([1, 2, 3])
    nb = np.array([0, 1, 0])
    assert (np.inner(na, nb) == np.inner(a, b)).all()
    fa = Fxp([1, 2, 3])
    fb = Fxp([0, 1, 0])
    # BUGFIX: the inner product was computed twice and the first result
    # (`z`) ignored; compute once and assert on it. Fxp results are
    # called (`z()`) to extract their numeric value.
    z = np.inner(fa, fb)
    assert (z() == np.inner(a, b)).all()
def plot_histogram(counts, fontsize, dpi):
    """Render *counts* as a horizontal, log-scale bar chart.

    *counts* maps a label either to a number or to a nested
    {label: number} dict; a nested dict is flattened into a group of bars
    sharing one legend entry (the outer key).
    """
    (fig, ax) = plt.subplots(dpi=dpi, figsize=(8, 20))
    labels = []
    for (k, v) in counts.items():
        if isinstance(v, dict):
            # Nested group: one bar per inner key, legend-labelled by k.
            labels += list(v.keys())
            v = list(v.values())
        else:
            labels.append(k)
            v = [v]
        # Position this group's bars directly after the bars drawn so far
        # (len(labels) already includes the current group's labels).
        bars = plt.barh(((len(labels) + (- len(v))) + np.arange(len(v))), v, height=0.9, label=k)
        bar_autolabel(bars, fontsize)
    # NOTE(review): tick labels are assigned before plt.yticks fixes the
    # tick positions — matplotlib may warn about this ordering; confirm
    # it renders as intended.
    ax.set_yticklabels(labels, fontsize=fontsize)
    ax.axes.xaxis.set_ticklabels([])
    ax.xaxis.tick_top()
    ax.invert_yaxis()
    plt.yticks(np.arange(len(labels)))
    plt.xscale('log')
    plt.legend(ncol=len(counts), loc='upper center')
# NOTE(review): the bare `()` line looks like a `@pytest.fixture()`
# decorator that lost its '@pytest.fixture' prefix — confirm upstream.
()
def setup_to_pass():
    """Fixture: install the login-watch audit rules, yield to the test,
    then remove the rules file and flush all loaded audit rules."""
    rules = ['-w /var/log/lastlog -p wa -k logins', '-w /var/run/faillock -p wa -k logins']
    for rule in rules:
        # Persist the rule on disk and load it into the running auditd.
        print(shellexec(f'echo "{rule}" >> /etc/audit/rules.d/pytest.rules'))
        print(shellexec(f'auditctl {rule}'))
    (yield None)
    # Teardown: dump current state for debugging, then clean up.
    print(shellexec('cat /etc/audit/rules.d/pytest.rules'))
    print(shellexec('auditctl -l'))
    os.remove('/etc/audit/rules.d/pytest.rules')
    shellexec('auditctl -D')
def __app_sync(target, name='', host=None, port=0, view: Optional[AppView]=AppView.FLET_APP, assets_dir='assets', upload_dir=None, web_renderer: WebRenderer=WebRenderer.CANVAS_KIT, use_color_emoji=False, route_url_strategy='path', auth_token=None):
    """Synchronous Flet app runner: connect to the Flet server, open the
    requested view, and block until the viewer exits or the process is
    signalled."""
    # Accept plain strings for the enum-typed parameters.
    if isinstance(view, str):
        view = AppView(view)
    if isinstance(web_renderer, str):
        web_renderer = WebRenderer(web_renderer)
    # FLET_FORCE_WEB_VIEW overrides the requested view with the browser.
    force_web_view = os.environ.get('FLET_FORCE_WEB_VIEW')
    assets_dir = __get_assets_dir_path(assets_dir)
    conn = __connect_internal_sync(page_name=name, view=(view if (not force_web_view) else AppView.WEB_BROWSER), host=host, port=port, auth_token=auth_token, session_handler=target, assets_dir=assets_dir, upload_dir=upload_dir, web_renderer=web_renderer, use_color_emoji=use_color_emoji, route_url_strategy=route_url_strategy)
    url_prefix = os.getenv('FLET_DISPLAY_URL_PREFIX')
    if (url_prefix is not None):
        print(url_prefix, conn.page_url)
    else:
        logger.info(f'App URL: {conn.page_url}')
    logger.info('Connected to Flet app and handling user sessions...')
    # Desktop viewer: launch it and block until its window closes.
    if (((view == AppView.FLET_APP) or (view == AppView.FLET_APP_HIDDEN) or (view == AppView.FLET_APP_WEB)) and (not is_linux_server()) and (not is_embedded()) and (url_prefix is None)):
        (fvp, pid_file) = open_flet_view(conn.page_url, (assets_dir if (view != AppView.FLET_APP_WEB) else None), (view == AppView.FLET_APP_HIDDEN))
        try:
            fvp.wait()
        except:
            # Best-effort wait; always fall through to cleanup.
            pass
        close_flet_view(pid_file)
        conn.close()
    elif (not is_embedded()):
        # Web/server mode: optionally open a browser, then idle until a
        # termination signal or Ctrl-C arrives.
        if ((view == AppView.WEB_BROWSER) and (url_prefix is None)):
            open_in_browser(conn.page_url)
        terminate = threading.Event()

        def exit_gracefully(signum, frame):
            logger.debug('Gracefully terminating Flet app...')
            terminate.set()
        signal.signal(signal.SIGINT, exit_gracefully)
        signal.signal(signal.SIGTERM, exit_gracefully)
        try:
            # Poll the event (1 s) so KeyboardInterrupt is still delivered.
            while True:
                if terminate.wait(1):
                    break
        except KeyboardInterrupt:
            pass
        conn.close()
class DecimalField(NumberField[Decimal]):
    """Model field holding a :class:`~decimal.Decimal` value, optionally
    constrained by a maximum number of digits / decimal places."""

    max_digits: Optional[int] = None
    max_decimal_places: Optional[int] = None

    def __init__(self, *, max_digits: Optional[int]=None, max_decimal_places: Optional[int]=None, **kwargs: Any) -> None:
        self.max_digits = max_digits
        self.max_decimal_places = max_decimal_places
        # Forward the constraints as kwargs so they survive clone()/copy.
        super().__init__(**kwargs, **{'max_digits': max_digits, 'max_decimal_places': max_decimal_places})

    def to_python(self, value: Any) -> Any:
        """Convert *value* to the field's python form, honoring a custom
        coercer when one is configured."""
        if self._to_python is None:
            if self.model._options.decimals:
                return self.prepare_value(value, coerce=True)
            return self.prepare_value(value)
        return self._to_python(value)

    def prepare_value(self, value: Any, *, coerce: Optional[bool]=None) -> Optional[Decimal]:
        """Coerce *value* to Decimal when coercion is enabled."""
        return Decimal(value) if self.should_coerce(value, coerce) else value

    def validate(self, value: Decimal) -> Iterable[ValidationError]:
        """Yield a ValidationError for each constraint *value* violates."""
        if not value.is_finite():
            yield self.validation_error(f'Illegal value in decimal: {value!r}')
            # BUGFIX: NaN/Infinity carry a non-integer exponent in their
            # digit tuple, so the abs()/slice logic below would raise
            # TypeError; stop validating here instead.
            return
        decimal_tuple: Optional[DecimalTuple] = None
        mdp = self.max_decimal_places
        if mdp:
            decimal_tuple = value.as_tuple()
            # A negative exponent counts the fractional digits.
            if abs(decimal_tuple.exponent) > mdp:
                yield self.validation_error(f'{self.field} must have less than {mdp} decimal places.')
        max_digits = self.max_digits
        if max_digits:
            if decimal_tuple is None:
                decimal_tuple = value.as_tuple()
            # Integer-part digit count: slicing with the (negative)
            # exponent drops the fractional digits from the tail.
            digits = len(decimal_tuple.digits[:decimal_tuple.exponent])
            if digits > max_digits:
                yield self.validation_error(f'{self.field} must have less than {max_digits} digits.')
def extractWwwBlsfeedCom(item):
    """Map a www.blsfeed.com feed item to a release message.

    Returns None for previews/unnumbered items, a release message when a
    known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for series_tag, series_name, translation_type in tagmap:
        if series_tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
def action_to_str(act, ofctl_action_to_str):
    """Render a Nicira-extension OpenFlow action as a human-readable string.

    :param act: the Nicira action instance (dispatch on ``act.subtype``)
    :param ofctl_action_to_str: callback used to render nested CT actions
    :returns: a 'NX_<KIND>: {...}' description; unknown subtypes fall
        through to 'NX_UNKNOWN' with the raw payload base64-encoded.
    """
    sub_type = act.subtype
    if (sub_type == nicira_ext.NXAST_RESUBMIT):
        return ('NX_RESUBMIT: {port: %s, table: %s}' % (act.in_port, act.table_id))
    elif (sub_type == nicira_ext.NXAST_REG_MOVE):
        src_start = act.src_ofs
        dst_start = act.dst_ofs
        src_end = (src_start + act.n_bits)
        dst_end = (dst_start + act.n_bits)
        return ('NX_MOVE: {%s[%s..%s]: %s[%s..%s]}' % (act.dst_field, dst_start, dst_end, act.src_field, src_start, src_end))
    elif (sub_type == nicira_ext.NXAST_REG_LOAD):
        return ('NX_LOAD: {%s%s: %x}' % (act.dst, nicira_ext.ofs_nbits_str(act.ofs_nbits), act.value))
    elif (sub_type == nicira_ext.NXAST_LEARN):
        # Render each flow-mod spec; spec src/dst may be either a plain
        # field name or a (field, start_offset) pair.
        specs = []
        add_spec = specs.append
        for spec in act.specs:
            dst_type = spec._dst_type
            if (dst_type == 0):
                # Match spec.
                if isinstance(spec.src, (tuple, list)):
                    src = spec.src[0]
                    start = spec.src[1]
                    end = (start + spec.n_bits)
                    start_end = ('%s..%s' % (start, end))
                else:
                    src = spec.src
                    start_end = '[]'
                add_spec(('%s[%s]' % (src, start_end)))
            elif (dst_type == 1):
                # Load spec (src -> dst).
                if isinstance(spec.src, (tuple, list)):
                    src = spec.src[0]
                    start = spec.src[1]
                    end = (start + spec.n_bits)
                    src_start_end = ('[%s..%s]' % (start, end))
                else:
                    src = spec.src
                    src_start_end = '[]'
                if isinstance(spec.dst, (tuple, list)):
                    dst = spec.dst[0]
                    start = spec.dst[1]
                    end = (start + spec.n_bits)
                    dst_start_end = ('[%s..%s]' % (start, end))
                else:
                    dst = spec.dst
                    dst_start_end = '[]'
                add_spec(('NX_LOAD {%s%s: %s%s}' % (dst, dst_start_end, src, src_start_end)))
            elif (dst_type == 2):
                # Output spec.
                if isinstance(spec.src, (tuple, list)):
                    src = spec.src[0]
                    start = spec.src[1]
                    end = (start + spec.n_bits)
                    start_end = ('%s..%s' % (start, end))
                else:
                    src = spec.src
                    start_end = '[]'
                add_spec(('output:%s%s' % (src, start_end)))
        # BUGFIX: was `act.self.fin_hard_timeout`, which would raise
        # AttributeError whenever an NXAST_LEARN action was rendered.
        return ('NX_LEARN: {idle_timeout: %s, hard_timeouts: %s, priority: %s, cookie: %s, flags: %s, table_id: %s, fin_idle_timeout: %s, fin_hard_timeout: %s, specs: %s}' % (act.idle_timeout, act.hard_timeout, act.priority, act.cookie, act.flags, act.table_id, act.fin_idle_timeout, act.fin_hard_timeout, specs))
    elif (sub_type == nicira_ext.NXAST_CONJUNCTION):
        return ('NX_CONJUNCTION: {clause: %s, number_of_clauses: %s, id: %s}' % (act.clause, act.n_clauses, act.id))
    elif (sub_type == nicira_ext.NXAST_CT):
        # A non-zero ofs/nbits means the zone comes from a field slice.
        if (act.zone_ofs_nbits != 0):
            start = act.zone_ofs_nbits
            end = (start + 16)
            zone = (act.zone_src + ('[%s..%s]' % (start, end)))
        else:
            zone = act.zone_src
        actions = [ofctl_action_to_str(action) for action in act.actions]
        return ('NX_CT: {flags: %s, zone: %s, table: %s, alg: %s, actions: %s}' % (act.flags, zone, act.recirc_table, act.alg, actions))
    elif (sub_type == nicira_ext.NXAST_NAT):
        return ('NX_NAT: {flags: %s, range_ipv4_min: %s, range_ipv4_max: %s, range_ipv6_min: %s, range_ipv6_max: %s, range_proto_min: %s, range_proto_max: %s}' % (act.flags, act.range_ipv4_min, act.range_ipv4_max, act.range_ipv6_min, act.range_ipv6_max, act.range_proto_min, act.range_proto_max))
    data_str = base64.b64encode(act.data)
    return ('NX_UNKNOWN: {subtype: %s, data: %s}' % (sub_type, data_str.decode('utf-8')))
def _print_fnarg(a, env: PrintEnv) -> str:
    """Render a single function argument as 'name : type[ mem]'."""
    arg_name = env.get_name(a.name)
    # size/index arguments have fixed spellings.
    if a.type == T.size:
        return f'{arg_name} : size'
    if a.type == T.index:
        return f'{arg_name} : index'
    type_str = _print_type(a.type, env)
    mem_suffix = f' {a.mem.name()}' if a.mem else ''
    return f'{arg_name} : {type_str}{mem_suffix}'
def post_solve_pig_wood(arbiter, space, _):
    """Collision handler for pig/wood impacts: strong hits damage the
    pig, award points, and remove dead pigs from the space."""
    global score
    doomed = []
    # Only impacts above the impulse threshold cause damage.
    if arbiter.total_impulse.length > 700:
        pig_shape, _wood_shape = arbiter.shapes
        for pig in pigs:
            if pig.shape == pig_shape:
                pig.life -= 20
                score += 10000
                if pig.life <= 0:
                    doomed.append(pig)
    # Remove dead pigs after iterating to avoid mutating `pigs` mid-loop.
    for pig in doomed:
        space.remove(pig.shape, pig.shape.body)
        pigs.remove(pig)
def test_exception_is_caught_and_shows_error_message(dummy_execute_and_send):
    """Commands that raise must surface the generic error message; output
    yielded before the exception is still delivered first."""
    (dummy, m) = dummy_execute_and_send
    dummy._execute_and_send(cmd='raises_exception', args=[], match=None, msg=m, template_name=dummy.raises_exception._err_command_template)
    assert (dummy.MSG_ERROR_OCCURRED in dummy.pop_message().body)
    dummy._execute_and_send(cmd='yields_str_then_raises_exception', args=[], match=None, msg=m, template_name=dummy.yields_str_then_raises_exception._err_command_template)
    # The yielded string arrives before the error notification.
    assert ('foobar' == dummy.pop_message().body)
    assert (dummy.MSG_ERROR_OCCURRED in dummy.pop_message().body)
# NOTE(review): `.parametrize(...)` appears to be a
# `@pytest.mark.parametrize(...)` decorator whose prefix was lost in
# extraction — confirm against the original test module.
.parametrize('input_points, expected_points', [(range(10), [0, 4, 8, 9]), ([1, 10, 11, 12, 13, 14, 100, 10000], [1, 13, 10000])])
def test_downsample(string_to_well, input_points, expected_points):
    """downsample() should thin the trajectory to the expected sample
    points (first and last points retained)."""
    well_definition = '1.01\n Unknown\n custom_name 0 0 0\n 1\n Zonelog DISC 1 zone1 2 zone2 3 zone3'
    # Append one 'X Y Z zone' row per input point; all coordinates = i so
    # a single expected list can be checked against all three columns.
    for i in input_points:
        well_definition += f'''
{i} {i} {i} 1'''
    well = string_to_well(well_definition)
    print(well.dataframe)
    well.downsample()
    print(well.dataframe)
    assert ({'X_UTME': well.dataframe['X_UTME'].values.tolist(), 'Y_UTMN': well.dataframe['Y_UTMN'].values.tolist(), 'Z_TVDSS': well.dataframe['Z_TVDSS'].values.tolist()} == {'X_UTME': expected_points, 'Y_UTMN': expected_points, 'Z_TVDSS': expected_points})
class OptionPlotoptionsXrangeSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Generated Highcharts option wrapper for the highpass-filter frequency mapping.

    NOTE(review): the original block defined each getter/setter pair as two
    plain methods with the same name, so each setter silently shadowed its
    getter; restored the ``@property``/``@<name>.setter`` pattern these
    generated wrappers use — confirm against the generator's output.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def import_template(module, zbx, filename, fmt):
    """Import a Zabbix template file via the configuration.import API.

    Args:
        module: AnsibleModule instance, used to report failures.
        zbx: connected Zabbix API client.
        filename: path to the template file on disk.
        fmt: template serialization format passed to the API.
    """
    if not os.path.exists(filename):
        module.fail_json(msg=('template file %s not found' % filename))
    with open(filename) as f:
        data = f.read()
    try:
        return zbx.configuration.import_({'format': fmt, 'source': data, 'rules': {'items': {'createMissing': True, 'updateExisting': True, 'deleteMissing': True}, 'graphs': {'createMissing': True, 'updateExisting': True, 'deleteMissing': True}, 'applications': {'createMissing': True, 'updateExisting': True, 'deleteMissing': True}, 'triggers': {'createMissing': True, 'updateExisting': True, 'deleteMissing': True}, 'templates': {'createMissing': True, 'updateExisting': True}, 'templateScreens': {'createMissing': True, 'updateExisting': True, 'deleteMissing': True}, 'templateLinkage': {'createMissing': True, 'updateExisting': True}, 'discoveryRules': {'createMissing': True, 'updateExisting': True, 'deleteMissing': True}}})
    except Exception as e:
        # FIX: narrowed from BaseException so KeyboardInterrupt/SystemExit
        # are not swallowed and misreported as Zabbix API problems.
        module.fail_json(msg=('Zabbix API problem: %s' % e))
    # fail_json normally exits the module; guard against a stub that returns.
    raise AssertionError
def extractPiperpickupsWordpressCom(item):
    """Map a piperpickups.wordpress.com feed item to a release message.

    Returns None for previews or untitled chapters, a release message when a
    known tag or title prefix matches, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    # Tag-based series matching first.
    tagmap = [('ss', 'Support System: Leading Two Souls Together', 'translated'), ('mstp', 'My Son is a Transmigrated Prince', 'translated'), ('atmlwesm', 'All The Male Leads Want to Eat Swan Meat', 'translated'), ('MGCH', 'Quick Transmigration System: Male God, Come Here', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Fall back to case-insensitive title-prefix matching.
    chp_prefixes = [('ATMLWESM Chapter ', 'All The Male Leads Want to Eat Swan Meat', 'translated'), ('MGCH Chapter ', 'Quick Transmigration System: Male God, Come Here', 'translated'), ('Cat ', 'Me and My Beloved Cat (Girlfriend)', 'translated')]
    for prefix, series, tl_type in chp_prefixes:
        if title_lower.startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@app.callback([Output('activity-date', 'children'), Output('activity-kpi', 'children'), Output('oura-activity-content', 'children')], [Input('last-chart-clicked', 'children')])
def update_oura_activity_contents(date):
    """Dash callback: refresh the Oura activity date header, KPI and content.

    NOTE(review): the decorator line had lost its ``@app.callback`` prefix in
    this copy of the source (only the Output/Input argument tuple remained),
    leaving the callback unregistered; restored the conventional Dash
    decorator — confirm the application object's actual name.
    """
    date = pd.to_datetime(date).date()
    date_title, star, score = generate_oura_activity_header_kpi(date)
    # Wrap the score in star icons when the day earned a star.
    if star:
        kpi_score = [html.I(className='fa fa-star align-middle mr-1', style={'fontSize': '25%'}), score, html.I(className='fa fa-star align-middle ml-1', style={'fontSize': '25%'})]
    else:
        kpi_score = score
    return (date_title, kpi_score, generate_oura_activity_content(date))
@using('pyscf')
def test_composite_run_dict(this_dir):
    """Composite calculator driven from a run dict: final = high - low energy.

    NOTE(review): the decorator line had been reduced to ``('pyscf')`` in this
    copy of the source; restored what appears to be the ``@using('pyscf')``
    skip-unless-installed marker — confirm against the original test module.
    The expected energy value is preserved exactly as found.
    """
    run_dict = {'geom': {'type': 'cart', 'fn': str((this_dir / '00_ch4.xyz'))}, 'calc': {'type': 'composite', 'calcs': {'high': {'type': 'pyscf', 'basis': '321g'}, 'low': {'type': 'pyscf', 'basis': 'sto3g'}}, 'final': 'high - low', 'pal': 4}}
    results = run_from_dict(run_dict)
    geom = results.calced_geoms[0]
    assert (geom._energy == pytest.approx((- 0.)))
def _tag_webhook(func: Callable, uri_rule: str, methods: Tuple[str], form_param: Optional[str], raw: bool) -> Callable:
    """Attach webhook routing metadata to *func* and return it unchanged."""
    log.info(f"webhooks: Flag to bind {uri_rule} to {getattr(func, '__name__', func)}")
    # Stash routing metadata on the function object for later discovery.
    metadata = {
        '_err_webhook_uri_rule': uri_rule,
        '_err_webhook_methods': methods,
        '_err_webhook_form_param': form_param,
        '_err_webhook_raw': raw,
    }
    for attr_name, attr_value in metadata.items():
        setattr(func, attr_name, attr_value)
    return func
class _MemoryBroadcast():
    """In-process pub/sub: every active subscriber receives every published event."""

    class Subscription():
        """Async-iterable view over one subscriber's event queue."""

        def __init__(self, queue: asyncio.Queue) -> None:
            self._queue = queue

        async def __aiter__(self) -> AsyncIterator[str]:
            # Stream events as they arrive; never terminates on its own.
            while True:
                event = await self._queue.get()
                yield event

    def __init__(self) -> None:
        # One queue per active subscriber.
        self._subscriptions: Set[asyncio.Queue] = set()

    async def publish(self, event: str) -> None:
        """Deliver *event* to every currently registered subscriber."""
        for subscriber_queue in self._subscriptions:
            await subscriber_queue.put(event)

    async def subscribe(self) -> AsyncIterator['Subscription']:
        """Async generator: register a subscriber for the caller's lifetime."""
        subscriber_queue: asyncio.Queue = asyncio.Queue()
        self._subscriptions.add(subscriber_queue)
        try:
            yield self.Subscription(subscriber_queue)
        finally:
            # Deregister first, then wake any pending reader with a sentinel.
            self._subscriptions.remove(subscriber_queue)
            await subscriber_queue.put(None)
class OptionPlotoptionsFunnelSonificationContexttracksMappingPan(Options):
    """Generated Highcharts option wrapper for the funnel sonification pan mapping.

    NOTE(review): the original block defined each getter/setter pair as two
    plain methods with the same name, so each setter silently shadowed its
    getter; restored the ``@property``/``@<name>.setter`` pattern these
    generated wrappers use — confirm against the generator's output.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_graphql_to_attachment_audio():
    """GraphQL MessageAudio payloads convert to equivalent AudioAttachment objects.

    NOTE(review): the ``playable_url`` value (and the expected ``url``) had
    been redacted out of this copy of the source, leaving broken syntax; a
    placeholder URL is used for both sides so the payload and the expectation
    stay consistent — restore the original fixture URL if available.
    """
    url = 'https://example.com/audio.mp3'
    data = {'__typename': 'MessageAudio', 'attribution_app': None, 'attribution_metadata': None, 'filename': 'audio.mp3', 'playable_url': url, 'playable_duration_in_ms': 27745, 'is_voicemail': False, 'audio_type': 'FILE_ATTACHMENT', 'url_shimhash': 'AT0...', 'url_skipshim': True}
    assert (AudioAttachment(id=None, filename='audio.mp3', url=url, duration=datetime.timedelta(seconds=27, microseconds=745000), audio_type='FILE_ATTACHMENT') == graphql_to_attachment(data))
def proportions_diff_z_stat_ind(ref: pd.DataFrame, curr: pd.DataFrame):
    """Z-statistic for the difference of two independent proportions.

    ``ref`` and ``curr`` are sequences of 0/1 indicators; the standard error
    uses the pooled proportion of both samples.
    """
    size_ref, size_curr = len(ref), len(curr)
    prop_ref = float(sum(ref)) / size_ref
    prop_curr = float(sum(curr)) / size_curr
    # Pooled proportion across both samples.
    pooled = float((prop_ref * size_ref) + (prop_curr * size_curr)) / (size_ref + size_curr)
    std_err = np.sqrt(pooled * (1 - pooled) * ((1.0 / size_ref) + (1.0 / size_curr)))
    return (prop_ref - prop_curr) / std_err
def test_with_nonexistent_id(client, create_idv_test_data):
    """Unknown numeric and generated-id lookups both return a zero count."""
    for url in ('/api/v2/idvs/count/federal_account/0/', '/api/v2/idvs/count/federal_account/CONT_IDV_000/'):
        response = client.get(url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['count'] == 0
class TestNormalizedOutputValueDecimalField(TestCase):
    """DecimalField.to_representation behaviour with the normalize_output flag."""

    def test_normalize_output(self):
        # Trailing zeros are stripped when normalize_output=True.
        field = serializers.DecimalField(max_digits=4, decimal_places=3, normalize_output=True)
        output = field.to_representation(Decimal('1.000'))
        assert (output == '1')

    def test_non_normalize_output(self):
        # Full precision is preserved when normalize_output=False.
        field = serializers.DecimalField(max_digits=4, decimal_places=3, normalize_output=False)
        output = field.to_representation(Decimal('1.000'))
        assert (output == '1.000')

    def test_normalize_coerce_to_string(self):
        # FIX: renamed from test_normalize_coeherce_to_string (typo).
        # coerce_to_string=False returns the normalized Decimal itself.
        field = serializers.DecimalField(max_digits=4, decimal_places=3, normalize_output=True, coerce_to_string=False)
        output = field.to_representation(Decimal('1.000'))
        assert (output == Decimal('1'))
@pytest.mark.skipif((django.VERSION > (1, 9)), reason='MIDDLEWARE_CLASSES removed in Django 2.0')
def test_user_info_with_non_django_auth(django_elasticapm_client, client):
    """Without django.contrib.auth, captured errors carry an empty user context.

    NOTE(review): the decorator line had lost its ``@pytest.mark`` prefix in
    this copy of the source; restored the marker.
    """
    # BUG FIX: the original combined the two override_settings context
    # managers with `and`, which only entered the second one (`A and B`
    # evaluates to B when A is truthy); enter both in a single `with`.
    with override_settings(INSTALLED_APPS=[app for app in settings.INSTALLED_APPS if (app != 'django.contrib.auth')]), override_settings(MIDDLEWARE_CLASSES=[m for m in settings.MIDDLEWARE_CLASSES if (m != 'django.contrib.auth.middleware.AuthenticationMiddleware')]):
        with pytest.raises(Exception):
            resp = client.get(reverse('elasticapm-raise-exc'))
        assert (len(django_elasticapm_client.events[ERROR]) == 1)
        event = django_elasticapm_client.events[ERROR][0]
        assert (event['context']['user'] == {})
class Valve():
    """Manages one datapath: builds the OpenFlow messages ("ofmsgs") that
    configure and maintain it. Methods generally return lists of messages
    for the caller to send to the switch.
    """
    # __slots__ bounds per-instance attributes (memory + typo protection).
    __slots__ = ['_coprocessor_manager', '_dot1x_manager', '_last_advertise_sec', '_last_fast_advertise_sec', '_last_lldp_advertise_sec', '_last_packet_in_sec', '_packet_in_count_sec', '_port_highwater', '_route_manager_by_eth_type', '_route_manager_by_ipv', '_lldp_manager', '_managers', '_output_only_manager', 'switch_manager', 'stack_manager', 'acl_manager', 'dot1x', 'dp', 'logger', 'logname', 'metrics', 'notifier', 'stale_root', 'ofchannel_logger', 'pipeline', 'recent_ofmsgs']
    # Class-level feature flags; passed to/read by managers (e.g. DEC_TTL is
    # handed to the route managers in dp_init).
    DEC_TTL = True
    USE_BARRIERS = True
    STATIC_TABLE_IDS = False
    GROUPS = True
def __init__(self, dp, logname, metrics, notifier, dot1x):
self.dot1x = dot1x
self.dp = dp
self.logname = logname
self.metrics = metrics
self.notifier = notifier
self.ofchannel_logger = None
self.logger = None
self.recent_ofmsgs = deque(maxlen=32)
self.stale_root = False
self._packet_in_count_sec = None
self._last_packet_in_sec = None
self._last_advertise_sec = None
self._last_fast_advertise_sec = None
self._last_lldp_advertise_sec = None
self.dp_init()
def _port_vlan_labels(self, port, vlan):
return dict(self.dp.port_labels(port.number), vlan=vlan.vid)
def _inc_var(self, var, labels=None, val=1):
if (labels is None):
labels = self.dp.base_prom_labels()
self.metrics.inc_var(var, labels, val)
def _set_var(self, var, val, labels=None):
if (labels is None):
labels = self.dp.base_prom_labels()
metrics_var = getattr(self.metrics, var)
metrics_var.labels(**labels).set(val)
def _set_port_var(self, var, val, port):
self._set_var(var, val, labels=self.dp.port_labels(port.number))
def _inc_port_var(self, var, port, val=1):
self._inc_var(var, labels=self.dp.port_labels(port.number), val=val)
def _remove_var(self, var, labels=None):
if (labels is None):
labels = self.dp.base_prom_labels()
metrics_var = getattr(self.metrics, var)
label_values = [labels[key] for key in metrics_var._labelnames]
metrics_var.remove(*label_values)
def close_logs(self):
if (self.logger is not None):
valve_util.close_logger(self.logger.logger)
valve_util.close_logger(self.ofchannel_logger)
    def dp_init(self, new_dp=None, valves=None):
        """(Re)build all per-datapath state and manager objects.

        Args:
            new_dp: optional replacement DP config; dynamic state is cloned
                from the current DP before switching over.
            valves: optional iterable of all valves, whose DPs are passed to
                the state-cloning step.
        """
        if new_dp:
            dps = None
            if valves:
                dps = [valve.dp for valve in valves]
            # Carry dynamic state over from the old DP config.
            new_dp.clone_dyn_state(self.dp, dps)
            self.dp = new_dp
        self.close_logs()
        self.logger = ValveLogger(logging.getLogger((self.logname + '.valve')), self.dp.dp_id, self.dp.name)
        self.ofchannel_logger = None
        # Reset rate-limit and advertisement bookkeeping.
        self._packet_in_count_sec = 0
        self._last_packet_in_sec = 0
        self._last_advertise_sec = 0
        self._last_fast_advertise_sec = 0
        self._last_lldp_advertise_sec = 0
        self._route_manager_by_ipv = {}
        self._route_manager_by_eth_type = {}
        self._port_highwater = {}
        self.dp.reset_refs()
        # Zero the per-(vlan, port) learned-host highwater marks.
        for vlan_vid in self.dp.vlans.keys():
            self._port_highwater[vlan_vid] = {}
            for port_number in self.dp.ports.keys():
                self._port_highwater[vlan_vid][port_number] = 0
        self._output_only_manager = OutputOnlyManager(self.dp.tables['vlan'], self.dp.highest_priority)
        # 802.1X manager only when configured and at least one port uses it.
        self._dot1x_manager = None
        if (self.dp.dot1x and self.dp.dot1x_ports()):
            nfv_sw_port = self.dp.ports[self.dp.dot1x['nfv_sw_port']]
            self._dot1x_manager = Dot1xManager(self.dot1x, self.dp.dp_id, self.dp.dot1x_ports, nfv_sw_port)
        self.pipeline = valve_pipeline.ValvePipeline(self.dp)
        self.acl_manager = None
        if self.dp.has_acls:
            self.acl_manager = valve_acl.ValveAclManager(self.dp.tables.get('port_acl'), self.dp.tables.get('vlan_acl'), self.dp.tables.get('egress_acl'), self.pipeline, self.dp.meters, self.dp.dp_acls)
        self.stack_manager = None
        if self.dp.stack:
            self.stack_manager = ValveStackManager(self.logger, self.dp, self.dp.stack, self.dp.tunnel_acls, self.acl_manager, self.dp.tables['eth_dst'])
        self._lldp_manager = ValveLLDPManager(self.dp.tables['vlan'], self.dp.highest_priority, self.logger, self.notify, self._inc_var, self._set_var, self._set_port_var, self.stack_manager)
        self.switch_manager = valve_switch.valve_switch_factory(self.logger, self.dp, self.pipeline, self.stack_manager)
        self._coprocessor_manager = None
        copro_table = self.dp.tables.get('copro', None)
        if copro_table:
            self._coprocessor_manager = CoprocessorManager(self.dp.ports, copro_table, self.dp.tables['vlan'], self.dp.tables['eth_src'], self.switch_manager.output_table, self.dp.low_priority, self.dp.high_priority)
        # Instantiate an IPv4/IPv6 route manager for each FIB table present.
        for (ipv, route_manager_class, neighbor_timeout) in ((4, valve_route.ValveIPv4RouteManager, self.dp.arp_neighbor_timeout), (6, valve_route.ValveIPv6RouteManager, self.dp.nd_neighbor_timeout)):
            fib_table_name = ('ipv%u_fib' % ipv)
            if (fib_table_name not in self.dp.tables):
                continue
            fib_table = self.dp.tables[fib_table_name]
            proactive_learn = getattr(self.dp, ('proactive_learn_v%u' % ipv))
            route_manager = route_manager_class(self.logger, self.notify, self.dp.global_vlan, neighbor_timeout, self.dp.max_hosts_per_resolve_cycle, self.dp.max_host_fib_retry_count, self.dp.max_resolve_backoff_time, proactive_learn, self.DEC_TTL, self.dp.multi_out, fib_table, self.dp.tables['vip'], self.pipeline, self.dp.routers, self.stack_manager)
            self._route_manager_by_ipv[route_manager.IPV] = route_manager
            # Mark routing active on VLANs that have VIPs for this IP version.
            for vlan in self.dp.vlans.values():
                if vlan.faucet_vips_by_ipv(route_manager.IPV):
                    route_manager.active = True
                    vips_str = list((str(vip) for vip in vlan.faucet_vips_by_ipv(route_manager.IPV)))
                    self.logger.info(('IPv%u routing is active on %s with VIPs %s' % (route_manager.IPV, vlan, vips_str)))
            for eth_type in route_manager.CONTROL_ETH_TYPES:
                self._route_manager_by_eth_type[eth_type] = route_manager
        # Ordered tuple of all instantiated managers, for fan-out operations.
        self._managers = tuple((manager for manager in (self.pipeline, self.switch_manager, self.acl_manager, self.stack_manager, self._lldp_manager, self._route_manager_by_ipv.get(4), self._route_manager_by_ipv.get(6), self._coprocessor_manager, self._output_only_manager, self._dot1x_manager) if (manager is not None)))
def notify(self, event_dict):
self.notifier.notify(self.dp.dp_id, self.dp.name, event_dict)
def switch_features(self, _msg):
ofmsgs = [valve_of.faucet_config(), valve_of.faucet_async(packet_in=False, notify_flow_removed=False, port_status=False), valve_of.desc_stats_request()]
ofmsgs.extend(self._delete_all_valve_flows())
return ofmsgs
def ofchannel_log(self, ofmsgs):
if (self.dp is None):
return
if (self.dp.ofchannel_log is None):
return
if (self.ofchannel_logger is None):
self.ofchannel_logger = valve_util.get_logger(self.dp.ofchannel_log, self.dp.ofchannel_log, logging.DEBUG, 0)
log_prefix = ('%u %s' % (len(ofmsgs), valve_util.dpid_log(self.dp.dp_id)))
for (i, ofmsg) in enumerate(ofmsgs, start=1):
self.ofchannel_logger.debug('%u/%s %s', i, log_prefix, ofmsg)
def dot1x_event(self, event_dict):
self.notify({'DOT1X': event_dict})
def floods_to_root(self):
return self.switch_manager.floods_to_root(self.dp)
def _delete_all_valve_flows(self):
ofmsgs = [valve_table.wildcard_table.flowdel()]
if (self.dp.all_meters or self.dp.packetin_pps or self.dp.slowpath_pps):
ofmsgs.append(valve_of.meterdel())
if self.dp.group_table:
ofmsgs.append(self.dp.groups.delete_all())
return ofmsgs
def _pipeline_flows():
return []
def _add_default_drop_flows(self):
ofmsgs = []
for table in self.dp.tables.values():
miss_table_name = table.table_config.miss_goto
if miss_table_name:
miss_table = self.dp.tables[miss_table_name]
ofmsgs.append(table.flowmod(priority=self.dp.lowest_priority, inst=(table.goto_miss(miss_table),)))
else:
ofmsgs.append(table.flowdrop(priority=self.dp.lowest_priority))
return ofmsgs
def _add_packetin_meter(self):
if self.dp.packetin_pps:
return [valve_of.controller_pps_meteradd(pps=self.dp.packetin_pps)]
return []
def _add_slowpath_meter(self):
if self.dp.slowpath_pps:
return [valve_of.slowpath_pps_meteradd(pps=self.dp.slowpath_pps)]
return []
def _add_default_flows(self):
ofmsgs = []
ofmsgs.extend(self._delete_all_valve_flows())
ofmsgs.extend(self._add_packetin_meter())
ofmsgs.extend(self._add_slowpath_meter())
if self.dp.meters:
for meter in self.dp.meters.values():
ofmsgs.append(meter.entry_msg)
ofmsgs.extend(self._add_default_drop_flows())
return ofmsgs
def add_vlan(self, vlan, cold_start=False):
self.logger.info(('Configuring %s' % vlan))
ofmsgs = []
if vlan.reserved_internal_vlan:
return ofmsgs
for manager in self._managers:
ofmsgs.extend(manager.add_vlan(vlan, cold_start))
return ofmsgs
def add_vlans(self, vlans, cold_start=False):
ofmsgs = []
for vlan in vlans:
ofmsgs.extend(self.add_vlan(vlan, cold_start=cold_start))
return ofmsgs
def del_vlan(self, vlan):
self.logger.info(('Delete VLAN %s' % vlan))
ofmsgs = []
for manager in self._managers:
ofmsgs.extend(manager.del_vlan(vlan))
expired_hosts = list(vlan.dyn_host_cache.values())
for entry in expired_hosts:
self._update_expired_host(entry, vlan)
vlan.reset_caches()
return ofmsgs
def del_vlans(self, vlans):
ofmsgs = []
for vlan in vlans:
ofmsgs.extend(self.del_vlan(vlan))
return ofmsgs
def _get_all_configured_port_nos(self):
ports = set(self.dp.non_vlan_ports())
for vlan in self.dp.vlans.values():
ports.update(set(vlan.get_ports()))
ports = {port.number for port in ports}
return ports
def _get_ports_status(discovered_up_port_nos, all_configured_port_nos):
port_status = {port_no: (port_no in discovered_up_port_nos) for port_no in all_configured_port_nos}
all_up_port_nos = {port_no for (port_no, status) in port_status.items() if status}
return (port_status, all_up_port_nos)
    def _cold_start_ports_and_vlans(self, now, discovered_up_port_nos):
        """Full cold-start configuration for all ports and VLANs.

        Ports with opstatus_reconf disabled are always treated as up.
        Returns the complete list of OpenFlow messages for the cold start.
        """
        always_up_port_nos = {port.number for port in self.dp.ports.values() if (not port.opstatus_reconf)}
        discovered_up_port_nos = discovered_up_port_nos.union(always_up_port_nos)
        all_configured_port_nos = self._get_all_configured_port_nos()
        (port_status, all_up_port_nos) = self._get_ports_status(discovered_up_port_nos, all_configured_port_nos)
        # Record and broadcast the initial status of every configured port.
        for (port_no, status) in port_status.items():
            self._set_port_status(port_no, status, now)
        self.notify({'PORTS_STATUS': port_status})
        ofmsgs = []
        ofmsgs.extend(self._add_default_flows())
        for manager in self._managers:
            ofmsgs.extend(manager.initialise_tables())
        # Re-enable async messages only once the default flows are in place.
        ofmsgs.append(valve_of.faucet_async(packet_in=True, port_status=True, notify_flow_removed=self.dp.use_idle_timeout))
        ofmsgs.extend(self.ports_add(all_up_port_nos, cold_start=True, log_msg='configured'))
        ofmsgs.extend(self.add_vlans(self.dp.vlans.values(), cold_start=True))
        return ofmsgs
def ofdescstats_handler(self, body):
labels = dict(self.dp.base_prom_labels(), mfr_desc=valve_util.utf8_decode(body.mfr_desc), hw_desc=valve_util.utf8_decode(body.hw_desc), sw_desc=valve_util.utf8_decode(body.sw_desc), serial_num=valve_util.utf8_decode(body.serial_num), dp_desc=valve_util.utf8_decode(body.dp_desc))
self._set_var('of_dp_desc_stats', self.dp.dp_id, labels=labels)
def _set_port_status(self, port_no, port_status, now):
if port_status:
self.dp.dyn_up_port_nos.add(port_no)
else:
self.dp.dyn_up_port_nos -= set([port_no])
port = self.dp.ports.get(port_no, None)
if (port is None):
return
self._set_port_var('port_status', port_status, port)
port.dyn_update_time = now
_port_status_codes = {valve_of.ofp.OFPPR_ADD: 'ADD', valve_of.ofp.OFPPR_DELETE: 'DELETE', valve_of.ofp.OFPPR_MODIFY: 'MODIFY'}
def _decode_port_status(cls, reason):
return cls._port_status_codes.get(reason, 'UNKNOWN')
    def port_desc_stats_reply_handler(self, port_desc_stats, _other_valves, now):
        """Reconcile port state from a port description stats reply.

        Compares the ports the switch reports with configured/known state and
        fabricates synthetic port status events for any differences.
        Returns a dict of valve -> ofmsgs.
        """
        ofmsgs = []
        self.logger.info('port desc stats')

        def _fabricate(port_no, reason, status):
            # Synthesize a port status event as if the switch had sent one,
            # collecting any resulting messages for this valve.
            self.logger.info(('Port %s fabricating %s status %s' % (port_no, Valve._decode_port_status(reason), status)))
            _ofmsgs_by_valve = self.port_status_handler(port_no, reason, (0 if status else valve_of.ofp.OFPPS_LINK_DOWN), _other_valves, now)
            if (self in _ofmsgs_by_valve):
                ofmsgs.extend(_ofmsgs_by_valve[self])
        curr_dyn_port_nos = set((desc.port_no for desc in port_desc_stats))
        # The switch's local (CPU) port is not a real data port.
        curr_dyn_port_nos -= set([valve_of.ofp.OFPP_LOCAL])
        prev_dyn_up_port_nos = set(self.dp.dyn_up_port_nos)
        curr_dyn_up_port_nos = set((desc.port_no for desc in port_desc_stats if valve_of.port_status_from_state(desc.state)))
        conf_port_nos = set(self.dp.ports.keys())
        no_conf_port_nos = (curr_dyn_port_nos - conf_port_nos)
        if (conf_port_nos != curr_dyn_port_nos):
            self.logger.info(('delta in known ports: conf %s dyn %s' % (conf_port_nos, curr_dyn_port_nos)))
        if (prev_dyn_up_port_nos != curr_dyn_up_port_nos):
            self.logger.info(('delta in up state: %s => %s' % (prev_dyn_up_port_nos, curr_dyn_up_port_nos)))
        # Unconfigured ports: only report up/down transitions.
        for port_no in no_conf_port_nos:
            prev_up = (port_no in prev_dyn_up_port_nos)
            curr_up = (port_no in curr_dyn_up_port_nos)
            if (prev_up != curr_up):
                _fabricate(port_no, valve_of.ofp.OFPPR_MODIFY, curr_up)
        # Configured ports: fabricate ADD/MODIFY/DELETE as appropriate.
        for port_no in conf_port_nos:
            prev_up = (port_no in prev_dyn_up_port_nos)
            curr_up = (port_no in curr_dyn_up_port_nos)
            if (prev_up == curr_up):
                continue
            if (port_no in curr_dyn_port_nos):
                if (not prev_up):
                    _fabricate(port_no, valve_of.ofp.OFPPR_ADD, True)
                else:
                    _fabricate(port_no, valve_of.ofp.OFPPR_MODIFY, curr_up)
            else:
                # Port known before but missing from the reply: treat as deleted.
                _fabricate(port_no, valve_of.ofp.OFPPR_DELETE, False)
        ofmsgs_by_valve = {self: ofmsgs}
        return ofmsgs_by_valve
    def port_status_handler(self, port_no, reason, state, _other_valves, now):
        """Handle an OpenFlow port status message.

        Updates tracked status, notifies, and adds/deletes the port when its
        effective up/down state changed. Returns a dict of valve -> ofmsgs.
        """
        port_status = valve_of.port_status_from_state(state)
        self.notify({'PORT_CHANGE': {'port_no': port_no, 'reason': Valve._decode_port_status(reason), 'state': state, 'status': port_status}})
        self._set_port_status(port_no, port_status, now)
        if (not self.dp.port_no_valid(port_no)):
            return {}
        port = self.dp.ports[port_no]
        # Ports with opstatus_reconf disabled are not reconfigured on status.
        if (not port.opstatus_reconf):
            return {}
        if (reason not in Valve._port_status_codes):
            self.logger.warning(('Unhandled port status %s/state %s for %s' % (reason, state, port)))
            return {}
        ofmsgs_by_valve = {self: []}
        # ADD, or MODIFY with the port reported up, means the port is now up.
        new_port_status = ((reason == valve_of.ofp.OFPPR_ADD) or ((reason == valve_of.ofp.OFPPR_MODIFY) and port_status))
        blocked_down_state = ((state & valve_of.ofp.OFPPS_BLOCKED) or (state & valve_of.ofp.OFPPS_LINK_DOWN))
        live_state = (state & valve_of.ofp.OFPPS_LIVE)
        decoded_reason = Valve._decode_port_status(reason)
        state_description = ('%s up status %s reason %s state %s' % (port, port_status, decoded_reason, state))
        ofmsgs = []
        if (new_port_status != port.dyn_phys_up):
            self.logger.info(('status change: %s' % state_description))
            if new_port_status:
                ofmsgs = self.port_add(port_no)
            else:
                # Keep the host cache: the port may come straight back.
                ofmsgs = self.port_delete(port_no, keep_cache=True, other_valves=_other_valves)
        else:
            self.logger.info(('status did not change: %s' % state_description))
            if new_port_status:
                if blocked_down_state:
                    # Status says up but state contradicts it; force down.
                    self.logger.info(('%s state down or blocked despite status up, setting to status down' % port))
                    ofmsgs = self.port_delete(port_no, keep_cache=True, other_valves=_other_valves)
                if (not live_state):
                    self.logger.info(('%s state OFPPS_LIVE reset, ignoring in expectation of port down' % port))
        ofmsgs_by_valve[self].extend(ofmsgs)
        return ofmsgs_by_valve
def advertise(self, now, _other_values):
if ((not self.dp.advertise_interval) or ((now - self._last_advertise_sec) < self.dp.advertise_interval)):
return {}
self._last_advertise_sec = now
ofmsgs = []
for route_manager in self._route_manager_by_ipv.values():
for vlan in self.dp.vlans.values():
ofmsgs.extend(route_manager.advertise(vlan))
if ofmsgs:
return {self: ofmsgs}
return {}
    def _send_lldp_beacon_on_port(self, port, now):
        """Build a packet-out message carrying an LLDP beacon for *port*."""
        chassis_id = str(self.dp.faucet_dp_mac)
        # TTL is three send intervals, capped at the 16-bit maximum.
        ttl = min((self.dp.lldp_beacon.get('send_interval', self.dp.DEFAULT_LLDP_SEND_INTERVAL) * 3), ((2 ** 16) - 1))
        # Per-port organizational TLVs plus FAUCET's own TLVs (incl. stack state).
        org_tlvs = [(tlv['oui'], tlv['subtype'], tlv['info']) for tlv in port.lldp_beacon['org_tlvs']]
        org_tlvs.extend(valve_packet.faucet_lldp_tlvs(self.dp))
        org_tlvs.extend(valve_packet.faucet_lldp_stack_state_tlvs(self.dp, port))
        system_name = port.lldp_beacon['system_name']
        if (not system_name):
            # Fall back to the DP-level system name, then the DP name.
            system_name = self.dp.lldp_beacon.get('system_name', self.dp.name)
        lldp_beacon_pkt = valve_packet.lldp_beacon(self.dp.faucet_dp_mac, chassis_id, port.number, ttl, org_tlvs=org_tlvs, system_name=system_name, port_descr=port.lldp_beacon['port_descr'])
        port.dyn_last_lldp_beacon_time = now
        return valve_of.packetout(port.number, bytes(lldp_beacon_pkt.data))
def fast_advertise(self, now, _other_valves):
if ((not self.dp.fast_advertise_interval) or ((now - self._last_fast_advertise_sec) < self.dp.fast_advertise_interval)):
return {}
self._last_fast_advertise_sec = now
ofmsgs = []
for port in self.dp.lacp_active_ports:
ofmsgs.extend(self.switch_manager.lacp_advertise(port))
lldp_send_interval = self.dp.lldp_beacon.get('send_interval')
if ((not lldp_send_interval) or ((now - self._last_lldp_advertise_sec) >= lldp_send_interval)):
ports = self.dp.lldp_beacon_send_ports(now)
ofmsgs.extend([self._send_lldp_beacon_on_port(port, now) for port in ports])
self._last_lldp_advertise_sec = now
if ofmsgs:
return {self: ofmsgs}
return {}
def fast_state_expire(self, now, other_valves):
if self.dp.lldp_beacon:
for port in self.dp.ports.values():
if port.dyn_lldp_beacon_recv_state:
age = (now - port.dyn_lldp_beacon_recv_time)
if (age > (self.dp.lldp_beacon['send_interval'] * port.max_lldp_lost)):
self.logger.info(('LLDP for %s inactive after %us' % (port, age)))
port.dyn_lldp_beacon_recv_state = None
return self._lldp_manager.update_stack_link_state(self.dp.stack_ports(), now, self, other_valves)
def _reset_dp_status(self):
self._set_var('dp_status', int(self.dp.dyn_running))
    def datapath_connect(self, now, discovered_up_ports):
        """Handle a datapath (re)connecting: cold start all ports/VLANs.

        Returns the OpenFlow messages configuring the freshly connected DP.
        """
        self.logger.info('Cold start configuring DP')
        self.notify({'DP_CHANGE': {'reason': 'cold_start'}})
        ofmsgs = self._cold_start_ports_and_vlans(now, discovered_up_ports)
        # Mark the DP cold-started only after the messages were built.
        self.dp.cold_start(now)
        self._inc_var('of_dp_connections')
        self._reset_dp_status()
        self.logger.info(self.dp.pipeline_str())
        return ofmsgs
    def datapath_disconnect(self, now):
        """Handle the datapath disconnecting: mark down and expire all ports."""
        self.logger.warning('datapath down')
        self.notify({'DP_CHANGE': {'reason': 'disconnect'}})
        self.dp.dyn_running = False
        self._inc_var('of_dp_disconnections')
        self._reset_dp_status()
        # No switch to program; only local state is updated.
        self.ports_delete(self.dp.ports.keys(), now=now)
    def _port_delete_flows_state(self, port, keep_cache=False):
        """Messages removing a port's flows; optionally expire its cached hosts.

        Args:
            port: port being deleted.
            keep_cache: when True, leave the VLAN host caches untouched
                (used when the port is expected to return).
        """
        ofmsgs = []
        for route_manager in self._route_manager_by_ipv.values():
            ofmsgs.extend(route_manager.expire_port_nexthops(port))
        for manager in self._managers:
            ofmsgs.extend(manager.del_port(port))
        if (not keep_cache):
            for vlan in port.vlans():
                for entry in port.hosts([vlan]):
                    self._update_expired_host(entry, vlan)
                vlan.clear_cache_hosts_on_port(port)
        return ofmsgs
    def ports_add(self, port_nums, cold_start=False, log_msg='up'):
        """Messages bringing up the given ports and (re)adding their VLANs.

        Args:
            port_nums: iterable of port numbers to bring up.
            cold_start: True during a DP cold start (VLANs added elsewhere).
            log_msg: suffix for the per-port log line.
        """
        ofmsgs = []
        vlans_with_ports_added = set()
        for port_num in port_nums:
            if (not self.dp.port_no_valid(port_num)):
                self.logger.info(('Ignoring port:%u not present in configuration file' % port_num))
                continue
            port = self.dp.ports[port_num]
            port.dyn_phys_up = True
            self.logger.info(('%s (%s) %s' % (port, port.description, log_msg)))
            # Administratively disabled ports get no flows.
            if (not port.running()):
                continue
            for manager in self._managers:
                ofmsgs.extend(manager.add_port(port))
            if port.lacp:
                ofmsgs.extend(self.lacp_update(port, False, cold_start=cold_start))
            # Stack ports can carry all VLANs, not just their own.
            if port.stack:
                port_vlans = self.dp.vlans.values()
            else:
                port_vlans = port.vlans()
            vlans_with_ports_added.update(set(port_vlans))
        # On cold start the caller adds all VLANs itself.
        if (not cold_start):
            ofmsgs.extend(self.add_vlans(vlans_with_ports_added))
        return ofmsgs
def port_add(self, port_num):
return self.ports_add([port_num])
    def ports_delete(self, port_nums, log_msg='down', keep_cache=False, other_valves=None, now=None):
        """Messages taking down the given ports and updating their VLANs.

        Args:
            port_nums: iterable of port numbers to take down.
            log_msg: suffix for the per-port log line.
            keep_cache: keep learned-host caches (port expected back).
            other_valves: other valves, forwarded to LACP updates.
            now: when set, also record the down status timestamp.
        """
        ofmsgs = []
        vlans_with_deleted_ports = set()
        for port_num in port_nums:
            if (not self.dp.port_no_valid(port_num)):
                continue
            port = self.dp.ports[port_num]
            port.dyn_phys_up = False
            self.logger.info(('%s (%s) %s' % (port, port.description, log_msg)))
            if now:
                self._set_port_status(port_num, False, now)
            vlans_with_deleted_ports.update(set(port.vlans()))
            # LACP ports are handled via LACP state, others have flows removed.
            if port.lacp:
                ofmsgs.extend(self.lacp_update(port, False, other_valves=other_valves))
            else:
                ofmsgs.extend(self._port_delete_flows_state(port, keep_cache=keep_cache))
        for vlan in vlans_with_deleted_ports:
            ofmsgs.extend(self.switch_manager.update_vlan(vlan))
        return ofmsgs
def port_delete(self, port_num, keep_cache=False, other_valves=None):
return self.ports_delete([port_num], keep_cache=keep_cache, other_valves=other_valves)
    def _reset_lacp_status(self, port):
        """Refresh LACP metrics for *port* and emit a LAG_CHANGE notification."""
        lacp_state = port.actor_state()
        lacp_role = port.lacp_port_state()
        self._set_port_var('port_lacp_state', lacp_state, port)
        self._inc_port_var('port_lacp_state_change_count', port)
        self._set_port_var('lacp_port_id', port.lacp_port_id, port)
        self._set_port_var('port_lacp_role', lacp_role, port)
        self.notify({'LAG_CHANGE': {'port_no': port.number, 'state': lacp_state, 'role': lacp_role}})
    def lacp_update(self, port, lacp_up, now=None, lacp_pkt=None, other_valves=None, cold_start=False):
        """Update LACP actor/selection state for *port* and reprogram forwarding.

        Returns the resulting OpenFlow messages (may be empty when nothing
        changed).
        """
        ofmsgs = []
        if cold_start:
            # Prime the state machines with cold_start=True before the
            # regular (cold_start=False) update below.
            self.switch_manager.lacp_update_actor_state(port, lacp_up, now, lacp_pkt, cold_start=True)
            self.switch_manager.lacp_update_port_selection_state(port, self, other_valves, cold_start=True)
        updated = self.switch_manager.lacp_update_actor_state(port, lacp_up, now, lacp_pkt, cold_start=False)
        select_updated = self.switch_manager.lacp_update_port_selection_state(port, self, other_valves, cold_start=False)
        if (updated or select_updated):
            self._reset_lacp_status(port)
            if (port.is_port_selected() and port.is_actor_up()):
                # Selected and up: enable forwarding and re-add VLAN flows.
                ofmsgs.extend(self.switch_manager.enable_forwarding(port))
                ofmsgs.extend(self.add_vlans(port.vlans()))
            else:
                ofmsgs.extend(self.switch_manager.disable_forwarding(port))
                if (not cold_start):
                    # Cycle the port's flows so stale state is removed.
                    ofmsgs.extend(self.switch_manager.del_port(port))
                    ofmsgs.extend(self.switch_manager.add_port(port))
                    ofmsgs.extend(self.add_vlans(port.vlans()))
        return ofmsgs
    def lldp_handler(self, now, pkt_meta, other_valves):
        """Handle a received LLDP packet; verify FAUCET stack neighbors.

        Returns a dict of valve -> ofmsgs (empty for non-LLDP/unparseable
        packets or plain third-party LLDP).
        """
        if (pkt_meta.eth_type != valve_of.ether.ETH_TYPE_LLDP):
            return {}
        pkt_meta.reparse_all()
        lldp_pkt = valve_packet.parse_lldp(pkt_meta.pkt)
        if (not lldp_pkt):
            return {}
        port = pkt_meta.port
        # FAUCET-originated LLDP carries remote DP/port/state TLVs.
        (remote_dp_id, remote_dp_name, remote_port_id, remote_port_state) = valve_packet.parse_faucet_lldp(lldp_pkt, self.dp.faucet_dp_mac)
        port.dyn_lldp_beacon_recv_time = now
        if (port.dyn_lldp_beacon_recv_state != remote_port_state):
            chassis_id = str(self.dp.faucet_dp_mac)
            if remote_port_state:
                self.logger.info(('LLDP on %s, %s from %s (remote %s, port %u) state %s' % (chassis_id, port, pkt_meta.eth_src, valve_util.dpid_log(remote_dp_id), remote_port_id, port.stack_state_name(remote_port_state))))
            port.dyn_lldp_beacon_recv_state = remote_port_state
        # Warn when the sender is not the configured LLDP peer for this port.
        peer_mac_src = self.dp.ports[port.number].lldp_peer_mac
        if (peer_mac_src and (peer_mac_src != pkt_meta.eth_src)):
            self.logger.warning(('Unexpected LLDP peer. Received pkt from %s instead of %s' % (pkt_meta.eth_src, peer_mac_src)))
        ofmsgs_by_valve = {}
        if (remote_dp_id and remote_port_id):
            # FAUCET neighbor: hand off to the LLDP manager for verification.
            self.logger.debug(('FAUCET LLDP on %s from %s (remote %s, port %u)' % (port, pkt_meta.eth_src, valve_util.dpid_log(remote_dp_id), remote_port_id)))
            ofmsgs_by_valve.update(self._lldp_manager.verify_lldp(port, now, self, other_valves, remote_dp_id, remote_dp_name, remote_port_id, remote_port_state))
        else:
            self.logger.debug(('LLDP on %s from %s: %s' % (port, pkt_meta.eth_src, str(lldp_pkt))))
        return ofmsgs_by_valve
def _control_plane_handler(now, pkt_meta, route_manager):
if ((pkt_meta.eth_dst == pkt_meta.vlan.faucet_mac) or (not valve_packet.mac_addr_is_unicast(pkt_meta.eth_dst))):
return route_manager.control_plane_handler(now, pkt_meta)
return []
def rate_limit_packet_ins(self, now):
if (self._last_packet_in_sec != now):
self._last_packet_in_sec = now
self._packet_in_count_sec = 0
self._packet_in_count_sec += 1
if self.dp.ignore_learn_ins:
if ((self._packet_in_count_sec % self.dp.ignore_learn_ins) == 0):
self._inc_var('of_ignored_packet_ins')
return True
return False
def learn_host(self, now, pkt_meta, other_valves):
    """Learn a host from a packet-in, returning the flow mods to install.

    Args:
        now (float): current epoch time.
        pkt_meta: PacketMeta for the packet the host was seen in.
        other_valves (list): other Valve instances (consulted for stacking).
    Returns:
        list: flow mods to install ([] when the host was not learned).
    """
    stacked_other_valves = set()
    if self.stack_manager:
        stacked_other_valves = self.stack_manager.stacked_valves(other_valves)
    # The port to learn on may differ from the receiving port when stacking.
    learn_port = self.switch_manager.edge_learn_port(stacked_other_valves, pkt_meta)
    if (learn_port is not None):
        (learn_flows, previous_port, update_cache) = self.switch_manager.learn_host_on_vlan_ports(now, learn_port, pkt_meta.vlan, pkt_meta.eth_src, last_dp_coldstart_time=self.dp.dyn_last_coldstart_time)
        if update_cache:
            pkt_meta.vlan.add_cache_host(pkt_meta.eth_src, learn_port, now)
            if (pkt_meta.l3_pkt is None):
                # Parse L3 so the learn log/notification can include IPs.
                pkt_meta.reparse_ip()
            learn_log = ('L2 learned on %s %s (%u hosts total)' % (learn_port, pkt_meta.log(), pkt_meta.vlan.hosts_count()))
            stack_descr = None
            if pkt_meta.port.stack:
                stack_descr = pkt_meta.port.stack_descr()
                learn_log += (' from %s' % stack_descr)
            previous_port_no = None
            if (previous_port is not None):
                previous_port_no = previous_port.number
                if (pkt_meta.port.number != previous_port_no):
                    # The host moved between ports; record where it came from.
                    learn_log += (', moved from %s' % previous_port)
                    if previous_port.stack:
                        learn_log += (' from %s' % previous_port.stack_descr())
            self.logger.info(learn_log)
            learn_labels = dict(self.dp.base_prom_labels(), vid=pkt_meta.vlan.vid, eth_src=pkt_meta.eth_src)
            self._set_var('learned_l2_port', learn_port.number, labels=learn_labels)
            l2_learn_msg = {'port_no': learn_port.number, 'previous_port_no': previous_port_no, 'vid': pkt_meta.vlan.vid, 'eth_src': pkt_meta.eth_src, 'eth_dst': pkt_meta.eth_dst, 'eth_type': pkt_meta.eth_type, 'l3_src_ip': str(pkt_meta.l3_src), 'l3_dst_ip': str(pkt_meta.l3_dst)}
            if stack_descr:
                l2_learn_msg.update({'stack_descr': stack_descr})
            # Publish the learn event to external consumers.
            self.notify({'L2_LEARN': l2_learn_msg})
            return learn_flows
    return []
def parse_rcv_packet(self, in_port, vlan_vid, eth_type, data, orig_len, pkt, eth_pkt, vlan_pkt):
    """Build a PacketMeta for a received packet.

    For packets arriving on the global VLAN, the actual VLAN is recovered
    from the destination MAC and the destination is rewritten to that
    VLAN's router MAC.
    """
    vlan = self.dp.vlans.get(vlan_vid, None)
    pkt_meta = valve_packet.PacketMeta(
        data, orig_len, pkt, eth_pkt, vlan_pkt,
        self.dp.ports[in_port], vlan, eth_pkt.src, eth_pkt.dst, eth_type)
    if vlan_vid == self.dp.global_vlan:
        # On the global VLAN the real VID is encoded in the destination MAC.
        encoded_vid = valve_packet.int_from_mac(pkt_meta.eth_dst)
        pkt_meta.vlan = self.dp.vlans.get(encoded_vid, None)
        if pkt_meta.vlan is not None:
            pkt_meta.eth_dst = pkt_meta.vlan.faucet_mac
    return pkt_meta
def parse_pkt_meta(self, msg):
    """Validate a packet-in message and parse it into a PacketMeta.

    Returns None (after logging where appropriate) when the message should
    be ignored: DP not running, unknown cookie, unexpected punt reason,
    invalid input port, unparseable frame, unknown VLAN, non-unicast or
    all-zero source MAC, or a VLAN/port mismatch on a stacked DP.
    """
    if (not self.dp.dyn_running):
        return None
    if (self.dp.strict_packet_in_cookie and (self.dp.cookie != msg.cookie)):
        self.logger.info(('got packet in with unknown cookie %s' % msg.cookie))
        return None
    if (msg.reason != valve_of.ofp.OFPR_ACTION):
        # Only frames punted by our own flow actions are of interest.
        return None
    if (not msg.match):
        return None
    in_port = msg.match['in_port']
    if ((not in_port) or (not self.dp.port_no_valid(in_port))):
        return None
    if (not msg.data):
        return None
    # Truncate and parse just enough of the frame to get the L2 headers.
    data = bytes(msg.data[:valve_of.MAX_PACKET_IN_BYTES])
    (pkt, eth_pkt, eth_type, vlan_pkt, vlan_vid) = valve_packet.parse_packet_in_pkt(data, max_len=valve_packet.ETH_VLAN_HEADER_SIZE)
    if ((pkt is None) or (eth_pkt is None)):
        self.logger.info(('unparseable packet from port %u' % in_port))
        return None
    if ((vlan_vid is not None) and (vlan_vid not in self.dp.vlans) and (vlan_vid != self.dp.global_vlan)):
        self.logger.info(('packet for unknown VLAN %u' % vlan_vid))
        return None
    pkt_meta = self.parse_rcv_packet(in_port, vlan_vid, eth_type, data, msg.total_len, pkt, eth_pkt, vlan_pkt)
    if (not valve_packet.mac_addr_is_unicast(pkt_meta.eth_src)):
        self.logger.info(('packet with non-unicast eth_src %s port %u' % (pkt_meta.eth_src, in_port)))
        return None
    if valve_packet.mac_addr_all_zeros(pkt_meta.eth_src):
        self.logger.info(('packet with all zeros eth_src %s port %u' % (pkt_meta.eth_src, in_port)))
        return None
    if (self.dp.stack and self.dp.stack.graph):
        # On stacked DPs, non-stack ports must only carry their own VLANs.
        if ((not pkt_meta.port.stack) and pkt_meta.vlan and (pkt_meta.vlan not in pkt_meta.port.tagged_vlans) and (pkt_meta.vlan != pkt_meta.port.native_vlan)):
            self.logger.warning(('packet from non-stack port number %u is not member of VLAN %u' % (pkt_meta.port.number, pkt_meta.vlan.vid)))
            return None
    return pkt_meta
def update_config_metrics(self):
    """Export the DP's table configuration as Prometheus metrics."""
    self.metrics.reset_dpid(self.dp.base_prom_labels())
    self._reset_dp_status()
    tables = self.dp.tables.values()
    name_by_id = {t.table_id: t.name for t in tables}
    for table in tables:
        next_names = [name_by_id[next_id] for next_id in table.next_tables]
        miss_goto = table.table_config.miss_goto
        if miss_goto and miss_goto not in next_names:
            # The miss destination also counts as a reachable next table.
            next_names.append(miss_goto)
        labels = dict(self.dp.base_prom_labels(),
                      table_name=table.name,
                      next_tables=','.join(next_names))
        self._set_var('faucet_config_table_names', table.table_id, labels=labels)
def update_metrics(self, now, updated_port=None, rate_limited=False):
    """Update VLAN and port Prometheus metrics.

    Args:
        now (float): current epoch time.
        updated_port: if set, only this port's VLANs are updated; otherwise
            all VLANs/ports on the DP are updated.
        rate_limited (bool): if True, skip VLANs refreshed more recently
            than dp.metrics_rate_limit_sec ago.
    """
    def _update_vlan(vlan, now, rate_limited):
        # Export per-VLAN metrics; returns False when rate limited.
        if (vlan.dyn_last_updated_metrics_sec and rate_limited):
            if ((now - vlan.dyn_last_updated_metrics_sec) < self.dp.metrics_rate_limit_sec):
                return False
        vlan_labels = dict(self.dp.base_prom_labels(), vlan=vlan.vid)
        self._set_var('vlan_hosts_learned', vlan.hosts_count(), labels=vlan_labels)
        self._set_var('vlan_learn_bans', vlan.dyn_learn_ban_count, labels=vlan_labels)
        for ipv in vlan.ipvs():
            self._set_var('vlan_neighbors', vlan.neigh_cache_count_by_ipv(ipv), labels=dict(vlan_labels, ipv=ipv))
        return True
    def _update_port(vlan, port):
        # Export per-port/per-VLAN learned-host metrics, using a highwater
        # mark so learned_macs slots no longer in use get zeroed.
        port_vlan_labels = self._port_vlan_labels(port, vlan)
        port_vlan_hosts_learned = port.hosts_count(vlans=[vlan])
        self._set_port_var('port_learn_bans', port.dyn_learn_ban_count, port)
        self._set_var('port_vlan_hosts_learned', port_vlan_hosts_learned, labels=port_vlan_labels)
        highwater = self._port_highwater[vlan.vid][port.number]
        stats_stale = vlan.dyn_host_cache_stats_stale.get(port.number, True)
        if ((highwater == port_vlan_hosts_learned) and (not stats_stale)):
            # Nothing changed since the last export.
            return
        if (highwater > port_vlan_hosts_learned):
            # Hosts expired: zero the metric slots past the new count.
            for i in range(port_vlan_hosts_learned, (highwater + 1)):
                self._set_var('learned_macs', 0, dict(port_vlan_labels, n=i))
        self._port_highwater[vlan.vid][port.number] = port_vlan_hosts_learned
        port_vlan_hosts = port.hosts(vlans=[vlan])
        assert (port_vlan_hosts_learned == len(port_vlan_hosts))
        for (i, entry) in enumerate(sorted(port_vlan_hosts)):
            self._set_var('learned_macs', entry.eth_src_int, dict(port_vlan_labels, n=i))
        vlan.dyn_host_cache_stats_stale[port.number] = False
    if updated_port:
        for vlan in updated_port.vlans():
            if ((not vlan.reserved_internal_vlan) and _update_vlan(vlan, now, rate_limited)):
                _update_port(vlan, updated_port)
                vlan.dyn_last_updated_metrics_sec = now
    else:
        for vlan in self.dp.vlans.values():
            if ((not vlan.reserved_internal_vlan) and _update_vlan(vlan, now, rate_limited)):
                for port in vlan.get_ports():
                    _update_port(vlan, port)
                vlan.dyn_last_updated_metrics_sec = now
def _non_vlan_rcv_packet(self, now, other_valves, pkt_meta):
    """Handle a packet-in with no VLAN association: LACP first, then LLDP."""
    self._inc_var('of_non_vlan_packet_ins')
    if pkt_meta.port.lacp:
        lacp_result = self.switch_manager.lacp_handler(
            now, pkt_meta, self, other_valves, self.lacp_update)
        if lacp_result:
            return lacp_result
    return self.lldp_handler(now, pkt_meta, other_valves)
def router_rcv_packet(self, now, pkt_meta):
    """Handle a packet-in on a routed VLAN.

    Hands the packet to the control plane if it claims it; otherwise learns
    a host FIB route from it. In either case gateway resolution and host
    expiry are advanced for the packet's VLAN.
    """
    if not pkt_meta.vlan.faucet_vips:
        return []
    route_manager = self._route_manager_by_eth_type.get(pkt_meta.eth_type, None)
    if route_manager is None or not route_manager.active:
        return []
    pkt_meta.reparse_ip()
    if not pkt_meta.l3_pkt:
        return []
    cp_ofmsgs = self._control_plane_handler(now, pkt_meta, route_manager)
    if cp_ofmsgs:
        ofmsgs = list(cp_ofmsgs)
    else:
        ofmsgs = list(route_manager.add_host_fib_route_from_pkt(now, pkt_meta))
    ofmsgs.extend(route_manager.resolve_gateways(pkt_meta.vlan, now, resolve_all=False))
    ofmsgs.extend(route_manager.resolve_expire_hosts(pkt_meta.vlan, now, resolve_all=False))
    return ofmsgs
def _vlan_rcv_packet(self, now, other_valves, pkt_meta):
    """Handle a packet-in on a known VLAN: apply ban rules or learn the host."""
    self._inc_var('of_vlan_packet_ins')
    ban_rules = self.switch_manager.ban_rules(pkt_meta)
    if ban_rules:
        # Learning is banned for this packet; only install the ban rules.
        return {self: ban_rules}
    return self.switch_manager.learn_host_from_pkt(self, now, pkt_meta, other_valves)
def rcv_packet(self, now, other_valves, pkt_meta):
    """Dispatch a packet-in to the VLAN or non-VLAN handler."""
    handler = (self._non_vlan_rcv_packet if pkt_meta.vlan is None
               else self._vlan_rcv_packet)
    return handler(now, other_valves, pkt_meta)
def _lacp_state_expire(self, now, _other_valves):
    """Take down LAG ports whose LACP updates have timed out.

    Returns:
        dict: Valve -> list of OpenFlow messages.
    """
    ofmsgs_by_valve = defaultdict(list)
    for lag, ports_up in self.dp.lags_up().items():
        for port in ports_up:
            lacp_age = now - port.dyn_lacp_updated_time
            if lacp_age <= self.dp.lacp_timeout:
                continue
            self.logger.info('LAG %s %s expired (age %u)' % (lag, port, lacp_age))
            ofmsgs_by_valve[self].extend(
                self.lacp_update(port, False, now=now, other_valves=_other_valves))
    return ofmsgs_by_valve
def state_expire(self, now, other_valves):
    """Expire LACP state, learned hosts and routes on a running DP.

    Returns:
        dict: Valve -> list of OpenFlow messages (empty when DP not running).
    """
    ofmsgs_by_valve = defaultdict(list)
    if not self.dp.dyn_running:
        return ofmsgs_by_valve
    ofmsgs_by_valve.update(self._lacp_state_expire(now, other_valves))
    for vlan in self.dp.vlans.values():
        expired = self.switch_manager.expire_hosts_from_vlan(vlan, now)
        if not self.dp.idle_dst:
            # No idle timeout on dst rules: delete expired hosts' flows now.
            for entry in expired:
                ofmsgs_by_valve[self].extend(
                    self.switch_manager.delete_host_from_vlan(entry.eth_src, vlan))
        for entry in expired:
            self._update_expired_host(entry, vlan)
        for route_manager in self._route_manager_by_ipv.values():
            ofmsgs_by_valve[self].extend(route_manager.resolve_expire_hosts(vlan, now))
    return ofmsgs_by_valve
def _update_expired_host(self, entry, vlan):
learn_labels = dict(self.dp.base_prom_labels(), vid=vlan.vid, eth_src=entry.eth_src)
self._remove_var('learned_l2_port', labels=learn_labels)
self.notify({'L2_EXPIRE': {'port_no': entry.port.number, 'vid': vlan.vid, 'eth_src': entry.eth_src}})
def _pipeline_diff(self, new_dp):
old_pipeline = self.dp.pipeline_str().splitlines()
new_pipeline = new_dp.pipeline_str().splitlines()
differ = difflib.Differ()
diff = '\n'.join(differ.compare(old_pipeline, new_pipeline))
self.logger.info(('pipeline change: %s' % diff))
def _pipeline_change(self, new_dp):
if new_dp:
if (self.dp.hardware != new_dp.hardware):
return True
old_table_ids = self.dp.pipeline_tableids()
new_table_ids = new_dp.pipeline_tableids()
if (old_table_ids != new_table_ids):
self.logger.info(('table IDs changed, old %s new %s' % (old_table_ids, new_table_ids)))
return True
return False
def _apply_config_changes(self, new_dp, changes, valves=None):
    """Apply a parsed configuration diff to this DP.

    Args:
        new_dp: the new DP configuration object.
        changes: tuple from dp.get_config_changes() describing the diff.
        valves: other Valve instances (forwarded to dp_init).
    Returns:
        tuple: (restart_type, ofmsgs); restart_type is 'cold', 'warm' or
        None (no change), ofmsgs are the flows for a warm restart (empty
        for cold or no change).
    """
    (deleted_ports, changed_ports, added_ports, changed_acl_ports, deleted_vids, changed_vids, all_ports_changed, _, deleted_meters, added_meters, changed_meters) = changes
    restart_type = 'cold'
    ofmsgs = []
    if self._pipeline_change(new_dp):
        # Pipeline (hardware/table IDs) changed: full re-init required.
        self.dp_init(new_dp, valves)
        return (restart_type, ofmsgs)
    if all_ports_changed:
        self.logger.info('all ports changed')
        self.dp_init(new_dp, valves)
        return (restart_type, ofmsgs)
    restart_type = None
    # Any non-empty change element means a warm restart is needed.
    for change in changes:
        if change:
            restart_type = 'warm'
            break
    if (restart_type is None):
        # Nothing changed: just swap in the new config object.
        self.dp_init(new_dp)
        return (restart_type, ofmsgs)
    if deleted_ports:
        ofmsgs.extend(self.ports_delete(deleted_ports))
    if changed_ports:
        # Changed ports are deleted now and re-added after dp_init below.
        ofmsgs.extend(self.ports_delete(changed_ports))
    if deleted_vids:
        deleted_vlans = [self.dp.vlans[vid] for vid in deleted_vids]
        ofmsgs.extend(self.del_vlans(deleted_vlans))
    if changed_meters:
        # A changed meter is implemented as delete + re-add.
        for meter_key in changed_meters:
            old_meter = self.dp.meters.get(meter_key, None)
            new_meter = new_dp.meters.get(meter_key, None)
            if (old_meter and new_meter):
                deleted_meters.add(meter_key)
                added_meters.add(meter_key)
    if self.acl_manager:
        if deleted_meters:
            ofmsgs.extend(self.acl_manager.del_meters(deleted_meters))
    # Switch over to the new configuration before re-adding state.
    self.dp_init(new_dp, valves)
    if self.acl_manager:
        if added_meters:
            ofmsgs.extend(self.acl_manager.add_meters(added_meters))
    if added_ports:
        # Only re-provision ports currently known to be up.
        all_up_port_nos = [port for port in added_ports if (port in self.dp.dyn_up_port_nos)]
        ofmsgs.extend(self.ports_add(all_up_port_nos))
    if changed_ports:
        all_up_port_nos = [port for port in changed_ports if (port in self.dp.dyn_up_port_nos)]
        ofmsgs.extend(self.ports_add(all_up_port_nos))
    if (self.acl_manager and changed_acl_ports):
        for port_num in changed_acl_ports:
            port = self.dp.ports[port_num]
            ofmsgs.extend(self.acl_manager.cold_start_port(port))
    if changed_vids:
        changed_vlans = [self.dp.vlans[vid] for vid in changed_vids]
        ofmsgs.extend(self.del_vlans(changed_vlans))
        ofmsgs.extend(self.add_vlans(changed_vlans, cold_start=True))
    if self.stack_manager:
        ofmsgs.extend(self.stack_manager.add_tunnel_acls())
    return (restart_type, ofmsgs)
def reload_config(self, _now, new_dp, valves=None):
    """Apply a new DP configuration.

    Returns:
        The flows to install for a warm restart, [] when the DP is not
        running, or None for a cold restart (forces DP reconnection).
    """
    changes = self.dp.get_config_changes(self.logger, new_dp)
    restart_type, ofmsgs = self._apply_config_changes(new_dp, changes, valves)
    if restart_type is not None:
        self._inc_var('faucet_config_reload_%s' % restart_type)
        self.logger.info('%s starting' % restart_type)
    if restart_type == 'cold':
        # Cold restart: drop the connection so ports resync on reconnect.
        self.logger.info('forcing DP reconnection to ensure ports are synchronized')
        ofmsgs = None
    elif restart_type == 'warm' and not self.dp.dyn_running:
        ofmsgs = []
    self.notify({'CONFIG_CHANGE': {'restart_type': restart_type}})
    return ofmsgs
def _warm_reconfig_port_native_vlans(self, port, new_dyn_dot1x_native_vlan):
    """Warm-apply a change to a port's dynamic 802.1X native VLAN."""
    ofmsgs = []
    old_vlan = port.dyn_dot1x_native_vlan
    ofmsgs.extend(self.switch_manager.del_port(port))
    port.dyn_dot1x_native_vlan = new_dyn_dot1x_native_vlan
    # Every VLAN the port could belong to needs its port membership rebuilt.
    affected_vlans = {old_vlan, new_dyn_dot1x_native_vlan, port.native_vlan} - {None}
    for vlan in affected_vlans:
        vlan.reset_ports(self.dp.ports.values())
        ofmsgs.extend(self.switch_manager.update_vlan(vlan))
    ofmsgs.extend(self.switch_manager.add_port(port))
    return ofmsgs
def add_dot1x_native_vlan(self, port_num, vlan_name):
    """Set the 802.1X-assigned native VLAN (looked up by name) on a port."""
    port = self.dp.ports[port_num]
    matching = [vlan for vlan in self.dp.vlans.values() if vlan.name == vlan_name]
    if not matching:
        # Unknown VLAN name: nothing to reconfigure.
        return []
    return self._warm_reconfig_port_native_vlans(port, matching[0])
def del_dot1x_native_vlan(self, port_num):
    """Remove any 802.1X-assigned native VLAN from a port."""
    port = self.dp.ports[port_num]
    if port.dyn_dot1x_native_vlan is None:
        # Nothing assigned: nothing to remove.
        return []
    return self._warm_reconfig_port_native_vlans(port, None)
def router_vlan_for_ip_gw(self, vlan, ip_gw):
    """Delegate to the route manager for the gateway's IP version."""
    manager = self._route_manager_by_ipv[ip_gw.version]
    return manager.router_vlan_for_ip_gw(vlan, ip_gw)
def add_route(self, vlan, ip_gw, ip_dst):
    """Add a route via the route manager for the destination's IP version."""
    manager = self._route_manager_by_ipv[ip_dst.version]
    return manager.add_route(vlan, ip_gw, ip_dst)
def del_route(self, vlan, ip_dst):
    """Delete a route via the route manager for the destination's IP version."""
    manager = self._route_manager_by_ipv[ip_dst.version]
    return manager.del_route(vlan, ip_dst)
def resolve_gateways(self, now, _other_valves):
    """Re-resolve gateway next hops on all VLANs of a running DP.

    Returns:
        dict: {self: ofmsgs} when there is anything to send, else {}.
    """
    if not self.dp.dyn_running:
        return {}
    ofmsgs = []
    for route_manager in self._route_manager_by_ipv.values():
        for vlan in self.dp.vlans.values():
            ofmsgs.extend(route_manager.resolve_gateways(vlan, now))
    return {self: ofmsgs} if ofmsgs else {}
def oferror(self, msg):
    """Log an OpenFlow error, correlated with the request that caused it.

    Known-benign errors (re-adding an existing group/meter, referencing a
    group being re-added) are ignored when a group table is in use.
    """
    # Find the message we sent that this error answers (matching xid).
    orig_msgs = [orig_msg for orig_msg in self.recent_ofmsgs if (orig_msg.xid == msg.xid)]
    error_txt = msg
    if orig_msgs:
        error_txt = ('%s caused by %s' % (error_txt, orig_msgs[0]))
    error_type = 'UNKNOWN'
    error_code = 'UNKNOWN'
    try:
        # Translate numeric type/code into symbolic names where known.
        error_tuple = valve_of.OFERROR_TYPE_CODE[msg.type]
        error_type = error_tuple[0]
        error_code = error_tuple[1][msg.code]
    except KeyError:
        pass
    if self.dp.group_table:
        # Expected when groups/meters are re-added over existing state.
        if ((msg.type == valve_of.ofp.OFPET_GROUP_MOD_FAILED) and (msg.code == valve_of.ofp.OFPGMFC_GROUP_EXISTS)):
            return
        if ((msg.type == valve_of.ofp.OFPET_BAD_ACTION) and (msg.code == valve_of.ofp.OFPBAC_BAD_OUT_GROUP)):
            return
        if ((msg.type == valve_of.ofp.OFPET_METER_MOD_FAILED) and (msg.code == valve_of.ofp.OFPMMFC_METER_EXISTS)):
            return
    self._inc_var('of_errors')
    self.logger.error(('OFError type: %s code: %s %s' % (error_type, error_code, error_txt)))
def prepare_send_flows(self, flow_msgs):
    """Reorder, log and account for flows about to be sent.

    None passes through unchanged (it signals a datapath disconnect).
    """
    if flow_msgs is None:
        return None
    reordered = valve_of.valve_flowreorder(flow_msgs, use_barriers=self.USE_BARRIERS)
    self.ofchannel_log(reordered)
    self._inc_var('of_flowmsgs_sent', val=len(reordered))
    # Keep recent messages so OF errors can be correlated by xid.
    self.recent_ofmsgs.extend(reordered)
    return reordered
def send_flows(self, ryu_dp, flow_msgs, now):
    """Send flows to a datapath, or disconnect it when flow_msgs is None."""
    if flow_msgs is None:
        # None signals a forced reconnect (e.g. after a cold restart).
        self.datapath_disconnect(now)
        ryu_dp.close()
        return
    for flow_msg in self.prepare_send_flows(flow_msgs):
        flow_msg.datapath = ryu_dp
        ryu_dp.send_msg(flow_msg)
def flow_timeout(self, now, table_id, match):
    """Delegate flow-timeout handling to the switch manager."""
    result = self.switch_manager.flow_timeout(now, table_id, match)
    return result
def extractThetranslationsargeantWordpressCom(item):
    """Parse a release-feed item from thetranslationsargeant.wordpress.com.

    Returns a release message when a title/tag rule matches, None for
    previews or items with no volume/chapter, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lowered_title = item['title'].lower()
    if not (chp or vol) or 'preview' in lowered_title:
        return None
    if item['tags'] == ['Uncategorized']:
        # Untagged posts: match against known title fragments.
        titlemap = [('TD Chapter ', 'Fun Territory Defense of the Easy-Going Lord ~The Nameless Village Is Made Into the Strongest Fortified City by Production Magic~', 'translated'), ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'), ('Master of Dungeon', 'Master of Dungeon', 'oel')]
        for titlecomponent, name, tl_type in titlemap:
            if titlecomponent.lower() in lowered_title:
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Otherwise match against known tags.
    tagmap = [("let's raise the abandoned cat heroes", 'Lets Raise the Abandoned Cat Heroes An Awesome Demon Hunter Became a Teacher, and Is Missed by His S-Rank Pupils', 'translated'), ('fun territory defense', 'Fun Territory Defense of the Easy-Going Lord ~The Nameless Village Is Made Into the Strongest Fortified City by Production Magic~', 'translated'), ("the magic division leader's contract marriage", "the magic division leader's contract marriage", 'translated'), ('the exiled count acquired the space fortress of a super advanced civilization', 'the exiled count acquired the space fortress of a super advanced civilization', 'translated'), ("reincarnator sanboshi's struggle - called the grandson of the demon king-", "reincarnator sanboshi's struggle - called the grandson of the demon king-", 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Command(object):
    """Thin wrapper around subprocess for running shell commands with retries."""

    def _execute(self, cmd, capture=False, executable=None):
        """Run cmd through a shell once.

        Args:
            cmd (str): shell command line.
            capture (bool): capture stdout/stderr instead of inheriting them.
            executable: shell executable (subprocess default when None).
        Returns:
            CommandOut: decoded stdout/stderr, the command and return code.
        """
        if capture:
            p_stdout = subprocess.PIPE
            p_stderr = subprocess.PIPE
        else:
            p_stdout = None
            p_stderr = None
        pop = subprocess.Popen(cmd, shell=True, executable=executable, stdout=p_stdout, stderr=p_stderr)
        (__stdout, __stderr) = pop.communicate()
        # Bug fix: with capture=False communicate() returns (None, None) and
        # six.text_type(None, 'utf-8') raised TypeError; only decode real output.
        _stdout = six.text_type(__stdout, 'utf-8') if __stdout is not None else ''
        _stderr = six.text_type(__stderr, 'utf-8') if __stderr is not None else ''
        out = CommandOut(_stdout, _stderr, cmd, pop.returncode)
        return out

    def execute(self, cmd, capture=True, try_times=1, interval=1):
        """Run cmd, retrying up to try_times with interval seconds in between.

        Returns:
            CommandOut: the first successful (returncode 0) result.
        Raises:
            CommandError: when every attempt fails (carries the last result).
        """
        out = None
        for i in range(try_times):
            out = self._execute(cmd, capture=capture)
            LOG.info(out.command)
            if out.returncode == 0:
                return out
            # Bug fix: log the captured stdout text, not the CommandOut object
            # (consistent with the stderr line below).
            LOG.error('stdout: %s', out.stdout)
            LOG.error('stderr: %s', out.stderr)
            if (i + 1) >= try_times:
                break
            time.sleep(interval)
        raise CommandError(out)

    def sudo(self, cmd, capture=True, try_times=1, interval=1):
        """Run cmd via sudo, with the same retry semantics as execute()."""
        cmd = ('sudo %s' % cmd)
        return self.execute(cmd, capture=capture, try_times=try_times, interval=interval)
def _load_modules_from_file(filepath: str):
    """Import a Python source file under a collision-proof module name.

    Returns:
        list: a single-element list with the imported module, or [] when
        the import failed (the failure is logged).
    """
    import importlib
    import importlib.machinery
    import importlib.util
    logger.info(f'Importing {filepath}')
    base_name = os.path.splitext(os.path.split(filepath)[-1])[0]
    path_hash = hashlib.sha1(filepath.encode('utf-8')).hexdigest()
    # Hash the path into the module name so distinct files never collide.
    mod_name = f'unusual_prefix_{path_hash}_{base_name}'
    if mod_name in sys.modules:
        # Drop any stale module from a previous import of the same path.
        del sys.modules[mod_name]
    try:
        loader = importlib.machinery.SourceFileLoader(mod_name, filepath)
        spec = importlib.util.spec_from_loader(mod_name, loader)
        module = importlib.util.module_from_spec(spec)
        sys.modules[spec.name] = module
        loader.exec_module(module)
        return [module]
    except Exception:
        msg = traceback.format_exc()
        logger.error(f'Failed to import: {filepath}, error message: {msg}')
        return []
def catch_errors(obj, mth, paramflags, interface, mthname):
    """Wrap a COM method implementation with COM-style error translation.

    Args:
        obj: server object; its _reg_clsid_ (if any) is reported in errors.
        mth: the Python implementation being wrapped.
        paramflags: COM parameter flag tuples (used to detect [out] args).
        interface: COM interface class the method belongs to.
        mthname: method name, used in log messages.
    Returns:
        The wrapper function; its ``has_outargs`` attribute tells the caller
        whether the method declares any [out] parameters.
    """
    clsid = getattr(obj, '_reg_clsid_', None)
    def call_with_this(*args, **kw):
        try:
            result = mth(*args, **kw)
        except ReturnHRESULT as err:
            # Deliberate early exit carrying a specific HRESULT.
            (hresult, text) = err.args
            return ReportError(text, iid=interface._iid_, clsid=clsid, hresult=hresult)
        except (COMError, WindowsError) as details:
            _error('Exception in %s.%s implementation:', interface.__name__, mthname, exc_info=True)
            return HRESULT_FROM_WIN32(winerror(details))
        except E_NotImplemented:
            _warning('Unimplemented method %s.%s called', interface.__name__, mthname)
            return E_NOTIMPL
        except:
            # Catch-all: no Python exception may escape into COM callers.
            _error('Exception in %s.%s implementation:', interface.__name__, mthname, exc_info=True)
            return ReportException(E_FAIL, interface._iid_, clsid=clsid)
        if (result is None):
            # Implementations returning None signal plain success.
            return S_OK
        return result
    if (paramflags is None):
        has_outargs = False
    else:
        # Flag bit 2 marks [out] parameters.
        has_outargs = bool([x[0] for x in paramflags if (x[0] & 2)])
    call_with_this.has_outargs = has_outargs
    return call_with_this
class TestWorkflows(unittest.TestCase):
    """Checks that GitHub workflow defaults stay in sync with the version matrix."""

    def test_matrix_to_lock_version_defaults(self):
        """lock-versions.yml default branches must mirror the current matrix."""
        workflow_path = GITHUB_WORKFLOWS / 'lock-versions.yml'
        workflow = yaml.safe_load(workflow_path.read_text())
        # YAML parses the 'on:' trigger key as boolean True.
        branches_default = workflow[True]['workflow_dispatch']['inputs']['branches']['default']
        lock_versions = branches_default.split(',')
        matrix_versions = get_stack_versions(drop_patch=True)
        self.assertListEqual(
            lock_versions,
            matrix_versions[:-1],
            'lock-versions workflow default does not match current matrix in stack-schema-map')
class OptionSeriesTreemapDragdropDraghandle(Options):
    """Configuration options for the treemap drag & drop drag handle.

    NOTE(review): every name below is defined twice (a getter-style form
    then a setter-style form); in plain Python the second definition
    shadows the first, so these look like @property/@<name>.setter pairs
    whose decorators were stripped — confirm against the original source.
    """
    def className(self):
        # Default CSS class: 'highcharts-drag-handle'.
        return self._config_get('highcharts-drag-handle')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Default fill color: '#fff'.
        return self._config_get('#fff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        # Default line color: 'rgba(0, 0, 0, 0.6)'.
        return self._config_get('rgba(0, 0, 0, 0.6)')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Default line width: 1.
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        # Default z-index: 901.
        return self._config_get(901)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the leading '.parametrize(...)' line looks like a
# '@pytest.mark.parametrize' decorator whose '@pytest.mark' prefix was lost
# in extraction — as written it is not valid syntax; confirm against the
# original source before running.
.parametrize('items, stack_method', (([1], 'push_int'), ([1, 2, 3], 'push_int'), ([b'1', b'10', b'101', b'1010'], 'push_bytes')))
def test_pop_returns_latest_stack_item(stack, items, stack_method):
    """After pushing items in order, pop1_any() must return the last one pushed."""
    method = getattr(stack, stack_method)
    for each in items:
        method(each)
    assert (stack.pop1_any() == items[(- 1)])
def _soft_threshold(z, T, handle_complex=True):
sz = np.maximum((np.abs(z) - T), 0)
if (not handle_complex):
sz[:] = (np.sign(z) * sz)
else:
old_err_state = np.seterr(invalid='ignore')
sz[:] = np.nan_to_num((((1.0 * sz) / (sz + T)) * z))
np.seterr(**old_err_state)
return sz |
class TestChaining(testslide.TestCase):
    """Chaining testslide matchers with &, |, ^ and ~ behaves as documented."""

    def testBitwiseAnd(self):
        # Both operands must match for & to match.
        combined = testslide.matchers.Any() & testslide.matchers.AnyStr()
        self.assertTrue(isinstance(combined, testslide.matchers._AndMatcher))
        self.assertEqual(testslide.matchers.Any() & testslide.matchers.AnyStr(), 'a')
        self.assertNotEqual(testslide.matchers.Any() & testslide.matchers.AnyStr(), 3)

    def testBitwiseOr(self):
        # Either operand matching makes | match.
        combined = testslide.matchers.Any() | testslide.matchers.AnyStr()
        self.assertTrue(isinstance(combined, testslide.matchers._OrMatcher))
        self.assertEqual(testslide.matchers.AnyInt() | testslide.matchers.AnyStr(), 'a')
        self.assertEqual(testslide.matchers.AnyInt() | testslide.matchers.AnyStr(), 3)
        self.assertNotEqual(testslide.matchers.AnyInt() | testslide.matchers.AnyStr(), [])

    def testBitwiseXor(self):
        # Exactly one operand may match for ^ to match.
        combined = testslide.matchers.Any() ^ testslide.matchers.AnyStr()
        self.assertTrue(isinstance(combined, testslide.matchers._XorMatcher))
        self.assertEqual(testslide.matchers.AnyInt() ^ testslide.matchers.AnyStr(), [])
        self.assertNotEqual(testslide.matchers.AnyInt() ^ testslide.matchers.AnyStr(), 3)

    def testBitwiseInverse(self):
        # ~ negates the wrapped matcher.
        inverted = ~testslide.matchers.StrContaining('Fabio')
        self.assertTrue(isinstance(inverted, testslide.matchers._InvMatcher))
        self.assertEqual(inverted, 'Balint')
        self.assertNotEqual(inverted, 'Fabio.')

    def testCannotChainMoreThanTwo(self):
        # Chaining a third matcher onto an already-chained pair raises.
        with self.assertRaises(testslide.matchers.AlreadyChainedException):
            (testslide.matchers.Any() | testslide.matchers.AnyStr()) | testslide.matchers.AnyInt()
        with self.assertRaises(testslide.matchers.AlreadyChainedException):
            (testslide.matchers.Any() & testslide.matchers.AnyStr()) & testslide.matchers.AnyInt()
        with self.assertRaises(testslide.matchers.AlreadyChainedException):
            (testslide.matchers.Any() ^ testslide.matchers.AnyStr()) ^ testslide.matchers.AnyInt()
class BaseStateActionCriticComposer(CriticComposerInterface):
def __init__(self, observation_spaces_dict: Dict[(Union[(str, int)], spaces.Dict)], action_spaces_dict: Dict[(Union[(str, int)], spaces.Dict)]):
self._observation_spaces_dict = observation_spaces_dict
self._action_spaces_dict = action_spaces_dict
self._obs_shapes = observation_spaces_to_in_shapes(observation_spaces_dict)
self._only_discrete_spaces = {step_key: True for step_key in self._obs_shapes.keys()}
for (step_key, dict_action_space) in self._action_spaces_dict.items():
for (act_key, act_space) in dict_action_space.spaces.items():
assert isinstance(act_space, (spaces.Discrete, spaces.Box)), 'Only box and discrete spaces supported thus far'
if (self._only_discrete_spaces[step_key] and (not isinstance(act_space, spaces.Discrete))):
self._only_discrete_spaces[step_key] = False
(CriticComposerInterface)
def critic(self) -> TorchStateActionCritic: |
def test_constant(converter):
    """Converted integer/float constants must equal world constants of matching width."""
    world = converter._world
    assert converter.convert(Constant(6, Integer.int32_t())) == world.constant(6, 32)
    assert converter.convert(Constant(7.2, Float.float())) == world.constant(7.2, 32)
    # Pointer-typed (string) constants are not convertible.
    with pytest.raises(ValueError):
        converter.convert(Constant('hello', Pointer(Integer.uint8_t())))
def create_isolation_field(level):
    """Build the wtforms SelectField for choosing the mock isolation method.

    Args:
        level (str): 'build' for per-build forms, 'chroot' for per-chroot
            forms; anything else gets only the default choices.
    """
    if level == 'build':
        choices = [('unchanged', 'Use project/chroot settings')]
    elif level == 'chroot':
        choices = [('unchanged', 'Use project settings')]
    else:
        choices = []
    choices.extend([
        ('default', 'Use default configuration from mock-core-configs.rpm'),
        ('nspawn', 'systemd-nspawn'),
        ('simple', 'simple chroot'),
    ])
    return wtforms.SelectField(
        'Build isolation',
        choices=choices,
        validators=[wtforms.validators.Optional()],
        filters=[NoneFilter(None)],
        description='Choose the isolation method for running commands in buildroot')
def install_prerequisites():
    """pip3-install every required module, re-raising when an install fails."""
    logger.info('installing inkscape_silhouette prerequisites')
    for prerequisite in prerequisites:
        logger.debug('installing %s', prerequisite)
        try:
            rc = subprocess.call('pip3 install {}'.format(prerequisite), shell=True)
            if rc > 0:
                # A non-zero exit often means missing privileges.
                raise OSError('command returned code {}, try running again using sudo'.format(rc))
        except OSError:
            logger.error("unable to install module. Try running 'pip3 install %s' manually", prerequisite)
            raise
# NOTE(review): the leading '.usefixtures(...)' line looks like a
# '@pytest.mark.usefixtures' decorator whose '@pytest.mark' prefix was lost
# in extraction — as written it is not valid syntax; confirm against the
# original source before running.
.usefixtures('reindex_setup')
def test_all_documents_get_moved(sync_client):
    """reindex() must copy every document from test_index into prod_index."""
    helpers.reindex(sync_client, 'test_index', 'prod_index')
    # Refresh so the reindexed documents are visible to search/count.
    sync_client.indices.refresh()
    assert sync_client.indices.exists(index='prod_index')
    assert (50 == sync_client.count(index='prod_index', q='type:questions')['count'])
    assert (50 == sync_client.count(index='prod_index', q='type:answers')['count'])
    assert ({'answer': 42, 'correct': True, 'type': 'answers'} == sync_client.get(index='prod_index', id=42)['_source'])
def process_overlaps_clans_queries(namedhits, CLANS_FILE):
    """Resolve overlapping PFAM domain hits that belong to the same clan.

    For each target sequence, when two hits overlap on the sequence and
    their PFAM families belong to the same clan, only the better-scoring
    (lower e-value) hit is kept; overlaps across different (or unknown)
    clans are all retained.

    Args:
        namedhits: iterable of (query_name, query_len, hits); each hit is
            (hid, evalue, score, hmmfrom, hmmto, sqfrom, sqto, domscore).
        CLANS_FILE (str): path to the gzipped PFAM clans TSV file.
    Returns:
        list: (name, querylen, [hit]) tuples, one per surviving domain,
        ordered per target by sequence start coordinate.
    Raises:
        Exception: when CLANS_FILE is missing or not a regular file.
    """
    if ((not os.path.exists(CLANS_FILE)) or (not os.path.isfile(CLANS_FILE))):
        raise Exception(f"Couldn't find PFAM clans file at path {CLANS_FILE}, or it is not a file.")
    # Map PFAM family name (column 3) -> clan accession (column 1).
    clans_dict = {}
    with gzip.open(CLANS_FILE, 'rt') as clans_f:
        for line in clans_f:
            data = line.strip().split('\t')
            pfname = data[3]
            clan = data[1]
            if ((clan is not None) and (clan != '')):
                clans_dict[pfname] = clan
    # target id -> (covered sequence positions, kept domains so far).
    targets_hits = {}
    for (name, querylen, hits) in namedhits:
        hitclan = None
        for (hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore) in hits:
            (hmmfrom, hmmto, sqfrom, sqto) = map(int, [hmmfrom, hmmto, sqfrom, sqto])
            new_span = set(range(sqfrom, (sqto + 1)))
            if (hid in targets_hits):
                (total_range, clean_doms) = targets_hits[hid]
                total_overlap = (new_span & total_range)
                if (len(total_overlap) > 0):
                    # The new hit overlaps something already kept: keep it
                    # only if it beats every same-clan overlapping hit.
                    best = True
                    tmp_clean_doms = []
                    tmp_overlapping = []
                    for (pname, pquerylen, phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore) in clean_doms:
                        prev_span = set(range(psqfrom, (psqto + 1)))
                        overlap = (new_span & prev_span)
                        if (hitclan is None):
                            hitclan = clans_dict.get(name)
                        phitclan = clans_dict.get(pname)
                        if ((len(overlap) > 0) and (best == True) and (hitclan is not None) and (hitclan == phitclan)):
                            if (heval > pheval):
                                # Previous hit wins (lower e-value is better).
                                best = False
                                tmp_overlapping.append([pname, pquerylen, phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore])
                        else:
                            # Non-overlapping or different clan: keep it.
                            tmp_clean_doms.append([pname, pquerylen, phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore])
                    if (best == True):
                        tmp_clean_doms.append([name, querylen, hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore])
                    else:
                        # The new hit lost: restore the stashed previous hits.
                        tmp_clean_doms.extend(tmp_overlapping)
                    for (pname, pquerylen, phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore) in clean_doms:
                        clean_span = set(range(psqfrom, (psqto + 1)))
                        total_range.update(clean_span)
                    targets_hits[hid] = (total_range, tmp_clean_doms)
                else:
                    clean_doms.append([name, querylen, hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore])
                    total_range.update(new_span)
            else:
                # First hit seen for this target.
                clean_doms = [[name, querylen, hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore]]
                total_range = set()
                total_range.update(new_span)
                targets_hits[hid] = (total_range, clean_doms)
    # Flatten, ordering each target's domains by sequence start (index 7).
    clean_doms = []
    for (hid, (total_range, hclean_doms)) in targets_hits.items():
        for (name, querylen, hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore) in sorted(hclean_doms, key=(lambda x: x[7])):
            clean_doms.append((name, querylen, [[hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore]]))
    return clean_doms
# NOTE(review): the leading '.parametrize(...)' line looks like a
# '@pytest.mark.parametrize' decorator whose '@pytest.mark' prefix was lost
# in extraction — as written it is not valid syntax; confirm against the
# original source before running.
.parametrize('server,expected_num_flashes', [(lazy_fixture('no_lone_server'), 0), (lazy_fixture('lone_on_switch_server'), 1), (lazy_fixture('lone_on_open_close_server'), 0)])
def test_workspace_switching_behavior(server: FlashServer, expected_num_flashes: int) -> None:
    """Switching focus back to the first window flashes it the expected number of times."""
    with new_window_session({0: 1, 1: 1}) as window_session:
        with server_running(server):
            change_focus(window_session.windows[1][0])
            with watching_windows([window_session.get_first_window()]) as watchers:
                change_focus(window_session.get_first_window())
                # Wait until the server has drained and finished its event queue.
                while ((not server.events.empty()) or server.processing_event):
                    sleep(0.01)
                assert (watchers[0].count_flashes() == expected_num_flashes)
class HeaderRoutingAuth(ServiceType):
    """Auth backend service type used by header-routing tests.

    NOTE(review): the inline Kubernetes manifest below appears garbled —
    the port entries read '- name: protocol: TCP', which is not valid YAML
    (the port names seem to have been lost). Confirm against the original
    source.
    """
    # This service does not participate in variant expansion.
    skip_variant: ClassVar[bool] = True
    def __init__(self, *args, **kwargs) -> None:
        # Kubernetes manifests for the auth Service and its backend Pod.
        kwargs['service_manifests'] = '\n---\nkind: Service\napiVersion: v1\nmetadata:\n name: {self.path.k8s}\nspec:\n selector:\n backend: {self.path.k8s}\n ports:\n - name: protocol: TCP\n port: 80\n targetPort: 80\n - name: protocol: TCP\n port: 443\n targetPort: 443\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: {self.path.k8s}\n labels:\n backend: {self.path.k8s}\nspec:\n containers:\n - name: backend\n image: {images[test-auth]}\n ports:\n - containerPort: 80\n env:\n - name: BACKEND\n value: {self.path.k8s}\n'
        super().__init__(*args, **kwargs)
    def requirements(self):
        # NOTE(review): the Query argument below is syntactically broken
        # (unterminated string literal) — the URL format string was likely
        # lost in extraction; confirm against the original source.
        (yield ('url', Query((' % self.path.fqdn))))
class ControllerBase():
    """Base for WSGI controllers: stores request context and dispatches actions."""

    # Routing variables that are never forwarded as action keyword arguments.
    special_vars = ['action', 'controller']

    def __init__(self, req, link, data, **config):
        self.req = req
        self.link = link
        self.data = data
        self.parent = None
        # Expose any extra configuration entries as instance attributes.
        for name, value in config.items():
            setattr(self, name, value)

    def __call__(self, req):
        """Dispatch to the method named by the 'action' URL variable."""
        action = self.req.urlvars.get('action', 'index')
        if hasattr(self, '__before__'):
            # Optional per-controller hook run before every action.
            self.__before__()
        kwargs = self.req.urlvars.copy()
        for reserved in self.special_vars:
            kwargs.pop(reserved, None)
        return getattr(self, action)(req, **kwargs)
class UninstallSystemAction(argparse.Action):
    """argparse action performing a system-wide removal (root only)."""

    def __call__(self, parser, namespace, value, option_string=None):
        if os.getuid() == 0:
            uninstall_system()
            sys.exit(0)
        # System-wide removal touches root-owned files; refuse otherwise.
        print('E: You must run nautilus-terminal as root to perform a system-wide removal.')
        sys.exit(1)
class OptionPlotoptionsBulletSonificationContexttracksMappingFrequency(Options):
    """Accessors for the bullet-series sonification context-track frequency mapping.

    The original source contained duplicate plain ``def``s for each name (the
    setter silently shadowing the getter), which indicates the ``@property`` /
    ``@<name>.setter`` decorators were stripped during processing; restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesFunnel3dSonificationTracksMappingHighpass(Options):
    # Accessors for the funnel3d sonification 'highpass' mapping sub-options.
    # NOTE(review): sibling option classes in this file pair such accessors with
    # setters via @property; decorators appear to have been stripped here — confirm.
    def frequency(self) -> 'OptionSeriesFunnel3dSonificationTracksMappingHighpassFrequency':
        """Sub-configuration object for the highpass filter frequency."""
        return self._config_sub_data('frequency', OptionSeriesFunnel3dSonificationTracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionSeriesFunnel3dSonificationTracksMappingHighpassResonance':
        """Sub-configuration object for the highpass filter resonance."""
        return self._config_sub_data('resonance', OptionSeriesFunnel3dSonificationTracksMappingHighpassResonance)
def update_shipping_info(doc, method=None):
    """Doc-event hook: when the package type of a Sales Order changes to a
    non-empty value, queue a background job to push package info to Unicommerce.

    ``method`` is the hook event name supplied by the framework; it is unused.
    """
    sales_order = doc
    # Nothing to do unless the package-type field actually changed.
    if not sales_order.has_value_changed(PACKAGE_TYPE_FIELD):
        return
    # A cleared/empty package type is also a no-op.
    if not sales_order.get(PACKAGE_TYPE_FIELD):
        return
    frappe.enqueue(_update_package_info_on_unicommerce, queue='short', so_code=sales_order.name)
def check(ip, domain, port, args, timeout, payload_map):
    """Try every username/password pair from ``payload_map`` against a MySQL server.

    Returns a formatted ``': user:pass\\n<db>'`` string for the first pair that
    authenticates, or None when none succeed. ``domain`` and ``timeout`` are
    accepted for interface compatibility but unused here.
    """
    # Guard against missing payload entries instead of crashing on None.
    username_list = payload_map.get('username') or []
    password_list = payload_map.get('password') or []
    # The original wrapped this in `try: ... except Exception: raise`, a no-op; removed.
    for username in username_list:
        for password in password_list:
            try:
                conn = pymysql.connect(host=ip, port=int(port), user=username, passwd=password, db=args, charset='utf8')
                db_name = conn.db
                conn.close()
                return ': ' + username + ':' + password + '\n' + db_name.decode()
            except Exception:
                # Authentication/connection failure: deliberately try the next pair.
                continue
    return None
class TestStubAction(PyTestServerTestCase):
    """Tests covering stub_action used as a decorator, a context manager, and via start()/stop()."""
    # Real service class the harness serves for unstubbed actions.
    server_class = _TestServiceServer
    # No setting overrides are needed for these tests.
    server_settings = {}
def setup_method(self):
    """Per-test setup: also create a secondary client backed by stub transports."""
    super(TestStubAction, self).setup_method()
    # Serves the 'cat'/'dog' services exercised by the real-call-handled tests below.
    self.secondary_stub_client = Client(_secondary_stub_client_settings)
@stub_action('test_service', 'test_action_1')
def test_one_stub_as_decorator(self, stub_test_action_1):
    """stub_action as a decorator injects a stub whose return_value becomes the response body."""
    stub_test_action_1.return_value = {'value': 1}
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual({'value': 1}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    stub_test_action_1.assert_called_once_with({})
if not six.PY2:
    # pytest.mark.parametrize composition with stub_action is only exercised on Python 3.
    @pytest.mark.parametrize(('input_arg',), (('foo',), ('bar',)))
    @stub_action('test_service', 'test_action_1')
    def test_one_stub_as_decorator_with_pytest_parametrize_before(self, stub_test_action_1, input_arg):
        """parametrize above stub_action: both the stub and the parametrized arg are injected."""
        stub_test_action_1.return_value = {'value': 1}
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, response.body)
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        stub_test_action_1.assert_called_once_with({})
        assert input_arg in ('foo', 'bar')

    @stub_action('test_service', 'test_action_1')
    @pytest.mark.parametrize(('input_arg',), (('foo',), ('bar',)))
    def test_one_stub_as_decorator_with_pytest_parametrize_after(self, stub_test_action_1, input_arg):
        """parametrize below stub_action behaves identically."""
        stub_test_action_1.return_value = {'value': 1}
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, response.body)
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        stub_test_action_1.assert_called_once_with({})
        assert input_arg in ('foo', 'bar')
# NOTE(review): the original decorator name was stripped, leaving only its argument
# tuple ((('foo',), ('bar',))); 'parameterized' is assumed from the test name — confirm
# against this file's imports.
@parameterized((('foo',), ('bar',)))
@stub_action('test_service', 'test_action_1')
def test_one_stub_as_decorator_with_3rd_party_parametrize(self, input_arg, stub_test_action_1):
    """A third-party parametrize decorator composes with stub_action (input_arg supplied first)."""
    stub_test_action_1.return_value = {'value': 1}
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual({'value': 1}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    stub_test_action_1.assert_called_once_with({})
    assert input_arg in ('foo', 'bar')
@stub_action('test_service', 'test_action_1')
def _external_method_get_response(self, stub_test_action_1):
    """Stub-decorated helper; verifies the stub was used exactly once on the way out."""
    stub_test_action_1.return_value = {'value': -5}
    try:
        return self.client.call_action('test_service', 'test_action_1')
    finally:
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        stub_test_action_1.assert_called_once_with({})
def test_one_stub_as_decorated_external_method(self):
    """A stub-decorated helper method can be called normally; the decorator supplies the stub."""
    result = self._external_method_get_response()
    self.assertEqual({'value': -5}, result.body)
def test_one_stub_as_context_manager(self):
    """stub_action as a context manager yields the stub for configuration and inspection."""
    with stub_action('test_service', 'test_action_1') as stub_test_action_1:
        stub_test_action_1.return_value = {'value': 1}
        result = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, result.body)
        stub_test_action_1.assert_called_once_with({})
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
@stub_action('test_service', 'test_action_1', body={'value': 1})
def test_one_stub_as_decorator_with_body(self, stub_test_action_1):
    """A body= given to stub_action is returned without configuring return_value."""
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual({'value': 1}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    stub_test_action_1.assert_called_once_with({})
def test_one_stub_as_context_manager_with_body(self):
    """A body= given to the context-manager form is returned without further setup."""
    with stub_action('test_service', 'test_action_1', body={'value': 1}) as stub_test_action_1:
        result = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, result.body)
        stub_test_action_1.assert_called_once_with({})
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
def test_one_stub_duplicated_as_context_manager(self):
    """Entering the same stub twice must not double-patch; exiting restores Client methods."""
    original_client_send_request = Client.send_request
    original_client_get_all_responses = Client.get_all_responses
    stub = stub_action('test_service', 'test_action_1', body={'value': 1})
    with stub as stub_test_action_1, stub as second_stub_test_action_1:
        response = self.client.call_action('test_service', 'test_action_1')
    # After exit, the patched Client methods must be the original functions again.
    for (client_func, original_func) in ((Client.send_request, original_client_send_request), (Client.get_all_responses, original_client_get_all_responses)):
        self.assertTrue((six.get_unbound_function(client_func) is six.get_unbound_function(original_func)))
    self.assertEqual({'value': 1}, response.body)
    # Both context-manager entries yield the very same stub object.
    self.assertTrue((stub_test_action_1 is second_stub_test_action_1))
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    stub_test_action_1.assert_called_once_with({})
@stub_action('test_service', 'test_action_2')
@stub_action('test_service', 'test_action_1')
def test_two_stubs_same_service_as_decorator(self, stub_test_action_1, stub_test_action_2):
    """Stacked stub_action decorators inject stubs innermost-first."""
    stub_test_action_1.return_value = {'value': 1}
    stub_test_action_2.return_value = {'another_value': 2}
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual({'value': 1}, response.body)
    response = self.client.call_action('test_service', 'test_action_2', {'input_attribute': True})
    self.assertEqual({'another_value': 2}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'input_attribute': True}, stub_test_action_2.call_body)
    stub_test_action_1.assert_called_once_with({})
    stub_test_action_2.assert_called_once_with({'input_attribute': True})
if not six.PY2:
    # pytest.mark.parametrize composition with stacked stubs; Python 3 only.
    @pytest.mark.parametrize(('input_arg',), (('foo',), ('bar',)))
    @stub_action('test_service', 'test_action_2')
    @stub_action('test_service', 'test_action_1')
    def test_two_stubs_same_service_as_decorator_with_pytest_parametrize_before(self, stub_test_action_1, stub_test_action_2, input_arg):
        """parametrize above two stacked stubs: all three arguments are injected."""
        stub_test_action_1.return_value = {'value': 1}
        stub_test_action_2.return_value = {'another_value': 2}
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, response.body)
        response = self.client.call_action('test_service', 'test_action_2', {'input_attribute': True})
        self.assertEqual({'another_value': 2}, response.body)
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        self.assertEqual(1, stub_test_action_2.call_count)
        self.assertEqual({'input_attribute': True}, stub_test_action_2.call_body)
        stub_test_action_1.assert_called_once_with({})
        stub_test_action_2.assert_called_once_with({'input_attribute': True})
        assert input_arg in ('foo', 'bar')

    @stub_action('test_service', 'test_action_2')
    @stub_action('test_service', 'test_action_1')
    @pytest.mark.parametrize(('input_arg',), (('foo',), ('bar',)))
    def test_two_stubs_same_service_as_decorator_with_pytest_parametrize_after(self, stub_test_action_1, stub_test_action_2, input_arg):
        """parametrize below two stacked stubs behaves identically."""
        stub_test_action_1.return_value = {'value': 1}
        stub_test_action_2.return_value = {'another_value': 2}
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, response.body)
        response = self.client.call_action('test_service', 'test_action_2', {'input_attribute': True})
        self.assertEqual({'another_value': 2}, response.body)
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        self.assertEqual(1, stub_test_action_2.call_count)
        self.assertEqual({'input_attribute': True}, stub_test_action_2.call_body)
        stub_test_action_1.assert_called_once_with({})
        stub_test_action_2.assert_called_once_with({'input_attribute': True})
        assert input_arg in ('foo', 'bar')
# NOTE(review): stripped decorator residue; 'parameterized' assumed from the test name — confirm.
@parameterized((('foo',), ('bar',)))
@stub_action('test_service', 'test_action_2')
@stub_action('test_service', 'test_action_1')
def test_two_stubs_same_service_as_decorator_with_3rd_party_parametrize(self, input_arg, stub_test_action_1, stub_test_action_2):
    """A third-party parametrize decorator composes with two stacked stubs."""
    stub_test_action_1.return_value = {'value': 1}
    stub_test_action_2.return_value = {'another_value': 2}
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual({'value': 1}, response.body)
    response = self.client.call_action('test_service', 'test_action_2', {'input_attribute': True})
    self.assertEqual({'another_value': 2}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'input_attribute': True}, stub_test_action_2.call_body)
    stub_test_action_1.assert_called_once_with({})
    stub_test_action_2.assert_called_once_with({'input_attribute': True})
    assert input_arg in ('foo', 'bar')
@stub_action('test_service', 'test_action_2')
@stub_action('test_service', 'test_action_1')
def _two_stubs_external_method_get_response(self, another_value, stub_test_action_1, stub_test_action_2):
    """Helper with two stacked stubs; asserts stub usage before returning both responses."""
    stub_test_action_1.return_value = {'value': -10}
    stub_test_action_2.return_value = {'another_value': another_value}
    try:
        return (self.client.call_action('test_service', 'test_action_1'), self.client.call_action('test_service', 'test_action_2', {'input_attribute': False}))
    finally:
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        self.assertEqual(1, stub_test_action_2.call_count)
        self.assertEqual({'input_attribute': False}, stub_test_action_2.call_body)
        stub_test_action_1.assert_called_once_with({})
        stub_test_action_2.assert_called_once_with({'input_attribute': False})
@pytest.mark.parametrize(('value2',), ((-15,), (-20,)))
def test_two_stubs_same_service_as_decorated_external_method(self, value2):
    """Calling the two-stub helper works; both stubbed responses come back."""
    (response1, response2) = self._two_stubs_external_method_get_response(value2)
    self.assertEqual({'value': -10}, response1.body)
    self.assertEqual({'another_value': value2}, response2.body)
@stub_action('test_service', 'test_action_2')
@stub_action('test_service', 'test_action_1')
def test_two_stubs_same_service_as_decorator_multiple_calls_to_one(self, stub_test_action_1, stub_test_action_2):
    """A stub's side_effect sequence is consumed across multiple calls; call_bodies records each."""
    stub_test_action_1.return_value = {'value': 1}
    stub_test_action_2.side_effect = ({'another_value': 2}, {'third_value': 3})
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual({'value': 1}, response.body)
    response = self.client.call_action('test_service', 'test_action_2', {'input_attribute': True})
    self.assertEqual({'another_value': 2}, response.body)
    response = self.client.call_action('test_service', 'test_action_2', {'another_attribute': False})
    self.assertEqual({'third_value': 3}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    self.assertEqual(2, stub_test_action_2.call_count)
    # call_body holds the most recent body; call_bodies holds all of them in order.
    self.assertEqual({'another_attribute': False}, stub_test_action_2.call_body)
    self.assertEqual(({'input_attribute': True}, {'another_attribute': False}), stub_test_action_2.call_bodies)
    stub_test_action_1.assert_called_once_with({})
    stub_test_action_2.assert_has_calls([mock.call({'input_attribute': True}), mock.call({'another_attribute': False})])
def test_stub_action_with_side_effect_mixed_exceptions_and_bodies_as_context_manager(self):
    """side_effect may mix exception instances and bodies; they are consumed in order."""
    with stub_action('foo', 'bar', side_effect=[MessageReceiveTimeout('No message received'), {'good': 'yes'}]):
        # First call raises the queued exception; second call returns the queued body.
        with pytest.raises(MessageReceiveTimeout):
            self.client.call_action('foo', 'bar')
        response = self.client.call_action('foo', 'bar')
    assert (response.body == {'good': 'yes'})
@stub_action('foo', 'bar')
def test_stub_action_with_side_effect_mixed_exceptions_and_bodies_as_decorator(self, stub_foo_bar):
    """Same mixed side_effect behavior when the stub is injected by the decorator."""
    stub_foo_bar.side_effect = [MessageReceiveTimeout('No message received'), {'good': 'yes'}]
    with pytest.raises(MessageReceiveTimeout):
        self.client.call_action('foo', 'bar')
    response = self.client.call_action('foo', 'bar')
    assert response.body == {'good': 'yes'}
@stub_action('test_service', 'test_action_1')
def test_two_stubs_same_service_split(self, stub_test_action_1):
    """One stub from a decorator and one from a context manager coexist on the same service."""
    stub_test_action_1.return_value = {'value': 1}
    with stub_action('test_service', 'test_action_2') as stub_test_action_2:
        stub_test_action_2.return_value = {'another_value': 2}
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, response.body)
        response = self.client.call_action('test_service', 'test_action_2', {'input_attribute': True})
        self.assertEqual({'another_value': 2}, response.body)
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        self.assertEqual(1, stub_test_action_2.call_count)
        self.assertEqual({'input_attribute': True}, stub_test_action_2.call_body)
@stub_action('test_another_service', 'test_action_2')
@stub_action('test_service', 'test_action_1')
def test_two_stubs_different_services_as_decorator(self, stub_test_action_1, stub_test_action_2):
    """Stubs for different services can be stacked as decorators."""
    stub_test_action_1.return_value = {'value': 1}
    stub_test_action_2.return_value = {'another_value': 2}
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual({'value': 1}, response.body)
    response = self.client.call_action('test_another_service', 'test_action_2', {'input_attribute': True})
    self.assertEqual({'another_value': 2}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'input_attribute': True}, stub_test_action_2.call_body)
@stub_action('test_service', 'test_action_1')
def test_two_stubs_different_services_split(self, stub_test_action_1):
    """A decorator stub and a context-manager stub for different services coexist."""
    stub_test_action_1.return_value = {'value': 1}
    with stub_action('test_another_service', 'test_action_2') as stub_test_action_2:
        stub_test_action_2.return_value = {'another_value': 2}
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 1}, response.body)
        response = self.client.call_action('test_another_service', 'test_action_2', {'input_attribute': True})
        self.assertEqual({'another_value': 2}, response.body)
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
        self.assertEqual(1, stub_test_action_2.call_count)
        self.assertEqual({'input_attribute': True}, stub_test_action_2.call_body)
@stub_action('test_service', 'test_action_1', body={'value': 1})
def test_one_stub_as_decorator_with_real_call_handled(self, stub_test_action_1):
    """Stubbing one service leaves other (stub-transport) services fully functional."""
    response = self.client.call_action('test_service', 'test_action_1')
    self.assertEqual(response.body, {'value': 1})
    # These go through the secondary client's real stub transports, not the stub_action patch.
    response = self.secondary_stub_client.call_action('cat', 'meow')
    self.assertEqual({'type': 'squeak'}, response.body)
    response = self.secondary_stub_client.call_action('dog', 'bark')
    self.assertEqual({'sound': 'woof'}, response.body)
    self.assertEqual(1, stub_test_action_1.call_count)
    self.assertEqual({}, stub_test_action_1.call_body)
def test_one_stub_as_context_manager_with_real_call_handled(self):
    """Context-manager stubbing likewise leaves other services fully functional."""
    with stub_action('test_service', 'test_action_1', body={'value': 1}) as stub_test_action_1:
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual(response.body, {'value': 1})
        # Real calls through the secondary client's stub transports are untouched.
        response = self.secondary_stub_client.call_action('cat', 'meow')
        self.assertEqual({'type': 'squeak'}, response.body)
        response = self.secondary_stub_client.call_action('dog', 'bark')
        self.assertEqual({'sound': 'woof'}, response.body)
        self.assertEqual(1, stub_test_action_1.call_count)
        self.assertEqual({}, stub_test_action_1.call_body)
@stub_action('test_service', 'test_action_2')
@mock.patch(__name__ + '._test_function', return_value=3)
def test_as_decorator_with_patch_before(self, mock_randint, stub_test_action_2):
    """mock.patch stacks under stub_action; the innermost patch mock arrives first."""
    stub_test_action_2.return_value = {'value': 99}
    response = self.client.call_actions('test_service', [ActionRequest(action='test_action_1'), ActionRequest(action='test_action_2')])
    self.assertEqual(2, len(response.actions))
    # test_action_1 runs for real and uses the patched _test_function.
    self.assertEqual({'value': 3}, response.actions[0].body)
    self.assertEqual({'value': 99}, response.actions[1].body)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({}, stub_test_action_2.call_body)
    mock_randint.assert_called_once_with(0, 99)
@mock.patch(__name__ + '._test_function', return_value=7)
@stub_action('test_service', 'test_action_2')
def test_as_decorator_with_patch_after(self, stub_test_action_2, mock_randint):
    """stub_action stacks under mock.patch; the innermost stub arrives first."""
    stub_test_action_2.side_effect = ({'value': 122}, {'also': 157})
    response = self.client.call_actions('test_service', [{'action': 'test_action_1'}, {'action': 'test_action_2'}, {'action': 'test_action_2'}])
    self.assertEqual(3, len(response.actions))
    self.assertEqual({'value': 7}, response.actions[0].body)
    self.assertEqual({'value': 122}, response.actions[1].body)
    self.assertEqual({'also': 157}, response.actions[2].body)
    self.assertEqual(2, stub_test_action_2.call_count)
    self.assertEqual(({}, {}), stub_test_action_2.call_bodies)
    stub_test_action_2.assert_has_calls([mock.call({}), mock.call({})])
    mock_randint.assert_called_once_with(0, 99)
def test_using_start_stop(self):
    """stub_action(...).start()/.stop() works like mock.patch's start/stop API."""
    stubber = stub_action('test_service', 'test_action_1')
    stubbed_action = stubber.start()
    stubbed_action.return_value = {'what about': 'this'}
    response = self.client.call_action('test_service', 'test_action_1', {'burton': 'guster', 'sean': 'spencer'})
    self.assertEqual({'what about': 'this'}, response.body)
    stubbed_action.assert_called_once_with({'burton': 'guster', 'sean': 'spencer'})
    self.assertEqual(1, stubbed_action.call_count)
    self.assertEqual({'burton': 'guster', 'sean': 'spencer'}, stubbed_action.call_body)
    stubber.stop()
_action('test_service', 'test_action_2', errors=[{'code': 'BAD_FOO', 'field': 'foo', 'message': 'Nope'}])
def test_mock_action_with_error_raises_exception(self, stub_test_action_2):
with self.assertRaises(Client.CallActionError) as e:
self.client.call_action('test_service', 'test_action_2', {'a': 'body'})
self.assertEqual('BAD_FOO', e.exception.actions[0].errors[0].code)
self.assertEqual('foo', e.exception.actions[0].errors[0].field)
self.assertEqual('Nope', e.exception.actions[0].errors[0].message)
self.assertEqual(1, stub_test_action_2.call_count)
self.assertEqual({'a': 'body'}, stub_test_action_2.call_body)
stub_test_action_2.assert_called_once_with({'a': 'body'})
@stub_test_action()
def test_stub_action_with_side_effect_callback(self, _stub_test_action):
    """The module-level stub_test_action decorator echoes the request body, adding extra data for 'admin'."""
    response = self.client.call_action('test_service', 'test_action', body={'id': 1, 'type': 'user'})
    self.assertEqual(response.body, {'id': 1, 'type': 'user'})
    response = self.client.call_action('test_service', 'test_action', body={'id': 2, 'type': 'admin'})
    self.assertEqual(response.body, {'id': 2, 'type': 'admin', 'extra': 'data'})
@stub_test_action(add_extra=False)
def test_stub_action_with_side_effect_callback_and_param(self, _stub_test_action):
    """With add_extra=False the callback echoes the body without augmenting 'admin' requests."""
    response = self.client.call_action('test_service', 'test_action', body={'id': 1, 'type': 'user'})
    self.assertEqual(response.body, {'id': 1, 'type': 'user'})
    response = self.client.call_action('test_service', 'test_action', body={'id': 2, 'type': 'admin'})
    self.assertEqual(response.body, {'id': 2, 'type': 'admin'})
def test_stub_action_with_side_effect_callback_in_context_manager(self):
    """stub_test_action used as a context manager applies the same echo-plus-extra behavior."""
    with stub_test_action():
        response = self.client.call_action('test_service', 'test_action', body={'id': 1, 'type': 'user'})
        self.assertEqual(response.body, {'id': 1, 'type': 'user'})
    with stub_test_action():
        response = self.client.call_action('test_service', 'test_action', body={'id': 2, 'type': 'admin'})
        self.assertEqual(response.body, {'id': 2, 'type': 'admin', 'extra': 'data'})
def test_stub_action_with_side_effect_callback_in_context_manager_and_param(self):
    """add_extra=False in the context-manager form suppresses the 'admin' augmentation."""
    with stub_test_action(add_extra=False):
        response = self.client.call_action('test_service', 'test_action', body={'id': 1, 'type': 'user'})
        self.assertEqual(response.body, {'id': 1, 'type': 'user'})
    with stub_test_action(add_extra=False):
        response = self.client.call_action('test_service', 'test_action', body={'id': 2, 'type': 'admin'})
        self.assertEqual(response.body, {'id': 2, 'type': 'admin'})
@stub_action('test_service', 'test_action_2', side_effect=ActionError(errors=[Error(code='BAR_BAD', field='bar', message='Uh-uh')]))
def test_stub_action_with_error_side_effect_raises_exception(self, stub_test_action_2):
    """An ActionError side_effect configured on stub_action surfaces as CallActionError."""
    with self.assertRaises(Client.CallActionError) as e:
        self.client.call_action('test_service', 'test_action_2', {'a': 'body'})
    self.assertEqual('BAR_BAD', e.exception.actions[0].errors[0].code)
    self.assertEqual('bar', e.exception.actions[0].errors[0].field)
    self.assertEqual('Uh-uh', e.exception.actions[0].errors[0].message)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'a': 'body'}, stub_test_action_2.call_body)
    stub_test_action_2.assert_called_once_with({'a': 'body'})
@stub_action('test_service', 'test_action_2', side_effect=JobError(errors=[Error(code='BAR_BAD_JOB', message='Uh-uh job')]))
def test_stub_action_with_job_error_side_effect_raises_job_error_exception(self, stub_test_action_2):
    """A JobError side_effect configured on stub_action surfaces as Client.JobError."""
    with self.assertRaises(Client.JobError) as e:
        self.client.call_action('test_service', 'test_action_2', {'a': 'body'})
    self.assertEqual('BAR_BAD_JOB', e.exception.errors[0].code)
    self.assertIsNone(e.exception.errors[0].field)
    self.assertEqual('Uh-uh job', e.exception.errors[0].message)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'a': 'body'}, stub_test_action_2.call_body)
    stub_test_action_2.assert_called_once_with({'a': 'body'})
@stub_action('test_service', 'test_action_2')
def test_mock_action_with_error_side_effect_raises_exception(self, stub_test_action_2):
    """An ActionError assigned to the stub's side_effect surfaces as CallActionError."""
    stub_test_action_2.side_effect = ActionError(errors=[Error(code='BAR_BAD', field='bar', message='Uh-uh')])
    with self.assertRaises(Client.CallActionError) as e:
        self.client.call_action('test_service', 'test_action_2', {'a': 'body'})
    self.assertEqual('BAR_BAD', e.exception.actions[0].errors[0].code)
    self.assertEqual('bar', e.exception.actions[0].errors[0].field)
    self.assertEqual('Uh-uh', e.exception.actions[0].errors[0].message)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'a': 'body'}, stub_test_action_2.call_body)
    stub_test_action_2.assert_called_once_with({'a': 'body'})
@stub_action('test_service', 'test_action_2')
def test_mock_action_with_job_error_side_effect_raises_job_error_exception(self, stub_test_action_2):
    """A JobError assigned to the stub's side_effect surfaces as Client.JobError."""
    stub_test_action_2.side_effect = JobError(errors=[Error(code='BAR_BAD_JOB', message='Uh-uh job')])
    with self.assertRaises(Client.JobError) as e:
        self.client.call_action('test_service', 'test_action_2', {'a': 'body'})
    self.assertEqual('BAR_BAD_JOB', e.exception.errors[0].code)
    self.assertIsNone(e.exception.errors[0].field)
    self.assertEqual('Uh-uh job', e.exception.errors[0].message)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'a': 'body'}, stub_test_action_2.call_body)
    stub_test_action_2.assert_called_once_with({'a': 'body'})
@stub_action('test_service', 'test_action_2')
def test_mock_action_with_job_error_response_raises_job_error_exception(self, stub_test_action_2):
    """Returning a JobResponse bearing errors is equivalent to raising Client.JobError."""
    stub_test_action_2.return_value = JobResponse(errors=[Error(code='BAR_BAD_JOB', message='Uh-uh job')])
    with self.assertRaises(Client.JobError) as e:
        self.client.call_action('test_service', 'test_action_2', {'a': 'body'})
    self.assertEqual('BAR_BAD_JOB', e.exception.errors[0].code)
    self.assertIsNone(e.exception.errors[0].field)
    self.assertEqual('Uh-uh job', e.exception.errors[0].message)
    self.assertEqual(1, stub_test_action_2.call_count)
    self.assertEqual({'a': 'body'}, stub_test_action_2.call_body)
    stub_test_action_2.assert_called_once_with({'a': 'body'})
@stub_action('test_service', 'test_action_2', errors=[{'code': 'INVALID_BAR', 'message': 'A bad message'}])
def test_multiple_actions_stop_on_error(self, stub_test_action_2):
    """Without continue_on_error, processing stops at the failing stubbed action."""
    response = self.client.call_actions('test_service', [ActionRequest(action='test_action_1'), ActionRequest(action='test_action_2'), ActionRequest(action='test_action_1')], raise_action_errors=False)
    # The third action never runs because the second one failed.
    self.assertEqual(2, len(response.actions))
    self.assertEqual('INVALID_BAR', response.actions[1].errors[0].code)
    self.assertEqual('A bad message', response.actions[1].errors[0].message)
    self.assertTrue(stub_test_action_2.called)
@stub_action('test_service', 'test_action_2', errors=[{'code': 'MISSING_BAZ', 'field': 'entity_id', 'message': 'Your entity ID was missing'}])
def test_multiple_actions_continue_on_error(self, mock_test_action_2):
    """With continue_on_error=True all actions run despite the stubbed failure."""
    response = self.client.call_actions('test_service', [{'action': 'test_action_1'}, {'action': 'test_action_2'}, {'action': 'test_action_1'}], raise_action_errors=False, continue_on_error=True)
    self.assertEqual(3, len(response.actions))
    self.assertEqual('MISSING_BAZ', response.actions[1].errors[0].code)
    self.assertEqual('entity_id', response.actions[1].errors[0].field)
    self.assertEqual('Your entity ID was missing', response.actions[1].errors[0].message)
    self.assertTrue(mock_test_action_2.called)
@stub_action('test_service', 'test_action_2', body={'three': 'four'})
@stub_action('test_service', 'test_action_1', body={'one': 'two'})
def test_two_stubs_with_parallel_calls_all_stubbed(self, stub_test_action_1, stub_test_action_2):
    """call_jobs_parallel routes each job's action to the matching stub."""
    job_responses = self.client.call_jobs_parallel([{'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'a': 'b'}}]}, {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'c': 'd'}}]}])
    self.assertIsNotNone(job_responses)
    self.assertEqual(2, len(job_responses))
    self.assertEqual(1, len(job_responses[0].actions))
    self.assertEqual({'one': 'two'}, job_responses[0].actions[0].body)
    self.assertEqual(1, len(job_responses[1].actions))
    self.assertEqual({'three': 'four'}, job_responses[1].actions[0].body)
    stub_test_action_1.assert_called_once_with({'a': 'b'})
    stub_test_action_2.assert_called_once_with({'c': 'd'})
@stub_action('test_service', 'test_action_2')
@mock.patch(__name__ + '._test_function')
def test_one_stub_with_parallel_calls(self, mock_randint, stub_test_action_2):
    """Parallel jobs mix one stubbed action with real actions using the patched helper."""
    mock_randint.side_effect = (42, 17, 31)
    stub_test_action_2.return_value = {'concert': 'tickets'}
    job_responses = self.client.call_jobs_parallel([{'service_name': 'test_service', 'actions': [{'action': 'test_action_1'}]}, {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'slide': 'rule'}}, {'action': 'test_action_1'}]}, {'service_name': 'test_service', 'actions': [{'action': 'test_action_1'}]}])
    self.assertIsNotNone(job_responses)
    self.assertEqual(3, len(job_responses))
    self.assertEqual(1, len(job_responses[0].actions))
    self.assertEqual({'value': 42}, job_responses[0].actions[0].body)
    self.assertEqual(2, len(job_responses[1].actions))
    self.assertEqual({'concert': 'tickets'}, job_responses[1].actions[0].body)
    self.assertEqual({'value': 17}, job_responses[1].actions[1].body)
    self.assertEqual(1, len(job_responses[2].actions))
    self.assertEqual({'value': 31}, job_responses[2].actions[0].body)
    stub_test_action_2.assert_called_once_with({'slide': 'rule'})
@stub_action('test_service', 'test_action_2')
@stub_action('test_service', 'test_action_1')
def test_two_stubs_with_parallel_calls(self, stub_test_action_1, stub_test_action_2):
    """Parallel jobs mix stubbed actions, real stub-transport services, and an unknown action."""
    stub_test_action_1.return_value = {'value': 1}
    stub_test_action_2.return_value = {'another_value': 2}
    job_responses = Client(dict(self.client.config, **_secondary_stub_client_settings)).call_jobs_parallel([{'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'input_attribute': True}}, {'action': 'test_action_2', 'body': {'another_variable': 'Cool'}}]}, {'service_name': 'cat', 'actions': [{'action': 'meow'}]}, {'service_name': 'dog', 'actions': [{'action': 'bark'}]}, {'service_name': 'test_service', 'actions': [{'action': 'does_not_exist'}]}], raise_action_errors=False)
    self.assertIsNotNone(job_responses)
    self.assertEqual(4, len(job_responses))
    self.assertEqual(2, len(job_responses[0].actions))
    self.assertEqual({'value': 1}, job_responses[0].actions[0].body)
    self.assertEqual({'another_value': 2}, job_responses[0].actions[1].body)
    self.assertEqual(1, len(job_responses[1].actions))
    self.assertEqual({'type': 'squeak'}, job_responses[1].actions[0].body)
    self.assertEqual(1, len(job_responses[2].actions))
    self.assertEqual({'sound': 'woof'}, job_responses[2].actions[0].body)
    self.assertEqual(1, len(job_responses[3].actions))
    self.assertEqual([Error(code='UNKNOWN', message='The action "does_not_exist" was not found on this server.', field='action', is_caller_error=True)], job_responses[3].actions[0].errors)
    stub_test_action_1.assert_called_once_with({'input_attribute': True})
    stub_test_action_2.assert_called_once_with({'another_variable': 'Cool'})
@stub_action('test_service', 'test_action_2', body={'three': 'four'})
@stub_action('test_service', 'test_action_1')
def test_two_stubs_with_parallel_calls_and_job_response_errors_raised(self, stub_test_action_1, stub_test_action_2):
    """A JobResponse carrying errors from one parallel job raises Client.JobError."""
    stub_test_action_1.return_value = JobResponse(errors=[Error(code='BAD_JOB', message='You are a bad job')])
    with self.assertRaises(self.client.JobError) as error_context:
        self.client.call_jobs_parallel([{'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'a': 'b'}}]}, {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'c': 'd'}}]}])
    self.assertEqual([Error(code='BAD_JOB', message='You are a bad job')], error_context.exception.errors)
    stub_test_action_1.assert_called_once_with({'a': 'b'})
    stub_test_action_2.assert_called_once_with({'c': 'd'})
@stub_action('test_service', 'test_action_2', body={'three': 'four'})
@stub_action('test_service', 'test_action_1', side_effect=JobError(errors=[Error(code='BAD_JOB', message='You are a bad job')]))
def test_stub_action_with_two_stubs_with_parallel_calls_and_job_errors_not_raised(self, stub_test_action_1, stub_test_action_2):
    """With raise_job_errors=False, a failing job is reported in-line while others succeed."""
    job_responses = self.client.call_jobs_parallel([{'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'a': 'b'}}]}, {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'c': 'd'}}]}], raise_job_errors=False)
    self.assertIsNotNone(job_responses)
    self.assertEqual(2, len(job_responses))
    self.assertEqual(0, len(job_responses[0].actions))
    self.assertEqual([Error(code='BAD_JOB', message='You are a bad job')], job_responses[0].errors)
    self.assertEqual(1, len(job_responses[1].actions))
    self.assertEqual({'three': 'four'}, job_responses[1].actions[0].body)
    stub_test_action_1.assert_called_once_with({'a': 'b'})
    stub_test_action_2.assert_called_once_with({'c': 'd'})
_action('test_service', 'test_action_2', body={'three': 'four'})
_action('test_service', 'test_action_1', side_effect=ActionError(errors=[Error(code='BAD_ACTION', message='You are a bad actor')]))
def test_stub_action_with_two_stubs_with_parallel_calls_and_action_errors_raised(self, stub_test_action_1, stub_test_action_2):
    """An ActionError stubbed as a side effect surfaces as CallActionError from call_jobs_parallel."""
    jobs = [
        {'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'a': 'b'}}]},
        {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'c': 'd'}}]},
    ]
    with self.assertRaises(self.client.CallActionError) as error_context:
        self.client.call_jobs_parallel(jobs)
    expected_errors = [Error(code='BAD_ACTION', message='You are a bad actor', is_caller_error=True)]
    self.assertEqual(expected_errors, error_context.exception.actions[0].errors)
    stub_test_action_1.assert_called_once_with({'a': 'b'})
    stub_test_action_2.assert_called_once_with({'c': 'd'})
_action('test_service', 'test_action_2', body={'three': 'four'})
_action('test_service', 'test_action_1')
# NOTE(review): the two _action(...) lines above look like stub decorators
# whose "@" prefix (likely "@stub_action") was lost in extraction -- confirm.
# Verifies that a JobError raised by a stub is returned as job-level errors
# on the response, instead of raising, when raise_job_errors=False is passed.
def test_two_stubs_with_parallel_calls_and_job_errors_not_raised(self, stub_test_action_1, stub_test_action_2):
# The first job's stub raises a job-level error; the second job succeeds.
stub_test_action_1.side_effect = JobError(errors=[Error(code='BAD_JOB', message='You are a bad job')])
job_responses = self.client.call_jobs_parallel([{'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'a': 'b'}}]}, {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'c': 'd'}}]}], raise_job_errors=False)
self.assertIsNotNone(job_responses)
self.assertEqual(2, len(job_responses))
# Failed job: no action responses, only the job-level error.
self.assertEqual(0, len(job_responses[0].actions))
self.assertEqual([Error(code='BAD_JOB', message='You are a bad job')], job_responses[0].errors)
# Successful job: a single action response with the stubbed body.
self.assertEqual(1, len(job_responses[1].actions))
self.assertEqual({'three': 'four'}, job_responses[1].actions[0].body)
# Both stubs received their request bodies exactly once.
stub_test_action_1.assert_called_once_with({'a': 'b'})
stub_test_action_2.assert_called_once_with({'c': 'd'})
_action('test_service', 'test_action_2', body={'three': 'four'})
_action('test_service', 'test_action_1')
def test_two_stubs_with_parallel_calls_and_action_errors_raised(self, stub_test_action_1, stub_test_action_2):
    """Setting an ActionError side effect on a stub makes call_jobs_parallel raise CallActionError."""
    stub_test_action_1.side_effect = ActionError(errors=[Error(code='BAD_ACTION', message='You are a bad actor')])
    jobs = [
        {'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'a': 'b'}}]},
        {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'c': 'd'}}]},
    ]
    with self.assertRaises(self.client.CallActionError) as error_context:
        self.client.call_jobs_parallel(jobs)
    self.assertEqual(
        [Error(code='BAD_ACTION', message='You are a bad actor', is_caller_error=True)],
        error_context.exception.actions[0].errors,
    )
    stub_test_action_1.assert_called_once_with({'a': 'b'})
    stub_test_action_2.assert_called_once_with({'c': 'd'})
_action('test_service', 'test_action_2', body={'three': 'four'})
_action('test_service', 'test_action_1')
# NOTE(review): the two _action(...) lines above look like stub decorators
# whose "@" prefix (likely "@stub_action") was lost in extraction -- confirm.
# Verifies that action-level errors on a stubbed ActionResponse are returned
# on the response, instead of raising, when raise_action_errors=False is passed.
def test_two_stubs_with_parallel_calls_and_action_response_errors_not_raised(self, stub_test_action_1, stub_test_action_2):
# First stub responds with an action carrying errors instead of a body.
stub_test_action_1.return_value = ActionResponse(action='test_action_1', errors=[Error(code='BAD_ACTION', message='You are a bad actor')])
job_responses = self.client.call_jobs_parallel([{'service_name': 'test_service', 'actions': [{'action': 'test_action_1', 'body': {'a': 'b'}}]}, {'service_name': 'test_service', 'actions': [{'action': 'test_action_2', 'body': {'c': 'd'}}]}], raise_action_errors=False)
self.assertIsNotNone(job_responses)
self.assertEqual(2, len(job_responses))
# First job: one action response whose errors list holds the stubbed error.
self.assertEqual(1, len(job_responses[0].actions))
self.assertEqual([Error(code='BAD_ACTION', message='You are a bad actor')], job_responses[0].actions[0].errors)
# Second job: one successful action response with the stubbed body.
self.assertEqual(1, len(job_responses[1].actions))
self.assertEqual({'three': 'four'}, job_responses[1].actions[0].body)
stub_test_action_1.assert_called_once_with({'a': 'b'})
stub_test_action_2.assert_called_once_with({'c': 'd'})
_action('test_service', 'test_action_1', body={'food': 'chicken'})
def test_send_receive_one_stub_simple(self, stub_test_action_1):
    """A single stubbed action round-trips through send_request/get_all_responses."""
    request_id = self.client.send_request('test_service', [{'action': 'test_action_1', 'body': {'menu': 'look'}}])
    self.assertIsNotNone(request_id)
    responses = list(self.client.get_all_responses('test_service'))
    self.assertEqual(1, len(responses))
    received_request_id, response = responses[0]
    # The response is matched back to the request that produced it.
    self.assertEqual(request_id, received_request_id)
    self.assertIsNotNone(response)
    self.assertEqual([], response.errors)
    self.assertEqual(1, len(response.actions))
    action_response = response.actions[0]
    self.assertEqual([], action_response.errors)
    self.assertEqual({'food': 'chicken'}, action_response.body)
    stub_test_action_1.assert_called_once_with({'menu': 'look'})
_action('test_service', 'test_action_1')
def test_send_receive_one_stub_multiple_calls(self, stub_test_action_1):
    """Two requests against the same stub are each matched back to their own request ID."""
    stub_test_action_1.side_effect = ({'look': 'menu'}, {'pepperoni': 'pizza'}, {'cheese': 'pizza'})
    request_id1 = self.client.send_request(
        'test_service',
        [{'action': 'test_action_1', 'body': {'menu': 'look'}}, {'action': 'test_action_1', 'body': {'pizza': 'pepperoni'}}],
    )
    request_id2 = self.client.send_request('test_service', [{'action': 'test_action_1', 'body': {'pizza': 'cheese'}}])
    self.assertIsNotNone(request_id1)
    self.assertIsNotNone(request_id2)
    responses = list(self.client.get_all_responses('test_service'))
    self.assertEqual(2, len(responses))
    response_dict = dict(responses)
    self.assertIn(request_id1, response_dict)
    self.assertIn(request_id2, response_dict)
    # First request carried two actions; their bodies come back in order.
    first_response = response_dict[request_id1]
    self.assertIsNotNone(first_response)
    self.assertEqual([], first_response.errors)
    self.assertEqual(2, len(first_response.actions))
    self.assertEqual([], first_response.actions[0].errors)
    self.assertEqual({'look': 'menu'}, first_response.actions[0].body)
    self.assertEqual({'pepperoni': 'pizza'}, first_response.actions[1].body)
    # Second request carried a single action.
    second_response = response_dict[request_id2]
    self.assertIsNotNone(second_response)
    self.assertEqual([], second_response.errors)
    self.assertEqual(1, len(second_response.actions))
    self.assertEqual([], second_response.actions[0].errors)
    self.assertEqual({'cheese': 'pizza'}, second_response.actions[0].body)
    stub_test_action_1.assert_has_calls(
        [mock.call({'menu': 'look'}), mock.call({'pizza': 'pepperoni'}), mock.call({'pizza': 'cheese'})],
        any_order=True,
    )
_action('test_service', 'test_action_1')
# NOTE(review): the _action(...) line above looks like a stub decorator whose
# "@" prefix (likely "@stub_action") was lost in extraction -- confirm.
# Mixes stubbed actions (test_action_1) with real ones (test_action_2) across
# three requests, and checks responses, errors, and ordering per request.
def test_send_receive_one_stub_one_real_call_mixture(self, stub_test_action_1):
# Consecutive stub results: a body, an action with errors, then a raised ActionError.
stub_test_action_1.side_effect = (ActionResponse(action='does not matter', body={'look': 'menu'}), ActionResponse(action='no', errors=[Error(code='WEIRD', field='pizza', message='Weird error about pizza')]), ActionError(errors=[Error(code='COOL', message='Another error')]))
actions = [{'action': 'test_action_1', 'body': {'menu': 'look'}}, {'action': 'test_action_2'}, {'action': 'test_action_1', 'body': {'pizza': 'pepperoni'}}, {'action': 'test_action_2'}, {'action': 'test_action_2'}]
# continue_on_error lets the remaining actions run after the errored one.
request_id1 = self.client.send_request('test_service', actions, continue_on_error=True)
request_id2 = self.client.send_request('test_service', [{'action': 'test_action_1', 'body': {'pizza': 'cheese'}}])
request_id3 = self.client.send_request('test_service', [{'action': 'test_action_2'}])
self.assertIsNotNone(request_id1)
self.assertIsNotNone(request_id2)
self.assertIsNotNone(request_id3)
responses = list(self.client.get_all_responses('test_service'))
self.assertEqual(3, len(responses))
response_dict = {k: v for (k, v) in responses}
self.assertIn(request_id1, response_dict)
self.assertIn(request_id2, response_dict)
self.assertIn(request_id3, response_dict)
# Request 1: five actions; stubbed and real results interleave in order.
response = response_dict[request_id1]
self.assertIsNotNone(response)
self.assertEqual([], response.errors)
self.assertEqual(5, len(response.actions))
self.assertEqual([], response.actions[0].errors)
self.assertEqual({'look': 'menu'}, response.actions[0].body)
self.assertEqual([], response.actions[1].errors)
self.assertEqual({'value': 0}, response.actions[1].body)
# Third action is the stub's errored ActionResponse.
self.assertEqual([Error(code='WEIRD', field='pizza', message='Weird error about pizza')], response.actions[2].errors)
self.assertEqual([], response.actions[3].errors)
self.assertEqual({'value': 0}, response.actions[3].body)
self.assertEqual([], response.actions[4].errors)
self.assertEqual({'value': 0}, response.actions[4].body)
# Request 2: the stub's raised ActionError becomes action-level errors.
response = response_dict[request_id2]
self.assertIsNotNone(response)
self.assertEqual([], response.errors)
self.assertEqual(1, len(response.actions))
self.assertEqual([Error(code='COOL', message='Another error', is_caller_error=True)], response.actions[0].errors)
# Request 3: a plain successful real action.
response = response_dict[request_id3]
self.assertIsNotNone(response)
self.assertEqual([], response.errors)
self.assertEqual(1, len(response.actions))
self.assertEqual([], response.actions[0].errors)
self.assertEqual({'value': 0}, response.actions[0].body)
stub_test_action_1.assert_has_calls([mock.call({'menu': 'look'}), mock.call({'pizza': 'pepperoni'}), mock.call({'pizza': 'cheese'})], any_order=True)
def test_generated_js1():
    """The generated JS for flexx.app._component2 defines the expected classes, in order."""
    module = app.assets.modules['flexx.app._component2']
    generated_js = module.get_js()
    # Every emitted class carries a "._base_class =" assignment; the token
    # before the first dot on such a line is the class name.
    found = [line.split('.')[0] for line in generated_js.splitlines() if '._base_class =' in line]
    assert found == ['LocalProperty', 'BaseAppComponent', 'LocalComponent', 'ProxyComponent', 'StubComponent', 'JsComponent', 'PyComponent']
    print(found)
.django_db(transaction=True)
# NOTE(review): the line above appears to be a decorator (likely
# "@pytest.mark.django_db(transaction=True)") whose prefix was lost in
# extraction -- confirm against the original file.
# Posting a 'filters' value that is not a dict must yield HTTP 400 with a
# descriptive error detail.
def test_download_transactions_bad_filter_type_raises(client, monkeypatch, download_test_data, elasticsearch_transaction_index):
setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
# Point download generation at the test database DSN.
download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string(settings.DOWNLOAD_DB_ALIAS))
# 'filters' is intentionally a string, not the expected dict.
payload = {'filters': '01', 'columns': []}
resp = client.post('/api/v2/download/transactions/', content_type='application/json', data=json.dumps(payload))
assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
assert (resp.json()['detail'] == 'Filters parameter not provided as a dict')
def test_external_template_actually_loads():
    """A template file read from TEMPLATES_DIR is actually used when building NER prompts."""
    template = file_reader(str(TEMPLATES_DIR / 'ner.jinja2'))
    nlp = spacy.blank('en')
    doc = nlp.make_doc('Alice and Bob went to the supermarket')
    llm_ner = make_ner_task_v3(examples=[], labels='PER,ORG,LOC', template=template)
    prompts = list(llm_ner.generate_prompts([doc]))
    # The first prompt must begin with the external template's marker text.
    assert prompts[0].strip().startswith("Here's the test template for the tests and stuff")
class categorical_datasource(CategoricalDataSource):
    """Categorical feature data source reading one ``<fname>.feats`` file per item.

    Each item is a dict with the original filename under ``'fname'`` and the
    text content of the corresponding feats file under ``'mask'``.
    """

    def __init__(self, fnames_file, desc_file, feat_name, feats_dir, feats_dict=None):
        super(categorical_datasource, self).__init__(fnames_file, desc_file, feat_name, feats_dir, feats_dict)

    def __getitem__(self, idx):
        """Return ``{'fname': ..., 'mask': ...}`` for the idx-th filename.

        Raises AssertionError if this source was not built for categorical
        features.
        """
        assert (self.feat_type == 'categorical')
        fname = self.filenames_array[idx]
        arr = {'fname': fname}
        feats_path = ((self.feats_dir + '/') + fname.strip()) + '.feats'
        # Fix: use a context manager so the file handle is always closed --
        # the previous implementation opened the file without closing it,
        # leaking one descriptor per __getitem__ call.
        with open(feats_path) as feats_file:
            for line in feats_file:
                # If the file has multiple lines, the last one wins
                # (matches the original behavior; files presumably hold
                # one mask line each -- TODO confirm).
                arr['mask'] = line.split('\n')[0]
        return arr
class OptionPlotoptionsLollipopSonificationContexttracksActivewhen(Options):
# Generated option proxy for the Highcharts path
# plotOptions.lollipop.sonification.contextTracks.activeWhen.
# NOTE(review): each pair of same-named methods below looks like a @property
# getter plus its corresponding setter whose decorators were lost in
# extraction; as written, the second definition of each name shadows the
# first -- confirm against the original generated source.
# Getter/setter pair for the 'crossingDown' option value.
def crossingDown(self):
return self._config_get(None)
def crossingDown(self, num: float):
self._config(num, js_type=False)
# Getter/setter pair for the 'crossingUp' option value.
def crossingUp(self):
return self._config_get(None)
def crossingUp(self, num: float):
self._config(num, js_type=False)
# Getter/setter pair for the 'max' option value.
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
# Getter/setter pair for the 'min' option value.
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
# Getter/setter pair for the 'prop' option value (a string, unlike the others).
def prop(self):
return self._config_get(None)
def prop(self, text: str):
self._config(text, js_type=False)
def test_threshold_peaks():
    """threshold_peaks drops peak rows that fall below the given threshold."""
    # Default parameter: only the last row survives a 2.5 threshold.
    peaks = np.array([[10, 1, 1.8], [14, 2, 4], [12, 3, 2.5]])
    assert np.array_equal(threshold_peaks(peaks, 2.5), np.array([[12, 3, 2.5]]))
    # Thresholding on 'BW' keeps the last two rows.
    peaks = np.array([[10, 1, 1.8], [14, 2, 4], [12, 3, 2.5]])
    assert np.array_equal(threshold_peaks(peaks, 2, param='BW'), np.array([[14, 2, 4], [12, 3, 2.5]]))
    # Four-column input: a single row survives a 1.5 threshold.
    peaks = np.array([[10, 1, 1.8, 0], [13, 1, 2, 2], [14, 2, 4, 2]])
    assert np.array_equal(threshold_peaks(peaks, 1.5), np.array([[14, 2, 4, 2]]))
def _dhcpHandler(packet_type):
def decorator(f):
def wrappedHandler(self, packet, source_address, port):
with _PacketWrapper(self, packet, packet_type, source_address, port) as wrapper:
if (not wrapper.valid):
return
f(self, wrapper)
return wrappedHandler
return decorator |
class VmMigrate(APIView):
# API view that starts a VM migration task for the VM identified by
# hostname_or_uuid. Only put() is implemented here.
def __init__(self, request, hostname_or_uuid, data):
super(VmMigrate, self).__init__(request)
self.hostname_or_uuid = hostname_or_uuid
self.data = data
# Resolve the target VM up front; fails the request if it does not exist.
self.vm = get_vm(request, hostname_or_uuid, exists_ok=True, noexists_fail=True)
def put(self):
(request, vm) = (self.request, self.vm)
# Preconditions: the VM must be unlocked, stopped or running, in sync
# with its stored definition, and free of pending tasks.
if vm.locked:
raise VmIsLocked
if (vm.status not in (vm.STOPPED, vm.RUNNING)):
raise VmIsNotOperational('VM is not stopped or running')
if vm.json_changed():
raise PreconditionRequired('VM definition has changed; Update first')
ser = VmMigrateSerializer(request, vm, data=self.data)
if (not ser.is_valid()):
return FailureTaskResponse(request, ser.errors, vm=vm)
if vm.tasks:
raise VmHasPendingTasks
# err doubles as a "task not successfully started" flag for the
# finally-block rollback below.
err = True
ghost_vm = None
vm.set_notready()
try:
# Create the ghost VM and start any required image import; block_key
# serializes this task behind the import if one is needed.
ghost_vm = ser.save_ghost_vm()
block_key = ser.node_image_import()
detail_dict = ser.detail_dict()
apiview = {'view': 'vm_migrate', 'method': request.method, 'hostname': vm.hostname}
apiview.update(detail_dict)
# One migrate task per VM at a time.
lock = ('vm_migrate vm:%s' % vm.uuid)
meta = {'output': {'returncode': 'returncode', 'stderr': 'message', 'stdout': 'json'}, 'replace_stderr': ((vm.uuid, vm.hostname),), 'msg': LOG_MIGRATE, 'vm_uuid': vm.uuid, 'slave_vm_uuid': ghost_vm.uuid, 'apiview': apiview}
callback = ('api.vm.migrate.tasks.vm_migrate_cb', {'vm_uuid': vm.uuid, 'slave_vm_uuid': ghost_vm.uuid})
(tid, err) = execute(request, vm.owner.id, ser.esmigrate_cmd, meta=meta, lock=lock, callback=callback, queue=vm.node.fast_queue, block_key=block_key)
if err:
return FailureTaskResponse(request, err, vm=vm)
else:
return TaskResponse(request, tid, msg=LOG_MIGRATE, vm=vm, api_view=apiview, detail_dict=detail_dict, data=self.data)
finally:
# On any failure before the task started, restore the VM's state and
# remove the ghost VM created above.
if err:
vm.revert_notready()
if ghost_vm:
ghost_vm.delete()
.django_db
# NOTE(review): the line above appears to be a decorator (likely
# "@pytest.mark.django_db") whose prefix was lost in extraction -- confirm
# against the original file.
# The GmSettleRewards mutation should succeed and persist GameReward rows.
def test_GmSettleRewards(Q, auth_header):
# Two players so that winners=[1] and deserters=[2] both resolve to rows.
from player.tests import PlayerFactory
PlayerFactory.create()
PlayerFactory.create()
g = GameFactory.create()
game = {'gameId': g.id, 'name': 'foo!', 'type': 'THBattle2v2', 'flags': {}, 'players': [1, 2], 'winners': [1], 'deserters': [2], 'startedAt': '2020-12-02T15:43:05Z', 'duration': 333}
rst = Q('\n mutation TestGmSettleRewards($game: GameInput!) {\n GmSettleRewards(game: $game) {\n id\n }\n }\n ', variables={'game': game}, headers=auth_header)
assert ('errors' not in rst)
# The mutation returns the created reward records; the first id must exist
# in the database.
rid = rst['data']['GmSettleRewards'][0]['id']
assert rid
models.GameReward.objects.get(id=rid)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.