code stringlengths 281 23.7M |
|---|
@pytest.mark.parametrize(
    # The original decorator was reduced to a bare no-op tuple expression,
    # leaving the test's parameters unresolvable as fixtures; restored here.
    'input_conf, passthrough, expected',
    [
        param({'node': {'_target_': 'tests.instantiate.AClass', 'a': '${value}', 'b': 20, 'c': 30, 'd': 40}, 'value': 99}, {}, AClass(99, 20, 30, 40), id='interpolation_into_parent'),
        param({'node': {'_target_': 'tests.instantiate.AClass', '_partial_': True, 'a': '${value}', 'b': 20}, 'value': 99}, {}, partial(AClass, a=99, b=20), id='interpolation_into_parent_partial'),
        param({'A': {'_target_': 'tests.instantiate.add_values', 'a': 1, 'b': 2}, 'node': {'_target_': 'tests.instantiate.add_values', '_partial_': True, 'a': '${A}'}}, {}, partial(add_values, a=3), id='interpolation_from_recursive_partial'),
        param({'A': {'_target_': 'tests.instantiate.add_values', 'a': 1, 'b': 2}, 'node': {'_target_': 'tests.instantiate.add_values', 'a': '${A}', 'b': 3}}, {}, 6, id='interpolation_from_recursive'),
    ],
)
def test_interpolation_accessing_parent(instantiate_func: Any, input_conf: Any, passthrough: Dict[str, Any], expected: Any) -> Any:
    """Instantiating a child node resolves interpolations against the parent
    config and leaves the input config unmodified."""
    cfg_copy = OmegaConf.create(input_conf)
    input_conf = OmegaConf.create(input_conf)
    obj = instantiate_func(input_conf.node, **passthrough)
    if isinstance(expected, partial):
        # functools.partial objects do not compare by value; use the helper.
        assert partial_equal(obj, expected)
    else:
        assert obj == expected
    # instantiation must not mutate the source config
    assert input_conf == cfg_copy
class CodeGenerator(NodeVisitor):
def __init__(self, environment, name, filename, stream=None, defer_init=False, optimized=True):
    """Set up state for compiling one template to Python source.

    :param environment: the configuring environment object.
    :param name: load name of the template (may be ``None``).
    :param filename: filename the template was loaded from.
    :param stream: output stream; a fresh ``NativeStringIO`` when ``None``.
    :param defer_init: when true, the generated render functions are not
        given an ``environment=environment`` keyword default (see
        ``visit_Template``).
    :param optimized: create an ``Optimizer`` for constant folding.
    """
    if (stream is None):
        stream = NativeStringIO()
    self.environment = environment
    self.name = name
    self.filename = filename
    self.stream = stream
    # set once here; not read anywhere in this chunk of the file
    self.created_block_context = False
    self.defer_init = defer_init
    self.optimized = optimized
    if optimized:
        self.optimizer = Optimizer(environment)
    # import name -> generated alias identifier
    self.import_aliases = {}
    # block name -> Block node, filled by visit_Template
    self.blocks = {}
    # number of {% extends %} statements compiled so far
    self.extends_so_far = 0
    # True once an extends was seen at root level (visit_Extends)
    self.has_known_extends = False
    # current line number of the *generated* code
    self.code_lineno = 1
    # test/filter name -> local alias, filled by pull_dependencies
    self.tests = {}
    self.filters = {}
    # (template lineno, code lineno) pairs emitted for tracebacks
    self.debug_info = []
    self._write_debug_info = None
    # pending newline count / last template line a node was written for
    self._new_lines = 0
    self._last_line = 0
    self._first_write = True
    # counter backing temporary_identifier()
    self._last_identifier = 0
    self._indentation = 0
    # tracking stacks: toplevel assignments, parameter defaults, context refs
    self._assign_stack = []
    self._param_def_block = []
    self._context_reference_stack = ['context']
def fail(self, msg, lineno):
    """Abort compilation with a ``TemplateAssertionError`` at *lineno*."""
    error = TemplateAssertionError(msg, lineno, self.name, self.filename)
    raise error
def temporary_identifier(self):
    """Return a fresh, unique local variable name of the form ``t_<n>``."""
    next_id = self._last_identifier + 1
    self._last_identifier = next_id
    return 't_%d' % next_id
def buffer(self, frame):
    """Enable buffering for *frame*: output is collected in a fresh list."""
    buf_name = self.temporary_identifier()
    frame.buffer = buf_name
    self.writeline('%s = []' % buf_name)
def return_buffer_contents(self, frame, force_unescaped=False):
    """Emit a ``return`` of the buffered output, escaping per the eval context."""
    if not force_unescaped:
        if frame.eval_ctx.volatile:
            # autoescape only known at runtime: emit both branches
            self.writeline('if context.eval_ctx.autoescape:')
            self.indent()
            self.writeline('return Markup(concat(%s))' % frame.buffer)
            self.outdent()
            self.writeline('else:')
            self.indent()
            self.writeline('return concat(%s)' % frame.buffer)
            self.outdent()
            return
        if frame.eval_ctx.autoescape:
            self.writeline('return Markup(concat(%s))' % frame.buffer)
            return
    self.writeline('return concat(%s)' % frame.buffer)
def indent(self):
    """Raise the indentation level of the generated code by one."""
    self._indentation = self._indentation + 1
def outdent(self, step=1):
    """Lower the indentation level of the generated code by *step*."""
    self._indentation = self._indentation - step
def start_write(self, frame, node=None):
    """Open an output write: append to the frame buffer, or ``yield`` directly."""
    if frame.buffer is not None:
        self.writeline('%s.append(' % frame.buffer, node)
    else:
        self.writeline('yield ', node)
def end_write(self, frame):
    """Close a write opened by :meth:`start_write` (no-op when yielding)."""
    if frame.buffer is None:
        return
    self.write(')')
def simple_write(self, s, frame, node=None):
    """Write the expression string *s* as one complete output statement."""
    self.start_write(frame, node)
    self.write(s)
    self.end_write(frame)
def blockvisit(self, nodes, frame):
    """Visit a list of statement nodes; a leading ``pass`` keeps empty bodies valid."""
    try:
        self.writeline('pass')
        for child in nodes:
            self.visit(child, frame)
    except CompilerExit:
        # a visitor signalled that the remaining statements are dead code
        pass
def write(self, x):
    """Write the string *x* into the stream, flushing pending newlines and
    indentation first and recording debug-info line mappings."""
    if self._new_lines:
        if (not self._first_write):
            self.stream.write(('\n' * self._new_lines))
            self.code_lineno += self._new_lines
            if (self._write_debug_info is not None):
                self.debug_info.append((self._write_debug_info, self.code_lineno))
                self._write_debug_info = None
        self._first_write = False
        # NOTE(review): indentation unit as displayed is a single space per
        # level — upstream Jinja2 uses four; confirm against the real file.
        self.stream.write((' ' * self._indentation))
        self._new_lines = 0
    self.stream.write(x)
def writeline(self, x, node=None, extra=0):
    """Write *x* on a fresh line, tracking *node* for debug information."""
    self.newline(node, extra)
    self.write(x)
def newline(self, node=None, extra=0):
    """Request at least ``1 + extra`` newlines before the next write."""
    wanted = 1 + extra
    if wanted > self._new_lines:
        self._new_lines = wanted
    if node is not None and node.lineno != self._last_line:
        # remember the template line so write() can record the mapping
        self._write_debug_info = node.lineno
        self._last_line = node.lineno
def signature(self, node, frame, extra_kwargs=None):
    """Write a call signature for *node* (args, kwargs, ``*``/``**``).

    Keyword names that collide with Python keywords cannot be written as
    ``name=value``; in that case all keywords are routed through a
    ``**{...}`` dict literal instead (the "kwarg workaround").
    """
    kwarg_workaround = False
    for kwarg in chain((x.key for x in node.kwargs), (extra_kwargs or ())):
        if is_python_keyword(kwarg):
            kwarg_workaround = True
            break
    for arg in node.args:
        self.write(', ')
        self.visit(arg, frame)
    if (not kwarg_workaround):
        # safe to write plain name=value keyword arguments
        for kwarg in node.kwargs:
            self.write(', ')
            self.visit(kwarg, frame)
        if (extra_kwargs is not None):
            for (key, value) in iteritems(extra_kwargs):
                self.write((', %s=%s' % (key, value)))
    if node.dyn_args:
        self.write(', *')
        self.visit(node.dyn_args, frame)
    if kwarg_workaround:
        # merge keywords through a dict literal; combine with **dyn_kwargs if present
        if (node.dyn_kwargs is not None):
            self.write(', **dict({')
        else:
            self.write(', **{')
        for kwarg in node.kwargs:
            self.write(('%r: ' % kwarg.key))
            self.visit(kwarg.value, frame)
            self.write(', ')
        if (extra_kwargs is not None):
            for (key, value) in iteritems(extra_kwargs):
                self.write(('%r: %s, ' % (key, value)))
        if (node.dyn_kwargs is not None):
            self.write('}, **')
            self.visit(node.dyn_kwargs, frame)
            self.write(')')
        else:
            self.write('}')
    elif (node.dyn_kwargs is not None):
        self.write(', **')
        self.visit(node.dyn_kwargs, frame)
def pull_dependencies(self, nodes):
    """Find all filters/tests used below *nodes* and bind them to local aliases."""
    finder = DependencyFinderVisitor()
    for node in nodes:
        finder.visit(node)
    for kind in ('filters', 'tests'):
        alias_map = getattr(self, kind)
        for name in getattr(finder, kind):
            if name in alias_map:
                continue
            alias = self.temporary_identifier()
            alias_map[name] = alias
            self.writeline('%s = environment.%s[%r]' % (alias, kind, name))
def enter_frame(self, frame):
    """Emit the variable-load statements required when entering *frame*."""
    undefs = []
    for target, (action, param) in iteritems(frame.symbols.loads):
        if action == VAR_LOAD_PARAMETER:
            continue  # already bound by the enclosing function signature
        if action == VAR_LOAD_RESOLVE:
            self.writeline('%s = %s(%r)' % (target, self.get_resolve_func(), param))
        elif action == VAR_LOAD_ALIAS:
            self.writeline('%s = %s' % (target, param))
        elif action == VAR_LOAD_UNDEFINED:
            undefs.append(target)
        else:
            raise NotImplementedError('unknown load instruction')
    if undefs:
        self.writeline('%s = missing' % ' = '.join(undefs))
def leave_frame(self, frame, with_python_scope=False):
    """Reset frame-local names to ``missing`` unless a real Python scope ends here."""
    if with_python_scope:
        return
    undefs = [target for target, _ in iteritems(frame.symbols.loads)]
    if undefs:
        self.writeline('%s = missing' % ' = '.join(undefs))
def func(self, name):
    """Return the ``def``/``async def`` header for *name*, per environment."""
    keyword = 'async def' if self.environment.is_async else 'def'
    return '%s %s' % (keyword, name)
def macro_body(self, node, frame):
    """Dump the macro function body for *node* and return ``(frame, MacroRef)``.

    Handles the implicit special parameters ``caller``, ``kwargs`` and
    ``varargs``: each is appended as a parameter only if the body actually
    uses it and it is not already an explicit argument.
    """
    frame = frame.inner()
    frame.symbols.analyze_node(node)
    macro_ref = MacroRef(node)
    explicit_caller = None
    skip_special_params = set()
    args = []
    for (idx, arg) in enumerate(node.args):
        if (arg.name == 'caller'):
            explicit_caller = idx
        if (arg.name in ('kwargs', 'varargs')):
            skip_special_params.add(arg.name)
        args.append(frame.symbols.ref(arg.name))
    undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs'))
    if ('caller' in undeclared):
        # an explicit caller argument must carry a default value
        if (explicit_caller is not None):
            try:
                node.defaults[(explicit_caller - len(node.args))]
            except IndexError:
                self.fail('When defining macros or call blocks the special "caller" argument must be omitted or be given a default.', node.lineno)
        else:
            args.append(frame.symbols.declare_parameter('caller'))
        macro_ref.accesses_caller = True
    if (('kwargs' in undeclared) and (not ('kwargs' in skip_special_params))):
        args.append(frame.symbols.declare_parameter('kwargs'))
        macro_ref.accesses_kwargs = True
    if (('varargs' in undeclared) and (not ('varargs' in skip_special_params))):
        args.append(frame.symbols.declare_parameter('varargs'))
        macro_ref.accesses_varargs = True
    # macros are always unescaped internally; output checks don't apply
    frame.require_output_check = False
    frame.symbols.analyze_node(node)
    self.writeline(('%s(%s):' % (self.func('macro'), ', '.join(args))), node)
    self.indent()
    self.buffer(frame)
    self.enter_frame(frame)
    self.push_parameter_definitions(frame)
    # fill in defaults for parameters the caller did not provide
    for (idx, arg) in enumerate(node.args):
        ref = frame.symbols.ref(arg.name)
        self.writeline(('if %s is missing:' % ref))
        self.indent()
        try:
            default = node.defaults[(idx - len(node.args))]
        except IndexError:
            self.writeline(('%s = undefined(%r, name=%r)' % (ref, ('parameter %r was not provided' % arg.name), arg.name)))
        else:
            self.writeline(('%s = ' % ref))
            self.visit(default, frame)
        self.mark_parameter_stored(ref)
        self.outdent()
    self.pop_parameter_definitions()
    self.blockvisit(node.body, frame)
    self.return_buffer_contents(frame, force_unescaped=True)
    self.leave_frame(frame, with_python_scope=True)
    self.outdent()
    return (frame, macro_ref)
def macro_def(self, macro_ref, frame):
    """Write the ``Macro(...)`` wrapper expression for a compiled macro body."""
    node_args = macro_ref.node.args
    arg_tuple = ', '.join(repr(x.name) for x in node_args)
    if len(node_args) == 1:
        arg_tuple += ','  # keep a one-element tuple a tuple
    macro_name = getattr(macro_ref.node, 'name', None)
    self.write(
        'Macro(environment, macro, %r, (%s), %r, %r, %r, context.eval_ctx.autoescape)'
        % (macro_name, arg_tuple, macro_ref.accesses_kwargs,
           macro_ref.accesses_varargs, macro_ref.accesses_caller))
def position(self, node):
    """Describe *node*'s template position for use in error messages."""
    location = 'line %d' % node.lineno
    if self.name is not None:
        location = '%s in %r' % (location, self.name)
    return location
def dump_local_context(self, frame):
    """Render the frame's stored variables as a Python dict-literal string."""
    pairs = iteritems(frame.symbols.dump_stores())
    body = ', '.join('%r: %s' % (name, target) for name, target in pairs)
    return '{%s}' % body
def write_commons(self):
    """Emit the helper bindings shared by every generated render function."""
    for line in (
        'resolve = context.resolve_or_missing',
        'undefined = environment.undefined',
        # force the function to be a generator even if it never yields
        'if 0: yield None',
    ):
        self.writeline(line)
def push_parameter_definitions(self, frame):
    """Start tracking *frame*'s parameter targets while defaults are emitted."""
    targets = frame.symbols.dump_param_targets()
    self._param_def_block.append(targets)
def pop_parameter_definitions(self):
    """Drop the innermost parameter-definition tracking set."""
    del self._param_def_block[-1]
def mark_parameter_stored(self, target):
    """Record that parameter *target* now holds a real value."""
    if not self._param_def_block:
        return
    self._param_def_block[-1].discard(target)
def push_context_reference(self, target):
    """Make *target* the context object used for lookups from here on."""
    stack = self._context_reference_stack
    stack.append(target)
def pop_context_reference(self):
    """Restore the previously active context reference."""
    del self._context_reference_stack[-1]
def get_context_ref(self):
    """Return the name of the innermost context reference."""
    stack = self._context_reference_stack
    return stack[-1]
def get_resolve_func(self):
    """Return the resolve callable expression for the current context ref."""
    current = self._context_reference_stack[-1]
    if current != 'context':
        return '%s.resolve' % current
    # the default context has a local alias (see write_commons)
    return 'resolve'
def derive_context(self, frame):
    """Return an expression deriving a child context with *frame*'s locals."""
    locals_repr = self.dump_local_context(frame)
    return '%s.derived(%s)' % (self.get_context_ref(), locals_repr)
def parameter_is_undeclared(self, target):
    """True if *target* is a parameter whose default is still being set up."""
    blocks = self._param_def_block
    return bool(blocks) and target in blocks[-1]
def push_assign_tracking(self):
    """Open a new scope for tracking toplevel assignments."""
    fresh = set()
    self._assign_stack.append(fresh)
def pop_assign_tracking(self, frame):
    """Close an assignment-tracking scope and publish toplevel names to the context."""
    assigned = self._assign_stack.pop()
    if not frame.toplevel or not assigned:
        return
    public_names = [x for x in assigned if x[:1] != '_']
    if len(assigned) == 1:
        name = next(iter(assigned))
        self.writeline('context.vars[%r] = %s' % (name, frame.symbols.ref(name)))
    else:
        self.writeline('context.vars.update({')
        for idx, name in enumerate(assigned):
            if idx:
                self.write(', ')
            self.write('%r: %s' % (name, frame.symbols.ref(name)))
        self.write('})')
    if not public_names:
        return
    if len(public_names) == 1:
        self.writeline('context.exported_vars.add(%r)' % public_names[0])
    else:
        self.writeline('context.exported_vars.update((%s))'
                       % ', '.join(imap(repr, public_names)))
def visit_Template(self, node, frame=None):
    """Compile the template root: module preamble, ``root()`` render
    function, one ``block_<name>()`` function per block, and the
    ``blocks``/``debug_info`` module globals."""
    assert (frame is None), 'no root frame allowed'
    eval_ctx = EvalContext(self.environment, self.name)
    from .runtime import __all__ as exported
    self.writeline(('from __future__ import %s' % ', '.join(code_features)))
    self.writeline(('from mdpopups.jinja2.runtime import ' + ', '.join(exported)))
    if self.environment.is_async:
        self.writeline('from mdpopups.jinja2.asyncsupport import auto_await, auto_aiter, make_async_loop_context')
    # unless deferred, render functions close over the environment
    envenv = (((not self.defer_init) and ', environment=environment') or '')
    have_extends = (node.find(nodes.Extends) is not None)
    # register all blocks; duplicate names are a compile error
    for block in node.find_all(nodes.Block):
        if (block.name in self.blocks):
            self.fail(('block %r defined twice' % block.name), block.lineno)
        self.blocks[block.name] = block
    # alias every imported name to a generated identifier
    for import_ in node.find_all(nodes.ImportedName):
        if (import_.importname not in self.import_aliases):
            imp = import_.importname
            self.import_aliases[imp] = alias = self.temporary_identifier()
            if ('.' in imp):
                (module, obj) = imp.rsplit('.', 1)
                self.writeline(('from %s import %s as %s' % (module, obj, alias)))
            else:
                self.writeline(('import %s as %s' % (imp, alias)))
    self.writeline(('name = %r' % self.name))
    # the root render function
    self.writeline(('%s(context, missing=missing%s):' % (self.func('root'), envenv)), extra=1)
    self.indent()
    self.write_commons()
    frame = Frame(eval_ctx)
    if ('self' in find_undeclared(node.body, ('self',))):
        ref = frame.symbols.declare_parameter('self')
        self.writeline(('%s = TemplateReference(context)' % ref))
    frame.symbols.analyze_node(node)
    frame.toplevel = frame.rootlevel = True
    frame.require_output_check = (have_extends and (not self.has_known_extends))
    if have_extends:
        self.writeline('parent_template = None')
    self.enter_frame(frame)
    self.pull_dependencies(node.body)
    self.blockvisit(node.body, frame)
    self.leave_frame(frame, with_python_scope=True)
    self.outdent()
    # delegate the rest of rendering to the parent template, if any
    if have_extends:
        if (not self.has_known_extends):
            self.indent()
            self.writeline('if parent_template is not None:')
        self.indent()
        if (supports_yield_from and (not self.environment.is_async)):
            self.writeline('yield from parent_template.root_render_func(context)')
        else:
            self.writeline(('%sfor event in parent_template.root_render_func(context):' % ((self.environment.is_async and 'async ') or '')))
            self.indent()
            self.writeline('yield event')
            self.outdent()
        self.outdent((1 + (not self.has_known_extends)))
    # one render function per {% block %}
    for (name, block) in iteritems(self.blocks):
        self.writeline(('%s(context, missing=missing%s):' % (self.func(('block_' + name)), envenv)), block, 1)
        self.indent()
        self.write_commons()
        block_frame = Frame(eval_ctx)
        undeclared = find_undeclared(block.body, ('self', 'super'))
        if ('self' in undeclared):
            ref = block_frame.symbols.declare_parameter('self')
            self.writeline(('%s = TemplateReference(context)' % ref))
        if ('super' in undeclared):
            ref = block_frame.symbols.declare_parameter('super')
            self.writeline(('%s = context.super(%r, block_%s)' % (ref, name, name)))
        block_frame.symbols.analyze_node(block)
        block_frame.block = name
        self.enter_frame(block_frame)
        self.pull_dependencies(block.body)
        self.blockvisit(block.body, block_frame)
        self.leave_frame(block_frame, with_python_scope=True)
        self.outdent()
    self.writeline(('blocks = {%s}' % ', '.join((('%r: block_%s' % (x, x)) for x in self.blocks))), extra=1)
    self.writeline(('debug_info = %r' % '&'.join((('%s=%s' % x) for x in self.debug_info))))
def visit_Block(self, node, frame):
    """Call a block and register it for the template."""
    level = 0
    if frame.toplevel:
        # with a known extends this block is rendered by the parent instead
        if self.has_known_extends:
            return
        if (self.extends_so_far > 0):
            self.writeline('if parent_template is None:')
            self.indent()
            level += 1
    if node.scoped:
        context = self.derive_context(frame)
    else:
        context = self.get_context_ref()
    if (supports_yield_from and (not self.environment.is_async) and (frame.buffer is None)):
        self.writeline(('yield from context.blocks[%r][0](%s)' % (node.name, context)), node)
    else:
        loop = ((self.environment.is_async and 'async for') or 'for')
        self.writeline(('%s event in context.blocks[%r][0](%s):' % (loop, node.name, context)), node)
        self.indent()
        self.simple_write('event', frame)
        self.outdent()
    self.outdent(level)
def visit_Extends(self, node, frame):
    """Compile an extends statement and register the parent's blocks."""
    if (not frame.toplevel):
        self.fail('cannot use extend from a non top-level scope', node.lineno)
    if (self.extends_so_far > 0):
        # a second extends is a runtime error; with a known root-level
        # extends it is unconditional, so stop compiling this branch
        if (not self.has_known_extends):
            self.writeline('if parent_template is not None:')
            self.indent()
        self.writeline(('raise TemplateRuntimeError(%r)' % 'extended multiple times'))
        if self.has_known_extends:
            raise CompilerExit()
        else:
            self.outdent()
    self.writeline('parent_template = environment.get_template(', node)
    self.visit(node.template, frame)
    self.write((', %r)' % self.name))
    self.writeline(('for name, parent_block in parent_template.blocks.%s():' % dict_item_iter))
    self.indent()
    self.writeline('context.blocks.setdefault(name, []).append(parent_block)')
    self.outdent()
    if frame.rootlevel:
        self.has_known_extends = True
    self.extends_so_far += 1
def visit_Include(self, node, frame):
    """Compile an include statement, honouring ``ignore missing`` and
    ``with context``."""
    if node.ignore_missing:
        self.writeline('try:')
        self.indent()
    # pick the lookup function from the template expression's shape
    func_name = 'get_or_select_template'
    if isinstance(node.template, nodes.Const):
        if isinstance(node.template.value, string_types):
            func_name = 'get_template'
        elif isinstance(node.template.value, (tuple, list)):
            func_name = 'select_template'
    elif isinstance(node.template, (nodes.Tuple, nodes.List)):
        func_name = 'select_template'
    self.writeline(('template = environment.%s(' % func_name), node)
    self.visit(node.template, frame)
    self.write((', %r)' % self.name))
    if node.ignore_missing:
        self.outdent()
        self.writeline('except TemplateNotFound:')
        self.indent()
        self.writeline('pass')
        self.outdent()
        self.writeline('else:')
        self.indent()
    skip_event_yield = False
    if node.with_context:
        loop = ((self.environment.is_async and 'async for') or 'for')
        self.writeline(('%s event in template.root_render_func(template.new_context(context.get_all(), True, %s)):' % (loop, self.dump_local_context(frame))))
    elif self.environment.is_async:
        self.writeline('for event in (await template._get_default_module_async())._body_stream:')
    elif supports_yield_from:
        self.writeline('yield from template._get_default_module()._body_stream')
        skip_event_yield = True
    else:
        self.writeline('for event in template._get_default_module()._body_stream:')
    if (not skip_event_yield):
        self.indent()
        self.simple_write('event', frame)
        self.outdent()
    if node.ignore_missing:
        self.outdent()
def visit_Import(self, node, frame):
    """Compile an import statement: bind the template module to a name."""
    self.writeline(('%s = ' % frame.symbols.ref(node.target)), node)
    if frame.toplevel:
        self.write(('context.vars[%r] = ' % node.target))
    if self.environment.is_async:
        self.write('await ')
    self.write('environment.get_template(')
    self.visit(node.template, frame)
    self.write((', %r).' % self.name))
    if node.with_context:
        self.write(('make_module%s(context.get_all(), True, %s)' % (((self.environment.is_async and '_async') or ''), self.dump_local_context(frame))))
    elif self.environment.is_async:
        self.write('_get_default_module_async()')
    else:
        self.write('_get_default_module()')
    # imported modules are not exported from the template
    if (frame.toplevel and (not node.target.startswith('_'))):
        self.writeline(('context.exported_vars.discard(%r)' % node.target))
def visit_FromImport(self, node, frame):
    """Compile a from-import: pull individual names out of a template module."""
    self.newline(node)
    self.write(('included_template = %senvironment.get_template(' % ((self.environment.is_async and 'await ') or '')))
    self.visit(node.template, frame)
    self.write((', %r).' % self.name))
    if node.with_context:
        self.write(('make_module%s(context.get_all(), True, %s)' % (((self.environment.is_async and '_async') or ''), self.dump_local_context(frame))))
    elif self.environment.is_async:
        self.write('_get_default_module_async()')
    else:
        self.write('_get_default_module()')
    var_names = []
    discarded_names = []
    for name in node.names:
        # names may be plain or (name, alias) pairs
        if isinstance(name, tuple):
            (name, alias) = name
        else:
            alias = name
        self.writeline(('%s = getattr(included_template, %r, missing)' % (frame.symbols.ref(alias), name)))
        # missing attribute becomes an undefined with a helpful message
        self.writeline(('if %s is missing:' % frame.symbols.ref(alias)))
        self.indent()
        self.writeline(('%s = undefined(%r %% included_template.__name__, name=%r)' % (frame.symbols.ref(alias), ('the template %%r (imported on %s) does not export the requested name %s' % (self.position(node), repr(name))), name)))
        self.outdent()
        if frame.toplevel:
            var_names.append(alias)
            if (not alias.startswith('_')):
                discarded_names.append(alias)
    if var_names:
        if (len(var_names) == 1):
            name = var_names[0]
            self.writeline(('context.vars[%r] = %s' % (name, frame.symbols.ref(name))))
        else:
            self.writeline(('context.vars.update({%s})' % ', '.join((('%r: %s' % (name, frame.symbols.ref(name))) for name in var_names))))
    if discarded_names:
        if (len(discarded_names) == 1):
            self.writeline(('context.exported_vars.discard(%r)' % discarded_names[0]))
        else:
            self.writeline(('context.exported_vars.difference_update((%s))' % ', '.join(imap(repr, discarded_names))))
def visit_For(self, node, frame):
    """Compile a for loop: optional filter function, optional ``loop``
    special variable, recursive loops and the loop-else clause."""
    loop_frame = frame.inner()
    test_frame = frame.inner()
    else_frame = frame.inner()
    # the extended loop (with a `loop` variable) is needed for recursion
    # or whenever the body refers to `loop` without declaring it
    extended_loop = (node.recursive or ('loop' in find_undeclared(node.iter_child_nodes(only=('body',)), ('loop',))))
    loop_ref = None
    if extended_loop:
        loop_ref = loop_frame.symbols.declare_parameter('loop')
    loop_frame.symbols.analyze_node(node, for_branch='body')
    if node.else_:
        else_frame.symbols.analyze_node(node, for_branch='else')
    if node.test:
        # emit a generator that yields only items passing the test
        loop_filter_func = self.temporary_identifier()
        test_frame.symbols.analyze_node(node, for_branch='test')
        self.writeline(('%s(fiter):' % self.func(loop_filter_func)), node.test)
        self.indent()
        self.enter_frame(test_frame)
        self.writeline(((self.environment.is_async and 'async for ') or 'for '))
        self.visit(node.target, loop_frame)
        self.write(' in ')
        self.write(((self.environment.is_async and 'auto_aiter(fiter)') or 'fiter'))
        self.write(':')
        self.indent()
        self.writeline('if ', node.test)
        self.visit(node.test, test_frame)
        self.write(':')
        self.indent()
        self.writeline('yield ')
        self.visit(node.target, loop_frame)
        self.outdent(3)
        self.leave_frame(test_frame, with_python_scope=True)
    if node.recursive:
        # recursive loops become a buffered helper function
        self.writeline(('%s(reciter, loop_render_func, depth=0):' % self.func('loop')), node)
        self.indent()
        self.buffer(loop_frame)
        else_frame.buffer = loop_frame.buffer
    if extended_loop:
        self.writeline(('%s = missing' % loop_ref))
    for name in node.find_all(nodes.Name):
        if ((name.ctx == 'store') and (name.name == 'loop')):
            self.fail("Can't assign to special loop variable in for-loop target", name.lineno)
    if node.else_:
        iteration_indicator = self.temporary_identifier()
        self.writeline(('%s = 1' % iteration_indicator))
    self.writeline(((self.environment.is_async and 'async for ') or 'for '), node)
    self.visit(node.target, loop_frame)
    if extended_loop:
        if self.environment.is_async:
            self.write((', %s in await make_async_loop_context(' % loop_ref))
        else:
            self.write((', %s in LoopContext(' % loop_ref))
    else:
        self.write(' in ')
    if node.test:
        self.write(('%s(' % loop_filter_func))
    if node.recursive:
        self.write('reciter')
    else:
        if (self.environment.is_async and (not extended_loop)):
            self.write('auto_aiter(')
        self.visit(node.iter, frame)
        if (self.environment.is_async and (not extended_loop)):
            self.write(')')
    if node.test:
        self.write(')')
    if node.recursive:
        self.write(', undefined, loop_render_func, depth):')
    else:
        self.write(((extended_loop and ', undefined):') or ':'))
    self.indent()
    self.enter_frame(loop_frame)
    self.blockvisit(node.body, loop_frame)
    if node.else_:
        # any iteration clears the indicator so else does not run
        self.writeline(('%s = 0' % iteration_indicator))
    self.outdent()
    self.leave_frame(loop_frame, with_python_scope=(node.recursive and (not node.else_)))
    if node.else_:
        self.writeline(('if %s:' % iteration_indicator))
        self.indent()
        self.enter_frame(else_frame)
        self.blockvisit(node.else_, else_frame)
        self.leave_frame(else_frame)
        self.outdent()
    if node.recursive:
        # close the helper and emit the initial call at the current frame
        self.return_buffer_contents(loop_frame)
        self.outdent()
        self.start_write(frame, node)
        if self.environment.is_async:
            self.write('await ')
        self.write('loop(')
        if self.environment.is_async:
            self.write('auto_aiter(')
        self.visit(node.iter, frame)
        if self.environment.is_async:
            self.write(')')
        self.write(', loop)')
        self.end_write(frame)
def visit_If(self, node, frame):
    """Compile an if statement with its elif and else branches."""
    if_frame = frame.soft()
    self.writeline('if ', node)
    self.visit(node.test, if_frame)
    self.write(':')
    self.indent()
    self.blockvisit(node.body, if_frame)
    self.outdent()
    for branch in node.elif_:
        self.writeline('elif ', branch)
        self.visit(branch.test, if_frame)
        self.write(':')
        self.indent()
        self.blockvisit(branch.body, if_frame)
        self.outdent()
    if not node.else_:
        return
    self.writeline('else:')
    self.indent()
    self.blockvisit(node.else_, if_frame)
    self.outdent()
def visit_Macro(self, node, frame):
    """Compile a macro definition and bind/export it at toplevel."""
    (macro_frame, macro_ref) = self.macro_body(node, frame)
    self.newline()
    if frame.toplevel:
        if (not node.name.startswith('_')):
            self.write(('context.exported_vars.add(%r)' % node.name))
        # NOTE(review): `ref` is assigned but unused here (as in upstream)
        ref = frame.symbols.ref(node.name)
        self.writeline(('context.vars[%r] = ' % node.name))
    self.write(('%s = ' % frame.symbols.ref(node.name)))
    self.macro_def(macro_ref, macro_frame)
def visit_CallBlock(self, node, frame):
    """Compile a call block: its body becomes the ``caller`` macro."""
    (call_frame, macro_ref) = self.macro_body(node, frame)
    self.writeline('caller = ')
    self.macro_def(macro_ref, call_frame)
    self.start_write(frame, node)
    self.visit_Call(node.call, frame, forward_caller=True)
    self.end_write(frame)
def visit_FilterBlock(self, node, frame):
    """Compile a filter block: buffer the body, then write it filtered."""
    filter_frame = frame.inner()
    filter_frame.symbols.analyze_node(node)
    self.enter_frame(filter_frame)
    self.buffer(filter_frame)
    self.blockvisit(node.body, filter_frame)
    self.start_write(frame, node)
    self.visit_Filter(node.filter, filter_frame)
    self.end_write(frame)
    self.leave_frame(filter_frame)
def visit_With(self, node, frame):
    """Compile a with scope: assign each target, then compile the body."""
    with_frame = frame.inner()
    with_frame.symbols.analyze_node(node)
    self.enter_frame(with_frame)
    for target, value in izip(node.targets, node.values):
        self.newline()
        self.visit(target, with_frame)
        self.write(' = ')
        # values are evaluated in the enclosing frame, not the new scope
        self.visit(value, frame)
    self.blockvisit(node.body, with_frame)
    self.leave_frame(with_frame)
def visit_ExprStmt(self, node, frame):
    """Compile an expression statement: evaluate for effect, discard the result."""
    self.newline(node)
    self.visit(node.node, frame)
def visit_Output(self, node, frame):
    """Compile template output: constant-fold what can be folded, group
    adjacent constants, then emit yields/appends or one big format string."""
    # with a known extends this toplevel output is dead code
    if (self.has_known_extends and frame.require_output_check):
        return
    allow_constant_finalize = True
    if self.environment.finalize:
        func = self.environment.finalize
        if (getattr(func, 'contextfunction', False) or getattr(func, 'evalcontextfunction', False)):
            # context-dependent finalize cannot run at compile time
            allow_constant_finalize = False
        elif getattr(func, 'environmentfunction', False):
            finalize = (lambda x: text_type(self.environment.finalize(self.environment, x)))
        else:
            finalize = (lambda x: text_type(self.environment.finalize(x)))
    else:
        finalize = text_type
    outdent_later = False
    if frame.require_output_check:
        self.writeline('if parent_template is None:')
        self.indent()
        outdent_later = True
    # fold constant children; runs of constants are merged into one list
    body = []
    for child in node.nodes:
        try:
            if (not allow_constant_finalize):
                raise nodes.Impossible()
            const = child.as_const(frame.eval_ctx)
        except nodes.Impossible:
            body.append(child)
            continue
        try:
            if frame.eval_ctx.autoescape:
                if hasattr(const, '__html__'):
                    const = const.__html__()
                else:
                    const = escape(const)
            const = finalize(const)
        except Exception:
            # folding failed; fall back to evaluating at runtime
            body.append(child)
            continue
        if (body and isinstance(body[(- 1)], list)):
            body[(- 1)].append(const)
        else:
            body.append([const])
    if ((len(body) < 3) or (frame.buffer is not None)):
        # few items (or buffered): emit them one by one
        if (frame.buffer is not None):
            if (len(body) == 1):
                self.writeline(('%s.append(' % frame.buffer))
            else:
                self.writeline(('%s.extend((' % frame.buffer))
            self.indent()
        for item in body:
            if isinstance(item, list):
                val = repr(concat(item))
                if (frame.buffer is None):
                    self.writeline(('yield ' + val))
                else:
                    self.writeline((val + ','))
            else:
                if (frame.buffer is None):
                    self.writeline('yield ', item)
                else:
                    self.newline(item)
                close = 1
                if frame.eval_ctx.volatile:
                    self.write('(escape if context.eval_ctx.autoescape else to_string)(')
                elif frame.eval_ctx.autoescape:
                    self.write('escape(')
                else:
                    self.write('to_string(')
                if (self.environment.finalize is not None):
                    self.write('environment.finalize(')
                    if getattr(self.environment.finalize, 'contextfunction', False):
                        self.write('context, ')
                    close += 1
                self.visit(item, frame)
                self.write((')' * close))
                if (frame.buffer is not None):
                    self.write(',')
        if (frame.buffer is not None):
            self.outdent()
            self.writeline((((len(body) == 1) and ')') or '))'))
    else:
        # many items: yield one %-format string with all dynamic parts
        format = []
        arguments = []
        for item in body:
            if isinstance(item, list):
                format.append(concat(item).replace('%', '%%'))
            else:
                format.append('%s')
                arguments.append(item)
        self.writeline('yield ')
        self.write((repr(concat(format)) + ' % ('))
        self.indent()
        for argument in arguments:
            self.newline(argument)
            close = 0
            if frame.eval_ctx.volatile:
                self.write('(escape if context.eval_ctx.autoescape else to_string)(')
                close += 1
            elif frame.eval_ctx.autoescape:
                self.write('escape(')
                close += 1
            if (self.environment.finalize is not None):
                self.write('environment.finalize(')
                if getattr(self.environment.finalize, 'contextfunction', False):
                    self.write('context, ')
                elif getattr(self.environment.finalize, 'evalcontextfunction', False):
                    self.write('context.eval_ctx, ')
                elif getattr(self.environment.finalize, 'environmentfunction', False):
                    self.write('environment, ')
                close += 1
            self.visit(argument, frame)
            self.write(((')' * close) + ', '))
        self.outdent()
        self.writeline(')')
    if outdent_later:
        self.outdent()
def visit_Assign(self, node, frame):
    """Compile an assignment, tracking toplevel names for export."""
    self.push_assign_tracking()
    self.newline(node)
    self.visit(node.target, frame)
    self.write(' = ')
    self.visit(node.node, frame)
    self.pop_assign_tracking(frame)
def visit_AssignBlock(self, node, frame):
    """Compile a block assignment: buffer the body, optionally filter it,
    then assign the (Markup-wrapped when autoescaping) result."""
    self.push_assign_tracking()
    block_frame = frame.inner()
    block_frame.require_output_check = False
    block_frame.symbols.analyze_node(node)
    self.enter_frame(block_frame)
    self.buffer(block_frame)
    self.blockvisit(node.body, block_frame)
    self.newline(node)
    self.visit(node.target, frame)
    self.write(' = (Markup if context.eval_ctx.autoescape else identity)(')
    if (node.filter is not None):
        self.visit_Filter(node.filter, block_frame)
    else:
        self.write(('concat(%s)' % block_frame.buffer))
    self.write(')')
    self.pop_assign_tracking(frame)
    self.leave_frame(block_frame)
def visit_Name(self, node, frame):
    """Compile a name reference; loads of possibly-missing names are
    wrapped in an undefined() guard expression."""
    if ((node.ctx == 'store') and frame.toplevel):
        if self._assign_stack:
            self._assign_stack[(- 1)].add(node.name)
    ref = frame.symbols.ref(node.name)
    if (node.ctx == 'load'):
        load = frame.symbols.find_load(ref)
        # parameters that are fully declared are known to hold a value
        if (not ((load is not None) and (load[0] == VAR_LOAD_PARAMETER) and (not self.parameter_is_undeclared(ref)))):
            self.write(('(undefined(name=%r) if %s is missing else %s)' % (node.name, ref, ref)))
            return
    self.write(ref)
def visit_NSRef(self, node, frame):
    """Compile a namespace attribute target with a runtime type guard."""
    ref = frame.symbols.ref(node.name)
    self.writeline(('if not isinstance(%s, Namespace):' % ref))
    self.indent()
    self.writeline(('raise TemplateRuntimeError(%r)' % 'cannot assign attribute on non-namespace object'))
    self.outdent()
    self.writeline(('%s[%r]' % (ref, node.attr)))
def visit_Const(self, node, frame):
    """Write a constant literal; floats go through str() rather than repr()."""
    value = node.as_const(frame.eval_ctx)
    literal = str(value) if isinstance(value, float) else repr(value)
    self.write(literal)
def visit_TemplateData(self, node, frame):
    """Write raw template data; defer the escaping choice when not foldable."""
    try:
        const = node.as_const(frame.eval_ctx)
    except nodes.Impossible:
        self.write('(Markup if context.eval_ctx.autoescape else identity)(%r)' % node.data)
    else:
        self.write(repr(const))
def visit_Tuple(self, node, frame):
    """Write a tuple literal, keeping the trailing comma on one-element tuples."""
    self.write('(')
    last_index = -1
    for last_index, item in enumerate(node.items):
        if last_index:
            self.write(', ')
        self.visit(item, frame)
    self.write(',)' if last_index == 0 else ')')
def visit_List(self, node, frame):
    """Write a list literal."""
    self.write('[')
    first = True
    for item in node.items:
        if not first:
            self.write(', ')
        first = False
        self.visit(item, frame)
    self.write(']')
def visit_Dict(self, node, frame):
    """Write a dict literal of ``key: value`` pairs."""
    self.write('{')
    first = True
    for pair in node.items:
        if not first:
            self.write(', ')
        first = False
        self.visit(pair.key, frame)
        self.write(': ')
        self.visit(pair.value, frame)
    self.write('}')
def binop(operator, interceptable=True):
    """Class-body factory building a visit method for the binary *operator*.

    NOTE(review): ``interceptable`` is accepted but not consulted here; the
    sandbox check goes through ``environment.intercepted_binops`` instead.
    """
    def visitor(self, node, frame):
        if (self.environment.sandboxed and (operator in self.environment.intercepted_binops)):
            # sandboxed operators are routed through call_binop
            self.write(('environment.call_binop(context, %r, ' % operator))
            self.visit(node.left, frame)
            self.write(', ')
            self.visit(node.right, frame)
        else:
            self.write('(')
            self.visit(node.left, frame)
            self.write((' %s ' % operator))
            self.visit(node.right, frame)
        # closes either the call_binop(...) call or the plain parenthesis
        self.write(')')
    return visitor
def uaop(operator, interceptable=True):
    """Class-body factory building a visit method for the unary *operator*.

    NOTE(review): ``interceptable`` is accepted but not consulted here; the
    sandbox check goes through ``environment.intercepted_unops`` instead.
    """
    def visitor(self, node, frame):
        if (self.environment.sandboxed and (operator in self.environment.intercepted_unops)):
            self.write(('environment.call_unop(context, %r, ' % operator))
            self.visit(node.node, frame)
        else:
            self.write(('(' + operator))
            self.visit(node.node, frame)
        # closes either the call_unop(...) call or the plain parenthesis
        self.write(')')
    return visitor
# Operator visit methods are generated by the binop/uaop factories above,
# which are deleted from the class namespace once the table is built.
visit_Add = binop('+')
visit_Sub = binop('-')
visit_Mul = binop('*')
visit_Div = binop('/')
visit_FloorDiv = binop('//')
visit_Pow = binop('**')
visit_Mod = binop('%')
visit_And = binop('and', interceptable=False)
visit_Or = binop('or', interceptable=False)
visit_Pos = uaop('+')
visit_Neg = uaop('-')
visit_Not = uaop('not ', interceptable=False)
del binop, uaop
def visit_Concat(self, node, frame):
    """Emit a string join over the concatenated child expressions.

    markup_join is used under autoescape; a volatile eval context defers
    the choice to runtime.
    """
    if frame.eval_ctx.volatile:
        joiner = '(context.eval_ctx.volatile and markup_join or unicode_join)'
    elif frame.eval_ctx.autoescape:
        joiner = 'markup_join'
    else:
        joiner = 'unicode_join'
    self.write('%s((' % joiner)
    for part in node.nodes:
        self.visit(part, frame)
        self.write(', ')
    self.write('))')
def visit_Compare(self, node, frame):
    """Emit a (possibly chained) comparison: expr followed by each operand."""
    self.visit(node.expr, frame)
    for operand in node.ops:
        self.visit(operand, frame)
def visit_Operand(self, node, frame):
    # Translate the symbolic op (e.g. 'eq') into its Python operator text.
    self.write(' %s ' % operators[node.op])
    self.visit(node.expr, frame)
def visit_Getattr(self, node, frame):
    """Attribute access is routed through environment.getattr (sandbox/undefined handling)."""
    self.write('environment.getattr(')
    self.visit(node.node, frame)
    self.write(', %r)' % node.attr)
def visit_Getitem(self, node, frame):
    """Emit subscription; slices use native [] syntax, everything else environment.getitem."""
    if isinstance(node.arg, nodes.Slice):
        # Slices cannot be proxied through getitem, so emit real subscription.
        self.visit(node.node, frame)
        self.write('[')
        self.visit(node.arg, frame)
        self.write(']')
        return
    self.write('environment.getitem(')
    self.visit(node.node, frame)
    self.write(', ')
    self.visit(node.arg, frame)
    self.write(')')
def visit_Slice(self, node, frame):
    """Emit start:stop[:step], omitting whichever parts are absent."""
    if node.start is not None:
        self.visit(node.start, frame)
    self.write(':')
    if node.stop is not None:
        self.visit(node.stop, frame)
    if node.step is not None:
        self.write(':')
        self.visit(node.step, frame)
def visit_Filter(self, node, frame):
    """Emit a filter call.

    Filters may be decorated as context/evalcontext/environment filters, in
    which case the matching extra first argument is emitted.  When
    ``node.node`` is None the filter applies to the current block buffer
    (the ``{% filter %}`` statement); buffered output is wrapped in Markup
    under autoescape.  NOTE(review): ``self.filters[node.name]`` is indexed
    before the missing-filter check below — presumably dependency pulling
    guarantees the entry exists; confirm against the rest of the compiler.
    """
    if self.environment.is_async:
        self.write('await auto_await(')
    self.write((self.filters[node.name] + '('))
    func = self.environment.filters.get(node.name)
    if (func is None):
        self.fail(('no filter named %r' % node.name), node.lineno)
    # Emit the extra leading argument the filter was decorated to expect.
    if getattr(func, 'contextfilter', False):
        self.write('context, ')
    elif getattr(func, 'evalcontextfilter', False):
        self.write('context.eval_ctx, ')
    elif getattr(func, 'environmentfilter', False):
        self.write('environment, ')
    if (node.node is not None):
        self.visit(node.node, frame)
    elif frame.eval_ctx.volatile:
        # Autoescape unknown at compile time: decide Markup wrapping at runtime.
        self.write(('(context.eval_ctx.autoescape and Markup(concat(%s)) or concat(%s))' % (frame.buffer, frame.buffer)))
    elif frame.eval_ctx.autoescape:
        self.write(('Markup(concat(%s))' % frame.buffer))
    else:
        self.write(('concat(%s)' % frame.buffer))
    self.signature(node, frame)
    self.write(')')
    if self.environment.is_async:
        self.write(')')
def visit_Test(self, node, frame):
    """Emit a test call (e.g. ``x is defined``); unknown tests fail compilation."""
    self.write(self.tests[node.name] + '(')
    if node.name not in self.environment.tests:
        self.fail('no test named %r' % node.name, node.lineno)
    self.visit(node.node, frame)
    self.signature(node, frame)
    self.write(')')
def visit_CondExpr(self, node, frame):
    """Emit an inline ``a if test else b`` expression.

    When no else-branch exists, an ``undefined(...)`` placeholder with a
    helpful message is emitted instead.
    """
    def write_expr2():
        # Emit the else-expression, or an undefined() explaining its absence.
        if (node.expr2 is not None):
            return self.visit(node.expr2, frame)
        self.write(('undefined(%r)' % ('the inline if-expression on %s evaluated to false and no else section was defined.' % self.position(node))))
    self.write('(')
    self.visit(node.expr1, frame)
    self.write(' if ')
    self.visit(node.test, frame)
    self.write(' else ')
    write_expr2()
    self.write(')')
def visit_Call(self, node, frame, forward_caller=False):
    """Emit a call through context.call (environment.call when sandboxed)."""
    if self.environment.is_async:
        self.write('await auto_await(')
    if self.environment.sandboxed:
        self.write('environment.call(context, ')
    else:
        self.write('context.call(')
    self.visit(node.node, frame)
    # Inside a {% call %} block the special `caller` argument is forwarded.
    extra_kwargs = {'caller': 'caller'} if forward_caller else None
    self.signature(node, frame, extra_kwargs)
    self.write(')')
    if self.environment.is_async:
        self.write(')')
def visit_Keyword(self, node, frame):
    # key=value inside a call signature.
    self.write('%s=' % node.key)
    self.visit(node.value, frame)
def visit_MarkSafe(self, node, frame):
    """Unconditionally wrap the expression in Markup."""
    self.write('Markup(')
    self.visit(node.expr, frame)
    self.write(')')
def visit_MarkSafeIfAutoescape(self, node, frame):
    """Wrap in Markup only when autoescape is active at runtime."""
    self.write('(context.eval_ctx.autoescape and Markup or identity)(')
    self.visit(node.expr, frame)
    self.write(')')
def visit_EnvironmentAttribute(self, node, frame):
    # Read an attribute straight off the Environment object.
    self.write(('environment.' + node.name))
def visit_ExtensionAttribute(self, node, frame):
    # Reach into a registered extension by its identifier.
    self.write(('environment.extensions[%r].%s' % (node.identifier, node.name)))
def visit_ImportedName(self, node, frame):
    # Names imported for the generated module get stable aliases.
    self.write(self.import_aliases[node.importname])
def visit_InternalName(self, node, frame):
    # Internal names are emitted verbatim.
    self.write(node.name)
def visit_ContextReference(self, node, frame):
    # The template rendering context object.
    self.write('context')
def visit_Continue(self, node, frame):
    self.writeline('continue', node)
def visit_Break(self, node, frame):
    self.writeline('break', node)
def visit_Scope(self, node, frame):
    """Compile the node body inside a fresh inner frame."""
    inner = frame.inner()
    inner.symbols.analyze_node(node)
    self.enter_frame(inner)
    self.blockvisit(node.body, inner)
    self.leave_frame(inner)
def visit_OverlayScope(self, node, frame):
    """Compile the body against a derived context whose vars come from node.context."""
    overlay_ctx = self.temporary_identifier()
    self.writeline('%s = %s' % (overlay_ctx, self.derive_context(frame)))
    self.writeline('%s.vars = ' % overlay_ctx)
    self.visit(node.context, frame)
    self.push_context_reference(overlay_ctx)
    # Isolated: names from the enclosing frame do not leak in.
    inner = frame.inner(isolated=True)
    inner.symbols.analyze_node(node)
    self.enter_frame(inner)
    self.blockvisit(node.body, inner)
    self.leave_frame(inner)
    self.pop_context_reference()
def visit_EvalContextModifier(self, node, frame):
    """Emit assignments to context.eval_ctx and mirror them at compile time."""
    for keyword in node.options:
        self.writeline(('context.eval_ctx.%s = ' % keyword.key))
        self.visit(keyword.value, frame)
        try:
            # Mirror the setting on the compile-time eval context when the
            # value is a compile-time constant ...
            val = keyword.value.as_const(frame.eval_ctx)
        except nodes.Impossible:
            # ... otherwise escaping decisions must all be made at runtime.
            frame.eval_ctx.volatile = True
        else:
            setattr(frame.eval_ctx, keyword.key, val)
def visit_ScopedEvalContextModifier(self, node, frame):
    """Apply eval-context options for the body only, restoring them afterwards."""
    saved_name = self.temporary_identifier()
    compile_time_snapshot = frame.eval_ctx.save()
    self.writeline('%s = context.eval_ctx.save()' % saved_name)
    self.visit_EvalContextModifier(node, frame)
    for stmt in node.body:
        self.visit(stmt, frame)
    # Restore both the compile-time view and the runtime context.
    frame.eval_ctx.revert(compile_time_snapshot)
    self.writeline('context.eval_ctx.revert(%s)' % saved_name)
class KeyfileCreds(BaseModel):
    """Service-account keyfile credentials.

    Field names appear to mirror a Google service-account JSON keyfile —
    TODO confirm against the consuming connector.
    """
    type: Optional[str] = None
    project_id: str = Field(title='Project ID')
    private_key_id: Optional[str] = Field(None, title='Private Key ID')
    # Marked sensitive so the key is masked in UIs/logs.
    private_key: Optional[str] = Field(None, sensitive=True)
    client_email: Optional[EmailStr] = None
    client_id: Optional[str] = Field(None, title='Client ID')
    auth_uri: Optional[str] = Field(None, title='Auth URI')
    token_uri: Optional[str] = Field(None, title='Token URI')
    auth_provider_x509_cert_url: Optional[str] = Field(None, title='Auth Provider X509 Cert URL')
    client_x509_cert_url: Optional[str] = Field(None, title='Client X509 Cert URL')
def formatter(record):
    """Build a log format string based on the record's `scheme` extra."""
    extra = record['extra']
    scheme = extra.get('scheme', None)
    def ifextra(keys, pattern='{}'):
        # Render `pattern` with "{extra[key]}" placeholders, but only when
        # every requested key is present in the record's extras.
        keys = to_iterable(keys)
        if not all(k in extra for k in keys):
            return ''
        return pattern.format(*(f'{{extra[{k}]}}' for k in keys))
    if scheme in ('telegram', 'telechecker', 'telemonitor', 'telemessager', 'telelink'):
        username = ifextra('username', ' ([cyan]{}[/])')
        name = ifextra('name', '([magenta]{}[/]) ')
        return f'[blue]{scheme_names[scheme]}[/]{username}: {name}{{message}}'
    if scheme == 'embywatcher':
        ident = ifextra(['server', 'username'], ' ([cyan]{}:{}[/])')
        return f'[blue]{scheme_names[scheme]}[/]{ident}: {{message}}'
    if scheme == 'datamanager':
        return f'[blue]{scheme_names[scheme]}[/]: {{message}}'
    return '{message}'
class PostDisapproveView(PermissionRequiredMixin, SingleObjectTemplateResponseMixin, BaseDetailView):
    """Moderation view that deletes ("disapproves") a queued forum post."""
    context_object_name = 'post'
    model = Post
    success_message = _('This post has been disapproved successfully.')
    template_name = 'forum_moderation/moderation_queue/post_disapprove.html'
    def disapprove(self, request, *args, **kwargs):
        """Delete the post and redirect back to the moderation queue."""
        self.object = self.get_object()
        # Resolve the redirect target before deleting the object.
        success_url = self.get_success_url()
        self.object.delete()
        messages.success(self.request, self.success_message)
        return HttpResponseRedirect(success_url)
    def post(self, request, *args, **kwargs):
        # POST triggers the disapproval; GET renders the confirmation page.
        return self.disapprove(request, *args, **kwargs)
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['forum'] = self.get_object().topic.forum
        return context
    def get_success_url(self):
        return reverse('forum_moderation:queue')
    def get_controlled_object(self):
        # Permissions are checked against the forum the post belongs to.
        return self.get_object().topic.forum
    def perform_permissions_check(self, user, obj, perms):
        return self.request.forum_permission_handler.can_approve_posts(obj, user)
# NOTE(review): this bare tuple looks like a @unittest.skipIf decorator whose
# '@' was lost in extraction — confirm against the original file.
(no_gui_test_assistant, 'No GuiTestAssistant')
class TestEventTimer(TestCase, GuiTestAssistant):
    """Exercises EventTimer start/stop, repeat, interval and expiry behaviour."""
    def setUp(self):
        GuiTestAssistant.setUp(self)
    def tearDown(self):
        GuiTestAssistant.tearDown(self)
    def test_basic(self):
        # A fresh timer is inactive and repeats indefinitely (repeat is None).
        timer = EventTimer()
        self.assertIsNone(timer.repeat)
        self.assertFalse(timer.active)
        timer.start()
        try:
            self.assertTrue(timer.active)
            self.event_loop_helper.event_loop()
            self.assertTrue(timer.active)
        finally:
            timer.stop()
        self.assertFalse(timer.active)
    def test_timer_method(self):
        # EventTimer.timer() returns an already-started timer.
        timer = EventTimer.timer()
        try:
            self.assertTrue(timer.active)
            self.event_loop_helper.event_loop()
            self.assertTrue(timer.active)
        finally:
            timer.stop()
        self.assertFalse(timer.active)
    def test_single_shot_method(self):
        # A single-shot timer fires exactly once and then deactivates itself.
        timer = EventTimer.single_shot()
        handler = ConditionHandler()
        timer.observe(handler.callback, 'timeout')
        try:
            self.assertTrue(timer.active)
            self.event_loop_helper.event_loop_until_condition((lambda : (not timer.active)))
            self.assertFalse(timer.active)
        finally:
            timer.stop()
        self.assertFalse(timer.active)
        self.assertEqual(handler.count, 1)
    def test_set_active(self):
        # Setting `active` starts/stops the timer like start()/stop().
        timer = EventTimer()
        self.assertIsNone(timer.repeat)
        self.assertFalse(timer.active)
        timer.active = True
        try:
            self.assertTrue(timer.active)
            self.event_loop_helper.event_loop()
            self.assertTrue(timer.active)
        finally:
            timer.active = False
        self.assertFalse(timer.active)
    def test_timeout_event(self):
        # The 'timeout' trait event is delivered to observers.
        timer = EventTimer()
        handler = ConditionHandler()
        timer.observe(handler.callback, 'timeout')
        timer.start()
        try:
            self.event_loop_helper.event_loop_until_condition(handler.is_called)
        finally:
            timer.stop()
    def test_repeat(self):
        # With repeat=4 the timer fires exactly four times, then deactivates.
        timer = EventTimer(repeat=4)
        handler = ConditionHandler()
        timer.observe(handler.callback, 'timeout')
        timer.start()
        try:
            self.event_loop_helper.event_loop_until_condition((lambda : (not timer.active)))
            self.assertFalse(timer.active)
        finally:
            timer.stop()
        self.assertEqual(handler.count, 4)
    def test_interval(self):
        timer = EventTimer(repeat=4, interval=0.1)
        handler = ConditionHandler()
        timer.observe(handler.callback, 'timeout')
        timer.start()
        try:
            self.event_loop_helper.event_loop_until_condition((lambda : (not timer.active)))
            self.assertFalse(timer.active)
        finally:
            timer.stop()
        self.assertEqual(handler.count, 4)
        # Each firing must not come earlier than its scheduled interval slot.
        expected_times = [((timer._start_time + (0.1 * i)) + 0.1) for i in range(4)]
        if (not all(((expected <= actual) for (expected, actual) in zip(expected_times, handler.times)))):
            # Print actual timings to aid debugging of flaky timing failures.
            print(handler.times)
        self.assertTrue(all(((expected <= actual) for (expected, actual) in zip(expected_times, handler.times))))
    def test_expire(self):
        # All firings must happen before start + expire (with a small slack).
        timer = EventTimer(expire=1.0, interval=0.1)
        handler = ConditionHandler()
        timer.observe(handler.callback, 'timeout')
        timer.start()
        try:
            self.event_loop_helper.event_loop_until_condition((lambda : (not timer.active)))
            self.assertFalse(timer.active)
        finally:
            timer.stop()
        if (not all(((t < ((timer._start_time + timer.expire) + 0.01)) for t in handler.times))):
            print(handler.times[(- 1)], (timer._start_time + timer.expire))
        self.assertTrue(all(((t < ((timer._start_time + timer.expire) + 0.01)) for t in handler.times)))
class TableFormatter():
    """Fixed-width table formatting: a header with underline plus data rows."""

    def __init__(self, header, fmts, min_width=7, space=3):
        # NOTE: stored but unused by the formatting below; column widths are
        # derived from the raw `min_width` argument.
        self.min_width = min_width + space - 1
        self.space = space
        # Each column is at least `min_width` wide, or as wide as its title.
        widths = np.array([len(title) for title in header])
        widths[widths < min_width] = min_width
        self.widths = widths
        self._header = self.join_format(self.min_width_fmts(), header)
        underline = (self.space * ' ').join('-' * width for width in self.widths)
        self._header += '\n' + underline
        self.fmts = self.min_width_fmts(fmts)

    def min_width_fmts(self, raw_fmts=None):
        """Right-aligned format specs, one per column; default to plain strings."""
        if not raw_fmts:
            raw_fmts = ['s' for _ in self.widths]
        return ['{:>' + '{}'.format(width) + fmt + '}' for width, fmt in zip(self.widths, raw_fmts)]

    def join_format(self, fmts, lst):
        """Format each item with its column spec and join with the column gap."""
        gap = self.space * ' '
        return gap.join(fmt.format(item) for fmt, item in zip(fmts, lst))

    def header(self):
        return self._header

    def line(self, *args):
        """Format one data row."""
        return self.join_format(self.fmts, args)
def test_basic_forwarding2(golden):
    """Exo scheduling: split the `o` loop, expand/lift the accumulator, and
    check that the `sum_c` cursor forwards to the expected (golden) location."""
    def filter1D(ow: size, kw: size, x: f32[((ow + kw) - 1)], y: f32[ow], w: f32[kw]):
        # Sliding-window filter: y[o] = sum over k of x[o+k] * w[k]
        for o in seq(0, ow):
            sum: f32
            sum = 0.0
            for k in seq(0, kw):
                sum += (x[(o + k)] * w[k])
            y[o] = sum
    # Split `o` by 4 into (outXo, outXi); tail iterations are cut and guarded.
    filter1D = divide_loop(filter1D, 'o', 4, ['outXo', 'outXi'], tail='cut_and_guard')
    sum_c = filter1D.find('sum:_')
    # Give the scalar accumulator an extra dim indexed by outXi, then hoist it.
    filter1D = expand_dim(filter1D, sum_c, '4', 'outXi')
    filter1D = lift_alloc(filter1D, sum_c)
    assert (str(filter1D.forward(sum_c)) == golden)
def fetch_crlf_payload():
    """Load CRLF-injection payloads, one per line, from Payloads/crlf.txt.

    The path is resolved relative to the current working directory; when the
    process runs from inside the API directory the file lives one level up.
    """
    in_api_dir = os.getcwd().split('/')[-1] == 'API'
    path = '../Payloads/crlf.txt' if in_api_dir else 'Payloads/crlf.txt'
    payloads = []
    with open(path) as handle:
        for raw_line in handle:
            if raw_line:
                payloads.append(raw_line.rstrip())
    return payloads
def test_non_elasticsearch_cached_download_not_found(common_test_data):
    """A cached download must be treated as stale (None) when the ES load
    dates are older than the FABS/FPDS source load dates."""
    external_load_dates = [{'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['fabs'], 'last_load_date': datetime(2021, 1, 30, 12, 0, 0, 0, timezone.utc)}, {'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['fpds'], 'last_load_date': datetime(2021, 1, 30, 12, 0, 0, 0, timezone.utc)}, {'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['es_transactions'], 'last_load_date': datetime(2021, 1, 17, 12, 0, 0, 0, timezone.utc)}, {'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['es_awards'], 'last_load_date': datetime(2021, 1, 17, 16, 0, 0, 0, timezone.utc)}]
    for load_date in external_load_dates:
        baker.make('broker.ExternalDataLoadDate', **load_date)
    result = BaseDownloadViewSet._get_cached_download(json.dumps(JSON_REQUEST))
    assert (result is None)
# NOTE(review): this bare tuple looks like a @unittest.skipIf decorator whose
# '@' was lost in extraction — confirm against the original file.
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class GroupGEMMRcrBiasActTestCase(unittest.TestCase):
    """Grouped GEMM (row-col-row) + bias + activation, checked against PyTorch."""
    # NOTE(review): parameterized test cases; the @parameterized.expand
    # decorator name appears stripped here.
    ([param('group_gemm_rcr_bias_relu_fp16', 'float16', 'relu'), param('group_gemm_rcr_bias_relu_fp32_sm80', 'float32', 'relu'), param('group_gemm_rcr_bias_relu_bf16', 'bfloat16', 'relu'), param('group_gemm_rcr_bias_sigmoid_fp16', 'float16', 'sigmoid'), param('group_gemm_rcr_bias_sigmoid_fp32_sm80', 'float32', 'sigmoid'), param('group_gemm_rcr_bias_sigmoid_bf16', 'bfloat16', 'sigmoid')])
    def test_rcr_activation(self, test_name, dtype, activation):
        # Two independent GEMM problems compiled into one grouped op.
        M = 256
        K1 = 128
        N1 = 60
        K2 = 192
        N2 = 64
        target = detect_target()
        X1 = Tensor(shape=[M, K1], dtype=dtype, name='x1', is_input=True)
        X2 = Tensor(shape=[M, K2], dtype=dtype, name='x2', is_input=True)
        W1 = Tensor(shape=[N1, K1], dtype=dtype, name='w1', is_input=True)
        W2 = Tensor(shape=[N2, K2], dtype=dtype, name='w2', is_input=True)
        B1 = Tensor(shape=[N1], dtype=dtype, name='b1', is_input=True)
        B2 = Tensor(shape=[N2], dtype=dtype, name='b2', is_input=True)
        OP = (ops.group_gemm_rcr_bias_relu() if (activation == 'relu') else ops.group_gemm_rcr_bias_sigmoid())
        act_pt = (torch.relu if (activation == 'relu') else torch.sigmoid)
        (Y1, Y2) = OP(operand_groups=[[X1, W1, B1], [X2, W2, B2]])
        Y1._attrs['name'] = 'y1'
        Y1._attrs['is_output'] = True
        Y2._attrs['name'] = 'y2'
        Y2._attrs['is_output'] = True
        module = compile_model([Y1, Y2], target, './tmp', test_name)
        # Reference computation with PyTorch (linear == x @ w.T + b, i.e. rcr layout).
        X1_pt = get_random_torch_tensor(shape=(M, K1), dtype=dtype)
        X2_pt = get_random_torch_tensor(shape=(M, K2), dtype=dtype)
        W1_pt = get_random_torch_tensor(shape=(N1, K1), dtype=dtype)
        W2_pt = get_random_torch_tensor(shape=(N2, K2), dtype=dtype)
        B1_pt = get_random_torch_tensor(shape=(N1,), dtype=dtype)
        B2_pt = get_random_torch_tensor(shape=(N2,), dtype=dtype)
        Y1_pt = torch.nn.functional.linear(X1_pt, W1_pt, bias=B1_pt)
        Y1_pt = act_pt(Y1_pt)
        Y2_pt = torch.nn.functional.linear(X2_pt, W2_pt, bias=B2_pt)
        Y2_pt = act_pt(Y2_pt)
        inputs = {'x1': X1_pt, 'w1': W1_pt, 'b1': B1_pt, 'x2': X2_pt, 'w2': W2_pt, 'b2': B2_pt}
        y1 = torch.empty_like(Y1_pt)
        y2 = torch.empty_like(Y2_pt)
        module.run_with_tensors(inputs, {'y1': y1, 'y2': y2})
        # Loose tolerances to accommodate fp16/bf16 accumulation differences.
        torch.testing.assert_close(Y1_pt, y1, atol=0.1, rtol=0.1)
        torch.testing.assert_close(Y2_pt, y2, atol=0.1, rtol=0.1)
def debug(func: Callable[..., Any]) -> Callable[..., Any]:
    """Decorator that prints each call's arguments and its return value.

    Bug fix: the original body contained a bare ``(func)`` expression — a
    no-op where ``functools.wraps`` belongs — so the wrapper lost the
    wrapped function's ``__name__``/docstring.  ``wraps`` restores them.
    """
    import functools  # local import keeps the decorator self-contained

    @functools.wraps(func)
    def _wrapper(*args: Any, **kwargs: Any) -> Any:
        print(f"<debug> Calling '{func.__name__}' with args={args} and kwargs={kwargs}")
        result = func(*args, **kwargs)
        print(f"<debug> '{func.__name__}' returned {result}")
        return result
    return _wrapper
class op(bpy.types.Operator):
    """Blender operator: pack ID colors into a single texture and UVs."""
    bl_idname = 'uv.textools_color_convert_to_texture'
    bl_label = 'Pack Texture'
    bl_description = 'Pack ID Colors into single texture and UVs'
    bl_options = {'REGISTER', 'UNDO'}
    def poll(cls, context):
        # NOTE(review): written like @classmethod poll(); the decorator
        # appears stripped — confirm against the original file.
        # Operator is only available in the UV editor with exactly one
        # selected, active mesh object.
        if (bpy.context.area.ui_type != 'UV'):
            return False
        if (not bpy.context.active_object):
            return False
        if (bpy.context.active_object not in bpy.context.selected_objects):
            return False
        if (len(bpy.context.selected_objects) != 1):
            return False
        if (bpy.context.active_object.type != 'MESH'):
            return False
        return True
    def execute(self, context):
        pack_texture(self, context)
        return {'FINISHED'}
# NOTE(review): bare name looks like a stripped @_memory_db fixture/decorator —
# confirm against the original file.
_memory_db
def test_add_role(prompt, capsys):
    """CLI user management: creating a user and adding a role updates the listing."""
    # Inputs are queued up-front; each entry answers one interactive prompt.
    action_and_inputs = ['create_user', 'test_user', 'list_all_users', 'add_role_to_user', 'test_user', 'guest_analyst', 'list_all_users', 'exit']
    for action in action_and_inputs:
        prompt.input.send_text(f'''{action}
''')
    (test_app, store, db) = _setup_frontend()
    start_user_management(test_app, store, db, prompt.session)
    captured = capsys.readouterr()
    # First listing shows the default role, second shows the added role too.
    assert ('test_user (guest)' in captured.out)
    assert ('test_user (guest, guest_analyst)' in captured.out)
class OptionPlotoptionsDumbbellSonificationDefaultspeechoptionsMappingRate(Options):
    """Generated Highcharts option wrapper for speech-rate mapping.

    NOTE(review): each name is defined twice (getter then setter); this
    reads like @property/@x.setter pairs whose decorators were stripped —
    as written, each later def shadows the earlier one.  Confirm against
    the original generated source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def gather_dependent_entities(serialized: OrderedDict) -> Tuple[(Dict[(_identifier_model.Identifier, TaskTemplate)], Dict[(_identifier_model.Identifier, admin_workflow_models.WorkflowSpec)], Dict[(_identifier_model.Identifier, _launch_plan_models.LaunchPlanSpec)])]:
    """Bucket serialized entities into task templates, workflow specs and launch-plan specs, keyed by identifier."""
    task_templates: Dict[(_identifier_model.Identifier, TaskTemplate)] = {}
    workflow_specs: Dict[(_identifier_model.Identifier, admin_workflow_models.WorkflowSpec)] = {}
    launch_plan_specs: Dict[(_identifier_model.Identifier, _launch_plan_models.LaunchPlanSpec)] = {}
    for entity in serialized.values():
        if isinstance(entity, TaskSpec):
            # Tasks are keyed and stored by their template.
            task_templates[entity.template.id] = entity.template
        elif isinstance(entity, _launch_plan_models.LaunchPlan):
            launch_plan_specs[entity.id] = entity.spec
        elif isinstance(entity, admin_workflow_models.WorkflowSpec):
            workflow_specs[entity.template.id] = entity
    return (task_templates, workflow_specs, launch_plan_specs)
class WorkflowMetadataDefaults(_common.FlyteIdlEntity):
    """Default workflow metadata — currently only the interruptible flag."""
    def __init__(self, interruptible=None):
        # Tri-state: None (unset) / True / False.
        self._interruptible = interruptible
    def interruptible(self):
        # NOTE(review): reads like a @property accessor whose decorator was stripped.
        return self._interruptible
    def to_flyte_idl(self):
        """Serialize to the core_workflow protobuf message."""
        return _core_workflow.WorkflowMetadataDefaults(interruptible=self._interruptible)
    def from_flyte_idl(cls, pb2_object):
        # NOTE(review): alternate constructor; @classmethod appears stripped.
        return cls(interruptible=pb2_object.interruptible)
class TestHighlightStrip(util.MdCase):
    """With stripnl disabled, leading/trailing blank lines in fences survive highlighting."""
    extension = ['pymdownx.highlight', 'pymdownx.superfences']
    extension_configs = {'pymdownx.highlight': {'stripnl': False}}
    def test_no_stripnl(self):
        # Blank lines around the imports must be preserved in the output HTML.
        self.check_markdown('\n    ```py\n\n    import foo\n\n\n    import bar\n\n\n\n    ```\n    ', '\n    <div class="highlight"><pre><span></span><code>\n    <span class="kn">import</span> <span class="nn">foo</span>\n\n\n    <span class="kn">import</span> <span class="nn">bar</span>\n\n\n    </code></pre></div>\n    ', True)
class TestAXI(unittest.TestCase):
    """Simulation tests for the LiteDRAM AXI-to-native bridge.

    A set of cooperating generators drive the five AXI channels (AW, W, B,
    AR, R) against a DRAM memory model; the *_random knobs insert random
    valid/ready back-pressure on each channel.
    """
    def _test_axi2native(self, naccesses=16, simultaneous_writes_reads=False, id_rand_enable=False, len_rand_enable=False, data_rand_enable=False, aw_valid_random=0, w_valid_random=0, ar_valid_random=0, r_valid_random=0, w_ready_random=0, b_ready_random=0, r_ready_random=0):
        def writes_cmd_generator(axi_port, writes):
            # Drive the AW (write command) channel, with random valid delays.
            prng = random.Random(42)
            for write in writes:
                while (prng.randrange(100) < aw_valid_random):
                    (yield)
                (yield axi_port.aw.valid.eq(1))
                # Word address -> byte address (32-bit data, hence << 2).
                (yield axi_port.aw.addr.eq((write.addr << 2)))
                (yield axi_port.aw.burst.eq(write.type))
                (yield axi_port.aw.len.eq(write.len))
                (yield axi_port.aw.size.eq(write.size))
                (yield axi_port.aw.id.eq(write.id))
                (yield)
                while ((yield axi_port.aw.ready) == 0):
                    (yield)
                (yield axi_port.aw.valid.eq(0))
        def writes_data_generator(axi_port, writes):
            # Drive the W (write data) channel; assert `last` on the final beat.
            prng = random.Random(42)
            for write in writes:
                for (i, (data, strb)) in enumerate(zip(write.data, write.strb)):
                    while (prng.randrange(100) < w_valid_random):
                        (yield)
                    (yield axi_port.w.valid.eq(1))
                    if (i == (len(write.data) - 1)):
                        (yield axi_port.w.last.eq(1))
                    else:
                        (yield axi_port.w.last.eq(0))
                    (yield axi_port.w.data.eq(data))
                    (yield axi_port.w.strb.eq(strb))
                    (yield)
                    while ((yield axi_port.w.ready) == 0):
                        (yield)
                    (yield axi_port.w.valid.eq(0))
            # All write data sent: release the read generators (sequential mode).
            axi_port.reads_enable = True
        def writes_response_generator(axi_port, writes):
            # Consume the B (write response) channel and check response IDs.
            prng = random.Random(42)
            self.writes_id_errors = 0
            for write in writes:
                (yield axi_port.b.ready.eq(0))
                (yield)
                while ((yield axi_port.b.valid) == 0):
                    (yield)
                while (prng.randrange(100) < b_ready_random):
                    (yield)
                (yield axi_port.b.ready.eq(1))
                (yield)
                if ((yield axi_port.b.id) != write.id):
                    self.writes_id_errors += 1
        def reads_cmd_generator(axi_port, reads):
            # Drive the AR (read command) channel once reads are enabled.
            prng = random.Random(42)
            while (not axi_port.reads_enable):
                (yield)
            for read in reads:
                while (prng.randrange(100) < ar_valid_random):
                    (yield)
                (yield axi_port.ar.valid.eq(1))
                (yield axi_port.ar.addr.eq((read.addr << 2)))
                (yield axi_port.ar.burst.eq(read.type))
                (yield axi_port.ar.len.eq(read.len))
                (yield axi_port.ar.size.eq(read.size))
                (yield axi_port.ar.id.eq(read.id))
                (yield)
                while ((yield axi_port.ar.ready) == 0):
                    (yield)
                (yield axi_port.ar.valid.eq(0))
        def reads_response_data_generator(axi_port, reads):
            # Consume the R channel; verify data (masked by strb), id and `last`.
            prng = random.Random(42)
            self.reads_data_errors = 0
            self.reads_id_errors = 0
            self.reads_last_errors = 0
            while (not axi_port.reads_enable):
                (yield)
            for read in reads:
                for (i, (data, strb)) in enumerate(zip(read.data, read.strb)):
                    (yield axi_port.r.ready.eq(0))
                    (yield)
                    while ((yield axi_port.r.valid) == 0):
                        (yield)
                    while (prng.randrange(100) < r_ready_random):
                        (yield)
                    (yield axi_port.r.ready.eq(1))
                    (yield)
                    # Only compare byte lanes that were actually written.
                    data_ref = (data & strb_to_mask(strb, axi_port.data_width))
                    data_cur = ((yield axi_port.r.data) & strb_to_mask(strb, axi_port.data_width))
                    if (data_ref != data_cur):
                        print(f'ref: {data_ref:08x} vs cur: {data_cur:08x}')
                        self.reads_data_errors += 1
                    if ((yield axi_port.r.id) != read.id):
                        self.reads_id_errors += 1
                    if (i == (len(read.data) - 1)):
                        if ((yield axi_port.r.last) != 1):
                            self.reads_last_errors += 1
                    elif ((yield axi_port.r.last) != 0):
                        self.reads_last_errors += 1
        axi_port = LiteDRAMAXIPort(data_width=32, address_width=32, id_width=8)
        dram_port = LiteDRAMNativePort('both', 32, 32)
        dut = LiteDRAMAXI2Native(axi_port, dram_port, with_read_modify_write=True)
        mem = DRAMMemory(32, 1024)
        # Build the write workload; ids/lengths/data optionally randomized.
        prng = random.Random(42)
        writes = []
        offset = 1
        for i in range(naccesses):
            _id = (prng.randrange((2 ** 8)) if id_rand_enable else i)
            _len = (prng.randrange(32) if len_rand_enable else i)
            _data = [(prng.randrange((2 ** 32)) if data_rand_enable else j) for j in range((_len + 1))]
            _strb = [(prng.randrange((2 ** (32 // 8))) if data_rand_enable else 255) for j in range((_len + 1))]
            writes.append(Write(offset, _data, _strb, _id, type=BURST_INCR, len=_len, size=log2_int((32 // 8))))
            offset += (_len + 1)
        # Dummy reads pad the pipeline before reading back what was written.
        dummy_reads = [Read(1023, [0], [((2 ** (32 // 8)) - 1)], 0, type=BURST_FIXED, len=0, size=log2_int((32 // 8))) for _ in range(32)]
        reads = (dummy_reads + writes)
        if simultaneous_writes_reads:
            axi_port.reads_enable = True
        else:
            # Sequential mode: reads wait until writes_data_generator finishes.
            axi_port.reads_enable = False
        generators = [writes_cmd_generator(axi_port, writes), writes_data_generator(axi_port, writes), writes_response_generator(axi_port, writes), reads_cmd_generator(axi_port, reads), reads_response_data_generator(axi_port, reads), mem.read_handler(dram_port, rdata_valid_random=r_valid_random), mem.write_handler(dram_port, wdata_ready_random=w_ready_random)]
        run_simulation(dut, generators, vcd_name='sim.vcd')
        self.assertEqual(self.writes_id_errors, 0)
        self.assertEqual(self.reads_data_errors, 0)
        self.assertEqual(self.reads_id_errors, 0)
        self.assertEqual(self.reads_last_errors, 0)
    def test_axi2native_writes_then_reads_no_random(self):
        self._test_axi2native(simultaneous_writes_reads=False)
    def test_axi2native_writes_and_reads_no_random(self):
        self._test_axi2native(simultaneous_writes_reads=True)
    def test_axi2native_writes_then_reads_random_bursts(self):
        self._test_axi2native(simultaneous_writes_reads=False, id_rand_enable=True, len_rand_enable=True, data_rand_enable=True)
    def test_axi2native_writes_and_reads_random_bursts(self):
        self._test_axi2native(simultaneous_writes_reads=True, id_rand_enable=True, len_rand_enable=True, data_rand_enable=True)
    # Per-channel back-pressure stress tests (90% random stall on one channel).
    def test_axi2native_random_w_ready(self):
        self._test_axi2native(w_ready_random=90)
    def test_axi2native_random_b_ready(self):
        self._test_axi2native(b_ready_random=90)
    def test_axi2native_random_r_ready(self):
        self._test_axi2native(r_ready_random=90)
    def test_axi2native_random_aw_valid(self):
        self._test_axi2native(aw_valid_random=90)
    def test_axi2native_random_w_valid(self):
        self._test_axi2native(w_valid_random=90)
    def test_axi2native_random_ar_valid(self):
        self._test_axi2native(ar_valid_random=90)
    def test_axi2native_random_r_valid(self):
        self._test_axi2native(r_valid_random=90)
    def test_axi2native_random_all(self):
        self._test_axi2native(simultaneous_writes_reads=True, id_rand_enable=True, len_rand_enable=True, aw_valid_random=50, w_ready_random=50, b_ready_random=50, w_valid_random=50, ar_valid_random=90, r_valid_random=90, r_ready_random=90)
# NOTE(review): these two bare expressions look like stripped
# @pytest.mark.skipif(...) and @mock.patch(...) decorators — confirm.
.skipif((django.VERSION > (1, 7)), reason='argparse raises CommandError in this case')
('elasticapm.contrib.django.management.commands.elasticapm.Command._get_argv')
def test_subcommand_not_set(argv_mock):
    """Running `manage.py elasticapm` with no subcommand prints a usage hint."""
    stdout = io.StringIO()
    argv_mock.return_value = ['manage.py', 'elasticapm']
    call_command('elasticapm', stdout=stdout)
    output = stdout.getvalue()
    assert ('No command specified' in output)
class NetworkNodeMonHistorySerializer(MonHistorySerializer):
    """Mon history serializer scoped to a single NIC of a compute node."""
    nic = s.CharField(required=True)
    def validate(self, attrs):
        nic = attrs.get('nic')
        # NOTE(review): `assert` is stripped under `python -O`; required=True
        # should already guarantee presence — confirm this is belt-and-braces.
        assert nic
        if (nic in self.obj.used_nics):
            # Monitoring history items are keyed by the NIC name.
            self.item_id = nic
        else:
            raise s.ValidationError(_('NIC not defined on compute node.'))
        return attrs
def xml_check_text_content(xml_doc_a, xml_doc_b, xpath):
    """Compare the text content of two DOM nodes.

    Returns (True, None) when both are text/CDATA nodes with equal data,
    (False, message) when the data differs, and (None, None) when the first
    node is not a text-like node at all.
    """
    text_types = (xml_doc_a.TEXT_NODE, xml_doc_a.CDATA_SECTION_NODE)
    if xml_doc_a.nodeType not in text_types:
        return (None, None)
    if xml_doc_a.data != xml_doc_b.data:
        message = 'Text Node data differs [%s] != [%s] at [%s]' % (xml_doc_a.data, xml_doc_b.data, xpath)
        return (False, message)
    return (True, None)
def find_json_objects(text):
    """Scan `text` and return every parseable top-level JSON object/array.

    String state is tracked (honouring backslash escapes) so braces inside
    string literals do not affect bracket matching; candidate spans that
    fail json.loads are silently skipped.
    """
    found = []
    in_string = False
    escaped = False
    open_brackets = []
    span_start = -1
    matching = {'}': '{', ']': '['}
    for index, char in enumerate(text):
        if char == '\\' and not escaped:
            # Next character is escaped; it cannot open/close a string.
            escaped = True
            continue
        if char == '"' and not escaped:
            in_string = not in_string
        if char == '\n' and not in_string:
            continue
        if not in_string:
            if char in '{[':
                open_brackets.append(char)
                if len(open_brackets) == 1:
                    span_start = index
            elif char in '}]' and open_brackets and open_brackets[-1] == matching[char]:
                open_brackets.pop()
                if not open_brackets:
                    # A balanced top-level span; keep it only if it parses.
                    candidate = text[span_start:index + 1]
                    try:
                        found.append(json.loads(candidate))
                    except json.JSONDecodeError:
                        pass
        escaped = False
    return found
def _output_deck(task_name: str, new_user_params: ExecutionParameters):
    """Render the Flyte deck HTML locally and, during real task execution, upload it."""
    ctx = FlyteContext.current_context()
    local_dir = ctx.file_access.get_random_local_directory()
    local_path = f'{local_dir}{os.sep}{DECK_FILE_NAME}'
    try:
        with open(local_path, 'w', encoding='utf-8') as f:
            f.write(_get_deck(new_user_params, ignore_jupyter=True))
        logger.info(f'{task_name} task creates flyte deck html to file://{local_path}')
        if (ctx.execution_state.mode == ExecutionState.Mode.TASK_EXECUTION):
            # Only upload during actual task execution (not local runs).
            fs = ctx.file_access.get_filesystem_for_path(new_user_params.output_metadata_prefix)
            remote_path = f'{new_user_params.output_metadata_prefix}{ctx.file_access.sep(fs)}{DECK_FILE_NAME}'
            # Both key spellings are passed since different filesystem
            # backends expect different content-type kwargs.
            kwargs: typing.Dict[(str, str)] = {'ContentType': 'text/html', 'content_type': 'text/html'}
            ctx.file_access.put_data(local_path, remote_path, **kwargs)
    except Exception as e:
        # Deck output is best-effort: never fail the task because of it.
        logger.error(f'Failed to write flyte deck html with error {e}.')
def get_wrap_config(view: sublime.View, pos: int) -> Config:
    """Build an abbreviation-wrap Config for the syntax at `pos` in `view`."""
    syntax_name = syntax.doc_syntax(view)
    config = get_config(view, pos)
    config.syntax = syntax_name
    config.type = 'markup'
    if syntax.is_html(syntax_name):
        # HTML gets extra context (e.g. surrounding tags) for smarter wrapping.
        config.context = get_html_context(view, pos)
    return config
class ACLTestCase(TestCase):
    """POSTs to the MQTT ACL endpoint under various credential scenarios."""
    def setUp(self):
        self.username = 'user'
        self.password = 'password'
        User.objects.create_user(self.username, password=self.password)
        self.url_testing = reverse('django_mqtt:mqtt_acl')
        self.client = Client()
        _settings(MQTT_ACL_ALLOW=True)
    def test_login_acl_allow_true(self):
        # Valid credentials with ACL allow enabled -> 200.
        response = self.client.post(self.url_testing, {'username': self.username, 'password': self.password})
        self.assertEqual(response.status_code, 200)
        # NOTE(review): this trailing _settings(...) call reads like a stripped
        # @_settings decorator belonging to the NEXT test — confirm.
        _settings(MQTT_ACL_ALLOW=False)
    def test_login_acl_allow_false(self):
        # Valid credentials but ACL allow disabled -> 403.
        response = self.client.post(self.url_testing, {'username': self.username, 'password': self.password})
        self.assertEqual(response.status_code, 403)
    def test_wrong_login(self):
        response = self.client.post(self.url_testing, {'username': self.username, 'password': 'wrong'})
        self.assertEqual(response.status_code, 403)
    def test_wrong_user(self):
        response = self.client.post(self.url_testing, {'username': 'wrong', 'password': 'wrong'})
        self.assertEqual(response.status_code, 403)
    def test_wrong_no_password(self):
        response = self.client.post(self.url_testing, {'username': self.username})
        self.assertEqual(response.status_code, 403)
    def test_wrong_no_username(self):
        response = self.client.post(self.url_testing, {'password': self.password})
        self.assertEqual(response.status_code, 403)
    def test_wrong_no_data(self):
        response = self.client.post(self.url_testing, {})
        self.assertEqual(response.status_code, 403)
class OptionSeriesColumnSonificationContexttracksMappingLowpass(Options):
    """Generated Highcharts option wrapper for the lowpass-filter mapping."""
    def frequency(self) -> 'OptionSeriesColumnSonificationContexttracksMappingLowpassFrequency':
        # NOTE(review): reads like a @property accessor whose decorator was stripped.
        return self._config_sub_data('frequency', OptionSeriesColumnSonificationContexttracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionSeriesColumnSonificationContexttracksMappingLowpassResonance':
        return self._config_sub_data('resonance', OptionSeriesColumnSonificationContexttracksMappingLowpassResonance)
class OptionPlotoptionsTreemapSonificationTracksMappingPitch(Options):
    """Generated Highcharts option wrapper for pitch mapping.

    NOTE(review): each name is defined twice (getter then setter); this
    reads like @property/@x.setter pairs whose decorators were stripped —
    as written, each later def shadows the earlier one.  Defaults shown in
    the getters: mapTo='y', max='c6', min='c2', within='yAxis'.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
class TestGANModule(unittest.TestCase):
    """Tests for TensorboardGenerativeModelImageSampler callback preconditions."""
    def test_module_without_dimension(self, tmp_dir: str) -> None:
        # Modules without img_dim/latent_dim must trigger the callback's assertion.
        module = TestModule()
        trainer = Trainer(default_root_dir=tmp_dir, fast_dev_run=True, callbacks=[TensorboardGenerativeModelImageSampler()])
        with self.assertRaises(AssertionError):
            trainer.fit(module)
    def test_logger_without_add_image(self, tmp_dir: str) -> None:
        # CSVLogger has no add_image(); the callback must refuse to run with it.
        module = TestModule()
        trainer = Trainer(default_root_dir=tmp_dir, fast_dev_run=True, logger=CSVLogger(tmp_dir), callbacks=[TensorboardGenerativeModelImageSampler()])
        with self.assertRaises(AssertionError):
            trainer.fit(module)
    def test_callback_triggered(self, tmp_dir: str) -> None:
        class MyModule(TestModule):
            """TestModule with the dims the sampler needs; asserts the sampled noise shape."""
            def __init__(self) -> None:
                super().__init__()
                self.latent_dim = 32
                self.img_dim = (1, 1, 2)
            def forward(self, x) -> torch.Tensor:
                # num_samples=1 and latent_dim=32 -> noise of shape (1, 32).
                assert (x.size() == torch.Size([1, 32]))
                return super().forward(x)
        module = MyModule()
        trainer = Trainer(default_root_dir=tmp_dir, fast_dev_run=True, callbacks=[TensorboardGenerativeModelImageSampler(num_samples=1)])
        with patch.object(torchvision.utils, 'make_grid', return_value=torch.randn(1, 1, 1)) as mock_call:
            trainer.fit(module)
        # The sampler builds a grid on train-epoch-end and on validation-epoch-end.
        self.assertEqual(mock_call.call_count, 2)
# NOTE(review): bare call below looks like a stripped class-decorator /
# registration hook (e.g. `@OFPAction.register_action_type(...)`) that lost
# its `@` and receiver during processing -- confirm against the original.
_action_type(ofproto.OFPAT_SET_NW_TTL, ofproto.OFP_ACTION_NW_TTL_SIZE)
class OFPActionSetNwTtl(OFPAction):
    """OpenFlow set-IPv4-TTL action (OFPAT_SET_NW_TTL)."""
    def __init__(self, nw_ttl, type_=None, len_=None):
        # type_/len_ are accepted for parser symmetry but unused here.
        super(OFPActionSetNwTtl, self).__init__()
        self.nw_ttl = nw_ttl  # new IPv4 TTL value to set
    def parser(cls, buf, offset):
        """Unpack an OFPAT_SET_NW_TTL action from *buf* at *offset*.

        NOTE(review): takes `cls` -- presumably a stripped @classmethod.
        """
        (type_, len_, nw_ttl) = struct.unpack_from(ofproto.OFP_ACTION_NW_TTL_PACK_STR, buf, offset)
        return cls(nw_ttl)
    def serialize(self, buf, offset):
        """Pack this action into *buf* at *offset* using the OF wire format."""
        msg_pack_into(ofproto.OFP_ACTION_NW_TTL_PACK_STR, buf, offset, self.type, self.len, self.nw_ttl)
# NOTE(review): bare `_comparable` looks like a stripped class decorator
# that lost its `@` during processing -- confirm against the original.
_comparable
class Forum(db.Model, CRUDMixin):
    """A discussion forum.

    Belongs to a category, holds topics/posts, and caches denormalized
    "last post" details plus topic/post counters for cheap listing pages.
    """
    __tablename__ = 'forums'
    # -- identity & presentation --
    id = db.Column(db.Integer, primary_key=True)
    category_id = db.Column(db.Integer, db.ForeignKey('categories.id', ondelete='CASCADE'), nullable=False)
    title = db.Column(db.String(255), nullable=False)
    description = db.Column(db.Text, nullable=True)
    position = db.Column(db.Integer, default=1, nullable=False)
    locked = db.Column(db.Boolean, default=False, nullable=False)
    show_moderators = db.Column(db.Boolean, default=False, nullable=False)
    # If set, the forum is only a link to this external URL (see url()).
    external = db.Column(db.String(200), nullable=True)
    # -- denormalized counters & last-post cache (see update_last_post/recalculate) --
    post_count = db.Column(db.Integer, default=0, nullable=False)
    topic_count = db.Column(db.Integer, default=0, nullable=False)
    last_post_id = db.Column(db.Integer, db.ForeignKey('posts.id'), nullable=True)
    last_post = db.relationship('Post', backref='last_post_forum', uselist=False, foreign_keys=[last_post_id])
    last_post_user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='SET NULL'), nullable=True)
    last_post_user = db.relationship('User', uselist=False, foreign_keys=[last_post_user_id])
    last_post_title = db.Column(db.String(255), nullable=True)
    last_post_username = db.Column(db.String(255), nullable=True)
    last_post_created = db.Column(UTCDateTime(timezone=True), default=time_utcnow, nullable=True)
    # -- relationships --
    topics = db.relationship('Topic', backref='forum', lazy='dynamic', cascade='all, delete-orphan')
    moderators = db.relationship('User', secondary=moderators, primaryjoin=(moderators.c.forum_id == id), backref=db.backref('forummoderator', lazy='dynamic'), lazy='joined')
    groups = db.relationship('Group', secondary=forumgroups, primaryjoin=(forumgroups.c.forum_id == id), backref='forumgroups', lazy='joined')
    def slug(self):
        """URL slug derived from the forum title.

        NOTE(review): url() below reads `self.slug` without calling it, so
        this was presumably a stripped @property -- confirm.
        """
        return slugify(self.title)
    def url(self):
        """The forum's link target: the external URL if set, else the forum view."""
        if self.external:
            return self.external
        return url_for('forum.view_forum', forum_id=self.id, slug=self.slug)
    def last_post_url(self):
        """URL of the cached last post (assumes last_post_id is populated)."""
        return url_for('forum.view_post', post_id=self.last_post_id)
    def __repr__(self):
        """Debug representation: class name plus primary key."""
        return '<{} {}>'.format(self.__class__.__name__, self.id)
    def update_last_post(self, commit=True):
        """Refresh the cached last-post columns from the forum's newest post.

        Clears the cache when the forum has no posts. Commits unless
        *commit* is False.
        """
        last_post = Post.query.filter((Post.topic_id == Topic.id), (Topic.forum_id == self.id)).order_by(Post.date_created.desc()).limit(1).first()
        if (last_post is not None):
            # Only touch the row when the cache is actually stale.
            if (last_post != self.last_post):
                self.last_post = last_post
                self.last_post_title = last_post.topic.title
                self.last_post_user_id = last_post.user_id
                self.last_post_username = last_post.username
                self.last_post_created = last_post.date_created
        else:
            # No posts remain -- clear the whole cache.
            # NOTE(review): this branch clears `last_post_user` (the
            # relationship) while the branch above writes `last_post_user_id`
            # (the FK column); confirm the asymmetry is intentional.
            self.last_post = None
            self.last_post_title = None
            self.last_post_user = None
            self.last_post_username = None
            self.last_post_created = None
        if commit:
            db.session.commit()
    def update_read(self, user, forumsread, topicsread):
        """Mark the forum read for *user* once no unread topics remain.

        Returns True if a ForumsRead row was created or updated, else False.
        """
        if ((not user.is_authenticated) or (topicsread is None)):
            return False
        read_cutoff = None
        if (flaskbb_config['TRACKER_LENGTH'] > 0):
            # Topics older than the tracker window are treated as read.
            read_cutoff = (time_utcnow() - timedelta(days=flaskbb_config['TRACKER_LENGTH']))
        # Topics in this forum updated after the cutoff that neither a
        # TopicsRead nor a ForumsRead row marks as read for this user.
        unread_count = Topic.query.outerjoin(TopicsRead, db.and_((TopicsRead.topic_id == Topic.id), (TopicsRead.user_id == user.id))).outerjoin(ForumsRead, db.and_((ForumsRead.forum_id == Topic.forum_id), (ForumsRead.user_id == user.id))).filter((Topic.forum_id == self.id), (Topic.last_updated > read_cutoff), db.or_((TopicsRead.last_read == None), (TopicsRead.last_read < Topic.last_updated)), db.or_((ForumsRead.last_read == None), (ForumsRead.last_read < Topic.last_updated))).count()
        if (unread_count == 0):
            logger.debug('No unread topics. Trying to mark the forum as read.')
            if (forumsread and (forumsread.last_read > topicsread.last_read)):
                logger.debug('forumsread.last_read is newer than topicsread.last_read. Everything is read.')
                return False
            elif forumsread:
                logger.debug("Updating existing ForumsRead '{}' object.".format(forumsread))
                forumsread.last_read = time_utcnow()
                forumsread.save()
                return True
            logger.debug('Creating new ForumsRead object.')
            forumsread = ForumsRead()
            forumsread.user = user
            forumsread.forum = self
            forumsread.last_read = time_utcnow()
            forumsread.save()
            return True
        logger.debug('No ForumsRead object updated - there are still {} unread topics.'.format(unread_count))
        return False
    def recalculate(self, last_post=False):
        """Recount (non-hidden) topics and posts; optionally refresh the last-post cache."""
        topic_count = Topic.query.filter((Topic.forum_id == self.id), (Topic.hidden != True)).count()
        post_count = Post.query.filter((Post.topic_id == Topic.id), (Topic.forum_id == self.id), (Post.hidden != True), (Topic.hidden != True)).count()
        self.topic_count = topic_count
        self.post_count = post_count
        if last_post:
            self.update_last_post()
        self.save()
        return self
    def save(self, groups=None):
        """Persist the forum; on first save, default `groups` to all groups if not given."""
        if self.id:
            db.session.merge(self)
        else:
            with db.session.no_autoflush:
                if (groups is None):
                    # Deferred import to avoid a circular import at module load.
                    from flaskbb.user.models import Group
                    self.groups = Group.query.order_by(Group.name.asc()).all()
                db.session.add(self)
        db.session.commit()
        return self
    def delete(self, users=None):
        """Delete the forum; if *users* given, recount each user's post_count afterwards."""
        db.session.delete(self)
        db.session.commit()
        if users:
            users_list = []
            for user in users:
                user.post_count = Post.query.filter_by(user_id=user.id).count()
                users_list.append(user)
            db.session.add_all(users_list)
            db.session.commit()
        return self
    def move_topics_to(self, topics):
        """Move *topics* into this forum; returns the status of the last move."""
        status = False
        for topic in topics:
            status = topic.move(self)
        return status
    def get_forum(cls, forum_id, user):
        """Return (forum, forumsread) for *forum_id*; forumsread is None when anonymous.

        NOTE(review): takes `cls` -- presumably a stripped @classmethod.
        """
        if user.is_authenticated:
            (forum, forumsread) = Forum.query.filter((Forum.id == forum_id)).options(db.joinedload('category')).outerjoin(ForumsRead, db.and_((ForumsRead.forum_id == Forum.id), (ForumsRead.user_id == user.id))).add_entity(ForumsRead).first_or_404()
        else:
            forum = Forum.query.filter((Forum.id == forum_id)).first_or_404()
            forumsread = None
        return (forum, forumsread)
    def get_topics(cls, forum_id, user, page=1, per_page=20):
        """Paginated topics for a forum, each item a (topic, last_post, topicsread) tuple.

        NOTE(review): takes `cls` -- presumably a stripped @classmethod.
        """
        if user.is_authenticated:
            topics = Topic.query.filter_by(forum_id=forum_id).outerjoin(TopicsRead, db.and_((TopicsRead.topic_id == Topic.id), (TopicsRead.user_id == user.id))).outerjoin(Post, (Topic.last_post_id == Post.id)).add_entity(Post).add_entity(TopicsRead).order_by(Topic.important.desc(), Topic.last_updated.desc()).paginate(page, per_page, True)
        else:
            topics = Topic.query.filter_by(forum_id=forum_id).outerjoin(Post, (Topic.last_post_id == Post.id)).add_entity(Post).order_by(Topic.important.desc(), Topic.last_updated.desc()).paginate(page, per_page, True)
            # Anonymous users get no read-tracking entry; pad the tuple shape.
            topics.items = [(topic, last_post, None) for (topic, last_post) in topics.items]
        return topics
def apply_weights(cnarr, ref_matched, log2_key, spread_key, epsilon=0.0001):
    """Attach per-bin weights to *cnarr* based on bin size, sample variance,
    and (when informative) the matched reference's spread column.

    Target and antitarget bins are weighted separately from their own robust
    variance estimates; final weights are clipped to [epsilon, 1.0] and
    returned via cnarr.add_columns(weight=...).
    """
    logging.debug('Weighting bins by size and overall variance in sample')
    simple_wt = np.zeros(len(cnarr))
    # Split bins into target vs. antitarget by gene label.
    is_anti = cnarr['gene'].isin(params.ANTITARGET_ALIASES)
    tgt_cna = cnarr[(~ is_anti)]
    # Squared biweight midvariance of residuals = robust variance estimate,
    # computed after dropping low-coverage bins.
    tgt_var = (descriptives.biweight_midvariance(tgt_cna.drop_low_coverage().residuals()) ** 2)
    bin_sz = np.sqrt((tgt_cna['end'] - tgt_cna['start']))
    # Bins larger than average are penalized less by the variance term.
    tgt_simple_wts = (1 - (tgt_var / (bin_sz / bin_sz.mean())))
    simple_wt[(~ is_anti)] = tgt_simple_wts
    if is_anti.any():
        anti_cna = cnarr[is_anti]
        anti_ok = anti_cna.drop_low_coverage()
        frac_anti_low = (1 - (len(anti_ok) / len(anti_cna)))
        if (frac_anti_low > 0.5):
            # Mostly-empty antitargets usually indicate a non-hybrid-capture protocol.
            logging.warning('WARNING: Most antitarget bins ({:.2f}%, {:d}/{:d}) have low or no coverage; is this amplicon/WGS?'.format((100 * frac_anti_low), (len(anti_cna) - len(anti_ok)), len(anti_cna)))
        anti_var = (descriptives.biweight_midvariance(anti_ok.residuals()) ** 2)
        anti_bin_sz = np.sqrt((anti_cna['end'] - anti_cna['start']))
        anti_simple_wts = (1 - (anti_var / (anti_bin_sz / anti_bin_sz.mean())))
        simple_wt[is_anti] = anti_simple_wts
        # Floor both variances at 0.01 so the ratio stays finite/meaningful.
        var_ratio = (max(tgt_var, 0.01) / max(anti_var, 0.01))
        if (var_ratio > 1):
            logging.info('Targets are %.2f x more variable than antitargets', var_ratio)
        else:
            logging.info('Antitargets are %.2f x more variable than targets', (1.0 / var_ratio))
    # Blend in the reference spread only when it is informative: some spread
    # values are nonzero AND the reference log2 values are not all
    # (near-)integers -- presumably guarding against flat/placeholder
    # references; confirm against upstream behavior.
    if ((ref_matched[spread_key] > epsilon).any() and (np.abs(np.mod(ref_matched[log2_key], 1)) > epsilon).any()):
        logging.debug('Weighting bins by coverage spread in reference')
        fancy_wt = (1.0 - (ref_matched[spread_key] ** 2))
        # Fixed 90/10 blend of spread-based and simple weights.
        x = 0.9
        weights = ((x * fancy_wt) + ((1 - x) * simple_wt))
    else:
        weights = simple_wt
    return cnarr.add_columns(weight=weights.clip(epsilon, 1.0))
class TestLimitedWorkerManager(BaseTestWorkerManager):
    """Exercises PredicateWorkerLimit enforcement in ToyWorkerManager."""
    # Re-created per test by setup_worker_manager().
    limits = []
    def setup_worker_manager(self):
        # Allow at most 3 concurrent "odd" tasks and 2 "even" tasks.
        self.limits = [PredicateWorkerLimit((lambda x: x.is_odd), 3, name='odd'), PredicateWorkerLimit((lambda x: (not x.is_odd)), 2, name='even')]
        self.worker_manager = ToyWorkerManager(redis_connection=self.redis, max_workers=50, log=log, limits=self.limits)
    # NOTE(review): the two bare string expressions below look like stripped
    # `@mock.patch(...)` decorators -- the test signature expects the
    # corresponding mocks (mc_time, _mc_sleep). Confirm and restore.
    ('copr_common.worker_manager.time.sleep')
    ('copr_common.worker_manager.time.time')
    def test_that_limits_are_respected(self, mc_time, _mc_sleep, caplog):
        """Tasks over a limit are skipped (with a debug log naming the limit),
        and become startable once a blocking worker finishes."""
        self.worker_manager.task_sleep = 5
        self.worker_manager.worker_timeout_start = 1000
        # Deterministic monotonically increasing clock for the manager loop.
        mc_time.side_effect = range(1000)
        self.worker_manager.run(timeout=150)
        messages = ["Task '4' skipped, limit info: 'even', matching: worker:0, worker:2", "Task '6' skipped, limit info: 'even', matching: worker:0, worker:2", "Task '7' skipped, limit info: 'odd', matching: worker:1, worker:3, worker:5", "Task '8' skipped, limit info: 'even', matching: worker:0, worker:2", "Task '9' skipped, limit info: 'odd', matching: worker:1, worker:3, worker:5"]
        for msg in messages:
            assert (('root', logging.DEBUG, msg) in caplog.record_tuples)
        assert (('root', logging.INFO, 'Starting worker worker:5, task.priority=0') in caplog.record_tuples)
        # Mark worker:5 finished, re-queue tasks, and run again.
        self.redis.hset('worker:5', 'status', '0')
        self.setup_tasks()
        self.worker_manager.run(timeout=150)
        assert (('root', logging.INFO, 'Finished worker worker:5') in caplog.record_tuples)
        # Task 7 still blocked by the 'odd' limit while task 5 occupies a slot.
        worker_7_started = 'Starting worker worker:7, task.priority=0'
        assert (('root', logging.INFO, worker_7_started) not in caplog.record_tuples)
        # With task 5 excluded, a slot frees up and task 7 can start.
        self.setup_tasks(exclude=[5])
        self.worker_manager.run(timeout=150)
        assert (('root', logging.INFO, worker_7_started) in caplog.record_tuples)
class AddBosEosPreEncoder(PreEncoder):
    """Pre-encoder that brackets each chunk sequence with optional
    beginning-of-sequence / end-of-sequence special pieces."""

    def __init__(self, *, bos_piece: Optional[str], eos_piece: Optional[str]):
        # Either piece may be None, in which case it is simply not inserted.
        self.bos_piece = bos_piece
        self.eos_piece = eos_piece

    def __call__(self, chunks: Iterable[InputChunks]) -> List[InputChunks]:
        """Return new InputChunks with BOS prepended / EOS appended where configured."""
        wrapped = []
        for seq_chunks in chunks:
            pieces = []
            if self.bos_piece is not None:
                pieces.append(SpecialPieceChunk(self.bos_piece))
            pieces.extend(seq_chunks)
            if self.eos_piece is not None:
                pieces.append(SpecialPieceChunk(self.eos_piece))
            wrapped.append(InputChunks(pieces))
        return wrapped
class HeaderFunction():
    """Model of a Cython ``cpdef`` header whose arguments use fused types.

    Can be loaded from an existing header file (*path*) or built in memory
    from a function *name* plus argument names; ``make_code`` renders the
    header back out, and signatures accumulate concrete types per argument.
    """

    def __init__(self, path=None, name=None, arguments=None, types: dict=None, imports=None):
        if path is not None:
            # Parse an existing header file: last line is the cpdef signature,
            # preceding lines are imports plus one "ctypedef fused" block per arg.
            self.path = path
            with open(path) as fh:
                content = fh.readlines()
            signature_line = content[(- 1)]
            assert signature_line.startswith('cpdef ')
            name = signature_line.split(' ', 1)[1].split('(', 1)[0]
            sections = [section.strip() for section in ''.join(content[:(- 1)]).split('ctypedef fused ')]
            imports = sections[0]
            types = {}
            for section in sections[1:]:
                # Each fused-type block is named "__<func>_<arg>".
                assert section.startswith(f'__{name}_')
                section_lines = section.split('\n')
                arg = section_lines[0].split(f'__{name}_', 1)[1].split(':', 1)[0]
                types[arg] = {line.strip() for line in section_lines[1:]}
        if types is None:
            if arguments is None:
                raise ValueError
            # Fresh header: start every argument with an empty type set.
            types = {arg: set() for arg in arguments}
        if arguments is None:
            arguments = types.keys()
        self.arguments = arguments
        self.name = name
        self.types = types
        self.imports = imports

    def make_code(self):
        """Render the header: imports, one fused-type block per argument,
        then the cpdef signature line."""
        parts = [self.imports + '\n\n']
        for (arg, accepted) in self.types.items():
            parts.append(f'''ctypedef fused __{self.name}_{arg}:
''')
            for type_ in sorted(accepted):
                parts.append(f''' {type_}
''')
            parts.append('\n')
        signature = ', '.join(f'__{self.name}_{arg} {arg}' for arg in self.types)
        parts.append(f'cpdef {self.name}({signature})')
        return ''.join(parts)

    def add_signature(self, new_types):
        """Record one concrete type per argument (positionally aligned)."""
        for (concrete, accepted) in zip(new_types, self.types.values()):
            accepted.add(concrete)

    def update_with_other_header(self, other):
        """Merge the accepted type sets of *other* (same function/arguments) into self."""
        if self.name != other.name:
            raise ValueError
        if self.types.keys() != other.types.keys():
            raise ValueError
        for (arg, other_set) in other.types.items():
            self.types[arg].update(other_set)
# NOTE(review): bare call below looks like a stripped decorator (e.g. a
# d2go `@fb_overwritable()`-style hook) that lost its `@` during
# processing -- confirm against the original.
_overwritable()
def build_mapped_train_loader(cfg, mapper):
    """Build the training data loader selected by ``cfg.DATALOADER.SAMPLER_TRAIN``.

    'WeightedTrainingSampler' and 'WeightedCategoryTrainingSampler' use the
    weighted builder (the latter with per-category balancing); anything else
    falls back to the plain detection train loader.
    """
    sampler_name = cfg.DATALOADER.SAMPLER_TRAIN
    if sampler_name == 'WeightedTrainingSampler':
        return build_weighted_detection_train_loader(cfg, mapper=mapper)
    if sampler_name == 'WeightedCategoryTrainingSampler':
        return build_weighted_detection_train_loader(cfg, mapper=mapper, enable_category_balance=True)
    return build_detection_train_loader(cfg, mapper=mapper)
class OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Accessors for the
    `series.sunburst.sonification.defaultInstrumentOptions.mapping.tremolo`
    option group.

    NOTE(review): these look like stripped `@property` accessors -- confirm.
    """
    def depth(self) -> 'OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        """Sub-option container for the tremolo `depth` mapping."""
        return self._config_sub_data('depth', OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingTremoloDepth)
    def speed(self) -> 'OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        """Sub-option container for the tremolo `speed` mapping."""
        return self._config_sub_data('speed', OptionSeriesSunburstSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
def run_app_with_daemon(fd, is_gui, config_options):
    """Run the application main loop alongside the daemon, then exit.

    Starts the daemon, runs the app, and guarantees the daemon is stopped and
    joined even if the app loop raises. Ctrl-C is treated as a normal
    shutdown. Never returns: finishes with ``sys.exit(0)``.
    (``config_options`` is currently unused here.)
    """
    with app_state.async_ as async_:
        background = daemon.Daemon(fd, is_gui)
        app_state.app.setup_app()
        background.start()
        try:
            app_state.app.run_app()
        except KeyboardInterrupt:
            # Ctrl-C is a normal way to quit; fall through to cleanup.
            pass
        finally:
            background.stop()
            background.join()
        sys.exit(0)
def iter_hmm_hits(hmmfile, cpus, servers, dbtype=DB_TYPE_HMM, evalue_thr=None, score_thr=None, max_hits=None, skip=None, maxseqlen=None, fixed_Z=None, cut_ga=False, silent=False):
    """Yield search results for every HMM in *hmmfile*, scanning in parallel.

    Each model from iter_hmm_file() is dispatched to a worker pool of *cpus*
    processes running iter_hmm(); results are yielded in input order
    (pool.imap preserves ordering).
    """
    pool = multiprocessing.Pool(cpus)
    try:
        arg_lists = ([hmmnum, hmmer_version, name, leng, model, servers, dbtype,
                      evalue_thr, score_thr, max_hits, maxseqlen, fixed_Z, skip, cut_ga]
                     for (hmmnum, (hmmer_version, name, leng, model))
                     in enumerate(iter_hmm_file(hmmfile, skip, silent=silent)))
        for r in pool.imap(iter_hmm, arg_lists):
            yield r
    finally:
        # FIX: the original only terminated the pool after normal exhaustion,
        # so worker processes leaked if the consumer abandoned the generator
        # early (GeneratorExit) or an exception propagated. try/finally
        # guarantees cleanup on every exit path.
        pool.terminate()
        pool.join()
class InstagramInsightsResult(AbstractCrudObject):
    """Graph API node wrapper for an Instagram insights result.

    NOTE(review): the structure matches the facebook-business SDK's
    auto-generated classes -- if so, changes belong in the generator,
    not in hand edits here.
    """
    def __init__(self, fbid=None, parent_id=None, api=None):
        # Marker attribute used by the SDK's type checks.
        self._isInstagramInsightsResult = True
        super(InstagramInsightsResult, self).__init__(fbid, parent_id, api)
    class Field(AbstractObject.Field):
        """Field names available on this node."""
        description = 'description'
        id = 'id'
        name = 'name'
        period = 'period'
        title = 'title'
        total_value = 'total_value'
        values = 'values'
    class Breakdown():
        """Valid values for the `breakdown` request parameter."""
        action_type = 'action_type'
        follow_type = 'follow_type'
        story_navigation_action_type = 'story_navigation_action_type'
        surface_type = 'surface_type'
    class Metric():
        """Valid values for the `metric` request parameter."""
        carousel_album_engagement = 'carousel_album_engagement'
        carousel_album_impressions = 'carousel_album_impressions'
        carousel_album_reach = 'carousel_album_reach'
        carousel_album_saved = 'carousel_album_saved'
        carousel_album_video_views = 'carousel_album_video_views'
        comments = 'comments'
        engagement = 'engagement'
        exits = 'exits'
        follows = 'follows'
        ig_reels_avg_watch_time = 'ig_reels_avg_watch_time'
        ig_reels_video_view_total_time = 'ig_reels_video_view_total_time'
        impressions = 'impressions'
        likes = 'likes'
        navigation = 'navigation'
        plays = 'plays'
        profile_activity = 'profile_activity'
        profile_visits = 'profile_visits'
        reach = 'reach'
        replies = 'replies'
        saved = 'saved'
        shares = 'shares'
        taps_back = 'taps_back'
        taps_forward = 'taps_forward'
        total_interactions = 'total_interactions'
        video_views = 'video_views'
    class Period():
        """Valid values for the `period` request parameter."""
        day = 'day'
        days_28 = 'days_28'
        lifetime = 'lifetime'
        month = 'month'
        total_over_range = 'total_over_range'
        week = 'week'
    class MetricType():
        """Valid values for the `metric_type` request parameter."""
        value_default = 'default'
        time_series = 'time_series'
        total_value = 'total_value'
    class Timeframe():
        """Valid values for the `timeframe` request parameter."""
        last_14_days = 'last_14_days'
        last_30_days = 'last_30_days'
        last_90_days = 'last_90_days'
        prev_month = 'prev_month'
        this_month = 'this_month'
        this_week = 'this_week'
    # Maps field name -> SDK type string, used for response deserialization.
    _field_types = {'description': 'string', 'id': 'string', 'name': 'string', 'period': 'string', 'title': 'string', 'total_value': 'Object', 'values': 'list<InstagramInsightsValue>'}
    def _get_field_enum_info(cls):
        """Map enum-class names to their allowed string values.

        NOTE(review): takes `cls` -- presumably a stripped @classmethod.
        """
        field_enum_info = {}
        field_enum_info['Breakdown'] = InstagramInsightsResult.Breakdown.__dict__.values()
        field_enum_info['Metric'] = InstagramInsightsResult.Metric.__dict__.values()
        field_enum_info['Period'] = InstagramInsightsResult.Period.__dict__.values()
        field_enum_info['MetricType'] = InstagramInsightsResult.MetricType.__dict__.values()
        field_enum_info['Timeframe'] = InstagramInsightsResult.Timeframe.__dict__.values()
        return field_enum_info
class TestGenericLedgerApiHandler(BaseSkillTestCase):
    """Tests for the generic_seller skill's GenericLedgerApiHandler."""
    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'generic_seller')
    # Ledger-api messages are skill<->ledger, not agent<->agent.
    is_agent_to_agent_messages = False
    def setup(cls):
        """Load the skill and prepare shared dialogue/terms fixtures.

        NOTE(review): takes `cls` -- presumably a stripped @classmethod.
        """
        super().setup()
        cls.ledger_api_handler = cast(GenericLedgerApiHandler, cls._skill.skill_context.handlers.ledger_api)
        cls.strategy = cast(GenericStrategy, cls._skill.skill_context.strategy)
        cls.fipa_dialogues = cast(FipaDialogues, cls._skill.skill_context.fipa_dialogues)
        cls.ledger_api_dialogues = cast(LedgerApiDialogues, cls._skill.skill_context.ledger_api_dialogues)
        cls.terms = Terms('some_ledger_id', cls._skill.skill_context.agent_address, 'counterprty', {'currency_id': 50}, {'good_id': (- 10)}, 'some_nonce')
        # A complete FIPA negotiation up to the buyer's payment INFORM.
        cls.list_of_fipa_messages = (DialogueMessage(FipaMessage.Performative.CFP, {'query': 'some_query'}, True), DialogueMessage(FipaMessage.Performative.PROPOSE, {'proposal': 'some_proposal'}), DialogueMessage(FipaMessage.Performative.ACCEPT), DialogueMessage(FipaMessage.Performative.MATCH_ACCEPT_W_INFORM, {'info': {'address': 'some_term_sender_address'}}), DialogueMessage(FipaMessage.Performative.INFORM, {'info': {'transaction_digest': 'some_transaction_digest_body'}}))
        cls.transaction_digest = TransactionDigest('some_ledger_id', 'some_body')
        cls.transaction_receipt = TransactionReceipt('some_ledger_id', {'some_key': 'some_value'}, {'some_key': 'some_value'})
        cls.list_of_ledger_api_messages = (DialogueMessage(LedgerApiMessage.Performative.GET_TRANSACTION_RECEIPT, {'transaction_digest': cls.transaction_digest}), DialogueMessage(LedgerApiMessage.Performative.TRANSACTION_RECEIPT, {'transaction_receipt': cls.transaction_receipt}))
    def test_setup(self):
        """Handler setup is a no-op and sends nothing."""
        assert (self.ledger_api_handler.setup() is None)
        self.assert_quantity_in_outbox(0)
    def test_handle_unidentified_dialogue(self):
        """A message with an invalid dialogue reference is logged as unidentified."""
        incorrect_dialogue_reference = ('', '')
        incoming_message = self.build_incoming_message(message_type=LedgerApiMessage, dialogue_reference=incorrect_dialogue_reference, performative=LedgerApiMessage.Performative.GET_BALANCE, ledger_id='some_ledger_id', address='some_address')
        with patch.object(self.ledger_api_handler.context.logger, 'log') as mock_logger:
            self.ledger_api_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received invalid ledger_api message={incoming_message}, unidentified dialogue.')
    def test_handle_balance(self):
        """A BALANCE reply is logged with the ledger id and amount."""
        ledger_id = 'some_Ledger_id'
        ledger_api_dialogue = cast(LedgerApiDialogue, self.prepare_skill_dialogue(dialogues=self.ledger_api_dialogues, messages=(DialogueMessage(LedgerApiMessage.Performative.GET_BALANCE, {'ledger_id': 'some_ledger_id', 'address': 'some_address'}),), counterparty=LEDGER_API_ADDRESS))
        incoming_message = cast(LedgerApiMessage, self.build_incoming_message_for_skill_dialogue(dialogue=ledger_api_dialogue, performative=LedgerApiMessage.Performative.BALANCE, ledger_id=ledger_id, balance=10))
        with patch.object(self.ledger_api_handler.context.logger, 'log') as mock_logger:
            self.ledger_api_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'starting balance on {ledger_id} ledger={incoming_message.balance}.')
    def test_handle_transaction_receipt_is_settled_and_is_valid_last_incoming_fipa_message_is_none(self):
        """A settled+valid receipt with no last FIPA message raises ValueError."""
        ledger_api_dialogue = cast(LedgerApiDialogue, self.prepare_skill_dialogue(dialogues=self.ledger_api_dialogues, messages=self.list_of_ledger_api_messages[:1], counterparty=LEDGER_API_ADDRESS))
        fipa_dialogue = cast(FipaDialogue, self.prepare_skill_dialogue(dialogues=self.fipa_dialogues, messages=self.list_of_fipa_messages[:5], is_agent_to_agent_messages=True))
        ledger_api_dialogue.associated_fipa_dialogue = fipa_dialogue
        fipa_dialogue.terms = self.terms
        fipa_dialogue.data_for_sale = {'data_type_1': 'data_1'}
        # Force the error condition: no incoming FIPA messages to reply to.
        fipa_dialogue._incoming_messages = []
        incoming_message = cast(LedgerApiMessage, self.build_incoming_message_for_skill_dialogue(dialogue=ledger_api_dialogue, performative=LedgerApiMessage.Performative.TRANSACTION_RECEIPT, transaction_receipt=self.transaction_receipt))
        with patch.object(aea.crypto.ledger_apis.LedgerApis, 'is_transaction_settled', return_value=True):
            with patch.object(aea.crypto.ledger_apis.LedgerApis, 'is_transaction_valid', return_value=True):
                with pytest.raises(ValueError, match='Cannot retrieve last fipa message.'):
                    self.ledger_api_handler.handle(incoming_message)
    def test_handle_transaction_receipt_is_settled_and_is_valid(self):
        """A settled+valid receipt sends the sold data via FIPA INFORM and ends the dialogue as SUCCESSFUL."""
        ledger_api_dialogue = cast(LedgerApiDialogue, self.prepare_skill_dialogue(dialogues=self.ledger_api_dialogues, messages=self.list_of_ledger_api_messages[:1], counterparty=LEDGER_API_ADDRESS))
        fipa_dialogue = cast(FipaDialogue, self.prepare_skill_dialogue(dialogues=self.fipa_dialogues, messages=self.list_of_fipa_messages[:5], is_agent_to_agent_messages=True))
        ledger_api_dialogue.associated_fipa_dialogue = fipa_dialogue
        fipa_dialogue.terms = self.terms
        fipa_dialogue.data_for_sale = {'data_type_1': 'data_1'}
        incoming_message = cast(LedgerApiMessage, self.build_incoming_message_for_skill_dialogue(dialogue=ledger_api_dialogue, performative=LedgerApiMessage.Performative.TRANSACTION_RECEIPT, transaction_receipt=self.transaction_receipt))
        # Sanity: no dialogue has reached an end-state yet.
        for end_state_numbers in (list(self.fipa_dialogues.dialogue_stats.self_initiated.values()) + list(self.fipa_dialogues.dialogue_stats.other_initiated.values())):
            assert (end_state_numbers == 0)
        with patch.object(aea.crypto.ledger_apis.LedgerApis, 'is_transaction_settled', return_value=True):
            with patch.object(aea.crypto.ledger_apis.LedgerApis, 'is_transaction_valid', return_value=True):
                with patch.object(self.ledger_api_handler.context.logger, 'log') as mock_logger:
                    self.ledger_api_handler.handle(incoming_message)
        # Exactly one outgoing message: the INFORM carrying the data for sale.
        self.assert_quantity_in_outbox(1)
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=FipaMessage, performative=FipaMessage.Performative.INFORM, to=COUNTERPARTY_AGENT_ADDRESS, sender=self.skill.skill_context.agent_address, target=fipa_dialogue.last_incoming_message.message_id, info=fipa_dialogue.data_for_sale)
        assert has_attributes, error_str
        for end_state_numbers in self.fipa_dialogues.dialogue_stats.self_initiated.values():
            assert (end_state_numbers == 0)
        for (end_state, end_state_numbers) in self.fipa_dialogues.dialogue_stats.other_initiated.items():
            if (end_state == FipaDialogue.EndState.SUCCESSFUL):
                assert (end_state_numbers == 1)
            else:
                assert (end_state_numbers == 0)
        mock_logger.assert_any_call(logging.INFO, f'transaction confirmed, sending data={fipa_dialogue.data_for_sale} to buyer={COUNTERPARTY_AGENT_ADDRESS[(- 5):]}.')
    def test_handle_transaction_receipt_not_is_settled_or_not_is_valid(self):
        """A receipt that is settled but invalid is logged and aborted."""
        ledger_api_dialogue = cast(LedgerApiDialogue, self.prepare_skill_dialogue(dialogues=self.ledger_api_dialogues, messages=self.list_of_ledger_api_messages[:1], counterparty=LEDGER_API_ADDRESS))
        fipa_dialogue = cast(FipaDialogue, self.prepare_skill_dialogue(dialogues=self.fipa_dialogues, messages=self.list_of_fipa_messages[:5], is_agent_to_agent_messages=True))
        ledger_api_dialogue.associated_fipa_dialogue = fipa_dialogue
        fipa_dialogue.terms = self.terms
        incoming_message = cast(LedgerApiMessage, self.build_incoming_message_for_skill_dialogue(dialogue=ledger_api_dialogue, performative=LedgerApiMessage.Performative.TRANSACTION_RECEIPT, transaction_receipt=self.transaction_receipt))
        with patch.object(aea.crypto.ledger_apis.LedgerApis, 'is_transaction_settled', return_value=True):
            with patch.object(aea.crypto.ledger_apis.LedgerApis, 'is_transaction_valid', return_value=False):
                with patch.object(self.ledger_api_handler.context.logger, 'log') as mock_logger:
                    self.ledger_api_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'transaction_receipt={self.transaction_receipt} not settled or not valid, aborting')
    def test_handle_error(self):
        """An ERROR performative is logged with its dialogue."""
        ledger_api_dialogue = self.prepare_skill_dialogue(dialogues=self.ledger_api_dialogues, messages=self.list_of_ledger_api_messages[:1])
        incoming_message = cast(LedgerApiMessage, self.build_incoming_message_for_skill_dialogue(dialogue=ledger_api_dialogue, performative=LedgerApiMessage.Performative.ERROR, code=1))
        with patch.object(self.ledger_api_handler.context.logger, 'log') as mock_logger:
            self.ledger_api_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received ledger_api error message={incoming_message} in dialogue={ledger_api_dialogue}.')
    def test_handle_invalid(self):
        """A performative the handler cannot process triggers a warning log."""
        invalid_performative = LedgerApiMessage.Performative.GET_BALANCE
        incoming_message = self.build_incoming_message(message_type=LedgerApiMessage, dialogue_reference=('1', ''), performative=invalid_performative, ledger_id='some_ledger_id', address='some_address', to=str(self.skill.skill_context.skill_id))
        with patch.object(self.ledger_api_handler.context.logger, 'log') as mock_logger:
            self.ledger_api_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.WARNING, f'cannot handle ledger_api message of performative={invalid_performative} in dialogue={self.ledger_api_dialogues.get_dialogue(incoming_message)}.')
    def test_teardown(self):
        """Handler teardown is a no-op and sends nothing."""
        assert (self.ledger_api_handler.teardown() is None)
        self.assert_quantity_in_outbox(0)
def swap(self, context, island_stats_source):
    """Select every UV island whose stats match *island_stats_source*.

    Deselects everything first, selects the UV loops of all matching
    islands, and restores the user's original UV selection mode at the end.
    """
    previous_mode = bpy.context.scene.tool_settings.uv_select_mode
    bm = bmesh.from_edit_mesh(bpy.context.active_object.data)
    uv_layers = bm.loops.layers.uv.verify()
    # Collect islands whose measured stats equal the source island's stats.
    matches = [
        island
        for island in utilities_uv.getAllIslands(bm, uv_layers)
        if island_stats_source.isEqual(Island_stats(bm, island))
    ]
    bpy.ops.uv.select_all(action='DESELECT')
    for island in matches:
        for face in island:
            for loop in face.loops:
                loop[uv_layers].select = True
    # Switch to vertex select mode, then restore the mode the user had.
    bpy.ops.uv.select_mode(type='VERTEX')
    bpy.context.scene.tool_settings.uv_select_mode = previous_mode
def extractBoredtranslationsHomeBlog(item):
    """Map a scraped release post to a release message.

    Returns None for previews / items without a chapter or volume number,
    a release message when a tag or title rule matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if tagname in item['tags']:
            # FIX: this branch called `_buildReleaseMessage`, inconsistent
            # with the titlemap branch below; both paths now use the same
            # helper, buildReleaseMessageWithType.
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    if item['tags'] == ['Uncategorized']:
        # Fallback: untagged posts are recognized by title substrings.
        titlemap = [('Max Level Witch c', 'While Killing Slimes for 300 Years, I Became the MAX Level Unknowingly', 'translated'), ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'), ('Master of Dungeon', 'Master of Dungeon', 'oel')]
        for (titlecomponent, name, tl_type) in titlemap:
            if titlecomponent.lower() in item['title'].lower():
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_unexpected_close(unused_tcp_port, ws):
    """A server that closes immediately must surface ConnectionClosedOK to the reader."""
    async def handler(websocket, path):
        # Close the connection as soon as the client connects.
        await websocket.close()
    ws('localhost', unused_tcp_port, handler)
    url = f'ws://localhost:{unused_tcp_port}'
    with ExitStack() as cleanup:
        duplexer = SyncWebsocketDuplexer(url, url, None, None)
        cleanup.callback(duplexer.stop)
        with pytest.raises(ConnectionClosedOK):
            next(duplexer.receive())
class RoleSwitch(unittest.TestCase):
    """Verify Nicira role requests and their effect on switch behavior.

    Two controller connections are flipped between master/slave/other roles;
    slave connections must get OFPET_BAD_REQUEST/OFPBRC_EPERM errors for
    state-modifying messages while master/other connections must not.
    """
    def setUp(self):
        host = config['controller_host']
        self.controllers = [controller.Controller(host=host, port=6633), controller.Controller(host=host, port=6634)]
    def runTest(self):
        # Bring up both controller connections and sanity-check the handshake.
        for con in self.controllers:
            con.start()
            if not con.connect():
                raise AssertionError('failed to connect controller %s' % str(con))
            (reply, _) = con.transact(ofp.message.features_request())
            self.assertTrue(isinstance(reply, ofp.message.features_reply))
        # Initial roles: 0 = master, 1 = slave.
        set_role(self, NX_ROLE_MASTER, con=self.controllers[0])
        set_role(self, NX_ROLE_SLAVE, con=self.controllers[1])
        self.verify_role(self.controllers[0], True)
        self.verify_role(self.controllers[1], False)
        # Promoting 1 to master implicitly demotes 0.
        set_role(self, NX_ROLE_MASTER, con=self.controllers[1])
        self.verify_role(self.controllers[0], False)
        self.verify_role(self.controllers[1], True)
        # And vice versa.
        set_role(self, NX_ROLE_MASTER, con=self.controllers[0])
        self.verify_role(self.controllers[0], True)
        self.verify_role(self.controllers[1], False)
        # "Other" keeps full send rights alongside the master.
        set_role(self, NX_ROLE_OTHER, con=self.controllers[1])
        self.verify_role(self.controllers[0], True)
        self.verify_role(self.controllers[1], True)
        # Both slaves: both must be refused.
        set_role(self, NX_ROLE_SLAVE, con=self.controllers[0])
        set_role(self, NX_ROLE_SLAVE, con=self.controllers[1])
        self.verify_role(self.controllers[0], False)
        self.verify_role(self.controllers[1], False)
    def verify_role(self, con, master):
        """Send a flow-mod on *con*; expect zero errors if *master*, else exactly one."""
        # BUGFIX: the original read `flow_add(buffer_id=)` -- a syntax error.
        # OFP_NO_BUFFER ("no buffered packet") is the conventional value for a
        # probe flow-mod; confirm against the upstream test.
        con.message_send(ofp.message.flow_add(buffer_id=ofp.OFP_NO_BUFFER))
        do_barrier(con)
        err_count = 0
        while con.packets:
            msg = con.packets.pop(0)[0]
            if msg.type == ofp.OFPT_ERROR:
                # Slaves must be refused with a permissions error.
                # (assertEquals is a deprecated alias; use assertEqual.)
                self.assertEqual(msg.err_type, ofp.OFPET_BAD_REQUEST)
                self.assertEqual(msg.code, ofp.OFPBRC_EPERM)
                err_count += 1
        if master:
            self.assertEqual(err_count, 0, 'Expected no errors')
        else:
            self.assertEqual(err_count, 1, 'Expected errors for each message')
    def tearDown(self):
        for con in self.controllers:
            con.shutdown()
class VlanExact(MatchTest):
    """Match on an exact VLAN VID and PCP pair; any deviation must miss."""
    def runTest(self):
        match = ofp.match([
            ofp.oxm.vlan_vid(ofp.OFPVID_PRESENT | 2),
            ofp.oxm.vlan_pcp(3),
        ])
        matching = {'vid=2 pcp=3': simple_tcp_packet(dl_vlan_enable=True, vlan_vid=2, vlan_pcp=3)}
        # Packets that differ in VID, PCP, or both, plus an untagged packet.
        nonmatching = {}
        for label, vid, pcp in [('vid=4 pcp=2', 4, 2), ('vid=4 pcp=3', 4, 3), ('vid=2 pcp=2', 2, 2), ('vid=0 pcp=3', 0, 3), ('vid=2 pcp=0', 2, 0)]:
            nonmatching[label] = simple_tcp_packet(dl_vlan_enable=True, vlan_vid=vid, vlan_pcp=pcp)
        nonmatching['no vlan tag'] = simple_tcp_packet()
        self.verify_match(match, matching, nonmatching)
class GemmRCRFunction():
    """Callable wrapper that cycles round-robin through a pool of (a, b)
    gemm inputs and feeds them to a GemmRCRModule."""

    def __init__(self, inputs_pool):
        self._it_pool = 0
        self._as = [entry['a'] for entry in inputs_pool]
        self._bs = [entry['b'] for entry in inputs_pool]
        self._inputs_pool_size = len(inputs_pool)
        self._module = GemmRCRModule()

    def next_input(self):
        """Advance the cursor (wrapping around) and return the (a, b) pair there."""
        self._it_pool = (self._it_pool + 1) % self._inputs_pool_size
        return (self._as[self._it_pool], self._bs[self._it_pool])

    def __call__(self):
        """Run the module on the next pooled input pair."""
        a, b = self.next_input()
        return self._module(a, b)
class DeleteIndices():
    """Curator action: delete the indices held in an IndexList, in chunks,
    retrying up to three times per chunk on partial failure."""
    def __init__(self, ilo, master_timeout=30):
        """Validate the IndexList *ilo* and store the client/timeout.

        Raises TypeError if *master_timeout* is not an int.
        """
        verify_index_list(ilo)
        if not isinstance(master_timeout, int):
            raise TypeError(f'Incorrect type for "master_timeout": {type(master_timeout)}. Should be integer value.')
        self.index_list = ilo
        self.client = ilo.client
        # Elasticsearch expects a duration string, e.g. "30s".
        self.master_timeout = (str(master_timeout) + 's')
        self.loggit = logging.getLogger('curator.actions.delete_indices')
        self.loggit.debug('master_timeout value: %s', self.master_timeout)
    def _verify_result(self, result, count):
        """Return True if *result* (indices still present after try #*count*)
        is empty; otherwise log each survivor and return False."""
        if isinstance(result, list) and result:
            self.loggit.error('The following indices failed to delete on try #%s:', count)
            for idx in result:
                self.loggit.error('---%s', idx)
            retval = False
        else:
            self.loggit.debug('Successfully deleted all indices on try #%s', count)
            retval = True
        return retval
    def __chunk_loop(self, chunk_list):
        """Delete one chunk of indices, retrying survivors up to 3 times."""
        working_list = chunk_list
        for count in range(1, 4):
            for i in working_list:
                self.loggit.info('---deleting index %s', i)
            # FIX: issue the bulk delete ONCE per attempt. The original had
            # this call inside the per-index logging loop above, re-sending
            # the full CSV of the chunk once for every index in it.
            self.client.indices.delete(index=to_csv(working_list), master_timeout=self.master_timeout)
            # Whatever still exists after the call failed to delete.
            result = [i for i in working_list if (i in get_indices(self.client))]
            if self._verify_result(result, count):
                return
            working_list = result
        self.loggit.error('Unable to delete the following indices after 3 attempts: %s', result)
    def do_dry_run(self):
        """Log what would be deleted without touching the cluster."""
        show_dry_run(self.index_list, 'delete_indices')
    def do_action(self):
        """Delete all selected indices, chunking to respect URL-length limits."""
        self.index_list.empty_list_check()
        msg = f'Deleting {len(self.index_list.indices)} selected indices: {self.index_list.indices}'
        self.loggit.info(msg)
        try:
            index_lists = chunk_index_list(self.index_list.indices)
            for lst in index_lists:
                self.__chunk_loop(lst)
        except Exception as err:
            report_failure(err)
# Fix: both decorator lines had lost their "@pytest.mark" prefix during
# extraction (a bare ".integrationtest" line is a syntax error).
@pytest.mark.integrationtest
@pytest.mark.skipif((not has_postgres_configured), reason='PostgresSQL not configured')
def test_psycopg2_register_json(instrument, postgres_connection, elasticapm_client):
    """register_json must keep working under instrumentation, both with the
    connection passed positionally and via the `conn_or_curs` keyword."""
    import psycopg2.extras
    elasticapm_client.begin_transaction('web.django')
    # Positional connection argument.
    new_type = psycopg2.extras.register_json(postgres_connection, loads=(lambda x: x))
    assert (new_type is not None)
    # Keyword form exercised separately: the wrapper must handle both.
    new_type = psycopg2.extras.register_json(conn_or_curs=postgres_connection, loads=(lambda x: x))
    assert (new_type is not None)
    elasticapm_client.end_transaction(None, 'test-transaction')
def test_apple_roundtrips(use_builtin_types):
    """Parsing a plist and re-serializing it must be byte-stable."""
    first_pass = plistlib.loads(TESTDATA, use_builtin_types=use_builtin_types)
    serialized = plistlib.dumps(first_pass, use_builtin_types=use_builtin_types)
    second_pass = plistlib.loads(serialized, use_builtin_types=use_builtin_types)
    reserialized = plistlib.dumps(second_pass, use_builtin_types=use_builtin_types)
    assert serialized == reserialized
def test_yaml_loader_with_expired_cache(tmpdir, mocker, _json_cache, yaml_string, expected_from_loader):
    """When the cache file is older than max_age, the loader must re-fetch
    accounts over HTTP and rewrite the cache file."""
    mock_resp = mocker.Mock()
    mock_resp.status_code = 200
    mock_resp.text = yaml_string
    mock_get = mocker.patch('requests.Session.get', return_value=mock_resp)
    mocker.patch('tempfile.gettempdir', return_value=tmpdir)
    cache_date_before = Path(tmpdir.join('awsrun.dat')).stat().st_mtime
    # Jump past the 86400s max_age so the cache is considered expired.
    with freeze_time((datetime.utcnow() + timedelta(days=1, seconds=5))):
        # Fix: this call was an unterminated string literal in the source.
        # NOTE(review): the original URL was lost in extraction; any HTTP URL
        # works here because requests.Session.get is mocked above — confirm.
        acctload.YAMLAccountLoader('https://example.com/accounts.yaml', max_age=86400)
    mock_get.assert_called_once()
    cache_date_after = Path(tmpdir.join('awsrun.dat')).stat().st_mtime
    assert (cache_date_before < cache_date_after)
    with open(tmpdir.join('awsrun.dat'), encoding='utf-8') as f:
        cached_accts = yaml.safe_load(f)
    assert (cached_accts == expected_from_loader)
def extractWwwExenovelsCom(item):
    """Parser for exenovels.com feed items: None to skip, False when no tag
    matches, otherwise a release message."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Items without chapter/volume info, previews and teasers are skipped.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if '[Teaser]' in item['title']:
        return None
    tagmap = [
        ("History's Strongest Manager", "History's Strongest Manager", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    hit = next(((name, tl_type) for tagname, name, tl_type in tagmap
                if tagname in item['tags']), None)
    if hit is None:
        return False
    name, tl_type = hit
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
def extractSleepysmutWpcomstagingCom(item):
    """Parser for sleepysmut.wpcomstaging.com feed items: None to skip,
    False when no tag matches, otherwise a release message."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesWindbarbDataMarkerStates(Options):
    """Container for the hover/normal/select marker-state sub-options of a
    windbarb series data point.

    NOTE(review): these accessors look like they carried @property
    decorators before extraction stripped them — confirm upstream.
    """

    def hover(self) -> 'OptionSeriesWindbarbDataMarkerStatesHover':
        # Lazily create/return the 'hover' sub-configuration object.
        return self._config_sub_data('hover', OptionSeriesWindbarbDataMarkerStatesHover)

    def normal(self) -> 'OptionSeriesWindbarbDataMarkerStatesNormal':
        return self._config_sub_data('normal', OptionSeriesWindbarbDataMarkerStatesNormal)

    def select(self) -> 'OptionSeriesWindbarbDataMarkerStatesSelect':
        return self._config_sub_data('select', OptionSeriesWindbarbDataMarkerStatesSelect)
# Fix: the decorator had lost its "@pytest.mark" prefix during extraction
# (a bare ".django_db" line is a syntax error).
@pytest.mark.django_db
def test_extent_competed(award_data_fixture, elasticsearch_award_index):
    """Elasticsearch match query on extent_competed: 'F' exists in the
    fixture data, 'J' does not."""
    elasticsearch_award_index.update_index()
    should = {'match': {'extent_competed': 'F'}}
    query = create_query(should)
    client = elasticsearch_award_index.client
    response = client.search(index=elasticsearch_award_index.index_name, body=query)
    assert (response['hits']['total']['value'] == 1)
    should = {'match': {'extent_competed': 'J'}}
    query = create_query(should)
    response = client.search(index=elasticsearch_award_index.index_name, body=query)
    assert (response['hits']['total']['value'] == 0)
# NOTE(review): the decorator below lost its leading name during extraction
# (reads "_or_str", plausibly a "@file_or_str('.fchk')"-style registration) —
# restore against the original source.
_or_str('.fchk')
def geom_from_fchk(text, **geom_kwargs):
    """Build Geometry object(s) from the text of a Gaussian .fchk file.

    Returns a single Geometry when exactly one geometry is present,
    otherwise a list of Geometry objects.
    """
    data = parse_fchk(text)
    atoms = atoms_from_data(data)
    try:
        # Prefer the full optimization history when present ...
        coords = data['Opt point 1 Geometries']
    except KeyError:
        # ... otherwise fall back to the single current geometry.
        coords = data['Current cartesian coordinates']
    coords = np.array(coords)
    # One row per geometry, 3 Cartesian components per atom.
    coords = coords.reshape((- 1), (3 * len(atoms)))
    geoms = [Geometry(atoms, coords_, **geom_kwargs) for coords_ in coords]
    if (len(geoms) == 1):
        geoms = geoms[0]
    return geoms
class TestHSLuvSerialize(util.ColorAssertsPyTest):
    """String-serialization cases for the HSLuv color space."""

    # (input color, to_string options, expected serialization)
    COLORS = [('color(--hsluv 50 30 50 / 0.5)', {}, 'color(--hsluv 50 30 50 / 0.5)'), ('color(--hsluv 50 30 50)', {'alpha': True}, 'color(--hsluv 50 30 50 / 1)'), ('color(--hsluv 50 30 50 / 0.5)', {'alpha': False}, 'color(--hsluv 50 30 50)'), ('color(--hsluv 50 30 none)', {}, 'color(--hsluv 50 30 0)'), ('color(--hsluv 50 30 none)', {'none': True}, 'color(--hsluv 50 30 none)'), ('color(--hsluv 50 110 50)', {}, 'color(--hsluv 50.171 100 50.07)'), ('color(--hsluv 50 110 50)', {'fit': False}, 'color(--hsluv 50 110 50)'), ('color(--hsluv 192.18 -100 53.237)', {}, 'color(--hsluv 12.18 23.529 53.237)')]

    # Fix: the decorator had lost its "@pytest.mark" prefix during
    # extraction (a bare ".parametrize" line is a syntax error).
    @pytest.mark.parametrize('color1,options,color2', COLORS)
    def test_colors(self, color1, options, color2):
        """Serializing `color1` with `options` must yield `color2`."""
        self.assertEqual(Color(color1).to_string(**options), color2)
class SuperuserRequiredTests(ResolveInfoTestCase):
    """Behavior of the `superuser_required` resolver decorator."""

    def test_superuser_required(self):
        # A superuser passes straight through to the wrapped resolver.
        self.user.is_superuser = True
        wrapped = decorators.superuser_required(lambda info: None)
        self.assertIsNone(wrapped(self.info_mock(self.user)))

    def test_permission_denied(self):
        # A non-superuser is rejected with PermissionDenied.
        wrapped = decorators.superuser_required(lambda info: None)
        with self.assertRaises(exceptions.PermissionDenied):
            wrapped(self.info_mock(self.user))
class Array(object):
    """In-place sorted-array merge utility."""

    def merge_into(self, source, dest, source_end_index, dest_end_index):
        """Merge the first `dest_end_index` sorted items of `dest` into
        `source`, whose first `source_end_index` items are sorted and which
        has enough trailing capacity for both. Merges backwards in place
        and returns `source`.

        :raises TypeError: if source or dest is None
        :raises ValueError: if either end index is negative
        """
        if source is None or dest is None:
            raise TypeError('source or dest cannot be None')
        if source_end_index < 0 or dest_end_index < 0:
            raise ValueError('end indices must be >= 0')
        if not source:
            return dest
        if not dest:
            return source
        source_index = source_end_index - 1
        dest_index = dest_end_index - 1
        insert_index = source_end_index + dest_end_index - 1
        while dest_index >= 0:
            # Fix: guard source_index >= 0. Without it, once source was
            # exhausted the negative index wrapped around to already-placed
            # elements and corrupted the merge whenever dest outlasted source.
            if source_index >= 0 and source[source_index] > dest[dest_index]:
                source[insert_index] = source[source_index]
                source_index -= 1
            else:
                source[insert_index] = dest[dest_index]
                dest_index -= 1
            insert_index -= 1
        return source
def test_run_publishers_is_working_properly_with_post_publishers_specified(prepare_publishers):
    """run_publishers('Test', POST_PUBLISHER_TYPE) must call only the post
    publishers registered for 'Test' plus the unnamed (global) ones, with
    the global publisher invoked first."""
    called = []

    # Fix: every decorator below had lost its factory name during
    # extraction (a bare "('Test', ...)" line is not a decorator).
    # NOTE(review): "@publisher" matches the run_publishers registry API
    # used here — confirm against the original module.
    @publisher('Test', publisher_type=PRE_PUBLISHER_TYPE)
    def pre_func1():
        called.append('pre_func1')

    @publisher('Test', publisher_type=PRE_PUBLISHER_TYPE)
    def pre_func2():
        called.append('pre_func2')

    @publisher('Test2', publisher_type=PRE_PUBLISHER_TYPE)
    def pre_func3():
        called.append('pre_func3')

    @publisher(publisher_type=PRE_PUBLISHER_TYPE)
    def pre_func4():
        called.append('pre_func4')

    @publisher('Test', publisher_type=POST_PUBLISHER_TYPE)
    def func1():
        called.append('func1')

    @publisher('Test', publisher_type=POST_PUBLISHER_TYPE)
    def func2():
        called.append('func2')

    @publisher('Test2', publisher_type=POST_PUBLISHER_TYPE)
    def func3():
        called.append('func3')

    @publisher(publisher_type=POST_PUBLISHER_TYPE)
    def func4():
        called.append('func4')

    # Registration alone must not invoke anything.
    assert (called == [])
    run_publishers('Test', publisher_type=POST_PUBLISHER_TYPE)
    assert (called == ['func4', 'func1', 'func2'])
def hashtagtopath(thehashtag):
    """Map a '#hashtag' to (destination folder path, canonical hashtag),
    creating the folder on the way.

    Relies on module globals: `typemain` (exact-match hashtag names),
    `typechecker` (alias prefixes), `printdebug`, and one '<name>path'
    variable per canonical hashtag, resolved via eval().
    """
    (hashtagcurrent, foldercurrent) = ('', '')
    # Stays True until an exact typemain match short-circuits alias matching.
    notbreaking = True
    typemainlocal = []
    # Snapshot the global list before iterating it.
    for x in typemain:
        typemainlocal.append(x)
    for y in typemainlocal:
        if (thehashtag[1:] == y):
            printdebug('one of typemain, found on else')
            hashtagcurrent = thehashtag
            # NOTE(review): eval resolves a module-level '<name>path'
            # variable; acceptable only because the name was just matched
            # against the known typemain list — confirm inputs are trusted.
            foldercurrent = eval((thehashtag[1:] + 'path'))
            os.makedirs(foldercurrent, exist_ok=True)
            notbreaking = False
    for prefix in typechecker:
        if (thehashtag.startswith(('#' + prefix)) and notbreaking):
            # Normalize the many accepted aliases to one canonical hashtag.
            if (prefix in ['embedding', 'embeddings', 'embed', 'embeds', 'textualinversion', 'ti']):
                hashtagcurrent = '#embed'
            elif (prefix in ['model', 'models', 'checkpoint', 'checkpoints']):
                hashtagcurrent = '#model'
            elif (prefix in ['vae', 'vaes']):
                hashtagcurrent = '#vae'
            elif (prefix in ['lora', 'loras']):
                hashtagcurrent = '#lora'
            elif (prefix in ['hypernetwork', 'hypernetworks', 'hypernet', 'hypernets', 'hynet', 'hynets']):
                hashtagcurrent = '#hynet'
            elif (prefix in ['addnetlora', 'loraaddnet', 'additionalnetworks', 'addnet']):
                hashtagcurrent = '#addnetlora'
            elif (prefix in ['controlnet', 'cnet']):
                hashtagcurrent = '#cnet'
            elif (prefix in ['extension', 'extensions', 'ext']):
                hashtagcurrent = '#ext'
            elif (prefix in ['aestheticembedding', 'aestheticembed']):
                hashtagcurrent = '#aestheticembed'
            elif (prefix in ['upscaler', 'upscale']):
                hashtagcurrent = '#upscaler'
            elif (prefix in ['altmodel', 'altmodels']):
                hashtagcurrent = '#altmodel'
            elif (prefix in ['lycoris', 'locon', 'loha']):
                hashtagcurrent = '#lycoris'
            try:
                # Same eval-based '<name>path' lookup as above; a prefix with
                # no corresponding path variable is reported and skipped.
                foldercurrent = eval((hashtagcurrent[1:] + 'path'))
            except Exception as e:
                print(f'Cannot use hashtag: {e}')
                continue
            os.makedirs(foldercurrent, exist_ok=True)
    return (foldercurrent, hashtagcurrent)
def apply_region_configs(env_config):
    """Expand an environment config into per-region entries.

    When `regions` is a dict, each region's overrides are layered on top of
    the environment config; otherwise each region gets a plain copy.
    """
    new_config = env_config.copy()
    per_region_overrides = isinstance(env_config.get('regions'), dict)
    for region in env_config.get('regions', consts.REGIONS):
        if per_region_overrides:
            overrides = env_config['regions'][region]
            new_config[region] = dict(DeepChainMap(overrides, env_config))
        else:
            new_config[region] = env_config.copy()
    LOG.debug('Region Specific Config:\n%s', new_config)
    return new_config
class ALSGRPC(ServiceType):
    """Service fixture backed by the gRPC access-log-service manifests."""

    # Do not generate test variants for this service type.
    skip_variant: ClassVar[bool] = True

    def __init__(self, *args, **kwargs) -> None:
        # Inject the gRPC ALS backend manifests before normal setup.
        kwargs['service_manifests'] = integration_manifests.load('grpc_als_backend')
        super().__init__(*args, **kwargs)

    def requirements(self):
        # The service's pod must be up before tests run.
        (yield ('pod', self.path.k8s))
class OptionSeriesSunburstSonificationPointgrouping(Options):
    """Point-grouping options for sunburst series sonification.

    Fix: each option had a getter and a setter defined under the same bare
    method name, so the later `def` silently shadowed the earlier one and
    the getters were dead code. Restored as @property/@<name>.setter pairs
    (the decorators were evidently stripped during extraction).
    """

    @property
    def algorithm(self):
        # Default grouping algorithm: 'minmax'.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Grouping is enabled by default.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Default group timespan: 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Default point property to group on: 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class ImportBlockComponent(Application):
    """Trinity CLI component that imports RLP-encoded blocks from a file."""

    logger = logging.getLogger('trinity.components.BlockImport')

    # Fix: both methods take `cls` but had lost their @classmethod
    # decorators; `set_defaults(func=cls.run_import)` requires a bound
    # classmethod to work at call time.
    @classmethod
    def configure_parser(cls, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None:
        """Register the `import` sub-command and its arguments."""
        import_parser = subparser.add_parser('import', help='Import blocks from a file (RLP encoded)')
        import_parser.add_argument('file_path', type=pathlib.Path, help='Specify the file to import from')
        import_parser.set_defaults(func=cls.run_import)

    @classmethod
    def run_import(cls, args: Namespace, trinity_config: TrinityConfig) -> None:
        """Decode every block in the file and import each into the chain."""
        with open(args.file_path, 'rb') as import_file:
            file_bytes = import_file.read()
        blocks = decode_all(file_bytes, sedes=FrontierBlock)
        cls.logger.info('Importing %s blocks', len(blocks))
        chain = get_chain(trinity_config)
        for block in blocks:
            try:
                chain.import_block(block)
            except (EVMMissingData, ValidationError) as exc:
                # Log the failing block and keep the loop's error report.
                cls.logger.error(exc)
                cls.logger.error('Import failed')
            else:
                cls.logger.info('Successfully imported %s', block)
def main():
    """CLI entry point: compute how much of an RGB space a color occupies."""
    parser = argparse.ArgumentParser(
        prog='calc_range_srgb.py',
        description='Calculate RGB range in the given color.',
    )
    parser.add_argument(
        '--color', '-c', action='store', default='',
        help='The color whose range relative to RGB will be calculated.',
    )
    parser.add_argument(
        '--rgb', '-r', action='store', default='srgb',
        help='The RGB space which the color will be sized against.',
    )
    parser.add_argument(
        '--res', '-s', type=int, default=100,
        help='Resolution to use when calculating range, default is 100.',
    )
    parser.add_argument(
        '--precision', '-p', type=int, default=3,
        help='Precision for displaying the range.',
    )
    args = parser.parse_args()
    return run(args.color, args.rgb, args.res, args.precision)
def measure_definition(request, measure):
    """Django view: render the definition page for one Measure, including
    its tags and the formatted numerator/denominator SQL.

    :param measure: primary key of the Measure to display (404 if absent)
    """
    measure = get_object_or_404(Measure, pk=measure)
    context = {'measure': measure, 'measure_details': _get_measure_details(measure.id), 'measure_tags': _get_tags_with_names(measure.tags), 'numerator_sql': _format_measure_sql(columns=measure.numerator_columns, from_=measure.numerator_from, where=measure.numerator_where), 'denominator_sql': _format_measure_sql(columns=measure.denominator_columns, from_=measure.denominator_from, where=measure.denominator_where)}
    return render(request, 'measure_definition.html', context)
def test_epoch_start_and_end_blocks_have_same_dataset_size() -> None:
    """The ethash dataset size must be constant across a whole epoch."""
    for _ in range(100):
        # Sample a block number somewhere in [1e9, 2e9).
        block_number = Uint(randint((10 ** 9), (2 * (10 ** 9))))
        epoch_start = (block_number // EPOCH_SIZE) * EPOCH_SIZE
        epoch_end = epoch_start + EPOCH_SIZE - 1
        size = dataset_size(block_number)
        assert size == dataset_size(epoch_start)
        assert size == dataset_size(epoch_end)
def _load_config(directory):
config = configparser.ConfigParser()
files = glob.glob(os.path.join(directory, '*.ini'))
logging.debug('Files %s in config directory %s', files, directory)
config.read(files)
config_dict = {'instances': {}, 'clone_host_map': {}}
instances = config_dict['instances']
for section_name in config.sections():
section = config[section_name]
instance = instances[section_name] = {}
for key in section.keys():
if (key in ['clone_hostnames', 'path_prefixes']):
hostnames = section[key].split()
instance[key] = [h.strip() for h in hostnames]
else:
instance[key] = section[key]
for key in ['sources', 'specs']:
if (key in instance):
continue
instance[key] = '.'
if ('sources_file' not in instance):
instance['sources_file'] = 'sources'
if ('default_sum' not in instance):
instance['default_sum'] = 'md5'
for host in instance['clone_hostnames']:
if (host not in config_dict['clone_host_map']):
config_dict['clone_host_map'][host] = {}
host_dict = config_dict['clone_host_map'][host]
for prefix in instance.get('path_prefixes', ['DEFAULT']):
if (prefix in host_dict):
msg = 'Duplicate prefix {0} for {1} hostname'.format(prefix, host)
raise RuntimeError(msg)
host_dict[prefix] = instance
return config_dict |
# Fix: the decorator line had lost its name during extraction (a bare
# "('ecs_deploy.cli.get_client')" line is not a decorator).
# NOTE(review): "@patch" from unittest.mock matches the injected
# `get_client` argument — confirm against the original module.
@patch('ecs_deploy.cli.get_client')
def test_update_task_with_role_arn(get_client, runner):
    """`update -r <arn>` must produce a new task revision with the role
    ARN changed and report the change on stdout."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.update, (TASK_DEFINITION_ARN_1, '-r', 'arn:new:role'))
    assert (result.exit_code == 0)
    assert (not result.exception)
    assert (u'Update task definition based on: test-task:1' in result.output)
    assert (u'Updating task definition' in result.output)
    assert (u'Changed role_arn to: "arn:new:role" (was: "arn:test:role:1")' in result.output)
    assert (u'Successfully created revision: 2' in result.output)
class OptionPlotoptionsStreamgraphSonificationTracksMappingGapbetweennotes(Options):
    """Gap-between-notes mapping options for streamgraph sonification tracks.

    Fix: each option had a getter and a setter defined under the same bare
    method name, so the later `def` silently shadowed the earlier one and
    the getters were dead code. Restored as @property/@<name>.setter pairs
    (the decorators were evidently stripped during extraction).
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestParseDateMath(CuratorTestCase):
    """Integration checks for Elasticsearch date-math name resolution."""

    def test_assorted_datemaths(self):
        # Each pair is (date-math expression, expected resolved name); the
        # expectations are computed from "now" with the same offsets the
        # expressions encode, so both sides move together.
        for (test_string, expected) in [('<prefix-{now}-suffix>', f"prefix-{datetime.utcnow().strftime('%Y.%m.%d')}-suffix"), ('<prefix-{now-1d/d}>', f"prefix-{(datetime.utcnow() - timedelta(days=1)).strftime('%Y.%m.%d')}"), ('<{now+1d/d}>', f"{(datetime.utcnow() + timedelta(days=1)).strftime('%Y.%m.%d')}"), ('<{now+1d/d}>', f"{(datetime.utcnow() + timedelta(days=1)).strftime('%Y.%m.%d')}"), ('<{now+10d/d{yyyy-MM}}>', f"{(datetime.utcnow() + timedelta(days=10)).strftime('%Y-%m')}")]:
            assert (expected == parse_datemath(self.client, test_string))
def extractDorysoulWordpressCom(item):
    """Parser for dorysoul.wordpress.com feed items: None to skip, False
    when no tag matches, otherwise a release message."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    hit = next((
        (name, tl_type)
        for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                       ('Loiterous', 'Loiterous', 'oel'))
        if tagname in item['tags']
    ), None)
    if hit is None:
        return False
    name, tl_type = hit
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
def _limits():
    """Print the account limits, as JSON when JSON_OUTPUT is set, otherwise
    as a formatted table via click."""
    limits_data = API_CLIENT.get_account_limits()
    if JSON_OUTPUT:
        _print_json(limits_data)
        return
    headers = ['Name', 'Amount', 'Country List']
    # NOTE(review): despite the parameter name, these are plain dict keys
    # consumed by _create_table_from_dict as value accessors — confirm.
    keys = ['limit', 'amount', 'countryList']
    text = _create_table_from_dict(headers=headers, value_functions=keys, data=limits_data, numalign='right')
    click.echo(text)
class CommandLineError(TradeException):
    """Raised for user errors on the command line; optionally carries the
    usage text to show alongside the error."""

    def __init__(self, errorStr, usage=None):
        self.errorStr = errorStr
        self.usage = usage

    def __str__(self):
        if not self.usage:
            return 'ERROR: {}'.format(self.errorStr)
        return 'ERROR: {}\n\n{}'.format(self.errorStr, self.usage)
def link(node: RenderTreeNode, context: RenderContext) -> str:
    """Render a markdown link (or autolink) node back to CommonMark text."""
    if (node.info == 'auto'):
        autolink_url = node.attrs['href']
        assert isinstance(autolink_url, str)
        # Strip the implicit 'mailto:' prefix unless the author typed it.
        if (autolink_url.startswith('mailto:') and (not node.children[0].content.startswith('mailto:'))):
            autolink_url = autolink_url[7:]
        return (('<' + autolink_url) + '>')
    text = ''.join((child.render(context) for child in node.children))
    if context.do_wrap:
        # Link text must never be wrapped across lines.
        text = text.replace(WRAP_POINT, ' ')
    ref_label = node.meta.get('label')
    if ref_label:
        # Reference-style link: record the label so the definition is kept.
        context.env['used_refs'].add(ref_label)
        ref_label_repr = ref_label.lower()
        # Collapsed form "[text]" when text and label coincide.
        if (text.lower() == ref_label_repr):
            return f'[{text}]'
        return f'[{text}][{ref_label_repr}]'
    uri = node.attrs['href']
    assert isinstance(uri, str)
    uri = maybe_add_link_brackets(uri)
    title = node.attrs.get('title')
    if (title is None):
        return f'[{text}]({uri})'
    assert isinstance(title, str)
    # Escape double quotes so the title survives re-parsing.
    title = title.replace('"', '\\"')
    return f'[{text}]({uri} "{title}")'
class ActionCodeSettings():
    """Settings that control how an out-of-band email action link behaves
    (continue URL, in-app handling, dynamic link and mobile app options)."""

    def __init__(self, url, handle_code_in_app=None, dynamic_link_domain=None,
                 ios_bundle_id=None, android_package_name=None,
                 android_install_app=None, android_minimum_version=None):
        # The continue/landing URL is the only required setting; everything
        # else defaults to "unset" (None).
        self.url = url
        self.handle_code_in_app = handle_code_in_app
        self.dynamic_link_domain = dynamic_link_domain
        self.ios_bundle_id = ios_bundle_id
        self.android_package_name = android_package_name
        self.android_install_app = android_install_app
        self.android_minimum_version = android_minimum_version
class Moods(models.Model):
    """User 'mood' post: text content, optional drawing, engagement
    counters and a link to the author's avatar.

    NOTE(review): the verbose_name strings below are empty or truncated —
    the original (non-ASCII) labels were lost in extraction; restore them
    from the original source rather than inventing new ones.
    """

    nid = models.AutoField(primary_key=True)
    name = models.CharField(verbose_name='', max_length=16)
    # Default IP is presumably a placeholder — TODO confirm.
    ip = models.GenericIPAddressField(verbose_name='ip', default='120.228.2.238')
    addr = models.TextField(verbose_name='', null=True)
    # auto_now: timestamp refreshes on every save, not just creation.
    create_date = models.DateTimeField(verbose_name='', auto_now=True)
    content = models.TextField(verbose_name='')
    drawing = models.TextField(verbose_name=',;', null=True, blank=True)
    comment_count = models.IntegerField(verbose_name='', default=0)
    digg_count = models.IntegerField(verbose_name='', default=0)
    # SET_NULL keeps the mood if the avatar row is deleted.
    avatar = models.ForeignKey(to='Avatars', to_field='nid', on_delete=models.SET_NULL, null=True, verbose_name='')

    def __str__(self):
        return self.name

    class Meta():
        verbose_name_plural = ''
def base_h_gen(out, name):
    """Generate the base C header: top matter, object enum, experimenter
    IDs and the OpenFlow match defines."""
    common_top_matter(out, name)
    base_h_content(out)
    gen_object_enum(out)
    # NOTE(review): the banner literals below read like C block comments
    # that lost their '*' characters in extraction — confirm upstream
    # before relying on the emitted header being valid C.
    out.write('\n/\n *\n * Experimenter IDs\n *\n /\n\n')
    for (name, val) in of_g.experimenter_name_to_id.items():
        out.write(('#define OF_EXPERIMENTER_ID_%s 0x%08x\n' % (name.upper(), val)))
    out.write('\n/\n *\n * OpenFlow Match version specific and generic defines\n *\n /\n')
    c_match.gen_v4_match_compat(out)
    c_match.gen_match_macros(out)
    c_match.gen_oxm_defines(out)
    out.write('\n#endif /* Base header file */\n')
# NOTE(review): the decorator below lost its leading name in extraction
# (reads "_ns.route", plausibly an "@<api>_ns.route" Flask-RESTX route
# registration) — restore against the original module.
_ns.route('/pending-actions/')
def pending_actions():
    """Return a JSON list of waiting actions ({id, priority}), skipping
    actions whose copr namespace is already busy with a pending delete."""
    busy_namespaces = set()
    data = []
    for action in actions_logic.ActionsLogic.get_waiting():
        if ((action.object_type == 'copr') and (action.action_type == ActionTypeEnum('delete'))):
            # A whole-copr delete blocks everything else in that namespace.
            busy_namespaces.add(action.copr.full_name)
        elif (action.copr and (action.copr.full_name in busy_namespaces)):
            continue
        data.append({'id': action.id, 'priority': (action.priority or action.default_priority)})
    return flask.json.dumps(data)
def extractRoundhousenovelsCom(item):
    """Parser for roundhousenovels.com feed items: None to skip, False when
    no tag matches, otherwise a release message."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if (not chp and not vol) or 'preview' in title_lower:
        return None
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def load(request):
    """Deserialize the session stored in the 'vulpy_session' cookie.

    Returns an empty dict when the cookie is missing, empty, or cannot be
    base64/JSON decoded.
    """
    session = {}
    cookie = request.cookies.get('vulpy_session')
    try:
        if cookie:
            # Fix: the original base64-decoded the cookie twice (the first
            # result was only truth-tested, then discarded); decode once
            # and parse the JSON payload.
            decoded = base64.b64decode(cookie)
            if decoded:
                session = json.loads(decoded)
    except Exception:
        # Malformed cookies are deliberately treated as "no session".
        pass
    return session
def parse_cookie_header(header_value):
    """Parse a Cookie: header into a {name: [values]} dict.

    Empty names and names containing reserved characters are skipped; one
    pair of surrounding double quotes is stripped from quoted values.
    Repeated names accumulate all their values in order.
    """
    cookies = {}
    for token in header_value.split(';'):
        (name, __, value) = token.partition('=')
        name = name.strip()
        value = value.strip()
        if not name:
            continue
        if _COOKIE_NAME_RESERVED_CHARS.search(name):
            continue
        if len(value) > 2 and value[0] == '"' and value[-1] == '"':
            # Fix: this assignment was truncated in the source; the guard
            # above clearly intends stripping the surrounding quote pair.
            value = value[1:-1]
        if name in cookies:
            cookies[name].append(value)
        else:
            cookies[name] = [value]
    return cookies
# Fix: the route decorator lost its object name during extraction (a bare
# "('/join', methods=['POST'])" line is not a decorator).
# NOTE(review): "@app.route" is the conventional Flask form — confirm the
# actual app/blueprint name against the original module.
@app.route('/join', methods=['POST'])
def join():
    """Handle the join form: initialize the player's session and redirect
    to the lobby."""
    name = request.form['name']
    room_id = request.form['room-id']
    session['player-id'] = str(uuid.uuid4())
    session['player-name'] = name
    # Every player starts with 1000 in chips.
    session['player-money'] = 1000
    # An empty room id means "no specific room".
    session['room-id'] = (room_id if room_id else None)
    return redirect(url_for('index'))
# Fix: the decorator had lost its "@pytest.mark" prefix during extraction
# (a bare ".django_db" line is a syntax error).
@pytest.mark.django_db
def test_award_count_invalid_defc_type(client, monkeypatch, basic_award, helpers):
    """A non-array def_codes filter value must yield a 400 with a precise
    validation message."""
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = helpers.post_for_count_endpoint(client, url, '100')
    assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
    assert (resp.data['detail'] == "Invalid value in 'filter|def_codes'. '100' is not a valid type (array)")
def test_transaction_fixtures(fixture, fixture_transaction_class):
    """Run one Ethereum transaction test fixture: decoding/validation must
    fail exactly when the fixture marks the transaction invalid (signalled
    by the absence of a 'hash' key), and the recovered sender must match
    the fixture for valid transactions."""
    TransactionClass = fixture_transaction_class
    try:
        txn = TransactionClass.decode(fixture['txbytes'])
    except (rlp.DeserializationError, rlp.exceptions.DecodingError):
        # Undecodable RLP is acceptable only for invalid fixtures.
        assert ('hash' not in fixture), 'Transaction was supposed to be valid'
    except TypeError as err:
        # Only the one known TypeError from malformed payloads is tolerated.
        assert (err.args == ("'bytes' object cannot be interpreted as an integer",))
        assert ('hash' not in fixture), 'Transaction was supposed to be valid'
    except KeyError:
        # Fixtures without 'txbytes' must at least carry 'rlpHex'.
        assert fixture['rlpHex']
        assert ('hash' not in fixture), 'Transaction was supposed to be valid'
    except ValidationError as err:
        # Typed-transaction construction errors must match the known text.
        err_matchers = ('Cannot build typed transaction with', '>= 0x80')
        assert all(((_ in err.args[0]) for _ in err_matchers))
        assert ('hash' not in fixture), 'Transaction was supposed to be valid'
    except UnrecognizedTransactionType as err:
        assert (err.args[1] == 'Unknown transaction type')
        assert (hex(err.args[0]) not in VALID_TRANSACTION_TYPES)
        assert ('hash' not in fixture), 'Transaction was supposed to be valid'
    else:
        try:
            txn.validate()
        except ValidationError:
            # Decoded but invalid: nothing more to check.
            return
        if ('sender' in fixture):
            assert ('hash' in fixture), 'Transaction was supposed to be invalid'
            assert is_same_address(txn.sender, fixture['sender'])
def _register_scene_prompt_template(scene_registry: Dict[(str, Dict)], prompt_template, language: str, model_names: List[str]):
for model_name in model_names:
if (model_name not in scene_registry):
scene_registry[model_name] = dict()
registry = scene_registry[model_name]
registry[language] = prompt_template |
def gen_data_in_runpath(tmp_path):
    """Create simulations/realization-{0..2}/iter-0/rft_0.txt under
    `tmp_path`, each file containing '<i>.0'."""
    simulations_dir = tmp_path / 'simulations'
    simulations_dir.mkdir()
    for index in range(3):
        iter_dir = simulations_dir / f'realization-{index}' / 'iter-0'
        # parents=True creates the realization dir and iter-0 in one go.
        iter_dir.mkdir(parents=True)
        (iter_dir / 'rft_0.txt').write_text(f'{index}.0', encoding='utf-8')
def extractYaniiitranslationsBlogspotCom(item):
    """Parser for yaniiitranslations.blogspot.com feed items: None to skip,
    False when no tag matches, otherwise a release message."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tags = item['tags']
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname in tags:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsScatter3dOnpointPosition(Options):
    """On-point label/connector position options for scatter3d series.

    Fix: each option had a getter and a setter defined under the same bare
    method name, so the later `def` silently shadowed the earlier one and
    the getters were dead code. Restored as @property/@<name>.setter pairs
    (the decorators were evidently stripped during extraction).
    """

    @property
    def offsetX(self):
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def dfs(tree):
    """Recursively build sidebar entries for a nested module tree.

    For each module key, collect its own article link plus one link per
    exported symbol, then append the sidebars of its submodules.

    Fix: the parameter was named `dict`, shadowing the builtin; renamed to
    `tree` (the recursive call passes it positionally).
    """
    sidebar_items = []
    for (key, val) in tree.items():
        if (len(modules_and_symbols[key][1]) > 0):
            items = ([f'"api/{symbol_to_article[key]}"']
                     + [f'"api/{symbol_to_article[fullname(key, item)]}"'
                        for (item, _) in modules_and_symbols[key][1]])
        else:
            items = []
        # Non-leaf modules also carry their submodules' sidebars.
        if (val != {}):
            items.extend(dfs(val))
        sidebar_items.append(module_sidebar(key, items))
    return sidebar_items
def get_capacity_data_for_all_zones(target_datetime: datetime, session: Session) -> dict:
    """Build a per-zone capacity dict from IRENA data for `target_datetime`.

    Returns {zone: {mode: {'datetime', 'value', 'source'}}} with values
    aggregated when a reallocated mode repeats within a zone.
    """
    data = get_data_from_url(target_datetime, session)
    capacity_dict = {}
    for item in data:
        country = pycountry.countries.get(alpha_3=item['key'][0])
        if country is None:
            # Fix: this branch previously fell through via `pass`, reusing
            # the `zone` bound in a previous iteration (or raising
            # NameError on the first); unknown countries must be skipped.
            continue
        zone = country.alpha_2
        mode: str = IRENA_JSON_TO_MODE_MAPPING[int(item['key'][1])]
        value: float = round(float(item['values'][0]), 0)
        # The last key element is a two-digit year.
        datetime_value: datetime = datetime.strptime(item['key'][(- 1)], '%y')
        if (zone not in capacity_dict):
            capacity_dict[zone] = {mode: {'datetime': datetime_value.strftime('%Y-%m-%d'), 'value': value, 'source': SOURCE}}
        else:
            # Subsequent rows may map to a reallocated (merged) mode.
            mode = reallocate_capacity_mode(zone, int(item['key'][1]))
            if (mode in capacity_dict[zone]):
                # Same mode seen again: aggregate the capacity values.
                capacity_dict[zone][mode]['value'] += value
            else:
                capacity_dict[zone][mode] = {'datetime': datetime_value.strftime('%Y-%m-%d'), 'value': value, 'source': SOURCE}
    return capacity_dict
def edit_message_caption(token, caption, chat_id=None, message_id=None, inline_message_id=None, parse_mode=None, caption_entities=None, reply_markup=None):
    """Call the Telegram Bot API `editMessageCaption` method, including
    only the optional fields that were actually provided."""
    payload = {'caption': caption}
    # Truthy-only: Telegram rejects null fields, so omit unset options.
    if chat_id:
        payload['chat_id'] = chat_id
    if message_id:
        payload['message_id'] = message_id
    if inline_message_id:
        payload['inline_message_id'] = inline_message_id
    if parse_mode:
        payload['parse_mode'] = parse_mode
    if caption_entities:
        # Entities go over the wire as a JSON array of dicts.
        payload['caption_entities'] = json.dumps(types.MessageEntity.to_list_of_dicts(caption_entities))
    if reply_markup:
        payload['reply_markup'] = _convert_markup(reply_markup)
    return _make_request(token, 'editMessageCaption', params=payload, method='post')
# NOTE(review): the four lines below appear to be class decorators that
# lost their leading '@' and part of their names during extraction
# (e.g. "@chunk_init.register_param_type") — restore upstream; kept
# byte-identical here because the full names cannot be recovered safely.
_init.register_param_type
_init_ack.register_param_type
_unresolvable_addr.register_param_type
_restart_with_new_addr.register_param_type
class param_ipv4(param):
    """SCTP IPv4 Address parameter: a type/length header followed by a
    packed IPv4 address."""

    _TYPE = {'ascii': ['value']}

    # Fix: takes `cls` and is called both on the class (parser) and via
    # self (serialize), so the stripped @classmethod is restored.
    @classmethod
    def param_type(cls):
        return PTYPE_IPV4

    def __init__(self, value='127.0.0.1', length=0):
        super(param_ipv4, self).__init__(value, length)

    # Fix: takes `cls` and constructs instances via cls(...); the stripped
    # @classmethod decorator is restored.
    @classmethod
    def parser(cls, buf):
        (_, length) = struct.unpack_from(cls._PACK_STR, buf)
        value = None
        if (cls._MIN_LEN < length):
            # The variable-length tail holds the packed IPv4 address.
            fmt = ('%ds' % (length - cls._MIN_LEN))
            (value,) = struct.unpack_from(fmt, buf, cls._MIN_LEN)
        return cls(addrconv.ipv4.bin_to_text(value), length)

    def serialize(self):
        buf = bytearray(struct.pack(self._PACK_STR, self.param_type(), self.length))
        if self.value:
            buf.extend(addrconv.ipv4.text_to_bin(self.value))
        if (0 == self.length):
            # Length not supplied: compute it and patch the header field
            # (network-order ushort at offset 2) in place.
            self.length = len(buf)
            struct.pack_into('!H', buf, 2, self.length)
        return six.binary_type(buf)
class iplookup(Module):
    """Geolocate an IPv4 address and print the result as a table."""

    config = Config({Option('HOST_IP', 'Provide your target IP', True): str('136.158.41.95')})

    def run(self):
        dataList = []
        ip = self.config.option('HOST_IP').value
        print(("\n Locating '%s'..." % ip))
        TABLE_DATA = []
        # Fix: this URL literal was truncated (unterminated string) in the
        # source. NOTE(review): ip-api.com's JSON endpoint matches every
        # response field used below (status/isp/org/...) — confirm.
        url = 'http://ip-api.com/json/'
        data = requests.get((url + ip)).content.decode('utf-8')
        values = json.loads(data)
        status = values['status']
        if (status != 'success'):
            print(' Address IP invalid.')
            return
        # Collect (label, value) rows; labels are in French as in the
        # original module.
        for label, value in [
            ('IP', ip),
            ('ISP', values['isp']),
            ('Organisation', values['org']),
            ('Pays', values['country']),
            ('Region', values['regionName']),
            ('Ville', values['city']),
            ('Code Postal', values['zip']),
        ]:
            TABLE_DATA.append((label, value))
        localisation = ((str(values['lat']) + ', ') + str(values['lon']))
        TABLE_DATA.append(('Localisation', localisation))
        # Fix: the Maps URL literal was truncated too. NOTE(review): Google
        # Maps place lookup by "lat, lon" is the apparent intent — confirm.
        TABLE_DATA.append(('Maps', ('https://www.google.com/maps/place/' + localisation)))
        table = SingleTable(TABLE_DATA, ip)
        print(('\n' + table.table))
class WandbLogger(base.Logger):
    """Logger that forwards metrics to Weights & Biases.

    Reuses an existing wandb run when one is active, otherwise starts a
    new one with the given project/entity/name/group/config.
    """

    def __init__(self, label: Optional[str]=None, steps_key: Optional[str]=None, *, project: Optional[str]=None, entity: Optional[str]=None, dir: Optional[str]=None, name: Optional[str]=None, group: Optional[str]=None, config: Optional[Any]=None, **wandb_kwargs):
        """:param label: prefix applied to every logged metric key
        :param steps_key: data key used as the x-axis step metric
        :raises ImportError: if the wandb package is not installed
        """
        if (wandb is None):
            raise ImportError('Logger not supported as `wandb` logger is not installed yet, install it with `pip install wandb`.')
        self._label = label
        self._iter = 0
        self._steps_key = steps_key
        if (wandb.run is None):
            # No active run: start one with the provided settings.
            self._run = wandb.init(project=project, dir=dir, entity=entity, name=name, group=group, config=config, **wandb_kwargs)
        else:
            # Reuse the run created elsewhere in the process.
            self._run = wandb.run
        if (steps_key and getattr(self._run, 'define_metric', None)):
            # Tie all (label-prefixed) metrics to the custom step metric.
            prefix = (f'{self._label}/*' if self._label else '*')
            self._run.define_metric(prefix, step_metric=f'{self._label}/{self._steps_key}')

    def run(self):
        # Expose the underlying wandb run object.
        return self._run

    def write(self, data: base.LoggingData):
        """Log one dict of metrics, skipping it when the configured
        steps_key is absent."""
        data = base.to_numpy(data)
        if ((self._steps_key is not None) and (self._steps_key not in data)):
            # NOTE(review): logging.warn is a deprecated alias in the
            # stdlib; whether this is stdlib or absl logging is not visible
            # here — confirm before switching to logging.warning.
            logging.warn('steps key %s not found. Skip logging.', self._steps_key)
            return
        if self._label:
            stats = {f'{self._label}/{k}': v for (k, v) in data.items()}
        else:
            stats = data
        self._run.log(stats)
        self._iter += 1

    def close(self):
        # Nothing to release; the wandb run is managed externally.
        pass
def diag_quadrupole3d_22(ax, da, A, bx, db, B, R):
    """Machine-generated kernel filling a (3, 6, 6) array of quadrupole-type
    3D Gaussian integrals for a (d|d) shell pair.

    Parameters (assumed from usage — TODO confirm against the code generator):
      ax, bx : Gaussian exponents of centers A and B
      da, db : contraction/normalization coefficients
      A, B   : 3-component center coordinates
      R      : 3-component operator/reference point

    NOTE(review): several numeric literals below (`5.` in x33, `0.` in
    x35/x40-area, `1.` in x40) look like truncated normalization constants
    from a text-extraction step — results multiplied by `0.` would vanish.
    Verify these against the original generated source before trusting output.
    Do not hand-edit the algebra; it is common-subexpression-eliminated and
    order-dependent.
    """
    result = numpy.zeros((3, 6, 6), dtype=float)
    # --- common subexpressions (x-direction Gaussian product quantities) ---
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (3.0 * x0)
    x2 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x3 = (- x2)
    x4 = (x3 + A[0])
    x5 = (x3 + B[0])
    x6 = (x4 * x5)
    x7 = (2.0 * x6)
    x8 = (x3 + R[0])
    x9 = (2.0 * x8)
    x10 = (x4 * x9)
    x11 = (x5 * x8)
    x12 = (2.0 * x11)
    x13 = (x0 * (((x1 + x10) + x12) + x7))
    x14 = ((- 2.0) * x2)
    x15 = (x14 + R[0])
    x16 = (x15 + B[0])
    x17 = (x0 * x16)
    x18 = (x0 + x12)
    x19 = (x18 * x4)
    x20 = (x17 + x19)
    x21 = (4.0 * x20)
    x22 = (x8 ** 2)
    x23 = (x1 + (2.0 * x22))
    x24 = (x18 * x8)
    x25 = (x17 + x24)
    x26 = ((x0 * ((4.0 * x11) + x23)) + ((2.0 * x25) * x5))
    x27 = ((x0 * (x15 + A[0])) + (x8 * (x0 + x10)))
    x28 = (x0 * ((((x1 * x16) + (2.0 * x19)) + x24) + x27))
    x29 = (x13 + (x20 * x9))
    x30 = (x28 + (x29 * x5))
    x31 = (2.0 * x4)
    x32 = ((ax * bx) * x0)
    # Gaussian product prefactor; `5.` looks truncated — TODO confirm.
    x33 = (((5. * da) * db) * numpy.exp(((- x32) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x34 = ((x0 ** 1.5) * x33)
    x35 = (0. * x34)
    # --- y-direction quantities ---
    x36 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x37 = (- x36)
    x38 = (x37 + B[1])
    x39 = (x34 * x38)
    x40 = 1.
    x41 = (0. * x40)
    x42 = (x41 * (x28 + (x29 * x4)))
    # --- z-direction quantities ---
    x43 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x44 = (- x43)
    x45 = (x44 + B[2])
    x46 = (x34 * x45)
    x47 = (((0. * x0) * (x23 + ((4.0 * x4) * x8))) + ((0. * x27) * x31))
    x48 = (x38 ** 2)
    x49 = (0.5 * x0)
    x50 = ((x0 ** 1.5) * x33)
    x51 = (x50 * (x48 + x49))
    x52 = (x39 * x40)
    x53 = (x45 ** 2)
    x54 = (x50 * (x49 + x53))
    x55 = (x37 + A[1])
    x56 = (x34 * x41)
    x57 = (x30 * x56)
    x58 = (x38 * x55)
    x59 = (x50 * (x49 + x58))
    x60 = (0.25 * x29)
    x61 = ((- 2.0) * x36)
    x62 = (x61 + B[1])
    x63 = (x0 * (x62 + A[1]))
    x64 = (2.0 * x58)
    x65 = (x0 + x64)
    x66 = ((x38 * x65) + x63)
    x67 = (0. * x50)
    x68 = (x40 * x67)
    x69 = (x27 * x68)
    x70 = (0.5 * x27)
    x71 = (0. * x40)
    x72 = (x27 * x71)
    x73 = (x44 + A[2])
    x74 = (x45 * x73)
    x75 = (x49 + x74)
    x76 = (x50 * x75)
    x77 = ((- 2.0) * x43)
    x78 = (x77 + B[2])
    x79 = (x0 * (x78 + A[2]))
    x80 = (2.0 * x74)
    x81 = (x0 + x80)
    x82 = ((x45 * x81) + x79)
    x83 = (x49 + (x55 ** 2))
    x84 = (x26 * x67)
    x85 = ((x55 * x65) + x63)
    x86 = (x25 * x68)
    x87 = (x50 * x71)
    x88 = (x45 * x87)
    x89 = (2.0 * x55)
    x90 = ((x0 * ((x1 + (2.0 * x48)) + (4.0 * x58))) + (x66 * x89))
    x91 = (x22 + x49)
    x92 = (x67 * x91)
    x93 = (0. * x91)
    x94 = (x56 * x73)
    x95 = (0.5 * x25)
    x96 = (x87 * x91)
    x97 = (x49 + (x73 ** 2))
    x98 = (x87 * x97)
    x99 = ((x73 * x81) + x79)
    x100 = (2.0 * x73)
    x101 = ((x0 * ((x1 + (2.0 * x53)) + (4.0 * x74))) + (x100 * x82))
    x102 = (x5 ** 2)
    x103 = (x0 * ((x14 + A[0]) + B[0]))
    x104 = (x0 + x7)
    x105 = (x103 + (x104 * x5))
    x106 = ((x0 * ((x1 + (2.0 * x102)) + (4.0 * x6))) + (x105 * x31))
    x107 = (x37 + R[1])
    x108 = (x107 ** 2)
    x109 = (x108 + x49)
    x110 = (x109 * x67)
    x111 = (x103 + (x104 * x4))
    x112 = (x62 + R[1])
    x113 = (x0 * x112)
    x114 = (x107 * x38)
    x115 = (2.0 * x114)
    x116 = (x0 + x115)
    x117 = (x107 * x116)
    x118 = (x113 + x117)
    x119 = (x118 * x68)
    x120 = ((x4 ** 2) + x49)
    x121 = (x1 + (2.0 * x108))
    x122 = ((x0 * ((4.0 * x114) + x121)) + ((2.0 * x118) * x38))
    x123 = (x122 * x67)
    x124 = (0. * x109)
    x125 = (x107 * x89)
    x126 = ((x0 * ((x61 + A[1]) + R[1])) + (x107 * (x0 + x125)))
    x127 = (x126 * x68)
    x128 = (x0 * (((x1 + x115) + x125) + x64))
    x129 = (x116 * x55)
    x130 = (x113 + x129)
    x131 = (x107 * x130)
    x132 = (x128 + (2.0 * x131))
    x133 = (0.25 * x132)
    x134 = (x49 + x6)
    x135 = (x134 * x50)
    x136 = (0.5 * x135)
    x137 = (x0 * ((((x1 * x112) + x117) + x126) + (2.0 * x129)))
    x138 = ((x132 * x38) + x137)
    x139 = (x138 * x56)
    x140 = (x126 * x71)
    x141 = (x109 * x87)
    x142 = (0.5 * x76)
    x143 = ((x0 * (((4.0 * x107) * x55) + x121)) + (x126 * x89))
    x144 = (x102 + x49)
    x145 = (x144 * x67)
    x146 = ((x132 * x55) + x137)
    x147 = (x5 * x56)
    x148 = (0. * x143)
    x149 = (x144 * x50)
    x150 = (x34 * x5)
    x151 = (x44 + R[2])
    x152 = (x151 ** 2)
    x153 = (x152 + x49)
    x154 = (x153 * x67)
    x155 = (x38 * x87)
    x156 = (x78 + R[2])
    x157 = (x0 * x156)
    x158 = (x151 * x45)
    x159 = (2.0 * x158)
    x160 = (x0 + x159)
    x161 = (x151 * x160)
    x162 = (x157 + x161)
    x163 = (x162 * x68)
    x164 = (0. * x153)
    x165 = (x1 + (2.0 * x152))
    x166 = ((x0 * ((4.0 * x158) + x165)) + ((2.0 * x162) * x45))
    x167 = (x166 * x67)
    x168 = (x153 * x87)
    x169 = (0.5 * x59)
    x170 = (x100 * x151)
    x171 = ((x0 * ((x77 + A[2]) + R[2])) + (x151 * (x0 + x170)))
    x172 = (x171 * x68)
    x173 = (x0 * (((x1 + x159) + x170) + x80))
    x174 = (x160 * x73)
    x175 = (x157 + x174)
    x176 = (x151 * x175)
    x177 = (x173 + (2.0 * x176))
    x178 = (0.25 * x177)
    x179 = (x171 * x71)
    x180 = (x0 * ((((x1 * x156) + x161) + x171) + (2.0 * x174)))
    x181 = ((x177 * x45) + x180)
    x182 = (x181 * x56)
    x183 = ((x0 * (((4.0 * x151) * x73) + x165)) + (x100 * x171))
    x184 = (0. * x183)
    x185 = ((x177 * x73) + x180)
    # --- assemble the (3, 6, 6) result: [xx/yy/zz component, bra fn, ket fn] ---
    result[(0, 0, 0)] = numpy.sum((x35 * ((x0 * ((((4.0 * x13) + (x21 * x5)) + (x21 * x8)) + x26)) + (x30 * x31))))
    result[(0, 0, 1)] = numpy.sum((x39 * x42))
    result[(0, 0, 2)] = numpy.sum((x42 * x46))
    result[(0, 0, 3)] = numpy.sum((x47 * x51))
    result[(0, 0, 4)] = numpy.sum(((x45 * x47) * x52))
    result[(0, 0, 5)] = numpy.sum((x47 * x54))
    result[(0, 1, 0)] = numpy.sum((x55 * x57))
    result[(0, 1, 1)] = numpy.sum((x59 * x60))
    result[(0, 1, 2)] = numpy.sum(((x46 * x55) * x60))
    result[(0, 1, 3)] = numpy.sum((x66 * x69))
    result[(0, 1, 4)] = numpy.sum(((x45 * x59) * x70))
    result[(0, 1, 5)] = numpy.sum(((x54 * x55) * x72))
    result[(0, 2, 0)] = numpy.sum((x57 * x73))
    result[(0, 2, 1)] = numpy.sum(((x39 * x60) * x73))
    result[(0, 2, 2)] = numpy.sum((x60 * x76))
    result[(0, 2, 3)] = numpy.sum(((x51 * x72) * x73))
    result[(0, 2, 4)] = numpy.sum(((x38 * x70) * x76))
    result[(0, 2, 5)] = numpy.sum((x69 * x82))
    result[(0, 3, 0)] = numpy.sum((x83 * x84))
    result[(0, 3, 1)] = numpy.sum((x85 * x86))
    result[(0, 3, 2)] = numpy.sum(((x25 * x83) * x88))
    result[(0, 3, 3)] = numpy.sum((x90 * x92))
    result[(0, 3, 4)] = numpy.sum(((x85 * x88) * x91))
    result[(0, 3, 5)] = numpy.sum(((x54 * x83) * x93))
    result[(0, 4, 0)] = numpy.sum(((x26 * x55) * x94))
    result[(0, 4, 1)] = numpy.sum(((x59 * x73) * x95))
    result[(0, 4, 2)] = numpy.sum(((x55 * x76) * x95))
    result[(0, 4, 3)] = numpy.sum(((x66 * x73) * x96))
    result[(0, 4, 4)] = numpy.sum(((x59 * x75) * x91))
    result[(0, 4, 5)] = numpy.sum(((x55 * x82) * x96))
    result[(0, 5, 0)] = numpy.sum((x84 * x97))
    result[(0, 5, 1)] = numpy.sum(((x25 * x38) * x98))
    result[(0, 5, 2)] = numpy.sum((x86 * x99))
    result[(0, 5, 3)] = numpy.sum(((x51 * x93) * x97))
    result[(0, 5, 4)] = numpy.sum(((x38 * x96) * x99))
    result[(0, 5, 5)] = numpy.sum((x101 * x92))
    result[(1, 0, 0)] = numpy.sum((x106 * x110))
    result[(1, 0, 1)] = numpy.sum((x111 * x119))
    result[(1, 0, 2)] = numpy.sum(((x109 * x111) * x88))
    result[(1, 0, 3)] = numpy.sum((x120 * x123))
    result[(1, 0, 4)] = numpy.sum(((x118 * x120) * x88))
    result[(1, 0, 5)] = numpy.sum(((x120 * x124) * x54))
    result[(1, 1, 0)] = numpy.sum((x105 * x127))
    result[(1, 1, 1)] = numpy.sum((x133 * x135))
    result[(1, 1, 2)] = numpy.sum(((x126 * x136) * x45))
    result[(1, 1, 3)] = numpy.sum((x139 * x4))
    result[(1, 1, 4)] = numpy.sum(((x133 * x4) * x46))
    result[(1, 1, 5)] = numpy.sum(((x140 * x4) * x54))
    result[(1, 2, 0)] = numpy.sum(((x105 * x141) * x73))
    result[(1, 2, 1)] = numpy.sum(((x118 * x136) * x73))
    result[(1, 2, 2)] = numpy.sum(((x109 * x134) * x76))
    result[(1, 2, 3)] = numpy.sum(((x122 * x4) * x94))
    result[(1, 2, 4)] = numpy.sum(((x118 * x142) * x4))
    result[(1, 2, 5)] = numpy.sum(((x141 * x4) * x82))
    result[(1, 3, 0)] = numpy.sum((x143 * x145))
    result[(1, 3, 1)] = numpy.sum((x146 * x147))
    result[(1, 3, 2)] = numpy.sum((((x148 * x40) * x46) * x5))
    result[(1, 3, 3)] = numpy.sum((x35 * ((x0 * (((x122 + (4.0 * x128)) + ((4.0 * x130) * x38)) + (4.0 * x131))) + (x138 * x89))))
    result[(1, 3, 4)] = numpy.sum(((x146 * x41) * x46))
    result[(1, 3, 5)] = numpy.sum((x148 * x54))
    result[(1, 4, 0)] = numpy.sum(((x140 * x149) * x73))
    result[(1, 4, 1)] = numpy.sum(((x133 * x150) * x73))
    result[(1, 4, 2)] = numpy.sum(((x126 * x142) * x5))
    result[(1, 4, 3)] = numpy.sum((x139 * x73))
    result[(1, 4, 4)] = numpy.sum((x133 * x76))
    result[(1, 4, 5)] = numpy.sum((x127 * x82))
    result[(1, 5, 0)] = numpy.sum(((x124 * x149) * x97))
    result[(1, 5, 1)] = numpy.sum(((x118 * x5) * x98))
    result[(1, 5, 2)] = numpy.sum(((x141 * x5) * x99))
    result[(1, 5, 3)] = numpy.sum((x123 * x97))
    result[(1, 5, 4)] = numpy.sum((x119 * x99))
    result[(1, 5, 5)] = numpy.sum((x101 * x110))
    result[(2, 0, 0)] = numpy.sum((x106 * x154))
    result[(2, 0, 1)] = numpy.sum(((x111 * x153) * x155))
    result[(2, 0, 2)] = numpy.sum((x111 * x163))
    result[(2, 0, 3)] = numpy.sum(((x120 * x164) * x51))
    result[(2, 0, 4)] = numpy.sum(((x120 * x155) * x162))
    result[(2, 0, 5)] = numpy.sum((x120 * x167))
    result[(2, 1, 0)] = numpy.sum(((x105 * x168) * x55))
    result[(2, 1, 1)] = numpy.sum(((x134 * x153) * x59))
    result[(2, 1, 2)] = numpy.sum(((x136 * x162) * x55))
    result[(2, 1, 3)] = numpy.sum(((x168 * x4) * x66))
    result[(2, 1, 4)] = numpy.sum(((x162 * x169) * x4))
    result[(2, 1, 5)] = numpy.sum((((x166 * x4) * x55) * x56))
    result[(2, 2, 0)] = numpy.sum((x105 * x172))
    result[(2, 2, 1)] = numpy.sum(((x136 * x171) * x38))
    result[(2, 2, 2)] = numpy.sum((x135 * x178))
    result[(2, 2, 3)] = numpy.sum(((x179 * x4) * x51))
    result[(2, 2, 4)] = numpy.sum(((x178 * x39) * x4))
    result[(2, 2, 5)] = numpy.sum((x182 * x4))
    result[(2, 3, 0)] = numpy.sum(((x149 * x164) * x83))
    result[(2, 3, 1)] = numpy.sum(((x168 * x5) * x85))
    result[(2, 3, 2)] = numpy.sum((((x162 * x5) * x83) * x87))
    result[(2, 3, 3)] = numpy.sum((x154 * x90))
    result[(2, 3, 4)] = numpy.sum((x163 * x85))
    result[(2, 3, 5)] = numpy.sum((x167 * x83))
    result[(2, 4, 0)] = numpy.sum(((x149 * x179) * x55))
    result[(2, 4, 1)] = numpy.sum(((x169 * x171) * x5))
    result[(2, 4, 2)] = numpy.sum(((x150 * x178) * x55))
    result[(2, 4, 3)] = numpy.sum((x172 * x66))
    result[(2, 4, 4)] = numpy.sum((x178 * x59))
    result[(2, 4, 5)] = numpy.sum((x182 * x55))
    result[(2, 5, 0)] = numpy.sum((x145 * x183))
    result[(2, 5, 1)] = numpy.sum(((x184 * x5) * x52))
    result[(2, 5, 2)] = numpy.sum((x147 * x185))
    result[(2, 5, 3)] = numpy.sum((x184 * x51))
    result[(2, 5, 4)] = numpy.sum(((x185 * x39) * x41))
    result[(2, 5, 5)] = numpy.sum((x35 * ((x0 * (((x166 + (4.0 * x173)) + ((4.0 * x175) * x45)) + (4.0 * x176))) + (x100 * x181))))
    return result
def test_dispatch_to_response_pure_notification_parse_error() -> None:
    """Malformed JSON ('{') must produce a Left(parse-error response)."""
    expected = Left(
        ErrorResponse(
            ERROR_PARSE_ERROR,
            'Parse error',
            'Expecting property name enclosed in double quotes: line 1 column 2 (char 1)',
            None,
        )
    )
    response = dispatch_to_response_pure(
        deserializer=default_deserializer,
        validator=default_validator,
        post_process=identity,
        context=NOCONTEXT,
        methods={'ping': ping},
        request='{',
    )
    assert response == expected
def test_percentiles_with_out_of_bounds_fractions():
    """percentile() of [1, 2, 3, 4] stays at the first element up to the 25th
    percentile, and only exceeds it above that."""
    data = [1, 2, 3, 4]
    for pct in (10, 15, 20, 25):
        assert percentile(data, percentile=pct) == 1
    assert percentile(data, percentile=30) > 1
class String(ModelField):
    """Text model field; ``max_length`` is mandatory so the SQL column can be sized."""

    def __init__(self, **kwargs):
        # Explicit raise instead of a bare ``assert`` so the check survives
        # ``python -O``; AssertionError is kept for backward compatibility
        # with callers that catch it.
        if 'max_length' not in kwargs:
            raise AssertionError('max_length is required')
        super().__init__(**kwargs)

    def get_validator(self, **kwargs) -> typesystem.Field:
        """Build the typesystem validator backing this field."""
        return typesystem.String(**kwargs)

    def get_column_type(self):
        """SQLAlchemy column type sized from the validator's max_length."""
        return sqlalchemy.String(length=self.validator.max_length)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.