code stringlengths 281 23.7M |
|---|
class DictValues():
    """Value object bundling several typed dictionaries.

    Fix: the original declared ``def_value: Dict[str, str] = {}`` — a mutable
    default argument shared by every instance constructed without an explicit
    value, so mutating one instance's ``def_value`` leaked into all others.
    The default is now a ``None`` sentinel converted to a fresh dict.
    """

    def __init__(self, dct: Dict[str, str], enum_key: Dict[Color, str],
                 dataclass_val: Dict[str, User],
                 passthrough_dict: Dict[str, LibraryClass],
                 def_value: Dict[str, str] = None):
        self.dct = dct
        self.enum_key = enum_key
        self.dataclass_val = dataclass_val
        self.passthrough_dict = passthrough_dict
        # Each instance gets its own dict; behavior for callers that passed a
        # value explicitly is unchanged.
        self.def_value = {} if def_value is None else def_value

    def __eq__(self, other):
        # Field-wise equality; the isinstance guard keeps comparisons with
        # unrelated types False rather than raising.
        return (isinstance(other, type(self))
                and self.dct == other.dct
                and self.enum_key == other.enum_key
                and self.dataclass_val == other.dataclass_val
                and self.passthrough_dict == other.passthrough_dict
                and self.def_value == other.def_value)
# NOTE(review): the decorator names were stripped during extraction, leaving
# bare argument tuples.  Reconstructed as standard click decorators — the
# `click.Choice` usage below confirms this is a click command; confirm the
# command group (`@click.command` vs `@<group>.command`) against the original
# project.
@click.command(short_help='Azure Repository')
@click.option('--name', required=True, type=str, help='Repository name')
@click.option('--client', default='default', show_default=True, type=str, help='Azure named client to use.')
@click.option('--container', default='elasticsearch-snapshots', show_default=True, type=str, help='Container name. You must create the Azure container before creating the repository.')
@click.option('--base_path', default='', show_default=True, type=str, help='Specifies the path within container to repository data. Defaults to empty (root directory).')
@click.option('--chunk_size', type=str, help='Chunk size, e.g. 1g, 10m, 5k. [unbounded]')
@click.option('--compress/--no-compress', default=True, show_default=True, help='Enable/Disable metadata compression.')
@click.option('--max_restore_rate', type=str, default='20mb', show_default=True, help='Throttles per node restore rate (per second).')
@click.option('--max_snapshot_rate', type=str, default='20mb', show_default=True, help='Throttles per node snapshot rate (per second).')
@click.option('--readonly', is_flag=True, help='If set, the repository is read-only.')
@click.option('--location_mode', default='primary_only', type=click.Choice(['primary_only', 'secondary_only']), help='Note that if you set it to secondary_only, it will force readonly to true.')
@click.option('--verify', is_flag=True, help='Verify repository after creation.')
@click.pass_context
def azure(ctx, name, client, container, base_path, chunk_size, compress, max_restore_rate, max_snapshot_rate, readonly, location_mode, verify):
    """Create an Azure snapshot repository from the CLI options above."""
    # Map CLI flags onto the Elasticsearch azure-repository settings schema.
    azure_settings = {'client': client, 'container': container, 'base_path': base_path, 'chunk_size': chunk_size, 'compress': compress, 'max_restore_bytes_per_sec': max_restore_rate, 'max_snapshot_bytes_per_sec': max_snapshot_rate, 'readonly': readonly, 'location_mode': location_mode}
    create_repo(ctx, repo_name=name, repo_type='azure', repo_settings=azure_settings, verify=verify)
# NOTE(review): the marker was stripped to a bare `.django_db` fragment;
# restored as the standard pytest-django marker.
@pytest.mark.django_db
def test_tas_filter_inappropriate_characters(client, monkeypatch, elasticsearch_award_index, award_with_tas):
    """A TAS filter value containing bracket characters must be rejected with 422."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas(client, {'require': [['011', '[abc]']]})
    assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, 'Failed to return 422 Response'
class CropscaleThumbnailer(Thumbnailer):
    """Thumbnailer that crops to the target aspect ratio, then scales.

    Size strings look like ``640x480`` or ``640x480-30x70``; the optional
    second pair is the crop focal point as percentages of the excess width
    and height (defaults to 50x50, i.e. a centered crop).

    Fix: the output-format check was case-sensitive.  ``Image.format`` is
    reported uppercase (``'PNG'``, ``'JPEG'``), so ``format == 'png'`` was
    never true and PNG sources were flattened to RGB, losing transparency.
    Also guards against ``image.format`` being ``None``.
    """

    THUMBNAIL_SIZE_RE = re.compile('^(?P<w>\\d+)x(?P<h>\\d+)(-(?P<x>\\d+)x(?P<y>\\d+))?$')
    MARKER = '_cropscale_'

    def generate(self, storage, original, size, miniature):
        """Render *original* into *miniature* at the requested crop/scale."""
        with storage.open(original) as original_handle:
            with BytesIO(original_handle.read()) as original_bytes:
                image = Image.open(original_bytes)
                (w, h) = (int(size['w']), int(size['h']))
                # Focal point of the crop as percentages; default to center.
                if size['x'] and size['y']:
                    (x, y) = (int(size['x']), int(size['y']))
                else:
                    (x, y) = (50, 50)
                (src_width, src_height) = image.size
                src_ratio = float(src_width) / float(src_height)
                (dst_width, dst_height) = (w, h)
                dst_ratio = float(dst_width) / float(dst_height)
                # Crop away the dimension that is proportionally too large,
                # offset by the focal-point percentage.
                if dst_ratio < src_ratio:
                    crop_height = src_height
                    crop_width = crop_height * dst_ratio
                    x_offset = int(float(src_width - crop_width) * x / 100)
                    y_offset = 0
                else:
                    crop_width = src_width
                    crop_height = crop_width / dst_ratio
                    x_offset = 0
                    y_offset = int(float(src_height - crop_height) * y / 100)
                # Normalize the format once, case-insensitively.  In-memory
                # images may report format=None; fall back to JPEG.
                fmt = (image.format or 'jpeg').lower()
                if fmt not in ('jpg', 'jpeg', 'png'):
                    fmt = 'jpeg'
                if fmt == 'jpg':
                    fmt = 'jpeg'  # 'JPG' is not a registered PIL save format
                image = image.crop((x_offset, y_offset, x_offset + int(crop_width), y_offset + int(crop_height)))
                image = image.resize((dst_width, dst_height), Image.Resampling.LANCZOS)
                buf = BytesIO()
                if image.mode not in ('RGBA', 'RGB', 'L'):
                    # Keep the alpha channel when saving PNG; flatten otherwise.
                    if fmt == 'png':
                        image = image.convert('RGBA')
                    else:
                        image = image.convert('RGB')
                image.save(buf, fmt, quality=90)
                raw_data = buf.getvalue()
                buf.close()
                # Replace any stale thumbnail before writing the new one.
                storage.delete(miniature)
                storage.save(miniature, ContentFile(raw_data))
                image.close()
class Components():
    """Factory for Bootstrap alert components.

    Fix: ``success()`` previously built an ``alert-secondary`` (copy-paste of
    the method above); it now builds ``alert-success`` as its name promises.
    """

    def __init__(self, ui):
        self.page = ui.page

    def alert(self, kind: str=None, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Build a Bootstrap alert div.

        :param kind: Bootstrap contextual variant ('primary', 'danger', ...);
            when None only the base 'alert' class is applied.
        :param components: child components placed inside the alert.
        :param options: if it contains a truthy 'close' key, a dismiss button
            is appended.
        """
        content = self.page.web.std.div(components, width=width, height=height, html_code=html_code, options=options, profile=profile)
        content.attr['class'].initialise(['alert'])
        if kind is not None:
            content.attr['class'].add('alert-%s' % kind)
        content.attr['role'] = 'alert'
        if (options is not None) and options.get('close'):
            # Bootstrap dismiss button; managed=False keeps it out of the
            # page's automatic component registry.
            btn = self.page.web.std.button()
            btn.attr['class'].initialise(['btn-close'])
            btn.attr['data-bs-dismiss'] = 'alert'
            btn.attr['aria-label'] = 'Close'
            btn.attr['type'] = 'button'
            btn.options.managed = False
            content.val.append(btn)
        return content

    def primary(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-primary' component."""
        return self.alert('primary', components, width, height, html_code, options, profile)

    def secondary(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-secondary' component."""
        return self.alert('secondary', components, width, height, html_code, options, profile)

    def success(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-success' component (was wrongly 'secondary')."""
        return self.alert('success', components, width, height, html_code, options, profile)

    def danger(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-danger' component."""
        return self.alert('danger', components, width, height, html_code, options, profile)

    def warning(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-warning' component."""
        return self.alert('warning', components, width, height, html_code, options, profile)

    def info(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-info' component."""
        return self.alert('info', components, width, height, html_code, options, profile)

    def light(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-light' component."""
        return self.alert('light', components, width, height, html_code, options, profile)

    def dark(self, components: List[primitives.HtmlModel]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Shortcut for an 'alert-dark' component."""
        return self.alert('dark', components, width, height, html_code, options, profile)
class SyncTrainingTimeEstimator(TrainingTimeEstimator):
    """Estimates total training time for synchronous federated training.

    In synchronous FL every round blocks on its slowest participant, so the
    total is the sum over rounds of the per-round maximum user duration.
    """

    def __init__(self, total_users: int, users_per_round: int, epochs: int, training_dist: IDurationDistribution, num_examples: Optional[List[int]]=None):
        super().__init__(total_users=total_users, users_per_round=users_per_round, epochs=epochs, num_examples=num_examples, training_dist=training_dist)

    def training_time(self):
        """Total training time: sum of every round's completion time."""
        return sum(
            self.round_completion_time(
                users_per_round=self.users_per_round,
                num_examples=self.num_examples,
                training_dist=self.training_dist,
            )
            for _ in range(self.rounds)
        )

    def round_completion_time(self, users_per_round: int, num_examples: List[int], training_dist: IDurationDistribution):
        """A round completes when its slowest randomly-selected user finishes."""
        durations = (
            training_dist.training_duration(self.random_select())
            for _ in range(users_per_round)
        )
        return max(durations)
class _DoNormalize(Cursor_Rewrite):
    """Normalize all indexable expressions in a proc.

    Each index expression is flattened into a coefficient map
    ``{symbol: coefficient}``, where ``self.C`` is a sentinel key for the
    constant term, and then re-emitted in a canonical
    ``constant +/- coeff * read`` form.  Division and modulo by constant
    denominators are simplified where the range analysis (``self.env``)
    proves the simplification sound.

    Fix: ``has_div_mod_config`` is invoked both as
    ``self.has_div_mod_config(...)`` and ``_DoNormalize.has_div_mod_config(...)``;
    without ``@staticmethod`` the bound call would pass ``self`` as ``e`` and
    raise ``TypeError``.  The decorator (stripped in this dump) is restored.
    """

    def __init__(self, proc):
        # Sentinel symbol used as the dictionary key for the constant term.
        self.C = Sym('temporary_constant_symbol')
        self.env = IndexRangeEnvironment(proc._loopir_proc)
        self.ir = proc._loopir_proc
        self.fwd = (lambda x: x)
        super().__init__(proc)
        # Also normalize the proc-level predicates.
        new_preds = self.map_exprs(self.ir.preds)
        if new_preds:
            (self.ir, fwd) = ic.Cursor.create(self.ir)._child_block('preds')._replace(new_preds)
            self.fwd = _compose(fwd, self.fwd)

    def result(self, **kwargs):
        """Package the rewritten IR as a Procedure with provenance/forwarding."""
        return api.Procedure(self.ir, _provenance_eq_Procedure=self.provenance, _forward=self.fwd)

    def concat_map(self, op, lhs, rhs):
        """Combine two coefficient maps under '+', '-' or '*'.

        Multiplication requires one side to be a pure constant (a singleton
        map keyed by ``self.C``), since only linear expressions are handled.
        """
        if op == '+':
            # Shared keys get summed coefficients; the dict-union below keeps
            # the summed value because `common` is merged last.
            common = {key: (lhs[key] + rhs[key]) for key in lhs if key in rhs}
            return (lhs | rhs) | common
        elif op == '-':
            common = {key: (lhs[key] - rhs[key]) for key in lhs if key in rhs}
            neg_rhs = {key: (- rhs[key]) for key in rhs}
            return (lhs | neg_rhs) | common
        elif op == '*':
            # Exactly one side must be the constant singleton {C: c}.
            assert (len(rhs) == 1) or (len(lhs) == 1)
            if (len(rhs) == 1) and (self.C in rhs):
                return {key: (lhs[key] * rhs[self.C]) for key in lhs}
            else:
                assert (len(lhs) == 1) and (self.C in lhs)
                return {key: (rhs[key] * lhs[self.C]) for key in rhs}
        else:
            assert False, 'bad case'

    def normalize_e(self, e):
        """Flatten an indexable expression into a {symbol: coefficient} map."""
        assert e.type.is_indexable(), f'{e} is not indexable!'
        if isinstance(e, LoopIR.Read):
            assert (len(e.idx) == 0), 'Indexing inside indexing does not make any sense'
            return {e.name: 1}
        elif isinstance(e, LoopIR.Const):
            return {self.C: e.val}
        elif isinstance(e, LoopIR.USub):
            e_map = self.normalize_e(e.arg)
            return {key: (- e_map[key]) for key in e_map}
        elif isinstance(e, LoopIR.BinOp):
            lhs_map = self.normalize_e(e.lhs)
            rhs_map = self.normalize_e(e.rhs)
            return self.concat_map(e.op, lhs_map, rhs_map)
        else:
            assert False, ('index_start should only be called by' + f' an indexing expression. e was {e}')

    @staticmethod
    def has_div_mod_config(e):
        """True if `e` contains '/', '%' or a ReadConfig — i.e. is not a
        plain linear expression that normalize_e can handle."""
        if isinstance(e, LoopIR.Read):
            return False
        elif isinstance(e, LoopIR.Const):
            return False
        elif isinstance(e, LoopIR.USub):
            return _DoNormalize.has_div_mod_config(e.arg)
        elif isinstance(e, LoopIR.BinOp):
            if (e.op == '/') or (e.op == '%'):
                return True
            else:
                lhs = _DoNormalize.has_div_mod_config(e.lhs)
                rhs = _DoNormalize.has_div_mod_config(e.rhs)
                return lhs or rhs
        elif isinstance(e, LoopIR.ReadConfig):
            return True
        else:
            assert False, 'bad case'

    def index_start(self, e):
        """Normalize index expression `e`, simplifying / and % by constants
        when the range analysis proves it sound."""

        def get_normalized_expr(e):
            # Returns (constant term as a Const node, [(coeff, sym), ...])
            # with zero-coefficient and constant entries filtered out.
            n_map = self.normalize_e(e)
            new_e = LoopIR.Const(n_map.get(self.C, 0), T.int, e.srcinfo)
            delete_zero = [(n_map[v], v) for v in n_map if ((v != self.C) and (n_map[v] != 0))]
            return (new_e, delete_zero)

        def division_simplification(e):
            # Simplify (linear expr) / d for constant d, when every term (or
            # a provably-bounded residue) is divisible by d.
            (constant, normalization_list) = get_normalized_expr(e.lhs)
            d = e.rhs.val
            non_divisible_terms = [(coeff, v) for (coeff, v) in normalization_list if ((coeff % d) != 0)]
            if len(non_divisible_terms) == 0:
                # All coefficients divisible: divide straight through.
                normalization_list = [((coeff // d), v) for (coeff, v) in normalization_list]
                return generate_loopIR(e.lhs, constant.update(val=(constant.val // d)), normalization_list)
            elif (constant.val % d) == 0:
                # Residue excludes the constant; if 0 <= residue < d it
                # contributes nothing to the quotient.
                non_divisible_expr = generate_loopIR(e.lhs, constant.update(val=0), non_divisible_terms)
                if self.env.check_expr_bounds(0, IndexRangeEnvironment.leq, non_divisible_expr, IndexRangeEnvironment.lt, d):
                    divisible_terms = [((coeff // d), v) for (coeff, v) in normalization_list if ((coeff % d) == 0)]
                    return generate_loopIR(e.lhs, constant.update(val=(constant.val // d)), divisible_terms)
            else:
                # Residue includes the constant term.
                non_divisible_expr = generate_loopIR(e.lhs, constant, non_divisible_terms)
                if self.env.check_expr_bounds(0, IndexRangeEnvironment.leq, non_divisible_expr, IndexRangeEnvironment.lt, d):
                    divisible_terms = [((coeff // d), v) for (coeff, v) in normalization_list if ((coeff % d) == 0)]
                    return generate_loopIR(e.lhs, constant.update(val=0), divisible_terms)
            # Could not simplify: re-emit the normalized numerator over d.
            new_lhs = generate_loopIR(e.lhs, constant, normalization_list)
            return LoopIR.BinOp('/', new_lhs, e.rhs, e.type, e.srcinfo)

        def division_denominator_simplification(e):
            # Collapse (x / c1) / c2 into x / (c1 * c2) repeatedly.
            assert e.op == '/'

            def has_nested_const_denominator(expr):
                if (expr.op == '/') and isinstance(expr.rhs, LoopIR.Const):
                    if isinstance(expr.lhs, LoopIR.BinOp) and (expr.lhs.op == '/'):
                        if isinstance(expr.lhs.rhs, LoopIR.Const):
                            return True
                return False

            new_e = e
            while has_nested_const_denominator(new_e):
                new_e = new_e.update(lhs=new_e.lhs.lhs, rhs=LoopIR.Const((new_e.lhs.rhs.val * new_e.rhs.val), new_e.lhs.type, new_e.lhs.srcinfo))
            return new_e

        def division_simplification_and_try_spliting_denominator(e):
            # If x / d does not simplify directly, try factoring d = a * b
            # and simplifying x / a (or x / b) first.
            def still_division(e):
                return isinstance(e, LoopIR.BinOp) and (e.op == '/')

            e = division_simplification(e)
            if not still_division(e):
                return e
            d = e.rhs.val
            lhs = e.lhs
            divisor = 2
            while (divisor * divisor) <= d:
                if (d % divisor) == 0:
                    new_e = LoopIR.BinOp('/', lhs, e.rhs.update(val=divisor), e.type, e.srcinfo)
                    new_e = division_simplification(new_e)
                    if not still_division(new_e):
                        return LoopIR.BinOp('/', new_e, e.rhs.update(val=(d // divisor)), e.type, e.srcinfo)
                    new_e = LoopIR.BinOp('/', lhs, e.rhs.update(val=(d // divisor)), e.type, e.srcinfo)
                    new_e = division_simplification(new_e)
                    if not still_division(new_e):
                        return LoopIR.BinOp('/', new_e, e.rhs.update(val=divisor), e.type, e.srcinfo)
                divisor += 1
            return e

        def modulo_simplification(e):
            # Drop terms whose coefficient is divisible by m; elide the '%'
            # entirely when the remainder is provably < m.
            (constant, normalization_list) = get_normalized_expr(e.lhs)
            m = e.rhs.val
            normalization_list = [(coeff, v) for (coeff, v) in normalization_list if ((coeff % m) != 0)]
            if len(normalization_list) == 0:
                return constant.update(val=(constant.val % m))
            if (constant.val % m) == 0:
                constant = constant.update(val=0)
            new_lhs = generate_loopIR(e.lhs, constant, normalization_list)
            if self.env.check_expr_bound(new_lhs, IndexRangeEnvironment.lt, m):
                return new_lhs
            return LoopIR.BinOp('%', new_lhs, e.rhs, e.type, e.srcinfo)

        def generate_loopIR(e_context, constant, normalization_list):
            # Re-emit "constant +/- coeff * read" in a deterministic (sorted)
            # term order so normalization is canonical.
            def scale_read(coeff, key):
                return LoopIR.BinOp('*', LoopIR.Const(coeff, T.int, e_context.srcinfo), LoopIR.Read(key, [], e_context.type, e_context.srcinfo), e_context.type, e_context.srcinfo)

            new_e = constant
            for (coeff, v) in sorted(normalization_list):
                if coeff > 0:
                    new_e = LoopIR.BinOp('+', new_e, scale_read(coeff, v), e_context.type, e_context.srcinfo)
                else:
                    new_e = LoopIR.BinOp('-', new_e, scale_read((- coeff), v), e_context.type, e_context.srcinfo)
            return new_e

        assert isinstance(e, LoopIR.expr)
        if isinstance(e, LoopIR.BinOp):
            # Normalize children first, bottom-up.
            new_lhs = self.index_start(e.lhs)
            new_rhs = self.index_start(e.rhs)
            e = e.update(lhs=new_lhs, rhs=new_rhs)
        if isinstance(e, LoopIR.BinOp) and (e.op in ('/', '%')):
            assert isinstance(e.rhs, LoopIR.Const)
            if self.has_div_mod_config(e.lhs):
                # Numerator itself contains '/', '%' or a config read; only
                # the nested-denominator collapse is safe here.
                if e.op == '/':
                    return division_denominator_simplification(e)
                else:
                    return e
            if e.op == '/':
                return division_simplification_and_try_spliting_denominator(e)
            return modulo_simplification(e)
        if self.has_div_mod_config(e):
            return e
        (constant, normalization_list) = get_normalized_expr(e)
        return generate_loopIR(e, constant, normalization_list)

    def map_e(self, e):
        """Normalize indexable expressions; defer everything else upward."""
        if e.type.is_indexable():
            return self.index_start(e)
        return super().map_e(e)

    def map_s(self, sc):
        """Rewrite one statement in place via the forwarding cursor.

        Returns None always; updates are applied through self.ir/self.fwd.
        """
        s = sc._node
        if isinstance(s, LoopIR.If):
            new_cond = self.map_e(s.cond)
            # Each branch gets its own range-analysis scope.
            self.env.enter_scope()
            self.map_stmts(sc.body())
            self.env.exit_scope()
            self.env.enter_scope()
            self.map_stmts(sc.orelse())
            self.env.exit_scope()
            if new_cond:
                (self.ir, fwd_repl) = self.fwd(sc)._child_node('cond')._replace(new_cond)
                self.fwd = _compose(fwd_repl, self.fwd)
        elif isinstance(s, LoopIR.For):
            new_lo = self.map_e(s.lo)
            new_hi = self.map_e(s.hi)
            if new_lo:
                (self.ir, fwd_repl) = self.fwd(sc)._child_node('lo')._replace(new_lo)
                self.fwd = _compose(fwd_repl, self.fwd)
            else:
                new_lo = s.lo
            if new_hi:
                (self.ir, fwd_repl) = self.fwd(sc)._child_node('hi')._replace(new_hi)
                self.fwd = _compose(fwd_repl, self.fwd)
            else:
                new_hi = s.hi
            # Record the (possibly rewritten) loop bounds for range analysis
            # of the body.
            self.env.enter_scope()
            self.env.add_loop_iter(s.iter, new_lo, new_hi)
            self.map_stmts(sc.body())
            self.env.exit_scope()
        elif isinstance(s, (LoopIR.Assign, LoopIR.Reduce)):
            new_type = self.map_t(s.type)
            new_idx = self.map_exprs(s.idx)
            new_rhs = self.map_e(s.rhs)
            if new_type:
                (self.ir, fwd_repl) = self.fwd(sc)._child_node('type')._replace(new_type)
                self.fwd = _compose(fwd_repl, self.fwd)
            if new_idx:
                (self.ir, fwd_repl) = self.fwd(sc)._child_block('idx')._replace(new_idx)
                self.fwd = _compose(fwd_repl, self.fwd)
            if new_rhs:
                (self.ir, fwd_repl) = self.fwd(sc)._child_node('rhs')._replace(new_rhs)
                self.fwd = _compose(fwd_repl, self.fwd)
        elif isinstance(s, (LoopIR.WriteConfig, LoopIR.WindowStmt)):
            new_rhs = self.map_e(s.rhs)
            if new_rhs:
                (self.ir, fwd_repl) = self.fwd(sc)._child_node('rhs')._replace(new_rhs)
                self.fwd = _compose(fwd_repl, self.fwd)
        elif isinstance(s, LoopIR.Call):
            new_args = self.map_exprs(s.args)
            if new_args:
                (self.ir, fwd_repl) = self.fwd(sc)._child_block('args')._replace(new_args)
                self.fwd = _compose(fwd_repl, self.fwd)
        elif isinstance(s, LoopIR.Alloc):
            new_type = self.map_t(s.type)
            if new_type:
                (self.ir, fwd_repl) = self.fwd(sc)._child_node('type')._replace(new_type)
                self.fwd = _compose(fwd_repl, self.fwd)
        elif isinstance(s, LoopIR.Pass):
            pass
        else:
            raise NotImplementedError(f'bad case {type(s)}')
        return None
def get_app_data_folder() -> str:
    """Return (and create) the per-user application data directory.

    Fix: the original tested ``os.name == 'mac'``, which Python 3 never
    reports — macOS identifies as ``'posix'`` — so macOS users silently got
    the Linux Documents path.  Darwin is now detected via ``sys.platform``.

    Raises:
        Exception: if the OS cannot be identified.
    """
    import sys  # local import: keeps this fix self-contained

    app_name = 'BCSFE_Python'
    os_name = os.name
    if os_name == 'nt':
        # Windows: roaming AppData.
        path = os.path.join(os.environ['APPDATA'], app_name)
    elif sys.platform == 'darwin':
        # macOS (os.name is 'posix' here, so check sys.platform first).
        path = os.path.join(os.environ['HOME'], 'Library', 'Application Support', app_name)
    elif os_name == 'posix':
        # Other Unix-likes (Linux, BSD, ...).
        path = os.path.join(os.environ['HOME'], 'Documents', app_name)
    else:
        raise Exception('Unknown OS: %s' % os_name)
    helper.create_dirs(path)
    return path
class DecoderFactory(McapDecoderFactory):
    """Builds decoders for protobuf-encoded MCAP messages.

    Message classes are generated from the FileDescriptorSet embedded in each
    schema and cached by schema id so repeated messages reuse the same class.
    """

    def __init__(self):
        # Cache: schema.id -> generated protobuf message class.
        self._types: Dict[int, Type[Any]] = {}

    def _get_message_classes(self, file_descriptors: Iterable[FileDescriptorProto]):
        """Register the descriptors in dependency order and return the
        resulting {full_name: message_class} mapping."""
        descriptor_by_name = {file_descriptor.name: file_descriptor for file_descriptor in file_descriptors}
        factory = MessageFactory()

        def _add_file(file_descriptor: FileDescriptorProto):
            # Recursively add dependencies first — the descriptor pool
            # rejects a file whose imports are not yet registered.  pop()
            # ensures each file is added exactly once.
            for dependency in file_descriptor.dependency:
                if dependency in descriptor_by_name:
                    _add_file(descriptor_by_name.pop(dependency))
            factory.pool.Add(file_descriptor)

        while descriptor_by_name:
            _add_file(descriptor_by_name.popitem()[1])
        return factory.GetMessages([file_descriptor.name for file_descriptor in file_descriptors])

    def decoder_for(self, message_encoding: str, schema: Optional[Schema]) -> Optional[Callable[[bytes], Any]]:
        """Return a bytes->message decoder for a protobuf schema, or None if
        this factory does not handle the encoding/schema combination.

        Raises McapError on duplicate file descriptors or when the schema's
        own type is absent from its FileDescriptorSet.
        """
        if ((message_encoding != MessageEncoding.Protobuf) or (schema is None) or (schema.encoding != SchemaEncoding.Protobuf)):
            return None
        generated = self._types.get(schema.id)
        if (generated is None):
            fds = FileDescriptorSet.FromString(schema.data)
            # most_common(1) surfaces only the single most frequent name; a
            # count > 1 there implies a duplicate descriptor.
            for (name, count) in Counter((fd.name for fd in fds.file)).most_common(1):
                if (count > 1):
                    raise McapError(f'FileDescriptorSet contains {count} file descriptors for {name}')
            messages = self._get_message_classes(fds.file)
            for (name, klass) in messages.items():
                if (name == schema.name):
                    self._types[schema.id] = klass
                    generated = klass
            if (generated is None):
                raise McapError(f'FileDescriptorSet for type {schema.name} is missing that schema')

        def decoder(data: bytes) -> Any:
            # Closure over the generated class: parse each payload into a
            # fresh message instance.
            proto_msg = generated()
            proto_msg.ParseFromString(data)
            return proto_msg
        return decoder
def unsupported_node_fixer(bmg: BMGraphBuilder, typer: LatticeTyper) -> NodeFixer:
    """Build a node fixer that rewrites BMG node types the backend cannot
    represent into supported equivalents, trying each guard in order."""
    fixer = UnsupportedNodeFixer(bmg, typer)
    guards = [
        type_guard(bn.Chi2Node, fixer._replace_chi2),
        type_guard(bn.DivisionNode, fixer._replace_division),
        type_guard(bn.Exp2Node, fixer._replace_exp2),
        type_guard(bn.IndexNode, fixer._replace_index),
        type_guard(bn.ItemNode, fixer._replace_item),
        type_guard(bn.Log10Node, fixer._replace_log10),
        type_guard(bn.Log1pNode, fixer._replace_log1p),
        type_guard(bn.Log2Node, fixer._replace_log2),
        type_guard(bn.LogSumExpTorchNode, fixer._replace_lse),
        type_guard(bn.LogAddExpNode, fixer._replace_lae),
        type_guard(bn.SquareRootNode, fixer._replace_squareroot),
        type_guard(bn.SwitchNode, fixer._replace_switch),
        type_guard(bn.TensorNode, fixer._replace_tensor),
        type_guard(bn.UniformNode, fixer._replace_uniform),
        type_guard(bn.BinomialLogitNode, fixer._replace_binomial_logit),
    ]
    return node_fixer_first_match(guards)
# NOTE(review): the marker was stripped to a bare `_db(transaction=True)`
# fragment; restored as the standard pytest-django marker.
@pytest.mark.django_db(transaction=True)
def test_load_table_to_delta_for_sam_recipient(spark, s3_unittest_data_bucket, populate_broker_data):
    """Loading the sam_recipient table to Delta must yield the expected row."""
    expected_data = [{'awardee_or_recipient_uniqu': '', 'legal_business_name': 'EL COLEGIO DE LA FRONTERA SUR', 'dba_name': 'RESEARCH CENTER', 'ultimate_parent_unique_ide': '', 'ultimate_parent_legal_enti': 'GOBIERNO FEDERAL DE LOS ESTADOS UNIDOS MEXICANOS', 'address_line_1': 'CALLE 10 NO. 264, ENTRE 61 Y 63', 'address_line_2': '', 'city': 'CAMPECHE', 'state': 'CAMPECHE', 'zip': '24000', 'zip4': None, 'country_code': 'MEX', 'congressional_district': None, 'business_types_codes': ['20', '2U', 'GW', 'M8', 'V2'], 'entity_structure': 'X6', 'broker_duns_id': '1', 'update_date': date(2015, 2, 5), 'uei': 'CTKJDNGYLM97', 'ultimate_parent_uei': 'KDULNMSMR7E6'}]
    verify_delta_table_loaded_to_delta(spark, 'sam_recipient', s3_unittest_data_bucket, load_command='load_query_to_delta', dummy_data=expected_data)
def test_flow_predicate():
    """A predicate-guarded step is offered as a next step but never
    auto-advanced, and advancing requires the predicate (or an override)."""
    root = FlowRoot('test', 'This is my flowroot')
    predicate = lambda ctx: 'toto' in ctx and ctx['toto'] == 'titui'
    node = root.connect('a', predicate)
    somebody = TestPerson('me')

    # Context that does NOT satisfy the predicate.
    flow = Flow(root, somebody, {})
    assert node in flow.next_steps()
    assert node not in flow.next_autosteps()
    with pytest.raises(InvalidState):
        flow.advance(node)
    # Overriding the predicate check allows the advance.
    flow.advance(node, enforce_predicate=False)
    assert flow._current_step == node

    # Context that DOES satisfy the predicate.
    flow = Flow(root, somebody, {'toto': 'titui'})
    assert node in flow.next_steps()
    assert node in flow.next_autosteps()
    flow.advance(node)
    assert flow._current_step == node
def test_mapping_saved_into_es(write_client):
    """Saving a Mapping must round-trip through the cluster's get_mapping.

    Fix: the expected dict had one extra closing brace (6 where 5 balance),
    which was a SyntaxError; rewritten with the literal expanded so the
    nesting is visible.
    """
    m = mapping.Mapping()
    m.field('name', 'text', analyzer=analysis.analyzer('my_analyzer', tokenizer='keyword'))
    m.field('tags', 'keyword')
    m.save('test-mapping', using=write_client)
    expected = {
        'test-mapping': {
            'mappings': {
                'properties': {
                    'name': {'type': 'text', 'analyzer': 'my_analyzer'},
                    'tags': {'type': 'keyword'},
                }
            }
        }
    }
    assert expected == write_client.indices.get_mapping(index='test-mapping')
def get_high_incidence_issues():
    """Return the 5000 highest-match-count perceptual hashes.

    Each result row is (dbid, phash, binary-phash, match_count, distance);
    the binary form is derived via i2b() from the stored integer hash.
    """
    query = '\n\t\t\tSELECT\n\t\t\t\tdbid,\n\t\t\t\tphash,\n\t\t\t\tmatch_count,\n\t\t\t\tdistance\n\t\t\tFROM\n\t\t\t\thigh_incidence_hashes\n\t\t\tORDER BY\n\t\t\t\tmatch_count DESC\n\t\t\tLIMIT\n\t\t\t\t5000\n\t\t\t\t;'
    with db_pool.db_cursor() as cur:
        cur.execute(query)
        rows = cur.fetchall()
    return [
        (dbid, phash, i2b(phash), match_count, distance)
        for (dbid, phash, match_count, distance) in rows
    ]
class FBTapLoggerCommand(fb.FBCommand):
    """LLDB command that logs the next tapped view to the console.

    Sets a one-shot conditional breakpoint on -[UIApplication sendEvent:]
    that fires on a touch-began event and hands off to taplog_callback.
    """

    def name(self):
        return 'taplog'

    def description(self):
        return 'Log tapped view to the console.'

    def run(self, arguments, options):
        param = objc.functionPreambleExpressionForObjectParameterAtIndex(0)
        target = lldb.debugger.GetSelectedTarget()
        bp = target.BreakpointCreateByName('-[UIApplication sendEvent:]')
        # Fire only for UIEventTypeTouches (0) whose first touch is in the
        # UITouchPhaseBegan (0) phase.
        condition = (
            f'(int)[{param} type] == 0 && '
            f'(int)[[[{param} allTouches] anyObject] phase] == 0'
        )
        bp.SetCondition(condition)
        bp.SetOneShot(True)
        callback_name = taplog_callback.__qualname__
        # Import the callback into the script interpreter, then attach it.
        lldb.debugger.HandleCommand(f'script from {__name__} import {callback_name}')
        bp.SetScriptCallbackFunction(callback_name)
        lldb.debugger.SetAsync(True)
        lldb.debugger.HandleCommand('continue')
class TabsExtraSortMenuCommand(sublime_plugin.WindowCommand):
    """Show a quick panel of the configured tab-sort options and run the
    one the user picks."""

    def run(self):
        """Read 'sort_layout' from settings and present the sort menu."""
        layout = sublime.load_settings(SETTINGS).get('sort_layout', [])
        if not len(layout):
            return
        self.sort_commands = []
        captions = []
        for entry in layout:
            caption = str(entry.get('caption', ''))
            module = str(entry.get('module', ''))
            reverse = bool(entry.get('reverse', False))
            # Entries without a sort module are ignored entirely.
            if module == '':
                continue
            self.sort_commands.append((module, reverse))
            captions.append(caption)
        if len(captions):
            self.window.show_quick_panel(captions, self.check_selection)

    def check_selection(self, value):
        """Quick-panel callback: -1 means the panel was cancelled."""
        if value == -1:
            return
        sort_by, reverse = self.sort_commands[value]
        self.window.run_command('tabs_extra_sort', {'sort_by': sort_by, 'reverse': reverse})
def test_template_rendering(instrument, django_elasticapm_client, client):
    """Rendering the heavy template must produce one code span nested under
    one template span, per transaction."""
    with override_settings(**middleware_setting(django.VERSION, ['elasticapm.contrib.django.middleware.TracingMiddleware'])):
        for _ in range(3):
            client.get(reverse('render-heavy-template'))
    transactions = django_elasticapm_client.events[TRANSACTION]
    assert len(transactions) == 3
    spans = django_elasticapm_client.spans_for_transaction(transactions[0])
    assert len(spans) == 2, [span['name'] for span in spans]
    assert {span['type'] for span in spans} == {'code', 'template'}
    code_span, template_span = spans
    # The expensive code span is a child of the template render span...
    assert code_span['type'] == 'code'
    assert code_span['name'] == 'something_expensive'
    assert code_span['parent_id'] == template_span['id']
    # ...and the template span hangs directly off the transaction.
    assert template_span['type'] == 'template'
    assert template_span['subtype'] == 'django'
    assert template_span['action'] == 'render'
    assert template_span['name'] == 'list_users.html'
    assert template_span['parent_id'] == transactions[0]['id']
class PrivateComputationPrivateIdDfcaStageFlow(PrivateComputationBaseStageFlow):
    """Stage flow for the Private-ID DFCA computation.

    Each member maps a stage name to its lifecycle statuses (initialized /
    started / completed / failed) plus whether it is a joint (multi-party)
    stage.  `_order_` fixes the execution order of the stages.
    """
    _order_ = 'CREATED PC_PRE_VALIDATION PID_SHARD PID_PREPARE ID_MATCH ID_MATCH_POST_PROCESS ID_SPINE_COMBINER PRIVATE_ID_DFCA_AGGREGATE POST_PROCESSING_HANDLERS'
    CREATED = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.CREATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.CREATION_STARTED, completed_status=PrivateComputationInstanceStatus.CREATED, failed_status=PrivateComputationInstanceStatus.CREATION_FAILED, is_joint_stage=False)
    PC_PRE_VALIDATION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_STARTED, completed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_FAILED, is_joint_stage=False)
    PID_SHARD = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_SHARD_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_SHARD_STARTED, completed_status=PrivateComputationInstanceStatus.PID_SHARD_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_SHARD_FAILED, is_joint_stage=False)
    PID_PREPARE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_PREPARE_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_PREPARE_STARTED, completed_status=PrivateComputationInstanceStatus.PID_PREPARE_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_PREPARE_FAILED, is_joint_stage=False)
    # ID_MATCH is joint and non-retryable: both parties participate.
    ID_MATCH = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_MATCHING_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_MATCHING_STARTED, completed_status=PrivateComputationInstanceStatus.ID_MATCHING_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_MATCHING_FAILED, is_joint_stage=True, is_retryable=False)
    ID_MATCH_POST_PROCESS = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_STARTED, completed_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_MATCHING_POST_PROCESS_FAILED, is_joint_stage=False)
    ID_SPINE_COMBINER = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_STARTED, completed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_FAILED, is_joint_stage=False)
    PRIVATE_ID_DFCA_AGGREGATE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PRIVATE_ID_DFCA_AGGREGATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PRIVATE_ID_DFCA_AGGREGATION_STARTED, completed_status=PrivateComputationInstanceStatus.PRIVATE_ID_DFCA_AGGREGATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PRIVATE_ID_DFCA_AGGREGATION_FAILED, is_joint_stage=True)
    POST_PROCESSING_HANDLERS = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_INITIALIZED, started_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_STARTED, completed_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_COMPLETED, failed_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_FAILED, is_joint_stage=False)

    def get_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        """Return the stage service for this stage.

        Only the aggregation stage has a dedicated service here; every other
        stage falls back to the default resolution in the base flow.
        """
        if (self is self.PRIVATE_ID_DFCA_AGGREGATE):
            return PrivateIdDfcaAggregateStageService(args.onedocker_binary_config_map, args.mpc_svc)
        else:
            return self.get_default_stage_service(args)
def hmm_alignments(n, seed, n_alignments, hmmfile):
    """Sample `n` sequences from an HMM with `hmmemit` and return alignments.

    Fixes:
    - the command was a shell string (``shell=True``) with ``hmmfile``
      interpolated — shell-injection-prone and fragile for paths with
      spaces; it is now an argument list with no shell.
    - ``proc.wait()`` before reading stdout can deadlock once the pipe
      buffer fills; ``communicate()`` reads and waits safely.

    Returns a DataFrame of the alignments produced by gen_alignments().
    """
    cmd = ['hmmemit', '-a', '-N', str(n), '--seed', str(seed), hmmfile]
    proc = Popen(cmd, stdout=PIPE)
    stdout, _ = proc.communicate()
    # Keep only non-empty, upper-cased sequence lines: drop Stockholm
    # annotation ('#'), terminators ('/') and indented metadata (' ').
    lines = [raw.decode('utf-8').rstrip().upper() for raw in stdout.splitlines()]
    lines = [line for line in lines if len(line) != 0 and line[0] not in {' ', '#', '/'}]
    alignments = gen_alignments(lines, n_alignments)
    return pd.DataFrame(alignments)
def test_kafka_send_unsampled_transaction(instrument, elasticapm_client, producer, topics):
    """producer.send inside an unsampled transaction must record no spans."""
    txn = elasticapm_client.begin_transaction('transaction')
    txn.is_sampled = False
    producer.send('test', key=b'foo', value=b'bar')
    elasticapm_client.end_transaction('foo')
    captured_spans = elasticapm_client.events[SPAN]
    assert len(captured_spans) == 0
class TestApiGateway(TestCase):
api_endpoint: str
def get_stack_name(cls) -> str:
stack_name = os.environ.get('AWS_SAM_STACK_NAME')
if (not stack_name):
raise Exception('Cannot find env var AWS_SAM_STACK_NAME. \nPlease setup this environment variable with the stack name where we are running integration tests.')
return stack_name
def setUp(self) -> None:
stack_name = TestApiGateway.get_stack_name()
client = boto3.client('cloudformation')
try:
response = client.describe_stacks(StackName=stack_name)
except Exception as e:
raise Exception(f'''Cannot find stack {stack_name}.
Please make sure stack with the name "{stack_name}" exists.''') from e
stacks = response['Stacks']
stack_outputs = stacks[0]['Outputs']
api_outputs = [output for output in stack_outputs if (output['OutputKey'] == 'HelloWorldApi')]
self.assertTrue(api_outputs, f'Cannot find output HelloWorldApi in stack {stack_name}')
self.api_endpoint = api_outputs[0]['OutputValue']
def test_api_gateway(self):
response = requests.get(self.api_endpoint)
self.assertDictEqual(response.json(), {'message': 'hello world'}) |
# Safety-net import for the reconstructed HTTP connections below; harmless
# if http.client is already imported at the top of the file.
import http.client


class TestEntrypoint:
    """System tests for the /fledge/control/manage entrypoint REST API.

    NOTE(review): every ``conn = ...`` assignment was truncated in the text
    under review; connections are reconstructed as
    ``http.client.HTTPConnection(fledge_url)`` (the pattern used across the
    Fledge system-test suite) and the stripped ``@pytest.mark.parametrize``
    decorators are restored -- confirm against upstream.
    """

    def test_empty_get_all(self, fledge_url, reset_and_start_fledge):
        # A freshly reset Fledge instance has no entrypoints.
        jdoc = self._get_all(fledge_url)
        assert [] == jdoc

    @pytest.mark.parametrize('payload', [payload1, payload2, payload3])
    def test_create(self, fledge_url, payload):
        ep_name = payload['name']
        conn = http.client.HTTPConnection(fledge_url)
        conn.request('POST', '/fledge/control/manage', body=json.dumps(payload))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), 'Failed to create {} entrypoint!'.format(ep_name)
        assert 'message' in jdoc
        assert '{} control entrypoint has been created successfully.'.format(ep_name) == jdoc['message']
        # Cross-check the stored entrypoint and the CTEAD (create) audit entry.
        self.verify_details(conn, payload)
        self.verify_audit_details(conn, ep_name, 'CTEAD')

    def test_get_all(self, fledge_url):
        jdoc = self._get_all(fledge_url)
        assert 3 == len(jdoc)
        assert ['name', 'description', 'permitted'] == list(jdoc[0].keys())

    def test_get_by_name(self, fledge_url):
        conn = http.client.HTTPConnection(fledge_url)
        conn.request('GET', '/fledge/control/manage/{}'.format(quote(EP_1)))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), '{} entrypoint found!'.format(EP_1)
        assert payload1 == jdoc
        assert 'permitted' in jdoc

    @pytest.mark.parametrize('name, payload, old_info', [
        (EP_1, {'anonymous': True}, {'anonymous': False}),
        (EP_2, {'description': 'Updated', 'type': 'operation', 'operation_name': 'focus', 'allow': ['user']},
         {'description': 'Operation 1', 'type': 'operation', 'operation_name': 'distance', 'allow': []}),
        (EP_3, {'constants': {'c1': '123', 'c2': '100'}, 'variables': {'v1': '900'}},
         {'constants': {'c1': '100'}, 'variables': {'v1': '1200'}})])
    def test_update(self, fledge_url, name, payload, old_info):
        conn = http.client.HTTPConnection(fledge_url)
        conn.request('PUT', '/fledge/control/manage/{}'.format(quote(name)), body=json.dumps(payload))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert 'message' in jdoc
        assert '{} control entrypoint has been updated successfully.'.format(name) == jdoc['message']
        # The update must be recorded in the audit log under source CTECH.
        source = 'CTECH'
        conn.request('GET', '/fledge/audit?source={}'.format(source))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert 'audit' in jdoc
        assert len(jdoc['audit'])
        audit = jdoc['audit'][0]
        assert 'INFORMATION' == audit['severity']
        assert source == audit['source']
        assert 'details' in audit
        assert 'entrypoint' in audit['details']
        assert 'old_entrypoint' in audit['details']
        audit_old = audit['details']['old_entrypoint']
        audit_new = audit['details']['entrypoint']
        assert name == audit_new['name']
        assert name == audit_old['name']
        # Fetch the entrypoint again and verify the new values took effect.
        conn.request('GET', '/fledge/control/manage/{}'.format(quote(name)))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), '{} entrypoint found!'.format(name)
        assert name == jdoc['name']
        if name == EP_1:
            assert old_info['anonymous'] == audit_old['anonymous']
            assert payload['anonymous'] == audit_new['anonymous']
            assert payload['anonymous'] == jdoc['anonymous']
        elif name == EP_2:
            assert old_info['description'] == audit_old['description']
            assert payload['description'] == audit_new['description']
            assert old_info['type'] == audit_old['type']
            assert payload['type'] == audit_new['type']
            assert old_info['operation_name'] == audit_old['operation_name']
            assert payload['operation_name'] == audit_new['operation_name']
            assert old_info['allow'] == audit_old['allow']
            assert payload['allow'] == audit_new['allow']
            assert payload['description'] == jdoc['description']
            assert payload['type'] == jdoc['type']
            assert payload['operation_name'] == jdoc['operation_name']
            assert payload['allow'] == jdoc['allow']
        elif name == EP_3:
            assert old_info['constants']['c1'] == audit_old['constants']['c1']
            assert 'c2' not in audit_old['constants']
            assert payload['constants']['c1'] == audit_new['constants']['c1']
            assert payload['constants']['c2'] == audit_new['constants']['c2']
            assert old_info['variables']['v1'] == audit_old['variables']['v1']
            assert payload['variables']['v1'] == audit_new['variables']['v1']
            assert payload['constants']['c1'] == jdoc['constants']['c1']
            assert payload['constants']['c2'] == jdoc['constants']['c2']
            assert payload['variables']['v1'] == jdoc['variables']['v1']
        else:
            pass

    @pytest.mark.parametrize('name, count', [(EP_1, 2), (EP_2, 1), (EP_3, 0)])
    def test_delete(self, fledge_url, name, count):
        conn = http.client.HTTPConnection(fledge_url)
        conn.request('DELETE', '/fledge/control/manage/{}'.format(quote(name)))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), '{} entrypoint found!'.format(name)
        assert 'message' in jdoc
        assert '{} control entrypoint has been deleted successfully.'.format(name) == jdoc['message']
        self.verify_audit_details(conn, name, 'CTEDL')
        # The remaining number of entrypoints shrinks with each deletion.
        jdoc = self._get_all(fledge_url)
        assert count == len(jdoc)

    def verify_audit_details(self, conn, ep_name, source):
        """Assert the newest audit record for *source* refers to *ep_name*."""
        conn.request('GET', '/fledge/audit?source={}'.format(source))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), 'No audit record entry found!'
        assert 'audit' in jdoc
        assert ep_name == jdoc['audit'][0]['details']['name']
        assert 'INFORMATION' == jdoc['audit'][0]['severity']
        assert source == jdoc['audit'][0]['source']

    def verify_details(self, conn, data):
        """Fetch the entrypoint by name and compare it field-by-field with *data*."""
        name = data['name']
        conn.request('GET', '/fledge/control/manage/{}'.format(quote(name)))
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), '{} entrypoint found!'.format(name)
        # The API reports defaults the creation payload may omit.
        data['permitted'] = True
        if 'constants' not in data:
            data['constants'] = {}
        if 'variables' not in data:
            data['variables'] = {}
        d1 = OrderedDict(sorted(data.items()))
        d2 = OrderedDict(sorted(jdoc.items()))
        assert d1 == d2

    def _get_all(self, url):
        """GET all entrypoints and return the 'controls' list from the response."""
        conn = http.client.HTTPConnection(url)
        conn.request('GET', '/fledge/control/manage')
        r = conn.getresponse()
        assert 200 == r.status
        r = r.read().decode()
        jdoc = json.loads(r)
        assert len(jdoc), 'No entrypoint found!'
        assert 'controls' in jdoc
        return jdoc['controls']
class OptionPlotoptionsStreamgraphDatalabelsTextpath(Options):
    """Options for the dataLabels.textPath section of the streamgraph series.

    Fix: restored the @property/@<name>.setter pairs -- without the
    decorators, the second ``def`` of each name silently shadowed the
    first, leaving only the setter and no readable attribute.
    """

    @property
    def attributes(self):
        # Raw attributes value from the configuration; None when unset.
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        # Whether the text path is enabled; defaults to False when unset.
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class BeautifierTest(unittest.TestCase):
    """Unit tests for the fail2ban client output Beautifier.

    Each test sets the input command via setInputCmd() and checks that
    beautify()/beautifyError() renders the server response into the
    expected human-readable text.
    """

    def setUp(self):
        super(BeautifierTest, self).setUp()
        # Fresh Beautifier per test; state is only the input command.
        self.b = Beautifier()

    def tearDown(self):
        super(BeautifierTest, self).tearDown()

    def testGetInputCmd(self):
        # The stored input command round-trips unchanged.
        cmd = ['test']
        self.b.setInputCmd(cmd)
        self.assertEqual(self.b.getInputCmd(), cmd)

    def testPing(self):
        self.b.setInputCmd(['ping'])
        self.assertEqual(self.b.beautify('pong'), 'Server replied: pong')

    def testVersion(self):
        # Version output is passed through verbatim.
        self.b.setInputCmd(['version'])
        self.assertEqual(self.b.beautify(version), version)

    def testAddJail(self):
        self.b.setInputCmd(['add'])
        self.assertEqual(self.b.beautify('ssh'), 'Added jail ssh')

    def testStartJail(self):
        self.b.setInputCmd(['start'])
        self.assertEqual(self.b.beautify(None), 'Jail started')

    def testStopJail(self):
        # 'stop <jail>' stops a single jail ...
        self.b.setInputCmd(['stop', 'ssh'])
        self.assertEqual(self.b.beautify(None), 'Jail stopped')

    def testShutdown(self):
        # ... while bare 'stop' shuts the whole server down.
        self.b.setInputCmd(['stop'])
        self.assertEqual(self.b.beautify(None), 'Shutdown successful')

    def testStatus(self):
        # Global status: a tree of (label, value) tuples.
        self.b.setInputCmd(['status'])
        response = (('Number of jails', 0), ('Jail list', ['ssh', 'exim4']))
        output = 'Status\n|- Number of jails:\t0\n`- Jail list:\tssh exim4'
        self.assertEqual(self.b.beautify(response), output)
        # Per-jail status: nested sections; note IPv4-mapped IPv6
        # (::ffff:10.2.2.1) is rendered in its IPv4 form.
        self.b.setInputCmd(['status', 'ssh'])
        response = (('Filter', [('Currently failed', 0), ('Total failed', 0), ('File list', '/var/log/auth.log')]), ('Actions', [('Currently banned', 3), ('Total banned', 3), ('Banned IP list', [IPAddr('192.168.0.1'), IPAddr('::ffff:10.2.2.1'), IPAddr('2001:db8::1')])]))
        output = 'Status for the jail: ssh\n'
        output += '|- Filter\n'
        output += '| |- Currently failed:\t0\n'
        output += '| |- Total failed:\t0\n'
        output += '| `- File list:\t/var/log/auth.log\n'
        output += '`- Actions\n'
        output += ' |- Currently banned:\t3\n'
        output += ' |- Total banned:\t3\n'
        output += ' `- Banned IP list:\t192.168.0.1 10.2.2.1 2001:db8::1'
        self.assertEqual(self.b.beautify(response), output)

    def testFlushLogs(self):
        self.b.setInputCmd(['flushlogs'])
        self.assertEqual(self.b.beautify('rolled over'), 'logs: rolled over')

    def testSyslogSocket(self):
        self.b.setInputCmd(['get', 'syslogsocket'])
        output = 'Current syslog socket is:\n`- auto'
        self.assertEqual(self.b.beautify('auto'), output)

    def testLogTarget(self):
        self.b.setInputCmd(['get', 'logtarget'])
        output = 'Current logging target is:\n`- /var/log/fail2ban.log'
        self.assertEqual(self.b.beautify('/var/log/fail2ban.log'), output)

    def testLogLevel(self):
        self.b.setInputCmd(['get', 'loglevel'])
        output = "Current logging level is 'INFO'"
        self.assertEqual(self.b.beautify('INFO'), output)

    def testDbFile(self):
        self.b.setInputCmd(['get', 'dbfile'])
        response = '/var/lib/fail2ban/fail2ban.sqlite3'
        output = ('Current database file is:\n`- ' + response)
        self.assertEqual(self.b.beautify(response), output)
        # None means the database feature is switched off.
        self.assertEqual(self.b.beautify(None), 'Database currently disabled')

    def testDbPurgeAge(self):
        self.b.setInputCmd(['get', 'dbpurgeage'])
        output = 'Current database purge age is:\n`- 86400seconds'
        self.assertEqual(self.b.beautify(86400), output)
        self.assertEqual(self.b.beautify(None), 'Database currently disabled')

    def testLogPath(self):
        self.b.setInputCmd(['get', 'sshd', 'logpath'])
        # Empty list -> nothing monitored.
        response = []
        output = 'No file is currently monitored'
        self.assertEqual(self.b.beautify(response), output)
        response = ['/var/log/auth.log']
        output = 'Current monitored log file(s):\n`- /var/log/auth.log'
        self.assertEqual(self.b.beautify(response), output)
        # Adding a path re-renders the full (multi-line) list ...
        self.b.setInputCmd(['set', 'sshd', 'addlogpath', '/var/log/messages'])
        response = ['/var/log/messages', '/var/log/auth.log']
        outputadd = 'Current monitored log file(s):\n'
        outputadd += '|- /var/log/messages\n`- /var/log/auth.log'
        self.assertEqual(self.b.beautify(response), outputadd)
        # ... and deleting it brings back the single-entry rendering.
        self.b.setInputCmd(['set', 'sshd', 'dellogpath', '/var/log/messages'])
        response = ['/var/log/auth.log']
        self.assertEqual(self.b.beautify(response), output)

    def testLogEncoding(self):
        self.b.setInputCmd(['get', 'sshd', 'logencoding'])
        output = 'Current log encoding is set to:\nUTF-8'
        self.assertEqual(self.b.beautify('UTF-8'), output)

    def testJournalMatch(self):
        self.b.setInputCmd(['get', 'sshd', 'journalmatch'])
        self.assertEqual(self.b.beautify([]), 'No journal match filter set')
        # Filters are joined with ' + ' between match groups.
        self.b.setInputCmd(['set', 'sshd', 'addjournalmatch'])
        response = [['_SYSTEMD_UNIT', 'sshd.service']]
        output = 'Current match filter:\n'
        output += '_SYSTEMD_UNIT sshd.service'
        self.assertEqual(self.b.beautify(response), output)
        response.append(['_COMM', 'sshd'])
        output += ' + _COMM sshd'
        self.assertEqual(self.b.beautify(response), output)
        self.b.setInputCmd(['set', 'sshd', 'deljournalmatch'])
        response.remove(response[1])
        self.assertEqual(self.b.beautify(response), output.split(' + ')[0])

    def testDatePattern(self):
        self.b.setInputCmd(['get', 'sshd', 'datepattern'])
        output = 'Current date pattern set to: '
        # Responses are (pattern, description) pairs; None -> not required.
        response = (None, 'Default Detectors')
        self.assertEqual(self.b.beautify(None), (output + 'Not set/required'))
        self.assertEqual(self.b.beautify(response), (output + 'Default Detectors'))
        self.assertEqual(self.b.beautify(('test', 'test')), (output + 'test (test)'))

    def testIgnoreIP(self):
        self.b.setInputCmd(['get', 'sshd', 'ignoreip'])
        output = 'No IP address/network is ignored'
        self.assertEqual(self.b.beautify([]), output)
        # CIDR networks keep their prefix; IPv4-mapped IPv6 is shown as IPv4.
        self.b.setInputCmd(['set', 'sshd', 'addignoreip'])
        response = [IPAddr('127.0.0.0', 8), IPAddr('::1'), IPAddr('2001:db8::', 32), IPAddr('::ffff:10.0.2.1')]
        output = 'These IP addresses/networks are ignored:\n'
        output += '|- 127.0.0.0/8\n'
        output += '|- ::1\n'
        output += '|- 2001:db8::/32\n'
        output += '`- 10.0.2.1'
        self.assertEqual(self.b.beautify(response), output)

    def testFailRegex(self):
        self.b.setInputCmd(['get', 'sshd', 'failregex'])
        output = 'No regular expression is defined'
        self.assertEqual(self.b.beautify([]), output)
        # Regexes are listed with their index.
        output = 'The following regular expression are defined:\n'
        output += '|- [0]: ^$\n`- [1]: .*'
        self.assertEqual(self.b.beautify(['^$', '.*']), output)

    def testActions(self):
        self.b.setInputCmd(['get', 'sshd', 'actions'])
        output = 'No actions for jail sshd'
        self.assertEqual(self.b.beautify([]), output)
        output = 'The jail sshd has the following actions:\n'
        output += 'iptables-multiport'
        self.assertEqual(self.b.beautify(['iptables-multiport']), output)

    def testActionProperties(self):
        self.b.setInputCmd(['get', 'sshd', 'actionproperties', 'iptables'])
        output = 'No properties for jail sshd action iptables'
        self.assertEqual(self.b.beautify([]), output)
        output = 'The jail sshd action iptables has the following properties:'
        output += '\nactionban, actionunban'
        response = ('actionban', 'actionunban')
        self.assertEqual(self.b.beautify(response), output)

    def testActionMethods(self):
        self.b.setInputCmd(['get', 'sshd', 'actionmethods', 'iptables'])
        output = 'No methods for jail sshd action iptables'
        self.assertEqual(self.b.beautify([]), output)
        output = 'The jail sshd action iptables has the following methods:\n'
        output += 'ban, unban'
        self.assertEqual(self.b.beautify(['ban', 'unban']), output)

    def testBeautifyError(self):
        # Known exceptions get friendly messages; anything else is generic.
        response = UnknownJailException('sshd')
        output = "Sorry but the jail 'sshd' does not exist"
        self.assertEqual(self.b.beautifyError(response), output)
        response = DuplicateJailException('sshd')
        output = "The jail 'sshd' already exists"
        self.assertEqual(self.b.beautifyError(response), output)
        output = 'Sorry but the command is invalid'
        self.assertEqual(self.b.beautifyError(IndexError()), output)
# NOTE(review): reconstructed the stripped decorator as @mock.patch -- the
# patch target string survived, and the test receives a ``mock_boto3`` arg.
@mock.patch('foremast.utils.awslambda.boto3.Session')
def test_get_lambda_alias_arn_failure(mock_boto3):
    """A missing Lambda alias must raise LambdaAliasDoesNotExist.

    boto3's Session is patched so no real AWS call is made.
    """
    client = mock_boto3.return_value.client.return_value
    client.list_aliases.return_value = lambda_no_alias_list_mock()
    with pytest.raises(LambdaAliasDoesNotExist):
        get_lambda_alias_arn('lambdatest', 'dev', 'us-east-1')
from dataclasses import dataclass  # safety net; harmless if already imported at file top


# NOTE(review): restored the @dataclass decorator -- the annotated field
# defaults and ``__post_init__`` below only take effect on a dataclass.
# Confirm against upstream.
@dataclass
class WorkflowMethodTask(ITask):
    """Drives one workflow-method invocation as an asyncio task.

    Lifecycle: __post_init__ schedules init_workflow_instance(), which
    constructs the workflow object and then schedules workflow_main(),
    which invokes the workflow coroutine and reports the outcome to the
    decider.  ``self.decider``, ``self.status`` and ``is_done()`` are
    presumably provided by ITask -- verify.
    """

    task_id: str = None
    workflow_input: Payloads = None
    worker: Worker = None
    workflow_type: WorkflowType = None
    workflow_instance: object = None
    ret_value: object = None
    data_converter: DataConverter = None

    def __post_init__(self):
        logger.debug(f'[task-{self.task_id}] Created')
        # Kick off asynchronous construction of the workflow instance.
        self.task = asyncio.get_event_loop().create_task(self.init_workflow_instance())

    async def init_workflow_instance(self):
        """Instantiate the workflow class, then schedule the main coroutine."""
        current_task.set(self)
        (cls, _) = self.worker.get_workflow_method(self.workflow_type.name)
        try:
            self.workflow_instance = cls()
            # Instance ready: chain into the main workflow coroutine.
            self.task = asyncio.get_event_loop().create_task(self.workflow_main())
        except Exception as ex:
            logger.error(f'Initialization of Workflow {self.workflow_type.name} failed', exc_info=1)
            self.decider.fail_workflow_execution(ex)
            self.status = Status.DONE

    async def workflow_main(self):
        """Decode the input, run the workflow method and report the result."""
        logger.debug(f'[task-{self.task_id}] Running')
        if self.is_done():
            return
        current_task.set(self)
        if self.workflow_type.name not in self.worker.workflow_methods:
            self.status = Status.DONE
            ex = WorkflowTypeNotFound(self.workflow_type.name)
            logger.error(f'Workflow type not found: {self.workflow_type.name}')
            self.decider.fail_workflow_execution(ex)
            return
        (cls, workflow_proc) = self.worker.get_workflow_method(self.workflow_type.name)
        if self.workflow_input is None:
            workflow_input = []
        else:
            # Drop the 'self' hint before decoding the payloads into args.
            type_hints = get_fn_args_type_hints(workflow_proc)
            type_hints.pop(0)
            workflow_input = self.data_converter.from_payloads(self.workflow_input, type_hints)
        self.status = Status.RUNNING
        try:
            logger.info(f'Invoking workflow {self.workflow_type.name}({str(workflow_input)[1:(- 1)]})')
            self.ret_value = (await workflow_proc(self.workflow_instance, *workflow_input))
            logger.info(f'Workflow {self.workflow_type.name}({str(workflow_input)[1:(- 1)]}) returned {self.ret_value}')
            self.decider.complete_workflow_execution(self.ret_value)
        except CancelledError:
            # Cancellation is a normal shutdown path, not a workflow failure.
            logger.debug('Coroutine cancelled (expected)')
        except Exception as ex:
            logger.error(f'Workflow {self.workflow_type.name}({str(workflow_input)[1:(- 1)]}) failed', exc_info=1)
            self.decider.fail_workflow_execution(ex)
        finally:
            self.status = Status.DONE

    def get_workflow_instance(self):
        """Return the constructed workflow object (None until init completes)."""
        return self.workflow_instance
# NOTE(review): reconstructed the stripped marker as @pytest.mark.integration_saas.
@pytest.mark.integration_saas
class TestHeapConnector:
    """SaaS-integration tests for the Heap connector."""

    def test_connection(self, heap_runner: ConnectorRunner):
        # Smoke test: the connector can reach and authenticate against Heap.
        heap_runner.test_connection()

    async def test_non_strict_erasure_request(self, heap_runner: ConnectorRunner, policy: Policy, erasure_policy_string_rewrite: Policy, heap_erasure_identity_email: str):
        """A non-strict erasure request must mask exactly one Heap user row."""
        (access_results, erasure_results) = (await heap_runner.non_strict_erasure_request(access_policy=policy, erasure_policy=erasure_policy_string_rewrite, identities={'email': heap_erasure_identity_email}))
        assert erasure_results == {'heap_instance:user': 1}
# NOTE(review): reconstructed the stripped decorators (@requires_toolkit and
# @unittest.skipIf) from their surviving argument lists -- confirm upstream.
@requires_toolkit([ToolkitName.qt])
@unittest.skipIf(NO_WEBKIT_OR_WEBENGINE, 'Tests require either QtWebKit or QtWebEngine')
class TestHTMLEditor(BaseTestMixin, unittest.TestCase):
    """UI tests for the Qt HTMLEditor link-handling behaviour."""

    def setUp(self):
        BaseTestMixin.setUp(self)
        self.tester = get_custom_ui_tester()

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    def test_init_and_dispose(self):
        # Creating and disposing the UI must not raise.
        model = HTMLModel()
        view = get_view(base_url_name='')
        with self.tester.create_ui(model, dict(view=view)):
            pass

    def test_base_url_changed(self):
        # Changing base_url while the UI is live must not raise.
        model = HTMLModel()
        view = get_view(base_url_name='model_base_url')
        with self.tester.create_ui(model, dict(view=view)):
            pass
        model.model_base_url = '/new_dir'

    @requires_toolkit([ToolkitName.qt])
    def test_open_internal_link(self):
        # target='_self' links are handled inside the widget by default.
        model = HTMLModel(content="\n <html>\n <a\n href='/#'\n target='_self'\n style='display:block; width: 100%; height: 100%'>\n Internal Link\n </a>\n </html>\n ")
        view = View(Item('content', editor=HTMLEditor()))
        with self.tester.create_ui(model, dict(view=view)) as ui:
            html_view = self.tester.find_by_name(ui, 'content')
            with mock.patch('webbrowser.open_new') as mocked_browser:
                html_view.perform(MouseClick())
            mocked_browser.assert_not_called()

    @requires_toolkit([ToolkitName.qt])
    def test_open_external_link(self):
        # target='_blank' links also stay internal unless open_externally is set.
        model = HTMLModel(content="\n <html>\n <a\n href='test://testing'\n target='_blank'\n style='display:block; width: 100%; height: 100%'>\n External Link\n </a>\n </html>\n ")
        view = View(Item('content', editor=HTMLEditor()))
        with self.tester.create_ui(model, dict(view=view)) as ui:
            html_view = self.tester.find_by_name(ui, 'content')
            with mock.patch('webbrowser.open_new') as mocked_browser:
                html_view.perform(MouseClick())
            self.assertIn('External Link', html_view.inspect(HTMLContent()))
            mocked_browser.assert_not_called()

    @requires_toolkit([ToolkitName.qt])
    def test_open_internal_link_externally(self):
        # With open_externally=True even target='_self' goes to the browser.
        model = HTMLModel(content="\n <html>\n <a\n href='test://testing'\n target='_self'\n style='display:block; width: 100%; height: 100%'>\n Internal Link\n </a>\n </html>\n ")
        view = View(Item('content', editor=HTMLEditor(open_externally=True)))
        with self.tester.create_ui(model, dict(view=view)) as ui:
            html_view = self.tester.find_by_name(ui, 'content')
            with mock.patch('webbrowser.open_new') as mocked_browser:
                html_view.perform(MouseClick())
            self.assertIn('Internal Link', html_view.inspect(HTMLContent()))
            mocked_browser.assert_called_once_with('test://testing')

    @requires_toolkit([ToolkitName.qt])
    def test_open_external_link_externally(self):
        # Behaviour differs between QtWebKit and QtWebEngine backends here.
        model = HTMLModel(content="\n <html>\n <a\n href='test://testing'\n target='_blank'\n style='display:block; width: 100%; height: 100%'>\n External Link\n </a>\n </html>\n ")
        view = View(Item('content', editor=HTMLEditor(open_externally=True)))
        with self.tester.create_ui(model, dict(view=view)) as ui:
            html_view = self.tester.find_by_name(ui, 'content')
            with mock.patch('webbrowser.open_new') as mocked_browser:
                html_view.perform(MouseClick())
            self.assertIn('External Link', html_view.inspect(HTMLContent()))
            is_webkit = _is_webkit_page(html_view._target.control.page())
            if is_webkit:
                mocked_browser.assert_called_once_with('test://testing')
            else:
                mocked_browser.assert_not_called()
# NOTE(review): restored the stripped @pytest.mark.parametrize decorators.
@pytest.mark.parametrize('value, valuefilename', [
    ([1, 2], '1-2.dlis.part'),
    ([0.5, 1.5], '0.5-1.5.dlis.part'),
    ([False, True], 'False-True.dlis.part'),
    (['val1', 'val2'], 'string-val1,val2.dlis.part'),
    ([(0.5, 1.5), (2.5, 3.5)], 'validated-(0.5-1.5),(2.5-3.5).dlis.part'),
    ([complex(0.5, 1.5), complex(2.5, 3.5)], 'complex-(0.5-1.5),(2.5-3.5).dlis.part'),
    ([datetime(2001, 1, 1), datetime(2002, 2, 2)], '2001-Jan-1,2002-Feb-2.dlis.part')])
@pytest.mark.parametrize('settype', ['PARAMETER', 'COMPUTATION'])
def test_parameter_computation_repcode(tmpdir, merge_files_oneLR, settype, value, valuefilename):
    """PARAMETER/COMPUTATION values of each representation code survive a load round-trip."""
    path = os.path.join(str(tmpdir), 'pc-repcode.dlis')
    # Assemble a single logical record: set header + value attribute + object name.
    content = [*assemble_set(settype), ('data/chap4-7/eflr/ndattrs/objattr/' + valuefilename), 'data/chap4-7/eflr/ndattrs/objattr/2.dlis.part', 'data/chap4-7/eflr/ndattrs/objattr/empty-OBNAME.dlis.part']
    merge_files_oneLR(path, content)
    with dlis.load(path) as (f, *_):
        obj = f.object(settype, 'OBJECT', 10, 0)
        assert np.array_equal(obj.values[0], value)
class TransformerModel(Model):
    """Thinc Model wrapping a HuggingFace transformer plus its tokenizer.

    Fix: restored @property on the accessors below -- each returns a value
    from the wrapped shim and was clearly written as an attribute-style
    getter.
    """

    def __init__(self, name: str, get_spans: Callable, tokenizer_config: dict={}, transformer_config: dict={}, mixed_precision: bool=False, grad_scaler_config: dict={}):
        """Build the wrapper model; the HF objects are created lazily at init time.

        NOTE(review): the mutable dict defaults are kept for interface
        compatibility; they are only stored/forwarded here, never mutated
        in this block.
        """
        hf_model = HFObjects(None, None, None, tokenizer_config, transformer_config)
        wrapper = HFWrapper(hf_model, convert_inputs=_convert_transformer_inputs, convert_outputs=_convert_transformer_outputs, mixed_precision=mixed_precision, grad_scaler_config=grad_scaler_config)
        super().__init__('transformer', forward, init=init, layers=[wrapper], dims={'nO': None}, attrs={'get_spans': get_spans, 'name': name, 'set_transformer': set_pytorch_transformer, 'has_transformer': False, 'flush_cache_chance': 0.0, 'replace_listener': replace_listener, 'replace_listener_cfg': replace_listener_cfg})

    @property
    def tokenizer(self):
        # Tokenizer held by the wrapped shim.
        return self.layers[0].shims[0]._hfmodel.tokenizer

    @property
    def transformer(self):
        # Transformer held by the wrapped shim.
        return self.layers[0].shims[0]._hfmodel.transformer

    @property
    def _init_tokenizer_config(self):
        return self.layers[0].shims[0]._hfmodel._init_tokenizer_config

    @property
    def _init_transformer_config(self):
        return self.layers[0].shims[0]._hfmodel._init_transformer_config

    def copy(self):
        """Deep-copy the model: params, dims, grads and the wrapped layer."""
        copied = TransformerModel(self.name, self.attrs['get_spans'])
        params = {}
        for name in self.param_names:
            params[name] = (self.get_param(name) if self.has_param(name) else None)
        copied.params = copy.deepcopy(params)
        copied.dims = copy.deepcopy(self._dims)
        copied.layers[0] = copy.deepcopy(self.layers[0])
        for name in self.grad_names:
            copied.set_grad(name, self.get_grad(name).copy())
        return copied
def extractOinktranslationsHomeBlog(item):
    """Map an 'Oink Translations' feed item to a release message.

    Returns None for previews or items without a chapter/volume, False when
    no known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    tagmap = [
        ('his royal highness, wants a divorce!', 'his royal highness, wants a divorce!', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    # First tag that appears in the item's tags wins.
    matched = next(((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']), None)
    if matched is None:
        return False
    name, tl_type = matched
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class CloudBillingRepositoryClient(_base_repository.BaseRepositoryClient):
    """Cloud Billing API repository client.

    Fix: restored @property on the two lazily-initialised repository
    accessors, matching the attribute-style caching pattern they implement.
    """

    def __init__(self, quota_max_calls=None, quota_period=60.0, use_rate_limiter=True, cache_discovery=False, cache=None):
        """Initialise the client.

        Args:
            quota_max_calls: Allowed requests per quota_period; a falsy
                value disables rate limiting entirely.
            quota_period: Quota window in seconds.
            use_rate_limiter: Whether to rate-limit API calls.
            cache_discovery: Whether to cache the API discovery doc.
            cache: googleapiclient cache object for discovery docs.
        """
        if not quota_max_calls:
            # No quota configured -> rate limiting is meaningless.
            use_rate_limiter = False
        self._billing_accounts = None
        self._projects = None
        super(CloudBillingRepositoryClient, self).__init__(API_NAME, versions=['v1'], quota_max_calls=quota_max_calls, quota_period=quota_period, use_rate_limiter=use_rate_limiter, cache_discovery=cache_discovery, cache=cache)

    @property
    def billing_accounts(self):
        """Lazily-created billing accounts repository."""
        if not self._billing_accounts:
            self._billing_accounts = self._init_repository(_CloudBillingBillingAccountsRepository)
        return self._billing_accounts

    @property
    def projects(self):
        """Lazily-created projects repository."""
        if not self._projects:
            self._projects = self._init_repository(_CloudBillingProjectsRepository)
        return self._projects
class OptionSeriesTreegraphSonificationTracksMappingVolume(Options):
    """Volume mapping options for treegraph sonification tracks.

    Fix: restored the @property/@<name>.setter pairs -- without the
    decorators each second ``def`` of the same name shadowed its getter.
    (``max``/``min`` intentionally mirror the upstream option names even
    though they shadow builtins.)
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class SlateWrapperBag:
    """Mutable state bag used while building a Slate kernel wrapper."""

    def __init__(self, coeffs, constants):
        """Store kernel inputs and reset all translation-time flags."""
        # Inputs handed to the wrapper builder.
        self.coefficients = coeffs
        self.constants = constants
        # Loop names discovered during translation, in insertion order.
        self.inames = OrderedDict()
        # Feature flags, flipped on demand as the wrapper is assembled.
        self.needs_cell_orientations = self.needs_cell_sizes = False
        self.needs_cell_facets = self.needs_mesh_layers = False
        # Fresh-name helpers for generated kernel calls and loop indices.
        self.call_name_generator = UniqueNameGenerator(forced_prefix='tsfc_kernel_call_')
        self.index_creator = IndexCreator()
class TacotronOneSeqwiseMultispeakerLSTMs(TacotronOneLSTMsBlock):
    """Multi-speaker Tacotron variant: a speaker embedding is concatenated to
    every phone embedding and projected back to embedding_dim.

    Fix: the parent constructor previously received hard-coded literals
    (embedding_dim=256, mel_dim=80, linear_dim=1025, r=5, padding_idx=None,
    use_memory_mask=False) instead of the caller-supplied values; the
    parameters are now forwarded.  The speaker-embedding width is also
    exposed as ``spk_embedding_dim`` (default 128, backward compatible).
    """

    def __init__(self, n_vocab, embedding_dim=256, mel_dim=80, linear_dim=1025, r=5, num_spk=2, padding_idx=None, use_memory_mask=False, spk_embedding_dim=128):
        super(TacotronOneSeqwiseMultispeakerLSTMs, self).__init__(
            n_vocab, embedding_dim=embedding_dim, mel_dim=mel_dim,
            linear_dim=linear_dim, r=r, padding_idx=padding_idx,
            use_memory_mask=use_memory_mask)
        self.num_spk = num_spk
        # Speaker lookup table; its width is appended to every phone embedding.
        self.spk_embedding = nn.Embedding(self.num_spk, spk_embedding_dim)
        self.embedding = nn.Embedding(n_vocab, embedding_dim)
        # Projects the concatenated [phone ; speaker] vector back to embedding_dim.
        self.phonesNspk2embedding = SequenceWise(nn.Linear(embedding_dim + spk_embedding_dim, embedding_dim))

    def forward(self, inputs, spk, targets=None, input_lengths=None):
        """Run text+speaker through encoder/decoder; returns (mel, linear, alignments)."""
        B = inputs.size(0)
        inputs = self.embedding(inputs)
        spk_embedding = self.spk_embedding(spk)
        # Broadcast the speaker vector across the time axis before concatenation.
        spk_embedding = spk_embedding.unsqueeze(1).expand(-1, inputs.size(1), -1)
        inputs = torch.cat([inputs, spk_embedding], dim=-1)
        inputs = torch.tanh(self.phonesNspk2embedding(inputs))
        encoder_outputs = self.encoder(inputs, input_lengths)
        (mel_outputs, alignments) = self.decoder(encoder_outputs, targets, memory_lengths=input_lengths)
        mel_outputs = mel_outputs.view(B, -1, self.mel_dim)
        linear_outputs = self.postnet(mel_outputs)
        linear_outputs = self.last_linear(linear_outputs)
        return (mel_outputs, linear_outputs, alignments)
def _install_package(module, conda, installed, name, version, installed_version):
    """Install *name* (optionally pinned to *version*) via conda.

    Exits the module early when the requested state is already satisfied,
    honours check mode, and reports stdout/stderr of the conda run.
    """
    # Already present at the requested (or any) version: nothing to do.
    satisfied = installed and (version is None or installed_version == version)
    if satisfied:
        module.exit_json(changed=False, name=name, version=version)
    if module.check_mode:
        # Dry run: report that a change would happen, without touching conda.
        if (not installed) or (installed and installed_version != version):
            module.exit_json(changed=True)
    spec = ('%s=%s' % (name, version)) if version else name
    cmd = [conda, 'install', '--yes', spec]
    cmd = _add_channels_to_command(cmd, module.params['channels'])
    cmd = _add_extras_to_command(cmd, module.params['extra_args'])
    rc, stdout, stderr = module.run_command(cmd)
    if rc != 0:
        module.fail_json(msg='failed to install package ' + name)
    module.exit_json(changed=True, name=name, version=version, stdout=stdout, stderr=stderr)
# NOTE(review): the click decorators below were reconstructed from their
# surviving argument lists; confirm the command-group name ('translations')
# against the module that defines the group.
@translations.command('update')
@click.option('is_all', '--all', '-a', default=False, is_flag=True, help='Updates the plugin translations as well.')
@click.option('--plugin', '-p', type=click.STRING, help='Updates the language of the given plugin.')
def update_translation(is_all, plugin):
    """Update translation catalogs, either globally or for a single plugin."""
    if plugin is not None:
        # Fail fast if the named plugin does not exist.
        validate_plugin(plugin)
        click.secho('[+] Updating language files for plugin {}...'.format(plugin), fg='cyan')
        update_plugin_translations(plugin)
    else:
        click.secho('[+] Updating language files...', fg='cyan')
        update_translations(include_plugins=is_all)
def test_task_failed():
    """Task.failed is True only for a future holding a regular exception.

    No future, a pending future, a successful result, and a
    BaseException-style exit (KeyboardInterrupt) must all read as
    not-failed; only a genuine error (ValueError) counts as failure.
    """
    t = Task()
    assert not t.failed          # no future attached yet
    t.future = asyncio.Future()
    assert not t.failed          # future still pending
    t.future.set_result(None)
    assert not t.failed          # completed successfully
    t.future = asyncio.Future()
    t.future.set_exception(KeyboardInterrupt())
    assert not t.failed          # interpreter-exit exceptions don't count
    t.future = asyncio.Future()
    t.future.set_exception(ValueError())
    assert t.failed              # a real error does
def test_event_graph_accumulated_time_before_start(mocker: Any) -> None:
    """Appending an event whose time precedes the graph start must raise LabgraphError."""
    msg = DeferredMessage(MyMessage, 'unittest_args', kwargs_field='unittest_kwargs')
    topic = Topic(MyMessage)
    start_event = Event(msg, topic, 0.0, 1.0)
    graph = EventGraph(start_event)
    parent_event = Event(msg, topic, 0.0, 1.0)
    # Negative delay: would land before the graph's start.
    early_child = Event(msg, topic, -3.0, 1.0)
    graph.add_event_at_end(parent_event, start_event)
    with pytest.raises(LabgraphError):
        graph.add_event_at_end(early_child, parent_event)
class TaskLog(_common.FlyteIdlEntity):
    """A single log link attached to a task execution.

    Fixes: restored @property on the accessors -- ``to_flyte_idl`` reads
    ``self.uri`` / ``self.name`` / ``self.message_format`` as values, which
    would otherwise yield bound methods -- and @classmethod on
    ``from_flyte_idl``, which takes ``cls``.
    """

    class MessageFormat(object):
        # Mirrors the protobuf TaskLog.MessageFormat enum values.
        UNKNOWN = _execution_pb2.TaskLog.UNKNOWN
        CSV = _execution_pb2.TaskLog.CSV
        JSON = _execution_pb2.TaskLog.JSON

    def __init__(self, uri: str, name: str, message_format: typing.Optional[MessageFormat]=None, ttl: typing.Optional[datetime.timedelta]=None):
        """
        :param uri: Location of the log.
        :param name: Human-readable name for the log link.
        :param message_format: One of MessageFormat.*, or None.
        :param ttl: How long the link remains valid, or None.
        """
        self._uri = uri
        self._name = name
        self._message_format = message_format
        self._ttl = ttl

    @property
    def uri(self):
        return self._uri

    @property
    def name(self):
        return self._name

    @property
    def message_format(self):
        return self._message_format

    @property
    def ttl(self):
        return self._ttl

    def to_flyte_idl(self):
        """Serialize to the protobuf TaskLog message."""
        p = _execution_pb2.TaskLog(uri=self.uri, name=self.name, message_format=self.message_format)
        if self.ttl is not None:
            p.ttl.FromTimedelta(self.ttl)
        return p

    @classmethod
    def from_flyte_idl(cls, p):
        """Build a TaskLog from its protobuf representation."""
        return cls(uri=p.uri, name=p.name, message_format=p.message_format, ttl=(p.ttl.ToTimedelta() if p.ttl else None))
class ServiceManagementClient(object):
    """Client for the Google Service Management API.

    Wraps a rate-limited repository client and flattens the paged 'services'
    list results.  All methods convert HTTP/transport errors into
    ApiExecutionError, log them, and re-raise.
    """

    # Page size used for every services.list call.
    DEFAULT_MAX_RESULTS = 100

    def __init__(self, global_configs, **kwargs):
        # Rate limits come from the per-API section of global_configs.
        (max_calls, quota_period) = api_helpers.get_ratelimiter_config(global_configs, API_NAME)
        cache_discovery = (global_configs['cache_discovery'] if ('cache_discovery' in global_configs) else False)
        self.repository = ServiceManagementRepositoryClient(quota_max_calls=max_calls, quota_period=quota_period, use_rate_limiter=kwargs.get('use_rate_limiter', True), cache_discovery=cache_discovery, cache=global_configs.get('cache'))

    def get_all_apis(self):
        """Return the flattened list of all services visible to the caller."""
        try:
            paged_results = self.repository.services.list(max_results=self.DEFAULT_MAX_RESULTS)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'services')
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('', e)
            LOGGER.exception(api_exception)
            raise api_exception
        LOGGER.debug('Getting all visible APIs, flattened_results = %s', flattened_results)
        return flattened_results

    def get_produced_apis(self, project_id):
        """Return the services produced by *project_id* (flattened)."""
        try:
            paged_results = self.repository.services.list(producerProjectId=project_id, max_results=self.DEFAULT_MAX_RESULTS)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'services')
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('name', e, 'project_id', project_id)
            LOGGER.exception(api_exception)
            raise api_exception
        LOGGER.debug('Getting the APIs produced by a project, project_id = %s, flattened_results = %s', project_id, flattened_results)
        return flattened_results

    def get_enabled_apis(self, project_id):
        """Return the services enabled on *project_id* (flattened).

        The project id is first formatted into the consumerId form the API
        expects.
        """
        formatted_project_id = self.repository.services.get_formatted_project_name(project_id)
        try:
            paged_results = self.repository.services.list(consumerId=formatted_project_id, max_results=self.DEFAULT_MAX_RESULTS)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'services')
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('name', e, 'project_id', project_id)
            LOGGER.exception(api_exception)
            raise api_exception
        LOGGER.debug('Getting the enabled APIs for a project, project_id = %s, flattened_results = %s', project_id, flattened_results)
        return flattened_results

    def get_api_iam_policy(self, service_name):
        """Return the IAM policy attached to *service_name*."""
        name = self.repository.services.get_formatted_service_name(service_name)
        try:
            result = self.repository.services.get_iam_policy(name)
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('serviceIamPolicy', e, 'serviceName', service_name)
            LOGGER.exception(api_exception)
            raise api_exception
        LOGGER.debug('Getting IAM Policy for a service, service_name = %s, result = %s', service_name, result)
        return result

    def get_full_api_configuration(self, service_name):
        """Return the full service configuration for *service_name*."""
        try:
            result = self.repository.services.get_config(service_name)
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('serviceConfig', e, 'serviceName', service_name)
            LOGGER.exception(api_exception)
            raise api_exception
        LOGGER.debug('Getting Service Config for a service, service_name = %s, result = %s', service_name, result)
        return result
def get_data(first_url, second_url):
    """Fetch both pages and extract candidate subdomains with two regex passes.

    Returns a 4-tuple: (first page / <TD> filter, second page / <TD> filter,
    first page / BR filter, second page / BR filter), each run through
    filter_crt().
    """
    pages = [str(send_request(url).content) for url in (first_url, second_url)]
    # Pass 1: entries inside <TD>...</TD> cells.
    td_hits = [re.findall('(?:<TD>)(.*?)</TD>', page) for page in pages]
    # Pass 2: entries that follow a BR> tag.
    br_hits = [re.findall('(?:BR>)(.*?)<', page) for page in pages]
    return (
        filter_crt(td_hits[0]),
        filter_crt(td_hits[1]),
        filter_crt(br_hits[0]),
        filter_crt(br_hits[1]),
    )
def filter_firewall_vipgrp_data(json):
    """Keep only the supported firewall VIP-group fields that are set.

    Args:
        json: raw parameter dict (invalid fields removed first).

    Returns:
        dict containing only the whitelisted, non-None attributes.
    """
    option_list = ['color', 'comments', 'interface', 'member', 'name', 'uuid']
    cleaned = remove_invalid_fields(json)
    return {key: cleaned[key] for key in option_list if key in cleaned and cleaned[key] is not None}
def get_filesystem_event_details(io, metadata, event, extra_detail_io):
    """Parse the detail section of a filesystem event from a binary stream.

    Refines event.operation with a sub-operation name when one applies,
    reads the event path, and dispatches to a per-operation detail handler.

    NOTE(review): field layout (padding sizes, pointer-count constant)
    inferred from the reads below — confirm against the on-disk format spec.
    """
    # First byte is the sub-operation code; the next 3 bytes are skipped
    # (presumably alignment padding — verify).
    sub_operation = read_u8(io)
    io.seek(3, 1)
    if ((0 != sub_operation) and (FilesystemOperation[event.operation] in FilesystemSubOperations)):
        try:
            # Replace the generic operation name with the specific sub-operation name.
            event.operation = FilesystemSubOperations[FilesystemOperation[event.operation]](sub_operation).name
        except ValueError:
            # Sub-operation value not defined in the enum for this operation class.
            event.operation += ' <Unknown>'
    # Fixed-size details area: 5 pointer-sized fields plus 20 bytes.
    details_io = BytesIO(io.read(((metadata.sizeof_pvoid * 5) + 20)))
    path_info = read_detail_string_info(io)
    io.seek(2, 1)  # skip 2 bytes (presumably padding — verify)
    event.path = read_detail_string(io, path_info)
    # Optionally let an operation-specific handler extract extra details.
    if (metadata.should_get_details and (event.operation in FilesystemSubOperationHandler)):
        FilesystemSubOperationHandler[event.operation](io, metadata, event, details_io, extra_detail_io)
class WebhooksResultGenerator(CursorResultsGenerator):
    """Cursor-based results generator that maps raw webhook JSON to objects."""

    def process_page(self):
        """Convert every entry of the current page's 'webhooks' list."""
        mapper = self.response_handler.api._object_mapping
        return [mapper.object_from_json('webhook', item) for item in self._response_json['webhooks']]
# NOTE(review): bare `.django_db` below looks like residue of a stripped
# `@pytest.mark` decorator prefix — confirm against the original source.
.django_db
def test_extract_business_categories(monkeypatch):
    """extract_business_categories should merge the stored transaction
    business_categories with the derived 'category_business' entry."""
    recipient_hash = 'a52a7544-829b-c925-e1ba-d04d3171c09a'
    recipient_name = TEST_RECIPIENT_LOOKUPS[recipient_hash]['legal_business_name']
    recipient_uei = TEST_RECIPIENT_LOOKUPS[recipient_hash]['uei']
    business_categories = ['le', 'business', 'cat']
    # Stub the TransactionSearch queryset chain to return our categories.
    utm_objects = Mock()
    utm_objects.filter().order_by().values().first.return_value = {'business_categories': business_categories}
    monkeypatch.setattr('usaspending_api.search.models.TransactionSearch.objects', utm_objects)
    # Seed the lookup rows the function queries.
    baker.make('recipient.RecipientLookup', **TEST_RECIPIENT_LOOKUPS[recipient_hash])
    baker.make('recipient.DUNS', **TEST_DUNS[TEST_RECIPIENT_LOOKUPS[recipient_hash]['duns']])
    # Presumably 'category_business' is appended because a DUNS row exists — confirm derivation rule.
    expected_business_cat = (business_categories + ['category_business'])
    business_cat = recipients.extract_business_categories(recipient_name, recipient_uei, recipient_hash)
    assert (sorted(business_cat) == sorted(expected_business_cat))
def extract_filesystem_bundle(docker_driver, container_id=None, image_name=None):
    """Export a docker container (or image) filesystem into a temp directory.

    Args:
        docker_driver: object exposing get_docker_client().
        container_id: id of a container to export (takes precedence).
        image_name: name of an image to export when no container id is given.

    Returns:
        Path of the temporary directory holding the extracted tree.
    """
    temporary_dir = tempfile.mkdtemp()
    if (container_id is not None):
        image = docker_driver.get_docker_client().export(container=container_id)
        name = container_id
    else:
        image = docker_driver.get_docker_client().get_image(image=image_name)
        name = image_name.replace('/', '_').replace(':', '_')  # sanitize for a filename
    tar_path = os.path.join(temporary_dir, name + '.tar')
    with open(tar_path, 'wb') as file:
        for chunk in image:
            file.write(chunk)
    # Fix: the original left the TarFile handle open (resource leak);
    # use a context manager so it is always closed before the os.remove.
    # NOTE(review): extractall on an untrusted archive is path-traversal
    # prone; consider tarfile's `filter` argument (Python 3.12+).
    with TarFile(tar_path) as tar:
        tar.extractall(temporary_dir)
    os.remove(tar_path)
    if (image_name is not None):
        # Images are layered: manifest lists layer tars that must also be unpacked.
        layers = _get_layers_from_manifest(temporary_dir)
        _untar_layers(temporary_dir, layers)
    return temporary_dir
class OptionSeriesPyramidSonificationDefaultspeechoptionsActivewhen(Options):
    """Generated accessor wrapper for Highcharts `activeWhen` speech options.

    NOTE(review): each name appears twice (getter then setter). The original
    presumably used `@property` / `@<name>.setter` decorators that were lost
    in extraction — as written, each second definition shadows the first.
    Confirm against the code generator's output.
    """
    def crossingDown(self):
        # Getter: current crossingDown threshold (None when unset).
        return self._config_get(None)
    def crossingDown(self, num: float):
        # Setter: threshold for activating when value crosses downward.
        self._config(num, js_type=False)
    def crossingUp(self):
        # Getter: current crossingUp threshold (None when unset).
        return self._config_get(None)
    def crossingUp(self, num: float):
        # Setter: threshold for activating when value crosses upward.
        self._config(num, js_type=False)
    def max(self):
        # Getter: upper bound of the active range (None when unset).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: upper bound of the active range.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the active range (None when unset).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: lower bound of the active range.
        self._config(num, js_type=False)
    def prop(self):
        # Getter: point property the condition applies to (None when unset).
        return self._config_get(None)
    def prop(self, text: str):
        # Setter: point property name, e.g. 'y'.
        self._config(text, js_type=False)
class StartTransform(Runner):
    """Runner that starts an Elasticsearch transform by id."""

    async def __call__(self, es, params):
        """Start the transform named by the mandatory 'transform-id' param."""
        transform_id = mandatory(params, 'transform-id', self)
        await es.transform.start_transform(transform_id=transform_id, timeout=params.get('timeout'))

    def __repr__(self, *args, **kwargs):
        return 'start-transform'
class EfuseDefineFields(EfuseFieldsBase):
    def __init__(self) -> None:
        """Load efuse definitions from the chip's YAML file and split them
        into plain EFUSES, KEYBLOCKS and BLOCK2 calibration efuses.

        BLOCK_KEY0 additionally gets two virtual 128-bit half views
        (low half: flash encryption, high half: secure boot).
        """
        self.EFUSES = []
        self.KEYBLOCKS = []
        self.BLOCK2_CALIBRATION_EFUSES = []
        # Derive the YAML path as <grandparent dir>/efuse_defs/<parent dir name>.yaml
        # (i.e. the chip directory name picks the definition file).
        dir_name = os.path.dirname(os.path.abspath(__file__))
        (dir_name, file_name) = os.path.split(dir_name)
        file_name = (file_name + '.yaml')
        (dir_name, _) = os.path.split(dir_name)
        efuse_file = os.path.join(dir_name, 'efuse_defs', file_name)
        with open(f'{efuse_file}', 'r') as r_file:
            e_desc = yaml.safe_load(r_file)
        super().__init__(e_desc)
        for (i, efuse) in enumerate(self.ALL_EFUSES):
            if (efuse.name in ['BLOCK_KEY0']):
                self.KEYBLOCKS.append(efuse)
                # Virtual view: lower 128 bits of BLOCK_KEY0 (flash encryption key).
                BLOCK_KEY0_LOW_128 = copy.deepcopy(efuse)
                BLOCK_KEY0_LOW_128.name = 'BLOCK_KEY0_LOW_128'
                BLOCK_KEY0_LOW_128.type = 'bytes:16'
                BLOCK_KEY0_LOW_128.bit_len = (16 * 8)
                BLOCK_KEY0_LOW_128.description = 'BLOCK_KEY0 - lower 128-bits. 128-bit key of Flash Encryption'
                BLOCK_KEY0_LOW_128.read_disable_bit = efuse.read_disable_bit[0]
                self.KEYBLOCKS.append(BLOCK_KEY0_LOW_128)
                # Virtual view: upper 128 bits of BLOCK_KEY0 (secure boot key).
                BLOCK_KEY0_HI_128 = copy.deepcopy(efuse)
                BLOCK_KEY0_HI_128.name = 'BLOCK_KEY0_HI_128'
                BLOCK_KEY0_HI_128.word = 4
                BLOCK_KEY0_HI_128.type = 'bytes:16'
                BLOCK_KEY0_HI_128.bit_len = (16 * 8)
                BLOCK_KEY0_HI_128.description = 'BLOCK_KEY0 - higher 128-bits. 128-bits key of Secure Boot'
                BLOCK_KEY0_HI_128.read_disable_bit = efuse.read_disable_bit[1]
                self.KEYBLOCKS.append(BLOCK_KEY0_HI_128)
                # Mark as consumed so it is not re-added to the generic list below.
                self.ALL_EFUSES[i] = None
            elif (efuse.category == 'calibration'):
                self.BLOCK2_CALIBRATION_EFUSES.append(efuse)
                self.ALL_EFUSES[i] = None
        # Whatever was not claimed above is a plain efuse.
        for efuse in self.ALL_EFUSES:
            if (efuse is not None):
                self.EFUSES.append(efuse)
        self.ALL_EFUSES = []
class Dump:
    """Collects DumpVariable traces for waveform-style inspection.

    Variables can be added directly or derived from a (name, width) layout
    over a packed sample sequence; helper methods append synthetic clock and
    trigger traces sized to the longest variable.
    """

    def __init__(self):
        self.variables = []

    def add(self, variable):
        """Append one variable/trace to the dump."""
        self.variables.append(variable)

    def add_from_layout(self, layout, variable):
        """Split `variable` per the layout and add each field, with every
        sample duplicated (2x oversampling)."""
        start = 0
        for (name, width) in layout:
            chunk = variable[start:(start + width)]
            doubled = [chunk[(i // 2)] for i in range((len(chunk) * 2))]
            self.add(DumpVariable(name, width, doubled))
            start += width

    def add_from_layout_flatten(self, layout, variable):
        """Split `variable` per the layout and add each field as a 1-bit
        trace, expanding every sample into its individual bits (LSB first)."""
        start = 0
        for (name, width) in layout:
            chunk = variable[start:(start + width)]
            bits = [((chunk[(i // width)] >> (i % width)) & 1) for i in range((len(chunk) * width))]
            self.add(DumpVariable(name, 1, bits))
            start += width

    def add_scope_clk(self):
        """Add an alternating 1/0 clock trace covering the dump length."""
        self.add(DumpVariable('scope_clk', 1, ([1, 0] * (len(self) // 2))))

    def add_scope_trig(self, offset):
        """Add a trigger trace: 0 until `offset`, then 1 to the end."""
        self.add(DumpVariable('scope_trig', 1, (([0] * offset) + ([1] * (len(self) - offset)))))

    def __len__(self):
        """Length of the longest variable (0 when empty)."""
        return max((len(v) for v in self.variables), default=0)
def get_total_memory_usage(keep_raw=False):
    """Total RSS of this process plus all (recursive) children.

    Args:
        keep_raw: when True return raw bytes; otherwise a human-readable string.

    Returns:
        '??' when psutil is unavailable, else bytes (int) or formatted string.
    """
    if (importlib.util.find_spec('psutil') is None):
        return '??'
    import psutil
    current = psutil.Process(os.getpid())
    total = current.memory_info().rss
    for child in current.children(recursive=True):
        try:
            total += child.memory_info().rss
        except Exception:
            # Child may have exited between enumeration and inspection.
            pass
    if keep_raw:
        return total
    return human_readable_file_size(total)
def test_bf():
    """Integration test: queue a brute-force (attack mode 3) hashcat job and
    verify it reaches the RQ 'started' registry.

    NOTE(review): relies on module-level fixtures (log_dir, q, crack_q) and a
    live Redis/RQ worker; the sleeps make this deliberately slow.
    """
    job_id = '63ece9904eb8478896baf3300a2c9513'
    hash_file = 'tests/deadbeef.hashes'
    outfile = '{}{}.cracked'.format(log_dir, job_id)
    pot_path = '{}crackq.pot'.format(log_dir)
    # hash_mode 1000 with mask '?a?a?a?a?a?a?a': 7-char all-printable mask attack.
    hc_args = {'hash_mode': 1000, 'hash_file': hash_file, 'name': 'tests', 'brain': False, 'pot_path': pot_path, 'session': job_id, 'username': False, 'wordlist': None, 'wordlist2': None, 'outfile': outfile, 'attack_mode': 3, 'mask': '?a?a?a?a?a?a?a'}
    q_args = {'job_id': job_id, 'kwargs': hc_args}
    adder = crackq.cq_api.Adder()
    # Pre-flight speed check before queueing the real job.
    adder.speed_check(q_args=q_args)
    time.sleep(1)
    crack_q.q_add(q, q_args)
    job = q.fetch_job(job_id)
    job.meta['CrackQ State'] = 'Run/Restored'
    job.meta['Speed Array'] = []
    job.save_meta()
    # Give the worker time to pick the job up.
    time.sleep(30)
    started_list = rq.registry.StartedJobRegistry(queue=q).get_job_ids()
    assert (job_id in started_list)
def fortios_log_tacacsplusaccounting3(data, fos):
    """Apply the log.tacacs+accounting3 'setting' member operation.

    Returns:
        4-tuple: (failed?, changed?, raw response, empty meta dict).
    """
    fos.do_member_operation('log.tacacs+accounting3', 'setting')
    if not data['log_tacacsplusaccounting3_setting']:
        fos._module.fail_json(msg=('missing task body: %s' % 'log_tacacsplusaccounting3_setting'))
    else:
        resp = log_tacacsplusaccounting3_setting(data, fos)
    # Treat a missing 'revision_changed' flag as changed.
    revision_changed = resp['revision_changed'] if ('revision_changed' in resp) else True
    return (
        (not is_successful_status(resp)),
        (is_successful_status(resp) and revision_changed),
        resp,
        {},
    )
class PlayerRevive(GenericAction):
    """Action that brings a dead player back with `hp` life (capped at max)."""

    def __init__(self, source, target, hp):
        self.source = source
        self.target = target
        self.hp = hp

    def apply_action(self):
        """Reset the target's life, max life and skills; always returns True."""
        player = self.target
        assert player.dead
        player.dead = False
        # Restore class-level defaults for max life and the skill list.
        player.maxlife = player.__class__.maxlife
        player.skills = list(player.__class__.skills)
        # Revive hp can never exceed the restored maximum.
        player.life = min(player.maxlife, self.hp)
        return True

    def is_valid(self):
        """Only dead players can be revived."""
        return self.target.dead
def data_fixture(db):
    """Seed treasury appropriation accounts, federal accounts, FREC rows and
    toptier agencies used by account-lookup tests.

    NOTE(review): presumably a pytest fixture whose decorator was lost in
    extraction; `db` is the pytest-django database fixture.
    """
    # TAS rows: pk=1 carries an allocation transfer agency; pks 4-6 share
    # agency_id/main_account_code, and 5-6 share the same fr_entity_code.
    baker.make('accounts.TreasuryAppropriationAccount', pk=1, allocation_transfer_agency_id='123', agency_id='123', main_account_code='0111', fr_entity_code='2345')
    baker.make('accounts.TreasuryAppropriationAccount', pk=2, allocation_transfer_agency_id=None, agency_id='234', main_account_code='0222', fr_entity_code='3456')
    baker.make('accounts.TreasuryAppropriationAccount', pk=3, allocation_transfer_agency_id=None, agency_id='345', main_account_code='0333', fr_entity_code='4567')
    baker.make('accounts.TreasuryAppropriationAccount', pk=4, allocation_transfer_agency_id=None, agency_id='345', main_account_code='0444', fr_entity_code='5678')
    baker.make('accounts.TreasuryAppropriationAccount', pk=5, allocation_transfer_agency_id=None, agency_id='345', main_account_code='0444', fr_entity_code='6789')
    baker.make('accounts.TreasuryAppropriationAccount', pk=6, allocation_transfer_agency_id=None, agency_id='345', main_account_code='0444', fr_entity_code='6789')
    # Federal accounts keyed by (agency_identifier, main_account_code).
    baker.make('accounts.FederalAccount', pk=1, agency_identifier='123', main_account_code='0111')
    baker.make('accounts.FederalAccount', pk=2, agency_identifier='234', main_account_code='0222')
    baker.make('accounts.FederalAccount', pk=3, agency_identifier='345', main_account_code='0333')
    baker.make('accounts.FederalAccount', pk=4, agency_identifier='345', main_account_code='0444')
    # FREC -> CGAC associations matching the TAS fr_entity_codes above.
    baker.make('references.FREC', frec_code='2345', associated_cgac_code='123')
    baker.make('references.FREC', frec_code='3456', associated_cgac_code='234')
    baker.make('references.FREC', frec_code='4567', associated_cgac_code='234')
    baker.make('references.FREC', frec_code='5678', associated_cgac_code='123')
    baker.make('references.FREC', frec_code='6789', associated_cgac_code='234')
    # Toptier agencies; note pk=3 uses a 4-digit toptier_code.
    baker.make('references.ToptierAgency', pk=1, toptier_code='123', _fill_optional=True)
    baker.make('references.ToptierAgency', pk=2, toptier_code='234', _fill_optional=True)
    baker.make('references.ToptierAgency', pk=3, toptier_code='4567', _fill_optional=True)
def _install_freetz():
    """Build the Freetz-NG firmware toolchain in a throwaway directory and
    copy the resulting unpack tools into BIN_DIR.

    The build runs under a temporary 'makeuser' account because the Freetz
    build system refuses to run as root — TODO confirm that rationale.
    """
    logging.info('Installing FREETZ')
    current_user = getuser()
    freetz_build_config = (Path(__file__).parent / 'freetz.config')
    with TemporaryDirectory(prefix='fact_freetz') as build_directory:
        with OperateInDirectory(build_directory):
            # 18 == 0o022: standard umask so the build tree stays world-readable.
            os.umask(18)
            # Sequence: create makeuser, hand it the tree, build the tools as
            # makeuser, reopen permissions, return ownership, copy tools out,
            # then remove the temporary account.
            install_github_project('Freetz-NG/freetz-ng', ['id -u makeuser || sudo useradd -M makeuser', 'sudo mkdir -p /home/makeuser', 'sudo chown -R makeuser /home/makeuser', f'cp {freetz_build_config} ./.config', f'sudo chown -R makeuser {build_directory}', 'sudo su makeuser -c "make -j$(nproc) tools"', f'sudo chmod -R 777 {build_directory}', f'sudo chown -R {current_user} {build_directory}', f'cp tools/find-squashfs tools/unpack-kernel tools/freetz_bin_functions tools/unlzma tools/unsquashfs4-avm-be tools/unsquashfs4-avm-le tools/unsquashfs3-multi {BIN_DIR}', 'sudo userdel makeuser'])
def test_point_gauge_output_2():
    """Point gauges at three probe locations must emit the expected CSV
    column names and field values over three time steps."""
    filename = 'test_gauge_output_2.csv'
    silent_rm(filename)  # start from a clean output file
    # One field ('u0') sampled at three points along the cube diagonal.
    p = PointGauges(gauges=((('u0',), ((0, 0, 0), (0.5, 0.5, 0.5), (1, 1, 1))),), fileName=filename)
    time_list = [0.0, 1.0, 2.0]
    run_gauge(p, time_list)
    correct_gauge_names = ['u0 [ 0 0 0]', 'u0 [ 0.5 0.5 0.5]', 'u0 [ 1 1 1]']
    # Rows: time followed by the gauge value at each probe location.
    correct_data = np.asarray([[0.0, 0.0, 55.5, 111.0], [1.0, 0.0, 111.0, 222.0], [2.0, 0.0, 166.5, 333.0]])
    Comm.get().barrier()  # ensure all ranks finished writing before reading
    (gauge_names, data) = parse_gauge_output(filename)
    assert (correct_gauge_names == gauge_names)
    npt.assert_allclose(correct_data, data)
    delete_file(filename)
def _is_status_not_found(error):
    """Return True when `error` is an HTTP 404 whose JSON body reports NOT_FOUND.

    Args:
        error: exception raised by an API call.

    Returns:
        bool: True only for a JSON-bodied 404 with status 'NOT_FOUND'.
    """
    if not isinstance(error, errors.HttpError) or error.resp.status != 404:
        return False
    content_type = error.resp.get('content-type', '')
    if content_type.startswith('application/json'):
        details = json.loads(error.content.decode('utf-8'))
        if details.get('error', {}).get('status', '') == 'NOT_FOUND':
            LOGGER.debug(error)
            return True
    return False
def expected_interactions_non_zero(pSubmatrix):
    """Mean interaction value per genomic distance over a sparse matrix.

    For each non-zero entry, the distance is |row - col|; the result is the
    sum of values at each distance divided by the number of entries at that
    distance (0 where no entry exists).

    Args:
        pSubmatrix: scipy sparse matrix whose .data aligns with .nonzero().

    Returns:
        np.ndarray of length pSubmatrix.shape[0] with the per-distance means.
    """
    n = pSubmatrix.shape[0]
    sums = np.zeros(n)
    counts = np.zeros(n)
    (row, col) = pSubmatrix.nonzero()
    distance = np.absolute((row - col))
    # Vectorized accumulation (replaces the original per-element Python loop).
    np.add.at(sums, distance, pSubmatrix.data)
    np.add.at(counts, distance, 1)
    # Divide only where a count exists; other bins stay 0 (the original
    # divided everywhere and then zeroed the resulting nan/inf, emitting
    # RuntimeWarnings in the process).
    expected_interactions = np.divide(sums, counts, out=np.zeros(n), where=(counts != 0))
    # Safety net for any residual non-finite values (parity with original).
    expected_interactions[~np.isfinite(expected_interactions)] = 0
    return expected_interactions
class TestCoprDetail(CoprsTestCase):
    """View tests for the copr detail pages (overview, builds, permissions).

    NOTE(review): the bare ('u1')/('u2') lines before some tests look like
    residue of a stripped login/transaction decorator — confirm against the
    original test suite.
    """
    def test_copr_detail_not_found(self):
        """Unknown owner/project should 404."""
        r = self.tc.get('/coprs/foo/bar/')
        assert (r.status_code == 404)
    def test_copr_detail_normal(self, f_users, f_coprs, f_db):
        """Existing project page renders and shows the project name."""
        r = self.tc.get('/coprs/{0}/{1}/'.format(self.u1.name, self.c1.name))
        assert (r.status_code == 200)
        assert (self.c1.name.encode('utf-8') in r.data)
    def test_copr_detail_contains_builds(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """Builds tab lists one table row per build (fixtures create two)."""
        r = self.tc.get('/coprs/{0}/{1}/builds/'.format(self.u1.name, self.c1.name))
        assert (r.data.count(b'<tr class="build') == 2)
    def test_copr_detail_anonymous_doesnt_contain_permissions_table_when_no_permissions(self, f_users, f_coprs, f_copr_permissions, f_db):
        """No permissions table when the project has no permission entries."""
        r = self.tc.get('/coprs/{0}/{1}/permissions/'.format(self.u1.name, self.c1.name))
        assert (b'<!--permissions-table-->' not in r.data)
    def test_copr_detail_contains_permissions_table(self, f_users, f_coprs, f_copr_permissions, f_db):
        """Permissions table lists the users holding permissions."""
        r = self.tc.get('/coprs/{0}/{1}/permissions/'.format(self.u2.name, self.c3.name))
        assert (b'<!--permissions-table-->' in r.data)
        assert ('<td>{0}'.format(self.u3.name).encode('utf-8') in r.data)
        assert ('<td>{0}'.format(self.u1.name).encode('utf-8') in r.data)
    ('u2')
    def test_detail_has_correct_permissions_form(self, f_users, f_coprs, f_copr_permissions, f_db):
        """Logged-in owner sees the builder/admin selection widgets."""
        self.db.session.add_all([self.u2, self.c3])
        r = self.test_client.get('/coprs/{0}/{1}/permissions/'.format(self.u2.name, self.c3.name))
        assert (r.data.count(b'nothing') == 2)
        assert (b'<select id="copr_builder_1" name="copr_builder_1">' in r.data)
        assert (b'<select id="copr_admin_1" name="copr_admin_1">' in r.data)
    def test_copr_detail_doesnt_show_cancel_build_for_anonymous(self, f_users, f_coprs, f_builds, f_db):
        """Anonymous visitors never get a cancel-build link."""
        r = self.tc.get('/coprs/{0}/{1}/build/{2}/'.format(self.u2.name, self.c2.name, self.c2.builds[0].id))
        assert (b'/cancel_build/' not in r.data)
    ('u1')
    def test_copr_detail_doesnt_allow_non_submitter_to_cancel_build(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """A non-admin user who did not submit the build can't cancel it."""
        self.u1.admin = False
        self.db.session.add_all([self.u1, self.u2, self.c2])
        r = self.test_client.get('/coprs/{0}/{1}/build/{2}/'.format(self.u2.name, self.c2.name, self.c2.builds[0].id))
        assert (b'/cancel_build/' not in r.data)
    ('u2')
    def test_copr_detail_allows_submitter_to_cancel_build(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """The submitter sees the cancel link and can actually cancel."""
        self.db.session.add_all([self.u2, self.c2])
        build_id = self.c2.builds[0].id
        r = self.test_client.get('/coprs/{0}/{1}/build/{2}/'.format(self.u2.name, self.c2.name, build_id))
        assert (b'/cancel_build/' in r.data)
        self.web_ui.cancel_build(self.c2.name, build_id)
        build = models.Build.query.get(build_id)
        assert (build.state == 'canceled')
    def test_codeblock_html_in_project_description(self, f_users, f_coprs):
        """Markdown code blocks render highlighted; unsafe tags are stripped."""
        r = self.tc.get('/coprs/{0}/{1}/'.format(self.u1.name, self.c1.name))
        lines = ['<pre><code class="language-python"><div class="highlight"><span></span><span class="c1"># code snippet</span>', '<span class="k">def</span> <span class="nf">foo</span><span class="p">():</span>', '    <span class="n">bar</span><span class="p">()</span>', '    <span class="k">return</span> <span class="mi">1</span>', '</div>', '</code></pre>']
        removed_code = ['<blink>']
        generated_html = r.data.decode('utf-8')
        for line in lines:
            assert (line in generated_html)
        for line in removed_code:
            assert (line not in generated_html)
class _ThreadedBridgeWorker(threading.Thread):
    """Thread that connects a TrackingSender to a message bridge and replays
    generated messages through it, recording the sender's stats.

    Attributes:
        mb: message-bridge context manager exposing .port/.label/.auth.
        options: options forwarded to the message generator.
        stats: sender statistics, set after run() completes.
    """
    def __init__(self, mb, options):
        threading.Thread.__init__(self)
        self.mb = mb
        self.options = options
    def run(self):
        with self.mb as mb:
            # Retry until the bridge accepts the connection.
            launched = False
            while (not launched):
                try:
                    ts = beem.load.TrackingSender('localhost', mb.port, ('ts_' + mb.label))
                    launched = True
                except Exception:
                    # Bridge may not be listening yet; back off and retry.
                    # (Fix: was a bare `except:`, which also swallowed
                    # SystemExit/KeyboardInterrupt and made the loop unkillable.)
                    time.sleep(0.5)
            if self.mb.auth:
                # Use the client id from 'cid:secret' auth as the generator id.
                cid = self.mb.auth.split(':')[0]
                gen = beem.msgs.createGenerator(cid, self.options)
            else:
                gen = beem.msgs.createGenerator(self.mb.label, self.options)
            ts.run(gen)
            self.stats = ts.stats()
def highest_justified_epoch(min_total_deposits: wei_value) -> uint256:
    # Return the most recent epoch (walking back from current_epoch) that is
    # justified AND whose current- and previous-dynasty deposits both meet
    # min_total_deposits; returns 0 when none qualifies back to START_EPOCH.
    # NOTE(review): Vyper source. `for i in range():` lost its bound during
    # extraction — the original presumably iterated a fixed constant number
    # of epochs; confirm against the deployed contract. Visibility decorators
    # (@public/@constant) also appear to be missing.
    epoch: uint256
    for i in range():
        epoch = (self.current_epoch - convert(i, 'uint256'))
        is_justified: bool = self.checkpoints[epoch].is_justified
        enough_cur_dyn_deposits: bool = (self.checkpoints[epoch].cur_dyn_deposits >= min_total_deposits)
        enough_prev_dyn_deposits: bool = (self.checkpoints[epoch].prev_dyn_deposits >= min_total_deposits)
        if (is_justified and (enough_cur_dyn_deposits and enough_prev_dyn_deposits)):
            return epoch
        # Stop once the genesis/start epoch is reached.
        if (epoch == self.START_EPOCH):
            break
    return 0
class Analyzer():
    """Impulse/frequency-response analyzer driven by a geometric chirp.

    The excitation signal ``x`` (chirp plus silence for delay headroom) is
    played back; ``findMatch`` locates it in a recording via FFT
    cross-correlation and each matched capture adds a transfer-function
    magnitude estimate to ``sumH``.  Derived views (spectrum, inverse
    correction filter, corrected spectra) are computed from the average.

    NOTE(review): ``Correction`` entries are treated below as
    (frequency, dB) pairs — confirm against the Correction type definition.
    """
    MAX_DELAY_SECS = 0.1  # capture headroom appended after the chirp
    TIMEOUT_SECS = 1.0    # extra listening time before timedOut() trips
    # Methods wrapped with lru_cache by setCaching(); excluded from pickling.
    CACHED_METHODS = ['X', 'Y', 'calcH', 'H', 'H2', 'h', 'h_inv', 'spectrum', 'frequency', 'calibration', 'target']
    chirp: np.ndarray          # raw excitation chirp
    x: np.ndarray              # chirp padded with trailing silence
    y: np.ndarray              # last captured response, aligned to x
    sumH: np.ndarray           # running sum of |H| over measurements
    numMeasurements: int
    rate: int                  # sample rate in Hz
    fmin: float
    fmax: float
    time: float                # seconds of audio seen by findMatch so far
    def __init__(self, f0: int, f1: int, secs: float, rate: int, ampl: float, calibration: Optional[Correction]=None, target: Optional[Correction]=None):
        """Build the f0->f1 chirp over `secs` seconds at `rate` Hz scaled by
        `ampl`, and reset all measurement state."""
        self.chirp = (ampl * geom_chirp(f0, f1, secs, rate))
        self.x = np.concatenate([self.chirp, np.zeros(int((self.MAX_DELAY_SECS * rate)))])
        self.y = np.zeros(self.x.size)
        self.rate = rate
        self.fmin = min(f0, f1)
        self.fmax = max(f0, f1)
        self.time = 0
        self.sumH = np.zeros(self.X().size)
        self.numMeasurements = 0
        self._calibration = calibration
        self._target = target
    def __getstate__(self):
        """Pickle support: drop the lru_cache-wrapped bound methods."""
        return {k: v for (k, v) in self.__dict__.items() if (k not in self.CACHED_METHODS)}
    def setCaching(self):
        """(Re)wrap each cached method in a fresh per-instance lru_cache,
        invalidating results that depended on prior state."""
        for name in self.CACHED_METHODS:
            unbound = getattr(Analyzer, name)
            bound = types.MethodType(unbound, self)
            setattr(self, name, lru_cache(bound))
    def addMeasurements(self, analyzer):
        """Merge another compatible analyzer's accumulated measurements."""
        if (not self.isCompatible(analyzer)):
            raise ValueError('Incompatible analyzers')
        self.sumH = (self.sumH + analyzer.sumH)
        self.numMeasurements += analyzer.numMeasurements
        self.setCaching()
    def isCompatible(self, analyzer):
        """True when the other analyzer uses an identical excitation signal."""
        return (isinstance(analyzer, Analyzer) and np.array_equal(analyzer.x, self.x))
    def findMatch(self, recording: array.array) -> bool:
        """Locate the chirp in `recording` by cross-correlation.

        On success the aligned capture is stored in self.y, its H estimate is
        accumulated, and True is returned; otherwise False.
        """
        sz = len(recording)
        self.time = (sz / self.rate)
        if (sz >= self.x.size):
            # Cross-correlate via FFT: corr = recording (*) reversed(x).
            Y = fft(recording)
            X = fft(np.flip(self.x), n=sz)
            corr = ifft((X * Y)).real
            idx = ((int(corr.argmax()) - self.x.size) + 1)
            if (idx >= 0):
                self.y = np.array(recording[idx:(idx + self.x.size)])
                self.numMeasurements += 1
                self.sumH += self.calcH()
                self.setCaching()
                return True
        return False
    def timedOut(self) -> bool:
        """True when listening exceeded the chirp length plus grace period."""
        return (self.time > ((self.x.size / self.rate) + self.TIMEOUT_SECS))
    def frequency(self) -> np.ndarray:
        """Frequency axis (0..Nyquist) matching X()'s bins."""
        return np.linspace(0, (self.rate // 2), self.X().size)
    def freqRange(self, size: int=0) -> slice:
        """Slice covering the measured band [fmin, fmax] on an axis of `size`
        bins (defaults to X()'s size)."""
        size = (size or self.X().size)
        nyq = (self.rate / 2)
        i0 = min((size - 1), int((0.5 + ((size * self.fmin) / nyq))))
        i1 = min((size - 1), int((0.5 + ((size * self.fmax) / nyq))))
        return slice(i0, (i1 + 1))
    def calibration(self) -> Optional[np.ndarray]:
        """Calibration curve interpolated onto frequency() (dB), or None."""
        return self.interpolateCorrection(self._calibration)
    def target(self) -> Optional[np.ndarray]:
        """Target curve interpolated onto frequency() (dB), or None."""
        return self.interpolateCorrection(self._target)
    def interpolateCorrection(self, corr: Optional[Correction]) -> Optional[np.ndarray]:
        """Interpolate (freq, dB) pairs onto frequency(), linear in log-f."""
        if (not corr):
            return None
        # Drop non-positive frequencies: log() is undefined there.
        corr = sorted((c for c in corr if (c[0] > 0)))
        a = np.array(corr, 'd').T
        logF = np.log(a[0])
        db = a[1]
        freq = self.frequency()
        interp = np.empty_like(freq)
        interp[0] = 0  # DC bin cannot be log-interpolated
        interp[1:] = np.interp(np.log(freq[1:]), logF, db)
        return interp
    def X(self) -> np.ndarray:
        """Spectrum of the excitation signal."""
        return rfft(self.x)
    def Y(self) -> np.ndarray:
        """Spectrum of the last captured response."""
        return rfft(self.y)
    def calcH(self) -> np.ndarray:
        """|H| for the current capture: Y·conj(X)/(|X|²+eps), with the
        calibration curve compensated out when present."""
        X = self.X()
        Y = self.Y()
        H = ((Y * np.conj(X)) / ((np.abs(X) ** 2) + 1e-06))
        if self._calibration:
            H *= (10 ** ((- self.calibration()) / 20))
        H = np.abs(H)
        return H
    def H(self) -> XY:
        """Averaged transfer-function magnitude over all measurements."""
        freq = self.frequency()
        H = (self.sumH / (self.numMeasurements or 1))
        return XY(freq, H)
    def H2(self, smoothing: float) -> XY:
        """Smoothed power response; out-of-band bins padded with edge values."""
        (freq, H) = self.H()
        r = self.freqRange()
        H2 = np.empty_like(H)
        H2[r] = smooth(freq[r], (H[r] ** 2), smoothing)
        H2[:r.start] = H2[r.start]
        H2[r.stop:] = H2[(r.stop - 1)]
        return XY(freq, H2)
    def h(self) -> XY:
        """Impulse response, rotated so the peak sits at the center."""
        (_, H) = self.H()
        h = irfft(H)
        h = np.hstack([h[(h.size // 2):], h[0:(h.size // 2)]])
        t = np.linspace(0, (h.size / self.rate), h.size)
        return XY(t, h)
    def spectrum(self, smoothing: float=0) -> XY:
        """In-band magnitude spectrum in dB."""
        (freq, H2) = self.H2(smoothing)
        r = self.freqRange()
        return XY(freq[r], (10 * np.log10(H2[r])))
    def h_inv(self, secs: float=0.05, dbRange: float=24, kaiserBeta: float=5, smoothing: float=0, causality: float=0) -> XY:
        """Inverse (correction) filter of length `secs` seconds.

        Boost is limited to `dbRange` dB; `causality` in [0, 1] morphs the
        linear-phase result toward a causal filter.
        """
        (freq, H2) = self.H2(smoothing)
        if self._target:
            # Invert relative to the target curve instead of flat.
            H2 = (H2 * (10 ** ((- self.target()) / 10)))
        n = int(((secs * self.rate) / 2))
        H = (resample(H2, n) ** 0.5)
        H /= H.max()
        H = np.fmax(H, (10 ** ((- dbRange) / 20)))  # cap maximum boost
        Z = (1 / H)
        # Linear phase ramp: centers the impulse response in the window.
        phase = np.exp(((Z.size * 1j) * np.linspace(0, np.pi, Z.size)))
        Z = (Z * phase)
        z = irfft(Z)
        z = z[:(- 1)]
        z *= window(z.size, kaiserBeta)
        if causality:
            z = transform_causality(z, causality)
        # Normalize by an L^dim norm; dim blends 1.5 (linear) to 1.25 (causal).
        dim = (1.5 - (0.25 * causality))
        norm = ((np.abs(z) ** dim).sum() ** (1 / dim))
        z /= norm
        t = np.linspace(0, (z.size / self.rate), z.size)
        return XY(t, z)
    def correctionFactor(self, h_inv: np.ndarray) -> XY:
        """Magnitude response of the inverse filter, normalized to 1."""
        Z = np.abs(rfft(h_inv))
        Z /= Z.max()
        freq = np.linspace(0, (self.rate / 2), Z.size)
        return XY(freq, Z)
    def correctedSpectrum(self, corrFactor: XY) -> Tuple[(XY, XY)]:
        """Predicted post-correction spectrum, at the native resolution and
        resampled to corrFactor's resolution."""
        (freq, H2) = self.H2(0)
        H = (H2 ** 0.5)
        r = self.freqRange()
        tf = resample(corrFactor.y, H.size)
        resp = (20 * np.log10((tf[r] * H[r])))
        spectrum = XY(freq[r], resp)
        H = (resample(H2, corrFactor.y.size) ** 0.5)
        rr = self.freqRange(corrFactor.y.size)
        resp = (20 * np.log10((corrFactor.y[rr] * H[rr])))
        spectrum_resamp = XY(corrFactor.x[rr], resp)
        return (spectrum, spectrum_resamp)
    def targetSpectrum(self, spectrum: XY) -> Optional[XY]:
        """Target curve leveled to the measured spectrum (1/f-weighted mean
        offset), or None when no target is configured."""
        if self._target:
            (freq, resp) = spectrum
            r = self.freqRange()
            target = self.target()[r]
            # Shift the target so its weighted average matches the response.
            target += np.average((resp - target), weights=(1 / freq))
            targetSpectrum = XY(freq, target)
        else:
            targetSpectrum = None
        return targetSpectrum
class OptionPlotoptionsArcdiagramSonificationContexttracksMappingTremoloSpeed(Options):
    """Generated accessor wrapper for Highcharts tremolo-speed mapping options.

    NOTE(review): each name appears twice (getter then setter). The original
    presumably used `@property` / `@<name>.setter` decorators that were lost
    in extraction — as written, each second definition shadows the first.
    Confirm against the code generator's output.
    """
    def mapFunction(self):
        # Getter: mapping function (None when unset).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: mapping function applied to the data values.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property mapped from (None when unset).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: name of the data property to map from.
        self._config(text, js_type=False)
    def max(self):
        # Getter: maximum mapped value (None when unset).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: maximum mapped value.
        self._config(num, js_type=False)
    def min(self):
        # Getter: minimum mapped value (None when unset).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: minimum mapped value.
        self._config(num, js_type=False)
    def within(self):
        # Getter: range the mapping operates within (None when unset).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: range the mapping operates within.
        self._config(value, js_type=False)
# NOTE(review): bare `.parametrize(...)` below looks like residue of a
# stripped `@pytest.mark` decorator prefix — confirm against the original.
.parametrize('opt_cls, opt_kwargs_, ref_cycle', [(RFOptimizer, {}, 18), (RFOptimizer, {'adapt_step_func': True}, 18), (NCOptimizer, {}, 13), pytest.param(LBFGS, {'double_damp': True, 'gamma_mult': True}, 19), pytest.param(LBFGS, {'double_damp': True, 'gamma_mult': False}, 19), pytest.param(LBFGS, {'double_damp': False}, 19, marks=pytest.mark.xfail), pytest.param(BFGS, {}, 10, marks=pytest.mark.xfail), (RSA, {}, 17), pytest.param(ConjugateGradient, {'formula': 'PR'}, 170), pytest.param(StabilizedQNMethod, {'bio': False}, 10, marks=pytest.mark.xfail)])
def test_optimizers(opt_cls, opt_kwargs_, ref_cycle):
    """Each optimizer must converge on the AnaPot surface in exactly
    ref_cycle cycles and land on the known minimum coordinates."""
    geom = AnaPot.get_geom((0.667, 1.609, 0.0))
    ref_coords = np.array((1.941, 3.8543, 0.0))
    # Tight threshold; allow some slack in max_cycles beyond the reference.
    opt_kwargs = {'thresh': 'gau_tight', 'dump': False, 'overachieve_factor': 2.0, 'max_cycles': max((ref_cycle + 1), 50)}
    opt_kwargs.update(opt_kwargs_)
    opt = opt_cls(geom, **opt_kwargs)
    opt.run()
    assert opt.is_converged
    assert (opt.cur_cycle == ref_cycle)
    diff = (ref_coords - geom.coords)
    diff_norm = np.linalg.norm(diff)
    print(f' norm(diff)={diff_norm:.8f}')
    assert (diff_norm < 6e-05)
def create(identifier: str) -> TAuth:
    """Instantiate the auth plugin registered under *identifier*.

    Raises:
        UnknownAuthModel: when no plugin with that name is discoverable.
    """
    namespace = 'puresnmp_plugins.auth'
    plugin_loader = Loader(namespace, is_valid_auth_mod)
    instance = plugin_loader.create(identifier)
    if instance:
        return instance
    raise UnknownAuthModel(namespace, identifier, sorted(plugin_loader.discovered_plugins.keys()))
class Delaunay3D(FilterBase):
    """Mayavi filter wrapping `tvtk.Delaunay3D` (3-D Delaunay triangulation).

    Accepts structured-grid, poly-data or unstructured-grid input with any
    attributes and produces an unstructured grid.
    """
    # Version tag (presumably used by the persistence machinery — confirm).
    __version__ = 0
    # The wrapped TVTK filter instance; changes are recorded.
    filter = Instance(tvtk.Delaunay3D, args=(), allow_none=False, record=True)
    # Pipeline metadata: accepted inputs and produced output.
    input_info = PipelineInfo(datasets=['structured_grid', 'poly_data', 'unstructured_grid'], attribute_types=['any'], attributes=['any'])
    output_info = PipelineInfo(datasets=['unstructured_grid'], attribute_types=['any'], attributes=['any'])
# NOTE(review): the bare tuple line below looks like residue of a stripped
# FastAPI route decorator (presumably `@router.post(...)`) — confirm.
(PRIVACY_REQUEST_RESUME_FROM_REQUIRES_INPUT, status_code=HTTP_200_OK, response_model=PrivacyRequestResponse, dependencies=[Security(verify_oauth_client, scopes=[PRIVACY_REQUEST_CALLBACK_RESUME])])
def resume_privacy_request_from_requires_input(privacy_request_id: str, *, db: Session=Depends(deps.get_db)) -> PrivacyRequestResponse:
    """Resume a privacy request stuck in 'requires_input' once all enabled
    manual webhooks have verified input, then re-queue it for processing.

    Raises:
        HTTPException(400): request not in requires_input, or manual-webhook
            input is missing/invalid/unset.
    """
    privacy_request: PrivacyRequest = get_privacy_request_or_error(db, privacy_request_id)
    if (privacy_request.status != PrivacyRequestStatus.requires_input):
        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=f"Cannot resume privacy request from 'requires_input': privacy request '{privacy_request.id}' status = {privacy_request.status.value}.")
    # Decide which rule type governs the webhook input; access wins over erasure.
    action_type = None
    if privacy_request.policy.get_rules_for_action(ActionType.access):
        action_type = ActionType.access
    elif privacy_request.policy.get_rules_for_action(ActionType.erasure):
        action_type = ActionType.erasure
    access_manual_webhooks: List[AccessManualWebhook] = AccessManualWebhook.get_enabled(db, action_type)
    # Strict getters raise when cached input is absent or fails validation.
    try:
        for manual_webhook in access_manual_webhooks:
            if (action_type == ActionType.access):
                privacy_request.get_manual_webhook_access_input_strict(manual_webhook)
            if (action_type == ActionType.erasure):
                privacy_request.get_manual_webhook_erasure_input_strict(manual_webhook)
    except (NoCachedManualWebhookEntry, PydanticValidationError, ManualWebhookFieldsUnset) as exc:
        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=f'Cannot resume privacy request. {exc}')
    logger.info("Resuming privacy request '{}' after manual inputs verified", privacy_request_id)
    privacy_request.status = PrivacyRequestStatus.in_processing
    privacy_request.save(db=db)
    queue_privacy_request(privacy_request_id=privacy_request.id)
    return privacy_request
class DatabaseCache(Cache):
    """SQLite-backed event cache with optional TTL expiry and two-phase delete.

    Events are inserted stamped with the current time; readers fetch a batch
    and mark it pending_delete=1, then either delete the batch (flush
    succeeded) or requeue it (pending_delete back to 0).

    NOTE(review): _connect is a generator used via `with self._connect()` —
    a stripped `@contextlib.contextmanager` decorator is presumably missing
    here; confirm against the original source.
    """
    def __init__(self, path, event_ttl=None):
        """path: sqlite file path; event_ttl: seconds before events expire
        (None disables expiry)."""
        self._database_path = path
        self._connection = None
        self._event_ttl = event_ttl
    def _connect(self):
        """Context manager: open the DB, yield the connection inside a
        transaction, translate sqlite errors, and always close."""
        try:
            self._open()
            with self._connection as connection:
                (yield connection)
        except sqlite3.OperationalError:
            self._handle_sqlite_error()  # may raise a domain-specific error
            raise
        finally:
            self._close()
    def _open(self):
        """Open the sqlite connection (EXCLUSIVE isolation) and ensure schema."""
        self._connection = sqlite3.connect(self._database_path, timeout=constants.DATABASE_TIMEOUT, isolation_level='EXCLUSIVE')
        self._connection.row_factory = sqlite3.Row
        self._initialize_schema()
    def _close(self):
        """Close and forget the connection if one is open."""
        if (self._connection is not None):
            self._connection.close()
            self._connection = None
    def _initialize_schema(self):
        """Run all schema DDL; translate operational errors and close on failure."""
        cursor = self._connection.cursor()
        try:
            for statement in DATABASE_SCHEMA_STATEMENTS:
                cursor.execute(statement)
        except sqlite3.OperationalError:
            self._close()
            self._handle_sqlite_error()
            raise
    def add_event(self, event):
        """Insert one event row (not pending delete, stamped now)."""
        query = "\n            INSERT INTO `event`\n            (`event_text`, `pending_delete`, `entry_date`) VALUES (?, ?, datetime('now'))"
        with self._connect() as connection:
            connection.execute(query, (event, False))
    def _handle_sqlite_error(self):
        """Map well-known sqlite error strings onto domain exceptions;
        falls through (returning None) for unrecognized messages."""
        (_, exc, _) = sys.exc_info()
        if (str(exc) == 'database is locked'):
            raise DatabaseLockedError from exc
        if (str(exc) == 'disk I/O error'):
            raise DatabaseDiskIOError from exc
        if (str(exc) == 'unable to open database file'):
            raise DatabaseDiskIOError from exc
        if (str(exc) == 'attempt to write a readonly database'):
            raise DatabaseDiskIOError from exc
    def get_queued_events(self):
        """Fetch up to QUEUED_EVENTS_BATCH_SIZE events and mark them pending delete."""
        query_fetch = '\n            SELECT `event_id`, `event_text` FROM `event` WHERE `pending_delete` = 0 LIMIT ?;'
        query_update_base = 'UPDATE `event` SET `pending_delete`=1 WHERE `event_id` IN (%s);'
        with self._connect() as connection:
            cursor = connection.cursor()
            cursor.execute(query_fetch, (constants.QUEUED_EVENTS_BATCH_SIZE,))
            events = cursor.fetchall()
            self._bulk_update_events(cursor, events, query_update_base)
        return events
    def _bulk_update_events(self, cursor, events, statement_base):
        """Apply statement_base to the events' ids in chunks (keeps the
        number of bind variables per statement bounded).

        NOTE(review): len() is taken of each ichunked chunk — this requires
        chunks to be sized sequences; confirm against the ichunked helper used.
        """
        event_ids = [event[0] for event in events]
        for event_ids_subset in ichunked(event_ids, constants.DATABASE_EVENT_CHUNK_SIZE):
            statement = (statement_base % ','.join(('?' * len(event_ids_subset))))
            cursor.execute(statement, event_ids_subset)
    def requeue_queued_events(self, events):
        """Clear pending_delete for events whose flush failed."""
        query_update_base = 'UPDATE `event` SET `pending_delete`=0 WHERE `event_id` IN (%s);'
        with self._connect() as connection:
            cursor = connection.cursor()
            self._bulk_update_events(cursor, events, query_update_base)
    def delete_queued_events(self):
        """Drop all rows already marked pending delete."""
        query_delete = 'DELETE FROM `event` WHERE `pending_delete`=1;'
        with self._connect() as connection:
            cursor = connection.cursor()
            cursor.execute(query_delete)
    def expire_events(self):
        """Delete rows older than the TTL; no-op when no TTL is configured."""
        if (self._event_ttl is None):
            return
        query_delete = f"DELETE FROM `event` WHERE `entry_date` < datetime('now', '-{self._event_ttl} seconds');"
        with self._connect() as connection:
            cursor = connection.cursor()
            cursor.execute(query_delete)
    def vacuum(self):
        """Reclaim file space from deleted rows."""
        with self._connect() as connection:
            cursor = connection.cursor()
            cursor.execute('VACUUM;')
    def get_non_flushed_event_count(self):
        """Count events still awaiting flush (pending_delete = 0)."""
        query_fetch = 'SELECT count(*) FROM `event` WHERE `pending_delete` = 0;'
        with self._connect() as connection:
            cursor = connection.cursor()
            cursor.execute(query_fetch)
            count = cursor.fetchone()[0]
        return count
def _parse_model_max_length(model, tokenizer) -> Optional[int]:
if (not (tokenizer or model)):
return None
try:
if (tokenizer and hasattr(tokenizer, 'model_max_length')):
return tokenizer.model_max_length
if (model and hasattr(model, 'config')):
model_config = model.config
if hasattr(model_config, 'max_sequence_length'):
return model_config.max_sequence_length
if hasattr(model_config, 'max_position_embeddings'):
return model_config.max_position_embeddings
except Exception:
return None |
class BaseBot(object):
    """Base class for event-driven market bots.

    Subclasses override the on_*/after_* hooks and may pre-set configuration
    attributes (start_date, end_date, need_account, costs, ...) that
    __init__ fills with defaults when absent.
    """
    # Maps handler method names to the topic suffix they subscribe to.
    func_map_topic = {'on_subscription': 'subscription'}
    def on_init(self):
        """Hook: runs first thing in __init__, before configuration."""
        pass
    def after_init(self):
        """Hook: runs at the very end of __init__, after configuration."""
        pass
    def on_event(self, event_item):
        """Handle a quote/subscription event; default implementation logs it."""
        self.logger.info('got event:{}'.format(event_item))
    def on_timer(self, event_item):
        """Handle a timer tick (used when no quote topic drives the bot)."""
        self.logger.info('got event:{}'.format(event_item))
def __init__(self, security_item=None, level=None):
    """Set up logging, fill default configuration, resolve the security to
    watch and pick the kafka quote topic (or fall back to a self-driven timer).

    Args:
        security_item: optional security to listen to (overrides class attr).
        level: optional data level ('day' or 'tick'); defaults to 'day'.

    Raises:
        Exception: when the configured security item is missing or invalid.
    """
    self.logger = logging.getLogger(__name__)
    self.on_init()  # subclass hook before configuration
    self.threads = []
    # Default any configuration attribute a subclass did not pre-set.
    if not hasattr(self, 'living_mode'):
        self.living_mode = False
    # Fix: this guard previously tested hasattr(self, 'start_date'), which
    # could clobber a subclass's pre-set `topics` and skipped initializing
    # `topics` whenever start_date was already set.
    if not hasattr(self, 'topics'):
        self.topics = []
    if not hasattr(self, 'start_date'):
        self.start_date = pd.Timestamp('2013-01-01')
    if not hasattr(self, 'end_date'):
        self.end_date = pd.Timestamp.today()
    if not hasattr(self, 'need_account'):
        self.need_account = True
    if self.need_account:
        # Trading-cost defaults for the backtest account.
        if not hasattr(self, 'base_capital'):
            self.base_capital = 1000000
        if not hasattr(self, 'buy_cost'):
            self.buy_cost = 0.001
        if not hasattr(self, 'sell_cost'):
            self.sell_cost = 0.001
        if not hasattr(self, 'slippage'):
            self.slippage = 0.001
        if not hasattr(self, 'stock_fuquan'):
            self.stock_fuquan = 'hfq'
    self.bot_name = type(self).__name__.lower()
    if security_item is not None:
        self.security_item = security_item
    if level is not None:
        self.level = level
    if hasattr(self, 'security_item'):
        if not self.security_item:
            raise Exception('you must set one security item!')
        self.security_item = to_security_item(self.security_item)
        if self.security_item is None:
            raise Exception('invalid security item:{}'.format(self.security_item))
        if (not hasattr(self, 'level')) or (not self.level):
            self.level = 'day'
        self.logger.info('bot:{} listen to security_item:{},level:{}'.format(self.bot_name, self.security_item, self.level))
        # Choose the kafka topic carrying quotes at the requested level.
        if self.level == 'day':
            self.quote_topic = get_kafka_kdata_topic(security_id=self.security_item['id'], level=self.level)
        elif self.level == 'tick':
            self.quote_topic = get_kafka_tick_topic(security_id=self.security_item['id'])
        else:
            self.logger.exception('wrong level:{}'.format(self.level))
    else:
        # No security configured: the bot polls the market on a fixed step.
        if not hasattr(self, 'time_step'):
            self.time_step = timedelta(days=1)
        self.logger.info('bot:{} check the market by itself,time_step:{}'.format(self.bot_name, self.time_step))
    self._after_init()
    self.after_init()
def _after_init(self):
    """Normalize start/end dates to pd.Timestamp, set the clock, and create the
    trading account service when the bot needs one."""
    # isinstance (rather than type(...) == str) also accepts str subclasses.
    if isinstance(self.start_date, str):
        self.start_date = pd.Timestamp(self.start_date)
    if isinstance(self.end_date, str):
        self.end_date = pd.Timestamp(self.end_date)
    # The simulated clock starts at the configured start date.
    self.current_time = pd.Timestamp(self.start_date)
    if self.need_account:
        self.account_service = AccountService(bot_name=self.bot_name, timestamp=self.current_time, base_capital=self.base_capital, buy_cost=self.buy_cost, sell_cost=self.sell_cost, slippage=self.slippage, stock_fuquan=self.stock_fuquan)
def __repr__(self):
    """Debug representation listing every attribute except the logger."""
    attrs = ', '.join('{}={}'.format(k, self.__dict__[k])
                      for k in self.__dict__ if k != 'logger')
    return '{}({})'.format(self.__class__.__name__, attrs)
def consume_topic_with_func(self, topic, func):
    """Consume a Kafka topic and dispatch each message to the named handler.

    With no topic, the bot self-clocks: it fires on_timer() every time_step.
    With a topic, it seeks to the first offset at/after start_date and replays
    messages to ``getattr(self, func)`` until end_date or the end of the topic.

    :param topic: Kafka topic name, or falsy for self-clock mode.
    :param func: name of the bound-method handler to call per message.
    """
    if (not topic):
        # Self-clock mode. NOTE(review): this loop has no break, so the Kafka
        # code below is unreachable in this mode — presumably intentional.
        while True:
            self.on_timer({'timestamp': self.current_time})
            # Only sleep in real time once the clock has caught up to today.
            if is_same_date(self.current_time, pd.Timestamp.now()):
                time.sleep(self.time_step.total_seconds())
            self.current_time += self.time_step
    consumer = KafkaConsumer(topic, value_deserializer=(lambda m: json.loads(m.decode('utf8'))), bootstrap_servers=[KAFKA_HOST])
    topic_partition = TopicPartition(topic=topic, partition=0)
    start_timestamp = int(self.start_date.timestamp())
    end_offset = consumer.end_offsets([topic_partition])[topic_partition]
    if (end_offset == 0):
        # Empty topic: log the first message that ever arrives, then fall through.
        self.logger.warning('topic:{} end offset:{}'.format(topic, end_offset))
        for message in consumer:
            self.logger.info('first message:{} to topic:{}'.format(message, topic))
            break
    # poll() before seek() forces partition assignment.
    consumer.poll(5, 1)
    consumer.seek(topic_partition, 0)
    # Find the first offset whose timestamp is >= start_date.
    partition_map_offset_and_timestamp = consumer.offsets_for_times({topic_partition: start_timestamp})
    if partition_map_offset_and_timestamp:
        offset_and_timestamp = partition_map_offset_and_timestamp[topic_partition]
        if offset_and_timestamp:
            # Records at/after start_date exist: replay from that offset.
            consumer.poll(5, 1)
            consumer.seek(topic_partition, offset_and_timestamp.offset)
            end_offset = consumer.end_offsets([topic_partition])[topic_partition]
            for message in consumer:
                # Prefer the payload timestamp; fall back to the Kafka record's.
                if ('timestamp' in message.value):
                    message_time = to_timestamp(message.value['timestamp'])
                else:
                    message_time = to_timestamp(message.timestamp)
                # Stop at end_date or at the last available record.
                if (self.end_date and ((message_time > self.end_date) or ((message.offset + 1) == end_offset))):
                    consumer.close()
                    break
                self.current_time = message_time
                # NOTE(review): dead branch — closing-account settlement is disabled.
                if False:
                    self.account_service.calculate_closing_account(self.current_time)
                getattr(self, func)(message.value)
        else:
            # start_date is past the newest record: report the topic's last timestamp.
            consumer.poll(5, 1)
            consumer.seek(topic_partition, (consumer.end_offsets([topic_partition])[topic_partition] - 1))
            message = consumer.poll(5000, 1)
            kafka_end_date = datetime.fromtimestamp(message[topic_partition][0].timestamp).strftime(TIME_FORMAT_DAY)
            self.logger.warning('start:{} is after the last record:{}'.format(self.start_date, kafka_end_date))
def run(self):
    """Start the bot: spawn one consumer thread per implemented handler, then
    consume the quote topic on the current thread with on_event().

    Handlers are discovered by intersecting the bot's attributes with
    func_map_topic (handler name -> topic name). Handlers whose topic does not
    exist on the broker are logged and skipped.
    """
    self.logger.info('start bot:{}'.format(self))
    funcs = (set(dir(self)) & self.func_map_topic.keys())
    consumer = KafkaConsumer(bootstrap_servers=[KAFKA_HOST])
    current_topics = consumer.topics()
    for func in funcs:
        topic = self.func_map_topic.get(func)
        if (topic not in current_topics):
            self.logger.exception('you implement func:{},but the topic:{} for it not exist'.format(func, topic))
            continue
        self.threads.append(threading.Thread(target=self.consume_topic_with_func, args=(self.func_map_topic.get(func), func)))
    for the_thread in self.threads:
        the_thread.start()
    # Block on the main quote stream; returns when the topic is exhausted.
    self.consume_topic_with_func(self.quote_topic, 'on_event')
    self.logger.info('finish bot:{}'.format(self))
def generate_robustness_report(suite_summary: TestSuiteSummary, model_name: str, save_dir: str, logger: Optional[logging.Logger]=None):
    """Render the robustness HTML report for one test-suite run.

    :param suite_summary: suite results; ``description`` and ``summaries`` feed the template.
    :param model_name: used in the report content and output filename.
    :param save_dir: directory for the report; created (including parents) if missing.
    :param logger: destination for the "report generated" message; stdout if None.
    """
    cur_dir = Path(__file__).resolve().parent
    template_dir = (cur_dir / 'templates/')
    environment = Environment(loader=FileSystemLoader(template_dir))
    template = environment.get_template('report_template.html')
    info = {'suite_description': suite_summary.description, 'model_name': model_name, 'summaries': suite_summary.summaries}
    rendered_template = template.render(info)
    # Bug fix: os.mkdir() failed when save_dir was a nested, non-existent path;
    # create the whole tree and tolerate a pre-existing directory.
    Path(save_dir).mkdir(parents=True, exist_ok=True)
    filename = f'robustness_report_{model_name}.html'
    fpath = (Path(save_dir) / filename)
    with open(fpath, 'w', encoding='utf-8') as fid:
        fid.write(rendered_template)
    abs_path = Path(fpath).resolve()
    log_msg = f'Report generated at: {abs_path}'
    if (logger is None):
        print(log_msg)
    else:
        logger.info(log_msg)
class OptionPlotoptionsTreemapSonificationContexttracksMappingLowpassFrequency(Options):
    """Highcharts ``plotOptions.treemap.sonification.contextTracks.mapping.lowpass.frequency`` options.

    NOTE(review): the original block defined each getter/setter pair as two plain
    methods with the same name, so the setter silently shadowed (and removed) the
    getter. Restored the @property/@setter pairing — the pattern these generated
    Options wrappers use — so both accessors work.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class DefaultGptsMessageMemory(GptsMessageMemory):
    """In-memory GptsMessage store backed by a pandas DataFrame.

    NOTE(review): the original query strings were garbled (e.g.
    ``conv_id==_id and (sender== or receiver==)`` with empty comparands);
    rebuilt them with pandas ``@variable`` local references.
    """

    def __init__(self):
        # One column per GptsMessage dataclass field.
        self.df = pd.DataFrame(columns=[field.name for field in fields(GptsMessage)])

    def append(self, message: GptsMessage):
        """Append one message as a new row."""
        self.df.loc[len(self.df)] = message.to_dict()

    def _rows_to_messages(self, result) -> List[GptsMessage]:
        # Convert query-result rows back into GptsMessage objects.
        return [GptsMessage.from_dict(dict(zip(self.df.columns, row)))
                for row in result.itertuples(index=False, name=None)]

    def get_by_agent(self, conv_id: str, agent: str) -> Optional[List[GptsMessage]]:
        """Messages of a conversation where the agent is sender or receiver."""
        result = self.df.query(
            'conv_id == @conv_id and (sender == @agent or receiver == @agent)')
        return self._rows_to_messages(result)

    def get_between_agents(self, conv_id: str, agent1: str, agent2: str, current_gogal: Optional[str]=None) -> Optional[List[GptsMessage]]:
        """Messages exchanged (in either direction) between two agents,
        optionally restricted to one goal."""
        base = ('conv_id == @conv_id and ((sender == @agent1 and receiver == @agent2)'
                ' or (sender == @agent2 and receiver == @agent1))')
        if current_gogal:
            result = self.df.query(base + ' and current_gogal == @current_gogal')
        else:
            result = self.df.query(base)
        return self._rows_to_messages(result)

    def get_by_conv_id(self, conv_id: str) -> Optional[List[GptsMessage]]:
        """All messages belonging to one conversation."""
        result = self.df.query('conv_id == @conv_id')
        return self._rows_to_messages(result)
def splitH5single(basename1, basename2, proc, start, finaltime, stride):
    """Split two per-processor HDF5 result files into one small file per time step.

    For each step in range(start, finaltime + 1, stride), writes
    ``sol.p<proc>.<step>.h5`` containing the mesh, velocity components, pressure
    (from file 1) and the level-set distance field (from file 2).

    :param basename1: prefix of the primary results file (``<basename1><proc>.h5``).
    :param basename2: prefix of the phid results file (``<basename2><proc>.h5``).
    :param proc: processor index embedded in file names.
    :param start, finaltime, stride: inclusive time-step range to extract.
    """
    filename = ((basename1 + str(proc)) + '.h5')
    print(' Open:', filename)
    f1 = tables.open_file(filename)
    filename = ((basename2 + str(proc)) + '.h5')
    print(' Open:', filename)
    f2 = tables.open_file(filename)
    print(' Step:', end=' ')
    # (output array name, source file, node-name prefix in the source file)
    datasets = [
        ('elements', f1, 'elementsSpatial_Domain'),
        ('nodes', f1, 'nodesSpatial_Domain'),
        ('u', f1, 'u'),
        ('v', f1, 'v'),
        ('w', f1, 'w'),
        ('p', f1, 'p'),
        ('phid', f2, 'phid'),
    ]
    for step in range(start, (finaltime + 1), stride):
        print(step, end=' ')
        sys.stdout.flush()
        filename = (((('sol.p' + str(proc)) + '.') + str(step)) + '.h5')
        hdfFile = tables.open_file(filename, mode='w', title=(filename + ' Data'))
        # Bug fix: the old camelCase createArray() was removed in PyTables 3.x
        # (the rest of this function already uses the new open_file/get_node API).
        for name, src, prefix in datasets:
            hdfFile.create_array('/', name, src.get_node('/', (prefix + str(step)))[:])
        hdfFile.close()
    f1.close()
    f2.close()
    print('finished')
def upgrade():
    """Drop the deprecated data-qualifier and registry structures plus unused
    columns from the ctl_* tables (indexes first, then tables, then columns)."""
    # Data qualifiers: indexes, table, then the columns that referenced it.
    for index_name in ('ix_ctl_data_qualifiers_fides_key', 'ix_ctl_data_qualifiers_id'):
        op.drop_index(index_name, table_name='ctl_data_qualifiers')
    op.drop_table('ctl_data_qualifiers')
    op.drop_column('ctl_datasets', 'data_qualifier')
    op.drop_index('ix_ctl_systems_name', table_name='ctl_systems')
    op.drop_column('privacydeclaration', 'data_qualifier')
    # Registries: indexes, table, then the referencing column.
    for index_name in ('ix_ctl_registries_fides_key', 'ix_ctl_registries_id'):
        op.drop_index(index_name, table_name='ctl_registries')
    op.drop_table('ctl_registries')
    op.drop_column('ctl_systems', 'registry_id')
    # Unused GDPR-related columns.
    for column in ('legal_basis', 'special_category', 'legitimate_interest',
                   'legitimate_interest_impact_assessment', 'recipients'):
        op.drop_column('ctl_data_uses', column)
    for column in ('retention', 'joint_controller', 'third_country_transfers'):
        op.drop_column('ctl_datasets', column)
    for column in ('joint_controller', 'data_responsibility_title',
                   'third_country_transfers', 'data_protection_impact_assessment'):
        op.drop_column('ctl_systems', column)
def test_checkpoint_deposits(tester, casper, concise_casper, funded_accounts, validation_keys, deposit_amount, deposit_validator, new_epoch, send_vote, mk_suggested_vote):
    """Checkpoint deposit accounting: cur/prev dynasty deposits stay zero until a
    validator's deposit rotates through the dynasties, then track totals within
    the per-epoch interest bound (< 1% growth per epoch)."""
    # Epoch 0: no validators, both checkpoint deposit counters are zero.
    current_epoch = concise_casper.current_epoch()
    assert (concise_casper.checkpoints__cur_dyn_deposits(current_epoch) == 0)
    assert (concise_casper.checkpoints__prev_dyn_deposits(current_epoch) == 0)
    new_epoch()
    current_epoch = concise_casper.current_epoch()
    assert (concise_casper.checkpoints__cur_dyn_deposits(current_epoch) == 0)
    assert (concise_casper.checkpoints__prev_dyn_deposits(current_epoch) == 0)
    # Deposit a validator; it takes two epochs to enter the current dynasty.
    initial_validator = deposit_validator(funded_accounts[0], validation_keys[0], deposit_amount)
    new_epoch()
    current_epoch = concise_casper.current_epoch()
    assert (concise_casper.checkpoints__cur_dyn_deposits(current_epoch) == 0)
    assert (concise_casper.checkpoints__prev_dyn_deposits(current_epoch) == 0)
    new_epoch()
    current_epoch = concise_casper.current_epoch()
    # Deposit now counts toward curdyn totals, but the checkpoint snapshot
    # (taken at the start of the epoch) still reads zero.
    assert (concise_casper.total_curdyn_deposits_in_wei() == deposit_amount)
    assert (concise_casper.total_prevdyn_deposits_in_wei() == 0)
    assert (concise_casper.checkpoints__cur_dyn_deposits(current_epoch) == 0)
    assert (concise_casper.checkpoints__prev_dyn_deposits(current_epoch) == 0)
    send_vote(mk_suggested_vote(initial_validator, validation_keys[0]))
    new_epoch()
    current_epoch = concise_casper.current_epoch()
    # After a vote + epoch: deposit has rotated into prevdyn; cur checkpoint sees it.
    assert (concise_casper.total_curdyn_deposits_in_wei() == deposit_amount)
    assert (concise_casper.total_prevdyn_deposits_in_wei() == deposit_amount)
    assert (concise_casper.checkpoints__cur_dyn_deposits(current_epoch) == deposit_amount)
    assert (concise_casper.checkpoints__prev_dyn_deposits(current_epoch) == 0)
    # A second validator deposits; the first keeps voting.
    second_validator = deposit_validator(funded_accounts[1], validation_keys[1], deposit_amount)
    send_vote(mk_suggested_vote(initial_validator, validation_keys[0]))
    new_epoch()
    current_epoch = concise_casper.current_epoch()
    assert (concise_casper.total_curdyn_deposits_in_wei() == deposit_amount)
    assert (concise_casper.total_prevdyn_deposits_in_wei() == deposit_amount)
    assert (concise_casper.checkpoints__cur_dyn_deposits(current_epoch) == deposit_amount)
    assert (concise_casper.checkpoints__prev_dyn_deposits(current_epoch) == deposit_amount)
    prev_curdyn_deposits = concise_casper.total_curdyn_deposits_in_wei()
    prev_prevdyn_deposits = concise_casper.total_prevdyn_deposits_in_wei()
    send_vote(mk_suggested_vote(initial_validator, validation_keys[0]))
    new_epoch()
    current_epoch = concise_casper.current_epoch()
    cur_dyn_deposits = concise_casper.checkpoints__cur_dyn_deposits(current_epoch)
    prev_dyn_deposits = concise_casper.checkpoints__prev_dyn_deposits(current_epoch)
    # Checkpoint snapshots may include accrued interest but stay below +1%.
    assert ((cur_dyn_deposits >= prev_curdyn_deposits) and (cur_dyn_deposits < (prev_curdyn_deposits * 1.01)))
    assert ((prev_dyn_deposits >= prev_prevdyn_deposits) and (prev_dyn_deposits < (prev_prevdyn_deposits * 1.01)))
    # With both validators voting, the bound holds across several more epochs.
    for _ in range(3):
        prev_curdyn_deposits = concise_casper.total_curdyn_deposits_in_wei()
        prev_prevdyn_deposits = concise_casper.total_prevdyn_deposits_in_wei()
        send_vote(mk_suggested_vote(initial_validator, validation_keys[0]))
        send_vote(mk_suggested_vote(second_validator, validation_keys[1]))
        new_epoch()
        current_epoch = concise_casper.current_epoch()
        cur_dyn_deposits = concise_casper.checkpoints__cur_dyn_deposits(current_epoch)
        prev_dyn_deposits = concise_casper.checkpoints__prev_dyn_deposits(current_epoch)
        assert ((cur_dyn_deposits >= prev_curdyn_deposits) and (cur_dyn_deposits < (prev_curdyn_deposits * 1.01)))
        assert ((prev_dyn_deposits >= prev_prevdyn_deposits) and (prev_dyn_deposits < (prev_prevdyn_deposits * 1.01)))
def _wrap_signature_and_type_validation(value: Union[('_DefaultMagic', Callable, '_CallableMock')], template: Any, attr_name: str, type_validation: bool) -> Union[(Callable, '_CallableMock')]:
    """Wrap a mock value so calls are validated against the template's signature
    (and, optionally, its type annotations) before being forwarded.

    Returns the value unchanged when no usable template attribute exists.
    """
    import functools  # local: keep this block self-contained

    if _is_a_mock(template):
        template = _extract_mock_template(template)
        if (not template):
            return value
    if (not hasattr(template, attr_name)):
        return value
    callable_template = getattr(template, attr_name)
    skip_first_arg = _skip_first_arg(template, attr_name)

    # Bug fix: the bare `(callable_template)` statement was a stripped
    # `@functools.wraps(callable_template)` decorator; restored so the wrapper
    # carries the template's metadata.
    @functools.wraps(callable_template)
    def with_sig_and_type_validation(*args: Any, **kwargs: Any) -> Any:
        if _validate_callable_signature(skip_first_arg, callable_template, template, attr_name, args, kwargs):
            if type_validation:
                _validate_callable_arg_types(skip_first_arg, callable_template, args, kwargs)
        return value(*args, **kwargs)

    with_sig_and_type_validation.__qualname__ = 'TestSldeValidation({})'.format(with_sig_and_type_validation.__qualname__)
    setattr(with_sig_and_type_validation, '__is_testslide_type_validation_wrapping', True)
    return with_sig_and_type_validation
def test_callback_no_command(testbot):
    """An unknown command should fall through to the command-not-found plugin."""
    cmd = '!this_is_not_a_real_command_at_all'
    expected_str = f'Command fell through: {cmd}'
    plugin_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'commandnotfound_plugin')
    # Swap the built-in filter for the test plugin shipped next to this file.
    testbot.exec_command('!plugin deactivate CommandNotFoundFilter')
    testbot.bot.plugin_manager._extra_plugin_dir = plugin_dir
    testbot.bot.plugin_manager.update_plugin_places([])
    testbot.exec_command('!plugin activate TestCommandNotFoundFilter')
    assert expected_str == testbot.exec_command(cmd)
def _to_wei(value: WeiInputTypes) -> int:
    """Convert a wei-like value (bytes, hex string, scientific-notation float,
    or '<number> <unit>' string) to an integer amount of wei.

    Falls back to _return_int() for plain numeric values.
    """
    original = value
    # bytes are treated as a big-endian hex-encoded integer.
    if isinstance(value, bytes):
        value = HexBytes(value).hex()
    if ((value is None) or (value == '0x')):
        return 0
    # Scientific-notation floats: shift the decimal point as a string so no
    # float precision is lost (e.g. 1.5e+18 -> 1500000000000000000).
    if (isinstance(value, float) and ('e+' in str(value))):
        (num_str, dec) = str(value).split('e+')
        # num is [integer part, fractional part] (fractional may be '').
        num = (num_str.split('.') if ('.' in num_str) else [num_str, ''])
        return int(((num[0] + num[1][:int(dec)]) + ('0' * (int(dec) - len(num[1])))))
    if (not isinstance(value, str)):
        return _return_int(original, value)
    if (value[:2] == '0x'):
        return int(value, 16)
    # '<number> <unit>' strings, e.g. '1.5 ether': scale by the unit's decimals.
    for (unit, dec) in UNITS.items():
        if ((' ' + unit) not in value):
            continue
        num_str = value.split(' ')[0]
        num = (num_str.split('.') if ('.' in num_str) else [num_str, ''])
        return int(((num[0] + num[1][:int(dec)]) + ('0' * (int(dec) - len(num[1])))))
    return _return_int(original, value)
def run_test(layers, quadrilateral):
    """Solve Laplace's equation on an extruded unit-cube mesh with Dirichlet
    values 0 (bottom) and 42 (top) and return the L2 error against the exact
    linear solution 42*z.

    :param layers: number of extrusion layers (total height kept at 1).
    :param quadrilateral: whether the base unit-square mesh uses quads.
    :return: sqrt of the integrated squared error (float).
    """
    m = UnitSquareMesh(1, 1, quadrilateral=quadrilateral)
    mesh = ExtrudedMesh(m, layers, layer_height=(1.0 / layers))
    V = FunctionSpace(mesh, 'CG', 1)
    bcs = [DirichletBC(V, 0, 'bottom'), DirichletBC(V, 42, 'top')]
    v = TestFunction(V)
    u = TrialFunction(V)
    a = (inner(grad(u), grad(v)) * dx)
    # Zero right-hand side: the solution is purely BC-driven.
    f = Function(V)
    f.assign(0)
    L = (inner(f, v) * dx)
    u = Function(V)
    # Exact solution is linear in the extrusion coordinate: 42 * z.
    exact = Function(V)
    xs = SpatialCoordinate(mesh)
    exact.interpolate((42 * xs[2]))
    solve((a == L), u, bcs=bcs)
    res = sqrt(assemble((inner((u - exact), (u - exact)) * dx)))
    return res
class INI(Database):
    """DHCP definition database backed by an INI file.

    Sections are either subnets (named ``<subnet>|<serial>``) or MAC addresses.
    All data is parsed into memory up front; lookups are lock-protected so
    reinitialise() can safely swap the contents at runtime.
    """
    _maps = None      # int(MAC) -> (ip, hostname, (subnet, serial), extra)
    _subnets = None   # (subnet, serial) -> subnet tuple (see _process_subnet)
    _lock = None      # guards _maps/_subnets against concurrent reinitialise()

    def __init__(self):
        self._maps = {}
        self._subnets = {}
        self._lock = threading.Lock()
        self.reinitialise()

    def _parse_extra_option(self, reader, section, option):
        """Parse one extra option, honouring an optional type prefix.

        Options of the form ``<t>:<name>`` are read with a typed getter
        (s=string, i=int, f=float, b=bool); an upper-case prefix means a
        ValueError yields None instead of propagating.

        :return: (option name without prefix, parsed value).
        """
        method = reader.get
        none_on_error = False
        # Bug fix: previously real_option was only assigned inside the prefix
        # branch, so unprefixed (or single-character) options raised
        # NameError/IndexError; default to the raw option name.
        real_option = option
        if ((len(option) > 1) and (option[1] == ':')):
            l_option = option[0].lower()
            none_on_error = (l_option != option[0])
            if (l_option == 's'):
                pass
            elif (l_option == 'i'):
                method = reader.getint
            elif (l_option == 'f'):
                method = reader.getfloat
            elif (l_option == 'b'):
                method = reader.getboolean
            real_option = option[2:]
        try:
            value = method(section, option, None)
        except ValueError:
            if none_on_error:
                return (real_option, None)
            raise
        else:
            return (real_option, value)

    def _parse_extra(self, reader, section, omitted, section_type):
        """Collect every option of a section not in `omitted` into an
        '{section_type}.{option}' -> value dict (or None if empty)."""
        extra = {}
        for option in reader.options(section):
            if (not (option in omitted)):
                # Bug fix: the reader argument was previously dropped from this call.
                (option, value) = self._parse_extra_option(reader, section, option)
                extra['{}.{}'.format(section_type, option)] = value
        return (extra or None)

    def _parse_ini(self):
        """Read config.INI_FILE and populate _maps and _subnets."""
        _logger.info("Preparing to read '{}'...".format(config.INI_FILE))
        reader = _Config()
        if (not reader.read(config.INI_FILE)):
            raise ValueError("Unable to read '{}'".format(config.INI_FILE))
        subnet_re = re.compile('^(?P<subnet>.+?)\\|(?P<serial>\\d+)$')
        for section in reader.sections():
            m = subnet_re.match(section)
            if m:
                self._process_subnet(reader, section, m.group('subnet'), int(m.group('serial')))
            else:
                try:
                    mac = MAC(section)
                except Exception:
                    _logger.warning('Unrecognised section encountered: {}'.format(section))
                else:
                    self._process_map(reader, section, mac)
        self._validate_references()

    def _process_subnet(self, reader, section, subnet, serial):
        """Parse one subnet section into the _subnets tuple; lease-time is mandatory."""
        _logger.debug('Processing subnet: {}'.format(section))
        lease_time = reader.getint(section, 'lease-time', None)
        if (not lease_time):
            raise ValueError("Field 'lease-time' unspecified for '{}'".format(section))
        gateway = reader.get(section, 'gateway', None)
        subnet_mask = reader.get(section, 'subnet-mask', None)
        broadcast_address = reader.get(section, 'broadcast-address', None)
        ntp_servers = reader.get(section, 'ntp-servers', None)
        domain_name_servers = reader.get(section, 'domain-name-servers', None)
        domain_name = reader.get(section, 'domain-name', None)
        extra = self._parse_extra(reader, section, ('lease-time', 'gateway', 'subnet-mask', 'broadcast-address', 'ntp-servers', 'domain-name-servers', 'domain-name'), 'subnets')
        self._subnets[(subnet, serial)] = (lease_time, gateway, subnet_mask, broadcast_address, ntp_servers, domain_name_servers, domain_name, extra)

    def _process_map(self, reader, section, mac):
        """Parse one MAC section into the _maps tuple; ip/subnet/serial are mandatory."""
        _logger.debug('Processing map: {}'.format(section))
        ip = reader.get(section, 'ip', None)
        if (not ip):
            raise ValueError("Field 'ip' unspecified for '{}'".format(section))
        hostname = reader.get(section, 'hostname', None)
        subnet = reader.get(section, 'subnet', None)
        if (not subnet):
            raise ValueError("Field 'subnet' unspecified for '{}'".format(section))
        serial = reader.getint(section, 'serial', None)
        if (serial is None):
            raise ValueError("Field 'serial' unspecified for '{}'".format(section))
        extra = self._parse_extra(reader, section, ('ip', 'hostname', 'subnet', 'serial'), 'maps')
        self._maps[int(mac)] = (ip, hostname, (subnet, serial), extra)

    def _validate_references(self):
        """Ensure every MAC map points at a declared subnet|serial pair."""
        for (mac, (_, _, subnet, _)) in self._maps.items():
            if (subnet not in self._subnets):
                raise ValueError("MAC '{}' references unknown subnet '{}|{}'".format(MAC(mac), subnet[0], subnet[1]))

    def lookupMAC(self, mac):
        """Resolve a MAC to a Definition, merging map- and subnet-level extras;
        returns None for unknown MACs."""
        mac = int(mac)
        with self._lock:
            map = self._maps.get(mac)
            if (not map):
                return None
            subnet = self._subnets.get(map[2])
            extra_map = map[3]
            extra_subnet = subnet[7]
            # Subnet extras override map extras on key collisions.
            if (extra_map and extra_subnet):
                extra = extra_map.copy()
                extra.update(extra_subnet)
            else:
                extra = ((extra_map and extra_map.copy()) or (extra_subnet and extra_subnet.copy()))
            return Definition(ip=map[0], lease_time=subnet[0], subnet=map[2][0], serial=map[2][1], hostname=map[1], gateways=subnet[1], subnet_mask=subnet[2], broadcast_address=subnet[3], domain_name=subnet[6], domain_name_servers=subnet[5], ntp_servers=subnet[4], extra=extra)

    def reinitialise(self):
        """Atomically drop all cached data and re-parse the INI file."""
        with self._lock:
            self._maps.clear()
            self._subnets.clear()
            self._parse_ini()
        _logger.info('INI-file contents parsed and loaded into memory')
class DictItemObserver():
    """Observer descriptor for the items of an observed TraitDict.

    notify controls whether change notifications are wired for the dict;
    optional makes a non-TraitDict target a silent no-op instead of an error.
    """

    __slots__ = ('notify', 'optional')

    def __init__(self, *, notify, optional):
        # keyword-only, mirroring how observer graphs are built declaratively
        self.notify = notify
        self.optional = optional

    def __hash__(self):
        return hash((type(self).__name__, self.notify, self.optional))

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return (self.notify, self.optional) == (other.notify, other.optional)

    def __repr__(self):
        return '{}(notify={!r}, optional={!r})'.format(
            self.__class__.__name__, self.notify, self.optional)

    def iter_observables(self, object):
        """Yield the dict itself as the observable (or nothing if optional)."""
        if isinstance(object, TraitDict):
            yield object
            return
        if self.optional:
            return
        raise ValueError('Expected a TraitDict to be observed, got {!r} (type: {!r})'.format(object, type(object)))

    def iter_objects(self, object):
        """Yield the dict's values for downstream observers in the graph."""
        if isinstance(object, TraitDict):
            yield from object.values()
            return
        if self.optional:
            return
        raise ValueError('Expected a TraitDict to be observed, got {!r} (type: {!r})'.format(object, type(object)))

    def get_notifier(self, handler, target, dispatcher):
        """Notifier invoked when the observed dict mutates."""
        return TraitEventNotifier(
            handler=handler,
            target=target,
            dispatcher=dispatcher,
            event_factory=dict_event_factory,
            prevent_event=(lambda event: False),
        )

    def get_maintainer(self, graph, handler, target, dispatcher):
        """Notifier that keeps downstream observers in sync on dict changes."""
        return ObserverChangeNotifier(
            observer_handler=_observer_change_handler,
            event_factory=dict_event_factory,
            prevent_event=(lambda event: False),
            graph=graph,
            handler=handler,
            target=target,
            dispatcher=dispatcher,
        )

    def iter_extra_graphs(self, graph):
        """No extra graphs are contributed by this observer."""
        return
        yield
def generate_params():
    """Build randomized parameter sets for the configuration-site primitives.

    Only the first BSCAN and first ICAP site (in sorted order) are parameterized;
    each other supported site type gets one randomized parameter plus a LOC
    constraint. Unsupported site types are skipped.

    :return: list of dicts with 'site', 'site_type', 'module' and 'params'.
    """
    bscan_already_on = False
    icap_already_on = False
    tile_params = []
    for (site, site_type) in sorted(gen_sites()):
        p = {}
        # Bug fix: comparisons used substring tests (site_type in 'ICAP') and
        # icap_already_on was never set, so every ICAP site was parameterized.
        if ((site_type == 'ICAP') and (not icap_already_on)):
            p['ICAP_WIDTH'] = verilog.quote(random.choice(['X32', 'X8', 'X16']))
            icap_already_on = True
        elif ((site_type == 'BSCAN') and (not bscan_already_on)):
            p['JTAG_CHAIN'] = random.randint(1, 4)
            bscan_already_on = True
        elif (site_type == 'CAPTURE'):
            p['ONESHOT'] = verilog.quote(random.choice(['TRUE', 'FALSE']))
        elif (site_type == 'STARTUP'):
            p['PROG_USR'] = verilog.quote(random.choice(['TRUE', 'FALSE']))
        elif (site_type == 'FRAME_ECC'):
            p['FARSRC'] = verilog.quote(random.choice(['FAR', 'EFAR']))
        elif (site_type in ['DCIRESET', 'USR_ACCESS']):
            p['ENABLED'] = random.randint(0, 1)
        else:
            continue
        p['LOC'] = verilog.quote(site)
        tile_params.append({'site': site, 'site_type': site_type, 'module': 'mod_{}'.format(site_type), 'params': p})
    return tile_params
def legal_doc_data(*args, **kwargs):
    """Static Elasticsearch-style fixture for one legal document hit.

    Accepts (and ignores) any arguments so it can stand in for a search call.
    """
    documents = [
        {'document_id': 111, 'category': 'Final Opinion', 'description': 'Closeout Letter', 'url': 'files/legal/aos/100/111.pdf'},
        {'document_id': 222, 'category': 'Draft Documents', 'description': 'Vote', 'url': 'files/legal/aos/100/222.pdf'},
    ]
    source = {'type': 'document type', 'no': '100', 'summary': 'summery 100', 'documents': documents}
    return {'hits': {'hits': [{'_source': source}]}}
def test_build_from():
    """StorageItem.build_from stores the raw fields and derives length from them."""
    key_hash, key_data, value_data = b'key_hash', b'key_data', b'value_data'
    item = StorageItem.build_from(key_hash, key_data, value_data)
    assert item.key_hash == key_hash
    assert item.key_data == key_data
    assert item.value_data == value_data
    # length = fixed 32-byte overhead plus the size of each stored object
    expected_length = 32 + sum(_get_object_bytes(obj) for obj in (key_hash, key_data, value_data))
    assert item.length == expected_length
def test_align_missing_data_designators(o_dir, e_dir, request):
    """Run phyluce_align_add_missing_data_designators and compare every output
    alignment byte-for-byte against the checked-in expected files."""
    program = 'bin/align/phyluce_align_add_missing_data_designators'
    output = os.path.join(o_dir, 'mafft-missing-data-designators')
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--alignments', os.path.join(e_dir, 'mafft'),
        '--output', output,
        '--input-format', 'fasta',
        '--output-format', 'nexus',
        '--match-count-output', os.path.join(e_dir, 'taxon-set.incomplete.conf'),
        '--incomplete-matrix', os.path.join(e_dir, 'taxon-set.incomplete'),
        '--cores', '1',
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0, print('{}'.format(stderr.decode('utf-8')))
    produced_files = glob.glob(os.path.join(output, '*'))
    assert produced_files, 'There are no output files'
    for produced_file in produced_files:
        name = os.path.basename(produced_file)
        print(name)
        expected_path = os.path.join(e_dir, 'mafft-missing-data-designators', name)
        observed = open(produced_file).read()
        expected = open(expected_path).read()
        assert observed == expected
# NOTE(review): the original began with a bare `.parametrize(...)` expression —
# a syntax error; the `@pytest.mark` prefix was evidently stripped and is
# restored here (pytest is the standard import in this test suite).
@pytest.mark.parametrize('elasticapm_client', [{'client_class': AzureFunctionsTestClient}], indirect=['elasticapm_client'])
def test_service_info(elasticapm_client):
    """Azure Functions env vars should populate framework/runtime/node service info."""
    env = {
        'FUNCTIONS_EXTENSION_VERSION': '1.1',
        'FUNCTIONS_WORKER_RUNTIME': 'MontyPython',
        'FUNCTIONS_WORKER_RUNTIME_VERSION': '2.2',
        'WEBSITE_INSTANCE_ID': 'foo',
    }
    with mock.patch.dict(os.environ, env):
        service_info = elasticapm_client.get_service_info()
        assert (service_info['framework']['name'] == 'Azure Functions')
        assert (service_info['framework']['version'] == '1.1')
        assert (service_info['runtime']['name'] == 'MontyPython')
        assert (service_info['runtime']['version'] == '2.2')
        assert (service_info['node']['configured_name'] == 'foo')
class NoSuchObject(Type[None]):
    """SNMP context type signalling that the requested object does not exist."""
    TYPECLASS = TypeClass.CONTEXT
    NATURE = [TypeNature.PRIMITIVE]
    TAG = 0

    def __init__(self, value: Union[(TWrappedPyType, _SENTINEL_UNINITIALISED)]=UNINITIALISED) -> None:
        # The uninitialised sentinel collapses to a plain None payload.
        resolved = None if value is UNINITIALISED else value
        super().__init__(value=resolved)
class Block(Serializable):
    """RLP block: header plus transaction and uncle-header lists."""
    fields = [
        ('header', BlockHeader),
        ('transaction_list', CountableList(Transaction)),
        ('uncles', CountableList(BlockHeader)),
    ]

    def __init__(self, header, transaction_list=None, uncles=None, **kwargs):
        # Falsy lists become fresh empty lists so serialization always sees a list.
        super().__init__(header, transaction_list or [], uncles or [], **kwargs)
class HTTPIter(HTTPResponse):
    """Streaming 200 response that sends chunks from an iterable of bytes."""

    def __init__(self, iter: Iterable[bytes], headers: Dict[(str, str)]=None, cookies: Dict[(str, Any)]=None):
        # Bug fix: headers/cookies previously used mutable {} defaults shared
        # across instances; None is substituted with a fresh dict per call.
        super().__init__(200, headers=(headers if headers is not None else {}), cookies=(cookies if cookies is not None else {}))
        self.iter = iter

    async def _send_body(self, send):
        """ASGI path: emit each chunk, then a final empty body event.

        NOTE(review): the original message dicts were garbled
        (``{'type': ' 'body': ...}``); rebuilt as standard ASGI
        'http.response.body' events — confirm against the ASGI callers.
        """
        for chunk in self.iter:
            (await send({'type': 'http.response.body', 'body': chunk, 'more_body': True}))
        (await send({'type': 'http.response.body', 'body': b'', 'more_body': False}))

    async def rsgi(self, protocol: HTTPProtocol):
        """RSGI path: open a response stream and push each chunk."""
        trx = protocol.response_stream(self.status_code, list(self.rsgi_headers))
        for chunk in self.iter:
            (await trx.send_bytes(chunk))
def extractAaokossWordpressCom(item):
    """Map a feed item from aaokoss.wordpress.com to a release message.

    Returns None for previews/items without chapter or volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (release name, translation type)
    known_tags = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in known_tags.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class LogFormatter(object):
    """Renders log records as 'id:[file:][line:]message' strings."""

    # Bug fix: both methods lacked `self` yet were plain functions; declared
    # @staticmethod so they also work when called on an instance.
    @staticmethod
    def format(lid, msg, efile=None, eline=None):
        """Format a record: 3-char right-aligned id, optional file and line,
        then the message."""
        rval = ('%3d:' % lid)
        if (efile is not None):
            rval += ('%s:' % efile)
        if (eline is not None):
            rval += ('%s:' % eline)
        rval += ('%s' % msg)
        return rval

    @staticmethod
    def rmte(rmte):
        """Format an RMTE record via its accessor methods."""
        return LogFormatter.format(rmte.get_id(), rmte.get_msg(), rmte.get_efile(), rmte.get_eline())
def test_serialization_set_command():
    """A task's container command can be overridden and restored.

    NOTE(review): `t1` is a plain function here, yet the test calls
    `t1.set_command_fn` / `get_command` / `reset_command_fn` — a task-wrapping
    decorator (flytekit's `@task`) was evidently stripped above `def t1`;
    restore it from the upstream source. `serialization_settings` is also
    expected to exist at module scope.
    """
    def t1() -> str:
        return 'Hello'

    # Override the serialized command...
    def new_command_fn(settings: SerializationSettings) -> typing.List[str]:
        return ['echo', 'hello', 'world']
    t1.set_command_fn(new_command_fn)
    custom_command = t1.get_command(serialization_settings)
    assert (['echo', 'hello', 'world'] == custom_command)
    # ...then reset back to the default pyflyte entry point.
    t1.reset_command_fn()
    custom_command = t1.get_command(serialization_settings)
    assert (custom_command[0] == 'pyflyte-execute')
class _KeywordSearch(_Filter):
    """Filter that matches free-text keywords across many award/transaction fields."""
    underscore_name = 'keyword_search'

    # NOTE(review): first parameter is `cls` but no @classmethod decorator is
    # visible — it appears stripped; confirm against the sibling _Filter
    # subclasses and restore if so.
    def generate_elasticsearch_query(cls, filter_values: List[str], query_type: _QueryType, **options) -> ES_Q:
        """Build a dis_max query: one OR'd query_string per keyword over the
        fixed field list (names, codes, descriptions, locations, ids)."""
        keyword_queries = []
        fields = ['recipient_name', 'parent_recipient_name', 'naics_code', 'naics_description', 'product_or_service_code', 'product_or_service_description', 'transaction_description', 'piid', 'fain', 'uri', 'recipient_unique_id', 'parent_recipient_unique_id', 'description', 'cfda_number', 'cfda_title', 'awarding_toptier_agency_name', 'awarding_subtier_agency_name', 'funding_toptier_agency_name', 'funding_subtier_agency_name', 'business_categories', 'type_description', 'pop_country_code', 'pop_country_name', 'pop_state_code', 'pop_county_code', 'pop_county_name', 'pop_zip5', 'pop_congressional_code', 'pop_city_name', 'recipient_location_country_code', 'recipient_location_country_name', 'recipient_location_state_code', 'recipient_location_county_code', 'recipient_location_county_name', 'recipient_location_zip5', 'recipient_location_congressional_code', 'recipient_location_city_name', 'modification_number', 'recipient_uei', 'parent_uei']
        for filter_value in filter_values:
            keyword_queries.append(ES_Q('query_string', query=filter_value, default_operator='OR', fields=fields))
        return ES_Q('dis_max', queries=keyword_queries)
class i2c_msg():
    """Minimal i2c_msg stand-in: builds write messages as (address, buffer) pairs."""

    # Bug fix: the method lacked `self` without being declared static; add
    # @staticmethod so it also works when called on an instance.
    @staticmethod
    def write(address, buf):
        """Normalize buf to bytes (Python 3) or str (Python 2) and pair it
        with the target address.

        :param address: I2C device address.
        :param buf: str or iterable of ints (byte values).
        :return: (address, normalized buffer) tuple.
        """
        if (sys.version_info.major >= 3):
            if (type(buf) is str):
                buf = bytes(map(ord, buf))
            else:
                buf = bytes(buf)
        elif (type(buf) is not str):
            buf = ''.join([chr(x) for x in buf])
        return (address, buf)
def extractCourageMyFriendOrg(item):
    """Map a feed item from couragemyfriend.org to a release message.

    Returns None for previews/items without chapter or volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_progress = bool(chp or vol)
    if not has_progress or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def upgrade():
    """Convert tito (3), mock-scm (4) and distgit (7) packages to the unified
    SCM source type (8), rewriting each package's source_json accordingly."""
    session = sa.orm.sessionmaker(bind=op.get_bind())()

    def migrate(old_source_type, build_new_source):
        """Rewrite every package of `old_source_type` to source_type 8 with a
        source_json produced by `build_new_source(old_source_dict)`."""
        rows = session.execute('SELECT * FROM package WHERE source_type=:param', {'param': old_source_type})
        for package in rows:
            source_dict = (json.loads(package['source_json']) if package['source_json'] else {})
            new_source_json = json.dumps(build_new_source(source_dict))
            session.execute('UPDATE package SET source_json=:param1, source_type=:param2 WHERE id=:param3', {'param1': new_source_json, 'param2': 8, 'param3': package['id']})

    # tito: git_* fields map directly; tito_test selects the test build method.
    migrate(3, lambda s: {'type': 'git', 'clone_url': (s.get('git_url') or ''), 'committish': (s.get('git_branch') or ''), 'subdirectory': (s.get('git_dir') or ''), 'spec': '', 'srpm_build_method': ('tito_test' if s.get('tito_test') else 'tito')})
    # mock-scm: scm_* fields carry over; always built with rpkg.
    migrate(4, lambda s: {'type': (s.get('scm_type') or 'git'), 'clone_url': (s.get('scm_url') or ''), 'committish': (s.get('scm_branch') or ''), 'subdirectory': (s.get('scm_subdir') or ''), 'spec': (s.get('spec') or ''), 'srpm_build_method': 'rpkg'})
    # distgit/fedpkg: only clone_url and branch exist; always built with rpkg.
    migrate(7, lambda s: {'type': 'git', 'clone_url': (s.get('clone_url') or ''), 'committish': (s.get('branch') or ''), 'subdirectory': '', 'spec': '', 'srpm_build_method': 'rpkg'})
class JsHtmlIcon(JsHtml):
    """JS DOM helpers for a Font Awesome icon component.

    NOTE(review): `val` and `content` take no arguments besides self and are
    accessor-shaped — they look like stripped @property definitions (the usual
    JsHtml pattern); confirm against callers before invoking them as methods.
    """

    def val(self):
        """JS expression: {htmlCode: {value: <class attribute>, timestamp, offset}}."""
        return JsObjects.JsObjects.get(('{%s: {value: %s, timestamp: Date.now(), offset: new Date().getTimezoneOffset()}}' % (self.htmlCode, self.component.dom.getAttribute('class'))))

    def content(self):
        """JS expression returning the icon's class attribute (its icon name)."""
        return self.component.dom.getAttribute('class')

    def spin(self, status: bool=True):
        """Toggle the Font Awesome 'fa-spin' CSS class on the icon."""
        if status:
            return self.component.dom.classList.add('fa-spin')
        return self.component.dom.classList.remove('fa-spin')

    def pulse(self, status: bool=True):
        """Toggle the Font Awesome 'fa-pulse' CSS class on the icon."""
        if status:
            return self.component.dom.classList.add('fa-pulse')
        return self.component.dom.classList.remove('fa-pulse')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.