code stringlengths 281 23.7M |
|---|
def test_hankel_dlf():
    """Every shipped Hankel DLF filter is self-consistent.

    For each named filter: the base/j0/j1 arrays all have the length encoded
    in the filter's name, and the stored spacing factor matches the mean
    ratio of consecutive base values.
    """
    filter_names = (
        'kong_61_2007', 'kong_241_2007', 'key_101_2009', 'key_201_2009',
        'key_401_2009', 'anderson_801_1982', 'key_51_2012', 'key_101_2012',
        'key_201_2012', 'wer_201_2018',
    )
    for name in filter_names:
        dlf = getattr(filters, name)()
        expected_len = int(name.split('_')[1])
        # Mean log-spacing of the filter base, rounded as stored.
        ratio = np.around(np.average(dlf.base[1:] / dlf.base[:-1]), 15)
        for values in (dlf.base, dlf.j0, dlf.j1):
            assert len(values) == expected_len
        assert_allclose(dlf.factor, ratio)
(cache=True)
def vehicle_dynamics_st(x, u_init, mu, C_Sf, C_Sr, lf, lr, h, m, I, s_min, s_max, sv_min, sv_max, v_switch, a_max, v_min, v_max):
    """Single-track (bicycle) vehicle dynamics: return the state derivative.

    From the index usage below, the state appears to be
    x = [x-pos, y-pos, steering angle, velocity, yaw angle, yaw rate,
    slip angle] and u_init = [steering velocity, longitudinal acceleration]
    (raw inputs, clipped by the constraint helpers) -- TODO confirm against
    the model documentation.

    NOTE(review): a stripped decorator fragment `(cache=True)` precedes this
    function in the dump -- presumably numba's @njit(cache=True); confirm.
    """
    # Gravitational acceleration [m/s^2].
    g = 9.81
    # Clamp the raw inputs to the actuator limits before integrating.
    u = np.array([steering_constraint(x[2], u_init[0], s_min, s_max, sv_min, sv_max), accl_constraints(x[3], u_init[1], v_switch, a_max, v_min, v_max)])
    if (abs(x[3]) < 0.1):
        # Low speed: the dynamic model's slip terms divide by velocity, so
        # fall back to the kinematic single-track model and append the
        # differentiated yaw-rate / slip terms.
        lwb = (lf + lr)  # wheelbase
        x_ks = x[0:5]
        f_ks = vehicle_dynamics_ks(x_ks, u, mu, C_Sf, C_Sr, lf, lr, h, m, I, s_min, s_max, sv_min, sv_max, v_switch, a_max, v_min, v_max)
        f = np.hstack((f_ks, np.array([(((u[1] / lwb) * np.tan(x[2])) + ((x[3] / (lwb * (np.cos(x[2]) ** 2))) * u[0])), 0])))
    else:
        # Full single-track dynamics; the long expressions are the standard
        # linear-tire yaw-rate and slip-angle derivatives with load transfer
        # terms (g*lr -/+ u[1]*h).
        f = np.array([(x[3] * np.cos((x[6] + x[4]))), (x[3] * np.sin((x[6] + x[4]))), u[0], u[1], x[5], (((((((- mu) * m) / ((x[3] * I) * (lr + lf))) * ((((lf ** 2) * C_Sf) * ((g * lr) - (u[1] * h))) + (((lr ** 2) * C_Sr) * ((g * lf) + (u[1] * h))))) * x[5]) + ((((mu * m) / (I * (lr + lf))) * (((lr * C_Sr) * ((g * lf) + (u[1] * h))) - ((lf * C_Sf) * ((g * lr) - (u[1] * h))))) * x[6])) + ((((((mu * m) / (I * (lr + lf))) * lf) * C_Sf) * ((g * lr) - (u[1] * h))) * x[2])), ((((((mu / ((x[3] ** 2) * (lr + lf))) * (((C_Sr * ((g * lf) + (u[1] * h))) * lr) - ((C_Sf * ((g * lr) - (u[1] * h))) * lf))) - 1) * x[5]) - (((mu / (x[3] * (lr + lf))) * ((C_Sr * ((g * lf) + (u[1] * h))) + (C_Sf * ((g * lr) - (u[1] * h))))) * x[6])) + (((mu / (x[3] * (lr + lf))) * (C_Sf * ((g * lr) - (u[1] * h)))) * x[2]))])
    return f
.skipif((not has_tensorflow), reason='needs TensorFlow')
def test_tensorflow_wrapper_thinc_model_subclass(tf_model):
    """TensorFlowWrapper honours a custom Model subclass via model_class."""
    class CustomModel(Model):
        def fn(self):
            return 1337

    wrapped = TensorFlowWrapper(tf_model, model_class=CustomModel)
    # The wrapper must be an instance of the requested subclass and expose
    # the subclass's extra behaviour.
    assert isinstance(wrapped, CustomModel)
    assert wrapped.fn() == 1337
def _write(*args: str) -> None:
    """Write *args* to stdout as one tab-separated, newline-terminated record.

    Each argument is written as-is, with a tab between consecutive arguments
    (never after the last one) and a single trailing newline; stdout is
    flushed at the end.  In debug mode every write is logged first.
    """
    last = len(args) - 1
    # enumerate() replaces the original manual counter; the write/log order
    # is unchanged.
    for i, arg in enumerate(args):
        if _is_debug():
            _log(f'writing: {arg}')
        sys.stdout.write(arg)
        if i < last:
            if _is_debug():
                _log('writetab')
            sys.stdout.write('\t')
    if _is_debug():
        _log('writenewline')
    sys.stdout.write('\n')
    sys.stdout.flush()
class MakeMenu():
    """Build a Qt menu bar (or popup menu) from an indented text description.

    Each line of *desc* is one entry; indentation nests submenus.  Within a
    line: ``#`` starts a comment, ``-`` inserts a separator, ``label | key``
    attaches a shortcut, ``label: handler`` binds an action, and a leading
    options field (parsed by the module-level ``options_pat`` /
    ``option_check`` helpers) marks checked/unchecked/disabled/named items.
    Handler source is compiled with exec() and bound to the owner object.

    NOTE(review): relies on module-level ``help_pat``, ``options_pat``,
    ``option_check``, ``null_handler``, ``MakeMenuItem`` and ``QtGui`` --
    their exact semantics are assumed from usage here.
    """

    def __init__(self, desc, owner, popup=False, window=None):
        """Parse *desc* and attach the resulting menu to *window* (or *owner*)."""
        self.owner = owner
        if (window is None):
            window = owner
        self.window = window
        # Optional indirection hook: if the owner defines call_menu, all
        # handlers are routed through it instead of exec'd closures.
        self.indirect = getattr(owner, 'call_menu', None)
        self.names = {}  # option name -> QAction, also mirrored onto owner
        self.desc = desc.split('\n')
        self.index = 0  # cursor into self.desc, advanced by parse/get_body
        if popup:
            self.menu = menu = QtGui.QMenu()
            self.parse(menu, (- 1))
        else:
            self.menu = menu = QtGui.QMenuBar()
            self.parse(menu, (- 1))
            window.setMenuBar(menu)

    def parse(self, menu, indent):
        """Consume description lines deeper than *indent*, filling *menu*.

        Recurses for submenus; returns when the description is exhausted or
        a line dedents back to (or above) the caller's level.
        """
        while True:
            if (self.index >= len(self.desc)):
                return
            dline = self.desc[self.index]
            line = dline.lstrip()
            indented = (len(dline) - len(line))
            # A dedent means this line belongs to an enclosing menu level.
            if (indented <= indent):
                return
            self.index += 1
            if ((line == '') or (line[0:1] == '#')):
                continue
            if (line[0:1] == '-'):
                menu.addSeparator()
                continue
            # Optional trailing help text, extracted by help_pat.
            help = ''
            match = help_pat.search(line)
            if match:
                help = (' ' + match.group(2).strip())
                line = (match.group(1) + match.group(3))
            col = line.find(':')
            if (col >= 0):
                # Action entry: "label: handler" (handler may be empty, in
                # which case the indented body lines become the handler).
                handler = line[(col + 1):].strip()
                if (handler != ''):
                    if self.indirect:
                        # Pre-register with the indirection hook, then route
                        # the action through it.
                        self.indirect(None, handler)
                        handler = self.indirect
                    else:
                        try:
                            _locl = dict(self=self)
                            # Compile the one-line handler into a function
                            # bound to the owner via a default argument.
                            exec(('def handler(self=self.owner):\n %s\n' % handler), globals(), _locl)
                            handler = _locl['handler']
                        except:
                            # Malformed handler source: fall back to a no-op.
                            handler = null_handler
                else:
                    try:
                        _locl = dict(self=self)
                        # Multi-line handler: body comes from the following
                        # deeper-indented lines.
                        exec(('def handler(self=self.owner):\n%s\n' % (self.get_body(indented),)), globals(), _locl)
                        handler = _locl['handler']
                    except:
                        handler = null_handler
                not_checked = checked = disabled = False
                name = key = ''
                line = line[:col]
                # Optional "~/-"-style option flags before the label.
                match = options_pat.search(line)
                if match:
                    line = (match.group(1) + match.group(3))
                    (not_checked, checked, disabled, name) = option_check('~/-', match.group(2).strip())
                label = line.strip()
                # Optional "label | shortcut" suffix.
                col = label.find('|')
                if (col >= 0):
                    key = label[(col + 1):].strip()
                    label = label[:col].strip()
                act = menu.addAction(label, handler)
                act.setCheckable((not_checked or checked))
                act.setStatusTip(help)
                if key:
                    act.setShortcut(key)
                if checked:
                    act.setChecked(True)
                if disabled:
                    act.setEnabled(False)
                if name:
                    # Named actions are addressable later via self.names and
                    # as an attribute on the owner.
                    self.names[name] = act
                    setattr(self.owner, name, MakeMenuItem(self, act))
            else:
                # No colon: this line opens a submenu; recurse one level in.
                submenu = QtGui.QMenu(line.strip())
                self.parse(submenu, indented)
                act = menu.addMenu(submenu)
                act.setStatusTip(help)

    def get_body(self, indent):
        """Collect the handler body: following lines indented deeper than *indent*.

        Returns the joined source, or ' pass' when the body is empty so the
        exec'd function definition stays valid.
        """
        result = []
        while (self.index < len(self.desc)):
            line = self.desc[self.index]
            if ((len(line) - len(line.lstrip())) <= indent):
                break
            result.append(line)
            self.index += 1
        result = '\n'.join(result).rstrip()
        if (result != ''):
            return result
        return ' pass'

    def get_action(self, name):
        """Resolve *name* (string or QAction) to the QAction."""
        if isinstance(name, str):
            return self.names[name]
        return name

    def checked(self, name, check=None):
        """Get (check is None) or set the checked state of a named action."""
        act = self.get_action(name)
        if (check is None):
            return act.isChecked()
        act.setChecked(check)

    def enabled(self, name, enable=None):
        """Get (enable is None) or set the enabled state of a named action."""
        act = self.get_action(name)
        if (enable is None):
            return act.isEnabled()
        act.setEnabled(enable)

    def label(self, name, label=None):
        """Get (label is None) or set the text of a named action."""
        act = self.get_action(name)
        if (label is None):
            return str(act.text())
        act.setText(label)
('kibana')
_params(*kibana_options)
_context
def kibana_group(ctx: click.Context, **kibana_kwargs):
    """Kibana command group: print help, or attach a client to the context."""
    ctx.ensure_object(dict)
    # When the invocation ends in a help flag, just describe the group's
    # options; otherwise build the client eagerly for subcommands.
    wants_help = sys.argv[-1] in ctx.help_option_names
    if not wants_help:
        ctx.obj['kibana'] = get_kibana_client(**kibana_kwargs)
    else:
        click.echo('Kibana client:')
        click.echo(format_command_options(ctx))
class JsonInterfaceGenerator(object):
    """Generate a ChromeRemoteDebugInterface class from DevTools protocol JSON.

    Loads the browser and JS protocol definition files for a protocol
    revision, then uses the ``ast`` module to synthesize a class with one
    method per protocol command (including runtime argument type checks).
    The generated class can be dumped as source, dumped as an AST, or
    compiled and returned directly.
    """

    def __init__(self, protocol_version='1.2', debug_prints=False, *args, **kwargs):
        """Load the protocol files and build the interface-class AST.

        protocol_version: protocol revision suffix of the JSON files
            (defaults to '1.2'; None is coerced to '1.2').
        debug_prints: when True, generated methods print their arguments.
        """
        super().__init__(*args, **kwargs)
        if (protocol_version == None):
            protocol_version = '1.2'
        self.log = logging.getLogger('Main.ChromeController.WrapperGenerator')
        # Monotonic counter used to stamp synthetic line numbers on AST nodes.
        self.line_num = 0
        self.do_debug_prints = debug_prints
        # Protocol type declarations, keyed '<domain>_<typeid>'.
        self.types = {}
        self.protocol = self.__load_protocol(protocol_version)
        self.__build_interface_class()

    def __load_json_file(self, fname):
        """Load one protocol JSON file from the sibling 'protocols' folder."""
        folder = os.path.split(__file__)[0]
        protocol_file_path = os.path.join(folder, '../', 'protocols', fname)
        protocol_file_path = os.path.abspath(protocol_file_path)
        assert os.path.exists(protocol_file_path), "Protocol file '{}' appears to be missing!".format(protocol_file_path)
        with open(protocol_file_path) as fp:
            protocol_str = fp.read()
        return json.loads(protocol_str)

    def __load_protocol(self, protocol_version):
        """Load browser+JS protocol files, validate versions, merge domains."""
        self.log.info('Loading protocol version %s', protocol_version)
        main_json_file = 'browser_protocol-r{}.json'.format(protocol_version)
        js_json_file = 'js_protocol-r{}.json'.format(protocol_version)
        js_file_1 = self.__load_json_file(main_json_file)
        js_file_2 = self.__load_json_file(js_json_file)
        self.__validate_protocol_version(main_json_file, js_file_1, protocol_version)
        self.__validate_protocol_version(js_json_file, js_file_2, protocol_version)
        # Merge the JS protocol's domains into the browser protocol.
        for domain in js_file_2['domains']:
            js_file_1['domains'].append(domain)
        return js_file_1

    def __get_line(self):
        """Return the next synthetic line number for generated AST nodes."""
        self.line_num += 1
        return self.line_num

    def __validate_protocol_version(self, filename, js_file, protocol_version):
        """Assert that the file's embedded major.minor matches *protocol_version*."""
        file_protocol_rev = '{}.{}'.format(js_file['version']['major'], js_file['version']['minor'])
        errm_1 = 'Version mismatch: {} - {} in file {}'.format(file_protocol_rev, protocol_version, filename)
        assert (file_protocol_rev == protocol_version), errm_1

    def __build_interface_class(self):
        """Assemble the ClassDef AST: docstring, __init__, per-command methods."""
        body = [ast.Expr(value=ast.Str(s='\n\n\t')), self.__build__init()]
        for subdom in self.protocol['domains']:
            subdom_funcs = self.__build_domain_interface(subdom)
            body += subdom_funcs
        self.interface_class = ast.ClassDef(name='ChromeRemoteDebugInterface', bases=[ast.Name(id='ChromeInterface', ctx=ast.Load())], body=body, keywords=[], decorator_list=[], starargs=None, kwargs=None, lineno=self.__get_line(), col_offset=0)

    def __build__init(self):
        """Build an __init__ that forwards *args/**kwargs to the superclass.

        The Call node shape differs between Python 3.4 and 3.5+ (starred
        args moved into args/keywords in 3.5), hence the version switch.
        """
        super_func_call = ast.Call(func=ast.Name(id='super', ctx=ast.Load()), args=[], keywords=[])
        if (((sys.version_info[0], sys.version_info[1]) == (3, 5)) or ((sys.version_info[0], sys.version_info[1]) == (3, 6)) or ((sys.version_info[0], sys.version_info[1]) == (3, 7)) or ((sys.version_info[0], sys.version_info[1]) == (3, 8))):
            super_func = ast.Call(func=ast.Attribute(value=super_func_call, attr='__init__', ctx=ast.Load()), args=[ast.Starred(value=ast.Name(id='args', ctx=ast.Load()), ctx=ast.Load())], keywords=[ast.keyword(arg=None, value=ast.Name(id='kwargs', ctx=ast.Load()), ctx=ast.Load())])
        elif ((sys.version_info[0], sys.version_info[1]) == (3, 4)):
            super_func = ast.Call(func=ast.Attribute(value=super_func_call, attr='__init__', ctx=ast.Load()), args=[], keywords=[], starargs=ast.Name(id='args', ctx=ast.Load()), kwargs=ast.Name(id='kwargs', ctx=ast.Load()))
        else:
            # NOTE(review): the message says 3.4-3.7 but 3.8 is accepted above.
            print('Version:', sys.version_info)
            raise RuntimeError('This script only functions on python 3.4, 3.5, 3.6, or 3.7. Active python version {}.{}'.format(*sys.version_info))
        super_init = ast.Expr(value=super_func, lineno=self.__get_line(), col_offset=0)
        body = [super_init]
        sig = ast.arguments(args=[ast.arg('self', None)], vararg=ast.arg(arg='args', annotation=None), kwarg=ast.arg(arg='kwargs', annotation=None), varargannotation=None, posonlyargs=[], kwonlyargs=[], kwargannotation=None, defaults=[], kw_defaults=[])
        func = ast.FunctionDef(name='__init__', args=sig, body=body, decorator_list=[], lineno=self.__get_line(), col_offset=0)
        return func

    def __build_domain_interface(self, subdom):
        """Register a domain's types and build one FunctionDef per command."""
        assert ('domain' in subdom)
        # NOTE(review): 'descripton' is misspelled, so dom_desc is always ''
        # (and unused below) -- harmless but worth confirming upstream.
        dom_desc = subdom.get('descripton', '')
        dom_name = subdom['domain']
        full_name = subdom['domain']
        for typen in subdom.get('types', []):
            typestr = '{}_{}'.format(dom_name, typen['id'])
            assert (typen['id'] not in self.types), 'Duplicate type name: {}'.format(typen['id'])
            self.types[typestr] = typen
        functions = []
        for command in subdom.get('commands', []):
            func = self.__build_function(dom_name, full_name, command)
            functions.append(func)
        return functions

    def __build_desc_string(self, dom_name, func_name, func_params):
        """Render the docstring for a generated method from the command JSON.

        Describes the command path, its required/optional parameters, the
        return values, and any 'experimental' warning.
        """
        desc = []
        fname = '{}.{}'.format(dom_name, func_name)
        desc.append('Function path: {}'.format(fname))
        desc.append('\tDomain: {}'.format(dom_name))
        desc.append('\tMethod name: {}'.format(func_name))
        desc.append('')
        if (('experimental' in func_params) and func_params['experimental']):
            desc.append("\tWARNING: This function is marked 'Experimental'!")
            desc.append('')
        if ('parameters' in func_params):
            desc.append('\tParameters:')
            required = [param for param in func_params['parameters'] if (not param.get('optional', False))]
            optional = [param for param in func_params['parameters'] if param.get('optional', False)]
            sections = [('\t\tRequired arguments:', required), ('\t\tOptional arguments:', optional)]
            sections = [section for section in sections if section[1]]
            for (segment_name, items) in sections:
                desc.append(segment_name)
                for param in items:
                    if (not ('description' in param)):
                        param['description'] = 'No description'
                    if ('type' in param):
                        desc.append("\t\t\t'{}' (type: {}) -> {}".format(param['name'], param['type'], param['description']))
                    else:
                        # Parameters without a primitive type reference a
                        # protocol type via '$ref'.
                        desc.append("\t\t\t'{}' (type: {}) -> {}".format(param['name'], param['$ref'], param['description']))
        if ('returns' in func_params):
            desc.append('\tReturns:')
            for param in func_params['returns']:
                if (not ('description' in param)):
                    param['description'] = 'No description'
                if ('type' in param):
                    desc.append("\t\t'{}' (type: {}) -> {}".format(param['name'], param['type'], param['description']))
                else:
                    desc.append("\t\t'{}' (type: {}) -> {}".format(param['name'], param['$ref'], param['description']))
        else:
            desc.append('\tNo return value.')
        desc.append('')
        if ('description' in func_params):
            desc.append('\tDescription: {}'.format(func_params['description']))
        desc = [('\t\t' + line) for line in desc]
        ret = '\n'.join(desc)
        return ret

    def __build_conditional_arg_check(self, argname, argtype):
        """Build: if argname in kwargs: assert isinstance(kwargs[argname], argtype)."""
        target_value = ast.Subscript(value=ast.Name(id='kwargs', ctx=ast.Load()), slice=ast.Index(ast.Str(s=argname)), ctx=ast.Load())
        presence_check = ast.Call(func=ast.Name(id='isinstance', ctx=ast.Load()), args=[target_value, argtype], keywords=[], lineno=self.__get_line())
        types = [t.id for t in argtype.elts]
        check_message = ast.BinOp(left=ast.Str(s="Optional argument '{}' must be of type '{}'. Received type: '%s'".format(argname, types)), op=ast.Mod(), right=ast.Call(func=ast.Name(id='type', ctx=ast.Load()), args=[target_value], keywords=[]), lineno=self.__get_line())
        assert_check = ast.Assert(test=presence_check, msg=check_message, lineno=self.__get_line())
        check_body = [assert_check]
        check = ast.Compare(left=ast.Str(s=argname, ctx=ast.Load()), ops=[ast.In()], comparators=[ast.Name(id='kwargs', ctx=ast.Load())])
        new_ret = ast.If(test=check, body=check_body, orelse=[], lineno=self.__get_line())
        return new_ret

    def __build_unconditional_arg_check(self, argname, argtype):
        """Build: assert isinstance(argname, argtype) with a descriptive message."""
        presence_check = ast.Call(func=ast.Name(id='isinstance', ctx=ast.Load()), args=[ast.Name(id=argname, ctx=ast.Load()), argtype], keywords=[], lineno=self.__get_line())
        types = [t.id for t in argtype.elts]
        check_message = ast.BinOp(left=ast.Str(s="Argument '{}' must be of type '{}'. Received type: '%s'".format(argname, types)), op=ast.Mod(), right=ast.Call(func=ast.Name(id='type', ctx=ast.Load()), args=[ast.Name(id=argname, ctx=ast.Load())], keywords=[]), lineno=self.__get_line())
        new_ret = ast.Assert(test=presence_check, msg=check_message, lineno=self.__get_line())
        return new_ret

    def __build_debug_print(self, prefix_str, var_name):
        """Build: print(prefix_str, var_name) as an AST expression statement."""
        pstmt = ast.Expr(value=ast.Call(func=ast.Name(id='print', ctx=ast.Load()), args=[ast.Str(s=prefix_str), ast.Name(id=var_name, ctx=ast.Load())], keywords=[], lineno=self.__get_line()))
        return pstmt

    def __build_function(self, dom_name, full_name, func_params):
        """Build one '<domain>_<command>' method FunctionDef for a protocol command.

        Required parameters become positional arguments; optional parameters
        are accepted via **kwargs with an allowed-key assertion.  The method
        body ends by delegating to self.synchronous_command(...).
        """
        assert ('name' in func_params)
        func_name = func_params['name']
        docstr = self.__build_desc_string(dom_name, func_name, func_params)
        args = [ast.arg('self', None)]
        message_params = []
        func_body = []
        if docstr:
            func_body.append(ast.Expr(ast.Str((('\n' + docstr) + '\n\t\t'))))
        for param in func_params.get('parameters', []):
            argname = param['name']
            param_optional = param.get('optional', False)
            if (param_optional is False):
                message_params.append(ast.keyword(argname, ast.Name(id=argname, ctx=ast.Load())))
                args.append(ast.arg(argname, None))
                if self.do_debug_prints:
                    func_body.append(self.__build_debug_print(argname, argname))
            # Only primitive types listed in the module-level CHECKS table
            # get an isinstance assertion.
            param_type = param.get('type', None)
            if (param_type in CHECKS):
                if param_optional:
                    check = self.__build_conditional_arg_check(argname, CHECKS[param_type])
                else:
                    check = self.__build_unconditional_arg_check(argname, CHECKS[param_type])
                if check:
                    func_body.append(check)
        optional_params = [param.get('name') for param in func_params.get('parameters', []) if param.get('optional', False)]
        func_kwargs = None
        if len(optional_params):
            # Validate at call time that every passed kwarg is a known
            # optional parameter of this command.
            value = ast.List(elts=[ast.Str(s=param, ctx=ast.Store()) for param in optional_params], ctx=ast.Load())
            create_list = ast.Assign(targets=[ast.Name(id='expected', ctx=ast.Store())], value=value)
            func_body.append(create_list)
            passed_arg_list = ast.Assign(targets=[ast.Name(id='passed_keys', ctx=ast.Store())], value=ast.Call(func=ast.Name(id='list', ctx=ast.Load()), args=[ast.Call(func=ast.Attribute(value=ast.Name(id='kwargs', ctx=ast.Load()), attr='keys', ctx=ast.Load()), args=[], keywords=[])], keywords=[]))
            func_body.append(passed_arg_list)
            comprehension = ast.comprehension(target=ast.Name(id='key', ctx=ast.Store()), iter=ast.Name(id='passed_keys', ctx=ast.Load()), ifs=[], is_async=False)
            comparator = ast.Name(id='expected', ctx=ast.Load())
            listcomp = ast.ListComp(elt=ast.Compare(left=ast.Name(id='key', ctx=ast.Load()), ops=[ast.In()], comparators=[comparator]), generators=[comprehension])
            check_message = ast.BinOp(left=ast.Str(s='Allowed kwargs are {}. Passed kwargs: %s'.format(optional_params)), op=ast.Mod(), right=ast.Name(id='passed_keys', ctx=ast.Load()), lineno=self.__get_line())
            kwarg_check = ast.Assert(test=ast.Call(func=ast.Name(id='all', ctx=ast.Load()), args=[listcomp], keywords=[]), msg=check_message)
            func_body.append(kwarg_check)
            func_kwargs = ast.Name(id='kwargs', ctx=ast.Load())
        fname = '{}.{}'.format(dom_name, func_name)
        fname = ast.Str(s=fname, ctx=ast.Load())
        # Same 3.4 vs 3.5+ Call-node shape split as in __build__init.
        if (((sys.version_info[0], sys.version_info[1]) == (3, 5)) or ((sys.version_info[0], sys.version_info[1]) == (3, 6)) or ((sys.version_info[0], sys.version_info[1]) == (3, 7)) or ((sys.version_info[0], sys.version_info[1]) == (3, 8))):
            if func_kwargs:
                message_params.append(ast.keyword(arg=None, value=ast.Name(id='kwargs', ctx=ast.Load())))
            communicate_call = ast.Call(func=ast.Attribute(value=ast.Name(id='self', ctx=ast.Load()), ctx=ast.Load(), attr='synchronous_command'), args=[fname], keywords=message_params)
        elif ((sys.version_info[0], sys.version_info[1]) == (3, 4)):
            communicate_call = ast.Call(func=ast.Attribute(value=ast.Name(id='self', ctx=ast.Load()), ctx=ast.Load(), attr='synchronous_command'), args=[fname], kwargs=func_kwargs, keywords=message_params)
        else:
            print('Version:', sys.version_info)
            raise RuntimeError('This script only functions on python 3.4, 3.5, 3.6, or 3.7. Active python version {}.{}'.format(*sys.version_info))
        do_communicate = ast.Assign(targets=[ast.Name(id='subdom_funcs', ctx=ast.Store())], value=communicate_call)
        func_ret = ast.Return(value=ast.Name(id='subdom_funcs', ctx=ast.Load()))
        if (len(optional_params) and self.do_debug_prints):
            func_body.append(self.__build_debug_print('kwargs', 'kwargs'))
        func_body.append(do_communicate)
        func_body.append(func_ret)
        if len(optional_params):
            kwarg = ast.arg(arg='kwargs', annotation=None)
        else:
            kwarg = None
        sig = ast.arguments(args=args, vararg=None, varargannotation=None, posonlyargs=[], kwonlyargs=[], kwarg=kwarg, kwargannotation=None, defaults=[], kw_defaults=[])
        func = ast.FunctionDef(name='{}_{}'.format(full_name, func_name), args=sig, body=func_body, decorator_list=[], lineno=self.__get_line(), col_offset=0)
        return func

    def __to_module(self):
        """Wrap the interface class plus its imports into a fixed-up ast.Module."""
        module_components = [ast.ImportFrom(module='ChromeController.transport', names=[ast.alias('ChromeExecutionManager', None)], level=0), ast.ImportFrom(module='ChromeController.manager_base', names=[ast.alias('ChromeInterface', None)], level=0), self.interface_class]
        # ast.Module grew a second (type_ignores) argument in 3.8.
        if (sys.version_info >= (3, 8)):
            mod = ast.Module(module_components, [], lineno=self.__get_line(), col_offset=1)
        else:
            mod = ast.Module(module_components, lineno=self.__get_line(), col_offset=1)
        mod = ast.fix_missing_locations(mod)
        return mod

    def dump_class(self):
        """Return the generated class as Python source text (via astor)."""
        indent = '\t'
        return astor.to_source(self.__to_module(), indent_with=indent)

    def dump_ast(self):
        """Return a printable dump of the generated module AST."""
        return astor.dump_tree(self.__to_module())

    def compile_class(self):
        """Compile and exec the generated module; return the built class object."""
        mod = self.__to_module()
        code = compile(self.__to_module(), 'no filename', 'exec')
        exec(code)
        built_class = locals()['ChromeRemoteDebugInterface']
        return built_class
class MultiTagManager():
    """Manage the multi-select tag property of a Notion collection.

    Reads/validates the configured ``multi_tag_property`` on the current
    collection schema, lists its tag options, creates new options, and
    updates a row's tags.
    """

    # Notion palette colour name -> hex value used client-side.
    _NOTION_NAME_TO_HEX = {
        'default': '#505558',
        'gray': '#6B6F71',
        'brown': '#695B55',
        'orange': '#9F7445',
        'yellow': '#9F9048',
        'green': '#467870',
        'blue': '#487088',
        'purple': '#6C598F',
        'pink': '#904D74',
        'red': '#9F5C58',
    }
    # Reverse mapping (hex -> palette name); all hex values are unique.
    _HEX_TO_NOTION_NAME = {hex_: name for (name, hex_) in _NOTION_NAME_TO_HEX.items()}

    def __init__(self, logging, client, mind_structure, options):
        self.logging = logging
        self.client = client
        self.mind_structure = mind_structure
        self.multi_tag_property = options['multi_tag_property']

    def _get_multi_select_schema(self, prop):
        """Return (collection_schema, prop_schema) for *prop*.

        Raises ValueError when the property is missing from the collection
        schema or is not a multi_select property.
        """
        collection_schema = self.mind_structure.collection.get('schema')
        prop_schema = next((v for v in collection_schema.values() if v['name'] == prop), None)
        if not prop_schema:
            raise ValueError(f'"{prop}" property does not exist on the collection!')
        if prop_schema['type'] != 'multi_select':
            raise ValueError(f'"{prop}" is not a multi select property!')
        return collection_schema, prop_schema

    def get_multi_select_tags(self, notion_ai, append_tags, collection_index=None):
        """Return a JSON response listing the property's tag options plus *append_tags*.

        Raises ValueError when the property has no options and *append_tags*
        is empty.
        """
        prop = self.multi_tag_property
        print(prop)
        _, prop_schema = self._get_multi_select_schema(prop)
        if 'options' in prop_schema:
            tags = []
            for element in prop_schema['options']:
                print(element)
                if 'color' in element:
                    color = self._notion_color_to_hex(element['color'])
                else:
                    # Options without a colour get the default, persisted
                    # back onto the element.
                    color = DEFAULT_COLOR
                    element['color'] = color
                tags.append(TagObject().parse_from_notion_element(element=element, tag_color=color))
            return create_json_response(notion_ai, status_code=200, append_content=(tags + append_tags))
        elif len(append_tags) > 0:
            return create_json_response(notion_ai, status_code=200, append_content=append_tags)
        else:
            raise ValueError(f'"{prop}" property has no tags on it.')

    def _get_multi_select_tags_as_list(self, collection_index=0):
        """Return the property's option values as a plain list (may be empty)."""
        self.mind_structure.set_current_collection(int(collection_index))
        _, prop_schema = self._get_multi_select_schema(self.multi_tag_property)
        return [element['value'] for element in prop_schema.get('options', [])]

    def update_multi_select_tags(self, notion_ai, id, tags_json, collection_index=0, color=None):
        """Set row *id*'s multi-select tags, creating missing options first.

        Returns a 205 JSON response when the property ends up non-empty,
        206 when empty, or the HTTP error status on request failure.
        ValueErrors are logged and swallowed (best-effort update), in which
        case None is returned.
        """
        try:
            self.logging.info('Updating multi select tags for row {0} {1} {2}'.format(id, tags_json, collection_index))
            print('Updating multi select tags for row {0} {1} {2}'.format(id, tags_json, collection_index))
            block = self.client.get_block(id)
            current_tags_notion = self._get_multi_select_tags_as_list(collection_index)
            tag_to_add = []
            for tag in tags_json:
                if (tag['option_name'] not in current_tags_notion) or (len(current_tags_notion) == 0):
                    print(tag['option_name'] + ' is new')
                    value = self.add_new_multi_select_value(self.multi_tag_property, tag['option_name'], tag['option_color'])
                else:
                    print(tag['option_name'] + ' already exists')
                    value = tag['option_name']
                tag_to_add.append(value)
            block.set_property(self.multi_tag_property, tag_to_add)
            if len(block.get_property(self.multi_tag_property)) > 0:
                return create_json_response(notion_ai, status_code=205, rowId=id)
            return create_json_response(notion_ai, status_code=206, rowId=id)
        except ValueError as e:
            # Deliberate best-effort: schema problems are logged, not raised.
            self.logging.info(e)
            print(e)
        except requests.exceptions.HTTPError as e:
            print(e)
            self.logging.info(e)
            return create_json_response(notion_ai, status_code=e.response.status_code, rowId=id)

    def add_new_multi_select_value(self, prop, value, color=None):
        """Add *value* as a new option on *prop* and persist the schema.

        A random Notion colour is chosen when *color* is None; otherwise the
        hex *color* is translated to its palette name.  Returns the option
        value (the existing one when it is already present).
        """
        if color is None:
            color = choice(list(self._NOTION_NAME_TO_HEX))
        else:
            color = self._hex_to_notion_color(color)
        print('Tag color for {0} will be {1}'.format(value, color))
        collection_schema, prop_schema = self._get_multi_select_schema(prop)
        # Creates the options list when the property has none yet.
        options = prop_schema.setdefault('options', [])
        dupe = next((o for o in options if o['value'] == value), None)
        if dupe:
            print(f'"{value}" already exists in the schema!')
            print(dupe['value'])
            return dupe['value']
        options.append({'id': str(uuid1()), 'value': value, 'color': color})
        self.mind_structure.collection.set('schema', collection_schema)
        return value

    def _notion_color_to_hex(self, color_name):
        """Translate a Notion palette colour name to its hex value.

        None maps to the default colour; unknown names return None.
        """
        if color_name is None:
            return '#505558'
        return self._NOTION_NAME_TO_HEX.get(color_name)

    def _hex_to_notion_color(self, color_name):
        """Translate a hex colour back to its Notion palette name.

        None maps to 'default'; unknown hex values return None.
        """
        if color_name is None:
            return 'default'
        return self._HEX_TO_NOTION_NAME.get(color_name)
def switch_aliases_to_original_index(index_name=None):
    """Point the index aliases back at *index_name* and drop the swap index.

    Moves both the index-specific alias and SEARCH_ALIAS from the temporary
    swapping index to *index_name* (default CASE_INDEX), waits for the
    cluster to settle, then deletes the swapping index.
    """
    index_name = (index_name or CASE_INDEX)
    # Hoisted: the original looked up INDEX_DICT.get(index_name) six times.
    index_info = INDEX_DICT.get(index_name)
    index_alias = index_info[1]
    swapping_index = index_info[3]
    es_client = create_es_client()
    logger.info(" Moving aliases '{0}' and '{1}' to point to {2}...".format(index_alias, SEARCH_ALIAS, index_name))
    es_client.indices.update_aliases(body={'actions': [
        {'remove': {'index': swapping_index, 'alias': index_alias}},
        {'remove': {'index': swapping_index, 'alias': SEARCH_ALIAS}},
        {'add': {'index': index_name, 'alias': index_alias}},
        {'add': {'index': index_name, 'alias': SEARCH_ALIAS}},
    ]})
    logger.info(" Moved aliases '{0}' and '{1}' to point to '{2}' successfully.".format(index_alias, SEARCH_ALIAS, index_name))
    # Give Elasticsearch time to propagate the alias change before deleting.
    time.sleep(60)
    logger.info(" Deleting index '{0}'...".format(swapping_index))
    es_client.indices.delete(swapping_index)
    logger.info(" Deleted swapping index '{0}' successfully.".format(swapping_index))
    logger.info(" Task update_mapping_and_reload_legal_data on '{0}' has been completed successfully !!!".format(index_name))
def readFunLite():
    """Convert an ADH mesh + .fun solution file into simple .grf column files.

    Reads node coordinates ('ND' records) from meshFile, reads the scalar
    function time series from funFile, writes x.grf/y.grf/z.grf coordinate
    files, and one '<funOutFile><ts>.grf' file per time step.

    NOTE(review): depends on module-level `Node`, `zeros` and `Float`;
    `zeros`/`Float` look like the legacy Numeric API -- confirm.
    """
    from optparse import OptionParser
    usage = 'usage: %readFunLite [options] meshFile funFile funOutFile'
    parser = OptionParser(usage=usage)
    # Grid-size options are parsed but not used in the body below.
    parser.add_option('-x', '--nnodes-x', help='use NX nodes in the x direction', action='store', type='int', dest='nx', default=2)
    parser.add_option('-y', '--nnodes-y', help='use NY nodes in the y direction', action='store', type='int', dest='ny', default=2)
    parser.add_option('-z', '--nnodes-z', help='use NZ nodes in the z direction', action='store', type='int', dest='nz', default=2)
    (opts, args) = parser.parse_args()
    # NOTE(review): requires exactly 4 positional args but only args[1:4]
    # are used (args[0] is ignored) -- confirm this off-by-one is intended.
    if (len(args) == 4):
        meshFilename = args[1]
        funFilename = args[2]
        funOutFilename = args[3]
    else:
        print(usage)
        exit(1)
    meshIn = open(meshFilename, 'r')
    print(('Reading nodes of the mesh from file=%s' % meshFilename))
    line = meshIn.readline()
    columns = line.split()
    nodes = []
    # ADH files are 1-based; convert node ids to 0-based.
    adhBase = 1
    # Skip header lines until the first 'ND' record; the while/else then
    # consumes the consecutive run of 'ND' (node) records.
    while (columns[0] != 'ND'):
        line = meshIn.readline()
        columns = line.split()
    else:
        while (line and (columns[0] == 'ND')):
            nodes.append(Node((int(columns[1]) - adhBase), float(columns[2]), float(columns[3]), float(columns[4])))
            line = meshIn.readline()
            columns = line.split()
    funIn = open(funFilename, 'r')
    nNodes = 0
    fun = []
    # The .fun header is 6 lines; the 'ND' line carries the node count.
    for i in range(6):
        line = funIn.readline()
        print(line.strip())
        words = line.split()
        if (words[0] == 'ND'):
            nNodes = int(words[1])
    if (nNodes != len(nodes)):
        print("the number of nodes in mesh and function files don't match")
    line = funIn.readline()
    # Each time step is a 'TS ...' line followed by nNodes values.
    while (line.strip() != 'ENDDS'):
        print(('Reading ' + line.strip()))
        words = line.split()
        u = zeros(nNodes, Float)
        for i in range(nNodes):
            u[i] = float(funIn.readline())
        fun.append(u)
        line = funIn.readline()
    print(('Read %i timesteps' % len(fun)))
    print('Writing coordinate files x.grf, y.grf, and z.grf')
    xiList = {}  # NOTE(review): assigned but never used -- confirm leftover.
    xOut = open('x.grf', 'w')
    yOut = open('y.grf', 'w')
    zOut = open('z.grf', 'w')
    for n in nodes:
        xOut.write(('%15.8e \n' % n.x))
        yOut.write(('%15.8e \n' % n.y))
        zOut.write(('%15.8e \n' % n.z))
    xOut.close()
    yOut.close()
    zOut.close()
    # One output file per time step, suffixed with the step index.
    for (ts, f) in enumerate(fun):
        funOutFileTS = ((funOutFilename + repr(ts)) + '.grf')
        print(('Writing time step=%i to file=%s' % (ts, funOutFileTS)))
        funOut = open(funOutFileTS, 'w')
        for v in f:
            funOut.write(('%15.8e \n' % v))
        funOut.close()
def main():
    """Build / load entry point for the LiteX Arty A7-35 SoC target."""
    parser = argparse.ArgumentParser(description='LiteX SoC on Arty A7-35')
    parser.add_argument('--build', action='store_true', help='Build bitstream')
    parser.add_argument('--load', action='store_true', help='Load bitstream')
    # NOTE(review): --flash is parsed but never acted on below -- confirm.
    parser.add_argument('--flash', action='store_true', help='Flash Bitstream')
    builder_args(parser)
    soc_core_args(parser)
    args = parser.parse_args()
    # NOTE(review): sys_clk_freq=.0 (i.e. 0.0 Hz) looks wrong; a real clock
    # frequency (e.g. 100e6) was probably lost in transcription -- confirm.
    soc = BaseSoC(sys_clk_freq=.0, **soc_core_argdict(args))
    builder = Builder(soc, **builder_argdict(args))
    builder.build(run=args.build)
    if args.load:
        prog = soc.platform.create_programmer()
        prog.load_bitstream(os.path.join(builder.gateware_dir, (soc.build_name + '.bit')))
    exit()
class OptionSeriesBoxplotSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Highcharts sonification default-instrument highpass-resonance mapping options.

    NOTE(review): every option appears as a no-arg getter immediately
    followed by a one-arg setter of the SAME name; as written the second
    `def` shadows the first.  Property decorators (e.g. @property /
    @<name>.setter) were most likely stripped in transcription -- confirm
    against the original generated source.
    """

    def mapFunction(self):
        # Getter: configured mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store without JS-type coercion.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: target parameter to map to (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: mapping range maximum (default None).
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: mapping range minimum (default None).
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: 'within' scope for the mapping (default None).
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def train_with_memory_profiler(output_dir, device='cpu'):
    """Run a short d2go training loop with the memory profiler enabled.

    Builds a tiny local dataset, configures a 10-iteration training run with
    memory-profiler logging turned on, trains, and returns the config.
    """
    dataset_name = create_local_dataset(output_dir, 5, 10, 10)
    runner = default_runner.Detectron2GoRunner()
    cfg = runner.get_default_cfg()
    # Minimal model/solver setup for a fast smoke-training run.
    cfg.MODEL.DEVICE = device
    cfg.MODEL.META_ARCHITECTURE = 'MetaArchForTestSimple'
    cfg.SOLVER.MAX_ITER = 10
    cfg.DATASETS.TRAIN = (dataset_name,)
    cfg.DATASETS.TEST = (dataset_name,)
    cfg.OUTPUT_DIR = output_dir
    # Memory-profiler settings exercised by this run.
    cfg.MEMORY_PROFILER.ENABLED = True
    cfg.MEMORY_PROFILER.LOG_N_STEPS = 3
    cfg.MEMORY_PROFILER.LOG_DURING_TRAIN_AT = 5
    runner.register(cfg)
    runner.do_train(cfg, runner.build_model(cfg), resume=True)
    return cfg
class TestFindProgram(unittest.TestCase):
    """Tests for find_program(), using the system `ls` binary as a fixture."""

    def setUp(self):
        # Locate `ls` in the usual places; abort the test if it is missing.
        candidates = (os.path.join(d, 'ls') for d in ('/usr/bin', '/bin'))
        self.ls = next((path for path in candidates if os.path.exists(path)), None)
        if self.ls is None:
            self.fail('unable to locate ls program for test')

    def test_find_program_that_exists(self):
        self.assertEqual(find_program('ls'), self.ls)

    def test_find_program_with_full_path(self):
        self.assertEqual(find_program(self.ls), self.ls)

    def test_dont_find_program_that_does_exist(self):
        self.assertEqual(find_program('/this/doesnt/exist/ls'), None)
class PyfaceAuiManager(aui.AuiManager):
    def CalculateDockSizerLimits(self, dock):
        """Return the (min, max) pixel positions the sash of *dock* may move to.

        Works on a copy of the current docks/panes: the target dock is
        shrunk to minimum size, the layout is recomputed, and the limits are
        derived either from neighbouring docks in the same direction/layer
        (when a partner dock exists) or from the opposite side of the frame.
        """
        (docks, panes) = aui.CopyDocksAndPanes2(self._docks, self._panes)
        sash_size = self._art.GetMetric(aui.AUI_DOCKART_SASH_SIZE)
        opposite_size = self.GetOppositeDockTotalSize(docks, dock.dock_direction)
        # Shrink the dock being resized to its minimum so the re-layout
        # reveals how far the sash could travel.
        for tmpDock in docks:
            if ((tmpDock.dock_direction == dock.dock_direction) and (tmpDock.dock_layer == dock.dock_layer) and (tmpDock.dock_row == dock.dock_row)):
                tmpDock.size = 1
                break
        # Collect (position, extent) along the resize axis for every dock
        # sharing this direction and layer.
        neighbor_docks = []
        horizontal = ((dock.dock_direction == aui.AUI_DOCK_LEFT) or (dock.dock_direction == aui.AUI_DOCK_RIGHT))
        right_or_down = ((dock.dock_direction == aui.AUI_DOCK_RIGHT) or (dock.dock_direction == aui.AUI_DOCK_BOTTOM))
        for d in docks:
            if ((d.dock_direction == dock.dock_direction) and (d.dock_layer == dock.dock_layer)):
                if horizontal:
                    neighbor_docks.append((d.rect.x, d.rect.width))
                else:
                    neighbor_docks.append((d.rect.y, d.rect.height))
        neighbor_docks.sort()
        # Re-layout the copied state to refresh all part rectangles.
        (sizer, panes, docks, uiparts) = self.LayoutAll(panes, docks, [], True, False)
        client_size = self._frame.GetClientSize()
        sizer.SetDimension(0, 0, client_size.x, client_size.y)
        sizer.Layout()
        for part in uiparts:
            part.rect = wx.Rect(part.sizer_item.GetPosition(), part.sizer_item.GetSize())
            if (part.type == aui.AuiDockUIPart.typeDock):
                part.dock.rect = part.rect
        sizer.Destroy()
        # Find the copy of *dock* in the re-laid-out state.
        new_dock = None
        for tmpDock in docks:
            if ((tmpDock.dock_direction == dock.dock_direction) and (tmpDock.dock_layer == dock.dock_layer) and (tmpDock.dock_row == dock.dock_row)):
                new_dock = tmpDock
                break
        partnerDock = self.GetPartnerDock(dock)
        if partnerDock:
            # With a partner dock the limits come from the sorted neighbour
            # docks rather than the opposite frame edge.
            if horizontal:
                pos = dock.rect.x
                size = dock.rect.width
            else:
                pos = dock.rect.y
                size = dock.rect.height
            min_pos = pos
            max_pos = (pos + size)
            if right_or_down:
                for (p, s) in neighbor_docks:
                    if (p >= pos):
                        max_pos = ((p + s) - sash_size)
                        break
                    else:
                        min_pos = (p + sash_size)
            else:
                for (p, s) in neighbor_docks:
                    if (p > pos):
                        max_pos = ((p + s) - sash_size)
                        break
                    else:
                        min_pos = (p + sash_size)
            return (min_pos, max_pos)
        # No partner dock: clamp against the opposite side of the client
        # area, leaving room for the sash itself.
        direction = new_dock.dock_direction
        if (direction == aui.AUI_DOCK_LEFT):
            minPix = (new_dock.rect.x + new_dock.rect.width)
            maxPix = ((client_size.x - opposite_size) - sash_size)
        elif (direction == aui.AUI_DOCK_TOP):
            minPix = (new_dock.rect.y + new_dock.rect.height)
            maxPix = ((client_size.y - opposite_size) - sash_size)
        elif (direction == aui.AUI_DOCK_RIGHT):
            minPix = opposite_size
            maxPix = (new_dock.rect.x - sash_size)
        elif (direction == aui.AUI_DOCK_BOTTOM):
            minPix = opposite_size
            maxPix = (new_dock.rect.y - sash_size)
        return (minPix, maxPix)
def GetPartnerDockFromPos(self, dock, point):
    """Find the neighbouring dock affected when the sash at ``point`` is dragged.

    Collects every dock sharing ``dock``'s direction and layer, sorted by
    position along the resize axis, then walks them toward the drag position
    to pick the dock on the far side of the sash.
    """
    # Left/right docks are resized along x; top/bottom docks along y.
    horizontal = ((dock.dock_direction == aui.AUI_DOCK_LEFT) or (dock.dock_direction == aui.AUI_DOCK_RIGHT))
    right_or_down = ((dock.dock_direction == aui.AUI_DOCK_RIGHT) or (dock.dock_direction == aui.AUI_DOCK_BOTTOM))
    if horizontal:
        pos = point.x
    else:
        pos = point.y
    # (position, size, dock) triples for every dock in the same direction/layer.
    neighbor_docks = []
    for d in self._docks:
        if ((d.dock_direction == dock.dock_direction) and (d.dock_layer == dock.dock_layer)):
            if horizontal:
                neighbor_docks.append((d.rect.x, d.rect.width, d))
            else:
                neighbor_docks.append((d.rect.y, d.rect.height, d))
    neighbor_docks.sort()
    last = None
    if right_or_down:
        # Scan outward: once the drag position falls inside a dock, the
        # partner is the previous dock if we landed back in dock's own row.
        for (p, s, d) in neighbor_docks:
            if (pos < (p + s)):
                if (d.dock_row == dock.dock_row):
                    d = last
                break
            last = d
    else:
        # Mirror scan for left/top docks (walk from the far edge inward).
        neighbor_docks.reverse()
        for (p, s, d) in neighbor_docks:
            if (pos > p):
                if (d.dock_row == dock.dock_row):
                    d = last
                break
            last = d
    # NOTE(review): assumes at least one matching dock exists (``dock`` itself
    # should be in self._docks), otherwise ``d`` would be unbound here.
    return d
def RestrictResize(self, clientPt, screenPt, createDC):
    """Commit a live resize of a dock sizer or pane sizer at ``clientPt``.

    Called while the user drags a sash.  Clips the drag position to the
    limits computed for the dock/pane, then either resizes the whole dock
    (typeDockSizer) -- with its partner dock absorbing the delta -- or
    redistributes dock proportions between the dragged pane and its partner
    pane, and finally re-runs the layout via Update().

    :param clientPt: drag position in frame client coordinates.
    :param screenPt: drag position in screen coordinates (unused here).
    :param createDC: when True, erase the previous resize hint first.
    """
    dock = self._action_part.dock
    pane = self._action_part.pane
    if createDC:
        # Erase the old resize-hint rectangle before applying the resize.
        if (wx.Platform == '__WXMAC__'):
            dc = wx.ClientDC(self._frame)
        else:
            dc = wx.ScreenDC()
        aui.DrawResizeHint(dc, self._action_rect)
        self._action_rect = wx.Rect()
    newPos = (clientPt - self._action_offset)
    # Clamp the drop position to the legal pixel range for this sizer.
    if (self._action_part.type == aui.AuiDockUIPart.typeDockSizer):
        (minPix, maxPix) = self.CalculateDockSizerLimits(dock)
    else:
        if (not self._action_part.pane):
            return
        (minPix, maxPix) = self.CalculatePaneSizerLimits(dock, pane)
    if (self._action_part.orientation == wx.HORIZONTAL):
        newPos.y = aui.Clip(newPos.y, minPix, maxPix)
    else:
        newPos.x = aui.Clip(newPos.x, minPix, maxPix)
    if (self._action_part.type == aui.AuiDockUIPart.typeDockSizer):
        # Whole-dock resize: this dock grows/shrinks, its partner compensates.
        partner = self.GetPartnerDockFromPos(dock, newPos)
        sash_size = self._art.GetMetric(aui.AUI_DOCKART_SASH_SIZE)
        button_size = self._art.GetMetric(aui.AUI_DOCKART_PANE_BUTTON_SIZE)
        new_dock_size = 0
        direction = dock.dock_direction
        if (direction == aui.AUI_DOCK_LEFT):
            new_dock_size = (newPos.x - dock.rect.x)
        elif (direction == aui.AUI_DOCK_TOP):
            new_dock_size = (newPos.y - dock.rect.y)
        elif (direction == aui.AUI_DOCK_RIGHT):
            new_dock_size = (((dock.rect.x + dock.rect.width) - newPos.x) - sash_size)
        elif (direction == aui.AUI_DOCK_BOTTOM):
            new_dock_size = (((dock.rect.y + dock.rect.height) - newPos.y) - sash_size)
        delta = (new_dock_size - dock.size)
        # Never shrink below one sash width; enforce a minimum step of one
        # button size so tiny drags still produce a visible change.
        if (delta < ((- dock.size) + sash_size)):
            delta = ((- dock.size) + sash_size)
        elif ((- button_size) < delta < button_size):
            delta = (button_size * (1 if (delta > 0) else (- 1)))
        if partner:
            # The partner dock absorbs what this dock gains (and vice versa).
            if (delta > (partner.size - sash_size)):
                delta = (partner.size - sash_size)
            partner.size -= delta
        dock.size += delta
        self.Update()
    else:
        # Pane resize within a dock: convert the pixel delta into proportions.
        if dock.IsHorizontal():
            oldPixsize = pane.rect.width
            newPixsize = ((oldPixsize + newPos.x) - self._action_part.rect.x)
        else:
            oldPixsize = pane.rect.height
            newPixsize = ((oldPixsize + newPos.y) - self._action_part.rect.y)
        (totalPixsize, totalProportion) = self.GetTotalPixSizeAndProportion(dock)
        partnerPane = self.GetPartnerPane(dock, pane)
        if ((totalPixsize <= 0) or (totalProportion <= 0) or (not partnerPane)):
            return
        # NOTE(review): iteratively trims totalPixsize until the old pixel
        # size maps back to at least the pane's current proportion --
        # presumably compensating for sizer rounding; confirm before changing.
        while ((oldPixsize > 0) and (totalPixsize > 10) and (((oldPixsize * totalProportion) / totalPixsize) < pane.dock_proportion)):
            totalPixsize -= 1
        newProportion = ((newPixsize * totalProportion) / totalPixsize)
        newProportion = aui.Clip(newProportion, 1, totalProportion)
        deltaProp = (newProportion - pane.dock_proportion)
        # Keep the partner pane at a proportion of at least 1.
        if ((partnerPane.dock_proportion - deltaProp) < 1):
            deltaProp = (partnerPane.dock_proportion - 1)
            newProportion = (pane.dock_proportion + deltaProp)
        partnerPane.dock_proportion -= deltaProp
        pane.dock_proportion = newProportion
        self.Update()
    return True
def UpdateWithoutLayout(self):
    """Refresh all visible docked pane windows without re-running the layout.

    Repaints each shown, docked pane's window, then refreshes the frame
    itself (a full-frame Refresh on Mac, Repaint elsewhere -- following the
    platform split used throughout this manager).
    """
    # Iterate the panes directly instead of indexing with range(len(...)).
    for pane in self._panes:
        if (pane.window and pane.IsShown() and pane.IsDocked()):
            pane.window.Refresh()
            pane.window.Update()
    if (wx.Platform == '__WXMAC__'):
        self._frame.Refresh()
    else:
        self.Repaint()
class PseudoDownloader(object):
    """Downloader stand-in that "fetches" by copying a local directory tree.

    The network fetch is deliberately a no-op; ``fetch`` simply mirrors a
    local source directory to the destination.
    """

    def __init__(self, overwrite):
        # When True, an existing destination is removed and replaced.
        self.overwrite = overwrite

    def _fetch(self, url, destination):
        # Intentionally does nothing: this is the pseudo (offline) downloader.
        pass

    def _copy_local_directory(self, source, target):
        """Copy ``source`` to ``target``, honouring the overwrite flag."""
        if os.path.exists(target):
            if not self.overwrite:
                # Keep the existing copy untouched.
                return
            shutil.rmtree(target)
        shutil.copytree(source, target)

    def fetch(self, src, dst):
        """Public entry point: mirror ``src`` into ``dst``."""
        self._copy_local_directory(src, dst)
class RelationshipTlsDnsRecordDnsRecord(ModelNormal):
    """Auto-generated OpenAPI model for the TLS DNS record relationship.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below look like decorators whose leading ``@`` was lost in this
    copy of the file -- confirm against the generated original.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> tuple of accepted types (lazy to avoid import cycles).
        lazy_import()
        return {'data': ([RelationshipMemberTlsDnsRecord],)}
    _property
    def discriminator():
        return None
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data into a new instance (skips read-only checks)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects writes to read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class TestURLSnippetsNoMax(util.MdCase):
    """Snippets URL download tests with ``url_max_size`` disabled (0 = no cap)."""
    extension = ['pymdownx.snippets']
    extension_configs = {'pymdownx.snippets': {'base_path': [os.path.join(BASE, '_snippets')], 'url_download': True, 'url_max_size': 0}}
    # NOTE(review): the line below looks like a ``@patch(...)`` decorator whose
    # ``@patch`` prefix was lost in this copy -- confirm against the original.
    ('urllib.request.urlopen')
    def test_content_length_zero(self, mock_urlopen):
        """A huge reported content-length must be accepted when the cap is 0."""
        cm = MagicMock()
        cm.status = 200
        cm.code = 200
        cm.readlines.return_value = [b'contents']
        # 48 MiB reported length -- would exceed any non-zero size cap.
        cm.headers = {'content-length': str(((1024 * 1024) * 48))}
        cm.__enter__.return_value = cm
        mock_urlopen.return_value = cm
        # NOTE(review): the markdown argument looks truncated (the snippet URL
        # after --8<-- appears to be missing) -- confirm against the original.
        self.check_markdown('\n --8<-- " ', '\n <p>contents</p>\n ', True)
class Aspect():
    """An aspect between two chart objects.

    Built from a properties dict whose keys become attributes; ``active``
    and ``passive`` are wrapped into AspectObject instances.
    """

    def __init__(self, properties):
        # Adopt every property as an instance attribute, then wrap the two
        # participating objects.
        for key, value in properties.items():
            setattr(self, key, value)
        self.active = AspectObject(self.active)
        self.passive = AspectObject(self.passive)

    def exists(self):
        """Return True if this represents an actual aspect."""
        return self.type != const.NO_ASPECT

    def movement(self):
        """Return the aspect movement, treating tight separations as exact."""
        result = self.active.movement
        if self.orb < 1 and result == const.SEPARATIVE:
            result = const.EXACT
        return result

    def mutualAspect(self):
        """Return True when both objects are within orb."""
        return (self.active.inOrb == True) and (self.passive.inOrb == True)

    def mutualMovement(self):
        """Return True when both objects share the same movement."""
        return self.active.movement == self.passive.movement

    def getRole(self, ID):
        """Describe the role of object ``ID`` in this aspect, or None."""
        for role_name, participant in (('active', self.active), ('passive', self.passive)):
            if participant.id == ID:
                return {'role': role_name, 'inOrb': participant.inOrb, 'movement': participant.movement}
        return None

    def inOrb(self, ID):
        """Return whether object ``ID`` is within orb, or None if absent."""
        role = self.getRole(ID)
        if role is None:
            return None
        return role['inOrb']

    def __str__(self):
        return f'<{self.active.id} {self.passive.id} {self.type} {self.active.movement} {angle.toString(self.orb)}>'
def jacobian_double(p: Tuple[(int, int, int)]) -> Tuple[(int, int, int)]:
    """Double an elliptic-curve point given in Jacobian coordinates (mod P).

    Uses the module-level curve parameters A and P.  A point with a zero
    y-coordinate doubles to the point at infinity, returned as (0, 0, 0).
    """
    x, y, z = p
    if not y:
        # Zero y-coordinate: the tangent is vertical, result is infinity.
        return (0, 0, 0)
    y_sq = pow(y, 2, P)
    s = (4 * x * y_sq) % P
    m = (3 * pow(x, 2, P) + A * pow(z, 4, P)) % P
    x_out = (pow(m, 2, P) - 2 * s) % P
    y_out = (m * (s - x_out) - 8 * pow(y_sq, 2, P)) % P
    z_out = (2 * y * z) % P
    return (x_out, y_out, z_out)
def test_vscode_with_args(vscode_patches, mock_remote_execution):
    """Check that running the workflow triggers every patched vscode hook once.

    NOTE(review): ``t`` and ``wf`` look like they lost their ``@task`` /
    ``@workflow`` decorators in this copy of the file -- confirm against the
    original test.
    """
    (mock_process, mock_prepare_interactive_python, mock_exit_handler, mock_download_vscode, mock_signal, mock_prepare_resume_task_python, mock_prepare_launch_json) = vscode_patches
    def t():
        return
    def wf():
        t()
    wf()
    # Each patched setup hook must have been invoked exactly once by the run.
    mock_download_vscode.assert_called_once()
    mock_process.assert_called_once()
    mock_exit_handler.assert_called_once()
    mock_prepare_interactive_python.assert_called_once()
    mock_signal.assert_called_once()
    mock_prepare_resume_task_python.assert_called_once()
    mock_prepare_launch_json.assert_called_once()
class AudioClipHandler(BaseHandler):
    """Handler that extracts AudioClip metadata into the SQLite report DB."""

    def __init__(self, id_generator, file_index):
        super(AudioClipHandler, self).__init__(id_generator, file_index)

    def process(self, current_id, obj, cursor, bundle_id):
        """Insert one AudioClip row and return (name, size, references, field_count)."""
        name = obj['m_Name'].value
        size = obj['m_Resource'].value['m_Size'].value
        cursor.execute('\n INSERT INTO audio_clips(id, bits_per_sample, frequency, channels, load_type, format)\n VALUES(?,?,?,?,?,?)\n ', (current_id, obj['m_BitsPerSample'].value, obj['m_Frequency'].value, obj['m_Channels'].value, obj['m_LoadType'].value, obj['m_CompressionFormat'].value))
        (_, references, field_count) = self._recursive_process(obj, '')
        return (name, size, references, field_count)

    def init_database(self, cursor):
        """Create the audio tables/views and seed the enum lookup tables.

        Fix: the audio_clips foreign keys now reference the ``id`` column of
        the lookup tables (they previously referenced the non-existent
        columns ``audio_load_types.load_type`` / ``audio_formats.format``,
        which fails with 'foreign key mismatch' once PRAGMA foreign_keys is
        on), and the table-constraint clauses are comma-separated as the SQL
        grammar requires.
        """
        audio_load_types = [(0, 'Decompress on Load'), (1, 'Compressed in Memory'), (2, 'Streaming')]
        audio_formats = [(0, 'PCM'), (1, 'Vorbis'), (2, 'ADPCM'), (3, 'MP3'), (4, 'PSMVAG'), (5, 'HEVAG'), (6, 'XMA'), (7, 'AAC'), (8, 'GCADPCM'), (9, 'ATRAC9')]
        cursor.execute('\n CREATE TABLE audio_load_types(\n id INTEGER,\n type TEXT,\n PRIMARY KEY (id)\n )\n ')
        cursor.execute('\n CREATE TABLE audio_formats(\n id INTEGER,\n format TEXT,\n PRIMARY KEY (id)\n )\n ')
        cursor.execute('\n CREATE TABLE audio_clips(\n id INTEGER,\n bits_per_sample INTEGER,\n frequency INTEGER,\n channels INTEGER,\n load_type INTEGER,\n format INTEGER,\n PRIMARY KEY (id),\n FOREIGN KEY (id) references objects(id),\n FOREIGN KEY (load_type) references audio_load_types(id),\n FOREIGN KEY (format) references audio_formats(id)\n )\n ')
        cursor.execute('\n CREATE VIEW audio_clip_view AS\n SELECT\n object_view.*,\n audio_clips.bits_per_sample,\n audio_clips.frequency,\n audio_clips.channels,\n audio_load_types.type,\n audio_formats.format\n FROM object_view\n INNER JOIN audio_clips ON object_view.id = audio_clips.id\n LEFT JOIN audio_load_types ON audio_clips.load_type = audio_load_types.id\n LEFT JOIN audio_formats ON audio_clips.format = audio_formats.id\n ')
        cursor.executemany('\n INSERT INTO audio_load_types values (?,?)\n ', audio_load_types)
        cursor.executemany('\n INSERT INTO audio_formats values (?,?)\n ', audio_formats)
        # Flags clips whose streaming setting doesn't match their size (1 MiB cutoff).
        cursor.execute('\n CREATE VIEW view_suspicious_audio_clips AS\n SELECT name, size, bits_per_sample, frequency, channels, type, format, bundle FROM audio_clip_view\n WHERE (type = "Streaming" AND size < 1024*1024) OR (type <> "Streaming" AND size > 1024*1024)\n ')
class StarletteServerErrorMiddlewareInstrumentation(AsyncAbstractInstrumentedModule):
    """Instruments Starlette's ServerErrorMiddleware entry point.

    Ends the current APM transaction once the wrapped middleware call
    finishes, whether it returned normally or raised.
    """

    name = 'starlette'
    instrument_list = [('starlette.middleware.errors', 'ServerErrorMiddleware.__call__')]
    creates_transactions = True

    async def call(self, module, method, wrapped, instance, args, kwargs):
        try:
            return await wrapped(*args, **kwargs)
        finally:
            # Close the transaction on both success and error paths.
            apm_client = get_client()
            if apm_client:
                apm_client.end_transaction()
class Exp(Fixed):
    """Fixed (parameter-free) bijector applying y = exp(x) elementwise.

    Maps the real line onto the positive reals; the log-abs-det-Jacobian of
    the forward map is simply x, since d/dx exp(x) = exp(x).
    """
    codomain = constraints.positive

    def _forward(self, x: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        # params is unused for this fixed transform.
        y = torch.exp(x)
        ladj = self._log_abs_det_jacobian(x, y, params)
        return (y, ladj)

    def _inverse(self, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        # x = log(y).  NOTE(review): the returned ladj here is that of the
        # forward map (+x); presumably the caller negates it for the inverse
        # direction -- confirm against the Fixed base-class contract.
        x = y.log()
        ladj = self._log_abs_det_jacobian(x, y, params)
        return (x, ladj)

    def _log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> torch.Tensor:
        # log|d exp(x)/dx| = x, elementwise.
        return x
def send_event(channel, event_type, data, skip_user_ids=None, async_publish=True, json_encode=True):
    """Publish ``data`` as an event on ``channel``.

    Reliable channels with storage configured get their event persisted
    first (providing monotonically increasing ids for resume support);
    otherwise an ephemeral Event is used.  The event is also pushed to any
    in-process channel listeners before being published.
    """
    from .event import Event
    if json_encode:
        data = json.dumps(data, cls=DjangoJSONEncoder)
    if skip_user_ids is None:
        skip_user_ids = []
    storage = get_storage()
    channelmanager = get_channelmanager()
    if channelmanager.is_channel_reliable(channel) and storage:
        # Persisted event: derive publish ids from the stored sequence.
        event = storage.append_event(channel, event_type, data)
        pub_id = str(event.id)
        pub_prev_id = str(event.id - 1)
    else:
        event = Event(channel, event_type, data)
        pub_id = pub_prev_id = None
    if have_channels():
        from .consumers import get_listener_manager
        get_listener_manager().add_to_queues(channel, event)
    publish_event(channel, event_type, data, pub_id, pub_prev_id, skip_user_ids=skip_user_ids, blocking=(not async_publish))
class ElasticAPM(object):
    """WSGI middleware that reports unhandled exceptions to Elastic APM.

    Wraps a WSGI application: the response iterable is streamed through
    unchanged, and any exception raised while producing it is captured with
    the request context before being re-raised.
    """

    def __init__(self, application, client) -> None:
        # application: the wrapped WSGI app; client: the APM client used
        # for error capture.
        self.application = application
        self.client = client

    def __call__(self, environ, start_response):
        try:
            # Stream the wrapped app's response through unchanged.
            for event in self.application(environ, start_response):
                (yield event)
        except Exception:
            exc_info = sys.exc_info()
            self.handle_exception(exc_info, environ)
            # Drop the reference so traceback frames can be collected.
            exc_info = None
            raise

    def handle_exception(self, exc_info, environ):
        """Capture ``exc_info`` with request metadata; return the APM event id."""
        event_id = self.client.capture('Exception', exc_info=exc_info, context={'request': {'method': environ.get('REQUEST_METHOD'), 'url': get_url_dict(get_current_url(environ)), 'headers': dict(get_headers(environ)), 'env': dict(get_environ(environ))}}, handled=False)
        return event_id
# NOTE(review): the line below looks like a pytest marker decorator whose
# '@pytest.mark.' prefix was lost in this copy -- confirm against the original.
.slow_integration_test
def test_clone_subgroup_only_archived():
    """Integration test: '-a only' should list only archived projects."""
    # NOTE(review): this string literal appears truncated (unterminated quote);
    # the original presumably assigned a full GitLab URL here.
    os.environ['GITLAB_URL'] = '
    output = io_util.execute(['-p', '--print-format', 'json', '-a', 'only'], 60)
    obj = json.loads(output)
    assert (obj['children'][0]['name'] == 'Group Test')
    assert (obj['children'][0]['children'][0]['name'] == 'Subgroup Test')
    # Only the single archived project should remain in the subgroup.
    assert (len(obj['children'][0]['children'][0]['children']) == 1)
    assert (obj['children'][0]['children'][0]['children'][0]['name'] == 'archived-project')
def _performance_log(func):
    """Decorator logging wall-clock duration and peak memory of ``func``.

    The measurement is emitted via _LOGGER only when the module-level
    _log_performance flag is truthy; the wrapped result is returned
    unchanged.
    """
    import functools

    # functools.wraps preserves the wrapped function's name/docstring;
    # accepting **kwargs fixes the original wrapper, which silently dropped
    # keyword arguments and so broke decorated functions called with them.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start = datetime.datetime.now()
        res = func(*args, **kwargs)
        if _log_performance:
            usage = resource.getrusage(resource.RUSAGE_SELF)
            # ru_maxrss is kilobytes on Linux (bytes on macOS); /1000
            # approximates megabytes on Linux -- TODO confirm target platform.
            process_memory = (usage.ru_maxrss / 1000)
            delta = (datetime.datetime.now() - start)
            delta_milliseconds = int((delta.total_seconds() * 1000))
            _LOGGER.info('PERFORMANCE - {0} - milliseconds |{1:>8,}| - memory MB |{2:>8,}|'.format(func.__name__, delta_milliseconds, process_memory))
        return res
    return wrapper
class IRIPAllowDeny(IRFilter):
    """IRFilter translating ip-allow/ip-deny principals into Envoy RBAC terms.

    ``action`` is "ALLOW" or "DENY"; ``principals`` pairs each configured
    principal kind ("peer" or "remote", mapped through EnvoyTypeMap) with
    its parsed CIDR range.
    """
    parent: IRResource
    action: str
    principals: List[Tuple[(str, 'CIDRRange')]]
    # Ambassador principal kind -> Envoy RBAC principal field name.
    EnvoyTypeMap: ClassVar[Dict[(str, str)]] = {'remote': 'remote_ip', 'peer': 'direct_remote_ip'}

    def __init__(self, ir: 'IR', aconf: Config, rkey: str='ir.ipallowdeny', name: str='ir.ipallowdeny', kind: str='IRIPAllowDeny', parent: (IRResource | None)=None, action: (str | None)=None, **kwargs) -> None:
        """Initialize; ``parent`` and ``action`` are required despite defaults."""
        assert (parent is not None)
        assert (action is not None)
        super().__init__(ir=ir, aconf=aconf, rkey=rkey, kind=kind, name=name, parent=parent, action=action, **kwargs)

    def setup(self, ir: 'IR', aconf: Config) -> bool:
        """Validate action/principals; return True iff any principal is usable."""
        assert self.parent
        action: Optional[str] = self.pop('action')
        principals: Optional[List[Dict[(str, str)]]] = self.pop('principals')
        assert (action is not None)
        assert (principals is not None)
        action = action.upper()
        if ((action != 'ALLOW') and (action != 'DENY')):
            raise RuntimeError(f'IRIPAllowDeny action must be ALLOW or DENY, not {action}')
        self.action = action
        self.principals = []
        ir.logger.debug(f'PRINCIPALS: {principals}')
        # Imported here to avoid a circular import with the envoy v3 package.
        from ..envoy.v3.v3cidrrange import CIDRRange
        for pdict in principals:
            # Each list element must hold exactly one kind -> spec pair.
            first = True
            for (kind, spec) in pdict.items():
                if (not first):
                    self.parent.post_error(f'ip{self.action.lower()} principals must be separate list elements')
                    break
                first = False
                envoy_kind = IRIPAllowDeny.EnvoyTypeMap.get(kind, None)
                if (not envoy_kind):
                    self.parent.post_error(f'ip{self.action.lower()} principal type {kind} unknown: must be peer or remote')
                    continue
                cidrrange = CIDRRange(spec)
                if cidrrange:
                    self.principals.append((envoy_kind, cidrrange))
                else:
                    self.parent.post_error(f'ip_{self.action.lower()} principal {spec} is not valid: {cidrrange.error}')
        if (len(self.principals) > 0):
            return True
        else:
            return False

    def __str__(self) -> str:
        pstrs = [str(x) for x in self.principals]
        return f"<IPAllowDeny {self.action}: {', '.join(pstrs)}>"

    def as_dict(self) -> dict:
        """Serialize action and principals for debug/diagnostic dumps."""
        return {'action': self.action, 'principals': [{kind: block.as_dict()} for (kind, block) in self.principals]}
def arg_botcmd(*args, hidden: bool=None, name: str=None, admin_only: bool=False, historize: bool=True, template: str=None, flow_only: bool=False, unpack_args: bool=True, **kwargs) -> Callable[([BotPlugin, Message, Any], Any)]:
    """Decorator turning a plugin method into a bot command with argparse args.

    May be applied bare (``@arg_botcmd``) or with argparse-style arguments
    (``@arg_botcmd('--foo', type=int)``); stacked applications share one
    ArgumentParser attached on first use.
    """
    argparse_args = args
    # Bare usage: the decorated function itself arrives as args[0].
    if ((len(args) >= 1) and callable(args[0])):
        argparse_args = args[1:]
    def decorator(func):
        if (not hasattr(func, '_err_command')):
            # First application: build the parser shared by later decorators.
            err_command_parser = ArgumentParser(prog=(name or func.__name__), description=func.__doc__)
            # NOTE(review): the bare ``(func)`` line below looks like a
            # decorator whose ``@...`` prefix was lost in this copy of the
            # file -- confirm against the original.
            (func)
            def wrapper(self, msg, args):
                try:
                    # Replace problem characters, then tokenize shell-style
                    # before handing off to argparse.
                    sanitizer_re = re.compile('|'.join((re.escape(ii) for ii in ARG_BOTCMD_CHARACTER_REPLACEMENTS)))
                    args = sanitizer_re.sub((lambda mm: ARG_BOTCMD_CHARACTER_REPLACEMENTS[mm.group()]), args)
                    args = shlex.split(args)
                    parsed_args = err_command_parser.parse_args(args)
                except ArgumentParseError as e:
                    (yield f"I couldn't parse the arguments; {e}")
                    (yield err_command_parser.format_usage())
                    return
                except HelpRequested:
                    (yield err_command_parser.format_help())
                    return
                except ValueError as ve:
                    (yield f"I couldn't parse this command; {ve}")
                    (yield err_command_parser.format_help())
                    return
                if unpack_args:
                    # Spread parsed options as keyword arguments.
                    func_args = []
                    func_kwargs = vars(parsed_args)
                else:
                    # Hand the namespace over as one positional argument.
                    func_args = [parsed_args]
                    func_kwargs = {}
                if inspect.isgeneratorfunction(func):
                    for reply in func(self, msg, *func_args, **func_kwargs):
                        (yield reply)
                else:
                    (yield func(self, msg, *func_args, **func_kwargs))
            _tag_botcmd(wrapper, _re=False, _arg=True, hidden=hidden, name=(name or wrapper.__name__), admin_only=admin_only, historize=historize, template=template, flow_only=flow_only, command_parser=err_command_parser)
        else:
            wrapper = func
        # NOTE(review): update_wrapper is normally called as
        # update_wrapper(wrapper, func); this argument list looks garbled in
        # this copy -- confirm against the original.
        update_wrapper(wrapper, argparse_args, kwargs)
        return wrapper
    return (decorator(args[0]) if callable(args[0]) else decorator)
class RequestID(namedtuple('RequestID', 'kind timestamp user workerid')):
    """Identifier for a benchmark job request: req-<kind>-<timestamp>-<user>[-<workerid>].

    The workerid suffix is omitted from the string form when it was
    defaulted rather than explicitly supplied.

    NOTE(review): ``from_raw``, ``parse`` and ``generate`` take ``cls`` and
    ``date`` takes ``self`` but carry no decorators in this copy -- they look
    like @classmethods (and a @property for ``date``) whose decorators were
    lost; confirm against the original.
    """
    KIND = types.SimpleNamespace(BENCHMARKS='compile-bench')
    # Valid kind strings, mapped to themselves for validation/normalization.
    _KIND_BY_VALUE = {v: v for (_, v) in vars(KIND).items()}
    _workerid_defaulted: bool
    def from_raw(cls, raw: Any):
        """Coerce an instance, id string, or iterable of fields into a RequestID."""
        if isinstance(raw, cls):
            return raw
        elif isinstance(raw, str):
            return cls.parse(raw)
        elif (not raw):
            raise NotImplementedError(raw)
        else:
            try:
                args = tuple(raw)
            except TypeError:
                raise NotImplementedError(repr(raw))
            return cls(*args)
    def parse(cls, idstr: str):
        """Parse 'req-[kind-]<10-digit ts>-<user>[-worker]'; None if malformed."""
        kinds = '|'.join(cls._KIND_BY_VALUE)
        # NOTE(review): non-raw f-string with \d/\w escapes -- works, but a
        # raw string would be the safe spelling.
        m = re.match(f'^req-(?:({kinds})-)?(\d{{10}})-(\w+)(?:-(\w+))?$', idstr)
        if (not m):
            return None
        (kind, timestamp, user, workerid) = m.groups()
        return cls(kind, int(timestamp), user, workerid)
    def generate(cls, cfg: int, user: Optional[str]=None, kind: str=KIND.BENCHMARKS, workerid: str='linux') -> 'RequestID':
        """Build a new RequestID for the current UTC time and resolved user."""
        user = _utils.resolve_user(cfg, user)
        timestamp = int(_utils.utcnow())
        return cls(kind, timestamp, user, workerid)
    def __new__(cls, kind: Optional[str], timestamp: Optional[int], user: Optional[str], workerid: Optional[str]):
        """Validate/normalize all fields; remember whether workerid was defaulted."""
        if (not kind):
            kind = cls.KIND.BENCHMARKS
        else:
            try:
                kind = cls._KIND_BY_VALUE[kind]
            except KeyError:
                raise ValueError(f'unsupported kind {kind!r}')
        if (not timestamp):
            raise ValueError('missing timestamp')
        elif isinstance(timestamp, str):
            # Accept '1234567890.5'-style strings; keep the integer part.
            (timestamp, _, _) = timestamp.partition('.')
            timestamp = int(timestamp)
        elif (not isinstance(timestamp, int)):
            try:
                timestamp = int(timestamp)
            except TypeError:
                raise TypeError(f'expected int timestamp, got {timestamp!r}')
        if (not user):
            raise ValueError('missing user')
        elif (not isinstance(user, str)):
            raise TypeError(f'expected str for user, got {user!r}')
        else:
            _utils.check_name(user)
        if (not workerid):
            # Default silently; __str__ then omits the suffix.
            workerid_defaulted = True
            workerid = 'linux'
        else:
            workerid_defaulted = False
            _utils.check_name(workerid)
        self = super().__new__(cls, kind=kind, timestamp=timestamp, user=user, workerid=workerid)
        self._workerid_defaulted = workerid_defaulted
        return self
    def __str__(self):
        if self._workerid_defaulted:
            suffix = ''
        else:
            suffix = f'-{self.workerid}'
        return f'req-{self.kind}-{self.timestamp}-{self.user}{suffix}'
    def date(self) -> Optional[datetime.datetime]:
        """UTC datetime corresponding to this request's timestamp."""
        (dt, _) = _utils.get_utc_datetime(self.timestamp)
        return dt
def lindh_guess(geom):
    """Construct a Lindh-style model Hessian guess for ``geom``.

    Combines per-pair alpha parameters with covalent-radius and distance
    terms into the rho weighting matrix, then delegates to
    lindh_style_guess with the standard force constants for bonds (2),
    bends (3) and torsions (4).
    """
    elements = [atom.lower() for atom in geom.atoms]
    alphas = [get_lindh_alpha(el1, el2) for el1, el2 in it.combinations(elements, 2)]
    pair_cov_radii = get_pair_covalent_radii(geom.atoms)
    dist_mat = pdist(geom.coords3d)
    # rho_ij = exp(alpha_ij * (r_cov_ij^2 - r_ij^2)), in square form.
    rhos = squareform(np.exp(alphas * (pair_cov_radii ** 2 - dist_mat ** 2)))
    force_constants = {2: 0.45, 3: 0.15, 4: 0.005}
    return lindh_style_guess(geom, force_constants, rhos)
def should_run(**kwargs):
    """Branch selector: return the next task id ('start' or 'finish').

    Chooses 'start' when the run was forced via dag_run.conf['force_to_run']
    (defaulted to False in the conf if absent) or when the next execution
    date falls on weekday 5 (Saturday); otherwise skips to 'finish'.
    """
    next_exec_date = kwargs['next_execution_date']
    force_to_run = kwargs['dag_run'].conf.setdefault('force_to_run', False)
    print('---- weekday: {}, force_to_run: {}, context {}'.format(next_exec_date.weekday(), force_to_run, kwargs))
    if force_to_run or next_exec_date.weekday() == 5:
        return 'start'
    return 'finish'
def consolidate_query_matches(row: Row, target_path: FieldPath, flattened_matches: Optional[List]=None) -> List[Any]:
    """Recursively collect every truthy value found under ``target_path``.

    Lists are descended element-by-element; dict keys are only followed when
    they match the next level of ``target_path``.  Leaf values are appended
    to (and returned via) ``flattened_matches``.
    """
    matches = flattened_matches if flattened_matches is not None else []
    if isinstance(row, list):
        for item in row:
            consolidate_query_matches(item, target_path, matches)
    elif isinstance(row, dict):
        for field, value in row.items():
            # Descend only along the remaining target path.
            if target_path.levels and field == target_path.levels[0]:
                consolidate_query_matches(value, FieldPath(*target_path.levels[1:]), matches)
    elif row:
        matches.append(row)
    return matches
class BackendResponse(ModelComposed):
    """Auto-generated composed OpenAPI model for a Fastly backend response.

    Composes Backend, BackendResponseAllOf, ServiceIdAndVersion and
    Timestamps via allOf (see ``_composed_schemas``).

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below look like decorators whose leading ``@`` was lost in this
    copy of the file -- confirm against the generated original.
    """
    allowed_values = {}
    validations = {('share_key',): {'regex': {'pattern': '^[A-Za-z0-9]+$'}}}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> tuple of accepted types (lazy to avoid import cycles).
        lazy_import()
        return {'address': (str,), 'auto_loadbalance': (bool,), 'between_bytes_timeout': (int,), 'client_cert': (str, none_type), 'comment': (str, none_type), 'connect_timeout': (int,), 'first_byte_timeout': (int,), 'healthcheck': (str, none_type), 'hostname': (str, none_type), 'ipv4': (str, none_type), 'ipv6': (str, none_type), 'keepalive_time': (int, none_type), 'max_conn': (int,), 'max_tls_version': (str, none_type), 'min_tls_version': (str, none_type), 'name': (str,), 'override_host': (str, none_type), 'port': (int,), 'request_condition': (str,), 'share_key': (str, none_type), 'shield': (str, none_type), 'ssl_ca_cert': (str, none_type), 'ssl_cert_hostname': (str, none_type), 'ssl_check_cert': (bool, none_type), 'ssl_ciphers': (str, none_type), 'ssl_client_cert': (str, none_type), 'ssl_client_key': (str, none_type), 'ssl_hostname': (str, none_type), 'ssl_sni_hostname': (str, none_type), 'use_ssl': (bool,), 'weight': (int,), 'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'service_id': (str,), 'version': (int,), 'locked': (bool,)}
    _property
    def discriminator():
        return None
    attribute_map = {'address': 'address', 'auto_loadbalance': 'auto_loadbalance', 'between_bytes_timeout': 'between_bytes_timeout', 'client_cert': 'client_cert', 'comment': 'comment', 'connect_timeout': 'connect_timeout', 'first_byte_timeout': 'first_byte_timeout', 'healthcheck': 'healthcheck', 'hostname': 'hostname', 'ipv4': 'ipv4', 'ipv6': 'ipv6', 'keepalive_time': 'keepalive_time', 'max_conn': 'max_conn', 'max_tls_version': 'max_tls_version', 'min_tls_version': 'min_tls_version', 'name': 'name', 'override_host': 'override_host', 'port': 'port', 'request_condition': 'request_condition', 'share_key': 'share_key', 'shield': 'shield', 'ssl_ca_cert': 'ssl_ca_cert', 'ssl_cert_hostname': 'ssl_cert_hostname', 'ssl_check_cert': 'ssl_check_cert', 'ssl_ciphers': 'ssl_ciphers', 'ssl_client_cert': 'ssl_client_cert', 'ssl_client_key': 'ssl_client_key', 'ssl_hostname': 'ssl_hostname', 'ssl_sni_hostname': 'ssl_sni_hostname', 'use_ssl': 'use_ssl', 'weight': 'weight', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'service_id': 'service_id', 'version': 'version', 'locked': 'locked'}
    read_only_vars = {'created_at', 'deleted_at', 'updated_at', 'service_id', 'version', 'locked'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data into a new instance (skips read-only checks)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Distribute kwargs across the composed (allOf) schema instances.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects writes to read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # allOf composition of the four component schemas.
        lazy_import()
        return {'anyOf': [], 'allOf': [Backend, BackendResponseAllOf, ServiceIdAndVersion, Timestamps], 'oneOf': []}
def make_resolution_plan(app: App, bench: 'Bench'):
    """Recursively resolve ``app``'s dependency apps into an ordered plan.

    Returns an OrderedDict mapping app_name -> App with ``app`` first and
    its (transitive) dependencies after it; already-resolved apps are
    skipped with a warning.  Also records the install order (dependencies
    first) on ``app.local_resolution``.
    """
    resolution = OrderedDict()
    resolution[app.app_name] = app
    for app_name in app._get_dependencies():
        dep_app = App(app_name, bench=bench)
        # Fails fast if the dependency's branch doesn't exist upstream.
        is_valid_frappe_branch(dep_app.url, dep_app.branch)
        # NOTE(review): the rest of this function keys on ``app_name`` --
        # confirm whether ``app.name`` or ``app.app_name`` is intended here.
        dep_app.required_by = app.name
        if (dep_app.app_name in resolution):
            click.secho(f'{dep_app.app_name} is already resolved skipping', fg='yellow')
            continue
        resolution[dep_app.app_name] = dep_app
        resolution.update(make_resolution_plan(dep_app, bench))
    # Reversed insertion order = dependencies before dependents.
    app.local_resolution = [repo_name for (repo_name, _) in reversed(resolution.items())]
    return resolution
def build_unix():
    """Build the raylib CFFI binding for Linux/macOS.

    Locates the raylib headers via pkg-config, feeds their preprocessed
    declarations to cffi, optionally pulls in raygui/physac when present,
    and configures the platform-specific link flags.

    Raises:
        Exception: if raylib or any required header is not installed.
    """
    if (not check_raylib_installed()):
        raise Exception('ERROR: raylib not found by pkg-config. Please install pkg-config and Raylib.')
    raylib_h = (get_the_include_path() + '/raylib.h')
    rlgl_h = (get_the_include_path() + '/rlgl.h')
    raymath_h = (get_the_include_path() + '/raymath.h')
    # Check each required header, naming the missing one in the error.
    # (The original reported raylib.h when raymath.h was the missing file.)
    for header in (raylib_h, rlgl_h, raymath_h):
        if (not os.path.isfile(header)):
            raise Exception((('ERROR: ' + header) + ' not found. Please install Raylib.'))
    ffi_includes = '\n #include "raylib.h"\n #include "rlgl.h"\n #include "raymath.h"\n '
    raygui_h = (get_the_include_path() + '/raygui.h')
    if check_header_exists(raygui_h):
        ffi_includes += '\n #define RAYGUI_IMPLEMENTATION\n #define RAYGUI_SUPPORT_RICONS\n #include "raygui.h"\n '
    physac_h = (get_the_include_path() + '/physac.h')
    if check_header_exists(physac_h):
        ffi_includes += '\n #define PHYSAC_IMPLEMENTATION\n #include "physac.h"\n '
    ffibuilder.cdef(pre_process_header(raylib_h))
    ffibuilder.cdef(pre_process_header(rlgl_h))
    ffibuilder.cdef(pre_process_header(raymath_h, True))
    # NOTE(review): the optional headers are gated on check_header_exists
    # above but os.path.isfile here -- presumably equivalent; confirm.
    if os.path.isfile(raygui_h):
        ffibuilder.cdef(pre_process_header(raygui_h))
    if os.path.isfile(physac_h):
        ffibuilder.cdef(pre_process_header(physac_h))
    if (platform.system() == 'Darwin'):
        print('BUILDING FOR MAC')
        extra_link_args = [(get_the_lib_path() + '/libraylib.a'), '-framework', 'OpenGL', '-framework', 'Cocoa', '-framework', 'IOKit', '-framework', 'CoreFoundation', '-framework', 'CoreVideo']
        libraries = []
    else:
        print('BUILDING FOR LINUX')
        extra_link_args = (get_lib_flags() + ['-lm', '-lpthread', '-lGL', '-lrt', '-lm', '-ldl', '-lX11', '-lpthread', '-latomic'])
        libraries = ['GL', 'm', 'pthread', 'dl', 'rt', 'X11', 'atomic']
    ffibuilder.set_source('raylib._raylib_cffi', ffi_includes, include_dirs=[get_the_include_path()], extra_link_args=extra_link_args, libraries=libraries)
def run(globaldb, localdb, verbose=False):
    """Remove every entry found in *localdb* from *globaldb*, rewriting *globaldb* in place.

    Both files are parsed with ``util.parse_db_lines``; any tag present in the
    local database is dropped, and the surviving entries are written back to
    *globaldb* via ``util.write_db_lines``.

    Args:
        globaldb: path of the database file to filter and rewrite.
        localdb: path of the database whose tags should be removed.
        verbose: when True, print progress information.
    """
    # Unused scratch lists from the original (local_db_files/work_db_files)
    # were removed: nothing ever read or wrote them.
    global_entries = {}
    local_entries = {}
    final_entries = {}
    if verbose:
        print('removing %s from %s' % (localdb, globaldb))
    # `mode` is parsed but irrelevant here; only tag -> bits is kept.
    for (line, (tag, bits, mode)) in util.parse_db_lines(globaldb):
        global_entries[tag] = bits
    for (line, (tag, bits, mode)) in util.parse_db_lines(localdb):
        local_entries[tag] = bits
    for entry in global_entries:
        if (entry not in local_entries):
            final_entries[entry] = global_entries[entry]
        elif verbose:
            print('Removing entry %s' % entry)
    util.write_db_lines(globaldb, final_entries)
class SF1QuarterlyData():
    """Loader for SF1 quarterly fundamentals stored as per-ticker JSON files.

    Attributes:
        data_path: root directory containing a ``core_fundamental`` folder
            with one ``<ticker>.json`` file per ticker.
        quarter_count: if set, keep only the first N rows per ticker
            (most recent quarters, given the file ordering).
        dimension: SF1 reporting dimension to select (default 'ARQ').
    """

    def __init__(self, data_path: Optional[str]=None, quarter_count: Optional[int]=None, dimension: Optional[str]='ARQ'):
        if (data_path is None):
            # Fall back to the configured default location.
            data_path = load_config()['sf1_data_path']
        self.data_path = data_path
        self.quarter_count = quarter_count
        self.dimension = dimension

    def load(self, index: List[str]) -> pd.DataFrame:
        """Load fundamentals for the given tickers into one DataFrame.

        Tickers with no file on disk are silently skipped. Returns a frame
        with a parsed 'date' column (copied from 'datekey'), sorted
        descending per ticker, or None when nothing could be loaded.
        """
        result = []
        for ticker in index:
            path = '{}/core_fundamental/{}.json'.format(self.data_path, ticker)
            if (not os.path.exists(path)):
                continue
            df = _load_df(path)
            # Copy after filtering so the 'date' assignment below does not
            # operate on (and warn about) a view of the original frame.
            df = df[(df['dimension'] == self.dimension)].copy()
            if (self.quarter_count is not None):
                df = df[:self.quarter_count]
            df['date'] = df['datekey']
            df = df.sort_values('date', ascending=False)
            result.append(df)
        if (len(result) == 0):
            return None
        result = pd.concat(result, axis=0).reset_index(drop=True)
        result = result.infer_objects()
        # BUG FIX: astype(np.datetime64) is rejected by modern pandas (a unit
        # is required); pd.to_datetime yields the same datetime64[ns] dtype.
        result['date'] = pd.to_datetime(result['date'])
        return result

    def existing_index(self):
        """Return the tickers that have a JSON file under core_fundamental."""
        dir_path = '{}/core_fundamental'.format(self.data_path)
        index = [x.split('.json')[0] for x in os.listdir(dir_path)]
        return index
def node_swap_abilities(caller, raw_string, **kwargs):
    """Menu node letting the player swap the values of two abilities.

    Args:
        caller: entity using the menu (unused here; required by the node
            signature).
        raw_string: raw input that led to this node (unused).
        **kwargs: must contain 'tmp_character', the in-progress character
            whose strength/cunning/will are displayed.

    Returns:
        (text, options): menu text plus an options dict routing any input
        to ``_swap_abilities`` with the current kwargs.
    """
    tmp_character = kwargs['tmp_character']
    # NOTE(review): |w...|n presumably are color markup codes of the menu
    # system -- confirm before editing the text below.
    text = f'''
Your current abilities:
STR +{tmp_character.strength}
CUN +{tmp_character.cunning}
WIL +{tmp_character.will}
You can swap the values of two abilities around.
You can only do this once, so choose carefully!
To swap the values of e.g. STR and WIL, write |wSTR WIL|n. Empty to abort.
'''
    options = {'key': '_default', 'goto': (_swap_abilities, kwargs)}
    return (text, options)
class ConnectionConfigSecretsSchema(BaseModel, abc.ABC):
    """Abstract base schema for connection secrets.

    Subclasses must declare ``_required_components``: the names of the
    secret fields that all have to be supplied together.
    """
    # Names of the mandatory secret fields for a concrete subclass.
    _required_components: List[str]

    def __init_subclass__(cls: BaseModel, **kwargs: Any):
        """Reject concrete subclasses that forget ``_required_components``."""
        super().__init_subclass__(**kwargs)
        if (not getattr(cls, '_required_components')):
            raise TypeError(f"Class {cls.__name__} must define '_required_components.'")

    # NOTE(review): the bare `_validator` below looks like a stripped
    # decorator (presumably a pydantic root validator) -- confirm against
    # the original source.
    _validator
    def required_components_supplied(cls: ConnectionConfigSecretsSchema, values: Dict[(str, Any)]) -> Dict[(str, Any)]:
        """Ensure every required secret component has a truthy value."""
        min_fields_present = all((values.get(component) for component in cls._required_components))
        if (not min_fields_present):
            raise ValueError(f'{cls.__name__} must be supplied all of: {cls._required_components}.')
        return values

    class Config():
        # Ignore unexpected fields; allow parsing from ORM objects.
        extra = Extra.ignore
        orm_mode = True
class Game(JsonDeserializable):
    """Telegram Bot API 'Game' object deserialized from JSON.

    NOTE(review): de_json / parse_photo / parse_entities take ``cls`` and
    look like classmethods whose @classmethod decorators were stripped
    from this copy -- confirm against upstream.
    """

    def de_json(cls, json_string):
        """Build a Game from a JSON string/dict; returns None for None input."""
        if (json_string is None):
            return None
        obj = cls.check_json(json_string)
        obj['photo'] = Game.parse_photo(obj['photo'])
        # Optional fields are only converted when present in the payload.
        if ('text_entities' in obj):
            obj['text_entities'] = Game.parse_entities(obj['text_entities'])
        if ('animation' in obj):
            obj['animation'] = Animation.de_json(obj['animation'])
        return cls(**obj)

    def parse_photo(cls, photo_size_array):
        """Convert a list of raw photo-size dicts into PhotoSize objects."""
        ret = []
        for ps in photo_size_array:
            ret.append(PhotoSize.de_json(ps))
        return ret

    def parse_entities(cls, message_entity_array):
        """Convert raw text-entity dicts into MessageEntity objects."""
        ret = []
        for me in message_entity_array:
            ret.append(MessageEntity.de_json(me))
        return ret

    def __init__(self, title, description, photo, text=None, text_entities=None, animation=None, **kwargs):
        self.title: str = title
        self.description: str = description
        self.photo: List[PhotoSize] = photo
        self.text: str = text
        self.text_entities: List[MessageEntity] = text_entities
        self.animation: Animation = animation
class Logout(MethodView):
    """Flask MethodView endpoint that logs the current user out.

    NOTE(review): the bare `_required` below looks like a stripped
    decorator (presumably a login-required guard) -- confirm against the
    original source.
    """
    _required
    def get(self):
        """Destroy the session, mark the user inactive, and log out.

        Returns:
            ('Logged Out', 200) tuple for Flask.
        """
        logger.debug('User logged out: {}'.format(current_user.username))
        user = User.query.filter_by(username=current_user.username).first()
        # Invalidate the server-side session before flipping the flag.
        crackq.app.session_interface.destroy(session)
        user.active = False
        db.session.commit()
        logout_user()
        return ('Logged Out', 200)
def test_slate_mixed_vector(Wd):
    """Check Slate tensor algebra against direct assembly on a mixed space.

    A.inv * A applied to an assembled vector must reproduce it, and
    A * f must match the assembled action of the bilinear form on f.
    """
    trial_u, trial_p = TrialFunctions(Wd)
    test_v, test_q = TestFunctions(Wd)
    a = ((inner(trial_u, test_v) * dx) + (inner(trial_p, test_q) * dx))
    mass = Tensor(a)
    f = Function(Wd)
    f.sub(0).assign(2)
    f.sub(1).assign(1)
    # Identity check: A^{-1} A f == f componentwise.
    identity_result = assemble(((mass.inv * mass) * AssembledVector(f)))
    assert numpy.allclose(identity_result.sub(0).dat.data_ro, 2)
    assert numpy.allclose(identity_result.sub(1).dat.data_ro, 1)
    # Matrix-vector product must agree with assemble(action(a, f)).
    matvec = assemble((mass * AssembledVector(f)))
    expected = assemble(action(a, f))
    for got, want in zip(matvec.subfunctions, expected.subfunctions):
        assert numpy.allclose(got.dat.data_ro, want.dat.data_ro)
class Command(BaseCommand):
    """Regenerate smoketest expectations from BigQuery.

    For every ``*.sql`` file in the smoketests metadata directory, run the
    query (restricted to the five years up to the last imported month) and
    write the resulting cost/items/quantity series to the matching
    ``<test_name>.json`` file.
    """

    def add_arguments(self, parser):
        # e.g. '2023_04': year and month of the last prescribing import.
        parser.add_argument('last_imported')

    def handle(self, *args, **kwargs):
        last_imported = kwargs['last_imported']
        # '2023_04' -> '2023-04-01'
        prescribing_date = ('-'.join(last_imported.split('_')) + '-01')
        date_condition = ('month > TIMESTAMP(DATE_SUB(DATE "%s", INTERVAL 5 YEAR))' % prescribing_date)
        path = os.path.join(settings.PIPELINE_METADATA_DIR, 'smoketests')
        # PERF FIX: one BigQuery client serves all queries; the original
        # constructed a new Client() for every SQL file in the loop.
        client = Client()
        for sql_file in glob.glob(os.path.join(path, '*.sql')):
            test_name = os.path.splitext(os.path.basename(sql_file))[0]
            with open(sql_file) as f:
                query = f.read().replace('{{ date_condition }}', date_condition)
            print(query)
            results = client.query(query)
            quantity = []
            cost = []
            items = []
            for row in results_to_dicts(results):
                quantity.append(row['quantity'])
                cost.append(row['actual_cost'])
                items.append(row['items'])
            print(('Updating test expectations for %s' % test_name))
            json_path = os.path.join(path, ('%s.json' % test_name))
            with open(json_path, 'w') as f:
                obj = {'cost': cost, 'items': items, 'quantity': quantity}
                json.dump(obj, f, indent=2)
def test_yaml_loader_with_cache(tmpdir, mocker, _json_cache, expected_from_loader):
    """YAML account loader must be served from the on-disk cache.

    The _json_cache fixture pre-populates the cache under tmpdir, so a load
    with a long max_age must not hit the network (requests.get untouched).
    """
    mock_get = mocker.patch('requests.get')
    mock_mal = mocker.patch('awsrun.acctload.MetaAccountLoader.__init__')
    mocker.patch('tempfile.gettempdir', return_value=tmpdir)
    # NOTE(review): the line below is garbled in this copy of the source --
    # the URL argument appears to have been stripped out, leaving an
    # unterminated string literal. Restore from upstream, presumably
    # acctload.YAMLAccountLoader(URL, max_age=86400).
    acctload.YAMLAccountLoader(' max_age=86400)
    mock_get.assert_not_called()
    # The accounts handed to MetaAccountLoader must match the fixture.
    ((accts,), _) = mock_mal.call_args
    assert (accts == expected_from_loader)
def test_shards_no_skipped_field(sync_client):
    """scan() must tolerate '_shards' replies lacking the 'skipped' field.

    Some responses report only 'successful' and 'total'; 5/5 shards must
    still be treated as a fully successful page and not raise.
    """
    # Initial search yields one hit; scroll then yields one page with a hit
    # and a final empty page that ends the scan.
    with patch.object(sync_client, 'options', return_value=sync_client), patch.object(sync_client, 'search', return_value=ObjectApiResponse(raw={'_scroll_id': 'dummy_id', '_shards': {'successful': 5, 'total': 5}, 'hits': {'hits': [{'search_data': 1}]}}, meta=None)), patch.object(sync_client, 'scroll') as scroll_mock, patch.object(sync_client, 'clear_scroll'):
        scroll_mock.side_effect = [ObjectApiResponse(raw={'_scroll_id': 'dummy_id', '_shards': {'successful': 5, 'total': 5}, 'hits': {'hits': [{'scroll_data': 42}]}}, meta=None), ObjectApiResponse(raw={'_scroll_id': 'dummy_id', '_shards': {'successful': 5, 'total': 5}, 'hits': {'hits': []}}, meta=None)]
        data = list(helpers.scan(sync_client, index='test_index', size=2, raise_on_error=True))
        assert (data == [{'search_data': 1}, {'scroll_data': 42}])
class OptionPlotoptionsOrganizationLevelsDatalabelsTextpath(Options):
    """Generated option wrapper for
    plotOptions.organization.levels.dataLabels.textPath.

    NOTE(review): each option appears twice (getter then setter); the
    @property / @<name>.setter decorators look stripped in this copy --
    confirm against the generator output. Defaults are the values passed
    to _config_get.
    """

    def attributes(self):
        # Default: None (no text-path SVG attributes configured).
        return self._config_get(None)

    def attributes(self, value: Any):
        self._config(value, js_type=False)

    def enabled(self):
        # Default: False (text path disabled).
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class OptionSeriesScatterClusterZonesMarker(Options):
    """Generated option wrapper for series.scatter.cluster.zones.marker.

    NOTE(review): each option appears twice (getter then setter); the
    @property / @<name>.setter decorators look stripped in this copy --
    confirm against the generator output. Defaults are the values passed
    to _config_get.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker outline: white.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Default: no outline width.
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # Default cluster marker radius: 15.
        return self._config_get(15)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def symbol(self):
        # Default symbol name: 'cluster'.
        return self._config_get('cluster')

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def test_websocket_endpoint_on_receive_text(test_client_factory):
    """A text-encoding endpoint echoes text frames and rejects byte frames."""

    class EchoTextEndpoint(WebSocketEndpoint):
        encoding = 'text'

        async def on_receive(self, websocket, data):
            await websocket.send_text(f'Message text was: {data}')

    client = test_client_factory(EchoTextEndpoint)
    with client.websocket_connect('/ws') as session:
        session.send_text('Hello, world!')
        assert session.receive_text() == 'Message text was: Hello, world!'
    # Sending bytes to a text endpoint must terminate the connection.
    with pytest.raises(RuntimeError):
        with client.websocket_connect('/ws') as session:
            session.send_bytes(b'Hello world')
def tco_return_handle(tokens):
    """Build a tail-call-optimized return statement from parsed tokens.

    tokens[0] is the function expression; any remaining tokens are its
    arguments. Produces 'return _coconut_tail_call(func, arg1, ...)'.
    """
    internal_assert(len(tokens) >= 1, 'invalid tail-call-optimizable return statement tokens', tokens)
    # Single token yields just the function; more tokens append the
    # comma-separated arguments -- both cases are one join over tokens.
    return 'return _coconut_tail_call(' + ', '.join(tokens) + ')'
class OptionSeriesPieSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Generated option wrapper for
    series.pie.sonification.defaultInstrumentOptions.mapping.pitch.

    NOTE(review): each option appears twice (getter then setter); the
    @property / @<name>.setter decorators look stripped in this copy --
    confirm against the generator output. Defaults are the values passed
    to _config_get.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Default: pitch maps from the point's 'y' value.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Default upper note: 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Default lower note: 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Default normalization range: the y axis.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
def get_data_pslab(connection, experiment_type):
    """Continuously read measurements from a PSLab device and send them down *connection*.

    Reconnects to the device whenever a zero measurement is observed
    (treated as a device problem). Returns None only when no device can be
    connected at all.

    Args:
        connection: pipe-like object with a ``send`` method receiving
            [timestamp, measurement, label, experiment_type] lists.
        experiment_type: key into ``experiment_options`` selecting the
            (measuring callable, label) pair.
    """
    while True:
        device = connect_to_pslab(experiment_type)
        if (device is None):
            return None
        while True:
            # experiment_options[type][0] is the measuring callable.
            measurement = experiment_options[experiment_type][0](device)
            if (measurement != 0):
                connection.send([time.strftime(TIMESTAMP_FORMAT), measurement, experiment_options[experiment_type][1], experiment_type])
                time.sleep(MEASURING_INTERVAL)
            else:
                # Zero reading: back off, then break out to reconnect.
                time.sleep(10)
                break
_frequency(timedelta(days=1))
def fetch_production(zone_key: str='BD', session: Session=Session(), target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> (dict[(str, Any)] | list[dict[(str, Any)]]):
    """Fetch per-source electricity production for Bangladesh from erp.pgcb.gov.bd.

    Known source types are reported individually; any positive remainder
    against the reported total generation is attributed to 'unknown'.

    Raises:
        ParserException: when no valid rows are available for the day.
    """
    row_data = query(session, target_datetime, logger)
    production_data_list = []
    for row in row_data:
        data = {'zoneKey': zone_key, 'datetime': row['time'], 'production': {}, 'source': 'erp.pgcb.gov.bd'}
        known_sources_sum_mw = 0.0
        for source_type in ['coal', 'gas', 'hydro', 'oil', 'solar', 'wind']:
            if (row[source_type] is not None):
                known_sources_sum_mw += row[source_type]
                data['production'][source_type] = row[source_type]
        if (row['total_generation'] is not None):
            unknown_source_mw = (row['total_generation'] - known_sources_sum_mw)
            if (unknown_source_mw >= 0):
                data['production']['unknown'] = unknown_source_mw
            else:
                # BUG FIX: logging.Logger.warn is deprecated -> warning, and
                # the message was missing a space after 'MW.'.
                logger.warning(f'Sum of production sources exceeds total generation by {(- unknown_source_mw)}MW. There is probably something wrong...')
        production_data_list.append(data)
    if (not len(production_data_list)):
        raise ParserException(parser='BD.py', message='No valid consumption data for requested day found.')
    return production_data_list
class LinkNode(dict):
    """A dict node that can mint unique keys from a preferred base name.

    Attributes:
        links: collected link entries (unused by get_available_key).
        methods_counter: per-base-name counter used to derive numeric
            suffixes for colliding keys.
    """

    def __init__(self):
        super().__init__()
        self.links = []
        self.methods_counter = Counter()

    def get_available_key(self, preferred_key):
        """Return *preferred_key* if unused, else the next '<key>_<n>' variant."""
        if preferred_key in self:
            while True:
                # The counter advances even when a candidate collides, so
                # suffixes are never reused.
                suffix = self.methods_counter[preferred_key]
                self.methods_counter[preferred_key] += 1
                candidate = '{}_{}'.format(preferred_key, suffix)
                if candidate not in self:
                    return candidate
        return preferred_key
class Category(models.Model):
    """Hierarchical media-library category.

    ``parent`` is limited to root categories (parent__isnull=True), so the
    tree allows at most one level of nesting.
    """
    title = models.CharField(_('title'), max_length=200)
    parent = models.ForeignKey('self', blank=True, null=True, on_delete=models.CASCADE, related_name='children', limit_choices_to={'parent__isnull': True}, verbose_name=_('parent'))
    slug = models.SlugField(_('slug'), max_length=150)

    class Meta():
        ordering = ['parent__title', 'title']
        verbose_name = _('category')
        verbose_name_plural = _('categories')
        app_label = 'medialibrary'
    objects = CategoryManager()

    def __str__(self):
        """Render as 'Parent - Title' for nested categories, else the title."""
        if self.parent_id:
            return f'{self.parent.title} - {self.title}'
        return self.title

    def save(self, *args, **kwargs):
        """Auto-populate the slug from the title when it is missing."""
        if (not self.slug):
            self.slug = slugify(self.title)
        super().save(*args, **kwargs)
    save.alters_data = True

    def path_list(self):
        """Return the categories from the root down to (and including) self."""
        if (self.parent is None):
            return [self]
        p = self.parent.path_list()
        p.append(self)
        return p

    def path(self):
        """Human-readable ' - '-joined path of category titles."""
        return ' - '.join((f.title for f in self.path_list()))
class Tanh(Fixed):
    """Parameter-free hyperbolic-tangent bijection onto (-1, 1)."""
    codomain = constraints.interval((- 1.0), 1.0)

    def _forward(self, x: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        """Map x -> tanh(x); also return log|det J| of the transform."""
        out = torch.tanh(x)
        return (out, self._log_abs_det_jacobian(x, out, params))

    def _inverse(self, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        """Map y -> atanh(y); also return log|det J| of the forward map."""
        recovered = torch.atanh(y)
        return (recovered, self._log_abs_det_jacobian(recovered, y, params))

    def _log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> torch.Tensor:
        # Numerically stable form of log(1 - tanh(x)^2):
        # 2 * (log 2 - x - softplus(-2x)).  (y and params are unused.)
        return (2.0 * ((math.log(2.0) - x) - F.softplus(((- 2.0) * x))))
_page.route('/table')
def table():
    """Serve the table page, or an explanatory message when a token is required.

    When the server was started with a token, the bare /table URL is not
    accessible and the caller is told to include the token in the path.
    """
    # Idiom fix: compare against None with 'is not', not '!='.
    if all_data['token'] is not None:
        return jsonify(msg='This url needs a token to access. If you did not specify a token when start the server, you may access the wrong ip or port. If you specify a token when start the server, use server ip}:{port}/table/{token} to access.')
    return render_template('table.html')
class VariableMap(_common.FlyteIdlEntity):
    """Wrapper around a map of variable name -> Variable (Flyte IDL entity)."""

    def __init__(self, variables):
        # presumably dict[str, Variable]; the values must expose
        # to_flyte_idl() -- confirm with callers.
        self._variables = variables

    # NOTE(review): looks like a stripped @property decorator -- confirm.
    def variables(self):
        return self._variables

    def to_flyte_idl(self):
        """Serialize to the protobuf VariableMap message."""
        return _interface_pb2.VariableMap(variables={k: v.to_flyte_idl() for (k, v) in self.variables.items()})

    # NOTE(review): looks like a stripped @classmethod decorator -- confirm.
    def from_flyte_idl(cls, pb2_object):
        """Build a VariableMap from its protobuf message."""
        return cls({k: Variable.from_flyte_idl(v) for (k, v) in pb2_object.variables.items()})
class TestRecoverReferenceSequence(unittest.TestCase):
    """Tests for recover_reference_sequence(read_seq, cigar, md_tag)."""

    def test_mutations_only(self):
        # Pure 36M alignment: only substitutions, encoded in the MD tag.
        self.assertEqual(recover_reference_sequence('CGATACGGGGACATCCGGCCTGCTCCTTCTCACATG', '36M', 'MD:Z:1A0C0C0C1T0C0T27'), 'CACCCCTCTGACATCCGGCCTGCTCCTTCTCACATG')

    def test_insertions(self):
        # 6M1I29M: the 1-base insertion in the read is absent from the reference.
        self.assertEqual(recover_reference_sequence('GAGACGGGGTGACATCCGGCCTGCTCCTTCTCACAT', '6M1I29M', 'MD:Z:0C1C0C1C0T0C27'), 'CACCCCTCTGACATCCGGCCTGCTCCTTCTCACAT')

    def test_deletions(self):
        # 9M9D27M: deleted reference bases are recovered from the MD '^' run.
        self.assertEqual(recover_reference_sequence('AGTGATGGGGGGGTTCCAGGTGGAGACGAGGACTCC', '9M9D27M', 'MD:Z:2G0A5^ATGATGTCA27'), 'AGGAATGGGATGATGTCAGGGGTTCCAGGTGGAGACGAGGACTCC')

    def test_insertions_and_deletions(self):
        # Combined case: 2M1I7M6D26M with substitutions plus a deletion run.
        self.assertEqual(recover_reference_sequence('AGTGATGGGAGGATGTCTCGTCTGTGAGTTACAGCA', '2M1I7M6D26M', 'MD:Z:3C3T1^GCTCAG25T0'), 'AGGCTGGTAGCTCAGGGATGTCTCGTCTGTGAGTTACAGCT')
class TestDeleteUser():
    """API tests for DELETE /users/{id}.

    Covers auth failures (401/403/404), self-deletion, deleting another
    user, and root deletion -- each verifying that the user row, its
    permissions row and its OAuth client are all removed.
    """

    # NOTE(review): the bare `(scope='function')` below looks like the
    # remains of a stripped @pytest.fixture(scope='function') decorator --
    # confirm against the original source.
    (scope='function')
    def url(self, user) -> str:
        return f'{V1_URL_PREFIX}{USERS}/{user.id}'

    def test_delete_user_not_authenticated(self, url, api_client):
        # No auth header at all -> 401.
        response = api_client.delete(url, headers={})
        assert (HTTP_401_UNAUTHORIZED == response.status_code)

    def test_create_user_wrong_scope(self, url, api_client, generate_auth_header):
        # Authenticated but with an unrelated scope -> 403.
        auth_header = generate_auth_header([STORAGE_READ])
        response = api_client.delete(url, headers=auth_header)
        assert (HTTP_403_FORBIDDEN == response.status_code)

    def test_delete_nonexistent_user(self, api_client, db, generate_auth_header):
        # Valid scope but unknown user id -> 404.
        auth_header = generate_auth_header([USER_DELETE])
        url = f'{V1_URL_PREFIX}{USERS}/nonexistent_user'
        response = api_client.delete(url, headers=auth_header)
        assert (HTTP_404_NOT_FOUND == response.status_code)

    def test_delete_self(self, api_client, db):
        """A user holding USER_DELETE may delete their own account."""
        user = FidesUser.create(db=db, data={'username': 'test_delete_user', 'password': str_to_b64_str('&%3Qe2fGo7')})
        saved_user_id = user.id
        FidesUserPermissions.create(db=db, data={'user_id': user.id, 'roles': [APPROVER]})
        assert (user.permissions is not None)
        saved_permissions_id = user.permissions.id
        (client, _) = ClientDetail.create_client_and_secret(db, CONFIG.security.oauth_client_id_length_bytes, CONFIG.security.oauth_client_secret_length_bytes, scopes=[USER_DELETE], user_id=user.id)
        assert (client.user == user)
        saved_client_id = client.id
        # Build a JWE token for the very user being deleted.
        payload = {JWE_PAYLOAD_SCOPES: [USER_DELETE], JWE_PAYLOAD_CLIENT_ID: client.id, JWE_ISSUED_AT: datetime.now().isoformat()}
        jwe = generate_jwe(json.dumps(payload), CONFIG.security.app_encryption_key)
        auth_header = {'Authorization': ('Bearer ' + jwe)}
        response = api_client.delete(f'{V1_URL_PREFIX}{USERS}/{user.id}', headers=auth_header)
        assert (HTTP_204_NO_CONTENT == response.status_code)
        # Expunge so the lookups below hit the database, not the session.
        db.expunge_all()
        user_search = FidesUser.get_by(db, field='id', value=saved_user_id)
        assert (user_search is None)
        client_search = ClientDetail.get_by(db, field='id', value=saved_client_id)
        assert (client_search is None)
        permissions_search = FidesUserPermissions.get_by(db, field='id', value=saved_permissions_id)
        assert (permissions_search is None)

    def test_delete_user(self, api_client, db):
        """A CONTRIBUTOR may delete another user; that user's rows are removed."""
        user = FidesUser.create(db=db, data={'username': 'test_delete_user', 'password': str_to_b64_str('&%3Qe2fGo7')})
        FidesUserPermissions.create(db=db, data={'user_id': user.id, 'roles': [CONTRIBUTOR]})
        other_user = FidesUser.create(db=db, data={'username': 'user_to_delete', 'password': str_to_b64_str('&%3Qe2fGo7')})
        saved_user_id = other_user.id
        FidesUserPermissions.create(db=db, data={'user_id': other_user.id, 'roles': [APPROVER]})
        assert (other_user.permissions is not None)
        saved_permissions_id = other_user.permissions.id
        (client, _) = ClientDetail.create_client_and_secret(db, CONFIG.security.oauth_client_id_length_bytes, CONFIG.security.oauth_client_secret_length_bytes, roles=[CONTRIBUTOR], user_id=user.id)
        (other_user_client, _) = ClientDetail.create_client_and_secret(db, CONFIG.security.oauth_client_id_length_bytes, CONFIG.security.oauth_client_secret_length_bytes, roles=[APPROVER], user_id=other_user.id)
        assert (other_user_client.user == other_user)
        saved_client_id = other_user_client.id
        # Token belongs to the CONTRIBUTOR, target is other_user.
        payload = {JWE_PAYLOAD_ROLES: [CONTRIBUTOR], JWE_PAYLOAD_CLIENT_ID: client.id, JWE_ISSUED_AT: datetime.now().isoformat()}
        jwe = generate_jwe(json.dumps(payload), CONFIG.security.app_encryption_key)
        auth_header = {'Authorization': ('Bearer ' + jwe)}
        response = api_client.delete(f'{V1_URL_PREFIX}{USERS}/{other_user.id}', headers=auth_header)
        assert (HTTP_204_NO_CONTENT == response.status_code)
        db.expunge_all()
        user_search = FidesUser.get_by(db, field='id', value=saved_user_id)
        assert (user_search is None)
        client_search = ClientDetail.get_by(db, field='id', value=saved_client_id)
        assert (client_search is None)
        permissions_search = FidesUserPermissions.get_by(db, field='id', value=saved_permissions_id)
        assert (permissions_search is None)

    def test_delete_user_as_root(self, api_client, db, user, root_auth_header):
        """The root user may delete any user."""
        other_user = FidesUser.create(db=db, data={'username': 'test_delete_user', 'password': str_to_b64_str('&%3Qe2fGo7')})
        FidesUserPermissions.create(db=db, data={'user_id': other_user.id, 'roles': [APPROVER]})
        saved_user_id = other_user.id
        saved_permission_id = other_user.permissions.id
        (user_client, _) = ClientDetail.create_client_and_secret(db, CONFIG.security.oauth_client_id_length_bytes, CONFIG.security.oauth_client_secret_length_bytes, scopes=[USER_DELETE], user_id=other_user.id)
        client_id = user_client.id
        response = api_client.delete(f'{V1_URL_PREFIX}{USERS}/{other_user.id}', headers=root_auth_header)
        assert (HTTP_204_NO_CONTENT == response.status_code)
        db.expunge_all()
        user_search = FidesUser.get_by(db, field='id', value=saved_user_id)
        assert (user_search is None)
        client_search = ClientDetail.get_by(db, field='id', value=client_id)
        assert (client_search is None)
        permissions_search = FidesUserPermissions.get_by(db, field='id', value=saved_permission_id)
        assert (permissions_search is None)
class TestNumberOfRowsWithMissingValues(BaseIntegrityMissingValuesValuesTest):
    """Data-integrity test: number of rows containing at least one missing value."""
    name: ClassVar = 'The Number Of Rows With Missing Values'

    def get_condition_from_reference(self, reference: Optional[DatasetMissingValues]):
        """Derive the pass condition.

        With a reference dataset, the threshold is the reference count
        scaled by the row-count ratio, with 10% relative tolerance;
        without one, no rows with missing values are allowed.
        """
        if (reference is not None):
            curr_number_of_rows = self.metric.get_result().current.number_of_rows
            ref_number_of_rows = reference.number_of_rows
            # Scale the reference count to the size of the current dataset.
            mult = (curr_number_of_rows / ref_number_of_rows)
            return TestValueCondition(lte=approx((reference.number_of_rows_with_missing_values * mult), relative=0.1), source=ValueSource.REFERENCE)
        return TestValueCondition(eq=0)

    def calculate_value_for_test(self) -> Numeric:
        """Return the current count of rows with missing values."""
        return self.metric.get_result().current.number_of_rows_with_missing_values

    def get_description(self, value: Numeric) -> str:
        """Human-readable summary including the active threshold."""
        return f'The number of rows with missing values is {value}. The test threshold is {self.get_condition()}.'
def find_index_closing_parenthesis(string: str):
    """Return the index of the ')' that matches the '(' at position 0.

    Raises:
        AssertionError: if *string* does not start with '('.
        SyntaxError: if the opening parenthesis is never closed.
    """
    assert string.startswith('('), "string has to start with '('"
    # Track nesting depth instead of pushing characters on a stack: when
    # the depth returns to zero we have found the matching ')'.
    depth = 0
    for position, char in enumerate(string):
        if char == '(':
            depth += 1
        elif char == ')':
            depth -= 1
            if depth == 0:
                return position
    raise SyntaxError(f'Transonic syntax error for string {string}')
class table_features_stats_reply(stats_reply):
    """Generated OpenFlow 1.5 (wire version 6) table-features stats reply.

    NOTE(review): this is loxigen-generated, Python-2-era code (e.g.
    pack() appends the str '\\x00' padding next to struct.pack output and
    joins with ''.join) -- confirm the target interpreter before editing.
    `unpack` takes `reader` first and looks like a stripped @staticmethod.
    """
    version = 6
    type = 19
    stats_type = 12

    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to the OpenFlow wire format; length is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the 16-bit message length, fixed up below.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of padding after the stats header.
        packed.append(('\x00' * 4))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a table_features_stats_reply from *reader*; asserts on header fields."""
        obj = table_features_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length
        # (4 bytes of the header were already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 12)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.table_features.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality on xid, flags and entries."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Pretty-print via the loxi pretty-printer protocol object *q*."""
        q.text('table_features_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
(_gemm_matmul_acc)
def do_matmul_acc_i8(N: size, M: size, K: size, A: ([i8][(N, 16)] GEMM_SCRATCH), B: ([i8][(K, 16)] GEMM_SCRATCH), C: ([i32][(N, 16)] GEMM_ACCUM)):
    # NOTE(review): this is an Exo (exo-lang) procedure, not plain Python --
    # the annotations describe tile windows and Gemmini memory spaces, and
    # the bare `(_gemm_matmul_acc)` above looks like a stripped decorator;
    # confirm against upstream.
    # Accumulate C[i, j] += sum over k of A[i, k] * B[k, j]
    # on tiles of at most 16x16x16.
    assert (N <= 16)
    assert (M <= 16)
    assert (K <= 16)
    for i in seq(0, N):
        for j in seq(0, M):
            for k in seq(0, K):
                a: i32
                b: i32
                # Widen the i8 operands to i32 before multiply-accumulate.
                a = A[(i, k)]
                b = B[(k, j)]
                C[(i, j)] += (a * b)
class TestSdmHspaParser(unittest.TestCase):
parser = SdmHspaParser(parent=None, icd_ver=(6, 34))
maxDiff = None
def test_sdm_hspa_ul1_rf_info(self):
self.parser.icd_ver = (4, 54)
payload = binascii.unhexlify('3c2a0000b4ffa8e4')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_UMTS_RF_INFO, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_rf_info(packet)
expected = {'stdout': 'HSPA UL1 RF Info: DL UARFCN 10812, RSSI -76.00, TxPwr -70.00'}
self.assertDictEqual(result, expected)
self.parser.icd_ver = (4, 128)
payload = binascii.unhexlify('adff7cfc')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_UMTS_RF_INFO, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_rf_info(packet)
expected = {'stdout': 'HSPA UL1 RF Info: DL UARFCN 10564, RSSI -83.00, TxPwr -9.00'}
self.assertDictEqual(result, expected)
self.parser.icd_ver = (5, 23)
payload = binascii.unhexlify('3c2a4f01202a2d3b')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_UMTS_RF_INFO, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_rf_info(packet)
expected = {'stdout': 'HSPA UL1 RF Info: DL UARFCN 10812, PSC 335, RSSI -69.00, Ec/No -3.50, RSCP -71.00, TxPwr -12.00'}
self.assertDictEqual(result, expected)
self.parser.icd_ver = (7, 2)
payload = binascii.unhexlify('ea0bd501162e2547')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_UMTS_RF_INFO, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_rf_info(packet)
expected = {'stdout': 'HSPA UL1 RF Info: DL UARFCN 3050, PSC 469, RSSI -79.00, Ec/No -1.50, RSCP -79.00, TxPwr 0.00'}
self.assertDictEqual(result, expected)
def test_sdm_hspa_ul1_serving_cell(self):
self.parser.icd_ver = (7, 2)
payload = binascii.unhexlify('d501c6ff0000fdff5000')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_SERV_CELL, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_serving_cell(packet)
expected = {'stdout': 'HSPA UL1 Serving Cell: PSC 469, CPICH RSCP -58.00, Delta RSCP 0.00, Ec/No -3.00, DRX 80 ms'}
self.assertDictEqual(result, expected)
payload = binascii.unhexlify('d501c7ff0000fcff8002')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_SERV_CELL, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_serving_cell(packet)
expected = {'stdout': 'HSPA UL1 Serving Cell: PSC 469, CPICH RSCP -57.00, Delta RSCP 0.00, Ec/No -4.00, DRX 640 ms'}
self.assertDictEqual(result, expected)
def test_hspa_ul1_intra_freq_resel(self):
self.parser.icd_ver = (7, 2)
payload = binascii.unhexlify('cffe8ff40008cffe8ff9d018cffe8ffce018cffe8ffc3008cffe8ff25008cffe8ffef008cffe8ff73018cffe8ff9c018cffe8ffd9008cffe8ffe3018cffe8ff70008cffe8ffd6008cffe8ffae018cffe8ff5a018cffe8ff1c018cffe8ff22018cffe8ff06018cffe8ff29018cffe8ff1a008cffe8fffa008cffe8ff65018cffe8ff45018cffe8fff9e44193d26ca')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_INTRA_FREQ_RESEL, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_intra_freq_resel(packet)
expected = {'stdout': 'HSPA UL1 Intra Frequency Reselection:\nMeasurement 0: PSC 103, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 1: PSC 64, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 2: PSC 413, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 3: PSC 462, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 4: PSC 195, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 5: PSC 37, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 6: PSC 239, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 7: PSC 371, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 8: PSC 412, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 9: PSC 217, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 10: PSC 483, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 11: PSC 112, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 12: PSC 214, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 13: PSC 430, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 14: PSC 346, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 15: PSC 284, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 16: PSC 290, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 17: PSC 262, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 18: PSC 297, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 19: PSC 26, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 20: PSC 250, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 21: PSC 357, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 22: PSC 325, CPICH RSCP -116, CPICH Ec/No -24\nExtra: f9e44193d26ca'}
self.assertDictEqual(result, expected)
payload = binascii.unhexlify('19005a01a6ffedff67008cffe8ff05018cffe8ff40008cffe8ff9d018cffe8ffce018cffe8ffc3008cffe8ff25008cffe8ffef008cffe8ff73018cffe8ff9c018cffe8ffd9008cffe8ff70008cffe8ffae018cffe8ffd6008cffe8ff1c018cffe8ff22018cffe8fffa008cffe8ff29018cffe8ff45018cffe8ff65018cffe8ff1a008cffe8ff06018cffe8ff71008cffe8ffe3018cffe8fff9e44193d26cac')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_INTRA_FREQ_RESEL, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_intra_freq_resel(packet)
expected = {'stdout': 'HSPA UL1 Intra Frequency Reselection:\nMeasurement 0: PSC 346, CPICH RSCP -90, CPICH Ec/No -19\nMeasurement 1: PSC 103, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 2: PSC 261, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 3: PSC 64, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 4: PSC 413, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 5: PSC 462, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 6: PSC 195, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 7: PSC 37, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 8: PSC 239, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 9: PSC 371, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 10: PSC 412, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 11: PSC 217, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 12: PSC 112, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 13: PSC 430, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 14: PSC 214, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 15: PSC 284, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 16: PSC 290, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 17: PSC 250, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 18: PSC 297, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 19: PSC 325, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 20: PSC 357, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 21: PSC 26, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 22: PSC 262, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 23: PSC 113, CPICH RSCP -116, CPICH Ec/No -24\nMeasurement 24: PSC 483, CPICH RSCP -116, CPICH Ec/No -24\nExtra: f9e44193d26cac'}
self.assertDictEqual(result, expected)
payload = binascii.unhexlify('d5a000002c06e9bf40e9bcbc669e6ae0146c9c47150f545a7e22eadeca5d7d5e0100017bfb00017bbf3f6dfc43c07e4ff23e7425badd850d28c0484d11f7745fc978b54bda32e6360d864fe1f4427c3d634c0eaa9c935b4eba87f54381d1c3826c6ecf92834c0526d0dddae9a506ccff6609a604fd0a3695336ebe920046daf3ff6afd45d3a60203c1e04ef211c60a272cd3b5e15aab59a676f51f1f41a7c6a2570d8a39bd1ad')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_INTRA_FREQ_RESEL, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_intra_freq_resel(packet)
expected = {'stdout': 'HSPA UL1 Intra Frequency Reselection:\nExtra: d5a000002c06e9bf40e9bcbc669e6ae0146c9c47150f545a7e22eadeca5d7d5e0100017bfb00017bbf3f6dfc43c07e4ff23e7425badd850d28c0484d11f7745fc978b54bda32e6360d864fe1f4427c3d634c0eaa9c935b4eba87f54381d1c3826c6ecf92834c0526d0dddae9a506ccff6609a604fd0a3695336ebe920046daf3ff6afd45d3a60203c1e04ef211c60a272cd3b5e15aab59a676f51f1f41a7c6a2570d8a39bd1ad'}
self.assertDictEqual(result, expected)
payload = binascii.unhexlify('f00f0ffafba0fb1243a0fd124320fff8001900f0ffe001efb42fc42f05433ad42f05433bfc')
packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_HSPA_DATA, sdmcmd.sdm_hspa_data.HSPA_UL1_INTRA_FREQ_RESEL, payload, timestamp=0)
result = self.parser.sdm_hspa_ul1_intra_freq_resel(packet)
expected = {'stdout': 'HSPA UL1 Intra Frequency Reselection:\nExtra: f00f0ffafba0fb1243a0fd124320fff8001900f0ffe001efb42fc42f05433ad42f05433bfc'}
self.assertDictEqual(result, expected)
def test_hspa_ul1_inter_freq_resel(self):
    """Decode HSPA UL1 inter-frequency reselection messages (ICD 7.2)."""
    self.parser.icd_ver = (7, 2)

    def make_packet(hex_body):
        # Wrap the hex-encoded body in a CMD_HSPA_DATA SDM packet.
        return sdmcmd.generate_sdm_packet(
            160,
            sdmcmd.sdm_command_group.CMD_HSPA_DATA,
            sdmcmd.sdm_hspa_data.HSPA_UL1_INTER_FREQ_RESEL,
            binascii.unhexlify(hex_body),
            timestamp=0)

    # Body with no decodable measurement: everything ends up in "Extra".
    decoded = self.parser.sdm_hspa_ul1_inter_freq_resel(make_packet('a3e2041484f27446cb3130335d2028696ef4d53475f484f27445f53494e474c455f434c49454efffdc40abdc'))
    self.assertDictEqual(decoded, {'stdout': 'HSPA UL1 Inter Frequency Reselection:\nExtra: a3e2041484f27446cb3130335d2028696ef4d53475f484f27445f53494e474c455f434c49454efffdc40abdc'})
    # Body carrying one measurement, followed by trailing "Extra" bytes.
    decoded = self.parser.sdm_hspa_ul1_inter_freq_resel(make_packet('0100542ab701acfffeffe636f6d696e67205bd204e535ff4348414e47455f494e442c2050414c20436cb325d2028696ec5f454d7367436c6173735f52544b5f4d5347202d3ecb3130345d2028696ef4d53475ff4d554c54495f434c49454efbcbdffbdfffffff2744484ffdc40a8030000f0672441b6020000ddc408c'))
    self.assertDictEqual(decoded, {'stdout': 'HSPA UL1 Inter Frequency Reselection:\nMeasurement 0: UARFCN 10836, PSC 439, CPICH RSCP -84, CPICH Ec/No -2\nExtra: e636f6d696e67205bd204e535ff4348414e47455f494e442c2050414c20436cb325d2028696ec5f454d7367436c6173735f52544b5f4d5347202d3ecb3130345d2028696ef4d53475ff4d554c54495f434c49454efbcbdffbdfffffff2744484ffdc40a8030000f0672441b6020000ddc408c'})
def test_sdm_hspa_wcdma_rrc_status(self):
    """Decode the WCDMA RRC status record for two RRC states (ICD 7.2)."""
    self.parser.icd_ver = (7, 2)
    cases = (
        ('7fc0ffa004205b942c0fe', 'WCDMA RRC State: RRC Status: DISCONNECTED, Domain: IDLE'),
        ('7facfface', 'WCDMA RRC State: RRC Status: CELL_DCH, Domain: IDLE'),
    )
    for hex_body, text in cases:
        decoded = self.parser.sdm_hspa_wcdma_rrc_status(binascii.unhexlify(hex_body))
        self.assertDictEqual(decoded, {'stdout': text})
def test_sdm_hspa_wcdma_serving_cell(self):
    """Decode the WCDMA serving-cell record (ICD 7.2)."""
    self.parser.icd_ver = (7, 2)
    raw = binascii.unhexlify('7fe9ffa00422e6c4ec3586263c2ae')
    decoded = self.parser.sdm_hspa_wcdma_serving_cell(raw)
    self.assertDictEqual(
        decoded,
        {'stdout': 'WCDMA Serving Cell: UARFCN 10812/9862, MCC 450, MNC 8'})
class HistoricalRegionsResponseAllOf(ModelNormal):
    """Generated OpenAPI model for the HistoricalRegionsResponse allOf fragment.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args`` lines
    below read like decorator applications whose ``@`` prefix was lost in a
    source transform (presumably ``@cached_property`` and
    ``@convert_js_args_to_python_args`` in the generator output) — confirm
    against the code generator before relying on attribute access semantics.
    """
    # No enum-constrained properties on this model.
    allowed_values = {}
    # No extra validation rules (min/max/regex) on this model.
    validations = {}
    _property
    def additional_properties_type():
        # Undeclared properties may hold any JSON-compatible value.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    # The model instance itself may not be null.
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> tuple of accepted types.
        return {'data': ([str],)}
    _property
    def discriminator():
        # No polymorphic discriminator for this schema.
        return None
    # Python attribute name -> JSON key (identical here).
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate the model from raw, spec-named server data.

        Pops the framework control kwargs, rejects positional arguments, then
        stores every remaining kwarg as an attribute (optionally dropping
        unknown keys when the configuration asks for it).
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ dispatch and allocate directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when configured to do so and the
            # model declares no additional-properties type.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Attributes the framework sets directly, bypassing validation.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build the model from keyword arguments (client-side construction).

        Mirrors ``_from_openapi_data`` but additionally rejects read-only
        attributes.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): this raises only AFTER setattr has already stored
            # the value; upstream generators usually check before assignment.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def optimal_iters(func, target_time):
    """Search for an iteration count whose total runtime reaches *target_time*.

    Honors two optional attributes on *func*: ``_benchmark_max_iters`` (hard
    cap on the returned count) and ``_benchmark_scale_factor`` (fixed growth
    rate used instead of the adaptive estimate).  Gives up after ten timing
    attempts.
    """
    target_time = float(target_time)
    cap = int(getattr(func, '_benchmark_max_iters', 0))
    growth = getattr(func, '_benchmark_scale_factor', 0.0)
    n = 1
    attempts = 0
    while attempts < 10:
        attempts += 1
        if cap and n > cap:
            return cap
        elapsed = _run_timeit(func, number=n)
        if elapsed >= target_time:
            return n
        if growth:
            # Caller-supplied growth rate overrides the adaptive heuristics.
            n *= growth
        elif elapsed < target_time / 2:
            # Far too fast: jump straight to the estimated count, rounded up
            # to the next power of ten.
            n = n * (target_time / elapsed) / 2 + 1
            n = int(10 ** math.ceil(math.log10(n)))
        elif elapsed < target_time:
            # Close: just double.
            n *= 2
    raise Exception('could not find optimal iterations for time={} func={}'.format(target_time, repr(func)))
class ImmutableFamily(TestCase):
    """Shared tests for the nutils Immutable/Singleton type family.

    NOTE(review): ``self.cls`` is presumably injected by a parameterized
    test harness outside this view (either ``nutils.types.Immutable`` or
    ``nutils.types.Singleton``) — confirm against the module tail.
    """
    def test_pickle(self):
        # Round-trip through pickle must preserve equality.
        T = {nutils.types.Immutable: T_Immutable, nutils.types.Singleton: T_Singleton}[self.cls]
        a = T(1, 2, z=3)
        b = pickle.loads(pickle.dumps(a))
        self.assertEqual(a, b)
    def test_eq(self):
        # Equality depends on both constructor arguments and concrete class.
        class T(self.cls):
            def __init__(self, x, y):
                pass
        class U(self.cls):
            def __init__(self, x, y):
                pass
        self.assertEqual(T(1, 2), T(1, 2))
        self.assertNotEqual(T(1, 2), T(2, 1))
        self.assertNotEqual(T(1, 2), U(1, 2))
    def test_canonical_args(self):
        # Keyword, positional, and defaulted spellings canonicalize equally.
        class T(self.cls):
            def __init__(self, x, y, z=3):
                pass
        self.assertEqual(T(x=1, y=2), T(1, 2, 3))
    def test_keyword_args(self):
        # **kwargs arguments participate in the identity as well.
        class T(self.cls):
            def __init__(self, x, y, **kwargs):
                pass
        a = T(x=1, y=2, z=3)
        b = T(1, 2, z=3)
        self.assertEqual(a, b)
    def test_nutils_hash(self):
        # Hashes are stable across runs and sensitive to args, class and version.
        class T(self.cls):
            def __init__(self, x, y):
                pass
        class T1(self.cls, version=1):
            def __init__(self, x, y):
                pass
        class U(self.cls):
            def __init__(self, x, y):
                pass
        self.assertEqual(nutils.types.nutils_hash(T(1, 2)).hex(), nutils.types.nutils_hash(T(1, 2)).hex())
        self.assertNotEqual(nutils.types.nutils_hash(T(1, 2)).hex(), nutils.types.nutils_hash(T(2, 1)).hex())
        self.assertNotEqual(nutils.types.nutils_hash(T(1, 2)).hex(), nutils.types.nutils_hash(U(1, 2)).hex())
        # Golden hash values pin the serialization format.
        self.assertEqual(nutils.types.nutils_hash(T(1, 2)).hex(), '2f7fb825b73398a20ef5fc75d7a9d615')
        self.assertEqual(nutils.types.nutils_hash(T1(1, 2)).hex(), 'b907c718a9a7e8c28300e028cfbb578a608f7620')
    # NOTE(review): bare `_if(...)` below looks like a stripped decorator that
    # restricts this test to the Singleton variant — confirm.
    _if((lambda cls: (cls is nutils.types.Singleton)))
    def test_deduplication(self):
        # Singleton construction returns the identical object for equal args.
        class T(self.cls):
            def __init__(self, x, y):
                pass
        class U(self.cls):
            def __init__(self, x, y):
                pass
        a = T(1, 2)
        b = T(1, 2)
        c = T(2, 1)
        d = U(1, 2)
        self.assertIs(a, b)
        self.assertEqual(a, b)
        self.assertIsNot(a, c)
        self.assertNotEqual(a, c)
        self.assertIsNot(a, d)
        self.assertNotEqual(a, d)
class FunctionBasedAsyncViewIntegrationTests(TestCase):
    """Drive the function-based async view from synchronous test code."""

    def setUp(self):
        self.view = basic_async_view

    def _call(self, request):
        # Bridge the coroutine view into the synchronous test runner.
        return async_to_sync(self.view)(request)

    def test_get_succeeds(self):
        response = self._call(factory.get('/'))
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'GET'}

    def test_logged_in_get_succeeds(self):
        user = User.objects.create_user('user', '', 'password')
        request = factory.get('/')
        # Drop the cached attribute before attaching the user to the request,
        # mirroring the behaviour the view is exercised under.
        del user.is_active
        request.user = user
        response = self._call(request)
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'GET'}

    def test_post_succeeds(self):
        response = self._call(factory.post('/', {'test': 'foo'}))
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'POST', 'data': {'test': ['foo']}}

    def test_patch_succeeds(self):
        response = self._call(factory.patch('/', {'test': 'foo'}))
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'PATCH', 'data': {'test': ['foo']}}

    def test_put_succeeds(self):
        response = self._call(factory.put('/', {'test': 'foo'}))
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'PUT', 'data': {'test': ['foo']}}

    def test_options_succeeds(self):
        response = self._call(factory.options('/'))
        assert response.status_code == status.HTTP_200_OK

    def test_400_parse_error(self):
        # Malformed JSON body must surface as a 400 with the parser's message.
        response = self._call(factory.post('/', 'f00bar', content_type='application/json'))
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert sanitise_json_error(response.data) == {'detail': JSON_ERROR}
def extractVagabondstorytellerBlogspotCom(item):
    """Build a release message for vagabondstoryteller.blogspot.com items.

    Returns None for previews or untitled chapters, a release message when a
    known tag matches, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, series, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class StackdriverLoggingClient(object):
    """Thin client over the Stackdriver Logging sink repositories."""

    def __init__(self, global_configs, **kwargs):
        """Wire up the repository client with rate-limit and cache settings."""
        max_calls, quota_period = api_helpers.get_ratelimiter_config(global_configs, API_NAME)
        if 'cache_discovery' in global_configs:
            cache_discovery = global_configs['cache_discovery']
        else:
            cache_discovery = False
        self.repository = StackdriverLoggingRepositoryClient(
            quota_max_calls=max_calls,
            quota_period=quota_period,
            use_rate_limiter=kwargs.get('use_rate_limiter', True),
            cache_discovery=cache_discovery,
            cache=global_configs.get('cache'))

    def _list_sinks(self, repo, api_name, debug_fmt, resource_id):
        # Shared list/flatten/log/raise flow used by every sink getter.
        name = repo.get_name(resource_id)
        try:
            paged_results = repo.list(name)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'sinks')
            LOGGER.debug(debug_fmt, resource_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError(api_name, e, 'name', name)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_organization_sinks(self, org_id):
        """Return the flattened sinks configured on an organization."""
        return self._list_sinks(
            self.repository.organizations_sinks, 'organizations_sinks',
            'Getting information about organization sinks, org_id = %s, flattened_results = %s',
            org_id)

    def get_folder_sinks(self, folder_id):
        """Return the flattened sinks configured on a folder."""
        return self._list_sinks(
            self.repository.folders_sinks, 'folders_sinks',
            'Getting information about folder sinks, folder_id = %s, flattened_results = %s',
            folder_id)

    def get_billing_account_sinks(self, account_id):
        """Return the flattened sinks configured on a billing account."""
        return self._list_sinks(
            self.repository.billing_accounts_sinks, 'billing_accounts_sinks',
            'Getting information about billing_account sinks, billing_account_id = %s, flattened_results = %s',
            account_id)

    def get_project_sinks(self, project_id):
        """Return the flattened sinks configured on a project."""
        return self._list_sinks(
            self.repository.projects_sinks, 'projects_sinks',
            'Getting information about project sinks, project_id = %s, flattened_results = %s',
            project_id)
class Type(BaseClass):
    """Trait whose value must be a subclass of a given class.

    The class may be given as a real class object or as a dotted-name string;
    string specifications are resolved lazily on first validation via
    ``resolve``.
    """
    def __init__(self, value=None, klass=None, allow_none=True, **metadata):
        # Default klass to the default value's class, or object when neither
        # is given.
        if (value is None):
            if (klass is None):
                klass = object
        elif (klass is None):
            klass = value
        if isinstance(klass, str):
            # Defer resolution: the first validate() call goes through
            # resolve(), which swaps itself out once the class is known.
            self.validate = self.resolve
        elif (not isinstance(klass, type)):
            raise TraitError('A Type trait must specify a class.')
        self.klass = klass
        self._allow_none = allow_none
        self.module = get_module_name()
        super().__init__(value, **metadata)
    def validate(self, object, name, value):
        """Accept subclasses of ``self.klass`` (and None when allowed)."""
        try:
            if issubclass(value, self.klass):
                return value
        except:
            # issubclass raises TypeError for non-class values; fall through
            # to the None check / error below. (Bare except is deliberate.)
            if ((value is None) and self._allow_none):
                return value
        self.error(object, name, value)
    def resolve(self, object, name, value):
        """One-shot validator: resolve a string klass, then re-validate."""
        if isinstance(self.klass, str):
            self.resolve_class(object, name, value)
            # Remove the instance-level override so the class-level
            # validate() is used from now on.
            del self.validate
        return self.validate(object, name, value)
    def info(self):
        """Return a human-readable description of accepted values."""
        klass = self.klass
        if (not isinstance(klass, str)):
            klass = klass.__name__
        result = ('a subclass of ' + klass)
        if self._allow_none:
            return (result + ' or None')
        return result
    def get_default_value(self):
        """Defer the default when klass is still an unresolved string."""
        if (not isinstance(self.default_value, str)):
            return super().get_default_value()
        return (DefaultValue.callable_and_args, (self.resolve_default_value, (), None))
    def resolve_default_value(self):
        """Resolve a string klass and return it as the default value."""
        if isinstance(self.klass, str):
            try:
                self.resolve_class(None, None, None)
                del self.validate
            except:
                # Bare except: any resolution failure becomes a TraitError.
                raise TraitError(('Could not resolve %s into a valid class' % self.klass))
        return self.klass
def extractAirfalltranslationsWordpressCom(item):
    """Build a release message for airfalltranslations.wordpress.com items.

    Returns None for previews or items with no volume/chapter, a release
    message when a known tag is present, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_table = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, release_kind in tag_table:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=release_kind)
    return False
class ExecuteActions(LogCaptureTestCase):
    """fail2ban Actions integration tests.

    Exercises the jail's action collection (add/remove, ban/unban/reban,
    flush, and the consistency-check / environment-repair flows) against a
    DummyJail, asserting on the captured log output.
    """
    def setUp(self):
        super(ExecuteActions, self).setUp()
        self.__jail = DummyJail()
        self.__actions = self.__jail.actions
    def tearDown(self):
        super(ExecuteActions, self).tearDown()
    def defaultAction(self, o={}):
        # Registers an 'ip' command action whose start/ban/unban/check/flush/
        # stop commands echo markers; *o* appends per-command suffixes.
        # (The mutable default is only read, never mutated.)
        self.__actions.add('ip')
        act = self.__actions['ip']
        act.actionstart = ('echo ip start' + o.get('start', ''))
        act.actionban = ('echo ip ban <ip>' + o.get('ban', ''))
        act.actionunban = ('echo ip unban <ip>' + o.get('unban', ''))
        act.actioncheck = ('echo ip check' + o.get('check', ''))
        act.actionflush = ('echo ip flush' + o.get('flush', ''))
        act.actionstop = ('echo ip stop' + o.get('stop', ''))
        return act
    def testActionsAddDuplicateName(self):
        # Re-adding an existing action name must raise.
        self.__actions.add('test')
        self.assertRaises(ValueError, self.__actions.add, 'test')
    def testActionsManipulation(self):
        # Container protocol (add/contains/delete/len) plus ban-time accessors.
        self.__actions.add('test')
        self.assertTrue(self.__actions['test'])
        self.assertIn('test', self.__actions)
        self.assertNotIn('nonexistant action', self.__actions)
        self.__actions.add('test1')
        del self.__actions['test']
        del self.__actions['test1']
        self.assertNotIn('test', self.__actions)
        self.assertEqual(len(self.__actions), 0)
        self.__actions.setBanTime(127)
        self.assertEqual(self.__actions.getBanTime(), 127)
        self.assertRaises(ValueError, self.__actions.removeBannedIP, '127.0.0.1')
    def testAddBannedIP(self):
        # Banning returns the number of NEW bans; duplicates are reported.
        self.assertEqual(self.__actions.addBannedIP('192.0.2.1'), 1)
        self.assertLogged('Ban 192.0.2.1')
        self.pruneLog()
        self.assertEqual(self.__actions.addBannedIP(['192.0.2.1', '192.0.2.2', '192.0.2.3']), 2)
        self.assertLogged('192.0.2.1 already banned')
        self.assertNotLogged('Ban 192.0.2.1')
        self.assertLogged('Ban 192.0.2.2')
        self.assertLogged('Ban 192.0.2.3')
    def testActionsOutput(self):
        # Start/stop life cycle emits the start, flush and stop commands.
        self.defaultAction()
        self.__actions.start()
        self.assertLogged(('stdout: %r' % 'ip start'), wait=True)
        self.__actions.stop()
        self.__actions.join()
        self.assertLogged(('stdout: %r' % 'ip flush'), ('stdout: %r' % 'ip stop'))
        self.assertEqual(self.__actions.status(), [('Currently banned', 0), ('Total banned', 0), ('Banned IP list', [])])
        self.assertEqual(self.__actions.status('short'), [('Currently banned', 0), ('Total banned', 0)])
    def testAddActionPython(self):
        # Python-file actions: load, start/stop, and constructor signature checks.
        self.__actions.add('Action', os.path.join(TEST_FILES_DIR, 'action.d/action.py'), {'opt1': 'value'})
        self.assertLogged('TestAction initialised')
        self.__actions.start()
        self.assertTrue(Utils.wait_for((lambda : self._is_logged('TestAction action start')), 3))
        self.__actions.stop()
        self.__actions.join()
        self.assertLogged('TestAction action stop')
        self.assertRaises(IOError, self.__actions.add, 'Action3', '/does/not/exist.py', {})
        self.__actions.add('Action4', os.path.join(TEST_FILES_DIR, 'action.d/action.py'), {'opt1': 'value', 'opt2': 'value2'})
        self.assertRaises(TypeError, self.__actions.add, 'Action5', os.path.join(TEST_FILES_DIR, 'action.d/action.py'), {'opt1': 'value', 'opt2': 'value2', 'opt3': 'value3'})
        self.assertRaises(TypeError, self.__actions.add, 'Action5', os.path.join(TEST_FILES_DIR, 'action.d/action.py'), {})
    def testAddPythonActionNOK(self):
        # Broken Python actions: missing class/method rejected; runtime errors logged.
        self.assertRaises(RuntimeError, self.__actions.add, 'Action', os.path.join(TEST_FILES_DIR, 'action.d/action_noAction.py'), {})
        self.assertRaises(RuntimeError, self.__actions.add, 'Action', os.path.join(TEST_FILES_DIR, 'action.d/action_nomethod.py'), {})
        self.__actions.add('Action', os.path.join(TEST_FILES_DIR, 'action.d/action_errors.py'), {})
        self.__actions.start()
        self.assertTrue(Utils.wait_for((lambda : self._is_logged('Failed to start')), 3))
        self.__actions.stop()
        self.__actions.join()
        self.assertLogged('Failed to stop')
    def testBanActionsAInfo(self):
        # An action mutating its aInfo dict must not corrupt other actions' aInfo.
        self.__actions.add('action1', os.path.join(TEST_FILES_DIR, 'action.d/action_modifyainfo.py'), {})
        self.__actions.add('action2', os.path.join(TEST_FILES_DIR, 'action.d/action_modifyainfo.py'), {})
        self.__jail.putFailTicket(FailTicket('1.2.3.4'))
        self.__actions._Actions__checkBan()
        self.assertNotLogged('Failed to execute ban')
        self.assertLogged('action1 ban deleted aInfo IP')
        self.assertLogged('action2 ban deleted aInfo IP')
        self.__actions._Actions__flushBan()
        self.assertNotLogged('Failed to execute unban')
        self.assertLogged('action1 unban deleted aInfo IP')
        self.assertLogged('action2 unban deleted aInfo IP')
    # NOTE(review): bare `_alt_time` below looks like a stripped decorator
    # (presumably @with_alt_time or similar) — confirm against the test helpers.
    _alt_time
    def testUnbanOnBusyBanBombing(self):
        # Under heavy ban load, unbans are throttled (unbanMaxCount) rather
        # than processed one-by-one.
        self.__actions.banPrecedence = 3
        self.__actions.unbanMaxCount = 5
        self.__actions.setBanTime(100)
        self.__actions.start()
        MyTime.setTime(0)
        i = 0
        while (i < 20):
            ip = ('192.0.2.%d' % i)
            self.__jail.putFailTicket(FailTicket(ip, 0))
            i += 1
        self.assertLogged(' / 20,', wait=True)
        MyTime.setTime(200)
        while (i < 50):
            ip = ('192.0.2.%d' % i)
            self.__jail.putFailTicket(FailTicket(ip, 200))
            i += 1
        self.assertLogged(' / 50,', wait=True)
        self.__actions.stop()
        self.__actions.join()
        self.assertLogged('Unbanned 30, 0 ticket(s)')
        self.assertNotLogged('Unbanned 50, 0 ticket(s)')
    def testActionsConsistencyCheck(self):
        # Consistency check / repair flow with a single start command.
        act = self.defaultAction({'check': ' <family>', 'flush': ' <family>'})
        act['actionflush?family=inet6'] = (act.actionflush + '; exit 1')
        act.actionstart_on_demand = True
        act.actionban = ('<actioncheck> ; ' + act.actionban)
        act.actionunban = ('<actioncheck> ; ' + act.actionunban)
        self.__actions.start()
        self.assertNotLogged(('stdout: %r' % 'ip start'))
        self.assertEqual(self.__actions.addBannedIP('192.0.2.1'), 1)
        self.assertEqual(self.__actions.addBannedIP('2001:db8::1'), 1)
        self.assertLogged('Ban 192.0.2.1', 'Ban 2001:db8::1', ('stdout: %r' % 'ip start'), ('stdout: %r' % 'ip ban 192.0.2.1'), ('stdout: %r' % 'ip ban 2001:db8::1'), all=True, wait=True)
        self.pruneLog('[test-phase 1a] simulate inconsistent irreparable env by unban')
        act['actioncheck?family=inet6'] = (act.actioncheck + '; exit 1')
        self.__actions.removeBannedIP('2001:db8::1')
        self.assertLogged('Invariant check failed. Unban is impossible.', wait=True)
        self.pruneLog('[test-phase 1b] simulate inconsistent irreparable env by flush')
        self.__actions._Actions__flushBan()
        self.assertLogged(('stdout: %r' % 'ip flush inet4'), ('stdout: %r' % 'ip flush inet6'), 'Failed to flush bans', 'No flush occurred, do consistency check', 'Invariant check failed. Trying to restore a sane environment', ('stdout: %r' % 'ip stop'), 'Failed to flush bans', all=True, wait=True)
        self.pruneLog('[test-phase 2] consistent env')
        act['actioncheck?family=inet6'] = act.actioncheck
        self.assertEqual(self.__actions.addBannedIP('2001:db8::1'), 1)
        self.assertLogged('Ban 2001:db8::1', ('stdout: %r' % 'ip start'), ('stdout: %r' % 'ip ban 2001:db8::1'), all=True, wait=True)
        self.assertNotLogged(('stdout: %r' % 'ip check inet4'), all=True)
        self.pruneLog('[test-phase 3] failed flush in consistent env')
        self.__actions._Actions__flushBan()
        self.assertLogged('Failed to flush bans', 'No flush occurred, do consistency check', ('stdout: %r' % 'ip flush inet6'), ('stdout: %r' % 'ip check inet6'), all=True, wait=True)
        self.assertNotLogged(('stdout: %r' % 'ip flush inet4'), ('stdout: %r' % 'ip stop'), ('stdout: %r' % 'ip start'), 'Unable to restore environment', all=True)
        self.pruneLog('[test-phase end] flush successful')
        act['actionflush?family=inet6'] = act.actionflush
        self.__actions.stop()
        self.__actions.join()
        self.assertLogged(('stdout: %r' % 'ip flush inet6'), ('stdout: %r' % 'ip stop'), 'action ip terminated', all=True, wait=True)
        self.assertNotLogged('ERROR', ('stdout: %r' % 'ip flush inet4'), 'Unban tickets each individualy', all=True)
    def testActionsConsistencyCheckDiffFam(self):
        # Same flow, but start/check/flush/stop are per address family.
        act = self.defaultAction({'start': ' <family>', 'check': ' <family>', 'flush': ' <family>', 'stop': ' <family>'})
        act['actionflush?family=inet6'] = (act.actionflush + '; exit 1')
        act.actionstart_on_demand = True
        act.actionrepair_on_unban = True
        act.actionban = ('<actioncheck> ; ' + act.actionban)
        act.actionunban = ('<actioncheck> ; ' + act.actionunban)
        self.__actions.start()
        self.assertNotLogged(('stdout: %r' % 'ip start'))
        self.assertEqual(self.__actions.addBannedIP('192.0.2.1'), 1)
        self.assertEqual(self.__actions.addBannedIP('2001:db8::1'), 1)
        self.assertLogged('Ban 192.0.2.1', 'Ban 2001:db8::1', ('stdout: %r' % 'ip start inet4'), ('stdout: %r' % 'ip ban 192.0.2.1'), ('stdout: %r' % 'ip start inet6'), ('stdout: %r' % 'ip ban 2001:db8::1'), all=True, wait=True)
        act['actioncheck?family=inet6'] = (act.actioncheck + '; exit 1')
        self.pruneLog('[test-phase 1a] simulate inconsistent irreparable env by unban')
        self.__actions.removeBannedIP('2001:db8::1')
        self.assertLogged('Invariant check failed. Trying to restore a sane environment', ('stdout: %r' % 'ip stop inet6'), all=True, wait=True)
        self.assertNotLogged(('stdout: %r' % 'ip start inet6'), ('stdout: %r' % 'ip stop inet4'), ('stdout: %r' % 'ip start inet4'), all=True)
        self.pruneLog('[test-phase 1b] simulate inconsistent irreparable env by ban')
        self.assertEqual(self.__actions.addBannedIP('2001:db8::1'), 1)
        self.assertLogged('Invariant check failed. Trying to restore a sane environment', ('stdout: %r' % 'ip stop inet6'), ('stdout: %r' % 'ip start inet6'), ('stdout: %r' % 'ip check inet6'), 'Unable to restore environment', 'Failed to execute ban', all=True, wait=True)
        self.assertNotLogged(('stdout: %r' % 'ip stop inet4'), ('stdout: %r' % 'ip start inet4'), all=True)
        act['actioncheck?family=inet6'] = act.actioncheck
        self.assertEqual(self.__actions.addBannedIP('2001:db8::2'), 1)
        act['actioncheck?family=inet6'] = (act.actioncheck + '; exit 1')
        self.pruneLog('[test-phase 1c] simulate inconsistent irreparable env by flush')
        self.__actions._Actions__flushBan()
        self.assertLogged(('stdout: %r' % 'ip flush inet4'), ('stdout: %r' % 'ip flush inet6'), 'Failed to flush bans', 'No flush occurred, do consistency check', 'Invariant check failed. Trying to restore a sane environment', ('stdout: %r' % 'ip stop inet6'), 'Failed to flush bans in jail', all=True, wait=True)
        self.assertNotLogged(('stdout: %r' % 'ip stop inet4'), all=True)
        self.pruneLog('[test-phase 2] consistent env')
        act['actioncheck?family=inet6'] = act.actioncheck
        self.assertEqual(self.__actions.addBannedIP('2001:db8::1'), 1)
        self.assertLogged('Ban 2001:db8::1', ('stdout: %r' % 'ip start inet6'), ('stdout: %r' % 'ip ban 2001:db8::1'), all=True, wait=True)
        self.assertNotLogged(('stdout: %r' % 'ip check inet4'), ('stdout: %r' % 'ip start inet4'), all=True)
        self.pruneLog('[test-phase 3] failed flush in consistent env')
        act['actioncheck?family=inet6'] = act.actioncheck
        self.__actions._Actions__flushBan()
        self.assertLogged('Failed to flush bans', 'No flush occurred, do consistency check', ('stdout: %r' % 'ip flush inet6'), ('stdout: %r' % 'ip check inet6'), all=True, wait=True)
        self.assertNotLogged(('stdout: %r' % 'ip flush inet4'), ('stdout: %r' % 'ip stop inet4'), ('stdout: %r' % 'ip start inet4'), ('stdout: %r' % 'ip stop inet6'), ('stdout: %r' % 'ip start inet6'), all=True)
        self.pruneLog('[test-phase end] flush successful')
        act['actionflush?family=inet6'] = act.actionflush
        self.__actions.stop()
        self.__actions.join()
        self.assertLogged(('stdout: %r' % 'ip flush inet6'), ('stdout: %r' % 'ip stop inet4'), ('stdout: %r' % 'ip stop inet6'), 'action ip terminated', all=True, wait=True)
        self.assertNotLogged('ERROR', ('stdout: %r' % 'ip flush inet4'), 'Unban tickets each individualy', all=True)
    # NOTE(review): bare `_alt_time` / `_tmpdir` below look like stripped
    # decorators supplying time control and the *tmp* argument — confirm.
    _alt_time
    _tmpdir
    def testActionsRebanBrokenAfterRepair(self, tmp):
        # Reban flow: after the environment is repaired (marker files removed),
        # already-known tickets are rebanned instead of being reported as
        # duplicates.
        act = self.defaultAction({'start': ' <family>; touch "<FN>"', 'check': ' <family>; test -f "<FN>"', 'flush': ' <family>; echo -n "" > "<FN>"', 'stop': ' <family>; rm -f "<FN>"', 'ban': ' <family>; echo "<ip> <family>" >> "<FN>"'})
        act['FN'] = (tmp + '/<family>')
        act.actionstart_on_demand = True
        act.actionrepair = 'echo ip repair <family>; touch "<FN>"'
        act.actionreban = 'echo ip reban <ip> <family>; echo "<ip> <family> -- rebanned" >> "<FN>"'
        self.pruneLog('[test-phase 0] initial ban')
        self.assertEqual(self.__actions.addBannedIP(['192.0.2.1', '2001:db8::1']), 2)
        self.assertLogged('Ban 192.0.2.1', 'Ban 2001:db8::1', ('stdout: %r' % 'ip start inet4'), ('stdout: %r' % 'ip ban 192.0.2.1 inet4'), ('stdout: %r' % 'ip start inet6'), ('stdout: %r' % 'ip ban 2001:db8::1 inet6'), all=True)
        self.pruneLog('[test-phase 1] check ban')
        self.dumpFile((tmp + '/inet4'))
        self.assertLogged('192.0.2.1 inet4')
        self.assertNotLogged('2001:db8::1 inet6')
        self.pruneLog()
        self.dumpFile((tmp + '/inet6'))
        self.assertLogged('2001:db8::1 inet6')
        self.assertNotLogged('192.0.2.1 inet4')
        MyTime.setTime((MyTime.time() + 4))
        self.pruneLog('[test-phase 2] check already banned')
        self.assertEqual(self.__actions.addBannedIP(['192.0.2.1', '2001:db8::1', '2001:db8::2']), 1)
        self.assertLogged('192.0.2.1 already banned', '2001:db8::1 already banned', 'Ban 2001:db8::2', ('stdout: %r' % 'ip check inet4'), ('stdout: %r' % 'ip check inet6'), all=True)
        self.dumpFile((tmp + '/inet4'))
        self.dumpFile((tmp + '/inet6'))
        self.assertNotLogged('Reban 192.0.2.1', 'Reban 2001:db8::1', ('stdout: %r' % 'ip ban 192.0.2.1 inet4'), ('stdout: %r' % 'ip reban 192.0.2.1 inet4'), ('stdout: %r' % 'ip ban 2001:db8::1 inet6'), ('stdout: %r' % 'ip reban 2001:db8::1 inet6'), '192.0.2.1 inet4 -- repaired', '2001:db8::1 inet6 -- repaired', all=True)
        MyTime.setTime((MyTime.time() + 4))
        os.remove((tmp + '/inet4'))
        os.remove((tmp + '/inet6'))
        self.pruneLog('[test-phase 3a] check reban after sane env repaired')
        self.assertEqual(self.__actions.addBannedIP(['192.0.2.1', '2001:db8::1']), 2)
        self.assertLogged('Invariant check failed. Trying to restore a sane environment', ('stdout: %r' % 'ip repair inet4'), ('stdout: %r' % 'ip repair inet6'), "Reban 192.0.2.1, action 'ip'", "Reban 2001:db8::1, action 'ip'", ('stdout: %r' % 'ip reban 192.0.2.1 inet4'), ('stdout: %r' % 'ip reban 2001:db8::1 inet6'), all=True)
        self.pruneLog('[test-phase 3a] check reban by epoch mismatch (without repair)')
        self.assertEqual(self.__actions.addBannedIP('2001:db8::2'), 1)
        self.assertLogged("Reban 2001:db8::2, action 'ip'", ('stdout: %r' % 'ip reban 2001:db8::2 inet6'), all=True)
        self.assertNotLogged('Invariant check failed. Trying to restore a sane environment', ('stdout: %r' % 'ip repair inet4'), ('stdout: %r' % 'ip repair inet6'), "Reban 192.0.2.1, action 'ip'", "Reban 2001:db8::1, action 'ip'", ('stdout: %r' % 'ip reban 192.0.2.1 inet4'), ('stdout: %r' % 'ip reban 2001:db8::1 inet6'), all=True)
        self.pruneLog('[test-phase 4] check reban')
        self.dumpFile((tmp + '/inet4'))
        self.assertLogged('192.0.2.1 inet4 -- rebanned')
        self.assertNotLogged('2001:db8::1 inet6 -- rebanned')
        self.pruneLog()
        self.dumpFile((tmp + '/inet6'))
        self.assertLogged('2001:db8::1 inet6 -- rebanned', '2001:db8::2 inet6 -- rebanned', all=True)
        self.assertNotLogged('192.0.2.1 inet4 -- rebanned')
        act.actionreban = ''
        act.actionban = 'exit 1'
        self.assertEqual(self.__actions._Actions__reBan(FailTicket('192.0.2.1', 0)), 0)
        self.assertLogged('Failed to execute reban', 'Error banning 192.0.2.1', all=True)
class DemoVirtualDirectory(DemoTreeNodeObject):
    """Tree node grouping a list of demo resources under a virtual directory."""

    # Free-form description of this directory.
    description = Str()
    # Child nodes shown under this directory.
    resources = List(Instance(DemoTreeNodeObject))
    # Virtual directories can always hold children.
    allows_children = Bool(True)
    # Human-friendly label for the node.
    nice_name = Str('Data')

    def has_children(self):
        """Return True when at least one resource is present."""
        return bool(self.resources)

    def get_children(self):
        """Return the contained resources."""
        return self.resources
class Blob(_common.FlyteIdlEntity):
    """Value object pairing a blob URI with its metadata.

    NOTE(review): ``uri``/``metadata`` read like they were ``@property``
    accessors and ``from_flyte_idl`` a ``@classmethod`` before decorators were
    stripped from this source (``to_flyte_idl`` accesses ``self.metadata`` as
    a value, which only works with ``@property``) — confirm upstream.
    """
    def __init__(self, metadata, uri):
        # Metadata describing the blob (type/format); see BlobMetadata.
        self._metadata = metadata
        # Storage location (e.g. object-store path) of the blob data.
        self._uri = uri
    def uri(self):
        """Return the blob's storage location."""
        return self._uri
    def metadata(self):
        """Return the blob's metadata."""
        return self._metadata
    def to_flyte_idl(self):
        """Serialize this object to a literals_pb2.Blob protobuf message."""
        return _literals_pb2.Blob(metadata=self.metadata.to_flyte_idl(), uri=self.uri)
    def from_flyte_idl(cls, proto):
        """Build a Blob from its protobuf representation."""
        return cls(metadata=BlobMetadata.from_flyte_idl(proto.metadata), uri=proto.uri)
def _unpatch(module: ModuleType, name: str, fn: Callable[(..., Any)]) -> None:
    """Remove previously injected bindings from *fn* if it was patched.

    When *name* resolves to a classmethod/staticmethod descriptor in the
    module's dict, the underlying function is unwrapped first.
    """
    if hasattr(module, '__dict__'):
        wrapper = module.__dict__.get(name)
        if isinstance(wrapper, (classmethod, staticmethod)):
            # Reach through the descriptor to the plain function object.
            fn = wrapper.__func__
    if not _is_patched(fn):
        return
    _unbind_injections(fn)
def data_files(directory):
    """Collect non-hidden files under *directory*, grouped by their directory.

    Returns a defaultdict mapping each directory path to the list of joined
    file paths it contains.  Hidden files are skipped, and hidden directories
    are pruned so the walk never descends into them.
    """
    collected = defaultdict(list)
    for root, dirs, files in os.walk(directory):
        # Prune hidden directories in place so os.walk skips their subtrees.
        dirs[:] = [d for d in dirs if not d.startswith('.')]
        for fname in files:
            if fname.startswith('.'):
                continue
            collected[root].append(os.path.join(root, fname))
    return collected
def parse_rpm_output_list(packages_info):
    """Parse rpm package-info text into a list of product/version dicts.

    The input is expected to contain alternating ``Name :``/``Version :``
    lines; each Name is paired with the next Version line that follows it.

    Fixes over the previous version:
    - a ``Version :`` line appearing before any ``Name :`` no longer raises
      NameError (it is skipped as malformed);
    - two consecutive ``Name :`` lines no longer mis-pair the second name as
      a version;
    - values containing a colon are no longer truncated (split with
      maxsplit=1).

    :param packages_info: raw text to parse.
    :returns: list of ``{'product': name, 'version': version}`` dicts.
    """
    products = []
    product = None
    for line in packages_info.split('\n'):
        if line.startswith('Name :'):
            # Remember the product; it is emitted once its version arrives.
            product = line.split(':', 1)[1].strip()
        elif line.startswith('Version :'):
            if product is None:
                # Version with no preceding Name: skip malformed entry.
                continue
            version = line.split(':', 1)[1].strip()
            products.append({'product': product, 'version': version})
            product = None
    return products
def int2ip(ip_int):
    """Render an integer as an IPv4 dotted-quad, falling back to IPv6.

    Raises ValueError when the value fits neither address family.
    """
    for family in (4, 6):
        try:
            if family == 4:
                return socket.inet_ntoa(struct.pack('!I', ip_int))
            # IPv6: hex-encode the integer and present it via inet_ntop.
            text = socket.inet_ntop(socket.AF_INET6, binascii.unhexlify(dec2hex(ip_int)))
            return safeunicode(text, encoding='ascii')
        except (socket.error, struct.error):
            continue
    raise ValueError('{!r} does not appear to be an IPv4 or IPv6 address'.format(ip_int))
class conv2d_depthwise(conv2d):
    """Depthwise 2D convolution op: each input channel gets its own filter.

    NOTE(review): ``is_valid_inputs`` takes no ``self``/``cls`` and reads like
    a ``@staticmethod`` whose decorator was stripped — confirm upstream.
    """
    def __init__(self, stride, pad, dilate=1, group=1) -> None:
        # Delegate geometry handling to conv2d; only the op name differs.
        super().__init__(stride, pad, dilate=dilate, group=group)
        self._attrs['op'] = 'conv2d_depthwise'
    def __call__(self, x: Tensor, w: Tensor):
        """Build the output tensor for input *x* and weight *w*."""
        self._attrs['inputs'] = [x, w]
        self._set_depth()
        output_shape = self._infer_shapes(x, w)
        output = Tensor(output_shape, src_ops={self})
        self._extract_exec_path(x)
        self._extract_epilogue_alignment(output_shape)
        self._attrs['outputs'] = [output]
        return output
    def _infer_shape(self, x: List[int], w: List[int]) -> List[int]:
        """Validate the depthwise weight layout, then defer to conv2d."""
        # Depthwise requires the weight's leading dim to equal the group count.
        if (w[0] != self._attrs['group']):
            raise RuntimeError('W Shape mismatch for conv2d_depthwise')
        return super()._infer_shape(x, w)
    def is_valid_inputs(x: Tensor, w: Tensor) -> Tuple[(bool, str)]:
        """Return (ok, message) after checking both tensors are 4D."""
        x_shape = x._attrs['shape']
        if (len(x_shape) != 4):
            return (False, f'x should be 4D: x_shape={x_shape!r}')
        w_shape = w._attrs['shape']
        if (len(w_shape) != 4):
            return (False, f'w should be 4D: w_shape={w_shape!r}')
        return (True, '')
def set_pixel(r, g, b):
    """Queue one RGB frame for the LED; each channel must be an int 0-255.

    Channels are scaled by the module-level ``_brightness`` and passed
    through the ``LED_GAMMA`` lookup table before being written.
    """
    setup()
    # Validate channels in r, g, b order so error messages match callers'
    # expectations.
    for label, value in (('r', r), ('g', g), ('b', b)):
        if not isinstance(value, int) or value < 0 or value > 255:
            raise ValueError('Argument {} should be an int from 0 to 255'.format(label))
    # Apply global brightness before gamma correction.
    r, g, b = (int(channel * _brightness) for channel in (r, g, b))
    # Frame layout: two zero bytes, 239 header, gamma-corrected B, G, R,
    # then two trailing zero bytes.
    for byte in (0, 0, 239, LED_GAMMA[b & 255], LED_GAMMA[g & 255], LED_GAMMA[r & 255], 0, 0):
        _write_byte(byte)
    _enqueue()
def E(inp, L):
    """Expand a bytearray of length *L* to Ar_KEY_LEN bytes by cyclic repetition.

    The input bytes are repeated modulo *L* until 16 bytes are produced.
    Input validation uses asserts, matching the reference-style code around it.
    """
    assert (type(L) == int), 'E L is {}, it should be {}'.format(type(L), 'int')
    assert (type(inp) == bytearray), 'E inp is {}, it should be bytearray'.format(type(inp))
    assert (len(inp) == L), 'E inp len is {}, it should be {}'.format(len(inp), L)
    # Cyclic expansion: byte i of the output is input byte i mod L.
    expanded = bytearray(inp[i % L] for i in range(16))
    assert (len(expanded) == Ar_KEY_LEN)
    return expanded
def test_that_shows_reaching_definitions_cannot_deal_with_pointers(basic_block_with_pointers, a, b, c, d):
    """Document a known limitation: writes through pointers are not tracked,
    so all four definitions still reach the end of the block."""
    define_a = Assignment(a, Constant(0))
    take_addr = Assignment(b, UnaryOperation(OperationType.address, [a]))
    copy_to_c = Assignment(c, b)
    copy_to_d = Assignment(d, c)
    analysis = ReachingDefinitions(basic_block_with_pointers)
    block = next(iter(basic_block_with_pointers.nodes))
    assert analysis.reach_in_block(block) == set()
    assert analysis.reach_out_block(block) == {define_a, take_addr, copy_to_c, copy_to_d}
    assert analysis.reach_in_stmt(block, 1) == {define_a}
    assert analysis.reach_in_stmt(block, 6) == {define_a, take_addr, copy_to_c, copy_to_d}
def test_explicit_params():
    """EMA updated from a *different* model's parameters must still change
    the tracked model's weights when copied back."""
    tracked = torch.nn.Linear(10, 2)
    with torch.no_grad():
        tracked.weight.fill_(0.0)
    ema = ExponentialMovingAverage(tracked.parameters(), decay=0.9)
    donor = torch.nn.Linear(10, 2)
    with torch.no_grad():
        donor.weight.fill_(1.0)
    # Feed the donor's parameters explicitly, then write the average back.
    ema.update(donor.parameters())
    ema.copy_to()
    assert not torch.all(tracked.weight == 0.0)
class RouterManager(app_manager.RyuApp):
    """Creates, dispatches to, and tears down VRRP sample routers.

    One router app instance is managed per VRRP instance name; lifecycle
    is driven entirely by EventVRRPStateChanged events.
    """

    # interface type -> IP version -> concrete router implementation.
    _ROUTER_CLASSES = {vrrp_event.VRRPInterfaceNetworkDevice: {4: sample_router.RouterIPV4Linux, 6: sample_router.RouterIPV6Linux}, vrrp_event.VRRPInterfaceOpenFlow: {4: sample_router.RouterIPV4OpenFlow, 6: sample_router.RouterIPV6OpenFlow}}

    def __init__(self, *args, **kwargs):
        super(RouterManager, self).__init__(*args, **kwargs)
        # Saved so each instantiated router receives the same arguments.
        self._args = args
        self._kwargs = kwargs
        # Maps VRRP instance name -> router app name.
        self.routers = {}

    def _router_factory(self, instance_name, monitor_name, interface, config):
        """Instantiate the router app matching *interface* type and IP version.

        Raises ValueError for an unrecognized interface type.
        """
        cls = None
        for (interface_cls, router_clses) in self._ROUTER_CLASSES.items():
            if isinstance(interface, interface_cls):
                cls = router_clses[6] if config.is_ipv6 else router_clses[4]
                break
        self.logger.debug('interface %s %s', type(interface), interface)
        self.logger.debug('cls %s', cls)
        if (cls is None):
            raise ValueError(('Unknown interface type %s %s' % (type(interface), interface)))
        kwargs = self._kwargs.copy()
        kwargs.update({'name': instance_name, 'monitor_name': monitor_name, 'config': config, 'interface': interface})
        app_mgr = app_manager.AppManager.get_instance()
        return app_mgr.instantiate(cls, *self._args, **kwargs)

    # BUG FIX: the decorator's '@' was missing, leaving a bare expression
    # statement, so this handler was never registered for the event.
    @_ev_cls(vrrp_event.EventVRRPStateChanged)
    def vrrp_state_changed_handler(self, ev):
        """Create/destroy routers on INITIALIZE transitions; forward other events."""
        if (ev.new_state == vrrp_event.VRRP_STATE_INITIALIZE):
            # old_state set means the instance existed and is shutting down.
            if ev.old_state:
                self._shutdown(ev)
            else:
                self._initialize(ev)
            return
        router_name = self.routers.get(ev.instance_name)
        self.send_event(router_name, ev)

    def _initialize(self, ev):
        """Create a router for the new VRRP instance and start it."""
        router = self._router_factory(ev.instance_name, ev.monitor_name, ev.interface, ev.config)
        self.routers[ev.instance_name] = router.name
        self.send_event(router.name, ev)
        router.start()

    def _shutdown(self, ev):
        """Forward the final event to the router, then uninstantiate it."""
        router_name = self.routers.pop(ev.instance_name)
        self.send_event(router_name, ev)
        app_mgr = app_manager.AppManager.get_instance()
        app_mgr.uninstantiate(router_name)
class ProgressRenderer(TableDelegate):
    """Table delegate that paints a cell's value as a Qt progress bar."""

    def paint(self, painter, option, index):
        """Render the cell at *index* using QStyleOptionProgressBar.

        Min/max/progress/text are pulled from the column object attached to
        the model's editor.
        """
        column = index.model()._editor.columns[index.column()]
        obj = index.data(QtCore.Qt.ItemDataRole.UserRole)
        progress_bar_option = QtGui.QStyleOptionProgressBar()
        progress_bar_option.rect = option.rect
        progress_bar_option.minimum = column.get_minimum(obj)
        progress_bar_option.maximum = column.get_maximum(obj)
        progress_bar_option.progress = int(column.get_raw_value(obj))
        progress_bar_option.textVisible = column.get_text_visible()
        progress_bar_option.text = column.get_value(obj)
        style = QtGui.QApplication.instance().style()
        if (sys.platform == 'darwin'):
            # NOTE(review): presumably works around the macOS style drawing
            # relative to the widget origin instead of the cell rect — the
            # painter is translated to the cell's top-left first. Confirm.
            painter.save()
            painter.translate(option.rect.left(), option.rect.top())
            style.drawControl(QtGui.QStyle.ControlElement.CE_ProgressBar, progress_bar_option, painter)
            painter.restore()
        else:
            style.drawControl(QtGui.QStyle.ControlElement.CE_ProgressBar, progress_bar_option, painter)
def extractTdwktranslatesWordpressCom(item):
    """Parser for 'tdwktranslates.wordpress.com' feed items.

    Returns a release message for recognized tags, None for previews or
    titles without volume/chapter info, and False when nothing matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # tag -> (series name, translation type), probed in insertion order.
    releases = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tagname, (name, tl_type) in releases.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsScatter3dSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Highcharts option accessors for the playDelay mapping of the default
    speech options.

    BUG FIX: each getter/setter pair was defined without @property /
    @<name>.setter decorators, so the second def silently replaced the
    first and every getter was unreachable.
    """

    @property
    def mapFunction(self):
        """Mapping function, or None if unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Target property the mapping applies to, or None if unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range, or None if unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range, or None if unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping is computed within, or None if unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
('rig')
def cacheable_attr_to_lowercase(progress_controller=None):
    """Lower-case the `cacheable` attribute value on the first root node.

    Args:
        progress_controller: Optional progress reporter; a no-op base
            implementation is substituted when omitted.
    """
    if (progress_controller is None):
        progress_controller = ProgressControllerBase()
    # NOTE(review): maximum is 2 but up to three increment() calls can run
    # below — confirm the intended step count.
    progress_controller.maximum = 2
    root_nodes = auxiliary.get_root_nodes()
    progress_controller.increment()
    if root_nodes[0].hasAttr('cacheable'):
        progress_controller.increment()
        root_nodes[0].setAttr('cacheable', root_nodes[0].getAttr('cacheable').lower())
    progress_controller.increment()
    progress_controller.complete()
    # NOTE(review): complete() is called twice — looks unintentional; verify
    # it is idempotent or drop one of the calls.
    progress_controller.complete()
def over_limit_error():
    """Render the 'form over quota' error as JSON or HTML.

    Both representations return HTTP 402 (Payment Required).
    """
    if request_wants_json():
        # BUG FIX: the JSON branch previously returned an implicit 200,
        # while the HTML branch returned 402 — keep the status consistent
        # so API clients can detect the over-quota condition.
        return (jsonify({'error': 'form over quota'}), 402)
    return (render_template('error.html', title='Form over quota', text='It looks like this form is getting a lot of submissions and ran out of its quota. Try contacting this website through other means or try submitting again later.'), 402)
class FranceConnectOAuth2(BaseOAuth2[FranceConnectOAuth2AuthorizeParams]):
    """OAuth2 client for FranceConnect (production or integration endpoints)."""
    display_name = 'FranceConnect'
    logo_svg = LOGO_SVG

    def __init__(self, client_id: str, client_secret: str, integration: bool=False, scopes: Optional[List[str]]=BASE_SCOPES, name='franceconnect'):
        """Configure the base client against the chosen endpoint set.

        Args:
            integration: Use FranceConnect's integration (sandbox) endpoints
                instead of production.
        """
        endpoints = (ENDPOINTS['integration'] if integration else ENDPOINTS['production'])
        super().__init__(client_id, client_secret, endpoints['authorize'], endpoints['access_token'], refresh_token_endpoint=None, revoke_token_endpoint=None, name=name, base_scopes=scopes)
        self.profile_endpoint = endpoints['profile']

    async def get_authorization_url(self, redirect_uri: str, state: Optional[str]=None, scope: Optional[List[str]]=None, extras_params: Optional[FranceConnectOAuth2AuthorizeParams]=None) -> str:
        """Build the authorization URL.

        FranceConnect requires a `nonce` parameter, so a random one is
        generated when the caller did not supply it.
        """
        _extras_params = (extras_params or {})
        if (_extras_params.get('nonce') is None):
            _extras_params['nonce'] = secrets.token_urlsafe()
        return (await super().get_authorization_url(redirect_uri, state, scope, _extras_params))

    async def get_id_email(self, token: str) -> Tuple[(str, Optional[str])]:
        """Fetch the userinfo profile and return (subject_id, email).

        Raises GetIdEmailError on any HTTP error response.
        """
        # BUG FIX: `async with self.get_ as client:` referenced a non-existent
        # attribute; use the base class's HTTPX client factory.
        async with self.get_httpx_client() as client:
            response = (await client.get(self.profile_endpoint, headers={**self.request_headers, 'Authorization': f'Bearer {token}'}))
            if (response.status_code >= 400):
                raise GetIdEmailError(response.json())
            data: Dict[(str, Any)] = response.json()
            return (str(data['sub']), data.get('email'))
.django_db
def test_correct_response_multiple_defc(client, monkeypatch, helpers, elasticsearch_award_index, cfda_awards_and_transactions):
    """Spending-by-CFDA endpoint aggregates awards for DEF codes L and M.

    Results are expected in obligation-descending order with full CFDA
    metadata attached to each entry.
    """
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    resp = helpers.post_for_spending_endpoint(client, url, def_codes=['L', 'M'])
    # Expected aggregation over the fixture awards/transactions.
    expected_results = [{'code': '30.300', 'award_count': 1, 'description': 'CFDA 3', 'id': 300, 'obligation': 2000.0, 'outlay': 1000.0, 'resource_link': 'www.example.com/300', 'applicant_eligibility': 'AE3', 'beneficiary_eligibility': 'BE3', 'cfda_federal_agency': 'Agency 3', 'cfda_objectives': 'objectives 3', 'cfda_website': 'www.example.com/cfda_website/300'}, {'code': '20.200', 'award_count': 2, 'description': 'CFDA 2', 'id': 200, 'obligation': 220.0, 'outlay': 100.0, 'resource_link': 'www.example.com/200', 'applicant_eligibility': 'AE2', 'beneficiary_eligibility': 'BE2', 'cfda_federal_agency': 'Agency 2', 'cfda_objectives': 'objectives 2', 'cfda_website': 'www.example.com/cfda_website/200'}, {'code': '10.100', 'award_count': 1, 'description': 'CFDA 1', 'id': 100, 'obligation': 2.0, 'outlay': 0.0, 'resource_link': None, 'applicant_eligibility': 'AE1', 'beneficiary_eligibility': 'BE1', 'cfda_federal_agency': 'Agency 1', 'cfda_objectives': 'objectives 1', 'cfda_website': None}]
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json()['results'] == expected_results)
class StravaWidget(base._Widget, base.MarginMixin):
    """Qtile bar widget showing Strava activity stats with a popup summary.

    Data is fetched asynchronously via the module-level ``get_strava_data``
    helper and re-fetched every ``refresh_interval`` seconds.
    """
    orientations = base.ORIENTATION_HORIZONTAL
    _experimental = True
    defaults = [('font', 'sans', 'Default font'), ('fontsize', None, 'Font size'), ('foreground', 'ffffff', 'Text colour'), ('text', '{CA:%b} {CD:.1f}km', 'Widget text'), ('refresh_interval', 1800, 'Time to update data'), ('startup_delay', 10, 'Time before sending first web request'), ('popup_display_timeout', 15, 'Time to display extended info'), ('warning_colour', 'aaaa00', 'Highlight when there is an error.')]
    # Two-letter format keys -> (period attribute, field attribute) paths on
    # the data object, flattened into ``formatted_data`` after each fetch.
    format_map = {'CD': ('current', 'distance'), 'CC': ('current', 'count'), 'CT': ('current', 'format_time'), 'CP': ('current', 'format_pace'), 'CN': ('current', 'name'), 'CA': ('current', 'date'), 'YD': ('year', 'distance'), 'YC': ('year', 'count'), 'YT': ('year', 'format_time'), 'YP': ('year', 'format_pace'), 'YN': ('year', 'name'), 'YA': ('year', 'date'), 'AD': ('alltime', 'distance'), 'AC': ('alltime', 'count'), 'AT': ('alltime', 'format_time'), 'AP': ('alltime', 'format_pace'), 'AN': ('alltime', 'name'), 'AA': ('alltime', 'date')}
    _screenshots = [('strava_widget.png', ''), ('strava_widget_detail.png', "Extended info. I've blurred out details of my runs for privacy reasons.")]
    _dependencies = ['stravalib', 'pint']

    def __init__(self, **config):
        base._Widget.__init__(self, bar.CALCULATED, **config)
        self.add_defaults(StravaWidget.defaults)
        self.add_defaults(base.MarginMixin.defaults)
        if ('font_colour' in config):
            # Backwards compatibility for the renamed option.
            self.foreground = config['font_colour']
            logger.warning('The use of `font_colour` is deprecated. Please update your config to use `foreground` instead.')
        self.data = None
        self.display_text = ''

    def _configure(self, qtile, bar):
        base._Widget._configure(self, qtile, bar)
        # Delay the first web request so startup isn't blocked on the network.
        self.timeout_add(self.startup_delay, self.refresh)

    def _get_data(self):
        """Run in an executor thread: fetch fresh Strava data."""
        return get_strava_data()

    def _read_data(self, future):
        """Executor callback: unpack results, flatten format_map, redraw."""
        results = future.result()
        if results:
            (success, data) = results
            if (not success):
                logger.warning('Error retrieving data: %s.', data)
            else:
                self.data = data
                # Pre-resolve every format key to its value so format_text
                # can use a plain str.format call.
                self.formatted_data = {}
                for (k, v) in self.format_map.items():
                    obj = self.data
                    for attr in v:
                        obj = getattr(obj, attr)
                    self.formatted_data[k] = obj
        # Redraw shortly, then schedule the next refresh cycle.
        self.timeout_add(1, self.bar.draw)
        self.timeout_add(self.refresh_interval, self.refresh)

    def refresh(self):
        """Kick off an asynchronous data fetch."""
        future = self.qtile.run_in_executor(self._get_data)
        future.add_done_callback(self._read_data)

    def calculate_length(self):
        """Widget width: icon (square, = bar height) plus formatted text."""
        total = 0
        if ((self.data is not None) and self.text):
            text = self.format_text(self.text)
            (width, _) = self.drawer.max_layout_size([text], self.font, self.fontsize)
            total += (width + (2 * self.margin))
        total += self.height
        return total

    def draw_icon(self):
        """Draw the Strava-style double chevron, scaled to the bar height."""
        scale = (self.height / 24.0)
        self.drawer.set_source_rgb('ffffff')
        self.drawer.ctx.set_line_width(2)
        self.drawer.ctx.move_to((8 * scale), (14 * scale))
        self.drawer.ctx.line_to((12 * scale), (6 * scale))
        self.drawer.ctx.line_to((16 * scale), (14 * scale))
        self.drawer.ctx.stroke()
        self.drawer.ctx.set_line_width(1)
        self.drawer.ctx.move_to((13 * scale), (14 * scale))
        self.drawer.ctx.line_to((16 * scale), (20 * scale))
        self.drawer.ctx.line_to((19 * scale), (14 * scale))
        self.drawer.ctx.stroke()

    def draw_highlight(self, top=False, colour='000000'):
        """Draw a thin 2px strip at the top or bottom edge of the widget."""
        self.drawer.set_source_rgb(colour)
        y = (0 if top else (self.bar.height - 2))
        self.drawer.fillrect(0, y, self.width, 2, 2)

    def draw(self):
        self.drawer.clear((self.background or self.bar.background))
        x_offset = 0
        self.draw_icon()
        x_offset += self.height
        if (self.data is None):
            # No data yet (or fetch failed): show the warning strip instead.
            self.draw_highlight(top=True, colour=self.warning_colour)
        else:
            self.display_text = self.format_text(self.text)
            layout = self.drawer.textlayout(self.display_text, self.foreground, self.font, self.fontsize, None, wrap=False)
            y_offset = ((self.bar.height - layout.height) / 2)
            layout.draw((x_offset + self.margin_x), y_offset)
        self.drawer.draw(offsetx=self.offset, offsety=self.offsety, width=self.length)

    def button_press(self, x, y, button):
        self.show_popup_summary()

    def mouse_enter(self, x, y):
        pass

    def format_text(self, text):
        """Substitute the pre-flattened data values into *text*."""
        try:
            return text.format(**self.formatted_data)
        except Exception:
            logger.exception('Exception when trying to format text.')
            return 'Error'

    def show_popup_summary(self):
        """Build and display the monospace popup with per-run/monthly/annual rows."""
        if (not self.data):
            return False
        lines = []
        heading = '{:^6} {:^20} {:^8} {:^10} {:^6}'.format('Date', 'Title', 'km', 'time', 'pace')
        lines.append(heading)
        for act in self.data.current.children:
            line = '{a.date:%d %b}: {a.name:<20.20} {a.distance:7,.1f} {a.format_time:>10} {a.format_pace:>6}'.format(a=act)
            lines.append(line)
        sub = '\n{a.date:%b %y}: {a.name:<20.20} {a.distance:7,.1f} {a.format_time:>10} {a.format_pace:>6}'.format(a=self.data.current)
        lines.append(sub)
        for month in self.data.previous:
            line = '{a.groupdate:%b %y}: {a.name:<20.20} {a.distance:7,.1f} {a.format_time:>10} {a.format_pace:>6}'.format(a=month)
            lines.append(line)
        year = '\n{a.groupdate:%Y} : {a.name:<20.20} {a.distance:7,.1f} {a.format_time:>10} {a.format_pace:>6}'.format(a=self.data.year)
        lines.append(year)
        alltime = '\nTOTAL : {a.name:<20.20} {a.distance:7,.1f} {a.format_time:>10} {a.format_pace:>6}'.format(a=self.data.alltime)
        lines.append(alltime)
        self.popup = Popup(self.qtile, y=self.bar.height, width=900, height=900, font='monospace', horizontal_padding=10, vertical_padding=10, opacity=0.8)
        self.popup.text = '\n'.join(lines)
        # Shrink-wrap the popup to its laid-out text, then clamp to the bar.
        self.popup.height = (self.popup.layout.height + (2 * self.popup.vertical_padding))
        self.popup.width = (self.popup.layout.width + (2 * self.popup.horizontal_padding))
        self.popup.x = min(self.offsetx, (self.bar.width - self.popup.width))
        self.popup.place()
        self.popup.draw_text()
        self.popup.unhide()
        self.popup.draw()
        self.timeout_add(self.popup_display_timeout, self.popup.kill)

    def info(self):
        info = base._Widget.info(self)
        info['display_text'] = self.display_text
        return info
class ConfigArgBuilder():
    """Assembles spock configuration from decorated classes, config files and CLI args.

    Fixed (non-tunable) classes are handled by the attr builder/payload/saver
    trio; hyper-parameter-tune decorated classes are routed to the optional
    tune add-on.

    BUG FIXES applied in this revision:
      * helper functions defined without ``self`` but called via ``self.x(...)``
        were missing ``@staticmethod`` (they received the instance as their
        first positional argument and crashed);
      * ``salt``/``key``/``best``/``tuner_status`` are consumed as attributes
        (e.g. ``self.salt``, ``self.best[0]``) and were missing ``@property``;
      * ``_handle_yaml_read`` built a ``_SpockCryptoError`` without raising it,
        silently returning ``None`` on failure.
    """

    def __init__(self, *args, configs: Optional[List]=None, desc: str='', lazy: bool=False, no_cmd_line: bool=False, s3_config: Optional[_T]=None, key: Optional[Union[(str, ByteString)]]=None, salt: Optional[str]=None, **kwargs):
        """Set up builders/payloads, parse cmd line and configs, build namespaces.

        Raises on any failure after printing usage information.
        """
        self._verify_attr(args)
        self._configs = (configs if (configs is None) else [Path(c) for c in configs])
        self._lazy = lazy
        self._no_cmd_line = no_cmd_line
        self._desc = desc
        (self._salt, self._key) = self._maybe_crypto(key, salt, s3_config)
        self._payload_obj = AttrPayload(s3_config=s3_config)
        self._saver_obj = AttrSaver(s3_config=s3_config)
        (fixed_args, tune_args) = self._strip_tune_parameters(args)
        self._builder_obj = AttrBuilder(*fixed_args, lazy=lazy, salt=self._salt, key=self._key, **kwargs)
        (self._tune_obj, self._tune_payload_obj) = self._handle_tuner_objects(tune_args, s3_config, kwargs)
        self._tuner_interface = None
        self._tuner_state = None
        self._tuner_status = None
        self._sample_count = 0
        self._fixed_uuid = str(uuid4())
        try:
            self._args = self._handle_cmd_line()
            self._dict_args = self._get_payload(payload_obj=self._payload_obj, input_classes=self._builder_obj.input_classes, ignore_args=tune_args)
            self._arg_namespace = self._builder_obj.generate(self._dict_args)
            # Stash crypto material on the namespace for downstream consumers.
            self._arg_namespace.__salt__ = self.salt
            self._arg_namespace.__key__ = self.key
            if (self._tune_obj is not None):
                self._tune_args = self._get_payload(payload_obj=self._tune_payload_obj, input_classes=self._tune_obj.input_classes, ignore_args=fixed_args)
                self._tune_namespace = self._tune_obj.generate(self._tune_args)
        except Exception as e:
            self._print_usage_and_exit(str(e), sys_exit=False)
            raise e

    def __call__(self, *args, **kwargs) -> _T:
        """Return a brand-new builder — allows use as a factory."""
        return ConfigArgBuilder(*args, **kwargs)

    def generate(self) -> Spockspace:
        """Return the generated fixed-parameter Spockspace."""
        return self._arg_namespace

    @property
    def tuner_status(self) -> Dict:
        """Status dict from the most recent tuner sample."""
        return self._tuner_status

    @property
    def best(self) -> Spockspace:
        """Best (namespace, value) found so far by the tuner backend."""
        return self._tuner_interface.best

    @property
    def salt(self):
        """Crypto salt in use."""
        return self._salt

    @property
    def key(self):
        """Crypto key in use."""
        return self._key

    def sample(self) -> Spockspace:
        """Return the current tuner sample and draw the next one."""
        if (self._tuner_interface is None):
            raise RuntimeError(f'Called sample method without first calling the tuner method that initializes the backend library')
        return_tuple = self._tuner_state
        self._tuner_status = self._tuner_interface.tuner_status
        self._tuner_state = self._tuner_interface.sample()
        self._sample_count += 1
        return return_tuple

    def tuner(self, tuner_config: _T) -> _T:
        """Initialize the tuner backend with *tuner_config*; returns self."""
        if (self._tune_obj is None):
            raise RuntimeError(f'Called tuner method without passing any decorated classes')
        try:
            from spock.addons.tune.tuner import TunerInterface
            self._tuner_interface = TunerInterface(tuner_config=tuner_config, tuner_namespace=self._tune_namespace, fixed_namespace=self._arg_namespace)
            self._tuner_state = self._tuner_interface.sample()
        except Exception as e:
            raise e
        return self

    def _print_usage_and_exit(self, msg: Optional[str]=None, sys_exit: bool=True, exit_code: int=1) -> None:
        """Print usage/help text (and *msg* if given), optionally exiting."""
        print(f'usage: {sys.argv[0]} -c [--config] config1 [config2, config3, ...]')
        print(f'''
        {(self._desc if (self._desc != '') else '')}
        ''')
        print('configuration(s):\n')
        self._builder_obj.handle_help_info()
        if (self._tune_obj is not None):
            self._tune_obj.handle_help_info()
        if (msg is not None):
            print(msg)
        if sys_exit:
            sys.exit(exit_code)

    def _handle_tuner_objects(self, tune_args: List, s3_config: Optional[_T], kwargs: Dict) -> Tuple:
        """Build the tune builder/payload pair if tune classes were passed."""
        if (len(tune_args) > 0):
            try:
                from spock.addons.tune.builder import TunerBuilder
                from spock.addons.tune.payload import TunerPayload
                tuner_builder = TunerBuilder(*tune_args, **kwargs, lazy=self._lazy, salt=self.salt, key=self.key)
                tuner_payload = TunerPayload(s3_config=s3_config)
                return (tuner_builder, tuner_payload)
            except ImportError:
                # NOTE(review): this prints but then falls through returning
                # None, which the caller cannot unpack — confirm whether it
                # should re-raise instead.
                print('Missing libraries to support tune functionality. Please re-install with the extra tune dependencies -- pip install spock-config[tune]')
        else:
            return (None, None)

    @staticmethod
    def _verify_attr(args: Tuple) -> None:
        """Ensure every passed class is attrs-decorated; raise TypeError if not."""
        type_attrs = all([attr.has(arg) for arg in args])
        if (not type_attrs):
            which_idx = [attr.has(arg) for arg in args].index(False)
            if hasattr(args[which_idx], '__name__'):
                raise TypeError(f'*args must be of all attrs backend -- missing a decorator on class {args[which_idx].__name__}')
            else:
                raise TypeError(f'*args must be of all attrs backend -- invalid type {type(args[which_idx])}')

    @staticmethod
    def _strip_tune_parameters(args: Tuple) -> Tuple[(List, List)]:
        """Split classes into (fixed, tune) lists based on their defining module."""
        fixed_args = []
        tune_args = []
        for arg in args:
            if (arg.__module__ == 'spock.backend.config'):
                fixed_args.append(arg)
            elif (arg.__module__ == 'spock.addons.tune.config'):
                tune_args.append(arg)
        return (fixed_args, tune_args)

    def _handle_cmd_line(self) -> argparse.Namespace:
        """Parse CLI args (unless disabled) and merge in kwarg-provided configs."""
        if (self._no_cmd_line and (self._configs is None)):
            raise ValueError('Flag set for preventing command line read but no paths were passed to the config kwarg')
        args = (self._build_override_parsers(desc=self._desc) if (not self._no_cmd_line) else argparse.Namespace(config=[], help=False))
        if (self._configs is not None):
            args = self._get_from_kwargs(args, self._configs)
        return args

    def _build_override_parsers(self, desc: str) -> argparse.Namespace:
        """Build the root parser plus per-class override parsers, then parse."""
        parser = argparse.ArgumentParser(description=desc, add_help=False)
        parser.add_argument('-c', '--config', required=False, nargs='+', default=[], type=Path)
        parser.add_argument('-h', '--help', action='store_true')
        parser = self._builder_obj.build_override_parsers(parser=parser)
        if (self._tune_obj is not None):
            parser = self._tune_obj.build_override_parsers(parser=parser)
        args = parser.parse_args()
        return args

    @staticmethod
    def _get_from_kwargs(args: argparse.Namespace, configs: List):
        """Append kwarg-provided config paths to the parsed args."""
        if isinstance(configs, list):
            args.config.extend(configs)
        else:
            raise TypeError(f'configs kwarg must be of type list -- given {type(configs)}')
        return args

    def _get_payload(self, payload_obj: Type[AttrPayload], input_classes: Tuple, ignore_args: List) -> Dict:
        """Merge all config file payloads into a single dict, detecting clashes."""
        if self._args.help:
            self._print_usage_and_exit(sys_exit=True, exit_code=0)
        payload = {}
        dependencies = {'paths': [], 'rel_paths': [], 'roots': []}
        if (payload_obj is not None):
            if (len(input_classes) > 0):
                if (len(self._args.config) > 0):
                    for configs in self._args.config:
                        payload_update = payload_obj.payload(input_classes, ignore_args, configs, self._args, dependencies)
                        check_payload_overwrite(payload, payload_update, configs)
                        deep_payload_update(payload, payload_update)
                else:
                    # No config files: still run the payload pass for CLI overrides.
                    payload_update = payload_obj.payload(input_classes, ignore_args, None, self._args, dependencies)
                    check_payload_overwrite(payload, payload_update, None)
                    deep_payload_update(payload, payload_update)
        return payload

    def _save(self, payload: Spockspace, file_name: str=None, user_specified_path: Path=None, create_save_path: bool=True, extra_info: bool=True, file_extension: str='.yaml', tuner_payload: Optional[Spockspace]=None, fixed_uuid: str=None) -> _T:
        """Resolve the save path and delegate to the saver; returns self."""
        if (user_specified_path is not None):
            save_path = Path(user_specified_path)
        elif (self._builder_obj.save_path is not None):
            save_path = Path(self._builder_obj.save_path)
        else:
            raise ValueError('Save did not receive a valid path from: (1) markup file(s) or (2) the keyword arg user_specified_path')
        self._saver_obj.save(payload, save_path, file_name, create_save_path, extra_info, file_extension, tuner_payload, fixed_uuid)
        return self

    def save(self, file_name: str=None, user_specified_path: Union[(Path, str)]=None, create_save_path: bool=True, extra_info: bool=True, file_extension: str='.yaml', add_tuner_sample: bool=False) -> _T:
        """Save the current namespace (or the current tuner sample) to markup."""
        if (user_specified_path is not None):
            user_specified_path = Path(user_specified_path)
        if add_tuner_sample:
            if (self._tune_obj is None):
                raise ValueError(f'Called save method with add_tuner_sample as `{add_tuner_sample}` without passing any decorated classes -- please use the add_tuner_sample flag for saving only hyper-parameter tuning runs')
            file_name = (f'hp.sample.{(self._sample_count + 1)}' if (file_name is None) else f'{file_name}.hp.sample.{(self._sample_count + 1)}')
            self._save(self._tuner_state, file_name, user_specified_path, create_save_path, extra_info, file_extension)
        else:
            self._save(self._arg_namespace, file_name, user_specified_path, create_save_path, extra_info, file_extension, tuner_payload=(self._tune_namespace if (self._tune_obj is not None) else None))
        return self

    def save_best(self, file_name: str=None, user_specified_path: Path=None, create_save_path: bool=True, extra_info: bool=True, file_extension: str='.yaml') -> _T:
        """Save the fixed namespace merged with the best tuner sample."""
        if (self._tune_obj is None):
            raise ValueError(f'Called save_best method without passing any decorated classes -- please use the `save()` method for saving non hyper-parameter tuning runs')
        file_name = (f'hp.best' if (file_name is None) else f'{file_name}.hp.best')
        self._save(Spockspace(**vars(self._arg_namespace), **vars(self.best[0])), file_name, user_specified_path, create_save_path, extra_info, file_extension, fixed_uuid=self._fixed_uuid)
        return self

    def config_2_dict(self) -> Dict:
        """Return the generated namespace as a plain dict."""
        return self._saver_obj.dict_payload(self._arg_namespace)

    def spockspace_2_dict(self, payload: Spockspace) -> Dict:
        """Return an arbitrary Spockspace as a plain dict."""
        return self._saver_obj.dict_payload(payload)

    def obj_2_dict(self, obj: Union[(_C, List[_C], Tuple[(_C, ...)])]) -> Dict[(str, Dict)]:
        """Return one (or several) spock class instances as dict(s)."""
        return to_dict(obj, self._saver_obj)

    def evolve(self, *args: _C) -> Spockspace:
        """Return a deep-copied namespace with *args* instances swapped in.

        Parent classes referencing an evolved child are re-evolved upward
        through the dependency DAG so references stay consistent.
        """
        arg_counts = Counter([type(v).__name__ for v in args])
        for (k, v) in arg_counts.items():
            if (v > 1):
                raise _SpockEvolveError(f'Passed multiple instances (count: {v}) of class `{k}` into `evolve()` -- please pass only a single instance of the class in order to evolve the underlying Spockspace')
            elif (k not in self._builder_obj.graph.node_names):
                raise _SpockEvolveError(f'Passed class `{k}` into `evolve()` but that class in not within the set of input classes {repr(self._builder_obj.graph.node_names)}')
        new_arg_namespace = deepcopy(self._arg_namespace)
        # Process in topological order so children are set before parents.
        topo_idx = sorted(zip([self._builder_obj.graph.topological_order.index(type(v).__name__) for v in args], args))
        args = {type(v).__name__: v for (_, v) in topo_idx}
        for (k, v) in args.items():
            cls_name = type(v).__name__
            setattr(new_arg_namespace, cls_name, v)
            (new_arg_namespace, all_cls) = self._recurse_upwards(new_arg_namespace, cls_name, args)
        return new_arg_namespace

    def _recurse_upwards(self, new_arg_namespace: Spockspace, current_cls: str, all_cls: Dict) -> Tuple[(Spockspace, Dict)]:
        """Re-evolve every ancestor of *current_cls* that references it."""
        parents = self._builder_obj.dag[current_cls]
        if (len(parents) > 0):
            for parent_cls in parents:
                parent_name = parent_cls.__name__
                new_arg_namespace = self._set_matching_attrs_by_name(new_arg_namespace, current_cls, parent_name)
                if (parent_name in all_cls.keys()):
                    all_cls = self._set_matching_attrs_by_name_args(current_cls, parent_name, all_cls)
                (new_arg_namespace, all_cls) = self._recurse_upwards(new_arg_namespace, parent_name, all_cls)
        return (new_arg_namespace, all_cls)

    @staticmethod
    def _set_matching_attrs_by_name_args(current_cls_name: str, parent_cls_name: str, all_cls: Dict) -> Dict:
        """Evolve the user-passed parent instance to reference the evolved child."""
        new_arg_namespace = all_cls[parent_cls_name]
        names = attr.fields_dict(type(new_arg_namespace)).keys()
        for v in names:
            if (type(getattr(new_arg_namespace, v)).__name__ == current_cls_name):
                new_obj = attr.evolve(new_arg_namespace, **{v: all_cls[current_cls_name]})
                all_cls[parent_cls_name] = new_obj
                print(f'Evolved CLS Dependency: Parent = {parent_cls_name}, Child = {current_cls_name}, Value = {v}')
        return all_cls

    @staticmethod
    def _set_matching_attrs_by_name(new_arg_namespace: Spockspace, current_cls_name: str, parent_cls_name: str) -> Spockspace:
        """Evolve the namespace-held parent instance to reference the evolved child."""
        parent_attr = getattr(new_arg_namespace, parent_cls_name)
        names = attr.fields_dict(type(parent_attr)).keys()
        for v in names:
            if (type(getattr(parent_attr, v)).__name__ == current_cls_name):
                new_obj = attr.evolve(getattr(new_arg_namespace, parent_cls_name), **{v: getattr(new_arg_namespace, current_cls_name)})
                setattr(new_arg_namespace, parent_cls_name, new_obj)
                print(f'Evolved: Parent = {parent_cls_name}, Child = {current_cls_name}, Value = {v}')
        return new_arg_namespace

    def _maybe_crypto(self, key: Optional[Union[(str, ByteString)]], salt: Optional[str], s3_config: Optional[_T]=None, salt_len: int=16) -> Tuple[(str, ByteString)]:
        """Resolve (salt, key), generating fresh values when none are supplied."""
        env_resolver = EnvResolver()
        salt = self._get_salt(salt, env_resolver, salt_len, s3_config)
        key = self._get_key(key, env_resolver, s3_config)
        return (salt, key)

    def _get_salt(self, salt: Optional[str], env_resolver: EnvResolver, salt_len: int, s3_config: Optional[_T]=None) -> str:
        """Generate, read from YAML, or env-resolve the salt."""
        if (salt is None):
            salt = make_salt(salt_len)
        elif (os.path.splitext(salt)[1] in {'.yaml', '.YAML', '.yml', '.YML'}):
            salt = self._handle_yaml_read(salt, access='salt', s3_config=s3_config)
        else:
            (salt, _) = env_resolver.resolve(salt, str)
        return salt

    def _get_key(self, key: Optional[Union[(str, ByteString)]], env_resolver: EnvResolver, s3_config: Optional[_T]=None) -> ByteString:
        """Generate, read from YAML, or env-resolve the key (always bytes)."""
        if (key is None):
            key = Fernet.generate_key()
        elif (os.path.splitext(key)[1] in {'.yaml', '.YAML', '.yml', '.YML'}):
            key = self._handle_yaml_read(key, access='key', s3_config=s3_config, encode=True)
        elif isinstance(key, str):
            (key, _) = env_resolver.resolve(key, str)
            key = str.encode(key)
        return key

    @staticmethod
    def _handle_yaml_read(value: str, access: str, s3_config: Optional[_T]=None, encode: bool=False) -> Union[(str, ByteString)]:
        """Read the *access* entry ('salt' or 'key') from the YAML file at *value*."""
        try:
            payload = YAMLHandler().load(Path(value), s3_config)
            read_value = payload[access]
            if encode:
                read_value = str.encode(read_value)
            return read_value
        except Exception as e:
            # BUG FIX: the exception was constructed but never raised, so
            # failures silently returned None.
            raise _SpockCryptoError(f'Attempted to read from path `{value}` but failed') from e
def test_ignores_na_in_input_df(df_na):
    """With missing_values='ignore', NaNs pass through and only the min cap applies."""
    capper = ArbitraryOutlierCapper(max_capping_dict=None, min_capping_dict={'Age': 20}, missing_values='ignore')
    X = capper.fit_transform(df_na)
    # Build the expected frame: floor Age at 20, leaving NaNs untouched.
    expected = df_na.copy()
    expected['Age'] = expected['Age'].mask(expected['Age'] < 20, 20)
    assert capper.max_capping_dict is None
    assert capper.min_capping_dict == {'Age': 20}
    assert capper.n_features_in_ == 6
    pd.testing.assert_frame_equal(X, expected)
    assert X['Age'].min() >= 20
    # The original input must not have been mutated.
    assert df_na['Age'].min() < 20
class Test(TestCase):
    def test_config(self):
        """Each configuration kwarg maps onto widget_config; bad combos raise."""
        cases = [
            (dict(config_name='test'), {'ckeditor': None, 'config': 'test'}),
            (dict(config='test2'), {'ckeditor': None, 'config': 'test2'}),
            (dict(ckeditor='test'), {'ckeditor': 'test', 'config': None}),
        ]
        for kwargs, expected in cases:
            self.assertEqual(InlineCKEditorField(**kwargs).widget_config, expected)
        # Unknown named config is rejected.
        with self.assertRaises(KeyError):
            InlineCKEditorField(config_name='_does_not_exist')
        # Mutually exclusive kwargs are rejected.
        with self.assertRaises(TypeError):
            InlineCKEditorField(config_name='test', config='test2')
class Solution():
    def closestValue(self, root: TreeNode, target: float) -> int:
        """Return the BST value closest to *target*.

        Walks the tree in sorted (inorder) order, tracking the smallest
        absolute gap; once a value exceeds the target, later values can
        only be farther away, so the walk stops early.
        """
        def sorted_values(node):
            if node is not None:
                yield from sorted_values(node.left)
                yield node.val
                yield from sorted_values(node.right)

        best_gap = best_val = None
        for value in sorted_values(root):
            gap = abs(value - target)
            if best_gap is None or gap < best_gap:
                best_gap, best_val = gap, value
            if value > target:
                break
        return best_val
class BsCards(html.Html.Html):
    """Bootstrap card component with lazily created header and body sections.

    BUG FIX: ``header`` and ``body`` were plain methods, but every call site
    uses them as attributes (``self.body.val``, ``self.header.html()``), so
    they must be properties.
    """
    name = 'Bootstrap Card'

    def __init__(self, report, components, title, width, height, options, profile):
        # NOTE(review): width / height / options are part of the factory
        # signature but are not used here — confirm intended.
        super(BsCards, self).__init__(report, [], profile=profile)
        self.style.clear_all()
        (self.__body, self.__header) = (None, None)
        for c in components:
            self.__add__(c)
        if (title is not None):
            self.header.__add__(title)

    def __getitem__(self, i):
        """Return the i-th body component (or [] when the body is empty)."""
        if (not self.body.val):
            return []
        return self.body.val[i]

    def __add__(self, component):
        """Append *component* to the card body; returns self for chaining."""
        self.body.__add__(component)
        return self

    def insert(self, i, component):
        """Insert *component* into the card body at position *i*."""
        self.body.insert(i, component)
        return self

    @property
    def header(self):
        """Card header section, created with the 'card-header' class on first access."""
        if (self.__header is None):
            self.__header = Section(self.page, 'div')
            self.__header.style.clear_all()
            self.__header.attr['class'].add('card-header')
        return self.__header

    @property
    def body(self):
        """Card body section, created with the 'card-body' class on first access."""
        if (self.__body is None):
            self.__body = Section(self.page, 'div')
            self.__body.style.clear_all()
            self.__body.attr['class'].add('card-body')
        return self.__body

    def __str__(self):
        return ('<div %s>%s%s</div>' % (self.get_attrs(css_class_names=self.style.get_classes()), self.header.html(), self.body.html()))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.