code stringlengths 281 23.7M |
|---|
@dataclass(frozen=True, init=False)
class FalScript:
    """A Python script run by fal: a hook, a Python model, or a global script.

    Frozen dataclass (``init=False``): attributes are assigned exactly once in
    ``__init__`` via ``object.__setattr__`` and never mutated afterwards.

    NOTE(review): the ``@dataclass``/``@classmethod``/``@property`` decorators
    below were restored — the extraction had stripped every ``@`` character
    (e.g. ``self.is_global`` is used without a call at the bottom of this
    class, which only works if it is a property).
    """

    model: Optional[DbtModel]          # None for global scripts
    path: Path                         # absolute path of the script file
    faldbt: FalDbt                     # project handle used for ref/source/etc.
    hook_arguments: Optional[Dict[str, Any]]  # kwargs passed to hook scripts
    is_hook: bool
    timing_type: Optional[TimingType]  # before/after run timing, if any

    def __init__(
        self,
        faldbt: FalDbt,
        model: Optional[DbtModel],
        path: Union[str, Path],
        hook_arguments: Optional[Dict[str, Any]] = None,
        is_hook: bool = False,
        is_model: bool = False,
        timing_type: Optional[TimingType] = None,
    ):
        # Python-model paths arrive already resolved; anything else is
        # interpreted relative to the configured scripts directory.
        object.__setattr__(self, "model", model)
        object.__setattr__(
            self, "path", path if is_model else normalize_path(faldbt.scripts_dir, path)
        )
        object.__setattr__(self, "faldbt", faldbt)
        object.__setattr__(self, "hook_arguments", hook_arguments)
        object.__setattr__(self, "is_hook", is_hook)
        object.__setattr__(self, "timing_type", timing_type)
        self._telemetry()

    def _telemetry(self):
        """Best-effort usage logging; must never raise into the caller."""
        try:
            _is_global = self.model is None
            _is_hook = self.is_hook
            _timing_type = str(self.timing_type) if self.timing_type else None
            _is_model = self.model and (self.model.python_model == self.path)
            # Only a hash of the path is reported, never the path itself.
            _path_hash = hashlib.md5(str(self.path).encode()).hexdigest()
            _script_timing_desc = self.timing_type.for_script() if self.timing_type else "error"
            _hook_timing_desc = self.timing_type.for_hook() if self.timing_type else "error"
            if _is_global:
                _script_desc = f"{_script_timing_desc}-global"
                _is_hook = None
            elif _is_model:
                _script_desc = "fal-model"
                _is_global = None
                _is_hook = None
            elif _is_hook:
                _script_desc = f"{_hook_timing_desc}-hook"
            else:
                _script_desc = f"{_script_timing_desc}-script"
            telemetry.log_api(
                action="falscript_initialized",
                additional_props={
                    "is_global": _is_global,
                    "is_hook": _is_hook,
                    "is_model": _is_model,
                    "script_timing_type": _timing_type,
                    "script_desc": _script_desc,
                    "script_path": _path_hash,
                },
            )
        except Exception:
            # Telemetry is best-effort; swallow everything deliberately
            # (was a bare `except:` — narrowed so SystemExit et al. escape).
            pass

    @classmethod
    def from_hook(cls, faldbt: FalDbt, model: DbtModel, hook: Hook, timing_type: TimingType):
        """Build a FalScript for a local hook attached to *model*."""
        assert isinstance(hook, LocalHook)
        return cls(
            faldbt=faldbt,
            model=model,
            path=hook.path,
            hook_arguments=hook.arguments,
            is_hook=True,
            timing_type=timing_type,
        )

    @classmethod
    def model_script(cls, faldbt: FalDbt, model: DbtModel):
        """Build a FalScript wrapping *model*'s own Python implementation."""
        assert model.python_model, "path for Python models must be set"
        return cls(faldbt=faldbt, model=model, path=model.python_model, is_model=True)

    def exec(self):
        """Compile and execute the script with fal's magic globals injected."""
        source_code = python_from_file(self.path)
        program = compile(source_code, self.path, "exec")
        exec_globals = {
            "__name__": "__main__",
            "context": self._build_script_context(),
            "ref": self.faldbt.ref,
            "source": self.faldbt.source,
            "list_models": self.faldbt.list_models,
            "list_models_ids": self.faldbt.list_models_ids,
            "list_sources": self.faldbt.list_sources,
            "list_features": self.faldbt.list_features,
            "execute_sql": self.faldbt.execute_sql,
        }
        if not self.is_hook:
            exec_globals["write_to_source"] = self.faldbt.write_to_source
            if self.model is not None:
                # Bind the current model so scripts call write_to_model()
                # without naming their own model.
                exec_globals["write_to_model"] = partial(
                    self.faldbt.write_to_model, target_1=self.model.name, target_2=None
                )
        else:
            # Hooks must not write; these raise a helpful error when called.
            exec_globals["write_to_source"] = _not_allowed_function_maker("write_to_source")
            exec_globals["write_to_model"] = _not_allowed_function_maker("write_to_model")
        exec(program, exec_globals)

    @property
    def relative_path(self) -> Path:
        """Path relative to the project dir (models) or scripts dir (scripts)."""
        if self.is_model:
            return self.path.relative_to(self.faldbt.project_dir)
        return self.path.relative_to(self.faldbt.scripts_dir)

    @property
    def id(self):
        """Human-readable identifier used in logs."""
        if self.is_model:
            return f"(model: {self.relative_path})"
        return f"({self.model_name}, {self.relative_path})"

    @property
    def is_global(self) -> bool:
        """True for scripts not attached to any model."""
        return self.model is None

    @property
    def is_model(self) -> bool:
        """True when this script *is* the model's Python implementation."""
        # Original returned None implicitly on the falsy path; an explicit
        # bool is equivalent in every truthiness check.
        return (
            self.model is not None
            and self.model.python_model is not None
            and self.model.python_model == self.path
        )

    @property
    def model_name(self) -> str:
        """Name of the attached model, or '<GLOBAL>' for global scripts."""
        return "<GLOBAL>" if self.is_global else self.model.name

    def _build_script_context(self) -> Context:
        """Assemble the `context` object exposed to the executed script."""
        config: RuntimeConfig = self.faldbt._config
        context_config = ContextConfig(config)
        target = ContextTarget(config)
        if self.is_global:
            return Context(current_model=None, target=target, config=context_config)
        model: DbtModel = self.model
        # Hide fal's own meta block from the script's view of model.meta.
        meta = model.meta or {}
        _del_key(meta, FAL)
        tests = _process_tests(model.tests)
        current_adapter_response = None
        if model.adapter_response:
            current_adapter_response = CurrentAdapterResponse(
                message=str(model.adapter_response),
                code=model.adapter_response.code,
                rows_affected=model.adapter_response.rows_affected,
            )
        current_model = CurrentModel(
            name=model.name,
            alias=model.alias,
            status=model.status,
            columns=model.columns,
            tests=tests,
            meta=meta,
            is_incremental=model.is_incremental,
            adapter_response=current_adapter_response,
        )
        return Context(
            current_model=current_model,
            target=target,
            config=context_config,
            _arguments=self.hook_arguments,
        )
(name='handle_side_and_related_tags', ignore_result=True)
def handle_side_and_related_tags_task(builds: typing.List[str], pending_signing_tag: str, from_tag: str, pending_testing_tag: typing.Optional[str]=None, candidate_tag: typing.Optional[str]=None):
    """Handle update-tag moves for *builds* between the given koji tags.

    Thin task wrapper: initializes the worker context and delegates to
    ``handle_side_and_related_tags.main``.

    NOTE(review): a decorator fragment ``(name='handle_side_and_related_tags',
    ignore_result=True)`` precedes this function — presumably a Celery-style
    ``@app.task(...)`` stripped by extraction; confirm upstream.
    """
    # Imported lazily so the worker process only pays for it when the task runs.
    from .handle_side_and_related_tags import main
    log.info('Received an order for handling update tags')
    _do_init()
    main(builds, pending_signing_tag, from_tag, pending_testing_tag, candidate_tag)
class Date(TraitType):
    """Trait whose value is a ``datetime.date``.

    ``datetime.datetime`` instances (a subclass of ``date``) are rejected
    unless ``allow_datetime`` is true, and ``None`` is rejected unless
    ``allow_none`` is true.
    """

    default_value_type = DefaultValue.constant

    def __init__(self, default_value=None, *, allow_datetime=False, allow_none=False, **metadata):
        super().__init__(default_value, **metadata)
        self.allow_datetime = allow_datetime
        self.allow_none = allow_none

    def validate(self, object, name, value):
        """Return *value* if acceptable, else report a trait error."""
        # Order matters: datetime must be checked before date, since
        # datetime.datetime is a subclass of datetime.date.
        if value is None:
            if self.allow_none:
                return value
        elif isinstance(value, datetime.datetime):
            if self.allow_datetime:
                return value
        elif isinstance(value, datetime.date):
            return value
        self.error(object, name, value)

    def info(self):
        """Describe the accepted values for error messages."""
        datetime_qualifier = '' if self.allow_datetime else ' non-datetime'
        none_qualifier = ' or None' if self.allow_none else ''
        return f'a{datetime_qualifier} date{none_qualifier}'

    def create_editor(self):
        """Default TraitsUI editor for date values."""
        return date_editor()
_required
def user_email_settings(request, username):
    """Render the email-settings page for *username*.

    NOTE(review): a decorator fragment ``_required`` precedes this view in
    the extraction — presumably ``@login_required``; confirm upstream.
    """
    (user, user_is_house_admin_somewhere) = _get_user_and_perms(request, username)
    return render(request, 'user_email.html', {'u': user, 'user_is_house_admin_somewhere': user_is_house_admin_somewhere, 'stripe_publishable_key': settings.STRIPE_PUBLISHABLE_KEY})
class TestOFPGetConfigReply(unittest.TestCase):
    """Tests for OFPGetConfigReply.parser on a hand-built switch-config reply."""

    class Datapath(object):
        # Minimal stand-in datapath providing the protocol modules.
        ofproto = ofproto
        ofproto_parser = ofproto_v1_0_parser
    c = OFPGetConfigReply(Datapath)

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        # Nothing to check here: the message is populated by parser().
        pass

    def test_parser(self):
        """parser() must decode every header and body field (big-endian)."""
        version = {'buf': b'\x01', 'val': ofproto.OFP_VERSION}
        msg_type = {'buf': b'\n', 'val': ofproto.OFPT_GET_CONFIG_REPLY}
        msg_len = {'buf': b'\x00\x14', 'val': ofproto.OFP_SWITCH_CONFIG_SIZE}
        # Fixed: expected value was missing ('val': }).
        # 0x94c4d2cd == 2495926989, the big-endian integer of the buffer.
        xid = {'buf': b'\x94\xc4\xd2\xcd', 'val': 2495926989}
        flags = {'buf': b'\xa0\xe2', 'val': 41186}
        miss_send_len = {'buf': b'6\x0e', 'val': 13838}
        buf = (((((version['buf'] + msg_type['buf']) + msg_len['buf']) + xid['buf']) + flags['buf']) + miss_send_len['buf'])
        res = OFPGetConfigReply.parser(object, version['val'], msg_type['val'], msg_len['val'], xid['val'], buf)
        eq_(version['val'], res.version)
        eq_(msg_type['val'], res.msg_type)
        eq_(msg_len['val'], res.msg_len)
        eq_(xid['val'], res.xid)
        eq_(flags['val'], res.flags)
        eq_(miss_send_len['val'], res.miss_send_len)

    def test_serialize(self):
        # Replies are only parsed by the controller, never serialized.
        pass
class TestMaximumLikelihood(unittest.TestCase):
    """Maximum-likelihood factor analysis should recover known loadings."""

    def test_maximum_likelihood_recovery(self):
        """Build a covariance with known structure, then recover it via mlfa."""
        rng = np.random.default_rng(18434)
        data = rng.uniform(-2, 2, size=(10, 100))
        unique_var = rng.uniform(0.2, 2, size=10)
        cor_matrix = np.cov(data)
        (loadings, eigenvalues, _) = pca(cor_matrix, 3)
        # Fixed: the matrix-multiply operators below had been stripped
        # ("loadings loadings.T" / "loadings_paf rotation"); restored `@`.
        # Rebuild a covariance matrix with known loadings and uniquenesses.
        cor_matrix2 = (loadings @ loadings.T) + np.diag(unique_var)
        initial_guess = np.ones((10,)) * 0.5
        (loadings_paf, _, variance) = mlfa(cor_matrix2, 3, initial_guess=initial_guess)
        # Also exercise the default initial guess path.
        _ = mlfa(cor_matrix2, 3)
        # Loadings are only identified up to rotation; align before comparing.
        rotation = procrustes_rotation(loadings, loadings_paf)
        updated_loadings = loadings_paf @ rotation
        updated_eigs = np.square(updated_loadings).sum(0)
        np.testing.assert_allclose(loadings, updated_loadings, rtol=0.001)
        np.testing.assert_allclose(eigenvalues, updated_eigs, rtol=0.001)
        np.testing.assert_allclose(unique_var, variance, rtol=0.001)
_or_admin_required
def room_occupancy(request, location_slug, room_id, year):
    """Return a CSV attachment of monthly occupancy rows for one room/year.

    NOTE(review): a decorator fragment ``_or_admin_required`` precedes this
    view — presumably an admin-access decorator stripped by extraction.
    """
    room = get_object_or_404(Resource, id=room_id)
    year = int(year)
    response = HttpResponse(content_type='text/csv')
    output_filename = ('%s Occupancy Report %d.csv' % (room.name, year))
    response['Content-Disposition'] = ('attachment; filename=%s' % output_filename)
    writer = csv.writer(response)
    # Guard: the room must belong to the location named in the URL.
    if (room.location.slug != location_slug):
        writer.writerow(['invalid room'])
        return response
    writer.writerow([((str(year) + ' Report for ') + room.name)])
    writer.writerow(['Month', 'Year', 'Payments Cash', 'Payments Accrual', 'Nights Occupied', 'Nights Available', 'Partial Paid Bookings', 'Comped Nights', 'Outstanding Value', 'Total User Value', 'Net Value to House', 'Externalized Fees', 'Internal Fees', 'Comped Value'])
    # Out-of-range years return a header-only CSV rather than an error.
    if ((year < 2012) or (year > datetime.date.today().year)):
        return response
    # One row per calendar month.
    for month in range(1, 13):
        params = room_occupancy_month(room, month, year)
        writer.writerow(params)
    return response
_os(*metadata.platforms)
def main():
    """Run several obfuscated variants of the same benign PowerShell command.

    Each entry in ``commands`` is a differently-obfuscated encoding of
    "Write-Host 'This is my test command'" plus launching calc.exe, used to
    emulate obfuscated-command execution for detection testing; calc is
    killed afterwards.

    NOTE(review): a decorator fragment ``_os(*metadata.platforms)`` precedes
    this function — presumably an OS-restriction decorator; confirm.
    NOTE(review): the multi-line literal below appears to contain raw line
    breaks introduced by extraction; confirm against the original source.
    """
    commands = '\n .($env:public[13]+$env:public[5]+\'x\')("Write-Host \'This is my test command\' -ForegroundColor Green; start c:\\windows\\system32\\calc.exe")\n iex(((\'W\'+\'rite-Hos\'+\'t no\'+\'HThi\'+\'s\'+\' is\'+\' my test comma\'+\'n\'+\'dnoH\'+\' \'+\'-F\'+\'oregroundCol\'+\'or G\'+\'r\'+\'e\'+\'en\'+\'; start\'+\' c:z\'+\'R\'+\'d\'+\'window\'+\'szRdsystem\'+\'3\'+\'2zRdca\'+\'lc\').rEPlacE(([chaR]122+[chaR]82+[chaR]100),\'\\\').rEPlacE(\'noH\',[StrINg][chaR]39)))\n iex("W\'\'rite-H\'\'ost \'This is my test command\' -Fore\'\'grou\'\'ndC\'\'olor Gr\'\'een; start c:\\windows\\system32\\ca\'\'lc.ex\'\'e")\n iex("Write-Host \'This is my test command\' -ForegroundColor Green; start c:\\windows\\system32\\" + $env:public[-1] + "alc.exe")\n iex(((("{23}{7}{8}{16}{25}{9}{21}{18}{2}{5}{15}{11}{20}{24}{6}{12}{22}{17}{1}{13}{3}{10}{14}{19}{0}{4}" -f \'alc.ex\',\'ndowsSDUsyst\',\'dm4\',\'2\',\'e\',\'H\',\'r\',\'r\',\'ite-Ho\',\'This i\',\'S\',\'oregroundC\',\' Gr\',\'em3\',\'DU\',\' -F\',\'s\',\'rt c:SDUwi\',\'t comman\',\'c\',\'ol\',\'s my tes\',\'een; sta\',\'W\',\'o\',\'t m4H\')).rePlAce(([Char]109+[Char]52+[Char]72),[StrIng][Char]39).rePlAce(([Char]83+[Char]68+[Char]85),\'\\\')))\n i`ex("Write-Host \'This is my t`est co`mmand\' -ForegroundColor Gr`een; start c`:\\wind`ows\\syste`m32\\calc.e`xe")\n &( ([StrIng]$vERbosEpreFereNCE)[1,3]+\'x\'-JoiN\'\') ([char[]]( 105 , 101 ,120 ,40 ,34 , 87,114 ,105,116,101 ,45,72 , 111,115,116, 32 , 39,84, 104,105 ,115 ,32, 105 , 115, 32 , 109 ,121 ,32 , 116 ,101,115 ,116 , 32 , 99, 111,109 ,109, 97 , 110,100 , 39 ,32,45,70,111 ,114 ,101, 103 ,114 ,111 ,117 ,110, 100, 67 ,111 ,108 , 111,114,32, 71 ,114, 101, 101, 110, 59, 32, 115 ,116,97, 114,116, 32, 99,58 ,92 , 119,105 ,110 , 100 , 111 , 119 , 115,92 ,115 , 121 ,115, 116, 101,109, 51 , 50 , 92,99, 97, 108 , 99,46 , 101, 120,101 , 34,41) -jOIN\'\' )\n " $( SET-vARiAble \'ofs\' \'\' 
)"+[StRInG](\'69>65n78g28g22R57R72>69R74u65g2dR48M6fn73R74V20%27V54n68M69>73n20%69u73V20>6dV79>20V74M65%73g74>20M63M6fM6dn6dV61g6eR64>27M20M2d%46n6fM72M65M67>72>6fn75u6eV64>43g6fV6cM6fn72M20u47n72M65>65>6e%3bR20R73%74V61R72V74u20R63M3an5c%77%69g6e>64%6fg77n73u5cV73V79n73V74>65M6dn33%32V5cV63g61V6cg63%2eg65%78n65%22>29\'.spLiT(\'Mu>RV%gn\')| % { ( [chAr]([coNVErT]::tOINT16( ([sTRING]$_ ) ,16 ))) }) +" $(SET-Item \'vARiable:OFS\' \' \' ) " |& ( $verbOsePREFeRENce.tOstrING()[1,3]+\'X\'-JOIn\'\')\n ${ }= +$(); ${ } =${ }; ${ }= ++ ${ };${ }= ++${ }; ${ }=++ ${ };${ } = ++ ${ };${ }= ++ ${ }; ${ } = ++ ${ };${ }=++ ${ }; ${ }= ++ ${ };${ } =++ ${ }; ${ } ="[" + "$({} ) "[ ${ }] + "$({})"[ "${ }${ }" ]+"$( {} ) "["${ }${ }" ] + "$? "[${ }]+ "]";${ }= "".("$( { } ) "[ "${ }"+"${ }" ] + "$({})"["${ }" +"${ }"]+"$( { } )"[${ }]+ "$({ }) "[ ${ } ] +"$?"[${ }] + "$({ }) "[${ }] );${ } ="$({})"["${ }${ }" ]+ "$({})"[ ${ }] +"${ }"["${ }${ }"]; "${ }(${ }${ }${ }${ } + ${ }${ }${ }${ }+ ${ }${ }${ }${ } + ${ }${ }${ }+ ${ }${ }${ } + ${ }${ }${ } + ${ }${ }${ }${ }+ ${ }${ }${ }${ }+ ${ }${ }${ }${ } + ${ }${ }${ }${ } + ${ }${ }${ }+ ${ }${ }${ }+${ }${ }${ }${ } + ${ }${ }${ }${ } + ${ }${ }${ }${ } +${ }${ }${ }+ ${ }${ }${ }+ ${ }${ }${ }+${ }${ }${ }${ }+${ }${ }${ }${ }+${ }${ }${ }${ } +${ }${ }${ } + ${ }${ }${ }${ } + ${ }${ }${ }${ } + ${ }${ }${ }+${ }${ }${ }${ } +${ }${ }${ }${ }+ ${ }${ }${ } +${ }${ }${ }${ }+${ }${ }${ }${ }+${ }${ }${ }${ }+ ${ }${ }${ }${ }+ ${ }${ }${ }+ ${ }${ }${ }+${ }${ }${ }${ } +${ }${ }${ }${ } +${ }${ }${ }${ } + ${ }${ }${ } +${ }${ }${ }${ } +${ }${ }${ }${ }+ ${ }${ }${ } +${ }${ }${ } +${ }${ }${ }+${ }${ }${ }+${ }${ }${ }${ } + ${ }${ }${ }${ } + ${ }${ }${ }${ }+${ }${ }${ }${ } +${ }${ }${ }${ } + ${ }${ }${ }${ } +${ }${ }${ }${ } +${ }${ }${ }${ }+${ }${ }${ }${ }+ ${ }${ }${ } + ${ }${ }${ }${ } +${ }${ }${ }${ }+ ${ }${ }${ }${ } + ${ }${ }${ }${ } + ${ }${ }${ } + ${ }${ }${ }+ ${ }${ }${ }${ } +${ }${ }${ }${ }+${ }${ 
}${ }${ } + ${ }${ }${ }${ }+${ }${ }${ }+${ }${ }${ }+ ${ }${ }${ }${ }+${ }${ }${ }${ }+ ${ }${ }${ }+${ }${ }${ }${ }+${ }${ }${ }${ } + ${ }${ }${ }+ ${ }${ }${ } + ${ }${ }${ }+${ }${ }${ } +${ }${ }${ }${ }+${ }${ }${ }${ } + ${ }${ }${ }${ }+${ }${ }${ }${ }+ ${ }${ }${ }${ }+${ }${ }${ }${ }+${ }${ }${ }${ }+ ${ }${ }${ } +${ }${ }${ }${ } +${ }${ }${ }${ } + ${ }${ }${ }${ }+${ }${ }${ }${ }+ ${ }${ }${ }${ }+${ }${ }${ }${ }+${ }${ }${ } + ${ }${ }${ } + ${ }${ }${ }+${ }${ }${ } + ${ }${ }${ } +${ }${ }${ }${ }+ ${ }${ }${ } + ${ }${ }${ } + ${ }${ }${ }${ }+ ${ }${ }${ }${ } +${ }${ }${ }${ }+${ }${ }${ } + ${ }${ }${ } )"| &${ }\n '
    # One obfuscated command per line; run each, giving calc a moment to start.
    commands = [c.strip() for c in commands.splitlines()]
    for command in commands:
        common.execute(['powershell', '-c', command], shell=True)
        time.sleep(1)
    # Clean up both possible calculator process names.
    common.execute(['taskkill', '/F', '/im', 'calc.exe'])
    common.execute(['taskkill', '/F', '/im', 'calculator.exe'])
class NaturalKeyTest(TestBase):
    """Revisions for models whose relations serialize via natural keys."""

    def setUp(self):
        # Register the inline model so its FK serializes by natural key.
        reversion.register(TestModelInlineByNaturalKey, use_natural_foreign_keys=True)
        reversion.register(TestModelWithNaturalKey)

    def testNaturalKeyInline(self):
        with reversion.create_revision():
            inline = TestModelWithNaturalKey.objects.create()
            obj = TestModelInlineByNaturalKey.objects.create(test_model=inline)
        version = Version.objects.get_for_object(obj).get()
        expected_serialized = [{
            'fields': {'test_model': ['v1']},
            'model': 'test_app.testmodelinlinebynaturalkey',
            'pk': 1,
        }]
        # Serialized form uses the natural key ['v1'], not the raw pk...
        self.assertEqual(json.loads(version.serialized_data), expected_serialized)
        # ...while field_dict still exposes the plain foreign-key id.
        self.assertEqual(version.field_dict, {'test_model_id': 1, 'id': 1})
def format_document_to_dict(document: Document) -> List[dict]:
    """Flatten a Document AI ``Document`` into one summary dict per page.

    For every page, entities anchored to that page are collected:
    ``line_item`` entities become dicts appended to ``summary['line_items']``;
    every other entity type becomes a top-level key mapping to its normalized
    (or mention) text. A ``metadata`` entry records the 1-based page number.

    Returns:
        One summary dict per page, in page order.
    """
    extracted_data = []
    num_pages = len(Document.to_dict(document).get('pages'))
    for idx in range(num_pages):
        summary = {'line_items': []}
        for entity in document.entities:
            entity_dict = Document.Entity.to_dict(entity)
            # `or {}`/`or [{}]` guard against keys present but set to None.
            page_anchor = entity_dict.get('page_anchor', {}) or {}
            page_refs = page_anchor.get('page_refs', [{}]) or [{}]
            # Only keep entities anchored to the current page.
            if page_refs[0].get('page') != str(idx):
                continue
            entity_type = entity_dict['type_']  # renamed: don't shadow builtin `type`
            if entity_type == 'line_item':
                line_dict = {}
                # renamed loop var: don't shadow builtin `property`
                for prop in entity_dict.get('properties', []):
                    prop_type = prop.get('type_', '')
                    # Prefer the normalized text; fall back to the raw mention.
                    prop_value = (prop.get('normalized_value', {}).get('text') or prop.get('mention_text'))
                    line_dict[prop_type] = prop_value
                summary['line_items'].append(line_dict)
            else:
                summary[entity_type] = (entity_dict.get('normalized_value', {}).get('text') or entity_dict.get('mention_text'))
        summary['metadata'] = {'page_number': (idx + 1), 'invoice': (idx + 1)}
        extracted_data.append(summary)
    return extracted_data
def _read_csv(path: str, schema=None, read_options=None):
    """Load every file under *path* as CSV into a Spark DataFrame.

    When no schema is supplied, both the schema and the reader options are
    inferred via ``_load_spark_schema``.
    """
    if not schema:
        schema, read_options = _load_spark_schema(path=path)
    reader = get_spark_session().read.format('csv')
    # Apply any reader options (header, delimiter, ...) one by one.
    for opt_name, opt_value in (read_options or {}).items():
        reader = reader.option(opt_name, opt_value)
    return reader.load(os.sep.join([path, '*']), schema=schema)
class BaseSoC(SoCCore):
    # LiteX SoC targeting the Digilent Arty A7-35: clock/reset generator,
    # optional DDR3 controller, and an LED-ring peripheral driven from pin B7.
    def __init__(self, sys_clk_freq=int(.0), mode=mode.DOUBLE, **kwargs):
        # NOTE(review): `int(.0)` evaluates to 0 — almost certainly a garbled
        # default (Arty targets typically use e.g. int(100e6)); confirm
        # against the original source.
        platform = arty.Platform(variant='a7-35', toolchain='vivado')
        SoCCore.__init__(self, platform, sys_clk_freq, ident='LiteX SoC on Arty A7-35', **kwargs)
        self.submodules.crg = _CRG(platform, sys_clk_freq)
        # Only instantiate the DDR3 PHY + SDRAM controller when the SoC is
        # not configured with integrated main RAM.
        if (not self.integrated_main_ram_size):
            self.submodules.ddrphy = s7ddrphy.A7DDRPHY(platform.request('ddram'), memtype='DDR3', nphases=4, sys_clk_freq=sys_clk_freq)
            self.add_sdram('sdram', phy=self.ddrphy, module=MT41K128M16(sys_clk_freq, '1:4'), l2_cache_size=kwargs.get('l2_size', 8192))
        from litex.build.generic_platform import Pins, IOStandard
        # Expose FPGA pin B7 as the LED-ring data output ('do').
        platform.add_extension([('do', 0, Pins('B7'), IOStandard('LVCMOS33'))])
        led = RingControl(platform.request('do'), mode, 12, sys_clk_freq)
        self.submodules.ledring = led
        # Attach the LED-ring controller as a bus master.
        self.bus.add_master(name='ledring', master=self.ledring.bus)
class TestReplaceComponentIdsInAgentConfig(BaseTestReplaceComponentIds):
    """replace_component_ids must rewrite every reference to the old ids."""

    @classmethod
    def setup_class(cls):
        # Restored @classmethod (the extraction stripped decorators).
        # One-time fixture: build an AgentConfig that references the old
        # component ids everywhere, then run the replacement under test.
        cls.expected_custom_component_configuration = dict(foo='bar')
        cls.agent_config = AgentConfig(agent_name='agent_name', author='author', version='0.1.0', default_routing={str(cls.old_protocol_id): str(cls.old_connection_id)}, default_connection=str(cls.old_connection_id))
        cls.agent_config.protocols = {cls.old_protocol_id}
        cls.agent_config.contracts = {cls.old_contract_id}
        cls.agent_config.connections = {cls.old_connection_id}
        cls.agent_config.skills = {cls.old_skill_id}
        # A custom configuration per component type, all keyed by old ids.
        cls.agent_config.component_configurations[ComponentId(ComponentType.PROTOCOL, cls.old_protocol_id)] = cls.expected_custom_component_configuration
        cls.agent_config.component_configurations[ComponentId(ComponentType.CONTRACT, cls.old_contract_id)] = cls.expected_custom_component_configuration
        cls.agent_config.component_configurations[ComponentId(ComponentType.CONNECTION, cls.old_connection_id)] = cls.expected_custom_component_configuration
        cls.agent_config.component_configurations[ComponentId(ComponentType.SKILL, cls.old_skill_id)] = cls.expected_custom_component_configuration
        replace_component_ids(cls.agent_config, cls.replacements)

    def test_protocols_updated(self):
        assert (self.agent_config.protocols == {self.new_protocol_id})

    def test_contracts_updated(self):
        assert (self.agent_config.contracts == {self.new_contract_id})

    def test_connections_updated(self):
        assert (self.agent_config.connections == {self.new_connection_id})

    def test_skills_updated(self):
        assert (self.agent_config.skills == {self.new_skill_id})

    def test_default_connection_updated(self):
        assert (self.agent_config.default_connection == self.new_connection_id)

    def test_default_routing_updated(self):
        assert (self.agent_config.default_routing == {self.new_protocol_id: self.new_connection_id})

    def test_custom_configuration_updated(self):
        """Custom configurations must be re-keyed to the new component ids."""
        component_protocol_id = ComponentId(ComponentType.PROTOCOL, self.new_protocol_id)
        component_contract_id = ComponentId(ComponentType.CONTRACT, self.new_contract_id)
        component_connection_id = ComponentId(ComponentType.CONNECTION, self.new_connection_id)
        component_skill_id = ComponentId(ComponentType.SKILL, self.new_skill_id)
        assert (self.agent_config.component_configurations[component_protocol_id] == self.expected_custom_component_configuration)
        assert (self.agent_config.component_configurations[component_contract_id] == self.expected_custom_component_configuration)
        assert (self.agent_config.component_configurations[component_connection_id] == self.expected_custom_component_configuration)
        assert (self.agent_config.component_configurations[component_skill_id] == self.expected_custom_component_configuration)
def handle_settings_update(cmd: str):
    """Apply a single settings command of the form '<name> <value...>'.

    Updates the add-on `config` dict and mirrors the change into the UI where
    needed. NOTE(review): the final write_config() is placed at function
    level, persisting whichever branch ran — confirm original indentation.
    """
    name = cmd.split()[0]
    value = ' '.join(cmd.split()[1:])
    if (name == 'searchpane.zoom'):
        config[name] = float(value)
        UI._editor.web.setZoomFactor(float(value))
    elif (name == 'hideSidebar'):
        # Boolean-ish values arrive as 'true'/'on'.
        m = ((value == 'true') or (value == 'on'))
        config['hideSidebar'] = m
        UI.hideSidebar = m
        UI.js(("document.getElementById('searchInfo').classList.%s('hidden');" % ('add' if m else 'remove')))
    elif (name == 'removeDivsFromOutput'):
        m = ((value == 'true') or (value == 'on'))
        config[name] = m
        UI.remove_divs = m
    elif (name == 'results.hide_cloze_brackets'):
        m = ((value == 'true') or (value == 'on'))
        config[name] = m
        # Inverted: hiding brackets means not showing clozes.
        UI.show_clozes = (not m)
    elif (name == 'addonNoteDBFolderPath'):
        if ((value is not None) and (len(value.strip()) > 0)):
            # Normalize to forward slashes with a trailing '/'.
            value = value.replace('\\', '/')
            if (not value.endswith('/')):
                value += '/'
            old_val = config['addonNoteDBFolderPath']
            config['addonNoteDBFolderPath'] = value
            if (value != old_val):
                # Persist immediately and make sure a db file exists there.
                write_config()
                existed = create_db_file_if_not_exists()
                if existed:
                    ex = 'Created no new file, because there was already a <i>siac-notes.db</i> in that location.'
                else:
                    ex = 'Created an empty file there.'
                tooltip(f'Updated path to note .db file to <b>{value}</b>.<br>{ex}<br>If you have existing notes, replace that new file with your old file.', period=9000)
    elif (name == 'leftSideWidthInPercent'):
        config[name] = int(value)
        right = (100 - int(value))
        if check_index():
            UI.js(("document.getElementById('leftSide').style.width = '%s%%'; document.getElementById('siac-right-side').style.width = '%s%%';" % (value, right)))
    elif (name == 'showTimeline'):
        config[name] = ((value == 'true') or (value == 'on'))
        if (not config[name]):
            UI.js("document.getElementById('cal-row').style.display = 'none'; \n onWindowResize();")
        else:
            # Re-show the calendar row, rebuilding it if it was removed.
            UI.js(("\n if (document.getElementById('cal-row')) {\n document.getElementById('cal-row').style.display = 'block';\n } else {\n document.getElementById('bottomContainer').children[1].innerHTML = `%s`;\n $('.cal-block').mouseenter(function(event) { calBlockMouseEnter(event, this);});\n $('.cal-block').click(function(event) { displayCalInfo(this);});\n }\n onWindowResize();\n " % get_calendar_html()))
    elif (name == 'showTagInfoOnHover'):
        config[name] = ((value == 'true') or (value == 'on'))
        if ((not config[name]) and check_index()):
            UI.js('SIAC.State.showTagInfoOnHover = false;')
        elif (config[name] and check_index()):
            UI.js('SIAC.State.showTagInfoOnHover = true;')
    elif (name == 'tagHoverDelayInMiliSec'):
        config[name] = int(value)
        if check_index():
            UI.js(('SIAC.State.tagHoverTimeout = %s;' % value))
    elif (name == 'alwaysRebuildIndexIfSmallerThan'):
        config[name] = int(value)
    elif (name == 'pdfUrlImportSavePath'):
        if ((value is not None) and (len(value.strip()) > 0)):
            value = value.replace('\\', '/')
            if (not value.endswith('/')):
                value += '/'
            config['pdfUrlImportSavePath'] = value
    elif name.startswith('styles.'):
        # Style values take effect immediately.
        config[name] = value
        write_config()
        reload_styles()
        tooltip('Reloaded styles.')
    elif (name in ['notes.showSource', 'useInEdit', 'results.showFloatButton', 'results.showIDButton', 'results.showCIDButton']):
        config[name] = (value == 'true')
    # Persist whatever branch modified the config.
    write_config()
class ChainDatabaseAPI(HeaderDatabaseAPI):
    """Abstract chain-database interface: blocks, uncles, transactions,
    receipts, withdrawals, and raw key/value access.

    All methods are declared with ``...`` bodies for concrete databases to
    implement. NOTE(review): upstream declares these as ``@abstractmethod``;
    the decorators appear to have been stripped from this extraction.
    """

    def get_block_uncles(self, uncles_hash: Hash32) -> Tuple[BlockHeaderAPI, ...]:
        ...

    def persist_block(self, block: BlockAPI, genesis_parent_hash: Hash32 = None) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
        ...

    def persist_unexecuted_block(self, block: BlockAPI, receipts: Tuple[ReceiptAPI, ...], genesis_parent_hash: Hash32 = None) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
        # Fixed: this declaration was missing its `...` body, which made all
        # of the following method definitions nest inside it.
        ...

    def persist_uncles(self, uncles: Tuple[BlockHeaderAPI]) -> Hash32:
        ...

    def add_receipt(self, block_header: BlockHeaderAPI, index_key: int, receipt: ReceiptAPI) -> Hash32:
        ...

    def add_transaction(self, block_header: BlockHeaderAPI, index_key: int, transaction: SignedTransactionAPI) -> Hash32:
        ...

    def get_block_transactions(self, block_header: BlockHeaderAPI, transaction_decoder: Type[TransactionDecoderAPI]) -> Tuple[SignedTransactionAPI, ...]:
        ...

    def get_block_transaction_hashes(self, block_header: BlockHeaderAPI) -> Tuple[Hash32, ...]:
        ...

    def get_receipt_by_index(self, block_number: BlockNumber, receipt_index: int, receipt_decoder: Type[ReceiptDecoderAPI]) -> ReceiptAPI:
        ...

    def get_receipts(self, header: BlockHeaderAPI, receipt_decoder: Type[ReceiptDecoderAPI]) -> Tuple[ReceiptAPI, ...]:
        ...

    def get_transaction_by_index(self, block_number: BlockNumber, transaction_index: int, transaction_decoder: Type[TransactionDecoderAPI]) -> SignedTransactionAPI:
        ...

    def get_transaction_index(self, transaction_hash: Hash32) -> Tuple[BlockNumber, int]:
        ...

    def get_block_withdrawals(self, block_header: BlockHeaderAPI) -> Tuple[WithdrawalAPI, ...]:
        ...

    def exists(self, key: bytes) -> bool:
        ...

    def get(self, key: bytes) -> bytes:
        ...

    def persist_trie_data_dict(self, trie_data_dict: Dict[Hash32, bytes]) -> None:
        ...
class WebAppHandler(SimpleHTTPRequestHandler):
    """HTTP handler that routes every request through a CGI-style app object.

    NOTE(review): uses Python 2-only APIs (``urllib.unquote``,
    mimetools-style ``self.headers.getheader``/``typeheader``) — this module
    predates Python 3; confirm intended runtime.
    """
    # Subclasses set this to the application class to instantiate per request.
    APP_CLASS = None

    def do_POST(self):
        return self.run_cgi()

    def send_head(self):
        # GET/HEAD also go through the app instead of serving static files.
        return self.run_cgi()

    def run_cgi(self):
        """Build a CGI-style environ from the request and run APP_CLASS on it."""
        rest = self.path
        # Split off the query string (everything after the last '?').
        i = rest.rfind('?')
        if (i >= 0):
            (rest, query) = (rest[:i], rest[(i + 1):])
        else:
            query = ''
        # Split the leading path component as the "script" name.
        i = rest.find('/')
        if (i >= 0):
            (script, rest) = (rest[:i], rest[i:])
        else:
            (script, rest) = (rest, '')
        scriptname = ('/' + script)
        # NOTE(review): scriptfile is computed but never used below.
        scriptfile = self.translate_path(scriptname)
        # Assemble the CGI/1.1 environment for the app.
        env = {}
        env['SERVER_SOFTWARE'] = self.version_string()
        env['SERVER_NAME'] = self.server.server_name
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['SERVER_PROTOCOL'] = self.protocol_version
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_METHOD'] = self.command
        uqrest = urllib.unquote(rest)
        env['PATH_INFO'] = uqrest
        env['PATH_TRANSLATED'] = self.translate_path(uqrest)
        env['SCRIPT_NAME'] = scriptname
        if query:
            env['QUERY_STRING'] = query
        host = self.address_string()
        # Only set REMOTE_HOST when reverse lookup yields an actual name.
        if (host != self.client_address[0]):
            env['REMOTE_HOST'] = host
        env['REMOTE_ADDR'] = self.client_address[0]
        if (self.headers.typeheader is None):
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        # Collect Accept headers, honouring RFC 822 continuation lines
        # (lines starting with whitespace continue the previous header).
        accept = []
        for line in self.headers.getallmatchingheaders('accept'):
            if (line[:1] in '\t\n\r '):
                accept.append(line.strip())
            else:
                accept = (accept + line[7:].split(','))
        env['HTTP_ACCEPT'] = ','.join(accept)
        ua = self.headers.getheader('user-agent')
        if ua:
            env['HTTP_USER_AGENT'] = ua
        co = filter(None, self.headers.getheaders('cookie'))
        if co:
            env['HTTP_COOKIE'] = ', '.join(co)
        # CGI convention: these keys must exist even if empty.
        for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', 'HTTP_USER_AGENT', 'HTTP_COOKIE'):
            env.setdefault(k, '')
        # Hand the request streams and environ to the application.
        app = self.APP_CLASS(infp=self.rfile, outfp=self.wfile, environ=env)
        status = app.setup()
        self.send_response(status, responses[status])
        app.run()
        return
# Restored decorator: the extraction left only ".parametrize(...)", a syntax
# error; the fragment plus the test signature imply pytest parametrization.
@pytest.mark.parametrize('argument_names,expected', (
    ([], {'func_1', 'func_2', 'func_3', 'func_4'}),
    (['a'], {'func_2', 'func_3', 'func_4'}),
    (['a', 'c'], {'func_4'}),
    (['c'], {'func_4'}),
    (['b'], {'func_3', 'func_4'}),
))
def test_filter_by_arguments_1(argument_names, expected):
    """filter_by_argument_name keeps only ABI entries taking all given names."""
    actual_matches = filter_by_argument_name(argument_names, ABI)
    function_names = {match['name'] for match in actual_matches}
    assert (function_names == expected)
class TestICalendarParser:
    """Lookup behaviour of ICalendarParser.get_class / get_instance."""

    def test_get_class_finds_rie(self):
        assert ICalendarParser.get_class('rie') is not None

    def test_get_class_finds_ics(self):
        assert ICalendarParser.get_class('ics') is not None

    def test_get_class_does_not_find_unknown(self):
        assert ICalendarParser.get_class('unknown') is None

    def test_get_instance_returns_ICalendarParser(self):
        # Both known formats must yield a parser instance.
        for known_format in ('rie', 'ics'):
            assert isinstance(ICalendarParser.get_instance(known_format), ICalendarParser)

    def test_get_instance_returns_None(self):
        assert ICalendarParser.get_instance('unknown') is None
# Restored decorator: the fragment "_view(['GET'])" plus the DRF idioms below
# (request.query_params, Response) imply @api_view(['GET']).
@api_view(['GET'])
def org_codes(request, format=None):
    """Look up organisation data for the given codes and organisation types.

    Query params:
        q: comma-separable list of org codes.
        org_type: comma-separable list of org types.
        exact: non-empty string requests exact-code matching.
    """
    org_codes = utils.param_to_list(request.query_params.get('q', None))
    org_types = utils.param_to_list(request.query_params.get('org_type', None))
    is_exact = request.GET.get('exact', '')
    # Empty-string placeholders ensure the nested loops run at least once.
    if (not org_types):
        org_types = ['']
    if (not org_codes):
        org_codes = ['']
    data = []
    for org_code in org_codes:
        for org_type in org_types:
            data += _get_org_from_code(org_code, is_exact, org_type)
    return Response(data)
def test_policy_document_renders_to_json():
    """PolicyDocument.to_json must produce the canonical IAM JSON form."""
    describe_launch_configs = Statement(
        Effect='Allow',
        Action=[Action('autoscaling', 'DescribeLaunchConfigurations')],
        Resource=['*'],
    )
    assume_role = Statement(
        Effect='Allow',
        Action=[Action('sts', 'AssumeRole')],
        Resource=['arn:aws:iam:::role/someRole'],
    )
    pd = PolicyDocument(Version='2012-10-17', Statement=[describe_launch_configs, assume_role])
    expected_json = '{\n "Statement": [\n {\n "Action": [\n "autoscaling:DescribeLaunchConfigurations"\n ],\n "Effect": "Allow",\n "Resource": [\n "*"\n ]\n },\n {\n "Action": [\n "sts:AssumeRole"\n ],\n "Effect": "Allow",\n "Resource": [\n "arn:aws:iam:::role/someRole"\n ]\n }\n ],\n "Version": "2012-10-17"\n}'
    # Compare parsed structures so whitespace differences don't matter.
    assert json.loads(pd.to_json()) == json.loads(expected_json)
def period(action, config):
    """Return the filter elements used by period-based filtering of *action*."""
    elements = [
        filter_elements.unit(period=True),
        filter_elements.range_from(),
        filter_elements.range_to(),
        filter_elements.week_starts_on(),
        filter_elements.epoch(),
        filter_elements.exclude(),
        filter_elements.period_type(),
        filter_elements.date_from(),
        filter_elements.date_from_format(),
        filter_elements.date_to(),
        filter_elements.date_to_format(),
    ]
    # Index-level actions additionally support intersection filtering.
    if action in settings.index_actions():
        elements.append(filter_elements.intersect())
    return elements + _age_elements(action, config)
def execute(conn: sqlite3.Connection) -> None:
    """Migration step: create the WalletEvents table, bump the migration
    marker, and seed one SEED_BACKUP_REMINDER event per existing account."""
    conn.execute('CREATE TABLE IF NOT EXISTS WalletEvents (event_id INTEGER PRIMARY KEY,event_type INTEGER NOT NULL,event_flags INTEGER NOT NULL,account_id INTEGER,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL,FOREIGN KEY(account_id) REFERENCES Accounts (account_id))')
    date_updated = int(time.time())
    conn.execute('UPDATE WalletData SET value=?, date_updated=? WHERE key=?', [json.dumps(MIGRATION), date_updated, 'migration'])
    account_rows = list(conn.execute('SELECT * FROM Accounts'))
    # Event ids are assigned sequentially starting at 1.
    next_event_id = 1
    for account_row in account_rows:
        conn.execute(
            'INSERT INTO WalletEvents (event_id, event_type, event_flags, account_id, date_created, date_updated) VALUES (?, ?, ?, ?, ?, ?)',
            (next_event_id, WalletEventType.SEED_BACKUP_REMINDER, WalletEventFlag.UNREAD | WalletEventFlag.FEATURED, account_row[0], date_updated, date_updated),
        )
        next_event_id += 1
    # Only persist the counter when at least one event was inserted.
    if next_event_id > 1:
        conn.execute('INSERT INTO WalletData (key, value, date_created, date_updated) VALUES (?, ?, ?, ?)', ('next_wallet_event_id', json.dumps(next_event_id), date_updated, date_updated))
def emitMetric(identifier='PyTorchObserver', **kwargs):
    """Format one observer metric line: '<identifier> <json payload>'.

    Requires 'type', 'metric' and 'unit' in kwargs. The payload then carries
    exactly one of: 'value', 'info_string', or 'num_runs' plus an 8-element
    'summary'. Returns '' when the inputs fit none of those shapes.
    """
    required = ('type', 'metric', 'unit')
    if any(key not in kwargs for key in required):
        return ''
    data = {key: kwargs[key] for key in required}

    def render():
        # Shared formatting for every successful branch.
        return '{} {}'.format(identifier, json.dumps(data))

    if 'value' in kwargs:
        data['value'] = kwargs['value']
        return render()
    if 'info_string' in kwargs:
        data['info_string'] = kwargs['info_string']
        return render()
    if 'num_runs' in kwargs and 'summary' in kwargs and len(kwargs['summary']) == 8:
        data['num_runs'] = kwargs['num_runs']
        # Fixed positional layout of the summary statistics.
        percentile_index = {'p0': 0, 'p10': 1, 'p50': 2, 'p90': 3, 'p100': 4, 'mean': 5, 'stdev': 6, 'MAD': 7}
        data['summary'] = {stat: kwargs['summary'][pos] for (stat, pos) in percentile_index.items()}
        return render()
    return ''
class OptionSeriesSplineSonificationTracksMappingVolume(Options):
    """Option proxy for `series.spline.sonification.tracks.mapping.volume`.

    Fixed: each getter/setter pair used the same bare method name, so the
    second definition silently replaced the first. The duplicate-name
    pattern is the standard @property/@x.setter idiom with the decorators
    stripped; they are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def dumpxml(out, obj, mode=None):
    """Write *obj* to *out* as ad-hoc XML.

    Dispatches on the runtime type: None, dicts, lists, byte strings, PDF
    stream objects, object references, PostScript keywords/literals and
    numbers.  *mode* only affects how stream payloads are emitted ('raw',
    'binary', 'text' or None).  Raises TypeError for unsupported types.
    """
    if obj is None:
        out.write('<null />')
    elif isinstance(obj, dict):
        out.write('<dict size="%d">\n' % len(obj))
        for (key, value) in obj.items():
            out.write('<key>%s</key>\n' % key)
            out.write('<value>')
            dumpxml(out, value)  # nested values always use the default mode
            out.write('</value>\n')
        out.write('</dict>')
    elif isinstance(obj, list):
        out.write('<list size="%d">\n' % len(obj))
        for item in obj:
            dumpxml(out, item)
            out.write('\n')
        out.write('</list>')
    elif isinstance(obj, bytes):
        out.write('<string size="%d">%s</string>' % (len(obj), encode(obj)))
    elif isinstance(obj, PDFStream):
        if mode == 'raw':
            # Raw/binary modes bypass XML and write straight to the byte buffer.
            out.buffer.write(obj.get_rawdata())
        elif mode == 'binary':
            out.buffer.write(obj.get_data())
        else:
            out.write('<stream>\n<props>\n')
            dumpxml(out, obj.attrs)
            out.write('\n</props>\n')
            if mode == 'text':
                data = obj.get_data()
                out.write('<data size="%d">%s</data>\n' % (len(data), encode(data)))
            out.write('</stream>')
    elif isinstance(obj, PDFObjRef):
        out.write('<ref id="%d" />' % obj.objid)
    elif isinstance(obj, PSKeyword):
        out.write('<keyword>%s</keyword>' % obj.name)
    elif isinstance(obj, PSLiteral):
        out.write('<literal>%s</literal>' % obj.name)
    elif isnumber(obj):
        out.write('<number>%s</number>' % obj)
    else:
        raise TypeError(obj)
# NOTE(review): the three bare statements below look like remnants of
# stripped view decorators (e.g. "@..._required" / "@require_POST"); as
# plain expressions they would raise NameError at import time.  Restore the
# original decorators from version control before using this module.
_required
_required
_POST
def multi_settings_form(request):
    """Admin-only bulk action endpoint over a selection of VMs.

    Currently only handles action == 'delete': each selected hostname is
    resolved to a VM and removed through the admin settings form API.
    Redirects back to the node VM list (when ?node= is given) or the global
    VM list on success.
    """
    if (not request.user.is_admin(request)):
        raise PermissionDenied
    if (request.POST['action'] == 'delete'):
        for hostname in request.POST.getlist('hostname'):
            vm = get_vm(request, hostname, auto_dc_switch=False)
            res = AdminServerSettingsForm.api_call('delete', vm, request, args=(hostname,))
            # Abort on the first failed deletion, propagating the API error.
            if (res.status_code != 200):
                return JSONResponse(res.data, status=res.status_code)
    node = request.GET.get('node', None)
    if node:
        return redirect('node_vms', node)
    else:
        return redirect('vm_list')
class View(ScrollableControl):
    """A page "screen" bound to a route: hosts the page controls plus
    optional chrome (app bars, FAB, navigation bar, drawers).

    NOTE(review): the @property / @<name>.setter decorators were evidently
    stripped from this file (remnants such as ``_appbar.setter`` were left
    behind); without them the duplicated method names shadow each other and
    the private backing fields used by _get_children() are never populated.
    They are restored here.
    """

    def __init__(self, route: Optional[str]=None, controls: Optional[List[Control]]=None, appbar: Union[(AppBar, CupertinoAppBar, None)]=None, bottom_appbar: Optional[BottomAppBar]=None, floating_action_button: Optional[FloatingActionButton]=None, floating_action_button_location: Optional[FloatingActionButtonLocation]=None, navigation_bar: Union[(NavigationBar, CupertinoNavigationBar, None)]=None, drawer: Optional[NavigationDrawer]=None, end_drawer: Optional[NavigationDrawer]=None, vertical_alignment: MainAxisAlignment=MainAxisAlignment.NONE, horizontal_alignment: CrossAxisAlignment=CrossAxisAlignment.NONE, spacing: OptionalNumber=None, padding: PaddingValue=None, bgcolor: Optional[str]=None, scroll: Optional[ScrollMode]=None, auto_scroll: Optional[bool]=None, fullscreen_dialog: Optional[bool]=None, on_scroll_interval: OptionalNumber=None, on_scroll: Any=None):
        Control.__init__(self)
        ScrollableControl.__init__(self, scroll=scroll, auto_scroll=auto_scroll, on_scroll_interval=on_scroll_interval, on_scroll=on_scroll)
        # Assignments below go through the property setters so that both the
        # serialized attributes and the private backing fields stay in sync.
        self.controls = (controls if (controls is not None) else [])
        self.route = route
        self.appbar = appbar
        self.bottom_appbar = bottom_appbar
        self.navigation_bar = navigation_bar
        self.drawer = drawer
        self.end_drawer = end_drawer
        self.floating_action_button = floating_action_button
        self.floating_action_button_location = floating_action_button_location
        self.vertical_alignment = vertical_alignment
        self.horizontal_alignment = horizontal_alignment
        self.spacing = spacing
        self.padding = padding
        self.bgcolor = bgcolor
        self.scroll = scroll
        self.auto_scroll = auto_scroll
        self.fullscreen_dialog = fullscreen_dialog

    def _get_control_name(self):
        return 'view'

    def _before_build_command(self):
        super()._before_build_command()
        self._set_attr_json('padding', self.__padding)

    def _get_children(self):
        # Chrome controls are serialized as children ahead of the page
        # controls; drawers are tagged ('start'/'end') so the frontend knows
        # which side each attaches to.
        children = []
        if self.__appbar:
            children.append(self.__appbar)
        if self.__bottom_appbar:
            children.append(self.__bottom_appbar)
        if self.__fab:
            children.append(self.__fab)
        if self.__navigation_bar:
            children.append(self.__navigation_bar)
        if self.__drawer:
            self.__drawer._set_attr_internal('n', 'start')
            children.append(self.__drawer)
        if self.__end_drawer:
            self.__end_drawer._set_attr_internal('n', 'end')
            children.append(self.__end_drawer)
        children.extend(self.__controls)
        return children

    @property
    def route(self):
        return self._get_attr('route')

    @route.setter
    def route(self, value):
        self._set_attr('route', value)

    @property
    def controls(self) -> List[Control]:
        return self.__controls

    @controls.setter
    def controls(self, value: List[Control]):
        self.__controls = value

    @property
    def appbar(self) -> Union[(AppBar, CupertinoAppBar, None)]:
        return self.__appbar

    @appbar.setter
    def appbar(self, value: Union[(AppBar, CupertinoAppBar, None)]):
        self.__appbar = value

    @property
    def bottom_appbar(self) -> Optional[BottomAppBar]:
        return self.__bottom_appbar

    @bottom_appbar.setter
    def bottom_appbar(self, value: Optional[BottomAppBar]):
        self.__bottom_appbar = value

    @property
    def floating_action_button(self) -> Optional[FloatingActionButton]:
        return self.__fab

    @floating_action_button.setter
    def floating_action_button(self, value: Optional[FloatingActionButton]):
        self.__fab = value

    @property
    def floating_action_button_location(self) -> FloatingActionButtonLocation:
        return self.__floating_action_button_location

    @floating_action_button_location.setter
    def floating_action_button_location(self, value: FloatingActionButtonLocation):
        self.__floating_action_button_location = value
        self._set_attr('floatingActionButtonLocation', (value.value if isinstance(value, FloatingActionButtonLocation) else value))

    @property
    def navigation_bar(self) -> Union[(NavigationBar, CupertinoNavigationBar, None)]:
        return self.__navigation_bar

    @navigation_bar.setter
    def navigation_bar(self, value: Union[(NavigationBar, CupertinoNavigationBar, None)]):
        self.__navigation_bar = value

    @property
    def drawer(self) -> Optional[NavigationDrawer]:
        return self.__drawer

    @drawer.setter
    def drawer(self, value: Optional[NavigationDrawer]):
        self.__drawer = value

    @property
    def end_drawer(self) -> Optional[NavigationDrawer]:
        return self.__end_drawer

    @end_drawer.setter
    def end_drawer(self, value: Optional[NavigationDrawer]):
        self.__end_drawer = value

    @property
    def horizontal_alignment(self) -> CrossAxisAlignment:
        return self.__horizontal_alignment

    @horizontal_alignment.setter
    def horizontal_alignment(self, value: CrossAxisAlignment):
        self.__horizontal_alignment = value
        if isinstance(value, CrossAxisAlignment):
            self._set_attr('horizontalAlignment', value.value)
        else:
            self.__set_horizontal_alignment(value)

    def __set_horizontal_alignment(self, value: CrossAxisAlignmentString):
        self._set_attr('horizontalAlignment', value)

    @property
    def vertical_alignment(self) -> MainAxisAlignment:
        return self.__vertical_alignment

    @vertical_alignment.setter
    def vertical_alignment(self, value: MainAxisAlignment):
        self.__vertical_alignment = value
        if isinstance(value, MainAxisAlignment):
            self._set_attr('verticalAlignment', value.value)
        else:
            self.__set_vertical_alignment(value)

    def __set_vertical_alignment(self, value: MainAxisAlignmentString):
        self._set_attr('verticalAlignment', value)

    @property
    def spacing(self) -> OptionalNumber:
        return self._get_attr('spacing')

    @spacing.setter
    def spacing(self, value: OptionalNumber):
        self._set_attr('spacing', value)

    @property
    def padding(self) -> PaddingValue:
        return self.__padding

    @padding.setter
    def padding(self, value: PaddingValue):
        self.__padding = value

    @property
    def bgcolor(self):
        return self._get_attr('bgcolor')

    @bgcolor.setter
    def bgcolor(self, value):
        self._set_attr('bgcolor', value)

    @property
    def fullscreen_dialog(self) -> Optional[bool]:
        return self._get_attr('fullscreenDialog', data_type='bool', def_value=False)

    @fullscreen_dialog.setter
    def fullscreen_dialog(self, value: Optional[bool]):
        self._set_attr('fullscreenDialog', value)
class TestAddQueryParamInUrl():
    """Unit tests for add_query_param_in_url().

    NOTE(review): every URL literal in this class was truncated to a bare
    ``'`` (the original values, presumably http(s) URLs, were stripped in
    extraction), leaving unterminated string literals -- this class cannot
    parse as written.  Recover the URL fixtures from version control.
    """
    def test_add_query_param_in_url_empty_query_string(self):
        url = '
        query_params = {'param1': 'value1', 'param2': 'value2'}
        expected_url = '
        output_url = add_query_param_in_url(url, query_params)
        assert (output_url == expected_url), f'Expected `{expected_url}` for (url={url}, query={query_params}) but got `{output_url}'
    def test_add_query_param_in_url_existing_query_string(self):
        url = '
        query_params = {'param1': 'value1', 'param2': 'value2'}
        expected_url = '
        output_url = add_query_param_in_url(url, query_params)
        assert (output_url == expected_url), f'Expected `{expected_url}` for (url={url}, query={query_params}) but got `{output_url}'
    def test_add_query_param_in_url_none_value(self):
        url = '
        query_params = {'param1': 'value1', 'param2': None}
        expected_url = '
        output_url = add_query_param_in_url(url, query_params)
        assert (output_url == expected_url), f'Expected `{expected_url}` for (url={url}, query={query_params}) but got `{output_url}'
    def test_add_query_param_in_url_empty_query_param(self):
        url = '
        query_params = {}
        expected_url = '
        output_url = add_query_param_in_url(url, query_params)
        assert (output_url == expected_url), f'Expected `{expected_url}` for (url={url}, query={query_params}) but got `{output_url}'
    def test_add_none_param_in_url_empty_query_param(self):
        url = '
        query_params = None
        expected_url = '
        output_url = add_query_param_in_url(url, query_params)
        assert (output_url == expected_url), f'Expected `{expected_url}` for (url={url}, query={query_params}) but got `{output_url}'
    def test_add_query_param_in_none_url(self):
        url = None
        query_params = {'param1': 'value1', 'param2': 'value2'}
        expected_url = None
        output_url = add_query_param_in_url(url, query_params)
        assert (output_url == expected_url), f'Expected `{expected_url}` for (url={url}, query={query_params}) but got `{output_url}'
class LunaStringPlaceCard(GenericAction):
    """Detach the card wrapped by a LunaString skill and place it on the
    deck, front or back, as chosen by the target player."""

    def __init__(self, target, card):
        self.source = self.target = target
        self.card = card

    def apply_action(self):
        game = self.game
        player = self.target
        # Ask the player which end of the deck the card goes to.
        chosen_direction = game.user_input([player], ChooseOptionInputlet(self, ('front', 'back')))
        skill = self.card
        assert skill.is_card(LunaString)
        wrapped_card = skill.associated_cards[0]
        # Detach the wrapped card from the skill before moving it.
        skill.associated_cards[:] = []
        skill.cost_detached = True
        self.direction = chosen_direction
        migrate_cards([wrapped_card], game.deck.cards, unwrap=True, direction=chosen_direction)
        ttags(player)['luna_string_used'] = True
        return True
class OptionSeriesArcdiagramSonificationContexttracksMappingTremolo(Options):
    """Tremolo mapping options (depth/speed) for arcdiagram sonification
    context tracks.

    NOTE(review): sibling option classes in this file pair getters with
    setters via @property decorators that appear to have been stripped;
    these two accessors may likewise have originally been properties --
    confirm against the upstream source.
    """
    def depth(self) -> 'OptionSeriesArcdiagramSonificationContexttracksMappingTremoloDepth':
        # Returns the nested "depth" sub-options object via _config_sub_data.
        return self._config_sub_data('depth', OptionSeriesArcdiagramSonificationContexttracksMappingTremoloDepth)
    def speed(self) -> 'OptionSeriesArcdiagramSonificationContexttracksMappingTremoloSpeed':
        # Returns the nested "speed" sub-options object via _config_sub_data.
        return self._config_sub_data('speed', OptionSeriesArcdiagramSonificationContexttracksMappingTremoloSpeed)
class set_config(message):
    """OpenFlow v1.3 OFPT_SET_CONFIG message (version=4, type=9).

    Carries the switch fragmentation-handling ``flags`` and
    ``miss_send_len`` (how many bytes of a packet are sent to the
    controller on a table miss).
    """
    version = 4
    type = 9

    def __init__(self, xid=None, flags=None, miss_send_len=None):
        # xid stays None when unset; the numeric fields default to 0.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.miss_send_len = miss_send_len if miss_send_len is not None else 0

    def pack(self):
        """Serialize this message to its wire format and return it as bytes."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!H', self.miss_send_len))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        # BUG FIX: struct.pack returns bytes on Python 3, so joining with a
        # str separator ('') raised TypeError; join as bytes instead.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a set_config message from *reader* and return it.

        NOTE(review): restored the stripped @staticmethod decorator -- the
        function takes no ``self``/``cls`` parameter.
        """
        obj = set_config()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 9)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's advertised length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.flags = reader.read('!H')[0]
        obj.miss_send_len = reader.read('!H')[0]
        return obj

    def __eq__(self, other):
        # NOTE: __eq__ without __hash__ leaves instances unhashable, as in
        # the original generated code.
        if type(self) != type(other):
            return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.miss_send_len == other.miss_send_len)

    def pretty_print(self, q):
        """Render a human-readable dump of this message onto pretty-printer *q*."""
        q.text('set_config {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid is not None:
                    q.text('%#x' % self.xid)
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {0: 'OFPC_FRAG_NORMAL', 1: 'OFPC_FRAG_DROP', 2: 'OFPC_FRAG_REASM', 3: 'OFPC_FRAG_MASK'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('miss_send_len = ')
                q.text('%#x' % self.miss_send_len)
            q.breakable()
        q.text('}')
def aggregate_rsem(fnames):
    """Combine per-sample RSEM gene-level output files.

    Returns a tuple of (counts DataFrame indexed by gene_id with one column
    per sample, Series of per-gene lengths averaged across the inputs).
    Raises RuntimeError when the inputs do not all have the same row count.
    """
    expected_rows = None
    counts_by_sample = {}
    per_file_lengths = []
    length_colname = 'length'
    for path in fnames:
        table = pd.read_csv(path, sep='\t', usecols=['gene_id', length_colname, 'expected_count'], converters={'gene_id': rna.before('.')}).set_index('gene_id')
        # All inputs must cover the same gene set; compare row counts.
        if expected_rows is None:
            expected_rows = len(table)
        elif len(table) != expected_rows:
            raise RuntimeError('Number of rows in each input file is not equal')
        # Sample id is the file basename up to the first dot.
        sample_id = rna.before('.')(os.path.basename(path))
        counts_by_sample[sample_id] = table.expected_count.fillna(0)
        per_file_lengths.append(table[length_colname])
    sample_counts = pd.DataFrame(counts_by_sample)
    tx_lengths = pd.Series(np.vstack(per_file_lengths).mean(axis=0), index=sample_counts.index)
    return (sample_counts, tx_lengths)
@pytest.mark.django_db
def test_correct_response(client, monkeypatch, elasticsearch_transaction_index, basic_award, subagency_award):
    """Spending-by-category (funding_agency) endpoint returns the expected
    aggregation for the seeded award fixtures.

    NOTE(review): the decorator line survived only as ``.django_db``; it is
    restored as @pytest.mark.django_db per standard pytest-django usage.
    """
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    resp = client.post('/api/v2/search/spending_by_category/funding_agency', content_type='application/json', data=json.dumps({'filters': {'time_period': [{'start_date': '2018-10-01', 'end_date': '2020-09-30'}]}}))
    expected_response = {'category': 'funding_agency', 'limit': 10, 'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False}, 'results': [{'amount': 10.0, 'name': 'Funding Toptier Agency 2', 'code': 'TA2', 'id': 1002}, {'amount': 5.0, 'name': 'Funding Toptier Agency 4', 'code': 'TA4', 'id': 1004}], 'messages': [get_time_period_message()]}
    assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response'
    assert (resp.json() == expected_response)
class GameUtility():
    """Poker hand-evaluation helper for one board/our-hand matchup against a
    randomly sampled opponent hand.

    NOTE(review): ``opp_hand`` is consumed attribute-style in get_winner(),
    so its stripped @property decorator is restored here.
    """

    def __init__(self, our_hand: np.ndarray, board: np.ndarray, cards: np.ndarray):
        self._evaluator = Evaluator()
        # Cards on the board or in our hand cannot be dealt to the opponent.
        unavailable_cards = np.concatenate([board, our_hand], axis=0)
        self.available_cards = np.array([c for c in cards if (c not in unavailable_cards)])
        self.our_hand = our_hand
        self.board = board

    def evaluate_hand(self, hand: np.ndarray) -> int:
        """Score *hand* against the board using the Evaluator."""
        # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin int is the documented replacement.
        return self._evaluator.evaluate(board=self.board.astype(int).tolist(), cards=hand.astype(int).tolist())

    def get_winner(self) -> int:
        """Return 0 when our rank is greater, 1 when the opponent's is, 2 on a tie."""
        our_hand_rank = self.evaluate_hand(self.our_hand)
        opp_hand_rank = self.evaluate_hand(self.opp_hand)
        if (our_hand_rank > opp_hand_rank):
            return 0
        elif (our_hand_rank < opp_hand_rank):
            return 1
        else:
            return 2

    @property
    def opp_hand(self) -> List[int]:
        """Sample a random 2-card opponent hand (no replacement) from the
        remaining deck."""
        return np.random.choice(self.available_cards, 2, replace=False)
def make_group(indexer, estimators, preprocessing, learner_kwargs=None, transformer_kwargs=None, name=None):
    """Wire estimators and preprocessing cases into a Group.

    Each preprocessing case becomes a Transformer wrapping a Pipeline, and
    each (case, name, estimator) triple becomes a Learner bound to its
    preprocessing case; all are attached to the given indexer.
    """
    (preprocessing, estimators) = check_instances(estimators, preprocessing)
    learner_kwargs = {} if learner_kwargs is None else learner_kwargs
    transformer_kwargs = {} if transformer_kwargs is None else transformer_kwargs
    transformers = []
    for (case_name, pipe) in preprocessing:
        transformers.append(Transformer(estimator=Pipeline(pipe, return_y=True), name=case_name, **transformer_kwargs))
    learners = []
    for (case_name, learner_name, est) in estimators:
        learners.append(Learner(estimator=est, preprocess=case_name, name=learner_name, **learner_kwargs))
    return Group(indexer=indexer, learners=learners, transformers=transformers, name=name)
# NOTE(review): the two bare lines below are remnants of stripped task
# decorators (likely a task registration taking name=/base= and a
# "*_task(log_exception=False)" wrapper); as written they are invalid
# syntax -- restore the originals from version control.
(name='api.mon.base.tasks.mon_hostgroup_get', base=MgmtTask)
_task(log_exception=False)
def mon_hostgroup_get(task_id, dc_id, hostgroup_name, dc_bound=True, **kwargs):
    """Fetch monitoring hostgroup details for a datacenter.

    Raises MgmtTaskException when the hostgroup does not exist on the
    monitoring server.
    """
    dc = Dc.objects.get_by_id(int(dc_id))
    mon = get_monitoring(dc)
    try:
        return mon.hostgroup_detail(hostgroup_name, dc_bound=dc_bound)
    except RemoteObjectDoesNotExist as exc:
        raise MgmtTaskException(exc.detail)
class GELU(Module):
    """GELU activation module; 'tanh' approximation selects the fast kernel,
    anything else the exact one."""

    def __init__(self, approximate: str='none'):
        super().__init__()
        self.approximate = approximate

    def forward(self, *args):
        assert (len(args) == 1)
        (input_val,) = args
        # Pick the elementwise kernel based on the configured approximation.
        func = FuncEnum.FASTGELU if (self.approximate == 'tanh') else FuncEnum.GELU
        return elementwise(func)(input_val)
def get_candidate_list(kwargs):
    """Build the distinct candidate-history subquery plus its 'cycle' column.

    When election_full is set, the cycle is the election year rounded up to
    an even year; otherwise it is the two-year period.
    """
    candidate_ids = kwargs.get('candidate_id')
    # No id filter (True) when no candidate_id values were supplied.
    id_filter = CandidateHistory.candidate_id.in_(candidate_ids) if candidate_ids else True
    candidate = (
        db.session.query(
            CandidateHistory.candidate_id.label('candidate_id'),
            CandidateHistory.two_year_period.label('two_year_period'),
            CandidateHistory.candidate_election_year.label('candidate_election_year'),
        )
        .filter(id_filter)
        .distinct()
        .subquery()
    )
    if kwargs.get('election_full'):
        cycle_expr = candidate.c.candidate_election_year + (candidate.c.candidate_election_year % 2)
    else:
        cycle_expr = candidate.c.two_year_period
    return (cycle_expr.label('cycle'), candidate)
def main(same_annotation_for_both_str: str, same_annotation_for_both_converter: pathlib.Path, *, optional_value: typing.Annotated[(typing.Optional[int], Clize[int])]=None, optional_parameter: typing.Annotated[(int, Clize[int])]=1, aliased: typing.Annotated[(int, Clize['n'])], file_opener: typing.Annotated[(typing.Any, Clize[converters.file()])]):
    """Demo entry point exercising the various Clize annotation styles."""
    joined = same_annotation_for_both_str.join(['abc'])
    converter_exists = same_annotation_for_both_converter.exists()
    bumped_optional = (optional_value + 1) if (optional_value is not None) else 0
    print(joined, converter_exists, bumped_optional, (optional_parameter + 1), (aliased + 1), file_opener)
class MycobotTest(object):
    """Tkinter factory-test GUI for a MyCobot robot arm.

    Offers buttons to connect/disconnect over a serial port, check and
    calibrate servos, drive the Atom LED color, run a long aging (burn-in)
    motion test on a background thread, and exercise basic/Atom I/O pins;
    results are appended to a scrolling log pane.

    NOTE(review): most button/label caption strings are empty or
    punctuation-only -- the original (apparently non-ASCII) text was
    evidently stripped from this file.
    """
    def __init__(self):
        # Build the whole window: controls in the left grid columns, the
        # scrolling log pane in column 12.
        self.mycobot = None
        self.win = tkinter.Tk()
        self.win.title(' Mycobot ')
        self.win.geometry('918x480+10+10')
        self.port_label = tkinter.Label(self.win, text=':')
        self.port_label.grid(row=0)
        # Serial port picker; the list refreshes every time it is opened.
        self.port_list = ttk.Combobox(self.win, width=15, postcommand=self.get_serial_port_list)
        self.get_serial_port_list()
        self.port_list.current(0)
        self.port_list.grid(row=0, column=1)
        self.baud_label = tkinter.Label(self.win, text=':')
        self.baud_label.grid(row=1)
        self.baud_list = ttk.Combobox(self.win, width=15)
        self.baud_list['value'] = ('1000000', '115200')
        self.baud_list.current(1)
        self.baud_list.grid(row=1, column=1)
        self.connect_label = tkinter.Label(self.win, text='mycobot:')
        self.connect_label.grid(row=2)
        self.connect = tkinter.Button(self.win, text='', command=self.connect_mycobot)
        self.disconnect = tkinter.Button(self.win, text='', command=self.disconnect_mycobot)
        self.connect.grid(row=3)
        self.disconnect.grid(row=3, column=1)
        self.check_label = tkinter.Label(self.win, text=':')
        self.check_label.grid(row=4)
        self.check_btn = tkinter.Button(self.win, text='', command=self.check_mycobot_servos)
        self.check_btn.grid(row=4, column=1)
        # Counter for the step-by-step servo calibration routine.
        self.calibration_num = None
        self.calibration_label = tkinter.Label(self.win, text=':')
        self.calibration_label.grid(row=5)
        self.calibration_btn = tkinter.Button(self.win, text='', command=self.calibration_mycobot)
        self.calibration_btn.grid(row=5, column=1)
        self.set_color_label = tkinter.Label(self.win, text='Atom:')
        self.set_color_label.grid(row=6, columnspan=2)
        self.color_red = tkinter.Button(self.win, text='', command=(lambda : self.send_color('red')))
        self.color_green = tkinter.Button(self.win, text='', command=(lambda : self.send_color('green')))
        self.color_red.grid(row=7)
        self.color_green.grid(row=7, column=1)
        # Flag polled by the aging-test worker thread to request shutdown.
        self.aging_stop = False
        self.movement_label = tkinter.Label(self.win, text=':')
        self.movement_label.grid(row=8)
        self.start_btn = tkinter.Button(self.win, text='', command=self.start_aging_test)
        self.start_btn.grid(row=9)
        self.stop_btn = tkinter.Button(self.win, text='', command=self.stop_aging_test)
        self.stop_btn.grid(row=9, column=1)
        self.release_btn = tkinter.Button(self.win, text='', command=self.release_mycobot)
        self.release_btn.grid(row=10)
        self.focus_btn = tkinter.Button(self.win, text='', command=self.focus_mycobot)
        self.focus_btn.grid(row=10, column=1)
        self.test_IO_label = tkinter.Label(self.win, text='I/O:')
        self.test_IO_label.grid(row=11)
        self.test_basic_btn = tkinter.Button(self.win, text='I/O', command=self.test_basic)
        self.test_atom_btn = tkinter.Button(self.win, text='I/O', command=self.test_atom)
        self.test_basic_btn.grid(row=12)
        self.test_atom_btn.grid(row=12, column=1)
        self.log_label = tkinter.Label(self.win, text=':')
        self.log_label.grid(row=0, column=12)
        # Log pane: a Text widget with a vertical scrollbar in its own frame.
        _f = tkinter.Frame(self.win)
        _bar = tkinter.Scrollbar(_f, orient=tkinter.VERTICAL)
        self.log_data_Text = tkinter.Text(_f, width=100, height=35, yscrollcommand=_bar.set)
        _bar.pack(side=tkinter.RIGHT, fill=tkinter.Y)
        _bar.config(command=self.log_data_Text.yview)
        self.log_data_Text.pack()
        _f.grid(row=1, column=12, rowspan=15, columnspan=10)
    def run(self):
        """Enter the Tk main loop; returns when the window is closed."""
        self.win.mainloop()
        # NOTE(review): dead code -- aging_stop is forced False and then
        # immediately tested, so this join can never execute.
        self.aging_stop = False
        if self.aging_stop:
            self.aging.join()
    def connect_mycobot(self):
        """Open the selected serial port/baud and handshake with the robot."""
        self.prot = port = self.port_list.get()
        if (not port):
            self.write_log_to_Text('')
            return
        self.baud = baud = self.baud_list.get()
        if (not baud):
            self.write_log_to_Text('')
            return
        baud = int(baud)
        try:
            self.mycobot = MyCobot(port, baud)
            time.sleep(0.5)
            # Raw protocol frame sent right after connecting -- purpose not
            # visible here; presumably an init/handshake command. TODO confirm.
            self.mycobot._write([255, 255, 3, 22, 1, 250])
            time.sleep(0.5)
            self.write_log_to_Text(' !')
        except Exception as e:
            err_log = ' \r !!!\n \r\n {}\n \r\n '.format(e)
            self.write_log_to_Text(err_log)
    def disconnect_mycobot(self):
        """Drop the serial connection (if any) and clear the handle."""
        if (not self.has_mycobot()):
            return
        try:
            del self.mycobot
            self.mycobot = None
            self.write_log_to_Text(' !')
        except AttributeError:
            self.write_log_to_Text('mycobot!!!')
    def release_mycobot(self):
        """Power off all servos so the arm can be moved by hand."""
        if (not self.has_mycobot()):
            return
        self.mycobot.release_all_servos()
        self.write_log_to_Text('Release over.')
    def focus_mycobot(self):
        """Re-energize the servos."""
        if (not self.has_mycobot()):
            return
        self.mycobot.power_on()
        self.write_log_to_Text('Power on over.')
    def check_mycobot_servos(self):
        """Probe servos 1-7 and log any that fail to answer with their id."""
        if (not self.has_mycobot()):
            return
        res = []
        for i in range(1, 8):
            _data = self.mycobot.get_servo_data(i, 5)
            time.sleep(0.02)
            if (_data != i):
                res.append(i)
        if res:
            self.write_log_to_Text(' {} !!!'.format(res))
        else:
            self.write_log_to_Text('')
    def calibration_mycobot(self):
        """Calibrate one servo per click (1..6); after the 6th, run the
        verification sweep."""
        if (not self.has_mycobot()):
            return
        if (not self.calibration_num):
            self.calibration_num = 0
        self.calibration_num += 1
        self.mycobot.set_servo_calibration(self.calibration_num)
        time.sleep(0.1)
        self.mycobot.focus_servo(self.calibration_num)
        time.sleep(0.5)
        # NOTE(review): pos is read but never used.
        pos = self.mycobot.get_angles()
        self.write_log_to_Text((('' + str(self.calibration_num)) + '.'))
        if (self.calibration_num == 6):
            self.write_log_to_Text('.')
            self.calibration_num = None
            self._calibration_test()
    def send_color(self, color: str):
        """Set the Atom LED to a named color ('red'/'green'/'blue')."""
        if (not self.has_mycobot()):
            return
        color_dict = {'red': [255, 0, 0], 'green': [0, 255, 0], 'blue': [0, 0, 255]}
        self.mycobot.set_color(*color_dict[color])
        self.write_log_to_Text(': {}.'.format(color))
    def start_aging_test(self):
        """Launch the aging (burn-in) motion loop on a daemon thread."""
        if (not self.has_mycobot()):
            return
        self.aging_stop = False
        self.aging = threading.Thread(target=self.aging_test, daemon=True)
        self.aging.start()
        self.write_log_to_Text(' ...')
    def stop_aging_test(self):
        """Signal the aging thread to stop and wait for it to finish."""
        # NOTE(review): bare except also hides errors other than a missing
        # self.aging thread.
        try:
            self.aging_stop = True
            self.write_log_to_Text('.')
            self.aging.join()
        except:
            self.write_log_to_Text(' !!!')
    def test_basic(self):
        """Drive the basic I/O pins low then high, reading them back after
        each pass."""
        pin_no = [1, 2, 3, 4, 5, 6]
        for p in pin_no:
            self.write_log_to_Text((' %s 0 ' % p))
            self.mycobot.set_basic_output(p, 0)
            time.sleep(0.5)
        time.sleep(1)
        for p in pin_no:
            self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_basic_input(p))))
            time.sleep(0.5)
        time.sleep(1)
        for p in pin_no:
            self.write_log_to_Text((' %s 1 ' % p))
            self.mycobot.set_basic_output(p, 1)
            time.sleep(0.5)
        time.sleep(1)
        for p in pin_no:
            self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_basic_input(p))))
            time.sleep(0.5)
    def test_atom(self):
        """Drive the Atom output pins low then high, reading the input pins
        back after each pass."""
        pin_in = [19, 22]
        pin_out = [23, 33]
        for p in pin_out:
            self.write_log_to_Text((' %s 0 ' % p))
            self.mycobot.set_digital_output(p, 0)
            time.sleep(0.5)
        time.sleep(1)
        for p in pin_in:
            self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_digital_input(p))))
            time.sleep(0.5)
        time.sleep(1)
        for p in pin_out:
            self.write_log_to_Text((' %s 1 ' % p))
            self.mycobot.set_digital_output(p, 1)
            time.sleep(0.5)
        time.sleep(1)
        for p in pin_in:
            self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_digital_input(p))))
            time.sleep(0.5)
        time.sleep(1)
    def has_mycobot(self):
        """Return True when a robot connection exists; otherwise log and
        return False."""
        if (not self.mycobot):
            self.write_log_to_Text('mycobot!!!')
            return False
        return True
    def aging_test(self):
        """Endless burn-in loop: per-joint sweeps, multi-joint poses and
        coordinate moves at several speeds, until aging_stop is set.

        NOTE(review): references module-level ``multiple_angle`` and
        ``multiple_angle_grip`` pose tables that are not visible in this
        chunk -- confirm they are defined at module scope.
        """
        while True:
            speed = [50, 100]
            joint = [1, 2, 3, 4, 5, 6]
            angle = [0, 168, 90, 130, 145, 165, 180]
            coord = ['y', 'z', 'x']
            self.mycobot.set_color(0, 0, 255)
            self.mycobot.wait(1).send_angles([0, 0, 0, 0, 0, 0], speed[1])
            for a in range(1):
                for j in joint:
                    for sp in speed:
                        # Wait time scales inversely with speed.
                        if (sp == 10):
                            t = 10
                        elif (sp == 50):
                            t = 5
                        elif (sp == 100):
                            t = 3
                        if self.aging_stop:
                            return
                        self.mycobot.wait(t).send_angle(j, angle[j], sp)
                        print(self.aging_stop)
                        self.mycobot.wait(t).send_angle(j, (angle[j] * (- 1)), sp)
                        print(self.aging_stop)
                        self.mycobot.wait(t).send_angle(j, angle[0], sp)
                        print(self.aging_stop)
            for b in range(2):
                for sp in speed:
                    if (sp == 10):
                        t = 10
                    elif (sp == 50):
                        t = 5
                    elif (sp == 100):
                        t = 3
                    for mul in multiple_angle:
                        if self.aging_stop:
                            return
                        self.mycobot.wait(t).send_angles(mul, sp)
            self.mycobot.wait(5).send_angles([0, 0, 0, 0, 0, 0], speed[1])
            self.mycobot.wait(5).send_angles([0, (- 25), (- 115), 45, (- 80), 0], speed[1])
            time.sleep(2)
            for c in range(2):
                for sp in speed:
                    data_list = [235.4, (- 117.3), 244.5, 9.14, (- 25.44), 85.62]
                    self.mycobot.wait(5).send_coords(data_list, speed[1], 1)
                    if (sp == 10):
                        t = 10
                    elif (sp == 50):
                        t = 3
                    elif (sp == 100):
                        t = 1
                    for cd in coord:
                        if (cd == 'x'):
                            i = 0
                        elif (cd == 'y'):
                            i = 1
                        elif (cd == 'z'):
                            i = 2
                        print(cd)
                        if self.aging_stop:
                            return
                        data_list[i] = (data_list[i] + 90)
                        self.mycobot.wait(t).send_coords(data_list, sp, 1)
                        print(t, data_list, sp)
                        data_list[i] = (data_list[i] - 140)
                        self.mycobot.wait(t).send_coords(data_list, sp, 1)
                        print(t, data_list, sp)
            self.mycobot.wait(5).send_angles([0, 0, 0, 0, 0, 0], speed[1])
            for d in range(2):
                for sp in speed:
                    if (sp == 10):
                        t = 10
                    elif (sp == 50):
                        t = 3
                    elif (sp == 100):
                        t = 2
                    for mulg in multiple_angle_grip:
                        if self.aging_stop:
                            return
                        self.mycobot.wait(t).send_angles(mulg, sp)
            self.mycobot.wait(5).send_angles([0, 0, 0, 0, 0, 0], speed[1])
    def _calibration_test(self):
        """Verification sweep after calibration: move every joint to -20,
        +20 and back to 0 degrees."""
        self.write_log_to_Text('.')
        self.mycobot.set_fresh_mode(1)
        time.sleep(0.5)
        angles = [0, 0, 0, 0, 0, 0]
        test_angle = [(- 20), 20, 0]
        for i in range(6):
            for j in range(3):
                angles[i] = test_angle[j]
                self.mycobot.send_angles(angles, 0)
                time.sleep(2)
        self.write_log_to_Text('.')
    def get_serial_port_list(self):
        """Refresh and return the list of available serial port device names."""
        plist = [str(x).split(' - ')[0].strip() for x in serial.tools.list_ports.comports()]
        print(plist)
        self.port_list['value'] = plist
        return plist
    def get_current_time(self):
        """Return the current local time formatted for log lines."""
        current_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
        return current_time
    def write_log_to_Text(self, logmsg: str):
        """Append a timestamped message to the log pane, auto-scrolling once
        the pane has filled past LOG_NUM lines."""
        global LOG_NUM
        current_time = self.get_current_time()
        logmsg_in = (((str(current_time) + ' ') + str(logmsg)) + '\n')
        if (LOG_NUM <= 18):
            self.log_data_Text.insert(tkinter.END, logmsg_in)
            LOG_NUM += len(logmsg_in.split('\n'))
        else:
            # Past the threshold: keep inserting but pin the view to the end.
            self.log_data_Text.insert(tkinter.END, logmsg_in)
            self.log_data_Text.yview('end')
@pytest.mark.parametrize('val, expected', ((b'\x00', 0), (b'\x01', 1), (b'\x00\x01', 1), (b'\x01\x00', 256), (bytearray(b'\x00'), 0), (bytearray(b'\x01'), 1), (bytearray(b'\x00\x01'), 1), (bytearray(b'\x01\x00'), 256), (True, 1), (False, 0), ('255', TypeError), ('-1', TypeError), ('0x0', TypeError), ('0x1', TypeError)))
def test_to_int(val, expected):
    """to_int converts bytes/bytearray (big-endian) and bools; str inputs
    must raise TypeError.

    NOTE(review): the decorator line survived only as ``.parametrize``; it
    is restored as @pytest.mark.parametrize.
    """
    # An exception class as the expected value means the call must raise it.
    if isinstance(expected, type):
        with pytest.raises(expected):
            to_int(val)
    else:
        assert (to_int(val) == expected)
@mock.patch('os.symlink')
@mock.patch.object(scavenging_benchmark.Benchmark, 'execute_benchmark')
@mock.patch.object(docker_image.DockerImage, 'pull_image')
@mock.patch.object(source_manager.SourceManager, 'have_build_options')
@mock.patch.object(source_manager.SourceManager, 'get_envoy_hashes_for_benchmark')
def test_execute_using_images_only(mock_hashes_for_benchmarks, mock_have_build_options, mock_pull_image, mock_execute, mock_symlink):
    """BenchmarkRunner pulls images and symlinks instead of building when no
    build options are present.

    NOTE(review): the decorators survived only as bare target fragments;
    they are restored as mock.patch / mock.patch.object -- bottom-most
    decorator maps to the first parameter, matching the parameter order.
    """
    job_control = generate_test_objects.generate_default_job_control()
    generate_test_objects.generate_images(job_control)
    mock_execute.return_value = None
    mock_have_build_options.return_value = False
    mock_hashes_for_benchmarks.return_value = {'tag1', 'tag2'}
    benchmark = run_benchmark.BenchmarkRunner(job_control)
    benchmark.execute()
    mock_have_build_options.assert_called()
    mock_pull_image.assert_called()
    mock_symlink.assert_called()
    # One execute_benchmark call per tag.
    mock_execute.assert_has_calls([mock.call(), mock.call()])
class Storage():
    """Scanner state for finding exposed storage URLs and sensitive files on
    a host; loads the sensitive-filename wordlist once at construction."""

    def __init__(self, host, logger):
        self.host = host
        self.logger = logger
        self.request_handler = RequestHandler()
        self.storage_urls_found = set()
        self.num_files_found = 0
        # Wordlist of file names considered sensitive when found in storage.
        file_list_path = os.path.join(MY_PATH, '../wordlists/storage_sensitive')
        with open(file_list_path, 'r') as file:
            files = file.readlines()
        self.sensitive_files = [x.replace('\n', '') for x in files]

    @staticmethod
    def _normalize_url(url):
        """Collapse duplicated slashes in *url* while preserving its scheme.

        NOTE(review): restored the stripped @staticmethod decorator -- the
        function takes no ``self`` parameter.
        """
        if url.startswith(HTTP):
            url = url.replace(HTTP, '')
            url = ''.join([part for part in url.split('//') if part])
            return (HTTP + url)
        else:
            url = url.replace(HTTPS, '')
            url = ''.join([part for part in url.split('//') if part])
            return (HTTPS + url)
@mock.patch('aea.cli.scaffold._scaffold_dm_handler')
@mock.patch('aea.cli.utils.decorators._check_aea_project')
class ScaffoldDecisionMakerHandlerTestCase(TestCase):
    """Tests for the `scaffold decision-maker-handler` CLI command.

    NOTE(review): the two decorator lines survived only as bare target
    strings; they are restored as mock.patch class decorators, which inject
    the mocks into every test method (received via *mocks).
    """

    def setUp(self):
        self.runner = CliRunner()

    def test_scaffold_decision_maker_handler_command_positive(self, *mocks):
        """Command exits 0 when its collaborators are mocked out."""
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'scaffold', 'decision-maker-handler'], standalone_mode=False)
        self.assertEqual(result.exit_code, 0)
class OptionSeriesColumnDataEvents(Options):
    """Per-point event handler options for column series data.

    NOTE(review): each getter/setter pair below shared one name, which is
    only valid Python when decorated as a property; the stripped
    @property / @<name>.setter decorators are restored here.
    """

    @property
    def click(self):
        """Configured ``click`` handler (None when unset)."""
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        """Configured ``drag`` handler (None when unset)."""
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dragStart(self):
        """Configured ``dragStart`` handler (None when unset)."""
        return self._config_get(None)

    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drop(self):
        """Configured ``drop`` handler (None when unset)."""
        return self._config_get(None)

    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        """Configured ``mouseOut`` handler (None when unset)."""
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        """Configured ``mouseOver`` handler (None when unset)."""
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        """Configured ``remove`` handler (None when unset)."""
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)

    @property
    def select(self):
        """Configured ``select`` handler (None when unset)."""
        return self._config_get(None)

    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)

    @property
    def unselect(self):
        """Configured ``unselect`` handler (None when unset)."""
        return self._config_get(None)

    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)

    @property
    def update(self):
        """Configured ``update`` handler (None when unset)."""
        return self._config_get(None)

    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False)
class Snooker(Ruleset):
    """Snooker ruleset: phase tracking, scoring and respotting on top of Ruleset."""

    def __init__(self, *args, **kwargs):
        Ruleset.__init__(self, *args, **kwargs)
        # Reds/colors alternate until reds run out, then colors go in sequence.
        self.phase: GamePhase = GamePhase.ALTERNATING

    @property
    def active_group(self):
        # fix: restored @property — get_score() reads `self.active_group` as an
        # attribute; as a plain method the identity check below could never match.
        return BallGroup.get(self.shot_constraints.hittable)

    def build_shot_info(self, shot: System) -> ShotInfo:
        """Assemble legality, turn/game status, score and winner for a shot."""
        (legal, reason) = is_legal(shot, self.shot_constraints)
        turn_over = is_turn_over(shot, self.shot_constraints, legal)
        game_over = is_game_over(shot, legal)
        score = self.get_score(shot, turn_over, legal)
        winner = decide_winner(self.players, score, game_over)
        return ShotInfo(player=self.active_player, legal=legal, reason=reason, turn_over=turn_over, game_over=game_over, winner=winner, score=score)

    def initial_shot_constraints(self) -> ShotConstraints:
        """Opening constraints: cue in the semicircle, any red is hittable."""
        return ShotConstraints(ball_in_hand=BallInHandOptions.SEMICIRCLE, movable=['white'], cueable=['white'], hittable=BallGroup.REDS.balls, call_shot=False)

    def next_shot_constraints(self, shot: System) -> ShotConstraints:
        """Derive the next player's constraints from the shot just played."""
        self.phase = game_phase(shot, self.shot_info.legal)
        # Potting the cue ball grants ball-in-hand from the semicircle.
        gets_ball_in_hand = 'white' in get_pocketed_ball_ids_during_shot(shot)
        if self.shot_info.turn_over:
            ball_group = get_next_player_ball_group(shot)
        else:
            ball_group = get_continued_player_ball_group(shot, self.shot_constraints)
        # fix: pocket_call was unbound on the ALTERNATING path (NameError).
        pocket_call = None
        if self.phase is GamePhase.ALTERNATING:
            hittable = ball_group.balls
            call_shot = ball_group is BallGroup.COLORS
            ball_call = None
        else:
            # Sequential endgame: only the lowest remaining color is pottable.
            lowest = get_lowest_pottable(shot)
            hittable = (lowest,)
            call_shot = True
            ball_call = lowest
            pocket_call = 'lb'  # NOTE(review): hard-coded pocket id — confirm intent
        return ShotConstraints(ball_in_hand=(BallInHandOptions.SEMICIRCLE if gets_ball_in_hand else BallInHandOptions.NONE), movable=(['white'] if gets_ball_in_hand else []), cueable=['white'], hittable=hittable, call_shot=call_shot, ball_call=ball_call, pocket_call=pocket_call)

    def respot_balls(self, shot: System):
        """Return the cue ball and any prematurely-potted colors to their spots."""
        check: List[str] = ['white']
        if self.phase is GamePhase.ALTERNATING:
            # During alternation every color returns to its spot when potted.
            check.extend(list(BallGroup.COLORS.balls))
        else:
            assert (ball_call := self.shot_constraints.ball_call) is not None
            check.extend(get_color_balls_to_be_potted(shot, self.shot_info.legal, ball_call))
        on_table = get_ball_ids_on_table(shot, at_start=False)
        for ball_id in check:
            if ball_id not in on_table:
                ideal_relative_coords = ball_info(ball_id).respot
                assert ideal_relative_coords is not None
                # Respot coordinates are stored relative to table dimensions.
                ideal_x = ideal_relative_coords[0] * shot.table.w
                ideal_y = ideal_relative_coords[1] * shot.table.l
                respot(shot, ball_id, ideal_x, ideal_y)

    def get_score(self, shot: System, turn_over: bool, legal: bool) -> Counter:
        """Update and return the running score for the shot outcome."""
        if legal and turn_over:
            # Legal but nothing potted: no score change.
            return self.score
        elif legal and not turn_over:
            potted_ids = get_pocketed_ball_ids_during_shot(shot)
            assert 'white' not in potted_ids, 'Legal shot with white ball pocketed?'
            if self.active_group is BallGroup.REDS:
                for ball_id in potted_ids:
                    assert ball_id in BallGroup.REDS.balls, 'Legal shot with non-red?'
                    self.score[self.active_player.name] += ball_info(ball_id).points
            else:
                assert len(potted_ids) == 1, 'Only one ball can be potted on colors'
                ball_id = potted_ids[0]
                assert ball_id in BallGroup.COLORS.balls
                self.score[self.active_player.name] += ball_info(ball_id).points
        else:
            # Foul: award the opponent points based on the balls involved.
            offending_balls = set(get_pocketed_ball_ids_during_shot(shot))
            offending_balls.add('white')
            if (first_hit := get_id_of_first_ball_hit(shot, cue='white')) is not None:
                offending_balls.add(first_hit)
            if self.shot_constraints.call_shot:
                assert self.shot_constraints.ball_call is not None
                offending_balls.add(self.shot_constraints.ball_call)
            self.score[self.last_player.name] += get_foul_points(offending_balls)
        return self.score

    def process_shot(self, shot: System):
        """Run the base processing, then log pots, fouls and turn changes."""
        super().process_shot(shot)
        ball_ids = get_pocketed_ball_ids_during_shot(shot, exclude={'white'})
        if len(ball_ids):
            sentiment = 'neutral' if self.shot_info.turn_over else 'good'
            self.log.add_msg(f"Ball(s) potted: {', '.join(ball_ids)}", sentiment=sentiment)
        if not self.shot_info.legal:
            self.log.add_msg(f'Illegal shot! {self.shot_info.reason}', sentiment='bad')
        if self.shot_info.turn_over:
            self.log.add_msg(f'{self.last_player.name} is up!', sentiment='good')
class General_For_Statement(For_Loop_Statement):
    """AST node for a general `for <ident> = <expr>` loop."""

    def __init__(self, t_for):
        super().__init__(t_for)
        assert (t_for.kind == 'KEYWORD') and (t_for.value == 'for')
        # The range expression is attached later via set_expression().
        self.n_expr = None

    def set_expression(self, n_expr):
        """Attach the loop's range expression and re-parent it to this node."""
        assert isinstance(n_expr, Expression)
        self.n_expr = n_expr
        self.n_expr.set_parent(self)

    def visit(self, parent, function, relation):
        """Visit this node, then its identifier, expression and body in order."""
        self._visit(parent, function, relation)
        for child, child_relation in ((self.n_ident, 'Identifier'),
                                      (self.n_expr, 'Expression'),
                                      (self.n_body, 'Body')):
            child.visit(self, function, child_relation)
        self._visit_end(parent, function, relation)
class Revision(object):
    """A single node in the migration revision DAG.

    ``nextrev``/``_all_nextrev`` start empty and are populated incrementally
    through add_nextrev() as descendant revisions are registered.
    """

    nextrev = frozenset()        # direct descendants within the versioned lineage
    _all_nextrev = frozenset()   # direct descendants including dependency edges
    revision = None              # this revision's identifier
    down_revision = None         # parent id(s); None marks a base revision

    def __init__(self, revision, down_revision, dependencies=None, branch_labels=None):
        # NOTE(review): dependencies/branch_labels are accepted but unused in
        # this visible implementation — confirm against the full class.
        self.revision = revision
        self.down_revision = tuple_or_value(down_revision)

    def __repr__(self):
        args = [repr(self.revision), repr(self.down_revision)]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(args))

    def add_nextrev(self, revision):
        """Record *revision* as a direct descendant of this revision."""
        self._all_nextrev = self._all_nextrev.union([revision.revision])
        if self.revision in revision._versioned_down_revisions:
            self.nextrev = self.nextrev.union([revision.revision])

    @property
    def _all_down_revisions(self):
        # fix: restored @property — consumers treat this as an attribute.
        return to_tuple(self.down_revision, default=())

    @property
    def _versioned_down_revisions(self):
        # fix: restored @property — add_nextrev() uses `in` on it and
        # is_merge_point() calls len() on it; a bound method would raise.
        return to_tuple(self.down_revision, default=())

    def is_head(self):
        """True when no versioned revision follows this one."""
        return not bool(self.nextrev)

    def _is_real_head(self):
        """True when nothing (including dependencies) follows this revision."""
        return not bool(self._all_nextrev)

    def is_base(self):
        """True when this revision has no parent."""
        return self.down_revision is None

    def is_branch_point(self):
        """True when more than one versioned revision descends from this one."""
        return len(self.nextrev) > 1

    def _is_real_branch_point(self):
        return len(self._all_nextrev) > 1

    def is_merge_point(self):
        """True when this revision merges multiple parents."""
        return len(self._versioned_down_revisions) > 1
def main() -> None:
    """CLI entry point: parse each given file and dump or graph its AST."""
    parser = ArgumentParser(description='Parse source files and print the abstract syntax tree (AST).')
    parser.add_argument('-g', action='store_true', help='generate graph')
    parser.add_argument('FILE', nargs='*', help='files to parse')
    options = parser.parse_args()
    # One handler per mode; applied uniformly to every input file.
    handler = _graph if options.g else _dump
    for source_name in options.FILE:
        handler(source_name)
class BaseParser(argparse.ArgumentParser):
    # Composable argument parser: each boolean constructor flag toggles one
    # group of standard model-training CLI options.
    def __init__(self, add_help=False, data_dir=True, model_dir=True, train_epochs=True, epochs_between_evals=True, stop_threshold=True, batch_size=True, multi_gpu=False, num_gpu=True, hooks=True):
        # add_help=False by default so this parser can be used as a parent
        # parser without clashing on -h.
        super(BaseParser, self).__init__(add_help=add_help)
        if data_dir:
            self.add_argument('--data_dir', '-dd', default='/tmp', help='[default: %(default)s] The location of the input data.', metavar='<DD>')
        if model_dir:
            self.add_argument('--model_dir', '-md', default='/tmp', help='[default: %(default)s] The location of the model checkpoint files.', metavar='<MD>')
        if train_epochs:
            self.add_argument('--train_epochs', '-te', type=int, default=1, help='[default: %(default)s] The number of epochs used to train.', metavar='<TE>')
        if epochs_between_evals:
            self.add_argument('--epochs_between_evals', '-ebe', type=int, default=1, help='[default: %(default)s] The number of training epochs to run between evaluations.', metavar='<EBE>')
        if stop_threshold:
            self.add_argument('--stop_threshold', '-st', type=float, default=None, help='[default: %(default)s] If passed, training will stop at the earlier of train_epochs and when the evaluation metric is greater than or equal to stop_threshold.', metavar='<ST>')
        if batch_size:
            self.add_argument('--batch_size', '-bs', type=int, default=32, help='[default: %(default)s] Global batch size for training and evaluation.', metavar='<BS>')
        # --multi_gpu and --num_gpus are mutually exclusive ways to express
        # GPU usage; enabling both would be ambiguous.
        assert (not (multi_gpu and num_gpu))
        if multi_gpu:
            self.add_argument('--multi_gpu', action='store_true', help='If set, run across all available GPUs.')
        if num_gpu:
            # Default depends on whether the installed TensorFlow has CUDA.
            self.add_argument('--num_gpus', '-ng', type=int, default=(1 if tf.test.is_built_with_cuda() else 0), help='[default: %(default)s] How many GPUs to use with the DistributionStrategies API. The default is 1 if TensorFlow wasbuilt with CUDA, and 0 otherwise.', metavar='<NG>')
        if hooks:
            self.add_argument('--hooks', '-hk', nargs='+', default=['LoggingTensorHook'], help='[default: %(default)s] A list of strings to specify the names of train hooks. Example: --hooks LoggingTensorHook ExamplesPerSecondHook. Allowed hook names (case-insensitive): LoggingTensorHook, ProfilerHook, ExamplesPerSecondHook, LoggingMetricHook.See official.utils.logs.hooks_helper for details.', metavar='<HK>')
def test_firestore():
    """Round-trip a document through Firestore: create, read back, delete."""
    client = firestore.client()
    payload = {'name': u'Mountain View', 'country': u'USA', 'population': 77846, 'capital': False}
    document = client.collection('cities').document()
    document.set(payload)
    # The stored document must read back exactly as written.
    assert document.get().to_dict() == payload
    document.delete()
    # After deletion the snapshot must report non-existence.
    assert document.get().exists is False
def build_spending_update_query(query_base, update_data):
    """Splice a parameterized multi-row VALUES clause into *query_base*.

    Args:
        query_base: SQL template containing one ``{}`` placeholder.
        update_data: sequence of rows; each row contributes one
            ``(%s,...,%s)`` group with a placeholder per column.

    Returns:
        query_base with the placeholder replaced by comma-separated groups,
        e.g. ``(%s,%s),(%s,%s)`` for two 2-column rows.
    """
    # join() builds the clause in one pass instead of quadratic += concatenation.
    values_string = ','.join(
        '({})'.format(','.join(['%s'] * len(row))) for row in update_data
    )
    return query_base.format(values_string)
class List(CLICommand):
    # Lists instances matching the query, splitting them into synced
    # (present in config) and unsynced groups, then prints summary counts.
    doc = 'List instances that match the arguments.\n    List all if no arguments given.'
    args_optional = ['query']
    # NOTE(review): first parameter is named `cli`, not `self` — presumably
    # invoked with the CLI object as the receiver/argument; confirm the
    # CLICommand calling convention.
    def run(cli):
        instances = cli.get_instances_for_action()
        lines = []
        if (len(instances) == 0):
            # Nothing matched: print nothing at all.
            return
        instances_synced = []
        instances_unsynced = []
        for instance in instances:
            if instance.is_in_config():
                instances_synced.append(instance)
            else:
                instances_unsynced.append(instance)
        # NOTE(review): `format_instnaces` (sic) is the helper's actual name
        # elsewhere in the project — do not "fix" the spelling here alone.
        if (len(instances_synced) > 0):
            lines.extend(cli.format_instnaces(instances_synced, title='Instances'))
        if (len(instances_unsynced) > 0):
            lines.extend(cli.format_instnaces(instances_unsynced, title='\nUnsynced'))
        # Triple-quoted f-string: leading newline separates counts from the tables.
        lines.append(f'''
{cli.manager.count_running(instances, format=True)}''')
        lines.append(f'{cli.manager.count_unsynced(instances, format=True)}')
        lines.append('\nUse "manager info" to get more info about a particular instance')
        for line in lines:
            logger.info(line)
class TestAdminACLFactory(base.BasePyTestCase):
    """Tests for security.AdminACLFactory."""

    def test___acl__(self):
        """Every configured admin group gets ALL_PERMISSIONS; DENY_ALL closes the list."""
        request = testing.DummyRequest()
        request.registry.settings = {'admin_groups': ['cool_gals', 'cool_guys']}
        factory = security.AdminACLFactory(request)
        expected = [
            (Allow, 'group:cool_gals', ALL_PERMISSIONS),
            (Allow, 'group:cool_guys', ALL_PERMISSIONS),
            DENY_ALL,
        ]
        assert factory.__acl__() == expected
def test_gas_price_strategy_calls(w3):
transaction = {'to': '0x0', 'value': }
my_gas_price_strategy = Mock(return_value=5)
w3.eth.set_gas_price_strategy(my_gas_price_strategy)
assert (w3.eth.generate_gas_price(transaction) == 5)
my_gas_price_strategy.assert_called_once_with(w3, transaction) |
class SearchFileRequest(DatClass):
    # Request payload for the drive file-search API; ``field(..., repr=False)``
    # entries rely on DatClass's dataclass-style machinery.
    # NOTE(review): no @dataclass-style decorator is visible in this copy —
    # presumably DatClass supplies the field handling; confirm.
    query: str                 # search expression (required)
    drive_id: str = None       # target drive; None lets the server choose
    limit: int = field(default=100, repr=False)                    # page size
    image_thumbnail_process: str = field(default='image/resize,w_160/format,jpeg', repr=False)
    image_url_process: str = field(default='image/resize,w_1920/format,jpeg', repr=False)
    marker: str = field(default=None, repr=False)                  # pagination cursor
    order_by: str = field(default=None, repr=False)
    url_expire_sec: int = field(default=14400, repr=False)         # download URL TTL
    video_thumbnail_process: str = field(default='video/snapshot,t_0,f_jpg,ar_auto,w_800', repr=False)
class CameraData():
    """Normalized view of a single camera entry from the BlueIris-style API payload."""

    id: str              # camera identifier
    name: str            # display name
    has_audio: bool      # audio stream available
    is_online: bool      # camera currently reachable
    is_group: bool       # entry represents a camera group
    is_system: bool      # one of the built-in system cameras
    group_cameras: dict  # member cameras when is_group, else None
    type: str            # camera type as reported by the API
    data: dict           # raw payload, kept for downstream consumers

    def __init__(self, camera):
        self.id = camera.get(BI_ATTR_ID)
        self.name = camera.get(BI_ATTR_NAME)
        self.is_online = camera.get(BI_ATTR_IS_ONLINE, False)
        self.has_audio = camera.get(BI_ATTR_AUDIO, False)
        self.data = camera
        self.is_group = camera.get(BI_ATTR_GROUP) is not None
        # fix: always assign group_cameras (None for non-groups) — previously
        # it was only set when is_group, so __repr__ raised AttributeError
        # for regular cameras.
        self.group_cameras = camera.get(BI_ATTR_GROUP)
        self.is_system = self.id in SYSTEM_CAMERA_ID
        self.type = camera.get(BI_ATTR_TYPE)

    def __repr__(self):
        obj = {CONF_NAME: self.name, CONF_ID: self.id, CAMERA_HAS_AUDIO: self.has_audio, CAMERA_IS_ONLINE: self.is_online, CAMERA_IS_SYSTEM: self.is_system, CAMERA_IS_GROUP: self.is_group, CAMERA_DATA: self.data, CAMERA_GROUP_CAMERAS: self.group_cameras, CAMERA_TYPE: self.type}
        to_string = f'{obj}'
        return to_string
# fix: restored the stripped @pytest.mark prefix — a bare `.parametrize(...)`
# statement is a syntax error (pytest is already used elsewhere in this file).
@pytest.mark.parametrize('compiled', [True, False])
def test_bytes_integer_struct_unsigned_be(compiled):
    """uint24 big-endian parsing: scalars, fixed arrays and length-prefixed arrays."""
    d = '\n    struct test {\n        uint24  a;\n        uint24  b[2];\n        uint24  len;\n        uint24  dync[len];\n        uint24  c;\n    };\n    '
    c = cstruct.cstruct()
    c.load(d, compiled=compiled)
    c.endian = '>'
    a = c.test(b'AAABBBCCC\x00\x00\x02DDDEEE\xff\xff\xff')
    assert (a.a == 4276545)
    assert (a.b == [4342338, 4408131])
    assert (a.len == 2)
    assert (a.dync == [4473924, 4539717])
    # fix: the expected value was missing — 'c' consumes the trailing
    # b'\xff\xff\xff', i.e. 0xFFFFFF big-endian.
    assert (a.c == 16777215)
class Weelo(BikeShareSystem):
    """PyBikes system backed by the Weelo API, aggregating several city feeds."""

    authed = True
    meta = {'system': 'Weelo', 'company': ['Bicincitta Italia S.r.l.']}

    def __init__(self, tag, meta, city_ids, key):
        super(Weelo, self).__init__(tag, meta)
        self.city_ids = city_ids
        self.key = key

    def update(self, scraper=None):
        """Refresh self.stations with the stations of every configured city."""
        scraper = scraper or PyBikesScraper()
        client = WeeloAPI(**self.key)
        self.stations = [
            WeeloStation(info)
            for city_id in self.city_ids
            for info in client.stations(city_id, scraper)
        ]
# fix: restored the stripped decorator syntax — the bare `_visible` statement
# was a no-op expression; the name is clearly meant to decorate this detector.
@_visible
def detect_wikkawiki(source_file, regexp):
    """Detect a WikkaWiki install from *source_file*.

    regexp[0] extracts the version string, regexp[1] the patch level.
    Returns '<version>-p<patch_level>' or None when either is missing.
    """
    if not (os.path.isfile(source_file) and regexp):
        return
    logging.debug('Dectecting WikkaWiki from: %s', source_file)
    version = grep_from_file(source_file, regexp[0])
    if not version:
        logging.debug('Could not find version from: %s', source_file)
        return
    logging.debug('Version: %s', version)
    patch_level = grep_from_file(source_file, regexp[1])
    if not patch_level:
        # fix: log the file that was searched — the previous message
        # formatted patch_level itself, which is None on this path.
        logging.debug('Could not find patch level from: %s', source_file)
        return
    logging.debug('Patch level: %s', patch_level)
    # Both values are guaranteed truthy here (early returns above), so the
    # old `if version and patch_level` guard was redundant.
    return version + '-p' + patch_level
def test_perf_wrap(signed=True, n_word=8, repeat=10):
    """Time utils.wrap over *repeat* runs and report mean/std in milliseconds."""
    timings = np.zeros(repeat)
    for run in range(repeat):
        tic = time.time()
        utils.wrap(np.random.uniform(low=-512, high=512, size=[1000, 2]), signed=signed, n_word=n_word)
        timings[run] = time.time() - tic
    print('\nutils.wrap execution time over {} repetitions'.format(repeat))
    print('\tmean = {:.3f} ms\n\tstd = {:.3f} ms'.format(np.mean(timings) * 1000.0, np.std(timings) * 1000.0))
def test_decimal_fixed_accommodates_precision():
    # fix: the docstring was truncated to a lone quote (unterminated string,
    # a syntax error); restored as a proper description.
    """A fixed decimal whose precision exceeds its byte size must fail to parse."""
    schema = {'type': 'record', 'name': 'test_scale_is_an_int', 'fields': [{'name': 'field', 'type': {'name': 'fixed_decimal', 'logicalType': 'decimal', 'precision': 10, 'scale': 2, 'type': 'fixed', 'size': 2}}]}
    with pytest.raises(SchemaParseException, match="decimal precision of \\d+ doesn't fit into array of length \\d+"):
        parse_schema(schema)
# fix: restored the stripped @pytest.mark prefixes — bare `.integrationtest` /
# `.skipif(...)` lines are syntax errors (pytest is used elsewhere in this file).
@pytest.mark.integrationtest
@pytest.mark.skipif((pymongo.version_tuple < (3, 0)), reason='New in 3.0')
def test_collection_find_one(instrument, elasticapm_client, mongo_database):
    """find_one produces a mongodb db span with the expected name and destination."""
    blogpost = {'author': 'Tom', 'text': 'Foo', 'date': datetime.datetime.utcnow()}
    r = mongo_database.blogposts.insert_one(blogpost)
    elasticapm_client.begin_transaction('transaction.test')
    r = mongo_database.blogposts.find_one({'author': 'Tom'})
    assert (r['author'] == 'Tom')
    elasticapm_client.end_transaction('transaction.test')
    transactions = elasticapm_client.events[TRANSACTION]
    span = _get_pymongo_span(elasticapm_client.spans_for_transaction(transactions[0]))
    assert (span['type'] == 'db')
    assert (span['subtype'] == 'mongodb')
    assert (span['action'] == 'query')
    assert (span['name'] == 'elasticapm_test.blogposts.find_one')
    assert (span['context']['destination'] == {'address': os.environ.get('MONGODB_HOST', 'localhost'), 'port': int(os.environ.get('MONGODB_PORT', 27017)), 'service': {'name': '', 'resource': 'mongodb/elasticapm_test', 'type': ''}})
class MonthByMonthCommitsJob(CompanyPushCommitsRankingJob):
    # Ranking job variant that aggregates commit counts per month.
    REPORT_FACTORY = MBMCommitsFactory
    def transform(self, df: DataFrame, **kwargs) -> DataFrame:
        # Delegate the aggregation to the shared helper, mapping commit sha and
        # event timestamp columns onto the report schema's month/amount fields.
        report_schema = self.report_cls.schema
        return get_month_by_month_commits_amounts(df=df, commits_id_field=self.commits_schema.sha, datetime_field=self.commits_schema.event_created_at, result_month_field=report_schema.month, result_field=report_schema.commits_amount)
class Generator(nn.Module):
    """DCGAN-style generator: latent vector -> 64x64 image via ConvTranspose2d blocks."""

    def __init__(self, latent_dim: int, feature_maps: int, image_channels: int) -> None:
        super().__init__()
        # 1x1 -> 4x4 (stride 1), then four 2x upsampling blocks up to 64x64.
        self.gen = Sequential(
            self._make_gen_block(latent_dim, feature_maps * 8, kernel_size=4, stride=1, padding=0),
            self._make_gen_block(feature_maps * 8, feature_maps * 4),
            self._make_gen_block(feature_maps * 4, feature_maps * 2),
            self._make_gen_block(feature_maps * 2, feature_maps),
            self._make_gen_block(feature_maps, image_channels, last_block=True),
        )

    @staticmethod
    def _make_gen_block(in_channels: int, out_channels: int, kernel_size: int = 4, stride: int = 2, padding: int = 1, bias: bool = False, last_block: bool = False) -> Sequential:
        # fix: restored @staticmethod — the method has no `self` parameter yet
        # is invoked as self._make_gen_block(...) in __init__, which would
        # otherwise pass the instance in as in_channels.
        if not last_block:
            # Hidden blocks: upsample, normalize, ReLU.
            return Sequential(
                nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding, bias=bias),
                nn.BatchNorm2d(out_channels),
                nn.ReLU(inplace=True),
            )
        # Final block: upsample straight to Tanh, mapping outputs into [-1, 1].
        return Sequential(
            nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding, bias=bias),
            nn.Tanh(),
        )

    def forward(self, noise: torch.Tensor) -> torch.Tensor:
        """Map (batch, latent_dim) noise to (batch, image_channels, 64, 64) images."""
        noise = noise.view(*noise.shape, 1, 1)
        return self.gen(noise)
class TestDialogues(ERC1155ClientTestCase):
    # Exercises every dialogue class of the ERC1155 client skill: creation,
    # role/self-address assignment, and set-once associated state.
    def test_contract_api_dialogue(self):
        # associated_fipa_dialogue and terms are write-once properties:
        # reading before set raises ValueError, setting twice raises
        # AEAEnforceError.
        contract_api_dialogue = ContractApiDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=ContractApiDialogue.Role.AGENT)
        with pytest.raises(ValueError, match='Associated fipa dialogue not set!'):
            assert contract_api_dialogue.associated_fipa_dialogue
        fipa_dialogue = FipaDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=FipaDialogue.Role.BUYER)
        contract_api_dialogue.associated_fipa_dialogue = fipa_dialogue
        with pytest.raises(AEAEnforceError, match='Associated fipa dialogue already set!'):
            contract_api_dialogue.associated_fipa_dialogue = fipa_dialogue
        assert (contract_api_dialogue.associated_fipa_dialogue == fipa_dialogue)
        with pytest.raises(ValueError, match='Terms not set!'):
            assert contract_api_dialogue.terms
        contract_api_dialogue.terms = self.mocked_terms
        with pytest.raises(AEAEnforceError, match='Terms already set!'):
            contract_api_dialogue.terms = self.mocked_terms
        assert (contract_api_dialogue.terms == self.mocked_terms)
    def test_contract_api_dialogues(self):
        # Contract-api dialogues identify themselves by the skill id.
        (_, dialogue) = self.contract_api_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=ContractApiMessage.Performative.GET_DEPLOY_TRANSACTION, ledger_id=self.ledger_id, contract_id=self.contract_id, callable=self.callable, kwargs=self.kwargs)
        assert (dialogue.role == ContractApiDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))
    def test_default_dialogues(self):
        # Default dialogues identify themselves by the agent address.
        (_, dialogue) = self.default_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=DefaultMessage.Performative.BYTES, content=b'some_content')
        assert (dialogue.role == DefaultDialogue.Role.AGENT)
        assert (dialogue.self_address == self.skill.skill_context.agent_address)
    def test_fipa_dialogues(self):
        # This skill takes the SELLER role in FIPA negotiations it opens.
        (_, dialogue) = self.fipa_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=FipaMessage.Performative.CFP, query=self.mocked_query)
        assert (dialogue.role == FipaDialogue.Role.SELLER)
        assert (dialogue.self_address == self.skill.skill_context.agent_address)
    def test_ledger_api_dialogues(self):
        (_, dialogue) = self.ledger_api_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=LedgerApiMessage.Performative.GET_BALANCE, ledger_id=self.ledger_id, address=self.address)
        assert (dialogue.role == LedgerApiDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))
    def test_oef_search_dialogues(self):
        (_, dialogue) = self.oef_search_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=OefSearchMessage.Performative.SEARCH_SERVICES, query=self.mocked_query)
        assert (dialogue.role == OefSearchDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))
    def test_signing_dialogue(self):
        # associated_contract_api_dialogue mirrors the write-once pattern above.
        signing_dialogue = SigningDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=ContractApiDialogue.Role.AGENT)
        with pytest.raises(ValueError, match='Associated contract api dialogue not set!'):
            assert signing_dialogue.associated_contract_api_dialogue
        contract_api_dialogue = ContractApiDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=ContractApiDialogue.Role.AGENT)
        signing_dialogue.associated_contract_api_dialogue = contract_api_dialogue
        with pytest.raises(AEAEnforceError, match='Associated contract api dialogue already set!'):
            signing_dialogue.associated_contract_api_dialogue = contract_api_dialogue
        assert (signing_dialogue.associated_contract_api_dialogue == contract_api_dialogue)
    def test_signing_dialogues(self):
        (_, dialogue) = self.signing_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=SigningMessage.Performative.SIGN_TRANSACTION, terms=self.mocked_terms, raw_transaction=self.mocked_raw_tx)
        assert (dialogue.role == SigningDialogue.Role.SKILL)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))
class OptionSeriesAreaEvents(Options):
    # Generated Highcharts option wrapper for area-series event handlers.
    # Each option appears as a getter/setter pair sharing one name — the
    # original @property / @<name>.setter decorators appear to have been
    # stripped in this copy (as written, each second def shadows the first).
    # TODO confirm against the generator output.
    def afterAnimate(self):
        # Getter: current handler, defaulting to None.
        return self._config_get(None)
    def afterAnimate(self, value: Any):
        # Setter: store the handler verbatim (not emitted as raw JS).
        self._config(value, js_type=False)
    def checkboxClick(self):
        return self._config_get(None)
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def hide(self):
        return self._config_get(None)
    def hide(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def show(self):
        return self._config_get(None)
    def show(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): this is Vyper/Viper contract code (`num` type, `send`,
# `floor`, implicit `self`), not Python — a @public/@payable decorator is
# presumably missing in this copy; confirm against the contract source.
def withdraw(validator_index: num):
    # Withdrawal only allowed once the validator's withdrawal epoch is reached.
    assert (self.current_epoch >= self.validators[validator_index].withdrawal_epoch)
    # Rescale the deposit by the ratio of deposit scale factors between the
    # epoch the validator logged in and the epoch it logged out.
    prev_login_epoch = self.dynasty_start_epoch[self.validators[validator_index].dynasty_start]
    prev_logout_epoch = self.dynasty_start_epoch[(self.validators[validator_index].dynasty_end + 1)]
    self.validators[validator_index].deposit = floor((self.validators[validator_index].deposit * (self.consensus_messages[prev_logout_epoch].deposit_scale_factor / self.consensus_messages[prev_login_epoch].deposit_scale_factor)))
    # Pay out to the registered withdrawal address, then remove the validator.
    send(self.validators[validator_index].withdrawal_addr, self.validators[validator_index].deposit)
    self.delete_validator(validator_index)
class TestReleaseCritpathMinKarma(BasePyTestCase):
    """critpath_min_karma resolution: per-release-status setting wins over the global one."""

    # fix: restored the stripped decorator prefixes — bare `.dict(config, ...)`
    # lines are syntax errors. `mock.patch.dict` is the presumed original
    # (config-dict patching pattern) — TODO confirm the exact import name.
    @mock.patch.dict(config, {'critpath.min_karma': 2, 'f17.beta.critpath.min_karma': 42, 'f17.status': 'beta'})
    def test_setting_status_min(self):
        # Status-specific override takes precedence over the global minimum.
        release = model.Release.query.first()
        assert (release.critpath_min_karma == 42)

    @mock.patch.dict(config, {'critpath.min_karma': 25, 'f17.status': 'beta'})
    def test_setting_status_no_min(self):
        # No status-specific value: fall back to the global minimum.
        release = model.Release.query.first()
        assert (release.critpath_min_karma == 25)

    @mock.patch.dict(config, {'critpath.min_karma': 72})
    def test_setting_status_no_setting_status(self):
        # No status configured at all: still use the global minimum.
        release = model.Release.query.first()
        assert (release.critpath_min_karma == 72)
class OptionSeriesAreaSonificationTracksMappingRate(Options):
    # Generated Highcharts option wrapper for sonification track rate mapping.
    # Options appear as getter/setter pairs sharing one name — the original
    # @property / @<name>.setter decorators appear stripped in this copy
    # (each second def shadows the first). TODO confirm.
    def mapFunction(self):
        # Getter: current mapping function (None default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored verbatim, not emitted as raw JS.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def class_book() -> Class:
    """Build the ClassBook complex type fixture: title, num_pages and author members."""
    members = {
        0: ComplexTypeMember(size=32, name='title', offset=0, type=Pointer(Integer.char())),
        4: ComplexTypeMember(size=32, name='num_pages', offset=4, type=Integer.int32_t()),
        8: ComplexTypeMember(size=32, name='author', offset=8, type=Pointer(Integer.char())),
    }
    return Class(name='ClassBook', members=members, size=96)
class FipaDialogues(BaseFipaDialogues):
    """FIPA dialogue collection in which this agent always plays the buyer."""

    def __init__(self, self_address: Address, **kwargs) -> None:
        BaseFipaDialogues.__init__(
            self,
            self_address=self_address,
            # Role is fixed: regardless of the first message, we are the BUYER.
            role_from_first_message=lambda message, receiver_address: FipaDialogue.Role.BUYER,
            dialogue_class=FipaDialogue,
        )
class GetBlockHeadersExchange(BaseGetBlockHeadersExchange):
    # Request/response exchange for GetBlockHeaders over the peer protocol:
    # normalizes the BlockHeaders response payload into a plain tuple.
    _normalizer = DefaultNormalizer(BlockHeaders, tuple, normalize_fn=(lambda res: res.payload.headers))
    tracker_class = GetBlockHeadersTracker
    _request_command_type = GetBlockHeaders
    _response_command_type = BlockHeaders
    async def __call__(self, block_number_or_hash: BlockIdentifier, max_headers: int=None, skip: int=0, reverse: bool=True, timeout: float=None) -> Tuple[(BlockHeaderAPI, ...)]:
        # Validate the response against the exact arguments we requested with.
        original_request_args = (block_number_or_hash, max_headers, skip, reverse)
        validator = GetBlockHeadersValidator(*original_request_args)
        query = BlockHeadersQuery(block_number_or_hash=block_number_or_hash, max_headers=max_headers, skip=skip, reverse=reverse)
        # Fresh request id lets match_payload_request_id pair the response.
        payload = GetBlockHeadersPayload(request_id=gen_request_id(), query=query)
        request = GetBlockHeaders(payload)
        return tuple((await self.get_result(request, self._normalizer, validator, match_payload_request_id, timeout)))
def test_en_tagger_attribute_ruler_lemma_contractions(NLP):
    """Contracted and misspelled negations lemmatize via the attribute ruler."""

    def tag_and_lemmatize(text):
        # Manually annotate a two-token VBD + RB negation, then run the
        # attribute ruler and lemmatizer pipes over it.
        doc = NLP.make_doc(text)
        doc[0].morph = MorphAnalysis(NLP.vocab, 'Mood=Ind|Tense=Past|VerbForm=Fin')
        doc[0].tag_ = 'VBD'
        doc[1].morph = MorphAnalysis(NLP.vocab, 'Polarity=Neg')
        doc[1].tag_ = 'RB'
        doc = NLP.get_pipe('attribute_ruler')(doc)
        return NLP.get_pipe('lemmatizer')(doc)

    doc = tag_and_lemmatize("didn't")
    assert doc[0].lemma_ == 'do'
    assert doc[1].lemma_ == 'not'
    doc = tag_and_lemmatize('hadnt')
    assert doc[0].lemma_ == 'have'
    assert doc[1].lemma_ == 'not'
# fix: restored the stripped @pytest.mark prefix — a bare `.django_db`
# statement is a syntax error (pytest is already used elsewhere in this file).
@pytest.mark.django_db
def test_program_activity_count_success(client, monkeypatch, agency_account_data, helpers):
    """program_activity_count reflects the fiscal-year filter for each agency code."""
    helpers.mock_current_fiscal_year(monkeypatch)
    # Current fiscal year: fixture data provides four program activities.
    resp = client.get(url.format(code='007', filter=''))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['program_activity_count'] == 4)
    # Other fiscal years / agencies: no matching activity.
    resp = client.get(url.format(code='007', filter='?fiscal_year=2017'))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['program_activity_count'] == 0)
    resp = client.get(url.format(code='010', filter='?fiscal_year=2016'))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['program_activity_count'] == 0)
class TestQAACLFactory(base.BaseTestCase):
    """Tests for security.QAACLFactory."""

    def test___acl__(self):
        """Packager and QA groups both receive ALL_PERMISSIONS; DENY_ALL closes the list."""
        request = testing.DummyRequest()
        request.registry.settings = {'mandatory_packager_groups': ['packagers'], 'qa_groups': ['fedora-ci-users']}
        factory = security.QAACLFactory(request)
        self.assertCountEqual(
            factory.__acl__(),
            [(Allow, 'group:packagers', ALL_PERMISSIONS),
             (Allow, 'group:fedora-ci-users', ALL_PERMISSIONS),
             DENY_ALL],
        )
class StructuredDatasetParamType(click.ParamType):
    """Click parameter type that coerces CLI values into StructuredDataset objects."""

    name = 'structured dataset path (dir/file)'

    def convert(self, value: typing.Any, param: typing.Optional[click.Parameter], ctx: typing.Optional[click.Context]) -> typing.Any:
        # An existing StructuredDataset passes through untouched; a string is
        # interpreted as a URI; anything else is assumed to be a dataframe.
        if isinstance(value, StructuredDataset):
            return value
        if isinstance(value, str):
            return StructuredDataset(uri=value)
        return StructuredDataset(dataframe=value)
# fix: restored the stripped @pytest.mark prefix — a bare `.param_file(...)`
# statement is a syntax error (pytest-param-files mark).
@pytest.mark.param_file((FIXTURE_PATH / 'sphinx_directives.md'))
def test_sphinx_directives(file_params, sphinx_doctree_no_tr: CreateDoctree):
    """Render each fixture case through Sphinx and compare normalized doctrees."""
    if (file_params.title.startswith('SKIP') or file_params.title.startswith('SPHINX4-SKIP')):
        pytest.skip(file_params.title)
    sphinx_doctree_no_tr.set_conf({'extensions': ['myst_parser']})
    pformat = sphinx_doctree_no_tr(file_params.content, 'index.md').pformat('index')
    # Normalize rendering differences across Sphinx/docutils versions.
    pformat = pformat.replace('<glossary sorted="False">', '<glossary>')
    # fix: the comparison constant was lost (syntax error). 2**31 - 1 detects
    # a 32-bit build, which is the usual sys.maxsize guard — TODO confirm the
    # original constant and the replacement arguments, which were garbled to
    # empty strings in this copy (currently a no-op).
    if (sys.maxsize == 2147483647):
        pformat = pformat.replace('""', '""')
    pformat = pformat.replace('classes="sig sig-object sig sig-object"', 'classes="sig sig-object"')
    pformat = pformat.replace('classes="sig-name descname sig-name descname"', 'classes="sig-name descname"')
    pformat = pformat.replace('classes="sig-prename descclassname sig-prename descclassname"', 'classes="sig-prename descclassname"')
    pformat = pformat.replace('no-contents-entry="False" no-index="False" no-index-entry="False" no-typesetting="False" ', '')
    file_params.assert_expected(pformat, rstrip_lines=True)
def get_logger(name: str, logger_class: Union[(Type[TLogger], None)]=None) -> TLogger:
    """Return the logger for *name*, optionally forcing a specific Logger subclass.

    When *logger_class* is given, an already-registered logger of a different
    concrete class is evicted so the registry hands back the requested type.
    """
    if logger_class is None:
        return cast(TLogger, logging.getLogger(name))
    with _use_logger_class(logger_class):
        registry = logging.Logger.manager.loggerDict
        existing = registry.get(name)
        if existing is not None and type(existing) is not logger_class:
            # Drop the stale entry; getLogger() below re-creates it using the
            # logger class installed by _use_logger_class.
            del registry[name]
        return cast(TLogger, logging.getLogger(name))
def load_primary_xml(dirname):
    """Parse the repodata primary XML found under *dirname*.

    Returns a dict with 'packages' (name -> metadata dict), plus 'hrefs' and
    'names' sets collected across all package entries.
    """
    primary_path = glob.glob(os.path.join(dirname, '*primary*xml*'))[0]
    dom = minidom.parseString(extract(primary_path))
    packages = {}
    hrefs = set()
    names = set()
    for node in dom.getElementsByTagName('package'):
        pkg_name = node.getElementsByTagName('name')[0].firstChild.nodeValue
        checksum_type = node.getElementsByTagName('checksum')[0].getAttribute('type')
        location = node.getElementsByTagName('location')[0]
        entry = {
            'name': pkg_name,
            'chksum_type': checksum_type,
            'href': location.getAttribute('href'),
            'xml:base': location.getAttribute('xml:base'),
        }
        # Later entries with the same name overwrite earlier ones, matching
        # the original behavior.
        packages[pkg_name] = entry
        names.add(pkg_name)
        hrefs.add(entry['href'])
    return {'packages': packages, 'hrefs': hrefs, 'names': names}
def test_gh():
    # Gaussian-heuristic consistency check: the fpylll-adjusted GH radius must
    # agree with gaussian_heuristic() computed from the GSO norms to within 1%.
    try:
        from fpylll.numpy import dump_r
    except ImportError:
        # NumPy-backed fpylll not available: nothing to test.
        return
    for n in dimensions:
        set_random_seed(n)
        A = make_integer_matrix(n)
        try:
            # Prefer long-double precision when the build supports it.
            M = GSO.Mat(A, float_type='ld')
        except ValueError:
            M = GSO.Mat(A, float_type='d')
        M.discover_all_rows()
        M.update_gso()
        radius = M.get_r(0, 0)
        root_det = M.get_root_det(0, n)
        # Inflate the radius (x2000) so the GH bound is the binding constraint.
        (gh_radius, ge) = adjust_radius_to_gh_bound((2000 * radius), 0, n, root_det, 1.0)
        gh1 = (gh_radius * (2 ** ge))
        r = dump_r(M, 0, n)
        gh2 = gaussian_heuristic(r)
        # Both estimates must match to within 1% relative error.
        assert (abs(((gh1 / gh2) - 1)) < 0.01)
class Verbose(Command):
    """Built-in command that prints or changes the interactive verbosity level."""

    HELP = 'Prints or changes verbosity level, accepts integer or True/False'
    CMD = ':verbose'

    def __init__(self):
        super(Verbose, self).__init__()
        self._built_in = True

    def run_interactive(self, cmd, args, raw):
        """With args, set the verbosity; without, print the current level."""
        ctx = context.get_context()
        if not args:
            print('Current verbosity: {}'.format(ctx.args.verbose))
        else:
            ctx.set_verbose(args)

    def get_command_names(self):
        return [self.CMD]

    def get_help(self, cmd, *args):
        return self.HELP
def test_sign_and_recover_message(ethereum_private_key_file):
    """Signing a message recovers exactly one address: the signer's own."""
    signer = EthereumCrypto(ethereum_private_key_file)
    signature = signer.sign_message(message=b'hello')
    assert len(signature) > 0, 'The len(signature) must not be 0'
    recovered = EthereumApi.recover_message(message=b'hello', signature=signature)
    assert len(recovered) == 1, 'Wrong number of addresses recovered.'
    assert recovered[0] == signer.address, 'Failed to recover the correct address.'
def test_block():
    """Unicode block lookup: BMP boundaries plus recently-added blocks."""
    assert (unicodedata.block('\x00') == 'Basic Latin')
    assert (unicodedata.block('\x7f') == 'Basic Latin')
    assert (unicodedata.block('\x80') == 'Latin-1 Supplement')
    # fix: the non-ASCII literals were lost in transit (empty strings);
    # restored as escapes from each named block's defined code-point range
    # (Georgian Extended U+1C90..U+1CBF, Arabic Extended-B U+0870..U+089F).
    assert (unicodedata.block('\u1c90') == 'Georgian Extended')
    assert (unicodedata.block('\u0870') == 'Arabic Extended-B')
    assert (unicodedata.block('\U00011b00') == 'Devanagari Extended-A')
class StateGraph(object):
    """Lazily-built reachability graph of a Petri net.

    Each reachable marking of the net is mapped to an integer state number
    (allocated in creation order, starting at 0 for the initial marking).
    States are expanded on demand, so the graph can be explored without
    being fully built.

    Fixes over the previous revision:
      * `_remove_state` used a bare ``except:`` to drain ``_todo``; it now
        removes explicitly and catches nothing it should not.
      * `_remove_state` iterated ``self._pred[state]`` / ``self._succ[state]``
        while `_remove_edge` deleted keys from those same dicts, which raises
        ``RuntimeError`` on Python 3; iteration now runs over a snapshot.
    """

    def __init__(self, net):
        """Create the graph; state 0 is the net's initial marking.

        The net is copied so that building the graph does not disturb the
        caller's instance.
        """
        self.net = net.copy()
        self._todo = []        # states created but not yet expanded
        self._done = set()     # states whose successors have been computed
        self._removed = set()  # states deleted via _remove_state
        self._state = {}       # marking -> state number
        self._marking = {}     # state number -> marking
        self._succ = {}        # state -> {successor: {(trans, mode), ...}}
        self._pred = {}        # state -> {predecessor: {(trans, mode), ...}}
        self._last = -1        # highest state number allocated so far
        self._create_state(net.get_marking(), None, None, None)
        self._current = 0

    def _create_state(self, marking, source, trans, mode):
        """Allocate a state for `marking` and queue it for expansion.

        `source`, `trans` and `mode` describe how the marking was reached;
        they are accepted for subclasses but ignored here.
        """
        self._last += 1
        self._marking[self._last] = marking
        self._state[marking] = self._last
        self._pred[self._last] = {}
        self._succ[self._last] = {}
        self._todo.append(self._last)
        return self._last

    def goto(self, state):
        """Make `state` current and set the net to its marking.

        ``None`` (or a removed state) falls back to :meth:`current`.
        Raises ``ValueError`` for a state that was never created.
        """
        if state is None or state in self._removed:
            state = self.current()
        if state in self._marking:
            if self._current != state:
                self._current = state
                self.net.set_marking(self._marking[state])
        else:
            raise ValueError('unknown state')

    def current(self):
        """Return the current state, or a live fallback if it was removed."""
        if self._current in self._removed:
            if len(self._todo) > 0:
                return self._todo[0]
            elif len(self._done) > 0:
                return next(iter(self._done))
            else:
                raise ConstraintError('all states removed')
        else:
            return self._current

    def __getitem__(self, state):
        """Return the marking attached to `state`."""
        return self._marking[state]

    def _remove_state(self, state):
        """Delete `state` and every edge touching it; return its marking."""
        self._removed.add(state)
        self._done.discard(state)
        # A state occurs at most once in _todo, but stay defensive.
        while state in self._todo:
            self._todo.remove(state)
        marking = self._marking.pop(state)
        del self._state[marking]
        # Iterate over snapshots: _remove_edge deletes emptied entries from
        # the very dicts being walked.
        if state in self._pred:
            for pred in list(self._pred[state]):
                for label in self._pred[state][pred].copy():
                    self._remove_edge(pred, state, label)
        if state in self._succ:
            for succ in list(self._succ[state]):
                for label in self._succ[state][succ].copy():
                    self._remove_edge(state, succ, label)
        if state == self._current:
            # Move off the deleted state.
            self.goto(None)
        return marking

    def _create_edge(self, source, target, label):
        """Record edge source -> target labelled with `label` = (trans, mode)."""
        self._succ[source].setdefault(target, set()).add(label)
        self._pred[target].setdefault(source, set()).add(label)

    def _remove_edge(self, source, target, label):
        """Drop one labelled edge, pruning emptied adjacency entries."""
        self._succ[source][target].remove(label)
        if len(self._succ[source][target]) == 0:
            del self._succ[source][target]
        self._pred[target][source].remove(label)
        if len(self._pred[target][source]) == 0:
            del self._pred[target][source]

    def __contains__(self, marking):
        return marking in self._state

    def successors(self, state=None):
        """Yield (successor, trans, mode) triples; expands `state` first."""
        if state is None:
            state = self.current()
        self._process(state)
        return ((succ, trans, mode)
                for succ in self._succ[state]
                for trans, mode in self._succ[state][succ])

    def predecessors(self, state=None):
        """Yield (predecessor, trans, mode); only already-built edges."""
        if state is None:
            state = self.current()
        return ((pred, trans, mode)
                for pred in self._pred[state]
                for trans, mode in self._pred[state][pred])

    def _fire(self, trans, mode):
        # Hook for subclasses to customise transition firing.
        trans.fire(mode)

    def _process(self, state):
        """Ensure every state up to `state` is expanded, restoring position."""
        current = self.current()
        for _ in self._build(state):
            pass
        self.goto(current)

    def _get_state(self, marking):
        """Return the state number of `marking`, or None if unknown."""
        return self._state.get(marking, None)

    def _compute(self, state):
        """Expand `state`: fire every enabled (trans, mode) from its marking."""
        self._done.add(state)
        self.goto(state)
        marking = self.net.get_marking()
        for trans in self.net.transition():
            for mode in trans.modes():
                self._fire(trans, mode)
                new_marking = self.net.get_marking()
                target = self._get_state(new_marking)
                if target is None:
                    target = self._create_state(new_marking, state, trans, mode)
                # `state` may have been dropped by a subclass during firing.
                if state in self._marking:
                    self._create_edge(state, target, (trans, mode))
                self.net.set_marking(marking)
                if state not in self._marking:
                    return

    def __len__(self):
        """Number of states discovered so far (expanded plus pending)."""
        return len(self._done) + len(self._todo)

    def __iter__(self):
        """Iterate over all states, building the graph as needed.

        The net is set to each state's marking in turn; the previously
        current state is restored afterwards.
        """
        current = self.current()
        for state in sorted(self._done):
            self.goto(state)
            yield state
        for state in self._build():
            self.goto(state)
            yield state
        self.goto(current)

    def _build(self, stop=None):
        """Pop and expand pending states (up to `stop`), yielding each."""
        while len(self._todo) > 0 and (stop is None or self._todo[0] <= stop):
            state = self._todo.pop(0)
            self._compute(state)
            yield state

    def build(self):
        """Build the whole graph."""
        for _ in self._build():
            pass

    def completed(self):
        """True when no state is left to expand."""
        return len(self._todo) == 0

    def todo(self):
        """Number of states waiting to be expanded."""
        return len(self._todo)
class StackableBaseEdge(BaseEdge):
    """Edge for boxes that stack on each other (feet/corresponding holes).

    Draws a trapezoid-shaped foot profile: a straight segment of width
    ``s.width`` on each side, connected by two arc pairs of angle
    ``s.angle`` and a long straight middle.  Subclasses flip `bottom`
    for the top (hole) counterpart.
    """
    char = 's'
    description = 'Abstract Stackable class'
    # True = foot side (bottom of the box); False = matching top side.
    bottom = True
    def __init__(self, boxes, settings, fingerjointsettings) -> None:
        super().__init__(boxes, settings)
        # Kept for subclasses that render finger-joint holes for the feet.
        self.fingerjointsettings = fingerjointsettings
    def __call__(self, length, **kw):
        """Draw the stackable edge of the given `length`."""
        s = self.settings
        # Arc radius so that two opposing arcs of s.angle rise s.height/2 each.
        r = ((s.height / 2.0) / (1 - math.cos(math.radians(s.angle))))
        # Horizontal extent consumed by one arc.
        l = (r * math.sin(math.radians(s.angle)))
        # Direction of the profile: +1 for feet, -1 for the top recess.
        p = (1 if self.bottom else (- 1))
        if (self.bottom and s.bottom_stabilizers):
            # Extra stabilizer wall drawn below the edge, inside saved context.
            with self.saved_context():
                sp = self.boxes.spacing
                self.moveTo(((- sp) / 2), ((- s.height) - sp))
                self.rectangularWall((length - (1.05 * self.boxes.thickness)), s.bottom_stabilizers)
        self.boxes.edge(s.width, tabs=1)
        self.boxes.corner((p * s.angle), r)
        self.boxes.corner(((- p) * s.angle), r)
        # Middle straight: total length minus both feet and the four arcs.
        self.boxes.edge(((length - (2 * s.width)) - (4 * l)))
        self.boxes.corner(((- p) * s.angle), r)
        self.boxes.corner((p * s.angle), r)
        self.boxes.edge(s.width, tabs=1)
    def _height(self):
        # Overall vertical extent of the edge profile.
        return ((self.settings.height + self.settings.holedistance) + self.settings.thickness)
    def startwidth(self) -> float:
        """Offset of the edge start; only the foot side is displaced."""
        return (self._height() if self.bottom else 0)
    def margin(self) -> float:
        """Extra space to reserve beyond the edge."""
        if self.bottom:
            if self.settings.bottom_stabilizers:
                return (self.settings.bottom_stabilizers + self.boxes.spacing)
            else:
                return 0
        else:
            return self.settings.height
class OptionSeriesPieSonificationTracksMappingTremoloDepth(Options):
    """Accessors for the pie-series sonification `tremoloDepth` mapping.

    Each option is a property: the getter reads the stored configuration
    value (None when unset); the setter writes it via ``_config``.

    NOTE(review): previously every getter/setter pair was two plain
    methods with the same name, so the second ``def`` silently replaced
    the first and the getters were unreachable dead code.  Restored the
    evidently intended property pattern used by this generated-options
    style of class.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class JsPercentage(JsRecFunc):
    """Record transform: group rows by `keys` and turn each of `values`
    into its percentage of that column's overall total.

    `value` holds the JavaScript body injected by the framework; it reads
    ``data``, ``keys`` and ``values`` and fills ``result``.
    """

    # NOTE(review): this helper takes no `self`; it was previously a plain
    # method, so calling it on an instance would mis-bind `jsSchema`.
    # Declared as a staticmethod, which keeps class-level calls working.
    @staticmethod
    def extendColumns(jsSchema, params):
        # params = (keys, values); both must be present to extend the schema.
        if ((params[0] is not None) and (params[1] is not None)):
            jsSchema['keys'] |= set(params[0])
            jsSchema['values'] |= set(params[1])

    alias = 'percentage'
    params = ('keys', 'values')
    value = ' \n if ((keys == null) || (values == null)){result = data}\n else{\n var temp = {}; var order = []; var sumPerSeries = {};\n data.forEach( function(rec) { \n var aggKey = []; keys.forEach(function(k){ aggKey.push(rec[k])}); \n var newKey = aggKey.join("#"); if (!(newKey in temp)) {order.push(newKey)};\n if (!(newKey in temp)) {temp[newKey] = {}};\n values.forEach(function(v) {\n if (!(v in sumPerSeries)) {sumPerSeries[v] = rec[v]} else {sumPerSeries[v] += rec[v]};\n if (!(v in temp[newKey])) {temp[newKey][v] = rec[v]} \n else {temp[newKey][v] += rec[v]}})});\n order.forEach(function(label) {\n var rec = {}; var splitKey = label.split("#");\n keys.forEach(function(k, i) {rec[k] = splitKey[i]});\n for(var v in temp[label]) {rec[v] = 100 * (temp[label][v] / sumPerSeries[v])};\n result.push(rec)})}'
def quadrupole3d_20(ax, da, A, bx, db, B, R):
    """Machine-generated 3D quadrupole integral kernel (angular momenta 2, 0).

    Common-subexpression-eliminated arithmetic produced by a code
    generator — do not edit by hand.  Presumably `ax`/`bx` are Gaussian
    exponents, `da`/`db` contraction coefficients, `A`/`B` centres and
    `R` the multipole origin (TODO confirm against the generator).
    Returns a (6, 6, 1) float array of summed contributions.

    NOTE(review): several coefficients are literal 0.0 (e.g. x17, x35),
    so many result entries are identically zero — presumably intended
    output of the generator for this angular-momentum pair; verify.
    """
    result = numpy.zeros((6, 6, 1), dtype=float)
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + R[0])
    x4 = (x3 ** 2)
    x5 = (3.0 * x0)
    x6 = (x2 + A[0])
    x7 = (x3 * x6)
    x8 = (x0 * ((((- 2.0) * x1) + A[0]) + R[0]))
    x9 = (x0 + (2.0 * x7))
    x10 = ((x3 * x9) + x8)
    x11 = 1.
    x12 = ((ax * bx) * x0)
    x13 = (((5. * da) * db) * numpy.exp(((- x12) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x14 = (numpy.sqrt(x0) * x13)
    x15 = (x0 * x14)
    x16 = (x11 * x15)
    x17 = (0. * x16)
    x18 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x19 = (- x18)
    x20 = (x19 + A[1])
    x21 = (0.5 * x0)
    x22 = (x14 * x21)
    x23 = (x10 * x22)
    x24 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x25 = (- x24)
    x26 = (x25 + A[2])
    x27 = ((0. * (x20 ** 2)) + (0. * x21))
    x28 = (x21 + x4)
    x29 = ((x0 ** 1.5) * x13)
    x30 = (x11 * x29)
    x31 = (x28 * x30)
    x32 = (x15 * x26)
    x33 = ((0. * x21) + (0. * (x26 ** 2)))
    x34 = (x19 + R[1])
    x35 = (0. * x16)
    x36 = (x35 * ((x6 * x9) + x8))
    x37 = (x20 * x34)
    x38 = (x21 + x37)
    x39 = (x21 + x7)
    x40 = (x29 * x39)
    x41 = (x0 * ((((- 2.0) * x18) + A[1]) + R[1]))
    x42 = (x0 + (2.0 * x37))
    x43 = ((x20 * x42) + x41)
    x44 = (x3 * x35)
    x45 = (x16 * x3)
    x46 = (x25 + R[2])
    x47 = (x15 * x20)
    x48 = (x26 * x46)
    x49 = (x21 + x48)
    x50 = (x0 * ((((- 2.0) * x24) + A[2]) + R[2]))
    x51 = (x0 + (2.0 * x48))
    x52 = ((x26 * x51) + x50)
    x53 = (x34 ** 2)
    x54 = (x21 + x53)
    x55 = (x30 * x54)
    x56 = ((0. * x21) + (0. * (x6 ** 2)))
    x57 = ((x34 * x42) + x41)
    x58 = (x22 * x57)
    x59 = (x15 * x6)
    x60 = (x46 ** 2)
    x61 = (x21 + x60)
    x62 = (x30 * x61)
    x63 = ((x46 * x51) + x50)
    x64 = (x22 * x63)
    result[(0, 0, 0)] = numpy.sum((x17 * ((x0 * (((2.0 * x4) + x5) + (4.0 * x7))) + ((2.0 * x10) * x6))))
    result[(0, 1, 0)] = numpy.sum((x20 * x23))
    result[(0, 2, 0)] = numpy.sum((x23 * x26))
    result[(0, 3, 0)] = numpy.sum((x27 * x31))
    result[(0, 4, 0)] = numpy.sum(((x20 * x28) * x32))
    result[(0, 5, 0)] = numpy.sum((x31 * x33))
    result[(1, 0, 0)] = numpy.sum((x34 * x36))
    result[(1, 1, 0)] = numpy.sum((x38 * x40))
    result[(1, 2, 0)] = numpy.sum(((x32 * x34) * x39))
    result[(1, 3, 0)] = numpy.sum((x43 * x44))
    result[(1, 4, 0)] = numpy.sum(((x3 * x32) * x38))
    result[(1, 5, 0)] = numpy.sum(((x33 * x34) * x45))
    result[(2, 0, 0)] = numpy.sum((x36 * x46))
    result[(2, 1, 0)] = numpy.sum(((x39 * x46) * x47))
    result[(2, 2, 0)] = numpy.sum((x40 * x49))
    result[(2, 3, 0)] = numpy.sum(((x27 * x45) * x46))
    result[(2, 4, 0)] = numpy.sum(((x3 * x47) * x49))
    result[(2, 5, 0)] = numpy.sum((x44 * x52))
    result[(3, 0, 0)] = numpy.sum((x55 * x56))
    result[(3, 1, 0)] = numpy.sum((x58 * x6))
    result[(3, 2, 0)] = numpy.sum(((x32 * x54) * x6))
    result[(3, 3, 0)] = numpy.sum((x17 * ((x0 * (((4.0 * x37) + x5) + (2.0 * x53))) + ((2.0 * x20) * x57))))
    result[(3, 4, 0)] = numpy.sum((x26 * x58))
    result[(3, 5, 0)] = numpy.sum((x33 * x55))
    result[(4, 0, 0)] = numpy.sum((((x16 * x34) * x46) * x56))
    result[(4, 1, 0)] = numpy.sum(((x38 * x46) * x59))
    result[(4, 2, 0)] = numpy.sum(((x34 * x49) * x59))
    result[(4, 3, 0)] = numpy.sum(((x35 * x43) * x46))
    result[(4, 4, 0)] = numpy.sum(((x29 * x38) * x49))
    result[(4, 5, 0)] = numpy.sum(((x34 * x35) * x52))
    result[(5, 0, 0)] = numpy.sum((x56 * x62))
    result[(5, 1, 0)] = numpy.sum(((x47 * x6) * x61))
    result[(5, 2, 0)] = numpy.sum((x6 * x64))
    result[(5, 3, 0)] = numpy.sum((x27 * x62))
    result[(5, 4, 0)] = numpy.sum((x20 * x64))
    result[(5, 5, 0)] = numpy.sum((x17 * ((x0 * (((4.0 * x48) + x5) + (2.0 * x60))) + ((2.0 * x26) * x63))))
    return result
class SceneManager(HasTraits):
    """Tracks server-side scenes and the clients viewing them.

    Forwards each scene's events to every registered client and lets
    clients invoke scene methods by scene id.
    """

    scenes = Dict
    figure_to_id = Dict
    clients = List
    call_later = Callable

    def add_client(self, client):
        """Register `client` to receive scene events."""
        self.clients.append(client)

    def remove_client(self, client):
        """Stop forwarding scene events to `client`."""
        self.clients.remove(client)

    def register_figure(self, figure):
        """Wrap `figure` in a RemoteScene, index it, and return its id."""
        scene = RemoteScene(figure=figure)
        scene_id = id(scene)
        self.figure_to_id[figure] = scene_id
        self.scenes[scene_id] = scene
        self._setup_events(scene)
        if self.call_later:
            scene.call_later = self.call_later
        return scene_id

    def figure(self, *args, **kw):
        """Create a new figure, register it, and return the scene id."""
        new_figure = figure(*args, **kw)
        return self.register_figure(new_figure)

    def message(self, obj_id, method_name, *args, **kw):
        """Fire-and-forget variant of call(): result is discarded."""
        self.call(obj_id, method_name, *args, **kw)

    def call(self, obj_id, method_name, *args, **kw):
        """Invoke `method_name` on the scene identified by `obj_id`."""
        target = self.scenes[obj_id]
        bound = getattr(target, method_name)
        return bound(*args, **kw)

    def _setup_events(self, obj):
        # Route the scene's 'event' trait through _forward_event.
        obj.on_trait_change(self._forward_event, 'event')

    def _forward_event(self, event):
        # Broadcast the event to every connected client.
        for client in self.clients:
            client.handle_event(event)

    def _call_later_changed(self, f):
        # Traits change handler: propagate the new scheduler to all scenes.
        for scene in self.scenes.values():
            scene.call_later = f
def get_endpoint_users(session, endpoint_id, users):
    """Build one summary record per user for the given endpoint.

    Each record carries the user's first-request date, the (simplified)
    per-user timing values and a display color.
    """
    # Simplify each user's timing series down to at most 100 points.
    times = get_user_data_grouped(
        session, (lambda x: simplify(x, 100)), (Request.endpoint_id == endpoint_id)
    )
    first_requests = get_first_requests(session, endpoint_id)
    records = []
    for user in users:
        records.append({
            'user': user,
            'date': get_value(first_requests, user),
            'values': get_value(times, user),
            'color': get_color(user),
        })
    return records
# NOTE(review): the decorator below appears truncated by extraction — its
# receiver object (the part before `.only_on_targets`) is missing; confirm
# against the original test module.
.only_on_targets(['bigquery'])
def test_wildcard_name_table_volume_anomalies(test_id: str, dbt_project: DbtProject):
    """Volume-anomaly dbt test run against a wildcarded table name should fail.

    Seeds one row per generated date up to (but excluding) yesterday, then
    runs the test against `<test_id minus its last character>*`.
    """
    utc_today = datetime.utcnow().date()
    # Rows stop before yesterday, so recent volume drops to zero.
    data = [{TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT)} for cur_date in generate_dates(base_date=utc_today) if (cur_date < (utc_today - timedelta(days=1)))]
    wildcarded_table_name = (test_id[:(- 1)] + '*')
    test_result = dbt_project.test(wildcarded_table_name, DBT_TEST_NAME, DBT_TEST_ARGS, data=data, table_name=test_id)
    assert (test_result['status'] == 'fail')
def CreateConv2dFwdOperator(manifest, operation_kind, out_element_op, out_data_op=''):
    """Instantiate grouped conv2d-forward operator variants and register them.

    Builds the cross product of conv2d/gemm specializations with a fixed set
    of tile / block-transfer configurations (f16 tensors, layouts G_NHW_C x
    G_K_YX_C -> G_NHW_K), appends every operation to `manifest`, and returns
    the list of created operations.

    Fix over the previous revision: the block-transfer sanity check used
    ``assert (cond and msg)``, which always passes while `cond` is true and
    never displays the message; replaced with the two-argument assert form.
    """
    a_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.G_NHW_C)
    b_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.G_K_YX_C)
    c_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.G_NHW_K)
    in_element_op = library.TensorOperation.PassThrough
    tile_descriptions = [conv.GroupTileDesc(1, 256, 256, 128, 32, 8, 8, 32, 32, 4, 2), conv.GroupTileDesc(1, 256, 128, 256, 32, 8, 8, 32, 32, 2, 4), conv.GroupTileDesc(1, 128, 128, 128, 32, 8, 8, 32, 32, 4, 2), conv.GroupTileDesc(1, 256, 128, 128, 32, 8, 8, 32, 32, 2, 2), conv.GroupTileDesc(1, 128, 128, 64, 32, 8, 8, 32, 32, 2, 2), conv.GroupTileDesc(1, 128, 64, 128, 32, 8, 8, 32, 32, 2, 2), conv.GroupTileDesc(1, 64, 64, 64, 32, 8, 8, 32, 32, 2, 2), conv.GroupTileDesc(1, 256, 128, 64, 32, 8, 8, 32, 32, 2, 1), conv.GroupTileDesc(1, 256, 64, 128, 32, 8, 8, 32, 32, 1, 2), conv.GroupTileDesc(1, 128, 32, 128, 32, 8, 8, 32, 32, 1, 2), conv.GroupTileDesc(1, 64, 64, 32, 32, 8, 8, 32, 32, 2, 1), conv.GroupTileDesc(1, 64, 32, 64, 32, 8, 8, 32, 32, 1, 2)]
    c_block_descriptions = [conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 16, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 4], 8), conv.CBlockTransferDesc(1, 1, [1, 16, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 16, 1, 4], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 16, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 16, 1, 4], 8), conv.CBlockTransferDesc(1, 1, [1, 16, 1, 4], 8)]
    # Derive the A/B block-transfer thread layout from the block size.
    block_descriptions = []
    for t in tile_descriptions:
        block_transfer = None
        if (t.block_size == 256):
            block_transfer = [4, 64, 1]
        if (t.block_size == 128):
            block_transfer = [4, 32, 1]
        if (t.block_size == 64):
            block_transfer = [4, 16, 1]
        assert block_transfer is not None, (
            'Cannot determine block_transfer_size with block_size ' + str(t.block_size))
        block_descriptions.append(conv.BlockTransferDesc(block_transfer, [1, 0, 2], [1, 0, 2], 2, 8, 8, 1))
    conv2d_specialization = [conv.Conv2DSpecialization.ConvFwdDefault, conv.Conv2DSpecialization.ConvFwd1x1P0, conv.Conv2DSpecialization.ConvFwd1x1S1P0]
    gemm_specialization = [conv.Conv2DSpecialization.GemmDefault, conv.Conv2DSpecialization.MNKPadding]
    operations = []
    for conv2d_spec in conv2d_specialization:
        for gemm_spec in gemm_specialization:
            for (tile_desc, block_desc, c_block_desc) in zip(tile_descriptions, block_descriptions, c_block_descriptions):
                new_operation = conv.Conv2DOperation(operation_kind=operation_kind, extra_kind=out_element_op, xdl_op_type=conv.XdlOpType(operation_kind.value), A=a_element_desc, B=b_element_desc, C=c_element_desc, a_elem_op=in_element_op, b_elem_op=in_element_op, epilogue_functor=out_element_op, c_data_op=out_data_op, conv2d_specialization=conv2d_spec, gemm_specialization=gemm_spec, tile_desc=tile_desc, a_block_transfer=block_desc, b_block_transfer=block_desc, c_block_transfer=c_block_desc)
                manifest.append(new_operation)
                operations.append(new_operation)
    # Second pass: odd-channel-count specialization with extra tiles.
    # Note: block_descriptions is REBUILT (not extended) to match the full,
    # extended tile list length.
    conv2d_specialization = [conv.Conv2DSpecialization.ConvFwdOddC]
    tile_descriptions += [conv.GroupTileDesc(1, 256, 128, 64, 32, 8, 8, 32, 32, 2, 1), conv.GroupTileDesc(1, 256, 128, 64, 32, 8, 8, 32, 32, 2, 1), conv.GroupTileDesc(1, 256, 256, 64, 32, 8, 8, 32, 32, 4, 1), conv.GroupTileDesc(1, 128, 128, 64, 32, 8, 8, 32, 32, 2, 2), conv.GroupTileDesc(1, 128, 64, 64, 32, 8, 8, 32, 32, 1, 2), conv.GroupTileDesc(1, 256, 256, 16, 32, 8, 8, 16, 16, 4, 1)]
    block_descriptions = [conv.BlockTransferDesc([4, 8, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 8, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 4, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 8, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 4, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 4, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 2, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 8, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 8, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 4, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 2, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 2, 8], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([2, 32, 4], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([2, 32, 4], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([2, 32, 4], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([2, 16, 4], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([2, 16, 4], [1, 0, 2], [1, 0, 2], 2, 1, 1, 1), conv.BlockTransferDesc([4, 16, 4], [1, 0, 2], [1, 0, 2], 2, 2, 2, 1)]
    c_block_descriptions += [conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8), conv.CBlockTransferDesc(1, 1, [1, 32, 1, 4], 8), conv.CBlockTransferDesc(1, 1, [1, 16, 1, 4], 8), conv.CBlockTransferDesc(4, 1, [1, 256, 1, 1], 1)]
    for conv2d_spec in conv2d_specialization:
        for gemm_spec in gemm_specialization:
            for (tile_desc, block_desc, c_block_desc) in zip(tile_descriptions, block_descriptions, c_block_descriptions):
                new_operation = conv.Conv2DOperation(operation_kind=operation_kind, extra_kind=out_element_op, xdl_op_type=conv.XdlOpType(operation_kind.value), A=a_element_desc, B=b_element_desc, C=c_element_desc, a_elem_op=in_element_op, b_elem_op=in_element_op, epilogue_functor=out_element_op, c_data_op=out_data_op, conv2d_specialization=conv2d_spec, gemm_specialization=gemm_spec, tile_desc=tile_desc, a_block_transfer=block_desc, b_block_transfer=block_desc, c_block_transfer=c_block_desc)
                manifest.append(new_operation)
                operations.append(new_operation)
    return operations
class Stats():
    """Aggregates and reports inference accuracy statistics for one binary.

    Totals are accumulated from per-node-type class counters (Function,
    Reg/RegBase, Offset and friends, Ttype), which are updated elsewhere,
    and written to the paths configured on `binary.config`.
    """
    def __init__(self, binary):
        # Binary under evaluation; provides nodes/edges and output paths.
        self.binary = binary
        # (train_name, test_name) -> count of matching predictions
        self.corrects = dict()
        # (train_name, test_name) -> count of mismatching predictions
        self.errors = dict()
        # Guard so stat() aggregates only once.
        self.stated = False
    def stat(self):
        """Aggregate per-class counters into summary totals (idempotent)."""
        if (not self.stated):
            self.stated = True
            self.binary.nodes.stat()
            self.binary.edges.stat()
            # Name-level counters: functions + offsets + registers.
            self.name_known = ((Function.known + Offset.known) + Reg.known)
            self.name_unknown = ((Function.unknown + Offset.unknown) + Reg.unknown)
            self.name_inf = ((Function.inf + Offset.inf) + Reg.inf)
            # NOTE(review): uses RegBase.correct while the other name_*
            # totals use Reg.* — confirm this asymmetry is intended.
            self.name_correct = ((Function.correct + Offset.correct) + RegBase.correct)
            self.known = (self.name_known + Ttype.known)
            self.unknown = (self.name_unknown + Ttype.unknown)
            self.inf = (self.name_inf + Ttype.inf)
            self.giv = (((Function.giv + Reg.giv) + Offset.giv) + GivElm.total)
            self.total = ((self.name_inf + Ttype.inf) + self.giv)
            # First-pass binary-classification confusion counts.
            self.tp_1p = (Offset.tp_1p + RegBase.tp_1p)
            self.fp_1p = (Offset.fp_1p + RegBase.fp_1p)
            self.tn_1p = (Offset.tn_1p + RegBase.tn_1p)
            self.fn_1p = (Offset.fn_1p + RegBase.fn_1p)
            self.correct = (self.name_correct + Ttype.correct)
    def stat_result(self, nodes_json):
        """Tally correct predictions per node type from inference output.

        `nodes_json` entries carry the inferred label under 'inf' and the
        node id under 'v'.  Also fills self.corrects / self.errors with
        (train_name, test_name) occurrence counts for INF_NODES.
        """
        for node_json in nodes_json:
            if (('inf' in node_json) and (node_json['inf'] != UNKNOWN_LABEL)):
                node = self.binary.nodes.nodes[node_json['v']]
                train_name = node.train_name
                test_name = node_json['inf']
                if (train_name == test_name):
                    # Exact type dispatch (not isinstance): subclasses are
                    # counted under their own branch plus parent counters.
                    if (type(node) is IndirectOffset):
                        IndirectOffset.correct += 1
                        Offset.correct += 1
                    elif (type(node) is DirectOffset):
                        DirectOffset.correct += 1
                        Offset.correct += 1
                    elif (type(node) is StringArrayOffset):
                        StringArrayOffset.correct += 1
                        DirectOffset.correct += 1
                        Offset.correct += 1
                    elif (type(node) is Reg):
                        Reg.correct += 1
                        RegBase.correct += 1
                    elif (type(node) is Function):
                        Function.correct += 1
                    elif (type(node) is Ttype):
                        Ttype.correct += 1
                        type(node.owner).ttype_correct += 1
                        if isinstance(node.owner, StringArrayOffset):
                            DirectOffset.ttype_correct += 1
        for node in self.binary.nodes.nodes.values():
            # Skip nodes whose name was given (not inferred).
            if ((type(node) in INF_NODES) and (not (isinstance(node, Function) and node.is_name_given)) and (not (isinstance(node, DirectOffset) and node.is_name_given))):
                if (node.train_name == node.test_name):
                    if ((node.train_name, node.test_name) not in self.corrects):
                        self.corrects[(node.train_name, node.test_name)] = 0
                    self.corrects[(node.train_name, node.test_name)] += 1
                else:
                    if ((node.train_name, node.test_name) not in self.errors):
                        self.errors[(node.train_name, node.test_name)] = 0
                    self.errors[(node.train_name, node.test_name)] += 1
    def dump_corrects(self):
        """Write correct (train -> test) name pairs, most frequent first."""
        corrects = sorted(self.corrects.items(), key=(lambda i: i[1]), reverse=True)
        with open(self.binary.config.CORRECTS_PATH, 'w') as w:
            for c in corrects:
                w.write('\t{} : {} -> {}\n'.format(c[1], c[0][0], c[0][1]))
    def dump_errors(self):
        """Write wrong (train -> test) name pairs, most frequent first."""
        errors = sorted(self.errors.items(), key=(lambda i: i[1]), reverse=True)
        with open(self.binary.config.ERRORS_PATH, 'w') as w:
            for e in errors:
                w.write('\t{} : {} -> {}\n'.format(e[1], e[0][0], e[0][1]))
    def dump(self):
        """Write the full statistics report (requires stat() to have run)."""
        with open(self.binary.config.STAT_PATH, 'w') as w:
            w.write('path: {}\n'.format(self.binary.config.BINARY_PATH))
            w.write('\n')
            # First-pass precision/recall/F1/accuracy, guarded against /0.
            denominator = (self.tp_1p + self.fp_1p)
            precision_1p = ((self.tp_1p / denominator) if (denominator != 0) else 0)
            denominator = (self.tp_1p + self.fn_1p)
            recall_1p = ((self.tp_1p / denominator) if (denominator != 0) else 0)
            denominator = (precision_1p + recall_1p)
            f1_1p = ((((2 * precision_1p) * recall_1p) / denominator) if (denominator != 0) else 0)
            denominator = (((self.tp_1p + self.fp_1p) + self.tn_1p) + self.fn_1p)
            accuracy_1p = (((self.tp_1p + self.tn_1p) / denominator) if (denominator != 0) else 0)
            w.write('precision_1p: {}\n'.format(precision_1p))
            w.write('recall_1p: {}\n'.format(recall_1p))
            w.write('f1_1p: {}\n'.format(f1_1p))
            w.write('accuracy_1p: {}\n'.format(accuracy_1p))
            w.write('\n')
            # Second-pass overall metrics.
            precision_2p = ((self.correct / self.inf) if (self.inf != 0) else 0)
            recall_2p = ((self.correct / self.known) if (self.known != 0) else 0)
            denominator = (recall_2p + precision_2p)
            f1_2p = ((((2 * recall_2p) * precision_2p) / denominator) if (denominator != 0) else 0)
            w.write('precision_2p: {}\n'.format(precision_2p))
            w.write('recall_2p: {}\n'.format(recall_2p))
            w.write('f1_2p: {}\n'.format(f1_2p))
            w.write('\n')
            # Second-pass name-only metrics.
            precision_name_2p = ((self.name_correct / self.name_inf) if (self.name_inf != 0) else 0)
            recall_name_2p = ((self.name_correct / self.name_known) if (self.name_known != 0) else 0)
            denominator = (recall_name_2p + precision_name_2p)
            f1_name_2p = ((((2 * recall_name_2p) * precision_name_2p) / denominator) if (denominator != 0) else 0)
            w.write('precision_name_2p: {}\n'.format(precision_name_2p))
            w.write('recall_name_2p: {}\n'.format(recall_name_2p))
            w.write('f1_name_2p: {}\n'.format(f1_name_2p))
            w.write('\n')
            # Second-pass type-only metrics.
            precision_ttype_2p = ((Ttype.correct / Ttype.inf) if (Ttype.inf != 0) else 0)
            recall_ttype_2p = ((Ttype.correct / Ttype.known) if (Ttype.known != 0) else 0)
            denominator = (recall_ttype_2p + precision_ttype_2p)
            f1_ttype_2p = ((((2 * recall_ttype_2p) * precision_ttype_2p) / denominator) if (denominator != 0) else 0)
            w.write('precision_ttype_2p: {}\n'.format(precision_ttype_2p))
            w.write('recall_ttype_2p: {}\n'.format(recall_ttype_2p))
            w.write('f1_ttype_2p: {}\n'.format(f1_ttype_2p))
            w.write('\n')
            w.write('time:\n')
            w.write(str(TIMER))
            w.write('\n\n')
            # Raw counters, grouped per node type.
            w.write('total: {}\n'.format(self.total))
            w.write('known: {}\n'.format(self.known))
            w.write('unknown: {}\n'.format(self.unknown))
            w.write('inf: {}\n'.format(self.inf))
            w.write('correct: {}\n'.format(self.correct))
            w.write('\n')
            w.write('name_known: {}\n'.format(self.name_known))
            w.write('name_unknown: {}\n'.format(self.name_unknown))
            w.write('name_inf: {}\n'.format(self.name_inf))
            w.write('name_correct: {}\n'.format(self.name_correct))
            w.write('\n')
            w.write('ttype_known: {}\n'.format(Ttype.known))
            w.write('ttype_unknown: {}\n'.format(Ttype.unknown))
            w.write('ttype_inf: {}\n'.format(Ttype.inf))
            w.write('ttype_correct: {}\n'.format(Ttype.correct))
            w.write('\n')
            w.write('function_total: {}\n'.format(Function.total))
            w.write('function_known: {}\n'.format(Function.known))
            w.write('function_unknown: {}\n'.format(Function.unknown))
            w.write('function_inf: {}\n'.format(Function.inf))
            w.write('function_correct: {}\n'.format(Function.correct))
            w.write('\n')
            w.write('reg_total: {}\n'.format(Reg.total))
            w.write('reg_known: {}\n'.format(Reg.known))
            w.write('reg_unknown: {}\n'.format(Reg.unknown))
            w.write('reg_inf: {}\n'.format(Reg.inf))
            w.write('reg_tp_1p: {}\n'.format(RegBase.tp_1p))
            w.write('reg_fp_1p: {}\n'.format(RegBase.fp_1p))
            w.write('reg_tn_1p: {}\n'.format(RegBase.tn_1p))
            w.write('reg_fn_1p: {}\n'.format(RegBase.fn_1p))
            w.write('reg_correct: {}\n'.format(Reg.correct))
            w.write('\n')
            w.write('offset_total: {}\n'.format(Offset.total))
            w.write('offset_known: {}\n'.format(Offset.known))
            w.write('offset_unknown: {}\n'.format(Offset.unknown))
            w.write('offset_inf: {}\n'.format(Offset.inf))
            w.write('offset_correct: {}\n'.format(Offset.correct))
            w.write('\n')
            w.write('indirectoffset_total: {}\n'.format(IndirectOffset.total))
            w.write('indirectoffset_known: {}\n'.format(IndirectOffset.known))
            w.write('indirectoffset_unknown: {}\n'.format(IndirectOffset.unknown))
            w.write('indirectoffset_inf: {}\n'.format(IndirectOffset.inf))
            w.write('indirectoffset_tp_1p: {}\n'.format(IndirectOffset.tp_1p))
            w.write('indirectoffset_fp_1p: {}\n'.format(IndirectOffset.fp_1p))
            w.write('indirectoffset_tn_1p: {}\n'.format(IndirectOffset.tn_1p))
            w.write('indirectoffset_fn_1p: {}\n'.format(IndirectOffset.fn_1p))
            w.write('indirectoffset_correct: {}\n'.format(IndirectOffset.correct))
            w.write('\n')
            w.write('directoffset_total: {}\n'.format(DirectOffset.total))
            w.write('directoffset_known: {}\n'.format(DirectOffset.known))
            w.write('directoffset_unknown: {}\n'.format(DirectOffset.unknown))
            w.write('directoffset_inf: {}\n'.format(DirectOffset.inf))
            w.write('directoffset_correct: {}\n'.format(DirectOffset.correct))
            w.write('\n')
            w.write('function_ttype_total: {}\n'.format(Function.ttype_total))
            w.write('function_ttype_known: {}\n'.format(Function.ttype_known))
            w.write('function_ttype_unknown: {}\n'.format(Function.ttype_unknown))
            w.write('function_ttype_inf: {}\n'.format(Function.ttype_inf))
            w.write('function_ttype_correct: {}\n'.format(Function.ttype_correct))
            w.write('\n')
            w.write('reg_ttype_total: {}\n'.format(Reg.ttype_total))
            w.write('reg_ttype_known: {}\n'.format(Reg.ttype_known))
            w.write('reg_ttype_unknown: {}\n'.format(Reg.ttype_unknown))
            w.write('reg_ttype_inf: {}\n'.format(Reg.ttype_inf))
            w.write('reg_ttype_correct: {}\n'.format(Reg.ttype_correct))
            w.write('\n')
            w.write('indirectoffset_ttype_total: {}\n'.format(IndirectOffset.ttype_total))
            w.write('indirectoffset_ttype_known: {}\n'.format(IndirectOffset.ttype_known))
            w.write('indirectoffset_ttype_unknown: {}\n'.format(IndirectOffset.ttype_unknown))
            w.write('indirectoffset_ttype_inf: {}\n'.format(IndirectOffset.ttype_inf))
            w.write('indirectoffset_ttype_correct: {}\n'.format(IndirectOffset.ttype_correct))
            w.write('\n')
            w.write('directoffset_ttype_total: {}\n'.format(DirectOffset.ttype_total))
            w.write('directoffset_ttype_known: {}\n'.format(DirectOffset.ttype_known))
            w.write('directoffset_ttype_unknown: {}\n'.format(DirectOffset.ttype_unknown))
            w.write('directoffset_ttype_inf: {}\n'.format(DirectOffset.ttype_inf))
            w.write('directoffset_ttype_correct: {}\n'.format(DirectOffset.ttype_correct))
            w.write('\n')
def gen_function_call(func_attrs, indent=' ', bias_ptr_arg=None):
    """Render the call-site code for this op from its attribute dict.

    Reads A/B inputs, the C output, and their accessors' original shapes,
    and fills FUNC_CALL_TEMPLATE with pointer names and dim references.
    """
    inputs = func_attrs['inputs']
    accessors = func_attrs['input_accessors']
    a_tensor, b_tensor = inputs[0], inputs[1]
    c_tensor = func_attrs['outputs'][0]

    def _dim_refs(shapes):
        # References to the named dim variables, e.g. "&dim_name".
        return ['&' + dim._attrs['name'] for dim in shapes]

    return FUNC_CALL_TEMPLATE.render(
        local_dim_defs=gen_local_dim_defs(func_attrs, indent=indent),
        func_name=func_attrs['name'],
        a_ptr=a_tensor._attrs['name'],
        b_ptr=b_tensor._attrs['name'],
        has_bias=bias_ptr_arg is not None,
        bias_ptr=bias_ptr_arg,
        c_ptr=c_tensor._attrs['name'],
        split_k=func_attrs['split_k'],
        adims=_dim_refs(accessors[0].original_shapes),
        bdims=_dim_refs(accessors[1].original_shapes),
        cdims=_dim_refs(func_attrs['output_accessors'][0].original_shapes),
        indent=indent,
    )
def test_difference_with_two_frames_and_default_values(traces):
    """Difference preprocess with two explicit frames matches the stored fixture."""
    expected = _read('difference_result_frame2_10.npz')
    first_frame = slice(None, 50)
    second_frame = slice(None, 10)
    preprocess = scared.preprocesses.high_order.Difference(
        frame_1=first_frame, frame_2=second_frame
    )
    assert np.array_equal(expected, preprocess(traces))
class TestValueCondition(ExcludeNoneMixin):
    """A set of comparison conditions evaluated against a single value.

    Any combination of operators may be set; a value passes only when it
    satisfies every condition that is not None.
    """
    class Config():
        arbitrary_types_allowed = True
        use_enum_values = True
        smart_union = True
    eq: Optional[NumericApprox] = None
    gt: Optional[NumericApprox] = None
    gte: Optional[NumericApprox] = None
    is_in: Optional[List[Union[(Numeric, str, bool)]]] = None
    lt: Optional[NumericApprox] = None
    lte: Optional[NumericApprox] = None
    not_eq: Optional[Numeric] = None
    not_in: Optional[List[Union[(Numeric, str, bool)]]] = None
    source: Optional[ValueSource] = Field(None, exclude=True)
    def has_condition(self) -> bool:
        """True if at least one comparison operator is configured."""
        configured = (self.eq, self.gt, self.gte, self.is_in,
                      self.lt, self.lte, self.not_in, self.not_eq)
        return any(c is not None for c in configured)
    def check_value(self, value: Numeric) -> bool:
        """Return True when `value` satisfies every configured condition."""
        # Evaluated in the same order as before; stops at the first failure.
        checks = (
            (self.eq, lambda v, ref: v == ref),
            (self.gt, lambda v, ref: v > ref),
            (self.gte, lambda v, ref: v >= ref),
            (self.is_in, lambda v, ref: v in ref),
            (self.lt, lambda v, ref: v < ref),
            (self.lte, lambda v, ref: v <= ref),
            (self.not_eq, lambda v, ref: v != ref),
            (self.not_in, lambda v, ref: v not in ref),
        )
        for ref, passes in checks:
            if ref is not None and not passes(value, ref):
                return False
        return True
    def __str__(self) -> str:
        """Render the configured conditions as 'op=value and op=value ...'."""
        rendered = []
        for op in ('eq', 'gt', 'gte', 'lt', 'lte', 'not_eq', 'is_in', 'not_in'):
            ref = getattr(self, op)
            if ref is None:
                continue
            if isinstance(ref, (float, ApproxValue)):
                rendered.append(f'{op}={ref:.3g}')
            else:
                rendered.append(f'{op}={ref}')
        return f"{' and '.join(rendered)}"
# NOTE(review): the decorator below appears truncated by extraction — its
# receiver (presumably `pytest.mark`) is missing; confirm against the
# original test module.
.usefixtures('use_tmpdir')
def test_that_missing_report_steps_errors():
    """GEN_DATA without REPORT_STEPS must yield a config error at the keyword."""
    assert_that_config_leads_to_error(config_file_contents=dedent('\nNUM_REALIZATIONS 1\nGEN_DATA RFT_3-1_R_DATA INPUT_FORMAT:ASCII RESULT_FILE:RFT_3-1_R%d\n\n '), expected_error=ExpectedErrorInfo(match='REPORT_STEPS', line=3, column=1, end_column=9))
def clamp_f_f(gen, t, srcs):
    """Emit code clamping the float in srcs[0] to the range [0.0, 1.0].

    Generated control flow: copy the source into a fresh temp; if the
    source exceeds 1.0, overwrite with 1.0; otherwise, if it is below
    0.0, overwrite with 0.0.  Returns the temp holding the result.
    """
    below_one = gen.symbols.newLabel()
    finished = gen.symbols.newLabel()
    upper = ConstFloatArg(1.0)
    lower = ConstFloatArg(0.0)
    value = srcs[0]
    result = TempArg(gen.symbols.newTemp(Float), Float)
    # Optimistically copy the input; bounds overwrite it when violated.
    gen.emit_move(value, result)
    not_above = gen.emit_binop('<=', [value, upper], Float)
    gen.emit_cjump(not_above, below_one)
    gen.emit_move(upper, result)
    gen.emit_jump(finished)
    gen.emit_label(below_one)
    not_below = gen.emit_binop('>=', [value, lower], Float)
    gen.emit_cjump(not_below, finished)
    gen.emit_move(lower, result)
    gen.emit_label(finished)
    return result
def do_job(links_file, contigs_file, reference_file):
    """Check scaffold contig order and strand against a reference alignment.

    Parses scaffolds from `links_file`, aligns contigs to the reference,
    then prints each contig per scaffold with markers for order/strand
    disagreements, followed by overall totals.
    """
    scaffolds = parse_links_file(links_file)
    alignment = get_alignment(scaffolds, contigs_file, reference_file)
    # Keep confidently-aligned contigs, then merge collinear hits.
    alignment = filter_by_coverage(alignment, 0.45)
    alignment = join_collinear(alignment)
    (entry_ord, chr_len, contig_len) = get_order(alignment)
    total_breaks = 0
    total_gaps = 0
    total_contigs = 0
    for s in scaffolds:
        print(('\n>' + s.name))
        prev_aln = []
        prev_strand = None
        # None = ordering direction not yet established for this scaffold.
        increasing = None
        breaks = []
        for contig in s.contigs:
            miss_ord = False
            miss_strand = False
            # Normalise hit signs to the contig's orientation in the scaffold.
            for hit in entry_ord[contig.name]:
                if (contig.sign < 0):
                    hit.sign = (- hit.sign)
            if prev_aln:
                if (increasing is not None):
                    # Direction known: flag contigs that break the ordering.
                    if (not agreement_ord(increasing, prev_aln, entry_ord[contig.name], chr_len)):
                        increasing = None
                        breaks.append(contig.name)
                        total_breaks += 1
                        miss_ord = True
                elif ((len(entry_ord[contig.name]) == 1) and (len(prev_aln) == 1)):
                    # Establish the direction from two unambiguous hits.
                    increasing = (entry_ord[contig.name][0].index > prev_aln[0].index)
            cur_strand = [h.sign for h in entry_ord[contig.name]]
            if ((not miss_ord) and prev_strand and cur_strand):
                if (not agreement_strands(prev_strand, cur_strand, increasing)):
                    breaks.append(contig.name)
                    total_breaks += 1
                    miss_strand = True
                    increasing = None
            if (gap_count(prev_aln, entry_ord[contig.name]) > 0):
                total_gaps += 1
            if entry_ord[contig.name]:
                # Only aligned contigs update the comparison baseline.
                prev_aln = entry_ord[contig.name]
                prev_strand = cur_strand
            sign = ('+' if (contig.sign > 0) else '-')
            pos_list = list(map(str, entry_ord[contig.name]))
            # Truncate long position lists in the report.
            pos_list_str = (str(pos_list) if (len(pos_list) < 5) else (str(pos_list[:5]) + '...'))
            print('{0}{1}\t\t{2}\t{3}'.format(sign, contig.name, contig_len[contig.name], pos_list_str), end='')
            print(('\t<<<order' if miss_ord else ''), end='')
            print(('\t<<<strand' if miss_strand else ''), end='')
            print('')
            total_contigs += 1
        print('\tmiss-ordered: ', len(breaks))
    print('\nTotal miss-ordered:', total_breaks)
    print('Total gaps:', total_gaps)
    print('Total contigs:', total_contigs)
    print('Total scaffolds:', len(scaffolds))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.