code stringlengths 281 23.7M |
|---|
def _assemble_broker_tas_lookup_records() -> list:
base_record = {'created_at': None, 'updated_at': None, 'tas_id': None, 'allocation_transfer_agency': None, 'agency_identifier': None, 'beginning_period_of_availa': None, 'ending_period_of_availabil': None, 'availability_type_code': None, 'main_account_code': None, 'sub_account_code': None, 'account_num': None, 'internal_end_date': None, 'internal_start_date': '2015-01-01', 'financial_indicator2': None, 'fr_entity_description': None, 'fr_entity_type': None, 'account_title': None, 'budget_bureau_code': None, 'budget_bureau_name': None, 'budget_function_code': None, 'budget_function_title': None, 'budget_subfunction_code': None, 'budget_subfunction_title': None, 'reporting_agency_aid': None, 'reporting_agency_name': None}
default_tas_lookup_record = copy.copy(base_record)
default_tas_lookup_record['tas'] = ''
default_tas_lookup_record['display_tas'] = '1001-1002-1005/1006-1007-1008'
default_tas_lookup_record['tas_id'] = (- 999)
default_tas_lookup_record['account_num'] = (- 99999)
default_tas_lookup_record['allocation_transfer_agency'] = 1001
default_tas_lookup_record['agency_identifier'] = 1002
default_tas_lookup_record['availability_type_code'] = 1003
default_tas_lookup_record['allocation_transfer_agency'] = 1004
default_tas_lookup_record['beginning_period_of_availa'] = 1005
default_tas_lookup_record['ending_period_of_availabil'] = 1006
default_tas_lookup_record['main_account_code'] = 1007
default_tas_lookup_record['sub_account_code'] = 1008
return [default_tas_lookup_record] |
@pytest.mark.integration
class TestUpgradeWithEjectAccept(BaseTestUpgradeWithEject):
    """Upgrade scenario where the user accepts ejecting the conflicting skill.

    NOTE(review): the original source carried a bare ``.integration`` line
    where a decorator belongs; restored as ``@pytest.mark.integration`` —
    confirm the marker name against the project's pytest configuration.
    """

    # The user answers "yes" to both eject confirmation prompts.
    CONFIRM_OUTPUT = [True, True]
    GENERIC_SELLER = ComponentId(ComponentType.SKILL, PublicId.from_str('fetchai/generic_seller:0.24.0'))
    EXPECTED_CLICK_ECHO_CALLS = ['Ejecting (skill, fetchai/generic_seller:0.24.0)...', 'Ejecting item skill fetchai/generic_seller:0.24.0', "Fingerprinting skill components of 'default_author/generic_seller:0.1.0' ...", 'Successfully ejected skill fetchai/generic_seller:0.24.0 to ./skills/generic_seller as default_author/generic_seller:0.1.0.']
    EXPECTED_CLICK_CONFIRM_CALLS = [RegexComparator('Skill fetchai/generic_seller:0.24.0 prevents the upgrade of the following vendor packages:'), RegexComparator("as there isn't a compatible version available on the AEA registry. Would you like to eject it?")]

    def test_run(self, *mocks):
        """Run the base upgrade scenario, then check the skill was ejected on disk."""
        super().test_run(*mocks)
        ejected_package_path = Path(self.t, self.current_agent_context, 'skills', 'generic_seller')
        assert ejected_package_path.exists()
        assert ejected_package_path.is_dir()
def main():
    """Ansible entry point for managing the FortiOS ``authentication_rule`` resource.

    Builds the module argument spec from the versioned schema, talks to the
    device over the persistent httpapi connection, applies the requested
    state, and exits (or fails) with changed/diff/version-check information.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'  # attribute that uniquely identifies an authentication_rule object
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'authentication_rule': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema-derived option into the nested spec; the mkey
    # attribute is mandatory because it identifies the object.
    for attribute_name in module_spec['options']:
        fields['authentication_rule']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['authentication_rule']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the live FortiOS version against the schema; a mismatch is
        # surfaced as a warning (or in the failure payload) below.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'authentication_rule')
        (is_error, has_changed, result, diff) = fortios_authentication(module.params, fos, module.check_mode)
    else:
        # No persistent connection socket: the httpapi plugin is required.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class Solution:
    """Find elements appearing more than ⌊n/3⌋ times (extended Boyer–Moore)."""

    def majorityElement(self, nums: List[int]) -> List[int]:
        """Return every value occurring more than ``len(nums) // 3`` times.

        At most two values can exceed the n/3 threshold, so two
        (candidate, counter) pairs survive a single voting pass; a second
        counting pass verifies the candidates.  O(n) time, O(1) extra space.
        """
        cand_a = cand_b = None
        votes_a = votes_b = 0
        # Pass 1: voting — keep at most two candidates alive.
        for value in nums:
            if value == cand_a:
                votes_a += 1
            elif value == cand_b:
                votes_b += 1
            elif votes_a == 0:
                cand_a, votes_a = value, 1
            elif votes_b == 0:
                cand_b, votes_b = value, 1
            else:
                votes_a -= 1
                votes_b -= 1
        # Pass 2: count the candidates' true frequencies.
        votes_a = sum(1 for value in nums if value == cand_a)
        votes_b = sum(1 for value in nums if value == cand_b)
        threshold = len(nums) // 3
        result = []
        if votes_a > threshold:
            result.append(cand_a)
        if votes_b > threshold:
            result.append(cand_b)
        return result
@pytest.mark.unit
@patch('fides.api.ui.get_path_to_admin_ui_file')
@pytest.mark.parametrize('route, expected', [('index.html', True), ('//etc/passwd', False), ('dataset/new.html', True), ('//fides/example.env', False)])
def test_path_is_in_ui_directory(mock_get_path_to_admin_ui_file: Mock, tmp_static: Path, route: str, expected: bool):
    """Paths resolving inside the static UI directory are allowed; '//' escapes are not.

    NOTE(review): the decorator lines were mangled in the original source
    (bare ``.unit`` and a bare ``('fides...')`` line); restored as
    ``@pytest.mark.unit`` / ``@patch(...)`` / ``@pytest.mark.parametrize`` —
    confirm prefixes against the project's test conventions.
    """
    mock_get_path_to_admin_ui_file.return_value = tmp_static
    assert path_is_in_ui_directory(tmp_static / Path(route)) == expected
def _handle_hooks(cls, bases):
    """Build the ``__shim__`` post-init function for a spock-decorated class.

    Collects inherited ``__attrs_post_init__`` hooks from *bases* and combines
    them with the class's optional ``__post_hook__`` (side effects only — must
    not return) and ``__maps__`` (must return a value) functions.  The shim
    stores any mapped value(s) on the instance as ``_maps``.

    Raises:
        _SpockInstantiationError: if ``__post_hook__`` contains an explicit
            return, or ``__maps__`` is missing one.

    Returns:
        The shim callable to install as the attrs post-init hook.
    """
    # Post-init hooks contributed by parent spock classes.
    hooks = [val.__attrs_post_init__ for val in bases if hasattr(val, '__attrs_post_init__')]
    if (hasattr(cls, '__post_hook__') or hasattr(cls, '__maps__') or (len(hooks) > 0)):
        # Validate the hook contracts up front, before any shim is built.
        if (hasattr(cls, '__post_hook__') and contains_return(cls.__post_hook__)):
            raise _SpockInstantiationError(f'__post_hook__ function contains an explict return. This function cannot return any values (i.e. requires an implicit None return)')
        if (hasattr(cls, '__maps__') and (not contains_return(cls.__maps__))):
            raise _SpockInstantiationError(f'__maps__ function is missing an explict return. This function needs to explicitly return any type of values')
        if (len(hooks) > 0):
            def __shim__(self):
                # Run the local post hook first, then every inherited hook.
                if hasattr(cls, '__post_hook__'):
                    cls.__post_hook__(self)
                all_hooks = [val(self) for val in hooks]
                # Drop side-effect-only hook results.
                all_hooks = [val for val in all_hooks if (val is not None)]
                if hasattr(cls, '__maps__'):
                    # The local __maps__ result goes first.
                    all_hooks = ([cls.__maps__(self)] + all_hooks)
                if (len(all_hooks) == 1):
                    # Unwrap a single mapped value for convenience.
                    all_hooks = all_hooks[0]
                object.__setattr__(self, '_maps', all_hooks)
        else:
            def __shim__(self):
                if hasattr(cls, '__post_hook__'):
                    cls.__post_hook__(self)
                if hasattr(cls, '__maps__'):
                    # NOTE(review): __maps__ is invoked twice here (store +
                    # return) — confirm it is side-effect free / idempotent.
                    object.__setattr__(self, '_maps', cls.__maps__(self))
                    return cls.__maps__(self)
                else:
                    return None
    else:
        def __shim__(self):
            # No hooks anywhere: install a no-op shim.
            ...
    return __shim__
def _add_GSUB_feature_variations(font, axes, internal_axis_supports, rules, featureTags):
    """Translate designspace rules into GSUB FeatureVariations on *font*.

    Each rule's condition sets are converted to per-axis (min, max) boxes in
    normalized coordinates, keyed by axis tag; an absent bound defaults to the
    full normalized range [-1.0, 1.0].  The resulting (region, substitutions)
    pairs are handed to fontTools' ``addFeatureVariations``.
    """

    def normalize(name, value):
        # Normalize a single user-space coordinate on the named axis.
        return models.normalizeLocation({name: value}, internal_axis_supports)[name]

    log.info('Generating GSUB FeatureVariations')
    axis_tags = {axis_name: axis_def.tag for axis_name, axis_def in axes.items()}
    conditional_subs = []
    for rule in rules:
        region = []
        for condition_set in rule.conditionSets:
            box = {}
            for cond in condition_set:
                axis_name = cond['name']
                lower = normalize(axis_name, cond['minimum']) if cond['minimum'] is not None else -1.0
                upper = normalize(axis_name, cond['maximum']) if cond['maximum'] is not None else 1.0
                box[axis_tags[axis_name]] = (lower, upper)
            region.append(box)
        conditional_subs.append((region, dict(rule.subs)))
    addFeatureVariations(font, conditional_subs, featureTags)
@pytest.mark.parametrize('asgi', [True, False])
class TestPostQueryParamsDefaultBehavior:
    """By default, urlencoded POST bodies must not be auto-parsed into params.

    NOTE(review): the original source had a bare ``.parametrize(...)`` line
    where a decorator belongs; restored as ``@pytest.mark.parametrize`` —
    confirm against the project's test conventions.
    """

    def test_dont_auto_parse_by_default(self, asgi):
        """A form-encoded request body must not populate ``req.get_param``."""
        app = create_app(asgi)
        resource = testing.SimpleTestResource()
        app.add_route('/', resource)
        client = testing.TestClient(app)
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        client.simulate_request(path='/', body='q=42', headers=headers)
        req = resource.captured_req
        assert req.get_param('q') is None
def test_fix_reference_namespace_is_working_properly_with_refs_updated_in_a_previous_scene(create_test_data, create_pymel, create_maya_env):
    """fix_reference_namespaces() must preserve edits made under old namespaces.

    Scenario: a model version is referenced into a look-dev scene under a
    filename-based namespace; that look-dev scene is referenced into two shot
    scenes (version11 and version15), each of which moves the contained
    locator.  After fixing namespaces in both scenes, the transform edits must
    survive and both scenes must resolve to the same published paths.
    """
    data = create_test_data
    pm = create_pymel
    maya_env = create_maya_env
    data['asset2_model_main_v002'].is_published = True
    data['asset2_lookdev_take1_v001'].is_published = True
    DBSession.commit()
    # Build the model scene: a locator parented under a transform group.
    maya_env.open(data['asset2_model_main_v002'])
    loc = pm.spaceLocator(name='locator1')
    loc.t.set(0, 0, 0)
    tra_group = pm.nt.Transform(name='asset1')
    pm.parent(loc, tra_group)
    pm.saveFile()
    # Reference the model into the look-dev scene, using the old
    # filename-derived namespace convention.
    maya_env.open(data['asset2_lookdev_take1_v001'])
    maya_env.reference(data['asset2_model_main_v002'])
    refs = pm.listReferences()
    ref = refs[0]
    # NOTE(review): bare isinstance has no effect — probably meant to be
    # asserted.
    isinstance(ref, pm.system.FileReference)
    ref.namespace = data['asset2_model_main_v002'].filename.replace('.', '_')
    pm.saveFile()
    pm.newFile(force=True)
    # Shot scene 1 (version11): reference look-dev and move the locator in X.
    maya_env.open(data['version11'])
    maya_env.reference(data['asset2_lookdev_take1_v001'])
    refs = pm.listReferences()
    refs[0].namespace = data['asset2_lookdev_take1_v001'].filename.replace('.', '_')
    loc = pm.ls('locator1', type=pm.nt.Transform, r=1)
    loc[0].t.set(1, 0, 0)
    # The translate edit must be recorded against the nested model reference.
    version2_ref_node = pm.listReferences(refs[0])[0]
    edits = pm.referenceQuery(version2_ref_node, es=1)
    assert (len(edits) > 0)
    pm.saveFile()
    DBSession.commit()
    pm.newFile(force=True)
    # Shot scene 2 (version15): same setup, locator moved in Y instead.
    maya_env.open(data['version15'])
    maya_env.reference(data['asset2_lookdev_take1_v001'])
    refs = pm.listReferences()
    refs[0].namespace = data['asset2_lookdev_take1_v001'].filename.replace('.', '_')
    loc = pm.ls('locator1', type=pm.nt.Transform, r=1)
    loc[0].t.set(0, 1, 0)
    pm.saveFile()
    version2_ref_node = pm.listReferences(refs[0])[0]
    edits = pm.referenceQuery(version2_ref_node, es=1)
    assert (len(edits) > 0)
    DBSession.commit()
    # Sanity-check the pre-fix (filename-based) namespaces, then fix them.
    all_refs = pm.listReferences(recursive=1)
    assert (all_refs[0].namespace == data['asset2_lookdev_take1_v001'].filename.replace('.', '_'))
    assert (all_refs[1].namespace == data['asset2_model_main_v002'].filename.replace('.', '_'))
    maya_env.fix_reference_namespaces()
    pm.saveFile()
    all_refs = pm.listReferences(recursive=1)
    version15_asset2_lookdev_take1_v001_path = all_refs[0].path
    version15_version2_path = all_refs[1].path
    # Namespaces now follow the latest published versions' nice names.
    assert (all_refs[0].namespace == data['asset2_lookdev_take1_v001'].latest_published_version.nice_name)
    assert (all_refs[1].namespace == data['asset2_model_main_v002'].latest_published_version.nice_name)
    # No failed edits; the surviving successful edits live on the model ref.
    assert (len(pm.referenceQuery(all_refs[0], es=1, fld=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[1], es=1, fld=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[0], es=1, scs=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[1], es=1, scs=1)) == 2)
    # version15's Y translation survived the namespace fix.
    locs = pm.ls('locator1', type=pm.nt.Transform, r=1)
    assert (1.0 == locs[0].ty.get())
    pm.saveFile()
    # Reopen version11 and repeat: its X translation must also survive.
    maya_env.open(data['version11'])
    all_refs = pm.listReferences(recursive=1)
    assert (all_refs[0].namespace == data['asset2_lookdev_take1_v001'].filename.replace('.', '_'))
    assert (all_refs[1].namespace == data['asset2_model_main_v002'].filename.replace('.', '_'))
    maya_env.fix_reference_namespaces()
    pm.saveFile()
    all_refs = pm.listReferences(recursive=1)
    version11_asset2_lookdev_take1_v001_path = all_refs[0].path
    version11_version2_path = all_refs[1].path
    assert (all_refs[0].namespace == data['asset2_lookdev_take1_v001'].latest_published_version.nice_name)
    assert (all_refs[1].namespace == data['asset2_model_main_v002'].latest_published_version.nice_name)
    assert (len(pm.referenceQuery(all_refs[0], es=1, fld=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[1], es=1, fld=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[0], es=1, scs=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[1], es=1, scs=1)) == 2)
    locs = pm.ls('locator1', type=pm.nt.Transform, r=1)
    assert (1.0 == locs[0].tx.get())
    pm.saveFile()
    # Both fixed scenes must resolve to identical reference paths.
    assert (version15_asset2_lookdev_take1_v001_path == version11_asset2_lookdev_take1_v001_path)
    assert (version15_version2_path == version11_version2_path)
def test_tox(cookies, tmp_path):
    """A baked project must be wired for tox (workflow install + tox.ini)."""
    with run_within_dir(tmp_path):
        baked = cookies.bake()
        assert baked.exit_code == 0
        workflow_path = f'{baked.project_path}/.github/workflows/main.yml'
        assert file_contains_text(workflow_path, 'pip install tox tox-gh-actions')
        tox_ini_path = f'{baked.project_path}/tox.ini'
        assert os.path.isfile(tox_ini_path)
        assert file_contains_text(tox_ini_path, '[tox]')
class AbstractServer:
    """Base class for Flexx servers wrapping an asyncio event loop.

    Subclasses implement ``_open``/``_close``.

    NOTE(review): ``_running``, ``serving`` and ``protocol`` are restored here
    as properties.  The bare method definitions in the original made
    ``if self._running:`` test the bound-method object (always truthy), so
    ``start()`` and ``close()`` unconditionally raised RuntimeError.
    """

    def __init__(self, host, port, loop=None, **kwargs):
        # Windows + Python > 3.8 defaults to the Proactor loop; fall back to
        # the selector policy, which this server relies on.
        if (sys.version_info > (3, 8)) and sys.platform.startswith('win'):
            asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
        if loop is None:
            self._loop = asyncio.get_event_loop()
        else:
            assert isinstance(loop, asyncio.AbstractEventLoop)
            self._loop = loop
        asyncio.set_event_loop(self._loop)
        # Hook the Flexx event system into this asyncio loop.
        _loop.loop.integrate(self._loop, reset=False)
        self._serving = None
        if host is not False:
            self._open(host, port, **kwargs)
            assert self._serving

    @property
    def _running(self):
        """Whether the wrapped asyncio loop is currently running."""
        return self._loop.is_running()

    def start(self):
        """Run the event loop until stopped.

        Raises:
            RuntimeError: if the server is closed/non-serving, already
                running, or started from a thread other than its creator.
        """
        if not self._serving:
            raise RuntimeError('Cannot start a closed or non-serving server!')
        if self._running:
            raise RuntimeError('Cannot start a running server.')
        if asyncio.get_event_loop() is not self._loop:
            raise RuntimeError('Can only start server in same thread that created it.')
        logger.info('Starting Flexx event loop.')
        # A periodic task keeps the loop responsive to Ctrl-C on some
        # platforms; skip when the Flexx event system already drives the loop.
        if not getattr(self._loop, '_in_event_loop', False):
            poller = self._loop.create_task(keep_awake())
            try:
                self._loop.run_forever()
            except KeyboardInterrupt:
                logger.info('Flexx event loop interrupted.')
            except TypeError as err:
                # Tornado can raise this spurious TypeError on interrupt.
                if 'close() takes 1 positional argument but 3 were given' in str(err):
                    logger.info('Interrupted Flexx event loop.')
                else:
                    raise
            poller.cancel()

    def stop(self):
        """Stop the event loop (thread-safe)."""
        logger.info('Stopping Flexx event loop.')
        self._loop.call_soon_threadsafe(self._loop.stop)

    def close(self):
        """Close the server; it must be stopped first."""
        if self._running:
            raise RuntimeError('Cannot close a running server; need to stop first.')
        self._serving = None
        self._close()

    def _open(self, host, port, **kwargs):
        raise NotImplementedError()

    def _close(self):
        raise NotImplementedError()

    @property
    def serving(self):
        """(host, port) being served, or None when not serving."""
        return self._serving

    @property
    def protocol(self):
        """The protocol in use (implemented by subclasses)."""
        raise NotImplementedError
def _check_path_safe_to_write(sql_path: Path, py_path: Path):
    """Ensure ``sql_path`` was generated by fal before it gets overwritten.

    Recomputes the checksum of the existing file contents and compares it to
    the checksum embedded in the file by a previous generation run.

    Raises:
        RuntimeError: if the file exists but carries no embedded checksum, or
            the embedded checksum does not match the recomputed one (i.e. the
            file was hand-written or hand-edited).

    Returns:
        The recomputed checksum of the existing file.
        NOTE(review): when ``sql_path`` does not exist, ``checksum`` is never
        bound and the final return raises ``NameError`` — the ``return`` was
        likely meant to sit inside the ``if`` block; confirm upstream.
    """
    if sql_path.exists():
        with open(sql_path, 'r') as file:
            contents = file.read()
        (checksum, found) = _checksum(contents)
        if ((not found) or (checksum != found)):
            LOGGER.debug(f'''Existing file calculated checksum: {checksum}
Found checksum: {found}''')
            raise RuntimeError(f"File '{sql_path}' not generated by fal would be overwritten by generated model of '{py_path}'. Please rename or remove.")
    return checksum
def extractBlackbellyblWordpressCom(item):
    """Parse a release post from blackbellybl.wordpress.com.

    Returns None for previews or posts lacking chapter/volume info, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    releases = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in releases:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def delTransport(snmpEngine, transportDomain):
    """Unregister and return the transport bound to *transportDomain*.

    When the transport dispatcher was installed automatically (tracked by a
    reference count in the engine's user context), the count is decremented
    and the dispatcher is closed/unregistered once it reaches zero.

    Returns:
        The previously registered transport object, or ``None`` when the
        engine has no transport dispatcher at all.
    """
    if (not snmpEngine.transportDispatcher):
        return
    transport = getTransport(snmpEngine, transportDomain)
    snmpEngine.transportDispatcher.unregisterTransport(transportDomain)
    # Count of transports registered through this helper; None means the
    # dispatcher was supplied by the user and must not be torn down here.
    automaticTransportDispatcher = snmpEngine.getUserContext('automaticTransportDispatcher')
    if (automaticTransportDispatcher is not None):
        automaticTransportDispatcher -= 1
        snmpEngine.setUserContext(automaticTransportDispatcher=automaticTransportDispatcher)
        if (not automaticTransportDispatcher):
            snmpEngine.transportDispatcher.closeDispatcher()
            snmpEngine.unregisterTransportDispatcher()
            # NOTE(review): delUserContext receives the counter value (0)
            # rather than the key name 'automaticTransportDispatcher' — looks
            # suspicious; confirm against the engine's user-context API.
            snmpEngine.delUserContext(automaticTransportDispatcher)
    return transport
def extractZigguratlocalizationsBlogspotCom(item):
    """Parse a release post from zigguratlocalizations.blogspot.com.

    Returns None for previews or posts lacking chapter/volume info, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    # Tag → (series name, translation type); insertion order mirrors the
    # original lookup order.
    tag_table = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series_name, tl_type) in tag_table.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsArcdiagramSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Highcharts ``plotOptions.arcdiagram.sonification.defaultInstrumentOptions.mapping.pan`` options.

    NOTE(review): each getter/setter pair in the original shared one bare
    name, so the setter definition silently shadowed its getter; restored as
    ``@property`` / ``@<name>.setter`` pairs matching the Options accessor
    pattern.  Documented defaults are the values passed to ``_config_get``.
    """

    @property
    def mapFunction(self):
        """Mapping function; defaults to None."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map to; defaults to None."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value; defaults to None."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value; defaults to None."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range to map within; defaults to None."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def namespace_manifest(namespace: str) -> str:
    """Return YAML manifest(s) that create *namespace* for dev deployments.

    When the ``DEV_USE_IMAGEPULLSECRET`` environment variable is set, also
    emits a dockerconfigjson Secret built from ``DEV_REGISTRY``,
    ``DOCKER_BUILD_USERNAME`` and ``DOCKER_BUILD_PASSWORD``, plus a default
    ServiceAccount wired to that pull secret.

    NOTE(review): the YAML lines inside the f-strings appear flush-left
    (e.g. ``name:`` not indented under ``metadata:``) — indentation may have
    been lost in this copy of the source; confirm against a known-good
    manifest before relying on the emitted YAML.
    """
    ret = f'''
---
apiVersion: v1
kind: Namespace
metadata:
name: {namespace}
'''
    if os.environ.get('DEV_USE_IMAGEPULLSECRET', None):
        # Standard .dockerconfigjson payload: registry host → base64 "user:pass".
        dockercfg = {'auths': {os.path.dirname(os.environ['DEV_REGISTRY']): {'auth': b64encode(((os.environ['DOCKER_BUILD_USERNAME'] + ':') + os.environ['DOCKER_BUILD_PASSWORD']).encode('utf-8')).decode('utf-8')}}}
        ret += f'''
---
apiVersion: v1
kind: Secret
metadata:
name: dev-image-pull-secret
namespace: {namespace}
type: kubernetes.io/dockerconfigjson
data:
".dockerconfigjson": "{b64encode(json.dumps(dockercfg).encode('utf-8')).decode('utf-8')}"
---
apiVersion: v1
kind: ServiceAccount
metadata:
name: default
namespace: {namespace}
imagePullSecrets:
- name: dev-image-pull-secret
'''
    return ret
class OptionSeriesDumbbellDataDatalabels(Options):
    """Highcharts ``series.dumbbell.data.dataLabels`` options.

    NOTE(review): every getter/setter pair in the original shared one bare
    name, so each setter definition silently replaced its getter; restored as
    ``@property`` / ``@<name>.setter`` pairs (sub-option accessors such as
    ``animation``, ``filter`` and ``textPath`` stay read-only properties),
    matching the Options accessor pattern.  Documented defaults are the
    values passed to ``_config_get``.
    """

    @property
    def align(self):
        """Horizontal label alignment; defaults to 'center'."""
        return self._config_get('center')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        """Allow overlapping labels; defaults to False."""
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionSeriesDumbbellDataDatalabelsAnimation':
        """Animation sub-options."""
        return self._config_sub_data('animation', OptionSeriesDumbbellDataDatalabelsAnimation)

    @property
    def backgroundColor(self):
        """Label background color; defaults to None."""
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        """Label border color; defaults to None."""
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        """Label border radius; defaults to 0."""
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        """Label border width; defaults to 0."""
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        """CSS class name; defaults to None."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Text color; defaults to None."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        """Crop labels outside the plot area; defaults to True."""
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        """Defer labels until after initial animation; defaults to True."""
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        """Enable data labels; defaults to False."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionSeriesDumbbellDataDatalabelsFilter':
        """Label filter sub-options."""
        return self._config_sub_data('filter', OptionSeriesDumbbellDataDatalabelsFilter)

    @property
    def format(self):
        """Label format string; defaults to 'point.value'."""
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        """Label formatter callback; defaults to None."""
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        """Render labels inside the shape; defaults to None."""
        return self._config_get(None)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        """Format for null points; defaults to None."""
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        """Formatter callback for null points; defaults to None."""
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        """Behaviour when labels overflow the plot area; defaults to 'justify'."""
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        """Padding within the label; defaults to 5."""
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        """Label alignment within the shape; defaults to 'center'."""
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        """Text rotation in degrees; defaults to 0."""
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        """Apply a shadow to the label box; defaults to False."""
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        """Border shape around the label; defaults to 'square'."""
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        """CSS style for the label; defaults to None."""
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionSeriesDumbbellDataDatalabelsTextpath':
        """Text-path sub-options."""
        return self._config_sub_data('textPath', OptionSeriesDumbbellDataDatalabelsTextpath)

    @property
    def useHTML(self):
        """Render the label as HTML; defaults to False."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        """Vertical label alignment; defaults to 'bottom'."""
        return self._config_get('bottom')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """Horizontal pixel offset; defaults to 0."""
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def xHigh(self):
        """X offset for the 'high' label; defaults to 0."""
        return self._config_get(0)

    @xHigh.setter
    def xHigh(self, num: float):
        self._config(num, js_type=False)

    @property
    def xLow(self):
        """X offset for the 'low' label; defaults to 0."""
        return self._config_get(0)

    @xLow.setter
    def xLow(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Vertical pixel offset; defaults to 0."""
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def yHigh(self):
        """Y offset for the 'high' label; defaults to 0."""
        return self._config_get(0)

    @yHigh.setter
    def yHigh(self, num: float):
        self._config(num, js_type=False)

    @property
    def yLow(self):
        """Y offset for the 'low' label; defaults to 0."""
        return self._config_get(0)

    @yLow.setter
    def yLow(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """Z-index of the labels; defaults to 6."""
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class Spreadsheet:
    """Thin wrapper around a styled-text Workbook sheet."""

    def __init__(self, name, title):
        """Open (or create) sheet *title* in a workbook saved as *name*."""
        self.workbook = Workbook(name)
        self.name = name
        self.headers = []
        try:
            self.sheet = self.workbook.getSheet(title)
        except KeyError:
            # Sheet does not exist yet; create it.
            self.sheet = self.workbook.addSheet(title)

    def addTitleRow(self, headers):
        """Append a bold header row."""
        header_line = []
        for item in headers:
            header_line.append('<style font=bold>%s</style>' % item)
        self.sheet.addText('\t'.join(header_line))

    def addEmptyRow(self, color=None):
        """Append an empty row, optionally background-colored across all columns."""
        if not color:
            self.sheet.addText('')
        else:
            empty_row = []
            ncols = min(self.sheet.ncols, 256)  # legacy Excel column cap
            for i in range(ncols):
                empty_row.append('<style bgcolor=%s> </style>' % color)
            self.sheet.addText('\t'.join(empty_row))

    def addRow(self, data, set_widths=False, bold=False, wrap=False, bg_color=''):
        """Append a data row with optional bold/wrap/background styling.

        ``set_widths`` is accepted for interface compatibility but unused.
        NOTE(review): this method emits the ``bg_color=`` style keyword while
        addEmptyRow uses ``bgcolor=`` — confirm which one the sheet backend
        actually understands.
        """
        style_str = []
        if bold:
            style_str.append('font=bold')
        if wrap:
            style_str.append('wrap')
        if bg_color:
            # BUG FIX: was "'bg_color=%' % bg_color" — an incomplete format
            # specifier that raised ValueError whenever bg_color was set.
            style_str.append('bg_color=%s' % bg_color)
        style_str = ' '.join(style_str)
        items = []
        for item in data:
            if style_str:
                items.append('<style %s>%s</style>' % (style_str, item))
            else:
                items.append(str(item))
        self.sheet.addText('\t'.join(items))

    def write(self):
        """Persist the workbook to disk under ``self.name``."""
        self.workbook.save(self.name)
def main(page: ft.Page):
    """Flet app entry point: build the solitaire game UI on *page*.

    Wires the app bar's "new game" action and the win dialog, then adds the
    initial game board.
    """
    def on_new_game(settings):
        # Replace the current game board with a fresh one.
        page.controls.pop()
        new_solitaire = Solitaire(settings, on_win)
        page.add(new_solitaire)
        page.update()

    def on_win():
        # Show a modal dialog when the game is won.
        page.add(ft.AlertDialog(title=ft.Text('YOU WIN!'), open=True))
        print('You win')
        page.update()

    settings = Settings()
    create_appbar(page, settings, on_new_game)
    solitaire = Solitaire(settings, on_win)
    page.add(solitaire)
def main():
    """CLI entry: compute min colorfulness (M) for achromatic CAM16 UCS JMh.

    Parses command-line options and delegates the work to ``run``.
    """
    arg_parser = argparse.ArgumentParser(
        prog='calc_cam16_ucs_jmh_min_m.py',
        description='Calculate min M for achromatic colors in CAM16 UCS JMh and map current spline against real values.',
    )
    arg_parser.add_argument('--res', '-r', type=int, default=50000, help='Resolution to use when calculating range, default is 50000.')
    arg_parser.add_argument('--white-point', '-w', type=str, default='2deg:D65', help="White point 'deg:wp', example: '2deg:D65'.")
    arg_parser.add_argument('--adapting-luminance', '-a', type=float, default=64 / math.pi * 0.2, help='Adapting luminance.')
    arg_parser.add_argument('--background-luminance', '-b', type=float, default=20, help='Background luminace - default 20 (gray world).')
    arg_parser.add_argument('--surround', '-s', type=str, default='average', help="Surround: 'average', 'dim', 'dark'")
    arg_parser.add_argument('--discounting', '-d', action='store_true', help='Enable discounting.')
    arg_parser.add_argument('--spline', '-S', type=str, default='catrom', help='Spline to use for approximation of achromatic line')
    arg_parser.add_argument('--negative', '-n', action='store_true', help='Negative lightness spline.')
    arg_parser.add_argument('--tuning', '-t', type=str, action='append', help='Spline tuning parameters: start:end:step:scale (int:int:int:float)')
    arg_parser.add_argument('--dump', action='store_true', help='Dump calculated values.')
    opts = arg_parser.parse_args()
    return run(
        opts.white_point,
        opts.adapting_luminance,
        opts.background_luminance,
        opts.surround,
        opts.discounting,
        opts.spline,
        opts.tuning,
        opts.res,
        opts.negative,
        opts.dump,
    )
def extractRbktrtranslationsWordpressCom(item):
    """Parse a release post from rbktrtranslations.wordpress.com.

    Returns None for previews or posts lacking chapter/volume info, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag, series name, translation type) in lookup order.
    releases = [
        ('Residence of Monsters', 'Residence of Monsters', 'translated'),
        ('Jin Xiao Yi Tan', 'Jin Xiao Yi Tan', 'translated'),
        ('I Think My Boyfriend Is Sick', 'I Think My Boyfriend Is Sick', 'translated'),
        ('the protagonist makes you retreat about thirty miles', 'the protagonist makes you retreat about thirty miles', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in releases:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class RFC6455WebSocket(WebSocket):
    def __init__(self, sock, environ, version=13, protocol=None, client=False, extensions=None, max_frame_length=DEFAULT_MAX_FRAME_LENGTH):
        """Create an RFC 6455 websocket wrapper around *sock*.

        ``client=True`` flips masking/extension roles to the client side;
        *extensions* holds negotiated extension parameters (e.g. the
        'permessage-deflate' option dict).
        """
        super().__init__(sock, environ, version)
        self.iterator = self._iter_frames()  # lazily yields complete messages
        self.client = client
        self.protocol = protocol
        self.extensions = (extensions or {})
        # Shared per-connection deflate contexts, reused when context
        # takeover is permitted.
        self._deflate_enc = None
        self._deflate_dec = None
        self.max_frame_length = max_frame_length
        self._remote_close_data = None  # payload of the peer's close frame, if any
    class UTF8Decoder():
        """Incremental UTF-8 decoder with optional fast validation.

        Uses the (optional) utf8validator module to reject invalid byte
        sequences early; otherwise relies solely on the codecs incremental
        decoder.
        """

        def __init__(self):
            if utf8validator:
                self.validator = utf8validator.Utf8Validator()
            else:
                self.validator = None
            decoderclass = codecs.getincrementaldecoder('utf8')
            self.decoder = decoderclass()

        def reset(self):
            """Reset validator and decoder state between messages."""
            if self.validator:
                self.validator.reset()
            self.decoder.reset()

        def decode(self, data, final=False):
            """Decode a chunk of bytes; raises ValueError on invalid UTF-8."""
            if self.validator:
                (valid, eocp, c_i, t_i) = self.validator.validate(data)
                if (not valid):
                    raise ValueError('Data is not valid unicode')
            return self.decoder.decode(data, final)
    def _get_permessage_deflate_enc(self):
        """Return a deflate compressor per the negotiated permessage-deflate options.

        Returns None when the extension was not negotiated.  A fresh
        compressor is created per message when "no context takeover" was
        agreed for our side; otherwise one shared context is reused across
        messages.
        """
        options = self.extensions.get('permessage-deflate')
        if (options is None):
            return None

        def _make():
            # Negative wbits selects a raw deflate stream (no zlib header),
            # window-sized per the negotiated max_window_bits.
            return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, (- options.get(('client_max_window_bits' if self.client else 'server_max_window_bits'), zlib.MAX_WBITS)))
        if options.get(('client_no_context_takeover' if self.client else 'server_no_context_takeover')):
            return _make()
        else:
            if (self._deflate_enc is None):
                self._deflate_enc = _make()
            return self._deflate_enc
    def _get_permessage_deflate_dec(self, rsv1):
        """Return a deflate decompressor for an incoming message, or None.

        Only returns a decompressor when permessage-deflate was negotiated
        AND the frame's RSV1 bit signals a compressed message.  Context
        takeover rules mirror ``_get_permessage_deflate_enc`` (peer side).
        """
        options = self.extensions.get('permessage-deflate')
        if ((options is None) or (not rsv1)):
            return None

        def _make():
            # Raw deflate stream with the peer's negotiated window size.
            return zlib.decompressobj((- options.get(('server_max_window_bits' if self.client else 'client_max_window_bits'), zlib.MAX_WBITS)))
        if options.get(('server_no_context_takeover' if self.client else 'client_no_context_takeover')):
            return _make()
        else:
            if (self._deflate_dec is None):
                self._deflate_dec = _make()
            return self._deflate_dec
    def _get_bytes(self, numbytes):
        """Block until exactly *numbytes* bytes have been read from the socket.

        Raises:
            ConnectionClosedError: if the peer closes before enough data arrives.
        """
        data = b''
        while (len(data) < numbytes):
            d = self.socket.recv((numbytes - len(data)))
            if (not d):
                # Empty recv → the peer closed the connection.
                raise ConnectionClosedError()
            data = (data + d)
        return data
    class Message():
        """Accumulates (possibly fragmented) frame payloads for one message.

        Handles optional permessage-deflate decompression and UTF-8 decoding
        when the assembled payload is requested via ``getvalue()``.
        """

        def __init__(self, opcode, max_frame_length, decoder=None, decompressor=None):
            self.decoder = decoder            # UTF8Decoder for text messages, else None
            self.data = []                    # raw payload chunks, in arrival order
            self.finished = False
            self.opcode = opcode
            self.decompressor = decompressor  # deflate context when RSV1 was set
            self.max_frame_length = max_frame_length

        def push(self, data, final=False):
            """Append a payload chunk; *final* marks the FIN frame."""
            self.finished = final
            self.data.append(data)

        def getvalue(self):
            """Return the assembled (inflated and decoded) message payload."""
            data = b''.join(self.data)
            if ((not (self.opcode & 8)) and self.decompressor):
                # Re-append the trailing empty-block bytes the sender stripped
                # (permessage-deflate), bounding output by max_frame_length.
                data = self.decompressor.decompress((data + b'\x00\x00\xff\xff'), self.max_frame_length)
                if self.decompressor.unconsumed_tail:
                    raise FailedConnectionError(1009, 'Incoming compressed frame exceeds length limit of {} bytes.'.format(self.max_frame_length))
            if self.decoder:
                data = self.decoder.decode(data, self.finished)
            return data
def _apply_mask(data, mask, length=None, offset=0):
if (length is None):
length = len(data)
cnt = range(length)
return b''.join((bytes(((data[i] ^ mask[((offset + i) % 4)]),)) for i in cnt))
    def _handle_control_frame(self, opcode, data):
        """Process a close (8), ping (9) or pong (10) control frame.

        Close frames are validated (status code and UTF-8 reason), answered
        via ``close()``, and terminate the read loop by raising
        ConnectionClosedError.  Pings are answered with pongs; pongs ignored.
        """
        if (opcode == 8):
            self._remote_close_data = data
            if (not data):
                status = 1000  # empty payload → normal closure
            elif (len(data) > 1):
                status = struct.unpack_from('!H', data)[0]
                if ((not status) or (status not in VALID_CLOSE_STATUS)):
                    raise FailedConnectionError(1002, 'Unexpected close status code.')
                try:
                    # Remaining bytes are an optional UTF-8 close reason.
                    data = self.UTF8Decoder().decode(data[2:], True)
                except (UnicodeDecodeError, ValueError):
                    raise FailedConnectionError(1002, 'Close message data should be valid UTF-8.')
            else:
                status = 1002  # a 1-byte close payload is a protocol error
            self.close(close_data=(status, ''))
            raise ConnectionClosedError()
        elif (opcode == 9):
            # Ping: echo the payload back as a pong.
            self.send(data, control_code=10)
        elif (opcode == 10):
            pass  # unsolicited pong: ignore
        else:
            raise FailedConnectionError(1002, 'Unknown control frame received.')
    def _iter_frames(self):
        """Generator yielding complete message payloads from the socket.

        Reassembles fragmented messages, dispatches interleaved control
        frames, and translates protocol failures into a close handshake
        before ending iteration.
        """
        fragmented_message = None
        try:
            while True:
                message = self._recv_frame(message=fragmented_message)
                if (message.opcode & 8):
                    # Control frames may arrive between message fragments.
                    self._handle_control_frame(message.opcode, message.getvalue())
                    continue
                if (fragmented_message and (message is not fragmented_message)):
                    raise RuntimeError('Unexpected message change.')
                fragmented_message = message
                if message.finished:
                    data = fragmented_message.getvalue()
                    fragmented_message = None
                    (yield data)
        except FailedConnectionError:
            # Protocol violation: perform the close handshake with its status.
            (exc_typ, exc_val, exc_tb) = sys.exc_info()
            self.close(close_data=(exc_val.status, exc_val.message))
        except ConnectionClosedError:
            return
        except Exception:
            self.close(close_data=(1011, 'Internal Server Error'))
            raise
    def _recv_frame(self, message=None):
        """Read one frame from the socket, enforcing RFC 6455 header rules.

        *message* is the in-progress fragmented Message (or None).  Returns
        the Message the frame belongs to: a new one for a first/unfragmented
        frame or a control frame, otherwise the passed-in *message* with the
        new fragment pushed onto it.
        """
        recv = self._get_bytes
        # Two-byte base header: FIN/RSV/opcode then MASK/length.
        header = recv(2)
        (a, b) = struct.unpack('!BB', header)
        finished = ((a >> 7) == 1)
        rsv123 = ((a >> 4) & 7)
        rsv1 = (rsv123 & 4)
        if rsv123:
            # RSV1 is only legal when permessage-deflate was negotiated;
            # RSV2/RSV3 are never legal here.
            if (rsv1 and ('permessage-deflate' not in self.extensions)):
                raise FailedConnectionError(1002, 'RSV1, RSV2, RSV3: MUST be 0 unless an extension is negotiated that defines meanings for non-zero values.')
        opcode = (a & 15)
        if (opcode not in (0, 1, 2, 8, 9, 10)):
            raise FailedConnectionError(1002, 'Unknown opcode received.')
        masked = ((b & 128) == 128)
        if ((not masked) and (not self.client)):
            raise FailedConnectionError(1002, 'A client MUST mask all frames that it sends to the server')
        length = (b & 127)
        if (opcode & 8):
            # Control-frame constraints: unfragmented, payload <= 125 bytes.
            if (not finished):
                raise FailedConnectionError(1002, 'Control frames must not be fragmented.')
            if (length > 125):
                raise FailedConnectionError(1002, 'All control frames MUST have a payload length of 125 bytes or less')
        elif (opcode and message):
            raise FailedConnectionError(1002, 'Received a non-continuation opcode within fragmented message.')
        elif ((not opcode) and (not message)):
            raise FailedConnectionError(1002, 'Received continuation opcode with no previous fragments received.')
        # Extended payload length: 126 → 16-bit, 127 → 64-bit.
        if (length == 126):
            length = struct.unpack('!H', recv(2))[0]
        elif (length == 127):
            length = struct.unpack('!Q', recv(8))[0]
        if (length > self.max_frame_length):
            raise FailedConnectionError(1009, 'Incoming frame of {} bytes is above length limit of {} bytes.'.format(length, self.max_frame_length))
        if masked:
            mask = struct.unpack('!BBBB', recv(4))
        received = 0
        if ((not message) or (opcode & 8)):
            # Start a new message (control frames always stand alone).
            decoder = (self.UTF8Decoder() if (opcode == 1) else None)
            decompressor = self._get_permessage_deflate_dec(rsv1)
            message = self.Message(opcode, self.max_frame_length, decoder=decoder, decompressor=decompressor)
        if (not length):
            message.push(b'', final=finished)
        else:
            # Stream the payload in, unmasking chunk by chunk.
            while (received < length):
                d = self.socket.recv((length - received))
                if (not d):
                    raise ConnectionClosedError()
                dlen = len(d)
                if masked:
                    d = self._apply_mask(d, mask, length=dlen, offset=received)
                received = (received + dlen)
                try:
                    message.push(d, final=finished)
                except (UnicodeDecodeError, ValueError):
                    raise FailedConnectionError(1007, 'Text data must be valid utf-8')
        return message
def _pack_message(self, message, masked=False, continuation=False, final=True, control_code=None):
    """Serialize *message* into one RFC 6455 frame (header + length + mask + payload).

    masked: XOR-mask the payload (required for client->server frames).
    continuation/final: fragmentation flags for data frames.
    control_code: 8/9/10 for close/ping/pong control frames.
    Raises ProtocolError on invalid control-frame usage.
    """
    is_text = False
    if isinstance(message, str):
        # Text payloads are sent UTF-8 encoded (opcode 1); bytes use opcode 2.
        message = message.encode('utf-8')
        is_text = True
    compress_bit = 0
    compressor = self._get_permessage_deflate_enc()
    is_control_frame = ((control_code or 0) & 8)
    if (message and compressor and (not is_control_frame)):
        # permessage-deflate: compress, then strip the trailing empty-block
        # marker 00 00 ff ff (RFC 7692) and flag RSV1 on the first frame.
        message = compressor.compress(message)
        message += compressor.flush(zlib.Z_SYNC_FLUSH)
        assert (message[(- 4):] == b'\x00\x00\xff\xff')
        message = message[:(- 4)]
        compress_bit = (1 << 6)
    length = len(message)
    if (not length):
        # An empty payload needs no masking pass.
        masked = False
    if control_code:
        if (control_code not in (8, 9, 10)):
            raise ProtocolError('Unknown control opcode.')
        if (continuation or (not final)):
            raise ProtocolError('Control frame cannot be a fragment.')
        if (length > 125):
            raise ProtocolError('Control frame data too large (>125).')
        # Control frames always carry FIN (bit 7).
        header = struct.pack('!B', (control_code | (1 << 7)))
    else:
        # Data frame: opcode 0 for continuations, else 1 (text) / 2 (binary)
        # possibly with RSV1 set; FIN only on the final fragment.
        opcode = (0 if continuation else ((1 if is_text else 2) | compress_bit))
        header = struct.pack('!B', (opcode | ((1 << 7) if final else 0)))
    # Length encoding: 7 bits, or 126 + uint16, or 127 + uint64 (MASK in bit 7).
    lengthdata = ((1 << 7) if masked else 0)
    if (length > 65535):
        lengthdata = struct.pack('!BQ', (lengthdata | 127), length)
    elif (length > 125):
        lengthdata = struct.pack('!BH', (lengthdata | 126), length)
    else:
        lengthdata = struct.pack('!B', (lengthdata | length))
    if masked:
        # NOTE(review): Random(time.time()) is not cryptographically strong;
        # RFC 6455 asks for an unpredictable masking key — confirm upstream intent.
        rand = Random(time.time())
        mask = [rand.getrandbits(8) for _ in range(4)]
        message = RFC6455WebSocket._apply_mask(message, mask, length)
        maskdata = struct.pack('!BBBB', *mask)
    else:
        maskdata = b''
    return b''.join((header, lengthdata, maskdata, message))
def wait(self):
    """Return the next item produced by self.iterator, or None once it is exhausted."""
    return next(iter(self.iterator), None)
def _send(self, frame):
self._sendlock.acquire()
try:
self.socket.sendall(frame)
finally:
self._sendlock.release()
def send(self, message, **kw):
    """Encode *message* into a frame and transmit it; client connections mask frames."""
    kw['masked'] = self.client
    self._send(self._pack_message(message, **kw))
def _send_closing_frame(self, ignore_send_errors=False, close_data=None):
if ((self.version in (8, 13)) and (not self.websocket_closed)):
if (close_data is not None):
(status, msg) = close_data
if isinstance(msg, str):
msg = msg.encode('utf-8')
data = (struct.pack('!H', status) + msg)
else:
data = ''
try:
self.send(data, control_code=8)
except OSError:
if (not ignore_send_errors):
raise
self.websocket_closed = True
def close(self, close_data=None):
    """Send a closing frame, shut down writes, and always close the socket."""
    try:
        self._send_closing_frame(close_data=close_data, ignore_send_errors=True)
        self.socket.shutdown(socket.SHUT_WR)
    except OSError as exc:
        # ENOTCONN means the peer already went away; anything else gets logged.
        if exc.errno != errno.ENOTCONN:
            self.log.write('{ctx} socket shutdown error: {e}'.format(ctx=self.log_context, e=exc))
    finally:
        self.socket.close()
def replace_arg(value, resolve_args):
    """Resolve ROS-launch-style '$(arg name)' and '$(eval ...)' substitutions.

    Plain '$(arg name)' occurrences are replaced with values from
    *resolve_args*.  When *value* is an '$(eval ...)' expression, arg('name')
    references and bare argument names are substituted as quoted strings and
    the remaining expression is evaluated to 'true'/'false'.

    SECURITY NOTE: the eval() below executes arbitrary expressions from the
    launch file — callers must only pass trusted input.
    """
    result = value
    # '.' matches the single separator character after 'arg' (usually a space).
    arg_pattern = re.compile('\\$\\(arg.(?P<name>.*?)\\)')
    for arg in arg_pattern.findall(value):
        if arg in resolve_args:
            result = result.replace('$(arg %s)' % arg, resolve_args[arg])
    if value.startswith('$(eval'):
        # Replace explicit arg('name') references with their quoted values.
        call_pattern = re.compile("arg\\(\\'(?P<name>.*?)\\'\\)")
        for arg in call_pattern.findall(value):
            if arg in resolve_args:
                result = result.replace("arg('%s')" % arg, f"'{resolve_args[arg]}'")
        # Replace bare argument names.  Keys are escaped so a name containing
        # regex metacharacters cannot corrupt the alternation (bug fix).
        names_pattern = re.compile('|'.join(f'({re.escape(item)})' for item in resolve_args))
        for matches in names_pattern.findall(value):
            for arg in matches:
                if arg and arg in resolve_args:
                    result = result.replace('%s' % arg, f"'{resolve_args[arg]}'")
        result = result.replace('$(eval', '').rstrip(')')
        result = 'true' if eval(result) else 'false'
    return result
class NameSuffixTestCase(IdModifyMixin, unittest.TestCase):
    """Checks that name_append_suffix appends '.post' to record IDs, leaving descriptions intact."""
    # Input FASTA: a plain ID plus an NCBI-style ID followed by a description.
    initial_fasta = '>seq1\nACGT\n>gi|260674|gb|S52561.1| {long terminal repeat} [human immunodeficiency virus type]\nACGT'
    # Expected output: '.post' appended to the ID token only.
    target_fasta = '>seq1.post\nACGT\n>gi|260674|gb|S52561.1|.post {long terminal repeat} [human immunodeficiency virus type]\nACGT'
    # Transformation under test, applied by IdModifyMixin's machinery.
    modify_fn = functools.partial(transform.name_append_suffix, suffix='.post')
class IGShoppingReviewStatusReasonWithHelpMessage(AbstractObject):
    """API object for an IG shopping review-status reason (code, help URL, message).

    NOTE(review): generated-SDK style class; `_get_field_enum_info` takes a
    `cls` parameter, so it is restored here as a @classmethod (the decorator
    appears to have been lost) — confirm against the SDK generator output.
    """

    def __init__(self, api=None):
        super(IGShoppingReviewStatusReasonWithHelpMessage, self).__init__()
        self._isIGShoppingReviewStatusReasonWithHelpMessage = True
        self._api = api

    class Field(AbstractObject.Field):
        # Wire-format field names exposed by the API.
        code = 'code'
        help_url = 'help_url'
        message = 'message'

    _field_types = {'code': 'string', 'help_url': 'string', 'message': 'string'}

    @classmethod
    def _get_field_enum_info(cls):
        """Return the (empty) field-name -> enum mapping for this object."""
        field_enum_info = {}
        return field_enum_info
class AggregateResource(ApiResource):
    """Base resource for aggregate endpoints: paging + sorting over self.model.

    NOTE(review): `args` consumes `self.query_args` and `self.sort_args` as
    plain values (no call), so `args`, `sort_args` and `index_column` are
    restored as properties — the @property decorators appear to have been
    stripped upstream; confirm against the original source.
    """
    query_args = {}

    @property
    def args(self):
        """Combined request arguments: paging + per-resource query args + sort args."""
        return utils.extend(args.paging, self.query_args, self.sort_args)

    @property
    def sort_args(self):
        """Sort arguments validated against the model's index."""
        return args.make_sort_args(validator=args.IndexValidator(self.model))

    @property
    def index_column(self):
        """Column used for keyset pagination."""
        return self.model.idx
# NOTE(review): the decorator line had lost its '@pytest.mark' prefix
# (a bare '.parametrize(...)' is a syntax error); restored based on the
# pytest.raises usage in the body.
@pytest.mark.parametrize('name', ('0_starts_with_digit', ' starts_with_space', '$starts_with_dollar', 'has_dollar_$_inside', 'has spaces'))
def test_serializable_field_names_must_be_valid_identifiers(name):
    """Serializable must reject field names that are not valid Python identifiers."""
    with pytest.raises(TypeError, match=f'not valid python identifiers: `{re.escape(name)}`'):
        class Klass(Serializable):
            fields = ((name, big_endian_int),)
class Render():
    """Helpers for batch-editing the color space of selected shading nodes.

    NOTE(review): every method takes `cls` but carries no decorator —
    presumably stripped @classmethod decorators; confirm upstream before
    calling these directly on the class.
    """
    def get_selected_shading_nodes(cls):
        """Collect the selected nodes across all material node trees."""
        selected_nodes = []
        for mat in bpy.data.materials:
            if mat.node_tree:
                for node in mat.node_tree.nodes:
                    if node.select:
                        selected_nodes.append(node)
        return selected_nodes
    def set_selected_image_texture_nodes_color_space(cls, space):
        """Apply color space *space* to every selected image-texture node."""
        nodes = cls.get_selected_shading_nodes()
        for node in nodes:
            cls.set_image_texture_node_color_space(node, space)
    def set_selected_image_texture_nodes_to_srgb(cls):
        """Switch every selected image-texture node to the sRGB utility space."""
        nodes = cls.get_selected_shading_nodes()
        for node in nodes:
            cls.set_to_srgb(node)
    def set_selected_image_texture_nodes_to_raw(cls):
        """Switch every selected image-texture node to the raw color space."""
        nodes = cls.get_selected_shading_nodes()
        for node in nodes:
            cls.set_to_raw(node)
    def set_to_srgb(cls, node):
        # OCIO/ACES utility name for sRGB textures.
        cls.set_image_texture_node_color_space(node, 'Utility - sRGB - Texture')
    def set_to_raw(cls, node):
        cls.set_image_texture_node_color_space(node, 'raw')
    def set_image_texture_node_color_space(cls, node, space):
        """Set the node image's color space; nodes without an image are skipped."""
        try:
            node.image.colorspace_settings.name = space
        except AttributeError:
            # Non-image nodes have no .image attribute — silently ignore them.
            pass
class RefreshTokenTests(mixins.RefreshMixin, RelaySchemaTestCase):
    """Relay-style refreshToken mutation tests; cases come from RefreshMixin."""
    # GraphQL document exercised by the mixin's tests.
    query = '\n    mutation RefreshToken($input: RefreshInput!) {\n      refreshToken(input: $input) {\n        token\n        payload\n        refreshToken\n        refreshExpiresIn\n        clientMutationId\n      }\n    }'
    # Maps the schema's mutation name to the graphql_jwt Refresh mutation.
    refresh_token_mutations = {'refresh_token': Refresh}
def check_vulnerable(fobj, path_chain, stats, has_jndilookup=True):
    """Classify one log4j jar by MD5, bump *stats*, and print a colorized report line.

    Note: consumes the first element of *path_chain* (the list is mutated).
    """
    digest = md5_digest(fobj)
    head = bold(path_chain.pop(0))
    chain_text = ' -> '.join(str(part) for part in [head] + path_chain)
    note = collections.ChainMap(MD5_BAD, MD5_GOOD).get(digest, 'Unknown MD5')
    if digest in MD5_BAD:
        verdict = 'vulnerable' if has_jndilookup else 'patched'
    elif digest in MD5_GOOD:
        verdict = 'good'
    else:
        verdict = 'unknown'
    stats[verdict] += 1
    paint = {'vulnerable': red, 'good': green, 'patched': cyan, 'unknown': yellow}.get(verdict, red)
    timestamp = datetime.datetime.utcnow().replace(microsecond=0)
    hostname = magenta(HOSTNAME)
    status = bold(paint(verdict.upper()))
    md5sum = paint(digest)
    comment = bold(paint(note))
    print(f'[{timestamp}] {hostname} {status}: {chain_text} [{md5sum}: {comment}]')
class TickerInfo():
    """Caches Binance exchange/account/ticker snapshots and answers symbol queries."""

    def __init__(self, client):
        # One-shot snapshots taken at construction time.
        self.exinfo = client.get_exchange_info()
        self.info = client.get_account()
        self.tickers = client.get_symbol_ticker()

    @staticmethod
    def _list_select(items, key, value):
        """Return the first dict in *items* whose *key* equals *value*, else None.

        Restored as a @staticmethod: the original signature had no `self`
        while every call site passes three arguments via self._list_select,
        which raised TypeError.  Also renamed the parameter that shadowed
        the builtin `list`.
        """
        for entry in items:
            if entry[key] == value:
                return entry
        return None

    def get_base_asset(self, symbol):
        """Base asset code for *symbol* (e.g. 'ETH' for 'ETHBTC')."""
        sinfo = self._list_select(self.exinfo['symbols'], 'symbol', symbol)
        return sinfo['baseAsset']

    def get_quote_asset(self, symbol):
        """Quote asset code for *symbol* (e.g. 'BTC' for 'ETHBTC')."""
        sinfo = self._list_select(self.exinfo['symbols'], 'symbol', symbol)
        return sinfo['quoteAsset']

    def get_asset_price_in_btc(self, asset):
        """Price of *asset* in BTC via a direct or inverse ticker, or None if absent."""
        if asset == 'BTC':
            return 1
        direct = self._list_select(self.tickers, 'symbol', asset + 'BTC')
        if direct is not None:
            return float(direct['price'])
        inverse = self._list_select(self.tickers, 'symbol', 'BTC' + asset)
        if inverse is not None:
            return 1 / float(inverse['price'])
        return None
def test_eq(accounts):
    """PublicKeyAccount equality: equal to same-address objects and strings, not to junk."""
    address = '0x14b0Ed2a7C4cC60DD8F676AE44D0831d3c9b2a9E'
    account = PublicKeyAccount(address)
    assert account == PublicKeyAccount(address)
    assert account == address
    assert account != 'potato'
    assert PublicKeyAccount(accounts[0]) == accounts[0]
def calc_intervals(bins_log2s, weights, func):
    """Apply *func* to each non-empty (series, matching weights) pair.

    Returns two NaN-initialized arrays of lower/upper values, one slot per
    input series; empty series leave their slot as NaN.
    """
    n = len(bins_log2s)
    lows = np.full(n, np.nan)
    highs = np.full(n, np.nan)
    for idx, series in enumerate(bins_log2s):
        if not len(series):
            continue
        wts = weights[series.index]
        assert (wts.index == series.index).all()
        lows[idx], highs[idx] = func(series.values, wts.values)
    return (lows, highs)
class PlayerServer(Player):
    """Server-side player proxy that talks to a remote client over a Channel.

    NOTE(review): update_channel reads `new_player.channel` and
    `new_player.connected` as plain attributes, so those two accessors are
    restored as properties (the @property decorators appear to have been
    stripped) — confirm against the original source.
    """

    def __init__(self, channel: Channel, logger, *args, **kwargs):
        Player.__init__(self, *args, **kwargs)
        self._channel: Channel = channel
        self._connected: bool = True
        # Fall back to the root logging module if no logger was supplied.
        self._logger = (logger if logger else logging)

    def disconnect(self):
        """Best-effort 'disconnect' notification, then close the channel once."""
        if self._connected:
            self.try_send_message({'message_type': 'disconnect'})
            self._channel.close()
            self._connected = False

    @property
    def channel(self) -> Channel:
        return self._channel

    @property
    def connected(self) -> bool:
        return self._connected

    def update_channel(self, new_player):
        """Drop the old channel and adopt the reconnected player's channel/state."""
        self.disconnect()
        self._channel = new_player.channel
        self._connected = new_player.connected

    def ping(self) -> bool:
        """Round-trip a ping; on any channel failure, disconnect and return False."""
        try:
            self.send_message({'message_type': 'ping'})
            message = self.recv_message(timeout_epoch=(time.time() + 2))  # 2s pong deadline
            MessageFormatError.validate_message_type(message, expected='pong')
            return True
        except (ChannelError, MessageTimeout, MessageFormatError) as e:
            self._logger.error('Unable to ping {}: {}'.format(self, e))
            self.disconnect()
            return False

    def try_send_message(self, message: Any) -> bool:
        """send_message that swallows ChannelError; returns a success flag."""
        try:
            self.send_message(message)
            return True
        except ChannelError:
            return False

    def send_message(self, message: Any):
        return self._channel.send_message(message)

    def recv_message(self, timeout_epoch: Optional[float]=None) -> Any:
        """Receive one message; a 'disconnect' message surfaces as ChannelError."""
        message = self._channel.recv_message(timeout_epoch)
        if (('message_type' in message) and (message['message_type'] == 'disconnect')):
            raise ChannelError('Client disconnected')
        return message
def test_set_get_del_providers():
    """Adding/removing providers updates both inherited and class-level views."""
    a_extra = providers.Provider()
    b_extra = providers.Provider()
    ContainerA.p13 = a_extra
    ContainerB.p23 = b_extra
    assert ContainerA.providers == {'p11': ContainerA.p11, 'p12': ContainerA.p12, 'p13': a_extra}
    assert ContainerB.providers == {'p11': ContainerA.p11, 'p12': ContainerA.p12, 'p21': ContainerB.p21, 'p22': ContainerB.p22, 'p23': b_extra}
    assert ContainerA.cls_providers == {'p11': ContainerA.p11, 'p12': ContainerA.p12, 'p13': a_extra}
    assert ContainerB.cls_providers == {'p21': ContainerB.p21, 'p22': ContainerB.p22, 'p23': b_extra}
    del ContainerA.p13
    del ContainerB.p23
    assert ContainerA.providers == {'p11': ContainerA.p11, 'p12': ContainerA.p12}
    assert ContainerB.providers == {'p11': ContainerA.p11, 'p12': ContainerA.p12, 'p21': ContainerB.p21, 'p22': ContainerB.p22}
    assert ContainerA.cls_providers == {'p11': ContainerA.p11, 'p12': ContainerA.p12}
    assert ContainerB.cls_providers == {'p21': ContainerB.p21, 'p22': ContainerB.p22}
def main():
    """Scan datagrepper for upstream updates and launch copr rebuilds for changed packages."""
    args = _get_parser().parse_args()
    backend = args.backend.lower()
    # Packages whose upstream version changed within the requested time delta.
    updated_packages = get_updated_packages(get_updates_messages(args.delta), backend)
    log.info('Updated packages per datagrepper %s', len(updated_packages))
    for (package, last_build) in PackagesLogic.webhook_package_candidates(helpers.BuildSourceEnum(args.backend.lower())):
        source_json = json.loads(package.source_json)
        rebuilder = package_from_source(backend, source_json)
        log.debug('candidate %s package %s in %s', args.backend, rebuilder.name, package.copr.full_name)
        if (rebuilder.name not in updated_packages):
            continue
        new_updated_version = updated_packages[rebuilder.name]
        # Determine the version of the package's most recent build, if any.
        last_version = None
        if last_build:
            last_version = last_build.pkg_version
            if (not last_version):
                # Fall back to the version recorded in the build's source JSON.
                source_data = json.loads(last_build.source_json)
                last_version = rebuilder.source_json_version(source_data)
            if ((not last_version) and (not last_build.finished)):
                # A build with an unknown version is still running; don't pile on.
                log.debug('Skipping %s %s in %s, existing build %s', package.name, new_updated_version, package.copr.full_name, last_build.id)
                continue
        log.debug('checking %s (pkg_name %s), last version: %s, new version %s', rebuilder.name, package.name, last_version, new_updated_version)
        # NOTE(review): re.match treats the new version string as a regex
        # pattern against the last version — presumably intentional prefix
        # matching, but confirm (plain equality may have been meant).
        if (last_version and re.match(new_updated_version, last_version)):
            continue
        try:
            rebuilder.build(package.copr, package, new_updated_version)
            log.info('Launched build for %s (%s) version %s in %s', rebuilder.name, package.name, new_updated_version, package.copr.full_name)
        except BadRequest as exc:
            log.error("Can't submit a build: %s", str(exc))
    db.session.commit()
def create_gff(searcher_name, version, annotated_hits, outfile, rm_suffix, gff_ID_field):
    """Write annotated hits to *outfile* as GFF3, yielding each (hit, annotation) pair.

    This is a generator: the caller must iterate it for the file to be
    written.  When *rm_suffix* is set, the trailing '_N' per-CDS suffix is
    stripped from query names to recover the contig name.
    """
    print(colorify(f'Decorating gff file {outfile}...', 'lgreen'), file=serr)
    with open(outfile, 'w') as OUT:
        print('##gff-version 3', file=OUT)
        print(f'## created with {version}', file=OUT)
        # Hits are sorted so the GFF comes out ordered by contig/position.
        for (hit, annotation) in sorted(parse_annotations(annotated_hits), key=(lambda hit: sort_annotated_hits(hit, rm_suffix))):
            (query, target, evalue, score, qstart, qend, sstart, send, pident, qcov, scov, strand, phase, attrs) = hit_to_gff(hit, gff_ID_field)
            if (searcher_name is None):
                attrs.append(f'em_searcher=unk')
            else:
                attrs.append(f'em_searcher={searcher_name}')
            if (annotation is not None):
                attrs.extend(annotation_to_gff(annotation))
            if rm_suffix:
                # Drop the '_N' suffix added per-CDS to get the contig name.
                contig = query[:query.rfind('_')]
            else:
                contig = query
            fields = '\t'.join((str(x) for x in [contig, 'eggNOG-mapper', 'CDS', qstart, qend, score, strand, phase, ';'.join(attrs)]))
            print(fields, file=OUT)
            (yield (hit, annotation))
    return
def validate_signature_block(image_content, sig_blk_num):
    """Return the raw bytes of signature block *sig_blk_num* if valid, else None.

    Signature blocks live in the image's final sector; each is
    SIG_BLOCK_SIZE bytes carrying a magic byte, a version (RSA or ECDSA
    scheme) and a trailing CRC32 computed over the first 1196 bytes.
    """
    offset = ((- SECTOR_SIZE) + (sig_blk_num * SIG_BLOCK_SIZE))
    sig_blk = image_content[offset:(offset + SIG_BLOCK_SIZE)]
    assert (len(sig_blk) == SIG_BLOCK_SIZE)
    (magic, version, _, _, _, _, _, _, blk_crc) = struct.unpack('<BBxx32s384sI384sI384sI16x', sig_blk)
    calc_crc = zlib.crc32(sig_blk[:1196])
    is_invalid_block = (magic != SIG_BLOCK_MAGIC)
    is_invalid_block |= (version not in [SIG_BLOCK_VERSION_RSA, SIG_BLOCK_VERSION_ECDSA])
    # Mask the computed CRC to 32 bits before comparing with the stored field
    # (the 0xFFFFFFFF constant was missing, leaving a syntax error here).
    if (is_invalid_block or (blk_crc != (calc_crc & 0xFFFFFFFF))):
        return None
    key_type = ('RSA' if (version == SIG_BLOCK_VERSION_RSA) else 'ECDSA')
    print(f'Signature block {sig_blk_num} is valid ({key_type}).')
    return sig_blk
def lab2xyz(lab, axis=(- 1), wp=whitepoints['D65'][(- 1)]):
    """Convert CIE L*a*b* values to XYZ, scaled by the whitepoint *wp*.

    Uses the standard CIE inverse transform: f-values above delta = 6/29
    are cubed; smaller ones use the linear segment (116*f - 16) / kappa.
    """
    lab = np.asarray(lab)
    (L, a, b, axis) = separate_colors(lab, axis)
    fy = ((L + 16) / 116.0)
    fz = (fy - (b / 200.0))
    fx = ((a / 500.0) + fy)
    def finv(y):
        # delta = 6/29 = (216/24389) ** (1/3).  The original code raised the
        # epsilon to the power 3 instead of 1/3, which sent all but the very
        # darkest values through the cubic branch and broke the transform's
        # continuity at the segment boundary.
        delta = ((216 / 24389.0) ** (1.0 / 3.0))
        kap = (24389 / 27.0)
        return np.where((y > delta), np.power(y, 3), (((116 * y) - 16) / kap))
    (xr, yr, zr) = (finv(fx), finv(fy), finv(fz))
    return join_colors((xr * wp[0]), (yr * wp[1]), (zr * wp[2]), axis)
class Window(QWidget):
    """Test window: a GL waveform widget flanked by time and zoom sliders."""

    def __init__(self):
        super(Window, self).__init__()
        self.setWindowTitle('GL Waveform Test')
        self.glWidget = GLWaveformWidget()
        self.timeSlider = self._make_slider(0, 300, 10)
        self.zoomSlider = self._make_slider(2, 10, 1)
        # Slider movements drive the waveform position and zoom level.
        self.timeSlider.valueChanged.connect(self.glWidget.setPosition)
        self.zoomSlider.valueChanged.connect(self.glWidget.setZoom)
        layout = QHBoxLayout()
        for widget in (self.glWidget, self.timeSlider, self.zoomSlider):
            layout.addWidget(widget)
        self.setLayout(layout)
        # Initial position/zoom (setValue also fires the connected slots).
        self.timeSlider.setValue(0)
        self.zoomSlider.setValue(4)

    @staticmethod
    def _make_slider(minimum, maximum, tick_interval):
        """Build a vertical slider with ticks on the right and unit steps."""
        slider = QSlider(Qt.Vertical)
        slider.setRange(minimum, maximum)
        slider.setSingleStep(1)
        slider.setTickInterval(tick_interval)
        slider.setTickPosition(QSlider.TicksRight)
        return slider
def determine_filetype(path):
    """Return (filetype, basename) for a NIfTI/CIFTI/GIfTI *path*.

    filetype is 'cifti', 'nifti' or 'gifti'; the basename has the
    recognized extension(s) stripped.  Exits the program for anything else.
    """
    logger = logging.getLogger(__name__)
    base = os.path.basename(path)
    if base.endswith('.nii'):
        # CIFTI files share the .nii suffix but carry a dense-type marker.
        for cifti_ext in ('.dtseries.nii', '.dscalar.nii', '.dlabel.nii'):
            if base.endswith(cifti_ext):
                return ('cifti', base.replace(cifti_ext, ''))
        return ('nifti', base.replace('.nii', ''))
    if base.endswith('nii.gz'):
        return ('nifti', base.replace('.nii.gz', ''))
    if base.endswith('.gii'):
        # Strip the most specific gifti extension first; bare '.gii' last.
        for gifti_ext in ('.shape.gii', '.func.gii', '.surf.gii', '.label.gii', '.gii'):
            base = base.replace(gifti_ext, '')
        return ('gifti', base)
    logger.error('{} is not a nifti or gifti file type'.format(path))
    sys.exit(1)
class WorkspaceRepository(BaseRepository[Workspace], UUIDRepositoryMixin[Workspace]):
    """Data access for Workspace rows: lookups by user, domain and alembic revision."""
    model = Workspace

    async def get_by_admin_user(self, user_id: uuid.UUID) -> list[Workspace]:
        """All workspaces that have *user_id* among their workspace users."""
        query = (
            select(Workspace)
            .join(Workspace.workspace_users)
            .where(WorkspaceUser.user_id == user_id)
        )
        return await self.list(query)

    async def get_by_domain(self, domain: str) -> (Workspace | None):
        """The workspace bound to *domain*, if any."""
        return await self.get_one_or_none(select(Workspace).where(Workspace.domain == domain))

    async def get_main(self) -> (Workspace | None):
        """The workspace served on the main Fief domain."""
        return await self.get_by_domain(settings.fief_domain)

    async def get_available_subdomain(self, name: str) -> str:
        """A free '<slug>.<root>' domain for *name*, randomized on collision."""
        slug = slugify(name)
        domain = f'{slug}.{settings.root_domain}'
        if await self.get_by_domain(domain) is None:
            return domain
        random_string = ''.join(random.choices((string.ascii_lowercase + string.digits), k=6))
        return f'{slug}-{random_string}.{settings.root_domain}'

    async def get_by_alembic_revision(self, alembic_revision: str) -> list[Workspace]:
        """Workspaces currently pinned at the given alembic revision."""
        query = select(Workspace).where(Workspace.alembic_revision == alembic_revision)
        return await self.list(query)
class SymbolPrinter():
    """Renders a symbol table (variables, functions, operators) as HTML tables on *f*."""

    def __init__(self, f):
        self.f = f
        self.funcs = {}
        self.operators = {}
        self.vars = {}

    def add_symbol(self, key, val):
        """File *val* under vars/operators/funcs; private ('_'-prefixed) names are skipped."""
        # Fix: the original tested key.startswith('_') twice; one check suffices.
        if key.startswith('_'):
            return
        if isinstance(val, fracttypes.Var):
            self.vars[key] = val
        elif isinstance(val, fsymbol.OverloadList):
            if val.is_operator():
                self.operators[key] = val
            else:
                self.funcs[key] = val

    def output_entry(self, nrows=1):
        """Open a table cell spanning *nrows* rows."""
        print(('<td valign="top" align="left" rowspan="%d">' % nrows), file=self.f)

    def output_refentry_header(self, key, val, type, nrows=1):
        """Start a table row and emit the symbol-name cell."""
        print('<tr>', file=self.f)
        self.output_entry(nrows)
        print(('%s</td>' % escape(key)), file=self.f)

    def output_overload(self, func):
        """Emit argument-type and return-type cells for one overload."""
        self.output_entry()
        print(', '.join(map(strOfType, func.args)), file=self.f)
        print('</td>', file=self.f)
        self.output_entry()
        print(('%s</td>' % strOfType(func.ret)), file=self.f)

    def output_function(self, val):
        """Emit one row per overload (the first shares the name/description row)."""
        self.output_overload(val[0])
        for func in val[1:]:
            print('</tr>', file=self.f)
            print('<tr>', file=self.f)
            self.output_overload(func)

    def output_refentry_footer(self):
        print('</tr>', file=self.f)

    def output_refentry_body(self, val, nrows=1):
        """Emit the description cell from the symbol's docstring."""
        self.output_entry(nrows)
        text = (val.__doc__ or 'No documentation yet.')
        print(escape(text), file=self.f)
        print('</td>', file=self.f)

    def output_symbol(self, key, val, type):
        """Emit the full table row(s) for one symbol (overloaded or plain)."""
        if isinstance(val, fsymbol.OverloadList):
            nrows = len(val)
            self.output_refentry_header(key, val, type, nrows)
            self.output_refentry_body(val, nrows)
            self.output_function(val)
        else:
            self.output_refentry_header(key, val, type)
            self.output_refentry_body(val)
            print(('<td>%s</td>' % strOfType(val.type)), file=self.f)
        self.output_refentry_footer()

    def output_all(self):
        """Emit the operators, functions and symbols tables."""
        self.output_table(self.operators, 'Operators', 'operator')
        self.output_table(self.funcs, 'Functions', 'function')
        self.output_table(self.vars, 'Symbols', '(symbol)')

    def output_table(self, table, name, type):
        """Emit one HTML table for *table*, rows sorted by symbol name."""
        print(('<h2>%s</h2>' % escape(name)), file=self.f)
        print('<table>', file=self.f)
        print('\n<thead>\n<tr>\n <th>Name</th>\n <th>Description</th>\n <th>Argument Types</th>\n <th>Return Type</th>\n</tr>\n</thead>', file=self.f)
        print('<tbody>', file=self.f)
        for k in sorted(table):
            self.output_symbol(k, table[k], type)
        print('</tbody>', file=self.f)
        print('</table>', file=self.f)
class JsRowTotal(JsRecFunc):
    # NOTE(review): the methods below take no self/cls, and extendColumns
    # rebinds its own parameters before use — this looks like generated or
    # extraction-damaged code (decorators or class-level metadata may have
    # been lost); confirm against the original JsRecFunc implementations.
    def extendArgs(category, originParams, newCols):
        # Append the new column names to the first original parameter (series list).
        originParams[0] += newCols
        return originParams
    def extendColumns(jsSchema, params):
        # alias/params/value describe the injected JS aggregation snippet.
        alias = 'row-total'
        params = ('seriesNames', 'rowDefinition')
        value = "\n seriesNames.forEach(function(v){rowDefinition[v] = 0});\n data.forEach(function(rec){\n if(!rec['_system']){seriesNames.forEach(function(v){rowDefinition[v] += rec[v]})};\n result.push(rec);\n }); result.push(rowDefinition);\n "
class TVDBIDSelectorList(GUIComponent, object):
    """Listbox of TVDB search results: poster, id, name and overview per entry."""
    GUI_WIDGET = eListbox
    def __init__(self):
        GUIComponent.__init__(self)
        self.l = eListboxPythonMultiContent()
        self.l.setFont(0, gFont('Regular', 19))
        self.l.setItemHeight(186)
        self.l.setBuildFunc(self.buildList)
    def buildList(self, entry):
        """Render one (tvdb_id, year, name, overview, path) tuple into list content."""
        (tvdb_id, year, name, overview, path) = entry
        res = [None]
        # Poster area on the left (120x176), or a load-error placeholder text.
        (x, y, w, h) = (5, 5, 120, 176)
        if fileExists(path):
            picloader = PicLoader(w, h)
            image = picloader.load(path)
            res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, x, y, w, h, image))
            picloader.destroy()
        else:
            res.append((eListboxPythonMultiContent.TYPE_TEXT, x, y, w, h, 0, (RT_HALIGN_CENTER | RT_VALIGN_CENTER), 'Ladefehler'))
        # Text column: id, name, then a wrapped overview underneath.
        (x, y, w, h) = (150, 5, 515, 25)
        res.append((eListboxPythonMultiContent.TYPE_TEXT, x, y, w, h, 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), ('%s' % str(tvdb_id))))
        res.append((eListboxPythonMultiContent.TYPE_TEXT, x, (y + 30), w, h, 0, (RT_HALIGN_LEFT | RT_VALIGN_CENTER), ('%s' % toStr(name))))
        res.append((eListboxPythonMultiContent.TYPE_TEXT, x, (y + 60), w, 110, 0, (RT_HALIGN_LEFT | RT_WRAP), ('%s' % toStr(overview))))
        return res
    def getCurrent(self):
        """First field (tvdb_id) of the selected entry, or a falsy value."""
        cur = self.l.getCurrentSelection()
        return (cur and cur[0])
    def postWidgetCreate(self, instance):
        instance.setContent(self.l)
        self.instance.setWrapAround(True)
    def preWidgetRemove(self, instance):
        instance.setContent(None)
    def setList(self, list):
        self.l.setList(list)
    def moveToIndex(self, idx):
        self.instance.moveSelectionTo(idx)
    def getSelectionIndex(self):
        return self.l.getCurrentSelectionIndex()
    def getSelectedIndex(self):
        # NOTE(review): duplicate of getSelectionIndex, kept for API compatibility.
        return self.l.getCurrentSelectionIndex()
    def selectionEnabled(self, enabled):
        if (self.instance is not None):
            self.instance.setSelectionEnable(enabled)
    def pageUp(self):
        if (self.instance is not None):
            self.instance.moveSelection(self.instance.pageUp)
    def pageDown(self):
        if (self.instance is not None):
            self.instance.moveSelection(self.instance.pageDown)
    def up(self):
        if (self.instance is not None):
            self.instance.moveSelection(self.instance.moveUp)
    def down(self):
        if (self.instance is not None):
            self.instance.moveSelection(self.instance.moveDown)
class CompoundEdge(BaseEdge):
    """Edge assembled from several sub-edges laid end to end."""
    description = 'Compound Edge'

    def __init__(self, boxes, types, lengths) -> None:
        super().__init__(boxes, None)
        # Resolve each type char via the edge registry; unknown entries pass through.
        self.types = [self.edges.get(edge_char, edge_char) for edge_char in types]
        self.lengths = lengths
        self.length = sum(lengths)

    def startwidth(self) -> float:
        return self.types[0].startwidth()

    def endwidth(self) -> float:
        return self.types[-1].endwidth()

    def margin(self) -> float:
        """Widest sub-edge extent, measured relative to the first edge's start width."""
        widest = max(sub.margin() + sub.startwidth() for sub in self.types)
        return widest - self.types[0].startwidth()

    def __call__(self, length, **kw):
        """Draw each sub-edge in turn, stepping across width changes between them."""
        if length and abs(length - self.length) > 1e-05:
            raise ValueError('Wrong length for CompoundEdge')
        previous_width = self.types[0].startwidth()
        for sub_edge, segment_length in zip(self.types, self.lengths):
            self.step(sub_edge.startwidth() - previous_width)
            sub_edge(segment_length)
            previous_width = sub_edge.endwidth()
def _remap_reserved(field_name):
idx = field_name.rfind('.')
if (idx > 0):
prefix = field_name[:(idx + 1)]
sub_field_name = field_name[(idx + 1):]
else:
prefix = ''
sub_field_name = field_name
if (sub_field_name in (keyword.kwlist + ['self'])):
sub_field_name = (sub_field_name + '_')
return (prefix + sub_field_name) |
class PSStackParser(PSBaseParser):
    """Token-stream parser that assembles PostScript/PDF container objects
    (arrays, dicts, procedures) using a stack of nested contexts."""

    def __init__(self, fp):
        PSBaseParser.__init__(self, fp)
        self.reset()
        return

    def reset(self):
        """Discard all parser state."""
        self.context = []   # saved (pos, curtype, curstack) frames of open containers
        self.curtype = None # type tag of the innermost open container ('a'/'d'/'p')
        self.curstack = []  # (pos, obj) pairs collected inside the current container
        self.results = []   # finished objects waiting to be returned by nextobject()
        return

    def seek(self, pos):
        """Reposition the underlying tokenizer and drop any partial state."""
        PSBaseParser.seek(self, pos)
        self.reset()
        return

    def push(self, *objs):
        """Push (pos, obj) entries onto the current container's stack."""
        self.curstack.extend(objs)
        return

    def pop(self, n):
        """Remove and return the top *n* entries of the current stack."""
        objs = self.curstack[(- n):]
        self.curstack[(- n):] = []
        return objs

    def popall(self):
        """Remove and return every entry of the current stack."""
        objs = self.curstack
        self.curstack = []
        return objs

    def add_results(self, *objs):
        """Queue finished objects for retrieval via nextobject()."""
        if self.debug:
            logging.debug(('add_results: %r' % (objs,)))
        self.results.extend(objs)
        return

    def start_type(self, pos, type):
        """Open a nested container of the given type, saving the outer context."""
        self.context.append((pos, self.curtype, self.curstack))
        (self.curtype, self.curstack) = (type, [])
        if self.debug:
            logging.debug(('start_type: pos=%r, type=%r' % (pos, type)))
        return

    def end_type(self, type):
        """Close the innermost container, restoring the outer context.

        Returns (pos, objs) for the closed container; raises PSTypeError on
        a mismatched closing token.
        """
        if (self.curtype != type):
            raise PSTypeError(('Type mismatch: %r != %r' % (self.curtype, type)))
        objs = [obj for (_, obj) in self.curstack]
        (pos, self.curtype, self.curstack) = self.context.pop()
        if self.debug:
            logging.debug(('end_type: pos=%r, type=%r, objs=%r' % (pos, type, objs)))
        return (pos, objs)

    def do_keyword(self, pos, token):
        """Hook for subclasses to handle keyword tokens; base class ignores them."""
        return

    def nextobject(self):
        """Parse tokens until one complete top-level object is available, and return it.

        Literals are accumulated on the stack; array/dict/proc begin and end
        keywords open and close nested containers.  Type mismatches are
        fatal only when STRICT is set.
        """
        while (not self.results):
            (pos, token) = self.nexttoken()
            if isinstance(token, (int, float, bool, bytes, PSLiteral)):
                self.push((pos, token))
            elif (token == KEYWORD_ARRAY_BEGIN):
                self.start_type(pos, 'a')
            elif (token == KEYWORD_ARRAY_END):
                try:
                    self.push(self.end_type('a'))
                except PSTypeError:
                    if STRICT:
                        raise
            elif (token == KEYWORD_DICT_BEGIN):
                self.start_type(pos, 'd')
            elif (token == KEYWORD_DICT_END):
                try:
                    (pos, objs) = self.end_type('d')
                    # Dict entries come in key/value pairs; None values are dropped.
                    if ((len(objs) % 2) != 0):
                        raise PSSyntaxError(('Invalid dictionary construct: %r' % (objs,)))
                    d = dict(((literal_name(k), v) for (k, v) in choplist(2, objs) if (v is not None)))
                    self.push((pos, d))
                except PSTypeError:
                    if STRICT:
                        raise
            elif (token == KEYWORD_PROC_BEGIN):
                self.start_type(pos, 'p')
            elif (token == KEYWORD_PROC_END):
                try:
                    self.push(self.end_type('p'))
                except PSTypeError:
                    if STRICT:
                        raise
            else:
                if self.debug:
                    logging.debug(('do_keyword: pos=%r, token=%r, stack=%r' % (pos, token, self.curstack)))
                self.do_keyword(pos, token)
            if self.context:
                # Still inside an open container; keep consuming tokens.
                continue
            else:
                # At top level: flush whatever is complete into self.results.
                self.flush()
        obj = self.results.pop(0)
        if self.debug:
            logging.debug(('nextobject: %r' % (obj,)))
        return obj
# NOTE(review): the decorator lines had lost their '@click.command'/
# '@click.option' prefixes; restored based on the click.File usage —
# confirm the command registration against the original CLI module.
@click.command()
@click.option('-i', 'infile', type=click.File('r'), default='-', help='Input file (Default: stdin)')
@click.option('-v', 'verbose', is_flag=True, default=False, help='Verbose output')
def cmd_data_enrich(infile, verbose):
    """Read newline-separated records from *infile*, enrich each, and print a JSON array."""
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
    lines = infile.read().split('\n')
    result = [enrich(line) for line in lines if line]
    print(json.dumps(result, indent=4))
class OptionSeriesArearangeSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Accessors for the sonification frequency mapping options.

    NOTE(review): duplicated method names only make sense as property
    getter/setter pairs; the @property/@x.setter decorators (lost upstream)
    are restored here — confirm against the generated original.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_execute_python_workflow_list_of_floats(register):
    """Remote round-trip: a list-of-floats workflow serializes its output as a string."""
    from .workflows.basic.list_float_wf import my_wf
    remote = FlyteRemote(Config.auto(config_file=CONFIG), PROJECT, DOMAIN)
    xs: typing.List[float] = [42.24, 999.1, 0.0001]
    execution = remote.execute(my_wf, name='basic.list_float_wf.my_wf', inputs={'xs': xs}, version=VERSION, wait=True)
    assert (execution.outputs['o0'] == '[42.24, 999.1, 0.0001]')
    # Re-running via the workflow's launch plan must behave identically.
    launch_plan = LaunchPlan.get_or_create(workflow=my_wf, name=my_wf.name)
    execution = remote.execute(launch_plan, name='basic.list_float_wf.my_wf', inputs={'xs': [(- 1.1), 0.12345]}, version=VERSION, wait=True)
    assert (execution.outputs['o0'] == '[-1.1, 0.12345]')
class EmmetEvaluateMath(sublime_plugin.TextCommand):
    """Evaluate math expressions in selections (or at the caret's line) and replace them."""

    def run(self, edit: sublime.Edit):
        pending = []
        for sel in self.view.sel():
            if sel.empty():
                # Caret only: search the current line, left of the caret.
                line = self.view.line(sel.begin())
                found = emmet_sublime.evaluate_math(self.view.substr(line), sel.end() - line.begin())
                base = line.begin()
            else:
                # Explicit selection: evaluate the selected text itself.
                text = self.view.substr(sel)
                found = emmet_sublime.evaluate_math(text, len(text))
                base = sel.begin()
            if found:
                region = sublime.Region(base + found['start'], base + found['end'])
                pending.append({'region': region, 'snippet': str(found['snippet'])})
        if pending:
            # Replace right-to-left so earlier regions keep their offsets.
            for item in reversed(pending):
                self.view.replace(edit, item['region'], item['snippet'])
            track_action('Evaluate Math')
# NOTE(review): the '@mock.patch' prefix was missing from the decorator
# line; restored based on the injected mock_git argument — confirm the
# patch target against the original test module.
@mock.patch('gitlabber.git.git')
def test_clone_repo_interrupt(mock_git):
    """A KeyboardInterrupt raised during clone must exit the process (SystemExit)."""
    mock_repo = mock.Mock()
    mock_git.Repo = mock_repo
    git.is_git_repo = mock.MagicMock(return_value=False)
    mock_git.Repo.clone_from.side_effect = KeyboardInterrupt('clone test keyboard interrupt')
    with pytest.raises(SystemExit):
        git.clone_or_pull_project(GitAction(Node(name='dummy_url', url='dummy_url'), 'dummy_dir'))
    mock_git.Repo.clone_from.assert_called_once_with('dummy_url', 'dummy_dir')
class OptionPlotoptionsTimelineDatalabelsFilter(Options):
    """Accessors for the datalabels.filter option (operator / property pair).

    NOTE(review): duplicated method names only make sense as property
    getter/setter pairs; the @property/@x.setter decorators (lost upstream)
    are restored here — confirm against the generated original.
    """

    @property
    def operator(self):
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    # The builtin `property` is still reachable here because the class-body
    # name 'property' is only bound after this decorator is applied.
    @property
    def property(self):
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
class TestStatusResource():
    """Falcon test resource whose responders succeed via HTTPStatus short-circuits.

    NOTE(review): the bare '(before_hook)'-style lines are restored as
    falcon.before/falcon.after hook decorators — confirm the hook helper
    API against the original test module.
    """

    @falcon.before(before_hook)
    def on_get(self, req, resp):
        # The before hook is expected to short-circuit; reaching here means failure.
        resp.status = falcon.HTTP_500
        resp.set_header('X-Failed', 'True')
        resp.text = 'Fail'

    def on_post(self, req, resp):
        resp.status = falcon.HTTP_500
        resp.set_header('X-Failed', 'True')
        resp.text = 'Fail'
        # Raising HTTPStatus overrides the failure status/headers set above.
        raise HTTPStatus(falcon.HTTP_200, headers={'X-Failed': 'False'}, text='Pass')

    @falcon.after(after_hook)
    def on_put(self, req, resp):
        resp.status = '500 Internal Server Error'
        resp.set_header('X-Failed', 'True')
        resp.text = 'Fail'

    def on_patch(self, req, resp):
        raise HTTPStatus(falcon.HTTP_200, text=None)

    @falcon.after(noop_after_hook)
    def on_delete(self, req, resp):
        raise HTTPStatus(201, headers={'X-Failed': 'False'}, text='Pass')
def test_ngrams():
    """ops.ngrams yields len(seq) - n + 1 grams; out-of-range n yields none."""
    ops = get_current_ops()
    token_ids = numpy.asarray([1, 2, 3, 4, 5], dtype=numpy.uint64)
    size = token_ids.shape[0]
    for n in range(1, 10):
        expected = max(0, size - (n - 1))
        assert len(ops.ngrams(n, token_ids)) == expected
    assert len(ops.ngrams(-1, token_ids)) == 0
    assert len(ops.ngrams(size + 1, token_ids)) == 0
class OptionPlotoptionsPolygonSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Accessors for the sonification lowpass-resonance mapping options.

    NOTE(review): duplicated method names only make sense as property
    getter/setter pairs; the @property/@x.setter decorators (lost upstream)
    are restored here — confirm against the generated original.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractNovelkarimaWordpressCom(item):
    """Map a novelkarima.wordpress.com feed item to a release message.

    Returns None for previews/untagged chapters, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def build_distributed_validator(validator_identity: ValidatorIdentity, num_covalidators: int=4) -> DistributedValidator:
    """Assemble a DistributedValidator with *num_covalidators* co-validators and a fresh slashing DB."""
    co_validators = [
        CoValidator(
            validator_identity=validator_identity,
            # Deterministic dummy pubkey: the index zero-padded to 96 hex chars (48 bytes).
            pubkey=BLSPubkey(str(index).zfill(48 * 2)),
            index=index,
        )
        for index in range(num_covalidators)
    ]
    slashing_db = SlashingDB(interchange_format_version=5, genesis_validators_root=Root(), data=[])
    return DistributedValidator(validator_identity=validator_identity, co_validators=co_validators, slashing_db=slashing_db)
def toggle_modal(n_clicks, src, is_open, last_ctx):
    """Dash callback: toggle an image-crop modal when a trigger is clicked.

    Returns ``[is_open, image_src, header, updated_click_store]``.
    NOTE(review): *n_clicks* is immediately overwritten from ``ctx`` --
    presumably the callback has multiple pattern-matched inputs and the
    parameter only establishes the dependency; confirm against the
    callback registration.
    """
    n_clicks = ctx.triggered[0]['value']
    trigger_id = ctx.triggered_id['index']
    # Ignore spurious firings: no clicks yet, or this trigger's click count
    # is unchanged since the previous invocation.
    if ((not n_clicks) or ((trigger_id in last_ctx) and (last_ctx[trigger_id] == n_clicks))):
        raise PreventUpdate
    # Patch only this trigger's entry in the click-count store.
    new_ctx = Patch()
    new_ctx[trigger_id] = n_clicks
    logger.debug(f'Showing modal of {trigger_id} {n_clicks}')
    header = f'Crop of {trigger_id}'
    # n_clicks is always truthy here (guard above), so the toggle branch is
    # taken; the final return is defensive only.
    if n_clicks:
        return [(not is_open), src[(- 1)], header, new_ctx]
    return [is_open, src[(- 1)], header, new_ctx]
class _IamProjectsServiceAccountsRepository(repository_mixins.GetIamPolicyQueryMixin, repository_mixins.ListQueryMixin, _base_repository.GCPRepository):
    """Repository wrapper for the IAM ``projects.serviceAccounts`` API."""
    def __init__(self, **kwargs):
        # Service accounts are keyed by 'name'; page size is controlled by
        # the API's 'pageSize' parameter.
        super(_IamProjectsServiceAccountsRepository, self).__init__(key_field='name', max_results_field='pageSize', component='projects.serviceAccounts', **kwargs)
    def get_iam_policy(self, resource, fields=None, verb='getIamPolicy', include_body=False, resource_field='resource', **kwargs):
        # Delegate to the mixin, overriding its defaults: no request body,
        # resource passed via the 'resource' field.
        return repository_mixins.GetIamPolicyQueryMixin.get_iam_policy(self, resource, fields=fields, verb=verb, include_body=include_body, resource_field=resource_field, **kwargs)
    def get_name(project_id):
        # Normalize a project id into the 'projects/<id>' resource form.
        # NOTE(review): defined without 'self' -- presumably a stripped
        # '@staticmethod' (or a module-level helper); confirm against the
        # original source before calling it on an instance.
        if (not project_id.startswith('projects/')):
            project_id = 'projects/{}'.format(project_id)
        return project_id
class MaxLengthFilterTestCase(unittest.TestCase):
    """Tests for quality_filter.MaxLengthFilter truncation behaviour."""
    def setUp(self):
        # Two records of lengths 4 and 5.
        self.sequences = [SeqRecord(Seq('ACGT')), SeqRecord(Seq('ACTTT'))]
    def test_none_truncated(self):
        # Limit above both lengths: records pass through unchanged.
        instance = quality_filter.MaxLengthFilter(6)
        actual = list(instance.filter_records(self.sequences))
        self.assertEqual(self.sequences, actual)
    def test_some_truncated(self):
        # Limit 4: only the 5-base record is truncated.
        instance = quality_filter.MaxLengthFilter(4)
        actual = list(instance.filter_records(self.sequences))
        self.assertEqual(['ACGT', 'ACTT'], [str(s.seq) for s in actual])
    def test_all_truncated(self):
        # Limit 3: both records truncated; record ids must be preserved.
        instance = quality_filter.MaxLengthFilter(3)
        actual = list(instance.filter_records(self.sequences))
        self.assertEqual(['ACG', 'ACT'], [str(s.seq) for s in actual])
        self.assertEqual([i.id for i in self.sequences], [i.id for i in actual])
def test_generate_gpu_of_a_look_dev_of_an_environment(create_test_data, store_local_session, create_pymel, create_maya_env):
    """GPU representations can be generated for every asset feeding an
    environment, and every reference in the final look-dev GPU scene
    resolves to a GPU repr.

    NOTE(review): the fixtures are defined elsewhere; store_local_session
    is presumably required for its side effect only.
    """
    data = create_test_data
    pm = create_pymel
    maya_env = create_maya_env
    # Building 1: model -> look dev -> layout.
    gen = RepresentationGenerator(version=data['building1_yapi_model_main_v003'])
    gen.generate_gpu()
    gen.version = data['building1_yapi_look_dev_main_v003']
    gen.generate_gpu()
    gen.version = data['building1_layout_main_v003']
    gen.generate_gpu()
    # Building 2: same chain.
    gen = RepresentationGenerator(version=data['building2_yapi_model_main_v003'])
    gen.generate_gpu()
    gen.version = data['building2_yapi_look_dev_main_v003']
    gen.generate_gpu()
    gen.version = data['building2_layout_main_v003']
    gen.generate_gpu()
    # Environment pieces: vegetation, layout, then the look dev itself.
    gen.version = data['ext1_vegetation_main_v003']
    gen.generate_gpu()
    gen.version = data['ext1_layout_main_v003']
    gen.generate_gpu()
    gen.version = data['ext1_look_dev_main_v003']
    gen.generate_gpu()
    # Open the generated GPU repr and verify every reference is a GPU repr.
    r = Representation(version=data['ext1_look_dev_main_v003'])
    v = r.find('GPU')
    maya_env.open(v, force=True)
    for ref in pm.listReferences():
        assert ref.is_repr('GPU')
def ec2_pipeline_setup(generated=None, project='', settings=None, env='', pipeline_type='', region='', region_subnets=None):
    """Assemble the template data for an EC2-based deployment pipeline.

    Args:
        generated: generated-names object (provides ``security_group_app``
            and ``elb_app``).
        project: group/application name embedded in the instance user data.
        settings: pipeline settings with 'app', 'asg' and 'security_group'
            sections; deep-copied, the original is not mutated.
        env: environment name (e.g. 'dev').
        pipeline_type: pipeline type passed through to the user data.
        region: AWS region name.
        region_subnets: availability-zone/subnet mapping for *region*.

    Returns:
        dict: the copied settings with the 'asg' and 'app' sections filled in.
    """
    data = copy.deepcopy(settings)
    user_data = generate_encoded_user_data(env=env, region=region, generated=generated, group_name=project, pipeline_type=pipeline_type)
    # Env default security groups, plus the app's own generated group and
    # any configured extras; duplicates removed afterwards.
    instance_security_groups = sorted(DEFAULT_EC2_SECURITYGROUPS[env])
    instance_security_groups.append(generated.security_group_app)
    instance_security_groups.extend(settings['security_group']['instance_extras'])
    instance_security_groups = remove_duplicate_sg(instance_security_groups)
    LOG.info('Instance security groups to attach: %s', instance_security_groups)
    if settings['asg']['scaling_policy']:
        scalingpolicy = True
        LOG.info('Found scaling policy')
    else:
        scalingpolicy = False
        LOG.info('No scaling policy found')
    # Eureka-registered apps do their own discovery, so no ELB is attached.
    if settings['app']['eureka_enabled']:
        elb = []
    else:
        elb = [generated.elb_app]
    LOG.info('Attaching the following ELB: %s', elb)
    health_checks = check_provider_healthcheck(settings)
    # dev and Eureka apps fall back to plain EC2 health checks.
    if ((env == 'dev') or settings['app']['eureka_enabled']):
        data['asg'].update({'hc_type': 'EC2'})
        LOG.info('Switching health check type to: EC2')
    # Total grace period = health-check grace + application warm-up.
    # NOTE(review): both .get() calls have no default, so a missing key makes
    # the addition below raise TypeError -- presumably the keys are always
    # supplied by upstream config defaults; confirm.
    hc_grace_period = data['asg'].get('hc_grace_period')
    app_grace_period = data['asg'].get('app_grace_period')
    grace_period = (hc_grace_period + app_grace_period)
    ssh_keypair = data['asg'].get('ssh_keypair', None)
    if (not ssh_keypair):
        # Fall back to the conventional per-env/per-region default keypair.
        ssh_keypair = '{0}_{1}_default'.format(env, region)
    LOG.info('SSH keypair (%s) used', ssh_keypair)
    if settings['app']['canary']:
        # Canary instances get their own user data variant.
        canary_user_data = generate_encoded_user_data(env=env, region=region, generated=generated, group_name=project, canary=True)
        data['app'].update({'canary_encoded_user_data': canary_user_data})
    data['asg'].update({'hc_type': data['asg'].get('hc_type').upper(), 'hc_grace_period': grace_period, 'ssh_keypair': ssh_keypair, 'provider_healthcheck': json.dumps(health_checks.providers), 'enable_public_ips': json.dumps(settings['asg']['enable_public_ips']), 'has_provider_healthcheck': health_checks.has_healthcheck, 'asg_whitelist': ASG_WHITELIST})
    data['app'].update({'az_dict': json.dumps(region_subnets), 'encoded_user_data': user_data, 'instance_security_groups': json.dumps(instance_security_groups), 'elb': json.dumps(elb), 'scalingpolicy': scalingpolicy})
    return data
class OptionSeriesColumnpyramidSonificationContexttracksPointgrouping(Options):
    """Auto-generated Highcharts option group: point grouping for a
    columnpyramid series' sonification context tracks.

    NOTE(review): getter/setter pairs share one name; as written the
    setter definitions shadow the getters -- presumably '@property' /
    '@<name>.setter' decorators were stripped from the generated source.
    """
    def algorithm(self):
        # Grouping algorithm; defaults to 'minmax'.
        return self._config_get('minmax')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Point grouping is enabled by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Timespan of each group in milliseconds; defaults to 15.
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Point property the grouping operates on; defaults to 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
def _author_affiliation(**kwargs):
    """Build a TEI ``<affiliation>`` element from keyword properties.

    Recognised keys: ``key``, ``department``, ``laboratory``,
    ``institution``, ``city``, ``country``; anything else is ignored.
    An (possibly empty) ``<address>`` child is always appended.
    """
    affiliation = TEI_E.affiliation()
    if 'key' in kwargs:
        affiliation.attrib['key'] = kwargs['key']
    # Each organisational level becomes an <orgName type="..."> child, in
    # the fixed order department -> laboratory -> institution.
    for org_level in ('department', 'laboratory', 'institution'):
        if org_level in kwargs:
            affiliation.append(TEI_E.orgName(kwargs[org_level], type=org_level))
    address = TEI_E.address()
    affiliation.append(address)
    if 'city' in kwargs:
        address.append(TEI_E.settlement(kwargs['city']))
    if 'country' in kwargs:
        address.append(TEI_E.country(kwargs['country']))
    return affiliation
def transpile_all(args):
    """Transpile every report module in the project to an HTML view file.

    Args:
        args: CLI namespace with ``path``, ``colors`` and ``split``.

    Returns:
        dict: ``{'completed': [view names], 'failed': [view names]}``.
    """
    project_path = (args.path or os.getcwd())
    sys.path.append(project_path)
    reports_path = utils.get_report_path(project_path)
    # Make the report modules and their parent package importable.
    sys.path.append(reports_path)
    sys.path.append(os.path.join(reports_path, '..'))
    settings = __import__('ui_settings', fromlist=['object'])
    results = {'completed': [], 'failed': []}
    for report in os.listdir(reports_path):
        if (report.endswith('.py') and (report != '__init__.py')):
            view_name = report[:(- 3)]
            try:
                # Import + reload so edits are picked up on repeated runs.
                mod = __import__(view_name, fromlist=['object'])
                importlib.reload(mod)
                page = utils.get_page(mod, template=True, colors=args.colors)
                page.node_modules(settings.PACKAGE_PATH, alias=settings.SERVER_PACKAGE_URL)
                # Resolve the views folder relative to the reports path if it
                # does not exist as given.
                if (not os.path.exists(settings.VIEWS_FOLDER)):
                    settings.VIEWS_FOLDER = os.path.join(reports_path, '..', '..', settings.VIEWS_FOLDER)
                options = {'css_route': '/css', 'js_route': '/js'}
                if args.split:
                    # Split mode serves css/js from under the package path.
                    options = {'css_route': ('/%s/css' % settings.PACKAGE_PATH), 'js_route': ('/%s/js' % settings.PACKAGE_PATH)}
                if (not os.path.exists(settings.PACKAGE_PATH)):
                    options['static_path'] = os.path.join(reports_path, '..', '..', settings.PACKAGE_PATH)
                else:
                    options['static_path'] = settings.PACKAGE_PATH
                options['split'] = args.split
                output = page.outs.html_file(path=settings.VIEWS_FOLDER, name=view_name, options=options)
                results['completed'].append(view_name)
                print(output)
            except Exception as err:
                # Best effort: record the failure and carry on with the rest.
                results['failed'].append(view_name)
                print(('Error with view: %s' % view_name))
                print(err)
    return results
class Notices():
    """Accumulates notice dicts (``{'level': ..., 'message': ...}``) loaded
    from a local JSON config file plus any posted at runtime."""

    def __init__(self, local_config_path: str) -> None:
        # Path of the local notices JSON file; re-read on every reset().
        self.local_path = local_config_path
        self.notices: List[Dict[(str, str)]] = []

    def reset(self):
        """Reload notices from the local file, replacing any posted ones.

        A missing/unreadable file yields an empty list; a malformed file
        yields a single ERROR notice containing the raw file content.
        """
        local_notices: List[Dict[(str, str)]] = []
        local_data = ''
        try:
            # 'with' guarantees the stream is closed (the original version
            # opened the file and never closed it).
            with open(self.local_path, 'r') as local_stream:
                local_data = local_stream.read()
            local_notices = parse_json(local_data)
        except OSError:
            # File absent or unreadable: treat as "no local notices".
            pass
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # still propagate; anything else means the file content is bad.
            local_notices.append({'level': 'ERROR', 'message': ('bad local notices: %s' % local_data)})
        self.notices = local_notices

    def post(self, notice):
        """Append *notice* to the end of the list."""
        self.notices.append(notice)

    def prepend(self, notice):
        """Insert *notice* at the front of the list."""
        self.notices.insert(0, notice)

    def extend(self, notices):
        """Append each notice in *notices*, in order."""
        for notice in notices:
            self.post(notice)
.parametrize('obs_content, match', [(dedent('\n GENERAL_OBSERVATION obs\n {\n DATA = RES;\n DATE = 2023-02-01;\n VALUE = 1;\n };\n '), 'ERROR must also be given'), (dedent('\n GENERAL_OBSERVATION obs\n {\n DATE = 2023-02-01;\n VALUE = 1;\n ERROR = 0.01;\n ERROR_MIN = 0.1;\n };\n '), 'Missing item "DATA"')])
def test_validation_of_general_observation(tmpdir, obs_content, match):
    """Each malformed GENERAL_OBSERVATION must raise ObservationConfigError
    with the expected message.

    NOTE(review): the leading '.parametrize' line appears to be a stripped
    '@pytest.mark.parametrize' decorator.
    """
    with tmpdir.as_cwd():
        # Minimal ERT config referencing the observations file below.
        config = dedent('\n NUM_REALIZATIONS 2\n\n TIME_MAP time_map.txt\n OBS_CONFIG observations\n GEN_DATA RES RESULT_FILE:out_%d REPORT_STEPS:0 INPUT_FORMAT:ASCII\n ')
        with open('config.ert', 'w', encoding='utf-8') as fh:
            fh.writelines(config)
        with open('observations', 'w', encoding='utf-8') as fo:
            fo.writelines(obs_content)
        with open('time_map.txt', 'w', encoding='utf-8') as fo:
            fo.writelines('2023-02-01')
        # Parsing the config must fail with the parametrized message.
        with pytest.raises(ObservationConfigError, match=match):
            ErtConfig.from_file('config.ert')
def main() -> None:
    """Demonstrate configuring a UR10 robot with a tool and a world frame."""
    robot = Robot.from_parameters(ur10())
    # Attach a tool offset 1/2/3 along x/y/z.
    attached_tool = Tool()
    attached_tool.position = np.array([1, 2, 3])
    robot.tool = attached_tool
    # Place the robot base at (100, 200, 300) with no rotation.
    robot.world_frame = vector_2_matrix(np.array([100, 200, 300, 0, 0, 0]))
    print(f'Robot: {robot}')
    print(f'Kinematic Chain: {robot.kinematic_chain}')
class TestFeatureMerger():
    """Tests that FeatureMerger horizontally joins two calculators' outputs.

    NOTE(review): the '.parametrize' lines appear to be stripped
    '@pytest.mark.parametrize' decorators.
    """
    .parametrize('data', datas)
    .parametrize('tickers', [['AAPL', 'TSLA'], ['NVDA', 'TSLA'], ['AAPL', 'NVDA', 'TSLA', 'WORK'], ['AAPL', 'ZLG']])
    def test_calculate(self, data, tickers):
        # Three independent feature calculators to merge in pairs.
        fc1 = QuarterlyFeatures(data_key='quarterly', columns=['ebit'], quarter_counts=[2], max_back_quarter=10)
        fc2 = QuarterlyDiffFeatures(data_key='quarterly', columns=['ebit', 'debt'], compare_quarter_idxs=[1, 4], max_back_quarter=10)
        fc3 = BaseCompanyFeatures(data_key='base', cat_columns=['sector', 'sicindustry'])
        X1 = fc1.calculate(data, tickers)
        X2 = fc2.calculate(data, tickers)
        X3 = fc3.calculate(data, tickers)
        # Merge on a multi-column key and on a single-column key.
        fm1 = FeatureMerger(fc1, fc2, on=['ticker', 'date'])
        Xm1 = fm1.calculate(data, tickers)
        fm2 = FeatureMerger(fc1, fc3, on='ticker')
        Xm2 = fm2.calculate(data, tickers)
        # Row count follows the left calculator; columns are concatenated.
        assert (Xm1.shape[0] == X1.shape[0])
        assert (Xm2.shape[0] == X1.shape[0])
        assert (Xm1.shape[1] == (X1.shape[1] + X2.shape[1]))
        assert (Xm2.shape[1] == (X1.shape[1] + X3.shape[1]))
        # Index alignment is preserved (.min() is True only if all match).
        assert (Xm1.index == X1.index).min()
        assert (Xm2.index == X1.index).min()
        # The leading columns of each merge equal the left input verbatim.
        new_cols = Xm1.columns[:X1.shape[1]]
        old_cols = X1.columns
        for (nc, oc) in zip(new_cols, old_cols):
            assert (Xm1[nc] == X1[oc]).min()
        new_cols = Xm2.columns[:X1.shape[1]]
        old_cols = X1.columns
        for (nc, oc) in zip(new_cols, old_cols):
            assert (Xm2[nc] == X1[oc]).min()
def werkzeug(body, headers):
    """Benchmark app factory: build a minimal Werkzeug WSGI handler.

    The handler performs typical per-request work (header read, query-arg
    lookup, URL routing) and returns a constant plain-text response built
    from *body* and *headers*.
    """
    import werkzeug.wrappers as werkzeug
    from werkzeug.routing import Map, Rule
    path = '/hello/<account_id>/test'
    url_map = Map([Rule(path, endpoint='hello')])
    # NOTE(review): '.application' appears to be a stripped
    # '@werkzeug.Request.application' decorator.
    .application
    def hello(request):
        # The locals below are intentionally unused -- the benchmark pays
        # the cost of typical request parsing, not of using the results.
        user_agent = request.headers['User-Agent']
        limit = request.args.get('limit', '10')
        adapter = url_map.bind_to_environ(request.environ)
        (endpoint, values) = adapter.match()
        aid = values['account_id']
        return werkzeug.Response(body, headers=headers, mimetype='text/plain')
    return hello
_heads([Minimum, Maximum, ArgMin, ArgMax, ArgMinUnique, ArgMaxUnique, Supremum, Infimum, Zeros, UniqueZero, Solutions, UniqueSolution, Poles])
def tex_std_operator(head, args, **kwargs):
    """Render a min/max/argmin/zeros/solutions-style operator as LaTeX.

    Supports three call forms: operator over a set (1 argument),
    ``(formula, ForElement(var, S))`` (2 arguments) and
    ``(formula, For/ForElement(...), predicate)`` (3 arguments).
    Raises ValueError for any other arity.
    """
    argstr = [arg.latex(**kwargs) for arg in args]
    opname = {Minimum: '\\min', Maximum: '\\max', ArgMin: '\\operatorname{arg\\,min}', ArgMinUnique: '\\operatorname{arg\\,min*}', ArgMax: '\\operatorname{arg\\,max}', ArgMaxUnique: '\\operatorname{arg\\,max*}', Infimum: '\\operatorname{inf}', Supremum: '\\operatorname{sup}', Zeros: '\\operatorname{zeros}\\,', UniqueZero: '\\operatorname{zero*}\\,', Poles: '\\operatorname{poles}\\,', Solutions: '\\operatorname{solutions}\\,', UniqueSolution: '\\operatorname{solution*}\\,'}[head]
    # Single-argument min/max/sup/inf: render over a set, or as a call.
    if ((head in (Minimum, Maximum, Supremum, Infimum)) and (len(args) == 1)):
        if (args[0].head() == Set):
            return ((opname + ' ') + argstr[0])
        else:
            return ('%s\\left(%s\\right)' % (opname, argstr[0]))
    if (len(args) == 2):
        # (formula, ForElement(var, S)) -> predicate 'var in S'.
        assert (args[1].head() == ForElement)
        formula = args[0]
        (var, S) = args[1].args()
        predicate = Element(var, S)
    elif (len(args) == 3):
        # (formula, For/ForElement(...), predicate); a ForElement folds the
        # membership condition into the predicate.
        (formula, var, predicate) = args
        assert (var.head() in (For, ForElement))
        if (len(var.args()) == 2):
            (var, S) = args[1].args()
            predicate = And(Element(var, S), predicate)
        else:
            (var,) = args[1].args()
    else:
        raise ValueError
    # Bracket compound formulas so the operator binds visually.
    if (formula.head() in (Add, Sub, Neg, Sum, Product, Integral)):
        formula = Brackets(formula)
    # Deliberately disabled alternative: stack a multi-part predicate in a
    # small matrix under the operator.
    if (0 and (predicate.head() == And) and (len(predicate.args()) > 1)):
        predicate = (('\\begin{matrix}' + '\\\\'.join((('\\scriptstyle %s ' % s.latex(in_small=True)) for s in predicate.args()))) + '\\end{matrix}')
    else:
        predicate = predicate.latex(in_small=True)
    if (formula.head() in (Add, Sub)):
        formula = (('\\left(' + formula.latex()) + '\\right)')
    else:
        formula = formula.latex()
    return ('\\mathop{%s}\\limits_{%s} %s' % (opname, predicate, formula))
def diag_quadrupole3d_21(ax, da, A, bx, db, B, R):
    """Machine-generated diagonal quadrupole-moment integrals for a [2|1]
    Cartesian Gaussian shell pair.

    Args:
        ax, bx: Gaussian exponents of centers A and B.
        da, db: contraction/normalization coefficients.
        A, B: Cartesian coordinates of the two centers.
        R: Cartesian coordinates of the quadrupole origin.

    Returns:
        numpy.ndarray of shape (3, 6, 3).

    NOTE(review): this is common-subexpression-eliminated generated code --
    do not edit the x* intermediates by hand; regenerate instead.  Several
    coefficients are literal 0. / 1. / 5. values (x21, x23, x26, x51,
    x59), presumably emitted by the generator for this angular-momentum
    case; confirm against the generator before simplifying.
    """
    result = numpy.zeros((3, 6, 3), dtype=float)
    # Gaussian-product intermediates along each Cartesian direction.
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + R[0])
    x4 = (x2 + B[0])
    x5 = (2.0 * x3)
    x6 = (x4 * x5)
    x7 = (x0 + x6)
    x8 = (x3 * x7)
    x9 = (x2 + A[0])
    x10 = (x7 * x9)
    x11 = ((- 2.0) * x1)
    x12 = (x11 + R[0])
    x13 = (x12 + B[0])
    x14 = (3.0 * x0)
    x15 = (x5 * x9)
    x16 = ((x0 * (x12 + A[0])) + (x3 * (x0 + x15)))
    x17 = (x4 * x9)
    x18 = (2.0 * x17)
    x19 = (x0 * x13)
    x20 = ((x0 * (((x14 + x15) + x18) + x6)) + (x5 * (x10 + x19)))
    x21 = 1.
    x22 = ((ax * bx) * x0)
    # Overlap-type prefactor with the exponential of the A-B separation.
    x23 = (((5. * da) * db) * numpy.exp(((- x22) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x24 = (numpy.sqrt(x0) * x23)
    x25 = (x0 * x24)
    x26 = ((0. * x21) * x25)
    x27 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x28 = (- x27)
    x29 = (x28 + B[1])
    x30 = (x3 ** 2)
    x31 = (x26 * ((x0 * ((x14 + ((4.0 * x3) * x9)) + (2.0 * x30))) + ((2.0 * x16) * x9)))
    x32 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x33 = (- x32)
    x34 = (x33 + B[2])
    x35 = (x28 + A[1])
    x36 = (0.25 * x25)
    x37 = (x20 * x36)
    x38 = (0.5 * x0)
    x39 = (x29 * x35)
    x40 = ((x0 ** 1.5) * x23)
    x41 = (x40 * (x38 + x39))
    x42 = (0.5 * x16)
    x43 = (x24 * x38)
    x44 = (x16 * x43)
    x45 = (x33 + A[2])
    x46 = (x34 * x45)
    x47 = (x40 * (x38 + x46))
    x48 = ((x35 ** 2) + x38)
    x49 = (x19 + x8)
    x50 = (x21 * x40)
    x51 = (0. * x50)
    x52 = (x49 * x51)
    x53 = ((- 2.0) * x27)
    x54 = (x53 + B[1])
    x55 = (2.0 * x39)
    x56 = ((x0 * (x54 + A[1])) + (x35 * (x0 + x55)))
    x57 = (x30 + x38)
    x58 = (x51 * x57)
    x59 = (0. * x50)
    x60 = (x57 * x59)
    x61 = (x43 * x45)
    x62 = (x38 + (x45 ** 2))
    x63 = ((- 2.0) * x32)
    x64 = (x63 + B[2])
    x65 = (2.0 * x46)
    x66 = ((x0 * (x64 + A[2])) + (x45 * (x0 + x65)))
    x67 = ((x0 * ((x11 + A[0]) + B[0])) + (x9 * (x0 + x18)))
    x68 = (x28 + R[1])
    x69 = (x68 ** 2)
    x70 = (x38 + x69)
    x71 = (x51 * x70)
    x72 = (x38 + (x9 ** 2))
    x73 = (x54 + R[1])
    x74 = (x0 * x73)
    x75 = (2.0 * x68)
    x76 = (x29 * x75)
    x77 = (x0 + x76)
    x78 = (x68 * x77)
    x79 = (x74 + x78)
    x80 = (x51 * x79)
    x81 = (x59 * x70)
    x82 = (x40 * (x17 + x38))
    x83 = (x35 * x75)
    x84 = ((x0 * ((x53 + A[1]) + R[1])) + (x68 * (x0 + x83)))
    x85 = (0.5 * x84)
    x86 = (x35 * x77)
    x87 = ((x0 * (((x14 + x55) + x76) + x83)) + (x75 * (x74 + x86)))
    x88 = (x36 * x87)
    x89 = (x43 * x9)
    x90 = (x26 * ((x0 * ((x14 + ((4.0 * x35) * x68)) + (2.0 * x69))) + ((2.0 * x35) * x84)))
    x91 = (x33 + R[2])
    x92 = (x91 ** 2)
    x93 = (x38 + x92)
    x94 = (x51 * x93)
    x95 = (x59 * x93)
    x96 = (x64 + R[2])
    x97 = (x0 * x96)
    x98 = (2.0 * x91)
    x99 = (x34 * x98)
    x100 = (x0 + x99)
    x101 = (x100 * x91)
    x102 = (x101 + x97)
    x103 = (x102 * x51)
    x104 = (x45 * x98)
    x105 = ((x0 * ((x63 + A[2]) + R[2])) + (x91 * (x0 + x104)))
    x106 = (0.5 * x105)
    x107 = (x100 * x45)
    x108 = ((x0 * (((x104 + x14) + x65) + x99)) + (x98 * (x107 + x97)))
    x109 = (x108 * x36)
    x110 = (x26 * ((x0 * ((x14 + ((4.0 * x45) * x91)) + (2.0 * x92))) + ((2.0 * x105) * x45)))
    # Populate result[(derivative axis, quadrupole component, cartesian)].
    result[(0, 0, 0)] = numpy.sum(((- x26) * ((x0 * ((((2.0 * x10) + (x13 * x14)) + x16) + x8)) + (x20 * x9))))
    result[(0, 0, 1)] = numpy.sum(((- x29) * x31))
    result[(0, 0, 2)] = numpy.sum(((- x31) * x34))
    result[(0, 1, 0)] = numpy.sum(((- x35) * x37))
    result[(0, 1, 1)] = numpy.sum(((- x41) * x42))
    result[(0, 1, 2)] = numpy.sum((((- x34) * x35) * x44))
    result[(0, 2, 0)] = numpy.sum(((- x37) * x45))
    result[(0, 2, 1)] = numpy.sum((((- x29) * x44) * x45))
    result[(0, 2, 2)] = numpy.sum(((- x42) * x47))
    result[(0, 3, 0)] = numpy.sum(((- x48) * x52))
    result[(0, 3, 1)] = numpy.sum(((- x56) * x58))
    result[(0, 3, 2)] = numpy.sum((((- x34) * x48) * x60))
    result[(0, 4, 0)] = numpy.sum((((- x35) * x49) * x61))
    result[(0, 4, 1)] = numpy.sum((((- x41) * x45) * x57))
    result[(0, 4, 2)] = numpy.sum((((- x35) * x47) * x57))
    result[(0, 5, 0)] = numpy.sum(((- x52) * x62))
    result[(0, 5, 1)] = numpy.sum((((- x29) * x60) * x62))
    result[(0, 5, 2)] = numpy.sum(((- x58) * x66))
    result[(1, 0, 0)] = numpy.sum(((- x67) * x71))
    result[(1, 0, 1)] = numpy.sum(((- x72) * x80))
    result[(1, 0, 2)] = numpy.sum((((- x34) * x72) * x81))
    result[(1, 1, 0)] = numpy.sum(((- x82) * x85))
    result[(1, 1, 1)] = numpy.sum(((- x88) * x9))
    result[(1, 1, 2)] = numpy.sum((((- x34) * x84) * x89))
    result[(1, 2, 0)] = numpy.sum((((- x45) * x70) * x82))
    result[(1, 2, 1)] = numpy.sum((((- x61) * x79) * x9))
    result[(1, 2, 2)] = numpy.sum((((- x47) * x70) * x9))
    result[(1, 3, 0)] = numpy.sum(((- x4) * x90))
    result[(1, 3, 1)] = numpy.sum(((- x26) * ((x0 * ((((x14 * x73) + x78) + x84) + (2.0 * x86))) + (x35 * x87))))
    result[(1, 3, 2)] = numpy.sum(((- x34) * x90))
    result[(1, 4, 0)] = numpy.sum((((- x4) * x61) * x84))
    result[(1, 4, 1)] = numpy.sum(((- x45) * x88))
    result[(1, 4, 2)] = numpy.sum(((- x47) * x85))
    result[(1, 5, 0)] = numpy.sum((((- x4) * x62) * x81))
    result[(1, 5, 1)] = numpy.sum(((- x62) * x80))
    result[(1, 5, 2)] = numpy.sum(((- x66) * x71))
    result[(2, 0, 0)] = numpy.sum(((- x67) * x94))
    result[(2, 0, 1)] = numpy.sum((((- x29) * x72) * x95))
    result[(2, 0, 2)] = numpy.sum(((- x103) * x72))
    result[(2, 1, 0)] = numpy.sum((((- x35) * x82) * x93))
    result[(2, 1, 1)] = numpy.sum((((- x41) * x9) * x93))
    result[(2, 1, 2)] = numpy.sum((((- x102) * x35) * x89))
    result[(2, 2, 0)] = numpy.sum(((- x106) * x82))
    result[(2, 2, 1)] = numpy.sum((((- x105) * x29) * x89))
    result[(2, 2, 2)] = numpy.sum(((- x109) * x9))
    result[(2, 3, 0)] = numpy.sum((((- x4) * x48) * x95))
    result[(2, 3, 1)] = numpy.sum(((- x56) * x94))
    result[(2, 3, 2)] = numpy.sum(((- x103) * x48))
    result[(2, 4, 0)] = numpy.sum(((((- x105) * x35) * x4) * x43))
    result[(2, 4, 1)] = numpy.sum(((- x106) * x41))
    result[(2, 4, 2)] = numpy.sum(((- x109) * x35))
    result[(2, 5, 0)] = numpy.sum(((- x110) * x4))
    result[(2, 5, 1)] = numpy.sum(((- x110) * x29))
    result[(2, 5, 2)] = numpy.sum(((- x26) * ((x0 * (((x101 + x105) + (2.0 * x107)) + (x14 * x96))) + (x108 * x45))))
    return result
def test_validate_string_in_int_raises():
    """A string value in an int-typed field must raise ValidationError whose
    errors all point at the offending field with the expected schema types.

    NOTE(review): 'schema' and 'validation_raise' are defined elsewhere in
    this module.
    """
    records = [{'str_null': 'str', 'str': 'str', 'integ_null': 'str', 'integ': 21}]
    with pytest.raises(ValidationError) as exc:
        validation_raise(schema, *records)
    # Every reported error should complain about the ['null', 'int'] union
    # on the integ_null field.
    for error in exc.value.errors:
        expected_type = error.schema
        assert (expected_type in ['null', 'int'])
        assert (error.field == 'namespace.missingerror.integ_null')
def from_json_to_html(content: dict, page: Rpt=None):
    """Build report components (and wire their events) on *page* from a JSON
    component definition.

    Args:
        content: component definitions, either nested under a 'components'
            key or being the whole mapping of alias -> definition; may also
            carry a 'body' section with page-level css.
        page: existing report page; a fresh Page() is created when None.

    Returns:
        The populated page object.
    """
    if (page is None):
        page = Page()
    components = (content['components'] if ('components' in content) else content)
    # First pass: instantiate every component.
    for (alias, component) in components.items():
        comp_type = component.get('type', alias)
        comp_category = component.get('category')
        comp_family = component.get('family', 'ui')
        comp_library = component.get('library')
        component['html_code'] = alias
        # Walk family -> category -> library to find the component class.
        ui_age = getattr(page, comp_family)
        if (comp_category is not None):
            ui_age = getattr(ui_age, comp_category)
        if (comp_library is not None):
            ui_age = getattr(ui_age, comp_library)
        ui_component_cls = getattr(ui_age, comp_type)
        # Constructor argument names (skip 'self').
        funcs_args = inspect.getfullargspec(ui_component_cls)[0][1:]
        pmts = {}
        # Rename JSON fields to the constructor's expected names.
        for (field, mapped_field) in MAP_FIELDS.get(comp_type, {}).items():
            if (field in component):
                component[mapped_field] = component[field]
                del component[field]
        # Containers: resolve child aliases to already-built components.
        if (comp_type in ['div', 'row']):
            values = []
            for val in component['components']:
                if (val in page.components):
                    values.append(page.components[val])
                else:
                    values.append(val)
            component['components'] = values
        for arg_name in funcs_args:
            if (arg_name in component):
                pmts[arg_name] = component[arg_name]
        ui_component = ui_component_cls(**pmts)
        if ('css' in component):
            ui_component.css(component['css'])
        if ('style' in component):
            if ('theme' in component['style']):
                ui_component.style.theme(component['style']['theme'])
        if ('class' in component):
            ui_component['attr']['class'].add(component['class'])
    # Second pass: wire events now that all components exist.
    event_count = 1
    for (alias, component) in components.items():
        for (event, event_details) in component.get('events', {}).items():
            # JS fragments to run when the event fires.
            event_frgs = []
            if ('print' in event_details):
                if (event_details['print'] in page.components):
                    event_frgs.append(page.js.alert(page.components[event_details['print']].dom.content))
                else:
                    event_frgs.append(page.js.alert(event_details['print']))
            if ('console' in event_details):
                if (event_details['console'] == 'event'):
                    event_frgs.append(page.js.console.log(events.event))
                elif (event_details['console'] == 'data'):
                    event_frgs.append(page.js.console.log(events.data))
                else:
                    event_frgs.append(page.js.console.log(event_details['console']))
            # Update target components from event data, another component's
            # content, or a literal value.
            for (target_alias, target_value) in event_details.get('targets', {}).items():
                if isinstance(target_value, dict):
                    event_frgs.append(page.components[target_alias].build(events.data[target_value['field']]))
                elif (target_value in page.components):
                    event_frgs.append(page.components[target_alias].build(page.components[target_value].dom.content))
                else:
                    event_frgs.append(page.components[target_alias].build(target_value))
            if hasattr(page.components[alias], event):
                if ('url' in event_details):
                    # REST-backed event: collect inputs and extra data.
                    (query_inputs, query_data) = (None, {})
                    if ('inputs' in event_details):
                        query_inputs = []
                        for query_alias in event_details['inputs']:
                            if isinstance(query_alias, dict):
                                for (k, v) in query_alias.items():
                                    if isinstance(v, dict):
                                        # NOTE(review): this else is read as
                                        # pairing with the 'type'/'components'
                                        # check -- confirm nesting in the
                                        # original source.
                                        if (('type' in v) and (k in page.components)):
                                            if ('transform' in v):
                                                if (v['transform'] == 'stringify'):
                                                    query_data[k] = page.js.json.stringify(getattr(page.components[k].dom, v['type'])())
                                                else:
                                                    query_data[k] = getattr(page.components[k].dom, v['type'])()
                                        else:
                                            query_inputs.append((page.components[k], v))
                            else:
                                query_inputs.append(page.components[query_alias])
                    url_method = event_details.get('method', 'post').lower()
                    for data in event_details.get('data', []):
                        if isinstance(data, dict):
                            for (k, v) in data.items():
                                if isinstance(v, dict):
                                    if (('type' in v) and (k in page.components)):
                                        if ('transform' in v):
                                            if (v['transform'] == 'stringify'):
                                                query_data[k] = page.js.json.stringify(getattr(page.components[k].dom, v['type'])())
                                            else:
                                                query_data[k] = getattr(page.components[k].dom, v['type'])()
                                else:
                                    query_data[k] = v
                    getattr(page.components[alias], event)([page.js.rest(url_method, url=event_details['url'], data=(query_data or None), components=query_inputs, js_code=('response%s' % event_count), headers={'Access-Control-Allow-Origin': '*'}, stringify=event_details.get('json', True)).onSuccess(event_frgs)])
                    event_count += 1
                else:
                    getattr(page.components[alias], event)(event_frgs)
            elif ('url' in event_details):
                # Component has no such event method: fall back to .on().
                query_inputs = None
                if ('inputs' in event_details):
                    query_inputs = []
                    for query_alias in event_details['inputs']:
                        if isinstance(query_alias, dict):
                            for (k, v) in query_alias.items():
                                query_inputs.append((v, page.components[k]))
                        else:
                            query_inputs.append(page.components[query_alias])
                url_method = event_details.get('method', 'post').lower()
                page.components[alias].on(page.js.rest(url_method)(event_details['url']).onSuccess(event_frgs))
            else:
                page.components[alias].on(event, event_frgs)
    if ('body' in content):
        if ('css' in content['body']):
            page.body.css(content['body']['css'])
    return page
class Aria():
    """Accessor for WAI-ARIA attributes on an HTML component.

    Each aria attribute is declared as a getter/setter pair sharing one
    name.  NOTE(review): as written each setter definition shadows its
    getter -- presumably '@property' / '@<name>.setter' decorators were
    stripped from this source; confirm before relying on the getters.

    Fix included below: the ``rowindex`` setter previously wrote to
    'aria-roledescription' (copy/paste bug), clobbering that attribute and
    never setting 'aria-rowindex'.
    """
    def __init__(self, component: primitives.HtmlModel):
        self.component = component
    def set(self, arias: dict):
        """Set several aria attributes at once (keys may include 'aria-')."""
        for (k, v) in arias.items():
            k = k.replace('aria-', '')
            setattr(self, k, v)
    def custom(self, key: str, val):
        """Set an arbitrary aria-<key> attribute to *val*."""
        self.component.attr[('aria-%s' % key)] = val
    def get(self, key: str, dfl=None):
        """Return aria-<key>, or *dfl* when absent."""
        return self.component.attr.get(('aria-%s' % key), dfl)
    def has(self, key: str) -> bool:
        """True when aria-<key> is present on the component."""
        return (('aria-%s' % key) in self.component.attr)
    def js_is(self, key: str, value) -> JsBoolean.JsBoolean:
        """Client-side boolean testing that aria-<key> equals *value*."""
        return JsBoolean.JsBoolean.get(("%s.getAttribute('aria-%s') == %s" % (self.component.dom.varId, key, JsUtils.jsConvertData(value, None))))
    def role(self):
        return self.component.attr.get('role', '')
    def role(self, val):
        self.component.attr['role'] = val
    def atomic(self) -> bool:
        return (self.component.attr.get('aria-atomic', 'false') == 'true')
    def atomic(self, flag: bool):
        self.component.attr['aria-atomic'] = json.dumps(flag)
    def autocomplete(self):
        return self.component.attr.get('aria-autocomplete', False)
    def autocomplete(self, flag):
        # NOTE(review): a truthy flag stores None rather than a token value
        # ('inline'/'list'/'both') -- looks suspicious but is preserved;
        # confirm intent against the original module.
        if ((not flag) and ('aria-autocomplete' in self.component.attr)):
            del self.component.attr['aria-autocomplete']
        else:
            self.component.attr['aria-autocomplete'] = None
    def busy(self):
        return (self.component.attr.get('aria-busy', 'false') == 'true')
    def busy(self, flag: bool):
        self.component.attr['aria-busy'] = json.dumps(flag)
    def checked(self) -> bool:
        return (self.component.attr.get('aria-checked', 'false') == 'true')
    def checked(self, flag: bool):
        self.component.attr['aria-checked'] = json.dumps(flag)
    def colcount(self):
        return self.component.attr.get('aria-colcount', 0)
    def colcount(self, num):
        self.component.attr['aria-colcount'] = num
    def colindex(self):
        return self.component.attr.get('aria-colindex', 0)
    def colindex(self, num):
        self.component.attr['aria-colindex'] = num
    def colspan(self):
        return self.component.attr.get('aria-colspan', 0)
    def colspan(self, num):
        self.component.attr['aria-colspan'] = num
    def controls(self):
        return self.component.attr.get('aria-controls')
    def controls(self, val):
        self.component.attr['aria-controls'] = val
    def current(self):
        return self.component.attr.get('aria-current')
    def current(self, val):
        self.component.attr['aria-current'] = val
    def describedby(self):
        return self.component.attr.get('aria-describedby')
    def describedby(self, val: str):
        self.component.attr['aria-describedby'] = val
    def details(self):
        return self.component.attr.get('aria-details')
    def details(self, val):
        self.component.attr['aria-details'] = val
    def disabled(self) -> bool:
        return (self.component.attr.get('aria-disabled', 'false') == 'true')
    def disabled(self, flag: bool):
        self.component.attr['aria-disabled'] = json.dumps(flag)
    def errormessage(self):
        return self.component.attr.get('aria-errormessage')
    def errormessage(self, val):
        self.component.attr['aria-errormessage'] = val
    def expanded(self) -> bool:
        return (self.component.attr.get('aria-expanded', 'false') == 'true')
    def expanded(self, flag: bool):
        self.component.attr['aria-expanded'] = json.dumps(flag)
    def flowto(self):
        return self.component.attr.get('aria-flowto')
    def flowto(self, val):
        self.component.attr['aria-flowto'] = val
    def haspopup(self) -> bool:
        return (self.component.attr.get('aria-haspopup', 'false') == 'true')
    def haspopup(self, flag: bool):
        self.component.attr['aria-haspopup'] = json.dumps(flag)
    def hidden(self) -> bool:
        return (self.component.attr.get('aria-hidden', 'false') == 'true')
    def hidden(self, flag: bool):
        self.component.attr['aria-hidden'] = json.dumps(flag)
    def invalid(self) -> bool:
        return (self.component.attr.get('aria-invalid', 'false') == 'true')
    def invalid(self, flag: bool):
        self.component.attr['aria-invalid'] = json.dumps(flag)
    def keyshortcuts(self):
        return self.component.attr.get('aria-keyshortcuts')
    def keyshortcuts(self, val):
        self.component.attr['aria-keyshortcuts'] = val
    def label(self):
        return self.component.attr.get('aria-label')
    def label(self, val):
        self.component.attr['aria-label'] = val
    def labelledby(self):
        return self.component.attr.get('aria-labelledby')
    def labelledby(self, val):
        self.component.attr['aria-labelledby'] = val
    def level(self):
        return self.component.attr.get('aria-level')
    def level(self, val):
        self.component.attr['aria-level'] = val
    def live(self):
        return self.component.attr.get('aria-live')
    def live(self, val):
        self.component.attr['aria-live'] = val
    def modal(self):
        return self.component.attr.get('aria-modal')
    def modal(self, val):
        self.component.attr['aria-modal'] = val
    def multiline(self) -> bool:
        return (self.component.attr.get('aria-multiline', 'false') == 'true')
    def multiline(self, flag: bool):
        self.component.attr['aria-multiline'] = json.dumps(flag)
    def multiselectable(self):
        return self.component.attr.get('aria-multiselectable', False)
    def multiselectable(self, val):
        self.component.attr['aria-multiselectable'] = val
    def orientation(self):
        return self.component.attr.get('aria-orientation')
    def orientation(self, val):
        self.component.attr['aria-orientation'] = val
    def owns(self):
        return self.component.attr.get('aria-owns')
    def owns(self, val):
        self.component.attr['aria-owns'] = val
    def placeholder(self):
        return self.component.attr.get('aria-placeholder')
    def placeholder(self, val: str):
        self.component.attr['aria-placeholder'] = val
    def posinset(self):
        return self.component.attr.get('aria-posinset', 0)
    def posinset(self, num):
        self.component.attr['aria-posinset'] = num
    def pressed(self) -> bool:
        return (self.component.attr.get('aria-pressed', 'false') == 'true')
    def pressed(self, flag: bool):
        self.component.attr['aria-pressed'] = json.dumps(flag)
    def readonly(self) -> bool:
        return (self.component.attr.get('aria-readonly', 'false') == 'true')
    def readonly(self, flag: bool):
        self.component.attr['aria-readonly'] = json.dumps(flag)
    def relevant(self) -> bool:
        return (self.component.attr.get('aria-relevant', 'false') == 'true')
    def relevant(self, flag: bool):
        self.component.attr['aria-relevant'] = json.dumps(flag)
    def roledescription(self):
        return self.component.attr.get('aria-roledescription')
    def roledescription(self, val):
        self.component.attr['aria-roledescription'] = val
    def rowindex(self):
        return self.component.attr.get('aria-rowindex')
    def rowindex(self, num):
        # Fixed: previously wrote 'aria-roledescription' (copy/paste bug).
        self.component.attr['aria-rowindex'] = num
    def rowspan(self):
        return self.component.attr.get('aria-rowspan')
    def rowspan(self, num):
        self.component.attr['aria-rowspan'] = num
    def selected(self) -> bool:
        return (self.component.attr.get('aria-selected', 'false') == 'true')
    def selected(self, flag: bool):
        self.component.attr['aria-selected'] = json.dumps(flag)
    def setsize(self):
        return self.component.attr.get('aria-setsize')
    def setsize(self, num):
        self.component.attr['aria-setsize'] = num
    def sort(self) -> bool:
        return (self.component.attr.get('aria-sort', 'false') == 'true')
    def sort(self, flag: bool):
        self.component.attr['aria-sort'] = json.dumps(flag)
    def valuemin(self):
        return self.component.attr.get('aria-valuemin')
    def valuemin(self, num: float):
        self.component.attr['aria-valuemin'] = num
    def valuemax(self):
        return self.component.attr.get('aria-valuemax')
    def valuemax(self, num: float):
        self.component.attr['aria-valuemax'] = num
    def valuenow(self):
        return self.component.attr.get('aria-valuenow')
    def valuenow(self, num: float):
        self.component.attr['aria-valuenow'] = num
    def valuetext(self):
        return self.component.attr.get('aria-valuetext')
    def valuetext(self, text: str):
        self.component.attr['aria-valuetext'] = text
def filter_firewall_vendor_mac_data(json):
    """Project the whitelisted firewall vendor-MAC fields out of *json*.

    Runs the payload through remove_invalid_fields first, then keeps only
    the whitelisted keys whose values are present (not None).

    NOTE(review): the parameter name shadows the stdlib ``json`` module;
    kept unchanged for backward compatibility with keyword callers.
    """
    option_list = ['id', 'mac_number', 'name', 'obsolete']
    json = remove_invalid_fields(json)
    # Dict comprehension replaces the manual accumulate loop; json.get(k)
    # is None both for missing keys and explicit None values, matching the
    # original `k in json and json[k] is not None` check.
    return {key: json[key] for key in option_list if json.get(key) is not None}
def name_exclude(records, filter_regex):
    """Yield records whose id AND description both fail to match *filter_regex*.

    records: iterable of objects with ``.id`` and ``.description`` string
    attributes. The regex is compiled once and ``search`` (not ``match``)
    is used, so a hit anywhere in either field excludes the record.
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logging.info('Applying _name_exclude generator: excluding IDs matching %s in results.', filter_regex)
    regex = re.compile(filter_regex)
    for record in records:
        # De Morgan: exclude when either field matches.
        if not (regex.search(record.id) or regex.search(record.description)):
            yield record
class ELFFile(object):
def __init__(self, stream, stream_loader=None):
    """Parse the ELF identification and header from *stream*.

    stream:
        binary file-like object holding the ELF image (seeked freely).
    stream_loader:
        optional callable(path) -> binary stream used to open auxiliary
        files (e.g. a supplementary DWARF file) referenced by this one.
    """
    self.stream = stream
    # Record total stream length so header reads can be bounds-checked.
    self.stream.seek(0, io.SEEK_END)
    self.stream_len = self.stream.tell()
    # Sets self.elfclass (32/64) and self.little_endian from e_ident.
    self._identify_file()
    self.structs = ELFStructs(little_endian=self.little_endian, elfclass=self.elfclass)
    self.structs.create_basic_structs()
    self.header = self._parse_elf_header()
    # Advanced structs depend on fields parsed from the basic header.
    self.structs.create_advanced_structs(self['e_type'], self['e_machine'], self['e_ident']['EI_OSABI'])
    self.stream.seek(0)
    self.e_ident_raw = self.stream.read(16)
    self._section_header_stringtable = self._get_section_header_stringtable()
    # Lazily built by _make_section_name_map on first name lookup.
    self._section_name_map = None
    self.stream_loader = stream_loader
def load_from_path(cls, path):
    """Open the ELF file at *path* and return an ELFFile for it.

    The attached loader resolves relative auxiliary paths against the
    file's own directory.

    NOTE(review): takes ``cls`` — this reads like a @classmethod whose
    decorator was lost in this copy; confirm against the original source.
    """
    base_directory = os.path.dirname(path)
    def loader(elf_path):
        # Auxiliary files are looked up next to the main file unless absolute.
        if (not os.path.isabs(elf_path)):
            elf_path = os.path.join(base_directory, elf_path)
        return open(elf_path, 'rb')
    stream = open(path, 'rb')
    return ELFFile(stream, loader)
def num_sections(self):
    """Number of section headers; 0 when there is no section table.

    When e_shnum is 0 (extended numbering), the real count is stored in
    sh_size of section header 0.
    """
    if self['e_shoff'] == 0:
        return 0
    count = self['e_shnum']
    if count != 0:
        return count
    return self._get_section_header(0)['sh_size']
def get_section(self, n):
    """Return the Section object for section number *n*."""
    return self._make_section(self._get_section_header(n))
def get_section_by_name(self, name):
    """Return the section named *name*, or None when no such section exists."""
    if self._section_name_map is None:
        self._make_section_name_map()
    index = self._section_name_map.get(name)
    if index is None:
        return None
    return self.get_section(index)
def get_section_index(self, section_name):
    """Return the numeric index of *section_name*, or None when absent."""
    if self._section_name_map is None:
        self._make_section_name_map()
    return self._section_name_map.get(section_name)
def iter_sections(self, type=None):
    """Yield all sections, optionally restricted to those with sh_type == *type*."""
    all_sections = (self.get_section(index) for index in range(self.num_sections()))
    for section in all_sections:
        if type is None or section['sh_type'] == type:
            yield section
def num_segments(self):
    """Number of program headers.

    e_phnum == 0xffff (PN_XNUM) means the real count overflowed and lives
    in sh_info of section header 0.
    """
    phnum = self['e_phnum']
    if phnum >= 65535:
        return self.get_section(0)['sh_info']
    return phnum
def get_segment(self, n):
    """Return the Segment object for segment number *n*."""
    return self._make_segment(self._get_segment_header(n))
def iter_segments(self, type=None):
    """Yield all segments, optionally restricted to those with p_type == *type*."""
    all_segments = (self.get_segment(index) for index in range(self.num_segments()))
    for segment in all_segments:
        if type is None or segment['p_type'] == type:
            yield segment
def address_offsets(self, start, size=1):
    """Yield file offsets backing the VA range [start, start+size).

    Only PT_LOAD segments whose file-backed portion fully contains the
    range contribute an offset.
    """
    end = start + size
    for segment in self.iter_segments(type='PT_LOAD'):
        base = segment['p_vaddr']
        if start >= base and end <= base + segment['p_filesz']:
            yield start - base + segment['p_offset']
def has_dwarf_info(self):
    """True when any known debug-info-bearing section is present."""
    candidates = ('.debug_info', '.zdebug_info', '.eh_frame')
    return any(self.get_section_by_name(name) for name in candidates)
def get_dwarf_info(self, relocate_dwarf_sections=True, follow_links=True):
    """Return a DWARFInfo object built from this file's debug sections.

    relocate_dwarf_sections:
        apply relocations to section data when a matching reloc section exists.
    follow_links:
        also load the supplementary DWARF file (.debug_sup / .gnu_debugaltlink)
        when one is referenced.
    """
    # Section names in this tuple MUST stay in the same order as the
    # tuple-unpack below.
    section_names = ('.debug_info', '.debug_aranges', '.debug_abbrev', '.debug_str', '.debug_line', '.debug_frame', '.debug_loc', '.debug_ranges', '.debug_pubtypes', '.debug_pubnames', '.debug_addr', '.debug_str_offsets', '.debug_line_str', '.debug_loclists', '.debug_rnglists', '.debug_sup', '.gnu_debugaltlink')
    # Presence of '.zdebug_info' signals zlib-gnu compressed debug sections.
    compressed = bool(self.get_section_by_name('.zdebug_info'))
    if compressed:
        # '.debug_x' -> '.zdebug_x'; '.eh_frame' is appended AFTER the
        # rename because it is never compressed with this scheme.
        section_names = tuple(map((lambda x: ('.z' + x[1:])), section_names))
    section_names += ('.eh_frame',)
    (debug_info_sec_name, debug_aranges_sec_name, debug_abbrev_sec_name, debug_str_sec_name, debug_line_sec_name, debug_frame_sec_name, debug_loc_sec_name, debug_ranges_sec_name, debug_pubtypes_name, debug_pubnames_name, debug_addr_name, debug_str_offsets_name, debug_line_str_name, debug_loclists_sec_name, debug_rnglists_sec_name, debug_sup_name, gnu_debugaltlink_name, eh_frame_sec_name) = section_names
    debug_sections = {}
    for secname in section_names:
        section = self.get_section_by_name(secname)
        if (section is None):
            # Missing sections are passed to DWARFInfo as None.
            debug_sections[secname] = None
        else:
            dwarf_section = self._read_dwarf_section(section, relocate_dwarf_sections)
            if (compressed and secname.startswith('.z')):
                dwarf_section = self._decompress_dwarf_section(dwarf_section)
            debug_sections[secname] = dwarf_section
    dwarfinfo = DWARFInfo(config=DwarfConfig(little_endian=self.little_endian, default_address_size=(self.elfclass // 8), machine_arch=self.get_machine_arch()), debug_info_sec=debug_sections[debug_info_sec_name], debug_aranges_sec=debug_sections[debug_aranges_sec_name], debug_abbrev_sec=debug_sections[debug_abbrev_sec_name], debug_frame_sec=debug_sections[debug_frame_sec_name], eh_frame_sec=debug_sections[eh_frame_sec_name], debug_str_sec=debug_sections[debug_str_sec_name], debug_loc_sec=debug_sections[debug_loc_sec_name], debug_ranges_sec=debug_sections[debug_ranges_sec_name], debug_line_sec=debug_sections[debug_line_sec_name], debug_pubtypes_sec=debug_sections[debug_pubtypes_name], debug_pubnames_sec=debug_sections[debug_pubnames_name], debug_addr_sec=debug_sections[debug_addr_name], debug_str_offsets_sec=debug_sections[debug_str_offsets_name], debug_line_str_sec=debug_sections[debug_line_str_name], debug_loclists_sec=debug_sections[debug_loclists_sec_name], debug_rnglists_sec=debug_sections[debug_rnglists_sec_name], debug_sup_sec=debug_sections[debug_sup_name], gnu_debugaltlink_sec=debug_sections[gnu_debugaltlink_name])
    if follow_links:
        dwarfinfo.supplementary_dwarfinfo = self.get_supplementary_dwarfinfo(dwarfinfo)
    return dwarfinfo
def get_supplementary_dwarfinfo(self, dwarfinfo):
    """Load DWARF info from the supplementary file referenced by *dwarfinfo*.

    Returns None when no supplementary path is referenced or when no
    stream_loader was supplied at construction time.
    """
    sup_path = dwarfinfo.parse_debugsupinfo()
    if sup_path is None or self.stream_loader is None:
        return None
    stream = self.stream_loader(sup_path)
    sup_elf = ELFFile(stream)
    result = sup_elf.get_dwarf_info()
    stream.close()
    return result
def has_ehabi_info(self):
    """True when at least one SHT_ARM_EXIDX section exists."""
    exidx_sections = self.iter_sections(type='SHT_ARM_EXIDX')
    return any(exidx_sections)
def get_ehabi_infos(self):
    """Return a list of EHABIInfo, one per SHT_ARM_EXIDX section, or None.

    NOTE(review): relocatable (ET_REL) files are rejected with a bare
    ``assert False`` — asserts disappear under ``python -O``; consider
    raising an explicit exception instead.
    """
    _ret = []
    if (self['e_type'] == 'ET_REL'):
        assert False, "Current version of pyelftools doesn't support relocatable file."
    for section in self.iter_sections(type='SHT_ARM_EXIDX'):
        _ret.append(EHABIInfo(section, self.little_endian))
    # None (not an empty list) signals "no EHABI info present".
    return (_ret if (len(_ret) > 0) else None)
def get_machine_arch(self):
architectures = {'EM_M32': 'AT&T WE 32100', 'EM_SPARC': 'SPARC', 'EM_386': 'x86', 'EM_68K': 'Motorola 68000', 'EM_88K': 'Motorola 88000', 'EM_IAMCU': 'Intel MCU', 'EM_860': 'Intel 80860', 'EM_MIPS': 'MIPS', 'EM_S370': 'IBM System/370', 'EM_MIPS_RS3_LE': 'MIPS RS3000 Little-endian', 'EM_PARISC': 'Hewlett-Packard PA-RISC', 'EM_VPP500': 'Fujitsu VPP500', 'EM_SPARC32PLUS': 'Enhanced SPARC', 'EM_960': 'Intel 80960', 'EM_PPC': 'PowerPC', 'EM_PPC64': '64-bit PowerPC', 'EM_S390': 'IBM S/390', 'EM_SPU': 'IBM SPU/SPC', 'EM_V800': 'NEC V800', 'EM_FR20': 'Fujitsu FR20', 'EM_RH32': 'TRW RH-32', 'EM_RCE': 'Motorola RCE', 'EM_ARM': 'ARM', 'EM_ALPHA': 'Digital Alpha', 'EM_SH': 'Hitachi SH', 'EM_SPARCV9': 'SPARC Version 9', 'EM_TRICORE': 'Siemens TriCore embedded processor', 'EM_ARC': 'Argonaut RISC Core, Argonaut Technologies Inc.', 'EM_H8_300': 'Hitachi H8/300', 'EM_H8_300H': 'Hitachi H8/300H', 'EM_H8S': 'Hitachi H8S', 'EM_H8_500': 'Hitachi H8/500', 'EM_IA_64': 'Intel IA-64', 'EM_MIPS_X': 'MIPS-X', 'EM_COLDFIRE': 'Motorola ColdFire', 'EM_68HC12': 'Motorola M68HC12', 'EM_MMA': 'Fujitsu MMA', 'EM_PCP': 'Siemens PCP', 'EM_NCPU': 'Sony nCPU', 'EM_NDR1': 'Denso NDR1', 'EM_STARCORE': 'Motorola Star*Core', 'EM_ME16': 'Toyota ME16', 'EM_ST100': 'STMicroelectronics ST100', 'EM_TINYJ': 'Advanced Logic TinyJ', 'EM_X86_64': 'x64', 'EM_PDSP': 'Sony DSP', 'EM_PDP10': 'Digital Equipment PDP-10', 'EM_PDP11': 'Digital Equipment PDP-11', 'EM_FX66': 'Siemens FX66', 'EM_ST9PLUS': 'STMicroelectronics ST9+ 8/16 bit', 'EM_ST7': 'STMicroelectronics ST7 8-bit', 'EM_68HC16': 'Motorola MC68HC16', 'EM_68HC11': 'Motorola MC68HC11', 'EM_68HC08': 'Motorola MC68HC08', 'EM_68HC05': 'Motorola MC68HC05', 'EM_SVX': 'Silicon Graphics SVx', 'EM_ST19': 'STMicroelectronics ST19 8-bit', 'EM_VAX': 'Digital VAX', 'EM_CRIS': 'Axis Communications 32-bit', 'EM_JAVELIN': 'Infineon Technologies 32-bit', 'EM_FIREPATH': 'Element 14 64-bit DSP', 'EM_ZSP': 'LSI Logic 16-bit DSP', 'EM_MMIX': "Donald Knuth's educational 64-bit", 
'EM_HUANY': 'Harvard University machine-independent object files', 'EM_PRISM': 'SiTera Prism', 'EM_AVR': 'Atmel AVR 8-bit', 'EM_FR30': 'Fujitsu FR30', 'EM_D10V': 'Mitsubishi D10V', 'EM_D30V': 'Mitsubishi D30V', 'EM_V850': 'NEC v850', 'EM_M32R': 'Mitsubishi M32R', 'EM_MN10300': 'Matsushita MN10300', 'EM_MN10200': 'Matsushita MN10200', 'EM_PJ': 'picoJava', 'EM_OPENRISC': 'OpenRISC 32-bit', 'EM_ARC_COMPACT': 'ARC International ARCompact', 'EM_XTENSA': 'Tensilica Xtensa', 'EM_VIDEOCORE': 'Alphamosaic VideoCore', 'EM_TMM_GPP': 'Thompson Multimedia', 'EM_NS32K': 'National Semiconductor 32000 series', 'EM_TPC': 'Tenor Network TPC', 'EM_SNP1K': 'Trebia SNP 1000', 'EM_ST200': 'STMicroelectronics ST200', 'EM_IP2K': 'Ubicom IP2xxx', 'EM_MAX': 'MAX', 'EM_CR': 'National Semiconductor CompactRISC', 'EM_F2MC16': 'Fujitsu F2MC16', 'EM_MSP430': 'Texas Instruments msp430', 'EM_BLACKFIN': 'Analog Devices Blackfin', 'EM_SE_C33': 'Seiko Epson S1C33', 'EM_SEP': 'Sharp', 'EM_ARCA': 'Arca RISC', 'EM_UNICORE': 'PKU-Unity MPRC', 'EM_EXCESS': 'eXcess', 'EM_DXP': 'Icera Semiconductor Deep Execution Processor', 'EM_ALTERA_NIOS2': 'Altera Nios II', 'EM_CRX': 'National Semiconductor CompactRISC CRX', 'EM_XGATE': 'Motorola XGATE', 'EM_C166': 'Infineon C16x/XC16x', 'EM_M16C': 'Renesas M16C', 'EM_DSPIC30F': 'Microchip Technology dsPIC30F', 'EM_CE': 'Freescale Communication Engine RISC core', 'EM_M32C': 'Renesas M32C', 'EM_TSK3000': 'Altium TSK3000', 'EM_RS08': 'Freescale RS08', 'EM_SHARC': 'Analog Devices SHARC', 'EM_ECOG2': 'Cyan Technology eCOG2', 'EM_SCORE7': 'Sunplus S+core7 RISC', 'EM_DSP24': 'New Japan Radio (NJR) 24-bit DSP', 'EM_VIDEOCORE3': 'Broadcom VideoCore III', 'EM_LATTICEMICO32': 'Lattice FPGA RISC', 'EM_SE_C17': 'Seiko Epson C17', 'EM_TI_C6000': 'TI TMS320C6000', 'EM_TI_C2000': 'TI TMS320C2000', 'EM_TI_C5500': 'TI TMS320C55x', 'EM_TI_ARP32': 'TI Application Specific RISC, 32bit', 'EM_TI_PRU': 'TI Programmable Realtime Unit', 'EM_MMDSP_PLUS': 'STMicroelectronics 64bit VLIW', 
'EM_CYPRESS_M8C': 'Cypress M8C', 'EM_R32C': 'Renesas R32C', 'EM_TRIMEDIA': 'NXP Semiconductors TriMedia', 'EM_QDSP6': 'QUALCOMM DSP6', 'EM_8051': 'Intel 8051', 'EM_STXP7X': 'STMicroelectronics STxP7x', 'EM_NDS32': 'Andes Technology RISC', 'EM_ECOG1': 'Cyan Technology eCOG1X', 'EM_ECOG1X': 'Cyan Technology eCOG1X', 'EM_MAXQ30': 'Dallas Semiconductor MAXQ30', 'EM_XIMO16': 'New Japan Radio (NJR) 16-bit', 'EM_MANIK': 'M2000 Reconfigurable RISC', 'EM_CRAYNV2': 'Cray Inc. NV2', 'EM_RX': 'Renesas RX', 'EM_METAG': 'Imagination Technologies META', 'EM_MCST_ELBRUS': 'MCST Elbrus', 'EM_ECOG16': 'Cyan Technology eCOG16', 'EM_CR16': 'National Semiconductor CompactRISC CR16 16-bit', 'EM_ETPU': 'Freescale', 'EM_SLE9X': 'Infineon Technologies SLE9X', 'EM_L10M': 'Intel L10M', 'EM_K10M': 'Intel K10M', 'EM_AARCH64': 'AArch64', 'EM_AVR32': 'Atmel 32-bit', 'EM_STM8': 'STMicroeletronics STM8 8-bit', 'EM_TILE64': 'Tilera TILE64', 'EM_TILEPRO': 'Tilera TILEPro', 'EM_MICROBLAZE': 'Xilinx MicroBlaze 32-bit RISC', 'EM_CUDA': 'NVIDIA CUDA', 'EM_TILEGX': 'Tilera TILE-Gx', 'EM_CLOUDSHIELD': 'CloudShield', 'EM_COREA_1ST': 'KIPO-KAIST Core-A 1st generation', 'EM_COREA_2ND': 'KIPO-KAIST Core-A 2nd generation', 'EM_ARC_COMPACT2': 'Synopsys ARCompact V2', 'EM_OPEN8': 'Open8 8-bit RISC', 'EM_RL78': 'Renesas RL78', 'EM_VIDEOCORE5': 'Broadcom VideoCore V', 'EM_78KOR': 'Renesas 78KOR', 'EM_56800EX': 'Freescale 56800EX', 'EM_BA1': 'Beyond BA1', 'EM_BA2': 'Beyond BA2', 'EM_XCORE': 'XMOS xCORE', 'EM_MCHP_PIC': 'Microchip 8-bit PIC', 'EM_INTEL205': 'Reserved by Intel', 'EM_INTEL206': 'Reserved by Intel', 'EM_INTEL207': 'Reserved by Intel', 'EM_INTEL208': 'Reserved by Intel', 'EM_INTEL209': 'Reserved by Intel', 'EM_KM32': 'KM211 KM32 32-bit', 'EM_KMX32': 'KM211 KMX32 32-bit', 'EM_KMX16': 'KM211 KMX16 16-bit', 'EM_KMX8': 'KM211 KMX8 8-bit', 'EM_KVARC': 'KM211 KVARC', 'EM_CDP': 'Paneve CDP', 'EM_COGE': 'Cognitive', 'EM_COOL': 'Bluechip Systems CoolEngine', 'EM_NORC': 'Nanoradio Optimized RISC', 
'EM_CSR_KALIMBA': 'CSR Kalimba', 'EM_Z80': 'Zilog Z80', 'EM_VISIUM': 'VISIUMcore', 'EM_FT32': 'FTDI Chip FT32 32-bit RISC', 'EM_MOXIE': 'Moxie', 'EM_AMDGPU': 'AMD GPU', 'EM_RISCV': 'RISC-V', 'EM_BPF': 'Linux BPF - in-kernel virtual machine', 'EM_CSKY': 'C-SKY', 'EM_LOONGARCH': 'LoongArch', 'EM_FRV': 'Fujitsu FR-V'}
return architectures.get(self['e_machine'], '<unknown>')
def get_shstrndx(self):
    """Index of the section-header string table, resolving SHN_XINDEX indirection."""
    index = self['e_shstrndx']
    if index == SHN_INDICES.SHN_XINDEX:
        # Overflowed index is stored in sh_link of section header 0.
        return self._get_section_header(0)['sh_link']
    return index
def __getitem__(self, name):
    """Dict-style access to ELF header fields, e.g. ``elf['e_machine']``."""
    return self.header[name]
def _identify_file(self):
    """Verify the ELF magic and decode EI_CLASS/EI_DATA.

    Sets self.elfclass (32/64) and self.little_endian; raises ELFError on
    invalid identification bytes.
    """
    self.stream.seek(0)
    magic = self.stream.read(4)
    elf_assert(magic == b'\x7fELF', 'Magic number does not match')
    ei_class = self.stream.read(1)
    class_map = {b'\x01': 32, b'\x02': 64}
    if ei_class not in class_map:
        raise ELFError('Invalid EI_CLASS %s' % repr(ei_class))
    self.elfclass = class_map[ei_class]
    ei_data = self.stream.read(1)
    endian_map = {b'\x01': True, b'\x02': False}
    if ei_data not in endian_map:
        raise ELFError('Invalid EI_DATA %s' % repr(ei_data))
    self.little_endian = endian_map[ei_data]
def _section_offset(self, n):
    """File offset of section header *n*: table base + n * entry size."""
    return (self['e_shoff'] + (n * self['e_shentsize']))
def _segment_offset(self, n):
    """File offset of program header *n*: table base + n * entry size."""
    return (self['e_phoff'] + (n * self['e_phentsize']))
def _make_segment(self, segment_header):
    """Instantiate the concrete Segment subclass for the header's p_type."""
    special = {
        'PT_INTERP': lambda: InterpSegment(segment_header, self.stream),
        'PT_DYNAMIC': lambda: DynamicSegment(segment_header, self.stream, self),
        'PT_NOTE': lambda: NoteSegment(segment_header, self.stream, self),
    }
    maker = special.get(segment_header['p_type'])
    if maker is not None:
        return maker()
    # Any other type gets the generic wrapper.
    return Segment(segment_header, self.stream)
def _get_section_header(self, n):
    """Parse section header *n*, or None when its offset lies beyond the stream."""
    offset = self._section_offset(n)
    if offset > self.stream_len:
        return None
    return struct_parse(self.structs.Elf_Shdr, self.stream, stream_pos=offset)
def _get_section_name(self, section_header):
    """Resolve the header's sh_name offset through the section-header string table."""
    table = self._section_header_stringtable
    if table is None:
        raise ELFParseError('String Table not found')
    return table.get_string(section_header['sh_name'])
def _make_section(self, section_header):
    """Instantiate the concrete Section subclass appropriate for sh_type.

    Unknown types fall back to the generic Section; '.stab' is
    special-cased because stabs live in a plain SHT_PROGBITS section.
    """
    name = self._get_section_name(section_header)
    sectype = section_header['sh_type']
    if (sectype == 'SHT_STRTAB'):
        return StringTableSection(section_header, name, self)
    elif (sectype == 'SHT_NULL'):
        return NullSection(section_header, name, self)
    elif (sectype in ('SHT_SYMTAB', 'SHT_DYNSYM', 'SHT_SUNW_LDYNSYM')):
        return self._make_symbol_table_section(section_header, name)
    elif (sectype == 'SHT_SYMTAB_SHNDX'):
        return self._make_symbol_table_index_section(section_header, name)
    elif (sectype == 'SHT_SUNW_syminfo'):
        return self._make_sunwsyminfo_table_section(section_header, name)
    elif (sectype == 'SHT_GNU_verneed'):
        return self._make_gnu_verneed_section(section_header, name)
    elif (sectype == 'SHT_GNU_verdef'):
        return self._make_gnu_verdef_section(section_header, name)
    elif (sectype == 'SHT_GNU_versym'):
        return self._make_gnu_versym_section(section_header, name)
    elif (sectype in ('SHT_REL', 'SHT_RELA')):
        return RelocationSection(section_header, name, self)
    elif (sectype == 'SHT_DYNAMIC'):
        return DynamicSection(section_header, name, self)
    elif (sectype == 'SHT_NOTE'):
        return NoteSection(section_header, name, self)
    elif ((sectype == 'SHT_PROGBITS') and (name == '.stab')):
        return StabSection(section_header, name, self)
    elif (sectype == 'SHT_ARM_ATTRIBUTES'):
        return ARMAttributesSection(section_header, name, self)
    elif (sectype == 'SHT_RISCV_ATTRIBUTES'):
        return RISCVAttributesSection(section_header, name, self)
    elif (sectype == 'SHT_HASH'):
        return self._make_elf_hash_section(section_header, name)
    elif (sectype == 'SHT_GNU_HASH'):
        return self._make_gnu_hash_section(section_header, name)
    elif (sectype == 'SHT_RELR'):
        return RelrRelocationSection(section_header, name, self)
    else:
        # Unknown/uninteresting types still get a generic wrapper.
        return Section(section_header, name, self)
def _make_section_name_map(self):
    """Build the name -> section-index cache used by name lookups.

    Duplicate names resolve to the last section with that name (dict
    overwrite), matching the original loop's behavior.
    """
    self._section_name_map = {section.name: index for (index, section) in enumerate(self.iter_sections())}
def _make_symbol_table_section(self, section_header, name):
    """Create a SymbolTableSection wired to its linked string table (sh_link)."""
    linked_stringtable = self.get_section(section_header['sh_link'])
    return SymbolTableSection(section_header, name, elffile=self, stringtable=linked_stringtable)
def _make_symbol_table_index_section(self, section_header, name):
    """Create a SymbolTableIndexSection; sh_link is the associated symtab index."""
    return SymbolTableIndexSection(section_header, name, elffile=self, symboltable=section_header['sh_link'])
def _make_sunwsyminfo_table_section(self, section_header, name):
    """Create a SUNWSyminfoTableSection wired to its linked table (sh_link)."""
    linked = self.get_section(section_header['sh_link'])
    return SUNWSyminfoTableSection(section_header, name, elffile=self, symboltable=linked)
def _make_gnu_verneed_section(self, section_header, name):
    """Create a GNUVerNeedSection wired to its linked string table (sh_link)."""
    linked = self.get_section(section_header['sh_link'])
    return GNUVerNeedSection(section_header, name, elffile=self, stringtable=linked)
def _make_gnu_verdef_section(self, section_header, name):
    """Create a GNUVerDefSection wired to its linked string table (sh_link)."""
    linked = self.get_section(section_header['sh_link'])
    return GNUVerDefSection(section_header, name, elffile=self, stringtable=linked)
def _make_gnu_versym_section(self, section_header, name):
    """Create a GNUVerSymSection wired to its linked table (sh_link)."""
    linked = self.get_section(section_header['sh_link'])
    return GNUVerSymSection(section_header, name, elffile=self, symboltable=linked)
def _make_elf_hash_section(self, section_header, name):
    """Create an ELFHashSection wired to its linked symbol table (sh_link)."""
    linked_symtab = self.get_section(section_header['sh_link'])
    return ELFHashSection(section_header, name, self, linked_symtab)
def _make_gnu_hash_section(self, section_header, name):
    """Create a GNUHashSection wired to its linked symbol table (sh_link)."""
    linked_symtab = self.get_section(section_header['sh_link'])
    return GNUHashSection(section_header, name, self, linked_symtab)
def _get_segment_header(self, n):
    """Parse program header *n* from its computed file offset."""
    return struct_parse(self.structs.Elf_Phdr, self.stream, stream_pos=self._segment_offset(n))
def _get_section_header_stringtable(self):
    """Return the section-header string table, or None when its header is unreadable."""
    header = self._get_section_header(self.get_shstrndx())
    if header is None:
        return None
    return StringTableSection(header=header, name='', elffile=self)
def _parse_elf_header(self):
    """Parse the ELF header (Ehdr) from the start of the stream."""
    return struct_parse(self.structs.Elf_Ehdr, self.stream, stream_pos=0)
def _read_dwarf_section(self, section, relocate_dwarf_sections):
    """Read *section* into a DebugSectionDescriptor, applying relocations if requested.

    For dsPIC30 files with "phantom bytes" every second byte is padding:
    only even-indexed bytes are kept and the reported size is halved.
    """
    phantom_bytes = self.has_phantom_bytes()
    section_stream = BytesIO()
    section_data = section.data()
    # Drop the phantom (odd-indexed) bytes when present.
    section_stream.write((section_data[::2] if phantom_bytes else section_data))
    if relocate_dwarf_sections:
        reloc_handler = RelocationHandler(self)
        reloc_section = reloc_handler.find_relocations_for_section(section)
        if (reloc_section is not None):
            if phantom_bytes:
                # Relocation offsets would be wrong after byte-dropping.
                raise ELFParseError('This binary has relocations in the DWARF sections, currently not supported.')
            else:
                reloc_handler.apply_section_relocations(section_stream, reloc_section)
    return DebugSectionDescriptor(stream=section_stream, name=section.name, global_offset=section['sh_offset'], size=((section.data_size // 2) if phantom_bytes else section.data_size), address=section['sh_addr'])
def _decompress_dwarf_section(section):
    """Return a copy of *section* with its zlib-gnu payload decompressed.

    Expected layout: b'ZLIB' + 8-byte big-endian uncompressed size +
    zlib stream.

    NOTE(review): no ``self`` parameter — this reads like a @staticmethod
    whose decorator was lost in this copy; confirm against the original.
    """
    assert (section.size > 12), 'Unsupported compression format.'
    section.stream.seek(0)
    compression_type = section.stream.read(4)
    assert (compression_type == b'ZLIB'), ('Invalid compression type: %r' % compression_type)
    uncompressed_size = struct.unpack('>Q', section.stream.read(8))[0]
    decompressor = zlib.decompressobj()
    uncompressed_stream = BytesIO()
    # Decompress incrementally (PAGESIZE chunks) to bound memory use.
    while True:
        chunk = section.stream.read(PAGESIZE)
        if (not chunk):
            break
        uncompressed_stream.write(decompressor.decompress(chunk))
    uncompressed_stream.write(decompressor.flush())
    uncompressed_stream.seek(0, io.SEEK_END)
    size = uncompressed_stream.tell()
    # Sanity-check against the declared size from the section prefix.
    assert (uncompressed_size == size), ('Wrong uncompressed size: expected %r, but got %r' % (uncompressed_size, size))
    return section._replace(stream=uncompressed_stream, size=size)
def close(self):
    """Close the underlying stream."""
    self.stream.close()
def __enter__(self):
    """Support ``with ELFFile.load_from_path(...) as elf:`` usage."""
    return self
def __exit__(self, type, value, traceback):
    """Close the stream on context-manager exit; exceptions propagate."""
    self.close()
def has_phantom_bytes(self):
    """Whether the file uses "phantom bytes" (dsPIC30 stores padding in every
    second byte of debug sections).

    Bug fix: the original line was corrupted — ``(self['e_flags'] & ) == 0``
    with the flag constant missing, which is a syntax error.
    NOTE(review): EF_PIC30_NO_PHANTOM_BYTE matches upstream pyelftools;
    confirm against this project's E_FLAGS enum.
    """
    return ((self['e_machine'] == 'EM_DSPIC30F') and ((self['e_flags'] & E_FLAGS.EF_PIC30_NO_PHANTOM_BYTE) == 0))
def _create_mysql_database(db_name: str, db_url: str, try_to_create_db: bool=False):
    """Create the MySQL database *db_name* if it does not already exist.

    A successful connection to *db_url* means the database exists and the
    function returns early. Only runs at all when try_to_create_db is True.
    Re-raises on any failure other than "database missing".
    """
    from sqlalchemy import create_engine, DDL
    from sqlalchemy.exc import SQLAlchemyError, OperationalError
    if (not try_to_create_db):
        logger.info(f'Skipping creation of database {db_name}')
        return
    engine = create_engine(db_url)
    try:
        # Connecting succeeds only when the database already exists.
        with engine.connect() as conn:
            logger.info(f'Database {db_name} already exists')
            return
    except OperationalError as oe:
        # NOTE(review): matching the server's 'Unknown database' message
        # text is brittle across MySQL versions/locales — confirm.
        if ('Unknown database' in str(oe)):
            try:
                # Strip the database name off the URL to connect at server level.
                no_db_name_url = db_url.rsplit('/', 1)[0]
                engine_no_db = create_engine(no_db_name_url)
                with engine_no_db.connect() as conn:
                    # NOTE(review): db_name is interpolated into DDL
                    # (identifiers cannot be bound parameters) — ensure it
                    # comes from trusted configuration, not user input.
                    conn.execute(DDL(f'CREATE DATABASE {db_name} CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci'))
                logger.info(f'Database {db_name} successfully created')
            except SQLAlchemyError as e:
                logger.error(f'Failed to create database {db_name}: {e}')
                raise
        else:
            logger.error(f'Error connecting to database {db_name}: {oe}')
            raise
def monitor(session, *args, **kwargs):
    """Inputfunc: start or stop monitoring a whitelisted field on the session's puppet.

    kwargs:
        name: monitorable name; must be a key of the _monitorable whitelist.
        stop: when truthy, remove the monitor instead of adding it.
        outputfunc_name, category: forwarded to the change callback.
    Silently does nothing for unknown names or sessions without a puppet.
    """
    from evennia.scripts.monitorhandler import MONITOR_HANDLER
    name = kwargs.get('name', None)
    outputfunc_name = kwargs.get('outputfunc_name', 'monitor')
    category = kwargs.get('category', None)
    # Only whitelisted field names are honored, and only with a puppet present.
    if (name and (name in _monitorable) and session.puppet):
        field_name = _monitorable[name]
        obj = session.puppet
        if kwargs.get('stop', False):
            # idstring keys the monitor to this session so each session
            # manages only its own monitors.
            MONITOR_HANDLER.remove(obj, field_name, idstring=session.sessid)
        else:
            MONITOR_HANDLER.add(obj, field_name, _on_monitor_change, idstring=session.sessid, persistent=False, name=name, session=session, outputfunc_name=outputfunc_name, category=category)
class CardsTests(N26TestBase):
    """CLI tests for card listing and (un)blocking, backed by canned JSON responses.

    NOTE(review): the bare ``_requests(...)`` expression lines before each
    test read like ``@_requests`` mock decorators whose '@' was lost in
    this copy; as written they are no-op expressions — confirm against the
    original source.
    """
    _requests(method=GET, response_file='cards.json')
    def test_cards_cli(self):
        # `cards` output shows card type, status and the masked PAN.
        from n26.cli import cards
        result = self._run_cli_cmd(cards)
        self.assertIn('MASTERCARD', result.output)
        self.assertIn('MAESTRO', result.output)
        self.assertIn('active', result.output)
        self.assertIn('123456******1234', result.output)
    _requests(method=GET, response_file='cards.json')
    _requests(method=POST, response_file='card_block_single.json')
    def test_block_card_cli_single(self):
        # Blocking one named card echoes exactly one confirmation line.
        from n26.cli import card_block
        card_id = '-1234-abcd-abcd-ab'
        result = self._run_cli_cmd(card_block, ['--card', card_id])
        self.assertEqual(result.output, 'Blocked card: {}\n'.format(card_id))
    _requests(method=GET, response_file='cards.json')
    _requests(method=POST, response_file='card_block_single.json')
    def test_block_card_cli_all(self):
        # Without --card, every card from the listing is blocked in order.
        from n26.cli import card_block
        card_id_1 = '-1234-abcd-abcd-ab'
        card_id_2 = '-1234-abcd-abcd-ab'
        result = self._run_cli_cmd(card_block)
        self.assertEqual(result.output, 'Blocked card: {}\nBlocked card: {}\n'.format(card_id_1, card_id_2))
    _requests(method=GET, response_file='cards.json')
    _requests(method=POST, response_file='card_unblock_single.json')
    def test_unblock_card_cli_single(self):
        # Unblocking one named card echoes exactly one confirmation line.
        from n26.cli import card_unblock
        card_id = '-1234-abcd-abcd-ab'
        result = self._run_cli_cmd(card_unblock, ['--card', card_id])
        self.assertEqual(result.output, 'Unblocked card: {}\n'.format(card_id))
    _requests(method=GET, response_file='cards.json')
    _requests(method=POST, response_file='card_unblock_single.json')
    def test_unblock_card_cli_all(self):
        # Without --card, every card from the listing is unblocked in order.
        from n26.cli import card_unblock
        card_id_1 = '-1234-abcd-abcd-ab'
        card_id_2 = '-1234-abcd-abcd-ab'
        result = self._run_cli_cmd(card_unblock)
        self.assertEqual(result.output, 'Unblocked card: {}\nUnblocked card: {}\n'.format(card_id_1, card_id_2))
def _handler1(content_path, model_path, resize_height=None, resize_width=None, save_path=None, prefix=None, suffix=None):
    """Run a trained image-transform network over the images at *content_path*.

    Restores the TF1 checkpoint at *model_path*, feeds the (optionally
    resized) images through itn.transform, optionally saves the results,
    and returns the raw network output.
    """
    content_target = get_images(content_path, resize_height, resize_width)
    # TF1 style: a fresh graph + session per call so checkpoints don't collide.
    with tf.Graph().as_default(), tf.Session() as sess:
        # Placeholder shape is pinned to the loaded batch's exact shape.
        content_image = tf.placeholder(tf.float32, shape=content_target.shape, name='content_image')
        output_image = itn.transform(content_image)
        saver = tf.train.Saver()
        saver.restore(sess, model_path)
        output = sess.run(output_image, feed_dict={content_image: content_target})
        if (save_path is not None):
            save_images(content_path, output, save_path, prefix=prefix, suffix=suffix)
        return output
()
def search_packages(search_term: str, channel: Optional[str]=None, shipper: Optional[str]=None):
    """Find the first un-manifested Sales Invoice matching *search_term*.

    The term is OR-matched against the tracking code, shipping package
    code, and invoice code fields; optional channel/shipper narrow the
    search. Returns the invoice name, or None when nothing matches.
    """
    filters = {CHANNEL_ID_FIELD: channel, SHIPPING_PROVIDER_CODE: shipper, MANIFEST_GENERATED_CHECK: 0}
    # Drop unset optional filters so they don't constrain the query.
    filters = {k: v for (k, v) in filters.items() if (v is not None)}
    or_filters = {TRACKING_CODE_FIELD: search_term, SHIPPING_PACKAGE_CODE_FIELD: search_term, INVOICE_CODE_FIELD: search_term}
    packages = frappe.get_list('Sales Invoice', filters=filters, or_filters=or_filters, limit_page_length=1)
    if packages:
        return packages[0].name
_type(OSPF_OPAQUE_TYPE_EXTENDED_PREFIX_LSA)
class ExtendedPrefixOpaqueBody(OpaqueBody):
    """Opaque-LSA body carrying Extended Prefix TLVs (RFC 7684)."""

    def parser(cls, buf):
        """Parse a sequence of Extended Prefix TLVs out of *buf*.

        NOTE(review): takes ``cls`` — reads like a @classmethod whose
        decorator was lost in this copy; confirm against the original.

        Bug fix: an unknown TLV type previously left ``buf`` untouched,
        so the while-loop spun forever. Unknown TLVs are now skipped
        (header + value) and parsing continues.
        """
        buf = six.binary_type(buf)
        tlvs = []
        while buf:
            (type_, length) = struct.unpack_from('!HH', buf)
            if (len(buf[struct.calcsize('!HH'):]) < length):
                raise stream_parser.StreamParser.TooSmallException(('%d < %d' % (len(buf), length)))
            tlvcls = ExtendedPrefixTLV._lookup_type(type_)
            if tlvcls:
                (tlv, buf) = tlvcls.parser(buf)
                tlvs.append(tlv)
            else:
                # Advance past the unrecognized TLV instead of looping forever.
                buf = buf[(struct.calcsize('!HH') + length):]
        return cls(tlvs)
def gethtml(url, times=3):
    """Fetch *url* as UTF-8 text, retrying up to *times* times on exceptions.

    Returns the page text on HTTP 200; once retries are exhausted, returns
    the formatted exception-info string. A non-200 response without an
    exception falls through and returns None silently.
    NOTE(review): verify=False disables TLS certificate checking — confirm
    this is intentional for the target sites.
    """
    try:
        ua = get_UserAgent()
        session = get_request_session(url)
        response = session.get(url, headers={'User-Agent': ua}, timeout=8, verify=False)
        response.encoding = 'utf-8'
        if (response.status_code == 200):
            return response.text
    except Exception as e:
        error_msg = '{}'.format(sys.exc_info())
        print(error_msg, '')
        # Back off briefly, then retry recursively with a decremented budget.
        time.sleep(1)
        if (times > 0):
            return gethtml(url, times=(times - 1))
        else:
            return error_msg
class GradientEditorWidget(AbstractGradientEditor):
    """Toolkit-independent core of a color-gradient editor.

    Holds a GradientTable, mirrors edits into a VTK lookup table or volume
    property, and notifies an optional callback when the table changes.
    Concrete toolkit subclasses populate function_controls and
    gradient_control.
    """

    def __init__(self, master=None, vtk_table=None, on_change_color_table=None, colors=None):
        """Set up editor state around *vtk_table*.

        vtk_table: a tvtk.LookupTable or a volume-property object.
        on_change_color_table: optional callable invoked after final updates.
        colors: channel-editor keys to show (defaults to ['rgb', 'hsv', 'a']).
        """
        if (colors is None):
            colors = ['rgb', 'hsv', 'a']
        self.colors = colors
        # Fixed pixel sizes for the preview strip and per-channel editors.
        self.gradient_preview_width = 300
        self.gradient_preview_height = 50
        self.channel_function_width = self.gradient_preview_width
        self.channel_function_height = 80
        self.gradient_table = GradientTable(self.gradient_preview_width)
        self.vtk_color_table = vtk_table
        # A LookupTable is stored differently from a volume property.
        if isinstance(vtk_table, tvtk.LookupTable):
            self.vtk_table_is_lut = True
        else:
            self.vtk_table_is_lut = False
            self.gradient_table.load_from_vtk_volume_prop(vtk_table)
        self.on_change_color_table = on_change_color_table
        self.function_controls = []  # filled in by the concrete toolkit subclass
        self.tooltip_text = 'Left click: move control points\nRight click: add/remove control points'
        editor_data = {'rgb': ('', 'RGB'), 'hsv': ('Hue: Red; Saturation: Green; Value: Blue\n', 'HSV'), 'h': ('', 'HUE'), 's': ('', 'SAT'), 'v': ('', 'VAL'), 'r': ('', 'RED'), 'g': ('', 'GREEN'), 'b': ('', 'BLUE'), 'a': ('', 'ALPHA')}
        self.editor_data = editor_data

    def set_status_text(self, msg):
        """Display *msg* in the UI status area; toolkit subclasses implement this."""
        raise NotImplementedError

    def on_gradient_table_changed(self, final_update):
        """Refresh the editor widgets; on *final_update*, push the table to VTK.

        Fix: removed a vestigial ``if True:`` guard around the gradient
        preview refresh — it always executed and only obscured the flow.
        """
        for control in self.function_controls:
            control.update()
        self.gradient_control.update()
        if final_update:
            vtk_table = self.vtk_color_table
            if self.vtk_table_is_lut:
                self.gradient_table.store_to_vtk_lookup_table(vtk_table)
            else:
                rng = self.get_table_range()
                self.gradient_table.store_to_vtk_volume_prop(vtk_table, rng)
            cb = self.on_change_color_table
            if (cb is not None):
                cb()

    def get_table_range(self):
        """Return the scalar range of the underlying VTK table."""
        vtk_table = self.vtk_color_table
        if self.vtk_table_is_lut:
            return vtk_table.table_range
        else:
            return vtk_table.get_scalar_opacity().range

    def load(self, file_name):
        """Load a gradient table from *file_name* and propagate the change."""
        if (len(file_name) == 0):
            return
        self.gradient_table.load(file_name)
        self.on_gradient_table_changed(final_update=True)

    def save(self, file_name):
        """Save the current gradient table to *file_name*."""
        if (len(file_name) == 0):
            return
        self.gradient_table.save(file_name)
def update_css_js(app: Sphinx):
    """Copy sphinx-design CSS/JS into the build's static folder and register them.

    The CSS filename embeds an MD5 digest of its content so that changed
    styles bust browser caches; when the content changed since a previous
    build, stale hashed CSS files are removed and a flag is set on the env.
    """
    app.env.sphinx_design_css_changed = False
    static_path = (Path(app.outdir) / '_sphinx_design_static').absolute()
    static_existed = static_path.exists()
    static_path.mkdir(exist_ok=True)
    app.config.html_static_path.append(str(static_path))
    # JS: stable filename, written only once.
    js_path = static_path / 'design-tabs.js'
    app.add_js_file(js_path.name)
    if not js_path.exists():
        content = read_text(static_module, 'sd_tabs.js')
        js_path.write_text(content)
    # CSS: content-addressed filename for cache busting.
    content = read_text(static_module, 'style.min.css')
    # Renamed from 'hash', which shadowed the builtin of the same name.
    digest = hashlib.md5(content.encode('utf8')).hexdigest()
    css_path = static_path / f'design-style.{digest}.min.css'
    app.add_css_file(css_path.name)
    if css_path.exists():
        return
    if static_existed:
        # CSS changed since a previous build: flag it and purge old copies.
        app.env.sphinx_design_css_changed = True
        for old_css in static_path.glob('*.css'):
            old_css.unlink()
    css_path.write_text(content, encoding='utf8')
class OptionSeriesErrorbarSonificationContexttracksMappingTremoloSpeed(Options):
    """Options proxy for series.errorbar.sonification.contexttracks.mapping.tremolo.speed.

    NOTE(review): every option appears as a getter/setter pair with the same
    name — these read like @property/@x.setter pairs whose decorators were
    lost in this copy; confirm against the original source.
    """
    def mapFunction(self):
        # Getter: mapping function (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored as a raw (non-JS) value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: property to map to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: mapping range maximum (default None).
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: mapping range minimum (default None).
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: unit to map within (default None).
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestDeleteDirectoryContents():
    """Tests for delete_directory_contents: the root survives, its contents do not."""

    def setup(self):
        # Fresh temp tree: a regular file, a symlink to it, and a
        # subdirectory containing a file.
        self.root = Path(tempfile.mkdtemp())
        target = self.root / 'file_1'
        target.touch()
        (self.root / 'symlink_1').symlink_to(target)
        nested = self.root / 'directory'
        nested.mkdir()
        (nested / 'file').touch()

    def test_main(self):
        delete_directory_contents(self.root)
        assert self.root.exists()
        assert list(self.root.iterdir()) == []

    def teardown(self):
        shutil.rmtree(str(self.root), ignore_errors=True)
class ChannelSpatialSqueezeExcitation(nn.Module):
    """Concurrent spatial & channel squeeze-excitation (scSE-style block).

    Runs channel SE and spatial SE on the same input and combines the two
    recalibrated outputs with an element-wise maximum.
    """
    def __init__(self, num_channels: int, num_channels_reduced: Optional[int]=None, reduction_ratio: float=16.0, is_3d: bool=False, activation: Optional[nn.Module]=None) -> None:
        # reduction parameters configure the channel-SE bottleneck only;
        # spatial SE needs just the channel count.
        super().__init__()
        self.channel = SqueezeExcitation(num_channels=num_channels, num_channels_reduced=num_channels_reduced, reduction_ratio=reduction_ratio, is_3d=is_3d, activation=activation)
        self.spatial = SpatialSqueezeExcitation(num_channels=num_channels, is_3d=is_3d)
    def forward(self, input_tensor: torch.Tensor) -> torch.Tensor:
        """Element-wise max of channel-SE and spatial-SE outputs.

        input_tensor: presumably (B, C, H, W), or (B, C, D, H, W) when
        is_3d — TODO confirm against the SE submodules.
        """
        output_tensor = torch.max(self.channel(input_tensor), self.spatial(input_tensor))
        return output_tensor
def code_command(argv) -> CommandResult:
    """Show the violation message for a given error code.

    Accepts exactly one code argument; scans installed plugins whose code
    prefixes could own the code and prints a plugin/message table.
    Returns an (ExitCode, error-message) tuple.
    NOTE: this docstring is printed verbatim for ``--help`` (see below).
    """
    if (not argv):
        return (ExitCode.NO_PLUGIN_NAME, 'no plugin name provided')
    if (argv[0] == '--help'):
        # The function's own docstring doubles as the help text.
        print(code_command.__doc__)
        return (ExitCode.OK, '')
    if (len(argv) > 1):
        return (ExitCode.TOO_MANY_ARGS, 'the command accept only one argument')
    code = argv[0]
    app = FlakeHellApplication(program=NAME, version=VERSION)
    plugins = sorted(get_installed(app=app), key=(lambda p: p['name']))
    if (not plugins):
        return (ExitCode.NO_PLUGINS_INSTALLED, 'no plugins installed')
    messages = []
    checked = set()
    for plugin in plugins:
        # Each plugin name is inspected once even if listed multiple times.
        if (plugin['name'] in checked):
            continue
        checked.add(plugin['name'])
        # Only plugins whose declared code prefixes could own this code.
        if (not code.startswith(tuple(plugin['codes']))):
            continue
        try:
            codes = extract(plugin['name'])
        except ImportError:
            # Plugin not importable in this environment; skip quietly.
            continue
        if (code not in codes):
            continue
        messages.append(dict(plugin=plugin['name'], message=codes[code]))
    if (not messages):
        return (ExitCode.NO_CODES, 'no messages found')
    # NOTE(review): width may be smaller than len('PLUGIN'), which slightly
    # misaligns the header row for short plugin names.
    width = max((len(m['plugin']) for m in messages))
    template = '{plugin} | {message}'
    print(template.format(plugin=colored('PLUGIN'.ljust(width), 'yellow'), message=colored('MESSAGE', 'yellow')))
    for message in messages:
        print(template.format(plugin=message['plugin'].ljust(width), message=color_description(message['message'])))
    return (ExitCode.OK, '')
class ChainContext(ChainContextAPI):
    """Chain context holding a validated chain id; ``None`` means chain 0."""

    __slots__ = ['_chain_id']

    def __init__(self, chain_id: Optional[int]) -> None:
        # A missing chain id defaults to 0; validate before storing.
        effective_id = 0 if chain_id is None else chain_id
        validate_uint256(effective_id)
        self._chain_id = effective_id

    def chain_id(self) -> int:
        return self._chain_id
class RegisterPairHandling(PipelineStage):
    """Pipeline stage that eliminates RegisterPair pseudo-variables.

    Each register pair (e.g. edx:eax) is replaced by a single fresh variable;
    definitions are split into low/high sub-register assignments, and missing
    definitions are synthesized at a common dominator of all usage sites.
    """

    name = 'register-pair-handling'
    # Position of an instruction: its basic block and index inside that block.
    instruction_location = namedtuple('InstructionLocation', ['basic_block', 'index'])

    def __init__(self):
        self.cfg = None
        self._def_map = None
        self._use_map = None
        self._locations = None
        self._dominator_tree = None

    def run(self, task: DecompilerTask) -> None:
        """Run the stage on the task's control flow graph."""
        self.cfg = task.graph
        self._def_map, self._use_map, self._locations = self._parse_cfg(self.cfg)
        self._dominator_tree = self.cfg.dominator_tree
        self._handle_register_pairs()

    # BUGFIX: the helpers below take no `self` but were invoked as bound
    # methods (`self._parse_cfg(...)` etc.), which raised TypeError on every
    # call. They are now declared @staticmethod.
    @staticmethod
    def _parse_cfg(cfg: ControlFlowGraph) -> Tuple[DefMap, UseMap, Dict[Instruction, 'RegisterPairHandling.instruction_location']]:
        """Build def/use maps and an instruction-location index for the cfg."""
        def_map = DefMap()
        use_map = UseMap()
        locations = {}
        for basic_block in cfg:
            for index, instruction in enumerate(basic_block.instructions):
                def_map.add(instruction)
                use_map.add(instruction)
                locations[instruction] = RegisterPairHandling.instruction_location(basic_block, index)
        return def_map, use_map, locations

    def _handle_register_pairs(self) -> None:
        """Replace every defined or used RegisterPair with a fresh variable."""
        handled_pairs = set()
        # A pair may occur both as a definition and as a usage; collect both.
        found_pairs: List[RegisterPair] = (
            [variable for variable in self._def_map.defined_variables if isinstance(variable, RegisterPair)]
            + [variable for variable in self._use_map.used_variables if isinstance(variable, RegisterPair)]
        )
        for variable_postfix, register_pair in enumerate(found_pairs):
            if register_pair in handled_pairs:
                continue
            info(f'[{self.name}] eliminate register pair {str(register_pair)}')
            replacement_variable: Variable = self._get_replacement_variable(register_pair, variable_postfix)
            if definition := self._def_map.get(register_pair):
                # Pair is defined: split the definition into low/high parts.
                definition_location = self._locations[definition]
                self._replace_definition_of_register_pair(definition_location.basic_block, definition, replacement_variable)
                self._update_locations(definition_location.basic_block)
            else:
                # Pair is only used: synthesize a definition dominating all usages.
                insert_location = self._find_definition_insert_location(
                    [self._locations[instruction] for instruction in self._use_map.get(register_pair)]
                )
                self._add_definition_for_replacement(insert_location, register_pair, replacement_variable)
                self._update_locations(insert_location.basic_block)
            self._replace_usages_of(register_pair, replacement_variable)
            handled_pairs.add(register_pair)

    def _update_locations(self, basic_block: BasicBlock):
        """Re-index instruction locations after a block was modified."""
        for index, instruction in enumerate(basic_block.instructions):
            self._locations[instruction] = RegisterPairHandling.instruction_location(basic_block, index)

    def _find_definition_insert_location(self, usage_locations: List['RegisterPairHandling.instruction_location']):
        """Find a location that dominates all given usage locations."""
        blocks = [location.basic_block for location in usage_locations]
        if len(set(blocks)) == 1:
            # All usages in one block: insert before the earliest usage.
            return min(usage_locations, key=lambda x: x.index)
        dominator_block = self._find_common_dominator(blocks)
        if dominator_block in blocks:
            return min(
                [location for location in usage_locations if location.basic_block == dominator_block],
                key=lambda x: x.index,
            )
        insertion_index = len(dominator_block.instructions)
        # Keep a trailing branch as the block's final instruction.
        if isinstance(dominator_block.instructions[-1], GenericBranch):
            insertion_index -= 1
        return self.instruction_location(dominator_block, insertion_index)

    def _find_common_dominator(self, basic_blocks: List[BasicBlock]) -> BasicBlock:
        """Walk up the dominator tree until a block dominating all blocks is found."""
        dominator_guess = next(iter(basic_blocks))
        while dominator_guess:
            if self._is_dominator(dominator_guess, basic_blocks):
                return dominator_guess
            dominator_guess = next(iter(self._dominator_tree.get_predecessors(dominator_guess)), None)

    def _is_dominator(self, dominator_guess: BasicBlock, dominated_blocks: List[BasicBlock]) -> bool:
        """Return True iff the candidate dominates every given block."""
        return all(self._dominator_tree.has_path(dominator_guess, basicblock) for basicblock in dominated_blocks)

    @staticmethod
    def _get_replacement_variable(register_pair: RegisterPair, counter) -> Variable:
        """Create the fresh variable replacing a register pair."""
        return Variable(f'loc_{counter}', register_pair.type, 0)

    def _replace_definition_of_register_pair(self, basic_block: BasicBlock, definition_of_register_pair: Assignment, replacement: Variable) -> None:
        """Rewrite `pair = value` into the replacement plus low/high extraction."""
        register_pair: RegisterPair = definition_of_register_pair.destination
        renamed_definition_of_register_pair = Assignment(replacement, definition_of_register_pair.value)
        lower_register_definition = Assignment(
            register_pair.low,
            self._get_lower_register_definition_value(replacement, register_pair.low.type.size),
        )
        higher_register_definition = Assignment(
            register_pair.high,
            self._get_higher_register_definition_value(replacement, register_pair.high.type.size),
        )
        basic_block.replace_instruction(
            definition_of_register_pair,
            [renamed_definition_of_register_pair, lower_register_definition, higher_register_definition],
        )
        self._locations[renamed_definition_of_register_pair] = self._locations[definition_of_register_pair]

    def _replace_usages_of(self, replacee: RegisterPair, replacement: Variable) -> None:
        """Substitute the pair by its replacement in every using instruction."""
        for using_instruction in self._use_map.get(replacee):
            # substitute() may change the instruction's hash; re-register its location.
            former_location = self._locations[using_instruction]
            using_instruction.substitute(replacee, replacement)
            self._locations[using_instruction] = former_location

    @staticmethod
    def _get_higher_register_definition_value(var: Variable, register_size_in_bits: int) -> BinaryOperation:
        """high = var >> size (upper half of the combined value)."""
        return BinaryOperation(
            OperationType.right_shift,
            [var, Constant(register_size_in_bits, vartype=Integer(register_size_in_bits, False))],
        )

    @staticmethod
    def _get_lower_register_definition_value(var: Variable, register_size_in_bits: int) -> BinaryOperation:
        """low = var & ((1 << size) - 1) (lower half of the combined value)."""
        register_size_mask = (2 ** register_size_in_bits) - 1
        return BinaryOperation(
            OperationType.bitwise_and,
            [var, Constant(register_size_mask, vartype=Integer(register_size_in_bits, True))],
        )

    @staticmethod
    def _add_definition_for_replacement(location: 'RegisterPairHandling.instruction_location', register_pair: RegisterPair, replacement_variable: Variable):
        """Insert `replacement = low + (high << size)` at the given location."""
        assignment_of_replacement_variable = Assignment(
            replacement_variable,
            BinaryOperation(
                OperationType.plus,
                [
                    register_pair.low,
                    BinaryOperation(
                        OperationType.left_shift,
                        [register_pair.high, Constant(register_pair.low.type.size, vartype=Integer.uint8_t())],
                    ),
                ],
            ),
        )
        location.basic_block.instructions.insert(location.index, assignment_of_replacement_variable)
def reset_schema():
    """Drop all known application schemas and recreate an empty ``public`` one."""
    schemas = (
        'aouser', 'auditsearch', 'disclosure', 'fecapp', 'fecmur', 'public',
        'rad_pri_user', 'real_efile', 'real_pfile', 'rohan', 'staging',
    )
    engine = rest.db.engine
    for schema in schemas:
        # Schema names come from the hard-coded tuple above, not user input.
        engine.execute('drop schema if exists %s cascade;' % schema)
    engine.execute('create schema public;')
def test_max_geometry_validation():
    """Scene validation counts geometries transitively through 'difference'
    clips, but tolerates oversized groups under 'union' or as plain groups."""
    overfull = [td.Box(size=(1, 1, 1)) for _ in range(MAX_GEOMETRY_COUNT + 1)]

    acceptable = [
        td.Structure(
            geometry=td.ClipOperation(
                operation='union',
                geometry_a=td.Box(size=(1, 1, 1)),
                geometry_b=td.GeometryGroup(geometries=overfull),
            ),
            medium=td.Medium(permittivity=2.0),
        ),
        td.Structure(
            geometry=td.GeometryGroup(geometries=overfull),
            medium=td.Medium(permittivity=2.0),
        ),
    ]
    _ = td.Scene(structures=acceptable)

    rejected = [
        td.Structure(
            geometry=td.ClipOperation(
                operation='difference',
                geometry_a=td.Box(size=(1, 1, 1)),
                geometry_b=td.GeometryGroup(geometries=overfull),
            ),
            medium=td.Medium(permittivity=2.0),
        ),
    ]
    # Error message should report the total transitive geometry count.
    with pytest.raises(pd.ValidationError, match=f' {MAX_GEOMETRY_COUNT + 2} '):
        _ = td.Scene(structures=rejected)
def extractArunamettaAsia(item):
    """Parser for 'Arunametta Asia' feed items.

    Returns None for blocked series or preview/unparsable titles, a release
    message for recognized tags, and False when no tag matches.
    """
    blocked_series = ('Menantu Sang Raja Naga', 'Lembayung Ema')
    if any(bad in item['tags'] for bad in blocked_series):
        return None
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag on the item, canonical series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Eiwa - The Knight of Magical Laws', 'Eiwa - The Knight of Magical Laws', 'oel'),
        ('dystopian sunset', 'Dystopian Sunset', 'oel'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
            )
    return False
def _get_atf_vector(atf_type, target_psd_matrix, noise_psd_matrix, **atf_kwargs):
if (atf_type == 'pca'):
return get_pca_vector(target_psd_matrix, **atf_kwargs)
elif (atf_type == 'scaled_gev_atf'):
return _get_gev_atf_vector(target_psd_matrix, noise_psd_matrix, **atf_kwargs)
else:
raise ValueError(atf_type, 'use either pca or scaled_gev_atf') |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.