code stringlengths 281 23.7M |
|---|
class TemplateMixin(models.Model):
    """Abstract model mixin that stores a template selection in ``template_key``.

    NOTE(review): deprecated in favor of ``PageTypeMixin``/``TemplateType``
    (see the warning emitted by ``fill_template_key_choices``).
    """
    # Choices and default are filled in per concrete subclass by
    # fill_template_key_choices().
    template_key = ChoicesCharField(_('template'), max_length=100)
    class Meta():
        abstract = True
    def template(self):
        """Return the selected template object, falling back to the first one.

        NOTE(review): accessed as an attribute below (``self.template.regions``),
        so this was presumably decorated with ``@property`` — decorators appear
        stripped from this listing; confirm against the original module.
        """
        return self.TEMPLATES_DICT.get(self.template_key, self.TEMPLATES[0])
    def regions(self):
        """Return the regions of the selected template (presumably also a property)."""
        return self.template.regions
def fill_template_key_choices(sender, **kwargs):
    """Model-preparation hook wiring template choices onto concrete models.

    For every non-abstract model using :class:`TemplateMixin`, fill the
    ``template_key`` field's choices and default from ``sender.TEMPLATES``,
    cache a key -> template mapping on the class, and emit a deprecation
    warning steering users toward ``PageTypeMixin``/``TemplateType``.
    """
    if (not issubclass(sender, TemplateMixin)) or sender._meta.abstract:
        return
    templates = sender.TEMPLATES
    template_key_field = sender._meta.get_field('template_key')
    template_key_field.choices = [(template.key, template.title) for template in templates]
    template_key_field.default = templates[0].key
    sender.TEMPLATES_DICT = {template.key: template for template in templates}
    warnings.warn(f"{sender._meta.label} uses the TemplateMixin. It is recommended to use the PageTypeMixin and TemplateType from feincms3.applications even if you're not planning to use any apps.", DeprecationWarning, stacklevel=1)
class Docstring(object):
    """Thin wrapper over a parsed docstring exposing its documented sections.

    Construct from either a raw docstring or a callable whose ``__doc__``
    will be parsed.
    """

    def __init__(self, docstring: Optional[str]=None, callable_: Optional[Callable]=None):
        text = docstring if docstring is not None else callable_.__doc__
        self._parsed_docstring = parse(text)

    def input_descriptions(self) -> Dict[str, str]:
        """Map each documented parameter name to its description."""
        return {param.arg_name: param.description for param in self._parsed_docstring.params}

    def output_descriptions(self) -> Dict[str, str]:
        """Map each documented return name to its description."""
        return {ret.return_name: ret.description for ret in self._parsed_docstring.many_returns}

    def short_description(self) -> Optional[str]:
        """Return the one-line summary of the docstring."""
        return self._parsed_docstring.short_description

    def long_description(self) -> Optional[str]:
        """Return the extended description of the docstring."""
        return self._parsed_docstring.long_description
def extractWwwDreampotatoCom(item):
    """Parse a feed item from www.dreampotato.com into a release message.

    Returns ``None`` for previews or items without chapter/volume info, a
    release message for the first matching known tag, and ``False`` when no
    known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (tag to look for, canonical series name, translation type)
    series_by_tag = [
        ('Magis Grandson', "Magi's Grandson", 'translated'),
        ('I Was a Sword When I Reincarnated (WN)', 'I Was a Sword When I Reincarnated (WN)', 'translated'),
        ('Tensei Saki ga Shoujo Manga no Shiro Buta Reijou datta', 'Tensei Saki ga Shoujo Manga no Shiro Buta Reijou datta', 'translated'),
        ('Otoko Nara Ikkokuichijou no Aruji o Mezasa Nakya, ne?', 'Otoko Nara Ikkokuichijou no Aruji o Mezasa Nakya, ne?', 'translated'),
        ('The Lazy Swordmaster', 'The Lazy Swordmaster', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
.parametrize('_input_type, expected_esd_hash, message_to_encode', (('Example data from EIP-712', 'be609aee343fb3c4b28e1df9e632fca64fcfaede20f02e86244efddf30957bd2', {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}, {'name': 'version', 'type': 'string'}, {'name': 'chainId', 'type': 'uint256'}, {'name': 'verifyingContract', 'type': 'address'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'wallet', 'type': 'address'}], 'Mail': [{'name': 'from', 'type': 'Person'}, {'name': 'to', 'type': 'Person'}, {'name': 'contents', 'type': 'string'}]}, 'primaryType': 'Mail', 'domain': {'name': 'Ether Mail', 'version': '1', 'chainId': 1, 'verifyingContract': '0xCcCCccccCCCCcCCCCCCcCcCccCcCCCcCcccccccC'}, 'message': {'from': {'name': 'Cow', 'wallet': '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826'}, 'to': {'name': 'Bob', 'wallet': '0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB'}, 'contents': 'Hello, Bob!'}}), ('Example data from EIP-712 with array', '1780e7e042fa9ec126ccb68cd707d61580d00601b3eff8a5ec05116b46007fdb', {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}, {'name': 'version', 'type': 'string'}, {'name': 'chainId', 'type': 'uint256'}, {'name': 'verifyingContract', 'type': 'address'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'wallet', 'type': 'address'}], 'Mail': [{'name': 'from', 'type': 'Person'}, {'name': 'to', 'type': 'Person'}, {'name': 'cc', 'type': 'Person[]'}, {'name': 'contents', 'type': 'string'}]}, 'primaryType': 'Mail', 'domain': {'name': 'Ether Mail', 'version': '1', 'chainId': 1, 'verifyingContract': '0xCcCCccccCCCCcCCCCCCcCcCccCcCCCcCcccccccC'}, 'message': {'from': {'name': 'Cow', 'wallet': '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826'}, 'to': {'name': 'Bob', 'wallet': '0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB'}, 'cc': [{'name': 'Alice', 'wallet': '0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'}, {'name': 'Dot', 'wallet': '0xdddddddddddddddddddddddddddddddddddddddd'}], 'contents': 'Hello, Bob!'}}), ('Custom type', 
'2f1c830ad734e08c06419c229b6bb26e0fb4134b5c04a4dea2163171b79683eb', {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob'}}), ('Recursive custom type', '36c7cddeb49c4095ec5cacfed60c1120b7d7f32740a9', {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}, {'name': 'friends', 'type': 'Person[]'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob', 'friends': [{'name': 'Charlie', 'friends': []}]}}), ('Unused custom type', '2f1c830ad734e08c06419c229b6bb26e0fb4134b5c04a4dea2163171b79683eb', {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}], 'Pet': [{'name': 'animal', 'type': 'string'}, {'name': 'age', 'type': 'uint256'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob'}}), ('Custom type with extra properties in message', '2f1c830ad734e08c06419c229b6bb26e0fb4134b5c04a4dea2163171b79683eb', {'types': {'EIP712Domain': [{'name': 'name', 'type': 'string'}], 'Person': [{'name': 'name', 'type': 'string'}]}, 'primaryType': 'Person', 'domain': {'name': 'Name'}, 'message': {'name': 'Bob', 'age': 33}})))
def test_encode_structured_data_pass(_input_type, expected_esd_hash, message_to_encode):
    """Each EIP-712 typed-data message must produce its expected EIP-191 hash.

    Parametrized by the ``.parametrize`` fragment above (the ``@pytest.mark``
    prefix appears stripped from this listing).

    NOTE(review): the 'Recursive custom type' expected hash in the parameter
    list is only 44 hex characters rather than 64 — it looks truncated and
    that case would fail; verify against the original test data.
    """
    assert (_hash_eip191_message(encode_structured_data(message_to_encode)).hex() == expected_esd_hash)
_os(*metadata.platforms)
def main():
    """Emulate EggShell-backdoor command behavior for detection-rule testing.

    Copies a benign helper binary to a masquerade path, runs it with an
    EggShell-like argument, then cleans up.

    NOTE(review): the bare ``_os(*metadata.platforms)`` line above looks like
    a stripped decorator (presumably ``@common.requires_os(...)``) — confirm.
    """
    masquerade = '/tmp/eggshell'
    if (common.CURRENT_OS == 'linux'):
        # On Linux, reuse the prebuilt ditto_and_spawn helper binary.
        source = common.get_path('bin', 'linux.ditto_and_spawn')
        common.copy_file(source, masquerade)
    else:
        common.create_macos_masquerade(masquerade)
    common.log('Launching fake commands for EggShell backdoor behavior')
    # 'eyJkZWJ1ZyI6' is base64 for '{"debug":' — appears to mimic an EggShell payload prefix.
    common.execute([masquerade, 'eyJkZWJ1ZyI6test'], timeout=10, kill=True)
    common.remove_file(masquerade)
def upgrade():
    """Alembic migration: widen the ``hours`` column on the flicket post and
    topic tables from Numeric(10, 0) to Numeric(10, 2) so fractional hours
    can be stored.
    """
    for table_name in ('flicket_post', 'flicket_topic'):
        with op.batch_alter_table(table_name) as batch_op:
            batch_op.alter_column(
                'hours',
                existing_type=sa.Numeric(precision=10, scale=0),
                type_=sa.Numeric(precision=10, scale=2),
                existing_nullable=True,
                existing_server_default=sa.text("'0'"),
            )
def catch_exception(fun):
    """Decorator that makes ``fun`` fail silently by default.

    The wrapper pops a ``fail_silently`` keyword (default ``True``). When
    silent, any exception from ``fun`` is logged and swallowed (the wrapper
    then implicitly returns ``None``); otherwise exceptions propagate.

    NOTE(review): the bare ``(fun, assigned=...)`` line below looks like a
    stripped ``@functools.wraps(fun, assigned=available_attrs(fun))``
    decorator on ``wrap`` — confirm against the original source.
    """
    (fun, assigned=available_attrs(fun))
    def wrap(*args, **kwargs):
        if kwargs.pop('fail_silently', True):
            try:
                return fun(*args, **kwargs)
            except Exception as e:
                # Log traceback and a human-readable summary, then swallow.
                logger.exception(e)
                logger.error('Got exception when running %s(%s, %s): %s.', fun.__name__, args, kwargs, e)
        else:
            return fun(*args, **kwargs)
    return wrap
class OptionSeriesColumnrangeOnpointPosition(Options):
    """Highcharts ``series.columnrange.onPoint.position`` option group.

    NOTE(review): every name below is defined twice — getter followed by
    setter — so ``@property`` / ``@<name>.setter`` decorators were presumably
    stripped from this listing; confirm against the original module.
    """
    # offsetX: horizontal offset of the on-point position.
    def offsetX(self):
        return self._config_get(None)
    def offsetX(self, num: float):
        self._config(num, js_type=False)
    # offsetY: vertical offset of the on-point position.
    def offsetY(self):
        return self._config_get(None)
    def offsetY(self, num: float):
        self._config(num, js_type=False)
    # x: explicit x position.
    def x(self):
        return self._config_get(None)
    def x(self, num: float):
        self._config(num, js_type=False)
    # y: explicit y position.
    def y(self):
        return self._config_get(None)
    def y(self, num: float):
        self._config(num, js_type=False)
.django_db
def test_state_metadata_success(client, state_data):
resp = client.get(state_metadata_endpoint('01'))
assert (resp.status_code == status.HTTP_200_OK)
assert (resp.data == EXPECTED_STATE)
resp = client.get(state_metadata_endpoint('02'))
assert (resp.status_code == status.HTTP_200_OK)
assert (resp.data == EXPECTED_DISTRICT)
resp = client.get(state_metadata_endpoint('03'))
assert (resp.status_code == status.HTTP_200_OK)
assert (resp.data == EXPECTED_TERRITORY) |
def test_use_callable_class_event_listener():
    """A callable class instance can be registered as a pychrome event listener."""
    chrome = pychrome.Browser()
    chrome_tab = chrome.new_tab()
    chrome_tab.start()
    # Register the callable-class listener for network request events.
    chrome_tab.Network.requestWillBeSent = CallableClass(chrome_tab)
    chrome_tab.Network.enable()
    try:
        chrome_tab.Page.navigate(url='chrome://newtab/')
    except pychrome.UserAbortException:
        pass
    assert chrome_tab.wait(timeout=5), 'never get here'
    chrome_tab.stop()
class ValveEgressACLTestCase(ValveTestBases.ValveTestNetwork):
    """Egress (``acl_out``) VLAN ACL tests for Valve."""
    def setUp(self):
        """Build the test network from the module-level CONFIG."""
        self.setup_valves(CONFIG)
    def test_vlan_acl_deny(self):
        """An egress IPv6 ACL on v200 allows one host and drops the rest, for
        both switched and routed traffic, while the ACL-free v100 is untouched."""
        ALLOW_HOST_V6 = 'fc00:200::1:1'
        DENY_HOST_V6 = 'fc00:200::1:2'
        FAUCET_V100_VIP = 'fc00:100::1'
        FAUCET_V200_VIP = 'fc00:200::1'
        acl_config = "\ndps:\n s1:\n{dp1_config}\n interfaces:\n p1:\n number: 1\n native_vlan: v100\n p2:\n number: 2\n native_vlan: v200\n tagged_vlans: [v100]\n p3:\n number: 3\n tagged_vlans: [v100, v200]\n p4:\n number: 4\n tagged_vlans: [v200]\nvlans:\n v100:\n vid: 0x100\n faucet_mac: '{mac}'\n faucet_vips: ['{v100_vip}/64']\n v200:\n vid: 0x200\n faucet_mac: '{mac}'\n faucet_vips: ['{v200_vip}/64']\n acl_out: drop_non_allow_host_v6\n minimum_ip_size_check: false\nrouters:\n r_v100_v200:\n vlans: [v100, v200]\nacls:\n drop_non_allow_host_v6:\n - rule:\n ipv6_dst: '{allow_host}'\n eth_type: 0x86DD\n actions:\n allow: 1\n - rule:\n eth_type: 0x86DD\n actions:\n allow: 0\n".format(dp1_config=DP1_CONFIG, mac=FAUCET_MAC, v100_vip=FAUCET_V100_VIP, v200_vip=FAUCET_V200_VIP, allow_host=ALLOW_HOST_V6)
        # eth_type 34525 == 0x86DD (IPv6).
        l2_drop_match = {'in_port': 2, 'eth_dst': self.P3_V200_MAC, 'vlan_vid': 0, 'eth_type': 34525, 'ipv6_dst': DENY_HOST_V6}
        l2_accept_match = {'in_port': 3, 'eth_dst': self.P2_V200_MAC, 'vlan_vid': (512 | ofp.OFPVID_PRESENT), 'eth_type': 34525, 'ipv6_dst': ALLOW_HOST_V6}
        v100_accept_match = {'in_port': 1, 'vlan_vid': 0}
        table = self.network.tables[self.DP_ID]
        # Sanity check: before the ACL is applied, both flows are forwarded.
        for match in (l2_drop_match, l2_accept_match):
            self.assertTrue(table.is_output(match, port=4), msg='Packet not output before adding ACL')
        # Cold-reload with the egress-ACL configuration.
        self.update_config(acl_config, reload_type='cold')
        self.assertTrue(table.is_output(v100_accept_match, port=3), msg='Packet not output when on vlan with no ACL')
        self.assertFalse(table.is_output(l2_drop_match, port=3), msg='Packet not blocked by ACL')
        self.assertTrue(table.is_output(l2_accept_match, port=2), msg='Packet not allowed by ACL')
        # Learn both hosts on v200, then re-check the ACL still applies.
        self.rcv_packet(2, 512, {'eth_src': self.P2_V200_MAC, 'eth_dst': self.P3_V200_MAC, 'vid': 512, 'ipv6_src': ALLOW_HOST_V6, 'ipv6_dst': DENY_HOST_V6, 'neighbor_advert_ip': ALLOW_HOST_V6})
        self.rcv_packet(3, 512, {'eth_src': self.P3_V200_MAC, 'eth_dst': self.P2_V200_MAC, 'vid': 512, 'ipv6_src': DENY_HOST_V6, 'ipv6_dst': ALLOW_HOST_V6, 'neighbor_advert_ip': DENY_HOST_V6})
        self.assertTrue(table.is_output(l2_accept_match, port=2), msg='Packet not allowed by ACL')
        self.assertFalse(table.is_output(l2_drop_match, port=3), msg='Packet not blocked by ACL')
        # Routed traffic (destined to the FAUCET MAC) must obey the same egress ACL.
        l3_drop_match = {'in_port': 1, 'eth_dst': FAUCET_MAC, 'vlan_vid': 0, 'eth_type': 34525, 'ipv6_dst': DENY_HOST_V6}
        l3_accept_match = {'in_port': 1, 'eth_dst': FAUCET_MAC, 'vlan_vid': 0, 'eth_type': 34525, 'ipv6_dst': ALLOW_HOST_V6}
        self.assertTrue(table.is_output(l3_accept_match, port=2), msg='Routed packet not allowed by ACL')
        self.assertFalse(table.is_output(l3_drop_match, port=3), msg='Routed packet not blocked by ACL')
(Collection)
class CollectionAdmin(CustomAdmin):
    """Django admin configuration for the Collection model.

    NOTE(review): the bare ``(Collection)`` line above looks like a stripped
    ``@admin.register(Collection)`` decorator — confirm against the original.
    """
    # Autocomplete widget for the curators relation.
    autocomplete_fields = ['curators']
    fields = ['id', 'name', 'curators']
    inlines = [BookInline]
    list_display = ['id', 'name']
    list_display_links = ['name']
    # Classic filters plus factory-built autocomplete filters.
    list_filter = [CuratorsFilter, BookFilter]
    list_filter_auto = [AutocompleteFilterFactory('curators (auto)', 'curators'), AutocompleteFilterFactory('has book (auto)', 'book')]
    ordering = ['id']
    readonly_fields = ['id']
    search_fields = ['id', 'name', 'curators__name', 'book__title', 'book__author__name']
.usefixtures('use_tmpdir')
def test_gen_kw_log_appended_extra():
with open('config_file.ert', 'w', encoding='utf-8') as fout:
fout.write(dedent('\n NUM_REALIZATIONS 1\n GEN_KW KW_NAME template.txt kw.txt prior.txt\n '))
with open('template.txt', 'w', encoding='utf-8') as fh:
fh.writelines('MY_KEYWORD <MY_KEYWORD>')
with open('prior.txt', 'w', encoding='utf-8') as fh:
fh.writelines('MY_KEYWORD LOGNORMAL 1 2')
facade = LibresFacade.from_config_file('config_file.ert')
assert (len(facade.gen_kw_keys()) == 2) |
class FunctionSignature(SignatureMixin):
    """Base signature for a named function in the expression model.

    NOTE(review): the methods below take ``cls`` without visible
    ``@classmethod`` decorators — the decorators appear stripped from this
    listing; confirm against the original module.
    """
    # Function name; subclasses override.
    name = str()
    # Declared return type of the function.
    return_value = TypeHint.Unknown
    # Whether the function may evaluate to null.
    sometimes_null = False
    def get_callback(cls, *arguments):
        """Return the callable used to evaluate this function."""
        return cls.run
    def optimize(cls, arguments):
        """Return an optimized node for this call (default: a plain FunctionCall)."""
        return FunctionCall(cls.name, arguments)
    def alternate_render(cls, arguments, precedence=None, **kwargs):
        """Optional alternate rendering hook.

        NOTE(review): the body is empty in this listing — presumably the
        original contained only a docstring or ``pass``; confirm.
        """
    def run(cls, *arguments):
        """Evaluate the function; subclasses must implement."""
        raise NotImplementedError()
class BaseConfiguration(object):
    """Base Flask configuration for Flicket, built from ``config.json``.

    All attributes are computed at class-definition (import) time: the JSON
    config is read, the SQLAlchemy database URI is assembled from it, and the
    database connection is checked immediately.
    """
    # Presumably ensures config.json exists before it is read — TODO confirm.
    WriteConfigJson.json_exists()
    DEBUG = False
    TESTING = False
    EXPLAIN_TEMPLATE_LOADING = False
    try:
        with open(config_file, 'r') as f:
            config_data = json.load(f)
        db_username = config_data['db_username']
        db_password = config_data['db_password']
        db_url = config_data['db_url']
        db_port = config_data['db_port']
        db_name = config_data['db_name']
        db_type = config_data['db_type']
        db_driver = config_data['db_driver']
    except KeyError:
        raise KeyError('The file config.json appears to incorrectly formatted.')
    db_dialect = None
    SQLALCHEMY_DATABASE_URI = None
    # sqlite path prefix: four slashes for absolute POSIX paths, three on Windows.
    sql_os_path_prefix = '////'
    if (platform.system() == 'Windows'):
        sql_os_path_prefix = '///'
    # db_type: 1 = sqlite, 2 = postgresql, 3 = mysql.
    if (db_type == 1):
        db_dialect = 'sqlite'
        db_path = os.path.join(basedir, db_name)
        SQLALCHEMY_DATABASE_URI = f'{db_dialect}:{sql_os_path_prefix}{db_path}'
    else:
        if (db_type == 2):
            db_dialect = 'postgresql'
        if (db_type == 3):
            db_dialect = 'mysql'
        # NOTE(review): no '@' is inserted between password and host, so
        # db_url in config.json presumably starts with '@' — confirm.
        SQLALCHEMY_DATABASE_URI = f'{db_dialect}+{db_driver}://{db_username}:{db_password}{db_url}:{db_port}/{db_name}'
    # NOTE(review): in the else branch above the URI is always assigned (even
    # with dialect None for an unknown db_type), so this check never fires —
    # looks like a latent bug; confirm intended behavior.
    if (SQLALCHEMY_DATABASE_URI is None):
        raise ConnectionAbortedError('Incorrect database type defined in config.json.')
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    ADMIN_GROUP_NAME = 'flicket_admin'
    SUPER_USER_GROUP_NAME = 'super_user'
    SECRET_KEY = config_data['SECRET_KEY']
    # URL roots for the main site, APIs and admin pages.
    WEBHOME = '/'
    FLICKET = (WEBHOME + '')
    FLICKET_API = (WEBHOME + 'flicket-api/')
    FLICKET_REST_API = (WEBHOME + 'flicket-rest-api')
    ADMINHOME = '/flicket_admin/'
    # Built-in account used to post notification messages.
    NOTIFICATION = {'name': 'notification', 'username': 'notification', 'password': config_data['NOTIFICATION_USER_PASSWORD'], 'email': ''}
    SUPPORTED_LANGUAGES = {'en': 'English', 'fr': 'Francais'}
    BABEL_DEFAULT_LOCALE = 'en'
    BABEL_DEFAULT_TIMEZONE = 'UTC'
    # Fail fast at import time if the database is unreachable.
    check_db_connection(SQLALCHEMY_DATABASE_URI)
def test_child_container_parent_name():
    """The error for an undefined child-container dependency must name it via
    its full parent path ("Container.child_container.dependency")."""
    class ChildContainer(containers.DeclarativeContainer):
        dependency = providers.Dependency()
    class Container(containers.DeclarativeContainer):
        child_container = providers.Container(ChildContainer)
    # The match string encodes the local class/attribute names above.
    with raises(errors.Error, match='Dependency "Container.child_container.dependency" is not defined'):
        Container.child_container.dependency()
def test_entity_storage_add_entity_asset(create_test_db, create_project, prepare_entity_storage):
    """An Asset fetched from the test project can be added to EntityStorage
    and is then present in its ``storage`` collection."""
    from stalker import Asset
    project = create_project
    char_query = Asset.query.filter((Asset.project == project)).filter((Asset.name == 'Char1'))
    char1 = char_query.first()
    assert char1 is not None
    entity_storage = EntityStorage()
    entity_storage.add_entity(char1)
    assert char1 in entity_storage.storage
def extractBlueSilverTranslations(item):
    """Parse a Blue Silver Translations feed item into a release message.

    Returns ``None`` for previews or items lacking chapter/volume/fragment
    info, a release message for known series tags (with special chapter/volume
    re-extraction for 'Douluo Dalu'), and ``False`` when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol or frag):
        return None
    if 'Douluo Dalu' in item['tags']:
        # Chapter/volume markers live in the tags+title text for this series;
        # strip quotes so extractChapterVol can parse it.
        combined = ('%s %s' % (item['tags'], item['title']))
        combined = combined.replace("'", ' ')
        chp, vol = extractChapterVol(combined)
        return buildReleaseMessageWithType(item, 'Douluo Dalu', vol, chp)
    simple_series = (
        'Immortal Executioner',
        'Stellar War Storm',
        'Bringing The Farm To Live In Another World',
        'Law of the Devil',
    )
    for series_name in simple_series:
        if series_name in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix)
    return False
(init=True, repr=True, eq=True, frozen=True)
class PlatformConfig(object):
endpoint: str = 'localhost:30080'
insecure: bool = False
insecure_skip_verify: bool = False
ca_cert_file_path: typing.Optional[str] = None
console_endpoint: typing.Optional[str] = None
command: typing.Optional[typing.List[str]] = None
proxy_command: typing.Optional[typing.List[str]] = None
client_id: typing.Optional[str] = None
client_credentials_secret: typing.Optional[str] = None
scopes: List[str] = field(default_factory=list)
auth_mode: AuthType = AuthType.STANDARD
audience: typing.Optional[str] = None
rpc_retries: int = 3
typing.Optional[str] = None
def auto(cls, config_file: typing.Optional[typing.Union[(str, ConfigFile)]]=None) -> PlatformConfig:
config_file = get_config_file(config_file)
kwargs = {}
kwargs = set_if_exists(kwargs, 'insecure', _internal.Platform.INSECURE.read(config_file))
kwargs = set_if_exists(kwargs, 'insecure_skip_verify', _internal.Platform.INSECURE_SKIP_VERIFY.read(config_file))
kwargs = set_if_exists(kwargs, 'ca_cert_file_path', _internal.Platform.CA_CERT_FILE_PATH.read(config_file))
kwargs = set_if_exists(kwargs, 'command', _internal.Credentials.COMMAND.read(config_file))
kwargs = set_if_exists(kwargs, 'proxy_command', _internal.Credentials.PROXY_COMMAND.read(config_file))
kwargs = set_if_exists(kwargs, 'client_id', _internal.Credentials.CLIENT_ID.read(config_file))
kwargs = set_if_exists(kwargs, 'client_credentials_secret', _internal.Credentials.CLIENT_CREDENTIALS_SECRET.read(config_file))
is_client_secret = False
client_credentials_secret = read_file_if_exists(_internal.Credentials.CLIENT_CREDENTIALS_SECRET_LOCATION.read(config_file))
if client_credentials_secret:
is_client_secret = True
if client_credentials_secret.endswith('\n'):
logger.info('Newline stripped from client secret')
client_credentials_secret = client_credentials_secret.strip()
kwargs = set_if_exists(kwargs, 'client_credentials_secret', client_credentials_secret)
client_credentials_secret_env_var = _internal.Credentials.CLIENT_CREDENTIALS_SECRET_ENV_VAR.read(config_file)
if client_credentials_secret_env_var:
client_credentials_secret = os.getenv(client_credentials_secret_env_var)
if client_credentials_secret:
is_client_secret = True
kwargs = set_if_exists(kwargs, 'client_credentials_secret', client_credentials_secret)
kwargs = set_if_exists(kwargs, 'scopes', _internal.Credentials.SCOPES.read(config_file))
kwargs = set_if_exists(kwargs, 'auth_mode', _internal.Credentials.AUTH_MODE.read(config_file))
if is_client_secret:
kwargs = set_if_exists(kwargs, 'auth_mode', AuthType.CLIENTSECRET.value)
kwargs = set_if_exists(kwargs, 'endpoint', _internal.Platform.URL.read(config_file))
kwargs = set_if_exists(kwargs, 'console_endpoint', _internal.Platform.CONSOLE_ENDPOINT.read(config_file))
kwargs = set_if_exists(kwargs, ' _internal.Platform.HTTP_PROXY_URL.read(config_file))
return PlatformConfig(**kwargs)
def for_endpoint(cls, endpoint: str, insecure: bool=False) -> PlatformConfig:
return PlatformConfig(endpoint=endpoint, insecure=insecure) |
def extractBloomingsilkBlogspotCom(item):
    """Parse a feed item from bloomingsilk.blogspot.com into a release message.

    Returns ``None`` for previews or items without chapter/volume info, a
    release message for the first matching known tag, and ``False`` otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # tag -> (canonical series name, translation type); insertion order matters.
    releases = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series_name, tl_type) in releases.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Cli():
    """kptop command-line interface.

    Parses CLI arguments on construction (via ``self.argparse()``) and then
    immediately dispatches to the requested node / pod / PVC monitoring
    action, calling ``exit()`` when a one-shot action completes.
    """
    def __init__(self):
        # Defaults for all CLI-derived state; overridden by argparse().
        self.parser = None
        self.list_pvcs = False
        self.list_nodes = False
        self.node = None
        self.list_pods = False
        self.pod = None
        self.container = None
        self.namespace = 'default'
        self.all_namespaces = False
        self.debug = False
        self.dashboard = 'default'
        self.list_dashboards = False
        self.sort_by_mem_usage = False
        self.list_option = ''
        self.list_nodes_option = []
        self.colorize_json = False
        self.argparse()
        if self.debug:
            GlobalAttrs.debug = True
            Logging.log.setLevel(level='DEBUG')
        # Single-node view: dashboards list, JSON dump, or live dashboard.
        if self.node:
            if self.list_dashboards:
                node_monitor.list_dashboards()
                exit(0)
            if (self.list_option == 'json'):
                node_metrics.topNodeJson(node=self.node, color=self.colorize_json)
                exit(0)
            node_monitor.display_dashboard(dashboard=self.dashboard, node_name=self.node)
        # "top nodes": table or JSON for all nodes.
        if self.list_nodes:
            if (self.list_option == 'json'):
                node_metrics.topNodeJson(node='.*', color=self.colorize_json)
                exit(0)
            node_metrics.topNodeTable(option=self.list_option)
            exit(0)
        # Single-pod live monitoring (optionally a specific container).
        if self.pod:
            if (self.container is None):
                self.container = '.*'
            check_pod = pod_metrics.podExists(pod=self.pod, namespace=self.namespace)
            if (not check_pod.get('result')):
                print(f"pod/{self.pod} not found in the '{self.namespace}' namespace")
                rich.print(f"[yellow]{check_pod.get('fail_reason')}")
                exit(1)
            pod_monitor.pod_monitor(pod=self.pod, namespace=self.namespace, container=self.container)
        # "top pods" table.
        if self.list_pods:
            ns = self.namespace
            if self.all_namespaces:
                ns = '.*'
            pod_metrics.topPodTable(namespace=ns, sort_by_mem_usage=self.sort_by_mem_usage)
            exit(0)
        # "top pvc" table.
        if self.list_pvcs:
            ns = self.namespace
            if self.all_namespaces:
                ns = '.*'
            pod_metrics.topPvcTable(namespace=ns)
            exit(0)
    def argparse(self):
        """Define and parse CLI arguments, populating the instance attributes.

        Exits the process directly on usage errors and after informational
        flags such as ``--check-metrics``.
        """
        parser = argparse.ArgumentParser(description='A Python tool for Kubernetes Nodes/Pods terminal monitoring through Prometheus metrics.')
        parser.add_argument('top', type=str, nargs='*', metavar='{pods, pod, po} | {nodes, node} | {persistentvolumeclaim, pvc}', help='top pods/nodes/persistentvolumeclaim')
        parser.add_argument('-n', '--namespace', type=str, required=False, metavar='', help='Specify a Kubernetes namespace')
        parser.add_argument('-A', '--all-namespaces', required=False, action='store_true', help='All Kubernetes namespaces')
        parser.add_argument('-c', '--container', type=str, required=False, metavar='', help="Monitor a specific Pod's container")
        parser.add_argument('-i', '--interval', type=int, required=False, metavar='', help='Live monitoring update interval')
        parser.add_argument('-V', '--verify-prometheus', required=False, action='store_true', help='Verify Prometheus connection & exporters')
        parser.add_argument('-C', '--check-metrics', required=False, action='store_true', help='Checks the availability of the needed metrics')
        parser.add_argument('-d', '--debug', required=False, action='store_true', help='Print debug output')
        parser.add_argument('-s', '--sort-by-mem-usage', required=False, action='store_true', help='Sort top result by memory usage')
        parser.add_argument('-o', '--option', type=str, required=False, choices=['cloud', 'json'], help='options for "kptop node||pod" (currently supported in "kptop node")')
        parser.add_argument('-cj', '--colorize-json', required=False, action='store_true', help='Colorize Json output (with "-o json")')
        pod_aliases = ['pod', 'pods', 'po']
        node_aliases = ['node', 'nodes']
        pvc_aliases = ['pvc', 'persistentvolumeclaim']
        results = parser.parse_args()
        self.parser = parser
        if results.debug:
            self.debug = True
        if results.verify_prometheus:
            prometheus_api.verify_exporters()
        if results.check_metrics:
            prometheus_api.check_metrics()
            exit(0)
        # Positional "top" arguments: zero is usage error, one selects a list
        # view, two selects a specific node/pod, more is an error.
        # (typo "unkown" fixed to "unknown" in the error messages below)
        if (len(results.top) == 0):
            self.parser.print_help()
            exit(1)
        if (len(results.top) == 1):
            if (results.top[0] in pod_aliases):
                self.list_pods = True
            elif (results.top[0] in node_aliases):
                self.list_nodes = True
            elif (results.top[0] in pvc_aliases):
                self.list_pvcs = True
            else:
                rich.print(f'''[bold]ERROR -- unknown argument '{results.top[0]}'
''')
                self.parser.print_help()
                exit(1)
        if (len(results.top) == 2):
            if (results.top[0] in pod_aliases):
                self.pod = results.top[1]
            elif (results.top[0] in node_aliases):
                self.node = results.top[1]
            else:
                rich.print(f'''[bold]ERROR -- unknown argument '{results.top[0]}'
''')
                self.parser.print_help()
                exit(1)
        if (len(results.top) > 2):
            rich.print(f'''[bold]ERROR -- unknown argument '{results.top[2]}' - only 2 arguments are expected
''')
            self.parser.print_help()
            exit(1)
        if (results.namespace and results.all_namespaces):
            rich.print("[bold]ERROR -- You can only use '--all-namespaces' or '--namespace' \n")
            self.parser.print_help()
            exit(1)
        if results.namespace:
            self.namespace = results.namespace
        if results.option:
            self.list_option = results.option
        if results.colorize_json:
            # BUG FIX: previously assigned ``results.option`` (the -o value,
            # possibly None) instead of a boolean, so "-cj" without "-o" was
            # silently a no-op.
            self.colorize_json = True
        if results.all_namespaces:
            self.all_namespaces = results.all_namespaces
        if results.container:
            self.container = results.container
        if results.interval:
            GlobalAttrs.live_update_interval = results.interval
        if results.sort_by_mem_usage:
            self.sort_by_mem_usage = True
class HelpdeskCommentRequest(BaseZendeskRequest):
    """Request helper for Help Center article comments (create/update/delete)."""

    def put(self, endpoint, article, comment):
        """Update an existing comment on an article."""
        request_url = self.api._build_url(endpoint(article, comment.id))
        request_body = self.build_payload(comment)
        return self.api._put(request_url, request_body)

    def post(self, endpoint, article, comment):
        """Create a new comment on an article."""
        request_url = self.api._build_url(endpoint(id=article))
        request_body = self.build_payload(comment)
        return self.api._post(request_url, request_body)

    def delete(self, endpoint, article, comment):
        """Delete a comment from an article."""
        request_url = self.api._build_url(endpoint(article, comment))
        return self.api._delete(request_url)
def get_available_ram() -> int:
    """Return the amount of available RAM via the platform-specific helper.

    Raises NotImplementedError on platforms without an implementation.
    """
    handlers = {
        'linux': _get_available_ram_linux,
        'darwin': _get_available_ram_macos,
        'win32': _get_available_ram_windows,
    }
    handler = handlers.get(sys.platform)
    if handler is not None:
        return handler()
    if sys.platform.startswith('freebsd'):
        return _get_available_ram_freebsd()
    raise NotImplementedError(f'platform {sys.platform} does not have an implementation of get_available_ram')
.parametrize('charset,data', [('utf-8', b'Impossible byte: \xff'), ('utf-8', b'Overlong... \xfc\x83\xbf\xbf\xbf\xbf ... sequence'), ('ascii', b'\x80\x80\x80'), ('pecyn', b'AAHEHlRoZSBGYWxjb24gV2ViIEZyYW1ld29yaywgMjAxOQ==')])
def test_invalid_text_or_charset(charset, data):
    """Accessing ``part.text`` for a multipart part whose body cannot be
    decoded (invalid UTF-8/ASCII bytes, or an unknown charset) must raise
    falcon.MediaMalformedError.

    Parametrized via the ``.parametrize`` fragment above (the ``@pytest.mark``
    prefix appears stripped from this listing).
    """
    # Wrap the payload in a two-part multipart/form-data body.
    data = (((b'--BOUNDARY\r\nContent-Disposition: form-data; name="text"\r\nContent-Type: text/plain; ' + 'charset={}\r\n\r\n'.format(charset).encode()) + data) + b'\r\n--BOUNDARY\r\nContent-Disposition: form-data; name="empty"\r\nContent-Type: text/plain\r\n\r\n\r\n--BOUNDARY--\r\n')
    handler = media.MultipartFormHandler()
    form = handler.deserialize(io.BytesIO(data), 'multipart/form-data; boundary=BOUNDARY', len(data))
    with pytest.raises(falcon.MediaMalformedError):
        for part in form:
            # Accessing .text forces decoding, which must fail for these inputs.
            part.text
def auth0_dataset_config(db: Session, auth0_connection_config: ConnectionConfig, auth0_dataset: Dict[(str, Any)]) -> Generator:
    """Fixture: wire the Auth0 dataset onto the Auth0 connection config.

    Renames the connection config to the dataset's fides_key, creates the CTL
    dataset and DatasetConfig rows, yields the DatasetConfig, then deletes
    both rows on teardown.
    """
    dataset_key = auth0_dataset['fides_key']
    auth0_connection_config.name = dataset_key
    auth0_connection_config.key = dataset_key
    auth0_connection_config.save(db=db)
    ctl = CtlDataset.create_from_dataset_dict(db, auth0_dataset)
    dataset_config = DatasetConfig.create(db=db, data={'connection_config_id': auth0_connection_config.id, 'fides_key': dataset_key, 'ctl_dataset_id': ctl.id})
    yield dataset_config
    dataset_config.delete(db=db)
    ctl.delete(db=db)
class Number(Param[(IT, OT)]):
    """Numeric configuration parameter with optional bounds and value aliases."""
    min_value: Optional[int] = None
    max_value: Optional[int] = None
    number_aliases: Mapping[(IT, OT)]

    def _init_options(self, min_value: Optional[int]=None, max_value: Optional[int]=None, number_aliases: Mapping[(IT, OT)]=None, **kwargs: Any) -> None:
        """Record bound/alias options, keeping class defaults when omitted."""
        if min_value is not None:
            self.min_value = min_value
        if max_value is not None:
            self.max_value = max_value
        self.number_aliases = number_aliases or {}

    def convert(self, conf: _Settings, value: IT) -> OT:
        """Convert a raw value to the target numeric type (subclass hook)."""
        ...

    def to_python(self, conf: _Settings, value: IT) -> OT:
        """Resolve the value through the alias table first, else convert it."""
        try:
            return self.number_aliases[value]
        except KeyError:
            return self.convert(conf, value)

    def validate_after(self, value: OT) -> None:
        """Raise ImproperlyConfigured when the value falls outside the bounds."""
        candidate = cast(int, value)
        below_min = self.min_value is not None and candidate < self.min_value
        above_max = self.max_value is not None and candidate > self.max_value
        if below_min or above_max:
            raise self._out_of_range(candidate)

    def _out_of_range(self, value: float) -> ImproperlyConfigured:
        """Build (not raise) the out-of-range configuration error."""
        return ImproperlyConfigured(f'Value {value} is out of range for {self.class_name} (min={self.min_value} max={self.max_value})')
class OptionSeriesSankeyPointEvents(Options):
    """Highcharts ``series.sankey.point.events`` option group (JS callbacks).

    NOTE(review): every name below is defined twice — getter followed by
    setter — so ``@property`` / ``@<name>.setter`` decorators were presumably
    stripped from this listing; confirm against the original module.
    """
    # click: fired when the point is clicked.
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    # drag: fired while the point is being dragged.
    def drag(self):
        return self._config_get(None)
    def drag(self, value: Any):
        self._config(value, js_type=False)
    # dragStart: fired when dragging of the point starts.
    def dragStart(self):
        return self._config_get(None)
    def dragStart(self, value: Any):
        self._config(value, js_type=False)
    # drop: fired when the dragged point is dropped.
    def drop(self):
        return self._config_get(None)
    def drop(self, value: Any):
        self._config(value, js_type=False)
    # mouseOut: fired when the pointer leaves the point.
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    # mouseOver: fired when the pointer enters the point.
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    # remove: fired when the point is removed.
    def remove(self):
        return self._config_get(None)
    def remove(self, value: Any):
        self._config(value, js_type=False)
    # select: fired when the point is selected.
    def select(self):
        return self._config_get(None)
    def select(self, value: Any):
        self._config(value, js_type=False)
    # unselect: fired when the point is unselected.
    def unselect(self):
        return self._config_get(None)
    def unselect(self, value: Any):
        self._config(value, js_type=False)
    # update: fired when the point is updated.
    def update(self):
        return self._config_get(None)
    def update(self, value: Any):
        self._config(value, js_type=False)
def main():
    """CLI entry point: disassemble a region of a sample file and emit a Yara rule.

    Reads ``--size`` bytes at ``--offset`` from ``--file_path``, builds a rule
    with YaraGenerator using the chosen wildcard mode, writes it to ``--result``
    (or stdout), then compiles the rule and verifies it matches the source file.
    """
    instr_set_keys = list(INSTRUCTION_SET_MAPPING.keys())
    instr_mode_keys = list(INSTRUCTION_MODE_MAPPING.keys())
    parser = argparse.ArgumentParser(description='Generate a Yara rule based on disassembled code', formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-i', '--instruction_set', type=str, help='Instruction set', choices=instr_set_keys, default=instr_set_keys[0])
    parser.add_argument('-a', '--instruction_mode', type=str, help='Instruction mode', choices=instr_mode_keys, default=instr_mode_keys[0])
    parser.add_argument('-f', '--file_path', type=str, help='Sample file path', required=True)
    parser.add_argument('-n', '--rulename', type=str, help='Generated rule name', default='generated_rule')
    parser.add_argument('-o', '--offset', type=auto_int, help='File offset for signature', required=True)
    parser.add_argument('-s', '--size', type=auto_int, help='Size of desired signature', required=True)
    parser.add_argument('-m', '--mode', type=str, help='Wildcard mode for yara rule generation\nloose = wildcard all operands\nnormal = wildcard only displacement operands\nstrict = wildcard only jmp/call addresses', required=False, choices=['loose', 'normal', 'strict'], default='normal')
    parser.add_argument('-r', '--result', type=argparse.FileType('w'), help='Output file', required=False, default=None)
    parser.add_argument('-v', '--verbose', action='count', default=0, help='Increase verbosity')
    args = parser.parse_args()
    # -v / -vv raise verbosity from WARNING up to DEBUG.
    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    level = levels[min((len(levels) - 1), args.verbose)]
    logging.basicConfig(stream=sys.stderr, level=level, format='%(asctime)s %(levelname)s %(name)s %(message)s')
    log.info('Disassembling code and generating signature...')
    ins_set = INSTRUCTION_SET_MAPPING[args.instruction_set]
    ins_mode = INSTRUCTION_MODE_MAPPING[args.instruction_mode]
    yr_gen = YaraGenerator(args.mode, ins_set, ins_mode, rule_name=args.rulename)
    with open(args.file_path, 'rb') as file:
        file.seek(args.offset)
        data = file.read(args.size)
        yr_gen.add_chunk(data, args.offset)
    yr_rule = yr_gen.generate_rule()
    yr_rule.metas['sample'] = '"{}"'.format(sha256_hash(args.file_path))
    log.info('Creating Yara rule...')
    yr_rule_str = yr_rule.get_rule_string()
    out_file = (args.result or sys.stdout)
    log.info('Writing to output file...')
    out_file.write(yr_rule_str)
    log.info('Checking generated rule...')
    # Self-check: the generated rule must match the file it was derived from.
    compiled_yr = yara.compile(source=yr_rule_str)
    matches = compiled_yr.match(args.file_path)
    if (len(matches) == 0):
        # BUG FIX: a failed self-check is an error, not routine info — log it
        # at ERROR level so it is visible at the default verbosity (WARNING).
        log.error('ERROR! Generated rule does not match on source file.')
    else:
        log.info('Rule check OK. Source file matches on rule!')
        for match in matches:
            log.debug('Sample matched rule {}'.format(match))
            for s in match.strings:
                hex_bytes = binascii.hexlify(s[2])
                hex_bytes = hex_bytes.decode('ascii')
                log.debug('0x{:X} - {}\t {}'.format(s[0], s[1], hex_bytes))
def add_sync_from_checkpoint_arg(arg_group: _ArgumentGroup) -> None:
    """Register the ``--sync-from-checkpoint`` option on *arg_group*."""
    help_text = (
        'Start syncing from a trusted checkpoint specified using URI syntax:'
        'By specific block, eth://block/byhash/<hash>?score=<score>'
        'Let etherscan pick a block near the tip, eth://block/byetherscan/latest'
    )
    add_shared_argument(
        arg_group,
        '--sync-from-checkpoint',
        action=NormalizeCheckpointURI,
        help=help_text,
        default=None,
    )
class WebhooksApi(CRUDApi):
    """CRUD API wrapper for Zendesk webhooks: update/patch/list plus the
    clone, invocation, secret and test endpoints."""

    def __init__(self, config):
        super(WebhooksApi, self).__init__(config, object_type='webhook')

    def update(self, webhook_id, new_webhook):
        """Fully replace the webhook identified by *webhook_id* (PUT)."""
        payload = dict(webhook=json.loads(json.dumps(new_webhook, default=json_encode_for_zendesk)))
        url = self._build_url(endpoint=self.endpoint(id=webhook_id))
        return self._put(url, payload=payload)

    def patch(self, webhook):
        """Partially update *webhook* (PATCH), targeting ``webhook.id``."""
        payload = dict(webhook=json.loads(json.dumps(webhook, default=json_encode_for_zendesk)))
        url = self._build_url(endpoint=self.endpoint(id=webhook.id))
        return self._patch(url, payload=payload)

    def list(self, **kwargs):
        """List webhooks; *kwargs* pass through as query parameters."""
        url = self._build_url(endpoint=self.endpoint(**kwargs))
        return self._get(url)

    # NOTE(review): the bare `_id(Webhook)` statements below look like decorators
    # that lost an `@extract`-style prefix during extraction — confirm against
    # the upstream source before relying on their effect here.
    _id(Webhook)

    def clone(self, webhook):
        """Create a copy of *webhook* via the clone endpoint."""
        url = self._build_url(endpoint=self.endpoint(clone_webhook_id=webhook))
        return self._post(url, payload=None)

    _id(Webhook)

    def invocations(self, webhook, **kwargs):
        """List invocations of *webhook*; *kwargs* pass through as filters."""
        url = self._build_url(endpoint=self.endpoint.invocations(id=webhook, **kwargs))
        return self._get(url)

    def invocation_attempts(self, webhook, invocation):
        """List the attempts made for one *invocation* of *webhook*."""
        url = self._build_url(endpoint=self.endpoint.invocation_attempts(webhook, invocation))
        return self._get(url)

    _id(Webhook)

    def test(self, webhook=None, request=None):
        """Fire a test invocation, optionally against an existing *webhook*.

        BUG FIX: *request* previously defaulted to a mutable ``{}`` shared
        across calls; it now defaults to ``None`` and an empty dict is built
        per call, preserving the original behavior.
        """
        params = dict(test_webhook_id=webhook) if webhook else {}
        payload = dict(request=request if request is not None else {})
        url = self._build_url(endpoint=self.endpoint.test(**params))
        return self._post(url, payload=payload)

    _id(Webhook)

    def show_secret(self, webhook):
        """Retrieve the signing secret of *webhook*."""
        url = self._build_url(endpoint=self.endpoint.secret(webhook))
        return self._get(url)

    _id(Webhook)

    def reset_secret(self, webhook):
        """Regenerate the signing secret of *webhook*."""
        url = self._build_url(endpoint=self.endpoint.secret(webhook))
        return self._post(url, payload=None)
def ovlp3d_12(ax, da, A, bx, db, B):
    """Return a 3x6 array built from Gaussian exponents ``ax``/``bx``,
    prefactors ``da``/``db`` and the two centers ``A``/``B`` (3-vectors).

    NOTE(review): this is machine-generated common-subexpression code
    (presumably a Gaussian overlap-integral kernel — confirm against the
    generator); do not hand-edit the x* expressions, regenerate instead.
    """
    result = numpy.zeros((3, 6), dtype=float)
    # x0 = 1/(ax+bx); x1/x10/x15 are the exponent-weighted center coordinates.
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + A[0])
    x4 = (x2 + B[0])
    x5 = (x3 * x4)
    x6 = ((ax * bx) * x0)
    # x7 carries the shared Gaussian prefactor exp(-mu * |A-B|^2).
    x7 = ((((5. * da) * db) * (x0 ** 1.5)) * numpy.exp(((- x6) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x8 = (1. * x7)
    x9 = (0. * x8)
    x10 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x11 = (- x10)
    x12 = (x11 + B[1])
    x13 = (0.5 * x0)
    x14 = (x7 * (x13 + x5))
    x15 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x16 = (- x15)
    x17 = (x16 + B[2])
    x18 = ((x12 ** 2) + x13)
    x19 = (0. * x8)
    x20 = (x19 * x3)
    x21 = (x17 * x7)
    x22 = (x13 + (x17 ** 2))
    x23 = (x13 + (x4 ** 2))
    x24 = (x11 + A[1])
    x25 = (x19 * x24)
    x26 = (x12 * x24)
    x27 = (x7 * (x13 + x26))
    x28 = (x16 + A[2])
    x29 = (x19 * x28)
    x30 = (x17 * x28)
    x31 = (x7 * (x13 + x30))
    # NOTE(review): x9/x19 are zero by construction (0.*x8), so several of the
    # entries below are identically zero — presumably intended by the generator.
    result[(0, 0)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x1) + A[0]) + B[0])) + (x4 * (x0 + (2.0 * x5))))))
    result[(0, 1)] = numpy.sum(((- x12) * x14))
    result[(0, 2)] = numpy.sum(((- x14) * x17))
    result[(0, 3)] = numpy.sum(((- x18) * x20))
    result[(0, 4)] = numpy.sum((((- x12) * x21) * x3))
    result[(0, 5)] = numpy.sum(((- x20) * x22))
    result[(1, 0)] = numpy.sum(((- x23) * x25))
    result[(1, 1)] = numpy.sum(((- x27) * x4))
    result[(1, 2)] = numpy.sum((((- x21) * x24) * x4))
    result[(1, 3)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x10) + A[1]) + B[1])) + (x12 * (x0 + (2.0 * x26))))))
    result[(1, 4)] = numpy.sum(((- x17) * x27))
    result[(1, 5)] = numpy.sum(((- x22) * x25))
    result[(2, 0)] = numpy.sum(((- x23) * x29))
    result[(2, 1)] = numpy.sum(((((- x12) * x28) * x4) * x7))
    result[(2, 2)] = numpy.sum(((- x31) * x4))
    result[(2, 3)] = numpy.sum(((- x18) * x29))
    result[(2, 4)] = numpy.sum(((- x12) * x31))
    result[(2, 5)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x15) + A[2]) + B[2])) + (x17 * (x0 + (2.0 * x30))))))
    return result
# BUG FIX: the decorator had lost its `@pytest.mark` prefix (a bare leading
# `.parametrize(...)` is a syntax error); restored below.
@pytest.mark.parametrize('abi_type,should_match', (('SomeEnum.SomeValue', True), ('Some_Enum.Some_Value', True), ('SomeEnum.someValue', True), ('SomeEnum.some_value', True), ('__SomeEnum__.some_value', True), ('__SomeEnum__.__some_value__', True), ('SomeEnum.__some_value__', True), ('uint256', False)))
def test_is_probably_enum(abi_type, should_match):
    """is_probably_enum() must accept dotted enum names and reject plain types."""
    is_match = is_probably_enum(abi_type)
    assert (is_match is should_match)
def get_linked_anki_notes_for_pdf_page(siac_nid: int, page: int) -> List[IndexNote]:
    """Return the Anki notes linked to page *page* of the PDF note *siac_nid*.

    Side effect: links whose Anki note no longer exists are deleted from
    ``notes_pdf_page``.

    BUG FIXES: SQL values are now bound as parameters instead of f-string
    interpolation, and the connection is always closed (the early return
    previously leaked it).
    """
    conn = _get_connection()
    try:
        nids = conn.execute(
            'select nid from notes_pdf_page where siac_nid = ? and page = ?',
            (siac_nid, page)).fetchall()
        if not nids:
            return []
        nids_str = ','.join(str(nid[0]) for nid in nids)
        res = mw.col.db.all((
            'select distinct notes.id, flds, tags, did, mid from notes '
            'left join cards on notes.id = cards.nid where notes.id in (%s)') % nids_str)
        if len(res) != len(nids):
            # Some linked notes were deleted in Anki — prune the stale links.
            anki_nids = [r[0] for r in res]
            for snid in (r[0] for r in nids):
                if snid not in anki_nids:
                    conn.execute('delete from notes_pdf_page where nid = ?', (snid,))
            conn.commit()
    finally:
        conn.close()
    return _anki_to_index_note(res)
class ProcessMethodCommandHandler(ProcessCommandHandler):
    """Command handler that resolves its own name to a ``psutil.Process``
    method and invokes it on the target process."""

    def __init__(self, name: str):
        super().__init__(name)
        # Resolve once; None marks a method this psutil build does not provide.
        self.method = getattr(psutil.Process, self.name, None)

    def handle(self, param: str, process: psutil.Process) -> Payload:
        """Dispatch to the resolved method; this handler takes no parameter."""
        if param:
            raise Exception(f"Parameter '{param}' in '{self.name}' is not supported")
        return self.get_value(process)

    def get_value(self, process: psutil.Process) -> Payload:
        """Call the resolved psutil method, failing if it was unavailable."""
        if self.method is None:
            raise Exception(f'Not implemented: psutil.{self.name}')
        return self.method(process)
class TestLinearsRGBSerialize(util.ColorAssertsPyTest):
    """Serialization round-trip tests for the srgb-linear color space."""

    # (input color, to_string() options, expected serialization)
    COLORS = [('color(srgb-linear 0 0.3 0.75 / 0.5)', {}, 'color(srgb-linear 0 0.3 0.75 / 0.5)'), ('color(srgb-linear 0 0.3 0.75)', {'alpha': True}, 'color(srgb-linear 0 0.3 0.75 / 1)'), ('color(srgb-linear 0 0.3 0.75 / 0.5)', {'alpha': False}, 'color(srgb-linear 0 0.3 0.75)'), ('color(srgb-linear none 0.3 0.75)', {}, 'color(srgb-linear 0 0.3 0.75)'), ('color(srgb-linear none 0.3 0.75)', {'none': True}, 'color(srgb-linear none 0.3 0.75)'), ('color(srgb-linear 1.2 0.2 0)', {}, 'color(srgb-linear 1 0.22783 0.0233)'), ('color(srgb-linear 1.2 0.2 0)', {'fit': False}, 'color(srgb-linear 1.2 0.2 0)')]

    # BUG FIX: the decorator had lost its `@pytest.mark` prefix (a bare leading
    # `.parametrize(...)` is a syntax error); restored below.
    @pytest.mark.parametrize('color1,options,color2', COLORS)
    def test_colors(self, color1, options, color2):
        """Serializing *color1* with *options* must yield *color2*."""
        self.assertEqual(Color(color1).to_string(**options), color2)
def get_edge_bias(cnarr, margin):
    """Compute a per-bin edge-effect term for *cnarr*.

    Per chromosome, each bin takes a loss at its own edges (``edge_losses``)
    and gains some back when a neighboring bin lies closer than *margin*
    (``edge_gains``).  Returns ``gains - losses`` as a Series aligned with
    ``cnarr.data``.
    """
    output_by_chrom = []
    for (_chrom, subarr) in cnarr.by_chromosome():
        tile_starts = subarr['start'].values
        tile_ends = subarr['end'].values
        tgt_sizes = (tile_ends - tile_starts)
        # Loss contributed by the bin's own two edges.
        losses = edge_losses(tgt_sizes, margin)
        # Gap between each consecutive pair of bins; only gaps < margin count.
        gap_sizes = (tile_starts[1:] - tile_ends[:(- 1)])
        ok_gaps_mask = (gap_sizes < margin)
        ok_gaps = gap_sizes[ok_gaps_mask]
        # Gain on the right-hand bin of each close pair (close left neighbor)
        # and on the left-hand bin (close right neighbor), respectively.
        left_gains = edge_gains(tgt_sizes[1:][ok_gaps_mask], ok_gaps, margin)
        right_gains = edge_gains(tgt_sizes[:(- 1)][ok_gaps_mask], ok_gaps, margin)
        gains = np.zeros(len(subarr))
        # Shift the pairwise masks back onto per-bin positions: prepend False to
        # address right-hand bins, append False to address left-hand bins.
        gains[np.concatenate([[False], ok_gaps_mask])] += left_gains
        gains[np.concatenate([ok_gaps_mask, [False]])] += right_gains
        output_by_chrom.append((gains - losses))
    return pd.Series(np.concatenate(output_by_chrom), index=cnarr.data.index)
# BUG FIX: the decorator had lost its `@pytest.mark` prefix (a bare leading
# `.usefixtures(...)` is a syntax error); restored below.
@pytest.mark.usefixtures('use_tmpdir')
def test_that_values_with_brackets_are_ommitted(caplog, joblist):
    """Environment values containing <...> placeholders must be skipped (with a log)."""
    forward_model_list: List[ForwardModel] = set_up_forward_model(joblist)
    forward_model_list[0].environment['ENV_VAR'] = '<SOME_BRACKETS>'
    run_id = 'test_no_jobs_id'
    context = SubstitutionList.from_dict({'DEFINE': [['<RUNPATH>', './']]})
    data = ErtConfig(forward_model_list=forward_model_list, substitution_list=context).forward_model_data_to_json(run_id)
    assert ('Environment variable ENV_VAR skipped due to' in caplog.text)
    assert ('ENV_VAR' not in data['jobList'][0]['environment'])
class Test(unittest.TestCase):
    """Integration tests for the emapper.py CLI.

    Every test follows the same protocol: recreate an empty output directory,
    run emapper.py, diff the produced files against expected fixtures, then
    clean up.  The shared steps live in the three helpers below; the many
    per-test paths and command lines are preserved verbatim.
    """

    @staticmethod
    def _reset_dir(path):
        """(Re)create *path* as an empty directory."""
        if os.path.isdir(path):
            shutil.rmtree(path)
        os.mkdir(path)

    @staticmethod
    def _remove_dir(path):
        """Remove *path* if it exists."""
        if os.path.isdir(path):
            shutil.rmtree(path)

    @staticmethod
    def _run_checked(cmd):
        """Run *cmd*; on a non-zero exit dump stdout/stderr and fail."""
        (st, out, err) = run(cmd)
        if st != 0:
            print(out.decode('utf-8'))
            print(err.decode('utf-8'))
        assert st == 0

    def test_emapper_diamond(self):
        """Diamond search mode against the bundled fixture DB."""
        in_file = 'tests/fixtures/test_queries.fa'
        data_dir = 'tests/fixtures'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        exp_seed_orthologs = os.path.join(data_dir, 'test_output.emapper.seed_orthologs')
        exp_annotations = os.path.join(data_dir, 'test_output.emapper.annotations')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m diamond -i {in_file} --data_dir {data_dir} --output_dir {outdir} -o {outprefix}'
        self._run_checked(cmd)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        check_annotations(obs_annotations, exp_annotations)
        self._remove_dir(outdir)

    def test_emapper_novel_fams(self):
        """novel_fams mode with an explicit diamond DB."""
        in_file = 'tests/fixtures/novel_fams/novel_fams_queries.fa'
        data_dir = 'tests/fixtures/novel_fams'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        exp_seed_orthologs = os.path.join(data_dir, 'test.emapper.seed_orthologs')
        exp_annotations = os.path.join(data_dir, 'test.emapper.annotations')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m novel_fams -i {in_file} --data_dir {data_dir} --output_dir {outdir} -o {outprefix}'
        cmd += ' --dmnd_db tests/fixtures/novel_fams/novel_fams.dmnd'
        self._run_checked(cmd)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        check_annotations(obs_annotations, exp_annotations)
        self._remove_dir(outdir)

    def test_emapper_mmseqs(self):
        """MMseqs2 search mode against the bundled fixture DB."""
        in_file = 'tests/fixtures/test_queries.fa'
        data_dir = 'tests/fixtures'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        exp_seed_orthologs = os.path.join(data_dir, 'test_mmseqs.emapper.seed_orthologs')
        exp_annotations = os.path.join(data_dir, 'test_mmseqs.emapper.annotations')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m mmseqs -i {in_file} --data_dir {data_dir} --output_dir {outdir} -o {outprefix}'
        self._run_checked(cmd)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        check_annotations(obs_annotations, exp_annotations)
        self._remove_dir(outdir)

    def test_emapper_no_search(self):
        """no_search mode: annotate a precomputed hits table and report orthologs."""
        in_file = 'tests/fixtures/test_output.emapper.seed_orthologs'
        data_dir = 'tests/fixtures'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        obs_orthologs = os.path.join(outdir, outprefix + ORTHOLOGS_SUFFIX)
        exp_annotations = os.path.join(data_dir, 'test_no_search.emapper.annotations')
        exp_orthologs = os.path.join(data_dir, 'test_no_search.emapper.orthologs')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m no_search --annotate_hits_table {in_file} --data_dir {data_dir} --output_dir {outdir} -o {outprefix} --report_orthologs --target_orthologs one2one --target_taxa 72274,1123487 --excluded_taxa 205918,1395571'
        self._run_checked(cmd)
        check_orthologs(obs_orthologs, exp_orthologs)
        check_annotations(obs_annotations, exp_annotations)
        self._remove_dir(outdir)

    def test_emapper_hmmer_eggnogdb(self):
        """HMMER search mode against the bundled 'bact' eggNOG DB."""
        in_file = 'tests/fixtures/test_queries.fa'
        data_dir = 'tests/fixtures'
        outdir = 'tests/integration/out'
        outprefix = 'bact'
        database = 'bact'
        obs_hmm_hits = os.path.join(outdir, outprefix + HMM_HITS_SUFFIX)
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        exp_files_dir = 'tests/fixtures/hmmer_expected_output/'
        exp_hmm_hits = os.path.join(exp_files_dir, 'bact.emapper.hmm_hits')
        exp_seed_orthologs = os.path.join(exp_files_dir, 'bact.emapper.seed_orthologs')
        exp_annotations = os.path.join(exp_files_dir, 'bact.emapper.annotations')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m hmmer -i {in_file} --data_dir {data_dir} -d {database} --output_dir {outdir} -o {outprefix}'
        self._run_checked(cmd)
        check_hmm_hits(obs_hmm_hits, exp_hmm_hits)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        check_annotations(obs_annotations, exp_annotations)
        self._remove_dir(outdir)

    def test_scratch_dir(self):
        """--scratch_dir must leave a copy of every output in the scratch directory."""
        in_file = 'tests/fixtures/test_queries.fa'
        data_dir = 'tests/fixtures'
        outdir = 'tests/integration/out'
        scratchdir = 'tests/integration/scratch'
        outprefix = 'bact'
        database = 'bact'
        obs_hmm_hits = os.path.join(outdir, outprefix + HMM_HITS_SUFFIX)
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        scratch_hmm_hits = os.path.join(scratchdir, outprefix + HMM_HITS_SUFFIX)
        scratch_seed_orthologs = os.path.join(scratchdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        scratch_annotations = os.path.join(scratchdir, outprefix + ANNOTATIONS_SUFFIX)
        exp_files_dir = 'tests/fixtures/hmmer_expected_output/'
        exp_hmm_hits = os.path.join(exp_files_dir, 'bact.emapper.hmm_hits')
        exp_seed_orthologs = os.path.join(exp_files_dir, 'bact.emapper.seed_orthologs')
        exp_annotations = os.path.join(exp_files_dir, 'bact.emapper.annotations')
        self._reset_dir(outdir)
        self._reset_dir(scratchdir)
        cmd = f'./emapper.py -m hmmer -i {in_file} --data_dir {data_dir} -d {database} --output_dir {outdir} -o {outprefix} --scratch_dir {scratchdir}'
        self._run_checked(cmd)
        check_hmm_hits(obs_hmm_hits, exp_hmm_hits)
        check_hmm_hits(scratch_hmm_hits, exp_hmm_hits)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        check_seed_orthologs(scratch_seed_orthologs, exp_seed_orthologs)
        check_annotations(obs_annotations, exp_annotations)
        check_annotations(scratch_annotations, exp_annotations)
        self._remove_dir(outdir)
        self._remove_dir(scratchdir)

    def test_pfam_realign(self):
        """--pfam_realign realign on a precomputed hits table."""
        in_file = 'tests/fixtures/test_pfam_groups.fa'
        seeds_file = 'tests/fixtures/test_pfam_groups.seed_orthologs'
        data_dir = 'tests/fixtures'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        obs_pfam = os.path.join(outdir, outprefix + PFAM_SUFFIX)
        exp_annotations = os.path.join(data_dir, 'pfam_realign_output', 'test.emapper.annotations')
        exp_pfam = os.path.join(data_dir, 'pfam_realign_output', 'test.emapper.pfam')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m no_search --annotate_hits_table {seeds_file} -i {in_file} --data_dir {data_dir} --output_dir {outdir} -o {outprefix} --pfam_realign realign'
        self._run_checked(cmd)
        check_pfam(obs_pfam, exp_pfam)
        check_annotations(obs_annotations, exp_annotations)
        self._remove_dir(outdir)

    def test_pfam_denovo(self):
        """--pfam_realign denovo on a precomputed hits table."""
        in_file = 'tests/fixtures/test_pfam_groups.fa'
        seeds_file = 'tests/fixtures/test_pfam_groups.seed_orthologs'
        data_dir = 'tests/fixtures'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_annotations = os.path.join(outdir, outprefix + ANNOTATIONS_SUFFIX)
        obs_pfam = os.path.join(outdir, outprefix + PFAM_SUFFIX)
        exp_annotations = os.path.join(data_dir, 'pfam_denovo_output', 'test.emapper.annotations')
        exp_pfam = os.path.join(data_dir, 'pfam_denovo_output', 'test.emapper.pfam')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m no_search --annotate_hits_table {seeds_file} -i {in_file} --data_dir {data_dir} --output_dir {outdir} -o {outprefix} --pfam_realign denovo'
        self._run_checked(cmd)
        check_pfam(obs_pfam, exp_pfam)
        check_annotations(obs_annotations, exp_annotations)
        self._remove_dir(outdir)

    def test_genepred_prodigal(self):
        """Gene prediction on a metagenome contig using prodigal."""
        in_file = 'tests/fixtures/genepred_contig/contig.fna'
        data_dir = 'tests/fixtures/genepred_contig/prodigal_out'
        dmnd_db = 'tests/fixtures/genepred_contig/contig.dmnd'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_genepred_gff = os.path.join(outdir, outprefix + GENEPRED_GFF_SUFFIX)
        obs_genepred_fasta = os.path.join(outdir, outprefix + GENEPRED_FASTA_SUFFIX)
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        exp_genepred_gff = os.path.join(data_dir, 'out.emapper.genepred.gff')
        exp_genepred_fasta = os.path.join(data_dir, 'out.emapper.genepred.fasta')
        exp_seed_orthologs = os.path.join(data_dir, 'out.emapper.seed_orthologs')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -i {in_file} --itype metagenome --genepred prodigal --data_dir tests/fixtures -m diamond --sensmode sensitive --no_annot --dmnd_db {dmnd_db} -o {outprefix} --output_dir {outdir}'
        self._run_checked(cmd)
        check_gff(obs_genepred_gff, exp_genepred_gff)
        check_fasta(obs_genepred_fasta, exp_genepred_fasta)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        self._remove_dir(outdir)

    def test_genepred_diamond(self):
        """Gene prediction on a metagenome contig using diamond blastx hits."""
        in_file = 'tests/fixtures/genepred_contig/contig.fna'
        data_dir = 'tests/fixtures/genepred_contig/diamond_out'
        dmnd_db = 'tests/fixtures/genepred_contig/contig.dmnd'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_genepred_gff = os.path.join(outdir, outprefix + GENEPRED_GFF_SUFFIX)
        obs_genepred_fasta = os.path.join(outdir, outprefix + GENEPRED_FASTA_SUFFIX)
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        exp_genepred_gff = os.path.join(data_dir, 'out.emapper.genepred.gff')
        exp_genepred_fasta = os.path.join(data_dir, 'out.emapper.genepred.fasta')
        exp_seed_orthologs = os.path.join(data_dir, 'out.emapper.seed_orthologs')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -i {in_file} --itype metagenome --genepred search --data_dir tests/fixtures -m diamond --sensmode sensitive --no_annot --dmnd_db {dmnd_db} -o {outprefix} --output_dir {outdir}'
        self._run_checked(cmd)
        check_gff(obs_genepred_gff, exp_genepred_gff)
        check_fasta(obs_genepred_fasta, exp_genepred_fasta)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        self._remove_dir(outdir)

    def test_genepred_mmseqs(self):
        """Gene prediction on a metagenome contig using MMseqs2 hits."""
        in_file = 'tests/fixtures/genepred_contig/contig.fna'
        data_dir = 'tests/fixtures/genepred_contig/mmseqs_out'
        mmseqs_db = 'tests/fixtures/genepred_contig/contig.mmseqs/contig.0.hits.mmseqs.db'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_genepred_gff = os.path.join(outdir, outprefix + GENEPRED_GFF_SUFFIX)
        obs_genepred_fasta = os.path.join(outdir, outprefix + GENEPRED_FASTA_SUFFIX)
        obs_seed_orthologs = os.path.join(outdir, outprefix + SEED_ORTHOLOGS_SUFFIX)
        exp_genepred_gff = os.path.join(data_dir, 'out.emapper.genepred.gff')
        exp_genepred_fasta = os.path.join(data_dir, 'out.emapper.genepred.fasta')
        exp_seed_orthologs = os.path.join(data_dir, 'out.emapper.seed_orthologs')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -i {in_file} --itype metagenome --genepred search --data_dir tests/fixtures -m mmseqs --no_annot --mmseqs_db {mmseqs_db} -o {outprefix} --output_dir {outdir}'
        self._run_checked(cmd)
        check_gff(obs_genepred_gff, exp_genepred_gff)
        check_fasta(obs_genepred_fasta, exp_genepred_fasta)
        check_seed_orthologs(obs_seed_orthologs, exp_seed_orthologs)
        self._remove_dir(outdir)

    def test_decorate_gff_blastx_annot(self):
        """--decorate_gff yes on a blastx-style gene prediction run."""
        in_file = 'tests/fixtures/genepred_contig/contig.fna'
        data_dir = 'tests/fixtures'
        mmseqs_db = 'tests/fixtures/genepred_contig/contig.mmseqs/contig.0.hits.mmseqs.db'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_genepred_gff = os.path.join(outdir, outprefix + DECORATED_GFF_SUFFIX)
        exp_genepred_gff = os.path.join(data_dir, 'decorate_gff', 'blastx_annot', 'out.emapper.genepred.gff')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -i {in_file} --itype metagenome --genepred search --decorate_gff yes -m mmseqs --data_dir tests/fixtures --mmseqs_db {mmseqs_db} -o {outprefix} --output_dir {outdir}'
        self._run_checked(cmd)
        check_gff(obs_genepred_gff, exp_genepred_gff)
        self._remove_dir(outdir)

    def test_decorate_gff_file(self):
        """Decorating an existing GFF file from an annotated hits table."""
        in_file = 'tests/fixtures/decorate_gff/decorate_file/test.emapper.seed_orthologs'
        data_dir = 'tests/fixtures'
        gff_to_decorate = 'tests/fixtures/decorate_gff/decorate_file/test.emapper.genepred.gff'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_genepred_gff = os.path.join(outdir, outprefix + DECORATED_GFF_SUFFIX)
        exp_genepred_gff = os.path.join(data_dir, 'decorate_gff', 'decorate_file', 'out.emapper.genepred.gff')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m no_search --annotate_hits_table {in_file} --decorate_gff {gff_to_decorate} --data_dir tests/fixtures -o {outprefix} --output_dir {outdir}'
        self._run_checked(cmd)
        check_gff(obs_genepred_gff, exp_genepred_gff)
        self._remove_dir(outdir)

    def test_decorate_gff_file_short(self):
        """Decorating a GFF from a truncated (short) hits table."""
        in_file = 'tests/fixtures/decorate_gff/decorate_file/test.emapper.seed_orthologs.short'
        data_dir = 'tests/fixtures'
        gff_to_decorate = 'tests/fixtures/decorate_gff/decorate_file/test.emapper.genepred.gff'
        outdir = 'tests/integration/out'
        outprefix = 'test'
        obs_genepred_gff = os.path.join(outdir, outprefix + DECORATED_GFF_SUFFIX)
        exp_genepred_gff = os.path.join(data_dir, 'decorate_gff', 'decorate_file', 'out.emapper.short.genepred.gff')
        self._reset_dir(outdir)
        cmd = f'./emapper.py -m no_search --annotate_hits_table {in_file} --decorate_gff {gff_to_decorate} --data_dir tests/fixtures -o {outprefix} --output_dir {outdir}'
        self._run_checked(cmd)
        check_gff(obs_genepred_gff, exp_genepred_gff)
        self._remove_dir(outdir)
def extractFableWind(item):
    """Map a FableWind release post to a release message.

    Returns None for non-releases/previews, a release message for known
    series tags, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if (chp or vol) and 'preview' not in title.lower():
        if 'WATTT' in item['tags']:
            return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
        return False
    return None
# BUG FIX: both decorators had lost their `@pytest.mark` prefix (bare leading
# `.skipif(...)` / `.parametrize(...)` lines are syntax errors); restored below.
@pytest.mark.skipif((ProgressBar is None), reason='requires numba_progress')
@pytest.mark.parametrize('use_progressbar', (True, False))
def test_initialize_progressbar(use_progressbar):
    """The context manager yields a ProgressBar when requested, else None."""
    with initialize_progressbar(3, use_progressbar) as progress_proxy:
        if use_progressbar:
            assert isinstance(progress_proxy, ProgressBar)
        else:
            assert (progress_proxy is None)
# NOTE(review): the original line here read `_os(metadata.platforms)` — almost
# certainly a decorator whose `@common.requires` prefix was lost; restored as
# the conventional RTA guard below. Confirm against the upstream source.
@common.requires_os(metadata.platforms)
def main():
    """Simulate a cap_setuid grant via a fake `unshare` binary, then a uid change."""
    common.log('Creating a fake unshare executable..')
    masquerade = '/tmp/unshare'
    source = common.get_path('bin', 'linux.ditto_and_spawn')
    common.copy_file(source, masquerade)
    # Run the fake binary with capability-style arguments to emit the telemetry.
    commands = [masquerade, '-rm', 'cap_setuid']
    common.log('Launching fake commands to set cap_setuid via unshare')
    common.execute([*commands], timeout=2, kill=True)
    common.log('Unshare simulation succesful')
    common.log('Faking uid change via same parent')
    sudo_commands = ['sudo', 'su']
    subprocess.run(sudo_commands, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    common.log('Uid change simulation succesful')
    common.remove_file(masquerade)
def BuGn(range, **traits):
    """Build a ColorMapper from a fixed red/green/blue segment map.

    ``range`` (note: shadows the builtin, kept for API compatibility) and
    *traits* are forwarded to ``ColorMapper.from_segment_map``.

    NOTE(review): despite the name, the green and blue channels below are all
    zeros — this table does not look like the standard BuGn colormap; verify
    the segment data against the original palette source before relying on it.
    """
    _data = dict(red=[(0.0, 0., 0.), (0.125, 0., 0.), (0.25, 0.8, 0.8), (0.375, 0.6, 0.6), (0.5, 0.4, 0.4), (0.625, 0., 0.), (0.75, 0., 0.), (0.875, 0.0, 0.0), (1.0, 0.0, 0.0)], green=[(0.0, 0., 0.), (0.125, 0., 0.), (0.25, 0., 0.), (0.375, 0., 0.), (0.5, 0., 0.), (0.625, 0., 0.), (0.75, 0., 0.), (0.875, 0., 0.), (1.0, 0., 0.)], blue=[(0.0, 0., 0.), (0.125, 0., 0.), (0.25, 0., 0.), (0.375, 0., 0.), (0.5, 0., 0.), (0.625, 0., 0.), (0.75, 0., 0.), (0.875, 0., 0.), (1.0, 0., 0.)])
    return ColorMapper.from_segment_map(_data, range=range, **traits)
class OptionPlotoptionsSplineSonificationTracksMappingVolume(Options):
    """Option accessors for the spline sonification track volume mapping.

    BUG FIX: each getter/setter pair was defined as two plain methods with the
    same name, so the setter silently shadowed the getter — clearly stripped
    `@property` / `@x.setter` decorators; restored below.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_invalid_platform_call(tmp_project):
    """An unsupported --platform value must abort and leave the project untouched."""
    cmd = 'python manage.py simple_deploy --platform unsupported_platform_name'
    stdout, stderr = msp.call_simple_deploy(tmp_project, cmd)
    expected_msg = 'The platform "unsupported_platform_name" is not currently supported.'
    assert expected_msg in stderr
    check_project_unchanged(tmp_project)
class Solution:
    """LeetCode 953: verify that words are sorted under a custom alphabet."""

    def isAlienSorted(self, words: List[str], order: str) -> bool:
        """Return True iff *words* is non-decreasing in the *order* alphabet."""
        rank = {ch: pos for pos, ch in enumerate(order)}

        def ordered(a: str, b: str) -> bool:
            # The first differing character decides; equal prefixes fall
            # back to the length rule (a prefix sorts before its extension).
            for x, y in zip(a, b):
                if x != y:
                    return rank[x] < rank[y]
            return len(a) <= len(b)

        return all(ordered(a, b) for a, b in zip(words, words[1:]))
class ConfWithStatsListener(BaseConfListener):
    """Listener base for ConfWithStats settings.

    Subscribes to the stats-enabled and stats-interval events; subclasses
    must override both callbacks.
    """

    def __init__(self, conf_with_stats):
        assert conf_with_stats
        super(ConfWithStatsListener, self).__init__(conf_with_stats)
        registrations = (
            (ConfWithStats.UPDATE_STATS_LOG_ENABLED_EVT, self.on_chg_stats_enabled_conf_with_stats),
            (ConfWithStats.UPDATE_STATS_TIME_EVT, self.on_chg_stats_time_conf_with_stats),
        )
        for event, callback in registrations:
            conf_with_stats.add_listener(event, callback)

    def on_chg_stats_time_conf_with_stats(self, conf_evt):
        """Called when the stats collection interval changes; override."""
        raise NotImplementedError()

    def on_chg_stats_enabled_conf_with_stats(self, conf_evt):
        """Called when stats logging is toggled; override."""
        raise NotImplementedError()
def execute(conn: sqlite3.Connection) -> None:
    """Create the initial wallet schema and seed the WalletData counters.

    BUG FIX: the KeyInstances DDL was built by concatenating
    ``...REFERENCES Accounts (account_id)`` directly with
    ``FOREIGN KEY(masterkey_id)...`` with no separating comma, producing
    invalid SQLite (two table constraints must be comma-separated).
    """
    date_created = int(time.time())
    conn.execute('CREATE TABLE IF NOT EXISTS MasterKeys (masterkey_id INTEGER PRIMARY KEY,parent_masterkey_id INTEGER DEFAULT NULL,derivation_type INTEGER NOT NULL,derivation_data BLOB NOT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL,FOREIGN KEY(parent_masterkey_id) REFERENCES MasterKeys (masterkey_id))')
    conn.execute('CREATE TABLE IF NOT EXISTS Accounts (account_id INTEGER PRIMARY KEY,default_masterkey_id INTEGER DEFAULT NULL,default_script_type INTEGER NOT NULL,account_name TEXT NOT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL,FOREIGN KEY(default_masterkey_id) REFERENCES MasterKeys (masterkey_id))')
    conn.execute('CREATE TABLE IF NOT EXISTS KeyInstances (keyinstance_id INTEGER PRIMARY KEY,account_id INTEGER NOT NULL,masterkey_id INTEGER DEFAULT NULL,derivation_type INTEGER NOT NULL,derivation_data BLOB NOT NULL,script_type INTEGER NOT NULL,flags INTEGER NOT NULL,description TEXT DEFAULT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL,FOREIGN KEY(account_id) REFERENCES Accounts (account_id),FOREIGN KEY(masterkey_id) REFERENCES MasterKeys (masterkey_id))')
    conn.execute('CREATE TABLE IF NOT EXISTS Transactions (tx_hash BLOB PRIMARY KEY,tx_data BLOB DEFAULT NULL,proof_data BLOB DEFAULT NULL,block_height INTEGER DEFAULT NULL,block_position INTEGER DEFAULT NULL,fee_value INTEGER DEFAULT NULL,flags INTEGER NOT NULL DEFAULT 0,description TEXT DEFAULT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL)')
    conn.execute('CREATE TABLE IF NOT EXISTS TransactionOutputs (tx_hash BLOB NOT NULL,tx_index INTEGER NOT NULL,value INTEGER NOT NULL,keyinstance_id INTEGER NOT NULL,flags INTEGER NOT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL,FOREIGN KEY (tx_hash) REFERENCES Transactions (tx_hash),FOREIGN KEY (keyinstance_id) REFERENCES KeyInstances (keyinstance_id))')
    conn.execute('CREATE UNIQUE INDEX IF NOT EXISTS idx_TransactionOutputs_unique ON TransactionOutputs(tx_hash, tx_index)')
    conn.execute('CREATE TABLE IF NOT EXISTS TransactionDeltas (keyinstance_id INTEGER NOT NULL,tx_hash BLOB NOT NULL,value_delta INTEGER NOT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL,FOREIGN KEY(tx_hash) REFERENCES Transactions (tx_hash),FOREIGN KEY(keyinstance_id) REFERENCES KeyInstances (keyinstance_id) )')
    conn.execute('CREATE UNIQUE INDEX IF NOT EXISTS idx_TransactionDeltas_unique ON TransactionDeltas(keyinstance_id, tx_hash)')
    conn.execute('CREATE TABLE IF NOT EXISTS WalletData (key TEXT NOT NULL,value TEXT NOT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL)')
    conn.execute('CREATE TABLE IF NOT EXISTS PaymentRequests (paymentrequest_id INTEGER PRIMARY KEY,keyinstance_id INTEGER NOT NULL,state INTEGER NOT NULL,description TEXT DEFAULT NULL,expiration INTEGER DEFAULT NULL,value INTEGER DEFAULT NULL,date_created INTEGER NOT NULL,date_updated INTEGER NOT NULL,FOREIGN KEY(keyinstance_id) REFERENCES KeyInstances (keyinstance_id) )')
    conn.execute('CREATE UNIQUE INDEX IF NOT EXISTS idx_WalletData_unique ON WalletData(key)')
    # Seed the migration marker and the id counters.
    conn.executemany('INSERT INTO WalletData (key, value, date_created, date_updated) VALUES (?, ?, ?, ?)', [['migration', json.dumps(MIGRATION), date_created, date_created], ['next_masterkey_id', json.dumps(1), date_created, date_created], ['next_account_id', json.dumps(1), date_created, date_created], ['next_keyinstance_id', json.dumps(1), date_created, date_created], ['next_paymentrequest_id', json.dumps(1), date_created, date_created]])
class Plotly():
def surface(data: List[dict], y_columns: List[str], x_axis: str, z_axis: str) -> dict:
    """Aggregate *data* into one 2-D grid (z rows by x columns) per y column."""
    result = {'datasets': [], 'series': [], 'python': True}
    z_values, x_values = set(), set()
    totals = {}
    for record in data:
        has_z = z_axis in record
        has_x = x_axis in record
        if has_z:
            z_values.add(record[z_axis])
        if has_x:
            x_values.add(record[x_axis])
        if has_z and has_x:
            bucket = totals.setdefault((record[x_axis], record[z_axis]), {})
            for column in y_columns:
                value = record[column]
                bucket[column] = bucket.get(column, 0) + float(value if value else 0)
    z_sorted = sorted(z_values)
    x_sorted = sorted(x_values)
    for column in y_columns:
        grid = [[totals.get((x, z), {}).get(column, 0) for x in x_sorted] for z in z_sorted]
        result['datasets'].append(grid)
        result['series'].append(column)
    return result
def map(data: List[dict]) -> dict:
    """Wrap *data* unchanged in the standard chart payload structure."""
    payload = {'datasets': data, 'series': []}
    payload['python'] = True
    return payload
def countries(data: List[dict], country_col: str, size_col: str, scale: bool=False) -> List[dict]:
    """Sum *size_col* per country and emit one marker record scaled to max 50
    (or by *scale* when truthy)."""
    totals = {}
    for record in data:
        if country_col not in record:
            continue
        try:
            totals[record[country_col]] = totals.get(record[country_col], 0) + float(record.get(size_col, 0))
        except Exception:
            # Non-numeric sizes are silently skipped (tolerant input handling).
            continue
    if not totals:
        return []
    factor = scale if scale else 50 / max(totals.values())
    record = {'locations': [], 'marker': {'size': []}, 'python': True}
    for country, total in totals.items():
        record['locations'].append(country)
        record['marker']['size'].append(total * factor)
    return [record]
def choropleth(data: List[dict], country_col: str, size_col: str, scale: Union[(float, bool)]=False) -> List[dict]:
aggregated = {}
for rec in data:
if (country_col in rec):
try:
aggregated[rec[country_col]] = (aggregated.get(rec[country_col], 0) + float(rec.get(size_col, 0)))
except Exception as err:
pass
records = []
if aggregated:
max_value = max(aggregated.values())
factor = (scale if scale else (50 / max_value))
record = {'locations': [], 'z': []}
for (k, v) in aggregated.items():
record['locations'].append(k)
record['z'].append((v * factor))
records.append(record)
return records
def locations(data: List[dict], long_col: str, lat_col: str, size_col: str, scale: Union[(float, bool)]=False) -> List[dict]:
aggregated = {}
for rec in data:
if ((long_col in rec) and (lat_col in rec)):
point = (rec[long_col], rec[lat_col])
try:
aggregated[point] = (aggregated.get(point, 0) + float(rec.get(size_col, 0)))
except Exception as err:
pass
records = []
if aggregated:
max_value = max(aggregated.values())
factor = ((1 / scale) if scale else (50 / max_value))
record = {'lon': [], 'lat': [], 'marker': {'size': []}}
for (k, v) in aggregated.items():
record['lon'].append(float(k[0]))
record['lat'].append(float(k[1]))
record['marker']['size'].append((v * factor))
records.append(record)
return records
def xy(data: List[dict], y_columns: List[str], x_axis: str, options: dict=None) -> dict:
if (data is None):
return {'datasets': [], 'python': True, 'series': y_columns}
results = []
if ((options is not None) and (options.get('agg') == 'distinct')):
for y in y_columns:
series = {'x': [], 'y': [], 'text': []}
for rec in data:
if (x_axis not in rec):
continue
series['x'].append(rec[x_axis])
series['y'].append(rec[y])
results.append(series)
else:
agg_data = {}
for rec in data:
for y in y_columns:
if (y in rec):
agg_data.setdefault(y, {})[rec[x_axis]] = (agg_data.get(y, {}).get(rec[x_axis], 0) + float(rec[y]))
results = []
for c in y_columns:
series = {'x': [], 'y': []}
for (x, y) in agg_data.get(c, {}).items():
series['x'].append(x)
series['y'].append(y)
results.append(series)
return {'datasets': results, 'python': True, 'series': y_columns}
def xy_text(data: List[dict], y_columns: List[str], x_axis: str, text: str=None, options: dict=None) -> dict:
if (text is None):
return Plotly.xy(data, y_columns, x_axis, options=options)
results = []
if ((options is not None) and (options.get('agg') == 'distinct')):
for y in y_columns:
series = {'x': [], 'y': [], 'text': []}
for rec in data:
series['x'].append(rec[x_axis])
series['y'].append(rec[y])
if (text is not None):
series['text'].append(rec.get(text, ''))
results.append(series)
else:
(agg_data, texts) = ({}, {})
for rec in data:
for y in y_columns:
if (y in rec):
agg_data.setdefault(y, {})[rec[x_axis]] = (agg_data.get(y, {}).get(rec[x_axis], 0) + float(rec[y]))
texts.setdefault(y, {})[rec[x_axis]] = rec[text]
for c in y_columns:
series = {'x': [], 'y': [], 'text': []}
for (x, y) in agg_data.get(c, {}).items():
series['x'].append(x)
series['y'].append(y)
series['text'].append(texts.get(c, {}).get(x, ''))
results.append(series)
return {'datasets': results, 'python': True, 'series': y_columns}
def xyz(data: List[dict], y_columns: List[str], x_axis: str, z_axis: str) -> dict:
(agg_data, agg_z) = ({}, {})
for rec in data:
for (i, y) in enumerate(y_columns):
if (y in rec):
agg_data.setdefault(y, {})[rec[x_axis]] = (agg_data.get(y, {}).get(rec[x_axis], 0) + float((rec[y] if rec[y] else 0)))
if ((z_axis is not None) and (i < len(z_axis))):
z_col = (sum([float(rec[z]) for z in z_axis]) if isinstance(z_axis, list) else float((rec[z_axis] if rec[z_axis] else 0)))
agg_z.setdefault(y, {})[rec[x_axis]] = (agg_z.get(y, {}).get(rec[x_axis], 0) + z_col)
(labels, data) = (OrderedSet(), [])
for c in y_columns:
series = {'x': [], 'y': [], 'z': []}
for (x, y) in agg_data[c].items():
labels.add(x)
series['x'].append(x)
series['y'].append(y)
series['z'].append(agg_z.get(c, {}).get(x, 0))
data.append(series)
is_data = {'labels': labels, 'datasets': [], 'series': [], 'python': True}
for (i, l) in enumerate(y_columns):
is_data['datasets'].append(data[i])
is_data['series'].append(l)
return is_data
def x_yz(data: List[dict], y_columns: List[str], x_axis: str, z_axis: str, dy: float=0, dx: float=0, dz: float=0) -> dict:
(agg_data, agg_z) = ({}, {})
for rec in data:
for (i, y) in enumerate(y_columns):
if (y in rec):
agg_data.setdefault(y, {})[rec[x_axis]] = (agg_data.get(y, {}).get(rec[x_axis], 0) + float((rec[y] if rec[y] else 0)))
if ((z_axis is not None) and (i < len(z_axis))):
z_col = (sum([float(rec.get(z, 0)) for z in z_axis]) if isinstance(z_axis, list) else float((rec[z_axis] if rec.get(z_axis, 0) else 0)))
agg_z.setdefault(y, {})[rec.get(x_axis, 0)] = (agg_z.get(y, {}).get(rec.get(x_axis, 0), 0) + z_col)
(labels, data) = (OrderedSet(), [])
for c in y_columns:
series = {'x': [], 'y': [], 'z': []}
for (x, y) in agg_data.get(c, {}).items():
labels.add(x)
z = agg_z.get(c, {}).get(x, 0)
series['x'].append([x, ((float(x) + dx) if x else 1)])
series['y'].append([y, (y + dy)])
series['z'].append([z, (z + dz)])
data.append(series)
is_data = {'labels': labels, 'datasets': [], 'series': [], 'python': True}
for (i, l) in enumerate(y_columns):
is_data['datasets'].append(data[i])
is_data['series'].append(l)
return is_data
def table(data: List[dict], columns: List[str], dflt: str='') -> dict:
result = {'values': [], 'python': True, 'header': [[c] for c in columns]}
if (data is None):
return result
for rec in data:
result['values'].append([rec.get(c, dflt) for c in columns])
return result |
def check(ip, domain, port, args, timeout, payload_map):
    """Probe a MongoDB service by sending a raw hex-encoded payload.

    Connects to ``ip:port``, sends ``args`` (a hex string) as raw bytes and
    reads up to 1024 bytes back. Returns a ``'MongoDb\\n' + response`` string
    when the reply does not contain 'errmsg', otherwise returns None.
    ``domain`` and ``payload_map`` are accepted for interface compatibility
    but unused. Exceptions propagate to the caller.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # Bug fix: the original called socket.setdefaulttimeout() AFTER the
        # socket was created, so the timeout never applied to this socket
        # (and mutated global state). Set it on the socket itself instead.
        s.settimeout(int(timeout))
        s.connect((ip, int(port)))
        # sendall guarantees the whole payload is written (send may be partial).
        s.sendall(binascii.a2b_hex(args))
        str_result = str(s.recv(1024))
        if 'errmsg' not in str_result:
            return ('MongoDb\n' + str_result)
    finally:
        # Always release the socket, even when an exception propagates.
        s.close()
class ConfigProxy():
    """Facade bundling the per-domain settings proxies behind one object."""

    def __init__(self, db: Session) -> None:
        """Create one proxy per settings domain, all backed by ``db``."""
        self.notifications = NotificationSettingsProxy(db)
        self.execution = ExecutionSettingsProxy(db)
        self.storage = StorageSettingsProxy(db)
        self.security = SecuritySettingsProxy(db)
        self.consent = ConsentSettingsProxy(db)

    def load_current_cors_domains_into_middleware(self, app: FastAPI) -> None:
        """Push the currently configured CORS origins into the app's
        CORSMiddleware entry (no-op when none is installed)."""
        origins = self.security.cors_origins
        if origins is None:
            origins = []
        allow_origins = [str(domain) for domain in origins]
        for middleware in app.user_middleware:
            if middleware.cls is CORSMiddleware:
                middleware.options['allow_origins'] = allow_origins
class OptionSonificationDefaultspeechoptionsMappingRate(Options):
    """Generated config wrapper for the sonification speech-rate mapping.

    NOTE(review): every getter/setter pair below shares one name with no
    ``@property`` / ``@<name>.setter`` decorators visible, so each second
    ``def`` silently replaces the first — the decorators were presumably
    lost in generation/extraction; confirm against the generator source.
    """
    def mapFunction(self):
        # Getter form (shadowed by the setter definition below).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter form; stores the value without JS-type wrapping.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property the rate is mapped to (no default).
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped rate.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped rate.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def value(self):
        # Getter: fixed value overriding the mapping.
        return self._config_get(None)
    def value(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: data scope the min/max are computed within.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_builder_simple_with_multi_meta_field():
    """Building with several meta helpers must produce one merged 'meta' dict."""
    manifest = build(BASE_MANIFEST, authors('some', 'guy'), license('MIT'), description('description'), keywords('awesome', 'package'), links(website='www', repository='github'), validate())
    expected_meta = {
        'license': 'MIT',
        'authors': ['some', 'guy'],
        'description': 'description',
        'keywords': ['awesome', 'package'],
        'links': {'website': 'www', 'repository': 'github'},
    }
    assert manifest == assoc(BASE_MANIFEST, 'meta', expected_meta)
def summarize_address_range(first, last):
    """Summarize the inclusive IP range [first, last] as a minimal CIDR list.

    Args:
        first: first address of the range (a BaseIP instance).
        last: last address of the range; same IP version, >= first.

    Returns:
        A list of IPv4Network/IPv6Network objects exactly covering the range.

    Raises:
        IPTypeError: if inputs are not addresses or have different versions.
        ValueError: if last < first, or the version is neither 4 nor 6.
    """
    if (not (isinstance(first, BaseIP) and isinstance(last, BaseIP))):
        raise IPTypeError('first and last must be IP addresses, not networks')
    if (first.version != last.version):
        raise IPTypeError('IP addresses must be same version')
    if (first > last):
        raise ValueError('last IP address must be greater than first')
    networks = []
    # Pick the network class matching the address family.
    if (first.version == 4):
        ip = IPv4Network
    elif (first.version == 6):
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')
    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last.\
_ip if False else last._ip
    while (first_int <= last_int):
        # Largest block that can start at first_int is limited by its
        # trailing-zero alignment.
        nbits = _count_righthand_zero_bits(first_int, ip_bits)
        current = None
        while (nbits >= 0):
            # Shrink the candidate block until its last address fits in range.
            addend = ((2 ** nbits) - 1)
            current = (first_int + addend)
            nbits -= 1
            if (current <= last_int):
                break
        prefix = _get_prefix_length(first_int, current, ip_bits)
        net = ip(('%s/%d' % (str(first), prefix)))
        networks.append(net)
        if (current == ip._ALL_ONES):
            # Reached the top of the address space: stop before wrapping.
            break
        first_int = (current + 1)
        # NOTE(review): uses first._version here but first.version above —
        # presumably both exist on BaseIP; confirm.
        first = IPAddress(first_int, version=first._version)
    return networks
class CommandLayer(Module):
    """SATA command layer: routes FIS packets between transport and HDD model."""

    def __init__(self, transport):
        """Attach to ``transport`` and register self as its command handler."""
        self.transport = transport
        self.transport.set_command(self)
        self.hdd = None
        self.n = None

    def set_hdd(self, hdd):
        """Bind the HDD model and propagate its bit-width down the stack."""
        self.hdd = hdd
        self.transport.n = hdd.n
        self.transport.link.n = hdd.n

    def callback(self, fis):
        """Dispatch an incoming FIS to the HDD and send back any response packets."""
        response = None
        if isinstance(fis, FIS_REG_H2D):
            # Host-to-device register FIS: dispatch on the ATA command code.
            if fis.command == regs['WRITE_DMA_EXT']:
                response = self.hdd.write_dma_callback(fis)
            elif fis.command == regs['READ_DMA_EXT']:
                response = self.hdd.read_dma_callback(fis)
        elif isinstance(fis, FIS_DATA):
            response = self.hdd.data_callback(fis)
        if response is None:
            return
        for packet in response:
            self.transport.send(packet)
class OptionPlotoptionsBarAccessibilityPoint(Options):
    """Generated config wrapper for per-point accessibility options of bar series.

    NOTE(review): each getter/setter pair shares one name with no
    ``@property`` / ``@<name>.setter`` decorators visible, so the second
    ``def`` replaces the first — decorators presumably lost in
    generation/extraction; confirm against the generator source.
    """
    def dateFormat(self):
        # Getter: date format for point descriptions (no default).
        return self._config_get(None)
    def dateFormat(self, text: str):
        self._config(text, js_type=False)
    def dateFormatter(self):
        # Getter: callback formatting dates (overrides dateFormat).
        return self._config_get(None)
    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)
    def describeNull(self):
        # Getter: whether null points are exposed to screen readers (default True).
        return self._config_get(True)
    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)
    def descriptionFormat(self):
        # Getter: format string for the point description.
        return self._config_get(None)
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def descriptionFormatter(self):
        # Getter: callback building the point description.
        return self._config_get(None)
    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        # Getter: decimals shown for point values.
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valueDescriptionFormat(self):
        # Getter: template combining x description and value.
        return self._config_get('{xDescription}{separator}{value}.')
    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)
    def valuePrefix(self):
        # Getter: text prepended to announced values.
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        # Getter: text appended to announced values.
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
class NoteList(QTableWidget):
    """Table listing user notes, each row with open / delete / enqueue buttons."""

    def __init__(self, parent):
        self.parent = parent
        super(NoteList, self).__init__(parent)
        self.setColumnCount(4)
        self.setHorizontalHeaderLabels(['Title', '', '', ''])
        header = self.horizontalHeader()
        # Title column stretches; the three button columns stay compact.
        header.setSectionResizeMode(0, QHeaderView.ResizeMode.Stretch)
        for col in (1, 2, 3):
            header.setSectionResizeMode(col, QHeaderView.ResizeMode.ResizeToContents)
        self.setSelectionMode(QAbstractItemView.SelectionMode.NoSelection)
        self.setFocusPolicy(Qt.FocusPolicy.NoFocus)
        self.cellDoubleClicked.connect(self.cell_clicked)
        self.setEditTriggers(QAbstractItemView.EditTrigger.NoEditTriggers)

    def _icon_button(self, icon, tip, handler):
        """Build a QToolButton with icon, tooltip and click handler."""
        button = QToolButton()
        button.setIcon(icon)
        button.setToolTip(tip)
        button.clicked.connect(handler)
        return button

    def fill(self, notes):
        """Repopulate the table from ``notes`` (clears everything first)."""
        self.clearContents()
        if not notes:
            self.setRowCount(0)
            return
        self.setRowCount(len(notes))
        style = QApplication.style()
        open_icon = style.standardIcon(QStyle.StandardPixmap.SP_FileDialogContentsView)
        del_icon = style.standardIcon(QStyle.StandardPixmap.SP_TrashIcon)
        for row, note in enumerate(notes):
            title_item = QTableWidgetItem(note.get_title())
            title_item.setData(Qt.ItemDataRole.UserRole, QVariant(note.id))
            self.setItem(row, 0, title_item)
            self.setCellWidget(row, 1, self._icon_button(open_icon, 'Open', functools.partial(self.open_btn_clicked, note.id)))
            self.setCellWidget(row, 2, self._icon_button(del_icon, 'Delete Note...', functools.partial(self.del_btn_clicked, note.id)))
            # The enqueue button is text-only ('+'), so it is built inline.
            enqueue_button = QToolButton()
            enqueue_button.setText('+')
            enqueue_button.setToolTip('Add to Queue...')
            enqueue_button.clicked.connect(functools.partial(self.add_btn_clicked, note.id))
            self.setCellWidget(row, 3, enqueue_button)
        self.resizeRowsToContents()

    def open_btn_clicked(self, id):
        """Select the note and close the active window."""
        self.parent.parent.set_chosen(id, '')
        mw.app.activeWindow().accept()

    def del_btn_clicked(self, id):
        """Confirm, then irreversibly delete the note and refresh views."""
        buttons = QMessageBox.Yes | QMessageBox.No | QMessageBox.Cancel
        reply = QMessageBox.question(self, 'Delete Note?', 'This will irreversibly delete the chosen note. \nAre you sure?', buttons, QMessageBox.Cancel)
        if reply != QMessageBox.Yes:
            return
        delete_note(id)
        index = get_index()
        if index is not None:
            index.deleteNote(id)
        run_hooks('user-note-deleted')
        self.parent.refresh()

    def cell_clicked(self, row, col):
        """Open the note modal when the title cell is double-clicked."""
        if col != 0:
            return
        nid = int(self.item(row, col).data(Qt.ItemDataRole.UserRole))
        self.parent.parent.display_note_modal(nid)

    def add_btn_clicked(self, id):
        """Ask for a priority; on confirm, enqueue the note and refresh."""
        dialog = PriorityDialog(self, id)
        if not dialog.exec_():
            return
        prio = dialog.value
        update_priority_list(id, prio)
        self.parent.parent.refresh_queue_list()
        self.parent.refresh()
        tooltip(f'Moved in Queue, with priority <b>{dynamic_sched_to_str(prio)}</b>')
def _replace_charref(s):
    """Translate one regex match of an HTML character reference into text.

    Numeric refs (&#nnn; / &#xhh;) become the code point, with invalid or
    out-of-range values remapped or dropped; named refs use an exact lookup
    in SAFE_ENTITIES, then the longest matching prefix, else the raw text
    is restored with a leading '&'.
    """
    ref = s.group(1)
    if ref[0] == '#':
        # Numeric character reference: hex when marked with x/X, else decimal.
        if ref[1] in 'xX':
            num = int(ref[2:].rstrip(';'), 16)
        else:
            num = int(ref[1:].rstrip(';'))
        if num in _invalid_charrefs:
            return _invalid_charrefs[num]
        if (0xD800 <= num <= 0xDFFF) or (num > 0x10FFFF):
            # Surrogates and values beyond Unicode vanish entirely.
            return ''
        if num in _invalid_codepoints:
            return ''
        return chr(num)
    # Named reference: exact hit first, then longest matching prefix.
    if ref in SAFE_ENTITIES:
        return SAFE_ENTITIES[ref]
    for cut in range(len(ref) - 1, 1, -1):
        if ref[:cut] in SAFE_ENTITIES:
            return SAFE_ENTITIES[ref[:cut]] + ref[cut:]
    return '&' + ref
class OptionPlotoptionsSankeySonificationContexttracksMappingPitch(Options):
    """Generated config wrapper for sankey context-track pitch mapping.

    NOTE(review): each getter/setter pair shares one name with no
    ``@property`` / ``@<name>.setter`` decorators visible, so the second
    ``def`` replaces the first — decorators presumably lost in
    generation/extraction; confirm against the generator source.
    """
    def mapFunction(self):
        # Getter: custom mapping function (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property driving the pitch (default 'y').
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: highest note (default 'c6').
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        # Getter: lowest note (default 'c2').
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        # Getter: musical scale restricting the notes.
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        # Getter: scope the min/max map within (default 'yAxis').
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
class TestFilterMisc(util.ColorAsserts, unittest.TestCase):
    """Miscellaneous behavior checks for ``Color.filter``."""

    def test_srgb(self):
        """Sepia in srgb space yields a known value and differs from the default space."""
        sepia_srgb = Color('red').filter('sepia', space='srgb')
        self.assertColorEqual(sepia_srgb, Color('rgb(100.22 88.995 69.36)'))
        self.assertColorNotEqual(Color('red').filter('sepia'), Color('red').filter('sepia', space='srgb'))

    def test_bad_space(self):
        """An unsupported filter space combination raises ValueError."""
        with self.assertRaises(ValueError):
            Color('red').filter('sepia', space='display-p3', out_space='srgb')

    def test_bad_filter(self):
        """An unknown filter name raises ValueError."""
        with self.assertRaises(ValueError):
            Color('red').filter('bad')
def extractThat1VillainessCom(item):
    """Build a release message for a that1villainess.com feed item.

    Returns None for previews / items without chapter info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    rules = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in rules.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _lex_char_or_cs(text: str) -> Tuple[(str, str)]:
if (text == '\x1b'):
return ('', text)
i = 1
if text.startswith('\x1b['):
try:
i = len('\x1b[')
while (text[i] not in string.ascii_letters):
i += 1
i += 1
except IndexError:
return ('', text)
return (text[:i], text[i:]) |
def _check_aea_version(package_configuration: PackageConfiguration) -> None:
    """Raise AEAVersionError if the running AEA version does not satisfy
    the package's declared version specifiers."""
    specifiers = package_configuration.aea_version_specifiers
    if Version(__aea_version__) in specifiers:
        return
    raise AEAVersionError(package_configuration.public_id, specifiers)
def test_generate_predictable_pipeline_id():
    """The same (app, pipeline) pair must always yield the same id."""
    names = ('testspinnakerapplication', 'testspinnakerpipeline')
    assert generate_predictable_pipeline_id(*names) == generate_predictable_pipeline_id(*names)
# NOTE(review): this bare call looks like a decorator that lost its '@'
# (i.e. @_frequency(timedelta(days=1))) — confirm against the original.
_frequency(timedelta(days=1))
def fetch_production(zone_key: ZoneKey, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list[dict]:
    """Fetch per-fuel production (and hydro storage) events for a Spanish
    island zone from demanda.ree.es and return them as event dicts."""
    check_valid_parameters(zone_key, session, target_datetime)
    ses = session or Session()
    mapping = PRODUCTION_PARSE_MAPPING.copy()
    # Island-specific corrections to the generic fuel mapping.
    if zone_key.split('-')[1] == 'CN':
        mapping['gas'] = 'oil'
        mapping['cc'] = 'oil'
    if zone_key in ('ES-IB-ME', 'ES-IB-FO'):
        mapping['gas'] = 'oil'
    if zone_key == 'ES-IB-IZ':
        mapping['die'] = 'gas'
    breakdowns = ProductionBreakdownList(logger)
    for event in fetch_and_preprocess_data(zone_key, ses, logger, target_datetime):
        storage = StorageMix()
        if 'hid' in event:
            # Pumped hydro is reported as generation; negate for storage.
            storage.add_value('hydro', -event['hid'])
        production = ProductionMix()
        for key in event:
            if key in mapping:
                production.add_value(mapping[key], event[key])
        breakdowns.append(zoneKey=zone_key, datetime=event['ts'], production=production, storage=storage, source='demanda.ree.es')
    return breakdowns.to_list()
def _is_supported_gemm_or_bmm(gemm_or_bmm_op: Operator, slice_op: Operator) -> bool:
    """Return True when the gemm/bmm op can consume the slice op's output.

    Requires a gemm_rcr/bmm op whose first or second input is exactly the
    slice output tensor, and that tensor must be at least rank 2.
    """
    if not gemm_or_bmm_op._attrs['op'].startswith(('gemm_rcr', 'bmm')):
        return False
    sliced_tensor = slice_op._attrs['outputs'][0]
    op_inputs = gemm_or_bmm_op._attrs['inputs']
    # Identity check on purpose: the slice output itself must feed the op.
    feeds_op = (op_inputs[0] is sliced_tensor) or (op_inputs[1] is sliced_tensor)
    return feeds_op and (sliced_tensor._rank() >= 2)
class CanSaveMixin(HasTraits):
    """Mixin for objects that can be persisted to a file path."""

    # Path of the backing file ('' while unsaved).
    filepath = Str()
    # True when in-memory state differs from what is on disk.
    dirty = Bool(False)

    def __getstate__(self):
        """Strip transient persistence bookkeeping from the pickled state."""
        state = super().__getstate__()
        for transient_key in ('filepath', 'dirty'):
            del state[transient_key]
        return state

    def validate(self):
        """Return ``(ok, message)``; subclasses may veto a save here."""
        return (True, '')

    def save(self):
        """Persist the object; must be implemented by subclasses."""
        raise NotImplementedError
class desc_stats_reply(stats_reply):
    """OpenFlow OFPST_DESC stats reply (wire version 3 / OpenFlow 1.2).

    Generated-style message class carrying the switch description strings
    (manufacturer, hardware, software, serial number, datapath).
    NOTE(review): pack() joins str fragments (including NUL padding) and
    unpack() takes no self — this mirrors Python-2-era generated loxigen
    code; confirm bytes/str handling before use on Python 3.
    """
    version = 3  # OpenFlow wire version (1.2)
    type = 19  # OFPT_STATS_REPLY
    stats_type = 0  # OFPST_DESC
    def __init__(self, xid=None, flags=None, mfr_desc=None, hw_desc=None, sw_desc=None, serial_num=None, dp_desc=None):
        """Initialize; unspecified string fields default to '' and flags to 0
        (xid stays None until assigned)."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (mfr_desc != None):
            self.mfr_desc = mfr_desc
        else:
            self.mfr_desc = ''
        if (hw_desc != None):
            self.hw_desc = hw_desc
        else:
            self.hw_desc = ''
        if (sw_desc != None):
            self.sw_desc = sw_desc
        else:
            self.sw_desc = ''
        if (serial_num != None):
            self.serial_num = serial_num
        else:
            self.serial_num = ''
        if (dp_desc != None):
            self.dp_desc = dp_desc
        else:
            self.dp_desc = ''
        return
    def pack(self):
        """Serialize the message; the 16-bit length at offset 2 is patched in
        after all fragments are sized."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, backfilled below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4-byte pad after the stats header
        packed.append(struct.pack('!256s', self.mfr_desc))
        packed.append(struct.pack('!256s', self.hw_desc))
        packed.append(struct.pack('!256s', self.sw_desc))
        packed.append(struct.pack('!32s', self.serial_num))
        packed.append(struct.pack('!256s', self.dp_desc))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # backfill the real length
        return ''.join(packed)
    def unpack(reader):
        """Deserialize from ``reader``; asserts the fixed header fields match
        this message type. (No self: generated code wraps this elsewhere.)"""
        obj = desc_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 3)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 0)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.mfr_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.hw_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.sw_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.serial_num = reader.read('!32s')[0].rstrip('\x00')
        obj.dp_desc = reader.read('!256s')[0].rstrip('\x00')
        return obj
    def __eq__(self, other):
        """Field-by-field equality, requiring the exact same type."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.mfr_desc != other.mfr_desc):
            return False
        if (self.hw_desc != other.hw_desc):
            return False
        if (self.sw_desc != other.sw_desc):
            return False
        if (self.serial_num != other.serial_num):
            return False
        if (self.dp_desc != other.dp_desc):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable representation to pretty-printer ``q``."""
        q.text('desc_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('mfr_desc = ')
                q.pp(self.mfr_desc)
                q.text(',')
                q.breakable()
                q.text('hw_desc = ')
                q.pp(self.hw_desc)
                q.text(',')
                q.breakable()
                q.text('sw_desc = ')
                q.pp(self.sw_desc)
                q.text(',')
                q.breakable()
                q.text('serial_num = ')
                q.pp(self.serial_num)
                q.text(',')
                q.breakable()
                q.text('dp_desc = ')
                q.pp(self.dp_desc)
                q.breakable()
            q.text('}')
class FaucetUntaggedIPv4LACPMismatchTest(FaucetUntaggedIPv4LACPTest):
    """LACP test where each link gets a different actor system ID, so FAUCET
    must detect and log an actor system mismatch."""
    def test_untagged(self):
        """Create one bond per physical link with distinct MACs and wait for
        the 'actor system mismatch' line in the FAUCET log."""
        first_host = self.hosts_name_ordered()[0]
        orig_ip = first_host.IP()
        switch = self.first_switch()
        # One bond member interface per connection between host and switch.
        bond_members = [pair[0].name for pair in first_host.connectionsTo(switch)]
        bond_cmds = []
        for (i, bond_member) in enumerate(bond_members):
            bond = ('bond%u' % i)
            # Distinct MAC per bond (last octet i*3) => differing LACP actor
            # system IDs across links, which is the mismatch under test.
            bond_cmds.extend([('link set %s down' % bond_member), ('address flush dev %s' % bond_member), ('link add %s address 0e:00:00:00:00:%2.2x type bond mode 802.3ad lacp_rate fast miimon 100' % (bond, ((i * 2) + i))), ('addr add %s/24 dev %s' % (orig_ip, bond)), ('link set %s up' % bond), ('link set dev %s master %s' % (bond_member, bond))])
        first_host.run_ip_batch(bond_cmds)
        self.wait_until_matching_lines_from_faucet_log_files('.+actor system mismatch.+')
class OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Lowpass-filter mapping options (frequency and resonance sub-configs)."""

    def frequency(self) -> 'OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Sub-configuration mapping the lowpass cutoff frequency."""
        sub_cls = OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingLowpassFrequency
        return self._config_sub_data('frequency', sub_cls)

    def resonance(self) -> 'OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Sub-configuration mapping the lowpass resonance."""
        sub_cls = OptionPlotoptionsFunnel3dSonificationDefaultinstrumentoptionsMappingLowpassResonance
        return self._config_sub_data('resonance', sub_cls)
def test_delivery_receipt_user(session):
    """A 1:1 delivery-receipt delta parses into the expected MessagesDelivered event."""
    data = {'deliveredWatermarkTimestampMs': '', 'irisSeqId': '1111111', 'irisTags': ['DeltaDeliveryReceipt', 'is_from_iris_fanout'], 'messageIds': ['mid.$XYZ', 'mid.$ABC'], 'requestContext': {'apiArgs': {}}, 'threadKey': {'otherUserFbId': '1234'}, 'class': 'DeliveryReceipt'}
    thread = User(session=session, id='1234')
    delivered_at = datetime.datetime(2017, 7, 14, 2, 40, tzinfo=datetime.timezone.utc)
    expected = MessagesDelivered(
        author=thread,
        thread=thread,
        messages=[Message(thread=thread, id='mid.$XYZ'), Message(thread=thread, id='mid.$ABC')],
        at=delivered_at,
    )
    assert parse_delta(session, data) == expected
def extractAwildbardWordpressCom(item):
    """Build a release message for an awildbard.wordpress.com feed item.

    Returns None for previews / items without chapter info; matches first by
    tag, then by title prefix; returns False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Tag-based mapping takes precedence over title prefixes.
    for tagname, name, tl_type in [('Garudeina translations', 'Garudeina', 'translated')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    for titlecomponent, name, tl_type in [('Garudeina ', 'Garudeina', 'translated')]:
        if item['title'].lower().startswith(titlecomponent.lower()):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ShadowedLocalVariablePattern(DeclarationUtils, AbstractAstPattern):
    """AST pattern that reports local declarations shadowing outer-scope ones."""
    # Pattern metadata consumed by the scanner framework.
    name = 'Shadowed Local Variable'
    description = 'Reports local variable declarations that shadow declarations from outer scopes.'
    severity = Severity.MEDIUM
    tags = {}  # NOTE(review): empty dict literal — confirm a set was not intended
    def find_matches(self) -> Iterator[PatternMatch]:
        """Yield one violation per shadowing declaration in each contract."""
        ast_root = self.get_ast_root()
        ast = self.get_ast_module()
        for contract in ast_root.find_descendants_of_type(ast.ContractDefinition):
            conflicts = self.process_declarations(contract)
            for conflict in conflicts:
                (shadowed_var, new_var) = conflict
                (yield self.match_violation().with_info(MatchComment(f"{self.get_decl_kind(new_var).capitalize()} '{new_var.name}' shadows {self.get_decl_kind(shadowed_var)} from outer scope."), *self.ast_node_info(new_var)))
    def process_declarations(self, contract):
        """Walk the contract's scopes, yielding (shadowed_decl, new_decl) pairs.

        NOTE(review): the bare ``_decls.register`` statements below look like
        decorators that lost their '@' — and ``_decls`` is never defined in
        this view (likely a ``functools.singledispatch`` over ``add_decls``).
        As written these lines would raise NameError at runtime; confirm
        against the original source before relying on this code.
        """
        ast = self.get_ast_module()
        def add_decls(_, scope):
            # Fallback handler: nodes with no registered case contribute nothing.
            return []
        _decls.register
        def _(statement: ast.Statement, scope):
            # Generic statement: recurse into children within the same scope.
            for c in statement.children():
                (yield from add_decls(c, scope))
        _decls.register
        def _(params: ast.ParameterList, scope):
            # Parameter list: each parameter is a declaration in the scope.
            for c in params.parameters:
                (yield from add_decls(c, scope))
        _decls.register(ast.VariableDeclaration)
        def _(decl, scope):
            # A declaration shadows when its name already exists in the scope.
            if (decl.name == ''):
                return
            if (decl.name in scope):
                (yield (scope[decl.name], decl))
            scope[decl.name] = decl
        _decls.register
        def _(c: ast.ContractDefinition, scope):
            # Seed the scope with inherited members (most-derived last wins),
            # then walk every function-like member.
            bases = reversed(c.linearized_base_contracts)
            function_like = (ast.FunctionDefinition, ast.ModifierDefinition)
            for base in bases:
                base: ast.ContractDefinition = c.resolve_reference(base)
                new_decls = ((t.name, t) for t in base.find_children_of_type(self.named_node_types))
                scope.update(new_decls)
            for node in of_type[function_like](c.nodes):
                (yield from add_decls(node, scope))
        _decls.register
        def _(node: ast.FunctionDefinition, scope):
            # Functions get their own scope: params, returns, then body.
            function_scope = scope.copy()
            (yield from add_decls(node.parameters, function_scope))
            (yield from add_decls(node.return_parameters, function_scope))
            (yield from add_decls(node.body, function_scope))
        _decls.register
        def _(node: ast.ModifierDefinition, scope):
            # Modifiers: like functions but without return parameters.
            function_scope = scope.copy()
            (yield from add_decls(node.parameters, function_scope))
            (yield from add_decls(node.body, function_scope))
        _decls.register
        def _(node: ast.Block, scope):
            # Blocks introduce a nested copy of the enclosing scope.
            current_scope = scope.copy()
            for stmt in node.statements:
                (yield from add_decls(stmt, current_scope))
        _decls.register
        def _(node: ast.IfStatement, scope):
            # Each branch gets an independent scope copy.
            (yield from add_decls(node.true_body, scope.copy()))
            (yield from add_decls(node.false_body, scope.copy()))
        _decls.register
        def _(node: ast.ForStatement, scope):
            # The loop init declaration is visible inside the loop body.
            current_scope = scope.copy()
            (yield from add_decls(node.initialization_expression, current_scope))
            (yield from add_decls(node.body, current_scope))
        (yield from add_decls(contract, {}))
def transform_notebook(path: Path) -> str:
    """Convert the Jupyter notebook at ``path`` to an MDX document.

    Creates the asset folders, renders frontmatter/imports/buttons followed
    by every markdown and code cell, writes the result next to the assets
    folder, and returns the MDX string.
    """
    (filename, assets_folder) = create_folders(path)
    img_folder = assets_folder / 'img'
    plot_data_folder = assets_folder / 'plot_data'
    save_folder = assets_folder.joinpath('..').resolve()
    nb = load_notebook(path)
    nb_metadata = load_nb_metadata()
    # Assemble the document from its parts (join once instead of repeated +=).
    parts = [
        create_frontmatter(path, nb_metadata),
        create_imports(),
        create_buttons(nb_metadata, path.stem),
    ]
    for cell in nb['cells']:
        cell_type = cell['cell_type']
        if cell_type == 'markdown':
            parts.append(handle_markdown_cell(cell, img_folder, LIB_DIR))
        elif cell_type == 'code':
            parts.append(handle_code_cell(cell, plot_data_folder))
    mdx = ''.join(parts)
    # Bug fix: the output name previously used a literal placeholder string;
    # use the notebook-derived filename returned by create_folders().
    save_path = save_folder / f'{filename}.mdx'
    with save_path.open('w') as f:
        f.write(mdx)
    return mdx
def iter_fasta_seqs(source):
    """Yield ``(name, sequence)`` tuples parsed from FASTA input.

    Args:
        source: path to a FASTA file (optionally gzip-compressed), or a
            string containing FASTA-formatted text.

    Yields:
        (name, sequence): name is the header text up to the first tab;
        sequence is the concatenation of its lines with spaces removed.

    Raises:
        ValueError: if a header has no sequence lines.
        Exception: if sequence data appears before any header.
    """
    is_file = os.path.isfile(source)
    if is_file:
        if source.endswith('.gz'):
            import gzip
            # Bug fix: open in text mode ('rt') so lines are str, not bytes —
            # the str-based parsing below fails on bytes under Python 3.
            _source = gzip.open(source, 'rt')
        else:
            _source = open(source, 'r')
    else:
        _source = iter(source.split('\n'))
    seq_chunks = []
    seq_name = None
    try:
        for line in _source:
            line = line.strip()
            if line.startswith('#') or (not line):
                continue  # skip comments and blank lines
            if line.startswith('>'):
                # New record: flush the previous one first.
                if seq_name and (not seq_chunks):
                    raise ValueError('Error parsing fasta file. %s has no sequence' % seq_name)
                if seq_name:
                    yield (seq_name, ''.join(seq_chunks))
                seq_name = line[1:].split('\t')[0].strip()
                seq_chunks = []
            else:
                if seq_name is None:
                    raise Exception('Error reading sequences: Wrong format.')
                seq_chunks.append(line.replace(' ', ''))
    finally:
        # Bug fix: always close the file, even if parsing raises or the
        # consumer abandons the generator early.
        if is_file:
            _source.close()
    # Flush the final record.
    if seq_name and (not seq_chunks):
        raise ValueError('Error parsing fasta file. %s has no sequence' % seq_name)
    if seq_name:
        yield (seq_name, ''.join(seq_chunks))
class Migration(migrations.Migration):
    """Adds CustomForm/CustomField models and links events to custom forms."""
    # Requires the sites framework and the preceding manager migration.
    dependencies = [('sites', '0002_alter_domain_unique'), ('manager', '0028_reviewer')]
    # Auto-generated operations: create the two models, tweak Meta options,
    # wire up the FK relations, and enforce (form, order) uniqueness.
    operations = [migrations.CreateModel(name='CustomField', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('label', models.CharField(max_length=200, verbose_name='Label')), ('slug', models.SlugField(blank=True, default='', max_length=100, verbose_name='Slug')), ('field_type', models.IntegerField(choices=[(1, 'Single line text'), (2, 'Multi line text'), (3, 'Email'), (13, 'Number'), (14, 'URL'), (4, 'Check box'), (5, 'Check boxes'), (6, 'Drop down'), (7, 'Multi select'), (8, 'Radio buttons'), (9, 'File upload'), (10, 'Date'), (11, 'Date/time'), (15, 'Date of birth'), (12, 'Hidden')], verbose_name='Type')), ('required', models.BooleanField(default=True, verbose_name='Required')), ('visible', models.BooleanField(default=True, verbose_name='Visible')), ('choices', models.CharField(blank=True, help_text='Comma separated options where applicable. If an option itself contains commas, surround the option starting with the `character and ending with the ` character.', max_length=1000, verbose_name='Choices')), ('default', models.CharField(blank=True, max_length=2000, verbose_name='Default value')), ('placeholder_text', models.CharField(blank=True, max_length=100, null=True, verbose_name='Placeholder Text')), ('help_text', models.CharField(blank=True, max_length=100, verbose_name='Help text')), ('order', models.IntegerField(verbose_name='Order'))], options={'verbose_name': 'Custom Field', 'verbose_name_plural': 'Custom Fields', 'ordering': ['form', 'order']}), migrations.CreateModel(name='CustomForm', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(editable=False, max_length=100, unique=True, verbose_name='Slug')), ('intro', models.TextField(blank=True, verbose_name='Intro')), ('button_text', models.CharField(default='Submit', max_length=50, verbose_name='Button text')), ('response', models.TextField(blank=True, verbose_name='Response')), ('redirect_url', models.CharField(blank=True, help_text='An alternate URL to redirect to after form submission', max_length=200, null=True, verbose_name='Redirect url')), ('status', models.IntegerField(choices=[(1, 'Draft'), (2, 'Published')], default=2, verbose_name='Status')), ('publish_date', models.DateTimeField(blank=True, help_text="With published selected, won't be shown until this time", null=True, verbose_name='Published from')), ('expiry_date', models.DateTimeField(blank=True, help_text="With published selected, won't be shown after this time", null=True, verbose_name='Expires on')), ('login_required', models.BooleanField(default=False, help_text='If checked, only logged in users can view the form', verbose_name='Login required')), ('send_email', models.BooleanField(default=True, help_text='If checked, the person entering the form will be sent an email', verbose_name='Send email')), ('email_from', models.EmailField(blank=True, help_text='The address the email will be sent from', max_length=254, verbose_name='From address')), ('email_copies', models.CharField(blank=True, help_text='One or more email addresses, separated by commas', max_length=200, verbose_name='Send copies to')), ('email_subject', models.CharField(blank=True, max_length=200, verbose_name='Subject')), ('email_message', models.TextField(blank=True, verbose_name='Message')), ('sites', models.ManyToManyField(default=[1], related_name='manager_customform_forms', to='sites.Site'))], options={'verbose_name': 'Custom Form', 'verbose_name_plural': 'Custom Forms', 'ordering': ['title']}), migrations.AlterModelOptions(name='event', options={'ordering': ['name'], 'verbose_name': 'Event', 'verbose_name_plural': 'Events'}), migrations.AlterModelOptions(name='eventdate', options={'verbose_name': 'Event Date', 'verbose_name_plural': 'Event Dates'}), migrations.AddField(model_name='customfield', name='form', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fields', to='manager.CustomForm')), migrations.AddField(model_name='event', name='customForm', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='manager.CustomForm', verbose_name='Custom form')), migrations.AlterUniqueTogether(name='customfield', unique_together=set([('form', 'order')]))]
def parseResolveInfo(parcel: ParcelParser, parent: Field) -> None:
    """Parse an android.content.pm.ResolveInfo structure from *parcel*.

    Fields are read in the fixed serialization order used by ResolveInfo's
    Parcelable implementation; every parsed field is attached to *parent*.
    """
    # The first int selects which ComponentInfo subtype follows (1=activity,
    # 2=service, 3=provider); any other value means no component info.
    component_info_field = parcel.parse_field('componentInfoType', 'int32', parcel.readInt32, parent)
    component_info_type = component_info_field.content
    if (component_info_type == 1):
        parcel.parse_field('activityInfo', 'android.content.pm.ActivityInfo', functools.partial(parcel.readParcelable, 'android.content.pm.ActivityInfo'), parent)
    elif (component_info_type == 2):
        parcel.parse_field('serviceInfo', 'android.content.pm.ServiceInfo', functools.partial(parcel.readParcelable, 'android.content.pm.ServiceInfo'), parent)
    elif (component_info_type == 3):
        parcel.parse_field('providerInfo', 'android.content.pm.ProviderInfo', functools.partial(parcel.readParcelable, 'android.content.pm.ProviderInfo'), parent)
    # Non-zero marker means a serialized IntentFilter follows.
    filter_nullcheck_field = parcel.parse_field('filter-nullcheck', 'int32', parcel.readInt32, parent)
    if (filter_nullcheck_field.content != 0):
        parcel.parse_field('filter', 'IntentFilter', functools.partial(parcel.readParcelable, 'IntentFilter'), parent)
    parcel.parse_field('priority', 'int32', parcel.readInt32, parent)
    parcel.parse_field('preferredorder', 'int32', parcel.readInt32, parent)
    parcel.parse_field('match', 'int32', parcel.readInt32, parent)
    parcel.parse_field('specificIndex', 'int32', parcel.readInt32, parent)
    parcel.parse_field('labelRes', 'int32', parcel.readInt32, parent)
    parcel.parse_field('nonLocalizedLabel', 'android.content.pm.TextUtils', functools.partial(parcel.readParcelable, 'android.content.pm.TextUtils'), parent)
    # BUG FIX: 'icon' previously omitted the *parent* argument, detaching the
    # field from the tree unlike every other field parsed in this function.
    parcel.parse_field('icon', 'int32', parcel.readInt32, parent)
    parcel.parse_field('resolvePackageName', 'string', parcel.readString8, parent)
    parcel.parse_field('targetUserId', 'int32', parcel.readInt32, parent)
    parcel.parse_field('system', 'bool', parcel.readBool, parent)
    parcel.parse_field('noResourceId', 'bool', parcel.readBool, parent)
    parcel.parse_field('iconResourceId', 'int32', parcel.readInt32, parent)
    parcel.parse_field('handleAllWebDataURI', 'bool', parcel.readBool, parent)
    parcel.parse_field('isInstantAppAvailable', 'bool', parcel.readBool, parent)
class NodeUpdateView(APIView):
    """API view that performs a system update on a single compute node.

    The node must be in maintenance (OFFLINE) state; v2.x nodes are updated
    synchronously via a worker command, newer nodes via an async task.
    """
    LOCK = 'system_node_update:%s'
    dc_bound = False

    def __init__(self, request, hostname, data):
        super(NodeUpdateView, self).__init__(request)
        self.hostname = hostname
        self.data = data
        self.node = get_node(request, hostname)

    def _update_v2(self, version, key=None, cert=None):
        """Run the legacy (v2.x) synchronous update through the node worker."""
        from api.system.update.utils import process_update_reply
        node = self.node
        worker = node.worker(Q_FAST)
        logger.info('Running oldstyle (v2.x) node "%s" system update to version: "%s"', node, version)
        reply = worker_command('system_update', worker, version=version, key=key, cert=cert, timeout=600)
        if (reply is None):
            raise GatewayTimeout('Node worker is not responding')
        (result, error) = process_update_reply(reply, node, version)
        if error:
            response_class = FailureTaskResponse
        else:
            response_class = SuccessTaskResponse
        detail_dict = result.copy()
        detail_dict['version'] = version
        response = response_class(self.request, result, obj=node, msg=LOG_SYSTEM_UPDATE, dc_bound=False, detail_dict=detail_dict)
        if (response.status_code == 200):
            # Restart app services so the node picks up the newly installed code.
            ctrl = NodeServiceControl(node)
            for service in ctrl.app_services:
                ctrl.restart(service)
        return response

    def put(self):
        """Validate input, pick the update path, and start the update."""
        assert self.request.dc.is_default()
        ser = UpdateSerializer(self.request, data=self.data)
        if (not ser.is_valid()):
            return FailureTaskResponse(self.request, ser.errors, dc_bound=False)
        node = self.node
        version = ser.data['version']
        key = ser.data.get('key')
        cert = ser.data.get('cert')
        # Invalidate the cached system_version so the next read is fresh.
        del node.system_version
        node_version = node.system_version
        if (not (isinstance(node_version, text_type) and node_version)):
            raise NodeIsNotOperational('Node version information could not be retrieved')
        # Keep the part after the last ':' (presumably an edition/name prefix).
        node_version = node_version.split(':')[(- 1)]
        if ((version == ('v' + node_version)) and (not ser.data.get('force'))):
            raise PreconditionRequired('Node is already up-to-date')
        if (node.status != node.OFFLINE):
            raise NodeIsNotOperational('Unable to perform update on node that is not in maintenance state')
        if node_version.startswith('2.'):
            # Legacy nodes use the synchronous v2.x update path.
            return self._update_v2(version, key=key, cert=cert)
        worker = node.worker(Q_FAST)
        update_cmd = worker_command('system_update_command', worker, version=version, key=key, cert=cert, force=ser.data.get('force'), timeout=10)
        if (update_cmd is None):
            raise GatewayTimeout('Node worker is not responding')
        if (not isinstance(update_cmd, list)):
            # FIX: the original message was missing "not".
            raise PreconditionRequired('Node update command could not be retrieved')
        msg = LOG_SYSTEM_UPDATE
        _apiview_ = {'view': 'system_node_update', 'method': self.request.method, 'hostname': node.hostname, 'version': version}
        meta = {'apiview': _apiview_, 'msg': msg, 'node_uuid': node.uuid, 'output': {'returncode': 'returncode', 'stdout': 'message'}, 'check_returncode': True}
        lock = (self.LOCK % node.hostname)
        # Redirect stderr into stdout so the task output captures everything.
        cmd = ('%s 2>&1' % ' '.join(update_cmd))
        (tid, err) = execute(self.request, node.owner.id, cmd, meta=meta, lock=lock, queue=node.fast_queue, tg=TG_DC_UNBOUND)
        if err:
            return FailureTaskResponse(self.request, err, dc_bound=False)
        else:
            return TaskResponse(self.request, tid, msg=msg, obj=node, api_view=_apiview_, data=self.data, dc_bound=False, detail_dict=ser.detail_dict(force_full=True))
_dict
def _fill_transaction(transaction, block, transaction_index, is_pending, overrides=None):
    """Yield the (name, value) pairs of a fully populated transaction.

    Values come from *overrides* first, then from *transaction*, then from a
    hard-coded default.  Emits either dynamic-fee or legacy gas-price fields
    depending on which keys the transaction carries.
    """
    if overrides is None:
        overrides = {}

    def pick(key, default=None):
        # Precedence: explicit override > transaction value > default.
        return overrides.get(key, transaction.get(key, default))

    # Treat as dynamic-fee when any dynamic-fee param is present, or when the
    # transaction names neither fee style at all.
    has_dynamic_params = any(key in transaction for key in DYNAMIC_FEE_TRANSACTION_PARAMS)
    has_any_fee_params = any(
        key in transaction for key in (DYNAMIC_FEE_TRANSACTION_PARAMS + ('gas_price',))
    )
    is_dynamic_fee_transaction = has_dynamic_params or not has_any_fee_params

    if 'hash' in overrides:
        yield 'hash', overrides['hash']
    # NOTE: nonce never falls back to the transaction itself, only overrides.
    yield 'nonce', overrides.get('nonce', 0)
    yield 'from', pick('from')
    yield 'to', pick('to', b'')
    yield 'data', pick('data', b'')
    yield 'value', pick('value', 0)
    yield 'gas', pick('gas')
    yield 'r', pick('r', 12345)
    yield 's', pick('s', 67890)
    yield 'v', pick('v', 0)
    if is_dynamic_fee_transaction:
        yield 'max_fee_per_gas', pick('max_fee_per_gas')
        yield 'max_priority_fee_per_gas', pick('max_priority_fee_per_gas')
        yield from _yield_typed_transaction_fields(overrides, transaction)
    else:
        yield 'gas_price', pick('gas_price')
        if 'access_list' in transaction:
            yield from _yield_typed_transaction_fields(overrides, transaction)
def test_logger_without_stacktrace_config(logbook_logger, logbook_handler):
    """A warning logged with auto_log_stacks disabled carries no stacktrace."""
    logbook_handler.client.config.auto_log_stacks = False
    with logbook_handler.applicationbound():
        logbook_logger.warning('This is a test warning')
    captured = logbook_handler.client.events[ERROR][0]
    assert 'stacktrace' not in captured['log']
class ExpressionStatement(SimpleStatement):
    """A statement consisting of a bare expression (e.g. a call or assignment).

    Most properties simply delegate to the wrapped expression node.
    """

    # The wrapped expression node.
    expression: Expression

    def cfg(self, expression):
        # Control flow is exactly that of the wrapped expression.
        return expression.cfg

    def variables_post(self, expression):
        return expression.variables_post

    def changed_variables(self, expression):
        return expression.changed_variables

    def expression_value_used(self) -> bool:
        # FIX: the original return annotation was syntactically invalid
        # ("(Expression.expression_value_used expression)").  The method
        # always returns False, so annotate as bool.
        # TODO(review): confirm against the upstream definition.
        return False
def test_list_plots(fs: pyfakefs.fake_filesystem.FakeFilesystem) -> None:
    """list_plots must skip hidden files, temp files, and undersized plots."""
    fixtures = {
        '/t/plot-k32-0.plot': 108 * GB,
        '/t/plot-k32-1.plot': 108 * GB,
        '/t/.plot-k32-2.plot': 108 * GB,       # hidden: excluded
        '/t/plot-k32-3.plot.2.tmp': 108 * GB,  # temp suffix: excluded
        '/t/plot-k32-4.plot': 100 * GB,        # too small for k32: excluded
        '/t/plot-k32-5.plot': 108 * GB,
        '/t/plot-k33-6.plot': 108 * GB,        # presumably too small for k33: excluded
        '/t/plot-k33-7.plot': 216 * GB,
    }
    for path, size in fixtures.items():
        fs.create_file(path, st_size=size)
    expected = ['/t/plot-k32-0.plot', '/t/plot-k32-1.plot', '/t/plot-k32-5.plot', '/t/plot-k33-7.plot']
    assert plot_util.list_plots('/t/') == expected
class OptionPlotoptionsVariwideSonificationTracksMappingLowpassResonance(Options):
    """Mapping options for the lowpass-filter resonance of a sonification track.

    FIX: each accessor was defined twice (getter then setter) with no
    decorators, so the setter silently shadowed the getter.  Restored the
    @property / @<name>.setter pairs the accessor pattern clearly intends.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesTreemapSonificationTracksMappingTime(Options):
    """Mapping options for the time dimension of a treemap sonification track.

    FIX: each accessor was defined twice (getter then setter) with no
    decorators, so the setter silently shadowed the getter.  Restored the
    @property / @<name>.setter pairs the accessor pattern clearly intends.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class ExinwanEffect(GenericAction):
    """Exinwan effect: the target drops two cards or takes one damage."""
    card_usage = 'drop'

    def apply_action(self):
        game = self.game
        target = self.target
        if target.dead:
            return False
        # Let the target choose two cards to drop; refusing means damage.
        chosen = user_choose_cards(self, target, ('cards', 'showncards', 'equips'))
        if chosen:
            game.process_action(DropCards(target, target, chosen))
        else:
            game.process_action(Damage(None, target))
        return True

    def cond(self, cards):
        """Exactly two cards, none of which may be a skill."""
        if len(cards) != 2:
            return False
        from thb.cards.base import Skill
        return not any(isinstance(card, Skill) for card in cards)

    def is_valid(self):
        return not self.target.dead
def test_addAxisDescriptor():
    """addAxisDescriptor registers and returns a populated AxisDescriptor."""
    doc = DesignSpaceDocument()
    axis = doc.addAxisDescriptor(name='Weight', tag='wght', minimum=100, default=400, maximum=900)
    assert doc.axes[0] is axis
    assert isinstance(axis, AxisDescriptor)
    expected = {'name': 'Weight', 'tag': 'wght', 'minimum': 100, 'default': 400, 'maximum': 900}
    for attr, value in expected.items():
        assert getattr(axis, attr) == value
def test_parse_schema_accepts_nested_records_from_arrays():
    """A record defined inside one array may be referenced by name in another."""
    schema = {
        'type': 'record',
        'name': 'test_parse_schema_accepts_nested_records_from_arrays',
        'fields': [
            {
                'name': 'multiple',
                'type': {
                    'type': 'array',
                    'items': {
                        'type': 'record',
                        'name': 'Nested',
                        'fields': [{'name': 'text', 'type': 'string'}],
                    },
                },
            },
            {'name': 'single', 'type': {'type': 'array', 'items': 'Nested'}},
        ],
    }
    parsed_schema = fastavro.parse_schema(schema)
    # The second array refers to the already-defined record purely by name.
    assert parsed_schema['fields'][1]['type']['items'] == 'Nested'
def fetch_consumption(zone_key: str='IN-KA', session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> dict:
    """Fetch the latest electricity consumption for Karnataka (IN-KA).

    Returns a dict with zoneKey, datetime, consumption and source.
    Raises NotImplementedError for historical queries — the upstream page
    only exposes live data.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    zonekey.assert_zone_key(zone_key, 'IN-KA')
    # FIX: the original call was syntactically broken ("(zone_key, ' session)").
    # The mangled text suggests a URL literal preceded *session*.
    # TODO(review): confirm the exact source URL against the upstream parser.
    html = web.get_response_soup(zone_key, 'http://kptclsldc.in/StateGen.aspx', session)
    india_date_time = IN.read_datetime_from_span_id(html, 'Label6', 'DD/MM/YYYY HH:mm')
    demand_value = IN.read_value_from_span_id(html, 'Label5')
    data = {'zoneKey': zone_key, 'datetime': india_date_time.datetime, 'consumption': demand_value, 'source': 'kptclsldc.in'}
    return data
def _add_headers_to_environ(env, headers):
    """Merge *headers* (a mapping or an iterable of pairs) into a WSGI environ.

    Header names become HTTP_*-style keys (CONTENT_TYPE / CONTENT_LENGTH keep
    their bare form); repeated non-singleton headers are comma-joined.
    A default User-Agent is installed when none was provided.
    """
    if headers:
        try:
            pairs = headers.items()
        except AttributeError:
            # Already an iterable of (name, value) pairs.
            pairs = headers
        for name, value in pairs:
            key = name.upper().replace('-', '_')
            if key not in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
                key = 'HTTP_' + key
            value = '' if value is None else value.strip()
            if key not in env or name.lower() in SINGLETON_HEADERS:
                env[key] = value
            else:
                env[key] = env[key] + (',' + value)
    env.setdefault('HTTP_USER_AGENT', DEFAULT_UA)
def gnerate_all_users(cursor):
    """Insert a fixed roster of demo users and return the built user rows."""
    # (names, country, gender) groups, iterated in a fixed order so the
    # resulting insert order matches the original hand-written sequence.
    groups = [
        (['ZhangWei', 'LiQiang', 'ZhangSan', 'LiSi'], 'China', 'Male'),
        (['Hanmeimei', 'LiMeiMei', 'LiNa', 'ZhangLi', 'ZhangMing'], 'China', 'Female'),
        (['James', 'John', 'David', 'Richard'], 'US', 'Male'),
        (['Mary', 'Patricia', 'Sarah'], 'US', 'Female'),
        (['Ravi', 'Rajesh', 'Ajay', 'Arjun', 'Sanjay'], 'India', 'Male'),
        (['Priya', 'Sushma', 'Pooja', 'Swati'], 'India', 'Female'),
    ]
    users = []
    for names, country, gender in groups:
        users.extend(user_build(names, country, gender))
    insert_sql = (
        'INSERT INTO user (id, name, email, mobile, gender, birth, country, '
        'city, create_time, update_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
    )
    for user in users:
        cursor.execute(insert_sql, user)
    return users
class IFLChannel(abc.ABC):
    """Abstract communication channel between an FL server and its clients."""

    def __init__(self, **kwargs):
        init_self_cfg(self, component_class=__class__, config_class=FLChannelConfig, **kwargs)
        # Collect channel stats only when the config opts in to metric reporting.
        self.stats_collector = (ChannelStatsCollector() if self.cfg.report_communication_metrics else None)

    @classmethod
    def _set_defaults_in_cfg(cls, cfg):
        # FIX: restored @classmethod — the method is written against *cls*
        # but carried no decorator (presumably stripped during extraction).
        pass

    @abc.abstractmethod
    def server_to_client(self, message: Message) -> Message:
        """Transform a message sent from server to client."""
        pass

    @abc.abstractmethod
    def client_to_server(self, message: Message) -> Message:
        """Transform a message sent from client to server."""
        pass
(name='remove_similarities')
def remove_similarities(rated_model, object_id):
    """Drop stored similarities for one object of *rated_model* ('app.Model')."""
    from django.apps import apps
    from recommends.providers import recommendation_registry

    model_class = apps.get_model(*rated_model.split('.'))
    provider = recommendation_registry.get_provider_for_content(model_class)
    instance = model_class.objects.get(pk=object_id)
    provider.storage.remove_similarities(instance)
def _brief_loop(image: 'float64[:,:]', descriptors: 'uint8[:,:]', keypoints: 'intp[:,:]', pos0: Ai2, pos1: Ai2):
    """Fill the BRIEF descriptor matrix by comparing sampled intensity pairs.

    For each keypoint, each (pos0, pos1) offset pair contributes one bit:
    set when the first sampled pixel is darker than the second.
    """
    num_pairs = len(pos0)
    for k in range(len(keypoints)):
        kr, kc = keypoints[k]
        for p in range(num_pairs):
            pr0, pc0 = pos0[p]
            pr1, pc1 = pos1[p]
            descriptors[k, p] = image[kr + pr0, kc + pc0] < image[kr + pr1, kc + pc1]
def parse_elasticsearch_response(hits):
    """Flatten a city/state aggregation response into a list of result rows.

    Each row is an OrderedDict with keys 'city_name', 'state_code', 'hits'.
    Cities with no state sub-buckets yield one row with state_code=None and
    the city-level doc_count.
    """
    results = []
    for city_bucket in hits['aggregations']['cities']['buckets']:
        city_name = city_bucket['key']
        state_buckets = city_bucket['states']['buckets']
        if state_buckets:
            for state_bucket in state_buckets:
                results.append(OrderedDict([
                    ('city_name', city_name),
                    ('state_code', state_bucket['key']),
                    ('hits', state_bucket['doc_count']),
                ]))
        else:
            results.append(OrderedDict([
                ('city_name', city_name),
                ('state_code', None),
                ('hits', city_bucket['doc_count']),
            ]))
    return results
def test_not_codecov(cookies, tmp_path):
    """Baking with codecov disabled must omit every codecov config file."""
    with run_within_dir(tmp_path):
        baked = cookies.bake(extra_context={'codecov': 'n'})
        assert baked.exit_code == 0
        for relative in ('codecov.yaml', '.github/workflows/validate-codecov-config.yml'):
            assert not os.path.isfile(f'{baked.project_path}/{relative}')
class TestPCF2LiftMetadataCompactionStageService(IsolatedAsyncioTestCase):
    """Tests for the PCF2 Lift metadata-compaction stage service."""

    def setUp(self) -> None:
        self.mock_mpc_svc = MagicMock(spec=MPCService)
        self.mock_mpc_svc.onedocker_svc = MagicMock()
        # Every binary name maps to the same test config.
        onedocker_binary_config_map = defaultdict((lambda : OneDockerBinaryConfig(tmp_directory='/test_tmp_directory/', binary_version='latest', repository_path='test_path/')))
        self.stage_svc = PCF2LiftMetadataCompactionStageService(onedocker_binary_config_map, self.mock_mpc_svc)
        self.container_permission_id = 'test-container-permission'

    async def test_run_async_with_udp(self) -> None:
        """run_async must start containers with the expected launch arguments."""
        containers = [ContainerInstance(instance_id='test_container_id', status=ContainerInstanceStatus.STARTED)]
        self.mock_mpc_svc.start_containers.return_value = containers
        private_computation_instance = self._create_pc_instance()
        binary_name = 'private_lift/pcf2_lift_metadata_compaction'
        test_server_ips = [f'192.0.2.{i}' for i in range(private_computation_instance.infra_config.num_udp_containers)]
        self.mock_mpc_svc.convert_cmd_args_list.return_value = (binary_name, ['cmd_1', 'cmd_2'])
        (await self.stage_svc.run_async(private_computation_instance, NullCertificateProvider(), NullCertificateProvider(), '', '', test_server_ips))
        self.mock_mpc_svc.start_containers.assert_called_once_with(cmd_args_list=['cmd_1', 'cmd_2'], onedocker_svc=self.mock_mpc_svc.onedocker_svc, binary_version='latest', binary_name=binary_name, timeout=None, env_vars={'ONEDOCKER_REPOSITORY_PATH': 'test_path/'}, wait_for_containers_to_start_up=True, existing_containers=None, env_vars_list=None, opa_workflow_path=None, permission=ContainerPermissionConfig(self.container_permission_id))
        # The started containers must be recorded on the latest stage instance.
        self.assertEqual(containers, private_computation_instance.infra_config.instances[(- 1)].containers)
        self.assertEqual('PCF2_LIFT_METADATA_COMPACTION', private_computation_instance.infra_config.instances[(- 1)].stage_name)

    def test_get_game_args_with_udp(self) -> None:
        """get_game_args must emit one argument dict per UDP container."""
        private_computation_instance = self._create_pc_instance()
        base_run_name = ((private_computation_instance.infra_config.instance_id + '_') + GameNames.PCF2_LIFT_METADATA_COMPACTION.value)
        total_num_files = private_computation_instance.infra_config.num_secure_random_shards
        num_udp_containers = private_computation_instance.infra_config.num_udp_containers
        files_per_container = distribute_files_among_containers(total_num_files, num_udp_containers)
        test_game_args = [{'input_base_path': private_computation_instance.secure_random_sharder_output_base_path, 'output_global_params_base_path': f'{private_computation_instance.pcf2_lift_metadata_compaction_output_base_path}_global_params', 'output_secret_shares_base_path': f'{private_computation_instance.pcf2_lift_metadata_compaction_output_base_path}_secret_shares', 'file_start_index': sum(files_per_container[0:i]), 'num_files': files_per_container[i], 'concurrency': private_computation_instance.infra_config.mpc_compute_concurrency, 'num_conversions_per_user': private_computation_instance.product_config.common.padding_size, 'run_name': f'{base_run_name}_{i}', 'log_cost': True, 'use_tls': False, 'ca_cert_path': '', 'server_cert_path': '', 'private_key_path': '', 'pc_feature_flags': 'private_lift_unified_data_process', 'log_cost_s3_bucket': private_computation_instance.infra_config.log_cost_bucket} for i in range(num_udp_containers)]
        self.assertEqual(test_game_args, self.stage_svc.get_game_args(private_computation_instance, '', ''))

    def _create_pc_instance(self) -> PrivateComputationInstance:
        """Build a minimal PARTNER-side instance for the compaction stage."""
        infra_config: InfraConfig = InfraConfig(
            instance_id='test_instance_123',
            role=PrivateComputationRole.PARTNER,
            _stage_flow_cls_name='PrivateComputationPCF2LiftUDPStageFlow',
            status=PrivateComputationInstanceStatus.PCF2_LIFT_METADATA_COMPACTION_STARTED,
            # FIX: original read "status_update_ts=," (syntax error — missing
            # value).  Any fixed timestamp works for these tests.
            # TODO(review): confirm the value used upstream.
            status_update_ts=1600000000,
            instances=[],
            game_type=PrivateComputationGameType.LIFT,
            num_pid_containers=2,
            num_mpc_containers=4,
            num_files_per_mpc_container=NUM_NEW_SHARDS_PER_FILE,
            status_updates=[],
            pcs_features={PCSFeature.PRIVATE_LIFT_UNIFIED_DATA_PROCESS},
            log_cost_bucket='test_log_cost_bucket',
            container_permission_id=self.container_permission_id,
        )
        common: CommonProductConfig = CommonProductConfig(input_path='456', output_dir='789')
        product_config: ProductConfig = LiftConfig(common=common)
        return PrivateComputationInstance(infra_config=infra_config, product_config=product_config)
class OptionSeriesParetoSonificationTracksMappingTremoloDepth(Options):
    """Mapping options for tremolo depth of a Pareto-series sonification track.

    FIX: each accessor was defined twice (getter then setter) with no
    decorators, so the setter silently shadowed the getter.  Restored the
    @property / @<name>.setter pairs the accessor pattern clearly intends.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsVariablepieSonificationTracksMappingNoteduration(Options):
    """Mapping options for note duration of a variable-pie sonification track.

    FIX: each accessor was defined twice (getter then setter) with no
    decorators, so the setter silently shadowed the getter.  Restored the
    @property / @<name>.setter pairs the accessor pattern clearly intends.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
_traitsui
class TestDatetimeEditor(SimpleEditorTestMixin, unittest.TestCase):
    """Round-trip tests between datetime objects and their editor strings."""
    traitsui_name = 'DatetimeEditor'
    factory_name = 'datetime_editor'

    def test_str_to_obj_conversions(self):
        """The datetime<->string helpers must be mutual inverses."""
        # None maps to the empty string and back.
        self.assertEqual(_datetime_to_datetime_str(None), '')
        self.assertIsNone(_datetime_str_to_datetime(''))

        # A datetime survives a round trip through its string form.
        moment = datetime.datetime(2019, 1, 13)
        text = _datetime_to_datetime_str(moment)
        self.assertIsInstance(text, str)
        self.assertEqual(_datetime_str_to_datetime(text), moment)

        # An ISO string survives a round trip through its datetime form.
        iso_text = '2020-02-15T11:12:13'
        parsed = _datetime_str_to_datetime(iso_text)
        self.assertIsInstance(parsed, datetime.datetime)
        self.assertEqual(_datetime_to_datetime_str(parsed), iso_text)
def create_ofinput(filename, ast):
    """Build an ir.OFInput from the parsed AST of one input file.

    Walks top-level declarations: 'struct' entries become OFClass objects,
    'enum' entries become OFEnum objects, and 'metadata' version lines
    populate the set of supported wire versions.

    Raises:
        InputError: if a class declares more than one discriminator, a wire
            version is unrecognized, or no #version metadata was given.
    """
    ctx = FrontendCtx(set())
    ofinput = ir.OFInput(filename, wire_versions=set(), classes=[], enums=[])
    for decl_ast in ast:
        if (decl_ast[0] == 'struct'):
            # decl_ast layout: ('struct', name, params, superclass, member_asts)
            superclass = decl_ast[3]
            members = [create_member(m_ast, ctx) for m_ast in decl_ast[4]]
            discriminators = [m for m in members if isinstance(m, ir.OFDiscriminatorMember)]
            if (len(discriminators) > 1):
                raise InputError(('%s: Cannot support more than one discriminator by class - got %s' % (decl_ast[1], repr(discriminators))))
            # A class with a discriminator member is virtual (has subtypes).
            ofclass = ir.OFClass(name=decl_ast[1], members=members, superclass=superclass, virtual=(len(discriminators) > 0), params={param: value for (param, value) in decl_ast[2]})
            ofinput.classes.append(ofclass)
        if (decl_ast[0] == 'enum'):
            # decl_ast layout: ('enum', name, params, entry_asts); each entry
            # is (name, params, value).
            enum = ir.OFEnum(name=decl_ast[1], entries=[ir.OFEnumEntry(name=x[0], value=x[2], params={param: value for (param, value) in x[1]}) for x in decl_ast[3]], params={param: value for (param, value) in decl_ast[2]})
            ofinput.enums.append(enum)
        elif (decl_ast[0] == 'metadata'):
            if (decl_ast[1] == 'version'):
                if (decl_ast[2] == 'any'):
                    # 'any' expands to every supported wire version.
                    ofinput.wire_versions.update((v.wire_version for v in loxi_globals.OFVersions.all_supported))
                elif (int(decl_ast[2]) in loxi_globals.OFVersions.wire_version_map):
                    ofinput.wire_versions.add(int(decl_ast[2]))
                else:
                    raise InputError(('Unrecognized wire protocol version %r' % decl_ast[2]))
    if (not ofinput.wire_versions):
        raise InputError('Missing #version metadata')
    return ofinput
def store_ids_in_file(id_iter: List, file_name: str='temp_file', is_numeric: bool=True) -> Tuple[(Path, int)]:
    """Write one id per line to *file_name* and return (absolute path, count).

    Falsy entries are skipped.  When *is_numeric*, only the first digit run
    of each entry is kept (via the module-level `regex` helper); list entries
    are flattened one id per line and counted individually.
    """
    total_ids = 0
    file_path = Path(file_name)
    with open(str(file_path), 'w') as f:
        for id_string in id_iter:
            if (not id_string):
                continue
            total_ids += 1
            if is_numeric:
                # Keep only the matched digit run (e.g. "id_123" -> "123").
                id_characters = regex('\\d+', str(id_string)).group()
            elif isinstance(id_string, list):
                id_characters = '\n'.join([str(item) for item in id_string])
                # The entry was already counted once; add the remaining items.
                total_ids += (len(id_string) - 1)
            else:
                id_characters = id_string
            # FIX: was f.writelines(...), which only worked by accident here
            # (iterating the string character by character); write() is the intent.
            f.write('{}\n'.format(id_characters))
    return (file_path.resolve(), total_ids)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.