def findFirst2(path, fileName, level, searchAttributes, pktFlags=smb.SMB.FLAGS2_UNICODE, isSMB2=False):
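    """Resolve *fileName* under *path* (honouring wildcards) and build the list
    of directory-info structures requested by the SMB/SMB2 FIND *level*.
    Returns a (searchResult, searchCount, errorCode) tuple."""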
if (pktFlags & smb.SMB.FLAGS2_UNICODE):
encoding = 'utf-16le'
else:
encoding = 'ascii'
fileName = normalize_path(fileName)
pathName = os.path.join(path, fileName)
if (not isInFileJail(path, fileName)):
LOG.error('Path not in current working directory')
return ([], 0, STATUS_OBJECT_PATH_SYNTAX_BAD)
files = []
if ((pathName.find('*') == (- 1)) and (pathName.find('?') == (- 1))):
pattern = ''
else:
pattern = os.path.basename(pathName)
dirName = os.path.dirname(pathName)
if (pattern == '*'):
files.append(os.path.join(dirName, '.'))
files.append(os.path.join(dirName, '..'))
if (pattern != ''):
if (not os.path.exists(dirName)):
return (None, 0, STATUS_OBJECT_NAME_NOT_FOUND)
for file in os.listdir(dirName):
if fnmatch.fnmatch(file.lower(), pattern.lower()):
entry = os.path.join(dirName, file)
if os.path.isdir(entry):
if (searchAttributes & smb.ATTR_DIRECTORY):
files.append(entry)
else:
files.append(entry)
elif os.path.exists(pathName):
files.append(pathName)
searchResult = []
searchCount = len(files)
errorCode = STATUS_SUCCESS
for i in files:
if ((level == smb.SMB_FIND_FILE_BOTH_DIRECTORY_INFO) or (level == smb2.SMB2_FILE_BOTH_DIRECTORY_INFO)):
item = smb.SMBFindFileBothDirectoryInfo(flags=pktFlags)
elif ((level == smb.SMB_FIND_FILE_DIRECTORY_INFO) or (level == smb2.SMB2_FILE_DIRECTORY_INFO)):
item = smb.SMBFindFileDirectoryInfo(flags=pktFlags)
elif ((level == smb.SMB_FIND_FILE_FULL_DIRECTORY_INFO) or (level == smb2.SMB2_FULL_DIRECTORY_INFO)):
item = smb.SMBFindFileFullDirectoryInfo(flags=pktFlags)
elif (level == smb.SMB_FIND_INFO_STANDARD):
item = smb.SMBFindInfoStandard(flags=pktFlags)
elif ((level == smb.SMB_FIND_FILE_ID_FULL_DIRECTORY_INFO) or (level == smb2.SMB2_FILE_ID_FULL_DIRECTORY_INFO)):
item = smb.SMBFindFileIdFullDirectoryInfo(flags=pktFlags)
elif ((level == smb.SMB_FIND_FILE_ID_BOTH_DIRECTORY_INFO) or (level == smb2.SMB2_FILE_ID_BOTH_DIRECTORY_INFO)):
item = smb.SMBFindFileIdBothDirectoryInfo(flags=pktFlags)
elif ((level == smb.SMB_FIND_FILE_NAMES_INFO) or (level == smb2.SMB2_FILE_NAMES_INFO)):
item = smb.SMBFindFileNamesInfo(flags=pktFlags)
else:
LOG.error(('Wrong level %d!' % level))
return (searchResult, searchCount, STATUS_NOT_SUPPORTED)
(mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(i)
if os.path.isdir(i):
item['ExtFileAttributes'] = smb.ATTR_DIRECTORY
else:
item['ExtFileAttributes'] = (smb.ATTR_NORMAL | smb.ATTR_ARCHIVE)
item['FileName'] = os.path.basename(i).encode(encoding)
if (level in [smb.SMB_FIND_FILE_BOTH_DIRECTORY_INFO, smb2.SMB2_FILE_BOTH_DIRECTORY_INFO, smb.SMB_FIND_FILE_ID_BOTH_DIRECTORY_INFO, smb2.SMB2_FILE_ID_BOTH_DIRECTORY_INFO]):
item['EaSize'] = 0
item['EndOfFile'] = size
item['AllocationSize'] = size
item['CreationTime'] = getFileTime(ctime)
item['LastAccessTime'] = getFileTime(atime)
item['LastWriteTime'] = getFileTime(mtime)
item['LastChangeTime'] = getFileTime(mtime)
item['ShortName'] = ('\x00' * 24)
item['FileName'] = os.path.basename(i).encode(encoding)
padLen = ((8 - (len(item) % 8)) % 8)
item['NextEntryOffset'] = (len(item) + padLen)
elif (level in [smb.SMB_FIND_FILE_DIRECTORY_INFO, smb2.SMB2_FILE_DIRECTORY_INFO]):
item['EndOfFile'] = size
item['AllocationSize'] = size
item['CreationTime'] = getFileTime(ctime)
item['LastAccessTime'] = getFileTime(atime)
item['LastWriteTime'] = getFileTime(mtime)
item['LastChangeTime'] = getFileTime(mtime)
item['FileName'] = os.path.basename(i).encode(encoding)
padLen = ((8 - (len(item) % 8)) % 8)
item['NextEntryOffset'] = (len(item) + padLen)
elif (level in [smb.SMB_FIND_FILE_FULL_DIRECTORY_INFO, smb.SMB_FIND_FILE_ID_FULL_DIRECTORY_INFO, smb2.SMB2_FULL_DIRECTORY_INFO, smb2.SMB2_FILE_ID_FULL_DIRECTORY_INFO]):
item['EaSize'] = 0
item['EndOfFile'] = size
item['AllocationSize'] = size
item['CreationTime'] = getFileTime(ctime)
item['LastAccessTime'] = getFileTime(atime)
item['LastWriteTime'] = getFileTime(mtime)
item['LastChangeTime'] = getFileTime(mtime)
padLen = ((8 - (len(item) % 8)) % 8)
item['NextEntryOffset'] = (len(item) + padLen)
elif (level == smb.SMB_FIND_INFO_STANDARD):
item['EaSize'] = size
item['CreationDate'] = getSMBDate(ctime)
item['CreationTime'] = getSMBTime(ctime)
item['LastAccessDate'] = getSMBDate(atime)
item['LastAccessTime'] = getSMBTime(atime)
item['LastWriteDate'] = getSMBDate(mtime)
item['LastWriteTime'] = getSMBTime(mtime)
searchResult.append(item)
if (((level >= smb.SMB_FIND_FILE_DIRECTORY_INFO) or (isSMB2 is True)) and (searchCount > 0)):
searchResult[(- 1)]['NextEntryOffset'] = 0
    return (searchResult, searchCount, errorCode)
def keygen(get_keyring=get_keyring):
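    """Create an Ed25519 keypair for wheel signing, store the secret key in the
    default keyring, verify it can be read back, and trust the verifying key
    for all packages ('+')."""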
warn_signatures()
(WheelKeys, keyring) = get_keyring()
ed25519ll = signatures.get_ed25519ll()
wk = WheelKeys().load()
keypair = ed25519ll.crypto_sign_keypair()
vk = native(urlsafe_b64encode(keypair.vk))
sk = native(urlsafe_b64encode(keypair.sk))
kr = keyring.get_keyring()
kr.set_password('wheel', vk, sk)
print('Created Ed25519 keypair with vk={}'.format(vk))
print('in {!r}'.format(kr))
sk2 = kr.get_password('wheel', vk)
if (sk2 != sk):
raise WheelError('Keyring is broken. Could not retrieve secret key.')
print('Trusting {} to sign and verify all packages.'.format(vk))
wk.add_signer('+', vk)
wk.trust('+', vk)
    wk.save()
class OptionPlotoptionsCylinderSonificationContexttracksMappingRate(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestDiscordGetIdEmail:
    @pytest.mark.asyncio
async def test_success(self, get_respx_call_args):
request = respx.get(re.compile(f'^{PROFILE_ENDPOINT}')).mock(return_value=Response(200, json=profile_verified_email_response))
(user_id, user_email) = (await client.get_id_email('TOKEN'))
(_, headers, _) = (await get_respx_call_args(request))
assert (headers['Authorization'] == 'Bearer TOKEN')
assert (headers['Accept'] == 'application/json')
assert (user_id == '')
assert (user_email == '')
    @pytest.mark.asyncio
async def test_error(self):
respx.get(re.compile(f'^{PROFILE_ENDPOINT}')).mock(return_value=Response(400, json={'error': 'message'}))
with pytest.raises(GetIdEmailError) as excinfo:
(await client.get_id_email('TOKEN'))
assert isinstance(excinfo.value.args[0], dict)
assert (excinfo.value.args[0] == {'error': 'message'})
    @pytest.mark.asyncio
async def test_no_email(self):
respx.get(re.compile(f'^{PROFILE_ENDPOINT}$')).mock(return_value=Response(200, json=profile_no_email_response))
(user_id, user_email) = (await client.get_id_email('TOKEN'))
assert (user_id == '')
assert (user_email is None)
    @pytest.mark.asyncio
async def test_email_not_verified_error(self):
respx.get(re.compile(f'^{PROFILE_ENDPOINT}$')).mock(return_value=Response(200, json=profile_not_verified_email_response))
(user_id, user_email) = (await client.get_id_email('TOKEN'))
assert (user_id == '')
        assert (user_email is None)
class TestUslugifyCasedEncoded(util.MdCase):
extension = ['markdown.extensions.toc']
extension_configs = {'markdown.extensions.toc': {'slugify': slugs.uslugify_cased_encoded}}
def test_slug(self):
with pytest.warns(DeprecationWarning):
            self.check_markdown('# Testing cased unicode-slugs_headers Ωℑ with encoding', '<h1 id="Testing-cased-unicode-slugs_headers-%CE%A9%E2%84%91-with-encoding">Testing cased unicode-slugs_headers Ωℑ with encoding</h1>')
def run_subprocess_with_logging(command_line, header=None, level=logging.INFO, stdin=None, env=None, detach=False):
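    """Run *command_line* in a subprocess, log its combined stdout/stderr at
    *level*, and return the subprocess exit code."""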
logger = logging.getLogger(__name__)
logger.debug('Running subprocess [%s] with logging.', command_line)
command_line_args = shlex.split(command_line)
pre_exec = (os.setpgrp if detach else None)
if (header is not None):
logger.info(header)
with subprocess.Popen(command_line_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, env=env, stdin=(stdin if stdin else None), preexec_fn=pre_exec) as command_line_process:
(stdout, _) = command_line_process.communicate()
if stdout:
logger.log(level=level, msg=stdout)
logger.debug('Subprocess [%s] finished with return code [%s].', command_line, str(command_line_process.returncode))
    return command_line_process.returncode
def dataclass_from_dict(cls: type, src: typing.Dict[(str, typing.Any)]) -> typing.Any:
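    """Recursively construct an instance of the dataclass *cls* from *src*,
    descending into fields whose declared type is itself a dataclass."""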
field_types_lookup = {field.name: field.type for field in dataclasses.fields(cls)}
constructor_inputs = {}
for (field_name, value) in src.items():
if dataclasses.is_dataclass(field_types_lookup[field_name]):
constructor_inputs[field_name] = dataclass_from_dict(field_types_lookup[field_name], value)
else:
constructor_inputs[field_name] = value
    return cls(**constructor_inputs)
def extractJadeslipWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
if ('WATTT' in item['tags']):
return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class ConvBlock(nn.Module):
def __init__(self, in_planes, out_planes, norm='batch'):
super(ConvBlock, self).__init__()
self.norm = norm
self.conv1 = conv3x3(in_planes, int((out_planes / 2)), norm=norm)
self.conv2 = conv3x3(int((out_planes / 2)), int((out_planes / 4)), norm=norm)
self.conv3 = conv3x3(int((out_planes / 4)), int((out_planes / 4)), norm=norm)
if (norm == 'batch'):
self.bn1 = nn.BatchNorm2d(in_planes)
self.bn2 = nn.BatchNorm2d(int((out_planes / 2)))
self.bn3 = nn.BatchNorm2d(int((out_planes / 4)))
self.bn4 = nn.BatchNorm2d(in_planes)
elif (norm == 'group'):
self.bn1 = nn.GroupNorm(32, in_planes)
self.bn2 = nn.GroupNorm(32, int((out_planes / 2)))
self.bn3 = nn.GroupNorm(32, int((out_planes / 4)))
self.bn4 = nn.GroupNorm(32, in_planes)
if (in_planes != out_planes):
if (norm == 'weight'):
self.downsample = nn.Sequential(nn.ReLU(False), nn.utils.weight_norm(nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=1, bias=False)))
else:
self.downsample = nn.Sequential(self.bn4, nn.ReLU(False), nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=1, bias=False))
else:
self.downsample = None
def forward(self, x):
residual = x
if (self.norm == 'weight'):
out1 = self.conv1(F.relu(x, False))
out2 = self.conv2(F.relu(out1, False))
out3 = self.conv3(F.relu(out2, False))
else:
out1 = self.conv1(F.relu(self.bn1(x), False))
out2 = self.conv2(F.relu(self.bn2(out1), False))
out3 = self.conv3(F.relu(self.bn3(out2), False))
out3 = torch.cat([out1, out2, out3], 1)
if (self.downsample is not None):
residual = self.downsample(residual)
out3 += residual
        return out3
class FipaDialogue(BaseFipaDialogue):
__slots__ = ('_proposal', '_terms', '_counterparty_signature')
def __init__(self, dialogue_label: DialogueLabel, self_address: Address, role: Dialogue.Role, message_class: Type[FipaMessage]=FipaMessage) -> None:
BaseFipaDialogue.__init__(self, dialogue_label=dialogue_label, self_address=self_address, role=role, message_class=message_class)
self._proposal = None
self._terms = None
self._counterparty_signature = None
    @property
    def counterparty_signature(self) -> str:
if (self._counterparty_signature is None):
raise ValueError('counterparty_signature not set!')
return self._counterparty_signature
    @counterparty_signature.setter
def counterparty_signature(self, counterparty_signature: str) -> None:
enforce((self._counterparty_signature is None), 'counterparty_signature already set!')
self._counterparty_signature = counterparty_signature
    @property
    def proposal(self) -> Description:
if (self._proposal is None):
raise ValueError('Proposal not set!')
return self._proposal
    @proposal.setter
    def proposal(self, proposal: Description) -> None:
enforce((self._proposal is None), 'Proposal already set!')
self._proposal = proposal
    @property
    def terms(self) -> Terms:
if (self._terms is None):
raise ValueError('Terms not set!')
return self._terms
    @terms.setter
    def terms(self, terms: Terms) -> None:
enforce((self._terms is None), 'Terms already set!')
        self._terms = terms
def to_omnetpp(topology, path=None):
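    """Render *topology* as an OMNeT++ NED description using a mako template;
    write it to *path* if given, otherwise print it to stdout. Link capacities
    and delays are emitted only when valid units are present on the graph."""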
try:
from mako.template import Template
except ImportError:
raise ImportError('Cannot import mako.template module. Make sure mako is installed on this machine.')
set_delays = True
set_capacities = True
if ((not ('capacity_unit' in topology.graph)) or (not (topology.graph['capacity_unit'] in capacity_units))):
warn('Missing or invalid capacity unit attribute in the topology. The output file will be generated without link capacity attributes.')
set_capacities = False
if ((not ('delay_unit' in topology.graph)) or (not (topology.graph['delay_unit'] in time_units))):
warn('Missing or invalid delay unit attribute in the topology. The output file will be generated without link delay attributes.')
set_delays = False
template = Template(__TEMPLATE)
variables = {'topology': topology, 'set_capacities': set_capacities, 'set_delays': set_delays}
ned = template.render(**variables)
if path:
with open(path, 'w') as out:
out.write(ned)
else:
        print(ned)
class TestESP8266V1Image(BaseTestCase):
ELF = 'esp8266-nonosssdk20-iotdemo.elf'
BIN_LOAD = 'esp8266-nonosssdk20-iotdemo.elf-0x00000.bin'
BIN_IROM = 'esp8266-nonosssdk20-iotdemo.elf-0x10000.bin'
def setup_class(self):
super(TestESP8266V1Image, self).setup_class()
self.run_elf2image(self, 'esp8266', self.ELF, 1)
def teardown_class(self):
super(TestESP8266V1Image, self).teardown_class()
try_delete(self.BIN_LOAD)
try_delete(self.BIN_IROM)
def test_irom_bin(self):
with open(self.ELF, 'rb') as f:
e = ELFFile(f)
irom_section = e.get_section_by_name('.irom0.text')
assert (irom_section.header.sh_size == os.stat(self.BIN_IROM).st_size), 'IROM raw binary file should be same length as .irom0.text section'
def test_loaded_sections(self):
image = esptool.bin_image.LoadFirmwareImage('esp8266', self.BIN_LOAD)
assert (len(image.segments) == 2)
self.assertImageContainsSection(image, self.ELF, '.data')
self.assertImageContainsSection(image, self.ELF, '.text')
        self.assertImageDoesNotContainSection(image, self.ELF, '.rodata')
class Capability:
    def __init__(self, name='', namespace='', cap_type='', images=None, description='', nodes=None):
self.namespace = namespace
self.name = name
self.type = cap_type
self.images = (images if images else [])
self.description = description
self.nodes = (nodes if nodes else [])
def __repr__(self):
return ('<%s[%s/%s], with %d nodes>' % (self.__class__, self.namespace, self.name, len(self.nodes)))
def __str__(self):
if self.nodes:
return ('%s [%s]' % (self.__repr__(), ','.join([str(node) for node in self.nodes])))
        return self.__repr__()
class CcrClient(NamespacedClient):
    @_rewrite_parameters()
async def delete_auto_follow_pattern(self, *, name: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (name in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'name'")
__path = f'/_ccr/auto_follow/{_quote(name)}'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('DELETE', __path, params=__query, headers=__headers))
    @_rewrite_parameters(body_fields=('leader_index', 'max_outstanding_read_requests', 'max_outstanding_write_requests', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size', 'read_poll_timeout', 'remote_cluster'))
async def follow(self, *, index: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, leader_index: t.Optional[str]=None, max_outstanding_read_requests: t.Optional[int]=None, max_outstanding_write_requests: t.Optional[int]=None, max_read_request_operation_count: t.Optional[int]=None, max_read_request_size: t.Optional[str]=None, max_retry_delay: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, max_write_buffer_count: t.Optional[int]=None, max_write_buffer_size: t.Optional[str]=None, max_write_request_operation_count: t.Optional[int]=None, max_write_request_size: t.Optional[str]=None, pretty: t.Optional[bool]=None, read_poll_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, remote_cluster: t.Optional[str]=None, wait_for_active_shards: t.Optional[t.Union[(int, t.Union[("t.Literal['all', 'index-setting']", str)])]]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
if (index in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'index'")
__path = f'/{_quote(index)}/_ccr/follow'
__query: t.Dict[(str, t.Any)] = {}
__body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
if (wait_for_active_shards is not None):
__query['wait_for_active_shards'] = wait_for_active_shards
if (not __body):
if (leader_index is not None):
__body['leader_index'] = leader_index
if (max_outstanding_read_requests is not None):
__body['max_outstanding_read_requests'] = max_outstanding_read_requests
if (max_outstanding_write_requests is not None):
__body['max_outstanding_write_requests'] = max_outstanding_write_requests
if (max_read_request_operation_count is not None):
__body['max_read_request_operation_count'] = max_read_request_operation_count
if (max_read_request_size is not None):
__body['max_read_request_size'] = max_read_request_size
if (max_retry_delay is not None):
__body['max_retry_delay'] = max_retry_delay
if (max_write_buffer_count is not None):
__body['max_write_buffer_count'] = max_write_buffer_count
if (max_write_buffer_size is not None):
__body['max_write_buffer_size'] = max_write_buffer_size
if (max_write_request_operation_count is not None):
__body['max_write_request_operation_count'] = max_write_request_operation_count
if (max_write_request_size is not None):
__body['max_write_request_size'] = max_write_request_size
if (read_poll_timeout is not None):
__body['read_poll_timeout'] = read_poll_timeout
if (remote_cluster is not None):
__body['remote_cluster'] = remote_cluster
__headers = {'accept': 'application/json', 'content-type': 'application/json'}
return (await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body))
    @_rewrite_parameters()
async def follow_info(self, *, index: t.Union[(str, t.Sequence[str])], error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (index in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'index'")
__path = f'/{_quote(index)}/_ccr/info'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('GET', __path, params=__query, headers=__headers))
    @_rewrite_parameters()
async def follow_stats(self, *, index: t.Union[(str, t.Sequence[str])], error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (index in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'index'")
__path = f'/{_quote(index)}/_ccr/stats'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('GET', __path, params=__query, headers=__headers))
    @_rewrite_parameters(body_fields=('follower_cluster', 'follower_index', 'follower_index_uuid', 'leader_remote_cluster'))
async def forget_follower(self, *, index: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, follower_cluster: t.Optional[str]=None, follower_index: t.Optional[str]=None, follower_index_uuid: t.Optional[str]=None, human: t.Optional[bool]=None, leader_remote_cluster: t.Optional[str]=None, pretty: t.Optional[bool]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
if (index in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'index'")
__path = f'/{_quote(index)}/_ccr/forget_follower'
__query: t.Dict[(str, t.Any)] = {}
__body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
if (not __body):
if (follower_cluster is not None):
__body['follower_cluster'] = follower_cluster
if (follower_index is not None):
__body['follower_index'] = follower_index
if (follower_index_uuid is not None):
__body['follower_index_uuid'] = follower_index_uuid
if (leader_remote_cluster is not None):
__body['leader_remote_cluster'] = leader_remote_cluster
__headers = {'accept': 'application/json', 'content-type': 'application/json'}
return (await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body))
    @_rewrite_parameters()
async def get_auto_follow_pattern(self, *, name: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (name not in SKIP_IN_PATH):
__path = f'/_ccr/auto_follow/{_quote(name)}'
else:
__path = '/_ccr/auto_follow'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('GET', __path, params=__query, headers=__headers))
    @_rewrite_parameters()
async def pause_auto_follow_pattern(self, *, name: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (name in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'name'")
__path = f'/_ccr/auto_follow/{_quote(name)}/pause'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('POST', __path, params=__query, headers=__headers))
    @_rewrite_parameters()
async def pause_follow(self, *, index: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (index in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'index'")
__path = f'/{_quote(index)}/_ccr/pause_follow'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('POST', __path, params=__query, headers=__headers))
    @_rewrite_parameters(body_fields=('remote_cluster', 'follow_index_pattern', 'leader_index_exclusion_patterns', 'leader_index_patterns', 'max_outstanding_read_requests', 'max_outstanding_write_requests', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size', 'read_poll_timeout', 'settings'))
async def put_auto_follow_pattern(self, *, name: str, remote_cluster: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, follow_index_pattern: t.Optional[str]=None, human: t.Optional[bool]=None, leader_index_exclusion_patterns: t.Optional[t.Sequence[str]]=None, leader_index_patterns: t.Optional[t.Sequence[str]]=None, max_outstanding_read_requests: t.Optional[int]=None, max_outstanding_write_requests: t.Optional[int]=None, max_read_request_operation_count: t.Optional[int]=None, max_read_request_size: t.Optional[t.Union[(int, str)]]=None, max_retry_delay: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, max_write_buffer_count: t.Optional[int]=None, max_write_buffer_size: t.Optional[t.Union[(int, str)]]=None, max_write_request_operation_count: t.Optional[int]=None, max_write_request_size: t.Optional[t.Union[(int, str)]]=None, pretty: t.Optional[bool]=None, read_poll_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, settings: t.Optional[t.Mapping[(str, t.Any)]]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
if (name in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'name'")
if ((remote_cluster is None) and (body is None)):
raise ValueError("Empty value passed for parameter 'remote_cluster'")
__path = f'/_ccr/auto_follow/{_quote(name)}'
__query: t.Dict[(str, t.Any)] = {}
__body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
if (not __body):
if (remote_cluster is not None):
__body['remote_cluster'] = remote_cluster
if (follow_index_pattern is not None):
__body['follow_index_pattern'] = follow_index_pattern
if (leader_index_exclusion_patterns is not None):
__body['leader_index_exclusion_patterns'] = leader_index_exclusion_patterns
if (leader_index_patterns is not None):
__body['leader_index_patterns'] = leader_index_patterns
if (max_outstanding_read_requests is not None):
__body['max_outstanding_read_requests'] = max_outstanding_read_requests
if (max_outstanding_write_requests is not None):
__body['max_outstanding_write_requests'] = max_outstanding_write_requests
if (max_read_request_operation_count is not None):
__body['max_read_request_operation_count'] = max_read_request_operation_count
if (max_read_request_size is not None):
__body['max_read_request_size'] = max_read_request_size
if (max_retry_delay is not None):
__body['max_retry_delay'] = max_retry_delay
if (max_write_buffer_count is not None):
__body['max_write_buffer_count'] = max_write_buffer_count
if (max_write_buffer_size is not None):
__body['max_write_buffer_size'] = max_write_buffer_size
if (max_write_request_operation_count is not None):
__body['max_write_request_operation_count'] = max_write_request_operation_count
if (max_write_request_size is not None):
__body['max_write_request_size'] = max_write_request_size
if (read_poll_timeout is not None):
__body['read_poll_timeout'] = read_poll_timeout
if (settings is not None):
__body['settings'] = settings
__headers = {'accept': 'application/json', 'content-type': 'application/json'}
return (await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body))
    @_rewrite_parameters()
async def resume_auto_follow_pattern(self, *, name: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (name in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'name'")
__path = f'/_ccr/auto_follow/{_quote(name)}/resume'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('POST', __path, params=__query, headers=__headers))
    @_rewrite_parameters(body_fields=('max_outstanding_read_requests', 'max_outstanding_write_requests', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size', 'read_poll_timeout'))
async def resume_follow(self, *, index: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, max_outstanding_read_requests: t.Optional[int]=None, max_outstanding_write_requests: t.Optional[int]=None, max_read_request_operation_count: t.Optional[int]=None, max_read_request_size: t.Optional[str]=None, max_retry_delay: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, max_write_buffer_count: t.Optional[int]=None, max_write_buffer_size: t.Optional[str]=None, max_write_request_operation_count: t.Optional[int]=None, max_write_request_size: t.Optional[str]=None, pretty: t.Optional[bool]=None, read_poll_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
if (index in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'index'")
__path = f'/{_quote(index)}/_ccr/resume_follow'
__query: t.Dict[(str, t.Any)] = {}
__body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
if (not __body):
if (max_outstanding_read_requests is not None):
__body['max_outstanding_read_requests'] = max_outstanding_read_requests
if (max_outstanding_write_requests is not None):
__body['max_outstanding_write_requests'] = max_outstanding_write_requests
if (max_read_request_operation_count is not None):
__body['max_read_request_operation_count'] = max_read_request_operation_count
if (max_read_request_size is not None):
__body['max_read_request_size'] = max_read_request_size
if (max_retry_delay is not None):
__body['max_retry_delay'] = max_retry_delay
if (max_write_buffer_count is not None):
__body['max_write_buffer_count'] = max_write_buffer_count
if (max_write_buffer_size is not None):
__body['max_write_buffer_size'] = max_write_buffer_size
if (max_write_request_operation_count is not None):
__body['max_write_request_operation_count'] = max_write_request_operation_count
if (max_write_request_size is not None):
__body['max_write_request_size'] = max_write_request_size
if (read_poll_timeout is not None):
__body['read_poll_timeout'] = read_poll_timeout
if (not __body):
__body = None
__headers = {'accept': 'application/json'}
if (__body is not None):
__headers['content-type'] = 'application/json'
return (await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body))
    @_rewrite_parameters()
async def stats(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
__path = '/_ccr/stats'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
return (await self.perform_request('GET', __path, params=__query, headers=__headers))
    @_rewrite_parameters()
async def unfollow(self, *, index: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (index in SKIP_IN_PATH):
raise ValueError("Empty value passed for parameter 'index'")
__path = f'/{_quote(index)}/_ccr/unfollow'
__query: t.Dict[(str, t.Any)] = {}
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (human is not None):
__query['human'] = human
if (pretty is not None):
__query['pretty'] = pretty
__headers = {'accept': 'application/json'}
        return (await self.perform_request('POST', __path, params=__query, headers=__headers))
def _dequantize(obj):
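    """Return *obj* as a float32 tensor: plain tensors are upcast as needed,
    anything else is unwrapped via its value() accessor first."""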
if (obj is None):
return None
elif (type(obj) == torch.Tensor):
if (obj.dtype != torch.float32):
return obj.to(torch.float32)
else:
return obj
else:
resultTensor = obj.value()[0]
if (resultTensor.dtype != torch.float32):
return resultTensor.to(torch.float32)
else:
            return resultTensor
def store_providers(path):
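    """Render the accumulated public/private provider definitions into the
    settings template (between the Providers-* markers) and write it to *path*."""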
global settings
global public
global private
public1_string = ''
private1_string = ''
public1_predefined_string = ''
private1_predefined_string = ''
public2_string = ''
private2_string = ''
public2_predefined_string = ''
private2_predefined_string = ''
public_count = 0
private_count = 0
print(('Saving providers to %s' % path))
for p in public:
public_count += 1
item = u'\n <setting label="{name}" id="use_{id}" type="bool" default="{default}" />\n <setting id="{id}_alias" label="32077" type="text" default="" subsetting="true" visible="eq(-1,true)" />\n <setting id="{id}_contains" type="enum" label="32080" subsetting="true" lvalues="32081|32082|32083" visible="eq(-2,true)" />\n'.format(id=p['id'], name=p['name'], default=str(p['predefined']).lower())
if (p['title'][:1].lower() in char_range('0', 'm')):
if (not p['predefined']):
public1_string += item
else:
public1_predefined_string += item
elif (not p['predefined']):
public2_string += item
else:
public2_predefined_string += item
for p in private:
private_count += 1
auth = ''
if (('login_passkey' not in p) or (not p['login_passkey'])):
auth = '<setting id="{id}_username" label="32015" type="text" default="" subsetting="true" visible="eq(-1,true)" />\n <setting id="{id}_password" label="32016" type="text" default="" option="hidden" subsetting="true" visible="eq(-2,true)" />'.format(id=p['id'])
else:
auth = '<setting id="{id}_username" label="32015" type="text" default="" subsetting="true" visible="eq(-1,true)" />\n <setting id="{id}_passkey" label="32076" type="text" default="" option="hidden" subsetting="true" visible="eq(-2,true)" />'.format(id=p['id'])
item = u'\n <setting label="{name}" id="use_{id}" type="bool" default="{default}" />\n {auth}\n <setting id="{id}_alias" label="32077" type="text" default="" subsetting="true" visible="eq(-3,true)" />\n <setting id="{id}_contains" type="enum" label="32080" subsetting="true" lvalues="32081|32082|32083" visible="eq(-4,true)" />\n'.format(id=p['id'], name=p['name'], default=str(p['predefined']).lower(), auth=auth)
if (p['title'][:1].lower() in char_range('0', 'm')):
if (not p['predefined']):
private1_string += item
else:
private1_predefined_string += item
elif (not p['predefined']):
private2_string += item
else:
private2_predefined_string += item
try:
settings = re.sub('(<!-- Providers-Public-1-Begin -->).*?(<!-- Providers-Public-1-End -->)', ((('\\1\n' + public1_predefined_string) + public1_string) + ' \\2'), settings, flags=re.DOTALL)
settings = re.sub('(<!-- Providers-Public-2-Begin -->).*?(<!-- Providers-Public-2-End -->)', ((('\\1\n' + public2_predefined_string) + public2_string) + ' \\2'), settings, flags=re.DOTALL)
settings = re.sub('(<!-- Providers-Private-1-Begin -->).*?(<!-- Providers-Private-1-End -->)', ((('\\1\n' + private1_predefined_string) + private1_string) + ' \\2'), settings, flags=re.DOTALL)
settings = re.sub('(<!-- Providers-Private-2-Begin -->).*?(<!-- Providers-Private-2-End -->)', ((('\\1\n' + private2_predefined_string) + private2_string) + ' \\2'), settings, flags=re.DOTALL)
with open(path, 'w', encoding='utf-8') as file:
file.write(settings)
print(('Saved %d public, %d private providers to %s' % (public_count, private_count, path)))
except Exception as e:
        print(('Failed saving providers to %s: %s' % (path, repr(e))))
        print(traceback.format_exc())
class LoansViewSet(LoansMixin, LoansPaginationMixin, FabaOutlayMixin, ElasticsearchAccountDisasterBase):
endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/disaster/federal_account/loans.md'
agg_key = 'financial_accounts_by_award.treasury_account_id'
nested_nonzero_fields = {'obligation': 'transaction_obligated_amount', 'outlay': 'gross_outlay_amount_by_award_cpe'}
query_fields = ['federal_account_symbol', 'federal_account_symbol.contains', 'federal_account_title', 'federal_account_title.contains', 'treasury_account_symbol', 'treasury_account_symbol.contains', 'treasury_account_title', 'treasury_account_title.contains']
top_hits_fields = ['financial_accounts_by_award.federal_account_symbol', 'financial_accounts_by_award.federal_account_title', 'financial_accounts_by_award.treasury_account_symbol', 'financial_accounts_by_award.treasury_account_title', 'financial_accounts_by_award.federal_account_id']
    @cache_response()
def post(self, request):
self.filters.update({'award_type_codes': ['07', '08']})
self.has_children = True
return self.perform_elasticsearch_search(loans=True)
def build_elasticsearch_result(self, info_buckets: List[dict]) -> List[dict]:
temp_results = {}
child_results = []
for bucket in info_buckets:
child = self._build_child_json_result(bucket)
child_results.append(child)
for child in child_results:
result = self._build_json_result(child)
child.pop('parent_data')
if (result['id'] in temp_results.keys()):
temp_results[result['id']] = {'id': int(result['id']), 'code': result['code'], 'description': result['description'], 'award_count': (temp_results[result['id']]['award_count'] + result['award_count']), 'obligation': (temp_results[result['id']]['obligation'] + result['obligation']), 'outlay': (temp_results[result['id']]['outlay'] + result['outlay']), 'children': (temp_results[result['id']]['children'] + result['children']), 'face_value_of_loan': (temp_results[result['id']]['face_value_of_loan'] + result['face_value_of_loan'])}
else:
temp_results[result['id']] = result
results = [x for x in temp_results.values()]
return results
def _build_json_result(self, child):
return {'id': child['parent_data'][2], 'code': child['parent_data'][1], 'description': child['parent_data'][0], 'award_count': child['award_count'], 'obligation': child['obligation'], 'outlay': child['outlay'], 'children': [child], 'face_value_of_loan': child['face_value_of_loan']}
def _build_child_json_result(self, bucket: dict):
return {'id': int(bucket['key']), 'code': bucket['dim_metadata']['hits']['hits'][0]['_source']['treasury_account_symbol'], 'description': bucket['dim_metadata']['hits']['hits'][0]['_source']['treasury_account_title'], 'award_count': int(bucket['count_awards_by_dim']['award_count']['value']), **{key: Decimal(bucket.get(f'sum_{val}', {'value': 0})['value']) for (key, val) in self.nested_nonzero_fields.items()}, 'face_value_of_loan': bucket['count_awards_by_dim']['sum_loan_value']['value'], 'parent_data': [bucket['dim_metadata']['hits']['hits'][0]['_source']['federal_account_title'], bucket['dim_metadata']['hits']['hits'][0]['_source']['federal_account_symbol'], bucket['dim_metadata']['hits']['hits'][0]['_source']['federal_account_id']]}
    @property
    def queryset(self):
query = self.construct_loan_queryset('treasury_account__treasury_account_identifier', TreasuryAppropriationAccount, 'treasury_account_identifier')
annotations = {'fa_code': F('federal_account__federal_account_code'), 'award_count': query.award_count_column, 'description': F('account_title'), 'code': F('tas_rendering_label'), 'id': F('treasury_account_identifier'), 'fa_description': F('federal_account__account_title'), 'fa_id': F('federal_account_id'), 'obligation': query.obligation_column, 'outlay': query.outlay_column, 'total_budgetary_resources': query.face_value_of_loan_column}
        return query.queryset.annotate(**annotations).values(*annotations)
@pytest.mark.parametrize('uri_template', ['/{id:int(2)}', '/{id:int(min=124)}', '/{id:int(num_digits=3, max=100)}'])
def test_int_converter_rejections(client, uri_template):
resource1 = IDResource()
client.app.add_route(uri_template, resource1)
result = client.simulate_get('/123')
assert (result.status_code == 404)
    assert (not resource1.called)
def create_tree_structure(df, groups, cols, aggregations):
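    """Fold the rows of *df* into a nested dict keyed by the *groups* fields,
    accumulating per-node counts/totals and per-leaf aggregation stats for
    every column in *cols*."""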
results = {}
for (_, row) in df.iterrows():
stack = []
result = results
for field in groups:
value = row[field]
stack.append((field, value))
if (field not in result):
result[field] = {'total': 0}
result = result[field]
if (value not in result):
result[value] = {'count': 0, 'ratio': 0}
result = result[value]
if (field == groups[(- 1)]):
result['stats'] = {}
for c in cols:
result['stats'][c] = {}
for a in aggregations:
result['stats'][c][a] = row[f'{c}_{a}']
stack_r = results
for s in stack:
(field, value) = s
count = result['stats'][cols[0]]['count']
total = stack_r[field]['total']
stack_r[field]['total'] = (total + count)
stack_r[field][value]['count'] = (stack_r[field][value]['count'] + count)
stack_r = stack_r[field][value]
stack = []
    return results
def register_webhooks(shopify_url: str, password: str) -> List[Webhook]:
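    """Drop any existing webhooks, then register one webhook per topic in
    WEBHOOK_EVENTS against the Shopify store; invalid webhooks are logged and
    the successfully created ones are returned."""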
new_webhooks = []
unregister_webhooks(shopify_url, password)
with Session.temp(shopify_url, API_VERSION, password):
for topic in WEBHOOK_EVENTS:
webhook = Webhook.create({'topic': topic, 'address': get_callback_url(), 'format': 'json'})
if webhook.is_valid():
new_webhooks.append(webhook)
else:
create_shopify_log(status='Error', response_data=webhook.to_dict(), exception=webhook.errors.full_messages())
    return new_webhooks
def get_registry_query_or_enum_key_extra_details(metadata, event, extra_detail_io, details_info):
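    """Populate event.details for a RegQueryKey/RegEnumKey event, decoding the
    extra-detail buffer according to the registry key information class."""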
event.category = 'Read'
key_information_class = RegistryKeyInformationClass(details_info['information_class'])
if (event.operation == RegistryOperation.RegEnumKey.name):
event.details['Index'] = details_info['index']
elif (event.operation == RegistryOperation.RegQueryKey.name):
event.details['Query'] = key_information_class.name
if (not extra_detail_io):
event.details['Length'] = details_info['length']
return
if (key_information_class == RegistryKeyInformationClass.Name):
name_size = read_u32(extra_detail_io)
event.details['Name'] = read_utf16(extra_detail_io, name_size)
elif (key_information_class == RegistryKeyInformationClass.HandleTags):
event.details['HandleTags'] = read_u32(extra_detail_io)
elif (key_information_class == RegistryKeyInformationClass.Flags):
event.details['UserFlags'] = read_u32(extra_detail_io)
elif (key_information_class == RegistryKeyInformationClass.Cached):
event.details['LastWriteTime'] = read_filetime(extra_detail_io)
event.details['TitleIndex'] = read_u32(extra_detail_io)
event.details['SubKeys'] = read_u32(extra_detail_io)
event.details['MaxNameLen'] = read_u32(extra_detail_io)
event.details['Values'] = read_u32(extra_detail_io)
event.details['MaxValueNameLen'] = read_u32(extra_detail_io)
event.details['MaxValueDataLen'] = read_u32(extra_detail_io)
elif (key_information_class == RegistryKeyInformationClass.Basic):
event.details['LastWriteTime'] = read_filetime(extra_detail_io)
event.details['TitleIndex'] = read_u32(extra_detail_io)
name_size = read_u32(extra_detail_io)
event.details['Name'] = read_utf16(extra_detail_io, name_size)
elif (key_information_class == RegistryKeyInformationClass.Full):
event.details['LastWriteTime'] = read_filetime(extra_detail_io)
event.details['TitleIndex'] = read_u32(extra_detail_io)
event.details['ClassOffset'] = read_u32(extra_detail_io)
event.details['ClassLength'] = read_u32(extra_detail_io)
event.details['SubKeys'] = read_u32(extra_detail_io)
event.details['MaxNameLen'] = read_u32(extra_detail_io)
event.details['MaxClassLen'] = read_u32(extra_detail_io)
event.details['Values'] = read_u32(extra_detail_io)
event.details['MaxValueNameLen'] = read_u32(extra_detail_io)
event.details['MaxValueDataLen'] = read_u32(extra_detail_io)
elif (key_information_class == RegistryKeyInformationClass.Node):
event.details['LastWriteTime'] = read_filetime(extra_detail_io)
event.details['TitleIndex'] = read_u32(extra_detail_io)
event.details['ClassOffset'] = read_u32(extra_detail_io)
event.details['ClassLength'] = read_u32(extra_detail_io)
name_size = read_u32(extra_detail_io)
        event.details['Name'] = read_utf16(extra_detail_io, name_size)
def print_tree(node: LN, results: Capture=None, filename: Filename=None, indent: int=0, recurse: int=(- 1)):
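    """Pretty-print a lib2to3 syntax tree with click styling (down to *recurse*
    levels), then dump any captured match results except the 'node' entry."""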
filename = (filename or Filename(''))
tab = (INDENT_STR * indent)
if (filename and (indent == 0)):
click.secho(filename, fg='red', bold=True)
if isinstance(node, Leaf):
click.echo((click.style(tab, fg='black', bold=True) + click.style(f'[{tok_name[node.type]}] {repr(node.prefix)} {repr(node.value)}', fg='yellow')))
else:
click.echo((click.style(tab, fg='black', bold=True) + click.style(f'[{type_repr(node.type)}] {repr(node.prefix)}', fg='blue')))
if node.children:
if recurse:
for child in node.children:
print_tree(child, indent=(indent + 1), recurse=(recurse - 1))
else:
click.echo(((INDENT_STR * (indent + 1)) + '...'))
if (results is None):
return
for key in results:
if (key == 'node'):
continue
value = results[key]
if isinstance(value, (Leaf, Node)):
click.secho(f'results[{repr(key)}] =', fg='red')
print_tree(value, indent=1, recurse=1)
else:
            click.secho(f'results[{repr(key)}] = {value}', fg='red')
class TestNeedlemanWunschDecoder(unittest.TestCase):
def setUp(self):
if torch.cuda.is_available():
cuda_device = torch.device('cuda')
torch.manual_seed(2)
(B, S, N, M) = (3, 3, 5, 5)
self.theta = torch.rand(B, N, M, requires_grad=True, dtype=torch.float32, device=cuda_device)
self.Ztheta = torch.rand(B, N, M, requires_grad=True, dtype=torch.float32, device=cuda_device)
self.A = ((- 1.0) * torch.ones_like(self.theta, dtype=torch.float32, device=cuda_device))
(self.B, self.S, self.N, self.M) = (B, S, N, M)
self.operator = 'softmax'
    @unittest.skipUnless(torch.cuda.is_available(), 'No GPU was detected')
def test_grad_needlemanwunsch_function(self):
needle = NeedlemanWunschDecoder(self.operator)
(theta, A) = (self.theta, self.A)
theta.requires_grad_()
gradcheck(needle, (theta, A), eps=0.1, atol=0.1, rtol=0.1)
    @unittest.skipUnless(torch.cuda.is_available(), 'No GPU was detected')
def test_hessian_needlemanwunsch_function(self):
needle = NeedlemanWunschDecoder(self.operator)
inputs = (self.theta, self.A)
gradgradcheck(needle, inputs, eps=0.1, atol=0.1, rtol=0.1)
    @unittest.skipUnless(torch.cuda.is_available(), 'No GPU was detected')
def test_decoding(self):
theta = torch.tensor(make_data().astype(np.float32), device=self.theta.device).unsqueeze(0)
theta.requires_grad_()
A = (0.1 * torch.ones_like(theta, dtype=torch.float32, device=self.theta.device))
needle = NeedlemanWunschDecoder(self.operator)
v = needle(theta, A)
v.backward()
decoded = needle.traceback(theta.grad.squeeze())
decoded = [(x[0], x[1]) for x in decoded]
states = [(0, 0), (1, 0), (2, 0), (3, 1), (4, 2), (4, 3)]
self.assertListEqual(states, decoded)
    @unittest.skipUnless(torch.cuda.is_available(), 'No GPU was detected')
def test_decoding2(self):
X = 'HECDRKTCDESFSTKGNLRVHKLGH'
Y = 'LKCSGCGKNFKSQYAYKRHEQTH'
needle = NeedlemanWunschDecoder(self.operator)
dm = torch.Tensor(np.loadtxt(get_data_path('dm.txt')))
decoded = needle.traceback(dm)
(pred_x, pred_y, pred_states) = list(zip(*decoded))
        states2alignment(np.array(pred_states), X, Y)
def test_comp_interface():
string = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
file_path = ((test_dir / 'subdir') / 'test_generic.f90')
string += comp_request(file_path, 14, 10)
(errcode, results) = run_request(string, ['--use_signature_help'])
assert (errcode == 0)
exp_results = ([4, 'my_gen', 'SUBROUTINE my_gen(self, a, b)', 'my_gen(${1:self}, ${2:a}, ${3:b})'],)
assert (len(exp_results) == (len(results) - 1))
for (i, ref) in enumerate(exp_results):
        validate_comp(results[(i + 1)], ref)
def extractVrumjaCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('supreme lord', 'supreme lord', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class MibScalarInstance(ManagedMibObject):
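    """Scalar MIB variable instance: binds a concrete value (*syntax*) to one
    OID and implements the read*/write* callbacks of the SNMP instrumentation."""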
def __init__(self, typeName, instId, syntax):
ManagedMibObject.__init__(self, (typeName + instId), syntax)
self.typeName = typeName
self.instId = instId
def getValue(self, name, **context):
((debug.logger & debug.FLAG_INS) and debug.logger(('getValue: returning %r for %s' % (self.syntax, self.name))))
return self.syntax.clone()
def setValue(self, value, name, **context):
if (value is None):
value = univ.noValue
try:
if hasattr(self.syntax, 'setValue'):
return self.syntax.setValue(value)
else:
return self.syntax.clone(value)
except PyAsn1Error as exc:
((debug.logger & debug.FLAG_INS) and debug.logger(('setValue: %s=%r failed with traceback %s' % (self.name, value, traceback.format_exception(*sys.exc_info())))))
if isinstance(exc, error.TableRowManagement):
raise exc
else:
raise error.WrongValueError(name=name, idx=context.get('idx'), msg=exc)
def getBranch(self, name, **context):
try:
return ManagedMibObject.getBranch(self, name, **context)
except (error.NoSuchInstanceError, error.NoSuchObjectError):
raise error.NoSuchInstanceError(name=name, idx=context.get('idx'))
def getNextBranch(self, name, **context):
try:
return ManagedMibObject.getNextBranch(self, name, **context)
except (error.NoSuchInstanceError, error.NoSuchObjectError):
raise error.NoSuchInstanceError(name=name, idx=context.get('idx'))
def getNode(self, name, **context):
if (name == self.name):
return self
raise error.NoSuchInstanceError(name=name, idx=context.get('idx'))
def getNextNode(self, name, **context):
raise error.NoSuchInstanceError(name=name, idx=context.get('idx'))
def readTest(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: readTest(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
if ((name != self.name) or (not self.syntax.isValue)):
exc = error.NoSuchInstanceError(name=name, idx=context.get('idx'))
cbFun(varBind, **dict(context, error=exc))
return
cbFun((self.name, self.syntax), **context)
def readGet(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: readGet(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
if ((name != self.name) or (not self.syntax.isValue)):
exc = error.NoSuchInstanceError(name=name, idx=context.get('idx'))
cbFun(varBind, **dict(context, error=exc))
return
cbFun((self.name, self.getValue(name, **context)), **context)
def readTestNext(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: readTestNext(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
if ((name >= self.name) or (not self.syntax.isValue)):
nextName = context.get('nextName')
if nextName:
varBind = (nextName, exval.noSuchInstance)
else:
varBind = (name, exval.endOfMibView)
cbFun(varBind, **context)
return
cbFun((self.name, self.syntax), **context)
def readGetNext(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: readGetNext(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
if ((name >= self.name) or (not self.syntax.isValue)):
nextName = context.get('nextName')
if nextName:
varBind = (nextName, exval.noSuchInstance)
else:
varBind = (name, exval.endOfMibView)
cbFun(varBind, **context)
return
cbFun((self.name, self.getValue(self.name, **context)), **context)
def writeTest(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeTest(%s, %r)' % (self, name, val))))
cbFun = context['cbFun']
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
if (name != self.name):
exc = error.NoSuchInstanceError(name=name, idx=context.get('idx'))
            cbFun(varBind, **dict(context, error=exc))
            return
try:
instances[self.ST_CREATE][idx] = self.setValue(val, name, **context)
except error.MibOperationError as exc:
if ('syntax' in exc):
instances[self.ST_CREATE][idx] = exc['syntax']
cbFun(varBind, **dict(context, error=exc))
return
else:
exc = error.WrongValueError(name=name, idx=context.get('idx'), msg=exc)
cbFun(varBind, **dict(context, error=exc))
return
cbFun((self.name, self.syntax), **context)
def writeCommit(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeCommit(%s, %r)' % (self, name, val))))
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
(instances[self.ST_CREATE][((- idx) - 1)], self.syntax) = (self.syntax, instances[self.ST_CREATE][idx])
cbFun = context['cbFun']
cbFun((self.name, self.syntax), **context)
def writeCleanup(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeCleanup(%s, %r)' % (self, name, val))))
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
self.branchVersionId += 1
instances[self.ST_CREATE].pop(idx, None)
instances[self.ST_CREATE].pop(((- idx) - 1), None)
cbFun = context['cbFun']
cbFun((self.name, self.syntax), **context)
def writeUndo(self, varBind, **context):
(name, val) = varBind
((debug.logger & debug.FLAG_INS) and debug.logger(('%s: writeUndo(%s, %r)' % (self, name, val))))
instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}})
idx = context['idx']
self.syntax = instances[self.ST_CREATE].pop(((- idx) - 1), None)
instances[self.ST_CREATE].pop(idx, None)
cbFun = context['cbFun']
        cbFun((self.name, self.syntax), **context)
class TaxCodeTests(unittest.TestCase):
def test_unicode(self):
tax = TaxRate()
tax.Name = 'test'
self.assertEqual(str(tax), 'test')
def test_valid_object_name(self):
obj = TaxRate()
client = QuickBooks()
result = client.isvalid_object_name(obj.qbo_object_name)
        self.assertTrue(result)
def setup_logging():
global mod_logger
import logging
from logging.config import dictConfig
from logging.handlers import RotatingFileHandler
dictConfig({'version': 1, 'formatters': {'default': {'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'}}, 'handlers': {'wsgi': {'class': 'logging.StreamHandler', 'stream': 'ext://flask.logging.wsgi_errors_stream', 'formatter': 'default'}}, 'root': {'level': 'INFO', 'handlers': ['wsgi']}})
mod_log_path = config.mod_log_path
os.makedirs(os.path.dirname(mod_log_path), exist_ok=True)
mod_file_log_handler = RotatingFileHandler(mod_log_path, maxBytes=5000000, backupCount=5)
mod_file_log_handler.setFormatter(logging.Formatter('[%(asctime)s] %(message)s'))
mod_logger = logging.getLogger('mod log')
mod_logger.addHandler(mod_file_log_handler)
    mod_logger.setLevel(logging.INFO)
def filter_log_memory_filter_data(json):
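    """Select the recognized log-memory-filter options from *json*, dropping
    invalid fields and None values."""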
option_list = ['admin', 'anomaly', 'auth', 'cpu_memory_usage', 'dhcp', 'dns', 'event', 'filter', 'filter_type', 'forward_traffic', 'free_style', 'gtp', 'ha', 'ipsec', 'ldb_monitor', 'local_traffic', 'multicast_traffic', 'netscan_discovery', 'netscan_vulnerability', 'notification', 'pattern', 'ppp', 'radius', 'severity', 'sniffer_traffic', 'ssh', 'sslvpn_log_adm', 'sslvpn_log_auth', 'sslvpn_log_session', 'system', 'vip_ssl', 'voip', 'wan_opt', 'wireless_activity', 'ztna_traffic']
json = remove_invalid_fields(json)
dictionary = {}
for attribute in option_list:
if ((attribute in json) and (json[attribute] is not None)):
dictionary[attribute] = json[attribute]
return dictionary |
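# Usage sketch (added): only whitelisted, non-None keys survive filtering. For a
# self-contained demo, the helper remove_invalid_fields (defined elsewhere in the
# source) is stubbed out as identity here.
remove_invalid_fields = lambda j: j  # hypothetical stub, illustration only
assert filter_log_memory_filter_data({'severity': 'warning', 'dns': None, 'bogus': 1}) == {'severity': 'warning'} |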
class CallableImpl():
def __init__(self):
self.setup_called = False
self.flattened_expression_values = []
self.cfg = CfgSimple.empty()
def setup(self, call_info: FunctionCallInfo):
if self.setup_called:
return
self.setup_called = True
self.setup_impl(call_info)
def setup_impl(self, call_info: FunctionCallInfo):
raise NotImplementedError() |
class OptionSeriesAreasplinerangeSonificationContexttracksMappingTremolo(Options):
def depth(self) -> 'OptionSeriesAreasplinerangeSonificationContexttracksMappingTremoloDepth':
return self._config_sub_data('depth', OptionSeriesAreasplinerangeSonificationContexttracksMappingTremoloDepth)
def speed(self) -> 'OptionSeriesAreasplinerangeSonificationContexttracksMappingTremoloSpeed':
return self._config_sub_data('speed', OptionSeriesAreasplinerangeSonificationContexttracksMappingTremoloSpeed) |
class XarTestCase(unittest.TestCase):
def _unxar(self, xarfile, outdir):
with open(xarfile, 'rb') as fh:
first_line = fh.readline()
shebang = first_line.decode('utf-8').strip()
self.assertEqual(shebang, xar_builder.BORING_SHEBANG)
saw_stop = False
offset = None
for line in fh:
if (line == b'#xar_stop\n'):
saw_stop = True
break
if line.startswith(b'OFFSET='):
offset = int(line[8:(- 2)])
self.assertTrue(saw_stop)
self.assertTrue(((offset % 4096) == 0))
fh.seek(offset)
squashfs_contents = fh.read()
with tempfile.NamedTemporaryFile() as out, open('/dev/null', 'wb') as devnull:
out.write(squashfs_contents)
out.flush()
subprocess.check_call(['unsquashfs', '-d', outdir, '-no-xattrs', out.name], stdout=devnull)
def assertDirectoryEqual(self, src, dst, check_contents=True):
def directory_contents(d):
ret = []
for (dirname, dirs, files) in os.walk(d):
for entry in (dirs + files):
full_path = os.path.join(dirname, entry)
ret.append(full_path[(len(d) + 1):])
return sorted(ret)
src_contents = directory_contents(src)
dst_contents = directory_contents(dst)
self.assertEqual(src_contents, dst_contents)
for (src_file, dst_file) in zip(src_contents, dst_contents):
src_file = os.path.join(src, src_file)
dst_file = os.path.join(dst, dst_file)
if (check_contents and os.path.isfile(src_file)):
self.assertFilesEqual(src_file, dst_file)
def assertFilesEqual(self, src, dst):
self.assertTrue(os.path.exists(src))
self.assertTrue(os.path.exists(dst))
with open(src, 'rb') as fh:
src_contents = fh.read()
with open(dst, 'rb') as fh:
dst_contents = fh.read()
self.assertEqual(src_contents, dst_contents)
self.assertEqual(mode(src), mode(dst)) |
def deepMerge(tgt, src):
if isinstance(src, list):
tgt.extend(src)
elif isinstance(src, dict):
for name in src:
m = src[name]
if (name not in tgt):
tgt[name] = copy.deepcopy(m)
else:
deepMerge(tgt[name], m)
else:
return |
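# Usage sketch (added): deepMerge recursively folds `src` into `tgt` in place,
# extending lists and deep-copying dict values missing from the target; scalar
# values already present in `tgt` are left untouched. Assumes `import copy`
# alongside the function above.
base = {'a': [1], 'b': {'x': 1}}
deepMerge(base, {'a': [2], 'b': {'y': 2}, 'c': 3})
assert base == {'a': [1, 2], 'b': {'x': 1, 'y': 2}, 'c': 3} |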
class GaussianMixtureModel(object):
def __init__(self, k):
self.K = k
# "@" restored on the five decorators below; the name was garbled in extraction
# ("_variable") and bm.random_variable (Bean Machine) is an assumption.
@bm.random_variable
def alpha(self, k):
return dist.Dirichlet((5 * torch.ones(k)))
@bm.random_variable
def mu(self, c):
return dist.Normal(0, 10)
@bm.random_variable
def sigma(self, c):
return dist.Gamma(1, 10)
@bm.random_variable
def component(self, i):
alpha = self.alpha(self.K)
return dist.Categorical(alpha)
@bm.random_variable
def y(self, i):
c = self.component(i)
return dist.Normal(self.mu(c), self.sigma(c)) |
def phylogeny(node):
leaf_color = '#000000'
node.img_style['shape'] = 'square'
node.img_style['size'] = 2
if hasattr(node, 'evoltype'):
if (node.evoltype == 'D'):
node.img_style['fgcolor'] = '#FF0000'
node.img_style['hz_line_color'] = '#FF0000'
node.img_style['vt_line_color'] = '#FF0000'
elif (node.evoltype == 'S'):
node.img_style['fgcolor'] = '#1d176e'
node.img_style['hz_line_color'] = '#1d176e'
node.img_style['vt_line_color'] = '#1d176e'
elif (node.evoltype == 'L'):
node.img_style['fgcolor'] = '#777777'
node.img_style['vt_line_color'] = '#777777'
node.img_style['hz_line_color'] = '#777777'
node.img_style['hz_line_type'] = 1
node.img_style['vt_line_type'] = 1
leaf_color = '#777777'
if node.is_leaf:
node.img_style['shape'] = 'square'
node.img_style['size'] = 2
node.img_style['fgcolor'] = leaf_color
faces.add_face_to_node(faces.AttrFace('name', 'Arial', 11, leaf_color, None), node, 0)
if hasattr(node, 'sequence'):
SequenceFace = faces.SequenceFace(node.sequence, 'aa', 13)
faces.add_face_to_node(SequenceFace, node, 1, aligned=True)
else:
node.img_style['size'] = 2 |
class OptionsPanelPoints(Options):
@property
def background_color(self):
return self.get(self.page.theme.success[1])
@background_color.setter
def background_color(self, val):
self.set(val)
@property
def div_css(self):
return self.get({})
@div_css.setter
def div_css(self, css):
self.set(css)
@property
def selected(self):
return self.get(0)
@selected.setter
def selected(self, num):
self.set(num) |
class View(ViewElement):
id = AnId
content = Content
menubar = Any
toolbar = Any
action_manager_builder = Any
statusbar = ViewStatus
buttons = Buttons
default_button = AButton
key_bindings = AKeyBindings
handler = AHandler
model_view = AModelView
title = ATitle
icon = Image
kind = AKind
object = AnObject
style = EditorStyle
dock = DockStyle
image = Image
on_apply = OnApply
resizable = IsResizable
scrollable = IsScrollable
export = ExportType
imports = ImportTypes
help_id = HelpId
x = XCoordinate
y = YCoordinate
width = Width
height = Height
drop_class = Any()
updated = Event()
close_result = CloseResult
ok = Bool(False)
cancel = Bool(False)
undo = Bool(False)
redo = Bool(False)
apply = Bool(False)
revert = Bool(False)
help = Bool(False)
def __init__(self, *values, **traits):
ViewElement.__init__(self, **traits)
self.set_content(*values)
def set_content(self, *values):
content = []
accum = []
for value in values:
if isinstance(value, ViewSubElement):
content.append(value)
elif (type(value) in SequenceTypes):
content.append(Group(*value))
elif (isinstance(value, str) and (value[:1] == '<') and (value[(- 1):] == '>')):
content.append(Include(value[1:(- 1)].strip()))
else:
content.append(Item(value))
for item in content:
if isinstance(item, Item):
content = [Group(*content)]
break
self.content = Group(*content, container=self)
def ui(self, context, parent=None, kind=None, view_elements=None, handler=None, id='', scrollable=None, args=None):
handler = (handler or self.handler or default_handler())
if (not isinstance(handler, Handler)):
handler = handler()
if (args is not None):
handler.trait_set(**args)
if (not isinstance(context, dict)):
context = context.trait_context()
context.setdefault('handler', handler)
handler = context['handler']
if (self.model_view is not None):
context['object'] = self.model_view(context['object'])
self_id = self.id
if (self_id != ''):
if (id != ''):
id = ('%s:%s' % (self_id, id))
else:
id = self_id
if (scrollable is None):
scrollable = self.scrollable
ui = UI(view=self, context=context, handler=handler, view_elements=view_elements, title=self.title, id=id, scrollable=scrollable)
if (kind is None):
kind = self.kind
ui.ui(parent, kind)
return ui
def replace_include(self, view_elements):
if (self.content is not None):
self.content.replace_include(view_elements)
def __repr__(self):
if (self.content is None):
return '()'
return ('( %s )' % ', '.join([item.__repr__() for item in self.content.content]))
def _action_manager_builder_default(self):
return ActionManagerBuilder() |
# "@" restored; the decorator prefix was garbled in extraction ("_op") and
# sched_op (exo's scheduling argument-processing decorator) is an assumption.
@sched_op([GapCursorA, ConfigA, ConfigFieldA, NewExprA('gap_cursor')])
def write_config(proc, gap_cursor, config, field, rhs):
stmtc = gap_cursor.anchor()
before = (gap_cursor.type() == ic.GapType.Before)
stmt = stmtc._impl
(ir, fwd, cfg) = scheduling.DoConfigWrite(stmt, config, field, rhs, before=before)
return Procedure(ir, _provenance_eq_Procedure=proc, _forward=fwd, _mod_config=cfg) |
def annotate_hit_line(arguments):
(hit, annot, seed_ortholog_score, seed_ortholog_evalue, tax_scope_mode, tax_scope_ids, target_taxa, target_orthologs, excluded_taxa, go_evidence, go_excluded, data_dir, annotation, novel_fams_dict) = arguments
if (annotation is not None):
return ((hit, annotation), True)
try:
query_name = hit[0]
best_hit_name = '_'.join(hit[1].split('_')[1:])
best_hit_evalue = float(hit[2])
best_hit_score = float(hit[3])
if filter_out(best_hit_name, best_hit_evalue, best_hit_score, seed_ortholog_evalue, seed_ortholog_score):
pass
else:
novel_fam = hit[1].split('_')[0]
(best_neigh_predicted_pathways, best_neigh_score, plddt, best_structural_pdb_matches, best_pdb_evalue, best_structural_uniprot_matches, best_uniprot_evalue, has_signal_peptide, has_TM_domains, is_AMP) = _retrieve_novel_fam_annots(novel_fam, novel_fams_dict)
annotation = (query_name, best_hit_name, best_hit_evalue, best_hit_score, novel_fam, best_neigh_predicted_pathways, best_neigh_score, plddt, best_structural_pdb_matches, best_pdb_evalue, best_structural_uniprot_matches, best_uniprot_evalue, has_signal_peptide, has_TM_domains, is_AMP)
except Exception as e:
import traceback
traceback.print_exc()
raise EmapperException((f'Error: annotation went wrong for hit {hit}. ' + str(e)))
return ((hit, annotation), False) |
def test_data_integrity_test_different_missing_values_one_column() -> None:
test_dataset = pd.DataFrame({'feature1': ['n/a', 'b', 'a'], 'feature2': ['b', '', None]})
suite = TestSuite(tests=[TestColumnNumberOfDifferentMissingValues(column_name='feature1')])
suite.run(current_data=test_dataset, reference_data=test_dataset, column_mapping=ColumnMapping())
suite._inner_suite.raise_for_error()
assert suite
assert suite.show()
assert suite.json() |
class IntMatcherTest(testslide.TestCase):
def testAnyInt(self):
self.assertEqual(testslide.matchers.AnyInt(), 666)
self.assertEqual(testslide.matchers.AnyInt(), 42)
self.assertNotEqual(testslide.matchers.AnyInt(), 'derp')
def test_NotThisInt(self):
self.assertEqual(testslide.matchers.NotThisInt(666), 42)
self.assertNotEqual(testslide.matchers.NotThisInt(69), 69)
self.assertNotEqual(testslide.matchers.NotThisInt(42), 'derp')
with self.assertRaises(ValueError):
testslide.matchers.NotThisInt('derp')
with self.assertRaises(ValueError):
testslide.matchers.NotThisInt(1.34)
def test_IntBetween(self):
self.assertEqual(testslide.matchers.IntBetween(21, 666), 42)
self.assertEqual(testslide.matchers.IntBetween(21, 666), 21)
self.assertEqual(testslide.matchers.IntBetween(21, 666), 666)
self.assertNotEqual(testslide.matchers.IntBetween(42, 69), 666)
self.assertNotEqual(testslide.matchers.IntBetween(42, 69), 'derp')
with self.assertRaises(ValueError):
testslide.matchers.IntBetween('derp', 42)
with self.assertRaises(ValueError):
testslide.matchers.IntBetween(42.42, 'derp')
with self.assertRaises(ValueError):
testslide.matchers.IntBetween('derp', 'derp')
def test_IntGreaterThan(self):
self.assertEqual(testslide.matchers.IntGreaterThan(21), 42)
self.assertNotEqual(testslide.matchers.IntGreaterThan(21), 21)
self.assertNotEqual(testslide.matchers.IntGreaterThan(21), 20)
self.assertNotEqual(testslide.matchers.IntGreaterThan(42), 'derp')
with self.assertRaises(ValueError):
testslide.matchers.IntGreaterThan('derp')
def test_IntGreaterOrEquals(self):
self.assertEqual(testslide.matchers.IntGreaterOrEquals(21), 42)
self.assertEqual(testslide.matchers.IntGreaterOrEquals(21), 21)
self.assertNotEqual(testslide.matchers.IntGreaterOrEquals(21), 20)
self.assertNotEqual(testslide.matchers.IntGreaterOrEquals(42), 'derp')
with self.assertRaises(ValueError):
testslide.matchers.IntGreaterOrEquals('derp')
def test_IntLessThan(self):
self.assertEqual(testslide.matchers.IntLessThan(21), 20)
self.assertNotEqual(testslide.matchers.IntLessThan(21), 21)
self.assertNotEqual(testslide.matchers.IntLessThan(21), 22)
self.assertNotEqual(testslide.matchers.IntLessThan(42), 'derp')
with self.assertRaises(ValueError):
testslide.matchers.IntLessThan('derp')
def test_IntLessOrEquals(self):
self.assertEqual(testslide.matchers.IntLessOrEquals(21), 20)
self.assertEqual(testslide.matchers.IntLessOrEquals(21), 21)
self.assertNotEqual(testslide.matchers.IntLessOrEquals(21), 22)
self.assertNotEqual(testslide.matchers.IntLessOrEquals(42), 'derp')
with self.assertRaises(ValueError):
testslide.matchers.IntLessOrEquals('derp') |
def fetch_rio_magnetic():
warnings.warn('The Rio magnetic anomaly dataset is deprecated and will be removed in Verde v2.0.0. Use a different dataset instead.', FutureWarning, stacklevel=2)
data_file = REGISTRY.fetch('rio-magnetic.csv.xz')
data = pd.read_csv(data_file, compression='xz')
return data |
def _render_symtable(data, *, depth='minimal'):
namewidth = 15
(table, top, root) = data
if (not depth):
depth = 'minimal'
if (depth == 'full'):
table = _build_symtable_snapshot(root)
elif (depth == 'top'):
table = _build_symtable_snapshot(top)
elif (depth == 'minimal'):
table = _build_symtable_snapshot(table)
for (i, child) in enumerate(table.children):
assert (not isinstance(child, symtable.SymbolTable))
if isinstance(child, types.SimpleNamespace):
name = child.name
child = child.__obj__
table.children[i] = f'<{type(child).__name__} {name!r} at 0x{id(child):x}>'
elif (depth == 'sub'):
table = _build_symtable_snapshot(table)
else:
raise NotImplementedError(depth)
ignored = ('id', 'name', 'kind', 'has_children', 'lineno', 'symbols', 'imported', 'annotated')
def render_simple(name, value, indent):
value = normalize_rendered(value)
return f"{indent}{(name + ':').ljust(namewidth)} {value}"
def render(curobj, indent=''):
assert isinstance(curobj, types.SimpleNamespace)
(yield f'{indent}<{curobj.kind} {curobj.name}>')
indent += INDENT
for (name, value) in vars(curobj).items():
if ((name in ignored) or name.startswith('_')):
continue
if ((not isinstance(value, list)) or (not value)):
(yield render_simple(name, value, indent))
continue
if isinstance(value[0], str):
if (len(value) < 6):
value = ', '.join(value)
(yield render_simple(name, value, indent))
continue
(yield f'{indent}{name}:')
itemindent = (indent + INDENT)
for item in value:
item = normalize_rendered(item)
(yield f'{itemindent}{item}')
else:
(yield f'{indent}{name}:')
for item in value:
(yield from render(item, (indent + INDENT)))
(yield from render(table)) |
@pytest.mark.parametrize('ops', ALL_OPS)
def test_alloc(ops):
float_methods = (ops.alloc1f, ops.alloc2f, ops.alloc3f, ops.alloc4f)
for (i, method) in enumerate(float_methods):
shape = ((1,) * (i + 1))
arr = method(*shape)
assert (arr.dtype == numpy.float32)
assert (arr.ndim == len(shape))
arr = ops.alloc_f(shape)
assert (arr.dtype == numpy.float32)
assert (arr.ndim == len(shape))
int_methods = (ops.alloc1i, ops.alloc2i, ops.alloc3i, ops.alloc4i)
for (i, method) in enumerate(int_methods):
shape = ((1,) * (i + 1))
arr = method(*shape)
assert (arr.dtype == numpy.int32)
assert (arr.ndim == len(shape))
arr = ops.alloc_i(shape)
assert (arr.dtype == numpy.int32)
assert (arr.ndim == len(shape))
assert (ops.alloc(1).ndim == 1) |
class ObserverExpression():
__slots__ = ()
def __or__(self, expression):
return ParallelObserverExpression(self, expression)
def then(self, expression):
return SeriesObserverExpression(self, expression)
def match(self, filter, notify=True):
return self.then(match(filter=filter, notify=notify))
def anytrait(self, notify=True):
return self.match(filter=anytrait_filter, notify=notify)
def metadata(self, metadata_name, notify=True):
return self.match(filter=MetadataFilter(metadata_name=metadata_name), notify=notify)
def dict_items(self, notify=True, optional=False):
return self.then(dict_items(notify=notify, optional=optional))
def list_items(self, notify=True, optional=False):
return self.then(list_items(notify=notify, optional=optional))
def set_items(self, notify=True, optional=False):
return self.then(set_items(notify=notify, optional=optional))
def trait(self, name, notify=True, optional=False):
return self.then(trait(name=name, notify=notify, optional=optional))
def _as_graphs(self):
return self._create_graphs(branches=[])
def _create_graphs(self, branches):
raise NotImplementedError("'_create_graphs' must be implemented.") |
class IgdbInfo(Base):
__tablename__ = 'igdb_info'
id: Mapped[int] = mapped_column(primary_key=True)
game: Mapped[(Game | None)] = relationship('Game', back_populates='igdb_info', default=None)
url: Mapped[(str | None)] = mapped_column(default=None)
name: Mapped[(str | None)] = mapped_column(default=None)
short_description: Mapped[(str | None)] = mapped_column(default=None)
release_date: Mapped[(datetime | None)] = mapped_column(AwareDateTime, default=None)
user_score: Mapped[(int | None)] = mapped_column(default=None)
user_ratings: Mapped[(int | None)] = mapped_column(default=None)
meta_score: Mapped[(int | None)] = mapped_column(default=None)
meta_ratings: Mapped[(int | None)] = mapped_column(default=None) |
class _SortEntry():
_type_none = 0
_type_bool_false = 1
_type_bool_true = 2
_type_numeric = 3
_type_string = 4
_type_object = 5
def __init__(self, key, value, order_by):
self._key = key
self._value = value
if (order_by in ('$key', '$priority')):
self._index = key
elif (order_by == '$value'):
self._index = value
else:
self._index = _SortEntry._extract_child(value, order_by)
self._index_type = _SortEntry._get_index_type(self._index)
@property
def key(self):
return self._key
@property
def index(self):
return self._index
@property
def index_type(self):
return self._index_type
@property
def value(self):
return self._value
@classmethod
def _get_index_type(cls, index):
if (index is None):
return cls._type_none
if (isinstance(index, bool) and (not index)):
return cls._type_bool_false
if (isinstance(index, bool) and index):
return cls._type_bool_true
if isinstance(index, (int, float)):
return cls._type_numeric
if isinstance(index, str):
return cls._type_string
return cls._type_object
@classmethod
def _extract_child(cls, value, path):
segments = path.split('/')
current = value
for segment in segments:
if isinstance(current, dict):
current = current.get(segment)
else:
return None
return current
def _compare(self, other):
(self_key, other_key) = (self.index_type, other.index_type)
if (self_key == other_key):
if ((self_key in (self._type_numeric, self._type_string)) and (self.index != other.index)):
(self_key, other_key) = (self.index, other.index)
else:
(self_key, other_key) = (self.key, other.key)
if (self_key < other_key):
return (- 1)
if (self_key > other_key):
return 1
return 0
def __lt__(self, other):
return (self._compare(other) < 0)
def __le__(self, other):
return (self._compare(other) <= 0)
def __gt__(self, other):
return (self._compare(other) > 0)
def __ge__(self, other):
return (self._compare(other) >= 0)
def __eq__(self, other):
return (self._compare(other) == 0) |
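# Usage sketch (added): entries order first by index type (None < False < True
# < numbers < strings < objects), then by index value for numerics and strings,
# falling back to the key; assumes the class above with its property accessors.
entries = [_SortEntry('b', 2, '$value'), _SortEntry('a', 'x', '$value'), _SortEntry('c', None, '$value')]
assert [e.key for e in sorted(entries)] == ['c', 'b', 'a'] |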
class BsOffCanvas(StructComponent):
css_classes = ['offcanvas']
_option_cls = OptBsWidget.OffCanvas
name = 'Bootstrap OffCanvas'
str_repr = '<div {attrs}>{content}</div>'
_js__builder__ = 'var carousel = new bootstrap.Offcanvas(htmlObj, options)'
@property
def options(self) -> OptBsWidget.OffCanvas:
return super().options
@property
def js(self) -> JsBsWidgets.OffCanvas:
if (self._js is None):
self._js = JsBsWidgets.OffCanvas(component=self, page=self.page)
return self._js
def add_to_header(self, content: primitives.HtmlModel):
return self.add_to('header', content)
def add_to_body(self, content: primitives.HtmlModel):
return self.add_to('body', content)
def add_to_footer(self, content: primitives.HtmlModel):
return self.add_to('footer', content)
def write_values(self):
str_frg = []
for g in ['header', 'body', 'footer']:
if self.items.get(g):
container = self.page.web.std.div(self.items[g])
container.add_style([('offcanvas-%s' % g)], clear_first=True)
container.options.managed = False
str_frg.append(container.html())
return {'content': ''.join(str_frg)} |
def LoadFirmwareImage(chip, image_file):
def select_image_class(f, chip):
chip = re.sub('[-()]', '', chip.lower())
if (chip != 'esp8266'):
return {'esp32': ESP32FirmwareImage, 'esp32s2': ESP32S2FirmwareImage, 'esp32s3beta2': ESP32S3BETA2FirmwareImage, 'esp32s3': ESP32S3FirmwareImage, 'esp32c3': ESP32C3FirmwareImage, 'esp32c6beta': ESP32C6BETAFirmwareImage, 'esp32h2beta1': ESP32H2BETA1FirmwareImage, 'esp32h2beta2': ESP32H2BETA2FirmwareImage, 'esp32c2': ESP32C2FirmwareImage, 'esp32c6': ESP32C6FirmwareImage, 'esp32c5beta3': ESP32C5BETA3FirmwareImage, 'esp32h2': ESP32H2FirmwareImage, 'esp32p4': ESP32P4FirmwareImage}[chip](f)
else:
magic = ord(f.read(1))
f.seek(0)
if (magic == ESPLoader.ESP_IMAGE_MAGIC):
return ESP8266ROMFirmwareImage(f)
elif (magic == ESP8266V2FirmwareImage.IMAGE_V2_MAGIC):
return ESP8266V2FirmwareImage(f)
else:
raise FatalError(('Invalid image magic number: %d' % magic))
if isinstance(image_file, str):
with open(image_file, 'rb') as f:
return select_image_class(f, chip)
return select_image_class(image_file, chip) |
class CombatParticipant(Base):
__tablename__ = 'combat_participant'
combat_participant_id = Column(Integer, primary_key=True)
combat_id = Column(ForeignKey(Combat.combat_id), index=True)
war_participant_id = Column(ForeignKey(WarParticipant.warparticipant_id), index=True)
is_attacker = Column(Boolean)
war_participant = relationship('WarParticipant', back_populates='combat_participation')
combat = relationship('Combat', back_populates='participants')
def country(self):
return self.war_participant.country |
class CIDRRange():
def __init__(self, spec: str) -> None:
self.error: Optional[str] = None
self.address: Optional[str] = None
self.prefix_len: Optional[int] = None
prefix: Optional[str] = None
pfx_len: Optional[int] = None
addr: Optional[Union[(IPv4Address, IPv6Address)]] = None
if ('/' in spec):
(address, lenstr) = spec.split('/', 1)
try:
pfx_len = int(lenstr)
except ValueError:
self.error = f'CIDR range {spec} has an invalid length, ignoring'
return
else:
address = spec
try:
addr = ip_address(address)
except ValueError:
pass
if (addr is None):
self.error = f'Invalid IP address {address}'
return
if (pfx_len is None):
pfx_len = addr.max_prefixlen
elif (pfx_len > addr.max_prefixlen):
self.error = f'Invalid prefix length for IPv{addr.version} address {address}/{pfx_len}'
return
self.address = str(addr)
self.prefix_len = pfx_len
def __bool__(self) -> bool:
return ((not self.error) and (self.address is not None) and (self.prefix_len is not None))
def __str__(self) -> str:
if self:
return f'{self.address}/{self.prefix_len}'
else:
raise RuntimeError('cannot serialize an invalid CIDRRange!')
def as_dict(self) -> dict:
return {'address_prefix': self.address, 'prefix_len': self.prefix_len} |
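# Usage sketch (added): a bare address gets the maximum prefix length for its
# family, and malformed specs yield a falsy object carrying an error message.
good = CIDRRange('10.0.0.0/8')
assert bool(good) and str(good) == '10.0.0.0/8'
bare = CIDRRange('::1')
assert bare.prefix_len == 128
bad = CIDRRange('10.0.0.0/xx')
assert (not bad) and (bad.error is not None) |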
def extractWebnoveltranslationWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('BTSTTA', 'Bringing the Supermarket to the Apocalypse', 'translated')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
def readtext(path):
if isinstance(path, pathlib.Path):
with path.open() as f:
return f.read()
if isinstance(path, str):
with open(path) as f:
return f.read()
if isinstance(path, io.TextIOBase):
return path.read()
raise TypeError('readtext requires a path-like or file-like argument') |
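# Usage sketch (added): readtext accepts str paths, pathlib.Path, or an open
# text stream; io.StringIO is a TextIOBase subclass, so it is read directly.
import io
assert readtext(io.StringIO('hello')) == 'hello' |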
def _build(tmpdir, ext):
from distutils.core import Distribution
import distutils.errors
dist = Distribution({'ext_modules': [ext]})
dist.parse_config_files()
options = dist.get_option_dict('build_ext')
options['force'] = ('ffiplatform', True)
options['build_lib'] = ('ffiplatform', tmpdir)
options['build_temp'] = ('ffiplatform', tmpdir)
try:
dist.run_command('build_ext')
except (distutils.errors.CompileError, distutils.errors.LinkError) as e:
raise VerificationError(('%s: %s' % (e.__class__.__name__, e)))
cmd_obj = dist.get_command_obj('build_ext')
[soname] = cmd_obj.get_outputs()
return soname |
def get_stage_flow(game_type: PrivateComputationGameType, pcs_feature_enums: Set[PCSFeature], stage_flow_cls: Optional[Type[PrivateComputationBaseStageFlow]]=None) -> Type[PrivateComputationBaseStageFlow]:
selected_stage_flow_cls = unwrap_or_default(optional=stage_flow_cls, default=(PrivateComputationPCF2StageFlow if (game_type is PrivateComputationGameType.ATTRIBUTION) else PrivateComputationStageFlow))
if (PCSFeature.PRIVATE_ATTRIBUTION_MR_PID in pcs_feature_enums):
selected_stage_flow_cls = (PrivateComputationMRStageFlow if (game_type is PrivateComputationGameType.ATTRIBUTION) else PrivateComputationMrPidPCF2LiftStageFlow)
if (PCSFeature.PRIVATE_LIFT_UNIFIED_DATA_PROCESS in pcs_feature_enums):
selected_stage_flow_cls = (PrivateComputationPCF2LiftUDPStageFlow if (game_type is PrivateComputationGameType.LIFT) else selected_stage_flow_cls)
return selected_stage_flow_cls |
class Ui_InfoDialog(object):
def setupUi(self, InfoDialog):
InfoDialog.setObjectName('InfoDialog')
InfoDialog.resize(640, 480)
InfoDialog.setWindowTitle('Dialog')
self.gridLayout = QtWidgets.QGridLayout(InfoDialog)
self.gridLayout.setObjectName('gridLayout')
self.colors = QtWidgets.QWidget(InfoDialog)
self.colors.setMinimumSize(QtCore.QSize(240, 180))
self.colors.setMaximumSize(QtCore.QSize(240, 180))
self.colors.setObjectName('colors')
self.gridLayout.addWidget(self.colors, 0, 0, 4, 1)
self.info_label = QtWidgets.QLabel(InfoDialog)
self.info_label.setMinimumSize(QtCore.QSize(250, 60))
self.info_label.setMaximumSize(QtCore.QSize(250, 60))
font = QtGui.QFont()
font.setPointSize(12)
self.info_label.setFont(font)
self.info_label.setObjectName('info_label')
self.gridLayout.addWidget(self.info_label, 0, 1, 1, 2)
spacerItem = QtWidgets.QSpacerItem(408, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 0, 3, 1, 1)
self.name_label = QtWidgets.QLabel(InfoDialog)
self.name_label.setMinimumSize(QtCore.QSize(60, 30))
self.name_label.setMaximumSize(QtCore.QSize(60, 30))
self.name_label.setObjectName('name_label')
self.gridLayout.addWidget(self.name_label, 1, 1, 1, 1)
self.name_ledit = QtWidgets.QLineEdit(InfoDialog)
self.name_ledit.setMinimumSize(QtCore.QSize(240, 30))
self.name_ledit.setMaximumSize(QtCore.QSize(960, 30))
self.name_ledit.setObjectName('name_ledit')
self.gridLayout.addWidget(self.name_ledit, 1, 2, 1, 1)
self.rule_label = QtWidgets.QLabel(InfoDialog)
self.rule_label.setMinimumSize(QtCore.QSize(60, 30))
self.rule_label.setMaximumSize(QtCore.QSize(60, 30))
self.rule_label.setObjectName('rule_label')
self.gridLayout.addWidget(self.rule_label, 2, 1, 1, 1)
self.hm_rule_label = QtWidgets.QLabel(InfoDialog)
self.hm_rule_label.setMinimumSize(QtCore.QSize(200, 30))
self.hm_rule_label.setMaximumSize(QtCore.QSize(200, 30))
self.hm_rule_label.setText('hm_rule')
self.hm_rule_label.setObjectName('hm_rule_label')
self.gridLayout.addWidget(self.hm_rule_label, 2, 2, 1, 1)
self.time_label = QtWidgets.QLabel(InfoDialog)
self.time_label.setMinimumSize(QtCore.QSize(60, 30))
self.time_label.setMaximumSize(QtCore.QSize(60, 30))
self.time_label.setObjectName('time_label')
self.gridLayout.addWidget(self.time_label, 3, 1, 1, 1)
self.cr_time_label = QtWidgets.QLabel(InfoDialog)
self.cr_time_label.setMinimumSize(QtCore.QSize(200, 40))
self.cr_time_label.setMaximumSize(QtCore.QSize(200, 60))
self.cr_time_label.setText('cr_time')
self.cr_time_label.setObjectName('cr_time_label')
self.gridLayout.addWidget(self.cr_time_label, 3, 2, 1, 1)
self.desc_label = QtWidgets.QLabel(InfoDialog)
self.desc_label.setMinimumSize(QtCore.QSize(0, 50))
self.desc_label.setMaximumSize(QtCore.QSize(16777215, 50))  # first argument lost in extraction; 16777215 is QWIDGETSIZE_MAX, the pyuic default
self.desc_label.setObjectName('desc_label')
self.gridLayout.addWidget(self.desc_label, 4, 0, 1, 1)
self.desc_tedit = QtWidgets.QTextEdit(InfoDialog)
self.desc_tedit.setObjectName('desc_tedit')
self.gridLayout.addWidget(self.desc_tedit, 5, 0, 1, 4)
self.buttonBox = QtWidgets.QDialogButtonBox(InfoDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons((((QtWidgets.QDialogButtonBox.Apply | QtWidgets.QDialogButtonBox.Cancel) | QtWidgets.QDialogButtonBox.Ok) | QtWidgets.QDialogButtonBox.Reset))
self.buttonBox.setObjectName('buttonBox')
self.gridLayout.addWidget(self.buttonBox, 6, 0, 1, 4)
self.retranslateUi(InfoDialog)
self.buttonBox.accepted.connect(InfoDialog.accept)
self.buttonBox.rejected.connect(InfoDialog.reject)
QtCore.QMetaObject.connectSlotsByName(InfoDialog)
def retranslateUi(self, InfoDialog):
_translate = QtCore.QCoreApplication.translate
self.info_label.setText(_translate('InfoDialog', 'Color Set Information'))
self.name_label.setText(_translate('InfoDialog', 'Name:'))
self.rule_label.setText(_translate('InfoDialog', 'Rule:'))
self.time_label.setText(_translate('InfoDialog', 'Time:'))
self.desc_label.setText(_translate('InfoDialog', 'Description')) |
def main():
args = parse_args()
if args.update:
update()
if ((os.name != 'nt') and (sys.platform != 'darwin')):
if args.install:
systemd_service.install()
if args.uninstall:
systemd_service.uninstall()
if args.enable:
systemd_service.enable()
if args.disable:
systemd_service.disable()
if args.start:
systemd_service.start()
if args.stop:
systemd_service.stop()
if args.status:
systemd_service.status()
if args.version:
print('FetchCord version:', __init__.VERSION)
sys.exit(0)
if args.time:
if (int(args.time) < 15):
print('ERROR: Invalid time set, must be at least 15 seconds, cannot continue.')
sys.exit(1)
else:
print(('setting custom time %s seconds' % args.time))
try:
if args.help:
sys.exit(0)
except AttributeError:
pass
computer: Computer = Computer()
if ((not computer.neofetchwin) and (computer.host == 'Host: N/A') and args.nodistro and args.noshell and args.nohardware):
print('ERROR: no hostline is available!')
sys.exit(1)
if args.debug:
run_rpc_debug(computer)
run: Run_rpc = Run_rpc()
if computer.neofetchwin:
loops: Dict = {}
loops_indexes: Dict = {}
if (not args.nodistro):
loops['windows'] = (computer.osinfoid, windows)
loops_indexes[len(loops_indexes)] = 'windows'
if (not args.nohardware):
loops['cycle1'] = (computer.cpuid, cycle1)
loops_indexes[len(loops_indexes)] = 'cycle1'
run.set_loop(loops, loops_indexes, computer.updateMap, (int(args.poll_rate) if args.poll_rate else 3))
run.run_loop(computer)
else:
loops: Dict = {}
loops_indexes: Dict = {}
if ((not args.nodistro) and (computer.os != 'macos')):
loops['cycle0'] = (computer.osinfoid, cycle0)
loops_indexes[len(loops_indexes)] = 'cycle0'
if (computer.os == 'macos'):
loops['runmac'] = ('', runmac)
loops_indexes[len(loops_indexes)] = 'runmac'
if (not args.nohardware):
loops['cycle1'] = (computer.cpuid, cycle1)
loops_indexes[len(loops_indexes)] = 'cycle1'
if (not args.noshell):
loops['cycle2'] = (computer.terminalid, cycle2)
loops_indexes[len(loops_indexes)] = 'cycle2'
if ((not args.nohost) and (computer.os != 'macos')):
loops['cycle3'] = (computer.hostappid, cycle3)
loops_indexes[len(loops_indexes)] = 'cycle3'
if args.pause_cycle:
loops['pause'] = ('', pause)
loops_indexes[len(loops_indexes)] = 'pause'
run.set_loop(loops, loops_indexes, computer.updateMap, (int(args.poll_rate) if args.poll_rate else 3))
run.run_loop(computer) |
def test_decimal_precision_is_a_positive_int():
'''decimal precision must be a positive integer'''
schema = {'type': 'record', 'name': 'test_scale_is_an_int', 'fields': [{'name': 'field', 'type': {'logicalType': 'decimal', 'precision': (- 5), 'scale': 2, 'type': 'bytes'}}]}
with pytest.raises(SchemaParseException, match='decimal precision must be a positive integer'):
parse_schema(schema) |
def test_intrinsics():
string = write_rpc_request(1, 'initialize', {'rootPath': str((test_dir / 'signature'))})
file_path = ((test_dir / 'signature') / 'nested_sigs.f90')
string += sigh_request(file_path, 8, 77)
(errcode, results) = run_request(string, ['--hover_signature', '--use_signature_help', '-n', '1'])
assert (errcode == 0)
ref = [[0, 2, 'REAL(A, KIND=kind)']]
assert (len(ref) == (len(results) - 1))
for (i, r) in enumerate(ref):
validate_sigh(results[(i + 1)], r) |
def _mark_storage_warm(computation: ComputationAPI, slot: int) -> bool:
storage_address = computation.msg.storage_address
if computation.state.is_storage_warm(storage_address, slot):
return False
else:
computation.state.mark_storage_warm(storage_address, slot)
return True |
# "@" and app object restored; the route decorator was garbled in extraction
# (a Flask app object named `app` is an assumption).
@app.route('/view', methods=['GET'])
def view():
req_url = request.args.get('url')
if (not req_url):
return render_template('error.html', title='Viewer', message='Error! No page specified!')
version = request.args.get('version')
if version:
return render_template('error.html', title='Error', message='Historical views must be routed through the /history route!')
response = make_response(render_template('view.html', title='Rendering Content', req_url=req_url, version=None))
return set_cache_control_headers(response, allow_inline=True) |
# "@" restored; decorator name garbled in extraction ("_json"); dataclass_json
# is an assumption based on the dataclasses-style fields below.
@dataclass_json
class Collation(Element):
children: List[Element] = field(default_factory=list)
def __hash__(self):
return hash(f'{self.id}-{self.code}')
def __eq__(self, other):
return ((self.__class__ == other.__class__) and (self.id == other.id) and (self.code == other.code) and (self.description == other.description))
def include(self, val):
self.children.append(val) |
def getsize(object: Any) -> Tuple[(int, int)]:
if isinstance(object, BLACKLIST):
raise TypeError(('getsize() does not take argument of type: ' + str(type(object))))
seen_ids = set()
size_in_byte = 0
objects = [object]
while objects:
need_referents = []
for obj in objects:
if ((not isinstance(obj, BLACKLIST)) and (id(obj) not in seen_ids)):
seen_ids.add(id(obj))
size_in_byte += sys.getsizeof(obj)
need_referents.append(obj)
objects = get_referents(*need_referents)
size_in_gbyte = ((size_in_byte * 9.31) * (10 ** (- 10)))
return (size_in_byte, size_in_gbyte) |
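# Usage sketch (added): getsize walks the object graph via gc.get_referents,
# summing sys.getsizeof over unseen, non-BLACKLISTed objects, and returns the
# total both in bytes and in approximate gigabytes. Assumes the surrounding
# module defines BLACKLIST and imports sys and gc.get_referents.
(n_bytes, n_gb) = getsize({'a': list(range(1000))})
assert n_bytes > sys.getsizeof({}) |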
@pytest.mark.usefixtures('use_tmpdir')
def test_that_magic_strings_get_substituted_in_workflow():
script_file_contents = dedent('\n SCRIPT script.py\n ARGLIST <A>\n ARG_TYPE 0 INT\n ')
workflow_file_contents = dedent('\n script <ZERO>\n ')
script_file_path = os.path.join(os.getcwd(), 'script')
workflow_file_path = os.path.join(os.getcwd(), 'workflow')
with open(script_file_path, mode='w', encoding='utf-8') as fh:
fh.write(script_file_contents)
with open(workflow_file_path, mode='w', encoding='utf-8') as fh:
fh.write(workflow_file_contents)
with open('script.py', mode='w', encoding='utf-8') as fh:
fh.write(dedent('\n from ert import ErtScript\n class Script(ErtScript):\n def run(self, *args):\n pass\n '))
with open('config.ert', mode='w', encoding='utf-8') as fh:
fh.write(dedent(f'''
NUM_REALIZATIONS 1
DEFINE <ZERO> 0
LOAD_WORKFLOW_JOB {script_file_path} script
LOAD_WORKFLOW {workflow_file_path}
'''))
ert_config = ErtConfig.from_file('config.ert')
assert (ert_config.workflows['workflow'].cmd_list[0][1] == ['0']) |
class ProcessDetailsDialog(QtWidgets.QDialog, uic.loadUiType(DIALOG_UI_PATH)[0]):
LOG_TAG = '[ProcessDetails]: '
_notification_callback = QtCore.pyqtSignal(ui_pb2.NotificationReply)
TAB_STATUS = 0
TAB_DESCRIPTORS = 1
TAB_IOSTATS = 2
TAB_MAPS = 3
TAB_STACK = 4
TAB_ENVS = 5
TABS = {TAB_STATUS: {'text': None, 'scrollPos': 0}, TAB_DESCRIPTORS: {'text': None, 'scrollPos': 0}, TAB_IOSTATS: {'text': None, 'scrollPos': 0}, TAB_MAPS: {'text': None, 'scrollPos': 0}, TAB_STACK: {'text': None, 'scrollPos': 0}, TAB_ENVS: {'text': None, 'scrollPos': 0}}
def __init__(self, parent=None, appicon=None):
super(ProcessDetailsDialog, self).__init__(parent)
QtWidgets.QDialog.__init__(self, parent, QtCore.Qt.WindowStaysOnTopHint)
self.setWindowFlags(QtCore.Qt.Window)
self.setupUi(self)
self.setWindowIcon(appicon)
self._app_name = None
self._app_icon = None
self._apps_parser = LinuxDesktopParser()
self._nodes = Nodes.instance()
self._notification_callback.connect(self._cb_notification_callback)
self._nid = None
self._pid = ''
self._notifications_sent = {}
self.cmdClose.clicked.connect(self._cb_close_clicked)
self.cmdAction.clicked.connect(self._cb_action_clicked)
self.comboPids.currentIndexChanged.connect(self._cb_combo_pids_changed)
self.TABS[self.TAB_STATUS]['text'] = self.textStatus
self.TABS[self.TAB_DESCRIPTORS]['text'] = self.textOpenedFiles
self.TABS[self.TAB_IOSTATS]['text'] = self.textIOStats
self.TABS[self.TAB_MAPS]['text'] = self.textMappedFiles
self.TABS[self.TAB_STACK]['text'] = self.textStack
self.TABS[self.TAB_ENVS]['text'] = self.textEnv
self.TABS[self.TAB_DESCRIPTORS]['text'].setFont(QtGui.QFont('monospace'))
self.iconStart = QtGui.QIcon.fromTheme('media-playback-start')
self.iconPause = QtGui.QIcon.fromTheme('media-playback-pause')
if QtGui.QIcon.hasThemeIcon('window-close'):
return
closeIcon = Icons.new(self, 'window-close')
self.cmdClose.setIcon(closeIcon)
self.iconStart = Icons.new(self, 'media-playback-start')
self.iconPause = Icons.new(self, 'media-playback-pause')
@QtCore.pyqtSlot(ui_pb2.NotificationReply)
def _cb_notification_callback(self, reply):
if (reply.id not in self._notifications_sent):
print('[stats] unknown notification received: ', reply.id)
else:
noti = self._notifications_sent[reply.id]
if (reply.code == ui_pb2.ERROR):
self._show_message((QtCore.QCoreApplication.translate('proc_details', '<b>Error loading process information:</b> <br><br>\n\n') + reply.data))
self._pid = ''
self._set_button_running(False)
if (self._data_loaded == False):
if (self.comboPids.count() <= 1):
self._close()
self._delete_notification(reply.id)
return
if ((noti.type == ui_pb2.MONITOR_PROCESS) and (reply.data != '')):
self._load_data(reply.data)
elif (noti.type == ui_pb2.STOP_MONITOR_PROCESS):
if (reply.data != ''):
self._show_message((QtCore.QCoreApplication.translate('proc_details', '<b>Error stopping monitoring process:</b><br><br>') + reply.data))
self._set_button_running(False)
self._delete_notification(reply.id)
def closeEvent(self, e):
self._close()
def _cb_close_clicked(self):
self._close()
def _cb_combo_pids_changed(self, idx):
if (idx == (- 1)):
return
def _cb_action_clicked(self):
if (not self.cmdAction.isChecked()):
self._stop_monitoring()
else:
self._start_monitoring()
def _show_message(self, text):
Message.ok(text, '', QtWidgets.QMessageBox.Warning)
def _delete_notification(self, nid):
if (nid in self._notifications_sent):
del self._notifications_sent[nid]
def _reset(self):
self._app_name = None
self._app_icon = None
self.comboPids.clear()
self.labelProcName.setText(QtCore.QCoreApplication.translate('proc_details', 'loading...'))
self.labelProcArgs.setText(QtCore.QCoreApplication.translate('proc_details', 'loading...'))
self.labelProcPath.setText(QtCore.QCoreApplication.translate('proc_details', 'loading...'))
self.labelProcIcon.clear()
self.labelStatm.setText('')
self.labelCwd.setText('')
self.labelChecksums.setText('')
self.labelParent.setText('')
for tidx in range(0, len(self.TABS)):
self.TABS[tidx]['text'].setPlainText('')
def _set_button_running(self, yes):
if yes:
self.cmdAction.setChecked(True)
self.cmdAction.setIcon(self.iconPause)
else:
self.cmdAction.setChecked(False)
self.cmdAction.setIcon(self.iconStart)
def _close(self):
self._stop_monitoring()
self.comboPids.clear()
self._pid = ''
self.hide()
def monitor(self, pids):
if (self._pid != ''):
self._stop_monitoring()
self._data_loaded = False
self._pids = pids
self._reset()
for pid in pids:
if (pid != None):
self.comboPids.addItem(pid)
self.show()
self._start_monitoring()
def _set_tab_text(self, tab_idx, text):
self.TABS[tab_idx]['scrollPos'] = self.TABS[tab_idx]['text'].verticalScrollBar().value()
self.TABS[tab_idx]['text'].setPlainText(text)
self.TABS[tab_idx]['text'].verticalScrollBar().setValue(self.TABS[tab_idx]['scrollPos'])
def _start_monitoring(self):
try:
if (self._pid != ''):
return
self._pid = self.comboPids.currentText()
if (self._pid == ''):
return
self._set_button_running(True)
noti = ui_pb2.Notification(clientName='', serverName='', type=ui_pb2.MONITOR_PROCESS, data=self._pid, rules=[])
self._nid = self._nodes.send_notification(self._pids[self._pid], noti, self._notification_callback)
self._notifications_sent[self._nid] = noti
except Exception as e:
print((self.LOG_TAG + 'exception starting monitoring: '), e)
def _stop_monitoring(self):
if (self._pid == ''):
return
self._set_button_running(False)
noti = ui_pb2.Notification(clientName='', serverName='', type=ui_pb2.STOP_MONITOR_PROCESS, data=str(self._pid), rules=[])
self._nid = self._nodes.send_notification(self._pids[self._pid], noti, self._notification_callback)
self._notifications_sent[self._nid] = noti
self._pid = ''
self._app_icon = None
def _load_data(self, data):
tab_idx = self.tabWidget.currentIndex()
try:
proc = json.loads(data)
self._load_app_icon(proc['Path'])
if (self._app_name != None):
self.labelProcName.setText((('<b>' + self._app_name) + '</b>'))
self.labelProcName.setToolTip((('<b>' + self._app_name) + '</b>'))
if ('Tree' in proc):
proc['Tree'].reverse()
self.labelParent.setText(('<b>Parent(s): </b>' + ' '.join((path['key'] for path in proc['Tree']))))
else:
self.labelParent.setText('<could not obtain hash>')
if (proc['Path'] not in proc['Args']):
self.labelProcPath.setVisible(True)
self.labelProcPath.setText('({0})'.format(proc['Path']))
else:
self.labelProcPath.setVisible(False)
if ('Checksums' in proc):
checksums = proc['Checksums']
hashes = ''
if (Config.OPERAND_PROCESS_HASH_MD5 in checksums):
hashes = '<b>md5:</b> {0}'.format(checksums[Config.OPERAND_PROCESS_HASH_MD5])
if (Config.OPERAND_PROCESS_HASH_SHA1 in checksums):
hashes = '<b>sha1:</b> {0}'.format(checksums[Config.OPERAND_PROCESS_HASH_SHA1])
self.labelChecksums.setText(hashes)
self.labelProcArgs.setFixedHeight(30)
self.labelProcArgs.setText(' '.join(proc['Args']))
self.labelProcArgs.setToolTip(' '.join(proc['Args']))
self.labelCwd.setText(('<b>CWD: </b>' + proc['CWD']))
self.labelCwd.setToolTip(('<b>CWD: </b>' + proc['CWD']))
self._load_mem_data(proc['Statm'])
if (tab_idx == self.TAB_STATUS):
self._set_tab_text(tab_idx, proc['Status'])
elif (tab_idx == self.TAB_DESCRIPTORS):
self._load_descriptors(proc['Descriptors'])
elif (tab_idx == self.TAB_IOSTATS):
self._load_iostats(proc['IOStats'])
elif (tab_idx == self.TAB_MAPS):
self._set_tab_text(tab_idx, proc['Maps'])
elif (tab_idx == self.TAB_STACK):
self._set_tab_text(tab_idx, proc['Stack'])
elif (tab_idx == self.TAB_ENVS):
self._load_env_vars(proc['Env'])
self._data_loaded = True
except Exception as e:
print((self.LOG_TAG + 'exception loading data: '), e)
def _load_app_icon(self, proc_path):
if (self._app_icon != None):
return
(self._app_name, self._app_icon, _, _) = self._apps_parser.get_info_by_path(proc_path, 'terminal')
pixmap = Icons.get_by_appname(self._app_icon)
self.labelProcIcon.setPixmap(pixmap)
if (self._app_name == None):
self._app_name = proc_path
def _load_iostats(self, iostats):
ioText = ('%-16s %dMB<br>%-16s %dMB<br>%-16s %d<br>%-16s %d<br>%-16s %dMB<br>%-16s %dMB<br>' % ('<b>Chars read:</b>', ((iostats['RChar'] / 1024) / 1024), '<b>Chars written:</b>', ((iostats['WChar'] / 1024) / 1024), '<b>Syscalls read:</b>', iostats['SyscallRead'], '<b>Syscalls write:</b>', iostats['SyscallWrite'], '<b>KB read:</b>', ((iostats['ReadBytes'] / 1024) / 1024), '<b>KB written: </b>', ((iostats['WriteBytes'] / 1024) / 1024)))
self.textIOStats.setPlainText('')
self.textIOStats.appendHtml(ioText)
def _load_mem_data(self, mem):
pagesize = 4096
memText = ('<b>VIRT:</b> %dMB, <b>RSS:</b> %dMB, <b>Libs:</b> %dMB, <b>Data:</b> %dMB, <b>Text:</b> %dMB' % ((((mem['Size'] * pagesize) / 1024) / 1024), (((mem['Resident'] * pagesize) / 1024) / 1024), (((mem['Lib'] * pagesize) / 1024) / 1024), (((mem['Data'] * pagesize) / 1024) / 1024), (((mem['Text'] * pagesize) / 1024) / 1024)))
self.labelStatm.setText(memText)
def _load_descriptors(self, descriptors):
text = ('%-12s%-40s%-8s -> %s\n\n' % ('Size', 'Time', 'Name', 'Symlink'))
for d in descriptors:
text += '{:<12}{:<40}{:<8} -> {}\n'.format(str(d['Size']), d['ModTime'], d['Name'], d['SymLink'])
self._set_tab_text(self.TAB_DESCRIPTORS, text)
def _load_env_vars(self, envs):
if (envs == {}):
self._set_tab_text(self.TAB_ENVS, '<no environment variables>')
return
text = ('%-15s\t%s\n\n' % ('Name', 'Value'))
for env_name in envs:
text += ('%-15s:\t%s\n' % (env_name, envs[env_name]))
self._set_tab_text(self.TAB_ENVS, text) |
# "@" and app object restored; the Dash callback decorator was garbled in
# extraction (an app object named `app` is an assumption).
@app.callback([Output('oura-activity-content-kpi-trend', 'children'), Output('oura-activity-content-kpi-trend', 'style'), Output('current-activity-content-trend', 'children')], [Input('goal-progress-button', 'n_clicks'), Input('total-burn-button', 'n_clicks'), Input('walking-equivalency-button', 'n_clicks')], [State('current-activity-content-trend', 'children')])
def activity_content_kpi_trend(goal_progress, total_burn, walking_equivalency, current_trend):
ctx = dash.callback_context
latest_dict = {'goal-progress-button': 'cal_active', 'total-burn-button': 'cal_total', 'walking-equivalency-button': 'daily_movement'}
if (len(ctx.triggered) == 1):
latest = latest_dict[ctx.triggered[0]['prop_id'].split('.')[0]]
if (current_trend == latest):
return ([], {'display': 'none'}, [])
else:
return (generate_content_kpi_trend('activity', latest), {'display': 'inherit'}, latest)
else:
return ([], {'display': 'none'}, []) |
def extractTeaserboynovelWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
# "@" restored; decorator prefix garbled in extraction ("_os"); assumed to be
# common.requires_os from the surrounding module.
@common.requires_os(*metadata.platforms)
def main(args=None):
slow_commands = ['gpresult.exe /z', 'systeminfo.exe']
commands = ['ipconfig /all', 'net localgroup administrators', 'net user', 'net user administrator', 'net user /domain', 'tasklist', 'net view', 'net view /domain', ('net view \\\\%s' % common.get_ip()), 'netstat -nao', 'whoami', 'hostname', 'net start', 'tasklist /svc', ('net time \\\\%s' % common.get_ip()), 'net use', 'net view', 'net start', 'net accounts', 'net localgroup', 'net group', 'net group "Domain Admins" /domain', 'net share', 'net config workstation']  # 'net user /domain' and 'tasklist' were fused in extraction
commands.extend(slow_commands)
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--sample', dest='sample', default=len(commands), type=int, help='Number of commands to run, chosen at random from the list of enumeration commands')
args = parser.parse_args(args)
sample = min(len(commands), args.sample)
if (sample < len(commands)):
random.shuffle(commands)
common.log('Running {} out of {} enumeration commands\n'.format(sample, len(commands)))
for command in commands[0:sample]:
common.log('About to call {}'.format(command))
if (command in slow_commands):
common.execute(command, kill=True, timeout=15)
common.log('[output suppressed]', log_type='-')
else:
common.execute(command) |
class InlineModelFormField(FormField):
widget = InlineFormWidget()
def __init__(self, form_class, pk, form_opts=None, **kwargs):
super(InlineModelFormField, self).__init__(form_class, **kwargs)
self._pk = pk
self.form_opts = form_opts
def get_pk(self):
if isinstance(self._pk, (tuple, list)):
return tuple((getattr(self.form, pk).data for pk in self._pk))
return getattr(self.form, self._pk).data
def populate_obj(self, obj, name):
for (name, field) in iteritems(self.form._fields):
if (name != self._pk):
field.populate_obj(obj, name) |
class AdCreativeObjectStorySpec(AbstractObject):
def __init__(self, api=None):
super(AdCreativeObjectStorySpec, self).__init__()
self._isAdCreativeObjectStorySpec = True
self._api = api
class Field(AbstractObject.Field):
instagram_actor_id = 'instagram_actor_id'
link_data = 'link_data'
page_id = 'page_id'
photo_data = 'photo_data'
template_data = 'template_data'
text_data = 'text_data'
video_data = 'video_data'
_field_types = {'instagram_actor_id': 'string', 'link_data': 'AdCreativeLinkData', 'page_id': 'string', 'photo_data': 'AdCreativePhotoData', 'template_data': 'AdCreativeLinkData', 'text_data': 'AdCreativeTextData', 'video_data': 'AdCreativeVideoData'}
@classmethod
def _get_field_enum_info(cls):
field_enum_info = {}
return field_enum_info |
class Batch(WebContainer):
simulations: Dict[(TaskName, SimulationType)] = pd.Field(..., title='Simulations', description='Mapping of task names to Simulations to run as a batch.')
folder_name: str = pd.Field('default', title='Folder Name', description='Name of folder to store member of each batch on web UI.')
verbose: bool = pd.Field(True, title='Verbose', description='Whether to print info messages and progressbars.')
solver_version: str = pd.Field(None, title='Solver Version', description='Custom solver version to use, otherwise uses default for the current front end version.')
callback_url: str = pd.Field(None, title='Callback URL', description="Http PUT url to receive simulation finish event. The body content is a json file with fields ``{'id', 'status', 'name', 'workUnit', 'solverVersion'}``.")
simulation_type: str = pd.Field('tidy3d', title='Simulation Type', description='Type of each simulation in the batch, used internally only.')
parent_tasks: Dict[(str, Tuple[(TaskId, ...)])] = pd.Field(None, title='Parent Tasks', description='Collection of parent task ids for each job in batch, used internally only.')
jobs: Dict[(TaskName, Job)] = pd.Field(None, title='Simulations', description='Mapping of task names to individual Job object for each task in the batch. Set by ``Batch.upload``, leave as None.')
@staticmethod
def _check_path_dir(path_dir: str) -> None:
if (not os.path.exists(path_dir)):
os.makedirs(path_dir, exist_ok=True)
def run(self, path_dir: str=DEFAULT_DATA_DIR) -> BatchData:
self._check_path_dir(path_dir)
self.start()
self.monitor()
return self.load(path_dir=path_dir)
@pd.validator('jobs', always=True)
def _upload(cls, val, values) -> None:
if (val is not None):
return val
JobType = cls.__fields__['jobs'].type_
parent_tasks = values.get('parent_tasks')
verbose = bool(values.get('verbose'))
solver_version = values.get('solver_version')
jobs = {}
for (task_name, simulation) in values.get('simulations').items():
upload_kwargs = {key: values.get(key) for key in JobType._upload_fields}
upload_kwargs['task_name'] = task_name
upload_kwargs['simulation'] = simulation
upload_kwargs['verbose'] = verbose
upload_kwargs['solver_version'] = solver_version
if (parent_tasks and (task_name in parent_tasks)):
upload_kwargs['parent_tasks'] = parent_tasks[task_name]
job = JobType(**upload_kwargs)
jobs[task_name] = job
return jobs
def get_info(self) -> Dict[(TaskName, TaskInfo)]:
info_dict = {}
for (task_name, job) in self.jobs.items():
task_info = job.get_info()
info_dict[task_name] = task_info
return info_dict
def start(self) -> None:
for (_, job) in self.jobs.items():
job.start()
def get_run_info(self) -> Dict[(TaskName, RunInfo)]:
run_info_dict = {}
for (task_name, job) in self.jobs.items():
run_info = job.get_run_info()
run_info_dict[task_name] = run_info
return run_info_dict
def monitor(self) -> None:
def pbar_description(task_name: str, status: str) -> str:
description = f'{task_name}: status = {status}'
if (('error' in status) or ('diverge' in status)):
description = f'[red]{description}'
return description
run_statuses = ['draft', 'queued', 'preprocess', 'queued_solver', 'running', 'postprocess', 'visualize', 'success']
end_statuses = ('success', 'error', 'errored', 'diverged', 'diverge', 'deleted', 'draft')
if self.verbose:
console = get_logging_console()
console.log('Started working on Batch.')
self.estimate_cost()
console.log("Use 'Batch.real_cost()' to get the billed FlexCredit cost after the Batch has completed.")
with Progress(console=console) as progress:
pbar_tasks = {}
for (task_name, job) in self.jobs.items():
status = job.status
description = pbar_description(task_name, status)
pbar = progress.add_task(description, total=(len(run_statuses) - 1))
pbar_tasks[task_name] = pbar
while any(((job.status not in end_statuses) for job in self.jobs.values())):
for (task_name, job) in self.jobs.items():
pbar = pbar_tasks[task_name]
status = job.status
description = pbar_description(task_name, status)
if (status not in run_statuses):
completed = run_statuses.index('success')
else:
completed = run_statuses.index(status)
progress.update(pbar, description=description, completed=completed)
time.sleep(web.REFRESH_TIME)
for (task_name, job) in self.jobs.items():
pbar = pbar_tasks[task_name]
status = job.status
description = pbar_description(task_name, status)
progress.update(pbar, description=description, completed=(len(run_statuses) - 1), refresh=True)
console.log('Batch complete.')
else:
while any(((job.status not in end_statuses) for job in self.jobs.values())):
time.sleep(web.REFRESH_TIME)
@staticmethod
def _job_data_path(task_id: TaskId, path_dir: str=DEFAULT_DATA_DIR):
return os.path.join(path_dir, f'{str(task_id)}.hdf5')
@staticmethod
def _batch_path(path_dir: str=DEFAULT_DATA_DIR):
return os.path.join(path_dir, 'batch.hdf5')
def download(self, path_dir: str=DEFAULT_DATA_DIR) -> None:
self.to_file(self._batch_path(path_dir=path_dir))
for (task_name, job) in self.jobs.items():
job_path = self._job_data_path(task_id=job.task_id, path_dir=path_dir)
if ('error' in job.status):
log.warning(f"Not downloading '{task_name}' as the task errored.")
continue
job.download(path=job_path)
def load(self, path_dir: str=DEFAULT_DATA_DIR) -> BatchData:
self.to_file(self._batch_path(path_dir=path_dir))
if (self.jobs is None):
raise DataError("Can't load batch results, hasn't been uploaded.")
task_paths = {}
task_ids = {}
for (task_name, job) in self.jobs.items():
if ('error' in job.status):
log.warning(f"Not loading '{task_name}' as the task errored.")
continue
task_paths[task_name] = self._job_data_path(task_id=job.task_id, path_dir=path_dir)
task_ids[task_name] = self.jobs[task_name].task_id
return BatchData(task_paths=task_paths, task_ids=task_ids, verbose=self.verbose)
def delete(self) -> None:
for (_, job) in self.jobs.items():
job.delete()
def real_cost(self, verbose: bool=True) -> float:
real_cost_sum = 0.0
for (_, job) in self.jobs.items():
cost_job = job.real_cost(verbose=False)
if (cost_job is not None):
real_cost_sum += cost_job
real_cost_sum = (real_cost_sum or None)
if (real_cost_sum and verbose):
console = get_logging_console()
console.log(f'Total billed flex credit cost: {real_cost_sum:1.3f}.')
return real_cost_sum
def estimate_cost(self, verbose: bool=True) -> float:
batch_cost = sum((job.estimate_cost(verbose=False) for (_, job) in self.jobs.items()))
if verbose:
console = get_logging_console()
if ((batch_cost is not None) and (batch_cost > 0)):
console.log(f'Maximum FlexCredit cost: {batch_cost:1.3f} for the whole batch.')
else:
console.log('Could not get estimated batch cost!')
return batch_cost |
@pytest.fixture
def storage(request):
if ((not request) or (not hasattr(request, 'param'))):
file_does_not_exist = False
else:
file_does_not_exist = request.param.get('file_does_not_exist', False)
if file_does_not_exist:
with tempfile.TemporaryDirectory() as tmp_dir:
filename = os.path.join(tmp_dir, 'non_existent_file.jsonl')
storage_instance = FileSpanStorage(filename)
(yield storage_instance)
else:
with tempfile.NamedTemporaryFile(delete=True) as tmp_file:
filename = tmp_file.name
storage_instance = FileSpanStorage(filename)
(yield storage_instance) |
def test_fname():
assert (fname('data', 'json') == 'data.json')
assert (fname('data.json', 'json') == 'data.json')
assert (fname('pic', 'png') == 'pic.png')
assert (fname('pic.png', 'png') == 'pic.png')
assert (fname('report.pdf', 'pdf') == 'report.pdf')
assert (fname('report.png', 'pdf') == 'report.png') |
def draw(input_file_name: Path, output_file_name: Path, boundary_box: BoundaryBox, configuration: Optional[MapConfiguration]=None) -> None:
if (configuration is None):
configuration = MapConfiguration(SCHEME)
osm_data: OSMData = OSMData()
osm_data.parse_osm_file(input_file_name)
flinger: MercatorFlinger = MercatorFlinger(boundary_box, configuration.zoom_level, osm_data.equator_length)
constructor: Constructor = Constructor(osm_data, flinger, EXTRACTOR, configuration)
constructor.construct()
svg: svgwrite.Drawing = svgwrite.Drawing(str(output_file_name), size=flinger.size)
map_: Map = Map(flinger, svg, configuration)
map_.draw(constructor)
svg.write(output_file_name.open('w')) |
class Solution():
def accountsMerge(self, accounts: List[List[str]]) -> List[List[str]]:
def get_connections(accounts):
track = {}
emails = {}
for (i, acc) in enumerate(accounts):
if (i not in track):
track[i] = []
for (j, email) in enumerate(acc):
if (j == 0):
continue
if (email not in emails):
emails[email] = []
for k in emails[email]:
if (k not in track):
track[k] = []
track[k].append(i)
track[i].append(k)
emails[email].append(i)
return track
track = get_connections(accounts)
visited = set()
parts = []
for (i, acc) in enumerate(accounts):
if (i in visited):
continue
part = []
stack = [i]
while stack:
curr = stack.pop()
if (curr in visited):
continue
visited.add(curr)
part.append(curr)
for ne in track.get(curr, []):
if (ne in visited):
continue
stack.append(ne)
parts.append(part)
ret = []
for part in parts:
name = accounts[part[0]][0]
acc = set()
for pp in part:
acc = acc.union(set(accounts[pp][1:]))
ret.append(([name] + sorted(acc)))
return ret |
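# Usage sketch (added): accounts sharing any email are merged into one entry
# with the owner's name and a sorted, de-duplicated email list; connectivity is
# found with an iterative DFS over the adjacency map built by get_connections.
accounts = [['John', 'j1@mail.com', 'j2@mail.com'], ['John', 'j2@mail.com', 'j3@mail.com'], ['Mary', 'm@mail.com']]
merged = Solution().accountsMerge(accounts)
assert sorted(merged) == [['John', 'j1@mail.com', 'j2@mail.com', 'j3@mail.com'], ['Mary', 'm@mail.com']] |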
def _get_session(path: Path) -> BaseSession:
is_memory = (path.name == ':memory:')
if is_memory:
database_uri = 'sqlite:///:memory:'
else:
database_uri = f'sqlite:///{path.resolve()}'
engine = create_engine(database_uri)
Session = sessionmaker(bind=engine)
session = Session()
return session |
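# Usage sketch (assumes SQLAlchemy's create_engine/sessionmaker are imported in
# the full module; Path(':memory:') is this helper's convention for in-memory):
# session = _get_session(Path(':memory:'))        # throwaway in-memory database
# session = _get_session(Path('runs/db.sqlite'))  # file-backed database; path is illustrative |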
def get_record_csv(storage, ensemble_id1, keyword, poly_ran):
csv = run_in_loop(records.get_ensemble_record(db=storage, name=keyword, ensemble_id=ensemble_id1)).body
record_df1 = pd.read_csv(io.BytesIO(csv), index_col=0, float_precision='round_trip')
assert (len(record_df1.columns) == poly_ran['gen_data_entries'])
assert (len(record_df1.index) == poly_ran['reals']) |
def set_cli_author(click_context: click.Context) -> None:
config = get_or_create_cli_config()
cli_author = config.get(AUTHOR_KEY, None)
if (cli_author is None):
raise click.ClickException('The AEA configurations are not initialized. Use `aea init` before continuing.')
click_context.obj.set_config('cli_author', cli_author) |
@pytest.mark.integration_test
@pytest.mark.parametrize('config_str, expected, extra_files, expectation', [('GEN_KW KW_NAME template.txt kw.txt prior.txt\nRANDOM_SEED 1234', 'MY_KEYWORD -0.881423', [], does_not_raise()), ('GEN_KW KW_NAME template.txt kw.txt prior.txt INIT_FILES:custom_param%d', 'MY_KEYWORD 1.31', [('custom_param0', 'MY_KEYWORD 1.31')], does_not_raise()), ('GEN_KW KW_NAME template.txt kw.txt prior.txt INIT_FILES:custom_param%d', 'MY_KEYWORD 1.31', [('custom_param0', '1.31')], does_not_raise()), ('GEN_KW KW_NAME template.txt kw.txt prior.txt INIT_FILES:custom_param0', 'Not expecting a file', [], pytest.raises(ConfigValidationError, match='Loading GEN_KW from files requires %d'))])
def test_gen_kw(storage, tmpdir, config_str, expected, extra_files, expectation):
with tmpdir.as_cwd():
config = dedent('\n JOBNAME my_name%d\n NUM_REALIZATIONS 1\n ')
config += config_str
with open('config.ert', mode='w', encoding='utf-8') as fh:
fh.writelines(config)
with open('template.txt', mode='w', encoding='utf-8') as fh:
fh.writelines('MY_KEYWORD <MY_KEYWORD>')
with open('prior.txt', mode='w', encoding='utf-8') as fh:
fh.writelines('MY_KEYWORD NORMAL 0 1')
for (fname, contents) in extra_files:
write_file(fname, contents)
with expectation:
create_runpath(storage, 'config.ert')
assert (Path('simulations/realization-0/iter-0/kw.txt').read_text(encoding='utf-8') == expected) |
@pytest.mark.parametrize(('cfg', 'expected'), [param({'_target_': 'tests.instantiate.ArgsClass', 'child': {'_target_': 'tests.instantiate.ArgsClass'}}, ArgsClass(child=ArgsClass()), id='config:no_params'), param({'_target_': 'tests.instantiate.ArgsClass', '_args_': [1], 'child': {'_target_': 'tests.instantiate.ArgsClass', '_args_': [2]}}, ArgsClass(1, child=ArgsClass(2)), id='config:args_only'), param({'_target_': 'tests.instantiate.ArgsClass', '_args_': [1], 'foo': 10, 'child': {'_target_': 'tests.instantiate.ArgsClass', '_args_': [2]}}, ArgsClass(1, foo=10, child=ArgsClass(2)), id='config:args+kwargs'), param({'_target_': 'tests.instantiate.ArgsClass', 'child': {'_target_': 'tests.instantiate.ArgsClass', '_args_': [2]}, 'foo': 10}, ArgsClass(foo=10, child=ArgsClass(2)), id='config:kwargs_only')])
def test_recursive_instantiate_args_kwargs(cfg: Any, expected: Any) -> None:
assert (instantiate(cfg) == expected) |
def build_resnet_backbone(depth, activation):
norm = 'BN'
    num_groups = 1
    stride_in_1x1 = False
width_per_group = 64
bottleneck_channels = (num_groups * width_per_group)
in_channels = 64
out_channels = 256
stem = BasicStem(in_channels=3, out_channels=64, norm=norm, activation=activation)
num_blocks_per_stage = {18: [2, 2, 2, 2], 34: [3, 4, 6, 3], 50: [3, 4, 6, 3], 101: [3, 4, 23, 3], 152: [3, 8, 36, 3]}[depth]
stages = []
for (idx, stage_idx) in enumerate(range(2, 6)):
dilation = 1
first_stride = (1 if ((idx == 0) or ((stage_idx == 5) and (dilation == 2))) else 2)
stage_kargs = {'num_blocks': num_blocks_per_stage[idx], 'stride_per_block': ([first_stride] + ([1] * (num_blocks_per_stage[idx] - 1))), 'in_channels': in_channels, 'out_channels': out_channels, 'norm': norm, 'activation': activation}
if (depth in [18, 34]):
stage_kargs['block_class'] = BasicBlock
else:
stage_kargs['bottleneck_channels'] = bottleneck_channels
stage_kargs['stride_in_1x1'] = stride_in_1x1
stage_kargs['dilation'] = dilation
stage_kargs['num_groups'] = num_groups
stage_kargs['block_class'] = BottleneckBlock
blocks = ResNet.make_stage(**stage_kargs)
in_channels = out_channels
out_channels *= 2
bottleneck_channels *= 2
stages.append(blocks)
return ResNet(stem, stages, num_classes=1000) |
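# Usage sketch: build_resnet_backbone(50, activation) wires a detectron2-style
# ResNet-50 with a 1000-way classification head: a stem (3 -> 64 channels)
# followed by four stages whose output channels double each stage. Depths 18
# and 34 switch to BasicBlock; the deeper variants use BottleneckBlock. The
# BasicStem/ResNet/BasicBlock/BottleneckBlock classes are assumed to come from
# the surrounding module, which is not shown in this snippet. |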
class Axis(BasicObject):
attributes = {'AXIS-ID': utils.scalar, 'COORDINATES': utils.vector, 'SPACING': utils.scalar}
def __init__(self, attic, lf):
super().__init__(attic, lf=lf)
    @property
    def axis_id(self):
        return self['AXIS-ID']
    @property
    def coordinates(self):
        return self['COORDINATES']
    @property
    def spacing(self):
        return self['SPACING']
def describe_attr(self, buf, width, indent, exclude):
d = OrderedDict()
d['Description'] = 'AXIS-ID'
d['Spacing'] = 'SPACING'
d['Coordinates'] = 'COORDINATES'
utils.describe_attributes(buf, d, self, width, indent, exclude) |
class DiscarderHandler(THBEventHandler):
interested = ['action_after', 'action_shootdown']
def handle(self, evt_type, act):
if ((evt_type == 'action_shootdown') and isinstance(act, ActionStageLaunchCard)):
src = act.source
if (not src.has_skill(Discarder)):
return act
g = self.game
current = PlayerTurn.get_current(g).target
if (src is not current):
return act
self.card = c = act.card
if (not c.is_card(AttackCard)):
if (c.is_card(PhysicalCard) or ('treat_as' in c.category)):
raise DiscarderAttackOnly
else:
return act
dist = LaunchCard.calc_distance(g, src, Discarder(src))
dist.pop(src, '')
nearest = max(min(dist.values()), 0)
avail = {p for p in dist if (dist[p] <= nearest)}
if (not (set(act.target_list) <= avail)):
raise DiscarderDistanceLimit
elif ((evt_type == 'action_after') and isinstance(act, FinalizeStage)):
tgt = act.target
if tgt.has_skill(Discarder):
tgt.skills.remove(Discarder)
tgt.tags['reisen_discarder'] = False
return act |
class EnvoyEventProcessor(AbstractGamestateDataProcessor):
ID = 'envoy_events'
DEPENDENCIES = [CountryProcessor.ID, LeaderProcessor.ID]
def extract_data_from_gamestate(self, dependencies):
countries_dict = dependencies[CountryProcessor.ID]
leaders = dependencies[LeaderProcessor.ID]
for (envoy_id_ingame, raw_leader_dict) in sorted(self._gamestate_dict['leaders'].items()):
if (not isinstance(raw_leader_dict, dict)):
continue
if (raw_leader_dict.get('class') != 'envoy'):
continue
if (envoy_id_ingame not in leaders):
continue
envoy = leaders[envoy_id_ingame]
country = envoy.country
target_country = None
location = raw_leader_dict.get('location', {})
assignment = location.get('assignment', 'idle')
description = None
if (assignment == 'improve_relations'):
event_type = datamodel.HistoricalEventType.envoy_improving_relations
target_country = countries_dict.get(location.get('id'))
elif (assignment == 'harm_relations'):
event_type = datamodel.HistoricalEventType.envoy_harming_relations
target_country = countries_dict.get(location.get('id'))
elif (assignment == 'galactic_community'):
event_type = datamodel.HistoricalEventType.envoy_community
elif (assignment == 'federation'):
event_type = datamodel.HistoricalEventType.envoy_federation
federations = self._gamestate_dict.get('federation', {})
if isinstance(federations, dict):
federation_name = dump_name(federations.get(location.get('id'), {}).get('name', 'Unknown Federation'))
description = self._get_or_add_shared_description(federation_name)
else:
event_type = None
event_is_known = country.has_met_player()
if (target_country is not None):
event_is_known &= target_country.has_met_player()
previous_assignment = self._previous_assignment(envoy)
assignment_is_the_same = False
if (previous_assignment is not None):
if ((previous_assignment.event_type == event_type) and (previous_assignment.target_country == target_country)):
assignment_is_the_same = True
else:
previous_assignment.end_date_days = (self._basic_info.date_in_days - 1)
self._session.add(previous_assignment)
if ((not assignment_is_the_same) and (event_type is not None)):
new_assignment_event = datamodel.HistoricalEvent(start_date_days=self._basic_info.date_in_days, country=country, leader=envoy, event_type=event_type, event_is_known_to_player=event_is_known, target_country=target_country, db_description=description)
self._session.add(new_assignment_event)
def _previous_assignment(self, envoy: datamodel.Leader) -> Optional[datamodel.HistoricalEvent]:
return self._session.query(datamodel.HistoricalEvent).filter(datamodel.HistoricalEvent.end_date_days.is_(None)).filter_by(leader=envoy).order_by(datamodel.HistoricalEvent.start_date_days.desc()).first() |
def load_hydra_config(config_module: str, config_name: str, hydra_overrides: Dict[(str, str)]) -> DictConfig:
with initialize_config_module(config_module=config_module):
cfg = compose(config_name=config_name, overrides=[((key + '=') + str(val)) for (key, val) in hydra_overrides.items()])
return cfg |
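# Hypothetical usage (module, config, and override names are illustrative):
# cfg = load_hydra_config('myproject.conf', 'train', {'optimizer.lr': '0.01'})
# composes the 'train' config from the installed config module with the
# learning rate overridden, returning an OmegaConf DictConfig. |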
def remove_duplicate_sg(security_groups):
for (each_sg, duplicate_sg_name) in SECURITYGROUP_REPLACEMENTS.items():
if ((each_sg in security_groups) and (duplicate_sg_name in security_groups)):
LOG.info('Duplicate SG found. Removing %s in favor of %s.', duplicate_sg_name, each_sg)
security_groups.remove(duplicate_sg_name)
return security_groups |
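# Illustrative mapping and call (names are made up, not from the source):
# SECURITYGROUP_REPLACEMENTS = {'app_sg': 'legacy_app_sg'}
# remove_duplicate_sg(['app_sg', 'legacy_app_sg', 'db_sg'])
# -> ['app_sg', 'db_sg']; the value (duplicate) is dropped in favor of the key. |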
class UpdateNoIdMixin(object):
qbo_object_name = ''
qbo_json_object_name = ''
def save(self, qb=None, request_id=None):
if (not qb):
qb = QuickBooks()
json_data = qb.update_object(self.qbo_object_name, self.to_json(), request_id=request_id)
obj = type(self).from_json(json_data[self.qbo_object_name])
return obj |
def _call(func_name: str, ret_val: Optional[List[Expression]]=None, operands: Optional[List[Expression]]=None) -> Assignment:
if (not ret_val):
ret_val = list()
if (not operands):
operands = list()
return Assignment(ListOperation(ret_val), Call(ImportedFunctionSymbol(func_name, 66), operands)) |
def lazy_import():
from fastly.model.domain_inspector_entry import DomainInspectorEntry
from fastly.model.historical_domains import HistoricalDomains
from fastly.model.historical_domains_meta import HistoricalDomainsMeta
from fastly.model.historical_domains_response_all_of import HistoricalDomainsResponseAllOf
globals()['DomainInspectorEntry'] = DomainInspectorEntry
globals()['HistoricalDomains'] = HistoricalDomains
globals()['HistoricalDomainsMeta'] = HistoricalDomainsMeta
globals()['HistoricalDomainsResponseAllOf'] = HistoricalDomainsResponseAllOf |
@_server.peripheral_model
class EthernetModel(object):
    frame_queues = defaultdict(deque)
    calc_crc = True
    rx_frame_isr = None
    rx_isr_enabled = False
    frame_times = defaultdict(deque)
    @classmethod
    def enable_rx_isr(cls, interface_id):
        cls.rx_isr_enabled = True
        if (cls.frame_queues[interface_id] and (cls.rx_frame_isr is not None)):
            Interrupts.trigger_interrupt(cls.rx_frame_isr, 'Ethernet_RX_Frame')
    @classmethod
    def disable_rx_isr(cls, interface_id):
        cls.rx_isr_enabled = False
    @classmethod
    @_server.tx_msg
    def tx_frame(cls, interface_id, frame):
        print(('Sending Frame (%i): ' % len(frame)), binascii.hexlify(frame))
        msg = {'interface_id': interface_id, 'frame': frame}
        return msg
    @classmethod
    @_server.reg_rx_handler
    def rx_frame(cls, msg):
        interface_id = msg['interface_id']
        frame = msg['frame']
        cls.frame_queues[interface_id].append(frame)
        cls.frame_times[interface_id].append(time.time())
        log.info(('Adding Frame to: %s' % interface_id))
        if ((cls.rx_frame_isr is not None) and cls.rx_isr_enabled):
            Interrupts.trigger_interrupt(cls.rx_frame_isr, 'Ethernet_RX_Frame')
    @classmethod
    def get_rx_frame(cls, interface_id, get_time=False):
        frame = None
        rx_time = None
        log.info(('Checking for: %s' % str(interface_id)))
        if cls.frame_queues[interface_id]:
            log.info('Returning frame')
            frame = cls.frame_queues[interface_id].popleft()
            rx_time = cls.frame_times[interface_id].popleft()
        if get_time:
            return (frame, rx_time)
        else:
            return frame
    @classmethod
    def get_frame_info(cls, interface_id):
        queue = cls.frame_queues[interface_id]
        if queue:
            return (len(queue), len(queue[0]))
        return (0, 0) |
class OptionSeriesColumnDataDragdropDraghandle(Options):
    @property
    def className(self):
        return self._config_get('highcharts-drag-handle')
    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)
    @property
    def color(self):
        return self._config_get('#fff')
    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)
    @property
    def lineColor(self):
        return self._config_get('rgba(0, 0, 0, 0.6)')
    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    @property
    def lineWidth(self):
        return self._config_get(1)
    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    @property
    def zIndex(self):
        return self._config_get(901)
    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False) |
class OptionSeriesBulletSonificationContexttracksMappingLowpass(Options):
    @property
    def frequency(self) -> 'OptionSeriesBulletSonificationContexttracksMappingLowpassFrequency':
        return self._config_sub_data('frequency', OptionSeriesBulletSonificationContexttracksMappingLowpassFrequency)
    @property
    def resonance(self) -> 'OptionSeriesBulletSonificationContexttracksMappingLowpassResonance':
        return self._config_sub_data('resonance', OptionSeriesBulletSonificationContexttracksMappingLowpassResonance) |
def map_ec2route_to_route(route: Dict[(str, Any)]) -> Route:
destination_cidr = route['DestinationCidrBlock']
route_target_type = RouteTargetType.OTHER
route_target_id = ''
if ('VpcPeeringConnectionId' in route):
route_target_type = RouteTargetType.VPC_PEERING
route_target_id = route['VpcPeeringConnectionId']
elif (('GatewayId' in route) and route['GatewayId'].startswith('igw-')):
route_target_type = RouteTargetType.INTERNET
route_target_id = route['GatewayId']
state = RouteState.UNKNOWN
if (route['State'] == 'active'):
state = RouteState.ACTIVE
route_target = RouteTarget(route_target_id, route_target_type)
return Route(destination_cidr, route_target, state) |
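# Sketch of input and output (field values are illustrative):
# route = {'DestinationCidrBlock': '0.0.0.0/0', 'GatewayId': 'igw-0abc1234', 'State': 'active'}
# map_ec2route_to_route(route)
# -> Route('0.0.0.0/0', RouteTarget('igw-0abc1234', RouteTargetType.INTERNET), RouteState.ACTIVE) |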
class SurfaceAberrationAtDistance(OpticalElement):
def __init__(self, surface_aberration, distance):
self.fresnel = FresnelPropagator(surface_aberration.input_grid, distance)
self.surface_aberration = surface_aberration
def forward(self, wavefront):
wf = self.fresnel.forward(wavefront)
wf = self.surface_aberration.forward(wf)
return self.fresnel.backward(wf)
def backward(self, wavefront):
wf = self.fresnel.forward(wavefront)
wf = self.surface_aberration.backward(wf)
return self.fresnel.backward(wf) |
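# The element above models a surface that sits some distance away from the
# plane the rest of the system works in: forward() Fresnel-propagates to the
# surface, applies the aberration, and propagates back, so callers keep
# exchanging wavefronts in the original plane. Usage sketch (hcipy-style
# names assumed, distance in meters is illustrative):
# element = SurfaceAberrationAtDistance(surface_aberration, distance=0.1)
# wf_out = element.forward(wf_in) |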
@_invites_misc_routes.route('/speaker-invites/<int:speaker_invite_id>/reject-invite')
@jwt_required
def reject_invite(speaker_invite_id):
try:
speaker_invite = SpeakerInvite.query.filter_by(id=speaker_invite_id).one()
except NoResultFound:
raise NotFoundError({'source': ''}, 'Speaker Invite Not Found')
else:
if (not (current_user.email == speaker_invite.email)):
raise ForbiddenError({'source': ''}, 'Invitee access is required.')
elif (speaker_invite.status == 'accepted'):
raise ConflictError({'pointer': '/data/status'}, 'Accepted speaker invite can not be rejected.')
elif (speaker_invite.status == 'rejected'):
raise ConflictError({'pointer': '/data/status'}, 'Speaker invite is already rejected.')
try:
speaker_invite.status = 'rejected'
        save_to_db(speaker_invite, 'speaker invite rejected')
except Exception:
        raise UnprocessableEntityError({'source': ''}, 'Error while rejecting speaker invite.')
return jsonify(success=True, message='Speaker invite rejected successfully') |
class OptionPlotoptionsBellcurvePointEvents(Options):
    @property
    def click(self):
        return self._config_get(None)
    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)
    @property
    def drag(self):
        return self._config_get(None)
    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)
    @property
    def dragStart(self):
        return self._config_get(None)
    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)
    @property
    def drop(self):
        return self._config_get(None)
    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mouseOut(self):
        return self._config_get(None)
    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mouseOver(self):
        return self._config_get(None)
    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    @property
    def remove(self):
        return self._config_get(None)
    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)
    @property
    def select(self):
        return self._config_get(None)
    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)
    @property
    def unselect(self):
        return self._config_get(None)
    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)
    @property
    def update(self):
        return self._config_get(None)
    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False) |
def is_dbt_package_up_to_date() -> bool:
installed_version = _get_installed_dbt_package_version()
if (installed_version is None):
return False
required_version = _get_required_dbt_package_version()
if (not required_version):
return True
return (installed_version == required_version) |
class ChatMessage(ft.Row):
def __init__(self, message: Message):
super().__init__()
self.vertical_alignment = 'start'
self.controls = [ft.CircleAvatar(content=ft.Text(self.get_initials(message.user_name)), color=ft.colors.WHITE, bgcolor=self.get_avatar_color(message.user_name)), ft.Column([ft.Text(message.user_name, weight='bold'), ft.Text(message.text, selectable=True)], tight=True, spacing=5)]
def get_initials(self, user_name: str):
if user_name:
return user_name[:1].capitalize()
else:
return 'Unknown'
def get_avatar_color(self, user_name: str):
colors_lookup = [ft.colors.AMBER, ft.colors.BLUE, ft.colors.BROWN, ft.colors.CYAN, ft.colors.GREEN, ft.colors.INDIGO, ft.colors.LIME, ft.colors.ORANGE, ft.colors.PINK, ft.colors.PURPLE, ft.colors.RED, ft.colors.TEAL, ft.colors.YELLOW]
return colors_lookup[(hash(user_name) % len(colors_lookup))] |
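# Note on get_avatar_color above: Python's built-in hash() is randomized per
# process for strings (PYTHONHASHSEED), so the chosen color is stable within a
# session but can change between runs. A deterministic variant (a sketch, not
# the library's API) would be:
# def stable_avatar_index(user_name: str, n_colors: int) -> int:
#     return sum(map(ord, user_name)) % n_colors |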
def test_variable_COLR_without_VarIndexMap():
font1 = TTFont()
font1.setGlyphOrder(['.notdef', 'A'])
font1['COLR'] = buildCOLR({'A': (ot.PaintFormat.PaintSolid, 0, 1.0)})
font2 = deepcopy(font1)
font2['COLR'].table.BaseGlyphList.BaseGlyphPaintRecord[0].Paint.Alpha = 0.0
master_fonts = [font1, font2]
varfont = deepcopy(font1)
axis_order = ['XXXX']
model = VariationModel([{}, {'XXXX': 1.0}], axis_order)
_add_COLR(varfont, model, master_fonts, axis_order)
colr = varfont['COLR'].table
assert (len(colr.BaseGlyphList.BaseGlyphPaintRecord) == 1)
baserec = colr.BaseGlyphList.BaseGlyphPaintRecord[0]
assert (baserec.Paint.Format == ot.PaintFormat.PaintVarSolid)
assert (baserec.Paint.VarIndexBase == 0)
assert (colr.VarStore is not None)
assert (len(colr.VarStore.VarData) == 1)
assert (len(colr.VarStore.VarData[0].Item) == 1)
assert (colr.VarStore.VarData[0].Item[0] == [(- 16384)])
assert (colr.VarIndexMap is None) |
class FilterInterface(object):
def __init__(self):
self.is_valid = False
self._re_do_not_sync = EMPTY_PATTERN
self._re_do_not_sync_from_list = EMPTY_PATTERN
self._re_hide_nodes = EMPTY_PATTERN
self._re_hide_topics = EMPTY_PATTERN
self._re_hide_services = EMPTY_PATTERN
def load(self, mastername='', ignore_nodes=[], sync_nodes=[], ignore_topics=[], sync_topics=[], ignore_srv=[], sync_srv=[], ignore_type=[], ignore_publishers=[], ignore_subscribers=[], do_not_sync=[]):
self.__interface_file = interface_file = resolve_url(get_ros_param('~interface_url', ''))
self.__mastername = mastername
self.__data = data = (read_interface(interface_file) if interface_file else {})
self._re_ignore_nodes = create_pattern('ignore_nodes', data, interface_file, ignore_nodes, mastername)
self._re_sync_nodes = create_pattern('sync_nodes', data, interface_file, sync_nodes, mastername)
self._re_ignore_topics = create_pattern('ignore_topics', data, interface_file, ignore_topics, mastername)
self._re_sync_topics = create_pattern('sync_topics', data, interface_file, sync_topics, mastername)
self._re_ignore_services = create_pattern('ignore_services', data, interface_file, ignore_srv, mastername)
self._re_sync_services = create_pattern('sync_services', data, interface_file, sync_srv, mastername)
self._re_ignore_type = create_pattern('ignore_type', data, interface_file, ignore_type, mastername)
self._re_ignore_publishers = create_pattern('ignore_publishers', data, interface_file, ignore_publishers, mastername)
self._re_ignore_subscribers = create_pattern('ignore_subscribers', data, interface_file, ignore_subscribers, mastername)
self._sync_remote_nodes = False
if interface_file:
if ('sync_remote_nodes' in data):
self._sync_remote_nodes = data['sync_remote_nodes']
else:
self._sync_remote_nodes = get_ros_param('~sync_remote_nodes', self._sync_remote_nodes)
if do_not_sync:
self._re_do_not_sync = gen_pattern(do_not_sync, 'do_not_sync')
else:
self.read_do_not_sync()
self.is_valid = True
def set_hide_pattern(self, re_hide_nodes=EMPTY_PATTERN, re_hide_topics=EMPTY_PATTERN, re_hide_services=EMPTY_PATTERN):
self._re_hide_nodes = re_hide_nodes
self._re_hide_topics = re_hide_topics
self._re_hide_services = re_hide_services
def read_do_not_sync(self):
_do_not_sync = get_ros_param('do_not_sync', [])
if (sys.version_info[0] <= 2):
import types
string_types = types.StringTypes
else:
string_types = (str,)
if isinstance(_do_not_sync, string_types):
_do_not_sync = _do_not_sync.strip('[').rstrip(']').replace(' ', ',').split(',')
_do_not_sync = [val for val in _do_not_sync if val]
self._re_do_not_sync = gen_pattern(_do_not_sync, 'do_not_sync', print_info=False)
def update_sync_topics_pattern(self, topics=[]):
self._re_sync_topics = create_pattern('sync_topics', self.__data, self.__interface_file, topics, self.__mastername)
def sync_remote_nodes(self):
if (not self.is_valid):
return False
return self._sync_remote_nodes
def is_ignored_node(self, node):
if (not self.is_valid):
return False
if self._re_hide_nodes.match(node):
return True
if self.do_not_sync(node):
return True
if self._re_sync_nodes.match(node):
return False
return (not is_empty_pattern(self._re_sync_nodes))
def is_ignored_topic(self, node, topic, topictype):
rospy.logwarn("Call to deprecated method 'is_ignored_topic'. Please use'is_ignored_subscriber' and 'is_ignored_publisher' instead")
self._is_ignored_topic(node, topic, topictype)
def _is_ignored_topic(self, node, topic, topictype):
if (not self.is_valid):
return False
if self.do_not_sync([node, topic, topictype]):
return True
if self._re_ignore_type.match(topictype):
return True
if self._re_ignore_nodes.match(node):
return True
if self._re_ignore_topics.match(topic):
return True
if (self._re_sync_nodes.match(node) or self._re_sync_topics.match(topic)):
return False
return ((not is_empty_pattern(self._re_sync_nodes)) or (not is_empty_pattern(self._re_sync_topics)))
def is_ignored_subscriber(self, node, topic, topictype):
if self._re_hide_nodes.match(node):
return True
if self._re_hide_topics.match(topic):
return True
if self.do_not_sync([node, topic, topictype]):
return True
return (self._re_ignore_subscribers.match(topic) or self._is_ignored_topic(node, topic, topictype))
def is_ignored_publisher(self, node, topic, topictype):
if self._re_hide_nodes.match(node):
return True
if self._re_hide_topics.match(topic):
return True
if self.do_not_sync([node, topic, topictype]):
return True
return (self._re_ignore_publishers.match(topic) or self._is_ignored_topic(node, topic, topictype))
def is_ignored_service(self, node, service):
if (not self.is_valid):
return False
if self._re_hide_nodes.match(node):
return True
if self._re_hide_services.match(service):
return True
if self.do_not_sync([node, service]):
return True
if self._re_ignore_nodes.match(node):
return True
if self._re_ignore_services.match(service.strip()):
return True
if (self._re_sync_nodes.match(node) or self._re_sync_services.match(service)):
return False
return ((not is_empty_pattern(self._re_sync_nodes)) or (not is_empty_pattern(self._re_sync_services)))
def do_not_sync(self, name):
if isinstance(name, list):
for nval in name:
if (self._re_do_not_sync.match(nval) or self._re_do_not_sync_from_list.match(nval)):
return True
elif (self._re_do_not_sync.match(name) or self._re_do_not_sync_from_list.match(name)):
return True
return False
def to_list(self):
if (not self.is_valid):
return (False, '', '', '', '', '', '', '', '', '', '')
return (self._sync_remote_nodes, _to_str(self._re_ignore_nodes), _to_str(self._re_sync_nodes), _to_str(self._re_ignore_topics), _to_str(self._re_sync_topics), _to_str(self._re_ignore_services), _to_str(self._re_sync_services), _to_str(self._re_ignore_type), _to_str(self._re_ignore_publishers), _to_str(self._re_ignore_subscribers), _to_str(self._re_do_not_sync))
    @staticmethod
    def from_list(l=None):
try:
result = FilterInterface()
if (l is None):
l = (False, '', '', '', '', '', '', '', '', '', '')
else:
result.read_do_not_sync()
result._sync_remote_nodes = bool(l[0])
result._re_ignore_nodes = _from_str(l[1])
result._re_sync_nodes = _from_str(l[2])
result._re_ignore_topics = _from_str(l[3])
result._re_sync_topics = _from_str(l[4])
result._re_ignore_services = _from_str(l[5])
result._re_sync_services = _from_str(l[6])
result._re_ignore_type = _from_str(l[7])
result._re_ignore_publishers = _from_str((l[8] if (len(l) > 8) else ''))
result._re_ignore_subscribers = _from_str((l[9] if (len(l) > 9) else ''))
result._re_do_not_sync_from_list = _from_str((l[10] if (len(l) > 10) else ''))
result.is_valid = True
return result
        except Exception:
import traceback
print(traceback.format_exc())
return None |