code stringlengths 281 23.7M |
|---|
class SimpleDeployCLI():
    """Registers simple_deploy's command-line arguments on *parser*.

    Arguments are grouped for `--help` output: help, required, behavior
    customization, and deployment configuration.  Two hidden flags support
    the test suites.
    """

    def __init__(self, parser):
        # Argument groups, in the order they should appear in help output.
        grp_help = parser.add_argument_group('Get help')
        grp_required = parser.add_argument_group('Required arguments')
        grp_behavior = parser.add_argument_group("Customize simple_deploy's behavior")
        grp_config = parser.add_argument_group('Customize deployment configuration')

        grp_help.add_argument(
            '--help', '-h', action='help',
            help='Show this help message and exit.')
        grp_required.add_argument(
            '--platform', '-p', type=str, default='',
            help='Specifies the platform where the project will be deployed. Options: fly_io | platform_sh | heroku')
        grp_behavior.add_argument(
            '--automate-all', action='store_true',
            help='Automates all aspects of deployment. Creates resources, makes commits, and runs `push` or `deploy` commands.')
        grp_behavior.add_argument(
            '--no-logging', action='store_true',
            help='Do not create a log of the configuration and deployment process.')
        grp_behavior.add_argument(
            '--ignore-unclean-git', action='store_true',
            help='Run simple_deploy even with an unclean `git status` message.')
        grp_config.add_argument(
            '--deployed-project-name', type=str, default='',
            help='Provide a name that the platform will use for this project.')
        grp_config.add_argument(
            '--region', type=str, default='us-3.platform.sh',
            help='Specify the region that this project will be deployed to.')

        # Internal testing switches; hidden from --help output.
        parser.add_argument('--unit-testing', action='store_true', help=argparse.SUPPRESS)
        parser.add_argument('--integration-testing', action='store_true', help=argparse.SUPPRESS)
class NDN_GetTask():
    """State holder for a single NDN download ("get") task.

    Tracks the content id, destination path, candidate source URLs, and
    progress counters while a download is in flight.
    """

    def __init__(self) -> None:
        self.cid: Optional[str] = None          # content identifier of the object to fetch
        self.target_path: Optional[str] = None  # local filesystem destination
        self.urls: Optional[list] = None        # candidate download URLs (list of str)
        self.options: Optional[dict] = None     # per-task options, schema defined by caller
        self.working_task = None                # the in-progress transfer object, if any
        self.state = NDN_GET_TASK_STATE_INIT    # task lifecycle state (module-level constant)
        self.total_size = 0                     # expected size in bytes (0 = unknown)
        self.recv_bytes = 0                     # bytes received from the network so far
        self.write_bytes = 0                    # bytes flushed to disk so far
        self.error_str = None                   # human-readable failure reason, if failed
        self.chunk_queue = None                 # queue of chunks pending write, if used
        self.retry_count = 0                    # number of retries performed
        self.used_urls = []                     # URLs already attempted
        self.hash_update = None                 # incremental hash object, if verification is on

    def select_url(self, index: int) -> str:
        """Return the URL to download from.

        NOTE(review): the ``index`` argument is ignored and the first URL is
        always returned — confirm whether per-index selection was intended.
        """
        return self.urls[0]

    def get_chunk_for_download(self) -> bytes:
        """Placeholder: return the next chunk to download (not implemented)."""
        pass
class port_stats_reply(stats_reply):
    """OpenFlow port-stats multipart reply (loxi-generated style).

    Wire layout handled here: version(1B) type(1B) length(2B) xid(4B)
    stats_type(2B) flags(2B) pad(4B), then a packed list of
    ``port_stats_entry`` structs.

    NOTE(review): pack() joins str chunks (``'\\x00' * 4`` padding) — this is
    Python 2 byte handling; confirm the surrounding file targets Python 2.
    """

    version = 5      # OpenFlow wire version
    type = 19        # message type: stats/multipart reply
    stats_type = 4   # stats subtype: port statistics

    def __init__(self, xid=None, flags=None, entries=None):
        # xid may legitimately stay None (assigned by the transport later);
        # flags defaults to 0 and entries to a fresh list per instance.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to wire bytes; the length field is backfilled last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append('\x00' * 4)  # 4-byte pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # backfill the real message length
        return ''.join(packed)

    # BUG FIX: unpack takes a reader (no self) and builds its own object,
    # so it must be a staticmethod to be callable as port_stats_reply.unpack().
    @staticmethod
    def unpack(reader):
        """Parse a port_stats_reply from *reader* and return the new object."""
        obj = port_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain subsequent reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 4)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.port_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality against another port_stats_reply."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.flags != other.flags:
            return False
        if self.entries != other.entries:
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable dump of this message via pretty-printer *q*."""
        q.text('port_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid != None:
                    q.text('%#x' % self.xid)
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class bad_match_error_msg(error_msg):
    """OpenFlow OFPET_BAD_MATCH error message (loxi-generated style).

    Wire layout handled here: version(1B) type(1B) length(2B) xid(4B)
    err_type(2B) code(2B) followed by the raw offending data.

    NOTE(review): pack() joins str chunks — Python 2 byte handling; confirm
    the surrounding file targets Python 2.
    """

    version = 5    # OpenFlow wire version
    type = 1       # message type: error
    err_type = 4   # error type: OFPET_BAD_MATCH

    def __init__(self, xid=None, code=None, data=None):
        # xid may legitimately stay None (assigned by the transport later);
        # code defaults to 0 and data to an empty string.
        self.xid = xid
        self.code = code if code is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to wire bytes; the length field is backfilled last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # backfill the real message length
        return ''.join(packed)

    # BUG FIX: unpack takes a reader (no self) and builds its own object,
    # so it must be a staticmethod to be callable as bad_match_error_msg.unpack().
    @staticmethod
    def unpack(reader):
        """Parse a bad_match_error_msg from *reader* and return the new object."""
        obj = bad_match_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Constrain subsequent reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 4)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Field-wise equality against another bad_match_error_msg."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.code != other.code:
            return False
        if self.data != other.data:
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable dump of this message via pretty-printer *q*."""
        q.text('bad_match_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid != None:
                    q.text('%#x' % self.xid)
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Symbolic names for the OFPBMC_* bad-match codes.
                value_name_map = {0: 'OFPBMC_BAD_TYPE', 1: 'OFPBMC_BAD_LEN', 2: 'OFPBMC_BAD_TAG', 3: 'OFPBMC_BAD_DL_ADDR_MASK', 4: 'OFPBMC_BAD_NW_ADDR_MASK', 5: 'OFPBMC_BAD_WILDCARDS', 6: 'OFPBMC_BAD_FIELD', 7: 'OFPBMC_BAD_VALUE', 8: 'OFPBMC_BAD_MASK', 9: 'OFPBMC_BAD_PREREQ', 10: 'OFPBMC_DUP_FIELD', 11: 'OFPBMC_EPERM'}
                if self.code in value_name_map:
                    q.text('%s(%d)' % (value_name_map[self.code], self.code))
                else:
                    q.text('%#x' % self.code)
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
class XmlHighlighter(QSyntaxHighlighter):
    """Qt syntax highlighter for ROS ``.launch`` XML (and generic XML/YAML text).

    The ``LAUNCH_*`` tables describe the child tags and attributes each ROS
    launch element supports.  ``is_launch`` selects between launch-aware
    highlighting rules and generic XML rules.  Besides per-line rules, the
    highlighter tracks multi-line comments and strings via Qt block states
    and can highlight a matching open/close tag pair (see ``mark_block``).
    """

    # --- allowed child tags per launch element ---------------------------
    LAUNCH_LAUNCH_CHILDS = ['group', 'node', 'test', 'env', 'remap', 'rosparam', 'param', 'machine', 'include', 'arg']
    LAUNCH_LAUNCH_ATTR = {'deprecated=': '"message"'}
    LAUNCH_GROUP_CHILDS = ['node', 'test', 'env', 'remap', 'rosparam', 'param', 'machine', 'include', 'arg']
    LAUNCH_GROUP_ATTR = {'ns=': '"foo"', 'clear_params=': '"true|false"'}
    LAUNCH_MACHINE_CHILDS = ['env']
    LAUNCH_MACHINE_ATTR = {'name=': '"machine-name"', 'address=': '"blah.willowgarage.com"', 'env-loader=': '"/opt/ros/fuerte/env.sh"', 'default=': '"true|false|never"', 'user=': '"username"', 'password=': '"passwhat"', 'timeout=': '"10.0"'}
    LAUNCH_NODE_CHILDS = ['env', 'remap', 'rosparam', 'param']
    LAUNCH_NODE_ATTR = {'pkg=': '"mypackage"', 'type=': '"nodetype"', 'name=': '"nodename"', 'args=': '"arg1"', 'machine=': '"machine-name"', 'respawn=': '"true"', 'required=': '"true"', 'ns=': '"foo"', 'clear_params=': '"true|false"', 'output=': '"log|screen"', 'cwd=': '"ROS_HOME|node"', 'launch-prefix=': '"prefix arguments"'}
    LAUNCH_INCLUDE_CHILDS = ['env', 'arg']
    LAUNCH_INCLUDE_ATTR = {'file=': '"$(find pkg-name)/path/filename.xml"', 'ns=': '"foo"', 'clear_params=': '"true|false"', 'pass_all_args=': '"true|false"'}
    LAUNCH_REMAP_ATTR = {'from=': '"originalname"', 'to=': '"newname"'}
    LAUNCH_ENV_ATTR = {'name=': '"name"', 'value=': '"value"'}
    LAUNCH_PARAM_ATTR = {'name=': '"namespace/name"', 'value=': '"value"', 'type=': '"str|int|double|bool"', 'textfile=': '"$(find pkg-name)/path/file.txt"', 'binfile=': '"$(find pkg-name)/path/file"', 'command=': '"$(find pkg-name)/exe \'$(find pkg-name)/arg.txt\'"'}
    LAUNCH_ROSPARAM_ATTR = {'command=': '"load|dump|delete"', 'file=': '"$(find pkg-name)/path/foo.yaml"', 'param=': '"name"', 'ns=': '"foo"', 'subst_value=': '"true|false"'}
    LAUNCH_ARG_ATTR = {'name=': '"name"', 'value=': '"bar"', 'default=': '"defbar"'}
    LAUNCH_TEST_CHILDS = ['env', 'remap', 'rosparam', 'param']
    LAUNCH_TEST_ATTR = {'pkg=': '"mypackage"', 'type=': '"nodetype"', 'name=': '"nodename"', 'test-name=': '"test_name"', 'args=': '"arg1"', 'ns=': '"foo"', 'clear_params=': '"true|false"', 'retry=': '"0"', 'cwd=': '"ROS_HOME|node"', 'launch-prefix=': '"prefix arguments"', 'time-limit=': '"60.0"'}
    # tag name -> allowed child tags
    LAUNCH_CHILDS = {'launch': LAUNCH_LAUNCH_CHILDS, 'group': LAUNCH_GROUP_CHILDS, 'machine': LAUNCH_MACHINE_CHILDS, 'node': LAUNCH_NODE_CHILDS, 'include': LAUNCH_INCLUDE_CHILDS, 'remap': [], 'env': [], 'param': [], 'rosparam': [], 'arg': [], 'test': LAUNCH_TEST_CHILDS}
    # attributes valid on every launch tag; merged into each table below
    LAUNCH_ATT_GLOBAL = {'if=': '""', 'unless=': '""'}
    LAUNCH_LAUNCH_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_GROUP_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_MACHINE_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_NODE_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_INCLUDE_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_REMAP_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_ENV_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_PARAM_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_ROSPARAM_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_ARG_ATTR.update(LAUNCH_ATT_GLOBAL)
    LAUNCH_TEST_ATTR.update(LAUNCH_ATT_GLOBAL)
    # tag name -> attribute table
    LAUNCH_ATTR = {'launch': LAUNCH_LAUNCH_ATTR, 'group': LAUNCH_GROUP_ATTR, 'machine': LAUNCH_MACHINE_ATTR, 'node': LAUNCH_NODE_ATTR, 'include': LAUNCH_INCLUDE_ATTR, 'remap': LAUNCH_REMAP_ATTR, 'env': LAUNCH_ENV_ATTR, 'param': LAUNCH_PARAM_ATTR, 'rosparam': LAUNCH_ROSPARAM_ATTR, 'arg': LAUNCH_ARG_ATTR, 'test': LAUNCH_TEST_ATTR}
    # parameter names flagged as deprecated (highlighted in red)
    DEPRECATED_PARAMETER = {'associations': 'nm/associations', 'kill_on_stop': 'nm/kill_on_stop'}
    # bit flags stored in the Qt block state to carry multi-line constructs
    STATE_COMMENT = 2
    STATE_STRING = 4

    def __init__(self, parent=None, is_launch=True):
        """Build the rule tables.

        :param parent: text document to highlight (passed to Qt).
        :param is_launch: True for launch-aware rules, False for generic XML.
        """
        QSyntaxHighlighter.__init__(self, parent)
        self._is_launch = is_launch
        self.rules = []
        self.comment_start = QRegExp('<!--')
        self.comment_end = QRegExp('-->')
        self.comment_format = self._create_format(Qt.darkGray, 'italic')
        # tag delimiters
        self.rules.append((self._create_regexp('</?|/?>'), self._create_format(QColor(24, 24, 24))))
        if self._is_launch:
            # known launch tag names
            tag_list = '|'.join([('\\b%s\\b' % t) for t in self.LAUNCH_CHILDS.keys()])
            self.rules.append((self._create_regexp(tag_list), self._create_format(Qt.darkRed)))
        else:
            # any XML tag
            self.rules.append((self._create_regexp('>|/>|<[/.\\w:]*[\\s\t>]|<[/.\\w:]*$'), self._create_format(Qt.darkRed)))
        if self._is_launch:
            # known launch attribute names (deduplicated across all tags)
            attr_list = '|'.join(set([('\\b%s' % attr) for v in self.LAUNCH_ATTR.values() for attr in v.keys()]))
            self.rules.append((self._create_regexp(attr_list), self._create_format(QColor(0, 100, 0))))
        else:
            # any attribute-like token
            self.rules.append((self._create_regexp('[_.\\w]*='), self._create_format(QColor(0, 100, 0))))
        # $(...) substitution args — applied separately, outside comments
        self.rule_arg = (self._create_regexp('\\$\\(.*\\)'), self._create_format(QColor(77, 0, 38)))
        self.rules.append((self._create_regexp('<!DOCTYPE.*>'), self._create_format(Qt.lightGray)))
        self.rules.append((self._create_regexp('<\\?xml.*\\?>'), self._create_format(Qt.lightGray)))
        # YAML-ish rules: keys, single-quoted strings, list dashes, numbers
        self.rules.append((self._create_regexp('[_.\\w]*\\s*:'), self._create_format(Qt.darkBlue)))
        self.rules.append((self._create_regexp("'.*'"), self._create_format(Qt.blue)))
        self.rules.append((self._create_regexp('^\\s*-'), self._create_format(Qt.darkRed, 'bold')))
        self.rules.append((self._create_regexp('\\d+'), self._create_format(QColor(127, 64, 127))))
        self.yaml_comment_rule = (self._create_regexp('#[.]*'), self._create_format(Qt.darkGray))
        # deprecated parameter names get a loud red bold format
        self.dep_pattern = []
        if self.DEPRECATED_PARAMETER:
            attr_list = '|'.join(set([('name="%s"' % attr) for attr in self.DEPRECATED_PARAMETER.keys()]))
            self.dep_pattern.append((self._create_regexp(attr_list), self._create_format(QColor(250, 0, 0), 'bold')))
        self.string_pattern = QRegExp('"')
        self.string_format = self._create_format(Qt.blue)
        # state for matching-tag highlighting (driven by mark_block)
        self._tag_hl_range = []
        self._tag_hl_last = []
        self._color_hl_tag = QColor(255, 128, 0)

    def _create_regexp(self, pattern=''):
        """Return a non-greedy (minimal) QRegExp for *pattern*."""
        _regexp = QRegExp()
        _regexp.setMinimal(True)
        _regexp.setPattern(pattern)
        return _regexp

    def _create_format(self, color, style=''):
        """Return a QTextCharFormat with *color*; *style* may contain 'bold'/'italic'."""
        _format = QTextCharFormat()
        _format.setForeground(color)
        if ('bold' in style):
            _format.setFontWeight(QFont.Bold)
        else:
            _format.setFontWeight(QFont.Normal)
        if ('italic' in style):
            _format.setFontItalic(True)
        return _format

    def highlightBlock(self, text):
        """Qt callback: apply all rules to one text block (line).

        Order: generic rules, YAML comment, multi-line XML comments,
        multi-line double-quoted strings (both skipping comment spans),
        $(...) args, then deprecated-parameter patterns.
        """
        for (pattern, form) in self.rules:
            index = pattern.indexIn(text)
            while (index >= 0):
                length = pattern.matchedLength()
                frmt = form
                # Inside a marked tag range, override the color to show the
                # matched (orange) or unmatched (red) open/close tag pair.
                # NOTE(review): _end_tag_found is first set in mark_block();
                # this branch is only reachable after mark_block() populated
                # _tag_hl_range — confirm call order holds.
                if self._in_hl_range(index, self._tag_hl_range):
                    frmt = QTextCharFormat(form)
                    if (not self._end_tag_found):
                        frmt.setForeground(Qt.red)
                    else:
                        frmt.setForeground(self._color_hl_tag)
                    frmt.setFontWeight(QFont.Bold)
                self.setFormat(index, length, frmt)
                index = pattern.indexIn(text, (index + length))
        # YAML '#' comment runs to end of line
        index = self.yaml_comment_rule[0].indexIn(text)
        if (index >= 0):
            self.setFormat(index, (len(text) - index), self.yaml_comment_rule[1])
        self._tag_hl_range = []
        self.setCurrentBlockState(0)
        # --- multi-line XML comments, tracked via block state -------------
        self._comments_idx = []
        idx_start_cmt = 0
        comment_length = 0
        if ((self.previousBlockState() == (- 1)) or (not (self.previousBlockState() & self.STATE_COMMENT))):
            idx_start_cmt = self.comment_start.indexIn(text)
        while (idx_start_cmt >= 0):
            idx_end = self.comment_end.indexIn(text, idx_start_cmt)
            comment_length = 0
            if (idx_end == (- 1)):
                # comment continues on the next block
                self.setCurrentBlockState(self.STATE_COMMENT)
                comment_length = (len(text) - idx_start_cmt)
            else:
                comment_length = ((idx_end - idx_start_cmt) + self.comment_end.matchedLength())
            self._comments_idx.append((idx_start_cmt, comment_length))
            self.setFormat(idx_start_cmt, comment_length, self.comment_format)
            idx_start_cmt = self.comment_start.indexIn(text, (idx_start_cmt + comment_length))
        # --- multi-line double-quoted strings, skipping comment spans -----
        idx_start = self.string_pattern.indexIn(text)
        if ((self.previousBlockState() != (- 1)) and (self.previousBlockState() & self.STATE_STRING)):
            # the string opened on a previous block; close (or continue) it
            strlen = (idx_start + self.string_pattern.matchedLength())
            if (idx_start == (- 1)):
                strlen = len(text)
                self.setCurrentBlockState((self.currentBlockState() + self.STATE_STRING))
            self.setFormat(0, strlen, self.string_format)
            idx_start = self.string_pattern.indexIn(text, strlen)
        idx_search = (idx_start + 1)
        while (idx_start >= 0):
            if (not self._in_hl_range(idx_search, self._comments_idx)):
                idx_end = self.string_pattern.indexIn(text, idx_search)
                strlen = 0
                if (not self._in_hl_range(idx_end, self._comments_idx)):
                    if (idx_end == (- 1)):
                        # unterminated string: carry state to the next block
                        self.setCurrentBlockState((self.currentBlockState() + self.STATE_STRING))
                        strlen = (len(text) - idx_start)
                    else:
                        strlen = ((idx_end - idx_start) + self.string_pattern.matchedLength())
                    idx_search = (idx_start + strlen)
                    self.setFormat(idx_start, strlen, self.string_format)
                    idx_start = self.string_pattern.indexIn(text, idx_search)
                    idx_search = (idx_start + 1)
                else:
                    idx_search = (idx_end + 1)
            else:
                idx_start = self.string_pattern.indexIn(text, idx_search)
                idx_search = (idx_start + 1)
        # --- $(...) substitution args, outside comments -------------------
        index = self.rule_arg[0].indexIn(text)
        while (index >= 0):
            if (not self._in_hl_range(index, self._comments_idx)):
                length = self.rule_arg[0].matchedLength()
                self.setFormat(index, length, self.rule_arg[1])
            index = self.rule_arg[0].indexIn(text, (index + length))
        # --- deprecated parameter names ------------------------------------
        for (pattern, form) in self.dep_pattern:
            index = pattern.indexIn(text)
            while (index >= 0):
                length = pattern.matchedLength()
                frmt = form
                if self._in_hl_range(index, self._tag_hl_range):
                    frmt = QTextCharFormat(form)
                    if (not self._end_tag_found):
                        frmt.setForeground(Qt.red)
                    else:
                        frmt.setForeground(self._color_hl_tag)
                    frmt.setFontWeight(QFont.Bold)
                self.setFormat(index, length, frmt)
                index = pattern.indexIn(text, (index + length))

    def mark_block(self, block, position):
        """Highlight the tag at *position* in *block* and its matching pair.

        Searches backwards for the opening tag (if the cursor is on a close
        tag) or forwards for the closing tag (if on an open tag), balancing
        nested tags via brace counting, then rehighlights the affected blocks.
        """
        text = block.text()
        (word, idx_word) = self._get_current_word(text, position)
        # clear previous pair highlighting
        for hlblock in self._tag_hl_last:
            self.rehighlightBlock(hlblock)
        del self._tag_hl_last[:]
        self._tag_hl_range = [(idx_word, len(word))]
        next_block = block
        open_braces = 0
        closed_braces = 0
        idx_search = idx_word
        rindex = (- 1)
        loop = 0
        tag_len = 0
        if self._isclosetag(word):
            # scan backwards for the matching opening tag
            opentag = ('<%s' % self._get_tag(word))
            tag_len = len(opentag)
            while ((rindex == (- 1)) and next_block.isValid()):
                rindex = text.rfind(opentag, 0, idx_search)
                (obr, cbr) = self._get_braces_count(text[(rindex if (rindex != (- 1)) else 0):idx_search])
                open_braces += obr
                closed_braces += cbr
                loop += 1
                if (loop > 50000):
                    # safety valve against pathological documents
                    rindex = (- 1)
                    break
                if (rindex == (- 1)):
                    next_block = next_block.previous()
                    text = next_block.text()
                    idx_search = len(text)
                elif (open_braces <= closed_braces):
                    # candidate belongs to a nested pair; keep scanning left
                    idx_search = rindex
                    rindex = (- 1)
        elif self._isopentag(word):
            # scan forwards for the matching closing tag (or self-close '/>')
            closetag = QRegExp(('</%s>|/>' % self._get_tag(word)))
            closetag.setMinimal(True)
            while ((rindex == (- 1)) and next_block.isValid()):
                rindex = closetag.indexIn(text, idx_search)
                max_search_idx = ((rindex + closetag.matchedLength()) if (rindex != (- 1)) else len(text))
                (obr, cbr) = self._get_braces_count(text[idx_search:max_search_idx])
                open_braces += obr
                closed_braces += cbr
                loop += 1
                if (loop > 50000):
                    rindex = (- 1)
                    break
                if (rindex == (- 1)):
                    next_block = next_block.next()
                    text = next_block.text()
                    idx_search = 0
                elif (open_braces > closed_braces):
                    # candidate closes a nested tag; keep scanning right
                    idx_search = (rindex + closetag.matchedLength())
                    rindex = (- 1)
            tag_len = closetag.matchedLength()
        else:
            # cursor is not on a tag: nothing to mark
            self._tag_hl_range = []
        self._end_tag_found = (rindex != (- 1))
        if (self._tag_hl_range and (block != next_block)):
            self.rehighlightBlock(block)
            self._tag_hl_last.append(block)
        if (rindex != (- 1)):
            self._tag_hl_range.append((rindex, tag_len))
            self.rehighlightBlock(next_block)
            self._tag_hl_last.append(next_block)

    def _get_braces_count(self, text):
        """Return (open_count, close_count) of tag braces in *text*.

        Comments ('<!') are excluded from the open count.
        """
        closed_short = text.count('/>')
        closed_long = text.count('</')
        cmnt_long = text.count('<!')
        openbr = ((text.count('<') - closed_long) - cmnt_long)
        return (openbr, (closed_short + closed_long))

    def _isopentag(self, word):
        """True if *word* looks like an opening tag, e.g. '<node'."""
        return (word.startswith('<') and ('/' not in word))

    def _isclosetag(self, word):
        """True if *word* is '/>' or a closing tag like '</node>'."""
        return (('/>' == word) or word.startswith('</'))

    def _get_tag(self, word):
        """Strip tag punctuation and return the bare tag name."""
        return word.strip('</>')

    def _get_current_word(self, text, position):
        """Return (word, start_index) of the token around *position*.

        A word is delimited by whitespace, '=', or '"'.
        """
        word = ''
        idx_start = position
        # extend left
        for i in reversed(range(0, position)):
            if (text[i] in [' ', '\n', '=', '"']):
                break
            else:
                word = ('%s%s' % (text[i], word))
                idx_start = i
        # extend right
        for i in range(position, len(text)):
            if (text[i] in [' ', '\n', '=', '"']):
                break
            else:
                word += text[i]
        return (word, idx_start)

    def _in_hl_range(self, value, ranges):
        """True if *value* falls inside any (start, length) span in *ranges*."""
        for (start, length) in ranges:
            if ((value >= start) and (value <= (start + length))):
                return True
        return False

    def get_tag_of_current_block(self, block, position):
        """Return the name of the nearest tag opening at or before *position*.

        Scans backwards across blocks for '<' (bounded to 100 iterations),
        then reads the tag name forward until a delimiter.
        """
        text = block.text()
        next_block = block
        idx_search = position
        rindex = (- 1)
        loop = 0
        opentag = '<'
        while ((rindex == (- 1)) and next_block.isValid()):
            rindex = text.rfind(opentag, 0, idx_search)
            loop += 1
            if (loop > 100):
                rindex = (- 1)
                break
            if (rindex == (- 1)):
                next_block = next_block.previous()
                text = next_block.text()
                idx_search = len(text)
        tag = ''
        if (rindex != (- 1)):
            for i in range((rindex + 1), len(text)):
                if (text[i] in [' ', '\n', '=', '"', '>']):
                    break
                else:
                    tag += text[i]
        return tag
class CarDescriptor():
    """Value object describing a car configuration source.

    Hashing and equality are based solely on ``name``; the remaining fields
    are carried as plain attributes.
    """

    def __init__(self, name, description, type, root_paths, config_paths, config_base_variables, variables):
        # NOTE: ``type`` shadows the builtin inside this method only; the
        # parameter name is part of the public interface, so it is kept.
        self.name = name
        self.description = description
        self.type = type
        self.root_paths = root_paths
        self.config_paths = config_paths
        self.config_base_variables = config_base_variables
        self.variables = variables

    def __hash__(self):
        # Identity follows the name, matching __eq__ below.
        return hash(self.name)

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return False
        return self.name == other.name
def test_logging_filter_no_span(elasticapm_client):
    """LoggingFilter attaches transaction context to records outside a span.

    With a transaction begun but no span active, filtering a LogRecord must
    populate the elasticapm_* attributes from the current transaction and
    leave the span id as None.
    """
    transaction = elasticapm_client.begin_transaction('test')
    f = LoggingFilter()
    # A minimal synthetic record; line number/args are irrelevant here.
    record = logging.LogRecord(__name__, logging.DEBUG, __file__, 252, 'dummy_msg', [], None)
    f.filter(record)
    assert (record.elasticapm_transaction_id == transaction.id)
    assert (record.elasticapm_service_name == transaction.tracer.config.service_name)
    assert (record.elasticapm_service_environment == transaction.tracer.config.environment)
    assert (record.elasticapm_trace_id == transaction.trace_parent.trace_id)
    # No span is active, so the span id must be unset.
    assert (record.elasticapm_span_id is None)
    assert record.elasticapm_labels
def validate_blob_uri_contents(contents: bytes, blob_uri: str) -> None:
    """Verify *contents* against the blob hash embedded in *blob_uri*.

    Mirrors git's blob hashing scheme: SHA-1 over ``"blob <len>\\0<content>"``.
    Raises EthPMValidationError when the computed digest does not match the
    final path segment of the URI.
    """
    expected_hash = parse.urlparse(blob_uri).path.split('/')[-1]
    text = to_text(contents)
    # git blob header: "blob <size>" followed by a NUL byte
    header = 'blob ' + str(len(text)) + '\x00'
    digest = hashlib.sha1(to_bytes(text=header + text)).hexdigest()
    if digest != expected_hash:
        raise EthPMValidationError(
            f'Hash of contents fetched from {blob_uri} do not match its hash: {expected_hash}.'
        )
class OptionPlotoptionsBoxplotDragdropGuideboxDefault(Options):
    """Option wrapper for Highcharts boxplot dragdrop guide-box defaults.

    NOTE(review): each option below is defined TWICE — a no-argument "getter"
    returning the default value, then a one-argument "setter" writing into the
    config.  Because Python silently lets the second ``def`` replace the
    first, only the setter variants survive on this class; the getters are
    dead code.  The original generator almost certainly emitted ``@property``
    / ``@x.setter`` decorators that were lost — confirm against the code
    generator before changing call sites.
    """

    def className(self):
        # dead: shadowed by the setter below
        return self._config_get('highcharts-drag-box-default')

    def className(self, text: str):
        # CSS class name applied to the guide box
        self._config(text, js_type=False)

    def color(self):
        # dead: shadowed by the setter below
        return self._config_get('rgba(0, 0, 0, 0.1)')

    def color(self, text: str):
        # guide box fill color
        self._config(text, js_type=False)

    def cursor(self):
        # dead: shadowed by the setter below
        return self._config_get('move')

    def cursor(self, text: str):
        # mouse cursor shown while dragging
        self._config(text, js_type=False)

    def lineColor(self):
        # dead: shadowed by the setter below
        return self._config_get('#888')

    def lineColor(self, text: str):
        # guide box border color
        self._config(text, js_type=False)

    def lineWidth(self):
        # dead: shadowed by the setter below
        return self._config_get(1)

    def lineWidth(self, num: float):
        # guide box border width in pixels
        self._config(num, js_type=False)

    def zIndex(self):
        # dead: shadowed by the setter below
        return self._config_get(900)

    def zIndex(self, num: float):
        # stacking order of the guide box
        self._config(num, js_type=False)
class Test_tcp(unittest.TestCase):
    """Tests for the ryu ``tcp`` packet class (init, parse, serialize, json)."""

    # Reference header fields shared by all tests below.
    src_port = 6431
    dst_port = 8080
    seq = 5
    ack = 1
    offset = 6
    bits = 42
    window_size = 2048
    csum = 12345
    urgent = 128
    option = b'\x01\x02\x03\x04'
    # A reference tcp object plus its expected wire encoding.
    t = tcp.tcp(src_port, dst_port, seq, ack, offset, bits, window_size, csum, urgent, option)
    buf = pack(tcp.tcp._PACK_STR, src_port, dst_port, seq, ack, (offset << 4), bits, window_size, csum, urgent)
    buf += option

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        """Constructor stores every field verbatim."""
        eq_(self.src_port, self.t.src_port)
        eq_(self.dst_port, self.t.dst_port)
        eq_(self.seq, self.t.seq)
        eq_(self.ack, self.t.ack)
        eq_(self.offset, self.t.offset)
        eq_(self.bits, self.t.bits)
        eq_(self.window_size, self.t.window_size)
        eq_(self.csum, self.t.csum)
        eq_(self.urgent, self.t.urgent)
        eq_(self.option, self.t.option)

    def test_parser(self):
        """Parsing the reference buffer reproduces every field; no payload remains."""
        (r1, r2, _) = self.t.parser(self.buf)
        eq_(self.src_port, r1.src_port)
        eq_(self.dst_port, r1.dst_port)
        eq_(self.seq, r1.seq)
        eq_(self.ack, r1.ack)
        eq_(self.offset, r1.offset)
        eq_(self.bits, r1.bits)
        eq_(self.window_size, r1.window_size)
        eq_(self.csum, r1.csum)
        eq_(self.urgent, r1.urgent)
        eq_(self.option, r1.option)
        eq_(None, r2)

    def test_serialize(self):
        """Serialization round-trips fields and produces a valid checksum."""
        offset = 5
        csum = 0  # 0 asks serialize() to compute the checksum
        src_ip = '192.168.10.1'
        dst_ip = '192.168.100.1'
        prev = ipv4(4, 5, 0, 0, 0, 0, 0, 64, inet.IPPROTO_TCP, 0, src_ip, dst_ip)
        t = tcp.tcp(self.src_port, self.dst_port, self.seq, self.ack, offset, self.bits, self.window_size, csum, self.urgent)
        buf = t.serialize(bytearray(), prev)
        res = struct.unpack(tcp.tcp._PACK_STR, six.binary_type(buf))
        eq_(res[0], self.src_port)
        eq_(res[1], self.dst_port)
        eq_(res[2], self.seq)
        eq_(res[3], self.ack)
        eq_(res[4], (offset << 4))
        eq_(res[5], self.bits)
        eq_(res[6], self.window_size)
        eq_(res[8], self.urgent)
        eq_((offset * 4), len(t))
        # Verify checksum over the pseudo-header + segment sums to zero.
        ph = struct.pack('!4s4sBBH', addrconv.ipv4.text_to_bin(src_ip), addrconv.ipv4.text_to_bin(dst_ip), 0, 6, (offset * 4))
        d = (ph + buf)
        s = packet_utils.checksum(d)
        eq_(0, s)

    def test_serialize_option(self):
        """Options serialize to the expected bytes and parse back equal."""
        offset = 0  # 0 asks serialize() to derive the data offset from options
        csum = 0
        # BUG FIX: ts_val/ts_ecr values were missing (syntax error).  Restored
        # from option_buf below: bytes 0x11223344 / 0x55667788.
        option = [tcp.TCPOptionMaximumSegmentSize(max_seg_size=1460), tcp.TCPOptionSACKPermitted(), tcp.TCPOptionTimestamps(ts_val=0x11223344, ts_ecr=0x55667788), tcp.TCPOptionNoOperation(), tcp.TCPOptionWindowScale(shift_cnt=9)]
        option_buf = b'\x02\x04\x05\xb4\x04\x02\x08\n\x11"3DUfw\x88\x01\x03\x03\t'
        prev = ipv4(4, 5, 0, 0, 0, 0, 0, 64, inet.IPPROTO_TCP, 0, '192.168.10.1', '192.168.100.1')
        t = tcp.tcp(self.src_port, self.dst_port, self.seq, self.ack, offset, self.bits, self.window_size, csum, self.urgent, option)
        buf = t.serialize(bytearray(), prev)
        r_option_buf = buf[tcp.tcp._MIN_LEN:(tcp.tcp._MIN_LEN + len(option_buf))]
        eq_(option_buf, r_option_buf)
        (r_tcp, _, _) = tcp.tcp.parser(buf)
        eq_(str(option), str(r_tcp.option))

    # NOTE(review): this stray expression was most likely a stripped
    # "@raises(Exception)" decorator for the test below; left as-is to avoid
    # changing runtime behavior without the nose import in scope.
    (Exception)
    def test_malformed_tcp(self):
        """Parsing a truncated header should fail."""
        m_short_buf = self.buf[1:tcp.tcp._MIN_LEN]
        tcp.tcp.parser(m_short_buf)

    def test_default_args(self):
        """tcp() with defaults serializes to the documented default header."""
        prev = ipv4(proto=inet.IPPROTO_TCP)
        t = tcp.tcp()
        buf = t.serialize(bytearray(), prev)
        res = struct.unpack(tcp.tcp._PACK_STR, buf)
        eq_(res[0], 1)
        eq_(res[1], 1)
        eq_(res[2], 0)
        eq_(res[3], 0)
        eq_(res[4], (5 << 4))
        eq_(res[5], 0)
        eq_(res[6], 0)
        eq_(res[8], 0)
        # With a 4-byte option the data offset grows to 6 words.
        t = tcp.tcp(option=[tcp.TCPOptionMaximumSegmentSize(1460)])
        buf = t.serialize(bytearray(), prev)
        res = struct.unpack((tcp.tcp._PACK_STR + '4s'), buf)
        eq_(res[0], 1)
        eq_(res[1], 1)
        eq_(res[2], 0)
        eq_(res[3], 0)
        eq_(res[4], (6 << 4))
        eq_(res[5], 0)
        eq_(res[6], 0)
        eq_(res[8], 0)
        eq_(res[9], b'\x02\x04\x05\xb4')
        # An explicit offset larger than needed pads the options with zeros.
        t = tcp.tcp(offset=7, option=[tcp.TCPOptionWindowScale(shift_cnt=9)])
        buf = t.serialize(bytearray(), prev)
        res = struct.unpack((tcp.tcp._PACK_STR + '8s'), buf)
        eq_(res[0], 1)
        eq_(res[1], 1)
        eq_(res[2], 0)
        eq_(res[3], 0)
        eq_(res[4], (7 << 4))
        eq_(res[5], 0)
        eq_(res[6], 0)
        eq_(res[8], 0)
        eq_(res[9], b'\x03\x03\t\x00\x00\x00\x00\x00')

    def test_json(self):
        """to_jsondict/from_jsondict round-trip preserves the packet."""
        jsondict = self.t.to_jsondict()
        t = tcp.tcp.from_jsondict(jsondict['tcp'])
        eq_(str(self.t), str(t))
def query_with_labels(query, schema, sort_columns=False):
    """Rewrite *query* to select labeled columns per the schema's Meta.

    Columns listed in ``Meta.exclude`` are dropped; each entry in
    ``Meta.relationships`` contributes a labeled column (appended, or
    inserted at its declared position) and may add a join target.
    Returns the rewritten query.
    """
    meta = schema.Meta
    excluded = getattr(meta, 'exclude', ())
    rels = getattr(meta, 'relationships', [])

    columns = [col for col in query_entities(query) if col.key not in excluded]
    join_targets = []
    for rel in rels:
        labeled = rel.column.label(rel.label)
        if rel.position == -1:
            # -1 means "append at the end"
            columns.append(labeled)
        else:
            columns.insert(rel.position, labeled)
        if rel.field not in join_targets:
            join_targets.append(rel.field)

    if sort_columns:
        columns.sort(key=lambda col: col.name)

    if join_targets:
        return query.join(*join_targets).with_entities(*columns)
    return query.with_entities(*columns)
# NOTE(review): this bare call was almost certainly the decorator
# "@_tlv_types(CFM_PORT_STATUS_TLV)" registering the class below; left
# unchanged because the registrar's semantics are not visible here.
_tlv_types(CFM_PORT_STATUS_TLV)
class port_status_tlv(tlv):
    """CFM Port Status TLV (IEEE 802.1ag): carries the port's forwarding state."""

    _PACK_STR = '!BHB'  # type(1B), length(2B), port_status(1B)
    _MIN_LEN = struct.calcsize(_PACK_STR)
    _PS_BLOCKED = 1  # port can not pass data
    _PS_UP = 2       # port can pass data

    def __init__(self, length=0, port_status=_PS_UP):
        """Create the TLV; *length* of 0 is filled in by serialize()."""
        super(port_status_tlv, self).__init__(length)
        self._type = CFM_PORT_STATUS_TLV
        assert (port_status in [self._PS_BLOCKED, self._PS_UP])
        self.port_status = port_status

    # BUG FIX: the first parameter is named ``cls`` and the method builds a
    # new instance — it must be a classmethod (decorator was missing).
    @classmethod
    def parser(cls, buf):
        """Parse a port_status_tlv from the head of *buf*."""
        (type_, length, port_status) = struct.unpack_from(cls._PACK_STR, buf)
        return cls(length, port_status)

    def serialize(self):
        """Serialize to a bytearray; a zero length defaults to the 1-byte value."""
        if (self.length == 0):
            self.length = 1
        buf = struct.pack(self._PACK_STR, self._type, self.length, self.port_status)
        return bytearray(buf)
# NOTE(review): this bare name was most likely a stripped decorator (e.g.
# "@login_required") guarding the view below — confirm against the original.
_required
def update(hashid):
    """Flask view: patch boolean settings of the form identified by *hashid*.

    Accepts a JSON body and copies any of the whitelisted flags onto the
    form.  Rejects requests whose referrer is not the configured service URL,
    unknown hashids, and users without rights on the form.
    """
    # Only accept requests originating from our own frontend.
    referrer = referrer_to_baseurl(request.referrer)
    service = referrer_to_baseurl(settings.SERVICE_URL)
    if (referrer != service):
        return jsonerror(400, {'error': 'Improper request.'})
    form = Form.get_with_hashid(hashid)
    if (not form):
        return jsonerror(400, {'error': 'Not a valid form.'})
    # The current user must own the form or have it shared with them.
    if ((form.owner_id != current_user.id) and (form not in current_user.forms)):
        return jsonerror(401, {'error': 'Wrong user.'})
    patch = request.get_json()
    # Whitelist of settable flags; anything else in the payload is ignored.
    for attr in ['disable_storage', 'disabled', 'disable_email', 'captcha_disabled']:
        if (attr in patch):
            setattr(form, attr, patch[attr])
    DB.session.add(form)
    DB.session.commit()
    return jsonify({'ok': True})
class TestAttrFormFlagPresent(unittest.TestCase):
    """DWARF attributes with form DW_FORM_flag_present must read as True."""

    def test_form_flag_present_value_is_true(self):
        elf_path = os.path.join('test', 'testfiles_for_unittests', 'lambda.elf')
        with open(elf_path, 'rb') as stream:
            elf = ELFFile(stream)
            self.assertTrue(elf.has_dwarf_info())
            dwarf_info = elf.get_dwarf_info()
            # Walk every DIE attribute in every compilation unit.
            for compile_unit in dwarf_info.iter_CUs():
                for debug_entry in compile_unit.iter_DIEs():
                    for attr in debug_entry.attributes.values():
                        if attr.form == 'DW_FORM_flag_present':
                            self.assertTrue(attr.value)
def set_component_observation_data(observation_template):
    """Build the component-observation rows for *observation_template*.

    Only the sample-required components contribute rows; each row carries
    the sampling fields from its Observation Template plus an 'Open' status.
    Returns a list of dicts.

    NOTE(review): the non-sample components returned by
    get_observation_template_details() are deliberately ignored here —
    confirm that is intended.
    """
    (sample_reqd_component_obs, non_sample_reqd_component_obs) = get_observation_template_details(observation_template)
    data = []
    for d in sample_reqd_component_obs:
        # Pull the sampling-related fields of the component's template.
        obs_temp = frappe.get_value('Observation Template', d, ['sample_type', 'sample', 'medical_department', 'container_closure_color', 'name as observation_template', 'sample_qty'], as_dict=True)
        obs_temp['status'] = 'Open'
        data.append(obs_temp)
    return data
def download_and_cache(url, *, owner='url', parts=None, verify=True, force=None, chunk_size=(1024 * 1024), range_method='auto', update_if_out_of_date=False, fake_headers=None, **kwargs):
    """Download *url* into the local cache and return the cached file path.

    A resumable Downloader fetches the resource; if an up-to-date local copy
    already exists its path is returned immediately.  The ``force`` policy
    (defaulting to an out-of-date check governed by SETTINGS) decides when a
    cached copy is invalidated and re-downloaded.

    NOTE(review): ``**kwargs`` is accepted but unused — confirm whether it
    should be forwarded to Downloader or cache_file.
    """
    LOG.debug('URL %s', url)
    downloader = Downloader(url, chunk_size=chunk_size, timeout=SETTINGS.get('url-download-timeout'), verify=verify, parts=parts, range_method=range_method, fake_headers=fake_headers, statistics_gatherer=record_statistics, progress_bar=progress_bar, resume_transfers=True, override_target_file=False, download_file_extension='.download')
    # BUG FIX: the extension was normalized before it could ever be set, so
    # the leading-dot check was dead code; derive it first, then normalize.
    extension = downloader.extension()
    if (extension and (extension[0] != '.')):
        extension = ('.' + extension)
    path = downloader.local_path()
    if (path is not None):
        # BUG FIX: previously a bare ``return`` handed None to the caller on a
        # cache hit, while the cold path below returns the path.
        return path

    def out_of_date(url, path, cache_data):
        # Default force-policy: re-download only when the remote resource is
        # newer AND the settings (or the caller) allow it.
        if (SETTINGS.get('check-out-of-date-urls') is False):
            return False
        if downloader.out_of_date(path, cache_data):
            if (SETTINGS.get('download-out-of-date-urls') or update_if_out_of_date):
                LOG.warning('Invalidating cache version and re-downloading %s', url)
                return True
            else:
                LOG.warning("To enable automatic downloading of updated URLs set the 'download-out-of-date-urls' setting to True")
        return False

    if (force is None):
        force = out_of_date

    def download(target, _):
        # Callback for cache_file: perform the transfer, return its metadata.
        downloader.download(target)
        return downloader.cache_data()

    path = cache_file(owner, download, dict(url=url, parts=parts), extension=extension, force=force)
    return path
# NOTE(review): this bare name was most likely a stripped decorator
# ("@_touched_chat") wrapping the handler below — confirm against the original.
_touched_chat
def cmd_set_timezone(bot, update, args, chat):
    """Telegram command handler: set the chat's timezone.

    Expects the tz database name as the first argument; stores it on the
    chat and confirms with the zone's current abbreviation/offset.  Replies
    with a help link when the argument is missing or unknown.
    """
    if (len(args) < 1):
        bot.reply(update, 'No timezone specified. Find yours [here]({})!'.format(TIMEZONE_LIST_URL), parse_mode=telegram.ParseMode.MARKDOWN)
        return
    tz_name = args[0]
    try:
        # Validate the name before persisting it on the chat.
        tz = timezone(tz_name)
        chat.timezone_name = tz_name
        chat.save()
        # e.g. "CET +0100" — rendered from the chat's current local time.
        tz_str = datetime.now(tz).strftime('%Z %z')
        bot.reply(update, 'Timezone is set to {}'.format(tz_str))
    except UnknownTimeZoneError:
        bot.reply(update, 'Unknown timezone. Find yours [here]({})!'.format(TIMEZONE_LIST_URL), parse_mode=telegram.ParseMode.MARKDOWN)
def test_build_matrix_restriction_enzyme_region(capsys):
    """hicBuildMatrix restricted to chr3R reproduces the reference outputs.

    Builds a matrix from the test SAM pair with restriction-site options and
    a --region filter, then compares matrix data, cut intervals, QC folder
    contents and BAM size against the stored references.
    """
    outfile = NamedTemporaryFile(suffix='.h5', delete=False)
    outfile.close()
    outfile_bam = NamedTemporaryFile(suffix='.bam', delete=False)
    # BUG FIX: outfile was closed a second time here; the BAM temp file was
    # the one that needed closing before hicBuildMatrix writes to it.
    outfile_bam.close()
    qc_folder = mkdtemp(prefix='testQC_')
    args = '-s {} {} --outFileName {} -bs 5000 -b {} --QCfolder {} --threads 4 --danglingSequence GATC AGCT --restrictionSequence GATC AAGCTT -rs {} {} --region {}'.format(sam_R1, sam_R2, outfile.name, outfile_bam.name, qc_folder, dpnii_file, (ROOT + 'hicFindRestSite/hindIII.bed'), 'chr3R').split()
    compute(hicBuildMatrix.main, args, 5)
    # Compare the produced matrix against the stored reference matrix.
    test = hm.hiCMatrix((ROOT + 'small_test_matrix_parallel_two_rc_chr3R.h5'))
    new = hm.hiCMatrix(outfile.name)
    nt.assert_equal(test.matrix.data, new.matrix.data)
    nt.assert_equal(len(test.cut_intervals), len(new.cut_intervals))
    print(set(os.listdir((ROOT + 'QC_region/'))))
    assert are_files_equal((ROOT + 'QC_region/QC.log'), (qc_folder + '/QC.log'))
    assert (set(os.listdir((ROOT + 'QC_region/'))) == set(os.listdir(qc_folder)))
    # BAM contents are not byte-stable; only require the size to be close.
    assert (abs((os.path.getsize((ROOT + 'build_region.bam')) - os.path.getsize(outfile_bam.name))) < delta)
    os.unlink(outfile.name)
    shutil.rmtree(qc_folder)
class Solution(object):
    """LeetCode 962 — Maximum Width Ramp."""

    def maxWidthRamp(self, A):
        """Return the widest j - i with i <= j and A[i] <= A[j] (0 if none).

        Strategy: record the first and last index of every value, then sweep
        values from largest to smallest so that ``best_right[v]`` becomes the
        rightmost index holding any value >= v.  The answer is the best
        ``best_right[v] - first_index[v]`` over all values.
        """
        if len(A) < 2:
            return 0
        first_index = {}
        last_index = {}
        for idx, val in enumerate(A):
            if val not in first_index:
                first_index[val] = idx
            last_index[val] = idx
        # Rightmost index of any value >= val, built by a descending sweep.
        best_right = {}
        running_max = -1
        for val in sorted(last_index, reverse=True):
            running_max = max(running_max, last_index[val])
            best_right[val] = running_max
        return max(best_right[val] - first_index[val] for val in first_index)
def gen_sites():
    """Yield (gtp_int_tile, site_name) for every GTPE2_COMMON site in the part.

    Walks the part's tile grid in sorted tile-name order; for each
    GTPE2_COMMON site, resolves the GTP interface tile belonging to the same
    clock region via get_gtp_int_tile().
    """
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    for tile_name in sorted(grid.tiles()):
        loc = grid.loc_of_tilename(tile_name)
        gridinfo = grid.gridinfo_at_loc(loc)
        for (site_name, site_type) in gridinfo.sites.items():
            # Only GTP transceiver common blocks are of interest here.
            if (site_type in ['GTPE2_COMMON']):
                gtp_int_tile = get_gtp_int_tile(gridinfo.clock_region, grid)
                (yield (gtp_int_tile, site_name))
# NOTE(review): this bare call was almost certainly the decorator
# "@_projects_ns.route(...)" on the class below; left unchanged because the
# namespace object's semantics are not visible here.
_projects_ns.route('/fork/<ownername>/<projectname>')
class ProjectFork(Resource):
    """API resource: fork a Copr project into the caller's (or a group's) namespace."""

    # BUG FIX: _common takes no ``self`` yet is invoked as self._common(...);
    # without @staticmethod that call raises TypeError (three args for two
    # parameters).  The decorator was evidently stripped.
    @staticmethod
    def _common(ownername, projectname):
        """Validate the fork form and perform the fork; return the new project."""
        copr = get_copr(ownername, projectname)
        data = get_form_compatible_data(preserve=['chroots'])
        data['owner'] = data.get('ownername')
        form = forms.CoprForkFormFactory.create_form_cls(copr=copr, user=flask.g.user, groups=flask.g.user.user_groups)(data, meta={'csrf': False})
        if (form.validate_on_submit() and copr):
            try:
                # Resolve the destination group, if the target owner is one.
                dstgroup = ([g for g in flask.g.user.user_groups if (g.at_name == form.owner.data)] or [None])[0]
                if ((flask.g.user.name != form.owner.data) and (not dstgroup)):
                    return ObjectNotFound('There is no such group: {}'.format(form.owner.data))
                # Refuse to silently overwrite an existing project.
                dst_copr = CoprsLogic.get(flask.g.user.name, form.name.data).all()
                if (dst_copr and (not form.confirm.data)):
                    raise BadRequest('You are about to fork into existing project: {}\nPlease use --confirm if you really want to do this'.format(form.name.data))
                (fcopr, _) = ComplexLogic.fork_copr(copr, flask.g.user, dstname=form.name.data, dstgroup=dstgroup)
                db.session.commit()
            except (ActionInProgressException, InsufficientRightsException) as err:
                db.session.rollback()
                raise err
        else:
            raise InvalidForm(form)
        return to_dict(fcopr)

    # NOTE(review): the bare expressions below were almost certainly stripped
    # "@..." decorators (auth, docs, marshalling); left unchanged.
    _api_login_required
    _projects_ns.doc(params=fullname_params)
    _projects_ns.marshal_with(project_model)
    _projects_ns.expect(project_fork_input_model)
    _projects_ns.response(HTTPStatus.OK.value, 'Copr project is forking...')
    _projects_ns.response(HTTPStatus.BAD_REQUEST.value, HTTPStatus.BAD_REQUEST.description)
    def post(self, ownername, projectname):
        """Fork via POST."""
        return self._common(ownername, projectname)

    _api_login_required
    _projects_ns.doc(params=fullname_params)
    _projects_ns.marshal_with(project_model)
    _projects_ns.expect(project_fork_input_model)
    _projects_ns.response(HTTPStatus.OK.value, 'Copr project is forking...')
    _projects_ns.response(HTTPStatus.BAD_REQUEST.value, HTTPStatus.BAD_REQUEST.description)
    _route_method_type(apiv3_projects_ns, 'PUT', 'POST')
    def put(self, ownername, projectname):
        """Fork via PUT (same behavior as POST)."""
        return self._common(ownername, projectname)
class OptionYaxisPlotbands(Options):
    """Accessors for Highcharts yAxis.plotBands options backed by self._config.

    NOTE(review): each option appears as a same-named getter/setter pair with a
    single surviving ``_.setter`` line at ``from_`` — the @property/@<name>.setter
    decorators appear to have been stripped during extraction; as written the
    later def shadows the earlier one and ``_.setter`` would raise NameError.
    Confirm against the upstream source.
    """
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Highcharts default plot-band fill color.
        return self._config_get('#e6e9ff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def events(self) -> 'OptionYaxisPlotbandsEvents':
        return self._config_sub_data('events', OptionYaxisPlotbandsEvents)
    def from_(self):
        # Trailing underscore: `from` is a Python keyword.
        return self._config_get(None)
    _.setter
    def from_(self, num: float):
        self._config(num, js_type=False)
    def id(self):
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def innerRadius(self):
        return self._config_get(None)
    def innerRadius(self, num: float):
        self._config(num, js_type=False)
    def label(self) -> 'OptionYaxisPlotbandsLabel':
        return self._config_sub_data('label', OptionYaxisPlotbandsLabel)
    def outerRadius(self):
        return self._config_get('100%')
    def outerRadius(self, num: float):
        self._config(num, js_type=False)
    def thickness(self):
        return self._config_get(10)
    def thickness(self, num: float):
        self._config(num, js_type=False)
    def to(self):
        return self._config_get(None)
    def to(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(None)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsBubbleSonificationContexttracksMappingPan(Options):
    """Accessors for the sonification pan-mapping options backed by self._config.

    NOTE(review): same-named getter/setter pairs suggest stripped
    @property/@<name>.setter decorators; as written the setter def shadows the
    getter. Confirm against the upstream source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestChatDatabase(unittest.TestCase):
    """Round-trip tests for ChatSessionDB backed by a throwaway sqlite file."""

    def setUp(self):
        # Every test gets a fresh database file; tearDown removes it.
        self.db_file = 'test_chat.db'
        self.chat_db = ChatSessionDB(self.db_file)

    def tearDown(self):
        self.chat_db.close()
        os.remove(self.db_file)

    def test_database_creation(self):
        self.assertTrue(os.path.exists(self.db_file))

    def test_table_creation(self):
        # Inspect sqlite's catalog directly instead of going through ChatSessionDB.
        conn = sqlite3.connect(self.db_file)
        cursor = conn.cursor()
        cursor.execute("\n SELECT count(name) FROM sqlite_master WHERE type='table' AND name='ChatSessions'\n ")
        self.assertEqual(cursor.fetchone()[0], 1)
        cursor.execute("\n SELECT count(name) FROM sqlite_master WHERE type='table' AND name='Messages'\n ")
        self.assertEqual(cursor.fetchone()[0], 1)
        conn.close()

    def test_insert_and_get_session(self):
        # (session_id, session_owner, session_topic, start_time)
        record = ('session1', 'user1', 'topic1', '2023-08-28 12:00:00')
        self.chat_db.insert_chatsession(*record)
        self.assertEqual(self.chat_db.get_chatsession_by_id('session1'), record)

    def test_insert_and_get_message(self):
        # (message_id, session_id, sender_id, receiver_id, timestamp, content, status)
        record = ('message1', 'session1', 'user1', 'user2', '2023-08-28 12:30:00', 'Hello, world!', 0)
        self.chat_db.insert_message(*record)
        self.assertEqual(self.chat_db.get_message_by_id('message1'), record)
def frequency_stacked(mu):
    """Plot stacked (n_stack-averaged) noise realisations against frequency for
    both the correlated-gaussian and white-noise models, for correlation mu."""
    inp = {'rel_error': rel_error, 'noise_floor': noise_floor, 'mu': mu}
    sfnoise_reim = stack(n_stack, fresp, 'gaussian_correlated', **inp)
    sfnoise_comp = stack(n_stack, fresp, 'white_noise', **inp)
    (fig, axs) = figure(freqs, fresp, sfnoise_reim, sfnoise_comp)
    # Raw f-string: '\m' is an invalid escape sequence (SyntaxWarning since
    # Python 3.12); the rendered title is unchanged.
    fig.suptitle(rf'STACKED {n_stack} times; $\mu=${mu}', fontsize=20)
    for i in range(3):
        axs[(0, i)].set_ylim([1e-18, 3e-11])
    for i in range(4):
        axs[(0, i)].set_xscale('log')
        axs[(1, i)].set_xlabel('Frequency (Hz)')
        axs[(1, i)].set_ylim([(- 5), 40])
def main(args):
    """Render `args.num_images` scenes, then aggregate the per-scene JSON files
    into the single file `args.output_scene_file`.

    Filenames are `<prefix>_<split>_<6-digit index>.{png,json,blend}`; .blend
    files are only written when args.save_blendfiles == 1.
    """
    num_digits = 6
    prefix = ('%s_%s_' % (args.filename_prefix, args.split))
    img_template = os.path.join(args.output_image_dir, ('%s%%0%dd.png' % (prefix, num_digits)))
    scene_template = os.path.join(args.output_scene_dir, ('%s%%0%dd.json' % (prefix, num_digits)))
    blend_template = os.path.join(args.output_blend_dir, ('%s%%0%dd.blend' % (prefix, num_digits)))
    # exist_ok avoids the check-then-create race of the previous isdir tests.
    os.makedirs(args.output_image_dir, exist_ok=True)
    os.makedirs(args.output_scene_dir, exist_ok=True)
    if (args.save_blendfiles == 1):
        os.makedirs(args.output_blend_dir, exist_ok=True)
    all_scene_paths = []
    for i in range(args.num_images):
        index = (i + args.start_idx)
        img_path = (img_template % index)
        scene_path = (scene_template % index)
        all_scene_paths.append(scene_path)
        blend_path = ((blend_template % index) if (args.save_blendfiles == 1) else None)
        num_objects = random.randint(args.min_objects, args.max_objects)
        render_scene(args, num_objects=num_objects, output_index=index, output_split=args.split, output_image=img_path, output_scene=scene_path, output_blendfile=blend_path)
    # Collect the per-scene JSON files into one aggregate document.
    all_scenes = []
    for scene_path in all_scene_paths:
        with open(scene_path, 'r') as f:
            all_scenes.append(json.load(f))
    output = {'info': {'date': args.date, 'version': args.version, 'split': args.split, 'license': args.license}, 'scenes': all_scenes}
    with open(args.output_scene_file, 'w') as f:
        json.dump(output, f)
()
def redshift_systems() -> Generator:
    """Yield a fixed list of two fides System records describing Redshift
    clusters, for use as test fixtures.

    NOTE(review): the bare ``()`` above looks like a stripped decorator
    (likely ``@pytest.fixture()``) — confirm upstream.
    """
    redshift_systems = [System.construct(fides_key='redshift-cluster-1', organization_fides_key='default_organization', name='redshift-cluster-1', description='Fides Generated Description for Redshift Cluster: redshift-cluster-1', fidesctl_meta=SystemMetadata(endpoint_address='redshift-cluster-1.cue8hjdl1kb1.us-east-1.redshift.amazonaws.com', endpoint_port='5439', resource_id='arn:aws:redshift:us-east-1::namespace:5eb1f195-7815-4c62-9140-e062dd98da83'), system_type='redshift_cluster', privacy_declarations=[]), System.construct(fides_key='redshift-cluster-2', organization_fides_key='default_organization', name='redshift-cluster-2', description='Fides Generated Description for Redshift Cluster: redshift-cluster-2', fidesctl_meta=SystemMetadata(endpoint_address='redshift-cluster-2.cue8hjdl1kb1.us-east-1.redshift.amazonaws.com', endpoint_port='5439', resource_id='arn:aws:redshift:us-east-1::namespace:06ba7fe3-8cb3-4e1c-b2c6-cc2f2415a979'), system_type='redshift_cluster', privacy_declarations=[])]
    (yield redshift_systems)
class Migration(migrations.Migration):
    """Adds an image upload field to forum_plate and resets parent_comment's
    verbose names.

    NOTE(review): the verbose_name strings are empty — possibly non-ASCII
    labels lost in extraction; confirm against the original migration.
    """
    dependencies = [('forum', '0016_auto__1528')]
    operations = [migrations.AlterModelOptions(name='parent_comment', options={'verbose_name': '', 'verbose_name_plural': ''}), migrations.AddField(model_name='forum_plate', name='image', field=models.ImageField(blank=True, upload_to='forum/%Y%m%d', verbose_name=''))]
class AccumulateMessages(Accumulate[(MessageT, MessageT)], ReprWithInner):
    """Accumulator that folds a stream of protobuf-style messages into one,
    merging field by field and honoring one-of groups."""
    # Message type whose instances are being accumulated.
    inner: Type[MessageT]
    __slots__ = ('inner',)
    def __init__(self, inner: Type[MessageT]) -> None:
        self.inner = inner
    def __call__(self, lhs: Optional[MessageT], rhs: Iterable[MessageT]) -> MessageT:
        """Merge every message of `rhs` into `lhs` (in order) and return it.

        When `lhs` is None the first item becomes the accumulator. Raises
        AssertionError if `rhs` yields None, or yields nothing while `lhs`
        is None.
        """
        for other in rhs:
            assert (other is not None), 'right-hand side items must not be `None`'
            if (lhs is not None):
                for (name, descriptor) in other.__PROTOBUF_FIELDS_BY_NUMBER__.values():
                    # Merge semantics are delegated to each field descriptor.
                    setattr(lhs, name, descriptor.merge(getattr(lhs, name), getattr(other, name)))
                    one_of = descriptor.one_of
                    if (one_of is not None):
                        # Keep only the most recently set member of the one-of group.
                        one_of._keep_attribute(lhs, descriptor.number)
            else:
                lhs = other
        assert (lhs is not None), 'there must be at least one right-hand side item'
        return lhs
class Options3D(DataClass):
    """Typed accessor layer over vis.js Graph3d options stored in self._attrs.

    NOTE(review): every option appears as a same-named getter/setter pair,
    which strongly suggests @property/@<name>.setter decorators were stripped
    during extraction; as written the later def (the setter) shadows the
    getter. Confirm against the upstream source.
    """
    def animationInterval(self):
        return self._attrs['animationInterval']
    def animationInterval(self, val):
        self._attrs['animationInterval'] = val
    def animationPreload(self):
        return self._attrs['animationPreload']
    def animationPreload(self, val):
        self._attrs['animationPreload'] = val
    def animationAutoStart(self):
        return self._attrs['animationAutoStart']
    def animationAutoStart(self, val):
        self._attrs['animationAutoStart'] = val
    def axisColor(self):
        return self._attrs['axisColor']
    def axisColor(self, val):
        self._attrs['axisColor'] = val
    def autoResize(self):
        return self._attrs['autoResize']
    def autoResize(self, val):
        self._attrs['autoResize'] = val
    def backgroundColor(self) -> OptionsBackgroundColor:
        return self.sub_data('backgroundColor', OptionsBackgroundColor)
    def cameraPosition(self) -> OptionsCameraPosition:
        return self.has_attribute(OptionsCameraPosition)
    def ctrlToZoom(self):
        return self._attrs['ctrlToZoom']
    def ctrlToZoom(self, val):
        self._attrs['ctrlToZoom'] = val
    def dataColor(self) -> OptionsBackgroundColor:
        return self.sub_data('dataColor', OptionsBackgroundColor)
    def dotSizeRatio(self):
        return self._attrs['dotSizeRatio']
    def dotSizeRatio(self, val):
        self._attrs['dotSizeRatio'] = val
    def dotSizeMinFraction(self):
        return self._attrs['dotSizeMinFraction']
    def dotSizeMinFraction(self, val):
        self._attrs['dotSizeMinFraction'] = val
    def dotSizeMaxFraction(self):
        return self._attrs['dotSizeMaxFraction']
    def dotSizeMaxFraction(self, val):
        self._attrs['dotSizeMaxFraction'] = val
    def filterLabel(self):
        return self._attrs['filterLabel']
    def filterLabel(self, val):
        self._attrs['filterLabel'] = val
    def gridColor(self):
        return self._attrs['gridColor']
    def gridColor(self, val):
        self._attrs['gridColor'] = val
    def height(self):
        return self._attrs['height']
    def height(self, val):
        # Bare numbers are converted to CSS pixel strings.
        if isinstance(val, float):
            val = ('%spx' % val)
        self._attrs['height'] = val
    def keepAspectRatio(self):
        return self._attrs['keepAspectRatio']
    def keepAspectRatio(self, val):
        self._attrs['keepAspectRatio'] = val
    def rotateAxisLabels(self):
        return self._attrs['rotateAxisLabels']
    def rotateAxisLabels(self, val):
        self._attrs['rotateAxisLabels'] = val
    def showAnimationControls(self):
        return self._attrs['showAnimationControls']
    def showAnimationControls(self, val):
        self._attrs['showAnimationControls'] = val
    def showGrayBottom(self):
        return self._attrs['showGrayBottom']
    def showGrayBottom(self, val):
        self._attrs['showGrayBottom'] = val
    def showGrid(self):
        return self._attrs['showGrid']
    def showGrid(self, val):
        self._attrs['showGrid'] = val
    def showXAxis(self):
        return self._attrs['showXAxis']
    def showXAxis(self, val):
        self._attrs['showXAxis'] = val
    def showYAxis(self):
        return self._attrs['showYAxis']
    def showYAxis(self, val):
        self._attrs['showYAxis'] = val
    def showZAxis(self):
        return self._attrs['showZAxis']
    def showZAxis(self, val):
        self._attrs['showZAxis'] = val
    def showPerspective(self):
        return self._attrs['showPerspective']
    def showPerspective(self, val):
        self._attrs['showPerspective'] = val
    def showLegend(self):
        return self._attrs['showLegend']
    def showLegend(self, val):
        self._attrs['showLegend'] = val
    def showShadow(self):
        return self._attrs['showShadow']
    def showShadow(self, val):
        self._attrs['showShadow'] = val
    def showSurfaceGrid(self):
        return self._attrs['showSurfaceGrid']
    def showSurfaceGrid(self, val):
        self._attrs['showSurfaceGrid'] = val
    def style(self) -> Enum3dStyles:
        return self.has_attribute(Enum3dStyles)
    def tooltip(self):
        return self._attrs['tooltip']
    def tooltip(self, val):
        self._attrs['tooltip'] = val
    def tooltipDelay(self):
        return self._attrs['tooltipDelay']
    def tooltipDelay(self, val):
        self._attrs['tooltipDelay'] = val
    def tooltipStyle(self) -> OptionsTooltipStyle:
        return self.has_attribute(OptionsTooltipStyle)
    def valueMax(self):
        return self._attrs['valueMax']
    def valueMax(self, val):
        self._attrs['valueMax'] = val
    def valueMin(self):
        return self._attrs['valueMin']
    def valueMin(self, val):
        self._attrs['valueMin'] = val
    def verticalRatio(self):
        return self._attrs['verticalRatio']
    def verticalRatio(self, val):
        self._attrs['verticalRatio'] = val
    def width(self):
        return self._attrs['width']
    def width(self, val):
        # Bare numbers are converted to CSS pixel strings.
        if isinstance(val, float):
            val = ('%spx' % val)
        self._attrs['width'] = val
    def xCenter(self):
        return self._attrs['xCenter']
    def xCenter(self, val):
        self._attrs['xCenter'] = val
    def xMax(self):
        return self._attrs['xMax']
    def xMax(self, val):
        self._attrs['xMax'] = val
    def xMin(self):
        return self._attrs['xMin']
    def xMin(self, val):
        self._attrs['xMin'] = val
    def xBarWidth(self):
        return self._attrs['xBarWidth']
    def xBarWidth(self, val):
        self._attrs['xBarWidth'] = val
    def xValueLabel(self):
        return self._attrs['xValueLabel']
    def xValueLabel(self, val):
        self._attrs['xValueLabel'] = val
    def xStep(self):
        return self._attrs['xStep']
    def xStep(self, val):
        self._attrs['xStep'] = val
    def yBarWidth(self):
        return self._attrs['yBarWidth']
    def yBarWidth(self, val):
        self._attrs['yBarWidth'] = val
    def yCenter(self):
        return self._attrs['yCenter']
    def yCenter(self, val):
        self._attrs['yCenter'] = val
    def yMax(self):
        return self._attrs['yMax']
    def yMax(self, val):
        self._attrs['yMax'] = val
    def yMin(self):
        return self._attrs['yMin']
    def yMin(self, val):
        self._attrs['yMin'] = val
    def yValueLabel(self):
        return self._attrs['yValueLabel']
    def yValueLabel(self, val):
        self._attrs['yValueLabel'] = val
    def yStep(self):
        return self._attrs['yStep']
    def yStep(self, val):
        self._attrs['yStep'] = val
    def zValueLabel(self):
        return self._attrs['zValueLabel']
    def zValueLabel(self, val):
        self._attrs['zValueLabel'] = val
    def zMax(self):
        return self._attrs['zMax']
    def zMax(self, val):
        self._attrs['zMax'] = val
    def zMin(self):
        return self._attrs['zMin']
    def zMin(self, val):
        self._attrs['zMin'] = val
    def managed(self):
        # Unlike the other options, this uses DataClass.get/set (default True).
        return self.get(True)
    def managed(self, flag: bool):
        self.set(flag)
def analyze_memory_launcher(pid, num_refs, specific_refs, debug, output_file, executable, template_out_path):
    """Render the GDB analysis template for `pid` and run the analysis.

    Returns whatever GDBObject.run_analysis returns.
    """
    # Trailing slashes matter: downstream code concatenates these paths.
    templates_path = (pkg_resources.resource_filename('memory_analyzer', 'templates') + '/')
    cur_path = (os.path.dirname(__file__) + '/')
    gdb_obj = analysis_utils.GDBObject(pid, cur_path, executable, template_out_path)
    output_path = os.path.abspath(output_file)
    # Plain string literal: the previous f-string had no placeholders.
    analysis_utils.render_template('analysis.py.template', templates_path, num_refs, pid, specific_refs, output_path, template_out_path)
    return gdb_obj.run_analysis(debug)
class BotUserInputHandler(THBEventHandler):
    """Event handler that plays the game as a random bot.

    Intercepts 'user_input' events and answers them with randomized but
    rule-checked choices; all other events are forwarded to the UI-meta
    event translator. Used for automated play-testing of the game rules
    and their ui_meta definitions.
    """
    def __init__(self, g: THBattle, delay: float=0.0):
        """`delay` > 0 inserts a gevent sleep after each answered input."""
        super().__init__(g)
        self.event_translator = g.ui_meta.event_translator
        self.delay = delay
    def handle(self, evt: str, arg: Any) -> Any:
        """Answer input requests; translate every other event. Returns arg unchanged."""
        if (evt == 'user_input'):
            (trans, ilet) = arg
            self.react(trans, ilet)
            if (self.delay > 0.0):
                # Imported lazily so non-gevent runs never need it.
                import gevent
                gevent.sleep(self.delay)
        else:
            g = self.game
            core = g.runner.core
            self.event_translator(g, core, evt, arg)
        return arg
    def get_game_state(self):
        """Exercise the game-state serializer (result is discarded)."""
        from thb.meta.view import state_of
        state_of(self.game)
    def card_mig_ui_meta(self, arg: MigrateCardsTransaction):
        """Exercise card-migration animation instructions (result discarded)."""
        arg.ui_meta.animation_instructions(arg)
    def detach_ui_meta(self, arg: Tuple[(MigrateCardsTransaction, Sequence[Card])]):
        """Exercise detach animation instructions; they must be non-empty."""
        (trans, cards) = arg
        rst = trans.ui_meta.detach_animation_instructions(trans, cards)
        assert rst
    def effect_meta(self, evt: str, act: Any):
        """Exercise the effect-string and sound-effect ui_meta hooks for `act`."""
        # while/break is used as a poor-man's conditional block with early exit.
        while hasattr(act, 'ui_meta'):
            _type = {'action_before': 'effect_string_before', 'action_apply': 'effect_string_apply', 'action_after': 'effect_string'}[evt]
            prompt = getattr(act.ui_meta, _type, None)
            if (not prompt):
                break
            prompt(act)
            break
        if hasattr(act, 'ui_meta'):
            if (evt == 'action_before'):
                rays = getattr(act.ui_meta, 'ray', None)
                rays = (rays(act) if rays else [])
            _type = {'action_before': 'sound_effect_before', 'action_apply': 'sound_effect_apply', 'action_after': 'sound_effect'}[evt]
            se = getattr(act.ui_meta, _type, None)
            se = (se and se(act))
    def react(self, trans, ilet):
        """Answer one input request, dispatching on the transaction name.

        Choices are randomized but validated through the same checks a real
        UI would run, so inconsistencies between rules and ui_meta raise.
        """
        cl: Any
        p = ilet.actor
        g = self.game
        if (trans.name == 'ActionStageAction'):
            # Occasionally pass the action stage outright.
            if (random.random() < 0.05):
                return False
            cards = ((list(p.showncards) + list(p.cards)) + list(p.equips))
            # Randomly try skill-based plays first.
            while (random.random() < 0.5):
                skl = [sk for sk in p.skills if ('t_None' not in sk.target.__name__)]
                [sk.ui_meta.clickable() for sk in skl]
                if skl:
                    sk = random.choice(skl)
                else:
                    break
                # All card combinations of size 0..3, shuffled.
                cc = list(chain(*[C(cards, i) for i in range(4)]))
                random.shuffle(cc)
                for tl in self.possible_targets(g, p, sk):
                    tl_seen = set()
                    for cl in cc:
                        c = skill_wrap(p, [sk], cl, {})
                        try:
                            (tl, ok) = c.target(p, tl)
                        except Exception as e:
                            raise Exception(f'{c}.target: {c.target} failed') from e
                        tl1 = tuple(tl)
                        if (tl1 in tl_seen):
                            continue
                        tl_seen.add(tl1)
                        (ok, reason) = self.ui_meta_walk_wrapped([c])
                        if (not ok):
                            # ui_meta rejection must agree with the rules check.
                            assert (not c.check()), (c, c.associated_cards, c.check(), ok, reason)
                            continue
                        try:
                            (ok2, reason) = c.ui_meta.is_action_valid(c, tl)
                        except Exception as e:
                            raise Exception(f'{c}.ui_meta.is_action_valid failed') from e
                        if (not ok):
                            continue
                        if self.try_launch(ilet, cl, tl, skills=[sk]):
                            return
                    break
            # Fall back to playing a plain card.
            for c in cards:
                if (not c.associated_action):
                    continue
                for t in self.possible_targets(g, p, c):
                    if self.try_launch(ilet, [c], t):
                        return True
        elif ((trans.name in ('Action', 'AskForRejectAction')) and isinstance(ilet, ActionInputlet)):
            rst: Any = {'skills': [], 'cards': [], 'characters': []}
            if ilet.categories:
                initiator: Any = ilet.initiator
                cond = initiator.cond
                cl = (list(p.showncards) + list(p.cards))
                found = False
                # Try skill-wrapped answers first, then raw card combinations.
                for skcls in ilet.actor.skills:
                    sk = skcls(ilet.actor)
                    if (not sk.ui_meta.clickable()):
                        continue
                    (uiok, uireason) = self.ui_meta_walk_wrapped([sk])
                    (cct_ok, cct_reason) = initiator.ui_meta.choose_card_text(initiator, [sk])
                    if (cond([sk]) and (random.random() < 0.5)):
                        for c in chain([], C(cl, 1), C(cl, 2), [cl]):
                            sk = skill_wrap(p, [skcls], c, {})
                            (uiok, uireason) = self.ui_meta_walk_wrapped([sk])
                            assert uiok
                            assert cond([sk])
                            rst['skills'] = [skcls]
                            rst['cards'] = c
                            found = True
                            break
                    if found:
                        break
                else:
                    for c in chain(C(cl, 1), C(cl, 2), [cl]):
                        (cct_ok, cct_reason) = initiator.ui_meta.choose_card_text(initiator, c)
                        if cond(c):
                            # ui_meta text hook must accept whatever cond accepts.
                            assert cct_ok, (c, cct_reason)
                            rst['cards'] = c
                            break
            if ilet.candidates:
                initiator = cast(CharacterChooser, ilet.initiator)
                target = initiator.choose_player_target
                pl = ilet.candidates
                for p in chain(C(pl, 1), C(pl, 2), [pl]):
                    (p, ok) = target(p)
                    (ccp_ok, ccp_reason) = initiator.ui_meta.target(p)
                    if ok:
                        assert ccp_ok, (p, ccp_reason)
                        rst['characters'] = p
                        break
            if rst:
                ilet.set_result(**rst)
            return True
        elif ((trans.name == 'ChooseOption') and isinstance(ilet, ChooseOptionInputlet)):
            # Options are doubled so None (timeout/pass) is picked 1/(2n+1) of the time.
            ilet.set_option(random.choice(((list(ilet.options) * 2) + [None])))
        elif ((trans.name == 'ChoosePeerCard') and isinstance(ilet, ChoosePeerCardInputlet)):
            tgt = ilet.target
            if (random.random() < 0.9):
                cats = [getattr(tgt, i) for i in ilet.categories]
                cl = list(chain(*cats))
                if cl:
                    ilet.set_card(random.choice(cl))
        elif (trans.name == 'SortCharacter'):
            pass
        elif (trans.name == 'ChooseGirl'):
            # Restrict choices to the characters under test, when possible.
            from settings import TESTING_CHARACTERS as TESTS
            choices = [c for c in ilet.mapping[ilet.actor] if (c.char_cls and (c.char_cls.__name__ in TESTS))]
            if choices:
                c = random.choice(choices)
                log.info('Got %s', c.char_cls)
                ilet.set_choice(c)
        elif (trans.name == 'HarvestChoose'):
            pass
        elif (trans.name == 'Pindian'):
            pass
        elif (trans.name == 'HopeMask'):
            pass
        elif (trans.name == 'Prophet'):
            pass
        elif (trans.name == 'ChooseIndividualCard'):
            pass
        elif (trans.name == 'BanGirl'):
            pass
        elif (trans.name == 'GalgameDialog'):
            pass
        else:
            log.warning('Not processing %s transaction', trans.name)
            # Deliberate crash: an unknown transaction means the bot is out of date.
            (1 / 0)
    def ui_meta_walk_wrapped(self, cl, check_is_complete=False):
        """Recursively validate skill wrappers via their ui_meta.is_complete.

        Returns (True, 'OK') when every nested Skill is complete, else the
        first (False, reason) encountered.
        """
        from thb.cards.base import Skill
        for c in cl:
            if (not isinstance(c, Skill)):
                continue
            if check_is_complete:
                (rst, reason) = c.ui_meta.is_complete(c)
                if (not rst):
                    return (rst, reason)
            (rst, reason) = self.ui_meta_walk_wrapped(c.associated_cards, True)
            if (not rst):
                return (rst, reason)
        return (True, 'OK')
    def try_launch(self, ilet, cl, tl, skills=[]):
        """Attempt to answer `ilet` by launching cards `cl` at targets `tl`.

        Returns True and sets the input result when the launch would fire.
        NOTE(review): mutable default `skills=[]` — safe here because it is
        never mutated, but worth replacing with None upstream.
        """
        p = ilet.actor
        if skills:
            c = skill_wrap(p, skills, cl, {})
        else:
            assert (len(cl) == 1), cl
            (c,) = cl
        act = ActionStageLaunchCard(p, tl, c)
        if act.can_fire():
            ilet.set_result(skills=skills, cards=cl, characters=tl)
            return True
        return False
    def possible_targets(self, g, me, c):
        """Enumerate (shuffled) candidate target lists for card/skill `c`,
        dispatching on the name of its target-selection function."""
        rl: Any
        target = c.target
        tn = target.__name__
        pl = g.players
        if (tn == 't_None'):
            raise Exception('Fuck')
        elif (tn == 't_Self'):
            rl = [[me]]
        elif (tn == 't_OtherOne'):
            rl = list(pl)
            random.shuffle(rl)
            rl = [[i] for i in rl if (i is not me)]
        elif (tn == 't_One'):
            rl = list(pl)
            random.shuffle(rl)
            rl = [[i] for i in rl]
        elif (tn == 't_All'):
            rl = list(pl)
            rl.remove(me)
            rl = [rl]
        elif (tn == 't_AllInclusive'):
            rl = [pl]
        elif (tn == '_t_OtherLessEqThanN'):
            n = target._for_test_OtherLessEqThanN
            rl = []
            for i in range((n + 1)):
                rl.extend(list(P(pl, i)))
            random.shuffle(rl)
        elif (tn == 't_OneOrNone'):
            rl = [[i] for i in pl]
            rl.append([])
            random.shuffle(rl)
        elif (tn == '_t_OtherN'):
            n = target._for_test_OtherN
            rl = list(P(pl, n))
            random.shuffle(rl)
            rl = [list(i) for i in rl]
        else:
            # Unknown selector: fall back to all permutations of size 0..2.
            rl = []
            for i in range(3):
                rl.extend(list(P(pl, i)))
            random.shuffle(rl)
        return rl
('cuda.conv2d_bias_hardswish_few_channels.gen_function')
def conv2d_bias_hardswish_few_channels_gen_function(func_attrs, exec_cond_template, shape_eval_template, shape_save_template):
    """Generate the CUDA source for conv2d+bias+hardswish (few-channels case)
    by delegating to the shared conv-bias-activation generator.

    NOTE(review): the bare string expression above looks like a stripped
    registry decorator (e.g. ``@registry.reg(...)``) — confirm upstream.
    """
    return cba.gen_function(func_attrs=func_attrs, exec_cond_template=exec_cond_template, shape_eval_template=shape_eval_template, shape_save_template=shape_save_template)
def test_partitioned_analyses_init_raises_error_if_accumulators_are_too_large_for_memory():
    """update() must raise MemoryError instead of attempting to allocate
    accumulators that cannot fit in memory."""
    distinguisher = DumbPartDistinguisher()
    # 500 x 2e6 samples and 500 x 40096 data words make the partitioned
    # accumulators far larger than available memory.
    traces = np.random.randint(0, 255, size=(500, 2000000), dtype='uint8')
    data = np.random.randint(0, 255, size=(500, 40096), dtype='uint8')
    with pytest.raises(MemoryError):
        distinguisher.update(traces=traces, data=data)
_flyte_cli.command('get-workflow', cls=_FlyteSubCommand)
_urn_option
_host_option
_insecure_option
def get_workflow(urn, host, insecure):
    """Fetch the workflow identified by `urn` from the Flyte admin service at
    `host` and echo it to the terminal.

    NOTE(review): the four bare expressions above appear to be click
    decorators whose ``@`` was lost during extraction — confirm upstream.
    """
    _welcome_message()
    client = _get_client(host, insecure)
    _click.echo(client.get_workflow(cli_identifiers.Identifier.from_python_std(urn)))
    # Trailing blank line for readable terminal output.
    _click.echo('')
class IntrospectionToken(BaseIntrospectionToken):
    """Wrapper over an OAuth2 token-introspection response exposing
    active/expired/revoked checks."""

    def is_active(self):
        # 'active' is the one mandatory claim of an introspection response.
        return self['active']

    def is_expired(self):
        """True if expired, False if still valid, None when the response
        carries no expiry claim."""
        if not self.is_active():
            return True
        expires_at = self.get_expires_at()
        if not expires_at:
            return None
        return time.time() > int(expires_at)

    def is_revoked(self):
        # An inactive token is treated as revoked.
        return not self.is_active()
class ComponentId(PackageId):
    """Identifier of a component: a (component type, public id) pair.

    NOTE(review): `component_type`, `component_prefix`, `prefix_import_path`
    and `json` read like properties, and `from_json` takes `cls` — the
    @property/@classmethod decorators appear to have been stripped during
    extraction; confirm upstream.
    """
    def __init__(self, component_type: Union[(ComponentType, str)], public_id: PublicId) -> None:
        """Normalize `component_type` (str or enum) and delegate to PackageId."""
        component_type = ComponentType(component_type)
        super().__init__(component_type.to_package_type(), public_id)
    def component_type(self) -> ComponentType:
        """The component type derived from the underlying package type."""
        return ComponentType(self.package_type.value)
    def component_prefix(self) -> PackageIdPrefix:
        """(component_type, author, name) triple."""
        package_prefix = super().package_prefix
        (package_type, author, name) = package_prefix
        return (ComponentType(package_type.value), author, name)
    def same_prefix(self, other: 'ComponentId') -> bool:
        """True when type, author and name match (version ignored)."""
        return ((self.component_type == other.component_type) and self.public_id.same_prefix(other.public_id))
    def prefix_import_path(self) -> str:
        """Dotted import path, e.g. 'packages.author.skills.name'."""
        return 'packages.{}.{}.{}'.format(self.public_id.author, self.component_type.to_plural(), self.public_id.name)
    def json(self) -> Dict:
        """JSON representation: the public id's fields plus a 'type' key."""
        return dict(**self.public_id.json, type=str(self.component_type))
    def from_json(cls, json_data: Dict) -> 'ComponentId':
        """Inverse of json()."""
        return cls(component_type=json_data['type'], public_id=PublicId.from_json(json_data))
def _get_definition_tokens(tokens):
definition_tokens = []
first_line = None
for (type, name, start, stop, line_text) in tokens:
if (first_line is None):
first_line = start[0]
if (type == token.NEWLINE):
break
item = (type, name, (((start[0] - first_line) + 1), start[1]), (((stop[0] - first_line) + 1), stop[1]), line_text)
definition_tokens.append(item)
return definition_tokens |
def detect_freq_limit(func, *args, **kwargs):
    """Empirically measure an API rate limit by calling `func(*args, **kwargs)`
    until it is rejected, then polling once per second until it recovers.

    Returns:
        (count, period): number of calls that passed before the first
        rejection, and the elapsed seconds (from the first call) at the
        moment service resumed.
    """
    start = time.time()
    count = 0
    # Phase 1: hammer until the first ResponseError.
    while True:
        try:
            func(*args, **kwargs)
        except ResponseError as e:
            logger.info('freq limit reached: {} requests passed, error_info: {}'.format(count, e))
            break
        else:
            count += 1
            logger.debug('{} passed'.format(count))
    # Phase 2: poll once per second until a call succeeds again.
    while True:
        period = (time.time() - start)
        try:
            func(*args, **kwargs)
        except ResponseError:
            logger.debug('blocking: {:.0f} secs'.format(period))
            time.sleep(1)
        else:
            logger.info('freq limit detected: {} requests / {:.0f} secs'.format(count, period))
            return (count, period)
def chooseMove(availMovesL):
    """Prompt until the user enters an integer in [1, availMovesL]; return it.

    NOTE(review): `choices(choice)` is presumably the project-level validator
    that raises ValueError on bad input — confirm; and the `choice == ''`
    test is redundant since '' is never .isdigit().
    """
    while True:
        try:
            choice = input('\n Choose a move (number): ')
            choices(choice)
        except ValueError:
            pickValidMoveMessage()
            continue
        if ((not choice.isdigit()) or (choice == '')):
            pickValidMoveMessage()
            continue
        elif ((int(choice) < 1) or (int(choice) > availMovesL)):
            pickValidMoveMessage()
            continue
        else:
            break
    return int(choice)
class FlattenDenseBlock(PerceptionBlock):
    """Perception block composing a FlattenBlock with a DenseBlock: inputs are
    flattened over `num_flatten_dims` dims, then passed through dense layers."""
    def __init__(self, in_keys: Union[(str, List[str])], out_keys: Union[(str, List[str])], in_shapes: Union[(Sequence[int], List[Sequence[int]])], num_flatten_dims: int, hidden_units: List[int], non_lin: Union[(str, type(nn.Module))]):
        """Build the inner flatten and dense sub-blocks.

        The flatten stage writes to intermediate '<out_key>_flat' keys that
        feed the dense stage, which writes the final `out_keys`.
        """
        super().__init__(in_keys=in_keys, out_keys=out_keys, in_shapes=in_shapes)
        out_keys_flatten = [f'{k}_flat' for k in self.out_keys]
        self.flatten_block = FlattenBlock(in_keys=self.in_keys, out_keys=out_keys_flatten, in_shapes=self.in_shapes, num_flatten_dims=num_flatten_dims)
        self.dense_block = DenseBlock(in_keys=out_keys_flatten, out_keys=out_keys, in_shapes=self.flatten_block.out_shapes(), hidden_units=hidden_units, non_lin=non_lin)
    # NOTE(review): the bare `(PerceptionBlock)` below looks like a stripped
    # decorator (e.g. `@override(PerceptionBlock)`) — confirm upstream.
    (PerceptionBlock)
    def forward(self, block_input: Dict[(str, torch.Tensor)]) -> Dict[(str, torch.Tensor)]:
        """Flatten, then apply the dense layers; returns the dense block's output dict."""
        block_output = self.flatten_block(block_input)
        block_output = self.dense_block(block_output)
        return block_output
    def __repr__(self):
        txt = f'{FlattenDenseBlock.__name__}:'
        txt += f'''
    {str(self.flatten_block)}'''
        txt += f'''
    {str(self.dense_block)}'''
        return txt
class LineMatch(LineBase):
ARROW_DECORATOR = '|===>'
TRUNCATE_DECORATOR = '|...|'
def __init__(self, formatted_line: FormattedText, result: MatchResult, index: int, validate_file_exists: bool=False, all_input: bool=False):
super().__init__()
self.formatted_line = formatted_line
self.index = index
self.all_input = all_input
(path, num, matches) = result
self.path = (path if all_input else parse.prepend_dir(path, with_file_inspection=validate_file_exists))
self.num = num
line = str(self.formatted_line)
self.start = matches.start()
self.end = min(matches.end(), len(line))
self.group: str = matches.group()
string_subset = line[self.start:self.end]
stripped_subset = string_subset.strip()
trailing_whitespace = (len(string_subset) - len(stripped_subset))
self.end -= trailing_whitespace
self.group = self.group[0:(len(self.group) - trailing_whitespace)]
self.selected = False
self.hovered = False
self.is_truncated = False
(self.before_text, _) = self.formatted_line.breakat(self.start)
(_, self.after_text) = self.formatted_line.breakat(self.end)
self.decorated_match = FormattedText()
self.update_decorated_match()
def toggle_select(self) -> None:
self.set_select((not self.selected))
def set_select(self, val: bool) -> None:
self.selected = val
self.update_decorated_match()
def set_hover(self, val: bool) -> None:
self.hovered = val
self.update_decorated_match()
def get_screen_index(self) -> int:
return self.index
def get_path(self) -> str:
return self.path
def get_file_size(self) -> str:
size = os.path.getsize(self.path)
for unit in ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
if (size < 1024):
return f'size: {size}{unit}'
size //= 1024
raise AssertionError('Unreachable')
def get_length_in_lines(self) -> str:
output = subprocess.check_output(['wc', '-l', self.path])
lines_count = output.strip().split()[0].decode('utf-8')
lines_caption = ('lines' if (int(lines_count) > 1) else 'line')
return f'length: {lines_count} {lines_caption}'
def get_time_last_accessed(self) -> str:
time_accessed = time.strftime('%m/%d/%Y %H:%M:%S', time.localtime(os.stat(self.path).st_atime))
return f'last accessed: {time_accessed}'
def get_time_last_modified(self) -> str:
time_modified = time.strftime('%m/%d/%Y %H:%M:%S', time.localtime(os.stat(self.path).st_mtime))
return f'last modified: {time_modified}'
def get_owner_user(self) -> str:
user_owner_name = Path(self.path).owner()
user_owner_id = os.stat(self.path).st_uid
return f'owned by user: {user_owner_name}, {user_owner_id}'
def get_owner_group(self) -> str:
group_owner_name = Path(self.path).group()
group_owner_id = os.stat(self.path).st_gid
return f'owned by group: {group_owner_name}, {group_owner_id}'
def get_dir(self) -> str:
return os.path.dirname(self.path)
def is_resolvable(self) -> bool:
return (not self.is_git_abbreviated_path())
def is_git_abbreviated_path(self) -> bool:
parts = self.path.split(os.path.sep)
return ((len(parts) > 0) and (parts[0] == '...'))
def get_line_num(self) -> int:
return self.num
def get_selected(self) -> bool:
return self.selected
def get_before(self) -> str:
return str(self.before_text)
def get_after(self) -> str:
return str(self.after_text)
def get_match(self) -> str:
return self.group
def __str__(self) -> str:
return ((((((self.get_before() + '||') + self.get_match()) + '||') + self.get_after()) + '||') + str(self.num))
def update_decorated_match(self, max_len: Optional[int]=None) -> None:
if (self.hovered and self.selected):
attributes = (curses.COLOR_WHITE, curses.COLOR_RED, FormattedText.BOLD_ATTRIBUTE)
elif self.hovered:
attributes = (curses.COLOR_WHITE, curses.COLOR_BLUE, FormattedText.BOLD_ATTRIBUTE)
elif self.selected:
attributes = (curses.COLOR_WHITE, curses.COLOR_GREEN, FormattedText.BOLD_ATTRIBUTE)
elif (not self.all_input):
attributes = (0, 0, FormattedText.UNDERLINE_ATTRIBUTE)
else:
attributes = (0, 0, 0)
decorator_text = self.get_decorator()
if self.controller:
self.controller.dirty_line(self.index)
plain_text = (decorator_text + self.get_match())
if (max_len and (len((plain_text + str(self.before_text))) > max_len)):
space_allowed = (((max_len - len(self.TRUNCATE_DECORATOR)) - len(decorator_text)) - len(str(self.before_text)))
mid_point = int((space_allowed / 2))
begin_match = plain_text[0:mid_point]
end_match = plain_text[(- mid_point):len(plain_text)]
plain_text = ((begin_match + self.TRUNCATE_DECORATOR) + end_match)
self.decorated_match = FormattedText((FormattedText.get_sequence_for_attributes(*attributes) + plain_text))
def get_decorator(self) -> str:
if self.selected:
return self.ARROW_DECORATOR
return ''
def print_up_to(self, text: FormattedText, printer: ColorPrinter, y_pos: int, x_pos: int, max_len: int) -> Tuple[(int, int)]:
if (max_len <= 0):
return (x_pos, max_len)
max_printable = min(len(str(text)), max_len)
text.print_text(y_pos, x_pos, printer, max_printable)
return ((x_pos + max_printable), (max_len - max_printable))
    def output(self, printer: ColorPrinter) -> None:
        """Render this line (before text, decorated match, after text) on screen."""
        assert (self.controller is not None)
        (min_x, min_y, max_x, max_y) = self.controller.get_chrome_boundaries()
        y_pos = ((min_y + self.index) + self.controller.get_scroll_offset())
        if ((y_pos < min_y) or (y_pos >= max_y)):
            # Scrolled out of the visible viewport - nothing to draw.
            return
        important_text_length = (len(str(self.before_text)) + len(str(self.decorated_match)))
        space_for_printing = (max_x - min_x)
        if (important_text_length > space_for_printing):
            # Too wide: rebuild the decorated match truncated to fit.
            self.update_decorated_match(max_len=space_for_printing)
            self.is_truncated = True
        else:
            expanded_size = (len(str(self.before_text)) + len(self.get_match()))
            if ((expanded_size < space_for_printing) and self.is_truncated):
                # Room reappeared: restore the untruncated match.
                self.update_decorated_match()
                self.is_truncated = False
        max_len = (max_x - min_x)
        so_far = (min_x, max_len)
        # Print the three segments left to right, tracking remaining width.
        so_far = self.print_up_to(self.before_text, printer, y_pos, *so_far)
        so_far = self.print_up_to(self.decorated_match, printer, y_pos, *so_far)
        so_far = self.print_up_to(self.after_text, printer, y_pos, *so_far)
def register_plugin(manager):
    """Register the deprecated player's blueprint, widgets and audio actions."""
    manager.register_blueprint(player)
    manager.register_mimetype_function(detect_playable_mimetype)
    # Stylesheet for the browse view.
    style = manager.style_class('deprecated_player.static', filename='css/browse.css')
    manager.register_widget(style)
    button_widget = manager.button_class(css='play')
    link_widget = manager.link_class()
    audio_types = ('audio/mpeg', 'audio/ogg', 'audio/wav')
    for widget in (link_widget, button_widget):
        manager.register_action('deprecated_player.audio', widget, mimetypes=audio_types)
# NOTE(review): the leading '.unit' looks like a stripped '@pytest.mark.unit'
# decorator - confirm against the upstream test suite.
.unit
def test_find_uncategorized_dataset_fields_uncategorized_fields() -> None:
    """Fields without data_categories in the existing dataset are reported as uncategorized."""
    test_resource = {'foo': ['1', '2']}
    test_resource_dataset = _dataset.create_db_dataset('ds', test_resource)
    existing_dataset = Dataset(name='ds', fides_key='ds', data_categories=['category_1'], collections=[DatasetCollection(name='foo', data_categories=['category_1'], fields=[DatasetField(name=1, data_categories=['category_1']), DatasetField(name=2)])])
    (uncategorized_keys, total_field_count) = _dataset.find_uncategorized_dataset_fields(existing_dataset=existing_dataset, source_dataset=test_resource_dataset)
    # Only field '2' lacks data_categories, so it is the single uncategorized key.
    assert (set(uncategorized_keys) == {'ds.foo.2'})
    assert (total_field_count == 2)
class Slot(ft.Container):
    """A card slot (stock, waste, foundation or tableau) on the solitaire board."""

    def __init__(self, solitaire, slot_type, top, left, border):
        super().__init__()
        self.solitaire = solitaire
        self.pile = []
        self.type = slot_type
        self.width = 70
        self.height = 100
        self.left = left
        self.top = top
        self.border_radius = ft.border_radius.all(6)
        self.border = border
        self.on_click = self.click

    def get_top_card(self):
        """Topmost card of this slot's pile, or None when the pile is empty."""
        if self.pile:
            return self.pile[-1]
        return None

    def upper_card_top(self):
        """Y coordinate where the next card in this slot should be placed."""
        if self.type == 'tableau' and len(self.pile) > 1:
            fanned_offset = self.solitaire.card_offset * (len(self.pile) - 1)
            return self.top + fanned_offset
        return self.top

    def click(self, e):
        """Stock click handler: recycle the deck while passes remain."""
        if self.type == 'stock' and self.solitaire.deck_passes_remaining > 1:
            self.solitaire.deck_passes_remaining -= 1
            self.solitaire.restart_stock()
class OptionPlotoptionsItemSonificationTracksMappingNoteduration(Options):
    """Generated wrapper for the Highcharts noteDuration mapping options.

    NOTE(review): without decorators each setter ``def`` silently shadows its
    getter of the same name, making the getters unreachable; the upstream
    generated wrappers use @property/@setter pairs, restored here.
    """

    @property
    def mapFunction(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesAreasplineAccessibility(Options):
    """Generated wrapper for areaspline series accessibility options.

    NOTE(review): without decorators each setter ``def`` silently shadows its
    getter of the same name; the upstream generated wrappers use
    @property/@setter pairs, restored here.
    """

    @property
    def description(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormat(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def exposeAsGroupOnly(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @exposeAsGroupOnly.setter
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keyboardNavigation(self) -> 'OptionSeriesAreasplineAccessibilityKeyboardnavigation':
        """Nested keyboardNavigation sub-configuration."""
        return self._config_sub_data('keyboardNavigation', OptionSeriesAreasplineAccessibilityKeyboardnavigation)

    @property
    def point(self) -> 'OptionSeriesAreasplineAccessibilityPoint':
        """Nested point sub-configuration."""
        return self._config_sub_data('point', OptionSeriesAreasplineAccessibilityPoint)
def read_formulation_swaps_file(filename):
    """Yield (code_group, formulations_for_group) pairs from a swaps CSV.

    Only rows marked 'Y' in the 'Really equivalent?' column are considered;
    each code in a pair must be its own generic equivalent.
    """
    with open(filename) as handle:
        rows = list(csv.DictReader(handle))
    code_pairs = []
    formulations = {}
    for row in rows:
        if row['Really equivalent?'].strip() != 'Y':
            continue
        code = row['Code'].strip()
        alternative_code = row['Alternative code'].strip()
        # Both codes must already be generic, and must differ.
        assert code == generic_equivalent_for_bnf_code(code)
        assert alternative_code == generic_equivalent_for_bnf_code(alternative_code)
        assert code != alternative_code
        code_pairs.append((code, alternative_code))
        formulations[code] = row['Formulation'].strip()
        formulations[alternative_code] = row['Alternative formulation'].strip()
    for code_group in groups_from_pairs(code_pairs):
        group_formulations = {formulations[c] for c in code_group if formulations.get(c)}
        yield (code_group, group_formulations)
def hostloc_get_info(session, proxies=None, cookies=None):
    """Scrape the signed-in user's stats from the hostloc credit page.

    Returns a dict containing whichever of 'group', 'coin', 'rep' and 'point'
    could be parsed from the page HTML.
    """
    user_info = {}
    # NOTE(review): the original URL was lost in extraction (the line was an
    # unterminated string literal); this is the usual Discuz credit page -
    # confirm against the upstream checker script.
    url = 'https://hostloc.com/home.php?mod=spacecp&ac=credit&showcredit=1'
    html = session.get(url, proxies=proxies, cookies=cookies).text
    match = re.search('>: (\\w+)</a>', html)
    if match:
        user_info['group'] = match.group(1)
    # NOTE(review): the 'coin' and 'rep' patterns below are identical - the
    # distinguishing label text (likely Chinese) appears stripped, so 'rep'
    # currently repeats the 'coin' value; confirm the original labels.
    match = re.search('<em> : </em>(\\d+)', html)
    if match:
        user_info['coin'] = match.group(1)
    match = re.search('<em> : </em>(\\d+)', html)
    if match:
        user_info['rep'] = match.group(1)
    match = re.search('<em>: </em>(\\d+)', html)
    if match:
        user_info['point'] = match.group(1)
    return user_info
def timestamp_converter(timestamp_string):
    """Parse a US-formatted timestamp into an aware America/New_York datetime.

    Tries 'MM/DD/YYYY HH:mm:ss' first, then falls back to 'MM/DD/YYYY HH:mm'
    when the seconds are absent.
    """
    try:
        parsed = arrow.get(timestamp_string, 'MM/DD/YYYY HH:mm:ss')
    except ParserError:
        # Some rows omit the seconds field.
        parsed = arrow.get(timestamp_string, 'MM/DD/YYYY HH:mm')
    return parsed.replace(tzinfo='America/New_York').datetime
# NOTE(review): the leading '.skipif'/'.external_download'/'.download' lines
# look like '@pytest.mark.*' decorators whose '@pytest.mark' prefix was lost
# in extraction - confirm against the upstream test suite.
.skipif(True, reason='Need to update with zenodo API')
.skipif(IN_GITHUB, reason='Too long to test on GITHUB')
.external_download
.download
def test_zenodo_read_nc_partial():
    """Load one NetCDF file from a zenodo record and check an expected variable."""
    ds = cml.load_source('zenodo', record_id='3403963', file_key='2000_temperature_summary.nc')
    ds = ds.to_xarray()
    # 't_min' should be among the dataset's variables.
    assert ('t_min' in list(ds.keys()))
class Mesh():
    """An OBJ mesh asset plus its scene transform/appearance properties.

    The destination filename embeds the source file size so differently-sized
    files sharing a name do not collide when copied into the scene directory.
    """

    def __init__(self, filename, translation=None, rotation=None, scale=None, color=None, visible=True):
        self.filename_source = filename
        obj_file = os.path.basename(filename)
        # splitext keeps the full stem for names with extra dots; the original
        # split('.') truncated e.g. 'a.b.obj' to stem 'a' / extension 'b'.
        stem, extension = os.path.splitext(obj_file)
        obj_file_size = os.path.getsize(filename)
        self.filename_destination = f'{stem}_{obj_file_size}{extension}'
        self.translation = translation
        self.rotation = rotation
        self.scale = scale
        self.color = color
        self.visible = visible

    def get_properties(self, filename):
        """Return the JSON-serializable scene description for this mesh.

        The *filename* argument is unused but retained for interface
        compatibility with existing callers.
        """
        return {'type': 'obj', 'filename': self.filename_destination, 'translation': self.translation, 'rotation': self.rotation, 'scale': self.scale, 'color': self.color, 'visible': self.visible}

    def write_binary(self, path):
        """Copy the source OBJ into *path*'s directory under the destination name.

        Fixed: the existence check now tests the full destination path; the
        original tested the bare filename against the current working
        directory, so the check effectively never prevented a copy.
        """
        destination_dir = os.path.dirname(path)
        destination_path = os.path.join(destination_dir, self.filename_destination)
        if not os.path.exists(destination_path):
            copyfile(self.filename_source, destination_path)
class TOMLHandler(Handler):
    """Read and write payloads as TOML via pytomlpp."""

    def _load(self, path: str) -> Dict:
        """Parse the TOML file at *path* into a dict."""
        return pytomlpp.load(path)

    def _save(self, out_dict: Dict, info_dict: Optional[Dict], library_dict: Optional[Dict], path: str) -> str:
        """Write header info, append the payload, then append library info."""
        self.write_extra_info(path=path, info_dict=info_dict)
        with open(path, 'a') as toml_fid:
            pytomlpp.dump(out_dict, toml_fid)
        self.write_extra_info(path=path, info_dict=library_dict, version=False, write_mode='a', newlines=2)
        return path
class OptionPlotoptionsPictorialSonificationContexttracksMappingFrequency(Options):
    """Generated wrapper for pictorial sonification frequency mapping options.

    NOTE(review): without decorators each setter ``def`` silently shadows its
    getter of the same name; the upstream generated wrappers use
    @property/@setter pairs, restored here.
    """

    @property
    def mapFunction(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the parenthesized line below is the argument list of a
# stripped FastAPI route decorator (likely '@router.put(...)') - confirm
# against the upstream router module.
(urls.POLICY_WEBHOOKS_PRE, status_code=HTTP_200_OK, dependencies=[Security(verify_oauth_client, scopes=[scopes.WEBHOOK_CREATE_OR_UPDATE])], response_model=List[schemas.PolicyWebhookResponse])
def create_or_update_pre_execution_webhooks(*, policy_key: FidesKey, db: Session=Depends(deps.get_db), webhooks: conlist(schemas.PolicyWebhookCreate, max_items=50)=Body(...)) -> List[PolicyPreWebhook]:
    """Create or replace the pre-execution webhooks (max 50) for *policy_key*."""
    return put_webhooks(PolicyPreWebhook, policy_key, db, webhooks)
def _add_peaks_outline(model, plt_log, ax, **plot_kwargs):
    """Outline each fitted gaussian peak on top of the aperiodic fit."""
    defaults = {'color': PLT_COLORS['periodic'], 'alpha': 0.7, 'lw': 1.5}
    plot_kwargs = check_plot_kwargs(plot_kwargs, defaults)
    for peak in model.gaussian_params_:
        center, width = peak[0], peak[2]
        # Draw the peak across +/- 3 gaussian widths around its center.
        peak_range = [center - width * 3, center + width * 3]
        full_line = model._ap_fit + gen_periodic(model.freqs, peak)
        peak_freqs, peak_line = trim_spectrum(model.freqs, full_line, peak_range)
        if plt_log:
            peak_freqs = np.log10(peak_freqs)
        ax.plot(peak_freqs, peak_line, **plot_kwargs)
def construct_q(filters):
    """Recursively build a query object from a nested filter tree.

    Leaf nodes (no 'condition' key) are delegated to construct_single_q;
    group nodes combine their rules with AND or OR.
    """
    if 'condition' not in filters:
        return construct_single_q(filters)
    sub_qs = [construct_q(rule) for rule in filters['rules']]
    combine = operator.and_ if filters['condition'] == 'AND' else operator.or_
    return reduce(combine, sub_qs)
class OptionPlotoptionsTilemapTooltip(Options):
    """Generated wrapper for tilemap tooltip options.

    NOTE(review): without decorators each setter ``def`` silently shadows its
    getter of the same name, making every getter (and its documented default)
    unreachable; the upstream generated wrappers use @property/@setter pairs,
    restored here.
    """

    @property
    def clusterFormat(self):
        # Default format string for clustered points.
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsTilemapTooltipDatetimelabelformats':
        """Nested dateTimeLabelFormats sub-configuration."""
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsTilemapTooltipDatetimelabelformats)

    @property
    def distance(self):
        # Configured value; defaults to 16.
        return self._config_get(16)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        # Configured value; defaults to False.
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        # Configured value; defaults to True.
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        # Configured value; defaults to the empty string.
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        # Configured value; defaults to the literal string 'undefined'.
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        # Default HTML header template for the tooltip.
        return self._config_get('<span style="color:{point.color}"></span> <span style="font-size: 0.8em"> {series.name}</span><br/>')

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        # Default per-point template for the tooltip body.
        return self._config_get('{point.x}, {point.y}: {point.value}<br/>')

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        # Configured value; defaults to unset (None).
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def test_view_exception(django_elasticapm_client, client):
    """A view that raises should produce exactly one captured error event."""
    with pytest.raises(Exception):
        client.get(reverse('elasticapm-raise-exc'))
    errors = django_elasticapm_client.events[ERROR]
    assert len(errors) == 1
    event = errors[0]
    assert 'exception' in event
    exc = event['exception']
    assert exc['type'] == 'MyException'
    assert exc['message'] == 'MyException: view exception'
    assert exc['handled'] is False
    assert event['culprit'] == 'tests.contrib.django.testapp.views.raise_exc'
def test_worker_env_vars(isolated_client):
    """FAL host and key credentials must be visible inside the worker environment."""
    # NOTE(review): this bare call looks like the argument list of a stripped
    # decorator (e.g. '@isolated_client(...)') on get_env_var - confirm.
    _client('virtualenv', keep_alive=5)
    def get_env_var(name: str) -> (str | None):
        import os
        return os.getenv(name, None)
    fal_host = get_env_var('FAL_HOST')
    assert fal_host, 'FAL_HOST is not set'
    assert fal_host.startswith('api.')
    assert fal_host.endswith('.shark.fal.ai')
    fal_key_id = get_env_var('FAL_KEY_ID')
    assert fal_key_id, 'FAL_KEY_ID is not set'
    fal_key_secret = get_env_var('FAL_KEY_SECRET')
    assert fal_key_secret, 'FAL_KEY_SECRET is not set'
class CommandsTestCase(TestCase):
    """Tests for the import_list_sizes management command."""

    def test_import_list_sizes(self):
        """Importing the fixture CSV populates practice statistics and STAR-PUs."""
        fixture = ('frontend/tests/fixtures/commands/'
                   'patient_list_size/2040_02/patient_list_size_new.csv')
        call_command('import_list_sizes', '--filename={}'.format(fixture))
        list_sizes = PracticeStatistics.objects.all()
        self.assertEqual(len(list_sizes), 2)
        p = PracticeStatistics.objects.get(practice_id='N84014', date='2040-02-01')
        self.assertEqual(p.total_list_size, 40)
        self.assertAlmostEqual(p.astro_pu_cost, 199.)
        self.assertAlmostEqual(p.astro_pu_items, 780.)
        self.assertEqual('%.3f' % p.star_pu['oral_antibacterials_item'], '27.135')
        self.assertEqual('%.3f' % p.star_pu['cox-2_inhibitors_cost'], '13.050')
        self.assertEqual('%.3f' % p.star_pu['antidepressants_adq'], '887.100')
        # Every STAR-PU entry should have been populated with a real value.
        for key in p.star_pu:
            self.assertNotEqual(p.star_pu[key], 0)
            self.assertNotEqual(p.star_pu[key], None)
class IPv6ProtoTCP(MatchTest):
    """Match on the IPv6 ethertype (0x86dd) plus the TCP IP protocol number."""

    def runTest(self):
        match = ofp.match([ofp.oxm.eth_type(34525), ofp.oxm.ip_proto(6)])
        should_match = {'tcp': simple_tcpv6_packet()}
        should_not_match = {'udp': simple_udpv6_packet(), 'icmp': simple_icmpv6_packet()}
        self.verify_match(match, should_match, should_not_match)
# NOTE(review): these two bare statements look like stripped class decorators
# ('@_register_parser', '@_set_msg_type(...)') from the upstream module -
# confirm before relying on message registration.
_register_parser
_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REPLY)
class OFPQueueGetConfigReply(MsgBase):
    """OpenFlow queue-get-config reply message (port + list of packet queues)."""

    def __init__(self, datapath, port=None, queues=None):
        super(OFPQueueGetConfigReply, self).__init__(datapath)
        self.port = port
        self.queues = queues

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse the port field and trailing packet-queue list from *buf*.

        Restored @classmethod: the body takes *cls* and the framework invokes
        parser() on the class, so without the decorator the *datapath*
        argument would have been bound to the *cls* parameter.
        """
        msg = super(OFPQueueGetConfigReply, cls).parser(datapath, version, msg_type, msg_len, xid, buf)
        (msg.port,) = struct.unpack_from(ofproto.OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR, msg.buf, ofproto.OFP_HEADER_SIZE)
        msg.queues = []
        # Walk the variable-length queue entries following the fixed header.
        length = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_SIZE
        offset = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_SIZE
        while (length < msg.msg_len):
            queue = OFPPacketQueue.parser(msg.buf, offset)
            msg.queues.append(queue)
            offset += queue.len
            length += queue.len
        return msg
class TestFieldNamePDDType(TestData):
    """Checks field-mapping dtype resolution against the pandas reference data."""

    def test_all_formats(self):
        field_mappings = FieldMappings(client=ES_TEST_CLIENT, index_pattern=FLIGHTS_INDEX_NAME)
        pd_flights = self.pd_flights()
        assert_series_equal(pd_flights.dtypes, field_mappings.dtypes())
        # Every mapped ES field must resolve to the matching pandas dtype.
        for es_field_name in FLIGHTS_MAPPING['mappings']['properties']:
            resolved = field_mappings.field_name_pd_dtype(es_field_name)
            assert pd_flights[es_field_name].dtype == resolved

    def test_non_existant(self):
        field_mappings = FieldMappings(client=ES_TEST_CLIENT, index_pattern=FLIGHTS_INDEX_NAME)
        with pytest.raises(KeyError):
            field_mappings.field_name_pd_dtype('unknown')
class IsUserHomeDeterminer(hass.Hass):
    """AppDaemon app that keeps an input_boolean in sync with user presence.

    A door-sensor event triggers a device-tracker check: if the tracker turned
    'home' within the last ``delay`` seconds, the user just arrived; otherwise
    the app waits (for arrivals, up to ``delay`` seconds) for the tracker to
    change state.
    """

    def initialize(self):
        """Read config, seed the input_boolean from the tracker, subscribe to the door sensor."""
        self.listen_state_handle_list = []
        self.timer_handle_list = []
        # Seconds to wait for the device tracker to catch up after a door event.
        self.delay = 600
        self.app_switch = self.args['app_switch']
        self.input_boolean = self.args['input_boolean']
        self.device_tracker = self.args['device_tracker']
        self.door_sensor = self.args['door_sensor']
        device_tracker_state = self.get_state(self.device_tracker, attribute='all')
        if self.get_state(self.app_switch) == 'on':
            # Initialize the boolean from the tracker's current state.
            if device_tracker_state['state'] == 'home':
                self.log('User is home')
                self.timer_handle_list.append(self.run_in(self.turn_on_callback, 0, turn_on_entity=self.input_boolean))
            else:
                self.log('User is not home')
                self.timer_handle_list.append(self.run_in(self.turn_off_callback, 0, turn_off_entity=self.input_boolean))
        self.listen_state_handle_list.append(self.listen_state(self.state_change, self.door_sensor))
        # Handle of the one-shot tracker listener created while waiting.
        self.listen_state_handle = None

    def state_change(self, entity, attribute, old, new, kwargs):
        """Door-sensor callback: decide whether the user arrived or left."""
        if self.get_state(self.app_switch) != 'on':
            return
        if new == '' or new == old:
            return
        self.log('{} changed from {} to {}'.format(entity, old, new))
        if not (new == 'on' and old == 'off'):
            return
        # A fresh door-open event supersedes any pending wait.
        self.cancel_listen_state_callback(None)
        device_tracker_state = self.get_state(self.device_tracker, attribute='all')
        self.log('device_tracker_state: {}'.format(device_tracker_state))
        last_changed = device_tracker_state['last_changed']
        self.log('last_changed: {}'.format(last_changed))
        if (device_tracker_state['state'] == 'home') and ((datetime.datetime.now(datetime.timezone.utc) - self.convert_utc(last_changed)) < datetime.timedelta(seconds=self.delay)):
            # Tracker flipped to 'home' just before the door opened: arrival.
            self.log('User got home')
            self.turn_on(self.input_boolean)
        elif device_tracker_state['state'] != 'home':
            self.log("Wait for device tracker to change to 'home'")
            self.listen_state_handle = self.listen_state(self.check_if_user_got_home, self.device_tracker)
            self.listen_state_handle_list.append(self.listen_state_handle)
            # Give up waiting after `delay` seconds.
            self.timer_handle_list.append(self.run_in(self.cancel_listen_state_callback, self.delay))
        elif device_tracker_state['state'] == 'home':
            # Tracker is 'home' but not recently updated: user may be leaving.
            self.log("Wait for device tracker to change to 'not_home'")
            self.listen_state_handle = self.listen_state(self.check_if_user_left_home, self.device_tracker)
            self.listen_state_handle_list.append(self.listen_state_handle)

    def cancel_listen_state_callback(self, kwargs):
        """Cancel the pending tracker-wait listener, if any."""
        if self.listen_state_handle is not None:
            self.log('Timeout while waiting for user to get/leave home. Cancel listen_state')
            if self.listen_state_handle in self.listen_state_handle_list:
                self.listen_state_handle_list.remove(self.listen_state_handle)
            self.cancel_listen_state(self.listen_state_handle)
            self.listen_state_handle = None

    def check_if_user_left_home(self, entity, attribute, old, new, kwargs):
        """Tracker callback while waiting for a departure."""
        if new != 'home':
            self.log('User left home')
            if self.listen_state_handle in self.listen_state_handle_list:
                self.listen_state_handle_list.remove(self.listen_state_handle)
            # Fixed: 'is not None' (was '!= None'), matching check_if_user_got_home.
            if self.listen_state_handle is not None:
                self.cancel_listen_state(self.listen_state_handle)
                self.listen_state_handle = None
            self.timer_handle_list.append(self.run_in(self.turn_off_callback, 1, turn_off_entity=self.input_boolean))

    def check_if_user_got_home(self, entity, attribute, old, new, kwargs):
        """Tracker callback while waiting for an arrival."""
        if new == 'home':
            self.log('User got home')
            if self.listen_state_handle in self.listen_state_handle_list:
                self.listen_state_handle_list.remove(self.listen_state_handle)
            if self.listen_state_handle is not None:
                self.cancel_listen_state(self.listen_state_handle)
                self.listen_state_handle = None
            self.timer_handle_list.append(self.run_in(self.turn_on_callback, 1, turn_on_entity=self.input_boolean))

    def turn_on_callback(self, kwargs):
        """Turn on the entity, retrying in 1s on HTTP errors."""
        try:
            self.turn_on(kwargs['turn_on_entity'])
        except HTTPError as exception:
            self.log('Error trying to turn on entity. Will try again in 1s. Error: {}'.format(exception), level='WARNING')
            self.timer_handle_list.append(self.run_in(self.turn_on_callback, 1, turn_on_entity=kwargs['turn_on_entity']))

    def turn_off_callback(self, kwargs):
        """Turn off the entity, retrying in 1s on HTTP errors."""
        try:
            self.turn_off(kwargs['turn_off_entity'])
        except HTTPError as exception:
            self.log('Error trying to turn off entity. Will try again in 1s. Error: {}'.format(exception), level='WARNING')
            self.timer_handle_list.append(self.run_in(self.turn_off_callback, 1, turn_off_entity=kwargs['turn_off_entity']))

    def terminate(self):
        """Release all registered listeners and timers on app shutdown."""
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
        for timer_handle in self.timer_handle_list:
            self.cancel_timer(timer_handle)
def extractCheapmachinetranslationsBlogspotCom(item):
    """Map a feed item's title to a release message.

    Returns None for previews/untagged items, a release message on a title
    match, and False when no known series matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if (not (chp or vol)) or ('preview' in title_lower):
        return None
    titlemap = [
        ('MSLOL - Chapter', 'The Mad Summoner of League of Legends', 'translated'),
        ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
        ('Master of Dungeon', 'Master of Dungeon', 'oel'),
    ]
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in title_lower:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _scan_depth(args):
    """Worker wrapper: scan one BED file for depth regions.

    *args* is the packed (bed_fname, bam_fnames, min_depth, min_gap,
    min_length) tuple; returns (bed_fname, DataFrame of regions).
    """
    bed_fname, bam_fnames, min_depth, min_gap, min_length = args
    raw_regions = scan_depth(bed_fname, bam_fnames, min_depth)
    regions = list(drop_small(merge_gaps(raw_regions, min_gap), min_length))
    table = pd.DataFrame.from_records(regions, columns=regions[0]._fields)
    return (bed_fname, table)
def can_obtain_lock(basename):
    """Check whether the lock file *basename* is currently uncontended.

    Returns True when the lock file does not exist or a shared lock can be
    taken on it; returns False (after logging) when another process holds it
    exclusively.
    """
    lock_file = os.path.join(LOCK_DIRECTORY, basename)
    try:
        lh = open(lock_file, 'r')
    except FileNotFoundError:
        # No lock file means nothing can be holding the lock.
        return True
    # Close the handle on every path: the original leaked the descriptor (and
    # kept the probe's shared lock) for the life of the process.
    with lh:
        try:
            fcntl.lockf(lh, (fcntl.LOCK_SH | fcntl.LOCK_NB))
        except IOError:
            sdlog.error(LOCK_ERROR.format(lock_file))
            return False
    return True
# NOTE(review): '_os(*metadata.platforms)' looks like a stripped decorator
# (e.g. '@common.requires_os(*metadata.platforms)') from the RTA framework -
# confirm against the upstream script.
_os(*metadata.platforms)
def main():
    """Emulate binary masquerading: copy cscript.exe, rewrite its
    OriginalFilename version resource via rcedit, run it briefly, clean up."""
    cscript = 'C:\\Users\\Public\\cscript.exe'
    rcedit = 'C:\\Users\\Public\\rcedit.exe'
    common.copy_file(EXE_FILE, cscript)
    common.copy_file(RENAMER, rcedit)
    common.log('Modifying the OriginalFileName attribute')
    common.execute([rcedit, cscript, '--set-version-string', 'OriginalFilename', 'cscript.exe'])
    # Run the modified binary with a 5s timeout, then kill and remove it.
    common.execute([cscript, '/c', 'echo', 'C:\\Users\\A\\Temp\\7zip'], timeout=5, kill=True)
    common.remove_files(cscript)
def test_dataclass_more():
    """Compose dataclass-JSON typed transforms through a stringify/add flow.

    NOTE(review): the inner functions likely carried @task/@workflow-style
    decorators upstream that appear stripped here - confirm.
    """
    class Datum(DataClassJsonMixin):
        # x: source integer; y: its string form; z: mapping of x -> str(x).
        x: int
        y: str
        z: typing.Dict[(int, str)]
    def stringify(x: int) -> Datum:
        return Datum(x=x, y=str(x), z={x: str(x)})
    def add(x: Datum, y: Datum) -> Datum:
        # Merge the maps, sum the ints, concatenate the strings.
        x.z.update(y.z)
        return Datum(x=(x.x + y.x), y=(x.y + y.y), z=x.z)
    def wf(x: int, y: int) -> Datum:
        return add(x=stringify(x=x), y=stringify(x=y))
    wf(x=10, y=20)
def rsync(config: Dict[(Any, Any)], include: List[str], exclude: List[str], source: str, target: str, up: bool=True) -> None:
    """Rsync files to (up=True) or from (up=False) the cluster head node.

    Args:
        config: cluster config providing auth['ssh_user'] and key material.
        include: rsync --include patterns.
        exclude: rsync --exclude patterns.
        source: source path; prefixed with 'user@ip:' when downloading.
        target: target path; prefixed with 'user@ip:' when uploading.
        up: transfer direction (True = local -> remote).
    """
    keypair = _get_pem(config)
    remote_ip = sdk.get_head_node_ip(config)
    user = config['auth']['ssh_user']
    args = ['rsync', '--rsh', f'ssh -i {keypair} -o StrictHostKeyChecking=no', '-avz']
    for pattern in include:
        args += [f'--include={pattern}']
    for pattern in exclude:
        args += [f'--exclude={pattern}']
    args += ['--prune-empty-dirs']
    # rsync remote specs take the form 'user@host:path'; the original omitted
    # the '@', producing an invalid spec like 'ubuntu1.2.3.4:/path'.
    if up:
        target = f'{user}@{remote_ip}:{target}'
    else:
        source = f'{user}@{remote_ip}:{source}'
    args += [source, target]
    _run_command(args)
class SimulationData(AbstractSimulationData):
    """Container pairing a :class:`.Simulation` with the data its monitors recorded."""
    # The original simulation that produced this data.
    simulation: Simulation = pd.Field(..., title='Simulation', description='Original :class:`.Simulation` associated with the data.')
    # One MonitorData entry per monitor of the simulation.
    data: Tuple[(annotate_type(MonitorDataType), ...)] = pd.Field(..., title='Monitor Data', description='List of :class:`.MonitorData` instances associated with the monitors of the original :class:`.Simulation`.')
    # True when the solver run diverged.
    diverged: bool = pd.Field(False, title='Diverged', description='A boolean flag denoting whether the simulation run diverged.')
    def final_decay_value(self) -> float:
        """Return the last 'field decay' value recorded in the solver log.

        NOTE(review): likely a @property upstream whose decorator was stripped
        in extraction - confirm.
        """
        log_str = self.log
        if (log_str is None):
            raise DataError("No log string in the SimulationData object, can't find final decay value.")
        lines = log_str.split('\n')
        decay_lines = [line for line in lines if ('field decay' in line)]
        # Default to 1.0 (no decay observed) when the log has no decay lines.
        final_decay = 1.0
        if (len(decay_lines) > 0):
            final_decay_line = decay_lines[(- 1)]
            final_decay = float(final_decay_line.split('field decay: ')[(- 1)])
        return final_decay
    def source_spectrum(self, source_index: int) -> Callable:
        """Return a function mapping frequencies to the indexed source's normalized spectrum."""
        if ((source_index is None) or (len(self.simulation.sources) == 0)):
            # Nothing to normalize by: spectrum of ones (identity-like).
            return np.ones_like
        source = self.simulation.sources[source_index]
        source_time = source.source_time
        times = self.simulation.tmesh
        dt = self.simulation.dt
        def source_spectrum_fn(freqs):
            """Source spectrum normalized by its amplitude and phase."""
            spectrum = source_time.spectrum(times, freqs, dt)
            return ((spectrum / source_time.amplitude) / np.exp((1j * source_time.phase)))
        return source_spectrum_fn
    def renormalize(self, normalize_index: int) -> SimulationData:
        """Return a copy of this data renormalized to the source at *normalize_index*."""
        num_sources = len(self.simulation.sources)
        if ((normalize_index == self.simulation.normalize_index) or (num_sources == 0)):
            # Already normalized as requested, or nothing to normalize by.
            return self.copy()
        if (normalize_index and ((normalize_index < 0) or (normalize_index >= num_sources))):
            raise DataError(f'normalize_index {normalize_index} out of bounds for list of sources of length {num_sources}')
        def source_spectrum_fn(freqs):
            # New/old spectrum ratio undoes the previous normalization and
            # applies the new one in a single multiplicative factor.
            new_spectrum_fn = self.source_spectrum(normalize_index)
            old_spectrum_fn = self.source_spectrum(self.simulation.normalize_index)
            return (new_spectrum_fn(freqs) / old_spectrum_fn(freqs))
        data_normalized = [mnt_data.normalize(source_spectrum_fn) for mnt_data in self.data]
        simulation = self.simulation.copy(update=dict(normalize_index=normalize_index))
        return self.copy(update=dict(simulation=simulation, data=data_normalized))
    def load_field_monitor(self, monitor_name: str) -> AbstractFieldData:
        """Fetch monitor data by name, requiring that it holds field data."""
        mon_data = self[monitor_name]
        if (not isinstance(mon_data, AbstractFieldData)):
            raise DataError(f"data for monitor '{monitor_name}' does not contain field data as it is a '{type(mon_data)}'.")
        return mon_data
    def at_centers(self, field_monitor_name: str) -> xr.Dataset:
        """Colocate the named monitor's field components at its colocation centers."""
        monitor_data = self.load_field_monitor(field_monitor_name)
        return monitor_data.at_coords(monitor_data.colocation_centers)
    def _at_boundaries(self, monitor_data: xr.Dataset) -> xr.Dataset:
        """Colocate field components at colocation boundaries unless already colocated."""
        if monitor_data.monitor.colocate:
            # Data was colocated during the run; just repackage the components.
            return monitor_data.package_colocate_results(monitor_data.field_components)
        return monitor_data.at_coords(monitor_data.colocation_boundaries)
    def at_boundaries(self, field_monitor_name: str) -> xr.Dataset:
        """Colocate the named monitor's field components at colocation boundaries."""
        return self._at_boundaries(self.load_field_monitor(field_monitor_name))
    def get_poynting_vector(self, field_monitor_name: str) -> xr.Dataset:
        """Compute Poynting components S{x,y,z} from the monitor's E and H fields.

        Time-domain data uses S = E x H directly; frequency-domain data uses
        0.5 * (E x H*), i.e. the conjugated time-averaged form.
        """
        mon_data = self.load_field_monitor(field_monitor_name)
        field_dataset = self._at_boundaries(mon_data)
        time_domain = isinstance(self.monitor_data[field_monitor_name], FieldTimeData)
        poynting_components = {}
        dims = 'xyz'
        for (axis, dim) in enumerate(dims):
            # The two transverse dimensions for this Poynting component.
            dim_1 = dims[(axis - 2)]
            dim_2 = dims[(axis - 1)]
            required_components = [(f + c) for f in 'EH' for c in (dim_1, dim_2)]
            if (not all(((field_cmp in field_dataset) for field_cmp in required_components))):
                # Skip components whose transverse fields were not recorded.
                continue
            e_1 = field_dataset.data_vars[('E' + dim_1)]
            e_2 = field_dataset.data_vars[('E' + dim_2)]
            h_1 = field_dataset.data_vars[('H' + dim_1)]
            h_2 = field_dataset.data_vars[('H' + dim_2)]
            poynting_components[('S' + dim)] = (((e_1 * h_2) - (e_2 * h_1)) if time_domain else (0.5 * ((e_1 * h_2.conj()) - (e_2 * h_1.conj()))))
            # Correct for the primal/dual grid placement of the two fields.
            grid_correction = (mon_data.grid_dual_correction * mon_data.grid_primal_correction)
            poynting_components[('S' + dim)] *= grid_correction
        return xr.Dataset(poynting_components)
    def _get_scalar_field(self, field_monitor_name: str, field_name: str, val: FieldVal, phase: float=0.0):
        """Return a scalar data array derived from a monitor's fields.

        *field_name* is either a single component (e.g. 'Sx') or a vector
        magnitude ('E', 'H', 'S'); *val* selects real/imag/abs/abs^2/phase.
        An optional *phase* is applied to complex data before reduction.
        """
        if (field_name[0] == 'S'):
            # Poynting quantities come from the derived S dataset.
            dataset = self.get_poynting_vector(field_monitor_name)
            if (len(field_name) > 1):
                if (field_name in dataset):
                    derived_data = dataset[field_name]
                    derived_data.name = field_name
                    return self._field_component_value(derived_data, val)
                raise Tidy3dKeyError(f'Poynting component {field_name} not available')
        else:
            dataset = self.at_boundaries(field_monitor_name)
        dataset = self.apply_phase(data=dataset, phase=phase)
        if (field_name in ('E', 'H', 'S')):
            # Vector magnitude over the three Cartesian components.
            required_components = [(field_name + c) for c in 'xyz']
            if (not all(((field_cmp in dataset) for field_cmp in required_components))):
                raise DataError(f"Field monitor must contain '{field_name}x', '{field_name}y', and '{field_name}z' fields to compute '{field_name}'.")
            field_components = (dataset[c] for c in required_components)
            if (val == 'real'):
                derived_data = (sum(((f.real ** 2) for f in field_components)) ** 0.5)
                derived_data.name = f'|Re{{{field_name}}}|'
            elif (val == 'imag'):
                derived_data = (sum(((f.imag ** 2) for f in field_components)) ** 0.5)
                derived_data.name = f'|Im{{{field_name}}}|'
            elif (val == 'abs'):
                derived_data = (sum(((abs(f) ** 2) for f in field_components)) ** 0.5)
                derived_data.name = f'|{field_name}|'
            elif (val == 'abs^2'):
                derived_data = sum(((abs(f) ** 2) for f in field_components))
                if hasattr(derived_data, 'name'):
                    derived_data.name = f'|{field_name}|2'
            elif (val == 'phase'):
                # Phase has no meaning for a vector magnitude.
                raise Tidy3dKeyError(f'Phase is not defined for complex vector {field_name}')
            return derived_data
        raise Tidy3dKeyError(f"Derived field name must be one of 'E', 'H', 'S', 'Sx', 'Sy', or 'Sz', received '{field_name}'.")
    def get_intensity(self, field_monitor_name: str) -> xr.DataArray:
        """Return the electric field intensity |E|^2 for the named monitor."""
        return self._get_scalar_field(field_monitor_name=field_monitor_name, field_name='E', val='abs^2')
    def mnt_data_from_file(cls, fname: str, mnt_name: str, **parse_obj_kwargs) -> MonitorDataType:
        """Load a single monitor's data from an hdf5 SimulationData file.

        NOTE(review): takes *cls* but shows no @classmethod decorator - likely
        stripped in extraction; confirm against the upstream source.
        """
        if (pathlib.Path(fname).suffix != '.hdf5'):
            raise ValueError("'mnt_data_from_file' only works with '.hdf5' files.")
        with h5py.File(fname) as f_handle:
            if ('data' not in f_handle):
                raise ValueError(f'could not find data in the supplied file {fname}')
            # Recover the monitor list from the embedded simulation JSON so
            # that data-group indices can be mapped back to monitor names.
            json_string = f_handle[JSON_TAG][()]
            json_dict = json.loads(json_string)
            monitor_list = json_dict['simulation']['monitors']
            for (monitor_index_str, _mnt_data) in f_handle['data'].items():
                monitor_dict = monitor_list[int(monitor_index_str)]
                if (monitor_dict['name'] == mnt_name):
                    monitor_type_str = monitor_dict['type']
                    if (monitor_type_str not in DATA_TYPE_NAME_MAP):
                        raise ValueError(f"Could not find data type '{monitor_type_str}'.")
                    monitor_data_type = DATA_TYPE_NAME_MAP[monitor_type_str]
                    group_path = f'data/{monitor_index_str}'
                    return monitor_data_type.from_file(fname, group_path=group_path, **parse_obj_kwargs)
            raise ValueError(f"No monitor with name '{mnt_name}' found in data file.")
    def apply_phase(data: Union[(xr.DataArray, xr.Dataset)], phase: float=0.0) -> xr.DataArray:
        """Multiply complex *data* by exp(1j*phase); warn and no-op for real data.

        NOTE(review): has no *self*/*cls* parameter - likely a @staticmethod
        whose decorator was stripped in extraction; confirm upstream.
        """
        if (phase != 0.0):
            if np.any(np.iscomplex(data.values)):
                data *= np.exp((1j * phase))
            else:
                log.warning(f'Non-zero phase of {phase} specified but the data being plotted is real-valued. The phase will be ignored in the plot.')
        return data
    def plot_field(self, field_monitor_name: str, field_name: str, val: FieldVal='real', scale: PlotScale='lin', eps_alpha: float=0.2, phase: float=0.0, robust: bool=True, vmin: float=None, vmax: float=None, ax: Ax=None, **sel_kwargs) -> Ax:
        """Plot a 2D cross-section of a monitor's field on a plane.

        Parameters
        ----------
        field_monitor_name : name of the field monitor whose data is plotted.
        field_name : derived quantity ('E', 'H', 'S', 'Sx', 'Sy', 'Sz'), an
            individual stored component (e.g. 'Ex'), or the deprecated 'int'
            alias for E with val='abs^2'.
        val : which part of the (complex) field to show ('real', 'imag',
            'abs', 'abs^2', 'phase').
        scale : 'lin' or 'dB'.
        eps_alpha : transparency of the permittivity overlay.
        phase : phase factor applied to complex data before taking ``val``.
        robust, vmin, vmax : color-scale controls forwarded to the plot.
        ax : existing matplotlib axes to draw into, or None.
        **sel_kwargs : coordinate selections (e.g. ``x=0``, ``f=2e14``) used
            to reduce the data to exactly two spatial dimensions.

        Returns the matplotlib axes containing the plot; raises DataError if
        the component is missing or the selection does not leave exactly two
        spatial coordinates.
        """
        # Deprecated alias: 'int' means intensity, i.e. |E|^2.
        if (field_name == 'int'):
            log.warning("'int' field name is deprecated and will be removed in the future. Plese use field_name='E' and val='abs^2' for the same effect.")
            field_name = 'E'
            val = 'abs^2'
        if ((field_name in ('E', 'H')) or (field_name[0] == 'S')):
            # Derived magnitudes / Poynting components come from the scalar
            # field helper rather than from a single stored component.
            field_data = self._get_scalar_field(field_monitor_name, field_name, val, phase=phase)
        else:
            field_monitor_data = self.load_field_monitor(field_monitor_name)
            if (field_name not in field_monitor_data.field_components):
                raise DataError(f"field_name '{field_name}' not found in data.")
            field_component = field_monitor_data.field_components[field_name]
            field_component.name = field_name
            field_component = self.apply_phase(data=field_component, phase=phase)
            field_data = self._field_component_value(field_component, val)
        if (scale == 'dB'):
            if (val == 'phase'):
                log.warning('Ploting phase component in log scale masks the phase sign.')
            # dB factor depends on the (quantity, val) pair; default 20*log10.
            db_factor = {('S', 'real'): 10, ('S', 'imag'): 10, ('S', 'abs'): 10, ('S', 'abs^2'): 5, ('S', 'phase'): 1, ('E', 'abs^2'): 10, ('H', 'abs^2'): 10}.get((field_name[0], val), 20)
            field_data = (db_factor * np.log10(np.abs(field_data)))
            field_data.name += ' (dB)'
            cmap_type = 'sequential'
        else:
            cmap_type = ('cyclic' if (val == 'phase') else ('divergent' if ((len(field_name) == 2) and (val in ('real', 'imag'))) else 'sequential'))
        # Automatically select out zero-size monitor dimensions the caller
        # did not pick explicitly.
        monitor = self.simulation.get_monitor_by_name(field_monitor_name)
        thin_dims = {'xyz'[dim]: monitor.center[dim] for dim in range(3) if ((monitor.size[dim] == 0) and ('xyz'[dim] not in sel_kwargs))}
        for (axis, pos) in thin_dims.items():
            if (field_data.coords[axis].size <= 1):
                field_data = field_data.sel(**{axis: pos}, method='nearest')
            else:
                field_data = field_data.interp(**{axis: pos}, kwargs=dict(bounds_error=True))
        # Backwards compatibility for renamed selection keys.
        if ('freq' in sel_kwargs):
            log.warning("'freq' supplied to 'plot_field', frequency selection key renamed to 'f' and 'freq' will error in future release, please update your local script to use 'f=value'.")
            sel_kwargs['f'] = sel_kwargs.pop('freq')
        if ('time' in sel_kwargs):
            log.warning("'time' supplied to 'plot_field', frequency selection key renamed to 't' and 'time' will error in future release, please update your local script to use 't=value'.")
            sel_kwargs['t'] = sel_kwargs.pop('time')
        for (coord_name, coord_val) in sel_kwargs.items():
            if (field_data.coords[coord_name].size <= 1):
                field_data = field_data.sel(**{coord_name: coord_val}, method=None)
            else:
                field_data = field_data.interp(**{coord_name: coord_val}, kwargs=dict(bounds_error=True))
        # Frequency at which to evaluate the permittivity overlay.
        if ('f' in sel_kwargs):
            freq_eps_eval = sel_kwargs['f']
        elif ('f' in field_data.coords):
            freq_eps_eval = field_data.coords['f'].values[0]
        else:
            freq_eps_eval = None
        field_data = field_data.squeeze(drop=True)
        non_scalar_coords = {name: c for (name, c) in field_data.coords.items() if (c.size > 1)}
        if (len(non_scalar_coords) != 2):
            raise DataError(f'Data after selection has {len(non_scalar_coords)} coordinates ({list(non_scalar_coords.keys())}), must be 2 spatial coordinates for plotting on plane. Please add keyword arguments to `plot_field()` to select out the other coords.')
        spatial_coords_in_data = {coord_name: (coord_name in non_scalar_coords) for coord_name in 'xyz'}
        if (sum(spatial_coords_in_data.values()) != 2):
            raise DataError(f'All coordinates in the data after selection must be spatial (x, y, z), given {non_scalar_coords.keys()}.')
        # The remaining scalar spatial axis defines the cross-section plane.
        planar_coord = [name for (name, c) in spatial_coords_in_data.items() if (c is False)][0]
        axis = 'xyz'.index(planar_coord)
        position = float(field_data.coords[planar_coord])
        return self.plot_scalar_array(field_data=field_data, axis=axis, position=position, freq=freq_eps_eval, eps_alpha=eps_alpha, robust=robust, vmin=vmin, vmax=vmax, cmap_type=cmap_type, ax=ax)
    # NOTE(review): the two bare names below look like decorators that lost
    # their '@' prefix ('@_aspect', '@_ax_if_none') — confirm against the
    # original source.
    _aspect
    _ax_if_none
    def plot_scalar_array(self, field_data: xr.DataArray, axis: Axis, position: float, freq: float=None, eps_alpha: float=0.2, robust: bool=True, vmin: float=None, vmax: float=None, cmap_type: ColormapType='divergent', ax: Ax=None) -> Ax:
        """Plot a 2D scalar array on the plane normal to ``axis`` at ``position``.

        ``cmap_type`` picks the colormap: 'divergent' (RdBu, centered at 0),
        'sequential' (magma), or 'cyclic' (twilight, fixed to [-pi, pi]).
        The simulation's structure permittivity is overlaid with transparency
        ``eps_alpha``.  Returns the matplotlib axes used.
        """
        interp_kwarg = {'xyz'[axis]: position}
        # Choose colormap, centering, and overlay direction per data type.
        if (cmap_type == 'divergent'):
            cmap = 'RdBu'
            center = 0.0
            eps_reverse = False
        elif (cmap_type == 'sequential'):
            cmap = 'magma'
            center = False
            eps_reverse = True
        elif (cmap_type == 'cyclic'):
            cmap = 'twilight'
            # Phase data: pin the color range to a full cycle.
            vmin = (- np.pi)
            vmax = np.pi
            center = False
            eps_reverse = False
        # The two in-plane coordinate labels (the normal axis is dropped).
        xy_coord_labels = list('xyz')
        xy_coord_labels.pop(axis)
        (x_coord_label, y_coord_label) = (xy_coord_labels[0], xy_coord_labels[1])
        field_data.plot(ax=ax, x=x_coord_label, y=y_coord_label, cmap=cmap, vmin=vmin, vmax=vmax, robust=robust, center=center, cbar_kwargs={'label': field_data.name})
        # Overlay the structures' permittivity at the same cross-section.
        ax = self.simulation.plot_structures_eps(freq=freq, cbar=False, alpha=eps_alpha, reverse=eps_reverse, ax=ax, **interp_kwarg)
        # Clamp the view to the extent of the plotted data.
        x_coord_values = field_data.coords[x_coord_label]
        y_coord_values = field_data.coords[y_coord_label]
        ax.set_xlim(min(x_coord_values), max(x_coord_values))
        ax.set_ylim(min(y_coord_values), max(y_coord_values))
        return ax
class OptionSeriesScatter3dSonificationDefaultspeechoptionsMappingRate(Options):
    """Generated option wrapper for
    series.scatter3d.sonification.defaultSpeechOptions.mapping.rate.

    NOTE(review): every option below is defined twice — a getter-shaped and
    a setter-shaped def with the same name.  These look like '@property' /
    '@<name>.setter' pairs whose decorators were lost; as written, the later
    definition shadows the earlier one at class-creation time.  Confirm
    against the generated original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractWwwSkymachinetranslationsCom(item):
    """Map a www.skymachinetranslations.com feed item to a release message.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tag_table = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tag_table:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _start_polling(interval, timer_fn, extra_args):
    """Start a daemonized SchedulerThread that fires `timer_fn` after `interval` seconds.

    The scheduler instance and the interval are prepended to `extra_args`,
    matching the timer callbacks' argument lists (each timer receives the
    scheduler so it can re-schedule itself).
    """
    scheduler = sched.scheduler(time.time, time.sleep)
    scheduler.enter(interval, 1, timer_fn, [scheduler, interval] + extra_args)
    poll_thread = SchedulerThread(scheduler)
    poll_thread.daemon = True
    poll_thread.start()


def configure_device(device, mqtt_prefix):
    """Authenticate a Broadlink device and start its periodic MQTT publishers.

    Depending on the device type and the configured polling intervals in
    `cf`, spawns daemon threads that periodically publish temperature,
    energy, sensor, state or position readings under `mqtt_prefix`.

    Returns the (possibly augmented) device object.
    """
    device.auth()
    logging.debug(("Connected to '%s' Broadlink device at '%s' (MAC %s) and started listening to MQTT commands at '%s#' " % (device.type, device.host[0], ':'.join((format(s, '02x') for s in device.mac)), mqtt_prefix)))
    # RM-family remotes expose a temperature sensor.
    interval = cf.get('broadlink_rm_temperature_interval', 0)
    if device.type in ('RM2', 'RMPRO', 'RM4', 'RM4PRO', 'RM4MINI') and interval > 0:
        _start_polling(interval, broadlink_rm_temperature_timer, [device, mqtt_prefix])
    # SP smart plugs report energy consumption.
    interval = cf.get('broadlink_sp_energy_interval', 0)
    if device.type in ('SP2', 'SP3S') and interval > 0:
        _start_polling(interval, broadlink_sp_energy_timer, [device, mqtt_prefix])
    # A1 environment sensors.
    interval = cf.get('broadlink_a1_sensors_interval', 0)
    if device.type == 'A1' and interval > 0:
        _start_polling(interval, broadlink_a1_sensors_timer, [device, mqtt_prefix])
    # MP1 power strips report per-socket state.
    interval = cf.get('broadlink_mp1_state_interval', 0)
    if device.type == 'MP1' and interval > 0:
        _start_polling(interval, broadlink_mp1_state_timer, [device, mqtt_prefix])
    if device.type == 'Dooya DT360E':
        def publish(dev, percentage):
            """Publish the current curtain position percentage to MQTT."""
            try:
                percentage = str(percentage)
                topic = (mqtt_prefix + 'position')
                logging.debug(((('Sending Dooya position ' + percentage) + ' to topic ') + topic))
                mqttc.publish(topic, percentage, qos=qos, retain=retain)
            # Was a bare 'except:', which also swallowed SystemExit /
            # KeyboardInterrupt; keep the polling thread alive on real errors
            # only.
            except Exception:
                logging.exception('Error')
        device.publish = types.MethodType(publish, device)
        interval = cf.get('broadlink_dooya_position_interval', 0)
        if interval > 0:
            # Note: the Dooya timer does not take mqtt_prefix.
            _start_polling(interval, broadlink_dooya_position_timer, [device])
    # BG1 smart plugs report full state.
    interval = cf.get('broadlink_bg1_state_interval', 0)
    if device.type == 'BG1' and interval > 0:
        _start_polling(interval, broadlink_bg1_state_timer, [device, mqtt_prefix])
    return device
()
('input_dir', type=click.Path(exists=True, file_okay=False))
('output_dir', type=click.Path(exists=False, file_okay=False))
('--recursive/--no-recursive', default=True, help='Search recursively')
('--overwrite/--no-overwrite', default=False, help='Overwrite existing files')
('--clean/--no-clean', default=False, help='Clean output directory before processing')
('--peak', help='Peak normalize audio to -1 dB', default=(- 1.0), show_default=True, type=float)
('--loudness', help='Loudness normalize audio to -23 dB LUFS', default=(- 23.0), show_default=True, type=float)
('--block-size', help='Block size for loudness measurement, unit is second', default=0.4, show_default=True, type=float)
('--num-workers', help='Number of workers to use for processing, defaults to number of CPU cores', default=os.cpu_count(), show_default=True, type=int)
def loudness_norm(input_dir: str, output_dir: str, recursive: bool, overwrite: bool, clean: bool, peak: float, loudness: float, block_size: float, num_workers: int):
from fish_audio_preprocess.utils.loudness_norm import loudness_norm_file
(input_dir, output_dir) = (Path(input_dir), Path(output_dir))
if ((input_dir == output_dir) and clean):
logger.error('You are trying to clean the input directory, aborting')
return
make_dirs(output_dir, clean)
files = list_files(input_dir, extensions=AUDIO_EXTENSIONS, recursive=recursive)
logger.info(f'Found {len(files)} files, normalizing loudness')
skipped = 0
with ProcessPoolExecutor(max_workers=num_workers) as executor:
tasks = []
for file in tqdm(files, desc='Preparing tasks'):
relative_path = file.relative_to(input_dir)
new_file = (output_dir / relative_path)
if (new_file.parent.exists() is False):
new_file.parent.mkdir(parents=True)
if (new_file.exists() and (not overwrite)):
skipped += 1
continue
tasks.append(executor.submit(loudness_norm_file, file, new_file, peak, loudness, block_size))
for i in tqdm(as_completed(tasks), total=len(tasks), desc='Processing'):
assert (i.exception() is None), i.exception()
logger.info('Done!')
logger.info(f'Total: {len(files)}, Skipped: {skipped}')
logger.info(f'Output directory: {output_dir}') |
class OptionSeriesVectorClusterZones(Options):
    """Generated option wrapper for series.vector.cluster.zones.

    NOTE(review): the duplicated defs and the stray '_.setter' expression
    below look like '@property' / '@<name>.setter' decorators that were
    partially lost; as written, the later definition of each name shadows
    the earlier one at class-creation time.  Confirm against the generated
    original.
    """
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def from_(self):
        return self._config_get(None)
    _.setter
    def from_(self, num: float):
        self._config(num, js_type=False)
    def marker(self) -> 'OptionSeriesVectorClusterZonesMarker':
        return self._config_sub_data('marker', OptionSeriesVectorClusterZonesMarker)
    def to(self):
        return self._config_get(None)
    def to(self, num: float):
        self._config(num, js_type=False)
def test_log_base_e_plus_automatically_find_variables(df_vartypes):
    """With base='e' and variables=None the transformer should pick up all
    numerical columns, log-transform them, and invert losslessly."""
    transformer = LogTransformer(base='e', variables=None)
    X = transformer.fit_transform(df_vartypes)

    expected = df_vartypes.copy()
    expected['Age'] = [2.99573, 3.04452, 2.94444, 2.89037]
    expected['Marks'] = [-0.105361, -0.223144, -0.356675, -0.510826]

    assert transformer.base == 'e'
    assert transformer.variables is None
    assert transformer.variables_ == ['Age', 'Marks']
    assert transformer.n_features_in_ == 5
    pd.testing.assert_frame_equal(X, expected)

    inverted = transformer.inverse_transform(X)
    inverted['Age'] = inverted['Age'].round().astype('int64')
    inverted['Marks'] = inverted['Marks'].round(1)
    pd.testing.assert_frame_equal(inverted, df_vartypes)
def guess_new_class(wordmap):
    """Assign a new paradigm class to *wordmap* based on its part of speech.

    Entries that already carry a paradigm, or that lack a kotus_tn class,
    are returned untouched.  Prefixes get 'N_COMPOUND'; otherwise the
    POS-specific guesser is dispatched, and unknown POS values are reported
    via fail_guess_because.
    """
    # Already classified: nothing to guess.
    if wordmap['new_para']:
        return wordmap
    # Without a Kotus inflection class there is nothing to derive from.
    if wordmap['kotus_tn'] is None or wordmap['kotus_tn'] == '':
        return wordmap
    if not wordmap['pos']:
        wordmap['pos'] = 'PARTICLE'
        wordmap['new_para'] = '#'
    if wordmap['is_prefix']:
        wordmap['new_para'] = 'N_COMPOUND'
    else:
        dispatch = {
            'PROPER': guess_new_noun,
            'NOUN': guess_new_noun,
            'ADJECTIVE': guess_new_adjective,
            'VERB': guess_new_verb,
            'PRONOUN': guess_new_pronoun,
            'ACRONYM': guess_new_acro,
            'NUMERAL': guess_new_numeral,
            'PARTICLE': guess_new_particle,
        }
        guesser = dispatch.get(wordmap['pos'])
        if guesser is not None:
            wordmap = guesser(wordmap)
        else:
            fail_guess_because(wordmap, ['POS'], ['!POS', 'PARTICLE', 'N', 'PROP', 'A', 'V', 'ACRO', 'NUM', 'INTJ', 'CONJ', 'ABBR'])
    if not wordmap['new_para']:
        fail_guess_because(wordmap, ['???'], ['???'], "shouldn't reach this point (missing case somewhere)\nTemporarily recovering by setting new_para to #!")
        wordmap['new_para'] = '#'
    return wordmap
def get_single_case(case_type, case_no, bucket):
    """Load one legal case from the database and build its search document.

    Builds the common case dict from the matching row, enriches it with
    type-specific fields ('ADR', 'AF', 'MUR'), and returns it; returns None
    when no row matches or when the type does not hit a return branch (see
    note below).
    """
    with db.engine.connect() as conn:
        rs = conn.execute(SINGLE_CASE, case_type, case_no)
        row = rs.first()
        if (row is not None):
            case_id = row['case_id']
            (sort1, sort2) = get_sort_fields(row['case_no'])
            # Common fields shared by every case type.
            case = {'type': get_es_type(case_type), 'doc_id': '{0}_{1}'.format(case_type.lower(), row['case_no']), 'no': row['case_no'], 'case_serial': row['case_serial'], 'name': row['name'], 'published_flg': row['published_flg'], 'sort1': sort1, 'sort2': sort2}
            if (case_type == 'ADR'):
                # ADR-specific enrichments.
                case['commission_votes'] = get_adr_commission_votes(case_id)
                case['complainant'] = get_adr_complainant(case_id)
                case['non_monetary_terms'] = get_adr_non_monetary_terms(case_id)
                case['non_monetary_terms_respondents'] = get_adr_non_monetary_terms_respondents(case_id)
                case['citations'] = get_adr_citations(case_id)
                case['adr_dispositions'] = get_adr_dispositions(case_id)
                case['case_status'] = get_adr_case_status(case_id)
            else:
                case['commission_votes'] = get_commission_votes(case_type, case_id)
            case['documents'] = get_documents(case_id, bucket)
            case['url'] = '/legal/{0}/{1}/'.format(get_full_name(case_type), row['case_no'])
            if (case_type == 'AF'):
                case = extend(case, get_af_specific_fields(case_id))
                return case
            if (case_type == 'MUR'):
                case['mur_type'] = 'current'
                case['dispositions'] = get_dispositions(case_id)
                case['subjects'] = get_subjects(case_id)
                case['election_cycles'] = get_election_cycles(case_id)
                participants = get_participants(case_id)
                case['participants'] = list(participants.values())
                case['respondents'] = get_sorted_respondents(case['participants'])
                (case['open_date'], case['close_date']) = get_open_and_close_dates(case_id)
                return case
            else:
                # NOTE(review): an 'ADR' case reaches this branch (it is
                # neither 'AF' nor 'MUR'), so the ADR dict built above is
                # discarded and None is returned after an error log.  This
                # looks like a bug — confirm the intended ADR handling.
                logger.error('Not a valid {0} number.'.format(case_type))
                return None
class TestContrastLstar(util.ColorAsserts, unittest.TestCase):
    """Tests for the 'lstar' (lightness difference) contrast method."""

    def test_contrast_same(self):
        """A color contrasted with itself yields zero."""
        blue = Color('blue')
        self.assertEqual(blue.contrast('blue', method='lstar'), 0)

    def test_contrast_bigger(self):
        """Orange against blue gives the expected lightness difference."""
        orange = Color('orange')
        self.assertCompare(orange.contrast('blue', method='lstar'), 42.63303)

    def test_symmetry(self):
        """The 'lstar' method gives the same value in either direction."""
        forward = Color('orange').contrast('blue', method='lstar')
        backward = Color('blue').contrast('orange', method='lstar')
        self.assertEqual(forward, backward)
def extractPlustranslatorsBlogspotCom(item):
    """Map a plustranslators.blogspot.com feed item to a release message.

    Returns None for previews or items without chapter/volume info, a release
    message when a known title prefix or tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    title_lower = item['title'].lower()
    prefix_table = [
        ('Artifact Planting Space Chapter', 'Artifact Planting Space', 'translated'),
        ('Manowa', 'Manowa Mamono Taosu Nouryoku Ubau Watashi Tsuyokunaru', 'translated'),
        ('Cat ', 'Me and My Beloved Cat (Girlfriend)', 'translated'),
    ]
    for prefix, series, tl_type in prefix_table:
        if title_lower.startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    tag_table = [
        ('Artifact planting space', 'Artifact planting space', 'translated'),
        ('Overgod Ascension', 'Overgod Ascension', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tag_table:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Migration(migrations.Migration):
    """Add optional logo_max_height / logo_max_width size limits to the Theme model."""
    dependencies = [('admin_interface', '0021_file_extension_validator')]
    operations = [migrations.AddField(model_name='theme', name='logo_max_height', field=models.PositiveSmallIntegerField(blank=True, default=100, verbose_name='max height')), migrations.AddField(model_name='theme', name='logo_max_width', field=models.PositiveSmallIntegerField(blank=True, default=400, verbose_name='max width'))]
class PopupMenu(PopupGridLayout):
    """Popup grid layout specialised for menus (one column, one row per item).

    NOTE(review): 'from_dbus_menu' and 'generate' take ``cls`` and look like
    '@classmethod's whose decorators were lost — confirm against the
    original source.
    """
    defaults = [('hide_on_mouse_leave', True, 'Hide the menu if the mouse pointer leaves the menu')]
    def __init__(self, qtile, controls, **config):
        PopupGridLayout.__init__(self, qtile, controls=controls, **config)
        self.add_defaults(PopupMenu.defaults)
    def from_dbus_menu(cls, qtile, dbusmenuitems, **config):
        """Build a PopupMenu from a list of dbus menu items."""
        menuitems = []
        prev_sep = False
        for (i, dbmitem) in enumerate(dbusmenuitems):
            sep = (dbmitem.item_type == 'separator')
            # Skip hidden items and collapse consecutive separators.
            if (not dbmitem.visible):
                continue
            if (prev_sep and sep):
                continue
            if sep:
                menuitems.append(PopupMenuSeparator(bar_size=1, can_focus=False, **config))
            else:
                if dbmitem.enabled:
                    # Default-argument binding captures the current dbmitem;
                    # a plain closure would late-bind to the last loop value.
                    callbacks = {'mouse_callbacks': {'Button1': (lambda dbmitem=dbmitem: dbmitem.click())}}
                else:
                    callbacks = {}
                icon = None
                if (has_xdg and dbmitem.icon_name):
                    icon = getIconPath(dbmitem.icon_name, theme=config.get('icon_theme', None))
                menuitems.append(PopupMenuItem(text=dbmitem.label.replace('_', ''), menu_icon=(icon or dbmitem.icon_data), can_focus=dbmitem.enabled, toggle_box=(True if dbmitem.toggle_type else False), toggled=(True if dbmitem.toggle_state else False), has_submenu=(dbmitem.children_display == 'submenu'), enabled=dbmitem.enabled, **callbacks, **config))
            prev_sep = sep
        return cls.generate(qtile, menuitems, **config)
    def generate(cls, qtile, menuitems, **config):
        """Create a menu instance sized to fit the given items."""
        row_count = 0
        for item in menuitems:
            item.row = row_count
            row_count += item.row_span
        row_height = config.get('row_height', None)
        fontsize = config.get('fontsize', 12)
        menu_width = config.get('menu_width', 200)
        if (row_height is None):
            # Fall back to the font size as an approximate row height.
            row_height = fontsize
        menu_config = {'width': menu_width, 'height': (row_count * row_height), 'rows': row_count, 'cols': 1}
        menu_config.update(config)
        return cls(qtile, controls=menuitems, **menu_config)
class Traversal():
    """Computes an execution order over a DatasetGraph starting from seed data.

    An artificial root node is linked to every field reachable from the seed
    keys; a traversal then visits every node in dependency order and reports
    the terminal ("end") nodes.  Construction fails with TraversalError when
    any node or edge is unreachable.
    """
    def extract_seed_field_addresses(self) -> Dict[(FieldAddress, str)]:
        """Map each identity field address to its seed key, limited to keys present in the seed data."""
        return {identity_address: seed_key for (identity_address, seed_key) in self.graph.identity_keys.items() if (seed_key in self.seed_data)}
    def __init__(self, graph: DatasetGraph, data: Dict[(str, Any)]):
        """Build traversal state and immediately verify full reachability."""
        self.graph = graph
        self.seed_data = data
        self.traversal_node_dict = {k: TraversalNode(v) for (k, v) in graph.nodes.items()}
        self.edges: Set[Edge] = graph.edges.copy()
        self.root_node = artificial_traversal_node(ROOT_COLLECTION_ADDRESS)
        # Connect the artificial root to every field reachable from seed data.
        for (start_field_address, seed_key) in self.extract_seed_field_addresses().items():
            self.edges.add(Edge(FieldAddress(ROOT_COLLECTION_ADDRESS.dataset, ROOT_COLLECTION_ADDRESS.collection, seed_key), start_field_address))
        self.__verify_traversal()
    def __verify_traversal(self) -> None:
        """Run a log-only traversal; raises TraversalError if anything is unreachable."""
        self.traverse({self.root_node.address: [self.seed_data]}, (lambda n, m: logger.info('Traverse {}', n.address)))
    def traversal_map(self) -> Tuple[(Dict[(str, List[Dict[(str, Any)]])], List[CollectionAddress])]:
        """Return (debug info keyed by node address, list of terminal node addresses)."""
        def traversal_dict_fn(tn: TraversalNode, data: Dict[(CollectionAddress, Dict[(str, Any)])]) -> None:
            data[tn.address] = tn.debug()
        db = {ROOT_COLLECTION_ADDRESS: [self.seed_data]}
        traversal_ends = self.traverse(db, traversal_dict_fn)
        return ({str(k): v for (k, v) in db.items()}, traversal_ends)
    def traverse(self, environment: Dict[(CollectionAddress, Any)], node_run_fn: Callable[([TraversalNode, Dict[(CollectionAddress, Any)]], None)]) -> List[CollectionAddress]:
        """Visit every node in dependency order, invoking node_run_fn on each.

        Raises TraversalError when a queued node cannot be scheduled, or when
        nodes or edges remain unvisited at the end.  Returns the addresses of
        terminal nodes (nodes with no outgoing edges left).
        """
        if environment:
            logger.info('starting traversal')
        remaining_node_keys: Set[CollectionAddress] = set(self.traversal_node_dict.keys())
        finished_nodes: dict[(CollectionAddress, TraversalNode)] = {}
        running_node_queue: MatchingQueue[TraversalNode] = MatchingQueue(self.root_node)
        remaining_edges: Set[Edge] = self.edges.copy()
        while (not running_node_queue.is_empty()):
            # Pick the next queued node whose prerequisites are all finished.
            n = running_node_queue.pop_first_match((lambda x: x.can_run_given(remaining_node_keys)))
            if n:
                node_run_fn(n, environment)
                # Wire parent/child links for edges now satisfied by n.
                for (finished_node_address, finished_node) in finished_nodes.items():
                    completed_edges = Edge.delete_edges(remaining_edges, finished_node_address, cast(TraversalNode, n).address)
                    for edge in filter((lambda _edge: _edge.ends_with_collection(cast(TraversalNode, n).address)), completed_edges):
                        finished_node.add_child(n, edge)
                edges_to_children = pydash.collections.filter_([e.split_by_address(cast(TraversalNode, n).address) for e in remaining_edges if e.contains(n.address)])
                if (not edges_to_children):
                    # No outgoing edges left: n terminates a traversal path.
                    n.is_terminal_node = True
                child_node_addresses = {a[1].collection_address() for a in edges_to_children if a}
                for nxt_address in child_node_addresses:
                    running_node_queue.push_if_new(self.traversal_node_dict[nxt_address])
                finished_nodes[n.address] = n
                remaining_node_keys.difference_update({n.address})
            else:
                logger.error('Node could not be reached given specified ordering [{}]', ','.join([str(tn.address) for tn in running_node_queue.data]))
                raise TraversalError(f'''Node could not be reached given the specified ordering:
 [{','.join([str(tn.address) for tn in running_node_queue.data])}]''')
        # Anything left over means the graph was not fully covered.
        if remaining_node_keys:
            logger.error('Some nodes were not reachable: {}', ','.join([str(x) for x in remaining_node_keys]))
            raise TraversalError(f"Some nodes were not reachable: {','.join([str(x) for x in remaining_node_keys])}")
        if remaining_edges:
            logger.error('Some edges were not reachable: {}', ','.join([str(x) for x in remaining_edges]))
            raise TraversalError(f"Some edges were not reachable: {','.join([str(x) for x in remaining_edges])}")
        end_nodes = [tn.address for tn in finished_nodes.values() if tn.is_terminal_node]
        if environment:
            logger.debug('Found {} end nodes: {}', len(end_nodes), end_nodes)
        return end_nodes
.compilertest
def test_irratelimit_error():
    """A RateLimitService with an empty spec must surface an 'ir.ratelimit' error
    and produce no rate-limit config."""
    manifest = '\n---\napiVersion: getambassador.io/v3alpha1\nkind: RateLimitService\nmetadata:\n name: myrls\n namespace: default\nspec: {}\n'
    econf = _get_envoy_config(manifest)
    rl_config = _get_rl_config(econf.as_dict())
    assert not rl_config
    errors = econf.ir.aconf.errors
    assert 'ir.ratelimit' in errors
    assert errors['ir.ratelimit'][0]['error'] == 'service is required in RateLimitService'
class NodeWithScore(BaseComponent):
    """A retrieved node paired with its optional relevance score.

    Most accessors proxy to the wrapped ``node``; the text accessors
    additionally require the node to be a ``TextNode``.

    NOTE(review): several defs below ('class_name' taking ``cls``, the
    zero-argument accessors such as 'node_id'/'text') look like
    '@classmethod'/'@property' definitions whose decorators were lost —
    confirm against the original source.
    """
    node: BaseNode
    score: Optional[float] = None
    def __str__(self) -> str:
        # Fix: formatting a missing (None) score with ': 0.3f' raised
        # TypeError; fall back to the literal string 'None' instead.
        score_str = 'None' if self.score is None else f'{self.score: 0.3f}'
        return f'{self.node}\nScore: {score_str}\n'
    def get_score(self, raise_error: bool=False) -> float:
        """Return the score; when unset, raise ValueError if requested, else 0.0."""
        if (self.score is None):
            if raise_error:
                raise ValueError('Score not set.')
            else:
                return 0.0
        else:
            return self.score
    def class_name(cls) -> str:
        """Serialization name of this component."""
        return 'NodeWithScore'
    def node_id(self) -> str:
        return self.node.node_id
    def id_(self) -> str:
        return self.node.id_
    def text(self) -> str:
        """Return the node's text; only valid for TextNode instances."""
        if isinstance(self.node, TextNode):
            return self.node.text
        else:
            raise ValueError('Node must be a TextNode to get text.')
    def metadata(self) -> Dict[(str, Any)]:
        return self.node.metadata
    def embedding(self) -> Optional[List[float]]:
        return self.node.embedding
    def get_text(self) -> str:
        """Return the node's text via get_text(); only valid for TextNode instances."""
        if isinstance(self.node, TextNode):
            return self.node.get_text()
        else:
            raise ValueError('Node must be a TextNode to get text.')
    def get_content(self, metadata_mode: MetadataMode=MetadataMode.NONE) -> str:
        """Return the node content rendered with the given metadata mode."""
        return self.node.get_content(metadata_mode=metadata_mode)
    def get_embedding(self) -> List[float]:
        """Proxy to the wrapped node's get_embedding()."""
        return self.node.get_embedding()
class Line(Object):
    """A polyline map object drawn through a list of lat/lng coordinates.

    Long segments are subdivided along the WGS84 geodesic so the rendered
    line follows the geodesic path rather than a straight screen segment.
    """
    def __init__(self, latlngs: typing.List[s2sphere.LatLng], color: Color=RED, width: int=2) -> None:
        """Create a line.

        :param latlngs: at least two coordinates making up the line
        :param color: stroke color (default RED)
        :param width: stroke width in pixels; 0 renders nothing
        :raises ValueError: for fewer than two coordinates or negative width
        """
        Object.__init__(self)
        if ((latlngs is None) or (len(latlngs) < 2)):
            raise ValueError('Trying to create line with less than 2 coordinates')
        if (width < 0):
            raise ValueError(f"'width' must be >= 0: {width}")
        self._latlngs = latlngs
        self._color = color
        self._width = width
        # Geodesic interpolation is computed lazily and cached.
        self._interpolation_cache: typing.Optional[typing.List[s2sphere.LatLng]] = None
    def color(self) -> Color:
        """Return the stroke color."""
        return self._color
    def width(self) -> int:
        """Return the stroke width in pixels."""
        return self._width
    def bounds(self) -> s2sphere.LatLngRect:
        """Return the bounding rectangle of the interpolated line."""
        b = s2sphere.LatLngRect()
        for latlng in self.interpolate():
            b = b.union(s2sphere.LatLngRect.from_point(latlng.normalized()))
        return b
    def extra_pixel_bounds(self) -> PixelBoundsT:
        """Extra pixels needed on each side to accommodate the stroke width."""
        return (self._width, self._width, self._width, self._width)
    def interpolate(self) -> typing.List[s2sphere.LatLng]:
        """Return the line's points with long segments subdivided along the geodesic.

        Segments spanning less than ~1 degree of longitude are kept as-is;
        longer ones are split into roughly one point per degree of arc along
        the WGS84 geodesic.  The result is cached on first call.
        """
        if (self._interpolation_cache is not None):
            return self._interpolation_cache
        assert (len(self._latlngs) >= 2)
        self._interpolation_cache = []
        # One degree of arc, in radians.
        threshold = ((2 * math.pi) / 360)
        last = self._latlngs[0]
        self._interpolation_cache.append(last)
        geod = Geodesic.WGS84
        for current in self._latlngs[1:]:
            # Normalize the longitude difference into [-pi, pi).
            dlng = (current.lng().radians - last.lng().radians)
            while (dlng < 0):
                dlng += (2 * math.pi)
            while (dlng >= math.pi):
                dlng -= (2 * math.pi)
            if (abs(dlng) < threshold):
                # Short segment: keep the endpoint only.
                self._interpolation_cache.append(current)
                last = current
                continue
            # Subdivide the segment along the geodesic, ~1 point per degree.
            line = geod.InverseLine(last.lat().degrees, last.lng().degrees, current.lat().degrees, current.lng().degrees)
            n = (2 + math.ceil(line.a13))
            for i in range(1, n):
                a = ((i * line.a13) / n)
                g = line.ArcPosition(a, ((Geodesic.LATITUDE | Geodesic.LONGITUDE) | Geodesic.LONG_UNROLL))
                self._interpolation_cache.append(create_latlng(g['lat2'], g['lon2']))
            self._interpolation_cache.append(current)
            last = current
        return self._interpolation_cache
    def render_pillow(self, renderer: PillowRenderer) -> None:
        """Draw the line onto a Pillow renderer."""
        if (self.width() == 0):
            return
        xys = [((x + renderer.offset_x()), y) for (x, y) in [renderer.transformer().ll2pixel(latlng) for latlng in self.interpolate()]]
        renderer.draw().line(xys, self.color().int_rgba(), self.width())
    def render_svg(self, renderer: SvgRenderer) -> None:
        """Draw the line as an SVG polyline."""
        if (self.width() == 0):
            return
        xys = [renderer.transformer().ll2pixel(latlng) for latlng in self.interpolate()]
        polyline = renderer.drawing().polyline(xys, fill='none', stroke=self.color().hex_rgb(), stroke_width=self.width(), opacity=self.color().float_a())
        renderer.group().add(polyline)
    def render_cairo(self, renderer: CairoRenderer) -> None:
        """Draw the line via a Cairo context."""
        if (self.width() == 0):
            return
        xys = [renderer.transformer().ll2pixel(latlng) for latlng in self.interpolate()]
        renderer.context().set_source_rgba(*self.color().float_rgba())
        renderer.context().set_line_width(self.width())
        renderer.context().new_path()
        for (x, y) in xys:
            renderer.context().line_to(x, y)
        renderer.context().stroke()
def build_game(data: Dict) -> Game:
    """Build a Game object from a JP eShop listing dict.

    Missing fields degrade gracefully to None / empty collections; the
    product code is derived from the hardware code plus the item code when
    both are present.
    """
    hard = data.get('hard')
    icode = data.get('icode')
    product_code = (hard[2:] + icode) if (hard and icode) else None
    game = Game(
        platform=PLATFORMS[hard],
        region=Regions.JP,
        title=data['title'],
        nsuid=data.get('nsuid'),
        product_code=product_code,
    )
    game.description = data.get('text')
    game.slug = data.get('icode')
    game.free_to_play = data.get('price') == 0.0
    # 'player' looks like ['1~4']; take the maximum player count.
    player_field = data.get('player') or ['0']
    game.players = max(int(part) for part in player_field[0].split('~'))
    try:
        game.release_date = datetime.strptime(data.get('sdate'), '%Y.%m.%d')
    except (ValueError, TypeError):
        game.release_date = None
    game.categories = data.get('genre', [])
    developer = data.get('maker')
    game.developers = [developer] if developer else []
    game.languages = data.get('lang', [])
    publisher = data.get('publisher')
    game.publishers = [publisher] if publisher else []
    cero = data.get('cero') or ['0']
    game.rating = (Ratings.CERO, RATINGS.get(cero[0]))
    game.features = {
        Features.AMIIBO: data.get('amiibo', '0') == '1',
        Features.DLC: len(data.get('cnsuid') or []) > 0,
        Features.ONLINE_PLAY: (data.get('nso') or ['0']) == ['1'],
    }
    return game
.usefixtures('use_tmpdir')
def test_workflow_run():
    """Running the external dump workflow writes both expected dump files."""
    WorkflowCommon.createExternalDumpJob()
    dump_job = WorkflowJob.from_file('dump_job', name='DUMP')
    substitutions = SubstitutionList()
    substitutions['<PARAM>'] = 'text'
    workflow = Workflow.from_file('dump_workflow', substitutions, {'DUMP': dump_job})
    assert len(workflow) == 2
    WorkflowRunner(workflow).run_blocking()
    for path, expected in (('dump1', 'dump_text_1'), ('dump2', 'dump_text_2')):
        with open(path, 'r', encoding='utf-8') as handle:
            assert handle.read() == expected
class OptionPlotoptionsScatterSonificationTracksMappingLowpass(Options):
    """Generated option wrapper for
    plotOptions.scatter.sonification.tracks.mapping.lowpass."""
    def frequency(self) -> 'OptionPlotoptionsScatterSonificationTracksMappingLowpassFrequency':
        """Sub-option container for the lowpass filter frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsScatterSonificationTracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsScatterSonificationTracksMappingLowpassResonance':
        """Sub-option container for the lowpass filter resonance mapping."""
        return self._config_sub_data('resonance', OptionPlotoptionsScatterSonificationTracksMappingLowpassResonance)
class Subscription(models.Model):
    """Notification subscription linking filtered notifications to a callback URI."""
    # Client-visible primary key, generated as a UUID4 by default.
    subscriptionId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=True)
    timeTick = models.IntegerField()
    # Notifications this subscription is filtered on.
    filter = models.ManyToManyField(CommonNotification, related_name='Subscription_CommonNotification')
    # Destination for notification delivery — presumably a URL; confirm with callers.
    callbackUri = models.TextField()
_init.register_param_type
class param_cookie_preserve(param):
    """Cookie Preserve chunk parameter (fixed type/length/value layout).

    NOTE(review): 'param_type' and 'parser' take ``cls`` and look like
    stripped '@classmethod's — confirm against the original source.
    """
    # '!HHI': big-endian u16 type, u16 length, u32 value.
    _PACK_STR = '!HHI'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    def param_type(cls):
        """Wire identifier for this parameter type."""
        return PTYPE_COOKIE_PRESERVE
    def __init__(self, value=0, length=0):
        super(param_cookie_preserve, self).__init__(value, length)
    def parser(cls, buf):
        """Parse a parameter from ``buf``; the type field is discarded."""
        (_, length, value) = struct.unpack_from(cls._PACK_STR, buf)
        return cls(value, length)
    def serialize(self):
        """Pack (type, length, value); a zero length defaults to the minimum size."""
        if (0 == self.length):
            self.length = self._MIN_LEN
        buf = struct.pack(self._PACK_STR, self.param_type(), self.length, self.value)
        return buf
class TestActionAllocation(TestCase):
    """Unit tests for the Allocation action driven by a mocked ES client."""

    VERSION = {'version': {'number': '5.0.0'}}

    def builder(self):
        """Prepare a mocked client and the IndexList under test."""
        self.client = Mock()
        self.client.info.return_value = self.VERSION
        for mocked, result in (
            (self.client.cat.indices, testvars.state_one),
            (self.client.indices.get_settings, testvars.settings_one),
            (self.client.indices.stats, testvars.stats_one),
            (self.client.indices.exists_alias, False),
            (self.client.indices.put_settings, None),
        ):
            mocked.return_value = result
        self.ilo = IndexList(self.client)

    def test_init_raise(self):
        """A non-IndexList argument must raise TypeError."""
        self.assertRaises(TypeError, Allocation, 'invalid')

    def test_init(self):
        """The action keeps references to the index list and its client."""
        self.builder()
        action = Allocation(self.ilo, key='key', value='value')
        self.assertEqual(self.ilo, action.index_list)
        self.assertEqual(self.client, action.client)

    def test_create_body_no_key(self):
        """Omitting 'key' must raise MissingArgument."""
        self.builder()
        self.assertRaises(MissingArgument, Allocation, self.ilo)

    def test_create_body_invalid_allocation_type(self):
        """An unknown allocation_type must raise ValueError."""
        self.builder()
        self.assertRaises(ValueError, Allocation, self.ilo, key='key', value='value', allocation_type='invalid')

    def test_create_body_valid(self):
        """The default allocation_type produces a 'require' routing setting."""
        self.builder()
        action = Allocation(self.ilo, key='key', value='value')
        self.assertEqual({'index.routing.allocation.require.key': 'value'}, action.settings)

    def test_do_action_raise_on_put_settings(self):
        """Failures from put_settings must propagate out of do_action."""
        self.builder()
        self.client.indices.put_settings.side_effect = testvars.fake_fail
        action = Allocation(self.ilo, key='key', value='value')
        self.assertRaises(Exception, action.do_action)

    def test_do_dry_run(self):
        """Dry run returns None without raising."""
        self.builder()
        action = Allocation(self.ilo, key='key', value='value')
        self.assertIsNone(action.do_dry_run())

    def test_do_action(self):
        """A normal run returns None on success."""
        self.builder()
        action = Allocation(self.ilo, key='key', value='value')
        self.assertIsNone(action.do_action())

    def test_do_action_wait_v50(self):
        """wait_for_completion succeeds against a 5.0.x client (health mocked)."""
        self.builder()
        self.client.cluster.health.return_value = {'relocating_shards': 0}
        action = Allocation(self.ilo, key='key', value='value', wait_for_completion=True)
        self.assertIsNone(action.do_action())

    def test_do_action_wait_v51(self):
        """wait_for_completion also succeeds against a 5.1.x client."""
        self.builder()
        self.client.info.return_value = {'version': {'number': '5.1.1'}}
        self.client.cluster.health.return_value = {'relocating_shards': 0}
        action = Allocation(self.ilo, key='key', value='value', wait_for_completion=True)
        self.assertIsNone(action.do_action())
class SPMD_Statement(Compound_Statement):
    """AST node for a MATLAB ``spmd`` block."""

    def __init__(self, t_spmd):
        super().__init__()
        assert isinstance(t_spmd, MATLAB_Token)
        assert (t_spmd.kind == 'KEYWORD') and (t_spmd.value == 'spmd')
        self.t_spmd = t_spmd
        # Link the token back to this AST node.
        t_spmd.set_ast(self)
        # Body is attached later via set_body().
        self.n_body = None

    def loc(self):
        """Return the source location of the ``spmd`` keyword token."""
        return self.t_spmd.location

    def set_body(self, n_body):
        """Attach the statement sequence that forms the block body."""
        assert isinstance(n_body, Sequence_Of_Statements)
        self.n_body = n_body
        n_body.set_parent(self)

    def visit(self, parent, function, relation):
        """Visit this node, then its body under the 'Body' relation."""
        self._visit(parent, function, relation)
        self.n_body.visit(self, function, 'Body')
        self._visit_end(parent, function, relation)
class TestReplayDecider(TestCase):
    """Unit tests for ReplayDecider's decision bookkeeping and history-event handling."""

    def setUp(self) -> None:
        worker: Worker = Mock()
        # get_workflow_method returns (workflow class, factory); a no-op factory suffices here.
        worker.get_workflow_method = MagicMock(return_value=(DummyWorkflow, (lambda *args: None)))
        self.decider = ReplayDecider(execution_id='', workflow_type=Mock(), worker=worker)

    def test_get_and_increment_next_id(self):
        # IDs are issued as stringified integers starting from 0.
        self.assertEqual('0', self.decider.get_and_increment_next_id())
        self.assertEqual('1', self.decider.get_and_increment_next_id())

    def test_get_decisions(self):
        # A state machine with a pending decision contributes it to the output.
        decision = Decision()
        state_machine: DecisionStateMachine = Mock()
        state_machine.get_decision = MagicMock(return_value=decision)
        self.decider.decisions[DecisionId(DecisionTarget.ACTIVITY, 10)] = state_machine
        decisions = self.decider.get_decisions()
        self.assertEqual(1, len(decisions))
        self.assertIs(decision, decisions[0])

    def test_get_decisions_none(self):
        # State machines returning None are filtered out.
        state_machine: DecisionStateMachine = Mock()
        state_machine.get_decision = MagicMock(return_value=None)
        self.decider.decisions[DecisionId(DecisionTarget.ACTIVITY, 10)] = state_machine
        decisions = self.decider.get_decisions()
        self.assertEqual(0, len(decisions))

    def test_get_decision(self):
        state_machine = DecisionStateMachine()
        decision_id = DecisionId(DecisionTarget.ACTIVITY, 20)
        self.decider.decisions[decision_id] = state_machine
        self.assertIs(state_machine, self.decider.get_decision(decision_id))

    def test_get_decision_not_found(self):
        # An unknown decision id indicates divergence from recorded history.
        decision_id = DecisionId(DecisionTarget.ACTIVITY, 20)
        with self.assertRaises(NonDeterministicWorkflowException):
            self.decider.get_decision(decision_id)

    def test_notify_decision_sent(self):
        # Every registered state machine is told the decision task started.
        state_machine: DecisionStateMachine = Mock()
        self.decider.decisions[DecisionId(DecisionTarget.ACTIVITY, 10)] = state_machine
        self.decider.notify_decision_sent()
        state_machine.handle_decision_task_started_event.assert_called_once()

    def test_process_decision_events_notifies_when_replay(self):
        # In replay mode, processing a batch must notify that decisions were sent.
        self.decider.event_loop = Mock()
        events = [HistoryEvent(event_type=EventType.WorkflowExecutionStarted, workflow_execution_started_event_attributes=WorkflowExecutionStartedEventAttributes()), HistoryEvent(event_type=EventType.DecisionTaskScheduled)]
        decision_events = DecisionEvents(events, [], replay=True, replay_current_time_milliseconds=0, next_decision_event_id=5)
        self.decider.notify_decision_sent = MagicMock()
        self.decider.process_event = Mock()
        self.decider.process_decision_events(decision_events)
        self.decider.notify_decision_sent.assert_called_once()

    def test_process_decision_events_markers_first(self):
        # Version-marker events must be processed before all other events.
        self.decider.event_loop = Mock()
        marker_event = HistoryEvent(event_type=EventType.MarkerRecorded)
        marker_event.marker_recorded_event_attributes = MarkerRecordedEventAttributes()
        marker_event.marker_recorded_event_attributes.marker_name = VERSION_MARKER_NAME
        events = [HistoryEvent(event_type=EventType.WorkflowExecutionStarted, workflow_execution_started_event_attributes=WorkflowExecutionStartedEventAttributes()), HistoryEvent(event_type=EventType.DecisionTaskScheduled), marker_event]
        decision_events = DecisionEvents([], events, replay=True, replay_current_time_milliseconds=0, next_decision_event_id=5)
        self.decider.process_event = Mock()
        self.decider.process_decision_events(decision_events)
        self.decider.process_event.assert_called()
        # 3 events total, but the marker is seen twice: once up front, once in order.
        assert (len(self.decider.process_event.call_args_list) == 4)
        (args, kwargs) = self.decider.process_event.call_args_list[0]
        # Identity check: the very first processed event is the marker itself.
        assert (id(args[0]) == id(marker_event))

    def test_activity_task_closed(self):
        state_machine: DecisionStateMachine = Mock()
        state_machine.is_done = MagicMock(return_value=True)
        self.decider.decisions[DecisionId(DecisionTarget.ACTIVITY, 10)] = state_machine
        ret = self.decider.handle_activity_task_closed(10)
        self.assertTrue(ret)
        state_machine.handle_completion_event.assert_called_once()
        state_machine.is_done.assert_called_once()

    def test_handle_activity_task_scheduled(self):
        # The scheduled event is routed to the machine keyed by its event id.
        state_machine: DecisionStateMachine = Mock()
        self.decider.decisions[DecisionId(DecisionTarget.ACTIVITY, 10)] = state_machine
        event = HistoryEvent(event_id=10)
        self.decider.handle_activity_task_scheduled(event)
        state_machine.handle_initiated_event.assert_called()
        (args, kwargs) = state_machine.handle_initiated_event.call_args_list[0]
        self.assertIn(event, args)

    def test_handle_activity_task_started(self):
        # Started events are routed via their scheduled_event_id, not event_id.
        state_machine: DecisionStateMachine = Mock()
        self.decider.decisions[DecisionId(DecisionTarget.ACTIVITY, 10)] = state_machine
        event = HistoryEvent(event_id=15)
        event.activity_task_started_event_attributes = ActivityTaskStartedEventAttributes()
        event.activity_task_started_event_attributes.scheduled_event_id = 10
        self.decider.handle_activity_task_started(event)
        state_machine.handle_started_event.assert_called()
        (args, kwargs) = state_machine.handle_started_event.call_args_list[0]
        self.assertIn(event, args)

    def test_handle_decision_task_failed(self):
        # A RESET_WORKFLOW failure must switch the context to the new run id.
        event = HistoryEvent(event_id=15)
        event.event_type = EventType.DecisionTaskFailed
        event.decision_task_failed_event_attributes = DecisionTaskFailedEventAttributes()
        event.decision_task_failed_event_attributes.cause = DecisionTaskFailedCause.RESET_WORKFLOW
        event.decision_task_failed_event_attributes.new_run_id = 'the-new-run-id'
        self.decider.decision_context = decision_context = MagicMock()
        self.decider.handle_decision_task_failed(event)
        decision_context.set_current_run_id.assert_called()
        (args, kwargs) = decision_context.set_current_run_id.call_args_list[0]
        assert (args[0] == 'the-new-run-id')

    def tearDown(self) -> None:
        self.decider.destroy()
class ACL(Conf):
    """Configuration and runtime state for one ACL.

    Parses the raw ACL config (a dict with a 'rules' list, or a bare list of
    rules), validates rule/action types, resolves symbolic port and tunnel
    references, and tracks the matches/set-fields/meter usage the ACL needs
    from the dataplane.
    """

    defaults = {'rules': None, 'exact_match': False, 'dot1x_assigned': False}
    defaults_types = {'rules': list, 'exact_match': bool, 'dot1x_assigned': bool}
    rule_types = {'cookie': int, 'actions': dict, 'description': str}
    actions_types = {'meter': str, 'mirror': (str, int), 'output': (dict, list), 'allow': int, 'force_port_vlan': int, 'ct': dict}
    output_actions_types = {'tunnel': dict, 'port': (str, int), 'ports': list, 'failover': dict, 'set_fields': list, 'pop_vlans': int, 'swap_vid': int, 'vlan_vid': int, 'vlan_vids': list}
    ct_action_types = {'flags': int, 'alg': int, 'table': int, 'zone': int, 'zone_src': int, 'clear': bool, 'nat': dict}
    ct_action_nat_types = {'flags': int, 'range_ipv4_min': str, 'range_ipv4_max': str, 'range_ipv6_min': str, 'range_ipv6_max': str, 'range_proto_min': int, 'range_proto_max': int}
    tunnel_types = {'type': (str, None), 'tunnel_id': (str, int, None), 'dp': str, 'port': (str, int, None), 'exit_instructions': (list, None), 'maintain_encapsulation': bool, 'bi_directional': bool, 'reverse': bool}
    mutable_attrs = frozenset(['tunnel_sources'])

    def __init__(self, _id, dp_id, conf):
        """Normalize raw rule config and initialize per-ACL state.

        Accepts either a dict with a 'rules' key or a bare list of rules;
        anything else is an InvalidConfigError.
        """
        self.rules = []
        self.exact_match = None
        self.dot1x_assigned = None
        self.meter = False
        self.matches = {}
        self.set_fields = set()
        self._ports_resolved = False
        self.tunnel_dests = {}
        self.tunnel_sources = {}
        self.dyn_tunnel_rules = {}
        self.dyn_reverse_tunnel_rules = {}
        # NOTE(review): this mutates the class-level rule_types dict (shared
        # across all ACL instances); updates are idempotent so it is harmless,
        # but confirm it is intentional.
        for match_fields in (MATCH_FIELDS, OLD_MATCH_FIELDS):
            self.rule_types.update({match: (str, int) for match in match_fields})
        conf = copy.deepcopy(conf)
        if isinstance(conf, dict):
            rules = conf.get('rules', [])
        elif isinstance(conf, list):
            rules = conf
            conf = {}
        else:
            raise InvalidConfigError(('ACL conf is an invalid type %s' % _id))
        conf['rules'] = []
        for rule in rules:
            normalized_rule = rule
            if isinstance(rule, dict):
                # Rules may be wrapped as {'rule': {...}}; unwrap if so.
                normalized_rule = rule.get('rule', rule)
                if (normalized_rule is None):
                    # YAML 'rule:' with no value: keep the non-null siblings.
                    normalized_rule = {k: v for (k, v) in rule.items() if (v is not None)}
            test_config_condition((not isinstance(normalized_rule, dict)), ('ACL rule is %s not %s (%s)' % (type(normalized_rule), dict, rules)))
            conf['rules'].append(normalized_rule)
        super().__init__(_id, dp_id, conf)

    def finalize(self):
        """Mark port resolution complete and finalize base config."""
        self._ports_resolved = True
        super().finalize()

    def check_config(self):
        """Validate rule fields, action types and conntrack constraints."""
        test_config_condition((not self.rules), ('no rules found for ACL %s' % self._id))
        for rule in self.rules:
            self._check_conf_types(rule, self.rule_types)
            for (rule_field, rule_conf) in rule.items():
                if (rule_field == 'cookie'):
                    test_config_condition(((rule_conf < 0) or (rule_conf > (2 ** 16))), 'rule cookie value must be 0-2**16')
                elif (rule_field == 'actions'):
                    test_config_condition((not rule_conf), ('Missing rule actions in ACL %s' % self._id))
                    self._check_conf_types(rule_conf, self.actions_types)
                    for (action_name, action_conf) in rule_conf.items():
                        if (action_name == 'output'):
                            if isinstance(action_conf, (list, tuple)):
                                # Ordered output: each element carries exactly one action.
                                for subconf in action_conf:
                                    test_config_condition((len(subconf) > 1), 'ACL ordered output must have only one action per element')
                                    self._check_conf_types(subconf, self.output_actions_types)
                            else:
                                self._check_conf_types(action_conf, self.output_actions_types)
                        elif (action_name == 'ct'):
                            self._check_conf_types(action_conf, self.ct_action_types)
                            if (('clear' in action_conf) and action_conf['clear']):
                                test_config_condition((len(action_conf) != 1), "no other parameters can be set when 'clear' set on conntrack ACL")
                            else:
                                test_config_condition(('table' not in action_conf), "required parameter 'table' not set for conntrack ACL")
                                test_config_condition(('zone' not in action_conf), "required parameter 'zone' not set for conntrack ACL")
                            if ('nat' in action_conf):
                                self._check_conf_types(action_conf['nat'], self.ct_action_nat_types)

    def build(self, meters, vid, port_num):
        """Check syntax of ACL rules and record matches/set-fields/meter used.

        Returns (matches, set_fields, meter) gathered from the flowmods the
        ACL would generate.
        """
        self.matches = {}
        self.set_fields = set()
        self.meter = False
        if self.rules:
            try:
                ofmsgs = valve_acl.build_acl_ofmsgs([self], wildcard_table, [valve_of.goto_table(wildcard_table)], [valve_of.goto_table(wildcard_table)], ((2 ** 16) - 1), meters, self.exact_match, vlan_vid=vid, port_num=port_num)
            except (netaddr.core.AddrFormatError, KeyError, ValueError) as err:
                raise InvalidConfigError from err
            test_config_condition((not ofmsgs), 'OF messages is empty')
            for ofmsg in ofmsgs:
                try:
                    valve_of.verify_flowmod(ofmsg)
                except (KeyError, ValueError) as err:
                    raise InvalidConfigError from err
                except Exception as err:
                    raise err
                if valve_of.is_flowmod(ofmsg):
                    apply_actions = []
                    for inst in ofmsg.instructions:
                        if valve_of.is_apply_actions(inst):
                            apply_actions.extend(inst.actions)
                        elif valve_of.is_meter(inst):
                            self.meter = True
                    for action in apply_actions:
                        if valve_of.is_set_field(action):
                            self.set_fields.add(action.key)
                    for (match, value) in ofmsg.match.items():
                        # A (value, mask) pair means the match is masked.
                        has_mask = isinstance(value, (tuple, list))
                        if (has_mask or (match not in self.matches)):
                            self.matches[match] = has_mask
        # Tunnel exit instructions are validated the same way as rule output.
        for tunnel_rules in self.tunnel_dests.values():
            if ('exit_instructions' in tunnel_rules):
                exit_inst = tunnel_rules['exit_instructions']
                try:
                    ofmsgs = valve_acl.build_tunnel_ofmsgs(exit_inst, wildcard_table, 1)
                except (netaddr.core.AddrFormatError, KeyError, ValueError) as err:
                    raise InvalidConfigError from err
                test_config_condition((not ofmsgs), 'OF messages is empty')
                for ofmsg in ofmsgs:
                    try:
                        valve_of.verify_flowmod(ofmsg)
                    except (KeyError, ValueError) as err:
                        raise InvalidConfigError from err
                    except Exception as err:
                        raise err
                    if valve_of.is_flowmod(ofmsg):
                        apply_actions = []
                        for inst in ofmsg.instructions:
                            if valve_of.is_apply_actions(inst):
                                apply_actions.extend(inst.actions)
                            elif valve_of.is_meter(inst):
                                self.meter = True
                        for action in apply_actions:
                            if valve_of.is_set_field(action):
                                self.set_fields.add(action.key)
                        for (match, value) in ofmsg.match.items():
                            has_mask = isinstance(value, (tuple, list))
                            if (has_mask or (match not in self.matches)):
                                self.matches[match] = has_mask
        return (self.matches, self.set_fields, self.meter)

    def get_meters(self):
        """Yield meter names referenced by this ACL's rules."""
        for rule in self.rules:
            if (('actions' not in rule) or ('meter' not in rule['actions'])):
                continue
            (yield rule['actions']['meter'])

    def get_mirror_destinations(self):
        """Yield mirror destination ports referenced by this ACL's rules."""
        for rule in self.rules:
            if (('actions' not in rule) or ('mirror' not in rule['actions'])):
                continue
            (yield rule['actions']['mirror'])

    def _resolve_ordered_output_ports(self, output_list, resolve_port_cb, resolve_tunnel_objects):
        """Resolve symbolic port/tunnel names in an ordered output action list."""
        result = []
        for action in output_list:
            for (key, value) in action.items():
                if (key == 'tunnel'):
                    tunnel = value
                    test_config_condition(('dp' not in tunnel), ('ACL (%s) tunnel DP not defined' % self._id))
                    tunnel_dp = tunnel['dp']
                    tunnel_port = tunnel.get('port', None)
                    tunnel_id = tunnel.get('tunnel_id', None)
                    tunnel_type = tunnel.get('type', 'vlan')
                    tunnel_exit_instructions = tunnel.get('exit_instructions', [])
                    tunnel_direction = tunnel.get('bi_directional', False)
                    tunnel_maintain = tunnel.get('maintain_encapsulation', False)
                    tunnel_reverse = tunnel.get('reverse', False)
                    test_config_condition((tunnel_reverse and tunnel_direction), ('Tunnel ACL %s cannot contain values for the fields`bi_directional` and `reverse` at the same time' % self._id))
                    (dst_dp, dst_port, tunnel_id) = resolve_tunnel_objects(tunnel_dp, tunnel_port, tunnel_id)
                    tunnel_dict = {'dst_dp': dst_dp, 'dst_port': dst_port, 'tunnel_id': tunnel_id, 'type': tunnel_type, 'exit_instructions': tunnel_exit_instructions, 'bi_directional': tunnel_direction, 'maintain_encapsulation': tunnel_maintain, 'reverse': tunnel_reverse}
                    self.tunnel_dests[tunnel_id] = tunnel_dict
                    result.append({key: tunnel_id})
                elif (key == 'port'):
                    port_name = value
                    port = resolve_port_cb(port_name)
                    test_config_condition((not port), ('ACL (%s) output port undefined in DP: %s' % (self._id, self.dp_id)))
                    result.append({key: port})
                elif (key == 'ports'):
                    resolved_ports = [resolve_port_cb(p) for p in value]
                    test_config_condition((None in resolved_ports), ('ACL (%s) output port(s) not defined in DP: %s' % (self._id, self.dp_id)))
                    result.append({key: resolved_ports})
                elif (key == 'failover'):
                    failover = value
                    test_config_condition((not isinstance(failover, dict)), 'failover is not a dictionary')
                    failover_dict = {}
                    for (failover_name, failover_values) in failover.items():
                        if (failover_name == 'ports'):
                            resolved_ports = [resolve_port_cb(p) for p in failover_values]
                            test_config_condition((None in resolved_ports), ('ACL (%s) failover port(s) not defined in DP: %s' % (self._id, self.dp_id)))
                            failover_dict[failover_name] = resolved_ports
                        else:
                            failover_dict[failover_name] = failover_values
                    result.append({key: failover_dict})
                else:
                    result.append(action)
        return result

    def _resolve_output_ports(self, action_conf, resolve_port_cb, resolve_tunnel_objects):
        """Resolve symbolic port/tunnel names in a dict-form output action."""
        if isinstance(action_conf, (list, tuple)):
            return self._resolve_ordered_output_ports(action_conf, resolve_port_cb, resolve_tunnel_objects)
        result = {}
        test_config_condition((('vlan_vid' in action_conf) and ('vlan_vids' in action_conf)), ('ACL %s has both vlan_vid and vlan_vids defined' % self._id))
        test_config_condition((('port' in action_conf) and ('ports' in action_conf)), ('ACL %s has both port and ports defined' % self._id))
        for (output_action, output_action_values) in action_conf.items():
            if (output_action == 'tunnel'):
                tunnel = output_action_values
                test_config_condition(('dp' not in tunnel), ('ACL (%s) tunnel DP not defined' % self._id))
                tunnel_dp = tunnel['dp']
                tunnel_port = tunnel.get('port', None)
                tunnel_id = tunnel.get('tunnel_id', None)
                tunnel_type = tunnel.get('type', 'vlan')
                tunnel_exit_instructions = tunnel.get('exit_instructions', [])
                tunnel_direction = tunnel.get('bi_directional', False)
                tunnel_maintain = tunnel.get('maintain_encapsulation', False)
                tunnel_reverse = tunnel.get('reverse', False)
                test_config_condition((tunnel_reverse and tunnel_direction), ('Tunnel ACL %s cannot contain values for the fields`bi_directional` and `reverse` at the same time' % self._id))
                (dst_dp, dst_port, tunnel_id) = resolve_tunnel_objects(tunnel_dp, tunnel_port, tunnel_id)
                tunnel_dict = {'dst_dp': dst_dp, 'dst_port': dst_port, 'tunnel_id': tunnel_id, 'type': tunnel_type, 'exit_instructions': tunnel_exit_instructions, 'bi_directional': tunnel_direction, 'maintain_encapsulation': tunnel_maintain, 'reverse': tunnel_reverse}
                self.tunnel_dests[tunnel_id] = tunnel_dict
                result[output_action] = tunnel_id
            elif (output_action == 'port'):
                port_name = output_action_values
                port = resolve_port_cb(port_name)
                test_config_condition((not port), ('ACL (%s) output port undefined in DP: %s' % (self._id, self.dp_id)))
                result[output_action] = port
            elif (output_action == 'ports'):
                resolved_ports = [resolve_port_cb(p) for p in output_action_values]
                test_config_condition((None in resolved_ports), ('ACL (%s) output port(s) not defined in DP: %s' % (self._id, self.dp_id)))
                result[output_action] = resolved_ports
            elif (output_action == 'failover'):
                failover = output_action_values
                test_config_condition((not isinstance(failover, dict)), 'failover is not a dictionary')
                result[output_action] = {}
                for (failover_name, failover_values) in failover.items():
                    if (failover_name == 'ports'):
                        resolved_ports = [resolve_port_cb(p) for p in failover_values]
                        test_config_condition((None in resolved_ports), ('ACL (%s) failover port(s) not defined in DP: %s' % (self._id, self.dp_id)))
                        result[output_action][failover_name] = resolved_ports
                    else:
                        result[output_action][failover_name] = failover_values
            else:
                result[output_action] = output_action_values
        return result

    def resolve_ports(self, resolve_port_cb, resolve_tunnel_objects):
        """Resolve mirror and output port references in all rules (idempotent)."""
        if self._ports_resolved:
            return
        for rule_conf in self.rules:
            if ('actions' in rule_conf):
                actions_conf = rule_conf['actions']
                resolved_actions = {}
                test_config_condition((not isinstance(actions_conf, dict)), 'actions value is not a dictionary')
                for (action_name, action_conf) in actions_conf.items():
                    if (action_name == 'mirror'):
                        resolved_port = resolve_port_cb(action_conf)
                        test_config_condition((resolved_port is None), ('ACL (%s) mirror port is not defined in DP: %s' % (self._id, self.dp_id)))
                        resolved_actions[action_name] = resolved_port
                    elif (action_name == 'output'):
                        resolved_action = self._resolve_output_ports(action_conf, resolve_port_cb, resolve_tunnel_objects)
                        resolved_actions[action_name] = resolved_action
                    else:
                        resolved_actions[action_name] = action_conf
                rule_conf['actions'] = resolved_actions
        self._ports_resolved = True

    def requires_reverse_tunnel(self, tunnel_id):
        """Return True if the tunnel with tunnel_id requires a return path."""
        return self.tunnel_dests[tunnel_id]['bi_directional']

    def get_num_tunnels(self):
        """Return the number of tunnel output actions across all rules."""
        num_tunnels = 0
        for rule_conf in self.rules:
            if self.does_rule_contain_tunnel(rule_conf):
                output_conf = rule_conf['actions']['output']
                if isinstance(output_conf, list):
                    for action in output_conf:
                        for key in action:
                            if (key == 'tunnel'):
                                num_tunnels += 1
                elif ('tunnel' in output_conf):
                    num_tunnels += 1
        return num_tunnels

    def get_tunnel_rules(self, tunnel_id):
        """Return the rules whose output action references tunnel_id."""
        rules = []
        for rule_conf in self.rules:
            if self.does_rule_contain_tunnel(rule_conf):
                output_conf = rule_conf['actions']['output']
                if isinstance(output_conf, (list, tuple)):
                    for action in output_conf:
                        for (key, value) in action.items():
                            if ((key == 'tunnel') and (value == tunnel_id)):
                                rules.append(rule_conf)
                                # NOTE(review): a rule with multiple matching
                                # tunnel actions is appended once per match —
                                # confirm that duplicates are acceptable.
                                continue
                elif (output_conf['tunnel'] == tunnel_id):
                    rules.append(rule_conf)
        return rules

    # Restored @staticmethod: this is called as self.does_rule_contain_tunnel(rule_conf)
    # with a single argument, which only works if no implicit self is bound.
    @staticmethod
    def does_rule_contain_tunnel(rule_conf):
        """Return True if any output action of the rule is a tunnel."""
        if ('actions' in rule_conf):
            if ('output' in rule_conf['actions']):
                output_conf = rule_conf['actions']['output']
                if isinstance(output_conf, (list, tuple)):
                    for action in output_conf:
                        for key in action:
                            if (key == 'tunnel'):
                                return True
                elif ('tunnel' in output_conf):
                    return True
        return False

    def is_tunnel_acl(self):
        """Return True if this ACL defines or references any tunnel."""
        if self.tunnel_dests:
            return True
        for rule_conf in self.rules:
            if self.does_rule_contain_tunnel(rule_conf):
                return True
        return False

    # Restored @staticmethod: called as self._tunnel_source_id(source) with one argument.
    @staticmethod
    def _tunnel_source_id(source):
        """Return a hashable, order-independent id for a tunnel source dict."""
        return tuple(sorted(source.items()))

    def add_tunnel_source(self, dp_name, port, reverse=False, bi_directional=False):
        """Register a tunnel source and seed per-tunnel dynamic rule maps."""
        source = {'dp': dp_name, 'port': port, 'reverse': reverse, 'bi_directional': bi_directional}
        source_id = self._tunnel_source_id(source)
        self.tunnel_sources[source_id] = source
        for _id in self.tunnel_dests:
            self.dyn_tunnel_rules.setdefault(_id, {})
            self.dyn_reverse_tunnel_rules.setdefault(_id, {})

    def verify_tunnel_rules(self):
        """Ensure the matches/set-fields required by tunnel handling are present."""
        if ('eth_type' not in self.matches):
            self.matches['eth_type'] = False
        if ('in_port' not in self.matches):
            self.matches['in_port'] = False
        if ('vlan_vid' not in self.matches):
            self.matches['vlan_vid'] = False
        if ('vlan_vid' not in self.set_fields):
            self.set_fields.add('vlan_vid')
        if ('vlan_pcp' not in self.matches):
            self.matches['vlan_pcp'] = False
        if ('vlan_pcp' not in self.set_fields):
            self.set_fields.add('vlan_pcp')

    def update_reverse_tunnel_rules(self, curr_dp, source_id, tunnel_id, out_port, output_table):
        """Update the return-path rules for a bi-directional tunnel on curr_dp.

        Returns False when the tunnel is not bi-directional; otherwise True.
        """
        if (not self.requires_reverse_tunnel(tunnel_id)):
            return False
        # On the reverse path the roles swap: the tunnel source is the destination.
        dst_dp = self.tunnel_sources[source_id]['dp']
        src_dp = self.tunnel_dests[tunnel_id]['dst_dp']
        prev_list = self.dyn_reverse_tunnel_rules[tunnel_id].get(source_id, [])
        new_list = []
        if ((curr_dp == src_dp) and (curr_dp != dst_dp)):
            new_list = [{'port': out_port}]
        elif ((curr_dp == dst_dp) and (curr_dp != src_dp)):
            new_list = [{'pop_vlans': 1}]
            if (out_port is None):
                new_list.append({'goto': output_table.table_id})
            else:
                new_list.append({'port': out_port})
        elif ((curr_dp == src_dp) and (curr_dp == dst_dp)):
            new_list = [{'pop_vlans': 1}]
            if (out_port is None):
                new_list.extend([{'goto': output_table.table_id}])
            else:
                new_list.extend([{'port': out_port}])
        else:
            # Transit DP: just forward towards the destination.
            new_list = [{'port': out_port}]
        if (new_list != prev_list):
            self.dyn_reverse_tunnel_rules[tunnel_id][source_id] = new_list
            return True
        # NOTE(review): returns True even when nothing changed — confirm intended.
        return True

    def update_source_tunnel_rules(self, curr_dp, source_id, tunnel_id, out_port, output_table):
        """Update the forward-path rules for a tunnel on curr_dp.

        Encapsulates at the source DP, decapsulates (and applies exit
        instructions) at the destination DP, and forwards on transit DPs.
        """
        src_dp = self.tunnel_sources[source_id]['dp']
        dst_dp = self.tunnel_dests[tunnel_id]['dst_dp']
        prev_list = self.dyn_tunnel_rules[tunnel_id].get(source_id, [])
        new_list = []
        pcp_flag = valve_of.PCP_TUNNEL_FLAG
        if self.tunnel_dests[tunnel_id]['reverse']:
            pcp_flag = valve_of.PCP_TUNNEL_REVERSE_DIRECTION_FLAG
        if ((curr_dp == src_dp) and (curr_dp != dst_dp)):
            new_list = [{'vlan_vids': [{'vid': tunnel_id, 'eth_type': ether.ETH_TYPE_8021Q}]}, {'set_fields': [{'vlan_pcp': pcp_flag}]}, {'port': out_port}, {'pop_vlans': 1}]
        elif ((curr_dp == dst_dp) and (curr_dp != src_dp)):
            if self.tunnel_dests[tunnel_id]['maintain_encapsulation']:
                # Keep the tunnel VLAN header on the decapsulated packet.
                new_list = []
            else:
                new_list = [{'pop_vlans': 1}]
            exit_instructions = self.tunnel_dests[tunnel_id].get('exit_instructions', [])
            new_list.extend(copy.copy(list(exit_instructions)))
            if (out_port is None):
                new_list.append({'goto': output_table.table_id})
            else:
                new_list.append({'port': out_port})
        elif ((curr_dp == src_dp) and (curr_dp == dst_dp)):
            exit_instructions = self.tunnel_dests[tunnel_id].get('exit_instructions', [])
            new_list.extend(copy.copy(list(exit_instructions)))
            if self.tunnel_dests[tunnel_id].get('maintain_encapsulation', False):
                new_list.extend([{'vlan_vids': [{'vid': tunnel_id, 'eth_type': ether.ETH_TYPE_8021Q}]}, {'set_fields': [{'vlan_pcp': pcp_flag}]}])
            if (out_port is None):
                new_list.extend([{'goto': output_table.table_id}])
            else:
                new_list.extend([{'port': out_port}])
        else:
            new_list = [{'port': out_port}]
        if (new_list != prev_list):
            self.dyn_tunnel_rules[tunnel_id][source_id] = new_list
            return True
        # NOTE(review): returns True even when nothing changed — confirm intended.
        return True
class TopicContinuumSet(UsableFlag):
    """Hold one TopicContinuum per configured topic set."""

    def __init__(self, input_mods, config):
        tracer.info('called')
        UsableFlag.__init__(self)
        self.__input_mods = input_mods
        self._config = config
        self.__continuum = {}
        # A shared object cache is used while constructing all continua.
        self.__object_cache = ObjectCache()
        self.__init_continuum_set()
        self.__object_cache.log_stats()
        tracer.debug('Finished.')

    def __init_continuum_set(self):
        """Build one TopicContinuum per entry of the 'topics' config value."""
        tracer.debug('Called.')
        topics = self._config.get_value('topics')
        for ts_name, ts_config in iteritems(topics):
            continuum = TopicContinuum(ts_name, self._config, ts_config,
                                       self.__object_cache, self.__input_mods)
            self.__continuum[ts_name] = continuum
            # Propagate each continuum's usability into this set's flag.
            self._adapt_usablility(continuum)
        tracer.debug('Finished; count [%d]', len(self.__continuum))

    def execute(self, executor, func_prefix):
        """Run executor callbacks: pre hook, each continuum in order, post hook."""
        tracer.debug('Calling pre.')
        FuncCall.pcall(executor, func_prefix + 'topic_continuum_set_pre', self)
        tracer.debug('Calling sub.')
        ordered = executor.topic_continuum_set_sort(self.__continuum.values())
        for continuum in ordered:
            continuum.execute(executor, func_prefix)
        tracer.debug('Calling Post')
        FuncCall.pcall(executor, func_prefix + 'topic_continuum_set_post', self)
        tracer.debug('Finished.')

    def get_continuum_dict(self):
        """Return the mapping of topic-set name to TopicContinuum."""
        return self.__continuum
from dataclasses import dataclass


# Restored @dataclass/@classmethod: the class has annotated fields but no
# __init__, yet from_data constructs instances via cls(ir_obj, rkey), and both
# factories take `cls` — the decorators were evidently lost in extraction.
@dataclass
class NormalizedResource:
    """An Ambassador resource normalized into a plain dict plus a resource key."""

    object: dict
    rkey: Optional[str] = None

    # Whether to log resources, controlled by AMBASSADOR_LOG_RESOURCES.
    log_resources: ClassVar[bool] = parse_bool(os.environ.get('AMBASSADOR_LOG_RESOURCES'))

    @classmethod
    def from_data(cls, kind: str, name: str, namespace: Optional[str] = None, generation: Optional[int] = None, version: str = 'v3alpha1', api_group='getambassador.io', labels: Optional[Dict[str, Any]] = None, spec: Optional[Dict[str, Any]] = None, errors: Optional[str] = None, rkey: Optional[str] = None) -> "NormalizedResource":
        """Build a NormalizedResource from loose fields.

        The spec (if any) is flattened into the object dict, then the
        identifying fields (apiVersion, kind, name, ...) are overlaid.
        """
        if rkey is None:
            rkey = f'{name}.{namespace}'
        ir_obj = {}
        if spec:
            ir_obj.update(spec)
        ir_obj['apiVersion'] = f'{api_group}/{version}'
        ir_obj['kind'] = kind
        ir_obj['name'] = name
        if namespace is not None:
            ir_obj['namespace'] = namespace
        if generation is not None:
            ir_obj['generation'] = generation
        ir_obj['metadata_labels'] = labels or {}
        if errors:
            ir_obj['errors'] = errors
        return cls(ir_obj, rkey)

    @classmethod
    def from_kubernetes_object(cls, obj: KubernetesObject, rkey: Optional[str] = None) -> "NormalizedResource":
        """Build a NormalizedResource from a watched Kubernetes object.

        Raises ValueError if the object carries no namespace. When no rkey is
        supplied, one is derived and recorded in the ambassador_crd label;
        otherwise any stale ambassador_crd label is dropped.
        """
        if obj.namespace is None:
            raise ValueError(f'Cannot construct resource from Kubernetes object {obj.key} without namespace')
        labels = dict(obj.labels)
        if not rkey:
            rkey = f'{obj.name}.{obj.namespace}'
            labels['ambassador_crd'] = rkey
        else:
            labels.pop('ambassador_crd', None)
        return cls.from_data(obj.kind, obj.name, errors=obj.get('errors'), namespace=obj.namespace, generation=obj.generation, version=obj.gvk.version, api_group=obj.gvk.api_group, labels=labels, spec=obj.spec, rkey=rkey)
class OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Options for the tremolo-depth mapping of a sonification default instrument.

    Restored @property/@x.setter decorators: each option had a getter and a
    setter def with the same name, so the undecorated getters were silently
    shadowed (unreachable) by the setters.
    """

    @property
    def mapFunction(self):
        """The mapping function applied to this audio parameter."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The data property this audio parameter maps to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """The maximum value for the mapped audio parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """The minimum value for the mapped audio parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """The scope within which min/max are computed."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.