text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def local_time_to_online(dt=None):
"""Converts datetime object to a UTC timestamp for AGOL.
Args:
dt (datetime): The :py:class:`datetime.datetime` object to convert. Defaults to ``None``, i.e., :py:func:`datetime.datetime.now`.
Returns:
float: A UTC timestamp as understood by AGOL (time in ms since Unix epoch * 1000)
Examples:
>>> arcresthelper.common.local_time_to_online() # PST
1457167261000.0
>>> dt = datetime.datetime(1993, 3, 5, 12, 35, 15) # PST
>>> arcresthelper.common.local_time_to_online(dt)
731392515000.0
See Also:
:py:func:`online_time_to_string` for converting a UTC timestamp
"""
is_dst = None
utc_offset = None
try:
if dt is None:
dt = datetime.datetime.now()
is_dst = time.daylight > 0 and time.localtime().tm_isdst > 0
utc_offset = (time.altzone if is_dst else time.timezone)
return (time.mktime(dt.timetuple()) * 1000) + (utc_offset * 1000)
except:
line, filename, synerror = trace()
raise ArcRestHelperError({
"function": "local_time_to_online",
"line": line,
"filename": filename,
"synerror": synerror,
}
)
finally:
is_dst = None
utc_offset = None
del is_dst
del utc_offset | [
"def",
"local_time_to_online",
"(",
"dt",
"=",
"None",
")",
":",
"is_dst",
"=",
"None",
"utc_offset",
"=",
"None",
"try",
":",
"if",
"dt",
"is",
"None",
":",
"dt",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"is_dst",
"=",
"time",
".",
"daylight",
">",
"0",
"and",
"time",
".",
"localtime",
"(",
")",
".",
"tm_isdst",
">",
"0",
"utc_offset",
"=",
"(",
"time",
".",
"altzone",
"if",
"is_dst",
"else",
"time",
".",
"timezone",
")",
"return",
"(",
"time",
".",
"mktime",
"(",
"dt",
".",
"timetuple",
"(",
")",
")",
"*",
"1000",
")",
"+",
"(",
"utc_offset",
"*",
"1000",
")",
"except",
":",
"line",
",",
"filename",
",",
"synerror",
"=",
"trace",
"(",
")",
"raise",
"ArcRestHelperError",
"(",
"{",
"\"function\"",
":",
"\"local_time_to_online\"",
",",
"\"line\"",
":",
"line",
",",
"\"filename\"",
":",
"filename",
",",
"\"synerror\"",
":",
"synerror",
",",
"}",
")",
"finally",
":",
"is_dst",
"=",
"None",
"utc_offset",
"=",
"None",
"del",
"is_dst",
"del",
"utc_offset"
] | 32.159091 | 23.181818 |
def fastq_iter(handle, header=None):
"""Iterate over FASTQ file and return FASTQ entries
Args:
handle (file): FASTQ file handle, can be any iterator so long as it
it returns subsequent "lines" of a FASTQ entry
header (str): Header line of next FASTQ entry, if 'handle' has been
partially read and you want to start iterating at the next entry,
read the next FASTQ header and pass it to this variable when
calling fastq_iter. See 'Examples.'
Yields:
FastqEntry: class containing all FASTQ data
Raises:
IOError: If FASTQ entry doesn't start with '@'
Examples:
The following two examples demonstrate how to use fastq_iter.
Note: These doctests will not pass, examples are only in doctest
format as per convention. bio_utils uses pytests for testing.
>>> for entry in fastq_iter(open('test.fastq')):
... print(entry.id) # Print FASTQ id
... print(entry.description) # Print FASTQ description
... print(entry.sequence) # Print FASTQ sequence
... print(entry.quality) # Print FASTQ quality scores
... print(entry.write()) # Print full FASTQ entry
>>> fastq_handle = open('test.fastq')
>>> next(fastq_handle) # Skip first entry header
>>> next(fastq_handle) # Skip first entry sequence
>>> next(fastq_handle) # Skip line with '+'
>>> next(fastq_handle) # Skip first entry quality scores
>>> first_line = next(fastq_handle) # Read second entry header
>>> for entry in fastq_iter(fastq_handle, header=first_line):
... print(entry.id) # Print FASTQ id
... print(entry.description) # Print FASTQ description
... print(entry.sequence) # Print FASTQ sequence
... print(entry.quality) # Print FASTQ quality scores
... print(entry.write()) # Print full FASTQ entry
"""
# Speed tricks: reduces function calls
append = list.append
join = str.join
strip = str.strip
next_line = next
if header is None:
header = next(handle) # Read first FASTQ entry header
# Check if input is text or bytestream
if (isinstance(header, bytes)):
def next_line(i):
return next(i).decode('utf-8')
header = strip(header.decode('utf-8'))
else:
header = strip(header)
try: # Manually construct a for loop to improve speed by using 'next'
while True: # Loop until StopIteration Exception raised
line = strip(next_line(handle))
data = FastqEntry()
if not header[0] == '@':
raise IOError('Bad FASTQ format: no "@" at beginning of line')
try:
data.id, data.description = header[1:].split(' ', 1)
except ValueError: # No description
data.id = header[1:]
data.description = ''
# obtain sequence
sequence_list = []
while line and not line[0] == '+' and not line[0] == '#':
append(sequence_list, line)
line = strip(next_line(handle))
data.sequence = join('', sequence_list)
line = strip(next_line(handle)) # Skip line containing only '+'
# Obtain quality scores
quality_list = []
seq_len = len(data.sequence)
qual_len = 0
while line and qual_len < seq_len:
append(quality_list, line)
qual_len += len(line)
line = strip(next_line(handle)) # Raises StopIteration at EOF
header = line # Store current line so it's not lost next iteration
data.quality = join('', quality_list)
yield data
except StopIteration: # Yield last FASTQ entry
data.quality = join('', quality_list)
yield data | [
"def",
"fastq_iter",
"(",
"handle",
",",
"header",
"=",
"None",
")",
":",
"# Speed tricks: reduces function calls",
"append",
"=",
"list",
".",
"append",
"join",
"=",
"str",
".",
"join",
"strip",
"=",
"str",
".",
"strip",
"next_line",
"=",
"next",
"if",
"header",
"is",
"None",
":",
"header",
"=",
"next",
"(",
"handle",
")",
"# Read first FASTQ entry header",
"# Check if input is text or bytestream",
"if",
"(",
"isinstance",
"(",
"header",
",",
"bytes",
")",
")",
":",
"def",
"next_line",
"(",
"i",
")",
":",
"return",
"next",
"(",
"i",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"header",
"=",
"strip",
"(",
"header",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"else",
":",
"header",
"=",
"strip",
"(",
"header",
")",
"try",
":",
"# Manually construct a for loop to improve speed by using 'next'",
"while",
"True",
":",
"# Loop until StopIteration Exception raised",
"line",
"=",
"strip",
"(",
"next_line",
"(",
"handle",
")",
")",
"data",
"=",
"FastqEntry",
"(",
")",
"if",
"not",
"header",
"[",
"0",
"]",
"==",
"'@'",
":",
"raise",
"IOError",
"(",
"'Bad FASTQ format: no \"@\" at beginning of line'",
")",
"try",
":",
"data",
".",
"id",
",",
"data",
".",
"description",
"=",
"header",
"[",
"1",
":",
"]",
".",
"split",
"(",
"' '",
",",
"1",
")",
"except",
"ValueError",
":",
"# No description",
"data",
".",
"id",
"=",
"header",
"[",
"1",
":",
"]",
"data",
".",
"description",
"=",
"''",
"# obtain sequence",
"sequence_list",
"=",
"[",
"]",
"while",
"line",
"and",
"not",
"line",
"[",
"0",
"]",
"==",
"'+'",
"and",
"not",
"line",
"[",
"0",
"]",
"==",
"'#'",
":",
"append",
"(",
"sequence_list",
",",
"line",
")",
"line",
"=",
"strip",
"(",
"next_line",
"(",
"handle",
")",
")",
"data",
".",
"sequence",
"=",
"join",
"(",
"''",
",",
"sequence_list",
")",
"line",
"=",
"strip",
"(",
"next_line",
"(",
"handle",
")",
")",
"# Skip line containing only '+'",
"# Obtain quality scores",
"quality_list",
"=",
"[",
"]",
"seq_len",
"=",
"len",
"(",
"data",
".",
"sequence",
")",
"qual_len",
"=",
"0",
"while",
"line",
"and",
"qual_len",
"<",
"seq_len",
":",
"append",
"(",
"quality_list",
",",
"line",
")",
"qual_len",
"+=",
"len",
"(",
"line",
")",
"line",
"=",
"strip",
"(",
"next_line",
"(",
"handle",
")",
")",
"# Raises StopIteration at EOF",
"header",
"=",
"line",
"# Store current line so it's not lost next iteration",
"data",
".",
"quality",
"=",
"join",
"(",
"''",
",",
"quality_list",
")",
"yield",
"data",
"except",
"StopIteration",
":",
"# Yield last FASTQ entry",
"data",
".",
"quality",
"=",
"join",
"(",
"''",
",",
"quality_list",
")",
"yield",
"data"
] | 36.714286 | 22.314286 |
def record_delete_subfield_from(rec, tag, subfield_position,
field_position_global=None,
field_position_local=None):
"""
Delete subfield from position specified.
Specify the subfield by tag, field number and subfield position.
"""
subfields = record_get_subfields(
rec, tag,
field_position_global=field_position_global,
field_position_local=field_position_local)
try:
del subfields[subfield_position]
except IndexError:
raise InvenioBibRecordFieldError(
"The record does not contain the subfield "
"'%(subfieldIndex)s' inside the field (local: "
"'%(fieldIndexLocal)s, global: '%(fieldIndexGlobal)s' ) of tag "
"'%(tag)s'." %
{"subfieldIndex": subfield_position,
"fieldIndexLocal": str(field_position_local),
"fieldIndexGlobal": str(field_position_global),
"tag": tag})
if not subfields:
if field_position_global is not None:
for position, field in enumerate(rec[tag]):
if field[4] == field_position_global:
del rec[tag][position]
else:
del rec[tag][field_position_local]
if not rec[tag]:
del rec[tag] | [
"def",
"record_delete_subfield_from",
"(",
"rec",
",",
"tag",
",",
"subfield_position",
",",
"field_position_global",
"=",
"None",
",",
"field_position_local",
"=",
"None",
")",
":",
"subfields",
"=",
"record_get_subfields",
"(",
"rec",
",",
"tag",
",",
"field_position_global",
"=",
"field_position_global",
",",
"field_position_local",
"=",
"field_position_local",
")",
"try",
":",
"del",
"subfields",
"[",
"subfield_position",
"]",
"except",
"IndexError",
":",
"raise",
"InvenioBibRecordFieldError",
"(",
"\"The record does not contain the subfield \"",
"\"'%(subfieldIndex)s' inside the field (local: \"",
"\"'%(fieldIndexLocal)s, global: '%(fieldIndexGlobal)s' ) of tag \"",
"\"'%(tag)s'.\"",
"%",
"{",
"\"subfieldIndex\"",
":",
"subfield_position",
",",
"\"fieldIndexLocal\"",
":",
"str",
"(",
"field_position_local",
")",
",",
"\"fieldIndexGlobal\"",
":",
"str",
"(",
"field_position_global",
")",
",",
"\"tag\"",
":",
"tag",
"}",
")",
"if",
"not",
"subfields",
":",
"if",
"field_position_global",
"is",
"not",
"None",
":",
"for",
"position",
",",
"field",
"in",
"enumerate",
"(",
"rec",
"[",
"tag",
"]",
")",
":",
"if",
"field",
"[",
"4",
"]",
"==",
"field_position_global",
":",
"del",
"rec",
"[",
"tag",
"]",
"[",
"position",
"]",
"else",
":",
"del",
"rec",
"[",
"tag",
"]",
"[",
"field_position_local",
"]",
"if",
"not",
"rec",
"[",
"tag",
"]",
":",
"del",
"rec",
"[",
"tag",
"]"
] | 37.171429 | 16.371429 |
def competing_interests(soup, fntype_filter):
"""
Find the fn tags included in the competing interest
"""
competing_interests_section = extract_nodes(soup, "fn-group", attr="content-type", value="competing-interest")
if not competing_interests_section:
return None
fn = extract_nodes(first(competing_interests_section), "fn")
interests = footnotes(fn, fntype_filter)
return interests | [
"def",
"competing_interests",
"(",
"soup",
",",
"fntype_filter",
")",
":",
"competing_interests_section",
"=",
"extract_nodes",
"(",
"soup",
",",
"\"fn-group\"",
",",
"attr",
"=",
"\"content-type\"",
",",
"value",
"=",
"\"competing-interest\"",
")",
"if",
"not",
"competing_interests_section",
":",
"return",
"None",
"fn",
"=",
"extract_nodes",
"(",
"first",
"(",
"competing_interests_section",
")",
",",
"\"fn\"",
")",
"interests",
"=",
"footnotes",
"(",
"fn",
",",
"fntype_filter",
")",
"return",
"interests"
] | 34.5 | 20.333333 |
def run_nose(self, params):
"""
:type params: Params
"""
thread.set_index(params.thread_index)
log.debug("[%s] Starting nose iterations: %s", params.worker_index, params)
assert isinstance(params.tests, list)
# argv.extend(['--with-apiritif', '--nocapture', '--exe', '--nologcapture'])
end_time = self.params.ramp_up + self.params.hold_for
end_time += time.time() if end_time else 0
time.sleep(params.delay)
plugin = ApiritifPlugin(self._writer)
self._writer.concurrency += 1
config = Config(env=os.environ, files=all_config_files(), plugins=DefaultPluginManager())
config.plugins.addPlugins(extraplugins=[plugin])
config.testNames = params.tests
config.verbosity = 3 if params.verbose else 0
if params.verbose:
config.stream = open(os.devnull, "w") # FIXME: use "with", allow writing to file/log
iteration = 0
try:
while True:
log.debug("Starting iteration:: index=%d,start_time=%.3f", iteration, time.time())
thread.set_iteration(iteration)
ApiritifTestProgram(config=config)
log.debug("Finishing iteration:: index=%d,end_time=%.3f", iteration, time.time())
iteration += 1
# reasons to stop
if plugin.stop_reason:
log.debug("[%s] finished prematurely: %s", params.worker_index, plugin.stop_reason)
elif iteration >= params.iterations:
log.debug("[%s] iteration limit reached: %s", params.worker_index, params.iterations)
elif 0 < end_time <= time.time():
log.debug("[%s] duration limit reached: %s", params.worker_index, params.hold_for)
else:
continue # continue if no one is faced
break
finally:
self._writer.concurrency -= 1
if params.verbose:
config.stream.close() | [
"def",
"run_nose",
"(",
"self",
",",
"params",
")",
":",
"thread",
".",
"set_index",
"(",
"params",
".",
"thread_index",
")",
"log",
".",
"debug",
"(",
"\"[%s] Starting nose iterations: %s\"",
",",
"params",
".",
"worker_index",
",",
"params",
")",
"assert",
"isinstance",
"(",
"params",
".",
"tests",
",",
"list",
")",
"# argv.extend(['--with-apiritif', '--nocapture', '--exe', '--nologcapture'])",
"end_time",
"=",
"self",
".",
"params",
".",
"ramp_up",
"+",
"self",
".",
"params",
".",
"hold_for",
"end_time",
"+=",
"time",
".",
"time",
"(",
")",
"if",
"end_time",
"else",
"0",
"time",
".",
"sleep",
"(",
"params",
".",
"delay",
")",
"plugin",
"=",
"ApiritifPlugin",
"(",
"self",
".",
"_writer",
")",
"self",
".",
"_writer",
".",
"concurrency",
"+=",
"1",
"config",
"=",
"Config",
"(",
"env",
"=",
"os",
".",
"environ",
",",
"files",
"=",
"all_config_files",
"(",
")",
",",
"plugins",
"=",
"DefaultPluginManager",
"(",
")",
")",
"config",
".",
"plugins",
".",
"addPlugins",
"(",
"extraplugins",
"=",
"[",
"plugin",
"]",
")",
"config",
".",
"testNames",
"=",
"params",
".",
"tests",
"config",
".",
"verbosity",
"=",
"3",
"if",
"params",
".",
"verbose",
"else",
"0",
"if",
"params",
".",
"verbose",
":",
"config",
".",
"stream",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"\"w\"",
")",
"# FIXME: use \"with\", allow writing to file/log",
"iteration",
"=",
"0",
"try",
":",
"while",
"True",
":",
"log",
".",
"debug",
"(",
"\"Starting iteration:: index=%d,start_time=%.3f\"",
",",
"iteration",
",",
"time",
".",
"time",
"(",
")",
")",
"thread",
".",
"set_iteration",
"(",
"iteration",
")",
"ApiritifTestProgram",
"(",
"config",
"=",
"config",
")",
"log",
".",
"debug",
"(",
"\"Finishing iteration:: index=%d,end_time=%.3f\"",
",",
"iteration",
",",
"time",
".",
"time",
"(",
")",
")",
"iteration",
"+=",
"1",
"# reasons to stop",
"if",
"plugin",
".",
"stop_reason",
":",
"log",
".",
"debug",
"(",
"\"[%s] finished prematurely: %s\"",
",",
"params",
".",
"worker_index",
",",
"plugin",
".",
"stop_reason",
")",
"elif",
"iteration",
">=",
"params",
".",
"iterations",
":",
"log",
".",
"debug",
"(",
"\"[%s] iteration limit reached: %s\"",
",",
"params",
".",
"worker_index",
",",
"params",
".",
"iterations",
")",
"elif",
"0",
"<",
"end_time",
"<=",
"time",
".",
"time",
"(",
")",
":",
"log",
".",
"debug",
"(",
"\"[%s] duration limit reached: %s\"",
",",
"params",
".",
"worker_index",
",",
"params",
".",
"hold_for",
")",
"else",
":",
"continue",
"# continue if no one is faced",
"break",
"finally",
":",
"self",
".",
"_writer",
".",
"concurrency",
"-=",
"1",
"if",
"params",
".",
"verbose",
":",
"config",
".",
"stream",
".",
"close",
"(",
")"
] | 41.061224 | 23.836735 |
def cli(env):
"""List options for creating Reserved Capacity"""
manager = CapacityManager(env.client)
items = manager.get_create_options()
items.sort(key=lambda term: int(term['capacity']))
table = formatting.Table(["KeyName", "Description", "Term", "Default Hourly Price Per Instance"],
title="Reserved Capacity Options")
table.align["Hourly Price"] = "l"
table.align["Description"] = "l"
table.align["KeyName"] = "l"
for item in items:
table.add_row([
item['keyName'], item['description'], item['capacity'], get_price(item)
])
env.fout(table)
regions = manager.get_available_routers()
location_table = formatting.Table(['Location', 'POD', 'BackendRouterId'], 'Orderable Locations')
for region in regions:
for location in region['locations']:
for pod in location['location']['pods']:
location_table.add_row([region['keyname'], pod['backendRouterName'], pod['backendRouterId']])
env.fout(location_table) | [
"def",
"cli",
"(",
"env",
")",
":",
"manager",
"=",
"CapacityManager",
"(",
"env",
".",
"client",
")",
"items",
"=",
"manager",
".",
"get_create_options",
"(",
")",
"items",
".",
"sort",
"(",
"key",
"=",
"lambda",
"term",
":",
"int",
"(",
"term",
"[",
"'capacity'",
"]",
")",
")",
"table",
"=",
"formatting",
".",
"Table",
"(",
"[",
"\"KeyName\"",
",",
"\"Description\"",
",",
"\"Term\"",
",",
"\"Default Hourly Price Per Instance\"",
"]",
",",
"title",
"=",
"\"Reserved Capacity Options\"",
")",
"table",
".",
"align",
"[",
"\"Hourly Price\"",
"]",
"=",
"\"l\"",
"table",
".",
"align",
"[",
"\"Description\"",
"]",
"=",
"\"l\"",
"table",
".",
"align",
"[",
"\"KeyName\"",
"]",
"=",
"\"l\"",
"for",
"item",
"in",
"items",
":",
"table",
".",
"add_row",
"(",
"[",
"item",
"[",
"'keyName'",
"]",
",",
"item",
"[",
"'description'",
"]",
",",
"item",
"[",
"'capacity'",
"]",
",",
"get_price",
"(",
"item",
")",
"]",
")",
"env",
".",
"fout",
"(",
"table",
")",
"regions",
"=",
"manager",
".",
"get_available_routers",
"(",
")",
"location_table",
"=",
"formatting",
".",
"Table",
"(",
"[",
"'Location'",
",",
"'POD'",
",",
"'BackendRouterId'",
"]",
",",
"'Orderable Locations'",
")",
"for",
"region",
"in",
"regions",
":",
"for",
"location",
"in",
"region",
"[",
"'locations'",
"]",
":",
"for",
"pod",
"in",
"location",
"[",
"'location'",
"]",
"[",
"'pods'",
"]",
":",
"location_table",
".",
"add_row",
"(",
"[",
"region",
"[",
"'keyname'",
"]",
",",
"pod",
"[",
"'backendRouterName'",
"]",
",",
"pod",
"[",
"'backendRouterId'",
"]",
"]",
")",
"env",
".",
"fout",
"(",
"location_table",
")"
] | 42.958333 | 21.916667 |
def stop(self):
"""Stop the stream and terminate PyAudio"""
self.prestop()
if not self._graceful:
self._graceful = True
self.stream.stop_stream()
self.audio.terminate()
msg = 'Stopped'
self.verbose_info(msg, log=False)
# Log 'Stopped' anyway
if self.log:
self.logging.info(msg)
if self.collect:
if self._data:
print('Collected result:')
print(' min: %10d' % self._data['min'])
print(' max: %10d' % self._data['max'])
print(' avg: %10d' % int(self._data['avg']))
self.poststop() | [
"def",
"stop",
"(",
"self",
")",
":",
"self",
".",
"prestop",
"(",
")",
"if",
"not",
"self",
".",
"_graceful",
":",
"self",
".",
"_graceful",
"=",
"True",
"self",
".",
"stream",
".",
"stop_stream",
"(",
")",
"self",
".",
"audio",
".",
"terminate",
"(",
")",
"msg",
"=",
"'Stopped'",
"self",
".",
"verbose_info",
"(",
"msg",
",",
"log",
"=",
"False",
")",
"# Log 'Stopped' anyway",
"if",
"self",
".",
"log",
":",
"self",
".",
"logging",
".",
"info",
"(",
"msg",
")",
"if",
"self",
".",
"collect",
":",
"if",
"self",
".",
"_data",
":",
"print",
"(",
"'Collected result:'",
")",
"print",
"(",
"' min: %10d'",
"%",
"self",
".",
"_data",
"[",
"'min'",
"]",
")",
"print",
"(",
"' max: %10d'",
"%",
"self",
".",
"_data",
"[",
"'max'",
"]",
")",
"print",
"(",
"' avg: %10d'",
"%",
"int",
"(",
"self",
".",
"_data",
"[",
"'avg'",
"]",
")",
")",
"self",
".",
"poststop",
"(",
")"
] | 34.526316 | 12.578947 |
def synchronise_device_state(self, device_state, authentication_headers):
"""
Synchronizing the component states with AVS
Components state must be synchronised with AVS after establishing the
downchannel stream in order to create a persistent connection with AVS.
Note that currently this function is paying lip-service synchronising
the device state: the device state is hard-coded.
"""
payload = {
'context': device_state,
'event': {
'header': {
'namespace': 'System',
'name': 'SynchronizeState',
'messageId': ''
},
'payload': {}
}
}
multipart_data = MultipartEncoder(
fields=[
(
'metadata', (
'metadata',
json.dumps(payload),
'application/json',
{'Content-Disposition': "form-data; name='metadata'"}
)
),
],
boundary='boundary'
)
headers = {
**authentication_headers,
'Content-Type': multipart_data.content_type
}
stream_id = self.connection.request(
'GET',
'/v20160207/events',
body=multipart_data,
headers=headers,
)
response = self.connection.get_response(stream_id)
assert response.status in [http.client.NO_CONTENT, http.client.OK] | [
"def",
"synchronise_device_state",
"(",
"self",
",",
"device_state",
",",
"authentication_headers",
")",
":",
"payload",
"=",
"{",
"'context'",
":",
"device_state",
",",
"'event'",
":",
"{",
"'header'",
":",
"{",
"'namespace'",
":",
"'System'",
",",
"'name'",
":",
"'SynchronizeState'",
",",
"'messageId'",
":",
"''",
"}",
",",
"'payload'",
":",
"{",
"}",
"}",
"}",
"multipart_data",
"=",
"MultipartEncoder",
"(",
"fields",
"=",
"[",
"(",
"'metadata'",
",",
"(",
"'metadata'",
",",
"json",
".",
"dumps",
"(",
"payload",
")",
",",
"'application/json'",
",",
"{",
"'Content-Disposition'",
":",
"\"form-data; name='metadata'\"",
"}",
")",
")",
",",
"]",
",",
"boundary",
"=",
"'boundary'",
")",
"headers",
"=",
"{",
"*",
"*",
"authentication_headers",
",",
"'Content-Type'",
":",
"multipart_data",
".",
"content_type",
"}",
"stream_id",
"=",
"self",
".",
"connection",
".",
"request",
"(",
"'GET'",
",",
"'/v20160207/events'",
",",
"body",
"=",
"multipart_data",
",",
"headers",
"=",
"headers",
",",
")",
"response",
"=",
"self",
".",
"connection",
".",
"get_response",
"(",
"stream_id",
")",
"assert",
"response",
".",
"status",
"in",
"[",
"http",
".",
"client",
".",
"NO_CONTENT",
",",
"http",
".",
"client",
".",
"OK",
"]"
] | 32.333333 | 18.958333 |
def args(self) -> str:
"""Provides arguments for the command."""
return '{}{}{}{}{}'.format(
to_ascii_hex(self._index, 2),
to_ascii_hex(self._group_number, 2),
to_ascii_hex(self._unit_number, 2),
to_ascii_hex(int(self._enable_status), 4),
to_ascii_hex(int(self._switches), 4)) | [
"def",
"args",
"(",
"self",
")",
"->",
"str",
":",
"return",
"'{}{}{}{}{}'",
".",
"format",
"(",
"to_ascii_hex",
"(",
"self",
".",
"_index",
",",
"2",
")",
",",
"to_ascii_hex",
"(",
"self",
".",
"_group_number",
",",
"2",
")",
",",
"to_ascii_hex",
"(",
"self",
".",
"_unit_number",
",",
"2",
")",
",",
"to_ascii_hex",
"(",
"int",
"(",
"self",
".",
"_enable_status",
")",
",",
"4",
")",
",",
"to_ascii_hex",
"(",
"int",
"(",
"self",
".",
"_switches",
")",
",",
"4",
")",
")"
] | 43.125 | 7.75 |
def notify_telegram(title, content, token=None, chat=None, mention_user=None, **kwargs):
"""
Sends a telegram notification and returns *True* on success. The communication with the telegram
API might have some delays and is therefore handled by a thread.
"""
# test import
import telegram # noqa: F401
cfg = Config.instance()
# get default token and chat
if not token:
token = cfg.get_expanded("notifications", "telegram_token")
if not chat:
chat = cfg.get_expanded("notifications", "telegram_chat")
if not token or not chat:
logger.warning("cannot send Telegram notification, token ({}) or chat ({}) empty".format(
token, chat))
return False
# append the user to mention to the title
# unless explicitly set to empty string
mention_text = ""
if mention_user is None:
mention_user = cfg.get_expanded("notifications", "telegram_mention_user")
if mention_user:
mention_text = " (@{})".format(mention_user)
# request data for the API call
request = {
"parse_mode": "Markdown",
}
# standard or attachment content?
if isinstance(content, six.string_types):
request["text"] = "{}{}\n\n{}".format(title, mention_text, content)
else:
# content is a dict, add some formatting
request["text"] = "{}{}\n\n".format(title, mention_text)
for key, value in content.items():
request["text"] += "_{}_: {}\n".format(key, value)
# extend by arbitrary kwargs
request.update(kwargs)
# threaded, non-blocking API communication
thread = threading.Thread(target=_notify_telegram, args=(token, chat, request))
thread.start()
return True | [
"def",
"notify_telegram",
"(",
"title",
",",
"content",
",",
"token",
"=",
"None",
",",
"chat",
"=",
"None",
",",
"mention_user",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"# test import",
"import",
"telegram",
"# noqa: F401",
"cfg",
"=",
"Config",
".",
"instance",
"(",
")",
"# get default token and chat",
"if",
"not",
"token",
":",
"token",
"=",
"cfg",
".",
"get_expanded",
"(",
"\"notifications\"",
",",
"\"telegram_token\"",
")",
"if",
"not",
"chat",
":",
"chat",
"=",
"cfg",
".",
"get_expanded",
"(",
"\"notifications\"",
",",
"\"telegram_chat\"",
")",
"if",
"not",
"token",
"or",
"not",
"chat",
":",
"logger",
".",
"warning",
"(",
"\"cannot send Telegram notification, token ({}) or chat ({}) empty\"",
".",
"format",
"(",
"token",
",",
"chat",
")",
")",
"return",
"False",
"# append the user to mention to the title",
"# unless explicitly set to empty string",
"mention_text",
"=",
"\"\"",
"if",
"mention_user",
"is",
"None",
":",
"mention_user",
"=",
"cfg",
".",
"get_expanded",
"(",
"\"notifications\"",
",",
"\"telegram_mention_user\"",
")",
"if",
"mention_user",
":",
"mention_text",
"=",
"\" (@{})\"",
".",
"format",
"(",
"mention_user",
")",
"# request data for the API call",
"request",
"=",
"{",
"\"parse_mode\"",
":",
"\"Markdown\"",
",",
"}",
"# standard or attachment content?",
"if",
"isinstance",
"(",
"content",
",",
"six",
".",
"string_types",
")",
":",
"request",
"[",
"\"text\"",
"]",
"=",
"\"{}{}\\n\\n{}\"",
".",
"format",
"(",
"title",
",",
"mention_text",
",",
"content",
")",
"else",
":",
"# content is a dict, add some formatting",
"request",
"[",
"\"text\"",
"]",
"=",
"\"{}{}\\n\\n\"",
".",
"format",
"(",
"title",
",",
"mention_text",
")",
"for",
"key",
",",
"value",
"in",
"content",
".",
"items",
"(",
")",
":",
"request",
"[",
"\"text\"",
"]",
"+=",
"\"_{}_: {}\\n\"",
".",
"format",
"(",
"key",
",",
"value",
")",
"# extend by arbitrary kwargs",
"request",
".",
"update",
"(",
"kwargs",
")",
"# threaded, non-blocking API communication",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"_notify_telegram",
",",
"args",
"=",
"(",
"token",
",",
"chat",
",",
"request",
")",
")",
"thread",
".",
"start",
"(",
")",
"return",
"True"
] | 32.596154 | 23.480769 |
def format_property(name, value):
"""Format the name and value (both unicode) of a property as a string."""
result = b''
utf8_name = utf8_bytes_string(name)
result = b'property ' + utf8_name
if value is not None:
utf8_value = utf8_bytes_string(value)
result += b' ' + ('%d' % len(utf8_value)).encode('ascii') + b' ' + utf8_value
return result | [
"def",
"format_property",
"(",
"name",
",",
"value",
")",
":",
"result",
"=",
"b''",
"utf8_name",
"=",
"utf8_bytes_string",
"(",
"name",
")",
"result",
"=",
"b'property '",
"+",
"utf8_name",
"if",
"value",
"is",
"not",
"None",
":",
"utf8_value",
"=",
"utf8_bytes_string",
"(",
"value",
")",
"result",
"+=",
"b' '",
"+",
"(",
"'%d'",
"%",
"len",
"(",
"utf8_value",
")",
")",
".",
"encode",
"(",
"'ascii'",
")",
"+",
"b' '",
"+",
"utf8_value",
"return",
"result"
] | 34 | 18.454545 |
def length(self):
"""Returns the route length (cost)
Returns
-------
int
Route length (cost).
"""
cost = 0
depot = self._problem.depot()
last = depot
for i in self._nodes:
a, b = last, i
if a.name() > b.name():
a, b = b, a
cost = cost + self._problem.distance(a, b)
last = i
cost = cost + self._problem.distance(depot, last)
return cost | [
"def",
"length",
"(",
"self",
")",
":",
"cost",
"=",
"0",
"depot",
"=",
"self",
".",
"_problem",
".",
"depot",
"(",
")",
"last",
"=",
"depot",
"for",
"i",
"in",
"self",
".",
"_nodes",
":",
"a",
",",
"b",
"=",
"last",
",",
"i",
"if",
"a",
".",
"name",
"(",
")",
">",
"b",
".",
"name",
"(",
")",
":",
"a",
",",
"b",
"=",
"b",
",",
"a",
"cost",
"=",
"cost",
"+",
"self",
".",
"_problem",
".",
"distance",
"(",
"a",
",",
"b",
")",
"last",
"=",
"i",
"cost",
"=",
"cost",
"+",
"self",
".",
"_problem",
".",
"distance",
"(",
"depot",
",",
"last",
")",
"return",
"cost"
] | 21.347826 | 20.173913 |
def sigangledAngle(self,dangle,assumeZeroMean=True,smallest=False,
simple=False):
"""
NAME:
sigangledAngle
PURPOSE:
calculate the dispersion in the perpendicular angle at a given angle
INPUT:
dangle - angle offset along the stream
assumeZeroMean= (True) if True, assume that the mean is zero (should be)
smallest= (False) calculate for smallest eigenvalue direction rather than for middle
simple= (False), if True, return an even simpler estimate
OUTPUT:
dispersion in the perpendicular angle at this angle
HISTORY:
2013-12-06 - Written - Bovy (IAS)
"""
if smallest: eigIndx= 0
else: eigIndx= 1
if simple:
dt= self.meantdAngle(dangle,use_physical=False)
return numpy.sqrt(self._sigangle2
+self._sortedSigOEig[eigIndx]*dt**2.)
aplow= numpy.amax([numpy.sqrt(self._sortedSigOEig[eigIndx])*self._tdisrupt*5.,
self._sigangle])
numsig2= integrate.quad(lambda x: x**2.*self.pangledAngle(x,dangle),
aplow,-aplow)[0]
if not assumeZeroMean:
nummean= integrate.quad(lambda x: x*self.pangledAngle(x,dangle),
aplow,-aplow)[0]
else:
nummean= 0.
denom= integrate.quad(self.pangledAngle,aplow,-aplow,(dangle,))[0]
if denom == 0.: return numpy.nan
else: return numpy.sqrt(numsig2/denom-(nummean/denom)**2.) | [
"def",
"sigangledAngle",
"(",
"self",
",",
"dangle",
",",
"assumeZeroMean",
"=",
"True",
",",
"smallest",
"=",
"False",
",",
"simple",
"=",
"False",
")",
":",
"if",
"smallest",
":",
"eigIndx",
"=",
"0",
"else",
":",
"eigIndx",
"=",
"1",
"if",
"simple",
":",
"dt",
"=",
"self",
".",
"meantdAngle",
"(",
"dangle",
",",
"use_physical",
"=",
"False",
")",
"return",
"numpy",
".",
"sqrt",
"(",
"self",
".",
"_sigangle2",
"+",
"self",
".",
"_sortedSigOEig",
"[",
"eigIndx",
"]",
"*",
"dt",
"**",
"2.",
")",
"aplow",
"=",
"numpy",
".",
"amax",
"(",
"[",
"numpy",
".",
"sqrt",
"(",
"self",
".",
"_sortedSigOEig",
"[",
"eigIndx",
"]",
")",
"*",
"self",
".",
"_tdisrupt",
"*",
"5.",
",",
"self",
".",
"_sigangle",
"]",
")",
"numsig2",
"=",
"integrate",
".",
"quad",
"(",
"lambda",
"x",
":",
"x",
"**",
"2.",
"*",
"self",
".",
"pangledAngle",
"(",
"x",
",",
"dangle",
")",
",",
"aplow",
",",
"-",
"aplow",
")",
"[",
"0",
"]",
"if",
"not",
"assumeZeroMean",
":",
"nummean",
"=",
"integrate",
".",
"quad",
"(",
"lambda",
"x",
":",
"x",
"*",
"self",
".",
"pangledAngle",
"(",
"x",
",",
"dangle",
")",
",",
"aplow",
",",
"-",
"aplow",
")",
"[",
"0",
"]",
"else",
":",
"nummean",
"=",
"0.",
"denom",
"=",
"integrate",
".",
"quad",
"(",
"self",
".",
"pangledAngle",
",",
"aplow",
",",
"-",
"aplow",
",",
"(",
"dangle",
",",
")",
")",
"[",
"0",
"]",
"if",
"denom",
"==",
"0.",
":",
"return",
"numpy",
".",
"nan",
"else",
":",
"return",
"numpy",
".",
"sqrt",
"(",
"numsig2",
"/",
"denom",
"-",
"(",
"nummean",
"/",
"denom",
")",
"**",
"2.",
")"
] | 32.895833 | 25.979167 |
def short_encode(input, errors='strict'):
"""Transliterate to 8 bit using as few letters as possible.
For example, \u00e4 LATIN SMALL LETTER A WITH DIAERESIS ``ä`` will
be replaced with ``a``.
"""
if not isinstance(input, text_type):
input = text_type(input, sys.getdefaultencoding(), errors)
length = len(input)
input = unicodedata.normalize('NFKC', input)
return input.translate(short_table), length | [
"def",
"short_encode",
"(",
"input",
",",
"errors",
"=",
"'strict'",
")",
":",
"if",
"not",
"isinstance",
"(",
"input",
",",
"text_type",
")",
":",
"input",
"=",
"text_type",
"(",
"input",
",",
"sys",
".",
"getdefaultencoding",
"(",
")",
",",
"errors",
")",
"length",
"=",
"len",
"(",
"input",
")",
"input",
"=",
"unicodedata",
".",
"normalize",
"(",
"'NFKC'",
",",
"input",
")",
"return",
"input",
".",
"translate",
"(",
"short_table",
")",
",",
"length"
] | 36 | 15.166667 |
def attempt_dev_link_via_import(self, egg):
"""Create egg-link to FS location if an egg is found through importing.
Sometimes an egg *is* installed, but without a proper egg-info file.
So we attempt to import the egg in order to return a link anyway.
TODO: currently it only works with simple package names like
"psycopg2" and "mapnik".
"""
try:
imported = __import__(egg)
except ImportError:
self.logger.warn("Tried importing '%s', but that also didn't work.", egg)
self.logger.debug("For reference, sys.path is %s", sys.path)
return
self.logger.info("Importing %s works, however", egg)
try:
probable_location = os.path.dirname(imported.__file__)
except: # Bare except
self.logger.exception("Determining the location failed, however")
return
filesystem_egg_link = os.path.join(
self.dev_egg_dir,
'%s.egg-link' % egg)
f = open(filesystem_egg_link, 'w')
f.write(probable_location)
f.close()
self.logger.info('Using sysegg %s for %s', probable_location, egg)
self.added.append(filesystem_egg_link)
return True | [
"def",
"attempt_dev_link_via_import",
"(",
"self",
",",
"egg",
")",
":",
"try",
":",
"imported",
"=",
"__import__",
"(",
"egg",
")",
"except",
"ImportError",
":",
"self",
".",
"logger",
".",
"warn",
"(",
"\"Tried importing '%s', but that also didn't work.\"",
",",
"egg",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"For reference, sys.path is %s\"",
",",
"sys",
".",
"path",
")",
"return",
"self",
".",
"logger",
".",
"info",
"(",
"\"Importing %s works, however\"",
",",
"egg",
")",
"try",
":",
"probable_location",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"imported",
".",
"__file__",
")",
"except",
":",
"# Bare except",
"self",
".",
"logger",
".",
"exception",
"(",
"\"Determining the location failed, however\"",
")",
"return",
"filesystem_egg_link",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"dev_egg_dir",
",",
"'%s.egg-link'",
"%",
"egg",
")",
"f",
"=",
"open",
"(",
"filesystem_egg_link",
",",
"'w'",
")",
"f",
".",
"write",
"(",
"probable_location",
")",
"f",
".",
"close",
"(",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'Using sysegg %s for %s'",
",",
"probable_location",
",",
"egg",
")",
"self",
".",
"added",
".",
"append",
"(",
"filesystem_egg_link",
")",
"return",
"True"
] | 39.774194 | 20.225806 |
def step_impl06(context):
"""Prepare test for singleton property.
:param context: test context.
"""
store = context.SingleStore
context.st_1 = store()
context.st_2 = store()
context.st_3 = store() | [
"def",
"step_impl06",
"(",
"context",
")",
":",
"store",
"=",
"context",
".",
"SingleStore",
"context",
".",
"st_1",
"=",
"store",
"(",
")",
"context",
".",
"st_2",
"=",
"store",
"(",
")",
"context",
".",
"st_3",
"=",
"store",
"(",
")"
] | 24.111111 | 12.555556 |
def request(self, apdu):
"""This function is called by transaction functions to send
to the application."""
if _debug: ServerSSM._debug("request %r", apdu)
# make sure it has a good source and destination
apdu.pduSource = self.pdu_address
apdu.pduDestination = None
# send it via the device
self.ssmSAP.sap_request(apdu) | [
"def",
"request",
"(",
"self",
",",
"apdu",
")",
":",
"if",
"_debug",
":",
"ServerSSM",
".",
"_debug",
"(",
"\"request %r\"",
",",
"apdu",
")",
"# make sure it has a good source and destination",
"apdu",
".",
"pduSource",
"=",
"self",
".",
"pdu_address",
"apdu",
".",
"pduDestination",
"=",
"None",
"# send it via the device",
"self",
".",
"ssmSAP",
".",
"sap_request",
"(",
"apdu",
")"
] | 34.181818 | 13.181818 |
def create_session(self, session_id, register=True):
"""Creates new session object and returns it.
`request`
Request that created the session. Will be used to get query string
parameters and cookies
`register`
Should be session registered in a storage. Websockets don't
need it.
"""
# TODO: Possible optimization here for settings.get
s = self._session_kls(self._connection,
self,
session_id,
self.settings.get('disconnect_delay')
)
if register:
self._sessions.add(s)
return s | [
"def",
"create_session",
"(",
"self",
",",
"session_id",
",",
"register",
"=",
"True",
")",
":",
"# TODO: Possible optimization here for settings.get",
"s",
"=",
"self",
".",
"_session_kls",
"(",
"self",
".",
"_connection",
",",
"self",
",",
"session_id",
",",
"self",
".",
"settings",
".",
"get",
"(",
"'disconnect_delay'",
")",
")",
"if",
"register",
":",
"self",
".",
"_sessions",
".",
"add",
"(",
"s",
")",
"return",
"s"
] | 33.095238 | 18.714286 |
def to_string(self, buf=None, na_rep='NaN', float_format=None, header=True,
index=True, length=False, dtype=False, name=False,
max_rows=None):
"""
Render a string representation of the Series.
Parameters
----------
buf : StringIO-like, optional
Buffer to write to.
na_rep : str, optional
String representation of NaN to use, default 'NaN'.
float_format : one-parameter function, optional
Formatter function to apply to columns' elements if they are
floats, default None.
header : bool, default True
Add the Series header (index name).
index : bool, optional
Add index (row) labels, default True.
length : bool, default False
Add the Series length.
dtype : bool, default False
Add the Series dtype.
name : bool, default False
Add the Series name if not None.
max_rows : int, optional
Maximum number of rows to show before truncating. If None, show
all.
Returns
-------
str or None
String representation of Series if ``buf=None``, otherwise None.
"""
formatter = fmt.SeriesFormatter(self, name=name, length=length,
header=header, index=index,
dtype=dtype, na_rep=na_rep,
float_format=float_format,
max_rows=max_rows)
result = formatter.to_string()
# catch contract violations
if not isinstance(result, str):
raise AssertionError("result must be of type unicode, type"
" of result is {0!r}"
"".format(result.__class__.__name__))
if buf is None:
return result
else:
try:
buf.write(result)
except AttributeError:
with open(buf, 'w') as f:
f.write(result) | [
"def",
"to_string",
"(",
"self",
",",
"buf",
"=",
"None",
",",
"na_rep",
"=",
"'NaN'",
",",
"float_format",
"=",
"None",
",",
"header",
"=",
"True",
",",
"index",
"=",
"True",
",",
"length",
"=",
"False",
",",
"dtype",
"=",
"False",
",",
"name",
"=",
"False",
",",
"max_rows",
"=",
"None",
")",
":",
"formatter",
"=",
"fmt",
".",
"SeriesFormatter",
"(",
"self",
",",
"name",
"=",
"name",
",",
"length",
"=",
"length",
",",
"header",
"=",
"header",
",",
"index",
"=",
"index",
",",
"dtype",
"=",
"dtype",
",",
"na_rep",
"=",
"na_rep",
",",
"float_format",
"=",
"float_format",
",",
"max_rows",
"=",
"max_rows",
")",
"result",
"=",
"formatter",
".",
"to_string",
"(",
")",
"# catch contract violations",
"if",
"not",
"isinstance",
"(",
"result",
",",
"str",
")",
":",
"raise",
"AssertionError",
"(",
"\"result must be of type unicode, type\"",
"\" of result is {0!r}\"",
"\"\"",
".",
"format",
"(",
"result",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"buf",
"is",
"None",
":",
"return",
"result",
"else",
":",
"try",
":",
"buf",
".",
"write",
"(",
"result",
")",
"except",
"AttributeError",
":",
"with",
"open",
"(",
"buf",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"result",
")"
] | 37.303571 | 17.446429 |
def check_dependency(self, operation, dependency):
"""
Enhances default behavior of method by checking dependency for matching operation.
"""
if isinstance(dependency[1], SQLBlob):
# NOTE: we follow the sort order created by `assemble_changes` so we build a fixed chain
# of operations. thus we should match exact operation here.
return dependency[3] == operation
return super(MigrationAutodetector, self).check_dependency(operation, dependency) | [
"def",
"check_dependency",
"(",
"self",
",",
"operation",
",",
"dependency",
")",
":",
"if",
"isinstance",
"(",
"dependency",
"[",
"1",
"]",
",",
"SQLBlob",
")",
":",
"# NOTE: we follow the sort order created by `assemble_changes` so we build a fixed chain",
"# of operations. thus we should match exact operation here.",
"return",
"dependency",
"[",
"3",
"]",
"==",
"operation",
"return",
"super",
"(",
"MigrationAutodetector",
",",
"self",
")",
".",
"check_dependency",
"(",
"operation",
",",
"dependency",
")"
] | 57 | 23.444444 |
def em_rates_from_E_DA_mix(em_rates_tot, E_values):
"""D and A emission rates for two populations.
"""
em_rates_d, em_rates_a = [], []
for em_rate_tot, E_value in zip(em_rates_tot, E_values):
em_rate_di, em_rate_ai = em_rates_from_E_DA(em_rate_tot, E_value)
em_rates_d.append(em_rate_di)
em_rates_a.append(em_rate_ai)
return em_rates_d, em_rates_a | [
"def",
"em_rates_from_E_DA_mix",
"(",
"em_rates_tot",
",",
"E_values",
")",
":",
"em_rates_d",
",",
"em_rates_a",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"em_rate_tot",
",",
"E_value",
"in",
"zip",
"(",
"em_rates_tot",
",",
"E_values",
")",
":",
"em_rate_di",
",",
"em_rate_ai",
"=",
"em_rates_from_E_DA",
"(",
"em_rate_tot",
",",
"E_value",
")",
"em_rates_d",
".",
"append",
"(",
"em_rate_di",
")",
"em_rates_a",
".",
"append",
"(",
"em_rate_ai",
")",
"return",
"em_rates_d",
",",
"em_rates_a"
] | 42.555556 | 9.111111 |
def resolve_possible_paths(path, relative_prefix, possible_extensions=None, leading_underscore=False):
"""
Attempts to resolve the given absolute or relative ``path``. If it
doesn't exist as is, tries to create an absolute path using the
``relative_prefix``. If that fails, tries relative/absolute versions
with each of ``possible_extensions``.
:returns: The absolute path, or ``None`` if no such file can be found.
"""
possible_extensions = [''] + list(possible_extensions) if possible_extensions else ['']
possible_paths = [path + e if os.path.isabs(path + e) else os.path.join(relative_prefix, path + e)
for e in possible_extensions]
if leading_underscore and not os.path.basename(path).startswith('_'):
extra_paths = [os.path.join(os.path.dirname(p), '_' + os.path.basename(p))
for p in possible_paths]
possible_paths = possible_paths + extra_paths
for p in possible_paths:
p = os.path.normpath(p)
if os.path.isfile(p):
return p
return None | [
"def",
"resolve_possible_paths",
"(",
"path",
",",
"relative_prefix",
",",
"possible_extensions",
"=",
"None",
",",
"leading_underscore",
"=",
"False",
")",
":",
"possible_extensions",
"=",
"[",
"''",
"]",
"+",
"list",
"(",
"possible_extensions",
")",
"if",
"possible_extensions",
"else",
"[",
"''",
"]",
"possible_paths",
"=",
"[",
"path",
"+",
"e",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"path",
"+",
"e",
")",
"else",
"os",
".",
"path",
".",
"join",
"(",
"relative_prefix",
",",
"path",
"+",
"e",
")",
"for",
"e",
"in",
"possible_extensions",
"]",
"if",
"leading_underscore",
"and",
"not",
"os",
".",
"path",
".",
"basename",
"(",
"path",
")",
".",
"startswith",
"(",
"'_'",
")",
":",
"extra_paths",
"=",
"[",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"p",
")",
",",
"'_'",
"+",
"os",
".",
"path",
".",
"basename",
"(",
"p",
")",
")",
"for",
"p",
"in",
"possible_paths",
"]",
"possible_paths",
"=",
"possible_paths",
"+",
"extra_paths",
"for",
"p",
"in",
"possible_paths",
":",
"p",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"p",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"p",
")",
":",
"return",
"p",
"return",
"None"
] | 44.25 | 26.75 |
def write_gpio(self, gpio=None):
"""Write the specified byte value to the GPIO registor. If no value
specified the current buffered value will be written.
"""
if gpio is not None:
self.gpio = gpio
self._device.writeList(self.GPIO, self.gpio) | [
"def",
"write_gpio",
"(",
"self",
",",
"gpio",
"=",
"None",
")",
":",
"if",
"gpio",
"is",
"not",
"None",
":",
"self",
".",
"gpio",
"=",
"gpio",
"self",
".",
"_device",
".",
"writeList",
"(",
"self",
".",
"GPIO",
",",
"self",
".",
"gpio",
")"
] | 41.142857 | 9.285714 |
def _handle_list_marker(self):
"""Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
markup = self._read()
if markup == ";":
self._context |= contexts.DL_TERM
self._emit(tokens.TagOpenOpen(wiki_markup=markup))
self._emit_text(get_html_tag(markup))
self._emit(tokens.TagCloseSelfclose()) | [
"def",
"_handle_list_marker",
"(",
"self",
")",
":",
"markup",
"=",
"self",
".",
"_read",
"(",
")",
"if",
"markup",
"==",
"\";\"",
":",
"self",
".",
"_context",
"|=",
"contexts",
".",
"DL_TERM",
"self",
".",
"_emit",
"(",
"tokens",
".",
"TagOpenOpen",
"(",
"wiki_markup",
"=",
"markup",
")",
")",
"self",
".",
"_emit_text",
"(",
"get_html_tag",
"(",
"markup",
")",
")",
"self",
".",
"_emit",
"(",
"tokens",
".",
"TagCloseSelfclose",
"(",
")",
")"
] | 44.25 | 8.75 |
def uninstall_packages():
"""
Uninstall unwanted packages
"""
p = server_state('packages_installed')
if p: installed = set(p)
else: return
env.uninstalled_packages[env.host] = []
#first uninstall any that have been taken off the list
packages = set(get_packages())
uninstall = installed - packages
if uninstall and env.verbosity:
print env.host,'UNINSTALLING HOST PACKAGES'
for p in uninstall:
if env.verbosity:
print ' - uninstalling',p
uninstall_package(p)
env.uninstalled_packages[env.host].append(p)
set_server_state('packages_installed',get_packages())
return | [
"def",
"uninstall_packages",
"(",
")",
":",
"p",
"=",
"server_state",
"(",
"'packages_installed'",
")",
"if",
"p",
":",
"installed",
"=",
"set",
"(",
"p",
")",
"else",
":",
"return",
"env",
".",
"uninstalled_packages",
"[",
"env",
".",
"host",
"]",
"=",
"[",
"]",
"#first uninstall any that have been taken off the list",
"packages",
"=",
"set",
"(",
"get_packages",
"(",
")",
")",
"uninstall",
"=",
"installed",
"-",
"packages",
"if",
"uninstall",
"and",
"env",
".",
"verbosity",
":",
"print",
"env",
".",
"host",
",",
"'UNINSTALLING HOST PACKAGES'",
"for",
"p",
"in",
"uninstall",
":",
"if",
"env",
".",
"verbosity",
":",
"print",
"' - uninstalling'",
",",
"p",
"uninstall_package",
"(",
"p",
")",
"env",
".",
"uninstalled_packages",
"[",
"env",
".",
"host",
"]",
".",
"append",
"(",
"p",
")",
"set_server_state",
"(",
"'packages_installed'",
",",
"get_packages",
"(",
")",
")",
"return"
] | 32.25 | 10.75 |
def generateKey(self, template, mecha=MechanismAESGENERATEKEY):
"""
generate a secret key
:param template: template for the secret key
:param mecha: mechanism to use
:return: handle of the generated key
:rtype: PyKCS11.LowLevel.CK_OBJECT_HANDLE
"""
t = self._template2ckattrlist(template)
ck_handle = PyKCS11.LowLevel.CK_OBJECT_HANDLE()
m = mecha.to_native()
rv = self.lib.C_GenerateKey(self.session, m, t, ck_handle)
if rv != CKR_OK:
raise PyKCS11Error(rv)
return ck_handle | [
"def",
"generateKey",
"(",
"self",
",",
"template",
",",
"mecha",
"=",
"MechanismAESGENERATEKEY",
")",
":",
"t",
"=",
"self",
".",
"_template2ckattrlist",
"(",
"template",
")",
"ck_handle",
"=",
"PyKCS11",
".",
"LowLevel",
".",
"CK_OBJECT_HANDLE",
"(",
")",
"m",
"=",
"mecha",
".",
"to_native",
"(",
")",
"rv",
"=",
"self",
".",
"lib",
".",
"C_GenerateKey",
"(",
"self",
".",
"session",
",",
"m",
",",
"t",
",",
"ck_handle",
")",
"if",
"rv",
"!=",
"CKR_OK",
":",
"raise",
"PyKCS11Error",
"(",
"rv",
")",
"return",
"ck_handle"
] | 36 | 12.375 |
def _scalar_to_vector(self, m):
"""Allow submodels with scalar equations. Convert to 1D vector systems.
Args:
m (Model)
"""
if not isinstance(m.y0, numbers.Number):
return m
else:
m = copy.deepcopy(m)
t0 = 0.0
if isinstance(m.y0, numbers.Integral):
numtype = np.float64
else:
numtype = type(m.y0)
y0_orig = m.y0
m.y0 = np.array([m.y0], dtype=numtype)
def make_vector_fn(fn):
def newfn(y, t):
return np.array([fn(y[0], t)], dtype=numtype)
newfn.__name__ = fn.__name__
return newfn
def make_matrix_fn(fn):
def newfn(y, t):
return np.array([[fn(y[0], t)]], dtype=numtype)
newfn.__name__ = fn.__name__
return newfn
def make_coupling_fn(fn):
def newfn(source_y, target_y, weight):
return np.array([fn(source_y[0], target_y[0], weight)])
newfn.__name__ = fn.__name__
return newfn
if isinstance(m.f(y0_orig, t0), numbers.Number):
m.f = make_vector_fn(m.f)
if hasattr(m, 'G') and isinstance(m.G(y0_orig,t0), numbers.Number):
m.G = make_matrix_fn(m.G)
if (hasattr(m, 'coupling') and
isinstance(m.coupling(y0_orig, y0_orig, 0.5),
numbers.Number)):
m.coupling = make_coupling_fn(m.coupling)
return m | [
"def",
"_scalar_to_vector",
"(",
"self",
",",
"m",
")",
":",
"if",
"not",
"isinstance",
"(",
"m",
".",
"y0",
",",
"numbers",
".",
"Number",
")",
":",
"return",
"m",
"else",
":",
"m",
"=",
"copy",
".",
"deepcopy",
"(",
"m",
")",
"t0",
"=",
"0.0",
"if",
"isinstance",
"(",
"m",
".",
"y0",
",",
"numbers",
".",
"Integral",
")",
":",
"numtype",
"=",
"np",
".",
"float64",
"else",
":",
"numtype",
"=",
"type",
"(",
"m",
".",
"y0",
")",
"y0_orig",
"=",
"m",
".",
"y0",
"m",
".",
"y0",
"=",
"np",
".",
"array",
"(",
"[",
"m",
".",
"y0",
"]",
",",
"dtype",
"=",
"numtype",
")",
"def",
"make_vector_fn",
"(",
"fn",
")",
":",
"def",
"newfn",
"(",
"y",
",",
"t",
")",
":",
"return",
"np",
".",
"array",
"(",
"[",
"fn",
"(",
"y",
"[",
"0",
"]",
",",
"t",
")",
"]",
",",
"dtype",
"=",
"numtype",
")",
"newfn",
".",
"__name__",
"=",
"fn",
".",
"__name__",
"return",
"newfn",
"def",
"make_matrix_fn",
"(",
"fn",
")",
":",
"def",
"newfn",
"(",
"y",
",",
"t",
")",
":",
"return",
"np",
".",
"array",
"(",
"[",
"[",
"fn",
"(",
"y",
"[",
"0",
"]",
",",
"t",
")",
"]",
"]",
",",
"dtype",
"=",
"numtype",
")",
"newfn",
".",
"__name__",
"=",
"fn",
".",
"__name__",
"return",
"newfn",
"def",
"make_coupling_fn",
"(",
"fn",
")",
":",
"def",
"newfn",
"(",
"source_y",
",",
"target_y",
",",
"weight",
")",
":",
"return",
"np",
".",
"array",
"(",
"[",
"fn",
"(",
"source_y",
"[",
"0",
"]",
",",
"target_y",
"[",
"0",
"]",
",",
"weight",
")",
"]",
")",
"newfn",
".",
"__name__",
"=",
"fn",
".",
"__name__",
"return",
"newfn",
"if",
"isinstance",
"(",
"m",
".",
"f",
"(",
"y0_orig",
",",
"t0",
")",
",",
"numbers",
".",
"Number",
")",
":",
"m",
".",
"f",
"=",
"make_vector_fn",
"(",
"m",
".",
"f",
")",
"if",
"hasattr",
"(",
"m",
",",
"'G'",
")",
"and",
"isinstance",
"(",
"m",
".",
"G",
"(",
"y0_orig",
",",
"t0",
")",
",",
"numbers",
".",
"Number",
")",
":",
"m",
".",
"G",
"=",
"make_matrix_fn",
"(",
"m",
".",
"G",
")",
"if",
"(",
"hasattr",
"(",
"m",
",",
"'coupling'",
")",
"and",
"isinstance",
"(",
"m",
".",
"coupling",
"(",
"y0_orig",
",",
"y0_orig",
",",
"0.5",
")",
",",
"numbers",
".",
"Number",
")",
")",
":",
"m",
".",
"coupling",
"=",
"make_coupling_fn",
"(",
"m",
".",
"coupling",
")",
"return",
"m"
] | 40.05 | 12.9 |
def create(self, api_version=values.unset, friendly_name=values.unset,
sms_application_sid=values.unset, sms_fallback_method=values.unset,
sms_fallback_url=values.unset, sms_method=values.unset,
sms_url=values.unset, status_callback=values.unset,
status_callback_method=values.unset,
voice_application_sid=values.unset,
voice_caller_id_lookup=values.unset,
voice_fallback_method=values.unset, voice_fallback_url=values.unset,
voice_method=values.unset, voice_url=values.unset,
emergency_status=values.unset, emergency_address_sid=values.unset,
trunk_sid=values.unset, identity_sid=values.unset,
address_sid=values.unset, voice_receive_mode=values.unset,
phone_number=values.unset, area_code=values.unset):
"""
Create a new IncomingPhoneNumberInstance
:param unicode api_version: The API version to use for incoming calls made to the new phone number
:param unicode friendly_name: A string to describe the new phone number
:param unicode sms_application_sid: The SID of the application to handle SMS messages
:param unicode sms_fallback_method: HTTP method used with sms_fallback_url
:param unicode sms_fallback_url: The URL we call when an error occurs while executing TwiML
:param unicode sms_method: The HTTP method to use with sms url
:param unicode sms_url: The URL we should call when the new phone number receives an incoming SMS message
:param unicode status_callback: The URL we should call to send status information to your application
:param unicode status_callback_method: HTTP method we should use to call status_callback
:param unicode voice_application_sid: The SID of the application to handle the new phone number
:param bool voice_caller_id_lookup: Whether to lookup the caller's name
:param unicode voice_fallback_method: The HTTP method used with voice_fallback_url
:param unicode voice_fallback_url: The URL we will call when an error occurs in TwiML
:param unicode voice_method: The HTTP method used with the voice_url
:param unicode voice_url: The URL we should call when the phone number receives a call
:param IncomingPhoneNumberInstance.EmergencyStatus emergency_status: Status determining whether the new phone number is enabled for emergency calling
:param unicode emergency_address_sid: The emergency address configuration to use for emergency calling
:param unicode trunk_sid: SID of the trunk to handle calls to the new phone number
:param unicode identity_sid: The SID of the Identity resource to associate with the new phone number
:param unicode address_sid: The SID of the Address resource associated with the phone number
:param IncomingPhoneNumberInstance.VoiceReceiveMode voice_receive_mode: Incoming call type: fax or voice
:param unicode phone_number: The phone number to purchase in E.164 format
:param unicode area_code: The desired area code for the new phone number
:returns: Newly created IncomingPhoneNumberInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.IncomingPhoneNumberInstance
"""
data = values.of({
'PhoneNumber': phone_number,
'AreaCode': area_code,
'ApiVersion': api_version,
'FriendlyName': friendly_name,
'SmsApplicationSid': sms_application_sid,
'SmsFallbackMethod': sms_fallback_method,
'SmsFallbackUrl': sms_fallback_url,
'SmsMethod': sms_method,
'SmsUrl': sms_url,
'StatusCallback': status_callback,
'StatusCallbackMethod': status_callback_method,
'VoiceApplicationSid': voice_application_sid,
'VoiceCallerIdLookup': voice_caller_id_lookup,
'VoiceFallbackMethod': voice_fallback_method,
'VoiceFallbackUrl': voice_fallback_url,
'VoiceMethod': voice_method,
'VoiceUrl': voice_url,
'EmergencyStatus': emergency_status,
'EmergencyAddressSid': emergency_address_sid,
'TrunkSid': trunk_sid,
'IdentitySid': identity_sid,
'AddressSid': address_sid,
'VoiceReceiveMode': voice_receive_mode,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return IncomingPhoneNumberInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
) | [
"def",
"create",
"(",
"self",
",",
"api_version",
"=",
"values",
".",
"unset",
",",
"friendly_name",
"=",
"values",
".",
"unset",
",",
"sms_application_sid",
"=",
"values",
".",
"unset",
",",
"sms_fallback_method",
"=",
"values",
".",
"unset",
",",
"sms_fallback_url",
"=",
"values",
".",
"unset",
",",
"sms_method",
"=",
"values",
".",
"unset",
",",
"sms_url",
"=",
"values",
".",
"unset",
",",
"status_callback",
"=",
"values",
".",
"unset",
",",
"status_callback_method",
"=",
"values",
".",
"unset",
",",
"voice_application_sid",
"=",
"values",
".",
"unset",
",",
"voice_caller_id_lookup",
"=",
"values",
".",
"unset",
",",
"voice_fallback_method",
"=",
"values",
".",
"unset",
",",
"voice_fallback_url",
"=",
"values",
".",
"unset",
",",
"voice_method",
"=",
"values",
".",
"unset",
",",
"voice_url",
"=",
"values",
".",
"unset",
",",
"emergency_status",
"=",
"values",
".",
"unset",
",",
"emergency_address_sid",
"=",
"values",
".",
"unset",
",",
"trunk_sid",
"=",
"values",
".",
"unset",
",",
"identity_sid",
"=",
"values",
".",
"unset",
",",
"address_sid",
"=",
"values",
".",
"unset",
",",
"voice_receive_mode",
"=",
"values",
".",
"unset",
",",
"phone_number",
"=",
"values",
".",
"unset",
",",
"area_code",
"=",
"values",
".",
"unset",
")",
":",
"data",
"=",
"values",
".",
"of",
"(",
"{",
"'PhoneNumber'",
":",
"phone_number",
",",
"'AreaCode'",
":",
"area_code",
",",
"'ApiVersion'",
":",
"api_version",
",",
"'FriendlyName'",
":",
"friendly_name",
",",
"'SmsApplicationSid'",
":",
"sms_application_sid",
",",
"'SmsFallbackMethod'",
":",
"sms_fallback_method",
",",
"'SmsFallbackUrl'",
":",
"sms_fallback_url",
",",
"'SmsMethod'",
":",
"sms_method",
",",
"'SmsUrl'",
":",
"sms_url",
",",
"'StatusCallback'",
":",
"status_callback",
",",
"'StatusCallbackMethod'",
":",
"status_callback_method",
",",
"'VoiceApplicationSid'",
":",
"voice_application_sid",
",",
"'VoiceCallerIdLookup'",
":",
"voice_caller_id_lookup",
",",
"'VoiceFallbackMethod'",
":",
"voice_fallback_method",
",",
"'VoiceFallbackUrl'",
":",
"voice_fallback_url",
",",
"'VoiceMethod'",
":",
"voice_method",
",",
"'VoiceUrl'",
":",
"voice_url",
",",
"'EmergencyStatus'",
":",
"emergency_status",
",",
"'EmergencyAddressSid'",
":",
"emergency_address_sid",
",",
"'TrunkSid'",
":",
"trunk_sid",
",",
"'IdentitySid'",
":",
"identity_sid",
",",
"'AddressSid'",
":",
"address_sid",
",",
"'VoiceReceiveMode'",
":",
"voice_receive_mode",
",",
"}",
")",
"payload",
"=",
"self",
".",
"_version",
".",
"create",
"(",
"'POST'",
",",
"self",
".",
"_uri",
",",
"data",
"=",
"data",
",",
")",
"return",
"IncomingPhoneNumberInstance",
"(",
"self",
".",
"_version",
",",
"payload",
",",
"account_sid",
"=",
"self",
".",
"_solution",
"[",
"'account_sid'",
"]",
",",
")"
] | 58.2375 | 28.8125 |
def restore_scrollbar_position(self):
"""Restoring scrollbar position after main window is visible"""
scrollbar_pos = self.get_option('scrollbar_position', None)
if scrollbar_pos is not None:
self.explorer.treewidget.set_scrollbar_position(scrollbar_pos) | [
"def",
"restore_scrollbar_position",
"(",
"self",
")",
":",
"scrollbar_pos",
"=",
"self",
".",
"get_option",
"(",
"'scrollbar_position'",
",",
"None",
")",
"if",
"scrollbar_pos",
"is",
"not",
"None",
":",
"self",
".",
"explorer",
".",
"treewidget",
".",
"set_scrollbar_position",
"(",
"scrollbar_pos",
")"
] | 58 | 13.2 |
def upvote_num(self):
"""获取收到的的赞同数量.
:return: 收到的的赞同数量
:rtype: int
"""
if self.url is None:
return 0
else:
number = int(self.soup.find(
'span', class_='zm-profile-header-user-agree').strong.text)
return number | [
"def",
"upvote_num",
"(",
"self",
")",
":",
"if",
"self",
".",
"url",
"is",
"None",
":",
"return",
"0",
"else",
":",
"number",
"=",
"int",
"(",
"self",
".",
"soup",
".",
"find",
"(",
"'span'",
",",
"class_",
"=",
"'zm-profile-header-user-agree'",
")",
".",
"strong",
".",
"text",
")",
"return",
"number"
] | 24.916667 | 17 |
def check_events(self):
"""Call the event dispatcher.
Quit the main loop when the `QUIT` event is reached.
:Return: `True` if `QUIT` was reached.
"""
if self.event_dispatcher.flush() is QUIT:
self._quit = True
return True
return False | [
"def",
"check_events",
"(",
"self",
")",
":",
"if",
"self",
".",
"event_dispatcher",
".",
"flush",
"(",
")",
"is",
"QUIT",
":",
"self",
".",
"_quit",
"=",
"True",
"return",
"True",
"return",
"False"
] | 27.090909 | 16.363636 |
def system_add_column_family(self, cf_def):
"""
adds a column family. returns the new schema id.
Parameters:
- cf_def
"""
self._seqid += 1
d = self._reqs[self._seqid] = defer.Deferred()
self.send_system_add_column_family(cf_def)
return d | [
"def",
"system_add_column_family",
"(",
"self",
",",
"cf_def",
")",
":",
"self",
".",
"_seqid",
"+=",
"1",
"d",
"=",
"self",
".",
"_reqs",
"[",
"self",
".",
"_seqid",
"]",
"=",
"defer",
".",
"Deferred",
"(",
")",
"self",
".",
"send_system_add_column_family",
"(",
"cf_def",
")",
"return",
"d"
] | 24.090909 | 15.545455 |
def load_library(lib, name=None, lib_cls=None):
"""Loads a library. Catches and logs exceptions.
Returns: the loaded library or None
arguments:
* lib -- path to/name of the library to be loaded
* name -- the library's identifier (for logging)
Defaults to None.
* lib_cls -- library class. Defaults to None (-> ctypes.CDLL).
"""
try:
if lib_cls:
return lib_cls(lib)
else:
return ctypes.CDLL(lib)
except Exception:
if name:
lib_msg = '%s (%s)' % (name, lib)
else:
lib_msg = lib
lib_msg += ' could not be loaded'
if sys.platform == 'cygwin':
lib_msg += ' in cygwin'
_LOGGER.error(lib_msg, exc_info=True)
return None | [
"def",
"load_library",
"(",
"lib",
",",
"name",
"=",
"None",
",",
"lib_cls",
"=",
"None",
")",
":",
"try",
":",
"if",
"lib_cls",
":",
"return",
"lib_cls",
"(",
"lib",
")",
"else",
":",
"return",
"ctypes",
".",
"CDLL",
"(",
"lib",
")",
"except",
"Exception",
":",
"if",
"name",
":",
"lib_msg",
"=",
"'%s (%s)'",
"%",
"(",
"name",
",",
"lib",
")",
"else",
":",
"lib_msg",
"=",
"lib",
"lib_msg",
"+=",
"' could not be loaded'",
"if",
"sys",
".",
"platform",
"==",
"'cygwin'",
":",
"lib_msg",
"+=",
"' in cygwin'",
"_LOGGER",
".",
"error",
"(",
"lib_msg",
",",
"exc_info",
"=",
"True",
")",
"return",
"None"
] | 28.035714 | 17.285714 |
def _read_proc_file(path, opts):
'''
Return a dict of JID metadata, or None
'''
serial = salt.payload.Serial(opts)
current_thread = threading.currentThread().name
pid = os.getpid()
with salt.utils.files.fopen(path, 'rb') as fp_:
buf = fp_.read()
fp_.close()
if buf:
data = serial.loads(buf)
else:
# Proc file is empty, remove
try:
os.remove(path)
except IOError:
log.debug('Unable to remove proc file %s.', path)
return None
if not isinstance(data, dict):
# Invalid serial object
return None
if not salt.utils.process.os_is_running(data['pid']):
# The process is no longer running, clear out the file and
# continue
try:
os.remove(path)
except IOError:
log.debug('Unable to remove proc file %s.', path)
return None
if opts.get('multiprocessing'):
if data.get('pid') == pid:
return None
else:
if data.get('pid') != pid:
try:
os.remove(path)
except IOError:
log.debug('Unable to remove proc file %s.', path)
return None
if data.get('jid') == current_thread:
return None
if not data.get('jid') in [x.name for x in threading.enumerate()]:
try:
os.remove(path)
except IOError:
log.debug('Unable to remove proc file %s.', path)
return None
if not _check_cmdline(data):
pid = data.get('pid')
if pid:
log.warning(
'PID %s exists but does not appear to be a salt process.', pid
)
try:
os.remove(path)
except IOError:
log.debug('Unable to remove proc file %s.', path)
return None
return data | [
"def",
"_read_proc_file",
"(",
"path",
",",
"opts",
")",
":",
"serial",
"=",
"salt",
".",
"payload",
".",
"Serial",
"(",
"opts",
")",
"current_thread",
"=",
"threading",
".",
"currentThread",
"(",
")",
".",
"name",
"pid",
"=",
"os",
".",
"getpid",
"(",
")",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"path",
",",
"'rb'",
")",
"as",
"fp_",
":",
"buf",
"=",
"fp_",
".",
"read",
"(",
")",
"fp_",
".",
"close",
"(",
")",
"if",
"buf",
":",
"data",
"=",
"serial",
".",
"loads",
"(",
"buf",
")",
"else",
":",
"# Proc file is empty, remove",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"IOError",
":",
"log",
".",
"debug",
"(",
"'Unable to remove proc file %s.'",
",",
"path",
")",
"return",
"None",
"if",
"not",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"# Invalid serial object",
"return",
"None",
"if",
"not",
"salt",
".",
"utils",
".",
"process",
".",
"os_is_running",
"(",
"data",
"[",
"'pid'",
"]",
")",
":",
"# The process is no longer running, clear out the file and",
"# continue",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"IOError",
":",
"log",
".",
"debug",
"(",
"'Unable to remove proc file %s.'",
",",
"path",
")",
"return",
"None",
"if",
"opts",
".",
"get",
"(",
"'multiprocessing'",
")",
":",
"if",
"data",
".",
"get",
"(",
"'pid'",
")",
"==",
"pid",
":",
"return",
"None",
"else",
":",
"if",
"data",
".",
"get",
"(",
"'pid'",
")",
"!=",
"pid",
":",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"IOError",
":",
"log",
".",
"debug",
"(",
"'Unable to remove proc file %s.'",
",",
"path",
")",
"return",
"None",
"if",
"data",
".",
"get",
"(",
"'jid'",
")",
"==",
"current_thread",
":",
"return",
"None",
"if",
"not",
"data",
".",
"get",
"(",
"'jid'",
")",
"in",
"[",
"x",
".",
"name",
"for",
"x",
"in",
"threading",
".",
"enumerate",
"(",
")",
"]",
":",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"IOError",
":",
"log",
".",
"debug",
"(",
"'Unable to remove proc file %s.'",
",",
"path",
")",
"return",
"None",
"if",
"not",
"_check_cmdline",
"(",
"data",
")",
":",
"pid",
"=",
"data",
".",
"get",
"(",
"'pid'",
")",
"if",
"pid",
":",
"log",
".",
"warning",
"(",
"'PID %s exists but does not appear to be a salt process.'",
",",
"pid",
")",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"IOError",
":",
"log",
".",
"debug",
"(",
"'Unable to remove proc file %s.'",
",",
"path",
")",
"return",
"None",
"return",
"data"
] | 30.721311 | 17.836066 |
def _geu16(ins):
    """Unsigned 16-bit >= comparison.

    Pops the top two operands off the stack and checks whether the 1st
    operand >= 2nd operand (top of the stack). Pushes 0 for False,
    1 for True.
    """
    asm = _16bit_oper(ins.quad[2], ins.quad[3])
    asm.extend([
        'or a',
        'sbc hl, de',
        'ccf',
        'sbc a, a',
        'push af',
    ])
    return asm
"def",
"_geu16",
"(",
"ins",
")",
":",
"output",
"=",
"_16bit_oper",
"(",
"ins",
".",
"quad",
"[",
"2",
"]",
",",
"ins",
".",
"quad",
"[",
"3",
"]",
")",
"output",
".",
"append",
"(",
"'or a'",
")",
"output",
".",
"append",
"(",
"'sbc hl, de'",
")",
"output",
".",
"append",
"(",
"'ccf'",
")",
"output",
".",
"append",
"(",
"'sbc a, a'",
")",
"output",
".",
"append",
"(",
"'push af'",
")",
"return",
"output"
] | 30.214286 | 18.071429 |
def parse_config(config_file):
    """Parse a YAML configuration file.

    Args:
        config_file: Path of the YAML file to read.

    Returns:
        The deserialized configuration object.

    Raises:
        IOError: If the file is missing or unreadable; a message is
            printed before the exception is re-raised.
    """
    try:
        with open(config_file, 'r') as f:
            # safe_load avoids the arbitrary-object construction that a
            # Loader-less yaml.load permits on untrusted input.
            return yaml.safe_load(f)
    except IOError:
        # print() works on both Python 2 and 3; the original
        # print-statement form is a SyntaxError on Python 3.
        print("Configuration file {} not found or not readable.".format(config_file))
        raise
"def",
"parse_config",
"(",
"config_file",
")",
":",
"try",
":",
"with",
"open",
"(",
"config_file",
",",
"'r'",
")",
"as",
"f",
":",
"return",
"yaml",
".",
"load",
"(",
"f",
")",
"except",
"IOError",
":",
"print",
"\"Configuration file {} not found or not readable.\"",
".",
"format",
"(",
"config_file",
")",
"raise"
] | 33.375 | 18 |
def _srm(self, data):
    """Expectation-Maximization algorithm for fitting the probabilistic SRM.
    Parameters
    ----------
    data : list of 2D arrays, element i has shape=[voxels_i, samples]
        Each element in the list contains the fMRI data of one subject.
    Returns
    -------
    sigma_s : array, shape=[features, features]
        The covariance :math:`\\Sigma_s` of the shared response Normal
        distribution.
    w : list of array, element i has shape=[voxels_i, features]
        The orthogonal transforms (mappings) :math:`W_i` for each subject.
    mu : list of array, element i has shape=[voxels_i]
        The voxel means :math:`\\mu_i` over the samples for each subject.
    rho2 : array, shape=[subjects]
        The estimated noise variance :math:`\\rho_i^2` for each subject
    s : array, shape=[features, samples]
        The shared response.
    """
    # NOTE(review): subjects not held by this MPI rank appear as None
    # entries in `data` (the `d is not None` filters below rely on this)
    # -- confirm against the caller's data-distribution scheme.
    local_min = min([d.shape[1] for d in data if d is not None],
                    default=sys.maxsize)
    samples = self.comm.allreduce(local_min, op=MPI.MIN)
    subjects = len(data)
    self.random_state_ = np.random.RandomState(self.rand_seed)
    random_states = [
        np.random.RandomState(self.random_state_.randint(2 ** 32))
        for i in range(len(data))]
    # Initialization step: initialize the outputs with initial values,
    # voxels with the number of voxels in each subject, and trace_xtx with
    # the ||X_i||_F^2 of each subject.
    w, voxels = _init_w_transforms(data, self.features, random_states,
                                   self.comm)
    x, mu, rho2, trace_xtx = self._init_structures(data, subjects)
    shared_response = np.zeros((self.features, samples))
    sigma_s = np.identity(self.features)
    rank = self.comm.Get_rank()
    # Main loop of the algorithm (runs the E/M updates for n_iter
    # iterations; rank 0 performs the global updates and broadcasts).
    for iteration in range(self.n_iter):
        logger.info('Iteration %d' % (iteration + 1))
        # E-step:
        # Sum the inverted rho2 elements for computing W^T * Psi^-1 * W
        if rank == 0:
            rho0 = (1 / rho2).sum()
            # Invert Sigma_s using Cholesky factorization
            (chol_sigma_s, lower_sigma_s) = scipy.linalg.cho_factor(
                sigma_s, check_finite=False)
            inv_sigma_s = scipy.linalg.cho_solve(
                (chol_sigma_s, lower_sigma_s), np.identity(self.features),
                check_finite=False)
            # Invert (Sigma_s + rho_0 * I) using Cholesky factorization
            sigma_s_rhos = inv_sigma_s + np.identity(self.features) * rho0
            chol_sigma_s_rhos, lower_sigma_s_rhos = \
                scipy.linalg.cho_factor(sigma_s_rhos,
                                        check_finite=False)
            inv_sigma_s_rhos = scipy.linalg.cho_solve(
                (chol_sigma_s_rhos, lower_sigma_s_rhos),
                np.identity(self.features), check_finite=False)
        # Compute the sum of W_i^T * rho_i^-2 * X_i, and the sum of traces
        # of X_i^T * rho_i^-2 * X_i
        wt_invpsi_x = np.zeros((self.features, samples))
        trace_xt_invsigma2_x = 0.0
        for subject in range(subjects):
            if data[subject] is not None:
                wt_invpsi_x += (w[subject].T.dot(x[subject])) \
                    / rho2[subject]
                trace_xt_invsigma2_x += trace_xtx[subject] / rho2[subject]
        # Partial sums from each rank are combined on rank 0 only.
        wt_invpsi_x = self.comm.reduce(wt_invpsi_x, op=MPI.SUM)
        trace_xt_invsigma2_x = self.comm.reduce(trace_xt_invsigma2_x,
                                                op=MPI.SUM)
        trace_sigma_s = None
        if rank == 0:
            log_det_psi = np.sum(np.log(rho2) * voxels)
            # Update the shared response
            shared_response = sigma_s.dot(
                np.identity(self.features) - rho0 * inv_sigma_s_rhos).dot(
                    wt_invpsi_x)
            # M-step
            # Update Sigma_s and compute its trace
            sigma_s = (inv_sigma_s_rhos
                       + shared_response.dot(shared_response.T) / samples)
            trace_sigma_s = samples * np.trace(sigma_s)
        shared_response = self.comm.bcast(shared_response)
        trace_sigma_s = self.comm.bcast(trace_sigma_s)
        # Update each subject's mapping transform W_i and error variance
        # rho_i^2
        for subject in range(subjects):
            if x[subject] is not None:
                a_subject = x[subject].dot(shared_response.T)
                # Small diagonal perturbation keeps the SVD away from
                # degenerate (rank-deficient) inputs.
                perturbation = np.zeros(a_subject.shape)
                np.fill_diagonal(perturbation, 0.001)
                u_subject, s_subject, v_subject = np.linalg.svd(
                    a_subject + perturbation, full_matrices=False)
                w[subject] = u_subject.dot(v_subject)
                rho2[subject] = trace_xtx[subject]
                rho2[subject] += -2 * np.sum(w[subject] * a_subject).sum()
                rho2[subject] += trace_sigma_s
                rho2[subject] /= samples * voxels[subject]
            else:
                # Zero contribution so the allreduce sums correctly.
                rho2[subject] = 0
        rho2 = self.comm.allreduce(rho2, op=MPI.SUM)
        if rank == 0:
            if logger.isEnabledFor(logging.INFO):
                # Calculate and log the current log-likelihood for checking
                # convergence
                loglike = self._likelihood(
                    chol_sigma_s_rhos, log_det_psi, chol_sigma_s,
                    trace_xt_invsigma2_x, inv_sigma_s_rhos, wt_invpsi_x,
                    samples)
                logger.info('Objective function %f' % loglike)
    sigma_s = self.comm.bcast(sigma_s)
    return sigma_s, w, mu, rho2, shared_response
"def",
"_srm",
"(",
"self",
",",
"data",
")",
":",
"local_min",
"=",
"min",
"(",
"[",
"d",
".",
"shape",
"[",
"1",
"]",
"for",
"d",
"in",
"data",
"if",
"d",
"is",
"not",
"None",
"]",
",",
"default",
"=",
"sys",
".",
"maxsize",
")",
"samples",
"=",
"self",
".",
"comm",
".",
"allreduce",
"(",
"local_min",
",",
"op",
"=",
"MPI",
".",
"MIN",
")",
"subjects",
"=",
"len",
"(",
"data",
")",
"self",
".",
"random_state_",
"=",
"np",
".",
"random",
".",
"RandomState",
"(",
"self",
".",
"rand_seed",
")",
"random_states",
"=",
"[",
"np",
".",
"random",
".",
"RandomState",
"(",
"self",
".",
"random_state_",
".",
"randint",
"(",
"2",
"**",
"32",
")",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data",
")",
")",
"]",
"# Initialization step: initialize the outputs with initial values,",
"# voxels with the number of voxels in each subject, and trace_xtx with",
"# the ||X_i||_F^2 of each subject.",
"w",
",",
"voxels",
"=",
"_init_w_transforms",
"(",
"data",
",",
"self",
".",
"features",
",",
"random_states",
",",
"self",
".",
"comm",
")",
"x",
",",
"mu",
",",
"rho2",
",",
"trace_xtx",
"=",
"self",
".",
"_init_structures",
"(",
"data",
",",
"subjects",
")",
"shared_response",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"features",
",",
"samples",
")",
")",
"sigma_s",
"=",
"np",
".",
"identity",
"(",
"self",
".",
"features",
")",
"rank",
"=",
"self",
".",
"comm",
".",
"Get_rank",
"(",
")",
"# Main loop of the algorithm (run",
"for",
"iteration",
"in",
"range",
"(",
"self",
".",
"n_iter",
")",
":",
"logger",
".",
"info",
"(",
"'Iteration %d'",
"%",
"(",
"iteration",
"+",
"1",
")",
")",
"# E-step:",
"# Sum the inverted the rho2 elements for computing W^T * Psi^-1 * W",
"if",
"rank",
"==",
"0",
":",
"rho0",
"=",
"(",
"1",
"/",
"rho2",
")",
".",
"sum",
"(",
")",
"# Invert Sigma_s using Cholesky factorization",
"(",
"chol_sigma_s",
",",
"lower_sigma_s",
")",
"=",
"scipy",
".",
"linalg",
".",
"cho_factor",
"(",
"sigma_s",
",",
"check_finite",
"=",
"False",
")",
"inv_sigma_s",
"=",
"scipy",
".",
"linalg",
".",
"cho_solve",
"(",
"(",
"chol_sigma_s",
",",
"lower_sigma_s",
")",
",",
"np",
".",
"identity",
"(",
"self",
".",
"features",
")",
",",
"check_finite",
"=",
"False",
")",
"# Invert (Sigma_s + rho_0 * I) using Cholesky factorization",
"sigma_s_rhos",
"=",
"inv_sigma_s",
"+",
"np",
".",
"identity",
"(",
"self",
".",
"features",
")",
"*",
"rho0",
"chol_sigma_s_rhos",
",",
"lower_sigma_s_rhos",
"=",
"scipy",
".",
"linalg",
".",
"cho_factor",
"(",
"sigma_s_rhos",
",",
"check_finite",
"=",
"False",
")",
"inv_sigma_s_rhos",
"=",
"scipy",
".",
"linalg",
".",
"cho_solve",
"(",
"(",
"chol_sigma_s_rhos",
",",
"lower_sigma_s_rhos",
")",
",",
"np",
".",
"identity",
"(",
"self",
".",
"features",
")",
",",
"check_finite",
"=",
"False",
")",
"# Compute the sum of W_i^T * rho_i^-2 * X_i, and the sum of traces",
"# of X_i^T * rho_i^-2 * X_i",
"wt_invpsi_x",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"features",
",",
"samples",
")",
")",
"trace_xt_invsigma2_x",
"=",
"0.0",
"for",
"subject",
"in",
"range",
"(",
"subjects",
")",
":",
"if",
"data",
"[",
"subject",
"]",
"is",
"not",
"None",
":",
"wt_invpsi_x",
"+=",
"(",
"w",
"[",
"subject",
"]",
".",
"T",
".",
"dot",
"(",
"x",
"[",
"subject",
"]",
")",
")",
"/",
"rho2",
"[",
"subject",
"]",
"trace_xt_invsigma2_x",
"+=",
"trace_xtx",
"[",
"subject",
"]",
"/",
"rho2",
"[",
"subject",
"]",
"wt_invpsi_x",
"=",
"self",
".",
"comm",
".",
"reduce",
"(",
"wt_invpsi_x",
",",
"op",
"=",
"MPI",
".",
"SUM",
")",
"trace_xt_invsigma2_x",
"=",
"self",
".",
"comm",
".",
"reduce",
"(",
"trace_xt_invsigma2_x",
",",
"op",
"=",
"MPI",
".",
"SUM",
")",
"trace_sigma_s",
"=",
"None",
"if",
"rank",
"==",
"0",
":",
"log_det_psi",
"=",
"np",
".",
"sum",
"(",
"np",
".",
"log",
"(",
"rho2",
")",
"*",
"voxels",
")",
"# Update the shared response",
"shared_response",
"=",
"sigma_s",
".",
"dot",
"(",
"np",
".",
"identity",
"(",
"self",
".",
"features",
")",
"-",
"rho0",
"*",
"inv_sigma_s_rhos",
")",
".",
"dot",
"(",
"wt_invpsi_x",
")",
"# M-step",
"# Update Sigma_s and compute its trace",
"sigma_s",
"=",
"(",
"inv_sigma_s_rhos",
"+",
"shared_response",
".",
"dot",
"(",
"shared_response",
".",
"T",
")",
"/",
"samples",
")",
"trace_sigma_s",
"=",
"samples",
"*",
"np",
".",
"trace",
"(",
"sigma_s",
")",
"shared_response",
"=",
"self",
".",
"comm",
".",
"bcast",
"(",
"shared_response",
")",
"trace_sigma_s",
"=",
"self",
".",
"comm",
".",
"bcast",
"(",
"trace_sigma_s",
")",
"# Update each subject's mapping transform W_i and error variance",
"# rho_i^2",
"for",
"subject",
"in",
"range",
"(",
"subjects",
")",
":",
"if",
"x",
"[",
"subject",
"]",
"is",
"not",
"None",
":",
"a_subject",
"=",
"x",
"[",
"subject",
"]",
".",
"dot",
"(",
"shared_response",
".",
"T",
")",
"perturbation",
"=",
"np",
".",
"zeros",
"(",
"a_subject",
".",
"shape",
")",
"np",
".",
"fill_diagonal",
"(",
"perturbation",
",",
"0.001",
")",
"u_subject",
",",
"s_subject",
",",
"v_subject",
"=",
"np",
".",
"linalg",
".",
"svd",
"(",
"a_subject",
"+",
"perturbation",
",",
"full_matrices",
"=",
"False",
")",
"w",
"[",
"subject",
"]",
"=",
"u_subject",
".",
"dot",
"(",
"v_subject",
")",
"rho2",
"[",
"subject",
"]",
"=",
"trace_xtx",
"[",
"subject",
"]",
"rho2",
"[",
"subject",
"]",
"+=",
"-",
"2",
"*",
"np",
".",
"sum",
"(",
"w",
"[",
"subject",
"]",
"*",
"a_subject",
")",
".",
"sum",
"(",
")",
"rho2",
"[",
"subject",
"]",
"+=",
"trace_sigma_s",
"rho2",
"[",
"subject",
"]",
"/=",
"samples",
"*",
"voxels",
"[",
"subject",
"]",
"else",
":",
"rho2",
"[",
"subject",
"]",
"=",
"0",
"rho2",
"=",
"self",
".",
"comm",
".",
"allreduce",
"(",
"rho2",
",",
"op",
"=",
"MPI",
".",
"SUM",
")",
"if",
"rank",
"==",
"0",
":",
"if",
"logger",
".",
"isEnabledFor",
"(",
"logging",
".",
"INFO",
")",
":",
"# Calculate and log the current log-likelihood for checking",
"# convergence",
"loglike",
"=",
"self",
".",
"_likelihood",
"(",
"chol_sigma_s_rhos",
",",
"log_det_psi",
",",
"chol_sigma_s",
",",
"trace_xt_invsigma2_x",
",",
"inv_sigma_s_rhos",
",",
"wt_invpsi_x",
",",
"samples",
")",
"logger",
".",
"info",
"(",
"'Objective function %f'",
"%",
"loglike",
")",
"sigma_s",
"=",
"self",
".",
"comm",
".",
"bcast",
"(",
"sigma_s",
")",
"return",
"sigma_s",
",",
"w",
",",
"mu",
",",
"rho2",
",",
"shared_response"
] | 42.647482 | 22.841727 |
def normalized_messages(self, no_field_name='_entity'):
    """Return all the error messages as a dictionary.

    Dict-shaped messages are returned unchanged; otherwise the messages
    are keyed by each known field name, or by *no_field_name* when no
    field names are available.
    """
    messages = self.messages
    if isinstance(messages, dict):
        return messages
    if self.field_names:
        return {name: messages for name in self.field_names}
    return {no_field_name: messages}
"def",
"normalized_messages",
"(",
"self",
",",
"no_field_name",
"=",
"'_entity'",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"messages",
",",
"dict",
")",
":",
"return",
"self",
".",
"messages",
"if",
"not",
"self",
".",
"field_names",
":",
"return",
"{",
"no_field_name",
":",
"self",
".",
"messages",
"}",
"return",
"dict",
"(",
"(",
"name",
",",
"self",
".",
"messages",
")",
"for",
"name",
"in",
"self",
".",
"field_names",
")"
] | 42.625 | 14.25 |
def cmd_print(self):
    """Return the raw lines to be printed, newline-terminated.

    An empty string is returned when there are no valid lines.
    """
    if not self._valid_lines:
        return ''
    raw = [entry.raw_line for entry in self._valid_lines]
    return '\n'.join(raw) + '\n'
"def",
"cmd_print",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_valid_lines",
":",
"return",
"''",
"return",
"'\\n'",
".",
"join",
"(",
"[",
"line",
".",
"raw_line",
"for",
"line",
"in",
"self",
".",
"_valid_lines",
"]",
")",
"+",
"'\\n'"
] | 40.4 | 16.8 |
def update(self, item, id_expression=None, upsert=False, update_ops=None, safe=None, **kwargs):
    ''' Update an item in the database.  Uses the on_update keyword to each
        field to decide which operations to do, or.
        :param item: An instance of a :class:`~ommongo.document.Document` \
            subclass
        :param id_expression: A query expression that uniquely picks out \
            the item which should be updated.  If id_expression is not \
            passed, update uses item.mongo_id.
        :param upsert: Whether the update operation should be an upsert. \
            If the item may not be in the database yet this should be True
        :param update_ops: By default the operation used to update a field \
            is specified with the on_update argument to its constructor. \
            To override that value, use this dictionary, with \
            :class:`~ommongo.document.QueryField` objects as the keys \
            and the mongo operation to use as the values.  Defaults to an \
            empty dict.
        :param safe: Whether to use safe writes; falls back to the \
            session-level default when ``None``.
        :param kwargs: The kwargs are merged into update_ops dict to \
            decide which fields to update the operation for.  These can \
            only be for the top-level document since the keys \
            are just strings.
        .. warning::
            This operation is **experimental** and **not fully tested**,
            although it does have code coverage.
    '''
    # A mutable default argument ({}) is shared across all calls; use a
    # None sentinel and allocate a fresh dict per call instead.
    if update_ops is None:
        update_ops = {}
    if safe is None:
        safe = self.safe
    self.queue.append(UpdateDocumentOp(self.transaction_id, self, item, safe, id_expression=id_expression,
                                       upsert=upsert, update_ops=update_ops, **kwargs))
    if self.autoflush:
        return self.flush()
"def",
"update",
"(",
"self",
",",
"item",
",",
"id_expression",
"=",
"None",
",",
"upsert",
"=",
"False",
",",
"update_ops",
"=",
"{",
"}",
",",
"safe",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"safe",
"is",
"None",
":",
"safe",
"=",
"self",
".",
"safe",
"self",
".",
"queue",
".",
"append",
"(",
"UpdateDocumentOp",
"(",
"self",
".",
"transaction_id",
",",
"self",
",",
"item",
",",
"safe",
",",
"id_expression",
"=",
"id_expression",
",",
"upsert",
"=",
"upsert",
",",
"update_ops",
"=",
"update_ops",
",",
"*",
"*",
"kwargs",
")",
")",
"if",
"self",
".",
"autoflush",
":",
"return",
"self",
".",
"flush",
"(",
")"
] | 46.09375 | 25.40625 |
def update_factor_providers(self, factor_name, name, body):
    """Update a Guardian factor provider configuration.

    Args:
        factor_name (str): Either push-notification or sms
        name (str): Name of the provider
        body (dict): Provider configuration.
            See: https://auth0.com/docs/api/management/v2#!/Guardian/put_twilio
    """
    endpoint = 'factors/{}/providers/{}'.format(factor_name, name)
    return self.client.put(self._url(endpoint), data=body)
"def",
"update_factor_providers",
"(",
"self",
",",
"factor_name",
",",
"name",
",",
"body",
")",
":",
"url",
"=",
"self",
".",
"_url",
"(",
"'factors/{}/providers/{}'",
".",
"format",
"(",
"factor_name",
",",
"name",
")",
")",
"return",
"self",
".",
"client",
".",
"put",
"(",
"url",
",",
"data",
"=",
"body",
")"
] | 37.923077 | 19.461538 |
def submit_post_searchquery(url, data, apikey):
    '''This submits a POST query to an LCC-Server search API endpoint.
    Handles streaming of the results, and returns the final JSON stream. Also
    handles results that time out.
    Parameters
    ----------
    url : str
        The URL of the search API endpoint to hit. This is something like
        `https://data.hatsurveys.org/api/conesearch`
    data : dict
        A dict of the search query parameters to pass to the search service.
    apikey : str
        The API key to use to access the search service. API keys are required
        for all POST request made to an LCC-Server's API endpoints.
    Returns
    -------
    (status_flag, data_dict, dataset_id) : tuple
        This returns a tuple containing the status of the request: ('complete',
        'failed', 'background', etc.), a dict parsed from the JSON result of the
        request, and a dataset ID, which can be used to reconstruct the URL on
        the LCC-Server where the results can be browsed.
    '''
    # first, we need to convert any columns and collections items to broken out
    # params
    postdata = {}
    for key in data:
        if key == 'columns':
            postdata['columns[]'] = data[key]
        elif key == 'collections':
            postdata['collections[]'] = data[key]
        else:
            postdata[key] = data[key]
    # do the urlencode with doseq=True
    # we also need to encode to bytes
    encoded_postdata = urlencode(postdata, doseq=True).encode()
    # if apikey is not None, add it in as an Authorization: Bearer [apikey]
    # header
    if apikey:
        # NOTE(review): 'Bearer: %s' includes a colon after 'Bearer';
        # RFC 6750 bearer auth uses 'Bearer <token>'. Confirm the
        # LCC-Server expects this exact form before changing it.
        headers = {'Authorization':'Bearer: %s' % apikey}
    else:
        headers = {}
    LOGINFO('submitting search query to LCC-Server API URL: %s' % url)
    try:
        # hit the server with a POST request
        req = Request(url, data=encoded_postdata, headers=headers)
        resp = urlopen(req)
        if resp.code == 200:
            # we'll iterate over the lines in the response
            # this works super-well for ND-JSON!
            # (note: `data` is rebound here from the input dict to each
            # parsed response line)
            for line in resp:
                data = json.loads(line)
                msg = data['message']
                status = data['status']
                if status != 'failed':
                    LOGINFO('status: %s, %s' % (status, msg))
                else:
                    LOGERROR('status: %s, %s' % (status, msg))
                # here, we'll decide what to do about the query
                # completed query or query sent to background...
                if status in ('ok','background'):
                    setid = data['result']['setid']
                    # save the data pickle to astrobase lccs directory
                    outpickle = os.path.join(os.path.expanduser('~'),
                                             '.astrobase',
                                             'lccs',
                                             'query-%s.pkl' % setid)
                    if not os.path.exists(os.path.dirname(outpickle)):
                        os.makedirs(os.path.dirname(outpickle))
                    with open(outpickle,'wb') as outfd:
                        pickle.dump(data, outfd, pickle.HIGHEST_PROTOCOL)
                    LOGINFO('saved query info to %s, use this to '
                            'download results later with '
                            'retrieve_dataset_files' % outpickle)
                    # we're done at this point, return
                    return status, data, data['result']['setid']
                # the query probably failed...
                elif status == 'failed':
                    # we're done at this point, return
                    return status, data, None
        # if the response was not OK, then we probably failed
        else:
            try:
                data = json.load(resp)
                msg = data['message']
                LOGERROR(msg)
                return 'failed', None, None
            except Exception as e:
                LOGEXCEPTION('failed to submit query to %s' % url)
                return 'failed', None, None
    except HTTPError as e:
        LOGERROR('could not submit query to LCC API at: %s' % url)
        LOGERROR('HTTP status code was %s, reason: %s' % (e.code, e.reason))
        return 'failed', None, None
"def",
"submit_post_searchquery",
"(",
"url",
",",
"data",
",",
"apikey",
")",
":",
"# first, we need to convert any columns and collections items to broken out",
"# params",
"postdata",
"=",
"{",
"}",
"for",
"key",
"in",
"data",
":",
"if",
"key",
"==",
"'columns'",
":",
"postdata",
"[",
"'columns[]'",
"]",
"=",
"data",
"[",
"key",
"]",
"elif",
"key",
"==",
"'collections'",
":",
"postdata",
"[",
"'collections[]'",
"]",
"=",
"data",
"[",
"key",
"]",
"else",
":",
"postdata",
"[",
"key",
"]",
"=",
"data",
"[",
"key",
"]",
"# do the urlencode with doseq=True",
"# we also need to encode to bytes",
"encoded_postdata",
"=",
"urlencode",
"(",
"postdata",
",",
"doseq",
"=",
"True",
")",
".",
"encode",
"(",
")",
"# if apikey is not None, add it in as an Authorization: Bearer [apikey]",
"# header",
"if",
"apikey",
":",
"headers",
"=",
"{",
"'Authorization'",
":",
"'Bearer: %s'",
"%",
"apikey",
"}",
"else",
":",
"headers",
"=",
"{",
"}",
"LOGINFO",
"(",
"'submitting search query to LCC-Server API URL: %s'",
"%",
"url",
")",
"try",
":",
"# hit the server with a POST request",
"req",
"=",
"Request",
"(",
"url",
",",
"data",
"=",
"encoded_postdata",
",",
"headers",
"=",
"headers",
")",
"resp",
"=",
"urlopen",
"(",
"req",
")",
"if",
"resp",
".",
"code",
"==",
"200",
":",
"# we'll iterate over the lines in the response",
"# this works super-well for ND-JSON!",
"for",
"line",
"in",
"resp",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"line",
")",
"msg",
"=",
"data",
"[",
"'message'",
"]",
"status",
"=",
"data",
"[",
"'status'",
"]",
"if",
"status",
"!=",
"'failed'",
":",
"LOGINFO",
"(",
"'status: %s, %s'",
"%",
"(",
"status",
",",
"msg",
")",
")",
"else",
":",
"LOGERROR",
"(",
"'status: %s, %s'",
"%",
"(",
"status",
",",
"msg",
")",
")",
"# here, we'll decide what to do about the query",
"# completed query or query sent to background...",
"if",
"status",
"in",
"(",
"'ok'",
",",
"'background'",
")",
":",
"setid",
"=",
"data",
"[",
"'result'",
"]",
"[",
"'setid'",
"]",
"# save the data pickle to astrobase lccs directory",
"outpickle",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~'",
")",
",",
"'.astrobase'",
",",
"'lccs'",
",",
"'query-%s.pkl'",
"%",
"setid",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"outpickle",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"outpickle",
")",
")",
"with",
"open",
"(",
"outpickle",
",",
"'wb'",
")",
"as",
"outfd",
":",
"pickle",
".",
"dump",
"(",
"data",
",",
"outfd",
",",
"pickle",
".",
"HIGHEST_PROTOCOL",
")",
"LOGINFO",
"(",
"'saved query info to %s, use this to '",
"'download results later with '",
"'retrieve_dataset_files'",
"%",
"outpickle",
")",
"# we're done at this point, return",
"return",
"status",
",",
"data",
",",
"data",
"[",
"'result'",
"]",
"[",
"'setid'",
"]",
"# the query probably failed...",
"elif",
"status",
"==",
"'failed'",
":",
"# we're done at this point, return",
"return",
"status",
",",
"data",
",",
"None",
"# if the response was not OK, then we probably failed",
"else",
":",
"try",
":",
"data",
"=",
"json",
".",
"load",
"(",
"resp",
")",
"msg",
"=",
"data",
"[",
"'message'",
"]",
"LOGERROR",
"(",
"msg",
")",
"return",
"'failed'",
",",
"None",
",",
"None",
"except",
"Exception",
"as",
"e",
":",
"LOGEXCEPTION",
"(",
"'failed to submit query to %s'",
"%",
"url",
")",
"return",
"'failed'",
",",
"None",
",",
"None",
"except",
"HTTPError",
"as",
"e",
":",
"LOGERROR",
"(",
"'could not submit query to LCC API at: %s'",
"%",
"url",
")",
"LOGERROR",
"(",
"'HTTP status code was %s, reason: %s'",
"%",
"(",
"e",
".",
"code",
",",
"e",
".",
"reason",
")",
")",
"return",
"'failed'",
",",
"None",
",",
"None"
] | 33.140625 | 25.015625 |
def _assert_valid_start(start_int, count_int, total_int):
    """Assert that the requested start position falls before the number of
    objects visible to the active subject, so a valid slice can be created.

    Raises InvalidRequest when the slice would start at or past the total.
    """
    if not total_int:
        return
    if start_int < total_int:
        return
    raise d1_common.types.exceptions.InvalidRequest(
        0,
        'Requested a non-existing slice. start={} count={} total={}'.format(
            start_int, count_int, total_int
        ),
    )
"def",
"_assert_valid_start",
"(",
"start_int",
",",
"count_int",
",",
"total_int",
")",
":",
"if",
"total_int",
"and",
"start_int",
">=",
"total_int",
":",
"raise",
"d1_common",
".",
"types",
".",
"exceptions",
".",
"InvalidRequest",
"(",
"0",
",",
"'Requested a non-existing slice. start={} count={} total={}'",
".",
"format",
"(",
"start_int",
",",
"count_int",
",",
"total_int",
")",
",",
")"
] | 37.142857 | 19.571429 |
def duplicate(self):
    '''
    Return a copy of the current address.
    @returns: Address
    '''
    cls = self.__class__
    return cls(
        street_address=self.street_address,
        city=self.city,
        zipcode=self.zipcode,
        state=self.state,
        country=self.country,
    )
"def",
"duplicate",
"(",
"self",
")",
":",
"return",
"self",
".",
"__class__",
"(",
"street_address",
"=",
"self",
".",
"street_address",
",",
"city",
"=",
"self",
".",
"city",
",",
"zipcode",
"=",
"self",
".",
"zipcode",
",",
"state",
"=",
"self",
".",
"state",
",",
"country",
"=",
"self",
".",
"country",
")"
] | 39.25 | 22.5 |
def has_offline_historical_manager_or_raise(self):
    """Raise OfflineHistoricalManagerError when the instance's historical
    model declares a `history_id` field that is not a UUIDField.

    Note: expected to use edc_model.HistoricalRecords instead of
    simple_history.HistoricalRecords.
    """
    try:
        model = self.instance.__class__.history.model
    except AttributeError:
        # Model has no history manager; validate the model itself.
        model = self.instance.__class__
    history_id_fields = [f for f in model._meta.fields if f.name == "history_id"]
    if history_id_fields and not isinstance(history_id_fields[0], UUIDField):
        raise OfflineHistoricalManagerError(
            f"Field 'history_id' of historical model "
            f"'{model._meta.app_label}.{model._meta.model_name}' "
            "must be an UUIDfield. "
            "For history = HistoricalRecords() use edc_model.HistoricalRecords instead of "
            "simple_history.HistoricalRecords(). "
            f"See '{self.instance._meta.app_label}.{self.instance._meta.model_name}'."
        )
"def",
"has_offline_historical_manager_or_raise",
"(",
"self",
")",
":",
"try",
":",
"model",
"=",
"self",
".",
"instance",
".",
"__class__",
".",
"history",
".",
"model",
"except",
"AttributeError",
":",
"model",
"=",
"self",
".",
"instance",
".",
"__class__",
"field",
"=",
"[",
"field",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"fields",
"if",
"field",
".",
"name",
"==",
"\"history_id\"",
"]",
"if",
"field",
"and",
"not",
"isinstance",
"(",
"field",
"[",
"0",
"]",
",",
"UUIDField",
")",
":",
"raise",
"OfflineHistoricalManagerError",
"(",
"f\"Field 'history_id' of historical model \"",
"f\"'{model._meta.app_label}.{model._meta.model_name}' \"",
"\"must be an UUIDfield. \"",
"\"For history = HistoricalRecords() use edc_model.HistoricalRecords instead of \"",
"\"simple_history.HistoricalRecords(). \"",
"f\"See '{self.instance._meta.app_label}.{self.instance._meta.model_name}'.\"",
")"
] | 49.666667 | 19.809524 |
def scale(self, x, y=None, z=None):
    """Scale the top three rows of the matrix in place by (x, y, z).

    Omitted y/z factors default to x, so a single argument performs a
    uniform scale. Returns self for chaining.
    """
    sy = x if y is None else y
    sz = x if z is None else z
    factors = (x, sy, sz)
    # The bottom (translation/homogeneous) row is left untouched.
    for row in range(3):
        for col in range(4):
            self[row, col] *= factors[row]
    return self
"def",
"scale",
"(",
"self",
",",
"x",
",",
"y",
"=",
"None",
",",
"z",
"=",
"None",
")",
":",
"if",
"y",
"is",
"None",
":",
"y",
"=",
"x",
"if",
"z",
"is",
"None",
":",
"z",
"=",
"x",
"m",
"=",
"self",
"for",
"col",
"in",
"range",
"(",
"4",
")",
":",
"# Only the top three rows\r",
"m",
"[",
"0",
",",
"col",
"]",
"*=",
"x",
"m",
"[",
"1",
",",
"col",
"]",
"*=",
"y",
"m",
"[",
"2",
",",
"col",
"]",
"*=",
"z",
"return",
"self"
] | 27.307692 | 15.461538 |
def get_search_fields(self, exclude=None):
    """
    Get the fields for searching for an item.

    Args:
        exclude: Optional iterable of field names to exclude. When more
            than one search field is configured, the search fields
            themselves are also excluded.

    Returns:
        The result of ``get_filter_fields`` for the computed exclusion set.
    """
    # The original called set(exclude) unconditionally, which raises
    # TypeError for the documented default of None.
    exclude = set(exclude) if exclude is not None else set()
    if self.search_fields and len(self.search_fields) > 1:
        exclude = exclude.union(self.search_fields)
    return self.get_filter_fields(exclude=exclude)
"def",
"get_search_fields",
"(",
"self",
",",
"exclude",
"=",
"None",
")",
":",
"exclude",
"=",
"set",
"(",
"exclude",
")",
"if",
"self",
".",
"search_fields",
"and",
"len",
"(",
"self",
".",
"search_fields",
")",
">",
"1",
":",
"exclude",
"=",
"exclude",
".",
"union",
"(",
"self",
".",
"search_fields",
")",
"return",
"self",
".",
"get_filter_fields",
"(",
"exclude",
"=",
"exclude",
")"
] | 34.888889 | 12.444444 |
def item_huisnummer_adapter(obj, request):
    """
    Adapter for rendering an object of
    :class:`crabpy.gateway.crab.Huisnummer` to json.
    """
    def describe(item):
        # id/naam/definitie triple shared by status, bewerking, organisatie.
        return {
            'id': item.id,
            'naam': item.naam,
            'definitie': item.definitie
        }
    meta = obj.metadata
    return {
        'id': obj.id,
        'huisnummer': obj.huisnummer,
        'postadres': obj.postadres,
        'status': describe(obj.status),
        'metadata': {
            'begin_tijd': meta.begin_tijd,
            'begin_datum': meta.begin_datum,
            'begin_bewerking': describe(meta.begin_bewerking),
            'begin_organisatie': describe(meta.begin_organisatie)
        },
        'bounding_box': obj.bounding_box
    }
"def",
"item_huisnummer_adapter",
"(",
"obj",
",",
"request",
")",
":",
"return",
"{",
"'id'",
":",
"obj",
".",
"id",
",",
"'huisnummer'",
":",
"obj",
".",
"huisnummer",
",",
"'postadres'",
":",
"obj",
".",
"postadres",
",",
"'status'",
":",
"{",
"'id'",
":",
"obj",
".",
"status",
".",
"id",
",",
"'naam'",
":",
"obj",
".",
"status",
".",
"naam",
",",
"'definitie'",
":",
"obj",
".",
"status",
".",
"definitie",
"}",
",",
"'metadata'",
":",
"{",
"'begin_tijd'",
":",
"obj",
".",
"metadata",
".",
"begin_tijd",
",",
"'begin_datum'",
":",
"obj",
".",
"metadata",
".",
"begin_datum",
",",
"'begin_bewerking'",
":",
"{",
"'id'",
":",
"obj",
".",
"metadata",
".",
"begin_bewerking",
".",
"id",
",",
"'naam'",
":",
"obj",
".",
"metadata",
".",
"begin_bewerking",
".",
"naam",
",",
"'definitie'",
":",
"obj",
".",
"metadata",
".",
"begin_bewerking",
".",
"definitie",
"}",
",",
"'begin_organisatie'",
":",
"{",
"'id'",
":",
"obj",
".",
"metadata",
".",
"begin_organisatie",
".",
"id",
",",
"'naam'",
":",
"obj",
".",
"metadata",
".",
"begin_organisatie",
".",
"naam",
",",
"'definitie'",
":",
"obj",
".",
"metadata",
".",
"begin_organisatie",
".",
"definitie",
"}",
"}",
",",
"'bounding_box'",
":",
"obj",
".",
"bounding_box",
"}"
] | 34.266667 | 14.533333 |
def invalid_type_error(method_name, arg_name, got_value, expected_type,
                       version='0.13.0'):
    """Raise a CompilationException when an adapter method available to macros
    has changed.
    """
    template = ("As of {version}, 'adapter.{method_name}' expects argument "
                "'{arg_name}' to be of type '{expected_type}', instead got "
                "{got_value} ({got_type})")
    message = template.format(
        version=version,
        method_name=method_name,
        arg_name=arg_name,
        expected_type=expected_type,
        got_value=got_value,
        got_type=type(got_value),
    )
    raise_compiler_error(message)
"def",
"invalid_type_error",
"(",
"method_name",
",",
"arg_name",
",",
"got_value",
",",
"expected_type",
",",
"version",
"=",
"'0.13.0'",
")",
":",
"got_type",
"=",
"type",
"(",
"got_value",
")",
"msg",
"=",
"(",
"\"As of {version}, 'adapter.{method_name}' expects argument \"",
"\"'{arg_name}' to be of type '{expected_type}', instead got \"",
"\"{got_value} ({got_type})\"",
")",
"raise_compiler_error",
"(",
"msg",
".",
"format",
"(",
"version",
"=",
"version",
",",
"method_name",
"=",
"method_name",
",",
"arg_name",
"=",
"arg_name",
",",
"expected_type",
"=",
"expected_type",
",",
"got_value",
"=",
"got_value",
",",
"got_type",
"=",
"got_type",
")",
")"
] | 53.083333 | 18.666667 |
def set_precision(predictions, labels,
                  weights_fn=common_layers.weights_nonzero):
  """Precision of set predictions.

  Args:
    predictions : A Tensor of scores of shape [batch, nlabels].
    labels: A Tensor of int32s giving true set elements,
      of shape [batch, seq_length].
    weights_fn: A function to weight the elements.

  Returns:
    hits: A Tensor of shape [batch, nlabels].
    weights: A Tensor of shape [batch, nlabels].
  """
  with tf.variable_scope("set_precision", values=[predictions, labels]):
    true_elems = tf.squeeze(labels, [2, 3])
    weights = weights_fn(true_elems)
    # Collapse the per-position one-hot encodings into a set-membership
    # indicator over the label vocabulary.
    membership = tf.reduce_max(
        tf.one_hot(true_elems, predictions.shape[-1]), axis=1)
    membership = tf.cast(membership, tf.bool)
    return tf.to_float(tf.equal(membership, predictions)), weights
"def",
"set_precision",
"(",
"predictions",
",",
"labels",
",",
"weights_fn",
"=",
"common_layers",
".",
"weights_nonzero",
")",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"\"set_precision\"",
",",
"values",
"=",
"[",
"predictions",
",",
"labels",
"]",
")",
":",
"labels",
"=",
"tf",
".",
"squeeze",
"(",
"labels",
",",
"[",
"2",
",",
"3",
"]",
")",
"weights",
"=",
"weights_fn",
"(",
"labels",
")",
"labels",
"=",
"tf",
".",
"one_hot",
"(",
"labels",
",",
"predictions",
".",
"shape",
"[",
"-",
"1",
"]",
")",
"labels",
"=",
"tf",
".",
"reduce_max",
"(",
"labels",
",",
"axis",
"=",
"1",
")",
"labels",
"=",
"tf",
".",
"cast",
"(",
"labels",
",",
"tf",
".",
"bool",
")",
"return",
"tf",
".",
"to_float",
"(",
"tf",
".",
"equal",
"(",
"labels",
",",
"predictions",
")",
")",
",",
"weights"
] | 37.571429 | 14.952381 |
def _w_within_shard(args: Dict[str, Any]):
"""Applies a W gate when the gate acts only within a shard."""
index = args['index']
half_turns = args['half_turns']
axis_half_turns = args['axis_half_turns']
state = _state_shard(args)
pm_vect = _pm_vects(args)[index]
num_shard_qubits = args['num_shard_qubits']
shard_size = 2 ** num_shard_qubits
reshape_tuple = (2 ** (num_shard_qubits - 1 - index), 2, 2 ** index)
perm_state = np.reshape(
np.reshape(state, reshape_tuple)[:, ::-1, :], shard_size)
cos = np.cos(-0.5 * np.pi * half_turns)
sin = np.sin(-0.5 * np.pi * half_turns)
cos_axis = np.cos(np.pi * axis_half_turns)
sin_axis = np.sin(np.pi * axis_half_turns)
new_state = cos * state + 1j * sin * perm_state * (
cos_axis - 1j * sin_axis * pm_vect)
np.copyto(state, new_state) | [
"def",
"_w_within_shard",
"(",
"args",
":",
"Dict",
"[",
"str",
",",
"Any",
"]",
")",
":",
"index",
"=",
"args",
"[",
"'index'",
"]",
"half_turns",
"=",
"args",
"[",
"'half_turns'",
"]",
"axis_half_turns",
"=",
"args",
"[",
"'axis_half_turns'",
"]",
"state",
"=",
"_state_shard",
"(",
"args",
")",
"pm_vect",
"=",
"_pm_vects",
"(",
"args",
")",
"[",
"index",
"]",
"num_shard_qubits",
"=",
"args",
"[",
"'num_shard_qubits'",
"]",
"shard_size",
"=",
"2",
"**",
"num_shard_qubits",
"reshape_tuple",
"=",
"(",
"2",
"**",
"(",
"num_shard_qubits",
"-",
"1",
"-",
"index",
")",
",",
"2",
",",
"2",
"**",
"index",
")",
"perm_state",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"reshape",
"(",
"state",
",",
"reshape_tuple",
")",
"[",
":",
",",
":",
":",
"-",
"1",
",",
":",
"]",
",",
"shard_size",
")",
"cos",
"=",
"np",
".",
"cos",
"(",
"-",
"0.5",
"*",
"np",
".",
"pi",
"*",
"half_turns",
")",
"sin",
"=",
"np",
".",
"sin",
"(",
"-",
"0.5",
"*",
"np",
".",
"pi",
"*",
"half_turns",
")",
"cos_axis",
"=",
"np",
".",
"cos",
"(",
"np",
".",
"pi",
"*",
"axis_half_turns",
")",
"sin_axis",
"=",
"np",
".",
"sin",
"(",
"np",
".",
"pi",
"*",
"axis_half_turns",
")",
"new_state",
"=",
"cos",
"*",
"state",
"+",
"1j",
"*",
"sin",
"*",
"perm_state",
"*",
"(",
"cos_axis",
"-",
"1j",
"*",
"sin_axis",
"*",
"pm_vect",
")",
"np",
".",
"copyto",
"(",
"state",
",",
"new_state",
")"
] | 38 | 12.909091 |
def windows_dir_format(host_dir, user):
"""Format a string for the location of the user's folder on the Windows (TJ03) fileserver."""
if user and user.grade:
grade = int(user.grade)
else:
return host_dir
if grade in range(9, 13):
win_path = "/{}/".format(user.username)
else:
win_path = ""
return host_dir.replace("{win}", win_path) | [
"def",
"windows_dir_format",
"(",
"host_dir",
",",
"user",
")",
":",
"if",
"user",
"and",
"user",
".",
"grade",
":",
"grade",
"=",
"int",
"(",
"user",
".",
"grade",
")",
"else",
":",
"return",
"host_dir",
"if",
"grade",
"in",
"range",
"(",
"9",
",",
"13",
")",
":",
"win_path",
"=",
"\"/{}/\"",
".",
"format",
"(",
"user",
".",
"username",
")",
"else",
":",
"win_path",
"=",
"\"\"",
"return",
"host_dir",
".",
"replace",
"(",
"\"{win}\"",
",",
"win_path",
")"
] | 31.5 | 15.416667 |
def search_range(self, value):
"""Set private ``_search_range`` and reset ``_block_matcher``."""
if value == 0 or not value % 16:
self._search_range = value
else:
raise InvalidSearchRangeError("Search range must be a multiple of "
"16.")
self._replace_bm() | [
"def",
"search_range",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"==",
"0",
"or",
"not",
"value",
"%",
"16",
":",
"self",
".",
"_search_range",
"=",
"value",
"else",
":",
"raise",
"InvalidSearchRangeError",
"(",
"\"Search range must be a multiple of \"",
"\"16.\"",
")",
"self",
".",
"_replace_bm",
"(",
")"
] | 43.375 | 12.5 |
def mapColorRampToValues(cls, colorRamp, minValue, maxValue, alpha=1.0):
"""
Creates color ramp based on min and max values of all the raster pixels from all rasters. If pixel value is one
of the no data values it will be excluded in the color ramp interpolation. Returns colorRamp, slope, intercept
:param colorRamp: A list of RGB tuples representing a color ramp (e.g.: results of either generate color ramp method)
:param minValue: Minimum value of range of values to map to color ramp
:param maxValue: Maximum value of range of values to map to color ramp
:param alpha: Decimal representing transparency (e.g.: 0.8)
:rtype : MappedColorRamp
"""
minRampIndex = 0 # Always zero
maxRampIndex = float(len(colorRamp) - 1) # Map color ramp indices to values using equation of a line
# Resulting equation will be:
# rampIndex = slope * value + intercept
if minValue != maxValue:
slope = (maxRampIndex - minRampIndex) / (maxValue - minValue)
intercept = maxRampIndex - (slope * maxValue)
else:
slope = 0
intercept = minRampIndex
# Return color ramp, slope, and intercept to interpolate by value
mappedColorRamp = MappedColorRamp(colorRamp=colorRamp,
slope=slope,
intercept=intercept,
min=minValue,
max=maxValue,
alpha=alpha)
return mappedColorRamp | [
"def",
"mapColorRampToValues",
"(",
"cls",
",",
"colorRamp",
",",
"minValue",
",",
"maxValue",
",",
"alpha",
"=",
"1.0",
")",
":",
"minRampIndex",
"=",
"0",
"# Always zero",
"maxRampIndex",
"=",
"float",
"(",
"len",
"(",
"colorRamp",
")",
"-",
"1",
")",
"# Map color ramp indices to values using equation of a line",
"# Resulting equation will be:",
"# rampIndex = slope * value + intercept",
"if",
"minValue",
"!=",
"maxValue",
":",
"slope",
"=",
"(",
"maxRampIndex",
"-",
"minRampIndex",
")",
"/",
"(",
"maxValue",
"-",
"minValue",
")",
"intercept",
"=",
"maxRampIndex",
"-",
"(",
"slope",
"*",
"maxValue",
")",
"else",
":",
"slope",
"=",
"0",
"intercept",
"=",
"minRampIndex",
"# Return color ramp, slope, and intercept to interpolate by value",
"mappedColorRamp",
"=",
"MappedColorRamp",
"(",
"colorRamp",
"=",
"colorRamp",
",",
"slope",
"=",
"slope",
",",
"intercept",
"=",
"intercept",
",",
"min",
"=",
"minValue",
",",
"max",
"=",
"maxValue",
",",
"alpha",
"=",
"alpha",
")",
"return",
"mappedColorRamp"
] | 49.090909 | 27.818182 |
def operation(func=None, pipeline_facts=None):
'''
Decorator that takes a simple module function and turn it into the internal
operation representation that consists of a list of commands + options
(sudo, (sudo|su)_user, env).
'''
# If not decorating, return function with config attached
if func is None:
def decorator(f):
setattr(f, 'pipeline_facts', pipeline_facts)
return operation(f)
return decorator
# Index the operation!
module_bits = func.__module__.split('.')
module_name = module_bits[-1]
op_name = '.'.join((module_name, func.__name__))
OPERATIONS.append(op_name)
# Actually decorate!
@wraps(func)
def decorated_func(*args, **kwargs):
# Prepare state/host
#
# If we're in CLI mode, there's no state/host passed down, we need to
# use the global "pseudo" modules.
if len(args) < 2 or not (
isinstance(args[0], (State, PseudoModule))
and isinstance(args[1], (Host, PseudoModule))
):
state = pseudo_state._module
host = pseudo_host._module
if state.in_op:
raise PyinfraError((
'Nested operation called without state/host: {0} ({1})'
).format(op_name, _get_call_location()))
if state.in_deploy:
raise PyinfraError((
'Nested deploy operation called without state/host: {0} ({1})'
).format(op_name, _get_call_location()))
# Otherwise (API mode) we just trim off the commands
else:
args_copy = list(args)
state, host = args[0], args[1]
args = args_copy[2:]
# In API mode we have the kwarg - if a nested operation call we have
# current_frameinfo.
frameinfo = kwargs.pop('frameinfo', get_caller_frameinfo())
# Configure operation
#
# Name the operation
names = None
autoname = False
# Look for a set as the first argument
if len(args) > 0 and isinstance(args[0], set):
names = args[0]
args_copy = list(args)
args = args[1:]
# Generate an operation name if needed (Module/Operation format)
else:
autoname = True
module_bits = func.__module__.split('.')
module_name = module_bits[-1]
names = {
'{0}/{1}'.format(module_name.title(), func.__name__.title()),
}
if state.deploy_name:
names = {
'{0} | {1}'.format(state.deploy_name, name)
for name in names
}
# Get the meta kwargs (globals that apply to all hosts)
op_meta_kwargs = pop_op_kwargs(state, kwargs)
# If this op is being called inside another, just return here
# (any unwanted/op-related kwargs removed above).
if state.in_op:
return func(state, host, *args, **kwargs) or []
filename = frameinfo.filename
line_number = frameinfo.lineno
# Figure out the lines this operation was called from (essentially like
# a line-call-stack).
op_lines = [line_number]
if state.deploy_line_numbers:
op_lines = list(state.deploy_line_numbers) + op_lines
# Inject the current op file number (only incremented in CLI mode)
op_lines.insert(0, state.current_op_file)
# Make a hash from the call stack lines
op_hash = make_hash(op_lines)
# Avoid adding duplicates! This happens if an operation is called within
# a loop - such that the filename/lineno/code _are_ the same, but the
# arguments might be different. We just append an increasing number to
# the op hash and also handle below with the op order.
host_op_hashes = state.meta[host]['op_hashes']
duplicate_op_count = 0
while op_hash in host_op_hashes:
logger.debug('Duplicate hash ({0}) detected!'.format(op_hash))
op_hash = '{0}-{1}'.format(op_hash, duplicate_op_count)
duplicate_op_count += 1
host_op_hashes.add(op_hash)
if duplicate_op_count:
op_lines.append(duplicate_op_count)
op_lines = tuple(op_lines)
state.op_line_numbers_to_hash[op_lines] = op_hash
logger.debug('Adding operation, {0}, called @ {1}:{2}, opLines={3}, opHash={4}'.format(
names, filename, line_number, op_lines, op_hash,
))
# Ensure shared (between servers) operation meta
op_meta = state.op_meta.setdefault(op_hash, {
'names': set(),
'args': [],
})
# Add any meta kwargs (sudo, etc) to the meta - first parse any strings
# as jinja templates.
actual_op_meta_kwargs = {
key: get_arg_value(state, host, a)
for key, a in six.iteritems(op_meta_kwargs)
}
op_meta.update(actual_op_meta_kwargs)
# Add any new names to the set
op_meta['names'].update(names)
# Attach normal args, if we're auto-naming this operation
if autoname:
for arg in args:
if isinstance(arg, FunctionType):
arg = arg.__name__
if arg not in op_meta['args']:
op_meta['args'].append(arg)
# Attach keyword args
for key, value in six.iteritems(kwargs):
arg = '='.join((str(key), str(value)))
if arg not in op_meta['args']:
op_meta['args'].append(arg)
# Check if we're actually running the operation on this host
#
# Run once and we've already added meta for this op? Stop here.
if op_meta_kwargs['run_once']:
has_run = False
for ops in six.itervalues(state.ops):
if op_hash in ops:
has_run = True
break
if has_run:
return OperationMeta(op_hash)
# If we're limited, stop here - *after* we've created op_meta. This
# ensures the meta object always exists, even if no hosts actually ever
# execute the op (due to limit or otherwise).
hosts = op_meta_kwargs['hosts']
when = op_meta_kwargs['when']
if (
# Limited by the state's limit_hosts?
(state.limit_hosts is not None and host not in state.limit_hosts)
# Limited by the operation kwarg hosts?
or (hosts is not None and host not in hosts)
# Limited by the operation kwarg when? We check == because when is
# normally attribute wrapped as a AttrDataBool, which is actually
# an integer (Python doesn't allow subclassing bool!).
or when == False # noqa
):
return OperationMeta(op_hash)
# "Run" operation
#
# Otherwise, flag as in-op and run it to get the commands
state.in_op = True
state.current_op_hash = op_hash
# Generate actual arguments by parsing strings as jinja2 templates. This
# means you can string format arguments w/o generating multiple
# operations. Only affects top level operations, as must be run "in_op"
# so facts are gathered correctly.
actual_args = [
get_arg_value(state, host, a)
for a in args
]
actual_kwargs = {
key: get_arg_value(state, host, a)
for key, a in six.iteritems(kwargs)
}
# Convert to list as the result may be a generator
commands = unroll_generators(func(
state, host,
*actual_args,
**actual_kwargs
))
state.in_op = False
state.current_op_hash = None
# Add host-specific operation data to state
#
# We're doing some commands, meta/ops++
state.meta[host]['ops'] += 1
state.meta[host]['commands'] += len(commands)
# Add the server-relevant commands
state.ops[host][op_hash] = {
'commands': commands,
}
# Return result meta for use in deploy scripts
return OperationMeta(op_hash, commands)
decorated_func._pyinfra_op = func
return decorated_func | [
"def",
"operation",
"(",
"func",
"=",
"None",
",",
"pipeline_facts",
"=",
"None",
")",
":",
"# If not decorating, return function with config attached",
"if",
"func",
"is",
"None",
":",
"def",
"decorator",
"(",
"f",
")",
":",
"setattr",
"(",
"f",
",",
"'pipeline_facts'",
",",
"pipeline_facts",
")",
"return",
"operation",
"(",
"f",
")",
"return",
"decorator",
"# Index the operation!",
"module_bits",
"=",
"func",
".",
"__module__",
".",
"split",
"(",
"'.'",
")",
"module_name",
"=",
"module_bits",
"[",
"-",
"1",
"]",
"op_name",
"=",
"'.'",
".",
"join",
"(",
"(",
"module_name",
",",
"func",
".",
"__name__",
")",
")",
"OPERATIONS",
".",
"append",
"(",
"op_name",
")",
"# Actually decorate!",
"@",
"wraps",
"(",
"func",
")",
"def",
"decorated_func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Prepare state/host",
"#",
"# If we're in CLI mode, there's no state/host passed down, we need to",
"# use the global \"pseudo\" modules.",
"if",
"len",
"(",
"args",
")",
"<",
"2",
"or",
"not",
"(",
"isinstance",
"(",
"args",
"[",
"0",
"]",
",",
"(",
"State",
",",
"PseudoModule",
")",
")",
"and",
"isinstance",
"(",
"args",
"[",
"1",
"]",
",",
"(",
"Host",
",",
"PseudoModule",
")",
")",
")",
":",
"state",
"=",
"pseudo_state",
".",
"_module",
"host",
"=",
"pseudo_host",
".",
"_module",
"if",
"state",
".",
"in_op",
":",
"raise",
"PyinfraError",
"(",
"(",
"'Nested operation called without state/host: {0} ({1})'",
")",
".",
"format",
"(",
"op_name",
",",
"_get_call_location",
"(",
")",
")",
")",
"if",
"state",
".",
"in_deploy",
":",
"raise",
"PyinfraError",
"(",
"(",
"'Nested deploy operation called without state/host: {0} ({1})'",
")",
".",
"format",
"(",
"op_name",
",",
"_get_call_location",
"(",
")",
")",
")",
"# Otherwise (API mode) we just trim off the commands",
"else",
":",
"args_copy",
"=",
"list",
"(",
"args",
")",
"state",
",",
"host",
"=",
"args",
"[",
"0",
"]",
",",
"args",
"[",
"1",
"]",
"args",
"=",
"args_copy",
"[",
"2",
":",
"]",
"# In API mode we have the kwarg - if a nested operation call we have",
"# current_frameinfo.",
"frameinfo",
"=",
"kwargs",
".",
"pop",
"(",
"'frameinfo'",
",",
"get_caller_frameinfo",
"(",
")",
")",
"# Configure operation",
"#",
"# Name the operation",
"names",
"=",
"None",
"autoname",
"=",
"False",
"# Look for a set as the first argument",
"if",
"len",
"(",
"args",
")",
">",
"0",
"and",
"isinstance",
"(",
"args",
"[",
"0",
"]",
",",
"set",
")",
":",
"names",
"=",
"args",
"[",
"0",
"]",
"args_copy",
"=",
"list",
"(",
"args",
")",
"args",
"=",
"args",
"[",
"1",
":",
"]",
"# Generate an operation name if needed (Module/Operation format)",
"else",
":",
"autoname",
"=",
"True",
"module_bits",
"=",
"func",
".",
"__module__",
".",
"split",
"(",
"'.'",
")",
"module_name",
"=",
"module_bits",
"[",
"-",
"1",
"]",
"names",
"=",
"{",
"'{0}/{1}'",
".",
"format",
"(",
"module_name",
".",
"title",
"(",
")",
",",
"func",
".",
"__name__",
".",
"title",
"(",
")",
")",
",",
"}",
"if",
"state",
".",
"deploy_name",
":",
"names",
"=",
"{",
"'{0} | {1}'",
".",
"format",
"(",
"state",
".",
"deploy_name",
",",
"name",
")",
"for",
"name",
"in",
"names",
"}",
"# Get the meta kwargs (globals that apply to all hosts)",
"op_meta_kwargs",
"=",
"pop_op_kwargs",
"(",
"state",
",",
"kwargs",
")",
"# If this op is being called inside another, just return here",
"# (any unwanted/op-related kwargs removed above).",
"if",
"state",
".",
"in_op",
":",
"return",
"func",
"(",
"state",
",",
"host",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"or",
"[",
"]",
"filename",
"=",
"frameinfo",
".",
"filename",
"line_number",
"=",
"frameinfo",
".",
"lineno",
"# Figure out the lines this operation was called from (essentially like",
"# a line-call-stack).",
"op_lines",
"=",
"[",
"line_number",
"]",
"if",
"state",
".",
"deploy_line_numbers",
":",
"op_lines",
"=",
"list",
"(",
"state",
".",
"deploy_line_numbers",
")",
"+",
"op_lines",
"# Inject the current op file number (only incremented in CLI mode)",
"op_lines",
".",
"insert",
"(",
"0",
",",
"state",
".",
"current_op_file",
")",
"# Make a hash from the call stack lines",
"op_hash",
"=",
"make_hash",
"(",
"op_lines",
")",
"# Avoid adding duplicates! This happens if an operation is called within",
"# a loop - such that the filename/lineno/code _are_ the same, but the",
"# arguments might be different. We just append an increasing number to",
"# the op hash and also handle below with the op order.",
"host_op_hashes",
"=",
"state",
".",
"meta",
"[",
"host",
"]",
"[",
"'op_hashes'",
"]",
"duplicate_op_count",
"=",
"0",
"while",
"op_hash",
"in",
"host_op_hashes",
":",
"logger",
".",
"debug",
"(",
"'Duplicate hash ({0}) detected!'",
".",
"format",
"(",
"op_hash",
")",
")",
"op_hash",
"=",
"'{0}-{1}'",
".",
"format",
"(",
"op_hash",
",",
"duplicate_op_count",
")",
"duplicate_op_count",
"+=",
"1",
"host_op_hashes",
".",
"add",
"(",
"op_hash",
")",
"if",
"duplicate_op_count",
":",
"op_lines",
".",
"append",
"(",
"duplicate_op_count",
")",
"op_lines",
"=",
"tuple",
"(",
"op_lines",
")",
"state",
".",
"op_line_numbers_to_hash",
"[",
"op_lines",
"]",
"=",
"op_hash",
"logger",
".",
"debug",
"(",
"'Adding operation, {0}, called @ {1}:{2}, opLines={3}, opHash={4}'",
".",
"format",
"(",
"names",
",",
"filename",
",",
"line_number",
",",
"op_lines",
",",
"op_hash",
",",
")",
")",
"# Ensure shared (between servers) operation meta",
"op_meta",
"=",
"state",
".",
"op_meta",
".",
"setdefault",
"(",
"op_hash",
",",
"{",
"'names'",
":",
"set",
"(",
")",
",",
"'args'",
":",
"[",
"]",
",",
"}",
")",
"# Add any meta kwargs (sudo, etc) to the meta - first parse any strings",
"# as jinja templates.",
"actual_op_meta_kwargs",
"=",
"{",
"key",
":",
"get_arg_value",
"(",
"state",
",",
"host",
",",
"a",
")",
"for",
"key",
",",
"a",
"in",
"six",
".",
"iteritems",
"(",
"op_meta_kwargs",
")",
"}",
"op_meta",
".",
"update",
"(",
"actual_op_meta_kwargs",
")",
"# Add any new names to the set",
"op_meta",
"[",
"'names'",
"]",
".",
"update",
"(",
"names",
")",
"# Attach normal args, if we're auto-naming this operation",
"if",
"autoname",
":",
"for",
"arg",
"in",
"args",
":",
"if",
"isinstance",
"(",
"arg",
",",
"FunctionType",
")",
":",
"arg",
"=",
"arg",
".",
"__name__",
"if",
"arg",
"not",
"in",
"op_meta",
"[",
"'args'",
"]",
":",
"op_meta",
"[",
"'args'",
"]",
".",
"append",
"(",
"arg",
")",
"# Attach keyword args",
"for",
"key",
",",
"value",
"in",
"six",
".",
"iteritems",
"(",
"kwargs",
")",
":",
"arg",
"=",
"'='",
".",
"join",
"(",
"(",
"str",
"(",
"key",
")",
",",
"str",
"(",
"value",
")",
")",
")",
"if",
"arg",
"not",
"in",
"op_meta",
"[",
"'args'",
"]",
":",
"op_meta",
"[",
"'args'",
"]",
".",
"append",
"(",
"arg",
")",
"# Check if we're actually running the operation on this host",
"#",
"# Run once and we've already added meta for this op? Stop here.",
"if",
"op_meta_kwargs",
"[",
"'run_once'",
"]",
":",
"has_run",
"=",
"False",
"for",
"ops",
"in",
"six",
".",
"itervalues",
"(",
"state",
".",
"ops",
")",
":",
"if",
"op_hash",
"in",
"ops",
":",
"has_run",
"=",
"True",
"break",
"if",
"has_run",
":",
"return",
"OperationMeta",
"(",
"op_hash",
")",
"# If we're limited, stop here - *after* we've created op_meta. This",
"# ensures the meta object always exists, even if no hosts actually ever",
"# execute the op (due to limit or otherwise).",
"hosts",
"=",
"op_meta_kwargs",
"[",
"'hosts'",
"]",
"when",
"=",
"op_meta_kwargs",
"[",
"'when'",
"]",
"if",
"(",
"# Limited by the state's limit_hosts?",
"(",
"state",
".",
"limit_hosts",
"is",
"not",
"None",
"and",
"host",
"not",
"in",
"state",
".",
"limit_hosts",
")",
"# Limited by the operation kwarg hosts?",
"or",
"(",
"hosts",
"is",
"not",
"None",
"and",
"host",
"not",
"in",
"hosts",
")",
"# Limited by the operation kwarg when? We check == because when is",
"# normally attribute wrapped as a AttrDataBool, which is actually",
"# an integer (Python doesn't allow subclassing bool!).",
"or",
"when",
"==",
"False",
"# noqa",
")",
":",
"return",
"OperationMeta",
"(",
"op_hash",
")",
"# \"Run\" operation",
"#",
"# Otherwise, flag as in-op and run it to get the commands",
"state",
".",
"in_op",
"=",
"True",
"state",
".",
"current_op_hash",
"=",
"op_hash",
"# Generate actual arguments by parsing strings as jinja2 templates. This",
"# means you can string format arguments w/o generating multiple",
"# operations. Only affects top level operations, as must be run \"in_op\"",
"# so facts are gathered correctly.",
"actual_args",
"=",
"[",
"get_arg_value",
"(",
"state",
",",
"host",
",",
"a",
")",
"for",
"a",
"in",
"args",
"]",
"actual_kwargs",
"=",
"{",
"key",
":",
"get_arg_value",
"(",
"state",
",",
"host",
",",
"a",
")",
"for",
"key",
",",
"a",
"in",
"six",
".",
"iteritems",
"(",
"kwargs",
")",
"}",
"# Convert to list as the result may be a generator",
"commands",
"=",
"unroll_generators",
"(",
"func",
"(",
"state",
",",
"host",
",",
"*",
"actual_args",
",",
"*",
"*",
"actual_kwargs",
")",
")",
"state",
".",
"in_op",
"=",
"False",
"state",
".",
"current_op_hash",
"=",
"None",
"# Add host-specific operation data to state",
"#",
"# We're doing some commands, meta/ops++",
"state",
".",
"meta",
"[",
"host",
"]",
"[",
"'ops'",
"]",
"+=",
"1",
"state",
".",
"meta",
"[",
"host",
"]",
"[",
"'commands'",
"]",
"+=",
"len",
"(",
"commands",
")",
"# Add the server-relevant commands",
"state",
".",
"ops",
"[",
"host",
"]",
"[",
"op_hash",
"]",
"=",
"{",
"'commands'",
":",
"commands",
",",
"}",
"# Return result meta for use in deploy scripts",
"return",
"OperationMeta",
"(",
"op_hash",
",",
"commands",
")",
"decorated_func",
".",
"_pyinfra_op",
"=",
"func",
"return",
"decorated_func"
] | 33.727273 | 21.008264 |
def _is_inside(self, span1, span2, covered_spans):
"""Returns True if both `span1` and `span2` fall within
`covered_spans`.
:param span1: start and end indices of a span
:type span1: 2-`tuple` of `int`
:param span2: start and end indices of a span
:type span2: 2-`tuple` of `int`
:param covered_spans: lists of start and end indices for parts
of the texts already covered by a sequence
:type covered_spans: `list` of two `list`s of 2-`tuple` of `int`
:rtype: `bool`
"""
if self._is_span_inside(span1, covered_spans[0]) and \
self._is_span_inside(span2, covered_spans[1]):
return True
return False | [
"def",
"_is_inside",
"(",
"self",
",",
"span1",
",",
"span2",
",",
"covered_spans",
")",
":",
"if",
"self",
".",
"_is_span_inside",
"(",
"span1",
",",
"covered_spans",
"[",
"0",
"]",
")",
"and",
"self",
".",
"_is_span_inside",
"(",
"span2",
",",
"covered_spans",
"[",
"1",
"]",
")",
":",
"return",
"True",
"return",
"False"
] | 40.555556 | 17.888889 |
def get_ip_packet(data, client_port, server_port, is_loopback=False):
""" if client_port is 0 any client_port is good """
header = _loopback if is_loopback else _ethernet
try:
header.unpack(data)
except Exception as ex:
raise ValueError('Bad header: %s' % ex)
tcp_p = getattr(header.data, 'data', None)
if type(tcp_p) != dpkt.tcp.TCP:
raise ValueError('Not a TCP packet')
if tcp_p.dport == server_port:
if client_port != 0 and tcp_p.sport != client_port:
raise ValueError('Request from different client')
elif tcp_p.sport == server_port:
if client_port != 0 and tcp_p.dport != client_port:
raise ValueError('Reply for different client')
else:
raise ValueError('Packet not for/from client/server')
return header.data | [
"def",
"get_ip_packet",
"(",
"data",
",",
"client_port",
",",
"server_port",
",",
"is_loopback",
"=",
"False",
")",
":",
"header",
"=",
"_loopback",
"if",
"is_loopback",
"else",
"_ethernet",
"try",
":",
"header",
".",
"unpack",
"(",
"data",
")",
"except",
"Exception",
"as",
"ex",
":",
"raise",
"ValueError",
"(",
"'Bad header: %s'",
"%",
"ex",
")",
"tcp_p",
"=",
"getattr",
"(",
"header",
".",
"data",
",",
"'data'",
",",
"None",
")",
"if",
"type",
"(",
"tcp_p",
")",
"!=",
"dpkt",
".",
"tcp",
".",
"TCP",
":",
"raise",
"ValueError",
"(",
"'Not a TCP packet'",
")",
"if",
"tcp_p",
".",
"dport",
"==",
"server_port",
":",
"if",
"client_port",
"!=",
"0",
"and",
"tcp_p",
".",
"sport",
"!=",
"client_port",
":",
"raise",
"ValueError",
"(",
"'Request from different client'",
")",
"elif",
"tcp_p",
".",
"sport",
"==",
"server_port",
":",
"if",
"client_port",
"!=",
"0",
"and",
"tcp_p",
".",
"dport",
"!=",
"client_port",
":",
"raise",
"ValueError",
"(",
"'Reply for different client'",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Packet not for/from client/server'",
")",
"return",
"header",
".",
"data"
] | 35.173913 | 19.043478 |
def schedule(self, func, *args, **kwargs):
"""
Schedules a function func for execution.
One special parameter is track_progress. If passed in and not None, the func will be passed in a
keyword parameter called update_progress:
def update_progress(progress, total_progress, stage=""):
The running function can call the update_progress function to notify interested parties of the function's
current progress.
Another special parameter is the "cancellable" keyword parameter. When passed in and not None, a special
"check_for_cancel" parameter is passed in. When called, it raises an error when the user has requested a job
to be cancelled.
The caller can also pass in any pickleable object into the "extra_metadata" parameter. This data is stored
within the job and can be retrieved when the job status is queried.
All other parameters are directly passed to the function when it starts running.
:type func: callable or str
:param func: A callable object that will be scheduled for running.
:return: a string representing the job_id.
"""
# if the func is already a job object, just schedule that directly.
if isinstance(func, Job):
job = func
# else, turn it into a job first.
else:
job = Job(func, *args, **kwargs)
job.track_progress = kwargs.pop('track_progress', False)
job.cancellable = kwargs.pop('cancellable', False)
job.extra_metadata = kwargs.pop('extra_metadata', {})
job_id = self.storage.schedule_job(job)
return job_id | [
"def",
"schedule",
"(",
"self",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# if the func is already a job object, just schedule that directly.",
"if",
"isinstance",
"(",
"func",
",",
"Job",
")",
":",
"job",
"=",
"func",
"# else, turn it into a job first.",
"else",
":",
"job",
"=",
"Job",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"job",
".",
"track_progress",
"=",
"kwargs",
".",
"pop",
"(",
"'track_progress'",
",",
"False",
")",
"job",
".",
"cancellable",
"=",
"kwargs",
".",
"pop",
"(",
"'cancellable'",
",",
"False",
")",
"job",
".",
"extra_metadata",
"=",
"kwargs",
".",
"pop",
"(",
"'extra_metadata'",
",",
"{",
"}",
")",
"job_id",
"=",
"self",
".",
"storage",
".",
"schedule_job",
"(",
"job",
")",
"return",
"job_id"
] | 43 | 29.105263 |
def GenerateClient(args):
"""Driver for client code generation."""
codegen = _GetCodegenFromFlags(args)
if codegen is None:
logging.error('Failed to create codegen, exiting.')
return 128
_WriteGeneratedFiles(args, codegen)
if args.init_file != 'none':
_WriteInit(codegen) | [
"def",
"GenerateClient",
"(",
"args",
")",
":",
"codegen",
"=",
"_GetCodegenFromFlags",
"(",
"args",
")",
"if",
"codegen",
"is",
"None",
":",
"logging",
".",
"error",
"(",
"'Failed to create codegen, exiting.'",
")",
"return",
"128",
"_WriteGeneratedFiles",
"(",
"args",
",",
"codegen",
")",
"if",
"args",
".",
"init_file",
"!=",
"'none'",
":",
"_WriteInit",
"(",
"codegen",
")"
] | 27.909091 | 15.909091 |
def PCA_reduce(X, Q):
"""
A helpful function for linearly reducing the dimensionality of the data X
to Q.
:param X: data array of size N (number of points) x D (dimensions)
:param Q: Number of latent dimensions, Q < D
:return: PCA projection array of size N x Q.
"""
assert Q <= X.shape[1], 'Cannot have more latent dimensions than observed'
evals, evecs = np.linalg.eigh(np.cov(X.T))
W = evecs[:, -Q:]
return (X - X.mean(0)).dot(W) | [
"def",
"PCA_reduce",
"(",
"X",
",",
"Q",
")",
":",
"assert",
"Q",
"<=",
"X",
".",
"shape",
"[",
"1",
"]",
",",
"'Cannot have more latent dimensions than observed'",
"evals",
",",
"evecs",
"=",
"np",
".",
"linalg",
".",
"eigh",
"(",
"np",
".",
"cov",
"(",
"X",
".",
"T",
")",
")",
"W",
"=",
"evecs",
"[",
":",
",",
"-",
"Q",
":",
"]",
"return",
"(",
"X",
"-",
"X",
".",
"mean",
"(",
"0",
")",
")",
".",
"dot",
"(",
"W",
")"
] | 38.75 | 16.916667 |
def logtrick_minimizer(minimizer):
r"""
Log-Trick decorator for optimizers.
This decorator implements the "log trick" for optimizing positive bounded
variables. It will apply this trick for any variables that correspond to a
Positive() bound.
Examples
--------
>>> from scipy.optimize import minimize as sp_min
>>> from ..btypes import Bound, Positive
Here is an example where we may want to enforce a particular parameter or
parameters to be strictly greater than zero,
>>> def cost(w, lambda_):
... sq_norm = w.T.dot(w)
... return .5 * lambda_ * sq_norm, lambda_ * w
Now let's enforce that the `w` are positive,
>>> bounds = [Positive(), Positive(), Positive()]
>>> new_min = logtrick_minimizer(sp_min)
Initial values
>>> w_0 = np.array([.5, .1, .2])
>>> lambda_0 = .25
>>> res = new_min(cost, w_0, args=(lambda_0,), bounds=bounds,
... method='L-BFGS-B', jac=True)
>>> res.x >= 0
array([ True, True, True], dtype=bool)
Note
----
This decorator only works on unstructured optimizers. However, it can be
use with structured_minimizer, so long as it is the inner wrapper.
"""
@wraps(minimizer)
def new_minimizer(fun, x0, jac=True, bounds=None, **minimizer_kwargs):
if bounds is None:
return minimizer(fun, x0, jac=jac, bounds=bounds,
**minimizer_kwargs)
logx, expx, gradx, bounds = _logtrick_gen(bounds)
# Intercept gradient
if callable(jac):
def new_jac(x, *fargs, **fkwargs):
return gradx(jac(expx(x), *fargs, **fkwargs), x)
else:
new_jac = jac
# Intercept objective
if (not callable(jac)) and bool(jac):
def new_fun(x, *fargs, **fkwargs):
o, g = fun(expx(x), *fargs, **fkwargs)
return o, gradx(g, x)
else:
def new_fun(x, *fargs, **fkwargs):
return fun(expx(x), *fargs, **fkwargs)
# Transform the final result
result = minimizer(new_fun, logx(x0), jac=new_jac, bounds=bounds,
**minimizer_kwargs)
result['x'] = expx(result['x'])
return result
return new_minimizer | [
"def",
"logtrick_minimizer",
"(",
"minimizer",
")",
":",
"@",
"wraps",
"(",
"minimizer",
")",
"def",
"new_minimizer",
"(",
"fun",
",",
"x0",
",",
"jac",
"=",
"True",
",",
"bounds",
"=",
"None",
",",
"*",
"*",
"minimizer_kwargs",
")",
":",
"if",
"bounds",
"is",
"None",
":",
"return",
"minimizer",
"(",
"fun",
",",
"x0",
",",
"jac",
"=",
"jac",
",",
"bounds",
"=",
"bounds",
",",
"*",
"*",
"minimizer_kwargs",
")",
"logx",
",",
"expx",
",",
"gradx",
",",
"bounds",
"=",
"_logtrick_gen",
"(",
"bounds",
")",
"# Intercept gradient",
"if",
"callable",
"(",
"jac",
")",
":",
"def",
"new_jac",
"(",
"x",
",",
"*",
"fargs",
",",
"*",
"*",
"fkwargs",
")",
":",
"return",
"gradx",
"(",
"jac",
"(",
"expx",
"(",
"x",
")",
",",
"*",
"fargs",
",",
"*",
"*",
"fkwargs",
")",
",",
"x",
")",
"else",
":",
"new_jac",
"=",
"jac",
"# Intercept objective",
"if",
"(",
"not",
"callable",
"(",
"jac",
")",
")",
"and",
"bool",
"(",
"jac",
")",
":",
"def",
"new_fun",
"(",
"x",
",",
"*",
"fargs",
",",
"*",
"*",
"fkwargs",
")",
":",
"o",
",",
"g",
"=",
"fun",
"(",
"expx",
"(",
"x",
")",
",",
"*",
"fargs",
",",
"*",
"*",
"fkwargs",
")",
"return",
"o",
",",
"gradx",
"(",
"g",
",",
"x",
")",
"else",
":",
"def",
"new_fun",
"(",
"x",
",",
"*",
"fargs",
",",
"*",
"*",
"fkwargs",
")",
":",
"return",
"fun",
"(",
"expx",
"(",
"x",
")",
",",
"*",
"fargs",
",",
"*",
"*",
"fkwargs",
")",
"# Transform the final result",
"result",
"=",
"minimizer",
"(",
"new_fun",
",",
"logx",
"(",
"x0",
")",
",",
"jac",
"=",
"new_jac",
",",
"bounds",
"=",
"bounds",
",",
"*",
"*",
"minimizer_kwargs",
")",
"result",
"[",
"'x'",
"]",
"=",
"expx",
"(",
"result",
"[",
"'x'",
"]",
")",
"return",
"result",
"return",
"new_minimizer"
] | 31.055556 | 21.236111 |
def get_real_percent(self):
    """Return the raw (unmodified) percentage for this score.

    Computed on a 0-point scale as the real rating divided by the
    field's range; returns 0 when there are no votes or no score yet.
    """
    has_data = self.votes and self.score
    if not has_data:
        return 0
    ratio = self.get_real_rating() / self.field.range
    return 100 * ratio
"def",
"get_real_percent",
"(",
"self",
")",
":",
"if",
"not",
"(",
"self",
".",
"votes",
"and",
"self",
".",
"score",
")",
":",
"return",
"0",
"return",
"100",
"*",
"(",
"self",
".",
"get_real_rating",
"(",
")",
"/",
"self",
".",
"field",
".",
"range",
")"
] | 39.142857 | 13.142857 |
def template_global(self, name: Optional[str]=None) -> Callable:
    """Register a template global via a decorator.

    Example usage,

    .. code-block:: python

        @app.template_global('name')
        def five():
            return 5

    Arguments:
        name: The global's name (defaults to the decorated function's name).
    """
    def wrapper(func: Callable) -> Callable:
        self.add_template_global(func, name=name)
        return func
    return wrapper
"def",
"template_global",
"(",
"self",
",",
"name",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"Callable",
":",
"def",
"decorator",
"(",
"func",
":",
"Callable",
")",
"->",
"Callable",
":",
"self",
".",
"add_template_global",
"(",
"func",
",",
"name",
"=",
"name",
")",
"return",
"func",
"return",
"decorator"
] | 29.111111 | 19.777778 |
def exportFile(self, jobStoreFileID, dstUrl):
    """
    Export a stored file to the destination pointed at by ``dstUrl``.

    Verifies context-manager usage, then delegates to the wrapped job
    store; see :func:`toil.jobStores.abstractJobStore.AbstractJobStore.exportFile`
    for the full contract.
    """
    self._assertContextManagerUsed()
    store = self._jobStore
    store.exportFile(jobStoreFileID, dstUrl)
"def",
"exportFile",
"(",
"self",
",",
"jobStoreFileID",
",",
"dstUrl",
")",
":",
"self",
".",
"_assertContextManagerUsed",
"(",
")",
"self",
".",
"_jobStore",
".",
"exportFile",
"(",
"jobStoreFileID",
",",
"dstUrl",
")"
] | 38.111111 | 17 |
def get_parent_ps(self):
    """
    Return the parent :class:`ParameterSet` of this Parameter.

    The parent set holds every Parameter in the owning
    :class:`phoebe.frontend.bundle.Bundle` sharing this Parameter's
    meta-tags (ignoring qualifier, twig, uniquetwig).

    :return: the parent :class:`ParameterSet`, or None when this
        Parameter is not attached to a Bundle
    """
    if self._bundle is None:
        return None
    skip = ('qualifier', 'twig', 'uniquetwig')
    metawargs = dict((key, val) for key, val in self.meta.items() if key not in skip)
    return self._bundle.filter(**metawargs)
return self._bundle.filter(**metawargs) | [
"def",
"get_parent_ps",
"(",
"self",
")",
":",
"if",
"self",
".",
"_bundle",
"is",
"None",
":",
"return",
"None",
"metawargs",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"self",
".",
"meta",
".",
"items",
"(",
")",
"if",
"k",
"not",
"in",
"[",
"'qualifier'",
",",
"'twig'",
",",
"'uniquetwig'",
"]",
"}",
"return",
"self",
".",
"_bundle",
".",
"filter",
"(",
"*",
"*",
"metawargs",
")"
] | 34.857143 | 22 |
def has_error(self):
    """
    Refresh the job info from the server and report whether it has an error.

    Returns True or False; missing status information counts as no error.
    """
    self.get_info()
    if 'status' not in self.info:
        return False
    status = self.info['status']
    if 'hasError' not in status:
        return False
    return status['hasError']
"def",
"has_error",
"(",
"self",
")",
":",
"self",
".",
"get_info",
"(",
")",
"if",
"'status'",
"not",
"in",
"self",
".",
"info",
":",
"return",
"False",
"if",
"'hasError'",
"not",
"in",
"self",
".",
"info",
"[",
"'status'",
"]",
":",
"return",
"False",
"return",
"self",
".",
"info",
"[",
"'status'",
"]",
"[",
"'hasError'",
"]"
] | 25.769231 | 15.153846 |
def can_create_gradebook_with_record_types(self, gradebook_record_types):
    """Tests if this user can create a single ``Gradebook`` using the desired record types.

    While ``GradingManager.getGradebookRecordTypes()`` can be used to
    examine which records are supported, this method tests which
    record(s) are required for creating a specific ``Gradebook``.
    Providing an empty array tests if a ``Gradebook`` can be created
    with no records.

    arg: gradebook_record_types (osid.type.Type[]): array of
        gradebook record types
    return: (boolean) - ``true`` if ``Gradebook`` creation using the
        specified ``Types`` is supported, ``false`` otherwise
    raise: NullArgument - ``gradebook_record_types`` is ``null``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinAdminSession.can_create_bin_with_record_types
    # NOTE: real authentication hints are expected to be handled by a
    # service adapter above the pay grade of this implementation.
    session = self._catalog_session
    if session is None:
        return True
    return session.can_create_catalog_with_record_types(
        catalog_record_types=gradebook_record_types)
"def",
"can_create_gradebook_with_record_types",
"(",
"self",
",",
"gradebook_record_types",
")",
":",
"# Implemented from template for",
"# osid.resource.BinAdminSession.can_create_bin_with_record_types",
"# NOTE: It is expected that real authentication hints will be",
"# handled in a service adapter above the pay grade of this impl.",
"if",
"self",
".",
"_catalog_session",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_catalog_session",
".",
"can_create_catalog_with_record_types",
"(",
"catalog_record_types",
"=",
"gradebook_record_types",
")",
"return",
"True"
] | 54.375 | 26.625 |
def RetryQuestion(question_text, output_re="", default_val=None):
  """Prompt the user repeatedly until the answer matches output_re.

  Args:
    question_text: The prompt shown to the user.
    output_re: Regex the (stripped) answer must match; an empty pattern
      accepts any answer.
    default_val: Value substituted when the user enters nothing.

  Returns:
    The first stripped answer matching output_re.
  """
  while True:
    if default_val is None:
      prompt = "%s: " % question_text
    else:
      prompt = "%s [%s]: " % (question_text, default_val)
    # pytype: disable=wrong-arg-count
    answer = builtins.input(prompt) or str(default_val)
    # pytype: enable=wrong-arg-count
    answer = answer.strip()
    if output_re and not re.match(output_re, answer):
      print("Invalid input, must match %s" % output_re)
      continue
    return answer
"def",
"RetryQuestion",
"(",
"question_text",
",",
"output_re",
"=",
"\"\"",
",",
"default_val",
"=",
"None",
")",
":",
"while",
"True",
":",
"if",
"default_val",
"is",
"not",
"None",
":",
"new_text",
"=",
"\"%s [%s]: \"",
"%",
"(",
"question_text",
",",
"default_val",
")",
"else",
":",
"new_text",
"=",
"\"%s: \"",
"%",
"question_text",
"# pytype: disable=wrong-arg-count",
"output",
"=",
"builtins",
".",
"input",
"(",
"new_text",
")",
"or",
"str",
"(",
"default_val",
")",
"# pytype: enable=wrong-arg-count",
"output",
"=",
"output",
".",
"strip",
"(",
")",
"if",
"not",
"output_re",
"or",
"re",
".",
"match",
"(",
"output_re",
",",
"output",
")",
":",
"break",
"else",
":",
"print",
"(",
"\"Invalid input, must match %s\"",
"%",
"output_re",
")",
"return",
"output"
] | 36.3125 | 16.3125 |
def rate_limit(max_interval=20, sampling=3, f=lambda x: x):
    '''Generator yielding probabilistic rate-limit triggers.

    An internal counter x rises by 1 from 0 on each iteration and resets
    to 0 when a trigger fires.  f(x) should rise towards f(max_interval);
    with the default "f(x)=x" the probability grows linearly, reaching
    100% at x == max_interval.  Only every "sampling"-th iteration rolls
    the dice, which shapes the combined chance in an
    "c=1-(1-c0)*(1-c1)*...*(1-cx)" exponential way.'''
    from random import random
    ceiling = float(f(max_interval))
    x = 0
    while True:
        if x % sampling:
            # Off-sample iteration: never triggers.
            yield False
        else:
            fired = random() > (ceiling - f(x)) / ceiling
            if fired:
                x = 0
            yield fired
        x += 1
"def",
"rate_limit",
"(",
"max_interval",
"=",
"20",
",",
"sampling",
"=",
"3",
",",
"f",
"=",
"lambda",
"x",
":",
"x",
")",
":",
"from",
"random",
"import",
"random",
"val",
"=",
"0",
"val_max",
"=",
"float",
"(",
"f",
"(",
"max_interval",
")",
")",
"while",
"True",
":",
"if",
"val",
"%",
"sampling",
"==",
"0",
":",
"trigger",
"=",
"random",
"(",
")",
">",
"(",
"val_max",
"-",
"f",
"(",
"val",
")",
")",
"/",
"val_max",
"if",
"trigger",
":",
"val",
"=",
"0",
"yield",
"trigger",
"else",
":",
"yield",
"False",
"val",
"+=",
"1"
] | 38.6 | 23.8 |
def render(template, saltenv='base', sls='', tmplpath=None, **kws):
    '''
    Render the python module's components.

    :param template: ignored; the on-disk path in ``tmplpath`` is used instead
    :param saltenv: salt environment to render under
    :param sls: name of the sls file being rendered
    :param tmplpath: absolute path of the template file on disk
    :rtype: string
    '''
    # The py renderer always renders from the file on disk, so replace the
    # incoming template value with the resolved template path.
    template = tmplpath
    if not os.path.isfile(template):
        raise SaltRenderError('Template {0} is not a file!'.format(template))
    # Execute the python template, injecting the salt dunders both under
    # their dunder names and plain aliases (salt, grains, opts, pillar, ...).
    # NOTE(review): __salt__/__grains__/etc. are presumably injected into this
    # module's globals by the salt loader -- confirm against the loader docs.
    tmp_data = salt.utils.templates.py(
        template,
        True,
        __salt__=__salt__,
        salt=__salt__,
        __grains__=__grains__,
        grains=__grains__,
        __opts__=__opts__,
        opts=__opts__,
        __pillar__=__pillar__,
        pillar=__pillar__,
        __env__=saltenv,
        saltenv=saltenv,
        __sls__=sls,
        sls=sls,
        **kws)
    # The template helper reports success via the 'result' flag; on failure
    # 'data' carries the error message instead of the rendered output.
    if not tmp_data.get('result', False):
        raise SaltRenderError(tmp_data.get('data',
                                           'Unknown render error in py renderer'))
    return tmp_data['data']
"def",
"render",
"(",
"template",
",",
"saltenv",
"=",
"'base'",
",",
"sls",
"=",
"''",
",",
"tmplpath",
"=",
"None",
",",
"*",
"*",
"kws",
")",
":",
"template",
"=",
"tmplpath",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"template",
")",
":",
"raise",
"SaltRenderError",
"(",
"'Template {0} is not a file!'",
".",
"format",
"(",
"template",
")",
")",
"tmp_data",
"=",
"salt",
".",
"utils",
".",
"templates",
".",
"py",
"(",
"template",
",",
"True",
",",
"__salt__",
"=",
"__salt__",
",",
"salt",
"=",
"__salt__",
",",
"__grains__",
"=",
"__grains__",
",",
"grains",
"=",
"__grains__",
",",
"__opts__",
"=",
"__opts__",
",",
"opts",
"=",
"__opts__",
",",
"__pillar__",
"=",
"__pillar__",
",",
"pillar",
"=",
"__pillar__",
",",
"__env__",
"=",
"saltenv",
",",
"saltenv",
"=",
"saltenv",
",",
"__sls__",
"=",
"sls",
",",
"sls",
"=",
"sls",
",",
"*",
"*",
"kws",
")",
"if",
"not",
"tmp_data",
".",
"get",
"(",
"'result'",
",",
"False",
")",
":",
"raise",
"SaltRenderError",
"(",
"tmp_data",
".",
"get",
"(",
"'data'",
",",
"'Unknown render error in py renderer'",
")",
")",
"return",
"tmp_data",
"[",
"'data'",
"]"
] | 28.387097 | 17.225806 |
def mavlink_packet(self, m):
    '''Handle an incoming mavlink packet.

    Dispatches on the message type to drive the waypoint (mission)
    protocol state machine:

    * ``WAYPOINT_COUNT``/``MISSION_COUNT`` -- begin (or restart) a download.
    * ``WAYPOINT``/``MISSION_ITEM`` -- collect downloaded items (possibly
      out of order) and, once complete, finish the pending list/save op.
    * ``WAYPOINT_REQUEST``/``MISSION_REQUEST`` -- serve an upload request.
    * ``WAYPOINT_CURRENT``/``MISSION_CURRENT`` -- announce waypoint changes.
    * ``MISSION_ITEM_REACHED`` -- warn about a non-zero ALT_OFFSET when a
      DO_LAND_START waypoint is reached.
    '''
    mtype = m.get_type()
    if mtype in ['WAYPOINT_COUNT','MISSION_COUNT']:
        # Vehicle announced how many mission items it will send.
        self.wploader.expected_count = m.count
        if self.wp_op is None:
            #self.console.error("No waypoint load started")
            pass
        else:
            self.wploader.clear()
            self.console.writeln("Requesting %u waypoints t=%s now=%s" % (m.count,
                                                                                 time.asctime(time.localtime(m._timestamp)),
                                                                                 time.asctime()))
            self.send_wp_requests()
    elif mtype in ['WAYPOINT', 'MISSION_ITEM'] and self.wp_op is not None:
        # Items already added to the loader are duplicates -- drop them.
        if m.seq < self.wploader.count():
            #print("DUPLICATE %u" % m.seq)
            return
        if m.seq+1 > self.wploader.expected_count:
            self.console.writeln("Unexpected waypoint number %u - expected %u" % (m.seq, self.wploader.count()))
        # Buffer out-of-order items, then append any contiguous run
        # starting at the next expected sequence number.
        self.wp_received[m.seq] = m
        next_seq = self.wploader.count()
        while next_seq in self.wp_received:
            m = self.wp_received.pop(next_seq)
            self.wploader.add(m)
            next_seq += 1
        if self.wploader.count() != self.wploader.expected_count:
            #print("m.seq=%u expected_count=%u" % (m.seq, self.wploader.expected_count))
            self.send_wp_requests()
            return
        # Download complete: finish whichever operation requested it.
        if self.wp_op == 'list':
            for i in range(self.wploader.count()):
                w = self.wploader.wp(i)
                print("%u %u %.10f %.10f %f p1=%.1f p2=%.1f p3=%.1f p4=%.1f cur=%u auto=%u" % (
                    w.command, w.frame, w.x, w.y, w.z,
                    w.param1, w.param2, w.param3, w.param4,
                    w.current, w.autocontinue))
            if self.logdir is not None:
                # Save per-system waypoint files for non-primary systems.
                fname = 'way.txt'
                if m.get_srcSystem() != 1:
                    fname = 'way_%u.txt' % m.get_srcSystem()
                waytxt = os.path.join(self.logdir, fname)
                self.save_waypoints(waytxt)
                print("Saved waypoints to %s" % waytxt)
            self.loading_waypoints = False
        elif self.wp_op == "save":
            self.save_waypoints(self.wp_save_filename)
        # Reset the protocol state for the next operation.
        self.wp_op = None
        self.wp_requested = {}
        self.wp_received = {}
    elif mtype in ["WAYPOINT_REQUEST", "MISSION_REQUEST"]:
        self.process_waypoint_request(m, self.master)
    elif mtype in ["WAYPOINT_CURRENT", "MISSION_CURRENT"]:
        # Only announce when the current waypoint actually changed.
        if m.seq != self.last_waypoint:
            self.last_waypoint = m.seq
            if self.settings.wpupdates:
                self.say("waypoint %u" % m.seq,priority='message')
    elif mtype == "MISSION_ITEM_REACHED":
        wp = self.wploader.wp(m.seq)
        if wp is None:
            # should we spit out a warning?!
            # self.say("No waypoints")
            pass
        else:
            if wp.command == mavutil.mavlink.MAV_CMD_DO_LAND_START:
                alt_offset = self.get_mav_param('ALT_OFFSET', 0)
                if alt_offset > 0.005:
                    self.say("ALT OFFSET IS NOT ZERO passing DO_LAND_START")
"def",
"mavlink_packet",
"(",
"self",
",",
"m",
")",
":",
"mtype",
"=",
"m",
".",
"get_type",
"(",
")",
"if",
"mtype",
"in",
"[",
"'WAYPOINT_COUNT'",
",",
"'MISSION_COUNT'",
"]",
":",
"self",
".",
"wploader",
".",
"expected_count",
"=",
"m",
".",
"count",
"if",
"self",
".",
"wp_op",
"is",
"None",
":",
"#self.console.error(\"No waypoint load started\")",
"pass",
"else",
":",
"self",
".",
"wploader",
".",
"clear",
"(",
")",
"self",
".",
"console",
".",
"writeln",
"(",
"\"Requesting %u waypoints t=%s now=%s\"",
"%",
"(",
"m",
".",
"count",
",",
"time",
".",
"asctime",
"(",
"time",
".",
"localtime",
"(",
"m",
".",
"_timestamp",
")",
")",
",",
"time",
".",
"asctime",
"(",
")",
")",
")",
"self",
".",
"send_wp_requests",
"(",
")",
"elif",
"mtype",
"in",
"[",
"'WAYPOINT'",
",",
"'MISSION_ITEM'",
"]",
"and",
"self",
".",
"wp_op",
"is",
"not",
"None",
":",
"if",
"m",
".",
"seq",
"<",
"self",
".",
"wploader",
".",
"count",
"(",
")",
":",
"#print(\"DUPLICATE %u\" % m.seq)",
"return",
"if",
"m",
".",
"seq",
"+",
"1",
">",
"self",
".",
"wploader",
".",
"expected_count",
":",
"self",
".",
"console",
".",
"writeln",
"(",
"\"Unexpected waypoint number %u - expected %u\"",
"%",
"(",
"m",
".",
"seq",
",",
"self",
".",
"wploader",
".",
"count",
"(",
")",
")",
")",
"self",
".",
"wp_received",
"[",
"m",
".",
"seq",
"]",
"=",
"m",
"next_seq",
"=",
"self",
".",
"wploader",
".",
"count",
"(",
")",
"while",
"next_seq",
"in",
"self",
".",
"wp_received",
":",
"m",
"=",
"self",
".",
"wp_received",
".",
"pop",
"(",
"next_seq",
")",
"self",
".",
"wploader",
".",
"add",
"(",
"m",
")",
"next_seq",
"+=",
"1",
"if",
"self",
".",
"wploader",
".",
"count",
"(",
")",
"!=",
"self",
".",
"wploader",
".",
"expected_count",
":",
"#print(\"m.seq=%u expected_count=%u\" % (m.seq, self.wploader.expected_count))",
"self",
".",
"send_wp_requests",
"(",
")",
"return",
"if",
"self",
".",
"wp_op",
"==",
"'list'",
":",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"wploader",
".",
"count",
"(",
")",
")",
":",
"w",
"=",
"self",
".",
"wploader",
".",
"wp",
"(",
"i",
")",
"print",
"(",
"\"%u %u %.10f %.10f %f p1=%.1f p2=%.1f p3=%.1f p4=%.1f cur=%u auto=%u\"",
"%",
"(",
"w",
".",
"command",
",",
"w",
".",
"frame",
",",
"w",
".",
"x",
",",
"w",
".",
"y",
",",
"w",
".",
"z",
",",
"w",
".",
"param1",
",",
"w",
".",
"param2",
",",
"w",
".",
"param3",
",",
"w",
".",
"param4",
",",
"w",
".",
"current",
",",
"w",
".",
"autocontinue",
")",
")",
"if",
"self",
".",
"logdir",
"is",
"not",
"None",
":",
"fname",
"=",
"'way.txt'",
"if",
"m",
".",
"get_srcSystem",
"(",
")",
"!=",
"1",
":",
"fname",
"=",
"'way_%u.txt'",
"%",
"m",
".",
"get_srcSystem",
"(",
")",
"waytxt",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"logdir",
",",
"fname",
")",
"self",
".",
"save_waypoints",
"(",
"waytxt",
")",
"print",
"(",
"\"Saved waypoints to %s\"",
"%",
"waytxt",
")",
"self",
".",
"loading_waypoints",
"=",
"False",
"elif",
"self",
".",
"wp_op",
"==",
"\"save\"",
":",
"self",
".",
"save_waypoints",
"(",
"self",
".",
"wp_save_filename",
")",
"self",
".",
"wp_op",
"=",
"None",
"self",
".",
"wp_requested",
"=",
"{",
"}",
"self",
".",
"wp_received",
"=",
"{",
"}",
"elif",
"mtype",
"in",
"[",
"\"WAYPOINT_REQUEST\"",
",",
"\"MISSION_REQUEST\"",
"]",
":",
"self",
".",
"process_waypoint_request",
"(",
"m",
",",
"self",
".",
"master",
")",
"elif",
"mtype",
"in",
"[",
"\"WAYPOINT_CURRENT\"",
",",
"\"MISSION_CURRENT\"",
"]",
":",
"if",
"m",
".",
"seq",
"!=",
"self",
".",
"last_waypoint",
":",
"self",
".",
"last_waypoint",
"=",
"m",
".",
"seq",
"if",
"self",
".",
"settings",
".",
"wpupdates",
":",
"self",
".",
"say",
"(",
"\"waypoint %u\"",
"%",
"m",
".",
"seq",
",",
"priority",
"=",
"'message'",
")",
"elif",
"mtype",
"==",
"\"MISSION_ITEM_REACHED\"",
":",
"wp",
"=",
"self",
".",
"wploader",
".",
"wp",
"(",
"m",
".",
"seq",
")",
"if",
"wp",
"is",
"None",
":",
"# should we spit out a warning?!",
"# self.say(\"No waypoints\")",
"pass",
"else",
":",
"if",
"wp",
".",
"command",
"==",
"mavutil",
".",
"mavlink",
".",
"MAV_CMD_DO_LAND_START",
":",
"alt_offset",
"=",
"self",
".",
"get_mav_param",
"(",
"'ALT_OFFSET'",
",",
"0",
")",
"if",
"alt_offset",
">",
"0.005",
":",
"self",
".",
"say",
"(",
"\"ALT OFFSET IS NOT ZERO passing DO_LAND_START\"",
")"
] | 47.638889 | 18.194444 |
def setsweep(self, sweep=0, channel=0):
        """Select the active sweep and channel of an ABF (both start at 0).

        Loads the requested trace and refreshes all derived attributes:
        sampling rate/period, sweep timing, axis units (inferred from the
        trace dimensionality), the sweep data arrays, the optional first
        derivative, and the command protocol.
        """
        try:
            sweep=int(sweep)
        except:
            self.log.error("trying to set sweep to [%s]",sweep)
            return
        if sweep<0:
            # NOTE(review): for sweep < -1 this maps past the last sweep and
            # relies on the clamp below -- confirm this is intended.
            sweep=self.sweeps-1-sweep # if negative, start from the end
        sweep=max(0,min(sweep,self.sweeps-1)) # correct for out of range sweeps
        if 'sweep' in dir(self) and self.sweep == sweep and self.derivative is False:
            self.log.debug("sweep %d already set",sweep)
            return
        #self.log.debug("loading sweep %d (Ch%d)",sweep,channel)
        self.channels=self.ABFblock.segments[sweep].size["analogsignals"]
        if self.channels>1 and sweep==0:
            self.log.info("WARNING: multichannel not yet supported!") #TODO:
        self.trace = self.ABFblock.segments[sweep].analogsignals[channel]
        self.sweep=sweep # currently selected sweep
        self.channel=channel # currently selected channel
        # sweep information
        self.rate = int(self.trace.sampling_rate) # Hz
        self.period = float(1/self.rate) # seconds (inverse of sample rate)
        self.pointsPerSec = int(self.rate) # for easy access
        self.pointsPerMs = int(self.rate/1000.0) # for easy access
        self.sweepSize = len(self.trace) # number of data points per sweep
        self.sweepInterval = self.trace.duration.magnitude # sweep interval (seconds)
        self.sweepLength = float(self.trace.t_stop-self.trace.t_start) # in seconds
        self.length = self.sweepLength*self.sweeps # length (sec) of total recording
        self.lengthMinutes = self.length/60.0 # length (minutes) of total recording
        # infer axis/unit labels from the trace dimensionality
        # (pA = voltage clamp recording, mV = current clamp recording)
        if str(self.trace.dimensionality) == 'pA':
            self.units,self.units2="pA","clamp current (pA)"
            self.unitsD,self.unitsD2="pA/ms","current velocity (pA/ms)"
            self.protoUnits,self.protoUnits2="mV","command voltage (mV)"
        elif str(self.trace.dimensionality) == 'mV':
            self.units,self.units2="mV","membrane potential (mV)"
            self.unitsD,self.unitsD2="V/s","potential velocity (V/s)"
            self.protoUnits,self.protoUnits2="pA","command current (pA)"
        else:
            self.units,self.units2="?","unknown units"
            self.unitsD,self.unitsD2="?","unknown units"
        # sweep data
        self.sweepY = self.trace.magnitude # sweep data (mV or pA)
        self.sweepT = self.trace.times.magnitude # actual sweep times (sec)
        self.sweepStart = float(self.trace.t_start) # time start of sweep (sec)
        self.sweepX2 = self.sweepT-self.trace.t_start.magnitude # sweeps overlap
        self.sweepX = self.sweepX2+sweep*self.sweepInterval # assume no gaps
        if self.derivative:
            self.log.debug("taking derivative")
            #self.sweepD=np.diff(self.sweepY) # take derivative
            self.sweepD=self.sweepY[1:]-self.sweepY[:-1] # better?
            self.sweepD=np.insert(self.sweepD,0,self.sweepD[0]) # add a point
            self.sweepD/=(self.period*1000) # correct for sample rate
        else:
            self.sweepD=[0] # derivative is forced to be empty
        # generate the protocol too
        self.generate_protocol()
"def",
"setsweep",
"(",
"self",
",",
"sweep",
"=",
"0",
",",
"channel",
"=",
"0",
")",
":",
"try",
":",
"sweep",
"=",
"int",
"(",
"sweep",
")",
"except",
":",
"self",
".",
"log",
".",
"error",
"(",
"\"trying to set sweep to [%s]\"",
",",
"sweep",
")",
"return",
"if",
"sweep",
"<",
"0",
":",
"sweep",
"=",
"self",
".",
"sweeps",
"-",
"1",
"-",
"sweep",
"# if negative, start from the end",
"sweep",
"=",
"max",
"(",
"0",
",",
"min",
"(",
"sweep",
",",
"self",
".",
"sweeps",
"-",
"1",
")",
")",
"# correct for out of range sweeps",
"if",
"'sweep'",
"in",
"dir",
"(",
"self",
")",
"and",
"self",
".",
"sweep",
"==",
"sweep",
"and",
"self",
".",
"derivative",
"is",
"False",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"sweep %d already set\"",
",",
"sweep",
")",
"return",
"#self.log.debug(\"loading sweep %d (Ch%d)\",sweep,channel)",
"self",
".",
"channels",
"=",
"self",
".",
"ABFblock",
".",
"segments",
"[",
"sweep",
"]",
".",
"size",
"[",
"\"analogsignals\"",
"]",
"if",
"self",
".",
"channels",
">",
"1",
"and",
"sweep",
"==",
"0",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"WARNING: multichannel not yet supported!\"",
")",
"#TODO:",
"self",
".",
"trace",
"=",
"self",
".",
"ABFblock",
".",
"segments",
"[",
"sweep",
"]",
".",
"analogsignals",
"[",
"channel",
"]",
"self",
".",
"sweep",
"=",
"sweep",
"# currently selected sweep",
"self",
".",
"channel",
"=",
"channel",
"# currently selected channel",
"# sweep information",
"self",
".",
"rate",
"=",
"int",
"(",
"self",
".",
"trace",
".",
"sampling_rate",
")",
"# Hz",
"self",
".",
"period",
"=",
"float",
"(",
"1",
"/",
"self",
".",
"rate",
")",
"# seconds (inverse of sample rate)",
"self",
".",
"pointsPerSec",
"=",
"int",
"(",
"self",
".",
"rate",
")",
"# for easy access",
"self",
".",
"pointsPerMs",
"=",
"int",
"(",
"self",
".",
"rate",
"/",
"1000.0",
")",
"# for easy access",
"self",
".",
"sweepSize",
"=",
"len",
"(",
"self",
".",
"trace",
")",
"# number of data points per sweep",
"self",
".",
"sweepInterval",
"=",
"self",
".",
"trace",
".",
"duration",
".",
"magnitude",
"# sweep interval (seconds)",
"self",
".",
"sweepLength",
"=",
"float",
"(",
"self",
".",
"trace",
".",
"t_stop",
"-",
"self",
".",
"trace",
".",
"t_start",
")",
"# in seconds",
"self",
".",
"length",
"=",
"self",
".",
"sweepLength",
"*",
"self",
".",
"sweeps",
"# length (sec) of total recording",
"self",
".",
"lengthMinutes",
"=",
"self",
".",
"length",
"/",
"60.0",
"# length (minutes) of total recording",
"if",
"str",
"(",
"self",
".",
"trace",
".",
"dimensionality",
")",
"==",
"'pA'",
":",
"self",
".",
"units",
",",
"self",
".",
"units2",
"=",
"\"pA\"",
",",
"\"clamp current (pA)\"",
"self",
".",
"unitsD",
",",
"self",
".",
"unitsD2",
"=",
"\"pA/ms\"",
",",
"\"current velocity (pA/ms)\"",
"self",
".",
"protoUnits",
",",
"self",
".",
"protoUnits2",
"=",
"\"mV\"",
",",
"\"command voltage (mV)\"",
"elif",
"str",
"(",
"self",
".",
"trace",
".",
"dimensionality",
")",
"==",
"'mV'",
":",
"self",
".",
"units",
",",
"self",
".",
"units2",
"=",
"\"mV\"",
",",
"\"membrane potential (mV)\"",
"self",
".",
"unitsD",
",",
"self",
".",
"unitsD2",
"=",
"\"V/s\"",
",",
"\"potential velocity (V/s)\"",
"self",
".",
"protoUnits",
",",
"self",
".",
"protoUnits2",
"=",
"\"pA\"",
",",
"\"command current (pA)\"",
"else",
":",
"self",
".",
"units",
",",
"self",
".",
"units2",
"=",
"\"?\"",
",",
"\"unknown units\"",
"self",
".",
"unitsD",
",",
"self",
".",
"unitsD2",
"=",
"\"?\"",
",",
"\"unknown units\"",
"# sweep data",
"self",
".",
"sweepY",
"=",
"self",
".",
"trace",
".",
"magnitude",
"# sweep data (mV or pA)",
"self",
".",
"sweepT",
"=",
"self",
".",
"trace",
".",
"times",
".",
"magnitude",
"# actual sweep times (sec)",
"self",
".",
"sweepStart",
"=",
"float",
"(",
"self",
".",
"trace",
".",
"t_start",
")",
"# time start of sweep (sec)",
"self",
".",
"sweepX2",
"=",
"self",
".",
"sweepT",
"-",
"self",
".",
"trace",
".",
"t_start",
".",
"magnitude",
"# sweeps overlap",
"self",
".",
"sweepX",
"=",
"self",
".",
"sweepX2",
"+",
"sweep",
"*",
"self",
".",
"sweepInterval",
"# assume no gaps",
"if",
"self",
".",
"derivative",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"taking derivative\"",
")",
"#self.sweepD=np.diff(self.sweepY) # take derivative",
"self",
".",
"sweepD",
"=",
"self",
".",
"sweepY",
"[",
"1",
":",
"]",
"-",
"self",
".",
"sweepY",
"[",
":",
"-",
"1",
"]",
"# better?",
"self",
".",
"sweepD",
"=",
"np",
".",
"insert",
"(",
"self",
".",
"sweepD",
",",
"0",
",",
"self",
".",
"sweepD",
"[",
"0",
"]",
")",
"# add a point",
"self",
".",
"sweepD",
"/=",
"(",
"self",
".",
"period",
"*",
"1000",
")",
"# correct for sample rate",
"else",
":",
"self",
".",
"sweepD",
"=",
"[",
"0",
"]",
"# derivative is forced to be empty",
"# generate the protocol too",
"self",
".",
"generate_protocol",
"(",
")"
] | 52.721311 | 25.52459 |
def getOutEdges(self, label=None):
    """Yield the node's outgoing edges, optionally filtered by label.

    @params label: Optional relationship label to filter the edges
    @returns A generator of Edge objects for the outgoing relationships"""
    relationships = self.neoelement.relationships
    if label:
        outgoing = relationships.outgoing(types=[label])
    else:
        outgoing = relationships.outgoing()
    for raw_edge in outgoing:
        yield Edge(raw_edge)
"def",
"getOutEdges",
"(",
"self",
",",
"label",
"=",
"None",
")",
":",
"if",
"label",
":",
"for",
"edge",
"in",
"self",
".",
"neoelement",
".",
"relationships",
".",
"outgoing",
"(",
"types",
"=",
"[",
"label",
"]",
")",
":",
"yield",
"Edge",
"(",
"edge",
")",
"else",
":",
"for",
"edge",
"in",
"self",
".",
"neoelement",
".",
"relationships",
".",
"outgoing",
"(",
")",
":",
"yield",
"Edge",
"(",
"edge",
")"
] | 41.384615 | 17.846154 |
def login(self, response):
    """
    Store the auth token and user data from a login response.

    @param response: parsed login response containing 'auth' and 'user'
    @raises ApiLoginFailure: if the client still isn't logged in afterwards
    """
    self._state_params['auth'] = response['auth']
    self._user_data = response['user']
    if self.logged_in:
        return
    raise ApiLoginFailure(response)
"def",
"login",
"(",
"self",
",",
"response",
")",
":",
"self",
".",
"_state_params",
"[",
"'auth'",
"]",
"=",
"response",
"[",
"'auth'",
"]",
"self",
".",
"_user_data",
"=",
"response",
"[",
"'user'",
"]",
"if",
"not",
"self",
".",
"logged_in",
":",
"raise",
"ApiLoginFailure",
"(",
"response",
")"
] | 31.833333 | 12.166667 |
def send_video(self, user_id, media_id, title=None,
               description=None, account=None):
    """
    Send a video message via the custom-service message API.

    Details:
    http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

    :param user_id: target user's openid (the ``source`` of a received ``Message``)
    :param media_id: media id of the video, e.g. from :func:`upload_media`
    :param title: optional title of the video message
    :param description: optional description of the video message
    :param account: optional customer-service account to send from
    :return: parsed JSON response

    Usage::

        from wechatpy import WeChatClient

        client = WeChatClient('appid', 'secret')
        res = client.message.send_video('openid', 'media_id', 'title', 'description')
    """
    video = {'media_id': media_id}
    if title:
        video['title'] = title
    if description:
        video['description'] = description
    payload = {
        'touser': user_id,
        'msgtype': 'video',
        'video': video,
    }
    return self._send_custom_message(payload, account=account)
"def",
"send_video",
"(",
"self",
",",
"user_id",
",",
"media_id",
",",
"title",
"=",
"None",
",",
"description",
"=",
"None",
",",
"account",
"=",
"None",
")",
":",
"video_data",
"=",
"{",
"'media_id'",
":",
"media_id",
",",
"}",
"if",
"title",
":",
"video_data",
"[",
"'title'",
"]",
"=",
"title",
"if",
"description",
":",
"video_data",
"[",
"'description'",
"]",
"=",
"description",
"data",
"=",
"{",
"'touser'",
":",
"user_id",
",",
"'msgtype'",
":",
"'video'",
",",
"'video'",
":",
"video_data",
"}",
"return",
"self",
".",
"_send_custom_message",
"(",
"data",
",",
"account",
"=",
"account",
")"
] | 29.111111 | 20.444444 |
def time_delay_from_earth_center(self, right_ascension, declination, t_gps):
    """Return the time delay from the earth center
    """
    earth_center = np.array([0, 0, 0])
    return self.time_delay_from_location(
        earth_center, right_ascension, declination, t_gps)
"def",
"time_delay_from_earth_center",
"(",
"self",
",",
"right_ascension",
",",
"declination",
",",
"t_gps",
")",
":",
"return",
"self",
".",
"time_delay_from_location",
"(",
"np",
".",
"array",
"(",
"[",
"0",
",",
"0",
",",
"0",
"]",
")",
",",
"right_ascension",
",",
"declination",
",",
"t_gps",
")"
] | 53.571429 | 15.714286 |
def delete_thumbnails(relative_source_path, root=None, basedir=None,
                      subdir=None, prefix=None):
    """
    Delete every thumbnail generated for a source image.
    """
    matching = thumbnails_for_file(
        relative_source_path, root, basedir, subdir, prefix)
    return _delete_using_thumbs_list(matching)
"def",
"delete_thumbnails",
"(",
"relative_source_path",
",",
"root",
"=",
"None",
",",
"basedir",
"=",
"None",
",",
"subdir",
"=",
"None",
",",
"prefix",
"=",
"None",
")",
":",
"thumbs",
"=",
"thumbnails_for_file",
"(",
"relative_source_path",
",",
"root",
",",
"basedir",
",",
"subdir",
",",
"prefix",
")",
"return",
"_delete_using_thumbs_list",
"(",
"thumbs",
")"
] | 42 | 10.25 |
def encrypt(self, password, kdf=None, iterations=None):
    '''
    Generate a string with the encrypted key, as in
    :meth:`~eth_account.account.Account.encrypt`, but with the private
    key supplied implicitly from this account.
    '''
    api = self._publicapi
    return api.encrypt(self.privateKey, password, kdf=kdf, iterations=iterations)
"def",
"encrypt",
"(",
"self",
",",
"password",
",",
"kdf",
"=",
"None",
",",
"iterations",
"=",
"None",
")",
":",
"return",
"self",
".",
"_publicapi",
".",
"encrypt",
"(",
"self",
".",
"privateKey",
",",
"password",
",",
"kdf",
"=",
"kdf",
",",
"iterations",
"=",
"iterations",
")"
] | 53 | 32.333333 |
def setup(app):
    """Register the bootstrap HTML theme with Sphinx."""
    # add_html_theme is new in Sphinx 1.6+; older versions are a no-op.
    if not hasattr(app, 'add_html_theme'):
        return
    theme_path = get_html_theme_path()[0]
    app.add_html_theme('bootstrap', os.path.join(theme_path, 'bootstrap'))
"def",
"setup",
"(",
"app",
")",
":",
"# add_html_theme is new in Sphinx 1.6+",
"if",
"hasattr",
"(",
"app",
",",
"'add_html_theme'",
")",
":",
"theme_path",
"=",
"get_html_theme_path",
"(",
")",
"[",
"0",
"]",
"app",
".",
"add_html_theme",
"(",
"'bootstrap'",
",",
"os",
".",
"path",
".",
"join",
"(",
"theme_path",
",",
"'bootstrap'",
")",
")"
] | 39 | 12 |
def setup_seasonal(self):
    """
    Detect an ongoing seasonal holiday and, if one applies, load its
    Shibe picture and holiday words.

    Note: if two or more holidays are defined for a certain date, the
    first one takes precedence.
    """
    # An explicitly requested season always wins.
    if self.ns.season:
        return self.load_season(self.ns.season)
    # With a custom doge (or no doge at all), seasons make no sense.
    if self.ns.doge_path is not None and not self.ns.no_shibe:
        return
    now = datetime.datetime.now()
    for season, data in wow.SEASONS.items():
        (start_month, start_day), (end_month, end_day) = data['dates']
        start_dt = datetime.datetime(now.year, start_month, start_day)
        # Be sane if the holiday season spans over New Year's day.
        end_year = now.year + (1 if start_month > end_month else 0)
        end_dt = datetime.datetime(end_year, end_month, end_day)
        if start_dt <= now <= end_dt:
            # Wow, much holiday!
            return self.load_season(season)
"def",
"setup_seasonal",
"(",
"self",
")",
":",
"# If we've specified a season, just run that one",
"if",
"self",
".",
"ns",
".",
"season",
":",
"return",
"self",
".",
"load_season",
"(",
"self",
".",
"ns",
".",
"season",
")",
"# If we've specified another doge or no doge at all, it does not make",
"# sense to use seasons.",
"if",
"self",
".",
"ns",
".",
"doge_path",
"is",
"not",
"None",
"and",
"not",
"self",
".",
"ns",
".",
"no_shibe",
":",
"return",
"now",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"for",
"season",
",",
"data",
"in",
"wow",
".",
"SEASONS",
".",
"items",
"(",
")",
":",
"start",
",",
"end",
"=",
"data",
"[",
"'dates'",
"]",
"start_dt",
"=",
"datetime",
".",
"datetime",
"(",
"now",
".",
"year",
",",
"start",
"[",
"0",
"]",
",",
"start",
"[",
"1",
"]",
")",
"# Be sane if the holiday season spans over New Year's day.",
"end_dt",
"=",
"datetime",
".",
"datetime",
"(",
"now",
".",
"year",
"+",
"(",
"start",
"[",
"0",
"]",
">",
"end",
"[",
"0",
"]",
"and",
"1",
"or",
"0",
")",
",",
"end",
"[",
"0",
"]",
",",
"end",
"[",
"1",
"]",
")",
"if",
"start_dt",
"<=",
"now",
"<=",
"end_dt",
":",
"# Wow, much holiday!",
"return",
"self",
".",
"load_season",
"(",
"season",
")"
] | 34.53125 | 20.78125 |
def fire_failed_contact_lookup(self, msisdn):
    """
    Fires a webhook in the event of a failed WhatsApp contact lookup.
    """
    payload = {"address": msisdn}
    # raw_hook_event needs a user, which we don't have here, so filter
    # and deliver the hooks for every user manually.
    for hook in Hook.objects.filter(event="whatsapp.failed_contact_check"):
        hook.deliver_hook(
            None, payload_override={"hook": hook.dict(), "data": payload}
        )
"def",
"fire_failed_contact_lookup",
"(",
"self",
",",
"msisdn",
")",
":",
"payload",
"=",
"{",
"\"address\"",
":",
"msisdn",
"}",
"# We cannot user the raw_hook_event here, because we don't have a user, so we",
"# manually filter and send the hooks for all users",
"hooks",
"=",
"Hook",
".",
"objects",
".",
"filter",
"(",
"event",
"=",
"\"whatsapp.failed_contact_check\"",
")",
"for",
"hook",
"in",
"hooks",
":",
"hook",
".",
"deliver_hook",
"(",
"None",
",",
"payload_override",
"=",
"{",
"\"hook\"",
":",
"hook",
".",
"dict",
"(",
")",
",",
"\"data\"",
":",
"payload",
"}",
")"
] | 45 | 18.833333 |
def disable_category(self, category: str, message_to_print: str) -> None:
"""
Disable an entire category of commands
:param category: the category to disable
:param message_to_print: what to print when anything in this category is run or help is called on it
while disabled
The variable COMMAND_NAME can be used as a placeholder for the name of the
command being disabled.
ex: message_to_print = "{} is currently disabled".format(COMMAND_NAME)
"""
all_commands = self.get_all_commands()
for cmd_name in all_commands:
func = self.cmd_func(cmd_name)
if hasattr(func, HELP_CATEGORY) and getattr(func, HELP_CATEGORY) == category:
self.disable_command(cmd_name, message_to_print) | [
"def",
"disable_category",
"(",
"self",
",",
"category",
":",
"str",
",",
"message_to_print",
":",
"str",
")",
"->",
"None",
":",
"all_commands",
"=",
"self",
".",
"get_all_commands",
"(",
")",
"for",
"cmd_name",
"in",
"all_commands",
":",
"func",
"=",
"self",
".",
"cmd_func",
"(",
"cmd_name",
")",
"if",
"hasattr",
"(",
"func",
",",
"HELP_CATEGORY",
")",
"and",
"getattr",
"(",
"func",
",",
"HELP_CATEGORY",
")",
"==",
"category",
":",
"self",
".",
"disable_command",
"(",
"cmd_name",
",",
"message_to_print",
")"
] | 52.235294 | 25.411765 |
def send_request(self, http_request):
"""
Send a request and get response
"""
self.request_object = http_request
self.build_socket()
self.build_request()
try:
self.sock.send(self.request)
except socket.error as err:
raise errors.TestError(
'We were unable to send the request to the socket',
{
'msg': err,
'function': 'http.HttpUA.send_request'
})
finally:
self.get_response() | [
"def",
"send_request",
"(",
"self",
",",
"http_request",
")",
":",
"self",
".",
"request_object",
"=",
"http_request",
"self",
".",
"build_socket",
"(",
")",
"self",
".",
"build_request",
"(",
")",
"try",
":",
"self",
".",
"sock",
".",
"send",
"(",
"self",
".",
"request",
")",
"except",
"socket",
".",
"error",
"as",
"err",
":",
"raise",
"errors",
".",
"TestError",
"(",
"'We were unable to send the request to the socket'",
",",
"{",
"'msg'",
":",
"err",
",",
"'function'",
":",
"'http.HttpUA.send_request'",
"}",
")",
"finally",
":",
"self",
".",
"get_response",
"(",
")"
] | 31.111111 | 10.888889 |
def write_slide_list(self, logname, slides):
""" Write list of slides to logfile """
# Write slides.txt with list of slides
with open('%s/%s' % (self.cache, logname), 'w') as logfile:
for slide in slides:
heading = slide['heading']['text']
filename = self.get_image_name(heading)
print('%s,%d' % (filename, slide.get('time', 0)),
file=logfile) | [
"def",
"write_slide_list",
"(",
"self",
",",
"logname",
",",
"slides",
")",
":",
"# Write slides.txt with list of slides",
"with",
"open",
"(",
"'%s/%s'",
"%",
"(",
"self",
".",
"cache",
",",
"logname",
")",
",",
"'w'",
")",
"as",
"logfile",
":",
"for",
"slide",
"in",
"slides",
":",
"heading",
"=",
"slide",
"[",
"'heading'",
"]",
"[",
"'text'",
"]",
"filename",
"=",
"self",
".",
"get_image_name",
"(",
"heading",
")",
"print",
"(",
"'%s,%d'",
"%",
"(",
"filename",
",",
"slide",
".",
"get",
"(",
"'time'",
",",
"0",
")",
")",
",",
"file",
"=",
"logfile",
")"
] | 45.7 | 12.4 |
def format_cffi_externs(cls):
"""Generate stubs for the cffi bindings from @_extern_decl methods."""
extern_decls = [
f.extern_signature.pretty_print()
for _, f in cls._extern_fields.items()
]
return (
'extern "Python" {\n'
+ '\n'.join(extern_decls)
+ '\n}\n') | [
"def",
"format_cffi_externs",
"(",
"cls",
")",
":",
"extern_decls",
"=",
"[",
"f",
".",
"extern_signature",
".",
"pretty_print",
"(",
")",
"for",
"_",
",",
"f",
"in",
"cls",
".",
"_extern_fields",
".",
"items",
"(",
")",
"]",
"return",
"(",
"'extern \"Python\" {\\n'",
"+",
"'\\n'",
".",
"join",
"(",
"extern_decls",
")",
"+",
"'\\n}\\n'",
")"
] | 29.7 | 14.5 |
def defaults_decorator(defaults):
"""Decorator to append default kwargs to a function.
"""
def decorator(func):
"""Function that appends default kwargs to a function.
"""
kwargs = dict(header='Keyword arguments\n-----------------\n',
indent=' ',
footer='\n')
doc = defaults_docstring(defaults, **kwargs)
if func.__doc__ is None:
func.__doc__ = ''
func.__doc__ += doc
return func
return decorator | [
"def",
"defaults_decorator",
"(",
"defaults",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"\"\"\"Function that appends default kwargs to a function.\n \"\"\"",
"kwargs",
"=",
"dict",
"(",
"header",
"=",
"'Keyword arguments\\n-----------------\\n'",
",",
"indent",
"=",
"' '",
",",
"footer",
"=",
"'\\n'",
")",
"doc",
"=",
"defaults_docstring",
"(",
"defaults",
",",
"*",
"*",
"kwargs",
")",
"if",
"func",
".",
"__doc__",
"is",
"None",
":",
"func",
".",
"__doc__",
"=",
"''",
"func",
".",
"__doc__",
"+=",
"doc",
"return",
"func",
"return",
"decorator"
] | 31.9375 | 11.9375 |
def _find_update_fields(cls, field, doc):
"""Find the fields in the update document which match the given field.
Both the field and the top level keys in the doc may be in dot
notation, eg "a.b.c". Returns a list of tuples (path, field_value) or
the empty list if the field is not present.
"""
def find_partial_matches():
for key in doc:
if len(key) > len(field):
# Handle case where field is a prefix of key, eg field is
# 'a' and key is 'a.b'.
if key.startswith(field) and key[len(field)] == ".":
yield [key], doc[key]
# Continue searching, there may be multiple matches.
# For example, field 'a' should match 'a.b' and 'a.c'.
elif len(key) < len(field):
# Handle case where key is a prefix of field, eg field is
# 'a.b' and key is 'a'.
if field.startswith(key) and field[len(key)] == ".":
# Search for the remaining part of the field
matched = cls._find_field(field[len(key) + 1 :], doc[key])
if matched:
# Add the top level key to the path.
match = matched[0]
match[0].insert(0, key)
yield match
# Stop searching, it's not possible for any other
# keys in the update doc to match this field.
return
try:
return [([field], doc[field])]
except KeyError:
# Field does not exactly match any key in the update doc.
return list(find_partial_matches()) | [
"def",
"_find_update_fields",
"(",
"cls",
",",
"field",
",",
"doc",
")",
":",
"def",
"find_partial_matches",
"(",
")",
":",
"for",
"key",
"in",
"doc",
":",
"if",
"len",
"(",
"key",
")",
">",
"len",
"(",
"field",
")",
":",
"# Handle case where field is a prefix of key, eg field is",
"# 'a' and key is 'a.b'.",
"if",
"key",
".",
"startswith",
"(",
"field",
")",
"and",
"key",
"[",
"len",
"(",
"field",
")",
"]",
"==",
"\".\"",
":",
"yield",
"[",
"key",
"]",
",",
"doc",
"[",
"key",
"]",
"# Continue searching, there may be multiple matches.",
"# For example, field 'a' should match 'a.b' and 'a.c'.",
"elif",
"len",
"(",
"key",
")",
"<",
"len",
"(",
"field",
")",
":",
"# Handle case where key is a prefix of field, eg field is",
"# 'a.b' and key is 'a'.",
"if",
"field",
".",
"startswith",
"(",
"key",
")",
"and",
"field",
"[",
"len",
"(",
"key",
")",
"]",
"==",
"\".\"",
":",
"# Search for the remaining part of the field",
"matched",
"=",
"cls",
".",
"_find_field",
"(",
"field",
"[",
"len",
"(",
"key",
")",
"+",
"1",
":",
"]",
",",
"doc",
"[",
"key",
"]",
")",
"if",
"matched",
":",
"# Add the top level key to the path.",
"match",
"=",
"matched",
"[",
"0",
"]",
"match",
"[",
"0",
"]",
".",
"insert",
"(",
"0",
",",
"key",
")",
"yield",
"match",
"# Stop searching, it's not possible for any other",
"# keys in the update doc to match this field.",
"return",
"try",
":",
"return",
"[",
"(",
"[",
"field",
"]",
",",
"doc",
"[",
"field",
"]",
")",
"]",
"except",
"KeyError",
":",
"# Field does not exactly match any key in the update doc.",
"return",
"list",
"(",
"find_partial_matches",
"(",
")",
")"
] | 48.864865 | 19.324324 |
def spit_config(self, conf_file, firstwordonly=False):
"""conf_file a file opened for writing."""
cfg = ConfigParser.RawConfigParser()
for sec in _CONFIG_SECS:
cfg.add_section(sec)
sec = 'channels'
for i in sorted(self.pack.D):
cfg.set(sec, str(i),
self.pack.name(i, firstwordonly=firstwordonly))
sec = 'conditions'
for k in self.sorted_conkeys():
cfg.set(sec, k, self.conditions[k])
cfg.write(conf_file) | [
"def",
"spit_config",
"(",
"self",
",",
"conf_file",
",",
"firstwordonly",
"=",
"False",
")",
":",
"cfg",
"=",
"ConfigParser",
".",
"RawConfigParser",
"(",
")",
"for",
"sec",
"in",
"_CONFIG_SECS",
":",
"cfg",
".",
"add_section",
"(",
"sec",
")",
"sec",
"=",
"'channels'",
"for",
"i",
"in",
"sorted",
"(",
"self",
".",
"pack",
".",
"D",
")",
":",
"cfg",
".",
"set",
"(",
"sec",
",",
"str",
"(",
"i",
")",
",",
"self",
".",
"pack",
".",
"name",
"(",
"i",
",",
"firstwordonly",
"=",
"firstwordonly",
")",
")",
"sec",
"=",
"'conditions'",
"for",
"k",
"in",
"self",
".",
"sorted_conkeys",
"(",
")",
":",
"cfg",
".",
"set",
"(",
"sec",
",",
"k",
",",
"self",
".",
"conditions",
"[",
"k",
"]",
")",
"cfg",
".",
"write",
"(",
"conf_file",
")"
] | 30.117647 | 16.588235 |
def submit_status_external_cmd(cmd_file, status_file):
''' Submits the status lines in the status_file to Nagios' external cmd file.
'''
try:
with open(cmd_file, 'a') as cmd_file:
cmd_file.write(status_file.read())
except IOError:
exit("Fatal error: Unable to write to Nagios external command file '%s'.\n"
"Make sure that the file exists and is writable." % (cmd_file,)) | [
"def",
"submit_status_external_cmd",
"(",
"cmd_file",
",",
"status_file",
")",
":",
"try",
":",
"with",
"open",
"(",
"cmd_file",
",",
"'a'",
")",
"as",
"cmd_file",
":",
"cmd_file",
".",
"write",
"(",
"status_file",
".",
"read",
"(",
")",
")",
"except",
"IOError",
":",
"exit",
"(",
"\"Fatal error: Unable to write to Nagios external command file '%s'.\\n\"",
"\"Make sure that the file exists and is writable.\"",
"%",
"(",
"cmd_file",
",",
")",
")"
] | 43.777778 | 24.222222 |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'request') and self.request is not None:
_dict['request'] = self.request._to_dict()
if hasattr(self, 'response') and self.response is not None:
_dict['response'] = self.response._to_dict()
if hasattr(self, 'log_id') and self.log_id is not None:
_dict['log_id'] = self.log_id
if hasattr(self,
'request_timestamp') and self.request_timestamp is not None:
_dict['request_timestamp'] = self.request_timestamp
if hasattr(
self,
'response_timestamp') and self.response_timestamp is not None:
_dict['response_timestamp'] = self.response_timestamp
if hasattr(self, 'workspace_id') and self.workspace_id is not None:
_dict['workspace_id'] = self.workspace_id
if hasattr(self, 'language') and self.language is not None:
_dict['language'] = self.language
return _dict | [
"def",
"_to_dict",
"(",
"self",
")",
":",
"_dict",
"=",
"{",
"}",
"if",
"hasattr",
"(",
"self",
",",
"'request'",
")",
"and",
"self",
".",
"request",
"is",
"not",
"None",
":",
"_dict",
"[",
"'request'",
"]",
"=",
"self",
".",
"request",
".",
"_to_dict",
"(",
")",
"if",
"hasattr",
"(",
"self",
",",
"'response'",
")",
"and",
"self",
".",
"response",
"is",
"not",
"None",
":",
"_dict",
"[",
"'response'",
"]",
"=",
"self",
".",
"response",
".",
"_to_dict",
"(",
")",
"if",
"hasattr",
"(",
"self",
",",
"'log_id'",
")",
"and",
"self",
".",
"log_id",
"is",
"not",
"None",
":",
"_dict",
"[",
"'log_id'",
"]",
"=",
"self",
".",
"log_id",
"if",
"hasattr",
"(",
"self",
",",
"'request_timestamp'",
")",
"and",
"self",
".",
"request_timestamp",
"is",
"not",
"None",
":",
"_dict",
"[",
"'request_timestamp'",
"]",
"=",
"self",
".",
"request_timestamp",
"if",
"hasattr",
"(",
"self",
",",
"'response_timestamp'",
")",
"and",
"self",
".",
"response_timestamp",
"is",
"not",
"None",
":",
"_dict",
"[",
"'response_timestamp'",
"]",
"=",
"self",
".",
"response_timestamp",
"if",
"hasattr",
"(",
"self",
",",
"'workspace_id'",
")",
"and",
"self",
".",
"workspace_id",
"is",
"not",
"None",
":",
"_dict",
"[",
"'workspace_id'",
"]",
"=",
"self",
".",
"workspace_id",
"if",
"hasattr",
"(",
"self",
",",
"'language'",
")",
"and",
"self",
".",
"language",
"is",
"not",
"None",
":",
"_dict",
"[",
"'language'",
"]",
"=",
"self",
".",
"language",
"return",
"_dict"
] | 50.238095 | 20.47619 |
def remove_data_flows_with_data_port_id(self, data_port_id):
"""Remove an data ports whose from_key or to_key equals the passed data_port_id
:param int data_port_id: the id of a data_port of which all data_flows should be removed, the id can be a input or
output data port id
"""
# delete all data flows in parent related to data_port_id and self.state_id = external data flows
# checking is_root_state_of_library is only necessary in case of scoped variables, as the scoped variables
# they are not destroyed by the library state, as the library state does not have a reference to the scoped vars
if not self.is_root_state and not self.is_root_state_of_library:
data_flow_ids_to_remove = []
for data_flow_id, data_flow in self.parent.data_flows.items():
if data_flow.from_state == self.state_id and data_flow.from_key == data_port_id or \
data_flow.to_state == self.state_id and data_flow.to_key == data_port_id:
data_flow_ids_to_remove.append(data_flow_id)
for data_flow_id in data_flow_ids_to_remove:
self.parent.remove_data_flow(data_flow_id)
# delete all data flows in self related to data_port_id and self.state_id = internal data flows
data_flow_ids_to_remove = []
for data_flow_id, data_flow in self.data_flows.items():
if data_flow.from_state == self.state_id and data_flow.from_key == data_port_id or \
data_flow.to_state == self.state_id and data_flow.to_key == data_port_id:
data_flow_ids_to_remove.append(data_flow_id)
for data_flow_id in data_flow_ids_to_remove:
self.remove_data_flow(data_flow_id) | [
"def",
"remove_data_flows_with_data_port_id",
"(",
"self",
",",
"data_port_id",
")",
":",
"# delete all data flows in parent related to data_port_id and self.state_id = external data flows",
"# checking is_root_state_of_library is only necessary in case of scoped variables, as the scoped variables",
"# they are not destroyed by the library state, as the library state does not have a reference to the scoped vars",
"if",
"not",
"self",
".",
"is_root_state",
"and",
"not",
"self",
".",
"is_root_state_of_library",
":",
"data_flow_ids_to_remove",
"=",
"[",
"]",
"for",
"data_flow_id",
",",
"data_flow",
"in",
"self",
".",
"parent",
".",
"data_flows",
".",
"items",
"(",
")",
":",
"if",
"data_flow",
".",
"from_state",
"==",
"self",
".",
"state_id",
"and",
"data_flow",
".",
"from_key",
"==",
"data_port_id",
"or",
"data_flow",
".",
"to_state",
"==",
"self",
".",
"state_id",
"and",
"data_flow",
".",
"to_key",
"==",
"data_port_id",
":",
"data_flow_ids_to_remove",
".",
"append",
"(",
"data_flow_id",
")",
"for",
"data_flow_id",
"in",
"data_flow_ids_to_remove",
":",
"self",
".",
"parent",
".",
"remove_data_flow",
"(",
"data_flow_id",
")",
"# delete all data flows in self related to data_port_id and self.state_id = internal data flows",
"data_flow_ids_to_remove",
"=",
"[",
"]",
"for",
"data_flow_id",
",",
"data_flow",
"in",
"self",
".",
"data_flows",
".",
"items",
"(",
")",
":",
"if",
"data_flow",
".",
"from_state",
"==",
"self",
".",
"state_id",
"and",
"data_flow",
".",
"from_key",
"==",
"data_port_id",
"or",
"data_flow",
".",
"to_state",
"==",
"self",
".",
"state_id",
"and",
"data_flow",
".",
"to_key",
"==",
"data_port_id",
":",
"data_flow_ids_to_remove",
".",
"append",
"(",
"data_flow_id",
")",
"for",
"data_flow_id",
"in",
"data_flow_ids_to_remove",
":",
"self",
".",
"remove_data_flow",
"(",
"data_flow_id",
")"
] | 62.37931 | 35.827586 |
def gallery_image_versions(self):
"""Instance depends on the API version:
* 2018-06-01: :class:`GalleryImageVersionsOperations<azure.mgmt.compute.v2018_06_01.operations.GalleryImageVersionsOperations>`
* 2019-03-01: :class:`GalleryImageVersionsOperations<azure.mgmt.compute.v2019_03_01.operations.GalleryImageVersionsOperations>`
"""
api_version = self._get_api_version('gallery_image_versions')
if api_version == '2018-06-01':
from .v2018_06_01.operations import GalleryImageVersionsOperations as OperationClass
elif api_version == '2019-03-01':
from .v2019_03_01.operations import GalleryImageVersionsOperations as OperationClass
else:
raise NotImplementedError("APIVersion {} is not available".format(api_version))
return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) | [
"def",
"gallery_image_versions",
"(",
"self",
")",
":",
"api_version",
"=",
"self",
".",
"_get_api_version",
"(",
"'gallery_image_versions'",
")",
"if",
"api_version",
"==",
"'2018-06-01'",
":",
"from",
".",
"v2018_06_01",
".",
"operations",
"import",
"GalleryImageVersionsOperations",
"as",
"OperationClass",
"elif",
"api_version",
"==",
"'2019-03-01'",
":",
"from",
".",
"v2019_03_01",
".",
"operations",
"import",
"GalleryImageVersionsOperations",
"as",
"OperationClass",
"else",
":",
"raise",
"NotImplementedError",
"(",
"\"APIVersion {} is not available\"",
".",
"format",
"(",
"api_version",
")",
")",
"return",
"OperationClass",
"(",
"self",
".",
"_client",
",",
"self",
".",
"config",
",",
"Serializer",
"(",
"self",
".",
"_models_dict",
"(",
"api_version",
")",
")",
",",
"Deserializer",
"(",
"self",
".",
"_models_dict",
"(",
"api_version",
")",
")",
")"
] | 68.428571 | 40.714286 |
def reset(ctx):
"""
Reset OpenPGP application.
This action will wipe all OpenPGP data, and set all PINs to their default
values.
"""
click.echo("Resetting OpenPGP data, don't remove your YubiKey...")
ctx.obj['controller'].reset()
click.echo('Success! All data has been cleared and default PINs are set.')
echo_default_pins() | [
"def",
"reset",
"(",
"ctx",
")",
":",
"click",
".",
"echo",
"(",
"\"Resetting OpenPGP data, don't remove your YubiKey...\"",
")",
"ctx",
".",
"obj",
"[",
"'controller'",
"]",
".",
"reset",
"(",
")",
"click",
".",
"echo",
"(",
"'Success! All data has been cleared and default PINs are set.'",
")",
"echo_default_pins",
"(",
")"
] | 31.909091 | 21.181818 |
def receive(self, msg):
"""
Returns a (receiver, msg) pair, where receiver is `None` if no route for
the message was found, or otherwise an object with a `receive` method
that can accept that `msg`.
"""
x = self.routing
while not isinstance(x, ActionList):
if not x or not msg:
return None, msg
if not isinstance(x, dict):
raise ValueError('Unexpected type %s' % type(x))
_, value = msg.popitem(last=False)
x = x.get(str(value))
return x, msg | [
"def",
"receive",
"(",
"self",
",",
"msg",
")",
":",
"x",
"=",
"self",
".",
"routing",
"while",
"not",
"isinstance",
"(",
"x",
",",
"ActionList",
")",
":",
"if",
"not",
"x",
"or",
"not",
"msg",
":",
"return",
"None",
",",
"msg",
"if",
"not",
"isinstance",
"(",
"x",
",",
"dict",
")",
":",
"raise",
"ValueError",
"(",
"'Unexpected type %s'",
"%",
"type",
"(",
"x",
")",
")",
"_",
",",
"value",
"=",
"msg",
".",
"popitem",
"(",
"last",
"=",
"False",
")",
"x",
"=",
"x",
".",
"get",
"(",
"str",
"(",
"value",
")",
")",
"return",
"x",
",",
"msg"
] | 31.777778 | 17.333333 |
def count_alleles_subpops(self, subpops, max_allele=None):
"""Count alleles for multiple subpopulations simultaneously.
Parameters
----------
subpops : dict (string -> sequence of ints)
Mapping of subpopulation names to sample indices.
max_allele : int, optional
The highest allele index to count. Alleles above this will be
ignored.
Returns
-------
out : dict (string -> AlleleCountsArray)
A mapping of subpopulation names to allele counts arrays.
"""
if max_allele is None:
max_allele = self.max()
out = {name: self.count_alleles(max_allele=max_allele, subpop=subpop)
for name, subpop in subpops.items()}
return out | [
"def",
"count_alleles_subpops",
"(",
"self",
",",
"subpops",
",",
"max_allele",
"=",
"None",
")",
":",
"if",
"max_allele",
"is",
"None",
":",
"max_allele",
"=",
"self",
".",
"max",
"(",
")",
"out",
"=",
"{",
"name",
":",
"self",
".",
"count_alleles",
"(",
"max_allele",
"=",
"max_allele",
",",
"subpop",
"=",
"subpop",
")",
"for",
"name",
",",
"subpop",
"in",
"subpops",
".",
"items",
"(",
")",
"}",
"return",
"out"
] | 30.8 | 22.6 |
def decode(self, images, save=None, round=4, names=None, **kwargs):
""" Decodes a set of images.
Args:
images: The images to decode. Can be:
- A single String specifying the filename of the image to decode
- A list of filenames
- A single NumPy array containing the image data
save: Optional filename to save results to. If None (default), returns
all results as an array.
round: Optional integer indicating number of decimals to round result
to. Defaults to 4.
names: Optional list of names corresponding to the images in filenames.
If passed, must be of same length and in same order as filenames.
By default, the columns in the output will be named using the image
filenames.
Returns:
An n_features x n_files numpy array, where each feature is a row and
each image is a column. The meaning of the values depends on the
decoding method used. """
if isinstance(images, string_types):
images = [images]
if isinstance(images, list):
imgs_to_decode = imageutils.load_imgs(images, self.masker)
else:
imgs_to_decode = images
methods = {
'pearson': self._pearson_correlation,
'dot': self._dot_product,
'roi': self._roi_association
}
result = np.around(
methods[self.method](imgs_to_decode, **kwargs), round)
# if save is not None:
if names is None:
if type(images).__module__ == np.__name__:
names = ['image_%d' % i for i in range(images.shape[1])]
elif self.method == 'roi':
names = ['cluster_%d' % i for i in range(result.shape[1])]
else:
names = images
result = pd.DataFrame(result, columns=names, index=self.feature_names)
if save is not None:
result.to_csv(save, index_label='Feature')
return result | [
"def",
"decode",
"(",
"self",
",",
"images",
",",
"save",
"=",
"None",
",",
"round",
"=",
"4",
",",
"names",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"images",
",",
"string_types",
")",
":",
"images",
"=",
"[",
"images",
"]",
"if",
"isinstance",
"(",
"images",
",",
"list",
")",
":",
"imgs_to_decode",
"=",
"imageutils",
".",
"load_imgs",
"(",
"images",
",",
"self",
".",
"masker",
")",
"else",
":",
"imgs_to_decode",
"=",
"images",
"methods",
"=",
"{",
"'pearson'",
":",
"self",
".",
"_pearson_correlation",
",",
"'dot'",
":",
"self",
".",
"_dot_product",
",",
"'roi'",
":",
"self",
".",
"_roi_association",
"}",
"result",
"=",
"np",
".",
"around",
"(",
"methods",
"[",
"self",
".",
"method",
"]",
"(",
"imgs_to_decode",
",",
"*",
"*",
"kwargs",
")",
",",
"round",
")",
"# if save is not None:",
"if",
"names",
"is",
"None",
":",
"if",
"type",
"(",
"images",
")",
".",
"__module__",
"==",
"np",
".",
"__name__",
":",
"names",
"=",
"[",
"'image_%d'",
"%",
"i",
"for",
"i",
"in",
"range",
"(",
"images",
".",
"shape",
"[",
"1",
"]",
")",
"]",
"elif",
"self",
".",
"method",
"==",
"'roi'",
":",
"names",
"=",
"[",
"'cluster_%d'",
"%",
"i",
"for",
"i",
"in",
"range",
"(",
"result",
".",
"shape",
"[",
"1",
"]",
")",
"]",
"else",
":",
"names",
"=",
"images",
"result",
"=",
"pd",
".",
"DataFrame",
"(",
"result",
",",
"columns",
"=",
"names",
",",
"index",
"=",
"self",
".",
"feature_names",
")",
"if",
"save",
"is",
"not",
"None",
":",
"result",
".",
"to_csv",
"(",
"save",
",",
"index_label",
"=",
"'Feature'",
")",
"return",
"result"
] | 37.296296 | 23.240741 |
def _tag_net_direction(data):
"""Create a tag based on the direction of the traffic"""
# IP or IPv6
src = data['packet']['src_domain']
dst = data['packet']['dst_domain']
if src == 'internal':
if dst == 'internal' or 'multicast' in dst or 'broadcast' in dst:
return 'internal'
else:
return 'outgoing'
elif dst == 'internal':
return 'incoming'
else:
return None | [
"def",
"_tag_net_direction",
"(",
"data",
")",
":",
"# IP or IPv6",
"src",
"=",
"data",
"[",
"'packet'",
"]",
"[",
"'src_domain'",
"]",
"dst",
"=",
"data",
"[",
"'packet'",
"]",
"[",
"'dst_domain'",
"]",
"if",
"src",
"==",
"'internal'",
":",
"if",
"dst",
"==",
"'internal'",
"or",
"'multicast'",
"in",
"dst",
"or",
"'broadcast'",
"in",
"dst",
":",
"return",
"'internal'",
"else",
":",
"return",
"'outgoing'",
"elif",
"dst",
"==",
"'internal'",
":",
"return",
"'incoming'",
"else",
":",
"return",
"None"
] | 32.133333 | 14.933333 |
def segment(self, value=None, scope=None, metric_scope=None, **selection):
"""
Return a new query, limited to a segment of all users or sessions.
Accepts segment objects, filtered segment objects and segment names:
```python
query.segment(account.segments['browser'])
query.segment('browser')
query.segment(account.segments['browser'].any('Chrome', 'Firefox'))
```
Segment can also accept a segment expression when you pass
in a `type` argument. The type argument can be either `users`
or `sessions`. This is pretty close to the metal.
```python
# will be translated into `users::condition::perUser::ga:sessions>10`
query.segment('condition::perUser::ga:sessions>10', type='users')
```
See the [Google Analytics dynamic segments documentation][segments]
You can also use the `any`, `all`, `followed_by` and
`immediately_followed_by` functions in this module to
chain together segments.
Everything about how segments get handled is still in flux.
Feel free to propose ideas for a nicer interface on
the [GitHub issues page][issues]
[segments]: https://developers.google.com/analytics/devguides/reporting/core/v3/segments#reference
[issues]: https://github.com/debrouwere/google-analytics/issues
"""
"""
Technical note to self about segments:
* users or sessions
* sequence or condition
* scope (perHit, perSession, perUser -- gte primary scope)
Multiple conditions can be ANDed or ORed together; these two are equivalent
users::condition::ga:revenue>10;ga:sessionDuration>60
users::condition::ga:revenue>10;users::condition::ga:sessionDuration>60
For sequences, prepending ^ means the first part of the sequence has to match
the first session/hit/...
* users and sessions conditions can be combined (but only with AND)
* sequences and conditions can also be combined (but only with AND)
sessions::sequence::ga:browser==Chrome;
condition::perHit::ga:timeOnPage>5
->>
ga:deviceCategory==mobile;ga:revenue>10;
users::sequence::ga:deviceCategory==desktop
->>
ga:deviceCategory=mobile;
ga:revenue>100;
condition::ga:browser==Chrome
Problem: keyword arguments are passed as a dictionary, not an ordered dictionary!
So e.g. this is risky
query.sessions(time_on_page__gt=5, device_category='mobile', followed_by=True)
"""
SCOPES = {
'hits': 'perHit',
'sessions': 'perSession',
'users': 'perUser',
}
segments = self.meta.setdefault('segments', [])
if value and len(selection):
raise ValueError("Cannot specify a filter string and a filter keyword selection at the same time.")
elif value:
value = [self.api.segments.serialize(value)]
elif len(selection):
if not scope:
raise ValueError("Scope is required. Choose from: users, sessions.")
if metric_scope:
metric_scope = SCOPES[metric_scope]
value = select(self.api.columns, selection)
value = [[scope, 'condition', metric_scope, condition] for condition in value]
value = ['::'.join(filter(None, condition)) for condition in value]
segments.append(value)
self.raw['segment'] = utils.paste(segments, ',', ';')
return self | [
"def",
"segment",
"(",
"self",
",",
"value",
"=",
"None",
",",
"scope",
"=",
"None",
",",
"metric_scope",
"=",
"None",
",",
"*",
"*",
"selection",
")",
":",
"\"\"\"\n Technical note to self about segments:\n\n * users or sessions\n * sequence or condition\n * scope (perHit, perSession, perUser -- gte primary scope)\n\n Multiple conditions can be ANDed or ORed together; these two are equivalent\n\n users::condition::ga:revenue>10;ga:sessionDuration>60\n users::condition::ga:revenue>10;users::condition::ga:sessionDuration>60\n\n For sequences, prepending ^ means the first part of the sequence has to match\n the first session/hit/...\n\n * users and sessions conditions can be combined (but only with AND)\n * sequences and conditions can also be combined (but only with AND)\n\n sessions::sequence::ga:browser==Chrome;\n condition::perHit::ga:timeOnPage>5\n ->>\n ga:deviceCategory==mobile;ga:revenue>10;\n\n users::sequence::ga:deviceCategory==desktop\n ->>\n ga:deviceCategory=mobile;\n ga:revenue>100;\n condition::ga:browser==Chrome\n\n Problem: keyword arguments are passed as a dictionary, not an ordered dictionary!\n So e.g. this is risky\n\n query.sessions(time_on_page__gt=5, device_category='mobile', followed_by=True)\n \"\"\"",
"SCOPES",
"=",
"{",
"'hits'",
":",
"'perHit'",
",",
"'sessions'",
":",
"'perSession'",
",",
"'users'",
":",
"'perUser'",
",",
"}",
"segments",
"=",
"self",
".",
"meta",
".",
"setdefault",
"(",
"'segments'",
",",
"[",
"]",
")",
"if",
"value",
"and",
"len",
"(",
"selection",
")",
":",
"raise",
"ValueError",
"(",
"\"Cannot specify a filter string and a filter keyword selection at the same time.\"",
")",
"elif",
"value",
":",
"value",
"=",
"[",
"self",
".",
"api",
".",
"segments",
".",
"serialize",
"(",
"value",
")",
"]",
"elif",
"len",
"(",
"selection",
")",
":",
"if",
"not",
"scope",
":",
"raise",
"ValueError",
"(",
"\"Scope is required. Choose from: users, sessions.\"",
")",
"if",
"metric_scope",
":",
"metric_scope",
"=",
"SCOPES",
"[",
"metric_scope",
"]",
"value",
"=",
"select",
"(",
"self",
".",
"api",
".",
"columns",
",",
"selection",
")",
"value",
"=",
"[",
"[",
"scope",
",",
"'condition'",
",",
"metric_scope",
",",
"condition",
"]",
"for",
"condition",
"in",
"value",
"]",
"value",
"=",
"[",
"'::'",
".",
"join",
"(",
"filter",
"(",
"None",
",",
"condition",
")",
")",
"for",
"condition",
"in",
"value",
"]",
"segments",
".",
"append",
"(",
"value",
")",
"self",
".",
"raw",
"[",
"'segment'",
"]",
"=",
"utils",
".",
"paste",
"(",
"segments",
",",
"','",
",",
"';'",
")",
"return",
"self"
] | 37.147368 | 26.410526 |
def get_file(self, key, fp, headers, cb=None, num_cb=10, torrent=False,
                 version_id=None):
        """
        Retrieves a file from a Key
        :type key: :class:`boto.s3.key.Key` or subclass
        :param key: The Key object from which upload is to be downloaded
        :type fp: file
        :param fp: File pointer into which data should be downloaded
        :type headers: string
        :param: headers to send when retrieving the files
        :type cb: function
        :param cb: (optional) a callback function that will be called to report
            progress on the download.  The callback should accept two integer
            parameters, the first representing the number of bytes that have
            been successfully transmitted from the storage service and
            the second representing the total number of bytes that need
            to be transmitted.
        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with the cb
            parameter this parameter determines the granularity of the callback
            by defining the maximum number of times the callback will be
            called during the file transfer.
        :type torrent: bool
        :param torrent: Flag for whether to get a torrent for the file
        :type version_id: string
        :param version_id: The version ID (optional)
        Raises ResumableDownloadException if a problem occurs during
            the transfer.
        """
        # NOTE: this module is Python 2 code (print statements, "except E, e"
        # clauses); do not port individual lines in isolation.
        debug = key.bucket.connection.debug
        if not headers:
            headers = {}
        # Use num-retries from constructor if one was provided; else check
        # for a value specified in the boto config file; else default to 5.
        if self.num_retries is None:
            self.num_retries = config.getint('Boto', 'num_retries', 5)
        # Counts consecutive attempts that added no bytes to fp; reset to 0
        # whenever an attempt makes progress, so the retry budget only limits
        # attempts that are truly stuck.
        progress_less_iterations = 0
        while True:  # Retry as long as we're making progress.
            # Snapshot the local file size so we can tell, after a failed
            # attempt, whether any new bytes were downloaded.
            had_file_bytes_before_attempt = get_cur_file_size(fp)
            try:
                self._attempt_resumable_download(key, fp, headers, cb, num_cb,
                                                 torrent, version_id)
                # Download succceded, so remove the tracker file (if have one).
                self._remove_tracker_file()
                # Previously, check_final_md5() was called here to validate 
                # downloaded file's checksum, however, to be consistent with
                # non-resumable downloads, this call was removed. Checksum
                # validation of file contents should be done by the caller.
                if debug >= 1:
                    print 'Resumable download complete.'
                return
            except self.RETRYABLE_EXCEPTIONS, e:
                if debug >= 1:
                    print('Caught exception (%s)' % e.__repr__())
                if isinstance(e, IOError) and e.errno == errno.EPIPE:
                    # Broken pipe error causes httplib to immediately
                    # close the socket (http://bugs.python.org/issue5542),
                    # so we need to close and reopen the key before resuming
                    # the download.
                    # override_num_retries=0 makes this re-open attempt fail
                    # fast; the outer while-loop owns the retry policy.
                    key.get_file(fp, headers, cb, num_cb, torrent, version_id,
                                 override_num_retries=0)
            except ResumableDownloadException, e:
                # Dispositions distinguish "give up in this process only"
                # (keep tracker state for a later process) from "abort
                # entirely" (also delete the tracker file).
                if (e.disposition ==
                    ResumableTransferDisposition.ABORT_CUR_PROCESS):
                    if debug >= 1:
                        print('Caught non-retryable ResumableDownloadException '
                              '(%s)' % e.message)
                    raise
                elif (e.disposition ==
                    ResumableTransferDisposition.ABORT):
                    if debug >= 1:
                        print('Caught non-retryable ResumableDownloadException '
                              '(%s); aborting and removing tracker file' %
                              e.message)
                    self._remove_tracker_file()
                    raise
                else:
                    if debug >= 1:
                        print('Caught ResumableDownloadException (%s) - will '
                              'retry' % e.message)
            # At this point we had a re-tryable failure; see if made progress.
            if get_cur_file_size(fp) > had_file_bytes_before_attempt:
                progress_less_iterations = 0
            else:
                progress_less_iterations += 1
            if progress_less_iterations > self.num_retries:
                # Don't retry any longer in the current process.
                raise ResumableDownloadException(
                    'Too many resumable download attempts failed without '
                    'progress. You might try this download again later',
                    ResumableTransferDisposition.ABORT_CUR_PROCESS)
            # Close the key, in case a previous download died partway
            # through and left data in the underlying key HTTP buffer.
            # Do this within a try/except block in case the connection is
            # closed (since key.close() attempts to do a final read, in which
            # case this read attempt would get an IncompleteRead exception,
            # which we can safely ignore.
            try:
                key.close()
            except httplib.IncompleteRead:
                pass
            # Exponential backoff: 1, 2, 4, ... seconds between
            # progress-less attempts.
            sleep_time_secs = 2**progress_less_iterations
            if debug >= 1:
                print('Got retryable failure (%d progress-less in a row).\n'
                      'Sleeping %d seconds before re-trying' %
                      (progress_less_iterations, sleep_time_secs))
            time.sleep(sleep_time_secs)
"def",
"get_file",
"(",
"self",
",",
"key",
",",
"fp",
",",
"headers",
",",
"cb",
"=",
"None",
",",
"num_cb",
"=",
"10",
",",
"torrent",
"=",
"False",
",",
"version_id",
"=",
"None",
")",
":",
"debug",
"=",
"key",
".",
"bucket",
".",
"connection",
".",
"debug",
"if",
"not",
"headers",
":",
"headers",
"=",
"{",
"}",
"# Use num-retries from constructor if one was provided; else check",
"# for a value specified in the boto config file; else default to 5.",
"if",
"self",
".",
"num_retries",
"is",
"None",
":",
"self",
".",
"num_retries",
"=",
"config",
".",
"getint",
"(",
"'Boto'",
",",
"'num_retries'",
",",
"5",
")",
"progress_less_iterations",
"=",
"0",
"while",
"True",
":",
"# Retry as long as we're making progress.",
"had_file_bytes_before_attempt",
"=",
"get_cur_file_size",
"(",
"fp",
")",
"try",
":",
"self",
".",
"_attempt_resumable_download",
"(",
"key",
",",
"fp",
",",
"headers",
",",
"cb",
",",
"num_cb",
",",
"torrent",
",",
"version_id",
")",
"# Download succceded, so remove the tracker file (if have one).",
"self",
".",
"_remove_tracker_file",
"(",
")",
"# Previously, check_final_md5() was called here to validate ",
"# downloaded file's checksum, however, to be consistent with",
"# non-resumable downloads, this call was removed. Checksum",
"# validation of file contents should be done by the caller.",
"if",
"debug",
">=",
"1",
":",
"print",
"'Resumable download complete.'",
"return",
"except",
"self",
".",
"RETRYABLE_EXCEPTIONS",
",",
"e",
":",
"if",
"debug",
">=",
"1",
":",
"print",
"(",
"'Caught exception (%s)'",
"%",
"e",
".",
"__repr__",
"(",
")",
")",
"if",
"isinstance",
"(",
"e",
",",
"IOError",
")",
"and",
"e",
".",
"errno",
"==",
"errno",
".",
"EPIPE",
":",
"# Broken pipe error causes httplib to immediately",
"# close the socket (http://bugs.python.org/issue5542),",
"# so we need to close and reopen the key before resuming",
"# the download.",
"key",
".",
"get_file",
"(",
"fp",
",",
"headers",
",",
"cb",
",",
"num_cb",
",",
"torrent",
",",
"version_id",
",",
"override_num_retries",
"=",
"0",
")",
"except",
"ResumableDownloadException",
",",
"e",
":",
"if",
"(",
"e",
".",
"disposition",
"==",
"ResumableTransferDisposition",
".",
"ABORT_CUR_PROCESS",
")",
":",
"if",
"debug",
">=",
"1",
":",
"print",
"(",
"'Caught non-retryable ResumableDownloadException '",
"'(%s)'",
"%",
"e",
".",
"message",
")",
"raise",
"elif",
"(",
"e",
".",
"disposition",
"==",
"ResumableTransferDisposition",
".",
"ABORT",
")",
":",
"if",
"debug",
">=",
"1",
":",
"print",
"(",
"'Caught non-retryable ResumableDownloadException '",
"'(%s); aborting and removing tracker file'",
"%",
"e",
".",
"message",
")",
"self",
".",
"_remove_tracker_file",
"(",
")",
"raise",
"else",
":",
"if",
"debug",
">=",
"1",
":",
"print",
"(",
"'Caught ResumableDownloadException (%s) - will '",
"'retry'",
"%",
"e",
".",
"message",
")",
"# At this point we had a re-tryable failure; see if made progress.",
"if",
"get_cur_file_size",
"(",
"fp",
")",
">",
"had_file_bytes_before_attempt",
":",
"progress_less_iterations",
"=",
"0",
"else",
":",
"progress_less_iterations",
"+=",
"1",
"if",
"progress_less_iterations",
">",
"self",
".",
"num_retries",
":",
"# Don't retry any longer in the current process.",
"raise",
"ResumableDownloadException",
"(",
"'Too many resumable download attempts failed without '",
"'progress. You might try this download again later'",
",",
"ResumableTransferDisposition",
".",
"ABORT_CUR_PROCESS",
")",
"# Close the key, in case a previous download died partway",
"# through and left data in the underlying key HTTP buffer.",
"# Do this within a try/except block in case the connection is",
"# closed (since key.close() attempts to do a final read, in which",
"# case this read attempt would get an IncompleteRead exception,",
"# which we can safely ignore.",
"try",
":",
"key",
".",
"close",
"(",
")",
"except",
"httplib",
".",
"IncompleteRead",
":",
"pass",
"sleep_time_secs",
"=",
"2",
"**",
"progress_less_iterations",
"if",
"debug",
">=",
"1",
":",
"print",
"(",
"'Got retryable failure (%d progress-less in a row).\\n'",
"'Sleeping %d seconds before re-trying'",
"%",
"(",
"progress_less_iterations",
",",
"sleep_time_secs",
")",
")",
"time",
".",
"sleep",
"(",
"sleep_time_secs",
")"
] | 47.165289 | 22.603306 |
def col_dtypes(df):
    """Return a dict mapping each column of ``df`` to its dtype-group name.

    Parameters:
        df - DataFrame
            The DataFrame whose columns are classified.

    Returns:
        dict mapping column label -> dtype-group name, where the names come
        from ``_globals.__dtype_names`` (object, number, bool, datetime,
        category, timedelta, datetimetz).

    Notes:
        The original implementation invoked every ``col_is*`` predicate
        twice per call (once to test emptiness, once to build a deque),
        scanning the DataFrame's columns twice; each predicate is now
        called exactly once.  Columns matching no predicate are omitted,
        exactly as before.
    """
    predicates = [col_isobj, col_isnum, col_isbool, col_isdt,
                  col_iscat, col_istdelt, col_isdtz]
    type_dict = {}
    for predicate, name in zip(predicates, _globals.__dtype_names):
        # Each predicate returns the columns belonging to its dtype group;
        # an empty result simply contributes nothing to the mapping.
        for column in predicate(df):
            type_dict[column] = name
    return type_dict
"def",
"col_dtypes",
"(",
"df",
")",
":",
"# Does some work to reduce possibility of errors and stuff",
"test_list",
"=",
"[",
"col_isobj",
",",
"col_isnum",
",",
"col_isbool",
",",
"col_isdt",
",",
"col_iscat",
",",
"col_istdelt",
",",
"col_isdtz",
"]",
"deque_list",
"=",
"[",
"(",
"deque",
"(",
"col_method",
"(",
"df",
")",
")",
",",
"name",
")",
"for",
"col_method",
",",
"name",
"in",
"zip",
"(",
"test_list",
",",
"_globals",
".",
"__dtype_names",
")",
"if",
"len",
"(",
"col_method",
"(",
"df",
")",
")",
"]",
"type_dict",
"=",
"{",
"}",
"for",
"que",
",",
"name",
"in",
"deque_list",
":",
"while",
"len",
"(",
"que",
")",
":",
"type_dict",
"[",
"que",
".",
"popleft",
"(",
")",
"]",
"=",
"name",
"return",
"type_dict"
] | 44.85 | 22.25 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.