text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def _validate_message(self, message):
"""Validate XML response from iLO.
This function validates the XML response to see
if the exit status is 0 or not in the response.
If the status is non-zero it raises exception.
"""
if message.tag != 'RIBCL':
# the true case shall be unreachable for response
# XML from Ilo as all messages are tagged with RIBCL
# but still raise an exception if any invalid
# XML response is returned by Ilo. Set status to some
# arbitary non-zero value.
status = -1
raise exception.IloClientInternalError(message, status)
for child in message:
if child.tag != 'RESPONSE':
return message
status = int(child.get('STATUS'), 16)
msg = child.get('MESSAGE')
if status == 0 and msg != 'No error':
return msg
if status != 0:
if 'syntax error' in msg or 'Feature not supported' in msg:
for cmd in BOOT_MODE_CMDS:
if cmd in msg:
platform = self.get_product_name()
msg = ("%(cmd)s is not supported on %(platform)s" %
{'cmd': cmd, 'platform': platform})
LOG.debug(self._("Got invalid response with "
"message: '%(message)s'"),
{'message': msg})
raise (exception.IloCommandNotSupportedError
(msg, status))
else:
LOG.debug(self._("Got invalid response with "
"message: '%(message)s'"),
{'message': msg})
raise exception.IloClientInternalError(msg, status)
if (status in exception.IloLoginFailError.statuses or
msg in exception.IloLoginFailError.messages):
LOG.debug(self._("Got invalid response with "
"message: '%(message)s'"),
{'message': msg})
raise exception.IloLoginFailError(msg, status)
LOG.debug(self._("Got invalid response with "
"message: '%(message)s'"),
{'message': msg})
raise exception.IloError(msg, status) | [
"def",
"_validate_message",
"(",
"self",
",",
"message",
")",
":",
"if",
"message",
".",
"tag",
"!=",
"'RIBCL'",
":",
"# the true case shall be unreachable for response",
"# XML from Ilo as all messages are tagged with RIBCL",
"# but still raise an exception if any invalid",
"# XML response is returned by Ilo. Set status to some",
"# arbitary non-zero value.",
"status",
"=",
"-",
"1",
"raise",
"exception",
".",
"IloClientInternalError",
"(",
"message",
",",
"status",
")",
"for",
"child",
"in",
"message",
":",
"if",
"child",
".",
"tag",
"!=",
"'RESPONSE'",
":",
"return",
"message",
"status",
"=",
"int",
"(",
"child",
".",
"get",
"(",
"'STATUS'",
")",
",",
"16",
")",
"msg",
"=",
"child",
".",
"get",
"(",
"'MESSAGE'",
")",
"if",
"status",
"==",
"0",
"and",
"msg",
"!=",
"'No error'",
":",
"return",
"msg",
"if",
"status",
"!=",
"0",
":",
"if",
"'syntax error'",
"in",
"msg",
"or",
"'Feature not supported'",
"in",
"msg",
":",
"for",
"cmd",
"in",
"BOOT_MODE_CMDS",
":",
"if",
"cmd",
"in",
"msg",
":",
"platform",
"=",
"self",
".",
"get_product_name",
"(",
")",
"msg",
"=",
"(",
"\"%(cmd)s is not supported on %(platform)s\"",
"%",
"{",
"'cmd'",
":",
"cmd",
",",
"'platform'",
":",
"platform",
"}",
")",
"LOG",
".",
"debug",
"(",
"self",
".",
"_",
"(",
"\"Got invalid response with \"",
"\"message: '%(message)s'\"",
")",
",",
"{",
"'message'",
":",
"msg",
"}",
")",
"raise",
"(",
"exception",
".",
"IloCommandNotSupportedError",
"(",
"msg",
",",
"status",
")",
")",
"else",
":",
"LOG",
".",
"debug",
"(",
"self",
".",
"_",
"(",
"\"Got invalid response with \"",
"\"message: '%(message)s'\"",
")",
",",
"{",
"'message'",
":",
"msg",
"}",
")",
"raise",
"exception",
".",
"IloClientInternalError",
"(",
"msg",
",",
"status",
")",
"if",
"(",
"status",
"in",
"exception",
".",
"IloLoginFailError",
".",
"statuses",
"or",
"msg",
"in",
"exception",
".",
"IloLoginFailError",
".",
"messages",
")",
":",
"LOG",
".",
"debug",
"(",
"self",
".",
"_",
"(",
"\"Got invalid response with \"",
"\"message: '%(message)s'\"",
")",
",",
"{",
"'message'",
":",
"msg",
"}",
")",
"raise",
"exception",
".",
"IloLoginFailError",
"(",
"msg",
",",
"status",
")",
"LOG",
".",
"debug",
"(",
"self",
".",
"_",
"(",
"\"Got invalid response with \"",
"\"message: '%(message)s'\"",
")",
",",
"{",
"'message'",
":",
"msg",
"}",
")",
"raise",
"exception",
".",
"IloError",
"(",
"msg",
",",
"status",
")"
] | 49.45098 | 18.45098 |
def hello():
""""http://click.pocoo.org/5/
http://click.pocoo.org/5/api/
"""
click.clear()
click.secho('Hello World!', fg='green')
click.secho('Some more text', bg='blue', fg='white')
click.secho('ATTENTION', blink=True, bold=True)
click.echo('Continue? [yn] ', nl=False)
c = click.getchar()
click.echo()
if c == 'y':
click.echo('We will go on')
elif c == 'n':
click.echo('Abort!')
else:
click.echo('Invalid input :(')
click.echo_via_pager('\n'.join('Line %d' % idx
for idx in range(200))) | [
"def",
"hello",
"(",
")",
":",
"click",
".",
"clear",
"(",
")",
"click",
".",
"secho",
"(",
"'Hello World!'",
",",
"fg",
"=",
"'green'",
")",
"click",
".",
"secho",
"(",
"'Some more text'",
",",
"bg",
"=",
"'blue'",
",",
"fg",
"=",
"'white'",
")",
"click",
".",
"secho",
"(",
"'ATTENTION'",
",",
"blink",
"=",
"True",
",",
"bold",
"=",
"True",
")",
"click",
".",
"echo",
"(",
"'Continue? [yn] '",
",",
"nl",
"=",
"False",
")",
"c",
"=",
"click",
".",
"getchar",
"(",
")",
"click",
".",
"echo",
"(",
")",
"if",
"c",
"==",
"'y'",
":",
"click",
".",
"echo",
"(",
"'We will go on'",
")",
"elif",
"c",
"==",
"'n'",
":",
"click",
".",
"echo",
"(",
"'Abort!'",
")",
"else",
":",
"click",
".",
"echo",
"(",
"'Invalid input :('",
")",
"click",
".",
"echo_via_pager",
"(",
"'\\n'",
".",
"join",
"(",
"'Line %d'",
"%",
"idx",
"for",
"idx",
"in",
"range",
"(",
"200",
")",
")",
")"
] | 29.1 | 14.6 |
def trunc(text, length):
"""
Truncates text to given length, taking into account wide characters.
If truncated, the last char is replaced by an elipsis.
"""
if length < 1:
raise ValueError("length should be 1 or larger")
# Remove whitespace first so no unneccesary truncation is done.
text = text.strip()
text_length = wcswidth(text)
if text_length <= length:
return text
# We cannot just remove n characters from the end since we don't know how
# wide these characters are and how it will affect text length.
# Use wcwidth to determine how many characters need to be truncated.
chars_to_truncate = 0
trunc_length = 0
for char in reversed(text):
chars_to_truncate += 1
trunc_length += wcwidth(char)
if text_length - trunc_length <= length:
break
# Additional char to make room for elipsis
n = chars_to_truncate + 1
return text[:-n].strip() + '…' | [
"def",
"trunc",
"(",
"text",
",",
"length",
")",
":",
"if",
"length",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"\"length should be 1 or larger\"",
")",
"# Remove whitespace first so no unneccesary truncation is done.",
"text",
"=",
"text",
".",
"strip",
"(",
")",
"text_length",
"=",
"wcswidth",
"(",
"text",
")",
"if",
"text_length",
"<=",
"length",
":",
"return",
"text",
"# We cannot just remove n characters from the end since we don't know how",
"# wide these characters are and how it will affect text length.",
"# Use wcwidth to determine how many characters need to be truncated.",
"chars_to_truncate",
"=",
"0",
"trunc_length",
"=",
"0",
"for",
"char",
"in",
"reversed",
"(",
"text",
")",
":",
"chars_to_truncate",
"+=",
"1",
"trunc_length",
"+=",
"wcwidth",
"(",
"char",
")",
"if",
"text_length",
"-",
"trunc_length",
"<=",
"length",
":",
"break",
"# Additional char to make room for elipsis",
"n",
"=",
"chars_to_truncate",
"+",
"1",
"return",
"text",
"[",
":",
"-",
"n",
"]",
".",
"strip",
"(",
")",
"+",
"'…'"
] | 31.5 | 19.833333 |
def get_image_size(self, token, resolution=0):
"""
Return the size of the volume (3D). Convenient for when you want
to download the entirety of a dataset.
Arguments:
token (str): The token for which to find the dataset image bounds
resolution (int : 0): The resolution at which to get image bounds.
Defaults to 0, to get the largest area available.
Returns:
int[3]: The size of the bounds. Should == get_volume.shape
Raises:
RemoteDataNotFoundError: If the token is invalid, or if the
metadata at that resolution is unavailable in projinfo.
"""
info = self.get_proj_info(token)
res = str(resolution)
if res not in info['dataset']['imagesize']:
raise RemoteDataNotFoundError("Resolution " + res +
" is not available.")
return info['dataset']['imagesize'][str(resolution)] | [
"def",
"get_image_size",
"(",
"self",
",",
"token",
",",
"resolution",
"=",
"0",
")",
":",
"info",
"=",
"self",
".",
"get_proj_info",
"(",
"token",
")",
"res",
"=",
"str",
"(",
"resolution",
")",
"if",
"res",
"not",
"in",
"info",
"[",
"'dataset'",
"]",
"[",
"'imagesize'",
"]",
":",
"raise",
"RemoteDataNotFoundError",
"(",
"\"Resolution \"",
"+",
"res",
"+",
"\" is not available.\"",
")",
"return",
"info",
"[",
"'dataset'",
"]",
"[",
"'imagesize'",
"]",
"[",
"str",
"(",
"resolution",
")",
"]"
] | 42.304348 | 22.391304 |
def draw(molecule, TraversalType=SmilesTraversal):
"""(molecule)->canonical representation of a molecule
Well, it's only canonical if the atom symorders are
canonical, otherwise it's arbitrary.
atoms must have a symorder attribute
bonds must have a equiv_class attribute"""
result = []
atoms = allAtoms = molecule.atoms
visitedAtoms = {}
#
# Traverse all components of the graph to form
# the output string
while atoms:
atom = _get_lowest_symorder(atoms)
visitedAtoms[atom] = 1
visitedBonds = {}
nextTraverse = TraversalType()
atomsUsed, bondsUsed = [], []
_traverse(atom, nextTraverse, None,
visitedAtoms, visitedBonds,
atomsUsed, bondsUsed, TraversalType)
atoms = []
for atom in allAtoms:
if not visitedAtoms.has_key(atom):
atoms.append(atom)
assert nextTraverse.atoms == atomsUsed
assert nextTraverse.bonds == bondsUsed, "%s %s"%(
nextTraverse.bonds, bondsUsed)
result.append((str(nextTraverse),
atomsUsed, bondsUsed))
result.sort()
fragments = []
for r in result:
fragments.append(r[0])
return ".".join(fragments), result | [
"def",
"draw",
"(",
"molecule",
",",
"TraversalType",
"=",
"SmilesTraversal",
")",
":",
"result",
"=",
"[",
"]",
"atoms",
"=",
"allAtoms",
"=",
"molecule",
".",
"atoms",
"visitedAtoms",
"=",
"{",
"}",
"#",
"# Traverse all components of the graph to form",
"# the output string",
"while",
"atoms",
":",
"atom",
"=",
"_get_lowest_symorder",
"(",
"atoms",
")",
"visitedAtoms",
"[",
"atom",
"]",
"=",
"1",
"visitedBonds",
"=",
"{",
"}",
"nextTraverse",
"=",
"TraversalType",
"(",
")",
"atomsUsed",
",",
"bondsUsed",
"=",
"[",
"]",
",",
"[",
"]",
"_traverse",
"(",
"atom",
",",
"nextTraverse",
",",
"None",
",",
"visitedAtoms",
",",
"visitedBonds",
",",
"atomsUsed",
",",
"bondsUsed",
",",
"TraversalType",
")",
"atoms",
"=",
"[",
"]",
"for",
"atom",
"in",
"allAtoms",
":",
"if",
"not",
"visitedAtoms",
".",
"has_key",
"(",
"atom",
")",
":",
"atoms",
".",
"append",
"(",
"atom",
")",
"assert",
"nextTraverse",
".",
"atoms",
"==",
"atomsUsed",
"assert",
"nextTraverse",
".",
"bonds",
"==",
"bondsUsed",
",",
"\"%s %s\"",
"%",
"(",
"nextTraverse",
".",
"bonds",
",",
"bondsUsed",
")",
"result",
".",
"append",
"(",
"(",
"str",
"(",
"nextTraverse",
")",
",",
"atomsUsed",
",",
"bondsUsed",
")",
")",
"result",
".",
"sort",
"(",
")",
"fragments",
"=",
"[",
"]",
"for",
"r",
"in",
"result",
":",
"fragments",
".",
"append",
"(",
"r",
"[",
"0",
"]",
")",
"return",
"\".\"",
".",
"join",
"(",
"fragments",
")",
",",
"result"
] | 29.952381 | 15.166667 |
def _read_mptcp_prio(self, bits, size):
"""Read Change Subflow Priority option.
Positional arguments:
* bits - str, 4-bit data
* size - int, length of option
Returns:
* dict -- extracted Change Subflow Priority (MP_PRIO) option
Structure of MP_PRIO [RFC 6824]:
1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+---------------+---------------+-------+-----+-+--------------+
| Kind | Length |Subtype| |B| AddrID (opt) |
+---------------+---------------+-------+-----+-+--------------+
Octets Bits Name Description
0 0 tcp.opt.kind Kind (30)
1 8 tcp.opt.length Length (3/4)
2 16 tcp.opt.mp.subtype Subtype (5)
2 23 tcp.opt.mp.prio.backup Backup Path (B)
3 24 tcp.opt.mp.prio.addrid Address ID (optional)
"""
temp = self._read_unpack(1) if size else None
data = dict(
subtype='MP_PRIO',
prio=dict(
res=b'\x00' * 3,
backup=True if int(bits[3]) else False,
addrid=temp,
),
)
return data | [
"def",
"_read_mptcp_prio",
"(",
"self",
",",
"bits",
",",
"size",
")",
":",
"temp",
"=",
"self",
".",
"_read_unpack",
"(",
"1",
")",
"if",
"size",
"else",
"None",
"data",
"=",
"dict",
"(",
"subtype",
"=",
"'MP_PRIO'",
",",
"prio",
"=",
"dict",
"(",
"res",
"=",
"b'\\x00'",
"*",
"3",
",",
"backup",
"=",
"True",
"if",
"int",
"(",
"bits",
"[",
"3",
"]",
")",
"else",
"False",
",",
"addrid",
"=",
"temp",
",",
")",
",",
")",
"return",
"data"
] | 38.189189 | 23.972973 |
def get_file_encoding(self, file_path, preferred_encoding=None):
"""
Gets an eventual cached encoding for file_path.
Raises a KeyError if no encoding were cached for the specified file
path.
:param file_path: path of the file to look up
:returns: The cached encoding.
"""
_logger().debug('getting encoding for %s', file_path)
try:
map = json.loads(self._settings.value('cachedFileEncodings'))
except TypeError:
map = {}
try:
return map[file_path]
except KeyError:
encodings = self.preferred_encodings
if preferred_encoding:
encodings.insert(0, preferred_encoding)
for encoding in encodings:
_logger().debug('trying encoding: %s', encoding)
try:
with open(file_path, encoding=encoding) as f:
f.read()
except (UnicodeDecodeError, IOError, OSError):
pass
else:
return encoding
raise KeyError(file_path) | [
"def",
"get_file_encoding",
"(",
"self",
",",
"file_path",
",",
"preferred_encoding",
"=",
"None",
")",
":",
"_logger",
"(",
")",
".",
"debug",
"(",
"'getting encoding for %s'",
",",
"file_path",
")",
"try",
":",
"map",
"=",
"json",
".",
"loads",
"(",
"self",
".",
"_settings",
".",
"value",
"(",
"'cachedFileEncodings'",
")",
")",
"except",
"TypeError",
":",
"map",
"=",
"{",
"}",
"try",
":",
"return",
"map",
"[",
"file_path",
"]",
"except",
"KeyError",
":",
"encodings",
"=",
"self",
".",
"preferred_encodings",
"if",
"preferred_encoding",
":",
"encodings",
".",
"insert",
"(",
"0",
",",
"preferred_encoding",
")",
"for",
"encoding",
"in",
"encodings",
":",
"_logger",
"(",
")",
".",
"debug",
"(",
"'trying encoding: %s'",
",",
"encoding",
")",
"try",
":",
"with",
"open",
"(",
"file_path",
",",
"encoding",
"=",
"encoding",
")",
"as",
"f",
":",
"f",
".",
"read",
"(",
")",
"except",
"(",
"UnicodeDecodeError",
",",
"IOError",
",",
"OSError",
")",
":",
"pass",
"else",
":",
"return",
"encoding",
"raise",
"KeyError",
"(",
"file_path",
")"
] | 35.967742 | 17.322581 |
def send(self, event):
# type(Event) -> bytes
"""Send an event to the remote.
This will return the bytes to send based on the event or raise
a LocalProtocolError if the event is not valid given the
state.
"""
data = b""
if isinstance(event, Request):
data += self._initiate_connection(event)
elif isinstance(event, AcceptConnection):
data += self._accept(event)
elif isinstance(event, RejectConnection):
data += self._reject(event)
elif isinstance(event, RejectData):
data += self._send_reject_data(event)
else:
raise LocalProtocolError(
"Event {} cannot be sent during the handshake".format(event)
)
return data | [
"def",
"send",
"(",
"self",
",",
"event",
")",
":",
"# type(Event) -> bytes",
"data",
"=",
"b\"\"",
"if",
"isinstance",
"(",
"event",
",",
"Request",
")",
":",
"data",
"+=",
"self",
".",
"_initiate_connection",
"(",
"event",
")",
"elif",
"isinstance",
"(",
"event",
",",
"AcceptConnection",
")",
":",
"data",
"+=",
"self",
".",
"_accept",
"(",
"event",
")",
"elif",
"isinstance",
"(",
"event",
",",
"RejectConnection",
")",
":",
"data",
"+=",
"self",
".",
"_reject",
"(",
"event",
")",
"elif",
"isinstance",
"(",
"event",
",",
"RejectData",
")",
":",
"data",
"+=",
"self",
".",
"_send_reject_data",
"(",
"event",
")",
"else",
":",
"raise",
"LocalProtocolError",
"(",
"\"Event {} cannot be sent during the handshake\"",
".",
"format",
"(",
"event",
")",
")",
"return",
"data"
] | 34.086957 | 16.086957 |
def purcell(target, r_toroid, surface_tension='pore.surface_tension',
contact_angle='pore.contact_angle',
diameter='throat.diameter'):
r"""
Computes the throat capillary entry pressure assuming the throat is a
toroid.
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
r_toroid : float or array_like
The radius of the toroid surrounding the pore
surface_tension : dict key (string)
The dictionary key containing the surface tension values to be used.
If a pore property is given, it is interpolated to a throat list.
contact_angle : dict key (string)
The dictionary key containing the contact angle values to be used.
If a pore property is given, it is interpolated to a throat list.
diameter : dict key (string)
The dictionary key containing the throat diameter values to be used.
Notes
-----
This approach accounts for the converging-diverging nature of many throat
types. Advancing the meniscus beyond the apex of the toroid requires an
increase in capillary pressure beyond that for a cylindical tube of the
same radius. The details of this equation are described by Mason and
Morrow [1]_, and explored by Gostick [2]_ in the context of a pore network
model.
References
----------
.. [1] G. Mason, N. R. Morrow, Effect of contact angle on capillary
displacement curvatures in pore throats formed by spheres. J.
Colloid Interface Sci. 168, 130 (1994).
.. [2] J. Gostick, Random pore network modeling of fibrous PEMFC gas
diffusion media using Voronoi and Delaunay tessellations. J.
Electrochem. Soc. 160, F731 (2013).
"""
network = target.project.network
phase = target.project.find_phase(target)
element, sigma, theta = _get_key_props(phase=phase,
diameter=diameter,
surface_tension=surface_tension,
contact_angle=contact_angle)
r = network[diameter]/2
R = r_toroid
alpha = theta - 180 + \
_sp.rad2deg(_sp.arcsin(_sp.sin(_sp.radians(theta))/(1+r/R)))
value = (-2*sigma/r) * \
(_sp.cos(_sp.radians(theta - alpha)) /
(1 + R/r*(1 - _sp.cos(_sp.radians(alpha)))))
if diameter.split('.')[0] == 'throat':
value = value[phase.throats(target.name)]
else:
value = value[phase.pores(target.name)]
return value | [
"def",
"purcell",
"(",
"target",
",",
"r_toroid",
",",
"surface_tension",
"=",
"'pore.surface_tension'",
",",
"contact_angle",
"=",
"'pore.contact_angle'",
",",
"diameter",
"=",
"'throat.diameter'",
")",
":",
"network",
"=",
"target",
".",
"project",
".",
"network",
"phase",
"=",
"target",
".",
"project",
".",
"find_phase",
"(",
"target",
")",
"element",
",",
"sigma",
",",
"theta",
"=",
"_get_key_props",
"(",
"phase",
"=",
"phase",
",",
"diameter",
"=",
"diameter",
",",
"surface_tension",
"=",
"surface_tension",
",",
"contact_angle",
"=",
"contact_angle",
")",
"r",
"=",
"network",
"[",
"diameter",
"]",
"/",
"2",
"R",
"=",
"r_toroid",
"alpha",
"=",
"theta",
"-",
"180",
"+",
"_sp",
".",
"rad2deg",
"(",
"_sp",
".",
"arcsin",
"(",
"_sp",
".",
"sin",
"(",
"_sp",
".",
"radians",
"(",
"theta",
")",
")",
"/",
"(",
"1",
"+",
"r",
"/",
"R",
")",
")",
")",
"value",
"=",
"(",
"-",
"2",
"*",
"sigma",
"/",
"r",
")",
"*",
"(",
"_sp",
".",
"cos",
"(",
"_sp",
".",
"radians",
"(",
"theta",
"-",
"alpha",
")",
")",
"/",
"(",
"1",
"+",
"R",
"/",
"r",
"*",
"(",
"1",
"-",
"_sp",
".",
"cos",
"(",
"_sp",
".",
"radians",
"(",
"alpha",
")",
")",
")",
")",
")",
"if",
"diameter",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"==",
"'throat'",
":",
"value",
"=",
"value",
"[",
"phase",
".",
"throats",
"(",
"target",
".",
"name",
")",
"]",
"else",
":",
"value",
"=",
"value",
"[",
"phase",
".",
"pores",
"(",
"target",
".",
"name",
")",
"]",
"return",
"value"
] | 40.348485 | 23.666667 |
def force_populate(self):
"""
Populates the parser with the entire contents of the
word reference file.
"""
if not os.path.exists(self.ref):
raise FileNotFoundError("The reference file path '{}' does not exists.".format(self.ref))
with open(self.ref, 'r') as f:
for word in f:
word = word.strip('\n')
self.db.add(word)
self.populated = True | [
"def",
"force_populate",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"ref",
")",
":",
"raise",
"FileNotFoundError",
"(",
"\"The reference file path '{}' does not exists.\"",
".",
"format",
"(",
"self",
".",
"ref",
")",
")",
"with",
"open",
"(",
"self",
".",
"ref",
",",
"'r'",
")",
"as",
"f",
":",
"for",
"word",
"in",
"f",
":",
"word",
"=",
"word",
".",
"strip",
"(",
"'\\n'",
")",
"self",
".",
"db",
".",
"add",
"(",
"word",
")",
"self",
".",
"populated",
"=",
"True"
] | 36.75 | 11.916667 |
def part_specs(self, part):
'''
returns the specifications of the given part. If multiple parts are
matched, only the first one will be output.
part: the productname or sku
prints the results on stdout
'''
result = self._e.parts_match(
queries=[{'mpn_or_sku': part}],
exact_only=True,
show_mpn=True,
show_manufacturer=True,
show_octopart_url=True,
show_short_description=True,
show_specs=True,
show_category_uids=True,
show_external_links=True,
show_reference_designs=True,
show_cad_models=True,
show_datasheets=True,
include_specs=True,
include_category_uids=True,
include_external_links=True,
include_reference_designs=True,
include_cad_models=True,
include_datasheets=True
)
if result[1][0]['hits'] == 0:
print("No result")
return ReturnValues.NO_RESULTS
result = result[1][0]['items'][0]
print("Showing specs for '{}':".format(result['mpn']))
print(" → Manufacturer: {}".format(result['manufacturer']['name']))
print(" → Specifications: ")
for k,v in result['specs'].items():
name = v['metadata']['name'] if v['metadata']['name'] else k
min_value = v['min_value'] if v['min_value'] else ''
max_value = v['max_value'] if v['max_value'] else ''
unit = ' ({})'.format(v['metadata']['unit']['name']) if v['metadata']['unit'] else ''
value = ','.join(v['value']) if len(v['value']) > 0 else ''
if value and not (min_value or max_value):
print(" → {:20}: {}{}".format(name, value, unit))
elif value and min_value and max_value:
print(" → {:20}: {}{} (min: {}, max: {})".format(name, value, unit, min_value, max_value))
elif not value and min_value and max_value:
print(" → {:20}:{} min: {}, max: {}".format(name, unit, min_value, max_value))
elif not value and min_value and not max_value:
print(" → {:20}:{} min: {}".format(name, unit, min_value))
elif not value and not min_value and max_value:
print(" → {:20}:{} max: {}".format(name, unit, max_value))
print(" → URI: {}".format(result['octopart_url']))
if result['external_links']['evalkit_url'] \
or result['external_links']['freesample_url'] \
or result['external_links']['product_url']:
print(" → External Links")
if result['external_links']['evalkit_url']:
print(" → Evaluation kit: {}".format(result['external_links']['evalkit_url']))
if result['external_links']['freesample_url']:
print(" → Free Sample: {}".format(result['external_links']['freesample_url']))
if result['external_links']['product_url']:
print(" → Product URI: {}".format(result['external_links']['product_url']))
if len(result['datasheets']) > 0:
print(" → Datasheets")
for datasheet in result['datasheets']:
print(" → URL: {}".format(datasheet['url']))
if datasheet['metadata']:
print(" → Updated: {}".format(datasheet['metadata']['last_updated']))
print(" → Nb Pages: {}".format(datasheet['metadata']['num_pages']))
if len(result['reference_designs']) > 0:
print(" → Reference designs: ")
if len(result['cad_models']) > 0:
print(" → CAD Models: ")
return ReturnValues.OK | [
"def",
"part_specs",
"(",
"self",
",",
"part",
")",
":",
"result",
"=",
"self",
".",
"_e",
".",
"parts_match",
"(",
"queries",
"=",
"[",
"{",
"'mpn_or_sku'",
":",
"part",
"}",
"]",
",",
"exact_only",
"=",
"True",
",",
"show_mpn",
"=",
"True",
",",
"show_manufacturer",
"=",
"True",
",",
"show_octopart_url",
"=",
"True",
",",
"show_short_description",
"=",
"True",
",",
"show_specs",
"=",
"True",
",",
"show_category_uids",
"=",
"True",
",",
"show_external_links",
"=",
"True",
",",
"show_reference_designs",
"=",
"True",
",",
"show_cad_models",
"=",
"True",
",",
"show_datasheets",
"=",
"True",
",",
"include_specs",
"=",
"True",
",",
"include_category_uids",
"=",
"True",
",",
"include_external_links",
"=",
"True",
",",
"include_reference_designs",
"=",
"True",
",",
"include_cad_models",
"=",
"True",
",",
"include_datasheets",
"=",
"True",
")",
"if",
"result",
"[",
"1",
"]",
"[",
"0",
"]",
"[",
"'hits'",
"]",
"==",
"0",
":",
"print",
"(",
"\"No result\"",
")",
"return",
"ReturnValues",
".",
"NO_RESULTS",
"result",
"=",
"result",
"[",
"1",
"]",
"[",
"0",
"]",
"[",
"'items'",
"]",
"[",
"0",
"]",
"print",
"(",
"\"Showing specs for '{}':\"",
".",
"format",
"(",
"result",
"[",
"'mpn'",
"]",
")",
")",
"print",
"(",
"\" → Manufacturer: {}\".f",
"o",
"rmat(r",
"e",
"sult['",
"m",
"anufacturer'][",
"'",
"n",
"ame'])",
")",
"",
"",
"print",
"(",
"\" → Specifications: \")",
"",
"for",
"k",
",",
"v",
"in",
"result",
"[",
"'specs'",
"]",
".",
"items",
"(",
")",
":",
"name",
"=",
"v",
"[",
"'metadata'",
"]",
"[",
"'name'",
"]",
"if",
"v",
"[",
"'metadata'",
"]",
"[",
"'name'",
"]",
"else",
"k",
"min_value",
"=",
"v",
"[",
"'min_value'",
"]",
"if",
"v",
"[",
"'min_value'",
"]",
"else",
"''",
"max_value",
"=",
"v",
"[",
"'max_value'",
"]",
"if",
"v",
"[",
"'max_value'",
"]",
"else",
"''",
"unit",
"=",
"' ({})'",
".",
"format",
"(",
"v",
"[",
"'metadata'",
"]",
"[",
"'unit'",
"]",
"[",
"'name'",
"]",
")",
"if",
"v",
"[",
"'metadata'",
"]",
"[",
"'unit'",
"]",
"else",
"''",
"value",
"=",
"','",
".",
"join",
"(",
"v",
"[",
"'value'",
"]",
")",
"if",
"len",
"(",
"v",
"[",
"'value'",
"]",
")",
">",
"0",
"else",
"''",
"if",
"value",
"and",
"not",
"(",
"min_value",
"or",
"max_value",
")",
":",
"print",
"(",
"\" → {:20}: {}{}\".f",
"o",
"rmat(n",
"a",
"me, ",
"v",
"lue, ",
"u",
"it))",
"",
"",
"elif",
"value",
"and",
"min_value",
"and",
"max_value",
":",
"print",
"(",
"\" → {:20}: {}{} (min: {}, max: {})\".f",
"o",
"rmat(n",
"a",
"me, ",
"v",
"lue, ",
"u",
"it, ",
"m",
"n_value, ",
"m",
"x_value))",
"",
"",
"elif",
"not",
"value",
"and",
"min_value",
"and",
"max_value",
":",
"print",
"(",
"\" → {:20}:{} min: {}, max: {}\".f",
"o",
"rmat(n",
"a",
"me, ",
"u",
"it, ",
"m",
"n_value, ",
"m",
"x_value))",
"",
"",
"elif",
"not",
"value",
"and",
"min_value",
"and",
"not",
"max_value",
":",
"print",
"(",
"\" → {:20}:{} min: {}\".f",
"o",
"rmat(n",
"a",
"me, ",
"u",
"it, ",
"m",
"n_value))",
"",
"",
"elif",
"not",
"value",
"and",
"not",
"min_value",
"and",
"max_value",
":",
"print",
"(",
"\" → {:20}:{} max: {}\".f",
"o",
"rmat(n",
"a",
"me, ",
"u",
"it, ",
"m",
"x_value))",
"",
"",
"print",
"(",
"\" → URI: {}\".f",
"o",
"rmat(r",
"e",
"sult['",
"o",
"ctopart_url'])",
")",
"",
"",
"if",
"result",
"[",
"'external_links'",
"]",
"[",
"'evalkit_url'",
"]",
"or",
"result",
"[",
"'external_links'",
"]",
"[",
"'freesample_url'",
"]",
"or",
"result",
"[",
"'external_links'",
"]",
"[",
"'product_url'",
"]",
":",
"print",
"(",
"\" → External Links\")",
"",
"if",
"result",
"[",
"'external_links'",
"]",
"[",
"'evalkit_url'",
"]",
":",
"print",
"(",
"\" → Evaluation kit: {}\".f",
"o",
"rmat(r",
"e",
"sult['",
"e",
"xternal_links'][",
"'",
"e",
"valkit_url'])",
")",
"",
"",
"if",
"result",
"[",
"'external_links'",
"]",
"[",
"'freesample_url'",
"]",
":",
"print",
"(",
"\" → Free Sample: {}\".f",
"o",
"rmat(r",
"e",
"sult['",
"e",
"xternal_links'][",
"'",
"f",
"reesample_url'])",
")",
"",
"",
"if",
"result",
"[",
"'external_links'",
"]",
"[",
"'product_url'",
"]",
":",
"print",
"(",
"\" → Product URI: {}\".f",
"o",
"rmat(r",
"e",
"sult['",
"e",
"xternal_links'][",
"'",
"p",
"roduct_url'])",
")",
"",
"",
"if",
"len",
"(",
"result",
"[",
"'datasheets'",
"]",
")",
">",
"0",
":",
"print",
"(",
"\" → Datasheets\")",
"",
"for",
"datasheet",
"in",
"result",
"[",
"'datasheets'",
"]",
":",
"print",
"(",
"\" → URL: {}\".f",
"o",
"rmat(d",
"a",
"tasheet['",
"u",
"rl'])",
")",
"",
"",
"if",
"datasheet",
"[",
"'metadata'",
"]",
":",
"print",
"(",
"\" → Updated: {}\".f",
"o",
"rmat(d",
"a",
"tasheet['",
"m",
"etadata'][",
"'",
"l",
"ast_updated'])",
")",
"",
"",
"print",
"(",
"\" → Nb Pages: {}\".f",
"o",
"rmat(d",
"a",
"tasheet['",
"m",
"etadata'][",
"'",
"n",
"um_pages'])",
")",
"",
"",
"if",
"len",
"(",
"result",
"[",
"'reference_designs'",
"]",
")",
">",
"0",
":",
"print",
"(",
"\" → Reference designs: \")",
"",
"if",
"len",
"(",
"result",
"[",
"'cad_models'",
"]",
")",
">",
"0",
":",
"print",
"(",
"\" → CAD Models: \")",
"",
"return",
"ReturnValues",
".",
"OK"
] | 48.076923 | 19.102564 |
def vectorize(fn):
"""
Allows a method to accept one or more values,
but internally deal only with a single item,
and returning a list or a single item depending
on what is desired.
"""
@functools.wraps(fn)
def vectorized_method(self, values, *vargs, **kwargs):
wrap = not isinstance(values, (list, tuple))
should_unwrap = not kwargs.setdefault('wrap', False)
unwrap = wrap and should_unwrap
del kwargs['wrap']
if wrap:
values = [values]
results = [fn(self, value, *vargs, **kwargs) for value in values]
if unwrap:
results = results[0]
return results
return vectorized_method | [
"def",
"vectorize",
"(",
"fn",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"fn",
")",
"def",
"vectorized_method",
"(",
"self",
",",
"values",
",",
"*",
"vargs",
",",
"*",
"*",
"kwargs",
")",
":",
"wrap",
"=",
"not",
"isinstance",
"(",
"values",
",",
"(",
"list",
",",
"tuple",
")",
")",
"should_unwrap",
"=",
"not",
"kwargs",
".",
"setdefault",
"(",
"'wrap'",
",",
"False",
")",
"unwrap",
"=",
"wrap",
"and",
"should_unwrap",
"del",
"kwargs",
"[",
"'wrap'",
"]",
"if",
"wrap",
":",
"values",
"=",
"[",
"values",
"]",
"results",
"=",
"[",
"fn",
"(",
"self",
",",
"value",
",",
"*",
"vargs",
",",
"*",
"*",
"kwargs",
")",
"for",
"value",
"in",
"values",
"]",
"if",
"unwrap",
":",
"results",
"=",
"results",
"[",
"0",
"]",
"return",
"results",
"return",
"vectorized_method"
] | 26.461538 | 19.538462 |
def gaussian(h, Xi, x):
"""
Gaussian Kernel for continuous variables
Parameters
----------
h : 1-D ndarray, shape (K,)
The bandwidths used to estimate the value of the kernel function.
Xi : 1-D ndarray, shape (K,)
The value of the training set.
x : 1-D ndarray, shape (K,)
The value at which the kernel density is being estimated.
Returns
-------
kernel_value : ndarray, shape (nobs, K)
The value of the kernel function at each training point for each var.
"""
return (1. / np.sqrt(2 * np.pi)) * np.exp(-(Xi - x)**2 / (h**2 * 2.)) | [
"def",
"gaussian",
"(",
"h",
",",
"Xi",
",",
"x",
")",
":",
"return",
"(",
"1.",
"/",
"np",
".",
"sqrt",
"(",
"2",
"*",
"np",
".",
"pi",
")",
")",
"*",
"np",
".",
"exp",
"(",
"-",
"(",
"Xi",
"-",
"x",
")",
"**",
"2",
"/",
"(",
"h",
"**",
"2",
"*",
"2.",
")",
")"
] | 31.263158 | 19.473684 |
def xbm(self, scale=1, quiet_zone=4):
"""Returns a string representing an XBM image of the QR code.
The XBM format is a black and white image format that looks like a
C header file.
Because displaying QR codes in Tkinter is the
primary use case for this renderer, this method does not take a file
parameter. Instead it retuns the rendered QR code data as a string.
Example of using this renderer with Tkinter:
>>> import pyqrcode
>>> import tkinter
>>> code = pyqrcode.create('Knights who say ni!')
>>> code_xbm = code.xbm(scale=5)
>>>
>>> top = tkinter.Tk()
>>> code_bmp = tkinter.BitmapImage(data=code_xbm)
>>> code_bmp.config(foreground="black")
>>> code_bmp.config(background="white")
>>> label = tkinter.Label(image=code_bmp)
>>> label.pack()
The *scale* parameter sets how large to draw a single module. By
default one pixel is used to draw a single module. This may make the
code too small to be read efficiently. Increasing the scale will make
the code larger. Only integer scales are usable. This method will
attempt to coerce the parameter into an integer (e.g. 2.5 will become 2,
and '3' will become 3). You can use the :py:meth:`get_png_size` method
to calculate the actual pixel size of this image when displayed.
The *quiet_zone* parameter sets how wide the quiet zone around the code
should be. According to the standard this should be 4 modules. It is
left settable because such a wide quiet zone is unnecessary in many
applications where the QR code is not being printed.
"""
return builder._xbm(self.code, scale, quiet_zone) | [
"def",
"xbm",
"(",
"self",
",",
"scale",
"=",
"1",
",",
"quiet_zone",
"=",
"4",
")",
":",
"return",
"builder",
".",
"_xbm",
"(",
"self",
".",
"code",
",",
"scale",
",",
"quiet_zone",
")"
] | 49.405405 | 23.351351 |
def _datetime_view(
request,
template,
dt,
timeslot_factory=None,
items=None,
params=None
):
'''
Build a time slot grid representation for the given datetime ``dt``. See
utils.create_timeslot_table documentation for items and params.
Context parameters:
``day``
the specified datetime value (dt)
``next_day``
day + 1 day
``prev_day``
day - 1 day
``timeslots``
time slot grid of (time, cells) rows
'''
timeslot_factory = timeslot_factory or utils.create_timeslot_table
params = params or {}
return render(request, template, {
'day': dt,
'next_day': dt + timedelta(days=+1),
'prev_day': dt + timedelta(days=-1),
'timeslots': timeslot_factory(dt, items, **params)
}) | [
"def",
"_datetime_view",
"(",
"request",
",",
"template",
",",
"dt",
",",
"timeslot_factory",
"=",
"None",
",",
"items",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"timeslot_factory",
"=",
"timeslot_factory",
"or",
"utils",
".",
"create_timeslot_table",
"params",
"=",
"params",
"or",
"{",
"}",
"return",
"render",
"(",
"request",
",",
"template",
",",
"{",
"'day'",
":",
"dt",
",",
"'next_day'",
":",
"dt",
"+",
"timedelta",
"(",
"days",
"=",
"+",
"1",
")",
",",
"'prev_day'",
":",
"dt",
"+",
"timedelta",
"(",
"days",
"=",
"-",
"1",
")",
",",
"'timeslots'",
":",
"timeslot_factory",
"(",
"dt",
",",
"items",
",",
"*",
"*",
"params",
")",
"}",
")"
] | 21.75 | 25.25 |
def raise_exception(self, exception, tup=None):
"""Report an exception back to Storm via logging.
:param exception: a Python exception.
:param tup: a :class:`Tuple` object.
"""
if tup:
message = (
"Python {exception_name} raised while processing Tuple "
"{tup!r}\n{traceback}"
)
else:
message = "Python {exception_name} raised\n{traceback}"
message = message.format(
exception_name=exception.__class__.__name__, tup=tup, traceback=format_exc()
)
self.send_message({"command": "error", "msg": str(message)})
self.send_message({"command": "sync"}) | [
"def",
"raise_exception",
"(",
"self",
",",
"exception",
",",
"tup",
"=",
"None",
")",
":",
"if",
"tup",
":",
"message",
"=",
"(",
"\"Python {exception_name} raised while processing Tuple \"",
"\"{tup!r}\\n{traceback}\"",
")",
"else",
":",
"message",
"=",
"\"Python {exception_name} raised\\n{traceback}\"",
"message",
"=",
"message",
".",
"format",
"(",
"exception_name",
"=",
"exception",
".",
"__class__",
".",
"__name__",
",",
"tup",
"=",
"tup",
",",
"traceback",
"=",
"format_exc",
"(",
")",
")",
"self",
".",
"send_message",
"(",
"{",
"\"command\"",
":",
"\"error\"",
",",
"\"msg\"",
":",
"str",
"(",
"message",
")",
"}",
")",
"self",
".",
"send_message",
"(",
"{",
"\"command\"",
":",
"\"sync\"",
"}",
")"
] | 38.277778 | 18.5 |
def floatformat(fmt_string):
"""
Context manager to change the default format string for the
function :func:`openquake.commonlib.writers.scientificformat`.
:param fmt_string: the format to use; for instance '%13.9E'
"""
fmt_defaults = scientificformat.__defaults__
scientificformat.__defaults__ = (fmt_string,) + fmt_defaults[1:]
try:
yield
finally:
scientificformat.__defaults__ = fmt_defaults | [
"def",
"floatformat",
"(",
"fmt_string",
")",
":",
"fmt_defaults",
"=",
"scientificformat",
".",
"__defaults__",
"scientificformat",
".",
"__defaults__",
"=",
"(",
"fmt_string",
",",
")",
"+",
"fmt_defaults",
"[",
"1",
":",
"]",
"try",
":",
"yield",
"finally",
":",
"scientificformat",
".",
"__defaults__",
"=",
"fmt_defaults"
] | 33.461538 | 19.923077 |
def parse_directory_index(directory_index):
"""
Retrieve a directory index and make a list of the RPMs listed.
"""
# Normalize our URL style
if not directory_index.endswith('/'):
directory_index = directory_index + '/'
site_index = urllib2.urlopen(directory_index)
parsed_site_index = bs(site_index)
rpm_link_tags = parsed_site_index.findAll('a', href=re.compile(r'.*rpm$'))
# Only save the HREF attribute values from the links found
rpm_names = [link['href'] for link in rpm_link_tags]
# Join the index path with the discovered names so we only return complete paths
remote_list = map(lambda end: "".join([directory_index, end]), rpm_names)
return remote_list | [
"def",
"parse_directory_index",
"(",
"directory_index",
")",
":",
"# Normalize our URL style",
"if",
"not",
"directory_index",
".",
"endswith",
"(",
"'/'",
")",
":",
"directory_index",
"=",
"directory_index",
"+",
"'/'",
"site_index",
"=",
"urllib2",
".",
"urlopen",
"(",
"directory_index",
")",
"parsed_site_index",
"=",
"bs",
"(",
"site_index",
")",
"rpm_link_tags",
"=",
"parsed_site_index",
".",
"findAll",
"(",
"'a'",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"r'.*rpm$'",
")",
")",
"# Only save the HREF attribute values from the links found",
"rpm_names",
"=",
"[",
"link",
"[",
"'href'",
"]",
"for",
"link",
"in",
"rpm_link_tags",
"]",
"# Join the index path with the discovered names so we only return complete paths",
"remote_list",
"=",
"map",
"(",
"lambda",
"end",
":",
"\"\"",
".",
"join",
"(",
"[",
"directory_index",
",",
"end",
"]",
")",
",",
"rpm_names",
")",
"return",
"remote_list"
] | 37.157895 | 20.736842 |
def parse(self):
"""
Return the list of string of all the decorators found
"""
self._parse(self.method)
return list(set([deco for deco in self.decos if deco])) | [
"def",
"parse",
"(",
"self",
")",
":",
"self",
".",
"_parse",
"(",
"self",
".",
"method",
")",
"return",
"list",
"(",
"set",
"(",
"[",
"deco",
"for",
"deco",
"in",
"self",
".",
"decos",
"if",
"deco",
"]",
")",
")"
] | 32.333333 | 12.666667 |
def find_bounds(model):
"""
Return the median upper and lower bound of the metabolic model.
Bounds can vary from model to model. Cobrapy defaults to (-1000, 1000) but
this may not be the case for merged or autogenerated models. In these
cases, this function is used to iterate over all the bounds of all the
reactions and find the median bound values in the model, which are
then used as the 'most common' bounds.
Parameters
----------
model : cobra.Model
The metabolic model under investigation.
"""
lower_bounds = np.asarray([rxn.lower_bound for rxn in model.reactions],
dtype=float)
upper_bounds = np.asarray([rxn.upper_bound for rxn in model.reactions],
dtype=float)
lower_bound = np.nanmedian(lower_bounds[lower_bounds != 0.0])
upper_bound = np.nanmedian(upper_bounds[upper_bounds != 0.0])
if np.isnan(lower_bound):
LOGGER.warning("Could not identify a median lower bound.")
lower_bound = -1000.0
if np.isnan(upper_bound):
LOGGER.warning("Could not identify a median upper bound.")
upper_bound = 1000.0
return lower_bound, upper_bound | [
"def",
"find_bounds",
"(",
"model",
")",
":",
"lower_bounds",
"=",
"np",
".",
"asarray",
"(",
"[",
"rxn",
".",
"lower_bound",
"for",
"rxn",
"in",
"model",
".",
"reactions",
"]",
",",
"dtype",
"=",
"float",
")",
"upper_bounds",
"=",
"np",
".",
"asarray",
"(",
"[",
"rxn",
".",
"upper_bound",
"for",
"rxn",
"in",
"model",
".",
"reactions",
"]",
",",
"dtype",
"=",
"float",
")",
"lower_bound",
"=",
"np",
".",
"nanmedian",
"(",
"lower_bounds",
"[",
"lower_bounds",
"!=",
"0.0",
"]",
")",
"upper_bound",
"=",
"np",
".",
"nanmedian",
"(",
"upper_bounds",
"[",
"upper_bounds",
"!=",
"0.0",
"]",
")",
"if",
"np",
".",
"isnan",
"(",
"lower_bound",
")",
":",
"LOGGER",
".",
"warning",
"(",
"\"Could not identify a median lower bound.\"",
")",
"lower_bound",
"=",
"-",
"1000.0",
"if",
"np",
".",
"isnan",
"(",
"upper_bound",
")",
":",
"LOGGER",
".",
"warning",
"(",
"\"Could not identify a median upper bound.\"",
")",
"upper_bound",
"=",
"1000.0",
"return",
"lower_bound",
",",
"upper_bound"
] | 40.896552 | 20.827586 |
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
url = '{}/userinfo'.format(self.BASE_URL)
response = self.get_json(
url,
headers={'Authorization': 'Bearer ' + access_token},
)
self.check_correct_audience(response['audience'])
userdata = response['user']
return userdata | [
"def",
"user_data",
"(",
"self",
",",
"access_token",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"url",
"=",
"'{}/userinfo'",
".",
"format",
"(",
"self",
".",
"BASE_URL",
")",
"response",
"=",
"self",
".",
"get_json",
"(",
"url",
",",
"headers",
"=",
"{",
"'Authorization'",
":",
"'Bearer '",
"+",
"access_token",
"}",
",",
")",
"self",
".",
"check_correct_audience",
"(",
"response",
"[",
"'audience'",
"]",
")",
"userdata",
"=",
"response",
"[",
"'user'",
"]",
"return",
"userdata"
] | 34.454545 | 16.818182 |
def _fullClone(self, shallowClone=False):
"""Perform full clone and checkout to the revision if specified
In the case of shallow clones if any of the step fail abort whole build step.
"""
res = yield self._clone(shallowClone)
if res != RC_SUCCESS:
return res
# If revision specified checkout that revision
if self.revision:
res = yield self._dovccmd(['reset', '--hard',
self.revision, '--'],
shallowClone)
# init and update submodules, recursively. If there's not recursion
# it will not do it.
if self.submodules:
res = yield self._dovccmd(['submodule', 'update',
'--init', '--recursive'],
shallowClone)
return res | [
"def",
"_fullClone",
"(",
"self",
",",
"shallowClone",
"=",
"False",
")",
":",
"res",
"=",
"yield",
"self",
".",
"_clone",
"(",
"shallowClone",
")",
"if",
"res",
"!=",
"RC_SUCCESS",
":",
"return",
"res",
"# If revision specified checkout that revision",
"if",
"self",
".",
"revision",
":",
"res",
"=",
"yield",
"self",
".",
"_dovccmd",
"(",
"[",
"'reset'",
",",
"'--hard'",
",",
"self",
".",
"revision",
",",
"'--'",
"]",
",",
"shallowClone",
")",
"# init and update submodules, recursively. If there's not recursion",
"# it will not do it.",
"if",
"self",
".",
"submodules",
":",
"res",
"=",
"yield",
"self",
".",
"_dovccmd",
"(",
"[",
"'submodule'",
",",
"'update'",
",",
"'--init'",
",",
"'--recursive'",
"]",
",",
"shallowClone",
")",
"return",
"res"
] | 41.809524 | 18 |
def concat(attrs, inputs, proto_obj):
""" Joins input arrays along a given axis. """
new_attrs = translation_utils._fix_attribute_names(attrs, {'axis': 'dim'})
return 'concat', new_attrs, inputs | [
"def",
"concat",
"(",
"attrs",
",",
"inputs",
",",
"proto_obj",
")",
":",
"new_attrs",
"=",
"translation_utils",
".",
"_fix_attribute_names",
"(",
"attrs",
",",
"{",
"'axis'",
":",
"'dim'",
"}",
")",
"return",
"'concat'",
",",
"new_attrs",
",",
"inputs"
] | 50.75 | 10.75 |
def encrypt_message(self, reply, timestamp=None, nonce=None):
"""
加密微信回复
:param reply: 加密前的回复
:type reply: WeChatReply 或 XML 文本
:return: 加密后的回复文本
"""
if hasattr(reply, "render"):
reply = reply.render()
timestamp = timestamp or to_text(int(time.time()))
nonce = nonce or generate_token(5)
encrypt = to_text(self.prp_crypto.encrypt(reply, self.app_id))
signature = get_signature(self.token, timestamp, nonce, encrypt)
return to_text(
self.ENCRYPTED_MESSAGE_XML.format(
encrypt=encrypt,
signature=signature,
timestamp=timestamp,
nonce=nonce
)
) | [
"def",
"encrypt_message",
"(",
"self",
",",
"reply",
",",
"timestamp",
"=",
"None",
",",
"nonce",
"=",
"None",
")",
":",
"if",
"hasattr",
"(",
"reply",
",",
"\"render\"",
")",
":",
"reply",
"=",
"reply",
".",
"render",
"(",
")",
"timestamp",
"=",
"timestamp",
"or",
"to_text",
"(",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
"nonce",
"=",
"nonce",
"or",
"generate_token",
"(",
"5",
")",
"encrypt",
"=",
"to_text",
"(",
"self",
".",
"prp_crypto",
".",
"encrypt",
"(",
"reply",
",",
"self",
".",
"app_id",
")",
")",
"signature",
"=",
"get_signature",
"(",
"self",
".",
"token",
",",
"timestamp",
",",
"nonce",
",",
"encrypt",
")",
"return",
"to_text",
"(",
"self",
".",
"ENCRYPTED_MESSAGE_XML",
".",
"format",
"(",
"encrypt",
"=",
"encrypt",
",",
"signature",
"=",
"signature",
",",
"timestamp",
"=",
"timestamp",
",",
"nonce",
"=",
"nonce",
")",
")"
] | 32.954545 | 14.409091 |
def getRotatedSize(corners, angle):
""" Determine the size of a rotated (meta)image."""
if angle:
_rotm = fileutil.buildRotMatrix(angle)
# Rotate about the center
_corners = np.dot(corners, _rotm)
else:
# If there is no rotation, simply return original values
_corners = corners
return computeRange(_corners) | [
"def",
"getRotatedSize",
"(",
"corners",
",",
"angle",
")",
":",
"if",
"angle",
":",
"_rotm",
"=",
"fileutil",
".",
"buildRotMatrix",
"(",
"angle",
")",
"# Rotate about the center",
"_corners",
"=",
"np",
".",
"dot",
"(",
"corners",
",",
"_rotm",
")",
"else",
":",
"# If there is no rotation, simply return original values",
"_corners",
"=",
"corners",
"return",
"computeRange",
"(",
"_corners",
")"
] | 32.272727 | 14.727273 |
def prt_goids(self, goids=None, prtfmt=None, sortby=True, prt=sys.stdout):
"""Given GO IDs, print decriptive info about each GO Term."""
if goids is None:
goids = self.go_sources
nts = self.get_nts(goids, sortby)
if prtfmt is None:
prtfmt = self.prt_attr['fmta']
for ntgo in nts:
key2val = ntgo._asdict()
prt.write("{GO}\n".format(GO=prtfmt.format(**key2val)))
return nts | [
"def",
"prt_goids",
"(",
"self",
",",
"goids",
"=",
"None",
",",
"prtfmt",
"=",
"None",
",",
"sortby",
"=",
"True",
",",
"prt",
"=",
"sys",
".",
"stdout",
")",
":",
"if",
"goids",
"is",
"None",
":",
"goids",
"=",
"self",
".",
"go_sources",
"nts",
"=",
"self",
".",
"get_nts",
"(",
"goids",
",",
"sortby",
")",
"if",
"prtfmt",
"is",
"None",
":",
"prtfmt",
"=",
"self",
".",
"prt_attr",
"[",
"'fmta'",
"]",
"for",
"ntgo",
"in",
"nts",
":",
"key2val",
"=",
"ntgo",
".",
"_asdict",
"(",
")",
"prt",
".",
"write",
"(",
"\"{GO}\\n\"",
".",
"format",
"(",
"GO",
"=",
"prtfmt",
".",
"format",
"(",
"*",
"*",
"key2val",
")",
")",
")",
"return",
"nts"
] | 41.545455 | 12.727273 |
def gmst(utc_time):
"""Greenwich mean sidereal utc_time, in radians.
As defined in the AIAA 2006 implementation:
http://www.celestrak.com/publications/AIAA/2006-6753/
"""
ut1 = jdays2000(utc_time) / 36525.0
theta = 67310.54841 + ut1 * (876600 * 3600 + 8640184.812866 + ut1 *
(0.093104 - ut1 * 6.2 * 10e-6))
return np.deg2rad(theta / 240.0) % (2 * np.pi) | [
"def",
"gmst",
"(",
"utc_time",
")",
":",
"ut1",
"=",
"jdays2000",
"(",
"utc_time",
")",
"/",
"36525.0",
"theta",
"=",
"67310.54841",
"+",
"ut1",
"*",
"(",
"876600",
"*",
"3600",
"+",
"8640184.812866",
"+",
"ut1",
"*",
"(",
"0.093104",
"-",
"ut1",
"*",
"6.2",
"*",
"10e-6",
")",
")",
"return",
"np",
".",
"deg2rad",
"(",
"theta",
"/",
"240.0",
")",
"%",
"(",
"2",
"*",
"np",
".",
"pi",
")"
] | 40.6 | 15.1 |
def instaprint(figure='gcf', arguments='', threaded=False, file_format='pdf'):
"""
Quick function that saves the specified figure as a postscript and then
calls the command defined by spinmob.prefs['instaprint'] with this
postscript file as the argument.
figure='gcf' can be 'all', a number, or a list of numbers
"""
global _settings
if 'instaprint' not in _settings.keys():
print("No print command setup. Set the user variable settings['instaprint'].")
return
if figure=='gcf': figure=[_pylab.gcf().number]
elif figure=='all': figure=_pylab.get_fignums()
if not getattr(figure,'__iter__',False): figure = [figure]
print("figure numbers in queue:", figure)
figures=[]
for n in figure: figures.append(_pylab.figure(n))
# now run the ps printing command
if threaded:
# store the canvas type of the last figure
canvas_type = type(figures[-1].canvas)
# launch the aforementioned function as a separate thread
_thread.start_new_thread(_print_figures, (figures,arguments,file_format,))
# wait until the thread is running
_time.sleep(0.25)
# wait until the canvas type has returned to normal
t0 = _time.time()
while not canvas_type == type(figures[-1].canvas) and _time.time()-t0 < 5.0:
_time.sleep(0.1)
if _time.time()-t0 >= 5.0:
print("WARNING: Timed out waiting for canvas to return to original state!")
# bring back the figure and command line
_pylab.draw()
else:
_print_figures(figures, arguments, file_format)
_pylab.draw() | [
"def",
"instaprint",
"(",
"figure",
"=",
"'gcf'",
",",
"arguments",
"=",
"''",
",",
"threaded",
"=",
"False",
",",
"file_format",
"=",
"'pdf'",
")",
":",
"global",
"_settings",
"if",
"'instaprint'",
"not",
"in",
"_settings",
".",
"keys",
"(",
")",
":",
"print",
"(",
"\"No print command setup. Set the user variable settings['instaprint'].\"",
")",
"return",
"if",
"figure",
"==",
"'gcf'",
":",
"figure",
"=",
"[",
"_pylab",
".",
"gcf",
"(",
")",
".",
"number",
"]",
"elif",
"figure",
"==",
"'all'",
":",
"figure",
"=",
"_pylab",
".",
"get_fignums",
"(",
")",
"if",
"not",
"getattr",
"(",
"figure",
",",
"'__iter__'",
",",
"False",
")",
":",
"figure",
"=",
"[",
"figure",
"]",
"print",
"(",
"\"figure numbers in queue:\"",
",",
"figure",
")",
"figures",
"=",
"[",
"]",
"for",
"n",
"in",
"figure",
":",
"figures",
".",
"append",
"(",
"_pylab",
".",
"figure",
"(",
"n",
")",
")",
"# now run the ps printing command",
"if",
"threaded",
":",
"# store the canvas type of the last figure",
"canvas_type",
"=",
"type",
"(",
"figures",
"[",
"-",
"1",
"]",
".",
"canvas",
")",
"# launch the aforementioned function as a separate thread",
"_thread",
".",
"start_new_thread",
"(",
"_print_figures",
",",
"(",
"figures",
",",
"arguments",
",",
"file_format",
",",
")",
")",
"# wait until the thread is running",
"_time",
".",
"sleep",
"(",
"0.25",
")",
"# wait until the canvas type has returned to normal",
"t0",
"=",
"_time",
".",
"time",
"(",
")",
"while",
"not",
"canvas_type",
"==",
"type",
"(",
"figures",
"[",
"-",
"1",
"]",
".",
"canvas",
")",
"and",
"_time",
".",
"time",
"(",
")",
"-",
"t0",
"<",
"5.0",
":",
"_time",
".",
"sleep",
"(",
"0.1",
")",
"if",
"_time",
".",
"time",
"(",
")",
"-",
"t0",
">=",
"5.0",
":",
"print",
"(",
"\"WARNING: Timed out waiting for canvas to return to original state!\"",
")",
"# bring back the figure and command line",
"_pylab",
".",
"draw",
"(",
")",
"else",
":",
"_print_figures",
"(",
"figures",
",",
"arguments",
",",
"file_format",
")",
"_pylab",
".",
"draw",
"(",
")"
] | 33.583333 | 24.125 |
def n_join(self, other, psi=-40.76, omega=-178.25, phi=-65.07,
o_c_n_angle=None, c_n_ca_angle=None, c_n_length=None, relabel=True):
"""Joins other to self at the N-terminus via a peptide bond.
Notes
-----
This function directly modifies self. It does not return a new object.
Parameters
----------
other: Residue or Polypeptide
psi: float
Psi torsion angle (degrees) between final `Residue` of other
and first `Residue` of self.
omega: float
Omega torsion angle (degrees) between final `Residue` of
other and first `Residue` of self.
phi: float
Phi torsion angle (degrees) between final `Residue` of other
and first `Residue` of self.
o_c_n_angle: float or None
Desired angle between O, C (final `Residue` of other) and N
(first `Residue` of self) atoms. If `None`, default value is
taken from `ideal_backbone_bond_angles`.
c_n_ca_angle: float or None
Desired angle between C (final `Residue` of other) and N, CA
(first `Residue` of self) atoms. If `None`, default value is taken
from `ideal_backbone_bond_angles`.
c_n_length: float or None
Desired peptide bond length between final `Residue` of other
and first `Residue` of self. If None, default value is taken
from ideal_backbone_bond_lengths.
relabel: bool
If True, relabel_all is run on self before returning.
Raises
------
TypeError:
If other is not a `Residue` or a `Polypeptide`
"""
if isinstance(other, Residue):
other = Polypeptide([other])
if not isinstance(other, Polypeptide):
raise TypeError(
'Only Polypeptide or Residue objects can be joined to a Polypeptide')
if abs(omega) >= 90:
peptide_conformation = 'trans'
else:
peptide_conformation = 'cis'
if o_c_n_angle is None:
o_c_n_angle = ideal_backbone_bond_angles[peptide_conformation]['o_c_n']
if c_n_ca_angle is None:
c_n_ca_angle = ideal_backbone_bond_angles[peptide_conformation]['c_n_ca']
if c_n_length is None:
c_n_length = ideal_backbone_bond_lengths['c_n']
r1 = self[0]
r1_n = r1['N']._vector
r1_ca = r1['CA']._vector
r1_c = r1['C']._vector
# p1 is point that will be used to position the C atom of r2.
p1 = r1_ca[:]
# rotate p1 by c_n_ca_angle, about axis perpendicular to the
# r1_n, r1_ca, r1_c plane, passing through r1_ca.
axis = numpy.cross((r1_ca - r1_n), (r1_c - r1_n))
q = Quaternion.angle_and_axis(angle=c_n_ca_angle, axis=axis)
p1 = q.rotate_vector(v=p1, point=r1_n)
# Ensure p1 is separated from r1_n by the correct distance.
p1 = r1_n + (c_n_length * unit_vector(p1 - r1_n))
# translate other so that its final C atom is at p1
other.translate(vector=(p1 - other[-1]['C']._vector))
# Force CA-C=O-N to be in a plane, and fix O=C-N angle accordingly
measured_dihedral = dihedral(
other[-1]['CA'], other[-1]['C'], other[-1]['O'], r1['N'])
desired_dihedral = 180.0
axis = other[-1]['O'] - other[-1]['C']
other.rotate(angle=(measured_dihedral - desired_dihedral),
axis=axis, point=other[-1]['C']._vector)
axis = (numpy.cross(other[-1]['O'] - other[-1]
['C'], r1['N'] - other[-1]['C']))
measured_o_c_n = angle_between_vectors(
other[-1]['O'] - other[-1]['C'], r1['N'] - other[-1]['C'])
other.rotate(angle=(measured_o_c_n - o_c_n_angle),
axis=axis, point=other[-1]['C']._vector)
# rotate other to obtain desired phi, omega, psi values at the join.
measured_phi = dihedral(other[-1]['C'], r1['N'], r1['CA'], r1['C'])
other.rotate(angle=(phi - measured_phi),
axis=(r1_n - r1_ca), point=r1_ca)
measured_omega = dihedral(
other[-1]['CA'], other[-1]['C'], r1['N'], r1['CA'])
other.rotate(angle=(measured_omega - omega),
axis=(r1['N'] - other[-1]['C']), point=r1_n)
measured_psi = dihedral(
other[-1]['N'], other[-1]['CA'], other[-1]['C'], r1['N'])
other.rotate(angle=-(measured_psi - psi), axis=(other[-1]['CA'] - other[-1]['C']),
point=other[-1]['CA']._vector)
self._monomers = other._monomers + self._monomers
if relabel:
self.relabel_all()
self.tags['assigned_ff'] = False
return | [
"def",
"n_join",
"(",
"self",
",",
"other",
",",
"psi",
"=",
"-",
"40.76",
",",
"omega",
"=",
"-",
"178.25",
",",
"phi",
"=",
"-",
"65.07",
",",
"o_c_n_angle",
"=",
"None",
",",
"c_n_ca_angle",
"=",
"None",
",",
"c_n_length",
"=",
"None",
",",
"relabel",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"Residue",
")",
":",
"other",
"=",
"Polypeptide",
"(",
"[",
"other",
"]",
")",
"if",
"not",
"isinstance",
"(",
"other",
",",
"Polypeptide",
")",
":",
"raise",
"TypeError",
"(",
"'Only Polypeptide or Residue objects can be joined to a Polypeptide'",
")",
"if",
"abs",
"(",
"omega",
")",
">=",
"90",
":",
"peptide_conformation",
"=",
"'trans'",
"else",
":",
"peptide_conformation",
"=",
"'cis'",
"if",
"o_c_n_angle",
"is",
"None",
":",
"o_c_n_angle",
"=",
"ideal_backbone_bond_angles",
"[",
"peptide_conformation",
"]",
"[",
"'o_c_n'",
"]",
"if",
"c_n_ca_angle",
"is",
"None",
":",
"c_n_ca_angle",
"=",
"ideal_backbone_bond_angles",
"[",
"peptide_conformation",
"]",
"[",
"'c_n_ca'",
"]",
"if",
"c_n_length",
"is",
"None",
":",
"c_n_length",
"=",
"ideal_backbone_bond_lengths",
"[",
"'c_n'",
"]",
"r1",
"=",
"self",
"[",
"0",
"]",
"r1_n",
"=",
"r1",
"[",
"'N'",
"]",
".",
"_vector",
"r1_ca",
"=",
"r1",
"[",
"'CA'",
"]",
".",
"_vector",
"r1_c",
"=",
"r1",
"[",
"'C'",
"]",
".",
"_vector",
"# p1 is point that will be used to position the C atom of r2.",
"p1",
"=",
"r1_ca",
"[",
":",
"]",
"# rotate p1 by c_n_ca_angle, about axis perpendicular to the",
"# r1_n, r1_ca, r1_c plane, passing through r1_ca.",
"axis",
"=",
"numpy",
".",
"cross",
"(",
"(",
"r1_ca",
"-",
"r1_n",
")",
",",
"(",
"r1_c",
"-",
"r1_n",
")",
")",
"q",
"=",
"Quaternion",
".",
"angle_and_axis",
"(",
"angle",
"=",
"c_n_ca_angle",
",",
"axis",
"=",
"axis",
")",
"p1",
"=",
"q",
".",
"rotate_vector",
"(",
"v",
"=",
"p1",
",",
"point",
"=",
"r1_n",
")",
"# Ensure p1 is separated from r1_n by the correct distance.",
"p1",
"=",
"r1_n",
"+",
"(",
"c_n_length",
"*",
"unit_vector",
"(",
"p1",
"-",
"r1_n",
")",
")",
"# translate other so that its final C atom is at p1",
"other",
".",
"translate",
"(",
"vector",
"=",
"(",
"p1",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
".",
"_vector",
")",
")",
"# Force CA-C=O-N to be in a plane, and fix O=C-N angle accordingly",
"measured_dihedral",
"=",
"dihedral",
"(",
"other",
"[",
"-",
"1",
"]",
"[",
"'CA'",
"]",
",",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
",",
"other",
"[",
"-",
"1",
"]",
"[",
"'O'",
"]",
",",
"r1",
"[",
"'N'",
"]",
")",
"desired_dihedral",
"=",
"180.0",
"axis",
"=",
"other",
"[",
"-",
"1",
"]",
"[",
"'O'",
"]",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
"other",
".",
"rotate",
"(",
"angle",
"=",
"(",
"measured_dihedral",
"-",
"desired_dihedral",
")",
",",
"axis",
"=",
"axis",
",",
"point",
"=",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
".",
"_vector",
")",
"axis",
"=",
"(",
"numpy",
".",
"cross",
"(",
"other",
"[",
"-",
"1",
"]",
"[",
"'O'",
"]",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
",",
"r1",
"[",
"'N'",
"]",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
")",
")",
"measured_o_c_n",
"=",
"angle_between_vectors",
"(",
"other",
"[",
"-",
"1",
"]",
"[",
"'O'",
"]",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
",",
"r1",
"[",
"'N'",
"]",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
")",
"other",
".",
"rotate",
"(",
"angle",
"=",
"(",
"measured_o_c_n",
"-",
"o_c_n_angle",
")",
",",
"axis",
"=",
"axis",
",",
"point",
"=",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
".",
"_vector",
")",
"# rotate other to obtain desired phi, omega, psi values at the join.",
"measured_phi",
"=",
"dihedral",
"(",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
",",
"r1",
"[",
"'N'",
"]",
",",
"r1",
"[",
"'CA'",
"]",
",",
"r1",
"[",
"'C'",
"]",
")",
"other",
".",
"rotate",
"(",
"angle",
"=",
"(",
"phi",
"-",
"measured_phi",
")",
",",
"axis",
"=",
"(",
"r1_n",
"-",
"r1_ca",
")",
",",
"point",
"=",
"r1_ca",
")",
"measured_omega",
"=",
"dihedral",
"(",
"other",
"[",
"-",
"1",
"]",
"[",
"'CA'",
"]",
",",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
",",
"r1",
"[",
"'N'",
"]",
",",
"r1",
"[",
"'CA'",
"]",
")",
"other",
".",
"rotate",
"(",
"angle",
"=",
"(",
"measured_omega",
"-",
"omega",
")",
",",
"axis",
"=",
"(",
"r1",
"[",
"'N'",
"]",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
")",
",",
"point",
"=",
"r1_n",
")",
"measured_psi",
"=",
"dihedral",
"(",
"other",
"[",
"-",
"1",
"]",
"[",
"'N'",
"]",
",",
"other",
"[",
"-",
"1",
"]",
"[",
"'CA'",
"]",
",",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
",",
"r1",
"[",
"'N'",
"]",
")",
"other",
".",
"rotate",
"(",
"angle",
"=",
"-",
"(",
"measured_psi",
"-",
"psi",
")",
",",
"axis",
"=",
"(",
"other",
"[",
"-",
"1",
"]",
"[",
"'CA'",
"]",
"-",
"other",
"[",
"-",
"1",
"]",
"[",
"'C'",
"]",
")",
",",
"point",
"=",
"other",
"[",
"-",
"1",
"]",
"[",
"'CA'",
"]",
".",
"_vector",
")",
"self",
".",
"_monomers",
"=",
"other",
".",
"_monomers",
"+",
"self",
".",
"_monomers",
"if",
"relabel",
":",
"self",
".",
"relabel_all",
"(",
")",
"self",
".",
"tags",
"[",
"'assigned_ff'",
"]",
"=",
"False",
"return"
] | 46.94 | 19.45 |
def update_network(self, network, body=None):
"""Updates a network."""
return self.put(self.network_path % (network), body=body) | [
"def",
"update_network",
"(",
"self",
",",
"network",
",",
"body",
"=",
"None",
")",
":",
"return",
"self",
".",
"put",
"(",
"self",
".",
"network_path",
"%",
"(",
"network",
")",
",",
"body",
"=",
"body",
")"
] | 47.333333 | 10 |
def validate_string_list(lst):
"""Validate that the input is a list of strings.
Raises ValueError if not."""
if not isinstance(lst, list):
raise ValueError('input %r must be a list' % lst)
for x in lst:
if not isinstance(x, basestring):
raise ValueError('element %r in list must be a string' % x) | [
"def",
"validate_string_list",
"(",
"lst",
")",
":",
"if",
"not",
"isinstance",
"(",
"lst",
",",
"list",
")",
":",
"raise",
"ValueError",
"(",
"'input %r must be a list'",
"%",
"lst",
")",
"for",
"x",
"in",
"lst",
":",
"if",
"not",
"isinstance",
"(",
"x",
",",
"basestring",
")",
":",
"raise",
"ValueError",
"(",
"'element %r in list must be a string'",
"%",
"x",
")"
] | 37 | 14.333333 |
def list(
self, skiptoken=None, skip=None, top=None, select=None, search=None, filter=None, view=None, group_name=None, cache_control="no-cache", custom_headers=None, raw=False, **operation_config):
"""List all entities (Management Groups, Subscriptions, etc.) for the
authenticated user.
:param skiptoken: Page continuation token is only used if a previous
operation returned a partial result. If a previous response contains a
nextLink element, the value of the nextLink element will include a
token parameter that specifies a starting point to use for subsequent
calls.
:type skiptoken: str
:param skip: Number of entities to skip over when retrieving results.
Passing this in will override $skipToken.
:type skip: int
:param top: Number of elements to return when retrieving results.
Passing this in will override $skipToken.
:type top: int
:param select: This parameter specifies the fields to include in the
response. Can include any combination of
Name,DisplayName,Type,ParentDisplayNameChain,ParentChain, e.g.
'$select=Name,DisplayName,Type,ParentDisplayNameChain,ParentNameChain'.
When specified the $select parameter can override select in
$skipToken.
:type select: str
:param search: The $search parameter is used in conjunction with the
$filter parameter to return three different outputs depending on the
parameter passed in. With $search=AllowedParents the API will return
the entity info of all groups that the requested entity will be able
to reparent to as determined by the user's permissions. With
$search=AllowedChildren the API will return the entity info of all
entities that can be added as children of the requested entity. With
$search=ParentAndFirstLevelChildren the API will return the parent and
first level of children that the user has either direct access to or
indirect access via one of their descendants. Possible values include:
'AllowedParents', 'AllowedChildren', 'ParentAndFirstLevelChildren',
'ParentOnly', 'ChildrenOnly'
:type search: str
:param filter: The filter parameter allows you to filter on the name
or display name fields. You can check for equality on the name field
(e.g. name eq '{entityName}') and you can check for substrings on
either the name or display name fields(e.g. contains(name,
'{substringToSearch}'), contains(displayName, '{substringToSearch')).
Note that the '{entityName}' and '{substringToSearch}' fields are
checked case insensitively.
:type filter: str
:param view: The view parameter allows clients to filter the type of
data that is returned by the getEntities call. Possible values
include: 'FullHierarchy', 'GroupsOnly', 'SubscriptionsOnly', 'Audit'
:type view: str
:param group_name: A filter which allows the get entities call to
focus on a particular group (i.e. "$filter=name eq 'groupName'")
:type group_name: str
:param cache_control: Indicates that the request shouldn't utilize any
caches.
:type cache_control: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of EntityInfo
:rtype:
~azure.mgmt.managementgroups.models.EntityInfoPaged[~azure.mgmt.managementgroups.models.EntityInfo]
:raises:
:class:`ErrorResponseException<azure.mgmt.managementgroups.models.ErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
if skiptoken is not None:
query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, 'str')
if search is not None:
query_parameters['$search'] = self._serialize.query("search", search, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if view is not None:
query_parameters['$view'] = self._serialize.query("view", view, 'str')
if group_name is not None:
query_parameters['groupName'] = self._serialize.query("group_name", group_name, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if cache_control is not None:
header_parameters['Cache-Control'] = self._serialize.header("cache_control", cache_control, 'str')
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.EntityInfoPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.EntityInfoPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized | [
"def",
"list",
"(",
"self",
",",
"skiptoken",
"=",
"None",
",",
"skip",
"=",
"None",
",",
"top",
"=",
"None",
",",
"select",
"=",
"None",
",",
"search",
"=",
"None",
",",
"filter",
"=",
"None",
",",
"view",
"=",
"None",
",",
"group_name",
"=",
"None",
",",
"cache_control",
"=",
"\"no-cache\"",
",",
"custom_headers",
"=",
"None",
",",
"raw",
"=",
"False",
",",
"*",
"*",
"operation_config",
")",
":",
"def",
"internal_paging",
"(",
"next_link",
"=",
"None",
",",
"raw",
"=",
"False",
")",
":",
"if",
"not",
"next_link",
":",
"# Construct URL",
"url",
"=",
"self",
".",
"list",
".",
"metadata",
"[",
"'url'",
"]",
"# Construct parameters",
"query_parameters",
"=",
"{",
"}",
"query_parameters",
"[",
"'api-version'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"self.api_version\"",
",",
"self",
".",
"api_version",
",",
"'str'",
")",
"if",
"skiptoken",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$skiptoken'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"skiptoken\"",
",",
"skiptoken",
",",
"'str'",
")",
"if",
"skip",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$skip'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"skip\"",
",",
"skip",
",",
"'int'",
")",
"if",
"top",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$top'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"top\"",
",",
"top",
",",
"'int'",
")",
"if",
"select",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$select'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"select\"",
",",
"select",
",",
"'str'",
")",
"if",
"search",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$search'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"search\"",
",",
"search",
",",
"'str'",
")",
"if",
"filter",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$filter'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"filter\"",
",",
"filter",
",",
"'str'",
")",
"if",
"view",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$view'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"view\"",
",",
"view",
",",
"'str'",
")",
"if",
"group_name",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'groupName'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"\"group_name\"",
",",
"group_name",
",",
"'str'",
")",
"else",
":",
"url",
"=",
"next_link",
"query_parameters",
"=",
"{",
"}",
"# Construct headers",
"header_parameters",
"=",
"{",
"}",
"header_parameters",
"[",
"'Accept'",
"]",
"=",
"'application/json'",
"if",
"self",
".",
"config",
".",
"generate_client_request_id",
":",
"header_parameters",
"[",
"'x-ms-client-request-id'",
"]",
"=",
"str",
"(",
"uuid",
".",
"uuid1",
"(",
")",
")",
"if",
"custom_headers",
":",
"header_parameters",
".",
"update",
"(",
"custom_headers",
")",
"if",
"cache_control",
"is",
"not",
"None",
":",
"header_parameters",
"[",
"'Cache-Control'",
"]",
"=",
"self",
".",
"_serialize",
".",
"header",
"(",
"\"cache_control\"",
",",
"cache_control",
",",
"'str'",
")",
"if",
"self",
".",
"config",
".",
"accept_language",
"is",
"not",
"None",
":",
"header_parameters",
"[",
"'accept-language'",
"]",
"=",
"self",
".",
"_serialize",
".",
"header",
"(",
"\"self.config.accept_language\"",
",",
"self",
".",
"config",
".",
"accept_language",
",",
"'str'",
")",
"# Construct and send request",
"request",
"=",
"self",
".",
"_client",
".",
"post",
"(",
"url",
",",
"query_parameters",
",",
"header_parameters",
")",
"response",
"=",
"self",
".",
"_client",
".",
"send",
"(",
"request",
",",
"stream",
"=",
"False",
",",
"*",
"*",
"operation_config",
")",
"if",
"response",
".",
"status_code",
"not",
"in",
"[",
"200",
"]",
":",
"raise",
"models",
".",
"ErrorResponseException",
"(",
"self",
".",
"_deserialize",
",",
"response",
")",
"return",
"response",
"# Deserialize response",
"deserialized",
"=",
"models",
".",
"EntityInfoPaged",
"(",
"internal_paging",
",",
"self",
".",
"_deserialize",
".",
"dependencies",
")",
"if",
"raw",
":",
"header_dict",
"=",
"{",
"}",
"client_raw_response",
"=",
"models",
".",
"EntityInfoPaged",
"(",
"internal_paging",
",",
"self",
".",
"_deserialize",
".",
"dependencies",
",",
"header_dict",
")",
"return",
"client_raw_response",
"return",
"deserialized"
] | 54.047619 | 28.68254 |
def _put(self, *args, **kwargs):
"""
A wrapper for putting things. It will also json encode your 'data' parameter
:returns: The response of your put
:rtype: dict
"""
if 'data' in kwargs:
kwargs['data'] = json.dumps(kwargs['data'])
response = requests.put(*args, **kwargs)
response.raise_for_status() | [
"def",
"_put",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'data'",
"in",
"kwargs",
":",
"kwargs",
"[",
"'data'",
"]",
"=",
"json",
".",
"dumps",
"(",
"kwargs",
"[",
"'data'",
"]",
")",
"response",
"=",
"requests",
".",
"put",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"response",
".",
"raise_for_status",
"(",
")"
] | 33.272727 | 14 |
def _parse_bounding_box(bounding_box):
'''
Parse response bounding box from the CapakeyRestGateway to (MinimumX, MinimumY, MaximumX, MaximumY)
:param bounding_box: response bounding box from the CapakeyRestGateway
:return: (MinimumX, MinimumY, MaximumX, MaximumY)
'''
coordinates = json.loads(bounding_box)["coordinates"]
x_coords = [x for x, y in coordinates[0]]
y_coords = [y for x, y in coordinates[0]]
return min(x_coords), min(y_coords), max(x_coords), max(y_coords) | [
"def",
"_parse_bounding_box",
"(",
"bounding_box",
")",
":",
"coordinates",
"=",
"json",
".",
"loads",
"(",
"bounding_box",
")",
"[",
"\"coordinates\"",
"]",
"x_coords",
"=",
"[",
"x",
"for",
"x",
",",
"y",
"in",
"coordinates",
"[",
"0",
"]",
"]",
"y_coords",
"=",
"[",
"y",
"for",
"x",
",",
"y",
"in",
"coordinates",
"[",
"0",
"]",
"]",
"return",
"min",
"(",
"x_coords",
")",
",",
"min",
"(",
"y_coords",
")",
",",
"max",
"(",
"x_coords",
")",
",",
"max",
"(",
"y_coords",
")"
] | 49.272727 | 26 |
def serialize(self):
"""
Serializes the Peer data as a simple JSON map string.
"""
return json.dumps({
"name": self.name,
"ip": self.ip,
"port": self.port
}, sort_keys=True) | [
"def",
"serialize",
"(",
"self",
")",
":",
"return",
"json",
".",
"dumps",
"(",
"{",
"\"name\"",
":",
"self",
".",
"name",
",",
"\"ip\"",
":",
"self",
".",
"ip",
",",
"\"port\"",
":",
"self",
".",
"port",
"}",
",",
"sort_keys",
"=",
"True",
")"
] | 26.777778 | 11.444444 |
def context_include(zap_helper, name, pattern):
"""Include a pattern in a given context."""
console.info('Including regex {0} in context with name: {1}'.format(pattern, name))
with zap_error_handler():
result = zap_helper.zap.context.include_in_context(contextname=name, regex=pattern)
if result != 'OK':
raise ZAPError('Including regex from context failed: {}'.format(result)) | [
"def",
"context_include",
"(",
"zap_helper",
",",
"name",
",",
"pattern",
")",
":",
"console",
".",
"info",
"(",
"'Including regex {0} in context with name: {1}'",
".",
"format",
"(",
"pattern",
",",
"name",
")",
")",
"with",
"zap_error_handler",
"(",
")",
":",
"result",
"=",
"zap_helper",
".",
"zap",
".",
"context",
".",
"include_in_context",
"(",
"contextname",
"=",
"name",
",",
"regex",
"=",
"pattern",
")",
"if",
"result",
"!=",
"'OK'",
":",
"raise",
"ZAPError",
"(",
"'Including regex from context failed: {}'",
".",
"format",
"(",
"result",
")",
")"
] | 51.375 | 26.75 |
async def disable_user(self, username):
"""Disable a user.
:param str username: Username
"""
user_facade = client.UserManagerFacade.from_connection(
self.connection())
entity = client.Entity(tag.user(username))
return await user_facade.DisableUser([entity]) | [
"async",
"def",
"disable_user",
"(",
"self",
",",
"username",
")",
":",
"user_facade",
"=",
"client",
".",
"UserManagerFacade",
".",
"from_connection",
"(",
"self",
".",
"connection",
"(",
")",
")",
"entity",
"=",
"client",
".",
"Entity",
"(",
"tag",
".",
"user",
"(",
"username",
")",
")",
"return",
"await",
"user_facade",
".",
"DisableUser",
"(",
"[",
"entity",
"]",
")"
] | 31 | 14.1 |
def take_home_pay(gross_pay, employer_match, taxes_and_fees, numtype='float'):
"""
Calculate net take-home pay including employer retirement savings match
using the formula laid out by Mr. Money Mustache:
http://www.mrmoneymustache.com/2015/01/26/calculating-net-worth/
Args:
gross_pay: float or int, gross monthly pay.
employer_match: float or int, the 401(k) match from your employer.
taxes_and_fees: list, taxes and fees that are deducted from your paycheck.
numtype: string, 'decimal' or 'float'; the type of number to return.
Returns:
your monthly take-home pay.
"""
if numtype == 'decimal':
return (Decimal(gross_pay) + Decimal(employer_match)) - Decimal(
sum(taxes_and_fees)
)
else:
return (float(gross_pay) + float(employer_match)) - sum(taxes_and_fees) | [
"def",
"take_home_pay",
"(",
"gross_pay",
",",
"employer_match",
",",
"taxes_and_fees",
",",
"numtype",
"=",
"'float'",
")",
":",
"if",
"numtype",
"==",
"'decimal'",
":",
"return",
"(",
"Decimal",
"(",
"gross_pay",
")",
"+",
"Decimal",
"(",
"employer_match",
")",
")",
"-",
"Decimal",
"(",
"sum",
"(",
"taxes_and_fees",
")",
")",
"else",
":",
"return",
"(",
"float",
"(",
"gross_pay",
")",
"+",
"float",
"(",
"employer_match",
")",
")",
"-",
"sum",
"(",
"taxes_and_fees",
")"
] | 35.625 | 27.291667 |
def check_accesspoints(sess):
"""
check the status of all connected access points
"""
ap_names = walk_data(sess, name_ap_oid, helper)[0]
ap_operationals = walk_data(sess, operational_ap_oid, helper)[0]
ap_availabilitys = walk_data(sess, availability_ap_oid, helper)[0]
ap_alarms = walk_data(sess, alarm_ap_oid, helper)[0]
#ap_ip = walk_data(sess, ip_ap_oid, helper) # no result
helper.add_summary("Access Points Status")
for x in range(len(ap_names)):
ap_name = ap_names[x]
ap_operational = ap_operationals[x]
ap_availability = ap_availabilitys[x]
ap_alarm = ap_alarms[x]
# Add all states to the long output
helper.add_long_output("%s - Operational: %s - Availabilty: %s - Alarm: %s" % (ap_name, operational_states[int(ap_operational)], availability_states[int(ap_availability)], alarm_states[int(ap_alarm)]))
# Operational State
if ap_operational != "1" and ap_operational != "4":
helper.status(critical)
helper.add_summary("%s Operational State: %s" % (ap_name, operational_states[int(ap_operational)]))
# Avaiability State
if ap_availability != "3":
helper.status(critical)
helper.add_summary("%s Availability State: %s" % (ap_name, availability_states[int(ap_availability)]))
# Alarm State
if ap_alarm == "2":
helper.status(warning)
helper.add_summary("%s Controller Alarm State: %s" % (ap_name, alarm_states[int(ap_alarm)]))
if ap_alarm == "3" or ap_alarm == "4":
helper.status(critical)
helper.add_summary("%s Controller Alarm State: %s" % (ap_name, alarm_states[int(ap_alarm)])) | [
"def",
"check_accesspoints",
"(",
"sess",
")",
":",
"ap_names",
"=",
"walk_data",
"(",
"sess",
",",
"name_ap_oid",
",",
"helper",
")",
"[",
"0",
"]",
"ap_operationals",
"=",
"walk_data",
"(",
"sess",
",",
"operational_ap_oid",
",",
"helper",
")",
"[",
"0",
"]",
"ap_availabilitys",
"=",
"walk_data",
"(",
"sess",
",",
"availability_ap_oid",
",",
"helper",
")",
"[",
"0",
"]",
"ap_alarms",
"=",
"walk_data",
"(",
"sess",
",",
"alarm_ap_oid",
",",
"helper",
")",
"[",
"0",
"]",
"#ap_ip = walk_data(sess, ip_ap_oid, helper) # no result",
"helper",
".",
"add_summary",
"(",
"\"Access Points Status\"",
")",
"for",
"x",
"in",
"range",
"(",
"len",
"(",
"ap_names",
")",
")",
":",
"ap_name",
"=",
"ap_names",
"[",
"x",
"]",
"ap_operational",
"=",
"ap_operationals",
"[",
"x",
"]",
"ap_availability",
"=",
"ap_availabilitys",
"[",
"x",
"]",
"ap_alarm",
"=",
"ap_alarms",
"[",
"x",
"]",
"# Add all states to the long output",
"helper",
".",
"add_long_output",
"(",
"\"%s - Operational: %s - Availabilty: %s - Alarm: %s\"",
"%",
"(",
"ap_name",
",",
"operational_states",
"[",
"int",
"(",
"ap_operational",
")",
"]",
",",
"availability_states",
"[",
"int",
"(",
"ap_availability",
")",
"]",
",",
"alarm_states",
"[",
"int",
"(",
"ap_alarm",
")",
"]",
")",
")",
"# Operational State",
"if",
"ap_operational",
"!=",
"\"1\"",
"and",
"ap_operational",
"!=",
"\"4\"",
":",
"helper",
".",
"status",
"(",
"critical",
")",
"helper",
".",
"add_summary",
"(",
"\"%s Operational State: %s\"",
"%",
"(",
"ap_name",
",",
"operational_states",
"[",
"int",
"(",
"ap_operational",
")",
"]",
")",
")",
"# Avaiability State",
"if",
"ap_availability",
"!=",
"\"3\"",
":",
"helper",
".",
"status",
"(",
"critical",
")",
"helper",
".",
"add_summary",
"(",
"\"%s Availability State: %s\"",
"%",
"(",
"ap_name",
",",
"availability_states",
"[",
"int",
"(",
"ap_availability",
")",
"]",
")",
")",
"# Alarm State",
"if",
"ap_alarm",
"==",
"\"2\"",
":",
"helper",
".",
"status",
"(",
"warning",
")",
"helper",
".",
"add_summary",
"(",
"\"%s Controller Alarm State: %s\"",
"%",
"(",
"ap_name",
",",
"alarm_states",
"[",
"int",
"(",
"ap_alarm",
")",
"]",
")",
")",
"if",
"ap_alarm",
"==",
"\"3\"",
"or",
"ap_alarm",
"==",
"\"4\"",
":",
"helper",
".",
"status",
"(",
"critical",
")",
"helper",
".",
"add_summary",
"(",
"\"%s Controller Alarm State: %s\"",
"%",
"(",
"ap_name",
",",
"alarm_states",
"[",
"int",
"(",
"ap_alarm",
")",
"]",
")",
")"
] | 46.358974 | 27.025641 |
def status(self):
"""
check the status of the network and the peers
:return: network_height, peer_status
"""
peer = random.choice(self.PEERS)
formatted_peer = 'http://{}:4001'.format(peer)
peerdata = requests.get(url=formatted_peer + '/api/peers/').json()['peers']
peers_status = {}
networkheight = max([x['height'] for x in peerdata])
for i in peerdata:
if 'http://{}:4001'.format(i['ip']) in self.PEERS:
peers_status.update({i['ip']: {
'height': i['height'],
'status': i['status'],
'version': i['version'],
'delay': i['delay'],
}})
return {
'network_height': networkheight,
'peer_status': peers_status
} | [
"def",
"status",
"(",
"self",
")",
":",
"peer",
"=",
"random",
".",
"choice",
"(",
"self",
".",
"PEERS",
")",
"formatted_peer",
"=",
"'http://{}:4001'",
".",
"format",
"(",
"peer",
")",
"peerdata",
"=",
"requests",
".",
"get",
"(",
"url",
"=",
"formatted_peer",
"+",
"'/api/peers/'",
")",
".",
"json",
"(",
")",
"[",
"'peers'",
"]",
"peers_status",
"=",
"{",
"}",
"networkheight",
"=",
"max",
"(",
"[",
"x",
"[",
"'height'",
"]",
"for",
"x",
"in",
"peerdata",
"]",
")",
"for",
"i",
"in",
"peerdata",
":",
"if",
"'http://{}:4001'",
".",
"format",
"(",
"i",
"[",
"'ip'",
"]",
")",
"in",
"self",
".",
"PEERS",
":",
"peers_status",
".",
"update",
"(",
"{",
"i",
"[",
"'ip'",
"]",
":",
"{",
"'height'",
":",
"i",
"[",
"'height'",
"]",
",",
"'status'",
":",
"i",
"[",
"'status'",
"]",
",",
"'version'",
":",
"i",
"[",
"'version'",
"]",
",",
"'delay'",
":",
"i",
"[",
"'delay'",
"]",
",",
"}",
"}",
")",
"return",
"{",
"'network_height'",
":",
"networkheight",
",",
"'peer_status'",
":",
"peers_status",
"}"
] | 31.846154 | 16.307692 |
def get(self, sid):
"""
Constructs a CredentialListContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.trunking.v1.trunk.credential_list.CredentialListContext
:rtype: twilio.rest.trunking.v1.trunk.credential_list.CredentialListContext
"""
return CredentialListContext(self._version, trunk_sid=self._solution['trunk_sid'], sid=sid, ) | [
"def",
"get",
"(",
"self",
",",
"sid",
")",
":",
"return",
"CredentialListContext",
"(",
"self",
".",
"_version",
",",
"trunk_sid",
"=",
"self",
".",
"_solution",
"[",
"'trunk_sid'",
"]",
",",
"sid",
"=",
"sid",
",",
")"
] | 41.8 | 27.8 |
def _new_stream(self, idx):
'''Randomly select and create a new stream.
Parameters
----------
idx : int, [0:n_streams - 1]
The stream index to replace
'''
# Choose the stream index from the candidate pool
self.stream_idxs_[idx] = self.rng.choice(
self.n_streams, p=self.distribution_)
# Activate the Streamer, and get the weights
self.streams_[idx], self.stream_weights_[idx] = self._activate_stream(
self.stream_idxs_[idx])
# Reset the sample count to zero
self.stream_counts_[idx] = 0 | [
"def",
"_new_stream",
"(",
"self",
",",
"idx",
")",
":",
"# Choose the stream index from the candidate pool",
"self",
".",
"stream_idxs_",
"[",
"idx",
"]",
"=",
"self",
".",
"rng",
".",
"choice",
"(",
"self",
".",
"n_streams",
",",
"p",
"=",
"self",
".",
"distribution_",
")",
"# Activate the Streamer, and get the weights",
"self",
".",
"streams_",
"[",
"idx",
"]",
",",
"self",
".",
"stream_weights_",
"[",
"idx",
"]",
"=",
"self",
".",
"_activate_stream",
"(",
"self",
".",
"stream_idxs_",
"[",
"idx",
"]",
")",
"# Reset the sample count to zero",
"self",
".",
"stream_counts_",
"[",
"idx",
"]",
"=",
"0"
] | 33.111111 | 17.555556 |
def get_sub_doc(self, subpage):
"""Returns PyQuery object for a given subpage URL.
:subpage: The subpage of the season, e.g. 'per_game'.
:returns: PyQuery object.
"""
html = sportsref.utils.get_html(self._subpage_url(subpage))
return pq(html) | [
"def",
"get_sub_doc",
"(",
"self",
",",
"subpage",
")",
":",
"html",
"=",
"sportsref",
".",
"utils",
".",
"get_html",
"(",
"self",
".",
"_subpage_url",
"(",
"subpage",
")",
")",
"return",
"pq",
"(",
"html",
")"
] | 40.571429 | 11.571429 |
def list_namespaced_pod_disruption_budget(self, namespace, **kwargs): # noqa: E501
"""list_namespaced_pod_disruption_budget # noqa: E501
list or watch objects of kind PodDisruptionBudget # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_pod_disruption_budget(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1PodDisruptionBudgetList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_namespaced_pod_disruption_budget_with_http_info(namespace, **kwargs) # noqa: E501
else:
(data) = self.list_namespaced_pod_disruption_budget_with_http_info(namespace, **kwargs) # noqa: E501
return data | [
"def",
"list_namespaced_pod_disruption_budget",
"(",
"self",
",",
"namespace",
",",
"*",
"*",
"kwargs",
")",
":",
"# noqa: E501",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async_req'",
")",
":",
"return",
"self",
".",
"list_namespaced_pod_disruption_budget_with_http_info",
"(",
"namespace",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"else",
":",
"(",
"data",
")",
"=",
"self",
".",
"list_namespaced_pod_disruption_budget_with_http_info",
"(",
"namespace",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"return",
"data"
] | 165.233333 | 135.266667 |
def transform(self, X=None, y=None):
"""
Transform an image using an Affine transform with
rotation parameters randomly generated from the user-specified
range. Return the transform if X=None.
Arguments
---------
X : ANTsImage
Image to transform
y : ANTsImage (optional)
Another image to transform
Returns
-------
ANTsImage if y is None, else a tuple of ANTsImage types
Examples
--------
>>> import ants
>>> img = ants.image_read(ants.get_data('ch2'))
>>> tx = ants.contrib.RandomRotate3D(rotation_range=(-10,10))
>>> img2 = tx.transform(img)
"""
# random draw in rotation range
rotation_x = random.gauss(self.rotation_range[0], self.rotation_range[1])
rotation_y = random.gauss(self.rotation_range[0], self.rotation_range[1])
rotation_z = random.gauss(self.rotation_range[0], self.rotation_range[1])
self.params = (rotation_x, rotation_y, rotation_z)
tx = Rotate3D((rotation_x, rotation_y, rotation_z),
reference=self.reference,
lazy=self.lazy)
return tx.transform(X,y) | [
"def",
"transform",
"(",
"self",
",",
"X",
"=",
"None",
",",
"y",
"=",
"None",
")",
":",
"# random draw in rotation range",
"rotation_x",
"=",
"random",
".",
"gauss",
"(",
"self",
".",
"rotation_range",
"[",
"0",
"]",
",",
"self",
".",
"rotation_range",
"[",
"1",
"]",
")",
"rotation_y",
"=",
"random",
".",
"gauss",
"(",
"self",
".",
"rotation_range",
"[",
"0",
"]",
",",
"self",
".",
"rotation_range",
"[",
"1",
"]",
")",
"rotation_z",
"=",
"random",
".",
"gauss",
"(",
"self",
".",
"rotation_range",
"[",
"0",
"]",
",",
"self",
".",
"rotation_range",
"[",
"1",
"]",
")",
"self",
".",
"params",
"=",
"(",
"rotation_x",
",",
"rotation_y",
",",
"rotation_z",
")",
"tx",
"=",
"Rotate3D",
"(",
"(",
"rotation_x",
",",
"rotation_y",
",",
"rotation_z",
")",
",",
"reference",
"=",
"self",
".",
"reference",
",",
"lazy",
"=",
"self",
".",
"lazy",
")",
"return",
"tx",
".",
"transform",
"(",
"X",
",",
"y",
")"
] | 33.5 | 20.777778 |
def append_little_endian64(self, unsigned_value):
"""Appends an unsigned 64-bit integer to the internal buffer,
in little-endian byte order.
"""
if not 0 <= unsigned_value <= wire_format.UINT64_MAX:
raise errors.EncodeError(
'Unsigned 64-bit out of range: %d' % unsigned_value)
self.append_raw_bytes(struct.pack(
wire_format.FORMAT_UINT64_LITTLE_ENDIAN, unsigned_value)) | [
"def",
"append_little_endian64",
"(",
"self",
",",
"unsigned_value",
")",
":",
"if",
"not",
"0",
"<=",
"unsigned_value",
"<=",
"wire_format",
".",
"UINT64_MAX",
":",
"raise",
"errors",
".",
"EncodeError",
"(",
"'Unsigned 64-bit out of range: %d'",
"%",
"unsigned_value",
")",
"self",
".",
"append_raw_bytes",
"(",
"struct",
".",
"pack",
"(",
"wire_format",
".",
"FORMAT_UINT64_LITTLE_ENDIAN",
",",
"unsigned_value",
")",
")"
] | 49.111111 | 10.666667 |
def is_functional_group(self, atom, group):
"""Given a pybel atom, look up if it belongs to a function group"""
n_atoms = [a_neighbor.GetAtomicNum() for a_neighbor in pybel.ob.OBAtomAtomIter(atom.OBAtom)]
if group in ['quartamine', 'tertamine'] and atom.atomicnum == 7: # Nitrogen
# It's a nitrogen, so could be a protonated amine or quaternary ammonium
if '1' not in n_atoms and len(n_atoms) == 4:
return True if group == 'quartamine' else False # It's a quat. ammonium (N with 4 residues != H)
elif atom.OBAtom.GetHyb() == 3 and len(n_atoms) >= 3:
return True if group == 'tertamine' else False # It's sp3-hybridized, so could pick up an hydrogen
else:
return False
if group in ['sulfonium', 'sulfonicacid', 'sulfate'] and atom.atomicnum == 16: # Sulfur
if '1' not in n_atoms and len(n_atoms) == 3: # It's a sulfonium (S with 3 residues != H)
return True if group == 'sulfonium' else False
elif n_atoms.count(8) == 3: # It's a sulfonate or sulfonic acid
return True if group == 'sulfonicacid' else False
elif n_atoms.count(8) == 4: # It's a sulfate
return True if group == 'sulfate' else False
if group == 'phosphate' and atom.atomicnum == 15: # Phosphor
if set(n_atoms) == {8}: # It's a phosphate
return True
if group in ['carboxylate', 'guanidine'] and atom.atomicnum == 6: # It's a carbon atom
if n_atoms.count(8) == 2 and n_atoms.count(6) == 1: # It's a carboxylate group
return True if group == 'carboxylate' else False
elif n_atoms.count(7) == 3 and len(n_atoms) == 3: # It's a guanidine group
nitro_partners = []
for nitro in pybel.ob.OBAtomAtomIter(atom.OBAtom):
nitro_partners.append(len([b_neighbor for b_neighbor in pybel.ob.OBAtomAtomIter(nitro)]))
if min(nitro_partners) == 1: # One nitrogen is only connected to the carbon, can pick up a H
return True if group == 'guanidine' else False
if group == 'halocarbon' and atom.atomicnum in [9, 17, 35, 53]: # Halogen atoms
n_atoms = [na for na in pybel.ob.OBAtomAtomIter(atom.OBAtom) if na.GetAtomicNum() == 6]
if len(n_atoms) == 1: # Halocarbon
return True
else:
return False | [
"def",
"is_functional_group",
"(",
"self",
",",
"atom",
",",
"group",
")",
":",
"n_atoms",
"=",
"[",
"a_neighbor",
".",
"GetAtomicNum",
"(",
")",
"for",
"a_neighbor",
"in",
"pybel",
".",
"ob",
".",
"OBAtomAtomIter",
"(",
"atom",
".",
"OBAtom",
")",
"]",
"if",
"group",
"in",
"[",
"'quartamine'",
",",
"'tertamine'",
"]",
"and",
"atom",
".",
"atomicnum",
"==",
"7",
":",
"# Nitrogen",
"# It's a nitrogen, so could be a protonated amine or quaternary ammonium",
"if",
"'1'",
"not",
"in",
"n_atoms",
"and",
"len",
"(",
"n_atoms",
")",
"==",
"4",
":",
"return",
"True",
"if",
"group",
"==",
"'quartamine'",
"else",
"False",
"# It's a quat. ammonium (N with 4 residues != H)",
"elif",
"atom",
".",
"OBAtom",
".",
"GetHyb",
"(",
")",
"==",
"3",
"and",
"len",
"(",
"n_atoms",
")",
">=",
"3",
":",
"return",
"True",
"if",
"group",
"==",
"'tertamine'",
"else",
"False",
"# It's sp3-hybridized, so could pick up an hydrogen",
"else",
":",
"return",
"False",
"if",
"group",
"in",
"[",
"'sulfonium'",
",",
"'sulfonicacid'",
",",
"'sulfate'",
"]",
"and",
"atom",
".",
"atomicnum",
"==",
"16",
":",
"# Sulfur",
"if",
"'1'",
"not",
"in",
"n_atoms",
"and",
"len",
"(",
"n_atoms",
")",
"==",
"3",
":",
"# It's a sulfonium (S with 3 residues != H)",
"return",
"True",
"if",
"group",
"==",
"'sulfonium'",
"else",
"False",
"elif",
"n_atoms",
".",
"count",
"(",
"8",
")",
"==",
"3",
":",
"# It's a sulfonate or sulfonic acid",
"return",
"True",
"if",
"group",
"==",
"'sulfonicacid'",
"else",
"False",
"elif",
"n_atoms",
".",
"count",
"(",
"8",
")",
"==",
"4",
":",
"# It's a sulfate",
"return",
"True",
"if",
"group",
"==",
"'sulfate'",
"else",
"False",
"if",
"group",
"==",
"'phosphate'",
"and",
"atom",
".",
"atomicnum",
"==",
"15",
":",
"# Phosphor",
"if",
"set",
"(",
"n_atoms",
")",
"==",
"{",
"8",
"}",
":",
"# It's a phosphate",
"return",
"True",
"if",
"group",
"in",
"[",
"'carboxylate'",
",",
"'guanidine'",
"]",
"and",
"atom",
".",
"atomicnum",
"==",
"6",
":",
"# It's a carbon atom",
"if",
"n_atoms",
".",
"count",
"(",
"8",
")",
"==",
"2",
"and",
"n_atoms",
".",
"count",
"(",
"6",
")",
"==",
"1",
":",
"# It's a carboxylate group",
"return",
"True",
"if",
"group",
"==",
"'carboxylate'",
"else",
"False",
"elif",
"n_atoms",
".",
"count",
"(",
"7",
")",
"==",
"3",
"and",
"len",
"(",
"n_atoms",
")",
"==",
"3",
":",
"# It's a guanidine group",
"nitro_partners",
"=",
"[",
"]",
"for",
"nitro",
"in",
"pybel",
".",
"ob",
".",
"OBAtomAtomIter",
"(",
"atom",
".",
"OBAtom",
")",
":",
"nitro_partners",
".",
"append",
"(",
"len",
"(",
"[",
"b_neighbor",
"for",
"b_neighbor",
"in",
"pybel",
".",
"ob",
".",
"OBAtomAtomIter",
"(",
"nitro",
")",
"]",
")",
")",
"if",
"min",
"(",
"nitro_partners",
")",
"==",
"1",
":",
"# One nitrogen is only connected to the carbon, can pick up a H",
"return",
"True",
"if",
"group",
"==",
"'guanidine'",
"else",
"False",
"if",
"group",
"==",
"'halocarbon'",
"and",
"atom",
".",
"atomicnum",
"in",
"[",
"9",
",",
"17",
",",
"35",
",",
"53",
"]",
":",
"# Halogen atoms",
"n_atoms",
"=",
"[",
"na",
"for",
"na",
"in",
"pybel",
".",
"ob",
".",
"OBAtomAtomIter",
"(",
"atom",
".",
"OBAtom",
")",
"if",
"na",
".",
"GetAtomicNum",
"(",
")",
"==",
"6",
"]",
"if",
"len",
"(",
"n_atoms",
")",
"==",
"1",
":",
"# Halocarbon",
"return",
"True",
"else",
":",
"return",
"False"
] | 60.195122 | 34.414634 |
def _conf(cls, opts):
"""Setup logging via ini-file from logging_conf_file option."""
if not opts.logging_conf_file:
return False
if not os.path.exists(opts.logging_conf_file):
# FileNotFoundError added only in Python 3.3
# https://docs.python.org/3/whatsnew/3.3.html#pep-3151-reworking-the-os-and-io-exception-hierarchy
raise OSError("Error: Unable to locate specified logging configuration file!")
logging.config.fileConfig(opts.logging_conf_file, disable_existing_loggers=False)
return True | [
"def",
"_conf",
"(",
"cls",
",",
"opts",
")",
":",
"if",
"not",
"opts",
".",
"logging_conf_file",
":",
"return",
"False",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"opts",
".",
"logging_conf_file",
")",
":",
"# FileNotFoundError added only in Python 3.3",
"# https://docs.python.org/3/whatsnew/3.3.html#pep-3151-reworking-the-os-and-io-exception-hierarchy",
"raise",
"OSError",
"(",
"\"Error: Unable to locate specified logging configuration file!\"",
")",
"logging",
".",
"config",
".",
"fileConfig",
"(",
"opts",
".",
"logging_conf_file",
",",
"disable_existing_loggers",
"=",
"False",
")",
"return",
"True"
] | 47.666667 | 28.083333 |
def predict_proba(self, a, b, device=None):
    """Infer causal directions using the trained NCC pairwise model.

    Args:
        a (numpy.ndarray): Variable 1
        b (numpy.ndarray): Variable 2
        device (str): Device to run the algorithm on (defaults to ``cdt.SETTINGS.default_device``)

    Returns:
        float: Causation score (Value : 1 if a->b and -1 if b->a)

    Raises:
        ValueError: If the model has not been trained yet.
    """
    # Validate the model before resolving settings so callers get a clear
    # error message instead of a printed line plus a bare ValueError.
    if self.model is None:
        raise ValueError('Model has to be trained before doing any predictions')
    device = SETTINGS.get_default(device=device)
    # NOTE(review): `device` is resolved but never applied below -- the data
    # is moved based on th.cuda.is_available() instead. TODO confirm intent.
    if len(np.array(a).shape) == 1:
        a = np.array(a).reshape((-1, 1))
        b = np.array(b).reshape((-1, 1))
    m = np.hstack((a, b))
    m = scale(m)
    m = m.astype('float32')
    m = th.from_numpy(m).t().unsqueeze(0)
    if th.cuda.is_available():
        m = m.cuda()
    # Model outputs a probability in [0, 1]; rescale to a score in [-1, 1].
    return (self.model(m).data.cpu().numpy() - .5) * 2
"def",
"predict_proba",
"(",
"self",
",",
"a",
",",
"b",
",",
"device",
"=",
"None",
")",
":",
"device",
"=",
"SETTINGS",
".",
"get_default",
"(",
"device",
"=",
"device",
")",
"if",
"self",
".",
"model",
"is",
"None",
":",
"print",
"(",
"'Model has to be trained before doing any predictions'",
")",
"raise",
"ValueError",
"if",
"len",
"(",
"np",
".",
"array",
"(",
"a",
")",
".",
"shape",
")",
"==",
"1",
":",
"a",
"=",
"np",
".",
"array",
"(",
"a",
")",
".",
"reshape",
"(",
"(",
"-",
"1",
",",
"1",
")",
")",
"b",
"=",
"np",
".",
"array",
"(",
"b",
")",
".",
"reshape",
"(",
"(",
"-",
"1",
",",
"1",
")",
")",
"m",
"=",
"np",
".",
"hstack",
"(",
"(",
"a",
",",
"b",
")",
")",
"m",
"=",
"scale",
"(",
"m",
")",
"m",
"=",
"m",
".",
"astype",
"(",
"'float32'",
")",
"m",
"=",
"th",
".",
"from_numpy",
"(",
"m",
")",
".",
"t",
"(",
")",
".",
"unsqueeze",
"(",
"0",
")",
"if",
"th",
".",
"cuda",
".",
"is_available",
"(",
")",
":",
"m",
"=",
"m",
".",
"cuda",
"(",
")",
"return",
"(",
"self",
".",
"model",
"(",
"m",
")",
".",
"data",
".",
"cpu",
"(",
")",
".",
"numpy",
"(",
")",
"-",
".5",
")",
"*",
"2"
] | 35.444444 | 17.259259 |
def enable_contact_host_notifications(self, contact):
    """Enable host notifications for a contact

    Format of the line that triggers function call::

    ENABLE_CONTACT_HOST_NOTIFICATIONS;<contact_name>

    :param contact: contact to enable
    :type contact: alignak.objects.contact.Contact
    :return: None
    """
    if contact.host_notifications_enabled:
        # Already enabled: nothing to change, no brok to send.
        return
    contact.modified_attributes |= DICT_MODATTR["MODATTR_NOTIFICATIONS_ENABLED"].value
    contact.host_notifications_enabled = True
    self.send_an_element(contact.get_update_status_brok())
"def",
"enable_contact_host_notifications",
"(",
"self",
",",
"contact",
")",
":",
"if",
"not",
"contact",
".",
"host_notifications_enabled",
":",
"contact",
".",
"modified_attributes",
"|=",
"DICT_MODATTR",
"[",
"\"MODATTR_NOTIFICATIONS_ENABLED\"",
"]",
".",
"value",
"contact",
".",
"host_notifications_enabled",
"=",
"True",
"self",
".",
"send_an_element",
"(",
"contact",
".",
"get_update_status_brok",
"(",
")",
")"
] | 41.466667 | 15.933333 |
def traverse_bfs(self, include_self=True):
    '''Perform a Breadth-First Search (BFS) starting at this ``Node`` object. Yields (``Node``, distance) tuples

    Args:
        ``include_self`` (``bool``): ``True`` to include self in the traversal, otherwise ``False``
    '''
    # Bug fix: ``include_self`` was documented and validated but missing from
    # the signature, so every call raised NameError. It is now a real
    # parameter; when False, the starting node itself is not yielded (the
    # traversal still starts from it).
    if not isinstance(include_self, bool):
        raise TypeError("include_self must be a bool")
    q = deque()
    dist = dict()
    dist[self] = 0
    q.append((self, 0))
    while len(q) != 0:
        curr = q.popleft()
        if include_self or curr[0] is not self:
            yield curr
        # Explore children; a missing edge length counts as 0.
        for c in curr[0].children:
            if c not in dist:
                el = 0 if c.edge_length is None else c.edge_length
                dist[c] = dist[curr[0]] + el
                q.append((c, dist[c]))
        # Walk upward too, so the BFS covers the whole tree from any node.
        if curr[0].parent is not None and curr[0].parent not in dist:
            el = 0 if curr[0].edge_length is None else curr[0].edge_length
            dist[curr[0].parent] = dist[curr[0]] + el
            q.append((curr[0].parent, dist[curr[0].parent]))
"def",
"traverse_bfs",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"include_self",
",",
"bool",
")",
":",
"raise",
"TypeError",
"(",
"\"include_self must be a bool\"",
")",
"q",
"=",
"deque",
"(",
")",
"dist",
"=",
"dict",
"(",
")",
"dist",
"[",
"self",
"]",
"=",
"0",
"q",
".",
"append",
"(",
"(",
"self",
",",
"0",
")",
")",
"while",
"len",
"(",
"q",
")",
"!=",
"0",
":",
"curr",
"=",
"q",
".",
"popleft",
"(",
")",
"yield",
"curr",
"for",
"c",
"in",
"curr",
"[",
"0",
"]",
".",
"children",
":",
"if",
"c",
"not",
"in",
"dist",
":",
"if",
"c",
".",
"edge_length",
"is",
"None",
":",
"el",
"=",
"0",
"else",
":",
"el",
"=",
"c",
".",
"edge_length",
"dist",
"[",
"c",
"]",
"=",
"dist",
"[",
"curr",
"[",
"0",
"]",
"]",
"+",
"el",
"q",
".",
"append",
"(",
"(",
"c",
",",
"dist",
"[",
"c",
"]",
")",
")",
"if",
"curr",
"[",
"0",
"]",
".",
"parent",
"is",
"not",
"None",
"and",
"curr",
"[",
"0",
"]",
".",
"parent",
"not",
"in",
"dist",
":",
"if",
"curr",
"[",
"0",
"]",
".",
"edge_length",
"is",
"None",
":",
"el",
"=",
"0",
"else",
":",
"el",
"=",
"curr",
"[",
"0",
"]",
".",
"edge_length",
"dist",
"[",
"curr",
"[",
"0",
"]",
".",
"parent",
"]",
"=",
"dist",
"[",
"curr",
"[",
"0",
"]",
"]",
"+",
"el",
"q",
".",
"append",
"(",
"(",
"curr",
"[",
"0",
"]",
".",
"parent",
",",
"dist",
"[",
"curr",
"[",
"0",
"]",
".",
"parent",
"]",
")",
")"
] | 46.583333 | 22.083333 |
def make_comparison_png(self, outpath=None, include_legend=False):
    """
    Creates a thematic map image with a three color beside it
    :param outpath: if specified, will save the image instead of showing it
    :param include_legend: if true will include the thematic map label legend
    """
    from matplotlib.patches import Patch
    figure, axes = plt.subplots(ncols=2, sharex=True, sharey=True)

    # Left panel: the three-color composite.
    axes[0].imshow(self.make_three_color())
    axes[0].set_axis_off()

    # Right panel: the thematic map rendered with the solar class colormap.
    side = self.thmap.shape
    axes[1].imshow(self.thmap,
                   origin='lower',
                   interpolation='nearest',
                   cmap=self.config.solar_cmap,
                   vmin=-1, vmax=len(self.config.solar_classes)-1)
    if include_legend:
        handles = [Patch(facecolor=color, label=label, edgecolor='k')
                   for label, color in self.config.solar_colors.items()]
        axes[1].legend(handles=handles, fontsize='x-small',
                       bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
                       ncol=2, mode="expand", borderaxespad=0.)
    axes[1].set_xlim([0, side[0]])
    axes[1].set_ylim([0, side[0]])
    axes[1].set_aspect("equal")
    axes[1].set_axis_off()

    if outpath:
        # Persist to disk and release the figure instead of displaying it.
        figure.savefig(outpath, dpi=300,
                       transparent=True,
                       bbox_inches='tight',
                       pad_inches=0.)
        plt.close()
    else:
        plt.show()
"def",
"make_comparison_png",
"(",
"self",
",",
"outpath",
"=",
"None",
",",
"include_legend",
"=",
"False",
")",
":",
"from",
"matplotlib",
".",
"patches",
"import",
"Patch",
"fig",
",",
"axs",
"=",
"plt",
".",
"subplots",
"(",
"ncols",
"=",
"2",
",",
"sharex",
"=",
"True",
",",
"sharey",
"=",
"True",
")",
"three_color",
"=",
"self",
".",
"make_three_color",
"(",
")",
"axs",
"[",
"0",
"]",
".",
"imshow",
"(",
"three_color",
")",
"axs",
"[",
"0",
"]",
".",
"set_axis_off",
"(",
")",
"shape",
"=",
"self",
".",
"thmap",
".",
"shape",
"axs",
"[",
"1",
"]",
".",
"imshow",
"(",
"self",
".",
"thmap",
",",
"origin",
"=",
"'lower'",
",",
"interpolation",
"=",
"'nearest'",
",",
"cmap",
"=",
"self",
".",
"config",
".",
"solar_cmap",
",",
"vmin",
"=",
"-",
"1",
",",
"vmax",
"=",
"len",
"(",
"self",
".",
"config",
".",
"solar_classes",
")",
"-",
"1",
")",
"if",
"include_legend",
":",
"legend_elements",
"=",
"[",
"Patch",
"(",
"facecolor",
"=",
"c",
",",
"label",
"=",
"sc",
",",
"edgecolor",
"=",
"'k'",
")",
"for",
"sc",
",",
"c",
"in",
"self",
".",
"config",
".",
"solar_colors",
".",
"items",
"(",
")",
"]",
"axs",
"[",
"1",
"]",
".",
"legend",
"(",
"handles",
"=",
"legend_elements",
",",
"fontsize",
"=",
"'x-small'",
",",
"bbox_to_anchor",
"=",
"(",
"0.",
",",
"1.02",
",",
"1.",
",",
".102",
")",
",",
"loc",
"=",
"3",
",",
"ncol",
"=",
"2",
",",
"mode",
"=",
"\"expand\"",
",",
"borderaxespad",
"=",
"0.",
")",
"axs",
"[",
"1",
"]",
".",
"set_xlim",
"(",
"[",
"0",
",",
"shape",
"[",
"0",
"]",
"]",
")",
"axs",
"[",
"1",
"]",
".",
"set_ylim",
"(",
"[",
"0",
",",
"shape",
"[",
"0",
"]",
"]",
")",
"axs",
"[",
"1",
"]",
".",
"set_aspect",
"(",
"\"equal\"",
")",
"axs",
"[",
"1",
"]",
".",
"set_axis_off",
"(",
")",
"if",
"outpath",
":",
"fig",
".",
"savefig",
"(",
"outpath",
",",
"dpi",
"=",
"300",
",",
"transparent",
"=",
"True",
",",
"bbox_inches",
"=",
"'tight'",
",",
"pad_inches",
"=",
"0.",
")",
"plt",
".",
"close",
"(",
")",
"else",
":",
"plt",
".",
"show",
"(",
")"
] | 38.875 | 18.125 |
def acquire_restore(lock, state):
    """Acquire a lock and restore its state."""
    # RLock-style objects expose _acquire_restore to reinstate a saved
    # recursion state; plain locks are simply acquired (state is ignored).
    if hasattr(lock, '_acquire_restore'):
        lock._acquire_restore(state)
        return
    if hasattr(lock, 'acquire'):
        lock.acquire()
        return
    raise TypeError('expecting Lock/RLock')
"def",
"acquire_restore",
"(",
"lock",
",",
"state",
")",
":",
"if",
"hasattr",
"(",
"lock",
",",
"'_acquire_restore'",
")",
":",
"lock",
".",
"_acquire_restore",
"(",
"state",
")",
"elif",
"hasattr",
"(",
"lock",
",",
"'acquire'",
")",
":",
"lock",
".",
"acquire",
"(",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'expecting Lock/RLock'",
")"
] | 33.625 | 9.25 |
def override_spec(cls, **kwargs):
    """Override '_default_spec' and 'spec' with the given values."""
    # Apply the same overrides to both spec holders, default first.
    for spec_holder in (cls._default_spec, cls.spec):
        spec_holder.set(**kwargs)
"def",
"override_spec",
"(",
"cls",
",",
"*",
"*",
"kwargs",
")",
":",
"cls",
".",
"_default_spec",
".",
"set",
"(",
"*",
"*",
"kwargs",
")",
"cls",
".",
"spec",
".",
"set",
"(",
"*",
"*",
"kwargs",
")"
] | 42.25 | 4.5 |
def reader_acquire(self):
    """Acquire the lock to read"""
    # Queue fairly behind any waiting writers, then protect the counter.
    self._order_mutex.acquire()
    self._readers_mutex.acquire()
    # First reader in takes the access mutex on behalf of all readers.
    if not self._readers:
        self._access_mutex.acquire()
    self._readers = self._readers + 1
    self._order_mutex.release()
    self._readers_mutex.release()
"def",
"reader_acquire",
"(",
"self",
")",
":",
"self",
".",
"_order_mutex",
".",
"acquire",
"(",
")",
"self",
".",
"_readers_mutex",
".",
"acquire",
"(",
")",
"if",
"self",
".",
"_readers",
"==",
"0",
":",
"self",
".",
"_access_mutex",
".",
"acquire",
"(",
")",
"self",
".",
"_readers",
"+=",
"1",
"self",
".",
"_order_mutex",
".",
"release",
"(",
")",
"self",
".",
"_readers_mutex",
".",
"release",
"(",
")"
] | 25.25 | 14.583333 |
def _parse_astorb_database_file(
self,
astorbgz):
"""* parse astorb database file*
**Key Arguments:**
- ``astorbgz`` -- path to the downloaded astorb database file
**Return:**
- ``astorbDictList`` -- the astorb database parsed as a list of dictionaries
"""
self.log.info('starting the ``_parse_astorb_database_file`` method')
print "Parsing the astorb.dat orbital elements file"
with gzip.open(astorbgz, 'rb') as f:
thisData = f.read()
astorbDictList = []
lines = thisData.split("\n")
for l in lines:
if len(l) < 50:
continue
d = {}
# PARSE THE LINE FROM astorb.dat (UNNEEDED VALUES COMMENTED OUT)
d["mpc_number"] = l[0:7].strip()
d["name"] = l[7:26].strip()
d["discoverer"] = l[26:41].strip()
d["H_abs_mag"] = l[41:48].strip()
d["G_slope"] = l[48:54].strip()
d["color_b_v"] = l[54:59].strip()
d["diameter_km"] = l[59:65].strip()
d["class"] = l[65:71].strip()
# d["int1"] = l[71:75].strip()
# d["int2"] = l[75:79].strip()
# d["int3"] = l[79:83].strip()
# d["int4"] = l[83:87].strip()
# d["int5"] = l[87:91].strip()
# d["int6"] = l[91:95].strip()
d["orbital_arc_days"] = l[95:101].strip()
d["number_obs"] = l[101:106].strip()
d["epoch"] = l[106:115].strip()
d["M_mean_anomaly_deg"] = l[115:126].strip()
d["o_arg_peri_deg"] = l[126:137].strip()
d["O_long_asc_node_deg"] = l[137:148].strip()
d["i_inclination_deg"] = l[148:158].strip()
d["e_eccentricity"] = l[158:169].strip()
d["a_semimajor_axis"] = l[169:182].strip()
d["orbit_comp_date"] = l[182:191].strip()
d["ephem_uncertainty_arcsec"] = l[191:199].strip()
d["ephem_uncertainty_change_arcsec_day"] = l[199:208].strip()
d["ephem_uncertainty_date"] = l[208:217].strip()
# d["peak_ephem_uncertainty_next_arcsec"] = l[217:225].strip()
# d["peak_ephem_uncertainty_next_date"] = l[225:234].strip()
# d["peak_ephem_uncertainty_10_yrs_from_ceu_arcsec"] = l[
# 217:225].strip()
# d["peak_ephem_uncertainty_10_yrs_from_ceu_date"] = l[
# 242:251].strip()
# d["peak_ephem_uncertainty_10_yrs_from_peu_arcsec"] = l[
# 251:259].strip()
# d["peak_ephem_uncertainty_10_yrs_from_peu_date"] = l[
# 259:].strip()
yyyy = int(d["epoch"][:4])
mm = int(d["epoch"][4:6])
dd = int(d["epoch"][6:])
d["epoch_xeph"] = "%(mm)s/%(dd)s/%(yyyy)s" % locals()
# CONVERT ASTORB DATABASE LINE TO XEPHEM DATABASE FORMAT
xephemStr = "%(mpc_number)s %(name)s,e,%(i_inclination_deg)s,%(O_long_asc_node_deg)s,%(o_arg_peri_deg)s,%(a_semimajor_axis)s,0,%(e_eccentricity)s,%(M_mean_anomaly_deg)s,%(epoch_xeph)s,2000.0,%(H_abs_mag)s,%(G_slope)s" % d
xephemStr = xephemStr.strip()
d["pyephem_string"] = xephemStr
d["astorb_string"] = l
# TIDYUP
if len(d["mpc_number"]) == 0:
d["mpc_number"] = None
for k, v in d.iteritems():
if v != None and len(v) == 0:
d[k] = None
astorbDictList.append(d)
print "Finshed parsing the astorb.dat orbital elements file"
self.log.info('completed the ``_parse_astorb_database_file`` method')
return astorbDictList | [
"def",
"_parse_astorb_database_file",
"(",
"self",
",",
"astorbgz",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"'starting the ``_parse_astorb_database_file`` method'",
")",
"print",
"\"Parsing the astorb.dat orbital elements file\"",
"with",
"gzip",
".",
"open",
"(",
"astorbgz",
",",
"'rb'",
")",
"as",
"f",
":",
"thisData",
"=",
"f",
".",
"read",
"(",
")",
"astorbDictList",
"=",
"[",
"]",
"lines",
"=",
"thisData",
".",
"split",
"(",
"\"\\n\"",
")",
"for",
"l",
"in",
"lines",
":",
"if",
"len",
"(",
"l",
")",
"<",
"50",
":",
"continue",
"d",
"=",
"{",
"}",
"# PARSE THE LINE FROM astorb.dat (UNNEEDED VALUES COMMENTED OUT)",
"d",
"[",
"\"mpc_number\"",
"]",
"=",
"l",
"[",
"0",
":",
"7",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"name\"",
"]",
"=",
"l",
"[",
"7",
":",
"26",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"discoverer\"",
"]",
"=",
"l",
"[",
"26",
":",
"41",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"H_abs_mag\"",
"]",
"=",
"l",
"[",
"41",
":",
"48",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"G_slope\"",
"]",
"=",
"l",
"[",
"48",
":",
"54",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"color_b_v\"",
"]",
"=",
"l",
"[",
"54",
":",
"59",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"diameter_km\"",
"]",
"=",
"l",
"[",
"59",
":",
"65",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"class\"",
"]",
"=",
"l",
"[",
"65",
":",
"71",
"]",
".",
"strip",
"(",
")",
"# d[\"int1\"] = l[71:75].strip()",
"# d[\"int2\"] = l[75:79].strip()",
"# d[\"int3\"] = l[79:83].strip()",
"# d[\"int4\"] = l[83:87].strip()",
"# d[\"int5\"] = l[87:91].strip()",
"# d[\"int6\"] = l[91:95].strip()",
"d",
"[",
"\"orbital_arc_days\"",
"]",
"=",
"l",
"[",
"95",
":",
"101",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"number_obs\"",
"]",
"=",
"l",
"[",
"101",
":",
"106",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"epoch\"",
"]",
"=",
"l",
"[",
"106",
":",
"115",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"M_mean_anomaly_deg\"",
"]",
"=",
"l",
"[",
"115",
":",
"126",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"o_arg_peri_deg\"",
"]",
"=",
"l",
"[",
"126",
":",
"137",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"O_long_asc_node_deg\"",
"]",
"=",
"l",
"[",
"137",
":",
"148",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"i_inclination_deg\"",
"]",
"=",
"l",
"[",
"148",
":",
"158",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"e_eccentricity\"",
"]",
"=",
"l",
"[",
"158",
":",
"169",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"a_semimajor_axis\"",
"]",
"=",
"l",
"[",
"169",
":",
"182",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"orbit_comp_date\"",
"]",
"=",
"l",
"[",
"182",
":",
"191",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"ephem_uncertainty_arcsec\"",
"]",
"=",
"l",
"[",
"191",
":",
"199",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"ephem_uncertainty_change_arcsec_day\"",
"]",
"=",
"l",
"[",
"199",
":",
"208",
"]",
".",
"strip",
"(",
")",
"d",
"[",
"\"ephem_uncertainty_date\"",
"]",
"=",
"l",
"[",
"208",
":",
"217",
"]",
".",
"strip",
"(",
")",
"# d[\"peak_ephem_uncertainty_next_arcsec\"] = l[217:225].strip()",
"# d[\"peak_ephem_uncertainty_next_date\"] = l[225:234].strip()",
"# d[\"peak_ephem_uncertainty_10_yrs_from_ceu_arcsec\"] = l[",
"# 217:225].strip()",
"# d[\"peak_ephem_uncertainty_10_yrs_from_ceu_date\"] = l[",
"# 242:251].strip()",
"# d[\"peak_ephem_uncertainty_10_yrs_from_peu_arcsec\"] = l[",
"# 251:259].strip()",
"# d[\"peak_ephem_uncertainty_10_yrs_from_peu_date\"] = l[",
"# 259:].strip()",
"yyyy",
"=",
"int",
"(",
"d",
"[",
"\"epoch\"",
"]",
"[",
":",
"4",
"]",
")",
"mm",
"=",
"int",
"(",
"d",
"[",
"\"epoch\"",
"]",
"[",
"4",
":",
"6",
"]",
")",
"dd",
"=",
"int",
"(",
"d",
"[",
"\"epoch\"",
"]",
"[",
"6",
":",
"]",
")",
"d",
"[",
"\"epoch_xeph\"",
"]",
"=",
"\"%(mm)s/%(dd)s/%(yyyy)s\"",
"%",
"locals",
"(",
")",
"# CONVERT ASTORB DATABASE LINE TO XEPHEM DATABASE FORMAT",
"xephemStr",
"=",
"\"%(mpc_number)s %(name)s,e,%(i_inclination_deg)s,%(O_long_asc_node_deg)s,%(o_arg_peri_deg)s,%(a_semimajor_axis)s,0,%(e_eccentricity)s,%(M_mean_anomaly_deg)s,%(epoch_xeph)s,2000.0,%(H_abs_mag)s,%(G_slope)s\"",
"%",
"d",
"xephemStr",
"=",
"xephemStr",
".",
"strip",
"(",
")",
"d",
"[",
"\"pyephem_string\"",
"]",
"=",
"xephemStr",
"d",
"[",
"\"astorb_string\"",
"]",
"=",
"l",
"# TIDYUP",
"if",
"len",
"(",
"d",
"[",
"\"mpc_number\"",
"]",
")",
"==",
"0",
":",
"d",
"[",
"\"mpc_number\"",
"]",
"=",
"None",
"for",
"k",
",",
"v",
"in",
"d",
".",
"iteritems",
"(",
")",
":",
"if",
"v",
"!=",
"None",
"and",
"len",
"(",
"v",
")",
"==",
"0",
":",
"d",
"[",
"k",
"]",
"=",
"None",
"astorbDictList",
".",
"append",
"(",
"d",
")",
"print",
"\"Finshed parsing the astorb.dat orbital elements file\"",
"self",
".",
"log",
".",
"info",
"(",
"'completed the ``_parse_astorb_database_file`` method'",
")",
"return",
"astorbDictList"
] | 39.771739 | 19.847826 |
def feed_appdata(self, data, offset=0):
    """Feed plaintext data into the pipe.

    Return an (ssldata, offset) tuple. The ssldata element is a list of
    buffers containing record level data that needs to be sent to the
    remote SSL instance. The offset is the number of plaintext bytes that
    were processed, which may be less than the length of data.

    NOTE: In case of short writes, this call MUST be retried with the SAME
    buffer passed into the *data* argument (i.e. the ``id()`` must be the
    same). This is an OpenSSL requirement. A further particularity is that
    a short write will always have offset == 0, because the _ssl module
    does not enable partial writes. And even though the offset is zero,
    there will still be encrypted data in ssldata.
    """
    if self._state == self.S_UNWRAPPED:
        # pass through data in unwrapped mode
        return ([data[offset:]] if offset < len(data) else [], len(data))
    ssldata = []
    # memoryview gives zero-copy slicing for the repeated write attempts.
    view = memoryview(data)
    while True:
        self._need_ssldata = False
        try:
            if offset < len(view):
                offset += self._sslobj.write(view[offset:])
        except ssl.SSLError as e:
            # It is not allowed to call write() after unwrap() until the
            # close_notify is acknowledged. We return the condition to the
            # caller as a short write.
            if sslcompat.get_reason(e) == 'PROTOCOL_IS_SHUTDOWN':
                e.errno = ssl.SSL_ERROR_WANT_READ
            # Only WANT_READ / WANT_WRITE / SYSCALL are retryable here;
            # anything else is a real error and propagates to the caller.
            if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE,
                               ssl.SSL_ERROR_SYSCALL):
                raise
            self._need_ssldata = e.errno == ssl.SSL_ERROR_WANT_READ
        # See if there's any record level data back for us.
        if self._outgoing.pending:
            ssldata.append(self._outgoing.read())
        # Stop once all plaintext is consumed, or when the SSL layer needs
        # more record-level input before it can make progress.
        if offset == len(view) or self._need_ssldata:
            break
    return (ssldata, offset)
"def",
"feed_appdata",
"(",
"self",
",",
"data",
",",
"offset",
"=",
"0",
")",
":",
"if",
"self",
".",
"_state",
"==",
"self",
".",
"S_UNWRAPPED",
":",
"# pass through data in unwrapped mode",
"return",
"(",
"[",
"data",
"[",
"offset",
":",
"]",
"]",
"if",
"offset",
"<",
"len",
"(",
"data",
")",
"else",
"[",
"]",
",",
"len",
"(",
"data",
")",
")",
"ssldata",
"=",
"[",
"]",
"view",
"=",
"memoryview",
"(",
"data",
")",
"while",
"True",
":",
"self",
".",
"_need_ssldata",
"=",
"False",
"try",
":",
"if",
"offset",
"<",
"len",
"(",
"view",
")",
":",
"offset",
"+=",
"self",
".",
"_sslobj",
".",
"write",
"(",
"view",
"[",
"offset",
":",
"]",
")",
"except",
"ssl",
".",
"SSLError",
"as",
"e",
":",
"# It is not allowed to call write() after unwrap() until the",
"# close_notify is acknowledged. We return the condition to the",
"# caller as a short write.",
"if",
"sslcompat",
".",
"get_reason",
"(",
"e",
")",
"==",
"'PROTOCOL_IS_SHUTDOWN'",
":",
"e",
".",
"errno",
"=",
"ssl",
".",
"SSL_ERROR_WANT_READ",
"if",
"e",
".",
"errno",
"not",
"in",
"(",
"ssl",
".",
"SSL_ERROR_WANT_READ",
",",
"ssl",
".",
"SSL_ERROR_WANT_WRITE",
",",
"ssl",
".",
"SSL_ERROR_SYSCALL",
")",
":",
"raise",
"self",
".",
"_need_ssldata",
"=",
"e",
".",
"errno",
"==",
"ssl",
".",
"SSL_ERROR_WANT_READ",
"# See if there's any record level data back for us.",
"if",
"self",
".",
"_outgoing",
".",
"pending",
":",
"ssldata",
".",
"append",
"(",
"self",
".",
"_outgoing",
".",
"read",
"(",
")",
")",
"if",
"offset",
"==",
"len",
"(",
"view",
")",
"or",
"self",
".",
"_need_ssldata",
":",
"break",
"return",
"(",
"ssldata",
",",
"offset",
")"
] | 50.609756 | 21.243902 |
def generate_twofactor_code_for_time(shared_secret, timestamp=None):
    """Generate Steam 2FA code for timestamp

    :param shared_secret: authenticator shared secret
    :type shared_secret: bytes
    :param timestamp: timestamp to use, if left out uses current time
    :type timestamp: int
    :return: steam two factor code
    :rtype: str
    """
    if timestamp is None:
        # Bug fix: the docstring promised "uses current time" when the
        # timestamp is omitted, but the parameter was required.
        import time
        timestamp = int(time.time())
    hmac = hmac_sha1(bytes(shared_secret),
                     struct.pack('>Q', int(timestamp)//30))  # this will NOT stop working in 2038

    # Low nibble of the digest's last byte selects the 4-byte code window.
    start = ord(hmac[19:20]) & 0xF
    codeint = struct.unpack('>I', hmac[start:start+4])[0] & 0x7fffffff

    # Map the integer onto Steam's 26-character alphabet, 5 characters long.
    charset = '23456789BCDFGHJKMNPQRTVWXY'
    code = ''
    for _ in range(5):
        codeint, i = divmod(codeint, len(charset))
        code += charset[i]

    return code
"def",
"generate_twofactor_code_for_time",
"(",
"shared_secret",
",",
"timestamp",
")",
":",
"hmac",
"=",
"hmac_sha1",
"(",
"bytes",
"(",
"shared_secret",
")",
",",
"struct",
".",
"pack",
"(",
"'>Q'",
",",
"int",
"(",
"timestamp",
")",
"//",
"30",
")",
")",
"# this will NOT stop working in 2038",
"start",
"=",
"ord",
"(",
"hmac",
"[",
"19",
":",
"20",
"]",
")",
"&",
"0xF",
"codeint",
"=",
"struct",
".",
"unpack",
"(",
"'>I'",
",",
"hmac",
"[",
"start",
":",
"start",
"+",
"4",
"]",
")",
"[",
"0",
"]",
"&",
"0x7fffffff",
"charset",
"=",
"'23456789BCDFGHJKMNPQRTVWXY'",
"code",
"=",
"''",
"for",
"_",
"in",
"range",
"(",
"5",
")",
":",
"codeint",
",",
"i",
"=",
"divmod",
"(",
"codeint",
",",
"len",
"(",
"charset",
")",
")",
"code",
"+=",
"charset",
"[",
"i",
"]",
"return",
"code"
] | 31.25 | 21.375 |
def handle_error(self, error=None):
    """Trap for TCPServer errors, otherwise continue."""
    if _debug: TCPServerActor._debug("handle_error %r", error)

    # pass along to the director
    if error is None:
        # Nothing actor-specific to report; defer to the base class handler.
        TCPServer.handle_error(self)
    else:
        self.director.actor_error(self, error)
"def",
"handle_error",
"(",
"self",
",",
"error",
"=",
"None",
")",
":",
"if",
"_debug",
":",
"TCPServerActor",
".",
"_debug",
"(",
"\"handle_error %r\"",
",",
"error",
")",
"# pass along to the director",
"if",
"error",
"is",
"not",
"None",
":",
"self",
".",
"director",
".",
"actor_error",
"(",
"self",
",",
"error",
")",
"else",
":",
"TCPServer",
".",
"handle_error",
"(",
"self",
")"
] | 36.555556 | 13.666667 |
def reload(self):
    """
    Rerun the query (lazily).

    The results will contain any values on the server side that have changed since the last run.

    :return: None
    """
    # Drop cached results and reset the paging cursors; the next access
    # will fetch fresh pages from the server.
    self._results = []
    self._next_item_index = self._next_page_index = 0
    self._last_page_seen = False
"def",
"reload",
"(",
"self",
")",
":",
"self",
".",
"_results",
"=",
"[",
"]",
"self",
".",
"_next_item_index",
"=",
"0",
"self",
".",
"_next_page_index",
"=",
"0",
"self",
".",
"_last_page_seen",
"=",
"False"
] | 32.1 | 14.1 |
def _parse_shape_list(shape_list, crs):
""" Checks if the given list of shapes is in correct format and parses geometry objects
:param shape_list: The parameter `shape_list` from class initialization
:type shape_list: list(shapely.geometry.multipolygon.MultiPolygon or shapely.geometry.polygon.Polygon)
:raises: ValueError
"""
if not isinstance(shape_list, list):
raise ValueError('Splitter must be initialized with a list of shapes')
return [AreaSplitter._parse_shape(shape, crs) for shape in shape_list] | [
"def",
"_parse_shape_list",
"(",
"shape_list",
",",
"crs",
")",
":",
"if",
"not",
"isinstance",
"(",
"shape_list",
",",
"list",
")",
":",
"raise",
"ValueError",
"(",
"'Splitter must be initialized with a list of shapes'",
")",
"return",
"[",
"AreaSplitter",
".",
"_parse_shape",
"(",
"shape",
",",
"crs",
")",
"for",
"shape",
"in",
"shape_list",
"]"
] | 51.363636 | 26.090909 |
def manage_conflict(self, item, name):
"""
Checks if an object holding the same name already exists in the index.
If so, it compares their definition order: the lowest definition order
is kept. If definition order equal, an error is risen.Item
The method returns the item that should be added after it has decided
which one should be kept.
If the new item has precedence over the New existing one, the
existing is removed for the new to replace it.
:param item: object to check for conflict
:type item: alignak.objects.item.Item
:param name: name of the object
:type name: str
:return: 'item' parameter modified
:rtype: object
"""
if item.is_tpl():
existing = self.name_to_template[name]
else:
existing = self.name_to_item[name]
if existing == item:
return item
existing_prio = getattr(
existing,
"definition_order",
existing.properties["definition_order"].default)
item_prio = getattr(
item,
"definition_order",
item.properties["definition_order"].default)
if existing_prio < item_prio:
# Existing item has lower priority, so it has precedence.
return existing
if existing_prio > item_prio:
# New item has lower priority, so it has precedence.
# Existing item will be deleted below
pass
else:
# Don't know which one to keep, lastly defined has precedence
objcls = getattr(self.inner_class, "my_type", "[unknown]")
mesg = "duplicate %s '%s', from: '%s' and '%s', using lastly defined. " \
"You may manually set the definition_order parameter to avoid this message." \
% (objcls, name, item.imported_from, existing.imported_from)
item.configuration_warnings.append(mesg)
if item.is_tpl():
self.remove_template(existing)
else:
self.remove_item(existing)
return item | [
"def",
"manage_conflict",
"(",
"self",
",",
"item",
",",
"name",
")",
":",
"if",
"item",
".",
"is_tpl",
"(",
")",
":",
"existing",
"=",
"self",
".",
"name_to_template",
"[",
"name",
"]",
"else",
":",
"existing",
"=",
"self",
".",
"name_to_item",
"[",
"name",
"]",
"if",
"existing",
"==",
"item",
":",
"return",
"item",
"existing_prio",
"=",
"getattr",
"(",
"existing",
",",
"\"definition_order\"",
",",
"existing",
".",
"properties",
"[",
"\"definition_order\"",
"]",
".",
"default",
")",
"item_prio",
"=",
"getattr",
"(",
"item",
",",
"\"definition_order\"",
",",
"item",
".",
"properties",
"[",
"\"definition_order\"",
"]",
".",
"default",
")",
"if",
"existing_prio",
"<",
"item_prio",
":",
"# Existing item has lower priority, so it has precedence.",
"return",
"existing",
"if",
"existing_prio",
">",
"item_prio",
":",
"# New item has lower priority, so it has precedence.",
"# Existing item will be deleted below",
"pass",
"else",
":",
"# Don't know which one to keep, lastly defined has precedence",
"objcls",
"=",
"getattr",
"(",
"self",
".",
"inner_class",
",",
"\"my_type\"",
",",
"\"[unknown]\"",
")",
"mesg",
"=",
"\"duplicate %s '%s', from: '%s' and '%s', using lastly defined. \"",
"\"You may manually set the definition_order parameter to avoid this message.\"",
"%",
"(",
"objcls",
",",
"name",
",",
"item",
".",
"imported_from",
",",
"existing",
".",
"imported_from",
")",
"item",
".",
"configuration_warnings",
".",
"append",
"(",
"mesg",
")",
"if",
"item",
".",
"is_tpl",
"(",
")",
":",
"self",
".",
"remove_template",
"(",
"existing",
")",
"else",
":",
"self",
".",
"remove_item",
"(",
"existing",
")",
"return",
"item"
] | 38.907407 | 19.648148 |
def load_data(self):
    """
    Loads image and label data from specified directory path.

    :return: Dataset object containing image and label data.
    """
    images = []
    labels = []
    emotion_index_map = {}

    # Each non-hidden subdirectory of the data path is one emotion label.
    directories = [d for d in os.listdir(self.datapath) if not d.startswith('.')]
    for label_directory in directories:
        if self.target_emotion_map and label_directory not in self.target_emotion_map.keys():
            # Skip labels outside the requested emotion subset.
            continue
        self._add_new_label_to_map(label_directory, emotion_index_map)
        label_directory_path = self.datapath + '/' + label_directory
        if self.time_delay:
            # Time-series mode: load whole image sequences for this label.
            self._load_series_for_single_emotion_directory(
                images, label_directory, label_directory_path, labels)
        else:
            image_files = [f for f in os.listdir(label_directory_path) if not f.startswith('.')]
            self._load_images_from_directory_to_array(
                image_files, images, label_directory, label_directory_path, labels)

    vectorized_labels = self._vectorize_labels(emotion_index_map, labels)
    self._check_data_not_empty(images)
    return self._load_dataset(np.array(images), np.array(vectorized_labels), emotion_index_map)
"def",
"load_data",
"(",
"self",
")",
":",
"images",
"=",
"list",
"(",
")",
"labels",
"=",
"list",
"(",
")",
"emotion_index_map",
"=",
"dict",
"(",
")",
"label_directories",
"=",
"[",
"dir",
"for",
"dir",
"in",
"os",
".",
"listdir",
"(",
"self",
".",
"datapath",
")",
"if",
"not",
"dir",
".",
"startswith",
"(",
"'.'",
")",
"]",
"for",
"label_directory",
"in",
"label_directories",
":",
"if",
"self",
".",
"target_emotion_map",
":",
"if",
"label_directory",
"not",
"in",
"self",
".",
"target_emotion_map",
".",
"keys",
"(",
")",
":",
"continue",
"self",
".",
"_add_new_label_to_map",
"(",
"label_directory",
",",
"emotion_index_map",
")",
"label_directory_path",
"=",
"self",
".",
"datapath",
"+",
"'/'",
"+",
"label_directory",
"if",
"self",
".",
"time_delay",
":",
"self",
".",
"_load_series_for_single_emotion_directory",
"(",
"images",
",",
"label_directory",
",",
"label_directory_path",
",",
"labels",
")",
"else",
":",
"image_files",
"=",
"[",
"image_file",
"for",
"image_file",
"in",
"os",
".",
"listdir",
"(",
"label_directory_path",
")",
"if",
"not",
"image_file",
".",
"startswith",
"(",
"'.'",
")",
"]",
"self",
".",
"_load_images_from_directory_to_array",
"(",
"image_files",
",",
"images",
",",
"label_directory",
",",
"label_directory_path",
",",
"labels",
")",
"vectorized_labels",
"=",
"self",
".",
"_vectorize_labels",
"(",
"emotion_index_map",
",",
"labels",
")",
"self",
".",
"_check_data_not_empty",
"(",
"images",
")",
"return",
"self",
".",
"_load_dataset",
"(",
"np",
".",
"array",
"(",
"images",
")",
",",
"np",
".",
"array",
"(",
"vectorized_labels",
")",
",",
"emotion_index_map",
")"
] | 52.08 | 31.44 |
def rebuild_indexes(self, chunk_size=1000, aggressive_clear=False, index_class=None):
"""Rebuild all indexes tied to this field
Parameters
----------
chunk_size: int
Default to 1000, it's the number of instances to load at once.
aggressive_clear: bool
Will be passed to the `aggressive` argument of the `clear_indexes` method.
If `False`, all values will be normally deindexed. If `True`, the work
will be done at low level, scanning for keys that may match the ones used by the indexes
index_class: type
Allow to build only index(es) for this index class instead of all indexes.
Raises
------
AssertionError
If called from an instance field. It must be called from the model field
Also raised if the field is not indexable
Examples
--------
>>> MyModel.get_field('myfield').rebuild_indexes()
>>> MyModel.get_field('myfield').clear_indexes(index_class=MyIndex)
"""
assert self.indexable, "Field not indexable"
assert self.attached_to_model, \
'`rebuild_indexes` can only be called on a field attached to the model'
for index in self._indexes:
if index_class and not isinstance(index, index_class):
continue
index.rebuild(chunk_size=chunk_size, aggressive_clear=aggressive_clear) | [
"def",
"rebuild_indexes",
"(",
"self",
",",
"chunk_size",
"=",
"1000",
",",
"aggressive_clear",
"=",
"False",
",",
"index_class",
"=",
"None",
")",
":",
"assert",
"self",
".",
"indexable",
",",
"\"Field not indexable\"",
"assert",
"self",
".",
"attached_to_model",
",",
"'`rebuild_indexes` can only be called on a field attached to the model'",
"for",
"index",
"in",
"self",
".",
"_indexes",
":",
"if",
"index_class",
"and",
"not",
"isinstance",
"(",
"index",
",",
"index_class",
")",
":",
"continue",
"index",
".",
"rebuild",
"(",
"chunk_size",
"=",
"chunk_size",
",",
"aggressive_clear",
"=",
"aggressive_clear",
")"
] | 39.5 | 28.111111 |
def read_csv(self, file: str, table: str = '_csv', libref: str = '', results: str = '',
opts: dict = None) -> 'SASdata':
"""
:param file: either the OS filesystem path of the file, or HTTP://... for a url accessible file
:param table: the name of the SAS Data Set to create
:param libref: the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
:param opts: a dictionary containing any of the following Proc Import options(datarow, delimiter, getnames, guessingrows)
:return: SASdata object
"""
opts = opts if opts is not None else {}
if results == '':
results = self.results
self._io.read_csv(file, table, libref, self.nosub, opts)
if self.exist(table, libref):
return SASdata(self, libref, table, results)
else:
return None | [
"def",
"read_csv",
"(",
"self",
",",
"file",
":",
"str",
",",
"table",
":",
"str",
"=",
"'_csv'",
",",
"libref",
":",
"str",
"=",
"''",
",",
"results",
":",
"str",
"=",
"''",
",",
"opts",
":",
"dict",
"=",
"None",
")",
"->",
"'SASdata'",
":",
"opts",
"=",
"opts",
"if",
"opts",
"is",
"not",
"None",
"else",
"{",
"}",
"if",
"results",
"==",
"''",
":",
"results",
"=",
"self",
".",
"results",
"self",
".",
"_io",
".",
"read_csv",
"(",
"file",
",",
"table",
",",
"libref",
",",
"self",
".",
"nosub",
",",
"opts",
")",
"if",
"self",
".",
"exist",
"(",
"table",
",",
"libref",
")",
":",
"return",
"SASdata",
"(",
"self",
",",
"libref",
",",
"table",
",",
"results",
")",
"else",
":",
"return",
"None"
] | 47.714286 | 29.238095 |
def parse_int_list(s):
"""
Parse a comma-separated list of strings.
The list may additionally contain ranges such as "1-5",
which will be expanded into "1,2,3,4,5".
"""
result = []
for item in s.split(','):
item = item.strip().split('-')
if len(item) == 1:
result.append(int(item[0]))
elif len(item) == 2:
start, end = item
result.extend(range(int(start), int(end)+1))
else:
raise ValueError("invalid range: '{0}'".format(s))
return result | [
"def",
"parse_int_list",
"(",
"s",
")",
":",
"result",
"=",
"[",
"]",
"for",
"item",
"in",
"s",
".",
"split",
"(",
"','",
")",
":",
"item",
"=",
"item",
".",
"strip",
"(",
")",
".",
"split",
"(",
"'-'",
")",
"if",
"len",
"(",
"item",
")",
"==",
"1",
":",
"result",
".",
"append",
"(",
"int",
"(",
"item",
"[",
"0",
"]",
")",
")",
"elif",
"len",
"(",
"item",
")",
"==",
"2",
":",
"start",
",",
"end",
"=",
"item",
"result",
".",
"extend",
"(",
"range",
"(",
"int",
"(",
"start",
")",
",",
"int",
"(",
"end",
")",
"+",
"1",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"invalid range: '{0}'\"",
".",
"format",
"(",
"s",
")",
")",
"return",
"result"
] | 31.470588 | 12.294118 |
def note_off(self, channel, note, velocity):
"""Return bytes for a 'note off' event."""
return self.midi_event(NOTE_OFF, channel, note, velocity) | [
"def",
"note_off",
"(",
"self",
",",
"channel",
",",
"note",
",",
"velocity",
")",
":",
"return",
"self",
".",
"midi_event",
"(",
"NOTE_OFF",
",",
"channel",
",",
"note",
",",
"velocity",
")"
] | 53 | 9.666667 |
def get_frame(self, frame_idx, env_idx):
""" Return frame from the buffer """
if frame_idx >= self.current_size:
raise VelException("Requested frame beyond the size of the buffer")
accumulator = []
last_frame = self.state_buffer[frame_idx, env_idx]
accumulator.append(last_frame)
for i in range(self.frame_history - 1):
prev_idx = (frame_idx - 1) % self.buffer_capacity
if prev_idx == self.current_idx:
raise VelException("Cannot provide enough history for the frame")
elif self.dones_buffer[prev_idx, env_idx]:
# If previous frame was done - just append zeroes
accumulator.append(np.zeros_like(last_frame))
else:
frame_idx = prev_idx
accumulator.append(self.state_buffer[frame_idx, env_idx])
# We're pushing the elements in reverse order
return np.concatenate(accumulator[::-1], axis=-1) | [
"def",
"get_frame",
"(",
"self",
",",
"frame_idx",
",",
"env_idx",
")",
":",
"if",
"frame_idx",
">=",
"self",
".",
"current_size",
":",
"raise",
"VelException",
"(",
"\"Requested frame beyond the size of the buffer\"",
")",
"accumulator",
"=",
"[",
"]",
"last_frame",
"=",
"self",
".",
"state_buffer",
"[",
"frame_idx",
",",
"env_idx",
"]",
"accumulator",
".",
"append",
"(",
"last_frame",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"frame_history",
"-",
"1",
")",
":",
"prev_idx",
"=",
"(",
"frame_idx",
"-",
"1",
")",
"%",
"self",
".",
"buffer_capacity",
"if",
"prev_idx",
"==",
"self",
".",
"current_idx",
":",
"raise",
"VelException",
"(",
"\"Cannot provide enough history for the frame\"",
")",
"elif",
"self",
".",
"dones_buffer",
"[",
"prev_idx",
",",
"env_idx",
"]",
":",
"# If previous frame was done - just append zeroes",
"accumulator",
".",
"append",
"(",
"np",
".",
"zeros_like",
"(",
"last_frame",
")",
")",
"else",
":",
"frame_idx",
"=",
"prev_idx",
"accumulator",
".",
"append",
"(",
"self",
".",
"state_buffer",
"[",
"frame_idx",
",",
"env_idx",
"]",
")",
"# We're pushing the elements in reverse order",
"return",
"np",
".",
"concatenate",
"(",
"accumulator",
"[",
":",
":",
"-",
"1",
"]",
",",
"axis",
"=",
"-",
"1",
")"
] | 38.96 | 21.6 |
def scan(self, A1, X1):
"""
LML, fixed-effect sizes, and scale of the candidate set.
Parameters
----------
A1 : (p, e) array_like
Trait-by-environments design matrix.
X1 : (n, m) array_like
Variants set matrix.
Returns
-------
lml : float
Log of the marginal likelihood for the set.
effsizes0 : (c, p) ndarray
Fixed-effect sizes for the covariates.
effsizes0_se : (c, p) ndarray
Fixed-effect size standard errors for the covariates.
effsizes1 : (m, e) ndarray
Fixed-effect sizes for the candidates.
effsizes1_se : (m, e) ndarray
Fixed-effect size standard errors for the candidates.
scale : float
Optimal scale.
"""
from numpy import empty
from numpy.linalg import multi_dot
from numpy_sugar import epsilon, is_all_finite
from scipy.linalg import cho_solve
A1 = asarray(A1, float)
X1 = asarray(X1, float)
if not is_all_finite(A1):
raise ValueError("A1 parameter has non-finite elements.")
if not is_all_finite(X1):
raise ValueError("X1 parameter has non-finite elements.")
if A1.shape[1] == 0:
beta_se = sqrt(self.null_beta_covariance.diagonal())
return {
"lml": self.null_lml(),
"effsizes0": unvec(self.null_beta, (self._ncovariates, -1)),
"effsizes0_se": unvec(beta_se, (self._ncovariates, -1)),
"effsizes1": empty((0,)),
"effsizes1_se": empty((0,)),
"scale": self.null_scale,
}
X1X1 = X1.T @ X1
XX1 = self._X.T @ X1
AWA1 = self._WA.T @ A1
A1W = A1.T @ self._W
GX1 = self._G.T @ X1
MRiM1 = kron(AWA1, XX1)
M1RiM1 = kron(A1W @ A1, X1X1)
M1Riy = vec(multi_dot([X1.T, self._Y, A1W.T]))
XRiM1 = kron(self._WL0.T @ A1, GX1)
ZiXRiM1 = cho_solve(self._Lz, XRiM1)
MRiXZiXRiM1 = self._XRiM.T @ ZiXRiM1
M1RiXZiXRiM1 = XRiM1.T @ ZiXRiM1
M1RiXZiXRiy = XRiM1.T @ self._ZiXRiy
T0 = [[self._MRiM, MRiM1], [MRiM1.T, M1RiM1]]
T1 = [[self._MRiXZiXRiM, MRiXZiXRiM1], [MRiXZiXRiM1.T, M1RiXZiXRiM1]]
T2 = [self._MRiy, M1Riy]
T3 = [self._MRiXZiXRiy, M1RiXZiXRiy]
MKiM = block(T0) - block(T1)
MKiy = block(T2) - block(T3)
beta = rsolve(MKiM, MKiy)
mKiy = beta.T @ MKiy
cp = self._ntraits * self._ncovariates
effsizes0 = unvec(beta[:cp], (self._ncovariates, self._ntraits))
effsizes1 = unvec(beta[cp:], (X1.shape[1], A1.shape[1]))
np = self._nsamples * self._ntraits
sqrtdot = self._yKiy - mKiy
scale = clip(sqrtdot / np, epsilon.tiny, inf)
lml = self._static_lml() / 2 - np * safe_log(scale) / 2 - np / 2
effsizes_se = sqrt(clip(scale * pinv(MKiM).diagonal(), epsilon.tiny, inf))
effsizes0_se = unvec(effsizes_se[:cp], (self._ncovariates, self._ntraits))
effsizes1_se = unvec(effsizes_se[cp:], (X1.shape[1], A1.shape[1]))
return {
"lml": lml,
"effsizes0": effsizes0,
"effsizes1": effsizes1,
"scale": scale,
"effsizes0_se": effsizes0_se,
"effsizes1_se": effsizes1_se,
} | [
"def",
"scan",
"(",
"self",
",",
"A1",
",",
"X1",
")",
":",
"from",
"numpy",
"import",
"empty",
"from",
"numpy",
".",
"linalg",
"import",
"multi_dot",
"from",
"numpy_sugar",
"import",
"epsilon",
",",
"is_all_finite",
"from",
"scipy",
".",
"linalg",
"import",
"cho_solve",
"A1",
"=",
"asarray",
"(",
"A1",
",",
"float",
")",
"X1",
"=",
"asarray",
"(",
"X1",
",",
"float",
")",
"if",
"not",
"is_all_finite",
"(",
"A1",
")",
":",
"raise",
"ValueError",
"(",
"\"A1 parameter has non-finite elements.\"",
")",
"if",
"not",
"is_all_finite",
"(",
"X1",
")",
":",
"raise",
"ValueError",
"(",
"\"X1 parameter has non-finite elements.\"",
")",
"if",
"A1",
".",
"shape",
"[",
"1",
"]",
"==",
"0",
":",
"beta_se",
"=",
"sqrt",
"(",
"self",
".",
"null_beta_covariance",
".",
"diagonal",
"(",
")",
")",
"return",
"{",
"\"lml\"",
":",
"self",
".",
"null_lml",
"(",
")",
",",
"\"effsizes0\"",
":",
"unvec",
"(",
"self",
".",
"null_beta",
",",
"(",
"self",
".",
"_ncovariates",
",",
"-",
"1",
")",
")",
",",
"\"effsizes0_se\"",
":",
"unvec",
"(",
"beta_se",
",",
"(",
"self",
".",
"_ncovariates",
",",
"-",
"1",
")",
")",
",",
"\"effsizes1\"",
":",
"empty",
"(",
"(",
"0",
",",
")",
")",
",",
"\"effsizes1_se\"",
":",
"empty",
"(",
"(",
"0",
",",
")",
")",
",",
"\"scale\"",
":",
"self",
".",
"null_scale",
",",
"}",
"X1X1",
"=",
"X1",
".",
"T",
"@",
"X1",
"XX1",
"=",
"self",
".",
"_X",
".",
"T",
"@",
"X1",
"AWA1",
"=",
"self",
".",
"_WA",
".",
"T",
"@",
"A1",
"A1W",
"=",
"A1",
".",
"T",
"@",
"self",
".",
"_W",
"GX1",
"=",
"self",
".",
"_G",
".",
"T",
"@",
"X1",
"MRiM1",
"=",
"kron",
"(",
"AWA1",
",",
"XX1",
")",
"M1RiM1",
"=",
"kron",
"(",
"A1W",
"@",
"A1",
",",
"X1X1",
")",
"M1Riy",
"=",
"vec",
"(",
"multi_dot",
"(",
"[",
"X1",
".",
"T",
",",
"self",
".",
"_Y",
",",
"A1W",
".",
"T",
"]",
")",
")",
"XRiM1",
"=",
"kron",
"(",
"self",
".",
"_WL0",
".",
"T",
"@",
"A1",
",",
"GX1",
")",
"ZiXRiM1",
"=",
"cho_solve",
"(",
"self",
".",
"_Lz",
",",
"XRiM1",
")",
"MRiXZiXRiM1",
"=",
"self",
".",
"_XRiM",
".",
"T",
"@",
"ZiXRiM1",
"M1RiXZiXRiM1",
"=",
"XRiM1",
".",
"T",
"@",
"ZiXRiM1",
"M1RiXZiXRiy",
"=",
"XRiM1",
".",
"T",
"@",
"self",
".",
"_ZiXRiy",
"T0",
"=",
"[",
"[",
"self",
".",
"_MRiM",
",",
"MRiM1",
"]",
",",
"[",
"MRiM1",
".",
"T",
",",
"M1RiM1",
"]",
"]",
"T1",
"=",
"[",
"[",
"self",
".",
"_MRiXZiXRiM",
",",
"MRiXZiXRiM1",
"]",
",",
"[",
"MRiXZiXRiM1",
".",
"T",
",",
"M1RiXZiXRiM1",
"]",
"]",
"T2",
"=",
"[",
"self",
".",
"_MRiy",
",",
"M1Riy",
"]",
"T3",
"=",
"[",
"self",
".",
"_MRiXZiXRiy",
",",
"M1RiXZiXRiy",
"]",
"MKiM",
"=",
"block",
"(",
"T0",
")",
"-",
"block",
"(",
"T1",
")",
"MKiy",
"=",
"block",
"(",
"T2",
")",
"-",
"block",
"(",
"T3",
")",
"beta",
"=",
"rsolve",
"(",
"MKiM",
",",
"MKiy",
")",
"mKiy",
"=",
"beta",
".",
"T",
"@",
"MKiy",
"cp",
"=",
"self",
".",
"_ntraits",
"*",
"self",
".",
"_ncovariates",
"effsizes0",
"=",
"unvec",
"(",
"beta",
"[",
":",
"cp",
"]",
",",
"(",
"self",
".",
"_ncovariates",
",",
"self",
".",
"_ntraits",
")",
")",
"effsizes1",
"=",
"unvec",
"(",
"beta",
"[",
"cp",
":",
"]",
",",
"(",
"X1",
".",
"shape",
"[",
"1",
"]",
",",
"A1",
".",
"shape",
"[",
"1",
"]",
")",
")",
"np",
"=",
"self",
".",
"_nsamples",
"*",
"self",
".",
"_ntraits",
"sqrtdot",
"=",
"self",
".",
"_yKiy",
"-",
"mKiy",
"scale",
"=",
"clip",
"(",
"sqrtdot",
"/",
"np",
",",
"epsilon",
".",
"tiny",
",",
"inf",
")",
"lml",
"=",
"self",
".",
"_static_lml",
"(",
")",
"/",
"2",
"-",
"np",
"*",
"safe_log",
"(",
"scale",
")",
"/",
"2",
"-",
"np",
"/",
"2",
"effsizes_se",
"=",
"sqrt",
"(",
"clip",
"(",
"scale",
"*",
"pinv",
"(",
"MKiM",
")",
".",
"diagonal",
"(",
")",
",",
"epsilon",
".",
"tiny",
",",
"inf",
")",
")",
"effsizes0_se",
"=",
"unvec",
"(",
"effsizes_se",
"[",
":",
"cp",
"]",
",",
"(",
"self",
".",
"_ncovariates",
",",
"self",
".",
"_ntraits",
")",
")",
"effsizes1_se",
"=",
"unvec",
"(",
"effsizes_se",
"[",
"cp",
":",
"]",
",",
"(",
"X1",
".",
"shape",
"[",
"1",
"]",
",",
"A1",
".",
"shape",
"[",
"1",
"]",
")",
")",
"return",
"{",
"\"lml\"",
":",
"lml",
",",
"\"effsizes0\"",
":",
"effsizes0",
",",
"\"effsizes1\"",
":",
"effsizes1",
",",
"\"scale\"",
":",
"scale",
",",
"\"effsizes0_se\"",
":",
"effsizes0_se",
",",
"\"effsizes1_se\"",
":",
"effsizes1_se",
",",
"}"
] | 33.787879 | 17.828283 |
def clear_duration(self):
"""Clears the duration.
raise: NoAccess - ``Metadata.isRequired()`` or
``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.assessment.AssessmentOfferedForm.clear_duration_template
if (self.get_duration_metadata().is_read_only() or
self.get_duration_metadata().is_required()):
raise errors.NoAccess()
self._my_map['duration'] = self._duration_default | [
"def",
"clear_duration",
"(",
"self",
")",
":",
"# Implemented from template for osid.assessment.AssessmentOfferedForm.clear_duration_template",
"if",
"(",
"self",
".",
"get_duration_metadata",
"(",
")",
".",
"is_read_only",
"(",
")",
"or",
"self",
".",
"get_duration_metadata",
"(",
")",
".",
"is_required",
"(",
")",
")",
":",
"raise",
"errors",
".",
"NoAccess",
"(",
")",
"self",
".",
"_my_map",
"[",
"'duration'",
"]",
"=",
"self",
".",
"_duration_default"
] | 42.538462 | 20.846154 |
def setup_prjs_signals(self, ):
"""Setup the signals for the projects page
:returns: None
:rtype: None
:raises: None
"""
log.debug("Setting up projects page signals.")
self.prjs_prj_view_pb.clicked.connect(self.prjs_view_prj)
self.prjs_prj_create_pb.clicked.connect(self.prjs_create_prj) | [
"def",
"setup_prjs_signals",
"(",
"self",
",",
")",
":",
"log",
".",
"debug",
"(",
"\"Setting up projects page signals.\"",
")",
"self",
".",
"prjs_prj_view_pb",
".",
"clicked",
".",
"connect",
"(",
"self",
".",
"prjs_view_prj",
")",
"self",
".",
"prjs_prj_create_pb",
".",
"clicked",
".",
"connect",
"(",
"self",
".",
"prjs_create_prj",
")"
] | 34.3 | 17.4 |
def insert(self, index, child, **kwargs):
'''add a new script to the container.
:param child: a ``string`` representing an absolute path to the script
or relative path (does not start with ``http`` or ``/``), in which
case the :attr:`Media.media_path` attribute is prepended.
'''
if child:
script = self.script(child, **kwargs)
if script not in self.children:
if index is None:
self.children.append(script)
else:
self.children.insert(index, script) | [
"def",
"insert",
"(",
"self",
",",
"index",
",",
"child",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"child",
":",
"script",
"=",
"self",
".",
"script",
"(",
"child",
",",
"*",
"*",
"kwargs",
")",
"if",
"script",
"not",
"in",
"self",
".",
"children",
":",
"if",
"index",
"is",
"None",
":",
"self",
".",
"children",
".",
"append",
"(",
"script",
")",
"else",
":",
"self",
".",
"children",
".",
"insert",
"(",
"index",
",",
"script",
")"
] | 42 | 18.857143 |
def publish(obj, event, event_state, **kwargs):
"""Publish an event from an object.
This is a really basic pub-sub event system to allow for tracking progress
on methods externally. It fires the events for the first match it finds in
the object hierarchy, going most specific to least. If no match is found
for the exact event+event_state, the most specific event+ANY is fired
instead.
Multiple callbacks can be bound to the event+event_state if desired. All
will be fired in the order they were registered.
"""
# short-circuit if nothing is listening
if len(EVENT_HANDLERS) == 0:
return
if inspect.isclass(obj):
pub_cls = obj
else:
pub_cls = obj.__class__
potential = [x.__name__ for x in inspect.getmro(pub_cls)]
# if we don't find a match for this event/event_state we fire the events
# for this event/ANY instead for the closest match
fallbacks = None
callbacks = []
for cls in potential:
event_key = '.'.join([cls, event, event_state])
backup_key = '.'.join([cls, event, states.ANY])
if event_key in EVENT_HANDLERS:
callbacks = EVENT_HANDLERS[event_key]
break
elif fallbacks is None and backup_key in EVENT_HANDLERS:
fallbacks = EVENT_HANDLERS[backup_key]
if fallbacks is not None:
callbacks = fallbacks
for callback in callbacks:
callback(obj, **kwargs)
return | [
"def",
"publish",
"(",
"obj",
",",
"event",
",",
"event_state",
",",
"*",
"*",
"kwargs",
")",
":",
"# short-circuit if nothing is listening",
"if",
"len",
"(",
"EVENT_HANDLERS",
")",
"==",
"0",
":",
"return",
"if",
"inspect",
".",
"isclass",
"(",
"obj",
")",
":",
"pub_cls",
"=",
"obj",
"else",
":",
"pub_cls",
"=",
"obj",
".",
"__class__",
"potential",
"=",
"[",
"x",
".",
"__name__",
"for",
"x",
"in",
"inspect",
".",
"getmro",
"(",
"pub_cls",
")",
"]",
"# if we don't find a match for this event/event_state we fire the events",
"# for this event/ANY instead for the closest match",
"fallbacks",
"=",
"None",
"callbacks",
"=",
"[",
"]",
"for",
"cls",
"in",
"potential",
":",
"event_key",
"=",
"'.'",
".",
"join",
"(",
"[",
"cls",
",",
"event",
",",
"event_state",
"]",
")",
"backup_key",
"=",
"'.'",
".",
"join",
"(",
"[",
"cls",
",",
"event",
",",
"states",
".",
"ANY",
"]",
")",
"if",
"event_key",
"in",
"EVENT_HANDLERS",
":",
"callbacks",
"=",
"EVENT_HANDLERS",
"[",
"event_key",
"]",
"break",
"elif",
"fallbacks",
"is",
"None",
"and",
"backup_key",
"in",
"EVENT_HANDLERS",
":",
"fallbacks",
"=",
"EVENT_HANDLERS",
"[",
"backup_key",
"]",
"if",
"fallbacks",
"is",
"not",
"None",
":",
"callbacks",
"=",
"fallbacks",
"for",
"callback",
"in",
"callbacks",
":",
"callback",
"(",
"obj",
",",
"*",
"*",
"kwargs",
")",
"return"
] | 34.902439 | 21.341463 |
def readmarheader(filename):
"""Read a header from a MarResearch .image file."""
with open(filename, 'rb') as f:
intheader = np.fromstring(f.read(10 * 4), np.int32)
floatheader = np.fromstring(f.read(15 * 4), '<f4')
strheader = f.read(24)
f.read(4)
otherstrings = [f.read(16) for i in range(29)]
return {'Xsize': intheader[0], 'Ysize': intheader[1], 'MeasTime': intheader[8],
'BeamPosX': floatheader[7], 'BeamPosY': floatheader[8],
'Wavelength': floatheader[9], 'Dist': floatheader[10],
'__Origin__': 'MarResearch .image', 'recordlength': intheader[2],
'highintensitypixels': intheader[4],
'highintensityrecords': intheader[5],
'Date': dateutil.parser.parse(strheader),
'Detector': 'MARCCD', '__particle__': 'photon'} | [
"def",
"readmarheader",
"(",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"f",
":",
"intheader",
"=",
"np",
".",
"fromstring",
"(",
"f",
".",
"read",
"(",
"10",
"*",
"4",
")",
",",
"np",
".",
"int32",
")",
"floatheader",
"=",
"np",
".",
"fromstring",
"(",
"f",
".",
"read",
"(",
"15",
"*",
"4",
")",
",",
"'<f4'",
")",
"strheader",
"=",
"f",
".",
"read",
"(",
"24",
")",
"f",
".",
"read",
"(",
"4",
")",
"otherstrings",
"=",
"[",
"f",
".",
"read",
"(",
"16",
")",
"for",
"i",
"in",
"range",
"(",
"29",
")",
"]",
"return",
"{",
"'Xsize'",
":",
"intheader",
"[",
"0",
"]",
",",
"'Ysize'",
":",
"intheader",
"[",
"1",
"]",
",",
"'MeasTime'",
":",
"intheader",
"[",
"8",
"]",
",",
"'BeamPosX'",
":",
"floatheader",
"[",
"7",
"]",
",",
"'BeamPosY'",
":",
"floatheader",
"[",
"8",
"]",
",",
"'Wavelength'",
":",
"floatheader",
"[",
"9",
"]",
",",
"'Dist'",
":",
"floatheader",
"[",
"10",
"]",
",",
"'__Origin__'",
":",
"'MarResearch .image'",
",",
"'recordlength'",
":",
"intheader",
"[",
"2",
"]",
",",
"'highintensitypixels'",
":",
"intheader",
"[",
"4",
"]",
",",
"'highintensityrecords'",
":",
"intheader",
"[",
"5",
"]",
",",
"'Date'",
":",
"dateutil",
".",
"parser",
".",
"parse",
"(",
"strheader",
")",
",",
"'Detector'",
":",
"'MARCCD'",
",",
"'__particle__'",
":",
"'photon'",
"}"
] | 52.375 | 17.6875 |
def create(self, label, status=None, master=None):
""" Create an Identity
:param label: The label to give this new identity
:param status: The status of this identity. Default: 'active'
:param master: Represents whether this identity is a master.
Default: False
:return: dict of REST API output with headers attached
:rtype: :class:`~datasift.request.DictResponse`
:raises: :class:`~datasift.exceptions.DataSiftApiException`,
:class:`requests.exceptions.HTTPError`
"""
params = {'label': label}
if status:
params['status'] = status
if master:
params['master'] = master
return self.request.post('', params) | [
"def",
"create",
"(",
"self",
",",
"label",
",",
"status",
"=",
"None",
",",
"master",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'label'",
":",
"label",
"}",
"if",
"status",
":",
"params",
"[",
"'status'",
"]",
"=",
"status",
"if",
"master",
":",
"params",
"[",
"'master'",
"]",
"=",
"master",
"return",
"self",
".",
"request",
".",
"post",
"(",
"''",
",",
"params",
")"
] | 36.428571 | 19.904762 |
def add_header_info(data_api, struct_inflator):
""" Add ancilliary header information to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object
"""
struct_inflator.set_header_info(data_api.r_free,
data_api.r_work,
data_api.resolution,
data_api.title,
data_api.deposition_date,
data_api.release_date,
data_api.experimental_methods) | [
"def",
"add_header_info",
"(",
"data_api",
",",
"struct_inflator",
")",
":",
"struct_inflator",
".",
"set_header_info",
"(",
"data_api",
".",
"r_free",
",",
"data_api",
".",
"r_work",
",",
"data_api",
".",
"resolution",
",",
"data_api",
".",
"title",
",",
"data_api",
".",
"deposition_date",
",",
"data_api",
".",
"release_date",
",",
"data_api",
".",
"experimental_methods",
")"
] | 52.916667 | 14.25 |
def create_ethereum_client(uri, timeout=60, *, loop=None):
"""Create client to ethereum node based on schema.
:param uri: Host on ethereum node
:type uri: str
:param timeout: An optional total time of timeout call
:type timeout: int
:param loop: An optional *event loop* instance
(uses :func:`asyncio.get_event_loop` if not specified).
:type loop: :ref:`EventLoop<asyncio-event-loop>`
:return: :class:`BaseAsyncIOClient` instance.
"""
if loop is None:
loop = asyncio.get_event_loop()
presult = urlparse(uri)
if presult.scheme in ('ipc', 'unix'):
reader, writer = yield from asyncio.open_unix_connection(presult.path,
loop=loop)
return AsyncIOIPCClient(reader, writer, uri, timeout, loop=loop)
elif presult.scheme in ('http', 'https'):
tls = presult.scheme[-1] == 's'
netloc = presult.netloc.split(':')
host = netloc.pop(0)
port = netloc.pop(0) if netloc else (443 if tls else 80)
return AsyncIOHTTPClient(host, port, tls, timeout, loop=loop)
else:
raise RuntimeError('This scheme does not supported.') | [
"def",
"create_ethereum_client",
"(",
"uri",
",",
"timeout",
"=",
"60",
",",
"*",
",",
"loop",
"=",
"None",
")",
":",
"if",
"loop",
"is",
"None",
":",
"loop",
"=",
"asyncio",
".",
"get_event_loop",
"(",
")",
"presult",
"=",
"urlparse",
"(",
"uri",
")",
"if",
"presult",
".",
"scheme",
"in",
"(",
"'ipc'",
",",
"'unix'",
")",
":",
"reader",
",",
"writer",
"=",
"yield",
"from",
"asyncio",
".",
"open_unix_connection",
"(",
"presult",
".",
"path",
",",
"loop",
"=",
"loop",
")",
"return",
"AsyncIOIPCClient",
"(",
"reader",
",",
"writer",
",",
"uri",
",",
"timeout",
",",
"loop",
"=",
"loop",
")",
"elif",
"presult",
".",
"scheme",
"in",
"(",
"'http'",
",",
"'https'",
")",
":",
"tls",
"=",
"presult",
".",
"scheme",
"[",
"-",
"1",
"]",
"==",
"'s'",
"netloc",
"=",
"presult",
".",
"netloc",
".",
"split",
"(",
"':'",
")",
"host",
"=",
"netloc",
".",
"pop",
"(",
"0",
")",
"port",
"=",
"netloc",
".",
"pop",
"(",
"0",
")",
"if",
"netloc",
"else",
"(",
"443",
"if",
"tls",
"else",
"80",
")",
"return",
"AsyncIOHTTPClient",
"(",
"host",
",",
"port",
",",
"tls",
",",
"timeout",
",",
"loop",
"=",
"loop",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"'This scheme does not supported.'",
")"
] | 38.258065 | 19.580645 |
def search(self, **kwargs):
"""
Method to search neighbors based on extends search.
:param search: Dict containing QuerySets to find neighbors.
:param include: Array containing fields to include on response.
:param exclude: Array containing fields to exclude on response.
:param fields: Array containing fields to override default fields.
:param kind: Determine if result will be detailed ('detail') or basic ('basic').
:return: Dict containing neighbors
"""
return super(ApiV4Neighbor, self).get(self.prepare_url(
'api/v4/neighbor/', kwargs)) | [
"def",
"search",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"super",
"(",
"ApiV4Neighbor",
",",
"self",
")",
".",
"get",
"(",
"self",
".",
"prepare_url",
"(",
"'api/v4/neighbor/'",
",",
"kwargs",
")",
")"
] | 44.642857 | 22.071429 |
def swapWH(self):
"""!
\~english Swap width and height of rectangles
\~chinese 交换矩形高宽边数据
"""
width = self.width
self.width = self.height
self.height = width | [
"def",
"swapWH",
"(",
"self",
")",
":",
"width",
"=",
"self",
".",
"width",
"self",
".",
"width",
"=",
"self",
".",
"height",
"self",
".",
"height",
"=",
"width"
] | 25.625 | 10.5 |
def _from_string(cls, serialized):
"""
Return a DefinitionLocator parsing the given serialized string
:param serialized: matches the string to
"""
parse = cls.URL_RE.match(serialized)
if not parse:
raise InvalidKeyError(cls, serialized)
parse = parse.groupdict()
if parse['definition_id']:
parse['definition_id'] = cls.as_object_id(parse['definition_id'])
return cls(**{key: parse.get(key) for key in cls.KEY_FIELDS}) | [
"def",
"_from_string",
"(",
"cls",
",",
"serialized",
")",
":",
"parse",
"=",
"cls",
".",
"URL_RE",
".",
"match",
"(",
"serialized",
")",
"if",
"not",
"parse",
":",
"raise",
"InvalidKeyError",
"(",
"cls",
",",
"serialized",
")",
"parse",
"=",
"parse",
".",
"groupdict",
"(",
")",
"if",
"parse",
"[",
"'definition_id'",
"]",
":",
"parse",
"[",
"'definition_id'",
"]",
"=",
"cls",
".",
"as_object_id",
"(",
"parse",
"[",
"'definition_id'",
"]",
")",
"return",
"cls",
"(",
"*",
"*",
"{",
"key",
":",
"parse",
".",
"get",
"(",
"key",
")",
"for",
"key",
"in",
"cls",
".",
"KEY_FIELDS",
"}",
")"
] | 35.857143 | 16.857143 |
def save_json(filename: str, config: Union[List, Dict]):
"""Save JSON data to a file.
Returns True on success.
"""
try:
data = json.dumps(config, sort_keys=True, indent=4)
with open(filename, 'w', encoding='utf-8') as fdesc:
fdesc.write(data)
return True
except TypeError as error:
_LOGGER.exception('Failed to serialize to JSON: %s',
filename)
raise PytradfriError(error)
except OSError as error:
_LOGGER.exception('Saving JSON file failed: %s',
filename)
raise PytradfriError(error) | [
"def",
"save_json",
"(",
"filename",
":",
"str",
",",
"config",
":",
"Union",
"[",
"List",
",",
"Dict",
"]",
")",
":",
"try",
":",
"data",
"=",
"json",
".",
"dumps",
"(",
"config",
",",
"sort_keys",
"=",
"True",
",",
"indent",
"=",
"4",
")",
"with",
"open",
"(",
"filename",
",",
"'w'",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"fdesc",
":",
"fdesc",
".",
"write",
"(",
"data",
")",
"return",
"True",
"except",
"TypeError",
"as",
"error",
":",
"_LOGGER",
".",
"exception",
"(",
"'Failed to serialize to JSON: %s'",
",",
"filename",
")",
"raise",
"PytradfriError",
"(",
"error",
")",
"except",
"OSError",
"as",
"error",
":",
"_LOGGER",
".",
"exception",
"(",
"'Saving JSON file failed: %s'",
",",
"filename",
")",
"raise",
"PytradfriError",
"(",
"error",
")"
] | 34.222222 | 13.611111 |
def get_key_delivery_url(access_token, ck_id, key_type):
'''Get Media Services Key Delivery URL.
Args:
access_token (str): A valid Azure authentication token.
ck_id (str): A Media Service Content Key ID.
key_type (str): A Media Service key Type.
Returns:
HTTP response. JSON body.
'''
path = '/ContentKeys'
full_path = ''.join([path, "('", ck_id, "')", "/GetKeyDeliveryUrl"])
endpoint = ''.join([ams_rest_endpoint, full_path])
body = '{"keyDeliveryType": "' + key_type + '"}'
return do_ams_post(endpoint, full_path, body, access_token) | [
"def",
"get_key_delivery_url",
"(",
"access_token",
",",
"ck_id",
",",
"key_type",
")",
":",
"path",
"=",
"'/ContentKeys'",
"full_path",
"=",
"''",
".",
"join",
"(",
"[",
"path",
",",
"\"('\"",
",",
"ck_id",
",",
"\"')\"",
",",
"\"/GetKeyDeliveryUrl\"",
"]",
")",
"endpoint",
"=",
"''",
".",
"join",
"(",
"[",
"ams_rest_endpoint",
",",
"full_path",
"]",
")",
"body",
"=",
"'{\"keyDeliveryType\": \"'",
"+",
"key_type",
"+",
"'\"}'",
"return",
"do_ams_post",
"(",
"endpoint",
",",
"full_path",
",",
"body",
",",
"access_token",
")"
] | 36.875 | 21.125 |
def _get_caller_supplement(caller, data):
"""Some callers like MuTect incorporate a second caller for indels.
"""
if caller == "mutect":
icaller = tz.get_in(["config", "algorithm", "indelcaller"], data)
if icaller:
caller = "%s/%s" % (caller, icaller)
return caller | [
"def",
"_get_caller_supplement",
"(",
"caller",
",",
"data",
")",
":",
"if",
"caller",
"==",
"\"mutect\"",
":",
"icaller",
"=",
"tz",
".",
"get_in",
"(",
"[",
"\"config\"",
",",
"\"algorithm\"",
",",
"\"indelcaller\"",
"]",
",",
"data",
")",
"if",
"icaller",
":",
"caller",
"=",
"\"%s/%s\"",
"%",
"(",
"caller",
",",
"icaller",
")",
"return",
"caller"
] | 37.75 | 12.5 |
def selectShapePoint(self, point):
"""Select the first shape created which contains this point."""
self.deSelectShape()
if self.selectedVertex(): # A vertex is marked for selection.
index, shape = self.hVertex, self.hShape
shape.highlightVertex(index, shape.MOVE_VERTEX)
self.selectShape(shape)
return
for shape in reversed(self.shapes):
if self.isVisible(shape) and shape.containsPoint(point):
self.selectShape(shape)
self.calculateOffsets(shape, point)
return | [
"def",
"selectShapePoint",
"(",
"self",
",",
"point",
")",
":",
"self",
".",
"deSelectShape",
"(",
")",
"if",
"self",
".",
"selectedVertex",
"(",
")",
":",
"# A vertex is marked for selection.",
"index",
",",
"shape",
"=",
"self",
".",
"hVertex",
",",
"self",
".",
"hShape",
"shape",
".",
"highlightVertex",
"(",
"index",
",",
"shape",
".",
"MOVE_VERTEX",
")",
"self",
".",
"selectShape",
"(",
"shape",
")",
"return",
"for",
"shape",
"in",
"reversed",
"(",
"self",
".",
"shapes",
")",
":",
"if",
"self",
".",
"isVisible",
"(",
"shape",
")",
"and",
"shape",
".",
"containsPoint",
"(",
"point",
")",
":",
"self",
".",
"selectShape",
"(",
"shape",
")",
"self",
".",
"calculateOffsets",
"(",
"shape",
",",
"point",
")",
"return"
] | 45.384615 | 12.846154 |
def _indirect_jump_resolved(self, jump, jump_addr, resolved_by, targets):
"""
Called when an indirect jump is successfully resolved.
:param IndirectJump jump: The resolved indirect jump, or None if an IndirectJump instance is
not available.
:param int jump_addr: Address of the resolved indirect jump.
:param IndirectJumpResolver resolved_by: The resolver used to resolve this indirect jump.
:param list targets: List of indirect jump targets.
:param CFGJob job: The job at the start of the block containing the indirect jump.
:return: None
"""
addr = jump.addr if jump is not None else jump_addr
l.debug('The indirect jump at %#x is successfully resolved by %s. It has %d targets.', addr, resolved_by, len(targets))
self.kb.resolved_indirect_jumps.add(addr) | [
"def",
"_indirect_jump_resolved",
"(",
"self",
",",
"jump",
",",
"jump_addr",
",",
"resolved_by",
",",
"targets",
")",
":",
"addr",
"=",
"jump",
".",
"addr",
"if",
"jump",
"is",
"not",
"None",
"else",
"jump_addr",
"l",
".",
"debug",
"(",
"'The indirect jump at %#x is successfully resolved by %s. It has %d targets.'",
",",
"addr",
",",
"resolved_by",
",",
"len",
"(",
"targets",
")",
")",
"self",
".",
"kb",
".",
"resolved_indirect_jumps",
".",
"add",
"(",
"addr",
")"
] | 57.882353 | 37.647059 |
def simulation(self, ts_length=90, random_state=None):
    """
    Compute a simulated sample path assuming Gaussian shocks.

    Parameters
    ----------
    ts_length : scalar(int), optional(default=90)
        Number of periods to simulate for
    random_state : int or np.random.RandomState, optional
        Random seed (integer) or np.random.RandomState instance to set
        the initial state of the random number generator for
        reproducibility. If None, a randomly initialized RandomState is
        used.

    Returns
    -------
    vals : array_like(float)
        A simulation of the model that corresponds to this class
    """
    from scipy.signal import dlsim
    rng = check_random_state(random_state)
    # Gaussian shocks scaled by the model's innovation std deviation.
    shocks = self.sigma * rng.randn(ts_length, 1)
    system = (self.ma_poly, self.ar_poly, 1)
    _, vals = dlsim(system, shocks)
    return vals.flatten()
"def",
"simulation",
"(",
"self",
",",
"ts_length",
"=",
"90",
",",
"random_state",
"=",
"None",
")",
":",
"from",
"scipy",
".",
"signal",
"import",
"dlsim",
"random_state",
"=",
"check_random_state",
"(",
"random_state",
")",
"sys",
"=",
"self",
".",
"ma_poly",
",",
"self",
".",
"ar_poly",
",",
"1",
"u",
"=",
"random_state",
".",
"randn",
"(",
"ts_length",
",",
"1",
")",
"*",
"self",
".",
"sigma",
"vals",
"=",
"dlsim",
"(",
"sys",
",",
"u",
")",
"[",
"1",
"]",
"return",
"vals",
".",
"flatten",
"(",
")"
] | 32.724138 | 21.413793 |
def count(self):
    '''Estimate the cardinality count based on the technique described in
    `this paper <http://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=365694>`_.

    Returns:
        int: The estimated cardinality of the set represented by this MinHash.
    '''
    # ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24; it was
    # simply an alias for the builtin ``float``, so this substitution is
    # behavior-identical while restoring compatibility with modern NumPy.
    k = len(self)
    return float(k) / np.sum(self.hashvalues / float(_max_hash)) - 1.0
"def",
"count",
"(",
"self",
")",
":",
"k",
"=",
"len",
"(",
"self",
")",
"return",
"np",
".",
"float",
"(",
"k",
")",
"/",
"np",
".",
"sum",
"(",
"self",
".",
"hashvalues",
"/",
"np",
".",
"float",
"(",
"_max_hash",
")",
")",
"-",
"1.0"
] | 42.888889 | 33.111111 |
def find_entity_view(self, view_type, begin_entity=None, filter={},
                     properties=None):
    """Find a ManagedEntity of the requested type.

    Traverses the MOB looking for an entity matching the filter.

    :param view_type: The type of ManagedEntity to find.
    :type view_type: str
    :param begin_entity: The MOR to start searching for the entity. \
    The default is to start the search at the root folder.
    :type begin_entity: ManagedObjectReference or None
    :param filter: Key/value pairs to filter the results. The key is \
    a valid parameter of the ManagedEntity type. The value is what \
    that parameter should match.
    :type filter: dict
    :param properties: Currently unused after collection — the \
    ``update_view_data`` calls that consumed it are commented out.
    :type properties: list or None
    :returns: If an entity is found, a ManagedEntity matching the search.
    :rtype: ManagedEntity
    :raises ObjectNotFoundError: if a filter is given and no entity matches.
    """
    # NOTE(review): ``filter={}`` is a shared mutable default; it is only
    # read in this method, never mutated, so it is currently harmless.
    if properties is None:
        properties = []
    # Resolve the view type name to its Python wrapper class.
    kls = classmapper(view_type)
    # Start the search at the root folder if no begin_entity was given
    if not begin_entity:
        begin_entity = self.sc.rootFolder._mo_ref
        logger.debug("Using %s", self.sc.rootFolder._mo_ref)
    # Only request the properties named in the filter from the server.
    property_spec = self.create('PropertySpec')
    property_spec.type = view_type
    property_spec.all = False
    property_spec.pathSet = list(filter.keys())
    pfs = self.get_search_filter_spec(begin_entity, property_spec)
    # Retrieve properties from server and update entity
    #obj_contents = self.propertyCollector.RetrieveProperties(specSet=pfs)
    obj_contents = self.sc.propertyCollector.RetrieveProperties(specSet=pfs)
    # TODO: Implement filtering
    if not filter:
        logger.warning('No filter specified, returning first match.')
        # If no filter is specified we just return the first item
        # in the list of returned objects
        logger.debug("Creating class in find_entity_view (filter)")
        view = kls(obj_contents[0].obj._mo_ref, self)
        logger.debug("Completed creating class in find_entity_view (filter)")
        #view.update_view_data(properties)
        return view
    matched = False
    # Iterate through obj_contents retrieved
    for obj_content in obj_contents:
        # If there are is no propSet, skip this one
        if not obj_content.propSet:
            continue
        matches = 0
        # Iterate through each property in the set
        for prop in obj_content.propSet:
            for key in filter.keys():
                # If the property name is in the defined filter
                if prop.name == key:
                    # ...and it matches the value specified
                    # TODO: Regex this?
                    if prop.val == filter[prop.name]:
                        # We've found a match
                        matches += 1
                    else:
                        # NOTE(review): a failed value match only breaks out
                        # of the key loop, not the property loop — confirm
                        # this is the intended short-circuit behavior.
                        break
                else:
                    continue
        # The entity matches only if every filter key matched a property.
        if matches == len(filter):
            filtered_obj_content = obj_content
            matched = True
            break
        else:
            continue
    if matched is not True:
        # There were no matches
        raise ObjectNotFoundError("No matching objects for filter")
    logger.debug("Creating class in find_entity_view")
    view = kls(filtered_obj_content.obj._mo_ref, self)
    logger.debug("Completed creating class in find_entity_view")
    #view.update_view_data(properties=properties)
    return view
"def",
"find_entity_view",
"(",
"self",
",",
"view_type",
",",
"begin_entity",
"=",
"None",
",",
"filter",
"=",
"{",
"}",
",",
"properties",
"=",
"None",
")",
":",
"if",
"properties",
"is",
"None",
":",
"properties",
"=",
"[",
"]",
"kls",
"=",
"classmapper",
"(",
"view_type",
")",
"# Start the search at the root folder if no begin_entity was given",
"if",
"not",
"begin_entity",
":",
"begin_entity",
"=",
"self",
".",
"sc",
".",
"rootFolder",
".",
"_mo_ref",
"logger",
".",
"debug",
"(",
"\"Using %s\"",
",",
"self",
".",
"sc",
".",
"rootFolder",
".",
"_mo_ref",
")",
"property_spec",
"=",
"self",
".",
"create",
"(",
"'PropertySpec'",
")",
"property_spec",
".",
"type",
"=",
"view_type",
"property_spec",
".",
"all",
"=",
"False",
"property_spec",
".",
"pathSet",
"=",
"list",
"(",
"filter",
".",
"keys",
"(",
")",
")",
"pfs",
"=",
"self",
".",
"get_search_filter_spec",
"(",
"begin_entity",
",",
"property_spec",
")",
"# Retrieve properties from server and update entity",
"#obj_contents = self.propertyCollector.RetrieveProperties(specSet=pfs)",
"obj_contents",
"=",
"self",
".",
"sc",
".",
"propertyCollector",
".",
"RetrieveProperties",
"(",
"specSet",
"=",
"pfs",
")",
"# TODO: Implement filtering",
"if",
"not",
"filter",
":",
"logger",
".",
"warning",
"(",
"'No filter specified, returning first match.'",
")",
"# If no filter is specified we just return the first item",
"# in the list of returned objects",
"logger",
".",
"debug",
"(",
"\"Creating class in find_entity_view (filter)\"",
")",
"view",
"=",
"kls",
"(",
"obj_contents",
"[",
"0",
"]",
".",
"obj",
".",
"_mo_ref",
",",
"self",
")",
"logger",
".",
"debug",
"(",
"\"Completed creating class in find_entity_view (filter)\"",
")",
"#view.update_view_data(properties)",
"return",
"view",
"matched",
"=",
"False",
"# Iterate through obj_contents retrieved",
"for",
"obj_content",
"in",
"obj_contents",
":",
"# If there are is no propSet, skip this one",
"if",
"not",
"obj_content",
".",
"propSet",
":",
"continue",
"matches",
"=",
"0",
"# Iterate through each property in the set",
"for",
"prop",
"in",
"obj_content",
".",
"propSet",
":",
"for",
"key",
"in",
"filter",
".",
"keys",
"(",
")",
":",
"# If the property name is in the defined filter",
"if",
"prop",
".",
"name",
"==",
"key",
":",
"# ...and it matches the value specified",
"# TODO: Regex this?",
"if",
"prop",
".",
"val",
"==",
"filter",
"[",
"prop",
".",
"name",
"]",
":",
"# We've found a match",
"matches",
"+=",
"1",
"else",
":",
"break",
"else",
":",
"continue",
"if",
"matches",
"==",
"len",
"(",
"filter",
")",
":",
"filtered_obj_content",
"=",
"obj_content",
"matched",
"=",
"True",
"break",
"else",
":",
"continue",
"if",
"matched",
"is",
"not",
"True",
":",
"# There were no matches",
"raise",
"ObjectNotFoundError",
"(",
"\"No matching objects for filter\"",
")",
"logger",
".",
"debug",
"(",
"\"Creating class in find_entity_view\"",
")",
"view",
"=",
"kls",
"(",
"filtered_obj_content",
".",
"obj",
".",
"_mo_ref",
",",
"self",
")",
"logger",
".",
"debug",
"(",
"\"Completed creating class in find_entity_view\"",
")",
"#view.update_view_data(properties=properties)",
"return",
"view"
] | 40.602273 | 19.090909 |
def install_isochrones(self):
    """
    Delegate to the ``isochrones`` install sub-command, forwarding the
    ``force`` flag and (when set) the ``ugali_dir`` option.

    Pattern from: http://stackoverflow.com/a/24353921/4075339
    """
    isochrones_cmd = self.distribution.get_command_obj('isochrones')
    isochrones_cmd.force = self.force
    if self.ugali_dir:
        isochrones_cmd.ugali_dir = self.ugali_dir
    self.run_command('isochrones')
"def",
"install_isochrones",
"(",
"self",
")",
":",
"cmd_obj",
"=",
"self",
".",
"distribution",
".",
"get_command_obj",
"(",
"'isochrones'",
")",
"cmd_obj",
".",
"force",
"=",
"self",
".",
"force",
"if",
"self",
".",
"ugali_dir",
":",
"cmd_obj",
".",
"ugali_dir",
"=",
"self",
".",
"ugali_dir",
"self",
".",
"run_command",
"(",
"'isochrones'",
")"
] | 38 | 8.666667 |
def rebuild( self ):
    """
    Rebuilds the parts widget with the latest text.

    Tears down all existing breadcrumb buttons, then recreates one text
    button per path part plus arrow "completer" buttons between parts,
    finally scrolling so the rightmost part is visible.
    """
    navitem = self.currentItem()
    if ( navitem ):
        navitem.initialize()
    # Suspend repaints while the button row is rebuilt.
    self.setUpdatesEnabled(False)
    self.scrollWidget().show()
    self._originalText = ''
    partsw = self.partsWidget()
    # Remove and destroy every existing breadcrumb button.
    for button in self._buttonGroup.buttons():
        self._buttonGroup.removeButton(button)
        button.close()
        button.setParent(None)
        button.deleteLater()
    # create the root button
    layout = partsw.layout()
    parts = self.parts()
    button = QToolButton(partsw)
    button.setAutoRaise(True)
    button.setMaximumWidth(12)
    button.setArrowType(Qt.RightArrow)
    button.setProperty('path', wrapVariant(''))
    button.setProperty('is_completer', wrapVariant(True))
    last_button = button
    self._buttonGroup.addButton(button)
    layout.insertWidget(0, button)
    # check to see if we have a navigation model setup
    if ( self._navigationModel ):
        last_item = self._navigationModel.itemByPath(self.text())
        show_last = last_item and last_item.rowCount() > 0
    else:
        show_last = False
    # load the navigation system
    count = len(parts)
    for i, part in enumerate(parts):
        path = self.separator().join(parts[:i+1])
        button = QToolButton(partsw)
        button.setAutoRaise(True)
        button.setText(part)
        if ( self._navigationModel ):
            item = self._navigationModel.itemByPath(path)
            if ( item ):
                button.setIcon(item.icon())
                button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
        button.setProperty('path', wrapVariant(path))
        button.setProperty('is_completer', wrapVariant(False))
        self._buttonGroup.addButton(button)
        # Text buttons occupy odd layout slots; arrows occupy even slots.
        layout.insertWidget((i * 2) + 1, button)
        # determine if we should show the final button
        if ( show_last or i < (count - 1) ):
            button = QToolButton(partsw)
            button.setAutoRaise(True)
            button.setMaximumWidth(12)
            button.setArrowType(Qt.RightArrow)
            button.setProperty('path', wrapVariant(path))
            button.setProperty('is_completer', wrapVariant(True))
            self._buttonGroup.addButton(button)
            layout.insertWidget((i * 2) + 2, button)
            # NOTE(review): ``last_button`` is assigned but never read in
            # this method — possibly consumed elsewhere or dead; confirm.
            last_button = button
    # Scroll right so the newest (rightmost) part is visible.
    if ( self.scrollWidget().width() < partsw.width() ):
        self.scrollParts(partsw.width() - self.scrollWidget().width())
    self.setUpdatesEnabled(True)
    self.navigationChanged.emit()
"def",
"rebuild",
"(",
"self",
")",
":",
"navitem",
"=",
"self",
".",
"currentItem",
"(",
")",
"if",
"(",
"navitem",
")",
":",
"navitem",
".",
"initialize",
"(",
")",
"self",
".",
"setUpdatesEnabled",
"(",
"False",
")",
"self",
".",
"scrollWidget",
"(",
")",
".",
"show",
"(",
")",
"self",
".",
"_originalText",
"=",
"''",
"partsw",
"=",
"self",
".",
"partsWidget",
"(",
")",
"for",
"button",
"in",
"self",
".",
"_buttonGroup",
".",
"buttons",
"(",
")",
":",
"self",
".",
"_buttonGroup",
".",
"removeButton",
"(",
"button",
")",
"button",
".",
"close",
"(",
")",
"button",
".",
"setParent",
"(",
"None",
")",
"button",
".",
"deleteLater",
"(",
")",
"# create the root button",
"layout",
"=",
"partsw",
".",
"layout",
"(",
")",
"parts",
"=",
"self",
".",
"parts",
"(",
")",
"button",
"=",
"QToolButton",
"(",
"partsw",
")",
"button",
".",
"setAutoRaise",
"(",
"True",
")",
"button",
".",
"setMaximumWidth",
"(",
"12",
")",
"button",
".",
"setArrowType",
"(",
"Qt",
".",
"RightArrow",
")",
"button",
".",
"setProperty",
"(",
"'path'",
",",
"wrapVariant",
"(",
"''",
")",
")",
"button",
".",
"setProperty",
"(",
"'is_completer'",
",",
"wrapVariant",
"(",
"True",
")",
")",
"last_button",
"=",
"button",
"self",
".",
"_buttonGroup",
".",
"addButton",
"(",
"button",
")",
"layout",
".",
"insertWidget",
"(",
"0",
",",
"button",
")",
"# check to see if we have a navigation model setup",
"if",
"(",
"self",
".",
"_navigationModel",
")",
":",
"last_item",
"=",
"self",
".",
"_navigationModel",
".",
"itemByPath",
"(",
"self",
".",
"text",
"(",
")",
")",
"show_last",
"=",
"last_item",
"and",
"last_item",
".",
"rowCount",
"(",
")",
">",
"0",
"else",
":",
"show_last",
"=",
"False",
"# load the navigation system",
"count",
"=",
"len",
"(",
"parts",
")",
"for",
"i",
",",
"part",
"in",
"enumerate",
"(",
"parts",
")",
":",
"path",
"=",
"self",
".",
"separator",
"(",
")",
".",
"join",
"(",
"parts",
"[",
":",
"i",
"+",
"1",
"]",
")",
"button",
"=",
"QToolButton",
"(",
"partsw",
")",
"button",
".",
"setAutoRaise",
"(",
"True",
")",
"button",
".",
"setText",
"(",
"part",
")",
"if",
"(",
"self",
".",
"_navigationModel",
")",
":",
"item",
"=",
"self",
".",
"_navigationModel",
".",
"itemByPath",
"(",
"path",
")",
"if",
"(",
"item",
")",
":",
"button",
".",
"setIcon",
"(",
"item",
".",
"icon",
"(",
")",
")",
"button",
".",
"setToolButtonStyle",
"(",
"Qt",
".",
"ToolButtonTextBesideIcon",
")",
"button",
".",
"setProperty",
"(",
"'path'",
",",
"wrapVariant",
"(",
"path",
")",
")",
"button",
".",
"setProperty",
"(",
"'is_completer'",
",",
"wrapVariant",
"(",
"False",
")",
")",
"self",
".",
"_buttonGroup",
".",
"addButton",
"(",
"button",
")",
"layout",
".",
"insertWidget",
"(",
"(",
"i",
"*",
"2",
")",
"+",
"1",
",",
"button",
")",
"# determine if we should show the final button",
"if",
"(",
"show_last",
"or",
"i",
"<",
"(",
"count",
"-",
"1",
")",
")",
":",
"button",
"=",
"QToolButton",
"(",
"partsw",
")",
"button",
".",
"setAutoRaise",
"(",
"True",
")",
"button",
".",
"setMaximumWidth",
"(",
"12",
")",
"button",
".",
"setArrowType",
"(",
"Qt",
".",
"RightArrow",
")",
"button",
".",
"setProperty",
"(",
"'path'",
",",
"wrapVariant",
"(",
"path",
")",
")",
"button",
".",
"setProperty",
"(",
"'is_completer'",
",",
"wrapVariant",
"(",
"True",
")",
")",
"self",
".",
"_buttonGroup",
".",
"addButton",
"(",
"button",
")",
"layout",
".",
"insertWidget",
"(",
"(",
"i",
"*",
"2",
")",
"+",
"2",
",",
"button",
")",
"last_button",
"=",
"button",
"if",
"(",
"self",
".",
"scrollWidget",
"(",
")",
".",
"width",
"(",
")",
"<",
"partsw",
".",
"width",
"(",
")",
")",
":",
"self",
".",
"scrollParts",
"(",
"partsw",
".",
"width",
"(",
")",
"-",
"self",
".",
"scrollWidget",
"(",
")",
".",
"width",
"(",
")",
")",
"self",
".",
"setUpdatesEnabled",
"(",
"True",
")",
"self",
".",
"navigationChanged",
".",
"emit",
"(",
")"
] | 35.771084 | 15.144578 |
def doRollover(self):
    """
    Do a rollover, as described in __init__().

    Closes the stream, moves the live log aside to a unique temp name
    (optionally gzipping it), shifts existing numbered backups up by one,
    and installs the temp file as backup ``.1``.  With ``backupCount <= 0``
    the file is simply truncated instead.
    """
    self._close()
    if self.backupCount <= 0:
        # Don't keep any backups, just overwrite the existing backup file
        # Locking doesn't much matter here; since we are overwriting it anyway
        self.stream = self.do_open("w")
        self._close()
        return
    # Determine if we can rename the log file or not. Windows refuses to
    # rename an open file, Unix is inode base so it doesn't care.
    # Attempt to rename logfile to tempname:
    # There is a slight race-condition here, but it seems unavoidable
    tmpname = None
    while not tmpname or os.path.exists(tmpname):
        # 64 random bits make a collision with an existing file unlikely.
        tmpname = "%s.rotate.%08d" % (self.baseFilename, randbits(64))
    try:
        # Do a rename test to determine if we can successfully rename the log file
        os.rename(self.baseFilename, tmpname)
        if self.use_gzip:
            self.do_gzip(tmpname)
    except (IOError, OSError):
        exc_value = sys.exc_info()[1]
        self._console_log(
            "rename failed. File in use? exception=%s" % (exc_value,), stack=True)
        # Bail out: another process likely holds the file open (Windows).
        return
    gzip_ext = ''
    if self.use_gzip:
        gzip_ext = '.gz'
    def do_rename(source_fn, dest_fn):
        # Move source_fn (or its gzipped twin) onto dest_fn, clobbering
        # any file already at the destination.
        self._console_log("Rename %s -> %s" % (source_fn, dest_fn + gzip_ext))
        if os.path.exists(dest_fn):
            os.remove(dest_fn)
        if os.path.exists(dest_fn + gzip_ext):
            os.remove(dest_fn + gzip_ext)
        source_gzip = source_fn + gzip_ext
        if os.path.exists(source_gzip):
            os.rename(source_gzip, dest_fn + gzip_ext)
        elif os.path.exists(source_fn):
            os.rename(source_fn, dest_fn)
    # Q: Is there some way to protect this code from a KeyboardInterrupt?
    # This isn't necessarily a data loss issue, but it certainly does
    # break the rotation process during stress testing.
    # There is currently no mechanism in place to handle the situation
    # where one of these log files cannot be renamed. (Example, user
    # opens "logfile.3" in notepad); we could test rename each file, but
    # nobody's complained about this being an issue; so the additional
    # code complexity isn't warranted.
    # Shift backups up: .N-1 -> .N, ..., .1 -> .2 (highest first).
    for i in range(self.backupCount - 1, 0, -1):
        sfn = "%s.%d" % (self.baseFilename, i)
        dfn = "%s.%d" % (self.baseFilename, i + 1)
        if os.path.exists(sfn + gzip_ext):
            do_rename(sfn, dfn)
    dfn = self.baseFilename + ".1"
    do_rename(tmpname, dfn)
    if self.use_gzip:
        logFilename = self.baseFilename + ".1.gz"
        self._do_chown_and_chmod(logFilename)
    self._console_log("Rotation completed")
"def",
"doRollover",
"(",
"self",
")",
":",
"self",
".",
"_close",
"(",
")",
"if",
"self",
".",
"backupCount",
"<=",
"0",
":",
"# Don't keep any backups, just overwrite the existing backup file",
"# Locking doesn't much matter here; since we are overwriting it anyway",
"self",
".",
"stream",
"=",
"self",
".",
"do_open",
"(",
"\"w\"",
")",
"self",
".",
"_close",
"(",
")",
"return",
"# Determine if we can rename the log file or not. Windows refuses to",
"# rename an open file, Unix is inode base so it doesn't care.",
"# Attempt to rename logfile to tempname:",
"# There is a slight race-condition here, but it seems unavoidable",
"tmpname",
"=",
"None",
"while",
"not",
"tmpname",
"or",
"os",
".",
"path",
".",
"exists",
"(",
"tmpname",
")",
":",
"tmpname",
"=",
"\"%s.rotate.%08d\"",
"%",
"(",
"self",
".",
"baseFilename",
",",
"randbits",
"(",
"64",
")",
")",
"try",
":",
"# Do a rename test to determine if we can successfully rename the log file",
"os",
".",
"rename",
"(",
"self",
".",
"baseFilename",
",",
"tmpname",
")",
"if",
"self",
".",
"use_gzip",
":",
"self",
".",
"do_gzip",
"(",
"tmpname",
")",
"except",
"(",
"IOError",
",",
"OSError",
")",
":",
"exc_value",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"self",
".",
"_console_log",
"(",
"\"rename failed. File in use? exception=%s\"",
"%",
"(",
"exc_value",
",",
")",
",",
"stack",
"=",
"True",
")",
"return",
"gzip_ext",
"=",
"''",
"if",
"self",
".",
"use_gzip",
":",
"gzip_ext",
"=",
"'.gz'",
"def",
"do_rename",
"(",
"source_fn",
",",
"dest_fn",
")",
":",
"self",
".",
"_console_log",
"(",
"\"Rename %s -> %s\"",
"%",
"(",
"source_fn",
",",
"dest_fn",
"+",
"gzip_ext",
")",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"dest_fn",
")",
":",
"os",
".",
"remove",
"(",
"dest_fn",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"dest_fn",
"+",
"gzip_ext",
")",
":",
"os",
".",
"remove",
"(",
"dest_fn",
"+",
"gzip_ext",
")",
"source_gzip",
"=",
"source_fn",
"+",
"gzip_ext",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"source_gzip",
")",
":",
"os",
".",
"rename",
"(",
"source_gzip",
",",
"dest_fn",
"+",
"gzip_ext",
")",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
"source_fn",
")",
":",
"os",
".",
"rename",
"(",
"source_fn",
",",
"dest_fn",
")",
"# Q: Is there some way to protect this code from a KeyboardInterrupt?",
"# This isn't necessarily a data loss issue, but it certainly does",
"# break the rotation process during stress testing.",
"# There is currently no mechanism in place to handle the situation",
"# where one of these log files cannot be renamed. (Example, user",
"# opens \"logfile.3\" in notepad); we could test rename each file, but",
"# nobody's complained about this being an issue; so the additional",
"# code complexity isn't warranted.",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"backupCount",
"-",
"1",
",",
"0",
",",
"-",
"1",
")",
":",
"sfn",
"=",
"\"%s.%d\"",
"%",
"(",
"self",
".",
"baseFilename",
",",
"i",
")",
"dfn",
"=",
"\"%s.%d\"",
"%",
"(",
"self",
".",
"baseFilename",
",",
"i",
"+",
"1",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"sfn",
"+",
"gzip_ext",
")",
":",
"do_rename",
"(",
"sfn",
",",
"dfn",
")",
"dfn",
"=",
"self",
".",
"baseFilename",
"+",
"\".1\"",
"do_rename",
"(",
"tmpname",
",",
"dfn",
")",
"if",
"self",
".",
"use_gzip",
":",
"logFilename",
"=",
"self",
".",
"baseFilename",
"+",
"\".1.gz\"",
"self",
".",
"_do_chown_and_chmod",
"(",
"logFilename",
")",
"self",
".",
"_console_log",
"(",
"\"Rotation completed\"",
")"
] | 40.828571 | 19.485714 |
def load(self):
    """
    Fetch the repo database from its remote location and parse it.

    :return: this repo object, allowing call chaining.
    """
    raw = self.http_request(self.location())
    self._parse(raw)
    return self
"def",
"load",
"(",
"self",
")",
":",
"data",
"=",
"self",
".",
"http_request",
"(",
"self",
".",
"location",
"(",
")",
")",
"self",
".",
"_parse",
"(",
"data",
")",
"return",
"self"
] | 27.375 | 15.875 |
def crop(data, crinfo):
    """
    Crop ``data`` to the 3-D region described by ``crinfo``.

    :param crinfo: min and max for each axis - [[minX, maxX], [minY, maxY], [minZ, maxZ]]
    """
    crinfo = fix_crinfo(crinfo)
    # Build one slice per axis; indexing with the tuple is equivalent to
    # data[a:b, c:d, e:f] for numpy arrays.
    region = tuple(
        slice(__int_or_none(bounds[0]), __int_or_none(bounds[1]))
        for bounds in crinfo[:3]
    )
    return data[region]
"def",
"crop",
"(",
"data",
",",
"crinfo",
")",
":",
"crinfo",
"=",
"fix_crinfo",
"(",
"crinfo",
")",
"return",
"data",
"[",
"__int_or_none",
"(",
"crinfo",
"[",
"0",
"]",
"[",
"0",
"]",
")",
":",
"__int_or_none",
"(",
"crinfo",
"[",
"0",
"]",
"[",
"1",
"]",
")",
",",
"__int_or_none",
"(",
"crinfo",
"[",
"1",
"]",
"[",
"0",
"]",
")",
":",
"__int_or_none",
"(",
"crinfo",
"[",
"1",
"]",
"[",
"1",
"]",
")",
",",
"__int_or_none",
"(",
"crinfo",
"[",
"2",
"]",
"[",
"0",
"]",
")",
":",
"__int_or_none",
"(",
"crinfo",
"[",
"2",
"]",
"[",
"1",
"]",
")",
",",
"]"
] | 27.733333 | 24.8 |
def strframe(obj, extended=False):
    """
    Return a string with a frame record pretty-formatted.

    The record is typically an item in a list generated by `inspect.stack()
    <https://docs.python.org/3/library/inspect.html#inspect.stack>`_).

    :param obj: Frame record
    :type obj: tuple
    :param extended: Flag that indicates whether contents of the frame object
                     are printed (True) or not (False)
    :type extended: boolean
    :rtype: string
    """
    # Stack frame -> (frame object [0], filename [1], line number of current
    # line [2], function name [3], list of lines of context from source
    # code [4], index of current line within list [5])
    fname = normalize_windows_fname(obj[1])
    lines = [
        pcolor("Frame object ID: {0}".format(hex(id(obj[0]))), "yellow"),
        "File name......: {0}".format(fname),
        "Line number....: {0}".format(obj[2]),
        "Function name..: {0}".format(obj[3]),
        "Context........: {0}".format(obj[4]),
        "Index..........: {0}".format(obj[5]),
    ]
    if extended:
        frame_obj = obj[0]
        lines.append("f_back ID......: {0}".format(hex(id(frame_obj.f_back))))
        lines.append("f_builtins.....: {0}".format(frame_obj.f_builtins))
        lines.append("f_code.........: {0}".format(frame_obj.f_code))
        lines.append("f_globals......: {0}".format(frame_obj.f_globals))
        lines.append("f_lasti........: {0}".format(frame_obj.f_lasti))
        lines.append("f_lineno.......: {0}".format(frame_obj.f_lineno))
        lines.append("f_locals.......: {0}".format(frame_obj.f_locals))
        if hasattr(frame_obj, "f_restricted"):  # pragma: no cover
            lines.append("f_restricted...: {0}".format(frame_obj.f_restricted))
        lines.append("f_trace........: {0}".format(frame_obj.f_trace))
    return "\n".join(lines)
"def",
"strframe",
"(",
"obj",
",",
"extended",
"=",
"False",
")",
":",
"# Stack frame -> (frame object [0], filename [1], line number of current",
"# line [2], function name [3], list of lines of context from source",
"# code [4], index of current line within list [5])",
"fname",
"=",
"normalize_windows_fname",
"(",
"obj",
"[",
"1",
"]",
")",
"ret",
"=",
"list",
"(",
")",
"ret",
".",
"append",
"(",
"pcolor",
"(",
"\"Frame object ID: {0}\"",
".",
"format",
"(",
"hex",
"(",
"id",
"(",
"obj",
"[",
"0",
"]",
")",
")",
")",
",",
"\"yellow\"",
")",
")",
"ret",
".",
"append",
"(",
"\"File name......: {0}\"",
".",
"format",
"(",
"fname",
")",
")",
"ret",
".",
"append",
"(",
"\"Line number....: {0}\"",
".",
"format",
"(",
"obj",
"[",
"2",
"]",
")",
")",
"ret",
".",
"append",
"(",
"\"Function name..: {0}\"",
".",
"format",
"(",
"obj",
"[",
"3",
"]",
")",
")",
"ret",
".",
"append",
"(",
"\"Context........: {0}\"",
".",
"format",
"(",
"obj",
"[",
"4",
"]",
")",
")",
"ret",
".",
"append",
"(",
"\"Index..........: {0}\"",
".",
"format",
"(",
"obj",
"[",
"5",
"]",
")",
")",
"if",
"extended",
":",
"ret",
".",
"append",
"(",
"\"f_back ID......: {0}\"",
".",
"format",
"(",
"hex",
"(",
"id",
"(",
"obj",
"[",
"0",
"]",
".",
"f_back",
")",
")",
")",
")",
"ret",
".",
"append",
"(",
"\"f_builtins.....: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_builtins",
")",
")",
"ret",
".",
"append",
"(",
"\"f_code.........: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_code",
")",
")",
"ret",
".",
"append",
"(",
"\"f_globals......: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_globals",
")",
")",
"ret",
".",
"append",
"(",
"\"f_lasti........: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_lasti",
")",
")",
"ret",
".",
"append",
"(",
"\"f_lineno.......: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_lineno",
")",
")",
"ret",
".",
"append",
"(",
"\"f_locals.......: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_locals",
")",
")",
"if",
"hasattr",
"(",
"obj",
"[",
"0",
"]",
",",
"\"f_restricted\"",
")",
":",
"# pragma: no cover",
"ret",
".",
"append",
"(",
"\"f_restricted...: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_restricted",
")",
")",
"ret",
".",
"append",
"(",
"\"f_trace........: {0}\"",
".",
"format",
"(",
"obj",
"[",
"0",
"]",
".",
"f_trace",
")",
")",
"return",
"\"\\n\"",
".",
"join",
"(",
"ret",
")"
] | 46.051282 | 22.615385 |
def ui(root_url, path):
    """
    Generate URL for a path in the Taskcluster ui.

    The returned URL switches on the root URL so one call works for both
    the legacy deployment (root URL https://taskcluster.net, whose UI is
    served from tools.taskcluster.net) and any future deployment, where
    the UI lives directly under the root URL:

        rootURL == 'https://taskcluster.net' -> 'https://tools.taskcluster.net/${path}'
        otherwise                            -> '${rootURL}/${path}'
    """
    root_url = root_url.rstrip('/')
    path = path.lstrip('/')
    if root_url != OLD_ROOT_URL:
        return '{}/{}'.format(root_url, path)
    return 'https://tools.taskcluster.net/{}'.format(path)
"def",
"ui",
"(",
"root_url",
",",
"path",
")",
":",
"root_url",
"=",
"root_url",
".",
"rstrip",
"(",
"'/'",
")",
"path",
"=",
"path",
".",
"lstrip",
"(",
"'/'",
")",
"if",
"root_url",
"==",
"OLD_ROOT_URL",
":",
"return",
"'https://tools.taskcluster.net/{}'",
".",
"format",
"(",
"path",
")",
"else",
":",
"return",
"'{}/{}'",
".",
"format",
"(",
"root_url",
",",
"path",
")"
] | 45.4375 | 20.5625 |
def callback_parent(attr, old, new):
    '''Update data directories drop down with new parent directory

    Widget-change callback (Bokeh-style ``(attr, old, new)`` signature).
    NOTE(review): relies on closure/module names ``parent_input``,
    ``datadirs_select``, ``output_window``, ``output_template`` and
    ``callback_datadirs`` being defined in the enclosing scope — confirm.
    '''
    import os
    # Remove accidental white space if copy/pasted
    new = new.strip()
    parent_input.value = new
    # Verify new parent path exists and update `datadirs_select` widget
    if os.path.exists(new):
        # Create sorted list of data directories, ignore files
        joinisdir = lambda parent, d: os.path.isdir(os.path.join(parent, d))
        options = sorted([d for d in os.listdir(new) if joinisdir(new, d)])
        # Update dropdown list of available data directories and select first
        datadirs_select.options = options
        datadirs_select.value = options[0]
        # Manually trigger the dependent callback for the new selection.
        callback_datadirs('value', options[0], options[0])
    else:
        msg = '''
        The parent path `{}` does not exist.

        Check that you have entered the absolute path.
        '''.format(new)
        output_window.text = output_template.format(msg)
    return None
"def",
"callback_parent",
"(",
"attr",
",",
"old",
",",
"new",
")",
":",
"import",
"os",
"# Remove accidental white space if copy/pasted",
"new",
"=",
"new",
".",
"strip",
"(",
")",
"parent_input",
".",
"value",
"=",
"new",
"# Verify new parent path exists and update `datadirs_select` widget",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"new",
")",
":",
"# Create sorted list of data directories, ignore files",
"joinisdir",
"=",
"lambda",
"parent",
",",
"d",
":",
"os",
".",
"path",
".",
"isdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"parent",
",",
"d",
")",
")",
"options",
"=",
"sorted",
"(",
"[",
"d",
"for",
"d",
"in",
"os",
".",
"listdir",
"(",
"new",
")",
"if",
"joinisdir",
"(",
"new",
",",
"d",
")",
"]",
")",
"# Update dropdown list of available data directories and select first",
"datadirs_select",
".",
"options",
"=",
"options",
"datadirs_select",
".",
"value",
"=",
"options",
"[",
"0",
"]",
"callback_datadirs",
"(",
"'value'",
",",
"options",
"[",
"0",
"]",
",",
"options",
"[",
"0",
"]",
")",
"else",
":",
"msg",
"=",
"'''\n The parent path `{}` does not exist.\n\n Check that you have entered the absolute path.\n '''",
".",
"format",
"(",
"new",
")",
"output_window",
".",
"text",
"=",
"output_template",
".",
"format",
"(",
"msg",
")",
"return",
"None"
] | 35.071429 | 24 |
def reply(self, obj, result, command_exec_status='ok', info_messages=[],
          warning_messages=[], error_messages=[]):
    """Build a response from a previously received command message, send it
    and return the number of sent bytes.

    :param result: Used to send back the result of the command execution to
                   the debugger client.
    :type result: dict

    See send() above for the other parameters' definitions.
    """
    with self._connection_lock:
        # TODO: add a parameter to remove args from messages ?
        if True:
            del obj['args']
        obj['result'] = result
        obj['commandExecStatus'] = command_exec_status
        obj['info_messages'] = info_messages
        obj['warning_messages'] = warning_messages
        obj['error_messages'] = error_messages
        payload = bytearray(self.encode(obj), 'utf-8')
        sent_count = self._connection.sendall(payload)
        self.log_sent(payload)
        return sent_count
"def",
"reply",
"(",
"self",
",",
"obj",
",",
"result",
",",
"command_exec_status",
"=",
"'ok'",
",",
"info_messages",
"=",
"[",
"]",
",",
"warning_messages",
"=",
"[",
"]",
",",
"error_messages",
"=",
"[",
"]",
")",
":",
"with",
"self",
".",
"_connection_lock",
":",
"# TODO: add a parameter to remove args from messages ?",
"if",
"True",
":",
"del",
"obj",
"[",
"'args'",
"]",
"obj",
"[",
"'result'",
"]",
"=",
"result",
"obj",
"[",
"'commandExecStatus'",
"]",
"=",
"command_exec_status",
"obj",
"[",
"'info_messages'",
"]",
"=",
"info_messages",
"obj",
"[",
"'warning_messages'",
"]",
"=",
"warning_messages",
"obj",
"[",
"'error_messages'",
"]",
"=",
"error_messages",
"msg_str",
"=",
"self",
".",
"encode",
"(",
"obj",
")",
"msg_bytes",
"=",
"bytearray",
"(",
"msg_str",
",",
"'utf-8'",
")",
"send_bytes_count",
"=",
"self",
".",
"_connection",
".",
"sendall",
"(",
"msg_bytes",
")",
"self",
".",
"log_sent",
"(",
"msg_bytes",
")",
"return",
"send_bytes_count"
] | 43.92 | 14.44 |
def setXpanId(self, xPanId):
"""set extended PAN ID of Thread Network
Args:
xPanId: extended PAN ID in hex format
Returns:
True: successful to set the extended PAN ID
False: fail to set the extended PAN ID
"""
xpanid = ''
print '%s call setXpanId' % self.port
print xPanId
try:
if not isinstance(xPanId, str):
xpanid = self.__convertLongToString(xPanId)
# prepend '0' at the beginning
if len(xpanid) < 16:
xpanid = xpanid.zfill(16)
print xpanid
cmd = WPANCTL_CMD + 'setprop Network:XPANID %s' % xpanid
datasetCmd = WPANCTL_CMD + 'setprop Dataset:ExtendedPanId %s' % xpanid
else:
xpanid = xPanId
cmd = WPANCTL_CMD + 'setprop Network:XPANID %s' % xpanid
datasetCmd = WPANCTL_CMD + 'setprop Dataset:ExtendedPanId %s' % xpanid
self.xpanId = xpanid
self.hasActiveDatasetToCommit = True
return self.__sendCommand(cmd)[0] != 'Fail' and self.__sendCommand(datasetCmd)[0] != 'Fail'
except Exception, e:
ModuleHelper.WriteIntoDebugLogger('setXpanId() Error: ' + str(e)) | [
"def",
"setXpanId",
"(",
"self",
",",
"xPanId",
")",
":",
"xpanid",
"=",
"''",
"print",
"'%s call setXpanId'",
"%",
"self",
".",
"port",
"print",
"xPanId",
"try",
":",
"if",
"not",
"isinstance",
"(",
"xPanId",
",",
"str",
")",
":",
"xpanid",
"=",
"self",
".",
"__convertLongToString",
"(",
"xPanId",
")",
"# prepend '0' at the beginning",
"if",
"len",
"(",
"xpanid",
")",
"<",
"16",
":",
"xpanid",
"=",
"xpanid",
".",
"zfill",
"(",
"16",
")",
"print",
"xpanid",
"cmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Network:XPANID %s'",
"%",
"xpanid",
"datasetCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:ExtendedPanId %s'",
"%",
"xpanid",
"else",
":",
"xpanid",
"=",
"xPanId",
"cmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Network:XPANID %s'",
"%",
"xpanid",
"datasetCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:ExtendedPanId %s'",
"%",
"xpanid",
"self",
".",
"xpanId",
"=",
"xpanid",
"self",
".",
"hasActiveDatasetToCommit",
"=",
"True",
"return",
"self",
".",
"__sendCommand",
"(",
"cmd",
")",
"[",
"0",
"]",
"!=",
"'Fail'",
"and",
"self",
".",
"__sendCommand",
"(",
"datasetCmd",
")",
"[",
"0",
"]",
"!=",
"'Fail'",
"except",
"Exception",
",",
"e",
":",
"ModuleHelper",
".",
"WriteIntoDebugLogger",
"(",
"'setXpanId() Error: '",
"+",
"str",
"(",
"e",
")",
")"
] | 39 | 21.212121 |
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED | [
"def",
"touch",
"(",
"self",
",",
"conn",
",",
"key",
",",
"exptime",
")",
":",
"assert",
"self",
".",
"_validate_key",
"(",
"key",
")",
"_cmd",
"=",
"b' '",
".",
"join",
"(",
"[",
"b'touch'",
",",
"key",
",",
"str",
"(",
"exptime",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"]",
")",
"cmd",
"=",
"_cmd",
"+",
"b'\\r\\n'",
"resp",
"=",
"yield",
"from",
"self",
".",
"_execute_simple_command",
"(",
"conn",
",",
"cmd",
")",
"if",
"resp",
"not",
"in",
"(",
"const",
".",
"TOUCHED",
",",
"const",
".",
"NOT_FOUND",
")",
":",
"raise",
"ClientException",
"(",
"'Memcached touch failed'",
",",
"resp",
")",
"return",
"resp",
"==",
"const",
".",
"TOUCHED"
] | 43.235294 | 17.470588 |
def sensor_bias_encode(self, axBias, ayBias, azBias, gxBias, gyBias, gzBias):
'''
Accelerometer and gyro biases.
axBias : Accelerometer X bias (m/s) (float)
ayBias : Accelerometer Y bias (m/s) (float)
azBias : Accelerometer Z bias (m/s) (float)
gxBias : Gyro X bias (rad/s) (float)
gyBias : Gyro Y bias (rad/s) (float)
gzBias : Gyro Z bias (rad/s) (float)
'''
return MAVLink_sensor_bias_message(axBias, ayBias, azBias, gxBias, gyBias, gzBias) | [
"def",
"sensor_bias_encode",
"(",
"self",
",",
"axBias",
",",
"ayBias",
",",
"azBias",
",",
"gxBias",
",",
"gyBias",
",",
"gzBias",
")",
":",
"return",
"MAVLink_sensor_bias_message",
"(",
"axBias",
",",
"ayBias",
",",
"azBias",
",",
"gxBias",
",",
"gyBias",
",",
"gzBias",
")"
] | 54.307692 | 33.076923 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.