def end(self):
"""
This method must be called after the operation returns.
Note that this method is not to be invoked by the user; it is invoked
by the implementation of the :class:`~zhmcclient.Session` class.
If the statistics keeper holding this time statistics is enabled, this
method takes the current time, calculates the duration of the operation
since the last call to :meth:`~zhmcclient.TimeStats.begin`, and updates
the time statistics to reflect the new operation.
If the statistics keeper holding this time statistics is disabled,
this method does nothing, in order to save resources.
If this method is called without a preceding call to
:meth:`~zhmcclient.TimeStats.begin`, a :exc:`py:RuntimeError` is
raised.
Raises:
RuntimeError
"""
if self.keeper.enabled:
if self._begin_time is None:
raise RuntimeError("end() called without preceding begin()")
dt = time.time() - self._begin_time
self._begin_time = None
self._count += 1
self._sum += dt
if dt > self._max:
self._max = dt
if dt < self._min:
            self._min = dt
def get(self, name_or_uri):
"""
Get the role by its URI or Name.
Args:
name_or_uri:
Can be either the Name or the URI.
Returns:
dict: Role
"""
name_or_uri = quote(name_or_uri)
    return self._client.get(name_or_uri)
def binary_float_to_decimal_float(number: Union[float, str]) -> float:
"""
Convert binary floating point to decimal floating point.
:param number: Binary floating point.
:return: Decimal floating point representation of binary floating point.
"""
if isinstance(number, str):
if number[0] == '-':
n_sign = -1
else:
n_sign = 1
elif isinstance(number, float):
n_sign = np.sign(number)
number = str(number)
deci = 0
for ndx, val in enumerate(number.split('.')[-1]):
deci += float(val) / 2**(ndx+1)
deci *= n_sign
    return deci
def run_calibration(self, interval, applycal):
"""Runs the calibration operation with the current settings
:param interval: The repetition interval between stimuli presentations (seconds)
:type interval: float
:param applycal: Whether to apply a previous saved calibration to this run
:type applycal: bool
:returns: :py:class:`threading.Thread` -- the acquisition thread
"""
if self.selected_calibration_index == 2:
self.tone_calibrator.apply_calibration(applycal)
self.tone_calibrator.setup(interval)
return self.tone_calibrator.run()
else:
self.bs_calibrator.set_stim_by_index(self.selected_calibration_index)
self.bs_calibrator.apply_calibration(applycal)
self.bs_calibrator.setup(interval)
        return self.bs_calibrator.run()
def convert_reset_type(value):
"""! @brief Convert a reset_type session option value to the Target.ResetType enum.
@param value The value of the reset_type session option.
@exception ValueError Raised if an unknown reset_type value is passed.
"""
value = value.lower()
if value not in RESET_TYPE_MAP:
raise ValueError("unexpected value for reset_type option ('%s')" % value)
    return RESET_TYPE_MAP[value]
def mutate_add_connection(self, config):
"""
Attempt to add a new connection, the only restriction being that the output
node cannot be one of the network input pins.
"""
possible_outputs = list(iterkeys(self.nodes))
out_node = choice(possible_outputs)
possible_inputs = possible_outputs + config.input_keys
in_node = choice(possible_inputs)
# Don't duplicate connections.
key = (in_node, out_node)
if key in self.connections:
# TODO: Should this be using mutation to/from rates? Hairy to configure...
if config.check_structural_mutation_surer():
self.connections[key].enabled = True
return
# Don't allow connections between two output nodes
if in_node in config.output_keys and out_node in config.output_keys:
return
# No need to check for connections between input nodes:
# they cannot be the output end of a connection (see above).
# For feed-forward networks, avoid creating cycles.
if config.feed_forward and creates_cycle(list(iterkeys(self.connections)), key):
return
cg = self.create_connection(config, in_node, out_node)
    self.connections[cg.key] = cg
def ngfileupload_partfactory(part_number=None, content_length=None,
uploaded_file=None):
"""Part factory for ng-file-upload.
:param part_number: The part number. (Default: ``None``)
:param content_length: The content length. (Default: ``None``)
:param uploaded_file: The upload request. (Default: ``None``)
    :returns: The content length, part number, stream, HTTP Content-Type
        header, and two trailing ``None`` values.
"""
return content_length, part_number, uploaded_file.stream, \
        uploaded_file.headers.get('Content-Type'), None, None
def hook(name=None, *args, **kwargs):
"""Decorator to register the function as a hook
"""
def decorator(f):
if not hasattr(f, "hooks"):
f.hooks = []
f.hooks.append((name or f.__name__, args, kwargs))
return f
    return decorator
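# Usage sketch (illustrative), assuming the hook decorator above is in scope:
# each application appends a (name, args, kwargs) tuple to the function's hooks
# list, which a framework can later inspect to dispatch events.
@hook("on_save", priority=1)
def handler(doc):
    return doc

print(handler.hooks)  # [('on_save', (), {'priority': 1})]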
def calculate_concat_output_shapes(operator):
'''
Allowed input/output patterns are
1. [N_1, C, H, W], ..., [N_n, C, H, W] ---> [N_1 + ... + N_n, C, H, W]
2. [N, C_1, H, W], ..., [N, C_n, H, W] ---> [N, C_1 + ... + C_n, H, W]
'''
check_input_and_output_numbers(operator, input_count_range=[1, None], output_count_range=[1, 1])
output_shape = copy.deepcopy(operator.inputs[0].type.shape)
dims = []
for variable in operator.inputs:
if variable.type.shape[0] != 'None' and variable.type.shape[0] != output_shape[0]:
raise RuntimeError('Only dimensions along C-axis can be different')
if variable.type.shape[2] != 'None' and variable.type.shape[2] != output_shape[2]:
raise RuntimeError('Only dimensions along C-axis can be different')
if variable.type.shape[3] != 'None' and variable.type.shape[3] != output_shape[3]:
raise RuntimeError('Only dimensions along C-axis can be different')
dims.append(variable.type.shape[1])
output_shape[1] = 'None' if 'None' in dims else sum(dims)
    operator.outputs[0].type.shape = output_shape
def add_format(self, format_id, number, entry_type, description):
"""
Add a format line to the header.
Arguments:
format_id (str): The id of the format line
number (str): Integer or any of [A,R,G,.]
entry_type (str): Any of [Integer,Float,Flag,Character,String]
description (str): A description of the info line
"""
format_line = '##FORMAT=<ID={0},Number={1},Type={2},Description="{3}">'.format(
format_id, number, entry_type, description
)
logger.info("Adding format line to vcf: {0}".format(format_line))
self.parse_meta_data(format_line)
    return
def load(self, name, skip='', skip_original='', default_template=None, layout=None):
"""Loads a template."""
filename = self.resolve_path(name, skip=skip, skip_original=skip_original,
default_template=default_template)
if not filename:
raise ParseError("Can't find template %s." % name)
with self.lock:
if layout:
_filename = filename + '$$' + layout
mtime = os.path.getmtime(filename)
else:
mtime = None
_filename = filename
if self.cache:
if not self.use_tmp:
#check current template file expiration
t = self.templates.get(_filename, mtime=mtime)
if t:
                    #check dependent templates expiration
check = self.check_expiration(t)
if not check:
return t
else:
#get cached file from disk
pass
t = self._create_template(name, filename, layout=layout)
if self.cache:
if not self.use_tmp:
self.templates.set(_filename, t, mtime)
else:
#save cached file to disk
pass
        return t
def _countWhereGreaterEqualInRows(sparseMatrix, rows, threshold):
"""
Like countWhereGreaterOrEqual, but for an arbitrary selection of rows, and
without any column filtering.
"""
return sum(sparseMatrix.countWhereGreaterOrEqual(row, row+1,
0, sparseMatrix.nCols(),
threshold)
               for row in rows)
def _write(self, s):
"""Write a string out to the SSL socket fully."""
try:
write = self.sock.write
except AttributeError:
# Works around a bug in python socket library
raise IOError('Socket closed')
else:
while s:
n = write(s)
if not n:
raise IOError('Socket closed')
            s = s[n:]
def _make_skel_func(code, cell_count, base_globals=None):
""" Creates a skeleton function object that contains just the provided
code and the correct number of cells in func_closure. All other
func attributes (e.g. func_globals) are empty.
"""
if base_globals is None:
base_globals = {}
base_globals['__builtins__'] = __builtins__
closure = (
tuple(_make_empty_cell() for _ in range(cell_count))
if cell_count >= 0 else
None
)
    return types.FunctionType(code, base_globals, None, None, closure)
def _set_topology_group(self, v, load=False):
"""
Setter method for topology_group, mapped from YANG variable /topology_group (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_topology_group is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_topology_group() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("topology_group_id",topology_group.topology_group, yang_name="topology-group", rest_name="topology-group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='topology-group-id', extensions={u'tailf-common': {u'info': u'Configure topology vlan group for L2 protocols', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'145', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'TopologyGroupCallpoint', u'cli-mode-name': u'config-topo-group-$(topology-group-id)'}}), is_container='list', yang_name="topology-group", rest_name="topology-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure topology vlan group for L2 protocols', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'145', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'TopologyGroupCallpoint', u'cli-mode-name': u'config-topo-group-$(topology-group-id)'}}, namespace='urn:brocade.com:mgmt:brocade-topology-group', defining_module='brocade-topology-group', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """topology_group must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("topology_group_id",topology_group.topology_group, yang_name="topology-group", rest_name="topology-group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='topology-group-id', extensions={u'tailf-common': {u'info': u'Configure topology vlan group for L2 protocols', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'145', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'TopologyGroupCallpoint', u'cli-mode-name': u'config-topo-group-$(topology-group-id)'}}), is_container='list', yang_name="topology-group", rest_name="topology-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure topology vlan group for L2 protocols', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'145', u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'TopologyGroupCallpoint', u'cli-mode-name': u'config-topo-group-$(topology-group-id)'}}, namespace='urn:brocade.com:mgmt:brocade-topology-group', defining_module='brocade-topology-group', yang_type='list', is_config=True)""",
})
self.__topology_group = t
if hasattr(self, '_set'):
        self._set()
def delete_directory(self, dirname):
"""Delete a directory (and contents) from the bucket.
Parameters
----------
dirname : `str`
Name of the directory, relative to ``bucket_root/``.
Raises
------
RuntimeError
Raised when there are no objects to delete (directory
does not exist).
"""
key = os.path.join(self._bucket_root, dirname)
if not key.endswith('/'):
key += '/'
key_objects = [{'Key': obj.key}
for obj in self._bucket.objects.filter(Prefix=key)]
if len(key_objects) == 0:
msg = 'No objects in bucket directory {}'.format(dirname)
raise RuntimeError(msg)
delete_keys = {'Objects': key_objects}
# based on http://stackoverflow.com/a/34888103
s3 = self._session.resource('s3')
r = s3.meta.client.delete_objects(Bucket=self._bucket.name,
Delete=delete_keys)
self._logger.debug(r)
if 'Errors' in r:
            raise S3Error('S3 could not delete {0}'.format(key))
def get_setters_property_name(node):
"""Get the name of the property that the given node is a setter for.
:param node: The node to get the property name for.
:type node: str
:rtype: str or None
:returns: The name of the property that the node is a setter for,
or None if one could not be found.
"""
decorators = node.decorators.nodes if node.decorators else []
for decorator in decorators:
if (
isinstance(decorator, astroid.Attribute)
and decorator.attrname == "setter"
and isinstance(decorator.expr, astroid.Name)
):
return decorator.expr.name
    return None
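# Hedged example (assumes astroid is installed, in a version contemporary with
# this snippet, and that get_setters_property_name is in scope): the '#@'
# marker selects the setter definition, whose '@x.setter' decorator is an
# Attribute node yielding the property name 'x'.
import astroid

node = astroid.extract_node('''
class C(object):
    @property
    def x(self):
        return 1
    @x.setter
    def x(self, value):  #@
        pass
''')
print(get_setters_property_name(node))  # x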
def insert_multiple(self, documents):
"""
Insert multiple documents into the table.
:param documents: a list of documents to insert
:returns: a list containing the inserted documents' IDs
"""
doc_ids = []
data = self._read()
for doc in documents:
doc_id = self._get_doc_id(doc)
doc_ids.append(doc_id)
data[doc_id] = dict(doc)
self._write(data)
    return doc_ids
def _reset(self):
""" Rebuilds structure for AST and resets internal data.
"""
self._filename = None
self._block_map = {}
self._ast = []
self._ast.append(None) # header
self._ast.append([]) # options list
    self._ast.append([])
def from_base64_data(cls, **kwargs):
    '''Load a :class:`StdModel` from possibly base64-encoded data.
This method is used to load models from data obtained from the :meth:`tojson`
method.'''
o = cls()
meta = cls._meta
pkname = meta.pkname()
for name, value in iteritems(kwargs):
if name == pkname:
field = meta.pk
elif name in meta.dfields:
field = meta.dfields[name]
else:
continue
value = field.to_python(value)
setattr(o, field.attname, value)
    return o
def remove_declaration(self, decl):
"""
Removes declaration from members list.
:param decl: declaration to be removed
:type decl: :class:`declaration_t`
"""
del self.declarations[self.declarations.index(decl)]
    decl.cache.reset()
def _set_show_mpls_dynamic_bypass(self, v, load=False):
"""
Setter method for show_mpls_dynamic_bypass, mapped from YANG variable /brocade_mpls_rpc/show_mpls_dynamic_bypass (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_mpls_dynamic_bypass is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_show_mpls_dynamic_bypass() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=show_mpls_dynamic_bypass.show_mpls_dynamic_bypass, is_leaf=True, yang_name="show-mpls-dynamic-bypass", rest_name="show-mpls-dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsDynamicBypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """show_mpls_dynamic_bypass must be of a type compatible with rpc""",
'defined-type': "rpc",
'generated-type': """YANGDynClass(base=show_mpls_dynamic_bypass.show_mpls_dynamic_bypass, is_leaf=True, yang_name="show-mpls-dynamic-bypass", rest_name="show-mpls-dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsDynamicBypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
})
self.__show_mpls_dynamic_bypass = t
if hasattr(self, '_set'):
        self._set()
def start(self, exceptions):
"""Start the Heartbeat Checker.
:param list exceptions:
:return:
"""
if not self._interval:
return False
self._running.set()
with self._lock:
self._threshold = 0
self._reads_since_check = 0
self._writes_since_check = 0
self._exceptions = exceptions
LOGGER.debug('Heartbeat Checker Started')
    return self._start_new_timer()
def size_in_days(self):
"""Return the size of the period in days.
>>> period('month', '2012-2-29', 4).size_in_days
28
>>> period('year', '2012', 1).size_in_days
366
"""
unit, instant, length = self
if unit == DAY:
return length
if unit in [MONTH, YEAR]:
last_day = self.start.offset(length, unit).offset(-1, DAY)
return (last_day.date - self.start.date).days + 1
raise ValueError("Cannot calculate number of days in {0}".format(unit)) | [
"def",
"size_in_days",
"(",
"self",
")",
":",
"unit",
",",
"instant",
",",
"length",
"=",
"self",
"if",
"unit",
"==",
"DAY",
":",
"return",
"length",
"if",
"unit",
"in",
"[",
"MONTH",
",",
"YEAR",
"]",
":",
"last_day",
"=",
"self",
".",
"start",
".... | 31.588235 | 20.882353 |
def crop_on_centerpoint(self, image, width, height, ppoi=(0.5, 0.5)):
"""
Return a PIL Image instance cropped from `image`.
    The result has the aspect ratio `width` / `height` and is
sized down to `width`x`height`. Any 'excess pixels' are trimmed away
in respect to the pixel of `image` that corresponds to `ppoi` (Primary
Point of Interest).
`image`: A PIL Image instance
`width`: Integer, width of the image to return (in pixels)
`height`: Integer, height of the image to return (in pixels)
`ppoi`: A 2-tuple of floats with values greater than 0 and less than 1
These values are converted into a cartesian coordinate that
signifies the 'center pixel' which the crop will center on
(to trim the excess from the 'long side').
Determines whether to trim away pixels from either the left/right or
top/bottom sides by comparing the aspect ratio of `image` vs the
aspect ratio of `width`x`height`.
Will trim from the left/right sides if the aspect ratio of `image`
is greater-than-or-equal-to the aspect ratio of `width`x`height`.
    Will trim from the top/bottom sides if the aspect ratio of `image`
    is less-than the aspect ratio of `width`x`height`.
    Similar to Kevin Cazabon's ImageOps.fit method but uses the
    ppoi value as an absolute centerpoint (as opposed to a
    percentage to trim off the 'long sides').
"""
ppoi_x_axis = int(image.size[0] * ppoi[0])
ppoi_y_axis = int(image.size[1] * ppoi[1])
center_pixel_coord = (ppoi_x_axis, ppoi_y_axis)
# Calculate the aspect ratio of `image`
orig_aspect_ratio = float(
image.size[0]
) / float(
image.size[1]
)
crop_aspect_ratio = float(width) / float(height)
# Figure out if we're trimming from the left/right or top/bottom
if orig_aspect_ratio >= crop_aspect_ratio:
# `image` is wider than what's needed,
# crop from left/right sides
orig_crop_width = int(
(crop_aspect_ratio * float(image.size[1])) + 0.5
)
orig_crop_height = image.size[1]
crop_boundary_top = 0
crop_boundary_bottom = orig_crop_height
crop_boundary_left = center_pixel_coord[0] - (orig_crop_width // 2)
crop_boundary_right = crop_boundary_left + orig_crop_width
if crop_boundary_left < 0:
crop_boundary_left = 0
crop_boundary_right = crop_boundary_left + orig_crop_width
elif crop_boundary_right > image.size[0]:
crop_boundary_right = image.size[0]
crop_boundary_left = image.size[0] - orig_crop_width
else:
# `image` is taller than what's needed,
# crop from top/bottom sides
orig_crop_width = image.size[0]
orig_crop_height = int(
(float(image.size[0]) / crop_aspect_ratio) + 0.5
)
crop_boundary_left = 0
crop_boundary_right = orig_crop_width
crop_boundary_top = center_pixel_coord[1] - (orig_crop_height // 2)
crop_boundary_bottom = crop_boundary_top + orig_crop_height
if crop_boundary_top < 0:
crop_boundary_top = 0
crop_boundary_bottom = crop_boundary_top + orig_crop_height
elif crop_boundary_bottom > image.size[1]:
crop_boundary_bottom = image.size[1]
crop_boundary_top = image.size[1] - orig_crop_height
# Cropping the image from the original image
cropped_image = image.crop(
(
crop_boundary_left,
crop_boundary_top,
crop_boundary_right,
crop_boundary_bottom
)
)
# Resizing the newly cropped image to the size specified
# (as determined by `width`x`height`)
return cropped_image.resize(
(width, height),
Image.ANTIALIAS
    )
def simulate_measurement(self, index: int) -> bool:
"""Simulates a single qubit measurement in the computational basis.
Args:
index: Which qubit is measured.
Returns:
True iff the measurement result corresponds to the |1> state.
"""
args = self._shard_num_args({'index': index})
prob_one = np.sum(self._pool.map(_one_prob_per_shard, args))
result = bool(np.random.random() <= prob_one)
args = self._shard_num_args({
'index': index,
'result': result,
'prob_one': prob_one
})
self._pool.map(_collapse_state, args)
    return result
def _process_qtls_genomic_location(
self, raw, txid, build_id, build_label, common_name, limit=None):
"""
        Processes QTL genomic location data from the raw file.
        Triples created: QTL individuals, QTL-to-trait associations,
        and genomic feature locations.
:param limit:
:return:
"""
if self.test_mode:
graph = self.testgraph
else:
graph = self.graph
model = Model(graph)
line_counter = 0
geno = Genotype(graph)
# assume that chrs get added to the genome elsewhere
taxon_curie = 'NCBITaxon:' + txid
eco_id = self.globaltt['quantitative trait analysis evidence']
LOG.info("Processing QTL locations for %s from %s", taxon_curie, raw)
with gzip.open(raw, 'rt', encoding='ISO-8859-1') as tsvfile:
reader = csv.reader(tsvfile, delimiter="\t")
for row in reader:
line_counter += 1
if re.match(r'^#', ' '.join(row)):
continue
(chromosome, qtl_source, qtl_type, start_bp, stop_bp, frame, strand,
score, attr) = row
example = '''
Chr.Z Animal QTLdb Production_QTL 33954873 34023581...
QTL_ID=2242;Name="Spleen percentage";Abbrev="SPLP";PUBMED_ID=17012160;trait_ID=2234;
trait="Spleen percentage";breed="leghorn";"FlankMarkers=ADL0022";VTO_name="spleen mass";
MO_name="spleen weight to body weight ratio";Map_Type="Linkage";Model="Mendelian";
Test_Base="Chromosome-wise";Significance="Significant";P-value="<0.05";F-Stat="5.52";
Variance="2.94";Dominance_Effect="-0.002";Additive_Effect="0.01
'''
str(example)
# make dictionary of attributes
# keys are:
# QTL_ID,Name,Abbrev,PUBMED_ID,trait_ID,trait,FlankMarkers,
# VTO_name,Map_Type,Significance,P-value,Model,
# Test_Base,Variance, Bayes-value,PTO_name,gene_IDsrc,peak_cM,
# CMO_name,gene_ID,F-Stat,LOD-score,Additive_Effect,
# Dominance_Effect,Likelihood_Ratio,LS-means,Breed,
# trait (duplicate with Name),Variance,Bayes-value,
# F-Stat,LOD-score,Additive_Effect,Dominance_Effect,
# Likelihood_Ratio,LS-means
# deal with poorly formed attributes
if re.search(r'"FlankMarkers";', attr):
attr = re.sub(r'FlankMarkers;', '', attr)
attr_items = re.sub(r'"', '', attr).split(";")
bad_attrs = set()
for attributes in attr_items:
if not re.search(r'=', attributes):
# remove this attribute from the list
bad_attrs.add(attributes)
attr_set = set(attr_items) - bad_attrs
attribute_dict = dict(item.split("=") for item in attr_set)
qtl_num = attribute_dict.get('QTL_ID')
if self.test_mode and int(qtl_num) not in self.test_ids:
continue
# make association between QTL and trait based on taxon
qtl_id = common_name + 'QTL:' + str(qtl_num)
model.addIndividualToGraph(qtl_id, None, self.globaltt['QTL'])
geno.addTaxon(taxon_curie, qtl_id)
#
trait_id = 'AQTLTrait:' + attribute_dict.get('trait_ID')
# if pub is in attributes, add it to the association
pub_id = None
if 'PUBMED_ID' in attribute_dict.keys():
pub_id = attribute_dict.get('PUBMED_ID')
if re.match(r'ISU.*', pub_id):
pub_id = 'AQTLPub:' + pub_id.strip()
reference = Reference(graph, pub_id)
else:
pub_id = 'PMID:' + pub_id.strip()
reference = Reference(
graph, pub_id, self.globaltt['journal article'])
reference.addRefToGraph()
# Add QTL to graph
assoc = G2PAssoc(
graph, self.name, qtl_id, trait_id,
self.globaltt['is marker for'])
assoc.add_evidence(eco_id)
assoc.add_source(pub_id)
if 'P-value' in attribute_dict.keys():
scr = re.sub(r'<', '', attribute_dict.get('P-value'))
if ',' in scr:
scr = re.sub(r',', '.', scr)
if scr.isnumeric():
score = float(scr)
assoc.set_score(score)
assoc.add_association_to_graph()
# TODO make association to breed
# (which means making QTL feature in Breed background)
# get location of QTL
chromosome = re.sub(r'Chr\.', '', chromosome)
chrom_id = makeChromID(chromosome, taxon_curie, 'CHR')
chrom_in_build_id = makeChromID(chromosome, build_id, 'MONARCH')
geno.addChromosomeInstance(
chromosome, build_id, build_label, chrom_id)
qtl_feature = Feature(graph, qtl_id, None, self.globaltt['QTL'])
if start_bp == '':
start_bp = None
qtl_feature.addFeatureStartLocation(
start_bp, chrom_in_build_id, strand,
[self.globaltt['FuzzyPosition']])
if stop_bp == '':
stop_bp = None
qtl_feature.addFeatureEndLocation(
stop_bp, chrom_in_build_id, strand,
[self.globaltt['FuzzyPosition']])
qtl_feature.addTaxonToFeature(taxon_curie)
qtl_feature.addFeatureToGraph()
if not self.test_mode and limit is not None and line_counter > limit:
break
# LOG.warning("Bad attribute flags in this file") # what does this even mean??
LOG.info("Done with QTL genomic mappings for %s", taxon_curie)
    return
def get_start_and_end_time(self, ref=None):
"""Specific function to get start time and end time for CalendarDaterange
:param ref: time in seconds
:type ref: int
:return: tuple with start and end time
:rtype: tuple (int, int)
"""
return (get_start_of_day(self.syear, int(self.smon), self.smday),
            get_end_of_day(self.eyear, int(self.emon), self.emday))
def services(self, *args, **kwargs):
"""Retrieve services belonging to this scope.
See :class:`pykechain.Client.services` for available parameters.
.. versionadded:: 1.13
"""
    return self._client.services(*args, scope=self.id, **kwargs)
def _prevent_core_dump(cls):
"""Prevent the process from generating a core dump."""
try:
# Try to get the current limit
resource.getrlimit(resource.RLIMIT_CORE)
except ValueError:
# System doesn't support the RLIMIT_CORE resource limit
return
else:
# Set the soft and hard limits for core dump size to zero
        resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
def get_genus_type(self):
"""Gets the genus type of this object.
return: (osid.type.Type) - the genus type of this object
compliance: mandatory - This method must be implemented.
"""
if self._my_genus_type_map is None:
url_path = '/handcar/services/learning/types/' + self._my_map['genusTypeId']
# url_str = self._base_url + '/types/' + self._my_map['genusTypeId']
# self._my_genus_type_map = self._load_json(url_str)
self._my_genus_type_map = self._get_request(url_path)
    return Type(self._my_genus_type_map)
def get_activity_objective_bank_session(self, proxy):
"""Gets the session for retrieving activity to objective bank mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.learning.ActivityObjectiveBankSession) - an
``ActivityObjectiveBankSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_activity_objective_bank()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_activity_objective_bank()`` is ``true``.*
"""
if not self.supports_activity_objective_bank():
raise errors.Unimplemented()
# pylint: disable=no-member
    return sessions.ActivityObjectiveBankSession(proxy=proxy, runtime=self._runtime)
def get_languages_from_item(ct_item, item):
"""
Get the languages configured for the current item
:param ct_item:
:param item:
:return:
"""
try:
item_lan = TransItemLanguage.objects.filter(content_type__pk=ct_item.id, object_id=item.id).get()
languages = [lang.code for lang in item_lan.languages.all()]
return languages
except TransItemLanguage.DoesNotExist:
        return []
def train(self, epochs=2000, training_iterations=5):
'''
Parameters
----------
epochs : int
Number of epochs to train for. Default is 2000.
training_iterations : int
        Number of times to repeat the training process. Default is 5.
Returns
-------
A trained word2vec model.
'''
self._scan_and_build_vocab()
for _ in range(training_iterations):
self.model.train(CorpusAdapterForGensim.get_sentences(self.corpus),
total_examples=self.model.corpus_count,
epochs=epochs)
    return self.model
def mask(self, image):
""" self.mask setter
Parameters
----------
image: str or img-like object.
See NeuroImage constructor docstring.
"""
        if image is None:
            self._mask = None
            return
try:
mask = load_mask(image)
except Exception as exc:
raise Exception('Could not load mask image {}.'.format(image)) from exc
else:
            self._mask = mask
def get_angle_addr(value):
""" angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr
obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS]
"""
angle_addr = AngleAddr()
if value[0] in CFWS_LEADER:
token, value = get_cfws(value)
angle_addr.append(token)
if not value or value[0] != '<':
raise errors.HeaderParseError(
"expected angle-addr but found '{}'".format(value))
angle_addr.append(ValueTerminal('<', 'angle-addr-start'))
value = value[1:]
# Although it is not legal per RFC5322, SMTP uses '<>' in certain
# circumstances.
if value[0] == '>':
angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
angle_addr.defects.append(errors.InvalidHeaderDefect(
"null addr-spec in angle-addr"))
value = value[1:]
return angle_addr, value
try:
token, value = get_addr_spec(value)
except errors.HeaderParseError:
try:
token, value = get_obs_route(value)
angle_addr.defects.append(errors.ObsoleteHeaderDefect(
"obsolete route specification in angle-addr"))
except errors.HeaderParseError:
raise errors.HeaderParseError(
"expected addr-spec or obs-route but found '{}'".format(value))
angle_addr.append(token)
token, value = get_addr_spec(value)
angle_addr.append(token)
if value and value[0] == '>':
value = value[1:]
else:
angle_addr.defects.append(errors.InvalidHeaderDefect(
"missing trailing '>' on angle-addr"))
angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
if value and value[0] in CFWS_LEADER:
token, value = get_cfws(value)
angle_addr.append(token)
    return angle_addr, value
def init_model(self, clear_registry: bool = True) -> None:
"""Load the control file of the actual |Element| object, initialise
its |Model| object, build the required connections via (an eventually
overridden version of) method |Model.connect| of class |Model|, and
update its derived parameter values via calling (an eventually
overridden version) of method |Parameters.update| of class |Parameters|.
See method |HydPy.init_models| of class |HydPy| and property
|model| of class |Element| fur further information.
"""
try:
with hydpy.pub.options.warnsimulationstep(False):
info = hydpy.pub.controlmanager.load_file(
element=self, clear_registry=clear_registry)
self.model = info['model']
self.model.parameters.update()
except OSError:
if hydpy.pub.options.warnmissingcontrolfile:
warnings.warn(
f'Due to a missing or no accessible control file, no '
f'model could be initialised for element `{self.name}`')
else:
objecttools.augment_excmessage(
f'While trying to initialise the model '
f'object of element `{self.name}`')
except BaseException:
objecttools.augment_excmessage(
f'While trying to initialise the model '
                f'object of element `{self.name}`')
def main():
"""
NAME
scalc.py
DESCRIPTION
calculates Sb from VGP Long,VGP Lat,Directional kappa,Site latitude data
SYNTAX
scalc -h [command line options] [< standard input]
INPUT
takes space delimited files with PLong, PLat,[kappa, N_site, slat]
OPTIONS
-h prints help message and quits
-f FILE: specify input file
-c cutoff: specify VGP colatitude cutoff value
-k cutoff: specify kappa cutoff
        -v : use the Vandamme criterion
-a: use antipodes of reverse data: default is to use only normal
-r use only reverse data, default is False
-b: do a bootstrap for confidence
-p: do relative to principle axis
-n: set minimum n for samples (specimens) per site
-mm97: correct for within site scatter (McElhinny & McFadden, 1997)
NOTES
if kappa, N_site, lat supplied, will consider within site scatter
OUTPUT
N Sb Sb_lower Sb_upper Co-lat. Cutoff
"""
kappa, cutoff = 0, 180
rev, anti, boot = 0, 0, 0
spin,n,v,mm97 = 0,0,0,0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind = sys.argv.index("-f")
in_file = sys.argv[ind + 1]
vgp_df=pd.read_csv(in_file,delim_whitespace=True,header=None)
else:
vgp_df=pd.read_csv(sys.stdin,delim_whitespace=True,header=None)
if '-c' in sys.argv:
ind = sys.argv.index('-c')
cutoff = float(sys.argv[ind + 1])
if '-k' in sys.argv:
ind = sys.argv.index('-k')
kappa = float(sys.argv[ind + 1])
if '-n' in sys.argv:
ind = sys.argv.index('-n')
n = int(sys.argv[ind + 1])
if '-a' in sys.argv: anti = 1
if '-r' in sys.argv: rev=1
if '-b' in sys.argv: boot = 1
if '-v' in sys.argv: v = 1
if '-p' in sys.argv: spin = 1
if '-mm97' in sys.argv: mm97=1
#
#
if len(list(vgp_df.columns))==2:
vgp_df.columns=['vgp_lon','vgp_lat']
vgp_df['dir_k'],vgp_df['dir_n_samples'],vgp_df['lat']=0,0,0
else:
vgp_df.columns=['vgp_lon','vgp_lat','dir_k','dir_n_samples','lat']
N,S_B,low,high,cutoff=pmag.scalc_vgp_df(vgp_df,anti=anti,rev=rev,cutoff=cutoff,kappa=kappa,n=n,spin=spin,v=v,boot=boot,mm97=mm97)
if high!=0:
print(N, '%7.1f %7.1f %7.1f %7.1f ' % (S_B, low, high, cutoff))
else:
        print(N, '%7.1f %7.1f ' % (S_B, cutoff))
def _create_dns_list(self, dns):
"""
:param dns:
:return:
"""
if not dns:
return None
dns_list = []
if isinstance(dns, six.string_types):
if is_valid_ip(dns):
dns_list.append(dns)
else:
raise ValueError("dns is required to be a valid ip adress. {0} was passed.".format(dns))
elif isinstance(dns, list):
for dns_entry in dns:
if is_valid_ip(dns_entry):
dns_list.append(dns_entry)
else:
raise ValueError("dns is required to be a valid ip adress. {0} was passed.".format(dns))
else:
raise ValueError("dns and dns search must be a list or string. {0} was passed.".format(dns))
    return dns_list
def plot_cumulative_density(self, **kwargs):
"""
Plots a pretty figure of {0}.{1}
Matplotlib plot arguments can be passed in inside the kwargs, plus
Parameters
-----------
show_censors: bool
place markers at censorship events. Default: False
censor_styles: bool
If show_censors, this dictionary will be passed into the plot call.
ci_alpha: bool
the transparency level of the confidence interval. Default: 0.3
ci_force_lines: bool
force the confidence intervals to be line plots (versus default shaded areas). Default: False
ci_show: bool
show confidence intervals. Default: True
ci_legend: bool
if ci_force_lines is True, this is a boolean flag to add the lines' labels to the legend. Default: False
at_risk_counts: bool
show group sizes at time points. See function ``add_at_risk_counts`` for details. Default: False
loc: slice
specify a time-based subsection of the curves to plot, ex:
>>> model.plot(loc=slice(0.,10.))
will plot the time values between t=0. and t=10.
iloc: slice
specify a location-based subsection of the curves to plot, ex:
>>> model.plot(iloc=slice(0,10))
will plot the first 10 time points.
invert_y_axis: bool
boolean to invert the y-axis, useful to show cumulative graphs instead of survival graphs. (Deprecated, use ``plot_cumulative_density()``)
Returns
-------
ax:
a pyplot axis object
"""
return _plot_estimate(
self,
estimate=self.cumulative_density_,
confidence_intervals=self.confidence_interval_cumulative_density_,
**kwargs
    )
def _maybe_from_pandas(data, feature_names, feature_types):
""" Extract internal data from pd.DataFrame """
try:
import pandas as pd
except ImportError:
return data, feature_names, feature_types
if not isinstance(data, pd.DataFrame):
return data, feature_names, feature_types
dtypes = data.dtypes
if not all(dtype.name in ('int64', 'float64', 'bool') for dtype in dtypes):
raise ValueError('DataFrame.dtypes must be int, float or bool')
if feature_names is None:
feature_names = data.columns.format()
if feature_types is None:
mapper = {'int64': 'int', 'float64': 'q', 'bool': 'i'}
feature_types = [mapper[dtype.name] for dtype in dtypes]
data = data.values.astype('float')
    return data, feature_names, feature_types
def is_temple_project():
"""Raises `InvalidTempleProjectError` if repository is not a temple project"""
if not os.path.exists(temple.constants.TEMPLE_CONFIG_FILE):
msg = 'No {} file found in repository.'.format(temple.constants.TEMPLE_CONFIG_FILE)
        raise temple.exceptions.InvalidTempleProjectError(msg)
def density_2d(self, x, y, rho0, Ra, Rs, center_x=0, center_y=0):
"""
projected density
:param x:
:param y:
:param rho0:
:param Ra:
:param Rs:
:param center_x:
:param center_y:
:return:
"""
Ra, Rs = self._sort_ra_rs(Ra, Rs)
x_ = x - center_x
y_ = y - center_y
r = np.sqrt(x_**2 + y_**2)
sigma0 = self.rho2sigma(rho0, Ra, Rs)
sigma = sigma0 * Ra * Rs / (Rs - Ra) * (1 / np.sqrt(Ra ** 2 + r ** 2) - 1 / np.sqrt(Rs ** 2 + r ** 2))
    return sigma
def is_url_allowed(url):
""" Return ``True`` if ``url`` is not in ``blacklist``.
:rtype: bool
"""
blacklist = [
r'\.ttf', r'\.woff', r'fonts\.googleapis\.com', r'\.png', r'\.jpe?g', r'\.gif',
r'\.svg'
]
for ft in blacklist:
if re.search(ft, url):
return False
    return True
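# Illustrative calls (assume `re` is imported at module level in the source):
# font and image URLs match a blacklist pattern and are rejected.
print(is_url_allowed('https://example.com/page'))      # True
print(is_url_allowed('https://example.com/logo.png'))  # False (matches r'\.png')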
def execute_sql_statements(
ctask, query_id, rendered_query, return_results=True, store_results=False,
user_name=None, session=None, start_time=None,
):
"""Executes the sql query returns the results."""
if store_results and start_time:
# only asynchronous queries
stats_logger.timing(
'sqllab.query.time_pending', now_as_float() - start_time)
query = get_query(query_id, session)
payload = dict(query_id=query_id)
database = query.database
db_engine_spec = database.db_engine_spec
db_engine_spec.patch()
if store_results and not results_backend:
raise SqlLabException("Results backend isn't configured.")
# Breaking down into multiple statements
parsed_query = ParsedQuery(rendered_query)
statements = parsed_query.get_statements()
logging.info(f'Executing {len(statements)} statement(s)')
logging.info("Set query to 'running'")
query.status = QueryStatus.RUNNING
query.start_running_time = now_as_float()
engine = database.get_sqla_engine(
schema=query.schema,
nullpool=True,
user_name=user_name,
source=sources.get('sql_lab', None),
)
# Sharing a single connection and cursor across the
# execution of all statements (if many)
with closing(engine.raw_connection()) as conn:
with closing(conn.cursor()) as cursor:
statement_count = len(statements)
for i, statement in enumerate(statements):
# TODO CHECK IF STOPPED
msg = f'Running statement {i+1} out of {statement_count}'
logging.info(msg)
query.set_extra_json_key('progress', msg)
session.commit()
try:
cdf = execute_sql_statement(
statement, query, user_name, session, cursor)
msg = f'Running statement {i+1} out of {statement_count}'
except Exception as e:
msg = str(e)
if statement_count > 1:
msg = f'[Statement {i+1} out of {statement_count}] ' + msg
payload = handle_query_error(msg, query, session, payload)
return payload
# Success, updating the query entry in database
query.rows = cdf.size
query.progress = 100
query.set_extra_json_key('progress', None)
query.status = QueryStatus.SUCCESS
if query.select_as_cta:
query.select_sql = database.select_star(
query.tmp_table_name,
limit=query.limit,
schema=database.force_ctas_schema,
show_cols=False,
latest_partition=False)
query.end_time = now_as_float()
payload.update({
'status': query.status,
'data': cdf.data if cdf.data else [],
'columns': cdf.columns if cdf.columns else [],
'query': query.to_dict(),
})
if store_results:
key = str(uuid.uuid4())
logging.info(f'Storing results in results backend, key: {key}')
with stats_timing('sqllab.query.results_backend_write', stats_logger):
json_payload = json.dumps(
payload, default=json_iso_dttm_ser, ignore_nan=True)
cache_timeout = database.cache_timeout
if cache_timeout is None:
cache_timeout = config.get('CACHE_DEFAULT_TIMEOUT', 0)
results_backend.set(key, zlib_compress(json_payload), cache_timeout)
query.results_key = key
session.commit()
if return_results:
        return payload
def load_config_file(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME,
fallback_config_instance=None):
"""
Retrieve config information from file at default location.
If no config file is found a new one will be created either with ``fallback_config_instance``
as content or if none is provided with the result of ``get_default_backend_config``.
Args:
appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user specific
path information.
file_name (text_type, optional): Name of the config file. Defaults to
``DEFAULT_CONFIG_FILENAME``.
fallback_config_instance (ConfigParser): Backend config that is to be used to populate the
config file that is created if no pre-existing one can be found.
Returns:
        SafeConfigParser: Config loaded from file, either from the pre-existing config
file or the one created with fallback values.
"""
if not fallback_config_instance:
fallback_config_instance = backend_config_to_configparser(
get_default_backend_config(appdirs)
)
config = SafeConfigParser()
path = get_config_path(appdirs, file_name)
if not config.read(path):
config = write_config_file(
fallback_config_instance, appdirs=appdirs, file_name=file_name
)
    return config
def purge_deleted(
self, vault_name, location, custom_headers=None, raw=False, polling=True, **operation_config):
"""Permanently deletes the specified vault. aka Purges the deleted Azure
key vault.
:param vault_name: The name of the soft-deleted vault.
:type vault_name: str
:param location: The location of the soft-deleted vault.
:type location: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._purge_deleted_initial(
vault_name=vault_name,
location=location,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
def get_assets_by_repositories(self, repository_ids):
"""Gets the list of ``Assets`` corresponding to a list of ``Repository`` objects.
arg: repository_ids (osid.id.IdList): list of repository
``Ids``
return: (osid.repository.AssetList) - list of assets
raise: NullArgument - ``repository_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bins
asset_list = []
for repository_id in repository_ids:
asset_list += list(
self.get_assets_by_repository(repository_id))
    return objects.AssetList(asset_list)
def _keys2sls(self, keys, key2sl):
"""Convert an input key to a list of slices."""
sls = list()
if isinstance(keys, tuple):
for key in keys:
sls.append(key2sl(key))
else:
sls.append(key2sl(keys))
if len(sls) > self.ndim:
fstr = "expected <= {0.ndim} slice dimensions, got {1}"
raise ValueError(fstr.format(self, len(sls)))
return sls | [
"def",
"_keys2sls",
"(",
"self",
",",
"keys",
",",
"key2sl",
")",
":",
"sls",
"=",
"list",
"(",
")",
"if",
"isinstance",
"(",
"keys",
",",
"tuple",
")",
":",
"for",
"key",
"in",
"keys",
":",
"sls",
".",
"append",
"(",
"key2sl",
"(",
"key",
")",
... | 36.166667 | 12.416667 |
def rest(url, req="GET", data=None):
"""Main function to be called from this module.
    Send a request using method 'req' to the given url. The _rest() function
    will prepend base_url, so 'url' should be something like '/ips'.
"""
load_variables()
return _rest(base_url + url, req, data) | [
"def",
"rest",
"(",
"url",
",",
"req",
"=",
"\"GET\"",
",",
"data",
"=",
"None",
")",
":",
"load_variables",
"(",
")",
"return",
"_rest",
"(",
"base_url",
"+",
"url",
",",
"req",
",",
"data",
")"
] | 34.111111 | 19.666667 |
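A usage sketch for rest(), assuming load_variables() has already populated base_url from the environment; the endpoint and payload are illustrative only:

# Hypothetical calls; '/ips' mirrors the docstring's example endpoint.
ips = rest('/ips')                                       # GET  <base_url>/ips
created = rest('/ips', 'POST', data={'ip': '10.0.0.5'})  # POST with a body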
def persist_block(self,
block: 'BaseBlock'
) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
"""
Persist the given block's header and uncles.
Assumes all block transactions have been persisted already.
"""
with self.db.atomic_batch() as db:
return self._persist_block(db, block) | [
"def",
"persist_block",
"(",
"self",
",",
"block",
":",
"'BaseBlock'",
")",
"->",
"Tuple",
"[",
"Tuple",
"[",
"Hash32",
",",
"...",
"]",
",",
"Tuple",
"[",
"Hash32",
",",
"...",
"]",
"]",
":",
"with",
"self",
".",
"db",
".",
"atomic_batch",
"(",
")... | 36.8 | 14 |
def formula_to_dictionary(formula='', thickness=np.NaN, density=np.NaN, database='ENDF_VII'):
"""create dictionary based on formula given
Parameters:
===========
formula: string
ex: 'AgCo2'
ex: 'Ag'
thickness: float (in mm) default is np.NaN
density: float (in g/cm3) default is np.NaN
    database: string (default is 'ENDF_VII'). Database in which to look up the elements
Raises:
=======
    ValueError if one of the elements is missing from the database
Return:
=======
the dictionary of the elements passed
ex: {'AgCo2': {'elements': ['Ag','Co'],
'stoichiometric_ratio': [1,2],
'thickness': {'value': thickness,
'units': 'mm'},
'density': {'value': density,
'units': 'g/cm3'},
'molar_mass': {'value': np.nan,
'units': 'g/mol'},
}
"""
if '.' in formula:
raise ValueError("formula '{}' is invalid, containing symbol '{}' !".format(formula, '.'))
_formula_parsed = re.findall(r'([A-Z][a-z]*)(\d*)', formula)
if len(_formula_parsed) == 0:
raise ValueError("formula '{}' is invalid !".format(formula))
_elements_array = []
_atomic_ratio_array = []
for _element in _formula_parsed:
[_single_element, _atomic_ratio] = list(_element)
if not is_element_in_database(element=_single_element, database=database):
raise ValueError("element '{}' not found in the database '{}'!".format(_single_element, database))
if _atomic_ratio == '':
_atomic_ratio = 1
_atomic_ratio_array.append(int(_atomic_ratio))
_elements_array.append(_single_element)
_dict = {formula: {'elements': _elements_array,
'stoichiometric_ratio': _atomic_ratio_array,
'thickness': {'value': thickness,
'units': 'mm'},
'density': {'value': density,
'units': 'g/cm3'},
'molar_mass': {'value': np.nan,
'units': 'g/mol'}
}
}
return _dict | [
"def",
"formula_to_dictionary",
"(",
"formula",
"=",
"''",
",",
"thickness",
"=",
"np",
".",
"NaN",
",",
"density",
"=",
"np",
".",
"NaN",
",",
"database",
"=",
"'ENDF_VII'",
")",
":",
"if",
"'.'",
"in",
"formula",
":",
"raise",
"ValueError",
"(",
"\"f... | 38.2 | 21.366667 |
def hash64(key, seed):
"""
Wrapper around mmh3.hash64 to get us single 64-bit value.
This also does the extra work of ensuring that we always treat the
returned values as big-endian unsigned long, like smhasher used to
do.
"""
hash_val = mmh3.hash64(key, seed)[0]
return struct.unpack('>Q', struct.pack('q', hash_val))[0] | [
"def",
"hash64",
"(",
"key",
",",
"seed",
")",
":",
"hash_val",
"=",
"mmh3",
".",
"hash64",
"(",
"key",
",",
"seed",
")",
"[",
"0",
"]",
"return",
"struct",
".",
"unpack",
"(",
"'>Q'",
",",
"struct",
".",
"pack",
"(",
"'q'",
",",
"hash_val",
")",... | 34.5 | 19.3 |
def _initial_placement(movable_vertices, vertices_resources, machine, random):
"""For internal use. Produces a random, sequential initial placement,
updating the resource availabilities of every core in the supplied machine.
Parameters
----------
movable_vertices : {vertex, ...}
A set of the vertices to be given a random initial placement.
vertices_resources : {vertex: {resource: value, ...}, ...}
machine : :py:class:`rig.place_and_route.Machine`
A machine object describing the machine into which the vertices should
be placed.
All chips hosting fixed vertices should have a chip_resource_exceptions
entry which accounts for the allocated resources.
When this function returns, the machine.chip_resource_exceptions will
be updated to account for the resources consumed by the initial
placement of movable vertices.
    random : :py:class:`random.Random`
The random number generator to use
Returns
-------
{vertex: (x, y), ...}
For all movable_vertices.
Raises
------
InsufficientResourceError
InvalidConstraintError
"""
# Initially fill chips in the system in a random order
locations = list(machine)
random.shuffle(locations)
location_iter = iter(locations)
# Greedily place the vertices in a random order
movable_vertices = list(v for v in vertices_resources
if v in movable_vertices)
random.shuffle(movable_vertices)
vertex_iter = iter(movable_vertices)
placement = {}
try:
location = next(location_iter)
except StopIteration:
raise InsufficientResourceError("No working chips in system.")
while True:
# Get a vertex to place
try:
vertex = next(vertex_iter)
except StopIteration:
# All vertices have been placed
break
# Advance through the set of available locations until we find a chip
# where the vertex fits
while True:
resources_if_placed = subtract_resources(
machine[location], vertices_resources[vertex])
if overallocated(resources_if_placed):
# The vertex won't fit on this chip, move onto the next chip
try:
location = next(location_iter)
continue
except StopIteration:
raise InsufficientResourceError(
"Ran out of chips while attempting to place vertex "
"{}".format(vertex))
else:
# The vertex fits: record the resources consumed and move on to
# the next vertex.
placement[vertex] = location
machine[location] = resources_if_placed
break
return placement | [
"def",
"_initial_placement",
"(",
"movable_vertices",
",",
"vertices_resources",
",",
"machine",
",",
"random",
")",
":",
"# Initially fill chips in the system in a random order",
"locations",
"=",
"list",
"(",
"machine",
")",
"random",
".",
"shuffle",
"(",
"locations",... | 35.620253 | 20.455696 |
def vectorize_dialogue_ohe(self, dia):
"""
Take in a dialogue (a sequence of tokenized utterances) and transform it into a
sequence of sequences of one-hot vectors
"""
# we squeeze it because it's coming out with an extra empty
# dimension at the front of the shape: (1 x dia x utt x word)
return np.array([[self.vectorize_utterance_ohe(utt) for utt in dia]]).squeeze() | [
"def",
"vectorize_dialogue_ohe",
"(",
"self",
",",
"dia",
")",
":",
"# we squeeze it because it's coming out with an extra empty",
"# dimension at the front of the shape: (1 x dia x utt x word)",
"return",
"np",
".",
"array",
"(",
"[",
"[",
"self",
".",
"vectorize_utterance_ohe... | 52.25 | 20 |
def _replace_fields(self, json_dict):
"""
Delete this object's attributes, and replace with
those in json_dict.
"""
for key in self._json_dict.keys():
if not key.startswith("_"):
delattr(self, key)
self._json_dict = json_dict
self._set_fields(json_dict) | [
"def",
"_replace_fields",
"(",
"self",
",",
"json_dict",
")",
":",
"for",
"key",
"in",
"self",
".",
"_json_dict",
".",
"keys",
"(",
")",
":",
"if",
"not",
"key",
".",
"startswith",
"(",
"\"_\"",
")",
":",
"delattr",
"(",
"self",
",",
"key",
")",
"s... | 32.8 | 5.2 |
def generate_doc(self, dir_name, vasprun_files):
"""
Process aflow style runs, where each run is actually a combination of
two vasp runs.
"""
try:
fullpath = os.path.abspath(dir_name)
# Defensively copy the additional fields first. This is a MUST.
# Otherwise, parallel updates will see the same object and inserts
# will be overridden!!
d = {k: v for k, v in self.additional_fields.items()}
d["dir_name"] = fullpath
d["schema_version"] = VaspToDbTaskDrone.__version__
d["calculations"] = [
self.process_vasprun(dir_name, taskname, filename)
for taskname, filename in vasprun_files.items()]
d1 = d["calculations"][0]
d2 = d["calculations"][-1]
# Now map some useful info to the root level.
for root_key in ["completed_at", "nsites", "unit_cell_formula",
"reduced_cell_formula", "pretty_formula",
"elements", "nelements", "cif", "density",
"is_hubbard", "hubbards", "run_type"]:
d[root_key] = d2[root_key]
d["chemsys"] = "-".join(sorted(d2["elements"]))
# store any overrides to the exchange correlation functional
xc = d2["input"]["incar"].get("GGA")
if xc:
xc = xc.upper()
d["input"] = {"crystal": d1["input"]["crystal"],
"is_lasph": d2["input"]["incar"].get("LASPH", False),
"potcar_spec": d1["input"].get("potcar_spec"),
"xc_override": xc}
vals = sorted(d2["reduced_cell_formula"].values())
d["anonymous_formula"] = {string.ascii_uppercase[i]: float(vals[i])
for i in range(len(vals))}
d["output"] = {
"crystal": d2["output"]["crystal"],
"final_energy": d2["output"]["final_energy"],
"final_energy_per_atom": d2["output"]["final_energy_per_atom"]}
d["name"] = "aflow"
p = d2["input"]["potcar_type"][0].split("_")
pot_type = p[0]
functional = "lda" if len(pot_type) == 1 else "_".join(p[1:])
d["pseudo_potential"] = {"functional": functional.lower(),
"pot_type": pot_type.lower(),
"labels": d2["input"]["potcar"]}
if len(d["calculations"]) == len(self.runs) or \
list(vasprun_files.keys())[0] != "relax1":
d["state"] = "successful" if d2["has_vasp_completed"] \
else "unsuccessful"
else:
d["state"] = "stopped"
d["analysis"] = get_basic_analysis_and_error_checks(d)
sg = SpacegroupAnalyzer(Structure.from_dict(d["output"]["crystal"]),
0.1)
d["spacegroup"] = {"symbol": sg.get_space_group_symbol(),
"number": sg.get_space_group_number(),
"point_group": sg.get_point_group_symbol(),
"source": "spglib",
"crystal_system": sg.get_crystal_system(),
"hall": sg.get_hall()}
d["oxide_type"] = d2["oxide_type"]
d["last_updated"] = datetime.datetime.today()
return d
except Exception as ex:
import traceback
print(traceback.format_exc())
logger.error("Error in " + os.path.abspath(dir_name) +
".\n" + traceback.format_exc())
return None | [
"def",
"generate_doc",
"(",
"self",
",",
"dir_name",
",",
"vasprun_files",
")",
":",
"try",
":",
"fullpath",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"dir_name",
")",
"# Defensively copy the additional fields first. This is a MUST.",
"# Otherwise, parallel updates ... | 49.706667 | 20.56 |
def centroid_sources(data, xpos, ypos, box_size=11, footprint=None,
error=None, mask=None, centroid_func=centroid_com):
"""
Calculate the centroid of sources at the defined positions.
A cutout image centered on each input position will be used to
calculate the centroid position. The cutout image is defined either
using the ``box_size`` or ``footprint`` keyword. The ``footprint``
keyword can be used to create a non-rectangular cutout image.
Parameters
----------
data : array_like
The 2D array of the image.
xpos, ypos : float or array-like of float
The initial ``x`` and ``y`` pixel position(s) of the center
        position. A cutout image centered on this position will be used to
calculate the centroid.
box_size : int or array-like of int, optional
The size of the cutout image along each axis. If ``box_size``
is a number, then a square cutout of ``box_size`` will be
created. If ``box_size`` has two elements, they should be in
``(ny, nx)`` order.
footprint : `~numpy.ndarray` of bools, optional
A 2D boolean array where `True` values describe the local
footprint region to cutout. ``footprint`` can be used to create
a non-rectangular cutout image, in which case the input ``xpos``
and ``ypos`` represent the center of the minimal bounding box
for the input ``footprint``. ``box_size=(n, m)`` is equivalent
to ``footprint=np.ones((n, m))``. Either ``box_size`` or
``footprint`` must be defined. If they are both defined, then
``footprint`` overrides ``box_size``.
mask : array_like, bool, optional
A 2D boolean array with the same shape as ``data``, where a
`True` value indicates the corresponding element of ``data`` is
masked.
error : array_like, optional
The 2D array of the 1-sigma errors of the input ``data``.
``error`` must have the same shape as ``data``. ``error`` will
be used only if supported by the input ``centroid_func``.
centroid_func : callable, optional
A callable object (e.g. function or class) that is used to
calculate the centroid of a 2D array. The ``centroid_func``
must accept a 2D `~numpy.ndarray`, have a ``mask`` keyword and
optionally an ``error`` keyword. The callable object must
return a tuple of two 1D `~numpy.ndarray`\\s, representing the x
and y centroids. The default is
`~photutils.centroids.centroid_com`.
Returns
-------
xcentroid, ycentroid : `~numpy.ndarray`
The ``x`` and ``y`` pixel position(s) of the centroids.
"""
xpos = np.atleast_1d(xpos)
ypos = np.atleast_1d(ypos)
if xpos.ndim != 1:
raise ValueError('xpos must be a 1D array.')
if ypos.ndim != 1:
raise ValueError('ypos must be a 1D array.')
if footprint is None:
if box_size is None:
raise ValueError('box_size or footprint must be defined.')
else:
box_size = np.atleast_1d(box_size)
if len(box_size) == 1:
box_size = np.repeat(box_size, 2)
if len(box_size) != 2:
raise ValueError('box_size must have 1 or 2 elements.')
footprint = np.ones(box_size, dtype=bool)
else:
footprint = np.asanyarray(footprint, dtype=bool)
if footprint.ndim != 2:
raise ValueError('footprint must be a 2D array.')
use_error = False
spec = inspect.getfullargspec(centroid_func)
if 'mask' not in spec.args:
raise ValueError('The input "centroid_func" must have a "mask" '
'keyword.')
if 'error' in spec.args:
use_error = True
xcentroids = []
ycentroids = []
for xp, yp in zip(xpos, ypos):
slices_large, slices_small = overlap_slices(data.shape,
footprint.shape, (yp, xp))
data_cutout = data[slices_large]
mask_cutout = None
if mask is not None:
mask_cutout = mask[slices_large]
footprint_mask = ~footprint
# trim footprint mask if partial overlap on the data
footprint_mask = footprint_mask[slices_small]
if mask_cutout is None:
mask_cutout = footprint_mask
else:
# combine the input mask and footprint mask
mask_cutout = np.logical_or(mask_cutout, footprint_mask)
if error is not None and use_error:
error_cutout = error[slices_large]
xcen, ycen = centroid_func(data_cutout, mask=mask_cutout,
error=error_cutout)
else:
xcen, ycen = centroid_func(data_cutout, mask=mask_cutout)
xcentroids.append(xcen + slices_large[1].start)
ycentroids.append(ycen + slices_large[0].start)
return np.array(xcentroids), np.array(ycentroids) | [
"def",
"centroid_sources",
"(",
"data",
",",
"xpos",
",",
"ypos",
",",
"box_size",
"=",
"11",
",",
"footprint",
"=",
"None",
",",
"error",
"=",
"None",
",",
"mask",
"=",
"None",
",",
"centroid_func",
"=",
"centroid_com",
")",
":",
"xpos",
"=",
"np",
... | 39.290323 | 21.741935 |
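A self-contained sketch of the calling convention, assuming photutils is installed; the image is synthetic and the recovered centroid is only approximate because of the flat background:

import numpy as np
from photutils.centroids import centroid_com

# Synthetic 50x50 image: flat background plus one bright pixel near (24, 11).
data = np.ones((50, 50))
data[11, 24] = 100.0
xc, yc = centroid_sources(data, xpos=[25.0], ypos=[10.0], box_size=11,
                          centroid_func=centroid_com)
# xc, yc are 1-element arrays pulled toward the bright pixel at (24, 11)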
def ephemeral(*,
port: int = 6060,
timeout_connection: int = 30,
verbose: bool = False
) -> Iterator[Client]:
"""
    Launches an ephemeral server instance that is closed immediately
    when the context exits.
Parameters:
port: the port that the server should run on.
verbose: if set to True, the server will print its output to the
stdout, otherwise it will remain silent.
Returns:
a client for communicating with the server.
"""
url = "http://127.0.0.1:{}".format(port)
cmd = ["bugzood", "--debug", "-p", str(port)]
    proc = None
    try:
stdout = None if verbose else subprocess.DEVNULL
stderr = None if verbose else subprocess.DEVNULL
proc = subprocess.Popen(cmd,
preexec_fn=os.setsid,
stdout=stdout,
stderr=stderr)
yield Client(url, timeout_connection=timeout_connection)
finally:
        # Guard against Popen failing before proc was assigned.
        if proc is not None:
            os.killpg(proc.pid, signal.SIGTERM)
"def",
"ephemeral",
"(",
"*",
",",
"port",
":",
"int",
"=",
"6060",
",",
"timeout_connection",
":",
"int",
"=",
"30",
",",
"verbose",
":",
"bool",
"=",
"False",
")",
"->",
"Iterator",
"[",
"Client",
"]",
":",
"url",
"=",
"\"http://127.0.0.1:{}\"",
".",... | 36.034483 | 15.068966 |
def compact(self, revision, physical=False):
"""
Compact the event history in etcd up to a given revision.
All superseded keys with a revision less than the compaction revision
will be removed.
:param revision: revision for the compaction operation
:param physical: if set to True, the request will wait until the
compaction is physically applied to the local database
such that compacted entries are totally removed from
the backend database
"""
compact_request = etcdrpc.CompactionRequest(revision=revision,
physical=physical)
self.kvstub.Compact(
compact_request,
self.timeout,
credentials=self.call_credentials,
metadata=self.metadata
) | [
"def",
"compact",
"(",
"self",
",",
"revision",
",",
"physical",
"=",
"False",
")",
":",
"compact_request",
"=",
"etcdrpc",
".",
"CompactionRequest",
"(",
"revision",
"=",
"revision",
",",
"physical",
"=",
"physical",
")",
"self",
".",
"kvstub",
".",
"Comp... | 41.761905 | 20.904762 |
def write(self, address, data, x, y, p=0):
"""Write a bytestring to an address in memory.
It is strongly encouraged to only read and write to blocks of memory
allocated using :py:meth:`.sdram_alloc`. Additionally,
:py:meth:`.sdram_alloc_as_filelike` can be used to safely wrap
read/write access to memory with a file-like interface and prevent
accidental access to areas outside the allocated block.
Parameters
----------
address : int
The address at which to start writing the data. Addresses are given
within the address space of a SpiNNaker core. See the SpiNNaker
datasheet for more information.
data : :py:class:`bytes`
Data to write into memory. Writes are automatically broken into a
sequence of SCP write commands.
"""
# Call the SCPConnection to perform the write on our behalf
connection = self._get_connection(x, y)
return connection.write(self.scp_data_length, self.scp_window_size,
x, y, p, address, data) | [
"def",
"write",
"(",
"self",
",",
"address",
",",
"data",
",",
"x",
",",
"y",
",",
"p",
"=",
"0",
")",
":",
"# Call the SCPConnection to perform the write on our behalf",
"connection",
"=",
"self",
".",
"_get_connection",
"(",
"x",
",",
"y",
")",
"return",
... | 47.913043 | 21.695652 |
def available_digests(family=None, name=None):
""" Return names of available generators
:param family: name of hash-generator family to select
:param name: name of hash-generator to select
:return: set of int
"""
generators = WHash.available_generators(family=family, name=name)
return set([WHash.generator(x).generator_digest_size() for x in generators]) | [
"def",
"available_digests",
"(",
"family",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"generators",
"=",
"WHash",
".",
"available_generators",
"(",
"family",
"=",
"family",
",",
"name",
"=",
"name",
")",
"return",
"set",
"(",
"[",
"WHash",
".",
"... | 36.2 | 19.3 |
def rebase_event(self, event):
"""
Rebase the coordinates of the passed event to frame-relative coordinates.
:param event: The event to be rebased.
:returns: A new event object appropriately re-based.
"""
new_event = copy(event)
if isinstance(new_event, MouseEvent):
origin = self._canvas.origin
new_event.x -= origin[0]
new_event.y -= origin[1] - self._canvas.start_line
logger.debug("New event: %s", new_event)
return new_event | [
"def",
"rebase_event",
"(",
"self",
",",
"event",
")",
":",
"new_event",
"=",
"copy",
"(",
"event",
")",
"if",
"isinstance",
"(",
"new_event",
",",
"MouseEvent",
")",
":",
"origin",
"=",
"self",
".",
"_canvas",
".",
"origin",
"new_event",
".",
"x",
"-=... | 37.5 | 12.928571 |
def soft_target_update(self):
"""
Soft update model parameters:
.. math::
        \\theta_{\\text{target}} = \\tau \\, \\theta_{\\text{local}} + (1 - \\tau) \\, \\theta_{\\text{target}},
        \\qquad \\text{with } \\tau \\ll 1
See https://arxiv.org/pdf/1509.02971.pdf
"""
for target_param, local_param in zip(self.target.parameters(), self.local.parameters()):
target_param.data.copy_(self.tau * local_param.data + (1.0 - self.tau) * target_param.data) | [
"def",
"soft_target_update",
"(",
"self",
")",
":",
"for",
"target_param",
",",
"local_param",
"in",
"zip",
"(",
"self",
".",
"target",
".",
"parameters",
"(",
")",
",",
"self",
".",
"local",
".",
"parameters",
"(",
")",
")",
":",
"target_param",
".",
... | 36.166667 | 25.333333 |
def clear(self, context=None):
""" Delete all data from the graph. """
context = URIRef(context).n3() if context is not None else '?g'
query = """
DELETE { GRAPH %s { ?s ?p ?o } } WHERE { GRAPH %s { ?s ?p ?o } }
""" % (context, context)
self.parent.graph.update(query) | [
"def",
"clear",
"(",
"self",
",",
"context",
"=",
"None",
")",
":",
"context",
"=",
"URIRef",
"(",
"context",
")",
".",
"n3",
"(",
")",
"if",
"context",
"is",
"not",
"None",
"else",
"'?g'",
"query",
"=",
"\"\"\"\n DELETE { GRAPH %s { ?s ?p ?o } } ... | 44.857143 | 11.142857 |
def get_traffic_items(self):
"""
:return: dictionary {name: object} of all traffic items.
"""
traffic = self.get_child_static('traffic')
return {o.obj_name(): o for o in traffic.get_objects_or_children_by_type('trafficItem')} | [
"def",
"get_traffic_items",
"(",
"self",
")",
":",
"traffic",
"=",
"self",
".",
"get_child_static",
"(",
"'traffic'",
")",
"return",
"{",
"o",
".",
"obj_name",
"(",
")",
":",
"o",
"for",
"o",
"in",
"traffic",
".",
"get_objects_or_children_by_type",
"(",
"'... | 37.142857 | 20.285714 |
def processRequest(self, request: Request, frm: str):
"""
Handle a REQUEST from the client.
If the request has already been executed, the node re-sends the reply to
the client. Otherwise, the node acknowledges the client request, adds it
to its list of client requests, and sends a PROPAGATE to the
remaining nodes.
:param request: the REQUEST from the client
:param frm: the name of the client that sent this REQUEST
"""
logger.debug("{} received client request: {} from {}".
format(self.name, request, frm))
self.nodeRequestSpikeMonitorData['accum'] += 1
# TODO: What if client sends requests with same request id quickly so
# before reply for one is generated, the other comes. In that
# case we need to keep track of what requests ids node has seen
# in-memory and once request with a particular request id is processed,
# it should be removed from that in-memory DS.
# If request is already processed(there is a reply for the
# request in
# the node's transaction store then return the reply from the
# transaction store)
        # TODO: What if the reply was a REQNACK? It's not going to be found in
        # the replies.
txn_type = request.operation[TXN_TYPE]
if self.is_action(txn_type):
self.process_action(request, frm)
elif txn_type == GET_TXN:
self.handle_get_txn_req(request, frm)
self.total_read_request_number += 1
elif self.is_query(txn_type):
self.process_query(request, frm)
self.total_read_request_number += 1
elif self.can_write_txn(txn_type):
reply = self.getReplyFromLedgerForRequest(request)
if reply:
logger.debug("{} returning reply from already processed "
"REQUEST: {}".format(self, request))
self.transmitToClient(reply, frm)
return
# If the node is not already processing the request
if not self.isProcessingReq(request.key):
self.startedProcessingReq(request.key, frm)
# forced request should be processed before consensus
self.handle_request_if_forced(request)
# If not already got the propagate request(PROPAGATE) for the
# corresponding client request(REQUEST)
self.recordAndPropagate(request, frm)
self.send_ack_to_client((request.identifier, request.reqId), frm)
else:
raise InvalidClientRequest(
request.identifier,
request.reqId,
'Pool is in readonly mode, try again in 60 seconds') | [
"def",
"processRequest",
"(",
"self",
",",
"request",
":",
"Request",
",",
"frm",
":",
"str",
")",
":",
"logger",
".",
"debug",
"(",
"\"{} received client request: {} from {}\"",
".",
"format",
"(",
"self",
".",
"name",
",",
"request",
",",
"frm",
")",
")"... | 42.046154 | 21.523077 |
def get_broadcast_shape(*tensors):
"""Get broadcast shape as a Python list of integers (preferred) or `Tensor`.
Args:
*tensors: One or more `Tensor` objects (already converted!).
Returns:
broadcast shape: Python list (if shapes determined statically), otherwise
an `int32` `Tensor`.
"""
# Try static.
s_shape = tensors[0].shape
for t in tensors[1:]:
s_shape = tf.broadcast_static_shape(s_shape, t.shape)
if tensorshape_util.is_fully_defined(s_shape):
return tensorshape_util.as_list(s_shape)
# Fallback on dynamic.
d_shape = tf.shape(input=tensors[0])
for t in tensors[1:]:
d_shape = tf.broadcast_dynamic_shape(d_shape, tf.shape(input=t))
return d_shape | [
"def",
"get_broadcast_shape",
"(",
"*",
"tensors",
")",
":",
"# Try static.",
"s_shape",
"=",
"tensors",
"[",
"0",
"]",
".",
"shape",
"for",
"t",
"in",
"tensors",
"[",
"1",
":",
"]",
":",
"s_shape",
"=",
"tf",
".",
"broadcast_static_shape",
"(",
"s_shape... | 31.227273 | 19.818182 |
def getMouse(self):
"""
Waits for a mouse click.
"""
# FIXME: this isn't working during an executing cell
self.mouse_x.value = -1
self.mouse_y.value = -1
while self.mouse_x.value == -1 and self.mouse_y.value == -1:
time.sleep(.1)
return (self.mouse_x.value, self.mouse_y.value) | [
"def",
"getMouse",
"(",
"self",
")",
":",
"# FIXME: this isn't working during an executing cell",
"self",
".",
"mouse_x",
".",
"value",
"=",
"-",
"1",
"self",
".",
"mouse_y",
".",
"value",
"=",
"-",
"1",
"while",
"self",
".",
"mouse_x",
".",
"value",
"==",
... | 34.4 | 12.4 |
def refreshFromTarget(self, level=0):
""" Refreshes the configuration tree from the target it monitors (if present).
Recursively call _refreshNodeFromTarget for itself and all children. Subclasses should
typically override _refreshNodeFromTarget instead of this function.
During updateTarget's execution refreshFromTarget is blocked to avoid loops.
"""
if self.getRefreshBlocked():
logger.debug("_refreshNodeFromTarget blocked")
return
if False and level == 0:
logger.debug("refreshFromTarget: {}".format(self.nodePath))
self._refreshNodeFromTarget()
for child in self.childItems:
child.refreshFromTarget(level=level + 1) | [
"def",
"refreshFromTarget",
"(",
"self",
",",
"level",
"=",
"0",
")",
":",
"if",
"self",
".",
"getRefreshBlocked",
"(",
")",
":",
"logger",
".",
"debug",
"(",
"\"_refreshNodeFromTarget blocked\"",
")",
"return",
"if",
"False",
"and",
"level",
"==",
"0",
":... | 46.25 | 20.5625 |
def _convert_xml_to_service_properties(response):
'''
<?xml version="1.0" encoding="utf-8"?>
<StorageServiceProperties>
<Logging>
<Version>version-number</Version>
<Delete>true|false</Delete>
<Read>true|false</Read>
<Write>true|false</Write>
<RetentionPolicy>
<Enabled>true|false</Enabled>
<Days>number-of-days</Days>
</RetentionPolicy>
</Logging>
<HourMetrics>
<Version>version-number</Version>
<Enabled>true|false</Enabled>
<IncludeAPIs>true|false</IncludeAPIs>
<RetentionPolicy>
<Enabled>true|false</Enabled>
<Days>number-of-days</Days>
</RetentionPolicy>
</HourMetrics>
<MinuteMetrics>
<Version>version-number</Version>
<Enabled>true|false</Enabled>
<IncludeAPIs>true|false</IncludeAPIs>
<RetentionPolicy>
<Enabled>true|false</Enabled>
<Days>number-of-days</Days>
</RetentionPolicy>
</MinuteMetrics>
<Cors>
<CorsRule>
<AllowedOrigins>comma-separated-list-of-allowed-origins</AllowedOrigins>
<AllowedMethods>comma-separated-list-of-HTTP-verb</AllowedMethods>
<MaxAgeInSeconds>max-caching-age-in-seconds</MaxAgeInSeconds>
            <ExposedHeaders>comma-separated-list-of-response-headers</ExposedHeaders>
            <AllowedHeaders>comma-separated-list-of-request-headers</AllowedHeaders>
</CorsRule>
</Cors>
<DeleteRetentionPolicy>
<Enabled>true|false</Enabled>
<Days>number-of-days</Days>
</DeleteRetentionPolicy>
<StaticWebsite>
<Enabled>true|false</Enabled>
<IndexDocument></IndexDocument>
<ErrorDocument404Path></ErrorDocument404Path>
</StaticWebsite>
</StorageServiceProperties>
'''
if response is None or response.body is None:
return None
service_properties_element = ETree.fromstring(response.body)
service_properties = ServiceProperties()
# Logging
logging = service_properties_element.find('Logging')
if logging is not None:
service_properties.logging = Logging()
service_properties.logging.version = logging.find('Version').text
service_properties.logging.delete = _bool(logging.find('Delete').text)
service_properties.logging.read = _bool(logging.find('Read').text)
service_properties.logging.write = _bool(logging.find('Write').text)
_convert_xml_to_retention_policy(logging.find('RetentionPolicy'),
service_properties.logging.retention_policy)
# HourMetrics
hour_metrics_element = service_properties_element.find('HourMetrics')
if hour_metrics_element is not None:
service_properties.hour_metrics = Metrics()
_convert_xml_to_metrics(hour_metrics_element, service_properties.hour_metrics)
# MinuteMetrics
minute_metrics_element = service_properties_element.find('MinuteMetrics')
if minute_metrics_element is not None:
service_properties.minute_metrics = Metrics()
_convert_xml_to_metrics(minute_metrics_element, service_properties.minute_metrics)
# CORS
cors = service_properties_element.find('Cors')
if cors is not None:
service_properties.cors = list()
for rule in cors.findall('CorsRule'):
allowed_origins = rule.find('AllowedOrigins').text.split(',')
allowed_methods = rule.find('AllowedMethods').text.split(',')
max_age_in_seconds = int(rule.find('MaxAgeInSeconds').text)
cors_rule = CorsRule(allowed_origins, allowed_methods, max_age_in_seconds)
exposed_headers = rule.find('ExposedHeaders').text
if exposed_headers is not None:
cors_rule.exposed_headers = exposed_headers.split(',')
allowed_headers = rule.find('AllowedHeaders').text
if allowed_headers is not None:
cors_rule.allowed_headers = allowed_headers.split(',')
service_properties.cors.append(cors_rule)
# Target version
target_version = service_properties_element.find('DefaultServiceVersion')
if target_version is not None:
service_properties.target_version = target_version.text
# DeleteRetentionPolicy
delete_retention_policy_element = service_properties_element.find('DeleteRetentionPolicy')
if delete_retention_policy_element is not None:
service_properties.delete_retention_policy = DeleteRetentionPolicy()
policy_enabled = _bool(delete_retention_policy_element.find('Enabled').text)
service_properties.delete_retention_policy.enabled = policy_enabled
if policy_enabled:
service_properties.delete_retention_policy.days = int(delete_retention_policy_element.find('Days').text)
# StaticWebsite
static_website_element = service_properties_element.find('StaticWebsite')
if static_website_element is not None:
service_properties.static_website = StaticWebsite()
service_properties.static_website.enabled = _bool(static_website_element.find('Enabled').text)
index_document_element = static_website_element.find('IndexDocument')
if index_document_element is not None:
service_properties.static_website.index_document = index_document_element.text
error_document_element = static_website_element.find('ErrorDocument404Path')
if error_document_element is not None:
service_properties.static_website.error_document_404_path = error_document_element.text
return service_properties | [
"def",
"_convert_xml_to_service_properties",
"(",
"response",
")",
":",
"if",
"response",
"is",
"None",
"or",
"response",
".",
"body",
"is",
"None",
":",
"return",
"None",
"service_properties_element",
"=",
"ETree",
".",
"fromstring",
"(",
"response",
".",
"body... | 42.552239 | 22.910448 |
def convert(ast):
"""Convert BEL1 AST Function to BEL2 AST Function"""
if ast and ast.type == "Function":
# Activity function conversion
if (
ast.name != "molecularActivity"
and ast.name in spec["namespaces"]["Activity"]["list"]
):
print("name", ast.name, "type", ast.type)
ast = convert_activity(ast)
return ast # Otherwise - this will trigger on the BEL2 molecularActivity
# translocation conversion
elif ast.name in ["tloc", "translocation"]:
ast = convert_tloc(ast)
fus_flag = False
for idx, arg in enumerate(ast.args):
if arg.__class__.__name__ == "Function":
# Fix substitution -> variation()
if arg.name in ["sub", "substitution"]:
ast.args[idx] = convert_sub(arg)
elif arg.name in ["trunc", "truncation"]:
ast.args[idx] = convert_trunc(arg)
elif arg.name in ["pmod", "proteinModification"]:
ast.args[idx] = convert_pmod(arg)
elif arg.name in ["fus", "fusion"]:
fus_flag = True
# Recursively process Functions
ast.args[idx] = convert(ast.args[idx])
if fus_flag:
ast = convert_fus(ast)
return ast | [
"def",
"convert",
"(",
"ast",
")",
":",
"if",
"ast",
"and",
"ast",
".",
"type",
"==",
"\"Function\"",
":",
"# Activity function conversion",
"if",
"(",
"ast",
".",
"name",
"!=",
"\"molecularActivity\"",
"and",
"ast",
".",
"name",
"in",
"spec",
"[",
"\"name... | 32.609756 | 20.02439 |
def ssh_config(self, name=''):
"""
Get the SSH parameters for connecting to a vagrant VM.
"""
r = self.local_renderer
with self.settings(hide('running')):
output = r.local('vagrant ssh-config %s' % name, capture=True)
config = {}
for line in output.splitlines()[1:]:
                key, value = line.strip().split(' ', 1)  # split once so values may contain spaces
config[key] = value
return config | [
"def",
"ssh_config",
"(",
"self",
",",
"name",
"=",
"''",
")",
":",
"r",
"=",
"self",
".",
"local_renderer",
"with",
"self",
".",
"settings",
"(",
"hide",
"(",
"'running'",
")",
")",
":",
"output",
"=",
"r",
".",
"local",
"(",
"'vagrant ssh-config %s'"... | 33 | 14.076923 |
def init(self, acct: Account, payer_acct: Account, gas_limit: int, gas_price: int) -> str:
"""
        This interface is used to call the Init method in an OEP-4
        contract, which initializes the smart contract's parameters.
        :param acct: an Account object that is used to sign the transaction.
        :param payer_acct: an Account object that is used to pay for the transaction.
        :param gas_limit: an int value that indicates the gas limit.
        :param gas_price: an int value that indicates the gas price.
:return: the hexadecimal transaction hash value.
"""
func = InvokeFunction('init')
tx_hash = self.__sdk.get_network().send_neo_vm_transaction(self.__hex_contract_address, acct, payer_acct,
gas_limit, gas_price, func)
return tx_hash | [
"def",
"init",
"(",
"self",
",",
"acct",
":",
"Account",
",",
"payer_acct",
":",
"Account",
",",
"gas_limit",
":",
"int",
",",
"gas_price",
":",
"int",
")",
"->",
"str",
":",
"func",
"=",
"InvokeFunction",
"(",
"'init'",
")",
"tx_hash",
"=",
"self",
... | 55.933333 | 27.933333 |
def _update_index(self, axis, key, value):
"""Update the current axis index based on a given key or value
This is an internal method designed to set the origin or step for
an index, whilst updating existing Index arrays as appropriate
Examples
--------
>>> self._update_index("x0", 0)
>>> self._update_index("dx", 0)
To actually set an index array, use `_set_index`
"""
# delete current value if given None
if value is None:
return delattr(self, key)
_key = "_{}".format(key)
index = "{[0]}index".format(axis)
unit = "{[0]}unit".format(axis)
# convert float to Quantity
if not isinstance(value, Quantity):
try:
value = Quantity(value, getattr(self, unit))
except TypeError:
value = Quantity(float(value), getattr(self, unit))
# if value is changing, delete current index
try:
curr = getattr(self, _key)
except AttributeError:
delattr(self, index)
else:
if (
value is None or
getattr(self, key) is None or
not value.unit.is_equivalent(curr.unit) or
value != curr
):
delattr(self, index)
# set new value
setattr(self, _key, value)
return value | [
"def",
"_update_index",
"(",
"self",
",",
"axis",
",",
"key",
",",
"value",
")",
":",
"# delete current value if given None",
"if",
"value",
"is",
"None",
":",
"return",
"delattr",
"(",
"self",
",",
"key",
")",
"_key",
"=",
"\"_{}\"",
".",
"format",
"(",
... | 31.111111 | 16.866667 |
def download_file(filename, session):
""" Downloads a file """
print('Downloading file %s' % filename)
infilesource = os.path.join('sftp://' + ADDRESS + WORKING_DIR,
filename)
infiletarget = os.path.join(os.getcwd(), filename)
incoming = saga.filesystem.File(infilesource, session=session, flags=OVERWRITE)
incoming.copy(infiletarget)
print('Transfer of `%s` to `%s` successful' % (filename, infiletarget)) | [
"def",
"download_file",
"(",
"filename",
",",
"session",
")",
":",
"print",
"(",
"'Downloading file %s'",
"%",
"filename",
")",
"infilesource",
"=",
"os",
".",
"path",
".",
"join",
"(",
"'sftp://'",
"+",
"ADDRESS",
"+",
"WORKING_DIR",
",",
"filename",
")",
... | 51 | 15 |
    def batch_filter(self, Zs, update_first=False, saver=None):
""" Batch processes a sequences of measurements.
Parameters
----------
Zs : list-like
list of measurements at each time step `self.dt` Missing
measurements must be represented by 'None'.
    update_first : bool, optional, default=False
controls whether the order of operations is update followed by
predict, or predict followed by update.
saver : filterpy.common.Saver, optional
filterpy.common.Saver object. If provided, saver.save() will be
called after every epoch
Returns
-------
means: ndarray ((n, dim_x, 1))
array of the state for each time step. Each entry is an np.array.
In other words `means[k,:]` is the state at step `k`.
covariance: ndarray((n, dim_x, dim_x))
array of the covariances for each time step. In other words
`covariance[k, :, :]` is the covariance at step `k`.
"""
n = np.size(Zs, 0)
# mean estimates from H-Infinity Filter
means = zeros((n, self.dim_x, 1))
# state covariances from H-Infinity Filter
covariances = zeros((n, self.dim_x, self.dim_x))
if update_first:
for i, z in enumerate(Zs):
self.update(z)
means[i, :] = self.x
covariances[i, :, :] = self.P
self.predict()
if saver is not None:
saver.save()
else:
for i, z in enumerate(Zs):
self.predict()
self.update(z)
means[i, :] = self.x
covariances[i, :, :] = self.P
if saver is not None:
saver.save()
return (means, covariances) | [
"def",
"batch_filter",
"(",
"self",
",",
"Zs",
",",
"update_first",
"=",
"False",
",",
"saver",
"=",
"False",
")",
":",
"n",
"=",
"np",
".",
"size",
"(",
"Zs",
",",
"0",
")",
"# mean estimates from H-Infinity Filter",
"means",
"=",
"zeros",
"(",
"(",
"... | 31.964912 | 19.350877 |
def expand(expression):
"""
Expand a reference expression to individual spans.
Also works on space-separated ID lists, although a sequence of space
characters will be considered a delimiter.
>>> expand('a1')
'a1'
>>> expand('a1[3:5]')
'a1[3:5]'
>>> expand('a1[3:5+6:7]')
'a1[3:5]+a1[6:7]'
>>> expand('a1 a2 a3')
'a1 a2 a3'
"""
tokens = []
for (pre, _id, _range) in robust_ref_re.findall(expression):
if not _range:
tokens.append('{}{}'.format(pre, _id))
else:
tokens.append(pre)
tokens.extend(
'{}{}[{}:{}]'.format(delim, _id, start, end)
for delim, start, end in span_re.findall(_range)
)
return ''.join(tokens) | [
"def",
"expand",
"(",
"expression",
")",
":",
"tokens",
"=",
"[",
"]",
"for",
"(",
"pre",
",",
"_id",
",",
"_range",
")",
"in",
"robust_ref_re",
".",
"findall",
"(",
"expression",
")",
":",
"if",
"not",
"_range",
":",
"tokens",
".",
"append",
"(",
... | 27.777778 | 19.407407 |
def _read_stdout(self):
"""
        Reads the child process' stdout and processes it.
"""
output = self._decode(self._process.readAllStandardOutput().data())
if self._formatter:
self._formatter.append_message(output, output_format=OutputFormat.NormalMessageFormat)
else:
self.insertPlainText(output) | [
"def",
"_read_stdout",
"(",
"self",
")",
":",
"output",
"=",
"self",
".",
"_decode",
"(",
"self",
".",
"_process",
".",
"readAllStandardOutput",
"(",
")",
".",
"data",
"(",
")",
")",
"if",
"self",
".",
"_formatter",
":",
"self",
".",
"_formatter",
".",... | 39.222222 | 18.333333 |
def _group_changes(cur, wanted, remove=False):
'''
Determine if the groups need to be changed
'''
old = set(cur)
new = set(wanted)
if (remove and old != new) or (not remove and not new.issubset(old)):
return True
return False | [
"def",
"_group_changes",
"(",
"cur",
",",
"wanted",
",",
"remove",
"=",
"False",
")",
":",
"old",
"=",
"set",
"(",
"cur",
")",
"new",
"=",
"set",
"(",
"wanted",
")",
"if",
"(",
"remove",
"and",
"old",
"!=",
"new",
")",
"or",
"(",
"not",
"remove",... | 28.111111 | 21.888889 |
async def write_non_secret(self, storec: StorageRecord, replace_meta: bool = False) -> StorageRecord:
"""
Add or update non-secret storage record to the wallet; return resulting wallet non-secret record.
:param storec: non-secret storage record
:param replace_meta: whether to replace any existing metadata on matching record or to augment it
:return: non-secret storage record as it appears in the wallet after write
"""
LOGGER.debug('Wallet.write_non_secret >>> storec: %s, replace_meta: %s', storec, replace_meta)
if not self.handle:
LOGGER.debug('Wallet.write_non_secret <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
if not StorageRecord.ok_tags(storec.tags):
LOGGER.debug('Wallet.write_non_secret <!< bad storage record tags %s; use flat {str: str} dict', storec)
raise BadRecord('Bad storage record tags {}; use flat {{str:str}} dict'.format(storec))
try:
record = json.loads(await non_secrets.get_wallet_record(
self.handle,
storec.type,
storec.id,
json.dumps({
'retrieveType': False,
'retrieveValue': True,
'retrieveTags': True
})))
if record['value'] != storec.value:
await non_secrets.update_wallet_record_value(
self.handle,
storec.type,
storec.id,
storec.value)
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
await non_secrets.add_wallet_record(
self.handle,
storec.type,
storec.id,
storec.value,
json.dumps(storec.tags) if storec.tags else None)
else:
LOGGER.debug(
'Wallet.write_non_secret <!< Wallet lookup raised indy error code %s',
x_indy.error_code)
raise
else:
if (record['tags'] or None) != storec.tags: # record maps no tags to {}, not None
tags = (storec.tags or {}) if replace_meta else {**record['tags'], **(storec.tags or {})}
await non_secrets.update_wallet_record_tags(
self.handle,
storec.type,
storec.id,
json.dumps(tags)) # indy-sdk takes '{}' instead of None for null tags
record = json.loads(await non_secrets.get_wallet_record(
self.handle,
storec.type,
storec.id,
json.dumps({
'retrieveType': False,
'retrieveValue': True,
'retrieveTags': True
})))
rv = StorageRecord(storec.type, record['value'], tags=record.get('tags', None), ident=record['id'])
LOGGER.debug('Wallet.write_non_secret <<< %s', rv)
return rv | [
"async",
"def",
"write_non_secret",
"(",
"self",
",",
"storec",
":",
"StorageRecord",
",",
"replace_meta",
":",
"bool",
"=",
"False",
")",
"->",
"StorageRecord",
":",
"LOGGER",
".",
"debug",
"(",
"'Wallet.write_non_secret >>> storec: %s, replace_meta: %s'",
",",
"st... | 42.943662 | 24.352113 |
def get_cpu_info():
'''
Returns the CPU info by using the best sources of information for your OS.
Returns the result in a dict
'''
import json
output = get_cpu_info_json()
# Convert JSON to Python with non unicode strings
output = json.loads(output, object_hook = _utf_to_str)
return output | [
"def",
"get_cpu_info",
"(",
")",
":",
"import",
"json",
"output",
"=",
"get_cpu_info_json",
"(",
")",
"# Convert JSON to Python with non unicode strings",
"output",
"=",
"json",
".",
"loads",
"(",
"output",
",",
"object_hook",
"=",
"_utf_to_str",
")",
"return",
"o... | 20.785714 | 27.785714 |
def build_from_file(self, dockerfile, tag, **kwargs):
"""
Builds a docker image from the given :class:`~dockermap.build.dockerfile.DockerFile`. Use this as a shortcut to
:meth:`build_from_context`, if no extra data is added to the context.
:param dockerfile: An instance of :class:`~dockermap.build.dockerfile.DockerFile`.
:type dockerfile: dockermap.build.dockerfile.DockerFile
:param tag: New image tag.
:type tag: unicode | str
:param kwargs: See :meth:`docker.client.Client.build`.
:return: New, generated image id or ``None``.
:rtype: unicode | str
"""
with DockerContext(dockerfile, finalize=True) as ctx:
return self.build_from_context(ctx, tag, **kwargs) | [
"def",
"build_from_file",
"(",
"self",
",",
"dockerfile",
",",
"tag",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"DockerContext",
"(",
"dockerfile",
",",
"finalize",
"=",
"True",
")",
"as",
"ctx",
":",
"return",
"self",
".",
"build_from_context",
"(",
"c... | 50.466667 | 23 |
def edit(self):
"""
Edit the SSH Key
"""
input_params = {
"name": self.name,
"public_key": self.public_key,
}
data = self.get_data(
"account/keys/%s" % self.id,
type=PUT,
params=input_params
)
if data:
self.id = data['ssh_key']['id'] | [
"def",
"edit",
"(",
"self",
")",
":",
"input_params",
"=",
"{",
"\"name\"",
":",
"self",
".",
"name",
",",
"\"public_key\"",
":",
"self",
".",
"public_key",
",",
"}",
"data",
"=",
"self",
".",
"get_data",
"(",
"\"account/keys/%s\"",
"%",
"self",
".",
"... | 21.117647 | 16.058824 |
def set_sim_data(inj, field, data):
"""Sets data of a SimInspiral instance."""
try:
sim_field = sim_inspiral_map[field]
except KeyError:
sim_field = field
# for tc, map to geocentric times
if sim_field == 'tc':
inj.geocent_end_time = int(data)
inj.geocent_end_time_ns = int(1e9*(data % 1))
else:
setattr(inj, sim_field, data) | [
"def",
"set_sim_data",
"(",
"inj",
",",
"field",
",",
"data",
")",
":",
"try",
":",
"sim_field",
"=",
"sim_inspiral_map",
"[",
"field",
"]",
"except",
"KeyError",
":",
"sim_field",
"=",
"field",
"# for tc, map to geocentric times",
"if",
"sim_field",
"==",
"'t... | 31.5 | 11.666667 |
def get_cfn_parameters(self):
"""Return a dictionary of variables with `type` :class:`CFNType`.
Returns:
dict: variables that need to be submitted as CloudFormation
Parameters.
"""
variables = self.get_variables()
output = {}
for key, value in variables.items():
if hasattr(value, "to_parameter_value"):
output[key] = value.to_parameter_value()
return output | [
"def",
"get_cfn_parameters",
"(",
"self",
")",
":",
"variables",
"=",
"self",
".",
"get_variables",
"(",
")",
"output",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"variables",
".",
"items",
"(",
")",
":",
"if",
"hasattr",
"(",
"value",
",",
"\"... | 32.785714 | 16.5 |
def write_manifest(self):
"""
Write the manifest content to the zip file. It must be a predictable
order.
"""
config = configparser.ConfigParser()
config.add_section('Manifest')
for f in sorted(self.manifest.keys()):
config.set('Manifest', f.replace(
'\\', '/').lower(), self.manifest[f])
ini = StringIO()
config.write(ini)
self.manifest_data = ini.getvalue()
self.package_zip.writestr(self.MANIFEST_FILE, self.manifest_data) | [
"def",
"write_manifest",
"(",
"self",
")",
":",
"config",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"config",
".",
"add_section",
"(",
"'Manifest'",
")",
"for",
"f",
"in",
"sorted",
"(",
"self",
".",
"manifest",
".",
"keys",
"(",
")",
")",
":... | 31.058824 | 17.294118 |
def cancel_hardware(self, hardware_id, reason='unneeded', comment='', immediate=False):
"""Cancels the specified dedicated server.
Example::
# Cancels hardware id 1234
result = mgr.cancel_hardware(hardware_id=1234)
:param int hardware_id: The ID of the hardware to be cancelled.
:param string reason: The reason code for the cancellation. This should come from
:func:`get_cancellation_reasons`.
:param string comment: An optional comment to include with the cancellation.
:param bool immediate: If set to True, will automatically update the cancelation ticket to request
the resource be reclaimed asap. This request still has to be reviewed by a human
:returns: True on success or an exception
"""
# Get cancel reason
reasons = self.get_cancellation_reasons()
cancel_reason = reasons.get(reason, reasons['unneeded'])
ticket_mgr = SoftLayer.TicketManager(self.client)
mask = 'mask[id, hourlyBillingFlag, billingItem[id], openCancellationTicket[id], activeTransaction]'
hw_billing = self.get_hardware(hardware_id, mask=mask)
if 'activeTransaction' in hw_billing:
raise SoftLayer.SoftLayerError("Unable to cancel hardware with running transaction")
if 'billingItem' not in hw_billing:
raise SoftLayer.SoftLayerError("Ticket #%s already exists for this server" %
hw_billing['openCancellationTicket']['id'])
billing_id = hw_billing['billingItem']['id']
if immediate and not hw_billing['hourlyBillingFlag']:
LOGGER.warning("Immediate cancelation of montly servers is not guaranteed."
"Please check the cancelation ticket for updates.")
result = self.client.call('Billing_Item', 'cancelItem',
False, False, cancel_reason, comment, id=billing_id)
hw_billing = self.get_hardware(hardware_id, mask=mask)
ticket_number = hw_billing['openCancellationTicket']['id']
cancel_message = "Please reclaim this server ASAP, it is no longer needed. Thankyou."
ticket_mgr.update_ticket(ticket_number, cancel_message)
LOGGER.info("Cancelation ticket #%s has been updated requesting immediate reclaim", ticket_number)
else:
result = self.client.call('Billing_Item', 'cancelItem',
immediate, False, cancel_reason, comment, id=billing_id)
hw_billing = self.get_hardware(hardware_id, mask=mask)
ticket_number = hw_billing['openCancellationTicket']['id']
LOGGER.info("Cancelation ticket #%s has been created", ticket_number)
return result | [
"def",
"cancel_hardware",
"(",
"self",
",",
"hardware_id",
",",
"reason",
"=",
"'unneeded'",
",",
"comment",
"=",
"''",
",",
"immediate",
"=",
"False",
")",
":",
"# Get cancel reason",
"reasons",
"=",
"self",
".",
"get_cancellation_reasons",
"(",
")",
"cancel_... | 54.153846 | 33.134615 |
def parse_parameters(self, parameters):
"""Parses and sets parameters in the model."""
self.parameters = []
for param_name, param_value in parameters.items():
p = Parameter(param_name, param_value)
if p:
self.parameters.append(p) | [
"def",
"parse_parameters",
"(",
"self",
",",
"parameters",
")",
":",
"self",
".",
"parameters",
"=",
"[",
"]",
"for",
"param_name",
",",
"param_value",
"in",
"parameters",
".",
"items",
"(",
")",
":",
"p",
"=",
"Parameter",
"(",
"param_name",
",",
"param... | 35.875 | 13.125 |
def transform(self, a, b, c, d, e, f):
""" Adjust the current transformation state of the current graphics state
matrix. Not recommended for the faint of heart.
"""
a0, b0, c0, d0, e0, f0 = self._currentMatrix
self._currentMatrix = (a0 * a + c0 * b, b0 * a + d0 * b,
a0 * c + c0 * d, b0 * c + d0 * d,
a0 * e + c0 * f + e0, b0 * e + d0 * f + f0)
a1, b1, c1, d1, e1, f1 = self._currentMatrix
self.session._out('%.2f %.2f %.2f %.2f %.2f %.2f cm' % (a1, b1, c1, d1, e1, f1), self.page) | [
"def",
"transform",
"(",
"self",
",",
"a",
",",
"b",
",",
"c",
",",
"d",
",",
"e",
",",
"f",
")",
":",
"a0",
",",
"b0",
",",
"c0",
",",
"d0",
",",
"e0",
",",
"f0",
"=",
"self",
".",
"_currentMatrix",
"self",
".",
"_currentMatrix",
"=",
"(",
... | 59.1 | 18.3 |
def search(self, id_perm):
"""Search Administrative Permission from by the identifier.
:param id_perm: Identifier of the Administrative Permission. Integer value and greater than zero.
:return: Following dictionary:
::
{'perm': {'ugrupo': < ugrupo_id >,
'permission': < permission_id >, 'id': < id >,
'escrita': < escrita >, 'leitura': < leitura >}}
:raise InvalidParameterError: Group User identifier is null and invalid.
:raise PermissaoAdministrativaNaoExisteError: Administrative Permission not registered.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
if not is_valid_int_param(id_perm):
raise InvalidParameterError(
u'The identifier of Administrative Permission is invalid or was not informed.')
url = 'aperms/get/' + str(id_perm) + '/'
code, xml = self.submit(None, 'GET', url)
return self.response(code, xml) | [
"def",
"search",
"(",
"self",
",",
"id_perm",
")",
":",
"if",
"not",
"is_valid_int_param",
"(",
"id_perm",
")",
":",
"raise",
"InvalidParameterError",
"(",
"u'The identifier of Administrative Permission is invalid or was not informed.'",
")",
"url",
"=",
"'aperms/get/'",
... | 37.607143 | 26.75 |
def smove(self, source, destination, member):
"""Move member from the set at source to the set at destination. This
operation is atomic. In every given moment the element will appear to
be a member of source or destination for other clients.
If the source set does not exist or does not contain the specified
element, no operation is performed and :data:`False` is returned.
Otherwise, the element is removed from the source set and added to the
destination set. When the specified element already exists in the
destination set, it is only removed from the source set.
An error is returned if source or destination does not hold a set
value.
.. note::
**Time complexity**: ``O(1)``
:param source: The source set key
:type source: :class:`str`, :class:`bytes`
:param destination: The destination set key
:type destination: :class:`str`, :class:`bytes`
:param member: The member value to move
:type member: :class:`str`, :class:`bytes`
:rtype: bool
:raises: :exc:`~tredis.exceptions.RedisError`
"""
return self._execute([b'SMOVE', source, destination, member], 1) | [
"def",
"smove",
"(",
"self",
",",
"source",
",",
"destination",
",",
"member",
")",
":",
"return",
"self",
".",
"_execute",
"(",
"[",
"b'SMOVE'",
",",
"source",
",",
"destination",
",",
"member",
"]",
",",
"1",
")"
] | 42 | 23.034483 |
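A usage sketch, assuming an already-connected tredis client and that it also exposes the usual Redis set commands such as sadd (an assumption, not verified here):

# Hypothetical coroutine using an already-connected tredis client.
async def promote(client):
    await client.sadd('pending', 'job-1')            # assumed set command
    moved = await client.smove('pending', 'active', 'job-1')
    return moved  # True when 'job-1' existed in the source set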
def _ready(self):
"""
Marks the task as ready for execution.
"""
if self._has_state(self.COMPLETED) or self._has_state(self.CANCELLED):
return
self._set_state(self.READY)
self.task_spec._on_ready(self) | [
"def",
"_ready",
"(",
"self",
")",
":",
"if",
"self",
".",
"_has_state",
"(",
"self",
".",
"COMPLETED",
")",
"or",
"self",
".",
"_has_state",
"(",
"self",
".",
"CANCELLED",
")",
":",
"return",
"self",
".",
"_set_state",
"(",
"self",
".",
"READY",
")"... | 31.75 | 12 |
def add_where_clause(self, clause):
"""
adds a where clause to this statement
:param clause: the clause to add
:type clause: WhereClause
"""
if not isinstance(clause, WhereClause):
raise StatementException("only instances of WhereClause can be added to statements")
clause.set_context_id(self.context_counter)
self.context_counter += clause.get_context_size()
self.where_clauses.append(clause) | [
"def",
"add_where_clause",
"(",
"self",
",",
"clause",
")",
":",
"if",
"not",
"isinstance",
"(",
"clause",
",",
"WhereClause",
")",
":",
"raise",
"StatementException",
"(",
"\"only instances of WhereClause can be added to statements\"",
")",
"clause",
".",
"set_contex... | 42.454545 | 9.909091 |
def geometry_within_radius(geometry, center, radius):
"""
    Validate whether a point, linestring, or polygon lies inside a radius around a center
    Keyword arguments:
    geometry -- point/linestring/polygon geojson object
    center -- point geojson object
    radius -- radius
    Returns True if the geometry is inside the radius, False otherwise.
"""
if geometry['type'] == 'Point':
return point_distance(geometry, center) <= radius
elif geometry['type'] == 'LineString' or geometry['type'] == 'Polygon':
point = {}
# it's enough to check the exterior ring of the Polygon
coordinates = geometry['coordinates'][0] if geometry['type'] == 'Polygon' else geometry['coordinates']
for coordinate in coordinates:
point['coordinates'] = coordinate
if point_distance(point, center) > radius:
return False
return True | [
"def",
"geometry_within_radius",
"(",
"geometry",
",",
"center",
",",
"radius",
")",
":",
"if",
"geometry",
"[",
"'type'",
"]",
"==",
"'Point'",
":",
"return",
"point_distance",
"(",
"geometry",
",",
"center",
")",
"<=",
"radius",
"elif",
"geometry",
"[",
... | 38.304348 | 20.652174 |
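A small sketch with GeoJSON-style dicts; point_distance is assumed to come from the same module, and radius must be expressed in whatever units point_distance returns:

# Illustrative GeoJSON fragments; units are whatever point_distance uses.
center = {'type': 'Point', 'coordinates': [0.0, 0.0]}
line = {'type': 'LineString',
        'coordinates': [[0.0, 0.0], [0.001, 0.001]]}
inside = geometry_within_radius(line, center, radius=1000)
# True only if every vertex of the linestring is within 1000 units of center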
def get_default_repo(self):
"""
Go through all the repositories defined in the config file and search
        for a truthy value for the ``default`` key. If there isn't any, return
        False.
"""
for repo in self.get_repos():
if self.get_safe(repo, 'default') and self.getboolean(repo, 'default'):
return repo
return False | [
"def",
"get_default_repo",
"(",
"self",
")",
":",
"for",
"repo",
"in",
"self",
".",
"get_repos",
"(",
")",
":",
"if",
"self",
".",
"get_safe",
"(",
"repo",
",",
"'default'",
")",
"and",
"self",
".",
"getboolean",
"(",
"repo",
",",
"'default'",
")",
"... | 38.3 | 19.3 |
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: IpAccessControlListContext for this IpAccessControlListInstance
:rtype: twilio.rest.api.v2010.account.sip.ip_access_control_list.IpAccessControlListContext
"""
if self._context is None:
self._context = IpAccessControlListContext(
self._version,
account_sid=self._solution['account_sid'],
sid=self._solution['sid'],
)
return self._context | [
"def",
"_proxy",
"(",
"self",
")",
":",
"if",
"self",
".",
"_context",
"is",
"None",
":",
"self",
".",
"_context",
"=",
"IpAccessControlListContext",
"(",
"self",
".",
"_version",
",",
"account_sid",
"=",
"self",
".",
"_solution",
"[",
"'account_sid'",
"]"... | 42.8 | 22.533333 |
def get_batched_changesets(self, changesets_request_data):
"""GetBatchedChangesets.
Returns changesets for a given list of changeset Ids.
:param :class:`<TfvcChangesetsRequestData> <azure.devops.v5_0.tfvc.models.TfvcChangesetsRequestData>` changesets_request_data: List of changeset IDs.
:rtype: [TfvcChangesetRef]
"""
content = self._serialize.body(changesets_request_data, 'TfvcChangesetsRequestData')
response = self._send(http_method='POST',
location_id='b7e7c173-803c-4fea-9ec8-31ee35c5502a',
version='5.0',
content=content)
return self._deserialize('[TfvcChangesetRef]', self._unwrap_collection(response)) | [
"def",
"get_batched_changesets",
"(",
"self",
",",
"changesets_request_data",
")",
":",
"content",
"=",
"self",
".",
"_serialize",
".",
"body",
"(",
"changesets_request_data",
",",
"'TfvcChangesetsRequestData'",
")",
"response",
"=",
"self",
".",
"_send",
"(",
"ht... | 62.833333 | 26.916667 |
def distance_to_semi_arc(alon, alat, aazimuth, plons, plats):
"""
In this method we use a reference system centered on (alon, alat), with
the y-axis corresponding to the aazimuth direction, to calculate the minimum
distance from a semi-arc that originates at (alon, alat).
Parameters are the same as for :func:`distance_to_arc`.
"""
if isinstance(plons, float):
plons = numpy.array([plons])
plats = numpy.array([plats])
azimuth_to_target = azimuth(alon, alat, plons, plats)
# Find the indexes of the points in the positive y halfspace
idx = numpy.nonzero(numpy.cos(
numpy.radians((aazimuth-azimuth_to_target))) > 0.0)
# Find the indexes of the points in the negative y halfspace
idx_not = numpy.nonzero(numpy.cos(
numpy.radians((aazimuth-azimuth_to_target))) <= 0.0)
idx_ll_quadr = numpy.nonzero(
(numpy.cos(numpy.radians((aazimuth-azimuth_to_target))) <= 0.0) &
(numpy.sin(numpy.radians((aazimuth-azimuth_to_target))) > 0.0))
# Initialise the array containing the final distances
distance = numpy.zeros_like(plons)
# Compute the distance between the semi-arc with 'aazimuth' direction
# and the set of sites in the positive half-space. The shortest distance to
# the semi-arc in this case can be computed using the function
# :func:`openquake.hazardlib.geo.geodetic.distance_to_arc`.
if len(idx[0]):
distance_to_target = geodetic_distance(alon, alat,
plons[idx], plats[idx])
t_angle = (azimuth_to_target[idx] - aazimuth + 360) % 360
angle = numpy.arccos((numpy.sin(numpy.radians(t_angle)) *
numpy.sin(distance_to_target /
EARTH_RADIUS)))
distance[idx] = (numpy.pi / 2 - angle) * EARTH_RADIUS
# Compute the distance between the reference point and the set of sites
# in the negative half-space. The shortest distance for the semi-arc for
# all the points in the negative semi-space simply corresponds to the
# shortest distance to its origin.
if len(idx_not[0]):
distance[idx_not] = geodetic_distance(alon, alat,
plons[idx_not], plats[idx_not])
distance[idx_ll_quadr] = -1 * distance[idx_ll_quadr]
return distance | [
"def",
"distance_to_semi_arc",
"(",
"alon",
",",
"alat",
",",
"aazimuth",
",",
"plons",
",",
"plats",
")",
":",
"if",
"type",
"(",
"plons",
")",
"is",
"float",
":",
"plons",
"=",
"numpy",
".",
"array",
"(",
"[",
"plons",
"]",
")",
"plats",
"=",
"nu... | 43.754717 | 24.018868 |
def get_field_info(model):
"""
Given a model class, returns a `FieldInfo` instance, which is a
`namedtuple`, containing metadata about the various field types on the model
including information about their relationships.
"""
# Deal with the primary key.
if issubclass(model, mongoengine.EmbeddedDocument):
pk = None
else:
pk = model._fields[model._meta['id_field']]
# Deal with regular fields.
fields = OrderedDict()
# Deal with forward relationships.
# Skip forward relations since there are no relations in MongoDB
references = OrderedDict()
embedded = OrderedDict()
def add_field(name, field):
if isinstance(field, REFERENCING_FIELD_TYPES):
references[name] = get_relation_info(field)
elif isinstance(field, EMBEDDING_FIELD_TYPES):
embedded[name] = get_relation_info(field)
elif isinstance(field, COMPOUND_FIELD_TYPES):
fields[name] = field
if field.field:
add_field(name + '.child', field.field)
elif field is pk:
return
else:
fields[name] = field
for field_name in model._fields_ordered:
add_field(field_name, model._fields[field_name])
# Shortcut that merges both regular fields and the pk,
# for simplifying regular field lookup.
fields_and_pk = OrderedDict()
fields_and_pk['pk'] = pk
fields_and_pk[getattr(pk, 'name', 'pk')] = pk
fields_and_pk.update(fields)
return FieldInfo(pk,
fields,
references,
fields_and_pk,
embedded) | [
"def",
"get_field_info",
"(",
"model",
")",
":",
"# Deal with the primary key.",
"if",
"issubclass",
"(",
"model",
",",
"mongoengine",
".",
"EmbeddedDocument",
")",
":",
"pk",
"=",
"None",
"else",
":",
"pk",
"=",
"model",
".",
"_fields",
"[",
"model",
".",
... | 32.34 | 16.98 |
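get_field_info leans on mongoengine's model introspection attributes; a quick sketch of those hooks on a toy model (the printed shapes are indicative, not exact):

import mongoengine

class Author(mongoengine.Document):
    name = mongoengine.StringField()

class Book(mongoengine.Document):
    title = mongoengine.StringField()
    tags = mongoengine.ListField(mongoengine.StringField())
    author = mongoengine.ReferenceField(Author)

# the hooks used above: the pk field name, the declared field order,
# and the name -> field-instance mapping
print(Book._meta['id_field'])   # 'id'
print(Book._fields_ordered)     # something like ('id', 'title', 'tags', 'author')
for name in Book._fields_ordered:
    print(name, type(Book._fields[name]).__name__)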