text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def b(self):
'''
Positive antipodal point on the minor axis, Point class.
'''
b = Point(self.center)
if self.xAxisIsMinor:
b.x += self.minorRadius
else:
b.y += self.minorRadius
return b | [
"def",
"b",
"(",
"self",
")",
":",
"b",
"=",
"Point",
"(",
"self",
".",
"center",
")",
"if",
"self",
".",
"xAxisIsMinor",
":",
"b",
".",
"x",
"+=",
"self",
".",
"minorRadius",
"else",
":",
"b",
".",
"y",
"+=",
"self",
".",
"minorRadius",
"return",
"b"
] | 21.333333 | 22.666667 |
def map(self, f):
"""
Maps this Pair with *f*'; see :meth:`vanilla.core.Recver.map`
Returns a new Pair of our current Sender and the mapped target's
Recver.
"""
return self._replace(recver=self.recver.map(f)) | [
"def",
"map",
"(",
"self",
",",
"f",
")",
":",
"return",
"self",
".",
"_replace",
"(",
"recver",
"=",
"self",
".",
"recver",
".",
"map",
"(",
"f",
")",
")"
] | 31.25 | 20.5 |
def must_be_same(self, klass):
"""Called to make sure a Node is a Dir. Since we're an
Entry, we can morph into one."""
if self.__class__ is not klass:
self.__class__ = klass
self._morph()
self.clear() | [
"def",
"must_be_same",
"(",
"self",
",",
"klass",
")",
":",
"if",
"self",
".",
"__class__",
"is",
"not",
"klass",
":",
"self",
".",
"__class__",
"=",
"klass",
"self",
".",
"_morph",
"(",
")",
"self",
".",
"clear",
"(",
")"
] | 36.428571 | 6.857143 |
def comparison_stats(df, els=None):
"""
Compute comparison stats for test and LAtools data.
Population-level similarity assessed by a Kolmogorov-Smirnov test.
Individual similarity assessed by a pairwise Wilcoxon signed rank test.
Trends in residuals assessed by regression analysis, where significance of
the slope and intercept is determined by t-tests (both relative to zero).
Parameters
----------
df : pandas.DataFrame
A dataframe containing reference ('X/Ca_r'), test user
('X/Ca_t') and LAtools ('X123') data.
els : list
list of elements (names only) to plot.
Returns
-------
pandas.DataFrame
"""
if els is None:
els = ['Li', 'Mg', 'Al', 'P', 'Ti', 'Y', 'La', 'Ce', 'Pr', 'Nd', 'Sm',
'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu', 'Hf', 'Pb', 'Th',
'U']
yl_stats = []
for i, e in enumerate(els):
x = df.loc[:, e + '_rd'].values
yl = df.loc[:, e + '_la'].values
yl_stats.append(summary_stats(x, yl, e))
yl_stats = pd.concat(yl_stats).T
return yl_stats.T | [
"def",
"comparison_stats",
"(",
"df",
",",
"els",
"=",
"None",
")",
":",
"if",
"els",
"is",
"None",
":",
"els",
"=",
"[",
"'Li'",
",",
"'Mg'",
",",
"'Al'",
",",
"'P'",
",",
"'Ti'",
",",
"'Y'",
",",
"'La'",
",",
"'Ce'",
",",
"'Pr'",
",",
"'Nd'",
",",
"'Sm'",
",",
"'Eu'",
",",
"'Gd'",
",",
"'Tb'",
",",
"'Dy'",
",",
"'Ho'",
",",
"'Er'",
",",
"'Tm'",
",",
"'Yb'",
",",
"'Lu'",
",",
"'Hf'",
",",
"'Pb'",
",",
"'Th'",
",",
"'U'",
"]",
"yl_stats",
"=",
"[",
"]",
"for",
"i",
",",
"e",
"in",
"enumerate",
"(",
"els",
")",
":",
"x",
"=",
"df",
".",
"loc",
"[",
":",
",",
"e",
"+",
"'_rd'",
"]",
".",
"values",
"yl",
"=",
"df",
".",
"loc",
"[",
":",
",",
"e",
"+",
"'_la'",
"]",
".",
"values",
"yl_stats",
".",
"append",
"(",
"summary_stats",
"(",
"x",
",",
"yl",
",",
"e",
")",
")",
"yl_stats",
"=",
"pd",
".",
"concat",
"(",
"yl_stats",
")",
".",
"T",
"return",
"yl_stats",
".",
"T"
] | 28.825 | 23.575 |
def create_response(
self, data: dict = None, response: requests.Response = None, errors: list = None
) -> Response:
"""
Helper function to generate a :class:`~notifiers.core.Response` object
:param data: The data that was used to send the notification
:param response: :class:`requests.Response` if exist
:param errors: List of errors if relevant
"""
status = FAILURE_STATUS if errors else SUCCESS_STATUS
return Response(
status=status,
provider=self.name,
data=data,
response=response,
errors=errors,
) | [
"def",
"create_response",
"(",
"self",
",",
"data",
":",
"dict",
"=",
"None",
",",
"response",
":",
"requests",
".",
"Response",
"=",
"None",
",",
"errors",
":",
"list",
"=",
"None",
")",
"->",
"Response",
":",
"status",
"=",
"FAILURE_STATUS",
"if",
"errors",
"else",
"SUCCESS_STATUS",
"return",
"Response",
"(",
"status",
"=",
"status",
",",
"provider",
"=",
"self",
".",
"name",
",",
"data",
"=",
"data",
",",
"response",
"=",
"response",
",",
"errors",
"=",
"errors",
",",
")"
] | 35.111111 | 19.888889 |
def row_to_dict(self, row, allele, alternate_alleles):
"""Return a parsed dictionary for JSON."""
def _variant_sbid(**kwargs):
"""Generates a SolveBio variant ID (SBID)."""
return '{build}-{chromosome}-{start}-{stop}-{allele}'\
.format(**kwargs).upper()
if allele == '.':
# Try to use the ref, if '.' is supplied for alt.
allele = row.REF or allele
genomic_coordinates = {
'build': self.genome_build,
'chromosome': row.CHROM,
'start': row.POS,
'stop': row.POS + len(row.REF) - 1
}
# SolveBio standard variant format
variant_sbid = _variant_sbid(allele=allele,
**genomic_coordinates)
return {
'genomic_coordinates': genomic_coordinates,
'variant': variant_sbid,
'allele': allele,
'row_id': row.ID,
'reference_allele': row.REF,
'alternate_alleles': alternate_alleles,
'info': self._parse_info(row.INFO),
'qual': row.QUAL,
'filter': row.FILTER
} | [
"def",
"row_to_dict",
"(",
"self",
",",
"row",
",",
"allele",
",",
"alternate_alleles",
")",
":",
"def",
"_variant_sbid",
"(",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Generates a SolveBio variant ID (SBID).\"\"\"",
"return",
"'{build}-{chromosome}-{start}-{stop}-{allele}'",
".",
"format",
"(",
"*",
"*",
"kwargs",
")",
".",
"upper",
"(",
")",
"if",
"allele",
"==",
"'.'",
":",
"# Try to use the ref, if '.' is supplied for alt.",
"allele",
"=",
"row",
".",
"REF",
"or",
"allele",
"genomic_coordinates",
"=",
"{",
"'build'",
":",
"self",
".",
"genome_build",
",",
"'chromosome'",
":",
"row",
".",
"CHROM",
",",
"'start'",
":",
"row",
".",
"POS",
",",
"'stop'",
":",
"row",
".",
"POS",
"+",
"len",
"(",
"row",
".",
"REF",
")",
"-",
"1",
"}",
"# SolveBio standard variant format",
"variant_sbid",
"=",
"_variant_sbid",
"(",
"allele",
"=",
"allele",
",",
"*",
"*",
"genomic_coordinates",
")",
"return",
"{",
"'genomic_coordinates'",
":",
"genomic_coordinates",
",",
"'variant'",
":",
"variant_sbid",
",",
"'allele'",
":",
"allele",
",",
"'row_id'",
":",
"row",
".",
"ID",
",",
"'reference_allele'",
":",
"row",
".",
"REF",
",",
"'alternate_alleles'",
":",
"alternate_alleles",
",",
"'info'",
":",
"self",
".",
"_parse_info",
"(",
"row",
".",
"INFO",
")",
",",
"'qual'",
":",
"row",
".",
"QUAL",
",",
"'filter'",
":",
"row",
".",
"FILTER",
"}"
] | 33.617647 | 15 |
def _executemany(self, cursor, query, parameters):
"""The function is mostly useful for commands that update the database:
any result set returned by the query is discarded."""
try:
self._log(query)
cursor.executemany(query, parameters)
except OperationalError as e: # pragma: no cover
logging.error('Error connecting to PostgreSQL on %s, e', self.host, e)
self.close()
raise | [
"def",
"_executemany",
"(",
"self",
",",
"cursor",
",",
"query",
",",
"parameters",
")",
":",
"try",
":",
"self",
".",
"_log",
"(",
"query",
")",
"cursor",
".",
"executemany",
"(",
"query",
",",
"parameters",
")",
"except",
"OperationalError",
"as",
"e",
":",
"# pragma: no cover",
"logging",
".",
"error",
"(",
"'Error connecting to PostgreSQL on %s, e'",
",",
"self",
".",
"host",
",",
"e",
")",
"self",
".",
"close",
"(",
")",
"raise"
] | 46.2 | 15.7 |
def ExpandGroups(path):
"""Performs group expansion on a given path.
For example, given path `foo/{bar,baz}/{quux,norf}` this method will yield
`foo/bar/quux`, `foo/bar/norf`, `foo/baz/quux`, `foo/baz/norf`.
Args:
path: A path to expand.
Yields:
Paths that can be obtained from given path by expanding groups.
"""
precondition.AssertType(path, Text)
chunks = []
offset = 0
for match in PATH_GROUP_REGEX.finditer(path):
chunks.append([path[offset:match.start()]])
chunks.append(match.group("alts").split(","))
offset = match.end()
chunks.append([path[offset:]])
for prod in itertools.product(*chunks):
yield "".join(prod) | [
"def",
"ExpandGroups",
"(",
"path",
")",
":",
"precondition",
".",
"AssertType",
"(",
"path",
",",
"Text",
")",
"chunks",
"=",
"[",
"]",
"offset",
"=",
"0",
"for",
"match",
"in",
"PATH_GROUP_REGEX",
".",
"finditer",
"(",
"path",
")",
":",
"chunks",
".",
"append",
"(",
"[",
"path",
"[",
"offset",
":",
"match",
".",
"start",
"(",
")",
"]",
"]",
")",
"chunks",
".",
"append",
"(",
"match",
".",
"group",
"(",
"\"alts\"",
")",
".",
"split",
"(",
"\",\"",
")",
")",
"offset",
"=",
"match",
".",
"end",
"(",
")",
"chunks",
".",
"append",
"(",
"[",
"path",
"[",
"offset",
":",
"]",
"]",
")",
"for",
"prod",
"in",
"itertools",
".",
"product",
"(",
"*",
"chunks",
")",
":",
"yield",
"\"\"",
".",
"join",
"(",
"prod",
")"
] | 25 | 22.5 |
def regular_generic_msg(hostname, result, oneline, caption):
''' output on the result of a module run that is not command '''
if not oneline:
return "%s | %s >> %s\n" % (hostname, caption, utils.jsonify(result,format=True))
else:
return "%s | %s >> %s\n" % (hostname, caption, utils.jsonify(result)) | [
"def",
"regular_generic_msg",
"(",
"hostname",
",",
"result",
",",
"oneline",
",",
"caption",
")",
":",
"if",
"not",
"oneline",
":",
"return",
"\"%s | %s >> %s\\n\"",
"%",
"(",
"hostname",
",",
"caption",
",",
"utils",
".",
"jsonify",
"(",
"result",
",",
"format",
"=",
"True",
")",
")",
"else",
":",
"return",
"\"%s | %s >> %s\\n\"",
"%",
"(",
"hostname",
",",
"caption",
",",
"utils",
".",
"jsonify",
"(",
"result",
")",
")"
] | 46 | 32.285714 |
def set_headers(self, headers):
"""Set headers"""
for (header, value) in headers.iteritems():
self.set_header(header, value) | [
"def",
"set_headers",
"(",
"self",
",",
"headers",
")",
":",
"for",
"(",
"header",
",",
"value",
")",
"in",
"headers",
".",
"iteritems",
"(",
")",
":",
"self",
".",
"set_header",
"(",
"header",
",",
"value",
")"
] | 37.25 | 5.5 |
def normalize_jr(jr, url=None):
""" normalize JSON reference, also fix
implicit reference of JSON pointer.
input:
- #/definitions/User
- http://test.com/swagger.json#/definitions/User
output:
- http://test.com/swagger.json#/definitions/User
input:
- some_folder/User.json
output:
- http://test.com/some_folder/User.json
"""
if jr == None:
return jr
idx = jr.find('#')
path, jp = (jr[:idx], jr[idx+1:]) if idx != -1 else (jr, None)
if len(path) > 0:
p = six.moves.urllib.parse.urlparse(path)
if p.scheme == '' and url:
p = six.moves.urllib.parse.urlparse(url)
# it's the path of relative file
path = six.moves.urllib.parse.urlunparse(p[:2]+('/'.join([os.path.dirname(p.path), path]),)+p[3:])
path = derelativise_url(path)
else:
path = url
if path:
return ''.join([path, '#', jp]) if jp else path
else:
return '#' + jp | [
"def",
"normalize_jr",
"(",
"jr",
",",
"url",
"=",
"None",
")",
":",
"if",
"jr",
"==",
"None",
":",
"return",
"jr",
"idx",
"=",
"jr",
".",
"find",
"(",
"'#'",
")",
"path",
",",
"jp",
"=",
"(",
"jr",
"[",
":",
"idx",
"]",
",",
"jr",
"[",
"idx",
"+",
"1",
":",
"]",
")",
"if",
"idx",
"!=",
"-",
"1",
"else",
"(",
"jr",
",",
"None",
")",
"if",
"len",
"(",
"path",
")",
">",
"0",
":",
"p",
"=",
"six",
".",
"moves",
".",
"urllib",
".",
"parse",
".",
"urlparse",
"(",
"path",
")",
"if",
"p",
".",
"scheme",
"==",
"''",
"and",
"url",
":",
"p",
"=",
"six",
".",
"moves",
".",
"urllib",
".",
"parse",
".",
"urlparse",
"(",
"url",
")",
"# it's the path of relative file",
"path",
"=",
"six",
".",
"moves",
".",
"urllib",
".",
"parse",
".",
"urlunparse",
"(",
"p",
"[",
":",
"2",
"]",
"+",
"(",
"'/'",
".",
"join",
"(",
"[",
"os",
".",
"path",
".",
"dirname",
"(",
"p",
".",
"path",
")",
",",
"path",
"]",
")",
",",
")",
"+",
"p",
"[",
"3",
":",
"]",
")",
"path",
"=",
"derelativise_url",
"(",
"path",
")",
"else",
":",
"path",
"=",
"url",
"if",
"path",
":",
"return",
"''",
".",
"join",
"(",
"[",
"path",
",",
"'#'",
",",
"jp",
"]",
")",
"if",
"jp",
"else",
"path",
"else",
":",
"return",
"'#'",
"+",
"jp"
] | 27.4 | 21.085714 |
def add_constraints(self):
"""
Set the base constraints of the relation query
"""
if self._constraints:
super(MorphOneOrMany, self).add_constraints()
self._query.where(self._morph_type, self._morph_name) | [
"def",
"add_constraints",
"(",
"self",
")",
":",
"if",
"self",
".",
"_constraints",
":",
"super",
"(",
"MorphOneOrMany",
",",
"self",
")",
".",
"add_constraints",
"(",
")",
"self",
".",
"_query",
".",
"where",
"(",
"self",
".",
"_morph_type",
",",
"self",
".",
"_morph_name",
")"
] | 31.625 | 15.125 |
def set_xlimits_for_all(self, row_column_list=None, min=None, max=None):
"""Set x-axis limits of specified subplots.
:param row_column_list: a list containing (row, column) tuples to
specify the subplots, or None to indicate *all* subplots.
:type row_column_list: list or None
:param min: minimal axis value
:param max: maximum axis value
"""
if row_column_list is None:
self.limits['xmin'] = min
self.limits['xmax'] = max
else:
for row, column in row_column_list:
self.set_xlimits(row, column, min, max) | [
"def",
"set_xlimits_for_all",
"(",
"self",
",",
"row_column_list",
"=",
"None",
",",
"min",
"=",
"None",
",",
"max",
"=",
"None",
")",
":",
"if",
"row_column_list",
"is",
"None",
":",
"self",
".",
"limits",
"[",
"'xmin'",
"]",
"=",
"min",
"self",
".",
"limits",
"[",
"'xmax'",
"]",
"=",
"max",
"else",
":",
"for",
"row",
",",
"column",
"in",
"row_column_list",
":",
"self",
".",
"set_xlimits",
"(",
"row",
",",
"column",
",",
"min",
",",
"max",
")"
] | 38.6875 | 15.0625 |
def time_calls_with_dims(**dims):
"""Decorator to time the execution of the function with dimensions."""
def time_wrapper(fn):
@functools.wraps(fn)
def fn_wrapper(*args, **kwargs):
_timer = timer("%s_calls" %
pyformance.registry.get_qualname(fn), **dims)
with _timer.time(fn=pyformance.registry.get_qualname(fn)):
return fn(*args, **kwargs)
return fn_wrapper
return time_wrapper | [
"def",
"time_calls_with_dims",
"(",
"*",
"*",
"dims",
")",
":",
"def",
"time_wrapper",
"(",
"fn",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"fn",
")",
"def",
"fn_wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_timer",
"=",
"timer",
"(",
"\"%s_calls\"",
"%",
"pyformance",
".",
"registry",
".",
"get_qualname",
"(",
"fn",
")",
",",
"*",
"*",
"dims",
")",
"with",
"_timer",
".",
"time",
"(",
"fn",
"=",
"pyformance",
".",
"registry",
".",
"get_qualname",
"(",
"fn",
")",
")",
":",
"return",
"fn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"fn_wrapper",
"return",
"time_wrapper"
] | 42.818182 | 11.909091 |
def _capitalize_first_letter(word):
"""Return a regex pattern with the first letter.
Accepts both lowercase and uppercase.
"""
if word[0].isalpha():
# These two cases are necessary in order to get a regex pattern
# starting with '[xX]' and not '[Xx]'. This allows to check for
# colliding regex afterwards.
if word[0].isupper():
return "[" + word[0].swapcase() + word[0] + "]" + word[1:]
else:
return "[" + word[0] + word[0].swapcase() + "]" + word[1:]
return word | [
"def",
"_capitalize_first_letter",
"(",
"word",
")",
":",
"if",
"word",
"[",
"0",
"]",
".",
"isalpha",
"(",
")",
":",
"# These two cases are necessary in order to get a regex pattern",
"# starting with '[xX]' and not '[Xx]'. This allows to check for",
"# colliding regex afterwards.",
"if",
"word",
"[",
"0",
"]",
".",
"isupper",
"(",
")",
":",
"return",
"\"[\"",
"+",
"word",
"[",
"0",
"]",
".",
"swapcase",
"(",
")",
"+",
"word",
"[",
"0",
"]",
"+",
"\"]\"",
"+",
"word",
"[",
"1",
":",
"]",
"else",
":",
"return",
"\"[\"",
"+",
"word",
"[",
"0",
"]",
"+",
"word",
"[",
"0",
"]",
".",
"swapcase",
"(",
")",
"+",
"\"]\"",
"+",
"word",
"[",
"1",
":",
"]",
"return",
"word"
] | 38.285714 | 17.785714 |
def get_all_for_project(self, name, **kwargs):
"""
Gets the Build Records produced from the BuildConfiguration by name.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_all_for_project(name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: BuildConfiguration name (required)
:param int page_index: Page index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL query
:return: BuildRecordPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_all_for_project_with_http_info(name, **kwargs)
else:
(data) = self.get_all_for_project_with_http_info(name, **kwargs)
return data | [
"def",
"get_all_for_project",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'callback'",
")",
":",
"return",
"self",
".",
"get_all_for_project_with_http_info",
"(",
"name",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"self",
".",
"get_all_for_project_with_http_info",
"(",
"name",
",",
"*",
"*",
"kwargs",
")",
"return",
"data"
] | 42.344828 | 16.62069 |
def format_screen(strng):
"""Format a string for screen printing.
This removes some latex-type format codes."""
# Paragraph continue
par_re = re.compile(r'\\$',re.MULTILINE)
strng = par_re.sub('',strng)
return strng | [
"def",
"format_screen",
"(",
"strng",
")",
":",
"# Paragraph continue",
"par_re",
"=",
"re",
".",
"compile",
"(",
"r'\\\\$'",
",",
"re",
".",
"MULTILINE",
")",
"strng",
"=",
"par_re",
".",
"sub",
"(",
"''",
",",
"strng",
")",
"return",
"strng"
] | 29.125 | 13.375 |
def load_privatekey(type, buffer, passphrase=None):
"""
Load a private key (PKey) from the string *buffer* encoded with the type
*type*.
:param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
:param buffer: The buffer the key is stored in
:param passphrase: (optional) if encrypted PEM format, this can be
either the passphrase to use, or a callback for
providing the passphrase.
:return: The PKey object
"""
if isinstance(buffer, _text_type):
buffer = buffer.encode("ascii")
bio = _new_mem_buf(buffer)
helper = _PassphraseHelper(type, passphrase)
if type == FILETYPE_PEM:
evp_pkey = _lib.PEM_read_bio_PrivateKey(
bio, _ffi.NULL, helper.callback, helper.callback_args)
helper.raise_if_problem()
elif type == FILETYPE_ASN1:
evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL)
else:
raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
if evp_pkey == _ffi.NULL:
_raise_current_error()
pkey = PKey.__new__(PKey)
pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free)
return pkey | [
"def",
"load_privatekey",
"(",
"type",
",",
"buffer",
",",
"passphrase",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"buffer",
",",
"_text_type",
")",
":",
"buffer",
"=",
"buffer",
".",
"encode",
"(",
"\"ascii\"",
")",
"bio",
"=",
"_new_mem_buf",
"(",
"buffer",
")",
"helper",
"=",
"_PassphraseHelper",
"(",
"type",
",",
"passphrase",
")",
"if",
"type",
"==",
"FILETYPE_PEM",
":",
"evp_pkey",
"=",
"_lib",
".",
"PEM_read_bio_PrivateKey",
"(",
"bio",
",",
"_ffi",
".",
"NULL",
",",
"helper",
".",
"callback",
",",
"helper",
".",
"callback_args",
")",
"helper",
".",
"raise_if_problem",
"(",
")",
"elif",
"type",
"==",
"FILETYPE_ASN1",
":",
"evp_pkey",
"=",
"_lib",
".",
"d2i_PrivateKey_bio",
"(",
"bio",
",",
"_ffi",
".",
"NULL",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"type argument must be FILETYPE_PEM or FILETYPE_ASN1\"",
")",
"if",
"evp_pkey",
"==",
"_ffi",
".",
"NULL",
":",
"_raise_current_error",
"(",
")",
"pkey",
"=",
"PKey",
".",
"__new__",
"(",
"PKey",
")",
"pkey",
".",
"_pkey",
"=",
"_ffi",
".",
"gc",
"(",
"evp_pkey",
",",
"_lib",
".",
"EVP_PKEY_free",
")",
"return",
"pkey"
] | 33.794118 | 19.852941 |
def register_model(self, model, bundle):
"""
Registers a bundle as the main bundle for a
model. Used when we need to lookup urls by
a model.
"""
if model in self._model_registry:
raise AlreadyRegistered('The model %s is already registered' \
% model)
if bundle.url_params:
raise Exception("A primary model bundle cannot have dynamic \
url_parameters")
self._model_registry[model] = bundle | [
"def",
"register_model",
"(",
"self",
",",
"model",
",",
"bundle",
")",
":",
"if",
"model",
"in",
"self",
".",
"_model_registry",
":",
"raise",
"AlreadyRegistered",
"(",
"'The model %s is already registered'",
"%",
"model",
")",
"if",
"bundle",
".",
"url_params",
":",
"raise",
"Exception",
"(",
"\"A primary model bundle cannot have dynamic \\\n url_parameters\"",
")",
"self",
".",
"_model_registry",
"[",
"model",
"]",
"=",
"bundle"
] | 35.266667 | 14.466667 |
def _get_diff(self):
"""Get a diff between running config and a proposed file."""
diff = []
self._create_sot_file()
command = ('show diff rollback-patch file {0} file {1}'.format(
'sot_file', self.replace_file.split('/')[-1]))
diff_out = self.device.send_command(command)
try:
diff_out = diff_out.split(
'#Generating Rollback Patch')[1].replace(
'Rollback Patch is Empty', '').strip()
for line in diff_out.splitlines():
if line:
if line[0].strip() != '!' and line[0].strip() != '.':
diff.append(line.rstrip(' '))
except (AttributeError, KeyError):
raise ReplaceConfigException(
'Could not calculate diff. It\'s possible the given file doesn\'t exist.')
return '\n'.join(diff) | [
"def",
"_get_diff",
"(",
"self",
")",
":",
"diff",
"=",
"[",
"]",
"self",
".",
"_create_sot_file",
"(",
")",
"command",
"=",
"(",
"'show diff rollback-patch file {0} file {1}'",
".",
"format",
"(",
"'sot_file'",
",",
"self",
".",
"replace_file",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
")",
")",
"diff_out",
"=",
"self",
".",
"device",
".",
"send_command",
"(",
"command",
")",
"try",
":",
"diff_out",
"=",
"diff_out",
".",
"split",
"(",
"'#Generating Rollback Patch'",
")",
"[",
"1",
"]",
".",
"replace",
"(",
"'Rollback Patch is Empty'",
",",
"''",
")",
".",
"strip",
"(",
")",
"for",
"line",
"in",
"diff_out",
".",
"splitlines",
"(",
")",
":",
"if",
"line",
":",
"if",
"line",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"!=",
"'!'",
"and",
"line",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"!=",
"'.'",
":",
"diff",
".",
"append",
"(",
"line",
".",
"rstrip",
"(",
"' '",
")",
")",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"raise",
"ReplaceConfigException",
"(",
"'Could not calculate diff. It\\'s possible the given file doesn\\'t exist.'",
")",
"return",
"'\\n'",
".",
"join",
"(",
"diff",
")"
] | 46.736842 | 16.631579 |
def guesser(types=GUESS_TYPES, strict=False):
"""Create a type guesser for multiple values."""
return TypeGuesser(types=types, strict=strict) | [
"def",
"guesser",
"(",
"types",
"=",
"GUESS_TYPES",
",",
"strict",
"=",
"False",
")",
":",
"return",
"TypeGuesser",
"(",
"types",
"=",
"types",
",",
"strict",
"=",
"strict",
")"
] | 49 | 5 |
def _expand_paths_itr(paths, marker='*'):
"""Iterator version of :func:`expand_paths`.
"""
for path in paths:
if is_path(path):
if marker in path: # glob path pattern
for ppath in sglob(path):
yield ppath
else:
yield path # a simple file path
elif is_path_obj(path):
if marker in path.as_posix():
for ppath in sglob(path.as_posix()):
yield normpath(ppath)
else:
yield normpath(path.as_posix())
elif is_ioinfo(path):
yield path.path
else: # A file or file-like object
yield path | [
"def",
"_expand_paths_itr",
"(",
"paths",
",",
"marker",
"=",
"'*'",
")",
":",
"for",
"path",
"in",
"paths",
":",
"if",
"is_path",
"(",
"path",
")",
":",
"if",
"marker",
"in",
"path",
":",
"# glob path pattern",
"for",
"ppath",
"in",
"sglob",
"(",
"path",
")",
":",
"yield",
"ppath",
"else",
":",
"yield",
"path",
"# a simple file path",
"elif",
"is_path_obj",
"(",
"path",
")",
":",
"if",
"marker",
"in",
"path",
".",
"as_posix",
"(",
")",
":",
"for",
"ppath",
"in",
"sglob",
"(",
"path",
".",
"as_posix",
"(",
")",
")",
":",
"yield",
"normpath",
"(",
"ppath",
")",
"else",
":",
"yield",
"normpath",
"(",
"path",
".",
"as_posix",
"(",
")",
")",
"elif",
"is_ioinfo",
"(",
"path",
")",
":",
"yield",
"path",
".",
"path",
"else",
":",
"# A file or file-like object",
"yield",
"path"
] | 34.05 | 9.2 |
def failures():
"""Show any unexpected failures"""
if not HAVE_BIN_LIBS:
click.echo("missing required binary libs (lz4, msgpack)")
return
q = Queue('failed', connection=worker.connection)
for i in q.get_job_ids():
j = q.job_class.fetch(i, connection=q.connection)
click.echo("%s on %s" % (j.func_name, j.origin))
if not j.func_name.endswith('process_keyset'):
click.echo("params %s %s" % (j._args, j._kwargs))
click.echo(j.exc_info) | [
"def",
"failures",
"(",
")",
":",
"if",
"not",
"HAVE_BIN_LIBS",
":",
"click",
".",
"echo",
"(",
"\"missing required binary libs (lz4, msgpack)\"",
")",
"return",
"q",
"=",
"Queue",
"(",
"'failed'",
",",
"connection",
"=",
"worker",
".",
"connection",
")",
"for",
"i",
"in",
"q",
".",
"get_job_ids",
"(",
")",
":",
"j",
"=",
"q",
".",
"job_class",
".",
"fetch",
"(",
"i",
",",
"connection",
"=",
"q",
".",
"connection",
")",
"click",
".",
"echo",
"(",
"\"%s on %s\"",
"%",
"(",
"j",
".",
"func_name",
",",
"j",
".",
"origin",
")",
")",
"if",
"not",
"j",
".",
"func_name",
".",
"endswith",
"(",
"'process_keyset'",
")",
":",
"click",
".",
"echo",
"(",
"\"params %s %s\"",
"%",
"(",
"j",
".",
"_args",
",",
"j",
".",
"_kwargs",
")",
")",
"click",
".",
"echo",
"(",
"j",
".",
"exc_info",
")"
] | 38.230769 | 17.923077 |
def search(self, keyword, types=[], terr=KKBOXTerritory.TAIWAN):
'''
Searches within KKBOX's database.
:param keyword: the keyword.
:type keyword: str
:param types: the search types.
:return: list
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#search_1`.
'''
url = 'https://api.kkbox.com/v1.1/search'
url += '?' + url_parse.urlencode({'q': keyword, 'territory': terr})
if len(types) > 0:
url += '&type=' + ','.join(types)
return self.http._post_data(url, None, self.http._headers_with_access_token()) | [
"def",
"search",
"(",
"self",
",",
"keyword",
",",
"types",
"=",
"[",
"]",
",",
"terr",
"=",
"KKBOXTerritory",
".",
"TAIWAN",
")",
":",
"url",
"=",
"'https://api.kkbox.com/v1.1/search'",
"url",
"+=",
"'?'",
"+",
"url_parse",
".",
"urlencode",
"(",
"{",
"'q'",
":",
"keyword",
",",
"'territory'",
":",
"terr",
"}",
")",
"if",
"len",
"(",
"types",
")",
">",
"0",
":",
"url",
"+=",
"'&type='",
"+",
"','",
".",
"join",
"(",
"types",
")",
"return",
"self",
".",
"http",
".",
"_post_data",
"(",
"url",
",",
"None",
",",
"self",
".",
"http",
".",
"_headers_with_access_token",
"(",
")",
")"
] | 36.263158 | 19.421053 |
def sig_handler(self, sig, _):
""" Handle the signal sent to the process
:param sig: Signal set to the process
:param _: Frame is not being used
"""
import tornado.ioloop
from tornado.process import task_id
tid = task_id()
pid = os.getpid()
if tid is None:
logger.warning("main process (pid %s) caught signal: %s" %
(pid, sig))
else:
logger.warning("child %s (pid %s) caught signal: %s" %
(tid, pid, sig))
tornado.ioloop.IOLoop.current().add_callback(self.shutdown) | [
"def",
"sig_handler",
"(",
"self",
",",
"sig",
",",
"_",
")",
":",
"import",
"tornado",
".",
"ioloop",
"from",
"tornado",
".",
"process",
"import",
"task_id",
"tid",
"=",
"task_id",
"(",
")",
"pid",
"=",
"os",
".",
"getpid",
"(",
")",
"if",
"tid",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"\"main process (pid %s) caught signal: %s\"",
"%",
"(",
"pid",
",",
"sig",
")",
")",
"else",
":",
"logger",
".",
"warning",
"(",
"\"child %s (pid %s) caught signal: %s\"",
"%",
"(",
"tid",
",",
"pid",
",",
"sig",
")",
")",
"tornado",
".",
"ioloop",
".",
"IOLoop",
".",
"current",
"(",
")",
".",
"add_callback",
"(",
"self",
".",
"shutdown",
")"
] | 38.5625 | 12.1875 |
def export(self, top=True):
"""Exports object to its string representation.
Args:
top (bool): if True appends `internal_name` before values.
All non list objects should be exported with value top=True,
all list objects, that are embedded in as fields inlist objects
should be exported with `top`=False
Returns:
str: The objects string representation
"""
out = []
if top:
out.append(self._internal_name)
out.append(self._to_str(self.title_of_design_condition))
out.append(self._to_str(self.unkown_field))
out.append(self._to_str(self.design_stat_heating))
out.append(self._to_str(self.coldestmonth))
out.append(self._to_str(self.db996))
out.append(self._to_str(self.db990))
out.append(self._to_str(self.dp996))
out.append(self._to_str(self.hr_dp996))
out.append(self._to_str(self.db_dp996))
out.append(self._to_str(self.dp990))
out.append(self._to_str(self.hr_dp990))
out.append(self._to_str(self.db_dp990))
out.append(self._to_str(self.ws004c))
out.append(self._to_str(self.db_ws004c))
out.append(self._to_str(self.ws010c))
out.append(self._to_str(self.db_ws010c))
out.append(self._to_str(self.ws_db996))
out.append(self._to_str(self.wd_db996))
out.append(self._to_str(self.design_stat_cooling))
out.append(self._to_str(self.hottestmonth))
out.append(self._to_str(self.dbr))
out.append(self._to_str(self.db004))
out.append(self._to_str(self.wb_db004))
out.append(self._to_str(self.db010))
out.append(self._to_str(self.wb_db010))
out.append(self._to_str(self.db020))
out.append(self._to_str(self.wb_db020))
out.append(self._to_str(self.wb004))
out.append(self._to_str(self.db_wb004))
out.append(self._to_str(self.wb010))
out.append(self._to_str(self.db_wb010))
out.append(self._to_str(self.wb020))
out.append(self._to_str(self.db_wb020))
out.append(self._to_str(self.ws_db004))
out.append(self._to_str(self.wd_db004))
out.append(self._to_str(self.dp004))
out.append(self._to_str(self.hr_dp004))
out.append(self._to_str(self.db_dp004))
out.append(self._to_str(self.dp010))
out.append(self._to_str(self.hr_dp010))
out.append(self._to_str(self.db_dp010))
out.append(self._to_str(self.dp020))
out.append(self._to_str(self.hr_dp020))
out.append(self._to_str(self.db_dp020))
out.append(self._to_str(self.en004))
out.append(self._to_str(self.db_en004))
out.append(self._to_str(self.en010))
out.append(self._to_str(self.db_en010))
out.append(self._to_str(self.en020))
out.append(self._to_str(self.db_en020))
out.append(self._to_str(self.hrs_84_and_db12_8_or_20_6))
out.append(self._to_str(self.design_stat_extremes))
out.append(self._to_str(self.ws010))
out.append(self._to_str(self.ws025))
out.append(self._to_str(self.ws050))
out.append(self._to_str(self.wbmax))
out.append(self._to_str(self.dbmin_mean))
out.append(self._to_str(self.dbmax_mean))
out.append(self._to_str(self.dbmin_stddev))
out.append(self._to_str(self.dbmax_stddev))
out.append(self._to_str(self.dbmin05years))
out.append(self._to_str(self.dbmax05years))
out.append(self._to_str(self.dbmin10years))
out.append(self._to_str(self.dbmax10years))
out.append(self._to_str(self.dbmin20years))
out.append(self._to_str(self.dbmax20years))
out.append(self._to_str(self.dbmin50years))
out.append(self._to_str(self.dbmax50years))
return ",".join(out) | [
"def",
"export",
"(",
"self",
",",
"top",
"=",
"True",
")",
":",
"out",
"=",
"[",
"]",
"if",
"top",
":",
"out",
".",
"append",
"(",
"self",
".",
"_internal_name",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"title_of_design_condition",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"unkown_field",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"design_stat_heating",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"coldestmonth",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db996",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db990",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dp996",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"hr_dp996",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_dp996",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dp990",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"hr_dp990",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_dp990",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"ws004c",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_ws004c",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"ws010c",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_ws010c",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"ws_db996",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wd_db996",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"design_stat_cooling",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"hottestmonth",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbr",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wb_db004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wb_db010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wb_db020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wb004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_wb004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wb010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_wb010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wb020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_wb020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"ws_db004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wd_db004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dp004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"hr_dp004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_dp004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dp010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"hr_dp010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_dp010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dp020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"hr_dp020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_dp020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"en004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_en004",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"en010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_en010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"en020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"db_en020",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"hrs_84_and_db12_8_or_20_6",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"design_stat_extremes",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"ws010",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"ws025",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"ws050",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"wbmax",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmin_mean",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmax_mean",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmin_stddev",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmax_stddev",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmin05years",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmax05years",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmin10years",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmax10years",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmin20years",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmax20years",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmin50years",
")",
")",
"out",
".",
"append",
"(",
"self",
".",
"_to_str",
"(",
"self",
".",
"dbmax50years",
")",
")",
"return",
"\",\"",
".",
"join",
"(",
"out",
")"
] | 44.752941 | 10.682353 |
def get_contents_as_string(self, headers=None,
                           cb=None, num_cb=10,
                           torrent=False,
                           version_id=None,
                           response_headers=None):
    """Fetch this key's object from S3 and return its payload as a string.

    Thin convenience wrapper around ``get_contents_to_file`` that collects
    the download into an in-memory buffer; see that method for full
    parameter documentation.

    :type headers: dict
    :param headers: Additional headers to send with the request.

    :type cb: function
    :param cb: Optional progress callback taking (bytes_transmitted,
        total_size).

    :type num_cb: int
    :param num_cb: Maximum number of times the callback is invoked.

    :type torrent: bool
    :param torrent: If True, return the contents of a torrent file instead.

    :type version_id: str
    :param version_id: Specific object version to retrieve.

    :type response_headers: dict
    :param response_headers: Headers/values overriding those stored with
        the object in the response.  See http://goo.gl/EWOPb for details.

    :rtype: string
    :returns: The contents of the file as a string
    """
    buf = StringIO.StringIO()
    self.get_contents_to_file(buf, headers, cb, num_cb, torrent=torrent,
                              version_id=version_id,
                              response_headers=response_headers)
    return buf.getvalue()
"def",
"get_contents_as_string",
"(",
"self",
",",
"headers",
"=",
"None",
",",
"cb",
"=",
"None",
",",
"num_cb",
"=",
"10",
",",
"torrent",
"=",
"False",
",",
"version_id",
"=",
"None",
",",
"response_headers",
"=",
"None",
")",
":",
"fp",
"=",
"StringIO",
".",
"StringIO",
"(",
")",
"self",
".",
"get_contents_to_file",
"(",
"fp",
",",
"headers",
",",
"cb",
",",
"num_cb",
",",
"torrent",
"=",
"torrent",
",",
"version_id",
"=",
"version_id",
",",
"response_headers",
"=",
"response_headers",
")",
"return",
"fp",
".",
"getvalue",
"(",
")"
] | 45.489362 | 22.297872 |
def _get_rnn_layer(mode, num_layers, input_size, hidden_size, dropout, weight_dropout):
"""create rnn layer given specs"""
if mode == 'rnn_relu':
rnn_block = functools.partial(rnn.RNN, activation='relu')
elif mode == 'rnn_tanh':
rnn_block = functools.partial(rnn.RNN, activation='tanh')
elif mode == 'lstm':
rnn_block = rnn.LSTM
elif mode == 'gru':
rnn_block = rnn.GRU
block = rnn_block(hidden_size, num_layers, dropout=dropout,
input_size=input_size)
if weight_dropout:
apply_weight_drop(block, '.*h2h_weight', rate=weight_dropout)
return block | [
"def",
"_get_rnn_layer",
"(",
"mode",
",",
"num_layers",
",",
"input_size",
",",
"hidden_size",
",",
"dropout",
",",
"weight_dropout",
")",
":",
"if",
"mode",
"==",
"'rnn_relu'",
":",
"rnn_block",
"=",
"functools",
".",
"partial",
"(",
"rnn",
".",
"RNN",
",",
"activation",
"=",
"'relu'",
")",
"elif",
"mode",
"==",
"'rnn_tanh'",
":",
"rnn_block",
"=",
"functools",
".",
"partial",
"(",
"rnn",
".",
"RNN",
",",
"activation",
"=",
"'tanh'",
")",
"elif",
"mode",
"==",
"'lstm'",
":",
"rnn_block",
"=",
"rnn",
".",
"LSTM",
"elif",
"mode",
"==",
"'gru'",
":",
"rnn_block",
"=",
"rnn",
".",
"GRU",
"block",
"=",
"rnn_block",
"(",
"hidden_size",
",",
"num_layers",
",",
"dropout",
"=",
"dropout",
",",
"input_size",
"=",
"input_size",
")",
"if",
"weight_dropout",
":",
"apply_weight_drop",
"(",
"block",
",",
"'.*h2h_weight'",
",",
"rate",
"=",
"weight_dropout",
")",
"return",
"block"
] | 34.722222 | 22.166667 |
def show_firewall_policy(self, firewall_policy, **_params):
    """Return details of a single firewall policy.

    Any extra keyword arguments are forwarded as query parameters.
    """
    path = self.firewall_policy_path % (firewall_policy)
    return self.get(path, params=_params)
"def",
"show_firewall_policy",
"(",
"self",
",",
"firewall_policy",
",",
"*",
"*",
"_params",
")",
":",
"return",
"self",
".",
"get",
"(",
"self",
".",
"firewall_policy_path",
"%",
"(",
"firewall_policy",
")",
",",
"params",
"=",
"_params",
")"
] | 57.75 | 12.5 |
def Query(r, what, fields, qfilter=None):
    """
    Retrieves information about resources.

    @type what: string
    @param what: Resource name, one of L{constants.QR_VIA_RAPI}

    @type fields: list of string
    @param fields: Requested fields

    @type qfilter: None or list
    @param qfilter: Query filter; stored under both the ``qfilter``
      and legacy ``filter`` keys when given

    @rtype: string
    @return: job id
    """
    payload = {"fields": fields}
    if qfilter is not None:
        # Both keys carry the same filter for API compatibility.
        payload["qfilter"] = qfilter
        payload["filter"] = qfilter
    return r.request("put", "/2/query/%s" % what, content=payload)
"def",
"Query",
"(",
"r",
",",
"what",
",",
"fields",
",",
"qfilter",
"=",
"None",
")",
":",
"body",
"=",
"{",
"\"fields\"",
":",
"fields",
",",
"}",
"if",
"qfilter",
"is",
"not",
"None",
":",
"body",
"[",
"\"qfilter\"",
"]",
"=",
"body",
"[",
"\"filter\"",
"]",
"=",
"qfilter",
"return",
"r",
".",
"request",
"(",
"\"put\"",
",",
"\"/2/query/%s\"",
"%",
"what",
",",
"content",
"=",
"body",
")"
] | 23.086957 | 19.173913 |
def set_label(self, value, callb=None):
    """Set the device label via a SetLabel message.

    Sends SetLabel and requests an ACK; the default response handler
    caches the new value locally. Labels longer than 32 characters are
    truncated to the protocol limit.

    :param value: The new label
    :type value: str
    :param callb: Optional callable invoked with the response in addition
        to the caching handler.
    :type callb: callable
    :returns: None
    :rtype: None
    """
    # Protocol limits labels to 32 characters.
    if len(value) > 32:
        value = value[:32]
    cache_cb = partial(self.resp_set_label, label=value)
    payload = {"label": value}
    if callb:
        # Run both the caching handler and the user callback on response.
        self.req_with_ack(SetLabel, payload,
                          lambda x, y: (cache_cb(y), callb(x, y)))
    else:
        self.req_with_ack(SetLabel, payload, lambda x, y: cache_cb(y))
"def",
"set_label",
"(",
"self",
",",
"value",
",",
"callb",
"=",
"None",
")",
":",
"if",
"len",
"(",
"value",
")",
">",
"32",
":",
"value",
"=",
"value",
"[",
":",
"32",
"]",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_label",
",",
"label",
"=",
"value",
")",
"if",
"callb",
":",
"self",
".",
"req_with_ack",
"(",
"SetLabel",
",",
"{",
"\"label\"",
":",
"value",
"}",
",",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
")",
"else",
":",
"self",
".",
"req_with_ack",
"(",
"SetLabel",
",",
"{",
"\"label\"",
":",
"value",
"}",
",",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
")"
] | 42.904762 | 22.857143 |
def check_ups_alarms_present(the_session, the_helper, the_snmp_value):
    """
    OID .1.3.6.1.2.1.33.1.6.1.0
    MIB excerpt
    The present number of active alarm conditions.

    Critical when any alarm is active, OK otherwise.
    """
    if the_snmp_value == '0':
        the_helper.add_status(pynag.Plugins.ok)
    else:
        the_helper.add_status(pynag.Plugins.critical)
    the_helper.set_summary("{} active alarms ".format(the_snmp_value))
"def",
"check_ups_alarms_present",
"(",
"the_session",
",",
"the_helper",
",",
"the_snmp_value",
")",
":",
"if",
"the_snmp_value",
"!=",
"'0'",
":",
"the_helper",
".",
"add_status",
"(",
"pynag",
".",
"Plugins",
".",
"critical",
")",
"else",
":",
"the_helper",
".",
"add_status",
"(",
"pynag",
".",
"Plugins",
".",
"ok",
")",
"the_helper",
".",
"set_summary",
"(",
"\"{} active alarms \"",
".",
"format",
"(",
"the_snmp_value",
")",
")"
] | 35.272727 | 15.090909 |
def init_duts(self, args):  # pylint: disable=too-many-locals,too-many-branches
    """
    Initializes duts of different types based on configuration provided by AllocationContext.
    Able to do the initialization of duts in parallel, if --parallel_flash was provided.

    :param args: Argument Namespace object
    :return: list of initialized duts.
    :raises AllocationError: if any dut fails to initialize.
    """
    # TODO: Split into smaller chunks to reduce complexity.
    threads = []
    abort_queue = Queue()

    def thread_wrapper(*thread_args, **thread_kwargs):
        """
        Run initialization function for dut

        :param thread_args: arguments to pass to the function
        :param thread_kwargs: keyword arguments, (func: callable, abort_queue: Queue)
        :return: Result of func(*thread_args)
        """
        # pylint: disable=broad-except
        try:
            return thread_kwargs["func"](*thread_args)
        except Exception as error:
            # Record (dut index, error) so the main thread can report it.
            thread_kwargs["abort_queue"].put((thread_args[2], error))

    for index, dut_conf in enumerate(self._allocation_contexts):
        dut_type = dut_conf.get("type")
        func = self.get_dut_init_function(dut_type)
        if func is None:
            continue

        threads.append(((self, dut_conf.get_alloc_data().get_requirements(), index + 1, args),
                        {"func": func, "abort_queue": abort_queue}))

    # BUGFIX: bind pool before the try block. Previously, if ThreadPool()
    # raised (e.g. ValueError for zero threads) the finally clause's
    # `if pool:` raised NameError, masking the real error.
    pool = None
    try:
        thread_limit = len(threads) if args.parallel_flash else 1
        pool = ThreadPool(thread_limit)

        async_results = [pool.apply_async(func=thread_wrapper,
                                          args=t[0], kwds=t[1])
                         for t in threads]

        # Wait for resources to be ready.
        [res.get() for res in async_results]  # pylint: disable=expression-not-assigned
        pool.close()
        pool.join()

        if not abort_queue.empty():
            msg = "Dut Initialization failed, reason(s):"
            while not abort_queue.empty():
                dut_index, error = abort_queue.get()
                msg = "{}\nDUT index {} - {}".format(msg, dut_index, error)
            raise AllocationError(msg)

        # Sort duts to same order as in dut_conf_list
        self.duts.sort(key=lambda d: d.index)
        self.dutinformations.sort(key=lambda d: d.index)
    except KeyboardInterrupt:
        msg = "Received keyboard interrupt, waiting for flashing to finish"
        self.logger.info(msg)
        for dut in self.duts:
            dut.close_dut(False)
            dut.close_connection()
            if hasattr(dut, "release"):
                dut.release()
            dut = None
        raise
    except RuntimeError:
        self.logger.exception("RuntimeError during flashing")
    # ValueError is raised if ThreadPool is tried to initiate with
    # zero threads.
    except ValueError:
        self.logger.exception("No devices allocated")
        raise AllocationError("Dut Initialization failed!")
    except (DutConnectionError, TypeError):
        for dut in self.duts:
            if hasattr(dut, "release"):
                dut.release()
        raise AllocationError("Dut Initialization failed!")
    finally:
        if pool:
            pool.close()
            pool.join()

    self.logger.debug("Allocated following duts:")
    for dut in self.duts:
        dut.print_info()
    return self.duts
"def",
"init_duts",
"(",
"self",
",",
"args",
")",
":",
"# pylint: disable=too-many-locals,too-many-branches",
"# TODO: Split into smaller chunks to reduce complexity.",
"threads",
"=",
"[",
"]",
"abort_queue",
"=",
"Queue",
"(",
")",
"def",
"thread_wrapper",
"(",
"*",
"thread_args",
",",
"*",
"*",
"thread_kwargs",
")",
":",
"\"\"\"\n Run initialization function for dut\n\n :param thread_args: arguments to pass to the function\n :param thread_kwargs: keyword arguments, (func: callable, abort_queue: Queue)\n :return: Result of func(*thread_args)\n \"\"\"",
"# pylint: disable=broad-except",
"try",
":",
"return",
"thread_kwargs",
"[",
"\"func\"",
"]",
"(",
"*",
"thread_args",
")",
"except",
"Exception",
"as",
"error",
":",
"thread_kwargs",
"[",
"\"abort_queue\"",
"]",
".",
"put",
"(",
"(",
"thread_args",
"[",
"2",
"]",
",",
"error",
")",
")",
"for",
"index",
",",
"dut_conf",
"in",
"enumerate",
"(",
"self",
".",
"_allocation_contexts",
")",
":",
"dut_type",
"=",
"dut_conf",
".",
"get",
"(",
"\"type\"",
")",
"func",
"=",
"self",
".",
"get_dut_init_function",
"(",
"dut_type",
")",
"if",
"func",
"is",
"None",
":",
"continue",
"threads",
".",
"append",
"(",
"(",
"(",
"self",
",",
"dut_conf",
".",
"get_alloc_data",
"(",
")",
".",
"get_requirements",
"(",
")",
",",
"index",
"+",
"1",
",",
"args",
")",
",",
"{",
"\"func\"",
":",
"func",
",",
"\"abort_queue\"",
":",
"abort_queue",
"}",
")",
")",
"try",
":",
"thread_limit",
"=",
"len",
"(",
"threads",
")",
"if",
"args",
".",
"parallel_flash",
"else",
"1",
"pool",
"=",
"ThreadPool",
"(",
"thread_limit",
")",
"async_results",
"=",
"[",
"pool",
".",
"apply_async",
"(",
"func",
"=",
"thread_wrapper",
",",
"args",
"=",
"t",
"[",
"0",
"]",
",",
"kwds",
"=",
"t",
"[",
"1",
"]",
")",
"for",
"t",
"in",
"threads",
"]",
"# Wait for resources to be ready.",
"[",
"res",
".",
"get",
"(",
")",
"for",
"res",
"in",
"async_results",
"]",
"# pylint: disable=expression-not-assigned",
"pool",
".",
"close",
"(",
")",
"pool",
".",
"join",
"(",
")",
"if",
"not",
"abort_queue",
".",
"empty",
"(",
")",
":",
"msg",
"=",
"\"Dut Initialization failed, reason(s):\"",
"while",
"not",
"abort_queue",
".",
"empty",
"(",
")",
":",
"dut_index",
",",
"error",
"=",
"abort_queue",
".",
"get",
"(",
")",
"msg",
"=",
"\"{}\\nDUT index {} - {}\"",
".",
"format",
"(",
"msg",
",",
"dut_index",
",",
"error",
")",
"raise",
"AllocationError",
"(",
"msg",
")",
"# Sort duts to same order as in dut_conf_list",
"self",
".",
"duts",
".",
"sort",
"(",
"key",
"=",
"lambda",
"d",
":",
"d",
".",
"index",
")",
"self",
".",
"dutinformations",
".",
"sort",
"(",
"key",
"=",
"lambda",
"d",
":",
"d",
".",
"index",
")",
"except",
"KeyboardInterrupt",
":",
"msg",
"=",
"\"Received keyboard interrupt, waiting for flashing to finish\"",
"self",
".",
"logger",
".",
"info",
"(",
"msg",
")",
"for",
"dut",
"in",
"self",
".",
"duts",
":",
"dut",
".",
"close_dut",
"(",
"False",
")",
"dut",
".",
"close_connection",
"(",
")",
"if",
"hasattr",
"(",
"dut",
",",
"\"release\"",
")",
":",
"dut",
".",
"release",
"(",
")",
"dut",
"=",
"None",
"raise",
"except",
"RuntimeError",
":",
"self",
".",
"logger",
".",
"exception",
"(",
"\"RuntimeError during flashing\"",
")",
"# ValueError is raised if ThreadPool is tried to initiate with",
"# zero threads.",
"except",
"ValueError",
":",
"self",
".",
"logger",
".",
"exception",
"(",
"\"No devices allocated\"",
")",
"raise",
"AllocationError",
"(",
"\"Dut Initialization failed!\"",
")",
"except",
"(",
"DutConnectionError",
",",
"TypeError",
")",
":",
"for",
"dut",
"in",
"self",
".",
"duts",
":",
"if",
"hasattr",
"(",
"dut",
",",
"\"release\"",
")",
":",
"dut",
".",
"release",
"(",
")",
"raise",
"AllocationError",
"(",
"\"Dut Initialization failed!\"",
")",
"finally",
":",
"if",
"pool",
":",
"pool",
".",
"close",
"(",
")",
"pool",
".",
"join",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Allocated following duts:\"",
")",
"for",
"dut",
"in",
"self",
".",
"duts",
":",
"dut",
".",
"print_info",
"(",
")",
"return",
"self",
".",
"duts"
] | 39.622222 | 20 |
def _decompress_data(self, data):
'''Decompress the given data and return the uncompressed data.'''
if self._decompressor:
try:
return self._decompressor.decompress(data)
except zlib.error as error:
raise ProtocolError(
'zlib error: {0}.'.format(error)
) from error
else:
return data | [
"def",
"_decompress_data",
"(",
"self",
",",
"data",
")",
":",
"if",
"self",
".",
"_decompressor",
":",
"try",
":",
"return",
"self",
".",
"_decompressor",
".",
"decompress",
"(",
"data",
")",
"except",
"zlib",
".",
"error",
"as",
"error",
":",
"raise",
"ProtocolError",
"(",
"'zlib error: {0}.'",
".",
"format",
"(",
"error",
")",
")",
"from",
"error",
"else",
":",
"return",
"data"
] | 36.454545 | 15 |
def capture(self, instance_id, name, additional_disks=False, notes=None):
    """Capture one or all disks from a VS to a SoftLayer image.

    Parameters set to None will be ignored and not attempted to be updated.

    :param integer instance_id: the instance ID to edit
    :param string name: name assigned to the image
    :param bool additional_disks: set to true to include all additional
                                  attached storage devices
    :param string notes: notes about this particular image

    :returns: dictionary -- information about the capture transaction.

    Example::

        name = "Testing Images"
        notes = "Some notes about this image"
        result = mgr.capture(instance_id=12345, name=name, notes=notes)
    """
    # Fetch only the block-device fields needed to decide which disks
    # are eligible for capture.
    vsi = self.client.call(
        'Virtual_Guest',
        'getObject',
        id=instance_id,
        mask="""id,
                blockDevices[id,device,mountType,
                diskImage[id,metadataFlag,type[keyName]]]""")

    disks_to_capture = []
    for block_device in vsi['blockDevices']:

        # We never want metadata disks
        if utils.lookup(block_device, 'diskImage', 'metadataFlag'):
            continue

        # We never want swap devices
        type_name = utils.lookup(block_device, 'diskImage', 'type', 'keyName')
        if type_name == 'SWAP':
            continue

        # We never want CD images
        if block_device['mountType'] == 'CD':
            continue

        # Only use the first block device if we don't want additional disks
        if not additional_disks and str(block_device['device']) != '0':
            continue

        disks_to_capture.append(block_device)

    # Kick off the image-capture transaction with the filtered disk list.
    return self.guest.createArchiveTransaction(
        name, disks_to_capture, notes, id=instance_id)
"def",
"capture",
"(",
"self",
",",
"instance_id",
",",
"name",
",",
"additional_disks",
"=",
"False",
",",
"notes",
"=",
"None",
")",
":",
"vsi",
"=",
"self",
".",
"client",
".",
"call",
"(",
"'Virtual_Guest'",
",",
"'getObject'",
",",
"id",
"=",
"instance_id",
",",
"mask",
"=",
"\"\"\"id,\n blockDevices[id,device,mountType,\n diskImage[id,metadataFlag,type[keyName]]]\"\"\"",
")",
"disks_to_capture",
"=",
"[",
"]",
"for",
"block_device",
"in",
"vsi",
"[",
"'blockDevices'",
"]",
":",
"# We never want metadata disks",
"if",
"utils",
".",
"lookup",
"(",
"block_device",
",",
"'diskImage'",
",",
"'metadataFlag'",
")",
":",
"continue",
"# We never want swap devices",
"type_name",
"=",
"utils",
".",
"lookup",
"(",
"block_device",
",",
"'diskImage'",
",",
"'type'",
",",
"'keyName'",
")",
"if",
"type_name",
"==",
"'SWAP'",
":",
"continue",
"# We never want CD images",
"if",
"block_device",
"[",
"'mountType'",
"]",
"==",
"'CD'",
":",
"continue",
"# Only use the first block device if we don't want additional disks",
"if",
"not",
"additional_disks",
"and",
"str",
"(",
"block_device",
"[",
"'device'",
"]",
")",
"!=",
"'0'",
":",
"continue",
"disks_to_capture",
".",
"append",
"(",
"block_device",
")",
"return",
"self",
".",
"guest",
".",
"createArchiveTransaction",
"(",
"name",
",",
"disks_to_capture",
",",
"notes",
",",
"id",
"=",
"instance_id",
")"
] | 36.666667 | 21.843137 |
def pfeedback(self, msg: str) -> None:
    """Print nonessential feedback for the user.

    Entirely suppressed when `quiet` is set. Otherwise routed either to
    regular (redirectable) output via `feedback_to_output`, or straight
    to stderr.
    """
    if self.quiet:
        return
    if self.feedback_to_output:
        self.poutput(msg)
    else:
        self.decolorized_write(sys.stderr, "{}\n".format(msg))
"def",
"pfeedback",
"(",
"self",
",",
"msg",
":",
"str",
")",
"->",
"None",
":",
"if",
"not",
"self",
".",
"quiet",
":",
"if",
"self",
".",
"feedback_to_output",
":",
"self",
".",
"poutput",
"(",
"msg",
")",
"else",
":",
"self",
".",
"decolorized_write",
"(",
"sys",
".",
"stderr",
",",
"\"{}\\n\"",
".",
"format",
"(",
"msg",
")",
")"
] | 47.875 | 9.625 |
def _patch(self, route, data, headers=None, failure_message=None):
    """
    Execute a patch request and return the result
    """
    merged_headers = self._get_headers(headers)
    url = self._get_qualified_route(route)

    def _issue():
        # Re-issuable request closure, needed for rate-limit retries.
        return requests.patch(
            url, headers=merged_headers, data=data, verify=False,
            proxies=self.proxies
        )

    response = check_for_rate_limiting(_issue(), _issue)
    return self._handle_response(response, failure_message)
"def",
"_patch",
"(",
"self",
",",
"route",
",",
"data",
",",
"headers",
"=",
"None",
",",
"failure_message",
"=",
"None",
")",
":",
"headers",
"=",
"self",
".",
"_get_headers",
"(",
"headers",
")",
"response_lambda",
"=",
"(",
"lambda",
":",
"requests",
".",
"patch",
"(",
"self",
".",
"_get_qualified_route",
"(",
"route",
")",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"data",
",",
"verify",
"=",
"False",
",",
"proxies",
"=",
"self",
".",
"proxies",
")",
")",
"response",
"=",
"check_for_rate_limiting",
"(",
"response_lambda",
"(",
")",
",",
"response_lambda",
")",
"return",
"self",
".",
"_handle_response",
"(",
"response",
",",
"failure_message",
")"
] | 43.5 | 21 |
def _dfs_usb_info(obj, parents):
    """ Walk an IORegistry tree depth-first, collecting USB info for every
    mbed-looking disk node; the parent stack is searched for the serial.
    """
    found = {}
    is_mbed_disk = (
        "BSD Name" in obj
        and obj["BSD Name"].startswith("disk")
        and mbed_volume_name_match.search(obj["IORegistryEntryName"])
    )
    if is_mbed_disk:
        usb_info = {"serial": None}
        # The serial may live on this node or any ancestor.
        for node in [obj] + parents:
            if "USB Serial Number" in node:
                usb_info["serial"] = node["USB Serial Number"]
                break
        found[obj["BSD Name"]] = usb_info
    for child in obj.get("IORegistryEntryChildren", []):
        found.update(_dfs_usb_info(child, [obj] + parents))
    return found
"def",
"_dfs_usb_info",
"(",
"obj",
",",
"parents",
")",
":",
"output",
"=",
"{",
"}",
"if",
"(",
"\"BSD Name\"",
"in",
"obj",
"and",
"obj",
"[",
"\"BSD Name\"",
"]",
".",
"startswith",
"(",
"\"disk\"",
")",
"and",
"mbed_volume_name_match",
".",
"search",
"(",
"obj",
"[",
"\"IORegistryEntryName\"",
"]",
")",
")",
":",
"disk_id",
"=",
"obj",
"[",
"\"BSD Name\"",
"]",
"usb_info",
"=",
"{",
"\"serial\"",
":",
"None",
"}",
"for",
"parent",
"in",
"[",
"obj",
"]",
"+",
"parents",
":",
"if",
"\"USB Serial Number\"",
"in",
"parent",
":",
"usb_info",
"[",
"\"serial\"",
"]",
"=",
"parent",
"[",
"\"USB Serial Number\"",
"]",
"break",
"output",
"[",
"disk_id",
"]",
"=",
"usb_info",
"for",
"child",
"in",
"obj",
".",
"get",
"(",
"\"IORegistryEntryChildren\"",
",",
"[",
"]",
")",
":",
"output",
".",
"update",
"(",
"_dfs_usb_info",
"(",
"child",
",",
"[",
"obj",
"]",
"+",
"parents",
")",
")",
"return",
"output"
] | 38 | 15.45 |
def sample_vMF(mu, kappa, num_samples):
    """Draw num_samples points in R^N from a von Mises-Fisher distribution
    centered at mu with concentration kappa.
    """
    dim = len(mu)
    samples = np.zeros((num_samples, dim))
    for row in range(num_samples):
        # offset from the center (on the sphere), spread controlled by kappa
        w = _sample_weight(kappa, dim)
        # unit vector orthogonal to mu
        v = _sample_orthonormal_to(mu)
        # combine tangential and radial components into a point on the sphere
        samples[row, :] = v * np.sqrt(1. - w ** 2) + w * mu
    return samples
"def",
"sample_vMF",
"(",
"mu",
",",
"kappa",
",",
"num_samples",
")",
":",
"dim",
"=",
"len",
"(",
"mu",
")",
"result",
"=",
"np",
".",
"zeros",
"(",
"(",
"num_samples",
",",
"dim",
")",
")",
"for",
"nn",
"in",
"range",
"(",
"num_samples",
")",
":",
"# sample offset from center (on sphere) with spread kappa",
"w",
"=",
"_sample_weight",
"(",
"kappa",
",",
"dim",
")",
"# sample a point v on the unit sphere that's orthogonal to mu",
"v",
"=",
"_sample_orthonormal_to",
"(",
"mu",
")",
"# compute new point",
"result",
"[",
"nn",
",",
":",
"]",
"=",
"v",
"*",
"np",
".",
"sqrt",
"(",
"1.",
"-",
"w",
"**",
"2",
")",
"+",
"w",
"*",
"mu",
"return",
"result"
] | 34.470588 | 17.058824 |
def jacobian_s(nodes, degree, dimension):
r"""Compute :math:`\frac{\partial B}{\partial s}`.
.. note::
This is a helper for :func:`_jacobian_both`, which has an
equivalent Fortran implementation.
Args:
nodes (numpy.ndarray): Array of nodes in a surface.
degree (int): The degree of the surface.
dimension (int): The dimension the surface lives in.
Returns:
numpy.ndarray: Nodes of the Jacobian surface in
B |eacute| zier form.
"""
num_nodes = (degree * (degree + 1)) // 2
result = np.empty((dimension, num_nodes), order="F")
index = 0
i = 0
for num_vals in six.moves.xrange(degree, 0, -1):
for _ in six.moves.xrange(num_vals):
result[:, index] = nodes[:, i + 1] - nodes[:, i]
# Update the indices
index += 1
i += 1
# In between each row, the index gains an extra value.
i += 1
return float(degree) * result | [
"def",
"jacobian_s",
"(",
"nodes",
",",
"degree",
",",
"dimension",
")",
":",
"num_nodes",
"=",
"(",
"degree",
"*",
"(",
"degree",
"+",
"1",
")",
")",
"//",
"2",
"result",
"=",
"np",
".",
"empty",
"(",
"(",
"dimension",
",",
"num_nodes",
")",
",",
"order",
"=",
"\"F\"",
")",
"index",
"=",
"0",
"i",
"=",
"0",
"for",
"num_vals",
"in",
"six",
".",
"moves",
".",
"xrange",
"(",
"degree",
",",
"0",
",",
"-",
"1",
")",
":",
"for",
"_",
"in",
"six",
".",
"moves",
".",
"xrange",
"(",
"num_vals",
")",
":",
"result",
"[",
":",
",",
"index",
"]",
"=",
"nodes",
"[",
":",
",",
"i",
"+",
"1",
"]",
"-",
"nodes",
"[",
":",
",",
"i",
"]",
"# Update the indices",
"index",
"+=",
"1",
"i",
"+=",
"1",
"# In between each row, the index gains an extra value.",
"i",
"+=",
"1",
"return",
"float",
"(",
"degree",
")",
"*",
"result"
] | 31.7 | 18.733333 |
def apply(self, token, previous=(None, None), next=(None, None)):
""" Returns a (token, tag)-tuple for the given token,
in context of the given previous and next (token, tag)-tuples.
"""
return [token[0], self._classifier.classify(self._v(token[0], previous, next))] | [
"def",
"apply",
"(",
"self",
",",
"token",
",",
"previous",
"=",
"(",
"None",
",",
"None",
")",
",",
"next",
"=",
"(",
"None",
",",
"None",
")",
")",
":",
"return",
"[",
"token",
"[",
"0",
"]",
",",
"self",
".",
"_classifier",
".",
"classify",
"(",
"self",
".",
"_v",
"(",
"token",
"[",
"0",
"]",
",",
"previous",
",",
"next",
")",
")",
"]"
] | 59.6 | 21.2 |
def add_filters(self, filterer, filters):
"""Add filters to a filterer from a list of names."""
for f in filters:
try:
filterer.addFilter(self.config['filters'][f])
except StandardError as e:
raise ValueError('Unable to add filter %r: %s' % (f, e)) | [
"def",
"add_filters",
"(",
"self",
",",
"filterer",
",",
"filters",
")",
":",
"for",
"f",
"in",
"filters",
":",
"try",
":",
"filterer",
".",
"addFilter",
"(",
"self",
".",
"config",
"[",
"'filters'",
"]",
"[",
"f",
"]",
")",
"except",
"StandardError",
"as",
"e",
":",
"raise",
"ValueError",
"(",
"'Unable to add filter %r: %s'",
"%",
"(",
"f",
",",
"e",
")",
")"
] | 44.857143 | 13.571429 |
def save_matches(self, matches):
"""Save matches of a failed execution to the log.
:param matches: a list of matches in JSON format
"""
if not os.path.exists(os.path.dirname(self.location())):
os.makedirs(os.path.dirname(self.location()))
with open(self.location(), "w+") as f:
matches = [m for m in matches if not m['processed']]
for m in matches:
match_obj = json.dumps(m)
f.write(match_obj + "\n") | [
"def",
"save_matches",
"(",
"self",
",",
"matches",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"self",
".",
"location",
"(",
")",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"self",
".",
"location",
"(",
")",
")",
")",
"with",
"open",
"(",
"self",
".",
"location",
"(",
")",
",",
"\"w+\"",
")",
"as",
"f",
":",
"matches",
"=",
"[",
"m",
"for",
"m",
"in",
"matches",
"if",
"not",
"m",
"[",
"'processed'",
"]",
"]",
"for",
"m",
"in",
"matches",
":",
"match_obj",
"=",
"json",
".",
"dumps",
"(",
"m",
")",
"f",
".",
"write",
"(",
"match_obj",
"+",
"\"\\n\"",
")"
] | 38.307692 | 14.461538 |
def _perform_file_action(self, filename, action):
"""Perform action on specific file types.
Dynamic dispatch function for performing actions on
specific file types.
"""
ext = os.path.splitext(filename)[1]
try:
func = getattr(self, '_{}_{}'.format(action, ext[1:]))
except AttributeError:
raise Exception('Unsupported file type {}'.format(ext[1:]))
func(filename) | [
"def",
"_perform_file_action",
"(",
"self",
",",
"filename",
",",
"action",
")",
":",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"[",
"1",
"]",
"try",
":",
"func",
"=",
"getattr",
"(",
"self",
",",
"'_{}_{}'",
".",
"format",
"(",
"action",
",",
"ext",
"[",
"1",
":",
"]",
")",
")",
"except",
"AttributeError",
":",
"raise",
"Exception",
"(",
"'Unsupported file type {}'",
".",
"format",
"(",
"ext",
"[",
"1",
":",
"]",
")",
")",
"func",
"(",
"filename",
")"
] | 36.666667 | 16.333333 |
def retrieve(self, key):
"""Retrieves a cached array if possible."""
column_file = os.path.join(self._hash_dir, '%s.json' % key)
cache_file = os.path.join(self._hash_dir, '%s.npy' % key)
if os.path.exists(cache_file):
data = np.load(cache_file)
if os.path.exists(column_file):
with open(column_file, 'r') as json_file:
columns = json.load(json_file)
data = pd.DataFrame(data, columns=columns)
else:
return None
return data | [
"def",
"retrieve",
"(",
"self",
",",
"key",
")",
":",
"column_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_hash_dir",
",",
"'%s.json'",
"%",
"key",
")",
"cache_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_hash_dir",
",",
"'%s.npy'",
"%",
"key",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"cache_file",
")",
":",
"data",
"=",
"np",
".",
"load",
"(",
"cache_file",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"column_file",
")",
":",
"with",
"open",
"(",
"column_file",
",",
"'r'",
")",
"as",
"json_file",
":",
"columns",
"=",
"json",
".",
"load",
"(",
"json_file",
")",
"data",
"=",
"pd",
".",
"DataFrame",
"(",
"data",
",",
"columns",
"=",
"columns",
")",
"else",
":",
"return",
"None",
"return",
"data"
] | 36.4 | 17.666667 |
def play(self):
"""
Keep only times in ScienceSegments which are in the playground
"""
length = len(self)
# initialize list of output segments
ostart = -1
outlist = []
begin_s2 = 729273613
play_space = 6370
play_len = 600
for seg in self:
start = seg.start()
stop = seg.end()
id = seg.id()
# select first playground segment which ends after start of seg
play_start = begin_s2+play_space*( 1 +
int((start - begin_s2 - play_len)/play_space) )
while play_start < stop:
if play_start > start:
ostart = play_start
else:
ostart = start
play_stop = play_start + play_len
if play_stop < stop:
ostop = play_stop
else:
ostop = stop
x = ScienceSegment(tuple([id, ostart, ostop, ostop-ostart]))
outlist.append(x)
# step forward
play_start = play_start + play_space
# save the playground segs and return the length
self.__sci_segs = outlist
return len(self) | [
"def",
"play",
"(",
"self",
")",
":",
"length",
"=",
"len",
"(",
"self",
")",
"# initialize list of output segments",
"ostart",
"=",
"-",
"1",
"outlist",
"=",
"[",
"]",
"begin_s2",
"=",
"729273613",
"play_space",
"=",
"6370",
"play_len",
"=",
"600",
"for",
"seg",
"in",
"self",
":",
"start",
"=",
"seg",
".",
"start",
"(",
")",
"stop",
"=",
"seg",
".",
"end",
"(",
")",
"id",
"=",
"seg",
".",
"id",
"(",
")",
"# select first playground segment which ends after start of seg",
"play_start",
"=",
"begin_s2",
"+",
"play_space",
"*",
"(",
"1",
"+",
"int",
"(",
"(",
"start",
"-",
"begin_s2",
"-",
"play_len",
")",
"/",
"play_space",
")",
")",
"while",
"play_start",
"<",
"stop",
":",
"if",
"play_start",
">",
"start",
":",
"ostart",
"=",
"play_start",
"else",
":",
"ostart",
"=",
"start",
"play_stop",
"=",
"play_start",
"+",
"play_len",
"if",
"play_stop",
"<",
"stop",
":",
"ostop",
"=",
"play_stop",
"else",
":",
"ostop",
"=",
"stop",
"x",
"=",
"ScienceSegment",
"(",
"tuple",
"(",
"[",
"id",
",",
"ostart",
",",
"ostop",
",",
"ostop",
"-",
"ostart",
"]",
")",
")",
"outlist",
".",
"append",
"(",
"x",
")",
"# step forward",
"play_start",
"=",
"play_start",
"+",
"play_space",
"# save the playground segs and return the length",
"self",
".",
"__sci_segs",
"=",
"outlist",
"return",
"len",
"(",
"self",
")"
] | 22.217391 | 21.521739 |
def metricCompute(self, sensorToBody, bodyToSpecificObject):
"""
Compute the
"sensor's location relative to a specific object"
from the
"body's location relative to a specific object"
and the
"sensor's location relative to body"
@param sensorToBody (numpy array)
Active cells of a single module that represents the sensor's location
relative to the body
@param bodyToSpecificObject (numpy array)
Active cells of a single module that represents the body's location relative
to a specific object
"""
overlaps = self.metricConnections.computeActivity({
"bodyToSpecificObject": bodyToSpecificObject,
"sensorToBody": sensorToBody,
})
self.activeMetricSegments = np.where(overlaps >= 2)[0]
self.activeCells = np.unique(
self.metricConnections.mapSegmentsToCells(
self.activeMetricSegments)) | [
"def",
"metricCompute",
"(",
"self",
",",
"sensorToBody",
",",
"bodyToSpecificObject",
")",
":",
"overlaps",
"=",
"self",
".",
"metricConnections",
".",
"computeActivity",
"(",
"{",
"\"bodyToSpecificObject\"",
":",
"bodyToSpecificObject",
",",
"\"sensorToBody\"",
":",
"sensorToBody",
",",
"}",
")",
"self",
".",
"activeMetricSegments",
"=",
"np",
".",
"where",
"(",
"overlaps",
">=",
"2",
")",
"[",
"0",
"]",
"self",
".",
"activeCells",
"=",
"np",
".",
"unique",
"(",
"self",
".",
"metricConnections",
".",
"mapSegmentsToCells",
"(",
"self",
".",
"activeMetricSegments",
")",
")"
] | 33.307692 | 18 |
def _assign_enterprise_role_to_users(self, _get_batch_method, options, is_feature_role=False):
"""
Assigns enterprise role to users.
"""
role_name = options['role']
batch_limit = options['batch_limit']
batch_sleep = options['batch_sleep']
batch_offset = options['batch_offset']
current_batch_index = batch_offset
users_batch = _get_batch_method(
batch_offset,
batch_offset + batch_limit
)
role_class = SystemWideEnterpriseRole
role_assignment_class = SystemWideEnterpriseUserRoleAssignment
if is_feature_role:
role_class = EnterpriseFeatureRole
role_assignment_class = EnterpriseFeatureUserRoleAssignment
enterprise_role = role_class.objects.get(name=role_name)
while users_batch.count() > 0:
for index, user in enumerate(users_batch):
LOGGER.info(
'Processing user with index %s and id %s',
current_batch_index + index, user.id
)
role_assignment_class.objects.get_or_create(
user=user,
role=enterprise_role
)
sleep(batch_sleep)
current_batch_index += len(users_batch)
users_batch = _get_batch_method(
current_batch_index,
current_batch_index + batch_limit
) | [
"def",
"_assign_enterprise_role_to_users",
"(",
"self",
",",
"_get_batch_method",
",",
"options",
",",
"is_feature_role",
"=",
"False",
")",
":",
"role_name",
"=",
"options",
"[",
"'role'",
"]",
"batch_limit",
"=",
"options",
"[",
"'batch_limit'",
"]",
"batch_sleep",
"=",
"options",
"[",
"'batch_sleep'",
"]",
"batch_offset",
"=",
"options",
"[",
"'batch_offset'",
"]",
"current_batch_index",
"=",
"batch_offset",
"users_batch",
"=",
"_get_batch_method",
"(",
"batch_offset",
",",
"batch_offset",
"+",
"batch_limit",
")",
"role_class",
"=",
"SystemWideEnterpriseRole",
"role_assignment_class",
"=",
"SystemWideEnterpriseUserRoleAssignment",
"if",
"is_feature_role",
":",
"role_class",
"=",
"EnterpriseFeatureRole",
"role_assignment_class",
"=",
"EnterpriseFeatureUserRoleAssignment",
"enterprise_role",
"=",
"role_class",
".",
"objects",
".",
"get",
"(",
"name",
"=",
"role_name",
")",
"while",
"users_batch",
".",
"count",
"(",
")",
">",
"0",
":",
"for",
"index",
",",
"user",
"in",
"enumerate",
"(",
"users_batch",
")",
":",
"LOGGER",
".",
"info",
"(",
"'Processing user with index %s and id %s'",
",",
"current_batch_index",
"+",
"index",
",",
"user",
".",
"id",
")",
"role_assignment_class",
".",
"objects",
".",
"get_or_create",
"(",
"user",
"=",
"user",
",",
"role",
"=",
"enterprise_role",
")",
"sleep",
"(",
"batch_sleep",
")",
"current_batch_index",
"+=",
"len",
"(",
"users_batch",
")",
"users_batch",
"=",
"_get_batch_method",
"(",
"current_batch_index",
",",
"current_batch_index",
"+",
"batch_limit",
")"
] | 34.829268 | 16.585366 |
def _struct_or_lob_handler(c, ctx):
"""Handles tokens that begin with an open brace."""
assert c == _OPEN_BRACE
c, self = yield
yield ctx.immediate_transition(_STRUCT_OR_LOB_TABLE[c](c, ctx)) | [
"def",
"_struct_or_lob_handler",
"(",
"c",
",",
"ctx",
")",
":",
"assert",
"c",
"==",
"_OPEN_BRACE",
"c",
",",
"self",
"=",
"yield",
"yield",
"ctx",
".",
"immediate_transition",
"(",
"_STRUCT_OR_LOB_TABLE",
"[",
"c",
"]",
"(",
"c",
",",
"ctx",
")",
")"
] | 40.6 | 13.2 |
def deep_get(self, content, keys, traversed_path=None):
'''
Allow to retrieve content nested inside a several layers deep dict/list
Examples: -content: {
"key1": {
"key2" : [
{
"name" : "object1",
"value" : 42
},
{
"name" : "object2",
"value" : 72
}
]
}
}
-keys: ["key1", "key2", "1", "value"]
would return:
[(["key1", "key2", "1", "value"], 72)]
-keys: ["key1", "key2", "1", "*"]
would return:
[(["key1", "key2", "1", "value"], 72), (["key1", "key2", "1", "name"], "object2")]
-keys: ["key1", "key2", "*", "value"]
would return:
[(["key1", "key2", "1", "value"], 72), (["key1", "key2", "0", "value"], 42)]
'''
if traversed_path is None:
traversed_path = []
if keys == []:
return [(traversed_path, content)]
key = keys[0]
regex = "".join(["^", key, "$"])
try:
key_rex = re.compile(regex)
except Exception:
self.warning("Cannot compile regex: %s" % regex)
return []
results = []
for new_key, new_content in self.items(content):
if key_rex.match(new_key):
results.extend(self.deep_get(new_content, keys[1:], traversed_path + [str(new_key)]))
return results | [
"def",
"deep_get",
"(",
"self",
",",
"content",
",",
"keys",
",",
"traversed_path",
"=",
"None",
")",
":",
"if",
"traversed_path",
"is",
"None",
":",
"traversed_path",
"=",
"[",
"]",
"if",
"keys",
"==",
"[",
"]",
":",
"return",
"[",
"(",
"traversed_path",
",",
"content",
")",
"]",
"key",
"=",
"keys",
"[",
"0",
"]",
"regex",
"=",
"\"\"",
".",
"join",
"(",
"[",
"\"^\"",
",",
"key",
",",
"\"$\"",
"]",
")",
"try",
":",
"key_rex",
"=",
"re",
".",
"compile",
"(",
"regex",
")",
"except",
"Exception",
":",
"self",
".",
"warning",
"(",
"\"Cannot compile regex: %s\"",
"%",
"regex",
")",
"return",
"[",
"]",
"results",
"=",
"[",
"]",
"for",
"new_key",
",",
"new_content",
"in",
"self",
".",
"items",
"(",
"content",
")",
":",
"if",
"key_rex",
".",
"match",
"(",
"new_key",
")",
":",
"results",
".",
"extend",
"(",
"self",
".",
"deep_get",
"(",
"new_content",
",",
"keys",
"[",
"1",
":",
"]",
",",
"traversed_path",
"+",
"[",
"str",
"(",
"new_key",
")",
"]",
")",
")",
"return",
"results"
] | 39.875 | 19.625 |
def update(self, deviceUid, metadata=None, deviceInfo=None, status=None):
"""
Update an existing device
"""
if not isinstance(deviceUid, DeviceUid) and isinstance(deviceUid, dict):
deviceUid = DeviceUid(**deviceUid)
deviceUrl = "api/v0002/device/types/%s/devices/%s" % (deviceUid.typeId, deviceUid.deviceId)
data = {"status": status, "deviceInfo": deviceInfo, "metadata": metadata}
r = self._apiClient.put(deviceUrl, data)
if r.status_code == 200:
return Device(apiClient=self._apiClient, **r.json())
else:
raise ApiException(r) | [
"def",
"update",
"(",
"self",
",",
"deviceUid",
",",
"metadata",
"=",
"None",
",",
"deviceInfo",
"=",
"None",
",",
"status",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"deviceUid",
",",
"DeviceUid",
")",
"and",
"isinstance",
"(",
"deviceUid",
",",
"dict",
")",
":",
"deviceUid",
"=",
"DeviceUid",
"(",
"*",
"*",
"deviceUid",
")",
"deviceUrl",
"=",
"\"api/v0002/device/types/%s/devices/%s\"",
"%",
"(",
"deviceUid",
".",
"typeId",
",",
"deviceUid",
".",
"deviceId",
")",
"data",
"=",
"{",
"\"status\"",
":",
"status",
",",
"\"deviceInfo\"",
":",
"deviceInfo",
",",
"\"metadata\"",
":",
"metadata",
"}",
"r",
"=",
"self",
".",
"_apiClient",
".",
"put",
"(",
"deviceUrl",
",",
"data",
")",
"if",
"r",
".",
"status_code",
"==",
"200",
":",
"return",
"Device",
"(",
"apiClient",
"=",
"self",
".",
"_apiClient",
",",
"*",
"*",
"r",
".",
"json",
"(",
")",
")",
"else",
":",
"raise",
"ApiException",
"(",
"r",
")"
] | 36.705882 | 24.705882 |
def _gradient_black_lines(
self, text, start, step=1,
fore=None, back=None, style=None, reverse=False,
movefactor=2, rgb_mode=False):
""" Yield colorized characters,
within the 24-length black gradient,
treating each line separately.
"""
if not movefactor:
def factor(i):
return start
else:
# Increase the start for each line.
def factor(i):
return start + (i * movefactor)
return '\n'.join((
self._gradient_black_line(
line,
start=factor(i),
step=step,
fore=fore,
back=back,
style=style,
reverse=reverse,
rgb_mode=rgb_mode,
)
for i, line in enumerate(text.splitlines())
)) | [
"def",
"_gradient_black_lines",
"(",
"self",
",",
"text",
",",
"start",
",",
"step",
"=",
"1",
",",
"fore",
"=",
"None",
",",
"back",
"=",
"None",
",",
"style",
"=",
"None",
",",
"reverse",
"=",
"False",
",",
"movefactor",
"=",
"2",
",",
"rgb_mode",
"=",
"False",
")",
":",
"if",
"not",
"movefactor",
":",
"def",
"factor",
"(",
"i",
")",
":",
"return",
"start",
"else",
":",
"# Increase the start for each line.",
"def",
"factor",
"(",
"i",
")",
":",
"return",
"start",
"+",
"(",
"i",
"*",
"movefactor",
")",
"return",
"'\\n'",
".",
"join",
"(",
"(",
"self",
".",
"_gradient_black_line",
"(",
"line",
",",
"start",
"=",
"factor",
"(",
"i",
")",
",",
"step",
"=",
"step",
",",
"fore",
"=",
"fore",
",",
"back",
"=",
"back",
",",
"style",
"=",
"style",
",",
"reverse",
"=",
"reverse",
",",
"rgb_mode",
"=",
"rgb_mode",
",",
")",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"text",
".",
"splitlines",
"(",
")",
")",
")",
")"
] | 31.642857 | 11.642857 |
def add_simmanager_api(self, mock):
'''Add org.ofono.SimManager API to a mock'''
iface = 'org.ofono.SimManager'
mock.AddProperties(iface, {
'BarredDialing': _parameters.get('BarredDialing', False),
'CardIdentifier': _parameters.get('CardIdentifier', new_iccid(self)),
'FixedDialing': _parameters.get('FixedDialing', False),
'LockedPins': _parameters.get('LockedPins', dbus.Array([], signature='s')),
'MobileCountryCode': _parameters.get('MobileCountryCode', '310'),
'MobileNetworkCode': _parameters.get('MobileNetworkCode', '150'),
'PreferredLanguages': _parameters.get('PreferredLanguages', ['en']),
'Present': _parameters.get('Present', dbus.Boolean(True)),
'Retries': _parameters.get('Retries', dbus.Dictionary([["pin", dbus.Byte(3)], ["puk", dbus.Byte(10)]])),
'PinRequired': _parameters.get('PinRequired', "none"),
'SubscriberNumbers': _parameters.get('SubscriberNumbers', ['123456789', '234567890']),
'SubscriberIdentity': _parameters.get('SubscriberIdentity', new_imsi(self)),
})
mock.AddMethods(iface, [
('GetProperties', '', 'a{sv}', 'ret = self.GetAll("%s")' % iface),
('SetProperty', 'sv', '', 'self.Set("%(i)s", args[0], args[1]); '
'self.EmitSignal("%(i)s", "PropertyChanged", "sv", [args[0], args[1]])' % {'i': iface}),
('ChangePin', 'sss', '', ''),
('EnterPin', 'ss', '',
'correctPin = "1234"\n'
'newRetries = self.Get("%(i)s", "Retries")\n'
'if args[0] == "pin" and args[1] != correctPin:\n'
' newRetries["pin"] = dbus.Byte(newRetries["pin"] - 1)\n'
'elif args[0] == "pin":\n'
' newRetries["pin"] = dbus.Byte(3)\n'
'self.Set("%(i)s", "Retries", newRetries)\n'
'self.EmitSignal("%(i)s", "PropertyChanged", "sv", ["Retries", newRetries])\n'
'if args[0] == "pin" and args[1] != correctPin:\n'
' class Failed(dbus.exceptions.DBusException):\n'
' _dbus_error_name = "org.ofono.Error.Failed"\n'
' raise Failed("Operation failed")' % {'i': iface}),
('ResetPin', 'sss', '',
'correctPuk = "12345678"\n'
'newRetries = self.Get("%(i)s", "Retries")\n'
'if args[0] == "puk" and args[1] != correctPuk:\n'
' newRetries["puk"] = dbus.Byte(newRetries["puk"] - 1)\n'
'elif args[0] == "puk":\n'
' newRetries["pin"] = dbus.Byte(3)\n'
' newRetries["puk"] = dbus.Byte(10)\n'
'self.Set("%(i)s", "Retries", newRetries)\n'
'self.EmitSignal("%(i)s", "PropertyChanged", "sv", ["Retries", newRetries])\n'
'if args[0] == "puk" and args[1] != correctPuk:\n'
' class Failed(dbus.exceptions.DBusException):\n'
' _dbus_error_name = "org.ofono.Error.Failed"\n'
' raise Failed("Operation failed")' % {'i': iface}),
('LockPin', 'ss', '', ''),
('UnlockPin', 'ss', '', ''),
]) | [
"def",
"add_simmanager_api",
"(",
"self",
",",
"mock",
")",
":",
"iface",
"=",
"'org.ofono.SimManager'",
"mock",
".",
"AddProperties",
"(",
"iface",
",",
"{",
"'BarredDialing'",
":",
"_parameters",
".",
"get",
"(",
"'BarredDialing'",
",",
"False",
")",
",",
"'CardIdentifier'",
":",
"_parameters",
".",
"get",
"(",
"'CardIdentifier'",
",",
"new_iccid",
"(",
"self",
")",
")",
",",
"'FixedDialing'",
":",
"_parameters",
".",
"get",
"(",
"'FixedDialing'",
",",
"False",
")",
",",
"'LockedPins'",
":",
"_parameters",
".",
"get",
"(",
"'LockedPins'",
",",
"dbus",
".",
"Array",
"(",
"[",
"]",
",",
"signature",
"=",
"'s'",
")",
")",
",",
"'MobileCountryCode'",
":",
"_parameters",
".",
"get",
"(",
"'MobileCountryCode'",
",",
"'310'",
")",
",",
"'MobileNetworkCode'",
":",
"_parameters",
".",
"get",
"(",
"'MobileNetworkCode'",
",",
"'150'",
")",
",",
"'PreferredLanguages'",
":",
"_parameters",
".",
"get",
"(",
"'PreferredLanguages'",
",",
"[",
"'en'",
"]",
")",
",",
"'Present'",
":",
"_parameters",
".",
"get",
"(",
"'Present'",
",",
"dbus",
".",
"Boolean",
"(",
"True",
")",
")",
",",
"'Retries'",
":",
"_parameters",
".",
"get",
"(",
"'Retries'",
",",
"dbus",
".",
"Dictionary",
"(",
"[",
"[",
"\"pin\"",
",",
"dbus",
".",
"Byte",
"(",
"3",
")",
"]",
",",
"[",
"\"puk\"",
",",
"dbus",
".",
"Byte",
"(",
"10",
")",
"]",
"]",
")",
")",
",",
"'PinRequired'",
":",
"_parameters",
".",
"get",
"(",
"'PinRequired'",
",",
"\"none\"",
")",
",",
"'SubscriberNumbers'",
":",
"_parameters",
".",
"get",
"(",
"'SubscriberNumbers'",
",",
"[",
"'123456789'",
",",
"'234567890'",
"]",
")",
",",
"'SubscriberIdentity'",
":",
"_parameters",
".",
"get",
"(",
"'SubscriberIdentity'",
",",
"new_imsi",
"(",
"self",
")",
")",
",",
"}",
")",
"mock",
".",
"AddMethods",
"(",
"iface",
",",
"[",
"(",
"'GetProperties'",
",",
"''",
",",
"'a{sv}'",
",",
"'ret = self.GetAll(\"%s\")'",
"%",
"iface",
")",
",",
"(",
"'SetProperty'",
",",
"'sv'",
",",
"''",
",",
"'self.Set(\"%(i)s\", args[0], args[1]); '",
"'self.EmitSignal(\"%(i)s\", \"PropertyChanged\", \"sv\", [args[0], args[1]])'",
"%",
"{",
"'i'",
":",
"iface",
"}",
")",
",",
"(",
"'ChangePin'",
",",
"'sss'",
",",
"''",
",",
"''",
")",
",",
"(",
"'EnterPin'",
",",
"'ss'",
",",
"''",
",",
"'correctPin = \"1234\"\\n'",
"'newRetries = self.Get(\"%(i)s\", \"Retries\")\\n'",
"'if args[0] == \"pin\" and args[1] != correctPin:\\n'",
"' newRetries[\"pin\"] = dbus.Byte(newRetries[\"pin\"] - 1)\\n'",
"'elif args[0] == \"pin\":\\n'",
"' newRetries[\"pin\"] = dbus.Byte(3)\\n'",
"'self.Set(\"%(i)s\", \"Retries\", newRetries)\\n'",
"'self.EmitSignal(\"%(i)s\", \"PropertyChanged\", \"sv\", [\"Retries\", newRetries])\\n'",
"'if args[0] == \"pin\" and args[1] != correctPin:\\n'",
"' class Failed(dbus.exceptions.DBusException):\\n'",
"' _dbus_error_name = \"org.ofono.Error.Failed\"\\n'",
"' raise Failed(\"Operation failed\")'",
"%",
"{",
"'i'",
":",
"iface",
"}",
")",
",",
"(",
"'ResetPin'",
",",
"'sss'",
",",
"''",
",",
"'correctPuk = \"12345678\"\\n'",
"'newRetries = self.Get(\"%(i)s\", \"Retries\")\\n'",
"'if args[0] == \"puk\" and args[1] != correctPuk:\\n'",
"' newRetries[\"puk\"] = dbus.Byte(newRetries[\"puk\"] - 1)\\n'",
"'elif args[0] == \"puk\":\\n'",
"' newRetries[\"pin\"] = dbus.Byte(3)\\n'",
"' newRetries[\"puk\"] = dbus.Byte(10)\\n'",
"'self.Set(\"%(i)s\", \"Retries\", newRetries)\\n'",
"'self.EmitSignal(\"%(i)s\", \"PropertyChanged\", \"sv\", [\"Retries\", newRetries])\\n'",
"'if args[0] == \"puk\" and args[1] != correctPuk:\\n'",
"' class Failed(dbus.exceptions.DBusException):\\n'",
"' _dbus_error_name = \"org.ofono.Error.Failed\"\\n'",
"' raise Failed(\"Operation failed\")'",
"%",
"{",
"'i'",
":",
"iface",
"}",
")",
",",
"(",
"'LockPin'",
",",
"'ss'",
",",
"''",
",",
"''",
")",
",",
"(",
"'UnlockPin'",
",",
"'ss'",
",",
"''",
",",
"''",
")",
",",
"]",
")"
] | 49.233333 | 25.033333 |
def get_location(self, ip, detailed=False):
"""Returns a dictionary with location data or False on failure.
Amount of information about IP contained in the dictionary depends
upon `detailed` flag state.
"""
seek = self._get_pos(ip)
if seek > 0:
return self._parse_location(seek, detailed=detailed)
return False | [
"def",
"get_location",
"(",
"self",
",",
"ip",
",",
"detailed",
"=",
"False",
")",
":",
"seek",
"=",
"self",
".",
"_get_pos",
"(",
"ip",
")",
"if",
"seek",
">",
"0",
":",
"return",
"self",
".",
"_parse_location",
"(",
"seek",
",",
"detailed",
"=",
"detailed",
")",
"return",
"False"
] | 28.461538 | 21.076923 |
def parsedate(data):
"""Convert a time string to a time tuple."""
t = parsedate_tz(data)
if isinstance(t, tuple):
return t[:9]
else:
return t | [
"def",
"parsedate",
"(",
"data",
")",
":",
"t",
"=",
"parsedate_tz",
"(",
"data",
")",
"if",
"isinstance",
"(",
"t",
",",
"tuple",
")",
":",
"return",
"t",
"[",
":",
"9",
"]",
"else",
":",
"return",
"t"
] | 23.857143 | 17.285714 |
def get_rsa_props(
object_class,
exported_cfgs,
remote_intents=None,
ep_svc_id=None,
fw_id=None,
pkg_vers=None,
service_intents=None,
):
"""
Constructs a dictionary of RSA properties from the given arguments
:param object_class: Service specifications
:param exported_cfgs: Export configurations
:param remote_intents: Supported remote intents
:param ep_svc_id: Endpoint service ID
:param fw_id: Remote Framework ID
:param pkg_vers: Version number of the specification package
:param service_intents: Service intents
:return: A dictionary of properties
"""
results = {}
if not object_class:
raise ArgumentError(
"object_class", "object_class must be an [] of Strings"
)
results["objectClass"] = object_class
if not exported_cfgs:
raise ArgumentError(
"exported_cfgs", "exported_cfgs must be an array of Strings"
)
results[REMOTE_CONFIGS_SUPPORTED] = exported_cfgs
results[SERVICE_IMPORTED_CONFIGS] = exported_cfgs
if remote_intents:
results[REMOTE_INTENTS_SUPPORTED] = remote_intents
if service_intents:
results[SERVICE_INTENTS] = service_intents
if not ep_svc_id:
ep_svc_id = get_next_rsid()
results[ENDPOINT_SERVICE_ID] = ep_svc_id
results[SERVICE_ID] = ep_svc_id
if not fw_id:
# No framework ID means an error
fw_id = "endpoint-in-error"
results[ENDPOINT_FRAMEWORK_UUID] = fw_id
if pkg_vers:
if isinstance(pkg_vers, type(tuple())):
pkg_vers = [pkg_vers]
for pkg_ver in pkg_vers:
results[pkg_ver[0]] = pkg_ver[1]
results[ENDPOINT_ID] = create_uuid()
results[SERVICE_IMPORTED] = "true"
return results | [
"def",
"get_rsa_props",
"(",
"object_class",
",",
"exported_cfgs",
",",
"remote_intents",
"=",
"None",
",",
"ep_svc_id",
"=",
"None",
",",
"fw_id",
"=",
"None",
",",
"pkg_vers",
"=",
"None",
",",
"service_intents",
"=",
"None",
",",
")",
":",
"results",
"=",
"{",
"}",
"if",
"not",
"object_class",
":",
"raise",
"ArgumentError",
"(",
"\"object_class\"",
",",
"\"object_class must be an [] of Strings\"",
")",
"results",
"[",
"\"objectClass\"",
"]",
"=",
"object_class",
"if",
"not",
"exported_cfgs",
":",
"raise",
"ArgumentError",
"(",
"\"exported_cfgs\"",
",",
"\"exported_cfgs must be an array of Strings\"",
")",
"results",
"[",
"REMOTE_CONFIGS_SUPPORTED",
"]",
"=",
"exported_cfgs",
"results",
"[",
"SERVICE_IMPORTED_CONFIGS",
"]",
"=",
"exported_cfgs",
"if",
"remote_intents",
":",
"results",
"[",
"REMOTE_INTENTS_SUPPORTED",
"]",
"=",
"remote_intents",
"if",
"service_intents",
":",
"results",
"[",
"SERVICE_INTENTS",
"]",
"=",
"service_intents",
"if",
"not",
"ep_svc_id",
":",
"ep_svc_id",
"=",
"get_next_rsid",
"(",
")",
"results",
"[",
"ENDPOINT_SERVICE_ID",
"]",
"=",
"ep_svc_id",
"results",
"[",
"SERVICE_ID",
"]",
"=",
"ep_svc_id",
"if",
"not",
"fw_id",
":",
"# No framework ID means an error",
"fw_id",
"=",
"\"endpoint-in-error\"",
"results",
"[",
"ENDPOINT_FRAMEWORK_UUID",
"]",
"=",
"fw_id",
"if",
"pkg_vers",
":",
"if",
"isinstance",
"(",
"pkg_vers",
",",
"type",
"(",
"tuple",
"(",
")",
")",
")",
":",
"pkg_vers",
"=",
"[",
"pkg_vers",
"]",
"for",
"pkg_ver",
"in",
"pkg_vers",
":",
"results",
"[",
"pkg_ver",
"[",
"0",
"]",
"]",
"=",
"pkg_ver",
"[",
"1",
"]",
"results",
"[",
"ENDPOINT_ID",
"]",
"=",
"create_uuid",
"(",
")",
"results",
"[",
"SERVICE_IMPORTED",
"]",
"=",
"\"true\"",
"return",
"results"
] | 32.566038 | 14.339623 |
def _futureExceptions(self, request):
"""
Returns all future extra info, cancellations and postponements created
for this recurring event
"""
retval = []
# We know all future exception dates are in the parent time zone
myToday = timezone.localdate(timezone=self.tz)
for extraInfo in ExtraInfoPage.events(request).child_of(self) \
.filter(except_date__gte=myToday):
retval.append(extraInfo)
for cancellation in CancellationPage.events(request).child_of(self) \
.filter(except_date__gte=myToday):
postponement = getattr(cancellation, "postponementpage", None)
if postponement:
retval.append(postponement)
else:
retval.append(cancellation)
retval.sort(key=attrgetter('except_date'))
# notice these are events not ThisEvents
return retval | [
"def",
"_futureExceptions",
"(",
"self",
",",
"request",
")",
":",
"retval",
"=",
"[",
"]",
"# We know all future exception dates are in the parent time zone",
"myToday",
"=",
"timezone",
".",
"localdate",
"(",
"timezone",
"=",
"self",
".",
"tz",
")",
"for",
"extraInfo",
"in",
"ExtraInfoPage",
".",
"events",
"(",
"request",
")",
".",
"child_of",
"(",
"self",
")",
".",
"filter",
"(",
"except_date__gte",
"=",
"myToday",
")",
":",
"retval",
".",
"append",
"(",
"extraInfo",
")",
"for",
"cancellation",
"in",
"CancellationPage",
".",
"events",
"(",
"request",
")",
".",
"child_of",
"(",
"self",
")",
".",
"filter",
"(",
"except_date__gte",
"=",
"myToday",
")",
":",
"postponement",
"=",
"getattr",
"(",
"cancellation",
",",
"\"postponementpage\"",
",",
"None",
")",
"if",
"postponement",
":",
"retval",
".",
"append",
"(",
"postponement",
")",
"else",
":",
"retval",
".",
"append",
"(",
"cancellation",
")",
"retval",
".",
"sort",
"(",
"key",
"=",
"attrgetter",
"(",
"'except_date'",
")",
")",
"# notice these are events not ThisEvents",
"return",
"retval"
] | 44.636364 | 19.090909 |
def get_filename(disposition):
"""Parse Content-Disposition header to pull out the filename bit.
See: http://tools.ietf.org/html/rfc2616#section-19.5.1
"""
if disposition:
params = [param.strip() for param in disposition.split(';')[1:]]
for param in params:
if '=' in param:
name, value = param.split('=', 1)
if name == 'filename':
return value.strip('"') | [
"def",
"get_filename",
"(",
"disposition",
")",
":",
"if",
"disposition",
":",
"params",
"=",
"[",
"param",
".",
"strip",
"(",
")",
"for",
"param",
"in",
"disposition",
".",
"split",
"(",
"';'",
")",
"[",
"1",
":",
"]",
"]",
"for",
"param",
"in",
"params",
":",
"if",
"'='",
"in",
"param",
":",
"name",
",",
"value",
"=",
"param",
".",
"split",
"(",
"'='",
",",
"1",
")",
"if",
"name",
"==",
"'filename'",
":",
"return",
"value",
".",
"strip",
"(",
"'\"'",
")"
] | 33.923077 | 15.307692 |
def copy(self, *args, **kwargs):
"""
Make a copy of this object.
See Also:
For arguments and description of behavior see `pandas docs`_.
.. _pandas docs: http://pandas.pydata.org/pandas-docs/stable/generated/pandas.Series.copy.html
"""
cls = self.__class__ # Note that type conversion does not perform copy
return cls(pd.DataFrame(self).copy(*args, **kwargs)) | [
"def",
"copy",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"cls",
"=",
"self",
".",
"__class__",
"# Note that type conversion does not perform copy",
"return",
"cls",
"(",
"pd",
".",
"DataFrame",
"(",
"self",
")",
".",
"copy",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")"
] | 38.363636 | 24.727273 |
def _set_shortcut_ownership(path, user):
    '''
    Try to chown a shortcut to ``user``; return a boolean telling whether
    the shortcut ends up correctly owned.
    '''
    lchown = __salt__['file.lchown']
    try:
        lchown(path, user)
    except OSError:
        # Best effort: a failure here is caught by the ownership check below.
        pass
    return _check_shortcut_ownership(path, user)
"def",
"_set_shortcut_ownership",
"(",
"path",
",",
"user",
")",
":",
"try",
":",
"__salt__",
"[",
"'file.lchown'",
"]",
"(",
"path",
",",
"user",
")",
"except",
"OSError",
":",
"pass",
"return",
"_check_shortcut_ownership",
"(",
"path",
",",
"user",
")"
] | 27 | 20.6 |
def build_is_last_day_of_season(num_steps_per_season):
  """Build utility method to compute whether the season is changing.

  Args:
    num_steps_per_season: scalar or array giving the number of steps in each
      season; the total over all entries defines one full cycle.

  Returns:
    Callable taking a step index `t` and returning whether `t` is the last
    step of some season: a Python bool when `t` has a static value,
    otherwise a boolean Tensor.
  """
  num_steps_per_cycle = np.sum(num_steps_per_season)
  # Indices (within one cycle) of the final step of each season.
  changepoints = np.cumsum(np.ravel(num_steps_per_season)) - 1
  def is_last_day_of_season(t):
    t_ = dist_util.maybe_get_static_value(t)
    if t_ is not None:  # static case
      step_in_cycle = t_ % num_steps_per_cycle
      return any(step_in_cycle == changepoints)
    else:
      # Dynamic case: same computation, staying in the TF graph.
      step_in_cycle = tf.math.floormod(t, num_steps_per_cycle)
      return tf.reduce_any(
          input_tensor=tf.equal(step_in_cycle, changepoints))
  return is_last_day_of_season
"def",
"build_is_last_day_of_season",
"(",
"num_steps_per_season",
")",
":",
"num_steps_per_cycle",
"=",
"np",
".",
"sum",
"(",
"num_steps_per_season",
")",
"changepoints",
"=",
"np",
".",
"cumsum",
"(",
"np",
".",
"ravel",
"(",
"num_steps_per_season",
")",
")",
"-",
"1",
"def",
"is_last_day_of_season",
"(",
"t",
")",
":",
"t_",
"=",
"dist_util",
".",
"maybe_get_static_value",
"(",
"t",
")",
"if",
"t_",
"is",
"not",
"None",
":",
"# static case",
"step_in_cycle",
"=",
"t_",
"%",
"num_steps_per_cycle",
"return",
"any",
"(",
"step_in_cycle",
"==",
"changepoints",
")",
"else",
":",
"step_in_cycle",
"=",
"tf",
".",
"math",
".",
"floormod",
"(",
"t",
",",
"num_steps_per_cycle",
")",
"return",
"tf",
".",
"reduce_any",
"(",
"input_tensor",
"=",
"tf",
".",
"equal",
"(",
"step_in_cycle",
",",
"changepoints",
")",
")",
"return",
"is_last_day_of_season"
] | 45.214286 | 12.428571 |
def printInvoice(self, REQUEST=None, RESPONSE=None):
        """Redirect the browser to the printable view of this object's
        invoice.
        """
        target = '{}/invoice_print'.format(self.getInvoice().absolute_url())
        RESPONSE.redirect(target)
"def",
"printInvoice",
"(",
"self",
",",
"REQUEST",
"=",
"None",
",",
"RESPONSE",
"=",
"None",
")",
":",
"invoice",
"=",
"self",
".",
"getInvoice",
"(",
")",
"invoice_url",
"=",
"invoice",
".",
"absolute_url",
"(",
")",
"RESPONSE",
".",
"redirect",
"(",
"'{}/invoice_print'",
".",
"format",
"(",
"invoice_url",
")",
")"
] | 38.5 | 7.666667 |
def set_character_set(self, charset):
        """Set the connection character set to charset. The character
        set can only be changed in MySQL-4.1 and newer. If you try
        to change the character set from the current value in an
        older version, NotSupportedError will be raised."""
        # MySQL's utf8mb3/utf8mb4 have no Python codec of their own; both
        # map onto Python's "utf8" for encode/decode purposes.
        if charset in ("utf8mb4", "utf8mb3"):
            py_charset = "utf8"
        else:
            py_charset = charset
        if self.character_set_name() != charset:
            try:
                super(Connection, self).set_character_set(charset)
            except AttributeError:
                # Older client libraries lack set_character_set(); fall
                # back to issuing SET NAMES manually, which requires a
                # server >= 4.1.
                if self._server_version < (4, 1):
                    raise NotSupportedError("server is too old to set charset")
                self.query('SET NAMES %s' % charset)
                self.store_result()
        # Remember the Python-side codec used for query text.
        self.encoding = py_charset
"def",
"set_character_set",
"(",
"self",
",",
"charset",
")",
":",
"if",
"charset",
"in",
"(",
"\"utf8mb4\"",
",",
"\"utf8mb3\"",
")",
":",
"py_charset",
"=",
"\"utf8\"",
"else",
":",
"py_charset",
"=",
"charset",
"if",
"self",
".",
"character_set_name",
"(",
")",
"!=",
"charset",
":",
"try",
":",
"super",
"(",
"Connection",
",",
"self",
")",
".",
"set_character_set",
"(",
"charset",
")",
"except",
"AttributeError",
":",
"if",
"self",
".",
"_server_version",
"<",
"(",
"4",
",",
"1",
")",
":",
"raise",
"NotSupportedError",
"(",
"\"server is too old to set charset\"",
")",
"self",
".",
"query",
"(",
"'SET NAMES %s'",
"%",
"charset",
")",
"self",
".",
"store_result",
"(",
")",
"self",
".",
"encoding",
"=",
"py_charset"
] | 46.055556 | 13.166667 |
def get_dimension(data):
    """
    Return the dimension of ``data``, dispatching on whether it is a list
    (array) or a dictionary; any other type yields ``[0, 0]``.
    """
    if isinstance(data, list):
        return get_dimension_array(data)
    if isinstance(data, dict):
        return get_dimension_dict(data)
    return [0, 0]
"def",
"get_dimension",
"(",
"data",
")",
":",
"result",
"=",
"[",
"0",
",",
"0",
"]",
"if",
"isinstance",
"(",
"data",
",",
"list",
")",
":",
"result",
"=",
"get_dimension_array",
"(",
"data",
")",
"elif",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"result",
"=",
"get_dimension_dict",
"(",
"data",
")",
"return",
"result"
] | 22.714286 | 17.714286 |
def map_to_subset(self, file, outfile=None, ontology=None, subset=None, class_map=None, relations=None):
        """
        Map a file to a subset, writing out results

        You can pass either a subset name (e.g. goslim_generic) or a dictionary with ready-made mappings

        Arguments
        ---------
        file: file
            Name or file object for input assoc file
        outfile: file
            Name or file object for output (mapped) assoc file; writes to stdout if not set
        subset: str
            Optional name of subset to map to, e.g. goslim_generic
        class_map: dict
            Mapping between asserted class ids and ids to map to. Many to many
        ontology: `Ontology`
            Ontology to extract subset from
        relations: list
            Optional relations used when deriving the subset mapping

        Raises
        ------
        ValueError
            If neither class_map nor subset is provided, or a line has too
            few columns.
        """
        if subset is not None:
            logging.info("Creating mapping for subset: {}".format(subset))
            class_map = ontology.create_slim_mapping(subset=subset, relations=relations)

        if class_map is None:
            raise ValueError("Neither class_map nor subset is set")
        col = self.ANNOTATION_CLASS_COLUMN
        file = self._ensure_file(file)
        for line in file:
            # Lines starting with "!" are GAF-style comments; pass over them.
            if line.startswith("!"):
                continue
            vals = line.split("\t")
            logging.info("LINE: {} VALS: {}".format(line, vals))
            if len(vals) < col:
                raise ValueError("Line: {} has too few cols, expect class id in col {}".format(line, col))
            cid = vals[col]
            if cid not in class_map or len(class_map[cid]) == 0:
                # No slim mapping for this class: record the miss and skip.
                self.report.error(line, Report.UNMAPPED_ID, cid)
                continue
            else:
                # A class may map to several slim terms; emit one line per
                # mapped target.
                for mcid in class_map[cid]:
                    vals[col] = mcid
                    line = "\t".join(vals)
                    if outfile is not None:
                        outfile.write(line)
                    else:
                        print(line)
"def",
"map_to_subset",
"(",
"self",
",",
"file",
",",
"outfile",
"=",
"None",
",",
"ontology",
"=",
"None",
",",
"subset",
"=",
"None",
",",
"class_map",
"=",
"None",
",",
"relations",
"=",
"None",
")",
":",
"if",
"subset",
"is",
"not",
"None",
":",
"logging",
".",
"info",
"(",
"\"Creating mapping for subset: {}\"",
".",
"format",
"(",
"subset",
")",
")",
"class_map",
"=",
"ontology",
".",
"create_slim_mapping",
"(",
"subset",
"=",
"subset",
",",
"relations",
"=",
"relations",
")",
"if",
"class_map",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Neither class_map not subset is set\"",
")",
"col",
"=",
"self",
".",
"ANNOTATION_CLASS_COLUMN",
"file",
"=",
"self",
".",
"_ensure_file",
"(",
"file",
")",
"tuples",
"=",
"[",
"]",
"for",
"line",
"in",
"file",
":",
"if",
"line",
".",
"startswith",
"(",
"\"!\"",
")",
":",
"continue",
"vals",
"=",
"line",
".",
"split",
"(",
"\"\\t\"",
")",
"logging",
".",
"info",
"(",
"\"LINE: {} VALS: {}\"",
".",
"format",
"(",
"line",
",",
"vals",
")",
")",
"if",
"len",
"(",
"vals",
")",
"<",
"col",
":",
"raise",
"ValueError",
"(",
"\"Line: {} has too few cols, expect class id in col {}\"",
".",
"format",
"(",
"line",
",",
"col",
")",
")",
"cid",
"=",
"vals",
"[",
"col",
"]",
"if",
"cid",
"not",
"in",
"class_map",
"or",
"len",
"(",
"class_map",
"[",
"cid",
"]",
")",
"==",
"0",
":",
"self",
".",
"report",
".",
"error",
"(",
"line",
",",
"Report",
".",
"UNMAPPED_ID",
",",
"cid",
")",
"continue",
"else",
":",
"for",
"mcid",
"in",
"class_map",
"[",
"cid",
"]",
":",
"vals",
"[",
"col",
"]",
"=",
"mcid",
"line",
"=",
"\"\\t\"",
".",
"join",
"(",
"vals",
")",
"if",
"outfile",
"is",
"not",
"None",
":",
"outfile",
".",
"write",
"(",
"line",
")",
"else",
":",
"print",
"(",
"line",
")"
] | 40.104167 | 20.729167 |
def prune(t):
    """Resolve ``t`` to its currently defining type expression.

    Instantiated type variables are skipped over and, as a side effect,
    each variable on the chain is re-pointed directly at the final result
    (path compression), so long chains of instantiated variables collapse.
    The returned expression is always either an uninstantiated type
    variable or a type operator.

    Args:
        t: The type to be pruned

    Returns:
        An uninstantiated TypeVariable or a TypeOperator
    """
    if not isinstance(t, TypeVariable):
        return t
    if t.instance is None:
        return t
    t.instance = prune(t.instance)
    return t.instance
"def",
"prune",
"(",
"t",
")",
":",
"if",
"isinstance",
"(",
"t",
",",
"TypeVariable",
")",
":",
"if",
"t",
".",
"instance",
"is",
"not",
"None",
":",
"t",
".",
"instance",
"=",
"prune",
"(",
"t",
".",
"instance",
")",
"return",
"t",
".",
"instance",
"return",
"t"
] | 34.619048 | 23.238095 |
def input_fields(self, preamble, *args):
        """Prompt the user for a sequence of typed fields.

        An optional ``preamble`` describing the fields is shown first.
        Each field is a list of:

        - a programmer-visible name for the field
        - a string prompt to show to the user
        - a type, one of:
          - string: return a string from the user
          - password: return a string from the user without echoing input
          - boolean: return a boolean value from the user
          - integer: return an integer value from the user
        - an optional default value

        Fields are requested in the order given and the answers are
        returned in a dictionary keyed by field name.
        """
        self.new_section()
        if preamble is not None:
            self.message(preamble)
        has_defaults = any(len(field) > 3 for field in args)
        if has_defaults:
            self.message("""
Some questions have default answers which can be selected by
pressing 'Enter' at the prompt.""")
        results = {}
        for field in args:
            name, prompt, kind = field[:3]
            default = field[3] if len(field) > 3 else None
            if kind == 'string':
                results[name] = self.input(prompt, default = default)
            elif kind == 'password':
                # No default is passed for passwords; input is not echoed.
                results[name] = self.input(prompt, no_echo=True)
            elif kind == 'boolean':
                results[name] = self.input_boolean(prompt, default = default)
            elif kind == 'integer':
                results[name] = self.input_integer(prompt, default = default)
        return results
"def",
"input_fields",
"(",
"self",
",",
"preamble",
",",
"*",
"args",
")",
":",
"self",
".",
"new_section",
"(",
")",
"if",
"preamble",
"is",
"not",
"None",
":",
"self",
".",
"message",
"(",
"preamble",
")",
"if",
"any",
"(",
"[",
"True",
"for",
"x",
"in",
"args",
"if",
"len",
"(",
"x",
")",
">",
"3",
"]",
")",
":",
"self",
".",
"message",
"(",
"\"\"\"\n Some questions have default answers which can be selected by\n pressing 'Enter' at the prompt.\"\"\"",
")",
"output_dict",
"=",
"{",
"}",
"for",
"field",
"in",
"args",
":",
"(",
"field_name",
",",
"prompt",
",",
"field_type",
")",
"=",
"field",
"[",
":",
"3",
"]",
"default",
"=",
"None",
"if",
"len",
"(",
"field",
")",
">",
"3",
":",
"default",
"=",
"field",
"[",
"3",
"]",
"if",
"field_type",
"==",
"'string'",
":",
"output_dict",
"[",
"field_name",
"]",
"=",
"self",
".",
"input",
"(",
"prompt",
",",
"default",
"=",
"default",
")",
"elif",
"field_type",
"==",
"'password'",
":",
"output_dict",
"[",
"field_name",
"]",
"=",
"self",
".",
"input",
"(",
"prompt",
",",
"no_echo",
"=",
"True",
")",
"elif",
"field_type",
"==",
"'boolean'",
":",
"output_dict",
"[",
"field_name",
"]",
"=",
"self",
".",
"input_boolean",
"(",
"prompt",
",",
"default",
"=",
"default",
")",
"elif",
"field_type",
"==",
"'integer'",
":",
"output_dict",
"[",
"field_name",
"]",
"=",
"self",
".",
"input_integer",
"(",
"prompt",
",",
"default",
"=",
"default",
")",
"return",
"output_dict"
] | 39.66 | 19.86 |
def DbGetClassAttributeList(self, argin):
        """ Return the attribute names of a Tango class matching a filter.

        :param argin: Str[0] = Tango class name,
            Str[1] = attribute name filter (eg: att*)
        :type: tango.DevVarStringArray
        :return: the matching class attribute names
        :rtype: tango.DevVarStringArray """
        self._log.debug("In DbGetClassAttributeList()")
        class_name = argin[0]
        attr_filter = replace_wildcard(argin[1])
        return self.db.get_class_attribute_list(class_name, attr_filter)
"def",
"DbGetClassAttributeList",
"(",
"self",
",",
"argin",
")",
":",
"self",
".",
"_log",
".",
"debug",
"(",
"\"In DbGetClassAttributeList()\"",
")",
"class_name",
"=",
"argin",
"[",
"0",
"]",
"wildcard",
"=",
"replace_wildcard",
"(",
"argin",
"[",
"1",
"]",
")",
"return",
"self",
".",
"db",
".",
"get_class_attribute_list",
"(",
"class_name",
",",
"wildcard",
")"
] | 44.384615 | 9.846154 |
def location_path(self, path):
        """
        Set the Location-Path of the response.

        :type path: String
        :param path: the Location-Path as a string
        """
        # Drop surrounding slashes and any query component, then emit one
        # LOCATION_PATH option per path segment.
        cleaned = path.strip("/").split("?")[0]
        for segment in cleaned.split("/"):
            opt = Option()
            opt.number = defines.OptionRegistry.LOCATION_PATH.number
            opt.value = segment
            self.add_option(opt)
"def",
"location_path",
"(",
"self",
",",
"path",
")",
":",
"path",
"=",
"path",
".",
"strip",
"(",
"\"/\"",
")",
"tmp",
"=",
"path",
".",
"split",
"(",
"\"?\"",
")",
"path",
"=",
"tmp",
"[",
"0",
"]",
"paths",
"=",
"path",
".",
"split",
"(",
"\"/\"",
")",
"for",
"p",
"in",
"paths",
":",
"option",
"=",
"Option",
"(",
")",
"option",
".",
"number",
"=",
"defines",
".",
"OptionRegistry",
".",
"LOCATION_PATH",
".",
"number",
"option",
".",
"value",
"=",
"p",
"self",
".",
"add_option",
"(",
"option",
")"
] | 29.4375 | 12.8125 |
def _beaglebone_id(self):
        """Try to detect id of a Beaglebone.

        Reads the first 16 bytes of the board EEPROM through the nvmem
        sysfs node and matches the ASCII ID string against the known
        board table.

        :return: the matching board model, or ``None`` when the nvmem node
            is absent, the magic header does not match, or the ID string
            is unknown.
        """
        try:
            # NOTE(review): typical nvmem device names look like "0-0050";
            # confirm "0-00500" is correct for the targeted kernels.
            with open("/sys/bus/nvmem/devices/0-00500/nvmem", "rb") as eeprom:
                eeprom_bytes = eeprom.read(16)
        except FileNotFoundError:
            return None

        # First four bytes are the EEPROM magic header (0xAA 0x55 0x33 0xEE);
        # anything else means this is not a Beaglebone ID EEPROM.
        if eeprom_bytes[:4] != b'\xaaU3\xee':
            return None

        # The remaining 12 bytes are an ASCII board-ID string; compare it
        # against each known ID for every model.
        id_string = eeprom_bytes[4:].decode("ascii")
        for model, bb_ids in _BEAGLEBONE_BOARD_IDS.items():
            for bb_id in bb_ids:
                if id_string == bb_id[1]:
                    return model
        return None
"def",
"_beaglebone_id",
"(",
"self",
")",
":",
"try",
":",
"with",
"open",
"(",
"\"/sys/bus/nvmem/devices/0-00500/nvmem\"",
",",
"\"rb\"",
")",
"as",
"eeprom",
":",
"eeprom_bytes",
"=",
"eeprom",
".",
"read",
"(",
"16",
")",
"except",
"FileNotFoundError",
":",
"return",
"None",
"if",
"eeprom_bytes",
"[",
":",
"4",
"]",
"!=",
"b'\\xaaU3\\xee'",
":",
"return",
"None",
"id_string",
"=",
"eeprom_bytes",
"[",
"4",
":",
"]",
".",
"decode",
"(",
"\"ascii\"",
")",
"for",
"model",
",",
"bb_ids",
"in",
"_BEAGLEBONE_BOARD_IDS",
".",
"items",
"(",
")",
":",
"for",
"bb_id",
"in",
"bb_ids",
":",
"if",
"id_string",
"==",
"bb_id",
"[",
"1",
"]",
":",
"return",
"model",
"return",
"None"
] | 31.5 | 17.888889 |
def label_image(
        self,
        parent,
        basic_config,
        feature,
        image_classification_config=None,
        bounding_poly_config=None,
        polyline_config=None,
        segmentation_config=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Starts a labeling task for image. The type of image labeling task is
        configured by feature in the request.

        Example:
            >>> from google.cloud import datalabeling_v1beta1
            >>> from google.cloud.datalabeling_v1beta1 import enums
            >>>
            >>> client = datalabeling_v1beta1.DataLabelingServiceClient()
            >>>
            >>> parent = client.dataset_path('[PROJECT]', '[DATASET]')
            >>>
            >>> # TODO: Initialize `basic_config`:
            >>> basic_config = {}
            >>>
            >>> # TODO: Initialize `feature`:
            >>> feature = enums.LabelImageRequest.Feature.FEATURE_UNSPECIFIED
            >>>
            >>> response = client.label_image(parent, basic_config, feature)
            >>>
            >>> def callback(operation_future):
            ...     # Handle result.
            ...     result = operation_future.result()
            >>>
            >>> response.add_done_callback(callback)
            >>>
            >>> # Handle metadata.
            >>> metadata = response.metadata()

        Args:
            parent (str): Required. Name of the dataset to request labeling task, format:
                projects/{project\_id}/datasets/{dataset\_id}
            basic_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig]): Required. Basic human annotation config.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig`
            feature (~google.cloud.datalabeling_v1beta1.types.Feature): Required. The type of image labeling task.
            image_classification_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.ImageClassificationConfig]): Configuration for image classification task. One of
                image\_classification\_config, bounding\_poly\_config, polyline\_config
                and segmentation\_config is required.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.datalabeling_v1beta1.types.ImageClassificationConfig`
            bounding_poly_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.BoundingPolyConfig]): Configuration for bounding box and bounding poly task. One of
                image\_classification\_config, bounding\_poly\_config, polyline\_config
                and segmentation\_config is required.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.datalabeling_v1beta1.types.BoundingPolyConfig`
            polyline_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.PolylineConfig]): Configuration for polyline task. One of image\_classification\_config,
                bounding\_poly\_config, polyline\_config and segmentation\_config is
                required.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.datalabeling_v1beta1.types.PolylineConfig`
            segmentation_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.SegmentationConfig]): Configuration for segmentation task. One of
                image\_classification\_config, bounding\_poly\_config, polyline\_config
                and segmentation\_config is required.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.datalabeling_v1beta1.types.SegmentationConfig`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will not
                be retried.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "label_image" not in self._inner_api_calls:
            self._inner_api_calls[
                "label_image"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.label_image,
                default_retry=self._method_configs["LabelImage"].retry,
                default_timeout=self._method_configs["LabelImage"].timeout,
                client_info=self._client_info,
            )

        # Sanity check: We have some fields which are mutually exclusive;
        # raise ValueError if more than one is sent.
        google.api_core.protobuf_helpers.check_oneof(
            image_classification_config=image_classification_config,
            bounding_poly_config=bounding_poly_config,
            polyline_config=polyline_config,
            segmentation_config=segmentation_config,
        )

        request = data_labeling_service_pb2.LabelImageRequest(
            parent=parent,
            basic_config=basic_config,
            feature=feature,
            image_classification_config=image_classification_config,
            bounding_poly_config=bounding_poly_config,
            polyline_config=polyline_config,
            segmentation_config=segmentation_config,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        # Attach a routing header so the backend can route the request by
        # the resource name in `parent`.
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        # Issue the RPC and wrap the returned long-running operation in an
        # operation future resolving to an AnnotatedDataset.
        operation = self._inner_api_calls["label_image"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
        return google.api_core.operation.from_gapic(
            operation,
            self.transport._operations_client,
            dataset_pb2.AnnotatedDataset,
            metadata_type=proto_operations_pb2.LabelOperationMetadata,
        )
"def",
"label_image",
"(",
"self",
",",
"parent",
",",
"basic_config",
",",
"feature",
",",
"image_classification_config",
"=",
"None",
",",
"bounding_poly_config",
"=",
"None",
",",
"polyline_config",
"=",
"None",
",",
"segmentation_config",
"=",
"None",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"label_image\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"label_image\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"label_image",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"LabelImage\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"LabelImage\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"# Sanity check: We have some fields which are mutually exclusive;",
"# raise ValueError if more than one is sent.",
"google",
".",
"api_core",
".",
"protobuf_helpers",
".",
"check_oneof",
"(",
"image_classification_config",
"=",
"image_classification_config",
",",
"bounding_poly_config",
"=",
"bounding_poly_config",
",",
"polyline_config",
"=",
"polyline_config",
",",
"segmentation_config",
"=",
"segmentation_config",
",",
")",
"request",
"=",
"data_labeling_service_pb2",
".",
"LabelImageRequest",
"(",
"parent",
"=",
"parent",
",",
"basic_config",
"=",
"basic_config",
",",
"feature",
"=",
"feature",
",",
"image_classification_config",
"=",
"image_classification_config",
",",
"bounding_poly_config",
"=",
"bounding_poly_config",
",",
"polyline_config",
"=",
"polyline_config",
",",
"segmentation_config",
"=",
"segmentation_config",
",",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"operation",
"=",
"self",
".",
"_inner_api_calls",
"[",
"\"label_image\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")",
"return",
"google",
".",
"api_core",
".",
"operation",
".",
"from_gapic",
"(",
"operation",
",",
"self",
".",
"transport",
".",
"_operations_client",
",",
"dataset_pb2",
".",
"AnnotatedDataset",
",",
"metadata_type",
"=",
"proto_operations_pb2",
".",
"LabelOperationMetadata",
",",
")"
] | 48.236111 | 28 |
def send_root_file(self, filename):
        """
        Serve a static file located at the root of the domain.
        """
        max_age = self.get_send_file_max_age(filename)
        root = self.config['ROOT_FOLDER']
        return send_from_directory(root, filename, cache_timeout=max_age)
"def",
"send_root_file",
"(",
"self",
",",
"filename",
")",
":",
"cache_timeout",
"=",
"self",
".",
"get_send_file_max_age",
"(",
"filename",
")",
"return",
"send_from_directory",
"(",
"self",
".",
"config",
"[",
"'ROOT_FOLDER'",
"]",
",",
"filename",
",",
"cache_timeout",
"=",
"cache_timeout",
")"
] | 46.142857 | 15.857143 |
def GetTemplates(alias=None,location=None):
    """Gets the list of Templates available to the account and location.

    https://t3n.zendesk.com/entries/23102683-List-Available-Server-Templates

    :param alias: short code for a particular account. If none will use account's default alias
    :param location: datacenter where group resides
    """
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()
    payload = { 'AccountAlias': alias, 'Location': location }
    response = clc.v1.API.Call('post','Server/ListAvailableServerTemplates', payload)
    return response['Templates']
"def",
"GetTemplates",
"(",
"alias",
"=",
"None",
",",
"location",
"=",
"None",
")",
":",
"if",
"alias",
"is",
"None",
":",
"alias",
"=",
"clc",
".",
"v1",
".",
"Account",
".",
"GetAlias",
"(",
")",
"if",
"location",
"is",
"None",
":",
"location",
"=",
"clc",
".",
"v1",
".",
"Account",
".",
"GetLocation",
"(",
")",
"r",
"=",
"clc",
".",
"v1",
".",
"API",
".",
"Call",
"(",
"'post'",
",",
"'Server/ListAvailableServerTemplates'",
",",
"{",
"'AccountAlias'",
":",
"alias",
",",
"'Location'",
":",
"location",
"}",
")",
"return",
"(",
"r",
"[",
"'Templates'",
"]",
")"
] | 45.615385 | 26.923077 |
def set_credentials(self, username, password=None, region=None,
                    tenant_id=None, authenticate=False):
        """
        Sets the username and password directly. Because Rackspace auth uses
        the api_key, make sure that any old values are cleared.

        :param username: account username
        :param password: account password (used in place of an API key)
        :param region: optional region to authenticate against
        :param tenant_id: optional tenant ID
        :param authenticate: when True, authenticate immediately
        """
        # Password-based credentials supersede any previously stored API key.
        self.api_key = None
        super(RaxIdentity, self).set_credentials(username, password=password,
                region=region, tenant_id=tenant_id, authenticate=authenticate)
"def",
"set_credentials",
"(",
"self",
",",
"username",
",",
"password",
"=",
"None",
",",
"region",
"=",
"None",
",",
"tenant_id",
"=",
"None",
",",
"authenticate",
"=",
"False",
")",
":",
"self",
".",
"api_key",
"=",
"None",
"super",
"(",
"RaxIdentity",
",",
"self",
")",
".",
"set_credentials",
"(",
"username",
",",
"password",
"=",
"password",
",",
"region",
"=",
"region",
",",
"tenant_id",
"=",
"tenant_id",
",",
"authenticate",
"=",
"authenticate",
")"
] | 50.444444 | 19.777778 |
def write_wav(self, filename):
        """Write this sample to a WAV file.

        :param filename: the file to which to write
        """
        out = None
        try:
            out = wave.open(filename, 'w')
            out.setparams(WAVE_PARAMS)
            # Each 4-bit sample value is shifted into the high nibble of a byte.
            frames = bytearray(value << 4 for value in self.sample_data)
            out.writeframes(frames)
        finally:
            if out is not None:
                out.close()
"def",
"write_wav",
"(",
"self",
",",
"filename",
")",
":",
"wave_output",
"=",
"None",
"try",
":",
"wave_output",
"=",
"wave",
".",
"open",
"(",
"filename",
",",
"'w'",
")",
"wave_output",
".",
"setparams",
"(",
"WAVE_PARAMS",
")",
"frames",
"=",
"bytearray",
"(",
"[",
"x",
"<<",
"4",
"for",
"x",
"in",
"self",
".",
"sample_data",
"]",
")",
"wave_output",
".",
"writeframes",
"(",
"frames",
")",
"finally",
":",
"if",
"wave_output",
"is",
"not",
"None",
":",
"wave_output",
".",
"close",
"(",
")"
] | 26 | 18.777778 |
def _parse_indices(self, indices):
        r"""
        This private method accepts a list of pores or throats and returns a
        properly structured Numpy array of indices.

        Parameters
        ----------
        indices : multiple options
            This argument can accept numerous different data types including
            boolean masks, integers and arrays.  ``None`` is treated as an
            empty selection.

        Returns
        -------
        A Numpy array of integer indices, at least 1-D.

        Notes
        -----
        This method should only be called by the method that is actually using
        the locations, to avoid calling it multiple times.
        """
        # NOTE(review): 'sp' appears to be scipy aliasing numpy's array
        # functions — confirm against the module's imports.
        if indices is None:
            indices = sp.array([], ndmin=1, dtype=int)
        locs = sp.array(indices, ndmin=1)
        # If boolean array, convert to indices
        if locs.dtype == bool:
            # A boolean mask must cover all pores (Np) or all throats (Nt);
            # its length tells us which element type it selects.
            if sp.size(locs) == self.Np:
                locs = self.Ps[locs]
            elif sp.size(locs) == self.Nt:
                locs = self.Ts[locs]
            else:
                raise Exception('Mask of locations must be either ' +
                                'Np nor Nt long')
        locs = locs.astype(dtype=int)
        return locs
"def",
"_parse_indices",
"(",
"self",
",",
"indices",
")",
":",
"if",
"indices",
"is",
"None",
":",
"indices",
"=",
"sp",
".",
"array",
"(",
"[",
"]",
",",
"ndmin",
"=",
"1",
",",
"dtype",
"=",
"int",
")",
"locs",
"=",
"sp",
".",
"array",
"(",
"indices",
",",
"ndmin",
"=",
"1",
")",
"# If boolean array, convert to indices",
"if",
"locs",
".",
"dtype",
"==",
"bool",
":",
"if",
"sp",
".",
"size",
"(",
"locs",
")",
"==",
"self",
".",
"Np",
":",
"locs",
"=",
"self",
".",
"Ps",
"[",
"locs",
"]",
"elif",
"sp",
".",
"size",
"(",
"locs",
")",
"==",
"self",
".",
"Nt",
":",
"locs",
"=",
"self",
".",
"Ts",
"[",
"locs",
"]",
"else",
":",
"raise",
"Exception",
"(",
"'Mask of locations must be either '",
"+",
"'Np nor Nt long'",
")",
"locs",
"=",
"locs",
".",
"astype",
"(",
"dtype",
"=",
"int",
")",
"return",
"locs"
] | 33.676471 | 16.823529 |
def log_x_cb(self, w, val):
        """Toggle linear/log scale for X-axis.

        ``w`` is the originating GUI widget (unused); ``val`` is the new
        scale flag assigned to the plot.
        """
        self.tab_plot.logx = val
        # Redraw so the new axis scale takes effect immediately.
        self.plot_two_columns()
"def",
"log_x_cb",
"(",
"self",
",",
"w",
",",
"val",
")",
":",
"self",
".",
"tab_plot",
".",
"logx",
"=",
"val",
"self",
".",
"plot_two_columns",
"(",
")"
] | 34.75 | 7.5 |
def updateFile(cls, file_, url):
        """Check and update file compares with remote_url

        Args:
            file_: str. Local filename. Normally it's __file__
            url: str. Remote url of raw file content. Normally it's https://raw.github.com/...
        Returns:
            bool: file updated or not
        """
        def compare(s1, s2):
            # Returns (equal?, size difference in bytes).
            return s1 == s2, len(s2) - len(s1)
        if not url or not file_:
            return False
        try:
            req = urllib.request.urlopen(url)
            raw_codes = req.read()
            # NOTE(review): '\r' is stripped from the local copy only, not
            # from raw_codes — confirm this asymmetry is intended for
            # CRLF-normalized remotes.
            with open(file_, 'rb') as f:
                current_codes = f.read().replace(b'\r', b'')
            is_same, diff = compare(current_codes, raw_codes)
            if is_same:
                cit.info("{} is already up-to-date.".format(file_))
                return False
            else:
                # Interactive confirmation before overwriting the local file.
                cit.ask("A new version is available. Update? (Diff: {})".format(diff))
                if cit.get_choice(['Yes', 'No']) == 'Yes':
                    with open(file_, 'wb') as f:
                        f.write(raw_codes)
                    cit.info("Update Success.")
                    return True
                else:
                    cit.warn("Update Canceled")
                    return False
        except Exception as e:
            # Broad catch: any network/IO failure is reported, not raised.
            cit.err("{f} update failed: {e}".format(f=file_, e=e))
            return False
"def",
"updateFile",
"(",
"cls",
",",
"file_",
",",
"url",
")",
":",
"def",
"compare",
"(",
"s1",
",",
"s2",
")",
":",
"return",
"s1",
"==",
"s2",
",",
"len",
"(",
"s2",
")",
"-",
"len",
"(",
"s1",
")",
"if",
"not",
"url",
"or",
"not",
"file_",
":",
"return",
"False",
"try",
":",
"req",
"=",
"urllib",
".",
"request",
".",
"urlopen",
"(",
"url",
")",
"raw_codes",
"=",
"req",
".",
"read",
"(",
")",
"with",
"open",
"(",
"file_",
",",
"'rb'",
")",
"as",
"f",
":",
"current_codes",
"=",
"f",
".",
"read",
"(",
")",
".",
"replace",
"(",
"b'\\r'",
",",
"b''",
")",
"is_same",
",",
"diff",
"=",
"compare",
"(",
"current_codes",
",",
"raw_codes",
")",
"if",
"is_same",
":",
"cit",
".",
"info",
"(",
"\"{} is already up-to-date.\"",
".",
"format",
"(",
"file_",
")",
")",
"return",
"False",
"else",
":",
"cit",
".",
"ask",
"(",
"\"A new version is available. Update? (Diff: {})\"",
".",
"format",
"(",
"diff",
")",
")",
"if",
"cit",
".",
"get_choice",
"(",
"[",
"'Yes'",
",",
"'No'",
"]",
")",
"==",
"'Yes'",
":",
"with",
"open",
"(",
"file_",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"raw_codes",
")",
"cit",
".",
"info",
"(",
"\"Update Success.\"",
")",
"return",
"True",
"else",
":",
"cit",
".",
"warn",
"(",
"\"Update Canceled\"",
")",
"return",
"False",
"except",
"Exception",
"as",
"e",
":",
"cit",
".",
"err",
"(",
"\"{f} update failed: {e}\"",
".",
"format",
"(",
"f",
"=",
"file_",
",",
"e",
"=",
"e",
")",
")",
"return",
"False"
] | 38.083333 | 16.527778 |
def skycoord_to_pixel_scale_angle(skycoord, wcs, small_offset=1 * u.arcsec):
"""
Convert a set of SkyCoord coordinates into pixel coordinates, pixel
scales, and position angles.
Parameters
----------
skycoord : `~astropy.coordinates.SkyCoord`
Sky coordinates
wcs : `~astropy.wcs.WCS`
The WCS transformation to use
small_offset : `~astropy.units.Quantity`
A small offset to use to compute the angle
Returns
-------
pixcoord : `~regions.PixCoord`
Pixel coordinates
scale : float
The pixel scale at each location, in degrees/pixel
angle : `~astropy.units.Quantity`
The position angle of the celestial coordinate system in pixel space.
"""
# Convert to pixel coordinates
x, y = skycoord_to_pixel(skycoord, wcs, mode=skycoord_to_pixel_mode)
pixcoord = PixCoord(x=x, y=y)
# We take a point directly 'above' (in latitude) the position requested
# and convert it to pixel coordinates, then we use that to figure out the
# scale and position angle of the coordinate system at the location of
# the points.
# Find the coordinates as a representation object
r_old = skycoord.represent_as('unitspherical')
# Add a a small perturbation in the latitude direction (since longitude
# is more difficult because it is not directly an angle).
dlat = small_offset
r_new = UnitSphericalRepresentation(r_old.lon, r_old.lat + dlat)
coords_offset = skycoord.realize_frame(r_new)
# Find pixel coordinates of offset coordinates
x_offset, y_offset = skycoord_to_pixel(coords_offset, wcs,
mode=skycoord_to_pixel_mode)
# Find vector
dx = x_offset - x
dy = y_offset - y
# Find the length of the vector
scale = np.hypot(dx, dy) / dlat.to('degree').value
# Find the position angle
angle = np.arctan2(dy, dx) * u.radian
return pixcoord, scale, angle | [
"def",
"skycoord_to_pixel_scale_angle",
"(",
"skycoord",
",",
"wcs",
",",
"small_offset",
"=",
"1",
"*",
"u",
".",
"arcsec",
")",
":",
"# Convert to pixel coordinates",
"x",
",",
"y",
"=",
"skycoord_to_pixel",
"(",
"skycoord",
",",
"wcs",
",",
"mode",
"=",
"skycoord_to_pixel_mode",
")",
"pixcoord",
"=",
"PixCoord",
"(",
"x",
"=",
"x",
",",
"y",
"=",
"y",
")",
"# We take a point directly 'above' (in latitude) the position requested",
"# and convert it to pixel coordinates, then we use that to figure out the",
"# scale and position angle of the coordinate system at the location of",
"# the points.",
"# Find the coordinates as a representation object",
"r_old",
"=",
"skycoord",
".",
"represent_as",
"(",
"'unitspherical'",
")",
"# Add a a small perturbation in the latitude direction (since longitude",
"# is more difficult because it is not directly an angle).",
"dlat",
"=",
"small_offset",
"r_new",
"=",
"UnitSphericalRepresentation",
"(",
"r_old",
".",
"lon",
",",
"r_old",
".",
"lat",
"+",
"dlat",
")",
"coords_offset",
"=",
"skycoord",
".",
"realize_frame",
"(",
"r_new",
")",
"# Find pixel coordinates of offset coordinates",
"x_offset",
",",
"y_offset",
"=",
"skycoord_to_pixel",
"(",
"coords_offset",
",",
"wcs",
",",
"mode",
"=",
"skycoord_to_pixel_mode",
")",
"# Find vector",
"dx",
"=",
"x_offset",
"-",
"x",
"dy",
"=",
"y_offset",
"-",
"y",
"# Find the length of the vector",
"scale",
"=",
"np",
".",
"hypot",
"(",
"dx",
",",
"dy",
")",
"/",
"dlat",
".",
"to",
"(",
"'degree'",
")",
".",
"value",
"# Find the position angle",
"angle",
"=",
"np",
".",
"arctan2",
"(",
"dy",
",",
"dx",
")",
"*",
"u",
".",
"radian",
"return",
"pixcoord",
",",
"scale",
",",
"angle"
] | 33.578947 | 21.894737 |
def exec_scratch_virtualenv(args):
"""
goals:
- get any random site-packages off of the pythonpath
- ensure we can import virtualenv
- ensure that we're not using the interpreter that we may need to delete
- idempotency: do nothing if the above goals are already met
"""
scratch = Scratch()
if not exists(scratch.python):
run(('virtualenv', scratch.venv))
if not exists(join(scratch.src, 'virtualenv.py')):
scratch_python = venv_python(scratch.venv)
# TODO: do we allow user-defined override of which version of virtualenv to install?
tmp = scratch.src + '.tmp'
run((scratch_python, '-m', 'pip.__main__', 'install', 'virtualenv', '--target', tmp))
from os import rename
rename(tmp, scratch.src)
import sys
from os.path import realpath
# We want to compare the paths themselves as sometimes sys.path is the same
# as scratch.venv, but with a suffix of bin/..
if realpath(sys.prefix) != realpath(scratch.venv):
# TODO-TEST: sometimes we would get a stale version of venv-update
exec_((scratch.python, dotpy(__file__)) + args) # never returns
# TODO-TEST: the original venv-update's directory was on sys.path (when using symlinking)
sys.path[0] = scratch.src | [
"def",
"exec_scratch_virtualenv",
"(",
"args",
")",
":",
"scratch",
"=",
"Scratch",
"(",
")",
"if",
"not",
"exists",
"(",
"scratch",
".",
"python",
")",
":",
"run",
"(",
"(",
"'virtualenv'",
",",
"scratch",
".",
"venv",
")",
")",
"if",
"not",
"exists",
"(",
"join",
"(",
"scratch",
".",
"src",
",",
"'virtualenv.py'",
")",
")",
":",
"scratch_python",
"=",
"venv_python",
"(",
"scratch",
".",
"venv",
")",
"# TODO: do we allow user-defined override of which version of virtualenv to install?",
"tmp",
"=",
"scratch",
".",
"src",
"+",
"'.tmp'",
"run",
"(",
"(",
"scratch_python",
",",
"'-m'",
",",
"'pip.__main__'",
",",
"'install'",
",",
"'virtualenv'",
",",
"'--target'",
",",
"tmp",
")",
")",
"from",
"os",
"import",
"rename",
"rename",
"(",
"tmp",
",",
"scratch",
".",
"src",
")",
"import",
"sys",
"from",
"os",
".",
"path",
"import",
"realpath",
"# We want to compare the paths themselves as sometimes sys.path is the same",
"# as scratch.venv, but with a suffix of bin/..",
"if",
"realpath",
"(",
"sys",
".",
"prefix",
")",
"!=",
"realpath",
"(",
"scratch",
".",
"venv",
")",
":",
"# TODO-TEST: sometimes we would get a stale version of venv-update",
"exec_",
"(",
"(",
"scratch",
".",
"python",
",",
"dotpy",
"(",
"__file__",
")",
")",
"+",
"args",
")",
"# never returns",
"# TODO-TEST: the original venv-update's directory was on sys.path (when using symlinking)",
"sys",
".",
"path",
"[",
"0",
"]",
"=",
"scratch",
".",
"src"
] | 41.483871 | 22.258065 |
def set_entries(self, entries, user_scope):
"""SetEntries.
[Preview API] Set the specified setting entry values for the given user/all-users scope
:param {object} entries: The entries to set
:param str user_scope: User-Scope at which to set the values. Should be "me" for the current user or "host" for all users.
"""
route_values = {}
if user_scope is not None:
route_values['userScope'] = self._serialize.url('user_scope', user_scope, 'str')
content = self._serialize.body(entries, '{object}')
self._send(http_method='PATCH',
location_id='cd006711-163d-4cd4-a597-b05bad2556ff',
version='5.0-preview.1',
route_values=route_values,
content=content) | [
"def",
"set_entries",
"(",
"self",
",",
"entries",
",",
"user_scope",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"user_scope",
"is",
"not",
"None",
":",
"route_values",
"[",
"'userScope'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'user_scope'",
",",
"user_scope",
",",
"'str'",
")",
"content",
"=",
"self",
".",
"_serialize",
".",
"body",
"(",
"entries",
",",
"'{object}'",
")",
"self",
".",
"_send",
"(",
"http_method",
"=",
"'PATCH'",
",",
"location_id",
"=",
"'cd006711-163d-4cd4-a597-b05bad2556ff'",
",",
"version",
"=",
"'5.0-preview.1'",
",",
"route_values",
"=",
"route_values",
",",
"content",
"=",
"content",
")"
] | 52.933333 | 19.666667 |
def select_inputs(self, address: str, amount: int) -> dict:
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxos = []
utxo_sum = Decimal(0)
for tx in sorted(self.listunspent(address=address), key=itemgetter('confirmations')):
if tx["address"] not in (self.pa_parameters.P2TH_addr,
self.pa_parameters.test_P2TH_addr):
utxos.append(
MutableTxIn(txid=tx['txid'],
txout=tx['vout'],
sequence=Sequence.max(),
script_sig=ScriptSig.empty())
)
utxo_sum += Decimal(tx["amount"])
if utxo_sum >= amount:
return {'utxos': utxos, 'total': utxo_sum}
if utxo_sum < amount:
raise InsufficientFunds("Insufficient funds.")
raise Exception("undefined behavior :.(") | [
"def",
"select_inputs",
"(",
"self",
",",
"address",
":",
"str",
",",
"amount",
":",
"int",
")",
"->",
"dict",
":",
"utxos",
"=",
"[",
"]",
"utxo_sum",
"=",
"Decimal",
"(",
"0",
")",
"for",
"tx",
"in",
"sorted",
"(",
"self",
".",
"listunspent",
"(",
"address",
"=",
"address",
")",
",",
"key",
"=",
"itemgetter",
"(",
"'confirmations'",
")",
")",
":",
"if",
"tx",
"[",
"\"address\"",
"]",
"not",
"in",
"(",
"self",
".",
"pa_parameters",
".",
"P2TH_addr",
",",
"self",
".",
"pa_parameters",
".",
"test_P2TH_addr",
")",
":",
"utxos",
".",
"append",
"(",
"MutableTxIn",
"(",
"txid",
"=",
"tx",
"[",
"'txid'",
"]",
",",
"txout",
"=",
"tx",
"[",
"'vout'",
"]",
",",
"sequence",
"=",
"Sequence",
".",
"max",
"(",
")",
",",
"script_sig",
"=",
"ScriptSig",
".",
"empty",
"(",
")",
")",
")",
"utxo_sum",
"+=",
"Decimal",
"(",
"tx",
"[",
"\"amount\"",
"]",
")",
"if",
"utxo_sum",
">=",
"amount",
":",
"return",
"{",
"'utxos'",
":",
"utxos",
",",
"'total'",
":",
"utxo_sum",
"}",
"if",
"utxo_sum",
"<",
"amount",
":",
"raise",
"InsufficientFunds",
"(",
"\"Insufficient funds.\"",
")",
"raise",
"Exception",
"(",
"\"undefined behavior :.(\"",
")"
] | 41.111111 | 24.148148 |
def runUAT(self, args):
"""
Runs the Unreal Automation Tool with the supplied arguments
"""
Utility.run([self.getRunUATScript()] + args, cwd=self.getEngineRoot(), raiseOnError=True) | [
"def",
"runUAT",
"(",
"self",
",",
"args",
")",
":",
"Utility",
".",
"run",
"(",
"[",
"self",
".",
"getRunUATScript",
"(",
")",
"]",
"+",
"args",
",",
"cwd",
"=",
"self",
".",
"getEngineRoot",
"(",
")",
",",
"raiseOnError",
"=",
"True",
")"
] | 37 | 17.8 |
def flower(port, address):
"""Runs a Celery Flower web server
Celery Flower is a UI to monitor the Celery operation on a given
broker"""
BROKER_URL = celery_app.conf.BROKER_URL
cmd = (
'celery flower '
f'--broker={BROKER_URL} '
f'--port={port} '
f'--address={address} '
)
logging.info(
"The 'superset flower' command is deprecated. Please use the 'celery "
"flower' command instead.")
print(Fore.GREEN + 'Starting a Celery Flower instance')
print(Fore.BLUE + '-=' * 40)
print(Fore.YELLOW + cmd)
print(Fore.BLUE + '-=' * 40)
Popen(cmd, shell=True).wait() | [
"def",
"flower",
"(",
"port",
",",
"address",
")",
":",
"BROKER_URL",
"=",
"celery_app",
".",
"conf",
".",
"BROKER_URL",
"cmd",
"=",
"(",
"'celery flower '",
"f'--broker={BROKER_URL} '",
"f'--port={port} '",
"f'--address={address} '",
")",
"logging",
".",
"info",
"(",
"\"The 'superset flower' command is deprecated. Please use the 'celery \"",
"\"flower' command instead.\"",
")",
"print",
"(",
"Fore",
".",
"GREEN",
"+",
"'Starting a Celery Flower instance'",
")",
"print",
"(",
"Fore",
".",
"BLUE",
"+",
"'-='",
"*",
"40",
")",
"print",
"(",
"Fore",
".",
"YELLOW",
"+",
"cmd",
")",
"print",
"(",
"Fore",
".",
"BLUE",
"+",
"'-='",
"*",
"40",
")",
"Popen",
"(",
"cmd",
",",
"shell",
"=",
"True",
")",
".",
"wait",
"(",
")"
] | 31.55 | 15.8 |
def create_convert_sbml_id_function(
compartment_prefix='C_', reaction_prefix='R_',
compound_prefix='M_', decode_id=entry_id_from_cobra_encoding):
"""Create function for converting SBML IDs.
The returned function will strip prefixes, decode the ID using the provided
function. These prefixes are common on IDs in SBML models because the IDs
live in a global namespace.
"""
def convert_sbml_id(entry):
if isinstance(entry, BaseCompartmentEntry):
prefix = compartment_prefix
elif isinstance(entry, BaseReactionEntry):
prefix = reaction_prefix
elif isinstance(entry, BaseCompoundEntry):
prefix = compound_prefix
new_id = entry.id
if decode_id is not None:
new_id = decode_id(new_id)
if prefix is not None and new_id.startswith(prefix):
new_id = new_id[len(prefix):]
return new_id
return convert_sbml_id | [
"def",
"create_convert_sbml_id_function",
"(",
"compartment_prefix",
"=",
"'C_'",
",",
"reaction_prefix",
"=",
"'R_'",
",",
"compound_prefix",
"=",
"'M_'",
",",
"decode_id",
"=",
"entry_id_from_cobra_encoding",
")",
":",
"def",
"convert_sbml_id",
"(",
"entry",
")",
":",
"if",
"isinstance",
"(",
"entry",
",",
"BaseCompartmentEntry",
")",
":",
"prefix",
"=",
"compartment_prefix",
"elif",
"isinstance",
"(",
"entry",
",",
"BaseReactionEntry",
")",
":",
"prefix",
"=",
"reaction_prefix",
"elif",
"isinstance",
"(",
"entry",
",",
"BaseCompoundEntry",
")",
":",
"prefix",
"=",
"compound_prefix",
"new_id",
"=",
"entry",
".",
"id",
"if",
"decode_id",
"is",
"not",
"None",
":",
"new_id",
"=",
"decode_id",
"(",
"new_id",
")",
"if",
"prefix",
"is",
"not",
"None",
"and",
"new_id",
".",
"startswith",
"(",
"prefix",
")",
":",
"new_id",
"=",
"new_id",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"return",
"new_id",
"return",
"convert_sbml_id"
] | 36.076923 | 16.153846 |
def export_configuration_generator(self, sql, sql_args):
"""
Generator for :class:`meteorpi_model.ExportConfiguration`
:param sql:
A SQL statement which must return rows describing export configurations
:param sql_args:
Any variables required to populate the query provided in 'sql'
:return:
A generator which produces :class:`meteorpi_model.ExportConfiguration` instances from the supplied SQL,
closing any opened cursors on completion.
"""
self.con.execute(sql, sql_args)
results = self.con.fetchall()
output = []
for result in results:
if result['exportType'] == "observation":
search = mp.ObservationSearch.from_dict(json.loads(result['searchString']))
elif result['exportType'] == "file":
search = mp.FileRecordSearch.from_dict(json.loads(result['searchString']))
else:
search = mp.ObservatoryMetadataSearch.from_dict(json.loads(result['searchString']))
conf = mp.ExportConfiguration(target_url=result['targetURL'], user_id=result['targetUser'],
password=result['targetPassword'], search=search,
name=result['exportName'], description=result['description'],
enabled=result['active'], config_id=result['exportConfigId'])
output.append(conf)
return output | [
"def",
"export_configuration_generator",
"(",
"self",
",",
"sql",
",",
"sql_args",
")",
":",
"self",
".",
"con",
".",
"execute",
"(",
"sql",
",",
"sql_args",
")",
"results",
"=",
"self",
".",
"con",
".",
"fetchall",
"(",
")",
"output",
"=",
"[",
"]",
"for",
"result",
"in",
"results",
":",
"if",
"result",
"[",
"'exportType'",
"]",
"==",
"\"observation\"",
":",
"search",
"=",
"mp",
".",
"ObservationSearch",
".",
"from_dict",
"(",
"json",
".",
"loads",
"(",
"result",
"[",
"'searchString'",
"]",
")",
")",
"elif",
"result",
"[",
"'exportType'",
"]",
"==",
"\"file\"",
":",
"search",
"=",
"mp",
".",
"FileRecordSearch",
".",
"from_dict",
"(",
"json",
".",
"loads",
"(",
"result",
"[",
"'searchString'",
"]",
")",
")",
"else",
":",
"search",
"=",
"mp",
".",
"ObservatoryMetadataSearch",
".",
"from_dict",
"(",
"json",
".",
"loads",
"(",
"result",
"[",
"'searchString'",
"]",
")",
")",
"conf",
"=",
"mp",
".",
"ExportConfiguration",
"(",
"target_url",
"=",
"result",
"[",
"'targetURL'",
"]",
",",
"user_id",
"=",
"result",
"[",
"'targetUser'",
"]",
",",
"password",
"=",
"result",
"[",
"'targetPassword'",
"]",
",",
"search",
"=",
"search",
",",
"name",
"=",
"result",
"[",
"'exportName'",
"]",
",",
"description",
"=",
"result",
"[",
"'description'",
"]",
",",
"enabled",
"=",
"result",
"[",
"'active'",
"]",
",",
"config_id",
"=",
"result",
"[",
"'exportConfigId'",
"]",
")",
"output",
".",
"append",
"(",
"conf",
")",
"return",
"output"
] | 50.066667 | 29.8 |
def _ordered_struct_start_handler(handler, ctx):
"""Handles the special case of ordered structs, specified by the type ID 0xD1.
This coroutine's only purpose is to ensure that the struct in question declares at least one field name/value pair,
as required by the spec.
"""
_, self = yield
self_handler = _create_delegate_handler(self)
(length, _), _ = yield ctx.immediate_transition(
_var_uint_field_handler(self_handler, ctx)
)
if length < 2:
# A valid field name/value pair is at least two octets: one for the field name SID and one for the value.
raise IonException('Ordered structs (type ID 0xD1) must have at least one field name/value pair.')
yield ctx.immediate_transition(handler(length, ctx)) | [
"def",
"_ordered_struct_start_handler",
"(",
"handler",
",",
"ctx",
")",
":",
"_",
",",
"self",
"=",
"yield",
"self_handler",
"=",
"_create_delegate_handler",
"(",
"self",
")",
"(",
"length",
",",
"_",
")",
",",
"_",
"=",
"yield",
"ctx",
".",
"immediate_transition",
"(",
"_var_uint_field_handler",
"(",
"self_handler",
",",
"ctx",
")",
")",
"if",
"length",
"<",
"2",
":",
"# A valid field name/value pair is at least two octets: one for the field name SID and one for the value.",
"raise",
"IonException",
"(",
"'Ordered structs (type ID 0xD1) must have at least one field name/value pair.'",
")",
"yield",
"ctx",
".",
"immediate_transition",
"(",
"handler",
"(",
"length",
",",
"ctx",
")",
")"
] | 50.133333 | 26.866667 |
def format(self, model: AssetAllocationModel, full: bool = False):
""" Returns the view-friendly output of the aa model """
self.full = full
# Header
output = f"Asset Allocation model, total: {model.currency} {model.total_amount:,.2f}\n"
# Column Headers
for column in self.columns:
name = column['name']
if not self.full and name == "loc.cur.":
# Skip local currency if not displaying stocks.
continue
width = column["width"]
output += f"{name:^{width}}"
output += "\n"
output += f"-------------------------------------------------------------------------------\n"
# Asset classes
view_model = ModelMapper(model).map_to_linear(self.full)
for row in view_model:
output += self.__format_row(row) + "\n"
return output | [
"def",
"format",
"(",
"self",
",",
"model",
":",
"AssetAllocationModel",
",",
"full",
":",
"bool",
"=",
"False",
")",
":",
"self",
".",
"full",
"=",
"full",
"# Header",
"output",
"=",
"f\"Asset Allocation model, total: {model.currency} {model.total_amount:,.2f}\\n\"",
"# Column Headers",
"for",
"column",
"in",
"self",
".",
"columns",
":",
"name",
"=",
"column",
"[",
"'name'",
"]",
"if",
"not",
"self",
".",
"full",
"and",
"name",
"==",
"\"loc.cur.\"",
":",
"# Skip local currency if not displaying stocks.",
"continue",
"width",
"=",
"column",
"[",
"\"width\"",
"]",
"output",
"+=",
"f\"{name:^{width}}\"",
"output",
"+=",
"\"\\n\"",
"output",
"+=",
"f\"-------------------------------------------------------------------------------\\n\"",
"# Asset classes",
"view_model",
"=",
"ModelMapper",
"(",
"model",
")",
".",
"map_to_linear",
"(",
"self",
".",
"full",
")",
"for",
"row",
"in",
"view_model",
":",
"output",
"+=",
"self",
".",
"__format_row",
"(",
"row",
")",
"+",
"\"\\n\"",
"return",
"output"
] | 35.36 | 22.64 |
def get(self, request, bot_id, id, format=None):
"""
Get list of header parameters of a handler
---
serializer: AbsParamSerializer
responseMessages:
- code: 401
message: Not authenticated
"""
return super(HeaderParameterList, self).get(request, bot_id, id, format) | [
"def",
"get",
"(",
"self",
",",
"request",
",",
"bot_id",
",",
"id",
",",
"format",
"=",
"None",
")",
":",
"return",
"super",
"(",
"HeaderParameterList",
",",
"self",
")",
".",
"get",
"(",
"request",
",",
"bot_id",
",",
"id",
",",
"format",
")"
] | 33.7 | 12.1 |
def tcp_traceflow(packet, timestamp, *, data_link, count=NotImplemented):
"""Trace packet flow for TCP."""
if getattr(packet, 'ip', None):
ip = packet['ip']
elif getattr(packet, 'ip6', None):
ip = packet['ip6']
else:
return False, None
tcp = getattr(ip, 'tcp', None)
if tcp is not None:
flags = bin(tcp.flags)[2:].zfill(8)
data = dict(
protocol=data_link, # data link type from global header
index=count, # frame number
frame=packet2dict(packet, timestamp, data_link=data_link), # extracted packet
syn=bool(int(flags[6])), # TCP synchronise (SYN) flag
fin=bool(int(flags[7])), # TCP finish (FIN) flag
src=ipaddress.ip_address(ip.src), # source IP
dst=ipaddress.ip_address(ip.dst), # destination IP
srcport=tcp.sport, # TCP source port
dstport=tcp.dport, # TCP destination port
timestamp=timestamp, # timestamp
)
return True, data
return False, None | [
"def",
"tcp_traceflow",
"(",
"packet",
",",
"timestamp",
",",
"*",
",",
"data_link",
",",
"count",
"=",
"NotImplemented",
")",
":",
"if",
"getattr",
"(",
"packet",
",",
"'ip'",
",",
"None",
")",
":",
"ip",
"=",
"packet",
"[",
"'ip'",
"]",
"elif",
"getattr",
"(",
"packet",
",",
"'ip6'",
",",
"None",
")",
":",
"ip",
"=",
"packet",
"[",
"'ip6'",
"]",
"else",
":",
"return",
"False",
",",
"None",
"tcp",
"=",
"getattr",
"(",
"ip",
",",
"'tcp'",
",",
"None",
")",
"if",
"tcp",
"is",
"not",
"None",
":",
"flags",
"=",
"bin",
"(",
"tcp",
".",
"flags",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"8",
")",
"data",
"=",
"dict",
"(",
"protocol",
"=",
"data_link",
",",
"# data link type from global header",
"index",
"=",
"count",
",",
"# frame number",
"frame",
"=",
"packet2dict",
"(",
"packet",
",",
"timestamp",
",",
"data_link",
"=",
"data_link",
")",
",",
"# extracted packet",
"syn",
"=",
"bool",
"(",
"int",
"(",
"flags",
"[",
"6",
"]",
")",
")",
",",
"# TCP synchronise (SYN) flag",
"fin",
"=",
"bool",
"(",
"int",
"(",
"flags",
"[",
"7",
"]",
")",
")",
",",
"# TCP finish (FIN) flag",
"src",
"=",
"ipaddress",
".",
"ip_address",
"(",
"ip",
".",
"src",
")",
",",
"# source IP",
"dst",
"=",
"ipaddress",
".",
"ip_address",
"(",
"ip",
".",
"dst",
")",
",",
"# destination IP",
"srcport",
"=",
"tcp",
".",
"sport",
",",
"# TCP source port",
"dstport",
"=",
"tcp",
".",
"dport",
",",
"# TCP destination port",
"timestamp",
"=",
"timestamp",
",",
"# timestamp",
")",
"return",
"True",
",",
"data",
"return",
"False",
",",
"None"
] | 54.36 | 29.56 |
def update_ip_address(context, id, ip_address):
"""Due to NCP-1592 ensure that address_type cannot change after update."""
LOG.info("update_ip_address %s for tenant %s" % (id, context.tenant_id))
ports = []
if 'ip_address' not in ip_address:
raise n_exc.BadRequest(resource="ip_addresses",
msg="Invalid request body.")
with context.session.begin():
db_address = db_api.ip_address_find(context, id=id, scope=db_api.ONE)
if not db_address:
raise q_exc.IpAddressNotFound(addr_id=id)
iptype = db_address.address_type
if iptype == ip_types.FIXED and not CONF.QUARK.ipaddr_allow_fixed_ip:
raise n_exc.BadRequest(
resource="ip_addresses",
msg="Fixed ips cannot be updated using this interface.")
reset = ip_address['ip_address'].get('reset_allocation_time', False)
if reset and db_address['deallocated'] == 1:
if context.is_admin:
LOG.info("IP's deallocated time being manually reset")
db_address['deallocated_at'] = _get_deallocated_override()
else:
msg = "Modification of reset_allocation_time requires admin"
raise webob.exc.HTTPForbidden(detail=msg)
port_ids = ip_address['ip_address'].get('port_ids', None)
if port_ids is not None and not port_ids:
raise n_exc.BadRequest(
resource="ip_addresses",
msg="Cannot be updated with empty port_id list")
if iptype == ip_types.SHARED:
has_owner = db_address.has_any_shared_owner()
if port_ids:
if iptype == ip_types.FIXED and len(port_ids) > 1:
raise n_exc.BadRequest(
resource="ip_addresses",
msg="Fixed ips cannot be updated with more than one port.")
_raise_if_shared_and_enabled(ip_address, db_address)
ports = db_api.port_find(context, tenant_id=context.tenant_id,
id=port_ids, scope=db_api.ALL)
# NOTE(name): could be considered inefficient because we're
# converting to a list to check length. Maybe revisit
if len(ports) != len(port_ids):
raise n_exc.PortNotFound(port_id=port_ids)
validate_and_fetch_segment(ports, db_address["network_id"])
validate_port_ip_quotas(context, db_address.network_id, ports)
if iptype == ip_types.SHARED and has_owner:
for assoc in db_address.associations:
pid = assoc.port_id
if pid not in port_ids and 'none' != assoc.service:
raise q_exc.PortRequiresDisassociation()
LOG.info("Updating IP address, %s, to only be used by the"
"following ports: %s" % (db_address.address_readable,
[p.id for p in ports]))
new_address = db_api.update_port_associations_for_ip(context,
ports,
db_address)
elif iptype == ip_types.SHARED and has_owner:
raise q_exc.PortRequiresDisassociation()
elif 'deallocated' in ip_address['ip_address']\
and context.is_admin:
# Verify no port associations
if len(db_address.associations) != 0:
exc_msg = ("IP %s cannot be deallocated or allocated while"
" still associated with ports: %s"
% (db_address['address_readable'],
db_address.associations))
raise q_exc.ActionNotAuthorized(msg=exc_msg)
# NOTE: If an admin, allow a user to set deallocated to false
# in order to reserve a deallocated IP. Alternatively, allow them
# reverse that choice if a mistake was made.
if ip_address['ip_address']['deallocated'] == 'False':
db_address['deallocated'] = False
else:
db_address['deallocated'] = True
return v._make_ip_dict(db_address, context.is_admin)
else:
ipam_driver.deallocate_ip_address(context, db_address)
return v._make_ip_dict(db_address, context.is_admin)
return v._make_ip_dict(new_address, context.is_admin) | [
"def",
"update_ip_address",
"(",
"context",
",",
"id",
",",
"ip_address",
")",
":",
"LOG",
".",
"info",
"(",
"\"update_ip_address %s for tenant %s\"",
"%",
"(",
"id",
",",
"context",
".",
"tenant_id",
")",
")",
"ports",
"=",
"[",
"]",
"if",
"'ip_address'",
"not",
"in",
"ip_address",
":",
"raise",
"n_exc",
".",
"BadRequest",
"(",
"resource",
"=",
"\"ip_addresses\"",
",",
"msg",
"=",
"\"Invalid request body.\"",
")",
"with",
"context",
".",
"session",
".",
"begin",
"(",
")",
":",
"db_address",
"=",
"db_api",
".",
"ip_address_find",
"(",
"context",
",",
"id",
"=",
"id",
",",
"scope",
"=",
"db_api",
".",
"ONE",
")",
"if",
"not",
"db_address",
":",
"raise",
"q_exc",
".",
"IpAddressNotFound",
"(",
"addr_id",
"=",
"id",
")",
"iptype",
"=",
"db_address",
".",
"address_type",
"if",
"iptype",
"==",
"ip_types",
".",
"FIXED",
"and",
"not",
"CONF",
".",
"QUARK",
".",
"ipaddr_allow_fixed_ip",
":",
"raise",
"n_exc",
".",
"BadRequest",
"(",
"resource",
"=",
"\"ip_addresses\"",
",",
"msg",
"=",
"\"Fixed ips cannot be updated using this interface.\"",
")",
"reset",
"=",
"ip_address",
"[",
"'ip_address'",
"]",
".",
"get",
"(",
"'reset_allocation_time'",
",",
"False",
")",
"if",
"reset",
"and",
"db_address",
"[",
"'deallocated'",
"]",
"==",
"1",
":",
"if",
"context",
".",
"is_admin",
":",
"LOG",
".",
"info",
"(",
"\"IP's deallocated time being manually reset\"",
")",
"db_address",
"[",
"'deallocated_at'",
"]",
"=",
"_get_deallocated_override",
"(",
")",
"else",
":",
"msg",
"=",
"\"Modification of reset_allocation_time requires admin\"",
"raise",
"webob",
".",
"exc",
".",
"HTTPForbidden",
"(",
"detail",
"=",
"msg",
")",
"port_ids",
"=",
"ip_address",
"[",
"'ip_address'",
"]",
".",
"get",
"(",
"'port_ids'",
",",
"None",
")",
"if",
"port_ids",
"is",
"not",
"None",
"and",
"not",
"port_ids",
":",
"raise",
"n_exc",
".",
"BadRequest",
"(",
"resource",
"=",
"\"ip_addresses\"",
",",
"msg",
"=",
"\"Cannot be updated with empty port_id list\"",
")",
"if",
"iptype",
"==",
"ip_types",
".",
"SHARED",
":",
"has_owner",
"=",
"db_address",
".",
"has_any_shared_owner",
"(",
")",
"if",
"port_ids",
":",
"if",
"iptype",
"==",
"ip_types",
".",
"FIXED",
"and",
"len",
"(",
"port_ids",
")",
">",
"1",
":",
"raise",
"n_exc",
".",
"BadRequest",
"(",
"resource",
"=",
"\"ip_addresses\"",
",",
"msg",
"=",
"\"Fixed ips cannot be updated with more than one port.\"",
")",
"_raise_if_shared_and_enabled",
"(",
"ip_address",
",",
"db_address",
")",
"ports",
"=",
"db_api",
".",
"port_find",
"(",
"context",
",",
"tenant_id",
"=",
"context",
".",
"tenant_id",
",",
"id",
"=",
"port_ids",
",",
"scope",
"=",
"db_api",
".",
"ALL",
")",
"# NOTE(name): could be considered inefficient because we're",
"# converting to a list to check length. Maybe revisit",
"if",
"len",
"(",
"ports",
")",
"!=",
"len",
"(",
"port_ids",
")",
":",
"raise",
"n_exc",
".",
"PortNotFound",
"(",
"port_id",
"=",
"port_ids",
")",
"validate_and_fetch_segment",
"(",
"ports",
",",
"db_address",
"[",
"\"network_id\"",
"]",
")",
"validate_port_ip_quotas",
"(",
"context",
",",
"db_address",
".",
"network_id",
",",
"ports",
")",
"if",
"iptype",
"==",
"ip_types",
".",
"SHARED",
"and",
"has_owner",
":",
"for",
"assoc",
"in",
"db_address",
".",
"associations",
":",
"pid",
"=",
"assoc",
".",
"port_id",
"if",
"pid",
"not",
"in",
"port_ids",
"and",
"'none'",
"!=",
"assoc",
".",
"service",
":",
"raise",
"q_exc",
".",
"PortRequiresDisassociation",
"(",
")",
"LOG",
".",
"info",
"(",
"\"Updating IP address, %s, to only be used by the\"",
"\"following ports: %s\"",
"%",
"(",
"db_address",
".",
"address_readable",
",",
"[",
"p",
".",
"id",
"for",
"p",
"in",
"ports",
"]",
")",
")",
"new_address",
"=",
"db_api",
".",
"update_port_associations_for_ip",
"(",
"context",
",",
"ports",
",",
"db_address",
")",
"elif",
"iptype",
"==",
"ip_types",
".",
"SHARED",
"and",
"has_owner",
":",
"raise",
"q_exc",
".",
"PortRequiresDisassociation",
"(",
")",
"elif",
"'deallocated'",
"in",
"ip_address",
"[",
"'ip_address'",
"]",
"and",
"context",
".",
"is_admin",
":",
"# Verify no port associations",
"if",
"len",
"(",
"db_address",
".",
"associations",
")",
"!=",
"0",
":",
"exc_msg",
"=",
"(",
"\"IP %s cannot be deallocated or allocated while\"",
"\" still associated with ports: %s\"",
"%",
"(",
"db_address",
"[",
"'address_readable'",
"]",
",",
"db_address",
".",
"associations",
")",
")",
"raise",
"q_exc",
".",
"ActionNotAuthorized",
"(",
"msg",
"=",
"exc_msg",
")",
"# NOTE: If an admin, allow a user to set deallocated to false",
"# in order to reserve a deallocated IP. Alternatively, allow them",
"# reverse that choice if a mistake was made.",
"if",
"ip_address",
"[",
"'ip_address'",
"]",
"[",
"'deallocated'",
"]",
"==",
"'False'",
":",
"db_address",
"[",
"'deallocated'",
"]",
"=",
"False",
"else",
":",
"db_address",
"[",
"'deallocated'",
"]",
"=",
"True",
"return",
"v",
".",
"_make_ip_dict",
"(",
"db_address",
",",
"context",
".",
"is_admin",
")",
"else",
":",
"ipam_driver",
".",
"deallocate_ip_address",
"(",
"context",
",",
"db_address",
")",
"return",
"v",
".",
"_make_ip_dict",
"(",
"db_address",
",",
"context",
".",
"is_admin",
")",
"return",
"v",
".",
"_make_ip_dict",
"(",
"new_address",
",",
"context",
".",
"is_admin",
")"
] | 49.640449 | 21.977528 |
def rem2ics():
    """Command line tool to convert from Remind to iCalendar"""
    # pylint: disable=maybe-no-member
    from argparse import ArgumentParser, FileType
    from dateutil.parser import parse
    from sys import stdin, stdout

    parser = ArgumentParser(description='Converter from Remind to iCalendar syntax.')
    parser.add_argument('-s', '--startdate', type=lambda s: parse(s).date(),
                        default=date.today() - timedelta(weeks=12),
                        help='Start offset for remind call (default: -12 weeks)')
    parser.add_argument('-m', '--month', type=int, default=15,
                        help='Number of month to generate calendar beginning with startdate (default: 15)')
    parser.add_argument('-a', '--alarm', type=int, default=-10,
                        help='Trigger time for the alarm before the event in minutes (default: -10)')
    parser.add_argument('-z', '--zone',
                        help='Timezone of Remind file (default: local timezone)')
    parser.add_argument('infile', nargs='?', default=expanduser('~/.reminders'),
                        help='The Remind file to process (default: ~/.reminders)')
    parser.add_argument('outfile', nargs='?', type=FileType('w'), default=stdout,
                        help='Output iCalendar file (default: stdout)')
    args = parser.parse_args()

    zone = timezone(args.zone) if args.zone else None
    # The Remind instance was constructed identically in both branches below;
    # build it once instead of duplicating the call.
    remind = Remind(args.infile, zone, args.startdate, args.month,
                    timedelta(minutes=args.alarm))
    if args.infile == '-':
        # '-' means: read the Remind source from stdin instead of a file.
        vobject = remind.stdin_to_vobject(stdin.read())
        if vobject:
            args.outfile.write(vobject.serialize())
    else:
        args.outfile.write(remind.to_vobject().serialize())
"def",
"rem2ics",
"(",
")",
":",
"# pylint: disable=maybe-no-member",
"from",
"argparse",
"import",
"ArgumentParser",
",",
"FileType",
"from",
"dateutil",
".",
"parser",
"import",
"parse",
"from",
"sys",
"import",
"stdin",
",",
"stdout",
"parser",
"=",
"ArgumentParser",
"(",
"description",
"=",
"'Converter from Remind to iCalendar syntax.'",
")",
"parser",
".",
"add_argument",
"(",
"'-s'",
",",
"'--startdate'",
",",
"type",
"=",
"lambda",
"s",
":",
"parse",
"(",
"s",
")",
".",
"date",
"(",
")",
",",
"default",
"=",
"date",
".",
"today",
"(",
")",
"-",
"timedelta",
"(",
"weeks",
"=",
"12",
")",
",",
"help",
"=",
"'Start offset for remind call (default: -12 weeks)'",
")",
"parser",
".",
"add_argument",
"(",
"'-m'",
",",
"'--month'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"15",
",",
"help",
"=",
"'Number of month to generate calendar beginning wit startdate (default: 15)'",
")",
"parser",
".",
"add_argument",
"(",
"'-a'",
",",
"'--alarm'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"-",
"10",
",",
"help",
"=",
"'Trigger time for the alarm before the event in minutes (default: -10)'",
")",
"parser",
".",
"add_argument",
"(",
"'-z'",
",",
"'--zone'",
",",
"help",
"=",
"'Timezone of Remind file (default: local timezone)'",
")",
"parser",
".",
"add_argument",
"(",
"'infile'",
",",
"nargs",
"=",
"'?'",
",",
"default",
"=",
"expanduser",
"(",
"'~/.reminders'",
")",
",",
"help",
"=",
"'The Remind file to process (default: ~/.reminders)'",
")",
"parser",
".",
"add_argument",
"(",
"'outfile'",
",",
"nargs",
"=",
"'?'",
",",
"type",
"=",
"FileType",
"(",
"'w'",
")",
",",
"default",
"=",
"stdout",
",",
"help",
"=",
"'Output iCalendar file (default: stdout)'",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"zone",
"=",
"timezone",
"(",
"args",
".",
"zone",
")",
"if",
"args",
".",
"zone",
"else",
"None",
"if",
"args",
".",
"infile",
"==",
"'-'",
":",
"remind",
"=",
"Remind",
"(",
"args",
".",
"infile",
",",
"zone",
",",
"args",
".",
"startdate",
",",
"args",
".",
"month",
",",
"timedelta",
"(",
"minutes",
"=",
"args",
".",
"alarm",
")",
")",
"vobject",
"=",
"remind",
".",
"stdin_to_vobject",
"(",
"stdin",
".",
"read",
"(",
")",
")",
"if",
"vobject",
":",
"args",
".",
"outfile",
".",
"write",
"(",
"vobject",
".",
"serialize",
"(",
")",
")",
"else",
":",
"remind",
"=",
"Remind",
"(",
"args",
".",
"infile",
",",
"zone",
",",
"args",
".",
"startdate",
",",
"args",
".",
"month",
",",
"timedelta",
"(",
"minutes",
"=",
"args",
".",
"alarm",
")",
")",
"args",
".",
"outfile",
".",
"write",
"(",
"remind",
".",
"to_vobject",
"(",
")",
".",
"serialize",
"(",
")",
")"
] | 54.909091 | 28.515152 |
def end_of_directory(self, succeeded=True, update_listing=False,
                     cache_to_disc=True):
    '''Wrapper for xbmcplugin.endOfDirectory. Records state in
    self._end_of_directory.

    Typically it is not necessary to call this method directly, as
    calling :meth:`~xbmcswift2.Plugin.finish` will call this method.

    :raises AssertionError: if endOfDirectory was already called.
    '''
    self._update_listing = update_listing
    if self._end_of_directory:
        # Raise explicitly instead of using an ``assert False`` statement:
        # asserts are stripped under ``python -O``, which would silently
        # turn a double call into a no-op return of None.
        raise AssertionError('Already called endOfDirectory.')
    self._end_of_directory = True
    # Finalize the directory items
    return xbmcplugin.endOfDirectory(self.handle, succeeded,
                                     update_listing, cache_to_disc)
"def",
"end_of_directory",
"(",
"self",
",",
"succeeded",
"=",
"True",
",",
"update_listing",
"=",
"False",
",",
"cache_to_disc",
"=",
"True",
")",
":",
"self",
".",
"_update_listing",
"=",
"update_listing",
"if",
"not",
"self",
".",
"_end_of_directory",
":",
"self",
".",
"_end_of_directory",
"=",
"True",
"# Finalize the directory items",
"return",
"xbmcplugin",
".",
"endOfDirectory",
"(",
"self",
".",
"handle",
",",
"succeeded",
",",
"update_listing",
",",
"cache_to_disc",
")",
"assert",
"False",
",",
"'Already called endOfDirectory.'"
] | 48.133333 | 18.8 |
def relative_probability( self, l1, l2, c1, c2 ):
    """
    The relative probability for a jump between two sites with specific site types and coordination numbers.

    Args:
        l1 (Str): Site label for the initial site.
        l2 (Str): Site label for the final site.
        c1 (Int): Coordination number for the initial site.
        c2 (Int): Coordination number for the final site.

    Returns:
        (Float): The relative probability of this jump occurring.
    """
    delta_E = 0.0
    if self.site_energies:
        # Energy difference between the destination and origin site types.
        delta_E = self.site_energies[ l2 ] - self.site_energies[ l1 ]
    if self.nn_energy:
        # -1 because the hopping ion is not counted in the final site occupation number
        delta_E += ( c2 - c1 - 1 ) * self.nn_energy
    return metropolis( delta_E )
"def",
"relative_probability",
"(",
"self",
",",
"l1",
",",
"l2",
",",
"c1",
",",
"c2",
")",
":",
"if",
"self",
".",
"site_energies",
":",
"site_delta_E",
"=",
"self",
".",
"site_energies",
"[",
"l2",
"]",
"-",
"self",
".",
"site_energies",
"[",
"l1",
"]",
"else",
":",
"site_delta_E",
"=",
"0.0",
"if",
"self",
".",
"nn_energy",
":",
"delta_nn",
"=",
"c2",
"-",
"c1",
"-",
"1",
"# -1 because the hopping ion is not counted in the final site occupation number",
"site_delta_E",
"+=",
"delta_nn",
"*",
"self",
".",
"nn_energy",
"return",
"metropolis",
"(",
"site_delta_E",
")"
] | 42.809524 | 23.571429 |
def _normalize_images(self):
    """
    normalizes image filenames by prepending 'File:' if needed
    """
    if 'image' not in self.data:
        return
    for img in self.data['image']:
        original = img['file']
        # Wiki convention: spaces and underscores are interchangeable.
        fname = original.replace('_', ' ')
        if not (fname.startswith('File:') or fname.startswith('Image:')):
            fname = 'File:' + fname
        # Keep the untouched name around for reference.
        img['orig'] = original
        img['file'] = fname
"def",
"_normalize_images",
"(",
"self",
")",
":",
"if",
"'image'",
"not",
"in",
"self",
".",
"data",
":",
"return",
"for",
"img",
"in",
"self",
".",
"data",
"[",
"'image'",
"]",
":",
"fname",
"=",
"img",
"[",
"'file'",
"]",
".",
"replace",
"(",
"'_'",
",",
"' '",
")",
"fstart",
"=",
"fname",
".",
"startswith",
"(",
"'File:'",
")",
"istart",
"=",
"fname",
".",
"startswith",
"(",
"'Image:'",
")",
"if",
"not",
"fstart",
"and",
"not",
"istart",
":",
"fname",
"=",
"'File:'",
"+",
"fname",
"img",
"[",
"'orig'",
"]",
"=",
"img",
"[",
"'file'",
"]",
"img",
"[",
"'file'",
"]",
"=",
"fname"
] | 36.142857 | 6.857143 |
def update_recommender(ctx, weeks, processes):
    """
    Download and build the recommendations.

    - Fetch new statistics from the current week.
    - Generate recommendations.
    - Update the recommendations.
    """
    week_list = get_last_weeks(weeks)
    recommender = RecordRecommender(config)

    # The two most recent weeks may be incomplete: force a re-download.
    recommender.fetch_weeks(week_list[:2], overwrite=True)
    # Fetch any remaining weeks that are missing locally.
    recommender.fetch_weeks(week_list, overwrite=False)

    print("Build Profiles")
    ctx.invoke(profiles, weeks=week_list)

    print("Generate Recommendations")
    ctx.invoke(build, processes=processes)
"def",
"update_recommender",
"(",
"ctx",
",",
"weeks",
",",
"processes",
")",
":",
"weeks",
"=",
"get_last_weeks",
"(",
"weeks",
")",
"recommender",
"=",
"RecordRecommender",
"(",
"config",
")",
"# Redownload incomplete weeks",
"first_weeks",
"=",
"weeks",
"[",
":",
"2",
"]",
"recommender",
".",
"fetch_weeks",
"(",
"first_weeks",
",",
"overwrite",
"=",
"True",
")",
"# Download missing weeks",
"recommender",
".",
"fetch_weeks",
"(",
"weeks",
",",
"overwrite",
"=",
"False",
")",
"print",
"(",
"\"Build Profiles\"",
")",
"ctx",
".",
"invoke",
"(",
"profiles",
",",
"weeks",
"=",
"weeks",
")",
"print",
"(",
"\"Generate Recommendations\"",
")",
"ctx",
".",
"invoke",
"(",
"build",
",",
"processes",
"=",
"processes",
")"
] | 30 | 11.619048 |
def _read_configuration(config_filename):
    """
    Reads the configuration file and, if configured, its supplement file.

    :param str config_filename: The name of the configuration file.

    :rtype: (configparser.ConfigParser,configparser.ConfigParser)
    """
    config = ConfigParser()
    config.read(config_filename)

    if 'supplement' in config['database']:
        # Resolve the supplement relative to the main configuration file.
        # os.path.join is used instead of naive "dirname + '/'" concatenation:
        # for a bare relative filename dirname() is '', which would otherwise
        # produce the absolute path '/<supplement>'.
        path = os.path.join(os.path.dirname(config_filename),
                            config.get('database', 'supplement'))
        config_supplement = ConfigParser()
        config_supplement.read(path)
    else:
        config_supplement = None

    return config, config_supplement
"def",
"_read_configuration",
"(",
"config_filename",
")",
":",
"config",
"=",
"ConfigParser",
"(",
")",
"config",
".",
"read",
"(",
"config_filename",
")",
"if",
"'supplement'",
"in",
"config",
"[",
"'database'",
"]",
":",
"path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"config_filename",
")",
"+",
"'/'",
"+",
"config",
".",
"get",
"(",
"'database'",
",",
"'supplement'",
")",
"config_supplement",
"=",
"ConfigParser",
"(",
")",
"config_supplement",
".",
"read",
"(",
"path",
")",
"else",
":",
"config_supplement",
"=",
"None",
"return",
"config",
",",
"config_supplement"
] | 32.736842 | 17.789474 |
def regenerate(location='http://www.iana.org/assignments/language-subtag-registry',
               filename=None, default_encoding='utf-8'):
    """
    Generate the languages Python module.

    Downloads the IANA language subtag registry from *location*, keeps all
    records of type ``language`` and writes them into *filename* (default:
    ``languages.py`` next to this module).
    """
    # Raw string: '\(' in a normal string literal is an invalid escape
    # sequence and warns on modern Python. Pattern strips "(...)" remarks.
    paren = re.compile(r'\([^)]*\)')
    # Get the language list.
    data = urllib2.urlopen(location)
    if ('content-type' in data.headers and
            'charset=' in data.headers['content-type']):
        encoding = data.headers['content-type'].split('charset=')[-1]
    else:
        encoding = default_encoding
    content = data.read().decode(encoding)
    languages = []
    info = {}
    p = None
    for line in content.splitlines():
        if line == '%%':
            # Record separator: flush the record collected so far.
            if 'Type' in info and info['Type'] == 'language':
                languages.append(info)
            info = {}
        elif ':' not in line and p:
            # Continuation line: append to the previously parsed field.
            info[p[0]] = paren.sub('', p[2] + line).strip()
        else:
            p = line.partition(':')
            if p[0] not in info:  # Keep the first description as it should be the most common
                info[p[0]] = paren.sub('', p[2]).strip()
    languages_lines = ['("%s", _(u"%s")),' % (lang['Subtag'], lang['Description'])
                       for lang in languages]
    # Generate and save the file.
    if not filename:
        filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'languages.py')
    # TODO: first make a backup of the file if it exists already.
    f = codecs.open(filename, 'w', 'utf-8')
    try:
        f.write(TEMPLATE % {
            'languages': '\n '.join(languages_lines),
        })
    finally:
        # Close the handle even if formatting/writing fails.
        f.close()
"def",
"regenerate",
"(",
"location",
"=",
"'http://www.iana.org/assignments/language-subtag-registry'",
",",
"filename",
"=",
"None",
",",
"default_encoding",
"=",
"'utf-8'",
")",
":",
"paren",
"=",
"re",
".",
"compile",
"(",
"'\\([^)]*\\)'",
")",
"# Get the language list.",
"data",
"=",
"urllib2",
".",
"urlopen",
"(",
"location",
")",
"if",
"(",
"'content-type'",
"in",
"data",
".",
"headers",
"and",
"'charset='",
"in",
"data",
".",
"headers",
"[",
"'content-type'",
"]",
")",
":",
"encoding",
"=",
"data",
".",
"headers",
"[",
"'content-type'",
"]",
".",
"split",
"(",
"'charset='",
")",
"[",
"-",
"1",
"]",
"else",
":",
"encoding",
"=",
"default_encoding",
"content",
"=",
"data",
".",
"read",
"(",
")",
".",
"decode",
"(",
"encoding",
")",
"languages",
"=",
"[",
"]",
"info",
"=",
"{",
"}",
"p",
"=",
"None",
"for",
"line",
"in",
"content",
".",
"splitlines",
"(",
")",
":",
"if",
"line",
"==",
"'%%'",
":",
"if",
"'Type'",
"in",
"info",
"and",
"info",
"[",
"'Type'",
"]",
"==",
"'language'",
":",
"languages",
".",
"append",
"(",
"info",
")",
"info",
"=",
"{",
"}",
"elif",
"':'",
"not",
"in",
"line",
"and",
"p",
":",
"info",
"[",
"p",
"[",
"0",
"]",
"]",
"=",
"paren",
".",
"sub",
"(",
"''",
",",
"p",
"[",
"2",
"]",
"+",
"line",
")",
".",
"strip",
"(",
")",
"else",
":",
"p",
"=",
"line",
".",
"partition",
"(",
"':'",
")",
"if",
"not",
"p",
"[",
"0",
"]",
"in",
"info",
":",
"# Keep the first description as it should be the most common",
"info",
"[",
"p",
"[",
"0",
"]",
"]",
"=",
"paren",
".",
"sub",
"(",
"''",
",",
"p",
"[",
"2",
"]",
")",
".",
"strip",
"(",
")",
"languages_lines",
"=",
"map",
"(",
"lambda",
"x",
":",
"'(\"%s\", _(u\"%s\")),'",
"%",
"(",
"x",
"[",
"'Subtag'",
"]",
",",
"x",
"[",
"'Description'",
"]",
")",
",",
"languages",
")",
"# Generate and save the file.",
"if",
"not",
"filename",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
",",
"'languages.py'",
")",
"# TODO: first make a backup of the file if it exists already.",
"f",
"=",
"codecs",
".",
"open",
"(",
"filename",
",",
"'w'",
",",
"'utf-8'",
")",
"f",
".",
"write",
"(",
"TEMPLATE",
"%",
"{",
"'languages'",
":",
"'\\n '",
".",
"join",
"(",
"languages_lines",
")",
",",
"}",
")",
"f",
".",
"close",
"(",
")"
] | 36.780488 | 19.609756 |
def create_colormap(palette):
    """Create colormap of the given numpy file, color vector or colormap."""
    from trollimage.colormap import Colormap
    fname = palette.get('filename', None)
    if fname:
        # Palette stored as an (N, 3) array of 8-bit RGB rows in a .npy file.
        data = np.load(fname)
        count = 1.0 * data.shape[0]
        points = [(idx / count,
                   (data[idx, 0] / 255.,
                    data[idx, 1] / 255.,
                    data[idx, 2] / 255.))
                  for idx in range(int(count))]
        return Colormap(*points)
    colors = palette.get('colors', None)
    if isinstance(colors, list):
        # Explicit colour list; optional 'values' give the control points,
        # otherwise spread them evenly over [0, 1].
        values = palette.get('values', None)
        points = []
        for idx, color in enumerate(colors):
            if values:
                value = values[idx]
            else:
                value = idx / float(len(colors) - 1)
            points.append((value, tuple(color)))
        return Colormap(*points)
    if isinstance(colors, str):
        # Named colormap: return a copy of the trollimage built-in.
        from trollimage import colormap
        import copy
        return copy.copy(getattr(colormap, colors))
    return None
"def",
"create_colormap",
"(",
"palette",
")",
":",
"from",
"trollimage",
".",
"colormap",
"import",
"Colormap",
"fname",
"=",
"palette",
".",
"get",
"(",
"'filename'",
",",
"None",
")",
"if",
"fname",
":",
"data",
"=",
"np",
".",
"load",
"(",
"fname",
")",
"cmap",
"=",
"[",
"]",
"num",
"=",
"1.0",
"*",
"data",
".",
"shape",
"[",
"0",
"]",
"for",
"i",
"in",
"range",
"(",
"int",
"(",
"num",
")",
")",
":",
"cmap",
".",
"append",
"(",
"(",
"i",
"/",
"num",
",",
"(",
"data",
"[",
"i",
",",
"0",
"]",
"/",
"255.",
",",
"data",
"[",
"i",
",",
"1",
"]",
"/",
"255.",
",",
"data",
"[",
"i",
",",
"2",
"]",
"/",
"255.",
")",
")",
")",
"return",
"Colormap",
"(",
"*",
"cmap",
")",
"colors",
"=",
"palette",
".",
"get",
"(",
"'colors'",
",",
"None",
")",
"if",
"isinstance",
"(",
"colors",
",",
"list",
")",
":",
"cmap",
"=",
"[",
"]",
"values",
"=",
"palette",
".",
"get",
"(",
"'values'",
",",
"None",
")",
"for",
"idx",
",",
"color",
"in",
"enumerate",
"(",
"colors",
")",
":",
"if",
"values",
":",
"value",
"=",
"values",
"[",
"idx",
"]",
"else",
":",
"value",
"=",
"idx",
"/",
"float",
"(",
"len",
"(",
"colors",
")",
"-",
"1",
")",
"cmap",
".",
"append",
"(",
"(",
"value",
",",
"tuple",
"(",
"color",
")",
")",
")",
"return",
"Colormap",
"(",
"*",
"cmap",
")",
"if",
"isinstance",
"(",
"colors",
",",
"str",
")",
":",
"from",
"trollimage",
"import",
"colormap",
"import",
"copy",
"return",
"copy",
".",
"copy",
"(",
"getattr",
"(",
"colormap",
",",
"colors",
")",
")",
"return",
"None"
] | 32.451613 | 14.451613 |
def fuzzy_search(self, *filters):
    """
    Perform a "fuzzy" search that matches the given characters in the given order.

    :param filters: The pattern(s) to search for.
    :returns: The matched password names (a list of strings).
    """
    logger.verbose(
        "Performing fuzzy search on %s (%s) ..",
        pluralize(len(filters), "pattern"),
        concatenate(map(repr, filters)),
    )
    patterns = [create_fuzzy_pattern(f) for f in filters]
    # An entry matches only when every pattern matches its name.
    matches = [
        entry for entry in self.filtered_entries
        if all(pattern.search(entry.name) for pattern in patterns)
    ]
    logger.log(
        logging.INFO if matches else logging.VERBOSE,
        "Matched %s using fuzzy search.",
        pluralize(len(matches), "password"),
    )
    return matches
"def",
"fuzzy_search",
"(",
"self",
",",
"*",
"filters",
")",
":",
"matches",
"=",
"[",
"]",
"logger",
".",
"verbose",
"(",
"\"Performing fuzzy search on %s (%s) ..\"",
",",
"pluralize",
"(",
"len",
"(",
"filters",
")",
",",
"\"pattern\"",
")",
",",
"concatenate",
"(",
"map",
"(",
"repr",
",",
"filters",
")",
")",
")",
"patterns",
"=",
"list",
"(",
"map",
"(",
"create_fuzzy_pattern",
",",
"filters",
")",
")",
"for",
"entry",
"in",
"self",
".",
"filtered_entries",
":",
"if",
"all",
"(",
"p",
".",
"search",
"(",
"entry",
".",
"name",
")",
"for",
"p",
"in",
"patterns",
")",
":",
"matches",
".",
"append",
"(",
"entry",
")",
"logger",
".",
"log",
"(",
"logging",
".",
"INFO",
"if",
"matches",
"else",
"logging",
".",
"VERBOSE",
",",
"\"Matched %s using fuzzy search.\"",
",",
"pluralize",
"(",
"len",
"(",
"matches",
")",
",",
"\"password\"",
")",
",",
")",
"return",
"matches"
] | 39.47619 | 20.142857 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.