text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def _interpret_angle(name, angle_object, angle_float, unit='degrees'):
"""Return an angle in radians from one of two arguments.
It is common for Skyfield routines to accept both an argument like
`alt` that takes an Angle object as well as an `alt_degrees` that
can be given a bare float or a sexagesimal tuple. A pair of such
arguments can be passed to this routine for interpretation.
"""
if angle_object is not None:
if isinstance(angle_object, Angle):
return angle_object.radians
elif angle_float is not None:
return _unsexagesimalize(angle_float) * _from_degrees
raise ValueError('you must either provide the {0}= parameter with'
' an Angle argument or supply the {0}_{1}= parameter'
' with a numeric argument'.format(name, unit)) | [
"def",
"_interpret_angle",
"(",
"name",
",",
"angle_object",
",",
"angle_float",
",",
"unit",
"=",
"'degrees'",
")",
":",
"if",
"angle_object",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"angle_object",
",",
"Angle",
")",
":",
"return",
"angle_object",
".",
"radians",
"elif",
"angle_float",
"is",
"not",
"None",
":",
"return",
"_unsexagesimalize",
"(",
"angle_float",
")",
"*",
"_from_degrees",
"raise",
"ValueError",
"(",
"'you must either provide the {0}= parameter with'",
"' an Angle argument or supply the {0}_{1}= parameter'",
"' with a numeric argument'",
".",
"format",
"(",
"name",
",",
"unit",
")",
")"
] | 48.647059 | 20.705882 |
def on_connect(client):
"""
Sample on_connect function.
Handles new connections.
"""
print "++ Opened connection to %s" % client.addrport()
broadcast('%s joins the conversation.\n' % client.addrport() )
CLIENT_LIST.append(client)
client.send("Welcome to the Chat Server, %s.\n" % client.addrport() ) | [
"def",
"on_connect",
"(",
"client",
")",
":",
"print",
"\"++ Opened connection to %s\"",
"%",
"client",
".",
"addrport",
"(",
")",
"broadcast",
"(",
"'%s joins the conversation.\\n'",
"%",
"client",
".",
"addrport",
"(",
")",
")",
"CLIENT_LIST",
".",
"append",
"(",
"client",
")",
"client",
".",
"send",
"(",
"\"Welcome to the Chat Server, %s.\\n\"",
"%",
"client",
".",
"addrport",
"(",
")",
")"
] | 35.888889 | 13.888889 |
def in_dir(directory, create=True):
"""Context manager to execute a code block in a directory.
* The directory is created if it does not exist (unless
create=False is set)
* At the end or after an exception code always returns to
the directory that was the current directory before entering
the block.
"""
startdir = os.getcwd()
try:
try:
os.chdir(directory)
logger.debug("Working in {directory!r}...".format(**vars()))
except OSError as err:
if create and err.errno == errno.ENOENT:
os.makedirs(directory)
os.chdir(directory)
logger.info("Working in {directory!r} (newly created)...".format(**vars()))
else:
logger.exception("Failed to start working in {directory!r}.".format(**vars()))
raise
yield os.getcwd()
finally:
os.chdir(startdir) | [
"def",
"in_dir",
"(",
"directory",
",",
"create",
"=",
"True",
")",
":",
"startdir",
"=",
"os",
".",
"getcwd",
"(",
")",
"try",
":",
"try",
":",
"os",
".",
"chdir",
"(",
"directory",
")",
"logger",
".",
"debug",
"(",
"\"Working in {directory!r}...\"",
".",
"format",
"(",
"*",
"*",
"vars",
"(",
")",
")",
")",
"except",
"OSError",
"as",
"err",
":",
"if",
"create",
"and",
"err",
".",
"errno",
"==",
"errno",
".",
"ENOENT",
":",
"os",
".",
"makedirs",
"(",
"directory",
")",
"os",
".",
"chdir",
"(",
"directory",
")",
"logger",
".",
"info",
"(",
"\"Working in {directory!r} (newly created)...\"",
".",
"format",
"(",
"*",
"*",
"vars",
"(",
")",
")",
")",
"else",
":",
"logger",
".",
"exception",
"(",
"\"Failed to start working in {directory!r}.\"",
".",
"format",
"(",
"*",
"*",
"vars",
"(",
")",
")",
")",
"raise",
"yield",
"os",
".",
"getcwd",
"(",
")",
"finally",
":",
"os",
".",
"chdir",
"(",
"startdir",
")"
] | 36.88 | 19.88 |
def chart(colors, data, args, labels):
"""Handle the normalization of data and the printing of the graph."""
len_categories = len(data[0])
if len_categories > 1:
# Stacked graph
if args['stacked']:
normal_dat = normalize(data, args['width'])
stacked_graph(labels, data, normal_dat, len_categories,
args, colors)
return
if not colors:
colors = [None] * len_categories
# Multiple series graph with different scales
# Normalization per category
if args['different_scale']:
for i in range(len_categories):
cat_data = []
for dat in data:
cat_data.append([dat[i]])
# Normalize data, handle negatives.
normal_cat_data = normalize(cat_data, args['width'])
# Generate data for a row.
for row in horiz_rows(labels, cat_data, normal_cat_data,
args, [colors[i]]):
# Print the row
if not args['vertical']:
print_row(*row)
else:
vertic = vertically(*row, args=args)
# Vertical graph
if args['vertical']:
print_vertical(vertic, labels, colors[i], args)
print()
value_list.clear(), zipped_list.clear(), vertical_list.clear()
return
# One category/Multiple series graph with same scale
# All-together normalization
if not args['stacked']:
normal_dat = normalize(data, args['width'])
for row in horiz_rows(labels, data, normal_dat, args, colors):
if not args['vertical']:
print_row(*row)
else:
vertic = vertically(*row, args=args)
if args['vertical'] and len_categories == 1:
if colors:
color = colors[0]
else:
color = None
print_vertical(vertic, labels, color, args)
print() | [
"def",
"chart",
"(",
"colors",
",",
"data",
",",
"args",
",",
"labels",
")",
":",
"len_categories",
"=",
"len",
"(",
"data",
"[",
"0",
"]",
")",
"if",
"len_categories",
">",
"1",
":",
"# Stacked graph",
"if",
"args",
"[",
"'stacked'",
"]",
":",
"normal_dat",
"=",
"normalize",
"(",
"data",
",",
"args",
"[",
"'width'",
"]",
")",
"stacked_graph",
"(",
"labels",
",",
"data",
",",
"normal_dat",
",",
"len_categories",
",",
"args",
",",
"colors",
")",
"return",
"if",
"not",
"colors",
":",
"colors",
"=",
"[",
"None",
"]",
"*",
"len_categories",
"# Multiple series graph with different scales",
"# Normalization per category",
"if",
"args",
"[",
"'different_scale'",
"]",
":",
"for",
"i",
"in",
"range",
"(",
"len_categories",
")",
":",
"cat_data",
"=",
"[",
"]",
"for",
"dat",
"in",
"data",
":",
"cat_data",
".",
"append",
"(",
"[",
"dat",
"[",
"i",
"]",
"]",
")",
"# Normalize data, handle negatives.",
"normal_cat_data",
"=",
"normalize",
"(",
"cat_data",
",",
"args",
"[",
"'width'",
"]",
")",
"# Generate data for a row.",
"for",
"row",
"in",
"horiz_rows",
"(",
"labels",
",",
"cat_data",
",",
"normal_cat_data",
",",
"args",
",",
"[",
"colors",
"[",
"i",
"]",
"]",
")",
":",
"# Print the row",
"if",
"not",
"args",
"[",
"'vertical'",
"]",
":",
"print_row",
"(",
"*",
"row",
")",
"else",
":",
"vertic",
"=",
"vertically",
"(",
"*",
"row",
",",
"args",
"=",
"args",
")",
"# Vertical graph",
"if",
"args",
"[",
"'vertical'",
"]",
":",
"print_vertical",
"(",
"vertic",
",",
"labels",
",",
"colors",
"[",
"i",
"]",
",",
"args",
")",
"print",
"(",
")",
"value_list",
".",
"clear",
"(",
")",
",",
"zipped_list",
".",
"clear",
"(",
")",
",",
"vertical_list",
".",
"clear",
"(",
")",
"return",
"# One category/Multiple series graph with same scale",
"# All-together normalization",
"if",
"not",
"args",
"[",
"'stacked'",
"]",
":",
"normal_dat",
"=",
"normalize",
"(",
"data",
",",
"args",
"[",
"'width'",
"]",
")",
"for",
"row",
"in",
"horiz_rows",
"(",
"labels",
",",
"data",
",",
"normal_dat",
",",
"args",
",",
"colors",
")",
":",
"if",
"not",
"args",
"[",
"'vertical'",
"]",
":",
"print_row",
"(",
"*",
"row",
")",
"else",
":",
"vertic",
"=",
"vertically",
"(",
"*",
"row",
",",
"args",
"=",
"args",
")",
"if",
"args",
"[",
"'vertical'",
"]",
"and",
"len_categories",
"==",
"1",
":",
"if",
"colors",
":",
"color",
"=",
"colors",
"[",
"0",
"]",
"else",
":",
"color",
"=",
"None",
"print_vertical",
"(",
"vertic",
",",
"labels",
",",
"color",
",",
"args",
")",
"print",
"(",
")"
] | 34.147541 | 17.606557 |
def parallel(func, inputs, n_jobs, expand_args=False):
"""
Convenience wrapper around joblib's parallelization.
"""
if expand_args:
return Parallel(n_jobs=n_jobs)(delayed(func)(*args) for args in inputs)
else:
return Parallel(n_jobs=n_jobs)(delayed(func)(arg) for arg in inputs) | [
"def",
"parallel",
"(",
"func",
",",
"inputs",
",",
"n_jobs",
",",
"expand_args",
"=",
"False",
")",
":",
"if",
"expand_args",
":",
"return",
"Parallel",
"(",
"n_jobs",
"=",
"n_jobs",
")",
"(",
"delayed",
"(",
"func",
")",
"(",
"*",
"args",
")",
"for",
"args",
"in",
"inputs",
")",
"else",
":",
"return",
"Parallel",
"(",
"n_jobs",
"=",
"n_jobs",
")",
"(",
"delayed",
"(",
"func",
")",
"(",
"arg",
")",
"for",
"arg",
"in",
"inputs",
")"
] | 38.375 | 19.625 |
def get_realnames(packages):
"""
Return list of unique case-correct package names.
Packages are listed in a case-insensitive sorted order.
"""
return sorted({get_distribution(p).project_name for p in packages},
key=lambda n: n.lower()) | [
"def",
"get_realnames",
"(",
"packages",
")",
":",
"return",
"sorted",
"(",
"{",
"get_distribution",
"(",
"p",
")",
".",
"project_name",
"for",
"p",
"in",
"packages",
"}",
",",
"key",
"=",
"lambda",
"n",
":",
"n",
".",
"lower",
"(",
")",
")"
] | 33.375 | 14.625 |
def draw(self, y_pred, residuals, train=False, **kwargs):
"""
Draw the residuals against the predicted value for the specified split.
It is best to draw the training split first, then the test split so
that the test split (usually smaller) is above the training split;
particularly if the histogram is turned on.
Parameters
----------
y_pred : ndarray or Series of length n
An array or series of predicted target values
residuals : ndarray or Series of length n
An array or series of the difference between the predicted and the
target values
train : boolean, default: False
If False, `draw` assumes that the residual points being plotted
are from the test data; if True, `draw` assumes the residuals
are the train data.
Returns
------
ax : the axis with the plotted figure
"""
if train:
color = self.colors['train_point']
label = "Train $R^2 = {:0.3f}$".format(self.train_score_)
alpha = self.alphas['train_point']
else:
color = self.colors['test_point']
label = "Test $R^2 = {:0.3f}$".format(self.test_score_)
alpha = self.alphas['test_point']
# Update the legend information
self._labels.append(label)
self._colors.append(color)
# Draw the residuals scatter plot
self.ax.scatter(
y_pred, residuals, c=color, alpha=alpha, label=label
)
# Add residuals histogram
if self.hist in {True, 'frequency'}:
self.hax.hist(residuals, bins=50, orientation="horizontal", color=color)
elif self.hist == 'density':
self.hax.hist(
residuals, bins=50, orientation="horizontal", density=True, color=color
)
# Ensure the current axes is always the main residuals axes
plt.sca(self.ax)
return self.ax | [
"def",
"draw",
"(",
"self",
",",
"y_pred",
",",
"residuals",
",",
"train",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"train",
":",
"color",
"=",
"self",
".",
"colors",
"[",
"'train_point'",
"]",
"label",
"=",
"\"Train $R^2 = {:0.3f}$\"",
".",
"format",
"(",
"self",
".",
"train_score_",
")",
"alpha",
"=",
"self",
".",
"alphas",
"[",
"'train_point'",
"]",
"else",
":",
"color",
"=",
"self",
".",
"colors",
"[",
"'test_point'",
"]",
"label",
"=",
"\"Test $R^2 = {:0.3f}$\"",
".",
"format",
"(",
"self",
".",
"test_score_",
")",
"alpha",
"=",
"self",
".",
"alphas",
"[",
"'test_point'",
"]",
"# Update the legend information",
"self",
".",
"_labels",
".",
"append",
"(",
"label",
")",
"self",
".",
"_colors",
".",
"append",
"(",
"color",
")",
"# Draw the residuals scatter plot",
"self",
".",
"ax",
".",
"scatter",
"(",
"y_pred",
",",
"residuals",
",",
"c",
"=",
"color",
",",
"alpha",
"=",
"alpha",
",",
"label",
"=",
"label",
")",
"# Add residuals histogram",
"if",
"self",
".",
"hist",
"in",
"{",
"True",
",",
"'frequency'",
"}",
":",
"self",
".",
"hax",
".",
"hist",
"(",
"residuals",
",",
"bins",
"=",
"50",
",",
"orientation",
"=",
"\"horizontal\"",
",",
"color",
"=",
"color",
")",
"elif",
"self",
".",
"hist",
"==",
"'density'",
":",
"self",
".",
"hax",
".",
"hist",
"(",
"residuals",
",",
"bins",
"=",
"50",
",",
"orientation",
"=",
"\"horizontal\"",
",",
"density",
"=",
"True",
",",
"color",
"=",
"color",
")",
"# Ensure the current axes is always the main residuals axes",
"plt",
".",
"sca",
"(",
"self",
".",
"ax",
")",
"return",
"self",
".",
"ax"
] | 36.036364 | 21.236364 |
def append(self, text, afterline=None):
"""Append text to the current buffer.
Args:
text (str or Sequence[str]): One or many lines of text to append.
afterline (Optional[int]):
Line number to append after. If 0, text is prepended before the
first line; if ``None``, at end of the buffer.
"""
if afterline:
self._vim.current.buffer.append(text, afterline)
else:
self._vim.current.buffer.append(text) | [
"def",
"append",
"(",
"self",
",",
"text",
",",
"afterline",
"=",
"None",
")",
":",
"if",
"afterline",
":",
"self",
".",
"_vim",
".",
"current",
".",
"buffer",
".",
"append",
"(",
"text",
",",
"afterline",
")",
"else",
":",
"self",
".",
"_vim",
".",
"current",
".",
"buffer",
".",
"append",
"(",
"text",
")"
] | 39 | 18.692308 |
def is_opening_code_fence(line: str, parser: str = 'github'):
r"""Determine if the given line is possibly the opening of a fenced code block.
:parameter line: a single markdown line to evaluate.
:parameter parser: decides rules on how to generate the anchor text.
Defaults to ``github``.
:type line: str
:type parser: str
:returns: None if the input line is not an opening code fence. Otherwise,
returns the string which will identify the closing code fence
according to the input parsers' rules.
:rtype: typing.Optional[str]
:raises: a built-in exception.
"""
if (parser == 'github' or parser == 'cmark' or parser == 'gitlab'
or parser == 'commonmarker'):
markers = md_parser['github']['code fence']['marker']
marker_min_length = md_parser['github']['code fence'][
'min_marker_characters']
if not is_valid_code_fence_indent(line):
return None
line = line.lstrip(' ').rstrip('\n')
if not line.startswith(
(markers[0] * marker_min_length, markers[1] * marker_min_length)):
return None
if line == len(line) * line[0]:
info_string = str()
else:
info_string = line.lstrip(line[0])
# Backticks or tildes in info string are explicitly forbidden.
if markers[0] in info_string or markers[1] in info_string:
return None
# Solves example 107. See:
# https://github.github.com/gfm/#example-107
if line.rstrip(markers[0]) != line and line.rstrip(markers[1]) != line:
return None
return line.rstrip(info_string)
elif parser == 'redcarpet':
# TODO.
return None | [
"def",
"is_opening_code_fence",
"(",
"line",
":",
"str",
",",
"parser",
":",
"str",
"=",
"'github'",
")",
":",
"if",
"(",
"parser",
"==",
"'github'",
"or",
"parser",
"==",
"'cmark'",
"or",
"parser",
"==",
"'gitlab'",
"or",
"parser",
"==",
"'commonmarker'",
")",
":",
"markers",
"=",
"md_parser",
"[",
"'github'",
"]",
"[",
"'code fence'",
"]",
"[",
"'marker'",
"]",
"marker_min_length",
"=",
"md_parser",
"[",
"'github'",
"]",
"[",
"'code fence'",
"]",
"[",
"'min_marker_characters'",
"]",
"if",
"not",
"is_valid_code_fence_indent",
"(",
"line",
")",
":",
"return",
"None",
"line",
"=",
"line",
".",
"lstrip",
"(",
"' '",
")",
".",
"rstrip",
"(",
"'\\n'",
")",
"if",
"not",
"line",
".",
"startswith",
"(",
"(",
"markers",
"[",
"0",
"]",
"*",
"marker_min_length",
",",
"markers",
"[",
"1",
"]",
"*",
"marker_min_length",
")",
")",
":",
"return",
"None",
"if",
"line",
"==",
"len",
"(",
"line",
")",
"*",
"line",
"[",
"0",
"]",
":",
"info_string",
"=",
"str",
"(",
")",
"else",
":",
"info_string",
"=",
"line",
".",
"lstrip",
"(",
"line",
"[",
"0",
"]",
")",
"# Backticks or tildes in info string are explicitly forbidden.",
"if",
"markers",
"[",
"0",
"]",
"in",
"info_string",
"or",
"markers",
"[",
"1",
"]",
"in",
"info_string",
":",
"return",
"None",
"# Solves example 107. See:",
"# https://github.github.com/gfm/#example-107",
"if",
"line",
".",
"rstrip",
"(",
"markers",
"[",
"0",
"]",
")",
"!=",
"line",
"and",
"line",
".",
"rstrip",
"(",
"markers",
"[",
"1",
"]",
")",
"!=",
"line",
":",
"return",
"None",
"return",
"line",
".",
"rstrip",
"(",
"info_string",
")",
"elif",
"parser",
"==",
"'redcarpet'",
":",
"# TODO.",
"return",
"None"
] | 38.795455 | 18.659091 |
def update(self, **kwargs):
"""Customize the lazy field"""
assert not self.called
self.kw.update(kwargs)
return self | [
"def",
"update",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"not",
"self",
".",
"called",
"self",
".",
"kw",
".",
"update",
"(",
"kwargs",
")",
"return",
"self"
] | 28.8 | 10.8 |
def is_ec2_instance():
"""Try fetching instance metadata at 'curl http://169.254.169.254/latest/meta-data/'
to see if host is on an ec2 instance"""
# Note: this code assumes that docker containers running on ec2 instances
# inherit instances metadata, which they do as of 2016-08-25
global IS_EC2_INSTANCE
if IS_EC2_INSTANCE != -1:
# Returned the cached value
return IS_EC2_INSTANCE
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(0.2)
try:
s.connect(("169.254.169.254", 80))
IS_EC2_INSTANCE = 1
return True
except socket.timeout:
IS_EC2_INSTANCE = 0
return False
except socket.error:
IS_EC2_INSTANCE = 0
return False | [
"def",
"is_ec2_instance",
"(",
")",
":",
"# Note: this code assumes that docker containers running on ec2 instances",
"# inherit instances metadata, which they do as of 2016-08-25",
"global",
"IS_EC2_INSTANCE",
"if",
"IS_EC2_INSTANCE",
"!=",
"-",
"1",
":",
"# Returned the cached value",
"return",
"IS_EC2_INSTANCE",
"s",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_STREAM",
")",
"s",
".",
"settimeout",
"(",
"0.2",
")",
"try",
":",
"s",
".",
"connect",
"(",
"(",
"\"169.254.169.254\"",
",",
"80",
")",
")",
"IS_EC2_INSTANCE",
"=",
"1",
"return",
"True",
"except",
"socket",
".",
"timeout",
":",
"IS_EC2_INSTANCE",
"=",
"0",
"return",
"False",
"except",
"socket",
".",
"error",
":",
"IS_EC2_INSTANCE",
"=",
"0",
"return",
"False"
] | 29.28 | 19.16 |
def serialize_attrib(self, op):
"""
Serializer for :meth:`SpiffWorkflow.operators.Attrib`.
Example::
<attribute>foobar</attribute>
"""
elem = etree.Element('attribute')
elem.text = op.name
return elem | [
"def",
"serialize_attrib",
"(",
"self",
",",
"op",
")",
":",
"elem",
"=",
"etree",
".",
"Element",
"(",
"'attribute'",
")",
"elem",
".",
"text",
"=",
"op",
".",
"name",
"return",
"elem"
] | 23.636364 | 15.454545 |
def get_bytes(self):
"""set_client_DH_params#f5045f1f nonce:int128 server_nonce:int128 encrypted_data:bytes = Set_client_DH_params_answer"""
ret = struct.pack("<I16s16s", set_client_DH_params.constructor, self.nonce, self.server_nonce)
bytes_io = BytesIO()
bytes_io.write(ret)
serialize_string(bytes_io, self.encrypted_data)
return bytes_io.getvalue() | [
"def",
"get_bytes",
"(",
"self",
")",
":",
"ret",
"=",
"struct",
".",
"pack",
"(",
"\"<I16s16s\"",
",",
"set_client_DH_params",
".",
"constructor",
",",
"self",
".",
"nonce",
",",
"self",
".",
"server_nonce",
")",
"bytes_io",
"=",
"BytesIO",
"(",
")",
"bytes_io",
".",
"write",
"(",
"ret",
")",
"serialize_string",
"(",
"bytes_io",
",",
"self",
".",
"encrypted_data",
")",
"return",
"bytes_io",
".",
"getvalue",
"(",
")"
] | 39.3 | 24.8 |
def get_affinity(pid):
"""
Returns the affinity mask of the process whose ID is pid.
@param pid: process PID (0 == current process)
@type pid: C{int}
@return: set of CPU ids
@rtype: C{set}
"""
cpuset = cpu_set_t()
result = set()
libnuma.sched_getaffinity(pid, sizeof(cpu_set_t), byref(cpuset))
for i in range(0, sizeof(cpu_set_t)*8):
if __CPU_ISSET(i, cpuset):
result.add(i)
return result | [
"def",
"get_affinity",
"(",
"pid",
")",
":",
"cpuset",
"=",
"cpu_set_t",
"(",
")",
"result",
"=",
"set",
"(",
")",
"libnuma",
".",
"sched_getaffinity",
"(",
"pid",
",",
"sizeof",
"(",
"cpu_set_t",
")",
",",
"byref",
"(",
"cpuset",
")",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"sizeof",
"(",
"cpu_set_t",
")",
"*",
"8",
")",
":",
"if",
"__CPU_ISSET",
"(",
"i",
",",
"cpuset",
")",
":",
"result",
".",
"add",
"(",
"i",
")",
"return",
"result"
] | 22.1 | 20.8 |
def _read_by_weight(self, F, att_weights, value):
"""Read from the value matrix given the attention weights.
Parameters
----------
F : symbol or ndarray
att_weights : Symbol or NDArray
Attention weights.
For single-head attention,
Shape (batch_size, query_length, memory_length).
For multi-head attention,
Shape (batch_size, num_heads, query_length, memory_length).
value : Symbol or NDArray
Value of the memory. Shape (batch_size, memory_length, total_value_dim)
Returns
-------
context_vec: Symbol or NDArray
Shape (batch_size, query_length, context_vec_dim)
"""
output = F.batch_dot(att_weights, value)
return output | [
"def",
"_read_by_weight",
"(",
"self",
",",
"F",
",",
"att_weights",
",",
"value",
")",
":",
"output",
"=",
"F",
".",
"batch_dot",
"(",
"att_weights",
",",
"value",
")",
"return",
"output"
] | 35.818182 | 16.772727 |
def auth(view, **kwargs):
"""
This plugin allow user to login to application
kwargs:
- signin_view
- signout_view
- template_dir
- menu:
- name
- group_name
- ...
@plugin(user.login, model=model.User)
class MyAccount(Juice):
pass
"""
endpoint_namespace = view.__name__ + ":%s"
view_name = view.__name__
UserModel = kwargs.pop("model")
User = UserModel.User
login_view = endpoint_namespace % "login"
on_signin_view = kwargs.get("signin_view", "Index:index")
on_signout_view = kwargs.get("signout_view", "Index:index")
template_dir = kwargs.get("template_dir", "Juice/Plugin/User/Account")
template_page = template_dir + "/%s.html"
login_manager = LoginManager()
login_manager.login_view = login_view
login_manager.login_message_category = "error"
init_app(login_manager.init_app)
menu_context = view
_menu = kwargs.get("menu", {})
if _menu:
@menu(**_menu)
class UserAccountMenu(object): pass
menu_context = UserAccountMenu
@login_manager.user_loader
def load_user(userid):
return User.get(userid)
View.g(__USER_AUTH_ENABLED__=True)
class Auth(object):
decorators = view.decorators + [login_required]
SESSION_KEY_SET_EMAIL_DATA = "set_email_tmp_data"
TEMP_DATA_KEY = "login_tmp_data"
@property
def tmp_data(self):
return session[self.TEMP_DATA_KEY]
@tmp_data.setter
def tmp_data(self, data):
session[self.TEMP_DATA_KEY] = data
def _login_enabled(self):
if self.get_config("USER_AUTH_ALLOW_LOGIN") is not True:
abort("UserLoginDisabledError")
def _signup_enabled(self):
if self.get_config("USER_AUTH_ALLOW_SIGNUP") is not True:
abort("UserSignupDisabledError")
def _oauth_enabled(self):
if self.get_config("USER_AUTH_ALLOW_OAUTH") is not True:
abort("UserOAuthDisabledError")
def _send_reset_password(self, user):
delivery = self.get_config("USER_AUTH_PASSWORD_RESET_METHOD")
token_reset_ttl = self.get_config("USER_AUTH_TOKEN_RESET_TTL", 60)
new_password = None
if delivery.upper() == "TOKEN":
token = user.set_temp_login(token_reset_ttl)
url = url_for(endpoint_namespace % "reset_password",
token=token,
_external=True)
else:
new_password = user.set_password(password=None, random=True)
url = url_for(endpoint_namespace % "login", _external=True)
mail.send(template="reset-password.txt",
method_=delivery,
to=user.email,
name=user.email,
url=url,
new_password=new_password)
@classmethod
def login_user(cls, user):
login_user(user)
now = datetime.datetime.now()
user.update(last_login=now, last_visited=now)
@menu("Login",
endpoint=endpoint_namespace % "login",
visible_with_auth_user=False,
extends=menu_context)
@template(template_page % "login",
endpoint_namespace=endpoint_namespace)
@route("login/",
methods=["GET", "POST"],
endpoint=endpoint_namespace % "login")
@no_login_required
def login(self):
""" Login page """
self._login_enabled()
logout_user()
self.tmp_data = None
self.meta_tags(title="Login")
if request.method == "POST":
email = request.form.get("email").strip()
password = request.form.get("password").strip()
if not email or not password:
flash("Email or Password is empty", "error")
return redirect(url_for(login_view, next=request.form.get("next")))
user = User.get_by_email(email)
if user and user.password_hash and user.password_matched(password):
self.login_user(user)
return redirect(request.form.get("next") or url_for(on_signin_view))
else:
flash("Email or Password is invalid", "error")
return redirect(url_for(login_view, next=request.form.get("next")))
return dict(login_url_next=request.args.get("next", ""),
login_url_default=url_for(on_signin_view),
signup_enabled=self.get_config("USER_AUTH_ALLOW_SIGNUP"),
oauth_enabled=self.get_config("USER_AUTH_ALLOW_LOGIN"))
@menu("Logout",
endpoint=endpoint_namespace % "logout",
visible_with_auth_user=True,
order=100,
extends=menu_context)
@route("logout/",
endpoint=endpoint_namespace % "logout")
@no_login_required
def logout(self):
logout_user()
return redirect(url_for(on_signout_view or login_view))
@menu("Signup",
endpoint=endpoint_namespace % "signup",
visible_with_auth_user=False,
extends=menu_context)
@template(template_page % "signup",
endpoint_namespace=endpoint_namespace)
@route("signup/",
methods=["GET", "POST"],
endpoint=endpoint_namespace % "signup")
@no_login_required
def signup(self):
"""
For Email Signup
:return:
"""
self._login_enabled()
self._signup_enabled()
self.meta_tags(title="Signup")
if request.method == "POST":
# reCaptcha
if not recaptcha.verify():
flash("Invalid Security code", "error")
return redirect(url_for(endpoint_namespace % "signup",
next=request.form.get("next")))
try:
name = request.form.get("name")
email = request.form.get("email")
password = request.form.get("password")
password2 = request.form.get("password2")
profile_image_url = request.form.get("profile_image_url", None)
if not name:
raise UserError("Name is required")
elif not utils.is_valid_email(email):
raise UserError("Invalid email address '%s'" % email)
elif not password.strip() or password.strip() != password2.strip():
raise UserError("Passwords don't match")
elif not utils.is_valid_password(password):
raise UserError("Invalid password")
else:
new_account = User.new(email=email,
password=password.strip(),
first_name=name,
profile_image_url=profile_image_url,
signup_method="email")
self.login_user(new_account)
return redirect(request.form.get("next") or url_for(on_signin_view))
except ApplicationError as ex:
flash(ex.message, "error")
return redirect(url_for(endpoint_namespace % "signup",
next=request.form.get("next")))
logout_user()
return dict(login_url_next=request.args.get("next", ""))
@route("lost-password/",
methods=["GET", "POST"],
endpoint=endpoint_namespace % "lost_password")
@template(template_page % "lost_password",
endpoint_namespace=endpoint_namespace)
@no_login_required
def lost_password(self):
self._login_enabled()
logout_user()
self.meta_tags(title="Lost Password")
if request.method == "POST":
email = request.form.get("email")
user = User.get_by_email(email)
if user:
self._send_reset_password(user)
flash("A new password has been sent to '%s'" % email, "success")
else:
flash("Invalid email address", "error")
return redirect(url_for(login_view))
else:
return {}
@menu("Account Settings",
endpoint=endpoint_namespace % "account_settings",
order=99,
visible_with_auth_user=True,
extends=menu_context)
@template(template_page % "account_settings",
endpoint_namespace=endpoint_namespace)
@route("account-settings",
methods=["GET", "POST"],
endpoint=endpoint_namespace % "account_settings")
@fresh_login_required
def account_settings(self):
self.meta_tags(title="Account Settings")
if request.method == "POST":
action = request.form.get("action")
try:
action = action.lower()
#
if action == "info":
first_name = request.form.get("first_name").strip()
last_name = request.form.get("last_name", "").strip()
data = {
"first_name": first_name,
"last_name": last_name
}
current_user.update(**data)
flash("Account info updated successfully!", "success")
#
elif action == "login":
confirm_password = request.form.get("confirm-password").strip()
if current_user.password_matched(confirm_password):
self.change_login_handler()
flash("Login Info updated successfully!", "success")
else:
flash("Invalid password", "error")
#
elif action == "password":
confirm_password = request.form.get("confirm-password").strip()
if current_user.password_matched(confirm_password):
self.change_password_handler()
flash("Password updated successfully!", "success")
else:
flash("Invalid password", "error")
elif action == "profile-photo":
file = request.files.get("file")
if file:
prefix = "profile-photos/%s/" % current_user.id
extensions = ["jpg", "jpeg", "png", "gif"]
my_photo = storage.upload(file,
prefix=prefix,
allowed_extensions=extensions)
if my_photo:
url = my_photo.url
current_user.update(profile_image_url=url)
flash("Profile Image updated successfully!", "success")
else:
raise UserError("Invalid action")
except Exception as e:
flash(e.message, "error")
return redirect(url_for(endpoint_namespace % "account_settings"))
return {}
@classmethod
def change_login_handler(cls, user_context=None, email=None):
if not user_context:
user_context = current_user
if not email:
email = request.form.get("email").strip()
if not utils.is_valid_email(email):
raise UserWarning("Invalid email address '%s'" % email)
else:
if email != user_context.email and User.get_by_email(email):
raise UserWarning("Email exists already '%s'" % email)
elif email != user_context.email:
user_context.update(email=email)
return True
return False
@classmethod
def change_password_handler(cls, user_context=None, password=None,
password2=None):
if not user_context:
user_context = current_user
if not password:
password = request.form.get("password").strip()
if not password2:
password2 = request.form.get("password2").strip()
if password:
if password != password2:
raise UserWarning("Password don't match")
elif not utils.is_valid_password(password):
raise UserWarning("Invalid password")
else:
user_context.set_password(password)
return True
else:
raise UserWarning("Password is empty")
# OAUTH Login
@route("oauth-login/<provider>",
methods=["GET", "POST"],
endpoint=endpoint_namespace % "oauth_login")
@template(template_page % "oauth_login",
endpoint_namespace=endpoint_namespace)
@no_login_required
def oauth_login(self, provider):
""" Login via oauth providers """
self._login_enabled()
self._oauth_enabled()
provider = provider.lower()
result = oauth.login(provider)
response = oauth.response
popup_js_custom = {
"action": "",
"url": ""
}
if result:
if result.error:
pass
elif result.user:
result.user.update()
oauth_user = result.user
user = User.get_by_oauth(provider=provider,
provider_user_id=oauth_user.id)
if not user:
if oauth_user.email and User.get_by_email(oauth_user.email):
flash("Account already exists with this email '%s'. "
"Try to login or retrieve your password " % oauth_user.email, "error")
popup_js_custom.update({
"action": "redirect",
"url": url_for(login_view, next=request.form.get("next"))
})
else:
tmp_data = {
"is_oauth": True,
"provider": provider,
"id": oauth_user.id,
"name": oauth_user.name,
"picture": oauth_user.picture,
"first_name": oauth_user.first_name,
"last_name": oauth_user.last_name,
"email": oauth_user.email,
"link": oauth_user.link
}
if not oauth_user.email:
self.tmp_data = tmp_data
popup_js_custom.update({
"action": "redirect",
"url": url_for(endpoint_namespace % "setup_login")
})
else:
try:
picture = oauth_user.picture
user = User.new(email=oauth_user.email,
name=oauth_user.name,
signup_method=provider,
profile_image_url=picture
)
user.add_oauth(provider,
oauth_user.provider_id,
name=oauth_user.name,
email=oauth_user.email,
profile_image_url=oauth_user.picture,
link=oauth_user.link)
except ModelError as e:
flash(e.message, "error")
popup_js_custom.update({
"action": "redirect",
"url": url_for(endpoint_namespace % "login")
})
if user:
self.login_user(user)
return dict(popup_js=result.popup_js(custom=popup_js_custom),
template_=template_page % "oauth_login")
return response
@template(template_page % "setup_login",
endpoint_namespace=endpoint_namespace)
@route("setup-login/", methods=["GET", "POST"],
endpoint=endpoint_namespace % "setup_login")
def setup_login(self):
"""
Allows to setup a email password if it's not provided specially
coming from oauth-login
:return:
"""
self._login_enabled()
self.meta_tags(title="Setup Login")
# Only user without email can set email
if current_user.is_authenticated() and current_user.email:
return redirect(url_for(endpoint_namespace % "account_settings"))
if self.tmp_data:
if request.method == "POST":
if not self.tmp_data["is_oauth"]:
return redirect(endpoint_namespace % "login")
try:
email = request.form.get("email")
password = request.form.get("password")
password2 = request.form.get("password2")
if not utils.is_valid_email(email):
raise UserError("Invalid email address '%s'" % email)
elif User.get_by_email(email):
raise UserError("An account exists already with this email address '%s' " % email)
elif not password.strip() or password.strip() != password2.strip():
raise UserError("Passwords don't match")
elif not utils.is_valid_password(password):
raise UserError("Invalid password")
else:
user = User.new(email=email,
password=password.strip(),
name=self.tmp_data["name"],
profile_image_url=self.tmp_data["picture"],
signup_method=self.tmp_data["provider"])
user.add_oauth(self.tmp_data["provider"],
self.tmp_data["id"],
name=self.tmp_data["name"],
email=email,
profile_image_url=self.tmp_data["picture"],
link=self.tmp_data["link"])
self.login_user(user)
self.tmp_data = None
return redirect(request.form.get("next") or url_for(on_signin_view))
except ApplicationError as ex:
flash(ex.message, "error")
return redirect(url_for(endpoint_namespace % "login"))
return dict(provider=self.tmp_data)
else:
return redirect(url_for(endpoint_namespace % "login"))
@route("reset-password/<token>",
methods=["GET", "POST"],
endpoint=endpoint_namespace % "reset_password")
@template(template_page % "reset_password",
endpoint_namespace=endpoint_namespace)
@no_login_required
def reset_password(self, token):
self._login_enabled()
logout_user()
self.meta_tags(title="Reset Password")
user = User.get_by_temp_login(token)
if user:
if not user.has_temp_login:
return redirect(url_for(on_signin_view))
if request.method == "POST":
try:
self.change_password_handler(user_context=user)
user.clear_temp_login()
flash("Password updated successfully!", "success")
return redirect(url_for(on_signin_view))
except Exception as ex:
flash("Error: %s" % ex.message, "error")
return redirect(url_for(endpoint_namespace % "reset_password",
token=token))
else:
return dict(token=token)
else:
abort(404, "Invalid token")
@route("oauth-connect", methods=["POST"],
endpoint="%s:oauth_connect" % endpoint_namespace)
def oauth_connect(self):
""" To login via social """
email = request.form.get("email").strip()
name = request.form.get("name").strip()
provider = request.form.get("provider").strip()
provider_user_id = request.form.get("provider_user_id").strip()
image_url = request.form.get("image_url").strip()
next = request.form.get("next", "")
try:
current_user.oauth_connect(provider=provider,
provider_user_id=provider_user_id,
email=email,
name=name,
image_url=image_url)
except Exception as ex:
flash("Unable to link your account", "error")
return redirect(url_for(endpoint_namespace % "account_settings"))
return Auth | [
"def",
"auth",
"(",
"view",
",",
"*",
"*",
"kwargs",
")",
":",
"endpoint_namespace",
"=",
"view",
".",
"__name__",
"+",
"\":%s\"",
"view_name",
"=",
"view",
".",
"__name__",
"UserModel",
"=",
"kwargs",
".",
"pop",
"(",
"\"model\"",
")",
"User",
"=",
"UserModel",
".",
"User",
"login_view",
"=",
"endpoint_namespace",
"%",
"\"login\"",
"on_signin_view",
"=",
"kwargs",
".",
"get",
"(",
"\"signin_view\"",
",",
"\"Index:index\"",
")",
"on_signout_view",
"=",
"kwargs",
".",
"get",
"(",
"\"signout_view\"",
",",
"\"Index:index\"",
")",
"template_dir",
"=",
"kwargs",
".",
"get",
"(",
"\"template_dir\"",
",",
"\"Juice/Plugin/User/Account\"",
")",
"template_page",
"=",
"template_dir",
"+",
"\"/%s.html\"",
"login_manager",
"=",
"LoginManager",
"(",
")",
"login_manager",
".",
"login_view",
"=",
"login_view",
"login_manager",
".",
"login_message_category",
"=",
"\"error\"",
"init_app",
"(",
"login_manager",
".",
"init_app",
")",
"menu_context",
"=",
"view",
"_menu",
"=",
"kwargs",
".",
"get",
"(",
"\"menu\"",
",",
"{",
"}",
")",
"if",
"_menu",
":",
"@",
"menu",
"(",
"*",
"*",
"_menu",
")",
"class",
"UserAccountMenu",
"(",
"object",
")",
":",
"pass",
"menu_context",
"=",
"UserAccountMenu",
"@",
"login_manager",
".",
"user_loader",
"def",
"load_user",
"(",
"userid",
")",
":",
"return",
"User",
".",
"get",
"(",
"userid",
")",
"View",
".",
"g",
"(",
"__USER_AUTH_ENABLED__",
"=",
"True",
")",
"class",
"Auth",
"(",
"object",
")",
":",
"decorators",
"=",
"view",
".",
"decorators",
"+",
"[",
"login_required",
"]",
"SESSION_KEY_SET_EMAIL_DATA",
"=",
"\"set_email_tmp_data\"",
"TEMP_DATA_KEY",
"=",
"\"login_tmp_data\"",
"@",
"property",
"def",
"tmp_data",
"(",
"self",
")",
":",
"return",
"session",
"[",
"self",
".",
"TEMP_DATA_KEY",
"]",
"@",
"tmp_data",
".",
"setter",
"def",
"tmp_data",
"(",
"self",
",",
"data",
")",
":",
"session",
"[",
"self",
".",
"TEMP_DATA_KEY",
"]",
"=",
"data",
"def",
"_login_enabled",
"(",
"self",
")",
":",
"if",
"self",
".",
"get_config",
"(",
"\"USER_AUTH_ALLOW_LOGIN\"",
")",
"is",
"not",
"True",
":",
"abort",
"(",
"\"UserLoginDisabledError\"",
")",
"def",
"_signup_enabled",
"(",
"self",
")",
":",
"if",
"self",
".",
"get_config",
"(",
"\"USER_AUTH_ALLOW_SIGNUP\"",
")",
"is",
"not",
"True",
":",
"abort",
"(",
"\"UserSignupDisabledError\"",
")",
"def",
"_oauth_enabled",
"(",
"self",
")",
":",
"if",
"self",
".",
"get_config",
"(",
"\"USER_AUTH_ALLOW_OAUTH\"",
")",
"is",
"not",
"True",
":",
"abort",
"(",
"\"UserOAuthDisabledError\"",
")",
"def",
"_send_reset_password",
"(",
"self",
",",
"user",
")",
":",
"delivery",
"=",
"self",
".",
"get_config",
"(",
"\"USER_AUTH_PASSWORD_RESET_METHOD\"",
")",
"token_reset_ttl",
"=",
"self",
".",
"get_config",
"(",
"\"USER_AUTH_TOKEN_RESET_TTL\"",
",",
"60",
")",
"new_password",
"=",
"None",
"if",
"delivery",
".",
"upper",
"(",
")",
"==",
"\"TOKEN\"",
":",
"token",
"=",
"user",
".",
"set_temp_login",
"(",
"token_reset_ttl",
")",
"url",
"=",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"reset_password\"",
",",
"token",
"=",
"token",
",",
"_external",
"=",
"True",
")",
"else",
":",
"new_password",
"=",
"user",
".",
"set_password",
"(",
"password",
"=",
"None",
",",
"random",
"=",
"True",
")",
"url",
"=",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"login\"",
",",
"_external",
"=",
"True",
")",
"mail",
".",
"send",
"(",
"template",
"=",
"\"reset-password.txt\"",
",",
"method_",
"=",
"delivery",
",",
"to",
"=",
"user",
".",
"email",
",",
"name",
"=",
"user",
".",
"email",
",",
"url",
"=",
"url",
",",
"new_password",
"=",
"new_password",
")",
"@",
"classmethod",
"def",
"login_user",
"(",
"cls",
",",
"user",
")",
":",
"login_user",
"(",
"user",
")",
"now",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"user",
".",
"update",
"(",
"last_login",
"=",
"now",
",",
"last_visited",
"=",
"now",
")",
"@",
"menu",
"(",
"\"Login\"",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"login\"",
",",
"visible_with_auth_user",
"=",
"False",
",",
"extends",
"=",
"menu_context",
")",
"@",
"template",
"(",
"template_page",
"%",
"\"login\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"route",
"(",
"\"login/\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"login\"",
")",
"@",
"no_login_required",
"def",
"login",
"(",
"self",
")",
":",
"\"\"\" Login page \"\"\"",
"self",
".",
"_login_enabled",
"(",
")",
"logout_user",
"(",
")",
"self",
".",
"tmp_data",
"=",
"None",
"self",
".",
"meta_tags",
"(",
"title",
"=",
"\"Login\"",
")",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
".",
"strip",
"(",
")",
"password",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password\"",
")",
".",
"strip",
"(",
")",
"if",
"not",
"email",
"or",
"not",
"password",
":",
"flash",
"(",
"\"Email or Password is empty\"",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"login_view",
",",
"next",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
")",
")",
"user",
"=",
"User",
".",
"get_by_email",
"(",
"email",
")",
"if",
"user",
"and",
"user",
".",
"password_hash",
"and",
"user",
".",
"password_matched",
"(",
"password",
")",
":",
"self",
".",
"login_user",
"(",
"user",
")",
"return",
"redirect",
"(",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
"or",
"url_for",
"(",
"on_signin_view",
")",
")",
"else",
":",
"flash",
"(",
"\"Email or Password is invalid\"",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"login_view",
",",
"next",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
")",
")",
"return",
"dict",
"(",
"login_url_next",
"=",
"request",
".",
"args",
".",
"get",
"(",
"\"next\"",
",",
"\"\"",
")",
",",
"login_url_default",
"=",
"url_for",
"(",
"on_signin_view",
")",
",",
"signup_enabled",
"=",
"self",
".",
"get_config",
"(",
"\"USER_AUTH_ALLOW_SIGNUP\"",
")",
",",
"oauth_enabled",
"=",
"self",
".",
"get_config",
"(",
"\"USER_AUTH_ALLOW_LOGIN\"",
")",
")",
"@",
"menu",
"(",
"\"Logout\"",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"logout\"",
",",
"visible_with_auth_user",
"=",
"True",
",",
"order",
"=",
"100",
",",
"extends",
"=",
"menu_context",
")",
"@",
"route",
"(",
"\"logout/\"",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"logout\"",
")",
"@",
"no_login_required",
"def",
"logout",
"(",
"self",
")",
":",
"logout_user",
"(",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"on_signout_view",
"or",
"login_view",
")",
")",
"@",
"menu",
"(",
"\"Signup\"",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"signup\"",
",",
"visible_with_auth_user",
"=",
"False",
",",
"extends",
"=",
"menu_context",
")",
"@",
"template",
"(",
"template_page",
"%",
"\"signup\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"route",
"(",
"\"signup/\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"signup\"",
")",
"@",
"no_login_required",
"def",
"signup",
"(",
"self",
")",
":",
"\"\"\"\n For Email Signup\n :return:\n \"\"\"",
"self",
".",
"_login_enabled",
"(",
")",
"self",
".",
"_signup_enabled",
"(",
")",
"self",
".",
"meta_tags",
"(",
"title",
"=",
"\"Signup\"",
")",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"# reCaptcha",
"if",
"not",
"recaptcha",
".",
"verify",
"(",
")",
":",
"flash",
"(",
"\"Invalid Security code\"",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"signup\"",
",",
"next",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
")",
")",
"try",
":",
"name",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"name\"",
")",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
"password",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password\"",
")",
"password2",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password2\"",
")",
"profile_image_url",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"profile_image_url\"",
",",
"None",
")",
"if",
"not",
"name",
":",
"raise",
"UserError",
"(",
"\"Name is required\"",
")",
"elif",
"not",
"utils",
".",
"is_valid_email",
"(",
"email",
")",
":",
"raise",
"UserError",
"(",
"\"Invalid email address '%s'\"",
"%",
"email",
")",
"elif",
"not",
"password",
".",
"strip",
"(",
")",
"or",
"password",
".",
"strip",
"(",
")",
"!=",
"password2",
".",
"strip",
"(",
")",
":",
"raise",
"UserError",
"(",
"\"Passwords don't match\"",
")",
"elif",
"not",
"utils",
".",
"is_valid_password",
"(",
"password",
")",
":",
"raise",
"UserError",
"(",
"\"Invalid password\"",
")",
"else",
":",
"new_account",
"=",
"User",
".",
"new",
"(",
"email",
"=",
"email",
",",
"password",
"=",
"password",
".",
"strip",
"(",
")",
",",
"first_name",
"=",
"name",
",",
"profile_image_url",
"=",
"profile_image_url",
",",
"signup_method",
"=",
"\"email\"",
")",
"self",
".",
"login_user",
"(",
"new_account",
")",
"return",
"redirect",
"(",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
"or",
"url_for",
"(",
"on_signin_view",
")",
")",
"except",
"ApplicationError",
"as",
"ex",
":",
"flash",
"(",
"ex",
".",
"message",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"signup\"",
",",
"next",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
")",
")",
"logout_user",
"(",
")",
"return",
"dict",
"(",
"login_url_next",
"=",
"request",
".",
"args",
".",
"get",
"(",
"\"next\"",
",",
"\"\"",
")",
")",
"@",
"route",
"(",
"\"lost-password/\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"lost_password\"",
")",
"@",
"template",
"(",
"template_page",
"%",
"\"lost_password\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"no_login_required",
"def",
"lost_password",
"(",
"self",
")",
":",
"self",
".",
"_login_enabled",
"(",
")",
"logout_user",
"(",
")",
"self",
".",
"meta_tags",
"(",
"title",
"=",
"\"Lost Password\"",
")",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
"user",
"=",
"User",
".",
"get_by_email",
"(",
"email",
")",
"if",
"user",
":",
"self",
".",
"_send_reset_password",
"(",
"user",
")",
"flash",
"(",
"\"A new password has been sent to '%s'\"",
"%",
"email",
",",
"\"success\"",
")",
"else",
":",
"flash",
"(",
"\"Invalid email address\"",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"login_view",
")",
")",
"else",
":",
"return",
"{",
"}",
"@",
"menu",
"(",
"\"Account Settings\"",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"account_settings\"",
",",
"order",
"=",
"99",
",",
"visible_with_auth_user",
"=",
"True",
",",
"extends",
"=",
"menu_context",
")",
"@",
"template",
"(",
"template_page",
"%",
"\"account_settings\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"route",
"(",
"\"account-settings\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"account_settings\"",
")",
"@",
"fresh_login_required",
"def",
"account_settings",
"(",
"self",
")",
":",
"self",
".",
"meta_tags",
"(",
"title",
"=",
"\"Account Settings\"",
")",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"action",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"action\"",
")",
"try",
":",
"action",
"=",
"action",
".",
"lower",
"(",
")",
"#",
"if",
"action",
"==",
"\"info\"",
":",
"first_name",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"first_name\"",
")",
".",
"strip",
"(",
")",
"last_name",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"last_name\"",
",",
"\"\"",
")",
".",
"strip",
"(",
")",
"data",
"=",
"{",
"\"first_name\"",
":",
"first_name",
",",
"\"last_name\"",
":",
"last_name",
"}",
"current_user",
".",
"update",
"(",
"*",
"*",
"data",
")",
"flash",
"(",
"\"Account info updated successfully!\"",
",",
"\"success\"",
")",
"#",
"elif",
"action",
"==",
"\"login\"",
":",
"confirm_password",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"confirm-password\"",
")",
".",
"strip",
"(",
")",
"if",
"current_user",
".",
"password_matched",
"(",
"confirm_password",
")",
":",
"self",
".",
"change_login_handler",
"(",
")",
"flash",
"(",
"\"Login Info updated successfully!\"",
",",
"\"success\"",
")",
"else",
":",
"flash",
"(",
"\"Invalid password\"",
",",
"\"error\"",
")",
"#",
"elif",
"action",
"==",
"\"password\"",
":",
"confirm_password",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"confirm-password\"",
")",
".",
"strip",
"(",
")",
"if",
"current_user",
".",
"password_matched",
"(",
"confirm_password",
")",
":",
"self",
".",
"change_password_handler",
"(",
")",
"flash",
"(",
"\"Password updated successfully!\"",
",",
"\"success\"",
")",
"else",
":",
"flash",
"(",
"\"Invalid password\"",
",",
"\"error\"",
")",
"elif",
"action",
"==",
"\"profile-photo\"",
":",
"file",
"=",
"request",
".",
"files",
".",
"get",
"(",
"\"file\"",
")",
"if",
"file",
":",
"prefix",
"=",
"\"profile-photos/%s/\"",
"%",
"current_user",
".",
"id",
"extensions",
"=",
"[",
"\"jpg\"",
",",
"\"jpeg\"",
",",
"\"png\"",
",",
"\"gif\"",
"]",
"my_photo",
"=",
"storage",
".",
"upload",
"(",
"file",
",",
"prefix",
"=",
"prefix",
",",
"allowed_extensions",
"=",
"extensions",
")",
"if",
"my_photo",
":",
"url",
"=",
"my_photo",
".",
"url",
"current_user",
".",
"update",
"(",
"profile_image_url",
"=",
"url",
")",
"flash",
"(",
"\"Profile Image updated successfully!\"",
",",
"\"success\"",
")",
"else",
":",
"raise",
"UserError",
"(",
"\"Invalid action\"",
")",
"except",
"Exception",
"as",
"e",
":",
"flash",
"(",
"e",
".",
"message",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"account_settings\"",
")",
")",
"return",
"{",
"}",
"@",
"classmethod",
"def",
"change_login_handler",
"(",
"cls",
",",
"user_context",
"=",
"None",
",",
"email",
"=",
"None",
")",
":",
"if",
"not",
"user_context",
":",
"user_context",
"=",
"current_user",
"if",
"not",
"email",
":",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
".",
"strip",
"(",
")",
"if",
"not",
"utils",
".",
"is_valid_email",
"(",
"email",
")",
":",
"raise",
"UserWarning",
"(",
"\"Invalid email address '%s'\"",
"%",
"email",
")",
"else",
":",
"if",
"email",
"!=",
"user_context",
".",
"email",
"and",
"User",
".",
"get_by_email",
"(",
"email",
")",
":",
"raise",
"UserWarning",
"(",
"\"Email exists already '%s'\"",
"%",
"email",
")",
"elif",
"email",
"!=",
"user_context",
".",
"email",
":",
"user_context",
".",
"update",
"(",
"email",
"=",
"email",
")",
"return",
"True",
"return",
"False",
"@",
"classmethod",
"def",
"change_password_handler",
"(",
"cls",
",",
"user_context",
"=",
"None",
",",
"password",
"=",
"None",
",",
"password2",
"=",
"None",
")",
":",
"if",
"not",
"user_context",
":",
"user_context",
"=",
"current_user",
"if",
"not",
"password",
":",
"password",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password\"",
")",
".",
"strip",
"(",
")",
"if",
"not",
"password2",
":",
"password2",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password2\"",
")",
".",
"strip",
"(",
")",
"if",
"password",
":",
"if",
"password",
"!=",
"password2",
":",
"raise",
"UserWarning",
"(",
"\"Password don't match\"",
")",
"elif",
"not",
"utils",
".",
"is_valid_password",
"(",
"password",
")",
":",
"raise",
"UserWarning",
"(",
"\"Invalid password\"",
")",
"else",
":",
"user_context",
".",
"set_password",
"(",
"password",
")",
"return",
"True",
"else",
":",
"raise",
"UserWarning",
"(",
"\"Password is empty\"",
")",
"# OAUTH Login",
"@",
"route",
"(",
"\"oauth-login/<provider>\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"oauth_login\"",
")",
"@",
"template",
"(",
"template_page",
"%",
"\"oauth_login\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"no_login_required",
"def",
"oauth_login",
"(",
"self",
",",
"provider",
")",
":",
"\"\"\" Login via oauth providers \"\"\"",
"self",
".",
"_login_enabled",
"(",
")",
"self",
".",
"_oauth_enabled",
"(",
")",
"provider",
"=",
"provider",
".",
"lower",
"(",
")",
"result",
"=",
"oauth",
".",
"login",
"(",
"provider",
")",
"response",
"=",
"oauth",
".",
"response",
"popup_js_custom",
"=",
"{",
"\"action\"",
":",
"\"\"",
",",
"\"url\"",
":",
"\"\"",
"}",
"if",
"result",
":",
"if",
"result",
".",
"error",
":",
"pass",
"elif",
"result",
".",
"user",
":",
"result",
".",
"user",
".",
"update",
"(",
")",
"oauth_user",
"=",
"result",
".",
"user",
"user",
"=",
"User",
".",
"get_by_oauth",
"(",
"provider",
"=",
"provider",
",",
"provider_user_id",
"=",
"oauth_user",
".",
"id",
")",
"if",
"not",
"user",
":",
"if",
"oauth_user",
".",
"email",
"and",
"User",
".",
"get_by_email",
"(",
"oauth_user",
".",
"email",
")",
":",
"flash",
"(",
"\"Account already exists with this email '%s'. \"",
"\"Try to login or retrieve your password \"",
"%",
"oauth_user",
".",
"email",
",",
"\"error\"",
")",
"popup_js_custom",
".",
"update",
"(",
"{",
"\"action\"",
":",
"\"redirect\"",
",",
"\"url\"",
":",
"url_for",
"(",
"login_view",
",",
"next",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
")",
"}",
")",
"else",
":",
"tmp_data",
"=",
"{",
"\"is_oauth\"",
":",
"True",
",",
"\"provider\"",
":",
"provider",
",",
"\"id\"",
":",
"oauth_user",
".",
"id",
",",
"\"name\"",
":",
"oauth_user",
".",
"name",
",",
"\"picture\"",
":",
"oauth_user",
".",
"picture",
",",
"\"first_name\"",
":",
"oauth_user",
".",
"first_name",
",",
"\"last_name\"",
":",
"oauth_user",
".",
"last_name",
",",
"\"email\"",
":",
"oauth_user",
".",
"email",
",",
"\"link\"",
":",
"oauth_user",
".",
"link",
"}",
"if",
"not",
"oauth_user",
".",
"email",
":",
"self",
".",
"tmp_data",
"=",
"tmp_data",
"popup_js_custom",
".",
"update",
"(",
"{",
"\"action\"",
":",
"\"redirect\"",
",",
"\"url\"",
":",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"setup_login\"",
")",
"}",
")",
"else",
":",
"try",
":",
"picture",
"=",
"oauth_user",
".",
"picture",
"user",
"=",
"User",
".",
"new",
"(",
"email",
"=",
"oauth_user",
".",
"email",
",",
"name",
"=",
"oauth_user",
".",
"name",
",",
"signup_method",
"=",
"provider",
",",
"profile_image_url",
"=",
"picture",
")",
"user",
".",
"add_oauth",
"(",
"provider",
",",
"oauth_user",
".",
"provider_id",
",",
"name",
"=",
"oauth_user",
".",
"name",
",",
"email",
"=",
"oauth_user",
".",
"email",
",",
"profile_image_url",
"=",
"oauth_user",
".",
"picture",
",",
"link",
"=",
"oauth_user",
".",
"link",
")",
"except",
"ModelError",
"as",
"e",
":",
"flash",
"(",
"e",
".",
"message",
",",
"\"error\"",
")",
"popup_js_custom",
".",
"update",
"(",
"{",
"\"action\"",
":",
"\"redirect\"",
",",
"\"url\"",
":",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"login\"",
")",
"}",
")",
"if",
"user",
":",
"self",
".",
"login_user",
"(",
"user",
")",
"return",
"dict",
"(",
"popup_js",
"=",
"result",
".",
"popup_js",
"(",
"custom",
"=",
"popup_js_custom",
")",
",",
"template_",
"=",
"template_page",
"%",
"\"oauth_login\"",
")",
"return",
"response",
"@",
"template",
"(",
"template_page",
"%",
"\"setup_login\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"route",
"(",
"\"setup-login/\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"setup_login\"",
")",
"def",
"setup_login",
"(",
"self",
")",
":",
"\"\"\"\n Allows to setup a email password if it's not provided specially\n coming from oauth-login\n :return:\n \"\"\"",
"self",
".",
"_login_enabled",
"(",
")",
"self",
".",
"meta_tags",
"(",
"title",
"=",
"\"Setup Login\"",
")",
"# Only user without email can set email",
"if",
"current_user",
".",
"is_authenticated",
"(",
")",
"and",
"current_user",
".",
"email",
":",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"account_settings\"",
")",
")",
"if",
"self",
".",
"tmp_data",
":",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"if",
"not",
"self",
".",
"tmp_data",
"[",
"\"is_oauth\"",
"]",
":",
"return",
"redirect",
"(",
"endpoint_namespace",
"%",
"\"login\"",
")",
"try",
":",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
"password",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password\"",
")",
"password2",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password2\"",
")",
"if",
"not",
"utils",
".",
"is_valid_email",
"(",
"email",
")",
":",
"raise",
"UserError",
"(",
"\"Invalid email address '%s'\"",
"%",
"email",
")",
"elif",
"User",
".",
"get_by_email",
"(",
"email",
")",
":",
"raise",
"UserError",
"(",
"\"An account exists already with this email address '%s' \"",
"%",
"email",
")",
"elif",
"not",
"password",
".",
"strip",
"(",
")",
"or",
"password",
".",
"strip",
"(",
")",
"!=",
"password2",
".",
"strip",
"(",
")",
":",
"raise",
"UserError",
"(",
"\"Passwords don't match\"",
")",
"elif",
"not",
"utils",
".",
"is_valid_password",
"(",
"password",
")",
":",
"raise",
"UserError",
"(",
"\"Invalid password\"",
")",
"else",
":",
"user",
"=",
"User",
".",
"new",
"(",
"email",
"=",
"email",
",",
"password",
"=",
"password",
".",
"strip",
"(",
")",
",",
"name",
"=",
"self",
".",
"tmp_data",
"[",
"\"name\"",
"]",
",",
"profile_image_url",
"=",
"self",
".",
"tmp_data",
"[",
"\"picture\"",
"]",
",",
"signup_method",
"=",
"self",
".",
"tmp_data",
"[",
"\"provider\"",
"]",
")",
"user",
".",
"add_oauth",
"(",
"self",
".",
"tmp_data",
"[",
"\"provider\"",
"]",
",",
"self",
".",
"tmp_data",
"[",
"\"id\"",
"]",
",",
"name",
"=",
"self",
".",
"tmp_data",
"[",
"\"name\"",
"]",
",",
"email",
"=",
"email",
",",
"profile_image_url",
"=",
"self",
".",
"tmp_data",
"[",
"\"picture\"",
"]",
",",
"link",
"=",
"self",
".",
"tmp_data",
"[",
"\"link\"",
"]",
")",
"self",
".",
"login_user",
"(",
"user",
")",
"self",
".",
"tmp_data",
"=",
"None",
"return",
"redirect",
"(",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
")",
"or",
"url_for",
"(",
"on_signin_view",
")",
")",
"except",
"ApplicationError",
"as",
"ex",
":",
"flash",
"(",
"ex",
".",
"message",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"login\"",
")",
")",
"return",
"dict",
"(",
"provider",
"=",
"self",
".",
"tmp_data",
")",
"else",
":",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"login\"",
")",
")",
"@",
"route",
"(",
"\"reset-password/<token>\"",
",",
"methods",
"=",
"[",
"\"GET\"",
",",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"endpoint_namespace",
"%",
"\"reset_password\"",
")",
"@",
"template",
"(",
"template_page",
"%",
"\"reset_password\"",
",",
"endpoint_namespace",
"=",
"endpoint_namespace",
")",
"@",
"no_login_required",
"def",
"reset_password",
"(",
"self",
",",
"token",
")",
":",
"self",
".",
"_login_enabled",
"(",
")",
"logout_user",
"(",
")",
"self",
".",
"meta_tags",
"(",
"title",
"=",
"\"Reset Password\"",
")",
"user",
"=",
"User",
".",
"get_by_temp_login",
"(",
"token",
")",
"if",
"user",
":",
"if",
"not",
"user",
".",
"has_temp_login",
":",
"return",
"redirect",
"(",
"url_for",
"(",
"on_signin_view",
")",
")",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"try",
":",
"self",
".",
"change_password_handler",
"(",
"user_context",
"=",
"user",
")",
"user",
".",
"clear_temp_login",
"(",
")",
"flash",
"(",
"\"Password updated successfully!\"",
",",
"\"success\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"on_signin_view",
")",
")",
"except",
"Exception",
"as",
"ex",
":",
"flash",
"(",
"\"Error: %s\"",
"%",
"ex",
".",
"message",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"reset_password\"",
",",
"token",
"=",
"token",
")",
")",
"else",
":",
"return",
"dict",
"(",
"token",
"=",
"token",
")",
"else",
":",
"abort",
"(",
"404",
",",
"\"Invalid token\"",
")",
"@",
"route",
"(",
"\"oauth-connect\"",
",",
"methods",
"=",
"[",
"\"POST\"",
"]",
",",
"endpoint",
"=",
"\"%s:oauth_connect\"",
"%",
"endpoint_namespace",
")",
"def",
"oauth_connect",
"(",
"self",
")",
":",
"\"\"\" To login via social \"\"\"",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
".",
"strip",
"(",
")",
"name",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"name\"",
")",
".",
"strip",
"(",
")",
"provider",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"provider\"",
")",
".",
"strip",
"(",
")",
"provider_user_id",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"provider_user_id\"",
")",
".",
"strip",
"(",
")",
"image_url",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"image_url\"",
")",
".",
"strip",
"(",
")",
"next",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"next\"",
",",
"\"\"",
")",
"try",
":",
"current_user",
".",
"oauth_connect",
"(",
"provider",
"=",
"provider",
",",
"provider_user_id",
"=",
"provider_user_id",
",",
"email",
"=",
"email",
",",
"name",
"=",
"name",
",",
"image_url",
"=",
"image_url",
")",
"except",
"Exception",
"as",
"ex",
":",
"flash",
"(",
"\"Unable to link your account\"",
",",
"\"error\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"endpoint_namespace",
"%",
"\"account_settings\"",
")",
")",
"return",
"Auth"
] | 41.650092 | 20.298343 |
def put(self, file_path, upload_path = ''):
"""PUT
Args:
file_path: Full path for a file you want to upload
upload_path: Ndrive path where you want to upload file
ex) /Picture/
Returns:
True: Upload success
False: Upload failed
"""
f = open(file_path, "r")
c = f.read()
file_name = os.path.basename(file_path)
now = datetime.datetime.now().isoformat()
url = nurls['put'] + upload_path + file_name
headers = {'userid': self.user_id,
'useridx': self.useridx,
'MODIFYDATE': now,
'Content-Type': magic.from_file(file_path, mime=True),
'charset': 'UTF-8',
'Origin': 'http://ndrive2.naver.com',
}
r = self.session.put(url = url, data = c, headers = headers)
return self.resultManager(r.text) | [
"def",
"put",
"(",
"self",
",",
"file_path",
",",
"upload_path",
"=",
"''",
")",
":",
"f",
"=",
"open",
"(",
"file_path",
",",
"\"r\"",
")",
"c",
"=",
"f",
".",
"read",
"(",
")",
"file_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"file_path",
")",
"now",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
".",
"isoformat",
"(",
")",
"url",
"=",
"nurls",
"[",
"'put'",
"]",
"+",
"upload_path",
"+",
"file_name",
"headers",
"=",
"{",
"'userid'",
":",
"self",
".",
"user_id",
",",
"'useridx'",
":",
"self",
".",
"useridx",
",",
"'MODIFYDATE'",
":",
"now",
",",
"'Content-Type'",
":",
"magic",
".",
"from_file",
"(",
"file_path",
",",
"mime",
"=",
"True",
")",
",",
"'charset'",
":",
"'UTF-8'",
",",
"'Origin'",
":",
"'http://ndrive2.naver.com'",
",",
"}",
"r",
"=",
"self",
".",
"session",
".",
"put",
"(",
"url",
"=",
"url",
",",
"data",
"=",
"c",
",",
"headers",
"=",
"headers",
")",
"return",
"self",
".",
"resultManager",
"(",
"r",
".",
"text",
")"
] | 29.83871 | 18.83871 |
def zip_file(self, app_path, app_name, tmp_path):
"""Zip the App with tcex extension.
Args:
app_path (str): The path of the current project.
app_name (str): The name of the App.
tmp_path (str): The temp output path for the zip.
"""
# zip build directory
zip_file = os.path.join(app_path, self.args.outdir, app_name)
zip_file_zip = '{}.zip'.format(zip_file)
zip_file_tcx = '{}.tcx'.format(zip_file)
shutil.make_archive(zip_file, 'zip', tmp_path, app_name)
shutil.move(zip_file_zip, zip_file_tcx)
self._app_packages.append(zip_file_tcx)
# update package data
self.package_data['package'].append({'action': 'App Package:', 'output': zip_file_tcx}) | [
"def",
"zip_file",
"(",
"self",
",",
"app_path",
",",
"app_name",
",",
"tmp_path",
")",
":",
"# zip build directory",
"zip_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_path",
",",
"self",
".",
"args",
".",
"outdir",
",",
"app_name",
")",
"zip_file_zip",
"=",
"'{}.zip'",
".",
"format",
"(",
"zip_file",
")",
"zip_file_tcx",
"=",
"'{}.tcx'",
".",
"format",
"(",
"zip_file",
")",
"shutil",
".",
"make_archive",
"(",
"zip_file",
",",
"'zip'",
",",
"tmp_path",
",",
"app_name",
")",
"shutil",
".",
"move",
"(",
"zip_file_zip",
",",
"zip_file_tcx",
")",
"self",
".",
"_app_packages",
".",
"append",
"(",
"zip_file_tcx",
")",
"# update package data",
"self",
".",
"package_data",
"[",
"'package'",
"]",
".",
"append",
"(",
"{",
"'action'",
":",
"'App Package:'",
",",
"'output'",
":",
"zip_file_tcx",
"}",
")"
] | 44.764706 | 16.764706 |
def _create_connection(self):
"""Create a connection.
:return:
"""
attempts = 0
while True:
attempts += 1
if self._stopped.is_set():
break
try:
self._connection = Connection(self.hostname,
self.username,
self.password)
break
except amqpstorm.AMQPError as why:
LOGGER.warning(why)
if self.max_retries and attempts > self.max_retries:
raise Exception('max number of retries reached')
time.sleep(min(attempts * 2, 30))
except KeyboardInterrupt:
break | [
"def",
"_create_connection",
"(",
"self",
")",
":",
"attempts",
"=",
"0",
"while",
"True",
":",
"attempts",
"+=",
"1",
"if",
"self",
".",
"_stopped",
".",
"is_set",
"(",
")",
":",
"break",
"try",
":",
"self",
".",
"_connection",
"=",
"Connection",
"(",
"self",
".",
"hostname",
",",
"self",
".",
"username",
",",
"self",
".",
"password",
")",
"break",
"except",
"amqpstorm",
".",
"AMQPError",
"as",
"why",
":",
"LOGGER",
".",
"warning",
"(",
"why",
")",
"if",
"self",
".",
"max_retries",
"and",
"attempts",
">",
"self",
".",
"max_retries",
":",
"raise",
"Exception",
"(",
"'max number of retries reached'",
")",
"time",
".",
"sleep",
"(",
"min",
"(",
"attempts",
"*",
"2",
",",
"30",
")",
")",
"except",
"KeyboardInterrupt",
":",
"break"
] | 34.136364 | 16.045455 |
def get_current_temperature(self, refresh=False):
"""Get current temperature"""
if refresh:
self.refresh()
try:
return float(self.get_value('temperature'))
except (TypeError, ValueError):
return None | [
"def",
"get_current_temperature",
"(",
"self",
",",
"refresh",
"=",
"False",
")",
":",
"if",
"refresh",
":",
"self",
".",
"refresh",
"(",
")",
"try",
":",
"return",
"float",
"(",
"self",
".",
"get_value",
"(",
"'temperature'",
")",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"return",
"None"
] | 32.5 | 13.125 |
def init_widget(self):
""" Initialize the underlying widget.
This reads all items declared in the enamldef block for this node
and sets only the values that have been specified. All other values
will be left as default. Doing it this way makes atom to only create
the properties that need to be overridden from defaults thus greatly
reducing the number of initialization checks, saving time and memory.
If you don't want this to happen override `get_declared_keys`
to return an empty list.
"""
super(AndroidView, self).init_widget()
# Initialize the widget by updating only the members that
# have read expressions declared. This saves a lot of time and
# simplifies widget initialization code
for k, v in self.get_declared_items():
handler = getattr(self, 'set_'+k, None)
if handler:
handler(v) | [
"def",
"init_widget",
"(",
"self",
")",
":",
"super",
"(",
"AndroidView",
",",
"self",
")",
".",
"init_widget",
"(",
")",
"# Initialize the widget by updating only the members that",
"# have read expressions declared. This saves a lot of time and",
"# simplifies widget initialization code",
"for",
"k",
",",
"v",
"in",
"self",
".",
"get_declared_items",
"(",
")",
":",
"handler",
"=",
"getattr",
"(",
"self",
",",
"'set_'",
"+",
"k",
",",
"None",
")",
"if",
"handler",
":",
"handler",
"(",
"v",
")"
] | 43.272727 | 22.545455 |
def international_str(self, name, sdmxobj):
'''
return DictLike of xml:lang attributes. If node has no attributes,
assume that language is 'en'.
'''
# Get language tokens like 'en', 'fr'...
elem_attrib = self._paths['int_str_names'](sdmxobj._elem, name=name)
values = self._paths['int_str_values'](sdmxobj._elem, name=name)
# Unilingual strings have no attributes. Assume 'en' instead.
if not elem_attrib:
elem_attrib = ['en']
return DictLike(zip(elem_attrib, values)) | [
"def",
"international_str",
"(",
"self",
",",
"name",
",",
"sdmxobj",
")",
":",
"# Get language tokens like 'en', 'fr'...\r",
"elem_attrib",
"=",
"self",
".",
"_paths",
"[",
"'int_str_names'",
"]",
"(",
"sdmxobj",
".",
"_elem",
",",
"name",
"=",
"name",
")",
"values",
"=",
"self",
".",
"_paths",
"[",
"'int_str_values'",
"]",
"(",
"sdmxobj",
".",
"_elem",
",",
"name",
"=",
"name",
")",
"# Unilingual strings have no attributes. Assume 'en' instead.\r",
"if",
"not",
"elem_attrib",
":",
"elem_attrib",
"=",
"[",
"'en'",
"]",
"return",
"DictLike",
"(",
"zip",
"(",
"elem_attrib",
",",
"values",
")",
")"
] | 46.666667 | 19.5 |
def has_current_path(self, path, **kwargs):
"""
Checks if the page has the given path.
Args:
path (str | RegexObject): The string or regex that the current "path" should match.
**kwargs: Arbitrary keyword arguments for :class:`CurrentPathQuery`.
Returns:
bool: Whether it matches.
"""
try:
return self.assert_current_path(path, **kwargs)
except ExpectationNotMet:
return False | [
"def",
"has_current_path",
"(",
"self",
",",
"path",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"self",
".",
"assert_current_path",
"(",
"path",
",",
"*",
"*",
"kwargs",
")",
"except",
"ExpectationNotMet",
":",
"return",
"False"
] | 30 | 21.75 |
def formatreturn(arg, input_is_array=False):
"""If the given argument is a numpy array with shape (1,), just returns
that value."""
if not input_is_array and arg.size == 1:
arg = arg.item()
return arg | [
"def",
"formatreturn",
"(",
"arg",
",",
"input_is_array",
"=",
"False",
")",
":",
"if",
"not",
"input_is_array",
"and",
"arg",
".",
"size",
"==",
"1",
":",
"arg",
"=",
"arg",
".",
"item",
"(",
")",
"return",
"arg"
] | 36.5 | 8.333333 |
def get_form_kwargs(self):
""" Returns the keyword arguments to provide tp the associated form. """
kwargs = super(ModelFormMixin, self).get_form_kwargs()
kwargs['poll'] = self.object
return kwargs | [
"def",
"get_form_kwargs",
"(",
"self",
")",
":",
"kwargs",
"=",
"super",
"(",
"ModelFormMixin",
",",
"self",
")",
".",
"get_form_kwargs",
"(",
")",
"kwargs",
"[",
"'poll'",
"]",
"=",
"self",
".",
"object",
"return",
"kwargs"
] | 45 | 11.8 |
def get_admin_urls_for_registration(self):
"""
Utilised by Wagtail's 'register_admin_urls' hook to register urls for
our the views that class offers.
"""
urls = super(OrderModelAdmin, self).get_admin_urls_for_registration()
urls = urls + (
url(self.url_helper.get_action_url_pattern('detail'),
self.detail_view,
name=self.url_helper.get_action_url_name('detail')),
)
return urls | [
"def",
"get_admin_urls_for_registration",
"(",
"self",
")",
":",
"urls",
"=",
"super",
"(",
"OrderModelAdmin",
",",
"self",
")",
".",
"get_admin_urls_for_registration",
"(",
")",
"urls",
"=",
"urls",
"+",
"(",
"url",
"(",
"self",
".",
"url_helper",
".",
"get_action_url_pattern",
"(",
"'detail'",
")",
",",
"self",
".",
"detail_view",
",",
"name",
"=",
"self",
".",
"url_helper",
".",
"get_action_url_name",
"(",
"'detail'",
")",
")",
",",
")",
"return",
"urls"
] | 39.583333 | 17.083333 |
def p_statement_randomize_expr(p):
""" statement : RANDOMIZE expr
"""
p[0] = make_sentence('RANDOMIZE', make_typecast(TYPE.ulong, p[2], p.lineno(1))) | [
"def",
"p_statement_randomize_expr",
"(",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"make_sentence",
"(",
"'RANDOMIZE'",
",",
"make_typecast",
"(",
"TYPE",
".",
"ulong",
",",
"p",
"[",
"2",
"]",
",",
"p",
".",
"lineno",
"(",
"1",
")",
")",
")"
] | 39.5 | 12.25 |
def attention_lm_translation():
"""Version to use for seq2seq."""
hparams = attention_lm_base()
hparams.layer_preprocess_sequence = "n"
hparams.layer_postprocess_sequence = "da"
hparams.learning_rate = 0.4
hparams.prepend_mode = "prepend_inputs_masked_attention"
hparams.max_length = 512
hparams.label_smoothing = 0.1
hparams.shared_embedding_and_softmax_weights = True
return hparams | [
"def",
"attention_lm_translation",
"(",
")",
":",
"hparams",
"=",
"attention_lm_base",
"(",
")",
"hparams",
".",
"layer_preprocess_sequence",
"=",
"\"n\"",
"hparams",
".",
"layer_postprocess_sequence",
"=",
"\"da\"",
"hparams",
".",
"learning_rate",
"=",
"0.4",
"hparams",
".",
"prepend_mode",
"=",
"\"prepend_inputs_masked_attention\"",
"hparams",
".",
"max_length",
"=",
"512",
"hparams",
".",
"label_smoothing",
"=",
"0.1",
"hparams",
".",
"shared_embedding_and_softmax_weights",
"=",
"True",
"return",
"hparams"
] | 35.818182 | 10.090909 |
def compare(molecules, ensemble_lookup, options):
"""
compare stuff
:param molecules:
:param ensemble_lookup:
:param options:
:return:
"""
print(" Analyzing differences ... ")
print('')
sort_order = classification.get_sort_order(molecules)
ensemble1 = sorted(ensemble_lookup.keys())[0]
ensemble2 = sorted(ensemble_lookup.keys())[1]
stats = {}
stats['header'] = [' ']
name = os.path.basename(ensemble1).replace('.csv', '')
stats['header'].append(name)
name = os.path.basename(ensemble2).replace('.csv', '')
stats['header'].append(name)
stats['header'].append('Difference')
stats['header'].append('95% CI')
stats['header'].append('p-value')
molecules1 = copy.deepcopy(molecules)
molecules2 = copy.deepcopy(molecules)
score_structure1 = classification.make_score_structure(molecules1, ensemble_lookup[ensemble1])
score_structure2 = classification.make_score_structure(molecules2, ensemble_lookup[ensemble2])
auc_structure_1 = classification.make_auc_structure(score_structure1)
auc_structure_2 = classification.make_auc_structure(score_structure2)
# calculate auc value differences
auc_diff = classification.calculate_auc_diff(auc_structure_1, auc_structure_2, sort_order)
stats['AUC'] = auc_diff
# calculate enrichment factor differences
fpfList = make_fpfList(options)
for fpf in fpfList:
fpf = float(fpf)
ef_structure1 = classification.make_ef_structure(score_structure1, fpf, sort_order)
ef_structure2 = classification.make_ef_structure(score_structure2, fpf, sort_order)
if ef_structure1 and ef_structure2:
ef_diff = classification.calculate_ef_diff(ef_structure1, ef_structure2, fpf)
title = 'E%s' % fpf
stats[title] = ef_diff
# write results summary
output.write_diff_summary(stats, options)
# write roc curves
if options.write_roc:
print(" Writing ROC data ... ")
print('')
output.write_roc(auc_structure_1, ensemble1, options)
output.write_roc(auc_structure_2, ensemble2, options)
# plot
if options.plot:
print(" Making plots ... ")
print('')
plotter(molecules, ensemble_lookup, options) | [
"def",
"compare",
"(",
"molecules",
",",
"ensemble_lookup",
",",
"options",
")",
":",
"print",
"(",
"\" Analyzing differences ... \"",
")",
"print",
"(",
"''",
")",
"sort_order",
"=",
"classification",
".",
"get_sort_order",
"(",
"molecules",
")",
"ensemble1",
"=",
"sorted",
"(",
"ensemble_lookup",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"ensemble2",
"=",
"sorted",
"(",
"ensemble_lookup",
".",
"keys",
"(",
")",
")",
"[",
"1",
"]",
"stats",
"=",
"{",
"}",
"stats",
"[",
"'header'",
"]",
"=",
"[",
"' '",
"]",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"ensemble1",
")",
".",
"replace",
"(",
"'.csv'",
",",
"''",
")",
"stats",
"[",
"'header'",
"]",
".",
"append",
"(",
"name",
")",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"ensemble2",
")",
".",
"replace",
"(",
"'.csv'",
",",
"''",
")",
"stats",
"[",
"'header'",
"]",
".",
"append",
"(",
"name",
")",
"stats",
"[",
"'header'",
"]",
".",
"append",
"(",
"'Difference'",
")",
"stats",
"[",
"'header'",
"]",
".",
"append",
"(",
"'95% CI'",
")",
"stats",
"[",
"'header'",
"]",
".",
"append",
"(",
"'p-value'",
")",
"molecules1",
"=",
"copy",
".",
"deepcopy",
"(",
"molecules",
")",
"molecules2",
"=",
"copy",
".",
"deepcopy",
"(",
"molecules",
")",
"score_structure1",
"=",
"classification",
".",
"make_score_structure",
"(",
"molecules1",
",",
"ensemble_lookup",
"[",
"ensemble1",
"]",
")",
"score_structure2",
"=",
"classification",
".",
"make_score_structure",
"(",
"molecules2",
",",
"ensemble_lookup",
"[",
"ensemble2",
"]",
")",
"auc_structure_1",
"=",
"classification",
".",
"make_auc_structure",
"(",
"score_structure1",
")",
"auc_structure_2",
"=",
"classification",
".",
"make_auc_structure",
"(",
"score_structure2",
")",
"# calculate auc value differences",
"auc_diff",
"=",
"classification",
".",
"calculate_auc_diff",
"(",
"auc_structure_1",
",",
"auc_structure_2",
",",
"sort_order",
")",
"stats",
"[",
"'AUC'",
"]",
"=",
"auc_diff",
"# calculate enrichment factor differences",
"fpfList",
"=",
"make_fpfList",
"(",
"options",
")",
"for",
"fpf",
"in",
"fpfList",
":",
"fpf",
"=",
"float",
"(",
"fpf",
")",
"ef_structure1",
"=",
"classification",
".",
"make_ef_structure",
"(",
"score_structure1",
",",
"fpf",
",",
"sort_order",
")",
"ef_structure2",
"=",
"classification",
".",
"make_ef_structure",
"(",
"score_structure2",
",",
"fpf",
",",
"sort_order",
")",
"if",
"ef_structure1",
"and",
"ef_structure2",
":",
"ef_diff",
"=",
"classification",
".",
"calculate_ef_diff",
"(",
"ef_structure1",
",",
"ef_structure2",
",",
"fpf",
")",
"title",
"=",
"'E%s'",
"%",
"fpf",
"stats",
"[",
"title",
"]",
"=",
"ef_diff",
"# write results summary",
"output",
".",
"write_diff_summary",
"(",
"stats",
",",
"options",
")",
"# write roc curves",
"if",
"options",
".",
"write_roc",
":",
"print",
"(",
"\" Writing ROC data ... \"",
")",
"print",
"(",
"''",
")",
"output",
".",
"write_roc",
"(",
"auc_structure_1",
",",
"ensemble1",
",",
"options",
")",
"output",
".",
"write_roc",
"(",
"auc_structure_2",
",",
"ensemble2",
",",
"options",
")",
"# plot",
"if",
"options",
".",
"plot",
":",
"print",
"(",
"\" Making plots ... \"",
")",
"print",
"(",
"''",
")",
"plotter",
"(",
"molecules",
",",
"ensemble_lookup",
",",
"options",
")"
] | 33.134328 | 21.880597 |
def radio_calibration_send(self, aileron, elevator, rudder, gyro, pitch, throttle, force_mavlink1=False):
'''
Complete set of calibration parameters for the radio
aileron : Aileron setpoints: left, center, right (uint16_t)
elevator : Elevator setpoints: nose down, center, nose up (uint16_t)
rudder : Rudder setpoints: nose left, center, nose right (uint16_t)
gyro : Tail gyro mode/gain setpoints: heading hold, rate mode (uint16_t)
pitch : Pitch curve setpoints (every 25%) (uint16_t)
throttle : Throttle curve setpoints (every 25%) (uint16_t)
'''
return self.send(self.radio_calibration_encode(aileron, elevator, rudder, gyro, pitch, throttle), force_mavlink1=force_mavlink1) | [
"def",
"radio_calibration_send",
"(",
"self",
",",
"aileron",
",",
"elevator",
",",
"rudder",
",",
"gyro",
",",
"pitch",
",",
"throttle",
",",
"force_mavlink1",
"=",
"False",
")",
":",
"return",
"self",
".",
"send",
"(",
"self",
".",
"radio_calibration_encode",
"(",
"aileron",
",",
"elevator",
",",
"rudder",
",",
"gyro",
",",
"pitch",
",",
"throttle",
")",
",",
"force_mavlink1",
"=",
"force_mavlink1",
")"
] | 72.230769 | 51 |
def error(self, s, pos):
"""Show text and a caret under that. For example:
x = 2y + z
^
"""
print("Lexical error:")
print("%s" % s[:pos+10]) # + 10 for trailing context
print("%s^" % (" "*(pos-1)))
for t in self.rv: print(t)
raise SystemExit | [
"def",
"error",
"(",
"self",
",",
"s",
",",
"pos",
")",
":",
"print",
"(",
"\"Lexical error:\"",
")",
"print",
"(",
"\"%s\"",
"%",
"s",
"[",
":",
"pos",
"+",
"10",
"]",
")",
"# + 10 for trailing context",
"print",
"(",
"\"%s^\"",
"%",
"(",
"\" \"",
"*",
"(",
"pos",
"-",
"1",
")",
")",
")",
"for",
"t",
"in",
"self",
".",
"rv",
":",
"print",
"(",
"t",
")",
"raise",
"SystemExit"
] | 28.6 | 13.6 |
def _op_factory(func, kwargs, opname, bands, rgb_op=False):
"""create an operation function closure
don't call directly, use parse_operations
returns a function which itself takes and returns ndarrays
"""
def f(arr):
# Avoid mutation by copying
newarr = arr.copy()
if rgb_op:
# apply func to array's first 3 bands, assumed r,g,b
# additional band(s) are untouched
newarr[0:3] = func(newarr[0:3], **kwargs)
else:
# apply func to array band at a time
for b in bands:
newarr[b - 1] = func(arr[b - 1], **kwargs)
return newarr
f.__name__ = str(opname)
return f | [
"def",
"_op_factory",
"(",
"func",
",",
"kwargs",
",",
"opname",
",",
"bands",
",",
"rgb_op",
"=",
"False",
")",
":",
"def",
"f",
"(",
"arr",
")",
":",
"# Avoid mutation by copying",
"newarr",
"=",
"arr",
".",
"copy",
"(",
")",
"if",
"rgb_op",
":",
"# apply func to array's first 3 bands, assumed r,g,b",
"# additional band(s) are untouched",
"newarr",
"[",
"0",
":",
"3",
"]",
"=",
"func",
"(",
"newarr",
"[",
"0",
":",
"3",
"]",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"# apply func to array band at a time",
"for",
"b",
"in",
"bands",
":",
"newarr",
"[",
"b",
"-",
"1",
"]",
"=",
"func",
"(",
"arr",
"[",
"b",
"-",
"1",
"]",
",",
"*",
"*",
"kwargs",
")",
"return",
"newarr",
"f",
".",
"__name__",
"=",
"str",
"(",
"opname",
")",
"return",
"f"
] | 32.428571 | 17.095238 |
def disable_device(self):
"""
disable (lock) device, to ensure no user activity in device while some process run
:return: bool
"""
cmd_response = self.__send_command(const.CMD_DISABLEDEVICE)
if cmd_response.get('status'):
self.is_enabled = False
return True
else:
raise ZKErrorResponse("Can't disable device") | [
"def",
"disable_device",
"(",
"self",
")",
":",
"cmd_response",
"=",
"self",
".",
"__send_command",
"(",
"const",
".",
"CMD_DISABLEDEVICE",
")",
"if",
"cmd_response",
".",
"get",
"(",
"'status'",
")",
":",
"self",
".",
"is_enabled",
"=",
"False",
"return",
"True",
"else",
":",
"raise",
"ZKErrorResponse",
"(",
"\"Can't disable device\"",
")"
] | 32.583333 | 18.25 |
def parse_files(self, req, name, field):
"""Pull a file from the request."""
return core.get_value(req.FILES, name, field) | [
"def",
"parse_files",
"(",
"self",
",",
"req",
",",
"name",
",",
"field",
")",
":",
"return",
"core",
".",
"get_value",
"(",
"req",
".",
"FILES",
",",
"name",
",",
"field",
")"
] | 45.333333 | 4.333333 |
def get_manager(self, osid=None, impl_class_name=None, version=None):
"""Finds, loads and instantiates providers of OSID managers.
Providers must conform to an OsidManager interface. The
interfaces are defined in the OSID enumeration. For all OSID
requests, an instance of ``OsidManager`` that implements the
``OsidManager`` interface is returned. In bindings where
permitted, this can be safely cast into the requested manager.
arg: osid (osid.OSID): represents the OSID
arg: impl_class_name (string): the name of the implementation
arg: version (osid.installation.Version): the minimum
required OSID specification version
return: (osid.OsidManager) - the manager of the service
raise: ConfigurationError - an error in configuring the
implementation
raise: NotFound - the implementation class was not found
raise: NullArgument - ``impl_class_name`` or ``version`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unsupported - ``impl_class_name`` does not support the
requested OSID
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: After finding and instantiating the
requested ``OsidManager,`` providers must invoke
``OsidManager.initialize(OsidRuntimeManager)`` where the
environment is an instance of the current environment that
includes the configuration for the service being initialized.
The ``OsidRuntimeManager`` passed may include information useful
for the configuration such as the identity of the service being
instantiated.
"""
# This implementation assumes that all osid impls reside as seperate
# packages in the dlkit library, so that for instance the proxy manager for an
# OSID = 'osidpackage' in an implementation named 'impl_name' manager can
# be found in the python path for the module: dlkit.impl_name.osid.managers
# Also this implementation currently ignores the OSID specification version.
from importlib import import_module
try:
manager_module = import_module('dlkit.' + impl_class_name + '.' + osid.lower() + '.managers')
except ImportError:
raise NotFound()
try:
manager = getattr(manager_module, osid.title() + 'Manager')
except AttributeError:
raise Unsupported()
return manager | [
"def",
"get_manager",
"(",
"self",
",",
"osid",
"=",
"None",
",",
"impl_class_name",
"=",
"None",
",",
"version",
"=",
"None",
")",
":",
"# This implementation assumes that all osid impls reside as seperate",
"# packages in the dlkit library, so that for instance the proxy manager for an",
"# OSID = 'osidpackage' in an implementation named 'impl_name' manager can",
"# be found in the python path for the module: dlkit.impl_name.osid.managers",
"# Also this implementation currently ignores the OSID specification version.",
"from",
"importlib",
"import",
"import_module",
"try",
":",
"manager_module",
"=",
"import_module",
"(",
"'dlkit.'",
"+",
"impl_class_name",
"+",
"'.'",
"+",
"osid",
".",
"lower",
"(",
")",
"+",
"'.managers'",
")",
"except",
"ImportError",
":",
"raise",
"NotFound",
"(",
")",
"try",
":",
"manager",
"=",
"getattr",
"(",
"manager_module",
",",
"osid",
".",
"title",
"(",
")",
"+",
"'Manager'",
")",
"except",
"AttributeError",
":",
"raise",
"Unsupported",
"(",
")",
"return",
"manager"
] | 52.895833 | 25.125 |
def end_grouping(self):
"""
Raises IndexError when no group is open.
"""
close = self._open.pop()
if not close:
return
if self._open:
self._open[-1].extend(close)
elif self._undoing:
self._redo.append(close)
else:
self._undo.append(close)
self.notify() | [
"def",
"end_grouping",
"(",
"self",
")",
":",
"close",
"=",
"self",
".",
"_open",
".",
"pop",
"(",
")",
"if",
"not",
"close",
":",
"return",
"if",
"self",
".",
"_open",
":",
"self",
".",
"_open",
"[",
"-",
"1",
"]",
".",
"extend",
"(",
"close",
")",
"elif",
"self",
".",
"_undoing",
":",
"self",
".",
"_redo",
".",
"append",
"(",
"close",
")",
"else",
":",
"self",
".",
"_undo",
".",
"append",
"(",
"close",
")",
"self",
".",
"notify",
"(",
")"
] | 25.642857 | 11.357143 |
def open(name=None, fileobj=None, closefd=True):
"""
Use all decompressor possible to make the stream
"""
return Guesser().open(name=name, fileobj=fileobj, closefd=closefd) | [
"def",
"open",
"(",
"name",
"=",
"None",
",",
"fileobj",
"=",
"None",
",",
"closefd",
"=",
"True",
")",
":",
"return",
"Guesser",
"(",
")",
".",
"open",
"(",
"name",
"=",
"name",
",",
"fileobj",
"=",
"fileobj",
",",
"closefd",
"=",
"closefd",
")"
] | 36.8 | 10 |
def _regressor_names(con_name, hrf_model, fir_delays=None):
""" Returns a list of regressor names, computed from con-name and hrf type
Parameters
----------
con_name: string
identifier of the condition
hrf_model: string or None,
hrf model chosen
fir_delays: 1D array_like, optional,
Delays used in case of an FIR model
Returns
-------
names: list of strings,
regressor names
"""
if hrf_model in ['glover', 'spm', None]:
return [con_name]
elif hrf_model in ["glover + derivative", 'spm + derivative']:
return [con_name, con_name + "_derivative"]
elif hrf_model in ['spm + derivative + dispersion',
'glover + derivative + dispersion']:
return [con_name, con_name + "_derivative", con_name + "_dispersion"]
elif hrf_model == 'fir':
return [con_name + "_delay_%d" % i for i in fir_delays] | [
"def",
"_regressor_names",
"(",
"con_name",
",",
"hrf_model",
",",
"fir_delays",
"=",
"None",
")",
":",
"if",
"hrf_model",
"in",
"[",
"'glover'",
",",
"'spm'",
",",
"None",
"]",
":",
"return",
"[",
"con_name",
"]",
"elif",
"hrf_model",
"in",
"[",
"\"glover + derivative\"",
",",
"'spm + derivative'",
"]",
":",
"return",
"[",
"con_name",
",",
"con_name",
"+",
"\"_derivative\"",
"]",
"elif",
"hrf_model",
"in",
"[",
"'spm + derivative + dispersion'",
",",
"'glover + derivative + dispersion'",
"]",
":",
"return",
"[",
"con_name",
",",
"con_name",
"+",
"\"_derivative\"",
",",
"con_name",
"+",
"\"_dispersion\"",
"]",
"elif",
"hrf_model",
"==",
"'fir'",
":",
"return",
"[",
"con_name",
"+",
"\"_delay_%d\"",
"%",
"i",
"for",
"i",
"in",
"fir_delays",
"]"
] | 32.25 | 19.142857 |
def paths(self, destination_account, destination_amount, source_account, destination_asset_code,
destination_asset_issuer=None):
"""Load a list of assets available to the source account id and find
any payment paths from those source assets to the desired
destination asset.
See the below docs for more information on required and optional
parameters for further specifying your search.
`GET /paths
<https://www.stellar.org/developers/horizon/reference/endpoints/path-finding.html>`_
:param str destination_account: The destination account that any returned path should use.
:param str destination_amount: The amount, denominated in the destination asset,
that any returned path should be able to satisfy.
:param str source_account: The sender's account id. Any returned path must use a source that the sender can hold.
:param str destination_asset_code: The asset code for the destination.
:param destination_asset_issuer: The asset issuer for the destination, if it is a native asset, let it be `None`.
:type destination_asset_issuer: str, None
:return: A list of paths that can be used to complete a payment based
on a given query.
:rtype: dict
"""
destination_asset = Asset(destination_asset_code, destination_asset_issuer)
destination_asset_params = {
'destination_asset_type': destination_asset.type,
'destination_asset_code': None if destination_asset.is_native() else destination_asset.code,
'destination_asset_issuer': destination_asset.issuer
}
endpoint = '/paths'
params = self.__query_params(destination_account=destination_account,
source_account=source_account,
destination_amount=destination_amount,
**destination_asset_params
)
return self.query(endpoint, params) | [
"def",
"paths",
"(",
"self",
",",
"destination_account",
",",
"destination_amount",
",",
"source_account",
",",
"destination_asset_code",
",",
"destination_asset_issuer",
"=",
"None",
")",
":",
"destination_asset",
"=",
"Asset",
"(",
"destination_asset_code",
",",
"destination_asset_issuer",
")",
"destination_asset_params",
"=",
"{",
"'destination_asset_type'",
":",
"destination_asset",
".",
"type",
",",
"'destination_asset_code'",
":",
"None",
"if",
"destination_asset",
".",
"is_native",
"(",
")",
"else",
"destination_asset",
".",
"code",
",",
"'destination_asset_issuer'",
":",
"destination_asset",
".",
"issuer",
"}",
"endpoint",
"=",
"'/paths'",
"params",
"=",
"self",
".",
"__query_params",
"(",
"destination_account",
"=",
"destination_account",
",",
"source_account",
"=",
"source_account",
",",
"destination_amount",
"=",
"destination_amount",
",",
"*",
"*",
"destination_asset_params",
")",
"return",
"self",
".",
"query",
"(",
"endpoint",
",",
"params",
")"
] | 52.435897 | 30.512821 |
def dsphdr(x, y, z):
"""
This routine computes the Jacobian of the transformation from
rectangular to spherical coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dsphdr_c.html
:param x: X-coordinate of point.
:type x: float
:param y: Y-coordinate of point.
:type y: float
:param z: Z-coordinate of point.
:type z: float
:return: Matrix of partial derivatives.
:rtype: 3x3-Element Array of floats
"""
x = ctypes.c_double(x)
y = ctypes.c_double(y)
z = ctypes.c_double(z)
jacobi = stypes.emptyDoubleMatrix()
libspice.dsphdr_c(x, y, z, jacobi)
return stypes.cMatrixToNumpy(jacobi) | [
"def",
"dsphdr",
"(",
"x",
",",
"y",
",",
"z",
")",
":",
"x",
"=",
"ctypes",
".",
"c_double",
"(",
"x",
")",
"y",
"=",
"ctypes",
".",
"c_double",
"(",
"y",
")",
"z",
"=",
"ctypes",
".",
"c_double",
"(",
"z",
")",
"jacobi",
"=",
"stypes",
".",
"emptyDoubleMatrix",
"(",
")",
"libspice",
".",
"dsphdr_c",
"(",
"x",
",",
"y",
",",
"z",
",",
"jacobi",
")",
"return",
"stypes",
".",
"cMatrixToNumpy",
"(",
"jacobi",
")"
] | 28.347826 | 14.173913 |
def nested(*contexts):
"""
Reimplementation of nested in python 3.
"""
with ExitStack() as stack:
results = [
stack.enter_context(context)
for context in contexts
]
yield results | [
"def",
"nested",
"(",
"*",
"contexts",
")",
":",
"with",
"ExitStack",
"(",
")",
"as",
"stack",
":",
"results",
"=",
"[",
"stack",
".",
"enter_context",
"(",
"context",
")",
"for",
"context",
"in",
"contexts",
"]",
"yield",
"results"
] | 23.3 | 10.7 |
def auto_register_inlines(admin_site, metadata_class):
""" This is a questionable function that automatically adds our metadata
inline to all relevant models in the site.
"""
inline_class = get_inline(metadata_class)
for model, admin_class_instance in admin_site._registry.items():
_monkey_inline(model, admin_class_instance, metadata_class, inline_class, admin_site)
# Monkey patch the register method to automatically add an inline for this site.
# _with_inline() is a decorator that wraps the register function with the same injection code
# used above (_monkey_inline).
admin_site.register = _with_inline(admin_site.register, admin_site, metadata_class, inline_class) | [
"def",
"auto_register_inlines",
"(",
"admin_site",
",",
"metadata_class",
")",
":",
"inline_class",
"=",
"get_inline",
"(",
"metadata_class",
")",
"for",
"model",
",",
"admin_class_instance",
"in",
"admin_site",
".",
"_registry",
".",
"items",
"(",
")",
":",
"_monkey_inline",
"(",
"model",
",",
"admin_class_instance",
",",
"metadata_class",
",",
"inline_class",
",",
"admin_site",
")",
"# Monkey patch the register method to automatically add an inline for this site.",
"# _with_inline() is a decorator that wraps the register function with the same injection code",
"# used above (_monkey_inline).",
"admin_site",
".",
"register",
"=",
"_with_inline",
"(",
"admin_site",
".",
"register",
",",
"admin_site",
",",
"metadata_class",
",",
"inline_class",
")"
] | 54.615385 | 27.615385 |
async def create(
cls, node: Union[Node, str],
name: str,
backing_device: Union[BlockDevice, Partition],
cache_set: Union[BcacheCacheSet, int],
cache_mode: CacheMode, *,
uuid: str = None):
"""
Create a Bcache on a Node.
:param node: Node to create the interface on.
:type node: `Node` or `str`
:param name: Name of the Bcache.
:type name: `str`
:param backing_device: Either a block device or partition to create
the Bcache from.
:type backing_device: `BlockDevice` or `Partition`
:param cache_set: Bcache cache set to use in front of backing device.
:type cache_set: `BcacheCacheSet` or `int`
:param cache_mode: Caching mode to use for this device.
:type cache_mode: `CacheMode`
:type backing_device: `BlockDevice` or `Partition`
:param uuid: The UUID for the Bcache (optional).
:type uuid: `str`
"""
params = {
'name': name,
}
if isinstance(node, str):
params['system_id'] = node
elif isinstance(node, Node):
params['system_id'] = node.system_id
else:
raise TypeError(
'node must be a Node or str, not %s' % (
type(node).__name__))
if isinstance(backing_device, BlockDevice):
params['backing_device'] = backing_device.id
elif isinstance(backing_device, Partition):
params['backing_partition'] = backing_device.id
else:
raise TypeError(
"backing_device must be a BlockDevice or Partition, "
"not %s" % type(backing_device).__name__)
if isinstance(cache_set, BcacheCacheSet):
params['cache_set'] = cache_set.id
elif isinstance(cache_set, int):
params['cache_set'] = cache_set
else:
raise TypeError(
"cache_set must be a BcacheCacheSet or int, "
"not %s" % type(cache_set).__name__)
if isinstance(cache_mode, CacheMode):
params['cache_mode'] = cache_mode.value
else:
raise TypeError(
"cache_mode must be a CacheMode, "
"not %s" % type(cache_mode).__name__)
if uuid is not None:
params['uuid'] = uuid
return cls._object(await cls._handler.create(**params)) | [
"async",
"def",
"create",
"(",
"cls",
",",
"node",
":",
"Union",
"[",
"Node",
",",
"str",
"]",
",",
"name",
":",
"str",
",",
"backing_device",
":",
"Union",
"[",
"BlockDevice",
",",
"Partition",
"]",
",",
"cache_set",
":",
"Union",
"[",
"BcacheCacheSet",
",",
"int",
"]",
",",
"cache_mode",
":",
"CacheMode",
",",
"*",
",",
"uuid",
":",
"str",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'name'",
":",
"name",
",",
"}",
"if",
"isinstance",
"(",
"node",
",",
"str",
")",
":",
"params",
"[",
"'system_id'",
"]",
"=",
"node",
"elif",
"isinstance",
"(",
"node",
",",
"Node",
")",
":",
"params",
"[",
"'system_id'",
"]",
"=",
"node",
".",
"system_id",
"else",
":",
"raise",
"TypeError",
"(",
"'node must be a Node or str, not %s'",
"%",
"(",
"type",
"(",
"node",
")",
".",
"__name__",
")",
")",
"if",
"isinstance",
"(",
"backing_device",
",",
"BlockDevice",
")",
":",
"params",
"[",
"'backing_device'",
"]",
"=",
"backing_device",
".",
"id",
"elif",
"isinstance",
"(",
"backing_device",
",",
"Partition",
")",
":",
"params",
"[",
"'backing_partition'",
"]",
"=",
"backing_device",
".",
"id",
"else",
":",
"raise",
"TypeError",
"(",
"\"backing_device must be a BlockDevice or Partition, \"",
"\"not %s\"",
"%",
"type",
"(",
"backing_device",
")",
".",
"__name__",
")",
"if",
"isinstance",
"(",
"cache_set",
",",
"BcacheCacheSet",
")",
":",
"params",
"[",
"'cache_set'",
"]",
"=",
"cache_set",
".",
"id",
"elif",
"isinstance",
"(",
"cache_set",
",",
"int",
")",
":",
"params",
"[",
"'cache_set'",
"]",
"=",
"cache_set",
"else",
":",
"raise",
"TypeError",
"(",
"\"cache_set must be a BcacheCacheSet or int, \"",
"\"not %s\"",
"%",
"type",
"(",
"cache_set",
")",
".",
"__name__",
")",
"if",
"isinstance",
"(",
"cache_mode",
",",
"CacheMode",
")",
":",
"params",
"[",
"'cache_mode'",
"]",
"=",
"cache_mode",
".",
"value",
"else",
":",
"raise",
"TypeError",
"(",
"\"cache_mode must be a CacheMode, \"",
"\"not %s\"",
"%",
"type",
"(",
"cache_mode",
")",
".",
"__name__",
")",
"if",
"uuid",
"is",
"not",
"None",
":",
"params",
"[",
"'uuid'",
"]",
"=",
"uuid",
"return",
"cls",
".",
"_object",
"(",
"await",
"cls",
".",
"_handler",
".",
"create",
"(",
"*",
"*",
"params",
")",
")"
] | 37.153846 | 15.461538 |
def normalize_file(file, separators=None):
"""
Normalizes the file path to use the POSIX path separator (i.e., ``'/'``).
*file* (:class:`str`) is the file path.
*separators* (:class:`~collections.abc.Collection` of :class:`str`; or
:data:`None`) optionally contains the path separators to normalize.
This does not need to include the POSIX path separator (``'/'``), but
including it will not affect the results. Default is :data:`None` for
:data:`NORMALIZE_PATH_SEPS`. To prevent normalization, pass an empty
container (e.g., an empty tuple ``()``).
Returns the normalized file path (:class:`str`).
"""
# Normalize path separators.
if separators is None:
separators = NORMALIZE_PATH_SEPS
norm_file = file
for sep in separators:
norm_file = norm_file.replace(sep, posixpath.sep)
# Remove current directory prefix.
if norm_file.startswith('./'):
norm_file = norm_file[2:]
return norm_file | [
"def",
"normalize_file",
"(",
"file",
",",
"separators",
"=",
"None",
")",
":",
"# Normalize path separators.",
"if",
"separators",
"is",
"None",
":",
"separators",
"=",
"NORMALIZE_PATH_SEPS",
"norm_file",
"=",
"file",
"for",
"sep",
"in",
"separators",
":",
"norm_file",
"=",
"norm_file",
".",
"replace",
"(",
"sep",
",",
"posixpath",
".",
"sep",
")",
"# Remove current directory prefix.",
"if",
"norm_file",
".",
"startswith",
"(",
"'./'",
")",
":",
"norm_file",
"=",
"norm_file",
"[",
"2",
":",
"]",
"return",
"norm_file"
] | 32.925926 | 19.592593 |
def touch_object(self, objects: Set[Object]) -> Set[Object]:
"""
Returns all objects that touch the given set of objects.
"""
objects_per_box = self._separate_objects_by_boxes(objects)
return_set = set()
for box, box_objects in objects_per_box.items():
candidate_objects = box.objects
for object_ in box_objects:
for candidate_object in candidate_objects:
if self._objects_touch_each_other(object_, candidate_object):
return_set.add(candidate_object)
return return_set | [
"def",
"touch_object",
"(",
"self",
",",
"objects",
":",
"Set",
"[",
"Object",
"]",
")",
"->",
"Set",
"[",
"Object",
"]",
":",
"objects_per_box",
"=",
"self",
".",
"_separate_objects_by_boxes",
"(",
"objects",
")",
"return_set",
"=",
"set",
"(",
")",
"for",
"box",
",",
"box_objects",
"in",
"objects_per_box",
".",
"items",
"(",
")",
":",
"candidate_objects",
"=",
"box",
".",
"objects",
"for",
"object_",
"in",
"box_objects",
":",
"for",
"candidate_object",
"in",
"candidate_objects",
":",
"if",
"self",
".",
"_objects_touch_each_other",
"(",
"object_",
",",
"candidate_object",
")",
":",
"return_set",
".",
"add",
"(",
"candidate_object",
")",
"return",
"return_set"
] | 45.846154 | 14.923077 |
async def running(self):
"""Start websocket connection."""
url = 'http://{}:{}'.format(self.host, self.port)
try:
async with self.session.ws_connect(url) as ws:
self.state = STATE_RUNNING
async for msg in ws:
if self.state == STATE_STOPPED:
break
elif msg.type == aiohttp.WSMsgType.TEXT:
self._data = json.loads(msg.data)
self.async_session_handler_callback('data')
_LOGGER.debug('Websocket data: %s', msg.data)
elif msg.type == aiohttp.WSMsgType.CLOSED:
break
elif msg.type == aiohttp.WSMsgType.ERROR:
break
except aiohttp.ClientConnectorError:
if self.state != STATE_STOPPED:
self.retry()
except Exception as err:
_LOGGER.error('Unexpected error %s', err)
if self.state != STATE_STOPPED:
self.retry()
else:
if self.state != STATE_STOPPED:
self.retry() | [
"async",
"def",
"running",
"(",
"self",
")",
":",
"url",
"=",
"'http://{}:{}'",
".",
"format",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
"try",
":",
"async",
"with",
"self",
".",
"session",
".",
"ws_connect",
"(",
"url",
")",
"as",
"ws",
":",
"self",
".",
"state",
"=",
"STATE_RUNNING",
"async",
"for",
"msg",
"in",
"ws",
":",
"if",
"self",
".",
"state",
"==",
"STATE_STOPPED",
":",
"break",
"elif",
"msg",
".",
"type",
"==",
"aiohttp",
".",
"WSMsgType",
".",
"TEXT",
":",
"self",
".",
"_data",
"=",
"json",
".",
"loads",
"(",
"msg",
".",
"data",
")",
"self",
".",
"async_session_handler_callback",
"(",
"'data'",
")",
"_LOGGER",
".",
"debug",
"(",
"'Websocket data: %s'",
",",
"msg",
".",
"data",
")",
"elif",
"msg",
".",
"type",
"==",
"aiohttp",
".",
"WSMsgType",
".",
"CLOSED",
":",
"break",
"elif",
"msg",
".",
"type",
"==",
"aiohttp",
".",
"WSMsgType",
".",
"ERROR",
":",
"break",
"except",
"aiohttp",
".",
"ClientConnectorError",
":",
"if",
"self",
".",
"state",
"!=",
"STATE_STOPPED",
":",
"self",
".",
"retry",
"(",
")",
"except",
"Exception",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"'Unexpected error %s'",
",",
"err",
")",
"if",
"self",
".",
"state",
"!=",
"STATE_STOPPED",
":",
"self",
".",
"retry",
"(",
")",
"else",
":",
"if",
"self",
".",
"state",
"!=",
"STATE_STOPPED",
":",
"self",
".",
"retry",
"(",
")"
] | 42.185185 | 13.407407 |
def wet_bulb_from_db_rh(db_temp, rh, b_press=101325):
"""Wet Bulb Temperature (C) at db_temp (C),
Relative Humidity rh (%), and Pressure b_press (Pa).
Note:
[1] J. Sullivan and L. D. Sanders. "Method for obtaining wet-bulb temperatures by
modifying the psychrometric formula." Center for Experiment Design and Data
Analysis. NOAA - National Oceanic and Atmospheric Administration.
http://www.srh.noaa.gov/epz/?n=wxcalc_rh
"""
es = 6.112 * math.e**((17.67 * db_temp) / (db_temp + 243.5))
e = (es * rh) / 100
t_w = 0
increse = 10.0
previoussign = 1
e_d = 1
while math.fabs(e_d) > 0.005:
e_wg = 6.112 * (math.e**((17.67 * t_w) / (t_w + 243.5)))
eg = e_wg - (b_press/100) * (db_temp - t_w) * 0.00066 * (1 + (0.00155 * t_w))
e_d = e - eg
if e_d == 0:
break
else:
if e_d < 0:
cursign = -1
if cursign != previoussign:
previoussign = cursign
increse = increse / 10
else:
increse = increse
else:
cursign = 1
if cursign != previoussign:
previoussign = cursign
increse = increse/10
else:
increse = increse
t_w = t_w + increse * previoussign
return t_w | [
"def",
"wet_bulb_from_db_rh",
"(",
"db_temp",
",",
"rh",
",",
"b_press",
"=",
"101325",
")",
":",
"es",
"=",
"6.112",
"*",
"math",
".",
"e",
"**",
"(",
"(",
"17.67",
"*",
"db_temp",
")",
"/",
"(",
"db_temp",
"+",
"243.5",
")",
")",
"e",
"=",
"(",
"es",
"*",
"rh",
")",
"/",
"100",
"t_w",
"=",
"0",
"increse",
"=",
"10.0",
"previoussign",
"=",
"1",
"e_d",
"=",
"1",
"while",
"math",
".",
"fabs",
"(",
"e_d",
")",
">",
"0.005",
":",
"e_wg",
"=",
"6.112",
"*",
"(",
"math",
".",
"e",
"**",
"(",
"(",
"17.67",
"*",
"t_w",
")",
"/",
"(",
"t_w",
"+",
"243.5",
")",
")",
")",
"eg",
"=",
"e_wg",
"-",
"(",
"b_press",
"/",
"100",
")",
"*",
"(",
"db_temp",
"-",
"t_w",
")",
"*",
"0.00066",
"*",
"(",
"1",
"+",
"(",
"0.00155",
"*",
"t_w",
")",
")",
"e_d",
"=",
"e",
"-",
"eg",
"if",
"e_d",
"==",
"0",
":",
"break",
"else",
":",
"if",
"e_d",
"<",
"0",
":",
"cursign",
"=",
"-",
"1",
"if",
"cursign",
"!=",
"previoussign",
":",
"previoussign",
"=",
"cursign",
"increse",
"=",
"increse",
"/",
"10",
"else",
":",
"increse",
"=",
"increse",
"else",
":",
"cursign",
"=",
"1",
"if",
"cursign",
"!=",
"previoussign",
":",
"previoussign",
"=",
"cursign",
"increse",
"=",
"increse",
"/",
"10",
"else",
":",
"increse",
"=",
"increse",
"t_w",
"=",
"t_w",
"+",
"increse",
"*",
"previoussign",
"return",
"t_w"
] | 35.461538 | 17.666667 |
def send(self, sender: PytgbotApiBot):
"""
Send the message via pytgbot.
:param sender: The bot instance to send with.
:type sender: pytgbot.bot.Bot
:rtype: PytgbotApiMessage
"""
return sender.send_audio(
# receiver, self.media, disable_notification=self.disable_notification, reply_to_message_id=reply_id
audio=self.audio, chat_id=self.receiver, reply_to_message_id=self.reply_id, caption=self.caption, parse_mode=self.parse_mode, duration=self.duration, performer=self.performer, title=self.title, thumb=self.thumb, disable_notification=self.disable_notification, reply_markup=self.reply_markup
) | [
"def",
"send",
"(",
"self",
",",
"sender",
":",
"PytgbotApiBot",
")",
":",
"return",
"sender",
".",
"send_audio",
"(",
"# receiver, self.media, disable_notification=self.disable_notification, reply_to_message_id=reply_id",
"audio",
"=",
"self",
".",
"audio",
",",
"chat_id",
"=",
"self",
".",
"receiver",
",",
"reply_to_message_id",
"=",
"self",
".",
"reply_id",
",",
"caption",
"=",
"self",
".",
"caption",
",",
"parse_mode",
"=",
"self",
".",
"parse_mode",
",",
"duration",
"=",
"self",
".",
"duration",
",",
"performer",
"=",
"self",
".",
"performer",
",",
"title",
"=",
"self",
".",
"title",
",",
"thumb",
"=",
"self",
".",
"thumb",
",",
"disable_notification",
"=",
"self",
".",
"disable_notification",
",",
"reply_markup",
"=",
"self",
".",
"reply_markup",
")"
] | 52.076923 | 36.846154 |
def play():
"""Open the matched movie with a media player."""
with sqlite3.connect(ARGS.database) as connection:
connection.text_factory = str
cursor = connection.cursor()
if ARGS.pattern:
if not ARGS.strict:
ARGS.pattern = '%{0}%'.format(ARGS.pattern)
cursor.execute('SELECT * FROM Movies WHERE Name LIKE (?)',
[ARGS.pattern])
try:
path = sorted([row for row in cursor])[0][1]
replace_map = {' ': '\\ ', '"': '\\"', "'": "\\'"}
for key, val in replace_map.iteritems():
path = path.replace(key, val)
os.system('{0} {1} &'.format(ARGS.player, path))
except IndexError:
exit('Error: Movie not found.') | [
"def",
"play",
"(",
")",
":",
"with",
"sqlite3",
".",
"connect",
"(",
"ARGS",
".",
"database",
")",
"as",
"connection",
":",
"connection",
".",
"text_factory",
"=",
"str",
"cursor",
"=",
"connection",
".",
"cursor",
"(",
")",
"if",
"ARGS",
".",
"pattern",
":",
"if",
"not",
"ARGS",
".",
"strict",
":",
"ARGS",
".",
"pattern",
"=",
"'%{0}%'",
".",
"format",
"(",
"ARGS",
".",
"pattern",
")",
"cursor",
".",
"execute",
"(",
"'SELECT * FROM Movies WHERE Name LIKE (?)'",
",",
"[",
"ARGS",
".",
"pattern",
"]",
")",
"try",
":",
"path",
"=",
"sorted",
"(",
"[",
"row",
"for",
"row",
"in",
"cursor",
"]",
")",
"[",
"0",
"]",
"[",
"1",
"]",
"replace_map",
"=",
"{",
"' '",
":",
"'\\\\ '",
",",
"'\"'",
":",
"'\\\\\"'",
",",
"\"'\"",
":",
"\"\\\\'\"",
"}",
"for",
"key",
",",
"val",
"in",
"replace_map",
".",
"iteritems",
"(",
")",
":",
"path",
"=",
"path",
".",
"replace",
"(",
"key",
",",
"val",
")",
"os",
".",
"system",
"(",
"'{0} {1} &'",
".",
"format",
"(",
"ARGS",
".",
"player",
",",
"path",
")",
")",
"except",
"IndexError",
":",
"exit",
"(",
"'Error: Movie not found.'",
")"
] | 44.722222 | 14.555556 |
def get_groups_by_path(self, path, parent=None):
"""
Retrieve all groups matching the given path and optionally filtered by the given parent node.
The path is converted into the absolute path of the OS before comparison.
:param path: The name of the group that has to be returned
:param parent: A PBXGroup object where the object has to be retrieved from. If None all matching groups are returned
:return: An list of all matching groups
"""
groups = self.objects.get_objects_in_section(u'PBXGroup')
groups = [group for group in groups if group.get_path() == path]
if parent:
return [group for group in groups if parent.has_child(group)]
return groups | [
"def",
"get_groups_by_path",
"(",
"self",
",",
"path",
",",
"parent",
"=",
"None",
")",
":",
"groups",
"=",
"self",
".",
"objects",
".",
"get_objects_in_section",
"(",
"u'PBXGroup'",
")",
"groups",
"=",
"[",
"group",
"for",
"group",
"in",
"groups",
"if",
"group",
".",
"get_path",
"(",
")",
"==",
"path",
"]",
"if",
"parent",
":",
"return",
"[",
"group",
"for",
"group",
"in",
"groups",
"if",
"parent",
".",
"has_child",
"(",
"group",
")",
"]",
"return",
"groups"
] | 49.2 | 29.2 |
def initdoc(request, namespace, docid, mode, template, context=None, configuration=None):
"""Initialise a document (not invoked directly)"""
perspective = request.GET.get('perspective','document')
if context is None: context = {}
if 'configuration' in request.session:
configuration = request.session['configuration']
elif configuration is None:
return fatalerror(request, "No configuration specified")
if configuration not in settings.CONFIGURATIONS:
return fatalerror(request, "Specified configuration does not exist")
flatargs = {
'setdefinitions': True,
'declarations': True, #implies provenance as well
'metadata': True,
'toc': True,
'slices': request.GET.get('slices',settings.CONFIGURATIONS[configuration].get('slices','p:25,s:100')), #overriden either by configuration or by user
'customslicesize': 0, #disabled for initial probe
'textclasses': True,
}
error = False
try:
doc = flat.comm.query(request, "USE " + namespace + "/" + docid + " PROBE", **flatargs) #retrieves only the meta information, not document content
context.update(getcontext(request,namespace,docid, doc, mode, configuration))
except Exception as e:
context.update(docserveerror(e))
error = True
if not error:
dorequiredeclaration = 'requiredeclaration' in settings.CONFIGURATIONS[configuration] and settings.CONFIGURATIONS[configuration]['requiredeclaration']
if dorequiredeclaration:
if not 'declarations' in doc:
return fatalerror(request, "Refusing to load document, missing expected declarations, none declared")
declarations = doc['declarations']
for annotationtype, annotationset in settings.CONFIGURATIONS[configuration]['requiredeclaration']:
found = False
for d in declarations:
if annotationtype == d['annotationtype'] and (not annotationset or annotationset == d['set']):
found = True
break
if not found:
if annotationset:
return fatalerror(request, "Refusing to load document, missing expected declaration for annotation type " + annotationtype + "/" + annotationset)
else:
return fatalerror(request, "Refusing to load document, missing expected declaration for annotation type " + annotationtype)
dometadataindex = 'metadataindex' in settings.CONFIGURATIONS[configuration] and settings.CONFIGURATIONS[configuration]['metadataindex']
if dometadataindex:
metadata = json.loads(context['metadata'])
for metakey in settings.CONFIGURATIONS[configuration]['metadataindex']:
if metakey in metadata:
MetadataIndex.objects.update_or_create(namespace=namespace,docid=docid, key=metakey,defaults={'value':metadata[metakey]})
response = render(request, template, context)
if 'fatalerror' in context:
response.status_code = 500
return response | [
"def",
"initdoc",
"(",
"request",
",",
"namespace",
",",
"docid",
",",
"mode",
",",
"template",
",",
"context",
"=",
"None",
",",
"configuration",
"=",
"None",
")",
":",
"perspective",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'perspective'",
",",
"'document'",
")",
"if",
"context",
"is",
"None",
":",
"context",
"=",
"{",
"}",
"if",
"'configuration'",
"in",
"request",
".",
"session",
":",
"configuration",
"=",
"request",
".",
"session",
"[",
"'configuration'",
"]",
"elif",
"configuration",
"is",
"None",
":",
"return",
"fatalerror",
"(",
"request",
",",
"\"No configuration specified\"",
")",
"if",
"configuration",
"not",
"in",
"settings",
".",
"CONFIGURATIONS",
":",
"return",
"fatalerror",
"(",
"request",
",",
"\"Specified configuration does not exist\"",
")",
"flatargs",
"=",
"{",
"'setdefinitions'",
":",
"True",
",",
"'declarations'",
":",
"True",
",",
"#implies provenance as well",
"'metadata'",
":",
"True",
",",
"'toc'",
":",
"True",
",",
"'slices'",
":",
"request",
".",
"GET",
".",
"get",
"(",
"'slices'",
",",
"settings",
".",
"CONFIGURATIONS",
"[",
"configuration",
"]",
".",
"get",
"(",
"'slices'",
",",
"'p:25,s:100'",
")",
")",
",",
"#overriden either by configuration or by user",
"'customslicesize'",
":",
"0",
",",
"#disabled for initial probe",
"'textclasses'",
":",
"True",
",",
"}",
"error",
"=",
"False",
"try",
":",
"doc",
"=",
"flat",
".",
"comm",
".",
"query",
"(",
"request",
",",
"\"USE \"",
"+",
"namespace",
"+",
"\"/\"",
"+",
"docid",
"+",
"\" PROBE\"",
",",
"*",
"*",
"flatargs",
")",
"#retrieves only the meta information, not document content",
"context",
".",
"update",
"(",
"getcontext",
"(",
"request",
",",
"namespace",
",",
"docid",
",",
"doc",
",",
"mode",
",",
"configuration",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"context",
".",
"update",
"(",
"docserveerror",
"(",
"e",
")",
")",
"error",
"=",
"True",
"if",
"not",
"error",
":",
"dorequiredeclaration",
"=",
"'requiredeclaration'",
"in",
"settings",
".",
"CONFIGURATIONS",
"[",
"configuration",
"]",
"and",
"settings",
".",
"CONFIGURATIONS",
"[",
"configuration",
"]",
"[",
"'requiredeclaration'",
"]",
"if",
"dorequiredeclaration",
":",
"if",
"not",
"'declarations'",
"in",
"doc",
":",
"return",
"fatalerror",
"(",
"request",
",",
"\"Refusing to load document, missing expected declarations, none declared\"",
")",
"declarations",
"=",
"doc",
"[",
"'declarations'",
"]",
"for",
"annotationtype",
",",
"annotationset",
"in",
"settings",
".",
"CONFIGURATIONS",
"[",
"configuration",
"]",
"[",
"'requiredeclaration'",
"]",
":",
"found",
"=",
"False",
"for",
"d",
"in",
"declarations",
":",
"if",
"annotationtype",
"==",
"d",
"[",
"'annotationtype'",
"]",
"and",
"(",
"not",
"annotationset",
"or",
"annotationset",
"==",
"d",
"[",
"'set'",
"]",
")",
":",
"found",
"=",
"True",
"break",
"if",
"not",
"found",
":",
"if",
"annotationset",
":",
"return",
"fatalerror",
"(",
"request",
",",
"\"Refusing to load document, missing expected declaration for annotation type \"",
"+",
"annotationtype",
"+",
"\"/\"",
"+",
"annotationset",
")",
"else",
":",
"return",
"fatalerror",
"(",
"request",
",",
"\"Refusing to load document, missing expected declaration for annotation type \"",
"+",
"annotationtype",
")",
"dometadataindex",
"=",
"'metadataindex'",
"in",
"settings",
".",
"CONFIGURATIONS",
"[",
"configuration",
"]",
"and",
"settings",
".",
"CONFIGURATIONS",
"[",
"configuration",
"]",
"[",
"'metadataindex'",
"]",
"if",
"dometadataindex",
":",
"metadata",
"=",
"json",
".",
"loads",
"(",
"context",
"[",
"'metadata'",
"]",
")",
"for",
"metakey",
"in",
"settings",
".",
"CONFIGURATIONS",
"[",
"configuration",
"]",
"[",
"'metadataindex'",
"]",
":",
"if",
"metakey",
"in",
"metadata",
":",
"MetadataIndex",
".",
"objects",
".",
"update_or_create",
"(",
"namespace",
"=",
"namespace",
",",
"docid",
"=",
"docid",
",",
"key",
"=",
"metakey",
",",
"defaults",
"=",
"{",
"'value'",
":",
"metadata",
"[",
"metakey",
"]",
"}",
")",
"response",
"=",
"render",
"(",
"request",
",",
"template",
",",
"context",
")",
"if",
"'fatalerror'",
"in",
"context",
":",
"response",
".",
"status_code",
"=",
"500",
"return",
"response"
] | 56.327273 | 31.890909 |
def channels_voice_agent_user_display_create(self, agent_id, user_id, data, **kwargs):
"https://developer.zendesk.com/rest_api/docs/voice-api/partner_edition#open-a-users-profile-in-an-agents-browser"
api_path = "/api/v2/channels/voice/agents/{agent_id}/users/{user_id}/display.json"
api_path = api_path.format(agent_id=agent_id, user_id=user_id)
return self.call(api_path, method="POST", data=data, **kwargs) | [
"def",
"channels_voice_agent_user_display_create",
"(",
"self",
",",
"agent_id",
",",
"user_id",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"api_path",
"=",
"\"/api/v2/channels/voice/agents/{agent_id}/users/{user_id}/display.json\"",
"api_path",
"=",
"api_path",
".",
"format",
"(",
"agent_id",
"=",
"agent_id",
",",
"user_id",
"=",
"user_id",
")",
"return",
"self",
".",
"call",
"(",
"api_path",
",",
"method",
"=",
"\"POST\"",
",",
"data",
"=",
"data",
",",
"*",
"*",
"kwargs",
")"
] | 87.4 | 47.4 |
def run_gap_k_selection(data, k_min=1, k_max=50, B=5,
skip=5, **kwargs):
"""
Runs gap score for all k from k_min to k_max.
"""
if k_min == k_max:
return k_min
gap_vals = []
sk_vals = []
k_range = list(range(k_min, k_max, skip))
min_k = 0
min_i = 0
for i, k in enumerate(k_range):
km = KMeans(k)
clusters = km.fit_predict(data)
gap, sk = calculate_gap(data, clusters, km, B=B)
if len(gap_vals) > 1:
if gap_vals[-1] >= gap - (skip+1)*sk:
min_i = i
min_k = k_range[i-1]
break
#return k_range[-1], gap_vals, sk_vals
gap_vals.append(gap)
sk_vals.append(sk)
if min_k == 0:
min_k = k_max
if skip == 1:
return min_k, gap_vals, sk_vals
gap_vals = []
sk_vals = []
for k in range(min_k - skip, min_k + skip):
km = KMeans(k)
clusters = km.fit_predict(data)
gap, sk = calculate_gap(data, clusters, km, B=B)
if len(gap_vals) > 1:
if gap_vals[-1] >= gap - sk:
min_k = k-1
return min_k, gap_vals, sk_vals
gap_vals.append(gap)
sk_vals.append(sk)
return k, gap_vals, sk_vals | [
"def",
"run_gap_k_selection",
"(",
"data",
",",
"k_min",
"=",
"1",
",",
"k_max",
"=",
"50",
",",
"B",
"=",
"5",
",",
"skip",
"=",
"5",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"k_min",
"==",
"k_max",
":",
"return",
"k_min",
"gap_vals",
"=",
"[",
"]",
"sk_vals",
"=",
"[",
"]",
"k_range",
"=",
"list",
"(",
"range",
"(",
"k_min",
",",
"k_max",
",",
"skip",
")",
")",
"min_k",
"=",
"0",
"min_i",
"=",
"0",
"for",
"i",
",",
"k",
"in",
"enumerate",
"(",
"k_range",
")",
":",
"km",
"=",
"KMeans",
"(",
"k",
")",
"clusters",
"=",
"km",
".",
"fit_predict",
"(",
"data",
")",
"gap",
",",
"sk",
"=",
"calculate_gap",
"(",
"data",
",",
"clusters",
",",
"km",
",",
"B",
"=",
"B",
")",
"if",
"len",
"(",
"gap_vals",
")",
">",
"1",
":",
"if",
"gap_vals",
"[",
"-",
"1",
"]",
">=",
"gap",
"-",
"(",
"skip",
"+",
"1",
")",
"*",
"sk",
":",
"min_i",
"=",
"i",
"min_k",
"=",
"k_range",
"[",
"i",
"-",
"1",
"]",
"break",
"#return k_range[-1], gap_vals, sk_vals",
"gap_vals",
".",
"append",
"(",
"gap",
")",
"sk_vals",
".",
"append",
"(",
"sk",
")",
"if",
"min_k",
"==",
"0",
":",
"min_k",
"=",
"k_max",
"if",
"skip",
"==",
"1",
":",
"return",
"min_k",
",",
"gap_vals",
",",
"sk_vals",
"gap_vals",
"=",
"[",
"]",
"sk_vals",
"=",
"[",
"]",
"for",
"k",
"in",
"range",
"(",
"min_k",
"-",
"skip",
",",
"min_k",
"+",
"skip",
")",
":",
"km",
"=",
"KMeans",
"(",
"k",
")",
"clusters",
"=",
"km",
".",
"fit_predict",
"(",
"data",
")",
"gap",
",",
"sk",
"=",
"calculate_gap",
"(",
"data",
",",
"clusters",
",",
"km",
",",
"B",
"=",
"B",
")",
"if",
"len",
"(",
"gap_vals",
")",
">",
"1",
":",
"if",
"gap_vals",
"[",
"-",
"1",
"]",
">=",
"gap",
"-",
"sk",
":",
"min_k",
"=",
"k",
"-",
"1",
"return",
"min_k",
",",
"gap_vals",
",",
"sk_vals",
"gap_vals",
".",
"append",
"(",
"gap",
")",
"sk_vals",
".",
"append",
"(",
"sk",
")",
"return",
"k",
",",
"gap_vals",
",",
"sk_vals"
] | 29.95122 | 13.121951 |
def _set_properties(self, api_response):
"""Update properties from resource in body of ``api_response``
:type api_response: dict
:param api_response: response returned from an API call
"""
self._properties.clear()
cleaned = api_response.copy()
self.dns_name = cleaned.pop("dnsName", None)
if "creationTime" in cleaned:
cleaned["creationTime"] = _rfc3339_to_datetime(cleaned["creationTime"])
self._properties.update(cleaned) | [
"def",
"_set_properties",
"(",
"self",
",",
"api_response",
")",
":",
"self",
".",
"_properties",
".",
"clear",
"(",
")",
"cleaned",
"=",
"api_response",
".",
"copy",
"(",
")",
"self",
".",
"dns_name",
"=",
"cleaned",
".",
"pop",
"(",
"\"dnsName\"",
",",
"None",
")",
"if",
"\"creationTime\"",
"in",
"cleaned",
":",
"cleaned",
"[",
"\"creationTime\"",
"]",
"=",
"_rfc3339_to_datetime",
"(",
"cleaned",
"[",
"\"creationTime\"",
"]",
")",
"self",
".",
"_properties",
".",
"update",
"(",
"cleaned",
")"
] | 41.416667 | 11.666667 |
def address(self, is_compressed=None):
"""
Return the public address representation of this key, if available.
"""
return self._network.address.for_p2pkh(self.hash160(is_compressed=is_compressed)) | [
"def",
"address",
"(",
"self",
",",
"is_compressed",
"=",
"None",
")",
":",
"return",
"self",
".",
"_network",
".",
"address",
".",
"for_p2pkh",
"(",
"self",
".",
"hash160",
"(",
"is_compressed",
"=",
"is_compressed",
")",
")"
] | 44.8 | 17.2 |
def schema_complete():
"""Schema for data in CollectorUpdate."""
return Schema({
'stage': And(str, len),
'timestamp': int,
'status': And(str, lambda s: s in ['started', 'succeeded', 'failed']),
# optional matrix
Optional('matrix', default='default'): And(str, len),
# optional information
Optional('information', default={}): {
Optional(Regex(r'([a-z][_a-z]*)')): object
}
}) | [
"def",
"schema_complete",
"(",
")",
":",
"return",
"Schema",
"(",
"{",
"'stage'",
":",
"And",
"(",
"str",
",",
"len",
")",
",",
"'timestamp'",
":",
"int",
",",
"'status'",
":",
"And",
"(",
"str",
",",
"lambda",
"s",
":",
"s",
"in",
"[",
"'started'",
",",
"'succeeded'",
",",
"'failed'",
"]",
")",
",",
"# optional matrix",
"Optional",
"(",
"'matrix'",
",",
"default",
"=",
"'default'",
")",
":",
"And",
"(",
"str",
",",
"len",
")",
",",
"# optional information",
"Optional",
"(",
"'information'",
",",
"default",
"=",
"{",
"}",
")",
":",
"{",
"Optional",
"(",
"Regex",
"(",
"r'([a-z][_a-z]*)'",
")",
")",
":",
"object",
"}",
"}",
")"
] | 38.384615 | 16.923077 |
def do_action(self, target, dry_run=False):
"""
:param target: Full path and filename
:param dry_run: True - don't actually perform action. False: perform action. No effect for this rule.
:return: filename: Full path and filename after action completes
"""
if dry_run is False:
try:
filename = os.path.basename(target)
size = os.path.getsize(target)
print("{0}\t{1}".format(filename, size))
except OSError:
self.logger.error("Error getting size for file: {0}".format(target))
return target | [
"def",
"do_action",
"(",
"self",
",",
"target",
",",
"dry_run",
"=",
"False",
")",
":",
"if",
"dry_run",
"is",
"False",
":",
"try",
":",
"filename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"target",
")",
"size",
"=",
"os",
".",
"path",
".",
"getsize",
"(",
"target",
")",
"print",
"(",
"\"{0}\\t{1}\"",
".",
"format",
"(",
"filename",
",",
"size",
")",
")",
"except",
"OSError",
":",
"self",
".",
"logger",
".",
"error",
"(",
"\"Error getting size for file: {0}\"",
".",
"format",
"(",
"target",
")",
")",
"return",
"target"
] | 41.333333 | 19.6 |
def update(self):
"""
Update this object with the latest changes from Bugzilla
>>> bug.status
'NEW'
#Changes happen on Bugzilla
>>> bug.update()
>>> bug.status
'FIXED'
"""
if 'id' in self._bug:
result = self._bugsy.request('bug/%s' % self._bug['id'])
self._bug = dict(**result['bugs'][0])
else:
raise BugException("Unable to update bug that isn't in Bugzilla") | [
"def",
"update",
"(",
"self",
")",
":",
"if",
"'id'",
"in",
"self",
".",
"_bug",
":",
"result",
"=",
"self",
".",
"_bugsy",
".",
"request",
"(",
"'bug/%s'",
"%",
"self",
".",
"_bug",
"[",
"'id'",
"]",
")",
"self",
".",
"_bug",
"=",
"dict",
"(",
"*",
"*",
"result",
"[",
"'bugs'",
"]",
"[",
"0",
"]",
")",
"else",
":",
"raise",
"BugException",
"(",
"\"Unable to update bug that isn't in Bugzilla\"",
")"
] | 31.125 | 18 |
def _paramf16(ins):
""" Pushes 32bit fixed point param into the stack
"""
output = _f16_oper(ins.quad[1])
output.append('push de')
output.append('push hl')
return output | [
"def",
"_paramf16",
"(",
"ins",
")",
":",
"output",
"=",
"_f16_oper",
"(",
"ins",
".",
"quad",
"[",
"1",
"]",
")",
"output",
".",
"append",
"(",
"'push de'",
")",
"output",
".",
"append",
"(",
"'push hl'",
")",
"return",
"output"
] | 26.714286 | 10.428571 |
def _get_default_radius(site):
"""
An internal method to get a "default" covalent/element radius
Args:
site: (Site)
Returns:
Covalent radius of element on site, or Atomic radius if unavailable
"""
try:
return CovalentRadius.radius[site.specie.symbol]
except:
return site.specie.atomic_radius | [
"def",
"_get_default_radius",
"(",
"site",
")",
":",
"try",
":",
"return",
"CovalentRadius",
".",
"radius",
"[",
"site",
".",
"specie",
".",
"symbol",
"]",
"except",
":",
"return",
"site",
".",
"specie",
".",
"atomic_radius"
] | 27.428571 | 21.571429 |
def ReadCronJobs(self, cronjob_ids=None):
"""Reads a cronjob from the database."""
if cronjob_ids is None:
res = [job.Copy() for job in itervalues(self.cronjobs)]
else:
res = []
for job_id in cronjob_ids:
try:
res.append(self.cronjobs[job_id].Copy())
except KeyError:
raise db.UnknownCronJobError("Cron job with id %s not found." %
job_id)
for job in res:
lease = self.cronjob_leases.get(job.cron_job_id)
if lease:
job.leased_until, job.leased_by = lease
return res | [
"def",
"ReadCronJobs",
"(",
"self",
",",
"cronjob_ids",
"=",
"None",
")",
":",
"if",
"cronjob_ids",
"is",
"None",
":",
"res",
"=",
"[",
"job",
".",
"Copy",
"(",
")",
"for",
"job",
"in",
"itervalues",
"(",
"self",
".",
"cronjobs",
")",
"]",
"else",
":",
"res",
"=",
"[",
"]",
"for",
"job_id",
"in",
"cronjob_ids",
":",
"try",
":",
"res",
".",
"append",
"(",
"self",
".",
"cronjobs",
"[",
"job_id",
"]",
".",
"Copy",
"(",
")",
")",
"except",
"KeyError",
":",
"raise",
"db",
".",
"UnknownCronJobError",
"(",
"\"Cron job with id %s not found.\"",
"%",
"job_id",
")",
"for",
"job",
"in",
"res",
":",
"lease",
"=",
"self",
".",
"cronjob_leases",
".",
"get",
"(",
"job",
".",
"cron_job_id",
")",
"if",
"lease",
":",
"job",
".",
"leased_until",
",",
"job",
".",
"leased_by",
"=",
"lease",
"return",
"res"
] | 30.631579 | 19.263158 |
def filter(args):
"""
%prog filter gffile > filtered.gff
Filter the gff file based on criteria below:
(1) feature attribute values: [Identity, Coverage].
You can get this type of gff by using gmap
$ gmap -f 2 ....
(2) Total bp length of child features
"""
p = OptionParser(filter.__doc__)
p.add_option("--type", default="mRNA",
help="The feature to scan for the attributes [default: %default]")
g1 = OptionGroup(p, "Filter by identity/coverage attribute values")
g1.add_option("--id", default=95, type="float",
help="Minimum identity [default: %default]")
g1.add_option("--coverage", default=90, type="float",
help="Minimum coverage [default: %default]")
g1.add_option("--nocase", default=False, action="store_true",
help="Case insensitive lookup of attribute names [default: %default]")
p.add_option_group(g1)
g2 = OptionGroup(p, "Filter by child feature bp length")
g2.add_option("--child_ftype", default=None, type="str",
help="Child featuretype to consider")
g2.add_option("--child_bp", default=None, type="int",
help="Filter by total bp of children of chosen ftype")
p.add_option_group(g2)
p.set_outfile()
opts, args = p.parse_args(args)
otype, oid, ocov = opts.type, opts.id, opts.coverage
cftype, clenbp = opts.child_ftype, opts.child_bp
id_attr, cov_attr = "Identity", "Coverage"
if opts.nocase:
id_attr, cov_attr = id_attr.lower(), cov_attr.lower()
if len(args) != 1:
sys.exit(not p.print_help())
gffile, = args
gffdb = make_index(gffile)
bad = set()
ptype = None
for g in gffdb.features_of_type(otype, order_by=('seqid', 'start')):
if not ptype:
parent = list(gffdb.parents(g))
ptype = parent[0].featuretype \
if len(parent) > 0 else otype
if cftype and clenbp:
if gffdb.children_bp(g, child_featuretype=cftype) < clenbp:
bad.add(g.id)
elif oid and ocov:
identity = float(g.attributes[id_attr][0])
coverage = float(g.attributes[cov_attr][0])
if identity < oid or coverage < ocov:
bad.add(g.id)
logging.debug("{0} bad accns marked.".format(len(bad)))
fw = must_open(opts.outfile, "w")
for g in gffdb.features_of_type(ptype, order_by=('seqid', 'start')):
if ptype != otype:
feats = list(gffdb.children(g, featuretype=otype, order_by=('start')))
ok_feats = [f for f in feats if f.id not in bad]
if len(ok_feats) > 0:
print(g, file=fw)
for feat in ok_feats:
print(feat, file=fw)
for child in gffdb.children(feat, order_by=('start')):
print(child, file=fw)
else:
if g.id not in bad:
print(g, file=fw)
for child in gffdb.children(g, order_by=('start')):
print(child, file=fw)
fw.close() | [
"def",
"filter",
"(",
"args",
")",
":",
"p",
"=",
"OptionParser",
"(",
"filter",
".",
"__doc__",
")",
"p",
".",
"add_option",
"(",
"\"--type\"",
",",
"default",
"=",
"\"mRNA\"",
",",
"help",
"=",
"\"The feature to scan for the attributes [default: %default]\"",
")",
"g1",
"=",
"OptionGroup",
"(",
"p",
",",
"\"Filter by identity/coverage attribute values\"",
")",
"g1",
".",
"add_option",
"(",
"\"--id\"",
",",
"default",
"=",
"95",
",",
"type",
"=",
"\"float\"",
",",
"help",
"=",
"\"Minimum identity [default: %default]\"",
")",
"g1",
".",
"add_option",
"(",
"\"--coverage\"",
",",
"default",
"=",
"90",
",",
"type",
"=",
"\"float\"",
",",
"help",
"=",
"\"Minimum coverage [default: %default]\"",
")",
"g1",
".",
"add_option",
"(",
"\"--nocase\"",
",",
"default",
"=",
"False",
",",
"action",
"=",
"\"store_true\"",
",",
"help",
"=",
"\"Case insensitive lookup of attribute names [default: %default]\"",
")",
"p",
".",
"add_option_group",
"(",
"g1",
")",
"g2",
"=",
"OptionGroup",
"(",
"p",
",",
"\"Filter by child feature bp length\"",
")",
"g2",
".",
"add_option",
"(",
"\"--child_ftype\"",
",",
"default",
"=",
"None",
",",
"type",
"=",
"\"str\"",
",",
"help",
"=",
"\"Child featuretype to consider\"",
")",
"g2",
".",
"add_option",
"(",
"\"--child_bp\"",
",",
"default",
"=",
"None",
",",
"type",
"=",
"\"int\"",
",",
"help",
"=",
"\"Filter by total bp of children of chosen ftype\"",
")",
"p",
".",
"add_option_group",
"(",
"g2",
")",
"p",
".",
"set_outfile",
"(",
")",
"opts",
",",
"args",
"=",
"p",
".",
"parse_args",
"(",
"args",
")",
"otype",
",",
"oid",
",",
"ocov",
"=",
"opts",
".",
"type",
",",
"opts",
".",
"id",
",",
"opts",
".",
"coverage",
"cftype",
",",
"clenbp",
"=",
"opts",
".",
"child_ftype",
",",
"opts",
".",
"child_bp",
"id_attr",
",",
"cov_attr",
"=",
"\"Identity\"",
",",
"\"Coverage\"",
"if",
"opts",
".",
"nocase",
":",
"id_attr",
",",
"cov_attr",
"=",
"id_attr",
".",
"lower",
"(",
")",
",",
"cov_attr",
".",
"lower",
"(",
")",
"if",
"len",
"(",
"args",
")",
"!=",
"1",
":",
"sys",
".",
"exit",
"(",
"not",
"p",
".",
"print_help",
"(",
")",
")",
"gffile",
",",
"=",
"args",
"gffdb",
"=",
"make_index",
"(",
"gffile",
")",
"bad",
"=",
"set",
"(",
")",
"ptype",
"=",
"None",
"for",
"g",
"in",
"gffdb",
".",
"features_of_type",
"(",
"otype",
",",
"order_by",
"=",
"(",
"'seqid'",
",",
"'start'",
")",
")",
":",
"if",
"not",
"ptype",
":",
"parent",
"=",
"list",
"(",
"gffdb",
".",
"parents",
"(",
"g",
")",
")",
"ptype",
"=",
"parent",
"[",
"0",
"]",
".",
"featuretype",
"if",
"len",
"(",
"parent",
")",
">",
"0",
"else",
"otype",
"if",
"cftype",
"and",
"clenbp",
":",
"if",
"gffdb",
".",
"children_bp",
"(",
"g",
",",
"child_featuretype",
"=",
"cftype",
")",
"<",
"clenbp",
":",
"bad",
".",
"add",
"(",
"g",
".",
"id",
")",
"elif",
"oid",
"and",
"ocov",
":",
"identity",
"=",
"float",
"(",
"g",
".",
"attributes",
"[",
"id_attr",
"]",
"[",
"0",
"]",
")",
"coverage",
"=",
"float",
"(",
"g",
".",
"attributes",
"[",
"cov_attr",
"]",
"[",
"0",
"]",
")",
"if",
"identity",
"<",
"oid",
"or",
"coverage",
"<",
"ocov",
":",
"bad",
".",
"add",
"(",
"g",
".",
"id",
")",
"logging",
".",
"debug",
"(",
"\"{0} bad accns marked.\"",
".",
"format",
"(",
"len",
"(",
"bad",
")",
")",
")",
"fw",
"=",
"must_open",
"(",
"opts",
".",
"outfile",
",",
"\"w\"",
")",
"for",
"g",
"in",
"gffdb",
".",
"features_of_type",
"(",
"ptype",
",",
"order_by",
"=",
"(",
"'seqid'",
",",
"'start'",
")",
")",
":",
"if",
"ptype",
"!=",
"otype",
":",
"feats",
"=",
"list",
"(",
"gffdb",
".",
"children",
"(",
"g",
",",
"featuretype",
"=",
"otype",
",",
"order_by",
"=",
"(",
"'start'",
")",
")",
")",
"ok_feats",
"=",
"[",
"f",
"for",
"f",
"in",
"feats",
"if",
"f",
".",
"id",
"not",
"in",
"bad",
"]",
"if",
"len",
"(",
"ok_feats",
")",
">",
"0",
":",
"print",
"(",
"g",
",",
"file",
"=",
"fw",
")",
"for",
"feat",
"in",
"ok_feats",
":",
"print",
"(",
"feat",
",",
"file",
"=",
"fw",
")",
"for",
"child",
"in",
"gffdb",
".",
"children",
"(",
"feat",
",",
"order_by",
"=",
"(",
"'start'",
")",
")",
":",
"print",
"(",
"child",
",",
"file",
"=",
"fw",
")",
"else",
":",
"if",
"g",
".",
"id",
"not",
"in",
"bad",
":",
"print",
"(",
"g",
",",
"file",
"=",
"fw",
")",
"for",
"child",
"in",
"gffdb",
".",
"children",
"(",
"g",
",",
"order_by",
"=",
"(",
"'start'",
")",
")",
":",
"print",
"(",
"child",
",",
"file",
"=",
"fw",
")",
"fw",
".",
"close",
"(",
")"
] | 38.367089 | 18.341772 |
def setonce(decorator):
"""A descriptor modifier which allows __set__ to be called at most once."""
def decorate(fn, *args, **kwargs):
parent = decorator(fn, *args, **kwargs)
#doc = _add_msg(getattr(parent, '__doc__', None), '*@setonce*')
doc = getattr(parent, '__doc__', None)
assert hasattr(parent, "__set__") # don't use for non-data descriptors!
return _setonce(fn.__name__, parent, doc)
return decorate | [
"def",
"setonce",
"(",
"decorator",
")",
":",
"def",
"decorate",
"(",
"fn",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"parent",
"=",
"decorator",
"(",
"fn",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"#doc = _add_msg(getattr(parent, '__doc__', None), '*@setonce*')",
"doc",
"=",
"getattr",
"(",
"parent",
",",
"'__doc__'",
",",
"None",
")",
"assert",
"hasattr",
"(",
"parent",
",",
"\"__set__\"",
")",
"# don't use for non-data descriptors!",
"return",
"_setonce",
"(",
"fn",
".",
"__name__",
",",
"parent",
",",
"doc",
")",
"return",
"decorate"
] | 50.111111 | 14.666667 |
def state_estimation_ensemble(data, k, n_runs=10, M_list=[], **se_params):
"""
Runs an ensemble method on the list of M results...
Args:
data: genes x cells array
k: number of classes
n_runs (optional): number of random initializations of state estimation
M_list (optional): list of M arrays from state estimation
se_params (optional): optional poisson_estimate_state params
Returns:
M_new
W_new
ll
"""
if len(M_list)==0:
M_list = []
for i in range(n_runs):
M, W, ll = poisson_estimate_state(data, k, **se_params)
M_list.append(M)
M_stacked = np.hstack(M_list)
M_new, W_new, ll = poisson_estimate_state(M_stacked, k, **se_params)
W_new = np.dot(data.T, M_new)
W_new = W_new/W_new.sum(0)
return M_new, W_new, ll | [
"def",
"state_estimation_ensemble",
"(",
"data",
",",
"k",
",",
"n_runs",
"=",
"10",
",",
"M_list",
"=",
"[",
"]",
",",
"*",
"*",
"se_params",
")",
":",
"if",
"len",
"(",
"M_list",
")",
"==",
"0",
":",
"M_list",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"n_runs",
")",
":",
"M",
",",
"W",
",",
"ll",
"=",
"poisson_estimate_state",
"(",
"data",
",",
"k",
",",
"*",
"*",
"se_params",
")",
"M_list",
".",
"append",
"(",
"M",
")",
"M_stacked",
"=",
"np",
".",
"hstack",
"(",
"M_list",
")",
"M_new",
",",
"W_new",
",",
"ll",
"=",
"poisson_estimate_state",
"(",
"M_stacked",
",",
"k",
",",
"*",
"*",
"se_params",
")",
"W_new",
"=",
"np",
".",
"dot",
"(",
"data",
".",
"T",
",",
"M_new",
")",
"W_new",
"=",
"W_new",
"/",
"W_new",
".",
"sum",
"(",
"0",
")",
"return",
"M_new",
",",
"W_new",
",",
"ll"
] | 32.115385 | 20.730769 |
def isRegionValid(self):
""" Returns false if the whole region is not even partially inside any screen, otherwise true """
screens = PlatformManager.getScreenDetails()
for screen in screens:
s_x, s_y, s_w, s_h = screen["rect"]
if self.x+self.w >= s_x and s_x+s_w >= self.x and self.y+self.h >= s_y and s_y+s_h >= self.y:
# Rects overlap
return True
return False | [
"def",
"isRegionValid",
"(",
"self",
")",
":",
"screens",
"=",
"PlatformManager",
".",
"getScreenDetails",
"(",
")",
"for",
"screen",
"in",
"screens",
":",
"s_x",
",",
"s_y",
",",
"s_w",
",",
"s_h",
"=",
"screen",
"[",
"\"rect\"",
"]",
"if",
"self",
".",
"x",
"+",
"self",
".",
"w",
">=",
"s_x",
"and",
"s_x",
"+",
"s_w",
">=",
"self",
".",
"x",
"and",
"self",
".",
"y",
"+",
"self",
".",
"h",
">=",
"s_y",
"and",
"s_y",
"+",
"s_h",
">=",
"self",
".",
"y",
":",
"# Rects overlap",
"return",
"True",
"return",
"False"
] | 49 | 16.888889 |
def check(self, cfg, state, peek_blocks):
"""
Check if the specified address will be executed
:param cfg:
:param state:
:param int peek_blocks:
:return:
:rtype: bool
"""
# Get the current CFGNode from the CFG
node = self._get_cfg_node(cfg, state)
if node is None:
# Umm it doesn't exist on the control flow graph - why?
l.error('Failed to find CFGNode for state %s on the control flow graph.', state)
return False
# crawl the graph to see if we can reach the target address next
for src, dst in self._dfs_edges(cfg.graph, node, max_steps=peek_blocks):
if src.addr == self.addr or dst.addr == self.addr:
l.debug("State %s will reach %#x.", state, self.addr)
return True
l.debug('SimState %s will not reach %#x.', state, self.addr)
return False | [
"def",
"check",
"(",
"self",
",",
"cfg",
",",
"state",
",",
"peek_blocks",
")",
":",
"# Get the current CFGNode from the CFG",
"node",
"=",
"self",
".",
"_get_cfg_node",
"(",
"cfg",
",",
"state",
")",
"if",
"node",
"is",
"None",
":",
"# Umm it doesn't exist on the control flow graph - why?",
"l",
".",
"error",
"(",
"'Failed to find CFGNode for state %s on the control flow graph.'",
",",
"state",
")",
"return",
"False",
"# crawl the graph to see if we can reach the target address next",
"for",
"src",
",",
"dst",
"in",
"self",
".",
"_dfs_edges",
"(",
"cfg",
".",
"graph",
",",
"node",
",",
"max_steps",
"=",
"peek_blocks",
")",
":",
"if",
"src",
".",
"addr",
"==",
"self",
".",
"addr",
"or",
"dst",
".",
"addr",
"==",
"self",
".",
"addr",
":",
"l",
".",
"debug",
"(",
"\"State %s will reach %#x.\"",
",",
"state",
",",
"self",
".",
"addr",
")",
"return",
"True",
"l",
".",
"debug",
"(",
"'SimState %s will not reach %#x.'",
",",
"state",
",",
"self",
".",
"addr",
")",
"return",
"False"
] | 34.111111 | 22.777778 |
async def deregister(self, service):
"""Deregisters a local service
Parameters:
service (ObjectID): Service ID
Returns:
bool: ``True`` on success
The deregister endpoint is used to remove a service from the local
agent. The agent will take care of deregistering the service with the
Catalog. If there is an associated check, that is also deregistered.
"""
service_id = extract_attr(service, keys=["ServiceID", "ID"])
response = await self._api.get(
"/v1/agent/service/deregister", service_id)
return response.status == 200 | [
"async",
"def",
"deregister",
"(",
"self",
",",
"service",
")",
":",
"service_id",
"=",
"extract_attr",
"(",
"service",
",",
"keys",
"=",
"[",
"\"ServiceID\"",
",",
"\"ID\"",
"]",
")",
"response",
"=",
"await",
"self",
".",
"_api",
".",
"get",
"(",
"\"/v1/agent/service/deregister\"",
",",
"service_id",
")",
"return",
"response",
".",
"status",
"==",
"200"
] | 39.0625 | 18 |
def get_attribute(self, code, default=None):
"""Get attribute for user"""
try:
return self.get(code=code).value
except models.ObjectDoesNotExist:
return default | [
"def",
"get_attribute",
"(",
"self",
",",
"code",
",",
"default",
"=",
"None",
")",
":",
"try",
":",
"return",
"self",
".",
"get",
"(",
"code",
"=",
"code",
")",
".",
"value",
"except",
"models",
".",
"ObjectDoesNotExist",
":",
"return",
"default"
] | 33.833333 | 8.5 |
def get_tagstring_representation(self, tag, onebelow_normal=None,
onebelow_focus=None):
"""
looks up user's preferred way to represent a given tagstring.
:param tag: tagstring
:type tag: str
:param onebelow_normal: attribute that shines through if unfocussed
:type onebelow_normal: urwid.AttrSpec
:param onebelow_focus: attribute that shines through if focussed
:type onebelow_focus: urwid.AttrSpec
If `onebelow_normal` or `onebelow_focus` is given these attributes will
be used as fallbacks for fg/bg values '' and 'default'.
This returns a dictionary mapping
:normal: to :class:`urwid.AttrSpec` used if unfocussed
:focussed: to :class:`urwid.AttrSpec` used if focussed
:translated: to an alternative string representation
"""
colourmode = int(self._config.get('colourmode'))
theme = self._theme
cfg = self._config
colours = [1, 16, 256]
def colourpick(triple):
""" pick attribute from triple (mono,16c,256c) according to current
colourmode"""
if triple is None:
return None
return triple[colours.index(colourmode)]
# global default attributes for tagstrings.
# These could contain values '' and 'default' which we interpret as
# "use the values from the widget below"
default_normal = theme.get_attribute(colourmode, 'global', 'tag')
default_focus = theme.get_attribute(colourmode, 'global', 'tag_focus')
# local defaults for tagstring attributes. depend on next lower widget
fallback_normal = resolve_att(onebelow_normal, default_normal)
fallback_focus = resolve_att(onebelow_focus, default_focus)
for sec in cfg['tags'].sections:
if re.match('^{}$'.format(sec), tag):
normal = resolve_att(colourpick(cfg['tags'][sec]['normal']),
fallback_normal)
focus = resolve_att(colourpick(cfg['tags'][sec]['focus']),
fallback_focus)
translated = cfg['tags'][sec]['translated']
translated = string_decode(translated, 'UTF-8')
if translated is None:
translated = tag
translation = cfg['tags'][sec]['translation']
if translation:
translated = re.sub(translation[0], translation[1], tag)
break
else:
normal = fallback_normal
focus = fallback_focus
translated = tag
return {'normal': normal, 'focussed': focus, 'translated': translated} | [
"def",
"get_tagstring_representation",
"(",
"self",
",",
"tag",
",",
"onebelow_normal",
"=",
"None",
",",
"onebelow_focus",
"=",
"None",
")",
":",
"colourmode",
"=",
"int",
"(",
"self",
".",
"_config",
".",
"get",
"(",
"'colourmode'",
")",
")",
"theme",
"=",
"self",
".",
"_theme",
"cfg",
"=",
"self",
".",
"_config",
"colours",
"=",
"[",
"1",
",",
"16",
",",
"256",
"]",
"def",
"colourpick",
"(",
"triple",
")",
":",
"\"\"\" pick attribute from triple (mono,16c,256c) according to current\n colourmode\"\"\"",
"if",
"triple",
"is",
"None",
":",
"return",
"None",
"return",
"triple",
"[",
"colours",
".",
"index",
"(",
"colourmode",
")",
"]",
"# global default attributes for tagstrings.",
"# These could contain values '' and 'default' which we interpret as",
"# \"use the values from the widget below\"",
"default_normal",
"=",
"theme",
".",
"get_attribute",
"(",
"colourmode",
",",
"'global'",
",",
"'tag'",
")",
"default_focus",
"=",
"theme",
".",
"get_attribute",
"(",
"colourmode",
",",
"'global'",
",",
"'tag_focus'",
")",
"# local defaults for tagstring attributes. depend on next lower widget",
"fallback_normal",
"=",
"resolve_att",
"(",
"onebelow_normal",
",",
"default_normal",
")",
"fallback_focus",
"=",
"resolve_att",
"(",
"onebelow_focus",
",",
"default_focus",
")",
"for",
"sec",
"in",
"cfg",
"[",
"'tags'",
"]",
".",
"sections",
":",
"if",
"re",
".",
"match",
"(",
"'^{}$'",
".",
"format",
"(",
"sec",
")",
",",
"tag",
")",
":",
"normal",
"=",
"resolve_att",
"(",
"colourpick",
"(",
"cfg",
"[",
"'tags'",
"]",
"[",
"sec",
"]",
"[",
"'normal'",
"]",
")",
",",
"fallback_normal",
")",
"focus",
"=",
"resolve_att",
"(",
"colourpick",
"(",
"cfg",
"[",
"'tags'",
"]",
"[",
"sec",
"]",
"[",
"'focus'",
"]",
")",
",",
"fallback_focus",
")",
"translated",
"=",
"cfg",
"[",
"'tags'",
"]",
"[",
"sec",
"]",
"[",
"'translated'",
"]",
"translated",
"=",
"string_decode",
"(",
"translated",
",",
"'UTF-8'",
")",
"if",
"translated",
"is",
"None",
":",
"translated",
"=",
"tag",
"translation",
"=",
"cfg",
"[",
"'tags'",
"]",
"[",
"sec",
"]",
"[",
"'translation'",
"]",
"if",
"translation",
":",
"translated",
"=",
"re",
".",
"sub",
"(",
"translation",
"[",
"0",
"]",
",",
"translation",
"[",
"1",
"]",
",",
"tag",
")",
"break",
"else",
":",
"normal",
"=",
"fallback_normal",
"focus",
"=",
"fallback_focus",
"translated",
"=",
"tag",
"return",
"{",
"'normal'",
":",
"normal",
",",
"'focussed'",
":",
"focus",
",",
"'translated'",
":",
"translated",
"}"
] | 43.174603 | 20.888889 |
def _plain_or_callable(obj):
"""Returns the value of the called object of obj is a callable,
otherwise the plain object.
Returns None if obj is None.
>>> obj = None
>>> _plain_or_callable(obj)
>>> stmt = 'select * from sys.nodes'
>>> _plain_or_callable(stmt)
'select * from sys.nodes'
>>> def _args():
... return [1, 'name']
>>> _plain_or_callable(_args)
[1, 'name']
>>> _plain_or_callable((x for x in range(10)))
0
>>> class BulkArgsGenerator:
... def __call__(self):
... return [[1, 'foo'], [2, 'bar'], [3, 'foobar']]
>>> _plain_or_callable(BulkArgsGenerator())
[[1, 'foo'], [2, 'bar'], [3, 'foobar']]
"""
if callable(obj):
return obj()
elif isinstance(obj, types.GeneratorType):
return next(obj)
else:
return obj | [
"def",
"_plain_or_callable",
"(",
"obj",
")",
":",
"if",
"callable",
"(",
"obj",
")",
":",
"return",
"obj",
"(",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"types",
".",
"GeneratorType",
")",
":",
"return",
"next",
"(",
"obj",
")",
"else",
":",
"return",
"obj"
] | 25.65625 | 17.15625 |
def _unlock(self, name, client_id, request_id):
"""Handles unlocking"""
if name in self._locks:
other_client_id, other_request_id, lock_time = self._locks[name]
if other_client_id != client_id:
response = (self.RELEASE_ERROR + self.DELIMITER +
'Lock `%s` was acquired by `%s` (old request id `%s`) and not by '
'`%s` (request id `%s`)' % (name,
other_client_id,
other_request_id,
client_id,
request_id))
self._logger.error(response)
return response
else:
del self._locks[name]
return self.RELEASED
elif (name, client_id) in self._timeout_locks:
other_request_id, lock_time = self._timeout_locks[(name, client_id)]
timeout = time.time() - lock_time - self._timeout
response = (self.RELEASE_ERROR + self.DELIMITER +
'Lock `%s` timed out %f seconds ago (client id `%s`, '
'old request id `%s`)' % (name, timeout, client_id, other_request_id))
return response
else:
response = (self.RELEASE_ERROR + self.DELIMITER +
'Lock `%s` cannot be found in database (client id `%s`, '
'request id `%s`)' % (name, client_id, request_id))
self._logger.warning(response)
return response | [
"def",
"_unlock",
"(",
"self",
",",
"name",
",",
"client_id",
",",
"request_id",
")",
":",
"if",
"name",
"in",
"self",
".",
"_locks",
":",
"other_client_id",
",",
"other_request_id",
",",
"lock_time",
"=",
"self",
".",
"_locks",
"[",
"name",
"]",
"if",
"other_client_id",
"!=",
"client_id",
":",
"response",
"=",
"(",
"self",
".",
"RELEASE_ERROR",
"+",
"self",
".",
"DELIMITER",
"+",
"'Lock `%s` was acquired by `%s` (old request id `%s`) and not by '",
"'`%s` (request id `%s`)'",
"%",
"(",
"name",
",",
"other_client_id",
",",
"other_request_id",
",",
"client_id",
",",
"request_id",
")",
")",
"self",
".",
"_logger",
".",
"error",
"(",
"response",
")",
"return",
"response",
"else",
":",
"del",
"self",
".",
"_locks",
"[",
"name",
"]",
"return",
"self",
".",
"RELEASED",
"elif",
"(",
"name",
",",
"client_id",
")",
"in",
"self",
".",
"_timeout_locks",
":",
"other_request_id",
",",
"lock_time",
"=",
"self",
".",
"_timeout_locks",
"[",
"(",
"name",
",",
"client_id",
")",
"]",
"timeout",
"=",
"time",
".",
"time",
"(",
")",
"-",
"lock_time",
"-",
"self",
".",
"_timeout",
"response",
"=",
"(",
"self",
".",
"RELEASE_ERROR",
"+",
"self",
".",
"DELIMITER",
"+",
"'Lock `%s` timed out %f seconds ago (client id `%s`, '",
"'old request id `%s`)'",
"%",
"(",
"name",
",",
"timeout",
",",
"client_id",
",",
"other_request_id",
")",
")",
"return",
"response",
"else",
":",
"response",
"=",
"(",
"self",
".",
"RELEASE_ERROR",
"+",
"self",
".",
"DELIMITER",
"+",
"'Lock `%s` cannot be found in database (client id `%s`, '",
"'request id `%s`)'",
"%",
"(",
"name",
",",
"client_id",
",",
"request_id",
")",
")",
"self",
".",
"_logger",
".",
"warning",
"(",
"response",
")",
"return",
"response"
] | 54.9 | 21.933333 |
def todate(self):
"""
Calculates the corresponding day in the gregorian calendar. this is the main use case of this library.
:return: Corresponding date in gregorian calendar.
:rtype: :py:class:`datetime.date`
"""
arr = get_gregorian_date_from_julian_day(self.tojulianday())
return datetime.date(int(arr[0]), int(arr[1]), int(arr[2])) | [
"def",
"todate",
"(",
"self",
")",
":",
"arr",
"=",
"get_gregorian_date_from_julian_day",
"(",
"self",
".",
"tojulianday",
"(",
")",
")",
"return",
"datetime",
".",
"date",
"(",
"int",
"(",
"arr",
"[",
"0",
"]",
")",
",",
"int",
"(",
"arr",
"[",
"1",
"]",
")",
",",
"int",
"(",
"arr",
"[",
"2",
"]",
")",
")"
] | 42.555556 | 23 |
def get():
"""Check the health of this service"""
uptime = time.time() - START_TIME
response = dict(uptime=f'{uptime:.2f}s',
links=dict(root='{}'.format(get_root_url())))
# TODO(BM) check if we can connect to the config database ...
# try:
# DB.get_sub_array_ids()
# except ConnectionError as error:
# response['state'] = 'ERROR'
# response['message'] = str(error)
return response, HTTPStatus.OK | [
"def",
"get",
"(",
")",
":",
"uptime",
"=",
"time",
".",
"time",
"(",
")",
"-",
"START_TIME",
"response",
"=",
"dict",
"(",
"uptime",
"=",
"f'{uptime:.2f}s'",
",",
"links",
"=",
"dict",
"(",
"root",
"=",
"'{}'",
".",
"format",
"(",
"get_root_url",
"(",
")",
")",
")",
")",
"# TODO(BM) check if we can connect to the config database ...",
"# try:",
"# DB.get_sub_array_ids()",
"# except ConnectionError as error:",
"# response['state'] = 'ERROR'",
"# response['message'] = str(error)",
"return",
"response",
",",
"HTTPStatus",
".",
"OK"
] | 35.076923 | 13.692308 |
def to_comment(comment):
"""
Convert a string to a ``.properties`` file comment. All non-Latin-1
characters in the string are escaped using ``\\uXXXX`` escapes (after
converting non-BMP characters to surrogate pairs), a ``#`` is prepended to
the string, any CR LF or CR line breaks in the string are converted to LF,
and a ``#`` is inserted after any line break not already followed by a
``#`` or ``!``. No trailing newline is added.
>>> to_comment('They say foo=bar,\\r\\nbut does bar=foo?')
'#They say foo=bar,\\n#but does bar=foo?'
:param comment: the string to convert to a comment
:type comment: text string
:rtype: text string
"""
return '#' + re.sub(r'[^\x00-\xFF]', _esc,
re.sub(r'\n(?![#!])', '\n#',
re.sub(r'\r\n?', '\n', comment))) | [
"def",
"to_comment",
"(",
"comment",
")",
":",
"return",
"'#'",
"+",
"re",
".",
"sub",
"(",
"r'[^\\x00-\\xFF]'",
",",
"_esc",
",",
"re",
".",
"sub",
"(",
"r'\\n(?![#!])'",
",",
"'\\n#'",
",",
"re",
".",
"sub",
"(",
"r'\\r\\n?'",
",",
"'\\n'",
",",
"comment",
")",
")",
")"
] | 44.157895 | 20.578947 |
def _find_ip4_addresses():
"""Find all the IP4 addresses currently bound to interfaces
"""
global _ip4_addresses
proto = socket.AF_INET
if _ip4_addresses is None:
_ip4_addresses = []
#
# Determine the interface for the default gateway
# (if any) and, later, prioritise the INET address on
# that interface.
#
default_gateway = netifaces.gateways()['default']
if proto in default_gateway:
_, default_gateway_interface = default_gateway[proto]
else:
default_gateway_interface = None
for interface in netifaces.interfaces():
for info in netifaces.ifaddresses(interface).get(netifaces.AF_INET, []):
if info['addr']:
if interface == default_gateway_interface:
_ip4_addresses.insert(0, info['addr'])
else:
_ip4_addresses.append(info['addr'])
return _ip4_addresses | [
"def",
"_find_ip4_addresses",
"(",
")",
":",
"global",
"_ip4_addresses",
"proto",
"=",
"socket",
".",
"AF_INET",
"if",
"_ip4_addresses",
"is",
"None",
":",
"_ip4_addresses",
"=",
"[",
"]",
"#",
"# Determine the interface for the default gateway",
"# (if any) and, later, prioritise the INET address on",
"# that interface.",
"#",
"default_gateway",
"=",
"netifaces",
".",
"gateways",
"(",
")",
"[",
"'default'",
"]",
"if",
"proto",
"in",
"default_gateway",
":",
"_",
",",
"default_gateway_interface",
"=",
"default_gateway",
"[",
"proto",
"]",
"else",
":",
"default_gateway_interface",
"=",
"None",
"for",
"interface",
"in",
"netifaces",
".",
"interfaces",
"(",
")",
":",
"for",
"info",
"in",
"netifaces",
".",
"ifaddresses",
"(",
"interface",
")",
".",
"get",
"(",
"netifaces",
".",
"AF_INET",
",",
"[",
"]",
")",
":",
"if",
"info",
"[",
"'addr'",
"]",
":",
"if",
"interface",
"==",
"default_gateway_interface",
":",
"_ip4_addresses",
".",
"insert",
"(",
"0",
",",
"info",
"[",
"'addr'",
"]",
")",
"else",
":",
"_ip4_addresses",
".",
"append",
"(",
"info",
"[",
"'addr'",
"]",
")",
"return",
"_ip4_addresses"
] | 34.892857 | 18.964286 |
def append_flag_values(self, flag_values):
"""Appends flags registered in another FlagValues instance.
Args:
flag_values: FlagValues, the FlagValues instance from which to copy flags.
"""
for flag_name, flag in six.iteritems(flag_values._flags()): # pylint: disable=protected-access
# Each flags with short_name appears here twice (once under its
# normal name, and again with its short name). To prevent
# problems (DuplicateFlagError) with double flag registration, we
# perform a check to make sure that the entry we're looking at is
# for its normal name.
if flag_name == flag.name:
try:
self[flag_name] = flag
except _exceptions.DuplicateFlagError:
raise _exceptions.DuplicateFlagError.from_flag(
flag_name, self, other_flag_values=flag_values) | [
"def",
"append_flag_values",
"(",
"self",
",",
"flag_values",
")",
":",
"for",
"flag_name",
",",
"flag",
"in",
"six",
".",
"iteritems",
"(",
"flag_values",
".",
"_flags",
"(",
")",
")",
":",
"# pylint: disable=protected-access",
"# Each flags with short_name appears here twice (once under its",
"# normal name, and again with its short name). To prevent",
"# problems (DuplicateFlagError) with double flag registration, we",
"# perform a check to make sure that the entry we're looking at is",
"# for its normal name.",
"if",
"flag_name",
"==",
"flag",
".",
"name",
":",
"try",
":",
"self",
"[",
"flag_name",
"]",
"=",
"flag",
"except",
"_exceptions",
".",
"DuplicateFlagError",
":",
"raise",
"_exceptions",
".",
"DuplicateFlagError",
".",
"from_flag",
"(",
"flag_name",
",",
"self",
",",
"other_flag_values",
"=",
"flag_values",
")"
] | 46.833333 | 21.5 |
def from_private_key(account_name, private_key=None, private_key_path=None,
storage=None, storage_path=None, api_version="v3",
readonly=False, http_client=None, ga_hook=None):
"""Create a client for a service account.
Create a client with an account name and a private key.
Args:
account_name: str, the account identifier (probably the account email).
private_key: str, the private key as a string.
private_key_path: str, path to a file with the private key in.
storage: oauth2client.client.Storage, a Storage implementation to store
credentials.
storage_path: str, path to a file storage.
readonly: bool, default False, if True only readonly access is requested
from GA.
http_client: httplib2.Http, Override the default http client used.
ga_hook: function, a hook that is called every time a query is made
against GA.
"""
if not private_key:
if not private_key_path:
raise GapyError(
"Must provide either a private_key or a private_key_file")
if isinstance(private_key_path, basestring):
private_key_path = open(private_key_path)
private_key = private_key_path.read()
storage = _get_storage(storage, storage_path)
scope = GOOGLE_API_SCOPE_READONLY if readonly else GOOGLE_API_SCOPE
credentials = SignedJwtAssertionCredentials(account_name, private_key,
scope)
credentials.set_store(storage)
return Client(_build(credentials, api_version, http_client), ga_hook) | [
"def",
"from_private_key",
"(",
"account_name",
",",
"private_key",
"=",
"None",
",",
"private_key_path",
"=",
"None",
",",
"storage",
"=",
"None",
",",
"storage_path",
"=",
"None",
",",
"api_version",
"=",
"\"v3\"",
",",
"readonly",
"=",
"False",
",",
"http_client",
"=",
"None",
",",
"ga_hook",
"=",
"None",
")",
":",
"if",
"not",
"private_key",
":",
"if",
"not",
"private_key_path",
":",
"raise",
"GapyError",
"(",
"\"Must provide either a private_key or a private_key_file\"",
")",
"if",
"isinstance",
"(",
"private_key_path",
",",
"basestring",
")",
":",
"private_key_path",
"=",
"open",
"(",
"private_key_path",
")",
"private_key",
"=",
"private_key_path",
".",
"read",
"(",
")",
"storage",
"=",
"_get_storage",
"(",
"storage",
",",
"storage_path",
")",
"scope",
"=",
"GOOGLE_API_SCOPE_READONLY",
"if",
"readonly",
"else",
"GOOGLE_API_SCOPE",
"credentials",
"=",
"SignedJwtAssertionCredentials",
"(",
"account_name",
",",
"private_key",
",",
"scope",
")",
"credentials",
".",
"set_store",
"(",
"storage",
")",
"return",
"Client",
"(",
"_build",
"(",
"credentials",
",",
"api_version",
",",
"http_client",
")",
",",
"ga_hook",
")"
] | 45 | 23.333333 |
def _readClusterSettings(self):
    """
    Populate cluster-related attributes from the Azure instance
    metadata service (IMDS) of the machine this code is running on.

    Reads the zone, cluster name and owner tag from the metadata
    endpoint, locates the leader, ensures an SSH public key exists on
    it, and registers the static workers in /etc/hosts.
    """
    # Query the Azure instance metadata endpoint for this VM
    # (the 'Metadata: True' header is required by IMDS).
    mdUrl = "http://169.254.169.254/metadata/instance?api-version=2017-08-01"
    header = {'Metadata': 'True'}
    request = urllib.request.Request(url=mdUrl, headers=header)
    response = urllib.request.urlopen(request)
    metadata = json.loads(response.read().decode("utf-8"))
    # set values from the leader meta-data
    self._zone = metadata['compute']['location']
    self.clusterName = metadata['compute']['resourceGroupName']
    # Tags arrive as a single "k1:v1;k2:v2" string; parse into a dict.
    tagsStr = metadata['compute']['tags']
    tags = dict(item.split(":") for item in tagsStr.split(";"))
    self._owner = tags.get('owner', 'no-owner')
    leader = self.getLeader()
    self._leaderPrivateIP = leader.privateIP
    self._setSSH()  # create id_rsa.pub file on the leader if it is not there
    self._masterPublicKeyFile = self.LEADER_HOME_DIR + '.ssh/id_rsa.pub'
    # Add static nodes to /etc/hosts since Azure sometimes fails to find
    # them with DNS. NOTE: this must be a plain for-loop, not map():
    # map() returns a lazy iterator on Python 3, so the side effects
    # would never execute.
    for node in self.getProvisionedWorkers(None):
        self._addToHosts(node)
"def",
"_readClusterSettings",
"(",
"self",
")",
":",
"# get the leader metadata",
"mdUrl",
"=",
"\"http://169.254.169.254/metadata/instance?api-version=2017-08-01\"",
"header",
"=",
"{",
"'Metadata'",
":",
"'True'",
"}",
"request",
"=",
"urllib",
".",
"request",
".",
"Request",
"(",
"url",
"=",
"mdUrl",
",",
"headers",
"=",
"header",
")",
"response",
"=",
"urllib",
".",
"request",
".",
"urlopen",
"(",
"request",
")",
"data",
"=",
"response",
".",
"read",
"(",
")",
"dataStr",
"=",
"data",
".",
"decode",
"(",
"\"utf-8\"",
")",
"metadata",
"=",
"json",
".",
"loads",
"(",
"dataStr",
")",
"# set values from the leader meta-data",
"self",
".",
"_zone",
"=",
"metadata",
"[",
"'compute'",
"]",
"[",
"'location'",
"]",
"self",
".",
"clusterName",
"=",
"metadata",
"[",
"'compute'",
"]",
"[",
"'resourceGroupName'",
"]",
"tagsStr",
"=",
"metadata",
"[",
"'compute'",
"]",
"[",
"'tags'",
"]",
"tags",
"=",
"dict",
"(",
"item",
".",
"split",
"(",
"\":\"",
")",
"for",
"item",
"in",
"tagsStr",
".",
"split",
"(",
"\";\"",
")",
")",
"self",
".",
"_owner",
"=",
"tags",
".",
"get",
"(",
"'owner'",
",",
"'no-owner'",
")",
"leader",
"=",
"self",
".",
"getLeader",
"(",
")",
"self",
".",
"_leaderPrivateIP",
"=",
"leader",
".",
"privateIP",
"self",
".",
"_setSSH",
"(",
")",
"# create id_rsa.pub file on the leader if it is not there",
"self",
".",
"_masterPublicKeyFile",
"=",
"self",
".",
"LEADER_HOME_DIR",
"+",
"'.ssh/id_rsa.pub'",
"# Add static nodes to /etc/hosts since Azure sometimes fails to find them with DNS",
"map",
"(",
"lambda",
"x",
":",
"self",
".",
"_addToHosts",
"(",
"x",
")",
",",
"self",
".",
"getProvisionedWorkers",
"(",
"None",
")",
")"
] | 47.461538 | 18.923077 |
def parse_genotypes(self, lb, ub):
    """Load the genotypes for the current chunk of loci into memory.

    :param lb: Lower bound of the current chunk
    :param ub: Upper bound of the current chunk
    :return: numpy array of dosage strings for the current chunk
    :raises EOFError: if the first line of the file yields no columns
    """
    fh = self.openfile(self.current_file)
    fields = fh.readline().strip().split()[lb:ub]
    n_fields = len(fields)
    row = 0
    if n_fields > 0:
        genotypes = numpy.empty((self.ind_count, n_fields), dtype='|S5')
        # Keep reading rows until a line yields at most one column.
        while n_fields > 1:
            genotypes[row] = numpy.array(fields)
            row += 1
            fields = fh.readline().strip().split()[lb:ub]
            n_fields = len(fields)
    else:
        raise EOFError
    return genotypes
"def",
"parse_genotypes",
"(",
"self",
",",
"lb",
",",
"ub",
")",
":",
"file",
"=",
"self",
".",
"openfile",
"(",
"self",
".",
"current_file",
")",
"words",
"=",
"file",
".",
"readline",
"(",
")",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"lb",
":",
"ub",
"]",
"word_count",
"=",
"len",
"(",
"words",
")",
"idx",
"=",
"0",
"if",
"word_count",
">",
"0",
":",
"dosages",
"=",
"numpy",
".",
"empty",
"(",
"(",
"self",
".",
"ind_count",
",",
"word_count",
")",
",",
"dtype",
"=",
"'|S5'",
")",
"while",
"word_count",
">",
"1",
":",
"dosages",
"[",
"idx",
"]",
"=",
"numpy",
".",
"array",
"(",
"words",
")",
"idx",
"+=",
"1",
"line",
"=",
"file",
".",
"readline",
"(",
")",
"words",
"=",
"line",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"lb",
":",
"ub",
"]",
"word_count",
"=",
"len",
"(",
"words",
")",
"else",
":",
"raise",
"EOFError",
"return",
"dosages"
] | 32.615385 | 15.884615 |
def before_app_request(self, f):
    """Like :meth:`Flask.before_request`, but application-wide: the
    decorated function runs before every request, even requests that
    fall outside of this blueprint.
    """
    def register(s):
        # Attach f to the app's global (None-keyed) before-request list.
        s.app.before_request_funcs.setdefault(None, []).append(f)
    self.record_once(register)
    return f
return f | [
"def",
"before_app_request",
"(",
"self",
",",
"f",
")",
":",
"self",
".",
"record_once",
"(",
"lambda",
"s",
":",
"s",
".",
"app",
".",
"before_request_funcs",
".",
"setdefault",
"(",
"None",
",",
"[",
"]",
")",
".",
"append",
"(",
"f",
")",
")",
"return",
"f"
] | 42.571429 | 11 |
def i8(self, name, value=None, align=None):
    """Append a one-byte integer field to the template.

    Convenience wrapper that calls the `int` keyword with the length
    fixed to 1.
    """
    field_args = (1, name, value, align)
    self.int(*field_args)
"def",
"i8",
"(",
"self",
",",
"name",
",",
"value",
"=",
"None",
",",
"align",
"=",
"None",
")",
":",
"self",
".",
"int",
"(",
"1",
",",
"name",
",",
"value",
",",
"align",
")"
] | 45.8 | 8.8 |
def _ss_reg(self):
"""Sum of squares of the regression."""
return np.sum(
np.square(self._predicted - np.expand_dims(self._ybar, axis=1)),
axis=1,
) | [
"def",
"_ss_reg",
"(",
"self",
")",
":",
"return",
"np",
".",
"sum",
"(",
"np",
".",
"square",
"(",
"self",
".",
"_predicted",
"-",
"np",
".",
"expand_dims",
"(",
"self",
".",
"_ybar",
",",
"axis",
"=",
"1",
")",
")",
",",
"axis",
"=",
"1",
",",
")"
] | 32.666667 | 21 |
def get_host_health_temperature_sensors(self, data=None):
    """Get the health Temp Sensor report.

    :param data: the data to retrieve from the server, defaults to None.
    :returns: a list of dictionaries with temperature sensor
        information.
    :raises: IloConnectionError if failed connecting to the iLO.
    :raises: IloError, on an error from iLO.
    """
    health = self.get_host_health_data(data)
    sensors = health['GET_EMBEDDED_HEALTH_DATA']['TEMPERATURE']['TEMP']
    # A single sensor comes back as a bare dict; normalise to a list.
    return sensors if isinstance(sensors, list) else [sensors]
"def",
"get_host_health_temperature_sensors",
"(",
"self",
",",
"data",
"=",
"None",
")",
":",
"data",
"=",
"self",
".",
"get_host_health_data",
"(",
"data",
")",
"d",
"=",
"data",
"[",
"'GET_EMBEDDED_HEALTH_DATA'",
"]",
"[",
"'TEMPERATURE'",
"]",
"[",
"'TEMP'",
"]",
"if",
"not",
"isinstance",
"(",
"d",
",",
"list",
")",
":",
"d",
"=",
"[",
"d",
"]",
"return",
"d"
] | 41 | 17.857143 |
def conform_query(cls, query):
    """Convert the query string from a target uri into a dict, driven
    by ``cls.allowed_kwargs`` and ``cls.filter_kwargs``.

    :param query: Unparsed query string
    :type query: urllib.parse.urlsplit(uri).query
    :returns: Dictionary of parsed values; keys flagged True in
        ``cls.allowed_kwargs`` have their values decoded as JSON.
    """
    parsed = parse_qs(query, keep_blank_values=True)
    if cls.filter_kwargs:
        # Drop any keywords we were not told to expect.
        parsed = {k: v for k, v in parsed.items() if k in cls.allowed_kwargs}
    result = {}
    for key, values in parsed.items():
        # Only the first occurrence of a repeated name is used; a
        # parameter given without a value is treated as boolean true.
        raw = values[0] or 'true'
        if cls.allowed_kwargs.get(key, False):
            result[key] = json.loads(raw)
        else:
            result[key] = raw
    return result
"def",
"conform_query",
"(",
"cls",
",",
"query",
")",
":",
"query",
"=",
"parse_qs",
"(",
"query",
",",
"keep_blank_values",
"=",
"True",
")",
"# Remove any unexpected keywords from the query string.",
"if",
"cls",
".",
"filter_kwargs",
":",
"query",
"=",
"{",
"x",
":",
"y",
"for",
"x",
",",
"y",
"in",
"query",
".",
"items",
"(",
")",
"if",
"x",
"in",
"cls",
".",
"allowed_kwargs",
"}",
"for",
"key",
",",
"vals",
"in",
"query",
".",
"items",
"(",
")",
":",
"# Multiple values of the same name could be passed use first",
"# Also params without strings will be treated as true values",
"if",
"cls",
".",
"allowed_kwargs",
".",
"get",
"(",
"key",
",",
"False",
")",
":",
"val",
"=",
"json",
".",
"loads",
"(",
"vals",
"[",
"0",
"]",
"or",
"'true'",
")",
"else",
":",
"val",
"=",
"vals",
"[",
"0",
"]",
"or",
"'true'",
"query",
"[",
"key",
"]",
"=",
"val",
"return",
"query"
] | 37.814815 | 21.703704 |
def tag_begin(self, tag_name, attributes=None):
    """Mark the start of a ``tag_name`` structure.

    Close the structure later by calling :meth:`tag_end` with the same
    ``tag_name``.

    ``attributes`` is a string of the form "key1=value1 key2=value2 ...".
    Values may be boolean (true/false or 1/0), integer, float, string,
    or an array such as "rect=[1.2 4.3 2.0 3.0]". Strings are enclosed
    in single quotes ('), with single quotes and backslashes inside
    escaped by a backslash. A key given without a value is equivalent
    to "key=true". ``attributes`` may be omitted, an empty string, or
    None.

    See cairo's Tags and Links Description for the list of tags and
    attributes. Invalid nesting of tags or invalid attributes will
    cause the context to shut down with a ``CAIRO_STATUS_TAG_ERROR``
    status.

    See :meth:`tag_end`.

    :param tag_name: tag name
    :param attributes: tag attributes

    *New in cairo 1.16.*

    *New in cairocffi 0.9.*
    """
    if attributes is None:
        attributes = ''
    encoded_name = _encode_string(tag_name)
    cairo.cairo_tag_begin(
        self._pointer, encoded_name, _encode_string(attributes))
    self._check_status()
"def",
"tag_begin",
"(",
"self",
",",
"tag_name",
",",
"attributes",
"=",
"None",
")",
":",
"if",
"attributes",
"is",
"None",
":",
"attributes",
"=",
"''",
"cairo",
".",
"cairo_tag_begin",
"(",
"self",
".",
"_pointer",
",",
"_encode_string",
"(",
"tag_name",
")",
",",
"_encode_string",
"(",
"attributes",
")",
")",
"self",
".",
"_check_status",
"(",
")"
] | 33.325581 | 25.55814 |
def prob_classify(self, text):
    """Return the label probability distribution for classifying a
    string of text.

    Example:
    ::
        >>> classifier = MaxEntClassifier(train_data)
        >>> prob_dist = classifier.prob_classify("I feel happy this morning.")
        >>> prob_dist.max()
        'positive'
        >>> prob_dist.prob("positive")
        0.7

    :rtype: nltk.probability.DictionaryProbDist
    """
    return self.classifier.prob_classify(self.extract_features(text))
"def",
"prob_classify",
"(",
"self",
",",
"text",
")",
":",
"feats",
"=",
"self",
".",
"extract_features",
"(",
"text",
")",
"return",
"self",
".",
"classifier",
".",
"prob_classify",
"(",
"feats",
")"
] | 29.157895 | 20.315789 |
def call(self, method, *args):
    """Invoke the named service method with the supplied arguments.

    Connection failures are re-raised as :class:`ConnectError`; known
    SOAP faults are mapped to their dedicated exception classes, and
    anything unrecognised becomes a :class:`ServiceError`.
    """
    # Map known fault names to the exception type raised for them.
    fault_map = {
        'TableFault': TableFault,
        'ListFault': ListFault,
        'API_LIMIT_EXCEEDED': ApiLimitError,
        'AccountFault': AccountFault,
    }
    try:
        response = getattr(self.client.service, method)(*args)
    except (URLError, SSLError):
        log.exception('Failed to connect to responsys service')
        raise ConnectError("Request to service timed out")
    except WebFault as web_fault:
        fault_name = getattr(web_fault.fault, 'faultstring', None)
        error = str(web_fault.fault.detail)
        exc_class = fault_map.get(fault_name)
        if exc_class is not None:
            raise exc_class(error)
        raise ServiceError(web_fault.fault, web_fault.document)
    return response
"def",
"call",
"(",
"self",
",",
"method",
",",
"*",
"args",
")",
":",
"try",
":",
"response",
"=",
"getattr",
"(",
"self",
".",
"client",
".",
"service",
",",
"method",
")",
"(",
"*",
"args",
")",
"except",
"(",
"URLError",
",",
"SSLError",
")",
"as",
"e",
":",
"log",
".",
"exception",
"(",
"'Failed to connect to responsys service'",
")",
"raise",
"ConnectError",
"(",
"\"Request to service timed out\"",
")",
"except",
"WebFault",
"as",
"web_fault",
":",
"fault_name",
"=",
"getattr",
"(",
"web_fault",
".",
"fault",
",",
"'faultstring'",
",",
"None",
")",
"error",
"=",
"str",
"(",
"web_fault",
".",
"fault",
".",
"detail",
")",
"if",
"fault_name",
"==",
"'TableFault'",
":",
"raise",
"TableFault",
"(",
"error",
")",
"if",
"fault_name",
"==",
"'ListFault'",
":",
"raise",
"ListFault",
"(",
"error",
")",
"if",
"fault_name",
"==",
"'API_LIMIT_EXCEEDED'",
":",
"raise",
"ApiLimitError",
"(",
"error",
")",
"if",
"fault_name",
"==",
"'AccountFault'",
":",
"raise",
"AccountFault",
"(",
"error",
")",
"raise",
"ServiceError",
"(",
"web_fault",
".",
"fault",
",",
"web_fault",
".",
"document",
")",
"return",
"response"
] | 42.5 | 13.681818 |
def callback_request(self, msg, reply_cb=None, inform_cb=None,
                     user_data=None, timeout=None, use_mid=None):
    r"""Send a request message.

    Parameters
    ----------
    msg : Message object
        The request message to send.
    reply_cb : function
        The reply callback with signature reply_cb(msg)
        or reply_cb(msg, \*user_data)
    inform_cb : function
        The inform callback with signature inform_cb(msg)
        or inform_cb(msg, \*user_data)
    user_data : tuple
        Optional user data to send to the reply and inform
        callbacks.
    timeout : float in seconds
        How long to wait for a reply. The default is the
        the timeout set when creating the AsyncClient.
    use_mid : boolean, optional
        Whether to use message IDs. Default is to use message IDs
        if the server supports them.
    """
    if timeout is None:
        timeout = self._request_timeout
    mid = self._get_mid_and_update_msg(msg, use_mid)
    if timeout is None:  # deal with 'no timeout', i.e. None
        timeout_handle = None
    else:
        timeout_handle = self.ioloop.call_later(
            timeout, partial(self._handle_timeout, mid, self.ioloop.time()))
    self._push_async_request(
        mid, msg, reply_cb, inform_cb, user_data, timeout_handle)
    try:
        self.send_request(msg)
    # NOTE: 'except X as e' replaces the Python-2-only 'except X, e'
    # syntax, which is a SyntaxError on Python 3.
    except KatcpClientError as e:
        # Surface the client-side send failure to the caller as a
        # failed reply carrying the same message ID.
        error_reply = Message.request(msg.name, "fail", str(e))
        error_reply.mid = mid
        self.handle_reply(error_reply)
"def",
"callback_request",
"(",
"self",
",",
"msg",
",",
"reply_cb",
"=",
"None",
",",
"inform_cb",
"=",
"None",
",",
"user_data",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"use_mid",
"=",
"None",
")",
":",
"if",
"timeout",
"is",
"None",
":",
"timeout",
"=",
"self",
".",
"_request_timeout",
"mid",
"=",
"self",
".",
"_get_mid_and_update_msg",
"(",
"msg",
",",
"use_mid",
")",
"if",
"timeout",
"is",
"None",
":",
"# deal with 'no timeout', i.e. None",
"timeout_handle",
"=",
"None",
"else",
":",
"timeout_handle",
"=",
"self",
".",
"ioloop",
".",
"call_later",
"(",
"timeout",
",",
"partial",
"(",
"self",
".",
"_handle_timeout",
",",
"mid",
",",
"self",
".",
"ioloop",
".",
"time",
"(",
")",
")",
")",
"self",
".",
"_push_async_request",
"(",
"mid",
",",
"msg",
",",
"reply_cb",
",",
"inform_cb",
",",
"user_data",
",",
"timeout_handle",
")",
"try",
":",
"self",
".",
"send_request",
"(",
"msg",
")",
"except",
"KatcpClientError",
",",
"e",
":",
"error_reply",
"=",
"Message",
".",
"request",
"(",
"msg",
".",
"name",
",",
"\"fail\"",
",",
"str",
"(",
"e",
")",
")",
"error_reply",
".",
"mid",
"=",
"mid",
"self",
".",
"handle_reply",
"(",
"error_reply",
")"
] | 36.377778 | 17.8 |
def measurements(self):
    """Return modeled measurements.

    1. dimension: frequency
    2. dimension: config-number
    3. dimension: 2: magnitude and phase (resistivity)
    """
    # Stack per-key measurements in deterministic (sorted-key) order.
    stacked = [self.tds[key].measurements() for key in sorted(self.tds)]
    return np.array(stacked)
"def",
"measurements",
"(",
"self",
")",
":",
"m_all",
"=",
"np",
".",
"array",
"(",
"[",
"self",
".",
"tds",
"[",
"key",
"]",
".",
"measurements",
"(",
")",
"for",
"key",
"in",
"sorted",
"(",
"self",
".",
"tds",
".",
"keys",
"(",
")",
")",
"]",
")",
"return",
"m_all"
] | 30.181818 | 16.818182 |
def insert_child(self, object, index, child):
    """Insert a child into the object's children, choosing the target
    collection by the child's type. Unrecognised types are ignored.
    """
    # Order matters: Subgraph must be checked before the other types.
    for child_type, attr in ((Subgraph, 'subgraphs'),
                             (Cluster, 'clusters'),
                             (Node, 'nodes'),
                             (Edge, 'edges')):
        if isinstance(child, child_type):
            getattr(object, attr).insert(index, child)
            return
"def",
"insert_child",
"(",
"self",
",",
"object",
",",
"index",
",",
"child",
")",
":",
"if",
"isinstance",
"(",
"child",
",",
"Subgraph",
")",
":",
"object",
".",
"subgraphs",
".",
"insert",
"(",
"index",
",",
"child",
")",
"elif",
"isinstance",
"(",
"child",
",",
"Cluster",
")",
":",
"object",
".",
"clusters",
".",
"insert",
"(",
"index",
",",
"child",
")",
"elif",
"isinstance",
"(",
"child",
",",
"Node",
")",
":",
"object",
".",
"nodes",
".",
"insert",
"(",
"index",
",",
"child",
")",
"elif",
"isinstance",
"(",
"child",
",",
"Edge",
")",
":",
"object",
".",
"edges",
".",
"insert",
"(",
"index",
",",
"child",
")",
"else",
":",
"pass"
] | 29.352941 | 15.235294 |
def set_permissions(self, object, replace=False):
    """
    Set the S3 ACL grants on the given object according to the type
    of Distribution.

    A private-content Distribution (an :class:`S3Origin` with an
    Origin Access Identity) grants READ to that identity; any other
    Distribution gets the canned "public-read" ACL.

    :type object: :class:`boto.cloudfront.object.Object`
    :param object: The Object whose ACL is being set

    :type replace: bool
    :param replace: If False, the Origin Access Identity grant is
                    appended to the object's existing ACL. If True,
                    the ACL is completely replaced with one granting
                    READ to the Origin Access Identity only.
    """
    if not isinstance(self.config.origin, S3Origin):
        object.set_canned_acl('public-read')
        return
    if not self.config.origin.origin_access_identity:
        # S3 origin without an access identity: nothing to grant.
        return
    id = self.config.origin.origin_access_identity.split('/')[-1]
    oai = self.connection.get_origin_access_identity_info(id)
    policy = object.get_acl()
    if replace:
        policy.acl = ACL()
    policy.acl.add_user_grant('READ', oai.s3_user_id)
    object.set_acl(policy)
"def",
"set_permissions",
"(",
"self",
",",
"object",
",",
"replace",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"config",
".",
"origin",
",",
"S3Origin",
")",
":",
"if",
"self",
".",
"config",
".",
"origin",
".",
"origin_access_identity",
":",
"id",
"=",
"self",
".",
"config",
".",
"origin",
".",
"origin_access_identity",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"oai",
"=",
"self",
".",
"connection",
".",
"get_origin_access_identity_info",
"(",
"id",
")",
"policy",
"=",
"object",
".",
"get_acl",
"(",
")",
"if",
"replace",
":",
"policy",
".",
"acl",
"=",
"ACL",
"(",
")",
"policy",
".",
"acl",
".",
"add_user_grant",
"(",
"'READ'",
",",
"oai",
".",
"s3_user_id",
")",
"object",
".",
"set_acl",
"(",
"policy",
")",
"else",
":",
"object",
".",
"set_canned_acl",
"(",
"'public-read'",
")"
] | 47.258065 | 20.483871 |
def _readASCII(self, filename):
    """Read a header-less ASCII table.

    Following synphot convention, the first column is assumed to be
    wavelength in Angstroms and the second column flux in Flam.
    """
    self.waveunits = units.Units('angstrom')
    self.fluxunits = units.Units('flam')
    wavelengths, fluxes = self._columnsFromASCII(filename)
    self._wavetable = N.array(wavelengths, dtype=N.float64)
    self._fluxtable = N.array(fluxes, dtype=N.float64)
"def",
"_readASCII",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"waveunits",
"=",
"units",
".",
"Units",
"(",
"'angstrom'",
")",
"self",
".",
"fluxunits",
"=",
"units",
".",
"Units",
"(",
"'flam'",
")",
"wlist",
",",
"flist",
"=",
"self",
".",
"_columnsFromASCII",
"(",
"filename",
")",
"self",
".",
"_wavetable",
"=",
"N",
".",
"array",
"(",
"wlist",
",",
"dtype",
"=",
"N",
".",
"float64",
")",
"self",
".",
"_fluxtable",
"=",
"N",
".",
"array",
"(",
"flist",
",",
"dtype",
"=",
"N",
".",
"float64",
")"
] | 44 | 13.454545 |
def render_request(self, sort=True):
    """Render the dict's Cookie objects into a string formatted for
    HTTP request headers (simple 'Cookie: ' style), optionally sorted.
    """
    rendered = (cookie.render_request() for cookie in self.values())
    if sort:
        rendered = sorted(rendered)
    return "; ".join(rendered)
"def",
"render_request",
"(",
"self",
",",
"sort",
"=",
"True",
")",
":",
"if",
"not",
"sort",
":",
"return",
"(",
"\"; \"",
".",
"join",
"(",
"cookie",
".",
"render_request",
"(",
")",
"for",
"cookie",
"in",
"self",
".",
"values",
"(",
")",
")",
")",
"return",
"(",
"\"; \"",
".",
"join",
"(",
"sorted",
"(",
"cookie",
".",
"render_request",
"(",
")",
"for",
"cookie",
"in",
"self",
".",
"values",
"(",
")",
")",
")",
")"
] | 43.444444 | 11.777778 |
def follow(self, gpid, callback=None, callback_parsed=None):
    """Create a subscription (i.e. follow) a Feed/Point with a global
    point id (gpid) and a feed data callback.

    Returns a new
    [RemoteFeed](RemotePoint.m.html#IoticAgent.IOT.RemotePoint.RemoteFeed)
    object, or the existing one if the subscription already exists - OR -
    Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
    containing the error if the infrastructure detects a problem
    Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
    if there is a communications problem between you and the infrastructure

    `gpid` (required) (uuid) global id of the Point you want to follow `-OR-`
    `gpid` (required) (lid,pid) tuple of `(thing_localid, point_localid)` for local subscription

    `callback` (optional) (function reference) invoked on receipt of feed
    data with a single dict argument, with keys of:

        #!python
        'data' # (decoded or raw bytes)
        'mime' # (None, unless payload was not decoded and has a mime type)
        'pid'  # (the global id of the feed from which the data originates)
        'time' # (datetime representing UTC timestamp of share)

    `callback_parsed` (optional) (function reference) like `callback`, but
    the dict also includes the `parsed` key holding the set of values in a
    [PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject)
    instance. If both are specified, `callback_parsed` takes precedence and
    `callback` is only called if the point data could not be parsed
    according to its current value description.

    `NOTE`: `callback_parsed` can only be used if `auto_encode_decode` is
    enabled for the client instance.
    """
    effective_cb = callback
    if callback_parsed:
        effective_cb = self._client._get_parsed_feed_callback(callback_parsed, callback)
    return self.__sub(R_FEED, gpid, callback=effective_cb)
"def",
"follow",
"(",
"self",
",",
"gpid",
",",
"callback",
"=",
"None",
",",
"callback_parsed",
"=",
"None",
")",
":",
"if",
"callback_parsed",
":",
"callback",
"=",
"self",
".",
"_client",
".",
"_get_parsed_feed_callback",
"(",
"callback_parsed",
",",
"callback",
")",
"return",
"self",
".",
"__sub",
"(",
"R_FEED",
",",
"gpid",
",",
"callback",
"=",
"callback",
")"
] | 59.333333 | 39.916667 |
def iteration(self, node_status=True):
    """
    Execute a single model iteration

    :param node_status: when True, include the per-node status map in
        the returned dict; when False, return an empty status map.
    :return: Iteration_id, Incremental node status (dictionary node->status)
    """
    # One iteration changes the opinion of several voters using the following procedure:
    # - select randomly one voter (speaker 1)
    # - select randomly one of its neighbours (speaker 2)
    # - if the two voters agree, their neighbours take their opinion
    self.clean_initial_status(self.available_statuses.values())
    # First call: return a snapshot of the initial configuration
    # instead of performing an update step.
    if self.actual_iteration == 0:
        self.actual_iteration += 1
        delta, node_count, status_delta = self.status_delta(self.status)
        if node_status:
            return {"iteration": 0, "status": self.status.copy(),
                    "node_count": node_count.copy(), "status_delta": status_delta.copy()}
        else:
            return {"iteration": 0, "status": {},
                    "node_count": node_count.copy(), "status_delta": status_delta.copy()}
    delta = {}
    status_delta = {st: 0 for st in self.available_statuses.values()}
    # select a random node
    speaker1 = list(self.graph.nodes())[np.random.randint(0, self.graph.number_of_nodes())]
    # select a random neighbour
    neighbours = list(self.graph.neighbors(speaker1))
    if isinstance(self.graph, nx.DiGraph):
        # add also the predecessors
        neighbours += list(self.graph.predecessors(speaker1))
    speaker2 = neighbours[np.random.randint(0, len(neighbours))]
    if self.status[speaker1] == self.status[speaker2]:
        # select listeners (all neighbours of two speakers)
        neighbours = list(self.graph.neighbors(speaker1)) + list(self.graph.neighbors(speaker2))
        if isinstance(self.graph, nx.DiGraph):
            # assumed if a->b then b can be influenced by a
            # but not the other way around - the link between the speakers doesn't matter
            neighbours = list(self.graph.successors(speaker1)) + list(self.graph.successors(speaker2))
        # update status of listeners
        for listener in neighbours:
            if self.status[speaker1] != self.status[listener]:
                delta[listener] = self.status[speaker1]
                # NOTE(review): the += on the listener's OLD status and
                # -= on every other status looks inverted relative to
                # the transition being applied — confirm against the
                # status_delta convention used by the other models.
                status_delta[self.status[listener]] += 1
                for x in self.available_statuses.values():
                    if x != self.status[listener]:
                        status_delta[x] -= 1
                self.status[listener] = self.status[speaker1]
    # Recount nodes per status after the update.
    node_count = {st: len([n for n in self.status if self.status[n] == st])
                  for st in self.available_statuses.values()}
    self.actual_iteration += 1
    if node_status:
        return {"iteration": self.actual_iteration - 1, "status": delta.copy(),
                "node_count": node_count.copy(), "status_delta": status_delta.copy()}
    else:
        return {"iteration": self.actual_iteration - 1, "status": {},
                "node_count": node_count.copy(), "status_delta": status_delta.copy()}
"def",
"iteration",
"(",
"self",
",",
"node_status",
"=",
"True",
")",
":",
"# One iteration changes the opinion of several voters using the following procedure:",
"# - select randomly one voter (speaker 1)",
"# - select randomly one of its neighbours (speaker 2)",
"# - if the two voters agree, their neighbours take their opinion",
"self",
".",
"clean_initial_status",
"(",
"self",
".",
"available_statuses",
".",
"values",
"(",
")",
")",
"if",
"self",
".",
"actual_iteration",
"==",
"0",
":",
"self",
".",
"actual_iteration",
"+=",
"1",
"delta",
",",
"node_count",
",",
"status_delta",
"=",
"self",
".",
"status_delta",
"(",
"self",
".",
"status",
")",
"if",
"node_status",
":",
"return",
"{",
"\"iteration\"",
":",
"0",
",",
"\"status\"",
":",
"self",
".",
"status",
".",
"copy",
"(",
")",
",",
"\"node_count\"",
":",
"node_count",
".",
"copy",
"(",
")",
",",
"\"status_delta\"",
":",
"status_delta",
".",
"copy",
"(",
")",
"}",
"else",
":",
"return",
"{",
"\"iteration\"",
":",
"0",
",",
"\"status\"",
":",
"{",
"}",
",",
"\"node_count\"",
":",
"node_count",
".",
"copy",
"(",
")",
",",
"\"status_delta\"",
":",
"status_delta",
".",
"copy",
"(",
")",
"}",
"delta",
"=",
"{",
"}",
"status_delta",
"=",
"{",
"st",
":",
"0",
"for",
"st",
"in",
"self",
".",
"available_statuses",
".",
"values",
"(",
")",
"}",
"# select a random node",
"speaker1",
"=",
"list",
"(",
"self",
".",
"graph",
".",
"nodes",
"(",
")",
")",
"[",
"np",
".",
"random",
".",
"randint",
"(",
"0",
",",
"self",
".",
"graph",
".",
"number_of_nodes",
"(",
")",
")",
"]",
"# select a random neighbour",
"neighbours",
"=",
"list",
"(",
"self",
".",
"graph",
".",
"neighbors",
"(",
"speaker1",
")",
")",
"if",
"isinstance",
"(",
"self",
".",
"graph",
",",
"nx",
".",
"DiGraph",
")",
":",
"# add also the predecessors",
"neighbours",
"+=",
"list",
"(",
"self",
".",
"graph",
".",
"predecessors",
"(",
"speaker1",
")",
")",
"speaker2",
"=",
"neighbours",
"[",
"np",
".",
"random",
".",
"randint",
"(",
"0",
",",
"len",
"(",
"neighbours",
")",
")",
"]",
"if",
"self",
".",
"status",
"[",
"speaker1",
"]",
"==",
"self",
".",
"status",
"[",
"speaker2",
"]",
":",
"# select listeners (all neighbours of two speakers)",
"neighbours",
"=",
"list",
"(",
"self",
".",
"graph",
".",
"neighbors",
"(",
"speaker1",
")",
")",
"+",
"list",
"(",
"self",
".",
"graph",
".",
"neighbors",
"(",
"speaker2",
")",
")",
"if",
"isinstance",
"(",
"self",
".",
"graph",
",",
"nx",
".",
"DiGraph",
")",
":",
"# assumed if a->b then b can be influenced by a",
"# but not the other way around - the link between the speakers doesn't matter",
"neighbours",
"=",
"list",
"(",
"self",
".",
"graph",
".",
"successors",
"(",
"speaker1",
")",
")",
"+",
"list",
"(",
"self",
".",
"graph",
".",
"successors",
"(",
"speaker2",
")",
")",
"# update status of listeners",
"for",
"listener",
"in",
"neighbours",
":",
"if",
"self",
".",
"status",
"[",
"speaker1",
"]",
"!=",
"self",
".",
"status",
"[",
"listener",
"]",
":",
"delta",
"[",
"listener",
"]",
"=",
"self",
".",
"status",
"[",
"speaker1",
"]",
"status_delta",
"[",
"self",
".",
"status",
"[",
"listener",
"]",
"]",
"+=",
"1",
"for",
"x",
"in",
"self",
".",
"available_statuses",
".",
"values",
"(",
")",
":",
"if",
"x",
"!=",
"self",
".",
"status",
"[",
"listener",
"]",
":",
"status_delta",
"[",
"x",
"]",
"-=",
"1",
"self",
".",
"status",
"[",
"listener",
"]",
"=",
"self",
".",
"status",
"[",
"speaker1",
"]",
"node_count",
"=",
"{",
"st",
":",
"len",
"(",
"[",
"n",
"for",
"n",
"in",
"self",
".",
"status",
"if",
"self",
".",
"status",
"[",
"n",
"]",
"==",
"st",
"]",
")",
"for",
"st",
"in",
"self",
".",
"available_statuses",
".",
"values",
"(",
")",
"}",
"self",
".",
"actual_iteration",
"+=",
"1",
"if",
"node_status",
":",
"return",
"{",
"\"iteration\"",
":",
"self",
".",
"actual_iteration",
"-",
"1",
",",
"\"status\"",
":",
"delta",
".",
"copy",
"(",
")",
",",
"\"node_count\"",
":",
"node_count",
".",
"copy",
"(",
")",
",",
"\"status_delta\"",
":",
"status_delta",
".",
"copy",
"(",
")",
"}",
"else",
":",
"return",
"{",
"\"iteration\"",
":",
"self",
".",
"actual_iteration",
"-",
"1",
",",
"\"status\"",
":",
"{",
"}",
",",
"\"node_count\"",
":",
"node_count",
".",
"copy",
"(",
")",
",",
"\"status_delta\"",
":",
"status_delta",
".",
"copy",
"(",
")",
"}"
] | 45.897059 | 27.073529 |
def _get_postqueue_stats(self, postfix_config_dir, tags):
    """Collect postqueue counts for one Postfix instance and submit
    them as 'postfix.queue.size' gauges tagged by queue (active, hold,
    deferred) and instance (the config dir)."""
    # get some interesting configuration values from postconf
    pc_output, _, _ = get_subprocess_output(['postconf', 'mail_version'], self.log, False)
    postfix_version = pc_output.strip('\n').split('=')[1].strip()
    pc_output, _, _ = get_subprocess_output(['postconf', 'authorized_mailq_users'], self.log, False)
    authorized_mailq_users = pc_output.strip('\n').split('=')[1].strip()
    self.log.debug('authorized_mailq_users : {}'.format(authorized_mailq_users))
    output, _, _ = get_subprocess_output(['postqueue', '-c', postfix_config_dir, '-p'], self.log, False)
    active_count = 0
    hold_count = 0
    deferred_count = 0
    # postque -p sample output
    '''
    root@postfix:/opt/datadog-agent/agent/checks.d# postqueue -p
    ----Queue ID----- --Size-- ---Arrival Time---- --Sender/Recipient------
    3xWyLP6Nmfz23fk 367 Tue Aug 15 16:17:33 root@postfix.devnull.home
    (deferred transport)
    alice@crypto.io
    3xWyD86NwZz23ff! 358 Tue Aug 15 16:12:08 root@postfix.devnull.home
    (deferred transport)
    bob@crypto.io
    -- 1 Kbytes in 2 Requests.
    '''
    # Classify each queue entry: '*' marks active, '!' marks hold, and
    # a line starting with a digit is an ordinary (deferred) entry.
    for line in output.splitlines():
        if '*' in line:
            active_count += 1
            continue
        if '!' in line:
            hold_count += 1
            continue
        if line[0:1].isdigit():
            deferred_count += 1
    self.log.debug('Postfix Version: %s' % postfix_version)
    self.gauge(
        'postfix.queue.size', active_count, tags=tags + ['queue:active', 'instance:{}'.format(postfix_config_dir)]
    )
    self.gauge(
        'postfix.queue.size', hold_count, tags=tags + ['queue:hold', 'instance:{}'.format(postfix_config_dir)]
    )
    self.gauge(
        'postfix.queue.size',
        deferred_count,
        tags=tags + ['queue:deferred', 'instance:{}'.format(postfix_config_dir)],
    )
"def",
"_get_postqueue_stats",
"(",
"self",
",",
"postfix_config_dir",
",",
"tags",
")",
":",
"# get some intersting configuratin values from postconf",
"pc_output",
",",
"_",
",",
"_",
"=",
"get_subprocess_output",
"(",
"[",
"'postconf'",
",",
"'mail_version'",
"]",
",",
"self",
".",
"log",
",",
"False",
")",
"postfix_version",
"=",
"pc_output",
".",
"strip",
"(",
"'\\n'",
")",
".",
"split",
"(",
"'='",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"pc_output",
",",
"_",
",",
"_",
"=",
"get_subprocess_output",
"(",
"[",
"'postconf'",
",",
"'authorized_mailq_users'",
"]",
",",
"self",
".",
"log",
",",
"False",
")",
"authorized_mailq_users",
"=",
"pc_output",
".",
"strip",
"(",
"'\\n'",
")",
".",
"split",
"(",
"'='",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'authorized_mailq_users : {}'",
".",
"format",
"(",
"authorized_mailq_users",
")",
")",
"output",
",",
"_",
",",
"_",
"=",
"get_subprocess_output",
"(",
"[",
"'postqueue'",
",",
"'-c'",
",",
"postfix_config_dir",
",",
"'-p'",
"]",
",",
"self",
".",
"log",
",",
"False",
")",
"active_count",
"=",
"0",
"hold_count",
"=",
"0",
"deferred_count",
"=",
"0",
"# postque -p sample output",
"for",
"line",
"in",
"output",
".",
"splitlines",
"(",
")",
":",
"if",
"'*'",
"in",
"line",
":",
"active_count",
"+=",
"1",
"continue",
"if",
"'!'",
"in",
"line",
":",
"hold_count",
"+=",
"1",
"continue",
"if",
"line",
"[",
"0",
":",
"1",
"]",
".",
"isdigit",
"(",
")",
":",
"deferred_count",
"+=",
"1",
"self",
".",
"log",
".",
"debug",
"(",
"'Postfix Version: %s'",
"%",
"postfix_version",
")",
"self",
".",
"gauge",
"(",
"'postfix.queue.size'",
",",
"active_count",
",",
"tags",
"=",
"tags",
"+",
"[",
"'queue:active'",
",",
"'instance:{}'",
".",
"format",
"(",
"postfix_config_dir",
")",
"]",
")",
"self",
".",
"gauge",
"(",
"'postfix.queue.size'",
",",
"hold_count",
",",
"tags",
"=",
"tags",
"+",
"[",
"'queue:hold'",
",",
"'instance:{}'",
".",
"format",
"(",
"postfix_config_dir",
")",
"]",
")",
"self",
".",
"gauge",
"(",
"'postfix.queue.size'",
",",
"deferred_count",
",",
"tags",
"=",
"tags",
"+",
"[",
"'queue:deferred'",
",",
"'instance:{}'",
".",
"format",
"(",
"postfix_config_dir",
")",
"]",
",",
")"
] | 41.333333 | 30.148148 |
def matchBytes(self, bytes):
    """Check whether *bytes* occurs at the current position.

    On a match, advance ``self.position`` past the matched bytes and
    return True; otherwise leave the position untouched and return
    False.
    """
    start = self.position
    matched = self[start:start + len(bytes)].startswith(bytes)
    if matched:
        self.position = start + len(bytes)
    return matched
"def",
"matchBytes",
"(",
"self",
",",
"bytes",
")",
":",
"p",
"=",
"self",
".",
"position",
"data",
"=",
"self",
"[",
"p",
":",
"p",
"+",
"len",
"(",
"bytes",
")",
"]",
"rv",
"=",
"data",
".",
"startswith",
"(",
"bytes",
")",
"if",
"rv",
":",
"self",
".",
"position",
"+=",
"len",
"(",
"bytes",
")",
"return",
"rv"
] | 41.8 | 12.1 |
def from_bytes(cls, data):
    """Parse a length-prefixed credential record.

    *data* is laid out as: a 2-byte big-endian username length, the
    UTF-8 username, a 2-byte big-endian password length, and the
    UTF-8 password.  Returns ``cls(username, password)``.
    """
    # Username: 2-byte length prefix, then the UTF-8 payload.
    name_len = int.from_bytes(data[:2], byteorder="big")
    name_end = 2 + name_len
    username = data[2:name_end].decode("UTF-8")
    # Password: same layout, starting right after the username.
    pw_len = int.from_bytes(data[name_end:name_end + 2], byteorder="big")
    pw_start = name_end + 2
    password = data[pw_start:pw_start + pw_len].decode("UTF-8")
    return cls(username, password)
"def",
"from_bytes",
"(",
"cls",
",",
"data",
")",
":",
"len_username",
"=",
"int",
".",
"from_bytes",
"(",
"data",
"[",
"0",
":",
"2",
"]",
",",
"byteorder",
"=",
"\"big\"",
")",
"offset_username",
"=",
"2",
"+",
"len_username",
"username",
"=",
"data",
"[",
"2",
":",
"offset_username",
"]",
".",
"decode",
"(",
"\"UTF-8\"",
")",
"offset_password",
"=",
"2",
"+",
"offset_username",
"len_password",
"=",
"int",
".",
"from_bytes",
"(",
"data",
"[",
"offset_username",
":",
"offset_password",
"]",
",",
"byteorder",
"=",
"\"big\"",
")",
"pass_begin",
"=",
"offset_password",
"pass_end",
"=",
"offset_password",
"+",
"len_password",
"password",
"=",
"data",
"[",
"pass_begin",
":",
"pass_end",
"]",
".",
"decode",
"(",
"\"UTF-8\"",
")",
"return",
"cls",
"(",
"username",
",",
"password",
")"
] | 36 | 13.5 |
def get_default_config(self):
    """
    Returns the default collector settings
    """
    config = super(WebsiteMonitorCollector, self).get_default_config()
    config.update({
        'URL': '',
        'path': 'websitemonitor',
    })
    return config
"def",
"get_default_config",
"(",
"self",
")",
":",
"default_config",
"=",
"super",
"(",
"WebsiteMonitorCollector",
",",
"self",
")",
".",
"get_default_config",
"(",
")",
"default_config",
"[",
"'URL'",
"]",
"=",
"''",
"default_config",
"[",
"'path'",
"]",
"=",
"'websitemonitor'",
"return",
"default_config"
] | 35.666667 | 8.333333 |
def create_discount_coupon(cls, discount_coupon, **kwargs):
    """Create DiscountCoupon

    Create a new DiscountCoupon.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_discount_coupon(discount_coupon, async=True)
    >>> result = thread.get()
    :param async bool
    :param DiscountCoupon discount_coupon: Attributes of discountCoupon to create (required)
    :return: DiscountCoupon
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper always want just the payload, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        # Asynchronous: hand back the request thread untouched.
        return cls._create_discount_coupon_with_http_info(discount_coupon, **kwargs)
    data = cls._create_discount_coupon_with_http_info(discount_coupon, **kwargs)
    return data
"def",
"create_discount_coupon",
"(",
"cls",
",",
"discount_coupon",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"cls",
".",
"_create_discount_coupon_with_http_info",
"(",
"discount_coupon",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"cls",
".",
"_create_discount_coupon_with_http_info",
"(",
"discount_coupon",
",",
"*",
"*",
"kwargs",
")",
"return",
"data"
] | 43.904762 | 21.761905 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.