text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def prepare_metadata(self):
# type: () -> None
"""Ensure that project metadata is available.
Under PEP 517, call the backend hook to prepare the metadata.
Under legacy processing, call setup.py egg-info.
"""
assert self.source_dir
with indent_log():
if self.use_pep517:
self.prepare_pep517_metadata()
else:
self.run_egg_info()
if not self.req:
if isinstance(parse_version(self.metadata["Version"]), Version):
op = "=="
else:
op = "==="
self.req = Requirement(
"".join([
self.metadata["Name"],
op,
self.metadata["Version"],
])
)
self._correct_build_location()
else:
metadata_name = canonicalize_name(self.metadata["Name"])
if canonicalize_name(self.req.name) != metadata_name:
logger.warning(
'Generating metadata for package %s '
'produced metadata for project name %s. Fix your '
'#egg=%s fragments.',
self.name, metadata_name, self.name
)
self.req = Requirement(metadata_name) | [
"def",
"prepare_metadata",
"(",
"self",
")",
":",
"# type: () -> None",
"assert",
"self",
".",
"source_dir",
"with",
"indent_log",
"(",
")",
":",
"if",
"self",
".",
"use_pep517",
":",
"self",
".",
"prepare_pep517_metadata",
"(",
")",
"else",
":",
"self",
".",
"run_egg_info",
"(",
")",
"if",
"not",
"self",
".",
"req",
":",
"if",
"isinstance",
"(",
"parse_version",
"(",
"self",
".",
"metadata",
"[",
"\"Version\"",
"]",
")",
",",
"Version",
")",
":",
"op",
"=",
"\"==\"",
"else",
":",
"op",
"=",
"\"===\"",
"self",
".",
"req",
"=",
"Requirement",
"(",
"\"\"",
".",
"join",
"(",
"[",
"self",
".",
"metadata",
"[",
"\"Name\"",
"]",
",",
"op",
",",
"self",
".",
"metadata",
"[",
"\"Version\"",
"]",
",",
"]",
")",
")",
"self",
".",
"_correct_build_location",
"(",
")",
"else",
":",
"metadata_name",
"=",
"canonicalize_name",
"(",
"self",
".",
"metadata",
"[",
"\"Name\"",
"]",
")",
"if",
"canonicalize_name",
"(",
"self",
".",
"req",
".",
"name",
")",
"!=",
"metadata_name",
":",
"logger",
".",
"warning",
"(",
"'Generating metadata for package %s '",
"'produced metadata for project name %s. Fix your '",
"'#egg=%s fragments.'",
",",
"self",
".",
"name",
",",
"metadata_name",
",",
"self",
".",
"name",
")",
"self",
".",
"req",
"=",
"Requirement",
"(",
"metadata_name",
")"
] | 34.421053 | 17 |
def is_data_diverging(data_container):
"""
We want to use this to check whether the data are diverging or not.
This is a simple check, can be made much more sophisticated.
:param data_container: A generic container of data points.
:type data_container: `iterable`
"""
assert infer_data_type(data_container) in [
"ordinal",
"continuous",
], "Data type should be ordinal or continuous"
# Check whether the data contains negative and positive values.
has_negative = False
has_positive = False
for i in data_container:
if i < 0:
has_negative = True
elif i > 0:
has_positive = True
if has_negative and has_positive:
return True
else:
return False | [
"def",
"is_data_diverging",
"(",
"data_container",
")",
":",
"assert",
"infer_data_type",
"(",
"data_container",
")",
"in",
"[",
"\"ordinal\"",
",",
"\"continuous\"",
",",
"]",
",",
"\"Data type should be ordinal or continuous\"",
"# Check whether the data contains negative and positive values.",
"has_negative",
"=",
"False",
"has_positive",
"=",
"False",
"for",
"i",
"in",
"data_container",
":",
"if",
"i",
"<",
"0",
":",
"has_negative",
"=",
"True",
"elif",
"i",
">",
"0",
":",
"has_positive",
"=",
"True",
"if",
"has_negative",
"and",
"has_positive",
":",
"return",
"True",
"else",
":",
"return",
"False"
] | 28.730769 | 18.038462 |
def ConsultarUltimoComprobante(self, tipo_cbte=151, pto_vta=1):
"Consulta el último No de Comprobante registrado"
ret = self.client.consultarUltimoNroComprobantePorPtoVta(
auth={
'token': self.Token, 'sign': self.Sign,
'cuit': self.Cuit, },
solicitud={
'puntoVenta': pto_vta,
'tipoComprobante': tipo_cbte},
)
ret = ret['respuesta']
self.__analizar_errores(ret)
self.NroComprobante = ret['nroComprobante']
return True | [
"def",
"ConsultarUltimoComprobante",
"(",
"self",
",",
"tipo_cbte",
"=",
"151",
",",
"pto_vta",
"=",
"1",
")",
":",
"ret",
"=",
"self",
".",
"client",
".",
"consultarUltimoNroComprobantePorPtoVta",
"(",
"auth",
"=",
"{",
"'token'",
":",
"self",
".",
"Token",
",",
"'sign'",
":",
"self",
".",
"Sign",
",",
"'cuit'",
":",
"self",
".",
"Cuit",
",",
"}",
",",
"solicitud",
"=",
"{",
"'puntoVenta'",
":",
"pto_vta",
",",
"'tipoComprobante'",
":",
"tipo_cbte",
"}",
",",
")",
"ret",
"=",
"ret",
"[",
"'respuesta'",
"]",
"self",
".",
"__analizar_errores",
"(",
"ret",
")",
"self",
".",
"NroComprobante",
"=",
"ret",
"[",
"'nroComprobante'",
"]",
"return",
"True"
] | 43.357143 | 14.357143 |
def delta(n, d=None, center=0):
""" Create TT-vector for delta-function :math:`\\delta(x - x_0)`. """
if isinstance(n, six.integer_types):
n = [n]
if d is None:
n0 = _np.asanyarray(n, dtype=_np.int32)
else:
n0 = _np.array(n * d, dtype=_np.int32)
d = n0.size
if center < 0:
cind = [0] * d
else:
cind = []
for i in xrange(d):
cind.append(center % n0[i])
center //= n0[i]
if center > 0:
cind = [0] * d
cr = []
for i in xrange(d):
cur_core = _np.zeros((1, n0[i], 1))
cur_core[0, cind[i], 0] = 1
cr.append(cur_core)
return _vector.vector.from_list(cr) | [
"def",
"delta",
"(",
"n",
",",
"d",
"=",
"None",
",",
"center",
"=",
"0",
")",
":",
"if",
"isinstance",
"(",
"n",
",",
"six",
".",
"integer_types",
")",
":",
"n",
"=",
"[",
"n",
"]",
"if",
"d",
"is",
"None",
":",
"n0",
"=",
"_np",
".",
"asanyarray",
"(",
"n",
",",
"dtype",
"=",
"_np",
".",
"int32",
")",
"else",
":",
"n0",
"=",
"_np",
".",
"array",
"(",
"n",
"*",
"d",
",",
"dtype",
"=",
"_np",
".",
"int32",
")",
"d",
"=",
"n0",
".",
"size",
"if",
"center",
"<",
"0",
":",
"cind",
"=",
"[",
"0",
"]",
"*",
"d",
"else",
":",
"cind",
"=",
"[",
"]",
"for",
"i",
"in",
"xrange",
"(",
"d",
")",
":",
"cind",
".",
"append",
"(",
"center",
"%",
"n0",
"[",
"i",
"]",
")",
"center",
"//=",
"n0",
"[",
"i",
"]",
"if",
"center",
">",
"0",
":",
"cind",
"=",
"[",
"0",
"]",
"*",
"d",
"cr",
"=",
"[",
"]",
"for",
"i",
"in",
"xrange",
"(",
"d",
")",
":",
"cur_core",
"=",
"_np",
".",
"zeros",
"(",
"(",
"1",
",",
"n0",
"[",
"i",
"]",
",",
"1",
")",
")",
"cur_core",
"[",
"0",
",",
"cind",
"[",
"i",
"]",
",",
"0",
"]",
"=",
"1",
"cr",
".",
"append",
"(",
"cur_core",
")",
"return",
"_vector",
".",
"vector",
".",
"from_list",
"(",
"cr",
")"
] | 27.16 | 15.44 |
def run_canu(self):
'''Runs canu instead of spades'''
cmd = self._make_canu_command(self.outdir,'canu')
ok, errs = common.syscall(cmd, verbose=self.verbose, allow_fail=False)
if not ok:
raise Error('Error running Canu.')
original_contigs = os.path.join(self.outdir, 'canu.contigs.fasta')
renamed_contigs = os.path.join(self.outdir, 'contigs.fasta')
Assembler._rename_canu_contigs(original_contigs, renamed_contigs)
original_gfa = os.path.join(self.outdir, 'canu.contigs.gfa')
renamed_gfa = os.path.join(self.outdir, 'contigs.gfa')
os.rename(original_gfa, renamed_gfa) | [
"def",
"run_canu",
"(",
"self",
")",
":",
"cmd",
"=",
"self",
".",
"_make_canu_command",
"(",
"self",
".",
"outdir",
",",
"'canu'",
")",
"ok",
",",
"errs",
"=",
"common",
".",
"syscall",
"(",
"cmd",
",",
"verbose",
"=",
"self",
".",
"verbose",
",",
"allow_fail",
"=",
"False",
")",
"if",
"not",
"ok",
":",
"raise",
"Error",
"(",
"'Error running Canu.'",
")",
"original_contigs",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"outdir",
",",
"'canu.contigs.fasta'",
")",
"renamed_contigs",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"outdir",
",",
"'contigs.fasta'",
")",
"Assembler",
".",
"_rename_canu_contigs",
"(",
"original_contigs",
",",
"renamed_contigs",
")",
"original_gfa",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"outdir",
",",
"'canu.contigs.gfa'",
")",
"renamed_gfa",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"outdir",
",",
"'contigs.gfa'",
")",
"os",
".",
"rename",
"(",
"original_gfa",
",",
"renamed_gfa",
")"
] | 49.846154 | 22.615385 |
def main():
"""
Simple command-line program for powering on virtual machines on a system.
"""
args = GetArgs()
if args.password:
password = args.password
else:
password = getpass.getpass(prompt='Enter password for host %s and user %s: ' % (args.host,args.user))
try:
vmnames = args.vmname
if not len(vmnames):
print("No virtual machine specified for poweron")
sys.exit()
context = None
if hasattr(ssl, '_create_unverified_context'):
context = ssl._create_unverified_context()
si = SmartConnect(host=args.host,
user=args.user,
pwd=password,
port=int(args.port),
sslContext=context)
if not si:
print("Cannot connect to specified host using specified username and password")
sys.exit()
atexit.register(Disconnect, si)
# Retreive the list of Virtual Machines from the inventory objects
# under the rootFolder
content = si.content
objView = content.viewManager.CreateContainerView(content.rootFolder,
[vim.VirtualMachine],
True)
vmList = objView.view
objView.Destroy()
# Find the vm and power it on
tasks = [vm.PowerOn() for vm in vmList if vm.name in vmnames]
# Wait for power on to complete
WaitForTasks(tasks, si)
print("Virtual Machine(s) have been powered on successfully")
except vmodl.MethodFault as e:
print("Caught vmodl fault : " + e.msg)
except Exception as e:
print("Caught Exception : " + str(e)) | [
"def",
"main",
"(",
")",
":",
"args",
"=",
"GetArgs",
"(",
")",
"if",
"args",
".",
"password",
":",
"password",
"=",
"args",
".",
"password",
"else",
":",
"password",
"=",
"getpass",
".",
"getpass",
"(",
"prompt",
"=",
"'Enter password for host %s and user %s: '",
"%",
"(",
"args",
".",
"host",
",",
"args",
".",
"user",
")",
")",
"try",
":",
"vmnames",
"=",
"args",
".",
"vmname",
"if",
"not",
"len",
"(",
"vmnames",
")",
":",
"print",
"(",
"\"No virtual machine specified for poweron\"",
")",
"sys",
".",
"exit",
"(",
")",
"context",
"=",
"None",
"if",
"hasattr",
"(",
"ssl",
",",
"'_create_unverified_context'",
")",
":",
"context",
"=",
"ssl",
".",
"_create_unverified_context",
"(",
")",
"si",
"=",
"SmartConnect",
"(",
"host",
"=",
"args",
".",
"host",
",",
"user",
"=",
"args",
".",
"user",
",",
"pwd",
"=",
"password",
",",
"port",
"=",
"int",
"(",
"args",
".",
"port",
")",
",",
"sslContext",
"=",
"context",
")",
"if",
"not",
"si",
":",
"print",
"(",
"\"Cannot connect to specified host using specified username and password\"",
")",
"sys",
".",
"exit",
"(",
")",
"atexit",
".",
"register",
"(",
"Disconnect",
",",
"si",
")",
"# Retreive the list of Virtual Machines from the inventory objects",
"# under the rootFolder",
"content",
"=",
"si",
".",
"content",
"objView",
"=",
"content",
".",
"viewManager",
".",
"CreateContainerView",
"(",
"content",
".",
"rootFolder",
",",
"[",
"vim",
".",
"VirtualMachine",
"]",
",",
"True",
")",
"vmList",
"=",
"objView",
".",
"view",
"objView",
".",
"Destroy",
"(",
")",
"# Find the vm and power it on",
"tasks",
"=",
"[",
"vm",
".",
"PowerOn",
"(",
")",
"for",
"vm",
"in",
"vmList",
"if",
"vm",
".",
"name",
"in",
"vmnames",
"]",
"# Wait for power on to complete",
"WaitForTasks",
"(",
"tasks",
",",
"si",
")",
"print",
"(",
"\"Virtual Machine(s) have been powered on successfully\"",
")",
"except",
"vmodl",
".",
"MethodFault",
"as",
"e",
":",
"print",
"(",
"\"Caught vmodl fault : \"",
"+",
"e",
".",
"msg",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"Caught Exception : \"",
"+",
"str",
"(",
"e",
")",
")"
] | 32.823529 | 20.941176 |
def _make_connect(module, args, kwargs):
"""
Returns a function capable of making connections with a particular
driver given the supplied credentials.
"""
# pylint: disable-msg=W0142
return functools.partial(module.connect, *args, **kwargs) | [
"def",
"_make_connect",
"(",
"module",
",",
"args",
",",
"kwargs",
")",
":",
"# pylint: disable-msg=W0142",
"return",
"functools",
".",
"partial",
"(",
"module",
".",
"connect",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 36.857143 | 8.857143 |
def update_cb(self, context, t, idx, userdata):
"""A sink property changed, calls request_update"""
if t & PA_SUBSCRIPTION_EVENT_FACILITY_MASK == PA_SUBSCRIPTION_EVENT_SERVER:
pa_operation_unref(
pa_context_get_server_info(context, self._server_info_cb, None))
self.request_update(context) | [
"def",
"update_cb",
"(",
"self",
",",
"context",
",",
"t",
",",
"idx",
",",
"userdata",
")",
":",
"if",
"t",
"&",
"PA_SUBSCRIPTION_EVENT_FACILITY_MASK",
"==",
"PA_SUBSCRIPTION_EVENT_SERVER",
":",
"pa_operation_unref",
"(",
"pa_context_get_server_info",
"(",
"context",
",",
"self",
".",
"_server_info_cb",
",",
"None",
")",
")",
"self",
".",
"request_update",
"(",
"context",
")"
] | 42 | 22.875 |
def _set_scripts(self, host_metadata, scripts):
"""
Temporary method to set the host scripts
TODO:
remove once the "ovirt-scripts" option gets deprecated
Args:
host_metadata(dict): host metadata to set scripts in
Returns:
dict: the updated metadata
"""
scripts_key = 'deploy-scripts'
if 'ovirt-scritps' in host_metadata:
scripts_key = 'ovirt-scripts'
host_metadata[scripts_key] = scripts
return host_metadata | [
"def",
"_set_scripts",
"(",
"self",
",",
"host_metadata",
",",
"scripts",
")",
":",
"scripts_key",
"=",
"'deploy-scripts'",
"if",
"'ovirt-scritps'",
"in",
"host_metadata",
":",
"scripts_key",
"=",
"'ovirt-scripts'",
"host_metadata",
"[",
"scripts_key",
"]",
"=",
"scripts",
"return",
"host_metadata"
] | 27.473684 | 17.263158 |
def node_get_args(node):
"""Return an ordered mapping from params to args"""
obj = node[OBJ]
key = node[KEY]
boundargs = obj.formula.signature.bind(*key)
boundargs.apply_defaults()
return boundargs.arguments | [
"def",
"node_get_args",
"(",
"node",
")",
":",
"obj",
"=",
"node",
"[",
"OBJ",
"]",
"key",
"=",
"node",
"[",
"KEY",
"]",
"boundargs",
"=",
"obj",
".",
"formula",
".",
"signature",
".",
"bind",
"(",
"*",
"key",
")",
"boundargs",
".",
"apply_defaults",
"(",
")",
"return",
"boundargs",
".",
"arguments"
] | 32.142857 | 12.285714 |
def calculate_sun(self, month, day, hour, is_solar_time=False):
"""Get Sun data for an hour of the year.
Args:
month: An integer between 1-12
day: An integer between 1-31
hour: A positive number between 0..23
is_solar_time: A boolean to indicate if the input hour is solar time.
(Default: False)
Returns:
A sun object for this particular time
"""
datetime = DateTime(month, day, *self._calculate_hour_and_minute(hour),
leap_year=self.is_leap_year)
return self.calculate_sun_from_date_time(datetime, is_solar_time) | [
"def",
"calculate_sun",
"(",
"self",
",",
"month",
",",
"day",
",",
"hour",
",",
"is_solar_time",
"=",
"False",
")",
":",
"datetime",
"=",
"DateTime",
"(",
"month",
",",
"day",
",",
"*",
"self",
".",
"_calculate_hour_and_minute",
"(",
"hour",
")",
",",
"leap_year",
"=",
"self",
".",
"is_leap_year",
")",
"return",
"self",
".",
"calculate_sun_from_date_time",
"(",
"datetime",
",",
"is_solar_time",
")"
] | 40.75 | 19.4375 |
def _cached_css_compile(pattern, namespaces, custom, flags):
"""Cached CSS compile."""
custom_selectors = process_custom(custom)
return cm.SoupSieve(
pattern,
CSSParser(pattern, custom=custom_selectors, flags=flags).process_selectors(),
namespaces,
custom,
flags
) | [
"def",
"_cached_css_compile",
"(",
"pattern",
",",
"namespaces",
",",
"custom",
",",
"flags",
")",
":",
"custom_selectors",
"=",
"process_custom",
"(",
"custom",
")",
"return",
"cm",
".",
"SoupSieve",
"(",
"pattern",
",",
"CSSParser",
"(",
"pattern",
",",
"custom",
"=",
"custom_selectors",
",",
"flags",
"=",
"flags",
")",
".",
"process_selectors",
"(",
")",
",",
"namespaces",
",",
"custom",
",",
"flags",
")"
] | 28.272727 | 23.454545 |
def _get_convergence_plans(project, service_names):
'''
Get action executed for each container
:param project:
:param service_names:
:return:
'''
ret = {}
plans = project._get_convergence_plans(project.get_services(service_names),
ConvergenceStrategy.changed)
for cont in plans:
(action, container) = plans[cont]
if action == 'create':
ret[cont] = 'Creating container'
elif action == 'recreate':
ret[cont] = 'Re-creating container'
elif action == 'start':
ret[cont] = 'Starting container'
elif action == 'noop':
ret[cont] = 'Container is up to date'
return ret | [
"def",
"_get_convergence_plans",
"(",
"project",
",",
"service_names",
")",
":",
"ret",
"=",
"{",
"}",
"plans",
"=",
"project",
".",
"_get_convergence_plans",
"(",
"project",
".",
"get_services",
"(",
"service_names",
")",
",",
"ConvergenceStrategy",
".",
"changed",
")",
"for",
"cont",
"in",
"plans",
":",
"(",
"action",
",",
"container",
")",
"=",
"plans",
"[",
"cont",
"]",
"if",
"action",
"==",
"'create'",
":",
"ret",
"[",
"cont",
"]",
"=",
"'Creating container'",
"elif",
"action",
"==",
"'recreate'",
":",
"ret",
"[",
"cont",
"]",
"=",
"'Re-creating container'",
"elif",
"action",
"==",
"'start'",
":",
"ret",
"[",
"cont",
"]",
"=",
"'Starting container'",
"elif",
"action",
"==",
"'noop'",
":",
"ret",
"[",
"cont",
"]",
"=",
"'Container is up to date'",
"return",
"ret"
] | 32.318182 | 17.5 |
def token_perplexity_micro(eval_data, predictions, scores, learner='ignored'):
'''
Return the micro-averaged per-token perplexity `exp(-score / num_tokens)`
computed over the entire corpus, as a length-1 list of floats.
The log scores in `scores` should be base e (`exp`, `log`).
>>> refs = [Instance(None, ''),
... Instance(None, ''),
... Instance(None, '2')]
>>> scores = [np.log(1.0), np.log(0.25), np.log(1 / 64.)]
>>> perplexity = token_perplexity_micro(refs, None, scores)
>>> [round(p) for p in perplexity]
... # sequence perplexities: [1, 4, 64]
... # per-token perplexities: [1, 4, 8]
... # micro-average: gmean([1, 4, 8, 8])
[4.0]
'''
lens = np.array([len(_maybe_tokenize(inst.output)) + 1 for inst in eval_data])
return [np.exp(np.average(-np.array(scores) / lens, weights=lens))] | [
"def",
"token_perplexity_micro",
"(",
"eval_data",
",",
"predictions",
",",
"scores",
",",
"learner",
"=",
"'ignored'",
")",
":",
"lens",
"=",
"np",
".",
"array",
"(",
"[",
"len",
"(",
"_maybe_tokenize",
"(",
"inst",
".",
"output",
")",
")",
"+",
"1",
"for",
"inst",
"in",
"eval_data",
"]",
")",
"return",
"[",
"np",
".",
"exp",
"(",
"np",
".",
"average",
"(",
"-",
"np",
".",
"array",
"(",
"scores",
")",
"/",
"lens",
",",
"weights",
"=",
"lens",
")",
")",
"]"
] | 45.157895 | 21.263158 |
def buy(self, currencyPair, rate, amount, fillOrKill=None,
immediateOrCancel=None, postOnly=None):
"""Places a limit buy order in a given market. Required POST parameters
are "currencyPair", "rate", and "amount". If successful, the method
will return the order number.
You may optionally set "fillOrKill", "immediateOrCancel", "postOnly"
to 1. A fill-or-kill order will either fill in its entirety or be
completely aborted. An immediate-or-cancel order can be partially or
completely filled, but any portion of the order that cannot be filled
immediately will be canceled rather than left on the order book.
A post-only order will only be placed if no portion of it fills
immediately; this guarantees you will never pay the taker fee on any
part of the order that fills."""
return self._private('buy', currencyPair=currencyPair, rate=rate,
amount=amount, fillOrKill=fillOrKill,
immediateOrCancel=immediateOrCancel,
postOnly=postOnly) | [
"def",
"buy",
"(",
"self",
",",
"currencyPair",
",",
"rate",
",",
"amount",
",",
"fillOrKill",
"=",
"None",
",",
"immediateOrCancel",
"=",
"None",
",",
"postOnly",
"=",
"None",
")",
":",
"return",
"self",
".",
"_private",
"(",
"'buy'",
",",
"currencyPair",
"=",
"currencyPair",
",",
"rate",
"=",
"rate",
",",
"amount",
"=",
"amount",
",",
"fillOrKill",
"=",
"fillOrKill",
",",
"immediateOrCancel",
"=",
"immediateOrCancel",
",",
"postOnly",
"=",
"postOnly",
")"
] | 65.411765 | 23.470588 |
def get_all_modified_on(chebi_ids):
'''Returns all modified on'''
all_modified_ons = [get_modified_on(chebi_id) for chebi_id in chebi_ids]
all_modified_ons = [modified_on for modified_on in all_modified_ons
if modified_on is not None]
return None if len(all_modified_ons) == 0 else sorted(all_modified_ons)[-1] | [
"def",
"get_all_modified_on",
"(",
"chebi_ids",
")",
":",
"all_modified_ons",
"=",
"[",
"get_modified_on",
"(",
"chebi_id",
")",
"for",
"chebi_id",
"in",
"chebi_ids",
"]",
"all_modified_ons",
"=",
"[",
"modified_on",
"for",
"modified_on",
"in",
"all_modified_ons",
"if",
"modified_on",
"is",
"not",
"None",
"]",
"return",
"None",
"if",
"len",
"(",
"all_modified_ons",
")",
"==",
"0",
"else",
"sorted",
"(",
"all_modified_ons",
")",
"[",
"-",
"1",
"]"
] | 57.5 | 21.5 |
def tobool(obj, default=False):
'''
Returns a bool representation of `obj`: if `obj` is not a string,
it is returned cast to a boolean by calling `bool()`. Otherwise, it
is checked for "truthy" or "falsy" values, and that is returned. If
it is not truthy or falsy, `default` is returned (which defaults to
``False``) unless `default` is set to ``ValueError``, in which case
an exception is raised.
'''
if isinstance(obj, bool):
return obj
if not isstr(obj):
return bool(obj)
lobj = obj.lower()
if lobj in truthy:
return True
if lobj in falsy:
return False
if default is ValueError:
raise ValueError('invalid literal for tobool(): %r' % (obj,))
return default | [
"def",
"tobool",
"(",
"obj",
",",
"default",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"bool",
")",
":",
"return",
"obj",
"if",
"not",
"isstr",
"(",
"obj",
")",
":",
"return",
"bool",
"(",
"obj",
")",
"lobj",
"=",
"obj",
".",
"lower",
"(",
")",
"if",
"lobj",
"in",
"truthy",
":",
"return",
"True",
"if",
"lobj",
"in",
"falsy",
":",
"return",
"False",
"if",
"default",
"is",
"ValueError",
":",
"raise",
"ValueError",
"(",
"'invalid literal for tobool(): %r'",
"%",
"(",
"obj",
",",
")",
")",
"return",
"default"
] | 31.272727 | 24 |
def generate_options_map():
"""Generate an ``options_map` to pass to ``extract_from_dir``
This is the options_map that's used to generate a Jinja2 environment. We
want to generate and environment for extraction that's the same as the
environment we use for rendering.
This allows developers to explicitly set a ``JINJA2_CONFIG`` in settings.
If that's not there, then this will pull the relevant bits from the first
Jinja2 backend listed in ``TEMPLATES``.
"""
try:
return settings.PUENTE['JINJA2_CONFIG']
except KeyError:
pass
# If using Django 1.8+, we can skim the TEMPLATES for a backend that we
# know about and extract the settings from that.
for tmpl_config in getattr(settings, 'TEMPLATES', []):
try:
backend = tmpl_config['BACKEND']
except KeyError:
continue
if backend == 'django_jinja.backend.Jinja2':
extensions = tmpl_config.get('OPTIONS', {}).get('extensions', [])
return {
'**.*': {
'extensions': ','.join(extensions),
'silent': 'False',
}
}
# If this is Django 1.7 and Jingo, try to grab extensions from
# JINJA_CONFIG.
if getattr(settings, 'JINJA_CONFIG'):
jinja_config = settings.JINJA_CONFIG
if callable(jinja_config):
jinja_config = jinja_config()
return {
'**.*': {
'extensions': ','.join(jinja_config['extensions']),
'silent': 'False',
}
}
raise CommandError(
'No valid jinja2 config found in settings. See configuration '
'documentation.'
) | [
"def",
"generate_options_map",
"(",
")",
":",
"try",
":",
"return",
"settings",
".",
"PUENTE",
"[",
"'JINJA2_CONFIG'",
"]",
"except",
"KeyError",
":",
"pass",
"# If using Django 1.8+, we can skim the TEMPLATES for a backend that we",
"# know about and extract the settings from that.",
"for",
"tmpl_config",
"in",
"getattr",
"(",
"settings",
",",
"'TEMPLATES'",
",",
"[",
"]",
")",
":",
"try",
":",
"backend",
"=",
"tmpl_config",
"[",
"'BACKEND'",
"]",
"except",
"KeyError",
":",
"continue",
"if",
"backend",
"==",
"'django_jinja.backend.Jinja2'",
":",
"extensions",
"=",
"tmpl_config",
".",
"get",
"(",
"'OPTIONS'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'extensions'",
",",
"[",
"]",
")",
"return",
"{",
"'**.*'",
":",
"{",
"'extensions'",
":",
"','",
".",
"join",
"(",
"extensions",
")",
",",
"'silent'",
":",
"'False'",
",",
"}",
"}",
"# If this is Django 1.7 and Jingo, try to grab extensions from",
"# JINJA_CONFIG.",
"if",
"getattr",
"(",
"settings",
",",
"'JINJA_CONFIG'",
")",
":",
"jinja_config",
"=",
"settings",
".",
"JINJA_CONFIG",
"if",
"callable",
"(",
"jinja_config",
")",
":",
"jinja_config",
"=",
"jinja_config",
"(",
")",
"return",
"{",
"'**.*'",
":",
"{",
"'extensions'",
":",
"','",
".",
"join",
"(",
"jinja_config",
"[",
"'extensions'",
"]",
")",
",",
"'silent'",
":",
"'False'",
",",
"}",
"}",
"raise",
"CommandError",
"(",
"'No valid jinja2 config found in settings. See configuration '",
"'documentation.'",
")"
] | 32.921569 | 21.666667 |
def make_pdb(self):
"""Generates a PDB string for the `Monomer`."""
pdb_str = write_pdb(
[self], ' ' if not self.ampal_parent else self.ampal_parent.id)
return pdb_str | [
"def",
"make_pdb",
"(",
"self",
")",
":",
"pdb_str",
"=",
"write_pdb",
"(",
"[",
"self",
"]",
",",
"' '",
"if",
"not",
"self",
".",
"ampal_parent",
"else",
"self",
".",
"ampal_parent",
".",
"id",
")",
"return",
"pdb_str"
] | 39.8 | 17.2 |
def clean_bytes(line):
"""
Cleans a byte sequence of shell directives and decodes it.
"""
text = line.decode('utf-8').replace('\r', '').strip('\n')
return re.sub(r'\x1b[^m]*m', '', text).replace("``", "`\u200b`").strip('\n') | [
"def",
"clean_bytes",
"(",
"line",
")",
":",
"text",
"=",
"line",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"replace",
"(",
"'\\r'",
",",
"''",
")",
".",
"strip",
"(",
"'\\n'",
")",
"return",
"re",
".",
"sub",
"(",
"r'\\x1b[^m]*m'",
",",
"''",
",",
"text",
")",
".",
"replace",
"(",
"\"``\"",
",",
"\"`\\u200b`\"",
")",
".",
"strip",
"(",
"'\\n'",
")"
] | 37 | 21.857143 |
def setPalette(self, palette):
"""
Sets the palette for this widget and the scroll area.
:param palette | <QPalette>
"""
super(XPopupWidget, self).setPalette(palette)
self._scrollArea.setPalette(palette) | [
"def",
"setPalette",
"(",
"self",
",",
"palette",
")",
":",
"super",
"(",
"XPopupWidget",
",",
"self",
")",
".",
"setPalette",
"(",
"palette",
")",
"self",
".",
"_scrollArea",
".",
"setPalette",
"(",
"palette",
")"
] | 33.125 | 10.125 |
def _check_cv(self, val, random_state=None):
"""
Validate the cv method passed in. Returns the split strategy if no
validation exception is raised.
"""
# Use default splitter in this case
if val is None: val = 0.1
if isinstance(val, float) and val <= 1.0:
return ShuffleSplit(
n_splits=1, test_size=val, random_state=random_state
)
if hasattr(val, "split") and hasattr(val, "get_n_splits"):
if random_state is not None and hasattr(val, "random_state"):
val.random_state = random_state
return val
raise YellowbrickValueError(
"'{}' is not a valid cv splitter".format(type(val))
) | [
"def",
"_check_cv",
"(",
"self",
",",
"val",
",",
"random_state",
"=",
"None",
")",
":",
"# Use default splitter in this case",
"if",
"val",
"is",
"None",
":",
"val",
"=",
"0.1",
"if",
"isinstance",
"(",
"val",
",",
"float",
")",
"and",
"val",
"<=",
"1.0",
":",
"return",
"ShuffleSplit",
"(",
"n_splits",
"=",
"1",
",",
"test_size",
"=",
"val",
",",
"random_state",
"=",
"random_state",
")",
"if",
"hasattr",
"(",
"val",
",",
"\"split\"",
")",
"and",
"hasattr",
"(",
"val",
",",
"\"get_n_splits\"",
")",
":",
"if",
"random_state",
"is",
"not",
"None",
"and",
"hasattr",
"(",
"val",
",",
"\"random_state\"",
")",
":",
"val",
".",
"random_state",
"=",
"random_state",
"return",
"val",
"raise",
"YellowbrickValueError",
"(",
"\"'{}' is not a valid cv splitter\"",
".",
"format",
"(",
"type",
"(",
"val",
")",
")",
")"
] | 34.904762 | 18.238095 |
def label(self, label):
"""
set the label
"""
if self.direction in ['i'] and label is not None:
raise ValueError("label not accepted for indep dimension")
if label is None:
self._label = label
return
if not isinstance(label, str):
try:
label = str(label)
except:
raise TypeError("label must be of type str")
self._label = label | [
"def",
"label",
"(",
"self",
",",
"label",
")",
":",
"if",
"self",
".",
"direction",
"in",
"[",
"'i'",
"]",
"and",
"label",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"\"label not accepted for indep dimension\"",
")",
"if",
"label",
"is",
"None",
":",
"self",
".",
"_label",
"=",
"label",
"return",
"if",
"not",
"isinstance",
"(",
"label",
",",
"str",
")",
":",
"try",
":",
"label",
"=",
"str",
"(",
"label",
")",
"except",
":",
"raise",
"TypeError",
"(",
"\"label must be of type str\"",
")",
"self",
".",
"_label",
"=",
"label"
] | 24.263158 | 19.736842 |
def _trace_filename(self):
"""
Creates trace filename.
"""
dir_stub = ''
if self.output_directory is not None:
dir_stub = self.output_directory
if self.each_time:
filename = '{0}_{1}.json'.format(
self.output_file_name, self.counter)
else:
filename = '{0}.json'.format(self.output_file_name)
return os.path.join(dir_stub, filename) | [
"def",
"_trace_filename",
"(",
"self",
")",
":",
"dir_stub",
"=",
"''",
"if",
"self",
".",
"output_directory",
"is",
"not",
"None",
":",
"dir_stub",
"=",
"self",
".",
"output_directory",
"if",
"self",
".",
"each_time",
":",
"filename",
"=",
"'{0}_{1}.json'",
".",
"format",
"(",
"self",
".",
"output_file_name",
",",
"self",
".",
"counter",
")",
"else",
":",
"filename",
"=",
"'{0}.json'",
".",
"format",
"(",
"self",
".",
"output_file_name",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"dir_stub",
",",
"filename",
")"
] | 33.461538 | 10.692308 |
async def set_reply_markup(msg: Dict, request: 'Request', stack: 'Stack') \
-> None:
"""
Add the "reply markup" to a message from the layers
:param msg: Message dictionary
:param request: Current request being replied
:param stack: Stack to analyze
"""
from bernard.platforms.telegram.layers import InlineKeyboard, \
ReplyKeyboard, \
ReplyKeyboardRemove
try:
keyboard = stack.get_layer(InlineKeyboard)
except KeyError:
pass
else:
msg['reply_markup'] = await keyboard.serialize(request)
try:
keyboard = stack.get_layer(ReplyKeyboard)
except KeyError:
pass
else:
msg['reply_markup'] = await keyboard.serialize(request)
try:
remove = stack.get_layer(ReplyKeyboardRemove)
except KeyError:
pass
else:
msg['reply_markup'] = remove.serialize() | [
"async",
"def",
"set_reply_markup",
"(",
"msg",
":",
"Dict",
",",
"request",
":",
"'Request'",
",",
"stack",
":",
"'Stack'",
")",
"->",
"None",
":",
"from",
"bernard",
".",
"platforms",
".",
"telegram",
".",
"layers",
"import",
"InlineKeyboard",
",",
"ReplyKeyboard",
",",
"ReplyKeyboardRemove",
"try",
":",
"keyboard",
"=",
"stack",
".",
"get_layer",
"(",
"InlineKeyboard",
")",
"except",
"KeyError",
":",
"pass",
"else",
":",
"msg",
"[",
"'reply_markup'",
"]",
"=",
"await",
"keyboard",
".",
"serialize",
"(",
"request",
")",
"try",
":",
"keyboard",
"=",
"stack",
".",
"get_layer",
"(",
"ReplyKeyboard",
")",
"except",
"KeyError",
":",
"pass",
"else",
":",
"msg",
"[",
"'reply_markup'",
"]",
"=",
"await",
"keyboard",
".",
"serialize",
"(",
"request",
")",
"try",
":",
"remove",
"=",
"stack",
".",
"get_layer",
"(",
"ReplyKeyboardRemove",
")",
"except",
"KeyError",
":",
"pass",
"else",
":",
"msg",
"[",
"'reply_markup'",
"]",
"=",
"remove",
".",
"serialize",
"(",
")"
] | 27.125 | 21.5625 |
def graph(self, ASres=None, padding=0, vspread=0.75, title="Multi-Traceroute Probe (MTR)", timestamp="", rtt=1, **kargs):
"""x.graph(ASres=conf.AS_resolver, other args):
ASres = None : Use AS default resolver => 'conf.AS_resolver'
ASres = AS_resolver() : default whois AS resolver (riswhois.ripe.net)
ASres = AS_resolver_cymru(): use whois.cymru.com whois database
ASres = AS_resolver(server="whois.ra.net")
padding: Show packets with padding as a red 3D-Box.
vspread: Vertical separation between nodes on graph.
title: Title text for the rendering graphic.
timestamp: Title Time Stamp text to appear below the Title text.
rtt: Display Round-Trip Times (msec) for Hops along trace edges.
format: Output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option.
figsize: w,h tuple in inches. See matplotlib documentation.
target: filename. If None, uses matplotlib to display.
prog: Which graphviz program to use."""
if self._asres is None:
self._asres = conf.AS_resolver
if (self._graphdef is None or # Remake the graph if there are any changes
self._graphasres != self._asres or
self._graphpadding != padding):
self.make_dot_graph(ASres, padding, vspread, title, timestamp, rtt)
return do_graph(self._graphdef, **kargs) | [
"def",
"graph",
"(",
"self",
",",
"ASres",
"=",
"None",
",",
"padding",
"=",
"0",
",",
"vspread",
"=",
"0.75",
",",
"title",
"=",
"\"Multi-Traceroute Probe (MTR)\"",
",",
"timestamp",
"=",
"\"\"",
",",
"rtt",
"=",
"1",
",",
"*",
"*",
"kargs",
")",
":",
"if",
"self",
".",
"_asres",
"is",
"None",
":",
"self",
".",
"_asres",
"=",
"conf",
".",
"AS_resolver",
"if",
"(",
"self",
".",
"_graphdef",
"is",
"None",
"or",
"# Remake the graph if there are any changes",
"self",
".",
"_graphasres",
"!=",
"self",
".",
"_asres",
"or",
"self",
".",
"_graphpadding",
"!=",
"padding",
")",
":",
"self",
".",
"make_dot_graph",
"(",
"ASres",
",",
"padding",
",",
"vspread",
",",
"title",
",",
"timestamp",
",",
"rtt",
")",
"return",
"do_graph",
"(",
"self",
".",
"_graphdef",
",",
"*",
"*",
"kargs",
")"
] | 60.869565 | 24.173913 |
def controlMsg(self, requestType, request, buffer, value = 0, index = 0, timeout = 100):
r"""Perform a control request to the default control pipe on a device.
Arguments:
requestType: specifies the direction of data flow, the type
of request, and the recipient.
request: specifies the request.
buffer: if the transfer is a write transfer, buffer is a sequence
with the transfer data, otherwise, buffer is the number of
bytes to read.
value: specific information to pass to the device. (default: 0)
index: specific information to pass to the device. (default: 0)
timeout: operation timeout in milliseconds. (default: 100)
Returns the number of bytes written.
"""
return self.dev.ctrl_transfer(
requestType,
request,
wValue = value,
wIndex = index,
data_or_wLength = buffer,
timeout = timeout) | [
"def",
"controlMsg",
"(",
"self",
",",
"requestType",
",",
"request",
",",
"buffer",
",",
"value",
"=",
"0",
",",
"index",
"=",
"0",
",",
"timeout",
"=",
"100",
")",
":",
"return",
"self",
".",
"dev",
".",
"ctrl_transfer",
"(",
"requestType",
",",
"request",
",",
"wValue",
"=",
"value",
",",
"wIndex",
"=",
"index",
",",
"data_or_wLength",
"=",
"buffer",
",",
"timeout",
"=",
"timeout",
")"
] | 48.863636 | 17.727273 |
def compute_group_colors(self):
"""Computes the group colors according to node colors"""
seen = set()
self.group_label_color = [
x for x in self.node_colors if not (x in seen or seen.add(x))
] | [
"def",
"compute_group_colors",
"(",
"self",
")",
":",
"seen",
"=",
"set",
"(",
")",
"self",
".",
"group_label_color",
"=",
"[",
"x",
"for",
"x",
"in",
"self",
".",
"node_colors",
"if",
"not",
"(",
"x",
"in",
"seen",
"or",
"seen",
".",
"add",
"(",
"x",
")",
")",
"]"
] | 38.5 | 16.5 |
def get_arrays(self, type_img):
''' Return arrays the region of interest
Args:
type_img (str): Either lola or wac.
Returns:
A tupple of three arrays ``(X,Y,Z)`` with ``X`` contains the
longitudes, ``Y`` contains the latitude and ``Z`` the values
extracted for the region of interest.
Note:
The argument has to be either lola or wac. Note case sensitive.
All return arrays have the same size.
All coordinates are in degree.
'''
if type_img.lower() == 'lola':
return LolaMap(self.ppdlola, *self.window, path_pdsfile=self.path_pdsfiles).image()
elif type_img.lower() == 'wac':
return WacMap(self.ppdwac, *self.window, path_pdsfile=self.path_pdsfiles).image()
else:
raise ValueError('The img type has to be either "Lola" or "Wac"') | [
"def",
"get_arrays",
"(",
"self",
",",
"type_img",
")",
":",
"if",
"type_img",
".",
"lower",
"(",
")",
"==",
"'lola'",
":",
"return",
"LolaMap",
"(",
"self",
".",
"ppdlola",
",",
"*",
"self",
".",
"window",
",",
"path_pdsfile",
"=",
"self",
".",
"path_pdsfiles",
")",
".",
"image",
"(",
")",
"elif",
"type_img",
".",
"lower",
"(",
")",
"==",
"'wac'",
":",
"return",
"WacMap",
"(",
"self",
".",
"ppdwac",
",",
"*",
"self",
".",
"window",
",",
"path_pdsfile",
"=",
"self",
".",
"path_pdsfiles",
")",
".",
"image",
"(",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'The img type has to be either \"Lola\" or \"Wac\"'",
")"
] | 37.208333 | 26.041667 |
def fix_tags_on_cands_missing_reals(user_id, vos_dir, property):
"At the moment this just checks for a single user's missing reals. Easy to generalise it to all users."
con = context.get_context(vos_dir)
user_progress = []
listing = con.get_listing(tasks.get_suffix('reals'))
mpc_listing = con.get_listing('mpc')
for filename in listing:
if not filename.startswith('fk'):
user = storage.get_property(con.get_full_path(filename), property)
if (user is not None):
# and (user == user_id): # modify 'and' to generalise to all users with work in this directory
#realsfile = filename.replace('cands', 'reals')
#if not con.exists(realsfile):
# print filename, 'no reals file', realsfile
# go through the listing of .mpc files and see if any match this reals.astrom
is_present = False
for mpcfile in [f for f in mpc_listing if not f.startswith('fk')]:
if mpcfile.startswith(filename):
print filename, user, 'exists!', mpcfile
is_present = True
if not is_present:
user_progress.append(filename)
print filename, user, 'no mpc file'
storage.set_property(con.get_full_path(filename), property, None)
print 'Fixed files:', len(user_progress)
return | [
"def",
"fix_tags_on_cands_missing_reals",
"(",
"user_id",
",",
"vos_dir",
",",
"property",
")",
":",
"con",
"=",
"context",
".",
"get_context",
"(",
"vos_dir",
")",
"user_progress",
"=",
"[",
"]",
"listing",
"=",
"con",
".",
"get_listing",
"(",
"tasks",
".",
"get_suffix",
"(",
"'reals'",
")",
")",
"mpc_listing",
"=",
"con",
".",
"get_listing",
"(",
"'mpc'",
")",
"for",
"filename",
"in",
"listing",
":",
"if",
"not",
"filename",
".",
"startswith",
"(",
"'fk'",
")",
":",
"user",
"=",
"storage",
".",
"get_property",
"(",
"con",
".",
"get_full_path",
"(",
"filename",
")",
",",
"property",
")",
"if",
"(",
"user",
"is",
"not",
"None",
")",
":",
"# and (user == user_id): # modify 'and' to generalise to all users with work in this directory",
"#realsfile = filename.replace('cands', 'reals')",
"#if not con.exists(realsfile):",
"# print filename, 'no reals file', realsfile",
"# go through the listing of .mpc files and see if any match this reals.astrom",
"is_present",
"=",
"False",
"for",
"mpcfile",
"in",
"[",
"f",
"for",
"f",
"in",
"mpc_listing",
"if",
"not",
"f",
".",
"startswith",
"(",
"'fk'",
")",
"]",
":",
"if",
"mpcfile",
".",
"startswith",
"(",
"filename",
")",
":",
"print",
"filename",
",",
"user",
",",
"'exists!'",
",",
"mpcfile",
"is_present",
"=",
"True",
"if",
"not",
"is_present",
":",
"user_progress",
".",
"append",
"(",
"filename",
")",
"print",
"filename",
",",
"user",
",",
"'no mpc file'",
"storage",
".",
"set_property",
"(",
"con",
".",
"get_full_path",
"(",
"filename",
")",
",",
"property",
",",
"None",
")",
"print",
"'Fixed files:'",
",",
"len",
"(",
"user_progress",
")",
"return"
] | 47.833333 | 23.833333 |
def mine(self): # pragma: no cover
"""
Search for domain or URL related to the original URL or domain.
:return: The mined domains or URL.
:rtype: dict
"""
if PyFunceble.CONFIGURATION["mining"]:
# The mining is activated.
try:
# We get the history.
history = PyFunceble.requests.get(
self.to_get,
timeout=PyFunceble.CONFIGURATION["seconds_before_http_timeout"],
headers=self.headers,
).history
# We initiate a dictionnary which will save the
# list of mined links.
mined = {self.to_get_bare: []}
for element in history:
# We loop through the history.
# We update the element.
element = element.url
if PyFunceble.INTERN["to_test_type"] == "url":
# We are testing a full url.
# We get the element to append.
to_append = Check().is_url_valid(element, return_base=False)
elif PyFunceble.INTERN["to_test_type"] == "domain":
# We are testing a domain.
# We get the element to append.
to_append = Check().is_url_valid(element, return_base=True)
else:
raise Exception("Unknown tested.")
if to_append:
# There is something to append.
if to_append.endswith(":80"):
# The port is present.
# We get rid of it.
to_append = to_append[:-3]
if to_append != self.to_get_bare:
# The element to append is different as
# the element we are globally testing.
# We append the element to append to the
# list of mined links.
mined[self.to_get_bare].append(to_append)
if mined[self.to_get_bare]:
# There is something in the list of mined links.
# We return the whole element.
return mined
# There is nothing in the list of mined links.
# We return None.
return None
except (
PyFunceble.requests.ConnectionError,
PyFunceble.requests.exceptions.Timeout,
PyFunceble.requests.exceptions.InvalidURL,
PyFunceble.socket.timeout,
urllib3_exceptions.InvalidHeader,
UnicodeDecodeError, # The probability that this happend in production is minimal.
):
# Something went wrong.
# We return None.
return None
return None | [
"def",
"mine",
"(",
"self",
")",
":",
"# pragma: no cover",
"if",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"mining\"",
"]",
":",
"# The mining is activated.",
"try",
":",
"# We get the history.",
"history",
"=",
"PyFunceble",
".",
"requests",
".",
"get",
"(",
"self",
".",
"to_get",
",",
"timeout",
"=",
"PyFunceble",
".",
"CONFIGURATION",
"[",
"\"seconds_before_http_timeout\"",
"]",
",",
"headers",
"=",
"self",
".",
"headers",
",",
")",
".",
"history",
"# We initiate a dictionnary which will save the",
"# list of mined links.",
"mined",
"=",
"{",
"self",
".",
"to_get_bare",
":",
"[",
"]",
"}",
"for",
"element",
"in",
"history",
":",
"# We loop through the history.",
"# We update the element.",
"element",
"=",
"element",
".",
"url",
"if",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"==",
"\"url\"",
":",
"# We are testing a full url.",
"# We get the element to append.",
"to_append",
"=",
"Check",
"(",
")",
".",
"is_url_valid",
"(",
"element",
",",
"return_base",
"=",
"False",
")",
"elif",
"PyFunceble",
".",
"INTERN",
"[",
"\"to_test_type\"",
"]",
"==",
"\"domain\"",
":",
"# We are testing a domain.",
"# We get the element to append.",
"to_append",
"=",
"Check",
"(",
")",
".",
"is_url_valid",
"(",
"element",
",",
"return_base",
"=",
"True",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Unknown tested.\"",
")",
"if",
"to_append",
":",
"# There is something to append.",
"if",
"to_append",
".",
"endswith",
"(",
"\":80\"",
")",
":",
"# The port is present.",
"# We get rid of it.",
"to_append",
"=",
"to_append",
"[",
":",
"-",
"3",
"]",
"if",
"to_append",
"!=",
"self",
".",
"to_get_bare",
":",
"# The element to append is different as",
"# the element we are globally testing.",
"# We append the element to append to the",
"# list of mined links.",
"mined",
"[",
"self",
".",
"to_get_bare",
"]",
".",
"append",
"(",
"to_append",
")",
"if",
"mined",
"[",
"self",
".",
"to_get_bare",
"]",
":",
"# There is something in the list of mined links.",
"# We return the whole element.",
"return",
"mined",
"# There is nothing in the list of mined links.",
"# We return None.",
"return",
"None",
"except",
"(",
"PyFunceble",
".",
"requests",
".",
"ConnectionError",
",",
"PyFunceble",
".",
"requests",
".",
"exceptions",
".",
"Timeout",
",",
"PyFunceble",
".",
"requests",
".",
"exceptions",
".",
"InvalidURL",
",",
"PyFunceble",
".",
"socket",
".",
"timeout",
",",
"urllib3_exceptions",
".",
"InvalidHeader",
",",
"UnicodeDecodeError",
",",
"# The probability that this happend in production is minimal.",
")",
":",
"# Something went wrong.",
"# We return None.",
"return",
"None",
"return",
"None"
] | 35.771084 | 21 |
def command_line_parsed(
self,
available_plugins: Set[Type[Plugin]],
args_command_list: Any,
malformed_servers: List[ServerStringParsingError]
) -> None:
"""The CLI was just started and successfully parsed the command line.
""" | [
"def",
"command_line_parsed",
"(",
"self",
",",
"available_plugins",
":",
"Set",
"[",
"Type",
"[",
"Plugin",
"]",
"]",
",",
"args_command_list",
":",
"Any",
",",
"malformed_servers",
":",
"List",
"[",
"ServerStringParsingError",
"]",
")",
"->",
"None",
":"
] | 36 | 12.5 |
def cmd_ssh_user(tar_aminame, inst_name):
"""Calculate instance login-username based on image-name.
Args:
tar_aminame (str): name of the image instance created with.
inst_name (str): name of the instance.
Returns:
username (str): name for ssh based on AMI-name.
"""
if tar_aminame == "Unknown":
tar_aminame = inst_name
# first 5 chars of AMI-name can be anywhere in AMI-Name
userlu = {"ubunt": "ubuntu", "debia": "admin", "fedor": "root",
"cento": "centos", "openb": "root"}
usertemp = ['name'] + [value for key, value in list(userlu.items())
if key in tar_aminame.lower()]
usertemp = dict(zip(usertemp[::2], usertemp[1::2]))
username = usertemp.get('name', 'ec2-user')
debg.dprint("loginuser Calculated: ", username)
return username | [
"def",
"cmd_ssh_user",
"(",
"tar_aminame",
",",
"inst_name",
")",
":",
"if",
"tar_aminame",
"==",
"\"Unknown\"",
":",
"tar_aminame",
"=",
"inst_name",
"# first 5 chars of AMI-name can be anywhere in AMI-Name",
"userlu",
"=",
"{",
"\"ubunt\"",
":",
"\"ubuntu\"",
",",
"\"debia\"",
":",
"\"admin\"",
",",
"\"fedor\"",
":",
"\"root\"",
",",
"\"cento\"",
":",
"\"centos\"",
",",
"\"openb\"",
":",
"\"root\"",
"}",
"usertemp",
"=",
"[",
"'name'",
"]",
"+",
"[",
"value",
"for",
"key",
",",
"value",
"in",
"list",
"(",
"userlu",
".",
"items",
"(",
")",
")",
"if",
"key",
"in",
"tar_aminame",
".",
"lower",
"(",
")",
"]",
"usertemp",
"=",
"dict",
"(",
"zip",
"(",
"usertemp",
"[",
":",
":",
"2",
"]",
",",
"usertemp",
"[",
"1",
":",
":",
"2",
"]",
")",
")",
"username",
"=",
"usertemp",
".",
"get",
"(",
"'name'",
",",
"'ec2-user'",
")",
"debg",
".",
"dprint",
"(",
"\"loginuser Calculated: \"",
",",
"username",
")",
"return",
"username"
] | 39.809524 | 17.238095 |
def sphinx_class(self):
"""Redefine sphinx class so documentation links to instance_class"""
classdoc = ':class:`{cls} <{pref}.{cls}>`'.format(
cls=self.instance_class.__name__,
pref=self.instance_class.__module__,
)
return classdoc | [
"def",
"sphinx_class",
"(",
"self",
")",
":",
"classdoc",
"=",
"':class:`{cls} <{pref}.{cls}>`'",
".",
"format",
"(",
"cls",
"=",
"self",
".",
"instance_class",
".",
"__name__",
",",
"pref",
"=",
"self",
".",
"instance_class",
".",
"__module__",
",",
")",
"return",
"classdoc"
] | 40.285714 | 13.714286 |
def random_sent(self, index):
"""
Get one sample from corpus consisting of two sentences. With prob. 50% these are two subsequent sentences
from one doc. With 50% the second sentence will be a random one from another doc.
:param index: int, index of sample.
:return: (str, str, int), sentence 1, sentence 2, isNextSentence Label
"""
t1, t2 = self.get_corpus_line(index)
if random.random() > 0.5:
label = 0
else:
t2 = self.get_random_line()
label = 1
assert len(t1) > 0
assert len(t2) > 0
return t1, t2, label | [
"def",
"random_sent",
"(",
"self",
",",
"index",
")",
":",
"t1",
",",
"t2",
"=",
"self",
".",
"get_corpus_line",
"(",
"index",
")",
"if",
"random",
".",
"random",
"(",
")",
">",
"0.5",
":",
"label",
"=",
"0",
"else",
":",
"t2",
"=",
"self",
".",
"get_random_line",
"(",
")",
"label",
"=",
"1",
"assert",
"len",
"(",
"t1",
")",
">",
"0",
"assert",
"len",
"(",
"t2",
")",
">",
"0",
"return",
"t1",
",",
"t2",
",",
"label"
] | 36.764706 | 19.470588 |
async def createTask(self, *args, **kwargs):
"""
Create New Task
Create a new task, this is an **idempotent** operation, so repeat it if
you get an internal server error or network connection is dropped.
**Task `deadline`**: the deadline property can be no more than 5 days
into the future. This is to limit the amount of pending tasks not being
taken care of. Ideally, you should use a much shorter deadline.
**Task expiration**: the `expires` property must be greater than the
task `deadline`. If not provided it will default to `deadline` + one
year. Notice, that artifacts created by task must expire before the task.
**Task specific routing-keys**: using the `task.routes` property you may
define task specific routing-keys. If a task has a task specific
routing-key: `<route>`, then when the AMQP message about the task is
published, the message will be CC'ed with the routing-key:
`route.<route>`. This is useful if you want another component to listen
for completed tasks you have posted. The caller must have scope
`queue:route:<route>` for each route.
**Dependencies**: any tasks referenced in `task.dependencies` must have
already been created at the time of this call.
**Scopes**: Note that the scopes required to complete this API call depend
on the content of the `scopes`, `routes`, `schedulerId`, `priority`,
`provisionerId`, and `workerType` properties of the task definition.
**Legacy Scopes**: The `queue:create-task:..` scope without a priority and
the `queue:define-task:..` and `queue:task-group-id:..` scopes are considered
legacy and should not be used. Note that the new, non-legacy scopes require
a `queue:scheduler-id:..` scope as well as scopes for the proper priority.
This method takes input: ``v1/create-task-request.json#``
This method gives output: ``v1/task-status-response.json#``
This method is ``stable``
"""
return await self._makeApiCall(self.funcinfo["createTask"], *args, **kwargs) | [
"async",
"def",
"createTask",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"await",
"self",
".",
"_makeApiCall",
"(",
"self",
".",
"funcinfo",
"[",
"\"createTask\"",
"]",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 49.767442 | 32.697674 |
def categorical_partition_data(data):
"""Convenience method for creating weights from categorical data.
Args:
data (list-like): The data from which to construct the estimate.
Returns:
A new partition object::
{
"partition": (list) The categorical values present in the data
"weights": (list) The weights of the values in the partition.
}
"""
# Make dropna explicit (even though it defaults to true)
series = pd.Series(data)
value_counts = series.value_counts(dropna=True)
# Compute weights using denominator only of nonnull values
null_indexes = series.isnull()
nonnull_count = (null_indexes == False).sum()
weights = value_counts.values / nonnull_count
return {
"values": value_counts.index.tolist(),
"weights": weights
} | [
"def",
"categorical_partition_data",
"(",
"data",
")",
":",
"# Make dropna explicit (even though it defaults to true)",
"series",
"=",
"pd",
".",
"Series",
"(",
"data",
")",
"value_counts",
"=",
"series",
".",
"value_counts",
"(",
"dropna",
"=",
"True",
")",
"# Compute weights using denominator only of nonnull values",
"null_indexes",
"=",
"series",
".",
"isnull",
"(",
")",
"nonnull_count",
"=",
"(",
"null_indexes",
"==",
"False",
")",
".",
"sum",
"(",
")",
"weights",
"=",
"value_counts",
".",
"values",
"/",
"nonnull_count",
"return",
"{",
"\"values\"",
":",
"value_counts",
".",
"index",
".",
"tolist",
"(",
")",
",",
"\"weights\"",
":",
"weights",
"}"
] | 30.035714 | 22.964286 |
def query_flag(ifo, name, start_time, end_time,
source='any', server="segments.ligo.org",
veto_definer=None, cache=False):
"""Return the times where the flag is active
Parameters
----------
ifo: string
The interferometer to query (H1, L1).
name: string
The status flag to query from LOSC.
start_time: int
The starting gps time to begin querying from LOSC
end_time: int
The end gps time of the query
source: str, Optional
Choice between "GWOSC" or "dqsegdb". If dqsegdb, the server option may
also be given. The default is to try GWOSC first then try dqsegdb.
server: str, Optional
The server path. Only used with dqsegdb atm.
veto_definer: str, Optional
The path to a veto definer to define groups of flags which
themselves define a set of segments.
cache: bool
If true cache the query. Default is not to cache
Returns
---------
segments: glue.segments.segmentlist
List of segments
"""
info = name.split(':')
if len(info) == 2:
segment_name, version = info
elif len(info) == 1:
segment_name = info[0]
version = 1
flag_segments = segmentlist([])
if source in ['GWOSC', 'any']:
# Special cases as the LOSC convention is backwards from normal
# LIGO / Virgo operation!!!!
if (('_HW_INJ' in segment_name and 'NO' not in segment_name) or
'VETO' in segment_name):
data = query_flag(ifo, 'DATA', start_time, end_time)
if '_HW_INJ' in segment_name:
name = 'NO_' + segment_name
else:
name = segment_name.replace('_VETO', '')
negate = query_flag(ifo, name, start_time, end_time, cache=cache)
return (data - negate).coalesce()
duration = end_time - start_time
url = GWOSC_URL.format(get_run(start_time + duration/2),
ifo, segment_name,
int(start_time), int(duration))
try:
fname = download_file(url, cache=cache)
data = json.load(open(fname, 'r'))
if 'segments' in data:
flag_segments = data['segments']
except Exception as e:
msg = "Unable to find segments in GWOSC, check flag name or times"
print(e)
if source != 'any':
raise ValueError(msg)
else:
print("Tried and failed GWOSC {}, trying dqsegdb", name)
return query_flag(ifo, segment_name, start_time, end_time,
source='dqsegdb', server=server,
veto_definer=veto_definer)
elif source == 'dqsegdb':
# Let's not hard require dqsegdb to be installed if we never get here.
try:
from dqsegdb.apicalls import dqsegdbQueryTimes as query
except ImportError:
raise ValueError("Could not query flag. Install dqsegdb"
":'pip install dqsegdb'")
# The veto definer will allow the use of MACRO names
# These directly correspond the name defined in the veto definer file.
if veto_definer is not None:
veto_def = parse_veto_definer(veto_definer)
# We treat the veto definer name as if it were its own flag and
# a process the flags in the veto definer
if veto_definer is not None and segment_name in veto_def[ifo]:
for flag in veto_def[ifo][segment_name]:
segs = query("https", server, ifo, flag['name'],
flag['version'], 'active',
int(start_time), int(end_time))[0]['active']
# Apply padding to each segment
for rseg in segs:
seg_start = rseg[0] + flag['start_pad']
seg_end = rseg[1] + flag['end_pad']
flag_segments.append(segment(seg_start, seg_end))
# Apply start / end of the veto definer segment
send = segmentlist([segment([veto_def['start'], veto_def['end']])])
flag_segments = (flag_segments.coalesce() & send)
else: # Standard case just query directly.
try:
segs = query("https", server, ifo, name, version,
'active', int(start_time),
int(end_time))[0]['active']
for rseg in segs:
flag_segments.append(segment(rseg[0], rseg[1]))
except Exception as e:
print("Could not query flag, check name "
" (%s) or times" % segment_name)
raise e
else:
raise ValueError("Source must be dqsegdb or GWOSC."
" Got {}".format(source))
return segmentlist(flag_segments).coalesce() | [
"def",
"query_flag",
"(",
"ifo",
",",
"name",
",",
"start_time",
",",
"end_time",
",",
"source",
"=",
"'any'",
",",
"server",
"=",
"\"segments.ligo.org\"",
",",
"veto_definer",
"=",
"None",
",",
"cache",
"=",
"False",
")",
":",
"info",
"=",
"name",
".",
"split",
"(",
"':'",
")",
"if",
"len",
"(",
"info",
")",
"==",
"2",
":",
"segment_name",
",",
"version",
"=",
"info",
"elif",
"len",
"(",
"info",
")",
"==",
"1",
":",
"segment_name",
"=",
"info",
"[",
"0",
"]",
"version",
"=",
"1",
"flag_segments",
"=",
"segmentlist",
"(",
"[",
"]",
")",
"if",
"source",
"in",
"[",
"'GWOSC'",
",",
"'any'",
"]",
":",
"# Special cases as the LOSC convention is backwards from normal",
"# LIGO / Virgo operation!!!!",
"if",
"(",
"(",
"'_HW_INJ'",
"in",
"segment_name",
"and",
"'NO'",
"not",
"in",
"segment_name",
")",
"or",
"'VETO'",
"in",
"segment_name",
")",
":",
"data",
"=",
"query_flag",
"(",
"ifo",
",",
"'DATA'",
",",
"start_time",
",",
"end_time",
")",
"if",
"'_HW_INJ'",
"in",
"segment_name",
":",
"name",
"=",
"'NO_'",
"+",
"segment_name",
"else",
":",
"name",
"=",
"segment_name",
".",
"replace",
"(",
"'_VETO'",
",",
"''",
")",
"negate",
"=",
"query_flag",
"(",
"ifo",
",",
"name",
",",
"start_time",
",",
"end_time",
",",
"cache",
"=",
"cache",
")",
"return",
"(",
"data",
"-",
"negate",
")",
".",
"coalesce",
"(",
")",
"duration",
"=",
"end_time",
"-",
"start_time",
"url",
"=",
"GWOSC_URL",
".",
"format",
"(",
"get_run",
"(",
"start_time",
"+",
"duration",
"/",
"2",
")",
",",
"ifo",
",",
"segment_name",
",",
"int",
"(",
"start_time",
")",
",",
"int",
"(",
"duration",
")",
")",
"try",
":",
"fname",
"=",
"download_file",
"(",
"url",
",",
"cache",
"=",
"cache",
")",
"data",
"=",
"json",
".",
"load",
"(",
"open",
"(",
"fname",
",",
"'r'",
")",
")",
"if",
"'segments'",
"in",
"data",
":",
"flag_segments",
"=",
"data",
"[",
"'segments'",
"]",
"except",
"Exception",
"as",
"e",
":",
"msg",
"=",
"\"Unable to find segments in GWOSC, check flag name or times\"",
"print",
"(",
"e",
")",
"if",
"source",
"!=",
"'any'",
":",
"raise",
"ValueError",
"(",
"msg",
")",
"else",
":",
"print",
"(",
"\"Tried and failed GWOSC {}, trying dqsegdb\"",
",",
"name",
")",
"return",
"query_flag",
"(",
"ifo",
",",
"segment_name",
",",
"start_time",
",",
"end_time",
",",
"source",
"=",
"'dqsegdb'",
",",
"server",
"=",
"server",
",",
"veto_definer",
"=",
"veto_definer",
")",
"elif",
"source",
"==",
"'dqsegdb'",
":",
"# Let's not hard require dqsegdb to be installed if we never get here.",
"try",
":",
"from",
"dqsegdb",
".",
"apicalls",
"import",
"dqsegdbQueryTimes",
"as",
"query",
"except",
"ImportError",
":",
"raise",
"ValueError",
"(",
"\"Could not query flag. Install dqsegdb\"",
"\":'pip install dqsegdb'\"",
")",
"# The veto definer will allow the use of MACRO names",
"# These directly correspond the name defined in the veto definer file.",
"if",
"veto_definer",
"is",
"not",
"None",
":",
"veto_def",
"=",
"parse_veto_definer",
"(",
"veto_definer",
")",
"# We treat the veto definer name as if it were its own flag and",
"# a process the flags in the veto definer",
"if",
"veto_definer",
"is",
"not",
"None",
"and",
"segment_name",
"in",
"veto_def",
"[",
"ifo",
"]",
":",
"for",
"flag",
"in",
"veto_def",
"[",
"ifo",
"]",
"[",
"segment_name",
"]",
":",
"segs",
"=",
"query",
"(",
"\"https\"",
",",
"server",
",",
"ifo",
",",
"flag",
"[",
"'name'",
"]",
",",
"flag",
"[",
"'version'",
"]",
",",
"'active'",
",",
"int",
"(",
"start_time",
")",
",",
"int",
"(",
"end_time",
")",
")",
"[",
"0",
"]",
"[",
"'active'",
"]",
"# Apply padding to each segment",
"for",
"rseg",
"in",
"segs",
":",
"seg_start",
"=",
"rseg",
"[",
"0",
"]",
"+",
"flag",
"[",
"'start_pad'",
"]",
"seg_end",
"=",
"rseg",
"[",
"1",
"]",
"+",
"flag",
"[",
"'end_pad'",
"]",
"flag_segments",
".",
"append",
"(",
"segment",
"(",
"seg_start",
",",
"seg_end",
")",
")",
"# Apply start / end of the veto definer segment",
"send",
"=",
"segmentlist",
"(",
"[",
"segment",
"(",
"[",
"veto_def",
"[",
"'start'",
"]",
",",
"veto_def",
"[",
"'end'",
"]",
"]",
")",
"]",
")",
"flag_segments",
"=",
"(",
"flag_segments",
".",
"coalesce",
"(",
")",
"&",
"send",
")",
"else",
":",
"# Standard case just query directly.",
"try",
":",
"segs",
"=",
"query",
"(",
"\"https\"",
",",
"server",
",",
"ifo",
",",
"name",
",",
"version",
",",
"'active'",
",",
"int",
"(",
"start_time",
")",
",",
"int",
"(",
"end_time",
")",
")",
"[",
"0",
"]",
"[",
"'active'",
"]",
"for",
"rseg",
"in",
"segs",
":",
"flag_segments",
".",
"append",
"(",
"segment",
"(",
"rseg",
"[",
"0",
"]",
",",
"rseg",
"[",
"1",
"]",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"Could not query flag, check name \"",
"\" (%s) or times\"",
"%",
"segment_name",
")",
"raise",
"e",
"else",
":",
"raise",
"ValueError",
"(",
"\"Source must be dqsegdb or GWOSC.\"",
"\" Got {}\"",
".",
"format",
"(",
"source",
")",
")",
"return",
"segmentlist",
"(",
"flag_segments",
")",
".",
"coalesce",
"(",
")"
] | 38.055118 | 20.582677 |
def load_lynx(as_series=False):
"""Annual numbers of lynx trappings for 1821–1934 in Canada.
This time-series records the number of skins of predators (lynx) that were
collected over several years by the Hudson's Bay Company. The dataset was
taken from Brockwell & Davis (1991) and appears to be the series
considered by Campbell & Walker (1977).
Parameters
----------
as_series : bool, optional (default=False)
Whether to return a Pandas series. If True, the index will be set to
the observed years. If False, will return a 1d numpy array.
Examples
--------
>>> from pmdarima.datasets import load_lynx
>>> load_lynx()
array([ 269, 321, 585, 871, 1475, 2821, 3928, 5943, 4950, 2577, 523,
98, 184, 279, 409, 2285, 2685, 3409, 1824, 409, 151, 45,
68, 213, 546, 1033, 2129, 2536, 957, 361, 377, 225, 360,
731, 1638, 2725, 2871, 2119, 684, 299, 236, 245, 552, 1623,
3311, 6721, 4254, 687, 255, 473, 358, 784, 1594, 1676, 2251,
1426, 756, 299, 201, 229, 469, 736, 2042, 2811, 4431, 2511,
389, 73, 39, 49, 59, 188, 377, 1292, 4031, 3495, 587,
105, 153, 387, 758, 1307, 3465, 6991, 6313, 3794, 1836, 345,
382, 808, 1388, 2713, 3800, 3091, 2985, 3790, 674, 81, 80,
108, 229, 399, 1132, 2432, 3574, 2935, 1537, 529, 485, 662,
1000, 1590, 2657, 3396])
>>> load_lynx(True).head()
1821 269
1822 321
1823 585
1824 871
1825 1475
dtype: int64
Notes
-----
This is annual data and not seasonal in nature (i.e., :math:`m=1`)
References
----------
.. [1] Brockwell, P. J. and Davis, R. A. (1991)
Time Series and Forecasting Methods. Second edition.
Springer. Series G (page 557).
.. [2] https://stat.ethz.ch/R-manual/R-devel/library/datasets/html/lynx.html # noqa: E501
Returns
-------
lynx : array-like, shape=(n_samples,)
The lynx dataset. There are 114 observations.
"""
rslt = np.array([269, 321, 585, 871, 1475, 2821, 3928, 5943, 4950,
2577, 523, 98, 184, 279, 409, 2285, 2685, 3409,
1824, 409, 151, 45, 68, 213, 546, 1033, 2129,
2536, 957, 361, 377, 225, 360, 731, 1638, 2725,
2871, 2119, 684, 299, 236, 245, 552, 1623, 3311,
6721, 4254, 687, 255, 473, 358, 784, 1594, 1676,
2251, 1426, 756, 299, 201, 229, 469, 736, 2042,
2811, 4431, 2511, 389, 73, 39, 49, 59, 188,
377, 1292, 4031, 3495, 587, 105, 153, 387, 758,
1307, 3465, 6991, 6313, 3794, 1836, 345, 382, 808,
1388, 2713, 3800, 3091, 2985, 3790, 674, 81, 80,
108, 229, 399, 1132, 2432, 3574, 2935, 1537, 529,
485, 662, 1000, 1590, 2657, 3396])
# Set the index if necessary
if as_series:
return pd.Series(rslt, index=range(1821, 1935))
return rslt | [
"def",
"load_lynx",
"(",
"as_series",
"=",
"False",
")",
":",
"rslt",
"=",
"np",
".",
"array",
"(",
"[",
"269",
",",
"321",
",",
"585",
",",
"871",
",",
"1475",
",",
"2821",
",",
"3928",
",",
"5943",
",",
"4950",
",",
"2577",
",",
"523",
",",
"98",
",",
"184",
",",
"279",
",",
"409",
",",
"2285",
",",
"2685",
",",
"3409",
",",
"1824",
",",
"409",
",",
"151",
",",
"45",
",",
"68",
",",
"213",
",",
"546",
",",
"1033",
",",
"2129",
",",
"2536",
",",
"957",
",",
"361",
",",
"377",
",",
"225",
",",
"360",
",",
"731",
",",
"1638",
",",
"2725",
",",
"2871",
",",
"2119",
",",
"684",
",",
"299",
",",
"236",
",",
"245",
",",
"552",
",",
"1623",
",",
"3311",
",",
"6721",
",",
"4254",
",",
"687",
",",
"255",
",",
"473",
",",
"358",
",",
"784",
",",
"1594",
",",
"1676",
",",
"2251",
",",
"1426",
",",
"756",
",",
"299",
",",
"201",
",",
"229",
",",
"469",
",",
"736",
",",
"2042",
",",
"2811",
",",
"4431",
",",
"2511",
",",
"389",
",",
"73",
",",
"39",
",",
"49",
",",
"59",
",",
"188",
",",
"377",
",",
"1292",
",",
"4031",
",",
"3495",
",",
"587",
",",
"105",
",",
"153",
",",
"387",
",",
"758",
",",
"1307",
",",
"3465",
",",
"6991",
",",
"6313",
",",
"3794",
",",
"1836",
",",
"345",
",",
"382",
",",
"808",
",",
"1388",
",",
"2713",
",",
"3800",
",",
"3091",
",",
"2985",
",",
"3790",
",",
"674",
",",
"81",
",",
"80",
",",
"108",
",",
"229",
",",
"399",
",",
"1132",
",",
"2432",
",",
"3574",
",",
"2935",
",",
"1537",
",",
"529",
",",
"485",
",",
"662",
",",
"1000",
",",
"1590",
",",
"2657",
",",
"3396",
"]",
")",
"# Set the index if necessary",
"if",
"as_series",
":",
"return",
"pd",
".",
"Series",
"(",
"rslt",
",",
"index",
"=",
"range",
"(",
"1821",
",",
"1935",
")",
")",
"return",
"rslt"
] | 42.068493 | 26.438356 |
def makeResetPacket(ID, param):
"""
Resets a servo to one of 3 reset states:
XL320_RESET_ALL = 0xFF
XL320_RESET_ALL_BUT_ID = 0x01
XL320_RESET_ALL_BUT_ID_BAUD_RATE = 0x02
"""
if param not in [0x01, 0x02, 0xff]:
raise Exception('Packet.makeResetPacket invalide parameter {}'.format(param))
# pkt = makePacket(ID, xl320.XL320_RESET, None, [param])
pkt = makePacket(ID, xl320.XL320_RESET, None, [1])
return pkt | [
"def",
"makeResetPacket",
"(",
"ID",
",",
"param",
")",
":",
"if",
"param",
"not",
"in",
"[",
"0x01",
",",
"0x02",
",",
"0xff",
"]",
":",
"raise",
"Exception",
"(",
"'Packet.makeResetPacket invalide parameter {}'",
".",
"format",
"(",
"param",
")",
")",
"# pkt = makePacket(ID, xl320.XL320_RESET, None, [param])",
"pkt",
"=",
"makePacket",
"(",
"ID",
",",
"xl320",
".",
"XL320_RESET",
",",
"None",
",",
"[",
"1",
"]",
")",
"return",
"pkt"
] | 33.384615 | 11.538462 |
def getRoles(self):
"""Get all :class:`rtcclient.models.Role` objects in this project
area
If no :class:`Roles` are retrieved, `None` is returned.
:return: a :class:`list` that contains all
:class:`rtcclient.models.Role` objects
:rtype: list
"""
# no need to retrieve all the entries from _get_paged_resources
# role raw data is very simple that contains no other links
self.log.info("Get all the roles in <ProjectArea %s>",
self)
roles_url = "/".join([self.rtc_obj.url,
"process/project-areas/%s/roles" % self.id])
resp = self.get(roles_url,
verify=False,
proxies=self.rtc_obj.proxies,
headers=self.rtc_obj.headers)
roles_list = list()
raw_data = xmltodict.parse(resp.content)
roles_raw = raw_data['jp06:roles']['jp06:role']
if not roles_raw:
self.log.warning("There are no roles in <ProjectArea %s>",
self)
return None
for role_raw in roles_raw:
role = Role(role_raw.get("jp06:url"),
self.rtc_obj,
raw_data=role_raw)
roles_list.append(role)
return roles_list | [
"def",
"getRoles",
"(",
"self",
")",
":",
"# no need to retrieve all the entries from _get_paged_resources",
"# role raw data is very simple that contains no other links",
"self",
".",
"log",
".",
"info",
"(",
"\"Get all the roles in <ProjectArea %s>\"",
",",
"self",
")",
"roles_url",
"=",
"\"/\"",
".",
"join",
"(",
"[",
"self",
".",
"rtc_obj",
".",
"url",
",",
"\"process/project-areas/%s/roles\"",
"%",
"self",
".",
"id",
"]",
")",
"resp",
"=",
"self",
".",
"get",
"(",
"roles_url",
",",
"verify",
"=",
"False",
",",
"proxies",
"=",
"self",
".",
"rtc_obj",
".",
"proxies",
",",
"headers",
"=",
"self",
".",
"rtc_obj",
".",
"headers",
")",
"roles_list",
"=",
"list",
"(",
")",
"raw_data",
"=",
"xmltodict",
".",
"parse",
"(",
"resp",
".",
"content",
")",
"roles_raw",
"=",
"raw_data",
"[",
"'jp06:roles'",
"]",
"[",
"'jp06:role'",
"]",
"if",
"not",
"roles_raw",
":",
"self",
".",
"log",
".",
"warning",
"(",
"\"There are no roles in <ProjectArea %s>\"",
",",
"self",
")",
"return",
"None",
"for",
"role_raw",
"in",
"roles_raw",
":",
"role",
"=",
"Role",
"(",
"role_raw",
".",
"get",
"(",
"\"jp06:url\"",
")",
",",
"self",
".",
"rtc_obj",
",",
"raw_data",
"=",
"role_raw",
")",
"roles_list",
".",
"append",
"(",
"role",
")",
"return",
"roles_list"
] | 35.864865 | 17.972973 |
def flatten(d, reducer='tuple', inverse=False):
"""Flatten dict-like object.
Parameters
----------
d: dict-like object
The dict that will be flattened.
reducer: {'tuple', 'path', function} (default: 'tuple')
The key joining method. If a function is given, the function will be
used to reduce.
'tuple': The resulting key will be tuple of the original keys
'path': Use ``os.path.join`` to join keys.
inverse: bool (default: False)
Whether you want invert the resulting key and value.
Returns
-------
flat_dict: dict
"""
if isinstance(reducer, str):
reducer = REDUCER_DICT[reducer]
flat_dict = {}
def _flatten(d, parent=None):
for key, value in six.viewitems(d):
flat_key = reducer(parent, key)
if isinstance(value, Mapping):
_flatten(value, flat_key)
else:
if inverse:
flat_key, value = value, flat_key
if flat_key in flat_dict:
raise ValueError("duplicated key '{}'".format(flat_key))
flat_dict[flat_key] = value
_flatten(d)
return flat_dict | [
"def",
"flatten",
"(",
"d",
",",
"reducer",
"=",
"'tuple'",
",",
"inverse",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"reducer",
",",
"str",
")",
":",
"reducer",
"=",
"REDUCER_DICT",
"[",
"reducer",
"]",
"flat_dict",
"=",
"{",
"}",
"def",
"_flatten",
"(",
"d",
",",
"parent",
"=",
"None",
")",
":",
"for",
"key",
",",
"value",
"in",
"six",
".",
"viewitems",
"(",
"d",
")",
":",
"flat_key",
"=",
"reducer",
"(",
"parent",
",",
"key",
")",
"if",
"isinstance",
"(",
"value",
",",
"Mapping",
")",
":",
"_flatten",
"(",
"value",
",",
"flat_key",
")",
"else",
":",
"if",
"inverse",
":",
"flat_key",
",",
"value",
"=",
"value",
",",
"flat_key",
"if",
"flat_key",
"in",
"flat_dict",
":",
"raise",
"ValueError",
"(",
"\"duplicated key '{}'\"",
".",
"format",
"(",
"flat_key",
")",
")",
"flat_dict",
"[",
"flat_key",
"]",
"=",
"value",
"_flatten",
"(",
"d",
")",
"return",
"flat_dict"
] | 31.675676 | 17.108108 |
def refresh(self):
"""Refreshes the editor panels (resize and update margins)."""
logger.debug('Refresh panels')
self.resize()
self._update(self.editor.contentsRect(), 0,
force_update_margins=True) | [
"def",
"refresh",
"(",
"self",
")",
":",
"logger",
".",
"debug",
"(",
"'Refresh panels'",
")",
"self",
".",
"resize",
"(",
")",
"self",
".",
"_update",
"(",
"self",
".",
"editor",
".",
"contentsRect",
"(",
")",
",",
"0",
",",
"force_update_margins",
"=",
"True",
")"
] | 40.833333 | 10.166667 |
def check_user_can_view_comments(user_info, recid):
"""Check if the user is authorized to view comments for given
recid.
Returns the same type as acc_authorize_action
"""
# Check user can view the record itself first
(auth_code, auth_msg) = check_user_can_view_record(user_info, recid)
if auth_code:
return (auth_code, auth_msg)
# Check if user can view the comments
# But first can we find an authorization for this case action,
# for this collection?
record_primary_collection = guess_primary_collection_of_a_record(recid)
return acc_authorize_action(
user_info,
'viewcomment',
authorized_if_no_roles=True,
collection=record_primary_collection) | [
"def",
"check_user_can_view_comments",
"(",
"user_info",
",",
"recid",
")",
":",
"# Check user can view the record itself first",
"(",
"auth_code",
",",
"auth_msg",
")",
"=",
"check_user_can_view_record",
"(",
"user_info",
",",
"recid",
")",
"if",
"auth_code",
":",
"return",
"(",
"auth_code",
",",
"auth_msg",
")",
"# Check if user can view the comments",
"# But first can we find an authorization for this case action,",
"# for this collection?",
"record_primary_collection",
"=",
"guess_primary_collection_of_a_record",
"(",
"recid",
")",
"return",
"acc_authorize_action",
"(",
"user_info",
",",
"'viewcomment'",
",",
"authorized_if_no_roles",
"=",
"True",
",",
"collection",
"=",
"record_primary_collection",
")"
] | 35.85 | 16.55 |
def get_args(self, state, all_params, remainder, argspec, im_self):
'''
Determines the arguments for a controller based upon parameters
passed the argument specification for the controller.
'''
args = []
varargs = []
kwargs = dict()
valid_args = argspec.args[:]
if ismethod(state.controller) or im_self:
valid_args.pop(0) # pop off `self`
pecan_state = state.request.pecan
remainder = [x for x in remainder if x]
if im_self is not None:
args.append(im_self)
# grab the routing args from nested REST controllers
if 'routing_args' in pecan_state:
remainder = pecan_state['routing_args'] + list(remainder)
del pecan_state['routing_args']
# handle positional arguments
if valid_args and remainder:
args.extend(remainder[:len(valid_args)])
remainder = remainder[len(valid_args):]
valid_args = valid_args[len(args):]
# handle wildcard arguments
if [i for i in remainder if i]:
if not argspec[1]:
abort(404)
varargs.extend(remainder)
# get the default positional arguments
if argspec[3]:
defaults = dict(izip(argspec[0][-len(argspec[3]):], argspec[3]))
else:
defaults = dict()
# handle positional GET/POST params
for name in valid_args:
if name in all_params:
args.append(all_params.pop(name))
elif name in defaults:
args.append(defaults[name])
else:
break
# handle wildcard GET/POST params
if argspec[2]:
for name, value in six.iteritems(all_params):
if name not in argspec[0]:
kwargs[name] = value
return args, varargs, kwargs | [
"def",
"get_args",
"(",
"self",
",",
"state",
",",
"all_params",
",",
"remainder",
",",
"argspec",
",",
"im_self",
")",
":",
"args",
"=",
"[",
"]",
"varargs",
"=",
"[",
"]",
"kwargs",
"=",
"dict",
"(",
")",
"valid_args",
"=",
"argspec",
".",
"args",
"[",
":",
"]",
"if",
"ismethod",
"(",
"state",
".",
"controller",
")",
"or",
"im_self",
":",
"valid_args",
".",
"pop",
"(",
"0",
")",
"# pop off `self`",
"pecan_state",
"=",
"state",
".",
"request",
".",
"pecan",
"remainder",
"=",
"[",
"x",
"for",
"x",
"in",
"remainder",
"if",
"x",
"]",
"if",
"im_self",
"is",
"not",
"None",
":",
"args",
".",
"append",
"(",
"im_self",
")",
"# grab the routing args from nested REST controllers",
"if",
"'routing_args'",
"in",
"pecan_state",
":",
"remainder",
"=",
"pecan_state",
"[",
"'routing_args'",
"]",
"+",
"list",
"(",
"remainder",
")",
"del",
"pecan_state",
"[",
"'routing_args'",
"]",
"# handle positional arguments",
"if",
"valid_args",
"and",
"remainder",
":",
"args",
".",
"extend",
"(",
"remainder",
"[",
":",
"len",
"(",
"valid_args",
")",
"]",
")",
"remainder",
"=",
"remainder",
"[",
"len",
"(",
"valid_args",
")",
":",
"]",
"valid_args",
"=",
"valid_args",
"[",
"len",
"(",
"args",
")",
":",
"]",
"# handle wildcard arguments",
"if",
"[",
"i",
"for",
"i",
"in",
"remainder",
"if",
"i",
"]",
":",
"if",
"not",
"argspec",
"[",
"1",
"]",
":",
"abort",
"(",
"404",
")",
"varargs",
".",
"extend",
"(",
"remainder",
")",
"# get the default positional arguments",
"if",
"argspec",
"[",
"3",
"]",
":",
"defaults",
"=",
"dict",
"(",
"izip",
"(",
"argspec",
"[",
"0",
"]",
"[",
"-",
"len",
"(",
"argspec",
"[",
"3",
"]",
")",
":",
"]",
",",
"argspec",
"[",
"3",
"]",
")",
")",
"else",
":",
"defaults",
"=",
"dict",
"(",
")",
"# handle positional GET/POST params",
"for",
"name",
"in",
"valid_args",
":",
"if",
"name",
"in",
"all_params",
":",
"args",
".",
"append",
"(",
"all_params",
".",
"pop",
"(",
"name",
")",
")",
"elif",
"name",
"in",
"defaults",
":",
"args",
".",
"append",
"(",
"defaults",
"[",
"name",
"]",
")",
"else",
":",
"break",
"# handle wildcard GET/POST params",
"if",
"argspec",
"[",
"2",
"]",
":",
"for",
"name",
",",
"value",
"in",
"six",
".",
"iteritems",
"(",
"all_params",
")",
":",
"if",
"name",
"not",
"in",
"argspec",
"[",
"0",
"]",
":",
"kwargs",
"[",
"name",
"]",
"=",
"value",
"return",
"args",
",",
"varargs",
",",
"kwargs"
] | 32.684211 | 16.54386 |
def hue(self, img1, img2):
"""Applies the hue blend mode.
Hues image img1 with image img2.
The hue filter replaces the hues of pixels in img1
with the hues of pixels in img2.
Returns a composite image with the alpha channel retained.
"""
import colorsys
p1 = list(img1.getdata())
p2 = list(img2.getdata())
for i in range(len(p1)):
r1, g1, b1, a1 = p1[i]
r1 = r1 / 255.0
g1 = g1 / 255.0
b1 = b1 / 255.0
h1, s1, v1 = colorsys.rgb_to_hsv(r1, g1, b1)
r2, g2, b2, a2 = p2[i]
r2 = r2 / 255.0
g2 = g2 / 255.0
b2 = b2 / 255.0
h2, s2, v2 = colorsys.rgb_to_hsv(r2, g2, b2)
r3, g3, b3 = colorsys.hsv_to_rgb(h2, s1, v1)
r3 = int(r3*255)
g3 = int(g3*255)
b3 = int(b3*255)
p1[i] = (r3, g3, b3, a1)
img = Image.new("RGBA", img1.size, 255)
img.putdata(p1)
return img | [
"def",
"hue",
"(",
"self",
",",
"img1",
",",
"img2",
")",
":",
"import",
"colorsys",
"p1",
"=",
"list",
"(",
"img1",
".",
"getdata",
"(",
")",
")",
"p2",
"=",
"list",
"(",
"img2",
".",
"getdata",
"(",
")",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"p1",
")",
")",
":",
"r1",
",",
"g1",
",",
"b1",
",",
"a1",
"=",
"p1",
"[",
"i",
"]",
"r1",
"=",
"r1",
"/",
"255.0",
"g1",
"=",
"g1",
"/",
"255.0",
"b1",
"=",
"b1",
"/",
"255.0",
"h1",
",",
"s1",
",",
"v1",
"=",
"colorsys",
".",
"rgb_to_hsv",
"(",
"r1",
",",
"g1",
",",
"b1",
")",
"r2",
",",
"g2",
",",
"b2",
",",
"a2",
"=",
"p2",
"[",
"i",
"]",
"r2",
"=",
"r2",
"/",
"255.0",
"g2",
"=",
"g2",
"/",
"255.0",
"b2",
"=",
"b2",
"/",
"255.0",
"h2",
",",
"s2",
",",
"v2",
"=",
"colorsys",
".",
"rgb_to_hsv",
"(",
"r2",
",",
"g2",
",",
"b2",
")",
"r3",
",",
"g3",
",",
"b3",
"=",
"colorsys",
".",
"hsv_to_rgb",
"(",
"h2",
",",
"s1",
",",
"v1",
")",
"r3",
"=",
"int",
"(",
"r3",
"*",
"255",
")",
"g3",
"=",
"int",
"(",
"g3",
"*",
"255",
")",
"b3",
"=",
"int",
"(",
"b3",
"*",
"255",
")",
"p1",
"[",
"i",
"]",
"=",
"(",
"r3",
",",
"g3",
",",
"b3",
",",
"a1",
")",
"img",
"=",
"Image",
".",
"new",
"(",
"\"RGBA\"",
",",
"img1",
".",
"size",
",",
"255",
")",
"img",
".",
"putdata",
"(",
"p1",
")",
"return",
"img"
] | 26.55 | 17.625 |
def _read_widget(self):
"""Returns the value currently stored into the widget, after
transforming it accordingly to possibly specified function.
This is implemented by calling the getter provided by the
user. This method can raise InvalidValue (raised by the
getter) when the value in the widget must not be considered as
valid."""
getter = self._wid_info[self._wid][0]
return getter(self._wid) | [
"def",
"_read_widget",
"(",
"self",
")",
":",
"getter",
"=",
"self",
".",
"_wid_info",
"[",
"self",
".",
"_wid",
"]",
"[",
"0",
"]",
"return",
"getter",
"(",
"self",
".",
"_wid",
")"
] | 45 | 17.5 |
def add_JSscript(self, js_script, js_loc):
"""add (highcharts) javascript in the beginning or at the end of script
use only if necessary
"""
if js_loc == 'head':
self.jscript_head_flag = True
if self.jscript_head:
self.jscript_head = self.jscript_head + '\n' + js_script
else:
self.jscript_head = js_script
elif js_loc == 'end':
self.jscript_end_flag = True
if self.jscript_end:
self.jscript_end = self.jscript_end + '\n' + js_script
else:
self.jscript_end = js_script
else:
raise OptionTypeError("Not An Accepted script location: %s, either 'head' or 'end'"
% js_loc) | [
"def",
"add_JSscript",
"(",
"self",
",",
"js_script",
",",
"js_loc",
")",
":",
"if",
"js_loc",
"==",
"'head'",
":",
"self",
".",
"jscript_head_flag",
"=",
"True",
"if",
"self",
".",
"jscript_head",
":",
"self",
".",
"jscript_head",
"=",
"self",
".",
"jscript_head",
"+",
"'\\n'",
"+",
"js_script",
"else",
":",
"self",
".",
"jscript_head",
"=",
"js_script",
"elif",
"js_loc",
"==",
"'end'",
":",
"self",
".",
"jscript_end_flag",
"=",
"True",
"if",
"self",
".",
"jscript_end",
":",
"self",
".",
"jscript_end",
"=",
"self",
".",
"jscript_end",
"+",
"'\\n'",
"+",
"js_script",
"else",
":",
"self",
".",
"jscript_end",
"=",
"js_script",
"else",
":",
"raise",
"OptionTypeError",
"(",
"\"Not An Accepted script location: %s, either 'head' or 'end'\"",
"%",
"js_loc",
")"
] | 41.105263 | 13.421053 |
def upload_file(self, session, output, serverdir):
"""
Upload a file to koji
:return: str, pathname on server
"""
name = output.metadata['filename']
self.log.debug("uploading %r to %r as %r",
output.file.name, serverdir, name)
kwargs = {}
if self.blocksize is not None:
kwargs['blocksize'] = self.blocksize
self.log.debug("using blocksize %d", self.blocksize)
upload_logger = KojiUploadLogger(self.log)
session.uploadWrapper(output.file.name, serverdir, name=name,
callback=upload_logger.callback, **kwargs)
path = os.path.join(serverdir, name)
self.log.debug("uploaded %r", path)
return path | [
"def",
"upload_file",
"(",
"self",
",",
"session",
",",
"output",
",",
"serverdir",
")",
":",
"name",
"=",
"output",
".",
"metadata",
"[",
"'filename'",
"]",
"self",
".",
"log",
".",
"debug",
"(",
"\"uploading %r to %r as %r\"",
",",
"output",
".",
"file",
".",
"name",
",",
"serverdir",
",",
"name",
")",
"kwargs",
"=",
"{",
"}",
"if",
"self",
".",
"blocksize",
"is",
"not",
"None",
":",
"kwargs",
"[",
"'blocksize'",
"]",
"=",
"self",
".",
"blocksize",
"self",
".",
"log",
".",
"debug",
"(",
"\"using blocksize %d\"",
",",
"self",
".",
"blocksize",
")",
"upload_logger",
"=",
"KojiUploadLogger",
"(",
"self",
".",
"log",
")",
"session",
".",
"uploadWrapper",
"(",
"output",
".",
"file",
".",
"name",
",",
"serverdir",
",",
"name",
"=",
"name",
",",
"callback",
"=",
"upload_logger",
".",
"callback",
",",
"*",
"*",
"kwargs",
")",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"serverdir",
",",
"name",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"uploaded %r\"",
",",
"path",
")",
"return",
"path"
] | 36 | 15.428571 |
def base64_encode_as_string(obj): # noqa
# type: (any) -> str
"""Encode object to base64
:param any obj: object to encode
:rtype: str
:return: base64 encoded string
"""
if on_python2():
return base64.b64encode(obj)
else:
return str(base64.b64encode(obj), 'ascii') | [
"def",
"base64_encode_as_string",
"(",
"obj",
")",
":",
"# noqa",
"# type: (any) -> str",
"if",
"on_python2",
"(",
")",
":",
"return",
"base64",
".",
"b64encode",
"(",
"obj",
")",
"else",
":",
"return",
"str",
"(",
"base64",
".",
"b64encode",
"(",
"obj",
")",
",",
"'ascii'",
")"
] | 27.454545 | 10.636364 |
def patch(self, url, data=None, **kwargs):
""" Shorthand for self.oauth_request(url, 'patch')
:param str url: url to send patch oauth request to
:param dict data: patch data to update the service
:param kwargs: extra params to send to request api
:return: Response of the request
:rtype: requests.Response
"""
return self.oauth_request(url, 'patch', data=data, **kwargs) | [
"def",
"patch",
"(",
"self",
",",
"url",
",",
"data",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"oauth_request",
"(",
"url",
",",
"'patch'",
",",
"data",
"=",
"data",
",",
"*",
"*",
"kwargs",
")"
] | 42.6 | 13.1 |
def init_app(self, app):
"""Initialize application in Flask-RBAC.
Adds (RBAC, app) to flask extensions.
Adds hook to authenticate permission before request.
:param app: Flask object
"""
app.config.setdefault('RBAC_USE_WHITE', False)
self.use_white = app.config['RBAC_USE_WHITE']
if not hasattr(app, 'extensions'):
app.extensions = {}
app.extensions['rbac'] = _RBACState(self, app)
self.acl.allow(anonymous, 'GET', 'static')
app.before_first_request(self._setup_acl)
app.before_request(self._authenticate) | [
"def",
"init_app",
"(",
"self",
",",
"app",
")",
":",
"app",
".",
"config",
".",
"setdefault",
"(",
"'RBAC_USE_WHITE'",
",",
"False",
")",
"self",
".",
"use_white",
"=",
"app",
".",
"config",
"[",
"'RBAC_USE_WHITE'",
"]",
"if",
"not",
"hasattr",
"(",
"app",
",",
"'extensions'",
")",
":",
"app",
".",
"extensions",
"=",
"{",
"}",
"app",
".",
"extensions",
"[",
"'rbac'",
"]",
"=",
"_RBACState",
"(",
"self",
",",
"app",
")",
"self",
".",
"acl",
".",
"allow",
"(",
"anonymous",
",",
"'GET'",
",",
"'static'",
")",
"app",
".",
"before_first_request",
"(",
"self",
".",
"_setup_acl",
")",
"app",
".",
"before_request",
"(",
"self",
".",
"_authenticate",
")"
] | 31.526316 | 17.157895 |
def set_selinux_context(path,
user=None,
role=None,
type=None, # pylint: disable=W0622
range=None, # pylint: disable=W0622
persist=False):
'''
.. versionchanged:: Neon
Added persist option
Set a specific SELinux label on a given path
CLI Example:
.. code-block:: bash
salt '*' file.set_selinux_context path <user> <role> <type> <range>
salt '*' file.set_selinux_context /etc/yum.repos.d/epel.repo system_u object_r system_conf_t s0
'''
if not any((user, role, type, range)):
return False
if persist:
fcontext_result = __salt__['selinux.fcontext_add_policy'](path,
sel_type=type, sel_user=user, sel_level=range)
if fcontext_result.get('retcode', None) is not 0:
# Problem setting fcontext policy
raise CommandExecutionError(
'Problem setting fcontext: {0}'.format(fcontext_result)
)
cmd = ['chcon']
if user:
cmd.extend(['-u', user])
if role:
cmd.extend(['-r', role])
if type:
cmd.extend(['-t', type])
if range:
cmd.extend(['-l', range])
cmd.append(path)
ret = not __salt__['cmd.retcode'](cmd, python_shell=False)
if ret:
return get_selinux_context(path)
else:
return ret | [
"def",
"set_selinux_context",
"(",
"path",
",",
"user",
"=",
"None",
",",
"role",
"=",
"None",
",",
"type",
"=",
"None",
",",
"# pylint: disable=W0622",
"range",
"=",
"None",
",",
"# pylint: disable=W0622",
"persist",
"=",
"False",
")",
":",
"if",
"not",
"any",
"(",
"(",
"user",
",",
"role",
",",
"type",
",",
"range",
")",
")",
":",
"return",
"False",
"if",
"persist",
":",
"fcontext_result",
"=",
"__salt__",
"[",
"'selinux.fcontext_add_policy'",
"]",
"(",
"path",
",",
"sel_type",
"=",
"type",
",",
"sel_user",
"=",
"user",
",",
"sel_level",
"=",
"range",
")",
"if",
"fcontext_result",
".",
"get",
"(",
"'retcode'",
",",
"None",
")",
"is",
"not",
"0",
":",
"# Problem setting fcontext policy",
"raise",
"CommandExecutionError",
"(",
"'Problem setting fcontext: {0}'",
".",
"format",
"(",
"fcontext_result",
")",
")",
"cmd",
"=",
"[",
"'chcon'",
"]",
"if",
"user",
":",
"cmd",
".",
"extend",
"(",
"[",
"'-u'",
",",
"user",
"]",
")",
"if",
"role",
":",
"cmd",
".",
"extend",
"(",
"[",
"'-r'",
",",
"role",
"]",
")",
"if",
"type",
":",
"cmd",
".",
"extend",
"(",
"[",
"'-t'",
",",
"type",
"]",
")",
"if",
"range",
":",
"cmd",
".",
"extend",
"(",
"[",
"'-l'",
",",
"range",
"]",
")",
"cmd",
".",
"append",
"(",
"path",
")",
"ret",
"=",
"not",
"__salt__",
"[",
"'cmd.retcode'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"if",
"ret",
":",
"return",
"get_selinux_context",
"(",
"path",
")",
"else",
":",
"return",
"ret"
] | 28.8125 | 22.604167 |
def packetToDict(pkt):
"""
Given a packet, this turns it into a dictionary ... is this useful?
in: packet, array of numbers
out: dictionary (key, value)
"""
d = {
'id': pkt[4],
# 'instruction': xl320.InstrToStr[pkt[7]],
# 'length': (pkt[6] << 8) + pkt[5],
# 'params': pkt[8:-2],
'Model Number': (pkt[10] << 8) + pkt[9],
'Firmware Ver': pkt[11],
'Error': ErrorStatusMsg[pkt[8]],
# 'crc': pkt[-2:]
}
return d | [
"def",
"packetToDict",
"(",
"pkt",
")",
":",
"d",
"=",
"{",
"'id'",
":",
"pkt",
"[",
"4",
"]",
",",
"# 'instruction': xl320.InstrToStr[pkt[7]],",
"# 'length': (pkt[6] << 8) + pkt[5],",
"# 'params': pkt[8:-2],",
"'Model Number'",
":",
"(",
"pkt",
"[",
"10",
"]",
"<<",
"8",
")",
"+",
"pkt",
"[",
"9",
"]",
",",
"'Firmware Ver'",
":",
"pkt",
"[",
"11",
"]",
",",
"'Error'",
":",
"ErrorStatusMsg",
"[",
"pkt",
"[",
"8",
"]",
"]",
",",
"# 'crc': pkt[-2:]",
"}",
"return",
"d"
] | 20.7 | 19.1 |
def Verify(self, mempool):
"""
Verify the transaction.
Args:
mempool:
Returns:
bool: True if verified. False otherwise.
"""
logger.info("Verifying transaction: %s " % self.Hash.ToBytes())
return Helper.VerifyScripts(self) | [
"def",
"Verify",
"(",
"self",
",",
"mempool",
")",
":",
"logger",
".",
"info",
"(",
"\"Verifying transaction: %s \"",
"%",
"self",
".",
"Hash",
".",
"ToBytes",
"(",
")",
")",
"return",
"Helper",
".",
"VerifyScripts",
"(",
"self",
")"
] | 22.461538 | 19.846154 |
def _filterize(name, value):
"""
Turn a `name` and `value` into a string expression compatible
the ``DataFrame.query`` method.
Parameters
----------
name : str
Should be the name of a column in the table to which the
filter will be applied.
A suffix of '_max' will result in a "less than" filter,
a suffix of '_min' will result in a "greater than or equal to" filter,
and no recognized suffix will result in an "equal to" filter.
value : any
Value side of filter for comparison to column values.
Returns
-------
filter_exp : str
"""
if name.endswith('_min'):
name = name[:-4]
comp = '>='
elif name.endswith('_max'):
name = name[:-4]
comp = '<'
else:
comp = '=='
result = '{} {} {!r}'.format(name, comp, value)
logger.debug(
'converted name={} and value={} to filter {}'.format(
name, value, result))
return result | [
"def",
"_filterize",
"(",
"name",
",",
"value",
")",
":",
"if",
"name",
".",
"endswith",
"(",
"'_min'",
")",
":",
"name",
"=",
"name",
"[",
":",
"-",
"4",
"]",
"comp",
"=",
"'>='",
"elif",
"name",
".",
"endswith",
"(",
"'_max'",
")",
":",
"name",
"=",
"name",
"[",
":",
"-",
"4",
"]",
"comp",
"=",
"'<'",
"else",
":",
"comp",
"=",
"'=='",
"result",
"=",
"'{} {} {!r}'",
".",
"format",
"(",
"name",
",",
"comp",
",",
"value",
")",
"logger",
".",
"debug",
"(",
"'converted name={} and value={} to filter {}'",
".",
"format",
"(",
"name",
",",
"value",
",",
"result",
")",
")",
"return",
"result"
] | 26.638889 | 22.194444 |
def save(keystorerc=None, keystore=None, files=[], verbose=False):
'''create a keystore, compress and encrypt to file'''
config = None
if keystorerc:
config = config_reader.read(keystorerc)
if not config:
print('No configuration found.', file=sys.stderr)
sys.exit(-1)
elif keystore and len(files) > 0:
config = {
'keystore': keystore,
'files': files
}
if 'verbose' in config and config['verbose']:
verbose = True
keystore_path = None
if 'keystore' not in config:
print('.keystorerc needs to specify a keystore file path.', file=sys.stderr)
sys.exit(-1)
keystore_path = os.path.expanduser(config['keystore'])
if os.path.isdir(keystore_path):
print('keystore cannot be a folder: {}'.format(config['keystore']), file=sys.stderr)
sys.exit(-1)
elif not os.path.isfile(keystore_path):
# If keystore file does not exist already, attempt to create one
try:
pathlib.Path(keystore_path).touch()
except OSError as err:
print('keystore cannot be accessed: {}\n{}'.format(config['keystore'], err), file=sys.stderr)
sys.exit(-1)
# iterate through keys and add them here
keystore = {}
try:
for p in config['files']:
expanded_path = os.path.expanduser(p)
path = pathlib.Path(expanded_path)
if verbose: print('Inspecting {}:'.format(expanded_path))
if not path.exists():
print('Error: File or folder does not exist: {}'.format(p), file=sys.stderr)
sys.exit(-1)
if path.is_dir():
for dirpath, dirnames, filenames in os.walk(expanded_path):
for name in filenames:
fullpath = os.path.join(dirpath, name)
if verbose: print('Adding {} ...'.format(fullpath))
with open(fullpath, 'rb') as keyfile:
b64_bytes = base64.encodebytes(keyfile.read()).decode('utf-8')
keystore[fullpath] = b64_bytes
elif path.is_file():
fullpath = expanded_path
if verbose: print('Adding {} ...'.format(fullpath))
with open(fullpath, 'rb') as keyfile:
b64_bytes = base64.encodebytes(keyfile.read()).decode('utf-8')
keystore[fullpath] = b64_bytes
if verbose: print('Added {} key(s) to keystore.\n'.format(len(keystore)))
# prompt user for a one-time passphase for encryption
do_passphrases_match = False
passphrase = None
print('This passphrase is used to decrypt your keystore. Please remember it.')
while not do_passphrases_match:
passphrase = getpass.getpass('Please enter a passphrase: ')
passphrase_verify = getpass.getpass('Please verify your passphrase: ')
do_passphrases_match = passphrase != '' and passphrase == passphrase_verify
if passphrase == '':
print('Passphrase cannot be empty.')
elif not do_passphrases_match:
print('Passphrases do not match. Please try again.')
if verbose: print('Passphrase accepted. Encrypting ...')
# serialise, compress, encrypt
serial_keystore = json.dumps(keystore)
compressed_keystore = gzip.compress(serial_keystore.encode('utf-8'))
try:
encrypted_keystore = simplecrypt.encrypt(passphrase, compressed_keystore)
except simplecrypt.EncryptionException as err:
print('You managed to bump into a very, very rare issue with AES.\nPlease contact the author. {}'.format(err), file=sys.stder)
sys.exit(-1)
# save encrypted keystore to file
keystore_path = os.path.expanduser(keystore_path)
if verbose: print('Writing to keystore file {}'.format(keystore_path))
with open(keystore_path, 'wb') as keystore_file:
keystore_file.write(encrypted_keystore)
if verbose: print('Keystore successfully created: ')
# if verbose: print(encrypted_keystore)
except KeyError as err:
print('.keystorerc config is missing `files` attribute: {}'.format(err), file=sys.stderr)
sys.exit(-1)
except TypeError as err:
print('Error: {}'.format(err), file=sys.stderr)
traceback.print_exc()
sys.exit(-1)
except OSError as err:
print('The file system gave an error: {}'.format(err), file=sys.stderr)
sys.exit(-1)
except Exception as err:
print('Serious error. Please report this bug to the author: {}'.format(err), file=sys.stderr)
sys.exit(-1) | [
"def",
"save",
"(",
"keystorerc",
"=",
"None",
",",
"keystore",
"=",
"None",
",",
"files",
"=",
"[",
"]",
",",
"verbose",
"=",
"False",
")",
":",
"config",
"=",
"None",
"if",
"keystorerc",
":",
"config",
"=",
"config_reader",
".",
"read",
"(",
"keystorerc",
")",
"if",
"not",
"config",
":",
"print",
"(",
"'No configuration found.'",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"elif",
"keystore",
"and",
"len",
"(",
"files",
")",
">",
"0",
":",
"config",
"=",
"{",
"'keystore'",
":",
"keystore",
",",
"'files'",
":",
"files",
"}",
"if",
"'verbose'",
"in",
"config",
"and",
"config",
"[",
"'verbose'",
"]",
":",
"verbose",
"=",
"True",
"keystore_path",
"=",
"None",
"if",
"'keystore'",
"not",
"in",
"config",
":",
"print",
"(",
"'.keystorerc needs to specify a keystore file path.'",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"keystore_path",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"config",
"[",
"'keystore'",
"]",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"keystore_path",
")",
":",
"print",
"(",
"'keystore cannot be a folder: {}'",
".",
"format",
"(",
"config",
"[",
"'keystore'",
"]",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"elif",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"keystore_path",
")",
":",
"# If keystore file does not exist already, attempt to create one",
"try",
":",
"pathlib",
".",
"Path",
"(",
"keystore_path",
")",
".",
"touch",
"(",
")",
"except",
"OSError",
"as",
"err",
":",
"print",
"(",
"'keystore cannot be accessed: {}\\n{}'",
".",
"format",
"(",
"config",
"[",
"'keystore'",
"]",
",",
"err",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"# iterate through keys and add them here",
"keystore",
"=",
"{",
"}",
"try",
":",
"for",
"p",
"in",
"config",
"[",
"'files'",
"]",
":",
"expanded_path",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"p",
")",
"path",
"=",
"pathlib",
".",
"Path",
"(",
"expanded_path",
")",
"if",
"verbose",
":",
"print",
"(",
"'Inspecting {}:'",
".",
"format",
"(",
"expanded_path",
")",
")",
"if",
"not",
"path",
".",
"exists",
"(",
")",
":",
"print",
"(",
"'Error: File or folder does not exist: {}'",
".",
"format",
"(",
"p",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"if",
"path",
".",
"is_dir",
"(",
")",
":",
"for",
"dirpath",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"expanded_path",
")",
":",
"for",
"name",
"in",
"filenames",
":",
"fullpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"name",
")",
"if",
"verbose",
":",
"print",
"(",
"'Adding {} ...'",
".",
"format",
"(",
"fullpath",
")",
")",
"with",
"open",
"(",
"fullpath",
",",
"'rb'",
")",
"as",
"keyfile",
":",
"b64_bytes",
"=",
"base64",
".",
"encodebytes",
"(",
"keyfile",
".",
"read",
"(",
")",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"keystore",
"[",
"fullpath",
"]",
"=",
"b64_bytes",
"elif",
"path",
".",
"is_file",
"(",
")",
":",
"fullpath",
"=",
"expanded_path",
"if",
"verbose",
":",
"print",
"(",
"'Adding {} ...'",
".",
"format",
"(",
"fullpath",
")",
")",
"with",
"open",
"(",
"fullpath",
",",
"'rb'",
")",
"as",
"keyfile",
":",
"b64_bytes",
"=",
"base64",
".",
"encodebytes",
"(",
"keyfile",
".",
"read",
"(",
")",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"keystore",
"[",
"fullpath",
"]",
"=",
"b64_bytes",
"if",
"verbose",
":",
"print",
"(",
"'Added {} key(s) to keystore.\\n'",
".",
"format",
"(",
"len",
"(",
"keystore",
")",
")",
")",
"# prompt user for a one-time passphase for encryption",
"do_passphrases_match",
"=",
"False",
"passphrase",
"=",
"None",
"print",
"(",
"'This passphrase is used to decrypt your keystore. Please remember it.'",
")",
"while",
"not",
"do_passphrases_match",
":",
"passphrase",
"=",
"getpass",
".",
"getpass",
"(",
"'Please enter a passphrase: '",
")",
"passphrase_verify",
"=",
"getpass",
".",
"getpass",
"(",
"'Please verify your passphrase: '",
")",
"do_passphrases_match",
"=",
"passphrase",
"!=",
"''",
"and",
"passphrase",
"==",
"passphrase_verify",
"if",
"passphrase",
"==",
"''",
":",
"print",
"(",
"'Passphrase cannot be empty.'",
")",
"elif",
"not",
"do_passphrases_match",
":",
"print",
"(",
"'Passphrases do not match. Please try again.'",
")",
"if",
"verbose",
":",
"print",
"(",
"'Passphrase accepted. Encrypting ...'",
")",
"# serialise, compress, encrypt",
"serial_keystore",
"=",
"json",
".",
"dumps",
"(",
"keystore",
")",
"compressed_keystore",
"=",
"gzip",
".",
"compress",
"(",
"serial_keystore",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"try",
":",
"encrypted_keystore",
"=",
"simplecrypt",
".",
"encrypt",
"(",
"passphrase",
",",
"compressed_keystore",
")",
"except",
"simplecrypt",
".",
"EncryptionException",
"as",
"err",
":",
"print",
"(",
"'You managed to bump into a very, very rare issue with AES.\\nPlease contact the author. {}'",
".",
"format",
"(",
"err",
")",
",",
"file",
"=",
"sys",
".",
"stder",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"# save encrypted keystore to file",
"keystore_path",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"keystore_path",
")",
"if",
"verbose",
":",
"print",
"(",
"'Writing to keystore file {}'",
".",
"format",
"(",
"keystore_path",
")",
")",
"with",
"open",
"(",
"keystore_path",
",",
"'wb'",
")",
"as",
"keystore_file",
":",
"keystore_file",
".",
"write",
"(",
"encrypted_keystore",
")",
"if",
"verbose",
":",
"print",
"(",
"'Keystore successfully created: '",
")",
"# if verbose: print(encrypted_keystore)",
"except",
"KeyError",
"as",
"err",
":",
"print",
"(",
"'.keystorerc config is missing `files` attribute: {}'",
".",
"format",
"(",
"err",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"except",
"TypeError",
"as",
"err",
":",
"print",
"(",
"'Error: {}'",
".",
"format",
"(",
"err",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"traceback",
".",
"print_exc",
"(",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"except",
"OSError",
"as",
"err",
":",
"print",
"(",
"'The file system gave an error: {}'",
".",
"format",
"(",
"err",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"except",
"Exception",
"as",
"err",
":",
"print",
"(",
"'Serious error. Please report this bug to the author: {}'",
".",
"format",
"(",
"err",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")"
] | 37.123894 | 23.265487 |
def ani_depthplot(spec_file='specimens.txt', samp_file='samples.txt',
                  meas_file='measurements.txt', site_file='sites.txt',
                  age_file="", sum_file="", fmt='svg', dmin=-1, dmax=-1,
                  depth_scale='core_depth', dir_path='.', contribution=None):
    """
    returns matplotlib figure with anisotropy data plotted against depth
    available depth scales: 'composite_depth', 'core_depth' or 'age' (you must provide an age file to use this option).
    You must provide valid specimens and sites files, and either a samples or an ages file.
    You may additionally provide measurements and a summary file (csv).
    Parameters
    ----------
    spec_file : str, default "specimens.txt"
    samp_file : str, default "samples.txt"
    meas_file : str, default "measurements.txt"
    site_file : str, default "sites.txt"
    age_file : str, default ""
    sum_file : str, default ""
    fmt : str, default "svg"
        format for figures, ["svg", "jpg", "pdf", "png"]
    dmin : number, default -1
        minimum depth to plot (if -1, default to plotting all)
    dmax : number, default -1
        maximum depth to plot (if -1, default to plotting all)
    depth_scale : str, default "core_depth"
        scale to plot, ['composite_depth', 'core_depth', 'age'].
        if 'age' is selected, you must provide an ages file.
    dir_path : str, default "."
        directory for input files
    contribution : cb.Contribution, default None
        if provided, use Contribution object instead of reading in
        data from files
    Returns
    ---------
    plot : matplotlib plot, or False if no plot could be created
    name : figure name, or error message if no plot could be created
    """
    # map alternate 'sample_*' depth-scale names onto the names used here
    if depth_scale == 'sample_core_depth':
        depth_scale = 'core_depth'
    if depth_scale == 'sample_composite_depth':
        depth_scale = 'composite_depth'
    pcol = 4  # number of plot columns; bumped to 5 below when bulk susceptibility data exist
    tint = 9  # NOTE(review): only referenced in commented-out code below
    plots = 0  # NOTE(review): appears unused in this function
    dmin, dmax = float(dmin), float(dmax)
    # if contribution object is not provided, read in data from files
    if isinstance(contribution, cb.Contribution):
        con = contribution
    else:
        # format files to use full path
        meas_file = pmag.resolve_file_name(meas_file, dir_path)
        spec_file = pmag.resolve_file_name(spec_file, dir_path)
        samp_file = pmag.resolve_file_name(samp_file, dir_path)
        site_file = pmag.resolve_file_name(site_file, dir_path)
        if age_file:
            age_file = pmag.resolve_file_name(age_file, dir_path)
            if not os.path.isfile(age_file):
                # invalid age file: fall back to samples + core_depth
                print(
                    'Warning: you have provided an invalid age file. Attempting to use sample file instead')
                age_file = None
                depth_scale = 'core_depth'
            else:
                # a valid ages file replaces the samples file and forces the age scale
                samp_file = age_file
                depth_scale = 'age'
                print(
                    'Warning: you have provided an ages format file, which will take precedence over samples')
        samp_file = pmag.resolve_file_name(samp_file, dir_path)
        label = 1  # NOTE(review): only referenced in commented-out code below
        if sum_file:
            # summary csv supplies core-top depths used to draw section boundaries
            sum_file = pmag.resolve_file_name(sum_file, dir_path)
            core_df=pd.read_csv(sum_file)
            depths=core_df['Top depth cored CSF (m)'].values
        # contribution
        dir_path = os.path.split(spec_file)[0]
        tables = ['measurements', 'specimens', 'samples', 'sites']
        con = cb.Contribution(dir_path, read_tables=tables,
                              custom_filenames={'measurements': meas_file, 'specimens': spec_file,
                                                'samples': samp_file, 'sites': site_file})
    # bail out unless all required tables are present (samples may be
    # replaced by an ages table)
    for ftype in ['specimens', 'samples', 'sites']:
        if not con.tables.get(ftype):
            if ftype == 'samples':
                if con.tables.get('ages'):
                    depth_scale = 'age'
                    continue
            print("-W- This function requires a {} file to run.".format(ftype))
            print("    Make sure you include one in your working directory")
            return False, "missing required file type: {}".format(ftype)
    # propagate needed values
    con.propagate_cols(['core_depth'], 'samples', 'sites')
    con.propagate_location_to_specimens()
    # get data read in
    isbulk = 0  # tests if there are bulk susceptibility measurements
    ani_file = spec_file
    SampData = con.tables['samples'].df
    AniData = con.tables['specimens'].df
    # add sample into specimens (AniData)
    AniData = pd.merge(
        AniData, SampData[['sample', depth_scale]], how='inner', on='sample')
    # trim down AniData
    # drop rows with a missing/empty depth value
    cond = AniData[depth_scale].astype(bool)
    AniData = AniData[cond]
    # NOTE(review): the conditions below look swapped -- the dmin check
    # applies the dmax bound and vice versa; confirm intent before relying
    # on single-sided depth limits
    if dmin != -1:
        AniData = AniData[AniData[depth_scale] < dmax]
    if dmax != -1:
        AniData = AniData[AniData[depth_scale] > dmin]
    AniData['core_depth'] = AniData[depth_scale]
    if not age_file:
        Samps = con.tables['samples'].convert_to_pmag_data_list()
    else:
        con.add_magic_table(dtype='ages', fname=age_file)
        Samps = con.tables['ages'].convert_to_pmag_data_list()
        # get age unit
        age_unit = con.tables['ages'].df['age_unit'][0]
        # propagate ages down to sample level
    for s in Samps:
        # change to upper case for every sample name
        s['sample'] = s['sample'].upper()
    if 'measurements' in con.tables:
        isbulk = 1
        Meas = con.tables['measurements'].df  # convert_to_pmag_data_list()
    if isbulk:
        # keep only measurements that have both a specimen name and a
        # bulk volume susceptibility value
        Meas = Meas[Meas['specimen'].astype('bool')]
        Meas = Meas[Meas['susc_chi_volume'].astype(bool)]
        # add core_depth into Measurements dataframe
        Meas = pd.merge(Meas[['susc_chi_volume', 'specimen']], AniData[[
                        'specimen', 'core_depth']], how='inner', on='specimen')
        Bulks = list(Meas['susc_chi_volume'] * 1e6)  # convert to micro-SI
        BulkDepths = list(Meas['core_depth'])
    else:
        Bulks, BulkDepths = [], []
    # now turn Data from pandas dataframe to a list of dicts
    Data = list(AniData.T.apply(dict))
    if len(Bulks) > 0:  # set min and max bulk values
        bmin = min(Bulks)
        bmax = max(Bulks)
    xlab = "Depth (m)"
    #
    if len(Data) > 0:
        # figure title location: prefer the specimen record, fall back to sites
        location = Data[0].get('location', 'unknown')
        if cb.is_null(location):
            location = 'unknown'
            try:
                location = con.tables['sites'].df['location'][0]
            except KeyError:
                pass
    else:
        return False, 'no data to plot'
    # collect the data for plotting tau V3_inc and V1_dec
    Depths, Tau1, Tau2, Tau3, V3Incs, P, V1Decs = [], [], [], [], [], [], []
    F23s = []
    Axs = []  # collect the plot ids
    if len(Bulks) > 0:
        pcol += 1
    Data = pmag.get_dictitem(Data, 'aniso_s', '', 'not_null')
    # get all the s1 values from Data as floats
    aniso_s = pmag.get_dictkey(Data, 'aniso_s', '')
    # aniso_s is a colon-delimited string of the six tensor elements
    aniso_s = [a.split(':') for a in aniso_s if a is not None]
    #print('aniso_s', aniso_s)
    s1 = [float(a[0]) for a in aniso_s]
    s2 = [float(a[1]) for a in aniso_s]
    s3 = [float(a[2]) for a in aniso_s]
    s4 = [float(a[3]) for a in aniso_s]
    s5 = [float(a[4]) for a in aniso_s]
    s6 = [float(a[5]) for a in aniso_s]
    # we are good with s1 - s2
    nmeas = pmag.get_dictkey(Data, 'aniso_s_n_measurements', 'int')
    sigma = pmag.get_dictkey(Data, 'aniso_s_sigma', 'f')
    Depths = pmag.get_dictkey(Data, 'core_depth', 'f')
    # Ss=np.array([s1,s4,s5,s4,s2,s6,s5,s6,s3]).transpose() # make an array
    Ss = np.array([s1, s2, s3, s4, s5, s6]).transpose()  # make an array
    # Ts=np.reshape(Ss,(len(Ss),3,-1)) # and re-shape to be n-length array of
    # 3x3 sub-arrays
    for k in range(len(Depths)):
        # tau,Evecs= pmag.tauV(Ts[k]) # get the sorted eigenvalues and eigenvectors
        # v3=pmag.cart2dir(Evecs[2])[1] # convert to inclination of the minimum
        # eigenvector
        # fpars holds eigenvalues (t1..t3), eigenvector directions
        # (v1_dec, v3_inc, ...) and the F23 statistic for this specimen
        fpars = pmag.dohext(nmeas[k] - 6, sigma[k], Ss[k])
        V3Incs.append(fpars['v3_inc'])
        V1Decs.append(fpars['v1_dec'])
        Tau1.append(fpars['t1'])
        Tau2.append(fpars['t2'])
        Tau3.append(fpars['t3'])
        P.append(old_div(Tau1[-1], Tau3[-1]))  # anisotropy degree tau1/tau3
        F23s.append(fpars['F23'])
    if len(Depths) > 0:
        if dmax == -1:
            # no explicit limits: span the full depth range of the data
            dmax = max(Depths)
            dmin = min(Depths)
        # smallest positive minimum eigenvalue sets the lower axis bound
        tau_min = 1
        for t in Tau3:
            if t > 0 and t < tau_min:
                tau_min = t
        tau_max = max(Tau1)
        # tau_min=min(Tau3)
        P_max = max(P)
        P_min = min(P)
        # dmax=dmax+.05*dmax
        # dmin=dmin-.05*dmax
        main_plot = plt.figure(1, figsize=(11, 7))  # make the figure
        # main_plot = plt.figure(1, figsize=(10, 8)) # make the figure
        version_num = pmag.get_version()
        plt.figtext(.02, .01, version_num)  # attach the pmagpy version number
        ax = plt.subplot(1, pcol, 1)  # make the first column
        Axs.append(ax)
        ax.plot(Tau1, Depths, 'rs')
        ax.plot(Tau2, Depths, 'b^')
        ax.plot(Tau3, Depths, 'ko')
        if sum_file:
            # dotted horizontal lines at the core-top depths from the summary csv
            for depth in depths:
                if depth >= dmin and depth < dmax:
                    plt.axhline(depth,color='blue',linestyle='dotted')
        # widen the eigenvalue axis to at least [.3, .36]
        if tau_min>.3: tau_min=.3
        if tau_max<.36: tau_max=.36
        # y-limits given as [dmax, dmin] so depth increases downward
        ax.axis([tau_min, tau_max, dmax, dmin])
        ax.set_xlabel('Eigenvalues')
        if depth_scale == 'core_depth':
            ax.set_ylabel('Depth (mbsf)')
        elif depth_scale == 'age':
            ax.set_ylabel('Age (' + age_unit + ')')
        else:
            ax.set_ylabel('Depth (mcd)')
        ax2 = plt.subplot(1, pcol, 2)  # make the second column
        ax2.yaxis.set_major_locator(plt.NullLocator())
        ax2.plot(P, Depths, 'rs')
        ax2.axis([P_min, P_max, dmax, dmin])
        ax2.set_xlabel('P')
        ax2.set_title(location)
        if sum_file:
            for depth in depths:
                if depth >= dmin and depth < dmax:
                    plt.axhline(depth,color='blue',linestyle='dotted')
        Axs.append(ax2)
        ax3 = plt.subplot(1, pcol, 3)
        Axs.append(ax3)
        ax3.plot(V3Incs, Depths, 'ko')
        ax3.axis([0, 90, dmax, dmin])
        ax3.set_xlabel('V3 Inclination')
        ax3.yaxis.set_major_locator(plt.NullLocator())
        if sum_file:
            for depth in depths:
                if depth >= dmin and depth < dmax:
                    plt.axhline(depth,color='blue',linestyle='dotted')
        ax4 = plt.subplot(1, np.abs(pcol), 4)
        Axs.append(ax4)
        ax4.plot(V1Decs, Depths, 'rs')
        ax4.axis([0, 360, dmax, dmin])
        ax4.set_xlabel('V1 Declination')
        ax4.yaxis.set_major_locator(plt.NullLocator())
        if sum_file:
            for depth in depths:
                if depth >= dmin and depth < dmax:
                    plt.axhline(depth,color='blue',linestyle='dotted')
        # ax5=plt.subplot(1,np.abs(pcol),5)
        # Axs.append(ax5)
        # ax5.plot(F23s,Depths,'rs')
        # bounds=ax5.axis()
        # ax5.axis([bounds[0],bounds[1],dmax,dmin])
        # ax5.set_xlabel('F_23')
        # ax5.semilogx()
        # if sum_file:
        #    for core in Cores:
        #         depth=float(core[core_depth_key])
        #         if depth>=dmin and depth<=dmax:
        #            plt.plot([bounds[0],bounds[1]],[depth,depth],'b--')
        #            if pcol==5 and label==1:plt.text(bounds[1],depth+tint,core[core_label_key])
        # if pcol==6:
        # fifth column only when bulk susceptibility measurements were found
        if pcol == 5:
            # ax6=plt.subplot(1,pcol,6)
            ax6 = plt.subplot(1, pcol, 5)
            Axs.append(ax6)
            ax6.plot(Bulks, BulkDepths, 'bo')
            ax6.axis([bmin - 1, 1.1 * bmax, dmax, dmin])
            ax6.set_xlabel('Bulk Susc. (uSI)')
            ax6.yaxis.set_major_locator(plt.NullLocator())
            if sum_file:
                for depth in depths:
                    if depth >= dmin and depth < dmax:
                        plt.axhline(depth,color='blue',linestyle='dotted')
        for x in Axs:
            # this makes the x-tick labels more reasonable - they were
            # overcrowded using the defaults
            pmagplotlib.delticks(x)
        fig_name = location + '_ani_depthplot.' + fmt
        return main_plot, [fig_name]
    else:
        return False, "No data to plot"
"def",
"ani_depthplot",
"(",
"spec_file",
"=",
"'specimens.txt'",
",",
"samp_file",
"=",
"'samples.txt'",
",",
"meas_file",
"=",
"'measurements.txt'",
",",
"site_file",
"=",
"'sites.txt'",
",",
"age_file",
"=",
"\"\"",
",",
"sum_file",
"=",
"\"\"",
",",
"fmt",
"=",
"'svg'",
",",
"dmin",
"=",
"-",
"1",
",",
"dmax",
"=",
"-",
"1",
",",
"depth_scale",
"=",
"'core_depth'",
",",
"dir_path",
"=",
"'.'",
",",
"contribution",
"=",
"None",
")",
":",
"if",
"depth_scale",
"==",
"'sample_core_depth'",
":",
"depth_scale",
"=",
"'core_depth'",
"if",
"depth_scale",
"==",
"'sample_composite_depth'",
":",
"depth_scale",
"=",
"'composite_depth'",
"pcol",
"=",
"4",
"tint",
"=",
"9",
"plots",
"=",
"0",
"dmin",
",",
"dmax",
"=",
"float",
"(",
"dmin",
")",
",",
"float",
"(",
"dmax",
")",
"# if contribution object is not provided, read in data from files",
"if",
"isinstance",
"(",
"contribution",
",",
"cb",
".",
"Contribution",
")",
":",
"con",
"=",
"contribution",
"else",
":",
"# format files to use full path",
"meas_file",
"=",
"pmag",
".",
"resolve_file_name",
"(",
"meas_file",
",",
"dir_path",
")",
"spec_file",
"=",
"pmag",
".",
"resolve_file_name",
"(",
"spec_file",
",",
"dir_path",
")",
"samp_file",
"=",
"pmag",
".",
"resolve_file_name",
"(",
"samp_file",
",",
"dir_path",
")",
"site_file",
"=",
"pmag",
".",
"resolve_file_name",
"(",
"site_file",
",",
"dir_path",
")",
"if",
"age_file",
":",
"age_file",
"=",
"pmag",
".",
"resolve_file_name",
"(",
"age_file",
",",
"dir_path",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"age_file",
")",
":",
"print",
"(",
"'Warning: you have provided an invalid age file. Attempting to use sample file instead'",
")",
"age_file",
"=",
"None",
"depth_scale",
"=",
"'core_depth'",
"else",
":",
"samp_file",
"=",
"age_file",
"depth_scale",
"=",
"'age'",
"print",
"(",
"'Warning: you have provided an ages format file, which will take precedence over samples'",
")",
"samp_file",
"=",
"pmag",
".",
"resolve_file_name",
"(",
"samp_file",
",",
"dir_path",
")",
"label",
"=",
"1",
"if",
"sum_file",
":",
"sum_file",
"=",
"pmag",
".",
"resolve_file_name",
"(",
"sum_file",
",",
"dir_path",
")",
"core_df",
"=",
"pd",
".",
"read_csv",
"(",
"sum_file",
")",
"depths",
"=",
"core_df",
"[",
"'Top depth cored CSF (m)'",
"]",
".",
"values",
"# contribution",
"dir_path",
"=",
"os",
".",
"path",
".",
"split",
"(",
"spec_file",
")",
"[",
"0",
"]",
"tables",
"=",
"[",
"'measurements'",
",",
"'specimens'",
",",
"'samples'",
",",
"'sites'",
"]",
"con",
"=",
"cb",
".",
"Contribution",
"(",
"dir_path",
",",
"read_tables",
"=",
"tables",
",",
"custom_filenames",
"=",
"{",
"'measurements'",
":",
"meas_file",
",",
"'specimens'",
":",
"spec_file",
",",
"'samples'",
":",
"samp_file",
",",
"'sites'",
":",
"site_file",
"}",
")",
"for",
"ftype",
"in",
"[",
"'specimens'",
",",
"'samples'",
",",
"'sites'",
"]",
":",
"if",
"not",
"con",
".",
"tables",
".",
"get",
"(",
"ftype",
")",
":",
"if",
"ftype",
"==",
"'samples'",
":",
"if",
"con",
".",
"tables",
".",
"get",
"(",
"'ages'",
")",
":",
"depth_scale",
"=",
"'age'",
"continue",
"print",
"(",
"\"-W- This function requires a {} file to run.\"",
".",
"format",
"(",
"ftype",
")",
")",
"print",
"(",
"\" Make sure you include one in your working directory\"",
")",
"return",
"False",
",",
"\"missing required file type: {}\"",
".",
"format",
"(",
"ftype",
")",
"# propagate needed values",
"con",
".",
"propagate_cols",
"(",
"[",
"'core_depth'",
"]",
",",
"'samples'",
",",
"'sites'",
")",
"con",
".",
"propagate_location_to_specimens",
"(",
")",
"# get data read in",
"isbulk",
"=",
"0",
"# tests if there are bulk susceptibility measurements",
"ani_file",
"=",
"spec_file",
"SampData",
"=",
"con",
".",
"tables",
"[",
"'samples'",
"]",
".",
"df",
"AniData",
"=",
"con",
".",
"tables",
"[",
"'specimens'",
"]",
".",
"df",
"# add sample into specimens (AniData)",
"AniData",
"=",
"pd",
".",
"merge",
"(",
"AniData",
",",
"SampData",
"[",
"[",
"'sample'",
",",
"depth_scale",
"]",
"]",
",",
"how",
"=",
"'inner'",
",",
"on",
"=",
"'sample'",
")",
"# trim down AniData",
"cond",
"=",
"AniData",
"[",
"depth_scale",
"]",
".",
"astype",
"(",
"bool",
")",
"AniData",
"=",
"AniData",
"[",
"cond",
"]",
"if",
"dmin",
"!=",
"-",
"1",
":",
"AniData",
"=",
"AniData",
"[",
"AniData",
"[",
"depth_scale",
"]",
"<",
"dmax",
"]",
"if",
"dmax",
"!=",
"-",
"1",
":",
"AniData",
"=",
"AniData",
"[",
"AniData",
"[",
"depth_scale",
"]",
">",
"dmin",
"]",
"AniData",
"[",
"'core_depth'",
"]",
"=",
"AniData",
"[",
"depth_scale",
"]",
"if",
"not",
"age_file",
":",
"Samps",
"=",
"con",
".",
"tables",
"[",
"'samples'",
"]",
".",
"convert_to_pmag_data_list",
"(",
")",
"else",
":",
"con",
".",
"add_magic_table",
"(",
"dtype",
"=",
"'ages'",
",",
"fname",
"=",
"age_file",
")",
"Samps",
"=",
"con",
".",
"tables",
"[",
"'ages'",
"]",
".",
"convert_to_pmag_data_list",
"(",
")",
"# get age unit",
"age_unit",
"=",
"con",
".",
"tables",
"[",
"'ages'",
"]",
".",
"df",
"[",
"'age_unit'",
"]",
"[",
"0",
"]",
"# propagate ages down to sample level",
"for",
"s",
"in",
"Samps",
":",
"# change to upper case for every sample name",
"s",
"[",
"'sample'",
"]",
"=",
"s",
"[",
"'sample'",
"]",
".",
"upper",
"(",
")",
"if",
"'measurements'",
"in",
"con",
".",
"tables",
":",
"isbulk",
"=",
"1",
"Meas",
"=",
"con",
".",
"tables",
"[",
"'measurements'",
"]",
".",
"df",
"# convert_to_pmag_data_list()",
"if",
"isbulk",
":",
"Meas",
"=",
"Meas",
"[",
"Meas",
"[",
"'specimen'",
"]",
".",
"astype",
"(",
"'bool'",
")",
"]",
"Meas",
"=",
"Meas",
"[",
"Meas",
"[",
"'susc_chi_volume'",
"]",
".",
"astype",
"(",
"bool",
")",
"]",
"# add core_depth into Measurements dataframe",
"Meas",
"=",
"pd",
".",
"merge",
"(",
"Meas",
"[",
"[",
"'susc_chi_volume'",
",",
"'specimen'",
"]",
"]",
",",
"AniData",
"[",
"[",
"'specimen'",
",",
"'core_depth'",
"]",
"]",
",",
"how",
"=",
"'inner'",
",",
"on",
"=",
"'specimen'",
")",
"Bulks",
"=",
"list",
"(",
"Meas",
"[",
"'susc_chi_volume'",
"]",
"*",
"1e6",
")",
"BulkDepths",
"=",
"list",
"(",
"Meas",
"[",
"'core_depth'",
"]",
")",
"else",
":",
"Bulks",
",",
"BulkDepths",
"=",
"[",
"]",
",",
"[",
"]",
"# now turn Data from pandas dataframe to a list of dicts",
"Data",
"=",
"list",
"(",
"AniData",
".",
"T",
".",
"apply",
"(",
"dict",
")",
")",
"if",
"len",
"(",
"Bulks",
")",
">",
"0",
":",
"# set min and max bulk values",
"bmin",
"=",
"min",
"(",
"Bulks",
")",
"bmax",
"=",
"max",
"(",
"Bulks",
")",
"xlab",
"=",
"\"Depth (m)\"",
"#",
"if",
"len",
"(",
"Data",
")",
">",
"0",
":",
"location",
"=",
"Data",
"[",
"0",
"]",
".",
"get",
"(",
"'location'",
",",
"'unknown'",
")",
"if",
"cb",
".",
"is_null",
"(",
"location",
")",
":",
"location",
"=",
"'unknown'",
"try",
":",
"location",
"=",
"con",
".",
"tables",
"[",
"'sites'",
"]",
".",
"df",
"[",
"'location'",
"]",
"[",
"0",
"]",
"except",
"KeyError",
":",
"pass",
"else",
":",
"return",
"False",
",",
"'no data to plot'",
"# collect the data for plotting tau V3_inc and V1_dec",
"Depths",
",",
"Tau1",
",",
"Tau2",
",",
"Tau3",
",",
"V3Incs",
",",
"P",
",",
"V1Decs",
"=",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
"F23s",
"=",
"[",
"]",
"Axs",
"=",
"[",
"]",
"# collect the plot ids",
"if",
"len",
"(",
"Bulks",
")",
">",
"0",
":",
"pcol",
"+=",
"1",
"Data",
"=",
"pmag",
".",
"get_dictitem",
"(",
"Data",
",",
"'aniso_s'",
",",
"''",
",",
"'not_null'",
")",
"# get all the s1 values from Data as floats",
"aniso_s",
"=",
"pmag",
".",
"get_dictkey",
"(",
"Data",
",",
"'aniso_s'",
",",
"''",
")",
"aniso_s",
"=",
"[",
"a",
".",
"split",
"(",
"':'",
")",
"for",
"a",
"in",
"aniso_s",
"if",
"a",
"is",
"not",
"None",
"]",
"#print('aniso_s', aniso_s)",
"s1",
"=",
"[",
"float",
"(",
"a",
"[",
"0",
"]",
")",
"for",
"a",
"in",
"aniso_s",
"]",
"s2",
"=",
"[",
"float",
"(",
"a",
"[",
"1",
"]",
")",
"for",
"a",
"in",
"aniso_s",
"]",
"s3",
"=",
"[",
"float",
"(",
"a",
"[",
"2",
"]",
")",
"for",
"a",
"in",
"aniso_s",
"]",
"s4",
"=",
"[",
"float",
"(",
"a",
"[",
"3",
"]",
")",
"for",
"a",
"in",
"aniso_s",
"]",
"s5",
"=",
"[",
"float",
"(",
"a",
"[",
"4",
"]",
")",
"for",
"a",
"in",
"aniso_s",
"]",
"s6",
"=",
"[",
"float",
"(",
"a",
"[",
"5",
"]",
")",
"for",
"a",
"in",
"aniso_s",
"]",
"# we are good with s1 - s2",
"nmeas",
"=",
"pmag",
".",
"get_dictkey",
"(",
"Data",
",",
"'aniso_s_n_measurements'",
",",
"'int'",
")",
"sigma",
"=",
"pmag",
".",
"get_dictkey",
"(",
"Data",
",",
"'aniso_s_sigma'",
",",
"'f'",
")",
"Depths",
"=",
"pmag",
".",
"get_dictkey",
"(",
"Data",
",",
"'core_depth'",
",",
"'f'",
")",
"# Ss=np.array([s1,s4,s5,s4,s2,s6,s5,s6,s3]).transpose() # make an array",
"Ss",
"=",
"np",
".",
"array",
"(",
"[",
"s1",
",",
"s2",
",",
"s3",
",",
"s4",
",",
"s5",
",",
"s6",
"]",
")",
".",
"transpose",
"(",
")",
"# make an array",
"# Ts=np.reshape(Ss,(len(Ss),3,-1)) # and re-shape to be n-length array of",
"# 3x3 sub-arrays",
"for",
"k",
"in",
"range",
"(",
"len",
"(",
"Depths",
")",
")",
":",
"# tau,Evecs= pmag.tauV(Ts[k]) # get the sorted eigenvalues and eigenvectors",
"# v3=pmag.cart2dir(Evecs[2])[1] # convert to inclination of the minimum",
"# eigenvector",
"fpars",
"=",
"pmag",
".",
"dohext",
"(",
"nmeas",
"[",
"k",
"]",
"-",
"6",
",",
"sigma",
"[",
"k",
"]",
",",
"Ss",
"[",
"k",
"]",
")",
"V3Incs",
".",
"append",
"(",
"fpars",
"[",
"'v3_inc'",
"]",
")",
"V1Decs",
".",
"append",
"(",
"fpars",
"[",
"'v1_dec'",
"]",
")",
"Tau1",
".",
"append",
"(",
"fpars",
"[",
"'t1'",
"]",
")",
"Tau2",
".",
"append",
"(",
"fpars",
"[",
"'t2'",
"]",
")",
"Tau3",
".",
"append",
"(",
"fpars",
"[",
"'t3'",
"]",
")",
"P",
".",
"append",
"(",
"old_div",
"(",
"Tau1",
"[",
"-",
"1",
"]",
",",
"Tau3",
"[",
"-",
"1",
"]",
")",
")",
"F23s",
".",
"append",
"(",
"fpars",
"[",
"'F23'",
"]",
")",
"if",
"len",
"(",
"Depths",
")",
">",
"0",
":",
"if",
"dmax",
"==",
"-",
"1",
":",
"dmax",
"=",
"max",
"(",
"Depths",
")",
"dmin",
"=",
"min",
"(",
"Depths",
")",
"tau_min",
"=",
"1",
"for",
"t",
"in",
"Tau3",
":",
"if",
"t",
">",
"0",
"and",
"t",
"<",
"tau_min",
":",
"tau_min",
"=",
"t",
"tau_max",
"=",
"max",
"(",
"Tau1",
")",
"# tau_min=min(Tau3)",
"P_max",
"=",
"max",
"(",
"P",
")",
"P_min",
"=",
"min",
"(",
"P",
")",
"# dmax=dmax+.05*dmax",
"# dmin=dmin-.05*dmax",
"main_plot",
"=",
"plt",
".",
"figure",
"(",
"1",
",",
"figsize",
"=",
"(",
"11",
",",
"7",
")",
")",
"# make the figure",
"# main_plot = plt.figure(1, figsize=(10, 8)) # make the figure",
"version_num",
"=",
"pmag",
".",
"get_version",
"(",
")",
"plt",
".",
"figtext",
"(",
".02",
",",
".01",
",",
"version_num",
")",
"# attach the pmagpy version number",
"ax",
"=",
"plt",
".",
"subplot",
"(",
"1",
",",
"pcol",
",",
"1",
")",
"# make the first column",
"Axs",
".",
"append",
"(",
"ax",
")",
"ax",
".",
"plot",
"(",
"Tau1",
",",
"Depths",
",",
"'rs'",
")",
"ax",
".",
"plot",
"(",
"Tau2",
",",
"Depths",
",",
"'b^'",
")",
"ax",
".",
"plot",
"(",
"Tau3",
",",
"Depths",
",",
"'ko'",
")",
"if",
"sum_file",
":",
"for",
"depth",
"in",
"depths",
":",
"if",
"depth",
">=",
"dmin",
"and",
"depth",
"<",
"dmax",
":",
"plt",
".",
"axhline",
"(",
"depth",
",",
"color",
"=",
"'blue'",
",",
"linestyle",
"=",
"'dotted'",
")",
"if",
"tau_min",
">",
".3",
":",
"tau_min",
"=",
".3",
"if",
"tau_max",
"<",
".36",
":",
"tau_max",
"=",
".36",
"ax",
".",
"axis",
"(",
"[",
"tau_min",
",",
"tau_max",
",",
"dmax",
",",
"dmin",
"]",
")",
"ax",
".",
"set_xlabel",
"(",
"'Eigenvalues'",
")",
"if",
"depth_scale",
"==",
"'core_depth'",
":",
"ax",
".",
"set_ylabel",
"(",
"'Depth (mbsf)'",
")",
"elif",
"depth_scale",
"==",
"'age'",
":",
"ax",
".",
"set_ylabel",
"(",
"'Age ('",
"+",
"age_unit",
"+",
"')'",
")",
"else",
":",
"ax",
".",
"set_ylabel",
"(",
"'Depth (mcd)'",
")",
"ax2",
"=",
"plt",
".",
"subplot",
"(",
"1",
",",
"pcol",
",",
"2",
")",
"# make the second column",
"ax2",
".",
"yaxis",
".",
"set_major_locator",
"(",
"plt",
".",
"NullLocator",
"(",
")",
")",
"ax2",
".",
"plot",
"(",
"P",
",",
"Depths",
",",
"'rs'",
")",
"ax2",
".",
"axis",
"(",
"[",
"P_min",
",",
"P_max",
",",
"dmax",
",",
"dmin",
"]",
")",
"ax2",
".",
"set_xlabel",
"(",
"'P'",
")",
"ax2",
".",
"set_title",
"(",
"location",
")",
"if",
"sum_file",
":",
"for",
"depth",
"in",
"depths",
":",
"if",
"depth",
">=",
"dmin",
"and",
"depth",
"<",
"dmax",
":",
"plt",
".",
"axhline",
"(",
"depth",
",",
"color",
"=",
"'blue'",
",",
"linestyle",
"=",
"'dotted'",
")",
"Axs",
".",
"append",
"(",
"ax2",
")",
"ax3",
"=",
"plt",
".",
"subplot",
"(",
"1",
",",
"pcol",
",",
"3",
")",
"Axs",
".",
"append",
"(",
"ax3",
")",
"ax3",
".",
"plot",
"(",
"V3Incs",
",",
"Depths",
",",
"'ko'",
")",
"ax3",
".",
"axis",
"(",
"[",
"0",
",",
"90",
",",
"dmax",
",",
"dmin",
"]",
")",
"ax3",
".",
"set_xlabel",
"(",
"'V3 Inclination'",
")",
"ax3",
".",
"yaxis",
".",
"set_major_locator",
"(",
"plt",
".",
"NullLocator",
"(",
")",
")",
"if",
"sum_file",
":",
"for",
"depth",
"in",
"depths",
":",
"if",
"depth",
">=",
"dmin",
"and",
"depth",
"<",
"dmax",
":",
"plt",
".",
"axhline",
"(",
"depth",
",",
"color",
"=",
"'blue'",
",",
"linestyle",
"=",
"'dotted'",
")",
"ax4",
"=",
"plt",
".",
"subplot",
"(",
"1",
",",
"np",
".",
"abs",
"(",
"pcol",
")",
",",
"4",
")",
"Axs",
".",
"append",
"(",
"ax4",
")",
"ax4",
".",
"plot",
"(",
"V1Decs",
",",
"Depths",
",",
"'rs'",
")",
"ax4",
".",
"axis",
"(",
"[",
"0",
",",
"360",
",",
"dmax",
",",
"dmin",
"]",
")",
"ax4",
".",
"set_xlabel",
"(",
"'V1 Declination'",
")",
"ax4",
".",
"yaxis",
".",
"set_major_locator",
"(",
"plt",
".",
"NullLocator",
"(",
")",
")",
"if",
"sum_file",
":",
"for",
"depth",
"in",
"depths",
":",
"if",
"depth",
">=",
"dmin",
"and",
"depth",
"<",
"dmax",
":",
"plt",
".",
"axhline",
"(",
"depth",
",",
"color",
"=",
"'blue'",
",",
"linestyle",
"=",
"'dotted'",
")",
"# ax5=plt.subplot(1,np.abs(pcol),5)",
"# Axs.append(ax5)",
"# ax5.plot(F23s,Depths,'rs')",
"# bounds=ax5.axis()",
"# ax5.axis([bounds[0],bounds[1],dmax,dmin])",
"# ax5.set_xlabel('F_23')",
"# ax5.semilogx()",
"# if sum_file:",
"# for core in Cores:",
"# depth=float(core[core_depth_key])",
"# if depth>=dmin and depth<=dmax:",
"# plt.plot([bounds[0],bounds[1]],[depth,depth],'b--')",
"# if pcol==5 and label==1:plt.text(bounds[1],depth+tint,core[core_label_key])",
"# if pcol==6:",
"if",
"pcol",
"==",
"5",
":",
"# ax6=plt.subplot(1,pcol,6)",
"ax6",
"=",
"plt",
".",
"subplot",
"(",
"1",
",",
"pcol",
",",
"5",
")",
"Axs",
".",
"append",
"(",
"ax6",
")",
"ax6",
".",
"plot",
"(",
"Bulks",
",",
"BulkDepths",
",",
"'bo'",
")",
"ax6",
".",
"axis",
"(",
"[",
"bmin",
"-",
"1",
",",
"1.1",
"*",
"bmax",
",",
"dmax",
",",
"dmin",
"]",
")",
"ax6",
".",
"set_xlabel",
"(",
"'Bulk Susc. (uSI)'",
")",
"ax6",
".",
"yaxis",
".",
"set_major_locator",
"(",
"plt",
".",
"NullLocator",
"(",
")",
")",
"if",
"sum_file",
":",
"for",
"depth",
"in",
"depths",
":",
"if",
"depth",
">=",
"dmin",
"and",
"depth",
"<",
"dmax",
":",
"plt",
".",
"axhline",
"(",
"depth",
",",
"color",
"=",
"'blue'",
",",
"linestyle",
"=",
"'dotted'",
")",
"for",
"x",
"in",
"Axs",
":",
"# this makes the x-tick labels more reasonable - they were",
"# overcrowded using the defaults",
"pmagplotlib",
".",
"delticks",
"(",
"x",
")",
"fig_name",
"=",
"location",
"+",
"'_ani_depthplot.'",
"+",
"fmt",
"return",
"main_plot",
",",
"[",
"fig_name",
"]",
"else",
":",
"return",
"False",
",",
"\"No data to plot\""
] | 38.464968 | 18.248408 |
def get_enrollments_for_section_by_sis_id(self, sis_section_id, params=None):
    """
    Return a list of all enrollments for the passed section sis id.

    :param sis_section_id: SIS identifier of the section to look up.
    :param params: optional dict of extra request parameters, passed
        through to get_enrollments_for_section.
    """
    # Avoid a mutable default argument: a shared ``{}`` instance would
    # persist across calls if any callee ever mutated it.
    if params is None:
        params = {}
    return self.get_enrollments_for_section(
        self._sis_id(sis_section_id, sis_field="section"), params)
"def",
"get_enrollments_for_section_by_sis_id",
"(",
"self",
",",
"sis_section_id",
",",
"params",
"=",
"{",
"}",
")",
":",
"return",
"self",
".",
"get_enrollments_for_section",
"(",
"self",
".",
"_sis_id",
"(",
"sis_section_id",
",",
"sis_field",
"=",
"\"section\"",
")",
",",
"params",
")"
] | 47.666667 | 17.333333 |
def lrem(self, key, count, value):
    """Removes the first count occurrences of elements equal to value
    from the list stored at key.

    :raises TypeError: if count is not int
    """
    # Validate eagerly: LREM requires an integer count.
    if isinstance(count, int):
        return self.execute(b'LREM', key, count, value)
    raise TypeError("count argument must be int")
"def",
"lrem",
"(",
"self",
",",
"key",
",",
"count",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"count",
",",
"int",
")",
":",
"raise",
"TypeError",
"(",
"\"count argument must be int\"",
")",
"return",
"self",
".",
"execute",
"(",
"b'LREM'",
",",
"key",
",",
"count",
",",
"value",
")"
] | 38.888889 | 10 |
def unjoin_domain(username=None,
                  password=None,
                  domain=None,
                  workgroup='WORKGROUP',
                  disable=False,
                  restart=False):
    # pylint: disable=anomalous-backslash-in-string
    '''
    Unjoin a computer from an Active Directory Domain. Requires a restart.
    Args:
        username (str):
            Username of an account which is authorized to manage computer
            accounts on the domain. Needs to be a fully qualified name like
            ``user@domain.tld`` or ``domain.tld\\user``. If the domain is not
            specified, the passed domain will be used. If the computer account
            doesn't need to be disabled after the computer is unjoined, this can
            be ``None``.
        password (str):
            The password of the specified user
        domain (str):
            The domain from which to unjoin the computer. Can be ``None``
        workgroup (str):
            The workgroup to join the computer to. Default is ``WORKGROUP``
            .. versionadded:: 2015.8.2/2015.5.7
        disable (bool):
            ``True`` to disable the computer account in Active Directory.
            Default is ``False``
        restart (bool):
            ``True`` will restart the computer after successful unjoin. Default
            is ``False``
            .. versionadded:: 2015.8.2/2015.5.7
    Returns:
        dict: Returns a dictionary if successful, otherwise ``False``
    CLI Example:
    .. code-block:: bash
        salt 'minion-id' system.unjoin_domain restart=True
        salt 'minion-id' system.unjoin_domain username='unjoinuser' \\
                         password='unjoinpassword' disable=True \\
                         restart=True
    '''
    # pylint: enable=anomalous-backslash-in-string
    # On Python 2, make sure the WMI call receives unicode strings
    if six.PY2:
        username = _to_unicode(username)
        password = _to_unicode(password)
        domain = _to_unicode(domain)
    status = get_domain_workgroup()
    # Short-circuit: nothing to do if already a member of the target workgroup
    if 'Workgroup' in status:
        if status['Workgroup'] == workgroup:
            return 'Already joined to {0}'.format(workgroup)
    # Qualify a bare username with the domain (user@domain.tld); without a
    # domain we cannot build a fully qualified account name
    if username and '\\' not in username and '@' not in username:
        if domain:
            username = '{0}@{1}'.format(username, domain)
        else:
            return 'Must specify domain if not supplied in username'
    if username and password is None:
        return 'Must specify a password if you pass a username'
    # NetUnjoinDomain flag: also disable the computer account in AD
    NETSETUP_ACCT_DELETE = 0x4  # pylint: disable=invalid-name
    unjoin_options = 0x0
    if disable:
        unjoin_options |= NETSETUP_ACCT_DELETE
    with salt.utils.winapi.Com():
        conn = wmi.WMI()
        comp = conn.Win32_ComputerSystem()[0]
        err = comp.UnjoinDomainOrWorkgroup(Password=password,
                                           UserName=username,
                                           FUnjoinOptions=unjoin_options)
        # you have to do this because UnjoinDomainOrWorkgroup returns a
        # strangely formatted value that looks like (0,)
        if not err[0]:
            # unjoin succeeded; now join the requested workgroup
            err = comp.JoinDomainOrWorkgroup(Name=workgroup)
            if not err[0]:
                ret = {'Workgroup': workgroup,
                       'Restart': False}
                if restart:
                    # reboot() reports whether the restart was scheduled
                    ret['Restart'] = reboot()
                return ret
            else:
                # joining the workgroup failed after a successful unjoin
                log.error(win32api.FormatMessage(err[0]).rstrip())
                log.error('Failed to join the computer to %s', workgroup)
                return False
        else:
            log.error(win32api.FormatMessage(err[0]).rstrip())
            log.error('Failed to unjoin computer from %s', status['Domain'])
            return False
"def",
"unjoin_domain",
"(",
"username",
"=",
"None",
",",
"password",
"=",
"None",
",",
"domain",
"=",
"None",
",",
"workgroup",
"=",
"'WORKGROUP'",
",",
"disable",
"=",
"False",
",",
"restart",
"=",
"False",
")",
":",
"# pylint: disable=anomalous-backslash-in-string",
"# pylint: enable=anomalous-backslash-in-string",
"if",
"six",
".",
"PY2",
":",
"username",
"=",
"_to_unicode",
"(",
"username",
")",
"password",
"=",
"_to_unicode",
"(",
"password",
")",
"domain",
"=",
"_to_unicode",
"(",
"domain",
")",
"status",
"=",
"get_domain_workgroup",
"(",
")",
"if",
"'Workgroup'",
"in",
"status",
":",
"if",
"status",
"[",
"'Workgroup'",
"]",
"==",
"workgroup",
":",
"return",
"'Already joined to {0}'",
".",
"format",
"(",
"workgroup",
")",
"if",
"username",
"and",
"'\\\\'",
"not",
"in",
"username",
"and",
"'@'",
"not",
"in",
"username",
":",
"if",
"domain",
":",
"username",
"=",
"'{0}@{1}'",
".",
"format",
"(",
"username",
",",
"domain",
")",
"else",
":",
"return",
"'Must specify domain if not supplied in username'",
"if",
"username",
"and",
"password",
"is",
"None",
":",
"return",
"'Must specify a password if you pass a username'",
"NETSETUP_ACCT_DELETE",
"=",
"0x4",
"# pylint: disable=invalid-name",
"unjoin_options",
"=",
"0x0",
"if",
"disable",
":",
"unjoin_options",
"|=",
"NETSETUP_ACCT_DELETE",
"with",
"salt",
".",
"utils",
".",
"winapi",
".",
"Com",
"(",
")",
":",
"conn",
"=",
"wmi",
".",
"WMI",
"(",
")",
"comp",
"=",
"conn",
".",
"Win32_ComputerSystem",
"(",
")",
"[",
"0",
"]",
"err",
"=",
"comp",
".",
"UnjoinDomainOrWorkgroup",
"(",
"Password",
"=",
"password",
",",
"UserName",
"=",
"username",
",",
"FUnjoinOptions",
"=",
"unjoin_options",
")",
"# you have to do this because UnjoinDomainOrWorkgroup returns a",
"# strangely formatted value that looks like (0,)",
"if",
"not",
"err",
"[",
"0",
"]",
":",
"err",
"=",
"comp",
".",
"JoinDomainOrWorkgroup",
"(",
"Name",
"=",
"workgroup",
")",
"if",
"not",
"err",
"[",
"0",
"]",
":",
"ret",
"=",
"{",
"'Workgroup'",
":",
"workgroup",
",",
"'Restart'",
":",
"False",
"}",
"if",
"restart",
":",
"ret",
"[",
"'Restart'",
"]",
"=",
"reboot",
"(",
")",
"return",
"ret",
"else",
":",
"log",
".",
"error",
"(",
"win32api",
".",
"FormatMessage",
"(",
"err",
"[",
"0",
"]",
")",
".",
"rstrip",
"(",
")",
")",
"log",
".",
"error",
"(",
"'Failed to join the computer to %s'",
",",
"workgroup",
")",
"return",
"False",
"else",
":",
"log",
".",
"error",
"(",
"win32api",
".",
"FormatMessage",
"(",
"err",
"[",
"0",
"]",
")",
".",
"rstrip",
"(",
")",
")",
"log",
".",
"error",
"(",
"'Failed to unjoin computer from %s'",
",",
"status",
"[",
"'Domain'",
"]",
")",
"return",
"False"
] | 33.424528 | 22.971698 |
def get_sds_in_faultset(self, faultSetObj):
"""
Get list of SDS objects attached to a specific ScaleIO Faultset
:param faultSetObj: ScaleIO Faultset object
:rtype: list of SDS in specified Faultset
"""
self.conn.connection._check_login()
response = self.conn.connection._do_get("{}/{}{}/{}".format(self.conn.connection._api_url, 'types/FaultSet::', faultSetObj.id, 'relationships/Sds')).json()
all_sds = []
for sds in response:
all_sds.append(
SIO_SDS.from_dict(sds)
)
return all_sds | [
"def",
"get_sds_in_faultset",
"(",
"self",
",",
"faultSetObj",
")",
":",
"self",
".",
"conn",
".",
"connection",
".",
"_check_login",
"(",
")",
"response",
"=",
"self",
".",
"conn",
".",
"connection",
".",
"_do_get",
"(",
"\"{}/{}{}/{}\"",
".",
"format",
"(",
"self",
".",
"conn",
".",
"connection",
".",
"_api_url",
",",
"'types/FaultSet::'",
",",
"faultSetObj",
".",
"id",
",",
"'relationships/Sds'",
")",
")",
".",
"json",
"(",
")",
"all_sds",
"=",
"[",
"]",
"for",
"sds",
"in",
"response",
":",
"all_sds",
".",
"append",
"(",
"SIO_SDS",
".",
"from_dict",
"(",
"sds",
")",
")",
"return",
"all_sds"
] | 42.142857 | 19.428571 |
def transmute(df, *keep_columns, **kwargs):
"""
Creates columns and then returns those new columns and optionally specified
original columns from the DataFrame.
This works like `mutate`, but designed to discard the original columns used
to create the new ones.
Args:
*keep_columns: Column labels to keep. Can be string, symbolic, or
integer position.
Kwargs:
**kwargs: keys are the names of the new columns, values indicate
what the new column values will be.
Example:
diamonds >> transmute(x_plus_y=X.x + X.y, y_div_z=(X.y / X.z)) >> head(3)
y_div_z x_plus_y
0 1.637860 7.93
1 1.662338 7.73
2 1.761905 8.12
"""
keep_cols = []
for col in flatten(keep_columns):
try:
keep_cols.append(col.name)
except:
if isinstance(col, str):
keep_cols.append(col)
elif isinstance(col, int):
keep_cols.append(df.columns[col])
df = df.assign(**kwargs)
columns = [k for k in kwargs.keys()] + list(keep_cols)
return df[columns] | [
"def",
"transmute",
"(",
"df",
",",
"*",
"keep_columns",
",",
"*",
"*",
"kwargs",
")",
":",
"keep_cols",
"=",
"[",
"]",
"for",
"col",
"in",
"flatten",
"(",
"keep_columns",
")",
":",
"try",
":",
"keep_cols",
".",
"append",
"(",
"col",
".",
"name",
")",
"except",
":",
"if",
"isinstance",
"(",
"col",
",",
"str",
")",
":",
"keep_cols",
".",
"append",
"(",
"col",
")",
"elif",
"isinstance",
"(",
"col",
",",
"int",
")",
":",
"keep_cols",
".",
"append",
"(",
"df",
".",
"columns",
"[",
"col",
"]",
")",
"df",
"=",
"df",
".",
"assign",
"(",
"*",
"*",
"kwargs",
")",
"columns",
"=",
"[",
"k",
"for",
"k",
"in",
"kwargs",
".",
"keys",
"(",
")",
"]",
"+",
"list",
"(",
"keep_cols",
")",
"return",
"df",
"[",
"columns",
"]"
] | 29.473684 | 20.421053 |
def CreateSignatureScanner(cls, specification_store):
"""Creates a signature scanner for format specifications with signatures.
Args:
specification_store (FormatSpecificationStore): format specifications
with signatures.
Returns:
pysigscan.scanner: signature scanner.
"""
scanner_object = pysigscan.scanner()
for format_specification in specification_store.specifications:
for signature in format_specification.signatures:
pattern_offset = signature.offset
if pattern_offset is None:
signature_flags = pysigscan.signature_flags.NO_OFFSET
elif pattern_offset < 0:
pattern_offset *= -1
signature_flags = pysigscan.signature_flags.RELATIVE_FROM_END
else:
signature_flags = pysigscan.signature_flags.RELATIVE_FROM_START
scanner_object.add_signature(
signature.identifier, pattern_offset, signature.pattern,
signature_flags)
return scanner_object | [
"def",
"CreateSignatureScanner",
"(",
"cls",
",",
"specification_store",
")",
":",
"scanner_object",
"=",
"pysigscan",
".",
"scanner",
"(",
")",
"for",
"format_specification",
"in",
"specification_store",
".",
"specifications",
":",
"for",
"signature",
"in",
"format_specification",
".",
"signatures",
":",
"pattern_offset",
"=",
"signature",
".",
"offset",
"if",
"pattern_offset",
"is",
"None",
":",
"signature_flags",
"=",
"pysigscan",
".",
"signature_flags",
".",
"NO_OFFSET",
"elif",
"pattern_offset",
"<",
"0",
":",
"pattern_offset",
"*=",
"-",
"1",
"signature_flags",
"=",
"pysigscan",
".",
"signature_flags",
".",
"RELATIVE_FROM_END",
"else",
":",
"signature_flags",
"=",
"pysigscan",
".",
"signature_flags",
".",
"RELATIVE_FROM_START",
"scanner_object",
".",
"add_signature",
"(",
"signature",
".",
"identifier",
",",
"pattern_offset",
",",
"signature",
".",
"pattern",
",",
"signature_flags",
")",
"return",
"scanner_object"
] | 33.758621 | 20.793103 |
def random(self):
"""
Draws a new value for a stoch conditional on its parents
and returns it.
Raises an error if no 'random' argument was passed to __init__.
"""
if self._random:
# Get current values of parents for use as arguments for _random()
r = self._random(**self.parents.value)
else:
raise AttributeError(
'Stochastic ' +
self.__name__ +
' does not know how to draw its value, see documentation')
if self.shape:
r = np.reshape(r, self.shape)
# Set Stochastic's value to drawn value
if not self.observed:
self.value = r
return r | [
"def",
"random",
"(",
"self",
")",
":",
"if",
"self",
".",
"_random",
":",
"# Get current values of parents for use as arguments for _random()",
"r",
"=",
"self",
".",
"_random",
"(",
"*",
"*",
"self",
".",
"parents",
".",
"value",
")",
"else",
":",
"raise",
"AttributeError",
"(",
"'Stochastic '",
"+",
"self",
".",
"__name__",
"+",
"' does not know how to draw its value, see documentation'",
")",
"if",
"self",
".",
"shape",
":",
"r",
"=",
"np",
".",
"reshape",
"(",
"r",
",",
"self",
".",
"shape",
")",
"# Set Stochastic's value to drawn value",
"if",
"not",
"self",
".",
"observed",
":",
"self",
".",
"value",
"=",
"r",
"return",
"r"
] | 28.48 | 20.8 |
def visit(H, source_node):
"""Executes the 'Visit' algorithm described in the paper:
Giorgio Gallo, Giustino Longo, Stefano Pallottino, Sang Nguyen,
Directed hypergraphs and applications, Discrete Applied Mathematics,
Volume 42, Issues 2-3, 27 April 1993, Pages 177-201, ISSN 0166-218X,
http://dx.doi.org/10.1016/0166-218X(93)90045-P.
(http://www.sciencedirect.com/science/article/pii/0166218X9390045P)
The Visit algorithm begins from a source node and traverses a hyperedge
after any node in the hyperedge's tail has been reached.
:param H: the hypergraph to perform the 'Visit' algorithm on.
:param source_node: the initial node to begin traversal from.
:returns: set -- nodes that were visited in this traversal.
dict -- mapping from each node to the ID of the hyperedge that
preceeded it in this traversal; will map a node to None
if that node wasn't visited or if that node is the source
node.
dict -- mapping from each hyperedge ID to the node that preceeded
it in this traversal.
:raises: TypeError -- Algorithm only applicable to directed hypergraphs
"""
if not isinstance(H, DirectedHypergraph):
raise TypeError("Algorithm only applicable to directed hypergraphs")
node_set = H.get_node_set()
# Pv keeps track of the ID of the hyperedge that directely
# preceeded each node in the traversal
Pv = {node: None for node in node_set}
hyperedge_id_set = H.get_hyperedge_id_set()
# Pe keeps track of the node that directedly preceeded
# each hyperedge in the traversal
Pe = {hyperedge_id: None for hyperedge_id in hyperedge_id_set}
# Explicitly tracks the set of visited nodes
visited_nodes = set([source_node])
Q = Queue()
Q.put(source_node)
while not Q.empty():
current_node = Q.get()
# At current_node, we can traverse each hyperedge in its forward star
for hyperedge_id in H.get_forward_star(current_node):
if Pe[hyperedge_id] is not None:
continue
Pe[hyperedge_id] = current_node
# Traversing a hyperedge in current_node's forward star yields
# the set of head nodes of the hyperedge; visit each head node
for head_node in H.get_hyperedge_head(hyperedge_id):
if head_node in visited_nodes:
continue
Pv[head_node] = hyperedge_id
Q.put(head_node)
visited_nodes.add(head_node)
return visited_nodes, Pv, Pe | [
"def",
"visit",
"(",
"H",
",",
"source_node",
")",
":",
"if",
"not",
"isinstance",
"(",
"H",
",",
"DirectedHypergraph",
")",
":",
"raise",
"TypeError",
"(",
"\"Algorithm only applicable to directed hypergraphs\"",
")",
"node_set",
"=",
"H",
".",
"get_node_set",
"(",
")",
"# Pv keeps track of the ID of the hyperedge that directely",
"# preceeded each node in the traversal",
"Pv",
"=",
"{",
"node",
":",
"None",
"for",
"node",
"in",
"node_set",
"}",
"hyperedge_id_set",
"=",
"H",
".",
"get_hyperedge_id_set",
"(",
")",
"# Pe keeps track of the node that directedly preceeded",
"# each hyperedge in the traversal",
"Pe",
"=",
"{",
"hyperedge_id",
":",
"None",
"for",
"hyperedge_id",
"in",
"hyperedge_id_set",
"}",
"# Explicitly tracks the set of visited nodes",
"visited_nodes",
"=",
"set",
"(",
"[",
"source_node",
"]",
")",
"Q",
"=",
"Queue",
"(",
")",
"Q",
".",
"put",
"(",
"source_node",
")",
"while",
"not",
"Q",
".",
"empty",
"(",
")",
":",
"current_node",
"=",
"Q",
".",
"get",
"(",
")",
"# At current_node, we can traverse each hyperedge in its forward star",
"for",
"hyperedge_id",
"in",
"H",
".",
"get_forward_star",
"(",
"current_node",
")",
":",
"if",
"Pe",
"[",
"hyperedge_id",
"]",
"is",
"not",
"None",
":",
"continue",
"Pe",
"[",
"hyperedge_id",
"]",
"=",
"current_node",
"# Traversing a hyperedge in current_node's forward star yields",
"# the set of head nodes of the hyperedge; visit each head node",
"for",
"head_node",
"in",
"H",
".",
"get_hyperedge_head",
"(",
"hyperedge_id",
")",
":",
"if",
"head_node",
"in",
"visited_nodes",
":",
"continue",
"Pv",
"[",
"head_node",
"]",
"=",
"hyperedge_id",
"Q",
".",
"put",
"(",
"head_node",
")",
"visited_nodes",
".",
"add",
"(",
"head_node",
")",
"return",
"visited_nodes",
",",
"Pv",
",",
"Pe"
] | 43.20339 | 21.271186 |
def _load_debugger_subcommands(self, name):
""" Create an instance of each of the debugger
subcommands. Commands are found by importing files in the
directory 'name' + 'sub'. Some files are excluded via an array set
in __init__. For each of the remaining files, we import them
and scan for class names inside those files and for each class
name, we will create an instance of that class. The set of
DebuggerCommand class instances form set of possible debugger
commands."""
# Initialization
cmd_instances = []
class_prefix = capitalize(name) # e.g. Info, Set, or Show
module_dir = 'trepan.processor.command.%s_subcmd' % name
mod = __import__(module_dir, None, None, ['*'])
eval_cmd_template = 'command_mod.%s(self)'
# Import, instantiate, and add classes for each of the
# modules found in module_dir imported above.
for module_name in mod.__modules__:
import_name = module_dir + '.' + module_name
try:
command_mod = importlib.import_module(import_name)
except ImportError:
print(("Error importing name %s module %s: %s" %
(import_name, module_name, sys.exc_info()[0])))
continue
# Even though we tend not to do this, it is possible to
# put more than one class into a module/file. So look for
# all of them.
classnames = [ classname for classname, classvalue in
inspect.getmembers(command_mod, inspect.isclass)
if ('DebuggerCommand' != classname and
classname.startswith(class_prefix)) ]
for classname in classnames:
eval_cmd = eval_cmd_template % classname
try:
instance = eval(eval_cmd)
self.cmds.add(instance)
except:
print("Error eval'ing class %s" % classname)
pass
pass
pass
return cmd_instances | [
"def",
"_load_debugger_subcommands",
"(",
"self",
",",
"name",
")",
":",
"# Initialization",
"cmd_instances",
"=",
"[",
"]",
"class_prefix",
"=",
"capitalize",
"(",
"name",
")",
"# e.g. Info, Set, or Show",
"module_dir",
"=",
"'trepan.processor.command.%s_subcmd'",
"%",
"name",
"mod",
"=",
"__import__",
"(",
"module_dir",
",",
"None",
",",
"None",
",",
"[",
"'*'",
"]",
")",
"eval_cmd_template",
"=",
"'command_mod.%s(self)'",
"# Import, instantiate, and add classes for each of the",
"# modules found in module_dir imported above.",
"for",
"module_name",
"in",
"mod",
".",
"__modules__",
":",
"import_name",
"=",
"module_dir",
"+",
"'.'",
"+",
"module_name",
"try",
":",
"command_mod",
"=",
"importlib",
".",
"import_module",
"(",
"import_name",
")",
"except",
"ImportError",
":",
"print",
"(",
"(",
"\"Error importing name %s module %s: %s\"",
"%",
"(",
"import_name",
",",
"module_name",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"0",
"]",
")",
")",
")",
"continue",
"# Even though we tend not to do this, it is possible to",
"# put more than one class into a module/file. So look for",
"# all of them.",
"classnames",
"=",
"[",
"classname",
"for",
"classname",
",",
"classvalue",
"in",
"inspect",
".",
"getmembers",
"(",
"command_mod",
",",
"inspect",
".",
"isclass",
")",
"if",
"(",
"'DebuggerCommand'",
"!=",
"classname",
"and",
"classname",
".",
"startswith",
"(",
"class_prefix",
")",
")",
"]",
"for",
"classname",
"in",
"classnames",
":",
"eval_cmd",
"=",
"eval_cmd_template",
"%",
"classname",
"try",
":",
"instance",
"=",
"eval",
"(",
"eval_cmd",
")",
"self",
".",
"cmds",
".",
"add",
"(",
"instance",
")",
"except",
":",
"print",
"(",
"\"Error eval'ing class %s\"",
"%",
"classname",
")",
"pass",
"pass",
"pass",
"return",
"cmd_instances"
] | 45.617021 | 20.978723 |
def _get_torrent_category(self, tag, result=None):
"""Given a tag containing torrent details try to find category
of torrent. In search pages the category is found in links of
the form <a href='/tv/'>TV</a> with TV replaced with movies, books
etc. For the home page I will use the result number to
decide the category"""
hrefs = ["/movies/", "/tv/", "/music/", "/games/", "/applications/", "/anime/",
"/books/", "/xxx/"]
category = None
if not result is None: # if result: 0 returns false.
# Searching home page, get category from result number
category = hrefs[result / 10].strip("/")
return category
for item in hrefs:
if tag.select("a[href=" + item + "]"):
category = item.strip("/")
return category | [
"def",
"_get_torrent_category",
"(",
"self",
",",
"tag",
",",
"result",
"=",
"None",
")",
":",
"hrefs",
"=",
"[",
"\"/movies/\"",
",",
"\"/tv/\"",
",",
"\"/music/\"",
",",
"\"/games/\"",
",",
"\"/applications/\"",
",",
"\"/anime/\"",
",",
"\"/books/\"",
",",
"\"/xxx/\"",
"]",
"category",
"=",
"None",
"if",
"not",
"result",
"is",
"None",
":",
"# if result: 0 returns false.",
"# Searching home page, get category from result number",
"category",
"=",
"hrefs",
"[",
"result",
"/",
"10",
"]",
".",
"strip",
"(",
"\"/\"",
")",
"return",
"category",
"for",
"item",
"in",
"hrefs",
":",
"if",
"tag",
".",
"select",
"(",
"\"a[href=\"",
"+",
"item",
"+",
"\"]\"",
")",
":",
"category",
"=",
"item",
".",
"strip",
"(",
"\"/\"",
")",
"return",
"category"
] | 39 | 18.631579 |
def wraps(__fn, **kw):
"""Like ``functools.wraps``, with support for annotations."""
kw['assigned'] = kw.get('assigned', WRAPPER_ASSIGNMENTS)
return functools.wraps(__fn, **kw) | [
"def",
"wraps",
"(",
"__fn",
",",
"*",
"*",
"kw",
")",
":",
"kw",
"[",
"'assigned'",
"]",
"=",
"kw",
".",
"get",
"(",
"'assigned'",
",",
"WRAPPER_ASSIGNMENTS",
")",
"return",
"functools",
".",
"wraps",
"(",
"__fn",
",",
"*",
"*",
"kw",
")"
] | 49.25 | 11 |
def from_signed_raw(cls: Type[CertificationType], signed_raw: str) -> CertificationType:
"""
Return Certification instance from signed raw document
:param signed_raw: Signed raw document
:return:
"""
n = 0
lines = signed_raw.splitlines(True)
version = int(Identity.parse_field("Version", lines[n]))
n += 1
Certification.parse_field("Type", lines[n])
n += 1
currency = Certification.parse_field("Currency", lines[n])
n += 1
pubkey_from = Certification.parse_field("Issuer", lines[n])
n += 1
identity_pubkey = Certification.parse_field("IdtyIssuer", lines[n])
n += 1
identity_uid = Certification.parse_field("IdtyUniqueID", lines[n])
n += 1
identity_timestamp = BlockUID.from_str(Certification.parse_field("IdtyTimestamp", lines[n]))
n += 1
identity_signature = Certification.parse_field("IdtySignature", lines[n])
n += 1
timestamp = BlockUID.from_str(Certification.parse_field("CertTimestamp", lines[n]))
n += 1
signature = Certification.parse_field("Signature", lines[n])
identity = Identity(version, currency, identity_pubkey, identity_uid, identity_timestamp, identity_signature)
return cls(version, currency, pubkey_from, identity, timestamp, signature) | [
"def",
"from_signed_raw",
"(",
"cls",
":",
"Type",
"[",
"CertificationType",
"]",
",",
"signed_raw",
":",
"str",
")",
"->",
"CertificationType",
":",
"n",
"=",
"0",
"lines",
"=",
"signed_raw",
".",
"splitlines",
"(",
"True",
")",
"version",
"=",
"int",
"(",
"Identity",
".",
"parse_field",
"(",
"\"Version\"",
",",
"lines",
"[",
"n",
"]",
")",
")",
"n",
"+=",
"1",
"Certification",
".",
"parse_field",
"(",
"\"Type\"",
",",
"lines",
"[",
"n",
"]",
")",
"n",
"+=",
"1",
"currency",
"=",
"Certification",
".",
"parse_field",
"(",
"\"Currency\"",
",",
"lines",
"[",
"n",
"]",
")",
"n",
"+=",
"1",
"pubkey_from",
"=",
"Certification",
".",
"parse_field",
"(",
"\"Issuer\"",
",",
"lines",
"[",
"n",
"]",
")",
"n",
"+=",
"1",
"identity_pubkey",
"=",
"Certification",
".",
"parse_field",
"(",
"\"IdtyIssuer\"",
",",
"lines",
"[",
"n",
"]",
")",
"n",
"+=",
"1",
"identity_uid",
"=",
"Certification",
".",
"parse_field",
"(",
"\"IdtyUniqueID\"",
",",
"lines",
"[",
"n",
"]",
")",
"n",
"+=",
"1",
"identity_timestamp",
"=",
"BlockUID",
".",
"from_str",
"(",
"Certification",
".",
"parse_field",
"(",
"\"IdtyTimestamp\"",
",",
"lines",
"[",
"n",
"]",
")",
")",
"n",
"+=",
"1",
"identity_signature",
"=",
"Certification",
".",
"parse_field",
"(",
"\"IdtySignature\"",
",",
"lines",
"[",
"n",
"]",
")",
"n",
"+=",
"1",
"timestamp",
"=",
"BlockUID",
".",
"from_str",
"(",
"Certification",
".",
"parse_field",
"(",
"\"CertTimestamp\"",
",",
"lines",
"[",
"n",
"]",
")",
")",
"n",
"+=",
"1",
"signature",
"=",
"Certification",
".",
"parse_field",
"(",
"\"Signature\"",
",",
"lines",
"[",
"n",
"]",
")",
"identity",
"=",
"Identity",
"(",
"version",
",",
"currency",
",",
"identity_pubkey",
",",
"identity_uid",
",",
"identity_timestamp",
",",
"identity_signature",
")",
"return",
"cls",
"(",
"version",
",",
"currency",
",",
"pubkey_from",
",",
"identity",
",",
"timestamp",
",",
"signature",
")"
] | 32.190476 | 31.904762 |
def getRandomSequence(length=500):
"""Generates a random name and sequence.
"""
fastaHeader = ""
for i in xrange(int(random.random()*100)):
fastaHeader = fastaHeader + random.choice([ 'A', 'C', '0', '9', ' ', '\t' ])
return (fastaHeader, \
"".join([ random.choice([ 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'N' ]) for i in xrange((int)(random.random() * length))])) | [
"def",
"getRandomSequence",
"(",
"length",
"=",
"500",
")",
":",
"fastaHeader",
"=",
"\"\"",
"for",
"i",
"in",
"xrange",
"(",
"int",
"(",
"random",
".",
"random",
"(",
")",
"*",
"100",
")",
")",
":",
"fastaHeader",
"=",
"fastaHeader",
"+",
"random",
".",
"choice",
"(",
"[",
"'A'",
",",
"'C'",
",",
"'0'",
",",
"'9'",
",",
"' '",
",",
"'\\t'",
"]",
")",
"return",
"(",
"fastaHeader",
",",
"\"\"",
".",
"join",
"(",
"[",
"random",
".",
"choice",
"(",
"[",
"'A'",
",",
"'C'",
",",
"'T'",
",",
"'G'",
",",
"'A'",
",",
"'C'",
",",
"'T'",
",",
"'G'",
",",
"'A'",
",",
"'C'",
",",
"'T'",
",",
"'G'",
",",
"'A'",
",",
"'C'",
",",
"'T'",
",",
"'G'",
",",
"'A'",
",",
"'C'",
",",
"'T'",
",",
"'G'",
",",
"'N'",
"]",
")",
"for",
"i",
"in",
"xrange",
"(",
"(",
"int",
")",
"(",
"random",
".",
"random",
"(",
")",
"*",
"length",
")",
")",
"]",
")",
")"
] | 57.125 | 30.75 |
def get_target_state():
"""SDP target State.
Returns the target state; allowed target states and time updated
"""
sdp_state = SDPState()
errval, errdict = _check_status(sdp_state)
if errval == "error":
LOG.debug(errdict['reason'])
return dict(
current_target_state="unknown",
last_updated="unknown",
reason=errdict['reason']
)
LOG.debug('Getting target state')
target_state = sdp_state.target_state
LOG.debug('Target state = %s', target_state)
return dict(
current_target_state=target_state,
allowed_target_states=sdp_state.allowed_target_states[
sdp_state.current_state],
last_updated=sdp_state.target_timestamp.isoformat()) | [
"def",
"get_target_state",
"(",
")",
":",
"sdp_state",
"=",
"SDPState",
"(",
")",
"errval",
",",
"errdict",
"=",
"_check_status",
"(",
"sdp_state",
")",
"if",
"errval",
"==",
"\"error\"",
":",
"LOG",
".",
"debug",
"(",
"errdict",
"[",
"'reason'",
"]",
")",
"return",
"dict",
"(",
"current_target_state",
"=",
"\"unknown\"",
",",
"last_updated",
"=",
"\"unknown\"",
",",
"reason",
"=",
"errdict",
"[",
"'reason'",
"]",
")",
"LOG",
".",
"debug",
"(",
"'Getting target state'",
")",
"target_state",
"=",
"sdp_state",
".",
"target_state",
"LOG",
".",
"debug",
"(",
"'Target state = %s'",
",",
"target_state",
")",
"return",
"dict",
"(",
"current_target_state",
"=",
"target_state",
",",
"allowed_target_states",
"=",
"sdp_state",
".",
"allowed_target_states",
"[",
"sdp_state",
".",
"current_state",
"]",
",",
"last_updated",
"=",
"sdp_state",
".",
"target_timestamp",
".",
"isoformat",
"(",
")",
")"
] | 33.681818 | 12.272727 |
def syllabified_str(self, separator="."):
"""
Returns:
str: Syllabified word in string format
Examples:
>>> Word('conseil').syllabified_str()
'con.seil'
You can also specify the separator('.' by default)
>>> Word('sikerly').syllabified_str(separator = '-')
'sik-er-ly'
"""
return separator.join(self.syllabified if self.syllabified else self.syllabify()) | [
"def",
"syllabified_str",
"(",
"self",
",",
"separator",
"=",
"\".\"",
")",
":",
"return",
"separator",
".",
"join",
"(",
"self",
".",
"syllabified",
"if",
"self",
".",
"syllabified",
"else",
"self",
".",
"syllabify",
"(",
")",
")"
] | 30.4 | 21.2 |
def load_configs(self):
"""load config files"""
self.statemgr_config.set_state_locations(self.configs[STATEMGRS_KEY])
if EXTRA_LINKS_KEY in self.configs:
for extra_link in self.configs[EXTRA_LINKS_KEY]:
self.extra_links.append(self.validate_extra_link(extra_link)) | [
"def",
"load_configs",
"(",
"self",
")",
":",
"self",
".",
"statemgr_config",
".",
"set_state_locations",
"(",
"self",
".",
"configs",
"[",
"STATEMGRS_KEY",
"]",
")",
"if",
"EXTRA_LINKS_KEY",
"in",
"self",
".",
"configs",
":",
"for",
"extra_link",
"in",
"self",
".",
"configs",
"[",
"EXTRA_LINKS_KEY",
"]",
":",
"self",
".",
"extra_links",
".",
"append",
"(",
"self",
".",
"validate_extra_link",
"(",
"extra_link",
")",
")"
] | 47.5 | 15.666667 |
def find_all(pattern=None):
"""
Returns all serial ports present.
:param pattern: pattern to search for when retrieving serial ports
:type pattern: string
:returns: list of devices
:raises: :py:class:`~alarmdecoder.util.CommError`
"""
devices = []
try:
if pattern:
devices = serial.tools.list_ports.grep(pattern)
else:
devices = serial.tools.list_ports.comports()
except serial.SerialException as err:
raise CommError('Error enumerating serial devices: {0}'.format(str(err)), err)
return devices | [
"def",
"find_all",
"(",
"pattern",
"=",
"None",
")",
":",
"devices",
"=",
"[",
"]",
"try",
":",
"if",
"pattern",
":",
"devices",
"=",
"serial",
".",
"tools",
".",
"list_ports",
".",
"grep",
"(",
"pattern",
")",
"else",
":",
"devices",
"=",
"serial",
".",
"tools",
".",
"list_ports",
".",
"comports",
"(",
")",
"except",
"serial",
".",
"SerialException",
"as",
"err",
":",
"raise",
"CommError",
"(",
"'Error enumerating serial devices: {0}'",
".",
"format",
"(",
"str",
"(",
"err",
")",
")",
",",
"err",
")",
"return",
"devices"
] | 28.863636 | 22.136364 |
def _convert_folded_blocks(folded_ir_blocks):
"""Convert Filter/Traverse blocks and LocalField expressions within @fold to Gremlin objects."""
new_folded_ir_blocks = []
def folded_context_visitor(expression):
"""Transform LocalField objects into their Gremlin-specific counterpart."""
if not isinstance(expression, LocalField):
return expression
return GremlinFoldedLocalField(expression.field_name)
for block in folded_ir_blocks:
new_block = block
if isinstance(block, Filter):
new_predicate = block.predicate.visit_and_update(folded_context_visitor)
new_block = GremlinFoldedFilter(new_predicate)
elif isinstance(block, Traverse):
new_block = GremlinFoldedTraverse.from_traverse(block)
elif isinstance(block, (MarkLocation, Backtrack)):
# We remove MarkLocation and Backtrack blocks from the folded blocks output,
# since they do not produce any Gremlin output code inside folds.
continue
else:
raise AssertionError(u'Found an unexpected IR block in the folded IR blocks: '
u'{} {} {}'.format(type(block), block, folded_ir_blocks))
new_folded_ir_blocks.append(new_block)
return new_folded_ir_blocks | [
"def",
"_convert_folded_blocks",
"(",
"folded_ir_blocks",
")",
":",
"new_folded_ir_blocks",
"=",
"[",
"]",
"def",
"folded_context_visitor",
"(",
"expression",
")",
":",
"\"\"\"Transform LocalField objects into their Gremlin-specific counterpart.\"\"\"",
"if",
"not",
"isinstance",
"(",
"expression",
",",
"LocalField",
")",
":",
"return",
"expression",
"return",
"GremlinFoldedLocalField",
"(",
"expression",
".",
"field_name",
")",
"for",
"block",
"in",
"folded_ir_blocks",
":",
"new_block",
"=",
"block",
"if",
"isinstance",
"(",
"block",
",",
"Filter",
")",
":",
"new_predicate",
"=",
"block",
".",
"predicate",
".",
"visit_and_update",
"(",
"folded_context_visitor",
")",
"new_block",
"=",
"GremlinFoldedFilter",
"(",
"new_predicate",
")",
"elif",
"isinstance",
"(",
"block",
",",
"Traverse",
")",
":",
"new_block",
"=",
"GremlinFoldedTraverse",
".",
"from_traverse",
"(",
"block",
")",
"elif",
"isinstance",
"(",
"block",
",",
"(",
"MarkLocation",
",",
"Backtrack",
")",
")",
":",
"# We remove MarkLocation and Backtrack blocks from the folded blocks output,",
"# since they do not produce any Gremlin output code inside folds.",
"continue",
"else",
":",
"raise",
"AssertionError",
"(",
"u'Found an unexpected IR block in the folded IR blocks: '",
"u'{} {} {}'",
".",
"format",
"(",
"type",
"(",
"block",
")",
",",
"block",
",",
"folded_ir_blocks",
")",
")",
"new_folded_ir_blocks",
".",
"append",
"(",
"new_block",
")",
"return",
"new_folded_ir_blocks"
] | 43.266667 | 22.633333 |
def _from_metadata(self, urlpath):
"""Return set of local URLs if files already exist"""
md = self.get_metadata(urlpath)
if md is not None:
return [e['cache_path'] for e in md] | [
"def",
"_from_metadata",
"(",
"self",
",",
"urlpath",
")",
":",
"md",
"=",
"self",
".",
"get_metadata",
"(",
"urlpath",
")",
"if",
"md",
"is",
"not",
"None",
":",
"return",
"[",
"e",
"[",
"'cache_path'",
"]",
"for",
"e",
"in",
"md",
"]"
] | 41.6 | 5.8 |
def from_api_repr(cls, resource):
"""Factory: construct a dataset reference given its API representation
Args:
resource (Dict[str, str]):
Dataset reference resource representation returned from the API
Returns:
google.cloud.bigquery.dataset.DatasetReference:
Dataset reference parsed from ``resource``.
"""
project = resource["projectId"]
dataset_id = resource["datasetId"]
return cls(project, dataset_id) | [
"def",
"from_api_repr",
"(",
"cls",
",",
"resource",
")",
":",
"project",
"=",
"resource",
"[",
"\"projectId\"",
"]",
"dataset_id",
"=",
"resource",
"[",
"\"datasetId\"",
"]",
"return",
"cls",
"(",
"project",
",",
"dataset_id",
")"
] | 36.142857 | 15.785714 |
def _set_hide_mac_acl_ext(self, v, load=False):
"""
Setter method for hide_mac_acl_ext, mapped from YANG variable /mac/access_list/extended/hide_mac_acl_ext (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_hide_mac_acl_ext is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_hide_mac_acl_ext() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=hide_mac_acl_ext.hide_mac_acl_ext, is_container='container', presence=False, yang_name="hide-mac-acl-ext", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'hidden': u'wyser-write-hook'}}, namespace='urn:brocade.com:mgmt:brocade-mac-access-list', defining_module='brocade-mac-access-list', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """hide_mac_acl_ext must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=hide_mac_acl_ext.hide_mac_acl_ext, is_container='container', presence=False, yang_name="hide-mac-acl-ext", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'hidden': u'wyser-write-hook'}}, namespace='urn:brocade.com:mgmt:brocade-mac-access-list', defining_module='brocade-mac-access-list', yang_type='container', is_config=True)""",
})
self.__hide_mac_acl_ext = t
if hasattr(self, '_set'):
self._set() | [
"def",
"_set_hide_mac_acl_ext",
"(",
"self",
",",
"v",
",",
"load",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"v",
",",
"\"_utype\"",
")",
":",
"v",
"=",
"v",
".",
"_utype",
"(",
"v",
")",
"try",
":",
"t",
"=",
"YANGDynClass",
"(",
"v",
",",
"base",
"=",
"hide_mac_acl_ext",
".",
"hide_mac_acl_ext",
",",
"is_container",
"=",
"'container'",
",",
"presence",
"=",
"False",
",",
"yang_name",
"=",
"\"hide-mac-acl-ext\"",
",",
"rest_name",
"=",
"\"\"",
",",
"parent",
"=",
"self",
",",
"path_helper",
"=",
"self",
".",
"_path_helper",
",",
"extmethods",
"=",
"self",
".",
"_extmethods",
",",
"register_paths",
"=",
"True",
",",
"extensions",
"=",
"{",
"u'tailf-common'",
":",
"{",
"u'cli-drop-node-name'",
":",
"None",
",",
"u'hidden'",
":",
"u'wyser-write-hook'",
"}",
"}",
",",
"namespace",
"=",
"'urn:brocade.com:mgmt:brocade-mac-access-list'",
",",
"defining_module",
"=",
"'brocade-mac-access-list'",
",",
"yang_type",
"=",
"'container'",
",",
"is_config",
"=",
"True",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"ValueError",
"(",
"{",
"'error-string'",
":",
"\"\"\"hide_mac_acl_ext must be of a type compatible with container\"\"\"",
",",
"'defined-type'",
":",
"\"container\"",
",",
"'generated-type'",
":",
"\"\"\"YANGDynClass(base=hide_mac_acl_ext.hide_mac_acl_ext, is_container='container', presence=False, yang_name=\"hide-mac-acl-ext\", rest_name=\"\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'hidden': u'wyser-write-hook'}}, namespace='urn:brocade.com:mgmt:brocade-mac-access-list', defining_module='brocade-mac-access-list', yang_type='container', is_config=True)\"\"\"",
",",
"}",
")",
"self",
".",
"__hide_mac_acl_ext",
"=",
"t",
"if",
"hasattr",
"(",
"self",
",",
"'_set'",
")",
":",
"self",
".",
"_set",
"(",
")"
] | 79.409091 | 37.181818 |
def receiver_blueprint_for(self, name):
""" Get a Flask blueprint for the named provider that handles incoming messages & status reports
Note: this requires Flask microframework.
:rtype: flask.blueprints.Blueprint
:returns: Flask Blueprint, fully functional
:raises KeyError: provider not found
:raises NotImplementedError: Provider does not implement a receiver
"""
# Get the provider & blueprint
provider = self.get_provider(name)
bp = provider.make_receiver_blueprint()
# Register a Flask handler that initializes `g.provider`
# This is the only way for the blueprint to get the current IProvider instance
from flask.globals import g # local import as the user is not required to use receivers at all
@bp.before_request
def init_g():
g.provider = provider
# Finish
return bp | [
"def",
"receiver_blueprint_for",
"(",
"self",
",",
"name",
")",
":",
"# Get the provider & blueprint",
"provider",
"=",
"self",
".",
"get_provider",
"(",
"name",
")",
"bp",
"=",
"provider",
".",
"make_receiver_blueprint",
"(",
")",
"# Register a Flask handler that initializes `g.provider`",
"# This is the only way for the blueprint to get the current IProvider instance",
"from",
"flask",
".",
"globals",
"import",
"g",
"# local import as the user is not required to use receivers at all",
"@",
"bp",
".",
"before_request",
"def",
"init_g",
"(",
")",
":",
"g",
".",
"provider",
"=",
"provider",
"# Finish",
"return",
"bp"
] | 38.666667 | 21.375 |
def _get_file_alignment_for_new_binary_file(self, file: File) -> int:
"""Detects alignment requirements for binary files with new nn::util::BinaryFileHeader."""
if len(file.data) <= 0x20:
return 0
bom = file.data[0xc:0xc+2]
if bom != b'\xff\xfe' and bom != b'\xfe\xff':
return 0
be = bom == b'\xfe\xff'
file_size: int = struct.unpack_from(_get_unpack_endian_character(be) + 'I', file.data, 0x1c)[0]
if len(file.data) != file_size:
return 0
return 1 << file.data[0xe] | [
"def",
"_get_file_alignment_for_new_binary_file",
"(",
"self",
",",
"file",
":",
"File",
")",
"->",
"int",
":",
"if",
"len",
"(",
"file",
".",
"data",
")",
"<=",
"0x20",
":",
"return",
"0",
"bom",
"=",
"file",
".",
"data",
"[",
"0xc",
":",
"0xc",
"+",
"2",
"]",
"if",
"bom",
"!=",
"b'\\xff\\xfe'",
"and",
"bom",
"!=",
"b'\\xfe\\xff'",
":",
"return",
"0",
"be",
"=",
"bom",
"==",
"b'\\xfe\\xff'",
"file_size",
":",
"int",
"=",
"struct",
".",
"unpack_from",
"(",
"_get_unpack_endian_character",
"(",
"be",
")",
"+",
"'I'",
",",
"file",
".",
"data",
",",
"0x1c",
")",
"[",
"0",
"]",
"if",
"len",
"(",
"file",
".",
"data",
")",
"!=",
"file_size",
":",
"return",
"0",
"return",
"1",
"<<",
"file",
".",
"data",
"[",
"0xe",
"]"
] | 42.692308 | 17.923077 |
def int_id(self):
"int: This key's numeric id."
id_or_name = self.id_or_name
if id_or_name is not None and isinstance(id_or_name, int):
return id_or_name
return None | [
"def",
"int_id",
"(",
"self",
")",
":",
"id_or_name",
"=",
"self",
".",
"id_or_name",
"if",
"id_or_name",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"id_or_name",
",",
"int",
")",
":",
"return",
"id_or_name",
"return",
"None"
] | 34 | 14.666667 |
def get_key_from_request(self):
'''Return a key for the current request url.
:return: The storage key for the current url
:rettype: string
'''
path = "result:%s" % self.context.request.url
if self.is_auto_webp():
path += '/webp'
return path | [
"def",
"get_key_from_request",
"(",
"self",
")",
":",
"path",
"=",
"\"result:%s\"",
"%",
"self",
".",
"context",
".",
"request",
".",
"url",
"if",
"self",
".",
"is_auto_webp",
"(",
")",
":",
"path",
"+=",
"'/webp'",
"return",
"path"
] | 23.076923 | 22.615385 |
def rotate(self, angle, direction='z', axis=None):
"""
Returns a new Surface which is the same but rotated about a
given axis.
If the axis given is ``None``, the rotation will be computed
about the Surface's centroid.
:param angle: Rotation angle (in radians)
:type angle: float
:param direction: Axis direction ('x', 'y' or 'z')
:type direction: str
:param axis: Point in z=0 to perform as rotation axis
:type axis: tuple (len=2 or 3) or None
:returns: ``pyny.Surface``
"""
return Space(Place(self)).rotate(angle, direction, axis)[0].surface | [
"def",
"rotate",
"(",
"self",
",",
"angle",
",",
"direction",
"=",
"'z'",
",",
"axis",
"=",
"None",
")",
":",
"return",
"Space",
"(",
"Place",
"(",
"self",
")",
")",
".",
"rotate",
"(",
"angle",
",",
"direction",
",",
"axis",
")",
"[",
"0",
"]",
".",
"surface"
] | 39.588235 | 16.176471 |
def print_summary(self, strm):
"""Print summary of lint."""
nerr = 0
nerr += LintHelper._print_summary_map(strm, self.cpp_header_map, 'cpp-header')
nerr += LintHelper._print_summary_map(strm, self.cpp_src_map, 'cpp-soruce')
nerr += LintHelper._print_summary_map(strm, self.python_map, 'python')
if nerr == 0:
strm.write('All passed!\n')
else:
strm.write('%d files failed lint\n' % nerr)
return nerr | [
"def",
"print_summary",
"(",
"self",
",",
"strm",
")",
":",
"nerr",
"=",
"0",
"nerr",
"+=",
"LintHelper",
".",
"_print_summary_map",
"(",
"strm",
",",
"self",
".",
"cpp_header_map",
",",
"'cpp-header'",
")",
"nerr",
"+=",
"LintHelper",
".",
"_print_summary_map",
"(",
"strm",
",",
"self",
".",
"cpp_src_map",
",",
"'cpp-soruce'",
")",
"nerr",
"+=",
"LintHelper",
".",
"_print_summary_map",
"(",
"strm",
",",
"self",
".",
"python_map",
",",
"'python'",
")",
"if",
"nerr",
"==",
"0",
":",
"strm",
".",
"write",
"(",
"'All passed!\\n'",
")",
"else",
":",
"strm",
".",
"write",
"(",
"'%d files failed lint\\n'",
"%",
"nerr",
")",
"return",
"nerr"
] | 43.272727 | 22.181818 |
def get(self, x, y):
"""Get the state of a pixel. Returns bool.
:param x: x coordinate of the pixel
:param y: y coordinate of the pixel
"""
x = normalize(x)
y = normalize(y)
dot_index = pixel_map[y % 4][x % 2]
col, row = get_pos(x, y)
char = self.chars.get(row, {}).get(col)
if not char:
return False
if type(char) != int:
return True
return bool(char & dot_index) | [
"def",
"get",
"(",
"self",
",",
"x",
",",
"y",
")",
":",
"x",
"=",
"normalize",
"(",
"x",
")",
"y",
"=",
"normalize",
"(",
"y",
")",
"dot_index",
"=",
"pixel_map",
"[",
"y",
"%",
"4",
"]",
"[",
"x",
"%",
"2",
"]",
"col",
",",
"row",
"=",
"get_pos",
"(",
"x",
",",
"y",
")",
"char",
"=",
"self",
".",
"chars",
".",
"get",
"(",
"row",
",",
"{",
"}",
")",
".",
"get",
"(",
"col",
")",
"if",
"not",
"char",
":",
"return",
"False",
"if",
"type",
"(",
"char",
")",
"!=",
"int",
":",
"return",
"True",
"return",
"bool",
"(",
"char",
"&",
"dot_index",
")"
] | 24.736842 | 15.947368 |
def serialCmdPwdAuth(self, password_str):
""" Password step of set commands
This method is normally called within another serial command, so it
does not issue a termination string. Any default password is set
in the caller parameter list, never here.
Args:
password_str (str): Required password.
Returns:
bool: True on completion and ACK.
"""
result = False
try:
req_start = "0150310228" + binascii.hexlify(password_str) + "2903"
req_crc = self.calc_crc16(req_start[2:].decode("hex"))
req_str = req_start + req_crc
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
ekm_log("Password accepted (" + self.getContext() + ")")
result = True
else:
ekm_log("Password call failure no 06(" + self.getContext() + ")")
except:
ekm_log("Password call failure by exception(" + self.getContext() + ")")
ekm_log(traceback.format_exc(sys.exc_info()))
return result | [
"def",
"serialCmdPwdAuth",
"(",
"self",
",",
"password_str",
")",
":",
"result",
"=",
"False",
"try",
":",
"req_start",
"=",
"\"0150310228\"",
"+",
"binascii",
".",
"hexlify",
"(",
"password_str",
")",
"+",
"\"2903\"",
"req_crc",
"=",
"self",
".",
"calc_crc16",
"(",
"req_start",
"[",
"2",
":",
"]",
".",
"decode",
"(",
"\"hex\"",
")",
")",
"req_str",
"=",
"req_start",
"+",
"req_crc",
"self",
".",
"m_serial_port",
".",
"write",
"(",
"req_str",
".",
"decode",
"(",
"\"hex\"",
")",
")",
"if",
"self",
".",
"m_serial_port",
".",
"getResponse",
"(",
"self",
".",
"getContext",
"(",
")",
")",
".",
"encode",
"(",
"\"hex\"",
")",
"==",
"\"06\"",
":",
"ekm_log",
"(",
"\"Password accepted (\"",
"+",
"self",
".",
"getContext",
"(",
")",
"+",
"\")\"",
")",
"result",
"=",
"True",
"else",
":",
"ekm_log",
"(",
"\"Password call failure no 06(\"",
"+",
"self",
".",
"getContext",
"(",
")",
"+",
"\")\"",
")",
"except",
":",
"ekm_log",
"(",
"\"Password call failure by exception(\"",
"+",
"self",
".",
"getContext",
"(",
")",
"+",
"\")\"",
")",
"ekm_log",
"(",
"traceback",
".",
"format_exc",
"(",
"sys",
".",
"exc_info",
"(",
")",
")",
")",
"return",
"result"
] | 38.5 | 24.433333 |
def range_hourly(start=None, stop=None, timezone='UTC', count=None):
"""
This an alternative way to generating sets of Delorean objects with
HOURLY stops
"""
return stops(start=start, stop=stop, freq=HOURLY, timezone=timezone, count=count) | [
"def",
"range_hourly",
"(",
"start",
"=",
"None",
",",
"stop",
"=",
"None",
",",
"timezone",
"=",
"'UTC'",
",",
"count",
"=",
"None",
")",
":",
"return",
"stops",
"(",
"start",
"=",
"start",
",",
"stop",
"=",
"stop",
",",
"freq",
"=",
"HOURLY",
",",
"timezone",
"=",
"timezone",
",",
"count",
"=",
"count",
")"
] | 42.333333 | 21.333333 |
def _calc_dic(self):
"""Calculates deviance information Criterion"""
# Find mean deviance
mean_deviance = np.mean(self.db.trace('deviance')(), axis=0)
# Set values of all parameters to their mean
for stochastic in self.stochastics:
# Calculate mean of paramter
try:
mean_value = np.mean(
self.db.trace(
stochastic.__name__)(
),
axis=0)
# Set current value to mean
stochastic.value = mean_value
except KeyError:
print_(
"No trace available for %s. DIC value may not be valid." %
stochastic.__name__)
except TypeError:
print_(
"Not able to calculate DIC: invalid stochastic %s" %
stochastic.__name__)
return None
# Return twice deviance minus deviance at means
return 2 * mean_deviance - self.deviance | [
"def",
"_calc_dic",
"(",
"self",
")",
":",
"# Find mean deviance",
"mean_deviance",
"=",
"np",
".",
"mean",
"(",
"self",
".",
"db",
".",
"trace",
"(",
"'deviance'",
")",
"(",
")",
",",
"axis",
"=",
"0",
")",
"# Set values of all parameters to their mean",
"for",
"stochastic",
"in",
"self",
".",
"stochastics",
":",
"# Calculate mean of paramter",
"try",
":",
"mean_value",
"=",
"np",
".",
"mean",
"(",
"self",
".",
"db",
".",
"trace",
"(",
"stochastic",
".",
"__name__",
")",
"(",
")",
",",
"axis",
"=",
"0",
")",
"# Set current value to mean",
"stochastic",
".",
"value",
"=",
"mean_value",
"except",
"KeyError",
":",
"print_",
"(",
"\"No trace available for %s. DIC value may not be valid.\"",
"%",
"stochastic",
".",
"__name__",
")",
"except",
"TypeError",
":",
"print_",
"(",
"\"Not able to calculate DIC: invalid stochastic %s\"",
"%",
"stochastic",
".",
"__name__",
")",
"return",
"None",
"# Return twice deviance minus deviance at means",
"return",
"2",
"*",
"mean_deviance",
"-",
"self",
".",
"deviance"
] | 32.4375 | 17.34375 |
def list_media_services(access_token, subscription_id):
'''List the media services in a subscription.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
Returns:
HTTP response. JSON body.
'''
endpoint = ''.join([get_rm_endpoint(),
'/subscriptions/', subscription_id,
'/providers/microsoft.media/mediaservices?api-version=', MEDIA_API])
return do_get(endpoint, access_token) | [
"def",
"list_media_services",
"(",
"access_token",
",",
"subscription_id",
")",
":",
"endpoint",
"=",
"''",
".",
"join",
"(",
"[",
"get_rm_endpoint",
"(",
")",
",",
"'/subscriptions/'",
",",
"subscription_id",
",",
"'/providers/microsoft.media/mediaservices?api-version='",
",",
"MEDIA_API",
"]",
")",
"return",
"do_get",
"(",
"endpoint",
",",
"access_token",
")"
] | 36.785714 | 22.357143 |
def slaveof(self, host=None, port=None):
"""
Set the server to be a replicated slave of the instance identified
by the ``host`` and ``port``. If called without arguments, the
instance is promoted to a master instead.
"""
if host is None and port is None:
return self.execute_command('SLAVEOF', Token.get_token('NO'),
Token.get_token('ONE'))
return self.execute_command('SLAVEOF', host, port) | [
"def",
"slaveof",
"(",
"self",
",",
"host",
"=",
"None",
",",
"port",
"=",
"None",
")",
":",
"if",
"host",
"is",
"None",
"and",
"port",
"is",
"None",
":",
"return",
"self",
".",
"execute_command",
"(",
"'SLAVEOF'",
",",
"Token",
".",
"get_token",
"(",
"'NO'",
")",
",",
"Token",
".",
"get_token",
"(",
"'ONE'",
")",
")",
"return",
"self",
".",
"execute_command",
"(",
"'SLAVEOF'",
",",
"host",
",",
"port",
")"
] | 49 | 14.8 |
def _get_boll(cls, df):
""" Get Bollinger bands.
boll_ub means the upper band of the Bollinger bands
boll_lb means the lower band of the Bollinger bands
boll_ub = MA + Kσ
boll_lb = MA − Kσ
M = BOLL_PERIOD
K = BOLL_STD_TIMES
:param df: data
:return: None
"""
moving_avg = df['close_{}_sma'.format(cls.BOLL_PERIOD)]
moving_std = df['close_{}_mstd'.format(cls.BOLL_PERIOD)]
df['boll'] = moving_avg
moving_avg = list(map(np.float64, moving_avg))
moving_std = list(map(np.float64, moving_std))
# noinspection PyTypeChecker
df['boll_ub'] = np.add(moving_avg,
np.multiply(cls.BOLL_STD_TIMES, moving_std))
# noinspection PyTypeChecker
df['boll_lb'] = np.subtract(moving_avg,
np.multiply(cls.BOLL_STD_TIMES,
moving_std)) | [
"def",
"_get_boll",
"(",
"cls",
",",
"df",
")",
":",
"moving_avg",
"=",
"df",
"[",
"'close_{}_sma'",
".",
"format",
"(",
"cls",
".",
"BOLL_PERIOD",
")",
"]",
"moving_std",
"=",
"df",
"[",
"'close_{}_mstd'",
".",
"format",
"(",
"cls",
".",
"BOLL_PERIOD",
")",
"]",
"df",
"[",
"'boll'",
"]",
"=",
"moving_avg",
"moving_avg",
"=",
"list",
"(",
"map",
"(",
"np",
".",
"float64",
",",
"moving_avg",
")",
")",
"moving_std",
"=",
"list",
"(",
"map",
"(",
"np",
".",
"float64",
",",
"moving_std",
")",
")",
"# noinspection PyTypeChecker\r",
"df",
"[",
"'boll_ub'",
"]",
"=",
"np",
".",
"add",
"(",
"moving_avg",
",",
"np",
".",
"multiply",
"(",
"cls",
".",
"BOLL_STD_TIMES",
",",
"moving_std",
")",
")",
"# noinspection PyTypeChecker\r",
"df",
"[",
"'boll_lb'",
"]",
"=",
"np",
".",
"subtract",
"(",
"moving_avg",
",",
"np",
".",
"multiply",
"(",
"cls",
".",
"BOLL_STD_TIMES",
",",
"moving_std",
")",
")"
] | 40.791667 | 15.583333 |
def split_sequence_file_on_sample_ids_to_files(seqs,
outdir):
"""Split FASTA file on sample IDs.
Parameters
----------
seqs: file handler
file handler to demultiplexed FASTA file
outdir: string
dirpath to output split FASTA files
"""
logger = logging.getLogger(__name__)
logger.info('split_sequence_file_on_sample_ids_to_files'
' for file %s into dir %s' % (seqs, outdir))
outputs = {}
for bits in sequence_generator(seqs):
sample = sample_id_from_read_id(bits[0])
if sample not in outputs:
outputs[sample] = open(join(outdir, sample + '.fasta'), 'w')
outputs[sample].write(">%s\n%s\n" % (bits[0], bits[1]))
for sample in outputs:
outputs[sample].close()
logger.info('split to %d files' % len(outputs)) | [
"def",
"split_sequence_file_on_sample_ids_to_files",
"(",
"seqs",
",",
"outdir",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"info",
"(",
"'split_sequence_file_on_sample_ids_to_files'",
"' for file %s into dir %s'",
"%",
"(",
"seqs",
",",
"outdir",
")",
")",
"outputs",
"=",
"{",
"}",
"for",
"bits",
"in",
"sequence_generator",
"(",
"seqs",
")",
":",
"sample",
"=",
"sample_id_from_read_id",
"(",
"bits",
"[",
"0",
"]",
")",
"if",
"sample",
"not",
"in",
"outputs",
":",
"outputs",
"[",
"sample",
"]",
"=",
"open",
"(",
"join",
"(",
"outdir",
",",
"sample",
"+",
"'.fasta'",
")",
",",
"'w'",
")",
"outputs",
"[",
"sample",
"]",
".",
"write",
"(",
"\">%s\\n%s\\n\"",
"%",
"(",
"bits",
"[",
"0",
"]",
",",
"bits",
"[",
"1",
"]",
")",
")",
"for",
"sample",
"in",
"outputs",
":",
"outputs",
"[",
"sample",
"]",
".",
"close",
"(",
")",
"logger",
".",
"info",
"(",
"'split to %d files'",
"%",
"len",
"(",
"outputs",
")",
")"
] | 32.769231 | 17.653846 |
def recalculate_psd(self):
""" Recalculate the psd
"""
seg_len = self.sample_rate * self.psd_segment_length
e = len(self.strain)
s = e - ((self.psd_samples + 1) * self.psd_segment_length / 2) * self.sample_rate
psd = pycbc.psd.welch(self.strain[s:e], seg_len=seg_len, seg_stride=seg_len / 2)
psd.dist = spa_distance(psd, 1.4, 1.4, self.low_frequency_cutoff) * pycbc.DYN_RANGE_FAC
# If the new psd is similar to the old one, don't replace it
if self.psd and self.psd_recalculate_difference:
if abs(self.psd.dist - psd.dist) / self.psd.dist < self.psd_recalculate_difference:
logging.info("Skipping recalculation of %s PSD, %s-%s",
self.detector, self.psd.dist, psd.dist)
return True
# If the new psd is *really* different than the old one, return an error
if self.psd and self.psd_abort_difference:
if abs(self.psd.dist - psd.dist) / self.psd.dist > self.psd_abort_difference:
logging.info("%s PSD is CRAZY, aborting!!!!, %s-%s",
self.detector, self.psd.dist, psd.dist)
self.psd = psd
self.psds = {}
return False
# If the new estimate replaces the current one, invalide the ineterpolate PSDs
self.psd = psd
self.psds = {}
logging.info("Recalculating %s PSD, %s", self.detector, psd.dist)
return True | [
"def",
"recalculate_psd",
"(",
"self",
")",
":",
"seg_len",
"=",
"self",
".",
"sample_rate",
"*",
"self",
".",
"psd_segment_length",
"e",
"=",
"len",
"(",
"self",
".",
"strain",
")",
"s",
"=",
"e",
"-",
"(",
"(",
"self",
".",
"psd_samples",
"+",
"1",
")",
"*",
"self",
".",
"psd_segment_length",
"/",
"2",
")",
"*",
"self",
".",
"sample_rate",
"psd",
"=",
"pycbc",
".",
"psd",
".",
"welch",
"(",
"self",
".",
"strain",
"[",
"s",
":",
"e",
"]",
",",
"seg_len",
"=",
"seg_len",
",",
"seg_stride",
"=",
"seg_len",
"/",
"2",
")",
"psd",
".",
"dist",
"=",
"spa_distance",
"(",
"psd",
",",
"1.4",
",",
"1.4",
",",
"self",
".",
"low_frequency_cutoff",
")",
"*",
"pycbc",
".",
"DYN_RANGE_FAC",
"# If the new psd is similar to the old one, don't replace it",
"if",
"self",
".",
"psd",
"and",
"self",
".",
"psd_recalculate_difference",
":",
"if",
"abs",
"(",
"self",
".",
"psd",
".",
"dist",
"-",
"psd",
".",
"dist",
")",
"/",
"self",
".",
"psd",
".",
"dist",
"<",
"self",
".",
"psd_recalculate_difference",
":",
"logging",
".",
"info",
"(",
"\"Skipping recalculation of %s PSD, %s-%s\"",
",",
"self",
".",
"detector",
",",
"self",
".",
"psd",
".",
"dist",
",",
"psd",
".",
"dist",
")",
"return",
"True",
"# If the new psd is *really* different than the old one, return an error",
"if",
"self",
".",
"psd",
"and",
"self",
".",
"psd_abort_difference",
":",
"if",
"abs",
"(",
"self",
".",
"psd",
".",
"dist",
"-",
"psd",
".",
"dist",
")",
"/",
"self",
".",
"psd",
".",
"dist",
">",
"self",
".",
"psd_abort_difference",
":",
"logging",
".",
"info",
"(",
"\"%s PSD is CRAZY, aborting!!!!, %s-%s\"",
",",
"self",
".",
"detector",
",",
"self",
".",
"psd",
".",
"dist",
",",
"psd",
".",
"dist",
")",
"self",
".",
"psd",
"=",
"psd",
"self",
".",
"psds",
"=",
"{",
"}",
"return",
"False",
"# If the new estimate replaces the current one, invalide the ineterpolate PSDs",
"self",
".",
"psd",
"=",
"psd",
"self",
".",
"psds",
"=",
"{",
"}",
"logging",
".",
"info",
"(",
"\"Recalculating %s PSD, %s\"",
",",
"self",
".",
"detector",
",",
"psd",
".",
"dist",
")",
"return",
"True"
] | 46.1875 | 27.875 |
def emit_answer_event(sender, instance, **kwargs):
"""
Save answer event to log file.
"""
if not issubclass(sender, Answer) or not kwargs['created']:
return
logger = get_events_logger()
logger.emit('answer', {
"user_id": instance.user_id,
"is_correct": instance.item_asked_id == instance.item_answered_id,
"context_id": [instance.context_id] if instance.context_id else [],
"item_id": instance.item_id,
"response_time_ms": instance.response_time,
"params": {
"session_id": instance.session_id,
"guess": instance.guess,
"practice_set_id": instance.practice_set_id,
"config_id": instance.config_id,
}}
) | [
"def",
"emit_answer_event",
"(",
"sender",
",",
"instance",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"issubclass",
"(",
"sender",
",",
"Answer",
")",
"or",
"not",
"kwargs",
"[",
"'created'",
"]",
":",
"return",
"logger",
"=",
"get_events_logger",
"(",
")",
"logger",
".",
"emit",
"(",
"'answer'",
",",
"{",
"\"user_id\"",
":",
"instance",
".",
"user_id",
",",
"\"is_correct\"",
":",
"instance",
".",
"item_asked_id",
"==",
"instance",
".",
"item_answered_id",
",",
"\"context_id\"",
":",
"[",
"instance",
".",
"context_id",
"]",
"if",
"instance",
".",
"context_id",
"else",
"[",
"]",
",",
"\"item_id\"",
":",
"instance",
".",
"item_id",
",",
"\"response_time_ms\"",
":",
"instance",
".",
"response_time",
",",
"\"params\"",
":",
"{",
"\"session_id\"",
":",
"instance",
".",
"session_id",
",",
"\"guess\"",
":",
"instance",
".",
"guess",
",",
"\"practice_set_id\"",
":",
"instance",
".",
"practice_set_id",
",",
"\"config_id\"",
":",
"instance",
".",
"config_id",
",",
"}",
"}",
")"
] | 36.1 | 14.5 |
def get_average_color(colors):
"""Calculate the average color from the list of colors, where each color
is a 3-tuple of (r, g, b) values.
"""
c = reduce(color_reducer, colors)
total = len(colors)
return tuple(v / total for v in c) | [
"def",
"get_average_color",
"(",
"colors",
")",
":",
"c",
"=",
"reduce",
"(",
"color_reducer",
",",
"colors",
")",
"total",
"=",
"len",
"(",
"colors",
")",
"return",
"tuple",
"(",
"v",
"/",
"total",
"for",
"v",
"in",
"c",
")"
] | 35.428571 | 5 |
def transform_api_header_authorization(param, value):
"""Transform a username:password value into a base64 string."""
try:
username, password = value.split(":", 1)
except ValueError:
raise click.BadParameter(
"Authorization header needs to be Authorization=username:password",
param=param,
)
value = "%s:%s" % (username.strip(), password)
value = base64.b64encode(bytes(value.encode()))
return "Basic %s" % value.decode("utf-8") | [
"def",
"transform_api_header_authorization",
"(",
"param",
",",
"value",
")",
":",
"try",
":",
"username",
",",
"password",
"=",
"value",
".",
"split",
"(",
"\":\"",
",",
"1",
")",
"except",
"ValueError",
":",
"raise",
"click",
".",
"BadParameter",
"(",
"\"Authorization header needs to be Authorization=username:password\"",
",",
"param",
"=",
"param",
",",
")",
"value",
"=",
"\"%s:%s\"",
"%",
"(",
"username",
".",
"strip",
"(",
")",
",",
"password",
")",
"value",
"=",
"base64",
".",
"b64encode",
"(",
"bytes",
"(",
"value",
".",
"encode",
"(",
")",
")",
")",
"return",
"\"Basic %s\"",
"%",
"value",
".",
"decode",
"(",
"\"utf-8\"",
")"
] | 37.615385 | 17.692308 |
def parameterize(string, separator='-'):
"""
Replace special characters in a string so that it may be used as part of a
'pretty' URL.
Example::
>>> parameterize(u"Donald E. Knuth")
'donald-e-knuth'
"""
string = transliterate(string)
# Turn unwanted chars into the separator
string = re.sub(r"(?i)[^a-z0-9\-_]+", separator, string)
if separator:
re_sep = re.escape(separator)
# No more than one of the separator in a row.
string = re.sub(r'%s{2,}' % re_sep, separator, string)
# Remove leading/trailing separator.
string = re.sub(r"(?i)^%(sep)s|%(sep)s$" % {'sep': re_sep}, '', string)
return string.lower() | [
"def",
"parameterize",
"(",
"string",
",",
"separator",
"=",
"'-'",
")",
":",
"string",
"=",
"transliterate",
"(",
"string",
")",
"# Turn unwanted chars into the separator",
"string",
"=",
"re",
".",
"sub",
"(",
"r\"(?i)[^a-z0-9\\-_]+\"",
",",
"separator",
",",
"string",
")",
"if",
"separator",
":",
"re_sep",
"=",
"re",
".",
"escape",
"(",
"separator",
")",
"# No more than one of the separator in a row.",
"string",
"=",
"re",
".",
"sub",
"(",
"r'%s{2,}'",
"%",
"re_sep",
",",
"separator",
",",
"string",
")",
"# Remove leading/trailing separator.",
"string",
"=",
"re",
".",
"sub",
"(",
"r\"(?i)^%(sep)s|%(sep)s$\"",
"%",
"{",
"'sep'",
":",
"re_sep",
"}",
",",
"''",
",",
"string",
")",
"return",
"string",
".",
"lower",
"(",
")"
] | 36.052632 | 15.631579 |
def get_node_by_coord(self, coord, relative=False):
"""Get a node from a node coord.
:param coord: the coordinates of the required node.
:type coord: tuple or list
:param relative: `True` if coord is relative to the node instance,
`False` for absolute coordinates.
:type relative: bool
:returns: the node corresponding to `coord`.
:rtype: Node or None
"""
if not isinstance(coord, (list, tuple)) or False in list(map(lambda i: type(i)==int, coord)):
logger.warning("%s.get_node_by_coord: node«%s», arg «coord»=«%s», «coord» must be list or tuple of integers." % (self.__class__.__name__, self.name, coord))
return None
if relative:
_node = self
else:
_node = self._root # _node = self.get_rootnode()
for idx in coord:
_node = _node.childs[idx]
if _node is None:
logger.warning("%s.get_node_by_coord: node«%s», arg «coord»=«%s» not valid." % (self.__class__.__name__, self.name, coord))
return None
return _node | [
"def",
"get_node_by_coord",
"(",
"self",
",",
"coord",
",",
"relative",
"=",
"False",
")",
":",
"if",
"not",
"isinstance",
"(",
"coord",
",",
"(",
"list",
",",
"tuple",
")",
")",
"or",
"False",
"in",
"list",
"(",
"map",
"(",
"lambda",
"i",
":",
"type",
"(",
"i",
")",
"==",
"int",
",",
"coord",
")",
")",
":",
"logger",
".",
"warning",
"(",
"\"%s.get_node_by_coord: node«%s», arg «coord»=«%s», «coord» must be list or tuple of integers.\" % (self",
"_",
"c",
"lass",
"_",
"_.__name_",
"_",
", self.n",
"a",
"e, c",
"o",
"ord)",
")",
"",
"",
"",
"return",
"None",
"if",
"relative",
":",
"_node",
"=",
"self",
"else",
":",
"_node",
"=",
"self",
".",
"_root",
"# _node = self.get_rootnode()",
"for",
"idx",
"in",
"coord",
":",
"_node",
"=",
"_node",
".",
"childs",
"[",
"idx",
"]",
"if",
"_node",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"\"%s.get_node_by_coord: node«%s», arg «coord»=«%s» not valid.\" % (se",
"f",
"_",
"_cla",
"s",
"s__.__nam",
"e",
"__, self",
".",
"ame,",
" ",
"coor",
"d",
")",
"",
"",
"return",
"None",
"return",
"_node"
] | 46.208333 | 25.041667 |
def add_dict_to_cookiejar(cj, cookie_dict):
"""Returns a CookieJar from a key/value dictionary.
:param cj: CookieJar to insert cookies into.
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
cj2 = cookiejar_from_dict(cookie_dict)
cj.update(cj2)
return cj | [
"def",
"add_dict_to_cookiejar",
"(",
"cj",
",",
"cookie_dict",
")",
":",
"cj2",
"=",
"cookiejar_from_dict",
"(",
"cookie_dict",
")",
"cj",
".",
"update",
"(",
"cj2",
")",
"return",
"cj"
] | 29.4 | 17 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.