| text (string, lengths 89 to 104k) | code_tokens (list) | avg_line_len (float64, 7.91 to 980) | score (float64, 0 to 630) |
|---|---|---|---|
def style_classpath(self, products, scheduler):
"""Returns classpath as paths for scalastyle."""
classpath_entries = self._tool_classpath('scalastyle', products, scheduler)
return [classpath_entry.path for classpath_entry in classpath_entries] | [
"def",
"style_classpath",
"(",
"self",
",",
"products",
",",
"scheduler",
")",
":",
"classpath_entries",
"=",
"self",
".",
"_tool_classpath",
"(",
"'scalastyle'",
",",
"products",
",",
"scheduler",
")",
"return",
"[",
"classpath_entry",
".",
"path",
"for",
"classpath_entry",
"in",
"classpath_entries",
"]"
] | 63 | 20 |
def is_canonical_address(address: Any) -> bool:
"""
Returns `True` if `address` is an address in its canonical form.
"""
if not is_bytes(address) or len(address) != 20:
return False
return address == to_canonical_address(address) | [
"def",
"is_canonical_address",
"(",
"address",
":",
"Any",
")",
"->",
"bool",
":",
"if",
"not",
"is_bytes",
"(",
"address",
")",
"or",
"len",
"(",
"address",
")",
"!=",
"20",
":",
"return",
"False",
"return",
"address",
"==",
"to_canonical_address",
"(",
"address",
")"
] | 36.142857 | 11.285714 |
def build_columns(self, X, verbose=False):
"""construct the model matrix columns for the term
Parameters
----------
X : array-like
Input dataset with n rows
verbose : bool
whether to show warnings
Returns
-------
scipy sparse array with n rows
"""
return sp.sparse.csc_matrix(X[:, self.feature][:, np.newaxis]) | [
"def",
"build_columns",
"(",
"self",
",",
"X",
",",
"verbose",
"=",
"False",
")",
":",
"return",
"sp",
".",
"sparse",
".",
"csc_matrix",
"(",
"X",
"[",
":",
",",
"self",
".",
"feature",
"]",
"[",
":",
",",
"np",
".",
"newaxis",
"]",
")"
] | 25.125 | 18.1875 |
def cancel(self, job_ids):
"""Cancel the jobs specified by a list of job ids.
Parameters
----------
job_ids : list of str
List of job identifiers
Returns
-------
list of bool
Each entry in the list will contain False if the operation fails. Otherwise, the entry will be True.
"""
if self.linger is True:
logger.debug("Ignoring cancel requests due to linger mode")
return [False for x in job_ids]
try:
self.client.terminate_instances(InstanceIds=list(job_ids))
except Exception as e:
logger.error("Caught error while attempting to remove instances: {0}".format(job_ids))
raise e
else:
logger.debug("Removed the instances: {0}".format(job_ids))
for job_id in job_ids:
self.resources[job_id]["status"] = "COMPLETED"
for job_id in job_ids:
self.instances.remove(job_id)
return [True for x in job_ids] | [
"def",
"cancel",
"(",
"self",
",",
"job_ids",
")",
":",
"if",
"self",
".",
"linger",
"is",
"True",
":",
"logger",
".",
"debug",
"(",
"\"Ignoring cancel requests due to linger mode\"",
")",
"return",
"[",
"False",
"for",
"x",
"in",
"job_ids",
"]",
"try",
":",
"self",
".",
"client",
".",
"terminate_instances",
"(",
"InstanceIds",
"=",
"list",
"(",
"job_ids",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"Caught error while attempting to remove instances: {0}\"",
".",
"format",
"(",
"job_ids",
")",
")",
"raise",
"e",
"else",
":",
"logger",
".",
"debug",
"(",
"\"Removed the instances: {0}\"",
".",
"format",
"(",
"job_ids",
")",
")",
"for",
"job_id",
"in",
"job_ids",
":",
"self",
".",
"resources",
"[",
"job_id",
"]",
"[",
"\"status\"",
"]",
"=",
"\"COMPLETED\"",
"for",
"job_id",
"in",
"job_ids",
":",
"self",
".",
"instances",
".",
"remove",
"(",
"job_id",
")",
"return",
"[",
"True",
"for",
"x",
"in",
"job_ids",
"]"
] | 30.727273 | 23.666667 |
def get_link_mappings(link_id, link_2_id=None, **kwargs):
"""
Get all the resource attribute mappings in a network. If another network
is specified, only return the mappings between the two networks.
"""
qry = db.DBSession.query(ResourceAttrMap).filter(
or_(
and_(
ResourceAttrMap.resource_attr_id_a == ResourceAttr.id,
ResourceAttr.link_id == link_id),
and_(
ResourceAttrMap.resource_attr_id_b == ResourceAttr.id,
ResourceAttr.link_id == link_id)))
if link_2_id is not None:
aliased_ra = aliased(ResourceAttr, name="ra2")
qry = qry.filter(or_(
and_(
ResourceAttrMap.resource_attr_id_a == aliased_ra.id,
aliased_ra.link_id == link_2_id),
and_(
ResourceAttrMap.resource_attr_id_b == aliased_ra.id,
aliased_ra.link_id == link_2_id)))
return qry.all() | [
"def",
"get_link_mappings",
"(",
"link_id",
",",
"link_2_id",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"qry",
"=",
"db",
".",
"DBSession",
".",
"query",
"(",
"ResourceAttrMap",
")",
".",
"filter",
"(",
"or_",
"(",
"and_",
"(",
"ResourceAttrMap",
".",
"resource_attr_id_a",
"==",
"ResourceAttr",
".",
"id",
",",
"ResourceAttr",
".",
"link_id",
"==",
"link_id",
")",
",",
"and_",
"(",
"ResourceAttrMap",
".",
"resource_attr_id_b",
"==",
"ResourceAttr",
".",
"id",
",",
"ResourceAttr",
".",
"link_id",
"==",
"link_id",
")",
")",
")",
"if",
"link_2_id",
"is",
"not",
"None",
":",
"aliased_ra",
"=",
"aliased",
"(",
"ResourceAttr",
",",
"name",
"=",
"\"ra2\"",
")",
"qry",
"=",
"qry",
".",
"filter",
"(",
"or_",
"(",
"and_",
"(",
"ResourceAttrMap",
".",
"resource_attr_id_a",
"==",
"aliased_ra",
".",
"id",
",",
"aliased_ra",
".",
"link_id",
"==",
"link_2_id",
")",
",",
"and_",
"(",
"ResourceAttrMap",
".",
"resource_attr_id_b",
"==",
"aliased_ra",
".",
"id",
",",
"aliased_ra",
".",
"link_id",
"==",
"link_2_id",
")",
")",
")",
"return",
"qry",
".",
"all",
"(",
")"
] | 38.48 | 20.48 |
def strip_prompt(self, a_string):
"""Strip the trailing router prompt from the output."""
expect_string = r"^(OK|ERROR|Command not recognized\.)$"
response_list = a_string.split(self.RESPONSE_RETURN)
last_line = response_list[-1]
if re.search(expect_string, last_line):
return self.RESPONSE_RETURN.join(response_list[:-1])
else:
return a_string | [
"def",
"strip_prompt",
"(",
"self",
",",
"a_string",
")",
":",
"expect_string",
"=",
"r\"^(OK|ERROR|Command not recognized\\.)$\"",
"response_list",
"=",
"a_string",
".",
"split",
"(",
"self",
".",
"RESPONSE_RETURN",
")",
"last_line",
"=",
"response_list",
"[",
"-",
"1",
"]",
"if",
"re",
".",
"search",
"(",
"expect_string",
",",
"last_line",
")",
":",
"return",
"self",
".",
"RESPONSE_RETURN",
".",
"join",
"(",
"response_list",
"[",
":",
"-",
"1",
"]",
")",
"else",
":",
"return",
"a_string"
] | 45.333333 | 13.888889 |
def learn(self, *args, **kwargs):
"""[DEPRECATED] Use 'fit_predict'.
"""
warnings.warn("learn is deprecated, {}.fit_predict "
"instead".format(self.__class__.__name__))
return self.fit_predict(*args, **kwargs) | [
"def",
"learn",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"warnings",
".",
"warn",
"(",
"\"learn is deprecated, {}.fit_predict \"",
"\"instead\"",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"return",
"self",
".",
"fit_predict",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 36.857143 | 14.142857 |
def optimal_t(self, seq_p, seq_ch, pattern_multiplicity=None, ignore_gaps=False):
'''
Find the optimal distance between the two sequences
Parameters
----------
seq_p : character array
Parent sequence
seq_ch : character array
Child sequence
pattern_multiplicity : numpy array
If sequences are reduced by combining identical alignment patterns,
these multiplicities need to be accounted for when counting the number
of mutations across a branch. If None, all patterns are assumed to
occur exactly once.
ignore_gaps : bool
If True, ignore gaps in distance calculations
'''
seq_pair, multiplicity = self.compress_sequence_pair(seq_p, seq_ch,
pattern_multiplicity = pattern_multiplicity,
ignore_gaps=ignore_gaps)
return self.optimal_t_compressed(seq_pair, multiplicity) | [
"def",
"optimal_t",
"(",
"self",
",",
"seq_p",
",",
"seq_ch",
",",
"pattern_multiplicity",
"=",
"None",
",",
"ignore_gaps",
"=",
"False",
")",
":",
"seq_pair",
",",
"multiplicity",
"=",
"self",
".",
"compress_sequence_pair",
"(",
"seq_p",
",",
"seq_ch",
",",
"pattern_multiplicity",
"=",
"pattern_multiplicity",
",",
"ignore_gaps",
"=",
"ignore_gaps",
")",
"return",
"self",
".",
"optimal_t_compressed",
"(",
"seq_pair",
",",
"multiplicity",
")"
] | 38.407407 | 28.555556 |
def create_server_rackspace(connection,
distribution,
disk_name,
disk_size,
ami,
region,
key_pair,
instance_type,
instance_name,
tags={},
security_groups=None):
"""
Creates Rackspace Instance and saves it state in a local json file
"""
log_yellow("Creating Rackspace instance...")
flavor = connection.flavors.find(name=instance_type)
image = connection.images.find(name=ami)
server = connection.servers.create(name=instance_name,
flavor=flavor.id,
image=image.id,
region=region,
availability_zone=region,
key_name=key_pair)
while server.status == 'BUILD':
log_yellow("Waiting for build to finish...")
sleep(5)
server = connection.servers.get(server.id)
# check for errors
if server.status != 'ACTIVE':
log_red("Error creating rackspace instance")
exit(1)
# the server was assigned IPv4 and IPv6 addresses, locate the IPv4 address
ip_address = server.accessIPv4
if ip_address is None:
log_red('No IP address assigned')
exit(1)
wait_for_ssh(ip_address)
log_green('New server with IP address {0}.'.format(ip_address))
return server | [
"def",
"create_server_rackspace",
"(",
"connection",
",",
"distribution",
",",
"disk_name",
",",
"disk_size",
",",
"ami",
",",
"region",
",",
"key_pair",
",",
"instance_type",
",",
"instance_name",
",",
"tags",
"=",
"{",
"}",
",",
"security_groups",
"=",
"None",
")",
":",
"log_yellow",
"(",
"\"Creating Rackspace instance...\"",
")",
"flavor",
"=",
"connection",
".",
"flavors",
".",
"find",
"(",
"name",
"=",
"instance_type",
")",
"image",
"=",
"connection",
".",
"images",
".",
"find",
"(",
"name",
"=",
"ami",
")",
"server",
"=",
"connection",
".",
"servers",
".",
"create",
"(",
"name",
"=",
"instance_name",
",",
"flavor",
"=",
"flavor",
".",
"id",
",",
"image",
"=",
"image",
".",
"id",
",",
"region",
"=",
"region",
",",
"availability_zone",
"=",
"region",
",",
"key_name",
"=",
"key_pair",
")",
"while",
"server",
".",
"status",
"==",
"'BUILD'",
":",
"log_yellow",
"(",
"\"Waiting for build to finish...\"",
")",
"sleep",
"(",
"5",
")",
"server",
"=",
"connection",
".",
"servers",
".",
"get",
"(",
"server",
".",
"id",
")",
"# check for errors",
"if",
"server",
".",
"status",
"!=",
"'ACTIVE'",
":",
"log_red",
"(",
"\"Error creating rackspace instance\"",
")",
"exit",
"(",
"1",
")",
"# the server was assigned IPv4 and IPv6 addresses, locate the IPv4 address",
"ip_address",
"=",
"server",
".",
"accessIPv4",
"if",
"ip_address",
"is",
"None",
":",
"log_red",
"(",
"'No IP address assigned'",
")",
"exit",
"(",
"1",
")",
"wait_for_ssh",
"(",
"ip_address",
")",
"log_green",
"(",
"'New server with IP address {0}.'",
".",
"format",
"(",
"ip_address",
")",
")",
"return",
"server"
] | 33.723404 | 16.574468 |
def set_property_filter(filter_proto, name, op, value):
"""Set property filter constraint in the given datastore.Filter proto message.
Args:
filter_proto: datastore.Filter proto message
name: property name
op: datastore.PropertyFilter.Operation
value: property value
Returns:
the same datastore.Filter.
Usage:
>>> set_property_filter(filter_proto, 'foo',
... datastore.PropertyFilter.EQUAL, 'a') # WHERE 'foo' = 'a'
"""
filter_proto.Clear()
pf = filter_proto.property_filter
pf.property.name = name
pf.op = op
set_value(pf.value, value)
return filter_proto | [
"def",
"set_property_filter",
"(",
"filter_proto",
",",
"name",
",",
"op",
",",
"value",
")",
":",
"filter_proto",
".",
"Clear",
"(",
")",
"pf",
"=",
"filter_proto",
".",
"property_filter",
"pf",
".",
"property",
".",
"name",
"=",
"name",
"pf",
".",
"op",
"=",
"op",
"set_value",
"(",
"pf",
".",
"value",
",",
"value",
")",
"return",
"filter_proto"
] | 26.818182 | 18.818182 |
def start(self, positionals=None):
'''Start the helper flow. We check helper system configurations to
determine components that should be collected for the submission.
This is where the client can also pass on any extra (positional)
arguments in a list from the user.
'''
bot.info('[helpme|%s]' %(self.name))
self.speak()
self._start(positionals) | [
"def",
"start",
"(",
"self",
",",
"positionals",
"=",
"None",
")",
":",
"bot",
".",
"info",
"(",
"'[helpme|%s]'",
"%",
"(",
"self",
".",
"name",
")",
")",
"self",
".",
"speak",
"(",
")",
"self",
".",
"_start",
"(",
"positionals",
")"
] | 45.666667 | 19.666667 |
def clear_scroll(self, scroll_id=None, body=None, **query_params):
"""
Clear the scroll request created by specifying the scroll parameter to
search.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html>`_
:param scroll_id: A comma-separated list of scroll IDs to clear
:param body: A comma-separated list of scroll IDs to clear if none was
specified via the scroll_id parameter
"""
if scroll_id in NULL_VALUES and body in NULL_VALUES:
raise ValueError("You need to supply scroll_id or body.")
elif scroll_id and not body:
body = scroll_id
elif scroll_id:
query_params[EsConst.SCROLL_ID] = scroll_id
path = self._es_parser.make_path(EsMethods.SEARCH, EsMethods.SCROLL)
result = yield self._perform_request(HttpMethod.DELETE, path, body, params=query_params)
returnValue(result) | [
"def",
"clear_scroll",
"(",
"self",
",",
"scroll_id",
"=",
"None",
",",
"body",
"=",
"None",
",",
"*",
"*",
"query_params",
")",
":",
"if",
"scroll_id",
"in",
"NULL_VALUES",
"and",
"body",
"in",
"NULL_VALUES",
":",
"raise",
"ValueError",
"(",
"\"You need to supply scroll_id or body.\"",
")",
"elif",
"scroll_id",
"and",
"not",
"body",
":",
"body",
"=",
"scroll_id",
"elif",
"scroll_id",
":",
"query_params",
"[",
"EsConst",
".",
"SCROLL_ID",
"]",
"=",
"scroll_id",
"path",
"=",
"self",
".",
"_es_parser",
".",
"make_path",
"(",
"EsMethods",
".",
"SEARCH",
",",
"EsMethods",
".",
"SCROLL",
")",
"result",
"=",
"yield",
"self",
".",
"_perform_request",
"(",
"HttpMethod",
".",
"DELETE",
",",
"path",
",",
"body",
",",
"params",
"=",
"query_params",
")",
"returnValue",
"(",
"result",
")"
] | 50.052632 | 24.789474 |
def unfollow(user, obj):
""" Make a user unfollow an object """
try:
follow = Follow.objects.get_follows(obj).get(user=user)
follow.delete()
return follow
except Follow.DoesNotExist:
pass | [
"def",
"unfollow",
"(",
"user",
",",
"obj",
")",
":",
"try",
":",
"follow",
"=",
"Follow",
".",
"objects",
".",
"get_follows",
"(",
"obj",
")",
".",
"get",
"(",
"user",
"=",
"user",
")",
"follow",
".",
"delete",
"(",
")",
"return",
"follow",
"except",
"Follow",
".",
"DoesNotExist",
":",
"pass"
] | 28.125 | 17.875 |
def cfgGetBool(theObj, name, dflt):
""" Get a stringified value from a ConfigObj object and return it as a bool """
strval = theObj.get(name, None)
if strval is None:
return dflt
return strval.lower().strip() == 'true' | [
"def",
"cfgGetBool",
"(",
"theObj",
",",
"name",
",",
"dflt",
")",
":",
"strval",
"=",
"theObj",
".",
"get",
"(",
"name",
",",
"None",
")",
"if",
"strval",
"is",
"None",
":",
"return",
"dflt",
"return",
"strval",
".",
"lower",
"(",
")",
".",
"strip",
"(",
")",
"==",
"'true'"
] | 38.333333 | 8.666667 |
def strip_between(self, string, start, end):
"""Deletes everything between regexes start and end from string"""
regex = start + r'.*?' + end + r'\s*'
res = re.sub(regex, '', string,
flags=re.DOTALL|re.IGNORECASE|re.MULTILINE)
return res | [
"def",
"strip_between",
"(",
"self",
",",
"string",
",",
"start",
",",
"end",
")",
":",
"regex",
"=",
"start",
"+",
"r'.*?'",
"+",
"end",
"+",
"r'\\s*'",
"res",
"=",
"re",
".",
"sub",
"(",
"regex",
",",
"''",
",",
"string",
",",
"flags",
"=",
"re",
".",
"DOTALL",
"|",
"re",
".",
"IGNORECASE",
"|",
"re",
".",
"MULTILINE",
")",
"return",
"res"
] | 47.333333 | 9.333333 |
def get_template_folder():
"""Get path to the folder where the HTML templates are."""
cfg = get_project_configuration()
if 'templates' not in cfg:
home = os.path.expanduser("~")
rcfile = os.path.join(home, ".hwrtrc")
cfg['templates'] = pkg_resources.resource_filename('hwrt',
'templates/')
with open(rcfile, 'w') as f:
yaml.dump(cfg, f, default_flow_style=False)
return cfg['templates'] | [
"def",
"get_template_folder",
"(",
")",
":",
"cfg",
"=",
"get_project_configuration",
"(",
")",
"if",
"'templates'",
"not",
"in",
"cfg",
":",
"home",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~\"",
")",
"rcfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"\".hwrtrc\"",
")",
"cfg",
"[",
"'templates'",
"]",
"=",
"pkg_resources",
".",
"resource_filename",
"(",
"'hwrt'",
",",
"'templates/'",
")",
"with",
"open",
"(",
"rcfile",
",",
"'w'",
")",
"as",
"f",
":",
"yaml",
".",
"dump",
"(",
"cfg",
",",
"f",
",",
"default_flow_style",
"=",
"False",
")",
"return",
"cfg",
"[",
"'templates'",
"]"
] | 44.909091 | 11.363636 |
def source_expand(self, source):
'''Expand the wildcards for an S3 path. This emulates shell expansion
of wildcards when the input is a local path.
'''
result = []
if not isinstance(source, list):
source = [source]
for src in source:
# XXX Hacky: We need to disable recursive when we expand the input
# parameters, need to pass this as an override parameter if
# provided.
tmp = self.opt.recursive
self.opt.recursive = False
result += [f['name'] for f in self.s3walk(src, True)]
self.opt.recursive = tmp
if (len(result) == 0) and (not self.opt.ignore_empty_source):
fail("[Runtime Failure] Source doesn't exist.")
return result | [
"def",
"source_expand",
"(",
"self",
",",
"source",
")",
":",
"result",
"=",
"[",
"]",
"if",
"not",
"isinstance",
"(",
"source",
",",
"list",
")",
":",
"source",
"=",
"[",
"source",
"]",
"for",
"src",
"in",
"source",
":",
"# XXX Hacky: We need to disable recursive when we expand the input",
"# parameters, need to pass this as an override parameter if",
"# provided.",
"tmp",
"=",
"self",
".",
"opt",
".",
"recursive",
"self",
".",
"opt",
".",
"recursive",
"=",
"False",
"result",
"+=",
"[",
"f",
"[",
"'name'",
"]",
"for",
"f",
"in",
"self",
".",
"s3walk",
"(",
"src",
",",
"True",
")",
"]",
"self",
".",
"opt",
".",
"recursive",
"=",
"tmp",
"if",
"(",
"len",
"(",
"result",
")",
"==",
"0",
")",
"and",
"(",
"not",
"self",
".",
"opt",
".",
"ignore_empty_source",
")",
":",
"fail",
"(",
"\"[Runtime Failure] Source doesn't exist.\"",
")",
"return",
"result"
] | 32.818182 | 22.636364 |
def tokenize(self, sentence,
normalize=True,
is_feature=False,
is_surface=False,
return_list=False,
func_normalizer=text_preprocess.normalize_text):
# type: (text_type, bool, bool, bool, bool, Callable[[str],str]) -> Union[List[str], TokenizedSenetence]
"""This method returns the tokenized result.
If return_list==True, this method returns a list whose elements are tuples of word_stem and POS.
If return_list==False, this method returns a TokenizedSenetence object.
"""
assert isinstance(normalize, bool)
assert isinstance(sentence, text_type)
normalized_sentence = func_normalizer(sentence)
if six.PY2:
normalized_sentence = normalized_sentence.encode('utf-8')
result = self.__list_tags(self.kytea.getTags(normalized_sentence))
token_objects = [
self.__extract_morphological_information(
kytea_tags_tuple=kytea_tags,
is_feature=is_feature
)
for kytea_tags in result]
if return_list:
tokenized_objects = TokenizedSenetence(
sentence=sentence,
tokenized_objects=token_objects
)
return tokenized_objects.convert_list_object()
else:
tokenized_objects = TokenizedSenetence(
sentence=sentence,
tokenized_objects=token_objects)
return tokenized_objects | [
"def",
"tokenize",
"(",
"self",
",",
"sentence",
",",
"normalize",
"=",
"True",
",",
"is_feature",
"=",
"False",
",",
"is_surface",
"=",
"False",
",",
"return_list",
"=",
"False",
",",
"func_normalizer",
"=",
"text_preprocess",
".",
"normalize_text",
")",
":",
"# type: (text_type, bool, bool, bool, bool, Callable[[str],str]) -> Union[List[str], TokenizedSenetence]",
"assert",
"isinstance",
"(",
"normalize",
",",
"bool",
")",
"assert",
"isinstance",
"(",
"sentence",
",",
"text_type",
")",
"normalized_sentence",
"=",
"func_normalizer",
"(",
"sentence",
")",
"if",
"six",
".",
"PY2",
":",
"normalized_sentence",
"=",
"normalized_sentence",
".",
"encode",
"(",
"'utf-8'",
")",
"result",
"=",
"self",
".",
"__list_tags",
"(",
"self",
".",
"kytea",
".",
"getTags",
"(",
"normalized_sentence",
")",
")",
"token_objects",
"=",
"[",
"self",
".",
"__extract_morphological_information",
"(",
"kytea_tags_tuple",
"=",
"kytea_tags",
",",
"is_feature",
"=",
"is_feature",
")",
"for",
"kytea_tags",
"in",
"result",
"]",
"if",
"return_list",
":",
"tokenized_objects",
"=",
"TokenizedSenetence",
"(",
"sentence",
"=",
"sentence",
",",
"tokenized_objects",
"=",
"token_objects",
")",
"return",
"tokenized_objects",
".",
"convert_list_object",
"(",
")",
"else",
":",
"tokenized_objects",
"=",
"TokenizedSenetence",
"(",
"sentence",
"=",
"sentence",
",",
"tokenized_objects",
"=",
"token_objects",
")",
"return",
"tokenized_objects"
] | 39.947368 | 19.078947 |
def validate(self, collection: BioCCollection):
"""Validate a single collection."""
for document in collection.documents:
self.validate_doc(document) | [
"def",
"validate",
"(",
"self",
",",
"collection",
":",
"BioCCollection",
")",
":",
"for",
"document",
"in",
"collection",
".",
"documents",
":",
"self",
".",
"validate_doc",
"(",
"document",
")"
] | 43.5 | 3.25 |
async def download_cot_artifacts(chain):
"""Call ``download_cot_artifact`` in parallel for each "upstreamArtifacts".
Optional artifacts are allowed to not be downloaded.
Args:
chain (ChainOfTrust): the chain of trust object
Returns:
list: list of full paths to downloaded artifacts. Failed optional artifacts
aren't returned
Raises:
CoTError: on chain of trust sha validation error, on a mandatory artifact
BaseDownloadError: on download error on a mandatory artifact
"""
upstream_artifacts = chain.task['payload'].get('upstreamArtifacts', [])
all_artifacts_per_task_id = get_all_artifacts_per_task_id(chain, upstream_artifacts)
mandatory_artifact_tasks = []
optional_artifact_tasks = []
for task_id, paths in all_artifacts_per_task_id.items():
for path in paths:
coroutine = asyncio.ensure_future(download_cot_artifact(chain, task_id, path))
if is_artifact_optional(chain, task_id, path):
optional_artifact_tasks.append(coroutine)
else:
mandatory_artifact_tasks.append(coroutine)
mandatory_artifacts_paths = await raise_future_exceptions(mandatory_artifact_tasks)
succeeded_optional_artifacts_paths, failed_optional_artifacts = \
await get_results_and_future_exceptions(optional_artifact_tasks)
if failed_optional_artifacts:
log.warning('Could not download {} artifacts: {}'.format(len(failed_optional_artifacts), failed_optional_artifacts))
return mandatory_artifacts_paths + succeeded_optional_artifacts_paths | [
"async",
"def",
"download_cot_artifacts",
"(",
"chain",
")",
":",
"upstream_artifacts",
"=",
"chain",
".",
"task",
"[",
"'payload'",
"]",
".",
"get",
"(",
"'upstreamArtifacts'",
",",
"[",
"]",
")",
"all_artifacts_per_task_id",
"=",
"get_all_artifacts_per_task_id",
"(",
"chain",
",",
"upstream_artifacts",
")",
"mandatory_artifact_tasks",
"=",
"[",
"]",
"optional_artifact_tasks",
"=",
"[",
"]",
"for",
"task_id",
",",
"paths",
"in",
"all_artifacts_per_task_id",
".",
"items",
"(",
")",
":",
"for",
"path",
"in",
"paths",
":",
"coroutine",
"=",
"asyncio",
".",
"ensure_future",
"(",
"download_cot_artifact",
"(",
"chain",
",",
"task_id",
",",
"path",
")",
")",
"if",
"is_artifact_optional",
"(",
"chain",
",",
"task_id",
",",
"path",
")",
":",
"optional_artifact_tasks",
".",
"append",
"(",
"coroutine",
")",
"else",
":",
"mandatory_artifact_tasks",
".",
"append",
"(",
"coroutine",
")",
"mandatory_artifacts_paths",
"=",
"await",
"raise_future_exceptions",
"(",
"mandatory_artifact_tasks",
")",
"succeeded_optional_artifacts_paths",
",",
"failed_optional_artifacts",
"=",
"await",
"get_results_and_future_exceptions",
"(",
"optional_artifact_tasks",
")",
"if",
"failed_optional_artifacts",
":",
"log",
".",
"warning",
"(",
"'Could not download {} artifacts: {}'",
".",
"format",
"(",
"len",
"(",
"failed_optional_artifacts",
")",
",",
"failed_optional_artifacts",
")",
")",
"return",
"mandatory_artifacts_paths",
"+",
"succeeded_optional_artifacts_paths"
] | 40.410256 | 29.179487 |
def time_plots(df, path, title=None, color="#4CB391", figformat="png",
log_length=False, plot_settings=None):
"""Making plots of time vs read length, time vs quality and cumulative yield."""
dfs = check_valid_time_and_sort(df, "start_time")
logging.info("Nanoplotter: Creating timeplots using {} reads.".format(len(dfs)))
cumyields = cumulative_yield(dfs=dfs.set_index("start_time"),
path=path,
figformat=figformat,
title=title,
color=color)
reads_pores_over_time = plot_over_time(dfs=dfs.set_index("start_time"),
path=path,
figformat=figformat,
title=title,
color=color)
violins = violin_plots_over_time(dfs=dfs,
path=path,
figformat=figformat,
title=title,
log_length=log_length,
plot_settings=plot_settings)
return cumyields + reads_pores_over_time + violins | [
"def",
"time_plots",
"(",
"df",
",",
"path",
",",
"title",
"=",
"None",
",",
"color",
"=",
"\"#4CB391\"",
",",
"figformat",
"=",
"\"png\"",
",",
"log_length",
"=",
"False",
",",
"plot_settings",
"=",
"None",
")",
":",
"dfs",
"=",
"check_valid_time_and_sort",
"(",
"df",
",",
"\"start_time\"",
")",
"logging",
".",
"info",
"(",
"\"Nanoplotter: Creating timeplots using {} reads.\"",
".",
"format",
"(",
"len",
"(",
"dfs",
")",
")",
")",
"cumyields",
"=",
"cumulative_yield",
"(",
"dfs",
"=",
"dfs",
".",
"set_index",
"(",
"\"start_time\"",
")",
",",
"path",
"=",
"path",
",",
"figformat",
"=",
"figformat",
",",
"title",
"=",
"title",
",",
"color",
"=",
"color",
")",
"reads_pores_over_time",
"=",
"plot_over_time",
"(",
"dfs",
"=",
"dfs",
".",
"set_index",
"(",
"\"start_time\"",
")",
",",
"path",
"=",
"path",
",",
"figformat",
"=",
"figformat",
",",
"title",
"=",
"title",
",",
"color",
"=",
"color",
")",
"violins",
"=",
"violin_plots_over_time",
"(",
"dfs",
"=",
"dfs",
",",
"path",
"=",
"path",
",",
"figformat",
"=",
"figformat",
",",
"title",
"=",
"title",
",",
"log_length",
"=",
"log_length",
",",
"plot_settings",
"=",
"plot_settings",
")",
"return",
"cumyields",
"+",
"reads_pores_over_time",
"+",
"violins"
] | 57.818182 | 15.818182 |
def stop_broadcast(self, broadcast_id):
"""
Use this method to stop a live broadcast of an OpenTok session
:param String broadcast_id: The ID of the broadcast you want to stop
:rtype A Broadcast object, which contains information about the broadcast: id, sessionId,
projectId, createdAt, updatedAt and resolution
"""
endpoint = self.endpoints.broadcast_url(broadcast_id, stop=True)
response = requests.post(
endpoint,
headers=self.json_headers(),
proxies=self.proxies,
timeout=self.timeout
)
if response.status_code == 200:
return Broadcast(response.json())
elif response.status_code == 400:
raise BroadcastError(
'Invalid request. This response may indicate that data in your request '
'data is invalid JSON.')
elif response.status_code == 403:
raise AuthError('Authentication error.')
elif response.status_code == 409:
raise BroadcastError(
'The broadcast (with the specified ID) was not found or it has already '
'stopped.')
else:
raise RequestError('OpenTok server error.', response.status_code) | [
"def",
"stop_broadcast",
"(",
"self",
",",
"broadcast_id",
")",
":",
"endpoint",
"=",
"self",
".",
"endpoints",
".",
"broadcast_url",
"(",
"broadcast_id",
",",
"stop",
"=",
"True",
")",
"response",
"=",
"requests",
".",
"post",
"(",
"endpoint",
",",
"headers",
"=",
"self",
".",
"json_headers",
"(",
")",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"return",
"Broadcast",
"(",
"response",
".",
"json",
"(",
")",
")",
"elif",
"response",
".",
"status_code",
"==",
"400",
":",
"raise",
"BroadcastError",
"(",
"'Invalid request. This response may indicate that data in your request '",
"'data is invalid JSON.'",
")",
"elif",
"response",
".",
"status_code",
"==",
"403",
":",
"raise",
"AuthError",
"(",
"'Authentication error.'",
")",
"elif",
"response",
".",
"status_code",
"==",
"409",
":",
"raise",
"BroadcastError",
"(",
"'The broadcast (with the specified ID) was not found or it has already '",
"'stopped.'",
")",
"else",
":",
"raise",
"RequestError",
"(",
"'OpenTok server error.'",
",",
"response",
".",
"status_code",
")"
] | 40.387097 | 18.258065 |
def download_patric_genomes(self, ids, force_rerun=False):
"""Download genome files from PATRIC given a list of PATRIC genome IDs and load them as strains.
Args:
ids (str, list): PATRIC ID or list of PATRIC IDs
force_rerun (bool): If genome files should be downloaded again even if they exist
"""
ids = ssbio.utils.force_list(ids)
counter = 0
log.info('Downloading sequences from PATRIC...')
for patric_id in tqdm(ids):
f = ssbio.databases.patric.download_coding_sequences(patric_id=patric_id, seqtype='protein',
outdir=self.sequences_by_organism_dir,
force_rerun=force_rerun)
if f:
self.load_strain(patric_id, f)
counter += 1
log.debug('{}: downloaded sequence'.format(patric_id))
else:
log.warning('{}: unable to download sequence'.format(patric_id))
log.info('Created {} new strain GEM-PROs, accessible at "strains" attribute'.format(counter)) | [
"def",
"download_patric_genomes",
"(",
"self",
",",
"ids",
",",
"force_rerun",
"=",
"False",
")",
":",
"ids",
"=",
"ssbio",
".",
"utils",
".",
"force_list",
"(",
"ids",
")",
"counter",
"=",
"0",
"log",
".",
"info",
"(",
"'Downloading sequences from PATRIC...'",
")",
"for",
"patric_id",
"in",
"tqdm",
"(",
"ids",
")",
":",
"f",
"=",
"ssbio",
".",
"databases",
".",
"patric",
".",
"download_coding_sequences",
"(",
"patric_id",
"=",
"patric_id",
",",
"seqtype",
"=",
"'protein'",
",",
"outdir",
"=",
"self",
".",
"sequences_by_organism_dir",
",",
"force_rerun",
"=",
"force_rerun",
")",
"if",
"f",
":",
"self",
".",
"load_strain",
"(",
"patric_id",
",",
"f",
")",
"counter",
"+=",
"1",
"log",
".",
"debug",
"(",
"'{}: downloaded sequence'",
".",
"format",
"(",
"patric_id",
")",
")",
"else",
":",
"log",
".",
"warning",
"(",
"'{}: unable to download sequence'",
".",
"format",
"(",
"patric_id",
")",
")",
"log",
".",
"info",
"(",
"'Created {} new strain GEM-PROs, accessible at \"strains\" attribute'",
".",
"format",
"(",
"counter",
")",
")"
] | 47.708333 | 28.833333 |
def check_access_token(self, request_token):
"""Checks that the token contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.access_token_length
return (set(request_token) <= self.safe_characters and
lower <= len(request_token) <= upper) | [
"def",
"check_access_token",
"(",
"self",
",",
"request_token",
")",
":",
"lower",
",",
"upper",
"=",
"self",
".",
"access_token_length",
"return",
"(",
"set",
"(",
"request_token",
")",
"<=",
"self",
".",
"safe_characters",
"and",
"lower",
"<=",
"len",
"(",
"request_token",
")",
"<=",
"upper",
")"
] | 48.714286 | 9.714286 |
def demo(nums=[]):
"Print a few usage examples on stdout."
nums = nums or [3, 1, 4, 1, 5, 9, 2, 6]
fmt = lambda num: '{0:g}'.format(num) if isinstance(num, (float, int)) else 'None'
nums1 = list(map(fmt, nums))
if __name__ == '__main__':
prog = sys.argv[0]
else:
prog = 'sparklines'
result = []
result.append('Usage examples (command-line and programmatic use):')
result.append('')
result.append('- Standard one-line sparkline')
result.append('{0!s} {1!s}'.format(prog, ' '.join(nums1)))
result.append('>>> print(sparklines([{0!s}])[0])'.format(', '.join(nums1)))
result.append(sparklines(nums)[0])
result.append('')
result.append('- Multi-line sparkline (n=2)')
result.append('{0!s} -n 2 {1!s}'.format(prog, ' '.join(nums1)))
result.append('>>> for line in sparklines([{0!s}], num_lines=2): print(line)'.format(', '.join(nums1)))
for line in sparklines(nums, num_lines=2):
result.append(line)
result.append('')
result.append('- Multi-line sparkline (n=3)')
result.append('{0!s} -n 3 {1!s}'.format(prog, ' '.join(nums1)))
result.append('>>> for line in sparklines([{0!s}], num_lines=3): print(line)'.format(', '.join(nums1)))
for line in sparklines(nums, num_lines=3):
result.append(line)
result.append('')
nums = nums + [None] + list(reversed(nums[:]))
result.append('- Standard one-line sparkline with gap')
result.append('{0!s} {1!s}'.format(prog, ' '.join(map(str, nums))))
result.append('>>> print(sparklines([{0!s}])[0])'.format(', '.join(map(str, nums))))
result.append(sparklines(nums)[0])
return '\n'.join(result) + '\n' | [
"def",
"demo",
"(",
"nums",
"=",
"[",
"]",
")",
":",
"nums",
"=",
"nums",
"or",
"[",
"3",
",",
"1",
",",
"4",
",",
"1",
",",
"5",
",",
"9",
",",
"2",
",",
"6",
"]",
"fmt",
"=",
"lambda",
"num",
":",
"'{0:g}'",
".",
"format",
"(",
"num",
")",
"if",
"isinstance",
"(",
"num",
",",
"(",
"float",
",",
"int",
")",
")",
"else",
"'None'",
"nums1",
"=",
"list",
"(",
"map",
"(",
"fmt",
",",
"nums",
")",
")",
"if",
"__name__",
"==",
"'__main__'",
":",
"prog",
"=",
"sys",
".",
"argv",
"[",
"0",
"]",
"else",
":",
"prog",
"=",
"'sparklines'",
"result",
"=",
"[",
"]",
"result",
".",
"append",
"(",
"'Usage examples (command-line and programmatic use):'",
")",
"result",
".",
"append",
"(",
"''",
")",
"result",
".",
"append",
"(",
"'- Standard one-line sparkline'",
")",
"result",
".",
"append",
"(",
"'{0!s} {1!s}'",
".",
"format",
"(",
"prog",
",",
"' '",
".",
"join",
"(",
"nums1",
")",
")",
")",
"result",
".",
"append",
"(",
"'>>> print(sparklines([{0!s}])[0])'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"nums1",
")",
")",
")",
"result",
".",
"append",
"(",
"sparklines",
"(",
"nums",
")",
"[",
"0",
"]",
")",
"result",
".",
"append",
"(",
"''",
")",
"result",
".",
"append",
"(",
"'- Multi-line sparkline (n=2)'",
")",
"result",
".",
"append",
"(",
"'{0!s} -n 2 {1!s}'",
".",
"format",
"(",
"prog",
",",
"' '",
".",
"join",
"(",
"nums1",
")",
")",
")",
"result",
".",
"append",
"(",
"'>>> for line in sparklines([{0!s}], num_lines=2): print(line)'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"nums1",
")",
")",
")",
"for",
"line",
"in",
"sparklines",
"(",
"nums",
",",
"num_lines",
"=",
"2",
")",
":",
"result",
".",
"append",
"(",
"line",
")",
"result",
".",
"append",
"(",
"''",
")",
"result",
".",
"append",
"(",
"'- Multi-line sparkline (n=3)'",
")",
"result",
".",
"append",
"(",
"'{0!s} -n 3 {1!s}'",
".",
"format",
"(",
"prog",
",",
"' '",
".",
"join",
"(",
"nums1",
")",
")",
")",
"result",
".",
"append",
"(",
"'>>> for line in sparklines([{0!s}], num_lines=3): print(line)'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"nums1",
")",
")",
")",
"for",
"line",
"in",
"sparklines",
"(",
"nums",
",",
"num_lines",
"=",
"3",
")",
":",
"result",
".",
"append",
"(",
"line",
")",
"result",
".",
"append",
"(",
"''",
")",
"nums",
"=",
"nums",
"+",
"[",
"None",
"]",
"+",
"list",
"(",
"reversed",
"(",
"nums",
"[",
":",
"]",
")",
")",
"result",
".",
"append",
"(",
"'- Standard one-line sparkline with gap'",
")",
"result",
".",
"append",
"(",
"'{0!s} {1!s}'",
".",
"format",
"(",
"prog",
",",
"' '",
".",
"join",
"(",
"map",
"(",
"str",
",",
"nums",
")",
")",
")",
")",
"result",
".",
"append",
"(",
"'>>> print(sparklines([{0!s}])[0])'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"map",
"(",
"str",
",",
"nums",
")",
")",
")",
")",
"result",
".",
"append",
"(",
"sparklines",
"(",
"nums",
")",
"[",
"0",
"]",
")",
"return",
"'\\n'",
".",
"join",
"(",
"result",
")",
"+",
"'\\n'"
] | 37.431818 | 24.431818 |
def retry(self, f, *args, **kwargs):
"""
Retries the given function up to self.tries times on NetworkError.
"""
backoff = random.random() / 100 # 5ms on average
for _ in range(self.tries - 1):
try:
return f(*args, **kwargs)
except NetworkError:
time.sleep(backoff)
backoff *= 2
return f(*args, **kwargs) | [
"def",
"retry",
"(",
"self",
",",
"f",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"backoff",
"=",
"random",
".",
"random",
"(",
")",
"/",
"100",
"# 5ms on average",
"for",
"_",
"in",
"range",
"(",
"self",
".",
"tries",
"-",
"1",
")",
":",
"try",
":",
"return",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"NetworkError",
":",
"time",
".",
"sleep",
"(",
"backoff",
")",
"backoff",
"*=",
"2",
"return",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 33.833333 | 8.833333 |
def get_coinc_def_id(self, search, search_coinc_type, create_new = True, description = None):
"""
Return the coinc_def_id for the row in the table whose
search string and search_coinc_type integer have the values
given. If a matching row is not found, the default
behaviour is to create a new row and return the ID assigned
to the new row. If, instead, create_new is False then
KeyError is raised when a matching row is not found. The
optional description parameter can be used to set the
description string assigned to the new row if one is
created, otherwise the new row is left with no description.
"""
# look for the ID
rows = [row for row in self if (row.search, row.search_coinc_type) == (search, search_coinc_type)]
if len(rows) > 1:
raise ValueError("(search, search coincidence type) = ('%s', %d) is not unique" % (search, search_coinc_type))
if len(rows) > 0:
return rows[0].coinc_def_id
# coinc type not found in table
if not create_new:
raise KeyError((search, search_coinc_type))
row = self.RowType()
row.coinc_def_id = self.get_next_id()
row.search = search
row.search_coinc_type = search_coinc_type
row.description = description
self.append(row)
# return new ID
return row.coinc_def_id | [
"def",
"get_coinc_def_id",
"(",
"self",
",",
"search",
",",
"search_coinc_type",
",",
"create_new",
"=",
"True",
",",
"description",
"=",
"None",
")",
":",
"# look for the ID",
"rows",
"=",
"[",
"row",
"for",
"row",
"in",
"self",
"if",
"(",
"row",
".",
"search",
",",
"row",
".",
"search_coinc_type",
")",
"==",
"(",
"search",
",",
"search_coinc_type",
")",
"]",
"if",
"len",
"(",
"rows",
")",
">",
"1",
":",
"raise",
"ValueError",
"(",
"\"(search, search coincidence type) = ('%s', %d) is not unique\"",
"%",
"(",
"search",
",",
"search_coinc_type",
")",
")",
"if",
"len",
"(",
"rows",
")",
">",
"0",
":",
"return",
"rows",
"[",
"0",
"]",
".",
"coinc_def_id",
"# coinc type not found in table",
"if",
"not",
"create_new",
":",
"raise",
"KeyError",
"(",
"(",
"search",
",",
"search_coinc_type",
")",
")",
"row",
"=",
"self",
".",
"RowType",
"(",
")",
"row",
".",
"coinc_def_id",
"=",
"self",
".",
"get_next_id",
"(",
")",
"row",
".",
"search",
"=",
"search",
"row",
".",
"search_coinc_type",
"=",
"search_coinc_type",
"row",
".",
"description",
"=",
"description",
"self",
".",
"append",
"(",
"row",
")",
"# return new ID",
"return",
"row",
".",
"coinc_def_id"
] | 39.806452 | 20.580645 |
def doDirectPayment(self, params):
"""Call PayPal DoDirectPayment method."""
defaults = {"method": "DoDirectPayment", "paymentaction": "Sale"}
required = ["creditcardtype",
"acct",
"expdate",
"cvv2",
"ipaddress",
"firstname",
"lastname",
"street",
"city",
"state",
"countrycode",
"zip",
"amt",
]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
# @@@ Could check cvv2match / avscode are both 'X' or '0'
# qd = django.http.QueryDict(nvp_obj.response)
# if qd.get('cvv2match') not in ['X', '0']:
# nvp_obj.set_flag("Invalid cvv2match: %s" % qd.get('cvv2match')
# if qd.get('avscode') not in ['X', '0']:
# nvp_obj.set_flag("Invalid avscode: %s" % qd.get('avscode')
return nvp_obj | [
"def",
"doDirectPayment",
"(",
"self",
",",
"params",
")",
":",
"defaults",
"=",
"{",
"\"method\"",
":",
"\"DoDirectPayment\"",
",",
"\"paymentaction\"",
":",
"\"Sale\"",
"}",
"required",
"=",
"[",
"\"creditcardtype\"",
",",
"\"acct\"",
",",
"\"expdate\"",
",",
"\"cvv2\"",
",",
"\"ipaddress\"",
",",
"\"firstname\"",
",",
"\"lastname\"",
",",
"\"street\"",
",",
"\"city\"",
",",
"\"state\"",
",",
"\"countrycode\"",
",",
"\"zip\"",
",",
"\"amt\"",
",",
"]",
"nvp_obj",
"=",
"self",
".",
"_fetch",
"(",
"params",
",",
"required",
",",
"defaults",
")",
"if",
"nvp_obj",
".",
"flag",
":",
"raise",
"PayPalFailure",
"(",
"nvp_obj",
".",
"flag_info",
",",
"nvp",
"=",
"nvp_obj",
")",
"# @@@ Could check cvv2match / avscode are both 'X' or '0'",
"# qd = django.http.QueryDict(nvp_obj.response)",
"# if qd.get('cvv2match') not in ['X', '0']:",
"# nvp_obj.set_flag(\"Invalid cvv2match: %s\" % qd.get('cvv2match')",
"# if qd.get('avscode') not in ['X', '0']:",
"# nvp_obj.set_flag(\"Invalid avscode: %s\" % qd.get('avscode')",
"return",
"nvp_obj"
] | 40.444444 | 14.407407 |
def doc_dir(self):
"""The absolute directory of the document"""
from os.path import abspath
if not self.ref:
return None
u = parse_app_url(self.ref)
return abspath(dirname(u.path)) | [
"def",
"doc_dir",
"(",
"self",
")",
":",
"from",
"os",
".",
"path",
"import",
"abspath",
"if",
"not",
"self",
".",
"ref",
":",
"return",
"None",
"u",
"=",
"parse_app_url",
"(",
"self",
".",
"ref",
")",
"return",
"abspath",
"(",
"dirname",
"(",
"u",
".",
"path",
")",
")"
] | 25.111111 | 16.222222 |
def delete_as(access_token, subscription_id, resource_group, as_name):
'''Delete availability set.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
resource_group (str): Azure resource group name.
as_name (str): Name of the availability set.
Returns:
HTTP response.
'''
endpoint = ''.join([get_rm_endpoint(),
'/subscriptions/', subscription_id,
'/resourceGroups/', resource_group,
'/providers/Microsoft.Compute/availabilitySets/', as_name,
'?api-version=', COMP_API])
return do_delete(endpoint, access_token) | [
"def",
"delete_as",
"(",
"access_token",
",",
"subscription_id",
",",
"resource_group",
",",
"as_name",
")",
":",
"endpoint",
"=",
"''",
".",
"join",
"(",
"[",
"get_rm_endpoint",
"(",
")",
",",
"'/subscriptions/'",
",",
"subscription_id",
",",
"'/resourceGroups/'",
",",
"resource_group",
",",
"'/providers/Microsoft.Compute/availabilitySets/'",
",",
"as_name",
",",
"'?api-version='",
",",
"COMP_API",
"]",
")",
"return",
"do_delete",
"(",
"endpoint",
",",
"access_token",
")"
] | 39.555556 | 21.666667 |
def retrofitted(self):
"""
:returns: the asset retrofitted value
"""
return self.calc('structural', {'structural': self._retrofitted},
self.area, self.number) | [
"def",
"retrofitted",
"(",
"self",
")",
":",
"return",
"self",
".",
"calc",
"(",
"'structural'",
",",
"{",
"'structural'",
":",
"self",
".",
"_retrofitted",
"}",
",",
"self",
".",
"area",
",",
"self",
".",
"number",
")"
] | 35 | 10.666667 |
def horizontal(y, xmin=0, xmax=1, color=None, width=None, dash=None, opacity=None):
"""Draws a horizontal line from `xmin` to `xmax`.
Parameters
----------
xmin : int, optional
xmax : int, optional
color : str, optional
width : number, optional
Returns
-------
Chart
"""
lineattr = {}
if color:
lineattr['color'] = color
if width:
lineattr['width'] = width
if dash:
lineattr['dash'] = dash
layout = dict(
shapes=[dict(type='line', x0=xmin, x1=xmax, y0=y, y1=y, opacity=opacity, line=lineattr)]
)
return Chart(layout=layout) | [
"def",
"horizontal",
"(",
"y",
",",
"xmin",
"=",
"0",
",",
"xmax",
"=",
"1",
",",
"color",
"=",
"None",
",",
"width",
"=",
"None",
",",
"dash",
"=",
"None",
",",
"opacity",
"=",
"None",
")",
":",
"lineattr",
"=",
"{",
"}",
"if",
"color",
":",
"lineattr",
"[",
"'color'",
"]",
"=",
"color",
"if",
"width",
":",
"lineattr",
"[",
"'width'",
"]",
"=",
"width",
"if",
"dash",
":",
"lineattr",
"[",
"'dash'",
"]",
"=",
"dash",
"layout",
"=",
"dict",
"(",
"shapes",
"=",
"[",
"dict",
"(",
"type",
"=",
"'line'",
",",
"x0",
"=",
"xmin",
",",
"x1",
"=",
"xmax",
",",
"y0",
"=",
"y",
",",
"y1",
"=",
"y",
",",
"opacity",
"=",
"opacity",
",",
"line",
"=",
"lineattr",
")",
"]",
")",
"return",
"Chart",
"(",
"layout",
"=",
"layout",
")"
] | 22.407407 | 24.185185 |
def calc_ethsw_port(self, port_num, port_def):
"""
Split and create the port entry for an Ethernet Switch
:param port_num: port number
:type port_num: str or int
:param str port_def: port definition
"""
# Port String - access 1 SW2 1
# 0: type 1: vlan 2: destination device 3: destination port
port_def = port_def.split(' ')
if len(port_def) == 4:
destination = {'device': port_def[2],
'port': port_def[3]}
else:
destination = {'device': 'NIO',
'port': port_def[2]}
# port entry
port = {'id': self.port_id,
'name': str(port_num),
'port_number': int(port_num),
'type': port_def[0],
'vlan': int(port_def[1])}
self.node['ports'].append(port)
self.calc_link(self.node['id'], self.port_id, port['name'],
destination)
self.port_id += 1 | [
"def",
"calc_ethsw_port",
"(",
"self",
",",
"port_num",
",",
"port_def",
")",
":",
"# Port String - access 1 SW2 1",
"# 0: type 1: vlan 2: destination device 3: destination port",
"port_def",
"=",
"port_def",
".",
"split",
"(",
"' '",
")",
"if",
"len",
"(",
"port_def",
")",
"==",
"4",
":",
"destination",
"=",
"{",
"'device'",
":",
"port_def",
"[",
"2",
"]",
",",
"'port'",
":",
"port_def",
"[",
"3",
"]",
"}",
"else",
":",
"destination",
"=",
"{",
"'device'",
":",
"'NIO'",
",",
"'port'",
":",
"port_def",
"[",
"2",
"]",
"}",
"# port entry",
"port",
"=",
"{",
"'id'",
":",
"self",
".",
"port_id",
",",
"'name'",
":",
"str",
"(",
"port_num",
")",
",",
"'port_number'",
":",
"int",
"(",
"port_num",
")",
",",
"'type'",
":",
"port_def",
"[",
"0",
"]",
",",
"'vlan'",
":",
"int",
"(",
"port_def",
"[",
"1",
"]",
")",
"}",
"self",
".",
"node",
"[",
"'ports'",
"]",
".",
"append",
"(",
"port",
")",
"self",
".",
"calc_link",
"(",
"self",
".",
"node",
"[",
"'id'",
"]",
",",
"self",
".",
"port_id",
",",
"port",
"[",
"'name'",
"]",
",",
"destination",
")",
"self",
".",
"port_id",
"+=",
"1"
] | 36.925926 | 9.666667 |
def get_one(cls,
execution_date,
key=None,
task_id=None,
dag_id=None,
include_prior_dates=False,
session=None):
"""
Retrieve an XCom value, optionally meeting certain criteria.
TODO: "pickling" has been deprecated and JSON is preferred.
"pickling" will be removed in Airflow 2.0.
:return: XCom value
"""
filters = []
if key:
filters.append(cls.key == key)
if task_id:
filters.append(cls.task_id == task_id)
if dag_id:
filters.append(cls.dag_id == dag_id)
if include_prior_dates:
filters.append(cls.execution_date <= execution_date)
else:
filters.append(cls.execution_date == execution_date)
query = (
session.query(cls.value).filter(and_(*filters))
.order_by(cls.execution_date.desc(), cls.timestamp.desc()))
result = query.first()
if result:
enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling')
if enable_pickling:
return pickle.loads(result.value)
else:
try:
return json.loads(result.value.decode('UTF-8'))
except ValueError:
log = LoggingMixin().log
log.error("Could not deserialize the XCOM value from JSON. "
"If you are using pickles instead of JSON "
"for XCOM, then you need to enable pickle "
"support for XCOM in your airflow config.")
raise | [
"def",
"get_one",
"(",
"cls",
",",
"execution_date",
",",
"key",
"=",
"None",
",",
"task_id",
"=",
"None",
",",
"dag_id",
"=",
"None",
",",
"include_prior_dates",
"=",
"False",
",",
"session",
"=",
"None",
")",
":",
"filters",
"=",
"[",
"]",
"if",
"key",
":",
"filters",
".",
"append",
"(",
"cls",
".",
"key",
"==",
"key",
")",
"if",
"task_id",
":",
"filters",
".",
"append",
"(",
"cls",
".",
"task_id",
"==",
"task_id",
")",
"if",
"dag_id",
":",
"filters",
".",
"append",
"(",
"cls",
".",
"dag_id",
"==",
"dag_id",
")",
"if",
"include_prior_dates",
":",
"filters",
".",
"append",
"(",
"cls",
".",
"execution_date",
"<=",
"execution_date",
")",
"else",
":",
"filters",
".",
"append",
"(",
"cls",
".",
"execution_date",
"==",
"execution_date",
")",
"query",
"=",
"(",
"session",
".",
"query",
"(",
"cls",
".",
"value",
")",
".",
"filter",
"(",
"and_",
"(",
"*",
"filters",
")",
")",
".",
"order_by",
"(",
"cls",
".",
"execution_date",
".",
"desc",
"(",
")",
",",
"cls",
".",
"timestamp",
".",
"desc",
"(",
")",
")",
")",
"result",
"=",
"query",
".",
"first",
"(",
")",
"if",
"result",
":",
"enable_pickling",
"=",
"configuration",
".",
"getboolean",
"(",
"'core'",
",",
"'enable_xcom_pickling'",
")",
"if",
"enable_pickling",
":",
"return",
"pickle",
".",
"loads",
"(",
"result",
".",
"value",
")",
"else",
":",
"try",
":",
"return",
"json",
".",
"loads",
"(",
"result",
".",
"value",
".",
"decode",
"(",
"'UTF-8'",
")",
")",
"except",
"ValueError",
":",
"log",
"=",
"LoggingMixin",
"(",
")",
".",
"log",
"log",
".",
"error",
"(",
"\"Could not deserialize the XCOM value from JSON. \"",
"\"If you are using pickles instead of JSON \"",
"\"for XCOM, then you need to enable pickle \"",
"\"support for XCOM in your airflow config.\"",
")",
"raise"
] | 37.622222 | 19.622222 |
def dictfetchall(cursor):
"""Returns all rows from a cursor as a dict (rather than a headerless table)
From Django Documentation: https://docs.djangoproject.com/en/dev/topics/db/sql/
"""
desc = cursor.description
return [dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall()] | [
"def",
"dictfetchall",
"(",
"cursor",
")",
":",
"desc",
"=",
"cursor",
".",
"description",
"return",
"[",
"dict",
"(",
"zip",
"(",
"[",
"col",
"[",
"0",
"]",
"for",
"col",
"in",
"desc",
"]",
",",
"row",
")",
")",
"for",
"row",
"in",
"cursor",
".",
"fetchall",
"(",
")",
"]"
] | 43.714286 | 21.571429 |
def GetFileSystemTypeIndicators(cls, path_spec, resolver_context=None):
"""Determines if a file contains supported file system types.
Args:
path_spec (PathSpec): path specification.
resolver_context (Optional[Context]): resolver context, where None
represents the built-in context which is not multi process safe.
Returns:
list[str]: supported format type indicators.
"""
if (cls._file_system_remainder_list is None or
cls._file_system_store is None):
specification_store, remainder_list = cls._GetSpecificationStore(
definitions.FORMAT_CATEGORY_FILE_SYSTEM)
cls._file_system_remainder_list = remainder_list
cls._file_system_store = specification_store
if cls._file_system_scanner is None:
cls._file_system_scanner = cls._GetSignatureScanner(
cls._file_system_store)
return cls._GetTypeIndicators(
cls._file_system_scanner, cls._file_system_store,
cls._file_system_remainder_list, path_spec,
resolver_context=resolver_context) | [
"def",
"GetFileSystemTypeIndicators",
"(",
"cls",
",",
"path_spec",
",",
"resolver_context",
"=",
"None",
")",
":",
"if",
"(",
"cls",
".",
"_file_system_remainder_list",
"is",
"None",
"or",
"cls",
".",
"_file_system_store",
"is",
"None",
")",
":",
"specification_store",
",",
"remainder_list",
"=",
"cls",
".",
"_GetSpecificationStore",
"(",
"definitions",
".",
"FORMAT_CATEGORY_FILE_SYSTEM",
")",
"cls",
".",
"_file_system_remainder_list",
"=",
"remainder_list",
"cls",
".",
"_file_system_store",
"=",
"specification_store",
"if",
"cls",
".",
"_file_system_scanner",
"is",
"None",
":",
"cls",
".",
"_file_system_scanner",
"=",
"cls",
".",
"_GetSignatureScanner",
"(",
"cls",
".",
"_file_system_store",
")",
"return",
"cls",
".",
"_GetTypeIndicators",
"(",
"cls",
".",
"_file_system_scanner",
",",
"cls",
".",
"_file_system_store",
",",
"cls",
".",
"_file_system_remainder_list",
",",
"path_spec",
",",
"resolver_context",
"=",
"resolver_context",
")"
] | 39.961538 | 18.038462 |
def flag_forgotten_entries(session, today=None):
"""Flag any entries from previous days where users forgot to sign
out.
:param session: SQLAlchemy session through which to access the database.
:param today: (optional) The current date as a `datetime.date` object. Used for testing.
""" # noqa
today = date.today() if today is None else today
forgotten = (
session
.query(Entry)
.filter(Entry.time_out.is_(None))
.filter(Entry.forgot_sign_out.is_(False))
.filter(Entry.date < today)
)
for entry in forgotten:
e = sign_out(entry, forgot=True)
logger.debug('Signing out forgotten entry: {}'.format(e))
session.add(e)
session.commit() | [
"def",
"flag_forgotten_entries",
"(",
"session",
",",
"today",
"=",
"None",
")",
":",
"# noqa",
"today",
"=",
"date",
".",
"today",
"(",
")",
"if",
"today",
"is",
"None",
"else",
"today",
"forgotten",
"=",
"(",
"session",
".",
"query",
"(",
"Entry",
")",
".",
"filter",
"(",
"Entry",
".",
"time_out",
".",
"is_",
"(",
"None",
")",
")",
".",
"filter",
"(",
"Entry",
".",
"forgot_sign_out",
".",
"is_",
"(",
"False",
")",
")",
".",
"filter",
"(",
"Entry",
".",
"date",
"<",
"today",
")",
")",
"for",
"entry",
"in",
"forgotten",
":",
"e",
"=",
"sign_out",
"(",
"entry",
",",
"forgot",
"=",
"True",
")",
"logger",
".",
"debug",
"(",
"'Signing out forgotten entry: {}'",
".",
"format",
"(",
"e",
")",
")",
"session",
".",
"add",
"(",
"e",
")",
"session",
".",
"commit",
"(",
")"
] | 31.130435 | 21.434783 |
def generate_action(args):
"""Generate action."""
controller = args.get('<controller>')
action = args.get('<action>')
with_template = args.get('-t')
current_path = os.getcwd()
logger.info('Start generating action.')
controller_file_path = os.path.join(current_path, 'application/controllers', controller + '.py')
if not os.path.exists(controller_file_path):
logger.warning("The controller %s does't exist." % controller)
return
if with_template:
action_source_path = os.path.join(dirname(abspath(__file__)), 'templates/action.py')
else:
action_source_path = os.path.join(dirname(abspath(__file__)), 'templates/action_without_template.py')
# Add action source codes
with open(action_source_path, 'r') as action_source_file:
with open(controller_file_path, 'a') as controller_file:
for action_line in action_source_file:
new_line = action_line.replace('#{controller}', controller). \
replace('#{action}', action)
controller_file.write(new_line)
logger.info("Updated: %s" % _relative_path(controller_file_path))
if with_template:
# assets dir
assets_dir_path = os.path.join(current_path, 'application/pages/%s/%s' % (controller, action))
_mkdir_p(assets_dir_path)
# html
action_html_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.html')
action_html_path = os.path.join(assets_dir_path, '%s.html' % action)
with open(action_html_template_path, 'r') as action_html_template_file:
with open(action_html_path, 'w') as action_html_file:
for line in action_html_template_file:
new_line = line.replace('#{action}', action) \
.replace('#{action|title}', action.title()) \
.replace('#{controller}', controller)
action_html_file.write(new_line)
logger.info("New: %s" % _relative_path(action_html_path))
# js
action_js_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.js')
action_js_path = os.path.join(assets_dir_path, '%s.js' % action)
shutil.copy(action_js_template_path, action_js_path)
logger.info("New: %s" % _relative_path(action_js_path))
# less
action_less_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.less')
action_less_path = os.path.join(assets_dir_path, '%s.less' % action)
shutil.copy(action_less_template_path, action_less_path)
logger.info("New: %s" % _relative_path(action_less_path))
logger.info('Finish generating action.') | [
"def",
"generate_action",
"(",
"args",
")",
":",
"controller",
"=",
"args",
".",
"get",
"(",
"'<controller>'",
")",
"action",
"=",
"args",
".",
"get",
"(",
"'<action>'",
")",
"with_template",
"=",
"args",
".",
"get",
"(",
"'-t'",
")",
"current_path",
"=",
"os",
".",
"getcwd",
"(",
")",
"logger",
".",
"info",
"(",
"'Start generating action.'",
")",
"controller_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"current_path",
",",
"'application/controllers'",
",",
"controller",
"+",
"'.py'",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"controller_file_path",
")",
":",
"logger",
".",
"warning",
"(",
"\"The controller %s does't exist.\"",
"%",
"controller",
")",
"return",
"if",
"with_template",
":",
"action_source_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
"(",
"abspath",
"(",
"__file__",
")",
")",
",",
"'templates/action.py'",
")",
"else",
":",
"action_source_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
"(",
"abspath",
"(",
"__file__",
")",
")",
",",
"'templates/action_without_template.py'",
")",
"# Add action source codes",
"with",
"open",
"(",
"action_source_path",
",",
"'r'",
")",
"as",
"action_source_file",
":",
"with",
"open",
"(",
"controller_file_path",
",",
"'a'",
")",
"as",
"controller_file",
":",
"for",
"action_line",
"in",
"action_source_file",
":",
"new_line",
"=",
"action_line",
".",
"replace",
"(",
"'#{controller}'",
",",
"controller",
")",
".",
"replace",
"(",
"'#{action}'",
",",
"action",
")",
"controller_file",
".",
"write",
"(",
"new_line",
")",
"logger",
".",
"info",
"(",
"\"Updated: %s\"",
"%",
"_relative_path",
"(",
"controller_file_path",
")",
")",
"if",
"with_template",
":",
"# assets dir",
"assets_dir_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"current_path",
",",
"'application/pages/%s/%s'",
"%",
"(",
"controller",
",",
"action",
")",
")",
"_mkdir_p",
"(",
"assets_dir_path",
")",
"# html",
"action_html_template_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
"(",
"abspath",
"(",
"__file__",
")",
")",
",",
"'templates/action.html'",
")",
"action_html_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"assets_dir_path",
",",
"'%s.html'",
"%",
"action",
")",
"with",
"open",
"(",
"action_html_template_path",
",",
"'r'",
")",
"as",
"action_html_template_file",
":",
"with",
"open",
"(",
"action_html_path",
",",
"'w'",
")",
"as",
"action_html_file",
":",
"for",
"line",
"in",
"action_html_template_file",
":",
"new_line",
"=",
"line",
".",
"replace",
"(",
"'#{action}'",
",",
"action",
")",
".",
"replace",
"(",
"'#{action|title}'",
",",
"action",
".",
"title",
"(",
")",
")",
".",
"replace",
"(",
"'#{controller}'",
",",
"controller",
")",
"action_html_file",
".",
"write",
"(",
"new_line",
")",
"logger",
".",
"info",
"(",
"\"New: %s\"",
"%",
"_relative_path",
"(",
"action_html_path",
")",
")",
"# js",
"action_js_template_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
"(",
"abspath",
"(",
"__file__",
")",
")",
",",
"'templates/action.js'",
")",
"action_js_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"assets_dir_path",
",",
"'%s.js'",
"%",
"action",
")",
"shutil",
".",
"copy",
"(",
"action_js_template_path",
",",
"action_js_path",
")",
"logger",
".",
"info",
"(",
"\"New: %s\"",
"%",
"_relative_path",
"(",
"action_js_path",
")",
")",
"# less",
"action_less_template_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
"(",
"abspath",
"(",
"__file__",
")",
")",
",",
"'templates/action.less'",
")",
"action_less_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"assets_dir_path",
",",
"'%s.less'",
"%",
"action",
")",
"shutil",
".",
"copy",
"(",
"action_less_template_path",
",",
"action_less_path",
")",
"logger",
".",
"info",
"(",
"\"New: %s\"",
"%",
"_relative_path",
"(",
"action_less_path",
")",
")",
"logger",
".",
"info",
"(",
"'Finish generating action.'",
")"
] | 46.344828 | 27.62069 |
def texture_from_image(renderer, image_name):
"""Create an SDL2 Texture from an image file"""
soft_surface = ext.load_image(image_name)
texture = SDL_CreateTextureFromSurface(renderer.renderer, soft_surface)
SDL_FreeSurface(soft_surface)
return texture | [
"def",
"texture_from_image",
"(",
"renderer",
",",
"image_name",
")",
":",
"soft_surface",
"=",
"ext",
".",
"load_image",
"(",
"image_name",
")",
"texture",
"=",
"SDL_CreateTextureFromSurface",
"(",
"renderer",
".",
"renderer",
",",
"soft_surface",
")",
"SDL_FreeSurface",
"(",
"soft_surface",
")",
"return",
"texture"
] | 44.5 | 12.333333 |
def add_virtual_columns_proper_motion_gal2eq(self, long_in="ra", lat_in="dec", pm_long="pm_l", pm_lat="pm_b", pm_long_out="pm_ra", pm_lat_out="pm_dec",
name_prefix="__proper_motion_gal2eq",
right_ascension_galactic_pole=192.85,
declination_galactic_pole=27.12,
propagate_uncertainties=False,
radians=False):
"""Transform/rotate proper motions from galactic to equatorial coordinates.
Inverse of :py:`add_virtual_columns_proper_motion_eq2gal`
"""
kwargs = dict(**locals())
kwargs.pop('self')
kwargs['inverse'] = True
self.add_virtual_columns_proper_motion_eq2gal(**kwargs) | [
"def",
"add_virtual_columns_proper_motion_gal2eq",
"(",
"self",
",",
"long_in",
"=",
"\"ra\"",
",",
"lat_in",
"=",
"\"dec\"",
",",
"pm_long",
"=",
"\"pm_l\"",
",",
"pm_lat",
"=",
"\"pm_b\"",
",",
"pm_long_out",
"=",
"\"pm_ra\"",
",",
"pm_lat_out",
"=",
"\"pm_dec\"",
",",
"name_prefix",
"=",
"\"__proper_motion_gal2eq\"",
",",
"right_ascension_galactic_pole",
"=",
"192.85",
",",
"declination_galactic_pole",
"=",
"27.12",
",",
"propagate_uncertainties",
"=",
"False",
",",
"radians",
"=",
"False",
")",
":",
"kwargs",
"=",
"dict",
"(",
"*",
"*",
"locals",
"(",
")",
")",
"kwargs",
".",
"pop",
"(",
"'self'",
")",
"kwargs",
"[",
"'inverse'",
"]",
"=",
"True",
"self",
".",
"add_virtual_columns_proper_motion_eq2gal",
"(",
"*",
"*",
"kwargs",
")"
] | 57.642857 | 28.785714 |
def integer_based_slice(self, ts):
"""
Transform a :class:`TimeSlice` into integer indices that numpy can work
with
Args:
ts (slice, TimeSlice): the time slice to translate into integer
indices
"""
if isinstance(ts, slice):
try:
start = Seconds(0) if ts.start is None else ts.start
if start < Seconds(0):
start = self.end + start
stop = self.end if ts.stop is None else ts.stop
if stop < Seconds(0):
stop = self.end + stop
duration = stop - start
ts = TimeSlice(start=start, duration=duration)
except (ValueError, TypeError):
pass
if not isinstance(ts, TimeSlice):
return ts
diff = self.duration - self.frequency
start_index = \
max(0, np.floor((ts.start - diff) / self.frequency))
end = self.end if ts.duration is None else ts.end
# KLUDGE: This is basically arbitrary, but the motivation is that we'd
# like to differentiate between cases where the slice
# actually/intentionally overlaps a particular sample, and cases where
# the slice overlaps the sample by a tiny amount, due to rounding or
# lack of precision (e.g. Seconds(1) / SR44100().frequency).
ratio = np.round(end / self.frequency, 2)
stop_index = np.ceil(ratio)
return slice(int(start_index), int(stop_index)) | [
"def",
"integer_based_slice",
"(",
"self",
",",
"ts",
")",
":",
"if",
"isinstance",
"(",
"ts",
",",
"slice",
")",
":",
"try",
":",
"start",
"=",
"Seconds",
"(",
"0",
")",
"if",
"ts",
".",
"start",
"is",
"None",
"else",
"ts",
".",
"start",
"if",
"start",
"<",
"Seconds",
"(",
"0",
")",
":",
"start",
"=",
"self",
".",
"end",
"+",
"start",
"stop",
"=",
"self",
".",
"end",
"if",
"ts",
".",
"stop",
"is",
"None",
"else",
"ts",
".",
"stop",
"if",
"stop",
"<",
"Seconds",
"(",
"0",
")",
":",
"stop",
"=",
"self",
".",
"end",
"+",
"stop",
"duration",
"=",
"stop",
"-",
"start",
"ts",
"=",
"TimeSlice",
"(",
"start",
"=",
"start",
",",
"duration",
"=",
"duration",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"pass",
"if",
"not",
"isinstance",
"(",
"ts",
",",
"TimeSlice",
")",
":",
"return",
"ts",
"diff",
"=",
"self",
".",
"duration",
"-",
"self",
".",
"frequency",
"start_index",
"=",
"max",
"(",
"0",
",",
"np",
".",
"floor",
"(",
"(",
"ts",
".",
"start",
"-",
"diff",
")",
"/",
"self",
".",
"frequency",
")",
")",
"end",
"=",
"self",
".",
"end",
"if",
"ts",
".",
"duration",
"is",
"None",
"else",
"ts",
".",
"end",
"# KLUDGE: This is basically arbitrary, but the motivation is that we'd",
"# like to differentiate between cases where the slice",
"# actually/intentionally overlaps a particular sample, and cases where",
"# the slice overlaps the sample by a tiny amount, due to rounding or",
"# lack of precision (e.g. Seconds(1) / SR44100().frequency).",
"ratio",
"=",
"np",
".",
"round",
"(",
"end",
"/",
"self",
".",
"frequency",
",",
"2",
")",
"stop_index",
"=",
"np",
".",
"ceil",
"(",
"ratio",
")",
"return",
"slice",
"(",
"int",
"(",
"start_index",
")",
",",
"int",
"(",
"stop_index",
")",
")"
] | 36.047619 | 21.047619 |
def popUpItem(self, *args):
"""Return the specified item in a pop up menu."""
self.Press()
time.sleep(.5)
return self._menuItem(self, *args) | [
"def",
"popUpItem",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"Press",
"(",
")",
"time",
".",
"sleep",
"(",
".5",
")",
"return",
"self",
".",
"_menuItem",
"(",
"self",
",",
"*",
"args",
")"
] | 33.6 | 10.6 |
def get_run(session, dag_id, execution_date):
"""
:param dag_id: DAG ID
:type dag_id: unicode
:param execution_date: execution date
:type execution_date: datetime
:return: DagRun corresponding to the given dag_id and execution date
if one exists. None otherwise.
:rtype: airflow.models.DagRun
"""
qry = session.query(DagRun).filter(
DagRun.dag_id == dag_id,
DagRun.external_trigger == False, # noqa
DagRun.execution_date == execution_date,
)
return qry.first() | [
"def",
"get_run",
"(",
"session",
",",
"dag_id",
",",
"execution_date",
")",
":",
"qry",
"=",
"session",
".",
"query",
"(",
"DagRun",
")",
".",
"filter",
"(",
"DagRun",
".",
"dag_id",
"==",
"dag_id",
",",
"DagRun",
".",
"external_trigger",
"==",
"False",
",",
"# noqa",
"DagRun",
".",
"execution_date",
"==",
"execution_date",
",",
")",
"return",
"qry",
".",
"first",
"(",
")"
] | 36.3125 | 9.4375 |
def nacm_rule_list_cmdrule_context(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
nacm = ET.SubElement(config, "nacm", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-acm")
rule_list = ET.SubElement(nacm, "rule-list")
name_key = ET.SubElement(rule_list, "name")
name_key.text = kwargs.pop('name')
cmdrule = ET.SubElement(rule_list, "cmdrule", xmlns="http://tail-f.com/yang/acm")
name_key = ET.SubElement(cmdrule, "name")
name_key.text = kwargs.pop('name')
context = ET.SubElement(cmdrule, "context")
context.text = kwargs.pop('context')
callback = kwargs.pop('callback', self._callback)
return callback(config) | [
"def",
"nacm_rule_list_cmdrule_context",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"config",
"=",
"ET",
".",
"Element",
"(",
"\"config\"",
")",
"nacm",
"=",
"ET",
".",
"SubElement",
"(",
"config",
",",
"\"nacm\"",
",",
"xmlns",
"=",
"\"urn:ietf:params:xml:ns:yang:ietf-netconf-acm\"",
")",
"rule_list",
"=",
"ET",
".",
"SubElement",
"(",
"nacm",
",",
"\"rule-list\"",
")",
"name_key",
"=",
"ET",
".",
"SubElement",
"(",
"rule_list",
",",
"\"name\"",
")",
"name_key",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'name'",
")",
"cmdrule",
"=",
"ET",
".",
"SubElement",
"(",
"rule_list",
",",
"\"cmdrule\"",
",",
"xmlns",
"=",
"\"http://tail-f.com/yang/acm\"",
")",
"name_key",
"=",
"ET",
".",
"SubElement",
"(",
"cmdrule",
",",
"\"name\"",
")",
"name_key",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'name'",
")",
"context",
"=",
"ET",
".",
"SubElement",
"(",
"cmdrule",
",",
"\"context\"",
")",
"context",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'context'",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"return",
"callback",
"(",
"config",
")"
] | 45.9375 | 14.875 |
def parse_exac_genes(lines):
"""Parse lines with exac formated genes
This is designed to take a dump with genes from exac.
This is downloaded from:
ftp.broadinstitute.org/pub/ExAC_release//release0.3/functional_gene_constraint/
fordist_cleaned_exac_r03_march16_z_pli_rec_null_data.txt
Args:
        lines(iterable(str)): An iterable with ExAC formatted genes
Yields:
exac_gene(dict): A dictionary with the relevant information
"""
header = []
logger.info("Parsing exac genes...")
for index, line in enumerate(lines):
if index == 0:
header = line.rstrip().split('\t')
elif len(line) > 0:
exac_gene = parse_exac_line(line, header)
yield exac_gene | [
"def",
"parse_exac_genes",
"(",
"lines",
")",
":",
"header",
"=",
"[",
"]",
"logger",
".",
"info",
"(",
"\"Parsing exac genes...\"",
")",
"for",
"index",
",",
"line",
"in",
"enumerate",
"(",
"lines",
")",
":",
"if",
"index",
"==",
"0",
":",
"header",
"=",
"line",
".",
"rstrip",
"(",
")",
".",
"split",
"(",
"'\\t'",
")",
"elif",
"len",
"(",
"line",
")",
">",
"0",
":",
"exac_gene",
"=",
"parse_exac_line",
"(",
"line",
",",
"header",
")",
"yield",
"exac_gene"
] | 36.090909 | 18.909091 |
def _(c: Concept, cutoff: float = 0.7) -> bool:
"""Check if a concept has a high grounding score. """
return is_grounded(c) and (top_grounding_score(c) >= cutoff) | [
"def",
"_",
"(",
"c",
":",
"Concept",
",",
"cutoff",
":",
"float",
"=",
"0.7",
")",
"->",
"bool",
":",
"return",
"is_grounded",
"(",
"c",
")",
"and",
"(",
"top_grounding_score",
"(",
"c",
")",
">=",
"cutoff",
")"
] | 42 | 17.75 |
def get_default_value(self, check):
"""
Given a check, return the default value for the check
(converted to the right type).
If the check doesn't specify a default value then a
``KeyError`` will be raised.
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if default is None:
raise KeyError('Check "%s" has no default value.' % check)
value = self._handle_none(default)
if value is None:
return value
return self._check_value(value, fun_name, fun_args, fun_kwargs) | [
"def",
"get_default_value",
"(",
"self",
",",
"check",
")",
":",
"fun_name",
",",
"fun_args",
",",
"fun_kwargs",
",",
"default",
"=",
"self",
".",
"_parse_with_caching",
"(",
"check",
")",
"if",
"default",
"is",
"None",
":",
"raise",
"KeyError",
"(",
"'Check \"%s\" has no default value.'",
"%",
"check",
")",
"value",
"=",
"self",
".",
"_handle_none",
"(",
"default",
")",
"if",
"value",
"is",
"None",
":",
"return",
"value",
"return",
"self",
".",
"_check_value",
"(",
"value",
",",
"fun_name",
",",
"fun_args",
",",
"fun_kwargs",
")"
] | 39.4 | 15.933333 |
def remove_column(conn, table, column_name, schema=None):
"""
Removes given `activity` jsonb data column key. This function is useful
when you are doing schema changes that require removing a column.
Let's say you've been using PostgreSQL-Audit for a while for a table called
article. Now you want to remove one audited column called 'created_at' from
this table.
::
from alembic import op
from postgresql_audit import remove_column
def upgrade():
op.remove_column('article', 'created_at')
remove_column(op, 'article', 'created_at')
:param conn:
An object that is able to execute SQL (either SQLAlchemy Connection,
Engine or Alembic Operations object)
:param table:
The table to remove the column from
:param column_name:
Name of the column to remove
:param schema:
Optional name of schema to use.
"""
activity_table = get_activity_table(schema=schema)
remove = sa.cast(column_name, sa.Text)
query = (
activity_table
.update()
.values(
old_data=activity_table.c.old_data - remove,
changed_data=activity_table.c.changed_data - remove,
)
.where(activity_table.c.table_name == table)
)
return conn.execute(query) | [
"def",
"remove_column",
"(",
"conn",
",",
"table",
",",
"column_name",
",",
"schema",
"=",
"None",
")",
":",
"activity_table",
"=",
"get_activity_table",
"(",
"schema",
"=",
"schema",
")",
"remove",
"=",
"sa",
".",
"cast",
"(",
"column_name",
",",
"sa",
".",
"Text",
")",
"query",
"=",
"(",
"activity_table",
".",
"update",
"(",
")",
".",
"values",
"(",
"old_data",
"=",
"activity_table",
".",
"c",
".",
"old_data",
"-",
"remove",
",",
"changed_data",
"=",
"activity_table",
".",
"c",
".",
"changed_data",
"-",
"remove",
",",
")",
".",
"where",
"(",
"activity_table",
".",
"c",
".",
"table_name",
"==",
"table",
")",
")",
"return",
"conn",
".",
"execute",
"(",
"query",
")"
] | 30.833333 | 22.214286 |
def check_lazy_load_wegsegment(f):
'''
Decorator function to lazy load a :class:`Wegsegment`.
'''
def wrapper(*args):
wegsegment = args[0]
if (
wegsegment._methode_id is None or
wegsegment._geometrie is None or
wegsegment._metadata is None
):
log.debug('Lazy loading Wegsegment %d', wegsegment.id)
wegsegment.check_gateway()
w = wegsegment.gateway.get_wegsegment_by_id(wegsegment.id)
wegsegment._methode_id = w._methode_id
wegsegment._geometrie = w._geometrie
wegsegment._metadata = w._metadata
return f(*args)
return wrapper | [
"def",
"check_lazy_load_wegsegment",
"(",
"f",
")",
":",
"def",
"wrapper",
"(",
"*",
"args",
")",
":",
"wegsegment",
"=",
"args",
"[",
"0",
"]",
"if",
"(",
"wegsegment",
".",
"_methode_id",
"is",
"None",
"or",
"wegsegment",
".",
"_geometrie",
"is",
"None",
"or",
"wegsegment",
".",
"_metadata",
"is",
"None",
")",
":",
"log",
".",
"debug",
"(",
"'Lazy loading Wegsegment %d'",
",",
"wegsegment",
".",
"id",
")",
"wegsegment",
".",
"check_gateway",
"(",
")",
"w",
"=",
"wegsegment",
".",
"gateway",
".",
"get_wegsegment_by_id",
"(",
"wegsegment",
".",
"id",
")",
"wegsegment",
".",
"_methode_id",
"=",
"w",
".",
"_methode_id",
"wegsegment",
".",
"_geometrie",
"=",
"w",
".",
"_geometrie",
"wegsegment",
".",
"_metadata",
"=",
"w",
".",
"_metadata",
"return",
"f",
"(",
"*",
"args",
")",
"return",
"wrapper"
] | 35.105263 | 16.157895 |
def new_character(self, name, data=None, **kwargs):
"""Create and return a new :class:`Character`."""
self.add_character(name, data, **kwargs)
return self.character[name] | [
"def",
"new_character",
"(",
"self",
",",
"name",
",",
"data",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"add_character",
"(",
"name",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"character",
"[",
"name",
"]"
] | 47.75 | 6 |
def convert(self, to_mag, from_mag=None):
""" Converts magnitudes using UBVRIJHKLMNQ photometry in Taurus-Auriga (Kenyon+ 1995)
ReadMe+ftp1995ApJS..101..117K Colors for main-sequence stars
If from_mag isn't specified the program will cycle through provided magnitudes and choose one. Note that all
magnitudes are first converted to V, and then to the requested magnitude.
:param to_mag: magnitude to convert to
:param from_mag: magnitude to convert from
:return:
"""
allowed_mags = "UBVJIHKLMN"
if from_mag:
if to_mag == 'V': # If V mag is requested (1/3) - from mag specified
return self._convert_to_from('V', from_mag)
if from_mag == 'V':
magV = self.magV
else:
magV = self._convert_to_from('V', from_mag)
return self._convert_to_from(to_mag, 'V', magV)
# if we can convert from any magnitude, try V first
elif not isNanOrNone(self.magV):
if to_mag == 'V': # If V mag is requested (2/3) - no need to convert
return self.magV
else:
return self._convert_to_from(to_mag, 'V', self.magV)
else: # Otherwise lets try all other magnitudes in turn
order = "UBJHKLMN" # V is the intermediate step from the others, done by default if possible
for mag_letter in order:
try:
magV = self._convert_to_from('V', mag_letter)
if to_mag == 'V': # If V mag is requested (3/3) - try all other mags to convert
logging.debug('Converted to magV from {0} got {1}'.format(mag_letter, magV))
return magV
else:
mag_val = self._convert_to_from(to_mag, 'V', magV)
logging.debug('Converted to mag{0} from {1} got {2}'.format(to_mag, mag_letter, mag_val))
return mag_val
except ValueError:
continue # this conversion may not be possible, try another
raise ValueError('Could not convert from any provided magnitudes') | [
"def",
"convert",
"(",
"self",
",",
"to_mag",
",",
"from_mag",
"=",
"None",
")",
":",
"allowed_mags",
"=",
"\"UBVJIHKLMN\"",
"if",
"from_mag",
":",
"if",
"to_mag",
"==",
"'V'",
":",
"# If V mag is requested (1/3) - from mag specified",
"return",
"self",
".",
"_convert_to_from",
"(",
"'V'",
",",
"from_mag",
")",
"if",
"from_mag",
"==",
"'V'",
":",
"magV",
"=",
"self",
".",
"magV",
"else",
":",
"magV",
"=",
"self",
".",
"_convert_to_from",
"(",
"'V'",
",",
"from_mag",
")",
"return",
"self",
".",
"_convert_to_from",
"(",
"to_mag",
",",
"'V'",
",",
"magV",
")",
"# if we can convert from any magnitude, try V first",
"elif",
"not",
"isNanOrNone",
"(",
"self",
".",
"magV",
")",
":",
"if",
"to_mag",
"==",
"'V'",
":",
"# If V mag is requested (2/3) - no need to convert",
"return",
"self",
".",
"magV",
"else",
":",
"return",
"self",
".",
"_convert_to_from",
"(",
"to_mag",
",",
"'V'",
",",
"self",
".",
"magV",
")",
"else",
":",
"# Otherwise lets try all other magnitudes in turn",
"order",
"=",
"\"UBJHKLMN\"",
"# V is the intermediate step from the others, done by default if possible",
"for",
"mag_letter",
"in",
"order",
":",
"try",
":",
"magV",
"=",
"self",
".",
"_convert_to_from",
"(",
"'V'",
",",
"mag_letter",
")",
"if",
"to_mag",
"==",
"'V'",
":",
"# If V mag is requested (3/3) - try all other mags to convert",
"logging",
".",
"debug",
"(",
"'Converted to magV from {0} got {1}'",
".",
"format",
"(",
"mag_letter",
",",
"magV",
")",
")",
"return",
"magV",
"else",
":",
"mag_val",
"=",
"self",
".",
"_convert_to_from",
"(",
"to_mag",
",",
"'V'",
",",
"magV",
")",
"logging",
".",
"debug",
"(",
"'Converted to mag{0} from {1} got {2}'",
".",
"format",
"(",
"to_mag",
",",
"mag_letter",
",",
"mag_val",
")",
")",
"return",
"mag_val",
"except",
"ValueError",
":",
"continue",
"# this conversion may not be possible, try another",
"raise",
"ValueError",
"(",
"'Could not convert from any provided magnitudes'",
")"
] | 48.488889 | 26.266667 |
def _build_date_header_string(self, date_value):
"""Gets the date_value (may be None, basestring, float or
datetime.datetime instance) and returns a valid date string as per
RFC 2822."""
if isinstance(date_value, datetime):
date_value = time.mktime(date_value.timetuple())
if not isinstance(date_value, basestring):
date_value = formatdate(date_value, localtime=True)
# Encode it here to avoid this:
# Date: =?utf-8?q?Sat=2C_01_Sep_2012_13=3A08=3A29_-0300?=
return native(date_value) | [
"def",
"_build_date_header_string",
"(",
"self",
",",
"date_value",
")",
":",
"if",
"isinstance",
"(",
"date_value",
",",
"datetime",
")",
":",
"date_value",
"=",
"time",
".",
"mktime",
"(",
"date_value",
".",
"timetuple",
"(",
")",
")",
"if",
"not",
"isinstance",
"(",
"date_value",
",",
"basestring",
")",
":",
"date_value",
"=",
"formatdate",
"(",
"date_value",
",",
"localtime",
"=",
"True",
")",
"# Encode it here to avoid this:",
"# Date: =?utf-8?q?Sat=2C_01_Sep_2012_13=3A08=3A29_-0300?=",
"return",
"native",
"(",
"date_value",
")"
] | 45 | 9.636364 |
def generate_delete_view(self):
"""Generate class based view for DeleteView"""
name = model_class_form(self.model + 'DeleteView')
delete_args = dict(
model=self.get_model_class,
template_name=self.get_template('delete'),
permissions=self.view_permission('delete'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_postfix_url=self.custom_postfix_url
)
delete_class = type(name, (CrudBuilderMixin, DeleteView), delete_args)
self.classes[name] = delete_class
return delete_class | [
"def",
"generate_delete_view",
"(",
"self",
")",
":",
"name",
"=",
"model_class_form",
"(",
"self",
".",
"model",
"+",
"'DeleteView'",
")",
"delete_args",
"=",
"dict",
"(",
"model",
"=",
"self",
".",
"get_model_class",
",",
"template_name",
"=",
"self",
".",
"get_template",
"(",
"'delete'",
")",
",",
"permissions",
"=",
"self",
".",
"view_permission",
"(",
"'delete'",
")",
",",
"permission_required",
"=",
"self",
".",
"check_permission_required",
",",
"login_required",
"=",
"self",
".",
"check_login_required",
",",
"success_url",
"=",
"reverse_lazy",
"(",
"'{}-{}-list'",
".",
"format",
"(",
"self",
".",
"app",
",",
"self",
".",
"custom_postfix_url",
")",
")",
",",
"custom_postfix_url",
"=",
"self",
".",
"custom_postfix_url",
")",
"delete_class",
"=",
"type",
"(",
"name",
",",
"(",
"CrudBuilderMixin",
",",
"DeleteView",
")",
",",
"delete_args",
")",
"self",
".",
"classes",
"[",
"name",
"]",
"=",
"delete_class",
"return",
"delete_class"
] | 43.294118 | 19.764706 |
def _messageFromSender(self, sender, messageID):
"""
Locate a previously queued message by a given sender and messageID.
"""
return self.store.findUnique(
_QueuedMessage,
AND(_QueuedMessage.senderUsername == sender.localpart,
_QueuedMessage.senderDomain == sender.domain,
_QueuedMessage.messageID == messageID),
default=None) | [
"def",
"_messageFromSender",
"(",
"self",
",",
"sender",
",",
"messageID",
")",
":",
"return",
"self",
".",
"store",
".",
"findUnique",
"(",
"_QueuedMessage",
",",
"AND",
"(",
"_QueuedMessage",
".",
"senderUsername",
"==",
"sender",
".",
"localpart",
",",
"_QueuedMessage",
".",
"senderDomain",
"==",
"sender",
".",
"domain",
",",
"_QueuedMessage",
".",
"messageID",
"==",
"messageID",
")",
",",
"default",
"=",
"None",
")"
] | 41.6 | 13.6 |
def flush(name, table='filter', family='ipv4', **kwargs):
'''
.. versionadded:: 2014.1.0
Flush current iptables state
table
The table that owns the chain that should be modified
family
Networking family, either ipv4 or ipv6
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
for ignore in _STATE_INTERNAL_KEYWORDS:
if ignore in kwargs:
del kwargs[ignore]
if 'chain' not in kwargs:
kwargs['chain'] = ''
if __opts__['test']:
ret['comment'] = 'iptables rules in {0} table {1} chain {2} family needs to be flushed'.format(
name,
table,
family)
return ret
if not __salt__['iptables.flush'](table, kwargs['chain'], family):
ret['changes'] = {'locale': name}
ret['result'] = True
ret['comment'] = 'Flush iptables rules in {0} table {1} chain {2} family'.format(
table,
kwargs['chain'],
family
)
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to flush iptables rules'
return ret | [
"def",
"flush",
"(",
"name",
",",
"table",
"=",
"'filter'",
",",
"family",
"=",
"'ipv4'",
",",
"*",
"*",
"kwargs",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"None",
",",
"'comment'",
":",
"''",
"}",
"for",
"ignore",
"in",
"_STATE_INTERNAL_KEYWORDS",
":",
"if",
"ignore",
"in",
"kwargs",
":",
"del",
"kwargs",
"[",
"ignore",
"]",
"if",
"'chain'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'chain'",
"]",
"=",
"''",
"if",
"__opts__",
"[",
"'test'",
"]",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'iptables rules in {0} table {1} chain {2} family needs to be flushed'",
".",
"format",
"(",
"name",
",",
"table",
",",
"family",
")",
"return",
"ret",
"if",
"not",
"__salt__",
"[",
"'iptables.flush'",
"]",
"(",
"table",
",",
"kwargs",
"[",
"'chain'",
"]",
",",
"family",
")",
":",
"ret",
"[",
"'changes'",
"]",
"=",
"{",
"'locale'",
":",
"name",
"}",
"ret",
"[",
"'result'",
"]",
"=",
"True",
"ret",
"[",
"'comment'",
"]",
"=",
"'Flush iptables rules in {0} table {1} chain {2} family'",
".",
"format",
"(",
"table",
",",
"kwargs",
"[",
"'chain'",
"]",
",",
"family",
")",
"return",
"ret",
"else",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'comment'",
"]",
"=",
"'Failed to flush iptables rules'",
"return",
"ret"
] | 26.697674 | 22.930233 |
def months_per_hour(self):
"""A list of tuples representing months per hour in this analysis period."""
month_hour = []
hour_range = xrange(self.st_hour, self.end_hour + 1)
for month in self.months_int:
month_hour.extend([(month, hr) for hr in hour_range])
return month_hour | [
"def",
"months_per_hour",
"(",
"self",
")",
":",
"month_hour",
"=",
"[",
"]",
"hour_range",
"=",
"xrange",
"(",
"self",
".",
"st_hour",
",",
"self",
".",
"end_hour",
"+",
"1",
")",
"for",
"month",
"in",
"self",
".",
"months_int",
":",
"month_hour",
".",
"extend",
"(",
"[",
"(",
"month",
",",
"hr",
")",
"for",
"hr",
"in",
"hour_range",
"]",
")",
"return",
"month_hour"
] | 45.714286 | 13.428571 |
def populateFromRow(self, featureSetRecord):
"""
Populates the instance variables of this FeatureSet from the specified
DB row.
"""
self._dbFilePath = featureSetRecord.dataurl
self.setAttributesJson(featureSetRecord.attributes)
self.populateFromFile(self._dbFilePath) | [
"def",
"populateFromRow",
"(",
"self",
",",
"featureSetRecord",
")",
":",
"self",
".",
"_dbFilePath",
"=",
"featureSetRecord",
".",
"dataurl",
"self",
".",
"setAttributesJson",
"(",
"featureSetRecord",
".",
"attributes",
")",
"self",
".",
"populateFromFile",
"(",
"self",
".",
"_dbFilePath",
")"
] | 39.5 | 13 |
def to_timedelta(value, strict=True):
"""
converts duration string to timedelta
strict=True (by default) raises StrictnessError if either hours,
minutes or seconds in duration string exceed allowed values
"""
if isinstance(value, int):
return timedelta(seconds=value) # assuming it's seconds
elif isinstance(value, timedelta):
return value
elif isinstance(value, str):
hours, minutes, seconds = _parse(value, strict)
elif isinstance(value, tuple):
check_tuple(value, strict)
hours, minutes, seconds = value
else:
raise TypeError(
'Value %s (type %s) not supported' % (
value, type(value).__name__
)
)
return timedelta(hours=hours, minutes=minutes, seconds=seconds) | [
"def",
"to_timedelta",
"(",
"value",
",",
"strict",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
":",
"return",
"timedelta",
"(",
"seconds",
"=",
"value",
")",
"# assuming it's seconds",
"elif",
"isinstance",
"(",
"value",
",",
"timedelta",
")",
":",
"return",
"value",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"hours",
",",
"minutes",
",",
"seconds",
"=",
"_parse",
"(",
"value",
",",
"strict",
")",
"elif",
"isinstance",
"(",
"value",
",",
"tuple",
")",
":",
"check_tuple",
"(",
"value",
",",
"strict",
")",
"hours",
",",
"minutes",
",",
"seconds",
"=",
"value",
"else",
":",
"raise",
"TypeError",
"(",
"'Value %s (type %s) not supported'",
"%",
"(",
"value",
",",
"type",
"(",
"value",
")",
".",
"__name__",
")",
")",
"return",
"timedelta",
"(",
"hours",
"=",
"hours",
",",
"minutes",
"=",
"minutes",
",",
"seconds",
"=",
"seconds",
")"
] | 34.086957 | 14.434783 |
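The non-string branches of to_timedelta above can be exercised directly. A minimal, self-contained sketch of that behaviour (the string branch is omitted because it depends on the unshown _parse helper; to_timedelta_sketch is a hypothetical stand-in name):

from datetime import timedelta

def to_timedelta_sketch(value):
    # ints are treated as a number of seconds
    if isinstance(value, int):
        return timedelta(seconds=value)
    # timedeltas pass through unchanged
    if isinstance(value, timedelta):
        return value
    # tuples are unpacked as (hours, minutes, seconds)
    if isinstance(value, tuple):
        hours, minutes, seconds = value
        return timedelta(hours=hours, minutes=minutes, seconds=seconds)
    raise TypeError('Value %s (type %s) not supported' % (value, type(value).__name__))

assert to_timedelta_sketch(90) == timedelta(seconds=90)
assert to_timedelta_sketch((1, 30, 5)) == timedelta(hours=1, minutes=30, seconds=5)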
def toLily(self):
'''
Method which converts the object instance, its attributes and children to a string of lilypond code
:return: str of lilypond code
'''
lilystring = ""
if self.duration > 0:
lilystring += "r" + str(self.duration)
return lilystring | [
"def",
"toLily",
"(",
"self",
")",
":",
"lilystring",
"=",
"\"\"",
"if",
"self",
".",
"duration",
">",
"0",
":",
"lilystring",
"+=",
"\"r\"",
"+",
"str",
"(",
"self",
".",
"duration",
")",
"return",
"lilystring"
] | 31 | 24.4 |
def fromkeys (cls, iterable, value=None):
"""Construct new caseless dict from given data."""
d = cls()
for k in iterable:
dict.__setitem__(d, k.lower(), value)
return d | [
"def",
"fromkeys",
"(",
"cls",
",",
"iterable",
",",
"value",
"=",
"None",
")",
":",
"d",
"=",
"cls",
"(",
")",
"for",
"k",
"in",
"iterable",
":",
"dict",
".",
"__setitem__",
"(",
"d",
",",
"k",
".",
"lower",
"(",
")",
",",
"value",
")",
"return",
"d"
] | 34.5 | 11.833333 |
def _do_code_blocks(self, text):
"""Process Markdown `<pre><code>` blocks."""
code_block_re = re.compile(r'''
(?:\n\n|\A\n?)
( # $1 = the code block -- one or more lines, starting with a space/tab
(?:
(?:[ ]{%d} | \t) # Lines must start with a tab or a tab-width of spaces
.*\n+
)+
)
((?=^[ ]{0,%d}\S)|\Z) # Lookahead for non-space at line-start, or end of doc
''' % (self.tab_width, self.tab_width),
re.M | re.X)
return code_block_re.sub(self._code_block_sub, text) | [
"def",
"_do_code_blocks",
"(",
"self",
",",
"text",
")",
":",
"code_block_re",
"=",
"re",
".",
"compile",
"(",
"r'''\n (?:\\n\\n|\\A\\n?)\n ( # $1 = the code block -- one or more lines, starting with a space/tab\n (?:\n (?:[ ]{%d} | \\t) # Lines must start with a tab or a tab-width of spaces\n .*\\n+\n )+\n )\n ((?=^[ ]{0,%d}\\S)|\\Z) # Lookahead for non-space at line-start, or end of doc\n '''",
"%",
"(",
"self",
".",
"tab_width",
",",
"self",
".",
"tab_width",
")",
",",
"re",
".",
"M",
"|",
"re",
".",
"X",
")",
"return",
"code_block_re",
".",
"sub",
"(",
"self",
".",
"_code_block_sub",
",",
"text",
")"
] | 44.714286 | 22.714286 |
def get_undefined_namespace_names(graph: BELGraph, namespace: str) -> Set[str]:
"""Get the names from a namespace that wasn't actually defined.
:return: The set of all names from the undefined namespace
"""
return {
exc.name
for _, exc, _ in graph.warnings
if isinstance(exc, UndefinedNamespaceWarning) and exc.namespace == namespace
} | [
"def",
"get_undefined_namespace_names",
"(",
"graph",
":",
"BELGraph",
",",
"namespace",
":",
"str",
")",
"->",
"Set",
"[",
"str",
"]",
":",
"return",
"{",
"exc",
".",
"name",
"for",
"_",
",",
"exc",
",",
"_",
"in",
"graph",
".",
"warnings",
"if",
"isinstance",
"(",
"exc",
",",
"UndefinedNamespaceWarning",
")",
"and",
"exc",
".",
"namespace",
"==",
"namespace",
"}"
] | 37.1 | 23.3 |
def __looks_like_html(response):
"""Guesses entity type when Content-Type header is missing.
Since Content-Type is not strictly required, some servers leave it out.
"""
text = response.text.lstrip().lower()
return text.startswith('<html') or text.startswith('<!doctype') | [
"def",
"__looks_like_html",
"(",
"response",
")",
":",
"text",
"=",
"response",
".",
"text",
".",
"lstrip",
"(",
")",
".",
"lower",
"(",
")",
"return",
"text",
".",
"startswith",
"(",
"'<html'",
")",
"or",
"text",
".",
"startswith",
"(",
"'<!doctype'",
")"
] | 50.833333 | 13.833333 |
def request(self, method, apikey, *args, **kwargs):
""" Make a xml-rpc call to remote API. """
dry_run = kwargs.get('dry_run', False)
return_dry_run = kwargs.get('return_dry_run', False)
if return_dry_run:
args[-1]['--dry-run'] = True
try:
func = getattr(self.endpoint, method)
return func(apikey, *args)
except (socket.error, requests.exceptions.ConnectionError):
msg = 'Gandi API service is unreachable'
raise APICallFailed(msg)
except xmlrpclib.Fault as err:
msg = 'Gandi API has returned an error: %s' % err
if dry_run:
args[-1]['--dry-run'] = True
ret = func(apikey, *args)
raise DryRunException(msg, err.faultCode, ret)
raise APICallFailed(msg, err.faultCode)
except TypeError as err:
msg = 'An unknown error has occurred: %s' % err
raise APICallFailed(msg) | [
"def",
"request",
"(",
"self",
",",
"method",
",",
"apikey",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"dry_run",
"=",
"kwargs",
".",
"get",
"(",
"'dry_run'",
",",
"False",
")",
"return_dry_run",
"=",
"kwargs",
".",
"get",
"(",
"'return_dry_run'",
",",
"False",
")",
"if",
"return_dry_run",
":",
"args",
"[",
"-",
"1",
"]",
"[",
"'--dry-run'",
"]",
"=",
"True",
"try",
":",
"func",
"=",
"getattr",
"(",
"self",
".",
"endpoint",
",",
"method",
")",
"return",
"func",
"(",
"apikey",
",",
"*",
"args",
")",
"except",
"(",
"socket",
".",
"error",
",",
"requests",
".",
"exceptions",
".",
"ConnectionError",
")",
":",
"msg",
"=",
"'Gandi API service is unreachable'",
"raise",
"APICallFailed",
"(",
"msg",
")",
"except",
"xmlrpclib",
".",
"Fault",
"as",
"err",
":",
"msg",
"=",
"'Gandi API has returned an error: %s'",
"%",
"err",
"if",
"dry_run",
":",
"args",
"[",
"-",
"1",
"]",
"[",
"'--dry-run'",
"]",
"=",
"True",
"ret",
"=",
"func",
"(",
"apikey",
",",
"*",
"args",
")",
"raise",
"DryRunException",
"(",
"msg",
",",
"err",
".",
"faultCode",
",",
"ret",
")",
"raise",
"APICallFailed",
"(",
"msg",
",",
"err",
".",
"faultCode",
")",
"except",
"TypeError",
"as",
"err",
":",
"msg",
"=",
"'An unknown error has occurred: %s'",
"%",
"err",
"raise",
"APICallFailed",
"(",
"msg",
")"
] | 42.347826 | 12.26087 |
def energy_prof(step):
"""Energy flux.
This computation takes sphericity into account if necessary.
Args:
step (:class:`~stagpy.stagyydata._Step`): a step of a StagyyData
instance.
Returns:
tuple of :class:`numpy.array`: the energy flux and the radial position
at which it is evaluated.
"""
diff, rad = diffs_prof(step)
adv, _ = advts_prof(step)
return (diff + np.append(adv, 0)), rad | [
"def",
"energy_prof",
"(",
"step",
")",
":",
"diff",
",",
"rad",
"=",
"diffs_prof",
"(",
"step",
")",
"adv",
",",
"_",
"=",
"advts_prof",
"(",
"step",
")",
"return",
"(",
"diff",
"+",
"np",
".",
"append",
"(",
"adv",
",",
"0",
")",
")",
",",
"rad"
] | 29.333333 | 19.866667 |
def serialize(self):
"""
Converts the credentials to a percent encoded string to be stored for
later use.
:returns:
:class:`string`
"""
if self.provider_id is None:
raise ConfigError(
'To serialize credentials you need to specify a '
'unique integer under the "id" key in the config '
'for each provider!')
# Get the provider type specific items.
rest = self.provider_type_class().to_tuple(self)
# Provider ID and provider type ID are always the first two items.
result = (self.provider_id, self.provider_type_id) + rest
# Make sure that all items are strings.
stringified = [str(i) for i in result]
# Concatenate by newline.
concatenated = '\n'.join(stringified)
# Percent encode.
return parse.quote(concatenated, '') | [
"def",
"serialize",
"(",
"self",
")",
":",
"if",
"self",
".",
"provider_id",
"is",
"None",
":",
"raise",
"ConfigError",
"(",
"'To serialize credentials you need to specify a '",
"'unique integer under the \"id\" key in the config '",
"'for each provider!'",
")",
"# Get the provider type specific items.",
"rest",
"=",
"self",
".",
"provider_type_class",
"(",
")",
".",
"to_tuple",
"(",
"self",
")",
"# Provider ID and provider type ID are always the first two items.",
"result",
"=",
"(",
"self",
".",
"provider_id",
",",
"self",
".",
"provider_type_id",
")",
"+",
"rest",
"# Make sure that all items are strings.",
"stringified",
"=",
"[",
"str",
"(",
"i",
")",
"for",
"i",
"in",
"result",
"]",
"# Concatenate by newline.",
"concatenated",
"=",
"'\\n'",
".",
"join",
"(",
"stringified",
")",
"# Percent encode.",
"return",
"parse",
".",
"quote",
"(",
"concatenated",
",",
"''",
")"
] | 29.9 | 20.966667 |
def batch_flatten(x):
"""
Flatten the tensor except the first dimension.
"""
shape = x.get_shape().as_list()[1:]
if None not in shape:
return tf.reshape(x, [-1, int(np.prod(shape))])
return tf.reshape(x, tf.stack([tf.shape(x)[0], -1])) | [
"def",
"batch_flatten",
"(",
"x",
")",
":",
"shape",
"=",
"x",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"[",
"1",
":",
"]",
"if",
"None",
"not",
"in",
"shape",
":",
"return",
"tf",
".",
"reshape",
"(",
"x",
",",
"[",
"-",
"1",
",",
"int",
"(",
"np",
".",
"prod",
"(",
"shape",
")",
")",
"]",
")",
"return",
"tf",
".",
"reshape",
"(",
"x",
",",
"tf",
".",
"stack",
"(",
"[",
"tf",
".",
"shape",
"(",
"x",
")",
"[",
"0",
"]",
",",
"-",
"1",
"]",
")",
")"
] | 32.5 | 9.5 |
def is_thin_archieve(self):
"""
        Return the is_thin_archieve attribute of the BFD file being processed.
"""
if not self._ptr:
raise BfdException("BFD not initialized")
return _bfd.get_bfd_attribute(
self._ptr, BfdAttributes.IS_THIN_ARCHIEVE) | [
"def",
"is_thin_archieve",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_ptr",
":",
"raise",
"BfdException",
"(",
"\"BFD not initialized\"",
")",
"return",
"_bfd",
".",
"get_bfd_attribute",
"(",
"self",
".",
"_ptr",
",",
"BfdAttributes",
".",
"IS_THIN_ARCHIEVE",
")"
] | 33 | 15 |
def power(self, n):
"""Return the compose of a QuantumChannel with itself n times.
Args:
n (int): compute the matrix power of the superoperator matrix.
Returns:
SuperOp: the n-times composition channel as a SuperOp object.
Raises:
QiskitError: if the input and output dimensions of the
QuantumChannel are not equal, or the power is not an integer.
"""
if not isinstance(n, (int, np.integer)):
raise QiskitError("Can only power with integer powers.")
if self._input_dim != self._output_dim:
raise QiskitError("Can only power with input_dim = output_dim.")
# Override base class power so we can implement more efficiently
# using Numpy.matrix_power
return SuperOp(
np.linalg.matrix_power(self._data, n), self.input_dims(),
self.output_dims()) | [
"def",
"power",
"(",
"self",
",",
"n",
")",
":",
"if",
"not",
"isinstance",
"(",
"n",
",",
"(",
"int",
",",
"np",
".",
"integer",
")",
")",
":",
"raise",
"QiskitError",
"(",
"\"Can only power with integer powers.\"",
")",
"if",
"self",
".",
"_input_dim",
"!=",
"self",
".",
"_output_dim",
":",
"raise",
"QiskitError",
"(",
"\"Can only power with input_dim = output_dim.\"",
")",
"# Override base class power so we can implement more efficiently",
"# using Numpy.matrix_power",
"return",
"SuperOp",
"(",
"np",
".",
"linalg",
".",
"matrix_power",
"(",
"self",
".",
"_data",
",",
"n",
")",
",",
"self",
".",
"input_dims",
"(",
")",
",",
"self",
".",
"output_dims",
"(",
")",
")"
] | 40.818182 | 23.409091 |
def _scheduled_check_for_summaries(self):
"""Present the results if they have become available or timed out."""
if self._analysis_process is None:
return
# handle time out
timed_out = time.time() - self._analyze_start_time > self.time_limit
if timed_out:
self._handle_results('Analysis timed out but managed\n'
' to get lower turn results.',
'Analysis timed out with no results.')
return
# handle standard completion
try:
self._analysis_process.join(0.001)
except AssertionError:
pass # if some timing issue with closed process, just continue
if not self._analysis_process.is_alive():
self._handle_results('Completed analysis.',
'Unable to find the game on screen.')
return
#finally, if it's still alive, then come back later
self._base.after(self._POLL_PERIOD_MILLISECONDS,
self._scheduled_check_for_summaries) | [
"def",
"_scheduled_check_for_summaries",
"(",
"self",
")",
":",
"if",
"self",
".",
"_analysis_process",
"is",
"None",
":",
"return",
"# handle time out",
"timed_out",
"=",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"_analyze_start_time",
">",
"self",
".",
"time_limit",
"if",
"timed_out",
":",
"self",
".",
"_handle_results",
"(",
"'Analysis timed out but managed\\n'",
"' to get lower turn results.'",
",",
"'Analysis timed out with no results.'",
")",
"return",
"# handle standard completion",
"try",
":",
"self",
".",
"_analysis_process",
".",
"join",
"(",
"0.001",
")",
"except",
"AssertionError",
":",
"pass",
"# if some timing issue with closed process, just continue",
"if",
"not",
"self",
".",
"_analysis_process",
".",
"is_alive",
"(",
")",
":",
"self",
".",
"_handle_results",
"(",
"'Completed analysis.'",
",",
"'Unable to find the game on screen.'",
")",
"return",
"#finally, if it's still alive, then come back later",
"self",
".",
"_base",
".",
"after",
"(",
"self",
".",
"_POLL_PERIOD_MILLISECONDS",
",",
"self",
".",
"_scheduled_check_for_summaries",
")"
] | 47.217391 | 17.956522 |
def _template(node_id, value=None):
"Check if a template is assigned to it and render that with the value"
result = []
select_template_from_node = fetch_query_string('select_template_from_node.sql')
try:
result = db.execute(text(select_template_from_node), node_id=node_id)
template_result = result.fetchone()
result.close()
if template_result and template_result['name']:
template = template_result['name']
if isinstance(value, dict):
return render_template(template, **value)
else:
return render_template(template, value=value)
except DatabaseError as err:
current_app.logger.error("DatabaseError: %s", err)
# No template assigned to this node so just return the value
return value | [
"def",
"_template",
"(",
"node_id",
",",
"value",
"=",
"None",
")",
":",
"result",
"=",
"[",
"]",
"select_template_from_node",
"=",
"fetch_query_string",
"(",
"'select_template_from_node.sql'",
")",
"try",
":",
"result",
"=",
"db",
".",
"execute",
"(",
"text",
"(",
"select_template_from_node",
")",
",",
"node_id",
"=",
"node_id",
")",
"template_result",
"=",
"result",
".",
"fetchone",
"(",
")",
"result",
".",
"close",
"(",
")",
"if",
"template_result",
"and",
"template_result",
"[",
"'name'",
"]",
":",
"template",
"=",
"template_result",
"[",
"'name'",
"]",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"return",
"render_template",
"(",
"template",
",",
"*",
"*",
"value",
")",
"else",
":",
"return",
"render_template",
"(",
"template",
",",
"value",
"=",
"value",
")",
"except",
"DatabaseError",
"as",
"err",
":",
"current_app",
".",
"logger",
".",
"error",
"(",
"\"DatabaseError: %s\"",
",",
"err",
")",
"# No template assigned to this node so just return the value",
"return",
"value"
] | 40.1 | 21.7 |
def validate_character_instance_valid_for_arc(sender, instance, action, reverse, pk_set, *args, **kwargs):
'''
    Evaluate attempts to assign a character instance to ensure it is from the same
outline.
'''
if action == 'pre_add':
if reverse:
# Fetch arc definition through link.
for apk in pk_set:
arc_node = ArcElementNode.objects.get(pk=apk)
if arc_node.parent_outline != instance.outline:
raise IntegrityError(_('Character Instance and Arc Element must be from same outline.'))
else:
for cpk in pk_set:
char_instance = CharacterInstance.objects.get(pk=cpk)
if char_instance.outline != instance.parent_outline:
raise IntegrityError(_('Character Instance and Arc Element must be from the same outline.')) | [
"def",
"validate_character_instance_valid_for_arc",
"(",
"sender",
",",
"instance",
",",
"action",
",",
"reverse",
",",
"pk_set",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"action",
"==",
"'pre_add'",
":",
"if",
"reverse",
":",
"# Fetch arc definition through link.",
"for",
"apk",
"in",
"pk_set",
":",
"arc_node",
"=",
"ArcElementNode",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"apk",
")",
"if",
"arc_node",
".",
"parent_outline",
"!=",
"instance",
".",
"outline",
":",
"raise",
"IntegrityError",
"(",
"_",
"(",
"'Character Instance and Arc Element must be from same outline.'",
")",
")",
"else",
":",
"for",
"cpk",
"in",
"pk_set",
":",
"char_instance",
"=",
"CharacterInstance",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"cpk",
")",
"if",
"char_instance",
".",
"outline",
"!=",
"instance",
".",
"parent_outline",
":",
"raise",
"IntegrityError",
"(",
"_",
"(",
"'Character Instance and Arc Element must be from the same outline.'",
")",
")"
] | 50.470588 | 31.058824 |
def delete(path, regex=None, recurse=False, test=False):
"""Deletes the file or directory at `path`. If `path` is a directory and
`regex` is provided, matching files will be deleted; `recurse` controls
whether subdirectories are recursed. A list of deleted items is returned.
If `test` is true, nothing will be deleted and a list of items that would
have been deleted is returned.
"""
deleted = []
if op.isfile(path):
if not test: os.remove(path)
else: return [path]
return [] if op.exists(path) else [path]
elif op.isdir(path):
if regex:
for r,ds,fs in os.walk(path):
for i in fs:
if _is_match(regex, i):
deleted += delete(op.join(r,i), test=test)
if not recurse:
break
else:
if not test: shutil.rmtree(path)
else: return [path]
return [] if op.exists(path) else [path]
return deleted | [
"def",
"delete",
"(",
"path",
",",
"regex",
"=",
"None",
",",
"recurse",
"=",
"False",
",",
"test",
"=",
"False",
")",
":",
"deleted",
"=",
"[",
"]",
"if",
"op",
".",
"isfile",
"(",
"path",
")",
":",
"if",
"not",
"test",
":",
"os",
".",
"remove",
"(",
"path",
")",
"else",
":",
"return",
"[",
"path",
"]",
"return",
"[",
"]",
"if",
"op",
".",
"exists",
"(",
"path",
")",
"else",
"[",
"path",
"]",
"elif",
"op",
".",
"isdir",
"(",
"path",
")",
":",
"if",
"regex",
":",
"for",
"r",
",",
"ds",
",",
"fs",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"for",
"i",
"in",
"fs",
":",
"if",
"_is_match",
"(",
"regex",
",",
"i",
")",
":",
"deleted",
"+=",
"delete",
"(",
"op",
".",
"join",
"(",
"r",
",",
"i",
")",
",",
"test",
"=",
"test",
")",
"if",
"not",
"recurse",
":",
"break",
"else",
":",
"if",
"not",
"test",
":",
"shutil",
".",
"rmtree",
"(",
"path",
")",
"else",
":",
"return",
"[",
"path",
"]",
"return",
"[",
"]",
"if",
"op",
".",
"exists",
"(",
"path",
")",
"else",
"[",
"path",
"]",
"return",
"deleted"
] | 39.4 | 15.04 |
def one_version(self, index=0):
'''
Leaves only one version for each object.
:param index: List-like index of the version. 0 == first; -1 == last
'''
def prep(df):
start = sorted(df._start.tolist())[index]
return df[df._start == start]
return pd.concat([prep(df) for _, df in self.groupby(self._oid)]) | [
"def",
"one_version",
"(",
"self",
",",
"index",
"=",
"0",
")",
":",
"def",
"prep",
"(",
"df",
")",
":",
"start",
"=",
"sorted",
"(",
"df",
".",
"_start",
".",
"tolist",
"(",
")",
")",
"[",
"index",
"]",
"return",
"df",
"[",
"df",
".",
"_start",
"==",
"start",
"]",
"return",
"pd",
".",
"concat",
"(",
"[",
"prep",
"(",
"df",
")",
"for",
"_",
",",
"df",
"in",
"self",
".",
"groupby",
"(",
"self",
".",
"_oid",
")",
"]",
")"
] | 33.272727 | 23.454545 |
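The one_version method above keeps, for each object id, only the row whose _start value is the index-th earliest. A minimal plain-pandas sketch of the same idea, assuming _oid and _start are ordinary columns (the frame and values here are hypothetical):

import pandas as pd

df = pd.DataFrame({
    '_oid':   [1, 1, 2, 2, 2],         # object identifier
    '_start': [10, 20, 5, 7, 9],       # version start marker
    'value':  ['a', 'b', 'c', 'd', 'e'],
})

def one_version(frame, index=0):
    # for each object, keep only the row whose _start is the index-th earliest
    def prep(group):
        start = sorted(group['_start'].tolist())[index]
        return group[group['_start'] == start]
    return pd.concat(prep(group) for _, group in frame.groupby('_oid'))

print(one_version(df, 0))    # first version of each object: rows 'a' and 'c'
print(one_version(df, -1))   # last version of each object: rows 'b' and 'e'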
def get_exif_data(filename):
"""Return a dict with the raw EXIF data."""
logger = logging.getLogger(__name__)
img = _read_image(filename)
try:
exif = img._getexif() or {}
except ZeroDivisionError:
logger.warning('Failed to read EXIF data.')
return None
data = {TAGS.get(tag, tag): value for tag, value in exif.items()}
if 'GPSInfo' in data:
try:
data['GPSInfo'] = {GPSTAGS.get(tag, tag): value
for tag, value in data['GPSInfo'].items()}
except AttributeError:
logger = logging.getLogger(__name__)
logger.info('Failed to get GPS Info')
del data['GPSInfo']
return data | [
"def",
"get_exif_data",
"(",
"filename",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"img",
"=",
"_read_image",
"(",
"filename",
")",
"try",
":",
"exif",
"=",
"img",
".",
"_getexif",
"(",
")",
"or",
"{",
"}",
"except",
"ZeroDivisionError",
":",
"logger",
".",
"warning",
"(",
"'Failed to read EXIF data.'",
")",
"return",
"None",
"data",
"=",
"{",
"TAGS",
".",
"get",
"(",
"tag",
",",
"tag",
")",
":",
"value",
"for",
"tag",
",",
"value",
"in",
"exif",
".",
"items",
"(",
")",
"}",
"if",
"'GPSInfo'",
"in",
"data",
":",
"try",
":",
"data",
"[",
"'GPSInfo'",
"]",
"=",
"{",
"GPSTAGS",
".",
"get",
"(",
"tag",
",",
"tag",
")",
":",
"value",
"for",
"tag",
",",
"value",
"in",
"data",
"[",
"'GPSInfo'",
"]",
".",
"items",
"(",
")",
"}",
"except",
"AttributeError",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"info",
"(",
"'Failed to get GPS Info'",
")",
"del",
"data",
"[",
"'GPSInfo'",
"]",
"return",
"data"
] | 29.125 | 20.25 |
def audio_detection_sensitivity(self):
"""Sensitivity level of Camera audio detection."""
if not self.triggers:
return None
for trigger in self.triggers:
if trigger.get("type") != "audioAmplitude":
continue
sensitivity = trigger.get("sensitivity")
if sensitivity:
return sensitivity.get("default")
return None | [
"def",
"audio_detection_sensitivity",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"triggers",
":",
"return",
"None",
"for",
"trigger",
"in",
"self",
".",
"triggers",
":",
"if",
"trigger",
".",
"get",
"(",
"\"type\"",
")",
"!=",
"\"audioAmplitude\"",
":",
"continue",
"sensitivity",
"=",
"trigger",
".",
"get",
"(",
"\"sensitivity\"",
")",
"if",
"sensitivity",
":",
"return",
"sensitivity",
".",
"get",
"(",
"\"default\"",
")",
"return",
"None"
] | 29.357143 | 17.071429 |
async def _on_status_change(self, update):
"""Update a service that has its status updated."""
info = update['payload']
new_number = info['new_status']
name = update['service']
if name not in self.services:
return
with self._state_lock:
is_changed = self.services[name].state != new_number
self.services[name].state = new_number
# Notify about this service state change if anyone is listening
if self._on_change_callback and is_changed:
self._on_change_callback(name, self.services[name].id, new_number, False, False) | [
"async",
"def",
"_on_status_change",
"(",
"self",
",",
"update",
")",
":",
"info",
"=",
"update",
"[",
"'payload'",
"]",
"new_number",
"=",
"info",
"[",
"'new_status'",
"]",
"name",
"=",
"update",
"[",
"'service'",
"]",
"if",
"name",
"not",
"in",
"self",
".",
"services",
":",
"return",
"with",
"self",
".",
"_state_lock",
":",
"is_changed",
"=",
"self",
".",
"services",
"[",
"name",
"]",
".",
"state",
"!=",
"new_number",
"self",
".",
"services",
"[",
"name",
"]",
".",
"state",
"=",
"new_number",
"# Notify about this service state change if anyone is listening",
"if",
"self",
".",
"_on_change_callback",
"and",
"is_changed",
":",
"self",
".",
"_on_change_callback",
"(",
"name",
",",
"self",
".",
"services",
"[",
"name",
"]",
".",
"id",
",",
"new_number",
",",
"False",
",",
"False",
")"
] | 36.294118 | 20.117647 |
def generate_csv(path, out):
"""\
Walks through the `path` and generates the CSV file `out`
"""
def is_berlin_cable(filename):
return 'BERLIN' in filename
writer = UnicodeWriter(open(out, 'wb'), delimiter=';')
writer.writerow(('Reference ID', 'Created', 'Origin', 'Subject'))
for cable in cables_from_source(path, predicate=is_berlin_cable):
writer.writerow((cable.reference_id, cable.created, cable.origin, titlefy(cable.subject))) | [
"def",
"generate_csv",
"(",
"path",
",",
"out",
")",
":",
"def",
"is_berlin_cable",
"(",
"filename",
")",
":",
"return",
"'BERLIN'",
"in",
"filename",
"writer",
"=",
"UnicodeWriter",
"(",
"open",
"(",
"out",
",",
"'wb'",
")",
",",
"delimiter",
"=",
"';'",
")",
"writer",
".",
"writerow",
"(",
"(",
"'Reference ID'",
",",
"'Created'",
",",
"'Origin'",
",",
"'Subject'",
")",
")",
"for",
"cable",
"in",
"cables_from_source",
"(",
"path",
",",
"predicate",
"=",
"is_berlin_cable",
")",
":",
"writer",
".",
"writerow",
"(",
"(",
"cable",
".",
"reference_id",
",",
"cable",
".",
"created",
",",
"cable",
".",
"origin",
",",
"titlefy",
"(",
"cable",
".",
"subject",
")",
")",
")"
] | 46.7 | 17.8 |
def concatenate(self, tpl, axis=None):
"""
Concatenates sparse tensors.
Parameters
----------
tpl : tuple of sparse tensors
Tensors to be concatenated.
axis : int, optional
Axis along which concatenation should take place
"""
if axis is None:
raise NotImplementedError(
'Sparse tensor concatenation without axis argument is not supported'
)
T = self
for i in range(1, len(tpl)):
T = _single_concatenate(T, tpl[i], axis=axis)
return T | [
"def",
"concatenate",
"(",
"self",
",",
"tpl",
",",
"axis",
"=",
"None",
")",
":",
"if",
"axis",
"is",
"None",
":",
"raise",
"NotImplementedError",
"(",
"'Sparse tensor concatenation without axis argument is not supported'",
")",
"T",
"=",
"self",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"tpl",
")",
")",
":",
"T",
"=",
"_single_concatenate",
"(",
"T",
",",
"tpl",
"[",
"i",
"]",
",",
"axis",
"=",
"axis",
")",
"return",
"T"
] | 30.631579 | 14.842105 |
def read_reporting_revisions_get(self, project=None, fields=None, types=None, continuation_token=None, start_date_time=None, include_identity_ref=None, include_deleted=None, include_tag_ref=None, include_latest_only=None, expand=None, include_discussion_changes_only=None, max_page_size=None):
"""ReadReportingRevisionsGet.
Get a batch of work item revisions with the option of including deleted items
:param str project: Project ID or project name
:param [str] fields: A list of fields to return in work item revisions. Omit this parameter to get all reportable fields.
:param [str] types: A list of types to filter the results to specific work item types. Omit this parameter to get work item revisions of all work item types.
:param str continuation_token: Specifies the watermark to start the batch from. Omit this parameter to get the first batch of revisions.
:param datetime start_date_time: Date/time to use as a starting point for revisions, all revisions will occur after this date/time. Cannot be used in conjunction with 'watermark' parameter.
:param bool include_identity_ref: Return an identity reference instead of a string value for identity fields.
:param bool include_deleted: Specify if the deleted item should be returned.
:param bool include_tag_ref: Specify if the tag objects should be returned for System.Tags field.
:param bool include_latest_only: Return only the latest revisions of work items, skipping all historical revisions
:param str expand: Return all the fields in work item revisions, including long text fields which are not returned by default
        :param bool include_discussion_changes_only: Return only those revisions of work items where only the history field was changed
:param int max_page_size: The maximum number of results to return in this batch
:rtype: :class:`<ReportingWorkItemRevisionsBatch> <azure.devops.v5_0.work_item_tracking.models.ReportingWorkItemRevisionsBatch>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if fields is not None:
fields = ",".join(fields)
query_parameters['fields'] = self._serialize.query('fields', fields, 'str')
if types is not None:
types = ",".join(types)
query_parameters['types'] = self._serialize.query('types', types, 'str')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if start_date_time is not None:
query_parameters['startDateTime'] = self._serialize.query('start_date_time', start_date_time, 'iso-8601')
if include_identity_ref is not None:
query_parameters['includeIdentityRef'] = self._serialize.query('include_identity_ref', include_identity_ref, 'bool')
if include_deleted is not None:
query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if include_tag_ref is not None:
query_parameters['includeTagRef'] = self._serialize.query('include_tag_ref', include_tag_ref, 'bool')
if include_latest_only is not None:
query_parameters['includeLatestOnly'] = self._serialize.query('include_latest_only', include_latest_only, 'bool')
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
if include_discussion_changes_only is not None:
query_parameters['includeDiscussionChangesOnly'] = self._serialize.query('include_discussion_changes_only', include_discussion_changes_only, 'bool')
if max_page_size is not None:
query_parameters['$maxPageSize'] = self._serialize.query('max_page_size', max_page_size, 'int')
response = self._send(http_method='GET',
location_id='f828fe59-dd87-495d-a17c-7a8d6211ca6c',
version='5.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ReportingWorkItemRevisionsBatch', response) | [
"def",
"read_reporting_revisions_get",
"(",
"self",
",",
"project",
"=",
"None",
",",
"fields",
"=",
"None",
",",
"types",
"=",
"None",
",",
"continuation_token",
"=",
"None",
",",
"start_date_time",
"=",
"None",
",",
"include_identity_ref",
"=",
"None",
",",
"include_deleted",
"=",
"None",
",",
"include_tag_ref",
"=",
"None",
",",
"include_latest_only",
"=",
"None",
",",
"expand",
"=",
"None",
",",
"include_discussion_changes_only",
"=",
"None",
",",
"max_page_size",
"=",
"None",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"project",
"is",
"not",
"None",
":",
"route_values",
"[",
"'project'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'project'",
",",
"project",
",",
"'str'",
")",
"query_parameters",
"=",
"{",
"}",
"if",
"fields",
"is",
"not",
"None",
":",
"fields",
"=",
"\",\"",
".",
"join",
"(",
"fields",
")",
"query_parameters",
"[",
"'fields'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'fields'",
",",
"fields",
",",
"'str'",
")",
"if",
"types",
"is",
"not",
"None",
":",
"types",
"=",
"\",\"",
".",
"join",
"(",
"types",
")",
"query_parameters",
"[",
"'types'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'types'",
",",
"types",
",",
"'str'",
")",
"if",
"continuation_token",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'continuationToken'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'continuation_token'",
",",
"continuation_token",
",",
"'str'",
")",
"if",
"start_date_time",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'startDateTime'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'start_date_time'",
",",
"start_date_time",
",",
"'iso-8601'",
")",
"if",
"include_identity_ref",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'includeIdentityRef'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'include_identity_ref'",
",",
"include_identity_ref",
",",
"'bool'",
")",
"if",
"include_deleted",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'includeDeleted'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'include_deleted'",
",",
"include_deleted",
",",
"'bool'",
")",
"if",
"include_tag_ref",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'includeTagRef'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'include_tag_ref'",
",",
"include_tag_ref",
",",
"'bool'",
")",
"if",
"include_latest_only",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'includeLatestOnly'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'include_latest_only'",
",",
"include_latest_only",
",",
"'bool'",
")",
"if",
"expand",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$expand'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'expand'",
",",
"expand",
",",
"'str'",
")",
"if",
"include_discussion_changes_only",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'includeDiscussionChangesOnly'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'include_discussion_changes_only'",
",",
"include_discussion_changes_only",
",",
"'bool'",
")",
"if",
"max_page_size",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$maxPageSize'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'max_page_size'",
",",
"max_page_size",
",",
"'int'",
")",
"response",
"=",
"self",
".",
"_send",
"(",
"http_method",
"=",
"'GET'",
",",
"location_id",
"=",
"'f828fe59-dd87-495d-a17c-7a8d6211ca6c'",
",",
"version",
"=",
"'5.0'",
",",
"route_values",
"=",
"route_values",
",",
"query_parameters",
"=",
"query_parameters",
")",
"return",
"self",
".",
"_deserialize",
"(",
"'ReportingWorkItemRevisionsBatch'",
",",
"response",
")"
] | 84.666667 | 48.431373 |
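For context, a hedged sketch of the paging pattern this endpoint supports: keep feeding the returned continuation token back into read_reporting_revisions_get until the service signals the last batch. The organization URL, personal access token, project name, field names, and the batch attributes (values, is_last_batch, continuation_token) are assumptions for illustration, not guaranteed by this snippet.

# Hypothetical paging loop over reporting revisions; batch attribute names are assumed.
from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication

connection = Connection(base_url="https://dev.azure.com/your-org",
                        creds=BasicAuthentication("", "your-personal-access-token"))
wit_client = connection.clients.get_work_item_tracking_client()

token = None
while True:
    batch = wit_client.read_reporting_revisions_get(
        project="YourProject",
        fields=["System.Title", "System.State"],
        continuation_token=token)
    for revision in batch.values:        # assumed attribute of the batch model
        print(revision)
    if batch.is_last_batch:              # assumed attribute of the batch model
        break
    token = batch.continuation_token     # assumed attribute of the batch model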
def post_parse_response(self, response, **kwargs):
"""
Add scope claim to response, from the request, if not present in the
response
:param response: The response
:param kwargs: Extra Keyword arguments
:return: A possibly augmented response
"""
if "scope" not in response:
try:
_key = kwargs['state']
except KeyError:
pass
else:
if _key:
item = self.get_item(oauth2.AuthorizationRequest,
'auth_request', _key)
try:
response["scope"] = item["scope"]
except KeyError:
pass
return response | [
"def",
"post_parse_response",
"(",
"self",
",",
"response",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"\"scope\"",
"not",
"in",
"response",
":",
"try",
":",
"_key",
"=",
"kwargs",
"[",
"'state'",
"]",
"except",
"KeyError",
":",
"pass",
"else",
":",
"if",
"_key",
":",
"item",
"=",
"self",
".",
"get_item",
"(",
"oauth2",
".",
"AuthorizationRequest",
",",
"'auth_request'",
",",
"_key",
")",
"try",
":",
"response",
"[",
"\"scope\"",
"]",
"=",
"item",
"[",
"\"scope\"",
"]",
"except",
"KeyError",
":",
"pass",
"return",
"response"
] | 32.083333 | 16 |
def _AsMessageList(msg):
"""Convert the provided list-as-JsonValue to a list."""
# This really needs to live in extra_types, but extra_types needs
# to import this file to be able to register codecs.
# TODO(craigcitro): Split out a codecs module and fix this ugly
# import.
from apitools.base.py import extra_types
def _IsRepeatedJsonValue(msg):
"""Return True if msg is a repeated value as a JsonValue."""
if isinstance(msg, extra_types.JsonArray):
return True
if isinstance(msg, extra_types.JsonValue) and msg.array_value:
return True
return False
if not _IsRepeatedJsonValue(msg):
raise ValueError('invalid argument to _AsMessageList')
if isinstance(msg, extra_types.JsonValue):
msg = msg.array_value
if isinstance(msg, extra_types.JsonArray):
msg = msg.entries
return msg | [
"def",
"_AsMessageList",
"(",
"msg",
")",
":",
"# This really needs to live in extra_types, but extra_types needs",
"# to import this file to be able to register codecs.",
"# TODO(craigcitro): Split out a codecs module and fix this ugly",
"# import.",
"from",
"apitools",
".",
"base",
".",
"py",
"import",
"extra_types",
"def",
"_IsRepeatedJsonValue",
"(",
"msg",
")",
":",
"\"\"\"Return True if msg is a repeated value as a JsonValue.\"\"\"",
"if",
"isinstance",
"(",
"msg",
",",
"extra_types",
".",
"JsonArray",
")",
":",
"return",
"True",
"if",
"isinstance",
"(",
"msg",
",",
"extra_types",
".",
"JsonValue",
")",
"and",
"msg",
".",
"array_value",
":",
"return",
"True",
"return",
"False",
"if",
"not",
"_IsRepeatedJsonValue",
"(",
"msg",
")",
":",
"raise",
"ValueError",
"(",
"'invalid argument to _AsMessageList'",
")",
"if",
"isinstance",
"(",
"msg",
",",
"extra_types",
".",
"JsonValue",
")",
":",
"msg",
"=",
"msg",
".",
"array_value",
"if",
"isinstance",
"(",
"msg",
",",
"extra_types",
".",
"JsonArray",
")",
":",
"msg",
"=",
"msg",
".",
"entries",
"return",
"msg"
] | 38.217391 | 16.869565 |
def run(self):
"""
Computes the sum of unread messages across all registered backends and updates the widget output
"""
unread = 0
current_unread = 0
for id, backend in enumerate(self.backends):
temp = backend.unread or 0
unread = unread + temp
if id == self.current_backend:
current_unread = temp
if not unread:
color = self.color
urgent = "false"
if self.hide_if_null:
self.output = None
return
else:
color = self.color_unread
urgent = "true"
format = self.format
if unread > 1:
format = self.format_plural
account_name = getattr(self.backends[self.current_backend], "account", "No name")
self.output = {
"full_text": format.format(unread=unread, current_unread=current_unread, account=account_name),
"urgent": urgent,
"color": color,
} | [
"def",
"run",
"(",
"self",
")",
":",
"unread",
"=",
"0",
"current_unread",
"=",
"0",
"for",
"id",
",",
"backend",
"in",
"enumerate",
"(",
"self",
".",
"backends",
")",
":",
"temp",
"=",
"backend",
".",
"unread",
"or",
"0",
"unread",
"=",
"unread",
"+",
"temp",
"if",
"id",
"==",
"self",
".",
"current_backend",
":",
"current_unread",
"=",
"temp",
"if",
"not",
"unread",
":",
"color",
"=",
"self",
".",
"color",
"urgent",
"=",
"\"false\"",
"if",
"self",
".",
"hide_if_null",
":",
"self",
".",
"output",
"=",
"None",
"return",
"else",
":",
"color",
"=",
"self",
".",
"color_unread",
"urgent",
"=",
"\"true\"",
"format",
"=",
"self",
".",
"format",
"if",
"unread",
">",
"1",
":",
"format",
"=",
"self",
".",
"format_plural",
"account_name",
"=",
"getattr",
"(",
"self",
".",
"backends",
"[",
"self",
".",
"current_backend",
"]",
",",
"\"account\"",
",",
"\"No name\"",
")",
"self",
".",
"output",
"=",
"{",
"\"full_text\"",
":",
"format",
".",
"format",
"(",
"unread",
"=",
"unread",
",",
"current_unread",
"=",
"current_unread",
",",
"account",
"=",
"account_name",
")",
",",
"\"urgent\"",
":",
"urgent",
",",
"\"color\"",
":",
"color",
",",
"}"
] | 29.545455 | 18.575758 |
def draw_rand_pos(self, radius, z_min, z_max,
min_r=np.array([0]), min_cell_interdist=10., **args):
"""
Draw some random location within radius, z_min, z_max,
and constrained by min_r and the minimum cell interdistance.
Returns a list of dicts [{'xpos', 'ypos', 'zpos'}, ...].
Parameters
----------
radius : float
Radius of population.
z_min : float
Lower z-boundary of population.
z_max : float
Upper z-boundary of population.
min_r : numpy.ndarray
Minimum distance to center axis as function of z.
min_cell_interdist : float
Minimum cell to cell interdistance.
**args : keyword arguments
Additional inputs that is being ignored.
Returns
-------
soma_pos : list
List of dicts of length population size, where each dict
has keys xpos, ypos, zpos specifying the xyz-coordinates
of the cell at list entry `i`.
See also
--------
PopulationSuper.calc_min_cell_interdist
"""
x = (np.random.rand(self.POPULATION_SIZE)-0.5)*radius*2
y = (np.random.rand(self.POPULATION_SIZE)-0.5)*radius*2
z = np.random.rand(self.POPULATION_SIZE)*(z_max - z_min) + z_min
min_r_z = {}
min_r = np.array(min_r)
if min_r.size > 0:
if type(min_r) == type(np.array([])):
j = 0
for j in range(min_r.shape[0]):
min_r_z[j] = np.interp(z, min_r[0,], min_r[1,])
if j > 0:
[w] = np.where(min_r_z[j] < min_r_z[j-1])
min_r_z[j][w] = min_r_z[j-1][w]
minrz = min_r_z[j]
else:
minrz = np.interp(z, min_r[0], min_r[1])
R_z = np.sqrt(x**2 + y**2)
#want to make sure that no somas are in the same place.
cell_interdist = self.calc_min_cell_interdist(x, y, z)
[u] = np.where(np.logical_or((R_z < minrz) != (R_z > radius),
cell_interdist < min_cell_interdist))
while len(u) > 0:
for i in range(len(u)):
x[u[i]] = (np.random.rand()-0.5)*radius*2
y[u[i]] = (np.random.rand()-0.5)*radius*2
z[u[i]] = np.random.rand()*(z_max - z_min) + z_min
if type(min_r) == type(()):
for j in range(np.shape(min_r)[0]):
min_r_z[j][u[i]] = \
np.interp(z[u[i]], min_r[0,], min_r[1,])
if j > 0:
[w] = np.where(min_r_z[j] < min_r_z[j-1])
min_r_z[j][w] = min_r_z[j-1][w]
minrz = min_r_z[j]
else:
minrz[u[i]] = np.interp(z[u[i]], min_r[0,], min_r[1,])
R_z = np.sqrt(x**2 + y**2)
#want to make sure that no somas are in the same place.
cell_interdist = self.calc_min_cell_interdist(x, y, z)
[u] = np.where(np.logical_or((R_z < minrz) != (R_z > radius),
cell_interdist < min_cell_interdist))
soma_pos = []
for i in range(self.POPULATION_SIZE):
soma_pos.append({'xpos' : x[i], 'ypos' : y[i], 'zpos' : z[i]})
return soma_pos | [
"def",
"draw_rand_pos",
"(",
"self",
",",
"radius",
",",
"z_min",
",",
"z_max",
",",
"min_r",
"=",
"np",
".",
"array",
"(",
"[",
"0",
"]",
")",
",",
"min_cell_interdist",
"=",
"10.",
",",
"*",
"*",
"args",
")",
":",
"x",
"=",
"(",
"np",
".",
"random",
".",
"rand",
"(",
"self",
".",
"POPULATION_SIZE",
")",
"-",
"0.5",
")",
"*",
"radius",
"*",
"2",
"y",
"=",
"(",
"np",
".",
"random",
".",
"rand",
"(",
"self",
".",
"POPULATION_SIZE",
")",
"-",
"0.5",
")",
"*",
"radius",
"*",
"2",
"z",
"=",
"np",
".",
"random",
".",
"rand",
"(",
"self",
".",
"POPULATION_SIZE",
")",
"*",
"(",
"z_max",
"-",
"z_min",
")",
"+",
"z_min",
"min_r_z",
"=",
"{",
"}",
"min_r",
"=",
"np",
".",
"array",
"(",
"min_r",
")",
"if",
"min_r",
".",
"size",
">",
"0",
":",
"if",
"type",
"(",
"min_r",
")",
"==",
"type",
"(",
"np",
".",
"array",
"(",
"[",
"]",
")",
")",
":",
"j",
"=",
"0",
"for",
"j",
"in",
"range",
"(",
"min_r",
".",
"shape",
"[",
"0",
"]",
")",
":",
"min_r_z",
"[",
"j",
"]",
"=",
"np",
".",
"interp",
"(",
"z",
",",
"min_r",
"[",
"0",
",",
"]",
",",
"min_r",
"[",
"1",
",",
"]",
")",
"if",
"j",
">",
"0",
":",
"[",
"w",
"]",
"=",
"np",
".",
"where",
"(",
"min_r_z",
"[",
"j",
"]",
"<",
"min_r_z",
"[",
"j",
"-",
"1",
"]",
")",
"min_r_z",
"[",
"j",
"]",
"[",
"w",
"]",
"=",
"min_r_z",
"[",
"j",
"-",
"1",
"]",
"[",
"w",
"]",
"minrz",
"=",
"min_r_z",
"[",
"j",
"]",
"else",
":",
"minrz",
"=",
"np",
".",
"interp",
"(",
"z",
",",
"min_r",
"[",
"0",
"]",
",",
"min_r",
"[",
"1",
"]",
")",
"R_z",
"=",
"np",
".",
"sqrt",
"(",
"x",
"**",
"2",
"+",
"y",
"**",
"2",
")",
"#want to make sure that no somas are in the same place.",
"cell_interdist",
"=",
"self",
".",
"calc_min_cell_interdist",
"(",
"x",
",",
"y",
",",
"z",
")",
"[",
"u",
"]",
"=",
"np",
".",
"where",
"(",
"np",
".",
"logical_or",
"(",
"(",
"R_z",
"<",
"minrz",
")",
"!=",
"(",
"R_z",
">",
"radius",
")",
",",
"cell_interdist",
"<",
"min_cell_interdist",
")",
")",
"while",
"len",
"(",
"u",
")",
">",
"0",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"u",
")",
")",
":",
"x",
"[",
"u",
"[",
"i",
"]",
"]",
"=",
"(",
"np",
".",
"random",
".",
"rand",
"(",
")",
"-",
"0.5",
")",
"*",
"radius",
"*",
"2",
"y",
"[",
"u",
"[",
"i",
"]",
"]",
"=",
"(",
"np",
".",
"random",
".",
"rand",
"(",
")",
"-",
"0.5",
")",
"*",
"radius",
"*",
"2",
"z",
"[",
"u",
"[",
"i",
"]",
"]",
"=",
"np",
".",
"random",
".",
"rand",
"(",
")",
"*",
"(",
"z_max",
"-",
"z_min",
")",
"+",
"z_min",
"if",
"type",
"(",
"min_r",
")",
"==",
"type",
"(",
"(",
")",
")",
":",
"for",
"j",
"in",
"range",
"(",
"np",
".",
"shape",
"(",
"min_r",
")",
"[",
"0",
"]",
")",
":",
"min_r_z",
"[",
"j",
"]",
"[",
"u",
"[",
"i",
"]",
"]",
"=",
"np",
".",
"interp",
"(",
"z",
"[",
"u",
"[",
"i",
"]",
"]",
",",
"min_r",
"[",
"0",
",",
"]",
",",
"min_r",
"[",
"1",
",",
"]",
")",
"if",
"j",
">",
"0",
":",
"[",
"w",
"]",
"=",
"np",
".",
"where",
"(",
"min_r_z",
"[",
"j",
"]",
"<",
"min_r_z",
"[",
"j",
"-",
"1",
"]",
")",
"min_r_z",
"[",
"j",
"]",
"[",
"w",
"]",
"=",
"min_r_z",
"[",
"j",
"-",
"1",
"]",
"[",
"w",
"]",
"minrz",
"=",
"min_r_z",
"[",
"j",
"]",
"else",
":",
"minrz",
"[",
"u",
"[",
"i",
"]",
"]",
"=",
"np",
".",
"interp",
"(",
"z",
"[",
"u",
"[",
"i",
"]",
"]",
",",
"min_r",
"[",
"0",
",",
"]",
",",
"min_r",
"[",
"1",
",",
"]",
")",
"R_z",
"=",
"np",
".",
"sqrt",
"(",
"x",
"**",
"2",
"+",
"y",
"**",
"2",
")",
"#want to make sure that no somas are in the same place.",
"cell_interdist",
"=",
"self",
".",
"calc_min_cell_interdist",
"(",
"x",
",",
"y",
",",
"z",
")",
"[",
"u",
"]",
"=",
"np",
".",
"where",
"(",
"np",
".",
"logical_or",
"(",
"(",
"R_z",
"<",
"minrz",
")",
"!=",
"(",
"R_z",
">",
"radius",
")",
",",
"cell_interdist",
"<",
"min_cell_interdist",
")",
")",
"soma_pos",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"POPULATION_SIZE",
")",
":",
"soma_pos",
".",
"append",
"(",
"{",
"'xpos'",
":",
"x",
"[",
"i",
"]",
",",
"'ypos'",
":",
"y",
"[",
"i",
"]",
",",
"'zpos'",
":",
"z",
"[",
"i",
"]",
"}",
")",
"return",
"soma_pos"
] | 36.472527 | 20.692308 |
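A minimal sketch of the same rejection-sampling idea, stripped of the z-dependent inner-radius handling above: draw candidate (x, y, z) positions in a cylinder and redraw any point that falls outside the radius or sits closer than a minimum distance to another point. The function and argument names are illustrative, not part of the original class.

import numpy as np

def draw_positions(n, radius, z_min, z_max, min_interdist, seed=None):
    rng = np.random.default_rng(seed)
    x = (rng.random(n) - 0.5) * 2 * radius
    y = (rng.random(n) - 0.5) * 2 * radius
    z = rng.random(n) * (z_max - z_min) + z_min
    while True:
        r = np.sqrt(x**2 + y**2)
        # pairwise distances; ignore each point's distance to itself
        d = np.sqrt((x[:, None] - x)**2 + (y[:, None] - y)**2 + (z[:, None] - z)**2)
        np.fill_diagonal(d, np.inf)
        bad = np.flatnonzero((r > radius) | (d.min(axis=1) < min_interdist))
        if bad.size == 0:
            return [{'xpos': xi, 'ypos': yi, 'zpos': zi} for xi, yi, zi in zip(x, y, z)]
        # redraw only the offending points and test again
        x[bad] = (rng.random(bad.size) - 0.5) * 2 * radius
        y[bad] = (rng.random(bad.size) - 0.5) * 2 * radius
        z[bad] = rng.random(bad.size) * (z_max - z_min) + z_min

soma_pos = draw_positions(10, radius=100.0, z_min=-450.0, z_max=-350.0, min_interdist=10.0)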
def source(self, value):
"""
Setter for **self.__source** attribute.
:param value: Attribute value.
:type value: unicode
"""
if value is not None:
assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format(
"source", value)
assert os.path.exists(value), "'{0}' attribute: '{1}' file doesn't exists!".format("source", value)
self.__source = value | [
"def",
"source",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"is",
"not",
"None",
":",
"assert",
"type",
"(",
"value",
")",
"is",
"unicode",
",",
"\"'{0}' attribute: '{1}' type is not 'unicode'!\"",
".",
"format",
"(",
"\"source\"",
",",
"value",
")",
"assert",
"os",
".",
"path",
".",
"exists",
"(",
"value",
")",
",",
"\"'{0}' attribute: '{1}' file doesn't exists!\"",
".",
"format",
"(",
"\"source\"",
",",
"value",
")",
"self",
".",
"__source",
"=",
"value"
] | 35.230769 | 21.230769 |
def make_internal_signing_service(config, entity_id):
"""
Given configuration initiate an InternalSigningService instance
:param config: The signing service configuration
:param entity_id: The entity identifier
:return: A InternalSigningService instance
"""
_args = dict([(k, v) for k, v in config.items() if k in KJ_SPECS])
_kj = init_key_jar(**_args)
return InternalSigningService(entity_id, _kj) | [
"def",
"make_internal_signing_service",
"(",
"config",
",",
"entity_id",
")",
":",
"_args",
"=",
"dict",
"(",
"[",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"config",
".",
"items",
"(",
")",
"if",
"k",
"in",
"KJ_SPECS",
"]",
")",
"_kj",
"=",
"init_key_jar",
"(",
"*",
"*",
"_args",
")",
"return",
"InternalSigningService",
"(",
"entity_id",
",",
"_kj",
")"
] | 32.692308 | 17.615385 |
def dvds_top_rentals(self, **kwargs):
"""Gets the current opening movies from the API.
Args:
limit (optional): limits the number of movies returned, default=10
country (optional): localized data for selected country, default="us"
Returns:
A dict representation of the JSON returned from the API.
"""
path = self._get_path('dvds_top_rentals')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response | [
"def",
"dvds_top_rentals",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
"=",
"self",
".",
"_get_path",
"(",
"'dvds_top_rentals'",
")",
"response",
"=",
"self",
".",
"_GET",
"(",
"path",
",",
"kwargs",
")",
"self",
".",
"_set_attrs_to_values",
"(",
"response",
")",
"return",
"response"
] | 34.133333 | 20.466667 |
def synchronize(self, pid, vendorSpecific=None):
"""See Also: synchronizeResponse() Args: pid: vendorSpecific:
Returns:
"""
response = self.synchronizeResponse(pid, vendorSpecific)
return self._read_boolean_response(response) | [
"def",
"synchronize",
"(",
"self",
",",
"pid",
",",
"vendorSpecific",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"synchronizeResponse",
"(",
"pid",
",",
"vendorSpecific",
")",
"return",
"self",
".",
"_read_boolean_response",
"(",
"response",
")"
] | 32.5 | 18.5 |
def get_edge_citation(self, u: BaseEntity, v: BaseEntity, key: str) -> Optional[CitationDict]:
"""Get the citation for a given edge."""
return self._get_edge_attr(u, v, key, CITATION) | [
"def",
"get_edge_citation",
"(",
"self",
",",
"u",
":",
"BaseEntity",
",",
"v",
":",
"BaseEntity",
",",
"key",
":",
"str",
")",
"->",
"Optional",
"[",
"CitationDict",
"]",
":",
"return",
"self",
".",
"_get_edge_attr",
"(",
"u",
",",
"v",
",",
"key",
",",
"CITATION",
")"
] | 65.666667 | 23 |
def spher_harms(l, m, inclination):
"""Return spherical harmonic polarizations
"""
# FIXME: we are using spin -2 weighted spherical harmonics for now,
# when possible switch to spheroidal harmonics.
Y_lm = lal.SpinWeightedSphericalHarmonic(inclination, 0., -2, l, m).real
Y_lminusm = lal.SpinWeightedSphericalHarmonic(inclination, 0., -2, l, -m).real
Y_plus = Y_lm + (-1)**l * Y_lminusm
Y_cross = Y_lm - (-1)**l * Y_lminusm
return Y_plus, Y_cross | [
"def",
"spher_harms",
"(",
"l",
",",
"m",
",",
"inclination",
")",
":",
"# FIXME: we are using spin -2 weighted spherical harmonics for now,",
"# when possible switch to spheroidal harmonics.",
"Y_lm",
"=",
"lal",
".",
"SpinWeightedSphericalHarmonic",
"(",
"inclination",
",",
"0.",
",",
"-",
"2",
",",
"l",
",",
"m",
")",
".",
"real",
"Y_lminusm",
"=",
"lal",
".",
"SpinWeightedSphericalHarmonic",
"(",
"inclination",
",",
"0.",
",",
"-",
"2",
",",
"l",
",",
"-",
"m",
")",
".",
"real",
"Y_plus",
"=",
"Y_lm",
"+",
"(",
"-",
"1",
")",
"**",
"l",
"*",
"Y_lminusm",
"Y_cross",
"=",
"Y_lm",
"-",
"(",
"-",
"1",
")",
"**",
"l",
"*",
"Y_lminusm",
"return",
"Y_plus",
",",
"Y_cross"
] | 39.416667 | 18.333333 |
def delete_all(self, model_class):
'''Drop all records from the table model_class.__name__.lower()
'''
assert hasattr(model_class, '_fields'), 'Not a valid model class'
table = model_class.__name__.lower()
with Session() as conn:
SQL = f'DELETE FROM {table}'
conn.cursor().execute(SQL)
conn.commit() | [
"def",
"delete_all",
"(",
"self",
",",
"model_class",
")",
":",
"assert",
"hasattr",
"(",
"model_class",
",",
"'_fields'",
")",
",",
"'Not a valid model class'",
"table",
"=",
"model_class",
".",
"__name__",
".",
"lower",
"(",
")",
"with",
"Session",
"(",
")",
"as",
"conn",
":",
"SQL",
"=",
"f'DELETE FROM {table}'",
"conn",
".",
"cursor",
"(",
")",
".",
"execute",
"(",
"SQL",
")",
"conn",
".",
"commit",
"(",
")"
] | 36.7 | 16.9 |
def json_data(self, instance, default=None):
"""Get a JSON compatible value
"""
value = self.get(instance)
return value or default | [
"def",
"json_data",
"(",
"self",
",",
"instance",
",",
"default",
"=",
"None",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"instance",
")",
"return",
"value",
"or",
"default"
] | 31.6 | 3.8 |
def clear_rr_ce_entries(self):
# type: () -> None
'''
A method to clear out all of the extent locations of all Rock Ridge
Continuation Entries that the PVD is tracking. This can be used to
reset all data before assigning new data.
Parameters:
None.
Returns:
Nothing.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('This Primary Volume Descriptor is not yet initialized')
for block in self.rr_ce_blocks:
block.set_extent_location(-1) | [
"def",
"clear_rr_ce_entries",
"(",
"self",
")",
":",
"# type: () -> None",
"if",
"not",
"self",
".",
"_initialized",
":",
"raise",
"pycdlibexception",
".",
"PyCdlibInternalError",
"(",
"'This Primary Volume Descriptor is not yet initialized'",
")",
"for",
"block",
"in",
"self",
".",
"rr_ce_blocks",
":",
"block",
".",
"set_extent_location",
"(",
"-",
"1",
")"
] | 33.411765 | 24.470588 |
def _gen_3spec(op, path, xattr=False):
"""
Returns a Spec tuple suitable for passing to the underlying C extension.
This variant is called for operations that lack an input value.
:param str path: The path to fetch
:param bool xattr: Whether this is an extended attribute
:return: a spec suitable for passing to the underlying C extension
"""
flags = 0
if xattr:
flags |= _P.SDSPEC_F_XATTR
return Spec(op, path, flags) | [
"def",
"_gen_3spec",
"(",
"op",
",",
"path",
",",
"xattr",
"=",
"False",
")",
":",
"flags",
"=",
"0",
"if",
"xattr",
":",
"flags",
"|=",
"_P",
".",
"SDSPEC_F_XATTR",
"return",
"Spec",
"(",
"op",
",",
"path",
",",
"flags",
")"
] | 35 | 17.307692 |
def handle_request(self, request, **resources):
""" Get a method for request and execute.
:return object: method result
"""
if not request.method in self._meta.callmap.keys():
raise HttpError(
'Unknown or unsupported method \'%s\'' % request.method,
status=status.HTTP_501_NOT_IMPLEMENTED)
# Get the appropriate create/read/update/delete function
view = getattr(self, self._meta.callmap[request.method])
# Get function data
return view(request, **resources) | [
"def",
"handle_request",
"(",
"self",
",",
"request",
",",
"*",
"*",
"resources",
")",
":",
"if",
"not",
"request",
".",
"method",
"in",
"self",
".",
"_meta",
".",
"callmap",
".",
"keys",
"(",
")",
":",
"raise",
"HttpError",
"(",
"'Unknown or unsupported method \\'%s\\''",
"%",
"request",
".",
"method",
",",
"status",
"=",
"status",
".",
"HTTP_501_NOT_IMPLEMENTED",
")",
"# Get the appropriate create/read/update/delete function",
"view",
"=",
"getattr",
"(",
"self",
",",
"self",
".",
"_meta",
".",
"callmap",
"[",
"request",
".",
"method",
"]",
")",
"# Get function data",
"return",
"view",
"(",
"request",
",",
"*",
"*",
"resources",
")"
] | 34.625 | 19.375 |
def run_interactive(query, editor=None, just_count=False, default_no=False):
"""
Asks the user about each patch suggested by the result of the query.
@param query An instance of the Query class.
@param editor Name of editor to use for manual intervention,
e.g. 'vim' or 'emacs'. If omitted/None, defaults to the
$EDITOR environment variable.
@param just_count If true: don't run normally. Just print out number of
places in the codebase where the query matches.
"""
global yes_to_all
# Load start from bookmark, if appropriate.
bookmark = _load_bookmark()
if bookmark:
print('Resume where you left off, at %s (y/n)? '
% str(bookmark), end=' ')
sys.stdout.flush()
if (_prompt(default='y') == 'y'):
query.start_position = bookmark
# Okay, enough of this foolishness of computing start and end.
# Let's ask the user about some one line diffs!
print('Searching for first instance...')
suggestions = query.generate_patches()
if just_count:
for count, _ in enumerate(suggestions):
terminal.terminal_move_to_beginning_of_line()
print(count, end=" ")
sys.stdout.flush() # since print statement ends in comma
print()
return
for patch in suggestions:
_save_bookmark(patch.start_position)
_ask_about_patch(patch, editor, default_no)
print('Searching...')
_delete_bookmark()
if yes_to_all:
terminal.terminal_clear()
print(
"You MUST indicate in your code review:"
" \"codemod with 'Yes to all'\"."
"Make sure you and other people review the changes.\n\n"
"With great power, comes great responsibility."
) | [
"def",
"run_interactive",
"(",
"query",
",",
"editor",
"=",
"None",
",",
"just_count",
"=",
"False",
",",
"default_no",
"=",
"False",
")",
":",
"global",
"yes_to_all",
"# Load start from bookmark, if appropriate.",
"bookmark",
"=",
"_load_bookmark",
"(",
")",
"if",
"bookmark",
":",
"print",
"(",
"'Resume where you left off, at %s (y/n)? '",
"%",
"str",
"(",
"bookmark",
")",
",",
"end",
"=",
"' '",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"if",
"(",
"_prompt",
"(",
"default",
"=",
"'y'",
")",
"==",
"'y'",
")",
":",
"query",
".",
"start_position",
"=",
"bookmark",
"# Okay, enough of this foolishness of computing start and end.",
"# Let's ask the user about some one line diffs!",
"print",
"(",
"'Searching for first instance...'",
")",
"suggestions",
"=",
"query",
".",
"generate_patches",
"(",
")",
"if",
"just_count",
":",
"for",
"count",
",",
"_",
"in",
"enumerate",
"(",
"suggestions",
")",
":",
"terminal",
".",
"terminal_move_to_beginning_of_line",
"(",
")",
"print",
"(",
"count",
",",
"end",
"=",
"\" \"",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"# since print statement ends in comma",
"print",
"(",
")",
"return",
"for",
"patch",
"in",
"suggestions",
":",
"_save_bookmark",
"(",
"patch",
".",
"start_position",
")",
"_ask_about_patch",
"(",
"patch",
",",
"editor",
",",
"default_no",
")",
"print",
"(",
"'Searching...'",
")",
"_delete_bookmark",
"(",
")",
"if",
"yes_to_all",
":",
"terminal",
".",
"terminal_clear",
"(",
")",
"print",
"(",
"\"You MUST indicate in your code review:\"",
"\" \\\"codemod with 'Yes to all'\\\".\"",
"\"Make sure you and other people review the changes.\\n\\n\"",
"\"With great power, comes great responsibility.\"",
")"
] | 37.489796 | 18.591837 |
def execute_as(collector):
"""Execute a command (after the --) as an assumed role (specified by --artifact)"""
# Gonna assume role anyway...
collector.configuration['amazon']._validated = True
# Find the arn we want to assume
account_id = collector.configuration['accounts'][collector.configuration['aws_syncr'].environment]
arn = "arn:aws:iam::{0}:role/{1}".format(account_id, collector.configuration['aws_syncr'].artifact)
# Determine the command to run
parts = shlex.split(collector.configuration["aws_syncr"].extra)
if not parts:
suggestion = " ".join(sys.argv) + " -- /path/to/command_to_run"
msg = "No command was provided. Try something like:\n\t\t{0}".format(suggestion)
raise AwsSyncrError(msg)
# Get our aws credentials environment variables from the assumed role
env = dict(os.environ)
env.update(collector.configuration['amazon'].iam.assume_role_credentials(arn))
# Turn into the command we want to execute
os.execvpe(parts[0], parts, env) | [
"def",
"execute_as",
"(",
"collector",
")",
":",
"# Gonna assume role anyway...",
"collector",
".",
"configuration",
"[",
"'amazon'",
"]",
".",
"_validated",
"=",
"True",
"# Find the arn we want to assume",
"account_id",
"=",
"collector",
".",
"configuration",
"[",
"'accounts'",
"]",
"[",
"collector",
".",
"configuration",
"[",
"'aws_syncr'",
"]",
".",
"environment",
"]",
"arn",
"=",
"\"arn:aws:iam::{0}:role/{1}\"",
".",
"format",
"(",
"account_id",
",",
"collector",
".",
"configuration",
"[",
"'aws_syncr'",
"]",
".",
"artifact",
")",
"# Determine the command to run",
"parts",
"=",
"shlex",
".",
"split",
"(",
"collector",
".",
"configuration",
"[",
"\"aws_syncr\"",
"]",
".",
"extra",
")",
"if",
"not",
"parts",
":",
"suggestion",
"=",
"\" \"",
".",
"join",
"(",
"sys",
".",
"argv",
")",
"+",
"\" -- /path/to/command_to_run\"",
"msg",
"=",
"\"No command was provided. Try something like:\\n\\t\\t{0}\"",
".",
"format",
"(",
"suggestion",
")",
"raise",
"AwsSyncrError",
"(",
"msg",
")",
"# Get our aws credentials environment variables from the assumed role",
"env",
"=",
"dict",
"(",
"os",
".",
"environ",
")",
"env",
".",
"update",
"(",
"collector",
".",
"configuration",
"[",
"'amazon'",
"]",
".",
"iam",
".",
"assume_role_credentials",
"(",
"arn",
")",
")",
"# Turn into the command we want to execute",
"os",
".",
"execvpe",
"(",
"parts",
"[",
"0",
"]",
",",
"parts",
",",
"env",
")"
] | 46.090909 | 25.772727 |
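A hedged sketch of the assume-role step that execute_as builds on: fetch temporary credentials from STS, expose them through the environment variables the AWS SDKs and CLI read, and replace the current process with the target command. The role ARN, session name, and command here are placeholders, not values from the original configuration.

import os
import shlex

import boto3

def run_as_role(role_arn, command, session_name="aws-syncr"):
    # exchange the caller's credentials for temporary ones scoped to the role
    creds = boto3.client("sts").assume_role(
        RoleArn=role_arn, RoleSessionName=session_name)["Credentials"]
    env = dict(os.environ)
    env.update({
        "AWS_ACCESS_KEY_ID": creds["AccessKeyId"],
        "AWS_SECRET_ACCESS_KEY": creds["SecretAccessKey"],
        "AWS_SESSION_TOKEN": creds["SessionToken"],
    })
    parts = shlex.split(command)
    os.execvpe(parts[0], parts, env)  # never returns; the command takes over the process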
def main(argv=None):
"""Main entry point for the cdstar CLI."""
args = docopt(__doc__, version=pycdstar.__version__, argv=argv, options_first=True)
subargs = [args['<command>']] + args['<args>']
if args['<command>'] in ['help', None]:
cmd = None
if len(subargs) > 1:
cmd = COMMANDS.get(subargs[1])
if cmd:
print(cmd.__doc__)
else:
print(__doc__)
return 0
cmd = COMMANDS.get(args['<command>'])
if not cmd:
print('unknown command')
print(__doc__)
return 0
cfg = Config(**dict(
cfg=args.pop('--cfg', None),
url=args.pop('--service', None),
user=args.pop('--user', None),
password=args.pop('--password', None)))
try:
res = cmd(
Cdstar(cfg=cfg),
docopt(cmd.__doc__, argv=subargs),
verbose=args.get('--verbose'))
if isinstance(res, types.GeneratorType):
res = list(res)
if isinstance(res, list):
for line in res:
print(line)
res = 0
return res or 0
except: # noqa: E722; # pragma: no cover
# FIXME: log exception!
return 256 | [
"def",
"main",
"(",
"argv",
"=",
"None",
")",
":",
"args",
"=",
"docopt",
"(",
"__doc__",
",",
"version",
"=",
"pycdstar",
".",
"__version__",
",",
"argv",
"=",
"argv",
",",
"options_first",
"=",
"True",
")",
"subargs",
"=",
"[",
"args",
"[",
"'<command>'",
"]",
"]",
"+",
"args",
"[",
"'<args>'",
"]",
"if",
"args",
"[",
"'<command>'",
"]",
"in",
"[",
"'help'",
",",
"None",
"]",
":",
"cmd",
"=",
"None",
"if",
"len",
"(",
"subargs",
")",
">",
"1",
":",
"cmd",
"=",
"COMMANDS",
".",
"get",
"(",
"subargs",
"[",
"1",
"]",
")",
"if",
"cmd",
":",
"print",
"(",
"cmd",
".",
"__doc__",
")",
"else",
":",
"print",
"(",
"__doc__",
")",
"return",
"0",
"cmd",
"=",
"COMMANDS",
".",
"get",
"(",
"args",
"[",
"'<command>'",
"]",
")",
"if",
"not",
"cmd",
":",
"print",
"(",
"'unknown command'",
")",
"print",
"(",
"__doc__",
")",
"return",
"0",
"cfg",
"=",
"Config",
"(",
"*",
"*",
"dict",
"(",
"cfg",
"=",
"args",
".",
"pop",
"(",
"'--cfg'",
",",
"None",
")",
",",
"url",
"=",
"args",
".",
"pop",
"(",
"'--service'",
",",
"None",
")",
",",
"user",
"=",
"args",
".",
"pop",
"(",
"'--user'",
",",
"None",
")",
",",
"password",
"=",
"args",
".",
"pop",
"(",
"'--password'",
",",
"None",
")",
")",
")",
"try",
":",
"res",
"=",
"cmd",
"(",
"Cdstar",
"(",
"cfg",
"=",
"cfg",
")",
",",
"docopt",
"(",
"cmd",
".",
"__doc__",
",",
"argv",
"=",
"subargs",
")",
",",
"verbose",
"=",
"args",
".",
"get",
"(",
"'--verbose'",
")",
")",
"if",
"isinstance",
"(",
"res",
",",
"types",
".",
"GeneratorType",
")",
":",
"res",
"=",
"list",
"(",
"res",
")",
"if",
"isinstance",
"(",
"res",
",",
"list",
")",
":",
"for",
"line",
"in",
"res",
":",
"print",
"(",
"line",
")",
"res",
"=",
"0",
"return",
"res",
"or",
"0",
"except",
":",
"# noqa: E722; # pragma: no cover",
"# FIXME: log exception!",
"return",
"256"
] | 28.238095 | 16.238095 |
def image_member(self):
"""
Returns a json-schema document that represents an image member entity.
(a container of member entities).
"""
uri = "/%s/member" % self.uri_base
resp, resp_body = self.api.method_get(uri)
return resp_body | [
"def",
"image_member",
"(",
"self",
")",
":",
"uri",
"=",
"\"/%s/member\"",
"%",
"self",
".",
"uri_base",
"resp",
",",
"resp_body",
"=",
"self",
".",
"api",
".",
"method_get",
"(",
"uri",
")",
"return",
"resp_body"
] | 35 | 10.5 |
def add_code_challenge(request_args, service, **kwargs):
"""
PKCE RFC 7636 support
To be added as a post_construct method to an
:py:class:`oidcservice.oidc.service.Authorization` instance
:param service: The service that uses this function
:param request_args: Set of request arguments
:param kwargs: Extra set of keyword arguments
:return: Updated set of request arguments
"""
try:
cv_len = service.service_context.config['code_challenge']['length']
except KeyError:
cv_len = 64 # Use default
# code_verifier: string of length cv_len
code_verifier = unreserved(cv_len)
_cv = code_verifier.encode()
try:
_method = service.service_context.config['code_challenge']['method']
except KeyError:
_method = 'S256'
try:
# Pick hash method
_hash_method = CC_METHOD[_method]
# Use it on the code_verifier
_hv = _hash_method(_cv).digest()
# base64 encode the hash value
code_challenge = b64e(_hv).decode('ascii')
except KeyError:
raise Unsupported(
'PKCE Transformation method:{}'.format(_method))
_item = Message(code_verifier=code_verifier,code_challenge_method=_method)
service.store_item(_item, 'pkce', request_args['state'])
request_args.update({"code_challenge": code_challenge,
"code_challenge_method": _method})
return request_args | [
"def",
"add_code_challenge",
"(",
"request_args",
",",
"service",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"cv_len",
"=",
"service",
".",
"service_context",
".",
"config",
"[",
"'code_challenge'",
"]",
"[",
"'length'",
"]",
"except",
"KeyError",
":",
"cv_len",
"=",
"64",
"# Use default",
"# code_verifier: string of length cv_len",
"code_verifier",
"=",
"unreserved",
"(",
"cv_len",
")",
"_cv",
"=",
"code_verifier",
".",
"encode",
"(",
")",
"try",
":",
"_method",
"=",
"service",
".",
"service_context",
".",
"config",
"[",
"'code_challenge'",
"]",
"[",
"'method'",
"]",
"except",
"KeyError",
":",
"_method",
"=",
"'S256'",
"try",
":",
"# Pick hash method",
"_hash_method",
"=",
"CC_METHOD",
"[",
"_method",
"]",
"# Use it on the code_verifier",
"_hv",
"=",
"_hash_method",
"(",
"_cv",
")",
".",
"digest",
"(",
")",
"# base64 encode the hash value",
"code_challenge",
"=",
"b64e",
"(",
"_hv",
")",
".",
"decode",
"(",
"'ascii'",
")",
"except",
"KeyError",
":",
"raise",
"Unsupported",
"(",
"'PKCE Transformation method:{}'",
".",
"format",
"(",
"_method",
")",
")",
"_item",
"=",
"Message",
"(",
"code_verifier",
"=",
"code_verifier",
",",
"code_challenge_method",
"=",
"_method",
")",
"service",
".",
"store_item",
"(",
"_item",
",",
"'pkce'",
",",
"request_args",
"[",
"'state'",
"]",
")",
"request_args",
".",
"update",
"(",
"{",
"\"code_challenge\"",
":",
"code_challenge",
",",
"\"code_challenge_method\"",
":",
"_method",
"}",
")",
"return",
"request_args"
] | 33.5 | 18.547619 |
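A self-contained sketch of the S256 transform that add_code_challenge applies (RFC 7636): the code challenge is the unpadded base64url encoding of the SHA-256 digest of the ASCII code verifier. The helper names below are illustrative and independent of oidcservice.

import base64
import hashlib
import secrets
import string

def make_code_verifier(length=64):
    # unreserved characters allowed in a code verifier per RFC 7636
    alphabet = string.ascii_letters + string.digits + "-._~"
    return "".join(secrets.choice(alphabet) for _ in range(length))

def s256_challenge(code_verifier):
    digest = hashlib.sha256(code_verifier.encode("ascii")).digest()
    # base64url without padding, as the spec requires
    return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

verifier = make_code_verifier()
challenge = s256_challenge(verifier)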