text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def get_client(client_id):
"""Load the client.
Needed for grant_type client_credentials.
Add support for OAuth client_credentials access type, with user
inactivation support.
:param client_id: The client ID.
:returns: The client instance or ``None``.
"""
client = Client.query.get(client_id)
if client and client.user.active:
return client | [
"def",
"get_client",
"(",
"client_id",
")",
":",
"client",
"=",
"Client",
".",
"query",
".",
"get",
"(",
"client_id",
")",
"if",
"client",
"and",
"client",
".",
"user",
".",
"active",
":",
"return",
"client"
] | 26.642857 | 15.214286 |
def authenticate(self, request):
""" Attempt to authenticate the request.
:param request: django.http.Request instance
:return bool: True if success else raises HTTP_401
"""
authenticators = self._meta.authenticators
if request.method == 'OPTIONS' and ADREST_ALLOW_OPTIONS:
self.auth = AnonimousAuthenticator(self)
return True
error_message = "Authorization required."
for authenticator in authenticators:
auth = authenticator(self)
try:
if not auth.authenticate(request):
raise AssertionError(error_message)
self.auth = auth
auth.configure(request)
return True
except AssertionError, e:
error_message = str(e)
raise HttpError(error_message, status=status.HTTP_401_UNAUTHORIZED) | [
"def",
"authenticate",
"(",
"self",
",",
"request",
")",
":",
"authenticators",
"=",
"self",
".",
"_meta",
".",
"authenticators",
"if",
"request",
".",
"method",
"==",
"'OPTIONS'",
"and",
"ADREST_ALLOW_OPTIONS",
":",
"self",
".",
"auth",
"=",
"AnonimousAuthenticator",
"(",
"self",
")",
"return",
"True",
"error_message",
"=",
"\"Authorization required.\"",
"for",
"authenticator",
"in",
"authenticators",
":",
"auth",
"=",
"authenticator",
"(",
"self",
")",
"try",
":",
"if",
"not",
"auth",
".",
"authenticate",
"(",
"request",
")",
":",
"raise",
"AssertionError",
"(",
"error_message",
")",
"self",
".",
"auth",
"=",
"auth",
"auth",
".",
"configure",
"(",
"request",
")",
"return",
"True",
"except",
"AssertionError",
",",
"e",
":",
"error_message",
"=",
"str",
"(",
"e",
")",
"raise",
"HttpError",
"(",
"error_message",
",",
"status",
"=",
"status",
".",
"HTTP_401_UNAUTHORIZED",
")"
] | 31.785714 | 18.107143 |
def integer(token):
"""
Convert numeric strings into integers.
@type token: str
@param token: String to parse.
@rtype: int
@return: Parsed integer value.
"""
token = token.strip()
neg = False
if token.startswith(compat.b('-')):
token = token[1:]
neg = True
if token.startswith(compat.b('0x')):
result = int(token, 16) # hexadecimal
elif token.startswith(compat.b('0b')):
result = int(token[2:], 2) # binary
elif token.startswith(compat.b('0o')):
result = int(token, 8) # octal
else:
try:
result = int(token) # decimal
except ValueError:
result = int(token, 16) # hexadecimal (no "0x" prefix)
if neg:
result = -result
return result | [
"def",
"integer",
"(",
"token",
")",
":",
"token",
"=",
"token",
".",
"strip",
"(",
")",
"neg",
"=",
"False",
"if",
"token",
".",
"startswith",
"(",
"compat",
".",
"b",
"(",
"'-'",
")",
")",
":",
"token",
"=",
"token",
"[",
"1",
":",
"]",
"neg",
"=",
"True",
"if",
"token",
".",
"startswith",
"(",
"compat",
".",
"b",
"(",
"'0x'",
")",
")",
":",
"result",
"=",
"int",
"(",
"token",
",",
"16",
")",
"# hexadecimal",
"elif",
"token",
".",
"startswith",
"(",
"compat",
".",
"b",
"(",
"'0b'",
")",
")",
":",
"result",
"=",
"int",
"(",
"token",
"[",
"2",
":",
"]",
",",
"2",
")",
"# binary",
"elif",
"token",
".",
"startswith",
"(",
"compat",
".",
"b",
"(",
"'0o'",
")",
")",
":",
"result",
"=",
"int",
"(",
"token",
",",
"8",
")",
"# octal",
"else",
":",
"try",
":",
"result",
"=",
"int",
"(",
"token",
")",
"# decimal",
"except",
"ValueError",
":",
"result",
"=",
"int",
"(",
"token",
",",
"16",
")",
"# hexadecimal (no \"0x\" prefix)",
"if",
"neg",
":",
"result",
"=",
"-",
"result",
"return",
"result"
] | 30.206897 | 14.137931 |
def readfp(self, fp, filename=None):
"""Like read() but the argument must be a file-like object.
The `fp' argument must have a `readline' method. Optional
second argument is the `filename', which if not given, is
taken from fp.name. If fp has no `name' attribute, `<???>' is
used.
"""
if filename is None:
try:
filename = fp.name
except AttributeError:
filename = '<???>'
self._read(fp, filename) | [
"def",
"readfp",
"(",
"self",
",",
"fp",
",",
"filename",
"=",
"None",
")",
":",
"if",
"filename",
"is",
"None",
":",
"try",
":",
"filename",
"=",
"fp",
".",
"name",
"except",
"AttributeError",
":",
"filename",
"=",
"'<???>'",
"self",
".",
"_read",
"(",
"fp",
",",
"filename",
")"
] | 33.733333 | 16.933333 |
def write_inp(self):
"""
Returns the material definition as a string in Abaqus INP format.
"""
template = self.get_template()
plastic_table = self.get_plastic_table()
return template.substitute({
"class": self.__class__.__name__,
"label": self.label,
"young_modulus": self.young_modulus,
"poisson_ratio": self.poisson_ratio,
"plastic_table": (self.get_plastic_table()[["stress", "plastic_strain"]]
.to_csv(header = False,
index = False,
sep = ",").strip())}).strip() | [
"def",
"write_inp",
"(",
"self",
")",
":",
"template",
"=",
"self",
".",
"get_template",
"(",
")",
"plastic_table",
"=",
"self",
".",
"get_plastic_table",
"(",
")",
"return",
"template",
".",
"substitute",
"(",
"{",
"\"class\"",
":",
"self",
".",
"__class__",
".",
"__name__",
",",
"\"label\"",
":",
"self",
".",
"label",
",",
"\"young_modulus\"",
":",
"self",
".",
"young_modulus",
",",
"\"poisson_ratio\"",
":",
"self",
".",
"poisson_ratio",
",",
"\"plastic_table\"",
":",
"(",
"self",
".",
"get_plastic_table",
"(",
")",
"[",
"[",
"\"stress\"",
",",
"\"plastic_strain\"",
"]",
"]",
".",
"to_csv",
"(",
"header",
"=",
"False",
",",
"index",
"=",
"False",
",",
"sep",
"=",
"\",\"",
")",
".",
"strip",
"(",
")",
")",
"}",
")",
".",
"strip",
"(",
")"
] | 40.733333 | 11.266667 |
def match_metric(regex):
'''
Display the current values of all metrics whose names match the
given regular expression.
.. versionadded:: 2016.11.0
.. code-block:: bash
salt '*' trafficserver.match_metric regex
'''
if _TRAFFICCTL:
cmd = _traffic_ctl('metric', 'match', regex)
else:
cmd = _traffic_ctl('-m', regex)
return _subprocess(cmd) | [
"def",
"match_metric",
"(",
"regex",
")",
":",
"if",
"_TRAFFICCTL",
":",
"cmd",
"=",
"_traffic_ctl",
"(",
"'metric'",
",",
"'match'",
",",
"regex",
")",
"else",
":",
"cmd",
"=",
"_traffic_ctl",
"(",
"'-m'",
",",
"regex",
")",
"return",
"_subprocess",
"(",
"cmd",
")"
] | 22.588235 | 23.058824 |
def check_messages(*messages: str) -> Callable:
"""decorator to store messages that are handled by a checker method"""
def store_messages(func):
func.checks_msgs = messages
return func
return store_messages | [
"def",
"check_messages",
"(",
"*",
"messages",
":",
"str",
")",
"->",
"Callable",
":",
"def",
"store_messages",
"(",
"func",
")",
":",
"func",
".",
"checks_msgs",
"=",
"messages",
"return",
"func",
"return",
"store_messages"
] | 28.625 | 17.375 |
def _parse_size(self, size, has_time=False):
"""Parse size inputs"""
if has_time:
size = size or 4
else:
size = size or 10
if isinstance(size, str):
size = {'column': size}
if isinstance(size, dict):
if 'column' not in size:
raise ValueError("`size` must include a 'column' key/value")
if has_time:
raise ValueError("When time is specified, size can "
"only be a fixed size")
old_size = size
# Default size range, bins, and bin_method
size = {
'range': [5, 25],
'bins': 5,
'bin_method': BinMethod.quantiles,
}
# Assign default range and update if min/max given
old_size['range'] = old_size.get('range', size['range'])
if 'min' in old_size:
old_size['range'][0] = old_size['min']
old_size.pop('min')
if 'max' in old_size:
old_size['range'][1] = old_size['max']
old_size.pop('max')
# Update all the keys in size if they exist in old_size
size.update(old_size)
self.style_cols[size['column']] = None
return size | [
"def",
"_parse_size",
"(",
"self",
",",
"size",
",",
"has_time",
"=",
"False",
")",
":",
"if",
"has_time",
":",
"size",
"=",
"size",
"or",
"4",
"else",
":",
"size",
"=",
"size",
"or",
"10",
"if",
"isinstance",
"(",
"size",
",",
"str",
")",
":",
"size",
"=",
"{",
"'column'",
":",
"size",
"}",
"if",
"isinstance",
"(",
"size",
",",
"dict",
")",
":",
"if",
"'column'",
"not",
"in",
"size",
":",
"raise",
"ValueError",
"(",
"\"`size` must include a 'column' key/value\"",
")",
"if",
"has_time",
":",
"raise",
"ValueError",
"(",
"\"When time is specified, size can \"",
"\"only be a fixed size\"",
")",
"old_size",
"=",
"size",
"# Default size range, bins, and bin_method",
"size",
"=",
"{",
"'range'",
":",
"[",
"5",
",",
"25",
"]",
",",
"'bins'",
":",
"5",
",",
"'bin_method'",
":",
"BinMethod",
".",
"quantiles",
",",
"}",
"# Assign default range and update if min/max given",
"old_size",
"[",
"'range'",
"]",
"=",
"old_size",
".",
"get",
"(",
"'range'",
",",
"size",
"[",
"'range'",
"]",
")",
"if",
"'min'",
"in",
"old_size",
":",
"old_size",
"[",
"'range'",
"]",
"[",
"0",
"]",
"=",
"old_size",
"[",
"'min'",
"]",
"old_size",
".",
"pop",
"(",
"'min'",
")",
"if",
"'max'",
"in",
"old_size",
":",
"old_size",
"[",
"'range'",
"]",
"[",
"1",
"]",
"=",
"old_size",
"[",
"'max'",
"]",
"old_size",
".",
"pop",
"(",
"'max'",
")",
"# Update all the keys in size if they exist in old_size",
"size",
".",
"update",
"(",
"old_size",
")",
"self",
".",
"style_cols",
"[",
"size",
"[",
"'column'",
"]",
"]",
"=",
"None",
"return",
"size"
] | 38.029412 | 14.823529 |
def dif(a, b):
""" copy from http://stackoverflow.com/a/8545526 """
return [i for i in range(len(a)) if a[i] != b[i]] | [
"def",
"dif",
"(",
"a",
",",
"b",
")",
":",
"return",
"[",
"i",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"a",
")",
")",
"if",
"a",
"[",
"i",
"]",
"!=",
"b",
"[",
"i",
"]",
"]"
] | 41 | 13 |
def cql_encode_datetime(self, val):
"""
Converts a :class:`datetime.datetime` object to a (string) integer timestamp
with millisecond precision.
"""
timestamp = calendar.timegm(val.utctimetuple())
return str(long(timestamp * 1e3 + getattr(val, 'microsecond', 0) / 1e3)) | [
"def",
"cql_encode_datetime",
"(",
"self",
",",
"val",
")",
":",
"timestamp",
"=",
"calendar",
".",
"timegm",
"(",
"val",
".",
"utctimetuple",
"(",
")",
")",
"return",
"str",
"(",
"long",
"(",
"timestamp",
"*",
"1e3",
"+",
"getattr",
"(",
"val",
",",
"'microsecond'",
",",
"0",
")",
"/",
"1e3",
")",
")"
] | 44.428571 | 15.571429 |
def set_visa_attribute(self, name, state):
"""Sets the state of an attribute.
:param name: Attribute for which the state is to be modified. (Attributes.*)
:param state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
return self.visalib.set_attribute(self.session, name, state) | [
"def",
"set_visa_attribute",
"(",
"self",
",",
"name",
",",
"state",
")",
":",
"return",
"self",
".",
"visalib",
".",
"set_attribute",
"(",
"self",
".",
"session",
",",
"name",
",",
"state",
")"
] | 48.111111 | 20 |
def unprotected_ids(doc, options):
u"""Returns a list of unprotected IDs within the document doc."""
identifiedElements = findElementsWithId(doc.documentElement)
if not (options.protect_ids_noninkscape or
options.protect_ids_list or
options.protect_ids_prefix):
return identifiedElements
if options.protect_ids_list:
protect_ids_list = options.protect_ids_list.split(",")
if options.protect_ids_prefix:
protect_ids_prefixes = options.protect_ids_prefix.split(",")
for id in list(identifiedElements):
protected = False
if options.protect_ids_noninkscape and not id[-1].isdigit():
protected = True
if options.protect_ids_list and id in protect_ids_list:
protected = True
if options.protect_ids_prefix:
for prefix in protect_ids_prefixes:
if id.startswith(prefix):
protected = True
if protected:
del identifiedElements[id]
return identifiedElements | [
"def",
"unprotected_ids",
"(",
"doc",
",",
"options",
")",
":",
"identifiedElements",
"=",
"findElementsWithId",
"(",
"doc",
".",
"documentElement",
")",
"if",
"not",
"(",
"options",
".",
"protect_ids_noninkscape",
"or",
"options",
".",
"protect_ids_list",
"or",
"options",
".",
"protect_ids_prefix",
")",
":",
"return",
"identifiedElements",
"if",
"options",
".",
"protect_ids_list",
":",
"protect_ids_list",
"=",
"options",
".",
"protect_ids_list",
".",
"split",
"(",
"\",\"",
")",
"if",
"options",
".",
"protect_ids_prefix",
":",
"protect_ids_prefixes",
"=",
"options",
".",
"protect_ids_prefix",
".",
"split",
"(",
"\",\"",
")",
"for",
"id",
"in",
"list",
"(",
"identifiedElements",
")",
":",
"protected",
"=",
"False",
"if",
"options",
".",
"protect_ids_noninkscape",
"and",
"not",
"id",
"[",
"-",
"1",
"]",
".",
"isdigit",
"(",
")",
":",
"protected",
"=",
"True",
"if",
"options",
".",
"protect_ids_list",
"and",
"id",
"in",
"protect_ids_list",
":",
"protected",
"=",
"True",
"if",
"options",
".",
"protect_ids_prefix",
":",
"for",
"prefix",
"in",
"protect_ids_prefixes",
":",
"if",
"id",
".",
"startswith",
"(",
"prefix",
")",
":",
"protected",
"=",
"True",
"if",
"protected",
":",
"del",
"identifiedElements",
"[",
"id",
"]",
"return",
"identifiedElements"
] | 42.583333 | 10.208333 |
def api_key(self, api_key):
"""
Sets the api_key of this GlobalSignCredentials.
Unique ID for API client (provided by GlobalSign).
:param api_key: The api_key of this GlobalSignCredentials.
:type: str
"""
if api_key is None:
raise ValueError("Invalid value for `api_key`, must not be `None`")
if api_key is not None and len(api_key) > 1000:
raise ValueError("Invalid value for `api_key`, length must be less than or equal to `1000`")
self._api_key = api_key | [
"def",
"api_key",
"(",
"self",
",",
"api_key",
")",
":",
"if",
"api_key",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `api_key`, must not be `None`\"",
")",
"if",
"api_key",
"is",
"not",
"None",
"and",
"len",
"(",
"api_key",
")",
">",
"1000",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `api_key`, length must be less than or equal to `1000`\"",
")",
"self",
".",
"_api_key",
"=",
"api_key"
] | 38.785714 | 22.5 |
def licenses(
ctx,
summary=False,
from_classifier=False,
with_system=False,
with_authors=False,
with_urls=False,
):
""" List dependency licenses.
"""
licenses_command = "pip-licenses --order=license"
report.info(ctx, "package.licenses", "listing licenses of package dependencies")
if summary:
report.debug(ctx, "package.licenses", "summarizing licenses")
licenses_command += " --summary"
if from_classifier:
report.debug(ctx, "package.licenses", "reporting from classifiers")
licenses_command += " --from-classifier"
if with_system:
report.debug(ctx, "package.licenses", "including system packages")
licenses_command += " --with-system"
if with_authors:
report.debug(ctx, "package.licenses", "including package authors")
licenses_command += " --with-authors"
if with_urls:
report.debug(ctx, "package.licenses", "including package urls")
licenses_command += " --with-urls"
ctx.run(licenses_command) | [
"def",
"licenses",
"(",
"ctx",
",",
"summary",
"=",
"False",
",",
"from_classifier",
"=",
"False",
",",
"with_system",
"=",
"False",
",",
"with_authors",
"=",
"False",
",",
"with_urls",
"=",
"False",
",",
")",
":",
"licenses_command",
"=",
"\"pip-licenses --order=license\"",
"report",
".",
"info",
"(",
"ctx",
",",
"\"package.licenses\"",
",",
"\"listing licenses of package dependencies\"",
")",
"if",
"summary",
":",
"report",
".",
"debug",
"(",
"ctx",
",",
"\"package.licenses\"",
",",
"\"summarizing licenses\"",
")",
"licenses_command",
"+=",
"\" --summary\"",
"if",
"from_classifier",
":",
"report",
".",
"debug",
"(",
"ctx",
",",
"\"package.licenses\"",
",",
"\"reporting from classifiers\"",
")",
"licenses_command",
"+=",
"\" --from-classifier\"",
"if",
"with_system",
":",
"report",
".",
"debug",
"(",
"ctx",
",",
"\"package.licenses\"",
",",
"\"including system packages\"",
")",
"licenses_command",
"+=",
"\" --with-system\"",
"if",
"with_authors",
":",
"report",
".",
"debug",
"(",
"ctx",
",",
"\"package.licenses\"",
",",
"\"including package authors\"",
")",
"licenses_command",
"+=",
"\" --with-authors\"",
"if",
"with_urls",
":",
"report",
".",
"debug",
"(",
"ctx",
",",
"\"package.licenses\"",
",",
"\"including package urls\"",
")",
"licenses_command",
"+=",
"\" --with-urls\"",
"ctx",
".",
"run",
"(",
"licenses_command",
")"
] | 34.965517 | 20.137931 |
async def wasAdded(self, node):
'''
Fire the onAdd() callbacks for node creation.
'''
for func in self.onadds:
try:
retn = func(node)
if s_coro.iscoro(retn):
await retn
except asyncio.CancelledError:
raise
except Exception:
logger.exception('error on onadd for %s' % (self.name,))
await node.snap.core.triggers.runNodeAdd(node) | [
"async",
"def",
"wasAdded",
"(",
"self",
",",
"node",
")",
":",
"for",
"func",
"in",
"self",
".",
"onadds",
":",
"try",
":",
"retn",
"=",
"func",
"(",
"node",
")",
"if",
"s_coro",
".",
"iscoro",
"(",
"retn",
")",
":",
"await",
"retn",
"except",
"asyncio",
".",
"CancelledError",
":",
"raise",
"except",
"Exception",
":",
"logger",
".",
"exception",
"(",
"'error on onadd for %s'",
"%",
"(",
"self",
".",
"name",
",",
")",
")",
"await",
"node",
".",
"snap",
".",
"core",
".",
"triggers",
".",
"runNodeAdd",
"(",
"node",
")"
] | 31.6 | 16.533333 |
def list_function_versions(FunctionName,
region=None, key=None, keyid=None, profile=None):
'''
List the versions available for the given function.
Returns list of function versions
CLI Example:
.. code-block:: yaml
versions:
- {...}
- {...}
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
vers = []
for ret in __utils__['boto3.paged_call'](conn.list_versions_by_function,
FunctionName=FunctionName):
vers.extend(ret['Versions'])
if not bool(vers):
log.warning('No versions found')
return {'Versions': vers}
except ClientError as e:
return {'error': __utils__['boto3.get_error'](e)} | [
"def",
"list_function_versions",
"(",
"FunctionName",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"try",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"vers",
"=",
"[",
"]",
"for",
"ret",
"in",
"__utils__",
"[",
"'boto3.paged_call'",
"]",
"(",
"conn",
".",
"list_versions_by_function",
",",
"FunctionName",
"=",
"FunctionName",
")",
":",
"vers",
".",
"extend",
"(",
"ret",
"[",
"'Versions'",
"]",
")",
"if",
"not",
"bool",
"(",
"vers",
")",
":",
"log",
".",
"warning",
"(",
"'No versions found'",
")",
"return",
"{",
"'Versions'",
":",
"vers",
"}",
"except",
"ClientError",
"as",
"e",
":",
"return",
"{",
"'error'",
":",
"__utils__",
"[",
"'boto3.get_error'",
"]",
"(",
"e",
")",
"}"
] | 29.62963 | 24.148148 |
def make_dict_unstructure_fn(cl, converter, **kwargs):
# type: (Type[T], Converter) -> Callable[[T], Dict[str, Any]]
"""Generate a specialized dict unstructuring function for a class."""
cl_name = cl.__name__
fn_name = "unstructure_" + cl_name
globs = {"__c_u": converter.unstructure}
lines = []
post_lines = []
attrs = cl.__attrs_attrs__
lines.append("def {}(i):".format(fn_name))
lines.append(" res = {")
for a in attrs:
attr_name = a.name
override = kwargs.pop(attr_name, _neutral)
d = a.default
if a.type is None:
# No type annotation, doing runtime dispatch.
if d is not attr.NOTHING and override.omit_if_default:
def_name = "__cattr_def_{}".format(attr_name)
if isinstance(d, attr.Factory):
globs[def_name] = d.factory
if d.takes_self:
post_lines.append(
" if i.{name} != {def_name}(i):".format(
name=attr_name, def_name=def_name
)
)
else:
post_lines.append(
" if i.{name} != {def_name}():".format(
name=attr_name, def_name=def_name
)
)
post_lines.append(
" res['{name}'] = i.{name}".format(
name=attr_name
)
)
else:
globs[def_name] = d
post_lines.append(
" if i.{name} != {def_name}:".format(
name=attr_name, def_name=def_name
)
)
post_lines.append(
" res['{name}'] = __c_u(i.{name})".format(
name=attr_name
)
)
else:
# No default or no override.
lines.append(
" '{name}': __c_u(i.{name}),".format(name=attr_name)
)
else:
# Do the dispatch here and now.
type = a.type
conv_function = converter._unstructure_func.dispatch(type)
if d is not attr.NOTHING and override.omit_if_default:
def_name = "__cattr_def_{}".format(attr_name)
if isinstance(d, attr.Factory):
# The default is computed every time.
globs[def_name] = d.factory
if d.takes_self:
post_lines.append(
" if i.{name} != {def_name}(i):".format(
name=attr_name, def_name=def_name
)
)
else:
post_lines.append(
" if i.{name} != {def_name}():".format(
name=attr_name, def_name=def_name
)
)
if conv_function == converter._unstructure_identity:
# Special case this, avoid a function call.
post_lines.append(
" res['{name}'] = i.{name}".format(
name=attr_name
)
)
else:
unstruct_fn_name = "__cattr_unstruct_{}".format(
attr_name
)
globs[unstruct_fn_name] = conv_function
post_lines.append(
" res['{name}'] = {fn}(i.{name}),".format(
name=attr_name, fn=unstruct_fn_name
)
)
else:
# Default is not a factory, but a constant.
globs[def_name] = d
post_lines.append(
" if i.{name} != {def_name}:".format(
name=attr_name, def_name=def_name
)
)
if conv_function == converter._unstructure_identity:
post_lines.append(
" res['{name}'] = i.{name}".format(
name=attr_name
)
)
else:
unstruct_fn_name = "__cattr_unstruct_{}".format(
attr_name
)
globs[unstruct_fn_name] = conv_function
post_lines.append(
" res['{name}'] = {fn}(i.{name})".format(
name=attr_name, fn=unstruct_fn_name
)
)
else:
# No omitting of defaults.
if conv_function == converter._unstructure_identity:
# Special case this, avoid a function call.
lines.append(
" '{name}': i.{name},".format(name=attr_name)
)
else:
unstruct_fn_name = "__cattr_unstruct_{}".format(attr_name)
globs[unstruct_fn_name] = conv_function
lines.append(
" '{name}': {fn}(i.{name}),".format(
name=attr_name, fn=unstruct_fn_name
)
)
lines.append(" }")
total_lines = lines + post_lines + [" return res"]
eval(compile("\n".join(total_lines), "", "exec"), globs)
fn = globs[fn_name]
return fn | [
"def",
"make_dict_unstructure_fn",
"(",
"cl",
",",
"converter",
",",
"*",
"*",
"kwargs",
")",
":",
"# type: (Type[T], Converter) -> Callable[[T], Dict[str, Any]]",
"cl_name",
"=",
"cl",
".",
"__name__",
"fn_name",
"=",
"\"unstructure_\"",
"+",
"cl_name",
"globs",
"=",
"{",
"\"__c_u\"",
":",
"converter",
".",
"unstructure",
"}",
"lines",
"=",
"[",
"]",
"post_lines",
"=",
"[",
"]",
"attrs",
"=",
"cl",
".",
"__attrs_attrs__",
"lines",
".",
"append",
"(",
"\"def {}(i):\"",
".",
"format",
"(",
"fn_name",
")",
")",
"lines",
".",
"append",
"(",
"\" res = {\"",
")",
"for",
"a",
"in",
"attrs",
":",
"attr_name",
"=",
"a",
".",
"name",
"override",
"=",
"kwargs",
".",
"pop",
"(",
"attr_name",
",",
"_neutral",
")",
"d",
"=",
"a",
".",
"default",
"if",
"a",
".",
"type",
"is",
"None",
":",
"# No type annotation, doing runtime dispatch.",
"if",
"d",
"is",
"not",
"attr",
".",
"NOTHING",
"and",
"override",
".",
"omit_if_default",
":",
"def_name",
"=",
"\"__cattr_def_{}\"",
".",
"format",
"(",
"attr_name",
")",
"if",
"isinstance",
"(",
"d",
",",
"attr",
".",
"Factory",
")",
":",
"globs",
"[",
"def_name",
"]",
"=",
"d",
".",
"factory",
"if",
"d",
".",
"takes_self",
":",
"post_lines",
".",
"append",
"(",
"\" if i.{name} != {def_name}(i):\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"def_name",
"=",
"def_name",
")",
")",
"else",
":",
"post_lines",
".",
"append",
"(",
"\" if i.{name} != {def_name}():\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"def_name",
"=",
"def_name",
")",
")",
"post_lines",
".",
"append",
"(",
"\" res['{name}'] = i.{name}\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
")",
")",
"else",
":",
"globs",
"[",
"def_name",
"]",
"=",
"d",
"post_lines",
".",
"append",
"(",
"\" if i.{name} != {def_name}:\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"def_name",
"=",
"def_name",
")",
")",
"post_lines",
".",
"append",
"(",
"\" res['{name}'] = __c_u(i.{name})\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
")",
")",
"else",
":",
"# No default or no override.",
"lines",
".",
"append",
"(",
"\" '{name}': __c_u(i.{name}),\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
")",
")",
"else",
":",
"# Do the dispatch here and now.",
"type",
"=",
"a",
".",
"type",
"conv_function",
"=",
"converter",
".",
"_unstructure_func",
".",
"dispatch",
"(",
"type",
")",
"if",
"d",
"is",
"not",
"attr",
".",
"NOTHING",
"and",
"override",
".",
"omit_if_default",
":",
"def_name",
"=",
"\"__cattr_def_{}\"",
".",
"format",
"(",
"attr_name",
")",
"if",
"isinstance",
"(",
"d",
",",
"attr",
".",
"Factory",
")",
":",
"# The default is computed every time.",
"globs",
"[",
"def_name",
"]",
"=",
"d",
".",
"factory",
"if",
"d",
".",
"takes_self",
":",
"post_lines",
".",
"append",
"(",
"\" if i.{name} != {def_name}(i):\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"def_name",
"=",
"def_name",
")",
")",
"else",
":",
"post_lines",
".",
"append",
"(",
"\" if i.{name} != {def_name}():\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"def_name",
"=",
"def_name",
")",
")",
"if",
"conv_function",
"==",
"converter",
".",
"_unstructure_identity",
":",
"# Special case this, avoid a function call.",
"post_lines",
".",
"append",
"(",
"\" res['{name}'] = i.{name}\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
")",
")",
"else",
":",
"unstruct_fn_name",
"=",
"\"__cattr_unstruct_{}\"",
".",
"format",
"(",
"attr_name",
")",
"globs",
"[",
"unstruct_fn_name",
"]",
"=",
"conv_function",
"post_lines",
".",
"append",
"(",
"\" res['{name}'] = {fn}(i.{name}),\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"fn",
"=",
"unstruct_fn_name",
")",
")",
"else",
":",
"# Default is not a factory, but a constant.",
"globs",
"[",
"def_name",
"]",
"=",
"d",
"post_lines",
".",
"append",
"(",
"\" if i.{name} != {def_name}:\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"def_name",
"=",
"def_name",
")",
")",
"if",
"conv_function",
"==",
"converter",
".",
"_unstructure_identity",
":",
"post_lines",
".",
"append",
"(",
"\" res['{name}'] = i.{name}\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
")",
")",
"else",
":",
"unstruct_fn_name",
"=",
"\"__cattr_unstruct_{}\"",
".",
"format",
"(",
"attr_name",
")",
"globs",
"[",
"unstruct_fn_name",
"]",
"=",
"conv_function",
"post_lines",
".",
"append",
"(",
"\" res['{name}'] = {fn}(i.{name})\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"fn",
"=",
"unstruct_fn_name",
")",
")",
"else",
":",
"# No omitting of defaults.",
"if",
"conv_function",
"==",
"converter",
".",
"_unstructure_identity",
":",
"# Special case this, avoid a function call.",
"lines",
".",
"append",
"(",
"\" '{name}': i.{name},\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
")",
")",
"else",
":",
"unstruct_fn_name",
"=",
"\"__cattr_unstruct_{}\"",
".",
"format",
"(",
"attr_name",
")",
"globs",
"[",
"unstruct_fn_name",
"]",
"=",
"conv_function",
"lines",
".",
"append",
"(",
"\" '{name}': {fn}(i.{name}),\"",
".",
"format",
"(",
"name",
"=",
"attr_name",
",",
"fn",
"=",
"unstruct_fn_name",
")",
")",
"lines",
".",
"append",
"(",
"\" }\"",
")",
"total_lines",
"=",
"lines",
"+",
"post_lines",
"+",
"[",
"\" return res\"",
"]",
"eval",
"(",
"compile",
"(",
"\"\\n\"",
".",
"join",
"(",
"total_lines",
")",
",",
"\"\"",
",",
"\"exec\"",
")",
",",
"globs",
")",
"fn",
"=",
"globs",
"[",
"fn_name",
"]",
"return",
"fn"
] | 40.609589 | 17.835616 |
def _do_cb(self, cb, error_cb, *args, **kw):
"""
Called internally by callback(). Does cb and error_cb selection.
"""
try:
res = self.work(*args, **kw)
except Exception as e:
if error_cb is None:
show_err()
elif error_cb:
error_cb(e)
else:
# Success, let's call away!
cb(res) | [
"def",
"_do_cb",
"(",
"self",
",",
"cb",
",",
"error_cb",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"try",
":",
"res",
"=",
"self",
".",
"work",
"(",
"*",
"args",
",",
"*",
"*",
"kw",
")",
"except",
"Exception",
"as",
"e",
":",
"if",
"error_cb",
"is",
"None",
":",
"show_err",
"(",
")",
"elif",
"error_cb",
":",
"error_cb",
"(",
"e",
")",
"else",
":",
"# Success, let's call away!",
"cb",
"(",
"res",
")"
] | 28.714286 | 12.285714 |
def assign_operation_ids(spec, operids):
""" used to assign caller provided operationId values into a spec """
empty_dict = {}
for path_name, path_data in six.iteritems(spec['paths']):
for method, method_data in six.iteritems(path_data):
oper_id = operids.get(path_name, empty_dict).get(method)
if oper_id:
method_data['operationId'] = oper_id | [
"def",
"assign_operation_ids",
"(",
"spec",
",",
"operids",
")",
":",
"empty_dict",
"=",
"{",
"}",
"for",
"path_name",
",",
"path_data",
"in",
"six",
".",
"iteritems",
"(",
"spec",
"[",
"'paths'",
"]",
")",
":",
"for",
"method",
",",
"method_data",
"in",
"six",
".",
"iteritems",
"(",
"path_data",
")",
":",
"oper_id",
"=",
"operids",
".",
"get",
"(",
"path_name",
",",
"empty_dict",
")",
".",
"get",
"(",
"method",
")",
"if",
"oper_id",
":",
"method_data",
"[",
"'operationId'",
"]",
"=",
"oper_id"
] | 39.6 | 19.9 |
def _get_int_removals_helper(self, spec_amts_oxi, oxid_el, oxid_els, numa):
"""
This is a helper method for get_removals_int_oxid!
Args:
spec_amts_oxi - a dict of species to their amounts in the structure
oxid_el - the element to oxidize
oxid_els - the full list of elements that might be oxidized
numa - a running set of numbers of A cation at integer oxidation steps
Returns:
a set of numbers A; steps for for oxidizing oxid_el first, then the other oxid_els in this list
"""
# If Mn is the oxid_el, we have a mixture of Mn2+, Mn3+, determine the minimum oxidation state for Mn
#this is the state we want to oxidize!
oxid_old = min([spec.oxi_state for spec in spec_amts_oxi if spec.symbol == oxid_el.symbol])
oxid_new = math.floor(oxid_old + 1)
#if this is not a valid solution, break out of here and don't add anything to the list
if oxid_new > oxid_el.max_oxidation_state:
return numa
#update the spec_amts_oxi map to reflect that the oxidation took place
spec_old = Specie(oxid_el.symbol, oxid_old)
spec_new = Specie(oxid_el.symbol, oxid_new)
specamt = spec_amts_oxi[spec_old]
spec_amts_oxi = {sp: amt for sp, amt in spec_amts_oxi.items() if sp != spec_old}
spec_amts_oxi[spec_new] = specamt
spec_amts_oxi = Composition(spec_amts_oxi)
#determine the amount of cation A in the structure needed for charge balance and add it to the list
oxi_noA = sum([spec.oxi_state * spec_amts_oxi[spec] for spec in spec_amts_oxi if
spec.symbol not in self.cation.symbol])
a = max(0, -oxi_noA / self.cation_charge)
numa = numa.union({a})
#recursively try the other oxidation states
if a == 0:
return numa
else:
for oxid_el in oxid_els:
numa = numa.union(
self._get_int_removals_helper(spec_amts_oxi.copy(), oxid_el, oxid_els, numa))
return numa | [
"def",
"_get_int_removals_helper",
"(",
"self",
",",
"spec_amts_oxi",
",",
"oxid_el",
",",
"oxid_els",
",",
"numa",
")",
":",
"# If Mn is the oxid_el, we have a mixture of Mn2+, Mn3+, determine the minimum oxidation state for Mn",
"#this is the state we want to oxidize!",
"oxid_old",
"=",
"min",
"(",
"[",
"spec",
".",
"oxi_state",
"for",
"spec",
"in",
"spec_amts_oxi",
"if",
"spec",
".",
"symbol",
"==",
"oxid_el",
".",
"symbol",
"]",
")",
"oxid_new",
"=",
"math",
".",
"floor",
"(",
"oxid_old",
"+",
"1",
")",
"#if this is not a valid solution, break out of here and don't add anything to the list",
"if",
"oxid_new",
">",
"oxid_el",
".",
"max_oxidation_state",
":",
"return",
"numa",
"#update the spec_amts_oxi map to reflect that the oxidation took place",
"spec_old",
"=",
"Specie",
"(",
"oxid_el",
".",
"symbol",
",",
"oxid_old",
")",
"spec_new",
"=",
"Specie",
"(",
"oxid_el",
".",
"symbol",
",",
"oxid_new",
")",
"specamt",
"=",
"spec_amts_oxi",
"[",
"spec_old",
"]",
"spec_amts_oxi",
"=",
"{",
"sp",
":",
"amt",
"for",
"sp",
",",
"amt",
"in",
"spec_amts_oxi",
".",
"items",
"(",
")",
"if",
"sp",
"!=",
"spec_old",
"}",
"spec_amts_oxi",
"[",
"spec_new",
"]",
"=",
"specamt",
"spec_amts_oxi",
"=",
"Composition",
"(",
"spec_amts_oxi",
")",
"#determine the amount of cation A in the structure needed for charge balance and add it to the list",
"oxi_noA",
"=",
"sum",
"(",
"[",
"spec",
".",
"oxi_state",
"*",
"spec_amts_oxi",
"[",
"spec",
"]",
"for",
"spec",
"in",
"spec_amts_oxi",
"if",
"spec",
".",
"symbol",
"not",
"in",
"self",
".",
"cation",
".",
"symbol",
"]",
")",
"a",
"=",
"max",
"(",
"0",
",",
"-",
"oxi_noA",
"/",
"self",
".",
"cation_charge",
")",
"numa",
"=",
"numa",
".",
"union",
"(",
"{",
"a",
"}",
")",
"#recursively try the other oxidation states",
"if",
"a",
"==",
"0",
":",
"return",
"numa",
"else",
":",
"for",
"oxid_el",
"in",
"oxid_els",
":",
"numa",
"=",
"numa",
".",
"union",
"(",
"self",
".",
"_get_int_removals_helper",
"(",
"spec_amts_oxi",
".",
"copy",
"(",
")",
",",
"oxid_el",
",",
"oxid_els",
",",
"numa",
")",
")",
"return",
"numa"
] | 47.953488 | 26.55814 |
def get_measurement_metadata(self, fields, ids=None, noneval=nan,
                             output_format='DataFrame'):
    """
    Get the metadata fields of specified measurements (all if None given).

    Parameters
    ----------
    fields : str | iterable of str
        Names of metadata fields to be returned.
    ids : hashable | iterable of hashables | None
        Keys of measurements for which metadata will be returned.
        If None is given return metadata of all measurements.
    noneval : obj
        Value returned if applyto is 'data' but no data is available.
    output_format : 'DataFrame' | 'dict'
        'DataFrame' : return DataFrame,
        'dict' : return dictionary.

    Returns
    -------
    Measurement metadata in specified output_format.

    Raises
    ------
    ValueError
        If ``output_format`` is neither 'dict' nor 'DataFrame'.
    """
    fields = to_list(fields)
    func = lambda x: x.get_meta_fields(fields)
    meta_d = self.apply(func, ids=ids, applyto='measurement',
                        noneval=noneval, output_format='dict')
    # BUG FIX: the original compared strings with `is`, which tests object
    # identity and only works by accident of CPython string interning.
    if output_format == 'dict':
        return meta_d
    elif output_format == 'DataFrame':
        from pandas import DataFrame as DF
        meta_df = DF(meta_d, index=fields)
        return meta_df
    else:
        msg = ("The output_format must be either 'dict' or 'DataFrame'. " +
               "Encountered unsupported value %s." % repr(output_format))
        # ValueError is more specific than the original bare Exception and
        # is still caught by any caller handling Exception.
        raise ValueError(msg)
"def",
"get_measurement_metadata",
"(",
"self",
",",
"fields",
",",
"ids",
"=",
"None",
",",
"noneval",
"=",
"nan",
",",
"output_format",
"=",
"'DataFrame'",
")",
":",
"fields",
"=",
"to_list",
"(",
"fields",
")",
"func",
"=",
"lambda",
"x",
":",
"x",
".",
"get_meta_fields",
"(",
"fields",
")",
"meta_d",
"=",
"self",
".",
"apply",
"(",
"func",
",",
"ids",
"=",
"ids",
",",
"applyto",
"=",
"'measurement'",
",",
"noneval",
"=",
"noneval",
",",
"output_format",
"=",
"'dict'",
")",
"if",
"output_format",
"is",
"'dict'",
":",
"return",
"meta_d",
"elif",
"output_format",
"is",
"'DataFrame'",
":",
"from",
"pandas",
"import",
"DataFrame",
"as",
"DF",
"meta_df",
"=",
"DF",
"(",
"meta_d",
",",
"index",
"=",
"fields",
")",
"return",
"meta_df",
"else",
":",
"msg",
"=",
"(",
"\"The output_format must be either 'dict' or 'DataFrame'. \"",
"+",
"\"Encountered unsupported value %s.\"",
"%",
"repr",
"(",
"output_format",
")",
")",
"raise",
"Exception",
"(",
"msg",
")"
] | 41.194444 | 17.916667 |
def serialize(expr):
    """Serialize input expr into a parsable value.
    :rtype: str"""
    # Strings pass through untouched; anything else non-None is tagged
    # as an inline python expression.
    if isinstance(expr, string_types):
        return expr
    if expr is not None:
        return '=py:{0}'.format(expr)
    return None
"def",
"serialize",
"(",
"expr",
")",
":",
"result",
"=",
"None",
"if",
"isinstance",
"(",
"expr",
",",
"string_types",
")",
":",
"result",
"=",
"expr",
"elif",
"expr",
"is",
"not",
"None",
":",
"result",
"=",
"'=py:{0}'",
".",
"format",
"(",
"expr",
")",
"return",
"result"
] | 17.571429 | 21.571429 |
def open_allowed(self) -> bool:
    """Door can be opened unattended."""
    # Scan the device attribute list for the unattended-open flag; the
    # device reports it as the string "1" when enabled.
    attributes = self._device_json.get('Attributes', [])
    flag = next(
        attr['Value'] for attr in attributes
        if attr.get('AttributeDisplayName') == 'isunattendedopenallowed')
    return flag == "1"
"def",
"open_allowed",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"next",
"(",
"attr",
"[",
"'Value'",
"]",
"for",
"attr",
"in",
"self",
".",
"_device_json",
".",
"get",
"(",
"'Attributes'",
",",
"[",
"]",
")",
"if",
"attr",
".",
"get",
"(",
"'AttributeDisplayName'",
")",
"==",
"'isunattendedopenallowed'",
")",
"==",
"\"1\""
] | 44.666667 | 21 |
def start(self):
    """
    Launch the process and start processing the DAG.
    """
    # Give each processor a unique, identifiable name.
    process_name = "DagFileProcessor{}".format(self._instance_id)
    self._process = DagFileProcessor._launch_process(
        self._result_queue,
        self.file_path,
        self._pickle_dags,
        self._dag_id_white_list,
        process_name,
        self._zombies)
    # Record when processing began so elapsed time can be reported.
    self._start_time = timezone.utcnow()
"def",
"start",
"(",
"self",
")",
":",
"self",
".",
"_process",
"=",
"DagFileProcessor",
".",
"_launch_process",
"(",
"self",
".",
"_result_queue",
",",
"self",
".",
"file_path",
",",
"self",
".",
"_pickle_dags",
",",
"self",
".",
"_dag_id_white_list",
",",
"\"DagFileProcessor{}\"",
".",
"format",
"(",
"self",
".",
"_instance_id",
")",
",",
"self",
".",
"_zombies",
")",
"self",
".",
"_start_time",
"=",
"timezone",
".",
"utcnow",
"(",
")"
] | 33.666667 | 10.833333 |
def get_inclusion_states(self, transactions, tips):
    # type: (Iterable[TransactionHash], Iterable[TransactionHash]) -> dict
    """
    Get the inclusion states of a set of transactions, i.e. whether each
    transaction was accepted and confirmed by the network. Multiple tips
    (and thus milestones) may be searched to obtain past inclusion states.

    :param transactions:
        List of transactions you want to get the inclusion state for.

    :param tips:
        List of tips (including milestones) you want to search for the
        inclusion state.

    References:
        - https://iota.readme.io/docs/getinclusionstates
    """
    command = core.GetInclusionStatesCommand(self.adapter)
    return command(transactions=transactions, tips=tips)
"def",
"get_inclusion_states",
"(",
"self",
",",
"transactions",
",",
"tips",
")",
":",
"# type: (Iterable[TransactionHash], Iterable[TransactionHash]) -> dict",
"return",
"core",
".",
"GetInclusionStatesCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"transactions",
"=",
"transactions",
",",
"tips",
"=",
"tips",
",",
")"
] | 35.875 | 22.958333 |
def _logfile_sigterm_handler(*args):
    # type: (...) -> None
    """Handle exit signals and write out a log file.

    Args:
        *args: Standard signal-handler arguments ``(signum, frame)``;
            only the signal number is used, and it may be omitted.

    Raises:
        SystemExit: Contains the signal number as the return code.
    """
    logging.error('Received SIGTERM.')
    write_logfile()
    print('Received signal. Please see the log file for more information.',
          file=sys.stderr)
    # BUG FIX: the original called sys.exit(signal), passing the `signal`
    # *module* (prints the module repr and exits with status 1). Exit with
    # the actual signal number received instead.
    sys.exit(args[0] if args else signal.SIGTERM)
"def",
"_logfile_sigterm_handler",
"(",
"*",
"_",
")",
":",
"# type: (...) -> None",
"logging",
".",
"error",
"(",
"'Received SIGTERM.'",
")",
"write_logfile",
"(",
")",
"print",
"(",
"'Received signal. Please see the log file for more information.'",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"signal",
")"
] | 30.416667 | 16.833333 |
def p_call_expr(self, p):
    """call_expr : member_expr arguments
        | call_expr arguments
        | call_expr LBRACKET expr RBRACKET
        | call_expr PERIOD identifier
    """
    # NOTE: the docstring above is the ply grammar rule -- do not edit it.
    rule_len = len(p)
    if rule_len == 3:
        # <expr> arguments  ->  function call
        p[0] = ast.FunctionCall(p[1], p[2])
    elif rule_len == 4:
        # call_expr PERIOD identifier  ->  dot accessor
        p[0] = ast.DotAccessor(p[1], p[3])
    else:
        # call_expr LBRACKET expr RBRACKET  ->  bracket accessor
        p[0] = ast.BracketAccessor(p[1], p[3])
"def",
"p_call_expr",
"(",
"self",
",",
"p",
")",
":",
"if",
"len",
"(",
"p",
")",
"==",
"3",
":",
"p",
"[",
"0",
"]",
"=",
"ast",
".",
"FunctionCall",
"(",
"p",
"[",
"1",
"]",
",",
"p",
"[",
"2",
"]",
")",
"elif",
"len",
"(",
"p",
")",
"==",
"4",
":",
"p",
"[",
"0",
"]",
"=",
"ast",
".",
"DotAccessor",
"(",
"p",
"[",
"1",
"]",
",",
"p",
"[",
"3",
"]",
")",
"else",
":",
"p",
"[",
"0",
"]",
"=",
"ast",
".",
"BracketAccessor",
"(",
"p",
"[",
"1",
"]",
",",
"p",
"[",
"3",
"]",
")"
] | 35.916667 | 10.333333 |
def create_signature(secret, value, digestmod='sha256', encoding='utf-8'):
    """Create an HMAC signature of *value* keyed with *secret*.

    :param secret: Key material (str or bytes).
    :param value: Payload to sign (str or bytes).
    :param digestmod: Hash name (str) or hash constructor.
    :param encoding: Encoding applied to str inputs.
    :return: Hex digest string.
    """
    key = secret.encode(encoding) if isinstance(secret, str) else secret
    payload = value.encode(encoding) if isinstance(value, str) else value
    if isinstance(digestmod, str):
        # NOTE(review): unknown hash names silently fall back to SHA-1 --
        # confirm callers rely on this before tightening.
        digestmod = getattr(hashlib, digestmod, hashlib.sha1)
    signer = hmac.new(key, digestmod=digestmod)
    signer.update(payload)
    return signer.hexdigest()
return hm.hexdigest() | [
"def",
"create_signature",
"(",
"secret",
",",
"value",
",",
"digestmod",
"=",
"'sha256'",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"if",
"isinstance",
"(",
"secret",
",",
"str",
")",
":",
"secret",
"=",
"secret",
".",
"encode",
"(",
"encoding",
")",
"if",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"value",
"=",
"value",
".",
"encode",
"(",
"encoding",
")",
"if",
"isinstance",
"(",
"digestmod",
",",
"str",
")",
":",
"digestmod",
"=",
"getattr",
"(",
"hashlib",
",",
"digestmod",
",",
"hashlib",
".",
"sha1",
")",
"hm",
"=",
"hmac",
".",
"new",
"(",
"secret",
",",
"digestmod",
"=",
"digestmod",
")",
"hm",
".",
"update",
"(",
"value",
")",
"return",
"hm",
".",
"hexdigest",
"(",
")"
] | 32.5 | 17.357143 |
def snapshot(self, mode):
    '''
    Take a snapshot of the system.

    :param mode: snapshot mode; not read by this method
        (NOTE(review): confirm whether callers expect it to matter).
    '''
    # Prepare the working environment for the snapshot.
    self._init_env()
    # Persist only the configuration packages that changed since last run.
    self._save_cfg_packages(self._get_changed_cfg_pkgs(self._get_cfg_pkgs()))
    # Scan for payload files and store them.
    self._save_payload(*self._scan_payload())
"def",
"snapshot",
"(",
"self",
",",
"mode",
")",
":",
"self",
".",
"_init_env",
"(",
")",
"self",
".",
"_save_cfg_packages",
"(",
"self",
".",
"_get_changed_cfg_pkgs",
"(",
"self",
".",
"_get_cfg_pkgs",
"(",
")",
")",
")",
"self",
".",
"_save_payload",
"(",
"*",
"self",
".",
"_scan_payload",
"(",
")",
")"
] | 29.875 | 22.625 |
def make_from_catalogue(cls, catalogue, spacing, dilate):
    '''
    Defines the grid on the basis of the catalogue
    '''
    grid = cls()
    bbox = get_catalogue_bounding_polygon(catalogue)
    if dilate > 0:
        bbox = bbox.dilate(dilate)
    max_depth = np.max(catalogue.data['depth'])
    # Horizontal cells use the requested spacing; the depth axis is a
    # single bin spanning the whole catalogue.
    grid.update({'xmin': np.min(bbox.lons),
                 'xmax': np.max(bbox.lons),
                 'xspc': spacing,
                 'ymin': np.min(bbox.lats),
                 'ymax': np.max(bbox.lats),
                 'yspc': spacing,
                 'zmin': 0.,
                 'zmax': max_depth,
                 'zspc': max_depth})
    # Degenerate case: all depths zero -- force a unit-depth bin.
    if grid['zmin'] == grid['zmax'] == grid['zspc'] == 0:
        grid['zmax'] = grid['zspc'] = 1
    return grid
"def",
"make_from_catalogue",
"(",
"cls",
",",
"catalogue",
",",
"spacing",
",",
"dilate",
")",
":",
"new",
"=",
"cls",
"(",
")",
"cat_bbox",
"=",
"get_catalogue_bounding_polygon",
"(",
"catalogue",
")",
"if",
"dilate",
">",
"0",
":",
"cat_bbox",
"=",
"cat_bbox",
".",
"dilate",
"(",
"dilate",
")",
"# Define Grid spacing",
"new",
".",
"update",
"(",
"{",
"'xmin'",
":",
"np",
".",
"min",
"(",
"cat_bbox",
".",
"lons",
")",
",",
"'xmax'",
":",
"np",
".",
"max",
"(",
"cat_bbox",
".",
"lons",
")",
",",
"'xspc'",
":",
"spacing",
",",
"'ymin'",
":",
"np",
".",
"min",
"(",
"cat_bbox",
".",
"lats",
")",
",",
"'ymax'",
":",
"np",
".",
"max",
"(",
"cat_bbox",
".",
"lats",
")",
",",
"'yspc'",
":",
"spacing",
",",
"'zmin'",
":",
"0.",
",",
"'zmax'",
":",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
")",
",",
"'zspc'",
":",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'depth'",
"]",
")",
"}",
")",
"if",
"new",
"[",
"'zmin'",
"]",
"==",
"new",
"[",
"'zmax'",
"]",
"==",
"new",
"[",
"'zspc'",
"]",
"==",
"0",
":",
"new",
"[",
"'zmax'",
"]",
"=",
"new",
"[",
"'zspc'",
"]",
"=",
"1",
"return",
"new"
] | 34 | 18.56 |
def digital_write(self, pin, value):
    """
    Set the specified pin to the specified value.

    :param pin: pin number
    :param value: pin value
    :return: No return value
    """
    # Digital messages are addressed per 8-pin port, so the command byte
    # depends on which port the pin belongs to.
    port = pin // 8
    bit = 1 << (pin % 8)
    if value == 1:
        self.digital_output_port_pins[port] |= bit
    else:
        self.digital_output_port_pins[port] &= ~bit
    port_state = self.digital_output_port_pins[port]
    # The payload is the 14-bit port state split into two 7-bit bytes.
    self._command_handler.send_command(
        (self._command_handler.DIGITAL_MESSAGE + port,
         port_state & 0x7f,
         (port_state >> 7) & 0x7f))
"def",
"digital_write",
"(",
"self",
",",
"pin",
",",
"value",
")",
":",
"# The command value is not a fixed value, but needs to be calculated using the",
"# pin's port number",
"#",
"#",
"port",
"=",
"pin",
"//",
"8",
"calculated_command",
"=",
"self",
".",
"_command_handler",
".",
"DIGITAL_MESSAGE",
"+",
"port",
"mask",
"=",
"1",
"<<",
"(",
"pin",
"%",
"8",
")",
"# Calculate the value for the pin's position in the port mask",
"if",
"value",
"==",
"1",
":",
"self",
".",
"digital_output_port_pins",
"[",
"port",
"]",
"|=",
"mask",
"else",
":",
"self",
".",
"digital_output_port_pins",
"[",
"port",
"]",
"&=",
"~",
"mask",
"# Assemble the command",
"command",
"=",
"(",
"calculated_command",
",",
"self",
".",
"digital_output_port_pins",
"[",
"port",
"]",
"&",
"0x7f",
",",
"(",
"self",
".",
"digital_output_port_pins",
"[",
"port",
"]",
">>",
"7",
")",
"&",
"0x7f",
")",
"self",
".",
"_command_handler",
".",
"send_command",
"(",
"command",
")"
] | 30.233333 | 23.433333 |
def p_notminus_assignment(self, t):
    '''notminus_assignment : IDENT EQ NOTMINUS'''
    # NOTE: the docstring above is the ply grammar rule -- do not edit it.
    # Record an observation labelling this identifier as "notMinus".
    gen_term = 'gen("{0}")'.format(t[1])
    self.accu.add(Term('obs_vlabel', [self.name, gen_term, "notMinus"]))
"def",
"p_notminus_assignment",
"(",
"self",
",",
"t",
")",
":",
"self",
".",
"accu",
".",
"add",
"(",
"Term",
"(",
"'obs_vlabel'",
",",
"[",
"self",
".",
"name",
",",
"\"gen(\\\"\"",
"+",
"t",
"[",
"1",
"]",
"+",
"\"\\\")\"",
",",
"\"notMinus\"",
"]",
")",
")"
] | 55 | 18.333333 |
def get_df(self, data_file):
    """
    Read a historical financial data file and return it as a pandas
    DataFrame. Files look like ``gpcw20171231.zip``; see
    https://github.com/rainx/pytdx/issues/133 for field meanings.

    :param data_file: path to the data file; either a ``.zip`` archive
        or the extracted ``.dat`` file
    :return: pandas DataFrame with the historical financial data
    """
    parser = QAHistoryFinancialCrawler()
    with open(data_file, 'rb') as handle:
        records = parser.parse(download_file=handle)
    return parser.to_df(records)
"def",
"get_df",
"(",
"self",
",",
"data_file",
")",
":",
"crawler",
"=",
"QAHistoryFinancialCrawler",
"(",
")",
"with",
"open",
"(",
"data_file",
",",
"'rb'",
")",
"as",
"df",
":",
"data",
"=",
"crawler",
".",
"parse",
"(",
"download_file",
"=",
"df",
")",
"return",
"crawler",
".",
"to_df",
"(",
"data",
")"
] | 27.375 | 18.25 |
def skos_related(rdf):
    """Make sure that skos:related is stated in both directions (S23).

    :param rdf: rdflib Graph to modify in place.
    """
    # Materialize the pairs before mutating: adding triples while
    # iterating the live subject_objects() generator can invalidate the
    # iteration in some rdflib stores.
    for s, o in list(rdf.subject_objects(SKOS.related)):
        rdf.add((o, SKOS.related, s))
"def",
"skos_related",
"(",
"rdf",
")",
":",
"for",
"s",
",",
"o",
"in",
"rdf",
".",
"subject_objects",
"(",
"SKOS",
".",
"related",
")",
":",
"rdf",
".",
"add",
"(",
"(",
"o",
",",
"SKOS",
".",
"related",
",",
"s",
")",
")"
] | 45.5 | 7.75 |
def set_xy_labels(units, kpc_per_arcsec, xlabelsize, ylabelsize, xyticksize):
    """Set the x and y labels of the figure, and set the fontsize of those labels.

    The x and y labels are always the distance scales, thus the labels are either arc-seconds or kpc and depend on the \
    units the figure is plotted in.

    Parameters
    -----------
    units : str
        The units of the y / x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc').
    kpc_per_arcsec : float
        The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc.
    xlabelsize : int
        The fontsize of the x axes label.
    ylabelsize : int
        The fontsize of the y axes label.
    xyticksize : int
        The font size of the x and y ticks on the figure axes.

    Raises
    ------
    exc.PlottingException
        If ``units`` is not a recognised unit string.
    """
    # BUG FIX: the original tested `units in 'arcsec'` / `units in 'kpc'`,
    # which is substring containment -- stray values such as 'a' or 'c'
    # silently matched. Compare for equality instead.
    if units == 'arcsec' or kpc_per_arcsec is None:
        plt.xlabel('x (arcsec)', fontsize=xlabelsize)
        plt.ylabel('y (arcsec)', fontsize=ylabelsize)
    elif units == 'kpc':
        plt.xlabel('x (kpc)', fontsize=xlabelsize)
        plt.ylabel('y (kpc)', fontsize=ylabelsize)
    else:
        raise exc.PlottingException('The units supplied to the plotted are not a valid string (must be pixels | '
                                    'arcsec | kpc)')
    plt.tick_params(labelsize=xyticksize)
"def",
"set_xy_labels",
"(",
"units",
",",
"kpc_per_arcsec",
",",
"xlabelsize",
",",
"ylabelsize",
",",
"xyticksize",
")",
":",
"if",
"units",
"in",
"'arcsec'",
"or",
"kpc_per_arcsec",
"is",
"None",
":",
"plt",
".",
"xlabel",
"(",
"'x (arcsec)'",
",",
"fontsize",
"=",
"xlabelsize",
")",
"plt",
".",
"ylabel",
"(",
"'y (arcsec)'",
",",
"fontsize",
"=",
"ylabelsize",
")",
"elif",
"units",
"in",
"'kpc'",
":",
"plt",
".",
"xlabel",
"(",
"'x (kpc)'",
",",
"fontsize",
"=",
"xlabelsize",
")",
"plt",
".",
"ylabel",
"(",
"'y (kpc)'",
",",
"fontsize",
"=",
"ylabelsize",
")",
"else",
":",
"raise",
"exc",
".",
"PlottingException",
"(",
"'The units supplied to the plotted are not a valid string (must be pixels | '",
"'arcsec | kpc)'",
")",
"plt",
".",
"tick_params",
"(",
"labelsize",
"=",
"xyticksize",
")"
] | 38 | 26.088235 |
def _add_or_remove_flag(self, flag, add):
"""
Add the given `flag` if `add` is True, remove it otherwise.
"""
meth = self.add_flag if add else self.remove_flag
meth(flag) | [
"def",
"_add_or_remove_flag",
"(",
"self",
",",
"flag",
",",
"add",
")",
":",
"meth",
"=",
"self",
".",
"add_flag",
"if",
"add",
"else",
"self",
".",
"remove_flag",
"meth",
"(",
"flag",
")"
] | 34.166667 | 11.166667 |
def prepare_destruction(self):
    """Prepares the model for destruction

    Unregister itself as observer from the state machine and the root state
    """
    if self.state_machine is None:
        # NOTE(review): execution continues after this log line even though
        # state_machine is None -- the modification_lock() call below would
        # then fail; confirm whether an early return is intended.
        logger.verbose("Multiple calls of prepare destruction for {0}".format(self))
    # Announce destruction to observers before tearing anything down.
    self.destruction_signal.emit()
    if self.history is not None:
        self.history.prepare_destruction()
    if self.auto_backup is not None:
        self.auto_backup.prepare_destruction()
    try:
        self.unregister_observer(self)
        self.root_state.unregister_observer(self)
    except KeyError:  # Might happen if the observer was already unregistered
        pass
    # Tear down the state tree while holding the modification lock so no
    # concurrent modification interleaves with destruction.
    with self.state_machine.modification_lock():
        self.root_state.prepare_destruction()
    # Drop references so the objects can be garbage collected.
    self.root_state = None
    self.state_machine = None
    super(StateMachineModel, self).prepare_destruction()
"def",
"prepare_destruction",
"(",
"self",
")",
":",
"if",
"self",
".",
"state_machine",
"is",
"None",
":",
"logger",
".",
"verbose",
"(",
"\"Multiple calls of prepare destruction for {0}\"",
".",
"format",
"(",
"self",
")",
")",
"self",
".",
"destruction_signal",
".",
"emit",
"(",
")",
"if",
"self",
".",
"history",
"is",
"not",
"None",
":",
"self",
".",
"history",
".",
"prepare_destruction",
"(",
")",
"if",
"self",
".",
"auto_backup",
"is",
"not",
"None",
":",
"self",
".",
"auto_backup",
".",
"prepare_destruction",
"(",
")",
"try",
":",
"self",
".",
"unregister_observer",
"(",
"self",
")",
"self",
".",
"root_state",
".",
"unregister_observer",
"(",
"self",
")",
"except",
"KeyError",
":",
"# Might happen if the observer was already unregistered",
"pass",
"with",
"self",
".",
"state_machine",
".",
"modification_lock",
"(",
")",
":",
"self",
".",
"root_state",
".",
"prepare_destruction",
"(",
")",
"self",
".",
"root_state",
"=",
"None",
"self",
".",
"state_machine",
"=",
"None",
"super",
"(",
"StateMachineModel",
",",
"self",
")",
".",
"prepare_destruction",
"(",
")"
] | 42.227273 | 14.863636 |
def errata_applicability(self, synchronous=True, **kwargs):
    """Force regenerate errata applicability

    :param synchronous: What should happen if the server returns an HTTP
        202 (accepted) status code? Wait for the task to complete if
        ``True``. Immediately return the server's response otherwise.
    :param kwargs: Arguments to pass to requests.
    :returns: The server's response, with all content decoded.
    :raises: ``requests.exceptions.HTTPError`` If the server responds with
        an HTTP 4XX or 5XX message.
    """
    # Work on a copy so the caller's kwargs are left untouched.
    request_kwargs = kwargs.copy()
    request_kwargs.update(self._server_config.get_client_kwargs())
    response = client.put(self.path('errata/applicability'), **request_kwargs)
    return _handle_response(response, self._server_config, synchronous)
"def",
"errata_applicability",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"get_client_kwargs",
"(",
")",
")",
"response",
"=",
"client",
".",
"put",
"(",
"self",
".",
"path",
"(",
"'errata/applicability'",
")",
",",
"*",
"*",
"kwargs",
")",
"return",
"_handle_response",
"(",
"response",
",",
"self",
".",
"_server_config",
",",
"synchronous",
")"
] | 52.9375 | 24.375 |
def get_xml_attr(source, name, path=None):
    """Get the XML attribute with *name* from *source*. If *path* is not
    None, the attribute is read from the child element indicated by
    *path* instead.
    """
    if path is not None:
        # Resolve the child element first, then read its attribute.
        source = get_xml_child(source, path)
    return source.attrib[name]
"def",
"get_xml_attr",
"(",
"source",
",",
"name",
",",
"path",
"=",
"None",
")",
":",
"if",
"path",
"is",
"None",
":",
"return",
"source",
".",
"attrib",
"[",
"name",
"]",
"else",
":",
"return",
"get_xml_attr",
"(",
"get_xml_child",
"(",
"source",
",",
"path",
")",
",",
"name",
")"
] | 39.125 | 13 |
def case(*, to, **kwargs):
    """Converts an identifier from one case type to another.

    An identifier is an ASCII string of letters, digits and underscores,
    not starting with a digit. Supported case types are camelCase,
    PascalCase, snake_case and CONSTANT_CASE, named camel, pascal, snake
    and constant. The input is passed as a keyword argument named after
    its case type; the output type is the string given in ``to``.
    Input that does not conform to its declared case type (underscores in
    camel/pascal strings, double__underscores, ...) may produce surprising
    results, although things like snaKe_casE or CONStaNT_CASe generally
    work."""
    if len(kwargs) != 1:
        raise ValueError("expect exactly one source string argument")
    [(typ, string)] = kwargs.items()
    types = {'pascal', 'camel', 'snake', 'constant'}
    if typ not in types:
        raise ValueError(f"source string keyword must be one of {types}")
    if to not in types:
        raise ValueError(f"\"to\" argument must be one of {types}")

    def _split_words(source_type, s):
        # Tokenize the identifier according to its declared case family.
        if source_type in ('pascal', 'camel'):
            return [m.group(0) for m in re.finditer(r'[A-Z][a-z0-9]*|[a-z0-9]+', s)]
        return [m.group(2) for m in re.finditer(r'(^|_)([A-Za-z0-9]+)', s)]

    tokens = _split_words(typ, string)
    if to == 'pascal':
        return ''.join(tok.capitalize() for tok in tokens)
    if to == 'camel':
        return ''.join(tok.lower() if i == 0 else tok.capitalize()
                       for i, tok in enumerate(tokens))
    if to == 'snake':
        return '_'.join(tok.lower() for tok in tokens)
    return '_'.join(tok.upper() for tok in tokens)
"def",
"case",
"(",
"*",
",",
"to",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"len",
"(",
"kwargs",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"expect exactly one source string argument\"",
")",
"[",
"(",
"typ",
",",
"string",
")",
"]",
"=",
"kwargs",
".",
"items",
"(",
")",
"types",
"=",
"{",
"'pascal'",
",",
"'camel'",
",",
"'snake'",
",",
"'constant'",
"}",
"if",
"typ",
"not",
"in",
"types",
":",
"raise",
"ValueError",
"(",
"f\"source string keyword must be one of {types}\"",
")",
"if",
"to",
"not",
"in",
"types",
":",
"raise",
"ValueError",
"(",
"f\"\\\"to\\\" argument must be one of {types}\"",
")",
"def",
"pascal_iter",
"(",
"string",
")",
":",
"yield",
"from",
"(",
"m",
".",
"group",
"(",
"0",
")",
"for",
"m",
"in",
"re",
".",
"finditer",
"(",
"r'[A-Z][a-z0-9]*|[a-z0-9]+'",
",",
"string",
")",
")",
"def",
"snake_iter",
"(",
"string",
")",
":",
"yield",
"from",
"(",
"m",
".",
"group",
"(",
"2",
")",
"for",
"m",
"in",
"re",
".",
"finditer",
"(",
"r'(^|_)([A-Za-z0-9]+)'",
",",
"string",
")",
")",
"inputs",
"=",
"{",
"'pascal'",
":",
"pascal_iter",
",",
"'camel'",
":",
"pascal_iter",
",",
"'snake'",
":",
"snake_iter",
",",
"'constant'",
":",
"snake_iter",
",",
"}",
"def",
"out_fun",
"(",
"sep",
",",
"case",
"=",
"None",
",",
"case_fst",
"=",
"None",
")",
":",
"if",
"case",
"is",
"None",
":",
"case",
"=",
"lambda",
"x",
":",
"x",
"if",
"case_fst",
"is",
"None",
":",
"case_fst",
"=",
"case",
"return",
"lambda",
"tokens",
":",
"sep",
".",
"join",
"(",
"case_fst",
"(",
"token",
")",
"if",
"i",
"==",
"0",
"else",
"case",
"(",
"token",
")",
"for",
"i",
",",
"token",
"in",
"enumerate",
"(",
"tokens",
")",
")",
"outputs",
"=",
"{",
"'pascal'",
":",
"out_fun",
"(",
"''",
",",
"str",
".",
"capitalize",
")",
",",
"'camel'",
":",
"out_fun",
"(",
"''",
",",
"str",
".",
"capitalize",
",",
"str",
".",
"lower",
")",
",",
"'snake'",
":",
"out_fun",
"(",
"'_'",
",",
"str",
".",
"lower",
")",
",",
"'constant'",
":",
"out_fun",
"(",
"'_'",
",",
"str",
".",
"upper",
")",
",",
"}",
"tokens",
"=",
"inputs",
"[",
"typ",
"]",
"(",
"string",
")",
"return",
"outputs",
"[",
"to",
"]",
"(",
"tokens",
")"
] | 39.823529 | 25.54902 |
def delete_serv_obj(self, tenant_id):
    """Deletes the stored service object associated with a tenant.

    (Fixed docstring: the previous text said "Creates and stores", but
    the code removes the persisted object and drops the cached entry.)
    """
    # Remove the persisted object, then forget the in-memory attribute.
    self.del_obj(tenant_id, self.service_attr[tenant_id])
    del self.service_attr[tenant_id]
"def",
"delete_serv_obj",
"(",
"self",
",",
"tenant_id",
")",
":",
"self",
".",
"del_obj",
"(",
"tenant_id",
",",
"self",
".",
"service_attr",
"[",
"tenant_id",
"]",
")",
"del",
"self",
".",
"service_attr",
"[",
"tenant_id",
"]"
] | 54 | 6 |
def token_getter(remote, token=''):
    """Retrieve OAuth access token.

    Used by flask-oauthlib to get the access token when making requests.

    :param remote: The remote application.
    :param token: Type of token to get. Data passed from ``oauth.request()``
        to identify which token to retrieve. (Default: ``''``)
    :returns: The token.
    """
    cache_key = token_session_key(remote.name)
    if cache_key not in session and current_user.is_authenticated:
        # Token is not cached in the session yet: look it up in the
        # persistent token store for the logged-in user.
        stored = RemoteToken.get(
            current_user.get_id(),
            remote.consumer_key,
            token_type=token,
        )
        if stored is None:
            return None
        # Cache token and secret in the session for subsequent calls.
        session[cache_key] = stored.token()
    return session.get(cache_key, None)
"def",
"token_getter",
"(",
"remote",
",",
"token",
"=",
"''",
")",
":",
"session_key",
"=",
"token_session_key",
"(",
"remote",
".",
"name",
")",
"if",
"session_key",
"not",
"in",
"session",
"and",
"current_user",
".",
"is_authenticated",
":",
"# Fetch key from token store if user is authenticated, and the key",
"# isn't already cached in the session.",
"remote_token",
"=",
"RemoteToken",
".",
"get",
"(",
"current_user",
".",
"get_id",
"(",
")",
",",
"remote",
".",
"consumer_key",
",",
"token_type",
"=",
"token",
",",
")",
"if",
"remote_token",
"is",
"None",
":",
"return",
"None",
"# Store token and secret in session",
"session",
"[",
"session_key",
"]",
"=",
"remote_token",
".",
"token",
"(",
")",
"return",
"session",
".",
"get",
"(",
"session_key",
",",
"None",
")"
] | 32.928571 | 18.785714 |
def parse(filename=""):
    """Open, read and eval the resource from the source file.

    :param filename: path to the resource file to evaluate.
    :return: whatever python object the file's expression evaluates to.
    """
    # datetime/decimal stay in local scope so the eval'd resource text
    # may reference them.
    import datetime, decimal
    # BUG FIX: close the file deterministically (the original leaked the
    # file handle).
    # SECURITY: eval() executes arbitrary code -- only use this on
    # trusted resource files.
    with open(filename) as handle:
        source = handle.read()
    return eval(source)
"def",
"parse",
"(",
"filename",
"=",
"\"\"",
")",
":",
"# use the provided resource file:",
"s",
"=",
"open",
"(",
"filename",
")",
".",
"read",
"(",
")",
"##s.decode(\"latin1\").encode(\"utf8\")",
"import",
"datetime",
",",
"decimal",
"rsrc",
"=",
"eval",
"(",
"s",
")",
"return",
"rsrc"
] | 31 | 13.75 |
def last(self):
    """
    Returns the last batch for the batched sequence.

    :rtype: :class:`Batch` instance.
    """
    # Offset of the final page; clamped at zero for the first page.
    offset = self.size * max(self.number - 1, 0)
    return Batch(offset, self.size, self.total_size)
"def",
"last",
"(",
"self",
")",
":",
"start",
"=",
"max",
"(",
"self",
".",
"number",
"-",
"1",
",",
"0",
")",
"*",
"self",
".",
"size",
"return",
"Batch",
"(",
"start",
",",
"self",
".",
"size",
",",
"self",
".",
"total_size",
")"
] | 29.875 | 13.375 |
def _set_tieBreaking(self, v, load=False):
  """
  Setter method for tieBreaking, mapped from YANG variable /brocade_mpls_rpc/show_mpls_te_path/input/tieBreaking (tie-breaking)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_tieBreaking is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_tieBreaking() directly.

  YANG Description: Tie breaking mode for CSPF when multiple paths to destination exists
  """
  # NOTE: pyangbind auto-generated setter -- do not edit by hand.
  if hasattr(v, "_utype"):
    # Unwrap an already-typed value back to its base representation.
    v = v._utype(v)
  try:
    # Validate/coerce the value against the YANG 'tie-breaking'
    # enumeration (most-fill / random / least-fill).
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'most-fill': {'value': 2}, u'random': {'value': 0}, u'least-fill': {'value': 1}},), is_leaf=True, yang_name="tieBreaking", rest_name="tieBreaking", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='tie-breaking', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
        'error-string': """tieBreaking must be of a type compatible with tie-breaking""",
        'defined-type': "brocade-mpls:tie-breaking",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'most-fill': {'value': 2}, u'random': {'value': 0}, u'least-fill': {'value': 1}},), is_leaf=True, yang_name="tieBreaking", rest_name="tieBreaking", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='tie-breaking', is_config=True)""",
      })
  self.__tieBreaking = t
  if hasattr(self, '_set'):
    self._set()
"def",
"_set_tieBreaking",
"(",
"self",
",",
"v",
",",
"load",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"v",
",",
"\"_utype\"",
")",
":",
"v",
"=",
"v",
".",
"_utype",
"(",
"v",
")",
"try",
":",
"t",
"=",
"YANGDynClass",
"(",
"v",
",",
"base",
"=",
"RestrictedClassType",
"(",
"base_type",
"=",
"unicode",
",",
"restriction_type",
"=",
"\"dict_key\"",
",",
"restriction_arg",
"=",
"{",
"u'most-fill'",
":",
"{",
"'value'",
":",
"2",
"}",
",",
"u'random'",
":",
"{",
"'value'",
":",
"0",
"}",
",",
"u'least-fill'",
":",
"{",
"'value'",
":",
"1",
"}",
"}",
",",
")",
",",
"is_leaf",
"=",
"True",
",",
"yang_name",
"=",
"\"tieBreaking\"",
",",
"rest_name",
"=",
"\"tieBreaking\"",
",",
"parent",
"=",
"self",
",",
"path_helper",
"=",
"self",
".",
"_path_helper",
",",
"extmethods",
"=",
"self",
".",
"_extmethods",
",",
"register_paths",
"=",
"False",
",",
"namespace",
"=",
"'urn:brocade.com:mgmt:brocade-mpls'",
",",
"defining_module",
"=",
"'brocade-mpls'",
",",
"yang_type",
"=",
"'tie-breaking'",
",",
"is_config",
"=",
"True",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"ValueError",
"(",
"{",
"'error-string'",
":",
"\"\"\"tieBreaking must be of a type compatible with tie-breaking\"\"\"",
",",
"'defined-type'",
":",
"\"brocade-mpls:tie-breaking\"",
",",
"'generated-type'",
":",
"\"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'most-fill': {'value': 2}, u'random': {'value': 0}, u'least-fill': {'value': 1}},), is_leaf=True, yang_name=\"tieBreaking\", rest_name=\"tieBreaking\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='tie-breaking', is_config=True)\"\"\"",
",",
"}",
")",
"self",
".",
"__tieBreaking",
"=",
"t",
"if",
"hasattr",
"(",
"self",
",",
"'_set'",
")",
":",
"self",
".",
"_set",
"(",
")"
] | 82.833333 | 41.375 |
def rprint(sep='\n', end='\n', file=sys.stdout, flush=False):
    """A coroutine sink which prints received items stdout

    Args:
        sep: Optional separator to be printed between received items.
        end: Optional terminator to be printed after the last item.
        file: Optional stream to which to print.
        flush: Optional flag to force flushing after each item.
    """
    def emit(text):
        # Write, then flush if requested.
        file.write(text)
        if flush:
            file.flush()

    try:
        # The first item carries no leading separator.
        emit(str((yield)))
        while True:
            emit(sep + str((yield)))
    except GeneratorExit:
        # The coroutine was closed: terminate the output with `end`.
        emit(end)
"def",
"rprint",
"(",
"sep",
"=",
"'\\n'",
",",
"end",
"=",
"'\\n'",
",",
"file",
"=",
"sys",
".",
"stdout",
",",
"flush",
"=",
"False",
")",
":",
"try",
":",
"first_item",
"=",
"(",
"yield",
")",
"file",
".",
"write",
"(",
"str",
"(",
"first_item",
")",
")",
"if",
"flush",
":",
"file",
".",
"flush",
"(",
")",
"while",
"True",
":",
"item",
"=",
"(",
"yield",
")",
"file",
".",
"write",
"(",
"sep",
")",
"file",
".",
"write",
"(",
"str",
"(",
"item",
")",
")",
"if",
"flush",
":",
"file",
".",
"flush",
"(",
")",
"except",
"GeneratorExit",
":",
"file",
".",
"write",
"(",
"end",
")",
"if",
"flush",
":",
"file",
".",
"flush",
"(",
")"
] | 30.708333 | 17.666667 |
def QA_SU_save_stock_transaction(
        client=DATABASE,
        ui_log=None,
        ui_progress=None
):
    """save stock_transaction

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        ui_log -- optional UI logger forwarded to QA_util_log_info
        ui_progress -- optional UI progress callback forwarded to
            QA_util_log_info
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_transaction
    coll.create_index('code')
    err = []

    def __saving_work(code):
        # Fetch the full transaction history for one code and bulk-insert
        # it; failures are recorded in `err` instead of aborting the run.
        QA_util_log_info(
            '##JOB11 Now Saving STOCK_TRANSACTION ==== {}'.format(str(code)),
            ui_log=ui_log
        )
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    # TODO: confirm str(code) is the right argument here
                    # (str(stock_list[code]) was questioned upstream).
                    QA_fetch_get_stock_transaction(
                        str(code),
                        '1990-01-01',
                        str(now_time())[0:10]
                    )
                )
            )
        except Exception:
            # BUG FIX: the bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; keep the best-effort behaviour but only
            # for real errors.
            err.append(str(code))

    for i_ in range(len(stock_list)):
        QA_util_log_info(
            'The {} of Total {}'.format(i_,
                                        len(stock_list)),
            ui_log=ui_log
        )
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%'
        )
        intLogProgress = int(float(i_ / len(stock_list) * 10000.0))
        QA_util_log_info(
            strLogProgress,
            ui_log=ui_log,
            ui_progress=ui_progress,
            ui_progress_int_value=intLogProgress
        )
        __saving_work(stock_list[i_])

    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
"def",
"QA_SU_save_stock_transaction",
"(",
"client",
"=",
"DATABASE",
",",
"ui_log",
"=",
"None",
",",
"ui_progress",
"=",
"None",
")",
":",
"stock_list",
"=",
"QA_fetch_get_stock_list",
"(",
")",
".",
"code",
".",
"unique",
"(",
")",
".",
"tolist",
"(",
")",
"coll",
"=",
"client",
".",
"stock_transaction",
"coll",
".",
"create_index",
"(",
"'code'",
")",
"err",
"=",
"[",
"]",
"def",
"__saving_work",
"(",
"code",
")",
":",
"QA_util_log_info",
"(",
"'##JOB11 Now Saving STOCK_TRANSACTION ==== {}'",
".",
"format",
"(",
"str",
"(",
"code",
")",
")",
",",
"ui_log",
"=",
"ui_log",
")",
"try",
":",
"coll",
".",
"insert_many",
"(",
"QA_util_to_json_from_pandas",
"(",
"# 🛠todo str(stock_list[code]) 参数不对?",
"QA_fetch_get_stock_transaction",
"(",
"str",
"(",
"code",
")",
",",
"'1990-01-01'",
",",
"str",
"(",
"now_time",
"(",
")",
")",
"[",
"0",
":",
"10",
"]",
")",
")",
")",
"except",
":",
"err",
".",
"append",
"(",
"str",
"(",
"code",
")",
")",
"for",
"i_",
"in",
"range",
"(",
"len",
"(",
"stock_list",
")",
")",
":",
"# __saving_work('000001')",
"QA_util_log_info",
"(",
"'The {} of Total {}'",
".",
"format",
"(",
"i_",
",",
"len",
"(",
"stock_list",
")",
")",
",",
"ui_log",
"=",
"ui_log",
")",
"strLogProgress",
"=",
"'DOWNLOAD PROGRESS {} '",
".",
"format",
"(",
"str",
"(",
"float",
"(",
"i_",
"/",
"len",
"(",
"stock_list",
")",
"*",
"100",
")",
")",
"[",
"0",
":",
"4",
"]",
"+",
"'%'",
")",
"intLogProgress",
"=",
"int",
"(",
"float",
"(",
"i_",
"/",
"len",
"(",
"stock_list",
")",
"*",
"10000.0",
")",
")",
"QA_util_log_info",
"(",
"strLogProgress",
",",
"ui_log",
"=",
"ui_log",
",",
"ui_progress",
"=",
"ui_progress",
",",
"ui_progress_int_value",
"=",
"intLogProgress",
")",
"__saving_work",
"(",
"stock_list",
"[",
"i_",
"]",
")",
"if",
"len",
"(",
"err",
")",
"<",
"1",
":",
"QA_util_log_info",
"(",
"'SUCCESS'",
",",
"ui_log",
"=",
"ui_log",
")",
"else",
":",
"QA_util_log_info",
"(",
"' ERROR CODE \\n '",
",",
"ui_log",
"=",
"ui_log",
")",
"QA_util_log_info",
"(",
"err",
",",
"ui_log",
"=",
"ui_log",
")"
] | 29.1 | 18.3 |
def _parse_shape_list(shape_list, crs):
    """Validate that the given shapes come as a list and parse each one.

    :param shape_list: The parameter `shape_list` from class initialization
    :type shape_list: list(shapely.geometry.multipolygon.MultiPolygon or shapely.geometry.polygon.Polygon)
    :raises: ValueError
    """
    if not isinstance(shape_list, list):
        raise ValueError('Splitter must be initialized with a list of shapes')
    parse = AreaSplitter._parse_shape
    return [parse(geometry, crs) for geometry in shape_list]
"def",
"_parse_shape_list",
"(",
"shape_list",
",",
"crs",
")",
":",
"if",
"not",
"isinstance",
"(",
"shape_list",
",",
"list",
")",
":",
"raise",
"ValueError",
"(",
"'Splitter must be initialized with a list of shapes'",
")",
"return",
"[",
"AreaSplitter",
".",
"_parse_shape",
"(",
"shape",
",",
"crs",
")",
"for",
"shape",
"in",
"shape_list",
"]"
] | 51.363636 | 26.090909 |
def get_numeric_value(string_value):
    """Extract the number-like part of *string_value*.

    Keeps digits plus the characters ``.``, ``+`` and ``-`` (in their
    original order) and drops everything else.

    :param string_value: string possibly mixing a number with other text
    :returns: the concatenation of the number-like characters (may be '')
    """
    num_chars = {'.', '+', '-'}
    # ''.join over a generator is O(n); repeated ``+=`` concatenation is
    # quadratic in the worst case and not guaranteed to be optimized.
    return ''.join(c for c in string_value if c.isdigit() or c in num_chars)
"def",
"get_numeric_value",
"(",
"string_value",
")",
":",
"num_chars",
"=",
"[",
"'.'",
",",
"'+'",
",",
"'-'",
"]",
"number",
"=",
"''",
"for",
"c",
"in",
"string_value",
":",
"if",
"c",
".",
"isdigit",
"(",
")",
"or",
"c",
"in",
"num_chars",
":",
"number",
"+=",
"c",
"return",
"number"
] | 28.555556 | 10.333333 |
def get_organisation_information(self, query_params=None):
    '''
    Return information for this organisation as a dictionary of values.

    :param query_params: optional dict of query parameters; a falsy value
        (including None) is replaced with an empty dict.
    '''
    params = query_params or {}
    return self.fetch_json(uri_path=self.base_uri, query_params=params)
"def",
"get_organisation_information",
"(",
"self",
",",
"query_params",
"=",
"None",
")",
":",
"return",
"self",
".",
"fetch_json",
"(",
"uri_path",
"=",
"self",
".",
"base_uri",
",",
"query_params",
"=",
"query_params",
"or",
"{",
"}",
")"
] | 34.5 | 20.25 |
def render(self, ctx=None):
    '''
    Render the current value into a :class:`bitstring.Bits` object
    :param ctx: render context used to detect recursive rendering; a fresh
        :class:`RenderContext` is created when omitted
    :rtype: :class:`bitstring.Bits`
    :return: the rendered field
    '''
    self._initialize()
    if ctx is None:
        ctx = RenderContext()
    #
    # if we are called from within render, return a dummy object...
    #
    if self in ctx:
        # Recursion guard: this field is already being rendered higher up
        # the call chain, so substitute a placeholder value.
        self._current_rendered = self._in_render_value()
    else:
        ctx.push(self)
        # Value-based calculated fields render their source field first —
        # presumably so _render() can derive this field's value from the
        # rendered source (confirm against _render's implementation).
        if self.dependency_type == Calculated.VALUE_BASED:
            self._rendered_field = self._field.render(ctx)
        self._render()
        ctx.pop()
    return self._current_rendered
"def",
"render",
"(",
"self",
",",
"ctx",
"=",
"None",
")",
":",
"self",
".",
"_initialize",
"(",
")",
"if",
"ctx",
"is",
"None",
":",
"ctx",
"=",
"RenderContext",
"(",
")",
"#",
"# if we are called from within render, return a dummy object...",
"#",
"if",
"self",
"in",
"ctx",
":",
"self",
".",
"_current_rendered",
"=",
"self",
".",
"_in_render_value",
"(",
")",
"else",
":",
"ctx",
".",
"push",
"(",
"self",
")",
"if",
"self",
".",
"dependency_type",
"==",
"Calculated",
".",
"VALUE_BASED",
":",
"self",
".",
"_rendered_field",
"=",
"self",
".",
"_field",
".",
"render",
"(",
"ctx",
")",
"self",
".",
"_render",
"(",
")",
"ctx",
".",
"pop",
"(",
")",
"return",
"self",
".",
"_current_rendered"
] | 31.545455 | 19.818182 |
def validate(self):
    """Validate the configuration and print a line for every detected issue.

    Each check compares an option against the corresponding entry in its
    section's ``defaults`` mapping using ``is`` identity, so an option
    counts as "unconfigured" only while it is still the default object.
    Nothing is raised or returned; findings are reported via ``print``.
    """
    # Server disabled: signing material must be supplied externally.
    if not self.server.enabled:
        if self.security.signature_certificate_file is self.security.defaults['signature_certificate_file']:
            print("ISSUE: If you are not configuring a server, you need to set 'signature_certificate_file'")
        if self.security.signature_private_key_file is self.security.defaults['signature_private_key_file']:
            print("ISSUE: If you are not configuring a server, you need to set 'signature_private_key_file'")
    # Server enabled: certificates/CA/auth script should be configured.
    else:
        if self.client.enabled:
            print("ISSUE: Client and server enabled at the same time?")
        if self.server.protocol is self.server.defaults['protocol']:
            if self.server.server_certificate_file is self.server.defaults['server_certificate_file'] or \
                    self.server.server_private_key_file is self.server.defaults['server_private_key_file']:
                print("ISSUE: 'server_certificate_file' and/or 'server_private_key_file' are not configured and will be auto-generated.")
            if self.server.certification_authority_certificate_file is self.server.defaults['certification_authority_certificate_file'] or \
                    self.server.certification_authority_private_key_file is self.server.defaults['certification_authority_private_key_file']:
                print("ISSUE: 'certification_authority_certificate_file' and/or 'certification_authority_private_key_file' are not configured and will be auto-generated - this is NOT recommended.")
        if self.server.authentication_script is self.server.defaults['authentication_script']:
            print("ISSUE: No 'authentication_script' has been provided and all authentication requests will be rejected!")
    # Client-side sanity checks (endpoint, TLS verification, credentials).
    if self.client.enabled:
        if self.client.server_endpoint is self.client.defaults['server_endpoint']:
            print("ISSUE: You are running in client mode, but you are using a default server address.")
        if not self.client.disable_peer_verification is self.client.defaults['disable_peer_verification'] or \
                not self.client.disable_host_verification is self.client.defaults['disable_host_verification']:
            print("ISSUE: Disabling peer/host verification is NOT recommended - AT ALL.")
        if self.client.username is self.client.defaults['username'] or \
                self.client.password is self.client.defaults['password']:
            print("ISSUE: No username and/or password has been configured for a client.")
    if self.fscp.contact is self.fscp.defaults['contact']:
        if not self.server.enabled and not self.client.enabled:
            print("ISSUE: You have not defined any contact points while you are neither running as server nor client.")
    ## hostname_resolution_protocol=ipv4/ipv6
    ## ipv4_address_prefix_length=9.0.0.1/24
    ## ipv6_address_prefix_length=2aa1::1/8
    if self.security.authority_certificate_file is self.security.defaults['authority_certificate_file']:
        print("ISSUE: You need to set 'authority_certificate_file'")
    if self.tap_adapter.ipv4_address_prefix_length is self.tap_adapter.defaults['ipv4_address_prefix_length']:
        print("ISSUE: You are using the default network address - make sure you set a different ip for every machine 'ipv4_address_prefix_length'")
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"server",
".",
"enabled",
":",
"if",
"self",
".",
"security",
".",
"signature_certificate_file",
"is",
"self",
".",
"security",
".",
"defaults",
"[",
"'signature_certificate_file'",
"]",
":",
"print",
"(",
"\"ISSUE: If you are not configuring a server, you need to set 'signature_certificate_file'\"",
")",
"if",
"self",
".",
"security",
".",
"signature_private_key_file",
"is",
"self",
".",
"security",
".",
"defaults",
"[",
"'signature_private_key_file'",
"]",
":",
"print",
"(",
"\"ISSUE: If you are not configuring a server, you need to set 'signature_private_key_file'\"",
")",
"else",
":",
"if",
"self",
".",
"client",
".",
"enabled",
":",
"print",
"(",
"\"ISSUE: Client and server enabled at the same time?\"",
")",
"if",
"self",
".",
"server",
".",
"protocol",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'protocol'",
"]",
":",
"if",
"self",
".",
"server",
".",
"server_certificate_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'server_certificate_file'",
"]",
"or",
"self",
".",
"server",
".",
"server_private_key_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'server_private_key_file'",
"]",
":",
"print",
"(",
"\"ISSUE: 'server_certificate_file' and/or 'server_private_key_file' are not configured and will be auto-generated.\"",
")",
"if",
"self",
".",
"server",
".",
"certification_authority_certificate_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'certification_authority_certificate_file'",
"]",
"or",
"self",
".",
"server",
".",
"certification_authority_private_key_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'certification_authority_private_key_file'",
"]",
":",
"print",
"(",
"\"ISSUE: 'certification_authority_certificate_file' and/or 'certification_authority_private_key_file' are not configured and will be auto-generated - this is NOT recommended.\"",
")",
"if",
"self",
".",
"server",
".",
"authentication_script",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'authentication_script'",
"]",
":",
"print",
"(",
"\"ISSUE: No 'authentication_script' has been provided and all authentication requests will be rejected!\"",
")",
"if",
"self",
".",
"client",
".",
"enabled",
":",
"if",
"self",
".",
"client",
".",
"server_endpoint",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'server_endpoint'",
"]",
":",
"print",
"(",
"\"ISSUE: You are running in client mode, but you are using a default server address.\"",
")",
"if",
"not",
"self",
".",
"client",
".",
"disable_peer_verification",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'disable_peer_verification'",
"]",
"or",
"not",
"self",
".",
"client",
".",
"disable_host_verification",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'disable_host_verification'",
"]",
":",
"print",
"(",
"\"ISSUE: Disabling peer/host verification is NOT recommended - AT ALL.\"",
")",
"if",
"self",
".",
"client",
".",
"username",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'username'",
"]",
"or",
"self",
".",
"client",
".",
"password",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'password'",
"]",
":",
"print",
"(",
"\"ISSUE: No username and/or password has been configured for a client.\"",
")",
"if",
"self",
".",
"fscp",
".",
"contact",
"is",
"self",
".",
"fscp",
".",
"defaults",
"[",
"'contact'",
"]",
":",
"if",
"not",
"self",
".",
"server",
".",
"enabled",
"and",
"not",
"self",
".",
"client",
".",
"enabled",
":",
"print",
"(",
"\"ISSUE: You have not defined any contact points while you are neither running as server nor client.\"",
")",
"## hostname_resolution_protocol=ipv4/ipv6",
"## ipv4_address_prefix_length=9.0.0.1/24",
"## ipv6_address_prefix_length=2aa1::1/8",
"if",
"self",
".",
"security",
".",
"authority_certificate_file",
"is",
"self",
".",
"security",
".",
"defaults",
"[",
"'authority_certificate_file'",
"]",
":",
"print",
"(",
"\"ISSUE: You need to set 'authority_certificate_file'\"",
")",
"if",
"self",
".",
"tap_adapter",
".",
"ipv4_address_prefix_length",
"is",
"self",
".",
"tap_adapter",
".",
"defaults",
"[",
"'ipv4_address_prefix_length'",
"]",
":",
"print",
"(",
"\"ISSUE: You are using the default network address - make sure you set a different ip for every machine 'ipv4_address_prefix_length'\"",
")"
] | 62.303571 | 49.875 |
def as_labeller(x, default=label_value, multi_line=True):
    """
    Coerce to labeller function
    Parameters
    ----------
    x : function | dict
        Object to coerce
    default : function | str
        Default labeller. If it is a string,
        it should be the name of one of the labelling
        functions provided by plotnine.
    multi_line : bool
        Whether to place each variable on a separate line
    Returns
    -------
    out : function
        Labelling function
    """
    # NOTE(review): ``multi_line`` is accepted but never used in this body —
    # confirm whether it should be forwarded to the underlying labeller.
    if x is None:
        x = default
    # One of the labelling functions as string
    with suppress(KeyError, TypeError):
        x = LABELLERS[x]
    # x is a labeller
    with suppress(AttributeError):
        if x.__name__ == '_labeller':
            # Already wrapped by a previous as_labeller() call; return as-is.
            return x
    def _labeller(label_info):
        # Normalise the incoming labels to a Series of strings.
        label_info = pd.Series(label_info).astype(str)
        if callable(x) and x.__name__ in LABELLERS:
            # labellers in this module
            return x(label_info)
        elif hasattr(x, '__contains__'):
            # dictionary lookup
            for var in label_info.index:
                if label_info[var] in x:
                    label_info[var] = x[label_info[var]]
            return label_info
        elif callable(x):
            # generic function
            for var in label_info.index:
                label_info[var] = x(label_info[var])
            return label_info
        else:
            msg = "Could not use '{0}' for labelling."
            raise PlotnineError(msg.format(x))
    return _labeller
"def",
"as_labeller",
"(",
"x",
",",
"default",
"=",
"label_value",
",",
"multi_line",
"=",
"True",
")",
":",
"if",
"x",
"is",
"None",
":",
"x",
"=",
"default",
"# One of the labelling functions as string",
"with",
"suppress",
"(",
"KeyError",
",",
"TypeError",
")",
":",
"x",
"=",
"LABELLERS",
"[",
"x",
"]",
"# x is a labeller",
"with",
"suppress",
"(",
"AttributeError",
")",
":",
"if",
"x",
".",
"__name__",
"==",
"'_labeller'",
":",
"return",
"x",
"def",
"_labeller",
"(",
"label_info",
")",
":",
"label_info",
"=",
"pd",
".",
"Series",
"(",
"label_info",
")",
".",
"astype",
"(",
"str",
")",
"if",
"callable",
"(",
"x",
")",
"and",
"x",
".",
"__name__",
"in",
"LABELLERS",
":",
"# labellers in this module",
"return",
"x",
"(",
"label_info",
")",
"elif",
"hasattr",
"(",
"x",
",",
"'__contains__'",
")",
":",
"# dictionary lookup",
"for",
"var",
"in",
"label_info",
".",
"index",
":",
"if",
"label_info",
"[",
"var",
"]",
"in",
"x",
":",
"label_info",
"[",
"var",
"]",
"=",
"x",
"[",
"label_info",
"[",
"var",
"]",
"]",
"return",
"label_info",
"elif",
"callable",
"(",
"x",
")",
":",
"# generic function",
"for",
"var",
"in",
"label_info",
".",
"index",
":",
"label_info",
"[",
"var",
"]",
"=",
"x",
"(",
"label_info",
"[",
"var",
"]",
")",
"return",
"label_info",
"else",
":",
"msg",
"=",
"\"Could not use '{0}' for labelling.\"",
"raise",
"PlotnineError",
"(",
"msg",
".",
"format",
"(",
"x",
")",
")",
"return",
"_labeller"
] | 27.574074 | 15.907407 |
def str_to_python_object(input_str):
    """Convert a dotted-path string into the Python object it names.

    Handles full module paths (importable as-is), ``module.Attr`` paths,
    and bare names (resolved via ``known_mapping_str_to_type`` or, as a
    fallback, looked up on ``__main__``). Non-string input is returned
    unchanged; empty input returns None.

    :raises CannotConvertError: when the target cannot be imported or found
    """
    if not input_str:
        return None
    if six.PY3 and isinstance(input_str, six.binary_type):
        # bytes under Python 3: decode to text before processing
        input_str = to_str(input_str)
    if not isinstance(input_str, six.string_types):
        # gosh, we didn't get a string, we can't convert anything but strings
        # we're going to assume that what we got is actually what was wanted
        # as the output
        return input_str
    input_str = str_quote_stripper(input_str)
    # Bare (dotless) names may map directly to a known builtin/type.
    if '.' not in input_str and input_str in known_mapping_str_to_type:
        return known_mapping_str_to_type[input_str]
    parts = [x.strip() for x in input_str.split('.') if x.strip()]
    try:
        try:
            # first try as a complete module
            package = __import__(input_str)
        except ImportError:
            # it must be a class from a module
            if len(parts) == 1:
                # since it has only one part, it must be a class from __main__
                parts = ('__main__', input_str)
            package = __import__('.'.join(parts[:-1]), globals(), locals(), [])
        # Walk the remaining dotted attributes down from the top package.
        obj = package
        for name in parts[1:]:
            obj = getattr(obj, name)
        return obj
    except AttributeError as x:
        raise CannotConvertError("%s cannot be found" % input_str)
    except ImportError as x:
        raise CannotConvertError(str(x))
"def",
"str_to_python_object",
"(",
"input_str",
")",
":",
"if",
"not",
"input_str",
":",
"return",
"None",
"if",
"six",
".",
"PY3",
"and",
"isinstance",
"(",
"input_str",
",",
"six",
".",
"binary_type",
")",
":",
"input_str",
"=",
"to_str",
"(",
"input_str",
")",
"if",
"not",
"isinstance",
"(",
"input_str",
",",
"six",
".",
"string_types",
")",
":",
"# gosh, we didn't get a string, we can't convert anything but strings",
"# we're going to assume that what we got is actually what was wanted",
"# as the output",
"return",
"input_str",
"input_str",
"=",
"str_quote_stripper",
"(",
"input_str",
")",
"if",
"'.'",
"not",
"in",
"input_str",
"and",
"input_str",
"in",
"known_mapping_str_to_type",
":",
"return",
"known_mapping_str_to_type",
"[",
"input_str",
"]",
"parts",
"=",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"input_str",
".",
"split",
"(",
"'.'",
")",
"if",
"x",
".",
"strip",
"(",
")",
"]",
"try",
":",
"try",
":",
"# first try as a complete module",
"package",
"=",
"__import__",
"(",
"input_str",
")",
"except",
"ImportError",
":",
"# it must be a class from a module",
"if",
"len",
"(",
"parts",
")",
"==",
"1",
":",
"# since it has only one part, it must be a class from __main__",
"parts",
"=",
"(",
"'__main__'",
",",
"input_str",
")",
"package",
"=",
"__import__",
"(",
"'.'",
".",
"join",
"(",
"parts",
"[",
":",
"-",
"1",
"]",
")",
",",
"globals",
"(",
")",
",",
"locals",
"(",
")",
",",
"[",
"]",
")",
"obj",
"=",
"package",
"for",
"name",
"in",
"parts",
"[",
"1",
":",
"]",
":",
"obj",
"=",
"getattr",
"(",
"obj",
",",
"name",
")",
"return",
"obj",
"except",
"AttributeError",
"as",
"x",
":",
"raise",
"CannotConvertError",
"(",
"\"%s cannot be found\"",
"%",
"input_str",
")",
"except",
"ImportError",
"as",
"x",
":",
"raise",
"CannotConvertError",
"(",
"str",
"(",
"x",
")",
")"
] | 41.411765 | 15.764706 |
def has_none_of_selectors(self, selector, *locators, **kwargs):
    """
    Checks that none of the provided locators match on the given page or
    among descendants of the current node. When options are given, each
    locator is checked against those options as well (``wait`` excepted). ::
        page.has_none_of_selectors("custom", "Tom", "Joe", visible="all")
        page.has_none_of_selectors("css", "#my_div", "a.not_clicked")
    All options accepted by :meth:`find_all` (such as ``text`` and
    ``visible``) are supported. The ``wait`` option applies to the locators
    as a group: none of them may be present within ``wait`` (defaulting to
    :data:`capybara.default_max_wait_time`) seconds.
    If ``selector`` is not a valid selector name, it is treated as the
    first locator and the default selector is used instead.
    Args:
        selector (str, optional): The name of the selector to use. Defaults to
            :data:`capybara.default_selector`.
        *locators (str): Variable length list of locators.
        **kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
    """
    return self.assert_none_of_selectors(selector, *locators, **kwargs)
"def",
"has_none_of_selectors",
"(",
"self",
",",
"selector",
",",
"*",
"locators",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"assert_none_of_selectors",
"(",
"selector",
",",
"*",
"locators",
",",
"*",
"*",
"kwargs",
")"
] | 51.56 | 35.24 |
def triplify(self, data, parent=None):
    """Recursively generate statements from the supplied data.

    Yields 4-tuples of (parent, predicate, object, type name); None input
    produces nothing.
    """
    if data is None:
        return
    if self.is_object:
        for statement in self._triplify_object(data, parent):
            yield statement
    elif self.is_array:
        for element in data:
            for statement in self.items.triplify(element, parent):
                yield statement
    else:
        # TODO: figure out if I ever want to check for reverse here.
        type_name = typecast.name(data)
        value = typecast.stringify(type_name, data)
        if value is not None:
            value = value.strip()
        yield (parent, self.predicate, value, type_name)
"def",
"triplify",
"(",
"self",
",",
"data",
",",
"parent",
"=",
"None",
")",
":",
"if",
"data",
"is",
"None",
":",
"return",
"if",
"self",
".",
"is_object",
":",
"for",
"res",
"in",
"self",
".",
"_triplify_object",
"(",
"data",
",",
"parent",
")",
":",
"yield",
"res",
"elif",
"self",
".",
"is_array",
":",
"for",
"item",
"in",
"data",
":",
"for",
"res",
"in",
"self",
".",
"items",
".",
"triplify",
"(",
"item",
",",
"parent",
")",
":",
"yield",
"res",
"else",
":",
"# TODO: figure out if I ever want to check for reverse here.",
"type_name",
"=",
"typecast",
".",
"name",
"(",
"data",
")",
"obj",
"=",
"typecast",
".",
"stringify",
"(",
"type_name",
",",
"data",
")",
"if",
"obj",
"is",
"not",
"None",
":",
"obj",
"=",
"obj",
".",
"strip",
"(",
")",
"yield",
"(",
"parent",
",",
"self",
".",
"predicate",
",",
"obj",
",",
"type_name",
")"
] | 37.368421 | 15.421053 |
def get_basket_items(request):
    """
    Return all items in the basket for this request, plus the basket id.
    """
    bid = basket_id(request)
    items = BasketItem.objects.filter(basket_id=bid)
    return items, bid
"def",
"get_basket_items",
"(",
"request",
")",
":",
"bid",
"=",
"basket_id",
"(",
"request",
")",
"return",
"BasketItem",
".",
"objects",
".",
"filter",
"(",
"basket_id",
"=",
"bid",
")",
",",
"bid"
] | 27.333333 | 7.333333 |
def _run_job(self):
    """
    Submit this task to LSF via ``bsub`` and block until it finishes.

    Builds a bsub argument list that runs lsf_runner.py on the temporary
    directory we've specified, submits it, parses the job id out of bsub's
    textual output, tracks the job until it completes and finally cleans up
    (moving the temporaries next to the job output when ``save_job_info``
    is set).
    """
    args = []
    # The job's log files go next to the (first) task output.
    if isinstance(self.output(), list):
        log_output = os.path.split(self.output()[0].path)
    else:
        log_output = os.path.split(self.output().path)
    # Resource / scheduling flags for bsub.
    args += ["bsub", "-q", self.queue_flag]
    args += ["-n", str(self.n_cpu_flag)]
    args += ["-M", str(self.memory_flag)]
    args += ["-R", "rusage[%s]" % self.resource_flag]
    args += ["-W", str(self.runtime_flag)]
    if self.job_name_flag:
        args += ["-J", str(self.job_name_flag)]
    # Redirect the job's stdout/stderr into the output directory.
    args += ["-o", os.path.join(log_output[0], "job.out")]
    args += ["-e", os.path.join(log_output[0], "job.err")]
    if self.extra_bsub_args:
        args += self.extra_bsub_args.split()
    # Find where the runner file is
    runner_path = os.path.abspath(lsf_runner.__file__)
    args += [runner_path]
    args += [self.tmp_dir]
    # That should do it. Let the world know what we're doing.
    LOGGER.info("### LSF SUBMISSION ARGS: %s",
                " ".join([str(a) for a in args]))
    # Submit the job
    run_job_proc = subprocess.Popen(
        [str(a) for a in args],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, cwd=self.tmp_dir)
    output = run_job_proc.communicate()[0]
    # ASSUMPTION
    # The result will be of the format
    # Job <123> is submitted ot queue <myqueue>
    # So get the number in those first brackets.
    # I cannot think of a better workaround that leaves logic on the Task side of things.
    LOGGER.info("### JOB SUBMISSION OUTPUT: %s", str(output))
    self.job_id = int(output.split("<")[1].split(">")[0])
    LOGGER.info(
        "Job %ssubmitted as job %s",
        self.job_name_flag + ' ',
        str(self.job_id)
    )
    # Poll LSF until the job leaves the queue.
    self._track_job()
    # If we want to save the job temporaries, then do so
    # We'll move them to be next to the job output
    if self.save_job_info:
        LOGGER.info("Saving up temporary bits")
        # dest_dir = self.output().path
        shutil.move(self.tmp_dir, "/".join(log_output[0:-1]))
    # Now delete the temporaries, if they're there.
    self._finish()
"def",
"_run_job",
"(",
"self",
")",
":",
"args",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"self",
".",
"output",
"(",
")",
",",
"list",
")",
":",
"log_output",
"=",
"os",
".",
"path",
".",
"split",
"(",
"self",
".",
"output",
"(",
")",
"[",
"0",
"]",
".",
"path",
")",
"else",
":",
"log_output",
"=",
"os",
".",
"path",
".",
"split",
"(",
"self",
".",
"output",
"(",
")",
".",
"path",
")",
"args",
"+=",
"[",
"\"bsub\"",
",",
"\"-q\"",
",",
"self",
".",
"queue_flag",
"]",
"args",
"+=",
"[",
"\"-n\"",
",",
"str",
"(",
"self",
".",
"n_cpu_flag",
")",
"]",
"args",
"+=",
"[",
"\"-M\"",
",",
"str",
"(",
"self",
".",
"memory_flag",
")",
"]",
"args",
"+=",
"[",
"\"-R\"",
",",
"\"rusage[%s]\"",
"%",
"self",
".",
"resource_flag",
"]",
"args",
"+=",
"[",
"\"-W\"",
",",
"str",
"(",
"self",
".",
"runtime_flag",
")",
"]",
"if",
"self",
".",
"job_name_flag",
":",
"args",
"+=",
"[",
"\"-J\"",
",",
"str",
"(",
"self",
".",
"job_name_flag",
")",
"]",
"args",
"+=",
"[",
"\"-o\"",
",",
"os",
".",
"path",
".",
"join",
"(",
"log_output",
"[",
"0",
"]",
",",
"\"job.out\"",
")",
"]",
"args",
"+=",
"[",
"\"-e\"",
",",
"os",
".",
"path",
".",
"join",
"(",
"log_output",
"[",
"0",
"]",
",",
"\"job.err\"",
")",
"]",
"if",
"self",
".",
"extra_bsub_args",
":",
"args",
"+=",
"self",
".",
"extra_bsub_args",
".",
"split",
"(",
")",
"# Find where the runner file is",
"runner_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"lsf_runner",
".",
"__file__",
")",
"args",
"+=",
"[",
"runner_path",
"]",
"args",
"+=",
"[",
"self",
".",
"tmp_dir",
"]",
"# That should do it. Let the world know what we're doing.",
"LOGGER",
".",
"info",
"(",
"\"### LSF SUBMISSION ARGS: %s\"",
",",
"\" \"",
".",
"join",
"(",
"[",
"str",
"(",
"a",
")",
"for",
"a",
"in",
"args",
"]",
")",
")",
"# Submit the job",
"run_job_proc",
"=",
"subprocess",
".",
"Popen",
"(",
"[",
"str",
"(",
"a",
")",
"for",
"a",
"in",
"args",
"]",
",",
"stdin",
"=",
"subprocess",
".",
"PIPE",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"cwd",
"=",
"self",
".",
"tmp_dir",
")",
"output",
"=",
"run_job_proc",
".",
"communicate",
"(",
")",
"[",
"0",
"]",
"# ASSUMPTION",
"# The result will be of the format",
"# Job <123> is submitted ot queue <myqueue>",
"# So get the number in those first brackets.",
"# I cannot think of a better workaround that leaves logic on the Task side of things.",
"LOGGER",
".",
"info",
"(",
"\"### JOB SUBMISSION OUTPUT: %s\"",
",",
"str",
"(",
"output",
")",
")",
"self",
".",
"job_id",
"=",
"int",
"(",
"output",
".",
"split",
"(",
"\"<\"",
")",
"[",
"1",
"]",
".",
"split",
"(",
"\">\"",
")",
"[",
"0",
"]",
")",
"LOGGER",
".",
"info",
"(",
"\"Job %ssubmitted as job %s\"",
",",
"self",
".",
"job_name_flag",
"+",
"' '",
",",
"str",
"(",
"self",
".",
"job_id",
")",
")",
"self",
".",
"_track_job",
"(",
")",
"# If we want to save the job temporaries, then do so",
"# We'll move them to be next to the job output",
"if",
"self",
".",
"save_job_info",
":",
"LOGGER",
".",
"info",
"(",
"\"Saving up temporary bits\"",
")",
"# dest_dir = self.output().path",
"shutil",
".",
"move",
"(",
"self",
".",
"tmp_dir",
",",
"\"/\"",
".",
"join",
"(",
"log_output",
"[",
"0",
":",
"-",
"1",
"]",
")",
")",
"# Now delete the temporaries, if they're there.",
"self",
".",
"_finish",
"(",
")"
] | 35.630769 | 19.323077 |
def set_resource_value(self, device_id, resource_path, resource_value,
                       fix_path=True, timeout=None):
    """Set a resource value on a device, blocking until the response arrives.

    Synchronous convenience wrapper around
    :meth:`set_resource_value_async`. Usage:
    .. code-block:: python
        try:
            v = api.set_resource_value(device, path, value)
            print("Success, new value:", v)
        except AsyncError, e:
            print("Error", e)
    :param str device_id: The name/id of the device (Required)
    :param str resource_path: The resource path to update (Required)
    :param str resource_value: The new value to set for given path
    :param fix_path: Unused
    :param timeout: Timeout in seconds
    :raises: AsyncError
    :returns: The value of the new resource
    :rtype: str
    """
    self.ensure_notifications_thread()
    pending = self.set_resource_value_async(
        device_id, resource_path, resource_value
    )
    return pending.wait(timeout)
"def",
"set_resource_value",
"(",
"self",
",",
"device_id",
",",
"resource_path",
",",
"resource_value",
",",
"fix_path",
"=",
"True",
",",
"timeout",
"=",
"None",
")",
":",
"self",
".",
"ensure_notifications_thread",
"(",
")",
"return",
"self",
".",
"set_resource_value_async",
"(",
"device_id",
",",
"resource_path",
",",
"resource_value",
")",
".",
"wait",
"(",
"timeout",
")"
] | 37.925926 | 17.925926 |
def kill_tasks(self, app_id, scale=False, wipe=False,
               host=None, batch_size=0, batch_delay=0):
    """Kill all tasks belonging to app.
    :param str app_id: application ID
    :param bool scale: if true, scale down the app by the number of tasks killed
    :param bool wipe: if true, also wipe the tasks' persistent data
    :param str host: if provided, only terminate tasks on this Mesos slave
    :param int batch_size: if non-zero, terminate tasks in groups of this size
    :param int batch_delay: time (in seconds) to wait in between batched kills. If zero, automatically determine
    :returns: list of killed tasks
    :rtype: list[:class:`marathon.models.task.MarathonTask`]
    """
    def batch(iterable, size):
        # Yield successive chains of at most `size` items from `iterable`.
        sourceiter = iter(iterable)
        while True:
            batchiter = itertools.islice(sourceiter, size)
            # PEP 479: letting next() raise StopIteration inside a
            # generator becomes RuntimeError on Python 3.7+, so detect
            # exhaustion explicitly and return instead.
            try:
                first = next(batchiter)
            except StopIteration:
                return
            yield itertools.chain([first], batchiter)
    if batch_size == 0:
        # Terminate all at once
        params = {'scale': scale, 'wipe': wipe}
        if host:
            params['host'] = host
        response = self._do_request(
            'DELETE', '/v2/apps/{app_id}/tasks'.format(app_id=app_id), params)
        # Marathon is inconsistent about what type of object it returns on the multi
        # task deletion endpoint, depending on the version of Marathon. See:
        # https://github.com/mesosphere/marathon/blob/06a6f763a75fb6d652b4f1660685ae234bd15387/src/main/scala/mesosphere/marathon/api/v2/AppTasksResource.scala#L88-L95
        if "tasks" in response.json():
            return self._parse_response(response, MarathonTask, is_list=True, resource_name='tasks')
        else:
            return response.json()
    else:
        # Terminate in batches
        tasks = self.list_tasks(
            app_id, host=host) if host else self.list_tasks(app_id)
        for tbatch in batch(tasks, batch_size):
            killed_tasks = [self.kill_task(app_id, t.id, scale=scale, wipe=wipe)
                            for t in tbatch]
            # Pause until the tasks have been killed to avoid race
            # conditions
            killed_task_ids = set(t.id for t in killed_tasks)
            running_task_ids = killed_task_ids
            while killed_task_ids.intersection(running_task_ids):
                time.sleep(1)
                running_task_ids = set(
                    t.id for t in self.get_app(app_id).tasks)
            if batch_delay == 0:
                # Pause until the replacement tasks are healthy
                desired_instances = self.get_app(app_id).instances
                running_instances = 0
                while running_instances < desired_instances:
                    time.sleep(1)
                    running_instances = sum(
                        t.started_at is None for t in self.get_app(app_id).tasks)
            else:
                time.sleep(batch_delay)
        return tasks
"def",
"kill_tasks",
"(",
"self",
",",
"app_id",
",",
"scale",
"=",
"False",
",",
"wipe",
"=",
"False",
",",
"host",
"=",
"None",
",",
"batch_size",
"=",
"0",
",",
"batch_delay",
"=",
"0",
")",
":",
"def",
"batch",
"(",
"iterable",
",",
"size",
")",
":",
"sourceiter",
"=",
"iter",
"(",
"iterable",
")",
"while",
"True",
":",
"batchiter",
"=",
"itertools",
".",
"islice",
"(",
"sourceiter",
",",
"size",
")",
"yield",
"itertools",
".",
"chain",
"(",
"[",
"next",
"(",
"batchiter",
")",
"]",
",",
"batchiter",
")",
"if",
"batch_size",
"==",
"0",
":",
"# Terminate all at once",
"params",
"=",
"{",
"'scale'",
":",
"scale",
",",
"'wipe'",
":",
"wipe",
"}",
"if",
"host",
":",
"params",
"[",
"'host'",
"]",
"=",
"host",
"response",
"=",
"self",
".",
"_do_request",
"(",
"'DELETE'",
",",
"'/v2/apps/{app_id}/tasks'",
".",
"format",
"(",
"app_id",
"=",
"app_id",
")",
",",
"params",
")",
"# Marathon is inconsistent about what type of object it returns on the multi",
"# task deletion endpoint, depending on the version of Marathon. See:",
"# https://github.com/mesosphere/marathon/blob/06a6f763a75fb6d652b4f1660685ae234bd15387/src/main/scala/mesosphere/marathon/api/v2/AppTasksResource.scala#L88-L95",
"if",
"\"tasks\"",
"in",
"response",
".",
"json",
"(",
")",
":",
"return",
"self",
".",
"_parse_response",
"(",
"response",
",",
"MarathonTask",
",",
"is_list",
"=",
"True",
",",
"resource_name",
"=",
"'tasks'",
")",
"else",
":",
"return",
"response",
".",
"json",
"(",
")",
"else",
":",
"# Terminate in batches",
"tasks",
"=",
"self",
".",
"list_tasks",
"(",
"app_id",
",",
"host",
"=",
"host",
")",
"if",
"host",
"else",
"self",
".",
"list_tasks",
"(",
"app_id",
")",
"for",
"tbatch",
"in",
"batch",
"(",
"tasks",
",",
"batch_size",
")",
":",
"killed_tasks",
"=",
"[",
"self",
".",
"kill_task",
"(",
"app_id",
",",
"t",
".",
"id",
",",
"scale",
"=",
"scale",
",",
"wipe",
"=",
"wipe",
")",
"for",
"t",
"in",
"tbatch",
"]",
"# Pause until the tasks have been killed to avoid race",
"# conditions",
"killed_task_ids",
"=",
"set",
"(",
"t",
".",
"id",
"for",
"t",
"in",
"killed_tasks",
")",
"running_task_ids",
"=",
"killed_task_ids",
"while",
"killed_task_ids",
".",
"intersection",
"(",
"running_task_ids",
")",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"running_task_ids",
"=",
"set",
"(",
"t",
".",
"id",
"for",
"t",
"in",
"self",
".",
"get_app",
"(",
"app_id",
")",
".",
"tasks",
")",
"if",
"batch_delay",
"==",
"0",
":",
"# Pause until the replacement tasks are healthy",
"desired_instances",
"=",
"self",
".",
"get_app",
"(",
"app_id",
")",
".",
"instances",
"running_instances",
"=",
"0",
"while",
"running_instances",
"<",
"desired_instances",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"running_instances",
"=",
"sum",
"(",
"t",
".",
"started_at",
"is",
"None",
"for",
"t",
"in",
"self",
".",
"get_app",
"(",
"app_id",
")",
".",
"tasks",
")",
"else",
":",
"time",
".",
"sleep",
"(",
"batch_delay",
")",
"return",
"tasks"
] | 48.741935 | 23.032258 |
def wider_bn(layer, start_dim, total_dim, n_add, weighted=True):
'''wider batch norm layer.
'''
n_dim = get_n_dim(layer)
if not weighted:
return get_batch_norm_class(n_dim)(layer.num_features + n_add)
weights = layer.get_weights()
new_weights = [
add_noise(np.ones(n_add, dtype=np.float32), np.array([0, 1])),
add_noise(np.zeros(n_add, dtype=np.float32), np.array([0, 1])),
add_noise(np.zeros(n_add, dtype=np.float32), np.array([0, 1])),
add_noise(np.ones(n_add, dtype=np.float32), np.array([0, 1])),
]
student_w = tuple()
for weight, new_weight in zip(weights, new_weights):
temp_w = weight.copy()
temp_w = np.concatenate(
(temp_w[:start_dim], new_weight, temp_w[start_dim:total_dim])
)
student_w += (temp_w,)
new_layer = get_batch_norm_class(n_dim)(layer.num_features + n_add)
new_layer.set_weights(student_w)
return new_layer | [
"def",
"wider_bn",
"(",
"layer",
",",
"start_dim",
",",
"total_dim",
",",
"n_add",
",",
"weighted",
"=",
"True",
")",
":",
"n_dim",
"=",
"get_n_dim",
"(",
"layer",
")",
"if",
"not",
"weighted",
":",
"return",
"get_batch_norm_class",
"(",
"n_dim",
")",
"(",
"layer",
".",
"num_features",
"+",
"n_add",
")",
"weights",
"=",
"layer",
".",
"get_weights",
"(",
")",
"new_weights",
"=",
"[",
"add_noise",
"(",
"np",
".",
"ones",
"(",
"n_add",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
",",
"np",
".",
"array",
"(",
"[",
"0",
",",
"1",
"]",
")",
")",
",",
"add_noise",
"(",
"np",
".",
"zeros",
"(",
"n_add",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
",",
"np",
".",
"array",
"(",
"[",
"0",
",",
"1",
"]",
")",
")",
",",
"add_noise",
"(",
"np",
".",
"zeros",
"(",
"n_add",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
",",
"np",
".",
"array",
"(",
"[",
"0",
",",
"1",
"]",
")",
")",
",",
"add_noise",
"(",
"np",
".",
"ones",
"(",
"n_add",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
",",
"np",
".",
"array",
"(",
"[",
"0",
",",
"1",
"]",
")",
")",
",",
"]",
"student_w",
"=",
"tuple",
"(",
")",
"for",
"weight",
",",
"new_weight",
"in",
"zip",
"(",
"weights",
",",
"new_weights",
")",
":",
"temp_w",
"=",
"weight",
".",
"copy",
"(",
")",
"temp_w",
"=",
"np",
".",
"concatenate",
"(",
"(",
"temp_w",
"[",
":",
"start_dim",
"]",
",",
"new_weight",
",",
"temp_w",
"[",
"start_dim",
":",
"total_dim",
"]",
")",
")",
"student_w",
"+=",
"(",
"temp_w",
",",
")",
"new_layer",
"=",
"get_batch_norm_class",
"(",
"n_dim",
")",
"(",
"layer",
".",
"num_features",
"+",
"n_add",
")",
"new_layer",
".",
"set_weights",
"(",
"student_w",
")",
"return",
"new_layer"
] | 36.076923 | 23.615385 |
def dead(self):
"""Whether the callback no longer exists.
If the callback is maintained via a weak reference, and that
weak reference has been collected, this will be true
instead of false.
"""
if not self._weak:
return False
cb = self._callback()
if cb is None:
return True
return False | [
"def",
"dead",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_weak",
":",
"return",
"False",
"cb",
"=",
"self",
".",
"_callback",
"(",
")",
"if",
"cb",
"is",
"None",
":",
"return",
"True",
"return",
"False"
] | 28.615385 | 17.230769 |
def get_status(self):
"""Get the details from the bulb."""
try:
request = requests.get(
'{}/{}/'.format(self.resource, URI), timeout=self.timeout)
raw_data = request.json()
# Doesn't always work !!!!!
#self._mac = next(iter(self.raw_data))
self.data = raw_data[self._mac]
return self.data
except (requests.exceptions.ConnectionError, ValueError):
raise exceptions.MyStromConnectionError() | [
"def",
"get_status",
"(",
"self",
")",
":",
"try",
":",
"request",
"=",
"requests",
".",
"get",
"(",
"'{}/{}/'",
".",
"format",
"(",
"self",
".",
"resource",
",",
"URI",
")",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"raw_data",
"=",
"request",
".",
"json",
"(",
")",
"# Doesn't always work !!!!!",
"#self._mac = next(iter(self.raw_data))",
"self",
".",
"data",
"=",
"raw_data",
"[",
"self",
".",
"_mac",
"]",
"return",
"self",
".",
"data",
"except",
"(",
"requests",
".",
"exceptions",
".",
"ConnectionError",
",",
"ValueError",
")",
":",
"raise",
"exceptions",
".",
"MyStromConnectionError",
"(",
")"
] | 41.75 | 12.75 |
def zero_dataset(train=False, dev=False, test=False, train_rows=256, dev_rows=64, test_rows=64):
"""
Load the Zero dataset.
The Zero dataset is a simple task of predicting zero from zero. This dataset is useful for
integration testing. The extreme simplicity of the dataset allows for models to learn the task
quickly allowing for quick end-to-end testing.
Args:
train (bool, optional): If to load the training split of the dataset.
dev (bool, optional): If to load the development split of the dataset.
test (bool, optional): If to load the test split of the dataset.
train_rows (int, optional): Number of training rows to generate.
dev_rows (int, optional): Number of development rows to generate.
test_rows (int, optional): Number of test rows to generate.
Returns:
:class:`tuple` of :class:`torchnlp.datasets.Dataset` or :class:`torchnlp.datasets.Dataset`:
Returns between one and all dataset splits (train, dev and test) depending on if their
respective boolean argument is ``True``.
Example:
>>> from torchnlp.datasets import zero_dataset
>>> train = zero_dataset(train=True)
>>> train[0:2]
[{'source': '0', 'target': '0'}, {'source': '0', 'target': '0'}]
"""
ret = []
for is_requested, n_rows in [(train, train_rows), (dev, dev_rows), (test, test_rows)]:
if not is_requested:
continue
rows = [{'source': str(0), 'target': str(0)} for i in range(n_rows)]
ret.append(Dataset(rows))
if len(ret) == 1:
return ret[0]
else:
return tuple(ret) | [
"def",
"zero_dataset",
"(",
"train",
"=",
"False",
",",
"dev",
"=",
"False",
",",
"test",
"=",
"False",
",",
"train_rows",
"=",
"256",
",",
"dev_rows",
"=",
"64",
",",
"test_rows",
"=",
"64",
")",
":",
"ret",
"=",
"[",
"]",
"for",
"is_requested",
",",
"n_rows",
"in",
"[",
"(",
"train",
",",
"train_rows",
")",
",",
"(",
"dev",
",",
"dev_rows",
")",
",",
"(",
"test",
",",
"test_rows",
")",
"]",
":",
"if",
"not",
"is_requested",
":",
"continue",
"rows",
"=",
"[",
"{",
"'source'",
":",
"str",
"(",
"0",
")",
",",
"'target'",
":",
"str",
"(",
"0",
")",
"}",
"for",
"i",
"in",
"range",
"(",
"n_rows",
")",
"]",
"ret",
".",
"append",
"(",
"Dataset",
"(",
"rows",
")",
")",
"if",
"len",
"(",
"ret",
")",
"==",
"1",
":",
"return",
"ret",
"[",
"0",
"]",
"else",
":",
"return",
"tuple",
"(",
"ret",
")"
] | 42.578947 | 29.052632 |
def initialize_bars(self, sender=None, **kwargs):
"""Calls the initializers of all bound navigation bars."""
for bar in self.bars.values():
for initializer in bar.initializers:
initializer(self) | [
"def",
"initialize_bars",
"(",
"self",
",",
"sender",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"bar",
"in",
"self",
".",
"bars",
".",
"values",
"(",
")",
":",
"for",
"initializer",
"in",
"bar",
".",
"initializers",
":",
"initializer",
"(",
"self",
")"
] | 46.8 | 5.2 |
def getFileDialogFilter(self):
""" Returns a filter that can be used in open file dialogs,
for example: 'All files (*);;Txt (*.txt;*.text);;netCDF(*.nc;*.nc4)'
"""
filters = []
for regRti in self.items:
filters.append(regRti.getFileDialogFilter())
return ';;'.join(filters) | [
"def",
"getFileDialogFilter",
"(",
"self",
")",
":",
"filters",
"=",
"[",
"]",
"for",
"regRti",
"in",
"self",
".",
"items",
":",
"filters",
".",
"append",
"(",
"regRti",
".",
"getFileDialogFilter",
"(",
")",
")",
"return",
"';;'",
".",
"join",
"(",
"filters",
")"
] | 41.25 | 12.5 |
def client_getname(self, encoding=_NOTSET):
"""Get the current connection name."""
return self.execute(b'CLIENT', b'GETNAME', encoding=encoding) | [
"def",
"client_getname",
"(",
"self",
",",
"encoding",
"=",
"_NOTSET",
")",
":",
"return",
"self",
".",
"execute",
"(",
"b'CLIENT'",
",",
"b'GETNAME'",
",",
"encoding",
"=",
"encoding",
")"
] | 52.666667 | 10.666667 |
def slow_highlight(img1, img2, opts):
"""Try to find similar areas between two images.
Produces two masks for img1 and img2.
The algorithm works by comparing every possible alignment of the images,
smoothing it a bit to reduce spurious matches in areas that are
perceptibly different (e.g. text), and then taking the point-wise minimum
of all those difference maps.
This way if you insert a few pixel rows/columns into an image, similar
areas should match even if different areas need to be aligned with
different shifts.
As you can imagine, this brute-force approach can be pretty slow, if
there are many possible alignments. The closer the images are in size,
the faster this will work.
If would work better if it could compare alignments that go beyond the
outer boundaries of the images, in case some pixels got shifted closer
to an edge.
"""
w1, h1 = img1.size
w2, h2 = img2.size
W, H = max(w1, w2), max(h1, h2)
pimg1 = Image.new('RGB', (W, H), opts.bgcolor)
pimg2 = Image.new('RGB', (W, H), opts.bgcolor)
pimg1.paste(img1, (0, 0))
pimg2.paste(img2, (0, 0))
diff = Image.new('L', (W, H), 255)
# It is not a good idea to keep one diff image; it should track the
# relative positions of the two images. I think that's what explains
# the fuzz I see near the edges of the different areas.
xr = abs(w1 - w2) + 1
yr = abs(h1 - h2) + 1
try:
p = Progress(xr * yr, timeout=opts.timeout)
for x in range(xr):
for y in range(yr):
p.next()
this = ImageChops.difference(pimg1, pimg2).convert('L')
this = this.filter(ImageFilter.MaxFilter(7))
diff = ImageChops.darker(diff, this)
if h1 > h2:
pimg2 = ImageChops.offset(pimg2, 0, 1)
else:
pimg1 = ImageChops.offset(pimg1, 0, 1)
if h1 > h2:
pimg2 = ImageChops.offset(pimg2, 0, -yr)
else:
pimg1 = ImageChops.offset(pimg1, 0, -yr)
if w1 > w2:
pimg2 = ImageChops.offset(pimg2, 1, 0)
else:
pimg1 = ImageChops.offset(pimg1, 1, 0)
except KeyboardInterrupt:
return None, None
diff = diff.filter(ImageFilter.MaxFilter(5))
diff1 = diff.crop((0, 0, w1, h1))
diff2 = diff.crop((0, 0, w2, h2))
mask1 = tweak_diff(diff1, opts.opacity)
mask2 = tweak_diff(diff2, opts.opacity)
return mask1, mask2 | [
"def",
"slow_highlight",
"(",
"img1",
",",
"img2",
",",
"opts",
")",
":",
"w1",
",",
"h1",
"=",
"img1",
".",
"size",
"w2",
",",
"h2",
"=",
"img2",
".",
"size",
"W",
",",
"H",
"=",
"max",
"(",
"w1",
",",
"w2",
")",
",",
"max",
"(",
"h1",
",",
"h2",
")",
"pimg1",
"=",
"Image",
".",
"new",
"(",
"'RGB'",
",",
"(",
"W",
",",
"H",
")",
",",
"opts",
".",
"bgcolor",
")",
"pimg2",
"=",
"Image",
".",
"new",
"(",
"'RGB'",
",",
"(",
"W",
",",
"H",
")",
",",
"opts",
".",
"bgcolor",
")",
"pimg1",
".",
"paste",
"(",
"img1",
",",
"(",
"0",
",",
"0",
")",
")",
"pimg2",
".",
"paste",
"(",
"img2",
",",
"(",
"0",
",",
"0",
")",
")",
"diff",
"=",
"Image",
".",
"new",
"(",
"'L'",
",",
"(",
"W",
",",
"H",
")",
",",
"255",
")",
"# It is not a good idea to keep one diff image; it should track the",
"# relative positions of the two images. I think that's what explains",
"# the fuzz I see near the edges of the different areas.",
"xr",
"=",
"abs",
"(",
"w1",
"-",
"w2",
")",
"+",
"1",
"yr",
"=",
"abs",
"(",
"h1",
"-",
"h2",
")",
"+",
"1",
"try",
":",
"p",
"=",
"Progress",
"(",
"xr",
"*",
"yr",
",",
"timeout",
"=",
"opts",
".",
"timeout",
")",
"for",
"x",
"in",
"range",
"(",
"xr",
")",
":",
"for",
"y",
"in",
"range",
"(",
"yr",
")",
":",
"p",
".",
"next",
"(",
")",
"this",
"=",
"ImageChops",
".",
"difference",
"(",
"pimg1",
",",
"pimg2",
")",
".",
"convert",
"(",
"'L'",
")",
"this",
"=",
"this",
".",
"filter",
"(",
"ImageFilter",
".",
"MaxFilter",
"(",
"7",
")",
")",
"diff",
"=",
"ImageChops",
".",
"darker",
"(",
"diff",
",",
"this",
")",
"if",
"h1",
">",
"h2",
":",
"pimg2",
"=",
"ImageChops",
".",
"offset",
"(",
"pimg2",
",",
"0",
",",
"1",
")",
"else",
":",
"pimg1",
"=",
"ImageChops",
".",
"offset",
"(",
"pimg1",
",",
"0",
",",
"1",
")",
"if",
"h1",
">",
"h2",
":",
"pimg2",
"=",
"ImageChops",
".",
"offset",
"(",
"pimg2",
",",
"0",
",",
"-",
"yr",
")",
"else",
":",
"pimg1",
"=",
"ImageChops",
".",
"offset",
"(",
"pimg1",
",",
"0",
",",
"-",
"yr",
")",
"if",
"w1",
">",
"w2",
":",
"pimg2",
"=",
"ImageChops",
".",
"offset",
"(",
"pimg2",
",",
"1",
",",
"0",
")",
"else",
":",
"pimg1",
"=",
"ImageChops",
".",
"offset",
"(",
"pimg1",
",",
"1",
",",
"0",
")",
"except",
"KeyboardInterrupt",
":",
"return",
"None",
",",
"None",
"diff",
"=",
"diff",
".",
"filter",
"(",
"ImageFilter",
".",
"MaxFilter",
"(",
"5",
")",
")",
"diff1",
"=",
"diff",
".",
"crop",
"(",
"(",
"0",
",",
"0",
",",
"w1",
",",
"h1",
")",
")",
"diff2",
"=",
"diff",
".",
"crop",
"(",
"(",
"0",
",",
"0",
",",
"w2",
",",
"h2",
")",
")",
"mask1",
"=",
"tweak_diff",
"(",
"diff1",
",",
"opts",
".",
"opacity",
")",
"mask2",
"=",
"tweak_diff",
"(",
"diff2",
",",
"opts",
".",
"opacity",
")",
"return",
"mask1",
",",
"mask2"
] | 34.666667 | 21.347222 |
def get_url_for_service(service, region, endpoint_type):
if 'type' not in service:
return None
identity_version = get_version_from_service(service)
service_endpoints = service.get('endpoints', [])
available_endpoints = [endpoint for endpoint in service_endpoints
if region == _get_endpoint_region(endpoint)]
"""if we are dealing with the identity service and there is no endpoint
in the current region, it is okay to use the first endpoint for any
identity service endpoints and we can assume that it is global
"""
if service['type'] == 'identity' and not available_endpoints:
available_endpoints = [endpoint for endpoint in service_endpoints]
for endpoint in available_endpoints:
try:
if identity_version < 3:
return endpoint.get(endpoint_type)
else:
interface = \
ENDPOINT_TYPE_TO_INTERFACE.get(endpoint_type, '')
if endpoint.get('interface') == interface:
return endpoint.get('url')
except (IndexError, KeyError):
# it could be that the current endpoint just doesn't match the
# type, continue trying the next one
pass
return None | [
"def",
"get_url_for_service",
"(",
"service",
",",
"region",
",",
"endpoint_type",
")",
":",
"if",
"'type'",
"not",
"in",
"service",
":",
"return",
"None",
"identity_version",
"=",
"get_version_from_service",
"(",
"service",
")",
"service_endpoints",
"=",
"service",
".",
"get",
"(",
"'endpoints'",
",",
"[",
"]",
")",
"available_endpoints",
"=",
"[",
"endpoint",
"for",
"endpoint",
"in",
"service_endpoints",
"if",
"region",
"==",
"_get_endpoint_region",
"(",
"endpoint",
")",
"]",
"if",
"service",
"[",
"'type'",
"]",
"==",
"'identity'",
"and",
"not",
"available_endpoints",
":",
"available_endpoints",
"=",
"[",
"endpoint",
"for",
"endpoint",
"in",
"service_endpoints",
"]",
"for",
"endpoint",
"in",
"available_endpoints",
":",
"try",
":",
"if",
"identity_version",
"<",
"3",
":",
"return",
"endpoint",
".",
"get",
"(",
"endpoint_type",
")",
"else",
":",
"interface",
"=",
"ENDPOINT_TYPE_TO_INTERFACE",
".",
"get",
"(",
"endpoint_type",
",",
"''",
")",
"if",
"endpoint",
".",
"get",
"(",
"'interface'",
")",
"==",
"interface",
":",
"return",
"endpoint",
".",
"get",
"(",
"'url'",
")",
"except",
"(",
"IndexError",
",",
"KeyError",
")",
":",
"# it could be that the current endpoint just doesn't match the",
"# type, continue trying the next one",
"pass",
"return",
"None"
] | 43.37931 | 19.103448 |
def write_to_stream(self, stream_id, data, sandbox=None):
"""
Write to the stream
:param stream_id: The stream identifier
:param data: The stream instances
:param sandbox: The sandbox for this stream
:type stream_id: StreamId
:return: None
:raises: NotImplementedError
"""
if sandbox is not None:
raise NotImplementedError
if stream_id not in self.streams:
raise StreamNotFoundError("Stream with id '{}' does not exist".format(stream_id))
writer = self.get_stream_writer(self.streams[stream_id])
if isinstance(data, StreamInstance):
data = [data]
for instance in data:
if not isinstance(instance, StreamInstance):
raise ValueError("Expected StreamInstance, got {}".format(str(type(instance))))
writer(instance) | [
"def",
"write_to_stream",
"(",
"self",
",",
"stream_id",
",",
"data",
",",
"sandbox",
"=",
"None",
")",
":",
"if",
"sandbox",
"is",
"not",
"None",
":",
"raise",
"NotImplementedError",
"if",
"stream_id",
"not",
"in",
"self",
".",
"streams",
":",
"raise",
"StreamNotFoundError",
"(",
"\"Stream with id '{}' does not exist\"",
".",
"format",
"(",
"stream_id",
")",
")",
"writer",
"=",
"self",
".",
"get_stream_writer",
"(",
"self",
".",
"streams",
"[",
"stream_id",
"]",
")",
"if",
"isinstance",
"(",
"data",
",",
"StreamInstance",
")",
":",
"data",
"=",
"[",
"data",
"]",
"for",
"instance",
"in",
"data",
":",
"if",
"not",
"isinstance",
"(",
"instance",
",",
"StreamInstance",
")",
":",
"raise",
"ValueError",
"(",
"\"Expected StreamInstance, got {}\"",
".",
"format",
"(",
"str",
"(",
"type",
"(",
"instance",
")",
")",
")",
")",
"writer",
"(",
"instance",
")"
] | 33.769231 | 18.538462 |
def command_dependents(options):
"""Command launched by CLI."""
dependents = dependencies(options.package, options.recursive, options.info)
if dependents:
print(*dependents, sep='\n') | [
"def",
"command_dependents",
"(",
"options",
")",
":",
"dependents",
"=",
"dependencies",
"(",
"options",
".",
"package",
",",
"options",
".",
"recursive",
",",
"options",
".",
"info",
")",
"if",
"dependents",
":",
"print",
"(",
"*",
"dependents",
",",
"sep",
"=",
"'\\n'",
")"
] | 33.166667 | 18.833333 |
def _columns_for_table(table_name):
"""
Return all of the columns registered for a given table.
Parameters
----------
table_name : str
Returns
-------
columns : dict of column wrappers
Keys will be column names.
"""
return {cname: col
for (tname, cname), col in _COLUMNS.items()
if tname == table_name} | [
"def",
"_columns_for_table",
"(",
"table_name",
")",
":",
"return",
"{",
"cname",
":",
"col",
"for",
"(",
"tname",
",",
"cname",
")",
",",
"col",
"in",
"_COLUMNS",
".",
"items",
"(",
")",
"if",
"tname",
"==",
"table_name",
"}"
] | 21.235294 | 18.882353 |
def is_subdomain(self, other):
"""Is self a subdomain of other?
The notion of subdomain includes equality.
@rtype: bool
"""
(nr, o, nl) = self.fullcompare(other)
if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL:
return True
return False | [
"def",
"is_subdomain",
"(",
"self",
",",
"other",
")",
":",
"(",
"nr",
",",
"o",
",",
"nl",
")",
"=",
"self",
".",
"fullcompare",
"(",
"other",
")",
"if",
"nr",
"==",
"NAMERELN_SUBDOMAIN",
"or",
"nr",
"==",
"NAMERELN_EQUAL",
":",
"return",
"True",
"return",
"False"
] | 27.181818 | 16.545455 |
def adsSyncSetTimeoutEx(port, nMs):
# type: (int, int) -> None
"""Set Timeout.
:param int port: local AMS port as returned by adsPortOpenEx()
:param int nMs: timeout in ms
"""
adsSyncSetTimeoutFct = _adsDLL.AdsSyncSetTimeoutEx
cms = ctypes.c_long(nMs)
err_code = adsSyncSetTimeoutFct(port, cms)
if err_code:
raise ADSError(err_code) | [
"def",
"adsSyncSetTimeoutEx",
"(",
"port",
",",
"nMs",
")",
":",
"# type: (int, int) -> None",
"adsSyncSetTimeoutFct",
"=",
"_adsDLL",
".",
"AdsSyncSetTimeoutEx",
"cms",
"=",
"ctypes",
".",
"c_long",
"(",
"nMs",
")",
"err_code",
"=",
"adsSyncSetTimeoutFct",
"(",
"port",
",",
"cms",
")",
"if",
"err_code",
":",
"raise",
"ADSError",
"(",
"err_code",
")"
] | 28.153846 | 14.769231 |
def save_as(self, new_filename):
"""
Save our file with the name provided.
Args:
new_filename: New name for the workbook file. String.
Returns:
Nothing.
"""
xfile._save_file(
self._filename, self._workbookTree, new_filename) | [
"def",
"save_as",
"(",
"self",
",",
"new_filename",
")",
":",
"xfile",
".",
"_save_file",
"(",
"self",
".",
"_filename",
",",
"self",
".",
"_workbookTree",
",",
"new_filename",
")"
] | 23.076923 | 20.461538 |
def pack_into_dict(fmt, names, buf, offset, data, **kwargs):
"""Same as :func:`~bitstruct.pack_into()`, but data is read from a
dictionary.
See :func:`~bitstruct.pack_dict()` for details on `names`.
"""
return CompiledFormatDict(fmt, names).pack_into(buf,
offset,
data,
**kwargs) | [
"def",
"pack_into_dict",
"(",
"fmt",
",",
"names",
",",
"buf",
",",
"offset",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"CompiledFormatDict",
"(",
"fmt",
",",
"names",
")",
".",
"pack_into",
"(",
"buf",
",",
"offset",
",",
"data",
",",
"*",
"*",
"kwargs",
")"
] | 37.25 | 21.666667 |
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
"""Operation: Set CPC Power Capping (any CPC mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
raise InvalidResourceError(method, uri)
check_required_fields(method, uri, body, ['power-capping-state'])
power_capping_state = body['power-capping-state']
power_cap_current = body.get('power-cap-current', None)
if power_capping_state not in ['disabled', 'enabled', 'custom']:
raise BadRequestError(method, uri, reason=7,
message="Invalid power-capping-state value: "
"%r" % power_capping_state)
if power_capping_state == 'enabled' and power_cap_current is None:
raise BadRequestError(method, uri, reason=7,
message="Power-cap-current must be provided "
"when enabling power capping")
cpc.properties['cpc-power-capping-state'] = power_capping_state
cpc.properties['cpc-power-cap-current'] = power_cap_current
cpc.properties['zcpc-power-capping-state'] = power_capping_state
cpc.properties['zcpc-power-cap-current'] = power_cap_current | [
"def",
"post",
"(",
"method",
",",
"hmc",
",",
"uri",
",",
"uri_parms",
",",
"body",
",",
"logon_required",
",",
"wait_for_completion",
")",
":",
"assert",
"wait_for_completion",
"is",
"True",
"# async not supported yet",
"cpc_oid",
"=",
"uri_parms",
"[",
"0",
"]",
"try",
":",
"cpc",
"=",
"hmc",
".",
"cpcs",
".",
"lookup_by_oid",
"(",
"cpc_oid",
")",
"except",
"KeyError",
":",
"raise",
"InvalidResourceError",
"(",
"method",
",",
"uri",
")",
"check_required_fields",
"(",
"method",
",",
"uri",
",",
"body",
",",
"[",
"'power-capping-state'",
"]",
")",
"power_capping_state",
"=",
"body",
"[",
"'power-capping-state'",
"]",
"power_cap_current",
"=",
"body",
".",
"get",
"(",
"'power-cap-current'",
",",
"None",
")",
"if",
"power_capping_state",
"not",
"in",
"[",
"'disabled'",
",",
"'enabled'",
",",
"'custom'",
"]",
":",
"raise",
"BadRequestError",
"(",
"method",
",",
"uri",
",",
"reason",
"=",
"7",
",",
"message",
"=",
"\"Invalid power-capping-state value: \"",
"\"%r\"",
"%",
"power_capping_state",
")",
"if",
"power_capping_state",
"==",
"'enabled'",
"and",
"power_cap_current",
"is",
"None",
":",
"raise",
"BadRequestError",
"(",
"method",
",",
"uri",
",",
"reason",
"=",
"7",
",",
"message",
"=",
"\"Power-cap-current must be provided \"",
"\"when enabling power capping\"",
")",
"cpc",
".",
"properties",
"[",
"'cpc-power-capping-state'",
"]",
"=",
"power_capping_state",
"cpc",
".",
"properties",
"[",
"'cpc-power-cap-current'",
"]",
"=",
"power_cap_current",
"cpc",
".",
"properties",
"[",
"'zcpc-power-capping-state'",
"]",
"=",
"power_capping_state",
"cpc",
".",
"properties",
"[",
"'zcpc-power-cap-current'",
"]",
"=",
"power_cap_current"
] | 50.071429 | 25 |
async def _download_lsst_bibtex(bibtex_names):
"""Asynchronously download a set of lsst-texmf BibTeX bibliographies from
GitHub.
Parameters
----------
bibtex_names : sequence of `str`
Names of lsst-texmf BibTeX files to download. For example:
.. code-block:: python
['lsst', 'lsst-dm', 'refs', 'books', 'refs_ads']
Returns
-------
bibtexs : `list` of `str`
List of BibTeX file content, in the same order as ``bibtex_names``.
"""
blob_url_template = (
'https://raw.githubusercontent.com/lsst/lsst-texmf/master/texmf/'
'bibtex/bib/{name}.bib'
)
urls = [blob_url_template.format(name=name) for name in bibtex_names]
tasks = []
async with ClientSession() as session:
for url in urls:
task = asyncio.ensure_future(_download_text(url, session))
tasks.append(task)
return await asyncio.gather(*tasks) | [
"async",
"def",
"_download_lsst_bibtex",
"(",
"bibtex_names",
")",
":",
"blob_url_template",
"=",
"(",
"'https://raw.githubusercontent.com/lsst/lsst-texmf/master/texmf/'",
"'bibtex/bib/{name}.bib'",
")",
"urls",
"=",
"[",
"blob_url_template",
".",
"format",
"(",
"name",
"=",
"name",
")",
"for",
"name",
"in",
"bibtex_names",
"]",
"tasks",
"=",
"[",
"]",
"async",
"with",
"ClientSession",
"(",
")",
"as",
"session",
":",
"for",
"url",
"in",
"urls",
":",
"task",
"=",
"asyncio",
".",
"ensure_future",
"(",
"_download_text",
"(",
"url",
",",
"session",
")",
")",
"tasks",
".",
"append",
"(",
"task",
")",
"return",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"tasks",
")"
] | 29.548387 | 22.645161 |
def get_elemental_abunds(self,cycle,index=None):
"""
returns the elemental abundances for one cycle, either
for the whole star or a specific zone depending upon
the value of 'index'.
Parameters
----------
cycle : string or integer
Model to get the abundances for.
index : integer or list, optional
zone number for which to get elemental abundances. If
None the entire abundance profile is returned. If a 1x2
list, the abundances are returned between indices of
index[0] and index[1].
The default is None.
"""
isoabunds=self.se.get(cycle,'iso_massf')
A=array(self.se.A)
Z=array(self.se.Z)
names=self.se.isos
Zuq=list(set(Z)) # list of unique Zs
Zuq.sort()
if index==None:
index=[0,len(isoabunds)]
if type(index)==list:
elemabunds=[]
for zone in range(index[0],index[1]):
percent=int((zone-index[0])*100./(index[1]-index[0]))
sys.stdout.flush()
sys.stdout.write("\rgetting elemental abundances " + "...%d%%" % percent)
elemabunds.append([sum(isoabunds[zone][where(Z==iZ)]) for iZ in Zuq])
else:
elemabunds=[sum(isoabunds[index][where(Z==iZ)]) for iZ in Zuq]
return elemabunds | [
"def",
"get_elemental_abunds",
"(",
"self",
",",
"cycle",
",",
"index",
"=",
"None",
")",
":",
"isoabunds",
"=",
"self",
".",
"se",
".",
"get",
"(",
"cycle",
",",
"'iso_massf'",
")",
"A",
"=",
"array",
"(",
"self",
".",
"se",
".",
"A",
")",
"Z",
"=",
"array",
"(",
"self",
".",
"se",
".",
"Z",
")",
"names",
"=",
"self",
".",
"se",
".",
"isos",
"Zuq",
"=",
"list",
"(",
"set",
"(",
"Z",
")",
")",
"# list of unique Zs",
"Zuq",
".",
"sort",
"(",
")",
"if",
"index",
"==",
"None",
":",
"index",
"=",
"[",
"0",
",",
"len",
"(",
"isoabunds",
")",
"]",
"if",
"type",
"(",
"index",
")",
"==",
"list",
":",
"elemabunds",
"=",
"[",
"]",
"for",
"zone",
"in",
"range",
"(",
"index",
"[",
"0",
"]",
",",
"index",
"[",
"1",
"]",
")",
":",
"percent",
"=",
"int",
"(",
"(",
"zone",
"-",
"index",
"[",
"0",
"]",
")",
"*",
"100.",
"/",
"(",
"index",
"[",
"1",
"]",
"-",
"index",
"[",
"0",
"]",
")",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"\\rgetting elemental abundances \"",
"+",
"\"...%d%%\"",
"%",
"percent",
")",
"elemabunds",
".",
"append",
"(",
"[",
"sum",
"(",
"isoabunds",
"[",
"zone",
"]",
"[",
"where",
"(",
"Z",
"==",
"iZ",
")",
"]",
")",
"for",
"iZ",
"in",
"Zuq",
"]",
")",
"else",
":",
"elemabunds",
"=",
"[",
"sum",
"(",
"isoabunds",
"[",
"index",
"]",
"[",
"where",
"(",
"Z",
"==",
"iZ",
")",
"]",
")",
"for",
"iZ",
"in",
"Zuq",
"]",
"return",
"elemabunds"
] | 34.4 | 19.6 |
def update_function(self, param_vals):
"""Updates the opt_obj, returns new error."""
self.opt_obj.update_function(param_vals)
return self.opt_obj.get_error() | [
"def",
"update_function",
"(",
"self",
",",
"param_vals",
")",
":",
"self",
".",
"opt_obj",
".",
"update_function",
"(",
"param_vals",
")",
"return",
"self",
".",
"opt_obj",
".",
"get_error",
"(",
")"
] | 44.5 | 2.75 |
def drop_indexes(self):
        """Drops all indexes on this collection.

        Safe to call on non-existent collections or collections that
        have no indexes. Raises OperationFailure on an error.

        .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of
          this collection is automatically applied to this operation when using
          MongoDB >= 3.4.

        .. versionchanged:: 3.4
          Apply this collection's write concern automatically to this operation
          when connected to MongoDB >= 3.4.
        """
        database = self.__database
        # Forget any cached index state for this namespace, then drop all.
        database.client._purge_index(database.name, self.__name)
        self.drop_index("*")
"def",
"drop_indexes",
"(",
"self",
")",
":",
"self",
".",
"__database",
".",
"client",
".",
"_purge_index",
"(",
"self",
".",
"__database",
".",
"name",
",",
"self",
".",
"__name",
")",
"self",
".",
"drop_index",
"(",
"\"*\"",
")"
] | 38.176471 | 24.352941 |
def _get_connection(self):
        '''
        Lazily obtain and memoize a connection on this object.

        A connection passed in earlier (or already created) is reused;
        otherwise a fresh one is fetched and cached. (internal)
        '''
        cached = self._connection
        if cached is not None:
            return cached
        self._connection = self._get_new_connection()
        return self._connection
"def",
"_get_connection",
"(",
"self",
")",
":",
"if",
"self",
".",
"_connection",
"is",
"None",
":",
"self",
".",
"_connection",
"=",
"self",
".",
"_get_new_connection",
"(",
")",
"return",
"self",
".",
"_connection"
] | 29.111111 | 19.777778 |
def _is_iterable(item):
    """Return True if *item* is iterable (list, tuple, generator, ...) but
    not a string.

    Strings are excluded explicitly because iterating them yields single
    characters, which is rarely what callers of this helper want.
    """
    # ``collections.Iterable`` moved to ``collections.abc`` in Python 3.3 and
    # was removed from ``collections`` in Python 3.10; resolve it portably so
    # this works on both old and new interpreters.
    iterable_type = getattr(collections, 'abc', collections).Iterable
    return isinstance(item, iterable_type) and not isinstance(item, six.string_types)
"def",
"_is_iterable",
"(",
"item",
")",
":",
"return",
"isinstance",
"(",
"item",
",",
"collections",
".",
"Iterable",
")",
"and",
"not",
"isinstance",
"(",
"item",
",",
"six",
".",
"string_types",
")"
] | 65.666667 | 23 |
def _vn_decode(self, msg):
        """VN: Version information."""
        def dotted(offset):
            # Three consecutive hex-encoded bytes -> "major.minor.patch".
            fields = [int(msg[i:i + 2], 16)
                      for i in range(offset, offset + 6, 2)]
            return "{}.{}.{}".format(*fields)
        return {'elkm1_version': dotted(4), 'xep_version': dotted(10)}
"def",
"_vn_decode",
"(",
"self",
",",
"msg",
")",
":",
"elkm1_version",
"=",
"\"{}.{}.{}\"",
".",
"format",
"(",
"int",
"(",
"msg",
"[",
"4",
":",
"6",
"]",
",",
"16",
")",
",",
"int",
"(",
"msg",
"[",
"6",
":",
"8",
"]",
",",
"16",
")",
",",
"int",
"(",
"msg",
"[",
"8",
":",
"10",
"]",
",",
"16",
")",
")",
"xep_version",
"=",
"\"{}.{}.{}\"",
".",
"format",
"(",
"int",
"(",
"msg",
"[",
"10",
":",
"12",
"]",
",",
"16",
")",
",",
"int",
"(",
"msg",
"[",
"12",
":",
"14",
"]",
",",
"16",
")",
",",
"int",
"(",
"msg",
"[",
"14",
":",
"16",
"]",
",",
"16",
")",
")",
"return",
"{",
"'elkm1_version'",
":",
"elkm1_version",
",",
"'xep_version'",
":",
"xep_version",
"}"
] | 60 | 24.285714 |
def populate(self, priority, address, rtr, data):
        """
        Decode a temperature frame.

        data bytes (high + low)
        1 + 2 = current temp
        3 + 4 = min temp
        5 + 6 = max temp
        :return: None
        """
        assert isinstance(data, bytes)
        self.needs_no_rtr(rtr)
        self.needs_data(data, 6)
        self.set_attributes(priority, address, rtr)

        def to_degrees(high, low):
            # 16-bit big-endian raw value, scaled to degrees.
            return (((data[high] << 8) | data[low]) / 32) * 0.0625

        self.cur = to_degrees(0, 1)
        self.min = to_degrees(2, 3)
        self.max = to_degrees(4, 5)
"def",
"populate",
"(",
"self",
",",
"priority",
",",
"address",
",",
"rtr",
",",
"data",
")",
":",
"assert",
"isinstance",
"(",
"data",
",",
"bytes",
")",
"self",
".",
"needs_no_rtr",
"(",
"rtr",
")",
"self",
".",
"needs_data",
"(",
"data",
",",
"6",
")",
"self",
".",
"set_attributes",
"(",
"priority",
",",
"address",
",",
"rtr",
")",
"self",
".",
"cur",
"=",
"(",
"(",
"(",
"data",
"[",
"0",
"]",
"<<",
"8",
")",
"|",
"data",
"[",
"1",
"]",
")",
"/",
"32",
")",
"*",
"0.0625",
"self",
".",
"min",
"=",
"(",
"(",
"(",
"data",
"[",
"2",
"]",
"<<",
"8",
")",
"|",
"data",
"[",
"3",
"]",
")",
"/",
"32",
")",
"*",
"0.0625",
"self",
".",
"max",
"=",
"(",
"(",
"(",
"data",
"[",
"4",
"]",
"<<",
"8",
")",
"|",
"data",
"[",
"5",
"]",
")",
"/",
"32",
")",
"*",
"0.0625"
] | 36.866667 | 10.6 |
def forwards(self, orm):
    "Write your forwards methods here."
    field_kwargs = {
        'app_name': 'variants',
        'model_name': 'variantphenotype',
        'field_name': 'hgmd_id',
        'defaults': {
            'name': 'HGMD',
            'published': True
        },
    }
    # Idempotent: only creates the HGMD DataField if it is missing.
    orm['avocado.DataField'].objects.get_or_create(**field_kwargs)
"def",
"forwards",
"(",
"self",
",",
"orm",
")",
":",
"orm",
"[",
"'avocado.DataField'",
"]",
".",
"objects",
".",
"get_or_create",
"(",
"app_name",
"=",
"'variants'",
",",
"model_name",
"=",
"'variantphenotype'",
",",
"field_name",
"=",
"'hgmd_id'",
",",
"defaults",
"=",
"{",
"'name'",
":",
"'HGMD'",
",",
"'published'",
":",
"True",
"}",
")"
] | 32.9 | 11.1 |
def cli(env, billing_id, datacenter):
    """Adds a load balancer given the id returned from create-options."""
    manager = SoftLayer.LoadBalancerManager(env.client)
    prompt = ("This action will incur charges on your "
              "account. Continue?")
    # Bail out before ordering anything unless the user confirms.
    if not formatting.confirm(prompt):
        raise exceptions.CLIAbort('Aborted.')
    manager.add_local_lb(billing_id, datacenter=datacenter)
    env.fout("Load balancer is being created!")
"def",
"cli",
"(",
"env",
",",
"billing_id",
",",
"datacenter",
")",
":",
"mgr",
"=",
"SoftLayer",
".",
"LoadBalancerManager",
"(",
"env",
".",
"client",
")",
"if",
"not",
"formatting",
".",
"confirm",
"(",
"\"This action will incur charges on your \"",
"\"account. Continue?\"",
")",
":",
"raise",
"exceptions",
".",
"CLIAbort",
"(",
"'Aborted.'",
")",
"mgr",
".",
"add_local_lb",
"(",
"billing_id",
",",
"datacenter",
"=",
"datacenter",
")",
"env",
".",
"fout",
"(",
"\"Load balancer is being created!\"",
")"
] | 47.888889 | 13.777778 |
def dM(self, t, param, Mt, tips=None, gaps=None):
        """See docs for method in `Model` abstract base class.

        Derivative of the per-site substitution matrices ``M(t)`` with
        respect to ``param``. ``param`` may be ``'t'`` (the branch length
        itself), ``'mu'``, or any name in ``self.freeparams`` (scalar or
        1-dimensional array valued).

        :param t: branch length; must be a positive float.
        :param param: name of the parameter to differentiate with respect to.
        :param Mt: precomputed result of ``self.M(t, ...)``, or ``None``
            (it is only used on the ``'mu'`` / ``'t'`` fast path).
        :param tips: as for ``self.M``; selects tip codon columns.
        :param gaps: as for ``self.M``; rows for gap sites are zeroed.
        """
        assert isinstance(t, float) and t > 0, "Invalid t: {0}".format(t)
        assert (param == 't') or (param in self.freeparams), (
            "Invalid param: {0}".format(param))
        if Mt is None:
            Mt = self.M(t, tips=tips, gaps=gaps)
        # Fast path: M(t) is built from mu * t * Prxy, so dM/dt and dM/dmu are
        # just self.Prxy times the already-computed M(t) scaled by mu or t.
        if (param == 'mu') or (param == 't'):
            if param == 'mu':
                alpha = t
            else:
                alpha = self.mu
            if tips is None:
                dM_param = broadcastMatrixMultiply(self.Prxy, Mt, alpha=alpha)
            else:
                dM_param = broadcastMatrixVectorMultiply(self.Prxy, Mt, alpha=alpha)
                if gaps is not None:
                    dM_param[gaps] = scipy.zeros(N_CODON, dtype='float')
            return dM_param
        # General path: differentiate through the A * diag(exp(D mu t)) * Ainv
        # factorization as dM = A (B[param] * V) Ainv, where V is a divided-
        # difference matrix of the eigenvalues cached per branch length t.
        # NOTE(review): inferred from the A/D/Ainv/B usage below -- confirm
        # against the Model base-class documentation.
        paramval = getattr(self, param)
        if isinstance(paramval, float):
            paramisvec = False
        else:
            assert isinstance(paramval, numpy.ndarray) and paramval.ndim == 1
            paramisvec = True
            paramlength = paramval.shape[0]
        if ('expD', t) not in self._cached:
            self._cached[('expD', t)] = scipy.exp(self.D * self.mu * t)
        expD = self._cached[('expD', t)]
        if ('V', t) not in self._cached:
            if 'Dxx_Dyy' not in self._cached:
                # All pairwise differences D_x - D_y of the D values per site.
                Dyy = scipy.tile(self.D, (1, N_CODON)).reshape(
                        self.nsites, N_CODON, N_CODON)
                Dxx = scipy.array([Dyy[r].transpose() for r in
                        range(self.nsites)])
                self._cached['Dxx_Dyy'] = Dxx - Dyy
            Dxx_Dyy = self._cached['Dxx_Dyy']
            if 'Dxx_Dyy_lt_ALMOST_ZERO' not in self._cached:
                # Mask of (near-)equal pairs where the divided difference
                # below would divide by ~0.
                self._cached['Dxx_Dyy_lt_ALMOST_ZERO'] = scipy.fabs(
                        Dxx_Dyy) < ALMOST_ZERO
            Dxx_Dyy_lt_ALMOST_ZERO = self._cached['Dxx_Dyy_lt_ALMOST_ZERO']
            with scipy.errstate(divide='raise', under='ignore',
                    over='raise', invalid='ignore'):
                expDyy = scipy.tile(expD,(1, N_CODON)).reshape(
                        self.nsites, N_CODON, N_CODON)
                expDxx = scipy.array([expDyy[r].transpose() for r in
                        range(self.nsites)])
                # Divided difference (e^Dx - e^Dy) / (Dx - Dy); entries at
                # masked (degenerate) pairs are garbage and fixed just below.
                V = (expDxx - expDyy) / Dxx_Dyy
            with scipy.errstate(under='ignore'): # OK if some values 0
                # Degenerate pairs take the limiting value mu * t * e^Dx.
                scipy.copyto(V, self.mu * t * expDxx, where=
                        Dxx_Dyy_lt_ALMOST_ZERO)
            self._cached[('V', t)] = V
        V = self._cached[('V', t)]
        with scipy.errstate(under='ignore'): # don't worry if some values 0
            if tips is None:
                if not paramisvec:
                    dM_param = broadcastMatrixMultiply(self.A,
                            broadcastMatrixMultiply(self.B[param]
                            * V, self.Ainv))
                else:
                    # Vector-valued parameter: one derivative per component.
                    dM_param = scipy.ndarray((paramlength, self.nsites,
                            N_CODON, N_CODON), dtype='float')
                    for j in range(paramlength):
                        dM_param[j] = broadcastMatrixMultiply(self.A,
                                broadcastMatrixMultiply(self.B[param][j]
                                * V, self.Ainv))
            else:
                if not paramisvec:
                    dM_param = broadcastMatrixVectorMultiply(self.A,
                            broadcastGetCols(broadcastMatrixMultiply(
                            self.B[param] * V, self.Ainv), tips))
                else:
                    dM_param = scipy.ndarray((paramlength, self.nsites,
                            N_CODON), dtype='float')
                    for j in range(paramlength):
                        dM_param[j] = broadcastMatrixVectorMultiply(self.A,
                                broadcastGetCols(broadcastMatrixMultiply(
                                self.B[param][j] * V, self.Ainv), tips))
            if gaps is not None:
                if not paramisvec:
                    dM_param[gaps] = scipy.zeros(N_CODON, dtype='float')
                else:
                    dM_param[:, gaps] = scipy.zeros(N_CODON, dtype='float')
        return dM_param
"def",
"dM",
"(",
"self",
",",
"t",
",",
"param",
",",
"Mt",
",",
"tips",
"=",
"None",
",",
"gaps",
"=",
"None",
")",
":",
"assert",
"isinstance",
"(",
"t",
",",
"float",
")",
"and",
"t",
">",
"0",
",",
"\"Invalid t: {0}\"",
".",
"format",
"(",
"t",
")",
"assert",
"(",
"param",
"==",
"'t'",
")",
"or",
"(",
"param",
"in",
"self",
".",
"freeparams",
")",
",",
"(",
"\"Invalid param: {0}\"",
".",
"format",
"(",
"param",
")",
")",
"if",
"Mt",
"is",
"None",
":",
"Mt",
"=",
"self",
".",
"M",
"(",
"t",
",",
"tips",
"=",
"tips",
",",
"gaps",
"=",
"gaps",
")",
"if",
"(",
"param",
"==",
"'mu'",
")",
"or",
"(",
"param",
"==",
"'t'",
")",
":",
"if",
"param",
"==",
"'mu'",
":",
"alpha",
"=",
"t",
"else",
":",
"alpha",
"=",
"self",
".",
"mu",
"if",
"tips",
"is",
"None",
":",
"dM_param",
"=",
"broadcastMatrixMultiply",
"(",
"self",
".",
"Prxy",
",",
"Mt",
",",
"alpha",
"=",
"alpha",
")",
"else",
":",
"dM_param",
"=",
"broadcastMatrixVectorMultiply",
"(",
"self",
".",
"Prxy",
",",
"Mt",
",",
"alpha",
"=",
"alpha",
")",
"if",
"gaps",
"is",
"not",
"None",
":",
"dM_param",
"[",
"gaps",
"]",
"=",
"scipy",
".",
"zeros",
"(",
"N_CODON",
",",
"dtype",
"=",
"'float'",
")",
"return",
"dM_param",
"paramval",
"=",
"getattr",
"(",
"self",
",",
"param",
")",
"if",
"isinstance",
"(",
"paramval",
",",
"float",
")",
":",
"paramisvec",
"=",
"False",
"else",
":",
"assert",
"isinstance",
"(",
"paramval",
",",
"numpy",
".",
"ndarray",
")",
"and",
"paramval",
".",
"ndim",
"==",
"1",
"paramisvec",
"=",
"True",
"paramlength",
"=",
"paramval",
".",
"shape",
"[",
"0",
"]",
"if",
"(",
"'expD'",
",",
"t",
")",
"not",
"in",
"self",
".",
"_cached",
":",
"self",
".",
"_cached",
"[",
"(",
"'expD'",
",",
"t",
")",
"]",
"=",
"scipy",
".",
"exp",
"(",
"self",
".",
"D",
"*",
"self",
".",
"mu",
"*",
"t",
")",
"expD",
"=",
"self",
".",
"_cached",
"[",
"(",
"'expD'",
",",
"t",
")",
"]",
"if",
"(",
"'V'",
",",
"t",
")",
"not",
"in",
"self",
".",
"_cached",
":",
"if",
"'Dxx_Dyy'",
"not",
"in",
"self",
".",
"_cached",
":",
"Dyy",
"=",
"scipy",
".",
"tile",
"(",
"self",
".",
"D",
",",
"(",
"1",
",",
"N_CODON",
")",
")",
".",
"reshape",
"(",
"self",
".",
"nsites",
",",
"N_CODON",
",",
"N_CODON",
")",
"Dxx",
"=",
"scipy",
".",
"array",
"(",
"[",
"Dyy",
"[",
"r",
"]",
".",
"transpose",
"(",
")",
"for",
"r",
"in",
"range",
"(",
"self",
".",
"nsites",
")",
"]",
")",
"self",
".",
"_cached",
"[",
"'Dxx_Dyy'",
"]",
"=",
"Dxx",
"-",
"Dyy",
"Dxx_Dyy",
"=",
"self",
".",
"_cached",
"[",
"'Dxx_Dyy'",
"]",
"if",
"'Dxx_Dyy_lt_ALMOST_ZERO'",
"not",
"in",
"self",
".",
"_cached",
":",
"self",
".",
"_cached",
"[",
"'Dxx_Dyy_lt_ALMOST_ZERO'",
"]",
"=",
"scipy",
".",
"fabs",
"(",
"Dxx_Dyy",
")",
"<",
"ALMOST_ZERO",
"Dxx_Dyy_lt_ALMOST_ZERO",
"=",
"self",
".",
"_cached",
"[",
"'Dxx_Dyy_lt_ALMOST_ZERO'",
"]",
"with",
"scipy",
".",
"errstate",
"(",
"divide",
"=",
"'raise'",
",",
"under",
"=",
"'ignore'",
",",
"over",
"=",
"'raise'",
",",
"invalid",
"=",
"'ignore'",
")",
":",
"expDyy",
"=",
"scipy",
".",
"tile",
"(",
"expD",
",",
"(",
"1",
",",
"N_CODON",
")",
")",
".",
"reshape",
"(",
"self",
".",
"nsites",
",",
"N_CODON",
",",
"N_CODON",
")",
"expDxx",
"=",
"scipy",
".",
"array",
"(",
"[",
"expDyy",
"[",
"r",
"]",
".",
"transpose",
"(",
")",
"for",
"r",
"in",
"range",
"(",
"self",
".",
"nsites",
")",
"]",
")",
"V",
"=",
"(",
"expDxx",
"-",
"expDyy",
")",
"/",
"Dxx_Dyy",
"with",
"scipy",
".",
"errstate",
"(",
"under",
"=",
"'ignore'",
")",
":",
"# OK if some values 0",
"scipy",
".",
"copyto",
"(",
"V",
",",
"self",
".",
"mu",
"*",
"t",
"*",
"expDxx",
",",
"where",
"=",
"Dxx_Dyy_lt_ALMOST_ZERO",
")",
"self",
".",
"_cached",
"[",
"(",
"'V'",
",",
"t",
")",
"]",
"=",
"V",
"V",
"=",
"self",
".",
"_cached",
"[",
"(",
"'V'",
",",
"t",
")",
"]",
"with",
"scipy",
".",
"errstate",
"(",
"under",
"=",
"'ignore'",
")",
":",
"# don't worry if some values 0",
"if",
"tips",
"is",
"None",
":",
"if",
"not",
"paramisvec",
":",
"dM_param",
"=",
"broadcastMatrixMultiply",
"(",
"self",
".",
"A",
",",
"broadcastMatrixMultiply",
"(",
"self",
".",
"B",
"[",
"param",
"]",
"*",
"V",
",",
"self",
".",
"Ainv",
")",
")",
"else",
":",
"dM_param",
"=",
"scipy",
".",
"ndarray",
"(",
"(",
"paramlength",
",",
"self",
".",
"nsites",
",",
"N_CODON",
",",
"N_CODON",
")",
",",
"dtype",
"=",
"'float'",
")",
"for",
"j",
"in",
"range",
"(",
"paramlength",
")",
":",
"dM_param",
"[",
"j",
"]",
"=",
"broadcastMatrixMultiply",
"(",
"self",
".",
"A",
",",
"broadcastMatrixMultiply",
"(",
"self",
".",
"B",
"[",
"param",
"]",
"[",
"j",
"]",
"*",
"V",
",",
"self",
".",
"Ainv",
")",
")",
"else",
":",
"if",
"not",
"paramisvec",
":",
"dM_param",
"=",
"broadcastMatrixVectorMultiply",
"(",
"self",
".",
"A",
",",
"broadcastGetCols",
"(",
"broadcastMatrixMultiply",
"(",
"self",
".",
"B",
"[",
"param",
"]",
"*",
"V",
",",
"self",
".",
"Ainv",
")",
",",
"tips",
")",
")",
"else",
":",
"dM_param",
"=",
"scipy",
".",
"ndarray",
"(",
"(",
"paramlength",
",",
"self",
".",
"nsites",
",",
"N_CODON",
")",
",",
"dtype",
"=",
"'float'",
")",
"for",
"j",
"in",
"range",
"(",
"paramlength",
")",
":",
"dM_param",
"[",
"j",
"]",
"=",
"broadcastMatrixVectorMultiply",
"(",
"self",
".",
"A",
",",
"broadcastGetCols",
"(",
"broadcastMatrixMultiply",
"(",
"self",
".",
"B",
"[",
"param",
"]",
"[",
"j",
"]",
"*",
"V",
",",
"self",
".",
"Ainv",
")",
",",
"tips",
")",
")",
"if",
"gaps",
"is",
"not",
"None",
":",
"if",
"not",
"paramisvec",
":",
"dM_param",
"[",
"gaps",
"]",
"=",
"scipy",
".",
"zeros",
"(",
"N_CODON",
",",
"dtype",
"=",
"'float'",
")",
"else",
":",
"dM_param",
"[",
":",
",",
"gaps",
"]",
"=",
"scipy",
".",
"zeros",
"(",
"N_CODON",
",",
"dtype",
"=",
"'float'",
")",
"return",
"dM_param"
] | 46.755556 | 18.677778 |
def intersect_one_round(candidates, intersections):
    """Perform one step of the intersection process.

    .. note::

       This is a helper for :func:`_all_intersections` and that function
       has a Fortran equivalent.

    Each candidate pair is kept only if the bounding boxes of its two
    members intersect. Accepted pairs are subdivided and linearized for
    the next round; pairs where both members are already linearized are
    intersected immediately.

    Args:
        candidates (Union[list, itertools.chain]): An iterable of
            pairs of curves (or linearized curves).
        intersections (list): A list of already encountered
            intersections. Any intersections resolved during this
            round are appended here.

    Returns:
        list: The next round of ``candidates``.
    """
    accepted = []
    # NOTE: ``a.__class__ is B`` is used rather than ``isinstance(a, B)``
    #       because it is a 3-3.5x speedup.
    for shape1, shape2 in candidates:
        linear1 = shape1.__class__ is Linearization
        linear2 = shape2.__class__ is Linearization
        if linear1 and linear2:
            bbox_int = bbox_intersect(
                shape1.curve.nodes, shape2.curve.nodes
            )
        elif linear1:
            bbox_int = bbox_line_intersect(
                shape2.nodes, shape1.start_node, shape1.end_node
            )
        elif linear2:
            bbox_int = bbox_line_intersect(
                shape1.nodes, shape2.start_node, shape2.end_node
            )
        else:
            bbox_int = bbox_intersect(shape1.nodes, shape2.nodes)
        if bbox_int == BoxIntersectionType.DISJOINT:
            continue
        both_linearized = linear1 and linear2
        if bbox_int == BoxIntersectionType.TANGENT and not both_linearized:
            # ``tangent_bbox_intersection()`` assumes that neither curve is
            # linear, so tangent bounding boxes are ignored once both are.
            tangent_bbox_intersection(shape1, shape2, intersections)
            continue
        if both_linearized:
            # Two linearizations can be intersected immediately.
            from_linearized(shape1, shape2, intersections)
            continue
        # Accepted pair: subdivide both curves and linearize the pieces for
        # the next round. This may repeat work if a curve appears in several
        # accepted pairs, but in practice such pairs are few.
        pieces1 = six.moves.map(Linearization.from_shape, shape1.subdivide())
        pieces2 = six.moves.map(Linearization.from_shape, shape2.subdivide())
        accepted.extend(itertools.product(pieces1, pieces2))
    return accepted
"def",
"intersect_one_round",
"(",
"candidates",
",",
"intersections",
")",
":",
"next_candidates",
"=",
"[",
"]",
"# NOTE: In the below we replace ``isinstance(a, B)`` with",
"# ``a.__class__ is B``, which is a 3-3.5x speedup.",
"for",
"first",
",",
"second",
"in",
"candidates",
":",
"both_linearized",
"=",
"False",
"if",
"first",
".",
"__class__",
"is",
"Linearization",
":",
"if",
"second",
".",
"__class__",
"is",
"Linearization",
":",
"both_linearized",
"=",
"True",
"bbox_int",
"=",
"bbox_intersect",
"(",
"first",
".",
"curve",
".",
"nodes",
",",
"second",
".",
"curve",
".",
"nodes",
")",
"else",
":",
"bbox_int",
"=",
"bbox_line_intersect",
"(",
"second",
".",
"nodes",
",",
"first",
".",
"start_node",
",",
"first",
".",
"end_node",
")",
"else",
":",
"if",
"second",
".",
"__class__",
"is",
"Linearization",
":",
"bbox_int",
"=",
"bbox_line_intersect",
"(",
"first",
".",
"nodes",
",",
"second",
".",
"start_node",
",",
"second",
".",
"end_node",
")",
"else",
":",
"bbox_int",
"=",
"bbox_intersect",
"(",
"first",
".",
"nodes",
",",
"second",
".",
"nodes",
")",
"if",
"bbox_int",
"==",
"BoxIntersectionType",
".",
"DISJOINT",
":",
"continue",
"elif",
"bbox_int",
"==",
"BoxIntersectionType",
".",
"TANGENT",
"and",
"not",
"both_linearized",
":",
"# NOTE: Ignore tangent bounding boxes in the linearized case",
"# because ``tangent_bbox_intersection()`` assumes that both",
"# curves are not linear.",
"tangent_bbox_intersection",
"(",
"first",
",",
"second",
",",
"intersections",
")",
"continue",
"if",
"both_linearized",
":",
"# If both ``first`` and ``second`` are linearizations, then",
"# we can intersect them immediately.",
"from_linearized",
"(",
"first",
",",
"second",
",",
"intersections",
")",
"continue",
"# If we haven't ``continue``-d, add the accepted pair.",
"# NOTE: This may be a wasted computation, e.g. if ``first``",
"# or ``second`` occur in multiple accepted pairs (the caller",
"# only passes one pair at a time). However, in practice",
"# the number of such pairs will be small so this cost",
"# will be low.",
"lin1",
"=",
"six",
".",
"moves",
".",
"map",
"(",
"Linearization",
".",
"from_shape",
",",
"first",
".",
"subdivide",
"(",
")",
")",
"lin2",
"=",
"six",
".",
"moves",
".",
"map",
"(",
"Linearization",
".",
"from_shape",
",",
"second",
".",
"subdivide",
"(",
")",
")",
"next_candidates",
".",
"extend",
"(",
"itertools",
".",
"product",
"(",
"lin1",
",",
"lin2",
")",
")",
"return",
"next_candidates"
] | 42.027027 | 21.905405 |
def _chk_type(recdef, rec):
    """Checks if type of `rec` matches `recdef`

    :param recdef: instance of RecordDef
    :param rec: instance of Record
    :raises: `TypeError`
    """
    if len(recdef) != len(rec):
        raise TypeError("Number of columns (%d) is different from RecordDef (%d)" % (len(rec), len(recdef)))
    for i, coldef in enumerate(recdef):
        try:
            expected = coldef.type
            actual = Type.equivalent_relshell_type(rec[i])
            if actual != expected:
                raise TypeError("Column %d has mismatched type: Got '%s' [%s] ; Expected [%s]" %
                                (i, rec[i], actual, expected))
        except AttributeError:
            # ``coldef.type`` is not defined, so any relshell type is
            # allowed -- but the value must still map to one.
            try:
                Type.equivalent_relshell_type(rec[i])
            except NotImplementedError as e:
                raise TypeError("%s" % (e))
"def",
"_chk_type",
"(",
"recdef",
",",
"rec",
")",
":",
"if",
"len",
"(",
"recdef",
")",
"!=",
"len",
"(",
"rec",
")",
":",
"raise",
"TypeError",
"(",
"\"Number of columns (%d) is different from RecordDef (%d)\"",
"%",
"(",
"len",
"(",
"rec",
")",
",",
"len",
"(",
"recdef",
")",
")",
")",
"for",
"i",
"in",
"xrange",
"(",
"len",
"(",
"recdef",
")",
")",
":",
"try",
":",
"def_type",
"=",
"recdef",
"[",
"i",
"]",
".",
"type",
"col_type",
"=",
"Type",
".",
"equivalent_relshell_type",
"(",
"rec",
"[",
"i",
"]",
")",
"if",
"col_type",
"!=",
"def_type",
":",
"raise",
"TypeError",
"(",
"\"Column %d has mismatched type: Got '%s' [%s] ; Expected [%s]\"",
"%",
"(",
"i",
",",
"rec",
"[",
"i",
"]",
",",
"col_type",
",",
"def_type",
")",
")",
"except",
"AttributeError",
"as",
"e",
":",
"# recdef[i].type is not defined, then any relshell type is allowed",
"try",
":",
"Type",
".",
"equivalent_relshell_type",
"(",
"rec",
"[",
"i",
"]",
")",
"except",
"NotImplementedError",
"as",
"e",
":",
"raise",
"TypeError",
"(",
"\"%s\"",
"%",
"(",
"e",
")",
")"
] | 46.136364 | 17.136364 |
def postinit(
    self, bases, body, decorators, newstyle=None, metaclass=None, keywords=None
):
        """Do some setup after initialisation.

        :param bases: What the class inherits from.
        :type bases: list(NodeNG)
        :param body: The contents of the class body.
        :type body: list(NodeNG)
        :param decorators: The decorators that are applied to this class.
        :type decorators: Decorators or None
        :param newstyle: Whether this is a new style class or not.
        :type newstyle: bool or None
        :param metaclass: The metaclass of this class.
        :type metaclass: NodeNG or None
        :param keywords: The keywords given to the class definition.
        :type keywords: list(Keyword) or None
        """
        self.bases, self.body = bases, body
        self.decorators, self.keywords = decorators, keywords
        # ``None`` means "leave the existing value untouched" for these two.
        if newstyle is not None:
            self._newstyle = newstyle
        if metaclass is not None:
            self._metaclass = metaclass
"def",
"postinit",
"(",
"self",
",",
"bases",
",",
"body",
",",
"decorators",
",",
"newstyle",
"=",
"None",
",",
"metaclass",
"=",
"None",
",",
"keywords",
"=",
"None",
")",
":",
"self",
".",
"keywords",
"=",
"keywords",
"self",
".",
"bases",
"=",
"bases",
"self",
".",
"body",
"=",
"body",
"self",
".",
"decorators",
"=",
"decorators",
"if",
"newstyle",
"is",
"not",
"None",
":",
"self",
".",
"_newstyle",
"=",
"newstyle",
"if",
"metaclass",
"is",
"not",
"None",
":",
"self",
".",
"_metaclass",
"=",
"metaclass"
] | 32.612903 | 18 |
def data(self, column, role):
        """Return the data for the specified column and role

        For DisplayRole the element in the list is converted to a string
        and returned; any other role, or an out-of-range column, yields
        ``None``.

        :param column: the data column
        :type column: int
        :param role: the data role
        :type role: QtCore.Qt.ItemDataRole
        :returns: data depending on the role, or None if the column is out of range
        :rtype: depending on the role or None
        :raises: None
        """
        if role != QtCore.Qt.DisplayRole:
            return None
        if 0 <= column < len(self._list):
            return str(self._list[column])
        return None
"def",
"data",
"(",
"self",
",",
"column",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
":",
"if",
"column",
">=",
"0",
"and",
"column",
"<",
"len",
"(",
"self",
".",
"_list",
")",
":",
"return",
"str",
"(",
"self",
".",
"_list",
"[",
"column",
"]",
")"
] | 38.8125 | 16 |
def to_type(self, dtype: type, *cols, **kwargs):
        """
        Convert column values to a given type in the main dataframe.

        :param dtype: a type to convert to: ex: ``str``
        :type dtype: type
        :param cols: names of the columns, at least one
        :param kwargs: keyword arguments forwarded to ``df.astype``
        :example: ``ds.to_type(str, "mycol")``
        """
        try:
            known_cols = self.df.columns.values
            for name in cols:
                # Abort on the first unknown column; columns named earlier
                # have already been converted at that point.
                if name not in known_cols:
                    self.err("Column " + name + " not found")
                    return
                self.df[name] = self.df[name].astype(dtype, **kwargs)
        except Exception as e:
            self.err(e, "Can not convert to type")
"def",
"to_type",
"(",
"self",
",",
"dtype",
":",
"type",
",",
"*",
"cols",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"allcols",
"=",
"self",
".",
"df",
".",
"columns",
".",
"values",
"for",
"col",
"in",
"cols",
":",
"if",
"col",
"not",
"in",
"allcols",
":",
"self",
".",
"err",
"(",
"\"Column \"",
"+",
"col",
"+",
"\" not found\"",
")",
"return",
"self",
".",
"df",
"[",
"col",
"]",
"=",
"self",
".",
"df",
"[",
"col",
"]",
".",
"astype",
"(",
"dtype",
",",
"*",
"*",
"kwargs",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"err",
"(",
"e",
",",
"\"Can not convert to type\"",
")"
] | 34.913043 | 13.608696 |
def create(self):
        """
        Run ``ansible-playbook`` against the create playbook.

        :return: None
        """
        playbook = self._get_ansible_playbook(self.playbooks.create)
        playbook.execute()
"def",
"create",
"(",
"self",
")",
":",
"pb",
"=",
"self",
".",
"_get_ansible_playbook",
"(",
"self",
".",
"playbooks",
".",
"create",
")",
"pb",
".",
"execute",
"(",
")"
] | 25.777778 | 20.888889 |
def optional(e, default=Ignore):
    """
    Create a PEG function to optionally match an expression.
    """
    def match_optional(s, grm=None, pos=0):
        # Success passes the result straight through; a PegreError is
        # converted into a zero-width match carrying *default*.
        try:
            result = e(s, grm, pos)
        except PegreError:
            result = PegreResult(s, default, (pos, pos))
        return result
    return match_optional
"def",
"optional",
"(",
"e",
",",
"default",
"=",
"Ignore",
")",
":",
"def",
"match_optional",
"(",
"s",
",",
"grm",
"=",
"None",
",",
"pos",
"=",
"0",
")",
":",
"try",
":",
"return",
"e",
"(",
"s",
",",
"grm",
",",
"pos",
")",
"except",
"PegreError",
":",
"return",
"PegreResult",
"(",
"s",
",",
"default",
",",
"(",
"pos",
",",
"pos",
")",
")",
"return",
"match_optional"
] | 29.9 | 10.9 |
def _plot_one_threshold_panel(subplot_index, objinfo, magbins, ykey,
                              min_stdev, ylabel, title, yscale_factor=1.0):
    '''Draw one SDSS r vs. variability-index panel into the current figure.

    Plots the raw per-object values, the binned median curve, and the
    threshold curve (binned median + min_stdev * binned stdev). Note the
    stdev term is deliberately NOT scaled by `yscale_factor` (matches the
    original lcmad panel, where only the MAD values are scaled by 1.483).
    '''
    plt.subplot(subplot_index)
    # raw per-object values
    plt.plot(objinfo['sdssr'],
             np.array(objinfo[ykey])*yscale_factor,
             marker='.', ms=1.0, linestyle='none',
             rasterized=True)
    # binned median curve
    plt.plot(objinfo['binned_sdssr_median'],
             np.array(objinfo['binned_%s_median' % ykey])*yscale_factor,
             linewidth=3.0)
    # threshold curve: median + min_stdev * stdev
    plt.plot(
        objinfo['binned_sdssr_median'],
        np.array(objinfo['binned_%s_median' % ykey])*yscale_factor +
        min_stdev*np.array(objinfo['binned_%s_stdev' % ykey]),
        linewidth=3.0, linestyle='dashed'
    )
    plt.xlim((magbins.min()-0.25, magbins.max()))
    plt.xlabel('SDSS r')
    plt.ylabel(ylabel)
    plt.title(title)
    plt.yscale('log')
    plt.tight_layout()


def plot_variability_thresholds(varthreshpkl,
                                xmin_lcmad_stdev=5.0,
                                xmin_stetj_stdev=2.0,
                                xmin_iqr_stdev=2.0,
                                xmin_inveta_stdev=2.0,
                                lcformat='hat-sql',
                                lcformatdir=None,
                                magcols=None):
    '''This makes plots for the variability threshold distributions.

    Parameters
    ----------
    varthreshpkl : str
        The pickle produced by the variability-threshold function.

    xmin_lcmad_stdev,xmin_stetj_stdev,xmin_iqr_stdev,xmin_inveta_stdev : float or np.array
        Values of the threshold values to override the ones in the
        `varthreshpkl`. If provided (truthy), will plot the thresholds
        accordingly instead of using the ones in the input pickle directly.

    lcformat : str
        This is the `formatkey` associated with your light curve format,
        previously registered with `lcproc.register_lcformat`.

    lcformatdir : str or None
        If provided, the path to a directory containing lcformat
        description JSONs outside the usual lcproc search directories.

    magcols : list of str or None
        The magcol keys to use from the lcdict. If None, the magcols from
        the registered LC format are used.

    Returns
    -------
    str or None
        The file name of the threshold plot generated for the last magcol
        processed (one PNG is written per magcol). None if the LC format
        could not be determined.
    '''
    try:
        formatinfo = get_lcformat(lcformat,
                                  use_lcformat_dir=lcformatdir)
        if formatinfo:
            (dfileglob, readerfunc,
             dtimecols, dmagcols, derrcols,
             magsarefluxes, normfunc) = formatinfo
        else:
            LOGERROR("can't figure out the light curve format")
            return None
    except Exception as e:
        LOGEXCEPTION("can't figure out the light curve format")
        return None

    if magcols is None:
        magcols = dmagcols

    with open(varthreshpkl, 'rb') as infd:
        allobjects = pickle.load(infd)

    magbins = allobjects['magbins']
    plotfile = None

    for magcol in magcols:

        objinfo = allobjects[magcol]

        # NOTE: the defaults are truthy, so the pickle's stored thresholds
        # are only used when the caller explicitly passes a falsy override.
        min_lcmad_stdev = (
            xmin_lcmad_stdev or objinfo['min_lcmad_stdev']
        )
        min_stetj_stdev = (
            xmin_stetj_stdev or objinfo['min_stetj_stdev']
        )
        min_iqr_stdev = (
            xmin_iqr_stdev or objinfo['min_iqr_stdev']
        )
        min_inveta_stdev = (
            xmin_inveta_stdev or objinfo['min_inveta_stdev']
        )

        plt.figure(figsize=(20, 16))

        # the mag vs lcmad (MAD x 1.483 approximates the stdev)
        _plot_one_threshold_panel(
            221, objinfo, magbins, 'lcmad', min_lcmad_stdev,
            r'lightcurve RMS (MAD $\times$ 1.483)',
            '%s - SDSS r vs. light curve RMS' % magcol,
            yscale_factor=1.483
        )

        # the mag vs stetsonj
        _plot_one_threshold_panel(
            222, objinfo, magbins, 'stetsonj', min_stetj_stdev,
            'Stetson J index',
            '%s - SDSS r vs. Stetson J index' % magcol
        )

        # the mag vs IQR
        _plot_one_threshold_panel(
            223, objinfo, magbins, 'iqr', min_iqr_stdev,
            'IQR',
            '%s - SDSS r vs. IQR' % magcol
        )

        # the mag vs inveta (was mislabeled "IQR" in a stale comment)
        _plot_one_threshold_panel(
            224, objinfo, magbins, 'inveta', min_inveta_stdev,
            r'$1/\eta$',
            r'%s - SDSS r vs. $1/\eta$' % magcol
        )

        plotfile = 'varfeatures-%s-%s-distributions.png' % (varthreshpkl,
                                                            magcol)
        plt.savefig(plotfile, bbox_inches='tight')
        plt.close('all')

    # return the last plot written, matching the documented contract
    # (previously the docstring promised a filename but None was returned)
    return plotfile
"def",
"plot_variability_thresholds",
"(",
"varthreshpkl",
",",
"xmin_lcmad_stdev",
"=",
"5.0",
",",
"xmin_stetj_stdev",
"=",
"2.0",
",",
"xmin_iqr_stdev",
"=",
"2.0",
",",
"xmin_inveta_stdev",
"=",
"2.0",
",",
"lcformat",
"=",
"'hat-sql'",
",",
"lcformatdir",
"=",
"None",
",",
"magcols",
"=",
"None",
")",
":",
"try",
":",
"formatinfo",
"=",
"get_lcformat",
"(",
"lcformat",
",",
"use_lcformat_dir",
"=",
"lcformatdir",
")",
"if",
"formatinfo",
":",
"(",
"dfileglob",
",",
"readerfunc",
",",
"dtimecols",
",",
"dmagcols",
",",
"derrcols",
",",
"magsarefluxes",
",",
"normfunc",
")",
"=",
"formatinfo",
"else",
":",
"LOGERROR",
"(",
"\"can't figure out the light curve format\"",
")",
"return",
"None",
"except",
"Exception",
"as",
"e",
":",
"LOGEXCEPTION",
"(",
"\"can't figure out the light curve format\"",
")",
"return",
"None",
"if",
"magcols",
"is",
"None",
":",
"magcols",
"=",
"dmagcols",
"with",
"open",
"(",
"varthreshpkl",
",",
"'rb'",
")",
"as",
"infd",
":",
"allobjects",
"=",
"pickle",
".",
"load",
"(",
"infd",
")",
"magbins",
"=",
"allobjects",
"[",
"'magbins'",
"]",
"for",
"magcol",
"in",
"magcols",
":",
"min_lcmad_stdev",
"=",
"(",
"xmin_lcmad_stdev",
"or",
"allobjects",
"[",
"magcol",
"]",
"[",
"'min_lcmad_stdev'",
"]",
")",
"min_stetj_stdev",
"=",
"(",
"xmin_stetj_stdev",
"or",
"allobjects",
"[",
"magcol",
"]",
"[",
"'min_stetj_stdev'",
"]",
")",
"min_iqr_stdev",
"=",
"(",
"xmin_iqr_stdev",
"or",
"allobjects",
"[",
"magcol",
"]",
"[",
"'min_iqr_stdev'",
"]",
")",
"min_inveta_stdev",
"=",
"(",
"xmin_inveta_stdev",
"or",
"allobjects",
"[",
"magcol",
"]",
"[",
"'min_inveta_stdev'",
"]",
")",
"fig",
"=",
"plt",
".",
"figure",
"(",
"figsize",
"=",
"(",
"20",
",",
"16",
")",
")",
"# the mag vs lcmad",
"plt",
".",
"subplot",
"(",
"221",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'sdssr'",
"]",
",",
"allobjects",
"[",
"magcol",
"]",
"[",
"'lcmad'",
"]",
"*",
"1.483",
",",
"marker",
"=",
"'.'",
",",
"ms",
"=",
"1.0",
",",
"linestyle",
"=",
"'none'",
",",
"rasterized",
"=",
"True",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_lcmad_median'",
"]",
")",
"*",
"1.483",
",",
"linewidth",
"=",
"3.0",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_lcmad_median'",
"]",
")",
"*",
"1.483",
"+",
"min_lcmad_stdev",
"*",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_lcmad_stdev'",
"]",
")",
",",
"linewidth",
"=",
"3.0",
",",
"linestyle",
"=",
"'dashed'",
")",
"plt",
".",
"xlim",
"(",
"(",
"magbins",
".",
"min",
"(",
")",
"-",
"0.25",
",",
"magbins",
".",
"max",
"(",
")",
")",
")",
"plt",
".",
"xlabel",
"(",
"'SDSS r'",
")",
"plt",
".",
"ylabel",
"(",
"r'lightcurve RMS (MAD $\\times$ 1.483)'",
")",
"plt",
".",
"title",
"(",
"'%s - SDSS r vs. light curve RMS'",
"%",
"magcol",
")",
"plt",
".",
"yscale",
"(",
"'log'",
")",
"plt",
".",
"tight_layout",
"(",
")",
"# the mag vs stetsonj",
"plt",
".",
"subplot",
"(",
"222",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'sdssr'",
"]",
",",
"allobjects",
"[",
"magcol",
"]",
"[",
"'stetsonj'",
"]",
",",
"marker",
"=",
"'.'",
",",
"ms",
"=",
"1.0",
",",
"linestyle",
"=",
"'none'",
",",
"rasterized",
"=",
"True",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_stetsonj_median'",
"]",
",",
"linewidth",
"=",
"3.0",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_stetsonj_median'",
"]",
")",
"+",
"min_stetj_stdev",
"*",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_stetsonj_stdev'",
"]",
")",
",",
"linewidth",
"=",
"3.0",
",",
"linestyle",
"=",
"'dashed'",
")",
"plt",
".",
"xlim",
"(",
"(",
"magbins",
".",
"min",
"(",
")",
"-",
"0.25",
",",
"magbins",
".",
"max",
"(",
")",
")",
")",
"plt",
".",
"xlabel",
"(",
"'SDSS r'",
")",
"plt",
".",
"ylabel",
"(",
"'Stetson J index'",
")",
"plt",
".",
"title",
"(",
"'%s - SDSS r vs. Stetson J index'",
"%",
"magcol",
")",
"plt",
".",
"yscale",
"(",
"'log'",
")",
"plt",
".",
"tight_layout",
"(",
")",
"# the mag vs IQR",
"plt",
".",
"subplot",
"(",
"223",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'sdssr'",
"]",
",",
"allobjects",
"[",
"magcol",
"]",
"[",
"'iqr'",
"]",
",",
"marker",
"=",
"'.'",
",",
"ms",
"=",
"1.0",
",",
"linestyle",
"=",
"'none'",
",",
"rasterized",
"=",
"True",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_iqr_median'",
"]",
",",
"linewidth",
"=",
"3.0",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_iqr_median'",
"]",
")",
"+",
"min_iqr_stdev",
"*",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_iqr_stdev'",
"]",
")",
",",
"linewidth",
"=",
"3.0",
",",
"linestyle",
"=",
"'dashed'",
")",
"plt",
".",
"xlabel",
"(",
"'SDSS r'",
")",
"plt",
".",
"ylabel",
"(",
"'IQR'",
")",
"plt",
".",
"title",
"(",
"'%s - SDSS r vs. IQR'",
"%",
"magcol",
")",
"plt",
".",
"xlim",
"(",
"(",
"magbins",
".",
"min",
"(",
")",
"-",
"0.25",
",",
"magbins",
".",
"max",
"(",
")",
")",
")",
"plt",
".",
"yscale",
"(",
"'log'",
")",
"plt",
".",
"tight_layout",
"(",
")",
"# the mag vs IQR",
"plt",
".",
"subplot",
"(",
"224",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'sdssr'",
"]",
",",
"allobjects",
"[",
"magcol",
"]",
"[",
"'inveta'",
"]",
",",
"marker",
"=",
"'.'",
",",
"ms",
"=",
"1.0",
",",
"linestyle",
"=",
"'none'",
",",
"rasterized",
"=",
"True",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_inveta_median'",
"]",
",",
"linewidth",
"=",
"3.0",
")",
"plt",
".",
"plot",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_sdssr_median'",
"]",
",",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_inveta_median'",
"]",
")",
"+",
"min_inveta_stdev",
"*",
"np",
".",
"array",
"(",
"allobjects",
"[",
"magcol",
"]",
"[",
"'binned_inveta_stdev'",
"]",
")",
",",
"linewidth",
"=",
"3.0",
",",
"linestyle",
"=",
"'dashed'",
")",
"plt",
".",
"xlabel",
"(",
"'SDSS r'",
")",
"plt",
".",
"ylabel",
"(",
"r'$1/\\eta$'",
")",
"plt",
".",
"title",
"(",
"r'%s - SDSS r vs. $1/\\eta$'",
"%",
"magcol",
")",
"plt",
".",
"xlim",
"(",
"(",
"magbins",
".",
"min",
"(",
")",
"-",
"0.25",
",",
"magbins",
".",
"max",
"(",
")",
")",
")",
"plt",
".",
"yscale",
"(",
"'log'",
")",
"plt",
".",
"tight_layout",
"(",
")",
"plt",
".",
"savefig",
"(",
"'varfeatures-%s-%s-distributions.png'",
"%",
"(",
"varthreshpkl",
",",
"magcol",
")",
",",
"bbox_inches",
"=",
"'tight'",
")",
"plt",
".",
"close",
"(",
"'all'",
")"
] | 36.224044 | 19.852459 |
def mute(self, mute):
    """Mute or unmute the receiver via HTTP GET command.

    :param mute: True to mute the receiver, False to unmute it.
    :return: True when the command was accepted, False otherwise.
    """
    try:
        if mute:
            command_url = self._urls.command_mute_on
            new_state = STATE_ON
        else:
            command_url = self._urls.command_mute_off
            new_state = STATE_OFF
        if not self.send_get_command(command_url):
            return False
        # Only record the new mute state after the receiver accepted it.
        self._mute = new_state
        return True
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: mute command not sent.")
        return False
"def",
"mute",
"(",
"self",
",",
"mute",
")",
":",
"try",
":",
"if",
"mute",
":",
"if",
"self",
".",
"send_get_command",
"(",
"self",
".",
"_urls",
".",
"command_mute_on",
")",
":",
"self",
".",
"_mute",
"=",
"STATE_ON",
"return",
"True",
"else",
":",
"return",
"False",
"else",
":",
"if",
"self",
".",
"send_get_command",
"(",
"self",
".",
"_urls",
".",
"command_mute_off",
")",
":",
"self",
".",
"_mute",
"=",
"STATE_OFF",
"return",
"True",
"else",
":",
"return",
"False",
"except",
"requests",
".",
"exceptions",
".",
"RequestException",
":",
"_LOGGER",
".",
"error",
"(",
"\"Connection error: mute command not sent.\"",
")",
"return",
"False"
] | 36.333333 | 15.611111 |
def get_variables_substitution_dictionaries(self, lhs_graph, rhs_graph):
    """
    Look for sub-isomorphisms of ``rhs_graph`` inside ``lhs_graph``.

    :param lhs_graph: The graph to search within (the bigger graph)
    :param rhs_graph: The smaller graph
    :return: The matching-name dictionaries; three empty dicts when
             ``rhs_graph`` is empty
    """
    if not rhs_graph:
        return {}, {}, {}
    # Both graphs must be registered in the shared namespace before the
    # matcher can compare them.
    for graph in (lhs_graph, rhs_graph):
        self.matching_code_container.add_graph_to_namespace(graph)
    return self.__collect_variables_that_match_graph(lhs_graph, rhs_graph)
"def",
"get_variables_substitution_dictionaries",
"(",
"self",
",",
"lhs_graph",
",",
"rhs_graph",
")",
":",
"if",
"not",
"rhs_graph",
":",
"return",
"{",
"}",
",",
"{",
"}",
",",
"{",
"}",
"self",
".",
"matching_code_container",
".",
"add_graph_to_namespace",
"(",
"lhs_graph",
")",
"self",
".",
"matching_code_container",
".",
"add_graph_to_namespace",
"(",
"rhs_graph",
")",
"return",
"self",
".",
"__collect_variables_that_match_graph",
"(",
"lhs_graph",
",",
"rhs_graph",
")"
] | 45.076923 | 19.692308 |
def authenticate(self, code: str) -> 'Preston':
    """Authenticate using the code from the EVE SSO.

    A new Preston object is returned; this object is not modified.
    The intended usage is:

        auth = preston.authenticate('some_code_here')

    Args:
        code: SSO authorization code

    Returns:
        a new, authenticated Preston instance

    Raises:
        Exception: if the token endpoint does not respond with HTTP 200
    """
    headers = self._get_authorization_headers()
    data = {
        'grant_type': 'authorization_code',
        'code': code
    }
    r = self.session.post(self.TOKEN_URL, headers=headers, data=data)
    # idiomatic comparison; also fixes the "repsonse" typo in the message
    if r.status_code != 200:
        raise Exception(f'Could not authenticate, got response code {r.status_code}')
    response_data = r.json()
    new_kwargs = dict(self._kwargs)
    new_kwargs['access_token'] = response_data['access_token']
    new_kwargs['access_expiration'] = time.time() + float(response_data['expires_in'])
    new_kwargs['refresh_token'] = response_data['refresh_token']
    return Preston(**new_kwargs)
"def",
"authenticate",
"(",
"self",
",",
"code",
":",
"str",
")",
"->",
"'Preston'",
":",
"headers",
"=",
"self",
".",
"_get_authorization_headers",
"(",
")",
"data",
"=",
"{",
"'grant_type'",
":",
"'authorization_code'",
",",
"'code'",
":",
"code",
"}",
"r",
"=",
"self",
".",
"session",
".",
"post",
"(",
"self",
".",
"TOKEN_URL",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"data",
")",
"if",
"not",
"r",
".",
"status_code",
"==",
"200",
":",
"raise",
"Exception",
"(",
"f'Could not authenticate, got repsonse code {r.status_code}'",
")",
"new_kwargs",
"=",
"dict",
"(",
"self",
".",
"_kwargs",
")",
"response_data",
"=",
"r",
".",
"json",
"(",
")",
"new_kwargs",
"[",
"'access_token'",
"]",
"=",
"response_data",
"[",
"'access_token'",
"]",
"new_kwargs",
"[",
"'access_expiration'",
"]",
"=",
"time",
".",
"time",
"(",
")",
"+",
"float",
"(",
"response_data",
"[",
"'expires_in'",
"]",
")",
"new_kwargs",
"[",
"'refresh_token'",
"]",
"=",
"response_data",
"[",
"'refresh_token'",
"]",
"return",
"Preston",
"(",
"*",
"*",
"new_kwargs",
")"
] | 35.896552 | 21.482759 |
def density_grid(*args, **kwargs):
    """
    Estimate point density of the given linear orientation measurements
    (interpreted as poles, lines, rakes, or "raw" longitudes and latitudes
    depending on the ``measurement`` keyword argument). Returns a regular
    (in lat-long space) grid of density estimates over a hemispherical
    surface.

    Parameters
    ----------
    *args : 2 or 3 sequences of measurements
        By default, expected to be ``strike`` & ``dip``, both array-like
        sequences representing poles to planes. (Rake measurements require
        three parameters, hence the variable number of arguments.) The
        ``measurement`` kwarg controls how these are interpreted.
    measurement : string, optional
        How the input arguments are interpreted. Defaults to ``"poles"``.
        One of ``"poles"`` (strikes, dips), ``"lines"`` (plunges,
        bearings), ``"rakes"`` (strikes, dips, rakes), or ``"radians"``
        (raw lon, lat in the stereonet's underlying coordinate system).
    method : string, optional
        Density estimation method. Defaults to ``"exponential_kamb"``.
        One of ``"exponential_kamb"``, ``"linear_kamb"``, ``"kamb"``
        (modified/unmodified Kamb methods [2]_, with smoothing per [1]_;
        units are standard deviations from uniform) or ``"schmidt"``
        (traditional 1% counting circle; units are points per 1% area).
    sigma : int or float, optional
        Expected number of standard deviations by which a uniform random
        sample would vary; controls the counting-circle size and therefore
        smoothing for Kamb-based methods. Defaults to 3.
    gridsize : int or 2-item tuple of ints, optional
        Size of the estimation grid: an int N gives an NxN grid, a tuple
        is (nrows, ncols). Defaults to 100.
    weights : array-like, optional
        Relative weight of each measurement; normalized to sum to 1.
        Defaults to None.

    Returns
    -------
    xi, yi, zi : 2D arrays
        Longitude, latitude and density values of the regularly gridded
        density estimates. Longitude and latitude are in radians.

    See Also
    ---------
    mplstereonet.StereonetAxes.density_contourf
    mplstereonet.StereonetAxes.density_contour

    References
    ----------
    .. [1] Vollmer, 1995. C Program for Automatic Contouring of Spherical
       Orientation Data Using a Modified Kamb Method. Computers &
       Geosciences, Vol. 21, No. 1, pp. 31--49.

    .. [2] Kamb, 1959. Ice Petrofabric Observations from Blue Glacier,
       Washington, in Relation to Theory and Experiment. Journal of
       Geophysical Research, Vol. 64, No. 11, pp. 1891--1909.
    """
    def _identity(x, y):
        return x, y

    measurement = kwargs.get('measurement', 'poles')
    gridsize = kwargs.get('gridsize', 100)
    weights = kwargs.get('weights', None)
    method = kwargs.get('method', 'exponential_kamb')
    sigma = kwargs.get('sigma', 3)

    # A bare int means a square (N x N) grid; tuples pass through as-is.
    try:
        gridsize = int(gridsize)
        gridsize = (gridsize, gridsize)
    except TypeError:
        pass

    coord_funcs = {
        'poles': stereonet_math.pole,
        'lines': stereonet_math.line,
        'rakes': stereonet_math.rake,
        'radians': _identity,
    }
    counting_funcs = {
        'linear_kamb': _linear_inverse_kamb,
        'square_kamb': _square_inverse_kamb,
        'schmidt': _schmidt_count,
        'kamb': _kamb_count,
        'exponential_kamb': _exponential_kamb,
    }

    lon, lat = coord_funcs[measurement](*args)
    lon, lat, z = _count_points(lon, lat, counting_funcs[method],
                                sigma, gridsize, weights)
    return lon, lat, z
"def",
"density_grid",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"do_nothing",
"(",
"x",
",",
"y",
")",
":",
"return",
"x",
",",
"y",
"measurement",
"=",
"kwargs",
".",
"get",
"(",
"'measurement'",
",",
"'poles'",
")",
"gridsize",
"=",
"kwargs",
".",
"get",
"(",
"'gridsize'",
",",
"100",
")",
"weights",
"=",
"kwargs",
".",
"get",
"(",
"'weights'",
",",
"None",
")",
"try",
":",
"gridsize",
"=",
"int",
"(",
"gridsize",
")",
"gridsize",
"=",
"(",
"gridsize",
",",
"gridsize",
")",
"except",
"TypeError",
":",
"pass",
"func",
"=",
"{",
"'poles'",
":",
"stereonet_math",
".",
"pole",
",",
"'lines'",
":",
"stereonet_math",
".",
"line",
",",
"'rakes'",
":",
"stereonet_math",
".",
"rake",
",",
"'radians'",
":",
"do_nothing",
"}",
"[",
"measurement",
"]",
"lon",
",",
"lat",
"=",
"func",
"(",
"*",
"args",
")",
"method",
"=",
"kwargs",
".",
"get",
"(",
"'method'",
",",
"'exponential_kamb'",
")",
"sigma",
"=",
"kwargs",
".",
"get",
"(",
"'sigma'",
",",
"3",
")",
"func",
"=",
"{",
"'linear_kamb'",
":",
"_linear_inverse_kamb",
",",
"'square_kamb'",
":",
"_square_inverse_kamb",
",",
"'schmidt'",
":",
"_schmidt_count",
",",
"'kamb'",
":",
"_kamb_count",
",",
"'exponential_kamb'",
":",
"_exponential_kamb",
",",
"}",
"[",
"method",
"]",
"lon",
",",
"lat",
",",
"z",
"=",
"_count_points",
"(",
"lon",
",",
"lat",
",",
"func",
",",
"sigma",
",",
"gridsize",
",",
"weights",
")",
"return",
"lon",
",",
"lat",
",",
"z"
] | 43.786885 | 22.704918 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.