text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def id(self):
"""A unique, stable, hashable id over the set of pinned artifacts."""
if not self._id:
# NB(gmalmquist): This id is not cheap to compute if there are a large number of artifacts.
# We cache it here, but invalidate the cached value if an artifact gets added or changed.
self._id = tuple(sorted(map(str, self)))
return self._id | [
"def",
"id",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_id",
":",
"# NB(gmalmquist): This id is not cheap to compute if there are a large number of artifacts.",
"# We cache it here, but invalidate the cached value if an artifact gets added or changed.",
"self",
".",
"_id",
... | 51.857143 | 26.571429 |
def _append_theme_dir(self, name):
"""Append a theme dir to the Tk interpreter auto_path"""
path = "[{}]".format(get_file_directory() + "/" + name)
self.tk.call("lappend", "auto_path", path) | [
"def",
"_append_theme_dir",
"(",
"self",
",",
"name",
")",
":",
"path",
"=",
"\"[{}]\"",
".",
"format",
"(",
"get_file_directory",
"(",
")",
"+",
"\"/\"",
"+",
"name",
")",
"self",
".",
"tk",
".",
"call",
"(",
"\"lappend\"",
",",
"\"auto_path\"",
",",
... | 52.75 | 9.75 |
def _set_used_as_input_variables_by_entity(self) -> Dict[str, List[str]]:
'''Identify and set the good input variables for the different entities'''
if self.used_as_input_variables_by_entity is not None:
return
tax_benefit_system = self.tax_benefit_system
assert set(self.used_as_input_variables) <= set(tax_benefit_system.variables.keys()), \
"Some variables used as input variables are not part of the tax benefit system:\n {}".format(
set(self.used_as_input_variables).difference(set(tax_benefit_system.variables.keys()))
)
self.used_as_input_variables_by_entity = dict()
for entity in tax_benefit_system.entities:
self.used_as_input_variables_by_entity[entity.key] = [
variable
for variable in self.used_as_input_variables
if tax_benefit_system.get_variable(variable).entity == entity
]
return self.used_as_input_variables_by_entity | [
"def",
"_set_used_as_input_variables_by_entity",
"(",
"self",
")",
"->",
"Dict",
"[",
"str",
",",
"List",
"[",
"str",
"]",
"]",
":",
"if",
"self",
".",
"used_as_input_variables_by_entity",
"is",
"not",
"None",
":",
"return",
"tax_benefit_system",
"=",
"self",
... | 45.818182 | 31.636364 |
def latch(self):
"""Convert the current value inside this config descriptor to a python object.
The conversion proceeds by mapping the given type name to a native
python class and performing the conversion. You can override what
python object is used as the destination class by passing a
python_type parameter to __init__.
The default mapping is:
- char (u)int8_t, (u)int16_t, (u)int32_t: int
- char[] (u)int8_t[], (u)int16_t[]0, u(int32_t): list of int
If you want to parse a char[] or uint8_t[] as a python string, it
needs to be null terminated and you should pass python_type='string'.
If you are declaring a scalar integer type and wish it to be decoded
as a bool, you can pass python_type='bool' to the constructor.
All integers are decoded as little-endian.
Returns:
object: The corresponding python object.
This will either be an int, list of int or string based on the
type_name specified and the optional python_type keyword argument
to the constructor.
Raises:
DataError: if the object cannot be converted to the desired type.
ArgumentError: if an invalid python_type was specified during construction.
"""
if len(self.current_value) == 0:
raise DataError("There was no data in a config variable during latching", name=self.name)
# Make sure the data ends on a unit boundary. This would have happened automatically
# in an actual device by the C runtime 0 padding out the storage area.
remaining = len(self.current_value) % self.unit_size
if remaining > 0:
self.current_value += bytearray(remaining)
if self.special_type == 'string':
if self.current_value[-1] != 0:
raise DataError("String type was specified by data did not end with a null byte", data=self.current_value, name=self.name)
return bytes(self.current_value[:-1]).decode('utf-8')
fmt_code = "<" + (self.base_type * (len(self.current_value) // self.unit_size))
data = struct.unpack(fmt_code, self.current_value)
if self.variable:
data = list(data)
else:
data = data[0]
if self.special_type == 'bool':
data = bool(data)
return data | [
"def",
"latch",
"(",
"self",
")",
":",
"if",
"len",
"(",
"self",
".",
"current_value",
")",
"==",
"0",
":",
"raise",
"DataError",
"(",
"\"There was no data in a config variable during latching\"",
",",
"name",
"=",
"self",
".",
"name",
")",
"# Make sure the data... | 40.135593 | 28.355932 |
def handle(self, *args, **options):
"""
get the trigger to fire
"""
trigger_id = options.get('trigger_id')
trigger = TriggerService.objects.filter(
id=int(trigger_id),
status=True,
user__is_active=True,
provider_failed__lt=settings.DJANGO_TH.get('failed_tries', 10),
consumer_failed__lt=settings.DJANGO_TH.get('failed_tries', 10)
).select_related('consumer__name', 'provider__name')
try:
with Pool(processes=1) as pool:
r = Read()
result = pool.map_async(r.reading, trigger)
result.get(timeout=360)
p = Pub()
result = pool.map_async(p.publishing, trigger)
result.get(timeout=360)
cache.delete('django_th' + '_fire_trigger_' + str(trigger_id))
except TimeoutError as e:
logger.warning(e) | [
"def",
"handle",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"trigger_id",
"=",
"options",
".",
"get",
"(",
"'trigger_id'",
")",
"trigger",
"=",
"TriggerService",
".",
"objects",
".",
"filter",
"(",
"id",
"=",
"int",
"(",
"trig... | 38.666667 | 14.333333 |
async def rank(self, request, origin: Optional[Text]) \
-> Tuple[
float,
Optional[BaseTrigger],
Optional[type],
Optional[bool],
]:
"""
Computes the rank of this transition for a given request.
It returns (in order):
- The score (from 0 to 1)
- The trigger instance (if it matched)
- The class of the destination state (if matched)
"""
if self.origin_name == origin:
score = 1.0
elif self.origin_name is None:
score = settings.JUMPING_TRIGGER_PENALTY
else:
return 0.0, None, None, None
trigger = self.factory(request)
rank = await run_or_return(trigger.rank())
score *= self.weight * (rank or 0.0)
return score, trigger, self.dest, self.do_not_register | [
"async",
"def",
"rank",
"(",
"self",
",",
"request",
",",
"origin",
":",
"Optional",
"[",
"Text",
"]",
")",
"->",
"Tuple",
"[",
"float",
",",
"Optional",
"[",
"BaseTrigger",
"]",
",",
"Optional",
"[",
"type",
"]",
",",
"Optional",
"[",
"bool",
"]",
... | 30.206897 | 16 |
def concept_cuts(direction, node_indices, node_labels=None):
"""Generator over all concept-syle cuts for these nodes."""
for partition in mip_partitions(node_indices, node_indices):
yield KCut(direction, partition, node_labels) | [
"def",
"concept_cuts",
"(",
"direction",
",",
"node_indices",
",",
"node_labels",
"=",
"None",
")",
":",
"for",
"partition",
"in",
"mip_partitions",
"(",
"node_indices",
",",
"node_indices",
")",
":",
"yield",
"KCut",
"(",
"direction",
",",
"partition",
",",
... | 60 | 14.25 |
def determine_opening_indent(indent_texts):
'''Determine the opening indent level for a docstring.
The opening indent level is the indent level is the first non-zero indent
level of a non-empty line in the docstring.
Args:
indent_texts: The lines of the docstring as an iterable over 2-tuples
each containing an integer indent level as the first element and
the text as the second element.
Returns:
The opening indent level as an integer.
'''
num_lines = len(indent_texts)
if num_lines < 1:
return 0
assert num_lines >= 1
first_line_indent = indent_texts[0][0]
if num_lines == 1:
return first_line_indent
assert num_lines >= 2
second_line_indent = indent_texts[1][0]
second_line_text = indent_texts[1][1]
if len(second_line_text) == 0:
return first_line_indent
return second_line_indent | [
"def",
"determine_opening_indent",
"(",
"indent_texts",
")",
":",
"num_lines",
"=",
"len",
"(",
"indent_texts",
")",
"if",
"num_lines",
"<",
"1",
":",
"return",
"0",
"assert",
"num_lines",
">=",
"1",
"first_line_indent",
"=",
"indent_texts",
"[",
"0",
"]",
"... | 25.542857 | 23.428571 |
def set_backgroundcolor(self, color):
'''Sets the background color of the current axes (and legend).
Use 'None' (with quotes) for transparent. To get transparent
background on saved figures, use:
pp.savefig("fig1.svg", transparent=True)
'''
ax = self.ax
ax.patch.set_facecolor(color)
lh = ax.get_legend()
if lh != None:
lh.legendPatch.set_facecolor(color)
plt.draw() | [
"def",
"set_backgroundcolor",
"(",
"self",
",",
"color",
")",
":",
"ax",
"=",
"self",
".",
"ax",
"ax",
".",
"patch",
".",
"set_facecolor",
"(",
"color",
")",
"lh",
"=",
"ax",
".",
"get_legend",
"(",
")",
"if",
"lh",
"!=",
"None",
":",
"lh",
".",
... | 36.153846 | 17.846154 |
def distance_to(self, other_catchment):
"""
Returns the distance between the centroids of two catchments in kilometers.
:param other_catchment: Catchment to calculate distance to
:type other_catchment: :class:`.Catchment`
:return: Distance between the catchments in km.
:rtype: float
"""
try:
if self.country == other_catchment.country:
try:
return 0.001 * hypot(self.descriptors.centroid_ngr.x - other_catchment.descriptors.centroid_ngr.x,
self.descriptors.centroid_ngr.y - other_catchment.descriptors.centroid_ngr.y)
except TypeError:
# In case no centroid available, just return infinity which is helpful in most cases
return float('+inf')
else:
# If the catchments are in a different country (e.g. `ni` versus `gb`) then set distance to infinity.
return float('+inf')
except (TypeError, KeyError):
raise InsufficientDataError("Catchment `descriptors` attribute must be set first.") | [
"def",
"distance_to",
"(",
"self",
",",
"other_catchment",
")",
":",
"try",
":",
"if",
"self",
".",
"country",
"==",
"other_catchment",
".",
"country",
":",
"try",
":",
"return",
"0.001",
"*",
"hypot",
"(",
"self",
".",
"descriptors",
".",
"centroid_ngr",
... | 51.727273 | 27.545455 |
def check_syntax(code):
"""Return True if syntax is okay."""
try:
return compile(code, '<string>', 'exec', dont_inherit=True)
except (SyntaxError, TypeError, ValueError):
return False | [
"def",
"check_syntax",
"(",
"code",
")",
":",
"try",
":",
"return",
"compile",
"(",
"code",
",",
"'<string>'",
",",
"'exec'",
",",
"dont_inherit",
"=",
"True",
")",
"except",
"(",
"SyntaxError",
",",
"TypeError",
",",
"ValueError",
")",
":",
"return",
"F... | 34.333333 | 17.333333 |
def writable_stream(handle):
"""Test whether a stream can be written to.
"""
if isinstance(handle, io.IOBase) and sys.version_info >= (3, 5):
return handle.writable()
try:
handle.write(b'')
except (io.UnsupportedOperation, IOError):
return False
else:
return True | [
"def",
"writable_stream",
"(",
"handle",
")",
":",
"if",
"isinstance",
"(",
"handle",
",",
"io",
".",
"IOBase",
")",
"and",
"sys",
".",
"version_info",
">=",
"(",
"3",
",",
"5",
")",
":",
"return",
"handle",
".",
"writable",
"(",
")",
"try",
":",
"... | 28.090909 | 15.727273 |
def coordinates(self):
"""
Get or set the internal coordinate system.
Available coordinate systems are:
- ``'jacobi'`` (default)
- ``'democraticheliocentric'``
- ``'whds'``
"""
i = self._coordinates
for name, _i in COORDINATES.items():
if i==_i:
return name
return i | [
"def",
"coordinates",
"(",
"self",
")",
":",
"i",
"=",
"self",
".",
"_coordinates",
"for",
"name",
",",
"_i",
"in",
"COORDINATES",
".",
"items",
"(",
")",
":",
"if",
"i",
"==",
"_i",
":",
"return",
"name",
"return",
"i"
] | 24.133333 | 14 |
def scheme(name, bins, bin_method='quantiles'):
"""Return a custom scheme based on CARTOColors.
Args:
name (str): Name of a CARTOColor.
bins (int or iterable): If an `int`, the number of bins for classifying
data. CARTOColors have 7 bins max for quantitative data, and 11 max
for qualitative data. If `bins` is a `list`, it is the upper range
for classifying data. E.g., `bins` can be of the form ``(10, 20, 30,
40, 50)``.
bin_method (str, optional): One of methods in :obj:`BinMethod`.
Defaults to ``quantiles``. If `bins` is an interable, then that is
the bin method that will be used and this will be ignored.
.. Warning::
Input types are particularly sensitive in this function, and little
feedback is given for errors. ``name`` and ``bin_method`` arguments
are case-sensitive.
"""
return {
'name': name,
'bins': bins,
'bin_method': (bin_method if isinstance(bins, int) else ''),
} | [
"def",
"scheme",
"(",
"name",
",",
"bins",
",",
"bin_method",
"=",
"'quantiles'",
")",
":",
"return",
"{",
"'name'",
":",
"name",
",",
"'bins'",
":",
"bins",
",",
"'bin_method'",
":",
"(",
"bin_method",
"if",
"isinstance",
"(",
"bins",
",",
"int",
")",... | 39.115385 | 26.884615 |
def create(cls, selection, config, **kwargs):
"""Create an ROIModel instance."""
if selection['target'] is not None:
return cls.create_from_source(selection['target'],
config, **kwargs)
else:
target_skydir = wcs_utils.get_target_skydir(selection)
return cls.create_from_position(target_skydir, config, **kwargs) | [
"def",
"create",
"(",
"cls",
",",
"selection",
",",
"config",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"selection",
"[",
"'target'",
"]",
"is",
"not",
"None",
":",
"return",
"cls",
".",
"create_from_source",
"(",
"selection",
"[",
"'target'",
"]",
",",... | 45.111111 | 19.777778 |
def _get_common_params(self, user_id, attributes):
""" Get params which are used same in both conversion and impression events.
Args:
user_id: ID for user.
attributes: Dict representing user attributes and values which need to be recorded.
Returns:
Dict consisting of parameters common to both impression and conversion events.
"""
commonParams = {}
commonParams[self.EventParams.PROJECT_ID] = self._get_project_id()
commonParams[self.EventParams.ACCOUNT_ID] = self._get_account_id()
visitor = {}
visitor[self.EventParams.END_USER_ID] = user_id
visitor[self.EventParams.SNAPSHOTS] = []
commonParams[self.EventParams.USERS] = []
commonParams[self.EventParams.USERS].append(visitor)
commonParams[self.EventParams.USERS][0][self.EventParams.ATTRIBUTES] = self._get_attributes(attributes)
commonParams[self.EventParams.SOURCE_SDK_TYPE] = 'python-sdk'
commonParams[self.EventParams.ENRICH_DECISIONS] = True
commonParams[self.EventParams.SOURCE_SDK_VERSION] = version.__version__
commonParams[self.EventParams.ANONYMIZE_IP] = self._get_anonymize_ip()
commonParams[self.EventParams.REVISION] = self._get_revision()
return commonParams | [
"def",
"_get_common_params",
"(",
"self",
",",
"user_id",
",",
"attributes",
")",
":",
"commonParams",
"=",
"{",
"}",
"commonParams",
"[",
"self",
".",
"EventParams",
".",
"PROJECT_ID",
"]",
"=",
"self",
".",
"_get_project_id",
"(",
")",
"commonParams",
"[",... | 39.933333 | 27.166667 |
def clear_symbols(self, index):
"""Clears all symbols begining with the index to the end of table"""
try:
del self.table[index:]
except Exception:
self.error()
self.table_len = len(self.table) | [
"def",
"clear_symbols",
"(",
"self",
",",
"index",
")",
":",
"try",
":",
"del",
"self",
".",
"table",
"[",
"index",
":",
"]",
"except",
"Exception",
":",
"self",
".",
"error",
"(",
")",
"self",
".",
"table_len",
"=",
"len",
"(",
"self",
".",
"table... | 35.428571 | 9.857143 |
def _baseattrs(self):
"""A dict of members expressed in literals"""
result = super()._baseattrs
result["spaces"] = self.spaces._baseattrs
return result | [
"def",
"_baseattrs",
"(",
"self",
")",
":",
"result",
"=",
"super",
"(",
")",
".",
"_baseattrs",
"result",
"[",
"\"spaces\"",
"]",
"=",
"self",
".",
"spaces",
".",
"_baseattrs",
"return",
"result"
] | 29.833333 | 15.333333 |
def top_x_bleu(query_dic, mark, x=1):
"""
Calculate the top x average bleu value predictions ranking by item, x default is set above
:param query_dic: dict, key is qid, value is (item, bleu) tuple list, which will be ranked by 'item' as key
:param mark:string, which indicates which method is evaluated, also used as output file name here.
:param x:int, define top x
:return:average bleu score
"""
all_total = 0.0
with open(top_bleu_path + mark, 'w') as writer:
for k in query_dic:
candidate_lst = query_dic[k]
top_x = sorted(candidate_lst, key=lambda a: a[0], reverse=True)[:x]
total = 0
for t in top_x:
total += t[1]
ave_bleu = total / x
writer.write('%s\tAverageBleu:%f\tTop%d:%s\n' % (k, ave_bleu, x, str(top_x)))
all_total += ave_bleu
if k in contrast_dic:
contrast_dic[k].append(str(ave_bleu))
else:
contrast_dic[k] = []
contrast_dic[k].append(str(ave_bleu))
result_string = '%s\ttop%d_Bleu:\t%f' % (mark, x, all_total / len(query_dic))
print result_string
# eval_result_dict['Bleu'].append(result_string)
return ['Bleu', result_string] | [
"def",
"top_x_bleu",
"(",
"query_dic",
",",
"mark",
",",
"x",
"=",
"1",
")",
":",
"all_total",
"=",
"0.0",
"with",
"open",
"(",
"top_bleu_path",
"+",
"mark",
",",
"'w'",
")",
"as",
"writer",
":",
"for",
"k",
"in",
"query_dic",
":",
"candidate_lst",
"... | 43.965517 | 19.758621 |
def get_extract_value_function(column_identifier):
"""
returns a function that extracts the value for a column.
"""
def extract_value(run_result):
pos = None
for i, column in enumerate(run_result.columns):
if column.title == column_identifier:
pos = i
break
if pos is None:
sys.exit('CPU time missing for task {0}.'.format(run_result.task_id[0]))
return Util.to_decimal(run_result.values[pos])
return extract_value | [
"def",
"get_extract_value_function",
"(",
"column_identifier",
")",
":",
"def",
"extract_value",
"(",
"run_result",
")",
":",
"pos",
"=",
"None",
"for",
"i",
",",
"column",
"in",
"enumerate",
"(",
"run_result",
".",
"columns",
")",
":",
"if",
"column",
".",
... | 36.357143 | 14.928571 |
def get_files_in_branch(profile, branch_sha):
"""Get all files in a branch's tree.
Args:
profile
A profile generated from ``simplygithub.authentication.profile``.
Such profiles tell this module (i) the ``repo`` to connect to,
and (ii) the ``token`` to connect with.
branch_sha
The SHA a branch's HEAD points to.
Returns:
A list of dicts containing info about each blob in the tree.
"""
tree_sha = get_commit_tree(profile, branch_sha)
files = get_files_in_tree(profile, tree_sha)
tree = [prepare(x) for x in files]
return tree | [
"def",
"get_files_in_branch",
"(",
"profile",
",",
"branch_sha",
")",
":",
"tree_sha",
"=",
"get_commit_tree",
"(",
"profile",
",",
"branch_sha",
")",
"files",
"=",
"get_files_in_tree",
"(",
"profile",
",",
"tree_sha",
")",
"tree",
"=",
"[",
"prepare",
"(",
... | 29.238095 | 22.52381 |
def _safe_call(obj, methname, *args, **kwargs):
"""
Safely calls the method with the given methname on the given
object. Remaining positional and keyword arguments are passed to
the method. The return value is None, if the method is not
available, or the return value of the method.
"""
meth = getattr(obj, methname, None)
if meth is None or not callable(meth):
return
return meth(*args, **kwargs) | [
"def",
"_safe_call",
"(",
"obj",
",",
"methname",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"meth",
"=",
"getattr",
"(",
"obj",
",",
"methname",
",",
"None",
")",
"if",
"meth",
"is",
"None",
"or",
"not",
"callable",
"(",
"meth",
")",
"... | 33.307692 | 16.076923 |
def shutdown(self, targets='all', restart=False, hub=False, block=None):
"""Terminates one or more engine processes, optionally including the hub.
Parameters
----------
targets: list of ints or 'all' [default: all]
Which engines to shutdown.
hub: bool [default: False]
Whether to include the Hub. hub=True implies targets='all'.
block: bool [default: self.block]
Whether to wait for clean shutdown replies or not.
restart: bool [default: False]
NOT IMPLEMENTED
whether to restart engines after shutting them down.
"""
if restart:
raise NotImplementedError("Engine restart is not yet implemented")
block = self.block if block is None else block
if hub:
targets = 'all'
targets = self._build_targets(targets)[0]
for t in targets:
self.session.send(self._control_socket, 'shutdown_request',
content={'restart':restart},ident=t)
error = False
if block or hub:
self._flush_ignored_control()
for i in range(len(targets)):
idents,msg = self.session.recv(self._control_socket, 0)
if self.debug:
pprint(msg)
if msg['content']['status'] != 'ok':
error = self._unwrap_exception(msg['content'])
else:
self._ignored_control_replies += len(targets)
if hub:
time.sleep(0.25)
self.session.send(self._query_socket, 'shutdown_request')
idents,msg = self.session.recv(self._query_socket, 0)
if self.debug:
pprint(msg)
if msg['content']['status'] != 'ok':
error = self._unwrap_exception(msg['content'])
if error:
raise error | [
"def",
"shutdown",
"(",
"self",
",",
"targets",
"=",
"'all'",
",",
"restart",
"=",
"False",
",",
"hub",
"=",
"False",
",",
"block",
"=",
"None",
")",
":",
"if",
"restart",
":",
"raise",
"NotImplementedError",
"(",
"\"Engine restart is not yet implemented\"",
... | 37.72 | 18.84 |
def remove_matching_braces(latex):
"""
If `latex` is surrounded by matching braces, remove them. They are not
necessary.
Parameters
----------
latex : string
Returns
-------
string
Examples
--------
>>> remove_matching_braces('{2+2}')
'2+2'
>>> remove_matching_braces('{2+2')
'{2+2'
"""
if latex.startswith('{') and latex.endswith('}'):
opened = 1
matches = True
for char in latex[1:-1]:
if char == '{':
opened += 1
elif char == '}':
opened -= 1
if opened == 0:
matches = False
if matches:
latex = latex[1:-1]
return latex | [
"def",
"remove_matching_braces",
"(",
"latex",
")",
":",
"if",
"latex",
".",
"startswith",
"(",
"'{'",
")",
"and",
"latex",
".",
"endswith",
"(",
"'}'",
")",
":",
"opened",
"=",
"1",
"matches",
"=",
"True",
"for",
"char",
"in",
"latex",
"[",
"1",
":"... | 21 | 19.848485 |
def host_dns(proxy=None):
'''
Return the DNS information of the host.
This grain is a dictionary having two keys:
- ``A``
- ``AAAA``
.. note::
This grain is disabled by default, as the proxy startup may be slower
when the lookup fails.
The user can enable it using the ``napalm_host_dns_grain`` option (in
the pillar or proxy configuration file):
.. code-block:: yaml
napalm_host_dns_grain: true
.. versionadded:: 2017.7.0
CLI Example:
.. code-block:: bash
salt 'device*' grains.get host_dns
Output:
.. code-block:: yaml
device1:
A:
- 172.31.9.153
AAAA:
- fd52:188c:c068::1
device2:
A:
- 172.31.46.249
AAAA:
- fdca:3b17:31ab::17
device3:
A:
- 172.31.8.167
AAAA:
- fd0f:9fd6:5fab::1
'''
if not __opts__.get('napalm_host_dns_grain', False):
return
device_host = host(proxy=proxy)
if device_host:
device_host_value = device_host['host']
host_dns_ret = {
'host_dns': {
'A': [],
'AAAA': []
}
}
dns_a = salt.utils.dns.lookup(device_host_value, 'A')
if dns_a:
host_dns_ret['host_dns']['A'] = dns_a
dns_aaaa = salt.utils.dns.lookup(device_host_value, 'AAAA')
if dns_aaaa:
host_dns_ret['host_dns']['AAAA'] = dns_aaaa
return host_dns_ret | [
"def",
"host_dns",
"(",
"proxy",
"=",
"None",
")",
":",
"if",
"not",
"__opts__",
".",
"get",
"(",
"'napalm_host_dns_grain'",
",",
"False",
")",
":",
"return",
"device_host",
"=",
"host",
"(",
"proxy",
"=",
"proxy",
")",
"if",
"device_host",
":",
"device_... | 24.046875 | 21.859375 |
def compute_trans(expnums, ccd, version, prefix=None, default="WCS"):
"""
Pull the astrometric header for each image, compute an x/y transform and compare to trans.jmp
this one overides trans.jmp if they are very different.
@param expnums:
@param ccd:
@param version:
@param prefix:
@return: None
"""
wcs_dict = {}
for expnum in expnums:
try:
# TODO This assumes that the image is already N/E flipped.
# If compute_trans is called after the image is retrieved from archive then we get the disk version.
filename = storage.get_image(expnum, ccd, version, prefix=prefix)
this_wcs = wcs.WCS(fits.open(filename)[0].header)
except Exception as err:
logging.warning("WCS Trans compute failed. {}".format(str(err)))
return
wcs_dict[expnum] = this_wcs
x0 = wcs_dict[expnums[0]].header['NAXIS1'] / 2.0
y0 = wcs_dict[expnums[0]].header['NAXIS2'] / 2.0
(ra0, dec0) = wcs_dict[expnums[0]].xy2sky(x0, y0)
result = ""
for expnum in expnums:
filename = storage.get_file(expnum, ccd, version, ext='.trans.jmp', prefix=prefix)
jmp_trans = file(filename, 'r').readline().split()
(x, y) = wcs_dict[expnum].sky2xy(ra0, dec0)
x1 = float(jmp_trans[0]) + float(jmp_trans[1]) * x + float(jmp_trans[2]) * y
y1 = float(jmp_trans[3]) + float(jmp_trans[4]) * x + float(jmp_trans[5]) * y
dr = math.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2)
if dr > 0.5:
result += "WARNING: WCS-JMP transforms mis-matched {} reverting to using {}.\n".format(expnum, default)
if default == "WCS":
uri = storage.dbimages_uri(expnum, ccd, version, ext='.trans.jmp', prefix=prefix)
filename = os.path.basename(uri)
trans = file(filename, 'w')
trans.write("{:5.2f} 1. 0. {:5.2f} 0. 1.\n".format(x0 - x, y0 - y))
trans.close()
else:
result += "WCS-JMP transforms match {}\n".format(expnum)
return result | [
"def",
"compute_trans",
"(",
"expnums",
",",
"ccd",
",",
"version",
",",
"prefix",
"=",
"None",
",",
"default",
"=",
"\"WCS\"",
")",
":",
"wcs_dict",
"=",
"{",
"}",
"for",
"expnum",
"in",
"expnums",
":",
"try",
":",
"# TODO This assumes that the image is alr... | 46.454545 | 25.227273 |
def from_simplex(x):
r"""
Inteprets the last index of x as unit simplices and returns a
real array of the sampe shape in logit space.
Inverse to :func:`to_simplex` ; see that function for more details.
:param np.ndarray: Array of unit simplices along the last index.
:rtype: ``np.ndarray``
"""
n = x.shape[-1]
# z are the stick breaking fractions in [0,1]
# the last one is always 1, so don't worry about it
z = np.empty(shape=x.shape)
z[..., 0] = x[..., 0]
z[..., 1:-1] = x[..., 1:-1] / (1 - x[..., :-2].cumsum(axis=-1))
# now z are the logit-transformed breaking fractions
z[..., :-1] = logit(z[..., :-1]) - logit(1 / (n - np.arange(n-1, dtype=np.float)))
# set this to 0 manually to avoid subtracting inf-inf
z[..., -1] = 0
return z | [
"def",
"from_simplex",
"(",
"x",
")",
":",
"n",
"=",
"x",
".",
"shape",
"[",
"-",
"1",
"]",
"# z are the stick breaking fractions in [0,1]",
"# the last one is always 1, so don't worry about it",
"z",
"=",
"np",
".",
"empty",
"(",
"shape",
"=",
"x",
".",
"shape"... | 34.478261 | 22.086957 |
def oneleft(self, window_name, object_name, iterations):
"""
Press scrollbar left with number of iterations
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: string
@param interations: iterations to perform on slider increase
@type iterations: integer
@return: 1 on success.
@rtype: integer
"""
if not self.verifyscrollbarhorizontal(window_name, object_name):
raise LdtpServerException('Object not horizontal scrollbar')
object_handle = self._get_object_handle(window_name, object_name)
i = 0
minValue = 1.0 / 8
flag = False
while i < iterations:
if object_handle.AXValue <= 0:
raise LdtpServerException('Minimum limit reached')
object_handle.AXValue -= minValue
time.sleep(1.0 / 100)
flag = True
i += 1
if flag:
return 1
else:
raise LdtpServerException('Unable to decrease scrollbar') | [
"def",
"oneleft",
"(",
"self",
",",
"window_name",
",",
"object_name",
",",
"iterations",
")",
":",
"if",
"not",
"self",
".",
"verifyscrollbarhorizontal",
"(",
"window_name",
",",
"object_name",
")",
":",
"raise",
"LdtpServerException",
"(",
"'Object not horizonta... | 37.393939 | 18.363636 |
def resolved_path(path, base=None):
"""
Args:
path (str | unicode | None): Path to resolve
base (str | unicode | None): Base path to use to resolve relative paths (default: current working dir)
Returns:
(str): Absolute path
"""
if not path or path.startswith(SYMBOLIC_TMP):
return path
path = os.path.expanduser(path)
if base and not os.path.isabs(path):
return os.path.join(resolved_path(base), path)
return os.path.abspath(path) | [
"def",
"resolved_path",
"(",
"path",
",",
"base",
"=",
"None",
")",
":",
"if",
"not",
"path",
"or",
"path",
".",
"startswith",
"(",
"SYMBOLIC_TMP",
")",
":",
"return",
"path",
"path",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"path",
")",
"if",... | 28.764706 | 19.705882 |
def drop_right_t(n):
"""
Transformation for Sequence.drop_right
:param n: number to drop from right
:return: transformation
"""
if n <= 0:
end_index = None
else:
end_index = -n
return Transformation(
'drop_right({0})'.format(n),
lambda sequence: sequence[:end_index],
None
) | [
"def",
"drop_right_t",
"(",
"n",
")",
":",
"if",
"n",
"<=",
"0",
":",
"end_index",
"=",
"None",
"else",
":",
"end_index",
"=",
"-",
"n",
"return",
"Transformation",
"(",
"'drop_right({0})'",
".",
"format",
"(",
"n",
")",
",",
"lambda",
"sequence",
":",... | 22.4 | 14.266667 |
def init_read_line(self):
"""init_read_line() initializes fields relevant to input matching"""
format_list = self._format_list
self._re_cvt = self.match_input_fmt(format_list)
regexp0_str = "".join([subs[0] for subs in self._re_cvt])
self._regexp_str = regexp0_str
self._re = re.compile(regexp0_str)
self._match_exps = [
subs[1] for subs in self._re_cvt if subs[1] is not None
]
self._divisors = [subs[2] for subs in self._re_cvt if subs[2] is not None]
self._in_cvt_fns = [
subs[3] for subs in self._re_cvt if subs[3] is not None
]
self._read_line_init = True | [
"def",
"init_read_line",
"(",
"self",
")",
":",
"format_list",
"=",
"self",
".",
"_format_list",
"self",
".",
"_re_cvt",
"=",
"self",
".",
"match_input_fmt",
"(",
"format_list",
")",
"regexp0_str",
"=",
"\"\"",
".",
"join",
"(",
"[",
"subs",
"[",
"0",
"]... | 44.4 | 16.533333 |
def _find_jar(self, path0=None):
"""
Return the location of an h2o.jar executable.
:param path0: Explicitly given h2o.jar path. If provided, then we will simply check whether the file is there,
otherwise we will search for an executable in locations returned by ._jar_paths().
:raises H2OStartupError: if no h2o.jar executable can be found.
"""
jar_paths = [path0] if path0 else self._jar_paths()
searched_paths = []
for jp in jar_paths:
searched_paths.append(jp)
if os.path.exists(jp):
return jp
raise H2OStartupError("Cannot start local server: h2o.jar not found. Paths searched:\n" +
"".join(" %s\n" % s for s in searched_paths)) | [
"def",
"_find_jar",
"(",
"self",
",",
"path0",
"=",
"None",
")",
":",
"jar_paths",
"=",
"[",
"path0",
"]",
"if",
"path0",
"else",
"self",
".",
"_jar_paths",
"(",
")",
"searched_paths",
"=",
"[",
"]",
"for",
"jp",
"in",
"jar_paths",
":",
"searched_paths... | 45.588235 | 25.117647 |
def transaction_atomic_with_retry(num_retries=5, backoff=0.1):
"""
This is a decorator that will wrap the decorated method in an atomic transaction and
retry the transaction a given number of times
:param num_retries: How many times should we retry before we give up
:param backoff: How long should we wait after each try
"""
# Create the decorator
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
# Keep track of how many times we have tried
num_tries = 0
exception = None
# Call the main sync entities method and catch any exceptions
while num_tries <= num_retries:
# Try running the transaction
try:
with transaction.atomic():
return wrapped(*args, **kwargs)
# Catch any operation errors
except db.utils.OperationalError as e:
num_tries += 1
exception = e
sleep(backoff * num_tries)
# If we have an exception raise it
raise exception
# Return the decorator
return wrapper | [
"def",
"transaction_atomic_with_retry",
"(",
"num_retries",
"=",
"5",
",",
"backoff",
"=",
"0.1",
")",
":",
"# Create the decorator",
"@",
"wrapt",
".",
"decorator",
"def",
"wrapper",
"(",
"wrapped",
",",
"instance",
",",
"args",
",",
"kwargs",
")",
":",
"# ... | 33.121212 | 17.424242 |
def kms_encrypt(kms_client, service, env, secret):
"""
Encrypt string for use by a given service/environment
Args:
kms_client (boto3 kms client object): Instantiated kms client object. Usually created through create_aws_clients.
service (string): name of the service that the secret is being encrypted for.
env (string): environment that the secret is being encrypted for.
secret (string): value to be encrypted
Returns:
a populated EFPWContext object
Raises:
SystemExit(1): If there is an error with the boto3 encryption call (ex. missing kms key)
"""
# Converting all periods to underscores because they are invalid in KMS alias names
key_alias = '{}-{}'.format(env, service.replace('.', '_'))
try:
response = kms_client.encrypt(
KeyId='alias/{}'.format(key_alias),
Plaintext=secret.encode()
)
except ClientError as error:
if error.response['Error']['Code'] == "NotFoundException":
fail("Key '{}' not found. You may need to run ef-generate for this environment.".format(key_alias), error)
else:
fail("boto3 exception occurred while performing kms encrypt operation.", error)
encrypted_secret = base64.b64encode(response['CiphertextBlob'])
return encrypted_secret | [
"def",
"kms_encrypt",
"(",
"kms_client",
",",
"service",
",",
"env",
",",
"secret",
")",
":",
"# Converting all periods to underscores because they are invalid in KMS alias names",
"key_alias",
"=",
"'{}-{}'",
".",
"format",
"(",
"env",
",",
"service",
".",
"replace",
... | 43.785714 | 26.285714 |
def list(region=None, key=None, keyid=None, profile=None):
'''
List all trails
Returns list of trails
CLI Example:
.. code-block:: yaml
policies:
- {...}
- {...}
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trails = conn.describe_trails()
if not bool(trails.get('trailList')):
log.warning('No trails found')
return {'trails': trails.get('trailList', [])}
except ClientError as e:
return {'error': __utils__['boto3.get_error'](e)} | [
"def",
"list",
"(",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"try",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
... | 24.304348 | 23.869565 |
def groot(path=''):
"Changes current directory to the root of the project (looks for README.md)."
def check_filelist(l):
if ('README.md' in l) or ('README' in l):
return True
else:
return False
import os, copy
cwd = os.getcwd() # initial dir
cwd0 = copy.copy(cwd)
cwd_init = copy.copy(cwd)
found = False
sysroot = False
while not found and not sysroot:
found = check_filelist(os.listdir())
if not found:
os.chdir(os.path.join(cwd, os.pardir))
cwd = os.getcwd()
if cwd == cwd0:
sysroot = True
else:
cwd0 = cwd
else:
os.chdir(os.path.join(cwd0, path))
if sysroot:
os.chdir(cwd_init)
raise Exception("Root directory not found.") | [
"def",
"groot",
"(",
"path",
"=",
"''",
")",
":",
"def",
"check_filelist",
"(",
"l",
")",
":",
"if",
"(",
"'README.md'",
"in",
"l",
")",
"or",
"(",
"'README'",
"in",
"l",
")",
":",
"return",
"True",
"else",
":",
"return",
"False",
"import",
"os",
... | 25.375 | 19.75 |
def declfuncs(self):
"""generator on all declaration of functions"""
for f in self.body:
if (hasattr(f, '_ctype')
and isinstance(f._ctype, FuncType)
and not hasattr(f, 'body')):
yield f | [
"def",
"declfuncs",
"(",
"self",
")",
":",
"for",
"f",
"in",
"self",
".",
"body",
":",
"if",
"(",
"hasattr",
"(",
"f",
",",
"'_ctype'",
")",
"and",
"isinstance",
"(",
"f",
".",
"_ctype",
",",
"FuncType",
")",
"and",
"not",
"hasattr",
"(",
"f",
",... | 34.142857 | 11.428571 |
def repetition(extractor, bounds, *, ignore_whitespace=False):
"""Returns a partial of _get_repetition that accepts only a text argument."""
return partial(_get_repetition, extractor, bounds=bounds, ignore_whitespace=ignore_whitespace) | [
"def",
"repetition",
"(",
"extractor",
",",
"bounds",
",",
"*",
",",
"ignore_whitespace",
"=",
"False",
")",
":",
"return",
"partial",
"(",
"_get_repetition",
",",
"extractor",
",",
"bounds",
"=",
"bounds",
",",
"ignore_whitespace",
"=",
"ignore_whitespace",
"... | 79 | 26 |
def _parse(s, g):
"""Parses sentence 's' using CNF grammar 'g'."""
# The CYK table. Indexed with a 2-tuple: (start pos, end pos)
table = defaultdict(set)
# Top-level structure is similar to the CYK table. Each cell is a dict from
# rule name to the best (lightest) tree for that rule.
trees = defaultdict(dict)
# Populate base case with existing terminal production rules
for i, w in enumerate(s):
for terminal, rules in g.terminal_rules.items():
if match(terminal, w):
for rule in rules:
table[(i, i)].add(rule)
if (rule.lhs not in trees[(i, i)] or
rule.weight < trees[(i, i)][rule.lhs].weight):
trees[(i, i)][rule.lhs] = RuleNode(rule, [T(w)], weight=rule.weight)
# Iterate over lengths of sub-sentences
for l in xrange(2, len(s) + 1):
# Iterate over sub-sentences with the given length
for i in xrange(len(s) - l + 1):
# Choose partition of the sub-sentence in [1, l)
for p in xrange(i + 1, i + l):
span1 = (i, p - 1)
span2 = (p, i + l - 1)
for r1, r2 in itertools.product(table[span1], table[span2]):
for rule in g.nonterminal_rules.get((r1.lhs, r2.lhs), []):
table[(i, i + l - 1)].add(rule)
r1_tree = trees[span1][r1.lhs]
r2_tree = trees[span2][r2.lhs]
rule_total_weight = rule.weight + r1_tree.weight + r2_tree.weight
if (rule.lhs not in trees[(i, i + l - 1)]
or rule_total_weight < trees[(i, i + l - 1)][rule.lhs].weight):
trees[(i, i + l - 1)][rule.lhs] = RuleNode(rule, [r1_tree, r2_tree], weight=rule_total_weight)
return table, trees | [
"def",
"_parse",
"(",
"s",
",",
"g",
")",
":",
"# The CYK table. Indexed with a 2-tuple: (start pos, end pos)",
"table",
"=",
"defaultdict",
"(",
"set",
")",
"# Top-level structure is similar to the CYK table. Each cell is a dict from",
"# rule name to the best (lightest) tree for th... | 53.228571 | 20.828571 |
def pad_pdf_pages(pdf_name, pages_per_q) -> None:
"""
Checks if PDF has the correct number of pages. If it has too many, warns
the user. If it has too few, adds blank pages until the right length is
reached.
"""
pdf = PyPDF2.PdfFileReader(pdf_name)
output = PyPDF2.PdfFileWriter()
num_pages = pdf.getNumPages()
if num_pages > pages_per_q:
logging.warning('{} has {} pages. Only the first '
'{} pages will get output.'
.format(pdf_name, num_pages, pages_per_q))
# Copy over up to pages_per_q pages
for page in range(min(num_pages, pages_per_q)):
output.addPage(pdf.getPage(page))
# Pad if necessary
if num_pages < pages_per_q:
for page in range(pages_per_q - num_pages):
output.addBlankPage()
# Output the PDF
with open(pdf_name, 'wb') as out_file:
output.write(out_file) | [
"def",
"pad_pdf_pages",
"(",
"pdf_name",
",",
"pages_per_q",
")",
"->",
"None",
":",
"pdf",
"=",
"PyPDF2",
".",
"PdfFileReader",
"(",
"pdf_name",
")",
"output",
"=",
"PyPDF2",
".",
"PdfFileWriter",
"(",
")",
"num_pages",
"=",
"pdf",
".",
"getNumPages",
"("... | 34.615385 | 15.153846 |
def events(self, institute, case=None, variant_id=None, level=None,
comments=False, panel=None):
"""Fetch events from the database.
Args:
institute (dict): A institute
case (dict): A case
variant_id (str, optional): global variant id
level (str, optional): restrict comments to 'specific' or 'global'
comments (bool, optional): restrict events to include only comments
panel (str): A panel name
Returns:
pymongo.Cursor: Query result
"""
query = {}
if variant_id:
if comments:
# If it's comment-related event collect global and variant-specific comment events
LOG.debug("Fetching all comments for institute {0} case {1} variant {2}".format(
institute['_id'], case['_id'], variant_id))
query = {
'$or': [
{
'category' : 'variant',
'variant_id' : variant_id,
'verb' : 'comment',
'level' : 'global'
},
{
'category' : 'variant',
'variant_id' : variant_id,
'institute' : institute['_id'],
'case' : case['_id'],
'verb' : 'comment',
'level' : 'specific'
}
]
}
else: # Collect other variant-specific events which are not comments
query['institute'] = institute['_id']
query['category'] = 'variant'
query['variant_id'] = variant_id
query['case'] = case['_id']
else:
query['institute'] = institute['_id']
if panel:
query['panel'] = panel
# If no variant_id or panel we know that it is a case level comment
else:
query['category'] = 'case'
if case:
query['case'] = case['_id']
if comments:
query['verb'] = 'comment'
return self.event_collection.find(query).sort('created_at', pymongo.DESCENDING) | [
"def",
"events",
"(",
"self",
",",
"institute",
",",
"case",
"=",
"None",
",",
"variant_id",
"=",
"None",
",",
"level",
"=",
"None",
",",
"comments",
"=",
"False",
",",
"panel",
"=",
"None",
")",
":",
"query",
"=",
"{",
"}",
"if",
"variant_id",
":"... | 37.822581 | 19.612903 |
def update_alias(FunctionName, Name, FunctionVersion=None, Description=None,
region=None, key=None, keyid=None, profile=None):
'''
Update the named alias to the configuration.
Returns {updated: true} if the alias was updated and returns
{updated: False} if the alias was not updated.
CLI Example:
.. code-block:: bash
salt myminion boto_lamba.update_alias my_lambda my_alias $LATEST
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
args = {}
if FunctionVersion:
args['FunctionVersion'] = FunctionVersion
if Description:
args['Description'] = Description
r = conn.update_alias(FunctionName=FunctionName, Name=Name, **args)
if r:
keys = ('Name', 'FunctionVersion', 'Description')
return {'updated': True, 'alias': dict([(k, r.get(k)) for k in keys])}
else:
log.warning('Alias was not updated')
return {'updated': False}
except ClientError as e:
return {'created': False, 'error': __utils__['boto3.get_error'](e)} | [
"def",
"update_alias",
"(",
"FunctionName",
",",
"Name",
",",
"FunctionVersion",
"=",
"None",
",",
"Description",
"=",
"None",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"try",... | 34.78125 | 26.03125 |
def add_parameters(traj):
"""Adds all neuron group parameters to `traj`."""
assert(isinstance(traj,Trajectory))
scale = traj.simulation.scale
traj.v_standard_parameter = Brian2Parameter
model_eqs = '''dV/dt= 1.0/tau_POST * (mu - V) + I_syn : 1
mu : 1
I_syn = - I_syn_i + I_syn_e : Hz
'''
conn_eqs = '''I_syn_PRE = x_PRE/(tau2_PRE-tau1_PRE) : Hz
dx_PRE/dt = -(normalization_PRE*y_PRE+x_PRE)*invtau1_PRE : 1
dy_PRE/dt = -y_PRE*invtau2_PRE : 1
'''
traj.f_add_parameter('model.eqs', model_eqs,
comment='The differential equation for the neuron model')
traj.f_add_parameter('model.synaptic.eqs', conn_eqs,
comment='The differential equation for the synapses. '
'PRE will be replaced by `i` or `e` depending '
'on the source population')
traj.f_add_parameter('model.synaptic.tau1', 1*ms, comment = 'The decay time')
traj.f_add_parameter('model.synaptic.tau2_e', 3*ms, comment = 'The rise time, excitatory')
traj.f_add_parameter('model.synaptic.tau2_i', 2*ms, comment = 'The rise time, inhibitory')
traj.f_add_parameter('model.V_th', 'V >= 1.0', comment = "Threshold value")
traj.f_add_parameter('model.reset_func', 'V=0.0',
comment = "String representation of reset function")
traj.f_add_parameter('model.refractory', 5*ms, comment = "Absolute refractory period")
traj.f_add_parameter('model.N_e', int(2000*scale), comment = "Amount of excitatory neurons")
traj.f_add_parameter('model.N_i', int(500*scale), comment = "Amount of inhibitory neurons")
traj.f_add_parameter('model.tau_e', 15*ms, comment = "Membrane time constant, excitatory")
traj.f_add_parameter('model.tau_i', 10*ms, comment = "Membrane time constant, inhibitory")
traj.f_add_parameter('model.mu_e_min', 1.1, comment = "Lower bound for bias, excitatory")
traj.f_add_parameter('model.mu_e_max', 1.2, comment = "Upper bound for bias, excitatory")
traj.f_add_parameter('model.mu_i_min', 1.0, comment = "Lower bound for bias, inhibitory")
traj.f_add_parameter('model.mu_i_max', 1.05, comment = "Upper bound for bias, inhibitory") | [
"def",
"add_parameters",
"(",
"traj",
")",
":",
"assert",
"(",
"isinstance",
"(",
"traj",
",",
"Trajectory",
")",
")",
"scale",
"=",
"traj",
".",
"simulation",
".",
"scale",
"traj",
".",
"v_standard_parameter",
"=",
"Brian2Parameter",
"model_eqs",
"=",
"'''d... | 51.297872 | 35.787234 |
def auth_required(*auth_methods):
"""
Decorator that protects enpoints through multiple mechanisms
Example::
@app.route('/dashboard')
@auth_required('token', 'session')
def dashboard():
return 'Dashboard'
:param auth_methods: Specified mechanisms.
"""
login_mechanisms = {
'token': lambda: _check_token(),
'basic': lambda: _check_http_auth(),
'session': lambda: current_user.is_authenticated
}
def wrapper(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
h = {}
mechanisms = [(method, login_mechanisms.get(method))
for method in auth_methods]
for method, mechanism in mechanisms:
if mechanism and mechanism():
return fn(*args, **kwargs)
elif method == 'basic':
r = _security.default_http_auth_realm
h['WWW-Authenticate'] = 'Basic realm="%s"' % r
if _security._unauthorized_callback:
return _security._unauthorized_callback()
else:
return _get_unauthorized_response(headers=h)
return decorated_view
return wrapper | [
"def",
"auth_required",
"(",
"*",
"auth_methods",
")",
":",
"login_mechanisms",
"=",
"{",
"'token'",
":",
"lambda",
":",
"_check_token",
"(",
")",
",",
"'basic'",
":",
"lambda",
":",
"_check_http_auth",
"(",
")",
",",
"'session'",
":",
"lambda",
":",
"curr... | 33.722222 | 15.555556 |
def connections(self):
"""
Returns all of the loaded connections names as a list
"""
conn = lambda x: str(x).replace('connection:', '')
return [conn(name) for name in self.sections()] | [
"def",
"connections",
"(",
"self",
")",
":",
"conn",
"=",
"lambda",
"x",
":",
"str",
"(",
"x",
")",
".",
"replace",
"(",
"'connection:'",
",",
"''",
")",
"return",
"[",
"conn",
"(",
"name",
")",
"for",
"name",
"in",
"self",
".",
"sections",
"(",
... | 36.333333 | 12 |
def coding_sequence(rna):
'''Extract coding sequence from an RNA template.
:param seq: Sequence from which to extract a coding sequence.
:type seq: coral.RNA
:param material: Type of sequence ('dna' or 'rna')
:type material: str
:returns: The first coding sequence (start codon -> stop codon) matched
from 5' to 3'.
:rtype: coral.RNA
:raises: ValueError if rna argument has no start codon.
ValueError if rna argument has no stop codon in-frame with the
first start codon.
'''
if isinstance(rna, coral.DNA):
rna = transcribe(rna)
codons_left = len(rna) // 3
start_codon = coral.RNA('aug')
stop_codons = [coral.RNA('uag'), coral.RNA('uga'), coral.RNA('uaa')]
start = None
stop = None
valid = [None, None]
index = 0
while codons_left:
codon = rna[index:index + 3]
if valid[0] is None:
if codon in start_codon:
start = index
valid[0] = True
else:
if codon in stop_codons:
stop = index + 3
valid[1] = True
break
index += 3
codons_left -= 1
if valid[0] is None:
raise ValueError('Sequence has no start codon.')
elif stop is None:
raise ValueError('Sequence has no stop codon.')
coding_rna = rna[start:stop]
return coding_rna | [
"def",
"coding_sequence",
"(",
"rna",
")",
":",
"if",
"isinstance",
"(",
"rna",
",",
"coral",
".",
"DNA",
")",
":",
"rna",
"=",
"transcribe",
"(",
"rna",
")",
"codons_left",
"=",
"len",
"(",
"rna",
")",
"//",
"3",
"start_codon",
"=",
"coral",
".",
... | 30.533333 | 18.488889 |
def _extractFastaHeader(fastaHeader, parser=None, forceId=False):
"""Parses a fasta header and returns extracted information in a dictionary.
Unless a custom parser is specified, a ``Pyteomics`` function is used, which
provides parsers for the formats of UniProtKB, UniRef, UniParc and UniMES
(UniProt Metagenomic and Environmental Sequences), described at
`www.uniprot.org <http://www.uniprot.org/help/fasta-headers>_`.
:param fastaHeader: str, protein entry header from a fasta file
:param parser: is a function that takes a fastaHeader string and returns a
dictionary, containing at least the key "id". If None the parser
function from pyteomics ``pyteomics.fasta.parse()`` is used.
:param forceId: bool, if True and no id can be extracted from the fasta
header the whole header sequence is used as a protein id instead of
raising an exception.
:returns: dict, describing a fasta header
"""
if parser is None:
try:
headerInfo = pyteomics.fasta.parse(fastaHeader)
except pyteomics.auxiliary.PyteomicsError as pyteomicsError:
#If forceId is set True, it uses the whole header as an id
if forceId:
headerInfo = {'id': fastaHeader}
else:
raise pyteomicsError
else:
headerInfo = parser(fastaHeader)
return headerInfo | [
"def",
"_extractFastaHeader",
"(",
"fastaHeader",
",",
"parser",
"=",
"None",
",",
"forceId",
"=",
"False",
")",
":",
"if",
"parser",
"is",
"None",
":",
"try",
":",
"headerInfo",
"=",
"pyteomics",
".",
"fasta",
".",
"parse",
"(",
"fastaHeader",
")",
"exc... | 45.9 | 23.766667 |
def filter_savitzky_golay(y, window_size=5, order=2, deriv=0, rate=1):
"""Smooth (and optionally differentiate) with a Savitzky-Golay filter."""
try:
window_size = np.abs(np.int(window_size))
order = np.abs(np.int(order))
except ValueError:
raise ValueError('window_size and order must be integers')
if window_size % 2 != 1 or window_size < 1:
raise ValueError('window_size size must be a positive odd number')
if window_size < order + 2:
raise ValueError('window_size is too small for the polynomials order')
order_range = range(order + 1)
half_window = (window_size - 1) // 2
# precompute limits
minimum = np.min(y)
maximum = np.max(y)
# precompute coefficients
b = np.mat([
[k ** i for i in order_range]
for k in range(-half_window, half_window + 1)
])
m = np.linalg.pinv(b).A[deriv] * rate ** deriv * math.factorial(deriv)
# pad the signal at the extremes with values taken from the original signal
firstvals = y[0] - np.abs(y[1:half_window+1][::-1] - y[0])
lastvals = y[-1] + np.abs(y[-half_window-1:-1][::-1] - y[-1])
y = np.concatenate((firstvals, y, lastvals))
return np.clip(
np.convolve(m[::-1], y, mode='valid'),
minimum,
maximum,
) | [
"def",
"filter_savitzky_golay",
"(",
"y",
",",
"window_size",
"=",
"5",
",",
"order",
"=",
"2",
",",
"deriv",
"=",
"0",
",",
"rate",
"=",
"1",
")",
":",
"try",
":",
"window_size",
"=",
"np",
".",
"abs",
"(",
"np",
".",
"int",
"(",
"window_size",
... | 37.441176 | 20.764706 |
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: InstalledAddOnContext for this InstalledAddOnInstance
:rtype: twilio.rest.preview.marketplace.installed_add_on.InstalledAddOnContext
"""
if self._context is None:
self._context = InstalledAddOnContext(self._version, sid=self._solution['sid'], )
return self._context | [
"def",
"_proxy",
"(",
"self",
")",
":",
"if",
"self",
".",
"_context",
"is",
"None",
":",
"self",
".",
"_context",
"=",
"InstalledAddOnContext",
"(",
"self",
".",
"_version",
",",
"sid",
"=",
"self",
".",
"_solution",
"[",
"'sid'",
"]",
",",
")",
"re... | 46.727273 | 26.909091 |
def get_sdb_keys(self, path):
"""Return the keys for a SDB, which are need for the full secure data path"""
list_resp = get_with_retry(
self.cerberus_url + '/v1/secret/' + path + '/?list=true',
headers=self.HEADERS
)
throw_if_bad_response(list_resp)
return list_resp.json()['data']['keys'] | [
"def",
"get_sdb_keys",
"(",
"self",
",",
"path",
")",
":",
"list_resp",
"=",
"get_with_retry",
"(",
"self",
".",
"cerberus_url",
"+",
"'/v1/secret/'",
"+",
"path",
"+",
"'/?list=true'",
",",
"headers",
"=",
"self",
".",
"HEADERS",
")",
"throw_if_bad_response",... | 34.6 | 17.1 |
def read_stats(self, *stats):
""" Read stream statistics from chassis.
:param stats: list of requested statistics to read, if empty - read all statistics.
"""
from ixexplorer.ixe_stream import IxePacketGroupStream
sleep_time = 0.1 # in cases we only want few counters but very fast we need a smaller sleep time
if not stats:
stats = [m.attrname for m in IxePgStats.__tcl_members__ if m.flags & FLAG_RDONLY]
sleep_time = 1
# Read twice to refresh rate statistics.
for port in self.tx_ports_streams:
port.api.call_rc('streamTransmitStats get {} 1 4096'.format(port.uri))
for rx_port in self.rx_ports:
rx_port.api.call_rc('packetGroupStats get {} 0 65536'.format(rx_port.uri))
time.sleep(sleep_time)
self.statistics = OrderedDict()
for tx_port, streams in self.tx_ports_streams.items():
for stream in streams:
stream_stats = OrderedDict()
tx_port.api.call_rc('streamTransmitStats get {} 1 4096'.format(tx_port.uri))
stream_tx_stats = IxeStreamTxStats(tx_port, stream.index)
stream_stats_tx = {c: v for c, v in stream_tx_stats.get_attributes(FLAG_RDONLY).items()}
stream_stats['tx'] = stream_stats_tx
stream_stat_pgid = IxePacketGroupStream(stream).groupId
stream_stats_pg = pg_stats_dict()
for port in self.session.ports.values():
stream_stats_pg[str(port)] = OrderedDict(zip(stats, [-1] * len(stats)))
for rx_port in self.rx_ports:
if not stream.rx_ports or rx_port in stream.rx_ports:
rx_port.api.call_rc('packetGroupStats get {} 0 65536'.format(rx_port.uri))
pg_stats = IxePgStats(rx_port, stream_stat_pgid)
stream_stats_pg[str(rx_port)] = pg_stats.read_stats(*stats)
stream_stats['rx'] = stream_stats_pg
self.statistics[str(stream)] = stream_stats
return self.statistics | [
"def",
"read_stats",
"(",
"self",
",",
"*",
"stats",
")",
":",
"from",
"ixexplorer",
".",
"ixe_stream",
"import",
"IxePacketGroupStream",
"sleep_time",
"=",
"0.1",
"# in cases we only want few counters but very fast we need a smaller sleep time",
"if",
"not",
"stats",
":"... | 55 | 25.763158 |
async def main(interface=None):
""" Main function """
qtm_ip = await choose_qtm_instance(interface)
if qtm_ip is None:
return
while True:
connection = await qtm.connect(qtm_ip, 22223, version="1.18")
if connection is None:
return
await connection.get_state()
await connection.byte_order()
async with qtm.TakeControl(connection, "password"):
result = await connection.close()
if result == b"Closing connection":
await connection.await_event(qtm.QRTEvent.EventConnectionClosed)
await connection.load(QTM_FILE)
await connection.start(rtfromfile=True)
(await connection.get_current_frame()).get_3d_markers()
queue = asyncio.Queue()
asyncio.ensure_future(packet_receiver(queue))
try:
await connection.stream_frames(
components=["incorrect"], on_packet=queue.put_nowait
)
except qtm.QRTCommandException as exception:
LOG.info("exception %s", exception)
await connection.stream_frames(
components=["3d"], on_packet=queue.put_nowait
)
await asyncio.sleep(0.5)
await connection.byte_order()
await asyncio.sleep(0.5)
await connection.stream_frames_stop()
queue.put_nowait(None)
await connection.get_parameters(parameters=["3d"])
await connection.stop()
await connection.await_event()
await connection.new()
await connection.await_event(qtm.QRTEvent.EventConnected)
await connection.start()
await connection.await_event(qtm.QRTEvent.EventWaitingForTrigger)
await connection.trig()
await connection.await_event(qtm.QRTEvent.EventCaptureStarted)
await asyncio.sleep(0.5)
await connection.set_qtm_event()
await asyncio.sleep(0.001)
await connection.set_qtm_event("with_label")
await asyncio.sleep(0.5)
await connection.stop()
await connection.await_event(qtm.QRTEvent.EventCaptureStopped)
await connection.save(r"measurement.qtm")
await asyncio.sleep(3)
await connection.close()
connection.disconnect() | [
"async",
"def",
"main",
"(",
"interface",
"=",
"None",
")",
":",
"qtm_ip",
"=",
"await",
"choose_qtm_instance",
"(",
"interface",
")",
"if",
"qtm_ip",
"is",
"None",
":",
"return",
"while",
"True",
":",
"connection",
"=",
"await",
"qtm",
".",
"connect",
"... | 28.52439 | 22.463415 |
def set_credentials(self, username, password):
"""
Set a new username and password.
:param str username: New username.
:param str password: New password.
"""
if username is not None:
self._username = username
if password is not None:
self._password = password | [
"def",
"set_credentials",
"(",
"self",
",",
"username",
",",
"password",
")",
":",
"if",
"username",
"is",
"not",
"None",
":",
"self",
".",
"_username",
"=",
"username",
"if",
"password",
"is",
"not",
"None",
":",
"self",
".",
"_password",
"=",
"password... | 27.5 | 9.333333 |
def main():
"""Event display for an event of station 503
Date Time Timestamp Nanoseconds
2012-03-29 10:51:36 1333018296 870008589
Number of MIPs
35.0 51.9 35.8 78.9
Arrival time
15.0 17.5 20.0 27.5
"""
# Detector positions in ENU relative to the station GPS
x = [-6.34, -2.23, -3.6, 3.46]
y = [6.34, 2.23, -3.6, 3.46]
# Scale mips to fit the graph
n = [35.0, 51.9, 35.8, 78.9]
# Make times relative to first detection
t = [15., 17.5, 20., 27.5]
dt = [ti - min(t) for ti in t]
plot = Plot()
plot.scatter([0], [0], mark='triangle')
plot.add_pin_at_xy(0, 0, 'Station 503', use_arrow=False, location='below')
plot.scatter_table(x, y, dt, n)
plot.set_scalebar(location="lower right")
plot.set_colorbar('$\Delta$t [ns]')
plot.set_axis_equal()
plot.set_mlimits(max=16.)
plot.set_slimits(min=10., max=100.)
plot.set_xlabel('x [m]')
plot.set_ylabel('y [m]')
plot.save('event_display')
# Add event by Station 508
# Detector positions in ENU relative to the station GPS
x508 = [6.12, 0.00, -3.54, 3.54]
y508 = [-6.12, -13.23, -3.54, 3.54]
# Event GPS timestamp: 1371498167.016412100
# MIPS
n508 = [5.6, 16.7, 36.6, 9.0]
# Arrival Times
t508 = [15., 22.5, 22.5, 30.]
dt508 = [ti - min(t508) for ti in t508]
plot = MultiPlot(1, 2, width=r'.33\linewidth')
plot.set_xlimits_for_all(min=-10, max=15)
plot.set_ylimits_for_all(min=-15, max=10)
plot.set_mlimits_for_all(min=0., max=16.)
plot.set_colorbar('$\Delta$t [ns]', False)
plot.set_colormap('blackwhite')
plot.set_scalebar_for_all(location="upper right")
p = plot.get_subplot_at(0, 0)
p.scatter([0], [0], mark='triangle')
p.add_pin_at_xy(0, 0, 'Station 503', use_arrow=False, location='below')
p.scatter_table(x, y, dt, n)
p.set_axis_equal()
p = plot.get_subplot_at(0, 1)
p.scatter([0], [0], mark='triangle')
p.add_pin_at_xy(0, 0, 'Station 508', use_arrow=False, location='below')
p.scatter_table(x508, y508, dt508, n508)
p.set_axis_equal()
plot.show_yticklabels_for_all([(0, 0)])
plot.show_xticklabels_for_all([(0, 0), (0, 1)])
plot.set_xlabel('x [m]')
plot.set_ylabel('y [m]')
plot.save('multi_event_display') | [
"def",
"main",
"(",
")",
":",
"# Detector positions in ENU relative to the station GPS",
"x",
"=",
"[",
"-",
"6.34",
",",
"-",
"2.23",
",",
"-",
"3.6",
",",
"3.46",
"]",
"y",
"=",
"[",
"6.34",
",",
"2.23",
",",
"-",
"3.6",
",",
"3.46",
"]",
"# Scale mi... | 28.15 | 17.6875 |
def get_jid(jid):
'''
Return the information returned from a specified jid
'''
log.debug('sqlite3 returner <get_jid> called jid: %s', jid)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = '''SELECT id, full_ret FROM salt_returns WHERE jid = :jid'''
cur.execute(sql,
{'jid': jid})
data = cur.fetchone()
log.debug('query result: %s', data)
ret = {}
if data and len(data) > 1:
ret = {six.text_type(data[0]): {'return': salt.utils.json.loads(data[1])}}
log.debug('ret: %s', ret)
_close_conn(conn)
return ret | [
"def",
"get_jid",
"(",
"jid",
")",
":",
"log",
".",
"debug",
"(",
"'sqlite3 returner <get_jid> called jid: %s'",
",",
"jid",
")",
"conn",
"=",
"_get_conn",
"(",
"ret",
"=",
"None",
")",
"cur",
"=",
"conn",
".",
"cursor",
"(",
")",
"sql",
"=",
"'''SELECT ... | 32.111111 | 20.222222 |
def get_strings(soup, tag):
"""Get all the string children from an html tag."""
tags = soup.find_all(tag)
strings = [s.string for s in tags if s.string]
return strings | [
"def",
"get_strings",
"(",
"soup",
",",
"tag",
")",
":",
"tags",
"=",
"soup",
".",
"find_all",
"(",
"tag",
")",
"strings",
"=",
"[",
"s",
".",
"string",
"for",
"s",
"in",
"tags",
"if",
"s",
".",
"string",
"]",
"return",
"strings"
] | 35.8 | 11.2 |
def get_stack_trace_with_labels(self, depth = 16, bMakePretty = True):
"""
Tries to get a stack trace for the current function.
Only works for functions with standard prologue and epilogue.
@type depth: int
@param depth: Maximum depth of stack trace.
@type bMakePretty: bool
@param bMakePretty:
C{True} for user readable labels,
C{False} for labels that can be passed to L{Process.resolve_label}.
"Pretty" labels look better when producing output for the user to
read, while pure labels are more useful programatically.
@rtype: tuple of tuple( int, int, str )
@return: Stack trace of the thread as a tuple of
( return address, frame pointer label ).
@raise WindowsError: Raises an exception on error.
"""
try:
trace = self.__get_stack_trace(depth, True, bMakePretty)
except Exception:
trace = ()
if not trace:
trace = self.__get_stack_trace_manually(depth, True, bMakePretty)
return trace | [
"def",
"get_stack_trace_with_labels",
"(",
"self",
",",
"depth",
"=",
"16",
",",
"bMakePretty",
"=",
"True",
")",
":",
"try",
":",
"trace",
"=",
"self",
".",
"__get_stack_trace",
"(",
"depth",
",",
"True",
",",
"bMakePretty",
")",
"except",
"Exception",
":... | 37.37931 | 22.551724 |
def service_define(self, service, ty):
"""
Add a service variable of type ``ty`` to this model
:param str service: variable name
:param type ty: variable type
:return: None
"""
assert service not in self._data
assert service not in self._algebs + self._states
self._service.append(service)
self._service_ty.append(ty) | [
"def",
"service_define",
"(",
"self",
",",
"service",
",",
"ty",
")",
":",
"assert",
"service",
"not",
"in",
"self",
".",
"_data",
"assert",
"service",
"not",
"in",
"self",
".",
"_algebs",
"+",
"self",
".",
"_states",
"self",
".",
"_service",
".",
"app... | 27.642857 | 13.5 |
def strip_block_whitespace(string_list):
"""Treats a list of strings as a code block and strips
whitespace so that the min whitespace line sits at char 0 of line."""
min_ws = min([(len(x) - len(x.lstrip())) for x in string_list if x != '\n'])
return [x[min_ws:] if x != '\n' else x for x in string_list] | [
"def",
"strip_block_whitespace",
"(",
"string_list",
")",
":",
"min_ws",
"=",
"min",
"(",
"[",
"(",
"len",
"(",
"x",
")",
"-",
"len",
"(",
"x",
".",
"lstrip",
"(",
")",
")",
")",
"for",
"x",
"in",
"string_list",
"if",
"x",
"!=",
"'\\n'",
"]",
")"... | 63.8 | 12.8 |
def get_info(self):
"""
Return plugin information.
"""
plugin_infos = {}
for pc in self.plugins:
plugin_infos.update(pc.get_info())
return {
self.get_plugin_name() : {
"version" : self.get_version(),
"sub-plugins" : plugin_infos,
"params" : {
"multi_plugins" : self.conf['multi_plugins']
},
}
} | [
"def",
"get_info",
"(",
"self",
")",
":",
"plugin_infos",
"=",
"{",
"}",
"for",
"pc",
"in",
"self",
".",
"plugins",
":",
"plugin_infos",
".",
"update",
"(",
"pc",
".",
"get_info",
"(",
")",
")",
"return",
"{",
"self",
".",
"get_plugin_name",
"(",
")"... | 27 | 15 |
def validate(cls, mapper_spec):
"""Validates mapper spec.
Args:
mapper_spec: The MapperSpec for this InputReader.
Raises:
BadReaderParamsError: required parameters are missing or invalid.
"""
if mapper_spec.input_reader_class() != cls:
raise BadReaderParamsError("Input reader class mismatch")
params = _get_params(mapper_spec)
if cls.BATCH_SIZE_PARAM in params:
try:
batch_size = int(params[cls.BATCH_SIZE_PARAM])
if batch_size < 1:
raise BadReaderParamsError("Bad batch size: %s" % batch_size)
except ValueError, e:
raise BadReaderParamsError("Bad batch size: %s" % e) | [
"def",
"validate",
"(",
"cls",
",",
"mapper_spec",
")",
":",
"if",
"mapper_spec",
".",
"input_reader_class",
"(",
")",
"!=",
"cls",
":",
"raise",
"BadReaderParamsError",
"(",
"\"Input reader class mismatch\"",
")",
"params",
"=",
"_get_params",
"(",
"mapper_spec",... | 34 | 18.526316 |
def set_response_handlers(self, stanza, res_handler, err_handler,
timeout_handler = None, timeout = None):
"""Set response handler for an IQ "get" or "set" stanza.
This should be called before the stanza is sent.
:Parameters:
- `stanza`: an IQ stanza
- `res_handler`: result handler for the stanza. Will be called
when matching <iq type="result"/> is received. Its only
argument will be the stanza received. The handler may return
a stanza or list of stanzas which should be sent in response.
- `err_handler`: error handler for the stanza. Will be called
when matching <iq type="error"/> is received. Its only
argument will be the stanza received. The handler may return
a stanza or list of stanzas which should be sent in response
but this feature should rather not be used (it is better not to
respond to 'error' stanzas).
- `timeout_handler`: timeout handler for the stanza. Will be called
(with no arguments) when no matching <iq type="result"/> or <iq
type="error"/> is received in next `timeout` seconds.
- `timeout`: timeout value for the stanza. After that time if no
matching <iq type="result"/> nor <iq type="error"/> stanza is
received, then timeout_handler (if given) will be called.
"""
# pylint: disable-msg=R0913
self.lock.acquire()
try:
self._set_response_handlers(stanza, res_handler, err_handler,
timeout_handler, timeout)
finally:
self.lock.release() | [
"def",
"set_response_handlers",
"(",
"self",
",",
"stanza",
",",
"res_handler",
",",
"err_handler",
",",
"timeout_handler",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"# pylint: disable-msg=R0913",
"self",
".",
"lock",
".",
"acquire",
"(",
")",
"try",... | 54.5 | 26.09375 |
def parse_to_tree(text):
"""Parse text using CaboCha, then return Tree instance."""
xml_text = cabocha.as_xml(text)
tree = Tree(xml_text)
return tree | [
"def",
"parse_to_tree",
"(",
"text",
")",
":",
"xml_text",
"=",
"cabocha",
".",
"as_xml",
"(",
"text",
")",
"tree",
"=",
"Tree",
"(",
"xml_text",
")",
"return",
"tree"
] | 32.2 | 12.2 |
def from_dict(cls, cls_dict, fallback_xsi_type=None):
"""Parse the dictionary and return an Entity instance.
This will attempt to extract type information from the input
dictionary and pass it to entity_class to resolve the correct class
for the type.
Args:
cls_dict: A dictionary representation of an Entity object.
fallback_xsi_type: An xsi_type to use for string input, which
doesn't have properties
Returns:
An Entity instance.
"""
if not cls_dict:
return None
if isinstance(cls_dict, six.string_types):
if not getattr(cls, "_convert_strings", False):
return cls_dict
try:
typekey = cls.dictkey(cls_dict)
except TypeError:
typekey = fallback_xsi_type
klass = cls.entity_class(typekey)
return klass.from_dict(cls_dict) | [
"def",
"from_dict",
"(",
"cls",
",",
"cls_dict",
",",
"fallback_xsi_type",
"=",
"None",
")",
":",
"if",
"not",
"cls_dict",
":",
"return",
"None",
"if",
"isinstance",
"(",
"cls_dict",
",",
"six",
".",
"string_types",
")",
":",
"if",
"not",
"getattr",
"(",... | 33.035714 | 19.142857 |
def get_storage_account_keys(access_token, subscription_id, rgname, account_name):
'''Get the access keys for the specified storage account.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
rgname (str): Azure resource group name.
account_name (str): Name of the new storage account.
Returns:
HTTP response. JSON body of storage account keys.
'''
endpoint = ''.join([get_rm_endpoint(),
'/subscriptions/', subscription_id,
'/resourcegroups/', rgname,
'/providers/Microsoft.Storage/storageAccounts/', account_name,
'/listKeys',
'?api-version=', STORAGE_API])
return do_post(endpoint, '', access_token) | [
"def",
"get_storage_account_keys",
"(",
"access_token",
",",
"subscription_id",
",",
"rgname",
",",
"account_name",
")",
":",
"endpoint",
"=",
"''",
".",
"join",
"(",
"[",
"get_rm_endpoint",
"(",
")",
",",
"'/subscriptions/'",
",",
"subscription_id",
",",
"'/res... | 43.473684 | 22 |
def looping_call(f, sleep=5, inc_sleep=0, max_sleep=60, timeout=600,
exceptions=(), *args, **kwargs):
"""Helper function that to run looping call with fixed/dynamical interval.
:param f: the looping call function or method.
:param sleep: initial interval of the looping calls.
:param inc_sleep: sleep time increment, default as 0.
:param max_sleep: max sleep time.
:param timeout: looping call timeout in seconds, 0 means no timeout.
:param exceptions: exceptions that trigger re-try.
"""
time_start = time.time()
expiration = time_start + timeout
retry = True
while retry:
expired = timeout and (time.time() > expiration)
LOG.debug(
"timeout is %(timeout)s, expiration is %(expiration)s, \
time_start is %(time_start)s" %
{"timeout": timeout, "expiration": expiration,
"time_start": time_start})
try:
f(*args, **kwargs)
except exceptions:
retry = not expired
if retry:
LOG.debug("Will re-try %(fname)s in %(itv)d seconds" %
{'fname': f.__name__, 'itv': sleep})
time.sleep(sleep)
sleep = min(sleep + inc_sleep, max_sleep)
else:
LOG.debug("Looping call %s timeout" % f.__name__)
continue
retry = False | [
"def",
"looping_call",
"(",
"f",
",",
"sleep",
"=",
"5",
",",
"inc_sleep",
"=",
"0",
",",
"max_sleep",
"=",
"60",
",",
"timeout",
"=",
"600",
",",
"exceptions",
"=",
"(",
")",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"time_start",
"="... | 38.756757 | 20.027027 |
def are_you_sure(flag_changed, evt, parent=None, title="File has been changed",
msg="Are you sure you want to exit?"):
"""
"Are you sure you want to exit" question dialog.
If flag_changed, shows question dialog. If answer is not yes, calls evt.ignore()
Arguments:
flag_changed
evt -- QCloseEvent instance
parent=None -- parent form, used to centralize the question dialog at
title -- title for question dialog
msg -- text of question dialog
Returns True or False. True means: "yes, I want to exit"
"""
if flag_changed:
r = QMessageBox.question(parent, title, msg,
QMessageBox.Yes|QMessageBox.No, QMessageBox.Yes)
if r != QMessageBox.Yes:
evt.ignore() | [
"def",
"are_you_sure",
"(",
"flag_changed",
",",
"evt",
",",
"parent",
"=",
"None",
",",
"title",
"=",
"\"File has been changed\"",
",",
"msg",
"=",
"\"Are you sure you want to exit?\"",
")",
":",
"if",
"flag_changed",
":",
"r",
"=",
"QMessageBox",
".",
"questio... | 36.619048 | 20.047619 |
def label(self, label, action='ADD', params=None):
"""
Adds a Security Label to a Indicator/Group or Victim
Args:
params:
label: The name of the Security Label
action:
"""
if params is None:
params = {}
if not label:
self._tcex.handle_error(925, ['label', 'Security Label', 'label', 'label', label])
if not self.can_update():
self._tcex.handle_error(910, [self.type])
if action == 'GET':
return self.tc_requests.get_label(
self.api_type,
self.api_sub_type,
self.unique_id,
label,
owner=self.owner,
params=params,
)
if action == 'ADD':
return self.tc_requests.add_label(
self.api_type, self.api_sub_type, self.unique_id, label, owner=self.owner
)
if action == 'DELETE':
return self.tc_requests.delete_label(
self.api_type, self.api_sub_type, self.unique_id, label, owner=self.owner
)
self._tcex.handle_error(925, ['action', 'label', 'action', 'action', action])
return None | [
"def",
"label",
"(",
"self",
",",
"label",
",",
"action",
"=",
"'ADD'",
",",
"params",
"=",
"None",
")",
":",
"if",
"params",
"is",
"None",
":",
"params",
"=",
"{",
"}",
"if",
"not",
"label",
":",
"self",
".",
"_tcex",
".",
"handle_error",
"(",
"... | 29.609756 | 22.268293 |
def _update_trsys(self, event):
"""Called when has changed.
This allows the node and its children to react (notably, VisualNode
uses this to update its TransformSystem).
Note that this method is only called when one transform is replaced by
another; it is not called if an existing transform internally changes
its state.
"""
for ch in self.children:
ch._update_trsys(event)
self.events.transform_change()
self.update() | [
"def",
"_update_trsys",
"(",
"self",
",",
"event",
")",
":",
"for",
"ch",
"in",
"self",
".",
"children",
":",
"ch",
".",
"_update_trsys",
"(",
"event",
")",
"self",
".",
"events",
".",
"transform_change",
"(",
")",
"self",
".",
"update",
"(",
")"
] | 36.928571 | 17.714286 |
def get_port_monitor(self):
"""
Gets the port monitor configuration of a logical interconnect.
Returns:
dict: The Logical Interconnect.
"""
uri = "{}{}".format(self.data["uri"], self.PORT_MONITOR_PATH)
return self._helper.do_get(uri) | [
"def",
"get_port_monitor",
"(",
"self",
")",
":",
"uri",
"=",
"\"{}{}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"\"uri\"",
"]",
",",
"self",
".",
"PORT_MONITOR_PATH",
")",
"return",
"self",
".",
"_helper",
".",
"do_get",
"(",
"uri",
")"
] | 31.777778 | 15.555556 |
def plot_zt_mu(self, temp=600, output='eig', relaxation_time=1e-14,
xlim=None):
"""
Plot the ZT in function of Fermi level.
Args:
temp: the temperature
xlim: a list of min and max fermi energy by default (0, and band
gap)
tau: A relaxation time in s. By default none and the plot is by
units of relaxation time
Returns:
a matplotlib object
"""
import matplotlib.pyplot as plt
plt.figure(figsize=(9, 7))
zt = self._bz.get_zt(relaxation_time=relaxation_time, output=output,
doping_levels=False)[temp]
plt.plot(self._bz.mu_steps, zt, linewidth=3.0)
self._plot_bg_limits()
self._plot_doping(temp)
if output == 'eig':
plt.legend(['ZT$_1$', 'ZT$_2$', 'ZT$_3$'])
if xlim is None:
plt.xlim(-0.5, self._bz.gap + 0.5)
else:
plt.xlim(xlim)
plt.ylabel("ZT", fontsize=30.0)
plt.xlabel("E-E$_f$ (eV)", fontsize=30.0)
plt.xticks(fontsize=25)
plt.yticks(fontsize=25)
plt.tight_layout()
return plt | [
"def",
"plot_zt_mu",
"(",
"self",
",",
"temp",
"=",
"600",
",",
"output",
"=",
"'eig'",
",",
"relaxation_time",
"=",
"1e-14",
",",
"xlim",
"=",
"None",
")",
":",
"import",
"matplotlib",
".",
"pyplot",
"as",
"plt",
"plt",
".",
"figure",
"(",
"figsize",
... | 34.470588 | 15.529412 |
def create_payload(self):
"""Wrap submitted data within an extra dict.
For more information, see `Bugzilla #1151220
<https://bugzilla.redhat.com/show_bug.cgi?id=1151220>`_.
In addition, rename the ``from_`` field to ``from``.
"""
payload = super(Subnet, self).create_payload()
if 'from_' in payload:
payload['from'] = payload.pop('from_')
return {u'subnet': payload} | [
"def",
"create_payload",
"(",
"self",
")",
":",
"payload",
"=",
"super",
"(",
"Subnet",
",",
"self",
")",
".",
"create_payload",
"(",
")",
"if",
"'from_'",
"in",
"payload",
":",
"payload",
"[",
"'from'",
"]",
"=",
"payload",
".",
"pop",
"(",
"'from_'",... | 33.307692 | 17.692308 |
def get_message(self, id):
"""
Return a Message object for given id.
:param id: The id of the message object to return.
"""
url = self._base_url + "/3/message/{0}".format(id)
resp = self._send_request(url)
return Message(resp, self) | [
"def",
"get_message",
"(",
"self",
",",
"id",
")",
":",
"url",
"=",
"self",
".",
"_base_url",
"+",
"\"/3/message/{0}\"",
".",
"format",
"(",
"id",
")",
"resp",
"=",
"self",
".",
"_send_request",
"(",
"url",
")",
"return",
"Message",
"(",
"resp",
",",
... | 31.222222 | 11.444444 |
def _ChunkFactory(chunk_type, stream_rdr, offset):
"""
Return a |_Chunk| subclass instance appropriate to *chunk_type* parsed
from *stream_rdr* at *offset*.
"""
chunk_cls_map = {
PNG_CHUNK_TYPE.IHDR: _IHDRChunk,
PNG_CHUNK_TYPE.pHYs: _pHYsChunk,
}
chunk_cls = chunk_cls_map.get(chunk_type, _Chunk)
return chunk_cls.from_offset(chunk_type, stream_rdr, offset) | [
"def",
"_ChunkFactory",
"(",
"chunk_type",
",",
"stream_rdr",
",",
"offset",
")",
":",
"chunk_cls_map",
"=",
"{",
"PNG_CHUNK_TYPE",
".",
"IHDR",
":",
"_IHDRChunk",
",",
"PNG_CHUNK_TYPE",
".",
"pHYs",
":",
"_pHYsChunk",
",",
"}",
"chunk_cls",
"=",
"chunk_cls_ma... | 35.909091 | 12.818182 |
def _widen_states(old_state, new_state):
"""
Perform widen operation on the given states, and return a new one.
:param old_state:
:param new_state:
:returns: The widened state, and whether widening has occurred
"""
# print old_state.dbg_print_stack()
# print new_state.dbg_print_stack()
l.debug('Widening state at IP %s', old_state.ip)
widened_state, widening_occurred = old_state.widen(new_state)
# print "Widened: "
# print widened_state.dbg_print_stack()
return widened_state, widening_occurred | [
"def",
"_widen_states",
"(",
"old_state",
",",
"new_state",
")",
":",
"# print old_state.dbg_print_stack()",
"# print new_state.dbg_print_stack()",
"l",
".",
"debug",
"(",
"'Widening state at IP %s'",
",",
"old_state",
".",
"ip",
")",
"widened_state",
",",
"widening_occur... | 29.4 | 20.6 |
def _nodeGetNonDefaultsDict(self):
""" Retrieves this nodes` values as a dictionary to be used for persistence.
Non-recursive auxiliary function for getNonDefaultsDict
"""
dct = {}
if self.data != self.defaultData:
dct['data'] = self.data.toString() # calls QFont.toString()
return dct | [
"def",
"_nodeGetNonDefaultsDict",
"(",
"self",
")",
":",
"dct",
"=",
"{",
"}",
"if",
"self",
".",
"data",
"!=",
"self",
".",
"defaultData",
":",
"dct",
"[",
"'data'",
"]",
"=",
"self",
".",
"data",
".",
"toString",
"(",
")",
"# calls QFont.toString()",
... | 42.75 | 13.875 |
def apply(ctx, name, verbose):
"""
Apply migration
"""
if name != 'all': # specific migration
try:
app_name, target_migration = name.split('/', 2)
except ValueError:
raise click.ClickException("NAME format is <app>/<migration> or 'all'")
apps = ctx.obj['config']['apps']
if app_name not in apps.keys():
raise click.ClickException('unknown app "{0}"'.format(app_name))
app = apps[app_name]
migrations = app['migrations']
if target_migration not in migrations:
raise click.ClickException('unknown migration "{0}"'.format(name))
migrations = migrations[:migrations.index(target_migration) + 1] # including all prevoius migrations
for migration in migrations:
click.echo(click.style('Applying {0}...'.format(click.style(migration, bold=True)), fg='blue'))
if ctx.obj['db'].is_migration_applied(app_name, migration):
click.echo(click.style(' SKIPPED.', fg='green'))
continue
try:
snaql_factory = Snaql(app['path'], '')
queries = snaql_factory.load_queries(migration + '.apply.sql').ordered_blocks
for query in queries:
if verbose:
click.echo(' ' + query())
ctx.obj['db'].query(query())
except Exception as e:
click.echo(click.style(' FAILED.', fg='red'))
ctx.obj['db'].rollback()
raise click.ClickException('migration execution failed\n{0}'.format(e))
click.echo(click.style(' OK.', fg='green'))
ctx.obj['db'].commit()
ctx.obj['db'].fix_migration(app_name, migration)
else: # migrate everything
for app_name, app in ctx.obj['config']['apps'].items():
click.echo(click.style('Migrating {0}...'.format(click.style(app_name, bold=True)), fg='blue'))
for migration in app['migrations']:
click.echo(' Applying {0}...'.format(click.style(migration, bold=True)))
if ctx.obj['db'].is_migration_applied(app_name, migration):
click.echo(click.style(' SKIPPED.', fg='green'))
continue
try:
snaql_factory = Snaql(app['path'], '')
queries = snaql_factory.load_queries(migration + '.apply.sql').ordered_blocks
for query in queries:
if verbose:
click.echo(' ' + query())
ctx.obj['db'].query(query())
except Exception as e:
click.echo(click.style(' FAILED.', fg='red'))
ctx.obj['db'].rollback()
raise click.ClickException('migration execution failed\n{0}'.format(e))
click.echo(click.style(' OK.', fg='green'))
ctx.obj['db'].commit()
ctx.obj['db'].fix_migration(app_name, migration) | [
"def",
"apply",
"(",
"ctx",
",",
"name",
",",
"verbose",
")",
":",
"if",
"name",
"!=",
"'all'",
":",
"# specific migration",
"try",
":",
"app_name",
",",
"target_migration",
"=",
"name",
".",
"split",
"(",
"'/'",
",",
"2",
")",
"except",
"ValueError",
... | 37.75 | 25.725 |
def get_coord_idims(self, coords):
"""Get the slicers for the given coordinates from the base dataset
This method converts `coords` to slicers (list of
integers or ``slice`` objects)
Parameters
----------
coords: dict
A subset of the ``ds.coords`` attribute of the base dataset
:attr:`ds`
Returns
-------
dict
Mapping from coordinate name to integer, list of integer or slice
"""
ret = dict(
(label, get_index_from_coord(coord, self.ds.indexes[label]))
for label, coord in six.iteritems(coords)
if label in self.ds.indexes)
return ret | [
"def",
"get_coord_idims",
"(",
"self",
",",
"coords",
")",
":",
"ret",
"=",
"dict",
"(",
"(",
"label",
",",
"get_index_from_coord",
"(",
"coord",
",",
"self",
".",
"ds",
".",
"indexes",
"[",
"label",
"]",
")",
")",
"for",
"label",
",",
"coord",
"in",... | 31.090909 | 20.954545 |
def to_dictionary(self):
"""Serialize an object into dictionary form. Useful if you have to
serialize an array of objects into JSON. Otherwise, if you call the
:meth:`to_json` method on each object in the list and then try to
dump the array, you end up with an array with one string."""
d = {'start': self.start.isoformat(),
'end': self.end.isoformat(),
'tz': self.tz,
'summary': self.summary.to_dictionary(),
'series': self.series.to_dictionary()
}
return d | [
"def",
"to_dictionary",
"(",
"self",
")",
":",
"d",
"=",
"{",
"'start'",
":",
"self",
".",
"start",
".",
"isoformat",
"(",
")",
",",
"'end'",
":",
"self",
".",
"end",
".",
"isoformat",
"(",
")",
",",
"'tz'",
":",
"self",
".",
"tz",
",",
"'summary... | 43.230769 | 16.692308 |
def bar(
it,
label="",
width=32,
hide=None,
empty_char=BAR_EMPTY_CHAR,
filled_char=BAR_FILLED_CHAR,
expected_size=None,
every=1,
):
"""Progress iterator. Wrap your iterables with it."""
count = len(it) if expected_size is None else expected_size
with Bar(
label=label,
width=width,
hide=hide,
empty_char=BAR_EMPTY_CHAR,
filled_char=BAR_FILLED_CHAR,
expected_size=count,
every=every,
) as bar:
for i, item in enumerate(it):
yield item
bar.show(i + 1) | [
"def",
"bar",
"(",
"it",
",",
"label",
"=",
"\"\"",
",",
"width",
"=",
"32",
",",
"hide",
"=",
"None",
",",
"empty_char",
"=",
"BAR_EMPTY_CHAR",
",",
"filled_char",
"=",
"BAR_FILLED_CHAR",
",",
"expected_size",
"=",
"None",
",",
"every",
"=",
"1",
",",... | 22.48 | 20.04 |
def _dispatch(self, event, listener, *args, **kwargs):
"""Dispatch an event to a listener.
Args:
event (str): The name of the event that triggered this call.
listener (def or async def): The listener to trigger.
*args: Any number of positional arguments.
**kwargs: Any number of keyword arguments.
This method inspects the listener. If it is a def it dispatches the
listener to a method that will execute that def. If it is an async def
it dispatches it to a method that will schedule the resulting coro with
the event loop.
"""
if (
asyncio.iscoroutinefunction(listener) or
isinstance(listener, functools.partial) and
asyncio.iscoroutinefunction(listener.func)
):
return self._dispatch_coroutine(event, listener, *args, **kwargs)
return self._dispatch_function(event, listener, *args, **kwargs) | [
"def",
"_dispatch",
"(",
"self",
",",
"event",
",",
"listener",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"(",
"asyncio",
".",
"iscoroutinefunction",
"(",
"listener",
")",
"or",
"isinstance",
"(",
"listener",
",",
"functools",
".",
"pa... | 41.434783 | 25.347826 |
def _decode(obj): # type: (bytes or str or unicode or object) -> unicode # noqa ignore=F821
"""Decode an object to unicode.
Args:
obj (bytes or str or unicode or anything serializable): object to be decoded
Returns:
object decoded in unicode.
"""
if obj is None:
return u''
if six.PY3 and isinstance(obj, six.binary_type):
# transforms a byte string (b'') in unicode
return obj.decode('latin1')
elif six.PY3:
# PY3 strings are unicode.
return str(obj)
elif isinstance(obj, six.text_type):
# returns itself if it is unicode
return obj
else:
# decodes pY2 string to unicode
return str(obj).decode('utf-8') | [
"def",
"_decode",
"(",
"obj",
")",
":",
"# type: (bytes or str or unicode or object) -> unicode # noqa ignore=F821",
"if",
"obj",
"is",
"None",
":",
"return",
"u''",
"if",
"six",
".",
"PY3",
"and",
"isinstance",
"(",
"obj",
",",
"six",
".",
"binary_type",
")",
"... | 33.714286 | 15.904762 |
def StateOfCharge(self):
""" % of Full Charge """
return (self.bus.read_byte_data(self.address, 0x02) + self.bus.read_byte_data(self.address, 0x03) * 256) | [
"def",
"StateOfCharge",
"(",
"self",
")",
":",
"return",
"(",
"self",
".",
"bus",
".",
"read_byte_data",
"(",
"self",
".",
"address",
",",
"0x02",
")",
"+",
"self",
".",
"bus",
".",
"read_byte_data",
"(",
"self",
".",
"address",
",",
"0x03",
")",
"*"... | 56 | 29.333333 |
def thumb(self, size=BIGTHUMB):
'''Get a thumbnail as string or None if the file isnt an image
size would be one of JFSFile.BIGTHUMB, .MEDIUMTHUMB, .SMALLTHUMB or .XLTHUMB'''
if not self.is_image():
return None
if not size in (self.BIGTHUMB, self.MEDIUMTHUMB, self.SMALLTHUMB, self.XLTHUMB):
raise JFSError('Invalid thumbnail size: %s for image %s' % (size, self.path))
#return self.jfs.raw('%s?mode=thumb&ts=%s' % (self.path, size))
return self.jfs.raw(url=self.path,
params={'mode':'thumb', 'ts':size}) | [
"def",
"thumb",
"(",
"self",
",",
"size",
"=",
"BIGTHUMB",
")",
":",
"if",
"not",
"self",
".",
"is_image",
"(",
")",
":",
"return",
"None",
"if",
"not",
"size",
"in",
"(",
"self",
".",
"BIGTHUMB",
",",
"self",
".",
"MEDIUMTHUMB",
",",
"self",
".",
... | 54.090909 | 27.727273 |
def app_uninstall(self, package_name, keep_data=False):
"""
Uninstall package
Args:
- package_name(string): package name ex: com.example.demo
- keep_data(bool): keep the data and cache directories
"""
if keep_data:
return self.run_cmd('uninstall', '-k', package_name)
else:
return self.run_cmd('uninstall', package_name) | [
"def",
"app_uninstall",
"(",
"self",
",",
"package_name",
",",
"keep_data",
"=",
"False",
")",
":",
"if",
"keep_data",
":",
"return",
"self",
".",
"run_cmd",
"(",
"'uninstall'",
",",
"'-k'",
",",
"package_name",
")",
"else",
":",
"return",
"self",
".",
"... | 33.833333 | 20 |
def parse_scoped_selector(scoped_selector):
"""Parse scoped selector."""
# Conver Macro (%scope/name) to (scope/name/macro.value)
if scoped_selector[0] == '%':
if scoped_selector.endswith('.value'):
err_str = '{} is invalid cannot use % and end with .value'
raise ValueError(err_str.format(scoped_selector))
scoped_selector = scoped_selector[1:] + '/macro.value'
scope_selector_list = scoped_selector.rsplit('/', 1)
scope = ''.join(scope_selector_list[:-1])
selector = scope_selector_list[-1]
return scope, selector | [
"def",
"parse_scoped_selector",
"(",
"scoped_selector",
")",
":",
"# Conver Macro (%scope/name) to (scope/name/macro.value)",
"if",
"scoped_selector",
"[",
"0",
"]",
"==",
"'%'",
":",
"if",
"scoped_selector",
".",
"endswith",
"(",
"'.value'",
")",
":",
"err_str",
"=",... | 44.833333 | 10.5 |
def load_class(full_class_string):
"""
dynamically load a class from a string
http://thomassileo.com/blog/2012/12/21/dynamically-load-python-modules-or-classes/
"""
class_parts = full_class_string.split(".")
module_path = ".".join(class_parts[:-1])
class_name = class_parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name) | [
"def",
"load_class",
"(",
"full_class_string",
")",
":",
"class_parts",
"=",
"full_class_string",
".",
"split",
"(",
"\".\"",
")",
"module_path",
"=",
"\".\"",
".",
"join",
"(",
"class_parts",
"[",
":",
"-",
"1",
"]",
")",
"class_name",
"=",
"class_parts",
... | 29.615385 | 15.615385 |
def train_model(extractor, data_dir, output_dir=None):
"""
Train an extractor model, then write train/test block-level classification
performance as well as the model itself to disk in ``output_dir``.
Args:
extractor (:class:`Extractor`): Instance of the ``Extractor`` class to
be trained.
data_dir (str): Directory on disk containing subdirectories for all
training data, including raw html and gold standard blocks files
output_dir (str): Directory on disk to which the trained model files,
errors, etc. are to be written. If None, outputs are not saved.
Returns:
:class:`Extractor`: A trained extractor model.
"""
# set up directories and file naming
output_dir, fname_prefix = _set_up_output_dir_and_fname_prefix(output_dir, extractor)
# prepare and split the data
logging.info('preparing, splitting, and concatenating the data...')
data = prepare_all_data(data_dir)
training_data, test_data = train_test_split(
data, test_size=0.2, random_state=42)
train_html, train_labels, train_weights = extractor.get_html_labels_weights(training_data)
test_html, test_labels, test_weights = extractor.get_html_labels_weights(test_data)
# fit the extractor on training data
# then evaluate it on train and test data
logging.info('fitting and evaluating the extractor features and model...')
try:
extractor.fit(train_html, train_labels, weights=train_weights)
except (TypeError, ValueError):
extractor.fit(train_html, train_labels)
train_eval = evaluate_model_predictions(
np.concatenate(train_labels), extractor.predict(train_html),
np.concatenate(train_weights))
test_eval = evaluate_model_predictions(
np.concatenate(test_labels), extractor.predict(test_html),
np.concatenate(test_weights))
# report model performance
_report_model_performance(output_dir, fname_prefix, train_eval, test_eval)
# pickle the final model
_write_model_to_disk(output_dir, fname_prefix, extractor)
return extractor | [
"def",
"train_model",
"(",
"extractor",
",",
"data_dir",
",",
"output_dir",
"=",
"None",
")",
":",
"# set up directories and file naming",
"output_dir",
",",
"fname_prefix",
"=",
"_set_up_output_dir_and_fname_prefix",
"(",
"output_dir",
",",
"extractor",
")",
"# prepare... | 43.208333 | 23.375 |
def add_bundled_jars():
"""
Adds the bundled jars to the JVM's classpath.
"""
# determine lib directory with jars
rootdir = os.path.split(os.path.dirname(__file__))[0]
libdir = rootdir + os.sep + "lib"
# add jars from lib directory
for l in glob.glob(libdir + os.sep + "*.jar"):
if l.lower().find("-src.") == -1:
javabridge.JARS.append(str(l)) | [
"def",
"add_bundled_jars",
"(",
")",
":",
"# determine lib directory with jars",
"rootdir",
"=",
"os",
".",
"path",
".",
"split",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"[",
"0",
"]",
"libdir",
"=",
"rootdir",
"+",
"os",
".",
... | 32.083333 | 8.916667 |
def manipulateLattice(self, beamline, type='quad',
irange='all', property='k1',
opstr='+0%'):
""" manipulate element with type, e.g. quad
input parameters:
:param beamline: beamline definition keyword
:param type: element type, case insensitive
:param irange: slice index, see getElementByOrder()
:param property: element property, e.g. 'k1' for 'quad' strength
:param opstr: operation, '+[-]n%' or '+[-*/]n'
"""
# lattice_list = self.getFullBeamline(beamline, extend = True)
# orderedLattice_list = self.orderLattice(beamline)
opele_list = self.getElementByOrder(beamline, type, irange)
opr = opstr[0]
opn = float(opstr[1:].strip('%'))
if opstr[-1] == '%':
opn /= 100.0
opsdict = {'+': lambda a, p: a * (1 + p),
'-': lambda a, p: a * (1 - p)}
else:
opsdict = {'+': lambda a, p: a + p,
'-': lambda a, p: a - p,
'*': lambda a, p: a * p,
'/': lambda a, p: a / float(p)}
for ename, etype, eid in opele_list:
val0_old = self.all_elements[ename.upper()].values()[0].get(property.lower())
val0_new = opsdict[opr](val0_old, opn)
self.all_elements[ename.upper()].values()[0][property.lower()] = val0_new
return True | [
"def",
"manipulateLattice",
"(",
"self",
",",
"beamline",
",",
"type",
"=",
"'quad'",
",",
"irange",
"=",
"'all'",
",",
"property",
"=",
"'k1'",
",",
"opstr",
"=",
"'+0%'",
")",
":",
"# lattice_list = self.getFullBeamline(beamline, extend = True)",
"# orderedLattice... | 41.542857 | 19.542857 |
def get_params(self, token_stack):
"""Get params from stack of tokens"""
params = {}
for token in token_stack:
params.update(token.params)
return params | [
"def",
"get_params",
"(",
"self",
",",
"token_stack",
")",
":",
"params",
"=",
"{",
"}",
"for",
"token",
"in",
"token_stack",
":",
"params",
".",
"update",
"(",
"token",
".",
"params",
")",
"return",
"params"
] | 31.833333 | 9 |
def _render_log():
"""Totally tap into Towncrier internals to get an in-memory result.
"""
config = load_config(ROOT)
definitions = config['types']
fragments, fragment_filenames = find_fragments(
pathlib.Path(config['directory']).absolute(),
config['sections'],
None,
definitions,
)
rendered = render_fragments(
pathlib.Path(config['template']).read_text(encoding='utf-8'),
config['issue_format'],
split_fragments(fragments, definitions),
definitions,
config['underlines'][1:],
)
return rendered | [
"def",
"_render_log",
"(",
")",
":",
"config",
"=",
"load_config",
"(",
"ROOT",
")",
"definitions",
"=",
"config",
"[",
"'types'",
"]",
"fragments",
",",
"fragment_filenames",
"=",
"find_fragments",
"(",
"pathlib",
".",
"Path",
"(",
"config",
"[",
"'director... | 30.789474 | 15.526316 |
def tracemessage(self, maxlen=6):
"""
if maxlen > 0, the message is shortened to maxlen traces.
"""
result = ""
for i, value in enumerate(self):
result += "{0}: {1}\n".format(i, get_node_repr(value))
result = result.strip("\n")
lines = result.split("\n")
if maxlen and len(lines) > maxlen:
i = int(maxlen / 2)
lines = lines[:i] + ["..."] + lines[-(maxlen - i) :]
result = "\n".join(lines)
return result | [
"def",
"tracemessage",
"(",
"self",
",",
"maxlen",
"=",
"6",
")",
":",
"result",
"=",
"\"\"",
"for",
"i",
",",
"value",
"in",
"enumerate",
"(",
"self",
")",
":",
"result",
"+=",
"\"{0}: {1}\\n\"",
".",
"format",
"(",
"i",
",",
"get_node_repr",
"(",
"... | 29.941176 | 15.705882 |
def nrzi(data):
'''
Packet uses NRZI (non-return to zero inverted) encoding, which means
that a 0 is encoded as a change in tone, and a 1 is encoded as
no change in tone.
'''
current = True
for bit in data:
if not bit:
current = not current
yield current | [
"def",
"nrzi",
"(",
"data",
")",
":",
"current",
"=",
"True",
"for",
"bit",
"in",
"data",
":",
"if",
"not",
"bit",
":",
"current",
"=",
"not",
"current",
"yield",
"current"
] | 23.727273 | 26.090909 |
def add(self, v):
"""Add a new value."""
self._vals_added += 1
if self._mean is None:
self._mean = v
self._mean = self._mean + ((v - self._mean) / float(self._vals_added)) | [
"def",
"add",
"(",
"self",
",",
"v",
")",
":",
"self",
".",
"_vals_added",
"+=",
"1",
"if",
"self",
".",
"_mean",
"is",
"None",
":",
"self",
".",
"_mean",
"=",
"v",
"self",
".",
"_mean",
"=",
"self",
".",
"_mean",
"+",
"(",
"(",
"v",
"-",
"se... | 31.333333 | 17.666667 |
def h_L(self, L, theta, Ts, **statef):
"""
Calculate the average heat transfer coefficient.
:param L: [m] characteristic length of the heat transfer surface
:param theta: [°] angle of the surface with the vertical
:param Ts: [K] heat transfer surface temperature
:param Tf: [K] bulk fluid temperature
:returns: [W/m2/K] float
"""
Nu_L = self.Nu_L(L, theta, Ts, **statef)
k = self._fluid.k(T=self.Tr)
return Nu_L * k / L | [
"def",
"h_L",
"(",
"self",
",",
"L",
",",
"theta",
",",
"Ts",
",",
"*",
"*",
"statef",
")",
":",
"Nu_L",
"=",
"self",
".",
"Nu_L",
"(",
"L",
",",
"theta",
",",
"Ts",
",",
"*",
"*",
"statef",
")",
"k",
"=",
"self",
".",
"_fluid",
".",
"k",
... | 33.066667 | 16.533333 |
def discount_rewards(r):
"""take 1D float array of rewards and compute discounted reward"""
discounted_r = np.zeros_like(r)
running_add = 0
for t in reversed(range(0, r.size)):
# Reset the sum, since this was a game boundary (pong specific!).
if r[t] != 0:
running_add = 0
running_add = running_add * gamma + r[t]
discounted_r[t] = running_add
return discounted_r | [
"def",
"discount_rewards",
"(",
"r",
")",
":",
"discounted_r",
"=",
"np",
".",
"zeros_like",
"(",
"r",
")",
"running_add",
"=",
"0",
"for",
"t",
"in",
"reversed",
"(",
"range",
"(",
"0",
",",
"r",
".",
"size",
")",
")",
":",
"# Reset the sum, since thi... | 37.909091 | 12.272727 |
def do_unalias(self, arg):
"""unalias name
Delete the specified alias.
"""
args = arg.split()
if len(args) == 0: return
if args[0] in self.aliases:
del self.aliases[args[0]] | [
"def",
"do_unalias",
"(",
"self",
",",
"arg",
")",
":",
"args",
"=",
"arg",
".",
"split",
"(",
")",
"if",
"len",
"(",
"args",
")",
"==",
"0",
":",
"return",
"if",
"args",
"[",
"0",
"]",
"in",
"self",
".",
"aliases",
":",
"del",
"self",
".",
"... | 28.25 | 6 |
def if_has_delegate(delegate):
    """Wrap a delegated instance attribute function.

    Creates a decorator for methods that are delegated in the presence of a
    results wrapper. This enables duck-typing by ``hasattr`` returning True
    according to the sub-estimator.

    This function was adapted from scikit-learn, which defines
    ``if_delegate_has_method``, but operates differently by injecting methods
    not based on method presence, but by delegate presence.

    Examples
    --------
    >>> from pmdarima.utils.metaestimators import if_has_delegate
    >>>
    >>> class A(object):
    ...     @if_has_delegate('d')
    ...     def func(self):
    ...         return True
    >>>
    >>> a = A()
    >>> # the delegate does not exist yet
    >>> assert not hasattr(a, 'func')
    >>> # inject the attribute
    >>> a.d = None
    >>> assert hasattr(a, 'func') and a.func()

    Parameters
    ----------
    delegate : string, list of strings or tuple of strings
        Name of the sub-estimator that can be accessed as an attribute of the
        base object. If a list or a tuple of names are provided, the first
        sub-estimator that is an attribute of the base object will be used.
    """
    # Normalize the delegate spec into a tuple of candidate attribute names:
    # a bare string becomes a 1-tuple, a list is converted, a tuple passes
    # through unchanged.
    if isinstance(delegate, (list, tuple)):
        delegates = tuple(delegate)
    else:
        delegates = (delegate,)
    return lambda fn: _IffHasDelegate(fn, delegates)
"def",
"if_has_delegate",
"(",
"delegate",
")",
":",
"if",
"isinstance",
"(",
"delegate",
",",
"list",
")",
":",
"delegate",
"=",
"tuple",
"(",
"delegate",
")",
"if",
"not",
"isinstance",
"(",
"delegate",
",",
"tuple",
")",
":",
"delegate",
"=",
"(",
"... | 34.325 | 20.9 |
def default_ms(name, tabdesc=None, dminfo=None):
    """
    Creates a default Measurement Set called name. Any Table Description
    elements in tabdesc will overwrite the corresponding element in a default
    Measurement Set Table Description (columns, hypercolumns and keywords).

    In practice, you probably want to specify columns such as DATA, MODEL_DATA
    and CORRECTED_DATA (and their associated keywords and hypercolumns)
    in tabdesc.
    """
    # Substitute empty dictionaries for omitted arguments (avoids the
    # mutable-default-argument pitfall).
    tabdesc = {} if tabdesc is None else tabdesc
    dminfo = {} if dminfo is None else dminfo
    # Build the underlying MS and hand it back wrapped in a Table object.
    return table(_default_ms(name, tabdesc, dminfo), _oper=3)
"def",
"default_ms",
"(",
"name",
",",
"tabdesc",
"=",
"None",
",",
"dminfo",
"=",
"None",
")",
":",
"# Default to empty dictionaries",
"if",
"tabdesc",
"is",
"None",
":",
"tabdesc",
"=",
"{",
"}",
"if",
"dminfo",
"is",
"None",
":",
"dminfo",
"=",
"{",
... | 32.85 | 24.05 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.