Dataset schema — each record below lists these four fields in order:

    text           string    lengths 89 to 104k
    code_tokens    list      (token previews below are truncated with "...")
    avg_line_len   float64   7.91 to 980
    score          float64   0 to 630
def shorten_go_name_ptbl3(self, name, dcnt):
    """Shorten GO description for Table 3 in manuscript."""
    if self._keep_this(name):
        return name
    name = name.replace("positive regulation of immune system process",
                        "+ reg. of immune sys. process")
    name = name.replace("positive regulation of immune response",
                        "+ reg. of immune response")
    name = name.replace("positive regulation of cytokine production",
                        "+ reg. of cytokine production")
    if dcnt < 40:
        name = name.replace("antigen processing and presentation", "a.p.p.")
    if dcnt < 10:
        name = name.replace("negative", "-")
        name = name.replace("positive", "+")
        #name = name.replace("tumor necrosis factor production", "tumor necrosis factor prod.")
        name = name.replace("tumor necrosis factor production", "TNF production")
    if dcnt < 4:
        name = name.replace("regulation", "reg.")
        name = name.replace("exogenous ", "")
        name = name.replace(" via ", " w/")
        name = name.replace("T cell mediated cytotoxicity", "cytotoxicity via T cell")
    name = name.replace('involved in', 'in')
    name = name.replace('-positive', '+')
    return name
[ "def", "shorten_go_name_ptbl3", "(", "self", ",", "name", ",", "dcnt", ")", ":", "if", "self", ".", "_keep_this", "(", "name", ")", ":", "return", "name", "name", "=", "name", ".", "replace", "(", "\"positive regulation of immune system process\"", ",", "\"+ r...
53.16
20.48
def nlmsg_inherit(hdr=None):
    """Allocate a new Netlink message and inherit Netlink message header.

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/msg.c#L322

    Allocates a new Netlink message and inherits the original message header.
    If `hdr` is not None it will be used as a template for the Netlink message
    header, otherwise the header is left blank.

    Keyword arguments:
    hdr -- Netlink message header template (nlmsghdr class instance).

    Returns:
    Newly allocated Netlink message (nl_msg class instance).
    """
    nm = nlmsg_alloc()
    if hdr:
        new = nm.nm_nlh
        new.nlmsg_type = hdr.nlmsg_type
        new.nlmsg_flags = hdr.nlmsg_flags
        new.nlmsg_seq = hdr.nlmsg_seq
        new.nlmsg_pid = hdr.nlmsg_pid
    return nm
[ "def", "nlmsg_inherit", "(", "hdr", "=", "None", ")", ":", "nm", "=", "nlmsg_alloc", "(", ")", "if", "hdr", ":", "new", "=", "nm", ".", "nm_nlh", "new", ".", "nlmsg_type", "=", "hdr", ".", "nlmsg_type", "new", ".", "nlmsg_flags", "=", "hdr", ".", "...
34.590909
23.318182
def find_nodes(self, query_dict=None, exact=False, verbose=False, **kwargs):
    """Query on node properties. See documentation for _OTIWrapper class."""
    assert self.use_v1
    return self._do_query('{p}/singlePropertySearchForTreeNodes'.format(p=self.query_prefix),
                          query_dict=query_dict,
                          exact=exact,
                          verbose=verbose,
                          valid_keys=self.node_search_term_set,
                          kwargs=kwargs)
[ "def", "find_nodes", "(", "self", ",", "query_dict", "=", "None", ",", "exact", "=", "False", ",", "verbose", "=", "False", ",", "*", "*", "kwargs", ")", ":", "assert", "self", ".", "use_v1", "return", "self", ".", "_do_query", "(", "'{p}/singlePropertyS...
58.888889
17.555556
def move_by_offset(self, xoffset, yoffset):
    """
    Moving the mouse to an offset from current mouse position.

    :Args:
     - xoffset: X offset to move to, as a positive or negative integer.
     - yoffset: Y offset to move to, as a positive or negative integer.
    """
    if self._driver.w3c:
        self.w3c_actions.pointer_action.move_by(xoffset, yoffset)
        self.w3c_actions.key_action.pause()
    else:
        self._actions.append(lambda: self._driver.execute(
            Command.MOVE_TO, {
                'xoffset': int(xoffset),
                'yoffset': int(yoffset)}))
    return self
[ "def", "move_by_offset", "(", "self", ",", "xoffset", ",", "yoffset", ")", ":", "if", "self", ".", "_driver", ".", "w3c", ":", "self", ".", "w3c_actions", ".", "pointer_action", ".", "move_by", "(", "xoffset", ",", "yoffset", ")", "self", ".", "w3c_actio...
38.647059
17.588235
def bulk_create_posts(self, posts, post_categories, post_tags, post_media_attachments):
    """
    Actually do a db bulk creation of posts, and link up the many-to-many fields

    :param posts: the list of Post objects to bulk create
    :param post_categories: a mapping of Categories to add to newly created Posts
    :param post_tags: a mapping of Tags to add to newly created Posts
    :param post_media_attachments: a mapping of Medias to add to newly created Posts
    :return: None
    """
    Post.objects.bulk_create(posts)

    # attach many-to-manys
    for post_wp_id, categories in six.iteritems(post_categories):
        Post.objects.get(site_id=self.site_id, wp_id=post_wp_id).categories.add(*categories)

    for post_id, tags in six.iteritems(post_tags):
        Post.objects.get(site_id=self.site_id, wp_id=post_id).tags.add(*tags)

    for post_id, attachments in six.iteritems(post_media_attachments):
        Post.objects.get(site_id=self.site_id, wp_id=post_id).attachments.add(*attachments)
[ "def", "bulk_create_posts", "(", "self", ",", "posts", ",", "post_categories", ",", "post_tags", ",", "post_media_attachments", ")", ":", "Post", ".", "objects", ".", "bulk_create", "(", "posts", ")", "# attach many-to-ones", "for", "post_wp_id", ",", "categories"...
50.380952
31.333333
def _separable_approx2(h, N=1):
    """Returns the N first approximations to the 2d function h
    whose sum should be h.
    """
    return np.cumsum([np.outer(fy, fx) for fy, fx in _separable_series2(h, N)], 0)
[ "def", "_separable_approx2", "(", "h", ",", "N", "=", "1", ")", ":", "return", "np", ".", "cumsum", "(", "[", "np", ".", "outer", "(", "fy", ",", "fx", ")", "for", "fy", ",", "fx", "in", "_separable_series2", "(", "h", ",", "N", ")", "]", ",", ...
41.6
13.2
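The helper _separable_series2 is not part of this record. A common way to build such a separable series is a truncated SVD of h, since the best rank-1 terms under the Frobenius norm are scaled singular-vector outer products; the sketch below assumes that construction (the names _separable_series2_svd and the test data are illustrative, not from the source).

import numpy as np

def _separable_series2_svd(h, N=1):
    # Hypothetical stand-in for _separable_series2: each (fy, fx) pair is a
    # scaled left/right singular vector pair of h.
    U, s, Vt = np.linalg.svd(h, full_matrices=False)
    return [(U[:, i] * np.sqrt(s[i]), Vt[i] * np.sqrt(s[i])) for i in range(N)]

# Sanity check: summing all rank-1 terms of a rank-2 array reconstructs it.
h = np.outer([1.0, 2.0, 3.0], [4.0, 5.0]) + np.outer([0.5, 0.1, 0.2], [1.0, -1.0])
approx = np.cumsum([np.outer(fy, fx) for fy, fx in _separable_series2_svd(h, 2)], 0)
assert np.allclose(approx[-1], h)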
def add_table(self, rows, cols, style=None):
    """
    Add a table having row and column counts of *rows* and *cols*
    respectively and table style of *style*. *style* may be a table style
    object or a table style name. If *style* is |None|, the table inherits
    the default table style of the document.
    """
    table = self._body.add_table(rows, cols, self._block_width)
    table.style = style
    return table
[ "def", "add_table", "(", "self", ",", "rows", ",", "cols", ",", "style", "=", "None", ")", ":", "table", "=", "self", ".", "_body", ".", "add_table", "(", "rows", ",", "cols", ",", "self", ".", "_block_width", ")", "table", ".", "style", "=", "styl...
46
18.4
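Assuming this record comes from python-docx, where Document.add_table is the public entry point, a quick usage sketch:

from docx import Document  # python-docx

doc = Document()
table = doc.add_table(rows=2, cols=3)  # delegates to the method above
table.cell(0, 0).text = "header"
doc.save("demo.docx")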
def install_apt(self, fn=None, package_name=None, update=0, list_only=0):
    """
    Installs system packages listed in apt-requirements.txt.
    """
    r = self.local_renderer
    assert self.genv[ROLE]
    apt_req_fqfn = fn or (self.env.apt_requirments_fn and
                          self.find_template(self.env.apt_requirments_fn))
    if not apt_req_fqfn:
        return []
    assert os.path.isfile(apt_req_fqfn)

    lines = list(self.env.apt_packages or [])
    for _ in open(apt_req_fqfn).readlines():
        if _.strip() and not _.strip().startswith('#') \
        and (not package_name or _.strip() == package_name):
            lines.extend(_pkg.strip() for _pkg in _.split(' ') if _pkg.strip())

    if list_only:
        return lines

    tmp_fn = r.write_temp_file('\n'.join(lines))
    apt_req_fqfn = tmp_fn
    if not self.genv.is_local:
        r.put(local_path=tmp_fn, remote_path=tmp_fn)
        apt_req_fqfn = self.genv.put_remote_path
    r.sudo('DEBIAN_FRONTEND=noninteractive apt-get -yq update --fix-missing')
    r.sudo('DEBIAN_FRONTEND=noninteractive apt-get -yq install `cat "%s" | tr "\\n" " "`' % apt_req_fqfn)
[ "def", "install_apt", "(", "self", ",", "fn", "=", "None", ",", "package_name", "=", "None", ",", "update", "=", "0", ",", "list_only", "=", "0", ")", ":", "r", "=", "self", ".", "local_renderer", "assert", "self", ".", "genv", "[", "ROLE", "]", "a...
42.285714
23.071429
def _client_run(self):
    """MessageReceiver Link is now open - start receiving messages.
    Will return True if operation successful and client can remain open for
    further work.

    :rtype: bool
    """
    self._connection.work()
    now = self._counter.get_current_ms()
    if self._last_activity_timestamp and not self._was_message_received:
        # If no messages are coming through, back off a little to keep CPU use low.
        time.sleep(0.05)
        if self._timeout > 0:
            timespan = now - self._last_activity_timestamp
            if timespan >= self._timeout:
                _logger.info("Timeout reached, closing receiver.")
                self._shutdown = True
    else:
        self._last_activity_timestamp = now
    self._was_message_received = False
    return True
[ "def", "_client_run", "(", "self", ")", ":", "self", ".", "_connection", ".", "work", "(", ")", "now", "=", "self", ".", "_counter", ".", "get_current_ms", "(", ")", "if", "self", ".", "_last_activity_timestamp", "and", "not", "self", ".", "_was_message_re...
41.047619
17.428571
def findpeak_single(x, y, dy=None, position=None, hwhm=None, baseline=None,
                    amplitude=None, curve='Lorentz', return_stat=False,
                    signs=(-1, 1), return_x=None):
    """Find a (positive or negative) peak in the dataset.

    Inputs:
        x, y, dy: abscissa, ordinate and the error of the ordinate (can be None)
        position, hwhm, baseline, amplitude: first guesses for the named parameters
        curve: 'Gauss' or 'Lorentz' (default)
        return_stat: return fitting statistics from easylsq.nlsq_fit()
        signs: a tuple, can be (1,), (-1,), (1, -1). Will try these signs for
            the peak amplitude
        return_x: abscissa on which the fitted function form has to be evaluated

    Outputs:
        peak position, hwhm, baseline, amplitude[, stat][, peakfunction]

        where:
            peak position, hwhm, baseline, amplitude are ErrorValue instances.
            stat is the statistics dictionary, returned only if return_stat is True
            peakfunction is the fitted peak evaluated at return_x if it is not None.

    Notes:
        A Gauss or a Lorentz curve is fitted, depending on the value of 'curve'.
        The abscissa should be sorted, ascending.
    """
    y_orig = y
    if dy is None:
        dy = np.ones_like(x)
    if curve.upper().startswith('GAUSS'):
        def fitfunc(x_, amplitude_, position_, hwhm_, baseline_):
            return amplitude_ * np.exp(-0.5 * (x_ - position_) ** 2 / hwhm_ ** 2) + baseline_
    elif curve.upper().startswith('LORENTZ'):
        def fitfunc(x_, amplitude_, position_, hwhm_, baseline_):
            return amplitude_ * hwhm_ ** 2 / (hwhm_ ** 2 + (position_ - x_) ** 2) + baseline_
    else:
        raise ValueError('Invalid curve type: {}'.format(curve))
    results = []
    # we try fitting a positive and a negative peak and return the better fit
    # (where R2 is larger)
    for sign in signs:
        init_params = {'position': position, 'hwhm': hwhm,
                       'baseline': baseline, 'amplitude': amplitude}
        y = y_orig * sign
        if init_params['position'] is None:
            init_params['position'] = x[y == y.max()][0]
        if init_params['hwhm'] is None:
            init_params['hwhm'] = 0.5 * (x.max() - x.min())
        if init_params['baseline'] is None:
            init_params['baseline'] = y.min()
        if init_params['amplitude'] is None:
            init_params['amplitude'] = y.max() - init_params['baseline']
        results.append(nlsq_fit(x, y, dy, fitfunc,
                                (init_params['amplitude'], init_params['position'],
                                 init_params['hwhm'], init_params['baseline'])) + (sign,))
    max_R2 = max([r[2]['R2'] for r in results])
    p, dp, stat, sign = [r for r in results if r[2]['R2'] == max_R2][0]
    retval = [ErrorValue(p[1], dp[1]), ErrorValue(abs(p[2]), dp[2]),
              sign * ErrorValue(p[3], dp[3]), sign * ErrorValue(p[0], dp[0])]
    if return_stat:
        stat['func_value'] = stat['func_value'] * sign
        retval.append(stat)
    if return_x is not None:
        retval.append(sign * fitfunc(return_x, p[0], p[1], p[2], p[3]))
    return tuple(retval)
[ "def", "findpeak_single", "(", "x", ",", "y", ",", "dy", "=", "None", ",", "position", "=", "None", ",", "hwhm", "=", "None", ",", "baseline", "=", "None", ",", "amplitude", "=", "None", ",", "curve", "=", "'Lorentz'", ",", "return_stat", "=", "False...
54.54386
30.894737
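A usage sketch for the record above, on synthetic data (the peak parameters here are made up for illustration; findpeak_single also needs nlsq_fit and ErrorValue from its own module):

import numpy as np

# A Lorentzian of amplitude 3, HWHM 0.8, on a flat baseline of 0.2, plus noise.
x = np.linspace(-5.0, 5.0, 201)
y = 3.0 * 0.8**2 / (0.8**2 + x**2) + 0.2 + 0.05 * np.random.randn(x.size)
position, hwhm, baseline, amplitude = findpeak_single(x, y, curve='Lorentz')
print(position, hwhm, baseline, amplitude)  # ErrorValue instances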
def visit_Dict(self, node):
    """
    Process dict arguments.
    """
    if self.should_check_whitelist(node):
        for key in node.keys:
            if key.s in self.whitelist or key.s.startswith("debug_"):
                continue
            self.violations.append((self.current_logging_call,
                                    WHITELIST_VIOLATION.format(key.s)))

    if self.should_check_extra_exception(node):
        for value in node.values:
            self.check_exception_arg(value)

    super(LoggingVisitor, self).generic_visit(node)
[ "def", "visit_Dict", "(", "self", ",", "node", ")", ":", "if", "self", ".", "should_check_whitelist", "(", "node", ")", ":", "for", "key", "in", "node", ".", "keys", ":", "if", "key", ".", "s", "in", "self", ".", "whitelist", "or", "key", ".", "s",...
34.4375
18.5625
def variable_cols(X, tol=0.0, min_constant=0):
    """Evaluates which columns are constant (0) or variable (1)

    Parameters
    ----------
    X : ndarray
        Matrix whose columns will be checked for constant or variable.
    tol : float
        Tolerance for float-matrices. When set to 0, only columns with
        exactly equal values will be considered constant. When set to a
        positive value, columns where all elements have absolute differences
        to the first element of that column below tol are considered
        constant.
    min_constant : int
        Minimal number of constant columns to resume operation. If at one
        point the number of constant columns drops below min_constant, the
        computation will stop and all columns will be assumed to be variable.
        In this case, an all-True array will be returned.

    Returns
    -------
    variable : bool-array
        Array with number of elements equal to the columns. True: column is
        variable / non-constant. False: column is constant.
    """
    if X is None:
        return None
    from pyemma._ext.variational.estimators.covar_c._covartools import (
        variable_cols_double, variable_cols_float, variable_cols_int,
        variable_cols_long, variable_cols_char)
    # prepare column array
    cols = numpy.zeros(X.shape[1], dtype=numpy.bool, order='C')
    if X.dtype == numpy.float64:
        completed = variable_cols_double(cols, X, tol, min_constant)
    elif X.dtype == numpy.float32:
        completed = variable_cols_float(cols, X, tol, min_constant)
    elif X.dtype == numpy.int32:
        completed = variable_cols_int(cols, X, 0, min_constant)
    elif X.dtype == numpy.int64:
        completed = variable_cols_long(cols, X, 0, min_constant)
    elif X.dtype == numpy.bool:
        completed = variable_cols_char(cols, X, 0, min_constant)
    else:
        raise TypeError('unsupported type of X: %s' % X.dtype)
    # if interrupted, return all ones. Otherwise return the variable columns
    # as a bool array
    if completed == 0:
        return numpy.ones_like(cols, dtype=numpy.bool)
    return cols
[ "def", "variable_cols", "(", "X", ",", "tol", "=", "0.0", ",", "min_constant", "=", "0", ")", ":", "if", "X", "is", "None", ":", "return", "None", "from", "pyemma", ".", "_ext", ".", "variational", ".", "estimators", ".", "covar_c", ".", "_covartools",...
43.886792
26.849057
def group_add(self, name='Ungrouped'):
    """
    Dynamically add a group instance to the system if it does not exist.

    Parameters
    ----------
    name : str, optional ('Ungrouped' as default)
        Name of the group

    Returns
    -------
    None
    """
    if not hasattr(self, name):
        self.__dict__[name] = Group(self, name)
        self.loaded_groups.append(name)
[ "def", "group_add", "(", "self", ",", "name", "=", "'Ungrouped'", ")", ":", "if", "not", "hasattr", "(", "self", ",", "name", ")", ":", "self", ".", "__dict__", "[", "name", "]", "=", "Group", "(", "self", ",", "name", ")", "self", ".", "loaded_gro...
26.0625
17.1875
def update(cls, content, dependency, version, spec="==", hashes=()):
    """
    Updates the requirement to the latest version for the given content
    and adds hashes if necessary.
    :param content: str, content
    :return: str, updated content
    """
    new_line = "{name}{spec}{version}".format(name=dependency.full_name,
                                              spec=spec, version=version)
    appendix = ''
    # leave environment markers intact
    if ";" in dependency.line:
        # condense multiline, split out the env marker, strip comments and --hashes
        new_line += ";" + dependency.line.splitlines()[0].split(";", 1)[1] \
            .split("#")[0].split("--hash")[0].rstrip()
    # add the comment
    if "#" in dependency.line:
        # split the line into parts: requirement and comment
        parts = dependency.line.split("#")
        requirement, comment = parts[0], "#".join(parts[1:])
        # find all whitespaces between the requirement and the comment
        whitespaces = (hex(ord('\t')), hex(ord(' ')))
        trailing_whitespace = ''
        for c in requirement[::-1]:
            if hex(ord(c)) in whitespaces:
                trailing_whitespace += c
            else:
                break
        appendix += trailing_whitespace + "#" + comment
    # if this is a hashed requirement, add a multiline break before the comment
    if dependency.hashes and not new_line.endswith("\\"):
        new_line += " \\"
    # if this is a hashed requirement, add the hashes
    if hashes:
        for n, new_hash in enumerate(hashes):
            new_line += "\n --hash={method}:{hash}".format(
                method=new_hash['method'],
                hash=new_hash['hash']
            )
            # append a new multiline break if this is not the last line
            if len(hashes) > n + 1:
                new_line += " \\"
    new_line += appendix
    regex = cls.SUB_REGEX.format(re.escape(dependency.line))
    return re.sub(regex, new_line, content, flags=re.MULTILINE)
[ "def", "update", "(", "cls", ",", "content", ",", "dependency", ",", "version", ",", "spec", "=", "\"==\"", ",", "hashes", "=", "(", ")", ")", ":", "new_line", "=", "\"{name}{spec}{version}\"", ".", "format", "(", "name", "=", "dependency", ".", "full_na...
45.934783
18.586957
def trace():
    """
    trace finds the line, the filename and error message and returns it to
    the user
    """
    import traceback
    import sys
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    # script name + line number
    line = tbinfo.split(", ")[1]
    # Get Python syntax error
    synerror = traceback.format_exc().splitlines()[-1]
    return line, __file__, synerror
[ "def", "trace", "(", ")", ":", "import", "traceback", "import", "sys", "tb", "=", "sys", ".", "exc_info", "(", ")", "[", "2", "]", "tbinfo", "=", "traceback", ".", "format_tb", "(", "tb", ")", "[", "0", "]", "# script name + line number", "line", "=", ...
25.75
12.125
def QA_util_get_pre_trade_date(cursor_date, n=1):
    """Get the n-th trading day before cursor_date (the current trading day
    is not counted).

    :param cursor_date:
    :param n:
    """
    cursor_date = QA_util_format_date2str(cursor_date)
    if cursor_date in trade_date_sse:
        return QA_util_date_gap(cursor_date, n, "lt")
    real_aft_trade_date = QA_util_get_real_date(cursor_date)
    return QA_util_date_gap(real_aft_trade_date, n, "lt")
[ "def", "QA_util_get_pre_trade_date", "(", "cursor_date", ",", "n", "=", "1", ")", ":", "cursor_date", "=", "QA_util_format_date2str", "(", "cursor_date", ")", "if", "cursor_date", "in", "trade_date_sse", ":", "return", "QA_util_date_gap", "(", "cursor_date", ",", ...
31.5
15.166667
def get_post_data(self):
    '''
    Get all the arguments from a POST request. Only the first value of
    each argument is returned.
    '''
    post_data = {}
    for key in self.request.arguments:
        post_data[key] = self.get_arguments(key)[0]
    return post_data
[ "def", "get_post_data", "(", "self", ")", ":", "post_data", "=", "{", "}", "for", "key", "in", "self", ".", "request", ".", "arguments", ":", "post_data", "[", "key", "]", "=", "self", ".", "get_arguments", "(", "key", ")", "[", "0", "]", "return", ...
34.625
21.625
def encode_features(X, enc_map=None):
    """Converts categorical values in each column of X to integers in the range
    [0, n_unique_values_in_column - 1], if X is not already of integer type.

    If mapping is not provided, it is calculated based on the values in X.

    Unknown values during prediction get a value of -1. np.NaNs are ignored
    during encoding, and get treated as unknowns during prediction.
    """
    if enc_map is None:
        fit = True
        # We will calculate enc_map, so initialize the list of column mappings.
        enc_map = []
    else:
        fit = False

    Xenc = np.zeros(X.shape).astype('int')
    for ii in range(X.shape[1]):
        if fit:
            col_enc = {val: jj for jj, val in enumerate(np.unique(X[:, ii]))
                       if not (isinstance(val, float) and np.isnan(val))}
            enc_map.append(col_enc)
        # Unknown categories (including np.NaNs) all get a value of -1.
        Xenc[:, ii] = np.array([enc_map[ii].get(x, -1) for x in X[:, ii]])

    return Xenc, enc_map
[ "def", "encode_features", "(", "X", ",", "enc_map", "=", "None", ")", ":", "if", "enc_map", "is", "None", ":", "fit", "=", "True", "# We will calculate enc_map, so initialize the list of column mappings.", "enc_map", "=", "[", "]", "else", ":", "fit", "=", "Fals...
39.423077
24.423077
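A usage sketch for the record above, with a made-up two-column categorical array:

import numpy as np

X = np.array([['red', 'S'],
              ['blue', 'M'],
              ['red', 'L']], dtype=object)
Xenc, enc_map = encode_features(X)
# Xenc columns now hold integers in [0, n_unique - 1]; reusing enc_map on new
# data maps unseen categories to -1.
Xnew, _ = encode_features(np.array([['green', 'M']], dtype=object), enc_map=enc_map)
print(Xenc)
print(Xnew)  # [[-1  1]] -- 'green' was never seen during fitting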
def managed_process(process):
    """Wrapper for subprocess.Popen to work across various Python versions,
    when using the with syntax."""
    try:
        yield process
    finally:
        for stream in [process.stdout, process.stdin, process.stderr]:
            if stream:
                stream.close()
        process.wait()
[ "def", "managed_process", "(", "process", ")", ":", "try", ":", "yield", "process", "finally", ":", "for", "stream", "in", "[", "process", ".", "stdout", ",", "process", ".", "stdin", ",", "process", ".", "stderr", "]", ":", "if", "stream", ":", "strea...
35.555556
18.444444
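The record reads like a generator-based context manager, presumably decorated with contextlib.contextmanager in its original module; under that assumption:

import subprocess
from contextlib import contextmanager

# Assuming the original module applies this decorator:
managed_process = contextmanager(managed_process)

with managed_process(subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE)) as proc:
    print(proc.stdout.read())
# Streams are closed and the process is reaped on exit, on any Python version.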
def move_arc(x, y, r, speed=1, orientation=True):
    # WARNING: This function currently contains inaccuracy likely due to the
    # rounding of trigonometric functions
    """
    Moves the cursor in an arc of radius r to (x, y) at a certain speed

    :param x: target x-ordinate
    :param y: target y-ordinate
    :param r: radius
    :param speed: pixel traversal rate
    :param orientation: direction of arc
    :return: None
    """
    _x, _y = win32api.GetCursorPos()
    c_len = (r**2 - (((x - _x)/2)**2 + ((y - _y)/2)**2))**0.5
    t = (c_len**2/((y - _y)**2 + (x - _x)**2))**0.5
    t = t if orientation else -t
    centre = ((_x + x)/2 + t*(_x - x), (_y + y)/2 + t*(y - _y))
    if any(isinstance(ordinate, complex) for ordinate in centre):
        raise ValueError("Radius too low - minimum: {}".format(
            ((x - _x)**2 + (y - _y)**2)**0.5/2))
    theta = math.atan2(_y - centre[1], _x - centre[0])
    end = math.atan2(y - centre[1], x - centre[0])
    while theta < end:
        move(*list(map(round, (centre[0] + r*math.cos(theta),
                               centre[1] + r*math.sin(theta)))))
        theta += speed/100
        time.sleep(0.01)
    move(x, y)
[ "def", "move_arc", "(", "x", ",", "y", ",", "r", ",", "speed", "=", "1", ",", "orientation", "=", "True", ")", ":", "# WARNING: This function currently contains inaccuracy likely due to the rounding of trigonometric functions", "_x", ",", "_y", "=", "win32api", ".", ...
41.444444
20.851852
def cleanup():
    """Cleanup the output directory"""
    if _output_dir and os.path.exists(_output_dir):
        log.msg_warn("Cleaning up output directory at '{output_dir}' ..."
                     .format(output_dir=_output_dir))
        if not _dry_run:
            shutil.rmtree(_output_dir)
[ "def", "cleanup", "(", ")", ":", "if", "_output_dir", "and", "os", ".", "path", ".", "exists", "(", "_output_dir", ")", ":", "log", ".", "msg_warn", "(", "\"Cleaning up output directory at '{output_dir}' ...\"", ".", "format", "(", "output_dir", "=", "_output_di...
41.571429
14.428571
def match(self, message: Message) -> bool:
    """
    Matches a message with the behaviour's template

    Args:
        message (spade.message.Message): the message to match with

    Returns:
        bool: whether the message matches or not
    """
    if self.template:
        return self.template.match(message)
    return True
[ "def", "match", "(", "self", ",", "message", ":", "Message", ")", "->", "bool", ":", "if", "self", ".", "template", ":", "return", "self", ".", "template", ".", "match", "(", "message", ")", "return", "True" ]
25.428571
19.142857
def create_mailbox(self, actor, loop):
    '''Create the mailbox for ``actor``.'''
    client = MailboxClient(actor.monitor.address, actor, loop)
    loop.call_soon_threadsafe(self.hand_shake, actor)
    return client
[ "def", "create_mailbox", "(", "self", ",", "actor", ",", "loop", ")", ":", "client", "=", "MailboxClient", "(", "actor", ".", "monitor", ".", "address", ",", "actor", ",", "loop", ")", "loop", ".", "call_soon_threadsafe", "(", "self", ".", "hand_shake", ...
45.8
14.2
def append_varint32(self, value):
    """Appends a signed 32-bit integer to the internal buffer, encoded as a
    varint.  (Note that a negative varint32 will always require 10 bytes of
    space.)
    """
    if not wire_format.INT32_MIN <= value <= wire_format.INT32_MAX:
        raise errors.EncodeError('Value out of range: %d' % value)
    self.append_varint64(value)
[ "def", "append_varint32", "(", "self", ",", "value", ")", ":", "if", "not", "wire_format", ".", "INT32_MIN", "<=", "value", "<=", "wire_format", ".", "INT32_MAX", ":", "raise", "errors", ".", "EncodeError", "(", "'Value out of range: %d'", "%", "value", ")", ...
49.125
12.5
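Protobuf-style varints emit an integer seven bits at a time, low bits first, with the high bit of each byte marking continuation; a negative value is first reinterpreted as unsigned 64-bit, which is why the docstring above promises 10 bytes (ceil(64 / 7) == 10). A minimal sketch of the encoder that append_varint64 presumably wraps (not the library's own code):

def encode_varint64(value):
    # Two's-complement view of negative inputs, as 64-bit unsigned.
    value &= (1 << 64) - 1
    out = bytearray()
    while True:
        bits = value & 0x7F
        value >>= 7
        if value:
            out.append(bits | 0x80)  # continuation bit: more groups follow
        else:
            out.append(bits)
            return bytes(out)

assert len(encode_varint64(-1)) == 10
assert encode_varint64(300) == b'\xac\x02'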
def _register_aggregate(agg, con):
    """Register a Python class that performs aggregation in SQLite.

    Parameters
    ----------
    agg : type
    con : sqlalchemy.Connection
    """
    nargs = number_of_arguments(agg.step) - 1  # because self
    con.connection.connection.create_aggregate(agg.__name__, nargs, agg)
[ "def", "_register_aggregate", "(", "agg", ",", "con", ")", ":", "nargs", "=", "number_of_arguments", "(", "agg", ".", "step", ")", "-", "1", "# because self", "con", ".", "connection", ".", "connection", ".", "create_aggregate", "(", "agg", ".", "__name__", ...
31.4
18.6
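The double .connection hop above unwraps SQLAlchemy down to the raw sqlite3 connection, whose create_aggregate expects a class with step() and finalize() methods. A self-contained sketch with a hypothetical aggregate class:

import sqlite3

class StdDevZero:
    # Hypothetical aggregate: population standard deviation.
    def __init__(self):
        self.values = []

    def step(self, value):
        self.values.append(value)

    def finalize(self):
        n = len(self.values)
        if n == 0:
            return None
        mean = sum(self.values) / n
        return (sum((v - mean) ** 2 for v in self.values) / n) ** 0.5

con = sqlite3.connect(':memory:')
# The second argument mirrors number_of_arguments(agg.step) - 1 in the record.
con.create_aggregate('stddev_zero', 1, StdDevZero)
con.execute('CREATE TABLE t (x REAL)')
con.executemany('INSERT INTO t VALUES (?)', [(1.0,), (2.0,), (3.0,)])
print(con.execute('SELECT stddev_zero(x) FROM t').fetchone())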
def _field_sort_name(cls, name):
    """Get a sort key for a field name that determines the order fields
    should be written in.

    Field names are kept unchanged, unless they are instances of
    :class:`DateItemField`, in which case `year`, `month`, and `day` are
    replaced by `date0`, `date1`, and `date2`, respectively, to make them
    appear in that order.
    """
    if isinstance(cls.__dict__[name], DateItemField):
        name = re.sub('year', 'date0', name)
        name = re.sub('month', 'date1', name)
        name = re.sub('day', 'date2', name)
    return name
[ "def", "_field_sort_name", "(", "cls", ",", "name", ")", ":", "if", "isinstance", "(", "cls", ".", "__dict__", "[", "name", "]", ",", "DateItemField", ")", ":", "name", "=", "re", ".", "sub", "(", "'year'", ",", "'date0'", ",", "name", ")", "name", ...
44.357143
15
def check_pause(self, low):
    '''
    Check to see if this low chunk has been paused
    '''
    if not self.jid:
        # Can't pause on salt-ssh since we can't track continuous state
        return
    pause_path = os.path.join(self.opts['cachedir'], 'state_pause', self.jid)
    start = time.time()
    if os.path.isfile(pause_path):
        try:
            while True:
                tries = 0
                with salt.utils.files.fopen(pause_path, 'rb') as fp_:
                    try:
                        pdat = msgpack_deserialize(fp_.read())
                    except msgpack.UnpackValueError:
                        # Reading race condition
                        if tries > 10:
                            # Break out if there are a ton of read errors
                            return
                        tries += 1
                        time.sleep(1)
                        continue
                    id_ = low['__id__']
                    key = ''
                    if id_ in pdat:
                        key = id_
                    elif '__all__' in pdat:
                        key = '__all__'
                    if key:
                        if 'duration' in pdat[key]:
                            now = time.time()
                            if now - start > pdat[key]['duration']:
                                return 'run'
                        if 'kill' in pdat[key]:
                            return 'kill'
                    else:
                        return 'run'
                    time.sleep(1)
        except Exception as exc:
            log.error('Failed to read in pause data for file located at: %s',
                      pause_path)
            return 'run'
    return 'run'
[ "def", "check_pause", "(", "self", ",", "low", ")", ":", "if", "not", "self", ".", "jid", ":", "# Can't pause on salt-ssh since we can't track continuous state", "return", "pause_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "opts", "[", "'cach...
42.25
14.204545
def make_cutout(self, data, masked_array=False):
    """
    Create a (masked) cutout array from the input ``data`` using the
    minimal bounding box of the source segment.

    If ``masked_array`` is `False` (default), then the returned cutout
    array is simply a `~numpy.ndarray`.  The returned cutout is a view
    (not a copy) of the input ``data``.  No pixels are altered (e.g. set
    to zero) within the bounding box.

    If ``masked_array`` is `True`, then the returned cutout array is a
    `~numpy.ma.MaskedArray`.  The mask is `True` for pixels outside of the
    source segment (labeled region of interest), masked pixels from the
    ``mask`` input, or any non-finite ``data`` values (e.g. NaN or inf).
    The data part of the masked array is a view (not a copy) of the input
    ``data``.

    Parameters
    ----------
    data : array-like (2D)
        The data array from which to create the masked cutout array.
        ``data`` must have the same shape as the segmentation image input
        into `SourceProperties`.

    masked_array : bool, optional
        If `True` then a `~numpy.ma.MaskedArray` will be returned, where
        the mask is `True` for pixels outside of the source segment
        (labeled region of interest), masked pixels from the ``mask``
        input, or any non-finite ``data`` values (e.g. NaN or inf).  If
        `False`, then a `~numpy.ndarray` will be returned.

    Returns
    -------
    result : 2D `~numpy.ndarray` or `~numpy.ma.MaskedArray`
        The 2D cutout array.
    """
    data = np.asanyarray(data)
    if data.shape != self._segment_img.shape:
        raise ValueError('data must have the same shape as the '
                         'segmentation image input to SourceProperties')
    if masked_array:
        return np.ma.masked_array(data[self._slice], mask=self._total_mask)
    else:
        return data[self._slice]
[ "def", "make_cutout", "(", "self", ",", "data", ",", "masked_array", "=", "False", ")", ":", "data", "=", "np", ".", "asanyarray", "(", "data", ")", "if", "data", ".", "shape", "!=", "self", ".", "_segment_img", ".", "shape", ":", "raise", "ValueError"...
42.75
23.416667
def move_asset_ahead(self, asset_id, composition_id, reference_id):
    """Reorders assets in a composition by moving the specified asset in
    front of a reference asset.

    arg:    asset_id (osid.id.Id): ``Id`` of the ``Asset``
    arg:    composition_id (osid.id.Id): ``Id`` of the ``Composition``
    arg:    reference_id (osid.id.Id): ``Id`` of the reference ``Asset``
    raise:  NotFound - ``asset_id`` or ``reference_id`` ``not found in
            composition_id``
    raise:  NullArgument - ``asset_id, reference_id`` or
            ``composition_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*

    """
    self._provider_session.move_asset_ahead(self, asset_id, composition_id,
                                            reference_id)
[ "def", "move_asset_ahead", "(", "self", ",", "asset_id", ",", "composition_id", ",", "reference_id", ")", ":", "self", ".", "_provider_session", ".", "move_asset_ahead", "(", "self", ",", "asset_id", ",", "composition_id", ",", "reference_id", ")" ]
50.777778
20.944444
def skip(self):
    """
    Advance the internal pointer to the end of the data area in the
    stream.

    This allows the next call to :meth:`Reader.read` to succeed, as though
    all the data had been read by the application.
    """
    self.stream.seek(self.bytes_remaining(), os.SEEK_CUR)
    self._pos = self.length
[ "def", "skip", "(", "self", ")", ":", "self", ".", "stream", ".", "seek", "(", "self", ".", "bytes_remaining", "(", ")", ",", "os", ".", "SEEK_CUR", ")", "self", ".", "_pos", "=", "self", ".", "length" ]
38.555556
12.555556
def map_fit(interface, state, label, inp):
    """
    Calculates the sigmoid function (g) for every sample. With g it
    calculates parts of the Hessian matrix and the gradient, aggregates them
    and outputs them. It also calculates the J cost function, which is needed
    for checking the convergence of the parameters theta.
    """
    import numpy as np
    out = interface.output(0)
    H, J, grad = 0, 0, 0
    for row in inp:
        row = row.strip().split(state["delimiter"])  # split row
        if len(row) > 1:  # check if row is empty
            # add intercept term to every sample
            x = np.array([1] + [(0 if v in state["missing_vals"] else float(v))
                                for i, v in enumerate(row)
                                if i in state["X_indices"]])
            # map label value to 0 or 1. If label does not match set error
            y = (0 if state["y_map"][0] == row[state["y_index"]]
                 else 1 if state["y_map"][1] == row[state["y_index"]]
                 else "Error")
            g = 1. / (1 + np.exp(-np.dot(x, state["thetas"])))  # sigmoid function
            grad += x * (g - y)  # gradient
            H += np.multiply(np.outer(x, x), g * (1 - g))  # Hessian matrix
            J -= np.log(g) if y == 1 else np.log(1 - g)  # J cost function
    out.add("grad", grad)
    out.add("J", J)
    for i, row in enumerate(H):
        out.add(i, row)
[ "def", "map_fit", "(", "interface", ",", "state", ",", "label", ",", "inp", ")", ":", "import", "numpy", "as", "np", "out", "=", "interface", ".", "output", "(", "0", ")", "H", ",", "J", ",", "grad", "=", "0", ",", "0", ",", "0", "for", "row", ...
50.461538
29
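The reduce side is not part of this record; presumably it sums the partial H, grad and J contributions across partitions and takes a Newton-Raphson step. A minimal sketch of that update, with made-up aggregated values:

import numpy as np

def newton_step(thetas, H, grad):
    # One Newton-Raphson update for logistic regression: solve H d = grad
    # rather than inverting H explicitly. Assumes H and grad were aggregated
    # from the per-partition sums emitted by map_fit.
    return thetas - np.linalg.solve(H, grad)

H = np.array([[4.0, 1.0], [1.0, 3.0]])
grad = np.array([0.5, -0.2])
print(newton_step(np.zeros(2), H, grad))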
def find_model(model_name, apps=settings.INSTALLED_APPS, fuzziness=0):
    """Find model_name among the indicated Django apps and return the Model class

    Examples:
        To find models in an app called "miner":
        >>> find_model('WikiItem', 'miner')
        >>> find_model('Connection', 'miner')
        >>> find_model('InvalidModelName')
    """
    # if it looks like a file system path rather than a django
    # project.app.model path then return it as a string
    if '/' in model_name:
        return model_name
    if not apps and isinstance(model_name, basestring) and '.' in model_name:
        apps = [model_name.split('.')[0]]
    apps = util.listify(apps or settings.INSTALLED_APPS)
    for app in apps:
        # print 'getting %r, from app %r' % (model_name, app)
        model = get_model(model=model_name, app=app, fuzziness=fuzziness)
        if model:
            return model
    return None
[ "def", "find_model", "(", "model_name", ",", "apps", "=", "settings", ".", "INSTALLED_APPS", ",", "fuzziness", "=", "0", ")", ":", "# if it looks like a file system path rather than django project.app.model path the return it as a string", "if", "'/'", "in", "model_name", "...
38.434783
21.130435
def _dqdv_split_frames(cell, tidy=False, **kwargs):
    """Returns dqdv data as pandas.DataFrames for all cycles.

    Args:
        cell (CellpyData-object).
        tidy (bool): return in wide format if False (default), long (tidy)
            format if True.

    Returns:
        (charge_ica_frame, discharge_ica_frame) where the frames are
        pandas.DataFrames where the first column is voltage ('v') and the
        following columns are the incremental capacity for each cycle
        (multi-indexed, where cycle number is on the top level).

    Example:
        >>> from cellpy.utils import ica
        >>> charge_ica_df, dcharge_ica_df = ica.ica_frames(my_cell)
        >>> charge_ica_df.plot(x=("voltage", "v"))
    """
    charge_dfs, cycles, minimum_v, maximum_v = _collect_capacity_curves(
        cell, direction="charge"
    )
    # charge_df = pd.concat(
    #     charge_dfs, axis=1, keys=[k.name for k in charge_dfs])
    ica_charge_dfs = _make_ica_charge_curves(
        charge_dfs, cycles, minimum_v, maximum_v, **kwargs
    )
    ica_charge_df = pd.concat(
        ica_charge_dfs, axis=1, keys=[k.name for k in ica_charge_dfs]
    )

    dcharge_dfs, cycles, minimum_v, maximum_v = _collect_capacity_curves(
        cell, direction="discharge"
    )
    ica_dcharge_dfs = _make_ica_charge_curves(
        dcharge_dfs, cycles, minimum_v, maximum_v, **kwargs
    )
    ica_discharge_df = pd.concat(
        ica_dcharge_dfs, axis=1, keys=[k.name for k in ica_dcharge_dfs]
    )

    ica_charge_df.columns.names = ["cycle", "value"]
    ica_discharge_df.columns.names = ["cycle", "value"]

    if tidy:
        ica_charge_df = ica_charge_df.melt(
            "voltage", var_name="cycle", value_name="dq", col_level=0
        )
        ica_discharge_df = ica_discharge_df.melt(
            "voltage", var_name="cycle", value_name="dq", col_level=0
        )

    return ica_charge_df, ica_discharge_df
[ "def", "_dqdv_split_frames", "(", "cell", ",", "tidy", "=", "False", ",", "*", "*", "kwargs", ")", ":", "charge_dfs", ",", "cycles", ",", "minimum_v", ",", "maximum_v", "=", "_collect_capacity_curves", "(", "cell", ",", "direction", "=", "\"charge\"", ")", ...
30.044118
21.132353
def _first_line_re():
    """
    Return a regular expression based on first_line_re suitable for matching
    strings.
    """
    if isinstance(first_line_re.pattern, str):
        return first_line_re

    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
    return re.compile(first_line_re.pattern.decode())
[ "def", "_first_line_re", "(", ")", ":", "if", "isinstance", "(", "first_line_re", ".", "pattern", ",", "str", ")", ":", "return", "first_line_re", "# first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.", "return", "re", ".", "compile", "(", "first_line_re", ...
31.9
18.3
def prepend_model(self, value, model):
    """
    Prepends the model name if it is not already prepended. For example,
    if the model is "Offer":

        key        -> Offer.key
        -key       -> -Offer.key
        Offer.key  -> Offer.key
        -Offer.key -> -Offer.key
    """
    if '.' not in value:
        direction = ''
        if value.startswith('-'):
            value = value[1:]
            direction = '-'
        value = '%s%s.%s' % (direction, model, value)
    return value
[ "def", "prepend_model", "(", "self", ",", "value", ",", "model", ")", ":", "if", "'.'", "not", "in", "value", ":", "direction", "=", "''", "if", "value", ".", "startswith", "(", "'-'", ")", ":", "value", "=", "value", "[", "1", ":", "]", "direction...
30.352941
10.470588
def get_queryset(self, request):
    """Limit to Tenants that this user can access."""
    qs = super(TenantAdmin, self).get_queryset(request)
    if not request.user.is_superuser:
        tenants_by_group_manager_role = qs.filter(
            group__tenantrole__user=request.user,
            group__tenantrole__role=TenantRole.ROLE_GROUP_MANAGER
        )
        tenants_by_tenant_manager_role = qs.filter(
            tenantrole__user=request.user,
            tenantrole__role=TenantRole.ROLE_TENANT_MANAGER
        )
        return tenants_by_group_manager_role | tenants_by_tenant_manager_role
    return qs
[ "def", "get_queryset", "(", "self", ",", "request", ")", ":", "qs", "=", "super", "(", "TenantAdmin", ",", "self", ")", ".", "get_queryset", "(", "request", ")", "if", "not", "request", ".", "user", ".", "is_superuser", ":", "tenants_by_group_manager_role", ...
46.642857
17.571429
def compute_exported_specifications(svc_ref):
    # type: (pelix.framework.ServiceReference) -> List[str]
    """
    Computes the list of specifications exported by the given service

    :param svc_ref: A ServiceReference
    :return: The list of exported specifications (or an empty list)
    """
    if svc_ref.get_property(pelix.remote.PROP_EXPORT_NONE):
        # The export of this service is explicitly forbidden, stop here
        return []

    # Service specifications
    specs = svc_ref.get_property(pelix.constants.OBJECTCLASS)

    # Exported specifications
    exported_specs = svc_ref.get_property(pelix.remote.PROP_EXPORTED_INTERFACES)

    if exported_specs and exported_specs != "*":
        # A set of specifications is exported, replace "objectClass"
        iterable_exports = pelix.utilities.to_iterable(exported_specs, False)
        all_exported_specs = [
            spec for spec in specs if spec in iterable_exports
        ]
    else:
        # Export everything
        all_exported_specs = pelix.utilities.to_iterable(specs)

    # Authorized and rejected specifications
    export_only_specs = pelix.utilities.to_iterable(
        svc_ref.get_property(pelix.remote.PROP_EXPORT_ONLY), False
    )
    if export_only_specs:
        # Filter specifications (keep authorized specifications)
        return [
            spec for spec in all_exported_specs if spec in export_only_specs
        ]

    # Filter specifications (reject)
    rejected_specs = pelix.utilities.to_iterable(
        svc_ref.get_property(pelix.remote.PROP_EXPORT_REJECT), False
    )
    return [spec for spec in all_exported_specs if spec not in rejected_specs]
[ "def", "compute_exported_specifications", "(", "svc_ref", ")", ":", "# type: (pelix.framework.ServiceReference) -> List[str]", "if", "svc_ref", ".", "get_property", "(", "pelix", ".", "remote", ".", "PROP_EXPORT_NONE", ")", ":", "# The export of this service is explicitly forbi...
36.886364
23.704545
def _extract_from_sans(self):
    """Looks for different TLDs as well as different sub-domains in SAN list"""
    self.logger.info("{} Trying to find Subdomains in SANs list".format(COLORED_COMBOS.NOTIFY))
    if self.host.naked:
        domain = self.host.naked
        tld_less = domain.split(".")[0]
    else:
        domain = self.host.target.split(".")
        tld_less = domain[1]
        domain = ".".join(domain[1:])

    for san in self.sans:
        if (tld_less in san or domain in san) and self.target != san and not san.startswith("*"):
            self.logger.info("{} Subdomain detected: {}".format(COLORED_COMBOS.GOOD, san))
[ "def", "_extract_from_sans", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "\"{} Trying to find Subdomains in SANs list\"", ".", "format", "(", "COLORED_COMBOS", ".", "NOTIFY", ")", ")", "if", "self", ".", "host", ".", "naked", ":", "domain",...
48.214286
21.428571
def plot_grid(grid_arcsec, array, units, kpc_per_arcsec, pointsize, zoom_offset_arcsec):
    """Plot a grid of points over the array of data on the figure.

    Parameters
    ----------
    grid_arcsec : ndarray or data.array.grids.RegularGrid
        A grid of (y,x) coordinates in arc-seconds which may be plotted over the array.
    array : data.array.scaled_array.ScaledArray
        The 2D array of data which is plotted.
    units : str
        The units of the y / x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc').
    kpc_per_arcsec : float or None
        The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc.
    pointsize : int
        The size of the points plotted to show the grid.
    """
    if grid_arcsec is not None:
        if zoom_offset_arcsec is not None:
            grid_arcsec -= zoom_offset_arcsec
        grid_units = convert_grid_units(grid_arcsec=grid_arcsec, array=array,
                                        units=units, kpc_per_arcsec=kpc_per_arcsec)
        plt.scatter(y=np.asarray(grid_units[:, 0]), x=np.asarray(grid_units[:, 1]),
                    s=pointsize, c='k')
[ "def", "plot_grid", "(", "grid_arcsec", ",", "array", ",", "units", ",", "kpc_per_arcsec", ",", "pointsize", ",", "zoom_offset_arcsec", ")", ":", "if", "grid_arcsec", "is", "not", "None", ":", "if", "zoom_offset_arcsec", "is", "not", "None", ":", "grid_arcsec"...
46
27.12
def execute_or_create_resource(self, device_id, _resource_path, **kwargs):  # noqa: E501
    """Execute a function on a Resource or create a new Object instance  # noqa: E501

    With this API, you can [execute a function](/docs/current/connecting/handle-resource-webapp.html#the-execute-operation)
    on an existing resource and create a new Object instance on the device. The
    resource-path does not have to exist - it can be created by the call. The
    maximum length of resource-path is 255 characters.

    All resource APIs are asynchronous. These APIs respond only if the device
    is turned on and connected to Device Management Connect and there is an
    active notification channel.

    Supported content types depend on the device and its resource. Device
    Management translates HTTP to the equivalent CoAP content type.

    **Example usage:**

    This example resets the min and max values of the
    [temperature sensor](http://www.openmobilealliance.org/tech/profiles/lwm2m/3303.xml)
    instance 0 by executing the Resource 5605 'Reset Min and Max Measured Values'.

        curl -X POST \\
            https://api.us-east-1.mbedcloud.com/v2/endpoints/{device-id}/3303/0/5605 \\
            -H 'authorization: Bearer {api-key}'

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.execute_or_create_resource(device_id, _resource_path, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str device_id: A unique Device Management device ID for the endpoint. Note that the ID must be an exact match. You cannot use wildcards here. (required)
    :param str _resource_path: The URL of the resource. (required)
    :param str resource_function: This value is not needed. Most of the time resources do not accept a function but they have their own functions predefined. You can use this to trigger them. If a function is included, the body of this request is passed as a char* to the function in Device Management Client.
    :param bool no_resp: Non-confirmable requests: All resource APIs have the parameter noResp. If you make a request with `noResp=true`, Device Management Connect makes a CoAP non-confirmable request to the device. Such requests are not guaranteed to arrive in the device, and you do not get back an async-response-id. If calls with this parameter enabled succeed, they return with the status code `204 No Content`. If the underlying protocol does not support non-confirmable requests, or if the endpoint is registered in queue mode, the response is status code `409 Conflict`.
    :return: AsyncID
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('asynchronous'):
        return self.execute_or_create_resource_with_http_info(device_id, _resource_path, **kwargs)  # noqa: E501
    else:
        (data) = self.execute_or_create_resource_with_http_info(device_id, _resource_path, **kwargs)  # noqa: E501
        return data
[ "def", "execute_or_create_resource", "(", "self", ",", "device_id", ",", "_resource_path", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", "...
131.416667
103.75
def expandvars(self, text, environ=None, cache=None):
    """
    Recursively expands the text variables, vs. the os.path
    method which only works at one level.  The cache value should be
    left blank as it is used to protect against recursion.

    :param      text    | <str>
                environ | <dict> || None
                cache   | <dict> { <str>: <str>, .. }

    :return     <str>
    """
    if not environ:
        environ = os.environ

    # make sure we have data
    if not text:
        return ''

    # check for circular dependencies
    cache = cache or {}

    # return the cleaned variable
    output = nstr(text)
    keys = re.findall(r'\$(\w+)|\${(\w+)\}|\%(\w+)\%', text)

    for first, second, third in keys:
        repl = ''
        key = ''
        if first:
            repl = '$%s' % first
            key = first
        elif second:
            repl = '${%s}' % second
            key = second
        elif third:
            repl = '%%%s%%' % third
            key = third
        else:
            continue

        value = environ.get(key)
        if value:
            if key not in cache:
                cache[key] = value
                value = self.expandvars(value, environ, cache)
            else:
                err = '%s environ variable causes an infinite loop.' % key
                logger.warning(err)
                value = cache[key]
        else:
            value = repl

        output = output.replace(repl, value)

    return os.path.expanduser(output)
[ "def", "expandvars", "(", "self", ",", "text", ",", "environ", "=", "None", ",", "cache", "=", "None", ")", ":", "if", "not", "environ", ":", "environ", "=", "os", ".", "environ", "# make sure we have data", "if", "not", "text", ":", "return", "''", "#...
30.678571
16.607143
def jsonHook(encoded):
    """Custom JSON decoder that allows construction of a new ``Smi`` instance
    from a decoded JSON object.

    :param encoded: a JSON decoded object literal (a dict)

    :returns: "encoded" or one of these objects: :class:`Smi`,
        :class:`MzmlScan`, :class:`MzmlProduct`, :class:`MzmlPrecursor`
    """
    if '__Smi__' in encoded:
        return Smi._fromJSON(encoded['__Smi__'])
    elif '__MzmlScan__' in encoded:
        return MzmlScan._fromJSON(encoded['__MzmlScan__'])
    elif '__MzmlProduct__' in encoded:
        return MzmlProduct._fromJSON(encoded['__MzmlProduct__'])
    elif '__MzmlPrecursor__' in encoded:
        return MzmlPrecursor._fromJSON(encoded['__MzmlPrecursor__'])
    else:
        return encoded
[ "def", "jsonHook", "(", "encoded", ")", ":", "if", "'__Smi__'", "in", "encoded", ":", "return", "Smi", ".", "_fromJSON", "(", "encoded", "[", "'__Smi__'", "]", ")", "elif", "'__MzmlScan__'", "in", "encoded", ":", "return", "MzmlScan", ".", "_fromJSON", "("...
42.421053
17.842105
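A function like this is wired in through the object_hook parameter of json.loads; a usage sketch (the payload shape is hypothetical - whatever the matching encoder emits under the '__MzmlScan__' marker):

import json

payload = '{"__MzmlScan__": {"arrays": [], "params": []}}'
obj = json.loads(payload, object_hook=jsonHook)
# obj is now an MzmlScan instance; dicts without a recognized marker pass
# through unchanged.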
def plugin(cls, name):
    """
    Retrieves the plugin based on the inputted name.

    :param      name | <str>

    :return     <Plugin>
    """
    cls.loadPlugins()
    plugs = getattr(cls, '_%s__plugins' % cls.__name__, {})
    return plugs.get(nstr(name))
[ "def", "plugin", "(", "cls", ",", "name", ")", ":", "cls", ".", "loadPlugins", "(", ")", "plugs", "=", "getattr", "(", "cls", ",", "'_%s__plugins'", "%", "cls", ".", "__name__", ",", "{", "}", ")", "return", "plugs", ".", "get", "(", "nstr", "(", ...
27.272727
14.545455
def can_lookup_assets(self):
    """Tests if this user can perform ``Asset`` lookups.

    A return of true does not guarantee successful authorization. A return
    of false indicates that it is known all methods in this session will
    result in a ``PermissionDenied``. This is intended as a hint to an
    application that may opt not to offer lookup operations.

    :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
    :rtype: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    url_path = construct_url('authorization', bank_id=self._catalog_idstr)
    return self._get_request(url_path)['assetHints']['canLookup']
[ "def", "can_lookup_assets", "(", "self", ")", ":", "url_path", "=", "construct_url", "(", "'authorization'", ",", "bank_id", "=", "self", ".", "_catalog_idstr", ")", "return", "self", ".", "_get_request", "(", "url_path", ")", "[", "'assetHints'", "]", "[", ...
39.684211
26.052632
def find_label(self, label: Label):
    """
    Helper function that iterates over the program and looks for a
    JumpTarget that has a Label matching the input label.

    :param label: Label object to search for in program
    :return: Program index where ``label`` is found
    """
    for index, action in enumerate(self.program):
        if isinstance(action, JumpTarget):
            if label == action.label:
                return index
    raise RuntimeError("Improper program - Jump Target not found in the "
                       "input program!")
[ "def", "find_label", "(", "self", ",", "label", ":", "Label", ")", ":", "for", "index", ",", "action", "in", "enumerate", "(", "self", ".", "program", ")", ":", "if", "isinstance", "(", "action", ",", "JumpTarget", ")", ":", "if", "label", "==", "act...
39.666667
16.066667
def print(self, *objects, **kwargs):
    """Mimic the print interface"""
    file = kwargs.get("file")
    if file is not None and file is not sys.stdout:
        PRINT(*objects, **kwargs)
    else:
        sep = STR(kwargs.get("sep", " "))
        end = STR(kwargs.get("end", "\n"))
        text = sep.join(STR(o) for o in objects)
        self.imp_print(text, end)
        for callback in self.listeners:
            callback(text)
[ "def", "print", "(", "self", ",", "*", "objects", ",", "*", "*", "kwargs", ")", ":", "file", "=", "kwargs", ".", "get", "(", "\"file\"", ")", "if", "file", "is", "not", "None", "and", "file", "is", "not", "sys", ".", "stdout", ":", "PRINT", "(", ...
21.823529
19
async def fetch_emoji(self, emoji_id):
    """|coro|

    Retrieves a custom :class:`Emoji` from the guild.

    .. note::

        This method is an API call.
        For general usage, consider iterating over :attr:`emojis` instead.

    Parameters
    -------------
    emoji_id: :class:`int`
        The emoji's ID.

    Raises
    ---------
    NotFound
        The emoji requested could not be found.
    HTTPException
        An error occurred fetching the emoji.

    Returns
    --------
    :class:`Emoji`
        The retrieved emoji.
    """
    data = await self._state.http.get_custom_emoji(self.id, emoji_id)
    return Emoji(guild=self, state=self._state, data=data)
[ "async", "def", "fetch_emoji", "(", "self", ",", "emoji_id", ")", ":", "data", "=", "await", "self", ".", "_state", ".", "http", ".", "get_custom_emoji", "(", "self", ".", "id", ",", "emoji_id", ")", "return", "Emoji", "(", "guild", "=", "self", ",", ...
25.551724
21.62069
def parse_function(fn):
    """Get the source of a function and return its AST."""
    try:
        return parse_string(inspect.getsource(fn))
    except (IOError, OSError) as e:
        raise ValueError(
            'Cannot differentiate function: %s. Tangent must be able to access the '
            'source code of the function. Functions defined in a Python '
            'interpreter and functions backed by C extension modules do not '
            'have accessible source code.' % e)
[ "def", "parse_function", "(", "fn", ")", ":", "try", ":", "return", "parse_string", "(", "inspect", ".", "getsource", "(", "fn", ")", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "e", ":", "raise", "ValueError", "(", "'Cannot differentiate fun...
45
18.8
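A usage sketch, assuming parse_string is a thin wrapper around ast.parse (the function must live in a source file, since inspect.getsource cannot see REPL-defined functions or C builtins):

import ast

def square(x):
    return x * x

tree = parse_function(square)
print(ast.dump(tree))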
def _fetch_file(self, remote, local):
    """fetch a single file"""
    full_remote = "%s:%s" % (self.location, remote)
    self.log.info("fetching %s from %s", local, full_remote)
    for i in range(10):
        # wait up to 10s for remote file to exist
        check = check_output(self.ssh_cmd + self.ssh_args +
                             [self.location, 'test -e', remote,
                              "&& echo 'yes' || echo 'no'"])
        check = check.strip()
        if check == 'no':
            time.sleep(1)
        elif check == 'yes':
            break
    check_output(self.scp_cmd + [full_remote, local])
[ "def", "_fetch_file", "(", "self", ",", "remote", ",", "local", ")", ":", "full_remote", "=", "\"%s:%s\"", "%", "(", "self", ".", "location", ",", "remote", ")", "self", ".", "log", ".", "info", "(", "\"fetching %s from %s\"", ",", "local", ",", "full_re...
44
14.785714
def postprocess_variants(items):
    """Provide post-processing of variant calls: filtering and effects annotation.
    """
    vrn_key = "vrn_file"
    if not isinstance(items, dict):
        items = [utils.to_single_data(x) for x in items]
        if "vrn_file_joint" in items[0]:
            vrn_key = "vrn_file_joint"
    data, items = _get_batch_representative(items, vrn_key)
    items = cwlutils.unpack_tarballs(items, data)
    data = cwlutils.unpack_tarballs(data, data)
    cur_name = "%s, %s" % (dd.get_sample_name(data), get_variantcaller(data, require_bam=False))
    logger.info("Finalizing variant calls: %s" % cur_name)
    orig_vrn_file = data.get(vrn_key)
    data = _symlink_to_workdir(data, [vrn_key])
    data = _symlink_to_workdir(data, ["config", "algorithm", "variant_regions"])
    if data.get(vrn_key):
        logger.info("Calculating variation effects for %s" % cur_name)
        ann_vrn_file, vrn_stats = effects.add_to_vcf(data[vrn_key], data)
        if ann_vrn_file:
            data[vrn_key] = ann_vrn_file
        if vrn_stats:
            data["vrn_stats"] = vrn_stats
        orig_items = _get_orig_items(items)
        logger.info("Annotate VCF file: %s" % cur_name)
        data[vrn_key] = annotation.finalize_vcf(data[vrn_key],
                                                get_variantcaller(data, require_bam=False),
                                                orig_items)
        if cwlutils.is_cwl_run(data):
            logger.info("Annotate with population level variation data")
            ann_file = population.run_vcfanno(data[vrn_key], data)
            if ann_file:
                data[vrn_key] = ann_file
        logger.info("Filtering for %s" % cur_name)
        data[vrn_key] = variant_filtration(data[vrn_key], dd.get_ref_file(data),
                                           tz.get_in(("genome_resources", "variation"), data, {}),
                                           data, orig_items)
        logger.info("Prioritization for %s" % cur_name)
        prio_vrn_file = prioritize.handle_vcf_calls(data[vrn_key], data, orig_items)
        if prio_vrn_file != data[vrn_key]:
            data[vrn_key] = prio_vrn_file
        logger.info("Germline extraction for %s" % cur_name)
        data = germline.extract(data, orig_items)
        if dd.get_align_bam(data):
            data = damage.run_filter(data[vrn_key], dd.get_align_bam(data),
                                     dd.get_ref_file(data), data, orig_items)
    if orig_vrn_file and os.path.samefile(data[vrn_key], orig_vrn_file):
        data[vrn_key] = orig_vrn_file
    return [[data]]
[ "def", "postprocess_variants", "(", "items", ")", ":", "vrn_key", "=", "\"vrn_file\"", "if", "not", "isinstance", "(", "items", ",", "dict", ")", ":", "items", "=", "[", "utils", ".", "to_single_data", "(", "x", ")", "for", "x", "in", "items", "]", "if...
51.510204
18.387755
def run_all():
    """Load the data that we're using to search for Li-rich giants.
    Store it in dataset and model objects.
    """
    DATA_DIR = "/home/annaho/TheCannon/code/apogee_lamost/xcalib_4labels"
    dates = os.listdir("/home/share/LAMOST/DR2/DR2_release")
    dates = np.array(dates)
    dates = np.delete(dates, np.where(dates == '.directory')[0][0])
    dates = np.delete(dates, np.where(dates == 'all_folders.list')[0][0])
    dates = np.delete(dates, np.where(dates == 'dr2.lis')[0][0])
    for date in dates:
        if glob.glob("*%s*.txt" % date):
            print("%s done" % date)
        else:
            print("running %s" % date)
            run_one_date(date)
[ "def", "run_all", "(", ")", ":", "DATA_DIR", "=", "\"/home/annaho/TheCannon/code/apogee_lamost/xcalib_4labels\"", "dates", "=", "os", ".", "listdir", "(", "\"/home/share/LAMOST/DR2/DR2_release\"", ")", "dates", "=", "np", ".", "array", "(", "dates", ")", "dates", "=...
41.3125
17.1875
def decode_offset_fetch_response(cls, data):
    """
    Decode bytes to an OffsetFetchResponse

    Arguments:
        data: bytes to decode
    """
    ((correlation_id,), cur) = relative_unpack('>i', data, 0)
    ((num_topics,), cur) = relative_unpack('>i', data, cur)

    for _ in range(num_topics):
        (topic, cur) = read_short_string(data, cur)
        ((num_partitions,), cur) = relative_unpack('>i', data, cur)

        for _ in range(num_partitions):
            ((partition, offset), cur) = relative_unpack('>iq', data, cur)
            (metadata, cur) = read_short_string(data, cur)
            ((error,), cur) = relative_unpack('>h', data, cur)

            yield OffsetFetchResponse(topic, partition, offset, metadata, error)
[ "def", "decode_offset_fetch_response", "(", "cls", ",", "data", ")", ":", "(", "(", "correlation_id", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "0", ")", "(", "(", "num_topics", ",", ")", ",", "cur", ")", "=", ...
37.545455
21.454545
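The relative_unpack helper is not part of this record; a plausible sketch of what it does (unpack a struct format at an offset and return the values together with the advanced offset, which is the pattern the decoder above relies on):

import struct

def relative_unpack_sketch(fmt, data, cur):
    # Unpack fmt at offset cur; return (values, new_offset).
    size = struct.calcsize(fmt)
    return struct.unpack(fmt, data[cur:cur + size]), cur + size

((value,), cur) = relative_unpack_sketch('>i', b'\x00\x00\x00\x2a', 0)
assert value == 42 and cur == 4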
def prune_contours(self, area_thresh=1000.0, dist_thresh=20, preserve_topology=True):
    """Removes all white connected components with area less than area_thresh.

    Parameters
    ----------
    area_thresh : float
        The minimum area for which a white connected component will not be
        zeroed out.

    dist_thresh : int
        If a connected component is within dist_thresh of the top of the
        image, it will not be pruned out, regardless of its area.

    Returns
    -------
    :obj:`BinaryImage`
        The new pruned binary image.
    """
    # get all contours (connected components) from the binary image
    contours, hierarchy = cv2.findContours(
        self.data.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    num_contours = len(contours)
    middle_pixel = np.array(self.shape)[:2] / 2
    middle_pixel = middle_pixel.reshape(1, 2)
    center_contour = None
    pruned_contours = []

    # find which contours need to be pruned
    for i in range(num_contours):
        area = cv2.contourArea(contours[i])
        if area > area_thresh:
            # check close to origin
            fill = np.zeros([self.height, self.width, 3])
            cv2.fillPoly(
                fill, pts=[contours[i]],
                color=(BINARY_IM_MAX_VAL, BINARY_IM_MAX_VAL, BINARY_IM_MAX_VAL))
            nonzero_px = np.where(fill > 0)
            nonzero_px = np.c_[nonzero_px[0], nonzero_px[1]]
            dists = ssd.cdist(middle_pixel, nonzero_px)
            min_dist = np.min(dists)
            pruned_contours.append((contours[i], min_dist))

    if len(pruned_contours) == 0:
        return None

    pruned_contours.sort(key=lambda x: x[1])

    # keep all contours within some distance of the top
    num_contours = len(pruned_contours)
    keep_indices = [0]
    source_coords = pruned_contours[0][0].squeeze().astype(np.float32)
    for i in range(1, num_contours):
        target_coords = pruned_contours[i][0].squeeze().astype(np.float32)
        dists = ssd.cdist(source_coords, target_coords)
        min_dist = np.min(dists)
        if min_dist < dist_thresh:
            keep_indices.append(i)

    # keep the top num_areas pruned contours
    keep_indices = np.unique(keep_indices)
    pruned_contours = [pruned_contours[i][0] for i in keep_indices]

    # mask out bad areas in the image
    pruned_data = np.zeros([self.height, self.width, 3])
    for contour in pruned_contours:
        cv2.fillPoly(
            pruned_data, pts=[contour],
            color=(BINARY_IM_MAX_VAL, BINARY_IM_MAX_VAL, BINARY_IM_MAX_VAL))
    pruned_data = pruned_data[:, :, 0]  # convert back to one channel

    # preserve topology of original image
    if preserve_topology:
        orig_zeros = np.where(self.data == 0)
        pruned_data[orig_zeros[0], orig_zeros[1]] = 0
    return BinaryImage(pruned_data.astype(np.uint8), self._frame)
[ "def", "prune_contours", "(", "self", ",", "area_thresh", "=", "1000.0", ",", "dist_thresh", "=", "20", ",", "preserve_topology", "=", "True", ")", ":", "# get all contours (connected components) from the binary image", "contours", ",", "hierarchy", "=", "cv2", ".", ...
39.841463
15.292683
def text_with_newlines(text, line_length=78, newline='\n'):
    '''Return text with a `newline` inserted after each `line_length` char.

    Return `text` unchanged if line_length == 0.
    '''
    if line_length > 0:
        if len(text) <= line_length:
            return text
        else:
            return newline.join([text[idx:idx + line_length]
                                 for idx in range(0, len(text), line_length)])
    else:
        return text
[ "def", "text_with_newlines", "(", "text", ",", "line_length", "=", "78", ",", "newline", "=", "'\\n'", ")", ":", "if", "line_length", ">", "0", ":", "if", "len", "(", "text", ")", "<=", "line_length", ":", "return", "text", "else", ":", "return", "newl...
34.285714
21.428571
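A quick usage check of the chunking behaviour, assuming the function above is in scope:

wrapped = text_with_newlines('abcdefghij', line_length=4)
assert wrapped == 'abcd\nefgh\nij'
assert text_with_newlines('short') == 'short'                      # under 78 chars
assert text_with_newlines('anything', line_length=0) == 'anything'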
def targetLow(self):
    """Return the low byte of the target address field.

    Used in All-Link Cleanup messages.
    """
    low_byte = None
    if self.target is not None and self._messageFlags.isBroadcast:
        low_byte = self.target.bytes[0]
    return low_byte
[ "def", "targetLow", "(", "self", ")", ":", "low_byte", "=", "None", "if", "self", ".", "target", "is", "not", "None", "and", "self", ".", "_messageFlags", ".", "isBroadcast", ":", "low_byte", "=", "self", ".", "target", ".", "bytes", "[", "0", "]", "...
32.333333
14.333333
def get_user_by_name(uname, **kwargs):
    """
    Get a user by username
    """
    try:
        user_i = db.DBSession.query(User).filter(User.username == uname).one()
        return user_i
    except NoResultFound:
        return None
[ "def", "get_user_by_name", "(", "uname", ",", "*", "*", "kwargs", ")", ":", "try", ":", "user_i", "=", "db", ".", "DBSession", ".", "query", "(", "User", ")", ".", "filter", "(", "User", ".", "username", "==", "uname", ")", ".", "one", "(", ")", ...
25.555556
15.111111
def _selfConsistentIteration(self, relative_tolerance=1.0e-6, maximum_iterations=1000, verbose=True, print_warning=False):
    """
    Determine free energies by self-consistent iteration.

    OPTIONAL ARGUMENTS

    relative_tolerance (float between 0 and 1) - relative tolerance for convergence (default 1.0e-6)
    maximum_iterations (int) - maximum number of self-consistent iterations (default 1000)
    verbose (boolean) - verbosity level for debug output

    NOTES

    Self-consistent iteration of the MBAR equations is used, as described in Appendix C.1 of [1].
    """
    # Iteratively update dimensionless free energies until convergence to
    # specified tolerance, or maximum allowed number of iterations has been
    # exceeded.
    if verbose:
        print("MBAR: Computing dimensionless free energies by iteration. This may take from seconds to minutes, depending on the quantity of data...")
    for iteration in range(0, maximum_iterations):

        if verbose:
            print('Self-consistent iteration %d' % iteration)

        # compute the free energies by self consistent iteration (which
        # also involves calculating the weights)
        (W_nk, f_k_new) = self._computeWeights(logform=True, return_f_k=True)

        if self._amIdoneIterating(f_k_new, relative_tolerance, iteration,
                                  maximum_iterations, print_warning, verbose):
            break

    return
[ "def", "_selfConsistentIteration", "(", "self", ",", "relative_tolerance", "=", "1.0e-6", ",", "maximum_iterations", "=", "1000", ",", "verbose", "=", "True", ",", "print_warning", "=", "False", ")", ":", "# Iteratively update dimensionless free energies until convergence...
41.783784
32.918919
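The stopping pattern above can be illustrated on a toy fixed-point problem; this sketch is not the MBAR equations themselves (those need the full weight matrix), just the same convergence loop:

import numpy as np

def self_consistent(update, x0, rel_tol=1e-6, max_iter=1000):
    # Iterate x <- update(x) until the relative change drops below rel_tol.
    x = x0
    for i in range(max_iter):
        x_new = update(x)
        if np.max(np.abs(x_new - x)) <= rel_tol * np.max(np.abs(x_new)):
            return x_new, i
        x = x_new
    raise RuntimeError('did not converge in %d iterations' % max_iter)

# x = cos(x) has a unique fixed point near 0.7390851
root, iters = self_consistent(np.cos, np.array(1.0))
assert abs(root - 0.7390851) < 1e-5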
def object_attributes(thing, all=False):
    '''
    Return a sorted list of names defined by thing that are not also names
    in a standard object, except include __doc__.
    '''
    standard_names = set(dir(object()))
    things_names = set(dir(thing))
    if not all:
        things_names -= standard_names
        things_names |= set(['__doc__'])
    return sorted(things_names)
[ "def", "object_attributes", "(", "thing", ",", "all", "=", "False", ")", ":", "standard_names", "=", "set", "(", "dir", "(", "object", "(", ")", ")", ")", "things_names", "=", "set", "(", "dir", "(", "thing", ")", ")", "if", "not", "all", ":", "thi...
35.454545
13.818182
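A sanity check of the filtering, assuming the function above: standard object names are dropped, __doc__ is re-included, and genuine attributes survive.

class Point:
    '''A 2-D point.'''
    def __init__(self):
        self.x = 0

names = object_attributes(Point())
assert 'x' in names                # real attribute survives
assert '__doc__' in names          # explicitly re-included
assert '__class__' not in names    # standard object name, filtered out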
def get_header(self, configuration=False):
    """Get the log file header

    If configuration is True, this returns the daemon configuration

    :return: A string list containing project name, daemon name, version, licence etc.
    :rtype: list
    """
    header = [u"-----",
              u" █████╗ ██╗     ██╗ ██████╗ ███╗   ██╗ █████╗ ██╗  ██╗",
              u"██╔══██╗██║     ██║██╔════╝ ████╗  ██║██╔══██╗██║ ██╔╝",
              u"███████║██║     ██║██║  ███╗██╔██╗ ██║███████║█████╔╝ ",
              u"██╔══██║██║     ██║██║   ██║██║╚██╗██║██╔══██║██╔═██╗ ",
              u"██║  ██║███████╗██║╚██████╔╝██║ ╚████║██║  ██║██║  ██╗",
              u"╚═╝  ╚═╝╚══════╝╚═╝ ╚═════╝ ╚═╝  ╚═══╝╚═╝  ╚═╝╚═╝  ╚═╝",
              u"-----",
              u"Alignak %s - %s daemon" % (VERSION, self.name),
              u"Copyright (c) 2015-2019: Alignak Team",
              u"License: AGPL",
              u"-----",
              u"Python: %s.%s" % (sys.version_info.major, sys.version_info.minor),
              u"-----",
              u"My pid: %s" % self.pid]

    if configuration:
        header = ["My configuration: "]
        for prop, _ in sorted(self.properties.items()):
            header.append(" - %s=%s" % (prop, getattr(self, prop)))

    return header
[ "def", "get_header", "(", "self", ",", "configuration", "=", "False", ")", ":", "header", "=", "[", "u\"-----\"", ",", "u\" █████╗ ██╗ ██╗ ██████╗ ███╗ ██╗ █████╗ ██╗ ██╗\",", "", "u\" ██╔══██╗██║ ██║██╔════╝ ████╗ ██║██╔══██╗██║ ██╔╝\",", "", "u\" ███████║██║ ...
45.1
24.333333
def neg_loglikelihood(y, mean, scale, shape, skewness):
    """ Negative loglikelihood function

    Parameters
    ----------
    y : np.ndarray
        univariate time series

    mean : np.ndarray
        array of location parameters for the Poisson distribution

    scale : float
        scale parameter for the Poisson distribution

    shape : float
        tail thickness parameter for the Poisson distribution

    skewness : float
        skewness parameter for the Poisson distribution

    Returns
    ----------
    - Negative loglikelihood of the Poisson family
    """
    return -np.sum(-mean + np.log(mean)*y - sp.gammaln(y + 1))
[ "def", "neg_loglikelihood", "(", "y", ",", "mean", ",", "scale", ",", "shape", ",", "skewness", ")", ":", "return", "-", "np", ".", "sum", "(", "-", "mean", "+", "np", ".", "log", "(", "mean", ")", "*", "y", "-", "sp", ".", "gammaln", "(", "y",...
27.76
22.72
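The returned expression is exactly the negated sum of Poisson log-pmfs, which can be checked against scipy.stats (here sp is scipy.special, as in the surrounding module):

import numpy as np
import scipy.special as sp
from scipy.stats import poisson

y = np.array([0, 1, 3, 2])
mean = np.array([0.5, 1.0, 2.5, 2.0])
nll = -np.sum(-mean + np.log(mean) * y - sp.gammaln(y + 1))
assert np.isclose(nll, -poisson.logpmf(y, mean).sum())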
def setDirection(self, outputLocation, inputLocation):
    """
    Sets the output-to-input direction by setting both the locations \
    at the same time.

    :param      outputLocation | <XConnectionLocation>
    :param      inputLocation  | <XConnectionLocation>
    """
    self.setOutputLocation(outputLocation)
    self.setInputLocation(inputLocation)
[ "def", "setDirection", "(", "self", ",", "outputLocation", ",", "inputLocation", ")", ":", "self", ".", "setOutputLocation", "(", "outputLocation", ")", "self", ".", "setInputLocation", "(", "inputLocation", ")" ]
39.9
15.1
def make_butterworth_bandpass_b_a(CenterFreq, bandwidth, SampleFreq, order=5, btype='band'):
    """
    Generates the b and a coefficients for a butterworth bandpass IIR filter.

    Parameters
    ----------
    CenterFreq : float
        central frequency of bandpass
    bandwidth : float
        full width of the bandpass (half of it extends to either side of the centre)
    SampleFreq : float
        Sample frequency of filter
    order : int, optional
        order of IIR filter. Is 5 by default
    btype : string, optional
        type of filter to make e.g. (band, low, high)

    Returns
    -------
    b : ndarray
        coefficients multiplying the current and past inputs (feedforward coefficients)
    a : ndarray
        coefficients multiplying the past outputs (feedback coefficients)
    """
    lowcut = CenterFreq - bandwidth/2
    highcut = CenterFreq + bandwidth/2
    b, a = make_butterworth_b_a(lowcut, highcut, SampleFreq, order, btype)
    return b, a
[ "def", "make_butterworth_bandpass_b_a", "(", "CenterFreq", ",", "bandwidth", ",", "SampleFreq", ",", "order", "=", "5", ",", "btype", "=", "'band'", ")", ":", "lowcut", "=", "CenterFreq", "-", "bandwidth", "/", "2", "highcut", "=", "CenterFreq", "+", "bandwi...
32.928571
21.214286
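make_butterworth_b_a is a helper from the same module that is not shown here; a plausible sketch using scipy.signal, with the cutoffs normalised to the Nyquist frequency:

from scipy import signal

def make_butterworth_b_a(lowcut, highcut, SampleFreq, order=5, btype='band'):
    # Normalise the cutoffs to Nyquist, as scipy.signal.butter expects.
    nyq = 0.5 * SampleFreq
    return signal.butter(order, [lowcut / nyq, highcut / nyq], btype=btype)

# 100 Hz-wide band centred on 1 kHz, sampled at 10 kHz
b, a = make_butterworth_bandpass_b_a(1000, 100, 10000)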
def process_attributes_of_node(attrs, node_name, class_type):
    """
    prepare the model fields, nodes and relations

    Args:
        node_name (str): name of the node we are currently processing
        attrs (dict): attribute dict
        class_type (str): Type of class.
            Can be one of these: 'ListNode', 'Model', 'Node'
    """
    # print("Node: %s" % node_name)
    attrs['_nodes'] = {}
    attrs['_linked_models'] = defaultdict(list)
    attrs['_debug_linked_models'] = defaultdict(list)
    attrs['_lazy_linked_models'] = defaultdict(list)
    attrs['_fields'] = {}
    attrs['_uniques'] = []
    # attrs['_many_to_models'] = []

    # iterating over attributes of the soon to be created class object.
    for key, attr in list(attrs.items()):
        # if it's a class (not instance) and its type is Node or ListNode
        if hasattr(attr, '__base__') and getattr(attr.__base__, '_TYPE', '') in ['Node', 'ListNode']:
            # converted pops to dict access to allow sphinx to
            # properly document the models
            # attrs['_nodes'][key] = attrs.pop(key)
            attrs['_nodes'][key] = attrs[key]
        else:  # otherwise it should be a field or linked model
            attr_type = getattr(attr, '_TYPE', '')
            if attr_type == 'Model':
                attrs['%s_id' % key] = ''
                # lnk_mdl_ins = attrs.pop(key)
                lnk_mdl_ins = attrs[key]
                lnk = {
                    'null': lnk_mdl_ins.null or class_type == 'ListNode',
                    'link_source': True,
                    'mdl': lnk_mdl_ins.__class__,
                    'o2o': lnk_mdl_ins._is_one_to_one,
                    'm2m': class_type == 'ListNode',
                    'reverse': lnk_mdl_ins.reverse_name,
                    'verbose': lnk_mdl_ins.verbose_name,
                    'field': key,
                    'is_set': False,
                }
                attrs['_linked_models'][attr.__class__.__name__].append(lnk)
                debug_lnk = lnk.copy()
                debug_lnk['lnksrc'] = 'process_attributes_of_node'
                attrs['_debug_linked_models'][attr.__class__.__name__].append(debug_lnk)
            elif attr_type == 'Field':
                attr.name = key
                attrs['_fields'][key] = attr
                if attr.unique:
                    attrs['_uniques'].append(key)
            elif attr_type == 'Link':
                # lzy_lnk = attrs.pop(key)
                attrs['%s_id' % key] = ''
                lzy_lnk = attrs[key]
                attrs['_lazy_linked_models'][key].append({'from': node_name,
                                                          'to': lzy_lnk.link_to,
                                                          'o2o': lzy_lnk.one_to_one,
                                                          'verbose': lzy_lnk.verbose_name,
                                                          'reverse': lzy_lnk.reverse_name,
                                                          'field': key})
[ "def", "process_attributes_of_node", "(", "attrs", ",", "node_name", ",", "class_type", ")", ":", "# print(\"Node: %s\" % node_name)", "attrs", "[", "'_nodes'", "]", "=", "{", "}", "attrs", "[", "'_linked_models'", "]", "=", "defaultdict", "(", "list", ")", "att...
51.061538
18.938462
def add_user_to_allow(self, name, user):
    """Add a user to the given acl allow block."""
    # Clear user from both allow and deny before adding
    if not self.remove_user_from_acl(name, user):
        return False
    if name not in self._acl:
        return False
    self._acl[name]['allow'].append(user)
    return True
[ "def", "add_user_to_allow", "(", "self", ",", "name", ",", "user", ")", ":", "# Clear user from both allow and deny before adding", "if", "not", "self", ".", "remove_user_from_acl", "(", "name", ",", "user", ")", ":", "return", "False", "if", "name", "not", "in"...
29.25
18.083333
def run(self, tag=None, output=None, **kwargs):
    """ runs the extractor

    Args:
    -----
        output: ['filepath', None]
    """
    start = datetime.datetime.now()
    count = 0
    if tag:
        tag = Uri(tag)
        xml_generator = etree.iterparse(self.source,
                                        # events=("start", "end"),
                                        tag=tag.etree)
    else:
        xml_generator = etree.iterparse(self.source)  # , events=("start", "end")
    i = 0
    for event, element in xml_generator:
        type_tags = element.findall(_RDF_TYPE_TAG)
        rdf_types = [el.get(_RES_TAG)
                     for el in type_tags
                     if el.get(_RES_TAG)]
        # print(rdf_types)
        if str(self.filter_val) in rdf_types:
            pdb.set_trace()
            # print("%s - %s - %s - %s" % (event,
            #                              element.tag,
            #                              element.attrib,
            #                              element.text))
            count += 1
        # if i == 100:
        #     break
        i += 1
        element.clear()
    print("Found '{}' items in {}".format(count,
                                          (datetime.datetime.now() - start)))
[ "def", "run", "(", "self", ",", "tag", "=", "None", ",", "output", "=", "None", ",", "*", "*", "kwargs", ")", ":", "start", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "count", "=", "0", "if", "tag", ":", "tag", "=", "Uri", "(", "...
35.615385
15.871795
def num_sequences(fileh):
    """
    Determine how many sequences there are in fasta file/stream.

    :param fileh: the stream to read the fasta data from, or a string giving
                  the filename of the file to load. Note that if a stream is
                  given, it will be consumed by this function
    :return: the number of sequences in this fasta stream/file
    """
    if type(fileh).__name__ == "str":
        fh = open(fileh)
    else:
        fh = fileh
    count = 0
    for _ in fastaIterator(fh):
        count += 1
    return count
[ "def", "num_sequences", "(", "fileh", ")", ":", "if", "type", "(", "fileh", ")", ".", "__name__", "==", "\"str\"", ":", "fh", "=", "open", "(", "fileh", ")", "else", ":", "fh", "=", "fileh", "count", "=", "0", "for", "_", "in", "fastaIterator", "("...
30.470588
21.764706
def median(self, default=None):
    """
    Calculate the median value over the time series.

    :param default: Value to return as a default should the calculation not be possible.
    :return: Float representing the median value, or the supplied default.
    """
    return numpy.asscalar(numpy.median(self.values)) if self.values else default
[ "def", "median", "(", "self", ",", "default", "=", "None", ")", ":", "return", "numpy", ".", "asscalar", "(", "numpy", ".", "median", "(", "self", ".", "values", ")", ")", "if", "self", ".", "values", "else", "default" ]
43.5
23
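numpy.asscalar was deprecated in NumPy 1.16 and removed in 1.23; an equivalent modern spelling of the same calculation uses ndarray.item():

import numpy as np

values = [3.0, 1.0, 2.0]
median = np.median(values).item() if values else None
assert median == 2.0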
def verify(self, key, data, signature, mecha=MechanismRSAPKCS1):
    """
    C_VerifyInit/C_Verify

    :param key: a key handle, obtained calling :func:`findObjects`.
    :type key: integer
    :param data: the data that was signed
    :type data: (binary) string or list/tuple of bytes
    :param signature: the signature to be verified
    :type signature: (binary) string or list/tuple of bytes
    :param mecha: the signing mechanism to be used
        (use `MechanismRSAPKCS1` for `CKM_RSA_PKCS`)
    :type mecha: :class:`Mechanism`
    :return: True if signature is valid, False otherwise
    :rtype: bool
    """
    m = mecha.to_native()
    data1 = ckbytelist(data)
    rv = self.lib.C_VerifyInit(self.session, m, key)
    if rv != CKR_OK:
        raise PyKCS11Error(rv)
    rv = self.lib.C_Verify(self.session, data1, signature)
    if rv == CKR_OK:
        return True
    elif rv == CKR_SIGNATURE_INVALID:
        return False
    else:
        raise PyKCS11Error(rv)
[ "def", "verify", "(", "self", ",", "key", ",", "data", ",", "signature", ",", "mecha", "=", "MechanismRSAPKCS1", ")", ":", "m", "=", "mecha", ".", "to_native", "(", ")", "data1", "=", "ckbytelist", "(", "data", ")", "rv", "=", "self", ".", "lib", "...
36.448276
15.62069
def check_outdated(package, version):
    """
    Given the name of a package on PyPI and a version (both strings), checks
    if the given version is the latest version of the package available.

    Returns a 2-tuple (is_outdated, latest_version) where
    is_outdated is a boolean which is True if the given version is earlier
    than the latest version, which is the string latest_version.

    Attempts to cache on disk the HTTP call it makes for 24 hours. If this
    somehow fails the exception is converted to a warning
    (OutdatedCacheFailedWarning) and the function continues normally.
    """
    from pkg_resources import parse_version

    parsed_version = parse_version(version)
    latest = None

    with utils.cache_file(package, 'r') as f:
        content = f.read()
        if content:  # in case cache_file fails and so f is a dummy file
            latest, cache_dt = json.loads(content)
            if not utils.cache_is_valid(cache_dt):
                latest = None

    def get_latest():
        url = 'https://pypi.python.org/pypi/%s/json' % package
        response = utils.get_url(url)
        return json.loads(response)['info']['version']

    if latest is None:
        latest = get_latest()

    parsed_latest = parse_version(latest)

    if parsed_version > parsed_latest:
        # Probably a stale cached value
        latest = get_latest()
        parsed_latest = parse_version(latest)
        if parsed_version > parsed_latest:
            raise ValueError('Version %s is greater than the latest version on PyPI: %s'
                             % (version, latest))

    is_latest = parsed_version == parsed_latest
    assert is_latest or parsed_version < parsed_latest

    with utils.cache_file(package, 'w') as f:
        data = [latest, utils.format_date(datetime.now())]
        json.dump(data, f)

    return not is_latest, latest
[ "def", "check_outdated", "(", "package", ",", "version", ")", ":", "from", "pkg_resources", "import", "parse_version", "parsed_version", "=", "parse_version", "(", "version", ")", "latest", "=", "None", "with", "utils", ".", "cache_file", "(", "package", ",", ...
33.833333
20.907407
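Typical use, as published in the outdated package (requires network access; the version strings here are illustrative):

from outdated import check_outdated

is_outdated, latest = check_outdated('requests', '2.0.0')
if is_outdated:
    print('requests 2.0.0 is behind the latest release, %s' % latest)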
def collect_hosts(hosts, randomize=True):
    """
    Collects a comma-separated set of hosts (host:port) and optionally
    randomize the returned list.
    """
    if isinstance(hosts, six.string_types):
        hosts = hosts.strip().split(',')

    result = []
    for host_port in hosts:
        res = host_port.split(':')
        host = res[0]
        port = int(res[1]) if len(res) > 1 else DEFAULT_KAFKA_PORT
        result.append((host.strip(), port))

    if randomize:
        shuffle(result)

    return result
[ "def", "collect_hosts", "(", "hosts", ",", "randomize", "=", "True", ")", ":", "if", "isinstance", "(", "hosts", ",", "six", ".", "string_types", ")", ":", "hosts", "=", "hosts", ".", "strip", "(", ")", ".", "split", "(", "','", ")", "result", "=", ...
23.952381
18.904762
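Behaviour sketch, assuming kafka-python's DEFAULT_KAFKA_PORT of 9092:

hosts = collect_hosts('broker1:9093, broker2', randomize=False)
assert hosts == [('broker1', 9093), ('broker2', 9092)]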
def lrucache(func, size):
    """
    A simple implementation of a least recently used (LRU) cache.
    Memoizes the recent calls of a computationally intensive function.

    Parameters
    ----------
    func : function
        Must be unary (takes a single argument)

    size : int
        The size of the cache (number of previous calls to store)
    """
    if size == 0:
        return func
    elif size < 0:
        raise ValueError("size argument must be a positive integer")

    # this only works for unary functions
    if not is_arity(1, func):
        raise ValueError("The function must be unary (take a single argument)")

    # initialize the cache
    cache = OrderedDict()

    def wrapper(x):
        if not(type(x) is np.ndarray):
            raise ValueError("Input must be an ndarray")

        # hash the input, using tostring for small and repr for large arrays
        if x.size <= 1e4:
            key = hash(x.tostring())
        else:
            key = hash(repr(x))

        # if the key is not in the cache, evaluate the function
        if key not in cache:

            # clear space if necessary (keeps the most recent keys)
            if len(cache) >= size:
                cache.popitem(last=False)

            # store the new value in the cache
            cache[key] = func(x)

        return cache[key]

    return wrapper
[ "def", "lrucache", "(", "func", ",", "size", ")", ":", "if", "size", "==", "0", ":", "return", "func", "elif", "size", "<", "0", ":", "raise", "ValueError", "(", "\"size argument must be a positive integer\"", ")", "# this only works for unary functions", "if", ...
26.857143
22.530612
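Usage sketch, assuming the module's helpers (is_arity, OrderedDict, numpy as np) are in scope. Note that because hits never re-order keys and popitem(last=False) evicts in insertion order, this cache behaves closer to FIFO than strict LRU.

import numpy as np

calls = []

def expensive(x):
    calls.append(1)
    return x.sum()

cached = lrucache(expensive, size=2)
a = np.arange(10)
cached(a)
cached(a)                 # second call is served from the cache
assert len(calls) == 1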
def storage_del(self, key=None):
    """
    Remove the value stored with the key from storage.
    If key is not supplied then all values for the module are removed.
    """
    if not self._module:
        return
    self._storage_init()
    module_name = self._module.module_full_name
    return self._storage.storage_del(module_name, key=key)
[ "def", "storage_del", "(", "self", ",", "key", "=", "None", ")", ":", "if", "not", "self", ".", "_module", ":", "return", "self", ".", "_storage_init", "(", ")", "module_name", "=", "self", ".", "_module", ".", "module_full_name", "return", "self", ".", ...
37.3
13.9
def write_roc(roc_structure, inputfilename, options, fw_type=None):
    """
    writes ROC output

    :param roc_structure: a.k.a auc_structure, generated in
        /common_tools/classification/make_auc_structure()
    :param inputfilename: name of the file specified on the command line that
        contains the ensemble. This will be used to generate the name of the
        file that contains the ROC data.
    :param options:
    :return:
    """
    # check if ./ROC_DATA exists. If not, create it
    rocdir = os.path.join(os.getcwd(), 'ROC_DATA')
    if not os.path.exists(rocdir):
        os.makedirs(rocdir)

    # generate the root name of the file that will hold the roc data
    if len(inputfilename.split('.')) == 1:
        rootname = inputfilename
    else:
        rootname = inputfilename.split('.csv')[0]

    # add '.csv' to the root name to give the name of the file
    if fw_type:
        filename = '%s_fw_roc.csv' % rootname
    else:
        filename = '%s_roc.csv' % rootname

    # the path to the file
    file = os.path.join(rocdir, filename)

    # open file and create a csv writer object
    f = open(file, 'w')
    rocwriter = csv.writer(f)

    # create header
    header = ['id', 'score', 'score source', 'status', 'fpf', 'tpf']
    rocwriter.writerow(header)

    # write contents
    for tup in roc_structure:
        rocwriter.writerow(list(tup))

    f.close()
[ "def", "write_roc", "(", "roc_structure", ",", "inputfilename", ",", "options", ",", "fw_type", "=", "None", ")", ":", "# check if ./ROC_DATA exists. If not, create it", "rocdir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", ...
32.214286
21.785714
def pprint(self, index=False, delimiter='-'):
    """Pretty-print the binary tree.

    :param index: If set to True (default: False), display level-order_
        indexes using the format: ``{index}{delimiter}{value}``.
    :type index: bool
    :param delimiter: Delimiter character between the node index and
        the node value (default: '-').
    :type delimiter: str | unicode

    **Example**:

    .. doctest::

        >>> from binarytree import Node
        >>>
        >>> root = Node(1)              # index: 0, value: 1
        >>> root.left = Node(2)         # index: 1, value: 2
        >>> root.right = Node(3)        # index: 2, value: 3
        >>> root.left.right = Node(4)   # index: 4, value: 4
        >>>
        >>> root.pprint()
        <BLANKLINE>
          __1
         /   \\
        2     3
         \\
          4
        <BLANKLINE>
        >>> root.pprint(index=True)     # Format: {index}-{value}
        <BLANKLINE>
           _____0-1_
          /         \\
        1-2_        2-3
            \\
             4-4
        <BLANKLINE>

    .. note::
        If you do not need level-order_ indexes in the output string, use
        :func:`binarytree.Node.__str__` instead.

    .. _level-order:
        https://en.wikipedia.org/wiki/Tree_traversal#Breadth-first_search
    """
    lines = _build_tree_string(self, 0, index, delimiter)[0]
    print('\n' + '\n'.join((line.rstrip() for line in lines)))
[ "def", "pprint", "(", "self", ",", "index", "=", "False", ",", "delimiter", "=", "'-'", ")", ":", "lines", "=", "_build_tree_string", "(", "self", ",", "0", ",", "index", ",", "delimiter", ")", "[", "0", "]", "print", "(", "'\\n'", "+", "'\\n'", "....
33
21.957447
def image_path_from_index(self, index):
    """
    given image index, find out full path

    Parameters:
    ----------
    index: int
        index of a specific image

    Returns:
    ----------
    full path of this image
    """
    assert self.image_set_index is not None, "Dataset not initialized"
    name = self.image_set_index[index]
    image_file = os.path.join(self.image_dir, 'images', name)
    assert os.path.isfile(image_file), 'Path does not exist: {}'.format(image_file)
    return image_file
[ "def", "image_path_from_index", "(", "self", ",", "index", ")", ":", "assert", "self", ".", "image_set_index", "is", "not", "None", ",", "\"Dataset not initialized\"", "name", "=", "self", ".", "image_set_index", "[", "index", "]", "image_file", "=", "os", "."...
32.705882
17.176471
def vcs_init(self):
    """Initialize VCS repository."""
    VCS(os.path.join(self.outdir, self.name), self.pkg_data)
[ "def", "vcs_init", "(", "self", ")", ":", "VCS", "(", "os", ".", "path", ".", "join", "(", "self", ".", "outdir", ",", "self", ".", "name", ")", ",", "self", ".", "pkg_data", ")" ]
41
15
def shrink_dsp(self, inputs=None, outputs=None, cutoff=None, inputs_dist=None, wildcard=True):
    """
    Returns a reduced dispatcher.

    :param inputs:
        Input data nodes.
    :type inputs: list[str], iterable, optional

    :param outputs:
        Ending data nodes.
    :type outputs: list[str], iterable, optional

    :param cutoff:
        Depth to stop the search.
    :type cutoff: float, int, optional

    :param inputs_dist:
        Initial distances of input data nodes.
    :type inputs_dist: dict[str, int | float], optional

    :param wildcard:
        If True, when the data node is used as input and target in the
        ArciDispatch algorithm, the input value will be used as input for
        the connected functions, but not as output.
    :type wildcard: bool, optional

    :return:
        A sub-dispatcher.
    :rtype: Dispatcher

    .. seealso:: :func:`dispatch`

    **--------------------------------------------------------------------**

    **Example**:

    A dispatcher like this:

    .. dispatcher:: dsp
       :opt: graph_attr={'ratio': '1'}

        >>> dsp = Dispatcher(name='Dispatcher')
        >>> functions = [
        ...     {
        ...         'function_id': 'fun1',
        ...         'inputs': ['a', 'b'],
        ...         'outputs': ['c']
        ...     },
        ...     {
        ...         'function_id': 'fun2',
        ...         'inputs': ['b', 'd'],
        ...         'outputs': ['e']
        ...     },
        ...     {
        ...         'function_id': 'fun3',
        ...         'function': min,
        ...         'inputs': ['d', 'f'],
        ...         'outputs': ['g']
        ...     },
        ...     {
        ...         'function_id': 'fun4',
        ...         'function': max,
        ...         'inputs': ['a', 'b'],
        ...         'outputs': ['g']
        ...     },
        ...     {
        ...         'function_id': 'fun5',
        ...         'function': max,
        ...         'inputs': ['d', 'e'],
        ...         'outputs': ['c', 'f']
        ...     },
        ... ]
        >>> dsp.add_from_lists(fun_list=functions)
        ([], [...])

    Get the sub-dispatcher induced by dispatching with no calls from inputs
    `a`, `b`, and `d` to outputs `c` and `f`::

        >>> shrink_dsp = dsp.shrink_dsp(inputs=['a', 'b', 'd'],
        ...                             outputs=['c', 'f'])

    .. dispatcher:: shrink_dsp
       :opt: graph_attr={'ratio': '1'}

        >>> shrink_dsp.name = 'Sub-Dispatcher'
    """
    bfs = None

    if inputs:
        # Get all data nodes no wait inputs.
        wait_in = self._get_wait_in(flag=False)

        # Evaluate the workflow graph without invoking functions.
        o = self.dispatch(
            inputs, outputs, cutoff, inputs_dist, wildcard,
            True, False, True, _wait_in=wait_in
        )

        data_nodes = self.data_nodes  # Get data nodes.

        from .utils.alg import _union_workflow, _convert_bfs
        bfs = _union_workflow(o)  # bfg edges.

        # Set minimum initial distances.
        if inputs_dist:
            inputs_dist = combine_dicts(o.dist, inputs_dist)
        else:
            inputs_dist = o.dist

        # Set data nodes to wait inputs.
        wait_in = self._get_wait_in(flag=True)

        while True:  # Start shrinking loop.
            # Evaluate the workflow graph without invoking functions.
            o = self.dispatch(
                inputs, outputs, cutoff, inputs_dist, wildcard,
                True, False, False, _wait_in=wait_in
            )

            _union_workflow(o, bfs=bfs)  # Update bfs.

            n_d, status = o._remove_wait_in()  # Remove wait input flags.

            if not status:
                break  # Stop iteration.

            # Update inputs.
            inputs = n_d.intersection(data_nodes).union(inputs)

        # Update outputs and convert bfs in DiGraphs.
        outputs, bfs = outputs or o, _convert_bfs(bfs)

    elif not outputs:
        return self.copy_structure()  # Empty Dispatcher.

    # Get sub dispatcher breadth-first-search graph.
    dsp = self._get_dsp_from_bfs(outputs, bfs_graphs=bfs)

    return dsp
[ "def", "shrink_dsp", "(", "self", ",", "inputs", "=", "None", ",", "outputs", "=", "None", ",", "cutoff", "=", "None", ",", "inputs_dist", "=", "None", ",", "wildcard", "=", "True", ")", ":", "bfs", "=", "None", "if", "inputs", ":", "# Get all data nod...
32.185714
19.657143
def parseMemory(buffer, size):
    """parse an XML in-memory block and build a tree. """
    ret = libxml2mod.xmlParseMemory(buffer, size)
    if ret is None:
        raise parserError('xmlParseMemory() failed')
    return xmlDoc(_obj=ret)
[ "def", "parseMemory", "(", "buffer", ",", "size", ")", ":", "ret", "=", "libxml2mod", ".", "xmlParseMemory", "(", "buffer", ",", "size", ")", "if", "ret", "is", "None", ":", "raise", "parserError", "(", "'xmlParseMemory() failed'", ")", "return", "xmlDoc", ...
45.2
11
def draw_dynamics_set(dynamics, xtitle=None, ytitle=None, xlim=None, ylim=None, xlabels=False, ylabels=False):
    """!
    @brief Draw lists of dynamics of neurons (oscillators) in the network.

    @param[in] dynamics (list): List of network outputs that are represented by values of output of oscillators (used by y axis).
    @param[in] xtitle (string): Title for X.
    @param[in] ytitle (string): Title for Y.
    @param[in] xlim (double): X limit.
    @param[in] ylim (double): Y limit.
    @param[in] xlabels (bool): If True - shows X labels.
    @param[in] ylabels (bool): If True - shows Y labels.
    """
    # Calculate edge for comfortable representation.
    number_dynamics = len(dynamics);
    if (number_dynamics == 1):
        draw_dynamics(dynamics[0][0], dynamics[0][1], xtitle, ytitle, xlim, ylim, xlabels, ylabels);
        return;

    number_cols = int(numpy.ceil(number_dynamics ** 0.5));
    number_rows = int(numpy.ceil(number_dynamics / number_cols));

    real_index = 0, 0;
    double_indexer = True;
    if ( (number_cols == 1) or (number_rows == 1) ):
        real_index = 0;
        double_indexer = False;

    (_, axarr) = plt.subplots(number_rows, number_cols);
    # plt.setp([ax for ax in axarr], visible = False);

    for dynamic in dynamics:
        axarr[real_index] = draw_dynamics(dynamic[0], dynamic[1], xtitle, ytitle, xlim, ylim, xlabels, ylabels, axes=axarr[real_index]);
        # plt.setp(axarr[real_index], visible = True);

        if (double_indexer is True):
            real_index = real_index[0], real_index[1] + 1;
            if (real_index[1] >= number_cols):
                real_index = real_index[0] + 1, 0;
        else:
            real_index += 1;

    plt.show();
[ "def", "draw_dynamics_set", "(", "dynamics", ",", "xtitle", "=", "None", ",", "ytitle", "=", "None", ",", "xlim", "=", "None", ",", "ylim", "=", "None", ",", "xlabels", "=", "False", ",", "ylabels", "=", "False", ")", ":", "# Calculate edge for confortable...
41.465116
23.395349
def WaitUntilNoFlowsToProcess(self, timeout=None):
    """Waits until flow processing thread is done processing flows.

    Args:
        timeout: If specified, is a max number of seconds to spend waiting.

    Raises:
        TimeOutWhileWaitingForFlowsToBeProcessedError: if timeout is reached.
    """
    t = self.flow_handler_thread
    if not t:
        return

    start_time = time.time()
    while True:
        with self.lock:
            # If the thread is dead, or there are no requests
            # to be processed/being processed, we stop waiting
            # and return from the function.
            if (not t.isAlive() or
                    (not self._GetFlowRequestsReadyForProcessing() and
                     not self.flow_handler_num_being_processed)):
                return

        time.sleep(0.2)

        if timeout and time.time() - start_time > timeout:
            raise TimeOutWhileWaitingForFlowsToBeProcessedError(
                "Flow processing didn't finish in time.")
[ "def", "WaitUntilNoFlowsToProcess", "(", "self", ",", "timeout", "=", "None", ")", ":", "t", "=", "self", ".", "flow_handler_thread", "if", "not", "t", ":", "return", "start_time", "=", "time", ".", "time", "(", ")", "while", "True", ":", "with", "self",...
31.793103
21.862069
def refresh_db(**kwargs):
    '''
    Update list of available packages from installed repos

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.refresh_db
    '''
    # Remove rtag file to keep multiple refreshes from happening in pkg states
    salt.utils.pkg.clear_rtag(__opts__)
    cmd = 'xbps-install -Sy'
    call = __salt__['cmd.run_all'](cmd, output_loglevel='trace')
    if call['retcode'] != 0:
        comment = ''
        if 'stderr' in call:
            comment += call['stderr']

        raise CommandExecutionError(comment)

    return True
[ "def", "refresh_db", "(", "*", "*", "kwargs", ")", ":", "# Remove rtag file to keep multiple refreshes from happening in pkg states", "salt", ".", "utils", ".", "pkg", ".", "clear_rtag", "(", "__opts__", ")", "cmd", "=", "'xbps-install -Sy'", "call", "=", "__salt__", ...
24.954545
22.681818
def create_datapipeline(self):
    """Creates the data pipeline if it does not already exist

    Returns:
        dict: the response of the Boto3 command
    """
    tags = [{"key": "app_group", "value": self.group},
            {"key": "app_name", "value": self.app_name}]
    response = self.client.create_pipeline(
        name=self.datapipeline_data.get('name', self.app_name),
        uniqueId=self.app_name,
        description=self.datapipeline_data['description'],
        tags=tags)

    self.pipeline_id = response.get('pipelineId')
    LOG.debug(response)
    LOG.info("Successfully configured Data Pipeline - %s", self.app_name)

    return response
[ "def", "create_datapipeline", "(", "self", ")", ":", "tags", "=", "[", "{", "\"key\"", ":", "\"app_group\"", ",", "\"value\"", ":", "self", ".", "group", "}", ",", "{", "\"key\"", ":", "\"app_name\"", ",", "\"value\"", ":", "self", ".", "app_name", "}", ...
38.5
21.722222
def CallClient(self, action_cls, request=None, next_state=None, request_data=None, **kwargs):
    """Calls the client asynchronously.

    This sends a message to the client to invoke an Action. The run action
    may send back many responses that will be queued by the framework until
    a status message is sent by the client. The status message will cause
    the entire transaction to be committed to the specified state.

    Args:
        action_cls: The function to call on the client.
        request: The request to send to the client. If not specified, we
            create a new RDFValue using the kwargs.
        next_state: The state in this flow, that responses to this message
            should go to.
        request_data: A dict which will be available in the RequestState
            protobuf. The Responses object maintains a reference to this
            protobuf for use in the execution of the state method. (so you
            can access this data by responses.request).
        **kwargs: These args will be used to construct the client action
            argument rdfvalue.

    Raises:
        ValueError: The request passed to the client does not have the
            correct type.
    """
    try:
        action_identifier = action_registry.ID_BY_ACTION_STUB[action_cls]
    except KeyError:
        raise ValueError("Action class %s not known." % action_cls)

    if action_cls.in_rdfvalue is None:
        if request:
            raise ValueError("Client action %s does not expect args." % action_cls)
    else:
        if request is None:
            # Create a new rdf request.
            request = action_cls.in_rdfvalue(**kwargs)
        else:
            # Verify that the request type matches the client action requirements.
            if not isinstance(request, action_cls.in_rdfvalue):
                raise ValueError("Client action expected %s but got %s" %
                                 (action_cls.in_rdfvalue, type(request)))

    outbound_id = self.GetNextOutboundId()

    # Create a flow request.
    flow_request = rdf_flow_objects.FlowRequest(
        client_id=self.rdf_flow.client_id,
        flow_id=self.rdf_flow.flow_id,
        request_id=outbound_id,
        next_state=next_state)

    if request_data is not None:
        flow_request.request_data = rdf_protodict.Dict().FromDict(request_data)

    cpu_limit_ms = None
    network_bytes_limit = None

    if self.rdf_flow.cpu_limit:
        cpu_usage = self.rdf_flow.cpu_time_used
        cpu_limit_ms = 1000 * max(
            self.rdf_flow.cpu_limit - cpu_usage.user_cpu_time -
            cpu_usage.system_cpu_time, 0)

        if cpu_limit_ms == 0:
            raise flow.FlowError("CPU limit exceeded for {} {}.".format(
                self.rdf_flow.flow_class_name, self.rdf_flow.flow_id))

    if self.rdf_flow.network_bytes_limit:
        network_bytes_limit = max(
            self.rdf_flow.network_bytes_limit - self.rdf_flow.network_bytes_sent,
            0)
        if network_bytes_limit == 0:
            raise flow.FlowError("Network limit exceeded for {} {}.".format(
                self.rdf_flow.flow_class_name, self.rdf_flow.flow_id))

    client_action_request = rdf_flows.ClientActionRequest(
        client_id=self.rdf_flow.client_id,
        flow_id=self.rdf_flow.flow_id,
        request_id=outbound_id,
        action_identifier=action_identifier,
        action_args=request,
        cpu_limit_ms=cpu_limit_ms,
        network_bytes_limit=network_bytes_limit)

    self.flow_requests.append(flow_request)
    self.client_action_requests.append(client_action_request)
[ "def", "CallClient", "(", "self", ",", "action_cls", ",", "request", "=", "None", ",", "next_state", "=", "None", ",", "request_data", "=", "None", ",", "*", "*", "kwargs", ")", ":", "try", ":", "action_identifier", "=", "action_registry", ".", "ID_BY_ACTI...
38.141304
21.163043
def get_list_objects_arg_dict(self, node_type):
    """Create a dict of arguments that will be passed to listObjects().

    If {node_type} is a CN, add filtering to include only objects from this
    GMN instance in the ObjectList returned by CNCore.listObjects().
    """
    arg_dict = {
        # Restrict query for faster debugging
        # "fromDate": datetime.datetime(2017, 1, 1),
        # "toDate": datetime.datetime(2017, 1, 10),
    }
    if node_type == "cn":
        arg_dict["nodeId"] = django.conf.settings.NODE_IDENTIFIER
    return arg_dict
[ "def", "get_list_objects_arg_dict", "(", "self", ",", "node_type", ")", ":", "arg_dict", "=", "{", "# Restrict query for faster debugging", "# \"fromDate\": datetime.datetime(2017, 1, 1),", "# \"toDate\": datetime.datetime(2017, 1, 10),", "}", "if", "node_type", "==", "\"cn\"", ...
39.6
20.4
def urlretrieve(uri, saveas=None, retries=3, cache_dir=None):
    '''urllib.urlretrieve wrapper'''
    retries = int(retries) if retries else 3
    # FIXME: make random filename (saveas) in cache_dir...
    # cache_dir = cache_dir or CACHE_DIR
    while retries:
        try:
            _path, headers = urllib.urlretrieve(uri, saveas)
        except Exception as e:
            retries -= 1
            logger.warn(
                'Failed getting uri "%s": %s (retry:%s in 1s)' % (
                    uri, e, retries))
            time.sleep(.2)
            continue
        else:
            break
    else:
        raise RuntimeError("Failed to retrieve uri: %s" % uri)
    return _path
[ "def", "urlretrieve", "(", "uri", ",", "saveas", "=", "None", ",", "retries", "=", "3", ",", "cache_dir", "=", "None", ")", ":", "retries", "=", "int", "(", "retries", ")", "if", "retries", "else", "3", "# FIXME: make random filename (saveas) in cache_dir...",...
33.65
17.45
def add_package(package_name, package_path='templates', encoding='utf-8'):
    """Adds the given package to the template search routine"""
    if not _has_jinja:
        raise RuntimeError(_except_text)
    _jload.add_loader(PackageLoader(package_name, package_path, encoding))
[ "def", "add_package", "(", "package_name", ",", "package_path", "=", "'templates'", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "not", "_has_jinja", ":", "raise", "RuntimeError", "(", "_except_text", ")", "_jload", ".", "add_loader", "(", "PackageLoader", ...
54.6
17.2
def absolute_git_root_dir(fpath=""):
    """Absolute path to the git root directory containing a given file or
    directory.
    """
    if len(fpath) == 0:
        dirname_str = os.getcwd()
    else:
        dirname_str = os.path.dirname(fpath)
    dirname_str = os.path.abspath(dirname_str)
    dirnames = dirname_str.split(os.sep)
    n = len(dirnames)
    for i in xrange(n):
        # is there a .git directory at this level?
        # FIXME hack
        basedir = "/" + os.path.join(*dirnames[0:n - i])
        gitdir = os.path.join(basedir, ".git")
        if os.path.exists(gitdir):
            return basedir
[ "def", "absolute_git_root_dir", "(", "fpath", "=", "\"\"", ")", ":", "if", "len", "(", "fpath", ")", "==", "0", ":", "dirname_str", "=", "os", ".", "getcwd", "(", ")", "else", ":", "dirname_str", "=", "os", ".", "path", ".", "dirname", "(", "fpath", ...
33.388889
11.277778
def set_chassis_datacenter(location, host=None, admin_username=None, admin_password=None):
    '''
    Set the location of the chassis.

    location
        The name of the datacenter to be set on the chassis.

    host
        The chassis host.

    admin_username
        The username used to access the chassis.

    admin_password
        The password used to access the chassis.

    CLI Example:

    .. code-block:: bash

        salt '*' dracr.set_chassis_datacenter datacenter-name host=111.222.333.444
            admin_username=root admin_password=secret
    '''
    return set_general('cfgLocation', 'cfgLocationDatacenter', location,
                       host=host, admin_username=admin_username,
                       admin_password=admin_password)
[ "def", "set_chassis_datacenter", "(", "location", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "return", "set_general", "(", "'cfgLocation'", ",", "'cfgLocationDatacenter'", ",", "location", ",", "ho...
27.233333
24.433333
def create_sns_event(app_name, env, region, rules):
    """Create SNS lambda event from rules.

    Args:
        app_name (str): name of the lambda function
        env (str): Environment/Account for lambda function
        region (str): AWS region of the lambda function
        rules (str): Trigger rules from the settings
    """
    session = boto3.Session(profile_name=env, region_name=region)
    sns_client = session.client('sns')

    topic_name = rules.get('topic')
    lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region)

    topic_arn = get_sns_topic_arn(topic_name=topic_name, account=env, region=region)

    protocol = 'lambda'

    statement_id = '{}_sns_{}'.format(app_name, topic_name)
    principal = 'sns.amazonaws.com'
    add_lambda_permissions(
        function=lambda_alias_arn,
        statement_id=statement_id,
        action='lambda:InvokeFunction',
        principal=principal,
        source_arn=topic_arn,
        env=env,
        region=region)

    sns_client.subscribe(TopicArn=topic_arn, Protocol=protocol, Endpoint=lambda_alias_arn)

    LOG.debug("SNS Lambda event created")
    LOG.info("Created SNS event subscription on topic %s", topic_name)
[ "def", "create_sns_event", "(", "app_name", ",", "env", ",", "region", ",", "rules", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "sns_client", "=", "session", ".", "client", ...
36.84375
19.75
def turn_on(self):
    """Turn Air Purifier on"""
    if self.device_status != 'on':
        body = helpers.req_body(self.manager, 'devicestatus')
        body['uuid'] = self.uuid
        body['status'] = 'on'
        head = helpers.req_headers(self.manager)

        r, _ = helpers.call_api('/131airPurifier/v1/device/deviceStatus',
                                'put', json=body, headers=head)

        if r is not None and helpers.check_response(r, 'airpur_status'):
            self.device_status = 'on'
            return True
        else:
            return False
[ "def", "turn_on", "(", "self", ")", ":", "if", "self", ".", "device_status", "!=", "'on'", ":", "body", "=", "helpers", ".", "req_body", "(", "self", ".", "manager", ",", "'devicestatus'", ")", "body", "[", "'uuid'", "]", "=", "self", ".", "uuid", "b...
38.0625
18.8125
def _fetch_system_by_machine_id(self):
    '''
    Get a system by machine ID

    Returns
        dict    system exists in inventory
        False   system does not exist in inventory
        None    error connection or parsing response
    '''
    machine_id = generate_machine_id()
    try:
        url = self.api_url + '/inventory/v1/hosts?insights_id=' + machine_id
        net_logger.info("GET %s", url)
        res = self.session.get(url, timeout=self.config.http_timeout)
    except (requests.ConnectionError, requests.Timeout) as e:
        logger.error(e)
        logger.error('The Insights API could not be reached.')
        return None
    try:
        if (self.handle_fail_rcs(res)):
            return None
        res_json = json.loads(res.content)
    except ValueError as e:
        logger.error(e)
        logger.error('Could not parse response body.')
        return None
    if res_json['total'] == 0:
        logger.debug('No hosts found with machine ID: %s', machine_id)
        return False
    return res_json['results']
[ "def", "_fetch_system_by_machine_id", "(", "self", ")", ":", "machine_id", "=", "generate_machine_id", "(", ")", "try", ":", "url", "=", "self", ".", "api_url", "+", "'/inventory/v1/hosts?insights_id='", "+", "machine_id", "net_logger", ".", "info", "(", "\"GET %s...
38.896552
16.62069
def get_display_name(value):
    """ display-name = phrase

    Because this is simply a name-rule, we don't return a display-name
    token containing a phrase, but rather a display-name token with
    the content of the phrase.

    """
    display_name = DisplayName()
    token, value = get_phrase(value)
    display_name.extend(token[:])
    display_name.defects = token.defects[:]
    return display_name, value
[ "def", "get_display_name", "(", "value", ")", ":", "display_name", "=", "DisplayName", "(", ")", "token", ",", "value", "=", "get_phrase", "(", "value", ")", "display_name", ".", "extend", "(", "token", "[", ":", "]", ")", "display_name", ".", "defects", ...
31.153846
14.692308
def RPC(self, url, opname, obj, replytype=None, **kw):
    '''Send a request, return the reply.  See Send() and Receive()
    docstrings for details.
    '''
    self.Send(url, opname, obj, **kw)
    return self.Receive(replytype, **kw)
[ "def", "RPC", "(", "self", ",", "url", ",", "opname", ",", "obj", ",", "replytype", "=", "None", ",", "*", "*", "kw", ")", ":", "self", ".", "Send", "(", "url", ",", "opname", ",", "obj", ",", "*", "*", "kw", ")", "return", "self", ".", "Rece...
41.833333
14.5
def hash(self):
    """Returns the SHA256 of the pipfile's data."""
    content = json.dumps(self.data, sort_keys=True, separators=(",", ":"))
    return hashlib.sha256(content.encode("utf8")).hexdigest()
[ "def", "hash", "(", "self", ")", ":", "content", "=", "json", ".", "dumps", "(", "self", ".", "data", ",", "sort_keys", "=", "True", ",", "separators", "=", "(", "\",\"", ",", "\":\"", ")", ")", "return", "hashlib", ".", "sha256", "(", "content", "...
53.25
22
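The sort_keys/separators combination canonicalises the JSON, so logically equal dicts hash identically; a standalone check of the same recipe:

import hashlib
import json

def digest(data):
    content = json.dumps(data, sort_keys=True, separators=(",", ":"))
    return hashlib.sha256(content.encode("utf8")).hexdigest()

assert digest({'a': 1, 'b': 2}) == digest({'b': 2, 'a': 1})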
def mangleIR(data, ignore_errors=False):
    """Mangle a raw Kira data packet into shorthand"""
    try:
        # Packet mangling algorithm inspired by Rex Becket's kirarx vera plugin
        # Determine a median value for the timing packets and categorize each
        # timing as longer or shorter than that. This will always work for signals
        # that use pulse width modulation (since varying by long-short is basically
        # the definition of what PWM is). By lucky coincidence this also works with
        # the RC-5/RC-6 encodings used by Phillips (manchester encoding)
        # because time variations of opposite-phase/same-phase are either N or 2*N
        if isinstance(data, bytes):
            data = data.decode('ascii')
        data = data.strip()
        times = [int(x, 16) for x in data.split()[2:]]
        minTime = min(times[2:-1])
        maxTime = max(times[2:-1])
        margin = (maxTime - minTime) / 2 + minTime
        return ''.join([(x < margin and 'S' or 'L') for x in times])
    except:
        # Probably a mangled packet.
        if not ignore_errors:
            raise
[ "def", "mangleIR", "(", "data", ",", "ignore_errors", "=", "False", ")", ":", "try", ":", "# Packet mangling algorithm inspired by Rex Becket's kirarx vera plugin", "# Determine a median value for the timing packets and categorize each", "# timing as longer or shorter than that. This wil...
49.727273
20.909091
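A walk-through with a made-up packet (the two header fields here are invented; the slice [2:] skips them, and min/max are taken over times[2:-1]):

packet = "K0 26 0100 0200 0100 0200 0100 0200"
times = [int(x, 16) for x in packet.split()[2:]]   # [256, 512, 256, 512, 256, 512]
# margin = (512 - 256) / 2 + 256 = 384, so 256 -> 'S' and 512 -> 'L'
assert mangleIR(packet) == 'SLSLSL'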
def _create_kubernetes_state_prometheus_instance(self, instance):
    """
    Set up the kubernetes_state instance so it can be used in OpenMetricsBaseCheck
    """
    ksm_instance = deepcopy(instance)
    endpoint = instance.get('kube_state_url')
    if endpoint is None:
        raise CheckException("Unable to find kube_state_url in config file.")

    extra_labels = ksm_instance.get('label_joins', {})
    hostname_override = is_affirmative(ksm_instance.get('hostname_override', True))

    ksm_instance.update(
        {
            'namespace': 'kubernetes_state',
            'metrics': [
                {
                    'kube_daemonset_status_current_number_scheduled': 'daemonset.scheduled',
                    'kube_daemonset_status_desired_number_scheduled': 'daemonset.desired',
                    'kube_daemonset_status_number_misscheduled': 'daemonset.misscheduled',
                    'kube_daemonset_status_number_ready': 'daemonset.ready',
                    'kube_daemonset_updated_number_scheduled': 'daemonset.updated',
                    'kube_deployment_spec_paused': 'deployment.paused',
                    'kube_deployment_spec_replicas': 'deployment.replicas_desired',
                    'kube_deployment_spec_strategy_rollingupdate_max_unavailable': 'deployment.rollingupdate.max_unavailable',  # noqa: E501
                    'kube_deployment_status_replicas': 'deployment.replicas',
                    'kube_deployment_status_replicas_available': 'deployment.replicas_available',
                    'kube_deployment_status_replicas_unavailable': 'deployment.replicas_unavailable',
                    'kube_deployment_status_replicas_updated': 'deployment.replicas_updated',
                    'kube_endpoint_address_available': 'endpoint.address_available',
                    'kube_endpoint_address_not_ready': 'endpoint.address_not_ready',
                    'kube_endpoint_created': 'endpoint.created',
                    'kube_hpa_spec_min_replicas': 'hpa.min_replicas',
                    'kube_hpa_spec_max_replicas': 'hpa.max_replicas',
                    'kube_hpa_status_desired_replicas': 'hpa.desired_replicas',
                    'kube_hpa_status_current_replicas': 'hpa.current_replicas',
                    'kube_hpa_status_condition': 'hpa.condition',
                    'kube_node_status_allocatable_cpu_cores': 'node.cpu_allocatable',
                    'kube_node_status_allocatable_memory_bytes': 'node.memory_allocatable',
                    'kube_node_status_allocatable_pods': 'node.pods_allocatable',
                    'kube_node_status_capacity_cpu_cores': 'node.cpu_capacity',
                    'kube_node_status_capacity_memory_bytes': 'node.memory_capacity',
                    'kube_node_status_capacity_pods': 'node.pods_capacity',
                    'kube_node_status_allocatable_nvidia_gpu_cards': 'node.gpu.cards_allocatable',
                    'kube_node_status_capacity_nvidia_gpu_cards': 'node.gpu.cards_capacity',
                    'kube_pod_container_status_terminated': 'container.terminated',
                    'kube_pod_container_status_waiting': 'container.waiting',
                    'kube_persistentvolumeclaim_status_phase': 'persistentvolumeclaim.status',
                    'kube_persistentvolumeclaim_resource_requests_storage_bytes': 'persistentvolumeclaim.request_storage',  # noqa: E501
                    'kube_pod_container_resource_limits_cpu_cores': 'container.cpu_limit',
                    'kube_pod_container_resource_limits_memory_bytes': 'container.memory_limit',
                    'kube_pod_container_resource_requests_cpu_cores': 'container.cpu_requested',
                    'kube_pod_container_resource_requests_memory_bytes': 'container.memory_requested',
                    'kube_pod_container_status_ready': 'container.ready',
                    'kube_pod_container_status_restarts': 'container.restarts',  # up to kube-state-metrics 1.1.x
                    'kube_pod_container_status_restarts_total': 'container.restarts',  # noqa: E501, from kube-state-metrics 1.2.0
                    'kube_pod_container_status_running': 'container.running',
                    'kube_pod_container_resource_requests_nvidia_gpu_devices': 'container.gpu.request',
                    'kube_pod_container_resource_limits_nvidia_gpu_devices': 'container.gpu.limit',
                    'kube_pod_status_ready': 'pod.ready',
                    'kube_pod_status_scheduled': 'pod.scheduled',
                    'kube_poddisruptionbudget_status_current_healthy': 'pdb.pods_healthy',
                    'kube_poddisruptionbudget_status_desired_healthy': 'pdb.pods_desired',
                    'kube_poddisruptionbudget_status_pod_disruptions_allowed': 'pdb.disruptions_allowed',
                    'kube_poddisruptionbudget_status_expected_pods': 'pdb.pods_total',
                    'kube_replicaset_spec_replicas': 'replicaset.replicas_desired',
                    'kube_replicaset_status_fully_labeled_replicas': 'replicaset.fully_labeled_replicas',
                    'kube_replicaset_status_ready_replicas': 'replicaset.replicas_ready',
                    'kube_replicaset_status_replicas': 'replicaset.replicas',
                    'kube_replicationcontroller_spec_replicas': 'replicationcontroller.replicas_desired',
                    'kube_replicationcontroller_status_available_replicas': 'replicationcontroller.replicas_available',  # noqa: E501
                    'kube_replicationcontroller_status_fully_labeled_replicas': 'replicationcontroller.fully_labeled_replicas',  # noqa: E501
                    'kube_replicationcontroller_status_ready_replicas': 'replicationcontroller.replicas_ready',
                    'kube_replicationcontroller_status_replicas': 'replicationcontroller.replicas',
                    'kube_statefulset_replicas': 'statefulset.replicas_desired',
                    'kube_statefulset_status_replicas': 'statefulset.replicas',
                    'kube_statefulset_status_replicas_current': 'statefulset.replicas_current',
                    'kube_statefulset_status_replicas_ready': 'statefulset.replicas_ready',
                    'kube_statefulset_status_replicas_updated': 'statefulset.replicas_updated',
                }
            ],
            'ignore_metrics': [
                # _info, _labels and _created don't convey any metric
                'kube_cronjob_info',
                'kube_cronjob_created',
                'kube_daemonset_created',
                'kube_deployment_created',
                'kube_deployment_labels',
                'kube_job_created',
                'kube_job_info',
                'kube_limitrange_created',
                'kube_namespace_created',
                'kube_namespace_labels',
                'kube_node_created',
                'kube_node_info',
                'kube_node_labels',
                'kube_pod_created',
                'kube_pod_container_info',
                'kube_pod_info',
                'kube_pod_owner',
                'kube_pod_start_time',
                'kube_pod_labels',
                'kube_poddisruptionbudget_created',
                'kube_replicaset_created',
                'kube_replicationcontroller_created',
                'kube_resourcequota_created',
                'kube_replicaset_owner',
                'kube_service_created',
                'kube_service_info',
                'kube_service_labels',
                'kube_service_spec_external_ip',
                'kube_service_status_load_balancer_ingress',
                'kube_statefulset_labels',
                'kube_statefulset_created',
                'kube_statefulset_status_current_revision',
                'kube_statefulset_status_update_revision',
                # Already provided by the kubelet integration
                'kube_pod_container_status_last_terminated_reason',
                # _generation metrics are more metadata than metrics, no real use case for now
                'kube_daemonset_metadata_generation',
                'kube_deployment_metadata_generation',
                'kube_deployment_status_observed_generation',
                'kube_replicaset_metadata_generation',
                'kube_replicaset_status_observed_generation',
                'kube_replicationcontroller_metadata_generation',
                'kube_replicationcontroller_status_observed_generation',
                'kube_statefulset_metadata_generation',
                'kube_statefulset_status_observed_generation',
                'kube_hpa_metadata_generation',
                # kube_node_status_phase and kube_namespace_status_phase have no use case as a service check
                'kube_namespace_status_phase',
                'kube_node_status_phase',
                # These CronJob and Job metrics need use cases to determine how to implement
                'kube_cronjob_status_active',
                'kube_cronjob_status_last_schedule_time',
                'kube_cronjob_spec_suspend',
                'kube_cronjob_spec_starting_deadline_seconds',
                'kube_job_spec_active_dealine_seconds',
                'kube_job_spec_completions',
                'kube_job_spec_parallelism',
                'kube_job_status_active',
                'kube_job_status_completion_time',
                # We could compute the duration=completion-start as a gauge
                'kube_job_status_start_time',
            ],
            'label_joins': {
                'kube_pod_info': {'label_to_match': 'pod', 'labels_to_get': ['node']},
                'kube_pod_status_phase': {'label_to_match': 'pod', 'labels_to_get': ['phase']},
                'kube_persistentvolume_info': {
                    'label_to_match': 'persistentvolume',
                    'labels_to_get': ['storageclass'],
                },
                'kube_persistentvolumeclaim_info': {
                    'label_to_match': 'persistentvolumeclaim',
                    'labels_to_get': ['storageclass'],
                },
            },
            # Defaults that were set when kubernetes_state was based on PrometheusCheck
            'send_monotonic_counter': ksm_instance.get('send_monotonic_counter', False),
            'health_service_check': ksm_instance.get('health_service_check', False),
        }
    )

    ksm_instance['prometheus_url'] = endpoint
    ksm_instance['label_joins'].update(extra_labels)
    if hostname_override:
        ksm_instance['label_to_hostname'] = 'node'
        clustername = get_clustername()
        if clustername != "":
            ksm_instance['label_to_hostname_suffix'] = "-" + clustername

    if 'labels_mapper' in ksm_instance and not isinstance(ksm_instance['labels_mapper'], dict):
        self.log.warning("Option labels_mapper should be a dictionary for {}".format(endpoint))

    return ksm_instance
[ "def", "_create_kubernetes_state_prometheus_instance", "(", "self", ",", "instance", ")", ":", "ksm_instance", "=", "deepcopy", "(", "instance", ")", "endpoint", "=", "instance", ".", "get", "(", "'kube_state_url'", ")", "if", "endpoint", "is", "None", ":", "rai...
65.884393
32.265896
def ingest_containers(self, containers=None):
    """
    Transform the YAML into a list of containers with normalized keys
    """
    containers = containers or self.stream or {}

    output_containers = []

    for container_name, definition in containers.items():
        container = definition.copy()
        container['name'] = container_name
        output_containers.append(container)

    return output_containers
[ "def", "ingest_containers", "(", "self", ",", "containers", "=", "None", ")", ":", "containers", "=", "containers", "or", "self", ".", "stream", "or", "{", "}", "output_containers", "=", "[", "]", "for", "container_name", ",", "definition", "in", "containers...
31.071429
14.928571
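Behaviour sketch, passing an explicit mapping so self.stream is not consulted; the name key is folded into each entry (loader is a hypothetical instance of the class defining ingest_containers):

containers = {
    'web': {'image': 'nginx:1.25', 'ports': [80]},
    'db': {'image': 'postgres:16'},
}
result = loader.ingest_containers(containers)
# e.g. [{'image': 'nginx:1.25', 'ports': [80], 'name': 'web'},
#       {'image': 'postgres:16', 'name': 'db'}]
assert {c['name'] for c in result} == {'web', 'db'}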
def _get_client_by_id(self, client_id):
    """Get GRR client dictionary and make sure valid approvals exist.

    Args:
        client_id: GRR client ID.

    Returns:
        GRR API Client object
    """
    client = self.grr_api.Client(client_id)
    print('Checking for client approval')
    self._check_approval_wrapper(client, client.ListFlows)
    print('{0:s}: Client approval is valid'.format(client_id))
    return client.Get()
[ "def", "_get_client_by_id", "(", "self", ",", "client_id", ")", ":", "client", "=", "self", ".", "grr_api", ".", "Client", "(", "client_id", ")", "print", "(", "'Checking for client approval'", ")", "self", ".", "_check_approval_wrapper", "(", "client", ",", "...
30.071429
15.928571
def _from_dict(cls, _dict):
    """Initialize a MessageContext object from a json dictionary."""
    args = {}
    if 'global' in _dict:
        args['global_'] = MessageContextGlobal._from_dict(
            _dict.get('global'))
    if 'skills' in _dict:
        args['skills'] = MessageContextSkills._from_dict(
            _dict.get('skills'))
    return cls(**args)
[ "def", "_from_dict", "(", "cls", ",", "_dict", ")", ":", "args", "=", "{", "}", "if", "'global'", "in", "_dict", ":", "args", "[", "'global_'", "]", "=", "MessageContextGlobal", ".", "_from_dict", "(", "_dict", ".", "get", "(", "'global'", ")", ")", ...
39.5
12.3
def _check_transition_id(self, transition):
    """Checks the validity of a transition id

    Checks whether the transition id is already used by another transition within the state

    :param rafcon.core.transition.Transition transition: The transition to be checked
    :return bool validity, str message: validity is True, when the transition is valid, False else. message
        gives more information especially if the transition is not valid
    """
    transition_id = transition.transition_id
    if transition_id in self.transitions and transition is not self.transitions[transition_id]:
        return False, "transition_id already existing"
    return True, "valid"
[ "def", "_check_transition_id", "(", "self", ",", "transition", ")", ":", "transition_id", "=", "transition", ".", "transition_id", "if", "transition_id", "in", "self", ".", "transitions", "and", "transition", "is", "not", "self", ".", "transitions", "[", "transi...
54.384615
30.076923