text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def ascdiff(decl, lat):
    """Return the Ascensional Difference (in degrees) of a point.

    :param decl: declination in degrees
    :param lat: geographic latitude in degrees
    """
    # asin(tan(decl) * tan(lat)), all work done in radians.
    product = math.tan(math.radians(decl)) * math.tan(math.radians(lat))
    return math.degrees(math.asin(product))
[ "def", "ascdiff", "(", "decl", ",", "lat", ")", ":", "delta", "=", "math", ".", "radians", "(", "decl", ")", "phi", "=", "math", ".", "radians", "(", "lat", ")", "ad", "=", "math", ".", "asin", "(", "math", ".", "tan", "(", "delta", ")", "*", "math", ".", "tan", "(", "phi", ")", ")", "return", "math", ".", "degrees", "(", "ad", ")" ]
36
10.666667
def cached_get(timeout, *params):
    """Decorator applied specifically to a view's get method

    :param timeout: cache timeout in seconds, passed straight to ``cache.set``.
    :param params: extra expression strings; each is evaluated (see the
        security note below) and appended to the cache key.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(view_or_request, *args, **kwargs):
            # The type of the request gets muddled when using a function based
            # decorator. We must use a function based decorator so it can be
            # used in urls.py.
            request = getattr(view_or_request, "request", view_or_request)
            if not hasattr(_thread_locals, "ultracache_request"):
                setattr(_thread_locals, "ultracache_request", request)

            # If request not GET or HEAD never cache
            if request.method.lower() not in ("get", "head"):
                return view_func(view_or_request, *args, **kwargs)

            # If request contains messages never cache
            # (count them; any pending message makes the response user-specific)
            l = 0
            try:
                l = len(request._messages)
            except (AttributeError, TypeError):
                pass
            if l:
                return view_func(view_or_request, *args, **kwargs)

            # Compute a cache key
            li = [str(view_or_request.__class__), view_func.__name__]

            # request.get_full_path is implicitly added if no other request
            # path is provided. get_full_path includes the querystring and is
            # the more conservative approach but makes it trivially easy for a
            # request to bust through the cache.
            if not set(params).intersection(set((
                "request.get_full_path()", "request.path", "request.path_info"
            ))):
                li.append(request.get_full_path())

            if "django.contrib.sites" in settings.INSTALLED_APPS:
                li.append(get_current_site_pk(request))

            # Pre-sort kwargs
            keys = list(kwargs.keys())
            keys.sort()
            for key in keys:
                li.append("%s,%s" % (key, kwargs[key]))

            # Extend cache key with custom variables
            # NOTE(review): eval() on these expression strings is an arbitrary
            # code-execution hazard if params ever come from untrusted input —
            # they must only ever be hard-coded by the developer.
            for param in params:
                if not isinstance(param, str):
                    param = str(param)
                li.append(eval(param))

            # Hash the joined key parts so the cache key stays short.
            s = ":".join([str(l) for l in li])
            hashed = hashlib.md5(s.encode("utf-8")).hexdigest()
            cache_key = "ucache-get-%s" % hashed

            cached = cache.get(cache_key, None)
            if cached is None:
                # The get view as outermost caller may bluntly set _ultracache
                request._ultracache = []
                response = view_func(view_or_request, *args, **kwargs)
                content = None
                if isinstance(response, TemplateResponse):
                    content = response.render().rendered_content
                elif isinstance(response, HttpResponse):
                    content = response.content
                if content is not None:
                    headers = getattr(response, "_headers", {})
                    cache.set(
                        cache_key,
                        {"content": content, "headers": headers},
                        timeout
                    )
                    cache_meta(request, cache_key)
            else:
                # Cache hit: rebuild an HttpResponse from the stored payload.
                response = HttpResponse(cached["content"])
                # Headers has a non-obvious format
                for k, v in cached["headers"].items():
                    response[v[0]] = v[1]

            return response
        return _wrapped_view
    return decorator
[ "def", "cached_get", "(", "timeout", ",", "*", "params", ")", ":", "def", "decorator", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "_wrapped_view", "(", "view_or_request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# The type of the request gets muddled when using a function based", "# decorator. We must use a function based decorator so it can be", "# used in urls.py.", "request", "=", "getattr", "(", "view_or_request", ",", "\"request\"", ",", "view_or_request", ")", "if", "not", "hasattr", "(", "_thread_locals", ",", "\"ultracache_request\"", ")", ":", "setattr", "(", "_thread_locals", ",", "\"ultracache_request\"", ",", "request", ")", "# If request not GET or HEAD never cache", "if", "request", ".", "method", ".", "lower", "(", ")", "not", "in", "(", "\"get\"", ",", "\"head\"", ")", ":", "return", "view_func", "(", "view_or_request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "# If request contains messages never cache", "l", "=", "0", "try", ":", "l", "=", "len", "(", "request", ".", "_messages", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "pass", "if", "l", ":", "return", "view_func", "(", "view_or_request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "# Compute a cache key", "li", "=", "[", "str", "(", "view_or_request", ".", "__class__", ")", ",", "view_func", ".", "__name__", "]", "# request.get_full_path is implicitly added it no other request", "# path is provided. 
get_full_path includes the querystring and is", "# the more conservative approach but makes it trivially easy for a", "# request to bust through the cache.", "if", "not", "set", "(", "params", ")", ".", "intersection", "(", "set", "(", "(", "\"request.get_full_path()\"", ",", "\"request.path\"", ",", "\"request.path_info\"", ")", ")", ")", ":", "li", ".", "append", "(", "request", ".", "get_full_path", "(", ")", ")", "if", "\"django.contrib.sites\"", "in", "settings", ".", "INSTALLED_APPS", ":", "li", ".", "append", "(", "get_current_site_pk", "(", "request", ")", ")", "# Pre-sort kwargs", "keys", "=", "list", "(", "kwargs", ".", "keys", "(", ")", ")", "keys", ".", "sort", "(", ")", "for", "key", "in", "keys", ":", "li", ".", "append", "(", "\"%s,%s\"", "%", "(", "key", ",", "kwargs", "[", "key", "]", ")", ")", "# Extend cache key with custom variables", "for", "param", "in", "params", ":", "if", "not", "isinstance", "(", "param", ",", "str", ")", ":", "param", "=", "str", "(", "param", ")", "li", ".", "append", "(", "eval", "(", "param", ")", ")", "s", "=", "\":\"", ".", "join", "(", "[", "str", "(", "l", ")", "for", "l", "in", "li", "]", ")", "hashed", "=", "hashlib", ".", "md5", "(", "s", ".", "encode", "(", "\"utf-8\"", ")", ")", ".", "hexdigest", "(", ")", "cache_key", "=", "\"ucache-get-%s\"", "%", "hashed", "cached", "=", "cache", ".", "get", "(", "cache_key", ",", "None", ")", "if", "cached", "is", "None", ":", "# The get view as outermost caller may bluntly set _ultracache", "request", ".", "_ultracache", "=", "[", "]", "response", "=", "view_func", "(", "view_or_request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "content", "=", "None", "if", "isinstance", "(", "response", ",", "TemplateResponse", ")", ":", "content", "=", "response", ".", "render", "(", ")", ".", "rendered_content", "elif", "isinstance", "(", "response", ",", "HttpResponse", ")", ":", "content", "=", "response", ".", "content", "if", "content", "is", "not", "None", ":", "headers", 
"=", "getattr", "(", "response", ",", "\"_headers\"", ",", "{", "}", ")", "cache", ".", "set", "(", "cache_key", ",", "{", "\"content\"", ":", "content", ",", "\"headers\"", ":", "headers", "}", ",", "timeout", ")", "cache_meta", "(", "request", ",", "cache_key", ")", "else", ":", "response", "=", "HttpResponse", "(", "cached", "[", "\"content\"", "]", ")", "# Headers has a non-obvious format", "for", "k", ",", "v", "in", "cached", "[", "\"headers\"", "]", ".", "items", "(", ")", ":", "response", "[", "v", "[", "0", "]", "]", "=", "v", "[", "1", "]", "return", "response", "return", "_wrapped_view", "return", "decorator" ]
40.395349
20.034884
def py(self, output):
    """Pretty-print *output* as a Python data structure to ``self.outfile``."""
    from pprint import pprint
    pprint(output, stream=self.outfile)
[ "def", "py", "(", "self", ",", "output", ")", ":", "import", "pprint", "pprint", ".", "pprint", "(", "output", ",", "stream", "=", "self", ".", "outfile", ")" ]
40.25
12
def get_next(self):
    """Return the billing cycle immediately after this one, or None."""
    following = BillingCycle.objects.filter(date_range__gt=self.date_range)
    return following.order_by('date_range').first()
[ "def", "get_next", "(", "self", ")", ":", "return", "BillingCycle", ".", "objects", ".", "filter", "(", "date_range__gt", "=", "self", ".", "date_range", ")", ".", "order_by", "(", "'date_range'", ")", ".", "first", "(", ")" ]
63.666667
28.666667
def nth(self, index):
    """
    Return a query that selects the element at `index` (starts from 0).

    If no elements are available, returns a query with no results.

    Example usage:

    .. code:: python

        >> q = Query(lambda: list(range(5)))
        >> q.nth(2).results
        [2]

    Args:
        index (int): The index of the element to select (starts from 0)

    Returns:
        Query
    """
    def _pick(xs):  # pylint: disable=missing-docstring, invalid-name
        try:
            element = next(islice(iter(xs), index, None))
        # Gracefully handle (a) running out of elements, and (b) negative
        # indices (islice rejects them with ValueError).
        except (StopIteration, ValueError):
            return []
        return [element]
    return self.transform(_pick, 'nth')
[ "def", "nth", "(", "self", ",", "index", ")", ":", "def", "_transform", "(", "xs", ")", ":", "# pylint: disable=missing-docstring, invalid-name", "try", ":", "return", "[", "next", "(", "islice", "(", "iter", "(", "xs", ")", ",", "index", ",", "None", ")", ")", "]", "# Gracefully handle (a) running out of elements, and (b) negative indices", "except", "(", "StopIteration", ",", "ValueError", ")", ":", "return", "[", "]", "return", "self", ".", "transform", "(", "_transform", ",", "'nth'", ")" ]
28.857143
25.214286
def _loop(self, barrier):
    """Thread body: build an event loop, signal readiness, then run forever.

    Exits the process with status 1 if the loop dies unexpectedly.
    """
    # Windows needs the Proactor loop for subprocess/pipe support.
    if sys.platform == "win32":
        self.loop = asyncio.ProactorEventLoop()
    else:
        self.loop = asyncio.new_event_loop()
    asyncio.set_event_loop(self.loop)
    # Let the spawning thread know the loop is ready.
    barrier.wait()
    try:
        self.loop.run_forever()
    except Exception:
        sys.exit(1)
[ "def", "_loop", "(", "self", ",", "barrier", ")", ":", "if", "sys", ".", "platform", "!=", "\"win32\"", ":", "self", ".", "loop", "=", "asyncio", ".", "new_event_loop", "(", ")", "else", ":", "self", ".", "loop", "=", "asyncio", ".", "ProactorEventLoop", "(", ")", "asyncio", ".", "set_event_loop", "(", "self", ".", "loop", ")", "barrier", ".", "wait", "(", ")", "try", ":", "self", ".", "loop", ".", "run_forever", "(", ")", "except", "Exception", ":", "sys", ".", "exit", "(", "1", ")" ]
27.461538
14.615385
def _simulate_mixture(self, op: ops.Operation, data: _StateAndBuffer,
                      indices: List[int]) -> None:
    """Simulate an op that is a mixtures of unitaries."""
    probs, unitaries = zip(*protocols.mixture(op))
    # np.random.choice cannot sample directly from a list of ndarrays
    # (it is not `one-dimensional`), so sample an index instead.
    chosen = np.random.choice(range(len(unitaries)), p=probs)
    tensor_shape = (2,) * (2 * len(indices))
    gate = unitaries[chosen].astype(self._dtype).reshape(tensor_shape)
    product = linalg.targeted_left_multiply(gate, data.state, indices,
                                            out=data.buffer)
    # Swap the roles of state and scratch buffer.
    data.buffer, data.state = data.state, product
[ "def", "_simulate_mixture", "(", "self", ",", "op", ":", "ops", ".", "Operation", ",", "data", ":", "_StateAndBuffer", ",", "indices", ":", "List", "[", "int", "]", ")", "->", "None", ":", "probs", ",", "unitaries", "=", "zip", "(", "*", "protocols", ".", "mixture", "(", "op", ")", ")", "# We work around numpy barfing on choosing from a list of", "# numpy arrays (which is not `one-dimensional`) by selecting", "# the index of the unitary.", "index", "=", "np", ".", "random", ".", "choice", "(", "range", "(", "len", "(", "unitaries", ")", ")", ",", "p", "=", "probs", ")", "shape", "=", "(", "2", ",", ")", "*", "(", "2", "*", "len", "(", "indices", ")", ")", "unitary", "=", "unitaries", "[", "index", "]", ".", "astype", "(", "self", ".", "_dtype", ")", ".", "reshape", "(", "shape", ")", "result", "=", "linalg", ".", "targeted_left_multiply", "(", "unitary", ",", "data", ".", "state", ",", "indices", ",", "out", "=", "data", ".", "buffer", ")", "data", ".", "buffer", "=", "data", ".", "state", "data", ".", "state", "=", "result" ]
54.571429
16.785714
def _refine_upcheck(merge, min_goodness): """Remove from the merge any entries which would be covered by entries between their current position and the merge insertion position. For example, the third entry of:: 0011 -> N 0100 -> N 1000 -> N X000 -> NE Cannot be merged with the first two entries because that would generate the new entry ``XXXX`` which would move ``1000`` below the entry with the key-mask pair of ``X000``, which would cover it. Returns ------- :py:class:`~.Merge` New merge with entries possibly removed. If the goodness of the merge ever drops below `min_goodness` then an empty merge will be returned. bool If the merge has been changed at all. """ # Remove any entries which would be covered by entries above the merge # position. changed = False for i in sorted(merge.entries, reverse=True): # Get all the entries that are between the entry we're looking at the # insertion index of the proposed merged index. If this entry would be # covered up by any of them then we remove it from the merge. entry = merge.routing_table[i] key, mask = entry.key, entry.mask if any(intersect(key, mask, other.key, other.mask) for other in merge.routing_table[i+1:merge.insertion_index]): # The entry would be partially or wholly covered by another entry, # remove it from the merge and return a new merge. merge = _Merge(merge.routing_table, merge.entries - {i}) changed = True # Check if the merge is sufficiently good if merge.goodness <= min_goodness: merge = _Merge(merge.routing_table) # Replace with empty merge break # Return the final merge return merge, changed
[ "def", "_refine_upcheck", "(", "merge", ",", "min_goodness", ")", ":", "# Remove any entries which would be covered by entries above the merge", "# position.", "changed", "=", "False", "for", "i", "in", "sorted", "(", "merge", ".", "entries", ",", "reverse", "=", "True", ")", ":", "# Get all the entries that are between the entry we're looking at the", "# insertion index of the proposed merged index. If this entry would be", "# covered up by any of them then we remove it from the merge.", "entry", "=", "merge", ".", "routing_table", "[", "i", "]", "key", ",", "mask", "=", "entry", ".", "key", ",", "entry", ".", "mask", "if", "any", "(", "intersect", "(", "key", ",", "mask", ",", "other", ".", "key", ",", "other", ".", "mask", ")", "for", "other", "in", "merge", ".", "routing_table", "[", "i", "+", "1", ":", "merge", ".", "insertion_index", "]", ")", ":", "# The entry would be partially or wholly covered by another entry,", "# remove it from the merge and return a new merge.", "merge", "=", "_Merge", "(", "merge", ".", "routing_table", ",", "merge", ".", "entries", "-", "{", "i", "}", ")", "changed", "=", "True", "# Check if the merge is sufficiently good", "if", "merge", ".", "goodness", "<=", "min_goodness", ":", "merge", "=", "_Merge", "(", "merge", ".", "routing_table", ")", "# Replace with empty merge", "break", "# Return the final merge", "return", "merge", ",", "changed" ]
39.826087
23.673913
def set_is_valid_rss(self):
    """Check if this is actually a valid RSS feed.

    Marks the feed valid iff title, link and description are all truthy,
    storing the result on ``self.is_valid_rss``.
    """
    # bool() collapses the truthiness test the original spelled out
    # with an if/else pair.
    self.is_valid_rss = bool(self.title and self.link and self.description)
[ "def", "set_is_valid_rss", "(", "self", ")", ":", "if", "self", ".", "title", "and", "self", ".", "link", "and", "self", ".", "description", ":", "self", ".", "is_valid_rss", "=", "True", "else", ":", "self", ".", "is_valid_rss", "=", "False" ]
38.166667
10.666667
def get_config_section(self, name):
    """Return the items of configuration section *name*, or [] if absent."""
    if not self.config.has_section(name):
        return []
    return self.config.items(name)
[ "def", "get_config_section", "(", "self", ",", "name", ")", ":", "if", "self", ".", "config", ".", "has_section", "(", "name", ")", ":", "return", "self", ".", "config", ".", "items", "(", "name", ")", "return", "[", "]" ]
28.142857
4.428571
def to_svg(self, converter=None):
    """Return a SVGDumper for this instruction.

    :param converter: a :class:`
      knittingpattern.convert.InstructionSVGCache.InstructionSVGCache` or
      :obj:`None`. If :obj:`None` is given, the :func:`
      knittingpattern.convert.InstructionSVGCache.default_svg_cache` is used.
    :rtype: knittingpattern.Dumper.SVGDumper
    """
    if converter is not None:
        return converter.to_svg(self)
    # Imported lazily to avoid a circular import at module load time.
    from knittingpattern.convert.InstructionSVGCache import \
        default_svg_cache
    return default_svg_cache().to_svg(self)
[ "def", "to_svg", "(", "self", ",", "converter", "=", "None", ")", ":", "if", "converter", "is", "None", ":", "from", "knittingpattern", ".", "convert", ".", "InstructionSVGCache", "import", "default_svg_cache", "converter", "=", "default_svg_cache", "(", ")", "return", "converter", ".", "to_svg", "(", "self", ")" ]
41.066667
15.2
def stats(self, start, end, fields=None):
    '''Perform a multivariate statistic calculation of this
:class:`ColumnTS` from a *start* date/datetime to an *end*
date/datetime.

:param start: Start date for analysis.
:param end: End date for analysis.
:param fields: Optional subset of :meth:`fields` to perform analysis on.
    If not provided all fields are included in the analysis.
'''
    dumped_start = self.pickler.dumps(start)
    dumped_end = self.pickler.dumps(end)
    backend = self.read_backend
    query = backend.structure(self).stats(dumped_start, dumped_end, fields)
    return backend.execute(query, self._stats)
[ "def", "stats", "(", "self", ",", "start", ",", "end", ",", "fields", "=", "None", ")", ":", "start", "=", "self", ".", "pickler", ".", "dumps", "(", "start", ")", "end", "=", "self", ".", "pickler", ".", "dumps", "(", "end", ")", "backend", "=", "self", ".", "read_backend", "return", "backend", ".", "execute", "(", "backend", ".", "structure", "(", "self", ")", ".", "stats", "(", "start", ",", "end", ",", "fields", ")", ",", "self", ".", "_stats", ")" ]
41.133333
16.333333
def plot_decorate_rebits(basis=None, rebit_axes=REBIT_AXES):
    """
    Decorates a figure with the boundary of rebit state space
    and basis labels drawn from a
    :ref:`~qinfer.tomography.TomographyBasis`.

    :param qinfer.tomography.TomographyBasis basis: Basis to use in
        labeling axes.
    :param list rebit_axes: List containing indices for the :math:`x`
        and :math:`z` axes.
    """
    ax = plt.gca()

    if basis is not None:
        fmt = r'$\langle\!\langle {} | \rho \rangle\!\rangle$'.format
        # Pick out the x and z by default.
        axis_labels = [fmt(basis.labels[rebit_axes[0]]),
                       fmt(basis.labels[rebit_axes[1]])]
        plt.xlabel(axis_labels[0])
        plt.ylabel(axis_labels[1])

    # Unit circle bounds the rebit state space.
    ax.add_artist(plt.Circle([0, 0], 1, color='k', fill=False))
    ax.set_xlim(-1.1, 1.1)
    ax.set_ylim(-1.1, 1.1)
    ax.set_aspect('equal')
[ "def", "plot_decorate_rebits", "(", "basis", "=", "None", ",", "rebit_axes", "=", "REBIT_AXES", ")", ":", "ax", "=", "plt", ".", "gca", "(", ")", "if", "basis", "is", "not", "None", ":", "labels", "=", "list", "(", "map", "(", "r'$\\langle\\!\\langle {} | \\rho \\rangle\\!\\rangle$'", ".", "format", ",", "# Pick out the x and z by default.", "[", "basis", ".", "labels", "[", "rebit_axes", "[", "0", "]", "]", ",", "basis", ".", "labels", "[", "rebit_axes", "[", "1", "]", "]", "]", ")", ")", "plt", ".", "xlabel", "(", "labels", "[", "0", "]", ")", "plt", ".", "ylabel", "(", "labels", "[", "1", "]", ")", "ax", ".", "add_artist", "(", "plt", ".", "Circle", "(", "[", "0", ",", "0", "]", ",", "1", ",", "color", "=", "'k'", ",", "fill", "=", "False", ")", ")", "ax", ".", "set_xlim", "(", "-", "1.1", ",", "1.1", ")", "ax", ".", "set_ylim", "(", "-", "1.1", ",", "1.1", ")", "ax", ".", "set_aspect", "(", "'equal'", ")" ]
35.25
21.5
def check_for_change(self):
    """Determine if a new release has been made.

    Compares the fingerprint recorded in the last manifest with the
    current upstream geckodriver version number.

    Returns:
        bool: True if a new release is available, False otherwise.
    """
    # NOTE: the original bound `r = self.local_renderer` but never used it;
    # the unused local has been removed.
    lm = self.last_manifest
    last_fingerprint = lm.fingerprint
    current_fingerprint = self.get_target_geckodriver_version_number()
    self.vprint('last_fingerprint:', last_fingerprint)
    self.vprint('current_fingerprint:', current_fingerprint)
    if last_fingerprint != current_fingerprint:
        print('A new release is available. %s' % self.get_most_recent_version())
        return True
    print('No updates found.')
    return False
[ "def", "check_for_change", "(", "self", ")", ":", "r", "=", "self", ".", "local_renderer", "lm", "=", "self", ".", "last_manifest", "last_fingerprint", "=", "lm", ".", "fingerprint", "current_fingerprint", "=", "self", ".", "get_target_geckodriver_version_number", "(", ")", "self", ".", "vprint", "(", "'last_fingerprint:'", ",", "last_fingerprint", ")", "self", ".", "vprint", "(", "'current_fingerprint:'", ",", "current_fingerprint", ")", "if", "last_fingerprint", "!=", "current_fingerprint", ":", "print", "(", "'A new release is available. %s'", "%", "self", ".", "get_most_recent_version", "(", ")", ")", "return", "True", "print", "(", "'No updates found.'", ")", "return", "False" ]
40.666667
14.4
def getcoef(self):
    """Get final coefficient map array."""
    global mp_Z_Y1
    # Move the K axis to the front, then drop it.
    axis = self.xstep.cri.axisK + 1
    return np.swapaxes(mp_Z_Y1, 0, axis)[0]
[ "def", "getcoef", "(", "self", ")", ":", "global", "mp_Z_Y1", "return", "np", ".", "swapaxes", "(", "mp_Z_Y1", ",", "0", ",", "self", ".", "xstep", ".", "cri", ".", "axisK", "+", "1", ")", "[", "0", "]" ]
30.2
21
def create_n_gram_df(df, n_pad):
    """Given input dataframe, create feature dataframe of shifted characters.

    Adds ``char-k``/``type-k`` (previous k rows) and ``char k``/``type k``
    (next k rows) columns, then trims the padded edges.
    """
    half_pad = int((n_pad - 1) / 2)
    for offset in range(1, half_pad + 1):
        df['char-{}'.format(offset)] = df['char'].shift(offset)
        df['type-{}'.format(offset)] = df['type'].shift(offset)
        df['char{}'.format(offset)] = df['char'].shift(-offset)
        df['type{}'.format(offset)] = df['type'].shift(-offset)
    # Edge rows lack full context; drop them.
    return df[half_pad: -half_pad]
[ "def", "create_n_gram_df", "(", "df", ",", "n_pad", ")", ":", "n_pad_2", "=", "int", "(", "(", "n_pad", "-", "1", ")", "/", "2", ")", "for", "i", "in", "range", "(", "n_pad_2", ")", ":", "df", "[", "'char-{}'", ".", "format", "(", "i", "+", "1", ")", "]", "=", "df", "[", "'char'", "]", ".", "shift", "(", "i", "+", "1", ")", "df", "[", "'type-{}'", ".", "format", "(", "i", "+", "1", ")", "]", "=", "df", "[", "'type'", "]", ".", "shift", "(", "i", "+", "1", ")", "df", "[", "'char{}'", ".", "format", "(", "i", "+", "1", ")", "]", "=", "df", "[", "'char'", "]", ".", "shift", "(", "-", "i", "-", "1", ")", "df", "[", "'type{}'", ".", "format", "(", "i", "+", "1", ")", "]", "=", "df", "[", "'type'", "]", ".", "shift", "(", "-", "i", "-", "1", ")", "return", "df", "[", "n_pad_2", ":", "-", "n_pad_2", "]" ]
40.636364
13.181818
async def unban(self, channel, target, range=0):
    """ Unban user from channel. Target can be either a user or a host.
    See ban documentation for the range parameter. """
    # Resolve a known nick to its hostname; otherwise treat target as a host.
    host = self.users[target]['hostname'] if target in self.users else target
    host = self._format_host_range(host, range)
    mask = self._format_host_mask('*', '*', host)
    await self.rawmsg('MODE', channel, '-b', mask)
[ "async", "def", "unban", "(", "self", ",", "channel", ",", "target", ",", "range", "=", "0", ")", ":", "if", "target", "in", "self", ".", "users", ":", "host", "=", "self", ".", "users", "[", "target", "]", "[", "'hostname'", "]", "else", ":", "host", "=", "target", "host", "=", "self", ".", "_format_host_range", "(", "host", ",", "range", ")", "mask", "=", "self", ".", "_format_host_mask", "(", "'*'", ",", "'*'", ",", "host", ")", "await", "self", ".", "rawmsg", "(", "'MODE'", ",", "channel", ",", "'-b'", ",", "mask", ")" ]
36.307692
14.615385
def _complete_parameters(param, variables): """Replace any parameters passed as {} in the yaml file with the variable names that are passed in Only strings, lists of strings, and dictionaries of strings can have replaceable values at the moment. """ if isinstance(param, list): return [_complete_parameters(x, variables) for x in param] elif isinstance(param, dict): return {key: _complete_parameters(value, variables) for key, value in param.items()} elif isinstance(param, str): try: return Template(param).substitute(variables) except KeyError as exc: raise RecipeVariableNotPassed("Variable undefined in recipe", undefined_variable=exc.args[0]) return param
[ "def", "_complete_parameters", "(", "param", ",", "variables", ")", ":", "if", "isinstance", "(", "param", ",", "list", ")", ":", "return", "[", "_complete_parameters", "(", "x", ",", "variables", ")", "for", "x", "in", "param", "]", "elif", "isinstance", "(", "param", ",", "dict", ")", ":", "return", "{", "key", ":", "_complete_parameters", "(", "value", ",", "variables", ")", "for", "key", ",", "value", "in", "param", ".", "items", "(", ")", "}", "elif", "isinstance", "(", "param", ",", "str", ")", ":", "try", ":", "return", "Template", "(", "param", ")", ".", "substitute", "(", "variables", ")", "except", "KeyError", "as", "exc", ":", "raise", "RecipeVariableNotPassed", "(", "\"Variable undefined in recipe\"", ",", "undefined_variable", "=", "exc", ".", "args", "[", "0", "]", ")", "return", "param" ]
40.833333
22.333333
def controlled(num_ptr_bits, U):
    """ Given a one-qubit gate matrix U, construct a controlled-U on all
    pointer qubits. """
    dim = 2 ** (1 + num_ptr_bits)
    result = np.eye(dim)
    # U acts only when every control qubit is |1>, i.e. the last 2x2 block.
    result[-2:, -2:] = U
    return result
[ "def", "controlled", "(", "num_ptr_bits", ",", "U", ")", ":", "d", "=", "2", "**", "(", "1", "+", "num_ptr_bits", ")", "m", "=", "np", ".", "eye", "(", "d", ")", "m", "[", "d", "-", "2", ":", ",", "d", "-", "2", ":", "]", "=", "U", "return", "m" ]
24.222222
16.444444
def base62_encode(cls, num):
    """Encode a non-negative integer in base 62.

    Digits are drawn from 0-9, a-z, then A-Z.

    Adapted from: http://stackoverflow.com/a/1119769/1144479
    """
    alphabet = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    if num == 0:
        return alphabet[0]
    base = len(alphabet)
    digits = []
    while num:
        num, rem = divmod(num, base)
        digits.append(alphabet[rem])
    # Remainders come out least-significant first.
    return ''.join(reversed(digits))
[ "def", "base62_encode", "(", "cls", ",", "num", ")", ":", "alphabet", "=", "\"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\"", "if", "num", "==", "0", ":", "return", "alphabet", "[", "0", "]", "arr", "=", "[", "]", "base", "=", "len", "(", "alphabet", ")", "while", "num", ":", "rem", "=", "num", "%", "base", "num", "=", "num", "//", "base", "arr", ".", "append", "(", "alphabet", "[", "rem", "]", ")", "arr", ".", "reverse", "(", ")", "return", "''", ".", "join", "(", "arr", ")" ]
26.809524
19.095238
def to_bel_path(graph, path: str, mode: str = 'w', **kwargs) -> None:
    """Write the BEL graph as a canonical BEL Script to the given path.

    :param BELGraph graph: the BEL Graph to output as a BEL Script
    :param path: A file path
    :param mode: The file opening mode. Defaults to 'w'
    """
    bel_file = open(path, mode=mode, **kwargs)
    try:
        to_bel(graph, bel_file)
    finally:
        bel_file.close()
[ "def", "to_bel_path", "(", "graph", ",", "path", ":", "str", ",", "mode", ":", "str", "=", "'w'", ",", "*", "*", "kwargs", ")", "->", "None", ":", "with", "open", "(", "path", ",", "mode", "=", "mode", ",", "*", "*", "kwargs", ")", "as", "bel_file", ":", "to_bel", "(", "graph", ",", "bel_file", ")" ]
42.222222
16
def remover(self, id_equipamento):
    """Remove an equipment given its identifier.

    Besides removing the equipment itself, the API also removes:

        - Its relationships with access types.
        - Its relationships with scripts.
        - Its relationships with IPs.
        - Its interfaces.
        - Its relationships with environments.
        - Its relationships with groups.

    :param id_equipamento: Equipment identifier.

    :return: None

    :raise EquipamentoNaoExisteError: Equipment not registered.
    :raise InvalidParameterError: The equipment identifier is null or invalid.
    :raise DataBaseError: networkapi failed to access the database.
    :raise XMLError: networkapi failed to generate the response XML.
    """
    if not is_valid_int_param(id_equipamento):
        raise InvalidParameterError(
            u'O identificador do equipamento é inválido ou não foi informado.')

    url = 'equipamento/%s/' % id_equipamento

    code, map = self.submit(None, 'DELETE', url)

    return self.response(code, map)
[ "def", "remover", "(", "self", ",", "id_equipamento", ")", ":", "if", "not", "is_valid_int_param", "(", "id_equipamento", ")", ":", "raise", "InvalidParameterError", "(", "u'O identificador do equipamento é inválido ou não foi informado.')", "", "url", "=", "'equipamento/'", "+", "str", "(", "id_equipamento", ")", "+", "'/'", "code", ",", "map", "=", "self", ".", "submit", "(", "None", ",", "'DELETE'", ",", "url", ")", "return", "self", ".", "response", "(", "code", ",", "map", ")" ]
41
24.266667
async def _notify(self, message: BaseMessage, responder: Responder):
    """ Notify all callbacks that a message was received. """
    for listener in self._listeners:
        coro = listener(message, responder, self.fsm_creates_task)
        if self.fsm_creates_task:
            # The FSM schedules the coroutine itself; nothing to await here.
            continue
        self._register = await coro
[ "async", "def", "_notify", "(", "self", ",", "message", ":", "BaseMessage", ",", "responder", ":", "Responder", ")", ":", "for", "cb", "in", "self", ".", "_listeners", ":", "coro", "=", "cb", "(", "message", ",", "responder", ",", "self", ".", "fsm_creates_task", ")", "if", "not", "self", ".", "fsm_creates_task", ":", "self", ".", "_register", "=", "await", "coro" ]
36.555556
13.222222
def find_person_by_id(self, person_id):
    """doc: http://open.youku.com/docs/docs?id=87
    """
    url = 'https://openapi.youku.com/v2/persons/show.json'
    payload = {
        'client_id': self.client_id,
        'person_id': person_id,
    }
    response = requests.get(url, params=payload)
    check_error(response)
    return response.json()
[ "def", "find_person_by_id", "(", "self", ",", "person_id", ")", ":", "url", "=", "'https://openapi.youku.com/v2/persons/show.json'", "params", "=", "{", "'client_id'", ":", "self", ".", "client_id", ",", "'person_id'", ":", "person_id", "}", "r", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ")", "check_error", "(", "r", ")", "return", "r", ".", "json", "(", ")" ]
32.272727
11
def organize_models(self, outdir, force_rerun=False):
    """Organize and rename SWISS-MODEL models to a single folder with a name
    containing template information.

    Args:
        outdir (str): New directory to copy renamed models to
        force_rerun (bool): If models should be copied again even if they
            already exist

    Returns:
        dict: Dictionary of lists, UniProt IDs as the keys and new file paths
            as the values
    """
    copied = defaultdict(list)
    for uniprot_id, models in self.all_models.items():
        for model in models:
            stem = '{}_{}_{}_{}'.format(model['from'], model['to'],
                                        model['template'],
                                        model['coordinate_id'])
            # Metadata tree is sharded by the UniProt accession.
            src = op.join(self.metadata_dir, uniprot_id[:2], uniprot_id[2:4],
                          uniprot_id[4:], 'swissmodel', '{}.pdb'.format(stem))
            if not op.exists(src):
                log.warning('{}: no file {} found for model'.format(uniprot_id, src))
                continue
            new_name = '{}_{}_{}_{}.pdb'.format(uniprot_id, model['from'],
                                                model['to'],
                                                model['template'][:4])
            shutil.copy(src, op.join(outdir, new_name))
            copied[uniprot_id].append(new_name)
    return copied
[ "def", "organize_models", "(", "self", ",", "outdir", ",", "force_rerun", "=", "False", ")", ":", "uniprot_to_swissmodel", "=", "defaultdict", "(", "list", ")", "for", "u", ",", "models", "in", "self", ".", "all_models", ".", "items", "(", ")", ":", "for", "m", "in", "models", ":", "original_filename", "=", "'{}_{}_{}_{}'", ".", "format", "(", "m", "[", "'from'", "]", ",", "m", "[", "'to'", "]", ",", "m", "[", "'template'", "]", ",", "m", "[", "'coordinate_id'", "]", ")", "file_path", "=", "op", ".", "join", "(", "self", ".", "metadata_dir", ",", "u", "[", ":", "2", "]", ",", "u", "[", "2", ":", "4", "]", ",", "u", "[", "4", ":", "]", ",", "'swissmodel'", ",", "'{}.pdb'", ".", "format", "(", "original_filename", ")", ")", "if", "op", ".", "exists", "(", "file_path", ")", ":", "new_filename", "=", "'{}_{}_{}_{}.pdb'", ".", "format", "(", "u", ",", "m", "[", "'from'", "]", ",", "m", "[", "'to'", "]", ",", "m", "[", "'template'", "]", "[", ":", "4", "]", ")", "shutil", ".", "copy", "(", "file_path", ",", "op", ".", "join", "(", "outdir", ",", "new_filename", ")", ")", "uniprot_to_swissmodel", "[", "u", "]", ".", "append", "(", "new_filename", ")", "else", ":", "log", ".", "warning", "(", "'{}: no file {} found for model'", ".", "format", "(", "u", ",", "file_path", ")", ")", "return", "uniprot_to_swissmodel" ]
50.461538
27.692308
def _get_least_permissions_aces(self, resources): """ Get ACEs with the least permissions that fit all resources. To have access to polymorph on N collections, user MUST have access to all of them. If this is true, ACEs are returned, that allows 'view' permissions to current request principals. Otherwise None is returned thus blocking all permissions except those defined in `nefertari.acl.BaseACL`. :param resources: :type resources: list of Resource instances :return: Generated Pyramid ACEs or None :rtype: tuple or None """ factories = [res.view._factory for res in resources] contexts = [factory(self.request) for factory in factories] for ctx in contexts: if not self.request.has_permission('view', ctx): return else: return [ (Allow, principal, 'view') for principal in self.request.effective_principals ]
[ "def", "_get_least_permissions_aces", "(", "self", ",", "resources", ")", ":", "factories", "=", "[", "res", ".", "view", ".", "_factory", "for", "res", "in", "resources", "]", "contexts", "=", "[", "factory", "(", "self", ".", "request", ")", "for", "factory", "in", "factories", "]", "for", "ctx", "in", "contexts", ":", "if", "not", "self", ".", "request", ".", "has_permission", "(", "'view'", ",", "ctx", ")", ":", "return", "else", ":", "return", "[", "(", "Allow", ",", "principal", ",", "'view'", ")", "for", "principal", "in", "self", ".", "request", ".", "effective_principals", "]" ]
39.88
19.8
def device_filter(self): """The device filter to use. :rtype: dict """ if isinstance(self._device_filter, str): return self._decode_query(self._device_filter) return self._device_filter
[ "def", "device_filter", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "_device_filter", ",", "str", ")", ":", "return", "self", ".", "_decode_query", "(", "self", ".", "_device_filter", ")", "return", "self", ".", "_device_filter" ]
28.875
13.5
def key_hash_algo(self, value): """ A unicode string of the hash algorithm to use when creating the certificate identifier - "sha1" (default), or "sha256". """ if value not in set(['sha1', 'sha256']): raise ValueError(_pretty_message( ''' hash_algo must be one of "sha1", "sha256", not %s ''', repr(value) )) self._key_hash_algo = value
[ "def", "key_hash_algo", "(", "self", ",", "value", ")", ":", "if", "value", "not", "in", "set", "(", "[", "'sha1'", ",", "'sha256'", "]", ")", ":", "raise", "ValueError", "(", "_pretty_message", "(", "'''\n hash_algo must be one of \"sha1\", \"sha256\", not %s\n '''", ",", "repr", "(", "value", ")", ")", ")", "self", ".", "_key_hash_algo", "=", "value" ]
30.666667
17.733333
def load(self, filename): '''load rally and rally_land points from a file. returns number of points loaded''' f = open(filename, mode='r') self.clear() for line in f: if line.startswith('#'): continue line = line.strip() if not line: continue a = line.split() if len(a) != 7: raise MAVRallyError("invalid rally file line: %s" % line) if (a[0].lower() == "rally"): self.create_and_append_rally_point(float(a[1]) * 1e7, float(a[2]) * 1e7, float(a[3]), float(a[4]), float(a[5]) * 100.0, int(a[6])) f.close() return len(self.rally_points)
[ "def", "load", "(", "self", ",", "filename", ")", ":", "f", "=", "open", "(", "filename", ",", "mode", "=", "'r'", ")", "self", ".", "clear", "(", ")", "for", "line", "in", "f", ":", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "continue", "line", "=", "line", ".", "strip", "(", ")", "if", "not", "line", ":", "continue", "a", "=", "line", ".", "split", "(", ")", "if", "len", "(", "a", ")", "!=", "7", ":", "raise", "MAVRallyError", "(", "\"invalid rally file line: %s\"", "%", "line", ")", "if", "(", "a", "[", "0", "]", ".", "lower", "(", ")", "==", "\"rally\"", ")", ":", "self", ".", "create_and_append_rally_point", "(", "float", "(", "a", "[", "1", "]", ")", "*", "1e7", ",", "float", "(", "a", "[", "2", "]", ")", "*", "1e7", ",", "float", "(", "a", "[", "3", "]", ")", ",", "float", "(", "a", "[", "4", "]", ")", ",", "float", "(", "a", "[", "5", "]", ")", "*", "100.0", ",", "int", "(", "a", "[", "6", "]", ")", ")", "f", ".", "close", "(", ")", "return", "len", "(", "self", ".", "rally_points", ")" ]
38
18.9
def pick(rest): "Pick between a few options" question = rest.strip() choices = util.splitem(question) if len(choices) == 1: return "I can't pick if you give me only one choice!" else: pick = random.choice(choices) certainty = random.sample(phrases.certainty_opts, 1)[0] return "%s... %s %s" % (pick, certainty, pick)
[ "def", "pick", "(", "rest", ")", ":", "question", "=", "rest", ".", "strip", "(", ")", "choices", "=", "util", ".", "splitem", "(", "question", ")", "if", "len", "(", "choices", ")", "==", "1", ":", "return", "\"I can't pick if you give me only one choice!\"", "else", ":", "pick", "=", "random", ".", "choice", "(", "choices", ")", "certainty", "=", "random", ".", "sample", "(", "phrases", ".", "certainty_opts", ",", "1", ")", "[", "0", "]", "return", "\"%s... %s %s\"", "%", "(", "pick", ",", "certainty", ",", "pick", ")" ]
32
16
def run_forever(self, start_at='once'): """ Starts the scheduling engine @param start_at: 'once' -> start immediately 'next_minute' -> start at the first second of the next minutes 'next_hour' -> start 00:00 (min) next hour 'tomorrow' -> start at 0h tomorrow """ if start_at not in ('once', 'next_minute', 'next_hour', 'tomorrow'): raise ValueError("start_at parameter must be one of these values: 'once', 'next_minute', 'next_hour', 'tomorrow'") if start_at != 'once': wait_until(start_at) try: task_pool = self.run_tasks() while self.running: gevent.sleep(seconds=1) task_pool.join(timeout=30) task_pool.kill() except KeyboardInterrupt: # https://github.com/surfly/gevent/issues/85 task_pool.closed = True task_pool.kill() logging.getLogger(self.logger_name).info('Time scheduler quits')
[ "def", "run_forever", "(", "self", ",", "start_at", "=", "'once'", ")", ":", "if", "start_at", "not", "in", "(", "'once'", ",", "'next_minute'", ",", "'next_hour'", ",", "'tomorrow'", ")", ":", "raise", "ValueError", "(", "\"start_at parameter must be one of these values: 'once', 'next_minute', 'next_hour', 'tomorrow'\"", ")", "if", "start_at", "!=", "'once'", ":", "wait_until", "(", "start_at", ")", "try", ":", "task_pool", "=", "self", ".", "run_tasks", "(", ")", "while", "self", ".", "running", ":", "gevent", ".", "sleep", "(", "seconds", "=", "1", ")", "task_pool", ".", "join", "(", "timeout", "=", "30", ")", "task_pool", ".", "kill", "(", ")", "except", "KeyboardInterrupt", ":", "# https://github.com/surfly/gevent/issues/85", "task_pool", ".", "closed", "=", "True", "task_pool", ".", "kill", "(", ")", "logging", ".", "getLogger", "(", "self", ".", "logger_name", ")", ".", "info", "(", "'Time scheduler quits'", ")" ]
43.75
17.083333
def get_payload(self): """Return Payload.""" return bytes( [self.major_version >> 8 & 255, self.major_version & 255, self.minor_version >> 8 & 255, self.minor_version & 255])
[ "def", "get_payload", "(", "self", ")", ":", "return", "bytes", "(", "[", "self", ".", "major_version", ">>", "8", "&", "255", ",", "self", ".", "major_version", "&", "255", ",", "self", ".", "minor_version", ">>", "8", "&", "255", ",", "self", ".", "minor_version", "&", "255", "]", ")" ]
42.2
19.2
def _iter_backtrack(ex, rand=False): """Iterate through all satisfying points using backtrack algorithm.""" if ex is One: yield dict() elif ex is not Zero: if rand: v = random.choice(ex.inputs) if rand else ex.top else: v = ex.top points = [{v: 0}, {v: 1}] if rand: random.shuffle(points) for point in points: for soln in _iter_backtrack(ex.restrict(point), rand): soln.update(point) yield soln
[ "def", "_iter_backtrack", "(", "ex", ",", "rand", "=", "False", ")", ":", "if", "ex", "is", "One", ":", "yield", "dict", "(", ")", "elif", "ex", "is", "not", "Zero", ":", "if", "rand", ":", "v", "=", "random", ".", "choice", "(", "ex", ".", "inputs", ")", "if", "rand", "else", "ex", ".", "top", "else", ":", "v", "=", "ex", ".", "top", "points", "=", "[", "{", "v", ":", "0", "}", ",", "{", "v", ":", "1", "}", "]", "if", "rand", ":", "random", ".", "shuffle", "(", "points", ")", "for", "point", "in", "points", ":", "for", "soln", "in", "_iter_backtrack", "(", "ex", ".", "restrict", "(", "point", ")", ",", "rand", ")", ":", "soln", ".", "update", "(", "point", ")", "yield", "soln" ]
32.4375
15.4375
def obj_name(self, obj: Union[str, Element]) -> str: """ Return the formatted name used for the supplied definition """ if isinstance(obj, str): obj = self.obj_for(obj) if isinstance(obj, SlotDefinition): return underscore(self.aliased_slot_name(obj)) else: return camelcase(obj if isinstance(obj, str) else obj.name)
[ "def", "obj_name", "(", "self", ",", "obj", ":", "Union", "[", "str", ",", "Element", "]", ")", "->", "str", ":", "if", "isinstance", "(", "obj", ",", "str", ")", ":", "obj", "=", "self", ".", "obj_for", "(", "obj", ")", "if", "isinstance", "(", "obj", ",", "SlotDefinition", ")", ":", "return", "underscore", "(", "self", ".", "aliased_slot_name", "(", "obj", ")", ")", "else", ":", "return", "camelcase", "(", "obj", "if", "isinstance", "(", "obj", ",", "str", ")", "else", "obj", ".", "name", ")" ]
47.25
13
def experiment_details_csv(request, pk): """This view generates a csv output file of an experiment. The view writes to a csv table the animal, genotype, age (in days), assay and values.""" experiment = get_object_or_404(Experiment, pk=pk) response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename=experiment.csv' writer = csv.writer(response) writer.writerow(["Animal","Cage", "Strain", "Genotype", "Gender","Age", "Assay", "Values", "Feeding", "Experiment Date", "Treatment"]) for measurement in experiment.measurement_set.iterator(): writer.writerow([ measurement.animal, measurement.animal.Cage, measurement.animal.Strain, measurement.animal.Genotype, measurement.animal.Gender, measurement.age(), measurement.assay, measurement.values, measurement.experiment.feeding_state, measurement.experiment.date, measurement.animal.treatment_set.all() ]) return response
[ "def", "experiment_details_csv", "(", "request", ",", "pk", ")", ":", "experiment", "=", "get_object_or_404", "(", "Experiment", ",", "pk", "=", "pk", ")", "response", "=", "HttpResponse", "(", "content_type", "=", "'text/csv'", ")", "response", "[", "'Content-Disposition'", "]", "=", "'attachment; filename=experiment.csv'", "writer", "=", "csv", ".", "writer", "(", "response", ")", "writer", ".", "writerow", "(", "[", "\"Animal\"", ",", "\"Cage\"", ",", "\"Strain\"", ",", "\"Genotype\"", ",", "\"Gender\"", ",", "\"Age\"", ",", "\"Assay\"", ",", "\"Values\"", ",", "\"Feeding\"", ",", "\"Experiment Date\"", ",", "\"Treatment\"", "]", ")", "for", "measurement", "in", "experiment", ".", "measurement_set", ".", "iterator", "(", ")", ":", "writer", ".", "writerow", "(", "[", "measurement", ".", "animal", ",", "measurement", ".", "animal", ".", "Cage", ",", "measurement", ".", "animal", ".", "Strain", ",", "measurement", ".", "animal", ".", "Genotype", ",", "measurement", ".", "animal", ".", "Gender", ",", "measurement", ".", "age", "(", ")", ",", "measurement", ".", "assay", ",", "measurement", ".", "values", ",", "measurement", ".", "experiment", ".", "feeding_state", ",", "measurement", ".", "experiment", ".", "date", ",", "measurement", ".", "animal", ".", "treatment_set", ".", "all", "(", ")", "]", ")", "return", "response" ]
41.958333
16.75
def path_enhance(R, n, window='hann', max_ratio=2.0, min_ratio=None, n_filters=7, zero_mean=False, clip=True, **kwargs): '''Multi-angle path enhancement for self- and cross-similarity matrices. This function convolves multiple diagonal smoothing filters with a self-similarity (or recurrence) matrix R, and aggregates the result by an element-wise maximum. Technically, the output is a matrix R_smooth such that `R_smooth[i, j] = max_theta (R * filter_theta)[i, j]` where `*` denotes 2-dimensional convolution, and `filter_theta` is a smoothing filter at orientation theta. This is intended to provide coherent temporal smoothing of self-similarity matrices when there are changes in tempo. Smoothing filters are generated at evenly spaced orientations between min_ratio and max_ratio. This function is inspired by the multi-angle path enhancement of [1]_, but differs by modeling tempo differences in the space of similarity matrices rather than re-sampling the underlying features prior to generating the self-similarity matrix. .. [1] Müller, Meinard and Frank Kurth. "Enhancing similarity matrices for music audio analysis." 2006 IEEE International Conference on Acoustics Speech and Signal Processing Proceedings. Vol. 5. IEEE, 2006. .. note:: if using recurrence_matrix to construct the input similarity matrix, be sure to include the main diagonal by setting `self=True`. Otherwise, the diagonal will be suppressed, and this is likely to produce discontinuities which will pollute the smoothing filter response. Parameters ---------- R : np.ndarray The self- or cross-similarity matrix to be smoothed. Note: sparse inputs are not supported. n : int > 0 The length of the smoothing filter window : window specification The type of smoothing filter to use. See `filters.get_window` for more information on window specification formats. max_ratio : float > 0 The maximum tempo ratio to support min_ratio : float > 0 The minimum tempo ratio to support. 
If not provided, it will default to `1/max_ratio` n_filters : int >= 1 The number of different smoothing filters to use, evenly spaced between `min_ratio` and `max_ratio`. If `min_ratio = 1/max_ratio` (the default), using an odd number of filters will ensure that the main diagonal (ratio=1) is included. zero_mean : bool By default, the smoothing filters are non-negative and sum to one (i.e. are averaging filters). If `zero_mean=True`, then the smoothing filters are made to sum to zero by subtracting a constant value from the non-diagonal coordinates of the filter. This is primarily useful for suppressing blocks while enhancing diagonals. clip : bool If True, the smoothed similarity matrix will be thresholded at 0, and will not contain negative entries. kwargs : additional keyword arguments Additional arguments to pass to `scipy.ndimage.convolve` Returns ------- R_smooth : np.ndarray, shape=R.shape The smoothed self- or cross-similarity matrix See Also -------- filters.diagonal_filter recurrence_matrix Examples -------- Use a 51-frame diagonal smoothing filter to enhance paths in a recurrence matrix >>> y, sr = librosa.load(librosa.util.example_audio_file(), duration=30) >>> chroma = librosa.feature.chroma_cqt(y=y, sr=sr) >>> rec = librosa.segment.recurrence_matrix(chroma, mode='affinity', self=True) >>> rec_smooth = librosa.segment.path_enhance(rec, 51, window='hann', n_filters=7) Plot the recurrence matrix before and after smoothing >>> import matplotlib.pyplot as plt >>> plt.figure(figsize=(8, 4)) >>> plt.subplot(1,2,1) >>> librosa.display.specshow(rec, x_axis='time', y_axis='time') >>> plt.title('Unfiltered recurrence') >>> plt.subplot(1,2,2) >>> librosa.display.specshow(rec_smooth, x_axis='time', y_axis='time') >>> plt.title('Multi-angle enhanced recurrence') >>> plt.tight_layout() ''' if min_ratio is None: min_ratio = 1./max_ratio elif min_ratio > max_ratio: raise ParameterError('min_ratio={} cannot exceed max_ratio={}'.format(min_ratio, max_ratio)) R_smooth = 
None for ratio in np.logspace(np.log2(min_ratio), np.log2(max_ratio), num=n_filters, base=2): kernel = diagonal_filter(window, n, slope=ratio, zero_mean=zero_mean) if R_smooth is None: R_smooth = scipy.ndimage.convolve(R, kernel, **kwargs) else: # Compute the point-wise maximum in-place np.maximum(R_smooth, scipy.ndimage.convolve(R, kernel, **kwargs), out=R_smooth) if clip: # Clip the output in-place np.clip(R_smooth, 0, None, out=R_smooth) return R_smooth
[ "def", "path_enhance", "(", "R", ",", "n", ",", "window", "=", "'hann'", ",", "max_ratio", "=", "2.0", ",", "min_ratio", "=", "None", ",", "n_filters", "=", "7", ",", "zero_mean", "=", "False", ",", "clip", "=", "True", ",", "*", "*", "kwargs", ")", ":", "if", "min_ratio", "is", "None", ":", "min_ratio", "=", "1.", "/", "max_ratio", "elif", "min_ratio", ">", "max_ratio", ":", "raise", "ParameterError", "(", "'min_ratio={} cannot exceed max_ratio={}'", ".", "format", "(", "min_ratio", ",", "max_ratio", ")", ")", "R_smooth", "=", "None", "for", "ratio", "in", "np", ".", "logspace", "(", "np", ".", "log2", "(", "min_ratio", ")", ",", "np", ".", "log2", "(", "max_ratio", ")", ",", "num", "=", "n_filters", ",", "base", "=", "2", ")", ":", "kernel", "=", "diagonal_filter", "(", "window", ",", "n", ",", "slope", "=", "ratio", ",", "zero_mean", "=", "zero_mean", ")", "if", "R_smooth", "is", "None", ":", "R_smooth", "=", "scipy", ".", "ndimage", ".", "convolve", "(", "R", ",", "kernel", ",", "*", "*", "kwargs", ")", "else", ":", "# Compute the point-wise maximum in-place", "np", ".", "maximum", "(", "R_smooth", ",", "scipy", ".", "ndimage", ".", "convolve", "(", "R", ",", "kernel", ",", "*", "*", "kwargs", ")", ",", "out", "=", "R_smooth", ")", "if", "clip", ":", "# Clip the output in-place", "np", ".", "clip", "(", "R_smooth", ",", "0", ",", "None", ",", "out", "=", "R_smooth", ")", "return", "R_smooth" ]
37.869231
29.361538
def op(name, data, bucket_count=None, display_name=None, description=None, collections=None): """Create a legacy histogram summary op. Arguments: name: A unique name for the generated summary node. data: A `Tensor` of any shape. Must be castable to `float64`. bucket_count: Optional positive `int`. The output will have this many buckets, except in two edge cases. If there is no data, then there are no buckets. If there is data but all points have the same value, then there is one bucket whose left and right endpoints are the same. display_name: Optional name for this summary in TensorBoard, as a constant `str`. Defaults to `name`. description: Optional long-form description for this summary, as a constant `str`. Markdown is supported. Defaults to empty. collections: Optional list of graph collections keys. The new summary op is added to these collections. Defaults to `[Graph Keys.SUMMARIES]`. Returns: A TensorFlow summary op. """ # TODO(nickfelt): remove on-demand imports once dep situation is fixed. import tensorflow.compat.v1 as tf if display_name is None: display_name = name summary_metadata = metadata.create_summary_metadata( display_name=display_name, description=description) with tf.name_scope(name): tensor = _buckets(data, bucket_count=bucket_count) return tf.summary.tensor_summary(name='histogram_summary', tensor=tensor, collections=collections, summary_metadata=summary_metadata)
[ "def", "op", "(", "name", ",", "data", ",", "bucket_count", "=", "None", ",", "display_name", "=", "None", ",", "description", "=", "None", ",", "collections", "=", "None", ")", ":", "# TODO(nickfelt): remove on-demand imports once dep situation is fixed.", "import", "tensorflow", ".", "compat", ".", "v1", "as", "tf", "if", "display_name", "is", "None", ":", "display_name", "=", "name", "summary_metadata", "=", "metadata", ".", "create_summary_metadata", "(", "display_name", "=", "display_name", ",", "description", "=", "description", ")", "with", "tf", ".", "name_scope", "(", "name", ")", ":", "tensor", "=", "_buckets", "(", "data", ",", "bucket_count", "=", "bucket_count", ")", "return", "tf", ".", "summary", ".", "tensor_summary", "(", "name", "=", "'histogram_summary'", ",", "tensor", "=", "tensor", ",", "collections", "=", "collections", ",", "summary_metadata", "=", "summary_metadata", ")" ]
40.775
20.4
def _rotate_context(context, direction): """Moves the current position to 'position' and rotates the context according to 'direction' :param context: Cairo context :param direction: Direction enum """ if direction is Direction.UP: pass elif direction is Direction.RIGHT: context.rotate(deg2rad(90)) elif direction is Direction.DOWN: context.rotate(deg2rad(180)) elif direction is Direction.LEFT: context.rotate(deg2rad(-90))
[ "def", "_rotate_context", "(", "context", ",", "direction", ")", ":", "if", "direction", "is", "Direction", ".", "UP", ":", "pass", "elif", "direction", "is", "Direction", ".", "RIGHT", ":", "context", ".", "rotate", "(", "deg2rad", "(", "90", ")", ")", "elif", "direction", "is", "Direction", ".", "DOWN", ":", "context", ".", "rotate", "(", "deg2rad", "(", "180", ")", ")", "elif", "direction", "is", "Direction", ".", "LEFT", ":", "context", ".", "rotate", "(", "deg2rad", "(", "-", "90", ")", ")" ]
37.428571
5.357143
def prepare_connection(): """Set dafault connection for ElasticSearch. .. warning:: In case of using multiprocessing/multithreading, connection will be probably initialized in the main process/thread and the same connection (socket) will be used in all processes/threads. This will cause some unexpected timeouts of pushes to Elasticsearch. So make sure that this function is called again in each process/thread to make sure that unique connection will be used. """ elasticsearch_host = getattr(settings, 'ELASTICSEARCH_HOST', 'localhost') elasticsearch_port = getattr(settings, 'ELASTICSEARCH_PORT', 9200) connections.create_connection(hosts=['{}:{}'.format(elasticsearch_host, elasticsearch_port)])
[ "def", "prepare_connection", "(", ")", ":", "elasticsearch_host", "=", "getattr", "(", "settings", ",", "'ELASTICSEARCH_HOST'", ",", "'localhost'", ")", "elasticsearch_port", "=", "getattr", "(", "settings", ",", "'ELASTICSEARCH_PORT'", ",", "9200", ")", "connections", ".", "create_connection", "(", "hosts", "=", "[", "'{}:{}'", ".", "format", "(", "elasticsearch_host", ",", "elasticsearch_port", ")", "]", ")" ]
50.666667
28.2
def _logging_callback(level, domain, message, data): """ Callback that outputs libgphoto2's logging message via Python's standard logging facilities. :param level: libgphoto2 logging level :param domain: component the message originates from :param message: logging message :param data: Other data in the logging record (unused) """ domain = ffi.string(domain).decode() message = ffi.string(message).decode() logger = LOGGER.getChild(domain) if level not in LOG_LEVELS: return logger.log(LOG_LEVELS[level], message)
[ "def", "_logging_callback", "(", "level", ",", "domain", ",", "message", ",", "data", ")", ":", "domain", "=", "ffi", ".", "string", "(", "domain", ")", ".", "decode", "(", ")", "message", "=", "ffi", ".", "string", "(", "message", ")", ".", "decode", "(", ")", "logger", "=", "LOGGER", ".", "getChild", "(", "domain", ")", "if", "level", "not", "in", "LOG_LEVELS", ":", "return", "logger", ".", "log", "(", "LOG_LEVELS", "[", "level", "]", ",", "message", ")" ]
35.5
11.6875
def _element_get_id(self, element): """Get id of reaction or species element. In old levels the name is used as the id. This method returns the correct attribute depending on the level. """ if self._reader._level > 1: entry_id = element.get('id') else: entry_id = element.get('name') return entry_id
[ "def", "_element_get_id", "(", "self", ",", "element", ")", ":", "if", "self", ".", "_reader", ".", "_level", ">", "1", ":", "entry_id", "=", "element", ".", "get", "(", "'id'", ")", "else", ":", "entry_id", "=", "element", ".", "get", "(", "'name'", ")", "return", "entry_id" ]
33.636364
12.545455
def engagement_context(self): """ Access the engagement_context :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextList :rtype: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextList """ if self._engagement_context is None: self._engagement_context = EngagementContextList( self._version, flow_sid=self._solution['flow_sid'], engagement_sid=self._solution['sid'], ) return self._engagement_context
[ "def", "engagement_context", "(", "self", ")", ":", "if", "self", ".", "_engagement_context", "is", "None", ":", "self", ".", "_engagement_context", "=", "EngagementContextList", "(", "self", ".", "_version", ",", "flow_sid", "=", "self", ".", "_solution", "[", "'flow_sid'", "]", ",", "engagement_sid", "=", "self", ".", "_solution", "[", "'sid'", "]", ",", ")", "return", "self", ".", "_engagement_context" ]
40.714286
18
def read_cyclic_can_msg(self, channel, count): """ Reads back the list of CAN messages for automatically sending. :param int channel: CAN channel, to be used (:data:`Channel.CHANNEL_CH0` or :data:`Channel.CHANNEL_CH1`). :param int count: The number of cyclic CAN messages to be received. :return: List of received CAN messages (up to 16, see structure :class:`CanMsg`). :rtype: list(CanMsg) """ c_channel = BYTE(channel) c_can_msg = (CanMsg * count)() c_count = DWORD(count) UcanReadCyclicCanMsg(self._handle, byref(c_channel), c_can_msg, c_count) return c_can_msg[:c_count.value]
[ "def", "read_cyclic_can_msg", "(", "self", ",", "channel", ",", "count", ")", ":", "c_channel", "=", "BYTE", "(", "channel", ")", "c_can_msg", "=", "(", "CanMsg", "*", "count", ")", "(", ")", "c_count", "=", "DWORD", "(", "count", ")", "UcanReadCyclicCanMsg", "(", "self", ".", "_handle", ",", "byref", "(", "c_channel", ")", ",", "c_can_msg", ",", "c_count", ")", "return", "c_can_msg", "[", ":", "c_count", ".", "value", "]" ]
47.428571
21.714286
def get_sv_chroms(items, exclude_file): """Retrieve chromosomes to process on, avoiding extra skipped chromosomes. """ exclude_regions = {} for region in pybedtools.BedTool(exclude_file): if int(region.start) == 0: exclude_regions[region.chrom] = int(region.end) out = [] with pysam.Samfile(dd.get_align_bam(items[0]) or dd.get_work_bam(items[0]))as pysam_work_bam: for chrom, length in zip(pysam_work_bam.references, pysam_work_bam.lengths): exclude_length = exclude_regions.get(chrom, 0) if exclude_length < length: out.append(chrom) return out
[ "def", "get_sv_chroms", "(", "items", ",", "exclude_file", ")", ":", "exclude_regions", "=", "{", "}", "for", "region", "in", "pybedtools", ".", "BedTool", "(", "exclude_file", ")", ":", "if", "int", "(", "region", ".", "start", ")", "==", "0", ":", "exclude_regions", "[", "region", ".", "chrom", "]", "=", "int", "(", "region", ".", "end", ")", "out", "=", "[", "]", "with", "pysam", ".", "Samfile", "(", "dd", ".", "get_align_bam", "(", "items", "[", "0", "]", ")", "or", "dd", ".", "get_work_bam", "(", "items", "[", "0", "]", ")", ")", "as", "pysam_work_bam", ":", "for", "chrom", ",", "length", "in", "zip", "(", "pysam_work_bam", ".", "references", ",", "pysam_work_bam", ".", "lengths", ")", ":", "exclude_length", "=", "exclude_regions", ".", "get", "(", "chrom", ",", "0", ")", "if", "exclude_length", "<", "length", ":", "out", ".", "append", "(", "chrom", ")", "return", "out" ]
44.928571
16.714286
def PatchAt(cls, n, module, method_wrapper=None, module_alias=None, method_name_modifier=utils.identity, blacklist_predicate=_False, whitelist_predicate=_True, return_type_predicate=_None, getmembers_predicate=inspect.isfunction, admit_private=False, explanation=""): """ This classmethod lets you easily patch all of functions/callables from a module or class as methods a Builder class. **Arguments** * **n** : the position the the object being piped will take in the arguments when the function being patched is applied. See `RegisterMethod` and `ThenAt`. * **module** : a module or class from which the functions/methods/callables will be taken. * `module_alias = None` : an optional alias for the module used for documentation purposes. * `method_name_modifier = lambda f_name: None` : a function that can modify the name of the method will take. If `None` the name of the function will be used. * `blacklist_predicate = lambda f_name: name[0] != "_"` : A predicate that determines which functions are banned given their name. By default it excludes all function whose name start with `'_'`. `blacklist_predicate` can also be of type list, in which case all names contained in this list will be banned. * `whitelist_predicate = lambda f_name: True` : A predicate that determines which functions are admitted given their name. By default it include any function. `whitelist_predicate` can also be of type list, in which case only names contained in this list will be admitted. You can use both `blacklist_predicate` and `whitelist_predicate` at the same time. * `return_type_predicate = lambda f_name: None` : a predicate that determines the `_return_type` of the Builder. By default it will always return `None`. See `phi.builder.Builder.ThenAt`. * `getmembers_predicate = inspect.isfunction` : a predicate that determines what type of elements/members will be fetched by the `inspect` module, defaults to [inspect.isfunction](https://docs.python.org/2/library/inspect.html#inspect.isfunction). 
See [getmembers](https://docs.python.org/2/library/inspect.html#inspect.getmembers). **Examples** Lets patch ALL the main functions from numpy into a custom builder! from phi import PythonBuilder #or Builder import numpy as np class NumpyBuilder(PythonBuilder): #or Builder "A Builder for numpy functions!" pass NumpyBuilder.PatchAt(1, np) N = NumpyBuilder(lambda x: x) Thats it! Although a serious patch would involve filtering out functions that don't take arrays. Another common task would be to use `NumpyBuilder.PatchAt(2, ...)` (`PatchAt(n, ..)` in general) when convenient to send the object being pipe to the relevant argument of the function. The previous is usually done with and a combination of `whitelist_predicate`s and `blacklist_predicate`s on `PatchAt(1, ...)` and `PatchAt(2, ...)` to filter or include the approriate functions on each kind of patch. Given the previous code we could now do import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = N.Pipe( x, N .dot(y) .add(x) .transpose() .sum(axis=1) ) Which is strictly equivalent to import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = np.dot(x, y) z = np.add(z, x) z = np.transpose(z) z = np.sum(z, axis=1) The thing to notice is that with the `NumpyBuilder` we avoid the repetitive and needless passing and reassigment of the `z` variable, this removes a lot of noise from our code. """ _rtp = return_type_predicate return_type_predicate = (lambda x: _rtp) if inspect.isclass(_rtp) and issubclass(_rtp, Builder) else _rtp module_name = module_alias if module_alias else module.__name__ + '.' 
patch_members = _get_patch_members(module, blacklist_predicate=blacklist_predicate, whitelist_predicate=whitelist_predicate, getmembers_predicate=getmembers_predicate, admit_private=admit_private) for name, f in patch_members: wrapped = None if method_wrapper: g = method_wrapper(f) wrapped = f else: g = f cls.RegisterAt(n, g, module_name, wrapped=wrapped, _return_type=return_type_predicate(name), alias=method_name_modifier(name), explanation=explanation)
[ "def", "PatchAt", "(", "cls", ",", "n", ",", "module", ",", "method_wrapper", "=", "None", ",", "module_alias", "=", "None", ",", "method_name_modifier", "=", "utils", ".", "identity", ",", "blacklist_predicate", "=", "_False", ",", "whitelist_predicate", "=", "_True", ",", "return_type_predicate", "=", "_None", ",", "getmembers_predicate", "=", "inspect", ".", "isfunction", ",", "admit_private", "=", "False", ",", "explanation", "=", "\"\"", ")", ":", "_rtp", "=", "return_type_predicate", "return_type_predicate", "=", "(", "lambda", "x", ":", "_rtp", ")", "if", "inspect", ".", "isclass", "(", "_rtp", ")", "and", "issubclass", "(", "_rtp", ",", "Builder", ")", "else", "_rtp", "module_name", "=", "module_alias", "if", "module_alias", "else", "module", ".", "__name__", "+", "'.'", "patch_members", "=", "_get_patch_members", "(", "module", ",", "blacklist_predicate", "=", "blacklist_predicate", ",", "whitelist_predicate", "=", "whitelist_predicate", ",", "getmembers_predicate", "=", "getmembers_predicate", ",", "admit_private", "=", "admit_private", ")", "for", "name", ",", "f", "in", "patch_members", ":", "wrapped", "=", "None", "if", "method_wrapper", ":", "g", "=", "method_wrapper", "(", "f", ")", "wrapped", "=", "f", "else", ":", "g", "=", "f", "cls", ".", "RegisterAt", "(", "n", ",", "g", ",", "module_name", ",", "wrapped", "=", "wrapped", ",", "_return_type", "=", "return_type_predicate", "(", "name", ")", ",", "alias", "=", "method_name_modifier", "(", "name", ")", ",", "explanation", "=", "explanation", ")" ]
57.266667
54.76
def embed(parent_locals=None, parent_globals=None, exec_lines=None, remove_pyqt_hook=True, N=0): """ Starts interactive session. Similar to keyboard command in matlab. Wrapper around IPython.embed """ import utool as ut from functools import partial import IPython if parent_globals is None: parent_globals = get_parent_frame(N=N).f_globals if parent_locals is None: parent_locals = get_parent_frame(N=N).f_locals stackdepth = N # NOQA getframe = partial(ut.get_parent_frame, N=N) # NOQA # exec(execstr_dict(parent_globals, 'parent_globals')) # exec(execstr_dict(parent_locals, 'parent_locals')) print('') print('================') print(ut.bubbletext('EMBEDDING')) print('================') print('[util] embedding') try: if remove_pyqt_hook: try: import guitool guitool.remove_pyqt_input_hook() except (ImportError, ValueError, AttributeError) as ex: #print(ex) printex(ex, iswarning=True) pass # make qt not loop forever (I had qflag loop forever with this off) except ImportError as ex: print(ex) #from IPython.config.loader import Config # cfg = Config() #config_dict = {} #if exec_lines is not None: # config_dict['exec_lines'] = exec_lines #IPython.embed(**config_dict) print('[util] Get stack location with: ') print('[util] ut.get_parent_frame(N=8).f_code.co_name') print('[util] set EXIT_NOW or qqq to True(ish) to hard exit on unembed') #print('set iup to True to draw plottool stuff') print('[util] call %pylab qt4 to get plottool stuff working') once = True # Allow user to set iup and redo the loop while once or vars().get('iup', False): if not once: # SUPER HACKY WAY OF GETTING FIGURES ON THE SCREEN BETWEEN UPDATES #vars()['iup'] = False # ALL YOU NEED TO DO IS %pylab qt4 print('re-emebeding') #import plottool as pt #pt.update() #(pt.present()) for _ in range(100): time.sleep(.01) once = False #vars().get('iup', False): print('[util] calling IPython.embed()') """ Notes: /usr/local/lib/python2.7/dist-packages/IPython/terminal/embed.py 
IPython.terminal.embed.InteractiveShellEmbed # instance comes from IPython.config.configurable.SingletonConfigurable.instance """ #c = IPython.Config() #c.InteractiveShellApp.exec_lines = [ # '%pylab qt4', # '%gui qt4', # "print 'System Ready!'", #] #IPython.embed(config=c) parent_ns = parent_globals.copy() parent_ns.update(parent_locals) locals().update(parent_ns) try: IPython.embed() except RuntimeError as ex: ut.printex(ex, 'Failed to open ipython') #config = IPython.terminal.ipapp.load_default_config() #config.InteractiveShellEmbed = config.TerminalInteractiveShell #module = sys.modules[parent_globals['__name__']] #config['module'] = module #config['module'] = module #embed2(stack_depth=N + 2 + 1) #IPython.embed(config=config) #IPython.embed(config=config) #IPython.embed(module=module) # Exit python immediately if specifed if vars().get('EXIT_NOW', False) or vars().get('qqq', False): print('[utool.embed] EXIT_NOW specified') sys.exit(1)
[ "def", "embed", "(", "parent_locals", "=", "None", ",", "parent_globals", "=", "None", ",", "exec_lines", "=", "None", ",", "remove_pyqt_hook", "=", "True", ",", "N", "=", "0", ")", ":", "import", "utool", "as", "ut", "from", "functools", "import", "partial", "import", "IPython", "if", "parent_globals", "is", "None", ":", "parent_globals", "=", "get_parent_frame", "(", "N", "=", "N", ")", ".", "f_globals", "if", "parent_locals", "is", "None", ":", "parent_locals", "=", "get_parent_frame", "(", "N", "=", "N", ")", ".", "f_locals", "stackdepth", "=", "N", "# NOQA", "getframe", "=", "partial", "(", "ut", ".", "get_parent_frame", ",", "N", "=", "N", ")", "# NOQA", "# exec(execstr_dict(parent_globals, 'parent_globals'))", "# exec(execstr_dict(parent_locals, 'parent_locals'))", "print", "(", "''", ")", "print", "(", "'================'", ")", "print", "(", "ut", ".", "bubbletext", "(", "'EMBEDDING'", ")", ")", "print", "(", "'================'", ")", "print", "(", "'[util] embedding'", ")", "try", ":", "if", "remove_pyqt_hook", ":", "try", ":", "import", "guitool", "guitool", ".", "remove_pyqt_input_hook", "(", ")", "except", "(", "ImportError", ",", "ValueError", ",", "AttributeError", ")", "as", "ex", ":", "#print(ex)", "printex", "(", "ex", ",", "iswarning", "=", "True", ")", "pass", "# make qt not loop forever (I had qflag loop forever with this off)", "except", "ImportError", "as", "ex", ":", "print", "(", "ex", ")", "#from IPython.config.loader import Config", "# cfg = Config()", "#config_dict = {}", "#if exec_lines is not None:", "# config_dict['exec_lines'] = exec_lines", "#IPython.embed(**config_dict)", "print", "(", "'[util] Get stack location with: '", ")", "print", "(", "'[util] ut.get_parent_frame(N=8).f_code.co_name'", ")", "print", "(", "'[util] set EXIT_NOW or qqq to True(ish) to hard exit on unembed'", ")", "#print('set iup to True to draw plottool stuff')", "print", "(", "'[util] call %pylab qt4 to get plottool stuff working'", ")", "once", "=", 
"True", "# Allow user to set iup and redo the loop", "while", "once", "or", "vars", "(", ")", ".", "get", "(", "'iup'", ",", "False", ")", ":", "if", "not", "once", ":", "# SUPER HACKY WAY OF GETTING FIGURES ON THE SCREEN BETWEEN UPDATES", "#vars()['iup'] = False", "# ALL YOU NEED TO DO IS %pylab qt4", "print", "(", "'re-emebeding'", ")", "#import plottool as pt", "#pt.update()", "#(pt.present())", "for", "_", "in", "range", "(", "100", ")", ":", "time", ".", "sleep", "(", ".01", ")", "once", "=", "False", "#vars().get('iup', False):", "print", "(", "'[util] calling IPython.embed()'", ")", "\"\"\"\n Notes:\n /usr/local/lib/python2.7/dist-packages/IPython/terminal/embed.py\n IPython.terminal.embed.InteractiveShellEmbed\n\n # instance comes from IPython.config.configurable.SingletonConfigurable.instance\n \"\"\"", "#c = IPython.Config()", "#c.InteractiveShellApp.exec_lines = [", "# '%pylab qt4',", "# '%gui qt4',", "# \"print 'System Ready!'\",", "#]", "#IPython.embed(config=c)", "parent_ns", "=", "parent_globals", ".", "copy", "(", ")", "parent_ns", ".", "update", "(", "parent_locals", ")", "locals", "(", ")", ".", "update", "(", "parent_ns", ")", "try", ":", "IPython", ".", "embed", "(", ")", "except", "RuntimeError", "as", "ex", ":", "ut", ".", "printex", "(", "ex", ",", "'Failed to open ipython'", ")", "#config = IPython.terminal.ipapp.load_default_config()", "#config.InteractiveShellEmbed = config.TerminalInteractiveShell", "#module = sys.modules[parent_globals['__name__']]", "#config['module'] = module", "#config['module'] = module", "#embed2(stack_depth=N + 2 + 1)", "#IPython.embed(config=config)", "#IPython.embed(config=config)", "#IPython.embed(module=module)", "# Exit python immediately if specifed", "if", "vars", "(", ")", ".", "get", "(", "'EXIT_NOW'", ",", "False", ")", "or", "vars", "(", ")", ".", "get", "(", "'qqq'", ",", "False", ")", ":", "print", "(", "'[utool.embed] EXIT_NOW specified'", ")", "sys", ".", "exit", "(", "1", ")" ]
35.306931
15.821782
def ds_geom(ds, t_srs=None): """Return dataset bbox envelope as geom """ gt = ds.GetGeoTransform() ds_srs = get_ds_srs(ds) if t_srs is None: t_srs = ds_srs ns = ds.RasterXSize nl = ds.RasterYSize x = np.array([0, ns, ns, 0, 0], dtype=float) y = np.array([0, 0, nl, nl, 0], dtype=float) #Note: pixelToMap adds 0.5 to input coords, need to account for this here x -= 0.5 y -= 0.5 mx, my = pixelToMap(x, y, gt) geom_wkt = 'POLYGON(({0}))'.format(', '.join(['{0} {1}'.format(*a) for a in zip(mx,my)])) geom = ogr.CreateGeometryFromWkt(geom_wkt) geom.AssignSpatialReference(ds_srs) if not ds_srs.IsSame(t_srs): geom_transform(geom, t_srs) return geom
[ "def", "ds_geom", "(", "ds", ",", "t_srs", "=", "None", ")", ":", "gt", "=", "ds", ".", "GetGeoTransform", "(", ")", "ds_srs", "=", "get_ds_srs", "(", "ds", ")", "if", "t_srs", "is", "None", ":", "t_srs", "=", "ds_srs", "ns", "=", "ds", ".", "RasterXSize", "nl", "=", "ds", ".", "RasterYSize", "x", "=", "np", ".", "array", "(", "[", "0", ",", "ns", ",", "ns", ",", "0", ",", "0", "]", ",", "dtype", "=", "float", ")", "y", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "nl", ",", "nl", ",", "0", "]", ",", "dtype", "=", "float", ")", "#Note: pixelToMap adds 0.5 to input coords, need to account for this here", "x", "-=", "0.5", "y", "-=", "0.5", "mx", ",", "my", "=", "pixelToMap", "(", "x", ",", "y", ",", "gt", ")", "geom_wkt", "=", "'POLYGON(({0}))'", ".", "format", "(", "', '", ".", "join", "(", "[", "'{0} {1}'", ".", "format", "(", "*", "a", ")", "for", "a", "in", "zip", "(", "mx", ",", "my", ")", "]", ")", ")", "geom", "=", "ogr", ".", "CreateGeometryFromWkt", "(", "geom_wkt", ")", "geom", ".", "AssignSpatialReference", "(", "ds_srs", ")", "if", "not", "ds_srs", ".", "IsSame", "(", "t_srs", ")", ":", "geom_transform", "(", "geom", ",", "t_srs", ")", "return", "geom" ]
33.952381
15.285714
def _computeChart(chart, date): """ Internal function to return a new chart for a specific date using properties from old chart. """ pos = chart.pos hsys = chart.hsys IDs = [obj.id for obj in chart.objects] return Chart(date, pos, IDs=IDs, hsys=hsys)
[ "def", "_computeChart", "(", "chart", ",", "date", ")", ":", "pos", "=", "chart", ".", "pos", "hsys", "=", "chart", ".", "hsys", "IDs", "=", "[", "obj", ".", "id", "for", "obj", "in", "chart", ".", "objects", "]", "return", "Chart", "(", "date", ",", "pos", ",", "IDs", "=", "IDs", ",", "hsys", "=", "hsys", ")" ]
30.555556
11.888889
def read(self, size=-1): """Read up to *size* bytes. This function reads from the buffer multiple times until the requested number of bytes can be satisfied. This means that this function may block to wait for more data, even if some data is available. The only time a short read is returned, is on EOF or error. If *size* is not specified or negative, read until EOF. """ self._check_readable() chunks = [] bytes_read = 0 bytes_left = size while True: chunk = self._buffer.get_chunk(bytes_left) if not chunk: break chunks.append(chunk) bytes_read += len(chunk) if bytes_read == size or not chunk: break if bytes_left > 0: bytes_left -= len(chunk) # If EOF was set, always return that instead of any error. if not chunks and not self._buffer.eof and self._buffer.error: raise compat.saved_exc(self._buffer.error) return b''.join(chunks)
[ "def", "read", "(", "self", ",", "size", "=", "-", "1", ")", ":", "self", ".", "_check_readable", "(", ")", "chunks", "=", "[", "]", "bytes_read", "=", "0", "bytes_left", "=", "size", "while", "True", ":", "chunk", "=", "self", ".", "_buffer", ".", "get_chunk", "(", "bytes_left", ")", "if", "not", "chunk", ":", "break", "chunks", ".", "append", "(", "chunk", ")", "bytes_read", "+=", "len", "(", "chunk", ")", "if", "bytes_read", "==", "size", "or", "not", "chunk", ":", "break", "if", "bytes_left", ">", "0", ":", "bytes_left", "-=", "len", "(", "chunk", ")", "# If EOF was set, always return that instead of any error.", "if", "not", "chunks", "and", "not", "self", ".", "_buffer", ".", "eof", "and", "self", ".", "_buffer", ".", "error", ":", "raise", "compat", ".", "saved_exc", "(", "self", ".", "_buffer", ".", "error", ")", "return", "b''", ".", "join", "(", "chunks", ")" ]
37.964286
18.107143
def fix_config(self, options): """ Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict """ options = super(UpdateStorageValue, self).fix_config(options) opt = "storage_name" if opt not in options: options[opt] = "unknown" if opt not in self.help: self.help[opt] = "The name of the storage value to update (string)." opt = "expression" if opt not in options: options[opt] = "int({X} + 1)" if opt not in self.help: self.help[opt] = "The expression for updating the storage value; use {X} for current value (string)." return options
[ "def", "fix_config", "(", "self", ",", "options", ")", ":", "options", "=", "super", "(", "UpdateStorageValue", ",", "self", ")", ".", "fix_config", "(", "options", ")", "opt", "=", "\"storage_name\"", "if", "opt", "not", "in", "options", ":", "options", "[", "opt", "]", "=", "\"unknown\"", "if", "opt", "not", "in", "self", ".", "help", ":", "self", ".", "help", "[", "opt", "]", "=", "\"The name of the storage value to update (string).\"", "opt", "=", "\"expression\"", "if", "opt", "not", "in", "options", ":", "options", "[", "opt", "]", "=", "\"int({X} + 1)\"", "if", "opt", "not", "in", "self", ".", "help", ":", "self", ".", "help", "[", "opt", "]", "=", "\"The expression for updating the storage value; use {X} for current value (string).\"", "return", "options" ]
34.291667
20.625
def drop_schema(self): """Drop all gauged tables""" try: self.cursor.execute(""" DROP TABLE IF EXISTS gauged_data; DROP TABLE IF EXISTS gauged_keys; DROP TABLE IF EXISTS gauged_writer_history; DROP TABLE IF EXISTS gauged_cache; DROP TABLE IF EXISTS gauged_statistics; DROP TABLE IF EXISTS gauged_metadata""") self.db.commit() except self.psycopg2.InternalError: # pragma: no cover self.db.rollback()
[ "def", "drop_schema", "(", "self", ")", ":", "try", ":", "self", ".", "cursor", ".", "execute", "(", "\"\"\"\n DROP TABLE IF EXISTS gauged_data;\n DROP TABLE IF EXISTS gauged_keys;\n DROP TABLE IF EXISTS gauged_writer_history;\n DROP TABLE IF EXISTS gauged_cache;\n DROP TABLE IF EXISTS gauged_statistics;\n DROP TABLE IF EXISTS gauged_metadata\"\"\"", ")", "self", ".", "db", ".", "commit", "(", ")", "except", "self", ".", "psycopg2", ".", "InternalError", ":", "# pragma: no cover", "self", ".", "db", ".", "rollback", "(", ")" ]
41.846154
11.769231
def render_template_for_path(request, path, context=None, use_cache=True, def_name=None): ''' Convenience method that directly renders a template, given a direct path to it. ''' return get_template_for_path(path, use_cache).render(context, request, def_name)
[ "def", "render_template_for_path", "(", "request", ",", "path", ",", "context", "=", "None", ",", "use_cache", "=", "True", ",", "def_name", "=", "None", ")", ":", "return", "get_template_for_path", "(", "path", ",", "use_cache", ")", ".", "render", "(", "context", ",", "request", ",", "def_name", ")" ]
54
40.4
def parse(cls, fptr, offset, length): """Parse component mapping box. Parameters ---------- fptr : file Open file object. offset : int Start position of box in bytes. length : int Length of the box in bytes. Returns ------- ComponentMappingBox Instance of the current component mapping box. """ num_bytes = offset + length - fptr.tell() num_components = int(num_bytes / 4) read_buffer = fptr.read(num_bytes) data = struct.unpack('>' + 'HBB' * num_components, read_buffer) component_index = data[0:num_bytes:3] mapping_type = data[1:num_bytes:3] palette_index = data[2:num_bytes:3] return cls(component_index, mapping_type, palette_index, length=length, offset=offset)
[ "def", "parse", "(", "cls", ",", "fptr", ",", "offset", ",", "length", ")", ":", "num_bytes", "=", "offset", "+", "length", "-", "fptr", ".", "tell", "(", ")", "num_components", "=", "int", "(", "num_bytes", "/", "4", ")", "read_buffer", "=", "fptr", ".", "read", "(", "num_bytes", ")", "data", "=", "struct", ".", "unpack", "(", "'>'", "+", "'HBB'", "*", "num_components", ",", "read_buffer", ")", "component_index", "=", "data", "[", "0", ":", "num_bytes", ":", "3", "]", "mapping_type", "=", "data", "[", "1", ":", "num_bytes", ":", "3", "]", "palette_index", "=", "data", "[", "2", ":", "num_bytes", ":", "3", "]", "return", "cls", "(", "component_index", ",", "mapping_type", ",", "palette_index", ",", "length", "=", "length", ",", "offset", "=", "offset", ")" ]
29.482759
16.931034
def visibility_changed(self, enable): """DockWidget visibility has changed""" super(SpyderPluginWidget, self).visibility_changed(enable) if enable: self.explorer.is_visible.emit()
[ "def", "visibility_changed", "(", "self", ",", "enable", ")", ":", "super", "(", "SpyderPluginWidget", ",", "self", ")", ".", "visibility_changed", "(", "enable", ")", "if", "enable", ":", "self", ".", "explorer", ".", "is_visible", ".", "emit", "(", ")" ]
43
10.6
def options(self, path=None, url_kwargs=None, **kwargs): """ Sends an OPTIONS request. :param path: The HTTP path (either absolute or relative). :param url_kwargs: Parameters to override in the generated URL. See `~hyperlink.URL`. :param **kwargs: Optional arguments that ``request`` takes. :return: response object """ return self._session.options(self._url(path, url_kwargs), **kwargs)
[ "def", "options", "(", "self", ",", "path", "=", "None", ",", "url_kwargs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_session", ".", "options", "(", "self", ".", "_url", "(", "path", ",", "url_kwargs", ")", ",", "*", "*", "kwargs", ")" ]
36.615385
17.230769
def load_filter_plugins(entrypoint_group: str) -> Iterable[Filter]: """ Load all blacklist plugins that are registered with pkg_resources Parameters ========== entrypoint_group: str The entrypoint group name to load plugins from Returns ======= List of Blacklist: A list of objects derived from the Blacklist class """ global loaded_filter_plugins enabled_plugins: List[str] = [] config = BandersnatchConfig().config try: config_blacklist_plugins = config["blacklist"]["plugins"] split_plugins = config_blacklist_plugins.split("\n") if "all" in split_plugins: enabled_plugins = ["all"] else: for plugin in split_plugins: if not plugin: continue enabled_plugins.append(plugin) except KeyError: pass # If the plugins for the entrypoint_group have been loaded return them cached_plugins = loaded_filter_plugins.get(entrypoint_group) if cached_plugins: return cached_plugins plugins = set() for entry_point in pkg_resources.iter_entry_points(group=entrypoint_group): plugin_class = entry_point.load() plugin_instance = plugin_class() if "all" in enabled_plugins or plugin_instance.name in enabled_plugins: plugins.add(plugin_instance) loaded_filter_plugins[entrypoint_group] = list(plugins) return plugins
[ "def", "load_filter_plugins", "(", "entrypoint_group", ":", "str", ")", "->", "Iterable", "[", "Filter", "]", ":", "global", "loaded_filter_plugins", "enabled_plugins", ":", "List", "[", "str", "]", "=", "[", "]", "config", "=", "BandersnatchConfig", "(", ")", ".", "config", "try", ":", "config_blacklist_plugins", "=", "config", "[", "\"blacklist\"", "]", "[", "\"plugins\"", "]", "split_plugins", "=", "config_blacklist_plugins", ".", "split", "(", "\"\\n\"", ")", "if", "\"all\"", "in", "split_plugins", ":", "enabled_plugins", "=", "[", "\"all\"", "]", "else", ":", "for", "plugin", "in", "split_plugins", ":", "if", "not", "plugin", ":", "continue", "enabled_plugins", ".", "append", "(", "plugin", ")", "except", "KeyError", ":", "pass", "# If the plugins for the entrypoint_group have been loaded return them", "cached_plugins", "=", "loaded_filter_plugins", ".", "get", "(", "entrypoint_group", ")", "if", "cached_plugins", ":", "return", "cached_plugins", "plugins", "=", "set", "(", ")", "for", "entry_point", "in", "pkg_resources", ".", "iter_entry_points", "(", "group", "=", "entrypoint_group", ")", ":", "plugin_class", "=", "entry_point", ".", "load", "(", ")", "plugin_instance", "=", "plugin_class", "(", ")", "if", "\"all\"", "in", "enabled_plugins", "or", "plugin_instance", ".", "name", "in", "enabled_plugins", ":", "plugins", ".", "add", "(", "plugin_instance", ")", "loaded_filter_plugins", "[", "entrypoint_group", "]", "=", "list", "(", "plugins", ")", "return", "plugins" ]
31.622222
20.022222
def post_notification(self, ntype, sender, *args, **kwargs): """Post notification to all registered observers. The registered callback will be called as:: callback(ntype, sender, *args, **kwargs) Parameters ---------- ntype : hashable The notification type. sender : hashable The object sending the notification. *args : tuple The positional arguments to be passed to the callback. **kwargs : dict The keyword argument to be passed to the callback. Notes ----- * If no registered observers, performance is O(1). * Notificaiton order is undefined. * Notifications are posted synchronously. """ if(ntype==None or sender==None): raise NotificationError( "Notification type and sender are required.") # If there are no registered observers for the type/sender pair if((ntype not in self.registered_types and None not in self.registered_types) or (sender not in self.registered_senders and None not in self.registered_senders)): return for o in self._observers_for_notification(ntype, sender): o(ntype, sender, *args, **kwargs)
[ "def", "post_notification", "(", "self", ",", "ntype", ",", "sender", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "(", "ntype", "==", "None", "or", "sender", "==", "None", ")", ":", "raise", "NotificationError", "(", "\"Notification type and sender are required.\"", ")", "# If there are no registered observers for the type/sender pair", "if", "(", "(", "ntype", "not", "in", "self", ".", "registered_types", "and", "None", "not", "in", "self", ".", "registered_types", ")", "or", "(", "sender", "not", "in", "self", ".", "registered_senders", "and", "None", "not", "in", "self", ".", "registered_senders", ")", ")", ":", "return", "for", "o", "in", "self", ".", "_observers_for_notification", "(", "ntype", ",", "sender", ")", ":", "o", "(", "ntype", ",", "sender", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
34
19.421053
def get_pk_attrnames(obj) -> List[str]: """ Asks an SQLAlchemy ORM object: "what are your primary key(s)?" Args: obj: SQLAlchemy ORM object Returns: list of attribute names of primary-key columns """ return [attrname for attrname, column in gen_columns(obj) if column.primary_key]
[ "def", "get_pk_attrnames", "(", "obj", ")", "->", "List", "[", "str", "]", ":", "return", "[", "attrname", "for", "attrname", ",", "column", "in", "gen_columns", "(", "obj", ")", "if", "column", ".", "primary_key", "]" ]
23.857143
18.857143
def create_redirect_web_page(web_dir, org_name, kibana_url): """ Create HTML pages with the org name that redirect to the Kibana dashboard filtered for this org """ html_redirect = """ <html> <head> """ html_redirect += """<meta http-equiv="refresh" content="0; URL=%s/app/kibana"""\ % kibana_url html_redirect += """#/dashboard/Overview?_g=(filters:!(('$state':""" html_redirect += """(store:globalState),meta:(alias:!n,disabled:!f,index:""" html_redirect += """github_git_enrich,key:project,negate:!f,value:%s),"""\ % org_name html_redirect += """query:(match:(project:(query:%s,type:phrase))))),"""\ % org_name html_redirect += """refreshInterval:(display:Off,pause:!f,value:0),""" html_redirect += """time:(from:now-2y,mode:quick,to:now))" /> </head> </html> """ try: with open(path.join(web_dir, org_name), "w") as f: f.write(html_redirect) except FileNotFoundError as ex: logging.error("Wrong web dir for redirect pages: %s" % (web_dir)) logging.error(ex)
[ "def", "create_redirect_web_page", "(", "web_dir", ",", "org_name", ",", "kibana_url", ")", ":", "html_redirect", "=", "\"\"\"\n <html>\n <head>\n \"\"\"", "html_redirect", "+=", "\"\"\"<meta http-equiv=\"refresh\" content=\"0; URL=%s/app/kibana\"\"\"", "%", "kibana_url", "html_redirect", "+=", "\"\"\"#/dashboard/Overview?_g=(filters:!(('$state':\"\"\"", "html_redirect", "+=", "\"\"\"(store:globalState),meta:(alias:!n,disabled:!f,index:\"\"\"", "html_redirect", "+=", "\"\"\"github_git_enrich,key:project,negate:!f,value:%s),\"\"\"", "%", "org_name", "html_redirect", "+=", "\"\"\"query:(match:(project:(query:%s,type:phrase))))),\"\"\"", "%", "org_name", "html_redirect", "+=", "\"\"\"refreshInterval:(display:Off,pause:!f,value:0),\"\"\"", "html_redirect", "+=", "\"\"\"time:(from:now-2y,mode:quick,to:now))\" />\n </head>\n </html>\n \"\"\"", "try", ":", "with", "open", "(", "path", ".", "join", "(", "web_dir", ",", "org_name", ")", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "html_redirect", ")", "except", "FileNotFoundError", "as", "ex", ":", "logging", ".", "error", "(", "\"Wrong web dir for redirect pages: %s\"", "%", "(", "web_dir", ")", ")", "logging", ".", "error", "(", "ex", ")" ]
43.038462
10.153846
def run_selection(self): """ Run selected text or current line in console. If some text is selected, then execute that text in console. If no text is selected, then execute current line, unless current line is empty. Then, advance cursor to next line. If cursor is on last line and that line is not empty, then add a new blank line and move the cursor there. If cursor is on last line and that line is empty, then do not move cursor. """ text = self.get_current_editor().get_selection_as_executable_code() if text: self.exec_in_extconsole.emit(text.rstrip(), self.focus_to_editor) return editor = self.get_current_editor() line = editor.get_current_line() text = line.lstrip() if text: self.exec_in_extconsole.emit(text, self.focus_to_editor) if editor.is_cursor_on_last_line() and text: editor.append(editor.get_line_separator()) editor.move_cursor_to_next('line', 'down')
[ "def", "run_selection", "(", "self", ")", ":", "text", "=", "self", ".", "get_current_editor", "(", ")", ".", "get_selection_as_executable_code", "(", ")", "if", "text", ":", "self", ".", "exec_in_extconsole", ".", "emit", "(", "text", ".", "rstrip", "(", ")", ",", "self", ".", "focus_to_editor", ")", "return", "editor", "=", "self", ".", "get_current_editor", "(", ")", "line", "=", "editor", ".", "get_current_line", "(", ")", "text", "=", "line", ".", "lstrip", "(", ")", "if", "text", ":", "self", ".", "exec_in_extconsole", ".", "emit", "(", "text", ",", "self", ".", "focus_to_editor", ")", "if", "editor", ".", "is_cursor_on_last_line", "(", ")", "and", "text", ":", "editor", ".", "append", "(", "editor", ".", "get_line_separator", "(", ")", ")", "editor", ".", "move_cursor_to_next", "(", "'line'", ",", "'down'", ")" ]
44.125
21.958333
def _warning_handler(self, code: int): """处理300~399段状态码,抛出对应警告. Parameters: (code): - 响应的状态码 Return: (bool): - 已知的警告类型则返回True,否则返回False """ if code == 300: warnings.warn( "ExpireWarning", RuntimeWarning, stacklevel=3 ) elif code == 301: warnings.warn( "ExpireStreamWarning", RuntimeWarning, stacklevel=3 ) else: if self.debug: print("unknow code {}".format(code)) return False return True
[ "def", "_warning_handler", "(", "self", ",", "code", ":", "int", ")", ":", "if", "code", "==", "300", ":", "warnings", ".", "warn", "(", "\"ExpireWarning\"", ",", "RuntimeWarning", ",", "stacklevel", "=", "3", ")", "elif", "code", "==", "301", ":", "warnings", ".", "warn", "(", "\"ExpireStreamWarning\"", ",", "RuntimeWarning", ",", "stacklevel", "=", "3", ")", "else", ":", "if", "self", ".", "debug", ":", "print", "(", "\"unknow code {}\"", ".", "format", "(", "code", ")", ")", "return", "False", "return", "True" ]
23.592593
16.37037
def Division(left: vertex_constructor_param_types, right: vertex_constructor_param_types, label: Optional[str]=None) -> Vertex: """ Divides one vertex by another :param left: the vertex to be divided :param right: the vertex to divide """ return Double(context.jvm_view().DivisionVertex, label, cast_to_double_vertex(left), cast_to_double_vertex(right))
[ "def", "Division", "(", "left", ":", "vertex_constructor_param_types", ",", "right", ":", "vertex_constructor_param_types", ",", "label", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Vertex", ":", "return", "Double", "(", "context", ".", "jvm_view", "(", ")", ".", "DivisionVertex", ",", "label", ",", "cast_to_double_vertex", "(", "left", ")", ",", "cast_to_double_vertex", "(", "right", ")", ")" ]
46.875
26.375
def iter(self, **kwargs): """Compute a range of orbits between two dates Keyword Arguments: dates (list of :py:class:`~beyond.dates.date.Date`): Dates from which iterate over start (Date or None): Date of the first point stop (Date, timedelta or None): Date of the last point step (timedelta or None): Step to use during the computation. Use the same step as `self` if `None` listeners (list of:py:class:`~beyond.orbits.listeners.Listener`): Yield: :py:class:`Orbit`: There is two ways to use the iter() method. If *dates* is defined, it should be an iterable of dates. This could be a generator as per :py:meth:`Date.range <beyond.dates.date.Date.range>`, or a list. .. code-block:: python # Create two successive ranges of dates, with different steps dates = list(Date.range(Date(2019, 3, 23), Date(2019, 3, 24), timedelta(minutes=3))) dates.extend(Date.range(Date(2019, 3, 24), Date(2019, 3, 25), timedelta(minutes=10), inclusive=True)) propag.iter(dates=dates) The alternative, is the use of *start*, *stop* and *step* keyword arguments which work exactly as :code:`Date.range(start, stop, step, inclusive=True)` If one of *start*, *stop* or *step* arguments is set to ``None`` it will keep the same property as the generating ephemeris. .. code-block:: python propag.iter(stop=stop) # If the iterator has a default step (e.g. 
numerical propagators) propag.iter(stop=stop, step=step) propag.iter(start=start, stop=stop, step=step) """ if 'dates' not in kwargs: start = kwargs.setdefault('start', self.orbit.date) stop = kwargs.get('stop') step = kwargs.setdefault('step', getattr(self, 'step', None)) if 'stop' is None: raise ValueError("The end of the propagation should be defined") start = self.orbit.date if start is None else start step = self.step if step is None else step if isinstance(kwargs['stop'], timedelta): kwargs['stop'] = start + kwargs['stop'] if start > kwargs['stop'] and step.total_seconds() > 0: kwargs['step'] = -step listeners = kwargs.pop('listeners', []) for orb in self._iter(**kwargs): for listen_orb in self.listen(orb, listeners): yield listen_orb yield orb
[ "def", "iter", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "'dates'", "not", "in", "kwargs", ":", "start", "=", "kwargs", ".", "setdefault", "(", "'start'", ",", "self", ".", "orbit", ".", "date", ")", "stop", "=", "kwargs", ".", "get", "(", "'stop'", ")", "step", "=", "kwargs", ".", "setdefault", "(", "'step'", ",", "getattr", "(", "self", ",", "'step'", ",", "None", ")", ")", "if", "'stop'", "is", "None", ":", "raise", "ValueError", "(", "\"The end of the propagation should be defined\"", ")", "start", "=", "self", ".", "orbit", ".", "date", "if", "start", "is", "None", "else", "start", "step", "=", "self", ".", "step", "if", "step", "is", "None", "else", "step", "if", "isinstance", "(", "kwargs", "[", "'stop'", "]", ",", "timedelta", ")", ":", "kwargs", "[", "'stop'", "]", "=", "start", "+", "kwargs", "[", "'stop'", "]", "if", "start", ">", "kwargs", "[", "'stop'", "]", "and", "step", ".", "total_seconds", "(", ")", ">", "0", ":", "kwargs", "[", "'step'", "]", "=", "-", "step", "listeners", "=", "kwargs", ".", "pop", "(", "'listeners'", ",", "[", "]", ")", "for", "orb", "in", "self", ".", "_iter", "(", "*", "*", "kwargs", ")", ":", "for", "listen_orb", "in", "self", ".", "listen", "(", "orb", ",", "listeners", ")", ":", "yield", "listen_orb", "yield", "orb" ]
42.2
27.316667
def generate_keys(transform, field, evaluator, value):
    '''
    Build the query keys for the default structure of the query index.

    Keys have the shape ``[field_name, value]``.  Custom sorting is supported:
    the raw value is replaced by ``transform(value)`` in every key.

    @param transform: callable of 1 argument applied to each value
    @param field: C{str} field name
    @param evaluator: C{Evaluator} comparison kind
    @param value: value (a pair for ``between``, an iterable for ``inside``)
    @rtype: dict
    @returns: keyword arguments to pass to the view query
    '''
    if evaluator == Evaluator.equals:
        return {'key': (field, transform(value))}
    elif evaluator == Evaluator.le:
        # Open start: everything for this field up to the given value.
        return {'startkey': (field, ), 'endkey': (field, transform(value))}
    elif evaluator == Evaluator.ge:
        # {} sorts after every other value in the view collation order.
        return {'startkey': (field, transform(value)), 'endkey': (field, {})}
    elif evaluator == Evaluator.between:
        return {'startkey': (field, transform(value[0])),
                'endkey': (field, transform(value[1]))}
    elif evaluator == Evaluator.inside:
        return {'keys': [(field, transform(item)) for item in value]}
    elif evaluator == Evaluator.none:
        # Full range for the field: no value constraint at all.
        return {'startkey': (field, ), 'endkey': (field, {})}
[ "def", "generate_keys", "(", "transform", ",", "field", ",", "evaluator", ",", "value", ")", ":", "if", "evaluator", "==", "Evaluator", ".", "equals", ":", "return", "dict", "(", "key", "=", "(", "field", ",", "transform", "(", "value", ")", ")", ")", "if", "evaluator", "==", "Evaluator", ".", "le", ":", "return", "dict", "(", "startkey", "=", "(", "field", ",", ")", ",", "endkey", "=", "(", "field", ",", "transform", "(", "value", ")", ")", ")", "if", "evaluator", "==", "Evaluator", ".", "ge", ":", "return", "dict", "(", "startkey", "=", "(", "field", ",", "transform", "(", "value", ")", ")", ",", "endkey", "=", "(", "field", ",", "{", "}", ")", ")", "if", "evaluator", "==", "Evaluator", ".", "between", ":", "return", "dict", "(", "startkey", "=", "(", "field", ",", "transform", "(", "value", "[", "0", "]", ")", ")", ",", "endkey", "=", "(", "field", ",", "transform", "(", "value", "[", "1", "]", ")", ")", ")", "if", "evaluator", "==", "Evaluator", ".", "inside", ":", "return", "dict", "(", "keys", "=", "[", "(", "field", ",", "transform", "(", "x", ")", ")", "for", "x", "in", "value", "]", ")", "if", "evaluator", "==", "Evaluator", ".", "none", ":", "return", "dict", "(", "startkey", "=", "(", "field", ",", ")", ",", "endkey", "=", "(", "field", ",", "{", "}", ")", ")" ]
40.821429
17.035714
def patch_table(self,
                dataset_id,
                table_id,
                project_id=None,
                description=None,
                expiration_time=None,
                external_data_configuration=None,
                friendly_name=None,
                labels=None,
                schema=None,
                time_partitioning=None,
                view=None,
                require_partition_filter=None):
    """
    Patch information in an existing table.
    It only updates fields that are provided in the request object.

    Reference: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/patch

    :param dataset_id: The dataset containing the table to be patched.
    :param table_id: The name of the table to be patched.
    :param project_id: The project containing the table; defaults to the
        hook's configured project.
    :param description: [Optional] A user-friendly description of this table.
    :param expiration_time: [Optional] Expiry time, in ms since the epoch.
    :param external_data_configuration: [Optional] Properties of a table
        stored outside of BigQuery.
    :param friendly_name: [Optional] A descriptive name for this table.
    :param labels: [Optional] Labels associated with this table.
    :param schema: [Optional] The schema field list, e.g.
        ``[{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"}]``.
    :param time_partitioning: [Optional] Time-based partitioning definition.
    :param view: [Optional] View definition; if set, a view is patched
        instead of a table.
    :param require_partition_filter: [Optional] If true, queries over this
        table require a partition filter.
    :raises AirflowException: when the underlying API call fails.
    """
    if project_id is None:
        project_id = self.project_id

    # Each candidate is (resource key, value, include-predicate).  A plain
    # truthiness test is kept for the dict-valued fields, while scalar
    # fields are included whenever they were explicitly passed.
    candidates = [
        ('description', description, description is not None),
        ('expirationTime', expiration_time, expiration_time is not None),
        ('externalDataConfiguration', external_data_configuration,
         bool(external_data_configuration)),
        ('friendlyName', friendly_name, friendly_name is not None),
        ('labels', labels, bool(labels)),
        ('schema', {'fields': schema}, bool(schema)),
        ('timePartitioning', time_partitioning, bool(time_partitioning)),
        ('view', view, bool(view)),
        ('requirePartitionFilter', require_partition_filter,
         require_partition_filter is not None),
    ]
    table_resource = {key: val for key, val, include in candidates if include}

    self.log.info('Patching Table %s:%s.%s',
                  project_id, dataset_id, table_id)

    try:
        self.service.tables().patch(
            projectId=project_id,
            datasetId=dataset_id,
            tableId=table_id,
            body=table_resource).execute(num_retries=self.num_retries)

        self.log.info('Table patched successfully: %s:%s.%s',
                      project_id, dataset_id, table_id)
    except HttpError as err:
        raise AirflowException(
            'BigQuery job failed. Error was: {}'.format(err.content)
        )
[ "def", "patch_table", "(", "self", ",", "dataset_id", ",", "table_id", ",", "project_id", "=", "None", ",", "description", "=", "None", ",", "expiration_time", "=", "None", ",", "external_data_configuration", "=", "None", ",", "friendly_name", "=", "None", ",", "labels", "=", "None", ",", "schema", "=", "None", ",", "time_partitioning", "=", "None", ",", "view", "=", "None", ",", "require_partition_filter", "=", "None", ")", ":", "project_id", "=", "project_id", "if", "project_id", "is", "not", "None", "else", "self", ".", "project_id", "table_resource", "=", "{", "}", "if", "description", "is", "not", "None", ":", "table_resource", "[", "'description'", "]", "=", "description", "if", "expiration_time", "is", "not", "None", ":", "table_resource", "[", "'expirationTime'", "]", "=", "expiration_time", "if", "external_data_configuration", ":", "table_resource", "[", "'externalDataConfiguration'", "]", "=", "external_data_configuration", "if", "friendly_name", "is", "not", "None", ":", "table_resource", "[", "'friendlyName'", "]", "=", "friendly_name", "if", "labels", ":", "table_resource", "[", "'labels'", "]", "=", "labels", "if", "schema", ":", "table_resource", "[", "'schema'", "]", "=", "{", "'fields'", ":", "schema", "}", "if", "time_partitioning", ":", "table_resource", "[", "'timePartitioning'", "]", "=", "time_partitioning", "if", "view", ":", "table_resource", "[", "'view'", "]", "=", "view", "if", "require_partition_filter", "is", "not", "None", ":", "table_resource", "[", "'requirePartitionFilter'", "]", "=", "require_partition_filter", "self", ".", "log", ".", "info", "(", "'Patching Table %s:%s.%s'", ",", "project_id", ",", "dataset_id", ",", "table_id", ")", "try", ":", "self", ".", "service", ".", "tables", "(", ")", ".", "patch", "(", "projectId", "=", "project_id", ",", "datasetId", "=", "dataset_id", ",", "tableId", "=", "table_id", ",", "body", "=", "table_resource", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", 
"self", ".", "log", ".", "info", "(", "'Table patched successfully: %s:%s.%s'", ",", "project_id", ",", "dataset_id", ",", "table_id", ")", "except", "HttpError", "as", "err", ":", "raise", "AirflowException", "(", "'BigQuery job failed. Error was: {}'", ".", "format", "(", "err", ".", "content", ")", ")" ]
43.224299
22.252336
def get_arguments(self):
    """
    Extracts the specific arguments of this CLI
    """
    ApiCli.get_arguments(self)

    args = self.args
    if args.metricName is not None:
        self.metricName = args.metricName
    if args.measurement is not None:
        self.measurement = args.measurement
    # Fall back to the local host name when no source was given.
    self.source = args.source if args.source is not None else socket.gethostname()
    if args.timestamp is not None:
        self.timestamp = int(args.timestamp)

    payload = {'metric': self.metricName, 'measure': self.measurement}
    if self.source is not None:
        payload['source'] = self.source
    if self.timestamp is not None:
        payload['timestamp'] = int(self.timestamp)

    self._process_properties()
    if self._properties is not None:
        payload['metadata'] = self._properties

    # Deterministic key order keeps the serialized body stable.
    self.data = json.dumps(payload, sort_keys=True)
    self.headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
[ "def", "get_arguments", "(", "self", ")", ":", "ApiCli", ".", "get_arguments", "(", "self", ")", "if", "self", ".", "args", ".", "metricName", "is", "not", "None", ":", "self", ".", "metricName", "=", "self", ".", "args", ".", "metricName", "if", "self", ".", "args", ".", "measurement", "is", "not", "None", ":", "self", ".", "measurement", "=", "self", ".", "args", ".", "measurement", "if", "self", ".", "args", ".", "source", "is", "not", "None", ":", "self", ".", "source", "=", "self", ".", "args", ".", "source", "else", ":", "self", ".", "source", "=", "socket", ".", "gethostname", "(", ")", "if", "self", ".", "args", ".", "timestamp", "is", "not", "None", ":", "self", ".", "timestamp", "=", "int", "(", "self", ".", "args", ".", "timestamp", ")", "m", "=", "{", "'metric'", ":", "self", ".", "metricName", ",", "'measure'", ":", "self", ".", "measurement", "}", "if", "self", ".", "source", "is", "not", "None", ":", "m", "[", "'source'", "]", "=", "self", ".", "source", "if", "self", ".", "timestamp", "is", "not", "None", ":", "m", "[", "'timestamp'", "]", "=", "int", "(", "self", ".", "timestamp", ")", "self", ".", "_process_properties", "(", ")", "if", "self", ".", "_properties", "is", "not", "None", ":", "m", "[", "'metadata'", "]", "=", "self", ".", "_properties", "self", ".", "data", "=", "json", ".", "dumps", "(", "m", ",", "sort_keys", "=", "True", ")", "self", ".", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", ",", "\"Accept\"", ":", "\"application/json\"", "}" ]
30.970588
15.382353
def validate(self, input, is_substitute):
    """
    Performs validation

    :param input: object to validate
    :param is_substitute: will be used for what-if in the future
    :return: validation result if violated or None
    """
    # ToDo: apply 'no substitute' for initial load validations
    limit = self.__get_constraint(input)
    candidate = self.__get_to_validate(input)
    return self.__compare(candidate, limit)
[ "def", "validate", "(", "self", ",", "input", ",", "is_substitute", ")", ":", "# ToDo: apply 'no substitute' for initial load validations", "constraint", "=", "self", ".", "__get_constraint", "(", "input", ")", "to_validate", "=", "self", ".", "__get_to_validate", "(", "input", ")", "return", "self", ".", "__compare", "(", "to_validate", ",", "constraint", ")" ]
46.5
14.9
def replace_symbol_to_number(pinyin):
    """Replace tone-marked characters with their digit notation."""
    def _to_digit(match):
        # match.group(0) is the accented character; look up its
        # digit-tone equivalent in the phonetic symbol table.
        return PHONETIC_SYMBOL_DICT[match.group(0)]

    # Substitute every tone-marked character found in the pinyin string.
    return RE_PHONETIC_SYMBOL.sub(_to_digit, pinyin)
[ "def", "replace_symbol_to_number", "(", "pinyin", ")", ":", "def", "_replace", "(", "match", ")", ":", "symbol", "=", "match", ".", "group", "(", "0", ")", "# 带声调的字符", "# 返回使用数字标识声调的字符", "return", "PHONETIC_SYMBOL_DICT", "[", "symbol", "]", "# 替换拼音中的带声调字符", "return", "RE_PHONETIC_SYMBOL", ".", "sub", "(", "_replace", ",", "pinyin", ")" ]
28.222222
12.666667
def _config(self, args, config): """ Get configuration for the current used listing. """ webexports = dict((x.args, x) for x in config.subsections('webexport')) webexport = webexports.get(args.webexport) if webexport is None: if args.webexport == u'default': raise NotImplementedError('Default webexport not implemented') # FIXME else: raise KolektoRuntimeError('Unknown webexport %r' % args.webexport) else: return {'columns': list(webexport.subsections('column')), 'page_title': webexport.get('page_title'), 'page_credits': webexport.get('page_credits')}
[ "def", "_config", "(", "self", ",", "args", ",", "config", ")", ":", "webexports", "=", "dict", "(", "(", "x", ".", "args", ",", "x", ")", "for", "x", "in", "config", ".", "subsections", "(", "'webexport'", ")", ")", "webexport", "=", "webexports", ".", "get", "(", "args", ".", "webexport", ")", "if", "webexport", "is", "None", ":", "if", "args", ".", "webexport", "==", "u'default'", ":", "raise", "NotImplementedError", "(", "'Default webexport not implemented'", ")", "# FIXME", "else", ":", "raise", "KolektoRuntimeError", "(", "'Unknown webexport %r'", "%", "args", ".", "webexport", ")", "else", ":", "return", "{", "'columns'", ":", "list", "(", "webexport", ".", "subsections", "(", "'column'", ")", ")", ",", "'page_title'", ":", "webexport", ".", "get", "(", "'page_title'", ")", ",", "'page_credits'", ":", "webexport", ".", "get", "(", "'page_credits'", ")", "}" ]
50
20.571429
async def send_rpc(self, conn_id, address, rpc_id, payload, timeout):
    """Send an RPC to a device.

    See :meth:`AbstractDeviceAdapter.send_rpc`.
    """
    self._ensure_connection(conn_id, True)
    conn_string = self._get_property(conn_id, "connection_string")

    # Binary payload travels base64-encoded inside the command message.
    request = {
        'address': address,
        'rpc_id': rpc_id,
        'payload': base64.b64encode(payload),
        'timeout': timeout,
        'connection_string': conn_string,
    }

    response = await self._send_command(OPERATIONS.SEND_RPC, request,
                                        COMMANDS.SendRPCResponse, timeout=timeout)

    return unpack_rpc_response(response.get('status'), response.get('payload'),
                               rpc_id=rpc_id, address=address)
[ "async", "def", "send_rpc", "(", "self", ",", "conn_id", ",", "address", ",", "rpc_id", ",", "payload", ",", "timeout", ")", ":", "self", ".", "_ensure_connection", "(", "conn_id", ",", "True", ")", "connection_string", "=", "self", ".", "_get_property", "(", "conn_id", ",", "\"connection_string\"", ")", "msg", "=", "dict", "(", "address", "=", "address", ",", "rpc_id", "=", "rpc_id", ",", "payload", "=", "base64", ".", "b64encode", "(", "payload", ")", ",", "timeout", "=", "timeout", ",", "connection_string", "=", "connection_string", ")", "response", "=", "await", "self", ".", "_send_command", "(", "OPERATIONS", ".", "SEND_RPC", ",", "msg", ",", "COMMANDS", ".", "SendRPCResponse", ",", "timeout", "=", "timeout", ")", "return", "unpack_rpc_response", "(", "response", ".", "get", "(", "'status'", ")", ",", "response", ".", "get", "(", "'payload'", ")", ",", "rpc_id", "=", "rpc_id", ",", "address", "=", "address", ")" ]
44.058824
29.588235
def getCursor(self):
    """
    Get a Dictionary Cursor for executing queries
    """
    # Connect lazily the first time a cursor is requested.
    if self.connection is None:
        self.Connect()
    return self.connection.cursor(MySQLdb.cursors.DictCursor)
[ "def", "getCursor", "(", "self", ")", ":", "if", "self", ".", "connection", "is", "None", ":", "self", ".", "Connect", "(", ")", "return", "self", ".", "connection", ".", "cursor", "(", "MySQLdb", ".", "cursors", ".", "DictCursor", ")" ]
23.125
14.625
def check_call(self, cmd):
    """Calls a command through SSH.
    """
    status, _ = self._call(cmd, False)
    if status != 0:  # pragma: no cover
        raise RemoteCommandFailure(command=cmd, ret=status)
[ "def", "check_call", "(", "self", ",", "cmd", ")", ":", "ret", ",", "_", "=", "self", ".", "_call", "(", "cmd", ",", "False", ")", "if", "ret", "!=", "0", ":", "# pragma: no cover", "raise", "RemoteCommandFailure", "(", "command", "=", "cmd", ",", "ret", "=", "ret", ")" ]
35.833333
5.833333
def set_properties(self, eid, value, idx='*'):
    """
    Set the value and/or attributes of an xml element, marked with the
    matching eid attribute, using the properties of the specified object.

    Each readable property ``name`` of ``value``'s class is written both as
    a child element addressed ``eid:name`` and as an attribute ``name`` on
    ``eid``.  Discovered properties are cached per class in
    ``Template.class_cache``.

    :param eid: base element id
    :param value: object whose properties provide the values
    :param idx: element index selector (``'*'`` targets all matches)
    """
    cls = value.__class__
    if cls not in Template.class_cache:
        props = []
        for name in dir(cls):
            prop = getattr(cls, name)
            # isinstance also accepts property subclasses (the original
            # type() == comparison did not), and write-only properties
            # (fget is None) are skipped since they cannot be read.
            if isinstance(prop, property) and prop.fget is not None:
                props.append((name, prop))
        Template.class_cache[cls] = props

    for name, prop in Template.class_cache[cls]:
        # Call the getter once and reuse the result for both targets.
        prop_value = prop.fget(value)
        self.set_value(''.join([eid, ':', name]), prop_value, idx)
        self.set_attribute(eid, name, prop_value, idx)
[ "def", "set_properties", "(", "self", ",", "eid", ",", "value", ",", "idx", "=", "'*'", ")", ":", "if", "value", ".", "__class__", "not", "in", "Template", ".", "class_cache", ":", "props", "=", "[", "]", "for", "name", "in", "dir", "(", "value", ".", "__class__", ")", ":", "prop", "=", "getattr", "(", "value", ".", "__class__", ",", "name", ")", "if", "type", "(", "prop", ")", "==", "property", "and", "hasattr", "(", "prop", ",", "'fget'", ")", ":", "props", ".", "append", "(", "(", "name", ",", "prop", ")", ")", "Template", ".", "class_cache", "[", "value", ".", "__class__", "]", "=", "props", "for", "(", "name", ",", "prop", ")", "in", "Template", ".", "class_cache", "[", "value", ".", "__class__", "]", ":", "new_eid", "=", "''", ".", "join", "(", "[", "eid", ",", "':'", ",", "name", "]", ")", "self", ".", "set_value", "(", "new_eid", ",", "prop", ".", "fget", "(", "value", ")", ",", "idx", ")", "self", ".", "set_attribute", "(", "eid", ",", "name", ",", "prop", ".", "fget", "(", "value", ")", ",", "idx", ")" ]
50
15.875
def get_property_by_inheritance(self, obj, prop):
    # pylint: disable=too-many-branches, too-many-nested-blocks
    """
    Get the property asked in parameter to this object or from defined templates of
    this object

    A leading ``'+'`` marker in a list value means "additive inheritance":
    the value extends (rather than replaces) what parent templates provide.
    Templates keep the marker so their children know to keep looping;
    concrete (non-template) objects have every ``'+'`` stripped before return.

    todo: rewrite this function which is really too complex!

    :param obj: the object to search the property
    :type obj: alignak.objects.item.Item
    :param prop: name of property
    :type prop: str
    :return: Value of property of this object or of a template
    :rtype: str or None
    """
    if prop == 'register':
        # We do not inherit the register property
        return None

    # If I have the property, I take mine but I check if I must add a plus property
    if hasattr(obj, prop):
        value = getattr(obj, prop)
        # Manage the additive inheritance for the property,
        # if property is in plus, add or replace it
        # Template should keep the '+' at the beginning of the chain
        if obj.has_plus(prop):
            if not isinstance(value, list):
                value = [value]
            value.insert(0, obj.get_plus_and_delete(prop))
            # NOTE(review): list(set(...)) loses ordering; the ordered
            # de-duplication below rebuilds a stable order afterwards.
            value = list(set(value))
            if obj.is_tpl():
                value.insert(0, '+')

        # Clean the returned value
        if isinstance(value, list):
            # Get unique ordered list
            new_list = []
            for elt in value:
                if elt not in new_list:
                    new_list.append(elt)

            value = new_list
            if not obj.is_tpl():
                while '+' in value:
                    value.remove('+')
        return value

    # Ok, I do not have prop, Maybe my templates do?
    # Same story for plus
    # We reverse list, so that when looking for properties by inheritance,
    # the least defined template wins (if property is set).
    for t_id in obj.templates:
        template = self.templates[t_id]
        # Recurse into the template chain for this property.
        value = self.get_property_by_inheritance(template, prop)

        if value is None or (isinstance(value, list) and not value):
            continue

        # If our template give us a '+' value, we continue the loop
        still_loop = False
        if isinstance(value, list) and value[0] == '+':
            # Templates should keep their + inherited from their parents
            if not obj.is_tpl():
                value = list(value)
                value = value[1:]
            still_loop = True

        # Maybe in the previous loop, we set a value, use it too
        if hasattr(obj, prop):
            # If the current value is a string, it will simplify the problem
            if isinstance(value, (list, string_types)) and value and value[0] == '+':
                # In this case we can remove the + from our current
                # tpl because our value will be final
                new_val = list(getattr(obj, prop))
                new_val.extend(value[1:])
                value = new_val
            else:
                # If not, we should keep the + sign of need
                new_val = list(getattr(obj, prop))
                new_val.extend(value)
                value = new_val

        # Ok, we can set it and uniquify a list if needed
        if isinstance(value, list):
            # Get unique ordered list
            new_list = []
            for elt in value:
                if elt not in new_list:
                    new_list.append(elt)

            value = new_list
            if not obj.is_tpl():
                while '+' in value:
                    value.remove('+')
        setattr(obj, prop, value)

        # If we only got some '+' values, we must still loop
        # for an end value without it
        if not still_loop:
            # And set my own value in the end if need
            if obj.has_plus(prop):
                # value = list(getattr(obj, prop, []))
                value = list(value)
                value.extend(obj.get_plus_and_delete(prop))
                # Template should keep their '+'
                if obj.is_tpl() and value[0] != '+':
                    value.insert(0, '+')

                # Clean the returned value
                if isinstance(value, list):
                    # Get unique ordered list
                    new_list = []
                    for elt in value:
                        if elt not in new_list:
                            new_list.append(elt)

                    value = new_list
                    if not obj.is_tpl():
                        while '+' in value:
                            value.remove('+')
                setattr(obj, prop, value)
            return value

    # Maybe templates only give us + values, so we didn't quit, but we already got a
    # self.prop value after all
    template_with_only_plus = hasattr(obj, prop)

    # I do not have endingprop, my templates too... Maybe a plus?
    # warning: if all my templates gave me '+' values, do not forgot to
    # add the already set self.prop value
    if obj.has_plus(prop):
        if template_with_only_plus:
            value = list(getattr(obj, prop))
            value.extend(obj.get_plus_and_delete(prop))
        else:
            value = obj.get_plus_and_delete(prop)
        # Template should keep their '+' chain
        # We must say it's a '+' value, so our son will know that it must continue looping
        if obj.is_tpl() and value != [] and value[0] != '+':
            value.insert(0, '+')

        # Clean the returned value
        if isinstance(value, list):
            # Get unique ordered list
            new_list = []
            for elt in value:
                if elt not in new_list:
                    new_list.append(elt)

            value = new_list
            if not obj.is_tpl():
                while '+' in value:
                    value.remove('+')
        setattr(obj, prop, value)
        return value

    # Ok so in the end, we give the value we got if we have one, or None
    # Not even a plus... so None :)
    return getattr(obj, prop, None)
[ "def", "get_property_by_inheritance", "(", "self", ",", "obj", ",", "prop", ")", ":", "# pylint: disable=too-many-branches, too-many-nested-blocks", "if", "prop", "==", "'register'", ":", "# We do not inherit the register property", "return", "None", "# If I have the property, I take mine but I check if I must add a plus property", "if", "hasattr", "(", "obj", ",", "prop", ")", ":", "value", "=", "getattr", "(", "obj", ",", "prop", ")", "# Manage the additive inheritance for the property,", "# if property is in plus, add or replace it", "# Template should keep the '+' at the beginning of the chain", "if", "obj", ".", "has_plus", "(", "prop", ")", ":", "if", "not", "isinstance", "(", "value", ",", "list", ")", ":", "value", "=", "[", "value", "]", "value", ".", "insert", "(", "0", ",", "obj", ".", "get_plus_and_delete", "(", "prop", ")", ")", "value", "=", "list", "(", "set", "(", "value", ")", ")", "if", "obj", ".", "is_tpl", "(", ")", ":", "value", ".", "insert", "(", "0", ",", "'+'", ")", "# Clean the returned value", "if", "isinstance", "(", "value", ",", "list", ")", ":", "# Get unique ordered list", "new_list", "=", "[", "]", "for", "elt", "in", "value", ":", "if", "elt", "not", "in", "new_list", ":", "new_list", ".", "append", "(", "elt", ")", "value", "=", "new_list", "if", "not", "obj", ".", "is_tpl", "(", ")", ":", "while", "'+'", "in", "value", ":", "value", ".", "remove", "(", "'+'", ")", "return", "value", "# Ok, I do not have prop, Maybe my templates do?", "# Same story for plus", "# We reverse list, so that when looking for properties by inheritance,", "# the least defined template wins (if property is set).", "for", "t_id", "in", "obj", ".", "templates", ":", "template", "=", "self", ".", "templates", "[", "t_id", "]", "value", "=", "self", ".", "get_property_by_inheritance", "(", "template", ",", "prop", ")", "if", "value", "is", "None", "or", "(", "isinstance", "(", "value", ",", "list", ")", "and", "not", "value", ")", ":", "continue", "# If our 
template give us a '+' value, we continue the loop", "still_loop", "=", "False", "if", "isinstance", "(", "value", ",", "list", ")", "and", "value", "[", "0", "]", "==", "'+'", ":", "# Templates should keep their + inherited from their parents", "if", "not", "obj", ".", "is_tpl", "(", ")", ":", "value", "=", "list", "(", "value", ")", "value", "=", "value", "[", "1", ":", "]", "still_loop", "=", "True", "# Maybe in the previous loop, we set a value, use it too", "if", "hasattr", "(", "obj", ",", "prop", ")", ":", "# If the current value is a string, it will simplify the problem", "if", "isinstance", "(", "value", ",", "(", "list", ",", "string_types", ")", ")", "and", "value", "and", "value", "[", "0", "]", "==", "'+'", ":", "# In this case we can remove the + from our current", "# tpl because our value will be final", "new_val", "=", "list", "(", "getattr", "(", "obj", ",", "prop", ")", ")", "new_val", ".", "extend", "(", "value", "[", "1", ":", "]", ")", "value", "=", "new_val", "else", ":", "# If not, we should keep the + sign of need", "new_val", "=", "list", "(", "getattr", "(", "obj", ",", "prop", ")", ")", "new_val", ".", "extend", "(", "value", ")", "value", "=", "new_val", "# Ok, we can set it and uniquify a list if needed", "if", "isinstance", "(", "value", ",", "list", ")", ":", "# Get unique ordered list", "new_list", "=", "[", "]", "for", "elt", "in", "value", ":", "if", "elt", "not", "in", "new_list", ":", "new_list", ".", "append", "(", "elt", ")", "value", "=", "new_list", "if", "not", "obj", ".", "is_tpl", "(", ")", ":", "while", "'+'", "in", "value", ":", "value", ".", "remove", "(", "'+'", ")", "setattr", "(", "obj", ",", "prop", ",", "value", ")", "# If we only got some '+' values, we must still loop", "# for an end value without it", "if", "not", "still_loop", ":", "# And set my own value in the end if need", "if", "obj", ".", "has_plus", "(", "prop", ")", ":", "# value = list(getattr(obj, prop, []))", "value", "=", "list", "(", "value", ")", 
"value", ".", "extend", "(", "obj", ".", "get_plus_and_delete", "(", "prop", ")", ")", "# Template should keep their '+'", "if", "obj", ".", "is_tpl", "(", ")", "and", "value", "[", "0", "]", "!=", "'+'", ":", "value", ".", "insert", "(", "0", ",", "'+'", ")", "# Clean the returned value", "if", "isinstance", "(", "value", ",", "list", ")", ":", "# Get unique ordered list", "new_list", "=", "[", "]", "for", "elt", "in", "value", ":", "if", "elt", "not", "in", "new_list", ":", "new_list", ".", "append", "(", "elt", ")", "value", "=", "new_list", "if", "not", "obj", ".", "is_tpl", "(", ")", ":", "while", "'+'", "in", "value", ":", "value", ".", "remove", "(", "'+'", ")", "setattr", "(", "obj", ",", "prop", ",", "value", ")", "return", "value", "# Maybe templates only give us + values, so we didn't quit, but we already got a", "# self.prop value after all", "template_with_only_plus", "=", "hasattr", "(", "obj", ",", "prop", ")", "# I do not have endingprop, my templates too... Maybe a plus?", "# warning: if all my templates gave me '+' values, do not forgot to", "# add the already set self.prop value", "if", "obj", ".", "has_plus", "(", "prop", ")", ":", "if", "template_with_only_plus", ":", "value", "=", "list", "(", "getattr", "(", "obj", ",", "prop", ")", ")", "value", ".", "extend", "(", "obj", ".", "get_plus_and_delete", "(", "prop", ")", ")", "else", ":", "value", "=", "obj", ".", "get_plus_and_delete", "(", "prop", ")", "# Template should keep their '+' chain", "# We must say it's a '+' value, so our son will know that it must continue looping", "if", "obj", ".", "is_tpl", "(", ")", "and", "value", "!=", "[", "]", "and", "value", "[", "0", "]", "!=", "'+'", ":", "value", ".", "insert", "(", "0", ",", "'+'", ")", "# Clean the returned value", "if", "isinstance", "(", "value", ",", "list", ")", ":", "# Get unique ordered list", "new_list", "=", "[", "]", "for", "elt", "in", "value", ":", "if", "elt", "not", "in", "new_list", ":", "new_list", ".", "append", "(", 
"elt", ")", "value", "=", "new_list", "if", "not", "obj", ".", "is_tpl", "(", ")", ":", "while", "'+'", "in", "value", ":", "value", ".", "remove", "(", "'+'", ")", "setattr", "(", "obj", ",", "prop", ",", "value", ")", "return", "value", "# Ok so in the end, we give the value we got if we have one, or None", "# Not even a plus... so None :)", "return", "getattr", "(", "obj", ",", "prop", ",", "None", ")" ]
40.713376
15.375796
def dmrs_tikz_dependency(xs, **kwargs): """ Return a LaTeX document with each Xmrs in *xs* rendered as DMRSs. DMRSs use the `tikz-dependency` package for visualization. """ def link_label(link): return '{}/{}'.format(link.rargname or '', link.post) def label_edge(link): if link.post == H_POST and link.rargname == RSTR_ROLE: return 'rstr' elif link.post == EQ_POST: return 'eq' else: return 'arg' if isinstance(xs, Xmrs): xs = [xs] lines = """\\documentclass{standalone} \\usepackage{tikz-dependency} \\usepackage{relsize} %%% %%% style for dmrs graph %%% \\depstyle{dmrs}{edge unit distance=1.5ex, label style={above, scale=.9, opacity=0, text opacity=1}, baseline={([yshift=-0.7\\baselineskip]current bounding box.north)}} %%% set text opacity=0 to hide text, opacity = 0 to hide box \\depstyle{root}{edge unit distance=3ex, label style={opacity=1}} \\depstyle{arg}{edge above} \\depstyle{rstr}{edge below, dotted, label style={text opacity=1}} \\depstyle{eq}{edge below, label style={text opacity=1}} \\depstyle{icons}{edge below, dashed} \\providecommand{\\named}{} \\renewcommand{\\named}{named} %%% styles for predicates and roles (from mrs.sty) \\providecommand{\\spred}{} \\renewcommand{\\spred}[1]{\\mbox{\\textsf{#1}}} \\providecommand{\\srl}{} \\renewcommand{\\srl}[1]{\\mbox{\\textsf{\\smaller #1}}} %%% \\begin{document}""".split("\n") for ix, x in enumerate(xs): lines.append("%%%\n%%% {}\n%%%".format(ix+1)) lines.append("\\begin{dependency}[dmrs]") ns = nodes(x) ### predicates lines.append(" \\begin{deptext}[column sep=10pt]") for i, n in enumerate(ns): sep = "\\&" if (i < len(ns) - 1) else "\\\\" pred = _latex_escape(n.pred.short_form()) pred = "\\named{}" if pred == 'named' else pred if n.carg is not None: print(n.carg.strip('"')) pred += "\\smaller ({})".format(n.carg.strip('"')) lines.append(" \\spred{{{}}} {} % node {}".format( pred, sep, i+1)) lines.append(" \\end{deptext}") nodeidx = {n.nodeid: i+1 for i, n in enumerate(ns)} ### links for link in links(x): if 
link.start == 0: lines.append( ' \\deproot[root]{{{}}}{{{}}}'.format( nodeidx[link.end], '\\srl{TOP}' # _latex_escape('/' + link.post) ) ) else: lines.append(' \\depedge[{}]{{{}}}{{{}}}{{\\srl{{{}}}}}'.format( label_edge(link), nodeidx[link.start], nodeidx[link.end], _latex_escape(link_label(link)) )) ### placeholder for icons lines.append('% \\depedge[icons]{f}{t}{FOCUS}') lines.append('\\end{dependency}\n') lines.append('\\end{document}') return '\n'.join(lines)
[ "def", "dmrs_tikz_dependency", "(", "xs", ",", "*", "*", "kwargs", ")", ":", "def", "link_label", "(", "link", ")", ":", "return", "'{}/{}'", ".", "format", "(", "link", ".", "rargname", "or", "''", ",", "link", ".", "post", ")", "def", "label_edge", "(", "link", ")", ":", "if", "link", ".", "post", "==", "H_POST", "and", "link", ".", "rargname", "==", "RSTR_ROLE", ":", "return", "'rstr'", "elif", "link", ".", "post", "==", "EQ_POST", ":", "return", "'eq'", "else", ":", "return", "'arg'", "if", "isinstance", "(", "xs", ",", "Xmrs", ")", ":", "xs", "=", "[", "xs", "]", "lines", "=", "\"\"\"\\\\documentclass{standalone}\n\n\\\\usepackage{tikz-dependency}\n\\\\usepackage{relsize}\n\n%%%\n%%% style for dmrs graph\n%%%\n\\\\depstyle{dmrs}{edge unit distance=1.5ex, \n label style={above, scale=.9, opacity=0, text opacity=1},\n baseline={([yshift=-0.7\\\\baselineskip]current bounding box.north)}}\n%%% set text opacity=0 to hide text, opacity = 0 to hide box\n\\\\depstyle{root}{edge unit distance=3ex, label style={opacity=1}}\n\\\\depstyle{arg}{edge above}\n\\\\depstyle{rstr}{edge below, dotted, label style={text opacity=1}}\n\\\\depstyle{eq}{edge below, label style={text opacity=1}}\n\\\\depstyle{icons}{edge below, dashed}\n\\\\providecommand{\\\\named}{} \n\\\\renewcommand{\\\\named}{named}\n\n%%% styles for predicates and roles (from mrs.sty)\n\\\\providecommand{\\\\spred}{} \n\\\\renewcommand{\\\\spred}[1]{\\\\mbox{\\\\textsf{#1}}}\n\\\\providecommand{\\\\srl}{} \n\\\\renewcommand{\\\\srl}[1]{\\\\mbox{\\\\textsf{\\\\smaller #1}}}\n%%%\n\n\\\\begin{document}\"\"\"", ".", "split", "(", "\"\\n\"", ")", "for", "ix", ",", "x", "in", "enumerate", "(", "xs", ")", ":", "lines", ".", "append", "(", "\"%%%\\n%%% {}\\n%%%\"", ".", "format", "(", "ix", "+", "1", ")", ")", "lines", ".", "append", "(", "\"\\\\begin{dependency}[dmrs]\"", ")", "ns", "=", "nodes", "(", "x", ")", "### predicates", "lines", ".", "append", "(", "\" \\\\begin{deptext}[column sep=10pt]\"", ")", "for", 
"i", ",", "n", "in", "enumerate", "(", "ns", ")", ":", "sep", "=", "\"\\\\&\"", "if", "(", "i", "<", "len", "(", "ns", ")", "-", "1", ")", "else", "\"\\\\\\\\\"", "pred", "=", "_latex_escape", "(", "n", ".", "pred", ".", "short_form", "(", ")", ")", "pred", "=", "\"\\\\named{}\"", "if", "pred", "==", "'named'", "else", "pred", "if", "n", ".", "carg", "is", "not", "None", ":", "print", "(", "n", ".", "carg", ".", "strip", "(", "'\"'", ")", ")", "pred", "+=", "\"\\\\smaller ({})\"", ".", "format", "(", "n", ".", "carg", ".", "strip", "(", "'\"'", ")", ")", "lines", ".", "append", "(", "\" \\\\spred{{{}}} {} % node {}\"", ".", "format", "(", "pred", ",", "sep", ",", "i", "+", "1", ")", ")", "lines", ".", "append", "(", "\" \\\\end{deptext}\"", ")", "nodeidx", "=", "{", "n", ".", "nodeid", ":", "i", "+", "1", "for", "i", ",", "n", "in", "enumerate", "(", "ns", ")", "}", "### links", "for", "link", "in", "links", "(", "x", ")", ":", "if", "link", ".", "start", "==", "0", ":", "lines", ".", "append", "(", "' \\\\deproot[root]{{{}}}{{{}}}'", ".", "format", "(", "nodeidx", "[", "link", ".", "end", "]", ",", "'\\\\srl{TOP}'", "# _latex_escape('/' + link.post)", ")", ")", "else", ":", "lines", ".", "append", "(", "' \\\\depedge[{}]{{{}}}{{{}}}{{\\\\srl{{{}}}}}'", ".", "format", "(", "label_edge", "(", "link", ")", ",", "nodeidx", "[", "link", ".", "start", "]", ",", "nodeidx", "[", "link", ".", "end", "]", ",", "_latex_escape", "(", "link_label", "(", "link", ")", ")", ")", ")", "### placeholder for icons", "lines", ".", "append", "(", "'% \\\\depedge[icons]{f}{t}{FOCUS}'", ")", "lines", ".", "append", "(", "'\\\\end{dependency}\\n'", ")", "lines", ".", "append", "(", "'\\\\end{document}'", ")", "return", "'\\n'", ".", "join", "(", "lines", ")" ]
34.517241
17.413793
def default_route_options(): """ Default callback for OPTIONS request :rtype: Response """ response_obj = OrderedDict() response_obj["status"] = True response_obj["data"] = "Ok" return Response(response_obj, content_type="application/json", charset="utf-8")
[ "def", "default_route_options", "(", ")", ":", "response_obj", "=", "OrderedDict", "(", ")", "response_obj", "[", "\"status\"", "]", "=", "True", "response_obj", "[", "\"data\"", "]", "=", "\"Ok\"", "return", "Response", "(", "response_obj", ",", "content_type", "=", "\"application/json\"", ",", "charset", "=", "\"utf-8\"", ")" ]
28.454545
15.545455
def xor_(*validation_func # type: ValidationFuncs ): # type: (...) -> Callable """ A 'xor' validation function: returns `True` if exactly one of the provided validators returns `True`. All exceptions will be silently caught. In case of failure, a global `XorTooManySuccess` or `AllValidatorsFailed` will be raised, together with details about the various validation results. :param validation_func: the base validation function or list of base validation functions to use. A callable, a tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists are supported and indicate an implicit `and_` (such as the main list). Tuples indicate an implicit `_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead of callables, they will be transformed to functions automatically. :return: """ validation_func = _process_validation_function_s(list(validation_func), auto_and_wrapper=False) if len(validation_func) == 1: return validation_func[0] # simplification for single validation function case else: def xor_v_(x): ok_validators = [] for val_func in validation_func: # noinspection PyBroadException try: res = val_func(x) if result_is_success(res): ok_validators.append(val_func) except Exception: pass # return if were happy or not if len(ok_validators) == 1: # one unique validation function happy: success return True elif len(ok_validators) > 1: # several validation_func happy : fail raise XorTooManySuccess(validation_func, x) else: # no validation function happy, fail raise AllValidatorsFailed(validation_func, x) xor_v_.__name__ = 'xor({})'.format(get_callable_names(validation_func)) return xor_v_
[ "def", "xor_", "(", "*", "validation_func", "# type: ValidationFuncs", ")", ":", "# type: (...) -> Callable", "validation_func", "=", "_process_validation_function_s", "(", "list", "(", "validation_func", ")", ",", "auto_and_wrapper", "=", "False", ")", "if", "len", "(", "validation_func", ")", "==", "1", ":", "return", "validation_func", "[", "0", "]", "# simplification for single validation function case", "else", ":", "def", "xor_v_", "(", "x", ")", ":", "ok_validators", "=", "[", "]", "for", "val_func", "in", "validation_func", ":", "# noinspection PyBroadException", "try", ":", "res", "=", "val_func", "(", "x", ")", "if", "result_is_success", "(", "res", ")", ":", "ok_validators", ".", "append", "(", "val_func", ")", "except", "Exception", ":", "pass", "# return if were happy or not", "if", "len", "(", "ok_validators", ")", "==", "1", ":", "# one unique validation function happy: success", "return", "True", "elif", "len", "(", "ok_validators", ")", ">", "1", ":", "# several validation_func happy : fail", "raise", "XorTooManySuccess", "(", "validation_func", ",", "x", ")", "else", ":", "# no validation function happy, fail", "raise", "AllValidatorsFailed", "(", "validation_func", ",", "x", ")", "xor_v_", ".", "__name__", "=", "'xor({})'", ".", "format", "(", "get_callable_names", "(", "validation_func", ")", ")", "return", "xor_v_" ]
44.085106
27.446809
def DeleteInstance(self, InstanceName, **extra): # pylint: disable=invalid-name """ Delete an instance. This method performs the DeleteInstance operation (see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all methods performing such operations. If the operation succeeds, this method returns. Otherwise, this method raises an exception. Parameters: InstanceName (:class:`~pywbem.CIMInstanceName`): The instance path of the instance to be deleted. If this object does not specify a namespace, the default namespace of the connection is used. Its `host` attribute will be ignored. **extra : Additional keyword arguments are passed as additional operation parameters to the WBEM server. Note that :term:`DSP0200` does not define any additional parameters for this operation. Raises: Exceptions described in :class:`~pywbem.WBEMConnection`. """ exc = None method_name = 'DeleteInstance' if self._operation_recorders: self.operation_recorder_reset() self.operation_recorder_stage_pywbem_args( method=method_name, InstanceName=InstanceName, **extra) try: stats = self.statistics.start_timer(method_name) namespace = self._iparam_namespace_from_objectname( InstanceName, 'InstanceName') instancename = self._iparam_instancename(InstanceName) self._imethodcall( method_name, namespace, InstanceName=instancename, has_return_value=False, **extra) return except (CIMXMLParseError, XMLParseError) as exce: exce.request_data = self.last_raw_request exce.response_data = self.last_raw_reply exc = exce raise except Exception as exce: exc = exce raise finally: self._last_operation_time = stats.stop_timer( self.last_request_len, self.last_reply_len, self.last_server_response_time, exc) if self._operation_recorders: self.operation_recorder_stage_result(None, exc)
[ "def", "DeleteInstance", "(", "self", ",", "InstanceName", ",", "*", "*", "extra", ")", ":", "# pylint: disable=invalid-name", "exc", "=", "None", "method_name", "=", "'DeleteInstance'", "if", "self", ".", "_operation_recorders", ":", "self", ".", "operation_recorder_reset", "(", ")", "self", ".", "operation_recorder_stage_pywbem_args", "(", "method", "=", "method_name", ",", "InstanceName", "=", "InstanceName", ",", "*", "*", "extra", ")", "try", ":", "stats", "=", "self", ".", "statistics", ".", "start_timer", "(", "method_name", ")", "namespace", "=", "self", ".", "_iparam_namespace_from_objectname", "(", "InstanceName", ",", "'InstanceName'", ")", "instancename", "=", "self", ".", "_iparam_instancename", "(", "InstanceName", ")", "self", ".", "_imethodcall", "(", "method_name", ",", "namespace", ",", "InstanceName", "=", "instancename", ",", "has_return_value", "=", "False", ",", "*", "*", "extra", ")", "return", "except", "(", "CIMXMLParseError", ",", "XMLParseError", ")", "as", "exce", ":", "exce", ".", "request_data", "=", "self", ".", "last_raw_request", "exce", ".", "response_data", "=", "self", ".", "last_raw_reply", "exc", "=", "exce", "raise", "except", "Exception", "as", "exce", ":", "exc", "=", "exce", "raise", "finally", ":", "self", ".", "_last_operation_time", "=", "stats", ".", "stop_timer", "(", "self", ".", "last_request_len", ",", "self", ".", "last_reply_len", ",", "self", ".", "last_server_response_time", ",", "exc", ")", "if", "self", ".", "_operation_recorders", ":", "self", ".", "operation_recorder_stage_result", "(", "None", ",", "exc", ")" ]
33.6
19.485714
def register_vm(datacenter, name, vmx_path, resourcepool_object, host_object=None): ''' Registers a virtual machine to the inventory with the given vmx file, on success it returns the vim.VirtualMachine managed object reference datacenter Datacenter object of the virtual machine, vim.Datacenter object name Name of the virtual machine vmx_path: Full path to the vmx file, datastore name should be included resourcepool Placement resource pool of the virtual machine, vim.ResourcePool object host Placement host of the virtual machine, vim.HostSystem object ''' try: if host_object: task = datacenter.vmFolder.RegisterVM_Task(path=vmx_path, name=name, asTemplate=False, host=host_object, pool=resourcepool_object) else: task = datacenter.vmFolder.RegisterVM_Task(path=vmx_path, name=name, asTemplate=False, pool=resourcepool_object) except vim.fault.NoPermission as exc: log.exception(exc) raise salt.exceptions.VMwareApiError( 'Not enough permissions. Required privilege: ' '{}'.format(exc.privilegeId)) except vim.fault.VimFault as exc: log.exception(exc) raise salt.exceptions.VMwareApiError(exc.msg) except vmodl.RuntimeFault as exc: log.exception(exc) raise salt.exceptions.VMwareRuntimeError(exc.msg) try: vm_ref = wait_for_task(task, name, 'RegisterVM Task') except salt.exceptions.VMwareFileNotFoundError as exc: raise salt.exceptions.VMwareVmRegisterError( 'An error occurred during registration operation, the ' 'configuration file was not found: {0}'.format(exc)) return vm_ref
[ "def", "register_vm", "(", "datacenter", ",", "name", ",", "vmx_path", ",", "resourcepool_object", ",", "host_object", "=", "None", ")", ":", "try", ":", "if", "host_object", ":", "task", "=", "datacenter", ".", "vmFolder", ".", "RegisterVM_Task", "(", "path", "=", "vmx_path", ",", "name", "=", "name", ",", "asTemplate", "=", "False", ",", "host", "=", "host_object", ",", "pool", "=", "resourcepool_object", ")", "else", ":", "task", "=", "datacenter", ".", "vmFolder", ".", "RegisterVM_Task", "(", "path", "=", "vmx_path", ",", "name", "=", "name", ",", "asTemplate", "=", "False", ",", "pool", "=", "resourcepool_object", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "try", ":", "vm_ref", "=", "wait_for_task", "(", "task", ",", "name", ",", "'RegisterVM Task'", ")", "except", "salt", ".", "exceptions", ".", "VMwareFileNotFoundError", "as", "exc", ":", "raise", "salt", ".", "exceptions", ".", "VMwareVmRegisterError", "(", "'An error occurred during registration operation, the '", "'configuration file was not found: {0}'", ".", "format", "(", "exc", ")", ")", "return", "vm_ref" ]
41.1875
25.8125
def get_field_to_observations_map(generator, query_for_tag=''): """Return a field to `Observations` dict for the event generator. Args: generator: A generator over event protos. query_for_tag: A string that if specified, only create observations for events with this tag name. Returns: A dict mapping keys in `TRACKED_FIELDS` to an `Observation` list. """ def increment(stat, event, tag=''): assert stat in TRACKED_FIELDS field_to_obs[stat].append(Observation(step=event.step, wall_time=event.wall_time, tag=tag)._asdict()) field_to_obs = dict([(t, []) for t in TRACKED_FIELDS]) for event in generator: ## Process the event if event.HasField('graph_def') and (not query_for_tag): increment('graph', event) if event.HasField('session_log') and (not query_for_tag): status = event.session_log.status if status == event_pb2.SessionLog.START: increment('sessionlog:start', event) elif status == event_pb2.SessionLog.STOP: increment('sessionlog:stop', event) elif status == event_pb2.SessionLog.CHECKPOINT: increment('sessionlog:checkpoint', event) elif event.HasField('summary'): for value in event.summary.value: if query_for_tag and value.tag != query_for_tag: continue for proto_name, display_name in SUMMARY_TYPE_TO_FIELD.items(): if value.HasField(proto_name): increment(display_name, event, value.tag) return field_to_obs
[ "def", "get_field_to_observations_map", "(", "generator", ",", "query_for_tag", "=", "''", ")", ":", "def", "increment", "(", "stat", ",", "event", ",", "tag", "=", "''", ")", ":", "assert", "stat", "in", "TRACKED_FIELDS", "field_to_obs", "[", "stat", "]", ".", "append", "(", "Observation", "(", "step", "=", "event", ".", "step", ",", "wall_time", "=", "event", ".", "wall_time", ",", "tag", "=", "tag", ")", ".", "_asdict", "(", ")", ")", "field_to_obs", "=", "dict", "(", "[", "(", "t", ",", "[", "]", ")", "for", "t", "in", "TRACKED_FIELDS", "]", ")", "for", "event", "in", "generator", ":", "## Process the event", "if", "event", ".", "HasField", "(", "'graph_def'", ")", "and", "(", "not", "query_for_tag", ")", ":", "increment", "(", "'graph'", ",", "event", ")", "if", "event", ".", "HasField", "(", "'session_log'", ")", "and", "(", "not", "query_for_tag", ")", ":", "status", "=", "event", ".", "session_log", ".", "status", "if", "status", "==", "event_pb2", ".", "SessionLog", ".", "START", ":", "increment", "(", "'sessionlog:start'", ",", "event", ")", "elif", "status", "==", "event_pb2", ".", "SessionLog", ".", "STOP", ":", "increment", "(", "'sessionlog:stop'", ",", "event", ")", "elif", "status", "==", "event_pb2", ".", "SessionLog", ".", "CHECKPOINT", ":", "increment", "(", "'sessionlog:checkpoint'", ",", "event", ")", "elif", "event", ".", "HasField", "(", "'summary'", ")", ":", "for", "value", "in", "event", ".", "summary", ".", "value", ":", "if", "query_for_tag", "and", "value", ".", "tag", "!=", "query_for_tag", ":", "continue", "for", "proto_name", ",", "display_name", "in", "SUMMARY_TYPE_TO_FIELD", ".", "items", "(", ")", ":", "if", "value", ".", "HasField", "(", "proto_name", ")", ":", "increment", "(", "display_name", ",", "event", ",", "value", ".", "tag", ")", "return", "field_to_obs" ]
37.536585
17.682927
def ParseOptions(cls, options, config_object, category=None, names=None): """Parses and validates arguments using the appropriate helpers. Args: options (argparse.Namespace): parser options. config_object (object): object to be configured by an argument helper. category (Optional[str]): category of helpers to apply to the group, such as storage, output, where None will apply the arguments to all helpers. The category can be used to add arguments to a specific group of registered helpers. names (Optional[list[str]]): names of argument helpers to apply, where None will apply the arguments to all helpers. """ for helper_name, helper_class in cls._helper_classes.items(): if ((category and helper_class.CATEGORY != category) or (names and helper_name not in names)): continue try: helper_class.ParseOptions(options, config_object) except errors.BadConfigObject: pass
[ "def", "ParseOptions", "(", "cls", ",", "options", ",", "config_object", ",", "category", "=", "None", ",", "names", "=", "None", ")", ":", "for", "helper_name", ",", "helper_class", "in", "cls", ".", "_helper_classes", ".", "items", "(", ")", ":", "if", "(", "(", "category", "and", "helper_class", ".", "CATEGORY", "!=", "category", ")", "or", "(", "names", "and", "helper_name", "not", "in", "names", ")", ")", ":", "continue", "try", ":", "helper_class", ".", "ParseOptions", "(", "options", ",", "config_object", ")", "except", "errors", ".", "BadConfigObject", ":", "pass" ]
44.681818
22.818182
def js2str(js, sort_keys=True, indent=4): """Encode js to nicely formatted human readable string. (utf-8 encoding) Usage:: >>> from weatherlab.lib.dataIO.js import js2str >>> s = js2str({"a": 1, "b": 2}) >>> print(s) { "a": 1, "b": 2 } **中文文档** 将可Json化的Python对象转化成格式化的字符串。 """ return json.dumps(js, sort_keys=sort_keys, indent=indent, separators=(",", ": "))
[ "def", "js2str", "(", "js", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ")", ":", "return", "json", ".", "dumps", "(", "js", ",", "sort_keys", "=", "sort_keys", ",", "indent", "=", "indent", ",", "separators", "=", "(", "\",\"", ",", "\": \"", ")", ")" ]
23.894737
20.684211
def read_stats(self): """ Read current ports statistics from chassis. :return: dictionary {port name {group name, {stat name: stat value}}} """ self.statistics = TgnObjectsDict() for port in self.session.ports.values(): self.statistics[port] = port.read_port_stats() return self.statistics
[ "def", "read_stats", "(", "self", ")", ":", "self", ".", "statistics", "=", "TgnObjectsDict", "(", ")", "for", "port", "in", "self", ".", "session", ".", "ports", ".", "values", "(", ")", ":", "self", ".", "statistics", "[", "port", "]", "=", "port", ".", "read_port_stats", "(", ")", "return", "self", ".", "statistics" ]
34.2
17.4
def probability_lt(self, x): """ Returns the probability of a random variable being less than the given value. """ if self.mean is None: return return normdist(x=x, mu=self.mean, sigma=self.standard_deviation)
[ "def", "probability_lt", "(", "self", ",", "x", ")", ":", "if", "self", ".", "mean", "is", "None", ":", "return", "return", "normdist", "(", "x", "=", "x", ",", "mu", "=", "self", ".", "mean", ",", "sigma", "=", "self", ".", "standard_deviation", ")" ]
32.75
16.25
def create_manifest_from_s3_files(self): """ To create a manifest db for the current :return: """ for k in self.s3.list_objects(Bucket=self.sitename)['Contents']: key = k["Key"] files = [] if key not in [self.manifest_file]: files.append(key) self._set_manifest_data(files)
[ "def", "create_manifest_from_s3_files", "(", "self", ")", ":", "for", "k", "in", "self", ".", "s3", ".", "list_objects", "(", "Bucket", "=", "self", ".", "sitename", ")", "[", "'Contents'", "]", ":", "key", "=", "k", "[", "\"Key\"", "]", "files", "=", "[", "]", "if", "key", "not", "in", "[", "self", ".", "manifest_file", "]", ":", "files", ".", "append", "(", "key", ")", "self", ".", "_set_manifest_data", "(", "files", ")" ]
33.363636
10.090909
def resolve_command(self, ctx, args): """ Overrides clicks ``resolve_command`` method and appends *Did you mean ...* suggestions to the raised exception message. """ original_cmd_name = click.utils.make_str(args[0]) try: return super(DYMMixin, self).resolve_command(ctx, args) except click.exceptions.UsageError as error: error_msg = str(error) matches = difflib.get_close_matches(original_cmd_name, self.list_commands(ctx), self.max_suggestions, self.cutoff) if matches: error_msg += '\n\nDid you mean one of these?\n %s' % '\n '.join(matches) # pylint: disable=line-too-long raise click.exceptions.UsageError(error_msg, error.ctx)
[ "def", "resolve_command", "(", "self", ",", "ctx", ",", "args", ")", ":", "original_cmd_name", "=", "click", ".", "utils", ".", "make_str", "(", "args", "[", "0", "]", ")", "try", ":", "return", "super", "(", "DYMMixin", ",", "self", ")", ".", "resolve_command", "(", "ctx", ",", "args", ")", "except", "click", ".", "exceptions", ".", "UsageError", "as", "error", ":", "error_msg", "=", "str", "(", "error", ")", "matches", "=", "difflib", ".", "get_close_matches", "(", "original_cmd_name", ",", "self", ".", "list_commands", "(", "ctx", ")", ",", "self", ".", "max_suggestions", ",", "self", ".", "cutoff", ")", "if", "matches", ":", "error_msg", "+=", "'\\n\\nDid you mean one of these?\\n %s'", "%", "'\\n '", ".", "join", "(", "matches", ")", "# pylint: disable=line-too-long", "raise", "click", ".", "exceptions", ".", "UsageError", "(", "error_msg", ",", "error", ".", "ctx", ")" ]
45.111111
23.222222
def set_common_datas(self, element, name, datas): """Populated common data for an element from dictionnary datas """ element.name = str(name) if "description" in datas: element.description = str(datas["description"]).strip() if isinstance(element, Sampleable) and element.sample is None and "sample" in datas: element.sample = str(datas["sample"]).strip() if isinstance(element, Displayable): if "display" in datas: element.display = to_boolean(datas["display"]) if "label" in datas: element.label = datas["label"] else: element.label = element.name
[ "def", "set_common_datas", "(", "self", ",", "element", ",", "name", ",", "datas", ")", ":", "element", ".", "name", "=", "str", "(", "name", ")", "if", "\"description\"", "in", "datas", ":", "element", ".", "description", "=", "str", "(", "datas", "[", "\"description\"", "]", ")", ".", "strip", "(", ")", "if", "isinstance", "(", "element", ",", "Sampleable", ")", "and", "element", ".", "sample", "is", "None", "and", "\"sample\"", "in", "datas", ":", "element", ".", "sample", "=", "str", "(", "datas", "[", "\"sample\"", "]", ")", ".", "strip", "(", ")", "if", "isinstance", "(", "element", ",", "Displayable", ")", ":", "if", "\"display\"", "in", "datas", ":", "element", ".", "display", "=", "to_boolean", "(", "datas", "[", "\"display\"", "]", ")", "if", "\"label\"", "in", "datas", ":", "element", ".", "label", "=", "datas", "[", "\"label\"", "]", "else", ":", "element", ".", "label", "=", "element", ".", "name" ]
38.388889
17.333333
def ls_dir(dirname): """Returns files and subdirectories within a given directory. Returns a pair of lists, containing the names of directories and files in ``dirname``. Raises ------ OSError : Accessing the given directory path failed Parameters ---------- dirname : str The path of the directory to be listed """ ls = os.listdir(dirname) files = [p for p in ls if os.path.isfile(os.path.join(dirname, p))] dirs = [p for p in ls if os.path.isdir(os.path.join(dirname, p))] return files, dirs
[ "def", "ls_dir", "(", "dirname", ")", ":", "ls", "=", "os", ".", "listdir", "(", "dirname", ")", "files", "=", "[", "p", "for", "p", "in", "ls", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "p", ")", ")", "]", "dirs", "=", "[", "p", "for", "p", "in", "ls", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "p", ")", ")", "]", "return", "files", ",", "dirs" ]
28.473684
23.210526
def bb(self,*args,**kwargs): """ NAME: bb PURPOSE: return Galactic latitude INPUT: t - (optional) time at which to get bb (can be Quantity) obs=[X,Y,Z] - (optional) position of observer (in kpc; entries can be Quantity) (default=[8.0,0.,0.]) OR Orbit object that corresponds to the orbit of the observer Y is ignored and always assumed to be zero ro= (Object-wide default) physical scale for distances to use to convert (can be Quantity) OUTPUT: b(t) in deg HISTORY: 2011-02-23 - Written - Bovy (NYU) """ out= self._orb.bb(*args,**kwargs) if len(out) == 1: return out[0] else: return out
[ "def", "bb", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "out", "=", "self", ".", "_orb", ".", "bb", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "len", "(", "out", ")", "==", "1", ":", "return", "out", "[", "0", "]", "else", ":", "return", "out" ]
23.375
28
def nextversion(current_version): """Returns incremented module version number. :param current_version: version string to increment :returns: Next version string (PEP 386 compatible) if possible. If impossible (since `current_version` is too far from PEP 386), `None` is returned. """ norm_ver = verlib.suggest_normalized_version(current_version) if norm_ver is None: return None norm_ver = verlib.NormalizedVersion(norm_ver) # increment last version figure parts = norm_ver.parts # see comments of `verlib.py` to get the idea of `parts` assert(len(parts) == 3) if len(parts[2]) > 1: # postdev if parts[2][-1] == 'f': # when `post` exists but `dev` doesn't parts = _mk_incremented_parts(parts, part_idx=2, in_part_idx=-2, incval=1) else: # when both `post` and `dev` exist parts = _mk_incremented_parts(parts, part_idx=2, in_part_idx=-1, incval=1) elif len(parts[1]) > 1: # prerel parts = _mk_incremented_parts(parts, part_idx=1, in_part_idx=-1, incval=1) else: # version & extraversion parts = _mk_incremented_parts(parts, part_idx=0, in_part_idx=-1, incval=1) norm_ver.parts = parts return str(norm_ver)
[ "def", "nextversion", "(", "current_version", ")", ":", "norm_ver", "=", "verlib", ".", "suggest_normalized_version", "(", "current_version", ")", "if", "norm_ver", "is", "None", ":", "return", "None", "norm_ver", "=", "verlib", ".", "NormalizedVersion", "(", "norm_ver", ")", "# increment last version figure", "parts", "=", "norm_ver", ".", "parts", "# see comments of `verlib.py` to get the idea of `parts`", "assert", "(", "len", "(", "parts", ")", "==", "3", ")", "if", "len", "(", "parts", "[", "2", "]", ")", ">", "1", ":", "# postdev", "if", "parts", "[", "2", "]", "[", "-", "1", "]", "==", "'f'", ":", "# when `post` exists but `dev` doesn't", "parts", "=", "_mk_incremented_parts", "(", "parts", ",", "part_idx", "=", "2", ",", "in_part_idx", "=", "-", "2", ",", "incval", "=", "1", ")", "else", ":", "# when both `post` and `dev` exist", "parts", "=", "_mk_incremented_parts", "(", "parts", ",", "part_idx", "=", "2", ",", "in_part_idx", "=", "-", "1", ",", "incval", "=", "1", ")", "elif", "len", "(", "parts", "[", "1", "]", ")", ">", "1", ":", "# prerel", "parts", "=", "_mk_incremented_parts", "(", "parts", ",", "part_idx", "=", "1", ",", "in_part_idx", "=", "-", "1", ",", "incval", "=", "1", ")", "else", ":", "# version & extraversion", "parts", "=", "_mk_incremented_parts", "(", "parts", ",", "part_idx", "=", "0", ",", "in_part_idx", "=", "-", "1", ",", "incval", "=", "1", ")", "norm_ver", ".", "parts", "=", "parts", "return", "str", "(", "norm_ver", ")" ]
47.142857
23.5
def markdown(text, mode='', context='', raw=False): """Render an arbitrary markdown document. :param str text: (required), the text of the document to render :param str mode: (optional), 'markdown' or 'gfm' :param str context: (optional), only important when using mode 'gfm', this is the repository to use as the context for the rendering :param bool raw: (optional), renders a document like a README.md, no gfm, no context :returns: str -- HTML formatted text """ return gh.markdown(text, mode, context, raw)
[ "def", "markdown", "(", "text", ",", "mode", "=", "''", ",", "context", "=", "''", ",", "raw", "=", "False", ")", ":", "return", "gh", ".", "markdown", "(", "text", ",", "mode", ",", "context", ",", "raw", ")" ]
42.153846
20
def set_language(self, language):
    """
    Apply *language* to every text fragment in this container.

    :param language: the language of the text fragments
    :type  language: :class:`~aeneas.language.Language`
    """
    self.log([u"Setting language: '%s'", language])
    for frag in self.fragments:
        frag.language = language
[ "def", "set_language", "(", "self", ",", "language", ")", ":", "self", ".", "log", "(", "[", "u\"Setting language: '%s'\"", ",", "language", "]", ")", "for", "fragment", "in", "self", ".", "fragments", ":", "fragment", ".", "language", "=", "language" ]
36.5
11.9
def get_recent_repeated_responses(chatbot, conversation, sample=10, threshold=3, quantity=3):
    """
    Filter out responses the bot has given too often recently, to keep
    it from repeating statements within a conversation.
    """
    from collections import Counter

    # Pull the `sample` most recent statements from this conversation.
    recent = list(chatbot.storage.filter(
        conversation=conversation,
        order_by=['id']
    ))[-sample:]

    # Tally how often each response text occurs in the recent window.
    tallies = Counter(statement.text for statement in recent)

    # Keep only the `quantity` most frequent texts that meet the
    # repetition threshold.
    return [
        text for text, count in tallies.most_common(quantity)
        if count >= threshold
    ]
[ "def", "get_recent_repeated_responses", "(", "chatbot", ",", "conversation", ",", "sample", "=", "10", ",", "threshold", "=", "3", ",", "quantity", "=", "3", ")", ":", "from", "collections", "import", "Counter", "# Get the most recent statements from the conversation", "conversation_statements", "=", "list", "(", "chatbot", ".", "storage", ".", "filter", "(", "conversation", "=", "conversation", ",", "order_by", "=", "[", "'id'", "]", ")", ")", "[", "sample", "*", "-", "1", ":", "]", "text_of_recent_responses", "=", "[", "statement", ".", "text", "for", "statement", "in", "conversation_statements", "]", "counter", "=", "Counter", "(", "text_of_recent_responses", ")", "# Find the n most common responses from the conversation", "most_common", "=", "counter", ".", "most_common", "(", "quantity", ")", "return", "[", "counted", "[", "0", "]", "for", "counted", "in", "most_common", "if", "counted", "[", "1", "]", ">=", "threshold", "]" ]
31.576923
21.807692
def refresh_win(self, resizing=False):
    """
    Redraw the window: repaint the background, redraw the border and
    the centered title, then refresh the current selection.

    :param resizing: passed through to ``refresh_selection`` so it can
        adjust its behaviour while the terminal is being resized.
    """
    win = self._win
    win.bkgdset(' ', curses.color_pair(3))
    win.erase()
    win.box()
    # Center the title on the top border line.
    win.addstr(0, int((self.maxX - len(self._title)) / 2),
               self._title, curses.color_pair(4))
    self.refresh_selection(resizing)
[ "def", "refresh_win", "(", "self", ",", "resizing", "=", "False", ")", ":", "#self.init_window(set_encoding)", "self", ".", "_win", ".", "bkgdset", "(", "' '", ",", "curses", ".", "color_pair", "(", "3", ")", ")", "self", ".", "_win", ".", "erase", "(", ")", "self", ".", "_win", ".", "box", "(", ")", "self", ".", "_win", ".", "addstr", "(", "0", ",", "int", "(", "(", "self", ".", "maxX", "-", "len", "(", "self", ".", "_title", ")", ")", "/", "2", ")", ",", "self", ".", "_title", ",", "curses", ".", "color_pair", "(", "4", ")", ")", "self", ".", "refresh_selection", "(", "resizing", ")" ]
36.727273
8.636364
def detach(self, overlay):
    """
    Give each animation a unique, mutable layout so they can run
    independently.
    """
    # See #868
    first = True
    for animation in self.animations:
        animation.layout = animation.layout.clone()
        # In overlay mode, only the first animation clears the display.
        if overlay and not first:
            animation.preclear = False
        first = False
[ "def", "detach", "(", "self", ",", "overlay", ")", ":", "# See #868", "for", "i", ",", "a", "in", "enumerate", "(", "self", ".", "animations", ")", ":", "a", ".", "layout", "=", "a", ".", "layout", ".", "clone", "(", ")", "if", "overlay", "and", "i", ":", "a", ".", "preclear", "=", "False" ]
30.5
10.7
def gen_tmp_file(i):
    """
    Generate a unique temporary file name.

    Input:  {
              (suffix)     - temp file suffix
              (prefix)     - temp file prefix
              (remove_dir) - if 'yes', return only the base name
                             (directory component stripped)
            }

    Output: {
              return    - return code = 0 if successful, > 0 if error
              (error)   - error text if return > 0
              file_name - temp file name
            }
    """
    xs = i.get('suffix', '')
    xp = i.get('prefix', '')

    import tempfile

    # mkstemp creates the file to guarantee uniqueness; close and
    # remove it so only the name remains for the caller to reuse.
    # NOTE(review): this is inherently racy — another process could
    # claim the same name after removal.
    fd, fn = tempfile.mkstemp(suffix=xs, prefix=xp)
    os.close(fd)
    os.remove(fn)

    if i.get('remove_dir', '') == 'yes':
        fn = os.path.basename(fn)

    return {'return': 0, 'file_name': fn}
[ "def", "gen_tmp_file", "(", "i", ")", ":", "xs", "=", "i", ".", "get", "(", "'suffix'", ",", "''", ")", "xp", "=", "i", ".", "get", "(", "'prefix'", ",", "''", ")", "s", "=", "i", ".", "get", "(", "'string'", ",", "''", ")", "import", "tempfile", "fd", ",", "fn", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "xs", ",", "prefix", "=", "xp", ")", "os", ".", "close", "(", "fd", ")", "os", ".", "remove", "(", "fn", ")", "if", "i", ".", "get", "(", "'remove_dir'", ",", "''", ")", "==", "'yes'", ":", "fn", "=", "os", ".", "path", ".", "basename", "(", "fn", ")", "return", "{", "'return'", ":", "0", ",", "'file_name'", ":", "fn", "}" ]
23.16129
19.870968
async def get_entity_by_id(self, get_entity_by_id_request):
    """Look up one or more user entities by ID.

    Phone-number searches only match entities whose number is already
    in your contacts (and not always even then), and cannot be used to
    find Google Voice contacts.
    """
    resp = hangouts_pb2.GetEntityByIdResponse()
    await self._pb_request('contacts/getentitybyid',
                           get_entity_by_id_request, resp)
    return resp
[ "async", "def", "get_entity_by_id", "(", "self", ",", "get_entity_by_id_request", ")", ":", "response", "=", "hangouts_pb2", ".", "GetEntityByIdResponse", "(", ")", "await", "self", ".", "_pb_request", "(", "'contacts/getentitybyid'", ",", "get_entity_by_id_request", ",", "response", ")", "return", "response" ]
45.636364
19.181818
def _retry(function): """ Internal mechanism to try to send data to multiple Solr Hosts if the query fails on the first one. """ def inner(self, **kwargs): last_exception = None #for host in self.router.get_hosts(**kwargs): for host in self.host: try: return function(self, host, **kwargs) except SolrError as e: self.logger.exception(e) raise except ConnectionError as e: self.logger.exception("Tried connecting to Solr, but couldn't because of the following exception.") if '401' in e.__str__(): raise last_exception = e # raise the last exception after contacting all hosts instead of returning None if last_exception is not None: raise last_exception return inner
[ "def", "_retry", "(", "function", ")", ":", "def", "inner", "(", "self", ",", "*", "*", "kwargs", ")", ":", "last_exception", "=", "None", "#for host in self.router.get_hosts(**kwargs):", "for", "host", "in", "self", ".", "host", ":", "try", ":", "return", "function", "(", "self", ",", "host", ",", "*", "*", "kwargs", ")", "except", "SolrError", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "raise", "except", "ConnectionError", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "\"Tried connecting to Solr, but couldn't because of the following exception.\"", ")", "if", "'401'", "in", "e", ".", "__str__", "(", ")", ":", "raise", "last_exception", "=", "e", "# raise the last exception after contacting all hosts instead of returning None", "if", "last_exception", "is", "not", "None", ":", "raise", "last_exception", "return", "inner" ]
40.041667
15.125
def efficiency_capacity_demand_difference(slots, events, X, **kwargs):
    """
    Total difference between each event's demand and the capacity of
    the slot it is scheduled in, weighted by the schedule matrix X.
    """
    return sum(
        (event.demand - slot.capacity) * X[row, col]
        for row, event in enumerate(events)
        for col, slot in enumerate(slots)
    )
[ "def", "efficiency_capacity_demand_difference", "(", "slots", ",", "events", ",", "X", ",", "*", "*", "kwargs", ")", ":", "overflow", "=", "0", "for", "row", ",", "event", "in", "enumerate", "(", "events", ")", ":", "for", "col", ",", "slot", "in", "enumerate", "(", "slots", ")", ":", "overflow", "+=", "(", "event", ".", "demand", "-", "slot", ".", "capacity", ")", "*", "X", "[", "row", ",", "col", "]", "return", "overflow" ]
39.3
14.9
def splittable(src):
    """
    :returns: True if the source is splittable, False otherwise
    """
    # Sources that inherit the base iterator cannot be split.
    if src.__class__.__iter__ is BaseSeismicSource.__iter__:
        return False
    # Mutex-weighted sources must stay whole.
    if getattr(src, 'mutex_weight', 1) != 1:
        return False
    return src.splittable
[ "def", "splittable", "(", "src", ")", ":", "return", "(", "src", ".", "__class__", ".", "__iter__", "is", "not", "BaseSeismicSource", ".", "__iter__", "and", "getattr", "(", "src", ",", "'mutex_weight'", ",", "1", ")", "==", "1", "and", "src", ".", "splittable", ")" ]
39.5
17.166667