text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def move(self, direction, n_windows):
    """
    Move the cursor up or down by the given increment.

    Params:
        direction (int): `1` will move the cursor down one item and `-1`
            will move the cursor up one item.
        n_windows (int): The number of items that are currently being
            drawn on the screen.

    Returns:
        valid (bool): Indicates whether or not the attempted cursor move
            is allowed. E.g. When the cursor is on the last comment,
            attempting to scroll down any further would not be valid.
        redraw (bool): Indicates whether or not the screen needs to be
            redrawn.
    """
    assert direction in (-1, 1)
    valid, redraw = True, False
    # NOTE(review): "forward" depends on the sign of self.step, which
    # presumably flips with the page orientation (see self.flip) — confirm.
    forward = ((direction * self.step) > 0)
    if forward:
        if self.page_index < 0:
            if self._is_valid(0):
                # Special case - advance the page index if less than zero
                self.page_index = 0
                self.cursor_index = 0
                redraw = True
            else:
                # Nothing at index 0 to land on — move rejected.
                valid = False
        else:
            self.cursor_index += 1
            if not self._is_valid(self.absolute_index):
                # Move would take us out of bounds
                self.cursor_index -= 1
                valid = False
            elif self.cursor_index >= (n_windows - 1):
                # Flip the orientation and reset the cursor
                self.flip(self.cursor_index)
                self.cursor_index = 0
                self.top_item_height = None
                redraw = True
    else:
        if self.cursor_index > 0:
            self.cursor_index -= 1
            if self.top_item_height and self.cursor_index == 0:
                # Selecting the partially displayed item
                self.top_item_height = None
                redraw = True
        else:
            # Cursor already at the top — try scrolling the page instead.
            self.page_index -= self.step
            if self._is_valid(self.absolute_index):
                # We have reached the beginning of the page - move the
                # index
                self.top_item_height = None
                redraw = True
            else:
                self.page_index += self.step
                valid = False  # Revert
    return valid, redraw
[ "def", "move", "(", "self", ",", "direction", ",", "n_windows", ")", ":", "assert", "direction", "in", "(", "-", "1", ",", "1", ")", "valid", ",", "redraw", "=", "True", ",", "False", "forward", "=", "(", "(", "direction", "*", "self", ".", "step", ")", ">", "0", ")", "if", "forward", ":", "if", "self", ".", "page_index", "<", "0", ":", "if", "self", ".", "_is_valid", "(", "0", ")", ":", "# Special case - advance the page index if less than zero", "self", ".", "page_index", "=", "0", "self", ".", "cursor_index", "=", "0", "redraw", "=", "True", "else", ":", "valid", "=", "False", "else", ":", "self", ".", "cursor_index", "+=", "1", "if", "not", "self", ".", "_is_valid", "(", "self", ".", "absolute_index", ")", ":", "# Move would take us out of bounds", "self", ".", "cursor_index", "-=", "1", "valid", "=", "False", "elif", "self", ".", "cursor_index", ">=", "(", "n_windows", "-", "1", ")", ":", "# Flip the orientation and reset the cursor", "self", ".", "flip", "(", "self", ".", "cursor_index", ")", "self", ".", "cursor_index", "=", "0", "self", ".", "top_item_height", "=", "None", "redraw", "=", "True", "else", ":", "if", "self", ".", "cursor_index", ">", "0", ":", "self", ".", "cursor_index", "-=", "1", "if", "self", ".", "top_item_height", "and", "self", ".", "cursor_index", "==", "0", ":", "# Selecting the partially displayed item", "self", ".", "top_item_height", "=", "None", "redraw", "=", "True", "else", ":", "self", ".", "page_index", "-=", "self", ".", "step", "if", "self", ".", "_is_valid", "(", "self", ".", "absolute_index", ")", ":", "# We have reached the beginning of the page - move the", "# index", "self", ".", "top_item_height", "=", "None", "redraw", "=", "True", "else", ":", "self", ".", "page_index", "+=", "self", ".", "step", "valid", "=", "False", "# Revert", "return", "valid", ",", "redraw" ]
38.380952
16.539683
def contribute_to_class(model_class, name='slots', descriptor=None):
    """
    Attach a descriptor to a model class under the given attribute name.

    :param model_class: The model class the descriptor is to be added to.
    :param name: The attribute name the descriptor will be assigned to.
    :param descriptor: The descriptor instance to be used. If none is
        specified it will default to
        ``icekit.plugins.descriptors.PlaceholderDescriptor``.
    :return: True
    """
    chosen = descriptor if descriptor else PlaceholderDescriptor()
    chosen.contribute_to_class(model_class, name)
    setattr(model_class, name, chosen)
    return True
[ "def", "contribute_to_class", "(", "model_class", ",", "name", "=", "'slots'", ",", "descriptor", "=", "None", ")", ":", "rel_obj", "=", "descriptor", "or", "PlaceholderDescriptor", "(", ")", "rel_obj", ".", "contribute_to_class", "(", "model_class", ",", "name", ")", "setattr", "(", "model_class", ",", "name", ",", "rel_obj", ")", "return", "True" ]
38.3125
18.6875
def autoUseMyMetrics(ttGlyph, glyphName, hmtx):
    """
    Set the "USE_MY_METRICS" flag on the first component having the same
    advance width as the composite glyph, no transform and no horizontal
    shift (but allow it to shift vertically).

    This forces the composite glyph to use the possibly hinted horizontal
    metrics of the sub-glyph, instead of those from the "hmtx" table.
    """
    target_width = hmtx[glyphName][0]
    for comp in ttGlyph.components:
        try:
            base_name, xform = comp.getComponentInfo()
        except AttributeError:
            # component uses '{first,second}Pt' instead of 'x' and 'y'
            continue
        try:
            base_width = hmtx[base_name][0]
        except KeyError:
            continue  # ignore missing components
        # Identity scale/rotation, zero x-shift; last element (y) is free.
        if base_width == target_width and xform[:-1] == (1, 0, 0, 1, 0):
            comp.flags |= USE_MY_METRICS
            break
[ "def", "autoUseMyMetrics", "(", "ttGlyph", ",", "glyphName", ",", "hmtx", ")", ":", "width", "=", "hmtx", "[", "glyphName", "]", "[", "0", "]", "for", "component", "in", "ttGlyph", ".", "components", ":", "try", ":", "baseName", ",", "transform", "=", "component", ".", "getComponentInfo", "(", ")", "except", "AttributeError", ":", "# component uses '{first,second}Pt' instead of 'x' and 'y'", "continue", "try", ":", "baseMetrics", "=", "hmtx", "[", "baseName", "]", "except", "KeyError", ":", "continue", "# ignore missing components", "else", ":", "if", "(", "baseMetrics", "[", "0", "]", "==", "width", "and", "transform", "[", ":", "-", "1", "]", "==", "(", "1", ",", "0", ",", "0", ",", "1", ",", "0", ")", ")", ":", "component", ".", "flags", "|=", "USE_MY_METRICS", "break" ]
45.521739
16.217391
def encode_many(chord_labels, reduce_extended_chords=False):
    """Translate a set of chord labels to numerical representations for sane
    evaluation.

    Parameters
    ----------
    chord_labels : list
        Set of chord labels to encode.
    reduce_extended_chords : bool
        Whether to map the upper voicings of extended chords (9's, 11's,
        13's) to semitone extensions. (Default value = False)

    Returns
    -------
    root_number : np.ndarray, dtype=int
        Absolute semitone of the chord's root.
    interval_bitmap : np.ndarray, dtype=int
        12-dim vector of relative semitones in the given chord quality.
    bass_number : np.ndarray, dtype=int
        Relative semitones of the chord's bass notes.
    """
    num_items = len(chord_labels)
    # `np.int` was removed in NumPy 1.24; the builtin `int` is the
    # documented equivalent and yields the same default integer dtype.
    roots, basses = np.zeros([2, num_items], dtype=int)
    semitones = np.zeros([num_items, 12], dtype=int)
    # Memoize per-label results so duplicate labels are encoded only once.
    local_cache = dict()
    for i, label in enumerate(chord_labels):
        result = local_cache.get(label, None)
        if result is None:
            result = encode(label, reduce_extended_chords)
            local_cache[label] = result
        roots[i], semitones[i], basses[i] = result
    return roots, semitones, basses
[ "def", "encode_many", "(", "chord_labels", ",", "reduce_extended_chords", "=", "False", ")", ":", "num_items", "=", "len", "(", "chord_labels", ")", "roots", ",", "basses", "=", "np", ".", "zeros", "(", "[", "2", ",", "num_items", "]", ",", "dtype", "=", "np", ".", "int", ")", "semitones", "=", "np", ".", "zeros", "(", "[", "num_items", ",", "12", "]", ",", "dtype", "=", "np", ".", "int", ")", "local_cache", "=", "dict", "(", ")", "for", "i", ",", "label", "in", "enumerate", "(", "chord_labels", ")", ":", "result", "=", "local_cache", ".", "get", "(", "label", ",", "None", ")", "if", "result", "is", "None", ":", "result", "=", "encode", "(", "label", ",", "reduce_extended_chords", ")", "local_cache", "[", "label", "]", "=", "result", "roots", "[", "i", "]", ",", "semitones", "[", "i", "]", ",", "basses", "[", "i", "]", "=", "result", "return", "roots", ",", "semitones", ",", "basses" ]
35.294118
15.441176
def serie(self, serie):
    """Make serie node"""
    css_class = 'series serie-%d color-%d' % (serie.index, serie.index)
    return {
        layer: self.node(self.graph.nodes[layer], class_=css_class)
        for layer in ('plot', 'overlay', 'text_overlay')
    }
[ "def", "serie", "(", "self", ",", "serie", ")", ":", "return", "dict", "(", "plot", "=", "self", ".", "node", "(", "self", ".", "graph", ".", "nodes", "[", "'plot'", "]", ",", "class_", "=", "'series serie-%d color-%d'", "%", "(", "serie", ".", "index", ",", "serie", ".", "index", ")", ")", ",", "overlay", "=", "self", ".", "node", "(", "self", ".", "graph", ".", "nodes", "[", "'overlay'", "]", ",", "class_", "=", "'series serie-%d color-%d'", "%", "(", "serie", ".", "index", ",", "serie", ".", "index", ")", ")", ",", "text_overlay", "=", "self", ".", "node", "(", "self", ".", "graph", ".", "nodes", "[", "'text_overlay'", "]", ",", "class_", "=", "'series serie-%d color-%d'", "%", "(", "serie", ".", "index", ",", "serie", ".", "index", ")", ")", ")" ]
36.375
18.9375
def untrace_class(cls):
    """
    Untraces given class.

    :param cls: Class to untrace.
    :type cls: object
    :return: Definition success.
    :rtype: bool
    """
    # Plain methods and functions are untraced the same way.
    for predicate in (inspect.ismethod, inspect.isfunction):
        for _, member in inspect.getmembers(cls, predicate):
            untrace_method(cls, member)
    # Properties need dedicated handling of their accessors.
    for _, accessor in inspect.getmembers(cls,
                                          lambda x: type(x) is property):
        untrace_property(cls, accessor)
    set_untraced(cls)
    return True
[ "def", "untrace_class", "(", "cls", ")", ":", "for", "name", ",", "method", "in", "inspect", ".", "getmembers", "(", "cls", ",", "inspect", ".", "ismethod", ")", ":", "untrace_method", "(", "cls", ",", "method", ")", "for", "name", ",", "function", "in", "inspect", ".", "getmembers", "(", "cls", ",", "inspect", ".", "isfunction", ")", ":", "untrace_method", "(", "cls", ",", "function", ")", "for", "name", ",", "accessor", "in", "inspect", ".", "getmembers", "(", "cls", ",", "lambda", "x", ":", "type", "(", "x", ")", "is", "property", ")", ":", "untrace_property", "(", "cls", ",", "accessor", ")", "set_untraced", "(", "cls", ")", "return", "True" ]
24
21.818182
def move_item_behind(self, item_id, assessment_part_id, reference_id):
    """Reorders items in an assessment part by moving the specified item
    behind of a reference item.

    arg:    item_id (osid.id.Id): ``Id`` of the ``Item``
    arg:    assessment_part_id (osid.id.Id): ``Id of the
            AssessmentPartId``
    arg:    reference_id (osid.id.Id): ``Id`` of the reference ``Item``
    raise:  NotFound - ``item_id`` or ``reference_id`` ``not found in
            assessment_part_id``
    raise:  NullArgument - ``item_id, reference_id`` or
            ``assessment_part_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # NOTE(review): with `and`, a non-ABCId argument whose namespace does
    # match would pass validation; `or` may have been intended — confirm
    # against the other generated sessions before changing.
    if (not isinstance(assessment_part_id, ABCId) and
            assessment_part_id.get_identifier_namespace() != 'assessment_authoring.AssessmentPart'):
        raise errors.InvalidArgument('the argument is not a valid OSID Id')
    # Fetch the stored assessment-part document and its backing collection.
    assessment_part_map, collection = self._get_assessment_part_collection(assessment_part_id)
    # Reorder the persisted item id list, then save the updated document.
    assessment_part_map['itemIds'] = move_id_behind(item_id, reference_id, assessment_part_map['itemIds'])
    collection.save(assessment_part_map)
[ "def", "move_item_behind", "(", "self", ",", "item_id", ",", "assessment_part_id", ",", "reference_id", ")", ":", "if", "(", "not", "isinstance", "(", "assessment_part_id", ",", "ABCId", ")", "and", "assessment_part_id", ".", "get_identifier_namespace", "(", ")", "!=", "'assessment_authoring.AssessmentPart'", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", "'the argument is not a valid OSID Id'", ")", "assessment_part_map", ",", "collection", "=", "self", ".", "_get_assessment_part_collection", "(", "assessment_part_id", ")", "assessment_part_map", "[", "'itemIds'", "]", "=", "move_id_behind", "(", "item_id", ",", "reference_id", ",", "assessment_part_map", "[", "'itemIds'", "]", ")", "collection", ".", "save", "(", "assessment_part_map", ")" ]
57.565217
25
def color_table(color, N=1, sort=False, sort_values=False, inline=False, as_html=False):
    """
    Generates a colour table

    Parameters:
    -----------
        color : string | list | dict
            Color representation in rgba|rgb|hex
            If a list of colors is passed then these
            are displayed in a table
        N : int
            number of colours to generate
            When color is not a list then it generaes
            a range of N colors
        sort : bool
            if True then items are sorted
        sort_values : bool
            if True then items are sorted by color values.
            Only applies if color is a dictionary
        inline : bool
            if True it returns single line color blocks
        as_html : bool
            if True it returns the HTML code

    Example:
        color_table('#ff9933')
        color_table(cufflinks.cnames)
        color_table(['pink','salmon','yellow'])

    Note:
        This function only works in iPython Notebook
    """
    if isinstance(color, list):
        # List input: normalize each colour; no "current" colour to highlight.
        c_ = ''
        rgb_tup = [normalize(c) for c in color]
        if sort:
            rgb_tup.sort()
    elif isinstance(color, dict):
        # Dict input: keep (name, colour) pairs; HSV triple is used only as
        # a sort key when sorting by value.
        c_ = ''
        items = [(k, normalize(v), hex_to_hsv(normalize(v))) for k, v in list(color.items())]
        if sort_values:
            items = sorted(items, key=operator.itemgetter(2))
        elif sort:
            items = sorted(items, key=operator.itemgetter(0))
        rgb_tup = [(k, v) for k, v, _ in items]
    else:
        # Single colour: optionally expand into a range of N shades
        # (reversed so the range runs dark-to-light — presumably; confirm
        # against color_range's ordering).
        c_ = normalize(color)
        if N > 1:
            rgb_tup = np.array(color_range(c_, N))[::-1]
        else:
            rgb_tup = [c_]

    def _color(c):
        # Choose readable text colour/shadow for a swatch and a highlight
        # border for the original input colour.
        if hex_to_hsv(c)[2] < .5:
            color = "#ffffff"
            shadow = '0 1px 0 #000'
        else:
            color = "#000000"
            shadow = '0 1px 0 rgba(255,255,255,0.6)'
        if c == c_:
            border = " border: 1px solid #ffffff;"
        else:
            border = ''
        return color, shadow, border

    s = '<ul style="list-style-type: none;">' if not inline else ''
    for c in rgb_tup:
        if isinstance(c, tuple):
            # Dict entries carry a label: render as "name : COLOR".
            k, c = c
            k += ' : '
        else:
            k = ''
        if inline:
            s += '<div style="background-color:{0};height:20px;width:20px;display:inline-block;"></div>'.format(
                c)
        else:
            color, shadow, border = _color(c)
            s += """<li style="text-align:center;""" + border + """line-height:30px;background-color:""" + c + """;"> 
            <span style=" text-shadow:""" + shadow + """; color:""" + color + """;">""" + k + c.upper() + """</span>
            </li>"""
    s += '</ul>' if not inline else ''
    if as_html:
        return s
    return display(HTML(s))
[ "def", "color_table", "(", "color", ",", "N", "=", "1", ",", "sort", "=", "False", ",", "sort_values", "=", "False", ",", "inline", "=", "False", ",", "as_html", "=", "False", ")", ":", "if", "isinstance", "(", "color", ",", "list", ")", ":", "c_", "=", "''", "rgb_tup", "=", "[", "normalize", "(", "c", ")", "for", "c", "in", "color", "]", "if", "sort", ":", "rgb_tup", ".", "sort", "(", ")", "elif", "isinstance", "(", "color", ",", "dict", ")", ":", "c_", "=", "''", "items", "=", "[", "(", "k", ",", "normalize", "(", "v", ")", ",", "hex_to_hsv", "(", "normalize", "(", "v", ")", ")", ")", "for", "k", ",", "v", "in", "list", "(", "color", ".", "items", "(", ")", ")", "]", "if", "sort_values", ":", "items", "=", "sorted", "(", "items", ",", "key", "=", "operator", ".", "itemgetter", "(", "2", ")", ")", "elif", "sort", ":", "items", "=", "sorted", "(", "items", ",", "key", "=", "operator", ".", "itemgetter", "(", "0", ")", ")", "rgb_tup", "=", "[", "(", "k", ",", "v", ")", "for", "k", ",", "v", ",", "_", "in", "items", "]", "else", ":", "c_", "=", "normalize", "(", "color", ")", "if", "N", ">", "1", ":", "rgb_tup", "=", "np", ".", "array", "(", "color_range", "(", "c_", ",", "N", ")", ")", "[", ":", ":", "-", "1", "]", "else", ":", "rgb_tup", "=", "[", "c_", "]", "def", "_color", "(", "c", ")", ":", "if", "hex_to_hsv", "(", "c", ")", "[", "2", "]", "<", ".5", ":", "color", "=", "\"#ffffff\"", "shadow", "=", "'0 1px 0 #000'", "else", ":", "color", "=", "\"#000000\"", "shadow", "=", "'0 1px 0 rgba(255,255,255,0.6)'", "if", "c", "==", "c_", ":", "border", "=", "\" border: 1px solid #ffffff;\"", "else", ":", "border", "=", "''", "return", "color", ",", "shadow", ",", "border", "s", "=", "'<ul style=\"list-style-type: none;\">'", "if", "not", "inline", "else", "''", "for", "c", "in", "rgb_tup", ":", "if", "isinstance", "(", "c", ",", "tuple", ")", ":", "k", ",", "c", "=", "c", "k", "+=", "' : '", "else", ":", "k", "=", "''", "if", "inline", ":", "s", "+=", "'<div 
style=\"background-color:{0};height:20px;width:20px;display:inline-block;\"></div>'", ".", "format", "(", "c", ")", "else", ":", "color", ",", "shadow", ",", "border", "=", "_color", "(", "c", ")", "s", "+=", "\"\"\"<li style=\"text-align:center;\"\"\"", "+", "border", "+", "\"\"\"line-height:30px;background-color:\"\"\"", "+", "c", "+", "\"\"\";\"> \n <span style=\" text-shadow:\"\"\"", "+", "shadow", "+", "\"\"\"; color:\"\"\"", "+", "color", "+", "\"\"\";\">\"\"\"", "+", "k", "+", "c", ".", "upper", "(", ")", "+", "\"\"\"</span>\n </li>\"\"\"", "s", "+=", "'</ul>'", "if", "not", "inline", "else", "''", "if", "as_html", ":", "return", "s", "return", "display", "(", "HTML", "(", "s", ")", ")" ]
34.27381
17.511905
def thumbnail(self, id):
    """Get the 100x100 thumbnail of a file. Return the raw PNG data.

    Parameters:
        * id: The Puush ID of the file to get the thumbnail of.
    """
    response = self._raw_api_request('thumb', data={'i': id})
    if response:
        return response
    raise PuushError("Getting thumbnail failed.")
[ "def", "thumbnail", "(", "self", ",", "id", ")", ":", "res", "=", "self", ".", "_raw_api_request", "(", "'thumb'", ",", "data", "=", "{", "'i'", ":", "id", "}", ")", "if", "not", "res", ":", "raise", "PuushError", "(", "\"Getting thumbnail failed.\"", ")", "return", "res" ]
35.5
17.6
def MaxLikeInterval(self, percentage=90):
    """Returns the maximum-likelihood credible interval.

    If percentage=90, computes a 90% CI containing the values
    with the highest likelihoods.

    percentage: float between 0 and 100

    Returns: list of values from the suite
    """
    # Rank values by probability, highest first (ties break on value).
    ranked = sorted(((p, v) for v, p in self.Items()), reverse=True)
    threshold = percentage / 100.0
    interval = []
    cumulative = 0
    for prob, value in ranked:
        interval.append(value)
        cumulative += prob
        if cumulative >= threshold:
            break
    return interval
[ "def", "MaxLikeInterval", "(", "self", ",", "percentage", "=", "90", ")", ":", "interval", "=", "[", "]", "total", "=", "0", "t", "=", "[", "(", "prob", ",", "val", ")", "for", "val", ",", "prob", "in", "self", ".", "Items", "(", ")", "]", "t", ".", "sort", "(", "reverse", "=", "True", ")", "for", "prob", ",", "val", "in", "t", ":", "interval", ".", "append", "(", "val", ")", "total", "+=", "prob", "if", "total", ">=", "percentage", "/", "100.0", ":", "break", "return", "interval" ]
25.869565
18.347826
def set_empty_region(self, region_id, type_id, generated_at,
                     error_if_orders_present=True):
    """
    Prepares for the given region+item combo by instantiating a
    :py:class:`MarketItemsInRegionList` instance, which will track region
    ID, type ID, and generated time. This is mostly used for the JSON
    deserialization process in case there are no orders for the given
    region+item combo.

    :param int region_id: The region ID.
    :param int type_id: The item's type ID.
    :param datetime.datetime generated_at: The time that the order set was
        generated.
    :keyword bool error_if_orders_present: If True, raise an exception if
        an order already exists for this item+region combo when this is
        called. This failsafe may be disabled by passing False here.
    :raises ItemAlreadyPresentError: If the failsafe is enabled and orders
        already exist for this region+item combo.
    """
    key = '%s_%s' % (region_id, type_id)
    # dict.has_key() is Python-2-only (removed in Python 3); the `in`
    # operator is the portable equivalent.
    if error_if_orders_present and key in self._orders:
        raise ItemAlreadyPresentError(
            "Orders already exist for the given region and type ID. "
            "Pass error_if_orders_present=False to disable this failsafe, "
            "if desired."
        )
    self._orders[key] = MarketItemsInRegionList(
        region_id, type_id, generated_at)
[ "def", "set_empty_region", "(", "self", ",", "region_id", ",", "type_id", ",", "generated_at", ",", "error_if_orders_present", "=", "True", ")", ":", "key", "=", "'%s_%s'", "%", "(", "region_id", ",", "type_id", ")", "if", "error_if_orders_present", "and", "self", ".", "_orders", ".", "has_key", "(", "key", ")", ":", "raise", "ItemAlreadyPresentError", "(", "\"Orders already exist for the given region and type ID. \"", "\"Pass error_if_orders_present=False to disable this failsafe, \"", "\"if desired.\"", ")", "self", ".", "_orders", "[", "key", "]", "=", "MarketItemsInRegionList", "(", "region_id", ",", "type_id", ",", "generated_at", ")" ]
48.518519
20.740741
def custom_req(session, request):
    """
    Utility for sending a predefined request and printing response as well
    as storing messages in a list, useful for testing

    Parameters
    ----------
    session: blpapi.session.Session
    request: blpapi.request.Request
        Request to be sent

    Returns
    -------
    List of all messages received
    """
    # Drain any stale events left over from a previous call that errored out.
    while session.tryNextEvent():
        pass
    print("Sending Request:\n %s" % request)
    session.sendRequest(request)
    messages = []
    # Poll events until the terminal RESPONSE event arrives.
    while True:
        # The 500ms timeout leaves a window for Ctrl+C handling.
        event = session.nextEvent(500)
        for msg in event:
            print("Message Received:\n %s" % msg)
            messages.append(msg)
        if event.eventType() == blpapi.Event.RESPONSE:
            # Response completely received, so we can stop polling.
            break
    return messages
[ "def", "custom_req", "(", "session", ",", "request", ")", ":", "# flush event queue in case previous call errored out", "while", "(", "session", ".", "tryNextEvent", "(", ")", ")", ":", "pass", "print", "(", "\"Sending Request:\\n %s\"", "%", "request", ")", "session", ".", "sendRequest", "(", "request", ")", "messages", "=", "[", "]", "# Process received events", "while", "(", "True", ")", ":", "# We provide timeout to give the chance for Ctrl+C handling:", "ev", "=", "session", ".", "nextEvent", "(", "500", ")", "for", "msg", "in", "ev", ":", "print", "(", "\"Message Received:\\n %s\"", "%", "msg", ")", "messages", ".", "append", "(", "msg", ")", "if", "ev", ".", "eventType", "(", ")", "==", "blpapi", ".", "Event", ".", "RESPONSE", ":", "# Response completely received, so we could exit", "break", "return", "messages" ]
28.818182
17.424242
def _collect_all_files(self, files=None): """ Collect all FSEntrys into a set, including all descendants. :param list files: List of :class:`FSEntry` to traverse. :returns: Set of FSEntry """ if files is None: files = self._root_elements collected = set() for entry in files: collected.add(entry) collected.update(self._collect_all_files(entry.children)) return collected
[ "def", "_collect_all_files", "(", "self", ",", "files", "=", "None", ")", ":", "if", "files", "is", "None", ":", "files", "=", "self", ".", "_root_elements", "collected", "=", "set", "(", ")", "for", "entry", "in", "files", ":", "collected", ".", "add", "(", "entry", ")", "collected", ".", "update", "(", "self", ".", "_collect_all_files", "(", "entry", ".", "children", ")", ")", "return", "collected" ]
33.357143
14.071429
def printSequences(x, formatString="%d"):
    """
    Print a bunch of sequences stored in a 2D numpy array.

    :param x: 2D array of shape (seqLen, numElements); each row is printed
        as one line of concatenated formatted elements.
    :param formatString: %-style format applied to each element.
    """
    seqLen, numElements = x.shape
    for i in range(seqLen):
        s = ""
        for j in range(numElements):
            s += formatString % x[i][j]
        # `print s` is Python-2-only statement syntax (a SyntaxError on
        # Python 3); the print() function behaves identically here.
        print(s)
[ "def", "printSequences", "(", "x", ",", "formatString", "=", "\"%d\"", ")", ":", "[", "seqLen", ",", "numElements", "]", "=", "x", ".", "shape", "for", "i", "in", "range", "(", "seqLen", ")", ":", "s", "=", "\"\"", "for", "j", "in", "range", "(", "numElements", ")", ":", "s", "+=", "formatString", "%", "x", "[", "i", "]", "[", "j", "]", "print", "s" ]
25.1
11.3
def print(self, *objects, **kwargs):
    """Mimic the builtin print interface, echoing output to listeners."""
    target = kwargs.get("file")
    if target is None or target is sys.stdout:
        # Stdout-bound output goes through our own printer and is also
        # forwarded to every registered listener callback.
        separator = STR(kwargs.get("sep", " "))
        terminator = STR(kwargs.get("end", "\n"))
        text = separator.join(STR(item) for item in objects)
        self.imp_print(text, terminator)
        for callback in self.listeners:
            callback(text)
    else:
        # Explicit non-stdout file: defer to the real print.
        PRINT(*objects, **kwargs)
[ "def", "print", "(", "self", ",", "*", "objects", ",", "*", "*", "kwargs", ")", ":", "file", "=", "kwargs", ".", "get", "(", "\"file\"", ")", "if", "file", "is", "not", "None", "and", "file", "is", "not", "sys", ".", "stdout", ":", "PRINT", "(", "*", "objects", ",", "*", "*", "kwargs", ")", "else", ":", "sep", "=", "STR", "(", "kwargs", ".", "get", "(", "\"sep\"", ",", "\" \"", ")", ")", "end", "=", "STR", "(", "kwargs", ".", "get", "(", "\"end\"", ",", "\"\\n\"", ")", ")", "text", "=", "sep", ".", "join", "(", "STR", "(", "o", ")", "for", "o", "in", "objects", ")", "self", ".", "imp_print", "(", "text", ",", "end", ")", "for", "callback", "in", "self", ".", "listeners", ":", "callback", "(", "text", ")" ]
21.823529
19
def eglGetDisplay(display=EGL_DEFAULT_DISPLAY):
    """Connect to the EGL display server and return the display handle.

    Raises RuntimeError when no display could be obtained.
    """
    handle = _lib.eglGetDisplay(display)
    if handle and handle != EGL_NO_DISPLAY:
        return handle
    raise RuntimeError('Could not create display')
[ "def", "eglGetDisplay", "(", "display", "=", "EGL_DEFAULT_DISPLAY", ")", ":", "res", "=", "_lib", ".", "eglGetDisplay", "(", "display", ")", "if", "not", "res", "or", "res", "==", "EGL_NO_DISPLAY", ":", "raise", "RuntimeError", "(", "'Could not create display'", ")", "return", "res" ]
34.428571
7.142857
def add_factuality(self, my_fact):
    """
    Adds a factuality to the factuality layer, creating the layer on
    first use.

    @type my_fact: L{Cfactuality}
    @param my_fact: factuality object
    """
    if self.factuality_layer is None:
        # Lazily create the layer and attach its node to the document root.
        layer = Cfactualities()
        self.factuality_layer = layer
        self.root.append(layer.get_node())
    self.factuality_layer.add_factuality(my_fact)
[ "def", "add_factuality", "(", "self", ",", "my_fact", ")", ":", "if", "self", ".", "factuality_layer", "is", "None", ":", "self", ".", "factuality_layer", "=", "Cfactualities", "(", ")", "self", ".", "root", ".", "append", "(", "self", ".", "factuality_layer", ".", "get_node", "(", ")", ")", "self", ".", "factuality_layer", ".", "add_factuality", "(", "my_fact", ")" ]
38.9
6.7
def object_hook(obj):
    """Checks to see if the `__type`-hinting field is available in the
    object being de-serialized. If present, and the class referenced has a
    `from_json` function it will return the generated object, else a
    standard dict will be returned

    Args:
        obj: Object to be deserialized

    Returns:
        Deserialized object or regular python object
    """
    try:
        if '__type' in obj:
            type_name = obj['__type']
            target_cls = getattr(cloud_inquisitor.schema, type_name)
            if hasattr(target_cls, 'from_json'):
                return target_cls.from_json(obj)
        # Single-key marker dicts encode special python types.
        marker, payload = next(iter(obj.items()))
        if marker == ' t':
            return tuple(payload)
        if marker == ' u':
            return uuid.UUID(payload)
        if marker == ' b':
            return b64decode(payload)
        if marker == ' m':
            return Markup(payload)
        if marker == ' d':
            return parse_date(payload)
        return obj
    except Exception:
        # Best-effort decoding: log and fall through (returns None).
        log.exception('Error during data deserialization')
[ "def", "object_hook", "(", "obj", ")", ":", "try", ":", "if", "'__type'", "in", "obj", ":", "obj_type", "=", "obj", "[", "'__type'", "]", "cls", "=", "getattr", "(", "cloud_inquisitor", ".", "schema", ",", "obj_type", ")", "if", "hasattr", "(", "cls", ",", "'from_json'", ")", ":", "return", "cls", ".", "from_json", "(", "obj", ")", "key", ",", "value", "=", "next", "(", "iter", "(", "obj", ".", "items", "(", ")", ")", ")", "if", "key", "==", "' t'", ":", "return", "tuple", "(", "value", ")", "elif", "key", "==", "' u'", ":", "return", "uuid", ".", "UUID", "(", "value", ")", "elif", "key", "==", "' b'", ":", "return", "b64decode", "(", "value", ")", "elif", "key", "==", "' m'", ":", "return", "Markup", "(", "value", ")", "elif", "key", "==", "' d'", ":", "return", "parse_date", "(", "value", ")", "return", "obj", "except", "Exception", ":", "log", ".", "exception", "(", "'Error during data deserialization'", ")" ]
34.484848
16.272727
def cookie(data, key_salt='', secret=None, digestmod=None):
    """ Encodes or decodes a signed cookie.

        @data: cookie data
        @key_salt: HMAC key signing salt
        @secret: HMAC signing secret key
        @digestmod: hashing algorithm to sign with, recommended >=sha256
        -> HMAC signed or unsigned cookie data
        ..
            from vital.security import cookie

            cookie("Hello, world.", "saltyDog", secret="alBVlwe")
            # -> '!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i'

            cookie(
                "!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i",
                "saltyDog",
                secret="alBVlwe")
            # -> 'Hello, world.'
        ..
    """
    digestmod = digestmod or sha256
    if not data:
        return None
    # NOTE(review): the bare `except:` below deliberately routes any
    # decode failure into the encode path, but it also hides real bugs and
    # the `assert` is stripped under `python -O` — confirm before relying
    # on it for control flow.
    try:
        # Decode signed cookie
        assert cookie_is_encoded(data)
        datab = uniorbytes(data, bytes)
        # Payload layout: b'!<signature>?<base64 json>'
        sig, msg = datab.split(uniorbytes('?', bytes), 1)
        key = ("{}{}").format(secret, key_salt)
        sig_check = hmac.new(
            key=uniorbytes(key, bytes), msg=msg,
            digestmod=digestmod).digest()
        sig_check = uniorbytes(b64encode(sig_check), bytes)
        # sig[1:] skips the leading '!' marker; lscmp is presumably a
        # constant-time comparison — verify its implementation.
        if lscmp(sig[1:], sig_check):
            return json.loads(uniorbytes(b64decode(msg)))
        return None
    except:
        # Encode and sign a json-able object. Return a string.
        key = ("{}{}").format(secret, key_salt)
        msg = b64encode(uniorbytes(json.dumps(data), bytes))
        sig = hmac.new(
            key=uniorbytes(key, bytes), msg=msg,
            digestmod=digestmod).digest()
        sig = uniorbytes(b64encode(sig), bytes)
        return uniorbytes('!'.encode() + sig + '?'.encode() + msg)
[ "def", "cookie", "(", "data", ",", "key_salt", "=", "''", ",", "secret", "=", "None", ",", "digestmod", "=", "None", ")", ":", "digestmod", "=", "digestmod", "or", "sha256", "if", "not", "data", ":", "return", "None", "try", ":", "# Decode signed cookie", "assert", "cookie_is_encoded", "(", "data", ")", "datab", "=", "uniorbytes", "(", "data", ",", "bytes", ")", "sig", ",", "msg", "=", "datab", ".", "split", "(", "uniorbytes", "(", "'?'", ",", "bytes", ")", ",", "1", ")", "key", "=", "(", "\"{}{}\"", ")", ".", "format", "(", "secret", ",", "key_salt", ")", "sig_check", "=", "hmac", ".", "new", "(", "key", "=", "uniorbytes", "(", "key", ",", "bytes", ")", ",", "msg", "=", "msg", ",", "digestmod", "=", "digestmod", ")", ".", "digest", "(", ")", "sig_check", "=", "uniorbytes", "(", "b64encode", "(", "sig_check", ")", ",", "bytes", ")", "if", "lscmp", "(", "sig", "[", "1", ":", "]", ",", "sig_check", ")", ":", "return", "json", ".", "loads", "(", "uniorbytes", "(", "b64decode", "(", "msg", ")", ")", ")", "return", "None", "except", ":", "# Encode and sign a json-able object. Return a string.", "key", "=", "(", "\"{}{}\"", ")", ".", "format", "(", "secret", ",", "key_salt", ")", "msg", "=", "b64encode", "(", "uniorbytes", "(", "json", ".", "dumps", "(", "data", ")", ",", "bytes", ")", ")", "sig", "=", "hmac", ".", "new", "(", "key", "=", "uniorbytes", "(", "key", ",", "bytes", ")", ",", "msg", "=", "msg", ",", "digestmod", "=", "digestmod", ")", ".", "digest", "(", ")", "sig", "=", "uniorbytes", "(", "b64encode", "(", "sig", ")", ",", "bytes", ")", "return", "uniorbytes", "(", "'!'", ".", "encode", "(", ")", "+", "sig", "+", "'?'", ".", "encode", "(", ")", "+", "msg", ")" ]
38.681818
17.5
def write_template_to_file(conf, template_body):
    """Writes the template to disk and returns the file name used."""
    out_name = _get_stack_name(conf) + '-generated-cf-template.json'
    with open(out_name, 'w') as handle:
        handle.write(template_body)
    print('wrote cf-template for %s to disk: %s' % (
        get_env(), out_name))
    return out_name
[ "def", "write_template_to_file", "(", "conf", ",", "template_body", ")", ":", "template_file_name", "=", "_get_stack_name", "(", "conf", ")", "+", "'-generated-cf-template.json'", "with", "open", "(", "template_file_name", ",", "'w'", ")", "as", "opened_file", ":", "opened_file", ".", "write", "(", "template_body", ")", "print", "(", "'wrote cf-template for %s to disk: %s'", "%", "(", "get_env", "(", ")", ",", "template_file_name", ")", ")", "return", "template_file_name" ]
42.333333
9.333333
def read_table(filename, sheetname, index_col=None): """ Return the contents of an Excel table as a pandas DataFrame. Parameters ---------- filename : str Name of the Excel file to read. sheetname : str or int Name or index of the sheet inside the Excel file to read. index_col : str, optional Column name or index to be used as row labels of the DataFrame. If None, default index will be used. Returns ------- table : DataFrame A DataFrame containing the data in the specified Excel table. If `index_col` is not None, rows in which their `index_col` field is empty will not be present in `table`. Raises ------ ValueError If `index_col` is specified and two rows contain the same `index_col` field. """ # Catch sheetname as list or None if sheetname is None or \ (hasattr(sheetname, '__iter__') \ and not isinstance(sheetname, six.string_types)): raise TypeError("sheetname should specify a single sheet") # Load excel table using pandas # Parameter specifying sheet name is slightly different depending on pandas' # version. if packaging.version.parse(pd.__version__) \ < packaging.version.parse('0.21'): table = pd.read_excel(filename, sheetname=sheetname, index_col=index_col) else: table = pd.read_excel(filename, sheet_name=sheetname, index_col=index_col) # Eliminate rows whose index are null if index_col is not None: table = table[pd.notnull(table.index)] # Check for duplicated rows if table.index.has_duplicates: raise ValueError("sheet {} on file {} contains duplicated values " "for column {}".format(sheetname, filename, index_col)) return table
[ "def", "read_table", "(", "filename", ",", "sheetname", ",", "index_col", "=", "None", ")", ":", "# Catch sheetname as list or None", "if", "sheetname", "is", "None", "or", "(", "hasattr", "(", "sheetname", ",", "'__iter__'", ")", "and", "not", "isinstance", "(", "sheetname", ",", "six", ".", "string_types", ")", ")", ":", "raise", "TypeError", "(", "\"sheetname should specify a single sheet\"", ")", "# Load excel table using pandas", "# Parameter specifying sheet name is slightly different depending on pandas'", "# version.", "if", "packaging", ".", "version", ".", "parse", "(", "pd", ".", "__version__", ")", "<", "packaging", ".", "version", ".", "parse", "(", "'0.21'", ")", ":", "table", "=", "pd", ".", "read_excel", "(", "filename", ",", "sheetname", "=", "sheetname", ",", "index_col", "=", "index_col", ")", "else", ":", "table", "=", "pd", ".", "read_excel", "(", "filename", ",", "sheet_name", "=", "sheetname", ",", "index_col", "=", "index_col", ")", "# Eliminate rows whose index are null", "if", "index_col", "is", "not", "None", ":", "table", "=", "table", "[", "pd", ".", "notnull", "(", "table", ".", "index", ")", "]", "# Check for duplicated rows", "if", "table", ".", "index", ".", "has_duplicates", ":", "raise", "ValueError", "(", "\"sheet {} on file {} contains duplicated values \"", "\"for column {}\"", ".", "format", "(", "sheetname", ",", "filename", ",", "index_col", ")", ")", "return", "table" ]
34.763636
19.418182
def saved_xids(self): """Return previously saved xids.""" if self._saved_xids is None: self._saved_xids = [] if self.debug: fpfn = os.path.join(self.tcex.args.tc_temp_path, 'xids-saved') if os.path.isfile(fpfn) and os.access(fpfn, os.R_OK): with open(fpfn) as fh: self._saved_xids = fh.read().splitlines() return self._saved_xids
[ "def", "saved_xids", "(", "self", ")", ":", "if", "self", ".", "_saved_xids", "is", "None", ":", "self", ".", "_saved_xids", "=", "[", "]", "if", "self", ".", "debug", ":", "fpfn", "=", "os", ".", "path", ".", "join", "(", "self", ".", "tcex", ".", "args", ".", "tc_temp_path", ",", "'xids-saved'", ")", "if", "os", ".", "path", ".", "isfile", "(", "fpfn", ")", "and", "os", ".", "access", "(", "fpfn", ",", "os", ".", "R_OK", ")", ":", "with", "open", "(", "fpfn", ")", "as", "fh", ":", "self", ".", "_saved_xids", "=", "fh", ".", "read", "(", ")", ".", "splitlines", "(", ")", "return", "self", ".", "_saved_xids" ]
44.4
14.7
def createDeviceObjects(self, interface_id): """Transform the raw device descriptions into instances of devicetypes.generic.HMDevice or availabe subclass.""" global WORKING WORKING = True remote = interface_id.split('-')[-1] LOG.debug( "RPCFunctions.createDeviceObjects: iterating interface_id = %s" % (remote, )) # First create parent object for dev in self._devices_raw[remote]: if not dev['PARENT']: if dev['ADDRESS'] not in self.devices_all[remote]: try: if dev['TYPE'] in devicetypes.SUPPORTED: deviceObject = devicetypes.SUPPORTED[dev['TYPE']]( dev, self._proxies[interface_id], self.resolveparamsets) LOG.debug("RPCFunctions.createDeviceObjects: created %s as SUPPORTED device for %s" % ( dev['ADDRESS'], dev['TYPE'])) else: deviceObject = devicetypes.UNSUPPORTED( dev, self._proxies[interface_id], self.resolveparamsets) LOG.debug("RPCFunctions.createDeviceObjects: created %s as UNSUPPORTED device for %s" % ( dev['ADDRESS'], dev['TYPE'])) LOG.debug( "RPCFunctions.createDeviceObjects: adding to self.devices_all") self.devices_all[remote][dev['ADDRESS']] = deviceObject LOG.debug( "RPCFunctions.createDeviceObjects: adding to self.devices") self.devices[remote][dev['ADDRESS']] = deviceObject except Exception as err: LOG.critical( "RPCFunctions.createDeviceObjects: Parent: %s", str(err)) # Then create all children for parent for dev in self._devices_raw[remote]: if dev['PARENT']: try: if dev['ADDRESS'] not in self.devices_all[remote]: deviceObject = HMChannel( dev, self._proxies[interface_id], self.resolveparamsets) self.devices_all[remote][dev['ADDRESS']] = deviceObject self.devices[remote][dev['PARENT']].CHANNELS[ dev['INDEX']] = deviceObject except Exception as err: LOG.critical( "RPCFunctions.createDeviceObjects: Child: %s", str(err)) if self.devices_all[remote] and self.remotes[remote].get('resolvenames', False): self.addDeviceNames(remote) WORKING = False if self.systemcallback: self.systemcallback('createDeviceObjects') return True
[ "def", "createDeviceObjects", "(", "self", ",", "interface_id", ")", ":", "global", "WORKING", "WORKING", "=", "True", "remote", "=", "interface_id", ".", "split", "(", "'-'", ")", "[", "-", "1", "]", "LOG", ".", "debug", "(", "\"RPCFunctions.createDeviceObjects: iterating interface_id = %s\"", "%", "(", "remote", ",", ")", ")", "# First create parent object", "for", "dev", "in", "self", ".", "_devices_raw", "[", "remote", "]", ":", "if", "not", "dev", "[", "'PARENT'", "]", ":", "if", "dev", "[", "'ADDRESS'", "]", "not", "in", "self", ".", "devices_all", "[", "remote", "]", ":", "try", ":", "if", "dev", "[", "'TYPE'", "]", "in", "devicetypes", ".", "SUPPORTED", ":", "deviceObject", "=", "devicetypes", ".", "SUPPORTED", "[", "dev", "[", "'TYPE'", "]", "]", "(", "dev", ",", "self", ".", "_proxies", "[", "interface_id", "]", ",", "self", ".", "resolveparamsets", ")", "LOG", ".", "debug", "(", "\"RPCFunctions.createDeviceObjects: created %s as SUPPORTED device for %s\"", "%", "(", "dev", "[", "'ADDRESS'", "]", ",", "dev", "[", "'TYPE'", "]", ")", ")", "else", ":", "deviceObject", "=", "devicetypes", ".", "UNSUPPORTED", "(", "dev", ",", "self", ".", "_proxies", "[", "interface_id", "]", ",", "self", ".", "resolveparamsets", ")", "LOG", ".", "debug", "(", "\"RPCFunctions.createDeviceObjects: created %s as UNSUPPORTED device for %s\"", "%", "(", "dev", "[", "'ADDRESS'", "]", ",", "dev", "[", "'TYPE'", "]", ")", ")", "LOG", ".", "debug", "(", "\"RPCFunctions.createDeviceObjects: adding to self.devices_all\"", ")", "self", ".", "devices_all", "[", "remote", "]", "[", "dev", "[", "'ADDRESS'", "]", "]", "=", "deviceObject", "LOG", ".", "debug", "(", "\"RPCFunctions.createDeviceObjects: adding to self.devices\"", ")", "self", ".", "devices", "[", "remote", "]", "[", "dev", "[", "'ADDRESS'", "]", "]", "=", "deviceObject", "except", "Exception", "as", "err", ":", "LOG", ".", "critical", "(", "\"RPCFunctions.createDeviceObjects: Parent: %s\"", ",", "str", "(", "err", ")", ")", 
"# Then create all children for parent", "for", "dev", "in", "self", ".", "_devices_raw", "[", "remote", "]", ":", "if", "dev", "[", "'PARENT'", "]", ":", "try", ":", "if", "dev", "[", "'ADDRESS'", "]", "not", "in", "self", ".", "devices_all", "[", "remote", "]", ":", "deviceObject", "=", "HMChannel", "(", "dev", ",", "self", ".", "_proxies", "[", "interface_id", "]", ",", "self", ".", "resolveparamsets", ")", "self", ".", "devices_all", "[", "remote", "]", "[", "dev", "[", "'ADDRESS'", "]", "]", "=", "deviceObject", "self", ".", "devices", "[", "remote", "]", "[", "dev", "[", "'PARENT'", "]", "]", ".", "CHANNELS", "[", "dev", "[", "'INDEX'", "]", "]", "=", "deviceObject", "except", "Exception", "as", "err", ":", "LOG", ".", "critical", "(", "\"RPCFunctions.createDeviceObjects: Child: %s\"", ",", "str", "(", "err", ")", ")", "if", "self", ".", "devices_all", "[", "remote", "]", "and", "self", ".", "remotes", "[", "remote", "]", ".", "get", "(", "'resolvenames'", ",", "False", ")", ":", "self", ".", "addDeviceNames", "(", "remote", ")", "WORKING", "=", "False", "if", "self", ".", "systemcallback", ":", "self", ".", "systemcallback", "(", "'createDeviceObjects'", ")", "return", "True" ]
57.04
23.32
def _ufo_logging_ref(ufo): """Return a string that can identify this UFO in logs.""" if ufo.path: return os.path.basename(ufo.path) return ufo.info.styleName
[ "def", "_ufo_logging_ref", "(", "ufo", ")", ":", "if", "ufo", ".", "path", ":", "return", "os", ".", "path", ".", "basename", "(", "ufo", ".", "path", ")", "return", "ufo", ".", "info", ".", "styleName" ]
34.6
10
def flush(self, index=None, params=None): """ Explicitly flush one or more indices. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-flush.html>`_ :arg index: A comma-separated list of index names; use `_all` or empty string for all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'open', valid choices are: 'open', 'closed', 'none', 'all' :arg force: Whether a flush should be forced even if it is not necessarily needed ie. if no changes will be committed to the index. This is useful if transaction log IDs should be incremented even if no uncommitted changes are present. (This setting can be considered as internal) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg wait_if_ongoing: If set to true the flush operation will block until the flush can be executed if another flush operation is already executing. The default is true. If set to false the flush will be skipped iff if another flush operation is already running. """ return self.transport.perform_request( "POST", _make_path(index, "_flush"), params=params )
[ "def", "flush", "(", "self", ",", "index", "=", "None", ",", "params", "=", "None", ")", ":", "return", "self", ".", "transport", ".", "perform_request", "(", "\"POST\"", ",", "_make_path", "(", "index", ",", "\"_flush\"", ")", ",", "params", "=", "params", ")" ]
57.678571
26.392857
def dump_connection_info(engine: Engine, fileobj: TextIO = sys.stdout) -> None: """ Dumps some connection info, as an SQL comment. Obscures passwords. Args: engine: the SQLAlchemy :class:`Engine` to dump metadata information from fileobj: the file-like object (default ``sys.stdout``) to write information to """ meta = MetaData(bind=engine) writeline_nl(fileobj, sql_comment('Database info: {}'.format(meta)))
[ "def", "dump_connection_info", "(", "engine", ":", "Engine", ",", "fileobj", ":", "TextIO", "=", "sys", ".", "stdout", ")", "->", "None", ":", "meta", "=", "MetaData", "(", "bind", "=", "engine", ")", "writeline_nl", "(", "fileobj", ",", "sql_comment", "(", "'Database info: {}'", ".", "format", "(", "meta", ")", ")", ")" ]
38.666667
23.666667
def str(name, default=None, allow_none=False, fallback=None): """Get a string based environment value or the default. Args: name: The environment variable name default: The default value to use if no environment variable is found allow_none: If the return value can be `None` (i.e. optional) """ value = read(name, default, allow_none, fallback=fallback) if value is None and allow_none: return None else: return builtins.str(value).strip()
[ "def", "str", "(", "name", ",", "default", "=", "None", ",", "allow_none", "=", "False", ",", "fallback", "=", "None", ")", ":", "value", "=", "read", "(", "name", ",", "default", ",", "allow_none", ",", "fallback", "=", "fallback", ")", "if", "value", "is", "None", "and", "allow_none", ":", "return", "None", "else", ":", "return", "builtins", ".", "str", "(", "value", ")", ".", "strip", "(", ")" ]
37.923077
18.538462
def new(self): # type: () -> None ''' Create a new Rock Ridge Platform Dependent record. Parameters: None. Returns: Nothing. ''' if self._initialized: raise pycdlibexception.PyCdlibInternalError('PD record already initialized!') self._initialized = True self.padding = b''
[ "def", "new", "(", "self", ")", ":", "# type: () -> None", "if", "self", ".", "_initialized", ":", "raise", "pycdlibexception", ".", "PyCdlibInternalError", "(", "'PD record already initialized!'", ")", "self", ".", "_initialized", "=", "True", "self", ".", "padding", "=", "b''" ]
24.133333
24.8
def _format_function_arguments(self, opts): """Format a series of function arguments in a Mothur script.""" params = [self.Parameters[x] for x in opts] return ', '.join(filter(None, map(str, params)))
[ "def", "_format_function_arguments", "(", "self", ",", "opts", ")", ":", "params", "=", "[", "self", ".", "Parameters", "[", "x", "]", "for", "x", "in", "opts", "]", "return", "', '", ".", "join", "(", "filter", "(", "None", ",", "map", "(", "str", ",", "params", ")", ")", ")" ]
55.25
7.5
def playlist_songs_move( self, playlist_songs, *, after=None, before=None, index=None, position=None ): """Move songs in a playlist. Note: * Provide no optional arguments to move to end. * Provide playlist song dicts for ``after`` and/or ``before``. * Provide a zero-based ``index``. * Provide a one-based ``position``. Songs are inserted *at* given index or position. It's also possible to move to the end by using ``len(songs)`` for index or ``len(songs) + 1`` for position. Parameters: playlist_songs (list): A list of playlist song dicts. after (dict, Optional): A playlist song dict ``songs`` will follow. before (dict, Optional): A playlist song dict ``songs`` will precede. index (int, Optional): The zero-based index position to insert ``songs``. position (int, Optional): The one-based position to insert ``songs``. Returns: dict: Playlist dict including songs. """ if not more_itertools.all_equal( playlist_song['playlistId'] for playlist_song in playlist_songs ): raise ValueError( "All 'playlist_songs' must be from the same playlist." ) playlist = self.playlist( playlist_songs[0]['playlistId'], include_songs=True ) prev, next_ = get_ple_prev_next( playlist['tracks'], after=after, before=before, index=index, position=position ) playlist_songs_len = len(playlist_songs) for i, playlist_song in enumerate(playlist_songs): mutation = mc_calls.PlaylistEntriesBatch.update( playlist_song, preceding_entry_id=prev.get('id'), following_entry_id=next_.get('id') ) response = self._call(mc_calls.PlaylistEntriesBatch, mutation) result = response.body['mutate_response'][0] # TODO: Proper exception on failure. if result['response_code'] != 'OK': break if i < playlist_songs_len - 1: while True: prev = self.playlist_song(result['id']) if prev: break return self.playlist(playlist_songs[0]['playlistId'], include_songs=True)
[ "def", "playlist_songs_move", "(", "self", ",", "playlist_songs", ",", "*", ",", "after", "=", "None", ",", "before", "=", "None", ",", "index", "=", "None", ",", "position", "=", "None", ")", ":", "if", "not", "more_itertools", ".", "all_equal", "(", "playlist_song", "[", "'playlistId'", "]", "for", "playlist_song", "in", "playlist_songs", ")", ":", "raise", "ValueError", "(", "\"All 'playlist_songs' must be from the same playlist.\"", ")", "playlist", "=", "self", ".", "playlist", "(", "playlist_songs", "[", "0", "]", "[", "'playlistId'", "]", ",", "include_songs", "=", "True", ")", "prev", ",", "next_", "=", "get_ple_prev_next", "(", "playlist", "[", "'tracks'", "]", ",", "after", "=", "after", ",", "before", "=", "before", ",", "index", "=", "index", ",", "position", "=", "position", ")", "playlist_songs_len", "=", "len", "(", "playlist_songs", ")", "for", "i", ",", "playlist_song", "in", "enumerate", "(", "playlist_songs", ")", ":", "mutation", "=", "mc_calls", ".", "PlaylistEntriesBatch", ".", "update", "(", "playlist_song", ",", "preceding_entry_id", "=", "prev", ".", "get", "(", "'id'", ")", ",", "following_entry_id", "=", "next_", ".", "get", "(", "'id'", ")", ")", "response", "=", "self", ".", "_call", "(", "mc_calls", ".", "PlaylistEntriesBatch", ",", "mutation", ")", "result", "=", "response", ".", "body", "[", "'mutate_response'", "]", "[", "0", "]", "# TODO: Proper exception on failure.", "if", "result", "[", "'response_code'", "]", "!=", "'OK'", ":", "break", "if", "i", "<", "playlist_songs_len", "-", "1", ":", "while", "True", ":", "prev", "=", "self", ".", "playlist_song", "(", "result", "[", "'id'", "]", ")", "if", "prev", ":", "break", "return", "self", ".", "playlist", "(", "playlist_songs", "[", "0", "]", "[", "'playlistId'", "]", ",", "include_songs", "=", "True", ")" ]
26.27027
22.256757
def create(self, **kwargs): """ Creates a new post. When the `markdown` property is present, it will be automatically converted to `mobiledoc` on v1.+ of the server. :param kwargs: The properties of the post :return: The created `Post` object """ return super(PostController, self).create(**self._with_markdown(kwargs))
[ "def", "create", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "super", "(", "PostController", ",", "self", ")", ".", "create", "(", "*", "*", "self", ".", "_with_markdown", "(", "kwargs", ")", ")" ]
34.090909
18.636364
def open_filechooser(title, parent=None, patterns=None, folder=None, filter=None, multiple=False, _before_run=None, action=None): """An open dialog. :param parent: window or None :param patterns: file match patterns :param folder: initial folder :param filter: file filter Use of filter and patterns at the same time is invalid. """ assert not (patterns and filter) if multiple: if action is not None and action != gtk.FILE_CHOOSER_ACTION_OPEN: raise ValueError('`multiple` is only valid for the action ' '`gtk.FILE_CHOOSER_ACTION_OPEN`.') action = gtk.FILE_CHOOSER_ACTION_OPEN else: assert action is not None filechooser = gtk.FileChooserDialog(title, parent, action, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)) if multiple: filechooser.set_select_multiple(True) if patterns or filter: if not filter: filter = gtk.FileFilter() for pattern in patterns: filter.add_pattern(pattern) filechooser.set_filter(filter) filechooser.set_default_response(gtk.RESPONSE_OK) if folder: filechooser.set_current_folder(folder) try: if _before_run is not None: _before_run(filechooser) response = filechooser.run() if response not in (gtk.RESPONSE_OK, gtk.RESPONSE_NONE): return if multiple: return filechooser.get_filenames() else: return filechooser.get_filename() finally: _destroy(filechooser)
[ "def", "open_filechooser", "(", "title", ",", "parent", "=", "None", ",", "patterns", "=", "None", ",", "folder", "=", "None", ",", "filter", "=", "None", ",", "multiple", "=", "False", ",", "_before_run", "=", "None", ",", "action", "=", "None", ")", ":", "assert", "not", "(", "patterns", "and", "filter", ")", "if", "multiple", ":", "if", "action", "is", "not", "None", "and", "action", "!=", "gtk", ".", "FILE_CHOOSER_ACTION_OPEN", ":", "raise", "ValueError", "(", "'`multiple` is only valid for the action '", "'`gtk.FILE_CHOOSER_ACTION_OPEN`.'", ")", "action", "=", "gtk", ".", "FILE_CHOOSER_ACTION_OPEN", "else", ":", "assert", "action", "is", "not", "None", "filechooser", "=", "gtk", ".", "FileChooserDialog", "(", "title", ",", "parent", ",", "action", ",", "(", "gtk", ".", "STOCK_CANCEL", ",", "gtk", ".", "RESPONSE_CANCEL", ",", "gtk", ".", "STOCK_OPEN", ",", "gtk", ".", "RESPONSE_OK", ")", ")", "if", "multiple", ":", "filechooser", ".", "set_select_multiple", "(", "True", ")", "if", "patterns", "or", "filter", ":", "if", "not", "filter", ":", "filter", "=", "gtk", ".", "FileFilter", "(", ")", "for", "pattern", "in", "patterns", ":", "filter", ".", "add_pattern", "(", "pattern", ")", "filechooser", ".", "set_filter", "(", "filter", ")", "filechooser", ".", "set_default_response", "(", "gtk", ".", "RESPONSE_OK", ")", "if", "folder", ":", "filechooser", ".", "set_current_folder", "(", "folder", ")", "try", ":", "if", "_before_run", "is", "not", "None", ":", "_before_run", "(", "filechooser", ")", "response", "=", "filechooser", ".", "run", "(", ")", "if", "response", "not", "in", "(", "gtk", ".", "RESPONSE_OK", ",", "gtk", ".", "RESPONSE_NONE", ")", ":", "return", "if", "multiple", ":", "return", "filechooser", ".", "get_filenames", "(", ")", "else", ":", "return", "filechooser", ".", "get_filename", "(", ")", "finally", ":", "_destroy", "(", "filechooser", ")" ]
33.396226
17.528302
def remove_fetcher(self, fetcher): """Remove a running fetcher from the list of active fetchers. :Parameters: - `fetcher`: fetcher instance. :Types: - `fetcher`: `CacheFetcher`""" self._lock.acquire() try: for t, f in list(self._active_fetchers): if f is fetcher: self._active_fetchers.remove((t, f)) f._deactivated() return finally: self._lock.release()
[ "def", "remove_fetcher", "(", "self", ",", "fetcher", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "for", "t", ",", "f", "in", "list", "(", "self", ".", "_active_fetchers", ")", ":", "if", "f", "is", "fetcher", ":", "self", ".", "_active_fetchers", ".", "remove", "(", "(", "t", ",", "f", ")", ")", "f", ".", "_deactivated", "(", ")", "return", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")" ]
32
13.6875
def crawl(self, feeder_kwargs=None, parser_kwargs=None, downloader_kwargs=None): """Start crawling This method will start feeder, parser and download and wait until all threads exit. Args: feeder_kwargs (dict, optional): Arguments to be passed to ``feeder.start()`` parser_kwargs (dict, optional): Arguments to be passed to ``parser.start()`` downloader_kwargs (dict, optional): Arguments to be passed to ``downloader.start()`` """ self.signal.reset() self.logger.info('start crawling...') feeder_kwargs = {} if feeder_kwargs is None else feeder_kwargs parser_kwargs = {} if parser_kwargs is None else parser_kwargs downloader_kwargs = {} if downloader_kwargs is None else downloader_kwargs self.logger.info('starting %d feeder threads...', self.feeder.thread_num) self.feeder.start(**feeder_kwargs) self.logger.info('starting %d parser threads...', self.parser.thread_num) self.parser.start(**parser_kwargs) self.logger.info('starting %d downloader threads...', self.downloader.thread_num) self.downloader.start(**downloader_kwargs) while True: if not self.feeder.is_alive(): self.signal.set(feeder_exited=True) if not self.parser.is_alive(): self.signal.set(parser_exited=True) if not self.downloader.is_alive(): break time.sleep(1) if not self.feeder.in_queue.empty(): self.feeder.clear_buffer() if not self.parser.in_queue.empty(): self.parser.clear_buffer() if not self.downloader.in_queue.empty(): self.downloader.clear_buffer(True) self.logger.info('Crawling task done!')
[ "def", "crawl", "(", "self", ",", "feeder_kwargs", "=", "None", ",", "parser_kwargs", "=", "None", ",", "downloader_kwargs", "=", "None", ")", ":", "self", ".", "signal", ".", "reset", "(", ")", "self", ".", "logger", ".", "info", "(", "'start crawling...'", ")", "feeder_kwargs", "=", "{", "}", "if", "feeder_kwargs", "is", "None", "else", "feeder_kwargs", "parser_kwargs", "=", "{", "}", "if", "parser_kwargs", "is", "None", "else", "parser_kwargs", "downloader_kwargs", "=", "{", "}", "if", "downloader_kwargs", "is", "None", "else", "downloader_kwargs", "self", ".", "logger", ".", "info", "(", "'starting %d feeder threads...'", ",", "self", ".", "feeder", ".", "thread_num", ")", "self", ".", "feeder", ".", "start", "(", "*", "*", "feeder_kwargs", ")", "self", ".", "logger", ".", "info", "(", "'starting %d parser threads...'", ",", "self", ".", "parser", ".", "thread_num", ")", "self", ".", "parser", ".", "start", "(", "*", "*", "parser_kwargs", ")", "self", ".", "logger", ".", "info", "(", "'starting %d downloader threads...'", ",", "self", ".", "downloader", ".", "thread_num", ")", "self", ".", "downloader", ".", "start", "(", "*", "*", "downloader_kwargs", ")", "while", "True", ":", "if", "not", "self", ".", "feeder", ".", "is_alive", "(", ")", ":", "self", ".", "signal", ".", "set", "(", "feeder_exited", "=", "True", ")", "if", "not", "self", ".", "parser", ".", "is_alive", "(", ")", ":", "self", ".", "signal", ".", "set", "(", "parser_exited", "=", "True", ")", "if", "not", "self", ".", "downloader", ".", "is_alive", "(", ")", ":", "break", "time", ".", "sleep", "(", "1", ")", "if", "not", "self", ".", "feeder", ".", "in_queue", ".", "empty", "(", ")", ":", "self", ".", "feeder", ".", "clear_buffer", "(", ")", "if", "not", "self", ".", "parser", ".", "in_queue", ".", "empty", "(", ")", ":", "self", ".", "parser", ".", "clear_buffer", "(", ")", "if", "not", "self", ".", "downloader", ".", "in_queue", ".", "empty", "(", ")", ":", "self", ".", "downloader", ".", 
"clear_buffer", "(", "True", ")", "self", ".", "logger", ".", "info", "(", "'Crawling task done!'", ")" ]
37.372549
18.27451
def isfinite(val): """ Helper function to determine if scalar or array value is finite extending np.isfinite with support for None, string, datetime types. """ if not np.isscalar(val): val = asarray(val, strict=False) if val is None: return False elif isinstance(val, np.ndarray): if val.dtype.kind == 'M': return ~isnat(val) elif val.dtype.kind == 'O': return np.array([isfinite(v) for v in val], dtype=bool) elif val.dtype.kind in 'US': return np.ones_like(val, dtype=bool) return np.isfinite(val) elif isinstance(val, datetime_types+timedelta_types): return not isnat(val) elif isinstance(val, basestring): return True return np.isfinite(val)
[ "def", "isfinite", "(", "val", ")", ":", "if", "not", "np", ".", "isscalar", "(", "val", ")", ":", "val", "=", "asarray", "(", "val", ",", "strict", "=", "False", ")", "if", "val", "is", "None", ":", "return", "False", "elif", "isinstance", "(", "val", ",", "np", ".", "ndarray", ")", ":", "if", "val", ".", "dtype", ".", "kind", "==", "'M'", ":", "return", "~", "isnat", "(", "val", ")", "elif", "val", ".", "dtype", ".", "kind", "==", "'O'", ":", "return", "np", ".", "array", "(", "[", "isfinite", "(", "v", ")", "for", "v", "in", "val", "]", ",", "dtype", "=", "bool", ")", "elif", "val", ".", "dtype", ".", "kind", "in", "'US'", ":", "return", "np", ".", "ones_like", "(", "val", ",", "dtype", "=", "bool", ")", "return", "np", ".", "isfinite", "(", "val", ")", "elif", "isinstance", "(", "val", ",", "datetime_types", "+", "timedelta_types", ")", ":", "return", "not", "isnat", "(", "val", ")", "elif", "isinstance", "(", "val", ",", "basestring", ")", ":", "return", "True", "return", "np", ".", "isfinite", "(", "val", ")" ]
33.217391
13.565217
def center(self): """Center of the bounding box around all data present in the scene""" bounds = self.bounds x = (bounds[1] + bounds[0])/2 y = (bounds[3] + bounds[2])/2 z = (bounds[5] + bounds[4])/2 return [x, y, z]
[ "def", "center", "(", "self", ")", ":", "bounds", "=", "self", ".", "bounds", "x", "=", "(", "bounds", "[", "1", "]", "+", "bounds", "[", "0", "]", ")", "/", "2", "y", "=", "(", "bounds", "[", "3", "]", "+", "bounds", "[", "2", "]", ")", "/", "2", "z", "=", "(", "bounds", "[", "5", "]", "+", "bounds", "[", "4", "]", ")", "/", "2", "return", "[", "x", ",", "y", ",", "z", "]" ]
36.714286
8.571429
def getRedisPool(params): ''' getRedisPool - Returns and possibly also creates a Redis connection pool based on the REDIS_CONNECTION_PARAMS passed in. The goal of this method is to keep a small connection pool rolling to each unique Redis instance, otherwise during network issues etc python-redis will leak connections and in short-order can exhaust all the ports on a system. There's probably also some minor performance gain in sharing Pools. Will modify "params", if "host" and/or "port" are missing, will fill them in with defaults, and prior to return will set "connection_pool" on params, which will allow immediate return on the next call, and allow access to the pool directly from the model object. @param params <dict> - REDIS_CONNECTION_PARAMS - kwargs to redis.Redis @return redis.ConnectionPool corrosponding to this unique server. ''' global RedisPools global _defaultRedisConnectionParams global _redisManagedConnectionParams if not params: params = _defaultRedisConnectionParams isDefaultParams = True else: isDefaultParams = bool(params is _defaultRedisConnectionParams) if 'connection_pool' in params: return params['connection_pool'] hashValue = hashDictOneLevel(params) if hashValue in RedisPools: params['connection_pool'] = RedisPools[hashValue] return RedisPools[hashValue] # Copy the params, so that we don't modify the original dict if not isDefaultParams: origParams = params params = copy.copy(params) else: origParams = params checkAgain = False if 'host' not in params: if not isDefaultParams and 'host' in _defaultRedisConnectionParams: params['host'] = _defaultRedisConnectionParams['host'] else: params['host'] = '127.0.0.1' checkAgain = True if 'port' not in params: if not isDefaultParams and 'port' in _defaultRedisConnectionParams: params['port'] = _defaultRedisConnectionParams['port'] else: params['port'] = 6379 checkAgain = True if 'db' not in params: if not isDefaultParams and 'db' in _defaultRedisConnectionParams: params['db'] = 
_defaultRedisConnectionParams['db'] else: params['db'] = 0 checkAgain = True if not isDefaultParams: otherGlobalKeys = set(_defaultRedisConnectionParams.keys()) - set(params.keys()) for otherKey in otherGlobalKeys: if otherKey == 'connection_pool': continue params[otherKey] = _defaultRedisConnectionParams[otherKey] checkAgain = True if checkAgain: hashValue = hashDictOneLevel(params) if hashValue in RedisPools: params['connection_pool'] = RedisPools[hashValue] return RedisPools[hashValue] connectionPool = redis.ConnectionPool(**params) origParams['connection_pool'] = params['connection_pool'] = connectionPool RedisPools[hashValue] = connectionPool # Add the original as a "managed" redis connection (they did not provide their own pool) # such that if the defaults change, we make sure to re-inherit any keys, and can disconnect # from clearRedisPools origParamsHash = hashDictOneLevel(origParams) if origParamsHash not in _redisManagedConnectionParams: _redisManagedConnectionParams[origParamsHash] = [origParams] elif origParams not in _redisManagedConnectionParams[origParamsHash]: _redisManagedConnectionParams[origParamsHash].append(origParams) return connectionPool
[ "def", "getRedisPool", "(", "params", ")", ":", "global", "RedisPools", "global", "_defaultRedisConnectionParams", "global", "_redisManagedConnectionParams", "if", "not", "params", ":", "params", "=", "_defaultRedisConnectionParams", "isDefaultParams", "=", "True", "else", ":", "isDefaultParams", "=", "bool", "(", "params", "is", "_defaultRedisConnectionParams", ")", "if", "'connection_pool'", "in", "params", ":", "return", "params", "[", "'connection_pool'", "]", "hashValue", "=", "hashDictOneLevel", "(", "params", ")", "if", "hashValue", "in", "RedisPools", ":", "params", "[", "'connection_pool'", "]", "=", "RedisPools", "[", "hashValue", "]", "return", "RedisPools", "[", "hashValue", "]", "# Copy the params, so that we don't modify the original dict", "if", "not", "isDefaultParams", ":", "origParams", "=", "params", "params", "=", "copy", ".", "copy", "(", "params", ")", "else", ":", "origParams", "=", "params", "checkAgain", "=", "False", "if", "'host'", "not", "in", "params", ":", "if", "not", "isDefaultParams", "and", "'host'", "in", "_defaultRedisConnectionParams", ":", "params", "[", "'host'", "]", "=", "_defaultRedisConnectionParams", "[", "'host'", "]", "else", ":", "params", "[", "'host'", "]", "=", "'127.0.0.1'", "checkAgain", "=", "True", "if", "'port'", "not", "in", "params", ":", "if", "not", "isDefaultParams", "and", "'port'", "in", "_defaultRedisConnectionParams", ":", "params", "[", "'port'", "]", "=", "_defaultRedisConnectionParams", "[", "'port'", "]", "else", ":", "params", "[", "'port'", "]", "=", "6379", "checkAgain", "=", "True", "if", "'db'", "not", "in", "params", ":", "if", "not", "isDefaultParams", "and", "'db'", "in", "_defaultRedisConnectionParams", ":", "params", "[", "'db'", "]", "=", "_defaultRedisConnectionParams", "[", "'db'", "]", "else", ":", "params", "[", "'db'", "]", "=", "0", "checkAgain", "=", "True", "if", "not", "isDefaultParams", ":", "otherGlobalKeys", "=", "set", "(", "_defaultRedisConnectionParams", ".", "keys", 
"(", ")", ")", "-", "set", "(", "params", ".", "keys", "(", ")", ")", "for", "otherKey", "in", "otherGlobalKeys", ":", "if", "otherKey", "==", "'connection_pool'", ":", "continue", "params", "[", "otherKey", "]", "=", "_defaultRedisConnectionParams", "[", "otherKey", "]", "checkAgain", "=", "True", "if", "checkAgain", ":", "hashValue", "=", "hashDictOneLevel", "(", "params", ")", "if", "hashValue", "in", "RedisPools", ":", "params", "[", "'connection_pool'", "]", "=", "RedisPools", "[", "hashValue", "]", "return", "RedisPools", "[", "hashValue", "]", "connectionPool", "=", "redis", ".", "ConnectionPool", "(", "*", "*", "params", ")", "origParams", "[", "'connection_pool'", "]", "=", "params", "[", "'connection_pool'", "]", "=", "connectionPool", "RedisPools", "[", "hashValue", "]", "=", "connectionPool", "# Add the original as a \"managed\" redis connection (they did not provide their own pool)", "# such that if the defaults change, we make sure to re-inherit any keys, and can disconnect", "# from clearRedisPools", "origParamsHash", "=", "hashDictOneLevel", "(", "origParams", ")", "if", "origParamsHash", "not", "in", "_redisManagedConnectionParams", ":", "_redisManagedConnectionParams", "[", "origParamsHash", "]", "=", "[", "origParams", "]", "elif", "origParams", "not", "in", "_redisManagedConnectionParams", "[", "origParamsHash", "]", ":", "_redisManagedConnectionParams", "[", "origParamsHash", "]", ".", "append", "(", "origParams", ")", "return", "connectionPool" ]
33.329897
23.865979
def get_submodules_from_dpath(dpath, only_packages=False, recursive=True): r""" Args: dpath (str): directory path only_packages (bool): if True returns only package directories, otherwise returns module files. (default = False) Returns: list: submod_fpaths CommandLine: python -m utool.util_dev --exec-get_submodules_from_dpath --only_packages Example: >>> # DISABLE_DOCTEST >>> # SCRIPT >>> from utool.util_dev import * # NOQA >>> import utool as ut >>> dpath = ut.truepath_relative(ut.get_argval('--dpath', default='.')) >>> print(dpath) >>> only_packages = ut.get_argflag('--only_packages') >>> submod_fpaths = get_submodules_from_dpath(dpath, only_packages) >>> submod_fpaths = ut.lmap(ut.truepath_relative, submod_fpaths) >>> result = ('submod_fpaths = %s' % (ut.repr3(submod_fpaths),)) >>> print(result) """ import utool as ut submod_dpaths = [d for d in ut.ls_dirs(dpath) if ut.is_module_dir(d) ] if only_packages: submod_fpaths = submod_dpaths else: submod_fpaths = ut.ls_modulefiles(dpath) if recursive and len(submod_dpaths) > 0: recusive_results = [get_submodules_from_dpath(d, only_packages) for d in submod_dpaths] submod_fpaths.extend(ut.flatten(recusive_results)) return submod_fpaths
[ "def", "get_submodules_from_dpath", "(", "dpath", ",", "only_packages", "=", "False", ",", "recursive", "=", "True", ")", ":", "import", "utool", "as", "ut", "submod_dpaths", "=", "[", "d", "for", "d", "in", "ut", ".", "ls_dirs", "(", "dpath", ")", "if", "ut", ".", "is_module_dir", "(", "d", ")", "]", "if", "only_packages", ":", "submod_fpaths", "=", "submod_dpaths", "else", ":", "submod_fpaths", "=", "ut", ".", "ls_modulefiles", "(", "dpath", ")", "if", "recursive", "and", "len", "(", "submod_dpaths", ")", ">", "0", ":", "recusive_results", "=", "[", "get_submodules_from_dpath", "(", "d", ",", "only_packages", ")", "for", "d", "in", "submod_dpaths", "]", "submod_fpaths", ".", "extend", "(", "ut", ".", "flatten", "(", "recusive_results", ")", ")", "return", "submod_fpaths" ]
38.027027
21.837838
def _from_matrix(cls, matrix): """Initialise from matrix representation Create a Quaternion by specifying the 3x3 rotation or 4x4 transformation matrix (as a numpy array) from which the quaternion's rotation should be created. """ try: shape = matrix.shape except AttributeError: raise TypeError("Invalid matrix type: Input must be a 3x3 or 4x4 numpy array or matrix") if shape == (3, 3): R = matrix elif shape == (4,4): R = matrix[:-1][:,:-1] # Upper left 3x3 sub-matrix else: raise ValueError("Invalid matrix shape: Input must be a 3x3 or 4x4 numpy array or matrix") # Check matrix properties if not np.allclose(np.dot(R, R.conj().transpose()), np.eye(3)): raise ValueError("Matrix must be orthogonal, i.e. its transpose should be its inverse") if not np.isclose(np.linalg.det(R), 1.0): raise ValueError("Matrix must be special orthogonal i.e. its determinant must be +1.0") def decomposition_method(matrix): """ Method supposedly able to deal with non-orthogonal matrices - NON-FUNCTIONAL! Based on this method: http://arc.aiaa.org/doi/abs/10.2514/2.4654 """ x, y, z = 0, 1, 2 # indices K = np.array([ [R[x, x]-R[y, y]-R[z, z], R[y, x]+R[x, y], R[z, x]+R[x, z], R[y, z]-R[z, y]], [R[y, x]+R[x, y], R[y, y]-R[x, x]-R[z, z], R[z, y]+R[y, z], R[z, x]-R[x, z]], [R[z, x]+R[x, z], R[z, y]+R[y, z], R[z, z]-R[x, x]-R[y, y], R[x, y]-R[y, x]], [R[y, z]-R[z, y], R[z, x]-R[x, z], R[x, y]-R[y, x], R[x, x]+R[y, y]+R[z, z]] ]) K = K / 3.0 e_vals, e_vecs = np.linalg.eig(K) print('Eigenvalues:', e_vals) print('Eigenvectors:', e_vecs) max_index = np.argmax(e_vals) principal_component = e_vecs[max_index] return principal_component def trace_method(matrix): """ This code uses a modification of the algorithm described in: https://d3cw3dd2w32x2b.cloudfront.net/wp-content/uploads/2015/01/matrix-to-quat.pdf which is itself based on the method described here: http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/ Altered to work with the column vector convention instead of 
row vectors """ m = matrix.conj().transpose() # This method assumes row-vector and postmultiplication of that vector if m[2, 2] < 0: if m[0, 0] > m[1, 1]: t = 1 + m[0, 0] - m[1, 1] - m[2, 2] q = [m[1, 2]-m[2, 1], t, m[0, 1]+m[1, 0], m[2, 0]+m[0, 2]] else: t = 1 - m[0, 0] + m[1, 1] - m[2, 2] q = [m[2, 0]-m[0, 2], m[0, 1]+m[1, 0], t, m[1, 2]+m[2, 1]] else: if m[0, 0] < -m[1, 1]: t = 1 - m[0, 0] - m[1, 1] + m[2, 2] q = [m[0, 1]-m[1, 0], m[2, 0]+m[0, 2], m[1, 2]+m[2, 1], t] else: t = 1 + m[0, 0] + m[1, 1] + m[2, 2] q = [t, m[1, 2]-m[2, 1], m[2, 0]-m[0, 2], m[0, 1]-m[1, 0]] q = np.array(q) q *= 0.5 / sqrt(t); return q return cls(array=trace_method(R))
[ "def", "_from_matrix", "(", "cls", ",", "matrix", ")", ":", "try", ":", "shape", "=", "matrix", ".", "shape", "except", "AttributeError", ":", "raise", "TypeError", "(", "\"Invalid matrix type: Input must be a 3x3 or 4x4 numpy array or matrix\"", ")", "if", "shape", "==", "(", "3", ",", "3", ")", ":", "R", "=", "matrix", "elif", "shape", "==", "(", "4", ",", "4", ")", ":", "R", "=", "matrix", "[", ":", "-", "1", "]", "[", ":", ",", ":", "-", "1", "]", "# Upper left 3x3 sub-matrix", "else", ":", "raise", "ValueError", "(", "\"Invalid matrix shape: Input must be a 3x3 or 4x4 numpy array or matrix\"", ")", "# Check matrix properties", "if", "not", "np", ".", "allclose", "(", "np", ".", "dot", "(", "R", ",", "R", ".", "conj", "(", ")", ".", "transpose", "(", ")", ")", ",", "np", ".", "eye", "(", "3", ")", ")", ":", "raise", "ValueError", "(", "\"Matrix must be orthogonal, i.e. its transpose should be its inverse\"", ")", "if", "not", "np", ".", "isclose", "(", "np", ".", "linalg", ".", "det", "(", "R", ")", ",", "1.0", ")", ":", "raise", "ValueError", "(", "\"Matrix must be special orthogonal i.e. 
its determinant must be +1.0\"", ")", "def", "decomposition_method", "(", "matrix", ")", ":", "\"\"\" Method supposedly able to deal with non-orthogonal matrices - NON-FUNCTIONAL!\n Based on this method: http://arc.aiaa.org/doi/abs/10.2514/2.4654\n \"\"\"", "x", ",", "y", ",", "z", "=", "0", ",", "1", ",", "2", "# indices", "K", "=", "np", ".", "array", "(", "[", "[", "R", "[", "x", ",", "x", "]", "-", "R", "[", "y", ",", "y", "]", "-", "R", "[", "z", ",", "z", "]", ",", "R", "[", "y", ",", "x", "]", "+", "R", "[", "x", ",", "y", "]", ",", "R", "[", "z", ",", "x", "]", "+", "R", "[", "x", ",", "z", "]", ",", "R", "[", "y", ",", "z", "]", "-", "R", "[", "z", ",", "y", "]", "]", ",", "[", "R", "[", "y", ",", "x", "]", "+", "R", "[", "x", ",", "y", "]", ",", "R", "[", "y", ",", "y", "]", "-", "R", "[", "x", ",", "x", "]", "-", "R", "[", "z", ",", "z", "]", ",", "R", "[", "z", ",", "y", "]", "+", "R", "[", "y", ",", "z", "]", ",", "R", "[", "z", ",", "x", "]", "-", "R", "[", "x", ",", "z", "]", "]", ",", "[", "R", "[", "z", ",", "x", "]", "+", "R", "[", "x", ",", "z", "]", ",", "R", "[", "z", ",", "y", "]", "+", "R", "[", "y", ",", "z", "]", ",", "R", "[", "z", ",", "z", "]", "-", "R", "[", "x", ",", "x", "]", "-", "R", "[", "y", ",", "y", "]", ",", "R", "[", "x", ",", "y", "]", "-", "R", "[", "y", ",", "x", "]", "]", ",", "[", "R", "[", "y", ",", "z", "]", "-", "R", "[", "z", ",", "y", "]", ",", "R", "[", "z", ",", "x", "]", "-", "R", "[", "x", ",", "z", "]", ",", "R", "[", "x", ",", "y", "]", "-", "R", "[", "y", ",", "x", "]", ",", "R", "[", "x", ",", "x", "]", "+", "R", "[", "y", ",", "y", "]", "+", "R", "[", "z", ",", "z", "]", "]", "]", ")", "K", "=", "K", "/", "3.0", "e_vals", ",", "e_vecs", "=", "np", ".", "linalg", ".", "eig", "(", "K", ")", "print", "(", "'Eigenvalues:'", ",", "e_vals", ")", "print", "(", "'Eigenvectors:'", ",", "e_vecs", ")", "max_index", "=", "np", ".", "argmax", "(", "e_vals", ")", "principal_component", "=", "e_vecs", "[", "max_index", 
"]", "return", "principal_component", "def", "trace_method", "(", "matrix", ")", ":", "\"\"\"\n This code uses a modification of the algorithm described in:\n https://d3cw3dd2w32x2b.cloudfront.net/wp-content/uploads/2015/01/matrix-to-quat.pdf\n which is itself based on the method described here:\n http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/\n\n Altered to work with the column vector convention instead of row vectors\n \"\"\"", "m", "=", "matrix", ".", "conj", "(", ")", ".", "transpose", "(", ")", "# This method assumes row-vector and postmultiplication of that vector", "if", "m", "[", "2", ",", "2", "]", "<", "0", ":", "if", "m", "[", "0", ",", "0", "]", ">", "m", "[", "1", ",", "1", "]", ":", "t", "=", "1", "+", "m", "[", "0", ",", "0", "]", "-", "m", "[", "1", ",", "1", "]", "-", "m", "[", "2", ",", "2", "]", "q", "=", "[", "m", "[", "1", ",", "2", "]", "-", "m", "[", "2", ",", "1", "]", ",", "t", ",", "m", "[", "0", ",", "1", "]", "+", "m", "[", "1", ",", "0", "]", ",", "m", "[", "2", ",", "0", "]", "+", "m", "[", "0", ",", "2", "]", "]", "else", ":", "t", "=", "1", "-", "m", "[", "0", ",", "0", "]", "+", "m", "[", "1", ",", "1", "]", "-", "m", "[", "2", ",", "2", "]", "q", "=", "[", "m", "[", "2", ",", "0", "]", "-", "m", "[", "0", ",", "2", "]", ",", "m", "[", "0", ",", "1", "]", "+", "m", "[", "1", ",", "0", "]", ",", "t", ",", "m", "[", "1", ",", "2", "]", "+", "m", "[", "2", ",", "1", "]", "]", "else", ":", "if", "m", "[", "0", ",", "0", "]", "<", "-", "m", "[", "1", ",", "1", "]", ":", "t", "=", "1", "-", "m", "[", "0", ",", "0", "]", "-", "m", "[", "1", ",", "1", "]", "+", "m", "[", "2", ",", "2", "]", "q", "=", "[", "m", "[", "0", ",", "1", "]", "-", "m", "[", "1", ",", "0", "]", ",", "m", "[", "2", ",", "0", "]", "+", "m", "[", "0", ",", "2", "]", ",", "m", "[", "1", ",", "2", "]", "+", "m", "[", "2", ",", "1", "]", ",", "t", "]", "else", ":", "t", "=", "1", "+", "m", "[", "0", ",", "0", "]", "+", "m", "[", 
"1", ",", "1", "]", "+", "m", "[", "2", ",", "2", "]", "q", "=", "[", "t", ",", "m", "[", "1", ",", "2", "]", "-", "m", "[", "2", ",", "1", "]", ",", "m", "[", "2", ",", "0", "]", "-", "m", "[", "0", ",", "2", "]", ",", "m", "[", "0", ",", "1", "]", "-", "m", "[", "1", ",", "0", "]", "]", "q", "=", "np", ".", "array", "(", "q", ")", "q", "*=", "0.5", "/", "sqrt", "(", "t", ")", "return", "q", "return", "cls", "(", "array", "=", "trace_method", "(", "R", ")", ")" ]
46.613333
26.413333
def is_businessperiod(cls, in_period): """ :param in_period: object to be checked :type in_period: object, str, timedelta :return: True if cast works :rtype: Boolean checks is argument con becasted to BusinessPeriod """ try: # to be removed if str(in_period).upper() == '0D': return True else: p = BusinessPeriod(str(in_period)) return not (p.days == 0 and p.months == 0 and p.years == 0 and p.businessdays == 0) except: return False
[ "def", "is_businessperiod", "(", "cls", ",", "in_period", ")", ":", "try", ":", "# to be removed", "if", "str", "(", "in_period", ")", ".", "upper", "(", ")", "==", "'0D'", ":", "return", "True", "else", ":", "p", "=", "BusinessPeriod", "(", "str", "(", "in_period", ")", ")", "return", "not", "(", "p", ".", "days", "==", "0", "and", "p", ".", "months", "==", "0", "and", "p", ".", "years", "==", "0", "and", "p", ".", "businessdays", "==", "0", ")", "except", ":", "return", "False" ]
33.823529
15.117647
def get_access_token(client_id, client_secret): ''' Name: token Parameters: client_id, client_secret Return: dictionary ''' headers = {'Content-Type': 'application/x-www-form-urlencoded'} payload = { 'client_id': client_id, 'client_secret': client_secret } request = requests.post(token_url, data=payload, headers=headers) if request.status_code == 200: token = request.json() return token return {'status': request.status_code, "message": request.text}
[ "def", "get_access_token", "(", "client_id", ",", "client_secret", ")", ":", "headers", "=", "{", "'Content-Type'", ":", "'application/x-www-form-urlencoded'", "}", "payload", "=", "{", "'client_id'", ":", "client_id", ",", "'client_secret'", ":", "client_secret", "}", "request", "=", "requests", ".", "post", "(", "token_url", ",", "data", "=", "payload", ",", "headers", "=", "headers", ")", "if", "request", ".", "status_code", "==", "200", ":", "token", "=", "request", ".", "json", "(", ")", "return", "token", "return", "{", "'status'", ":", "request", ".", "status_code", ",", "\"message\"", ":", "request", ".", "text", "}" ]
24.052632
24.473684
def detachRequestMsOriginating(): """DETACH REQUEST Section 9.4.5.2""" a = TpPd(pd=0x3) b = MessageType(mesType=0x5) # 00000101 c = DetachTypeAndSpareHalfOctets() packet = a / b / c return packet
[ "def", "detachRequestMsOriginating", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "0x3", ")", "b", "=", "MessageType", "(", "mesType", "=", "0x5", ")", "# 00000101", "c", "=", "DetachTypeAndSpareHalfOctets", "(", ")", "packet", "=", "a", "/", "b", "/", "c", "return", "packet" ]
30.571429
10.571429
def selectlanguage(self, event): """Store client's selection of a new translation""" self.log('Language selection event:', event.client, pretty=True) if event.data not in all_languages(): self.log('Unavailable language selected:', event.data, lvl=warn) language = None else: language = event.data if language is None: language = 'en' event.client.language = language if event.client.config is not None: event.client.config.language = language event.client.config.save()
[ "def", "selectlanguage", "(", "self", ",", "event", ")", ":", "self", ".", "log", "(", "'Language selection event:'", ",", "event", ".", "client", ",", "pretty", "=", "True", ")", "if", "event", ".", "data", "not", "in", "all_languages", "(", ")", ":", "self", ".", "log", "(", "'Unavailable language selected:'", ",", "event", ".", "data", ",", "lvl", "=", "warn", ")", "language", "=", "None", "else", ":", "language", "=", "event", ".", "data", "if", "language", "is", "None", ":", "language", "=", "'en'", "event", ".", "client", ".", "language", "=", "language", "if", "event", ".", "client", ".", "config", "is", "not", "None", ":", "event", ".", "client", ".", "config", ".", "language", "=", "language", "event", ".", "client", ".", "config", ".", "save", "(", ")" ]
30.736842
19.421053
def zyz_circuit(t0: float, t1: float, t2: float, q0: Qubit) -> Circuit: """Circuit equivalent of 1-qubit ZYZ gate""" circ = Circuit() circ += TZ(t0, q0) circ += TY(t1, q0) circ += TZ(t2, q0) return circ
[ "def", "zyz_circuit", "(", "t0", ":", "float", ",", "t1", ":", "float", ",", "t2", ":", "float", ",", "q0", ":", "Qubit", ")", "->", "Circuit", ":", "circ", "=", "Circuit", "(", ")", "circ", "+=", "TZ", "(", "t0", ",", "q0", ")", "circ", "+=", "TY", "(", "t1", ",", "q0", ")", "circ", "+=", "TZ", "(", "t2", ",", "q0", ")", "return", "circ" ]
31.428571
18.571429
def current_state(self): """ Returns a ``field -> value`` dict of the current state of the instance. """ field_names = set() [field_names.add(f.name) for f in self._meta.local_fields] [field_names.add(f.attname) for f in self._meta.local_fields] return dict([(field_name, getattr(self, field_name)) for field_name in field_names])
[ "def", "current_state", "(", "self", ")", ":", "field_names", "=", "set", "(", ")", "[", "field_names", ".", "add", "(", "f", ".", "name", ")", "for", "f", "in", "self", ".", "_meta", ".", "local_fields", "]", "[", "field_names", ".", "add", "(", "f", ".", "attname", ")", "for", "f", "in", "self", ".", "_meta", ".", "local_fields", "]", "return", "dict", "(", "[", "(", "field_name", ",", "getattr", "(", "self", ",", "field_name", ")", ")", "for", "field_name", "in", "field_names", "]", ")" ]
47.375
21.875
def is_valid_mpls_labels(labels): """Returns True if the given value is a list of valid MPLS labels. """ if not isinstance(labels, (list, tuple)): return False for label in labels: if not is_valid_mpls_label(label): return False return True
[ "def", "is_valid_mpls_labels", "(", "labels", ")", ":", "if", "not", "isinstance", "(", "labels", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "False", "for", "label", "in", "labels", ":", "if", "not", "is_valid_mpls_label", "(", "label", ")", ":", "return", "False", "return", "True" ]
25.454545
15.545455
def output_dir(self, *args) -> str: """ Directory where to store output """ return os.path.join(self.project_dir, 'output', *args)
[ "def", "output_dir", "(", "self", ",", "*", "args", ")", "->", "str", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "project_dir", ",", "'output'", ",", "*", "args", ")" ]
48
9
def serialize_md_inst(self, md_inst, md_class): """Serialize object ``md_inst`` by transforming it into an ``lxml.etree._ElementTree``. If it already is such, return it. If not, make sure it is the correct type and return the output of calling ``seriaize()`` on it. """ valid_insts = tuple( chain((etree._ElementTree, etree._Element), six.string_types) ) if isinstance(md_inst, valid_insts): return md_inst if not isinstance(md_inst, md_class): raise TypeError( "Instance {!r} must be instance of {!r}".format(md_inst, md_class) ) return md_inst.serialize()
[ "def", "serialize_md_inst", "(", "self", ",", "md_inst", ",", "md_class", ")", ":", "valid_insts", "=", "tuple", "(", "chain", "(", "(", "etree", ".", "_ElementTree", ",", "etree", ".", "_Element", ")", ",", "six", ".", "string_types", ")", ")", "if", "isinstance", "(", "md_inst", ",", "valid_insts", ")", ":", "return", "md_inst", "if", "not", "isinstance", "(", "md_inst", ",", "md_class", ")", ":", "raise", "TypeError", "(", "\"Instance {!r} must be instance of {!r}\"", ".", "format", "(", "md_inst", ",", "md_class", ")", ")", "return", "md_inst", ".", "serialize", "(", ")" ]
42.875
17.1875
def facettupletrees(table, key, start='start', stop='stop', value=None): """ Construct faceted interval trees for the given table, where each node in the tree is a row of the table. """ import intervaltree it = iter(table) hdr = next(it) flds = list(map(text_type, hdr)) assert start in flds, 'start field not recognised' assert stop in flds, 'stop field not recognised' getstart = itemgetter(flds.index(start)) getstop = itemgetter(flds.index(stop)) if value is None: getvalue = tuple else: valueindices = asindices(hdr, value) assert len(valueindices) > 0, 'invalid value field specification' getvalue = itemgetter(*valueindices) keyindices = asindices(hdr, key) assert len(keyindices) > 0, 'invalid key' getkey = itemgetter(*keyindices) trees = dict() for row in it: k = getkey(row) if k not in trees: trees[k] = intervaltree.IntervalTree() trees[k].addi(getstart(row), getstop(row), getvalue(row)) return trees
[ "def", "facettupletrees", "(", "table", ",", "key", ",", "start", "=", "'start'", ",", "stop", "=", "'stop'", ",", "value", "=", "None", ")", ":", "import", "intervaltree", "it", "=", "iter", "(", "table", ")", "hdr", "=", "next", "(", "it", ")", "flds", "=", "list", "(", "map", "(", "text_type", ",", "hdr", ")", ")", "assert", "start", "in", "flds", ",", "'start field not recognised'", "assert", "stop", "in", "flds", ",", "'stop field not recognised'", "getstart", "=", "itemgetter", "(", "flds", ".", "index", "(", "start", ")", ")", "getstop", "=", "itemgetter", "(", "flds", ".", "index", "(", "stop", ")", ")", "if", "value", "is", "None", ":", "getvalue", "=", "tuple", "else", ":", "valueindices", "=", "asindices", "(", "hdr", ",", "value", ")", "assert", "len", "(", "valueindices", ")", ">", "0", ",", "'invalid value field specification'", "getvalue", "=", "itemgetter", "(", "*", "valueindices", ")", "keyindices", "=", "asindices", "(", "hdr", ",", "key", ")", "assert", "len", "(", "keyindices", ")", ">", "0", ",", "'invalid key'", "getkey", "=", "itemgetter", "(", "*", "keyindices", ")", "trees", "=", "dict", "(", ")", "for", "row", "in", "it", ":", "k", "=", "getkey", "(", "row", ")", "if", "k", "not", "in", "trees", ":", "trees", "[", "k", "]", "=", "intervaltree", ".", "IntervalTree", "(", ")", "trees", "[", "k", "]", ".", "addi", "(", "getstart", "(", "row", ")", ",", "getstop", "(", "row", ")", ",", "getvalue", "(", "row", ")", ")", "return", "trees" ]
32.3125
16.9375
def get_authorize_url(self, callback_url): """ Returns the Authorize URL as returned by QB, and specified by OAuth 1.0a. :return URI: """ self.authorize_url = self.authorize_url[:self.authorize_url.find('?')] \ if '?' in self.authorize_url else self.authorize_url qb_service = OAuth1Service( consumer_key=self.consumer_key, consumer_secret=self.consumer_secret, request_token_url=self.request_token_url, access_token_url=self.access_token_url, authorize_url=self.authorize_url, ) response = qb_service.get_raw_request_token( params={'oauth_callback': callback_url}) oauth_resp = dict(parse_qsl(response.text)) self.request_token = oauth_resp['oauth_token'] self.request_token_secret = oauth_resp['oauth_token_secret'] return qb_service.get_authorize_url(self.request_token)
[ "def", "get_authorize_url", "(", "self", ",", "callback_url", ")", ":", "self", ".", "authorize_url", "=", "self", ".", "authorize_url", "[", ":", "self", ".", "authorize_url", ".", "find", "(", "'?'", ")", "]", "if", "'?'", "in", "self", ".", "authorize_url", "else", "self", ".", "authorize_url", "qb_service", "=", "OAuth1Service", "(", "consumer_key", "=", "self", ".", "consumer_key", ",", "consumer_secret", "=", "self", ".", "consumer_secret", ",", "request_token_url", "=", "self", ".", "request_token_url", ",", "access_token_url", "=", "self", ".", "access_token_url", ",", "authorize_url", "=", "self", ".", "authorize_url", ",", ")", "response", "=", "qb_service", ".", "get_raw_request_token", "(", "params", "=", "{", "'oauth_callback'", ":", "callback_url", "}", ")", "oauth_resp", "=", "dict", "(", "parse_qsl", "(", "response", ".", "text", ")", ")", "self", ".", "request_token", "=", "oauth_resp", "[", "'oauth_token'", "]", "self", ".", "request_token_secret", "=", "oauth_resp", "[", "'oauth_token_secret'", "]", "return", "qb_service", ".", "get_authorize_url", "(", "self", ".", "request_token", ")" ]
37.36
20.16
def split(zpool, newzpool, **kwargs): ''' .. versionadded:: 2018.3.0 Splits devices off pool creating newpool. .. note:: All vdevs in pool must be mirrors. At the time of the split, ``newzpool`` will be a replica of ``zpool``. After splitting, do not forget to import the new pool! zpool : string Name of storage pool newzpool : string Name of new storage pool mountpoint : string Sets the mount point for the root dataset altroot : string Sets altroot for newzpool properties : dict Additional pool properties for newzpool CLI Examples: .. code-block:: bash salt '*' zpool.split datamirror databackup salt '*' zpool.split datamirror databackup altroot=/backup .. note:: Zpool properties can be specified at the time of creation of the pool by passing an additional argument called "properties" and specifying the properties with their respective values in the form of a python dictionary: .. code-block:: text properties="{'property1': 'value1', 'property2': 'value2'}" Example: .. code-block:: bash salt '*' zpool.split datamirror databackup properties="{'readonly': 'on'}" CLI Example: .. code-block:: bash salt '*' zpool.split datamirror databackup salt '*' zpool.split datamirror databackup altroot=/backup ''' ## Configure pool # NOTE: initialize the defaults opts = {} # NOTE: push pool and filesystem properties pool_properties = kwargs.get('properties', {}) # NOTE: set extra config based on kwargs if kwargs.get('altroot', False): opts['-R'] = kwargs.get('altroot') ## Split storage pool res = __salt__['cmd.run_all']( __utils__['zfs.zpool_command']( command='split', opts=opts, pool_properties=pool_properties, target=[zpool, newzpool], ), python_shell=False, ) return __utils__['zfs.parse_command_result'](res, 'split')
[ "def", "split", "(", "zpool", ",", "newzpool", ",", "*", "*", "kwargs", ")", ":", "## Configure pool", "# NOTE: initialize the defaults", "opts", "=", "{", "}", "# NOTE: push pool and filesystem properties", "pool_properties", "=", "kwargs", ".", "get", "(", "'properties'", ",", "{", "}", ")", "# NOTE: set extra config based on kwargs", "if", "kwargs", ".", "get", "(", "'altroot'", ",", "False", ")", ":", "opts", "[", "'-R'", "]", "=", "kwargs", ".", "get", "(", "'altroot'", ")", "## Split storage pool", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "__utils__", "[", "'zfs.zpool_command'", "]", "(", "command", "=", "'split'", ",", "opts", "=", "opts", ",", "pool_properties", "=", "pool_properties", ",", "target", "=", "[", "zpool", ",", "newzpool", "]", ",", ")", ",", "python_shell", "=", "False", ",", ")", "return", "__utils__", "[", "'zfs.parse_command_result'", "]", "(", "res", ",", "'split'", ")" ]
24.542169
24.855422
def update_qos_policy(self, qos_policy, body=None): """Updates a qos policy.""" return self.put(self.qos_policy_path % qos_policy, body=body)
[ "def", "update_qos_policy", "(", "self", ",", "qos_policy", ",", "body", "=", "None", ")", ":", "return", "self", ".", "put", "(", "self", ".", "qos_policy_path", "%", "qos_policy", ",", "body", "=", "body", ")" ]
44.5
8.75
def AddPorts(self,ports): """Create one or more port access policies. Include a list of dicts with protocol, port, and port_to (optional - for range) keys. >>> clc.v2.Server("WA1BTDIX01").PublicIPs().public_ips[0] .AddPorts([{'protocol': 'TCP', 'port': '80' }, {'protocol': 'UDP', 'port': '10000', 'port_to': '15000'}]).WaitUntilComplete() 0 """ for port in ports: if 'port_to' in port: self.ports.append(Port(self,port['protocol'],port['port'],port['port_to'])) else: self.ports.append(Port(self,port['protocol'],port['port'])) return(self.Update())
[ "def", "AddPorts", "(", "self", ",", "ports", ")", ":", "for", "port", "in", "ports", ":", "if", "'port_to'", "in", "port", ":", "self", ".", "ports", ".", "append", "(", "Port", "(", "self", ",", "port", "[", "'protocol'", "]", ",", "port", "[", "'port'", "]", ",", "port", "[", "'port_to'", "]", ")", ")", "else", ":", "self", ".", "ports", ".", "append", "(", "Port", "(", "self", ",", "port", "[", "'protocol'", "]", ",", "port", "[", "'port'", "]", ")", ")", "return", "(", "self", ".", "Update", "(", ")", ")" ]
34.117647
29.882353
def _get_service_names(self): """ Get a list of service names from Sentinel. Tries Sentinel hosts until one succeeds; if none succeed, raises a ConnectionError. :return: the list of service names from Sentinel. """ master_info = None connection_errors = [] for sentinel in self._sentinel.sentinels: # Unfortunately, redis.sentinel.Sentinel does not support sentinel_masters, so we have to step # through all of its connections manually try: master_info = sentinel.sentinel_masters() break except (redis.ConnectionError, redis.TimeoutError) as e: connection_errors.append('Failed to connect to {} due to error: "{}".'.format(sentinel, e)) continue if master_info is None: raise redis.ConnectionError( 'Could not get master info from Sentinel\n{}:'.format('\n'.join(connection_errors)) ) return list(master_info.keys())
[ "def", "_get_service_names", "(", "self", ")", ":", "master_info", "=", "None", "connection_errors", "=", "[", "]", "for", "sentinel", "in", "self", ".", "_sentinel", ".", "sentinels", ":", "# Unfortunately, redis.sentinel.Sentinel does not support sentinel_masters, so we have to step", "# through all of its connections manually", "try", ":", "master_info", "=", "sentinel", ".", "sentinel_masters", "(", ")", "break", "except", "(", "redis", ".", "ConnectionError", ",", "redis", ".", "TimeoutError", ")", "as", "e", ":", "connection_errors", ".", "append", "(", "'Failed to connect to {} due to error: \"{}\".'", ".", "format", "(", "sentinel", ",", "e", ")", ")", "continue", "if", "master_info", "is", "None", ":", "raise", "redis", ".", "ConnectionError", "(", "'Could not get master info from Sentinel\\n{}:'", ".", "format", "(", "'\\n'", ".", "join", "(", "connection_errors", ")", ")", ")", "return", "list", "(", "master_info", ".", "keys", "(", ")", ")" ]
44.695652
22.695652
def contains_only(self, *items): """Asserts that val contains only the given item or items.""" if len(items) == 0: raise ValueError('one or more args must be given') else: extra = [] for i in self.val: if i not in items: extra.append(i) if extra: self._err('Expected <%s> to contain only %s, but did contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(extra))) missing = [] for i in items: if i not in self.val: missing.append(i) if missing: self._err('Expected <%s> to contain only %s, but did not contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(missing))) return self
[ "def", "contains_only", "(", "self", ",", "*", "items", ")", ":", "if", "len", "(", "items", ")", "==", "0", ":", "raise", "ValueError", "(", "'one or more args must be given'", ")", "else", ":", "extra", "=", "[", "]", "for", "i", "in", "self", ".", "val", ":", "if", "i", "not", "in", "items", ":", "extra", ".", "append", "(", "i", ")", "if", "extra", ":", "self", ".", "_err", "(", "'Expected <%s> to contain only %s, but did contain %s.'", "%", "(", "self", ".", "val", ",", "self", ".", "_fmt_items", "(", "items", ")", ",", "self", ".", "_fmt_items", "(", "extra", ")", ")", ")", "missing", "=", "[", "]", "for", "i", "in", "items", ":", "if", "i", "not", "in", "self", ".", "val", ":", "missing", ".", "append", "(", "i", ")", "if", "missing", ":", "self", ".", "_err", "(", "'Expected <%s> to contain only %s, but did not contain %s.'", "%", "(", "self", ".", "val", ",", "self", ".", "_fmt_items", "(", "items", ")", ",", "self", ".", "_fmt_items", "(", "missing", ")", ")", ")", "return", "self" ]
42.315789
23.842105
def export_kappa_im(model, fname=None): """Return a networkx graph representing the model's Kappa influence map. Parameters ---------- model : pysb.core.Model A PySB model to be exported into a Kappa IM. fname : Optional[str] A file name, typically with .png or .pdf extension in which the IM is rendered using pygraphviz. Returns ------- networkx.MultiDiGraph A graph object representing the influence map. """ from .kappa_util import im_json_to_graph kappa = _prepare_kappa(model) imap = kappa.analyses_influence_map() im = im_json_to_graph(imap) for param in model.parameters: try: im.remove_node(param.name) except: pass if fname: agraph = networkx.nx_agraph.to_agraph(im) agraph.draw(fname, prog='dot') return im
[ "def", "export_kappa_im", "(", "model", ",", "fname", "=", "None", ")", ":", "from", ".", "kappa_util", "import", "im_json_to_graph", "kappa", "=", "_prepare_kappa", "(", "model", ")", "imap", "=", "kappa", ".", "analyses_influence_map", "(", ")", "im", "=", "im_json_to_graph", "(", "imap", ")", "for", "param", "in", "model", ".", "parameters", ":", "try", ":", "im", ".", "remove_node", "(", "param", ".", "name", ")", "except", ":", "pass", "if", "fname", ":", "agraph", "=", "networkx", ".", "nx_agraph", ".", "to_agraph", "(", "im", ")", "agraph", ".", "draw", "(", "fname", ",", "prog", "=", "'dot'", ")", "return", "im" ]
29.068966
15.931034
def count_mismatches_after_variant(reference_suffix, cdna_suffix): """ Computes the number of mismatching nucleotides between two cDNA sequences after a variant locus. Parameters ---------- reference_suffix : str cDNA sequence of a reference transcript after a variant locus cdna_suffix : str cDNA sequence detected from RNAseq after a variant locus """ len_diff = len(cdna_suffix) - len(reference_suffix) # if the reference is shorter than the read, the read runs into the intron - these count as # mismatches return sum(xi != yi for (xi, yi) in zip(reference_suffix, cdna_suffix)) + max(0, len_diff)
[ "def", "count_mismatches_after_variant", "(", "reference_suffix", ",", "cdna_suffix", ")", ":", "len_diff", "=", "len", "(", "cdna_suffix", ")", "-", "len", "(", "reference_suffix", ")", "# if the reference is shorter than the read, the read runs into the intron - these count as", "# mismatches", "return", "sum", "(", "xi", "!=", "yi", "for", "(", "xi", ",", "yi", ")", "in", "zip", "(", "reference_suffix", ",", "cdna_suffix", ")", ")", "+", "max", "(", "0", ",", "len_diff", ")" ]
36
29.555556
def extract_and_render_all_symbol_masks(self, raw_data_directory: str, destination_directory: str): """ Extracts all symbols from the raw XML documents and generates individual symbols from the masks :param raw_data_directory: The directory, that contains the xml-files and matching images :param destination_directory: The directory, in which the symbols should be generated into. One sub-folder per symbol category will be generated automatically """ print("Extracting Symbols from Muscima++ Dataset...") xml_files = self.get_all_xml_file_paths(raw_data_directory) crop_objects = self.load_crop_objects_from_xml_files(xml_files) self.render_masks_of_crop_objects_into_image(crop_objects, destination_directory)
[ "def", "extract_and_render_all_symbol_masks", "(", "self", ",", "raw_data_directory", ":", "str", ",", "destination_directory", ":", "str", ")", ":", "print", "(", "\"Extracting Symbols from Muscima++ Dataset...\"", ")", "xml_files", "=", "self", ".", "get_all_xml_file_paths", "(", "raw_data_directory", ")", "crop_objects", "=", "self", ".", "load_crop_objects_from_xml_files", "(", "xml_files", ")", "self", ".", "render_masks_of_crop_objects_into_image", "(", "crop_objects", ",", "destination_directory", ")" ]
62.461538
39.230769
def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: """Validate a GraphQL schema. Implements the "Type Validation" sub-sections of the specification's "Type System" section. Validation runs synchronously, returning a list of encountered errors, or an empty list if no errors were encountered and the Schema is valid. """ # First check to ensure the provided value is in fact a GraphQLSchema. assert_schema(schema) # If this Schema has already been validated, return the previous results. # noinspection PyProtectedMember errors = schema._validation_errors if errors is None: # Validate the schema, producing a list of errors. context = SchemaValidationContext(schema) context.validate_root_types() context.validate_directives() context.validate_types() # Persist the results of validation before returning to ensure validation does # not run multiple times for this schema. errors = context.errors schema._validation_errors = errors return errors
[ "def", "validate_schema", "(", "schema", ":", "GraphQLSchema", ")", "->", "List", "[", "GraphQLError", "]", ":", "# First check to ensure the provided value is in fact a GraphQLSchema.", "assert_schema", "(", "schema", ")", "# If this Schema has already been validated, return the previous results.", "# noinspection PyProtectedMember", "errors", "=", "schema", ".", "_validation_errors", "if", "errors", "is", "None", ":", "# Validate the schema, producing a list of errors.", "context", "=", "SchemaValidationContext", "(", "schema", ")", "context", ".", "validate_root_types", "(", ")", "context", ".", "validate_directives", "(", ")", "context", ".", "validate_types", "(", ")", "# Persist the results of validation before returning to ensure validation does", "# not run multiple times for this schema.", "errors", "=", "context", ".", "errors", "schema", ".", "_validation_errors", "=", "errors", "return", "errors" ]
36.62069
22.344828
def use_edns(self, edns=0, ednsflags=0, payload=1280, request_payload=None, options=None): """Configure EDNS behavior. @param edns: The EDNS level to use. Specifying None, False, or -1 means 'do not use EDNS', and in this case the other parameters are ignored. Specifying True is equivalent to specifying 0, i.e. 'use EDNS0'. @type edns: int or bool or None @param ednsflags: EDNS flag values. @type ednsflags: int @param payload: The EDNS sender's payload field, which is the maximum size of UDP datagram the sender can handle. @type payload: int @param request_payload: The EDNS payload size to use when sending this message. If not specified, defaults to the value of payload. @type request_payload: int or None @param options: The EDNS options @type options: None or list of dns.edns.Option objects @see: RFC 2671 """ if edns is None or edns is False: edns = -1 if edns is True: edns = 0 if request_payload is None: request_payload = payload if edns < 0: ednsflags = 0 payload = 0 request_payload = 0 options = [] else: # make sure the EDNS version in ednsflags agrees with edns ednsflags &= 0xFF00FFFFL ednsflags |= (edns << 16) if options is None: options = [] self.edns = edns self.ednsflags = ednsflags self.payload = payload self.options = options self.request_payload = request_payload
[ "def", "use_edns", "(", "self", ",", "edns", "=", "0", ",", "ednsflags", "=", "0", ",", "payload", "=", "1280", ",", "request_payload", "=", "None", ",", "options", "=", "None", ")", ":", "if", "edns", "is", "None", "or", "edns", "is", "False", ":", "edns", "=", "-", "1", "if", "edns", "is", "True", ":", "edns", "=", "0", "if", "request_payload", "is", "None", ":", "request_payload", "=", "payload", "if", "edns", "<", "0", ":", "ednsflags", "=", "0", "payload", "=", "0", "request_payload", "=", "0", "options", "=", "[", "]", "else", ":", "# make sure the EDNS version in ednsflags agrees with edns", "ednsflags", "&=", "0xFF00FFFFL", "ednsflags", "|=", "(", "edns", "<<", "16", ")", "if", "options", "is", "None", ":", "options", "=", "[", "]", "self", ".", "edns", "=", "edns", "self", ".", "ednsflags", "=", "ednsflags", "self", ".", "payload", "=", "payload", "self", ".", "options", "=", "options", "self", ".", "request_payload", "=", "request_payload" ]
39.756098
15.560976
def signing_text(self): """Return the text to be signed when signing the query.""" result = "%s\n%s\n%s\n%s" % (self.endpoint.method, self.endpoint.get_canonical_host(), self.endpoint.path, self.get_canonical_query_params()) return result
[ "def", "signing_text", "(", "self", ")", ":", "result", "=", "\"%s\\n%s\\n%s\\n%s\"", "%", "(", "self", ".", "endpoint", ".", "method", ",", "self", ".", "endpoint", ".", "get_canonical_host", "(", ")", ",", "self", ".", "endpoint", ".", "path", ",", "self", ".", "get_canonical_query_params", "(", ")", ")", "return", "result" ]
52.428571
19
def unshare(self, con): """Decrease the share of a connection in the shared cache.""" self._lock.acquire() try: con.unshare() shared = con.shared if not shared: # connection is idle, try: # so try to remove it self._shared_cache.remove(con) # from shared cache except ValueError: pass # pool has already been closed finally: self._lock.release() if not shared: # connection has become idle, self.cache(con.con)
[ "def", "unshare", "(", "self", ",", "con", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "con", ".", "unshare", "(", ")", "shared", "=", "con", ".", "shared", "if", "not", "shared", ":", "# connection is idle,", "try", ":", "# so try to remove it", "self", ".", "_shared_cache", ".", "remove", "(", "con", ")", "# from shared cache", "except", "ValueError", ":", "pass", "# pool has already been closed", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")", "if", "not", "shared", ":", "# connection has become idle,", "self", ".", "cache", "(", "con", ".", "con", ")" ]
38.2
13.333333
def main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra): """This is the way to invoke a script with all the bells and whistles as a command line application. This will always terminate the application after a call. If this is not wanted, ``SystemExit`` needs to be caught. This method is also available by directly calling the instance of a :class:`Command`. .. versionadded:: 3.0 Added the `standalone_mode` flag to control the standalone mode. :param args: the arguments that should be used for parsing. If not provided, ``sys.argv[1:]`` is used. :param prog_name: the program name that should be used. By default the program name is constructed by taking the file name from ``sys.argv[0]``. :param complete_var: the environment variable that controls the bash completion support. The default is ``"_<prog_name>_COMPLETE"`` with prog name in uppercase. :param standalone_mode: the default behavior is to invoke the script in standalone mode. Click will then handle exceptions and convert them into error messages and the function will never return but shut down the interpreter. If this is set to `False` they will be propagated to the caller and the return value of this function is the return value of :meth:`invoke`. :param extra: extra keyword arguments are forwarded to the context constructor. See :class:`Context` for more information. """ # If we are in Python 3, we will verify that the environment is # sane at this point of reject further execution to avoid a # broken script. if not PY2: try: import locale fs_enc = codecs.lookup(locale.getpreferredencoding()).name except Exception: fs_enc = 'ascii' if fs_enc == 'ascii': raise RuntimeError('Click will abort further execution ' 'because Python 3 was configured to use ' 'ASCII as encoding for the environment. 
' 'Either switch to Python 2 or consult ' 'http://click.pocoo.org/python3/ ' 'for mitigation steps.') if args is None: args = sys.argv[1:] else: args = list(args) if prog_name is None: prog_name = make_str(os.path.basename( sys.argv and sys.argv[0] or __file__)) # Hook for the Bash completion. This only activates if the Bash # completion is actually enabled, otherwise this is quite a fast # noop. _bashcomplete(self, prog_name, complete_var) try: try: with self.make_context(prog_name, args, **extra) as ctx: rv = self.invoke(ctx) if not standalone_mode: return rv ctx.exit() except (EOFError, KeyboardInterrupt): echo(file=sys.stderr) raise Abort() except ClickException as e: if not standalone_mode: raise e.show() sys.exit(e.exit_code) except Abort: if not standalone_mode: raise echo('Aborted!', file=sys.stderr) sys.exit(1)
[ "def", "main", "(", "self", ",", "args", "=", "None", ",", "prog_name", "=", "None", ",", "complete_var", "=", "None", ",", "standalone_mode", "=", "True", ",", "*", "*", "extra", ")", ":", "# If we are in Python 3, we will verify that the environment is", "# sane at this point of reject further execution to avoid a", "# broken script.", "if", "not", "PY2", ":", "try", ":", "import", "locale", "fs_enc", "=", "codecs", ".", "lookup", "(", "locale", ".", "getpreferredencoding", "(", ")", ")", ".", "name", "except", "Exception", ":", "fs_enc", "=", "'ascii'", "if", "fs_enc", "==", "'ascii'", ":", "raise", "RuntimeError", "(", "'Click will abort further execution '", "'because Python 3 was configured to use '", "'ASCII as encoding for the environment. '", "'Either switch to Python 2 or consult '", "'http://click.pocoo.org/python3/ '", "'for mitigation steps.'", ")", "if", "args", "is", "None", ":", "args", "=", "sys", ".", "argv", "[", "1", ":", "]", "else", ":", "args", "=", "list", "(", "args", ")", "if", "prog_name", "is", "None", ":", "prog_name", "=", "make_str", "(", "os", ".", "path", ".", "basename", "(", "sys", ".", "argv", "and", "sys", ".", "argv", "[", "0", "]", "or", "__file__", ")", ")", "# Hook for the Bash completion. 
This only activates if the Bash", "# completion is actually enabled, otherwise this is quite a fast", "# noop.", "_bashcomplete", "(", "self", ",", "prog_name", ",", "complete_var", ")", "try", ":", "try", ":", "with", "self", ".", "make_context", "(", "prog_name", ",", "args", ",", "*", "*", "extra", ")", "as", "ctx", ":", "rv", "=", "self", ".", "invoke", "(", "ctx", ")", "if", "not", "standalone_mode", ":", "return", "rv", "ctx", ".", "exit", "(", ")", "except", "(", "EOFError", ",", "KeyboardInterrupt", ")", ":", "echo", "(", "file", "=", "sys", ".", "stderr", ")", "raise", "Abort", "(", ")", "except", "ClickException", "as", "e", ":", "if", "not", "standalone_mode", ":", "raise", "e", ".", "show", "(", ")", "sys", ".", "exit", "(", "e", ".", "exit_code", ")", "except", "Abort", ":", "if", "not", "standalone_mode", ":", "raise", "echo", "(", "'Aborted!'", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")" ]
45.952381
21.535714
def configure(self, inputs, outputs): """Configure activity input and output. You need to provide a list of input and output :class:`Property`. Does not work with lists of propery id's. :param inputs: iterable of input property models :type inputs: list(:class:`Property`) :param outputs: iterable of output property models :type outputs: list(:class:`Property`) :raises APIError: when unable to configure the activity """ url = self._client._build_url('activity', activity_id=self.id) r = self._client._request('PUT', url, params={'select_action': 'update_associations'}, json={ 'inputs': [p.id for p in inputs], 'outputs': [p.id for p in outputs] }) if r.status_code != requests.codes.ok: # pragma: no cover raise APIError("Could not configure activity")
[ "def", "configure", "(", "self", ",", "inputs", ",", "outputs", ")", ":", "url", "=", "self", ".", "_client", ".", "_build_url", "(", "'activity'", ",", "activity_id", "=", "self", ".", "id", ")", "r", "=", "self", ".", "_client", ".", "_request", "(", "'PUT'", ",", "url", ",", "params", "=", "{", "'select_action'", ":", "'update_associations'", "}", ",", "json", "=", "{", "'inputs'", ":", "[", "p", ".", "id", "for", "p", "in", "inputs", "]", ",", "'outputs'", ":", "[", "p", ".", "id", "for", "p", "in", "outputs", "]", "}", ")", "if", "r", ".", "status_code", "!=", "requests", ".", "codes", ".", "ok", ":", "# pragma: no cover", "raise", "APIError", "(", "\"Could not configure activity\"", ")" ]
43.7
24.1
def do_read(self, args): """read <addr> <objid> <prop> [ <indx> ]""" args = args.split() if _debug: ReadPropertyConsoleCmd._debug("do_read %r", args) try: addr, obj_id, prop_id = args[:3] obj_id = ObjectIdentifier(obj_id).value datatype = get_datatype(obj_id[0], prop_id) if not datatype: raise ValueError("invalid property for object type") # build a request request = ReadPropertyRequest( objectIdentifier=obj_id, propertyIdentifier=prop_id, ) request.pduDestination = Address(addr) if len(args) == 4: request.propertyArrayIndex = int(args[3]) if _debug: ReadPropertyConsoleCmd._debug(" - request: %r", request) # make an IOCB iocb = IOCB(request) if _debug: ReadPropertyConsoleCmd._debug(" - iocb: %r", iocb) # give it to the application deferred(this_application.request_io, iocb) # wait for it to complete iocb.wait() # do something for success if iocb.ioResponse: apdu = iocb.ioResponse # should be an ack if not isinstance(apdu, ReadPropertyACK): if _debug: ReadPropertyConsoleCmd._debug(" - not an ack") return # find the datatype datatype = get_datatype(apdu.objectIdentifier[0], apdu.propertyIdentifier) if _debug: ReadPropertyConsoleCmd._debug(" - datatype: %r", datatype) if not datatype: raise TypeError("unknown datatype") # special case for array parts, others are managed by cast_out if issubclass(datatype, Array) and (apdu.propertyArrayIndex is not None): if apdu.propertyArrayIndex == 0: value = apdu.propertyValue.cast_out(Unsigned) else: value = apdu.propertyValue.cast_out(datatype.subtype) else: value = apdu.propertyValue.cast_out(datatype) if _debug: ReadPropertyConsoleCmd._debug(" - value: %r", value) sys.stdout.write(str(value) + '\n') if hasattr(value, 'debug_contents'): value.debug_contents(file=sys.stdout) sys.stdout.flush() # do something for error/reject/abort if iocb.ioError: sys.stdout.write(str(iocb.ioError) + '\n') except Exception, error: ReadPropertyConsoleCmd._exception("exception: %r", error)
[ "def", "do_read", "(", "self", ",", "args", ")", ":", "args", "=", "args", ".", "split", "(", ")", "if", "_debug", ":", "ReadPropertyConsoleCmd", ".", "_debug", "(", "\"do_read %r\"", ",", "args", ")", "try", ":", "addr", ",", "obj_id", ",", "prop_id", "=", "args", "[", ":", "3", "]", "obj_id", "=", "ObjectIdentifier", "(", "obj_id", ")", ".", "value", "datatype", "=", "get_datatype", "(", "obj_id", "[", "0", "]", ",", "prop_id", ")", "if", "not", "datatype", ":", "raise", "ValueError", "(", "\"invalid property for object type\"", ")", "# build a request", "request", "=", "ReadPropertyRequest", "(", "objectIdentifier", "=", "obj_id", ",", "propertyIdentifier", "=", "prop_id", ",", ")", "request", ".", "pduDestination", "=", "Address", "(", "addr", ")", "if", "len", "(", "args", ")", "==", "4", ":", "request", ".", "propertyArrayIndex", "=", "int", "(", "args", "[", "3", "]", ")", "if", "_debug", ":", "ReadPropertyConsoleCmd", ".", "_debug", "(", "\" - request: %r\"", ",", "request", ")", "# make an IOCB", "iocb", "=", "IOCB", "(", "request", ")", "if", "_debug", ":", "ReadPropertyConsoleCmd", ".", "_debug", "(", "\" - iocb: %r\"", ",", "iocb", ")", "# give it to the application", "deferred", "(", "this_application", ".", "request_io", ",", "iocb", ")", "# wait for it to complete", "iocb", ".", "wait", "(", ")", "# do something for success", "if", "iocb", ".", "ioResponse", ":", "apdu", "=", "iocb", ".", "ioResponse", "# should be an ack", "if", "not", "isinstance", "(", "apdu", ",", "ReadPropertyACK", ")", ":", "if", "_debug", ":", "ReadPropertyConsoleCmd", ".", "_debug", "(", "\" - not an ack\"", ")", "return", "# find the datatype", "datatype", "=", "get_datatype", "(", "apdu", ".", "objectIdentifier", "[", "0", "]", ",", "apdu", ".", "propertyIdentifier", ")", "if", "_debug", ":", "ReadPropertyConsoleCmd", ".", "_debug", "(", "\" - datatype: %r\"", ",", "datatype", ")", "if", "not", "datatype", ":", "raise", "TypeError", "(", "\"unknown datatype\"", ")", "# 
special case for array parts, others are managed by cast_out", "if", "issubclass", "(", "datatype", ",", "Array", ")", "and", "(", "apdu", ".", "propertyArrayIndex", "is", "not", "None", ")", ":", "if", "apdu", ".", "propertyArrayIndex", "==", "0", ":", "value", "=", "apdu", ".", "propertyValue", ".", "cast_out", "(", "Unsigned", ")", "else", ":", "value", "=", "apdu", ".", "propertyValue", ".", "cast_out", "(", "datatype", ".", "subtype", ")", "else", ":", "value", "=", "apdu", ".", "propertyValue", ".", "cast_out", "(", "datatype", ")", "if", "_debug", ":", "ReadPropertyConsoleCmd", ".", "_debug", "(", "\" - value: %r\"", ",", "value", ")", "sys", ".", "stdout", ".", "write", "(", "str", "(", "value", ")", "+", "'\\n'", ")", "if", "hasattr", "(", "value", ",", "'debug_contents'", ")", ":", "value", ".", "debug_contents", "(", "file", "=", "sys", ".", "stdout", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "# do something for error/reject/abort", "if", "iocb", ".", "ioError", ":", "sys", ".", "stdout", ".", "write", "(", "str", "(", "iocb", ".", "ioError", ")", "+", "'\\n'", ")", "except", "Exception", ",", "error", ":", "ReadPropertyConsoleCmd", ".", "_exception", "(", "\"exception: %r\"", ",", "error", ")" ]
38.557143
21.857143
def hide_routemap_holder_route_map_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") hide_routemap_holder = ET.SubElement(config, "hide-routemap-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy") route_map = ET.SubElement(hide_routemap_holder, "route-map") action_rm_key = ET.SubElement(route_map, "action-rm") action_rm_key.text = kwargs.pop('action_rm') instance_key = ET.SubElement(route_map, "instance") instance_key.text = kwargs.pop('instance') name = ET.SubElement(route_map, "name") name.text = kwargs.pop('name') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "hide_routemap_holder_route_map_name", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "hide_routemap_holder", "=", "ET", ".", "SubElement", "(", "config", ",", "\"hide-routemap-holder\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-ip-policy\"", ")", "route_map", "=", "ET", ".", "SubElement", "(", "hide_routemap_holder", ",", "\"route-map\"", ")", "action_rm_key", "=", "ET", ".", "SubElement", "(", "route_map", ",", "\"action-rm\"", ")", "action_rm_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'action_rm'", ")", "instance_key", "=", "ET", ".", "SubElement", "(", "route_map", ",", "\"instance\"", ")", "instance_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'instance'", ")", "name", "=", "ET", ".", "SubElement", "(", "route_map", ",", "\"name\"", ")", "name", ".", "text", "=", "kwargs", ".", "pop", "(", "'name'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
48.066667
17.866667
def badge_svg_text(self): """The badge SVG text.""" # Identify whether template is a file or the actual template text if len(self.template.split('\n')) == 1: with open(self.template, mode='r') as file_handle: badge_text = file_handle.read() else: badge_text = self.template return badge_text.replace('{{ badge width }}', str(self.badge_width)) \ .replace('{{ font name }}', self.font_name) \ .replace('{{ font size }}', str(self.font_size)) \ .replace('{{ label }}', self.label) \ .replace('{{ value }}', self.value_text) \ .replace('{{ label anchor }}', str(self.label_anchor)) \ .replace('{{ label anchor shadow }}', str(self.label_anchor_shadow)) \ .replace('{{ value anchor }}', str(self.value_anchor)) \ .replace('{{ value anchor shadow }}', str(self.value_anchor_shadow)) \ .replace('{{ color }}', self.badge_color_code) \ .replace('{{ label text color }}', self.label_text_color) \ .replace('{{ value text color }}', self.value_text_color) \ .replace('{{ color split x }}', str(self.color_split_position)) \ .replace('{{ value width }}', str(self.badge_width - self.color_split_position))
[ "def", "badge_svg_text", "(", "self", ")", ":", "# Identify whether template is a file or the actual template text", "if", "len", "(", "self", ".", "template", ".", "split", "(", "'\\n'", ")", ")", "==", "1", ":", "with", "open", "(", "self", ".", "template", ",", "mode", "=", "'r'", ")", "as", "file_handle", ":", "badge_text", "=", "file_handle", ".", "read", "(", ")", "else", ":", "badge_text", "=", "self", ".", "template", "return", "badge_text", ".", "replace", "(", "'{{ badge width }}'", ",", "str", "(", "self", ".", "badge_width", ")", ")", ".", "replace", "(", "'{{ font name }}'", ",", "self", ".", "font_name", ")", ".", "replace", "(", "'{{ font size }}'", ",", "str", "(", "self", ".", "font_size", ")", ")", ".", "replace", "(", "'{{ label }}'", ",", "self", ".", "label", ")", ".", "replace", "(", "'{{ value }}'", ",", "self", ".", "value_text", ")", ".", "replace", "(", "'{{ label anchor }}'", ",", "str", "(", "self", ".", "label_anchor", ")", ")", ".", "replace", "(", "'{{ label anchor shadow }}'", ",", "str", "(", "self", ".", "label_anchor_shadow", ")", ")", ".", "replace", "(", "'{{ value anchor }}'", ",", "str", "(", "self", ".", "value_anchor", ")", ")", ".", "replace", "(", "'{{ value anchor shadow }}'", ",", "str", "(", "self", ".", "value_anchor_shadow", ")", ")", ".", "replace", "(", "'{{ color }}'", ",", "self", ".", "badge_color_code", ")", ".", "replace", "(", "'{{ label text color }}'", ",", "self", ".", "label_text_color", ")", ".", "replace", "(", "'{{ value text color }}'", ",", "self", ".", "value_text_color", ")", ".", "replace", "(", "'{{ color split x }}'", ",", "str", "(", "self", ".", "color_split_position", ")", ")", ".", "replace", "(", "'{{ value width }}'", ",", "str", "(", "self", ".", "badge_width", "-", "self", ".", "color_split_position", ")", ")" ]
54.583333
25.208333
def _EccZmaxRperiRap(self,*args,**kwargs): """ NAME: EccZmaxRperiRap (_EccZmaxRperiRap) PURPOSE: evaluate the eccentricity, maximum height above the plane, peri- and apocenter in the Staeckel approximation INPUT: Either: a) R,vR,vT,z,vz[,phi]: 1) floats: phase-space value for single object (phi is optional) (each can be a Quantity) 2) numpy.ndarray: [N] phase-space values for N objects (each can be a Quantity) b) Orbit instance: initial condition used if that's it, orbit(t) if there is a time given as well as the second argument OUTPUT: (e,zmax,rperi,rap) HISTORY: 2017-12-15 - Written - Bovy (UofT) """ if len(args) == 5: #R,vR.vT, z, vz R,vR,vT, z, vz= args elif len(args) == 6: #R,vR.vT, z, vz, phi R,vR,vT, z, vz, phi= args else: self._parse_eval_args(*args) R= self._eval_R vR= self._eval_vR vT= self._eval_vT z= self._eval_z vz= self._eval_vz Lz= R*vT Phi= _evaluatePotentials(self._pot,R,z) E= Phi+vR**2./2.+vT**2./2.+vz**2./2. thisERL= -numpy.exp(self._ERLInterp(Lz))+self._ERLmax thisERa= -numpy.exp(self._ERaInterp(Lz))+self._ERamax if isinstance(R,numpy.ndarray): indx= ((E-thisERa)/(thisERL-thisERa) > 1.)\ *(((E-thisERa)/(thisERL-thisERa)-1.) < 10.**-2.) E[indx]= thisERL[indx] indx= ((E-thisERa)/(thisERL-thisERa) < 0.)\ *((E-thisERa)/(thisERL-thisERa) > -10.**-2.) E[indx]= thisERa[indx] indx= (Lz < self._Lzmin) indx+= (Lz > self._Lzmax) indx+= ((E-thisERa)/(thisERL-thisERa) > 1.) indx+= ((E-thisERa)/(thisERL-thisERa) < 0.) indxc= True^indx ecc= numpy.empty(R.shape) zmax= numpy.empty(R.shape) rperi= numpy.empty(R.shape) rap= numpy.empty(R.shape) if numpy.sum(indxc) > 0: u0= numpy.exp(self._logu0Interp.ev(Lz[indxc], (_Efunc(E[indxc],thisERL[indxc])-_Efunc(thisERa[indxc],thisERL[indxc]))/(_Efunc(thisERL[indxc],thisERL[indxc])-_Efunc(thisERa[indxc],thisERL[indxc])))) sinh2u0= numpy.sinh(u0)**2. 
thisEr= self.Er(R[indxc],z[indxc],vR[indxc],vz[indxc], E[indxc],Lz[indxc],sinh2u0,u0) thisEz= self.Ez(R[indxc],z[indxc],vR[indxc],vz[indxc], E[indxc],Lz[indxc],sinh2u0,u0) thisv2= self.vatu0(E[indxc],Lz[indxc],u0,self._delta*numpy.sinh(u0),retv2=True) cos2psi= 2.*thisEr/thisv2/(1.+sinh2u0) #latter is cosh2u0 cos2psi[(cos2psi > 1.)*(cos2psi < 1.+10.**-5.)]= 1. indxCos2psi= (cos2psi > 1.) indxCos2psi+= (cos2psi < 0.) indxc[indxc]= True^indxCos2psi#Handle these two cases as off-grid indx= True^indxc psi= numpy.arccos(numpy.sqrt(cos2psi[True^indxCos2psi])) coords= numpy.empty((3,numpy.sum(indxc))) coords[0,:]= (Lz[indxc]-self._Lzmin)/(self._Lzmax-self._Lzmin)*(self._nLz-1.) y= (_Efunc(E[indxc],thisERL[indxc])-_Efunc(thisERa[indxc],thisERL[indxc]))/(_Efunc(thisERL[indxc],thisERL[indxc])-_Efunc(thisERa[indxc],thisERL[indxc])) coords[1,:]= y*(self._nE-1.) coords[2,:]= psi/numpy.pi*2.*(self._npsi-1.) ecc[indxc]= (numpy.exp(ndimage.interpolation.map_coordinates(self._eccFiltered, coords, order=3, prefilter=False))-10.**-10.) rperi[indxc]= (numpy.exp(ndimage.interpolation.map_coordinates(self._rperiFiltered, coords, order=3, prefilter=False))-10.**-10.)*(numpy.exp(self._rperiLzInterp(Lz[indxc]))-10.**-5.) # We do rap below with zmax #Switch to Ez-calculated psi sin2psi= 2.*thisEz[True^indxCos2psi]/thisv2[True^indxCos2psi]/(1.+sinh2u0[True^indxCos2psi]) #latter is cosh2u0 sin2psi[(sin2psi > 1.)*(sin2psi < 1.+10.**-5.)]= 1. indxSin2psi= (sin2psi > 1.) indxSin2psi+= (sin2psi < 0.) indxc[indxc]= True^indxSin2psi#Handle these two cases as off-grid indx= True^indxc psiz= numpy.arcsin(numpy.sqrt(sin2psi[True^indxSin2psi])) newcoords= numpy.empty((3,numpy.sum(indxc))) newcoords[0:2,:]= coords[0:2,True^indxSin2psi] newcoords[2,:]= psiz/numpy.pi*2.*(self._npsi-1.) zmax[indxc]= (numpy.exp(ndimage.interpolation.map_coordinates(self._zmaxFiltered, newcoords, order=3, prefilter=False))-10.**-10.)*(numpy.exp(self._zmaxLzInterp(Lz[indxc]))-10.**-5.) 
rap[indxc]= (numpy.exp(ndimage.interpolation.map_coordinates(self._rapFiltered, newcoords, order=3, prefilter=False))-10.**-10.)*(numpy.exp(self._rapLzInterp(Lz[indxc]))-10.**-5.) if numpy.sum(indx) > 0: eccindiv, zmaxindiv, rperiindiv, rapindiv=\ self._aA.EccZmaxRperiRap(R[indx], vR[indx], vT[indx], z[indx], vz[indx], **kwargs) ecc[indx]= eccindiv zmax[indx]= zmaxindiv rperi[indx]= rperiindiv rap[indx]= rapindiv else: ecc,zmax,rperi,rap= self.EccZmaxRperiRap(numpy.array([R]), numpy.array([vR]), numpy.array([vT]), numpy.array([z]), numpy.array([vz]), **kwargs) return (ecc[0],zmax[0],rperi[0],rap[0]) ecc[ecc < 0.]= 0. zmax[zmax < 0.]= 0. rperi[rperi < 0.]= 0. rap[rap < 0.]= 0. return (ecc,zmax,rperi,rap)
[ "def", "_EccZmaxRperiRap", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "args", ")", "==", "5", ":", "#R,vR.vT, z, vz", "R", ",", "vR", ",", "vT", ",", "z", ",", "vz", "=", "args", "elif", "len", "(", "args", ")", "==", "6", ":", "#R,vR.vT, z, vz, phi", "R", ",", "vR", ",", "vT", ",", "z", ",", "vz", ",", "phi", "=", "args", "else", ":", "self", ".", "_parse_eval_args", "(", "*", "args", ")", "R", "=", "self", ".", "_eval_R", "vR", "=", "self", ".", "_eval_vR", "vT", "=", "self", ".", "_eval_vT", "z", "=", "self", ".", "_eval_z", "vz", "=", "self", ".", "_eval_vz", "Lz", "=", "R", "*", "vT", "Phi", "=", "_evaluatePotentials", "(", "self", ".", "_pot", ",", "R", ",", "z", ")", "E", "=", "Phi", "+", "vR", "**", "2.", "/", "2.", "+", "vT", "**", "2.", "/", "2.", "+", "vz", "**", "2.", "/", "2.", "thisERL", "=", "-", "numpy", ".", "exp", "(", "self", ".", "_ERLInterp", "(", "Lz", ")", ")", "+", "self", ".", "_ERLmax", "thisERa", "=", "-", "numpy", ".", "exp", "(", "self", ".", "_ERaInterp", "(", "Lz", ")", ")", "+", "self", ".", "_ERamax", "if", "isinstance", "(", "R", ",", "numpy", ".", "ndarray", ")", ":", "indx", "=", "(", "(", "E", "-", "thisERa", ")", "/", "(", "thisERL", "-", "thisERa", ")", ">", "1.", ")", "*", "(", "(", "(", "E", "-", "thisERa", ")", "/", "(", "thisERL", "-", "thisERa", ")", "-", "1.", ")", "<", "10.", "**", "-", "2.", ")", "E", "[", "indx", "]", "=", "thisERL", "[", "indx", "]", "indx", "=", "(", "(", "E", "-", "thisERa", ")", "/", "(", "thisERL", "-", "thisERa", ")", "<", "0.", ")", "*", "(", "(", "E", "-", "thisERa", ")", "/", "(", "thisERL", "-", "thisERa", ")", ">", "-", "10.", "**", "-", "2.", ")", "E", "[", "indx", "]", "=", "thisERa", "[", "indx", "]", "indx", "=", "(", "Lz", "<", "self", ".", "_Lzmin", ")", "indx", "+=", "(", "Lz", ">", "self", ".", "_Lzmax", ")", "indx", "+=", "(", "(", "E", "-", "thisERa", ")", "/", "(", "thisERL", "-", "thisERa", ")", ">", "1.", ")", "indx", "+=", "(", "(", 
"E", "-", "thisERa", ")", "/", "(", "thisERL", "-", "thisERa", ")", "<", "0.", ")", "indxc", "=", "True", "^", "indx", "ecc", "=", "numpy", ".", "empty", "(", "R", ".", "shape", ")", "zmax", "=", "numpy", ".", "empty", "(", "R", ".", "shape", ")", "rperi", "=", "numpy", ".", "empty", "(", "R", ".", "shape", ")", "rap", "=", "numpy", ".", "empty", "(", "R", ".", "shape", ")", "if", "numpy", ".", "sum", "(", "indxc", ")", ">", "0", ":", "u0", "=", "numpy", ".", "exp", "(", "self", ".", "_logu0Interp", ".", "ev", "(", "Lz", "[", "indxc", "]", ",", "(", "_Efunc", "(", "E", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", "-", "_Efunc", "(", "thisERa", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", ")", "/", "(", "_Efunc", "(", "thisERL", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", "-", "_Efunc", "(", "thisERa", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", ")", ")", ")", "sinh2u0", "=", "numpy", ".", "sinh", "(", "u0", ")", "**", "2.", "thisEr", "=", "self", ".", "Er", "(", "R", "[", "indxc", "]", ",", "z", "[", "indxc", "]", ",", "vR", "[", "indxc", "]", ",", "vz", "[", "indxc", "]", ",", "E", "[", "indxc", "]", ",", "Lz", "[", "indxc", "]", ",", "sinh2u0", ",", "u0", ")", "thisEz", "=", "self", ".", "Ez", "(", "R", "[", "indxc", "]", ",", "z", "[", "indxc", "]", ",", "vR", "[", "indxc", "]", ",", "vz", "[", "indxc", "]", ",", "E", "[", "indxc", "]", ",", "Lz", "[", "indxc", "]", ",", "sinh2u0", ",", "u0", ")", "thisv2", "=", "self", ".", "vatu0", "(", "E", "[", "indxc", "]", ",", "Lz", "[", "indxc", "]", ",", "u0", ",", "self", ".", "_delta", "*", "numpy", ".", "sinh", "(", "u0", ")", ",", "retv2", "=", "True", ")", "cos2psi", "=", "2.", "*", "thisEr", "/", "thisv2", "/", "(", "1.", "+", "sinh2u0", ")", "#latter is cosh2u0", "cos2psi", "[", "(", "cos2psi", ">", "1.", ")", "*", "(", "cos2psi", "<", "1.", "+", "10.", "**", "-", "5.", ")", "]", "=", "1.", "indxCos2psi", "=", "(", "cos2psi", ">", "1.", ")", "indxCos2psi", 
"+=", "(", "cos2psi", "<", "0.", ")", "indxc", "[", "indxc", "]", "=", "True", "^", "indxCos2psi", "#Handle these two cases as off-grid", "indx", "=", "True", "^", "indxc", "psi", "=", "numpy", ".", "arccos", "(", "numpy", ".", "sqrt", "(", "cos2psi", "[", "True", "^", "indxCos2psi", "]", ")", ")", "coords", "=", "numpy", ".", "empty", "(", "(", "3", ",", "numpy", ".", "sum", "(", "indxc", ")", ")", ")", "coords", "[", "0", ",", ":", "]", "=", "(", "Lz", "[", "indxc", "]", "-", "self", ".", "_Lzmin", ")", "/", "(", "self", ".", "_Lzmax", "-", "self", ".", "_Lzmin", ")", "*", "(", "self", ".", "_nLz", "-", "1.", ")", "y", "=", "(", "_Efunc", "(", "E", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", "-", "_Efunc", "(", "thisERa", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", ")", "/", "(", "_Efunc", "(", "thisERL", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", "-", "_Efunc", "(", "thisERa", "[", "indxc", "]", ",", "thisERL", "[", "indxc", "]", ")", ")", "coords", "[", "1", ",", ":", "]", "=", "y", "*", "(", "self", ".", "_nE", "-", "1.", ")", "coords", "[", "2", ",", ":", "]", "=", "psi", "/", "numpy", ".", "pi", "*", "2.", "*", "(", "self", ".", "_npsi", "-", "1.", ")", "ecc", "[", "indxc", "]", "=", "(", "numpy", ".", "exp", "(", "ndimage", ".", "interpolation", ".", "map_coordinates", "(", "self", ".", "_eccFiltered", ",", "coords", ",", "order", "=", "3", ",", "prefilter", "=", "False", ")", ")", "-", "10.", "**", "-", "10.", ")", "rperi", "[", "indxc", "]", "=", "(", "numpy", ".", "exp", "(", "ndimage", ".", "interpolation", ".", "map_coordinates", "(", "self", ".", "_rperiFiltered", ",", "coords", ",", "order", "=", "3", ",", "prefilter", "=", "False", ")", ")", "-", "10.", "**", "-", "10.", ")", "*", "(", "numpy", ".", "exp", "(", "self", ".", "_rperiLzInterp", "(", "Lz", "[", "indxc", "]", ")", ")", "-", "10.", "**", "-", "5.", ")", "# We do rap below with zmax", "#Switch to Ez-calculated psi", "sin2psi", "=", "2.", "*", 
"thisEz", "[", "True", "^", "indxCos2psi", "]", "/", "thisv2", "[", "True", "^", "indxCos2psi", "]", "/", "(", "1.", "+", "sinh2u0", "[", "True", "^", "indxCos2psi", "]", ")", "#latter is cosh2u0", "sin2psi", "[", "(", "sin2psi", ">", "1.", ")", "*", "(", "sin2psi", "<", "1.", "+", "10.", "**", "-", "5.", ")", "]", "=", "1.", "indxSin2psi", "=", "(", "sin2psi", ">", "1.", ")", "indxSin2psi", "+=", "(", "sin2psi", "<", "0.", ")", "indxc", "[", "indxc", "]", "=", "True", "^", "indxSin2psi", "#Handle these two cases as off-grid", "indx", "=", "True", "^", "indxc", "psiz", "=", "numpy", ".", "arcsin", "(", "numpy", ".", "sqrt", "(", "sin2psi", "[", "True", "^", "indxSin2psi", "]", ")", ")", "newcoords", "=", "numpy", ".", "empty", "(", "(", "3", ",", "numpy", ".", "sum", "(", "indxc", ")", ")", ")", "newcoords", "[", "0", ":", "2", ",", ":", "]", "=", "coords", "[", "0", ":", "2", ",", "True", "^", "indxSin2psi", "]", "newcoords", "[", "2", ",", ":", "]", "=", "psiz", "/", "numpy", ".", "pi", "*", "2.", "*", "(", "self", ".", "_npsi", "-", "1.", ")", "zmax", "[", "indxc", "]", "=", "(", "numpy", ".", "exp", "(", "ndimage", ".", "interpolation", ".", "map_coordinates", "(", "self", ".", "_zmaxFiltered", ",", "newcoords", ",", "order", "=", "3", ",", "prefilter", "=", "False", ")", ")", "-", "10.", "**", "-", "10.", ")", "*", "(", "numpy", ".", "exp", "(", "self", ".", "_zmaxLzInterp", "(", "Lz", "[", "indxc", "]", ")", ")", "-", "10.", "**", "-", "5.", ")", "rap", "[", "indxc", "]", "=", "(", "numpy", ".", "exp", "(", "ndimage", ".", "interpolation", ".", "map_coordinates", "(", "self", ".", "_rapFiltered", ",", "newcoords", ",", "order", "=", "3", ",", "prefilter", "=", "False", ")", ")", "-", "10.", "**", "-", "10.", ")", "*", "(", "numpy", ".", "exp", "(", "self", ".", "_rapLzInterp", "(", "Lz", "[", "indxc", "]", ")", ")", "-", "10.", "**", "-", "5.", ")", "if", "numpy", ".", "sum", "(", "indx", ")", ">", "0", ":", "eccindiv", ",", "zmaxindiv", ",", "rperiindiv", 
",", "rapindiv", "=", "self", ".", "_aA", ".", "EccZmaxRperiRap", "(", "R", "[", "indx", "]", ",", "vR", "[", "indx", "]", ",", "vT", "[", "indx", "]", ",", "z", "[", "indx", "]", ",", "vz", "[", "indx", "]", ",", "*", "*", "kwargs", ")", "ecc", "[", "indx", "]", "=", "eccindiv", "zmax", "[", "indx", "]", "=", "zmaxindiv", "rperi", "[", "indx", "]", "=", "rperiindiv", "rap", "[", "indx", "]", "=", "rapindiv", "else", ":", "ecc", ",", "zmax", ",", "rperi", ",", "rap", "=", "self", ".", "EccZmaxRperiRap", "(", "numpy", ".", "array", "(", "[", "R", "]", ")", ",", "numpy", ".", "array", "(", "[", "vR", "]", ")", ",", "numpy", ".", "array", "(", "[", "vT", "]", ")", ",", "numpy", ".", "array", "(", "[", "z", "]", ")", ",", "numpy", ".", "array", "(", "[", "vz", "]", ")", ",", "*", "*", "kwargs", ")", "return", "(", "ecc", "[", "0", "]", ",", "zmax", "[", "0", "]", ",", "rperi", "[", "0", "]", ",", "rap", "[", "0", "]", ")", "ecc", "[", "ecc", "<", "0.", "]", "=", "0.", "zmax", "[", "zmax", "<", "0.", "]", "=", "0.", "rperi", "[", "rperi", "<", "0.", "]", "=", "0.", "rap", "[", "rap", "<", "0.", "]", "=", "0.", "return", "(", "ecc", ",", "zmax", ",", "rperi", ",", "rap", ")" ]
58.00813
25.796748
def _tag(element): """Return element.tag with xmlns stripped away.""" tag = element.tag if tag[0] == "{": uri, tag = tag[1:].split("}") return tag
[ "def", "_tag", "(", "element", ")", ":", "tag", "=", "element", ".", "tag", "if", "tag", "[", "0", "]", "==", "\"{\"", ":", "uri", ",", "tag", "=", "tag", "[", "1", ":", "]", ".", "split", "(", "\"}\"", ")", "return", "tag" ]
27.5
14.833333
def execution_history_focus(self, model, prop_name, info): """ Arranges to put execution-history widget page to become top page in notebook when execution starts and stops and resets the boolean of modification_history_was_focused to False each time this notification are observed. """ if state_machine_execution_engine.status.execution_mode in \ [StateMachineExecutionStatus.STARTED, StateMachineExecutionStatus.STOPPED, StateMachineExecutionStatus.FINISHED]: if self.parent is not None and hasattr(self.parent, "focus_notebook_page_of_controller"): # request focus -> which has not have to be satisfied self.parent.focus_notebook_page_of_controller(self) if state_machine_execution_engine.status.execution_mode is not StateMachineExecutionStatus.STARTED: if not self.model.selected_state_machine_id == self.model.state_machine_manager.active_state_machine_id: pass else: self.update()
[ "def", "execution_history_focus", "(", "self", ",", "model", ",", "prop_name", ",", "info", ")", ":", "if", "state_machine_execution_engine", ".", "status", ".", "execution_mode", "in", "[", "StateMachineExecutionStatus", ".", "STARTED", ",", "StateMachineExecutionStatus", ".", "STOPPED", ",", "StateMachineExecutionStatus", ".", "FINISHED", "]", ":", "if", "self", ".", "parent", "is", "not", "None", "and", "hasattr", "(", "self", ".", "parent", ",", "\"focus_notebook_page_of_controller\"", ")", ":", "# request focus -> which has not have to be satisfied", "self", ".", "parent", ".", "focus_notebook_page_of_controller", "(", "self", ")", "if", "state_machine_execution_engine", ".", "status", ".", "execution_mode", "is", "not", "StateMachineExecutionStatus", ".", "STARTED", ":", "if", "not", "self", ".", "model", ".", "selected_state_machine_id", "==", "self", ".", "model", ".", "state_machine_manager", ".", "active_state_machine_id", ":", "pass", "else", ":", "self", ".", "update", "(", ")" ]
65.3125
33.875
def draw_widget(self, item): """Create a preview of the selected treeview item""" if item: self.filter_remove(remember=True) selected_id = self.treedata[item]['id'] item = self.get_toplevel_parent(item) widget_id = self.treedata[item]['id'] wclass = self.treedata[item]['class'] xmlnode = self.tree_node_to_xml('', item) self.previewer.draw(item, widget_id, xmlnode, wclass) self.previewer.show_selected(item, selected_id) self.filter_restore()
[ "def", "draw_widget", "(", "self", ",", "item", ")", ":", "if", "item", ":", "self", ".", "filter_remove", "(", "remember", "=", "True", ")", "selected_id", "=", "self", ".", "treedata", "[", "item", "]", "[", "'id'", "]", "item", "=", "self", ".", "get_toplevel_parent", "(", "item", ")", "widget_id", "=", "self", ".", "treedata", "[", "item", "]", "[", "'id'", "]", "wclass", "=", "self", ".", "treedata", "[", "item", "]", "[", "'class'", "]", "xmlnode", "=", "self", ".", "tree_node_to_xml", "(", "''", ",", "item", ")", "self", ".", "previewer", ".", "draw", "(", "item", ",", "widget_id", ",", "xmlnode", ",", "wclass", ")", "self", ".", "previewer", ".", "show_selected", "(", "item", ",", "selected_id", ")", "self", ".", "filter_restore", "(", ")" ]
46.416667
11.916667
def _read_plain(self, lines): """ Read text fragments from a plain format text file. :param list lines: the lines of the plain text file :param dict parameters: additional parameters for parsing (e.g., class/id regex strings) :raises: ValueError: if the id regex is not valid """ self.log(u"Parsing fragments from plain text format") id_format = self._get_id_format() lines = [line.strip() for line in lines] pairs = [] i = 1 for line in lines: identifier = id_format % i text = line.strip() pairs.append((identifier, [text])) i += 1 self._create_text_fragments(pairs)
[ "def", "_read_plain", "(", "self", ",", "lines", ")", ":", "self", ".", "log", "(", "u\"Parsing fragments from plain text format\"", ")", "id_format", "=", "self", ".", "_get_id_format", "(", ")", "lines", "=", "[", "line", ".", "strip", "(", ")", "for", "line", "in", "lines", "]", "pairs", "=", "[", "]", "i", "=", "1", "for", "line", "in", "lines", ":", "identifier", "=", "id_format", "%", "i", "text", "=", "line", ".", "strip", "(", ")", "pairs", ".", "append", "(", "(", "identifier", ",", "[", "text", "]", ")", ")", "i", "+=", "1", "self", ".", "_create_text_fragments", "(", "pairs", ")" ]
36.7
14.3
def save_scenario(self): """Save current scenario to text file.""" from safe.gui.tools.save_scenario import SaveScenarioDialog dialog = SaveScenarioDialog( iface=self.iface, dock=self.dock_widget) dialog.save_scenario()
[ "def", "save_scenario", "(", "self", ")", ":", "from", "safe", ".", "gui", ".", "tools", ".", "save_scenario", "import", "SaveScenarioDialog", "dialog", "=", "SaveScenarioDialog", "(", "iface", "=", "self", ".", "iface", ",", "dock", "=", "self", ".", "dock_widget", ")", "dialog", ".", "save_scenario", "(", ")" ]
33.625
14.25
def _svrg_grads_update_rule(self, g_curr_batch_curr_weight, g_curr_batch_special_weight, g_special_weight_all_batch): """Calculates the gradient based on the SVRG update rule. Parameters ---------- g_curr_batch_curr_weight : NDArray gradients of current weight of self.mod w.r.t current batch of data g_curr_batch_special_weight: NDArray gradients of the weight of past m epochs of self._mod_special w.r.t current batch of data g_special_weight_all_batch: NDArray average of full gradients over full pass of data Returns ---------- Gradients calculated using SVRG update rule: grads = g_curr_batch_curr_weight - g_curr_batch_special_weight + g_special_weight_all_batch """ for index, grad in enumerate(g_curr_batch_curr_weight): grad -= g_curr_batch_special_weight[index] grad += g_special_weight_all_batch[index] return g_curr_batch_curr_weight
[ "def", "_svrg_grads_update_rule", "(", "self", ",", "g_curr_batch_curr_weight", ",", "g_curr_batch_special_weight", ",", "g_special_weight_all_batch", ")", ":", "for", "index", ",", "grad", "in", "enumerate", "(", "g_curr_batch_curr_weight", ")", ":", "grad", "-=", "g_curr_batch_special_weight", "[", "index", "]", "grad", "+=", "g_special_weight_all_batch", "[", "index", "]", "return", "g_curr_batch_curr_weight" ]
48.666667
21.428571
def wncond(left, right, window): """ Contract each of the intervals of a double precision window. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wncond_c.html :param left: Amount added to each left endpoint. :type left: float :param right: Amount subtracted from each right endpoint. :type right: float :param window: Window to be contracted :type window: spiceypy.utils.support_types.SpiceCell :return: Contracted Window. :rtype: spiceypy.utils.support_types.SpiceCell """ assert isinstance(window, stypes.SpiceCell) assert window.dtype == 1 left = ctypes.c_double(left) right = ctypes.c_double(right) libspice.wncond_c(left, right, ctypes.byref(window)) return window
[ "def", "wncond", "(", "left", ",", "right", ",", "window", ")", ":", "assert", "isinstance", "(", "window", ",", "stypes", ".", "SpiceCell", ")", "assert", "window", ".", "dtype", "==", "1", "left", "=", "ctypes", ".", "c_double", "(", "left", ")", "right", "=", "ctypes", ".", "c_double", "(", "right", ")", "libspice", ".", "wncond_c", "(", "left", ",", "right", ",", "ctypes", ".", "byref", "(", "window", ")", ")", "return", "window" ]
35
15.571429
def reqContractDetails(self, contract: Contract) -> List[ContractDetails]: """ Get a list of contract details that match the given contract. If the returned list is empty then the contract is not known; If the list has multiple values then the contract is ambiguous. The fully qualified contract is available in the the ContractDetails.contract attribute. This method is blocking. https://interactivebrokers.github.io/tws-api/contract_details.html Args: contract: The contract to get details for. """ return self._run(self.reqContractDetailsAsync(contract))
[ "def", "reqContractDetails", "(", "self", ",", "contract", ":", "Contract", ")", "->", "List", "[", "ContractDetails", "]", ":", "return", "self", ".", "_run", "(", "self", ".", "reqContractDetailsAsync", "(", "contract", ")", ")" ]
37.941176
24.294118
def send(self,cmd,*args,arg_formats=None): """ Send a command (which may or may not have associated arguments) to an arduino using the CmdMessage protocol. The command and any parameters should be passed as direct arguments to send. arg_formats is an optional string that specifies the formats to use for each argument when passed to the arduino. If specified here, arg_formats supercedes formats specified on initialization. """ # Turn the command into an integer. try: command_as_int = self._cmd_name_to_int[cmd] except KeyError: err = "Command '{}' not recognized.\n".format(cmd) raise ValueError(err) # Figure out what formats to use for each argument. arg_format_list = [] if arg_formats != None: # The user specified formats arg_format_list = list(arg_formats) else: try: # See if class was initialized with a format for arguments to this # command arg_format_list = self._cmd_name_to_format[cmd] except KeyError: # if not, guess for all arguments arg_format_list = ["g" for i in range(len(args))] # Deal with "*" format arg_format_list = self._treat_star_format(arg_format_list,args) if len(args) > 0: if len(arg_format_list) != len(args): err = "Number of argument formats must match the number of arguments." raise ValueError(err) # Go through each argument and create a bytes representation in the # proper format to send. Escape appropriate characters. fields = ["{}".format(command_as_int).encode("ascii")] for i, a in enumerate(args): fields.append(self._send_methods[arg_format_list[i]](a)) fields[-1] = self._escape_re.sub(self._byte_escape_sep + r"\1".encode("ascii"),fields[-1]) # Make something that looks like cmd,field1,field2,field3; compiled_bytes = self._byte_field_sep.join(fields) + self._byte_command_sep # Send the message. self.board.write(compiled_bytes)
[ "def", "send", "(", "self", ",", "cmd", ",", "*", "args", ",", "arg_formats", "=", "None", ")", ":", "# Turn the command into an integer.", "try", ":", "command_as_int", "=", "self", ".", "_cmd_name_to_int", "[", "cmd", "]", "except", "KeyError", ":", "err", "=", "\"Command '{}' not recognized.\\n\"", ".", "format", "(", "cmd", ")", "raise", "ValueError", "(", "err", ")", "# Figure out what formats to use for each argument. ", "arg_format_list", "=", "[", "]", "if", "arg_formats", "!=", "None", ":", "# The user specified formats", "arg_format_list", "=", "list", "(", "arg_formats", ")", "else", ":", "try", ":", "# See if class was initialized with a format for arguments to this", "# command", "arg_format_list", "=", "self", ".", "_cmd_name_to_format", "[", "cmd", "]", "except", "KeyError", ":", "# if not, guess for all arguments", "arg_format_list", "=", "[", "\"g\"", "for", "i", "in", "range", "(", "len", "(", "args", ")", ")", "]", "# Deal with \"*\" format ", "arg_format_list", "=", "self", ".", "_treat_star_format", "(", "arg_format_list", ",", "args", ")", "if", "len", "(", "args", ")", ">", "0", ":", "if", "len", "(", "arg_format_list", ")", "!=", "len", "(", "args", ")", ":", "err", "=", "\"Number of argument formats must match the number of arguments.\"", "raise", "ValueError", "(", "err", ")", "# Go through each argument and create a bytes representation in the", "# proper format to send. Escape appropriate characters. 
", "fields", "=", "[", "\"{}\"", ".", "format", "(", "command_as_int", ")", ".", "encode", "(", "\"ascii\"", ")", "]", "for", "i", ",", "a", "in", "enumerate", "(", "args", ")", ":", "fields", ".", "append", "(", "self", ".", "_send_methods", "[", "arg_format_list", "[", "i", "]", "]", "(", "a", ")", ")", "fields", "[", "-", "1", "]", "=", "self", ".", "_escape_re", ".", "sub", "(", "self", ".", "_byte_escape_sep", "+", "r\"\\1\"", ".", "encode", "(", "\"ascii\"", ")", ",", "fields", "[", "-", "1", "]", ")", "# Make something that looks like cmd,field1,field2,field3;", "compiled_bytes", "=", "self", ".", "_byte_field_sep", ".", "join", "(", "fields", ")", "+", "self", ".", "_byte_command_sep", "# Send the message.", "self", ".", "board", ".", "write", "(", "compiled_bytes", ")" ]
40.62963
23.592593
def has_instance(name, provider=None): ''' Return true if the instance is found on a provider CLI Example: .. code-block:: bash salt minionname cloud.has_instance myinstance ''' data = get_instance(name, provider) if data is None: return False return True
[ "def", "has_instance", "(", "name", ",", "provider", "=", "None", ")", ":", "data", "=", "get_instance", "(", "name", ",", "provider", ")", "if", "data", "is", "None", ":", "return", "False", "return", "True" ]
20.928571
22.928571
def add_interface_router(self, router, subnet): ''' Adds an internal network interface to the specified router ''' router_id = self._find_router_id(router) subnet_id = self._find_subnet_id(subnet) return self.network_conn.add_interface_router( router=router_id, body={'subnet_id': subnet_id})
[ "def", "add_interface_router", "(", "self", ",", "router", ",", "subnet", ")", ":", "router_id", "=", "self", ".", "_find_router_id", "(", "router", ")", "subnet_id", "=", "self", ".", "_find_subnet_id", "(", "subnet", ")", "return", "self", ".", "network_conn", ".", "add_interface_router", "(", "router", "=", "router_id", ",", "body", "=", "{", "'subnet_id'", ":", "subnet_id", "}", ")" ]
43.125
17.625
def _compute_smooth_during_construction(self, xi): """ Evaluate value of smooth at x-value xi. Parameters ---------- xi : float Value of x where smooth value is desired Returns ------- smooth_here : float Value of smooth s(xi) """ if self._variance_in_window: beta = self._covariance_in_window / self._variance_in_window alpha = self._mean_y_in_window - beta * self._mean_x_in_window value_of_smooth_here = beta * (xi) + alpha else: value_of_smooth_here = 0.0 return value_of_smooth_here
[ "def", "_compute_smooth_during_construction", "(", "self", ",", "xi", ")", ":", "if", "self", ".", "_variance_in_window", ":", "beta", "=", "self", ".", "_covariance_in_window", "/", "self", ".", "_variance_in_window", "alpha", "=", "self", ".", "_mean_y_in_window", "-", "beta", "*", "self", ".", "_mean_x_in_window", "value_of_smooth_here", "=", "beta", "*", "(", "xi", ")", "+", "alpha", "else", ":", "value_of_smooth_here", "=", "0.0", "return", "value_of_smooth_here" ]
30.333333
17.285714
def my_init(self): """ Method automatically called from base class constructor. """ self._start_time = time.time() self._stats = {} self._stats_lock = threading.Lock()
[ "def", "my_init", "(", "self", ")", ":", "self", ".", "_start_time", "=", "time", ".", "time", "(", ")", "self", ".", "_stats", "=", "{", "}", "self", ".", "_stats_lock", "=", "threading", ".", "Lock", "(", ")" ]
29.857143
9.571429
def _count_async(self, limit=None, **q_options): """Internal version of count_async().""" # TODO: Support offset by incorporating it to the limit. if 'offset' in q_options: raise NotImplementedError('.count() and .count_async() do not support ' 'offsets at present.') if 'limit' in q_options: raise TypeError('Cannot specify limit as a non-keyword argument and as a ' 'keyword argument simultaneously.') elif limit is None: limit = _MAX_LIMIT if self._needs_multi_query(): # _MultiQuery does not support iterating over result batches, # so just fetch results and count them. # TODO: Use QueryIterator to avoid materializing the results list. q_options.setdefault('batch_size', limit) q_options.setdefault('keys_only', True) results = yield self.fetch_async(limit, **q_options) raise tasklets.Return(len(results)) # Issue a special query requesting 0 results at a given offset. # The skipped_results count will tell us how many hits there were # before that offset without fetching the items. q_options['offset'] = limit q_options['limit'] = 0 options = self._make_options(q_options) conn = tasklets.get_context()._conn dsquery = self._get_query(conn) rpc = dsquery.run_async(conn, options) total = 0 while rpc is not None: batch = yield rpc options = QueryOptions(offset=options.offset - batch.skipped_results, config=options) rpc = batch.next_batch_async(options) total += batch.skipped_results raise tasklets.Return(total)
[ "def", "_count_async", "(", "self", ",", "limit", "=", "None", ",", "*", "*", "q_options", ")", ":", "# TODO: Support offset by incorporating it to the limit.", "if", "'offset'", "in", "q_options", ":", "raise", "NotImplementedError", "(", "'.count() and .count_async() do not support '", "'offsets at present.'", ")", "if", "'limit'", "in", "q_options", ":", "raise", "TypeError", "(", "'Cannot specify limit as a non-keyword argument and as a '", "'keyword argument simultaneously.'", ")", "elif", "limit", "is", "None", ":", "limit", "=", "_MAX_LIMIT", "if", "self", ".", "_needs_multi_query", "(", ")", ":", "# _MultiQuery does not support iterating over result batches,", "# so just fetch results and count them.", "# TODO: Use QueryIterator to avoid materializing the results list.", "q_options", ".", "setdefault", "(", "'batch_size'", ",", "limit", ")", "q_options", ".", "setdefault", "(", "'keys_only'", ",", "True", ")", "results", "=", "yield", "self", ".", "fetch_async", "(", "limit", ",", "*", "*", "q_options", ")", "raise", "tasklets", ".", "Return", "(", "len", "(", "results", ")", ")", "# Issue a special query requesting 0 results at a given offset.", "# The skipped_results count will tell us how many hits there were", "# before that offset without fetching the items.", "q_options", "[", "'offset'", "]", "=", "limit", "q_options", "[", "'limit'", "]", "=", "0", "options", "=", "self", ".", "_make_options", "(", "q_options", ")", "conn", "=", "tasklets", ".", "get_context", "(", ")", ".", "_conn", "dsquery", "=", "self", ".", "_get_query", "(", "conn", ")", "rpc", "=", "dsquery", ".", "run_async", "(", "conn", ",", "options", ")", "total", "=", "0", "while", "rpc", "is", "not", "None", ":", "batch", "=", "yield", "rpc", "options", "=", "QueryOptions", "(", "offset", "=", "options", ".", "offset", "-", "batch", ".", "skipped_results", ",", "config", "=", "options", ")", "rpc", "=", "batch", ".", "next_batch_async", "(", "options", ")", "total", "+=", "batch", 
".", "skipped_results", "raise", "tasklets", ".", "Return", "(", "total", ")" ]
44
14.810811
def createEditor(self, delegate, parent, option): """ Creates a FloatCtiEditor. For the parameters see the AbstractCti constructor documentation. """ return FloatCtiEditor(self, delegate, parent=parent)
[ "def", "createEditor", "(", "self", ",", "delegate", ",", "parent", ",", "option", ")", ":", "return", "FloatCtiEditor", "(", "self", ",", "delegate", ",", "parent", "=", "parent", ")" ]
46.8
13.2
def deploy_deb(self, file_name, distribution, component, architecture, parameters={}): """ Convenience method to deploy .deb packages Keyword arguments: file_name -- full path to local file that will be deployed distribution -- debian distribution (e.g. 'wheezy') component -- repository component (e.g. 'main') architecture -- package architecture (e.g. 'i386') parameters -- attach any additional metadata """ params = { 'deb.distribution': distribution, 'deb.component': component, 'deb.architecture': architecture } params.update(parameters) self.deploy_file(file_name, parameters=params)
[ "def", "deploy_deb", "(", "self", ",", "file_name", ",", "distribution", ",", "component", ",", "architecture", ",", "parameters", "=", "{", "}", ")", ":", "params", "=", "{", "'deb.distribution'", ":", "distribution", ",", "'deb.component'", ":", "component", ",", "'deb.architecture'", ":", "architecture", "}", "params", ".", "update", "(", "parameters", ")", "self", ".", "deploy_file", "(", "file_name", ",", "parameters", "=", "params", ")" ]
33.583333
14.25
def save_prep(cls, instance_or_instances): """Preprocess the object before the object is saved. This automatically gets called when the save method gets called. """ instances = make_obj_list(instance_or_instances) tokens = set(cls.objects.get_available_tokens( count=len(instances), token_length=cls.token_length )) for instance in instances: if not instance.token: instance.token = tokens.pop() super(AbstractTokenModel, cls).save_prep( instance_or_instances=instances )
[ "def", "save_prep", "(", "cls", ",", "instance_or_instances", ")", ":", "instances", "=", "make_obj_list", "(", "instance_or_instances", ")", "tokens", "=", "set", "(", "cls", ".", "objects", ".", "get_available_tokens", "(", "count", "=", "len", "(", "instances", ")", ",", "token_length", "=", "cls", ".", "token_length", ")", ")", "for", "instance", "in", "instances", ":", "if", "not", "instance", ".", "token", ":", "instance", ".", "token", "=", "tokens", ".", "pop", "(", ")", "super", "(", "AbstractTokenModel", ",", "cls", ")", ".", "save_prep", "(", "instance_or_instances", "=", "instances", ")" ]
33
15.388889
def check_api_key(email, api_key): """Check the API key of the user.""" table = boto3.resource("dynamodb").Table(os.environ['people']) user = table.get_item(Key={'email': email}) if not user: return False user = user.get("Item") if api_key != user.get('api_key', None): return False return user
[ "def", "check_api_key", "(", "email", ",", "api_key", ")", ":", "table", "=", "boto3", ".", "resource", "(", "\"dynamodb\"", ")", ".", "Table", "(", "os", ".", "environ", "[", "'people'", "]", ")", "user", "=", "table", ".", "get_item", "(", "Key", "=", "{", "'email'", ":", "email", "}", ")", "if", "not", "user", ":", "return", "False", "user", "=", "user", ".", "get", "(", "\"Item\"", ")", "if", "api_key", "!=", "user", ".", "get", "(", "'api_key'", ",", "None", ")", ":", "return", "False", "return", "user" ]
32.9
14.5
def page(self, number, *args, **kwargs): """Return a standard ``Page`` instance with custom, digg-specific page ranges attached. """ page = super().page(number, *args, **kwargs) number = int(number) # we know this will work # easier access num_pages, body, tail, padding, margin = \ self.num_pages, self.body, self.tail, self.padding, self.margin # put active page in middle of main range main_range = list(map(int, [ math.floor(number-body/2.0)+1, # +1 = shift odd body to right math.floor(number+body/2.0)])) # adjust bounds if main_range[0] < 1: main_range = list(map(abs(main_range[0]-1).__add__, main_range)) if main_range[1] > num_pages: main_range = list(map((num_pages-main_range[1]).__add__, main_range)) # Determine leading and trailing ranges; if possible and appropriate, # combine them with the main range, in which case the resulting main # block might end up considerable larger than requested. While we # can't guarantee the exact size in those cases, we can at least try # to come as close as possible: we can reduce the other boundary to # max padding, instead of using half the body size, which would # otherwise be the case. If the padding is large enough, this will # of course have no effect. # Example: # total pages=100, page=4, body=5, (default padding=2) # 1 2 3 [4] 5 6 ... 99 100 # total pages=100, page=4, body=5, padding=1 # 1 2 3 [4] 5 ... 99 100 # If it were not for this adjustment, both cases would result in the # first output, regardless of the padding value. if main_range[0] <= tail+margin: leading = [] main_range = [1, max(body, min(number+padding, main_range[1]))] main_range[0] = 1 else: leading = list(range(1, tail+1)) # basically same for trailing range, but not in ``left_align`` mode if self.align_left: trailing = [] else: if main_range[1] >= num_pages-(tail+margin)+1: trailing = [] if not leading: # ... 
but handle the special case of neither leading nor # trailing ranges; otherwise, we would now modify the # main range low bound, which we just set in the previous # section, again. main_range = [1, num_pages] else: main_range = [min(num_pages-body+1, max(number-padding, main_range[0])), num_pages] else: trailing = list(range(num_pages-tail+1, num_pages+1)) # finally, normalize values that are out of bound; this basically # fixes all the things the above code screwed up in the simple case # of few enough pages where one range would suffice. main_range = [max(main_range[0], 1), min(main_range[1], num_pages)] # make the result of our calculations available as custom ranges # on the ``Page`` instance. page.main_range = list(range(main_range[0], main_range[1]+1)) page.leading_range = leading page.trailing_range = trailing page.page_range = reduce(lambda x, y: x+((x and y) and [False])+y, [page.leading_range, page.main_range, page.trailing_range]) page.__class__ = DiggPage return page
[ "def", "page", "(", "self", ",", "number", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "page", "=", "super", "(", ")", ".", "page", "(", "number", ",", "*", "args", ",", "*", "*", "kwargs", ")", "number", "=", "int", "(", "number", ")", "# we know this will work", "# easier access", "num_pages", ",", "body", ",", "tail", ",", "padding", ",", "margin", "=", "self", ".", "num_pages", ",", "self", ".", "body", ",", "self", ".", "tail", ",", "self", ".", "padding", ",", "self", ".", "margin", "# put active page in middle of main range", "main_range", "=", "list", "(", "map", "(", "int", ",", "[", "math", ".", "floor", "(", "number", "-", "body", "/", "2.0", ")", "+", "1", ",", "# +1 = shift odd body to right", "math", ".", "floor", "(", "number", "+", "body", "/", "2.0", ")", "]", ")", ")", "# adjust bounds", "if", "main_range", "[", "0", "]", "<", "1", ":", "main_range", "=", "list", "(", "map", "(", "abs", "(", "main_range", "[", "0", "]", "-", "1", ")", ".", "__add__", ",", "main_range", ")", ")", "if", "main_range", "[", "1", "]", ">", "num_pages", ":", "main_range", "=", "list", "(", "map", "(", "(", "num_pages", "-", "main_range", "[", "1", "]", ")", ".", "__add__", ",", "main_range", ")", ")", "# Determine leading and trailing ranges; if possible and appropriate,", "# combine them with the main range, in which case the resulting main", "# block might end up considerable larger than requested. While we", "# can't guarantee the exact size in those cases, we can at least try", "# to come as close as possible: we can reduce the other boundary to", "# max padding, instead of using half the body size, which would", "# otherwise be the case. If the padding is large enough, this will", "# of course have no effect.", "# Example:", "# total pages=100, page=4, body=5, (default padding=2)", "# 1 2 3 [4] 5 6 ... 99 100", "# total pages=100, page=4, body=5, padding=1", "# 1 2 3 [4] 5 ... 
99 100", "# If it were not for this adjustment, both cases would result in the", "# first output, regardless of the padding value.", "if", "main_range", "[", "0", "]", "<=", "tail", "+", "margin", ":", "leading", "=", "[", "]", "main_range", "=", "[", "1", ",", "max", "(", "body", ",", "min", "(", "number", "+", "padding", ",", "main_range", "[", "1", "]", ")", ")", "]", "main_range", "[", "0", "]", "=", "1", "else", ":", "leading", "=", "list", "(", "range", "(", "1", ",", "tail", "+", "1", ")", ")", "# basically same for trailing range, but not in ``left_align`` mode", "if", "self", ".", "align_left", ":", "trailing", "=", "[", "]", "else", ":", "if", "main_range", "[", "1", "]", ">=", "num_pages", "-", "(", "tail", "+", "margin", ")", "+", "1", ":", "trailing", "=", "[", "]", "if", "not", "leading", ":", "# ... but handle the special case of neither leading nor", "# trailing ranges; otherwise, we would now modify the", "# main range low bound, which we just set in the previous", "# section, again.", "main_range", "=", "[", "1", ",", "num_pages", "]", "else", ":", "main_range", "=", "[", "min", "(", "num_pages", "-", "body", "+", "1", ",", "max", "(", "number", "-", "padding", ",", "main_range", "[", "0", "]", ")", ")", ",", "num_pages", "]", "else", ":", "trailing", "=", "list", "(", "range", "(", "num_pages", "-", "tail", "+", "1", ",", "num_pages", "+", "1", ")", ")", "# finally, normalize values that are out of bound; this basically", "# fixes all the things the above code screwed up in the simple case", "# of few enough pages where one range would suffice.", "main_range", "=", "[", "max", "(", "main_range", "[", "0", "]", ",", "1", ")", ",", "min", "(", "main_range", "[", "1", "]", ",", "num_pages", ")", "]", "# make the result of our calculations available as custom ranges", "# on the ``Page`` instance.", "page", ".", "main_range", "=", "list", "(", "range", "(", "main_range", "[", "0", "]", ",", "main_range", "[", "1", "]", "+", "1", ")", ")", "page", ".", 
"leading_range", "=", "leading", "page", ".", "trailing_range", "=", "trailing", "page", ".", "page_range", "=", "reduce", "(", "lambda", "x", ",", "y", ":", "x", "+", "(", "(", "x", "and", "y", ")", "and", "[", "False", "]", ")", "+", "y", ",", "[", "page", ".", "leading_range", ",", "page", ".", "main_range", ",", "page", ".", "trailing_range", "]", ")", "page", ".", "__class__", "=", "DiggPage", "return", "page" ]
46.52
22.173333
def resolve(self, context, provider): """Recursively resolve any lookups with the Variable. Args: context (:class:`stacker.context.Context`): Current context for building the stack provider (:class:`stacker.provider.base.BaseProvider`): subclass of the base provider """ try: self._value.resolve(context, provider) except FailedLookup as e: raise FailedVariableLookup(self.name, e.lookup, e.error)
[ "def", "resolve", "(", "self", ",", "context", ",", "provider", ")", ":", "try", ":", "self", ".", "_value", ".", "resolve", "(", "context", ",", "provider", ")", "except", "FailedLookup", "as", "e", ":", "raise", "FailedVariableLookup", "(", "self", ".", "name", ",", "e", ".", "lookup", ",", "e", ".", "error", ")" ]
36.142857
19.285714
def _set_mark(self, v, load=False): """ Setter method for mark, mapped from YANG variable /qos/map/dscp_mutation/mark (list) If this variable is read-only (config: false) in the source YANG file, then _set_mark is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_mark() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("dscp_in_values",mark.mark, yang_name="mark", rest_name="mark", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-in-values', extensions={u'tailf-common': {u'info': u'Map DSCP values to outbound DSCP value', u'cli-suppress-mode': None, u'callpoint': u'dscp_mark_list_mutation', u'cli-incomplete-command': None, u'cli-suppress-no': None}}), is_container='list', yang_name="mark", rest_name="mark", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Map DSCP values to outbound DSCP value', u'cli-suppress-mode': None, u'callpoint': u'dscp_mark_list_mutation', u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """mark must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("dscp_in_values",mark.mark, yang_name="mark", rest_name="mark", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-in-values', extensions={u'tailf-common': {u'info': u'Map DSCP values to outbound DSCP value', u'cli-suppress-mode': None, u'callpoint': u'dscp_mark_list_mutation', u'cli-incomplete-command': None, u'cli-suppress-no': None}}), is_container='list', yang_name="mark", rest_name="mark", parent=self, path_helper=self._path_helper, 
extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Map DSCP values to outbound DSCP value', u'cli-suppress-mode': None, u'callpoint': u'dscp_mark_list_mutation', u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='list', is_config=True)""", }) self.__mark = t if hasattr(self, '_set'): self._set()
[ "def", "_set_mark", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "YANGListType", "(", "\"dscp_in_values\"", ",", "mark", ".", "mark", ",", "yang_name", "=", "\"mark\"", ",", "rest_name", "=", "\"mark\"", ",", "parent", "=", "self", ",", "is_container", "=", "'list'", ",", "user_ordered", "=", "False", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "yang_keys", "=", "'dscp-in-values'", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Map DSCP values to outbound DSCP value'", ",", "u'cli-suppress-mode'", ":", "None", ",", "u'callpoint'", ":", "u'dscp_mark_list_mutation'", ",", "u'cli-incomplete-command'", ":", "None", ",", "u'cli-suppress-no'", ":", "None", "}", "}", ")", ",", "is_container", "=", "'list'", ",", "yang_name", "=", "\"mark\"", ",", "rest_name", "=", "\"mark\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Map DSCP values to outbound DSCP value'", ",", "u'cli-suppress-mode'", ":", "None", ",", "u'callpoint'", ":", "u'dscp_mark_list_mutation'", ",", "u'cli-incomplete-command'", ":", "None", ",", "u'cli-suppress-no'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-qos'", ",", "defining_module", "=", "'brocade-qos'", ",", "yang_type", "=", "'list'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"mark must be of a type compatible with list\"\"\"", ",", "'defined-type'", ":", "\"list\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=YANGListType(\"dscp_in_values\",mark.mark, yang_name=\"mark\", 
rest_name=\"mark\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-in-values', extensions={u'tailf-common': {u'info': u'Map DSCP values to outbound DSCP value', u'cli-suppress-mode': None, u'callpoint': u'dscp_mark_list_mutation', u'cli-incomplete-command': None, u'cli-suppress-no': None}}), is_container='list', yang_name=\"mark\", rest_name=\"mark\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Map DSCP values to outbound DSCP value', u'cli-suppress-mode': None, u'callpoint': u'dscp_mark_list_mutation', u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='list', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__mark", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
113.954545
54.909091
def get_config(self): """ function to get current configuration """ config = { 'location': self.location, 'language': self.language, 'topic': self.topic, } return config
[ "def", "get_config", "(", "self", ")", ":", "config", "=", "{", "'location'", ":", "self", ".", "location", ",", "'language'", ":", "self", ".", "language", ",", "'topic'", ":", "self", ".", "topic", ",", "}", "return", "config" ]
24.4
10.8
def token(self, token_address: Address) -> Token: """ Return a proxy to interact with a token. """ if not is_binary_address(token_address): raise ValueError('token_address must be a valid address') with self._token_creation_lock: if token_address not in self.address_to_token: self.address_to_token[token_address] = Token( jsonrpc_client=self.client, token_address=token_address, contract_manager=self.contract_manager, ) return self.address_to_token[token_address]
[ "def", "token", "(", "self", ",", "token_address", ":", "Address", ")", "->", "Token", ":", "if", "not", "is_binary_address", "(", "token_address", ")", ":", "raise", "ValueError", "(", "'token_address must be a valid address'", ")", "with", "self", ".", "_token_creation_lock", ":", "if", "token_address", "not", "in", "self", ".", "address_to_token", ":", "self", ".", "address_to_token", "[", "token_address", "]", "=", "Token", "(", "jsonrpc_client", "=", "self", ".", "client", ",", "token_address", "=", "token_address", ",", "contract_manager", "=", "self", ".", "contract_manager", ",", ")", "return", "self", ".", "address_to_token", "[", "token_address", "]" ]
43
16.714286
def get(self, path, watch=None): """Returns the data of the specified node.""" _log.debug( "ZK: Getting {path}".format(path=path), ) return self.zk.get(path, watch)
[ "def", "get", "(", "self", ",", "path", ",", "watch", "=", "None", ")", ":", "_log", ".", "debug", "(", "\"ZK: Getting {path}\"", ".", "format", "(", "path", "=", "path", ")", ",", ")", "return", "self", ".", "zk", ".", "get", "(", "path", ",", "watch", ")" ]
33.833333
12
def crust_type_at(lat=None, lon=None): """ lat, lon (degrees) """ # Get lon into appropriate format lats = np.array(lat) lons = np.array(lon%360) iVals = ((90.0-lats)%180).astype(np.int) jVals = (lons%360.0).astype(int) # i = int((-lat+90.0)%180) # j = int(lon) t = _c1_crust_type_lat_lon[iVals,jVals] # t = _c1_crust_type_lat_lon[i,j] # des = litho.c1_region_descriptor[t] return t return t
[ "def", "crust_type_at", "(", "lat", "=", "None", ",", "lon", "=", "None", ")", ":", "# Get lon into appropriate format", "lats", "=", "np", ".", "array", "(", "lat", ")", "lons", "=", "np", ".", "array", "(", "lon", "%", "360", ")", "iVals", "=", "(", "(", "90.0", "-", "lats", ")", "%", "180", ")", ".", "astype", "(", "np", ".", "int", ")", "jVals", "=", "(", "lons", "%", "360.0", ")", ".", "astype", "(", "int", ")", "# i = int((-lat+90.0)%180)", "# j = int(lon)", "t", "=", "_c1_crust_type_lat_lon", "[", "iVals", ",", "jVals", "]", "# t = _c1_crust_type_lat_lon[i,j]", "# des = litho.c1_region_descriptor[t]", "return", "t", "return", "t" ]
16.148148
22
def find_args(self, text, start=None): """implementation details""" if start is None: start = 0 first_occurance = text.find(self.__begin, start) if first_occurance == -1: return self.NOT_FOUND previous_found, found = first_occurance + 1, 0 while True: found = self.__find_args_separator(text, previous_found) if found == -1: return self.NOT_FOUND elif text[found] == self.__end: return first_occurance, found else: previous_found = found + 1
[ "def", "find_args", "(", "self", ",", "text", ",", "start", "=", "None", ")", ":", "if", "start", "is", "None", ":", "start", "=", "0", "first_occurance", "=", "text", ".", "find", "(", "self", ".", "__begin", ",", "start", ")", "if", "first_occurance", "==", "-", "1", ":", "return", "self", ".", "NOT_FOUND", "previous_found", ",", "found", "=", "first_occurance", "+", "1", ",", "0", "while", "True", ":", "found", "=", "self", ".", "__find_args_separator", "(", "text", ",", "previous_found", ")", "if", "found", "==", "-", "1", ":", "return", "self", ".", "NOT_FOUND", "elif", "text", "[", "found", "]", "==", "self", ".", "__end", ":", "return", "first_occurance", ",", "found", "else", ":", "previous_found", "=", "found", "+", "1" ]
37.125
11.125
def random_peptides(num, length=9, distribution=None): """ Generate random peptides (kmers). Parameters ---------- num : int Number of peptides to return length : int Length of each peptide distribution : pandas.Series Maps 1-letter amino acid abbreviations to probabilities. If not specified a uniform distribution is used. Returns ---------- list of string """ if num == 0: return [] if distribution is None: distribution = pandas.Series( 1, index=sorted(amino_acid.COMMON_AMINO_ACIDS)) distribution /= distribution.sum() return [ ''.join(peptide_sequence) for peptide_sequence in numpy.random.choice( distribution.index, p=distribution.values, size=(int(num), int(length))) ]
[ "def", "random_peptides", "(", "num", ",", "length", "=", "9", ",", "distribution", "=", "None", ")", ":", "if", "num", "==", "0", ":", "return", "[", "]", "if", "distribution", "is", "None", ":", "distribution", "=", "pandas", ".", "Series", "(", "1", ",", "index", "=", "sorted", "(", "amino_acid", ".", "COMMON_AMINO_ACIDS", ")", ")", "distribution", "/=", "distribution", ".", "sum", "(", ")", "return", "[", "''", ".", "join", "(", "peptide_sequence", ")", "for", "peptide_sequence", "in", "numpy", ".", "random", ".", "choice", "(", "distribution", ".", "index", ",", "p", "=", "distribution", ".", "values", ",", "size", "=", "(", "int", "(", "num", ")", ",", "int", "(", "length", ")", ")", ")", "]" ]
22.810811
18.324324
def update_unique(self, table_name, fields, data, cond=None, unique_fields=None, *, raise_if_not_found=False): """Update the unique matching element to have a given set of fields. Parameters ---------- table_name: str fields: dict or function[dict -> None] new data/values to insert into the unique element or a method that will update the elements. data: dict Sample data for query cond: tinydb.Query which elements to update unique_fields: list of str raise_if_not_found: bool Will raise an exception if the element is not found for update. Returns ------- eid: int The eid of the updated element if found, None otherwise. """ eid = find_unique(self.table(table_name), data, unique_fields) if eid is None: if raise_if_not_found: msg = 'Could not find {} with {}'.format(table_name, data) if cond is not None: msg += ' where {}.'.format(cond) raise IndexError(msg) else: self.table(table_name).update(_to_string(fields), cond=cond, eids=[eid]) return eid
[ "def", "update_unique", "(", "self", ",", "table_name", ",", "fields", ",", "data", ",", "cond", "=", "None", ",", "unique_fields", "=", "None", ",", "*", ",", "raise_if_not_found", "=", "False", ")", ":", "eid", "=", "find_unique", "(", "self", ".", "table", "(", "table_name", ")", ",", "data", ",", "unique_fields", ")", "if", "eid", "is", "None", ":", "if", "raise_if_not_found", ":", "msg", "=", "'Could not find {} with {}'", ".", "format", "(", "table_name", ",", "data", ")", "if", "cond", "is", "not", "None", ":", "msg", "+=", "' where {}.'", ".", "format", "(", "cond", ")", "raise", "IndexError", "(", "msg", ")", "else", ":", "self", ".", "table", "(", "table_name", ")", ".", "update", "(", "_to_string", "(", "fields", ")", ",", "cond", "=", "cond", ",", "eids", "=", "[", "eid", "]", ")", "return", "eid" ]
30.439024
23.195122
def cut_nodes_edges2(graph): """Bi-connected components, alternative recursive implementation :param graph: undirected graph. in listlist format. Cannot be in listdict format. :assumes: graph has about 5000 vertices at most, otherwise memory limit is reached :returns: a tuple with the list of cut-nodes and the list of cut-edges :complexity: `O(|V|+|E|)` in average, `O(|V|+|E|^2)` in worst case due to use of dictionary """ N = len(graph) assert N <= 5000 recursionlimit = getrecursionlimit() setrecursionlimit(max(recursionlimit, N + 42)) edges = set((i, j) for i in range(N) for j in graph[i] if i <= j) nodes = set() NOT = -2 # not visited yet; -1 would be buggy `marked[v] != prof - 1` FIN = -3 # already visited marked = [NOT] * N # if >= 0, it means depth within the DFS def DFS(n, prof=0): """ Recursively search graph, update edge list and returns the first node the first edge within search to which we can come back. """ if marked[n] == FIN: return # only when there are several connected components if marked[n] != NOT: return marked[n] marked[n] = prof m = float('inf') count = 0 # useful only for prof == 0 for v in graph[n]: if marked[v] != FIN and marked[v] != prof - 1: count += 1 r = DFS(v, prof+1) if r <= prof: edges.discard(tuple(sorted((n, v)))) if prof and r >= prof: # only if we are not at root nodes.add(n) m = min(m, r) # root is an articulation point iff it has more than 2 childs if prof == 0 and count >= 2: nodes.add(n) marked[n] = FIN return m for r in range(N): DFS(r) # we can count connected components by nb += DFS(r) setrecursionlimit(recursionlimit) return nodes, edges
[ "def", "cut_nodes_edges2", "(", "graph", ")", ":", "N", "=", "len", "(", "graph", ")", "assert", "N", "<=", "5000", "recursionlimit", "=", "getrecursionlimit", "(", ")", "setrecursionlimit", "(", "max", "(", "recursionlimit", ",", "N", "+", "42", ")", ")", "edges", "=", "set", "(", "(", "i", ",", "j", ")", "for", "i", "in", "range", "(", "N", ")", "for", "j", "in", "graph", "[", "i", "]", "if", "i", "<=", "j", ")", "nodes", "=", "set", "(", ")", "NOT", "=", "-", "2", "# not visited yet; -1 would be buggy `marked[v] != prof - 1`", "FIN", "=", "-", "3", "# already visited", "marked", "=", "[", "NOT", "]", "*", "N", "# if >= 0, it means depth within the DFS", "def", "DFS", "(", "n", ",", "prof", "=", "0", ")", ":", "\"\"\"\n Recursively search graph, update edge list and returns the first\n node the first edge within search to which we can come back.\n \"\"\"", "if", "marked", "[", "n", "]", "==", "FIN", ":", "return", "# only when there are several connected components", "if", "marked", "[", "n", "]", "!=", "NOT", ":", "return", "marked", "[", "n", "]", "marked", "[", "n", "]", "=", "prof", "m", "=", "float", "(", "'inf'", ")", "count", "=", "0", "# useful only for prof == 0", "for", "v", "in", "graph", "[", "n", "]", ":", "if", "marked", "[", "v", "]", "!=", "FIN", "and", "marked", "[", "v", "]", "!=", "prof", "-", "1", ":", "count", "+=", "1", "r", "=", "DFS", "(", "v", ",", "prof", "+", "1", ")", "if", "r", "<=", "prof", ":", "edges", ".", "discard", "(", "tuple", "(", "sorted", "(", "(", "n", ",", "v", ")", ")", ")", ")", "if", "prof", "and", "r", ">=", "prof", ":", "# only if we are not at root", "nodes", ".", "add", "(", "n", ")", "m", "=", "min", "(", "m", ",", "r", ")", "# root is an articulation point iff it has more than 2 childs", "if", "prof", "==", "0", "and", "count", ">=", "2", ":", "nodes", ".", "add", "(", "n", ")", "marked", "[", "n", "]", "=", "FIN", "return", "m", "for", "r", "in", "range", "(", "N", ")", ":", "DFS", "(", "r", ")", "# we can 
count connected components by nb += DFS(r)", "setrecursionlimit", "(", "recursionlimit", ")", "return", "nodes", ",", "edges" ]
40.291667
18.854167
def match(license):
    """Return True if the given license field is valid.

    Taken from rpmlint. It's named match() to mimic a compiled regexp.

    A license string is accepted if it appears verbatim in VALID_LICENSES,
    or if every component produced by _split_license (recursively, two
    levels deep) is itself in VALID_LICENSES.

    Fix: removed the dead statement `valid_license = False` that followed
    `return False` — it was unreachable, and the name was never read.
    """
    if license not in VALID_LICENSES:
        for l1 in _split_license(license):
            if l1 in VALID_LICENSES:
                continue
            # l1 is a compound expression; every sub-part must be valid.
            for l2 in _split_license(l1):
                if l2 not in VALID_LICENSES:
                    return False
    return True
[ "def", "match", "(", "license", ")", ":", "if", "license", "not", "in", "VALID_LICENSES", ":", "for", "l1", "in", "_split_license", "(", "license", ")", ":", "if", "l1", "in", "VALID_LICENSES", ":", "continue", "for", "l2", "in", "_split_license", "(", "l1", ")", ":", "if", "l2", "not", "in", "VALID_LICENSES", ":", "return", "False", "valid_license", "=", "False", "return", "True" ]
32.857143
12
def timestamp(datetime_obj):
    """Return the Unix timestamp of *datetime_obj* as a float.

    The timestamp is the number of seconds elapsed since the Unix epoch
    (January 1, 1970). Assumes *datetime_obj* is naive — it is compared
    directly against a naive epoch; no timezone handling is performed.
    """
    epoch = datetime.datetime(1970, 1, 1)
    return (datetime_obj - epoch).total_seconds()
[ "def", "timestamp", "(", "datetime_obj", ")", ":", "start_of_time", "=", "datetime", ".", "datetime", "(", "1970", ",", "1", ",", "1", ")", "diff", "=", "datetime_obj", "-", "start_of_time", "return", "diff", ".", "total_seconds", "(", ")" ]
32.25
12.125
def main():
    """Entry point: dispatch to the handler matching the selected mode."""
    plugin = Register()
    # Map each supported --option value to its handler method.
    handlers = {
        'filenumber': plugin.filenumber_handle,
        'fileage': plugin.fileage_handle,
        'sqlserverlocks': plugin.sqlserverlocks_handle,
    }
    handler = handlers.get(plugin.args.option)
    if handler is not None:
        handler()
    else:
        plugin.unknown("Unknown actions.")
[ "def", "main", "(", ")", ":", "plugin", "=", "Register", "(", ")", "if", "plugin", ".", "args", ".", "option", "==", "'filenumber'", ":", "plugin", ".", "filenumber_handle", "(", ")", "elif", "plugin", ".", "args", ".", "option", "==", "'fileage'", ":", "plugin", ".", "fileage_handle", "(", ")", "elif", "plugin", ".", "args", ".", "option", "==", "'sqlserverlocks'", ":", "plugin", ".", "sqlserverlocks_handle", "(", ")", "else", ":", "plugin", ".", "unknown", "(", "\"Unknown actions.\"", ")" ]
34.090909
9.727273