text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def migrate_flow_collection(apps, schema_editor):
    """Migrate 'flow_collection' field to 'entity_type'.

    Copies each process's ``flow_collection`` value into both
    ``entity_type`` and ``entity_descriptor_schema``, validating that the
    referenced descriptor schema slug exists before saving.

    :param apps: historical app registry provided by the migration framework.
    :param schema_editor: unused, required by the RunPython signature.
    :raises LookupError: if a referenced descriptor schema does not exist.
    """
    Process = apps.get_model('flow', 'Process')
    DescriptorSchema = apps.get_model('flow', 'DescriptorSchema')
    for process in Process.objects.all():
        process.entity_type = process.flow_collection
        process.entity_descriptor_schema = process.flow_collection
        if (process.entity_descriptor_schema is not None and
                not DescriptorSchema.objects.filter(slug=process.entity_descriptor_schema).exists()):
            # Fixed typo in the error message ("Descriptow" -> "Descriptor").
            raise LookupError(
                "Descriptor schema '{}' referenced in 'entity_descriptor_schema' not "
                "found.".format(process.entity_descriptor_schema)
            )
        process.save()
[ "def", "migrate_flow_collection", "(", "apps", ",", "schema_editor", ")", ":", "Process", "=", "apps", ".", "get_model", "(", "'flow'", ",", "'Process'", ")", "DescriptorSchema", "=", "apps", ".", "get_model", "(", "'flow'", ",", "'DescriptorSchema'", ")", "fo...
44.529412
24
def send(self, obj, encoding='utf-8'):
    """
    Sends a python object to the backend. The object **must be JSON
    serialisable**.

    :param obj: object to send
    :param encoding: encoding used to encode the json message into a bytes
        array, this should match CodeEdit.file.encoding.
    """
    comm('sending request: %r', obj)
    payload = json.dumps(obj).encode(encoding)
    # Length-prefix framing: a 4-byte native-order unsigned-int header,
    # followed by the encoded JSON body.
    self.write(struct.pack('=I', len(payload)))
    self.write(payload)
[ "def", "send", "(", "self", ",", "obj", ",", "encoding", "=", "'utf-8'", ")", ":", "comm", "(", "'sending request: %r'", ",", "obj", ")", "msg", "=", "json", ".", "dumps", "(", "obj", ")", "msg", "=", "msg", ".", "encode", "(", "encoding", ")", "he...
34.8
13.733333
def generateFeatures(numFeatures):
    """Return a list of ``numFeatures`` string feature names.

    If <= 62 features are requested, output is single-character
    alphanumeric strings (A-Z, a-z, 0-9). Otherwise, output is
    ["F0", "F1", ...] of the requested length.
    """
    # Capital letters, lowercase letters, numbers.
    # BUG FIX: `xrange` is Python 2 only; use `range` for Python 3.
    candidates = ([chr(i + 65) for i in range(26)] +
                  [chr(i + 97) for i in range(26)] +
                  [chr(i + 48) for i in range(10)])
    if numFeatures > len(candidates):
        return ["F{}".format(i) for i in range(numFeatures)]
    return candidates[:numFeatures]
[ "def", "generateFeatures", "(", "numFeatures", ")", ":", "# Capital letters, lowercase letters, numbers", "candidates", "=", "(", "[", "chr", "(", "i", "+", "65", ")", "for", "i", "in", "xrange", "(", "26", ")", "]", "+", "[", "chr", "(", "i", "+", "97",...
34
16.5625
def _inherit_option(self, name, val): """Return the inherited TransactionOption value.""" if val: return val txn_opts = self.options.default_transaction_options val = txn_opts and getattr(txn_opts, name) if val: return val return getattr(self.client, name)
[ "def", "_inherit_option", "(", "self", ",", "name", ",", "val", ")", ":", "if", "val", ":", "return", "val", "txn_opts", "=", "self", ".", "options", ".", "default_transaction_options", "val", "=", "txn_opts", "and", "getattr", "(", "txn_opts", ",", "name"...
35.555556
13.222222
def handle_result(self, idents, parent, raw_msg, success=True):
    """handle a real task result, either success or failure"""
    engine, client = idents[0], idents[1]
    # Swap the routing ids so the reply travels back through the
    # ROUTER-ROUTER mirror to the client, then relay it.
    raw_msg[:2] = [client, engine]
    self.client_stream.send_multipart(raw_msg, copy=False)
    # Update bookkeeping: the task is no longer pending on this engine.
    msg_id = parent['msg_id']
    self.pending[engine].pop(msg_id)
    bucket = self.completed[engine] if success else self.failed[engine]
    bucket.add(msg_id)
    (self.all_completed if success else self.all_failed).add(msg_id)
    self.all_done.add(msg_id)
    self.destinations[msg_id] = engine
    self.update_graph(msg_id, success)
[ "def", "handle_result", "(", "self", ",", "idents", ",", "parent", ",", "raw_msg", ",", "success", "=", "True", ")", ":", "# first, relay result to client", "engine", "=", "idents", "[", "0", "]", "client", "=", "idents", "[", "1", "]", "# swap_ids for ROUTE...
37.909091
9.090909
def remove_description_by_type(self, type_p):
    """Delete all records which are equal to the passed type from the list

    in type_p
        of type :class:`VirtualSystemDescriptionType`
    """
    # Reject anything that is not the expected enum type up front.
    if not isinstance(type_p, VirtualSystemDescriptionType):
        raise TypeError("type_p can only be an instance of type VirtualSystemDescriptionType")
    self._call("removeDescriptionByType",
               in_p=[type_p])
[ "def", "remove_description_by_type", "(", "self", ",", "type_p", ")", ":", "if", "not", "isinstance", "(", "type_p", ",", "VirtualSystemDescriptionType", ")", ":", "raise", "TypeError", "(", "\"type_p can only be an instance of type VirtualSystemDescriptionType\"", ")", "...
43.9
20
def create_rl_lin_comb_method(op_name, klass, x_roles, y_roles): """ Creates a new binary special method with left and right versions, such as A.__mul__(B) <=> A*B, A.__rmul__(B) <=> [B*A if B.__mul__(A) fails] for target class. The method is called __op_name__. """ # This function will became the methods. def new_method(self, other, x_roles=x_roles, y_roles=y_roles): if not check_special_methods(): raise NotImplementedError( 'Special method %s called on %s, but special methods have been disabled. Set pymc.special_methods_available to True to enable them.' % (op_name, str(self))) x = [] y = [] for xr in x_roles: if xr == 'self': x.append(self) elif xr == 'other': x.append(other) else: x.append(xr) for yr in y_roles: if yr == 'self': y.append(self) elif yr == 'other': y.append(other) else: y.append(yr) # This code will create one of two Deterministic objects. return LinearCombination( '(' + '_'.join([self.__name__, op_name, str(other)]) + ')', x, y, trace=False, plot=False) # Convert the functions into methods for klass. new_method.__name__ = '__' + op_name + '__' setattr( klass, new_method.__name__, UnboundMethodType( new_method, None, klass))
[ "def", "create_rl_lin_comb_method", "(", "op_name", ",", "klass", ",", "x_roles", ",", "y_roles", ")", ":", "# This function will became the methods.", "def", "new_method", "(", "self", ",", "other", ",", "x_roles", "=", "x_roles", ",", "y_roles", "=", "y_roles", ...
35.186047
18.813953
def run(sub_command, exit_handle=None, **options):
    """Run a command

    Builds a :class:`Command` for *sub_command* and executes it with the
    given keyword *options*, returning whatever the command returns.
    """
    return Command(sub_command, exit_handle).run(**options)
[ "def", "run", "(", "sub_command", ",", "exit_handle", "=", "None", ",", "*", "*", "options", ")", ":", "command", "=", "Command", "(", "sub_command", ",", "exit_handle", ")", "return", "command", ".", "run", "(", "*", "*", "options", ")" ]
38.25
6
def transform_soups(config, soups, precomputed):
    """Mutate our soups to be better when we write them out later.

    :param config: build configuration, consumed by ``fixup_internal_links``.
    :param soups: collection of parsed pages (presumably BeautifulSoup trees
        — TODO confirm), mutated in place by every step below.
    :param precomputed: precomputed site data used for TOCs and pantsrefs.
    """
    fixup_internal_links(config, soups)
    ensure_headings_linkable(soups)
    # Do this after ensure_headings_linkable so that there will be links.
    generate_page_tocs(soups, precomputed)
    link_pantsrefs(soups, precomputed)
[ "def", "transform_soups", "(", "config", ",", "soups", ",", "precomputed", ")", ":", "fixup_internal_links", "(", "config", ",", "soups", ")", "ensure_headings_linkable", "(", "soups", ")", "# Do this after ensure_headings_linkable so that there will be links.", "generate_p...
41.5
11.625
def _methodInTraceback(self, name, traceback): ''' Returns boolean whether traceback contains method from this instance ''' foundMethod = False for frame in self._frames(traceback): this = frame.f_locals.get('self') if this is self and frame.f_code.co_name == name: foundMethod = True break return foundMethod
[ "def", "_methodInTraceback", "(", "self", ",", "name", ",", "traceback", ")", ":", "foundMethod", "=", "False", "for", "frame", "in", "self", ".", "_frames", "(", "traceback", ")", ":", "this", "=", "frame", ".", "f_locals", ".", "get", "(", "'self'", ...
31.909091
17.909091
def get_priority_objects(data, nObj=1, seeds=None, seeds_multi_index=None, debug=False):
    """
    Get N biggest objects from the selection or the object with seed.

    Similar function is in image_manipulation.select_objects_by_seeds().
    Use it if possible.

    :param data: labeled ndarray
    :param nObj: number of objects
    :param seeds: ndarray. Objects on non zero positions are returned
    :param seeds_multi_index: tuple of index arrays (as from numpy.nonzero);
        used directly when ``seeds`` is not image-shaped.
    :param debug: bool.
    :return: binar image with selected objects
    """
    # Labeling of the data:
    #   labels - the labeled data,
    #   length - number of distinct labels.
    if seeds is not None:
        # logger.warning("'seeds' parameter is obsolete. Use 'seeds_multi_index' instead of it.")
        if numpy.array_equal(data.shape, numpy.asarray(seeds).shape):
            # Image-shaped seeds: convert to a multi-index.
            seeds_multi_index = numpy.nonzero(seeds)
        else:
            if seeds_multi_index is None:
                logger.debug("Seeds looks to be seeds_multi_index.")
                seeds_multi_index = seeds
    dataLabels, length = scipy.ndimage.label(data)
    logger.info('Olabelovano oblasti: ' + str(length))
    logger.debug('data labels: ' + str(dataLabels))
    # The user did not select specific objects: return the N largest ones.
    if (seeds_multi_index is None):
        logger.info('Vraceni bez seedu')
        logger.debug('Objekty: ' + str(nObj))
        # Find the largest objects.
        arrayLabelsSum, arrayLabels = areaIndexes(dataLabels, length)
        # Sort labels by the size of the labeled regions.
        arrayLabelsSum, arrayLabels = selectSort(arrayLabelsSum, arrayLabels)
        returning = None
        label = 0
        stop = nObj - 1
        # Walk through arrayLabels, adding one region after another (from
        # the largest, skipping the zero background) until nObj objects
        # have been accumulated.
        while label <= stop:
            if label >= len(arrayLabels):
                break
            if arrayLabels[label] != 0:
                if returning is None:
                    # First iteration.
                    returning = data * (dataLabels == arrayLabels[label])
                else:
                    # Any further iteration.
                    returning = returning + data * \
                        (dataLabels == arrayLabels[label])
            else:
                # Extend the search interval because we hit the zero
                # background label.
                stop = stop + 1
            label = label + 1
            if debug:
                logger.debug(str(label - 1) + ': ' + str(returning))
        if returning is None:
            logger.info(
                'Zadna validni olabelovana data! (DEBUG: returning == None)')
        return returning
    # The user selected specific objects (seeds != None).
    else:
        logger.info('Vraceni se seedy')
        # Array holding the labels found under the seeds.
        arrSeed = []
        # Number of seed points.
        stop = seeds_multi_index[0].size
        tmpSeed = 0
        dim = numpy.ndim(dataLabels)
        for index in range(0, stop):
            # Collect the labels at the positions the user clicked.
            if dim == 3:
                # 3D data.
                tmpSeed = dataLabels[
                    seeds_multi_index[0][index],
                    seeds_multi_index[1][index],
                    seeds_multi_index[2][index]]
            elif dim == 2:
                # 2D data.
                tmpSeed = dataLabels[seeds_multi_index[0][index],
                                     seeds_multi_index[1][index]]
            # Label zero is assumed to be the (black) background.
            if tmpSeed != 0:
                # Not background — keep this seed label.
                arrSeed.append(tmpSeed)
        # If suitable labels exist, build the output data; otherwise return
        # None. { Deprecated behaviour: returning the whole filtered input. }
        if len(arrSeed) > 0:
            # Drop duplicate labels.
            arrSeed = list(set(arrSeed))
            if debug:
                logger.debug('seed list:' + str(arrSeed))
            logger.info(
                'Ruznych prioritnich objektu k vraceni: ' +
                str(len(arrSeed))
            )
            # Build the output by summing the data of each selected label.
            returning = None
            for index in range(0, len(arrSeed)):
                if returning is None:
                    returning = data * (dataLabels == arrSeed[index])
                else:
                    returning = returning + data * \
                        (dataLabels == arrSeed[index])
                if debug:
                    logger.debug((str(index)) + ':' + str(returning))
            return returning
        else:
            logger.warning(
                'Zadna validni data k vraceni - zadne prioritni objekty ' +
                'nenalezeny (DEBUG: function getPriorityObjects:' +
                str(len(arrSeed) == 0))
            return None
[ "def", "get_priority_objects", "(", "data", ",", "nObj", "=", "1", ",", "seeds", "=", "None", ",", "seeds_multi_index", "=", "None", ",", "debug", "=", "False", ")", ":", "# Oznaceni dat.", "# labels - oznacena data.", "# length - pocet rozdilnych oznaceni.", "if", ...
35.0625
22.006944
def resource_collection_response(cls, offset=0, limit=20):
    """
    This method is deprecated for version 1.1.0.
    Please use get_collection
    """
    request_args = {
        'page[offset]': offset,
        'page[limit]': limit,
    }
    return cls.get_collection(request_args)
[ "def", "resource_collection_response", "(", "cls", ",", "offset", "=", "0", ",", "limit", "=", "20", ")", ":", "request_args", "=", "{", "'page[offset]'", ":", "offset", ",", "'page[limit]'", ":", "limit", "}", "return", "cls", ".", "get_collection", "(", ...
45.833333
15.5
def parse_date(s):
    """Fast %Y-%m-%d parsing.

    Falls back to the long '%d %B %Y' format used in the one-day data set
    when slice-based parsing fails.
    """
    try:
        parts = (int(s[:4]), int(s[5:7]), int(s[8:10]))
        return datetime.date(*parts)
    except ValueError:
        # other accepted format used in one-day data set
        return datetime.datetime.strptime(s, '%d %B %Y').date()
[ "def", "parse_date", "(", "s", ")", ":", "try", ":", "return", "datetime", ".", "date", "(", "int", "(", "s", "[", ":", "4", "]", ")", ",", "int", "(", "s", "[", "5", ":", "7", "]", ")", ",", "int", "(", "s", "[", "8", ":", "10", "]", "...
43.333333
22.666667
def arches(self):
    """
    Return a list of architectures for this task.

    :returns: a list of arch strings (eg ["ppc64le", "x86_64"]).
              The list is empty if this task has no arches associated
              with it.
    """
    if self.method == 'image':
        # Image tasks carry their arch list as the third parameter.
        return self.params[2]
    return [self.arch] if self.arch else []
[ "def", "arches", "(", "self", ")", ":", "if", "self", ".", "method", "==", "'image'", ":", "return", "self", ".", "params", "[", "2", "]", "if", "self", ".", "arch", ":", "return", "[", "self", ".", "arch", "]", "return", "[", "]" ]
31.416667
17.583333
def getVerificators(self):
    """Returns the user ids of the users that verified this analysis,
    sorted in reverse order.
    """
    actions = ["verify", "multi_verify"]
    verifiers = [event['actor']
                 for event in wf.getReviewHistory(self)
                 if event['action'] in actions]
    # BUG FIX: the original called sorted(verifiers, reverse=True) and
    # discarded the result, so the list was returned unsorted. Sort in
    # place so the intended ordering is actually applied.
    verifiers.sort(reverse=True)
    return verifiers
[ "def", "getVerificators", "(", "self", ")", ":", "verifiers", "=", "list", "(", ")", "actions", "=", "[", "\"verify\"", ",", "\"multi_verify\"", "]", "for", "event", "in", "wf", ".", "getReviewHistory", "(", "self", ")", ":", "if", "event", "[", "'action...
37.9
6.6
def get_val(self):
    """
    Gets attribute's value.

    @return: stored value.
    @rtype: int
    @raise IOError: if corresponding file in /proc/sys cannot be read.
    """
    # BUG FIX: the `file()` builtin is Python 2 only; use open() with a
    # context manager so the descriptor is closed even if int() raises.
    with open(os.path.join(self._base, self._attr), 'r') as file_obj:
        return int(file_obj.readline())
[ "def", "get_val", "(", "self", ")", ":", "file_obj", "=", "file", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_base", ",", "self", ".", "_attr", ")", ",", "'r'", ")", "try", ":", "val", "=", "int", "(", "file_obj", ".", "readline", ...
26.857143
17.857143
def plot_precision_recall(y_true, y_probas,
                          title='Precision-Recall Curve',
                          plot_micro=True,
                          classes_to_plot=None,
                          ax=None, figsize=None,
                          cmap='nipy_spectral',
                          title_fontsize="large",
                          text_fontsize="medium"):
    """Generates the Precision Recall Curve from labels and probabilities

    Args:
        y_true (array-like, shape (n_samples)):
            Ground truth (correct) target values.

        y_probas (array-like, shape (n_samples, n_classes)):
            Prediction probabilities for each class returned by a classifier.

        title (string, optional): Title of the generated plot. Defaults to
            "Precision-Recall curve".

        plot_micro (boolean, optional): Plot the micro average ROC curve.
            Defaults to ``True``.

        classes_to_plot (list-like, optional): Classes for which the
            precision-recall curve should be plotted. e.g. [0, 'cold']. If
            given class does not exist, it will be ignored. If ``None``,
            all classes will be plotted. Defaults to ``None``.

        ax (:class:`matplotlib.axes.Axes`, optional): The axes upon which
            to plot the curve. If None, the plot is drawn on a new set of
            axes.

        figsize (2-tuple, optional): Tuple denoting figure size of the plot
            e.g. (6, 6). Defaults to ``None``.

        cmap (string or :class:`matplotlib.colors.Colormap` instance,
            optional): Colormap used for plotting the projection. View
            Matplotlib Colormap documentation for available options.
            https://matplotlib.org/users/colormaps.html

        title_fontsize (string or int, optional): Matplotlib-style fontsizes.
            Use e.g. "small", "medium", "large" or integer-values. Defaults
            to "large".

        text_fontsize (string or int, optional): Matplotlib-style fontsizes.
            Use e.g. "small", "medium", "large" or integer-values. Defaults
            to "medium".

    Returns:
        ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was
            drawn.

    Example:
        >>> import scikitplot as skplt
        >>> nb = GaussianNB()
        >>> nb.fit(X_train, y_train)
        >>> y_probas = nb.predict_proba(X_test)
        >>> skplt.metrics.plot_precision_recall(y_test, y_probas)
        <matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
        >>> plt.show()

        .. image:: _static/examples/plot_precision_recall_curve.png
           :align: center
           :alt: Precision Recall Curve
    """
    y_true = np.array(y_true)
    y_probas = np.array(y_probas)
    classes = np.unique(y_true)
    probas = y_probas
    if classes_to_plot is None:
        classes_to_plot = classes
    # One-hot encode the labels so per-class and micro-averaged average
    # precision can be computed column-wise.
    binarized_y_true = label_binarize(y_true, classes=classes)
    if len(classes) == 2:
        # label_binarize returns a single column for binary problems;
        # stack the complement so both classes get a column.
        binarized_y_true = np.hstack(
            (1 - binarized_y_true, binarized_y_true))
    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=figsize)
    ax.set_title(title, fontsize=title_fontsize)
    # Boolean mask over `classes` selecting those requested for plotting.
    indices_to_plot = np.in1d(classes, classes_to_plot)
    for i, to_plot in enumerate(indices_to_plot):
        if to_plot:
            average_precision = average_precision_score(
                binarized_y_true[:, i],
                probas[:, i])
            precision, recall, _ = precision_recall_curve(
                y_true, probas[:, i], pos_label=classes[i])
            # Spread class colors evenly across the colormap.
            color = plt.cm.get_cmap(cmap)(float(i) / len(classes))
            ax.plot(recall, precision, lw=2,
                    label='Precision-recall curve of class {0} '
                          '(area = {1:0.3f})'.format(classes[i],
                                                     average_precision),
                    color=color)
    if plot_micro:
        # Micro average: pool all class columns into one binary problem.
        precision, recall, _ = precision_recall_curve(
            binarized_y_true.ravel(), probas.ravel())
        average_precision = average_precision_score(binarized_y_true,
                                                    probas,
                                                    average='micro')
        ax.plot(recall, precision,
                label='micro-average Precision-recall curve '
                      '(area = {0:0.3f})'.format(average_precision),
                color='navy', linestyle=':', linewidth=4)
    ax.set_xlim([0.0, 1.0])
    ax.set_ylim([0.0, 1.05])
    ax.set_xlabel('Recall')
    ax.set_ylabel('Precision')
    ax.tick_params(labelsize=text_fontsize)
    ax.legend(loc='best', fontsize=text_fontsize)
    return ax
[ "def", "plot_precision_recall", "(", "y_true", ",", "y_probas", ",", "title", "=", "'Precision-Recall Curve'", ",", "plot_micro", "=", "True", ",", "classes_to_plot", "=", "None", ",", "ax", "=", "None", ",", "figsize", "=", "None", ",", "cmap", "=", "'nipy_...
39.582609
22.434783
def to_string(self):
    '''
    API: to_string(self)
    Description:
    Returns string representation of node in dot language.
    Return:
    String representation of node.
    '''
    rendered_attrs = ', '.join(
        '%s=%s' % (key, quote_if_necessary(str(self.attr[key])))
        for key in self.attr
    )
    return ''.join([
        quote_if_necessary(str(self.name)),
        ' [',
        rendered_attrs,
        ']',
    ])
[ "def", "to_string", "(", "self", ")", ":", "node", "=", "list", "(", ")", "node", ".", "append", "(", "quote_if_necessary", "(", "str", "(", "self", ".", "name", ")", ")", ")", "node", ".", "append", "(", "' ['", ")", "flag", "=", "False", "for", ...
28.590909
16.954545
def get_def_conf():
    '''return default configurations as simple dict'''
    # Each defConf entry is a sequence whose first element is the default.
    return {key: spec[0] for key, spec in defConf.items()}
[ "def", "get_def_conf", "(", ")", ":", "ret", "=", "dict", "(", ")", "for", "k", ",", "v", "in", "defConf", ".", "items", "(", ")", ":", "ret", "[", "k", "]", "=", "v", "[", "0", "]", "return", "ret" ]
25.833333
18.833333
def _keys2sls(self, keys, key2sl): """Convert an input key to a list of slices.""" sls = list() if isinstance(keys, tuple): for key in keys: sls.append(key2sl(key)) else: sls.append(key2sl(keys)) if len(sls) > self.ndim: fstr = "expected <= {0.ndim} slice dimensions, got {1}" raise ValueError(fstr.format(self, len(sls))) return sls
[ "def", "_keys2sls", "(", "self", ",", "keys", ",", "key2sl", ")", ":", "sls", "=", "list", "(", ")", "if", "isinstance", "(", "keys", ",", "tuple", ")", ":", "for", "key", "in", "keys", ":", "sls", ".", "append", "(", "key2sl", "(", "key", ")", ...
36.166667
12.416667
def stop(self):
    """Stop the Client, disconnect from queue

    Idempotent: a second call returns immediately once the end event is
    set. Tears down timers, worker pools and the AMQP link, then fails any
    still-pending requests with a LinkShutdownException.
    """
    if self.__end.is_set():
        return
    self.__end.set()
    # Shut down in dependency order: retry timer, worker pools, AMQP link,
    # and finally wait for the network-retry thread to finish.
    self.__send_retry_requests_timer.cancel()
    self.__threadpool.stop()
    self.__crud_threadpool.stop()
    self.__amqplink.stop()
    self.__network_retry_thread.join()
    # Clear out remaining pending requests
    with self.__requests:
        shutdown = LinkShutdownException('Client stopped')
        for req in self.__requests.values():
            # Wake up anyone blocked on this request with the shutdown error.
            req.exception = shutdown
            req._set()
            self.__clear_references(req, remove_request=False)
        if self.__requests:
            logger.warning('%d unfinished request(s) discarded', len(self.__requests))
        self.__requests.clear()
    # self.__network_retry_thread = None
    self.__network_retry_queue = None
    self.__container_params = None
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "__end", ".", "is_set", "(", ")", ":", "return", "self", ".", "__end", ".", "set", "(", ")", "self", ".", "__send_retry_requests_timer", ".", "cancel", "(", ")", "self", ".", "__threadpool", "....
37.64
11.64
def hashsummary(self):
    """Print a model summary - checksums of each layer parameters"""
    digests = []
    for module in self.children():
        for param in module.parameters():
            raw = param.detach().cpu().numpy().tobytes()
            digests.append(hashlib.sha256(raw).hexdigest())
    return digests
[ "def", "hashsummary", "(", "self", ")", ":", "children", "=", "list", "(", "self", ".", "children", "(", ")", ")", "result", "=", "[", "]", "for", "child", "in", "children", ":", "result", ".", "extend", "(", "hashlib", ".", "sha256", "(", "x", "."...
32.3
26.5
def set_defaults(self):
    """
    All fields of Epm with a default value and that are null will be set
    to their default value.
    """
    # Delegate to each record across every table.
    for records in self._tables.values():
        for record in records:
            record.set_defaults()
[ "def", "set_defaults", "(", "self", ")", ":", "for", "table", "in", "self", ".", "_tables", ".", "values", "(", ")", ":", "for", "r", "in", "table", ":", "r", ".", "set_defaults", "(", ")" ]
35.285714
14.428571
def _set_return_address(self, state, ret_addr):
    """
    Set the return address of the current state to a specific address. We
    assume we are at the beginning of a function, i.e. about to execute its
    very first instruction.

    :param SimState state: The program state
    :param int ret_addr: The return address
    :return: None
    """
    # TODO: the following code is totally untested other than X86 and
    # AMD64. Don't freak out if you find bugs :)
    # TODO: Test it
    ret_bvv = state.solver.BVV(ret_addr, self.project.arch.bits)
    arch_name = self.project.arch.name
    if arch_name in ('X86', 'AMD64'):
        # x86 family: the return address lives on the stack.
        state.stack_push(ret_bvv)
    elif is_arm_arch(self.project.arch) or arch_name in ('PPC32', 'PPC64'):
        # ARM and PPC both use the link register.
        state.regs.lr = ret_bvv
    elif arch_name in ('MIPS32', 'MIPS64'):
        state.regs.ra = ret_bvv
    else:
        l.warning('Return address cannot be set for architecture %s. Please add corresponding logic to '
                  'VFG._set_return_address().', arch_name
                  )
[ "def", "_set_return_address", "(", "self", ",", "state", ",", "ret_addr", ")", ":", "# TODO: the following code is totally untested other than X86 and AMD64. Don't freak out if you find bugs :)", "# TODO: Test it", "ret_bvv", "=", "state", ".", "solver", ".", "BVV", "(", "ret...
43.888889
25.148148
def match_rules(self, log_data):
    """
    Process a log line data message with app's pattern rules.

    Return a tuple with this data:

    Element #0 (app_matched): True if a rule match, False otherwise;
    Element #1 (has_full_match): True if a rule match and is a filter or
    the app has not filters; False if a rule match but is not a filter;
    None otherwise;
    Element #2 (app_thread): Thread value if a rule match and it has a
    "thread" group, None otherwise;
    Element #3 (output_data): Mapping dictionary if a rule match and a map
    of output is requested (--anonymize/--ip/--uid options).
    """
    for rule in self.rules:
        match = rule.regexp.search(log_data.message)
        if match is not None:
            gids = rule.regexp.groupindex
            self._last_rule = rule
            if self.name_cache is not None:
                # Output mapping requested: translate matched values
                # through the name cache (anonymization/ip/uid mapping).
                values = self.name_cache.match_to_dict(match, rule.key_gids)
                values['host'] = self.name_cache.map_value(log_data.host, 'host')
                output_data = {
                    'host': values['host'],
                    'message': self.name_cache.match_to_string(match, gids, values),
                }
            else:
                # No mapping: collect raw group values for filtering/report.
                values = {'host': log_data.host}
                for gid in gids:
                    values[gid] = match.group(gid)
                output_data = None
            if self._thread and 'thread' in rule.regexp.groupindex:
                thread = match.group('thread')
                # A filter rule whose filter keys are unmatched counts as
                # no match at all.
                if rule.filter_keys is not None and \
                        any([values[key] is None for key in rule.filter_keys]):
                    return False, None, None, None
                if self._report:
                    rule.add_result(values)
                return True, rule.full_match, thread, output_data
            else:
                if rule.filter_keys is not None and \
                        any([values[key] is None for key in rule.filter_keys]):
                    return False, None, None, None
                elif self._report or (rule.filter_keys is not None or not self.has_filters):
                    rule.add_result(values)
                return True, rule.full_match, None, output_data
    # No rule match: the application log message is not parsable with enabled rules.
    self._last_rule = None
    return False, None, None, None
[ "def", "match_rules", "(", "self", ",", "log_data", ")", ":", "for", "rule", "in", "self", ".", "rules", ":", "match", "=", "rule", ".", "regexp", ".", "search", "(", "log_data", ".", "message", ")", "if", "match", "is", "not", "None", ":", "gids", ...
51.196078
21.117647
def post_build(self, packet, payload):
    """Compute the 'sources_number' field when needed"""
    if self.sources_number is None:
        # Patch the big-endian 2-byte source count into bytes 26-27.
        count = struct.pack("!H", len(self.sources))
        packet = packet[:26] + count + packet[28:]
    return _ICMPv6.post_build(self, packet, payload)
[ "def", "post_build", "(", "self", ",", "packet", ",", "payload", ")", ":", "if", "self", ".", "sources_number", "is", "None", ":", "srcnum", "=", "struct", ".", "pack", "(", "\"!H\"", ",", "len", "(", "self", ".", "sources", ")", ")", "packet", "=", ...
50.833333
8.5
def as_sql(self, compiler, connection):
    """Compiles this expression into SQL."""
    inner_sql, params = super().as_sql(compiler, connection)
    # Wrap the inner expression so PostgreSQL returns the epoch seconds.
    wrapped_sql = 'EXTRACT(epoch FROM {})'.format(inner_sql)
    return wrapped_sql, params
[ "def", "as_sql", "(", "self", ",", "compiler", ",", "connection", ")", ":", "sql", ",", "params", "=", "super", "(", ")", ".", "as_sql", "(", "compiler", ",", "connection", ")", "return", "'EXTRACT(epoch FROM {})'", ".", "format", "(", "sql", ")", ",", ...
40.8
15.6
def signal(self, container, instances=None, map_name=None, **kwargs):
    """
    Sends a signal to a single running container configuration (but possibly
    multiple instances). If not specified with ``signal``, this signal is
    ``SIGKILL``.

    :param container: Container configuration name.
    :type container: unicode | str
    :param map_name: Container map name.
    :type map_name: unicode | str
    :param instances: Instance name. Optional, if not specified sends the
      signal to all configured instances, or the default.
    :type instances: unicode | str
    :param kwargs: Keyword arguments to the script runner function.
    :return: Return values of actions.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    results = self.run_actions('signal', container, instances=instances,
                               map_name=map_name, **kwargs)
    return results
[ "def", "signal", "(", "self", ",", "container", ",", "instances", "=", "None", ",", "map_name", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "run_actions", "(", "'signal'", ",", "container", ",", "instances", "=", "instances", ...
51.823529
22.882353
def rotate_du_by_yaw(self, du, heading):
    """Rotate all DOMs on DU by a given (yaw) heading."""
    on_du = self.pmts.du == du
    # Each DOM appears once per PMT; deduplicate before rotating.
    for dom_id in np.unique(self.pmts.dom_id[on_du]):
        self.rotate_dom_by_yaw(dom_id, heading)
    self.reset_caches()
[ "def", "rotate_du_by_yaw", "(", "self", ",", "du", ",", "heading", ")", ":", "mask", "=", "(", "self", ".", "pmts", ".", "du", "==", "du", ")", "dom_ids", "=", "np", ".", "unique", "(", "self", ".", "pmts", ".", "dom_id", "[", "mask", "]", ")", ...
42.142857
7.142857
def babel_compile(source, **kwargs):
    """Compiles the given ``source`` from ES6 to ES5 using Babeljs"""
    # Default to the es2015 preset when the caller supplies none.
    if not kwargs.get('presets'):
        kwargs['presets'] = ["es2015"]
    with open(BABEL_COMPILER, 'rb') as babel_js:
        compiler_source = babel_js.read().decode('utf-8')
    return evaljs(
        (compiler_source,
         'var bres, res;'
         'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);',
         'res = {map: bres.map, code: bres.code};'),
        es6code=source,
        babel_options=kwargs
    )
[ "def", "babel_compile", "(", "source", ",", "*", "*", "kwargs", ")", ":", "presets", "=", "kwargs", ".", "get", "(", "'presets'", ")", "if", "not", "presets", ":", "kwargs", "[", "'presets'", "]", "=", "[", "\"es2015\"", "]", "with", "open", "(", "BA...
38.571429
12.642857
def create_empty_copy(G, with_data=True):
    """Return a copy of the graph G with all of the edges removed.

    Parameters
    ----------
    G : graph
        A DyNetx graph
    with_data : bool (default=True)
        Include data.

    Notes
    -----
    Graph and edge data is not propagated to the new graph.
    """
    empty = G.__class__()
    empty.add_nodes_from(G.nodes(data=with_data))
    if with_data:
        empty.graph.update(G.graph)
    return empty
[ "def", "create_empty_copy", "(", "G", ",", "with_data", "=", "True", ")", ":", "H", "=", "G", ".", "__class__", "(", ")", "H", ".", "add_nodes_from", "(", "G", ".", "nodes", "(", "data", "=", "with_data", ")", ")", "if", "with_data", ":", "H", ".",...
23.75
19
def set_state(self, light_id, **kwargs):
    '''
    Sets state on the light, can be used like this:

    .. code-block:: python

        set_state(1, xy=[1,2])
    '''
    light = self.get_light(light_id)
    url = '/api/%s/lights/%s/state' % (self.username, light.light_id)
    response = self.make_request('PUT', url, kwargs)
    # Every requested setting must be acknowledged with a 'success' entry.
    successes = sum(1 for entry in response if 'success' in entry)
    return successes == len(kwargs.items())
[ "def", "set_state", "(", "self", ",", "light_id", ",", "*", "*", "kwargs", ")", ":", "light", "=", "self", ".", "get_light", "(", "light_id", ")", "url", "=", "'/api/%s/lights/%s/state'", "%", "(", "self", ".", "username", ",", "light", ".", "light_id", ...
28
18.272727
def ensure_num_chosen_alts_equals_num_obs(obs_id_col, choice_col, df):
    """
    Checks that the total number of recorded choices equals the total number
    of observations. If this is not the case, raise helpful ValueError
    messages.

    Parameters
    ----------
    obs_id_col : str.
        Denotes the column in `df` that contains the observation ID values
        for each row.
    choice_col : str.
        Denotes the column in `long_data` that contains a one if the
        alternative pertaining to the given row was the observed outcome
        for the observation pertaining to the given row and a zero
        otherwise.
    df : pandas dataframe.
        The dataframe whose choices and observations will be checked.

    Returns
    -------
    None.
    """
    num_obs = df[obs_id_col].unique().shape[0]
    num_choices = df[choice_col].sum()
    if num_choices < num_obs:
        # Some observation made no choice at all.
        raise ValueError("One or more observations have not chosen one "
                         "of the alternatives available to him/her")
    if num_choices > num_obs:
        # Some observation made more than one choice.
        raise ValueError("One or more observations has chosen multiple alternatives")
    return None
[ "def", "ensure_num_chosen_alts_equals_num_obs", "(", "obs_id_col", ",", "choice_col", ",", "df", ")", ":", "num_obs", "=", "df", "[", "obs_id_col", "]", ".", "unique", "(", ")", ".", "shape", "[", "0", "]", "num_choices", "=", "df", "[", "choice_col", "]",...
35.242424
23.848485
def onresize(self, emitter, width, height):
    """WebPage Event that occurs on webpage gets resized"""
    message = 'App.onresize event occurred. Width:%s Height:%s' % (width, height)
    self._log.debug(message)
[ "def", "onresize", "(", "self", ",", "emitter", ",", "width", ",", "height", ")", ":", "self", ".", "_log", ".", "debug", "(", "'App.onresize event occurred. Width:%s Height:%s'", "%", "(", "width", ",", "height", ")", ")" ]
51.25
13.25
def get_asset_content_mdata():
    """Return default mdata map for AssetContent.

    The map describes the 'url', 'data', 'accessibility_type' and 'asset'
    elements: their labels, entry instructions and value constraints.
    """
    def display_text(text):
        # Every element_label / instructions entry shares the same
        # language / script / format type triplet; build it in one place
        # instead of repeating the four-key dict eight times.
        return {
            'text': text,
            'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),
            'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),
            'formatTypeId': str(DEFAULT_FORMAT_TYPE),
        }

    return {
        'url': {
            'element_label': display_text('url'),
            'instructions': display_text('enter no more than 256 characters.'),
            'required': False,
            'read_only': False,
            'linked': False,
            'array': False,
            'default_string_values': [''],
            'syntax': 'STRING',
            'minimum_string_length': 0,
            'maximum_string_length': 256,
            'string_set': [],
        },
        'data': {
            'element_label': display_text('data'),
            'instructions': display_text('accepts a valid data input stream.'),
            'required': False,
            'read_only': False,
            'linked': False,
            'array': False,
            'default_object_values': [''],
            'syntax': 'OBJECT',
            'object_types': [],
            'object_set': [],
        },
        'accessibility_type': {
            'element_label': display_text('accessibility type'),
            'instructions': display_text('accepts an osid.type.Type object'),
            'required': False,
            'read_only': False,
            'linked': False,
            'array': False,
            'default_type_values': ['NoneType%3ANONE%40dlkit.mit.edu'],
            'syntax': 'TYPE',
            'type_set': [],
        },
        'asset': {
            'element_label': display_text('asset'),
            'instructions': display_text('accepts an osid.id.Id object'),
            'required': False,
            'read_only': False,
            'linked': False,
            'array': False,
            'default_id_values': [''],
            'syntax': 'ID',
            'id_set': [],
        },
    }
[ "def", "get_asset_content_mdata", "(", ")", ":", "return", "{", "'url'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'url'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCR...
36.681319
15.681319
def from_kvs(keyvals):
    """
    Create H2OCluster object from a list of key-value pairs.

    TODO: This method should be moved into the base H2OResponse class.
    """
    ignored = {"__meta", "_exclude_fields", "__schema"}
    cluster = H2OCluster()
    cluster._retrieved_at = time.time()
    for key, value in keyvals:
        if key in ignored:
            continue
        if key not in _cloud_v3_valid_keys:
            raise AttributeError("Attribute %s cannot be set on H2OCluster (= %r)" % (key, value))
        cluster._props[key] = value
    return cluster
[ "def", "from_kvs", "(", "keyvals", ")", ":", "obj", "=", "H2OCluster", "(", ")", "obj", ".", "_retrieved_at", "=", "time", ".", "time", "(", ")", "for", "k", ",", "v", "in", "keyvals", ":", "if", "k", "in", "{", "\"__meta\"", ",", "\"_exclude_fields\...
36.733333
18.866667
def query_cat_recent_with_label(cat_id, label=None, num=8, kind='1', order=False):
    '''
    query_cat_recent_with_label

    Return up to ``num`` posts of the given ``kind`` that are tagged with
    ``cat_id`` and whose ``def_tag_arr`` ext-info array contains ``label``.
    When ``order`` is truthy the result is sorted by the manual ``order``
    column ascending, otherwise by creation time, newest first.
    '''
    if order:
        sort_criteria = TabPost.order.asc()
    else:
        sort_criteria = TabPost.time_create.desc()
    # Join posts to tags through the TabPost2Tag association table.
    return TabPost.select().join(
        TabPost2Tag,
        on=(TabPost.uid == TabPost2Tag.post_id)
    ).where(
        (TabPost.kind == kind) &
        (TabPost2Tag.tag_id == cat_id) &
        (TabPost.extinfo['def_tag_arr'].contains(label))
    ).order_by(
        sort_criteria
    ).limit(num)
[ "def", "query_cat_recent_with_label", "(", "cat_id", ",", "label", "=", "None", ",", "num", "=", "8", ",", "kind", "=", "'1'", ",", "order", "=", "False", ")", ":", "if", "order", ":", "sort_criteria", "=", "TabPost", ".", "order", ".", "asc", "(", "...
31.684211
18.631579
def ack(self, tup):
    """Indicate that processing of a Tuple has succeeded

    It is compatible with StreamParse API.
    """
    if not isinstance(tup, HeronTuple):
        Log.error("Only HeronTuple type is supported in ack()")
        return

    if self.acking_enabled:
        # Build an AckTuple carrying every root tuple id so the stream
        # manager can mark the whole tuple tree as acknowledged.
        ack_tuple = tuple_pb2.AckTuple()
        ack_tuple.ackedtuple = int(tup.id)

        tuple_size_in_bytes = 0
        for rt in tup.roots:
            to_add = ack_tuple.roots.add()
            to_add.CopyFrom(rt)
            tuple_size_in_bytes += rt.ByteSize()
        super(BoltInstance, self).admit_control_tuple(ack_tuple, tuple_size_in_bytes, True)

    # Latency is measured from tuple creation; convert seconds to ns.
    process_latency_ns = (time.time() - tup.creation_time) * system_constants.SEC_TO_NS
    self.pplan_helper.context.invoke_hook_bolt_ack(tup, process_latency_ns)
    self.bolt_metrics.acked_tuple(tup.stream, tup.component, process_latency_ns)
[ "def", "ack", "(", "self", ",", "tup", ")", ":", "if", "not", "isinstance", "(", "tup", ",", "HeronTuple", ")", ":", "Log", ".", "error", "(", "\"Only HeronTuple type is supported in ack()\"", ")", "return", "if", "self", ".", "acking_enabled", ":", "ack_tup...
36.347826
20.130435
def copy_to_clipboard(self, copy=True):
    """
    Places the selected items on the system clipboard.

    :param copy: True to copy, False to cut.
    """
    selection = self.selected_urls()
    if not selection:
        return
    mime_data = self._UrlListMimeData(copy)
    mime_data.set_list(selection)
    QtWidgets.QApplication.clipboard().setMimeData(mime_data)
[ "def", "copy_to_clipboard", "(", "self", ",", "copy", "=", "True", ")", ":", "urls", "=", "self", ".", "selected_urls", "(", ")", "if", "not", "urls", ":", "return", "mime", "=", "self", ".", "_UrlListMimeData", "(", "copy", ")", "mime", ".", "set_list...
32.5
8.333333
def _parse_pubkey(stream, packet_type='pubkey'):
    """See https://tools.ietf.org/html/rfc4880#section-5.5 for details."""
    p = {'type': packet_type}
    packet = io.BytesIO()
    # Capture the raw packet bytes while parsing: they are needed below to
    # compute the 8-byte key ID fingerprint hash.
    with stream.capture(packet):
        p['version'] = stream.readfmt('B')
        p['created'] = stream.readfmt('>L')
        p['algo'] = stream.readfmt('B')
        if p['algo'] in ECDSA_ALGO_IDS:
            log.debug('parsing elliptic curve key')
            # https://tools.ietf.org/html/rfc6637#section-11
            oid_size = stream.readfmt('B')
            oid = stream.read(oid_size)
            assert oid in SUPPORTED_CURVES, util.hexlify(oid)
            p['curve_oid'] = oid
            mpi = parse_mpi(stream)
            log.debug('mpi: %x (%d bits)', mpi, mpi.bit_length())
            leftover = stream.read()
            if leftover:
                leftover = io.BytesIO(leftover)
                # https://tools.ietf.org/html/rfc6637#section-8
                # should be b'\x03\x01\x08\x07': SHA256 + AES128
                size, = util.readfmt(leftover, 'B')
                p['kdf'] = leftover.read(size)
                p['secret'] = leftover.read()
            parse_func, keygrip_func = SUPPORTED_CURVES[oid]
            keygrip = keygrip_func(parse_func(mpi))
            log.debug('keygrip: %s', util.hexlify(keygrip))
            p['keygrip'] = keygrip
        elif p['algo'] == DSA_ALGO_ID:
            parse_mpis(stream, n=4)  # DSA keys are not supported
        elif p['algo'] == ELGAMAL_ALGO_ID:
            parse_mpis(stream, n=3)  # ElGamal keys are not supported
        else:  # assume RSA
            parse_mpis(stream, n=2)  # RSA keys are not supported
        # The packet must be fully consumed at this point.
        assert not stream.read()

    # https://tools.ietf.org/html/rfc4880#section-12.2
    packet_data = packet.getvalue()
    data_to_hash = (b'\x99' + struct.pack('>H', len(packet_data)) +
                    packet_data)
    p['key_id'] = hashlib.sha1(data_to_hash).digest()[-8:]
    p['_to_hash'] = data_to_hash
    log.debug('key ID: %s', util.hexlify(p['key_id']))
    return p
[ "def", "_parse_pubkey", "(", "stream", ",", "packet_type", "=", "'pubkey'", ")", ":", "p", "=", "{", "'type'", ":", "packet_type", "}", "packet", "=", "io", ".", "BytesIO", "(", ")", "with", "stream", ".", "capture", "(", "packet", ")", ":", "p", "["...
42.041667
14.166667
def find_substring_edge(self, substring, suffix_tree_id):
    """Returns an edge that matches the given substring, timing the search."""
    suffix_tree = self.suffix_tree_repo[suffix_tree_id]
    started = datetime.datetime.now()
    edge, ln = find_substring_edge(
        substring=substring,
        suffix_tree=suffix_tree,
        edge_repo=self.edge_repo,
    )
    elapsed = datetime.datetime.now() - started
    print(" - searched for edge in {} for substring: '{}'".format(elapsed, substring))
    return edge, ln
[ "def", "find_substring_edge", "(", "self", ",", "substring", ",", "suffix_tree_id", ")", ":", "suffix_tree", "=", "self", ".", "suffix_tree_repo", "[", "suffix_tree_id", "]", "started", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "edge", ",", "ln...
55.583333
25
def has_insert(self, shape):
    """Returns True if any of the inserts have the given shape."""
    return any(insert.shape == shape for insert in self.inserts)
[ "def", "has_insert", "(", "self", ",", "shape", ")", ":", "for", "insert", "in", "self", ".", "inserts", ":", "if", "insert", ".", "shape", "==", "shape", ":", "return", "True", "return", "False" ]
36.166667
8.833333
def get_current_value(self, use_cached=False):
    """Return the most recent DataPoint value written to a stream

    The current value is the last recorded data point for this stream.

    :param bool use_cached: If False, always request the latest metadata
        from Device Cloud; if True, reuse already-cached metadata.
    :raises devicecloud.DeviceCloudHttpException: on an unexpected http error
    :raises devicecloud.streams.NoSuchStreamException: if this stream has
        not yet been created
    :return: The most recent value written to this stream, or None if
        nothing has been written
    :rtype: :class:`~DataPoint` or None
    """
    metadata = self._get_stream_metadata(use_cached)
    current_value = metadata.get("currentValue")
    if not current_value:
        return None
    return DataPoint.from_json(self, current_value)
[ "def", "get_current_value", "(", "self", ",", "use_cached", "=", "False", ")", ":", "current_value", "=", "self", ".", "_get_stream_metadata", "(", "use_cached", ")", ".", "get", "(", "\"currentValue\"", ")", "if", "current_value", ":", "return", "DataPoint", ...
51.333333
31.222222
def address_exclude(self, other):
    """Remove an address from a larger block.

    For example:

        addr1 = IPNetwork('10.1.1.0/24')
        addr2 = IPNetwork('10.1.1.0/26')
        addr1.address_exclude(addr2) =
            [IPNetwork('10.1.1.64/26'), IPNetwork('10.1.1.128/25')]

    or IPv6:

        addr1 = IPNetwork('::1/32')
        addr2 = IPNetwork('::1/128')
        addr1.address_exclude(addr2) = [IPNetwork('::0/128'),
            IPNetwork('::2/127'),
            IPNetwork('::4/126'),
            IPNetwork('::8/125'),
            ...
            IPNetwork('0:0:8000::/33')]

    Args:
        other: An IPvXNetwork object of the same type.

    Returns:
        A sorted list of IPvXNetwork objects addresses which is self
        minus other.

    Raises:
        TypeError: If self and other are of difffering address
          versions, or if other is not a network object.
        ValueError: If other is not completely contained by self.

    """
    if not self._version == other._version:
        raise TypeError("%s and %s are not of the same version" % (
            str(self), str(other)))

    if not isinstance(other, _BaseNet):
        raise TypeError("%s is not a network object" % str(other))

    if other not in self:
        raise ValueError('%s not contained in %s' % (str(other),
                                                     str(self)))

    if other == self:
        return []

    ret_addrs = []

    # Make sure we're comparing the network of other.
    other = IPNetwork('%s/%s' % (str(other.network), str(other.prefixlen)),
                      version=other._version)

    # Repeatedly bisect self; keep the half that does NOT contain `other`
    # and recurse into the half that does, until one half equals `other`.
    s1, s2 = self.subnet()
    while s1 != other and s2 != other:
        if other in s1:
            ret_addrs.append(s2)
            s1, s2 = s1.subnet()
        elif other in s2:
            ret_addrs.append(s1)
            s1, s2 = s2.subnet()
        else:
            # If we got here, there's a bug somewhere.
            assert True == False, ('Error performing exclusion: '
                                   's1: %s s2: %s other: %s' %
                                   (str(s1), str(s2), str(other)))
    if s1 == other:
        ret_addrs.append(s2)
    elif s2 == other:
        ret_addrs.append(s1)
    else:
        # If we got here, there's a bug somewhere.
        assert True == False, ('Error performing exclusion: '
                               's1: %s s2: %s other: %s' %
                               (str(s1), str(s2), str(other)))

    return sorted(ret_addrs, key=_BaseNet._get_networks_key)
[ "def", "address_exclude", "(", "self", ",", "other", ")", ":", "if", "not", "self", ".", "_version", "==", "other", ".", "_version", ":", "raise", "TypeError", "(", "\"%s and %s are not of the same version\"", "%", "(", "str", "(", "self", ")", ",", "str", ...
35.181818
20.012987
def exists(vpc_id=None, name=None, cidr=None, tags=None, region=None,
           key=None, keyid=None, profile=None):
    '''
    Given a VPC ID, check to see if the given VPC ID exists.

    Returns True if the given VPC ID exists and returns False if the given
    VPC ID does not exist.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_vpc.exists myvpc

    '''
    try:
        # Any of vpc_id / name / cidr / tags may be used to narrow the lookup.
        vpc_ids = _find_vpcs(vpc_id=vpc_id, vpc_name=name, cidr=cidr,
                             tags=tags, region=region, key=key, keyid=keyid,
                             profile=profile)
    except BotoServerError as err:
        boto_err = __utils__['boto.get_error'](err)
        if boto_err.get('aws', {}).get('code') == 'InvalidVpcID.NotFound':
            # VPC was not found: handle the error and return False.
            return {'exists': False}
        # Any other AWS error is surfaced to the caller.
        return {'error': boto_err}

    return {'exists': bool(vpc_ids)}
[ "def", "exists", "(", "vpc_id", "=", "None", ",", "name", "=", "None", ",", "cidr", "=", "None", ",", "tags", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "t...
32.407407
26.111111
def get_tree(self, list_of_keys):
    """
    Walk a nested dictionary, following each key in turn.

    INPUT
        list_of_keys: a list of keys ie. ['key1', 'key2']

    USAGE
        >>> # Access the value for key2 within the nested dictionary
        >>> adv_dict({'key1': {'key2': 'value'}}).gettree(['key1', 'key2'])
        'value'
    """
    node = self
    for key in list_of_keys:
        node = node.get(key)
        if not node:
            break
    return node
[ "def", "get_tree", "(", "self", ",", "list_of_keys", ")", ":", "cur_obj", "=", "self", "for", "key", "in", "list_of_keys", ":", "cur_obj", "=", "cur_obj", ".", "get", "(", "key", ")", "if", "not", "cur_obj", ":", "break", "return", "cur_obj" ]
31.6
17.866667
def display_user(value, arg):
    '''
    Return 'You' if value is equal to arg.

    Parameters:
        value should be a userprofile
        arg should be another user.

    Ideally, value should be a userprofile from an object and arg the user
    logged in.
    '''
    is_viewer = value.user == arg and arg.username != ANONYMOUS_USERNAME
    return "You" if is_viewer else value.user.get_full_name()
[ "def", "display_user", "(", "value", ",", "arg", ")", ":", "if", "value", ".", "user", "==", "arg", "and", "arg", ".", "username", "!=", "ANONYMOUS_USERNAME", ":", "return", "\"You\"", "else", ":", "return", "value", ".", "user", ".", "get_full_name", "(...
36.727273
18
def get_all_orders_ungrouped(self):
    """
    Generator yielding every order within, one at a time.

    :py:class:`MarketOrder` objects are yielded directly, instead of being
    grouped in :py:class:`MarketItemsInRegionList` instances.

    .. note:: This is a generator!

    :rtype: generator
    :returns: Generates a list of :py:class:`MarketOrder` instances.
    """
    for item_orders in self._orders.values():
        for market_order in item_orders.orders:
            yield market_order
[ "def", "get_all_orders_ungrouped", "(", "self", ")", ":", "for", "olist", "in", "self", ".", "_orders", ".", "values", "(", ")", ":", "for", "order", "in", "olist", ".", "orders", ":", "yield", "order" ]
35.428571
16.285714
def get_scheduling_block(sub_array_id, block_id):
    """Return the scheduling block instance with the given id that is
    associated with the sub array, or a 404 error payload."""
    known_ids = DB.get_sub_array_sbi_ids(sub_array_id)
    if block_id not in known_ids:
        return dict(error="unknown id"), HTTPStatus.NOT_FOUND
    details = DB.get_block_details([block_id]).__next__()
    return details, HTTPStatus.OK
[ "def", "get_scheduling_block", "(", "sub_array_id", ",", "block_id", ")", ":", "block_ids", "=", "DB", ".", "get_sub_array_sbi_ids", "(", "sub_array_id", ")", "if", "block_id", "in", "block_ids", ":", "block", "=", "DB", ".", "get_block_details", "(", "[", "bl...
41.333333
12.777778
def zval_dict_from_potcar(potcar):
    """
    Creates zval_dictionary for calculating the ionic polarization from
    Potcar object

    potcar: Potcar object
    """
    return {entry.element: entry.ZVAL for entry in potcar}
[ "def", "zval_dict_from_potcar", "(", "potcar", ")", ":", "zval_dict", "=", "{", "}", "for", "p", "in", "potcar", ":", "zval_dict", ".", "update", "(", "{", "p", ".", "element", ":", "p", ".", "ZVAL", "}", ")", "return", "zval_dict" ]
24
16.545455
def cli(env, identifier, enabled, port, weight, healthcheck_type, ip_address):
    """Edit the properties of a service group."""
    mgr = SoftLayer.LoadBalancerManager(env.client)
    # identifier encodes both the load balancer and the service ids;
    # loadbal.parse_id splits it into the two components.
    loadbal_id, service_id = loadbal.parse_id(identifier)

    # check if any input is provided
    if ((not any([ip_address, weight, port, healthcheck_type])) and
            enabled is None):
        raise exceptions.CLIAbort(
            'At least one property is required to be changed!')

    # check if the IP is valid
    ip_address_id = None
    if ip_address:
        # Resolve the dotted-quad address to its SoftLayer record id.
        ip_service = env.client['Network_Subnet_IpAddress']
        ip_record = ip_service.getByIpAddress(ip_address)
        ip_address_id = ip_record['id']

    mgr.edit_service(loadbal_id,
                     service_id,
                     ip_address_id=ip_address_id,
                     enabled=enabled,
                     port=port,
                     weight=weight,
                     hc_type=healthcheck_type)
    env.fout('Load balancer service %s is being modified!' % identifier)
[ "def", "cli", "(", "env", ",", "identifier", ",", "enabled", ",", "port", ",", "weight", ",", "healthcheck_type", ",", "ip_address", ")", ":", "mgr", "=", "SoftLayer", ".", "LoadBalancerManager", "(", "env", ".", "client", ")", "loadbal_id", ",", "service_...
36.607143
17.928571
def _init_metadata(self):
    """stub"""
    # Metadata describing the 'rerandomize' element of an edX
    # multiple-choice question (how/when its parameters are re-randomized;
    # defaults to 'never').
    self._rerandomize_metadata = {
        'element_id': Id(self.my_osid_object_form._authority,
                         self.my_osid_object_form._namespace,
                         'rerandomize'),
        'element_label': 'Randomize',
        'instructions': 'How to rerandomize the parameters',
        'required': False,
        'read_only': False,
        'linked': False,
        'array': False,
        'default_object_values': ['never'],
        'syntax': 'STRING',
        'minimum_string_length': None,
        'maximum_string_length': None,
        'string_set': []
    }
    # Let the parent record chain initialize its own metadata too.
    super(edXMultiChoiceQuestionFormRecord, self)._init_metadata()
[ "def", "_init_metadata", "(", "self", ")", ":", "self", ".", "_rerandomize_metadata", "=", "{", "'element_id'", ":", "Id", "(", "self", ".", "my_osid_object_form", ".", "_authority", ",", "self", ".", "my_osid_object_form", ".", "_namespace", ",", "'rerandomize'...
39.210526
12.263158
def create_same_as_file(self):
    """ creates a local data file with all of the owl:sameAs tags """

    def find_preferred_uri(uri_list):
        """Pop the preferred URI from uri_list (in place).

        Preference order: LoC subjects, then LoC children's subjects,
        then the first URI. Returns (preferred, remaining_list).
        """
        index = None
        for i, uri in enumerate(uri_list):
            if uri.startswith("<http://id.loc.gov/authorities/subjects/"):
                index = i
                print(uri)
                break
        # Bug fix: the original used `if not index`, which treated a valid
        # match at position 0 as "no match" and kept searching.
        if index is None:
            for i, uri in enumerate(uri_list):
                if uri.startswith(
                        "<http://id.loc.gov/authorities/childrensSubjects/"):
                    index = i
                    print(uri)
                    break
        if index is None:
            index = 0
        return (uri_list.pop(index), uri_list)

    with open(os.path.join(CFG.LOCAL_DATA_PATH, self.local_filename),
              "w") as file_obj:
        file_obj.write(NSM.prefix("turtle"))
        for item in self.uri_list:
            uris = item['uris']['value'].split(",")
            preferred, remainder = find_preferred_uri(uris)
            # (dead `uris = uris[1:]` rebinding removed: the popped list is
            # already returned as `remainder`.)
            for uri in remainder:
                file_obj.write("%s kds:mergeWith %s .\n" % (uri, preferred))
[ "def", "create_same_as_file", "(", "self", ")", ":", "def", "find_preferred_uri", "(", "uri_list", ")", ":", "index", "=", "None", "for", "i", ",", "uri", "in", "enumerate", "(", "uri_list", ")", ":", "if", "uri", ".", "startswith", "(", "\"<http://id.loc....
40.4375
15.46875
def tab(self):
    """
    Advances the cursor position to the next (soft) tabstop.
    """
    column = self._cx // self._cw
    remaining = self.tabstop - (column % self.tabstop)
    for _ in range(remaining):
        self.putch(" ")
[ "def", "tab", "(", "self", ")", ":", "soft_tabs", "=", "self", ".", "tabstop", "-", "(", "(", "self", ".", "_cx", "//", "self", ".", "_cw", ")", "%", "self", ".", "tabstop", ")", "for", "_", "in", "range", "(", "soft_tabs", ")", ":", "self", "....
33.571429
14.714286
def _check_import_source():
    """Check if tlgu imported, if not import it."""
    header = os.path.expanduser(
        '~/cltk_data/greek/software/greek_software_tlgu/tlgu.h')
    if os.path.isfile(header):
        return
    try:
        corpus_importer = CorpusImporter('greek')
        corpus_importer.import_corpus('greek_software_tlgu')
    except Exception as exc:
        logger.error('Failed to import TLGU: %s', exc)
        raise
[ "def", "_check_import_source", "(", ")", ":", "path_rel", "=", "'~/cltk_data/greek/software/greek_software_tlgu/tlgu.h'", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path_rel", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "path", ")", ...
45
15.272727
def make_pkh_output(value, pubkey, witness=False):
    ''' int, bytearray -> TxOut '''
    serialized_value = utils.i2le_padded(value, 8)
    script = make_pkh_output_script(pubkey, witness)
    return _make_output(value=serialized_value, output_script=script)
[ "def", "make_pkh_output", "(", "value", ",", "pubkey", ",", "witness", "=", "False", ")", ":", "return", "_make_output", "(", "value", "=", "utils", ".", "i2le_padded", "(", "value", ",", "8", ")", ",", "output_script", "=", "make_pkh_output_script", "(", ...
31.285714
18.428571
def netstat(name):
    '''
    Retrieve the netstat information of the given process name.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.netstat apache2
    '''
    target = six.text_type(name)
    output = __salt__['cmd.run']("netstat -nap")
    matches = [line for line in output.splitlines()
               if line.find(target) != -1]
    return [target, matches]
[ "def", "netstat", "(", "name", ")", ":", "sanitize_name", "=", "six", ".", "text_type", "(", "name", ")", "netstat_infos", "=", "__salt__", "[", "'cmd.run'", "]", "(", "\"netstat -nap\"", ")", "found_infos", "=", "[", "]", "ret", "=", "[", "]", "for", ...
25
19.947368
def conf_path(self):
    """
    Retrieves the path to the MySQL configuration file.
    """
    from burlap.system import distrib_id, distrib_release
    hostname = self.current_hostname
    if hostname not in self._conf_cache:
        # Seed a per-host default before checking OS-specific overrides.
        self.env.conf_specifics[hostname] = self.env.conf_default
        # Prefer the most specific match: (distro, release) before distro.
        d_id = distrib_id()
        d_release = distrib_release()
        for key in ((d_id, d_release), (d_id,)):
            if key in self.env.conf_specifics:
                self._conf_cache[hostname] = self.env.conf_specifics[key]
        # NOTE(review): if neither key is present in conf_specifics, the
        # hostname default seeded above is never copied into _conf_cache,
        # so the lookup below would raise KeyError — presumably a distro
        # key always exists; verify against conf_specifics contents.
    return self._conf_cache[hostname]
[ "def", "conf_path", "(", "self", ")", ":", "from", "burlap", ".", "system", "import", "distrib_id", ",", "distrib_release", "hostname", "=", "self", ".", "current_hostname", "if", "hostname", "not", "in", "self", ".", "_conf_cache", ":", "self", ".", "env", ...
43.357143
11.642857
def filter_svd(data, lapack_driver='gesdd', modes=[]):
    """ Return the svd-filtered signal using only the selected mode
    Provide the indices of the modes desired
    """
    # Validate the requested mode indices
    mode_idx = np.asarray(modes, dtype=int)
    assert mode_idx.ndim == 1
    assert mode_idx.size >= 1, "No modes selected !"

    u, s, v = scplin.svd(data, full_matrices=False, compute_uv=True,
                         overwrite_a=False, check_finite=True,
                         lapack_driver=lapack_driver)
    # Complement of the selected modes
    rest_idx = np.delete(np.arange(0, s.size), mode_idx)
    data_in = np.dot(u[:, mode_idx] * s[mode_idx], v[mode_idx, :])
    data_out = np.dot(u[:, rest_idx] * s[rest_idx], v[rest_idx, :])
    return data_in, data_out
[ "def", "filter_svd", "(", "data", ",", "lapack_driver", "=", "'gesdd'", ",", "modes", "=", "[", "]", ")", ":", "# Check input", "modes", "=", "np", ".", "asarray", "(", "modes", ",", "dtype", "=", "int", ")", "assert", "modes", ".", "ndim", "==", "1"...
34.35
16.95
def file_fingerprint(fullpath):
    """ Get a metadata fingerprint for a file """
    info = os.stat(fullpath)
    fields = (info.st_ino, info.st_mtime, info.st_size)
    # Falsy (zero) fields are omitted from the fingerprint.
    return ','.join(str(field) for field in fields if field)
[ "def", "file_fingerprint", "(", "fullpath", ")", ":", "stat", "=", "os", ".", "stat", "(", "fullpath", ")", "return", "','", ".", "join", "(", "[", "str", "(", "value", ")", "for", "value", "in", "[", "stat", ".", "st_ino", ",", "stat", ".", "st_mt...
51.5
19.75
def local_path(self, url, filename=None, decompress=False, download=False):
    """
    What will the full local path be if we download the given file?
    """
    if download:
        return self.fetch(url=url, filename=filename, decompress=decompress)
    local_name = self.local_filename(url, filename, decompress)
    return join(self.cache_directory_path, local_name)
[ "def", "local_path", "(", "self", ",", "url", ",", "filename", "=", "None", ",", "decompress", "=", "False", ",", "download", "=", "False", ")", ":", "if", "download", ":", "return", "self", ".", "fetch", "(", "url", "=", "url", ",", "filename", "=",...
45.555556
22.444444
def get_deepest_subsumer(self, list_terms):
    '''
    Returns the labels of the deepest node that subsumes all the terms in
    the list of terms id's provided
    '''
    # With how many of the input terms each nonterminal co-occurs
    occurrence_count = defaultdict(int)
    # Accumulated path position of each nonterminal over all term ids
    # (we want the deepest common node)
    depth_total = defaultdict(int)
    for term_id in list_terms:
        terminal_id = self.terminal_for_term.get(term_id)
        path = self.paths_for_terminal[terminal_id][0]
        print(term_id, path)
        for position, node_id in enumerate(path):
            occurrence_count[node_id] += 1
            depth_total[node_id] += position

    best_node = None
    best_total = 10000
    for node_id, this_total in depth_total.items():
        # Only nonterminals that occur with every term id in the input
        if occurrence_count.get(node_id, -1) == len(list_terms):
            if this_total < best_total:
                best_total = this_total
                best_node = node_id

    label = self.label_for_nonter[best_node] if best_node is not None else None
    return best_node, label
[ "def", "get_deepest_subsumer", "(", "self", ",", "list_terms", ")", ":", "#To store with how many terms every nonterminal appears", "count_per_no_terminal", "=", "defaultdict", "(", "int", ")", "#To store the total deep of each noter for all the term ides (as we want the deepest)", "...
44.741935
20.741935
def add_product_to_product_set(
    self,
    product_set_id,
    product_id,
    location=None,
    project_id=None,
    retry=None,
    timeout=None,
    metadata=None,
):
    """
    For the documentation see:
    :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator`
    """
    client = self.get_conn()
    # Build the fully-qualified resource names the Vision API expects.
    product_name = ProductSearchClient.product_path(project_id, location, product_id)
    product_set_name = ProductSearchClient.product_set_path(project_id, location, product_set_id)
    self.log.info('Add Product[name=%s] to Product Set[name=%s]', product_name, product_set_name)
    client.add_product_to_product_set(
        name=product_set_name, product=product_name, retry=retry, timeout=timeout, metadata=metadata
    )
    self.log.info('Product added to Product Set')
[ "def", "add_product_to_product_set", "(", "self", ",", "product_set_id", ",", "product_id", ",", "location", "=", "None", ",", "project_id", "=", "None", ",", "retry", "=", "None", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ",", ")", ":", ...
34.423077
27.807692
def cherrypy_server_runner(
    app, global_conf=None, host='127.0.0.1', port=None,
    ssl_pem=None, protocol_version=None, numthreads=None,
    server_name=None, max=None, request_queue_size=None,
    timeout=None
):  # pragma: no cover
    """
    Entry point for CherryPy's WSGI server

    Serves the specified WSGI app via CherryPyWSGIServer.

    ``app``

        The WSGI 'application callable'; multiple WSGI applications
        may be passed as (script_name, callable) pairs.

    ``host``

        This is the ipaddress to bind to (or a hostname if your
        nameserver is properly configured).  This defaults to
        127.0.0.1, which is not a public interface.

    ``port``

        The port to run on, defaults to 8080 for HTTP, or 4443 for
        HTTPS. This can be a string or an integer value.

    ``ssl_pem``

        This an optional SSL certificate file (via OpenSSL) You can
        generate a self-signed test PEM certificate file as follows:

            $ openssl genrsa 1024 > host.key
            $ chmod 400 host.key
            $ openssl req -new -x509 -nodes -sha1 -days 365  \\
                          -key host.key > host.cert
            $ cat host.cert host.key > host.pem
            $ chmod 400 host.pem

    ``protocol_version``

        The protocol used by the server, by default ``HTTP/1.1``.

    ``numthreads``

        The number of worker threads to create.

    ``server_name``

        The string to set for WSGI's SERVER_NAME environ entry.

    ``max``

        The maximum number of queued requests. (defaults to -1 = no
        limit).

    ``request_queue_size``

        The 'backlog' argument to socket.listen(); specifies the
        maximum number of queued connections.

    ``timeout``

        The timeout in seconds for accepted connections.
    """
    is_ssl = False
    if ssl_pem:
        port = port or 4443
        is_ssl = True

    if not port:
        # Allow "host:port" in the host argument.
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            port = 8080
    bind_addr = (host, int(port))

    # Forward only the tuning options that were explicitly provided.
    kwargs = {}
    for var_name in ('numthreads', 'max', 'request_queue_size', 'timeout'):
        var = locals()[var_name]
        if var is not None:
            kwargs[var_name] = int(var)

    server = None
    try:
        # Try to import from newer CherryPy releases.
        import cheroot.wsgi as wsgiserver
        server = wsgiserver.Server(bind_addr, app,
                                   server_name=server_name, **kwargs)
    except ImportError:
        # Nope. Try to import from older CherryPy releases.
        # We might just take another ImportError here. Oh well.
        from cherrypy import wsgiserver
        server = wsgiserver.CherryPyWSGIServer(bind_addr, app,
                                               server_name=server_name,
                                               **kwargs)

    server.ssl_certificate = server.ssl_private_key = ssl_pem
    if protocol_version:
        server.protocol = protocol_version

    try:
        protocol = is_ssl and 'https' or 'http'
        if host == '0.0.0.0':
            print('serving on 0.0.0.0:%s view at %s://127.0.0.1:%s' %
                  (port, protocol, port))
        else:
            print('serving on %s://%s:%s' % (protocol, host, port))
        # Blocks until interrupted; stop the server cleanly on Ctrl-C/exit.
        server.start()
    except (KeyboardInterrupt, SystemExit):
        server.stop()

    return server
[ "def", "cherrypy_server_runner", "(", "app", ",", "global_conf", "=", "None", ",", "host", "=", "'127.0.0.1'", ",", "port", "=", "None", ",", "ssl_pem", "=", "None", ",", "protocol_version", "=", "None", ",", "numthreads", "=", "None", ",", "server_name", ...
28.571429
22.857143
def get_shared_people(self):
    """Retrieves all people that share their location with this account"""
    output = self._get_data()
    self._logger.debug(output)
    entries = output[0] or []
    people = []
    for info in entries:
        try:
            people.append(Person(info))
        except InvalidData:
            self._logger.debug('Missing location or other info, dropping person with info: %s', info)
    return people
[ "def", "get_shared_people", "(", "self", ")", ":", "people", "=", "[", "]", "output", "=", "self", ".", "_get_data", "(", ")", "self", ".", "_logger", ".", "debug", "(", "output", ")", "shared_entries", "=", "output", "[", "0", "]", "or", "[", "]", ...
40.25
14.25
def min_abs(self):
    '''Returns minimum absolute value.

    :raises ArgumentError: if the set is empty.
    '''
    if self.__len__() == 0:
        # Bug fix: the error was previously *returned* instead of raised,
        # silently handing an exception instance back to the caller.
        raise ArgumentError('empty set has no minimum absolute value.')
    if self.contains(0):
        return 0
    # Minimum magnitude is attained at one of the two interval endpoints
    # closest to zero: the largest negative or the smallest positive value.
    candidates = [val for val in [self.max_neg(), self.min_pos()]
                  if val is not None]
    return numpy.min([numpy.abs(val) for val in candidates])
[ "def", "min_abs", "(", "self", ")", ":", "if", "self", ".", "__len__", "(", ")", "==", "0", ":", "return", "ArgumentError", "(", "'empty set has no minimum absolute value.'", ")", "if", "self", ".", "contains", "(", "0", ")", ":", "return", "0", "return", ...
41.444444
15.444444
def draw(self): """ Draw the elbow curve for the specified scores and values of K. """ # Plot the silhouette score against k self.ax.plot(self.k_values_, self.k_scores_, marker="D") if self.locate_elbow and self.elbow_value_!=None: elbow_label = "$elbow\ at\ k={}, score={:0.3f}$".format(self.elbow_value_, self.elbow_score_) self.ax.axvline(self.elbow_value_, c=LINE_COLOR, linestyle="--", label=elbow_label) # If we're going to plot the timings, create a twinx axis if self.timings: self.axes = [self.ax, self.ax.twinx()] self.axes[1].plot( self.k_values_, self.k_timers_, label="fit time", c='g', marker="o", linestyle="--", alpha=0.75, ) return self.ax
[ "def", "draw", "(", "self", ")", ":", "# Plot the silhouette score against k", "self", ".", "ax", ".", "plot", "(", "self", ".", "k_values_", ",", "self", ".", "k_scores_", ",", "marker", "=", "\"D\"", ")", "if", "self", ".", "locate_elbow", "and", "self",...
40.8
24.1
def _init_oauth(self, oauth_token, oauth_token_secret): "Store and initialize a verified set of OAuth credentials" self.oauth_token = oauth_token self.oauth_token_secret = oauth_token_secret self._oauth = OAuth1( self.consumer_key, client_secret=self.consumer_secret, resource_owner_key=self.oauth_token, resource_owner_secret=self.oauth_token_secret, rsa_key=self.rsa_key, signature_method=self._signature_method )
[ "def", "_init_oauth", "(", "self", ",", "oauth_token", ",", "oauth_token_secret", ")", ":", "self", ".", "oauth_token", "=", "oauth_token", "self", ".", "oauth_token_secret", "=", "oauth_token_secret", "self", ".", "_oauth", "=", "OAuth1", "(", "self", ".", "c...
39.692308
15.230769
def make_published(self, request, queryset): """ Marks selected news items as published """ rows_updated = queryset.update(is_published=True) self.message_user(request, ungettext('%(count)d newsitem was published', '%(count)d newsitems were published', rows_updated) % {'count': rows_updated})
[ "def", "make_published", "(", "self", ",", "request", ",", "queryset", ")", ":", "rows_updated", "=", "queryset", ".", "update", "(", "is_published", "=", "True", ")", "self", ".", "message_user", "(", "request", ",", "ungettext", "(", "'%(count)d newsitem was...
47.444444
15.222222
def find_class(self, name): """Find the Class by its name.""" defclass = lib.EnvFindDefclass(self._env, name.encode()) if defclass == ffi.NULL: raise LookupError("Class '%s' not found" % name) return Class(self._env, defclass)
[ "def", "find_class", "(", "self", ",", "name", ")", ":", "defclass", "=", "lib", ".", "EnvFindDefclass", "(", "self", ".", "_env", ",", "name", ".", "encode", "(", ")", ")", "if", "defclass", "==", "ffi", ".", "NULL", ":", "raise", "LookupError", "("...
37.857143
15.142857
def run(self): ''' Run the api ''' ui = salt.spm.SPMCmdlineInterface() self.parse_args() self.setup_logfile_logger() v_dirs = [ self.config['spm_cache_dir'], ] verify_env(v_dirs, self.config['user'], root_dir=self.config['root_dir'], ) verify_log(self.config) client = salt.spm.SPMClient(ui, self.config) client.run(self.args)
[ "def", "run", "(", "self", ")", ":", "ui", "=", "salt", ".", "spm", ".", "SPMCmdlineInterface", "(", ")", "self", ".", "parse_args", "(", ")", "self", ".", "setup_logfile_logger", "(", ")", "v_dirs", "=", "[", "self", ".", "config", "[", "'spm_cache_di...
27.941176
15.352941
def get_storage_conn(storage_account=None, storage_key=None, conn_kwargs=None): ''' .. versionadded:: 2015.8.0 Return a storage_conn object for the storage account ''' if conn_kwargs is None: conn_kwargs = {} if not storage_account: storage_account = config.get_cloud_config_value( 'storage_account', get_configured_provider(), __opts__, search_global=False, default=conn_kwargs.get('storage_account', None) ) if not storage_key: storage_key = config.get_cloud_config_value( 'storage_key', get_configured_provider(), __opts__, search_global=False, default=conn_kwargs.get('storage_key', None) ) return azure.storage.BlobService(storage_account, storage_key)
[ "def", "get_storage_conn", "(", "storage_account", "=", "None", ",", "storage_key", "=", "None", ",", "conn_kwargs", "=", "None", ")", ":", "if", "conn_kwargs", "is", "None", ":", "conn_kwargs", "=", "{", "}", "if", "not", "storage_account", ":", "storage_ac...
35.545455
22.909091
def add(self, command, response): """ Register a command/response pair. The command may be either a string (which is then automatically compiled into a regular expression), or a pre-compiled regular expression object. If the given response handler is a string, it is sent as the response to any command that matches the given regular expression. If the given response handler is a function, it is called with the command passed as an argument. :type command: str|regex :param command: A string or a compiled regular expression. :type response: function|str :param response: A reponse, or a response handler. """ command = re.compile(command) self.response_list.append((command, response))
[ "def", "add", "(", "self", ",", "command", ",", "response", ")", ":", "command", "=", "re", ".", "compile", "(", "command", ")", "self", ".", "response_list", ".", "append", "(", "(", "command", ",", "response", ")", ")" ]
40.1
18.4
def _delete_vdev_info(self, vdev): """handle udev rules file.""" vdev = vdev.lower() rules_file_name = '/etc/udev/rules.d/51-qeth-0.0.%s.rules' % vdev cmd = 'rm -f %s\n' % rules_file_name address = '0.0.%s' % str(vdev).zfill(4) udev_file_name = '/etc/udev/rules.d/70-persistent-net.rules' cmd += "sed -i '/%s/d' %s\n" % (address, udev_file_name) cmd += "sed -i '/%s/d' %s\n" % (address, '/boot/zipl/active_devices.txt') return cmd
[ "def", "_delete_vdev_info", "(", "self", ",", "vdev", ")", ":", "vdev", "=", "vdev", ".", "lower", "(", ")", "rules_file_name", "=", "'/etc/udev/rules.d/51-qeth-0.0.%s.rules'", "%", "vdev", "cmd", "=", "'rm -f %s\\n'", "%", "rules_file_name", "address", "=", "'0...
44.333333
18.083333
def ErrorMessage(text, **kwargs): """Show an error message dialog to the user. This will raise a Zenity Error Dialog with a description of the error. text - A description of the error. kwargs - Optional command line parameters for Zenity such as height, width, etc.""" args = ['--text=%s' % text] for generic_args in kwargs_helper(kwargs): args.append('--%s=%s' % generic_args) run_zenity('--error', *args).wait()
[ "def", "ErrorMessage", "(", "text", ",", "*", "*", "kwargs", ")", ":", "args", "=", "[", "'--text=%s'", "%", "text", "]", "for", "generic_args", "in", "kwargs_helper", "(", "kwargs", ")", ":", "args", ".", "append", "(", "'--%s=%s'", "%", "generic_args",...
32.928571
17.714286
def simplefenestration(idf, fsd, deletebsd=True, setto000=False): """convert a bsd (fenestrationsurface:detailed) into a simple fenestrations""" funcs = (window, door, glazeddoor,) for func in funcs: fenestration = func(idf, fsd, deletebsd=deletebsd, setto000=setto000) if fenestration: return fenestration return None
[ "def", "simplefenestration", "(", "idf", ",", "fsd", ",", "deletebsd", "=", "True", ",", "setto000", "=", "False", ")", ":", "funcs", "=", "(", "window", ",", "door", ",", "glazeddoor", ",", ")", "for", "func", "in", "funcs", ":", "fenestration", "=", ...
33.909091
17.909091
def get_prepopulated_value(field, instance): """ Returns preliminary value based on `populate_from`. """ if hasattr(field.populate_from, '__call__'): # AutoSlugField(populate_from=lambda instance: ...) return field.populate_from(instance) else: # AutoSlugField(populate_from='foo') attr = getattr(instance, field.populate_from) return callable(attr) and attr() or attr
[ "def", "get_prepopulated_value", "(", "field", ",", "instance", ")", ":", "if", "hasattr", "(", "field", ".", "populate_from", ",", "'__call__'", ")", ":", "# AutoSlugField(populate_from=lambda instance: ...)", "return", "field", ".", "populate_from", "(", "instance",...
38
9.636364
def create_user(self, user_name, initial_password): """Create a new user with an initial password via provisioning API. It is not an error, if the user already existed before. If you get back an error 999, then the provisioning API is not enabled. :param user_name: name of user to be created :param initial_password: password for user being created :returns: True on success :raises: HTTPResponseError in case an HTTP error status was returned """ res = self._make_ocs_request( 'POST', self.OCS_SERVICE_CLOUD, 'users', data={'password': initial_password, 'userid': user_name} ) # We get 200 when the user was just created. if res.status_code == 200: tree = ET.fromstring(res.content) self._check_ocs_status(tree, [100]) return True raise HTTPResponseError(res)
[ "def", "create_user", "(", "self", ",", "user_name", ",", "initial_password", ")", ":", "res", "=", "self", ".", "_make_ocs_request", "(", "'POST'", ",", "self", ".", "OCS_SERVICE_CLOUD", ",", "'users'", ",", "data", "=", "{", "'password'", ":", "initial_pas...
37.24
18.92
def decompress(ctype, unc_len, data): """Decompress data. Arguments: Int:ctype -- Compression type LZO, ZLIB (*currently unused*). Int:unc_len -- Uncompressed data lenth. Str:data -- Data to be uncompessed. Returns: Uncompressed Data. """ if ctype == UBIFS_COMPR_LZO: try: return lzo.decompress(b''.join((b'\xf0', struct.pack('>I', unc_len), data))) except Exception as e: error(decompress, 'Warn', 'LZO Error: %s' % e) elif ctype == UBIFS_COMPR_ZLIB: try: return zlib.decompress(data, -11) except Exception as e: error(decompress, 'Warn', 'ZLib Error: %s' % e) else: return data
[ "def", "decompress", "(", "ctype", ",", "unc_len", ",", "data", ")", ":", "if", "ctype", "==", "UBIFS_COMPR_LZO", ":", "try", ":", "return", "lzo", ".", "decompress", "(", "b''", ".", "join", "(", "(", "b'\\xf0'", ",", "struct", ".", "pack", "(", "'>...
30.391304
18.304348
def save(self, fname: str): """ Saves the dataset to a binary .npy file. """ mx.nd.save(fname, self.source + self.target + self.label)
[ "def", "save", "(", "self", ",", "fname", ":", "str", ")", ":", "mx", ".", "nd", ".", "save", "(", "fname", ",", "self", ".", "source", "+", "self", ".", "target", "+", "self", ".", "label", ")" ]
32.4
9.2
def resource(self, uri, methods=frozenset({'GET'}), **kwargs): """ Decorates a function to be registered as a resource route. :param uri: path of the URL :param methods: list or tuple of methods allowed :param host: :param strict_slashes: :param stream: :param version: :param name: user defined route name for url_for :param filters: List of callable that will filter request and response data :param validators: List of callable added to the filter list. :return: A decorated function """ def decorator(f): if kwargs.get('stream'): f.is_stream = kwargs['stream'] self.add_resource(f, uri=uri, methods=methods, **kwargs) return decorator
[ "def", "resource", "(", "self", ",", "uri", ",", "methods", "=", "frozenset", "(", "{", "'GET'", "}", ")", ",", "*", "*", "kwargs", ")", ":", "def", "decorator", "(", "f", ")", ":", "if", "kwargs", ".", "get", "(", "'stream'", ")", ":", "f", "....
34.913043
17.521739
def docker_environment(env): """ Transform dictionary of environment variables into Docker -e parameters. >>> result = docker_environment({'param1': 'val1', 'param2': 'val2'}) >>> result in ['-e "param1=val1" -e "param2=val2"', '-e "param2=val2" -e "param1=val1"'] True """ return ' '.join( ["-e \"%s=%s\"" % (key, value.replace("$", "\\$").replace("\"", "\\\"").replace("`", "\\`")) for key, value in env.items()])
[ "def", "docker_environment", "(", "env", ")", ":", "return", "' '", ".", "join", "(", "[", "\"-e \\\"%s=%s\\\"\"", "%", "(", "key", ",", "value", ".", "replace", "(", "\"$\"", ",", "\"\\\\$\"", ")", ".", "replace", "(", "\"\\\"\"", ",", "\"\\\\\\\"\"", "...
41
25.909091
def options(self, context, module_options): ''' ACTION Enable/Disable RDP (choices: enable, disable) ''' if not 'ACTION' in module_options: context.log.error('ACTION option not specified!') exit(1) if module_options['ACTION'].lower() not in ['enable', 'disable']: context.log.error('Invalid value for ACTION option!') exit(1) self.action = module_options['ACTION'].lower()
[ "def", "options", "(", "self", ",", "context", ",", "module_options", ")", ":", "if", "not", "'ACTION'", "in", "module_options", ":", "context", ".", "log", ".", "error", "(", "'ACTION option not specified!'", ")", "exit", "(", "1", ")", "if", "module_option...
33.071429
24.5
def __get_conn(**kwargs): ''' Detects what type of dom this node is and attempts to connect to the correct hypervisor via libvirt. :param connection: libvirt connection URI, overriding defaults :param username: username to connect with, overriding defaults :param password: password to connect with, overriding defaults ''' # This has only been tested on kvm and xen, it needs to be expanded to # support all vm layers supported by libvirt # Connection string works on bhyve, but auth is not tested. username = kwargs.get('username', None) password = kwargs.get('password', None) conn_str = kwargs.get('connection', None) if not conn_str: conn_str = __salt__['config.get']('virt.connect', None) if conn_str is not None: salt.utils.versions.warn_until( 'Sodium', '\'virt.connect\' configuration property has been deprecated in favor ' 'of \'virt:connection:uri\'. \'virt.connect\' will stop being used in ' '{version}.' ) else: conn_str = __salt__['config.get']('libvirt:connection', None) if conn_str is not None: salt.utils.versions.warn_until( 'Sodium', '\'libvirt.connection\' configuration property has been deprecated in favor ' 'of \'virt:connection:uri\'. \'libvirt.connection\' will stop being used in ' '{version}.' ) conn_str = __salt__['config.get']('virt:connection:uri', conn_str) hypervisor = __salt__['config.get']('libvirt:hypervisor', None) if hypervisor is not None: salt.utils.versions.warn_until( 'Sodium', '\'libvirt.hypervisor\' configuration property has been deprecated. ' 'Rather use the \'virt:connection:uri\' to properly define the libvirt ' 'URI or alias of the host to connect to. \'libvirt:hypervisor\' will ' 'stop being used in {version}.' ) if hypervisor == 'esxi' and conn_str is None: salt.utils.versions.warn_until( 'Sodium', 'esxi hypervisor default with no default connection URI detected, ' 'please set \'virt:connection:uri\' to \'esx\' for keep the legacy ' 'behavior. 
Will default to libvirt guess once \'libvirt:hypervisor\' ' 'configuration is removed in {version}.' ) conn_str = 'esx' try: auth_types = [libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_NOECHOPROMPT, libvirt.VIR_CRED_ECHOPROMPT, libvirt.VIR_CRED_PASSPHRASE, libvirt.VIR_CRED_EXTERNAL] conn = libvirt.openAuth(conn_str, [auth_types, __get_request_auth(username, password), None], 0) except Exception: raise CommandExecutionError( 'Sorry, {0} failed to open a connection to the hypervisor ' 'software at {1}'.format( __grains__['fqdn'], conn_str ) ) return conn
[ "def", "__get_conn", "(", "*", "*", "kwargs", ")", ":", "# This has only been tested on kvm and xen, it needs to be expanded to", "# support all vm layers supported by libvirt", "# Connection string works on bhyve, but auth is not tested.", "username", "=", "kwargs", ".", "get", "(",...
41.689189
23.608108
def misalignment(self,isotropic=False,**kwargs): """ NAME: misalignment PURPOSE: calculate the misalignment between the progenitor's frequency and the direction along which the stream disrupts INPUT: isotropic= (False), if True, return the misalignment assuming an isotropic action distribution OUTPUT: misalignment in rad HISTORY: 2013-12-05 - Written - Bovy (IAS) 2017-10-28 - Changed output unit to rad - Bovy (UofT) """ warnings.warn("In versions >1.3, the output unit of streamdf.misalignment has been changed to radian (from degree before)",galpyWarning) if isotropic: dODir= self._dOdJpEig[1][:,numpy.argmax(numpy.fabs(self._dOdJpEig[0]))] else: dODir= self._dsigomeanProgDirection out= numpy.arccos(numpy.sum(self._progenitor_Omega*dODir)/numpy.sqrt(numpy.sum(self._progenitor_Omega**2.))) if out > numpy.pi/2.: return out-numpy.pi else: return out
[ "def", "misalignment", "(", "self", ",", "isotropic", "=", "False", ",", "*", "*", "kwargs", ")", ":", "warnings", ".", "warn", "(", "\"In versions >1.3, the output unit of streamdf.misalignment has been changed to radian (from degree before)\"", ",", "galpyWarning", ")", ...
30.558824
30.794118
def run(self): """ Open a connection over the serial line and receive data lines """ if not self.device: return try: data = "" while (self.do_run): try: if (self.device.inWaiting() > 1): l = self.device.readline()[:-2] l = l.decode("UTF-8") if (l == "["): # start recording data = "[" elif (l == "]") and (len(data) > 4) and (data[0] == "["): # now parse the input data = data + "]" self.store.register_json(data) self.age() elif (l[0:3] == " {"): # this is a data line data = data + " " + l else: # this is a slow interface - give it some time sleep(1) # then count down.. self.age() except (UnicodeDecodeError, ValueError): # only accepting unicode: throw away the whole bunch data = "" # and count down the exit condition self.age() except serial.serialutil.SerialException: print("Could not connect to the serial line at " + self.device_name)
[ "def", "run", "(", "self", ")", ":", "if", "not", "self", ".", "device", ":", "return", "try", ":", "data", "=", "\"\"", "while", "(", "self", ".", "do_run", ")", ":", "try", ":", "if", "(", "self", ".", "device", ".", "inWaiting", "(", ")", ">...
39.473684
15.315789
def summary_engine(**kwargs): """engine to extract summary data""" logger.debug("summary_engine") # farms = kwargs["farms"] farms = [] experiments = kwargs["experiments"] for experiment in experiments: if experiment.selected_summaries is None: selected_summaries = [ "discharge_capacity", "charge_capacity", "coulombic_efficiency", "cumulated_coulombic_efficiency", "ir_discharge", "ir_charge", "end_voltage_discharge", "end_voltage_charge", ] else: selected_summaries = experiment.selected_summaries farm = helper.join_summaries( experiment.summary_frames, selected_summaries ) farms.append(farm) barn = "batch_dir" return farms, barn
[ "def", "summary_engine", "(", "*", "*", "kwargs", ")", ":", "logger", ".", "debug", "(", "\"summary_engine\"", ")", "# farms = kwargs[\"farms\"]", "farms", "=", "[", "]", "experiments", "=", "kwargs", "[", "\"experiments\"", "]", "for", "experiment", "in", "ex...
30.518519
15.555556
def raise_for_version(self, line: str, position: int, version: str) -> None: """Check that a version string is valid for BEL documents. This means it's either in the YYYYMMDD or semantic version format. :param line: The line being parsed :param position: The position in the line being parsed :param str version: A version string :raises: VersionFormatWarning """ if valid_date_version(version): return if not SEMANTIC_VERSION_STRING_RE.match(version): raise VersionFormatWarning(self.get_line_number(), line, position, version)
[ "def", "raise_for_version", "(", "self", ",", "line", ":", "str", ",", "position", ":", "int", ",", "version", ":", "str", ")", "->", "None", ":", "if", "valid_date_version", "(", "version", ")", ":", "return", "if", "not", "SEMANTIC_VERSION_STRING_RE", "....
40.866667
20.533333
def save_formset_with_author(formset, user): """ Проставляет моделям из набора форм автора :param formset: набор форм :param user: автор :return: """ instances = formset.save(commit=False) for obj in formset.deleted_objects: obj.delete() for instance in instances: if user.is_authenticated() and hasattr(instance, 'author') and not instance.author: instance.author = user instance.save() formset.save_m2m()
[ "def", "save_formset_with_author", "(", "formset", ",", "user", ")", ":", "instances", "=", "formset", ".", "save", "(", "commit", "=", "False", ")", "for", "obj", "in", "formset", ".", "deleted_objects", ":", "obj", ".", "delete", "(", ")", "for", "inst...
31.2
12.666667
def new_status(self, new_status): """ Sets the new_status of this BuildSetStatusChangedEvent. :param new_status: The new_status of this BuildSetStatusChangedEvent. :type: str """ allowed_values = ["NEW", "DONE", "REJECTED"] if new_status not in allowed_values: raise ValueError( "Invalid value for `new_status` ({0}), must be one of {1}" .format(new_status, allowed_values) ) self._new_status = new_status
[ "def", "new_status", "(", "self", ",", "new_status", ")", ":", "allowed_values", "=", "[", "\"NEW\"", ",", "\"DONE\"", ",", "\"REJECTED\"", "]", "if", "new_status", "not", "in", "allowed_values", ":", "raise", "ValueError", "(", "\"Invalid value for `new_status` (...
34.2
18.066667
def simulationStep(step=0): """ Make a simulation step and simulate up to the given millisecond in sim time. If the given value is 0 or absent, exactly one step is performed. Values smaller than or equal to the current sim time result in no action. """ global _stepListeners responses = _connections[""].simulationStep(step) for listener in _stepListeners: listener.step(step) return responses
[ "def", "simulationStep", "(", "step", "=", "0", ")", ":", "global", "_stepListeners", "responses", "=", "_connections", "[", "\"\"", "]", ".", "simulationStep", "(", "step", ")", "for", "listener", "in", "_stepListeners", ":", "listener", ".", "step", "(", ...
38.818182
16.818182
def on_commit(self, changes): """Method that gets called when a model is changed. This serves to do the actual index writing. """ if _get_config(self)['enable_indexing'] is False: return None for wh in self.whoosheers: if not wh.auto_update: continue writer = None for change in changes: if change[0].__class__ in wh.models: method_name = '{0}_{1}'.format(change[1], change[0].__class__.__name__.lower()) method = getattr(wh, method_name, None) if method: if not writer: writer = type(self).get_or_create_index(_get_app(self), wh).\ writer(timeout=_get_config(self)['writer_timeout']) method(writer, change[0]) if writer: writer.commit()
[ "def", "on_commit", "(", "self", ",", "changes", ")", ":", "if", "_get_config", "(", "self", ")", "[", "'enable_indexing'", "]", "is", "False", ":", "return", "None", "for", "wh", "in", "self", ".", "whoosheers", ":", "if", "not", "wh", ".", "auto_upda...
42.409091
16.590909
def utc_offset_by_timezone(timezone_name): """Returns the UTC offset of the given timezone in hours. Arguments --------- timezone_name: str A string with a name of a timezone. Returns ------- int The UTC offset of the given timezone, in hours. """ return int(pytz.timezone(timezone_name).utcoffset( utc_time()).total_seconds()/SECONDS_IN_HOUR)
[ "def", "utc_offset_by_timezone", "(", "timezone_name", ")", ":", "return", "int", "(", "pytz", ".", "timezone", "(", "timezone_name", ")", ".", "utcoffset", "(", "utc_time", "(", ")", ")", ".", "total_seconds", "(", ")", "/", "SECONDS_IN_HOUR", ")" ]
26.066667
19.266667
def build_related_articles(related_articles): """ Given parsed data build a list of related article objects """ article_list = [] for related_article in related_articles: article = ea.RelatedArticle() if related_article.get('xlink_href'): article.xlink_href = related_article.get('xlink_href') if related_article.get('related_article_type'): article.related_article_type = related_article.get('related_article_type') if related_article.get('ext_link_type'): article.ext_link_type = related_article.get('ext_link_type') # Append it to our list article_list.append(article) return article_list
[ "def", "build_related_articles", "(", "related_articles", ")", ":", "article_list", "=", "[", "]", "for", "related_article", "in", "related_articles", ":", "article", "=", "ea", ".", "RelatedArticle", "(", ")", "if", "related_article", ".", "get", "(", "'xlink_h...
36
17.578947
def translate_docs_compact(self, ds, field_mapping=None, slim=None, map_identifiers=None, invert_subject_object=False, **kwargs): """ Translate golr association documents to a compact representation """ amap = {} logging.info("Translating docs to compact form. Slim={}".format(slim)) for d in ds: self.map_doc(d, field_mapping, invert_subject_object=invert_subject_object) subject = d[M.SUBJECT] subject_label = d[M.SUBJECT_LABEL] # TODO: use a more robust method; we need equivalence as separate field in solr if map_identifiers is not None: if M.SUBJECT_CLOSURE in d: subject = self.map_id(subject, map_identifiers, d[M.SUBJECT_CLOSURE]) else: logging.debug("NO SUBJECT CLOSURE IN: "+str(d)) rel = d.get(M.RELATION) skip = False # TODO if rel == 'not' or rel == 'NOT': skip = True # this is a list in GO if isinstance(rel,list): if 'not' in rel or 'NOT' in rel: skip = True if len(rel) > 1: logging.warn(">1 relation: {}".format(rel)) rel = ";".join(rel) if skip: logging.debug("Skipping: {}".format(d)) continue subject = self.make_canonical_identifier(subject) #if subject.startswith('MGI:MGI:'): # subject = subject.replace('MGI:MGI:','MGI:') k = (subject,rel) if k not in amap: amap[k] = {'subject':subject, 'subject_label':subject_label, 'relation':rel, 'objects': []} if slim is not None and len(slim)>0: mapped_objects = [x for x in d[M.OBJECT_CLOSURE] if x in slim] logging.debug("Mapped objects: {}".format(mapped_objects)) amap[k]['objects'] += mapped_objects else: amap[k]['objects'].append(d[M.OBJECT]) for k in amap.keys(): amap[k]['objects'] = list(set(amap[k]['objects'])) return list(amap.values())
[ "def", "translate_docs_compact", "(", "self", ",", "ds", ",", "field_mapping", "=", "None", ",", "slim", "=", "None", ",", "map_identifiers", "=", "None", ",", "invert_subject_object", "=", "False", ",", "*", "*", "kwargs", ")", ":", "amap", "=", "{", "}...
38.689655
20.862069
def get_feature_names(self): """Get feature names. Returns ------- feature_names : list of strings Names of the features produced by transform. """ return ['temperature', 'pressure'] + [f'solvent.{x}' for x in range(1, self.max_solvents + 1)] + \ [f'solvent_amount.{x}' for x in range(1, self.max_solvents + 1)]
[ "def", "get_feature_names", "(", "self", ")", ":", "return", "[", "'temperature'", ",", "'pressure'", "]", "+", "[", "f'solvent.{x}'", "for", "x", "in", "range", "(", "1", ",", "self", ".", "max_solvents", "+", "1", ")", "]", "+", "[", "f'solvent_amount....
37.8
22.4
def open_state_machine(path=None, recent_opened_notification=False): """ Open a state machine from respective file system path :param str path: file system path to the state machine :param bool recent_opened_notification: flags that indicates that this call also should update recently open :rtype rafcon.core.state_machine.StateMachine :return: opened state machine """ start_time = time.time() if path is None: if interface.open_folder_func is None: logger.error("No function defined for opening a folder") return load_path = interface.open_folder_func("Please choose the folder of the state machine") if load_path is None: return else: load_path = path if state_machine_manager.is_state_machine_open(load_path): logger.info("State machine already open. Select state machine instance from path {0}.".format(load_path)) sm = state_machine_manager.get_open_state_machine_of_file_system_path(load_path) gui_helper_state.gui_singletons.state_machine_manager_model.selected_state_machine_id = sm.state_machine_id return state_machine_manager.get_open_state_machine_of_file_system_path(load_path) state_machine = None try: state_machine = storage.load_state_machine_from_path(load_path) state_machine_manager.add_state_machine(state_machine) if recent_opened_notification: global_runtime_config.update_recently_opened_state_machines_with(state_machine) duration = time.time() - start_time stat = state_machine.root_state.get_states_statistics(0) logger.info("It took {0:.2}s to load {1} states with {2} hierarchy levels.".format(duration, stat[0], stat[1])) except (AttributeError, ValueError, IOError) as e: logger.error('Error while trying to open state machine: {0}'.format(e)) return state_machine
[ "def", "open_state_machine", "(", "path", "=", "None", ",", "recent_opened_notification", "=", "False", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "if", "path", "is", "None", ":", "if", "interface", ".", "open_folder_func", "is", "None", "...
48.410256
29.589744
def summary_pb(self): """Create a top-level experiment summary describing this experiment. The resulting summary should be written to a log directory that encloses all the individual sessions' log directories. Analogous to the low-level `experiment_pb` function in the `hparams.summary` module. """ hparam_infos = [] for hparam in self._hparams: info = api_pb2.HParamInfo( name=hparam.name, description=hparam.description, display_name=hparam.display_name, ) domain = hparam.domain if domain is not None: domain.update_hparam_info(info) hparam_infos.append(info) metric_infos = [metric.as_proto() for metric in self._metrics] return summary.experiment_pb( hparam_infos=hparam_infos, metric_infos=metric_infos, user=self._user, description=self._description, time_created_secs=self._time_created_secs, )
[ "def", "summary_pb", "(", "self", ")", ":", "hparam_infos", "=", "[", "]", "for", "hparam", "in", "self", ".", "_hparams", ":", "info", "=", "api_pb2", ".", "HParamInfo", "(", "name", "=", "hparam", ".", "name", ",", "description", "=", "hparam", ".", ...
33.178571
14.428571
def multi_assoc(self, values): '''Return a new tree with multiple values associated. The parameter values can either be a dictionary mapping indices to values, or a list of (index,value) tuples''' if isinstance(values, dict): nndict = dict([(i, LookupTreeNode(i, values[i])) for i in values]) else: nndict = dict([(i, LookupTreeNode(i, val)) for (i,val) in values]) newtree = LookupTree() newtree.root = _multi_assoc_down(self.root, nndict, 0) return newtree
[ "def", "multi_assoc", "(", "self", ",", "values", ")", ":", "if", "isinstance", "(", "values", ",", "dict", ")", ":", "nndict", "=", "dict", "(", "[", "(", "i", ",", "LookupTreeNode", "(", "i", ",", "values", "[", "i", "]", ")", ")", "for", "i", ...
48.727273
22.363636
def get_assessments(self): """Gets any assessments associated with this activity. return: (osid.assessment.AssessmentList) - list of assessments raise: IllegalState - ``is_assessment_based_activity()`` is ``false`` raise: OperationFailed - unable to complete request *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.learning.Activity.get_assets_template if not bool(self._my_map['assessmentIds']): raise errors.IllegalState('no assessmentIds') mgr = self._get_provider_manager('ASSESSMENT') if not mgr.supports_assessment_lookup(): raise errors.OperationFailed('Assessment does not support Assessment lookup') # What about the Proxy? lookup_session = mgr.get_assessment_lookup_session(proxy=getattr(self, "_proxy", None)) lookup_session.use_federated_bank_view() return lookup_session.get_assessments_by_ids(self.get_assessment_ids())
[ "def", "get_assessments", "(", "self", ")", ":", "# Implemented from template for osid.learning.Activity.get_assets_template", "if", "not", "bool", "(", "self", ".", "_my_map", "[", "'assessmentIds'", "]", ")", ":", "raise", "errors", ".", "IllegalState", "(", "'no as...
48.714286
24.095238