text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def remove_monitor(self, handle):
    """Unregister a monitor that was previously added.

    See :meth:`AbstractDeviceAdapter.adjust_monitor`.

    If a notification pass is currently in progress, the removal is
    queued and applied once the pass finishes; otherwise it is applied
    immediately.
    """
    action = (handle, "delete", None, None)
    if not self._currently_notifying:
        self._adjust_monitor_internal(*action)
    else:
        # Defer: mutating the monitor table mid-notification is unsafe.
        self._deferred_adjustments.append(action)
[ "def", "remove_monitor", "(", "self", ",", "handle", ")", ":", "action", "=", "(", "handle", ",", "\"delete\"", ",", "None", ",", "None", ")", "if", "self", ".", "_currently_notifying", ":", "self", ".", "_deferred_adjustments", ".", "append", "(", "action...
31.909091
14.909091
def get_scenfit(instance, OS, FP, FC, EP):
    '''returns the scenfit of data and model described by the
    ``TermSet`` object [instance].
    '''
    # Base semantics, optionally extended by the boolean flags.
    sem = [sign_cons_prg, bwd_prop_prg]
    for flag, program in ((OS, one_state_prg), (FP, fwd_prop_prg),
                          (FC, founded_prg), (EP, elem_path_prg)):
        if flag:
            sem.append(program)

    inst = instance.to_file()
    prg = sem + scenfit + [inst]
    solver = GringoClasp(clasp_options='--opt-strategy=5')
    solution = solver.run(prg, collapseTerms=True, collapseAtoms=False)
    opt = solution[0].score[0]
    # Clean up the temporary instance file before returning.
    os.unlink(inst)
    return opt
[ "def", "get_scenfit", "(", "instance", ",", "OS", ",", "FP", ",", "FC", ",", "EP", ")", ":", "sem", "=", "[", "sign_cons_prg", ",", "bwd_prop_prg", "]", "if", "OS", ":", "sem", ".", "append", "(", "one_state_prg", ")", "if", "FP", ":", "sem", ".", ...
29.3
16.3
def plot_data_filter(data, data_f, b, a, cutoff, fs):
    '''Plot frequency response and filter overlay for butter filtered data

    Args
    ----
    data: ndarray
        Original (unfiltered) signal array
    data_f: ndarray
        Filtered signal array (same length as `data`)
    b: array_like
        Numerator of a linear filter
    a: array_like
        Denominator of a linear filter
    cutoff: float
        Cutoff frequency for the filter
    fs: float
        Sampling rate of the signal

    Notes
    -----
    http://stackoverflow.com/a/25192640/943773
    '''
    import matplotlib.pyplot as plt
    import numpy
    import scipy.signal

    n = len(data)
    T = n / fs
    t = numpy.linspace(0, T, n, endpoint=False)

    # Calculate frequency response
    w, h = scipy.signal.freqz(b, a, worN=8000)

    # Plot the frequency response.
    fig, (ax1, ax2) = plt.subplots(2, 1)
    ax1.title.set_text('Lowpass Filter Frequency Response')
    ax1.plot(0.5 * fs * w / numpy.pi, numpy.abs(h), 'b')
    ax1.plot(cutoff, 0.5 * numpy.sqrt(2), 'ko')
    ax1.axvline(cutoff, color='k')
    ax1.set_xlim(0, 0.5 * fs)
    ax1.set_xlabel('Frequency [Hz]')

    # Overlay original and filtered signals.
    # BUG FIX: a stray `ax2.legend()` used to be called here, before any
    # labelled artists existed on ax2, which triggers a "no handles" warning.
    # NOTE(review): `_linewidth` is presumably a module-level constant —
    # confirm it is defined where this function lives.
    ax2.plot(t, data, linewidth=_linewidth, label='data')
    ax2.plot(t, data_f, linewidth=_linewidth, label='filtered data')
    ax2.set_xlabel('Time [sec]')
    ax2.legend()

    plt.show()

    return None
[ "def", "plot_data_filter", "(", "data", ",", "data_f", ",", "b", ",", "a", ",", "cutoff", ",", "fs", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "import", "numpy", "import", "scipy", ".", "signal", "n", "=", "len", "(", "data", ")",...
24.943396
20.603774
def plot_joint_sfs_folded(*args, **kwargs):
    """Plot a folded joint site frequency spectrum.

    Parameters
    ----------
    s : array_like, int, shape (n_chromosomes_pop1/2, n_chromosomes_pop2/2)
        Joint site frequency spectrum.
    ax : axes, optional
        Axes on which to draw. If not provided, a new figure will be created.
    imshow_kwargs : dict-like
        Additional keyword arguments, passed through to ax.imshow().

    Returns
    -------
    ax : axes
        The axes on which the plot was drawn.
    """
    # Delegate the actual drawing, then relabel axes for the folded case.
    axes = plot_joint_sfs(*args, **kwargs)
    for setter, label in ((axes.set_xlabel, 'minor allele count (population 1)'),
                          (axes.set_ylabel, 'minor allele count (population 2)')):
        setter(label)
    return axes
[ "def", "plot_joint_sfs_folded", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ax", "=", "plot_joint_sfs", "(", "*", "args", ",", "*", "*", "kwargs", ")", "ax", ".", "set_xlabel", "(", "'minor allele count (population 1)'", ")", "ax", ".", "set_ylab...
30.590909
20.454545
def time(lancet, issue):
    """
    Start an Harvest timer for the given issue.

    This command takes care of linking the timer with the issue tracker
    page for the given issue. If the issue is not passed to command it's
    taken from currently active branch.
    """
    resolved = get_issue(lancet, issue)
    with taskstatus("Starting harvest timer") as status:
        lancet.timer.start(resolved)
        status.ok("Started harvest timer")
[ "def", "time", "(", "lancet", ",", "issue", ")", ":", "issue", "=", "get_issue", "(", "lancet", ",", "issue", ")", "with", "taskstatus", "(", "\"Starting harvest timer\"", ")", "as", "ts", ":", "lancet", ".", "timer", ".", "start", "(", "issue", ")", "...
32.769231
15.692308
def load(self, filenames=None, goto=None, word='',
         editorwindow=None, processevents=True, start_column=None,
         set_focus=True, add_where='end'):
    """
    Load one or more text files into the editor.

    Parameters
    ----------
    filenames: str, list of str, or None
        File(s) to open. If falsy, the name is taken from the sender
        action (recent-files menu) or asked via a file dialog.
    goto: int, list of int, or None
        Line number(s) to jump to after loading (one per file).
    word: str
        Word to highlight when jumping (used with `goto`).
    editorwindow: load in this editorwindow (useful when clicking on
        outline explorer with multiple editor windows).
    processevents: determines if processEvents() should be called at
        the end of this method (set to False to prevent keyboard events
        from creeping through to the editor during debugging).
    start_column: int or None
        Column to place the cursor at when jumping.
    set_focus: bool
        Give focus to the first opened editor.
    add_where: str
        Where to insert the new tab in the editorstack ('end' by default).
    """
    # Switch to editor before trying to load a file
    try:
        self.switch_to_plugin()
    except AttributeError:
        pass
    # Remember current cursor position/file so cursor history can be updated.
    editor0 = self.get_current_editor()
    if editor0 is not None:
        position0 = editor0.get_position('cursor')
        filename0 = self.get_current_filename()
    else:
        position0, filename0 = None, None
    if not filenames:
        # Recent files action
        action = self.sender()
        if isinstance(action, QAction):
            filenames = from_qvariant(action.data(), to_text_string)
    if not filenames:
        # No filename given: ask the user via a file dialog.
        basedir = getcwd_or_home()
        if self.edit_filetypes is None:
            self.edit_filetypes = get_edit_filetypes()
        if self.edit_filters is None:
            self.edit_filters = get_edit_filters()
        c_fname = self.get_current_filename()
        if c_fname is not None and c_fname != self.TEMPFILE_PATH:
            basedir = osp.dirname(c_fname)
        self.redirect_stdio.emit(False)
        parent_widget = self.get_current_editorstack()
        if filename0 is not None:
            # Preselect the filter matching the current file's extension.
            selectedfilter = get_filter(self.edit_filetypes,
                                        osp.splitext(filename0)[1])
        else:
            selectedfilter = ''
        if not running_under_pytest():
            filenames, _sf = getopenfilenames(
                parent_widget, _("Open file"), basedir,
                self.edit_filters, selectedfilter=selectedfilter,
                options=QFileDialog.HideNameFilterDetails)
        else:
            # Use a Qt (i.e. scriptable) dialog for pytest
            dialog = QFileDialog(parent_widget, _("Open file"),
                                 options=QFileDialog.DontUseNativeDialog)
            if dialog.exec_():
                filenames = dialog.selectedFiles()
        self.redirect_stdio.emit(True)
        if filenames:
            filenames = [osp.normpath(fname) for fname in filenames]
        else:
            # Dialog cancelled: nothing to do.
            return
    focus_widget = QApplication.focusWidget()
    if self.editorwindows and not self.dockwidget.isVisible():
        # We override the editorwindow variable to force a focus on
        # the editor window instead of the hidden editor dockwidget.
        # See PR #5742.
        if editorwindow not in self.editorwindows:
            editorwindow = self.editorwindows[0]
        editorwindow.setFocus()
        editorwindow.raise_()
    elif (self.dockwidget and not self.ismaximized
          and not self.dockwidget.isAncestorOf(focus_widget)
          and not isinstance(focus_widget, CodeEditor)):
        self.dockwidget.setVisible(True)
        self.dockwidget.setFocus()
        self.dockwidget.raise_()

    def _convert(fname):
        # Normalize to an absolute, properly decoded path; on Windows,
        # upper-case the drive letter so paths compare consistently.
        fname = osp.abspath(encoding.to_unicode_from_fs(fname))
        if os.name == 'nt' and len(fname) >= 2 and fname[1] == ':':
            fname = fname[0].upper() + fname[1:]
        return fname

    if hasattr(filenames, 'replaceInStrings'):
        # This is a QStringList instance (PyQt API #1), converting to list:
        filenames = list(filenames)
    if not isinstance(filenames, list):
        filenames = [_convert(filenames)]
    else:
        filenames = [_convert(fname) for fname in list(filenames)]
    if isinstance(goto, int):
        goto = [goto]
    elif goto is not None and len(goto) != len(filenames):
        # Mismatched goto list: ignore it rather than jump to wrong lines.
        goto = None
    for index, filename in enumerate(filenames):
        # -- Do not open an already opened file
        focus = set_focus and index == 0
        current_editor = self.set_current_filename(filename, editorwindow,
                                                   focus=focus)
        if current_editor is None:
            # -- Not a valid filename:
            if not osp.isfile(filename):
                continue
            # --
            current_es = self.get_current_editorstack(editorwindow)
            # Creating the editor widget in the first editorstack
            # (the one that can't be destroyed), then cloning this
            # editor widget in all other editorstacks:
            finfo = self.editorstacks[0].load(
                filename, set_current=False, add_where=add_where)
            finfo.path = self.main.get_spyder_pythonpath()
            self._clone_file_everywhere(finfo)
            current_editor = current_es.set_current_filename(filename,
                                                             focus=focus)
            current_editor.debugger.load_breakpoints()
            current_editor.set_bookmarks(load_bookmarks(filename))
            self.register_widget_shortcuts(current_editor)
            current_es.analyze_script()
            self.__add_recent_file(filename)
        if goto is not None:
            # 'word' is assumed to be None as well
            current_editor.go_to_line(goto[index], word=word,
                                      start_column=start_column)
            position = current_editor.get_position('cursor')
            self.cursor_moved(filename0, position0, filename, position)
        current_editor.clearFocus()
        current_editor.setFocus()
        current_editor.window().raise_()
        if processevents:
            QApplication.processEvents()
        else:
            # processevents is false only when calling from debugging
            current_editor.sig_debug_stop.emit(goto[index])
            current_sw = self.main.ipyconsole.get_current_shellwidget()
            current_sw.sig_prompt_ready.connect(
                current_editor.sig_debug_stop[()].emit)
[ "def", "load", "(", "self", ",", "filenames", "=", "None", ",", "goto", "=", "None", ",", "word", "=", "''", ",", "editorwindow", "=", "None", ",", "processevents", "=", "True", ",", "start_column", "=", "None", ",", "set_focus", "=", "True", ",", "a...
47.935714
17.95
def ssh_sa_ssh_server_ssh_vrf_cont_use_vrf_ssh_vrf_shutdown(self, **kwargs):
    """Auto Generated Code

    Builds the NETCONF ``config`` element for shutting down SSH on a
    named VRF and passes it to the callback for execution.
    """
    config = ET.Element("config")
    # Build the nested element chain down to the use-vrf node.
    node = ET.SubElement(config, "ssh-sa",
                         xmlns="urn:brocade.com:mgmt:brocade-sec-services")
    for tag in ("ssh", "server", "ssh-vrf-cont", "use-vrf"):
        node = ET.SubElement(node, tag)
    use_vrf = node
    name_key = ET.SubElement(use_vrf, "use-vrf-name")
    name_key.text = kwargs.pop('use_vrf_name')
    ET.SubElement(use_vrf, "ssh-vrf-shutdown")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
[ "def", "ssh_sa_ssh_server_ssh_vrf_cont_use_vrf_ssh_vrf_shutdown", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "ssh_sa", "=", "ET", ".", "SubElement", "(", "config", ",", "\"ssh-sa\"", ",", "xmlns...
49.066667
18.6
def show_description(self):
    """ Prints the formatted response for the matching return type """

    def printit(c, v):
        # Render one rule result: colored "[LABEL] name" header, an
        # underline of matching width, then the rendered body.
        underline = "-" * len(dr.get_name(c))
        resp = self.responses[v["type"]]
        name = "%s[%s] %s%s" % (resp.color, resp.label, dr.get_name(c),
                                Style.RESET_ALL)
        print(name, file=self.stream)
        print(underline, file=self.stream)
        print(render(c, v), file=self.stream)
        print(file=self.stream)

    # Walk all rule results in name order, counting each known response
    # type and printing the ones selected by the fail_only/missing flags.
    for c in sorted(self.broker.get_by_type(rule), key=dr.get_name):
        v = self.broker[c]
        _type = v.get('type')
        if _type in self.responses:
            self.counts[_type] += 1
        # Print when: fail_only and it's a firing rule; or missing and it's
        # a skip; or not fail_only and it's anything except a skip.
        if (_type and ((self.fail_only and _type == 'rule') or
                       ((self.missing and _type == 'skip') or
                        (not self.fail_only and _type != 'skip')))):
            printit(c, v)
    print(file=self.stream)

    # Trailing summary: per-type colored counts.
    self.print_header("Rule Execution Summary", Fore.CYAN)
    for c in self.counts:
        print(self.responses[c].color + self.responses[c].title +
              str(self.counts[c]) + Style.RESET_ALL, file=self.stream)
[ "def", "show_description", "(", "self", ")", ":", "def", "printit", "(", "c", ",", "v", ")", ":", "underline", "=", "\"-\"", "*", "len", "(", "dr", ".", "get_name", "(", "c", ")", ")", "resp", "=", "self", ".", "responses", "[", "v", "[", "\"type...
45.230769
19.461538
def pylab_activate(user_ns, gui=None, import_all=True, shell=None):
    """Activate pylab mode in the user's namespace.

    Loads and initializes numpy, matplotlib and friends for interactive use.

    NOTE: this is legacy Python 2 code (it uses the ``print`` statement).

    Parameters
    ----------
    user_ns : dict
      Namespace where the imports will occur.

    gui : optional, string
      A valid gui name following the conventions of the %gui magic.

    import_all : optional, boolean
      If true, an 'import *' is done from numpy and pylab.

    shell : optional
      If given, inline figure support is configured for this shell.

    Returns
    -------
    The actual gui used (if not given as input, it was obtained from
    matplotlib itself, and will be needed next to configure IPython's
    gui integration.
    """
    gui, backend = find_gui_and_backend(gui)
    activate_matplotlib(backend)
    import_pylab(user_ns, import_all)
    if shell is not None:
        configure_inline_support(shell, backend, user_ns)

    # Python 2 print statement: announce the selected backend to the user.
    print """
Welcome to pylab, a matplotlib-based Python environment [backend: %s].
For more information, type 'help(pylab)'.""" % backend
    # flush stdout, just to be safe
    sys.stdout.flush()

    return gui
[ "def", "pylab_activate", "(", "user_ns", ",", "gui", "=", "None", ",", "import_all", "=", "True", ",", "shell", "=", "None", ")", ":", "gui", ",", "backend", "=", "find_gui_and_backend", "(", "gui", ")", "activate_matplotlib", "(", "backend", ")", "import_...
31.411765
21.558824
def weighted_random(sequence):
    """
    Given a sequence of pairs (element, weight) where weight is an
    addable/total-order-comparable (e.g. a number), it returns a random
    element (first item in each pair) chosen non-uniformly according to
    the weight of the element (second item in each pair).

    :param sequence: sequence/iterator of pairs (element, weight)
    :return: any value in the first element of each pair
    """
    pairs = sequence.items() if isinstance(sequence, dict) else sequence
    cumulative = list(labeled_accumulate(pairs))
    threshold = random.random() * cumulative[-1][1]
    # Return the first label whose cumulative weight exceeds the threshold;
    # the None fall-through is unreachable a priori (kept from original).
    return next((label for label, total in cumulative if threshold < total),
                None)
[ "def", "weighted_random", "(", "sequence", ")", ":", "if", "isinstance", "(", "sequence", ",", "dict", ")", ":", "sequence", "=", "sequence", ".", "items", "(", ")", "accumulated", "=", "list", "(", "labeled_accumulate", "(", "sequence", ")", ")", "r", "...
37.315789
20.684211
def restore_boolean_setting(self, key, check_box):
    """Set check_box according to setting of key.

    :param key: Key to retrieve setting value.
    :type key: str

    :param check_box: Check box to show and set the setting.
    :type check_box: PyQt5.QtWidgets.QCheckBox.QCheckBox
    """
    # Read the stored boolean and mirror it onto the widget.
    is_checked = setting(key, expected_type=bool, qsettings=self.settings)
    check_box.setChecked(is_checked)
[ "def", "restore_boolean_setting", "(", "self", ",", "key", ",", "check_box", ")", ":", "flag", "=", "setting", "(", "key", ",", "expected_type", "=", "bool", ",", "qsettings", "=", "self", ".", "settings", ")", "check_box", ".", "setChecked", "(", "flag", ...
37.818182
18.181818
def convert_crop(node, **kwargs):
    """Map MXNet's crop operator attributes to onnx's Crop operator
    and return the created node.
    """
    name, inputs, attrs = get_inputs(node, kwargs)
    y, x = list(parse_helper(attrs, "offset", [0, 0]))
    h, w = list(parse_helper(attrs, "h_w", [0, 0]))
    if len(inputs) > 1:
        # With a reference input, take the crop size from its inferred shape.
        h, w = kwargs["out_shape"][-2:]

    crop_node = onnx.helper.make_node(
        "Crop",
        inputs=[inputs[0]],
        outputs=[name],
        border=[x, y, x + w, y + h],
        scale=[1, 1],
        name=name
    )
    logging.warning(
        "Using an experimental ONNX operator: Crop. " \
        "Its definition can change.")
    return [crop_node]
[ "def", "convert_crop", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "inputs", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "num_inputs", "=", "len", "(", "inputs", ")", "y", ",", "x", "=", "list", "(", "parse_he...
26.592593
16.888889
def remove_edge_fun(graph):
    """
    Returns a function that removes an edge from the `graph`.

    ..note:: The out node is removed if this is isolate.

    :param graph:
        A directed graph.
    :type graph: networkx.classes.digraph.DiGraph

    :return:
        A function that remove an edge from the `graph`.
    :rtype: callable
    """
    # Namespace shortcut for speed.
    drop_edge, drop_node = graph.remove_edge, graph.remove_node
    from networkx import is_isolate

    def remove_edge(u, v):
        drop_edge(u, v)  # Remove the edge.
        if is_isolate(graph, v):  # Check if v is isolate.
            drop_node(v)  # Remove the isolate out node.

    return remove_edge
[ "def", "remove_edge_fun", "(", "graph", ")", ":", "# Namespace shortcut for speed.", "rm_edge", ",", "rm_node", "=", "graph", ".", "remove_edge", ",", "graph", ".", "remove_node", "from", "networkx", "import", "is_isolate", "def", "remove_edge", "(", "u", ",", "...
26.68
19.8
def _setup_amplification(self, fle):
    """
    If amplification data is specified then reads into memory and updates
    the required rupture and site parameters
    """
    self.amplification = AmplificationTable(fle["Amplification"],
                                            self.m_w,
                                            self.distances)
    element = self.amplification.element
    if element == "Sites":
        self.REQUIRES_SITES_PARAMETERS = set(
            [self.amplification.parameter])
    elif element == "Rupture":
        # set the site and rupture parameters on the instance
        self.REQUIRES_SITES_PARAMETERS = set()
        self.REQUIRES_RUPTURE_PARAMETERS = (
            self.REQUIRES_RUPTURE_PARAMETERS |
            {self.amplification.parameter})
[ "def", "_setup_amplification", "(", "self", ",", "fle", ")", ":", "self", ".", "amplification", "=", "AmplificationTable", "(", "fle", "[", "\"Amplification\"", "]", ",", "self", ".", "m_w", ",", "self", ".", "distances", ")", "if", "self", ".", "amplifica...
48.823529
12.705882
def getdirs(self, libname):
    '''Implements the dylib search as specified in Apple documentation:

    http://developer.apple.com/documentation/DeveloperTools/Conceptual/
    DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html

    Before commencing the standard search, the method first checks
    the bundle's ``Frameworks`` directory if the application is running
    within a bundle (OS X .app).
    '''
    fallback = _environ_path("DYLD_FALLBACK_LIBRARY_PATH")
    if not fallback:
        fallback = [os.path.expanduser('~/lib'),
                    '/usr/local/lib',
                    '/usr/lib']

    search_dirs = []
    if '/' in libname:
        # Path-qualified name: only DYLD_LIBRARY_PATH applies.
        search_dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
    else:
        search_dirs.extend(_environ_path("LD_LIBRARY_PATH"))
        search_dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))

    search_dirs.extend(self.other_dirs)
    search_dirs.append(".")
    search_dirs.append(os.path.dirname(__file__))

    if hasattr(sys, 'frozen') and sys.frozen == 'macosx_app':
        # Running inside a py2app bundle: check its Frameworks directory.
        search_dirs.append(os.path.join(
            os.environ['RESOURCEPATH'], '..', 'Frameworks'))

    search_dirs.extend(fallback)
    return search_dirs
[ "def", "getdirs", "(", "self", ",", "libname", ")", ":", "dyld_fallback_library_path", "=", "_environ_path", "(", "\"DYLD_FALLBACK_LIBRARY_PATH\"", ")", "if", "not", "dyld_fallback_library_path", ":", "dyld_fallback_library_path", "=", "[", "os", ".", "path", ".", "...
35.189189
24.594595
def _value_with_fmt(self, val):
    """Convert numpy types to Python types for the Excel writers.

    Parameters
    ----------
    val : object
        Value to be written into cells

    Returns
    -------
    Tuple with the first element being the converted value and the second
        being an optional format
    """
    # Numeric numpy scalars become their builtin equivalents, no format.
    if is_integer(val):
        return int(val), None
    if is_float(val):
        return float(val), None
    if is_bool(val):
        return bool(val), None
    # Temporal values keep their value but pick up the writer's format.
    if isinstance(val, datetime):
        return val, self.datetime_format
    if isinstance(val, date):
        return val, self.date_format
    if isinstance(val, timedelta):
        # Excel stores durations as fractional days.
        return val.total_seconds() / float(86400), '0'
    # Everything else is stringified.
    return compat.to_str(val), None
[ "def", "_value_with_fmt", "(", "self", ",", "val", ")", ":", "fmt", "=", "None", "if", "is_integer", "(", "val", ")", ":", "val", "=", "int", "(", "val", ")", "elif", "is_float", "(", "val", ")", ":", "val", "=", "float", "(", "val", ")", "elif",...
26.9375
16.25
def run(self, value, errors, request):
    """Return thing, but abort validation if request.user cannot edit."""
    thing = super(AccessibleDBThing, self).run(value, errors, request)
    if errors:
        return None
    if thing.can_access(request.user):
        return thing
    # User lacks access: reject the request outright.
    raise HTTPForbidden('Insufficient permissions for {0}'.format(self.param))
[ "def", "run", "(", "self", ",", "value", ",", "errors", ",", "request", ")", ":", "thing", "=", "super", "(", "AccessibleDBThing", ",", "self", ")", ".", "run", "(", "value", ",", "errors", ",", "request", ")", "if", "errors", ":", "return", "None", ...
45.666667
15.111111
def copy_params_from(self, arg_params, aux_params=None, allow_extra_params=False):
    """Copy parameters from arg_params, aux_params into executor's internal array.

    Parameters
    ----------
    arg_params : dict of str to NDArray
        Parameters, dict of name to NDArray of arguments.

    aux_params : dict of str to NDArray, optional
        Parameters, dict of name to NDArray of auxiliary states.

    allow_extra_params : boolean, optional
        Whether allow extra parameters that are not needed by symbol.
        If this is True, no error will be thrown when arg_params or aux_params
        contain extra parameters that is not needed by the executor.

    Raises
    ------
    ValueError
        If there is additional parameters in the dict but ``allow_extra_params=False``.

    Examples
    --------
    >>> # set parameters with existing model checkpoint
    >>> model_prefix = 'mx_mlp'
    >>> sym, arg_params, aux_params = mx.model.load_checkpoint(model_prefix, 0)
    >>> texec.copy_params_from(arg_params, aux_params)
    """
    def _copy_into(params, target, err_template):
        # Cast each source array to the destination dtype and copy in place;
        # unknown names raise unless extras are explicitly allowed.
        for name, array in params.items():
            if name in target:
                dst = target[name]
                array.astype(dst.dtype).copyto(dst)
            elif not allow_extra_params:
                raise ValueError(err_template % name)

    _copy_into(arg_params, self.arg_dict,
               'Find name \"%s\" that is not in the arguments')
    if aux_params is None:
        return
    _copy_into(aux_params, self.aux_dict,
               'Find name %s that is not in the auxiliary states')
[ "def", "copy_params_from", "(", "self", ",", "arg_params", ",", "aux_params", "=", "None", ",", "allow_extra_params", "=", "False", ")", ":", "for", "name", ",", "array", "in", "arg_params", ".", "items", "(", ")", ":", "if", "name", "in", "self", ".", ...
40.318182
22.159091
def findStyleFor(self, element, attrName, default=NotImplemented):
    """Attempts to find the style setting for attrName in the CSSRulesets.

    Note: This method does not attempt to resolve rules that return
    "inherited", "default", or values that have units (including "%").
    This is left up to the client app to re-query the CSS in order to
    implement these semantics.
    """
    matching_rule = self.findCSSRulesFor(element, attrName)
    return self._extractStyleForRule(matching_rule, attrName, default)
[ "def", "findStyleFor", "(", "self", ",", "element", ",", "attrName", ",", "default", "=", "NotImplemented", ")", ":", "rule", "=", "self", ".", "findCSSRulesFor", "(", "element", ",", "attrName", ")", "return", "self", ".", "_extractStyleForRule", "(", "rule...
52.6
20.9
def dispatch_event(self, event_, **kwargs):
    """
    Dispatch section event.

    Notes:
        You MUST NOT call event.trigger() directly because
        it will circumvent the section settings as well
        as ignore the section tree.

        If hooks are disabled somewhere up in the tree,
        and enabled down below, events will still be dispatched
        down below because that's where they originate.
    """
    if not self.settings.hooks_enabled:
        # Settings only apply to one section, so must still
        # dispatch the event in parent sections recursively.
        if self.section:
            self.section.dispatch_event(event_, **kwargs)
        return

    outcome = self.hooks.dispatch_event(event_, **kwargs)
    if outcome is not None:
        return outcome

    # Must also dispatch the event in parent section
    if self.section:
        return self.section.dispatch_event(event_, **kwargs)
[ "def", "dispatch_event", "(", "self", ",", "event_", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "settings", ".", "hooks_enabled", ":", "result", "=", "self", ".", "hooks", ".", "dispatch_event", "(", "event_", ",", "*", "*", "kwargs", ")", ...
37.961538
18.653846
def get_mute(self):
    """Return if the TV is muted."""
    params = '<InstanceID>0</InstanceID><Channel>Master</Channel>'
    response = self.soap_request(URL_CONTROL_DMR, URN_RENDERING_CONTROL,
                                 'GetMute', params)
    # The rendering-control service reports '0' for unmuted.
    mute_element = ET.fromstring(response).find('.//CurrentMute')
    return mute_element.text != '0'
[ "def", "get_mute", "(", "self", ")", ":", "params", "=", "'<InstanceID>0</InstanceID><Channel>Master</Channel>'", "res", "=", "self", ".", "soap_request", "(", "URL_CONTROL_DMR", ",", "URN_RENDERING_CONTROL", ",", "'GetMute'", ",", "params", ")", "root", "=", "ET", ...
45.25
13.75
def ystep(self):
    r"""Minimise Augmented Lagrangian with respect to :math:`\mathbf{y}`."""
    # Soft-threshold AX + U with the weighted l1 threshold, then let the
    # parent class finish its y-step bookkeeping.
    threshold = (self.lmbda / self.rho) * self.wl1
    self.Y = np.asarray(sp.prox_l1(self.AX + self.U, threshold),
                        dtype=self.dtype)
    super(BPDN, self).ystep()
[ "def", "ystep", "(", "self", ")", ":", "self", ".", "Y", "=", "np", ".", "asarray", "(", "sp", ".", "prox_l1", "(", "self", ".", "AX", "+", "self", ".", "U", ",", "(", "self", ".", "lmbda", "/", "self", ".", "rho", ")", "*", "self", ".", "w...
39
15.875
def pdos_select(self, atoms=None, spin=None, l=None, m=None):
    """
    Returns a subset of the projected density of states array.

    Args:
        atoms (int or list(int)): Atom numbers to include in the selection.
            Atom numbers count from 1. Default is to select all atoms.
        spin (str): Select up or down, or both spin channels to include
            in the selection. Accepted options are 'up', 'down', and 'both'.
            Default is to select both spins.
        l (str): Select one angular momentum to include in the selection.
            Accepted options are 's', 'p', 'd', and 'f'. Default is to
            include all l-values. Setting `l` and not setting `m` will
            return all projections for that angular momentum value.
        m (list(str)): Select one or more m-values. Requires `l` to be set.
            The accepted values depend on the value of `l`:
            `l='s'`: Only one projection. Not set.
            `l='p'`: One or more of [ 'x', 'y', 'z' ].
            `l='d'`: One or more of [ 'xy', 'yz', 'z2-r2', 'xz', 'x2-y2' ].
            `l='f'`: One or more of [ 'y(3x2-y2)', 'xyz', 'yz2', 'z3',
            'xz2', 'z(x2-y2)', 'x(x2-3y2)' ].

    Returns:
        np.array: A 4-dimensional numpy array containing the selected pdos
            values. The array dimensions are
            [ atom_no, energy_value, lm-projection, spin ]
    """
    valid_m_values = {'s': [],
                      'p': ['x', 'y', 'z'],
                      'd': ['xy', 'yz', 'z2-r2', 'xz', 'x2-y2'],
                      'f': ['y(3x2-y2)', 'xyz', 'yz2', 'z3', 'xz2',
                            'z(x2-y2)', 'x(x2-3y2)']}
    if not atoms:
        atom_idx = list(range(self.number_of_atoms))
    else:
        atom_idx = atoms
    to_return = self.pdos[atom_idx, :, :, :]

    # BUG FIX: these comparisons previously used `spin is 'up'` etc. —
    # identity tests against string literals, which only work by accident
    # of CPython interning and raise SyntaxWarning on Python >= 3.8.
    if not spin:
        spin_idx = list(range(self.ispin))
    elif spin == 'up':
        spin_idx = [0]
    elif spin == 'down':
        spin_idx = [1]
    elif spin == 'both':
        spin_idx = [0, 1]
    else:
        raise ValueError("valid spin values are 'up', 'down', and 'both'. "
                         "The default is 'both'")
    to_return = to_return[:, :, :, spin_idx]

    # Channel layout: s at 0, p at 1-3, d at 4-8, f at 9-15; m-selections
    # index relative to those offsets.
    if not l:
        channel_idx = list(range(self.number_of_channels))
    elif l == 's':
        channel_idx = [0]
    elif l == 'p':
        if not m:
            channel_idx = [1, 2, 3]
        else:
            channel_idx = [i + 1 for i, v in
                           enumerate(valid_m_values['p']) if v in m]
    elif l == 'd':
        if not m:
            channel_idx = [4, 5, 6, 7, 8]
        else:
            channel_idx = [i + 4 for i, v in
                           enumerate(valid_m_values['d']) if v in m]
    elif l == 'f':
        if not m:
            channel_idx = [9, 10, 11, 12, 13, 14, 15]
        else:
            channel_idx = [i + 9 for i, v in
                           enumerate(valid_m_values['f']) if v in m]
    else:
        # Previously a bare `raise ValueError`; give the caller a reason.
        raise ValueError("valid l values are 's', 'p', 'd', and 'f'")
    return to_return[:, :, channel_idx, :]
[ "def", "pdos_select", "(", "self", ",", "atoms", "=", "None", ",", "spin", "=", "None", ",", "l", "=", "None", ",", "m", "=", "None", ")", ":", "valid_m_values", "=", "{", "'s'", ":", "[", "]", ",", "'p'", ":", "[", "'x'", ",", "'y'", ",", "'...
49.606061
27.424242
def get_first_content(el_list, alt=None, strip=True):
    """
    Return content of the first element in `el_list` or `alt`. Also return
    `alt` if the content string of first element is blank.

    Args:
        el_list (list): List of HTMLElement objects.
        alt (default None): Value returner when list or content is blank.
        strip (bool, default True): Call .strip() to content.

    Returns:
        str or alt: String representation of the content of the first element \
                    or `alt` if not found.
    """
    if not el_list:
        return alt

    text = el_list[0].getContent()
    if strip:
        text = text.strip()
    return text if text else alt
[ "def", "get_first_content", "(", "el_list", ",", "alt", "=", "None", ",", "strip", "=", "True", ")", ":", "if", "not", "el_list", ":", "return", "alt", "content", "=", "el_list", "[", "0", "]", ".", "getContent", "(", ")", "if", "strip", ":", "conten...
27
23.692308
def check_value(
        config,
        section,
        option,
        jinja_pattern=JINJA_PATTERN,
):
    """try to figure out if value is valid or jinja2 template value

    Args:
        config (:obj:`configparser.ConfigParser`): config object to read
            key from
        section (str): name of section in configparser
        option (str): name of option in configparser
        jinja_pattern (:obj:`_sre.SRE_Pattern`): a `re.compile()` pattern
            to match on

    Returns:
        str: value if value, else None

    Raises:
        KeyError:
        configparser.NoOptionError:
        configparser.NoSectionError:

    """
    raw = config[section][option]
    # A value that still looks like a jinja2 template is not usable yet.
    return None if re.match(jinja_pattern, raw) else raw
[ "def", "check_value", "(", "config", ",", "section", ",", "option", ",", "jinja_pattern", "=", "JINJA_PATTERN", ",", ")", ":", "value", "=", "config", "[", "section", "]", "[", "option", "]", "if", "re", ".", "match", "(", "jinja_pattern", ",", "value", ...
25.428571
22.357143
def simulate(args):
    """Main function that manage simulatin of small RNAs"""
    if args.fasta:
        current_name = None
        sequence = ""
        read_map = dict()
        with open(args.fasta) as handle:
            for line in handle:
                if line.startswith(">"):
                    # Flush the previous record before starting a new one.
                    if current_name:
                        read_map.update(_generate_reads(sequence, current_name))
                        sequence = ""
                    current_name = line[1:-1]
                else:
                    sequence += line.strip()
        # Flush the final record.
        read_map.update(_generate_reads(sequence, current_name))
        _write_reads(read_map, args.out)
[ "def", "simulate", "(", "args", ")", ":", "if", "args", ".", "fasta", ":", "name", "=", "None", "seq", "=", "\"\"", "reads", "=", "dict", "(", ")", "with", "open", "(", "args", ".", "fasta", ")", "as", "in_handle", ":", "for", "line", "in", "in_h...
31.555556
13.388889
def city(random=random, *args, **kwargs):
    """
    Produce a city name

    >>> mock_random.seed(0)
    >>> city(random=mock_random)
    'east mysteryhall'
    >>> city(random=mock_random, capitalize=True)
    'Birmingchimp'
    >>> city(random=mock_random, slugify=True)
    'wonderfulsecretsound'
    """
    templates = [
        "{direction} {noun}{city_suffix}",
        "{noun}{city_suffix}",
        "{adjective}{noun}{city_suffix}",
        "{plural}{city_suffix}",
        "{adjective}{city_suffix}",
        "liver{noun}",
        "birming{noun}",
        "{noun}{city_suffix} {direction}",
    ]
    # Pick a template first, then fill every placeholder it may use.
    chosen = random.choice(templates)
    return chosen.format(direction=direction(random=random),
                         adjective=adjective(random=random),
                         plural=plural(random=random),
                         city_suffix=city_suffix(random=random),
                         noun=noun(random=random))
[ "def", "city", "(", "random", "=", "random", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "random", ".", "choice", "(", "[", "\"{direction} {noun}{city_suffix}\"", ",", "\"{noun}{city_suffix}\"", ",", "\"{adjective}{noun}{city_suffix}\"", ",", ...
30.518519
11.037037
def _string_find(self, substr, start=None, end=None):
    """
    Returns position (0 indexed) of first occurence of substring,
    optionally after a particular position (0 indexed)

    Parameters
    ----------
    substr : string
    start : int, default None
    end : int, default None
        Not currently implemented

    Returns
    -------
    position : int, 0 indexed
    """
    if end is not None:
        raise NotImplementedError
    find_op = ops.StringFind(self, substr, start, end)
    return find_op.to_expr()
[ "def", "_string_find", "(", "self", ",", "substr", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "if", "end", "is", "not", "None", ":", "raise", "NotImplementedError", "return", "ops", ".", "StringFind", "(", "self", ",", "substr", ",",...
25.789474
18.421053
def Value(cls, val, ctx: SerializerCtx): """ :param dst: is signal connected with value :param val: value object, can be instance of Signal or Value """ t = val._dtype if isinstance(val, RtlSignalBase): return cls.SignalItem(val, ctx) c = cls.Value_try_extract_as_const(val, ctx) if c: return c if isinstance(t, Slice): return cls.Slice_valAsHdl(t, val, ctx) elif isinstance(t, HArray): return cls.HArrayValAsHdl(t, val, ctx) elif isinstance(t, Bits): return cls.Bits_valAsHdl(t, val, ctx) elif isinstance(t, HBool): return cls.Bool_valAsHdl(t, val, ctx) elif isinstance(t, HEnum): return cls.HEnumValAsHdl(t, val, ctx) elif isinstance(t, Integer): return cls.Integer_valAsHdl(t, val, ctx) elif isinstance(t, String): return cls.String_valAsHdl(t, val, ctx) else: raise SerializerException( "can not resolve value serialization for %r" % (val))
[ "def", "Value", "(", "cls", ",", "val", ",", "ctx", ":", "SerializerCtx", ")", ":", "t", "=", "val", ".", "_dtype", "if", "isinstance", "(", "val", ",", "RtlSignalBase", ")", ":", "return", "cls", ".", "SignalItem", "(", "val", ",", "ctx", ")", "c"...
34.25
13
def list_subdomains(self, limit=None, offset=None): """ Returns a list of all subdomains for this domain. """ return self.manager.list_subdomains(self, limit=limit, offset=offset)
[ "def", "list_subdomains", "(", "self", ",", "limit", "=", "None", ",", "offset", "=", "None", ")", ":", "return", "self", ".", "manager", ".", "list_subdomains", "(", "self", ",", "limit", "=", "limit", ",", "offset", "=", "offset", ")" ]
41.4
13
def run_rep(self, params, rep): """ run a single repetition including directory creation, log files, etc. """ try: name = params['name'] fullpath = os.path.join(params['path'], params['name']) logname = os.path.join(fullpath, '%i.log'%rep) # check if repetition exists and has been completed restore = 0 if os.path.exists(logname): logfile = open(logname, 'r') lines = logfile.readlines() logfile.close() # if completed, continue loop if 'iterations' in params and len(lines) == params['iterations']: return False # if not completed, check if restore_state is supported if not self.restore_supported: # not supported, delete repetition and start over # print 'restore not supported, deleting %s' % logname os.remove(logname) restore = 0 else: restore = len(lines) self.reset(params, rep) if restore: logfile = open(logname, 'a') self.restore_state(params, rep, restore) else: logfile = open(logname, 'w') # loop through iterations and call iterate for it in xrange(restore, params['iterations']): dic = self.iterate(params, rep, it) or {} dic['iteration'] = it if self.restore_supported: self.save_state(params, rep, it) if dic is not None: json.dump(dic, logfile) logfile.write('\n') logfile.flush() logfile.close() self.finalize(params, rep) except: import traceback traceback.print_exc() raise
[ "def", "run_rep", "(", "self", ",", "params", ",", "rep", ")", ":", "try", ":", "name", "=", "params", "[", "'name'", "]", "fullpath", "=", "os", ".", "path", ".", "join", "(", "params", "[", "'path'", "]", ",", "params", "[", "'name'", "]", ")",...
35.222222
17.12963
def np_to_list(elem): """Returns list from list, tuple or ndarray.""" if isinstance(elem, list): return elem elif isinstance(elem, tuple): return list(elem) elif isinstance(elem, np.ndarray): return list(elem) else: raise ValueError( 'Input elements of a sequence should be either a numpy array, a ' 'python list or tuple. Got {}'.format(type(elem)))
[ "def", "np_to_list", "(", "elem", ")", ":", "if", "isinstance", "(", "elem", ",", "list", ")", ":", "return", "elem", "elif", "isinstance", "(", "elem", ",", "tuple", ")", ":", "return", "list", "(", "elem", ")", "elif", "isinstance", "(", "elem", ",...
31.75
17.5
def n_way_models(mdr_instance, X, y, n=[2], feature_names=None): """Fits a MDR model to all n-way combinations of the features in X. Note that this function performs an exhaustive search through all feature combinations and can be computationally expensive. Parameters ---------- mdr_instance: object An instance of the MDR type to use. X: array-like (# rows, # features) NumPy matrix containing the features y: array-like (# rows, 1) NumPy matrix containing the target values n: list (default: [2]) The maximum size(s) of the MDR model to generate. e.g., if n == [3], all 3-way models will be generated. feature_names: list (default: None) The corresponding names of the features in X. If None, then the features will be named according to their order. Returns ---------- (fitted_model, fitted_model_score, fitted_model_features): tuple of (list, list, list) fitted_model contains the MDR model fitted to the data. fitted_model_score contains the training scores corresponding to the fitted MDR model. fitted_model_features contains a list of the names of the features that were used in the corresponding model. """ if feature_names is None: feature_names = list(range(X.shape[1])) for cur_n in n: for features in itertools.combinations(range(X.shape[1]), cur_n): mdr_model = copy.deepcopy(mdr_instance) mdr_model.fit(X[:, features], y) mdr_model_score = mdr_model.score(X[:, features], y) model_features = [feature_names[feature] for feature in features] yield mdr_model, mdr_model_score, model_features
[ "def", "n_way_models", "(", "mdr_instance", ",", "X", ",", "y", ",", "n", "=", "[", "2", "]", ",", "feature_names", "=", "None", ")", ":", "if", "feature_names", "is", "None", ":", "feature_names", "=", "list", "(", "range", "(", "X", ".", "shape", ...
44.447368
24.657895
def file_writelines_flush_sync(path, lines): """ Fill file at @path with @lines then flush all buffers (Python and system buffers) """ fp = open(path, 'w') try: fp.writelines(lines) flush_sync_file_object(fp) finally: fp.close()
[ "def", "file_writelines_flush_sync", "(", "path", ",", "lines", ")", ":", "fp", "=", "open", "(", "path", ",", "'w'", ")", "try", ":", "fp", ".", "writelines", "(", "lines", ")", "flush_sync_file_object", "(", "fp", ")", "finally", ":", "fp", ".", "clo...
24.545455
13.272727
def _serialize(self, convert_to_key_and_value, ignore_missing=False): """ serialize model object to dictionary :param convert_to_key_and_value: function(field_name, value, property_detail) -> key, value :return: """ serialized = {} properties = self._get_property_names(self) def get_property_detail(name): p = [p for p in self._property_details if p.name == name] return None if len(p) == 0 else p[0] for p in properties: pd = get_property_detail(p) value = self._property_to_field(p, pd) field_name = p if not pd else pd.to_field_name() if value is None or (ignore_missing and not value) or (pd and pd.unsent): continue else: key, value = convert_to_key_and_value(field_name, value, pd) if key: serialized[key] = value return serialized
[ "def", "_serialize", "(", "self", ",", "convert_to_key_and_value", ",", "ignore_missing", "=", "False", ")", ":", "serialized", "=", "{", "}", "properties", "=", "self", ".", "_get_property_names", "(", "self", ")", "def", "get_property_detail", "(", "name", "...
35.148148
21.518519
def set_attributes(d, elm): """Set attributes from dictionary of values.""" for key in d: elm.setAttribute(key, d[key])
[ "def", "set_attributes", "(", "d", ",", "elm", ")", ":", "for", "key", "in", "d", ":", "elm", ".", "setAttribute", "(", "key", ",", "d", "[", "key", "]", ")" ]
33
9.75
def init(): """Try loading each binding in turn Please note: the entire Qt module is replaced with this code: sys.modules["Qt"] = binding() This means no functions or variables can be called after this has executed. For debugging and testing, this module may be accessed through `Qt.__shim__`. """ preferred = os.getenv("QT_PREFERRED_BINDING") verbose = os.getenv("QT_VERBOSE") is not None bindings = (_pyside2, _pyqt5, _pyside, _pyqt4) if preferred: # Internal flag (used in installer) if preferred == "None": self.__wrapper_version__ = self.__version__ return preferred = preferred.split(os.pathsep) available = { "PySide2": _pyside2, "PyQt5": _pyqt5, "PySide": _pyside, "PyQt4": _pyqt4 } try: bindings = [available[binding] for binding in preferred] except KeyError: raise ImportError( "Available preferred Qt bindings: " "\n".join(preferred) ) for binding in bindings: _log("Trying %s" % binding.__name__, verbose) try: binding = binding() except ImportError as e: _log(" - ImportError(\"%s\")" % e, verbose) continue else: # Reference to this module binding.__shim__ = self binding.QtCompat = self sys.modules.update({ __name__: binding, # Fix #133, `from Qt.QtWidgets import QPushButton` __name__ + ".QtWidgets": binding.QtWidgets }) return # If not binding were found, throw this error raise ImportError("No Qt binding were found.")
[ "def", "init", "(", ")", ":", "preferred", "=", "os", ".", "getenv", "(", "\"QT_PREFERRED_BINDING\"", ")", "verbose", "=", "os", ".", "getenv", "(", "\"QT_VERBOSE\"", ")", "is", "not", "None", "bindings", "=", "(", "_pyside2", ",", "_pyqt5", ",", "_pysid...
25.940299
20.895522
def parse_manifest(manifest_path): """ Parse manifest file :param str manifest_path: Path to manifest file :return: samples :rtype: list[str, list] """ samples = [] with open(manifest_path, 'r') as f: for line in f: if not line.isspace() and not line.startswith('#'): sample = line.strip().split('\t') require(2 <= len(sample) <= 3, 'Bad manifest format! ' 'Expected UUID\tURL1\t[URL2] (tab separated), got: {}'.format(sample)) uuid = sample[0] urls = sample[1:] for url in urls: require(urlparse(url).scheme and urlparse(url), 'Invalid URL passed for {}'.format(url)) samples.append([uuid, urls]) return samples
[ "def", "parse_manifest", "(", "manifest_path", ")", ":", "samples", "=", "[", "]", "with", "open", "(", "manifest_path", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "if", "not", "line", ".", "isspace", "(", ")", "and", "not", "l...
38.666667
19.333333
def parse(self, input): """Passes input to each QueryLineHandler in use""" query = None for handler in self._line_handlers: try: query = handler.handle(input) except Exception as e: query = None finally: if query is not None: return query return None
[ "def", "parse", "(", "self", ",", "input", ")", ":", "query", "=", "None", "for", "handler", "in", "self", ".", "_line_handlers", ":", "try", ":", "query", "=", "handler", ".", "handle", "(", "input", ")", "except", "Exception", "as", "e", ":", "quer...
31.25
11.583333
def start(self): """ Start the timer by recoding the current ``time.time()`` preparing to report the number of seconds since this timestamp. """ if self.start_time is None: self.start_time = time.time() # Play after pause else: # Add the duration of the paused interval to the total offset pause_duration = time.time() - self.pause_time self.offset += pause_duration # print("pause duration", pause_duration, "offset", self.offset) # Exit the paused state self.pause_time = None
[ "def", "start", "(", "self", ")", ":", "if", "self", ".", "start_time", "is", "None", ":", "self", ".", "start_time", "=", "time", ".", "time", "(", ")", "# Play after pause", "else", ":", "# Add the duration of the paused interval to the total offset", "pause_dur...
40.266667
14.933333
def _draw(self, data): """ Draw text """ self._cursor.clearSelection() self._cursor.setPosition(self._last_cursor_pos) if '\x07' in data.txt: print('\a') txt = data.txt.replace('\x07', '') if '\x08' in txt: parts = txt.split('\x08') else: parts = [txt] for i, part in enumerate(parts): if part: part = part.replace('\r\r', '\r') if len(part) >= 80 * 24 * 8: # big output, process it in one step (\r and \n will not be handled) self._draw_chars(data, part) continue to_draw = '' for n, char in enumerate(part): if char == '\n': self._draw_chars(data, to_draw) to_draw = '' self._linefeed() elif char == '\r': self._draw_chars(data, to_draw) to_draw = '' self._erase_in_line(0) try: nchar = part[n + 1] except IndexError: nchar = None if self._cursor.positionInBlock() > 80 and self.flg_bash and nchar != '\n': self._linefeed() self._cursor.movePosition(self._cursor.StartOfBlock) self._text_edit.setTextCursor(self._cursor) else: to_draw += char if to_draw: self._draw_chars(data, to_draw) if i != len(parts) - 1: self._cursor_back(1) self._last_cursor_pos = self._cursor.position() self._prefix_len = self._cursor.positionInBlock() self._text_edit.setTextCursor(self._cursor)
[ "def", "_draw", "(", "self", ",", "data", ")", ":", "self", ".", "_cursor", ".", "clearSelection", "(", ")", "self", ".", "_cursor", ".", "setPosition", "(", "self", ".", "_last_cursor_pos", ")", "if", "'\\x07'", "in", "data", ".", "txt", ":", "print",...
38.02
13.54
def write_pad_codewords(buff, version, capacity, length): """\ Writes the pad codewords iff the data does not fill the capacity of the symbol. :param buff: The byte buffer. :param int version: The (Micro) QR Code version. :param int capacity: The total capacity of the symbol (incl. error correction) :param int length: Length of the data bit stream. """ # ISO/IEC 18004:2015(E) -- 7.4.10 Bit stream to codeword conversion (page 32) # The message bit stream shall then be extended to fill the data capacity # of the symbol corresponding to the Version and Error Correction Level, as # defined in Table 8, by adding the Pad Codewords 11101100 and 00010001 # alternately. For Micro QR Code versions M1 and M3 symbols, the final data # codeword is 4 bits long. The Pad Codeword used in the final data symbol # character position in Micro QR Code versions M1 and M3 symbols shall be # represented as 0000. write = buff.extend if version in (consts.VERSION_M1, consts.VERSION_M3): write([0] * (capacity - length)) else: pad_codewords = ((1, 1, 1, 0, 1, 1, 0, 0), (0, 0, 0, 1, 0, 0, 0, 1)) for i in range(capacity // 8 - length // 8): write(pad_codewords[i % 2])
[ "def", "write_pad_codewords", "(", "buff", ",", "version", ",", "capacity", ",", "length", ")", ":", "# ISO/IEC 18004:2015(E) -- 7.4.10 Bit stream to codeword conversion (page 32)", "# The message bit stream shall then be extended to fill the data capacity", "# of the symbol correspondin...
49.8
23.52
def resolve_feats(feat_list, seqin, seqref, start, locus, missing, verbose=False, verbosity=0): """ resolve_feats - Resolves features from alignments :param feat_list: List of the found features :type feat_list: ``List`` :param seqin: The input sequence :type seqin: ``str`` :param locus: The input locus :type locus: ``str`` :param start: Where the sequence start in the alignment :type start: ``int`` :param missing: List of the unmapped features :type missing: ``List`` :param verbose: Flag for running in verbose mode. :type verbose: ``bool`` :param verbosity: Numerical value to indicate how verbose the output will be in verbose mode. :type verbosity: ``int`` :rtype: :ref:`ann` """ structures = get_structures() logger = logging.getLogger("Logger." + __name__) seq = SeqRecord(seq=Seq("".join(seqin), SingleLetterAlphabet())) seq_covered = len(seq.seq) coordinates = dict(map(lambda x: [x, 1], [i for i in range(0, len(seq.seq)+1)])) mapping = dict(map(lambda x: [x, 1], [i for i in range(0, len(seq.seq)+1)])) diff = 0 if len(feat_list) > 1: if verbose: logger.error("resolve_feats error") return Annotation(complete_annotation=False) else: features = {} full_annotation = {} features = feat_list[0] # Need to sort feature_list = sorted(features.keys(), key=lambda f: structures[locus][f]) diff_f = True for feat in feature_list: if feat in missing: f = features[feat] seqrec = f.extract(seq) seq_covered -= len(seqrec.seq) if re.search("-", str(seqrec.seq)): l1 = len(seqrec.seq) newseq = re.sub(r'-', '', str(seqrec.seq)) seqrec.seq = Seq(newseq, IUPAC.unambiguous_dna) tmdiff = l1 - len(newseq) diff += tmdiff if seqrec.seq: #logger.error("FEAT HAS SEQ " + feat) if diff_f and diff > 0: sp = f.location.start + start diff_f = False else: sp = f.location.start + start - diff ep = f.location.end + start - diff featn = SeqFeature(FeatureLocation(ExactPosition(sp), ExactPosition(ep), strand=1), type=f.type) features.update({feat: featn}) full_annotation.update({feat: seqrec}) for i in 
range(featn.location.start, featn.location.end): if i in coordinates: del coordinates[i] mapping[i] = feat else: f = features[feat] seqrec = f.extract(seq) seq_covered -= len(seqrec.seq) if re.search("-", str(seqrec.seq)): l1 = len(seqrec.seq) newseq = re.sub(r'-', '', str(seqrec.seq)) seqrec.seq = Seq(newseq, IUPAC.unambiguous_dna) tmdiff = l1 - len(newseq) diff += tmdiff blocks = getblocks(coordinates) rmapping = {k+start: mapping[k] for k in mapping.keys()} # Print out what features are missing if verbose and verbosity > 0 and len(full_annotation.keys()) > 1: logger.info("Features resolved:") for f in full_annotation: logger.info(f) else: if verbose: logger.info("Failed to resolve") if not full_annotation or len(full_annotation) == 0: if verbose: logger.info("Failed to align missing features") return Annotation(complete_annotation=False) else: return Annotation(annotation=full_annotation, method="clustalo", features=features, mapping=rmapping, blocks=blocks, seq=seq)
[ "def", "resolve_feats", "(", "feat_list", ",", "seqin", ",", "seqref", ",", "start", ",", "locus", ",", "missing", ",", "verbose", "=", "False", ",", "verbosity", "=", "0", ")", ":", "structures", "=", "get_structures", "(", ")", "logger", "=", "logging"...
38.405405
16.837838
def distLinf(x1,y1,x2,y2): """Compute the Linfty distance between two points (see TSPLIB documentation)""" return int(max(abs(x2-x1),abs(y2-y1)))
[ "def", "distLinf", "(", "x1", ",", "y1", ",", "x2", ",", "y2", ")", ":", "return", "int", "(", "max", "(", "abs", "(", "x2", "-", "x1", ")", ",", "abs", "(", "y2", "-", "y1", ")", ")", ")" ]
50.333333
5.333333
def add_bgp_speaker_to_dragent(self, bgp_dragent, body): """Adds a BGP speaker to Dynamic Routing agent.""" return self.post((self.agent_path + self.BGP_DRINSTANCES) % bgp_dragent, body=body)
[ "def", "add_bgp_speaker_to_dragent", "(", "self", ",", "bgp_dragent", ",", "body", ")", ":", "return", "self", ".", "post", "(", "(", "self", ".", "agent_path", "+", "self", ".", "BGP_DRINSTANCES", ")", "%", "bgp_dragent", ",", "body", "=", "body", ")" ]
57.25
12.75
def write(self, file): """Write YAML campaign template to the given open file """ render( self.template, file, benchmarks=self.benchmarks, hostname=socket.gethostname(), )
[ "def", "write", "(", "self", ",", "file", ")", ":", "render", "(", "self", ".", "template", ",", "file", ",", "benchmarks", "=", "self", ".", "benchmarks", ",", "hostname", "=", "socket", ".", "gethostname", "(", ")", ",", ")" ]
27
12.666667
def create_widget(self): """ Create the toolkit widget for the proxy object. This method is called during the top-down pass, just before the 'init_widget()' method is called. This method should create the toolkit widget and assign it to the 'widget' attribute. """ self.widget = SubElement(self.parent_widget(), self.declaration.tag)
[ "def", "create_widget", "(", "self", ")", ":", "self", ".", "widget", "=", "SubElement", "(", "self", ".", "parent_widget", "(", ")", ",", "self", ".", "declaration", ".", "tag", ")" ]
41.666667
24.111111
def install(force=False): """Install git hooks.""" ret, git_dir, _ = run("git rev-parse --show-toplevel") if ret != 0: click.echo( "ERROR: Please run from within a GIT repository.", file=sys.stderr) raise click.Abort git_dir = git_dir[0] hooks_dir = os.path.join(git_dir, HOOK_PATH) for hook in HOOKS: hook_path = os.path.join(hooks_dir, hook) if os.path.exists(hook_path): if not force: click.echo( "Hook already exists. Skipping {0}".format(hook_path), file=sys.stderr) continue else: os.unlink(hook_path) source = os.path.join(sys.prefix, "bin", "kwalitee-" + hook) os.symlink(os.path.normpath(source), hook_path) return True
[ "def", "install", "(", "force", "=", "False", ")", ":", "ret", ",", "git_dir", ",", "_", "=", "run", "(", "\"git rev-parse --show-toplevel\"", ")", "if", "ret", "!=", "0", ":", "click", ".", "echo", "(", "\"ERROR: Please run from within a GIT repository.\"", "...
31.461538
18.384615
def margin( self, axis=None, weighted=True, include_missing=False, include_transforms_for_dims=None, prune=False, include_mr_cat=False, ): """Get margin for the selected axis. the selected axis. For MR variables, this is the sum of the selected and non-selected slices. Args axis (int): Axis across the margin is calculated. If no axis is provided the margin is calculated across all axis. For Categoricals, Num, Datetime, and Text, this translates to sumation of all elements. Returns Calculated margin for the selected axis Example 1: >>> cube = CrunchCube(fixt_cat_x_cat) np.array([ [5, 2], [5, 3], ]) >>> cube.margin(axis=0) np.array([10, 5]) Example 2: >>> cube = CrunchCube(fixt_cat_x_num_x_datetime) np.array([ [[1, 1], [0, 0], [0, 0], [0, 0]], [[2, 1], [1, 1], [0, 0], [0, 0]], [[0, 0], [2, 3], [0, 0], [0, 0]], [[0, 0], [0, 0], [3, 2], [0, 0]], [[0, 0], [0, 0], [1, 1], [0, 1]] ]) >>> cube.margin(axis=0) np.array([ [3, 2], [3, 4], [4, 3], [0, 1], ]) """ table = self._counts(weighted).raw_cube_array new_axis = self._adjust_axis(axis) index = tuple( None if i in new_axis else slice(None) for i, _ in enumerate(table.shape) ) # Calculate denominator. Only include those H&S dimensions, across # which we DON'T sum. These H&S are needed because of the shape, when # dividing. Those across dims which are summed across MUST NOT be # included, because they would change the result. 
hs_dims = self._hs_dims_for_den(include_transforms_for_dims, axis) den = self._apply_subtotals( self._apply_missings(table, include_missing=include_missing), hs_dims ) # Apply correct mask (based on the as_array shape) arr = self._as_array( include_transforms_for_dims=hs_dims, include_missing=include_missing ) # ---prune array if pruning was requested--- if prune: arr = self._prune_body(arr, transforms=hs_dims) arr = self._drop_mr_cat_dims(arr, fix_valids=include_missing) if isinstance(arr, np.ma.core.MaskedArray): # Inflate the reduced version of the array, to match the # non-reduced version, for the purposes of creating the correct # mask. Create additional dimension (with no elements) where MR_CAT # dimensions should be. Don't inflate 0th dimension if it has only # a single element, because it's not being reduced # in self._drop_mr_cat_dims inflate_ind = tuple( ( None if ( d.dimension_type == DT.MR_CAT or i != 0 and (n <= 1 or len(d.valid_elements) <= 1) ) else slice(None) ) for i, (d, n) in enumerate(zip(self._all_dimensions, table.shape)) ) mask = np.logical_or(np.zeros(den.shape, dtype=bool), arr.mask[inflate_ind]) den = np.ma.masked_array(den, mask) if ( self.ndim != 1 or axis is None or axis == 0 and len(self._all_dimensions) == 1 ): # Special case for 1D cube wigh MR, for "Table" direction den = np.sum(den, axis=new_axis)[index] den = self._drop_mr_cat_dims( den, fix_valids=(include_missing or include_mr_cat) ) if den.shape[0] == 1 and len(den.shape) > 1 and self.ndim < 3: den = den.reshape(den.shape[1:]) return den
[ "def", "margin", "(", "self", ",", "axis", "=", "None", ",", "weighted", "=", "True", ",", "include_missing", "=", "False", ",", "include_transforms_for_dims", "=", "None", ",", "prune", "=", "False", ",", "include_mr_cat", "=", "False", ",", ")", ":", "...
33.03876
22.054264
def simxReadVisionSensor(clientID, sensorHandle, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' detectionState = ct.c_ubyte() auxValues = ct.POINTER(ct.c_float)() auxValuesCount = ct.POINTER(ct.c_int)() ret = c_ReadVisionSensor(clientID, sensorHandle, ct.byref(detectionState), ct.byref(auxValues), ct.byref(auxValuesCount), operationMode) auxValues2 = [] if ret == 0: s = 0 for i in range(auxValuesCount[0]): auxValues2.append(auxValues[s:s+auxValuesCount[i+1]]) s += auxValuesCount[i+1] #free C buffers c_ReleaseBuffer(auxValues) c_ReleaseBuffer(auxValuesCount) return ret, bool(detectionState.value!=0), auxValues2
[ "def", "simxReadVisionSensor", "(", "clientID", ",", "sensorHandle", ",", "operationMode", ")", ":", "detectionState", "=", "ct", ".", "c_ubyte", "(", ")", "auxValues", "=", "ct", ".", "POINTER", "(", "ct", ".", "c_float", ")", "(", ")", "auxValuesCount", ...
35.272727
25.181818
def get_terminal_size(): """ getTerminalSize() - get width and height of console - works on linux,os x,windows,cygwin(windows) originally retrieved from: http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python """ current_os = platform.system() tuple_xy = None if current_os == 'Windows': tuple_xy = _get_terminal_size_windows() if tuple_xy is None: tuple_xy = _get_terminal_size_tput() # needed for window's python in cygwin's xterm! if current_os in ['Linux', 'Darwin'] or current_os.startswith('CYGWIN'): tuple_xy = _get_terminal_size_linux() if tuple_xy is None: tuple_xy = (80, 25) # default value return tuple_xy
[ "def", "get_terminal_size", "(", ")", ":", "current_os", "=", "platform", ".", "system", "(", ")", "tuple_xy", "=", "None", "if", "current_os", "==", "'Windows'", ":", "tuple_xy", "=", "_get_terminal_size_windows", "(", ")", "if", "tuple_xy", "is", "None", "...
39
13.315789
def _exp_lt(self, t): """ Parameters ---------- t : float time to propagate Returns -------- exp_lt : numpy.array Array of values exp(lambda(i) * t), where (i) - alphabet index (the eigenvalue number). """ return np.exp(self.mu * t * self.eigenvals)
[ "def", "_exp_lt", "(", "self", ",", "t", ")", ":", "return", "np", ".", "exp", "(", "self", ".", "mu", "*", "t", "*", "self", ".", "eigenvals", ")" ]
21.6875
19.8125
def convert_yielded(yielded): """Convert a yielded object into a `.Future`. The default implementation accepts lists, dictionaries, and Futures. If the `~functools.singledispatch` library is available, this function may be extended to support additional types. For example:: @convert_yielded.register(asyncio.Future) def _(asyncio_future): return tornado.platform.asyncio.to_tornado_future(asyncio_future) .. versionadded:: 4.1 """ # Lists and dicts containing YieldPoints were handled earlier. if yielded is None: return moment elif isinstance(yielded, (list, dict)): return multi(yielded) elif is_future(yielded): return yielded elif isawaitable(yielded): return _wrap_awaitable(yielded) else: raise BadYieldError("yielded unknown object %r" % (yielded,))
[ "def", "convert_yielded", "(", "yielded", ")", ":", "# Lists and dicts containing YieldPoints were handled earlier.", "if", "yielded", "is", "None", ":", "return", "moment", "elif", "isinstance", "(", "yielded", ",", "(", "list", ",", "dict", ")", ")", ":", "retur...
34.12
20.28
def _get_mvar(cls, df, column, windows): """ get moving variance :param df: data :param column: column to calculate :param windows: collection of window of moving variance :return: None """ window = cls.get_only_one_positive_int(windows) column_name = '{}_{}_mvar'.format(column, window) df[column_name] = df[column].rolling( min_periods=1, window=window, center=False).var()
[ "def", "_get_mvar", "(", "cls", ",", "df", ",", "column", ",", "windows", ")", ":", "window", "=", "cls", ".", "get_only_one_positive_int", "(", "windows", ")", "column_name", "=", "'{}_{}_mvar'", ".", "format", "(", "column", ",", "window", ")", "df", "...
38.333333
13.5
def reraise(exc_type, message=None, *args, **kwargs): # pylint: disable=invalid-name """reraises an exception for exception translation. This is primarily used for when you immediately reraise an exception that is thrown in a library, so that your client will not have to depend on various exceptions defined in the library implementation that is being abstracted. The advantage of this helper function is somewhat preserve traceback information although it is polluted by the reraise frame. Example Code: def A(): raise Exception('Whoops') def main(): try: A() except Exception as e: exceptions.reraise(ValueError) main() Traceback (most recent call last): File "exception.py", line 53, in <module> main() File "exception.py", line 49, in main reraise(ValueError) File "exception.py", line 47, in main A() File "exception.py", line 42, in A raise Exception('Whoops') ValueError: line 49 When this code is run, the additional stack frames for calling A() and raising within A() are printed out in exception, whereas a bare exception translation would lose this information. As long as you ignore the reraise stack frame, the stack trace is okay looking. Generally this can be fixed by hacking on CPython to allow modification of traceback objects ala https://github.com/mitsuhiko/jinja2/blob/master/jinja2/debug.py, but this is fixed in Python 3 anyways and that method is the definition of hackery. Args: exc_type: (Exception) Exception class to create. message: (str) Optional message to place in exception instance. Usually not needed as the original exception probably has a message that will be printed out in the modified stacktrace. *args: Args to pass to exception constructor. **kwargs: Kwargs to pass to exception constructor. """ last_lineno = inspect.currentframe().f_back.f_lineno line_msg = 'line %s: ' % last_lineno if message: line_msg += str(message) raise exc_type(line_msg, *args, **kwargs).raise_with_traceback(sys.exc_info()[2])
[ "def", "reraise", "(", "exc_type", ",", "message", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=invalid-name", "last_lineno", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", ".", "f_lineno", "line_msg", ...
39.018868
23.396226
def _set_group(self, v, load=False): """ Setter method for group, mapped from YANG variable /openflow_state/group (container) If this variable is read-only (config: false) in the source YANG file, then _set_group is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_group() directly. YANG Description: Group details """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=group.group, is_container='container', presence=False, yang_name="group", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'openflow-group', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow-operational', defining_module='brocade-openflow-operational', yang_type='container', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """group must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=group.group, is_container='container', presence=False, yang_name="group", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'openflow-group', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow-operational', defining_module='brocade-openflow-operational', yang_type='container', is_config=False)""", }) self.__group = t if hasattr(self, '_set'): self._set()
[ "def", "_set_group", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", ...
69.625
33.541667
async def _request(self, method: str, url: str, headers: Mapping[str, str], body: bytes = b'') -> Tuple[int, Mapping[str, str], bytes]: """Make an HTTP request."""
[ "async", "def", "_request", "(", "self", ",", "method", ":", "str", ",", "url", ":", "str", ",", "headers", ":", "Mapping", "[", "str", ",", "str", "]", ",", "body", ":", "bytes", "=", "b''", ")", "->", "Tuple", "[", "int", ",", "Mapping", "[", ...
64
25.666667
def __interact_copy( self, escape_character=None, input_filter=None, output_filter=None ): '''This is used by the interact() method. ''' while self.isalive(): if self.use_poll: r = poll_ignore_interrupts([self.child_fd, self.STDIN_FILENO]) else: r, w, e = select_ignore_interrupts( [self.child_fd, self.STDIN_FILENO], [], [] ) if self.child_fd in r: try: data = self.__interact_read(self.child_fd) except OSError as err: if err.args[0] == errno.EIO: # Linux-style EOF break raise if data == b'': # BSD-style EOF break if output_filter: data = output_filter(data) self._log(data, 'read') os.write(self.STDOUT_FILENO, data) if self.STDIN_FILENO in r: data = self.__interact_read(self.STDIN_FILENO) if input_filter: data = input_filter(data) i = -1 if escape_character is not None: i = data.rfind(escape_character) if i != -1: data = data[:i] if data: self._log(data, 'send') self.__interact_writen(self.child_fd, data) break self._log(data, 'send') self.__interact_writen(self.child_fd, data)
[ "def", "__interact_copy", "(", "self", ",", "escape_character", "=", "None", ",", "input_filter", "=", "None", ",", "output_filter", "=", "None", ")", ":", "while", "self", ".", "isalive", "(", ")", ":", "if", "self", ".", "use_poll", ":", "r", "=", "p...
36.840909
14.840909
def getname(exprresolver): """Get expression resolver name. Expression resolver name is given by the attribute __resolver__. If not exist, then the it is given by the attribute __name__. Otherwise, given by the __class__.__name__ attribute. :raises: TypeError if exprresolver is not callable.""" result = None if not callable(exprresolver): raise TypeError('Expression resolver must be a callable object.') result = getattr( exprresolver, __RESOLVER__, getattr( exprresolver, '__name__', getattr( exprresolver.__class__, '__name__' ) ) ) return result
[ "def", "getname", "(", "exprresolver", ")", ":", "result", "=", "None", "if", "not", "callable", "(", "exprresolver", ")", ":", "raise", "TypeError", "(", "'Expression resolver must be a callable object.'", ")", "result", "=", "getattr", "(", "exprresolver", ",", ...
27.782609
23.521739
def do_execute(self): """ The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str """ for f in self.resolve_option("files"): self._output.append(Token(f)) return None
[ "def", "do_execute", "(", "self", ")", ":", "for", "f", "in", "self", ".", "resolve_option", "(", "\"files\"", ")", ":", "self", ".", "_output", ".", "append", "(", "Token", "(", "f", ")", ")", "return", "None" ]
27
13
def load(self, cnf, metadata_construction=False): """ The base load method, loads the configuration :param cnf: The configuration as a dictionary :param metadata_construction: Is this only to be able to construct metadata. If so some things can be left out. :return: The Configuration instance """ _uc = self.unicode_convert for arg in COMMON_ARGS: if arg == "virtual_organization": if "virtual_organization" in cnf: for key, val in cnf["virtual_organization"].items(): self.vorg[key] = VirtualOrg(None, key, val) continue elif arg == "extension_schemas": # List of filename of modules representing the schemas if "extension_schemas" in cnf: for mod_file in cnf["extension_schemas"]: _mod = self._load(mod_file) self.extension_schema[_mod.NAMESPACE] = _mod try: setattr(self, arg, _uc(cnf[arg])) except KeyError: pass except TypeError: # Something that can't be a string setattr(self, arg, cnf[arg]) if "service" in cnf: for typ in ["aa", "idp", "sp", "pdp", "aq"]: try: self.load_special( cnf["service"][typ], typ, metadata_construction=metadata_construction) self.serves.append(typ) except KeyError: pass if "extensions" in cnf: self.do_extensions(cnf["extensions"]) self.load_complex(cnf, metadata_construction=metadata_construction) self.context = self.def_context return self
[ "def", "load", "(", "self", ",", "cnf", ",", "metadata_construction", "=", "False", ")", ":", "_uc", "=", "self", ".", "unicode_convert", "for", "arg", "in", "COMMON_ARGS", ":", "if", "arg", "==", "\"virtual_organization\"", ":", "if", "\"virtual_organization\...
39.086957
17.434783
def calculate_amr(cls, is_extended, from_id, to_id, rtr_only=False, rtr_too=True): """ Calculates AMR using CAN-ID range as parameter. :param bool is_extended: If True parameters from_id and to_id contains 29-bit CAN-ID. :param int from_id: First CAN-ID which should be received. :param int to_id: Last CAN-ID which should be received. :param bool rtr_only: If True only RTR-Messages should be received, and rtr_too will be ignored. :param bool rtr_too: If True CAN data frames and RTR-Messages should be received. :return: Value for AMR. :rtype: int """ return (((from_id ^ to_id) << 3) | (0x7 if rtr_too and not rtr_only else 0x3)) if is_extended else \ (((from_id ^ to_id) << 21) | (0x1FFFFF if rtr_too and not rtr_only else 0xFFFFF))
[ "def", "calculate_amr", "(", "cls", ",", "is_extended", ",", "from_id", ",", "to_id", ",", "rtr_only", "=", "False", ",", "rtr_too", "=", "True", ")", ":", "return", "(", "(", "(", "from_id", "^", "to_id", ")", "<<", "3", ")", "|", "(", "0x7", "if"...
58.928571
33.071429
def open_mfdataset(paths, decode_cf=True, decode_times=True, decode_coords=True, engine=None, gridfile=None, t_format=None, **kwargs): """ Open multiple files as a single dataset. This function is essentially the same as the :func:`xarray.open_mfdataset` function but (as the :func:`open_dataset`) supports additional decoding and the ``'gdal'`` engine. You can further specify the `t_format` parameter to get the time information from the files and use the results to concatenate the files Parameters ---------- %(xarray.open_mfdataset.parameters.no_engine)s %(open_dataset.parameters.engine)s %(get_tdata.parameters.t_format)s %(CFDecoder.decode_coords.parameters.gridfile)s Returns ------- xarray.Dataset The dataset that contains the variables from `filename_or_obj`""" if t_format is not None or engine == 'gdal': if isinstance(paths, six.string_types): paths = sorted(glob(paths)) if not paths: raise IOError('no files to open') if t_format is not None: time, paths = get_tdata(t_format, paths) kwargs['concat_dim'] = time if engine == 'gdal': from psyplot.gdal_store import GdalStore paths = list(map(GdalStore, paths)) engine = None kwargs['lock'] = False ds = xr.open_mfdataset( paths, decode_cf=decode_cf, decode_times=decode_times, engine=engine, decode_coords=False, **kwargs) if decode_cf: ds = CFDecoder.decode_ds(ds, gridfile=gridfile, decode_coords=decode_coords, decode_times=decode_times) ds.psy._concat_dim = kwargs.get('concat_dim') return ds
[ "def", "open_mfdataset", "(", "paths", ",", "decode_cf", "=", "True", ",", "decode_times", "=", "True", ",", "decode_coords", "=", "True", ",", "engine", "=", "None", ",", "gridfile", "=", "None", ",", "t_format", "=", "None", ",", "*", "*", "kwargs", ...
37.76087
17.5
def list(self, all_tenants=False, **search_opts): """Fetches a list of all floating IPs. :returns: List of FloatingIp object """ if not all_tenants: tenant_id = self.request.user.tenant_id # In Neutron, list_floatingips returns Floating IPs from # all tenants when the API is called with admin role, so # we need to filter them with tenant_id. search_opts['tenant_id'] = tenant_id port_search_opts = {'tenant_id': tenant_id} else: port_search_opts = {} fips = self.client.list_floatingips(**search_opts) fips = fips.get('floatingips') # Get port list to add instance_id to floating IP list # instance_id is stored in device_id attribute ports = port_list(self.request, **port_search_opts) port_dict = collections.OrderedDict([(p['id'], p) for p in ports]) for fip in fips: self._set_instance_info(fip, port_dict.get(fip['port_id'])) return [FloatingIp(fip) for fip in fips]
[ "def", "list", "(", "self", ",", "all_tenants", "=", "False", ",", "*", "*", "search_opts", ")", ":", "if", "not", "all_tenants", ":", "tenant_id", "=", "self", ".", "request", ".", "user", ".", "tenant_id", "# In Neutron, list_floatingips returns Floating IPs f...
45.73913
15.869565
def enumerate(self, mol): """Enumerate all possible tautomers and return them as a list. :param mol: The input molecule. :type mol: rdkit.Chem.rdchem.Mol :return: A list of all possible tautomers of the molecule. :rtype: list of rdkit.Chem.rdchem.Mol """ smiles = Chem.MolToSmiles(mol, isomericSmiles=True) tautomers = {smiles: copy.deepcopy(mol)} # Create a kekulized form of the molecule to match the SMARTS against kekulized = copy.deepcopy(mol) Chem.Kekulize(kekulized) kekulized = {smiles: kekulized} done = set() while len(tautomers) < self.max_tautomers: for tsmiles in sorted(tautomers): if tsmiles in done: continue for transform in self.transforms: for match in kekulized[tsmiles].GetSubstructMatches(transform.tautomer): # log.debug('Matched rule: %s to %s for %s', transform.name, tsmiles, match) # Create a copy of in the input molecule so we can modify it # Use kekule form so bonds are explicitly single/double instead of aromatic product = copy.deepcopy(kekulized[tsmiles]) # Remove a hydrogen from the first matched atom and add one to the last first = product.GetAtomWithIdx(match[0]) last = product.GetAtomWithIdx(match[-1]) # log.debug('%s: H%s -> H%s' % (first.GetSymbol(), first.GetTotalNumHs(), first.GetTotalNumHs() - 1)) # log.debug('%s: H%s -> H%s' % (last.GetSymbol(), last.GetTotalNumHs(), last.GetTotalNumHs() + 1)) first.SetNumExplicitHs(max(0, first.GetTotalNumHs() - 1)) last.SetNumExplicitHs(last.GetTotalNumHs() + 1) # Remove any implicit hydrogens from the first and last atoms now we have set the count explicitly first.SetNoImplicit(True) last.SetNoImplicit(True) # Adjust bond orders for bi, pair in enumerate(pairwise(match)): if transform.bonds: # Set the resulting bond types as manually specified in the transform # log.debug('%s-%s: %s -> %s' % (product.GetAtomWithIdx(pair[0]).GetSymbol(), product.GetAtomWithIdx(pair[1]).GetSymbol(), product.GetBondBetweenAtoms(*pair).GetBondType(), transform.bonds[bi])) 
product.GetBondBetweenAtoms(*pair).SetBondType(transform.bonds[bi]) else: # If no manually specified bond types, just swap single and double bonds current_bond_type = product.GetBondBetweenAtoms(*pair).GetBondType() product.GetBondBetweenAtoms(*pair).SetBondType(BondType.DOUBLE if current_bond_type == BondType.SINGLE else BondType.SINGLE) # log.debug('%s-%s: %s -> %s' % (product.GetAtomWithIdx(pair[0]).GetSymbol(), product.GetAtomWithIdx(pair[1]).GetSymbol(), current_bond_type, product.GetBondBetweenAtoms(*pair).GetBondType())) # Adjust charges if transform.charges: for ci, idx in enumerate(match): atom = product.GetAtomWithIdx(idx) # log.debug('%s: C%s -> C%s' % (atom.GetSymbol(), atom.GetFormalCharge(), atom.GetFormalCharge() + transform.charges[ci])) atom.SetFormalCharge(atom.GetFormalCharge() + transform.charges[ci]) try: Chem.SanitizeMol(product) smiles = Chem.MolToSmiles(product, isomericSmiles=True) log.debug('Applied rule: %s to %s', transform.name, tsmiles) if smiles not in tautomers: log.debug('New tautomer produced: %s' % smiles) kekulized_product = copy.deepcopy(product) Chem.Kekulize(kekulized_product) tautomers[smiles] = product kekulized[smiles] = kekulized_product else: log.debug('Previous tautomer produced again: %s' % smiles) except ValueError: log.debug('ValueError Applying rule: %s', transform.name) done.add(tsmiles) if len(tautomers) == len(done): break else: log.warning('Tautomer enumeration stopped at maximum %s', self.max_tautomers) # Clean up stereochemistry for tautomer in tautomers.values(): Chem.AssignStereochemistry(tautomer, force=True, cleanIt=True) for bond in tautomer.GetBonds(): if bond.GetBondType() == BondType.DOUBLE and bond.GetStereo() > BondStereo.STEREOANY: begin = bond.GetBeginAtomIdx() end = bond.GetEndAtomIdx() for othertautomer in tautomers.values(): if not othertautomer.GetBondBetweenAtoms(begin, end).GetBondType() == BondType.DOUBLE: neighbours = tautomer.GetAtomWithIdx(begin).GetBonds() + 
tautomer.GetAtomWithIdx(end).GetBonds() for otherbond in neighbours: if otherbond.GetBondDir() in {BondDir.ENDUPRIGHT, BondDir.ENDDOWNRIGHT}: otherbond.SetBondDir(BondDir.NONE) Chem.AssignStereochemistry(tautomer, force=True, cleanIt=True) log.debug('Removed stereochemistry from unfixed double bond') break return list(tautomers.values())
[ "def", "enumerate", "(", "self", ",", "mol", ")", ":", "smiles", "=", "Chem", ".", "MolToSmiles", "(", "mol", ",", "isomericSmiles", "=", "True", ")", "tautomers", "=", "{", "smiles", ":", "copy", ".", "deepcopy", "(", "mol", ")", "}", "# Create a keku...
68.556818
33.022727
def file(self, file=None): """Saves the dump in a file-like object in text mode. :param file: :obj:`None` or a file-like object. :return: a file-like object If :paramref:`file` is :obj:`None`, a new :class:`io.StringIO` is returned. If :paramref:`file` is not :obj:`None` it should be a file-like object. The content is written to the file. After writing, the file's read/write position points behind the dumped content. """ if file is None: file = StringIO() self._file(file) return file
[ "def", "file", "(", "self", ",", "file", "=", "None", ")", ":", "if", "file", "is", "None", ":", "file", "=", "StringIO", "(", ")", "self", ".", "_file", "(", "file", ")", "return", "file" ]
34.294118
21
def peaks(data, method='max', axis='time', limits=None): """Return the values of an index where the data is at max or min Parameters ---------- method : str, optional 'max' or 'min' axis : str, optional the axis where you want to detect the peaks limits : tuple of two values, optional the lowest and highest limits where to search for the peaks data : instance of Data one of the datatypes Returns ------- instance of Data with one dimension less that the input data. The actual values in the data can be not-numberic, for example, if you look for the max value across electrodes Notes ----- This function is useful when you want to find the frequency value at which the power is the largest, or to find the time point at which the signal is largest, or the channel at which the activity is largest. """ idx_axis = data.index_of(axis) output = data._copy() output.axis.pop(axis) for trl in range(data.number_of('trial')): values = data.axis[axis][trl] dat = data(trial=trl) if limits is not None: limits = (values < limits[0]) | (values > limits[1]) idx = [slice(None)] * len(data.list_of_axes) idx[idx_axis] = limits dat[idx] = nan if method == 'max': peak_val = nanargmax(dat, axis=idx_axis) elif method == 'min': peak_val = nanargmin(dat, axis=idx_axis) output.data[trl] = values[peak_val] return output
[ "def", "peaks", "(", "data", ",", "method", "=", "'max'", ",", "axis", "=", "'time'", ",", "limits", "=", "None", ")", ":", "idx_axis", "=", "data", ".", "index_of", "(", "axis", ")", "output", "=", "data", ".", "_copy", "(", ")", "output", ".", ...
30.54
20.92
def ecb(base, target): """Parse data from European Central Bank.""" api_url = 'http://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml' resp = requests.get(api_url, timeout=1) text = resp.text def _find_rate(symbol): if symbol == 'EUR': return decimal.Decimal(1.00) m = re.findall(r"currency='%s' rate='([0-9\.]+)'" % symbol, text) return decimal.Decimal(m[0]) return _find_rate(target) / _find_rate(base)
[ "def", "ecb", "(", "base", ",", "target", ")", ":", "api_url", "=", "'http://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml'", "resp", "=", "requests", ".", "get", "(", "api_url", ",", "timeout", "=", "1", ")", "text", "=", "resp", ".", "text", "def", ...
35.384615
17.538462
def get_version(dev_version=False): """Generates a version string. Arguments: dev_version: Generate a verbose development version from git commits. Examples: 1.1 1.1.dev43 # If 'dev_version' was passed. """ if dev_version: version = git_dev_version() if not version: raise RuntimeError("Could not generate dev version from git.") return version return "1!%d.%d" % (MAJOR, MINOR)
[ "def", "get_version", "(", "dev_version", "=", "False", ")", ":", "if", "dev_version", ":", "version", "=", "git_dev_version", "(", ")", "if", "not", "version", ":", "raise", "RuntimeError", "(", "\"Could not generate dev version from git.\"", ")", "return", "vers...
24.944444
21.666667
def patch_clean_fields(model): """ Patch clean_fields method to handle different form types submission. """ old_clean_fields = model.clean_fields def new_clean_fields(self, exclude=None): if hasattr(self, '_mt_form_pending_clear'): # Some form translation fields has been marked as clearing value. # Check if corresponding translated field was also saved (not excluded): # - if yes, it seems like form for MT-unaware app. Ignore clearing (left value from # translated field unchanged), as if field was omitted from form # - if no, then proceed as normally: clear the field for field_name, value in self._mt_form_pending_clear.items(): field = self._meta.get_field(field_name) orig_field_name = field.translated_field.name if orig_field_name in exclude: field.save_form_data(self, value, check=False) delattr(self, '_mt_form_pending_clear') old_clean_fields(self, exclude) model.clean_fields = new_clean_fields
[ "def", "patch_clean_fields", "(", "model", ")", ":", "old_clean_fields", "=", "model", ".", "clean_fields", "def", "new_clean_fields", "(", "self", ",", "exclude", "=", "None", ")", ":", "if", "hasattr", "(", "self", ",", "'_mt_form_pending_clear'", ")", ":", ...
51.619048
19.619048
def add_listener(self, on_message=None): """ Subscribes to this topic. When someone publishes a message on this topic, on_message() function is called if provided. :param on_message: (Function), function to be called when a message is published. :return: (str), a registration id which is used as a key to remove the listener. """ request = topic_add_message_listener_codec.encode_request(self.name, False) def handle(item, publish_time, uuid): member = self._client.cluster.get_member_by_uuid(uuid) item_event = TopicMessage(self.name, item, publish_time, member, self._to_object) on_message(item_event) return self._start_listening(request, lambda m: topic_add_message_listener_codec.handle(m, handle), lambda r: topic_add_message_listener_codec.decode_response(r)['response'], self.partition_key)
[ "def", "add_listener", "(", "self", ",", "on_message", "=", "None", ")", ":", "request", "=", "topic_add_message_listener_codec", ".", "encode_request", "(", "self", ".", "name", ",", "False", ")", "def", "handle", "(", "item", ",", "publish_time", ",", "uui...
52.789474
31.526316
def remove_line(self, section): """Base implementation just pops the item from collection. Re-implements to add global behaviour """ self.beginResetModel() self.collection.pop(section) self.endResetModel()
[ "def", "remove_line", "(", "self", ",", "section", ")", ":", "self", ".", "beginResetModel", "(", ")", "self", ".", "collection", ".", "pop", "(", "section", ")", "self", ".", "endResetModel", "(", ")" ]
35.285714
5.714286
def revoke_cert( ca_name, CN, cacert_path=None, ca_filename=None, cert_path=None, cert_filename=None, crl_file=None, digest='sha256', ): ''' Revoke a certificate. .. versionadded:: 2015.8.0 ca_name Name of the CA. CN Common name matching the certificate signing request. cacert_path Absolute path to ca certificates root directory. ca_filename Alternative filename for the CA. cert_path Path to the cert file. cert_filename Alternative filename for the certificate, useful when using special characters in the CN. crl_file Full path to the CRL file. digest The message digest algorithm. Must be a string describing a digest algorithm supported by OpenSSL (by EVP_get_digestbyname, specifically). For example, "md5" or "sha1". Default: 'sha256' CLI Example: .. code-block:: bash salt '*' tls.revoke_cert ca_name='koji' \ ca_filename='ca' \ crl_file='/etc/openvpn/team1/crl.pem' ''' set_ca_path(cacert_path) ca_dir = '{0}/{1}'.format(cert_base_path(), ca_name) if ca_filename is None: ca_filename = '{0}_ca_cert'.format(ca_name) if cert_path is None: cert_path = '{0}/{1}/certs'.format(_cert_base_path(), ca_name) if cert_filename is None: cert_filename = '{0}'.format(CN) try: with salt.utils.files.fopen('{0}/{1}/{2}.crt'.format( cert_base_path(), ca_name, ca_filename)) as fp_: ca_cert = OpenSSL.crypto.load_certificate( OpenSSL.crypto.FILETYPE_PEM, fp_.read() ) with salt.utils.files.fopen('{0}/{1}/{2}.key'.format( cert_base_path(), ca_name, ca_filename)) as fp_: ca_key = OpenSSL.crypto.load_privatekey( OpenSSL.crypto.FILETYPE_PEM, fp_.read() ) except IOError: return 'There is no CA named "{0}"'.format(ca_name) client_cert = _read_cert('{0}/{1}.crt'.format(cert_path, cert_filename)) if client_cert is None: return 'There is no client certificate named "{0}"'.format(CN) index_file, expire_date, serial_number, subject = _get_basic_info( ca_name, client_cert, ca_dir) index_serial_subject = '{0}\tunknown\t{1}'.format( serial_number, subject) index_v_data = 'V\t{0}\t\t{1}'.format( 
expire_date, index_serial_subject) index_r_data_pattern = re.compile( r"R\t" + expire_date + r"\t\d{12}Z\t" + re.escape(index_serial_subject)) index_r_data = 'R\t{0}\t{1}\t{2}'.format( expire_date, _four_digit_year_to_two_digit(datetime.utcnow()), index_serial_subject) ret = {} with salt.utils.files.fopen(index_file) as fp_: for line in fp_: line = salt.utils.stringutils.to_unicode(line) if index_r_data_pattern.match(line): revoke_date = line.split('\t')[2] try: datetime.strptime(revoke_date, two_digit_year_fmt) return ('"{0}/{1}.crt" was already revoked, ' 'serial number: {2}').format( cert_path, cert_filename, serial_number ) except ValueError: ret['retcode'] = 1 ret['comment'] = ("Revocation date '{0}' does not match" "format '{1}'".format( revoke_date, two_digit_year_fmt)) return ret elif index_serial_subject in line: __salt__['file.replace']( index_file, index_v_data, index_r_data, backup=False) break crl = OpenSSL.crypto.CRL() with salt.utils.files.fopen(index_file) as fp_: for line in fp_: line = salt.utils.stringutils.to_unicode(line) if line.startswith('R'): fields = line.split('\t') revoked = OpenSSL.crypto.Revoked() revoked.set_serial(salt.utils.stringutils.to_bytes(fields[3])) revoke_date_2_digit = datetime.strptime(fields[2], two_digit_year_fmt) revoked.set_rev_date(salt.utils.stringutils.to_bytes( revoke_date_2_digit.strftime(four_digit_year_fmt) )) crl.add_revoked(revoked) crl_text = crl.export(ca_cert, ca_key, digest=salt.utils.stringutils.to_bytes(digest)) if crl_file is None: crl_file = '{0}/{1}/crl.pem'.format( _cert_base_path(), ca_name ) if os.path.isdir(crl_file): ret['retcode'] = 1 ret['comment'] = 'crl_file "{0}" is an existing directory'.format( crl_file) return ret with salt.utils.files.fopen(crl_file, 'w') as fp_: fp_.write(salt.utils.stringutils.to_str(crl_text)) return ('Revoked Certificate: "{0}/{1}.crt", ' 'serial number: {2}').format( cert_path, cert_filename, serial_number )
[ "def", "revoke_cert", "(", "ca_name", ",", "CN", ",", "cacert_path", "=", "None", ",", "ca_filename", "=", "None", ",", "cert_path", "=", "None", ",", "cert_filename", "=", "None", ",", "crl_file", "=", "None", ",", "digest", "=", "'sha256'", ",", ")", ...
30.424581
20.703911
def get_analysis(self, current_class): """ Returns the :class:`~androguard.core.analysis.analysis.Analysis` object which contains the `current_class`. :param current_class: The class to search for :type current_class: androguard.core.bytecodes.dvm.ClassDefItem :rtype: androguard.core.analysis.analysis.Analysis """ for digest in self.analyzed_vms: dx = self.analyzed_vms[digest] if dx.is_class_present(current_class.get_name()): return dx return None
[ "def", "get_analysis", "(", "self", ",", "current_class", ")", ":", "for", "digest", "in", "self", ".", "analyzed_vms", ":", "dx", "=", "self", ".", "analyzed_vms", "[", "digest", "]", "if", "dx", ".", "is_class_present", "(", "current_class", ".", "get_na...
39.357143
14.642857
def get_app_token(self, scope): """Gets the app auth token""" app_token = self.__get_app_token(scope) if app_token: return app_token if self.__cache is not None: token = self.__cache.get(self.__app_token_cache_key(scope)) if token: return token self.__app_token = None payload = { 'grant_type': 'client_credentials', 'scope': scope } url = settings.AUTH_ENDPOINT + '/token' r = post(url, data=payload, auth=HTTPBasicAuth(self.__client_id, self.__client_secret), verify=self.__verify_cert) if r.status_code == 200: data = r.json() self.__set_app_token(scope, data[u'access_token']) if self.__cache is not None: self.__cache.set(self.__app_token_cache_key(scope), str(data[u'access_token']), data[u'expires_in'] - 300) app_token = self.__get_app_token(scope) if not app_token: raise MxitAPIException("Failed to retrieve app token for '%s' scope" % scope) return app_token
[ "def", "get_app_token", "(", "self", ",", "scope", ")", ":", "app_token", "=", "self", ".", "__get_app_token", "(", "scope", ")", "if", "app_token", ":", "return", "app_token", "if", "self", ".", "__cache", "is", "not", "None", ":", "token", "=", "self",...
32.028571
24.228571
def _post_action(self, action): """Optionally performs gripper visualization after the actions.""" ret = super()._post_action(action) self._gripper_visualization() return ret
[ "def", "_post_action", "(", "self", ",", "action", ")", ":", "ret", "=", "super", "(", ")", ".", "_post_action", "(", "action", ")", "self", ".", "_gripper_visualization", "(", ")", "return", "ret" ]
40.4
7.2
def end(self): """End of the Glances stats.""" # Close export modules for e in self._exports: self._exports[e].exit() # Close plugins for p in self._plugins: self._plugins[p].exit()
[ "def", "end", "(", "self", ")", ":", "# Close export modules", "for", "e", "in", "self", ".", "_exports", ":", "self", ".", "_exports", "[", "e", "]", ".", "exit", "(", ")", "# Close plugins", "for", "p", "in", "self", ".", "_plugins", ":", "self", "...
29.75
10.125
def response_as_single(self, copy=0): """ convert the response map to a single data frame with Multi-Index columns """ arr = [] for sid, frame in self.response.iteritems(): if copy: frame = frame.copy() 'security' not in frame and frame.insert(0, 'security', sid) arr.append(frame.reset_index().set_index(['date', 'security'])) return concat(arr).unstack()
[ "def", "response_as_single", "(", "self", ",", "copy", "=", "0", ")", ":", "arr", "=", "[", "]", "for", "sid", ",", "frame", "in", "self", ".", "response", ".", "iteritems", "(", ")", ":", "if", "copy", ":", "frame", "=", "frame", ".", "copy", "(...
48
14.888889
def validate(wire, keyname, secret, now, request_mac, tsig_start, tsig_rdata, tsig_rdlen, ctx=None, multi=False, first=True): """Validate the specified TSIG rdata against the other input parameters. @raises FormError: The TSIG is badly formed. @raises BadTime: There is too much time skew between the client and the server. @raises BadSignature: The TSIG signature did not validate @rtype: hmac.HMAC object""" (adcount,) = struct.unpack("!H", wire[10:12]) if adcount == 0: raise dns.exception.FormError adcount -= 1 new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] current = tsig_rdata (aname, used) = dns.name.from_wire(wire, current) current = current + used (upper_time, lower_time, fudge, mac_size) = \ struct.unpack("!HIHH", wire[current:current + 10]) time = ((upper_time + 0L) << 32) + (lower_time + 0L) current += 10 mac = wire[current:current + mac_size] current += mac_size (original_id, error, other_size) = \ struct.unpack("!HHH", wire[current:current + 6]) current += 6 other_data = wire[current:current + other_size] current += other_size if current != tsig_rdata + tsig_rdlen: raise dns.exception.FormError if error != 0: if error == BADSIG: raise PeerBadSignature elif error == BADKEY: raise PeerBadKey elif error == BADTIME: raise PeerBadTime elif error == BADTRUNC: raise PeerBadTruncation else: raise PeerError('unknown TSIG error code %d' % error) time_low = time - fudge time_high = time + fudge if now < time_low or now > time_high: raise BadTime (junk, our_mac, ctx) = sign(new_wire, keyname, secret, time, fudge, original_id, error, other_data, request_mac, ctx, multi, first, aname) if (our_mac != mac): raise BadSignature return ctx
[ "def", "validate", "(", "wire", ",", "keyname", ",", "secret", ",", "now", ",", "request_mac", ",", "tsig_start", ",", "tsig_rdata", ",", "tsig_rdlen", ",", "ctx", "=", "None", ",", "multi", "=", "False", ",", "first", "=", "True", ")", ":", "(", "ad...
38.403846
16.807692
def set_rudder(self, angle): ''' Set the angle of the rudder to be `angle` degrees. :param angle: rudder angle :type angle: float between -90 and 90 ''' angle = float(angle) request = self.boatd.post({'value': float(angle)}, '/rudder') return request.get('result')
[ "def", "set_rudder", "(", "self", ",", "angle", ")", ":", "angle", "=", "float", "(", "angle", ")", "request", "=", "self", ".", "boatd", ".", "post", "(", "{", "'value'", ":", "float", "(", "angle", ")", "}", ",", "'/rudder'", ")", "return", "requ...
32
18.4
def contains_version(self, version): """Returns True if version is contained in this range.""" if len(self.bounds) < 5: # not worth overhead of binary search for bound in self.bounds: i = bound.version_containment(version) if i == 0: return True if i == -1: return False else: _, contains = self._contains_version(version) return contains return False
[ "def", "contains_version", "(", "self", ",", "version", ")", ":", "if", "len", "(", "self", ".", "bounds", ")", "<", "5", ":", "# not worth overhead of binary search", "for", "bound", "in", "self", ".", "bounds", ":", "i", "=", "bound", ".", "version_conta...
33.733333
13.266667
def find(cls, api_name): """ Find or create an API model object by name """ if api_name in cls.apis_by_name: return cls.apis_by_name[api_name] api = cls(api_name) api._fetch_from_aws() if api.exists_in_aws: api._fetch_resources() cls.apis_by_name[api_name] = api return api
[ "def", "find", "(", "cls", ",", "api_name", ")", ":", "if", "api_name", "in", "cls", ".", "apis_by_name", ":", "return", "cls", ".", "apis_by_name", "[", "api_name", "]", "api", "=", "cls", "(", "api_name", ")", "api", ".", "_fetch_from_aws", "(", ")",...
34.4
8.4
def _fftconv(a, b, axes=(0, 1)): """Patched version of :func:`sporco.linalg.fftconv`.""" if cp.isrealobj(a) and cp.isrealobj(b): fft = cp.fft.rfftn ifft = cp.fft.irfftn else: fft = cp.fft.fftn ifft = cp.fft.ifftn dims = cp.maximum(cp.asarray([a.shape[i] for i in axes]), cp.asarray([b.shape[i] for i in axes])) dims = [int(d) for d in dims] af = fft(a, dims, axes) bf = fft(b, dims, axes) return ifft(af * bf, dims, axes)
[ "def", "_fftconv", "(", "a", ",", "b", ",", "axes", "=", "(", "0", ",", "1", ")", ")", ":", "if", "cp", ".", "isrealobj", "(", "a", ")", "and", "cp", ".", "isrealobj", "(", "b", ")", ":", "fft", "=", "cp", ".", "fft", ".", "rfftn", "ifft", ...
32.933333
14.333333
def to_json(self, data): """ Converts the given object to a pretty-formatted JSON string :param data: the object to convert to JSON :return: A pretty-formatted JSON string """ # Don't forget the empty line at the end of the file return ( json.dumps( data, sort_keys=True, indent=4, separators=(",", ": "), default=self.json_converter, ) + "\n" )
[ "def", "to_json", "(", "self", ",", "data", ")", ":", "# Don't forget the empty line at the end of the file", "return", "(", "json", ".", "dumps", "(", "data", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ",", "separators", "=", "(", "\",\"", ",",...
28.277778
16.055556
def reopen(args): """reopens a closed poll.""" if not args.isadmin: return "Nope, not gonna do it." msg = args.msg.split() if not msg: return "Syntax: !poll reopen <pollnum>" if not msg[0].isdigit(): return "Not a valid positve integer." pid = int(msg[0]) poll = get_open_poll(args.session, pid) if poll is None: return "That poll doesn't exist or has been deleted!" poll.active = 1 return "Poll %d reopened!" % pid
[ "def", "reopen", "(", "args", ")", ":", "if", "not", "args", ".", "isadmin", ":", "return", "\"Nope, not gonna do it.\"", "msg", "=", "args", ".", "msg", ".", "split", "(", ")", "if", "not", "msg", ":", "return", "\"Syntax: !poll reopen <pollnum>\"", "if", ...
31.533333
12.733333
def _handle_resps(self, root): """Returns `root` with a resp list added to the TEI header and @resp values changed to references.""" resps, bearers = self.get_resps(root) if not resps: return root file_desc = root.xpath( '/tei:teiCorpus/tei:teiHeader/tei:fileDesc', namespaces=constants.NAMESPACES)[0] edition_stmt = etree.Element(TEI + 'editionStmt') file_desc.insert(1, edition_stmt) for index, (resp_resp, resp_name) in enumerate(resps): resp_stmt = etree.SubElement(edition_stmt, TEI + 'respStmt') xml_id = 'resp{}'.format(index+1) resp_stmt.set(constants.XML + 'id', xml_id) resp = etree.SubElement(resp_stmt, TEI + 'resp') resp.text = resp_resp name = etree.SubElement(resp_stmt, TEI + 'name') name.text = resp_name resp_data = '{{{}|{}}}'.format(resp_resp, resp_name) self._update_refs(root, bearers, 'resp', resp_data, xml_id) return root
[ "def", "_handle_resps", "(", "self", ",", "root", ")", ":", "resps", ",", "bearers", "=", "self", ".", "get_resps", "(", "root", ")", "if", "not", "resps", ":", "return", "root", "file_desc", "=", "root", ".", "xpath", "(", "'/tei:teiCorpus/tei:teiHeader/t...
47.318182
13.863636
def aes_encrypt(key: bytes, plain_text: bytes) -> bytes: """ AES-GCM encryption Parameters ---------- key: bytes AES session key, which derived from two secp256k1 keys plain_text: bytes Plain text to encrypt Returns ------- bytes nonce(16 bytes) + tag(16 bytes) + encrypted data """ aes_cipher = AES.new(key, AES_CIPHER_MODE) encrypted, tag = aes_cipher.encrypt_and_digest(plain_text) cipher_text = bytearray() cipher_text.extend(aes_cipher.nonce) cipher_text.extend(tag) cipher_text.extend(encrypted) return bytes(cipher_text)
[ "def", "aes_encrypt", "(", "key", ":", "bytes", ",", "plain_text", ":", "bytes", ")", "->", "bytes", ":", "aes_cipher", "=", "AES", ".", "new", "(", "key", ",", "AES_CIPHER_MODE", ")", "encrypted", ",", "tag", "=", "aes_cipher", ".", "encrypt_and_digest", ...
24.958333
19.125
def upload_job_chunk_list(self, upload_job_id, **kwargs): # noqa: E501 """List all metadata for uploaded chunks # noqa: E501 List all metadata for uploaded chunks # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.upload_job_chunk_list(upload_job_id, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str upload_job_id: Upload job (required) :param int limit: How many metadata items for uploaded chunks to retrieve :param str order: ASC or DESC :param str after: The ID of the the item after which to retrieve the next page :param str include: A comma-separated list of data fields to return. Currently supported: total_count :param str filter: URL-encoded query string parameter to filter returned data `?filter={URL-encoded query string}` ###### Filterable fields: The table lists all the fields that can be filtered on with certain filters: <table> <thead> <tr> <th>Field</th> <th>= / __eq / __neq</th> <th>__in / __nin</th> <th>__lte / __gte</th> <tr> <thead> <tbody> <tr> <td>created_at</td> <td>✓</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>etag</td> <td>✓</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>id</td> <td>✓</td> <td>✓</td> <td>&nbsp;</td> </tr> <tr> <td>updated_at</td> <td>✓</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>status</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>hash</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>length</td> <td>✓</td> <td>✓</td> <td>✓</td> </tr> </tbody> </table> &nbsp; The query string is made up of key-value pairs separated by ampersands. For example, this query: `key1=value1&key2=value2&key3=value3` would be URL-encoded as: `?filter=key1__eq%3Dvalue1%26key2__eq%3Dvalue2%26key3__eq%3Dvalue3` **Filtering by properties** `status__eq=in_progress` **Filtering on date-time fields** Date-time fields should be specified in UTC RFC3339 format, `YYYY-MM-DDThh:mm:ss.msZ`. 
There are three permitted variations: * UTC RFC3339 with milliseconds. Example: `2016-11-30T16:25:12.1234Z` * UTC RFC3339 without milliseconds. Example: `2016-11-30T16:25:12Z` * UTC RFC3339 shortened without milliseconds and punctuation. Example: `20161130T162512Z` Date-time filtering supports three operators: * equality by appending `__eq` to the field name * greater than or equal to by appending `__gte` to the field name * less than or equal to by appending `__lte` to the field name `{field name}[|__eq|__lte|__gte]={UTC RFC3339 date-time}` Time ranges may be specified by including both the `__gte` and `__lte` forms in the filter. For example: `created_at__gte=2016-11-30T16:25:12.1234Z&created_at__lte=2016-12-30T00:00:00Z` **Filtering on multiple fields** `status__eq=in_progress&created_at__gte=2016-11-30T16:25:12.1234Z&created_at__lte=2016-12-30T00:00:00Z` **Filtering with filter operators** String field filtering supports the following operators: * equality: `__eq` * non-equality: `__neq` * in : `__in` * not in: `__nin` For `__in` and `__nin` filters list of parameters must be comma-separated: `status__in=in_progress,success` :return: UploadChunkInfoPage If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.upload_job_chunk_list_with_http_info(upload_job_id, **kwargs) # noqa: E501 else: (data) = self.upload_job_chunk_list_with_http_info(upload_job_id, **kwargs) # noqa: E501 return data
[ "def", "upload_job_chunk_list", "(", "self", ",", "upload_job_id", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ...
150.692308
122.346154
def _realign(seq, precursor, start): """ The actual fn that will realign the sequence """ error = set() pattern_addition = [[1, 1, 0], [1, 0, 1], [0, 1, 0], [0, 1, 1], [0, 0, 1], [1, 1, 1]] for pos in range(0, len(seq)): if seq[pos] != precursor[(start + pos)]: error.add(pos) subs, add = [], [] for e in error: if e < len(seq) - 3: subs.append([e, seq[e], precursor[start + e]]) pattern, error_add = [], [] for e in range(len(seq) - 3, len(seq)): if e in error: pattern.append(1) error_add.append(e) else: pattern.append(0) for p in pattern_addition: if pattern == p: add = seq[error_add[0]:] break if not add and error_add: for e in error_add: subs.append([e, seq[e], precursor[start + e]]) return subs, add
[ "def", "_realign", "(", "seq", ",", "precursor", ",", "start", ")", ":", "error", "=", "set", "(", ")", "pattern_addition", "=", "[", "[", "1", ",", "1", ",", "0", "]", ",", "[", "1", ",", "0", ",", "1", "]", ",", "[", "0", ",", "1", ",", ...
28.322581
16.258065
def _retrieve_station_history(self, station_ID, limit, interval): """ Helper method for station_X_history functions. """ params = {'id': station_ID, 'type': interval, 'lang': self._language} if limit is not None: params['cnt'] = limit uri = http_client.HttpClient.to_url(STATION_WEATHER_HISTORY_URL, self._API_key, self._subscription_type, self._use_ssl) _, json_data = self._wapi.cacheable_get_json(uri, params=params) station_history = \ self._parsers['station_history'].parse_JSON(json_data) if station_history is not None: station_history.set_station_ID(station_ID) station_history.set_interval(interval) return station_history
[ "def", "_retrieve_station_history", "(", "self", ",", "station_ID", ",", "limit", ",", "interval", ")", ":", "params", "=", "{", "'id'", ":", "station_ID", ",", "'type'", ":", "interval", ",", "'lang'", ":", "self", ".", "_language", "}", "if", "limit", ...
48.555556
16.444444
def _decode_data(self, data): """`data` is array.array""" self.major = data[0] if data[1] is 0xff: self.minor = data[1] elif data[1] <= 0x99: self.minor = int(data[1:2].tostring().decode('bcd+')) else: raise DecodingError()
[ "def", "_decode_data", "(", "self", ",", "data", ")", ":", "self", ".", "major", "=", "data", "[", "0", "]", "if", "data", "[", "1", "]", "is", "0xff", ":", "self", ".", "minor", "=", "data", "[", "1", "]", "elif", "data", "[", "1", "]", "<="...
29.1
15.4
def clean_locks(root=None): ''' Remove unused locks that do not currently (with regard to repositories used) lock any package. root Operate on a different root directory. CLI Example: .. code-block:: bash salt '*' pkg.clean_locks ''' LCK = "removed" out = {LCK: 0} locks = os.path.join(root, os.path.relpath(LOCKS, os.path.sep)) if root else LOCKS if not os.path.exists(locks): return out for node in __zypper__(root=root).xml.call('cl').getElementsByTagName("message"): text = node.childNodes[0].nodeValue.lower() if text.startswith(LCK): out[LCK] = text.split(" ")[1] break return out
[ "def", "clean_locks", "(", "root", "=", "None", ")", ":", "LCK", "=", "\"removed\"", "out", "=", "{", "LCK", ":", "0", "}", "locks", "=", "os", ".", "path", ".", "join", "(", "root", ",", "os", ".", "path", ".", "relpath", "(", "LOCKS", ",", "o...
25.259259
25.333333
def trace_filter(self, from_block: int = 1, to_block: Optional[int] = None, from_address: Optional[List[str]] = None, to_address: Optional[List[str]] = None, after: Optional[int] = None, count: Optional[int] = None) -> List[Dict[str, any]]: """ :param from_block: Quantity or Tag - (optional) From this block. `0` is not working, it needs to be `>= 1` :param to_block: Quantity or Tag - (optional) To this block. :param from_address: Array - (optional) Sent from these addresses. :param to_address: Address - (optional) Sent to these addresses. :param after: Quantity - (optional) The offset trace number :param count: Quantity - (optional) Integer number of traces to display in a batch. :return: [ { "action": { "callType": "call", "from": "0x32be343b94f860124dc4fee278fdcbd38c102d88", "gas": "0x4c40d", "input": "0x", "to": "0x8bbb73bcb5d553b5a556358d27625323fd781d37", "value": "0x3f0650ec47fd240000" }, "blockHash": "0x86df301bcdd8248d982dbf039f09faf792684e1aeee99d5b58b77d620008b80f", "blockNumber": 3068183, "result": { "gasUsed": "0x0", "output": "0x" }, "subtraces": 0, "traceAddress": [], "transactionHash": "0x3321a7708b1083130bd78da0d62ead9f6683033231617c9d268e2c7e3fa6c104", "transactionPosition": 3, "type": "call" }, { "action": { "from": "0x3b169a0fb55ea0b6bafe54c272b1fe4983742bf7", "gas": "0x49b0b", "init": 
"0x608060405234801561001057600080fd5b5060405161060a38038061060a833981018060405281019080805190602001909291908051820192919060200180519060200190929190805190602001909291908051906020019092919050505084848160008173ffffffffffffffffffffffffffffffffffffffff1614151515610116576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260248152602001807f496e76616c6964206d617374657220636f707920616464726573732070726f7681526020017f696465640000000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000815111156101a35773ffffffffffffffffffffffffffffffffffffffff60005416600080835160208501846127105a03f46040513d6000823e600082141561019f573d81fd5b5050505b5050600081111561036d57600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614156102b7578273ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051600060405180830381858888f1935050505015156102b2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f206574686572000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b61036c565b6102d1828483610377640100000000026401000000009004565b151561036b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f20746f6b656e000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b5b5b5050505050610490565b600060608383604051602401808373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040527fa9059cbb00000000000000000000000000000000000
0000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505090506000808251602084016000896127105a03f16040513d6000823e3d60008114610473576020811461047b5760009450610485565b829450610485565b8151158315171594505b505050509392505050565b61016b8061049f6000396000f30060806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d0410800290000000000000000000000008942595a2dc5181df0465af0d7be08c8f23c93af00000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000d9e09beaeb338d81a7c5688358df0071d498811500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001b15f91a8c35300000000000000000000000000000000000000000000000000000000000001640ec78d9e00000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000f763ea5fbb191d47dc4b083dcdc3cdfb586468f8000000000000000000000000ad25c9717d04c0a12086a1d352c1ccf4bf5fcbf80000000000000000000000000da7155692446c80a4e7ad72018e586f20fa3bfe0
00000000000000000000000bce0cc48ce44e0ac9ee38df4d586afbacef191fa0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "value": "0x0" }, "blockHash": "0x03f9f64dfeb7807b5df608e6957dd4d521fd71685aac5533451d27f0abe03660", "blockNumber": 3793534, "result": { "address": "0x61a7cc907c47c133d5ff5b685407201951fcbd08", "code": "0x60806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d041080029", "gasUsed": "0x4683f" }, "subtraces": 2, "traceAddress": [], "transactionHash": "0x6c7e8f8778d33d81b29c4bd7526ee50a4cea340d69eed6c89ada4e6fab731789", "transactionPosition": 1, "type": "create" }, ... 
] """ assert from_address or to_address, 'You must provide at least `from_address` or `to_address`' parameters = {} if from_block: parameters['fromBlock'] = '0x%x' % from_block if to_block: parameters['toBlock'] = '0x%x' % to_block if from_address: parameters['fromAddress'] = from_address if to_address: parameters['toAddress'] = to_address if after: parameters['after'] = after if count: parameters['count'] = count try: return self._decode_traces(self.slow_w3.parity.traceFilter(parameters)) except ParityTraceDecodeException as exc: logger.warning('Problem decoding trace: %s - Retrying', exc) return self._decode_traces(self.slow_w3.parity.traceFilter(parameters))
[ "def", "trace_filter", "(", "self", ",", "from_block", ":", "int", "=", "1", ",", "to_block", ":", "Optional", "[", "int", "]", "=", "None", ",", "from_address", ":", "Optional", "[", "List", "[", "str", "]", "]", "=", "None", ",", "to_address", ":",...
107.763158
87.789474
def start(self): """Start this Tracer. Return a Python function suitable for use with sys.settrace(). """ self.thread = threading.currentThread() sys.settrace(self._trace) return self._trace
[ "def", "start", "(", "self", ")", ":", "self", ".", "thread", "=", "threading", ".", "currentThread", "(", ")", "sys", ".", "settrace", "(", "self", ".", "_trace", ")", "return", "self", ".", "_trace" ]
25.777778
18
def _select_position(self, width, height): """ Search for the placement with the bes fitness for the rectangle. Returns: tuple (Rectangle, fitness) - Rectangle placed in the fittest position None - Rectangle couldn't be placed """ positions = self._generate_placements(width, height) if self.rot and width != height: positions += self._generate_placements(height, width) if not positions: return None, None return min(((p[0], self._rect_fitness(*p))for p in positions), key=operator.itemgetter(1))
[ "def", "_select_position", "(", "self", ",", "width", ",", "height", ")", ":", "positions", "=", "self", ".", "_generate_placements", "(", "width", ",", "height", ")", "if", "self", ".", "rot", "and", "width", "!=", "height", ":", "positions", "+=", "sel...
40.866667
16.733333
def print_recs(listofrec, format=1, tags=None): """ Print a list of records. :param format: 1 XML, 2 HTML (not implemented) :param tags: list of tags to be printed if 'listofrec' is not a list it returns empty string """ if tags is None: tags = [] text = "" if type(listofrec).__name__ != 'list': return "" else: for rec in listofrec: text = "%s\n%s" % (text, print_rec(rec, format, tags)) return text
[ "def", "print_recs", "(", "listofrec", ",", "format", "=", "1", ",", "tags", "=", "None", ")", ":", "if", "tags", "is", "None", ":", "tags", "=", "[", "]", "text", "=", "\"\"", "if", "type", "(", "listofrec", ")", ".", "__name__", "!=", "'list'", ...
26.277778
17.944444
def scale_channel(self, ch, value): '''scale a channel to 1000/1500/2000''' v = value/10000.0 if v < -1: v = -1 elif v > 1: v = 1 if ch == 3 and self.mpstate.vehicle_type != 'rover': if v < 0: v = 0 return int(1000 + v*1000) return int(1500 + v*500)
[ "def", "scale_channel", "(", "self", ",", "ch", ",", "value", ")", ":", "v", "=", "value", "/", "10000.0", "if", "v", "<", "-", "1", ":", "v", "=", "-", "1", "elif", "v", ">", "1", ":", "v", "=", "1", "if", "ch", "==", "3", "and", "self", ...
29.166667
15.333333
def refresh(cls, path=None): """This gets called by the refresh function (see the top level __init__). """ # discern which path to refresh with if path is not None: new_git = os.path.expanduser(path) new_git = os.path.abspath(new_git) else: new_git = os.environ.get(cls._git_exec_env_var, cls.git_exec_name) # keep track of the old and new git executable path old_git = cls.GIT_PYTHON_GIT_EXECUTABLE cls.GIT_PYTHON_GIT_EXECUTABLE = new_git # test if the new git executable path is valid if sys.version_info < (3,): # - a GitCommandNotFound error is spawned by ourselves # - a OSError is spawned if the git executable provided # cannot be executed for whatever reason exceptions = (GitCommandNotFound, OSError) else: # - a GitCommandNotFound error is spawned by ourselves # - a PermissionError is spawned if the git executable provided # cannot be executed for whatever reason exceptions = (GitCommandNotFound, PermissionError) has_git = False try: cls().version() has_git = True except exceptions: pass # warn or raise exception if test failed if not has_git: err = dedent("""\ Bad git executable. 
The git executable must be specified in one of the following ways: - be included in your $PATH - be set via $%s - explicitly set via git.refresh() """) % cls._git_exec_env_var # revert to whatever the old_git was cls.GIT_PYTHON_GIT_EXECUTABLE = old_git if old_git is None: # on the first refresh (when GIT_PYTHON_GIT_EXECUTABLE is # None) we only are quiet, warn, or error depending on the # GIT_PYTHON_REFRESH value # determine what the user wants to happen during the initial # refresh we expect GIT_PYTHON_REFRESH to either be unset or # be one of the following values: # 0|q|quiet|s|silence # 1|w|warn|warning # 2|r|raise|e|error mode = os.environ.get(cls._refresh_env_var, "raise").lower() quiet = ["quiet", "q", "silence", "s", "none", "n", "0"] warn = ["warn", "w", "warning", "1"] error = ["error", "e", "raise", "r", "2"] if mode in quiet: pass elif mode in warn or mode in error: err = dedent("""\ %s All git commands will error until this is rectified. This initial warning can be silenced or aggravated in the future by setting the $%s environment variable. Use one of the following values: - %s: for no warning or exception - %s: for a printed warning - %s: for a raised exception Example: export %s=%s """) % ( err, cls._refresh_env_var, "|".join(quiet), "|".join(warn), "|".join(error), cls._refresh_env_var, quiet[0]) if mode in warn: print("WARNING: %s" % err) else: raise ImportError(err) else: err = dedent("""\ %s environment variable has been set but it has been set with an invalid value. 
Use only the following values: - %s: for no warning or exception - %s: for a printed warning - %s: for a raised exception """) % ( cls._refresh_env_var, "|".join(quiet), "|".join(warn), "|".join(error)) raise ImportError(err) # we get here if this was the init refresh and the refresh mode # was not error, go ahead and set the GIT_PYTHON_GIT_EXECUTABLE # such that we discern the difference between a first import # and a second import cls.GIT_PYTHON_GIT_EXECUTABLE = cls.git_exec_name else: # after the first refresh (when GIT_PYTHON_GIT_EXECUTABLE # is no longer None) we raise an exception raise GitCommandNotFound("git", err) return has_git
[ "def", "refresh", "(", "cls", ",", "path", "=", "None", ")", ":", "# discern which path to refresh with", "if", "path", "is", "not", "None", ":", "new_git", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "new_git", "=", "os", ".", "path", ...
40.583333
18.916667
def handle_request(self): """ Handle one request - serve current process to one connection. Use close_request() to disconnect this process. """ try: request, client_address = self.get_request() except socket.error: return if self.verify_request(request, client_address): try: # we only serve once, and we want to free up the port # for future serves. self.socket.close() self.process_request(request, client_address) except SocketConnected as err: self._serve_process(err.slaveFd, err.serverPid) return except Exception as err: self.handle_error(request, client_address) self.close_request()
[ "def", "handle_request", "(", "self", ")", ":", "try", ":", "request", ",", "client_address", "=", "self", ".", "get_request", "(", ")", "except", "socket", ".", "error", ":", "return", "if", "self", ".", "verify_request", "(", "request", ",", "client_addr...
37.409091
16.045455
def remember(self, user_name): ''' Remember the authenticated identity. This method simply delegates to another IIdentifier plugin if configured. ''' log.debug('Repoze OAuth remember') environ = toolkit.request.environ rememberer = self._get_rememberer(environ) identity = {'repoze.who.userid': user_name} headers = rememberer.remember(environ, identity) for header, value in headers: toolkit.response.headers.add(header, value)
[ "def", "remember", "(", "self", ",", "user_name", ")", ":", "log", ".", "debug", "(", "'Repoze OAuth remember'", ")", "environ", "=", "toolkit", ".", "request", ".", "environ", "rememberer", "=", "self", ".", "_get_rememberer", "(", "environ", ")", "identity...
39.153846
16.230769