code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def reservations(self):
    """Yield ``(name, Reservation)`` pairs parsed from ``sinfo --reservation`` output."""
    command = [SINFO, '--reservation']
    output = subprocess.check_output(command, env=SINFO_ENV)
    output = output.decode()
    it = iter(output.splitlines())
    next(it)  # skip the sinfo header line
    for line in it:
        rsv = Reservation.from_sinfo(line)
        yield rsv.name, rsv
Get the nodes of every reservation.
def handle_input(self, event):
    """Process the mouse event, translating it into events written to the pipe."""
    self.update_timeval()
    self.events = []
    code = self._get_event_type(event)
    # Handle any button press/release encoded in the event.
    self.handle_button(event, code)
    if code == 22:  # NOTE(review): 22 presumably identifies a scroll-wheel event — confirm
        self.handle_scrollwheel(event)
    else:
        self.handle_relative(event)
        self.handle_absolute(event)
    # Terminate the batch with a sync marker before writing it out.
    self.events.append(self.sync_marker(self.timeval))
    self.write_to_pipe(self.events)
Process the mouse event.
def frequencies_plot(self, xmin=0, xmax=200):
    """Generate the Jellyfish k-mer frequency line-plot section."""
    # NOTE(review): ``helptext`` is bound to the same dict as ``pconfig`` and is
    # then passed as the section help text — it should presumably be a
    # descriptive string; the original text appears to have been lost. Confirm.
    helptext = pconfig = {
        'id': 'Jellyfish_kmer_plot',
        'title': 'Jellyfish: K-mer plot',
        'ylab': 'Counts',
        'xlab': 'k-mer frequency',
        'xDecimals': False,
        'xmin': xmin,
        'xmax': xmax
    }
    self.add_section(
        anchor = 'jellyfish_kmer_plot',
        description = 'The K-mer plot lets you estimate library complexity and coverage from k-mer content.',
        helptext = helptext,
        plot = linegraph.plot(self.jellyfish_data, pconfig)
    )
Generate the qualities plot
def with_exit_condition(self, exit_condition: Optional[bool]=True) -> 'MonitorTask':
    """Set the flag indicating that the task should also run after the
    optimisation has ended.

    Returns ``self`` so calls can be chained fluently.
    """
    self._exit_condition = exit_condition
    return self
Sets the flag indicating that the task should also run after the optimisation is ended.
def package_version():
    """Resolve the package version and pin it into ``version.py``.

    Per the module's convention the version presumably comes from a Git tag —
    confirm in ``read_version``.
    """
    version_path = os.path.join(os.path.dirname(__file__), 'version.py')
    version = read_version(version_path)
    # Re-write the file so the resolved version is persisted for installs.
    write_version(version_path, version)
    return version
Get the package version via Git tag.
def _check_array_parms(is_array, array_size, value, element_kind, element_name):
    """Validate that ``is_array`` agrees with whether ``value`` is a list/tuple.

    Raises ValueError when the declared arrayness and the actual value type
    disagree; a ``None`` value is always accepted.
    """
    if value is None:
        return
    value_is_array = isinstance(value, (list, tuple))
    if value_is_array and not is_array:
        raise ValueError(
            _format("The is_array parameter of {0} {1!A} is False but "
                    "value {2!A} is an array.",
                    element_kind, element_name, value))
    if not value_is_array and is_array:
        raise ValueError(
            _format("The is_array parameter of {0} {1!A} is True but "
                    "value {2!A} is not an array.",
                    element_kind, element_name, value))
Check whether array-related parameters are ok.
def scan_timestamp(self, tbuf):
    """Scan forward in a tlog for a timestamp in a reasonable range.

    ``tbuf`` is an 8-byte big-endian microsecond candidate; one byte at a time
    is shifted in from ``self.f`` until the decoded time is within 3 days of
    the last accepted timestamp, or EOF is hit.
    """
    while True:
        (tusec,) = struct.unpack('>Q', tbuf)
        t = tusec * 1.0e-6  # microseconds -> seconds
        if abs(t - self._last_timestamp) <= 3*24*60*60:
            break
        c = self.f.read(1)
        if len(c) != 1:  # EOF: give up and return the current candidate
            break
        tbuf = tbuf[1:] + c
    return t
scan forward looking in a tlog for a timestamp in a reasonable range
def ci_macos():
    """Set up Travis-CI macOS for wheel building, build the wheel, and exit."""
    run_command("brew install $PYTHON pipenv || echo \"Installed PipEnv\"")
    # Build up one pip install command for all dependencies plus -U.
    command_string = "sudo -H $PIP install "
    for element in DEPENDENCIES + REQUIREMENTS + ["-U"]:
        command_string += element + " "
    run_command(command_string)
    run_command("sudo -H $PYTHON setup.py bdist_wheel")
    assert check_wheel_existence()
    exit(0)
Setup Travis-CI macOS for wheel building
async def load_all_aldb(self, clear=True):
    """Read the ALDB of every device known to the PLM."""
    for addr in self.plm.devices:
        await self.load_device_aldb(addr, clear)
Read all devices ALDB.
def seconds2str(seconds):
    """Format a duration in seconds as a string such as ``1h 05m 55s``.

    Negative values are formatted directly; NaN and +inf get literal labels.
    """
    if seconds < 0:
        return "{0:.3g}s".format(seconds)
    if math.isnan(seconds):
        return "NaN"
    if math.isinf(seconds):
        return "Inf"
    minutes, secs = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    if hours >= 1:
        return "{0:g}h {1:02g}m {2:.3g}s".format(hours, minutes, secs)
    if minutes >= 1:
        return "{0:02g}m {1:.3g}s".format(minutes, secs)
    return "{0:.3g}s".format(secs)
Returns string such as 1h 05m 55s.
def packets_to_flows(self):
    """Combine packets from the input stream into flows, yielding completed flows."""
    for packet in self.input_stream:
        flow_id = flow_utils.flow_tuple(packet)
        self._flows[flow_id].add_packet(packet)
        # Flush any flows that have become ready to emit.
        for flow in list(self._flows.values()):
            if flow.ready():
                flow_info = flow.get_flow()
                yield flow_info
                del self._flows[flow_info['flow_id']]
    print('---- NO MORE INPUT ----')
    # Input exhausted: emit all remaining flows in start-time order.
    for flow in sorted(self._flows.values(), key=lambda x: x.meta['start']):
        yield flow.get_flow()
Combine packets into flows
def _reset_server_state(self) -> None:
    """Clear stored information about the server."""
    # NOTE(review): naming is inconsistent — ``last_helo_response`` is public
    # while ``_last_ehlo_response`` is private; confirm before unifying, as
    # callers may rely on the public attribute.
    self.last_helo_response = None
    self._last_ehlo_response = None
    self.esmtp_extensions = {}
    self.supports_esmtp = False
    self.server_auth_methods = []
Clear stored information about the server.
def opener(self):
    """A reusable connection manager: lazily build and cache an opener."""
    if self._opener is None:
        log.debug("Creating connection handler")
        opener = build_opener()
        if self._cookies:
            log.debug("Appending cookies")
        else:
            log.debug("No cookies to append")
        # Attach each stored cookie as an explicit Cookie header.
        for cookie in self._cookies:
            cookie_str = cookie.name + '=' + cookie.value
            opener.addheaders.append(('Cookie', cookie_str))
        self._opener = opener
    else:
        log.debug("Reusing connection manager")
    return self._opener
A reusable connection manager
def text(self):
    """Render the contents inside this element, without HTML tags."""
    texts = []
    for child in self.childs:
        if isinstance(child, Tag):
            texts.append(child.text())
        elif isinstance(child, Content):
            texts.append(child.render())
        else:
            # Plain string child.
            texts.append(child)
    return " ".join(texts)
Renders the contents inside this element, without html tags.
def start(self):
    """Start running in the background on a worker thread."""
    self.update_device_info()
    self.get_device_status(0)
    self.hook()
    self.thread = threading.Thread(target=self._run)
    self.thread.start()
    self.running = True
start running in background.
def __Calc_HSL_to_RGB_Components(var_q, var_p, C):
    """Hue-to-channel helper used by HSL-to-RGB conversion for R, G, and B."""
    # Wrap the hue fraction into [0, 1).
    if C < 0:
        C += 1.0
    if C > 1:
        C -= 1.0
    sixth = 1.0 / 6.0
    two_thirds = 2.0 / 3.0
    if C < sixth:
        return var_p + (var_q - var_p) * 6.0 * C
    if C < 0.5:
        return var_q
    if C < two_thirds:
        return var_p + (var_q - var_p) * 6.0 * (two_thirds - C)
    return var_p
This is used in HSL_to_RGB conversions on R, G, and B.
def add_task(self, func, *args, **kargs):
    """Enqueue ``func`` together with its positional and keyword arguments."""
    task = (func, args, kargs)
    self.tasks.put(task)
Add a task to the queue.
def read_line(self, line):
    """Read a new line, updating the open-quote tracking state.

    Maintains ``self.single`` / ``self.triple`` as the currently open quote
    character (or None), skipping escaped quotes. ``self.python`` selects
    Python-specific rules (triple quotes; single-quoted strings do not span
    lines).
    """
    if self.ignore:
        return
    for i, char in enumerate(line):
        if char not in ['"', "'"]:
            continue
        if line[i - 1:i] == '\\':
            continue  # escaped quote
        if self.single == char:
            self.single = None  # closes the open single-quoted string
            continue
        if self.single is not None:
            continue  # inside a string opened with the other quote char
        if not self.python:
            continue
        if self.triple == char:
            if line[i - 2:i + 1] == 3 * char:
                self.triple = None  # closes the open triple-quoted string
            continue
        if self.triple is not None:
            continue
        if line[i - 2:i + 1] == 3 * char:
            self.triple = char  # opens a triple-quoted string
            continue
        self.single = char  # opens a single-quoted string
    if self.python:
        self.single = None  # single-quoted strings cannot span lines in Python
Read a new line
def dump(self, data, stream=None):
    """Serialise ``data`` as YAML and pass it to ``dump_raw``.

    ``dump_raw`` presumably encrypts and prints to stdout or writes to
    ``stream`` — confirm against its definition.
    """
    yaml_text = yaml.dump(
        data, default_flow_style=False, allow_unicode=True)
    return self.dump_raw(yaml_text, stream=stream)
Encrypt data and print to stdout or write to stream.
def _query_gen(self):
    """Generate the encoded query string from ``self.__query__``."""
    # NOTE(review): this ``urlencode`` takes a ``querydelimiter`` argument, so
    # it is presumably a project-local helper, not ``urllib.parse.urlencode``.
    return urlencode(self.__query__, safe=self.safe, querydelimiter=self.__querydelimiter__)
Generates The String for queries
def __get_cfg_pkgs_rpm(self):
    """Get packages with configuration files on RPM systems.

    Returns a dict mapping package name-version-release to the list of its
    configuration file paths. Packages without config files are omitted.
    """
    out, err = self._syscall('rpm', None, None, '-qa', '--configfiles',
                             '--queryformat', '%{name}-%{version}-%{release}\\n')
    data = dict()
    pkg_name = None
    pkg_configs = []
    out = salt.utils.stringutils.to_str(out)
    for line in out.split(os.linesep):
        line = line.strip()
        if not line:
            continue
        if not line.startswith("/"):
            # Package header line: flush the previous package first.
            if pkg_name and pkg_configs:
                data[pkg_name] = pkg_configs
            pkg_name = line
            pkg_configs = []
        else:
            pkg_configs.append(line)
    # Flush the trailing package.
    if pkg_name and pkg_configs:
        data[pkg_name] = pkg_configs
    return data
Get packages with configuration files on RPM systems.
def latexpdf():
    """Build the LaTeX files and run them through pdflatex."""
    # Fix: the return value of latex() was bound to an unused local (``rc``);
    # call it for its side effect only.
    latex()
    print('Running LaTeX files through pdflatex...')
    builddir = os.path.join(BUILDDIR, 'latex')
    subprocess.call(['make', '-C', builddir, 'all-pdf'])
    print('pdflatex finished; the PDF files are in {}.'.format(builddir))
make LaTeX files and run them through pdflatex
def all_files_in_directory(path):
    """Recursively list all files under ``path``, returning full paths."""
    return [
        os.path.join(dirpath, filename)
        for dirpath, _subdirs, filenames in os.walk(path)
        for filename in filenames
    ]
Recursively list all files under a directory
def rename_abiext(self, inext, outext):
    """Rename the Abinit file with extension ``inext`` to extension ``outext``.

    Returns 0 on success; raises RuntimeError when no matching file exists or
    the extension separator cannot be located.
    """
    infile = self.has_abiext(inext)
    if not infile:
        raise RuntimeError('no file with extension %s in %s' % (inext, self))
    # Find the last underscore: everything after it is the extension.
    for i in range(len(infile) - 1, -1, -1):
        if infile[i] == '_':
            break
    else:
        raise RuntimeError('Extension %s could not be detected in file %s' % (inext, infile))
    outfile = infile[:i] + '_' + outext
    shutil.move(infile, outfile)
    return 0
Rename the Abinit file with extension inext to the new extension outext
def complain(error):
    """Raise ``error`` in develop mode; warn in release mode."""
    # NOTE(review): flattened source — nesting reconstructed as below: callable
    # errors are instantiated and raised only in DEVELOP mode. Confirm whether
    # a callable error in release mode is intentionally ignored (not warned).
    if callable(error):
        if DEVELOP:
            raise error()
    elif DEVELOP:
        raise error
    else:
        logger.warn_err(error)
Raises in develop; warns in release.
def max_tot_value(self, value):
    """Set the maximum ToT value that is considered to be a hit.

    The value is propagated to the interpreter, histogram and clusterizer.
    """
    self._max_tot_value = value
    self.interpreter.set_max_tot(self._max_tot_value)
    self.histogram.set_max_tot(self._max_tot_value)
    self.clusterizer.set_max_hit_charge(self._max_tot_value)
Set maximum ToT value that is considered to be a hit
def LE16(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """16-bit field, little-endian encoded; thin wrapper around ``UInt16``."""
    return UInt16(
        value,
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_LE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
16-bit field, Little endian encoded
def toPairTreePath(name):
    """Sanitise ``name`` and split it into a pairtree path of two-char segments.

    An odd-length name yields a final one-character segment; the result always
    ends with a path separator.
    """
    sName = sanitizeString(name)
    # Slicing naturally yields a single trailing character for odd lengths.
    chunks = [sName[pos:pos + 2] for pos in range(0, len(sName), 2)]
    return os.sep.join(chunks) + os.sep
Cleans a string, and then splits it into a pairtree path.
def calcperc(b, perc=(0.1,99.9)):
    """Calculate values at the specified (low, high) percentiles of masked array ``b``.

    Returns ``(0, 0)`` when the array contains no unmasked values.
    """
    b = checkma(b)
    if b.count() > 0:
        low = np.percentile(b.compressed(), perc[0])
        high = np.percentile(b.compressed(), perc[1])
    else:
        low = 0
        high = 0
    return low, high
Calculate values at specified percentiles
def restore_directory_state(self, fname):
    """Restore the saved expanded state of directories under ``fname``."""
    root = osp.normpath(to_text_string(fname))
    if not osp.exists(root):
        # Directory no longer exists: nothing to restore.
        return
    for basename in os.listdir(root):
        path = osp.normpath(osp.join(root, basename))
        if osp.isdir(path) and path in self.__expanded_state:
            # Consume each saved path so it is only restored once.
            self.__expanded_state.pop(self.__expanded_state.index(path))
            if self._to_be_loaded is None:
                self._to_be_loaded = []
            self._to_be_loaded.append(path)
            self.setExpanded(self.get_index(path), True)
    if not self.__expanded_state:
        # Everything restored: stop listening for directory loads.
        self.fsmodel.directoryLoaded.disconnect(self.restore_directory_state)
Restore directory expanded state
def ensure_sink(self):
    """Ensure the log sink and its pub/sub topic exist, creating or updating as needed.

    Returns the sink path.
    """
    topic_info = self.pubsub.ensure_topic()
    scope, sink_path, sink_info = self.get_sink(topic_info)
    client = self.session.client('logging', 'v2', '%s.sinks' % scope)
    try:
        sink = client.execute_command('get', {'sinkName': sink_path})
    except HttpError as e:
        if e.resp.status != 404:
            raise
        # Sink does not exist yet: create it.
        sink = client.execute_command('create', sink_info)
    else:
        # Sink exists: update only when the definitions differ.
        delta = delta_resource(sink, sink_info['body'])
        if delta:
            sink_info['updateMask'] = ','.join(delta)
            sink_info['sinkName'] = sink_path
            sink_info.pop('parent')
            sink = client.execute_command('update', sink_info)
        else:
            return sink_path
    # Grant the sink's writer identity publish rights on the topic.
    self.pubsub.ensure_iam(publisher=sink['writerIdentity'])
    return sink_path
Ensure the log sink and its pub sub topic exist.
def handle_initialize(self, data):
    """Initialise the tuner with ``data`` (the search space) and acknowledge."""
    self.tuner.update_search_space(data)
    send(CommandType.Initialized, '')
    return True
data is search space
def rows(self):
    """Yield ``(term, properties)`` rows for writing terms to a CSV file."""
    # Determine which property name holds this term's own value.
    tvm = self.section.doc.decl_terms.get(self.qualified_term, {}).get('termvaluename', '@value')
    assert tvm
    properties = {tvm: self.value}
    # Terminal children become properties of this term's row.
    for c in self.children:
        if c.is_terminal:
            if c.record_term_lc:
                properties[c.record_term_lc] = c.value
    yield (self.qualified_term, properties)
    # Non-terminal children contribute their own rows recursively.
    for c in self.children:
        if not c.is_terminal:
            for row in c.rows:
                yield row
Yield rows for the term, for writing terms to a CSV file.
def from_dict(cls, data):
    """Build an instance of ``cls`` from the mapping ``data``.

    A non-None ``'cause'`` entry is itself converted recursively. The input
    mapping is copied and never mutated.
    """
    payload = dict(data)
    nested_cause = payload.get('cause')
    if nested_cause is not None:
        payload['cause'] = cls.from_dict(nested_cause)
    return cls(**payload)
Converts a dictionary to an object.
def _command_callback(self, message):
    """Broker callback: track command nesting depth and attach queued logs."""
    payload = message['payload']
    if message['$'] == 'before':
        # A command is starting: record it at the current nesting depth.
        self._commands.append({'level': self._depth, 'payload': payload, 'logs': []})
        self._depth += 1
    else:
        # A command finished: drain queued log lines into the latest command.
        while not self._queue.empty():
            self._commands[-1]['logs'].append(self._queue.get())
        self._depth = max(self._depth-1, 0)
The callback subscribed to the broker
def submitted_projects(raw_df): df = raw_df.astype({'PRONAC': str, 'CgcCpf': str}) submitted_projects = df.groupby('CgcCpf')[ 'PRONAC' ].agg(['unique', 'nunique']) submitted_projects.columns = ['pronac_list', 'num_pronacs'] return submitted_projects
Return all submitted projects.
def write_file(self, html, outfile):
    """Write an HTML string to ``outfile``, exiting with an error message on failure."""
    try:
        with open(outfile, 'wt') as file:
            file.write(html)
    except (IOError, OSError) as e:
        err_exit('Error writing %s: %s' % (outfile, e.strerror or e))
Write an HTML string to a file.
def checkversion(version, REFVERSION=FOLIAVERSION):
    """Check a FoLiA document version against the library's.

    Returns 1 if the document is newer than the library, -1 if older, 0 if
    equal. Note: only the components both versions share are compared (zip
    truncates the longer one).
    """
    try:
        for refversion, docversion in zip([int(x) for x in REFVERSION.split('.')], [int(x) for x in version.split('.')]):
            if docversion > refversion:
                return 1  # document is newer than the library
            elif docversion < refversion:
                return -1  # document is older than the library
        return 0
    except ValueError:
        raise ValueError("Unable to parse document FoLiA version, invalid syntax")
Checks FoLiA version, returns 1 if the document is newer than the library, -1 if it is older, 0 if it is equal
def topics_count(self):
    """Number of topics for this node plus all of its descendant nodes."""
    descendant_total = sum(child.topics_count for child in self.children)
    return self.obj.direct_topics_count + descendant_total
Returns the number of topics associated with the current node and its descendants.
def new_lineup(self, name, location, device, _type, postalCode, _id):
    """Callback run for each new lineup; prints it when lineup verbosity is on."""
    if self.__v_lineup:
        print("[Lineup: %s, %s, %s, %s, %s, %s]" % (name, location, device, _type, postalCode, _id))
Callback run for each new lineup
def patch(self, delta):
    """Applies remote delta to local file.

    Writes the patched result to a temp file in the same directory so the
    final ``os.rename`` is atomic on the same filesystem, then cleans up.
    """
    with (tempfile.NamedTemporaryFile(
            prefix='.sync',
            suffix=os.path.basename(self.path),
            dir=os.path.dirname(self.path),
            delete=False)) as output:
        try:
            with open(self.path, 'rb') as reference:
                r = librsync.patch(reference, delta, output)
            os.rename(output.name, self.path)
            return r
        finally:
            # Remove the temp file if the rename did not happen.
            try:
                os.remove(output.name)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
Applies remote delta to local file.
def _get_base_state(self):
    """Get the base state defined by ``app.layout`` as a python dict."""
    base_app_inst = self.stateless_app.as_dash_app().as_dash_instance()
    # Call the dash-layout endpoint directly and decode its JSON payload.
    base_resp = base_app_inst.locate_endpoint_function('dash-layout')()
    base_obj = json.loads(base_resp.data.decode('utf-8'))
    obj = {}
    base_app_inst.walk_tree_and_extract(base_obj, obj)
    return obj
Get the base state of the object, as defined by the app.layout code, as a python dict
def fetch_check(self, master):
    """Periodically check for and re-request missing MAVLink parameters."""
    if self.param_period.trigger():
        if master is None:
            return
        if len(self.mav_param_set) == 0:
            # Nothing received yet: ask for everything.
            master.param_fetch_all()
        elif self.mav_param_count != 0 and len(self.mav_param_set) != self.mav_param_count:
            if master.time_since('PARAM_VALUE') >= 1:
                # Re-fetch up to 10 of the still-missing parameter indices.
                diff = set(range(self.mav_param_count)).difference(self.mav_param_set)
                count = 0
                while len(diff) > 0 and count < 10:
                    idx = diff.pop()
                    master.param_fetch_one(idx)
                    count += 1
check for missing parameters periodically
def prepareDiff(self):
    """Prepare to run the checker, capturing reporter output for later diffing."""
    self.streamForDiff = NativeStringIO()
    self.linter.reporter.set_output(self.streamForDiff)
Prepare to run the checker and get diff results.
def make_argument_subquery(arg):
    """Wrap a Join argument in a Subquery when needed.

    Wrapping is required when the argument is a GroupBy/Projection or carries
    a restriction; otherwise the argument is returned unchanged.
    """
    needs_subquery = isinstance(arg, (GroupBy, Projection)) or arg.restriction
    if needs_subquery:
        return Subquery.create(arg)
    return arg
Decide when a Join argument needs to be wrapped in a subquery
def mouseMove(self, PSRML=None, dy=0):
    """Low-level mouse move to a Pattern, String, Match, Region or Location.

    Falls back to the last match (or this region) when ``PSRML`` is None. An
    integer ``PSRML`` is treated with ``dy`` as an offset from the current
    mouse position. Raises TypeError for unsupported argument types.
    """
    if PSRML is None:
        PSRML = self._lastMatch or self
    if isinstance(PSRML, Pattern):
        move_location = self.find(PSRML).getTarget()
    elif isinstance(PSRML, basestring):
        move_location = self.find(PSRML).getTarget()
    elif isinstance(PSRML, Match):
        move_location = PSRML.getTarget()
    elif isinstance(PSRML, Region):
        move_location = PSRML.getCenter()
    elif isinstance(PSRML, Location):
        move_location = PSRML
    elif isinstance(PSRML, int):
        offset = Location(PSRML, dy)
        move_location = Mouse.getPos().offset(offset)
    else:
        # Bug fix: the message previously named doubleClick().
        raise TypeError("mouseMove expected Pattern, String, Match, Region, or Location object")
    Mouse.moveSpeed(move_location)
Low-level mouse actions
def _createLink(self, linkResult, replaceParamFile):
    """Create the appropriate GSSHAPY link object for ``linkResult``'s type.

    Returns None for unrecognised link types.
    """
    link = None
    if linkResult['type'] == 'XSEC':
        link = self._createCrossSection(linkResult, replaceParamFile)
    elif linkResult['type'] == 'STRUCTURE':
        link = self._createStructure(linkResult, replaceParamFile)
    elif linkResult['type'] in ('RESERVOIR', 'LAKE'):
        link = self._createReservoir(linkResult, replaceParamFile)
    return link
Create GSSHAPY Link Object Method
def list(self):
    """Return the position in 3-D space as a plain ``[x, y, z]`` list."""
    pos = self._pos3d
    return [pos.x, pos.y, pos.z]
position in 3d space
def _get_variant_silent(parser, variant):
    """Get a variant's genotypes with not-found logging temporarily disabled."""
    prev_log = config.LOG_NOT_FOUND
    config.LOG_NOT_FOUND = False
    results = parser.get_variant_genotypes(variant)
    config.LOG_NOT_FOUND = prev_log  # restore the previous logging setting
    return results
Gets a variant from the parser while disabling logging.
def read_txt_file(fname):
    """Read a text file regardless of encoding, trying UTF-8 (with BOM) then cp1252.

    On failure, reports a syntax error and returns an empty string.
    """
    encodings = ['utf-8-sig', 'cp1252']
    with open(fname, 'rb') as f:
        content = bytes(f.read())
    for i in encodings:
        try:
            result = content.decode(i)
            if six.PY2:
                result = result.encode('utf-8')
            return result
        except UnicodeDecodeError:
            pass
    # None of the known encodings worked: report a syntax error.
    global_.FILENAME = fname
    errmsg.syntax_error(1, 'Invalid file encoding. Use one of: %s' % ', '.join(encodings))
    return ''
Reads a txt file, regardless of its encoding
def setup_function(self):
    """Run prior to the global main function: configure logging and the build root."""
    log.options.LogOptions.set_stderr_log_level('google:INFO')
    if app.get_options().debug:
        log.options.LogOptions.set_stderr_log_level('google:DEBUG')
    if not app.get_options().build_root:
        # Default the build root to <butcher_basedir>/build.
        app.set_option('build_root', os.path.join(
            app.get_options().butcher_basedir, 'build'))
    self.buildroot = app.get_options().build_root
    if not os.path.exists(self.buildroot):
        os.makedirs(self.buildroot)
    if app.get_options().disable_cache_fetch:
        self.options['cache_fetch'] = False
    if app.get_options().disable_hardlinks:
        # Disable hardlinking globally for all builders.
        base.BaseBuilder.linkfiles = False
Runs prior to the global main function.
def create_module(self, spec):
    """Create the module via the parent loader (python2 semantics shim)."""
    return super(NamespaceLoader2, self).create_module(spec)
Improve python2 semantics for module creation.
def _handle_external_link(self, token):
    """Handle an external link at the head of the token stream."""
    brackets, url = token.brackets, None
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.ExternalLinkSeparator):
            # Everything collected so far was the URL; start on the title.
            url = self._pop()
            self._push()
        elif isinstance(token, tokens.ExternalLinkClose):
            if url is not None:
                return ExternalLink(url, self._pop(), brackets)
            return ExternalLink(self._pop(), brackets=brackets)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_external_link() missed a close token")
Handle when an external link is at the head of the tokens.
def _as_reference_point(self) -> np.ndarray:
    """Return the classification information as a reference point array."""
    ref_val = []
    for fn, f in self._classification.items():
        if f[0] == "<":
            # "<" presumably means improve: take the ideal value — confirm.
            ref_val.append(self._method.problem.ideal[fn])
        elif f[0] == "<>":
            # "<>" presumably means free to change: take the nadir value.
            ref_val.append(self._method.problem.nadir[fn])
        else:
            # Otherwise the classification carries an explicit level.
            ref_val.append(f[1])
    return np.array(ref_val)
Return classification information as reference point
def show_gateway_device(self, gateway_device_id, **_params):
    """Fetch a single gateway device by id from the API."""
    path = self.gateway_device_path % gateway_device_id
    return self.get(path, params=_params)
Fetch a gateway device.
def _consume(self):
    """Discard the used portion of the buffer, advancing the stream offset."""
    consumed = self._buff_i - self._buf_checkpoint
    self._stream_offset += consumed
    self._buf_checkpoint = self._buff_i
Gets rid of the used parts of the buffer.
def getComicData(self, comic):
    """Return the info dictionary for ``comic``, loading it from disk if needed.

    Falls back to a fresh ``{'pages': {}}`` structure when no JSON file exists.
    """
    if comic not in self.data:
        if os.path.exists(self.jsonFn(comic)):
            with codecs.open(self.jsonFn(comic), 'r', self.encoding) as f:
                self.data[comic] = json.load(f)
        else:
            self.data[comic] = {'pages':{}}
    return self.data[comic]
Return dictionary with comic info.
def find(command, on):
    """Look up the usage of ``command`` from its man page and print it."""
    usage_lines = parse_man_page(command, on)
    click.echo(''.join(usage_lines))
Find the command usage.
def search(self):
    """Redirect to the bookmark search view with the proper filter key."""
    form = forms.HomeForm()
    bbm_filter = bs_filters.BookmarkBukuFilter(
        all_keywords=False, deep=form.deep.data, regex=form.regex.data)
    op_text = bbm_filter.operation()
    # The filter index is the position of this (all_keywords=False, deep,
    # regex) combination among all sorted combinations of three booleans.
    values_combi = sorted(itertools.product([True, False], repeat=3))
    for idx, (all_keywords, deep, regex) in enumerate(values_combi):
        if deep == form.deep.data and regex == form.regex.data and not all_keywords:
            choosen_idx = idx
    # NOTE(review): the middle replace(' ', ' ') looks like a no-op — possibly
    # a lost non-breaking space in the original source; confirm upstream.
    url_op_text = op_text.replace(', ', '_').replace(' ', ' ').replace(' ', '_')
    key = ''.join(['flt', str(choosen_idx), '_buku_', url_op_text])
    kwargs = {key: form.keyword.data}
    url = url_for('bookmark.index_view', **kwargs)
    return redirect(url)
redirect to bookmark search
def sign_file(self, filepath):
    """Sign ``filepath`` with GPG if possible, writing a ``.sig`` next to it.

    Posts status-bar messages for success and failure; silently skips when
    GPG is unavailable.
    """
    if not GPG_PRESENT:
        return
    signed_data = sign(filepath)
    signature = signed_data.data
    if signature is None or not signature:
        statustext = _('Error signing file. ') + signed_data.stderr
        try:
            post_command_event(self.main_window, self.StatusBarMsg, text=statustext)
        except TypeError:
            # Event posting may fail outside a full GUI context; ignore.
            pass
        return
    with open(filepath + '.sig', 'wb') as signfile:
        signfile.write(signature)
    if self.code_array.safe_mode:
        statustext = _('File saved and signed')
    else:
        statustext = _('File signed')
    try:
        post_command_event(self.main_window, self.StatusBarMsg, text=statustext)
    except TypeError:
        pass
Signs file if possible
def _select_vs(v, p):
    """Return the three points to use for interpolating ``v``."""
    if v >= 120.:
        return 60, 120, inf
    elif v >= 60.:
        return 40, 60, 120
    elif v >= 40.:
        return 30, 40, 60
    elif v >= 30.:
        return 24, 30, 40
    elif v >= 24.:
        return 20, 24, 30
    elif v >= 19.5:
        return 19, 20, 24
    # NOTE(review): flattened source — the p-dependent small-v special cases
    # below were reconstructed; confirm the intended nesting.
    if p >= .9:
        if v < 2.5:
            return 1, 2, 3
    else:
        if v < 3.5:
            return 2, 3, 4
    vi = int(round(v))
    return vi - 1, vi, vi + 1
returns the points to use for interpolating v
async def become(self, layer_type: Type[L], request: 'Request') -> L:
    """Transform this layer into another layer type.

    Base implementation: transformation is unsupported, so always raises
    ValueError naming the requested layer type.
    """
    raise ValueError('Cannot become "{}"'.format(layer_type.__name__))
Transform this layer into another layer type
def list_attachments(fullname):
    """List attachments for ``fullname``.

    Attachments are sibling files that share the same stem (name without
    extension) but are not the file itself.
    """
    parent, filename = os.path.split(fullname)
    stem = os.path.splitext(filename)[0]
    matches = []
    for candidate in os.listdir(parent):
        candidate_stem = os.path.splitext(candidate)[0]
        if candidate_stem == stem and candidate != filename:
            matches.append(os.path.join(parent, candidate))
    return matches
List attachment for the specified fullname.
def shutdown_all(self):
    """For testing use: arrange for all connections to be shut down."""
    self._lock.acquire()
    try:
        # Copy the keys: _shutdown_unlocked may mutate the mapping.
        for context in list(self._key_by_context):
            self._shutdown_unlocked(context)
    finally:
        self._lock.release()
For testing use, arrange for all connections to be shut down.
def cd(dest):
    """Temporarily chdir into ``dest``, restoring the original cwd afterwards.

    NOTE(review): this is a generator — presumably decorated with
    ``@contextlib.contextmanager`` at its definition site; confirm.
    """
    origin = os.getcwd()
    try:
        os.chdir(dest)
        yield dest
    finally:
        os.chdir(origin)
Temporarily cd into a directory
def check(self):
    """Return True if at least ``interval`` seconds have passed since the last run."""
    deadline = self.lastrun + self.interval
    return deadline < time.time()
Returns True if `interval` seconds have passed since it last ran
def _send_merge_commands(self, config, file_config):
    """Push set-commands to the device via netmiko, storing a backup first."""
    if self.loaded is False:
        if self._save_backup() is False:
            raise MergeConfigException('Error while storing backup ' 'config.')
    if self.ssh_connection is False:
        self._open_ssh()
    if file_config:
        if isinstance(config, str):
            config = config.splitlines()
    else:
        if isinstance(config, str):
            # NOTE(review): splits on whitespace, not on lines — confirm this
            # is intentional for non-file configs.
            config = str(config).split()
    self.ssh_device.send_config_set(config)
    self.loaded = True
    self.merge_config = True
Netmiko is being used to push set commands.
def uuid_object(title="Reference", description="Select an object", default=None, display=True):
    """Build a JSON-schema snippet for a regex-validated UUID string field.

    ``display=False`` hides the field via an always-false schema-form
    condition; ``default`` is included only when provided.
    """
    schema = {
        'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{'
                   '4}-['
                   'a-fA-F0-9]{4}-[a-fA-F0-9]{12}$',
        'type': 'string',
        'title': title,
        'description': description,
    }
    if not display:
        schema['x-schema-form'] = {'condition': "false"}
    if default is not None:
        schema['default'] = default
    return schema
Generates a regular expression controlled UUID field
def _DeleteClientActionRequest(self, to_delete, cursor=None):
    """Build and execute one DELETE for the given (client, flow, request) triples.

    NOTE(review): ``cursor`` defaults to None but is used unconditionally —
    presumably always injected by a transaction decorator; confirm. Also note
    an empty ``to_delete`` would produce invalid SQL.
    """
    query = "DELETE FROM client_action_requests WHERE "
    conditions = []
    args = []
    for client_id, flow_id, request_id in to_delete:
        conditions.append("(client_id=%s AND flow_id=%s AND request_id=%s)")
        args.append(db_utils.ClientIDToInt(client_id))
        args.append(db_utils.FlowIDToInt(flow_id))
        args.append(request_id)
    query += " OR ".join(conditions)
    cursor.execute(query, args)
Builds deletes for client messages.
def redraw_current_line(self):
    """Redraw the highlighted stream line in the streams pad."""
    if self.no_streams:
        return
    row = self.pads[self.current_pad].getyx()[0]
    s = self.filtered_streams[row]
    pad = self.pads['streams']
    pad.move(row, 0)
    pad.clrtoeol()
    # Re-render the line in reverse video to show the highlight.
    pad.addstr(row, 0, self.format_stream_line(s), curses.A_REVERSE)
    pad.chgat(curses.A_REVERSE)
    pad.move(row, 0)
    self.refresh_current_pad()
Redraw the highlighted line
def _get_lsun(directory, category, split_name):
    """Download the LSUN archive for ``category``/``split_name`` unless present."""
    filename = _LSUN_DATA_FILENAME % (category, split_name)
    url = _LSUN_URL % (category, split_name)
    generator_utils.maybe_download(directory, filename, url)
Downloads all lsun files to directory unless they are there.
def item(self, infohash, prefetch=None, cache=False):
    """Fetch a single item by its info hash (the first result of ``items``)."""
    results = self.items(infohash, prefetch, cache)
    return next(results)
Fetch a single item by its info hash.
def information_title_header_element(feature, parent):
    """Return the capitalised information-title header string from definitions."""
    _ = feature, parent  # required by the caller's signature; both unused
    template = information_title_header['string_format']
    return template.capitalize()
Retrieve information title header string from definitions.
def runSearchReadGroupSets(self, request):
    """Run the specified SearchReadGroupSetsRequest and return the response."""
    return self.runSearchRequest(
        request,
        protocol.SearchReadGroupSetsRequest,
        protocol.SearchReadGroupSetsResponse,
        self.readGroupSetsGenerator)
Runs the specified SearchReadGroupSetsRequest.
def as_list(callable):
    """Convert a scalar validator into a list validator.

    The returned wrapper applies the scalar validator to every element of its
    iterable argument and collects the results in a list.
    """
    @wraps(callable)
    def wrapper(value_iter):
        return list(map(callable, value_iter))
    return wrapper
Convert a scalar validator into a list validator
def _fill_topology_cfg(self, topo_dict):
    """Collect the optional bonding settings from ``topo_dict`` into a dict."""
    cfg = {}
    if topo_dict.bond_member_ports is not None:
        cfg['bond_member_ports'] = topo_dict.bond_member_ports
    if topo_dict.bond_interface is not None:
        cfg['bond_interface'] = topo_dict.bond_interface
    return cfg
Fills the extra configurations in the topology.
def download_bundle_view(self, request, pk):
    """A view that lets the user download a certificate bundle in PEM format."""
    return self._download_response(request, pk, bundle=True)
A view that allows the user to download a certificate bundle in PEM format.
def load_project_flag_list_file(self, project_exceptions, project):
    """Load project-specific exception lists, merging them into the flag lists.

    A no-op when already loaded; exits when the exception file is missing.
    """
    if self.loaded:
        return
    exception_file = None
    for item in project_exceptions:
        if project in item:
            exception_file = item.get(project)
    if exception_file is not None:
        try:
            with open(exception_file, 'r') as f:
                ex = yaml.safe_load(f)
        except IOError:
            logger.error('File not found: %s', exception_file)
            sys.exit(0)
        for key in ex:
            if key in fl:
                # Merge the project's exceptions into the global flag list.
                fl[key][project] = _merge(fl[key][project], ex.get(key, None)) \
                    if project in fl[key] else ex.get(key, None)
        self.loaded = True
    else:
        logger.info('%s not found in %s', project, ignore_list)
        logger.info('No project specific exceptions will be applied')
Loads project specific lists
def on_click(self, event):
    """Show a desktop notification for the clicked event.

    Override this method to do more interesting things with the event.
    """
    DesktopNotification(
        title=event.title,
        body="{} until {}!".format(event.time_remaining, event.title),
        icon='dialog-information',
        urgency=1,
        timeout=0,
    ).display()
Override this method to do more interesting things with the event.
def output_xml(data, code, headers=None):
    """Make a Flask response with an XML-encoded body and the given status code."""
    resp = make_response(dumps({'response' :data}), code)
    resp.headers.extend(headers or {})
    return resp
Makes a Flask response with a XML encoded body
def visit_FunctionDef(self, node):
    """Record this function definition as inlinable when eligible.

    Eligible means: the body is a single Call/Return statement that does not
    reference the function's own name (i.e. not recursive).
    """
    if (len(node.body) == 1 and isinstance(node.body[0], (ast.Call, ast.Return))):
        ids = self.gather(Identifiers, node.body[0])
        if node.name not in ids:
            self.result[node.name] = copy.deepcopy(node)
Determine this function definition can be inlined.
def createHeadingPointer(self):
    """Create the triangle pointer and text label for the current heading."""
    self.headingTri = patches.RegularPolygon((0.0,0.80),3,0.05,color='k',zorder=4)
    self.axes.add_patch(self.headingTri)
    self.headingText = self.axes.text(0.0,0.675,'0',color='k',size=self.fontSize,horizontalalignment='center',verticalalignment='center',zorder=4)
Creates the pointer for the current heading.
def read(self):
    """Read and interpret data from the daemon.

    Returns the underlying read status when it is non-positive, else 0.
    """
    status = gpscommon.read(self)
    if status <= 0:
        return status
    if self.response.startswith("{") and self.response.endswith("}\r\n"):
        # New-style JSON response.
        self.unpack(self.response)
        self.__oldstyle_shim()
        self.newstyle = True
        self.valid |= PACKET_SET
    elif self.response.startswith("GPSD"):
        # Old-style response.
        self.__oldstyle_unpack(self.response)
        self.valid |= PACKET_SET
    return 0
Read and interpret data from the daemon.
def runCommandReturnOutput(cmd):
    """Run a shell command and return ``(stdout, stderr)`` as bytes.

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    splits = shlex.split(cmd)
    proc = subprocess.Popen(
        splits, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        # Bug fix: CalledProcessError takes (returncode, cmd, output, stderr);
        # the old call passed (stdout, stderr), producing a garbled exception.
        raise subprocess.CalledProcessError(
            proc.returncode, splits, output=stdout, stderr=stderr)
    return stdout, stderr
Run a shell command and return its stdout and stderr
def autodoc_module(module):
    """Append a short summary of all implemented members to the module's docstring."""
    doc = getattr(module, '__doc__')
    if doc is None:
        doc = ''
    members = []
    for name, member in inspect.getmembers(module):
        # Only public members actually defined in this module.
        if ((not name.startswith('_')) and (inspect.getmodule(member) is module)):
            members.append((name, member))
    members = sorted(members, key=_number_of_line)
    if members:
        lines = ['\n\nModule :mod:`~%s` implements the following members:\n' % module.__name__]
        for (name, member) in members:
            # Pick the right Sphinx cross-reference role per member kind.
            if inspect.isfunction(member):
                type_ = 'func'
            elif inspect.isclass(member):
                type_ = 'class'
            else:
                type_ = 'obj'
            lines.append(' * :%s:`~%s` %s' % (type_, name, objecttools.description(member)))
        doc = doc + '\n\n' + '\n'.join(lines) + '\n\n' + 80*'_'
    module.__doc__ = doc
Add a short summary of all implemented members to a modules docstring.
def _fix_uncontracted(basis):
    """Force the contraction coefficient of uncontracted shells to 1.0.

    Mutates ``basis`` in place and also returns it.
    """
    for element in basis['elements'].values():
        if 'electron_shells' not in element:
            continue
        for shell in element['electron_shells']:
            coeffs = shell['coefficients']
            # Single primitive with a single contraction: normalise to 1.0.
            if len(coeffs) == 1 and len(coeffs[0]) == 1:
                coeffs[0][0] = '1.0000000'
            # No contractions at all: add a unit contraction.
            if len(coeffs) == 0:
                coeffs.append(['1.0000000'])
    return basis
Forces the contraction coefficient of uncontracted shells to 1.0
def path_only_contains_dirs(self, path):
    """Return True when ``path`` (recursively) contains only directories."""
    entries = os.listdir(path)
    if not entries:
        return True
    full_paths = [os.path.join(path, entry) for entry in entries]
    if any(os.path.isfile(fp) for fp in full_paths):
        return False
    # Only directories at this level: recurse into each of them.
    return all(self.path_only_contains_dirs(fp) for fp in full_paths)
Return boolean on whether a path only contains directories.
def backup(self):
    """Backup the main dataframe into ``self.backup_df``."""
    try:
        self.backup_df = self.df.copy()
    except Exception as e:
        self.err(e, "Can not backup data")
        return
    self.ok("Dataframe backed up")
Backup the main dataframe
def pin_command(self):
    """Compose the pip-compile shell command for this requirement file."""
    cmd = [
        'pip-compile',
        '--no-header',
        '--verbose',
        '--rebuild',
        '--no-index',
        '--output-file', self.outfile,
        self.infile,
    ]
    if OPTIONS['upgrade']:
        # Insert after '--verbose', matching the original argument order.
        cmd.insert(3, '--upgrade')
    if self.add_hashes:
        cmd.insert(1, '--generate-hashes')
    return cmd
Compose pip-compile shell command
def publishToRoom(self, roomId, name, data, userList=None):
    """Publish ``data`` under ``name`` to every user in the given room.

    When ``userList`` is not supplied, the room's current members are used.
    """
    if userList is None:
        userList = self.getRoom(roomId)
    logging.debug("%s: broadcasting (name: %s, data: %s, number of users: %s)" % (self._gcls(), name, data, len(userList)))
    self.broadcast(userList, { "name": name, "data": SockJSRoomHandler._parser.encode(data) })
Publish to given room data submitted
def remove_key(pki_dir, id_):
    """Remove the accepted key for minion ``id_`` from the pki directory."""
    key = os.path.join(pki_dir, 'minions', id_)
    if os.path.isfile(key):
        os.remove(key)
        log.debug('Deleted \'%s\'', key)
This method removes a specified key from the accepted keys dir
def deploy_media():
    """Deploy MEDIA_ROOT unversioned onto the host's public directory.

    A no-op when media settings are missing or media is served externally.
    """
    # Nothing to do when media is unset or served from an external http host.
    if not env.MEDIA_URL or not env.MEDIA_ROOT or 'http://' in env.MEDIA_URL:
        return
    local_dir = env.MEDIA_ROOT
    remote_dir = '/'.join([deployment_root(),'public'])
    media_url = env.MEDIA_URL[1:]  # strip the leading slash
    if media_url:
        remote_dir = '/'.join([remote_dir,media_url])
    if env.verbosity:
        print env.host,"DEPLOYING media",remote_dir
    deployed = deploy_files(local_dir,remote_dir)
    # Ensure www-data can write to the deployed media tree.
    sudo("chown -R www-data:sudo %s" % remote_dir)
    sudo("chmod -R ug+w %s"% remote_dir)
    return deployed
Deploy MEDIA_ROOT unversioned on host
def sample(name, reads_in_tuple):
    """Create a new sample, enforcing unique sample names."""
    if name in [sample_x.get_name() for sample_x in __SAMPLES__]:
        rnftools.utils.error(
            "Multiple samples have the same name. Each sample must have a unique name.",
            program="RNFtools",
            subprogram="MIShmash",
            exception=ValueError,
        )
    # Constructing the Sample registers it globally (side effect).
    Sample(
        name=name,
        reads_in_tuple=reads_in_tuple,
    )
    add_input(current_sample().fq_fns())
Create a new sample.
def from_dataframe(cls, name, df, indices, primary_key=None):
    """Infer table metadata (column types, nullability) from a DataFrame."""
    column_types = []
    nullable = set()
    for column_name in df.columns:
        values = df[column_name]
        if values.isnull().any():
            nullable.add(column_name)
        column_db_type = db_type(values.dtype)
        # Column names with spaces are normalised to underscores.
        column_types.append((column_name.replace(" ", "_"), column_db_type))
    def make_rows():
        # Materialise rows lazily so the DataFrame isn't copied until needed.
        return list(tuple(row) for row in df.values)
    return cls(
        name=name,
        column_types=column_types,
        make_rows=make_rows,
        indices=indices,
        nullable=nullable,
        primary_key=primary_key)
Infer table metadata from a DataFrame
def Validate(self):
    """Validate that the filter exists and has valid filter and hint expressions."""
    if self.type not in filters.Filter.classes:
        raise DefinitionError("Undefined filter type %s" % self.type)
    self._filter.Validate(self.expression)
    Validate(self.hint, "Filter has invalid hint")
The filter exists, and has valid filter and hint expressions.
def main(argv=None):
    """Execute the ``intake`` command line program."""
    # Imported lazily so importing this module stays cheap.
    from intake.cli.bootstrap import main as _main
    return _main('Intake Catalog CLI', subcommands.all, argv or sys.argv)
Execute the "intake" command line program.
def ping(self) -> None:
    """Ping the database connection, reconnecting if necessary.

    A no-op unless connected via MySQLdb or PyMySQL.
    """
    if self.db is None or self.db_pythonlib not in [PYTHONLIB_MYSQLDB, PYTHONLIB_PYMYSQL]:
        return
    try:
        self.db.ping(True)  # test connection; driver may auto-reconnect
    except mysql.OperationalError:
        # Driver-level reconnect failed: rebuild the connection from scratch.
        self.db = None
        self.connect_to_database_mysql(
            self._database, self._user, self._password, self._server,
            self._port, self._charset, self._use_unicode)
Pings a database connection, reconnecting if necessary.
def store(self, secrets, job):
    """Sanitize the job object of any of the given secrets."""
    # NOTE(review): membership is tested on the job *keys* (``j in secrets``),
    # not the values — confirm secrets and job share the same key space.
    for j in job:
        if j in secrets:
            job[j] = self.add(job[j])
Sanitize the job object of any of the given secrets.
def print_summary(self):
    """Compute and print summary metrics of the annotation comparisons."""
    self.tp = len(self.matched_ref_inds)  # true positives (matched)
    self.fp = self.n_test - self.tp       # unmatched test annotations
    self.fn = self.n_ref - self.tp        # unmatched reference annotations
    # NOTE(review): raises ZeroDivisionError when n_ref or n_test is 0.
    self.specificity = self.tp / self.n_ref
    self.positive_predictivity = self.tp / self.n_test
    self.false_positive_rate = self.fp / self.n_test
    print('%d reference annotations, %d test annotations\n' % (self.n_ref, self.n_test))
    print('True Positives (matched samples): %d' % self.tp)
    print('False Positives (unmatched test samples: %d' % self.fp)
    print('False Negatives (unmatched reference samples): %d\n' % self.fn)
    print('Specificity: %.4f (%d/%d)' % (self.specificity, self.tp, self.n_ref))
    print('Positive Predictivity: %.4f (%d/%d)' % (self.positive_predictivity, self.tp, self.n_test))
    print('False Positive Rate: %.4f (%d/%d)' % (self.false_positive_rate, self.fp, self.n_test))
Print summary metrics of the annotation comparisons.
def response(self, msgid, response):
    """Handle a response message by resolving the matching pending request."""
    pending = self.requests[msgid]
    pending.callback(response)
    # Only forget the request once the callback has run successfully.
    del self.requests[msgid]
Handle a response message.