code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def unregister_model(self, model):
    """Remove *model* from the registry, raising NotRegistered if unknown."""
    if model in self._model_registry:
        del self._model_registry[model]
    else:
        raise NotRegistered('The model %s is not registered' % model)
Unregisters the given model.
def open(self, path, binary=False):
    """Open *path* and return a stream: bytes when *binary*, else UTF-8 text."""
    mode_kwargs = {"mode": "rb"} if binary else {"encoding": "utf-8"}
    return open(path, **mode_kwargs)
Open file and return a stream.
def make_user_config_dir():
    """Create the user s-tui config directory (with hooks.d) if missing.

    Returns the config path, or None when directory creation fails.
    """
    config_path = get_user_config_dir()
    if user_config_dir_exists():
        return config_path
    try:
        os.mkdir(config_path)
        os.mkdir(os.path.join(config_path, 'hooks.d'))
    except OSError:
        return None
    return config_path
Create the user s-tui config directory if it doesn't exist
def _read_input_urls(cls, session: AppSession, default_scheme='http'):
    """Read the URLs provided by the user and yield parsed URLInfo objects.

    Combines the command-line URLs with any input file (optionally treated
    as HTML links), joins each against the base URL when given, applies the
    optional URL rewriter, and logs then skips URLs that fail to parse.
    """
    url_string_iter = session.args.urls or ()
    url_rewriter = session.factory.get('URLRewriter')
    if session.args.input_file:
        if session.args.force_html:
            lines = cls._input_file_as_html_links(session)
        else:
            lines = cls._input_file_as_lines(session)
        url_string_iter = itertools.chain(url_string_iter, lines)
    base_url = session.args.base
    for url_string in url_string_iter:
        _logger.debug(__('Parsing URL {0}', url_string))
        if base_url:
            # Resolve relative URLs against the user-provided base.
            url_string = wpull.url.urljoin(base_url, url_string)
        try:
            url_info = wpull.url.URLInfo.parse(
                url_string, default_scheme=default_scheme)
            _logger.debug(__('Parsed URL {0}', url_info))
            if url_rewriter:
                url_info = url_rewriter.rewrite(url_info)
                _logger.debug(__('Rewritten URL {0}', url_info))
            yield url_info
        except ValueError as e:
            # Invalid URLs are reported but do not abort the iteration.
            _logger.info(__('Invalid URL {0}: {1}', url_string, e))
Read the URLs provided by the user.
def scene_remove(frames):
    """Parse a ``scene.rm`` message; return a 1-tuple with the scene id."""
    parsed = MessageReader(frames).string("command").uint32(
        "scene_id").assert_end().get()
    if parsed.command != "scene.rm":
        raise MessageParserError("Command is not 'scene.rm'")
    return (parsed.scene_id,)
parse a scene.rm message
def arguments(function, extra_arguments=0):
    """Return the names of the arguments *function* accepts (or () for builtins)."""
    code = getattr(function, '__code__', None)
    if code is None:
        return ()
    return code.co_varnames[:code.co_argcount + extra_arguments]
Returns the name of all arguments a function takes
def _fromset(cls, values, key=None): sorted_set = object.__new__(cls) sorted_set._set = values sorted_set.__init__(key=key) return sorted_set
Initialize sorted set from existing set.
def db_file(self, value):
    """Setter for the ``_db_file`` attribute.

    Raises FileExistsError when *value* already exists on disk, so an
    existing database file is never silently targeted.  (The original used
    ``assert``, which is stripped under ``python -O``.)
    """
    if os.path.isfile(value):
        raise FileExistsError("%s already exists" % value)
    self._db_file = value
Setter for _db_file attribute
def check_outputs(self):
    """Check output files and decide whether the task must run.

    Returns True when the task should run: outputs are missing, inputs are
    newer than outputs, or the 'force' option is set; otherwise False.
    """
    self.outputs = self.expand_filenames(self.outputs)
    if not self.files_exist(self.outputs):
        print("No output file(s).")  # typo fixed: was "ouput"
        print("Running task.")
        return True
    if self.dependencies_are_newer(self.outputs, self.inputs):
        print("Dependencies are newer than outputs.")
        print("Running task.")
        return True
    if self.force:
        print("Dependencies are older than inputs, but 'force' option present.")
        print("Running task.")
        return True
    print("Dependencies are older than inputs.")
    return False
Check for the existence of output files
def read_ssh_config(path):
    """Read the ssh config file at *path* and return a parsed SshConfig."""
    with open(path) as config_file:
        content = config_file.read()
    return SshConfig(content.splitlines())
Read ssh config file and return parsed SshConfig
def _get_argname_value(self, argname): argvalue = getattr(self, '__get_{0}__'.format(argname), None) if argvalue is not None and callable(argvalue): argvalue = argvalue() if argvalue is None: argvalue = getattr(self, argname, None) if argvalue is None: argvalue = getattr(self, '__{0}__'.format(argname), None) if argvalue is None: argvalue = self.extra.get(argname, None) return argvalue
Return the argname value looking up on all possible attributes
def dnld_goa(self, species, ext='gaf', item=None, fileout=None):
    """Download a GOA annotation file from the EMBL-EBI ftp server.

    Returns the local destination path of the downloaded file.
    """
    basename = self.get_basename(species, ext, item)
    src = os.path.join(self.ftp_src_goa, species.upper(),
                       "{F}.gz".format(F=basename))
    if fileout is None:
        dst = os.path.join(os.getcwd(), basename)
    else:
        dst = fileout
    dnld_file(src, dst, prt=sys.stdout, loading_bar=None)
    return dst
Download GOA source file name on EMBL-EBI ftp server.
def _assemble_conversion(stmt):
    """Assemble a Conversion statement into an English sentence."""
    reactants = _join_list([_assemble_agent_str(a) for a in stmt.obj_from])
    products = _join_list([_assemble_agent_str(a) for a in stmt.obj_to])
    if stmt.subj is None:
        text = '%s is converted into %s' % (reactants, products)
    else:
        subj_str = _assemble_agent_str(stmt.subj)
        text = '%s catalyzes the conversion of %s into %s' % \
            (subj_str, reactants, products)
    return _make_sentence(text)
Assemble a Conversion statement into text.
def _connect(self):
    """Connect to the Asterisk Manager Interface via telnet.

    Raises Exception with a descriptive message when the connection fails.
    """
    try:
        # telnetlib only grew a timeout parameter in Python 2.6.
        if sys.version_info[:2] >= (2, 6):
            self._conn = telnetlib.Telnet(self._amihost, self._amiport,
                                          connTimeout)
        else:
            self._conn = telnetlib.Telnet(self._amihost, self._amiport)
    except Exception as err:
        # Narrowed from a bare ``except`` so SystemExit/KeyboardInterrupt
        # pass through; chain the original error for debugging.
        raise Exception(
            "Connection to Asterisk Manager Interface on "
            "host %s and port %s failed." % (self._amihost, self._amiport)
        ) from err
Connect to Asterisk Manager Interface.
def adjustMask(self):
    """Updates the alpha mask for this popup widget.

    Dialog mode keeps a plain rectangular window (mask cleared); otherwise
    the widget's border path is painted into a bitmap used as the mask.
    """
    if self.currentMode() == XPopupWidget.Mode.Dialog:
        self.clearMask()
        return
    path = self.borderPath()
    # White is the transparent color for a QBitmap mask; black is opaque.
    bitmap = QBitmap(self.width(), self.height())
    bitmap.fill(QColor('white'))
    with XPainter(bitmap) as painter:
        painter.setRenderHint(XPainter.Antialiasing)
        pen = QPen(QColor('black'))
        pen.setWidthF(0.75)
        painter.setPen(pen)
        painter.setBrush(QColor('black'))
        painter.drawPath(path)
    self.setMask(bitmap)
Updates the alpha mask for this popup widget.
def decode(bstr):
    """Decode an ASCII hex MAC address (with or without colons) to an int."""
    digits = bstr.replace(b':', b'')
    if len(digits) != 12:
        raise ValueError('not a valid MAC address: {!r}'.format(digits))
    try:
        return int(digits, 16)
    except ValueError:
        raise ValueError('not a valid MAC address: {!r}'.format(digits))
Decodes an ASCII encoded binary MAC address string into a number.
def check_webhook_secret(app_configs=None, **kwargs):
    """Return a djstripe.W003 warning when DJSTRIPE_WEBHOOK_SECRET looks wrong."""
    from . import settings as djstripe_settings
    secret = djstripe_settings.WEBHOOK_SECRET
    messages = []
    if secret and not secret.startswith("whsec_"):
        warning = checks.Warning(
            "DJSTRIPE_WEBHOOK_SECRET does not look valid",
            hint="It should start with whsec_...",
            id="djstripe.W003",
        )
        messages.append(warning)
    return messages
Check that DJSTRIPE_WEBHOOK_SECRET looks correct
def getSectionName(configObj, stepnum):
    """Return the section key labelled 'STEP <stepnum>:', or None if absent."""
    marker = 'STEP ' + str(stepnum) + ':'
    for key in configObj.keys():
        if marker in key:
            return key
Return section label based on step number.
def _make_reserved_tokens_re(reserved_tokens): if not reserved_tokens: return None escaped_tokens = [_re_escape(rt) for rt in reserved_tokens] pattern = "(%s)" % "|".join(escaped_tokens) reserved_tokens_re = _re_compile(pattern) return reserved_tokens_re
Constructs compiled regex to parse out reserved tokens.
def remove_null_proxy_kwarg(func):
    """Decorator that drops any 'proxy' keyword argument before calling *func*.

    Used to wrap Manager methods that do not accept a proxy parameter.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped function's metadata
    def wrapper(*args, **kwargs):
        kwargs.pop('proxy', None)
        return func(*args, **kwargs)
    return wrapper
Decorator to remove a 'proxy' keyword argument, for wrapping certain Manager methods
def stop_tracking_host(self):
    """Remove this client's address from the server's connection tracker.

    Called when a module has finished executing.  A host that was never
    tracked (ValueError from list.remove) is ignored.
    """
    try:
        self.server.hosts.remove(self.client_address[0])
    except ValueError:
        # Host already removed / never tracked -- nothing more to do.
        return
    # Run the shutdown callback outside the try block so its own errors
    # are not silently swallowed by the ValueError handler above.
    if hasattr(self.server.module, 'on_shutdown'):
        self.server.module.on_shutdown(self.server.context,
                                       self.server.connection)
Called when a module has finished executing; removes the host from the connection tracker list
def copyproj(src_fn, dst_fn, gt=True):
    """Copy projection (and optionally geotransform) from one raster to another.

    When the two rasters differ in pixel dimensions, the copied pixel size
    is scaled by the resolution factor.
    NOTE(review): this assumes both rasters cover the same extent -- confirm.
    """
    src_ds = gdal.Open(src_fn, gdal.GA_ReadOnly)
    dst_ds = gdal.Open(dst_fn, gdal.GA_Update)
    dst_ds.SetProjection(src_ds.GetProjection())
    if gt:
        src_gt = np.array(src_ds.GetGeoTransform())
        src_dim = np.array([src_ds.RasterXSize, src_ds.RasterYSize])
        dst_dim = np.array([dst_ds.RasterXSize, dst_ds.RasterYSize])
        # Scale x/y pixel sizes (gt entries 1 and 5) when sizes differ.
        if np.any(src_dim != dst_dim):
            res_factor = src_dim/dst_dim.astype(float)
            src_gt[[1, 5]] *= max(res_factor)
        dst_ds.SetGeoTransform(src_gt)
    # Dereference datasets to flush and close them (GDAL idiom).
    src_ds = None
    dst_ds = None
Copy projection and geotransform from one raster file to another
async def _scheduleLoop(self):
    """Task loop to issue query tasks at the right times.

    Sleeps until the next appointment is due (or until explicitly woken),
    then pops and executes every appointment whose nexttime has passed.
    """
    while True:
        try:
            # None sleeps forever; otherwise wait until the earliest appt.
            timeout = None if not self.apptheap else self.apptheap[0].nexttime - time.time()
            if timeout is None or timeout >= 0.0:
                await asyncio.wait_for(self._wake_event.wait(), timeout=timeout)
        except asyncio.TimeoutError:
            pass
        if self.isfini:
            return
        self._wake_event.clear()
        now = time.time()
        while self.apptheap and self.apptheap[0].nexttime <= now:
            appt = heapq.heappop(self.apptheap)
            appt.updateNexttime(now)
            if appt.nexttime:
                # Recurring appointment: push back with its next fire time.
                heapq.heappush(self.apptheap, appt)
            if not appt.enabled:
                continue
            if appt.isrunning:
                logger.warning(
                    'Appointment %s is still running from previous time when scheduled to run.  Skipping.',
                    appt.iden)
            else:
                await self._execute(appt)
Task loop to issue query tasks at the right times.
def to_binary(self):
    """Convert N-ary operators to binary; return self when already binary."""
    converted = self.node.to_binary()
    if converted is self.node:
        return self
    return _expr(converted)
Convert N-ary operators to binary operators.
def _error_if_symbol_unused(symbol_word, technical_words_dictionary, line_offset, col_offset): result = technical_words_dictionary.corrections(symbol_word, distance=5, prefix=0) if not result.valid: return SpellcheckError(symbol_word, line_offset, col_offset, result.suggestions, SpellcheckError.TechnicalWord)
Return SpellcheckError if this symbol is not used in the code.
def render(obj):
    """Conveniently render strings with the fabric ``env`` context.

    Strings are %-interpolated with ``env``; tuples/lists and dicts are
    rendered element-wise.  Any other object is now returned unchanged --
    the original fell through and raised UnboundLocalError on ``rv``.
    NOTE: Python 2 code (basestring, types.StringType).
    """
    def get_v(v):
        return v % env if isinstance(v, basestring) else v
    if isinstance(obj, types.StringType):
        return obj % env
    elif isinstance(obj, (types.TupleType, types.ListType)):
        return [get_v(v) for v in obj]
    elif isinstance(obj, types.DictType):
        return dict((k, get_v(v)) for k, v in obj.items())
    return obj
Conveniently render strings with the fabric context
def remove(self):
    """Remove the hook from the model (idempotent)."""
    if self.removed:
        return
    self.hook.remove()
    self.removed = True
Remove the hook from the model.
def numeric(basetype, min_=None, max_=None):
    """Build an argparse type-validator for *basetype* bounded to [min_, max_]."""
    lo = None if min_ is None else basetype(min_)
    hi = None if max_ is None else basetype(max_)

    def _numeric(string):
        value = basetype(string)
        out_of_range = ((lo is not None and value < lo)
                        or (hi is not None and value > hi))
        if out_of_range:
            msg = "%r not in valid range %r" % (string, (lo, hi))
            raise argparse.ArgumentTypeError(msg)
        return value

    _numeric.__name__ = basetype.__name__
    return _numeric
Validator for numeric params
def show_md5_view(md5):
    """Render md5_view.html with the ``stream_sample`` output for *md5*.

    Redirects to '/' when no workbench connection is available.
    """
    if not WORKBENCH:
        return flask.redirect('/')
    md5_view = WORKBENCH.stream_sample(md5)
    return flask.render_template('templates/md5_view.html', md5_view=list(md5_view), md5=md5)
Renders template with `stream_sample` of the md5.
def obj2unicode(obj):
    """Return a unicode representation of *obj*, replacing undecodable bytes."""
    if isinstance(obj, unicode_type):
        return obj
    if not isinstance(obj, bytes_type):
        return unicode_type(obj)
    try:
        return unicode_type(obj, 'utf-8')
    except UnicodeDecodeError as strerror:
        sys.stderr.write("UnicodeDecodeError exception for string '%s': %s\n" % (obj, strerror))
        return unicode_type(obj, 'utf-8', 'replace')
Return a unicode representation of a python object
def _add_tile(self, new_tile, ijk): tile_label = "{0}_{1}".format(self.name, '-'.join(str(d) for d in ijk)) self.add(new_tile, label=tile_label, inherit_periodicity=False)
Add a tile with a label indicating its tiling position.
def NormalizeScopes(scope_spec):
    """Normalize *scope_spec* (space-separated string or iterable) to a set.

    Raises exceptions.TypecheckError for any other input type.
    """
    if isinstance(scope_spec, six.string_types):
        return set(scope_spec.split(' '))
    # collections.Iterable was removed in Python 3.10; collections.abc
    # has been the correct home since Python 3.3.
    elif isinstance(scope_spec, collections.abc.Iterable):
        return set(scope_spec)
    raise exceptions.TypecheckError(
        'NormalizeScopes expected string or iterable, found %s' % (
            type(scope_spec),))
Normalize scope_spec to a set of strings.
def _call(self, x):
    """Return values of *x* at the stored indices, weighted per variant.

    'point_eval' keeps the raw values; 'integrate' scales by the domain's
    cell volume (falling back to 1.0 when the domain defines none).
    """
    out = x.asarray().ravel()[self._indices_flat]
    if self.variant == 'point_eval':
        weights = 1.0
    elif self.variant == 'integrate':
        weights = getattr(self.domain, 'cell_volume', 1.0)
    else:
        raise RuntimeError('bad variant {!r}'.format(self.variant))
    # Skip the multiply when the weight is trivially 1.0.
    if weights != 1.0:
        out *= weights
    return out
Return values at indices, possibly weighted.
def generate_html_report(base_path, asset_id):
    """Generate the inventory-change HTML report for *asset_id* under *base_path*.

    Prints an error and returns when the asset id is unknown.
    NOTE: Python 2 code (print statements).
    """
    jenv = Environment(loader=PackageLoader('swchange', 'templates'))
    s = Session()
    asset = s.query(AssetList).filter_by(id=asset_id).first()
    if not asset:
        print 'Invalid Asset ID (%s)!' % asset_id
        return
    # Timestamped filename so repeated runs never clobber older reports.
    filename = os.path.join(base_path, '%s-INV-CHANGE-%s.html' % (
        asset.name, datetime.now().strftime('%Y-%m-%d.%H.%M.%S'))
    )
    print 'Generating Report : %s' % filename
    with open(filename, 'wb') as report:
        report.write(jenv.get_template('layout.html').render(
            asset=asset, current_date=datetime.now()
        ))
Generates the HTML report and dumps it into the specified filename
def extract_file_name(content_dispo):
    """Extract the filename parameter from a Content-Disposition header value."""
    header = content_dispo.decode('unicode-escape').strip('"')
    for part in header.split(';'):
        param = part.strip().split('=')
        if param[0] == "filename":
            return param[1].strip('"')
    return ""
Extract file name from the input request body
def create_doc_jar(self, target, open_jar, version):
    """Build a '-javadoc' jar for *target*; return its path, or None when
    neither java nor scala docs are available."""
    javadoc = self._java_doc(target)
    scaladoc = self._scala_doc(target)
    if javadoc or scaladoc:
        jar_path = self.artifact_path(open_jar, version, suffix='-javadoc')
        with self.open_jar(jar_path, overwrite=True, compressed=True) as open_jar:
            def add_docs(docs):
                # Each docs mapping is {basedir: [relative doc files]}.
                if docs:
                    for basedir, doc_files in docs.items():
                        for doc_file in doc_files:
                            open_jar.write(os.path.join(basedir, doc_file), doc_file)
            add_docs(javadoc)
            add_docs(scaladoc)
        return jar_path
    else:
        return None
Returns a doc jar if either scala or java docs are available for the given target.
def close(self):
    """Close the connection to the HTTP server.

    Closes the socket and any pending response object, then returns the
    connection state machine to idle.
    """
    if self.sock:
        self.sock.close()
        self.sock = None
    if self.__response:
        self.__response.close()
        self.__response = None
    self.__state = _CS_IDLE
Close the connection to the HTTP server.
def log_connection_info(self):
    """Print start-up hints to the terminal and record the kernel ports.

    Overridden to customize the message shown when the console starts.
    """
    _ctrl_c_lines = [
        'NOTE: Ctrl-C does not work to exit from the command line.',
        'To exit, just close the window, type "exit" or "quit" at the '
        'qtconsole prompt, or use Ctrl-\\ in UNIX-like environments '
        '(at the command prompt).']
    for line in _ctrl_c_lines:
        io.rprint(line)
    # Record all channel ports for later reference by the frontend.
    self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
                      stdin=self.stdin_port, hb=self.hb_port,
                      control=self.control_port)
Overridden to customize the start-up message printed to the terminal
def jinja_env(self) -> Environment:
    """The jinja environment used to load templates (created lazily)."""
    env = self._jinja_env
    if env is None:
        env = self.create_jinja_environment()
        self._jinja_env = env
    return env
The jinja environment used to load templates.
def collapse_spaces(text):
    """Collapse newlines, tabs and runs of whitespace into single spaces."""
    if not isinstance(text, six.string_types):
        return text
    collapsed = COLLAPSE_RE.sub(WS, text)
    return collapsed.strip(WS)
Replace newlines, tabs and multiple spaces with single spaces.
def copy(self):
    """Return a copy of the equation, preserving its tag and history."""
    return Eq(self._lhs, self._rhs, tag=self._tag,
              _prev_lhs=self._prev_lhs, _prev_rhs=self._prev_rhs,
              _prev_tags=self._prev_tags)
Return a copy of the equation
def fail_with_error(self, err_msg, err_operation=None):
    """Write *err_msg* to stderr (for ansible-playbook) and exit with code 1."""
    message = err_msg
    if err_operation:
        message = 'ERROR: "{err_msg}", while: {err_operation}'.format(
            err_msg=err_msg, err_operation=err_operation)
    sys.stderr.write(message)
    sys.exit(1)
log an error to std err for ansible-playbook to consume and exit
def getmethattr(obj, meth):
    """Return obj.meth() when it is a method, obj.meth for a plain
    attribute, and None when neither exists."""
    if hasmethod(obj, meth):
        return getattr(obj, meth)()
    if hasvar(obj, meth):
        return getattr(obj, meth)
    return None
Returns either the variable value or method invocation
def print_result(result):
    """Print *result*, encoding a fallback form on UnicodeEncodeError.

    NOTE: Python 2 code (print statements; final bare except kept as-is
    as a last-resort guard).
    """
    try:
        print result
    except UnicodeEncodeError:
        # Fall back to an encoded representation matching stdout, if known.
        if sys.stdout.encoding:
            print result.encode(sys.stdout.encoding, 'replace')
        else:
            print result.encode('utf8')
    except:
        print "Unexpected error attempting to print result"
Print the result, ascii encode if necessary
def _check_bios_resource(self, properties=[]): system = self._get_host_details() if ('links' in system['Oem']['Hp'] and 'BIOS' in system['Oem']['Hp']['links']): bios_uri = system['Oem']['Hp']['links']['BIOS']['href'] status, headers, bios_settings = self._rest_get(bios_uri) if status >= 300: msg = self._get_extended_error(bios_settings) raise exception.IloError(msg) for property in properties: if property not in bios_settings: msg = ('BIOS Property "' + property + '" is not' ' supported on this system.') raise exception.IloCommandNotSupportedError(msg) return headers, bios_uri, bios_settings else: msg = ('"links/BIOS" section in ComputerSystem/Oem/Hp' ' does not exist') raise exception.IloCommandNotSupportedError(msg)
Check if the bios resource exists.
def arrays_to_hdf5(filename="cache.hdf5"):
    """Return a Registry that serialises numpy arrays to HDF5 references."""
    serializers = {numpy.ndarray: SerNumpyArrayToHDF5(filename, "cache.lock")}
    return Registry(types=serializers,
                    hooks={'<ufunc>': SerUFunc()},
                    hook_fn=_numpy_hook)
Returns registry for serialising arrays to a HDF5 reference.
def wait_for_browser_close(b):
    """Block until the given TBrowser *b* is closed (no-op for falsy *b*)."""
    if not b:
        return
    if not __ACTIVE:
        wait_failover(wait_for_browser_close)
        return
    wait_for_frame(b.GetBrowserImp().GetMainFrame())
Can be used to wait until a TBrowser is closed
def create_response_object(self, service_id, version_number, name, status="200", response="OK", content="", request_condition=None, cache_condition=None):
    """Creates a new Response Object on the given Fastly service version.

    Returns the created FastlyResponseObject.
    """
    # Serialise only the fields FastlyResponseObject declares.
    body = self._formdata({
        "name": name,
        "status": status,
        "response": response,
        "content": content,
        "request_condition": request_condition,
        "cache_condition": cache_condition,
    }, FastlyResponseObject.FIELDS)
    content = self._fetch("/service/%s/version/%d/response_object" % (service_id, version_number), method="POST", body=body)
    return FastlyResponseObject(self, content)
Creates a new Response Object.
def dump(self):
    """Print flag words per segment: all addresses for small segments,
    else the first and last ten."""
    for seg in self.seglist:
        print("==== %08x-%08x" % (seg.startea, seg.endea))
        if seg.endea - seg.startea < 30:
            for ea in range(seg.startea, seg.endea):
                print("    %08x: %08x" % (ea, self.getFlags(ea)))
        else:
            for ea in range(seg.startea, seg.startea + 10):
                print("    %08x: %08x" % (ea, self.getFlags(ea)))
            print("...")
            for ea in range(seg.endea - 10, seg.endea):
                print("    %08x: %08x" % (ea, self.getFlags(ea)))
print first and last bits for each segment
def load_data_subject_areas(subject_file):
    """Read *subject_file* into a list of stripped lines.

    Prints a configuration hint and returns [] when the file is missing.
    """
    if not os.path.exists(subject_file):
        print('MISSING DATA FILE (subject_file) ', subject_file)
        print('update your config.py or config.txt')
        return []
    with open(subject_file, 'r') as f:
        return [line.strip() for line in f]
reads the subject file to a list, to confirm config is setup
def create(ctx, to, amount, symbol, secret, hash, account, expiration):
    """Create an HTLC contract on the blockchain and print the htlc id."""
    # Block until the transaction is included so operation_results exists.
    ctx.blockchain.blocking = True
    tx = ctx.blockchain.htlc_create(
        Amount(amount, symbol),
        to,
        secret,
        hash_type=hash,
        expiration=expiration,
        account=account,
    )
    tx.pop("trx", None)
    print_tx(tx)
    results = tx.get("operation_results", {})
    if results:
        htlc_id = results[0][1]
        print("Your htlc_id is: {}".format(htlc_id))
Create an HTLC contract
def job(name, **kwargs):
    """Shortcut decorator for declaring jobs (schedulable, bound JobTask)."""
    return task(name=name, base=JobTask, bind=True, schedulable=True,
                **kwargs)
A shortcut decorator for declaring jobs
def get(self, key, default=None):
    """Retrieve the first value recorded for *key*, or *default*."""
    for marker, value in self:
        if marker == key:
            return value
    return default
Retrieve the first value for a marker or None.
def _convert_punctuation(punctuation, conversion_table): if punctuation in conversion_table: return conversion_table[punctuation] return re.escape(punctuation)
Return a regular expression for a punctuation string.
def _start_date_of_year(year: int) -> datetime.date: jan_one = datetime.date(year, 1, 1) diff = 7 * (jan_one.isoweekday() > 3) - jan_one.isoweekday() return jan_one + datetime.timedelta(days=diff)
Return start date of the year using MMWR week rules
def icons(self, strip_ext=False):
    """List icon files in this DAP, optionally with extensions stripped."""
    matches = [f for f in self._stripped_files
               if self._icons_pattern.match(f)]
    if not strip_ext:
        return matches
    suffix = '\.({ext})'.format(ext=self._icons_ext)
    return [strip_suffix(f, suffix, regex=True) for f in matches]
Get all icons in this DAP, optionally strip extensions
def delete_scheme(self):
    """Deletes the currently selected custom color scheme.

    After user confirmation: falls back to the 'spyder' scheme, removes the
    scheme from the custom names option, and purges its CONF entries.
    """
    scheme_name = self.current_scheme
    answer = QMessageBox.warning(self, _("Warning"),
                                 _("Are you sure you want to delete "
                                   "this scheme?"),
                                 QMessageBox.Yes | QMessageBox.No)
    if answer == QMessageBox.Yes:
        # Select the default scheme before deleting the current one.
        names = self.get_option('names')
        self.set_scheme('spyder')
        self.schemes_combobox.setCurrentIndex(names.index('spyder'))
        self.set_option('selected', 'spyder')
        # Drop the scheme from the list of custom schemes.
        custom_names = self.get_option('custom_names', [])
        if scheme_name in custom_names:
            custom_names.remove(scheme_name)
        self.set_option('custom_names', custom_names)
        # Purge every per-scheme option from the configuration store.
        for key in syntaxhighlighters.COLOR_SCHEME_KEYS:
            option = "{0}/{1}".format(scheme_name, key)
            CONF.remove_option(self.CONF_SECTION, option)
        CONF.remove_option(self.CONF_SECTION,
                           "{0}/name".format(scheme_name))
        self.update_combobox()
        self.update_preview()
Deletes the currently selected custom color scheme.
def shutdown(self):
    """Flush filesystems, then call the dbus proxy to start the shutdown."""
    if not self._proxy:
        return
    os.sync()
    self._proxy(*self._args)
Call the dbus proxy to start the shutdown.
def process_msg(self, msg):
    """Decode an event-stream message; refresh devices on 'Sessions' data."""
    jmsg = json.loads(msg)
    msgtype = jmsg['MessageType']
    msgdata = jmsg['Data']
    _LOGGER.debug('New websocket message recieved of type: %s', msgtype)
    if msgtype != 'Sessions':
        return
    self._sessions = msgdata
    self.update_device_list(self._sessions)
Process messages from the event stream.
def to_str(obj):
    """Convert *obj* to a native string (utf-8 encoding unicode objects)."""
    if isinstance(obj, str):
        return obj
    if isinstance(obj, unicode):
        return obj.encode('utf-8')
    return str(obj)
convert a object to string
def parse_opml_bytes(data: bytes) -> OPML:
    """Parse an OPML document from a byte-string containing XML data."""
    tree = parse_xml(BytesIO(data))
    return _parse_opml(tree.getroot())
Parse an OPML document from a byte-string containing XML data.
def _vertex_list_to_dataframe(ls, id_column_name):
    """Convert a list of vertices into a pandas DataFrame.

    The frame has one id column (vertex vids) plus one column per attribute
    seen on any vertex; missing attributes become None.
    """
    assert HAS_PANDAS, 'Cannot use dataframe because Pandas is not available or version is too low.'
    # Union of all attribute names across the vertices.
    cols = reduce(set.union, (set(v.attr.keys()) for v in ls))
    df = pd.DataFrame({id_column_name: [v.vid for v in ls]})
    for c in cols:
        df[c] = [v.attr.get(c) for v in ls]
    return df
Convert a list of vertices into dataframe.
def Parse(self, cmd, args, stdout, stderr, return_val, time_taken, knowledge_base):
    """Parse the sysctl output into a single AttributedDict result."""
    _ = stderr, time_taken, args, knowledge_base  # Unused.
    self.CheckReturn(cmd, return_val)
    result = rdf_protodict.AttributedDict()
    # Keys like 'net.ipv4.x' become 'net_ipv4_x'; single-element value
    # lists are unwrapped to the bare value.
    for k, v in iteritems(self.lexer.ParseToOrderedDict(stdout)):
        key = k.replace(".", "_")
        if len(v) == 1:
            v = v[0]
        result[key] = v
    return [result]
Parse the sysctl output.
def _onLeftButtonUp(self, evt):
    """Handle wx left-button release: forward to matplotlib with flipped y."""
    x = evt.GetX()
    # wx uses a top-left origin; matplotlib expects bottom-left.
    y = self.figure.bbox.height - evt.GetY()
    evt.Skip()
    if self.HasCapture():
        self.ReleaseMouse()
    FigureCanvasBase.button_release_event(self, x, y, 1, guiEvent=evt)
End measuring on an axis.
def make_scrape_request(session, url, mode='get', data=None):
    """Request *url* via *session* and return a BeautifulSoup of the page.

    Raises VooblyError on connection failure, missing login, or a page
    that cannot be found.
    """
    try:
        html = session.request(mode, url, data=data)
    except RequestException:
        raise VooblyError('failed to connect')
    if SCRAPE_FETCH_ERROR in html.text:
        raise VooblyError('not logged in')
    if html.status_code != 200 or SCRAPE_PAGE_NOT_FOUND in html.text:
        raise VooblyError('page not found')
    return bs4.BeautifulSoup(html.text, features='lxml')
Make a request to URL.
def _close(self): self.client.stop() self.open = False self.waiting = False
Close the TCP connection.
def insert(self, storagemodel) -> StorageTableModel:
    """Insert or replace *storagemodel* in Azure table storage.

    Sets ``storagemodel._exists`` accordingly and returns the model.
    Unexpected errors raise AzureStorageWrapException -- the original's
    ``return`` inside ``finally`` silently swallowed that exception.
    """
    modeldefinition = self.getmodeldefinition(storagemodel, True)
    try:
        modeldefinition['tableservice'].insert_or_replace_entity(modeldefinition['tablename'], storagemodel.entity())
        storagemodel._exists = True
    except AzureMissingResourceHttpError as e:
        # Missing table/resource is logged but not fatal.
        storagemodel._exists = False
        log.debug('can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'.format(modeldefinition['tablename'], storagemodel.getPartitionKey(), storagemodel.getRowKey(), e))
    except Exception as e:
        storagemodel._exists = False
        msg = 'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'.format(modeldefinition['tablename'], storagemodel.PartitionKey, storagemodel.RowKey, e)
        raise AzureStorageWrapException(msg=msg)
    return storagemodel
insert model into storage
def _gitignore_entry_to_regex(entry): ret = entry.strip() ret = ret.replace('.', '\.') ret = ret.replace('*', '.*') return ret
Take a path that you might find in a .gitignore file and turn it into a regex
def pack(self):
    """Pack this structure into a fresh bytearray of ``self.size`` bytes."""
    buffer = bytearray(self.size)
    self.pack_into(buffer)
    return buffer
convenience function for packing
def _bulk_state(saltfunc, lbn, workers, profile):
    """Generic function for a bulk modjk worker operation.

    Runs *saltfunc* against *workers* for load balancer *lbn* and returns
    a standard Salt state dict (name/result/changes/comment).
    """
    ret = {'name': lbn,
           'result': True,
           'changes': {},
           'comment': ''}
    # workers need to be a list
    if not isinstance(workers, list):
        ret['result'] = False
        ret['comment'] = 'workers should be a list not a {0}'.format(
            type(workers)
        )
        return ret
    if __opts__['test']:
        ret['result'] = None
        return ret
    log.info('executing %s to modjk workers %s', saltfunc, workers)
    try:
        cmdret = __salt__[saltfunc](workers, lbn, profile=profile)
    except KeyError:
        ret['result'] = False
        ret['comment'] = 'unsupported function {0}'.format(
            saltfunc
        )
        return ret
    # Collect workers that reported failure.
    errors = []
    for worker, ok in six.iteritems(cmdret):
        if not ok:
            errors.append(worker)
    ret['changes'] = {'status': cmdret}
    if errors:
        ret['result'] = False
        ret['comment'] = '{0} failed on some workers'.format(saltfunc)
    return ret
Generic function for bulk worker operation
def cut_references(text_lines):
    """Return *text_lines* with the reference section removed (in place).

    Logs a warning and returns the lines unchanged when no reference
    section can be located.
    """
    ref_sect_start = find_reference_section(text_lines)
    if ref_sect_start is not None:
        start = ref_sect_start["start_line"]
        end = find_end_of_reference_section(text_lines, start,
                                            ref_sect_start["marker"],
                                            ref_sect_start["marker_pattern"])
        # Delete the whole section, end line inclusive.
        del text_lines[start:end + 1]
    else:
        current_app.logger.warning("Found no references to remove.")
        return text_lines
    return text_lines
Return the text lines with the references cut.
def erase_lines(n=1):
    """Erase *n* lines above and move the cursor up to follow."""
    up, eol = codes.cursor["up"], codes.cursor["eol"]
    for _ in range(n):
        print(up, end="")
        print(eol, end="")
Erases n lines from the screen and moves the cursor up to follow
def _read_linguas_from_files(env, linguas_files=None):
    """Parse `LINGUAS` file(s) and return the list of extracted languages.

    NOTE(review): a truthy argument that is not a list/string/FS node is
    replaced by the default ['LINGUAS'] -- appears to support passing a
    plain truthy flag, but confirm against SCons callers.
    """
    import SCons.Util
    import SCons.Environment
    global _re_comment
    global _re_lang
    if not SCons.Util.is_List(linguas_files) \
            and not SCons.Util.is_String(linguas_files) \
            and not isinstance(linguas_files, SCons.Node.FS.Base) \
            and linguas_files:
        linguas_files = ['LINGUAS']
    if linguas_files is None:
        return []
    fnodes = env.arg2nodes(linguas_files)
    linguas = []
    for fnode in fnodes:
        # Strip comments, then collect non-empty language tokens.
        contents = _re_comment.sub("", fnode.get_text_contents())
        ls = [l for l in _re_lang.findall(contents) if l]
        linguas.extend(ls)
    return linguas
Parse `LINGUAS` file and return list of extracted languages
def _config():
    """Get Nagios URL, username and password from the Salt configuration.

    Accepts both 'nagios.key' and 'nagios:key' configuration forms; raises
    CommandExecutionError when the URL is missing.
    """
    status_url = __salt__['config.get']('nagios.status_url') or \
        __salt__['config.get']('nagios:status_url')
    if not status_url:
        raise CommandExecutionError('Missing Nagios URL in the configuration.')
    username = __salt__['config.get']('nagios.username') or \
        __salt__['config.get']('nagios:username')
    password = __salt__['config.get']('nagios.password') or \
        __salt__['config.get']('nagios:password')
    return {
        'url': status_url,
        'username': username,
        'password': password
    }
Get configuration items for URL, Username and Password
def _get_host_details(self): status, headers, system = self._rest_get('/rest/v1/Systems/1') if status < 300: stype = self._get_type(system) if stype not in ['ComputerSystem.0', 'ComputerSystem.1']: msg = "%s is not a valid system type " % stype raise exception.IloError(msg) else: msg = self._get_extended_error(system) raise exception.IloError(msg) return system
Get the system details.
def reset(self):
    """Reset the compiled-template cache (tmp dir or in-memory dict)."""
    with self.lock:
        if not self.cache:
            return
        if self.use_tmp:
            shutil.rmtree(self.tmp_dir, ignore_errors=True)
        else:
            self.templates = {}
Resets the cache of compiled templates.
def log_exceptions(self, c, broker):
    """Forward any exception recorded for component *c* to logit (syslog)."""
    if c not in broker.exceptions:
        return
    ex = broker.exceptions.get(c)
    message = "Exception in {0} - {1}".format(dr.get_name(c), str(ex))
    self.logit(message, self.pid, self.user, "insights-run", logging.ERROR)
Gets exceptions to be logged and sends to logit function to be logged to syslog
def from_file(cls, path, encoding, dialect, fields, converters, field_index):
    """Open *path* as text and build a delimited-text reader from the stream."""
    stream = open(path, 'r', encoding=encoding)
    return cls(stream, dialect, fields, converters, field_index)
Read delimited text from a text file.
def addAttachment(self, attachment):
    """Adds an attachment or a list of attachments to the Analysis Request.

    Attachments are compared by UID; only ones not already assigned are
    appended.
    NOTE(review): relies on Python 2 ``map``/``filter`` returning lists --
    under Python 3 ``original.extend`` would fail on a map object.
    """
    if not isinstance(attachment, (list, tuple)):
        attachment = [attachment]
    original = self.getAttachment() or []
    # Normalise both sides to UIDs before comparing.
    original = map(api.get_uid, original)
    attachment = map(api.get_uid, attachment)
    # Drop attachments that are already assigned.
    attachment = filter(lambda at: at not in original, attachment)
    if attachment:
        original.extend(attachment)
    self.setAttachment(original)
Adds an attachment or a list of attachments to the Analysis Request
def contains_all(self, other):
    """Return ``True`` if ``other`` is a sequence of (complex) numbers.

    Uses the sequence's own dtype when available, otherwise infers the
    common dtype of its elements.
    """
    dtype = getattr(other, 'dtype', None)
    if dtype is None:
        dtype = np.result_type(*other)
    return is_numeric_dtype(dtype)
Return ``True`` if ``other`` is a sequence of complex numbers.
def _BuildIndex(self): self._index = {} for i, k in enumerate(self._keys): self._index[k] = i
Recreate the key index.
def launch(self, callback_function=None):
    """Launch the app related to this controller (requires registration)."""
    self._check_registered()
    receiver = self._socket_client.receiver_controller
    receiver.launch_app(self.supporting_app_id,
                        callback_function=callback_function)
If set, launches app related to the controller.
def _get_categorical_score(
        self, profile: List, negated_classes: List, categories: List,
        negation_weight: Optional[float] = 1,
        ic_map: Optional[Dict[str, float]] = None) -> float:
    """Return the average of the simple scores across *categories*.

    For each category, only profile/negated classes that are descendants
    of that category contribute.  Raises ValueError when statistics for a
    category have not been indexed.
    """
    if ic_map is None:
        ic_map = self.ic_store.get_profile_ic(profile + negated_classes)
    scores = []
    for cat in categories:
        if cat not in self.ic_store.category_statistics:
            raise ValueError("statistics for {} not indexed".format(cat))
        # Restrict both profiles to members of this category.
        pos_profile = [cls for cls in profile
                       if cls in self.ic_store.category_statistics[cat].descendants]
        neg_profile = [cls for cls in negated_classes
                       if cls in self.ic_store.category_statistics[cat].descendants]
        scores.append(self._get_simple_score(
            pos_profile, neg_profile,
            self.ic_store.category_statistics[cat].mean_mean_ic,
            self.ic_store.category_statistics[cat].max_max_ic,
            self.ic_store.category_statistics[cat].mean_sum_ic,
            negation_weight, ic_map
        ))
    return mean(scores)
The average of the simple scores across a list of categories
def _generate_validator(self, field):
    """Emit a validation call for *field* when its data type has a validator.

    Fields with defaults validate ``value ?: default`` so nil falls back
    to the default before validation.
    """
    validator = self._determine_validator_type(field.data_type,
                                               fmt_var(field.name),
                                               field.has_default)
    value = fmt_var(
        field.name) if not field.has_default else '{} ?: {}'.format(
        fmt_var(field.name), fmt_default_value(field))
    if validator:
        self.emit('{}({});'.format(validator, value))
Emits validator if data type has associated validator.
def _run_atstart():
    """Hook frameworks must invoke this before running the main hook body.

    Runs every queued (callback, args, kwargs) triple, then empties the
    queue in place so other references to the list see it drained.
    """
    global _atstart
    for callback, args, kwargs in _atstart:
        callback(*args, **kwargs)
    del _atstart[:]
Hook frameworks must invoke this before running the main hook body.
def _get_verts_and_connect(self, paths): verts = np.vstack(paths) gaps = np.add.accumulate(np.array([len(x) for x in paths])) - 1 connect = np.ones(gaps[-1], dtype=bool) connect[gaps[:-1]] = False return verts, connect
retrieve vertices and connects from given paths-list
def pretty(self, indent=0, debug=False):
    """Return a pretty formatted representation of self."""
    details = ''
    if debug:
        details += '<isliteral=%r, iscanonical=%r>' % (self.isliteral,
                                                       self.iscanonical)
    if isinstance(self.obj, basestring):
        rendered = "'%s'" % self.obj
    else:
        rendered = repr(self.obj)
    return (' ' * indent) + ('%s(%s%s)' % (self.__class__.__name__,
                                           details, rendered))
Return a pretty formatted representation of self.
def save(self):
    """Snapshot the current state of the Kalman filter into the history.

    Appends the declared properties and a deep copy of the filter's
    attributes (optionally dropping private, callable and ignored entries)
    to the per-key lists, then mirrors those lists onto this object.
    """
    kf = self._kf
    for prop in self.properties:
        self._DL[prop[0]].append(getattr(kf, prop[0]))
    v = copy.deepcopy(kf.__dict__)
    if self._skip_private:
        for key in list(v.keys()):
            if key.startswith('_'):
                # NOTE(review): debug print left in -- consider removing.
                print('deleting', key)
                del v[key]
    if self._skip_callable:
        for key in list(v.keys()):
            if callable(v[key]):
                del v[key]
    for ig in self._ignore:
        if ig in v:
            del v[ig]
    for key in list(v.keys()):
        self._DL[key].append(v[key])
    # Expose the accumulated lists as attributes of this saver.
    self.__dict__.update(self._DL)
    self._len += 1
save the current state of the Kalman filter
def commit_all(self):
    """Commit nested transactions until none remain (respecting auto-commit)."""
    while self._transaction_nesting_level != 0:
        if self._transaction_nesting_level == 1 and not self._auto_commit:
            return self.commit()
        self.commit()
Commits all current nesting transactions.
def create_image_list(self, dataset, fns_idxs):
    "Create a list of images, filenames and labels but first removing files that are not supposed to be displayed."
    items = dataset.x.items
    if self._duplicates:
        # Indices come in pairs; keep a pair only when both files exist.
        chunked_idxs = chunks(fns_idxs, 2)
        chunked_idxs = [chunk for chunk in chunked_idxs if Path(items[chunk[0]]).is_file() and Path(items[chunk[1]]).is_file()]
        return [(dataset.x[i]._repr_jpeg_(), items[i], self._labels[dataset.y[i].data]) for chunk in chunked_idxs for i in chunk]
    else:
        return [(dataset.x[i]._repr_jpeg_(), items[i], self._labels[dataset.y[i].data]) for i in fns_idxs if Path(items[i]).is_file()]
Create a list of images, filenames and labels but first removing files that are not supposed to be displayed.
def decimal_round(number, num_digits, rounding=ROUND_HALF_UP):
    """Round a Decimal to *num_digits* places; negative digits round to
    powers of ten left of the decimal point."""
    exp = Decimal(10) ** -num_digits
    if num_digits < 0:
        return exp * (number / exp).to_integral_value(rounding)
    return number.quantize(exp, rounding)
Rounding for decimals with support for negative digits
def max_electronegativity(self):
    """Return the maximum pairwise electronegativity difference."""
    maximum = 0
    for first, second in combinations(self.elements, 2):
        difference = abs(Element(first).X - Element(second).X)
        if difference > maximum:
            maximum = difference
    return maximum
returns the maximum pairwise electronegativity difference
def add_dependency(self, from_task_name, to_task_name):
    """Add a dependency edge between two tasks and persist the graph."""
    logger.debug('Adding dependency from {0} to {1}'.format(from_task_name, to_task_name))
    if not self.state.allow_change_graph:
        raise DagobahError("job's graph is immutable in its current state: %s"
                           % self.state.status)
    self.add_edge(from_task_name, to_task_name)
    self.commit()
Add a dependency between two tasks.
def bot_config(player_config_path: Path, team: Team) -> 'PlayerConfig':
    """Build a PlayerConfig for an rlbot-controlled bot from its config file."""
    bot_config = PlayerConfig()
    bot_config.bot = True
    bot_config.rlbot_controlled = True
    bot_config.team = team.value
    bot_config.config_path = str(player_config_path.absolute())
    # Resolve the bot's name and appearance from its config bundle.
    config_bundle = get_bot_config_bundle(bot_config.config_path)
    bot_config.name = config_bundle.name
    bot_config.loadout_config = load_bot_appearance(config_bundle.get_looks_config(), bot_config.team)
    return bot_config
A function to cover the common case of creating a config for a bot.
def Parse(self, cmd, args, stdout, stderr, return_val, time_taken,
          knowledge_base):
    """Parse the yum output into SoftwarePackages results (a generator)."""
    _ = stderr, time_taken, args, knowledge_base  # Unused.
    self.CheckReturn(cmd, return_val)
    packages = []
    # Skip the header line of the package listing.
    for line in stdout.decode("utf-8").splitlines()[1:]:
        cols = line.split()
        name_arch, version, source = cols
        name, arch = name_arch.split(".")
        status = rdf_client.SoftwarePackage.InstallState.INSTALLED
        packages.append(
            rdf_client.SoftwarePackage(
                name=name,
                publisher=source,
                version=version,
                architecture=arch,
                install_state=status))
    if packages:
        yield rdf_client.SoftwarePackages(packages=packages)
Parse the yum output.
def transform(self, trans):
    """Return a copy of this neuron with 3D transformation *trans* applied."""
    cloned = deepcopy(self._data)
    cloned.data_block[:, 0:3] = trans(cloned.data_block[:, 0:3])
    return FstNeuron(cloned, self.name)
Return a copy of this neuron with a 3D transformation applied
def unnest(c, elem, ignore_whitespace=False):
    """unnest the element from its parent within doc. MUTABLE CHANGES

    Splits the parent around *elem*: content before *elem* moves into a new
    copy of the parent inserted before it, and *elem* itself is raised to
    the grandparent level.  Empty leftovers are pruned.
    """
    parent = elem.getparent()
    gparent = parent.getparent()
    index = parent.index(elem)
    # Build a preceding copy of the parent holding everything before elem.
    preparent = etree.Element(parent.tag)
    preparent.text, parent.text = (parent.text or ''), ''
    for k in parent.attrib.keys():
        preparent.set(k, parent.get(k))
    if index > 0:
        for ch in parent.getchildren()[:index]:
            preparent.append(ch)
    gparent.insert(gparent.index(parent), preparent)
    XML.remove_if_empty(preparent, leave_tail=True, ignore_whitespace=ignore_whitespace)
    # Move elem up to the grandparent, just before the (now trailing) parent.
    XML.remove(elem, leave_tail=True)
    gparent.insert(gparent.index(parent), elem)
    elem.tail = ''
    XML.remove_if_empty(parent, leave_tail=True, ignore_whitespace=ignore_whitespace)
unnest the element from its parent within doc. MUTABLE CHANGES
def parse_combo(self, combo, modes_set, modifiers_set, pfx):
    """Parse a combo string into (mode, modifiers, trigger).

    NOTE(review): the flattened source is ambiguous about nesting; this
    reconstruction assumes combos always contain '+' except when the whole
    combo is the trigger -- confirm against callers.  ``set(combo)`` on a
    multi-character remainder would split it into characters.
    """
    mode, mods, trigger = None, set([]), combo
    if '+' in combo:
        if combo.endswith('+'):
            # Trailing '+' means the plus key itself is the trigger.
            trigger, combo = '+', combo[:-1]
            if '+' in combo:
                items = set(combo.split('+'))
            else:
                items = set(combo)
        else:
            items = combo.split('+')
            trigger, items = items[-1], set(items[:-1])
        if '*' in items:
            # '*' is a wildcard matching any modifiers.
            items.remove('*')
            mods = '*'
        else:
            mods = items.intersection(modifiers_set)
        mode = items.intersection(modes_set)
        if len(mode) == 0:
            mode = None
        else:
            mode = mode.pop()
    if pfx is not None:
        trigger = pfx + trigger
    return (mode, mods, trigger)
Parse a string into a mode, a set of modifiers and a trigger.
def save(self):
    """Store the config back to its file, creating parent dirs if needed."""
    directory = os.path.dirname(self._configfile)
    if directory:
        # exist_ok replaces the original try/bare-except around mkdir,
        # which also hid real errors such as permission failures.
        os.makedirs(directory, exist_ok=True)
    with open(self._configfile, 'w') as f:
        self._config.write(f)
Store config back to file.
def autoescape(filter_func):
    """Decorator to autoescape result from filters.

    Wraps the filter result in Markup when the current evaluation context
    has autoescaping enabled.
    """
    @evalcontextfilter
    @wraps(filter_func)
    def _autoescape(eval_ctx, *args, **kwargs):
        result = filter_func(*args, **kwargs)
        if eval_ctx.autoescape:
            result = Markup(result)
        return result
    return _autoescape
Decorator to autoescape result from filters.