code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def network_create(self, name, **kwargs):
    """Create an extra private network with the given label.

    :param name: label to assign to the new network
    :param kwargs: additional network parameters, sanitized before use
    :return: dict of the created network object's attributes
    """
    conn = self.compute_conn
    kwargs['label'] = name
    params = self._sanatize_network_params(kwargs)
    created = conn.networks.create(**params)
    return created.__dict__
Create extra private network
def do_status(self, line): print('{} {}'.format(bold('Pyrene version'), green(get_version()))) pip_conf = os.path.expanduser('~/.pip/pip.conf') if os.path.exists(pip_conf): conf = read_file(pip_conf) repo = self._get_repo_for_pip_conf(conf) if repo: ...
Show python packaging configuration status
def _expand(self, line: str) -> str: tmp_aliases = list(self.aliases.keys()) keep_expanding = bool(tmp_aliases) while keep_expanding: for cur_alias in tmp_aliases: keep_expanding = False match = self._command_pattern.search(line) if mat...
Expand shortcuts and aliases
def _setable_get_(name, self): "Used to raise an exception for attributes unable to be evaluated yet." raise AttributeError( "'{typename}' object has no attribute '{name}'".format( typename=type(self).__name__, name=name ) )
Used to raise an exception for attributes unable to be evaluated yet.
def record_leaving(self, time, code, frame_key, parent_stats): try: stats = parent_stats.get_child(code) time_entered = self._times_entered.pop((code, frame_key)) except KeyError: return time_elapsed = time - time_entered stats.deep_time += max(0, time...
Left from a function call.
def download_content(**args): args = validate_args(**args) if not args['directory']: args['directory'] = args['query'].replace(' ', '-') print("Downloading {0} {1} files on topic {2} from {3} and saving to directory: {4}" .format(args['limit'], args['file_type'], args['query'], args['website'], args['directory']...
main function to fetch links and download them
def _get_existing_report(self, mask, report): for existing_report in self._reports: if existing_report['namespace'] == report['namespace']: if mask == existing_report['queryMask']: return existing_report return None
Returns the aggregated report that matches report
def expand_folder(files): expfiles = [] for file in files: if os.path.isdir(file): for dirpath, dirnames, filenames in os.walk(file): for filename in filenames: expfiles.append(os.path.join(dirpath, filename)) else: expfiles.append(file...
Return a clone of file list files where all directories are recursively replaced with their contents.
def _from_dict(cls, _dict): args = {} if 'tokens' in _dict: args['tokens'] = [ TokenResult._from_dict(x) for x in (_dict.get('tokens')) ] if 'sentences' in _dict: args['sentences'] = [ SentenceResult._from_dict(x) for x in (_dic...
Initialize a SyntaxResult object from a json dictionary.
def layer_pre_save(instance, *args, **kwargs): is_valid = True if not instance.service.type == 'Hypermap:WorldMap': if not instance.service.is_valid: is_valid = False LOGGER.debug('Layer with id %s is marked invalid because its service is invalid' % instance.id) if instan...
Used to check layer validity.
def mysql_to_dict(data, key): ret = {} headers = [''] for line in data: if not line: continue if line.startswith('+'): continue comps = line.split('|') for comp in range(len(comps)): comps[comp] = comps[comp].strip() if len(headers)...
Convert MySQL-style output to a python dictionary
def nested(self, format_callback=None):
    """Return the graph as a nested list, expanded from the root nodes.

    Roots are the edges keyed under ``None``; each is expanded via the
    recursive ``_nested`` helper, sharing one ``seen`` set to avoid
    revisiting nodes.
    """
    seen = set()
    result = []
    for root in self.edges.get(None, ()):
        result.extend(self._nested(root, seen, format_callback))
    return result
Return the graph as a nested list.
def human_size(bytes, units=(' bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB')):
    """Return a human-readable string representation of *bytes*.

    Recursively divides by 1024 until the value fits the current unit.

    Bug fix: the original recursed past the end of ``units`` (IndexError
    for values >= 1024 EB); we now stop at the last available unit. The
    default is also a tuple rather than a shared mutable list.

    :param bytes: non-negative integer byte count
    :param units: unit suffixes, smallest first
    :return: e.g. ``'512 bytes'``, ``'2KB'``
    """
    if bytes < 1024 or len(units) == 1:
        return str(bytes) + units[0]
    return human_size(bytes >> 10, units[1:])
Returns a human-readable string representation of bytes
def _listen(self, uuid=None, session=None): if self.url is None: raise Exception("NURESTPushCenter needs to have a valid URL. please use setURL: before starting it.") events_url = "%s/events" % self.url if uuid: events_url = "%s?uuid=%s" % (events_url, uuid) reque...
Listen a connection uuid
def tag_builder(parser, token, cls, flow_type): tokens = token.split_contents() tokens_num = len(tokens) if tokens_num == 1 or (tokens_num == 3 and tokens[1] == 'for'): flow_name = None if tokens_num == 3: flow_name = tokens[2] return cls(flow_name) else: rais...
Helper function handling flow form tags.
def DisplayGetter(accessor, *args, **kwargs):
    """Return a Getter for the display name of a model field with choices.

    Wraps the ``get_<field>_display`` accessor and labels the getter
    with the field's pretty name.
    """
    pretty = get_pretty_name(accessor)
    getter = Getter('get_%s_display' % accessor, *args, **kwargs)
    getter.short_description = pretty
    return getter
Returns a Getter that gets the display name for a model field with choices.
def commit(message=COMMON_COMMIT_MESSAGE, capture=True):
    """git commit -am, using the common commit message when omitted.

    :param message: commit message; defaults to COMMON_COMMIT_MESSAGE
    :param capture: accepted for interface compatibility; unused here
    """
    env.warn_only = True
    cmd = u'git commit -am"{}"'.format(message)
    local(cmd)
git commit with the common commit message when omitted.
def main():
    """Parse the args and call whatever subcommand function was selected.

    Prints usage and exits with status 1 when no subcommand set ``func``.
    """
    args = parser.parse_args()
    try:
        selected = args.func
    except AttributeError:
        # No subcommand chosen on the command line.
        parser.print_usage()
        parser.exit(1)
    selected(vars(args))
Parse the args and call whatever function was selected
def _get_base_class_names(frame): co, lasti = frame.f_code, frame.f_lasti code = co.co_code extends = [] for (op, oparg) in op_stream(code, lasti): if op in dis.hasconst: if type(co.co_consts[oparg]) == str: extends = [] elif op in dis.hasname: if ...
Get baseclass names from the code object
def check_if_needs_inversion(tomodir): required_files = ( 'grid' + os.sep + 'elem.dat', 'grid' + os.sep + 'elec.dat', 'exe' + os.sep + 'crtomo.cfg', ) needs_inversion = True for filename in required_files: if not os.path.isfile(tomodir + os.sep + filename): ne...
check if we need to run CRTomo in a given tomodir
def make_arg(key, annotation=None):
    """Make an ``ast.arg`` function argument with zeroed source location.

    :param key: argument name
    :param annotation: optional annotation node
    :return: the positioned ``ast.arg`` node
    """
    node = ast.arg(key, annotation)
    node.lineno = 0
    node.col_offset = 0
    return node
Make an ast function argument.
def connect(self, sock):
    """Attach *sock* to this channel and register the message callback.

    Stores the socket, subscribes it to ``self.channel``, and installs
    a wrapper that prepends ``self.response_type`` to every callback.
    """
    def relay(*args, **kwargs):
        # Forward with the expected response type prepended.
        self.callback(self.response_type, *args, **kwargs)

    self.sock = sock
    self.sock.subscribe(self.channel)
    self.sock.onchannel(self.channel, relay)
Attach a given socket to a channel
def _number_of_rows(self, start=0, count=100, **kwargs): first = str(start) last = str(start + count) string_format = ':'.join([first, last]) return string_format
Internal method to format the number of rows the EPA API returns.
def read(path):
    """Read and return the entire text contents of the file at *path*.

    :param path: filesystem path to read
    :return: file contents as str

    The original pre-initialized ``data = None`` and called
    ``f.close()`` inside the ``with`` block; both were redundant since
    the context manager already closes the handle.
    """
    with open(path, 'r') as f:
        return f.read()
Reads a file located at the given path.
def directory_values_generator(self, key):
    """Yield the stored value for every entry under directory *key*.

    The original reused the name ``key`` for the loop variable,
    shadowing the parameter; renamed here for clarity.
    """
    for child in self.directory(key):
        yield self.get(Key(child))
Retrieve directory values for given key.
def show_graphs(self):
    """Show a pile of the graphs selected for display."""
    wrapped = [[graph] for graph in self.visible_graphs.values()]
    elements = itertools.chain.from_iterable(wrapped)
    self.graph_place_holder.original_widget = urwid.Pile(elements)
Show a pile of the graphs selected for display
def update(self): if self._is_ignored or "tags" not in self._tag_group_dict: return for i in range(len(self._tag_group_dict["tags"])): tag_dict = self._tag_group_dict["tags"][i] for tag in self._tags: if tag.name == tag_dict["common.ALLTYPES_NAME"]: ...
Updates the dictionary of the tag group
def _startProcess(self):
    """Use the inductor to start the process we relay data from.

    :return: Deferred that fires once the process is connected
    """
    connected = defer.Deferred()
    protocol = RelayProcessProtocol(connected)
    self.inductor.execute(protocol, *self.inductorArgs)
    return connected
Use the inductor to start the process we want to relay data from.
def find_user_by_username(self, username):
    """Find a User object by username.

    NOTE(review): the ``ifind_`` prefix suggests a case-insensitive
    lookup — confirm against the adapter's documentation.
    """
    adapter = self.db_adapter
    return adapter.ifind_first_object(self.UserClass, username=username)
Find a User object by username.
def create_build_context(self, variant, build_type, build_path): request = variant.get_requires(build_requires=True, private_build_requires=True) req_strs = map(str, request) quoted_req_strs = map(quote, req_strs) self._print("Resolving build enviro...
Create a context to build the variant within.
def check_scalar(self, scalar_dict):
    """Check whether *scalar_dict* satisfies this query.

    Wraps each scalar in a length-1 array so the vectorized ``mask``
    can be reused, then returns its first (only) result.
    """
    table = {}
    for name, value in scalar_dict.items():
        table[name] = np.array([value])
    return self.mask(table)[0]
check if `scalar_dict` satisfy query
async def logout(self, request):
    """Simple handler for logout: forget the session for *request*.

    :raises JsonValidaitonError: when no Authorization header is present
    """
    if "Authorization" not in request.headers:
        raise JsonValidaitonError(
            "Auth header is not present, can not destroy token")
    response = json_response()
    await forget(request, response)
    return response
Simple handler for logout
def _depth_im_callback(self, msg):
    """Callback for handling depth images.

    Converts the ROS image message to a DepthImage; values arrive in
    millimeters and are scaled to meters. On any conversion failure the
    current depth image is cleared.
    """
    try:
        cv_depth = self._bridge.imgmsg_to_cv2(msg) / 1000.0
        self._cur_depth_im = DepthImage(cv_depth, frame=self._frame)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; behavior for
        # ordinary conversion errors is unchanged (best-effort reset).
        self._cur_depth_im = None
Callback for handling depth images.
def _find_usage_vpcs(self): vpcs = self.conn.describe_vpcs() self.limits['VPCs']._add_current_usage( len(vpcs['Vpcs']), aws_type='AWS::EC2::VPC' )
find usage for VPCs
def _router_numbers(self): return tuple(up for up in self._up2down.keys() if up in self._up2down.values())
A tuple of the numbers of all "routing" basins.
def replace(self, new_node): cur_node = self.cur_node nodestack = self.nodestack cur = nodestack.pop() prev = nodestack[-1] index = prev[-1] - 1 oldnode, name = prev[-2][index] assert cur[0] is cur_node is oldnode, (cur[0], cur_node, prev[-2], ...
Replace a node after first checking integrity of node stack.
def _save_sensitivities(self, directory): print('saving sensitivities') digits = int(np.ceil(np.log10(self.configs.configs.shape[0]))) for i in range(0, self.configs.configs.shape[0]): sens_data, meta_data = self.get_sensitivity(i) filename_raw = 'sens{0:0' + '{0}'.format...
save sensitivities to a directory
def probe(self, ipaddr=None):
    """Probe the given address for a bulb.

    :param ipaddr: target address; defaults to the broadcast address
    """
    target = self._broadcast_addr if ipaddr is None else ipaddr
    self._send_command({"payloadtype": PayloadType.GET, "target": target})
Probe given address for bulb.
def serialize_compound(self, tag): separator, fmt = self.comma, '{{{}}}' with self.depth(): if self.should_expand(tag): separator, fmt = self.expand(separator, fmt) return fmt.format(separator.join( f'{self.stringify_compound_key(key)}{self.colon}{...
Return the literal representation of a compound tag.
def projectname(self): if self._projectname is None: exps = self.config.experiments if self._experiment is not None and self._experiment in exps: return exps[self._experiment]['project'] try: self._projectname = list(self.config.projects.keys()...
The name of the project that is currently processed
def parse_nested( bels: list, char_locs: CharLocs, parsed: Parsed, errors: Errors ) -> Tuple[Parsed, Errors]: for sp in char_locs[ "nested_parens" ]: ep, level = char_locs["nested_parens"][sp] if ep == -1: ep = len(bels) + 1 parsed[(sp, ep)] = {"type": "Nested", "...
Parse nested BEL object
def WindowsSdkVersion(self): if self.vc_ver <= 9.0: return ('7.0', '6.1', '6.0a') elif self.vc_ver == 10.0: return ('7.1', '7.0a') elif self.vc_ver == 11.0: return ('8.0', '8.0a') elif self.vc_ver == 12.0: return ('8.1', '8.1a') eli...
Microsoft Windows SDK versions for specified MSVC++ version.
def generate_header_validator(headers, context, **kwargs): validators = ValidationDict() for header_definition in headers: header_processor = generate_value_processor( context=context, **header_definition ) header_validator = generate_object_validator( ...
Generates a validation function that will validate a dictionary of headers.
def listen(self, listener):
    """Listen for pub/sub events that affect our ownership of a job.

    Kills the local job whenever a 'canceled', 'lock_lost' or 'put'
    event arrives; malformed messages are logged and skipped.

    :param listener: object whose ``listen()`` yields pubsub messages
        carrying a JSON-encoded ``'data'`` field
    """
    for message in listener.listen():
        try:
            data = json.loads(message['data'])
            if data['event'] in ('canceled', 'lock_lost', 'put'):
                self.kill(data['jid'])
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt /
            # SystemExit propagate; still best-effort per message.
            logger.exception('Pubsub error')
Listen for events that affect our ownership of a job
def simple_locking(lock_id, expiration=None): def inner_decorator(function): def wrapper(*args, **kwargs): try: lock = Lock.acquire_lock(lock_id, expiration) except LockError: pass else: logger.debug('acquired lock: %s' % lo...
A decorator that wraps a function in a single lock getting algorithm
def explode(prefix: str): def _app(i, e=None): if i is not None: return {k: v for (k, v) in iter_fields(i)}, None return i, e def iter_fields(event_field: Union[dict, list]): if type(event_field) is dict: for key, val in event_field.items(): yield ...
given an array of objects de-normalized into fields
def parse_version(str_):
    """Parse a semantic version (``X.Y.Z``) out of *str_*.

    :param str_: text containing a version, e.g. ``__version__ = '1.2.3'``
    :return: the first ``X.Y.Z`` match found
    :raises KeyError: when no version string is found

    Bug fix: the dots were unescaped (``r"\\d+.\\d+.\\d+"``), so text
    such as ``"1a2b3"`` was wrongly accepted as a version.
    """
    found = re.findall(r"\d+\.\d+\.\d+", str_)
    if found:
        return found[0]
    print("cannot parse string {}".format(str_))
    raise KeyError
Parses the program's version from a python variable declaration.
def drop(self, ex): "helper for apply_sql in DropX case" if ex.name not in self: if ex.ifexists: return raise KeyError(ex.name) table_ = self[ex.name] parent = table_.parent_table if table_.child_tables: if not ex.cascade: raise table.IntegrityError('delete_parent_...
helper for apply_sql in DropX case
def export_modified_data(self): def export_modfield(value, is_modified_seq=True): try: return value.export_modified_data() except AttributeError: if is_modified_seq: return value if self.__modified_data__ is not None: ...
Retrieves the modified data in a jsoned form
def ToCategorizedPath(path_type, components): try: prefix = { PathInfo.PathType.OS: ("fs", "os"), PathInfo.PathType.TSK: ("fs", "tsk"), PathInfo.PathType.REGISTRY: ("registry",), PathInfo.PathType.TEMP: ("temp",), }[path_type] except KeyError: raise ValueError("Unknown pa...
Translates a path type and a list of components to a categorized path.
def merge(cls, trees):
    """Merge a collection of AttrTree objects into the first one.

    :param trees: non-empty sequence of trees supporting ``update``
    :return: the first tree, mutated with the contents of all the rest
    """
    merged = trees[0]
    # Note: the first iteration updates ``merged`` with itself (no-op),
    # matching the original behavior.
    for other in trees:
        merged.update(other)
    return merged
Merge a collection of AttrTree objects.
def _is_valid_cardinal(self, inpt, metadata): if not isinstance(inpt, int): return False if metadata.get_minimum_cardinal() and inpt < metadata.get_maximum_cardinal(): return False if metadata.get_maximum_cardinal() and inpt > metadata.get_minimum_cardinal(): ...
Checks if input is a valid cardinal value
def _get_func_nodes():
    """Get all project definitions whose node is a function definition."""
    definitions = project_definitions.values()
    return [d for d in definitions if isinstance(d.node, ast.FunctionDef)]
Get all function nodes.
def process_fasta(fasta, **kwargs): logging.info("Nanoget: Starting to collect statistics from a fasta file.") inputfasta = handle_compressed_input(fasta, file_type="fasta") return ut.reduce_memory_usage(pd.DataFrame( data=[len(rec) for rec in SeqIO.parse(inputfasta, "fasta")], columns=["len...
Combine metrics extracted from a fasta file.
def _front_delta(self):
    """Return the offset of the colored part as a Separator.

    NOTE(review): the hovered-without-NO_HOVER branch and the fallback
    both yield 0, so only clicked+hovered widgets get an offset —
    confirm whether the hover branch was meant to differ.
    """
    if self.flags & self.NO_MOVE:
        return Separator(0, 0)
    if self.clicked and self.hovered:
        delta = 2
    elif self.hovered and not self.flags & self.NO_HOVER:
        delta = 0
    else:
        delta = 0
    return Separator(delta, delta)
Return the offset of the colored part.
def execute(self, proxy, method, args): try: result = getattr(proxy, method)(raw_xml=self.options.xml, *tuple(args)) except xmlrpc.ERRORS as exc: self.LOG.error("While calling %s(%s): %s" % (method, ", ".join(repr(i) for i in args), exc)) self.return_code = error.EX_N...
Execute given XMLRPC call.
def export_public_key(user_id, env=None, sp=subprocess):
    """Export the GPG public key for `user_id` from the local keyring.

    :param user_id: key identifier to export
    :param env: optional environment for the subprocess
    :param sp: subprocess module (injectable for testing)
    :raises KeyError: when the key is not found
    :return: the exported key material
    """
    cmd = gpg_command(['--export', user_id])
    output = check_output(args=cmd, env=env, sp=sp)
    if not output:
        log.error('could not find public key %r in local GPG keyring',
                  user_id)
        raise KeyError(user_id)
    return output
Export GPG public key for specified `user_id`.
def _convert_series(self, metric, ts): series = monitoring_v3.types.TimeSeries() series.metric.type = self.get_metric_type(metric.descriptor) for lk, lv in self.options.default_monitoring_labels.items(): series.metric.labels[lk.key] = lv.value for key, val in zip(metric.descr...
Convert an OC timeseries to a SD series.
def draw(self): from onshapepy import Part CAD = Part( 'https://cad.onshape.com/documents/b4cfd328713460beeb3125ac/w/3928b5c91bb0a0be7858d99e/e/6f2eeada21e494cebb49515f' ) CAD.params = { 'channel_L': self.channel_L, 'channel_W': self.channel_W, ...
Draw the Onshape flocculator model based off of this object.
def pytorch_id(node):
    """Return a unique ID for a traced graph node.

    Built from the node's scope name plus the unique names of all of
    its outputs, e.g. ``"scope/outputs/a/b"``.
    """
    output_names = [out.uniqueName() for out in node.outputs()]
    return node.scopeName() + "/outputs/" + "/".join(output_names)
Returns a unique ID for a node.
def _addconfig(config, *paths): for path in paths: if path is not None and exists(path): config.append(path)
Add path to CONF_DIRS if exists.
def init(self): self._context_notify_cb = pa_context_notify_cb_t( self.context_notify_cb) self._sink_info_cb = pa_sink_info_cb_t(self.sink_info_cb) self._update_cb = pa_context_subscribe_cb_t(self.update_cb) self._success_cb = pa_context_success_cb_t(self.success_cb) ...
Creates context, when context is ready context_notify_cb is called
def diagonalSize(self):
    """Return the maximum diagonal size among this Assembly's actors."""
    sizes = [actor.diagonalSize() for actor in self.actors]
    return np.max(sizes)
Return the maximum diagonal size of the ``Actors`` of the ``Assembly``.
def _tscube_app(self, xmlfile): xmlfile = self.get_model_path(xmlfile) outfile = os.path.join(self.config['fileio']['workdir'], 'tscube%s.fits' % (self.config['file_suffix'])) kw = dict(cmap=self.files['ccube'], expcube=self.files['ltcube'], ...
Run gttscube as an application.
def main(): parser = argparse.ArgumentParser( description='Relocate a virtual environment.' ) parser.add_argument( '--source', help='The existing virtual environment.', required=True, ) parser.add_argument( '--destination', help='The location for which...
Relocate a virtual environment.
def _variable_type_to_read_fn(vartype, records): fn_map = {"String": "read_string", "Array[String]": "read_lines", "Array[Array[String]]": "read_tsv", "Object": "read_object", "Array[Object]": "read_objects", "Array[Array[Object]]": "read_objects", "Int": "rea...
Convert variant types into corresponding WDL standard library functions.
def reload_list(self): self.leetcode.load() if self.leetcode.quizzes and len(self.leetcode.quizzes) > 0: self.home_view = self.make_listview(self.leetcode.quizzes) self.view_stack = [] self.goto_view(self.home_view)
Press R in home view to retrieve quiz list
def getParameterByName(self, name):
    """Search this object's parameters for one named *name*.

    :param name: parameter name to look for
    :return: the first matching parameter, or None when absent
    """
    return next(
        (param for param in self.getParameters()
         if param.getName() == name),
        None,
    )
Searches for a parameter by name and returns it.
def afni_copy(filename):
    """Create a ``+orig`` copy of the given dataset; return its name.

    The copy is only produced when the HEAD file does not already
    exist. Returns None when the 'afni' package is unavailable (the
    original fell off the end of the function in that case).
    """
    if not nl.pkg_available('afni', True):
        return None
    afni_filename = "%s+orig" % nl.prefix(filename)
    if not os.path.exists(afni_filename + ".HEAD"):
        nl.calc(filename, 'a', prefix=nl.prefix(filename))
    return afni_filename
creates a ``+orig`` copy of the given dataset and returns the filename as a string
def build(self, words):
    """Construct the completion DAWG from tokenized *words*."""
    normalized = [self._normalize(tokens) for tokens in words]
    self._dawg = dawg.CompletionDAWG(normalized)
    self._loaded_model = True
Construct dictionary DAWG from tokenized words.
def addSubparser(subparsers, subcommand, description):
    """Add *subcommand* to *subparsers*.

    *description* serves as both the sub-parser's description and its
    help text.

    :return: the newly created sub-parser
    """
    return subparsers.add_parser(
        subcommand, description=description, help=description)
Add a subparser with subcommand to the subparsers object
def getApplicationsErrorNameFromEnum(self, error):
    """Return the string name for an applications *error* enum value."""
    lookup = self.function_table.getApplicationsErrorNameFromEnum
    return lookup(error)
Returns a string for an applications error
def _handle_continuations(self, response, cache_key): rcontinue = response.get('continue') listen = ['blcontinue', 'cmcontinue', 'plcontinue'] cparams = {} if rcontinue: for flag in listen: if rcontinue.get(flag): cparams[flag] = rcontinue....
Select continue params and clear cache or last continue params
def alignment(self):
    """The alignment of the type in bytes.

    Returns ``NotImplemented`` when no architecture is attached, since
    the byte width is unknown in that case.
    """
    arch = self._arch
    if arch is None:
        return NotImplemented
    return self.size // arch.byte_width
The alignment of the type in bytes.
def sections_count(self):
    """Return the number of sections in the BFD file being processed.

    :raises BfdException: when the BFD pointer is not initialized
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")
    return _bfd.get_bfd_attribute(
        self._ptr, BfdAttributes.COUNT_SECTIONS)
Return the sections_count attribute of the BFD file being processed.
def vector_str(p, decimal_places=2, print_zero=True):
    """Pretty-print the vector *p*'s values.

    :param p: iterable of numbers
    :param decimal_places: digits after the decimal point
    :param print_zero: when False, zeros are rendered as a blank space
    :return: e.g. ``'[1.00, 2.50]'``
    """
    fmt = '{0:.%df}' % decimal_places
    rendered = [
        ' ' if not print_zero and value == 0 else fmt.format(value)
        for value in p
    ]
    return '[{0}]'.format(", ".join(rendered))
Pretty-print the vector values.
def _merge_report(self, target, new): time = None if 'ts' in new['parsed']: time = new['parsed']['ts'] if (target.get('lastSeenDate', None) and time and target['lastSeenDate'] < time): target['lastSeenDate'] = time query_millis ...
Merges a new report into the target report
def expect(func, args, times=7, sleep_t=0.5): while times > 0: try: return func(*args) except Exception as e: times -= 1 logger.debug("expect failed - attempts left: %d" % times) time.sleep(sleep_t) if times == 0: raise exce...
try many times as in times with sleep time
def _create_alignment_button(self): iconnames = ["AlignTop", "AlignCenter", "AlignBottom"] bmplist = [icons[iconname] for iconname in iconnames] self.alignment_tb = _widgets.BitmapToggleButton(self, bmplist) self.alignment_tb.SetToolTipString(_(u"Alignment")) self.Bind(wx.EVT_BUT...
Creates vertical alignment button
def query(self, query, variables=None, timeout=None, metadata=None, credentials=None): new_metadata = self._dg.add_login_metadata(metadata) req = self._common_query(query, variables=variables) try: res = self._dc.query(req, timeout=timeout, ...
Adds a query operation to the transaction.
def read_line(self, fid): lin = ' while lin[0] == ' lin = fid.readline().strip() if lin == '': return lin return lin
Read a line from a file string and check it isn't either empty or commented before returning.
def _convert_clause(self, clause): clause = wrap(clause) if clause == None: return None elif is_data(clause): return set_default({"value": self.convert(clause.value)}, clause) else: return [set_default({"value": self.convert(c.value)}, c) for c in clau...
JSON QUERY EXPRESSIONS HAVE MANY CLAUSES WITH SIMILAR COLUMN DECLARATIONS
def top(self):
    """The top-most row index in the vertical span of this cell."""
    if self.vMerge is not None and self.vMerge != ST_Merge.RESTART:
        # Continuation cell: delegate to the cell above in the span.
        return self._tc_above.top
    return self._tr_idx
The top-most row index in the vertical span of this cell.
def _get_serv(ret=None): _options = _get_options(ret) host = _options.get('host') port = _options.get('port') database = _options.get('db') user = _options.get('user') password = _options.get('password') version = _get_version(host, port, user, password) if version and "v0.8" in version:...
Return an influxdb client object
def to_latex(self): if self.low == self.high: if self.low * 10 % 10 == 0: return "{0:d}".format(int(self.low)) else: return "{0:0.2f}".format(self.low) else: t = "" if self.low == -np.inf: t += r"(-\infty, " ...
Returns an interval representation
def fromJD(jd, utcoffset):
    """Build a Datetime object given a Julian day and a UTC offset.

    :param jd: Julian day number
    :param utcoffset: offset in hours, or an existing Time instance
    """
    offset = utcoffset if isinstance(utcoffset, Time) else Time(utcoffset)
    local_jd = jd + offset.value / 24.0
    date = Date(round(local_jd))
    time = Time((local_jd + 0.5 - date.jdn) * 24)
    return Datetime(date, time, offset)
Builds a Datetime object given a jd and utc offset.
def cli(env, identifier, ack):
    """Show details of a specific event; optionally acknowledge it."""
    manager = AccountManager(env.client)
    event = manager.get_event(identifier)
    if ack:
        manager.ack_event(identifier)
    for table in (basic_event_table(event),
                  impacted_table(event),
                  update_table(event)):
        env.fout(table)
Details of a specific event, and ability to acknowledge event.
def build_package(path, requires, virtualenv=None, ignore=None, extra_files=None, zipfile_name=ZIPFILE_NAME, pyexec=None): pkg = Package(path, zipfile_name, pyexec) if extra_files: for fil in extra_files: pkg.extra_file(fil) if virtualenv is not None: ...
Builds the zip file and creates the package with it
def train(self, data, **kwargs): self.data = data for i in xrange(0,data.shape[1]): column_mean = np.mean(data.icol(i)) column_stdev = np.std(data.icol(i)) self.column_means += [column_mean] self.column_stdevs += [column_stdev] self.data = self.pre...
Calculate the standard deviations and means in the training data
def _die(self, msg, lnum): raise Exception("**FATAL {FILE}({LNUM}): {MSG}\n".format( FILE=self.obo_file, LNUM=lnum, MSG=msg))
Raise an Exception if file read is unexpected.
def multipleOrderComparison(cls, orders): comparers = [ (o.keyfn, 1 if o.isAscending() else -1) for o in orders] def cmpfn(a, b): for keyfn, ascOrDesc in comparers: comparison = cmp(keyfn(a), keyfn(b)) * ascOrDesc if comparison is not 0: return comparison return 0 retur...
Returns a function that will compare two items according to `orders`
def quick_search(self, terms):
    """Wrapper for search_bugs, for simple string searches.

    :param terms: free-text quicksearch terms
    :return: result of ``self.search_bugs`` on a quicksearch criterion
    """
    # isinstance (rather than ``type(...) is str``) also accepts str
    # subclasses; validation intent and failure mode are unchanged.
    assert isinstance(terms, str)
    return self.search_bugs([{'quicksearch': terms}])
Wrapper for search_bugs, for simple string searches
def comparable(self): document_dict = self.compare_safe(self._document) self._remove_keys(document_dict, self._uncompared_fields) clean_document_dict = {} for k, v in document_dict.items(): if not v and not isinstance(v, (int, float)): continue cle...
Return a dictionary that can be compared
def status_code(self):
    """Get (and lazily cache) the HTTP status code for the response."""
    if not hasattr(self, '_status_code'):
        # First access: pull the code off the underlying response.
        self._status_code = self._response.getcode()
    return self._status_code
Get the http status code for the response
def update_from_sam(self, sam, sam_reader): if sam.is_unmapped \ or sam.mate_is_unmapped \ or (sam.reference_id == sam.next_reference_id): return new_link = link.Link(sam, sam_reader, self.ref_lengths) read_name = sam.query_name if read_name in self.partia...
Updates graph info from a pysam.AlignedSegment object
def hex2termhex(hexval: str, allow_short: bool = False) -> str: return rgb2termhex(*hex2rgb(hexval, allow_short=allow_short))
Convert a hex value into the nearest terminal color matched hex.
def hash_user_id(self, user_id: str) -> str:
    """As per the law, anonymize the user identifier before sending it.

    :param user_id: raw user identifier
    :return: SHA-256 hex digest of the UTF-8 encoded identifier
    """
    return sha256(user_id.encode()).hexdigest()
As per the law, anonymize user identifier before sending it.
def _entity_list_as_bel(entities: Iterable[BaseEntity]) -> str: return ', '.join( e.as_bel() for e in entities )
Stringify a list of BEL entities.
def _two_byte_cmd(self, cmd): if cmd == SB: self.telnet_got_sb = True self.telnet_sb_buffer = '' elif cmd == SE: self.telnet_got_sb = False self._sb_decoder() elif cmd == NOP: pass elif cmd == DATMK: pass eli...
Handle incoming Telnet commands that are two bytes long.
def append_default_stylesheets(self):
    """Append the default styles defined on the translator settings."""
    for stylesheet in utils.get_stylesheet_list(self.settings):
        self.css(stylesheet)
Appends the default styles defined on the translator settings.