code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _change_color(self, event): h = self.bar.get() self.square.set_hue(h) (r, g, b), (h, s, v), sel_color = self.square.get() self.red.set(r) self.green.set(g) self.blue.set(b) self.hue.set(h) self.saturation.set(s) self.value.set(v) self.h...
Respond to motion of the hsv cursor.
def create(cls, name, size, type, quantity, duration, datacenter, vhosts, password, snapshot_profile, background, sshkey): if not background and not cls.intty(): background = True datacenter_id_ = int(Datacenter.usable_id(datacenter)) paas_params = { 'name'...
Create a new PaaS instance.
def send_login_email(app_id, token, hook, email=None, user_id=None, lang="en_US", url_login='https://pswdless.appspot.com/rest/login'): return SendLoginEmail(app_id, token, hook, email, user_id, lang, url_login)
Contact password-less server to send the user an email containing the login link
def add_success(self, group=None, type_='', field='', description=''): group = group or '(200)' group = int(group.lower()[1:-1]) self.retcode = self.retcode or group if group != self.retcode: raise ValueError('Two or more retcodes!') type_ = type_ or '{String}' ...
parse and append a success data param
def kill_process(procname, scriptname): import signal import subprocess p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE) out, err = p.communicate() for line in out.decode().splitlines(): if procname in line and scriptname in line: pid = int(line.split()[1]) ...
kill WSGI processes that may be running in development
def _get_headers(self): headers = { 'user-agent': 'IPinfoClient/Python{version}/1.0'.format(version=sys.version_info[0]), 'accept': 'application/json' } if self.access_token: headers['authorization'] = 'Bearer {}'.format(self.access_token) return heade...
Build headers for request to IPinfo API.
def to_dict(self): d = { 'model_type': 'regression', 'name': self.name, 'fit_filters': self.fit_filters, 'predict_filters': self.predict_filters, 'model_expression': self.model_expression, 'ytransform': YTRANSFORM_MAPPING[self.ytransform], ...
Returns a dictionary representation of a RegressionModel instance.
def flatMap(f: Callable, xs: Iterable) -> List:
    """Apply *f* to every element of *xs*, then flatten the result one level."""
    mapped = lmap(f, xs)
    return flatten(mapped)
Map a function onto an iterable and flatten the result.
def publish_scene_remove(self, scene_id): self.sequence_number += 1 self.publisher.send_multipart(msgs.MessageBuilder.scene_remove(self.sequence_number, scene_id)) return self.sequence_number
publish the removal of a scene
def start(self, *args, **kwargs): self._stop = False super(ReadProbes, self).start(*args, **kwargs)
start the read_probe thread
def plot_discrete(self, show=False, annotations=True): import matplotlib.pyplot as plt axis = plt.axes() axis.set_aspect('equal', 'datalim') for i, points in enumerate(self.discrete): color = ['g', 'k'][i in self.root] axis.plot(*points.T, color=color) if ...
Plot the closed curves of the path.
def getAudioData(self): with self.preload_mutex: cache_url = self.buildUrl(cache_friendly=True) if cache_url in __class__.cache: logging.getLogger().debug("Got data for URL '%s' from cache" % (cache_url)) audio_data = __class__.cache[cache_url] assert(audio_data) else: ...
Fetch the audio data.
def sample_wr(lst):
    """Sample len(lst) elements from lst uniformly, with replacement.

    Returns a plain list; draws indices with np.random.randint, so the
    result is reproducible under a fixed NumPy seed.
    """
    values = np.array(lst)
    chosen = np.random.randint(len(lst), size=len(lst))
    # fancy indexing gathers all chosen elements in a single vectorized step
    return list(values[chosen])
Sample from lst, with replacement
def _read_last_geometry(self): header_pattern = r"\s+Optimization\sCycle:\s+" + \ str(len(self.data.get("energy_trajectory"))) + \ r"\s+Coordinates \(Angstroms\)\s+ATOM\s+X\s+Y\s+Z" table_pattern = r"\s+\d+\s+\w+\s+([\d\-\.]+)\s+([\d\-\.]+)\s+([\d\-\.]+)" footer_pattern =...
Parses the last geometry from an optimization trajectory for use in a new input file.
def slowlog_get(self, length=None):
    """Return the Redis slow queries log.

    :param length: optional maximum number of entries to fetch.
    :raises TypeError: when *length* is neither ``int`` nor ``None``.
    """
    if length is None:
        return self.execute(b'SLOWLOG', b'GET')
    if not isinstance(length, int):
        raise TypeError("length must be int or None")
    return self.execute(b'SLOWLOG', b'GET', length)
Returns the Redis slow queries log.
def args(self):
    """Return all attributes as an arguments tuple (fixed field order)."""
    fields = ('base', 'item', 'leng', 'refs', 'both', 'kind', 'type')
    return tuple(getattr(self, field) for field in fields)
Return all attributes as arguments tuple.
def pick(self): v = random.uniform(0, self.ub) d = self.dist c = self.vc - 1 s = self.vc while True: s = s / 2 if s == 0: break if v <= d[c][1]: c -= s else: c += s whi...
picks a value according to the given density
def exists(self, **kwargs): kwargs.pop('partition', None) kwargs['transform_name'] = True return self._exists(**kwargs)
Providing a partition is not necessary on topology; causes errors
def _init_read_gz(self): self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS) self.dbuf = b"" if self.__read(2) != b"\037\213": raise ReadError("not a gzip file") if self.__read(1) != b"\010": raise CompressionError("unsupported compression method") fl...
Initialize for reading a gzip compressed fileobj.
def start(self): logging.info("Fixedconf watcher plugin: Started") cidr = self.conf['fixed_cidr'] hosts = self.conf['fixed_hosts'].split(":") route_spec = {cidr : hosts} try: common.parse_route_spec_config(route_spec) self.q_route_spec.put(route...
Start the config watch thread or process.
def render(self, rect, data): size = self.element.get_minimum_size(data) x = rect.x y = rect.y w = size.x h = size.y extra_width = rect.w - w extra_height = rect.h - h if self.horizontal_align == AlignLM.ALIGN_CENTER: x += extra_width * 0.5 ...
Draws the managed element in the correct alignment.
def execute(self): relevant_targets = self._get_non_synthetic_targets(self.get_targets()) if self.sideeffecting: self._execute_for(relevant_targets) else: with self.invalidated(relevant_targets) as invalidation_check: self._execute_for([vt.target for vt in invalidation_check.invalid_vts]...
Runs the tool on all source files that are located.
def format_name_for_ext(metadata, ext, cm_default_formats=None, explicit_default=True): text_repr = metadata.get('jupytext', {}).get('text_representation', {}) if text_repr.get('extension', '').endswith(ext) and text_repr.get('format_name'): return text_repr.get('format_name') formats = metadata.get...
Return the format name for that extension
def context_processor(self, fn): self._defer(lambda app: app.context_processor(fn)) return fn
Registers a template context processor function.
def _display_status(normalized_data, stream): if 'Pull complete' in normalized_data['status'] or 'Download complete' in normalized_data['status']: stream.write("\n") if 'id' in normalized_data: stream.write("%s - " % normalized_data['id']) stream.write("{0}\n".format(normalized_data['status'...
print status message from docker-py stream.
def _read(self, size): data = '' while len(data) < size: try: chunk = self.socket.recv(size-len(data)) except socket.error as (err, msg): self.connected = False raise ScratchError("[Errno %d] %s" % (err, msg)) if chunk =...
Reads size number of bytes from Scratch and returns data as a string
def _addDBParam(self, name, value): if name[-4:] == '__OP': return self._setComparasionOperator(name[:-4], value) if name[-3:] == '.op': return self._setComparasionOperator(name[:-3], value) if name.find('__') != -1: import re name = name.replace('__','::') elif name.find('.') != -1: name = name....
Adds a database parameter
def on_next_button(self, event): self.do_auto_save() self.selected_meas = [] index = self.specimens.index(self.s) try: fit_index = self.pmag_results_data['specimens'][self.s].index( self.current_fit) except KeyError: fit_index = None ...
update figures and text when a next button is selected
def _attributeStr(self, name):
    """Return ``name=value`` for a single attribute; multiple values are comma-joined."""
    encoded_values = [_encodeAttr(v) for v in self.attributes[name]]
    return "{}={}".format(_encodeAttr(name), ",".join(encoded_values))
Return name=value for a single attribute
def basename(self, suffix=''):
    """Return the basename of the template file, or None if no file is set.

    :param suffix: optional trailing suffix to strip from the basename
        (like Unix ``basename(1)``: removed only when it is a proper,
        non-identical trailing match).

    Note: the original called ``os.path.basename(self._file, suffix)``,
    but ``os.path.basename`` accepts a single argument, so any call with
    a truthy ``self._file`` raised TypeError. The suffix is now stripped
    manually.
    """
    if not self._file:
        return None
    name = os.path.basename(self._file)
    if suffix and name != suffix and name.endswith(suffix):
        name = name[:-len(suffix)]
    return name
The basename of the template file.
def _get_visualization_classes(): visualization_attr = vars(import_module('picasso.visualizations')) visualization_submodules = [ visualization_attr[x] for x in visualization_attr if isinstance(visualization_attr[x], ModuleType)] visualization_classes = [] for submodule in visual...
Import visualizations classes dynamically
def collect(self): start = time() self._collect_copy_specs() self._collect_cmd_output() self._collect_strings() fields = (self.name(), time() - start) self._log_debug("collected plugin '%s' in %s" % fields)
Collect the data for a plugin.
def rc4(data, key): S, j, out = list(range(256)), 0, [] for i in range(256): j = (j + S[i] + ord(key[i % len(key)])) % 256 S[i], S[j] = S[j], S[i] i = j = 0 for ch in data: i = (i + 1) % 256 j = (j + S[i]) % 256 S[i], S[j] = S[j], S[i] out.append(chr(ord(c...
RC4 encryption and decryption method.
def validate_positive_integer(option, value):
    """Validate that *value* is a strictly positive integer (0 excluded).

    Delegates the integer check to ``validate_integer`` and then
    enforces positivity, returning the validated value.
    """
    validated = validate_integer(option, value)
    if validated > 0:
        return validated
    raise ValueError("The value of %s must be "
                     "a positive integer" % (option,))
Validate that 'value' is a positive integer, which does not include 0.
def ngram(n, iter_tokens):
    """Return a generator of successive n-grams (slices of length *n*).

    :param n: size of each n-gram.
    :param iter_tokens: a sequence, or any iterable; true iterators
        (e.g. generators) are materialized into a list first, since
        n-gram extraction relies on slicing. Sequences (lists, strings,
        tuples) are used as-is, so existing behavior is unchanged.
    :returns: generator yielding ``tokens[i:i+n]`` for each valid start;
        empty when ``n > len(tokens)``.
    """
    # only materialize when the input is not already sliceable
    tokens = iter_tokens if hasattr(iter_tokens, '__getitem__') else list(iter_tokens)
    total = len(tokens)
    return (tokens[i:i + n] for i in range(total - n + 1))
Return a generator of n-gram from an iterable
def data_item_path(cls, project, dataset, data_item): return google.api_core.path_template.expand( "projects/{project}/datasets/{dataset}/dataItems/{data_item}", project=project, dataset=dataset, data_item=data_item, )
Return a fully-qualified data_item string.
def main(port=8000, n_output=10, use_segmenter=False): global n global use_segmenter_flag n = n_output use_segmenter_flag = use_segmenter logging.info("Start webserver...") app.run(port=port)
Main function starting the webserver.
def _check_exception(self): if self._saved_exception is not None: x = self._saved_exception self._saved_exception = None raise x
if there's a saved exception, raise & clear it
def _parse_wikiheadlines(path): lang_match = re.match(r".*\.([a-z][a-z])-([a-z][a-z])$", path) assert lang_match is not None, "Invalid Wikiheadlines filename: %s" % path l1, l2 = lang_match.groups() with tf.io.gfile.GFile(path) as f: for line in f: s1, s2 = line.split("|||") yield { l1...
Generates examples from Wikiheadlines dataset file.
def digested_line(line, digest_types): digested_line = [] for i, ele in enumerate(line): try: digest_key = digest_types[i] except IndexError: digest_key = digest_types[0] digest = Digest(acceptable_types=[digest_key]) try: digested_line.append(...
Returns list of digested values in line
def _remove_blank(l): ret = [] for i, _ in enumerate(l): if l[i] == 0: break ret.append(l[i]) return ret
Removes trailing zeros in the list of integers and returns a new list of integers
def entry_point(): try: provider_group_factory() notifiers_cli(obj={}) except NotifierException as e: click.secho(f"ERROR: {e.message}", bold=True, fg="red") exit(1)
The entry that CLI is executed from
def xclaim(self, stream, group_name, consumer_name, min_idle_time, id, *ids): fut = self.execute( b'XCLAIM', stream, group_name, consumer_name, min_idle_time, id, *ids ) return wait_convert(fut, parse_messages)
Claim a message for a given consumer
def volume_down(self): try: return bool(self.send_get_command(self._urls.command_volume_down)) except requests.exceptions.RequestException: _LOGGER.error("Connection error: volume down command not sent.") return False
Volume down receiver via HTTP get command.
def reset(self): self.reset_bars() self.url_progressbar.reset() for prop in dir(self): if prop.startswith("__"): continue prop_obj = getattr(self, prop) if prop_obj is not None and hasattr(prop_obj, "reset"): prop_obj.reset() ...
Reset all inputs back to default.
def stop(self): yield from self._stop_ubridge() if self.is_running(): self._terminate_process() if self._process.returncode is None: try: yield from wait_for_process_termination(self._process, timeout=3) except asyncio.TimeoutEr...
Stops the VPCS process.
def try_eval_metadata(metadata, name): value = metadata[name] if not isinstance(value, (str, unicode)): return if (value.startswith('"') and value.endswith('"')) or (value.startswith("'") and value.endswith("'")): if name in ['active', 'magic_args', 'language']: metadata[name] = ...
Evaluate given metadata to a python object, if possible
def hardmax(attrs, inputs, proto_obj): input_tensor_data = proto_obj.model_metadata.get('input_tensor_data')[0] input_shape = input_tensor_data[1] axis = int(attrs.get('axis', 1)) axis = axis if axis >= 0 else len(input_shape) + axis if axis == len(input_shape) - 1: amax = symbol.argmax(inpu...
Returns batched one-hot vectors.
def call(self, callname, arguments=None): action = getattr(self.api, callname, None) if action is None: try: action = self.api.ENDPOINT_OVERRIDES.get(callname, None) except AttributeError: action = callname if not callable(action): ...
Executed on each scheduled iteration
def hosting_history(self, query, **kwargs): return self._results('hosting-history', '/v1/{0}/hosting-history'.format(query), cls=GroupedIterable, **kwargs)
Returns the hosting history from the given domain name
def _apply_summaries(self): def as_frame(r): if isinstance(r, pd.Series): return r.to_frame() else: return r df = self.data if df.index.nlevels > 1: raise ValueError( "You cannot currently have both summary rows ...
Add all summary rows and columns.
def mark_dead(self, proxy, _time=None): if proxy not in self.proxies: logger.warn("Proxy <%s> was not found in proxies list" % proxy) return if proxy in self.good: logger.debug("GOOD proxy became DEAD: <%s>" % proxy) else: logger.debug("Proxy <%s> ...
Mark a proxy as dead
def _bytes_to_str(lines): if len(lines) >= 1 and hasattr(lines[0], 'decode'): return [line.decode('utf-8') for line in lines] else: return lines
Convert all lines from byte string to unicode string, if necessary
def _find_usage_applications(self): applications = self.conn.describe_applications() self.limits['Applications']._add_current_usage( len(applications['Applications']), aws_type='AWS::ElasticBeanstalk::Application', )
find usage for ElasticBeanstalk applications
def check_pil(func): def __wrapper(*args, **kwargs): root = kwargs.get('root') if not Image: if root and root.get_opt('warn'): warn("Images manipulation require PIL") return 'none' return func(*args, **kwargs) return __wrapper
PIL module checking decorator.
def order_manually(sub_commands): order = [ "start", "projects", ] ordered = [] commands = dict(zip([cmd for cmd in sub_commands], sub_commands)) for k in order: ordered.append(commands.get(k, "")) if k in commands: del commands[k] for k in commands: ...
Order sub-commands for display
def layers(self): if self._layers is None: self.__init() self._getLayers() return self._layers
gets the layers for the feature service
def pyname(self):
    """Name of the compiled module: the last dotted component, or the
    package name when the module is an ``__init__``."""
    parts = self.pymodule.split('.')
    # a '...pkg.__init__' module is named after its package, not '__init__'
    index = -2 if self.pymodule.endswith('__init__') else -1
    return parts[index]
Name of the compiled module.
def _create_sending_stream(self, pub_addr): sock = self._zmq_context.socket(zmq.PUB) sock.setsockopt(zmq.IDENTITY, self.sender_id) sock.connect(pub_addr) stream = ZMQStream(sock, io_loop=self.io_loop) return stream
Create a `ZMQStream` for sending responses back to Mongrel2.
def do_copy_file(self, args): path, prefixed_path, source_storage = args reset_connection(self.storage) if self.collectfast_enabled and not self.dry_run: try: if not should_copy_file( self.storage, path, prefixed_path, source_storage): ...
Determine if file should be copied or not and handle exceptions.
def _translate_cond(self, c): if isinstance(c, claripy.ast.Base) and not c.singlevalued: raise SimFastMemoryError("size not supported") if c is None: return True else: return self.state.solver.eval_upto(c, 1)[0]
Checks whether this condition can be supported by FastMemory.
def join_session(self, sid): self._rest.add_header('X-STC-API-Session', sid) self._sid = sid try: status, data = self._rest.get_request('objects', 'system1', ['version', 'name']) except resthttp.RestHttpError as e: ...
Attach to an existing session.
def count_rows(self, table, cols='*'): query = 'SELECT COUNT({0}) FROM {1}'.format(join_cols(cols), wrap(table)) result = self.fetch(query) return result if result is not None else 0
Get the number of rows in a particular table.
def _route(self): self._app.route('/', method='GET', callback=self._get_logger_list) self._app.route('/stats', method='GET', callback=self._fetch_handler_stats) self._app.route('/<name>/start', ...
Handles server route instantiation.
def consume_value(self, ctx, opts): value = click.Option.consume_value(self, ctx, opts) if not value: gandi = ctx.obj value = gandi.get(self.name) if value is not None: self.display_value(ctx, value) else: if self.default is...
Retrieve default value and display it when prompt is disabled.
def shell_sqlalchemy(session: SqlalchemySession, backend: ShellBackend): namespace = { 'session': session } namespace.update(backend.get_namespace()) embed(user_ns=namespace, header=backend.header)
This command includes SQLAlchemy DB Session
def _error_is_decreasing(self, last_error): current_error = self._compute_error() is_decreasing = current_error < last_error return is_decreasing, current_error
True if current error is less than last_error.
def np_hash(a):
    """Return an integer hash of a NumPy array (``hash(None)`` for None).

    The array is made contiguous so its raw buffer can be fed to SHA-1;
    the hex digest is converted to an int.
    """
    if a is None:
        return hash(None)
    contiguous = np.ascontiguousarray(a)
    digest = hashlib.sha1(contiguous.view(contiguous.dtype)).hexdigest()
    return int(digest, 16)
Return a hash of a NumPy array.
def picard_reorder(picard, in_bam, ref_file, out_file): if not file_exists(out_file): with tx_tmpdir(picard._config) as tmp_dir: with file_transaction(picard._config, out_file) as tx_out_file: opts = [("INPUT", in_bam), ("OUTPUT", tx_out_file), ...
Reorder BAM file to match reference file ordering.
def _override_size(vm_):
    """Return the size dict for *vm_*, applying any 'cores'/'ram'
    overrides present in the cloud profile."""
    size = get_size(vm_)
    for key in ('cores', 'ram'):
        if key in vm_:
            size[key] = vm_[key]
    return size
Apply any extra component overrides to VM from the cloud profile.
def QueueNotification(self, notification=None, timestamp=None, **kw): if notification is None: notification = rdf_flows.GrrNotification(**kw) session_id = notification.session_id if session_id: if timestamp is None: timestamp = self.frozen_timestamp notification.timestamp = timesta...
Queues a notification for a flow.
def task(func):
    """Decorator to run the decorated function as a Task.

    The wrapped callable does not execute inline; the wrapper hands
    ``func`` and its arguments to ``spawn`` (defined elsewhere) and
    returns whatever ``spawn`` returns (presumably a task/greenlet
    handle — confirm against the spawn implementation).
    """
    def task_wrapper(*args, **kwargs):
        return spawn(func, *args, **kwargs)
    return task_wrapper
Decorator to run the decorated function as a Task
def ReadIDsForAllSignedBinaries(self, cursor=None ): cursor.execute( "SELECT binary_type, binary_path FROM signed_binary_references") return [ rdf_objects.SignedBinaryID(binary_type=binary_type, path=binary_path) for binary_type, binary_path in cursor.fet...
Returns ids for all signed binaries in the DB.
def known(self, node): ref = node.get('type', Namespace.xsins) if ref is None: return None qref = qualify(ref, node, node.namespace()) query = BlindQuery(qref) return query.execute(self.schema)
resolve type referenced by @xsi:type
def controller(self): if hasattr(self, 'controllers'): if len(self.controllers) > 1: raise TypeError("Only one controller per account.") return self.controllers[0] raise AttributeError("There is no controller assigned.")
Show current linked controllers.
def annotate_subplot(ax, ncols=1, nrows=1, letter='a', linear_offset=0.075, fontsize=8): ax.text(-ncols*linear_offset, 1+nrows*linear_offset, letter, horizontalalignment='center', verticalalignment='center', fontsize=fontsize, fontweight='demibold', transform=ax....
add a subplot annotation number
def updateMovie(self, imageFile): try: photo = ImageTk.PhotoImage(Image.open(imageFile)) except: print("photo error") print('-'*60) traceback.print_exc(file=sys.stdout) print('-'*60) self.label.configure(image = photo, height=288) self.label.image = photo
Update the image file as video frame in the GUI.
def add_fact(term, fact, author=''): logger.info('Adding new fact %s: %s', term, fact) if not db.facts.find({'term': term_regex(term)}).count(): db.facts.insert({ 'term': term, 'fact': fact, 'set_by': author, 'set_date': time.time() }) db.f...
Records a new fact with a given term. Optionally can set an author
def gen_locustfile(testcase_file_path): locustfile_path = 'locustfile.py' template_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), "templates", "locustfile_template" ) with io.open(template_path, encoding='utf-8') as template: with io.open(locustfile_pat...
generate locustfile from template.
def recompress_archive(archive, verbosity=0, interactive=True): util.check_existing_filename(archive) util.check_writable_filename(archive) if verbosity >= 0: util.log_info("Recompressing %s ..." % (archive,)) res = _recompress_archive(archive, verbosity=verbosity, interactive=interactive) i...
Recompress an archive to hopefully smaller size.
def _onCoreModuleLoad(self, event): if not self._modelRuntsByBuid: return self._modelRuntsByBuid = {} self._modelRuntsByPropValu = collections.defaultdict(list)
Clear the cached model rows and rebuild them only if they have been loaded already.
async def __check_last_ping(self, run_listen): if self.__last_ping < time.time()-10: self._logger.warning("Last ping too old. Restarting the agent.") run_listen.cancel() self.__cancel_remaining_safe_tasks() else: self._loop.call_later(1, self._create_safe_...
Check if the last timeout is too old. If it is, kills the run_listen task
def parse_dformat(dformat, check=True):
    """Return *dformat*, raising IOError unless it is 'dense' or 'sparse'.

    :param check: when False, skip validation and return the value as-is.
    :raises IOError: on an invalid format (kept as IOError for
        compatibility with existing callers).
    """
    valid = ('dense', 'sparse')
    if check and dformat not in valid:
        raise IOError(
            "{} is a bad features format, please choose 'dense' or 'sparse'"
            .format(dformat))
    return dformat
Return `dformat` or raise if it is not 'dense' or 'sparse'.
def alter_poms(pom_dir, additional_params, repo_url=None, mvn_repo_local=None): work_dir = os.getcwd() os.chdir(pom_dir) try: if repo_url: settings_filename = create_mirror_settings(repo_url) else: settings_filename = None args = ["mvn", "clean"] if mv...
Runs mvn clean command with provided additional parameters to perform pom updates by pom-manipulation-ext.
def __get_button_events(self, state, timeval=None): changed_buttons = self.__detect_button_events(state) events = self.__emulate_buttons(changed_buttons, timeval) return events
Get the button events from xinput.
def _adjust_for_new_root(self, path): if self.new_root is None: return path elif path.startswith(self.new_root): return path[len(self.new_root):] else: return path
Adjust a path given the new root directory of the output.
def parse_querystring(msg):
    """Parse ``msg.querystring`` into (key, value) pairs.

    Strips surrounding whitespace and a leading '?', splits on '&',
    and yields each ``key, value`` pair. The pair is split on the FIRST
    '=' only (``maxsplit=1``), so values that themselves contain '='
    (e.g. base64 padding) no longer raise ValueError; a part with no
    '=' at all still raises, as before. Values are yielded raw
    (not URL-decoded).
    """
    for part in msg.querystring.strip().lstrip('?').split('&'):
        key, value = part.split('=', 1)
        yield key, value
parse a querystring into keys and values
def read_int(nbytes: int, *, byteorder: str = "big", from_=None) -> int: return (yield (Traps._read_int, nbytes, byteorder, from_))
read some bytes as integer
def _machine_bytes():
    """Get the 3-byte machine portion of an ObjectId (MD5 of the hostname)."""
    digest = hashlib.md5()
    hostname = socket.gethostname()
    # PY3 flag (defined elsewhere in the module) decides whether the
    # hostname must be encoded before hashing
    digest.update(hostname.encode() if PY3 else hostname)
    return digest.digest()[:3]
Get the machine portion of an ObjectId.
def beautify_file(self, path): error = False if(path == '-'): data = sys.stdin.read() result, error = self.beautify_string(data, '(stdin)') sys.stdout.write(result) else: data = self.read_file(path) result, error = self.beautify_string(...
Beautify bash script file.
def print_code_table(self, out=sys.stdout): out.write(u'bits code (value) symbol\n') for symbol, (bitsize, value) in sorted(self._table.items()): out.write(u'{b:4d} {c:10} ({v:5d}) {s!r}\n'.format( b=bitsize, v=value, s=symbol, c=bin(value)[2:].rjust(bitsize, '0') ...
Print code table overview
def resolve_colors(self, colorstack): 'Returns the curses attribute for the colorstack, a list of color option names sorted highest-precedence color first.' attr = CursesAttr() for coloropt in colorstack: c = self.get_color(coloropt) attr = attr.update_attr(c) ret...
Returns the curses attribute for the colorstack, a list of color option names sorted highest-precedence color first.
def export_module_spec_with_checkpoint(module_spec, checkpoint_path, export_path, scope_prefix=""): with tf.Graph().as_default(): m = hub.Module(module_spec) assign_map = { scope_prefix...
Exports given checkpoint as tfhub module with given spec.
def seek_to_position(position): message = command(protobuf.CommandInfo_pb2.SeekToPlaybackPosition) send_command = message.inner() send_command.options.playbackPosition = position return message
Seek to an absolute position in stream.
def remove_redis_keyword(self, keyword): redisvr.srem(CMS_CFG['redis_kw'] + self.userinfo.user_name, keyword) return json.dump({}, self)
Remove the keyword for redis.
def write(self, data): if data == '\n': return self.stream.write(data) else: bytes_ = 0 for line in data.splitlines(True): nl = '' if line.endswith('\n'): line = line[:-1] nl = '\n' ...
This could be a bit less clumsy.
def textContent(self, text: str) -> None: self._set_text_content(text) if self.connected: self._set_text_content_web(text)
Set textContent both on this node and related browser node.
def closed_sets(C, mincount_connectivity=0): n = np.shape(C)[0] S = connected_sets(C, mincount_connectivity=mincount_connectivity, strong=True) closed = [] for s in S: mask = np.zeros(n, dtype=bool) mask[s] = True if C[np.ix_(mask, ~mask)].sum() == 0: closed.append(s)...
Computes the strongly connected closed sets of C
def LoadState( self, config_parser ): if not config_parser: return if ( not config_parser.has_section( 'window' ) or ( config_parser.has_option( 'window','maximized' ) and config_parser.getboolean( 'window', 'maximized' ) ) ): ...
Set our window state from the given config_parser instance
def register(self, switch, signal=signals.switch_registered): if not switch.name: raise ValueError('Switch name cannot be blank') switch.manager = self self.__persist(switch) signal.call(switch)
Register a switch and persist it to the storage.