code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def userInvitations(self): self.__init() items = [] for n in self._userInvitations: if "id" in n: url = "%s/%s" % (self.root, n['id']) items.append(self.Invitation(url=url, securityHandler=self._securityHand...
gets all user invitations
def render(template, context, partials=None, state=None):
    """Render a mustache template with sane defaults.

    ``context`` may be a ``Context`` or a plain mapping (wrapped into one);
    ``partials`` is an optional mapping of partial templates pushed onto the
    rendering state.  ``partials`` now defaults to ``None`` instead of a
    shared mutable ``{}`` (classic mutable-default pitfall); an empty dict
    was falsy in the original ``if partials:`` check, so behavior is the same.
    """
    state = state or State()
    if isinstance(context, Context):
        state.context = context
    else:
        state.context = Context(context)
    if partials:
        state.partials.push(partials)
    return __render(make_unicode(template), state)
Renders a given mustache template, with sane defaults.
def create_key(self): print("Creating key. Please input the following options:") name = input("Key name (optional): ") print("To make this key more secure, you should restrict the IP addresses that can use it. ") print("To use with all IPs, leave blank or use 0.0.0.0/0.") print("...
Create an API key.
def health_percentage(self) -> Union[int, float]:
    """Current health as a fraction of maximum health (shields not included)."""
    maximum = self._proto.health_max
    # Guard against a division by zero for units reporting no max health.
    if maximum == 0:
        return 0
    return self._proto.health / maximum
Does not include shields
def agm(x, y, context=None):
    """Return the arithmetic-geometric mean of x and y."""
    operands = (
        BigFloat._implicit_convert(x),
        BigFloat._implicit_convert(y),
    )
    return _apply_function_in_current_context(
        BigFloat, mpfr.mpfr_agm, operands, context
    )
Return the arithmetic geometric mean of x and y.
def options(argv=None):
    """Return a dict of the parsed Hendrix option key-value pairs.

    ``argv`` defaults to an empty argument list (NOT ``sys.argv``), so calling
    with no arguments yields the parser's defaults.  The previous ``argv=[]``
    default was a shared mutable object; ``None`` avoids that pitfall while
    keeping identical behavior.
    """
    if argv is None:
        argv = []
    parser = HendrixOptionParser
    parsed_args = parser.parse_args(argv)
    return vars(parsed_args[0])
A helper function that returns a dictionary of the default key-values pairs
def cli(env, identifier):
    """Permanently remove an SSH key, asking for confirmation first."""
    manager = SoftLayer.SshKeyManager(env.client)
    key_id = helpers.resolve_id(manager.resolve_ids, identifier, 'SshKey')
    confirmed = env.skip_confirmations or formatting.no_going_back(key_id)
    if not confirmed:
        raise exceptions.CLIAbort('Aborted')
    manager.delete_key(key_id)
Permanently removes an SSH key.
def probably_wkt(text): valid = False valid_types = set([ 'POINT', 'LINESTRING', 'POLYGON', 'MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION', ]) matched = re.match(r'(\w+)\s*\([^)]+\)', text.strip()) if matched: valid = matched.group(1).upper() in valid_t...
Quick check to determine if the provided text looks like WKT
def listmetadataformats(**kwargs): cfg = current_app.config e_tree, e_listmetadataformats = verb(**kwargs) if 'identifier' in kwargs: OAIIDProvider.get(pid_value=kwargs['identifier']) for prefix, metadata in cfg.get('OAISERVER_METADATA_FORMATS', {}).items(): e_metadataformat = SubElement...
Create OAI-PMH response for ListMetadataFormats verb.
def describe_group(record, region): account_id = record['account'] group_name = cloudwatch.filter_request_parameters('groupName', record) vpc_id = cloudwatch.filter_request_parameters('vpcId', record) group_id = cloudwatch.filter_request_parameters('groupId', record, look_in_response=True) if cloudw...
Attempts to describe group ids.
def casefold_parts(self, parts):
    """Lower-case each path part on a case-insensitive (Windows) filesystem.

    On non-Windows filesystems the parts are returned untouched.
    """
    if not self.filesystem.is_windows_fs:
        return parts
    return [part.lower() for part in parts]
Return the lower-case version of parts for a Windows filesystem.
def noEmptyNests(node):
    """Recursively remove empty ``children`` lists from dicts nested in node.

    Mutates ``node`` in place and returns it for convenience.  Uses
    ``dict.get`` so dicts without a ``children`` key no longer raise
    ``KeyError`` (the original indexed ``node["children"]`` directly).
    """
    if isinstance(node, list):
        for item in node:
            noEmptyNests(item)
    if isinstance(node, dict):
        for value in node.values():
            noEmptyNests(value)
        if node.get("children") == []:
            node.pop("children")
    return node
recursively make sure that no dictionaries inside node contain empty children lists
def instance(self, other):
    """Return an instance Key, appending *other* as a name to this namespace."""
    name = str(other)
    # Instance names may not contain path separators.
    assert '/' not in name
    return Key(str(self) + ':' + name)
Returns an instance Key, by appending a name to the namespace.
def iter_links_param_element(cls, element): valuetype = element.attrib.get('valuetype', '') if valuetype.lower() == 'ref' and 'value' in element.attrib: link_type = identify_link_type(element.attrib.get('value')) yield LinkInfo( element=element, tag=element.tag, a...
Iterate a ``param`` element.
def cli_info(self, event):
    """Log basic facts about the running instance (host, port, flags, frontend)."""
    details = (
        'Instance:', self.instance,
        'Dev:', self.development,
        'Host:', self.host,
        'Port:', self.port,
        'Insecure:', self.insecure,
        'Frontend:', self.frontendtarget,
    )
    self.log(*details)
Provides information about the running instance
def table_mask(self): margin = compress_pruned( self._slice.margin( axis=None, weighted=False, include_transforms_for_dims=self._hs_dims, prune=self._prune, ) ) mask = margin < self._size if margin.sh...
ndarray, True where table margin <= min_base_size, same shape as slice.
def track_name_event(self, name):
    """Return the bytes for a track-name meta event (delta 0, varbyte length)."""
    length = self.int_to_varbyte(len(name))
    return '\x00' + META_EVENT + TRACK_NAME + length + name
Return the bytes for a track name meta event.
def searchsorted(arr, N, x):
    """Return the leftmost index at which x can be inserted into the sorted
    sequence ``arr`` (of length ``N``) while keeping it sorted.

    Equivalent to ``bisect.bisect_left(arr, x)``.  Fixes the original's
    found-value bug: it returned the stale lower bound when ``x`` was
    present (e.g. arr=[1,2,3], x=2 returned 0 instead of 1); insertion
    points for absent values are unchanged.
    """
    lo, hi = 0, N
    while lo < hi:
        mid = (lo + hi) // 2
        if arr[mid] < x:
            lo = mid + 1
        else:
            hi = mid
    return lo
N is length of arr
def extract_operations(self, migrations): operations = [] for migration in migrations: for operation in migration.operations: if isinstance(operation, RunSQL): statements = sqlparse.parse(dedent(operation.sql)) for statement in statemen...
Extract SQL operations from the given migrations
def init_path(): sitedirs = getsyssitepackages() for sitedir in sitedirs: env_path = os.environ['PATH'].split(os.pathsep) for module in allowed_modules: p = join(sitedir, module) if isdir(p) and not p in env_path: os.environ['PATH'] += env_t(os.pathsep + p...
Add any new modules that are directories to the PATH
def open_pr(self, url):
    """Open *url* (the new backport PR) in the browser, or just echo it in dry-run mode."""
    if self.dry_run:
        click.echo(f" dry-run: Create new PR: {url}")
        return
    click.echo("Backport PR URL:")
    click.echo(url)
    webbrowser.open_new_tab(url)
open url in the web browser
def _get_flowcell_id(in_file, require_single=True):
    """Retrieve the set of unique flowcell ids represented in the SampleSheet.

    Raises ValueError when ``require_single`` is set and several ids appear.
    """
    fc_ids = {row[0] for row in _read_input_csv(in_file)}
    if require_single and len(fc_ids) > 1:
        raise ValueError("There are several FCIDs in the same samplesheet file: %s" % in_file)
    return fc_ids
Retrieve the unique flowcell id represented in the SampleSheet.
def update_vpc_entry(nexus_ips, vpc_id, learned, active): LOG.debug("update_vpc_entry called") session = bc.get_writer_session() with session.begin(): for n_ip in nexus_ips: flipit = not active x = session.execute( sa.update(nexus_models_v2.NexusVPCAlloc).valu...
Change active state in vpc_allocate data base.
def calc_oqparam(request, job_id): job = logs.dbcmd('get_job', int(job_id)) if job is None: return HttpResponseNotFound() if not utils.user_has_permission(request, job.user_name): return HttpResponseForbidden() with datastore.read(job.ds_calc_dir + '.hdf5') as ds: oq = ds['oqpara...
Return the calculation parameters as a JSON
def check_field_cohesion(self, rec_write_fields, sig_write_fields): if self.n_sig>0: for f in sig_write_fields: if len(getattr(self, f)) != self.n_sig: raise ValueError('The length of field: '+f+' must match field n_sig.') datfmts = {} for ...
Check the cohesion of fields used to write the header
def query_tracking_code(tracking_code, year=None): payload = { 'Anio': year or datetime.now().year, 'Tracking': tracking_code, } response = _make_request(TRACKING_URL, payload) if not response['d']: return [] data = response['d'][0] destination = data['RetornoCadena6'] ...
Given a tracking_code, return a list of events related to the tracking code
def _passwd_opts(self): options = ['ControlMaster=auto', 'StrictHostKeyChecking=no', ] if self.opts['_ssh_version'] > (4, 9): options.append('GSSAPIAuthentication=no') options.append('ConnectTimeout={0}'.format(self.timeout)) if self.opts...
Return options to pass to ssh
def _generate_barcode_ids(info_iter): bc_type = "SampleSheet" barcodes = list(set([x[-1] for x in info_iter])) barcodes.sort() barcode_ids = {} for i, bc in enumerate(barcodes): barcode_ids[bc] = (bc_type, i+1) return barcode_ids
Create unique barcode IDs assigned to sequences
def alphanumeric_hash(s: str, size=5):
    """Short alphanumeric string derived from the MD5 hash of *s*.

    The digest is base32-encoded, truncated to *size* characters and
    lower-cased.  *s* must be ASCII-encodable.
    """
    import base64
    import hashlib
    digest = hashlib.md5(s.encode('ascii')).digest()
    encoded = base64.b32encode(digest)
    return encoded[:size].decode('ascii').lower()
Short alphanumeric string derived from hash of given string
def translate(self, text):
    """Translate *text*, returning the modified text.

    Resets the replacement counter, then substitutes every match of the
    built regex using this object itself as the replacement callable.
    """
    self.count = 0
    pattern = self._make_regex()
    return pattern.sub(self, text)
Translate text, returns the modified text.
def delete_node_nto1(node_list, begin, node, end): if begin is None: assert node is not None begin = node.precedence elif not isinstance(begin, list): begin = [begin] if end.in_or_out: for nb_ in begin: nb_.out_redirect(node.single_...
delete the node which has n-input and 1-output
def open(self):
    """Open a TCP socket to (self.host, self.port) and set it as the escpos device.

    Fixes the Python 2 ``print`` statement (a SyntaxError on Python 3,
    which the surrounding code uses).
    """
    self.device = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.device.connect((self.host, self.port))
    if self.device is None:
        # NOTE(review): socket.socket() never returns None, so this branch
        # is unreachable; kept for parity with the original error message.
        print("Could not open socket for %s" % self.host)
Open TCP socket and set it as escpos device
def __check_deprecated(self): if "SANIC_JWT_HANDLER_PAYLOAD_SCOPES" in self.app.config: raise exceptions.InvalidConfiguration( "SANIC_JWT_HANDLER_PAYLOAD_SCOPES has been deprecated. " "Instead, pass your handler method (not an import path) as " "initia...
Checks for deprecated configuration keys
def addComment(self, comment): url = "%s/addComment" % self.root params = { "f" : "json", "comment" : comment } return self._post(url, params, proxy_port=self._proxy_port, securityHandler=self._securityHandler, ...
adds a comment to a given item. Must be authenticated
def reset(self):
    """Reset the Union Pooler: zero the union SDR and clear the active-cell history."""
    num_inputs = self._numInputs
    self._unionSDR = numpy.zeros(shape=(num_inputs,))
    self._activeCellsHistory = []
Reset Union Pooler, clear active cell history
def clone_repo(pkg, dest, repo, repo_dest, branch):
    """Clone the Playdoh repo (recursively, on *branch*) into *repo_dest*.

    NOTE(review): ``pkg`` and ``dest`` are unused here — presumably kept
    for a shared callback signature; confirm before removing.
    """
    args = ['clone', '--recursive', '-b', branch, repo, repo_dest]
    git(args)
Clone the Playdoh repo into a custom path.
def macshim():
    """Shim that re-runs the '32'-suffixed binary as a sub-process to force
    32-bit Python on a 64-bit mac."""
    import subprocess
    import sys
    command = [sys.argv[0] + '32'] + sys.argv[1:]
    subprocess.call(command, env={"VERSIONER_PYTHON_PREFER_32_BIT": "yes"})
Shim to run 32-bit on 64-bit mac as a sub-process
def split_window(pymux, variables):
    """Split the current window: horizontally when '-h' is set, vertically otherwise."""
    pymux.add_process(
        variables['<executable>'],
        vsplit=variables['-h'],
        start_directory=variables['<start-directory>'],
    )
Split horizontally or vertically.
def do_help(self, args): if args.command: f = getattr(self, 'help_' + args.command, None) if f: f() return f = getattr(self, 'do_' + args.command, None) if not f: msg = self.nohelp % (args.command,) s...
print help on a command
def _srads2bt(self, data, channel_name):
    """Brightness-temperature computation based on spectral radiance.

    Applies the per-channel quadratic fit coefficients from BTFIT to the
    temperature derived from the channel's central wavenumber.
    """
    coef_a, coef_b, coef_c = BTFIT[channel_name]
    wavenumber = CALIB[self.platform_id][channel_name]["VC"]
    temp = self._tl15(data, wavenumber)
    return coef_a * temp * temp + coef_b * temp + coef_c
Computation based on spectral radiance.
def sign(self, signer: Signer):
    """Sign this message's serialized data with *signer* and store the signature."""
    payload = self._data_to_sign()
    self.signature = signer.sign(data=payload)
Sign message using signer.
def load_content(self): rel_path = self.root_file_url.replace(os.path.basename(self.root_file_url), '') self.toc_file_url = rel_path + self.root_file.find(id="ncx")['href'] self.toc_file_soup = bs(self.book_file.read(self.toc_file_url), 'xml') for n, c in cross(self.toc_file_soup.find_al...
Load the book content
def filter_short(terms):
    """Keep a term only while its brute-force space (26**length) exceeds its
    rank in the dictionary, dropping words easier to brute-force than to rank."""
    kept = []
    for rank, term in enumerate(terms):
        if 26 ** len(term) > rank:
            kept.append(term)
    return kept
only keep if brute-force possibilities are greater than this word's rank in the dictionary
def trimmed_split(s, seps=(";", ",")):
    """Split *s* by the first separator from *seps* it contains, stripping
    whitespace from each item; returns ``[s]`` when no separator matches."""
    stripped = s.strip()
    for sep in seps:
        if sep in s:
            return [item.strip() for item in stripped.split(sep)]
    return [s]
Given a string s, split it by the first of the seps found in it.
def getAceTypeText(self, t):
    """Return the textual representation of an ACE type bit.

    Raises CommandExecutionError listing the valid types when *t* is unknown.
    """
    try:
        return self.validAceTypes[t]['TEXT']
    except KeyError:
        valid = ', '.join(self.validAceTypes)
        raise CommandExecutionError((
            'No ACE type "{0}". It should be one of the following: {1}'
        ).format(t, valid))
returns the textual representation of an acetype bit
def process_on_event(self): if not self._queue: self._active = False return events = list(OrderedDict([(event.event_name, event) for event in self._queue]).values()) self._queue = [] for event in events: msg = {} ...
Trigger callback change event and triggering corresponding streams.
def find(self, path, all=False):
    """Look for *path* under STATIC_ROOT; return a list when *all* is true."""
    located = os.path.join(settings.STATIC_ROOT, path)
    return [located] if all else located
Looks for files in the app directories.
def hasReaders(self, ulBuffer):
    """Inexpensively check for readers on *ulBuffer*, letting writers
    fast-fail potentially expensive copies and writes."""
    return self.function_table.hasReaders(ulBuffer)
inexpensively checks for readers to allow writers to fast-fail potentially expensive copies and writes.
def walk(self, into_past=0, into_future=0): walked_range = [] for shift in range(-into_past, into_future): kwargs = dict(drip_model=self.drip_model, name=self.name, now_shift_kwargs={'days': shift}) walked_range.append(self.__cl...
Walk over a date range and create new instances of self with new ranges.
def tagged(self, tag):
    """Create a new `Scm` identical to this one but carrying the given `tag`."""
    return Scm(
        self.connection,
        self.developer_connection,
        self.url,
        tag=tag,
    )
Creates a new `Scm` identical to this `Scm` but with the given `tag`.
def create_add_on(self, add_on):
    """Make the given `AddOn` available to subscribers on this plan by
    POSTing it to the plan's ``/add_ons`` endpoint."""
    target = urljoin(self._url, '/add_ons')
    return add_on.post(target)
Make the given `AddOn` available to subscribers on this plan.
def _decode_ctrl_packet(self, version, packet): for i in range(5): input_bit = packet[i] self._debug(PROP_LOGLEVEL_DEBUG, "Byte " + str(i) + ": " + str((input_bit >> 7) & 1) + str((input_bit >> 6) & 1) + str((input_bit >> 5) & 1) + str((input_bit >> 4) & 1) + str((input_bit >> 3) & 1) + ...
Decode a control packet into the list of sensors.
def taskdir(self):
    """Return the BASE/TAG/task_family directory under which all artefacts live."""
    parts = (self.BASE, self.TAG, self.task_family)
    return os.path.join(*parts)
Return the directory under which all artefacts are stored.
def key(self):
    """Redis key for the current field: built from the owning instance's
    model name, its primary key and this field's name."""
    instance = self._instance
    return self.make_key(instance._name, instance.pk.get(), self.name)
A property to return the key used in redis for the current field.
def usage(self):
    """Usage string describing the signature, e.g. ``<src> <dst>``."""
    rendered = [u'<%s>' % pattern.usage for pattern in self.patterns]
    return u' '.join(rendered)
A usage string that describes the signature.
def _make_hyperparameter_decode_func(locked_values, meta_parameters): def decode(solution): hyperparameters = copy.deepcopy(locked_values) index = 0 for name, parameters in meta_parameters.iteritems(): binary_size = parameters['binary_size'] binary = solution[index:in...
Create a function that converts the binary solution to parameters.
def process_form(self, instance, field, form, empty_marker=None, emptyReturnsMarker=False): fieldName = field.getName() if fieldName + "_uid" in form: uid = form.get(fieldName + "_uid", '') if field.multiValued and\ (isinstance(uid, str) o...
Return a UID so that ReferenceField understands.
def preprocess(cls, cat):
    """Normalize each cat input: open string paths as intake catalogs,
    pass anything else through unchanged."""
    if isinstance(cat, str):
        return intake.open_catalog(cat)
    return cat
Function to run on each cat input
def rm(ctx, cluster_id): session = create_session(ctx.obj['AWS_PROFILE_NAME']) client = session.client('emr') try: result = client.describe_cluster(ClusterId=cluster_id) target_dns = result['Cluster']['MasterPublicDnsName'] flag = click.prompt( "Are you sure you want to t...
Terminate an EMR cluster
def build_basic_auth(username: str, password: str) -> str:
    """Build an ``Authorization`` header value for HTTP Basic Auth.

    The username must not contain ':' (it delimits user and password).
    """
    assert ":" not in username
    raw = f"{username}:{password}".encode()
    token = base64.b64encode(raw).decode()
    return "Basic " + token
Build an Authorization header for HTTP Basic Auth.
def reward_bonus(self, assignment_id, amount, reason):
    """Log the bonus amount and reason for the given assignment."""
    message = 'Award ${} for assignment {}, with reason "{}"'.format(
        amount, assignment_id, reason
    )
    logger.info(message)
Print out bonus info for the assignment
def split_arg_string(string): rv = [] for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)"' r'|\S+)\s*', string, re.S): arg = match.group().strip() if arg[:1] == arg[-1:] and arg[:1] in '"\'': ...
Given an argument string this attempts to split it into small parts.
def __struct_params_s(obj, separator=', ', f=repr, fmt='%s = %s'):
    """Join the formatted representations of all included struct fields of *obj*."""
    rendered = [
        __single_param(obj, name, f, fmt)
        for name in dir(obj)
        if __inc_param(obj, name)
    ]
    return separator.join(rendered)
method wrapper for printing all elements of a struct
def iterprogress(sized_iterable):
    """Yield items from *sized_iterable*, printing a progress bar to stderr."""
    progress = ProgressBar(0, len(sized_iterable))
    for index, item in enumerate(sized_iterable):
        yield item
        progress.update_and_print(index, sys.stderr)
Iterate something printing progress bar to stderr
def his_from_sql(self, db_name, point):
    """Retrieve *point*'s history series from the SQL "history" table,
    re-indexed by timestamp."""
    history = self._read_from_sql('select * from "%s"' % "history", db_name)
    history.index = history["index"].apply(Timestamp)
    return history.set_index("index")[point]
Retrieve point histories from SQL database
def split_pubnote(pubnote_str): pubnote = {} parts = pubnote_str.split(',') if len(parts) > 2: pubnote['journal_title'] = parts[0] pubnote['journal_volume'] = parts[1] pubnote['page_start'], pubnote['page_end'], pubnote['artid'] = split_page_artid(parts[2]) return {key: val for (...
Split pubnote into journal information.
def ext_pillar(minion_id, pillar, function, **kwargs):
    """Grab external pillar data by dispatching to the configured *function*.

    Private names (leading underscore) and unknown names yield an empty dict.
    """
    is_private = function.startswith('_')
    if is_private or function not in globals():
        return {}
    return globals()[function](minion_id, pillar, **kwargs)
Grabs external pillar data based on configured function
def FilterArgsFromSemanticProtobuf(protobuf, kwargs):
    """Move matching kwargs onto *protobuf* fields, removing them from *kwargs*.

    Keys are popped even when their value is None; only non-None values are
    assigned to the protobuf.
    """
    for descriptor in protobuf.type_infos:
        field = descriptor.name
        value = kwargs.pop(field, None)
        if value is not None:
            setattr(protobuf, field, value)
Assign kwargs to the protobuf, and remove them from the kwargs dict.
def check_user_by_name(user_name, u_pass):
    """Check the password for the named user.

    Returns -1 when no such user exists, 1 when the MD5 of the supplied
    password matches, 0 otherwise.
    """
    query = TabMember.select().where(TabMember.user_name == user_name)
    if query.count() == 0:
        return -1
    user = query.get()
    return 1 if user.user_pass == tools.md5(u_pass) else 0
Checking the password by user's name.
def _check_default_index(items, default_index): num_items = len(items) if default_index is not None and not isinstance(default_index, int): raise TypeError("The default index ({}) is not an integer".format(default_index)) if default_index is not None and default_index >= num_items: raise Val...
Check that the default is in the list, and not empty
def parse_multi_value_header(header_str): parsed_parts = [] if header_str: parts = header_str.split(',') for part in parts: match = re.search('\s*(W/)?\"?([^"]*)\"?\s*', part) if match is not None: parsed_parts.append(match.group(2)) return parsed_part...
Break apart an HTTP header string that is potentially a quoted, comma separated list as used in entity headers in RFC2616.
def delete(self): if not self.is_created(): LOG.debug("Container was not created. Skipping deletion") return try: self.docker_client.containers\ .get(self.id)\ .remove(force=True) except docker.errors.NotFound: LOG.d...
Removes a container that was created earlier.
def location_path(cls, project, location):
    """Return a fully-qualified ``projects/{project}/locations/{location}`` string."""
    template = "projects/{project}/locations/{location}"
    return google.api_core.path_template.expand(
        template, project=project, location=location
    )
Return a fully-qualified location string.
def setXr(self, Xr):
    """Set genotype data of the set component.

    Stores *Xr* on this object and mirrors it into the GP block's
    covariance term ``G``.
    """
    self.gp_block.covar.G = Xr
    self.Xr = Xr
set genotype data of the set component
def _state_table_name(environment=None, layer=None, stage=None): if environment is None: environment = os.environ.get("HUMILIS_ENVIRONMENT") if layer is None: layer = os.environ.get("HUMILIS_LAYER") if stage is None: stage = os.environ.get("HUMILIS_STAGE") if environment: ...
The name of the state table associated to a humilis deployment.
def preformatted_text(source: str) -> str:
    """Render *source* as a preformatted text box (HTML-escaped, dedented).

    Returns an empty string for empty input.
    """
    environ.abort_thread()
    if not source:
        return ''
    escaped = render_utils.html_escape(source)
    return '<pre class="preformatted-textbox">{text}</pre>'.format(
        text=str(textwrap.dedent(escaped))
    )
Renders preformatted text box
def end_element(self, tag):
    """On a closing ``form`` tag, archive the current form and reset it."""
    if tag != u'form':
        return
    self.forms.append(self.form)
    self.form = None
search for ending form values.
def decode(self, name, as_map_key=False):
    """Resolve *name* through the cache; always returns a name.

    Cache hits return the cached value; cacheable names are encached on
    the way through; everything else passes through unchanged.
    """
    if is_cache_key(name) and name in self.key_to_value:
        return self.key_to_value[name]
    if is_cacheable(name, as_map_key):
        return self.encache(name)
    return name
Always returns the name
def colorize(occurence, maxoccurence, minoccurence):
    """Map an occurrence count to an RGB triple: red at max, blue at min,
    and an interpolated mix in between."""
    if occurence == maxoccurence:
        return (255, 0, 0)
    if occurence == minoccurence:
        return (0, 0, 255)
    red = int(float(occurence) / maxoccurence * 255)
    blue = int(float(minoccurence) / occurence * 255)
    return (red, 0, blue)
A formula for determining colors.
def point_lm(self, context):
    """Supply point-source lm coordinates to montblanc for the context's
    array extents, cast to the context dtype."""
    (lower, upper), _ = context.array_extents(context.name)
    return np.asarray(lm_coords[lower:upper], dtype=context.dtype)
Supply point source lm coordinates to montblanc
def cached_read(self, kind):
    """Serve *kind* stats from the cache, pulling fresh stats for missing
    or stale entries, to avoid hammering the API."""
    if kind not in self.cache:
        self.pull_stats(kind)
    age = self.epochnow() - self.cache[kind]['lastcall']
    if age > self.cache_timeout:
        self.pull_stats(kind)
    return self.cache[kind]['lastvalue']
Cache stats calls to prevent hammering the API
def follow_file(self, f, seen, trim):
    """Whether to recurse into a file's dependencies.

    Skips files already in the graph or already seen; with *trim* set,
    also skips builtin/system-provenance files.
    """
    if f in self.graph.nodes or f in seen:
        return False
    if not trim:
        return True
    return not isinstance(self.provenance[f], (resolve.Builtin, resolve.System))
Whether to recurse into a file's dependencies.
def bake(src): src = os.path.realpath(src) path = os.path.dirname(src) filename = os.path.basename(src) html = _load_file(src).read() if imghdr.what("", html): html = "<html><body><img src='{}'/></body></html>".format(cgi.escape(filename)) cwd = os.getcwd() os.chdir(path) bs_html...
Runs the encoder on the given source file
def OnDirectionChoice(self, event):
    """Direction choice event handler: store the chosen direction parameter
    and post a chart redraw event."""
    choice_index = event.GetSelection()
    label = self.direction_choicectrl.GetItems()[choice_index]
    self.attrs["direction"] = self.choice_label2param[label]
    post_command_event(self, self.DrawChartMsg)
Direction choice event handler
def y_rotate(rotationAmt):
    """Create a 4x4 matrix that rotates around the y axis by *rotationAmt* radians."""
    cos_a = math.cos(rotationAmt)
    sin_a = math.sin(rotationAmt)
    return Matrix4(
        (cos_a, 0, sin_a, 0),
        (0, 1, 0, 0),
        (-sin_a, 0, cos_a, 0),
        (0, 0, 0, 1),
    )
Create a matrix that rotates around the y axis.
def gen_file_jinja(self, template_file, data, output, dest_path): if not os.path.exists(dest_path): os.makedirs(dest_path) output = join(dest_path, output) logger.debug("Generating: %s" % output) env = Environment() env.loader = FileSystemLoader(self.TEMPLATE_DIR) ...
Fills data to the project template, using jinja2.
async def commit_prepared(self, xid, *, is_prepared=True):
    """Commit a prepared two-phase (XA) transaction.

    Ends the XA branch first when it has not yet been prepared.
    NOTE(review): xid is interpolated into the SQL; assumed to be
    application-generated, not untrusted input — confirm at call sites.
    """
    if not is_prepared:
        end_statement = "XA END '%s'" % xid
        await self.execute(end_statement)
    commit_statement = "XA COMMIT '%s'" % xid
    await self.execute(commit_statement)
Commit prepared twophase transaction.
def _objectdata_cache_key(func, obj):
    """Cache key for object data: ``uid-reviewstate-modified`` so the entry
    invalidates on workflow or modification changes."""
    uid = api.get_uid(obj)
    review_state = api.get_review_status(obj)
    modified = api.get_modification_date(obj).millis()
    return "{}-{}-{}".format(uid, review_state, modified)
Cache Key for object data
def validate_format(self, allowed_formats):
    """Validate that this export type accepts the configured format.

    Prints an error and exits the process on mismatch; returns None on success.
    """
    if self.format not in allowed_formats:
        message = ("Export type '{0}' does not accept '{1}' format, only: "
                   "{2}".format(self.type, self.format, allowed_formats))
        ui.error(message)
        sys.exit(1)
Validate the allowed formats for a specific type.
def _calculate_bounds(self): first = self.coll_handle.find_one(None, sort=[("ts", ASCENDING)]) last = self.coll_handle.find_one(None, sort=[("ts", DESCENDING)]) self._start = first['ts'] if self._start.tzinfo is None: self._start = self._start.replace(tzinfo=tzutc()) ...
Calculate beginning and end of log events.
def process_csr(cls, common_name, csr=None, private_key=None, country=None, state=None, city=None, organisation=None, branch=None): if csr: if branch or organisation or city or state or country: cls.echo('Following options are only used to gene...
Create a PK and a CSR if needed.
def load_plugins(builtin=True, others=True): for entry_point in pkg_resources.iter_entry_points('yolk.plugins'): try: plugin = entry_point.load() except KeyboardInterrupt: raise except Exception as err_msg: warn("Unable to load plugin %s: %s" % \ ...
Load plugins, either builtin, others, or both.
def rset(self):
    """Send the LMTP RSET command, process the response and clear per-message state.

    Logs a warning when the server does not answer with a 250 status.
    """
    self._send('RSET\r\n')
    resp = self._read()
    if not resp.startswith('250'):
        # logger.warn is deprecated; logging.Logger.warning is the supported name
        logger.warning('Unexpected server response at RSET: ' + resp)
    self._recipients = []
    self.results = {}
Send LMTP RSET command and process the server response.
def _slugify(path: Union[str, PurePath]) -> str: if not isinstance(path, PurePath): path = PurePath(path) parts = list(path.parts) if parts[0] == '/': parts.pop(0) elif len(parts[0]) == 3 and parts[0][1:] == ':\\': parts[0] = parts[0][0] filename = '-'.join(parts) assert ...
Make a path into a filename.
def fromseconds(cls, seconds):
    """Return a |Period| instance based on the given number of seconds."""
    try:
        total = int(seconds)
    except TypeError:
        # numpy-style array scalars: take the first element of the flattened array
        total = int(seconds.flatten()[0])
    return cls(datetime.timedelta(0, total))
Return a |Period| instance based on a given number of seconds.
def _get(self, pos): res = None, None if pos is not None: try: res = self[pos], pos except (IndexError, KeyError): pass return res
loads widget at given position; handling invalid arguments
def services(self): "Returns a list of Service objects available in this folder" return [self._get_subfolder("%s/%s/" % (s['name'].rstrip('/').split('/')[-1], s['type']), self._service_type_mapping.get(s['type'], Service)) for s in self._json_struct.get(...
Returns a list of Service objects available in this folder
def _get_projection(self): try: proj_str = self.nc.attrs['gdal_projection'] except TypeError: proj_str = self.nc.attrs['gdal_projection'].decode() radius_a = proj_str.split('+a=')[-1].split()[0] if float(radius_a) > 10e3: units = 'm' scale ...
Get projection from the NetCDF4 attributes
def build_process_isolation_temp_dir(self):
    """Create an owner-only (0700) temporary directory for process isolation.

    The directory is registered for removal at interpreter exit and its
    path is returned.
    """
    path = tempfile.mkdtemp(prefix='ansible_runner_pi_',
                            dir=self.process_isolation_path)
    owner_only = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
    os.chmod(path, owner_only)
    atexit.register(shutil.rmtree, path)
    return path
Create a temporary directory for process isolation to use.
def on_ok(self): if self.dir_select.value: npyscreen.notify_wait('In the process of restoring', title='Restoring...') status = self.restore(self.dirs[self.dir_select.value[0]]) if status[0]: npyscreen.notify_confirm('Status of...
Perform restoration on the backup file selected