code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _get_all(self): return [self._to_dict(row) for row in models.User.objects.all()]
Get all users from db and turn into list of dicts
def construct(path, name=None): "Selects an appropriate CGroup subclass for the given CGroup path." name = name if name else path.split("/")[4] classes = {"memory": Memory, "cpu": CPU, "cpuacct": CPUAcct} constructor = classes.get(name, CGroup) log.debug("Chose %s for: %s", constructor.__name__, path) return constructor(path, name)
Selects an appropriate CGroup subclass for the given CGroup path.
def decode_to_unicode(content):
    """Decode ISO-8859-1 bytes to unicode (used with the SF API).

    Returns *content* unchanged when it is falsy or already ``str``;
    on a decode failure the original bytes are returned as-is.
    """
    if content and not isinstance(content, str):
        try:
            # Latin-1 maps every byte value, so this except is defensive only.
            return content.decode("ISO-8859-1")
        except UnicodeDecodeError:
            # bug fix: decoding raises UnicodeDecodeError, not
            # UnicodeEncodeError as the original caught.
            return content
    return content
Decode ISO-8859-1 bytes to unicode, for use with the SF API.
def create_api_key(self, api_id, stage_name): response = self.apigateway_client.create_api_key( name='{}_{}'.format(stage_name, api_id), description='Api Key for {}'.format(api_id), enabled=True, stageKeys=[ { 'restApiId': '{}'.format(api_id), 'stageName': '{}'.format(stage_name) }, ] ) print('Created a new x-api-key: {}'.format(response['id']))
Create new API key and link it with an api_id and a stage_name
def print_table(lines, separate_head=True):
    """Print a formatted table given a 2-dimensional array.

    Column widths are computed from the widest cell in each column; when
    *separate_head* is true a dashed rule is printed after row 0.
    Cells are assumed to be strings (their len() is taken directly).
    """
    # First pass: compute the maximum width of each column, growing the
    # widths list on demand so ragged rows are tolerated.
    widths = []
    for line in lines:
        for i, size in enumerate([len(x) for x in line]):
            while i >= len(widths):
                widths.append(0)
            if size > widths[i]:
                widths[i] = size
    # Build a str.format template like "{0:5} | {1:8} | ...".
    print_string = ""
    for i, width in enumerate(widths):
        print_string += "{" + str(i) + ":" + str(width) + "} | "
    if (len(print_string) == 0):
        # No columns at all -> nothing to print.
        return
    print_string = print_string[:-3]  # drop the trailing " | "
    for i, line in enumerate(lines):
        print(print_string.format(*line))
        if (i == 0 and separate_head):
            # Dashed separator sized to the full table width.
            print("-" * (sum(widths) + 3 * (len(widths) - 1)))
Prints a formatted table given a 2 dimensional array
def encrypt(s, pk, pk_format=SER_COMPACT, mac_bytes=10, curve=None): curve = (Curve.by_pk_len(len(pk)) if curve is None else Curve.by_name(curve)) p = curve.pubkey_from_string(pk, pk_format) return p.encrypt(s, mac_bytes)
Encrypts `s' for public key `pk'
def send_email(fromaddr, toaddr, subject, message): from smtplib import SMTP from email.mime.text import MIMEText SERVER = "localhost" _message = MIMEText(message) _message['Subject'] = subject _message['From'] = fromaddr _message['To'] = ", ".join(toaddr) server = SMTP(SERVER) server.sendmail(fromaddr, toaddr, _message.as_string()) server.quit()
Send an email message
def index(path=None): payload = { "username": "soandso", "message": "Hello bot", "vars": { "name": "Soandso", } } return Response(r .format(json.dumps(payload)), mimetype="text/plain")
On all other routes, just return an example `curl` command.
def _mkAddBtnVisible(self): if not self._btn_add_height: self._btn_add_height = self._cwBtn.height() if self.count() == 0: self._cwBtn.setMinimumHeight(self._btn_add_height - 8) self.setMinimumHeight(self._btn_add_height)
Ensure that the Add button is visible also when there are no tabs
def _socket_close(self): callback = self.__callback self.__callback = None try: if callback: callback(None, InterfaceError('connection closed')) finally: self.__job_queue = [] self.__alive = False self.__pool.cache(self)
cleanup after the socket is closed by the other end
def _write(self, data):
    """Write *data* out to the Scratch socket, retrying partial sends.

    Raises ScratchError on an OS-level socket error and
    ScratchConnectionError when the peer has closed the connection;
    both paths mark the connection as closed first.
    """
    total_sent = 0
    length = len(data)
    while total_sent < length:
        try:
            sent = self.socket.send(data[total_sent:])
        except socket.error as exc:
            # bug fix: `except ... as (err, msg)` is Python-2-only
            # tuple-unpacking syntax (a SyntaxError on Python 3); use
            # the exception's errno/strerror attributes instead.
            self.connected = False
            raise ScratchError("[Errno %d] %s" % (exc.errno, exc.strerror))
        if sent == 0:
            self.connected = False
            raise ScratchConnectionError("Connection broken")
        total_sent += sent
Writes string data out to Scratch
async def fromURL(cls, url, *, credentials=None, insecure=False): session = await bones.SessionAPI.fromURL( url, credentials=credentials, insecure=insecure) return cls(session)
Return an `Origin` for a given MAAS instance.
async def login( username: str, password: str, brand: str, websession: ClientSession = None) -> API: api = API(brand, websession) await api.authenticate(username, password) return api
Log in to the API.
def invoke(config, name, input): myname = name or config.name click.echo('Invoking ' + myname) output = lambder.invoke_function(myname, input) click.echo(output)
Invoke function in AWS
def atlas_peer_table_unlock(): global PEER_TABLE_LOCK, PEER_TABLE_LOCK_HOLDER, PEER_TABLE_LOCK_TRACEBACK try: assert PEER_TABLE_LOCK_HOLDER == threading.current_thread() except: log.error("Locked by %s, unlocked by %s" % (PEER_TABLE_LOCK_HOLDER, threading.current_thread())) log.error("Holder locked from:\n%s" % "".join(PEER_TABLE_LOCK_TRACEBACK)) log.error("Errant thread unlocked from:\n%s" % "".join(traceback.format_stack())) os.abort() PEER_TABLE_LOCK_HOLDER = None PEER_TABLE_LOCK_TRACEBACK = None PEER_TABLE_LOCK.release() return
Unlock the global health info table.
def user_agent(self): version = "" project_root = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(project_root, 'VERSION')) as version_file: version = version_file.read().strip() return "Python Snow Api Client (Version %s)" % version
Build the user agent string, including the client version read from the VERSION file.
def render(self): self.screen.reset() self.screen.blit(self.corners) self.screen.blit(self.lines, (1, 1)) self.screen.blit(self.rects, (int(self.screen.width / 2) + 1, 1)) self.screen.blit(self.circle, (0, int(self.screen.height / 2) + 1)) self.screen.blit(self.filled, (int(self.screen.width / 2) + 1, int(self.screen.height / 2) + 1)) self.screen.update() self.clock.tick()
Send the current screen content to Mate Light.
def _map_call(self, cmd): if config.debug and cmd != self._mapping.get(cmd, cmd): self.LOG.debug("MAP %s ==> %s" % (cmd, self._mapping[cmd])) cmd = self._mapping.get(cmd, cmd) if not self._use_deprecated and any(cmd.startswith(i) for i in ("d.get_", "f.get_", "p.get_", "t.get_")): cmd = cmd[:2] + cmd[6:] return cmd
Map old to new command names.
def _get_stats_files(data, out_dir=None): if not out_dir: out_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "qc", dd.get_sample_name(data), "samtools")) stats_file = tz.get_in(["depth", "samtools", "stats"], data) idxstats_file = tz.get_in(["depth", "samtools", "idxstats"], data) if not stats_file: stats_file = os.path.join(out_dir, "%s.txt" % dd.get_sample_name(data)) if not idxstats_file: idxstats_file = os.path.join(out_dir, "%s-idxstats.txt" % dd.get_sample_name(data)) return stats_file, idxstats_file
Retrieve stats files from pre-existing dictionary or filesystem.
def bqsr_table(data): in_file = dd.get_align_bam(data) out_file = "%s-recal-table.txt" % utils.splitext_plus(in_file)[0] if not utils.file_uptodate(out_file, in_file): with file_transaction(data, out_file) as tx_out_file: assoc_files = dd.get_variation_resources(data) known = "-k %s" % (assoc_files.get("dbsnp")) if "dbsnp" in assoc_files else "" license = license_export(data) cores = dd.get_num_cores(data) ref_file = dd.get_ref_file(data) cmd = ("{license}sentieon driver -t {cores} -r {ref_file} " "-i {in_file} --algo QualCal {known} {tx_out_file}") do.run(cmd.format(**locals()), "Sentieon QualCal generate table") return out_file
Generate recalibration tables as inputs to BQSR.
def create(cls, name, datacenter, subnet=None, gateway=None, background=False): if not background and not cls.intty(): background = True datacenter_id_ = int(Datacenter.usable_id(datacenter)) vlan_params = { 'name': name, 'datacenter_id': datacenter_id_, } if subnet: vlan_params['subnet'] = subnet if gateway: vlan_params['gateway'] = gateway result = cls.call('hosting.vlan.create', vlan_params) if not background: cls.echo('Creating your vlan.') cls.display_progress(result) cls.echo('Your vlan %s has been created.' % name) return result
Create a new vlan.
def add_to_environment(self, environment): self._env = environment._env self._userdata = ffi.new_handle(self) ENVIRONMENT_DATA[self._env].routers[self.name] = self lib.EnvAddRouterWithContext( self._env, self._name.encode(), self._priority, lib.query_function, lib.print_function, lib.getc_function, lib.ungetc_function, lib.exit_function, self._userdata)
Add the router to the given environment.
def _define_absl_flags(self, absl_flags): key_flags = set(absl_flags.get_key_flags_for_module(sys.argv[0])) for name in absl_flags: if name in _BUILT_IN_FLAGS: continue flag_instance = absl_flags[name] if name == flag_instance.name: suppress = flag_instance not in key_flags self._define_absl_flag(flag_instance, suppress)
Defines flags from absl_flags.
def _has_virtual_io_attrs(file, mode_int):
    """Check if *file* has all the attributes needed for virtual IO.

    Write support is only required when not opening read-only, and read
    support only when not opening write-only.
    """
    readonly = mode_int == _snd.SFM_READ
    writeonly = mode_int == _snd.SFM_WRITE
    required = [
        hasattr(file, 'seek'),
        hasattr(file, 'tell'),
        readonly or hasattr(file, 'write'),
        writeonly or hasattr(file, 'read') or hasattr(file, 'readinto'),
    ]
    return all(required)
Check if file has all the necessary attributes for virtual IO.
def process_npdu(self, npdu): if _debug: ProxyServiceNetworkAdapter._debug("process_npdu %r", npdu) pdu = PDU() npdu.encode(pdu) if _debug: ProxyServiceNetworkAdapter._debug(" - pdu: %r", pdu) if pdu.pduDestination.addrType == Address.localBroadcastAddr: xpdu = ServerToProxyBroadcastNPDU(pdu) else: xpdu = ServerToProxyUnicastNPDU(pdu.pduDestination, pdu) xpdu.pduDestination = self.conn.address self.conn.service.service_request(xpdu)
encode NPDUs from the network service access point and send them to the proxy.
def logger(self): if self._logger: return self._logger else: log_builder = p_logging.ProsperLogger( self.PROGNAME, self.config.get_option('LOGGING', 'log_path'), config_obj=self.config ) if self.verbose: log_builder.configure_debug_logger() else: id_string = '({platform}--{version})'.format( platform=platform.node(), version=self.VERSION ) if self.config.get_option('LOGGING', 'discord_webhook'): log_builder.configure_discord_logger( custom_args=id_string ) if self.config.get_option('LOGGING', 'slack_webhook'): log_builder.configure_slack_logger( custom_args=id_string ) if self.config.get_option('LOGGING', 'hipchat_webhook'): log_builder.configure_hipchat_logger( custom_args=id_string ) self._logger = log_builder.get_logger() return self._logger
uses "global logger" for logging
def get(self, telescope, band): klass = self._bpass_classes.get(telescope) if klass is None: raise NotDefinedError('bandpass data for %s not defined', telescope) bp = klass() bp.registry = self bp.telescope = telescope bp.band = band return bp
Get a Bandpass object for a known telescope and filter.
def create(controller_id, name): def _decorator(cls): class _ControllerClass(cls, Controller): def __init__(self): Controller.__init__(self, controller_id, name) for key in cls.__dict__.keys(): prop = cls.__dict__[key] if isinstance(prop, KerviValue): if prop.is_input: self.inputs._add_internal(key, prop) else: self.outputs._add_internal(key, prop) cls.__init__(self) return _ControllerClass return _decorator
Turn class into a kervi controller
def error_rate(predictions, labels):
    """Percentage of dense predictions that disagree with the 1-hot labels."""
    matches = np.sum(np.argmax(predictions, 1) == np.argmax(labels, 1))
    total = predictions.shape[0]
    return 100.0 - (100.0 * matches / total)
Return the error rate based on dense predictions and 1-hot labels.
def gather(input): try: line = input.next() except StopIteration: return lead = True buffer = [] while line.kind == 'text': value = line.line.rstrip().rstrip('\\') + ('' if line.continued else '\n') if lead and line.stripped: yield Line(line.number, value) lead = False elif not lead: if line.stripped: for buf in buffer: yield buf buffer = [] yield Line(line.number, value) else: buffer.append(Line(line.number, value)) try: line = input.next() except StopIteration: line = None break if line: input.push(line)
Collect contiguous lines of text, preserving line numbers.
def write_tlv(data):
    """Convert a dict to TLV8 bytes, splitting values into 255-byte fragments."""
    chunks = []
    for key, value in data.items():
        tag = bytes([int(key)])
        remaining = len(value)
        offset = 0
        while offset < len(value):
            fragment = min(remaining, 255)
            chunks.append(tag)
            chunks.append(bytes([fragment]))
            chunks.append(value[offset:offset + fragment])
            offset += fragment
            remaining -= fragment
    return b''.join(chunks)
Convert a dict to TLV8 bytes.
def evaluate(x, y, constant, amplitude, x_mean, y_mean, x_stddev, y_stddev, theta): model = Const2D(constant)(x, y) + Gaussian2D(amplitude, x_mean, y_mean, x_stddev, y_stddev, theta)(x, y) return model
Two dimensional Gaussian plus constant function.
def job_to_dict(job): data = OrderedDict() data['id'] = job.id data['name'] = job.name data['func'] = job.func_ref data['args'] = job.args data['kwargs'] = job.kwargs data.update(trigger_to_dict(job.trigger)) if not job.pending: data['misfire_grace_time'] = job.misfire_grace_time data['max_instances'] = job.max_instances data['next_run_time'] = None if job.next_run_time is None else job.next_run_time return data
Converts a job to an OrderedDict.
def type_suffix(self): if not self.type or self.type.lower() == 'ga': return '' return '-%s' % self.type.lower()
This is used in compose ID.
def fetchone(self): row = self._session.fetchone() if row: return self._row_factory(row)
Fetches next row, or ``None`` if there are no more rows
def native_string_to_bytes(s, encoding="ascii", errors="strict"):
    """Encode the native string *s* to ``bytes``.

    Raises TypeError when *s* is not a native ``str``.
    """
    if not isinstance(s, str):
        raise TypeError("{} must be type str, not {}".format(s, type(s)))
    # On Python 2 the native str already *is* bytes; return it untouched.
    return s if str is bytes else s.encode(encoding=encoding, errors=errors)
Ensure that the native string ``s`` is converted to ``bytes``.
def makeSubDir(dirName):
    """Create *dirName* if it doesn't already exist and make it world-accessible.

    Returns the directory name so the call can be chained/assigned.
    """
    if not os.path.exists(dirName):
        os.mkdir(dirName)
        # bug fix: 0777 is Python-2 octal syntax (SyntaxError on py3);
        # 0o777 keeps the same world-readable/writable/executable mode.
        os.chmod(dirName, 0o777)
    return dirName
Makes a given subdirectory if it doesn't already exist, making sure it is public.
def suffix(tokens, operator_table): operator, matched_tokens = operator_table.suffix.match(tokens) if operator: return TokenMatch(operator, None, matched_tokens)
Match a suffix of an operator.
def unlink_parent_dir(path: Path) -> None: logger.info(f"unlink {str(path)}") path.unlink() parent_path = path.parent try: parent_path.rmdir() logger.info(f"rmdir {str(parent_path)}") except OSError as oe: logger.debug(f"Did not remove {str(parent_path)}: {str(oe)}")
Remove a file and if the dir is empty remove it
def build_result(data): more = {} for key, value in data.items(): if key != 'elements': newnode = value else: newnode = {} for el in value: nkey, nvalue = process_node(el) newnode[nkey] = nvalue more[key] = newnode return more
Create a dictionary with the contents of result.json
def import_log_funcs(): global g_logger curr_mod = sys.modules[__name__] for func_name in _logging_funcs: func = getattr(g_logger, func_name) setattr(curr_mod, func_name, func)
Import the common log functions from the global logger to the module.
def read(self): data = bytearray() while True: incoming_bytes = self.comport.inWaiting() if incoming_bytes == 0: break else: content = self.comport.read(size=incoming_bytes) data.extend(bytearray(content)) return data
Read data from serial port and returns a ``bytearray``.
def serialize(self, *fields): if fields: if not set(fields).issubset(self.data_nodes): raise SchemaError('Invalid field for serialization: {}'.format(set(fields).difference(self.data_nodes))) return OrderedDict([(k, getattr(self, k)) for k in fields]) return OrderedDict([(k, getattr(self, k)) for k in self.data_nodes])
Serialize Nodes and attributes
def generate_password(self): if self.password_failed: self.logger.warning("Skipping resetting password since last attempt failed") return command = sarge.shell_format( "sfdx force:user:password:generate -u {0}", self.username ) self.logger.info( "Generating scratch org user password with command {}".format(command) ) p = sarge.Command( command, stdout=sarge.Capture(buffer_size=-1), stderr=sarge.Capture(buffer_size=-1), shell=True, ) p.run() stderr = io.TextIOWrapper(p.stderr).readlines() stdout = io.TextIOWrapper(p.stdout).readlines() if p.returncode: self.config["password_failed"] = True self.logger.warning( "Failed to set password: \n{}\n{}".format( "\n".join(stdout), "\n".join(stderr) ) )
Generates an org password with the sfdx utility.
def convert(self, value, view): if isinstance(value, NUMERIC_TYPES): return value else: self.fail( u'must be numeric, not {0}'.format(type(value).__name__), view, True )
Check that the value is an int or a float.
def dashboard(request): "Counts, aggregations and more!" end_time = now() start_time = end_time - timedelta(days=7) defaults = {'start': start_time, 'end': end_time} form = DashboardForm(data=request.GET or defaults) if form.is_valid(): start_time = form.cleaned_data['start'] end_time = form.cleaned_data['end'] try: obj = Visitor.objects.order_by('start_time')[0] track_start_time = obj.start_time except (IndexError, Visitor.DoesNotExist): track_start_time = now() warn_incomplete = (start_time < track_start_time) user_stats = Visitor.objects.user_stats(start_time, end_time) visitor_stats = Visitor.objects.stats(start_time, end_time) if TRACK_PAGEVIEWS: pageview_stats = Pageview.objects.stats(start_time, end_time) else: pageview_stats = None context = { 'form': form, 'track_start_time': track_start_time, 'warn_incomplete': warn_incomplete, 'user_stats': user_stats, 'visitor_stats': visitor_stats, 'pageview_stats': pageview_stats, } return render(request, 'tracking/dashboard.html', context)
Counts, aggregations and more!
def contains(self, other):
    """Return True when *other* lies entirely within this range (inclusive)."""
    within_low = self._start <= other.start
    within_high = other.end <= self._end
    return within_low and within_high
Determine whether this range contains another.
def _write_pidfile(self): LOGGER.debug('Writing pidfile: %s', self.pidfile_path) with open(self.pidfile_path, "w") as handle: handle.write(str(os.getpid()))
Write the pid file out with the process number in the pid file
def prep_folder(self, seq): itasser_dir = op.join(self.root_dir, self.id) if not op.exists(itasser_dir): os.makedirs(itasser_dir) tmp = {self.id: seq} fasta.write_fasta_file_from_dict(indict=tmp, outname='seq', outext='.fasta', outdir=itasser_dir) return itasser_dir
Take in a sequence string and prepares the folder for the I-TASSER run.
def storage_method(func):
    """Decorator: invoke *func* with ``self._root_storage`` substituted for ``self``."""
    def delegate(self, *args, **kwargs):
        return func(self._root_storage, *args, **kwargs)
    return delegate
Calls decorated method with VersionedStorage as self
def content_location(self) -> Optional[UnstructuredHeader]: try: return cast(UnstructuredHeader, self[b'content-location'][0]) except (KeyError, IndexError): return None
The ``Content-Location`` header.
def Flush(self): DB.DeleteSubjects(self.delete_subject_requests, sync=False) for req in self.delete_attributes_requests: subject, attributes, start, end = req DB.DeleteAttributes(subject, attributes, start=start, end=end, sync=False) for req in self.set_requests: subject, values, timestamp, replace, to_delete = req DB.MultiSet( subject, values, timestamp=timestamp, replace=replace, to_delete=to_delete, sync=False) if (self.delete_subject_requests or self.delete_attributes_requests or self.set_requests): DB.Flush() for queue, notifications in self.new_notifications: DB.CreateNotifications(queue, notifications) self.new_notifications = [] self.delete_subject_requests = [] self.set_requests = [] self.delete_attributes_requests = []
Flushing actually applies all the operations in the pool.
def _generate(): privateKey = rsa.generate_private_key( public_exponent=65537, key_size=4096, backend=default_backend()) return Key(privateKey).toString('openssh')
Generate a new SSH key pair.
def _deriv_hypot(x, y): r = np.hypot(x, y) df_dx = x / r df_dy = y / r return np.hstack([df_dx, df_dy])
Derivative of numpy hypot function
def create_subcommand_synopsis(self, parser): self.add_usage(parser.usage, parser._get_positional_actions(), None, prefix='') usage = self._format_usage(parser.usage, parser._get_positional_actions(), None, '') return self._bold(usage)
show usage with description for commands
def getVersion(self): if isinstance(self.version, versions.Version): return self.version if self.version: version = versions.Version(self.version) if version.baseVersion not in self.installedApp.versionMap(): raise runConfigs.lib.SC2LaunchError( "specified game version %s executable is not available.%s available: %s"%( \ version, os.linesep, " ".join(self.installedApp.listVersions()))) self.version = version else: path = self.installedApp.exec_path() vResult = self.installedApp.mostRecentVersion self.version = versions.Version(vResult) if self.debug: print(os.linesep.join([ "Game configuration detail:", " platform: %s"%(self.os), " app: %s"%(self.execPath), " version: %s"%(self.version)])) return self.version
the executable application's version
def _needle(fa, fb, needlefile, a, b, results): from Bio.Emboss.Applications import NeedleCommandline needle_cline = NeedleCommandline(asequence=fa, bsequence=fb, gapopen=10, gapextend=0.5, outfile=needlefile) stdout, stderr = needle_cline() nh = NeedleHeader(needlefile) FileShredder([fa, fb, needlefile], verbose=False) r = ["\t".join((a, b, nh.identity, nh.score))] results.extend(r)
Run single needle job
def before_scenario(context, scenario): context.directory = testfixtures.TempDirectory(create=True) context.old_cwd = os.getcwd() context.new_cwd = context.directory.path os.chdir(context.new_cwd)
Prepare a fresh environment for each scenario.
def remove(text, what, count=None, strip=False): return replace(text, what, '', count=count, strip=strip)
Like ``replace``, where ``new`` replacement is an empty string.
def remove_from_parent_sequence_map(assessment_part_admin_session, assessment_part_id): apls = get_assessment_part_lookup_session(runtime=assessment_part_admin_session._runtime, proxy=assessment_part_admin_session._proxy) apls.use_federated_bank_view() apls.use_unsequestered_assessment_part_view() child_part = apls.get_assessment_part(assessment_part_id) update_parent_sequence_map(child_part, delete=True)
Updates the child map of a simple sequence assessment assessment part to remove child part
def create_network_postcommit(self, context): network = context.current log_context("create_network_postcommit: network", network) segments = context.network_segments tenant_id = network['project_id'] self.create_tenant(tenant_id) self.create_network(network) self.create_segments(segments)
Provision the network on CVX
def save(self, path): to_save = {} for x in self.param_names: attr = self.__getattribute__(x) if type(attr) == np.ndarray: attr = [[float(x) for x in row] for row in attr] elif isinstance(attr, types.FunctionType): attr = attr.__name__ to_save[x] = attr json.dump(to_save, open(path, 'w'))
Save a SOM to a JSON file.
def need_geocoding(self):
    """Return True if any of the required address components is missing (falsy)."""
    # any() short-circuits exactly like the original break-on-first-miss
    # loop; the original iterated .items() but never used the mapped
    # component value, so iterating the keys alone is equivalent.
    return any(
        not getattr(self, attribute)
        for attribute in self.required_address_components
    )
Returns True if any of the required address components is missing
def resample(self,N,**kwargs): lovals = self.mu - np.absolute(rand.normal(size=N)*self.siglo) hivals = self.mu + np.absolute(rand.normal(size=N)*self.sighi) u = rand.random(size=N) hi = (u < float(self.sighi)/(self.sighi + self.siglo)) lo = (u >= float(self.sighi)/(self.sighi + self.siglo)) vals = np.zeros(N) vals[hi] = hivals[hi] vals[lo] = lovals[lo] return vals
Random resampling of the doublegauss distribution
def neighbors(self, key):
    """Return a dict mapping neighbor atom index to its connecting bond."""
    adjacent = self.graph[key]
    return {idx: attrs["bond"] for idx, attrs in adjacent.items()}
Return dict of neighbor atom index and connecting bond.
def queries(): query = request.form['query'] name = request.form.get('name') app.db.add_gemini_query(name, query) return redirect(request.referrer)
Store a new GEMINI query.
def bool(self, name): self._assert_is_string(name) frame = self._next_frame() if len(frame) != 1: raise MessageParserError("Expected exacty 1 byte for boolean value") val = frame != b"\x00" self.results.__dict__[name] = val return self
parse a boolean frame
def to_glyphs_blue_values(self, ufo, master): zones = [] blue_values = _pairs(ufo.info.postscriptBlueValues) other_blues = _pairs(ufo.info.postscriptOtherBlues) for y1, y2 in blue_values: size = y2 - y1 if y2 == 0: pos = 0 size = -size else: pos = y1 zones.append(self.glyphs_module.GSAlignmentZone(pos, size)) for y1, y2 in other_blues: size = y1 - y2 pos = y2 zones.append(self.glyphs_module.GSAlignmentZone(pos, size)) master.alignmentZones = sorted(zones, key=lambda zone: -zone.position)
Sets the GSFontMaster alignmentZones from the postscript blue values.
def recover(self, key, value):
    """Return the deserialized form of *value* for *key*.

    Lazily loads the type table on a first miss; raises ValueError when
    the key has no registered datatype even after reloading.
    """
    if key not in self._dtypes:
        self.read_types()
    if key not in self._dtypes:
        raise ValueError("Unknown datatype for {} and {}".format(key, value))
    deserialize = self._dtypes[key][2]
    return deserialize(value)
Get the deserialized value for a given key, and the serialized version.
def start2(self, yes): if yes: self.write_message(1) self.hints[3].used = True self.lamp_turns = 1000 self.oldloc2 = self.oldloc = self.loc = self.rooms[1] self.dwarves = [ Dwarf(self.rooms[n]) for n in (19, 27, 33, 44, 64) ] self.pirate = Pirate(self.chest_room) treasures = self.treasures self.treasures_not_found = len(treasures) for treasure in treasures: treasure.prop = -1 self.describe_location()
Display instructions if the user wants them.
def to_yaml(value) -> str: stream = yaml.io.StringIO() dumper = ConfigDumper(stream, default_flow_style=True, width=sys.maxsize) val = None try: dumper.open() dumper.represent(value) val = stream.getvalue().strip() dumper.close() finally: dumper.dispose() return val
Convert a given value to a YAML string.
def child_get(self, child, *prop_names): return [self.child_get_property(child, name) for name in prop_names]
Returns a list of child property values for the given names.
def color(string, name, style='normal', when='auto'): if name not in colors: from .text import oxford_comma raise ValueError("unknown color '{}'.\nknown colors are: {}".format( name, oxford_comma(["'{}'".format(x) for x in sorted(colors)]))) if style not in styles: from .text import oxford_comma raise ValueError("unknown style '{}'.\nknown styles are: {}".format( style, oxford_comma(["'{}'".format(x) for x in sorted(styles)]))) prefix = '\033[%d;%dm' % (styles[style], colors[name]) suffix = '\033[%d;%dm' % (styles['normal'], colors['normal']) color_string = prefix + string + suffix if when == 'always': return color_string elif when == 'auto': return color_string if sys.stdout.isatty() else string elif when == 'never': return string else: raise ValueError("when must be one of: 'always', 'auto', 'never'")
Change the color of the given string.
def on_release(self, event): 'on release we reset the press data' if self.press is None: return x0, y0, btn = self.press if btn == 1: color = 'r' elif btn == 2: color = 'b' btn = self.button_map[btn] self.set_seeds(y0, x0, self.actual_slice, btn) self.press = None self.update_slice()
on release we reset the press data
def available_gpus(): local_device_protos = device_lib.list_local_devices() return [x.name for x in local_device_protos if x.device_type == 'GPU']
List of GPU device names detected by TensorFlow.
def restart(self): BackgroundProcess.objects.filter(pk=self.process_id).update( last_update=now(), message='restarting..') timeout = time() + 60 self.kill_processes(signal.SIGTERM) while self.PROCESSES and time() < timeout: sleep(0.1) self.kill_processes(signal.SIGKILL) self.manage_processes() logger.debug('BD %d: restarted'%self.process_id)
restart all child processes
def _set_relationship_type(self, type_identifier, display_name=None, display_label=None, description=None, domain='Relationship'): if display_name is None: display_name = type_identifier if display_label is None: display_label = display_name if description is None: description = 'Relationship Type for ' + display_name self._relationship_type = Type(authority='DLKIT', namespace='relationship.Relationship', identifier=type_identifier, display_name=display_name, display_label=display_label, description=description, domain=domain)
Sets the relationship type
def create(appname, **kwargs): if appname in LinkFactory._class_dict: return LinkFactory._class_dict[appname].create(**kwargs) else: raise KeyError( "Could not create object associated to app %s" % appname)
Create a `Link` of a particular class, using the kwargs as options
def write_data(path, obj): with open_file_for_writing(path) as db: db.write(encode(obj)) return obj
Writes to a file and returns the updated file content.
def shutdown_server(self): self.log.debug('shutdown_server: in') if self.ensime and self.toggle_teardown: self.ensime.stop()
Shut down server if it is alive.
def update_from(self, mapping): for key, value in mapping.items(): if key in self: if isinstance(value, Parameter): value = value.value self[key].value = value
Updates the set of parameters from a mapping for keys that already exist
def _insert(self, trigram): words = list(map(self._sanitize, trigram)) key = self._WSEP.join(words[:2]).lower() next_word = words[2] self._db.setdefault(key, []) if next_word not in self._db[key]: self._db[key].append(next_word)
Insert a trigram in the DB
def process(self, metric): event = self._metric_to_riemann_event(metric) try: self.client.send_event(event) except Exception as e: self.log.error( "RiemannHandler: Error sending event to Riemann: %s", e)
Send a metric to Riemann.
def finddirs(root):
    """Return a list of all directories (recursively) under *root*."""
    found = []
    # idiom fix: the original's walk loop rebound the `root` parameter
    # (for root, dirs, files in os.walk(root)), shadowing the argument;
    # use distinct names for the per-directory values.
    for dirpath, dirnames, _filenames in os.walk(root):
        for d in dirnames:
            found.append(os.path.join(dirpath, d))
    return found
Return a list of all the directories under `root`
def getNode(self, name, **context): if name == self.name: return self else: return self.getBranch(name, **context).getNode(name, **context)
Return tree node found by name
def _process_slice(self, arg): start = arg.start stop = arg.stop step = arg.step nrows = self._info['nrows'] if step is None: step = 1 if start is None: start = 0 if stop is None: stop = nrows if start < 0: start = nrows + start if start < 0: raise IndexError("Index out of bounds") if stop < 0: stop = nrows + start + 1 if stop < start: stop = start if stop > nrows: stop = nrows return slice(start, stop, step)
process the input slice for use calling the C code
def existingInStore(cls, store, storeID, attrs): self = cls.__new__(cls) self.__justCreated = False self.__subinit__(__store=store, storeID=storeID, __everInserted=True) schema = self.getSchema() assert len(schema) == len(attrs), "invalid number of attributes" for data, (name, attr) in zip(attrs, schema): attr.loaded(self, data) self.activate() return self
Create and return a new instance from a row from the store.
def diff_safe(cls, value):
    """Return a value that can be safely stored as a diff.

    Frames are reduced to a readable string plus their id; lists and
    tuples are processed recursively (tuples come back as lists, as in
    the original); anything else passes through unchanged.
    """
    if isinstance(value, Frame):
        return {'_str': str(value), '_id': value._id}
    if isinstance(value, (list, tuple)):
        return [cls.diff_safe(item) for item in value]
    return value
Return a value that can be safely stored as a diff
def root_sections(h): roots = [] for section in h.allsec(): sref = h.SectionRef(sec=section) if sref.has_parent() < 0.9: roots.append(section) return roots
Returns a list of all sections that have no parent.
def calc_K_xx_wo_variance(self,X): K_xx = np.ones([X.shape[0],X.shape[0]]) for i,x in enumerate(X): for j,x2 in enumerate(X): for il,l in enumerate(self.lengthscale): idx = il*2 K_xx[i,j] *= self.k_xx(x[idx],x2[idx],x[idx+1],x2[idx+1],l) return K_xx
Calculates K_xx without the variance term
def tasks(self): if self._tasks is None: self._tasks = self.parent.tasks.tasks( tasklist="@default", showCompleted="true", showHidden="true", completedMin=self.since, completedMax=self.until) log.info(u"NB TASKS {0}".format(len(self._tasks))) return self._tasks
All completed tasks within specified time range
def load_json(file, new_root_dir=None, decompression=False): if decompression: with open(file, 'rb') as f: my_object = load(f, decompression=decompression) else: with open(file, 'r') as f: my_object = load(f, decompression=decompression) if new_root_dir: my_object.root_dir = new_root_dir return my_object
Load a JSON file using json_tricks
def adjust_from_utc_fields(*args, **kwargs): raw_ts = Timestamp(*args, **kwargs) offset = raw_ts.utcoffset() if offset is None or offset == timedelta(): return raw_ts adjusted = raw_ts + offset if raw_ts.precision is None: return adjusted return Timestamp( adjusted.year, adjusted.month, adjusted.day, adjusted.hour, adjusted.minute, adjusted.second, adjusted.microsecond, raw_ts.tzinfo, precision=raw_ts.precision, fractional_precision=raw_ts.fractional_precision )
Constructs a timestamp from UTC fields adjusted to the local offset if given.
def _consolidate(blocks): gkey = lambda x: x._consolidate_key grouper = itertools.groupby(sorted(blocks, key=gkey), gkey) new_blocks = [] for (_can_consolidate, dtype), group_blocks in grouper: merged_blocks = _merge_blocks(list(group_blocks), dtype=dtype, _can_consolidate=_can_consolidate) new_blocks = _extend_blocks(merged_blocks, new_blocks) return new_blocks
Merge blocks having same dtype, exclude non-consolidating blocks
def write(self, stream: typing.BinaryIO, be: bool) -> None: stream.write(b'RSTB') stream.write(_to_u32(len(self.crc32_map), be)) stream.write(_to_u32(len(self.name_map), be)) for crc32, size in sorted(self.crc32_map.items()): stream.write(_to_u32(crc32, be)) stream.write(_to_u32(size, be)) for name, size in sorted(self.name_map.items()): stream.write(struct.pack('128s', name.encode())) stream.write(_to_u32(size, be))
Write the RSTB to the specified stream.
def popup(text, title="Lackey Info"): root = tk.Tk() root.withdraw() tkMessageBox.showinfo(title, text)
Creates an info dialog with the specified text.
def spotlight_search_route(context, request): catalogs = [ CATALOG_ANALYSIS_REQUEST_LISTING, "portal_catalog", "bika_setup_catalog", "bika_catalog", "bika_catalog_worksheet_listing" ] search_results = [] for catalog in catalogs: search_results.extend(search(catalog=catalog)) items = map(get_brain_info, search_results) return { "count": len(items), "items": sorted(items, key=itemgetter("title")), }
The spotlight search route
def run(self): if self.init_sec: sleep(self.init_sec) self._functime = time() while self._running: start = time() self._func() self._functime += self.interval_sec if self._functime - start > 0: sleep(self._functime - start)
Start the recurring task.
def process_response(self, request, response): self._set_request_auth_type_metric(request) self._set_request_user_agent_metrics(request) self._set_request_referer_metric(request) self._set_request_user_id_metric(request) return response
Add metrics for various details of the request.
def _whole_basis_types(basis): all_types = set() for v in basis['elements'].values(): if 'electron_shells' in v: for sh in v['electron_shells']: all_types.add(sh['function_type']) if 'ecp_potentials' in v: for pot in v['ecp_potentials']: all_types.add(pot['ecp_type']) return sorted(list(all_types))
Get a list of all the types of features in this basis set.