code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def exit(self, pub_id, *node_ids):
    """Remove the given agents from a pub and decrement its occupancy.

    Agents notify the pub they want to leave; an agent's 'pub' entry is
    cleared only when it actually points at ``pub_id``.

    Raises:
        ValueError: if ``pub_id`` is not a known pub.
    """
    try:
        pub = self['pubs'][pub_id]
    except KeyError:
        raise ValueError('Pub {} is not available'.format(pub_id))
    for node_id in node_ids:
        node = self.get_agent(node_id)
        # NOTE(review): assumes each agent dict has a 'pub' key -- an agent
        # that never entered any pub would raise KeyError here; confirm.
        if pub_id == node['pub']:
            del node['pub']
            pub['occupancy'] -= 1
Agents will notify the pub they want to leave
def _UnregisterDatastoreArtifacts(self):
    """Remove artifacts that came from the datastore."""
    # Collect the names first so the dict is never mutated while iterating.
    datastore_names = [
        name for name, artifact in iteritems(self._artifacts)
        if artifact.loaded_from.startswith("datastore")
    ]
    for name in datastore_names:
        self._artifacts.pop(name)
Remove artifacts that came from the datastore.
def prepare(self, f): self.g = autograd.grad(f) self.h = autograd.hessian(f)
Accept an objective function for optimization.
def requires_target(self):
    """True if the card currently requires a target.

    Checks the card's play requirements in priority order: combo targeting,
    the various "target if available" requirements (plain, conditioned on a
    dragon in hand, or on minimum friendly minion/secret counts), and
    finally the unconditional REQ_TARGET_TO_PLAY.
    """
    if self.has_combo and PlayReq.REQ_TARGET_FOR_COMBO in self.requirements:
        # Combo cards only need a target while the combo is active.
        if self.controller.combo:
            return True
    if PlayReq.REQ_TARGET_IF_AVAILABLE in self.requirements:
        return bool(self.play_targets)
    if PlayReq.REQ_TARGET_IF_AVAILABLE_AND_DRAGON_IN_HAND in self.requirements:
        if self.controller.hand.filter(race=Race.DRAGON):
            return bool(self.play_targets)
    req = self.requirements.get(PlayReq.REQ_TARGET_IF_AVAILABLE_AND_MINIMUM_FRIENDLY_MINIONS)
    if req is not None:
        if len(self.controller.field) >= req:
            return bool(self.play_targets)
    req = self.requirements.get(PlayReq.REQ_TARGET_IF_AVAILABLE_AND_MINIMUM_FRIENDLY_SECRETS)
    if req is not None:
        if len(self.controller.secrets) >= req:
            return bool(self.play_targets)
    return PlayReq.REQ_TARGET_TO_PLAY in self.requirements
True if the card currently requires a target
def md5sum(filename, use_sudo=False):
    """Return md5sum of remote file.

    Runs ``md5sum`` on the remote host (fabric ``run``, or ``sudo`` when
    ``use_sudo`` is true) and returns the first whitespace-separated token
    of its output, i.e. the hex digest.

    NOTE(review): ``filename`` is interpolated into a single-quoted shell
    string; a filename containing a single quote breaks the command --
    confirm callers only pass trusted paths.
    """
    runner = sudo if use_sudo else run
    with hide('commands'):
        return runner("md5sum '{}'".format(filename)).split()[0]
Return md5sum of remote file
def results_history(history_log, no_color):
    """Display a list of ipa test results history."""
    try:
        with open(history_log, 'r') as log_file:
            history_lines = log_file.readlines()
    except Exception as error:
        echo_style(
            'Unable to process results history log: %s' % error,
            no_color,
            fg='red'
        )
        sys.exit(1)
    # Number entries from the total down to 1 so newest lines rank highest.
    total = len(history_lines)
    for position, line in enumerate(history_lines):
        click.echo('{} {}'.format(total - position, line), nl=False)
Display a list of ipa test results history.
def remote_delete(self, remote_path, r_st): if S_ISDIR(r_st.st_mode): for item in self.sftp.listdir_attr(remote_path): full_path = path_join(remote_path, item.filename) self.remote_delete(full_path, item) self.sftp.rmdir(remote_path) else: try: self.sftp.remove(remote_path) except FileNotFoundError as e: self.logger.error( "error while removing {}. trace: {}".format(remote_path, e) )
Remove the remote directory node.
def write_bus_data(self, file): bus_sheet = self.book.add_sheet("Buses") for i, bus in enumerate(self.case.buses): for j, attr in enumerate(BUS_ATTRS): bus_sheet.write(i, j, getattr(bus, attr))
Writes bus data to an Excel spreadsheet.
def _users_watching(self, **kwargs): return self._users_watching_by_filter(object_id=self.instance.pk, **kwargs)
Return users watching this instance.
def section_tortuosity(neurites, neurite_type=NeuriteType.all): return map_sections(sectionfunc.section_tortuosity, neurites, neurite_type=neurite_type)
section tortuosities in a collection of neurites
def _create_epoch_data(self, streams: Optional[Iterable[str]]=None) -> EpochData: if streams is None: streams = [self._train_stream_name] + self._extra_streams return OrderedDict([(stream_name, OrderedDict()) for stream_name in streams])
Create empty epoch data double dict.
def from_memory(cls, model_data: dict, run_number: int, project_dir: str, continue_training=False, seed: int = None, device: str = 'cuda', params=None): return ModelConfig( filename="[memory]", configuration=model_data, run_number=run_number, project_dir=project_dir, continue_training=continue_training, seed=seed, device=device, parameters=params )
Create model config from supplied data
def delete(self): if self.jenkins_host.has_job(self.name): LOGGER.info("deleting {0}...".format(self.name)) self.jenkins_host.delete_job(self.name)
delete the jenkins job, if it exists
def compute_fw_at_frac_max_1d_simple(Y, xc, X=None, f=0.5): yy = np.asarray(Y) if yy.ndim != 1: raise ValueError('array must be 1-d') if yy.size == 0: raise ValueError('array is empty') if X is None: xx = np.arange(yy.shape[0]) else: xx = X xpix = coor_to_pix_1d(xc - xx[0]) try: peak = yy[xpix] except IndexError: raise ValueError('peak is out of array') fwhm_x, _codex, _msgx = compute_fwhm_1d(xx, yy - f * peak, xc, xpix) return peak, fwhm_x
Compute the full width at fraction f of the maximum
def format_file_path(filepath):
    """Formats a path as absolute and with the correct platform separator.

    Backslashes are normalised to forward slashes, Windows drive letters are
    capitalised, and Windows network mounts get their leading slash restored
    (``os.path.abspath`` collapses the leading double slash).

    Any failure leaves the path unchanged -- this is deliberately best-effort.
    """
    try:
        # Must be tested before abspath(), which strips one of the two
        # leading slashes of a network mount path.
        is_windows_network_mount = WINDOWS_NETWORK_MOUNT_PATTERN.match(filepath)
        filepath = os.path.realpath(os.path.abspath(filepath))
        filepath = re.sub(BACKSLASH_REPLACE_PATTERN, '/', filepath)
        is_windows_drive = WINDOWS_DRIVE_PATTERN.match(filepath)
        if is_windows_drive:
            filepath = filepath.capitalize()
        if is_windows_network_mount:
            filepath = '/' + filepath
    except Exception:
        # Bug fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Still best-effort for ordinary errors.
        pass
    return filepath
Formats a path as absolute and with the correct platform separator.
def parse_extra(self, extra): if extra.startswith('/'): extra = extra[1:] r = IIIFRequest(identifier='dummy', api_version=self.api_version) r.parse_url(extra) if (r.info): raise IIIFStaticError("Attempt to specify Image Information in extras.") return(r)
Parse extra request parameters to IIIFRequest object.
def free_resources(self): if not self.is_stopped(): logger.info("Freeing resources: %s" % self.get_transfer_id()) self.stop(True)
call stop to free up resources
def namer(cls, imageUrl, pageUrl):
    """Use page URL sequence which is apparently increasing."""
    sequence_number = pageUrl.rsplit('/', 1)[-1]
    extension = imageUrl.rsplit('.', 1)[1]
    return "thethinhline-%s.%s" % (sequence_number, extension)
Use page URL sequence which is apparently increasing.
def _set_containable_view(self, session): for obj_name in self._containable_views: if self._containable_views[obj_name] == SEQUESTERED: try: getattr(session, 'use_sequestered_' + obj_name + '_view')() except AttributeError: pass else: try: getattr(session, 'use_unsequestered_' + obj_name + '_view')() except AttributeError: pass
Sets the underlying containable views to match current view
def configure_discovery(graph): ns = Namespace( subject=graph.config.discovery_convention.name, ) convention = DiscoveryConvention(graph) convention.configure(ns, discover=tuple()) return ns.subject
Build a singleton endpoint that provides a link to all search endpoints.
def ask_int(msg="Enter an integer", dft=None, vld=None, hlp=None):
    """Prompts the user for an integer."""
    if not vld:
        # Default validator: any int is accepted.
        vld = [int]
    return ask(msg, dft=dft, vld=vld, fmt=partial(cast, typ=int), hlp=hlp)
Prompts the user for an integer.
def relabel(self, qubits: Qubits) -> 'Channel': chan = copy(self) chan.vec = chan.vec.relabel(qubits) return chan
Return a copy of this channel with new qubits
def _support_directory(): from os.path import join, dirname, abspath return join(dirname(abspath(__file__)), 'support_files')
Get the path of the support_files directory
def read(ctx, input, output): log.info('chemdataextractor.read') log.info('Reading %s' % input.name) doc = Document.from_file(input) for element in doc.elements: output.write(u'%s : %s\n=====\n' % (element.__class__.__name__, six.text_type(element)))
Output processed document elements.
def run(self, *args, **kw): if self._runFunc is not None: if 'mode' in kw: kw.pop('mode') if '_save' in kw: kw.pop('_save') return self._runFunc(self, *args, **kw) else: raise taskpars.NoExecError('No way to run task "'+self.__taskName+\ '". You must either override the "run" method in your '+ \ 'ConfigObjPars subclass, or you must supply a "run" '+ \ 'function in your package.')
This may be overridden by a subclass.
def pddet(A):
    """Determinant of a positive definite matrix, only symmetric matricies though.

    Uses the Cholesky factor L: log det(A) = 2 * sum(log(diag(L))).
    """
    chol_factor = jitchol(A)
    return 2 * sum(np.log(np.diag(chol_factor)))
Determinant of a positive definite matrix, only symmetric matricies though
def _call_cmd_line(self): try: logging.info("Calling Popen with: {}".format(self.args)) p = Popen(self.args, stdin=PIPE, stdout=PIPE, stderr=PIPE) except OSError: raise(RuntimeError("No such command found in PATH")) self.stdout, self.stderr = p.communicate("\n".encode()) self.stdout = self.stdout.decode("utf-8") self.stderr = self.stderr.decode("utf-8") self.returncode = p.returncode
Run the command line tool.
def add(self, header, data):
    """Appends a new entry to the file.

    A '>' is prepended to ``header`` when it is missing, so stored headers
    are always FASTA-style. Bug fix: an empty header string no longer
    raises IndexError (the original indexed ``header[0]`` directly).
    """
    if header.startswith('>'):
        self.data.append((header, data))
    else:
        self.data.append(('>' + header, data))
appends a new entry to the file
def update_registered_subject_from_model_on_post_save(sender, instance, raw, created, using, **kwargs): if not raw and not kwargs.get('update_fields'): try: instance.registration_update_or_create() except AttributeError as e: if 'registration_update_or_create' not in str(e): raise AttributeError(str(e))
Updates RegisteredSubject from models using UpdatesOrCreatesRegistrationModelMixin.
def _get_text(self): device = self._get_device() if device is None: return (UNKNOWN_DEVICE, self.py3.COLOR_BAD) if not device["isReachable"] or not device["isTrusted"]: return ( self.py3.safe_format( self.format_disconnected, {"name": device["name"]} ), self.py3.COLOR_BAD, ) battery = self._get_battery() (charge, bat_status, color) = self._get_battery_status(battery) notif = self._get_notifications() (notif_size, notif_status) = self._get_notifications_status(notif) return ( self.py3.safe_format( self.format, dict( name=device["name"], charge=charge, bat_status=bat_status, notif_size=notif_size, notif_status=notif_status, ), ), color, )
Get the current metadatas
def node_labels(node_labels, node_indices):
    """Validate that there is a label for each node.

    Raises ValueError when the label count differs from the node count, or
    when any label appears more than once.
    """
    label_count = len(node_labels)
    if label_count != len(node_indices):
        raise ValueError("Labels {0} must label every node {1}.".format(
            node_labels, node_indices))
    distinct_labels = set(node_labels)
    if label_count != len(distinct_labels):
        raise ValueError("Labels {0} must be unique.".format(node_labels))
Validate that there is a label for each node.
def _guess_concat(data): return { type(u''): u''.join, type(b''): concat_bytes, }.get(type(data), list)
Guess concat function from given data
def for_point(cls, point, zoom): latitude, longitude = point.latitude_longitude return cls.for_latitude_longitude(latitude=latitude, longitude=longitude, zoom=zoom)
Creates a tile for given point
def event_handler(self): if not self._notify_queue: LOG.error('event_handler: no notification queue for %s', self._service_name) return LOG.debug('calling event handler for %s', self) self.start() self.wait()
Wait on queue for listening to the events.
def GetFlagSuggestions(attempt, longopt_list): if len(attempt) <= 2 or not longopt_list: return [] option_names = [v.split('=')[0] for v in longopt_list] distances = [(_DamerauLevenshtein(attempt, option[0:len(attempt)]), option) for option in option_names] distances.sort(key=lambda t: t[0]) least_errors, _ = distances[0] if least_errors >= _SUGGESTION_ERROR_RATE_THRESHOLD * len(attempt): return [] suggestions = [] for errors, name in distances: if errors == least_errors: suggestions.append(name) else: break return suggestions
Get helpful similar matches for an invalid flag.
def create_api_ipv6(self): return ApiIPv6( self.networkapi_url, self.user, self.password, self.user_ldap)
Get an instance of Api IPv6 services facade.
def print_raw_data(raw_data, start_index=0, limit=200, flavor='fei4b', index_offset=0, select=None, tdc_trig_dist=False, trigger_data_mode=0):
    """Printing FEI4 raw data array for debugging.

    Decodes each 32-bit raw data word into a FEI4Record and prints the
    selected record types (all types by default) until ``limit`` words have
    been printed. Returns the number of words printed.

    NOTE: uses the Python 2 print statement -- this snippet is Python 2 only.
    """
    if not select:
        select = ['DH', 'TW', "AR", "VR", "SR", "DR", 'TDC', 'UNKNOWN FE WORD', 'UNKNOWN WORD']
    total_words = 0
    for index in range(start_index, raw_data.shape[0]):
        dw = FEI4Record(raw_data[index], chip_flavor=flavor, tdc_trig_dist=tdc_trig_dist, trigger_data_mode=trigger_data_mode)
        if dw in select:
            # Word index, word as decimal, the 4 bytes as binary, then the decoded record.
            print index + index_offset, '{0:12d} {1:08b} {2:08b} {3:08b} {4:08b}'.format(raw_data[index], (raw_data[index] & 0xFF000000) >> 24, (raw_data[index] & 0x00FF0000) >> 16, (raw_data[index] & 0x0000FF00) >> 8, (raw_data[index] & 0x000000FF) >> 0), dw
            total_words += 1
            if limit and total_words >= limit:
                break
    return total_words
Printing FEI4 raw data array for debugging.
def shadow(self,new_root,visitor) : for n in self.walk() : sn = n.clone(new_root) if n.isdir() : visitor.process_dir(n,sn) else : visitor.process_file(n,sn)
Runs through the query, creating a clone directory structure in the new_root. Then applies process
async def stop_bridges(self): for task in self.sleep_tasks: task.cancel() for bridge in self.bridges: bridge.stop()
Stop all sleep tasks to allow bridges to end.
def _load_isd_file_metadata(download_path, isd_station_metadata): isd_inventory = pd.read_csv( os.path.join(download_path, "isd-inventory.csv"), dtype=str ) station_keep = [usaf in isd_station_metadata for usaf in isd_inventory.USAF] isd_inventory = isd_inventory[station_keep] year_keep = isd_inventory.YEAR > "2005" isd_inventory = isd_inventory[year_keep] metadata = {} for (usaf_station, year), group in isd_inventory.groupby(["USAF", "YEAR"]): if usaf_station not in metadata: metadata[usaf_station] = {"usaf_id": usaf_station, "years": {}} metadata[usaf_station]["years"][year] = [ { "wban_id": row.WBAN, "counts": [ row.JAN, row.FEB, row.MAR, row.APR, row.MAY, row.JUN, row.JUL, row.AUG, row.SEP, row.OCT, row.NOV, row.DEC, ], } for i, row in group.iterrows() ] return metadata
Collect data counts for isd files.
def reset_window_layout(self): answer = QMessageBox.warning(self, _("Warning"), _("Window layout will be reset to default settings: " "this affects window position, size and dockwidgets.\n" "Do you want to continue?"), QMessageBox.Yes | QMessageBox.No) if answer == QMessageBox.Yes: self.setup_layout(default=True)
Reset window layout to default
def initialize_dynamic(obj): dmaps = obj.traverse(lambda x: x, specs=[DynamicMap]) for dmap in dmaps: if dmap.unbounded: continue if not len(dmap): dmap[dmap._initial_key()]
Initializes all DynamicMap objects contained by the object
def evaluate(self): if DEVELOP: internal_assert(not self.been_called, "inefficient reevaluation of action " + self.name + " with tokens", self.tokens) self.been_called = True evaluated_toks = evaluate_tokens(self.tokens) if logger.tracing: logger.log_trace(self.name, self.original, self.loc, evaluated_toks, self.tokens) try: return _trim_arity(self.action)( self.original, self.loc, evaluated_toks, ) except CoconutException: raise except (Exception, AssertionError): traceback.print_exc() raise CoconutInternalException("error computing action " + self.name + " of evaluated tokens", evaluated_toks)
Get the result of evaluating the computation graph at this node.
def web(port, debug=False, theme="modern", ssh_config=None): from storm import web as _web _web.run(port, debug, theme, ssh_config)
Starts the web UI.
def _from_dict(cls, _dict): args = {} if 'location' in _dict: args['location'] = Location._from_dict(_dict.get('location')) if 'text' in _dict: args['text'] = _dict.get('text') if 'types' in _dict: args['types'] = [ TypeLabel._from_dict(x) for x in (_dict.get('types')) ] if 'categories' in _dict: args['categories'] = [ Category._from_dict(x) for x in (_dict.get('categories')) ] if 'attributes' in _dict: args['attributes'] = [ Attribute._from_dict(x) for x in (_dict.get('attributes')) ] return cls(**args)
Initialize a Element object from a json dictionary.
def outitem(title, elems, indent=4):
    """Output formatted as list item.

    Prints ``title``, then each ``(key, value)`` pair from ``elems`` on its
    own indented line with keys left-justified to a common width, then a
    trailing blank line.

    Bug fix: an empty ``elems`` no longer crashes -- ``max()`` over an empty
    sequence raised ValueError before; ``default=0`` makes it a no-op list.
    """
    out(title)
    max_key_len = max((len(key) for key, _ in elems), default=0) + 1
    for key, val in elems:
        key_spaced = ('%s:' % key).ljust(max_key_len)
        out('%s%s %s' % (indent * ' ', key_spaced, val))
    out()
Output formatted as list item.
def fencekml(self, layername): if layername.startswith('"') and layername.endswith('"'): layername = layername[1:-1] for layer in self.allayers: if layer.key == layername: self.fenceloader.clear() if len(layer.points) < 3: return self.fenceloader.target_system = self.target_system self.fenceloader.target_component = self.target_component bounds = mp_util.polygon_bounds(layer.points) (lat, lon, width, height) = bounds center = (lat+width/2, lon+height/2) self.fenceloader.add_latlon(center[0], center[1]) for lat, lon in layer.points: self.fenceloader.add_latlon(lat, lon) self.send_fence()
set a layer as the geofence
def by_population(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.population.name, ascending=False, returns=DEFAULT_LIMIT): return self.query( population_lower=lower, population_upper=upper, sort_by=sort_by, zipcode_type=zipcode_type, ascending=ascending, returns=returns, )
Search zipcode information by population range.
def obj2bunch(data, commdct, obj): dtls = data.dtls key = obj[0].upper() key_i = dtls.index(key) abunch = makeabunch(commdct, obj, key_i) return abunch
make a new bunch object using the data object
def fnFromDate(self, date):
    """Get filename from date.

    Builds ``<basepath>/html/comics-YYYYMMDD.html`` as an absolute path.
    """
    basename = time.strftime('comics-%Y%m%d', date)
    relative = os.path.join(self.basepath, 'html', basename + ".html")
    return os.path.abspath(relative)
Get filename from date.
def search(term, exact=False, rows=1e6):
    """Searches ChEBI via ols.

    Args:
        term: free-text query. Bug fix: the term is now URL-encoded before
            being placed in the query string -- previously a term containing
            a space, '&' or '#' produced a malformed request.
        exact: whether to request exact matching.
        rows: maximum number of results to request.

    Returns:
        list of ChebiEntity, one per result's obo_id.
    """
    from urllib.parse import quote
    url = 'https://www.ebi.ac.uk/ols/api/search?ontology=chebi' + \
        '&exact=' + str(exact) + '&q=' + quote(term) + \
        '&rows=' + str(int(rows))
    response = requests.get(url)
    data = response.json()
    return [ChebiEntity(doc['obo_id']) for doc in data['response']['docs']]
Searches ChEBI via ols.
def _reverse_rounding_method(method):
    """Reverse meaning of ``method`` between positive and negative."""
    # UP <-> DOWN and HALF_UP <-> HALF_DOWN swap; identity comparison kept
    # deliberately, matching the original semantics.
    opposite_pairs = (
        (RoundingMethods.ROUND_UP, RoundingMethods.ROUND_DOWN),
        (RoundingMethods.ROUND_HALF_UP, RoundingMethods.ROUND_HALF_DOWN),
    )
    for first, second in opposite_pairs:
        if method is first:
            return second
        if method is second:
            return first
    if method in (RoundingMethods.ROUND_TO_ZERO, RoundingMethods.ROUND_HALF_ZERO):
        # Symmetric about zero: these are their own reverse.
        return method
    raise BasesAssertError('unknown method')
Reverse meaning of ``method`` between positive and negative.
def _file_local_or_remote(f, get_retriever): if os.path.exists(f): return f integration, config = get_retriever.integration_and_config(f) if integration: return integration.file_exists(f, config)
Check for presence of a local or remote file.
def service_account_email(self): if self._service_account_id is None: self._service_account_id = app_identity.get_service_account_name() return self._service_account_id
The service account email.
def nextSunset(jd, lat, lon): return swe.sweNextTransit(const.SUN, jd, lat, lon, 'SET')
Returns the JD of the next sunset.
def causal_nexus(network, before_state, after_state, direction=Direction.BIDIRECTIONAL): validate.is_network(network) log.info("Calculating causal nexus...") result = nexus(network, before_state, after_state, direction) if result: result = max(result) else: null_transition = Transition( network, before_state, after_state, (), ()) result = _null_ac_sia(null_transition, direction) log.info("Finished calculating causal nexus.") log.debug("RESULT: \n%s", result) return result
Return the causal nexus of the network.
def validate_supersmoother_bass(): x, y = smoother_friedman82.build_sample_smoother_problem_friedman82() plt.figure() plt.plot(x, y, '.', label='Data') for bass in range(0, 10, 3): smooth = supersmoother.SuperSmoother() smooth.set_bass_enhancement(bass) smooth.specify_data_set(x, y) smooth.compute() plt.plot(x, smooth.smooth_result, '.', label='Bass = {0}'.format(bass)) finish_plot()
Validate the supersmoother with extra bass.
def _validate(self): if self.f_labels is None: raise NotFittedError('FeatureRepMix') if not self.transformers: return names, transformers, _ = zip(*self.transformers) self._validate_names(names) for trans in transformers: if not isinstance(trans, FeatureRep): raise TypeError("All transformers must be an instance of FeatureRep." " '%s' (type %s) doesn't." % (trans, type(trans)))
Internal function to validate the transformer before applying all internal transformers.
def print_docs(self): arg = self.opts.get('fun', None) docs = super(Runner, self).get_docs(arg) for fun in sorted(docs): display_output('{0}:'.format(fun), 'text', self.opts) print(docs[fun])
Print out the documentation!
def tokens_create(name, user, scopes, internal): token = Token.create_personal( name, user.id, scopes=scopes, is_internal=internal) db.session.commit() click.secho(token.access_token, fg='blue')
Create a personal OAuth token.
def match(self, filename, line, codes): if self.regex_match_any(line, codes): if self._vary_codes: self.codes = tuple([codes[-1]]) return True
Match rule and set attribute codes.
def uppercase(self, value):
    """Validate and set the uppercase flag.

    Raises:
        TypeError: when ``value`` is not a bool.
    """
    if isinstance(value, bool):
        self._uppercase = value
    else:
        raise TypeError('uppercase attribute must be a logical type.')
Validate and set the uppercase flag.
def _find_mime_parameters(tokenlist, value): while value and value[0] != ';': if value[0] in PHRASE_ENDS: tokenlist.append(ValueTerminal(value[0], 'misplaced-special')) value = value[1:] else: token, value = get_phrase(value) tokenlist.append(token) if not value: return tokenlist.append(ValueTerminal(';', 'parameter-separator')) tokenlist.append(parse_mime_parameters(value[1:]))
Do our best to find the parameters in an invalid MIME header
def notify_step_begin_end(func): @ft.wraps(func) def wrapper(self, *args, **kwargs): cls = self.__class__ on_step_begin = cls.ON_STEP_BEGIN on_step_end = cls.ON_STEP_END for begin_listener in on_step_begin: begin_listener(self) res = func(self, *args, **kwargs) for end_listener in on_step_end: end_listener(self, func) return res return wrapper
Print the beginning and the end of a `func`.
def check_token(self, respond):
    """Check if the user's token is valid.

    A 401 response triggers a fresh token request and returns False so the
    caller can retry; any other status counts as valid.
    """
    if respond.status_code != 401:
        return True
    self.credential.obtain_token(config=self.config)
    return False
Check is the user's token is valid
def ListMigrationsToProcess(migrations_root, current_migration_number ): migrations = [] for m in os.listdir(migrations_root): if (current_migration_number is None or _MigrationFilenameToInt(m) > current_migration_number): migrations.append(m) return sorted(migrations, key=_MigrationFilenameToInt)
Lists filenames of migrations with numbers bigger than a given one.
def map(self,index_name, index_type, map_value): request = self.session url = 'http://%s:%s/%s/%s/_mapping' % (self.host, self.port, index_name, index_type) content = { index_type : { 'properties' : map_value } } if self.verbose: print content response = request.put(url,content) return response
Enable a specific map for an index and type
def update_pass(user_id, newpass):
    """Update the password of a user.

    Returns a dict with 'success' (True once the update executed) and a
    '00' code.

    NOTE(review): the password is stored as an unsalted MD5 hash
    (``tools.md5``) -- MD5 is unsuitable for password storage; a dedicated
    password hash (bcrypt/scrypt/argon2) should be used. Flagged only, not
    changed, to stay compatible with the existing login check.
    """
    out_dic = {'success': False, 'code': '00'}
    entry = TabMember.update(user_pass=tools.md5(newpass)).where(TabMember.uid == user_id)
    entry.execute()
    out_dic['success'] = True
    return out_dic
Update the password of a user.
def namedtuple_storable(namedtuple, *args, **kwargs): return default_storable(namedtuple, namedtuple._fields, *args, **kwargs)
Storable factory for named tuples.
def _reset_suffix_links(self): self._suffix_links_set = False for current, _parent in self.dfs(): current.suffix = None current.dict_suffix = None current.longest_prefix = None
Reset all suffix links in all nodes in this trie.
def cats(self, cats): sources = [] for cat in coerce_to_list(cats): sources.extend([entry for entry in cat._entries.values() if entry._container != 'catalog']) self.items = sources
Set sources from a list of cats
def master(self, name): fut = self.execute(b'MASTER', name, encoding='utf-8') return wait_convert(fut, parse_sentinel_master)
Returns a dictionary containing the specified masters state.
def store_password_in_keyring(credential_id, username, password=None):
    """Interactively prompts user for a password and stores it in system keyring.

    When ``password`` is None the user is prompted via getpass; an empty or
    EOF-terminated answer raises RuntimeError (not caught here, so it
    propagates). Returns False when the ``keyring`` module is not
    installed; otherwise implicitly returns None. Save failures are only
    logged at debug level -- storage is best-effort.
    """
    try:
        import keyring
        import keyring.errors
        if password is None:
            prompt = 'Please enter password for {0}: '.format(credential_id)
            try:
                password = getpass.getpass(prompt)
            except EOFError:
                password = None
            if not password:
                raise RuntimeError('Invalid password provided.')
        try:
            _save_password_in_keyring(credential_id, username, password)
        except keyring.errors.PasswordSetError as exc:
            log.debug('Problem saving password in the keyring: %s', exc)
    except ImportError:
        log.error('Tried to store password in keyring, but no keyring module is installed')
        return False
Interactively prompts user for a password and stores it in system keyring
def validate_args(self):
    """Input validators for this rule type.

    Checks that ``extra_control_fields`` (when given) is a list of
    ``(field_name, value)`` tuples and that ``package_name`` matches the
    Debian package-name syntax.
    """
    base.BaseTarget.validate_args(self)
    params = self.params
    if params['extra_control_fields'] is not None:
        assert isinstance(params['extra_control_fields'], list), (
            'extra_control_fields must be a list of tuples, not %s' % type(
                params['extra_control_fields']))
        for elem in params['extra_control_fields']:
            # Bug fix: the message always promised 2-element tuples but the
            # check tested len == 1, rejecting every valid (name, value) pair.
            assert (isinstance(elem, tuple) and len(elem) == 2), (
                'extra_control_fields must be a list of 2-element tuples. '
                'Invalid contents: %s' % elem)
    pkgname_re = '^[a-z][a-z0-9+-.]+'
    assert re.match(pkgname_re, params['package_name']), (
        'Invalid package name: %s. Must match %s' % (
            params['package_name'], pkgname_re))
Input validators for this rule type.
async def playing(self): if self._setstate is None: await self.protocol.start() if self._setstate is None: return MrpPlaying(protobuf.SetStateMessage(), None) return MrpPlaying(self._setstate, self._nowplaying)
Return what is currently playing.
def hashes(self): for url_variant in self.url_permutations(self.canonical): url_hash = self.digest(url_variant) yield url_hash
Hashes of all possible permutations of the URL in canonical form
def artifact_path(cls, project, incident, artifact): return google.api_core.path_template.expand( "projects/{project}/incidents/{incident}/artifacts/{artifact}", project=project, incident=incident, artifact=artifact, )
Return a fully-qualified artifact string.
def _on_msg(self, msg): data = msg['content']['data'] if 'callback' in data: guid = data['callback'] callback = callback_registry[guid] args = data['arguments'] args = [self.deserialize(a) for a in args] index = data['index'] results = callback(*args) return self.serialize(self._send('return', index=index, results=results)) else: index = data['index'] immutable = data['immutable'] value = data['value'] if index in self._callbacks: self._callbacks[index].resolve({ 'immutable': immutable, 'value': value }) del self._callbacks[index]
Handle messages from the front-end
def json_post(methodname, rtype, key): return compose( reusable, map_return(registry(rtype), itemgetter(key)), basic_interaction, map_yield(partial(_json_as_post, methodname)), oneyield, )
decorator factory for json POST queries
def write(self, value): self.get_collection().update_one( {'_id': self._document_id}, {'$set': {self._path: value}}, upsert=True )
Write value to the target
def nfa(self): finalstate = State(final=True) nextstate = finalstate for tokenexpr in reversed(self): state = tokenexpr.nfa(nextstate) nextstate = state return NFA(state)
convert the expression into an NFA
def completedefault(self, text, line, begidx, endidx): if self.argparser_completer and any((line.startswith(x) for x in self.argparse_names())): self.argparser_completer.rl_complete(line, 0) return [x[begidx:] for x in self.argparser_completer._rl_matches] else: return []
Accessing the argcompleter if available.
def count_lines_to_next_cell(cell_end_marker, next_cell_start, total, explicit_eoc):
    """How many blank lines between end of cell marker and next cell?"""
    if cell_end_marker >= total:
        # The cell ends at (or past) the end of the document.
        return 1
    blank_lines = next_cell_start - cell_end_marker
    if explicit_eoc:
        # The explicit end-of-cell marker occupies one of those lines.
        blank_lines -= 1
    if next_cell_start >= total:
        # Last cell: compensate for the missing next-cell marker line.
        blank_lines += 1
    return blank_lines
How many blank lines between end of cell marker and next cell?
def getbit(self, key, offset): key = self._encode(key) index, bits, mask = self._get_bits_and_offset(key, offset) if index >= len(bits): return 0 return 1 if (bits[index] & mask) else 0
Returns the bit value at ``offset`` in ``key``.
def compute(self, runner_results, setup=False, poll=False, ignore_errors=False):
    """Walk through all results and increment per-host stats.

    Classifies each contacted host's result as failure, skipped, changed or
    ok (polled async tasks only count as ok once finished), and counts
    unreachable hosts under 'dark'.
    """
    for (host, value) in runner_results.get('contacted', {}).iteritems():
        # Failed flag or nonzero return code counts as a failure unless
        # errors are being ignored.
        if not ignore_errors and (('failed' in value and bool(value['failed'])) or ('rc' in value and value['rc'] != 0)):
            self._increment('failures', host)
        elif 'skipped' in value and bool(value['skipped']):
            self._increment('skipped', host)
        elif 'changed' in value and bool(value['changed']):
            if not setup and not poll:
                self._increment('changed', host)
            # A changed host is still ok.
            self._increment('ok', host)
        else:
            if not poll or ('finished' in value and bool(value['finished'])):
                self._increment('ok', host)
    for (host, value) in runner_results.get('dark', {}).iteritems():
        # 'dark' hosts could not be contacted at all.
        self._increment('dark', host)
walk through all results and increment stats
def _get_spaces(self): guid = self.api.config.get_organization_guid() uri = '/v2/organizations/%s/spaces' % (guid) return self.api.get(uri)
Get the marketplace services.
def shard_query_generator(self, query): shard_query = query.copy() for shard in self._stores: cursor = shard.query(shard_query) for item in cursor: yield item shard_query.offset = max(shard_query.offset - cursor.skipped, 0) if shard_query.limit: shard_query.limit = max(shard_query.limit - cursor.returned, 0) if shard_query.limit <= 0: break
A generator that queries each shard in sequence.
def pop_fbo(self): fbo = self._fb_stack.pop() fbo[0].deactivate() self.pop_viewport() if len(self._fb_stack) > 0: old_fbo = self._fb_stack[-1] old_fbo[0].activate() self._update_transforms() return fbo
Pop an FBO from the stack.
def textgetter(path: str, *, default: T=NO_DEFAULT, strip: bool=False) -> t.Callable[[Element], t.Union[str, T]]: find = compose( str.strip if strip else identity, partial(_raise_if_none, exc=LookupError(path)), methodcaller('findtext', path) ) return (find if default is NO_DEFAULT else lookup_defaults(find, default))
shortcut for making an XML element text getter
def _assemble_gef(stmt):
    """Assemble Gef statements into text."""
    gef_text = _assemble_agent_str(stmt.gef)
    ras_text = _assemble_agent_str(stmt.ras)
    return _make_sentence(gef_text + ' is a GEF for ' + ras_text)
Assemble Gef statements into text.
def price_range(self): ordered = self.variants.order_by('base_price') if ordered: return ordered.first().price, ordered.last().price else: return None, None
Calculate the price range of the products variants
def command(self, name, *args): args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8')) for arg in args if arg is not None ] + [None] _mpv_command(self.handle, (c_char_p*len(args))(*args))
Execute a raw command.
def select(cls, dataset, selection_mask=None, **selection): if not dataset.data: return [] ds = cls._inner_dataset_template(dataset) data = [] for d in dataset.data: ds.data = d sel = ds.interface.select(ds, **selection) data.append(sel) return data
Applies selectiong on all the subpaths.
def write_details(fw, details, bed): for a, b, depth in details: for i in xrange(a, b): gi = bed[i].accn print("\t".join((gi, str(depth))), file=fw)
Write per gene depth to file
def files(self, *, bundle: str=None, tags: List[str]=None, version: int=None, path: str=None) -> models.File: query = self.File.query if bundle: query = (query.join(self.File.version, self.Version.bundle) .filter(self.Bundle.name == bundle)) if tags: query = ( query.join(self.File.tags) .filter(self.Tag.name.in_(tags)) .group_by(models.File.id) .having(func.count(models.Tag.name) == len(tags)) ) if version: query = query.join(self.File.version).filter(self.Version.id == version) if path: query = query.filter_by(path=path) return query
Fetch files from the store.
def _track_tendril(self, tendril): self.tendrils[tendril._tendril_key] = tendril self._tendrils.setdefault(tendril.proto, weakref.WeakValueDictionary()) self._tendrils[tendril.proto][tendril._tendril_key] = tendril
Adds the tendril to the set of tracked tendrils.
def _msg_create_line(self, msg, data, key): ret = [] ret.append(self.curse_add_line(msg)) ret.append(self.curse_add_line(data.pre_char, decoration='BOLD')) ret.append(self.curse_add_line(data.get(), self.get_views(key=key, option='decoration'))) ret.append(self.curse_add_line(data.post_char, decoration='BOLD')) ret.append(self.curse_add_line(' ')) return ret
Create a new line to the Quickview.
def reverse_func(apps, schema_editor): print("\n") remove_count = 0 BackupRun = apps.get_model("backup_app", "BackupRun") backup_runs = BackupRun.objects.all() for backup_run in backup_runs: temp = OriginBackupRun(name=backup_run.name, backup_datetime=backup_run.backup_datetime) config_path = temp.get_config_path() try: config_path.unlink() except OSError as err: print("ERROR removing config file: %s" % err) else: remove_count += 1 print("%i config files removed.\n" % remove_count)
manage migrate backup_app 0003_auto_20160127_2002
def fix(self, with_margin=True):
    """Correct box when no values and take margin in account.

    Degenerate boxes are widened so they have non-zero extent, then a
    relative margin is applied (always on x; on y only when ``with_margin``).
    """
    if not self.width:
        # Zero-width box: give it a unit width.
        self.xmax = self.xmin + 1
    if not self.height:
        # Zero-height box: stretch it around the current ymin.
        # NOTE(review): halving ymin then adding it to ymax presumes
        # positive coordinates -- confirm intent for negative ymin.
        self.ymin /= 2
        self.ymax += self.ymin
    xmargin = self.margin * self.width
    self.xmin -= xmargin
    self.xmax += xmargin
    if with_margin:
        ymargin = self.margin * self.height
        self.ymin -= ymargin
        self.ymax += ymargin
Correct box when no values and take margin in account
def read(self): try: return self.reader.recv(1) except socket.error: ex = exception().exception if ex.args[0] == errno.EWOULDBLOCK: raise IOError raise
Emulate a file descriptors read method