code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def blacklist(ctx, blacklist_account, account): account = Account(account, blockchain_instance=ctx.blockchain) print_tx(account.blacklist(blacklist_account))
Add an account to a blacklist
def stop_vm(self): if self.vagrant is not None: if self.destroy: self.vagrant.destroy() shutil.rmtree(self.vagrant.root, ignore_errors=True) self.vagrant = None else: self.vagrant.halt()
Stops or destroys the VM used to launch tasks.
def run_canu(self): cmd = self._make_canu_command(self.outdir,'canu') ok, errs = common.syscall(cmd, verbose=self.verbose, allow_fail=False) if not ok: raise Error('Error running Canu.') original_contigs = os.path.join(self.outdir, 'canu.contigs.fasta') renamed_contigs = os.path.join(self.outdir, 'contigs.fasta') Assembler._rename_canu_contigs(original_contigs, renamed_contigs) original_gfa = os.path.join(self.outdir, 'canu.contigs.gfa') renamed_gfa = os.path.join(self.outdir, 'contigs.gfa') os.rename(original_gfa, renamed_gfa)
Runs canu instead of spades
def parse_args(sys_argv, usage):
    """Parse command-line arguments and return the two positional args.

    Args:
        sys_argv: Full argument vector, e.g. ``sys.argv`` (element 0 is
            the program name and is discarded).
        usage: Usage string passed to the OptionParser.

    Returns:
        A ``(template, context)`` tuple of the two positional arguments.

    Raises:
        ValueError: If the number of positional arguments is not exactly 2.
    """
    parser = OptionParser(usage=usage)
    options, args = parser.parse_args(sys_argv[1:])
    # Exactly two positional arguments are expected; tuple unpacking
    # raises ValueError otherwise (unchanged behavior).
    template, context = args
    return template, context
Parse the command-line arguments and return the (template, context) positional arguments.
def delete_file(self, path, prefixed_path, source_storage): if isinstance(self.storage, CumulusStorage): if self.storage.exists(prefixed_path): try: etag = self.storage._get_object(prefixed_path).etag digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest()) if etag == digest: self.log(u"Skipping '{0}' (not modified based on file hash)".format(path)) return False except: raise return super(Command, self).delete_file(path, prefixed_path, source_storage)
Checks if the target file should be deleted if it already exists
def _draw_score_box(self, label, score, position, size): x1, y1 = position width, height = size pygame.draw.rect(self.screen, (187, 173, 160), (x1, y1, width, height)) w, h = label.get_size() self.screen.blit(label, (x1 + (width - w) / 2, y1 + 8)) score = self.score_font.render(str(score), True, (255, 255, 255)) w, h = score.get_size() self.screen.blit(score, (x1 + (width - w) / 2, y1 + (height + label.get_height() - h) / 2))
Draw a score box, whether current or best.
def add_predicate(self, predicate): _predicate = predicate if isinstance(predicate, partial): _predicate = 'partial(%s, %s, %s)' % (predicate.func, predicate.args, predicate.keywords) if LOG_OPTS['register']: hookenv.log(' Adding predicate for %s: %s' % (self.id(), _predicate), level=hookenv.DEBUG) self._predicates.append(predicate)
Add a new predicate callback to this handler.
def filter_labels(sent: Sequence[str], labels: Optional[Set[str]] = None) -> List[str]:
    """Return only the tokens of *sent* that are in *labels*.

    Args:
        sent: Sequence of tokens.
        labels: Set of allowed tokens. If None or empty, no filtering is
            applied and every token is returned.

    Returns:
        A new list containing the filtered (or all) tokens.
    """
    # Fixed annotation: the default is None, so the type must be Optional.
    if labels:
        return [tok for tok in sent if tok in labels]
    # No filter given: return a shallow copy so callers cannot mutate *sent*.
    return list(sent)
Returns only the tokens present in the sentence that are in labels.
def _set_default(path, dest, name): subvol_id = __salt__['btrfs.subvolume_show'](path)[name]['subvolume id'] return __salt__['btrfs.subvolume_set_default'](subvol_id, dest)
Set the subvolume as the current default.
def close(self, response): LOGGER.info('Closing [%s]', os.getpid()) if not self.database.is_closed(): self.database.close() return response
Close connection to database.
def raw(request): foos = foobar_models.Foo.objects.all() return HttpResponse(tree.xml(foos), mimetype='text/xml')
shows untransformed hierarchical xml output
def universal_transformer_base(): hparams = transformer.transformer_base() hparams.hidden_size = 1024 hparams.filter_size = 4096 hparams.num_heads = 16 hparams.layer_prepostprocess_dropout = 0.3 hparams = update_hparams_for_universal_transformer(hparams) return hparams
Base parameters for Universal Transformer.
def replacePatterns(self, vector, layer = None): if not self.patterned: return vector if type(vector) == str: return self.replacePatterns(self.lookupPattern(vector, layer), layer) elif type(vector) != list: return vector vec = [] for v in vector: if type(v) == str: retval = self.replacePatterns(self.lookupPattern(v, layer), layer) if type(retval) == list: vec.extend( retval ) else: vec.append( retval ) else: vec.append( v ) return vec
Replaces patterned inputs or targets with activation vectors.
def parse(self, data):
    """Parse every field in *data*, collecting per-field errors.

    Resets ``self.field_errors`` before parsing and returns a dict
    mapping each key to the value produced by ``self._parse_value``.
    """
    self.field_errors = {}
    parsed = {}
    for key, value in data.items():
        parsed[key] = self._parse_value(key, value)
    return parsed
Parse fields and store individual errors
def data(self, index, role=Qt.DisplayRole): if not index.isValid(): return to_qvariant() if role == Qt.DisplayRole: if index.column() == 0: value = osp.basename(self.get_value(index)) return to_qvariant(value) else: value = self.get_value(index) return to_qvariant(value) elif role == Qt.TextAlignmentRole: return to_qvariant(int(Qt.AlignLeft|Qt.AlignVCenter)) elif role == Qt.ToolTipRole: if index.column() == 0: value = self.get_value(index) return to_qvariant(value) else: return to_qvariant()
Return data at table index
def files(self): if self._files is None: self._files = SeriesZipTifHolo._index_files(self.path) return self._files
List of hologram data file names in the input zip file
def white(self): self._color = RGB_WHITE cmd = self.command_set.white() self.send(cmd)
Set color to white.
def on_get(self, request, response, user_id=None): response.body = "{}" if self.handler(user_id): response.status = falcon.HTTP_200 self.api.register(utils.mxid2localpart(user_id)) else: response.status = falcon.HTTP_404
Responds to GET request for users.
def smartquotes(text): command = shlex.split('pandoc --smart -t plain') com = Popen(command, shell=False, stdin=PIPE, stdout=PIPE, stderr=PIPE) out, err = com.communicate(text.encode('utf-8')) com_out = out.decode('utf-8') text = com_out.replace(u'\n', u' ').strip() return text
Runs text through pandoc for smartquote correction.
def session(self) -> SessionT: session = self._session_class()( connection_pool=self._connection_pool, ) self.event_dispatcher.notify(self.ClientEvent.new_session, session) return session
Return a new session.
def _update_rows(self): for row, item in enumerate(self._items): item.row = row item.column = 0 for column, item in enumerate(self._columns): item.row = self.row item.column = column
Update the row and column numbers of child items.
def _expand_batch(cls, batch): cls._parse_header(BatchHeader, batch) if 'transactions' in batch: batch['transactions'] = [ cls._expand_transaction(t) for t in batch['transactions']] return batch
Deserializes a Batch's header, and the header of its Transactions.
def _get_options(ret=None): attrs = {'host': 'host', 'port': 'port', 'unix_socket_path': 'unix_socket_path', 'db': 'db', 'password': 'password', 'cluster_mode': 'cluster_mode', 'startup_nodes': 'cluster.startup_nodes', 'skip_full_coverage_check': 'cluster.skip_full_coverage_check', } if salt.utils.platform.is_proxy(): return { 'host': __opts__.get('redis.host', 'salt'), 'port': __opts__.get('redis.port', 6379), 'unix_socket_path': __opts__.get('redis.unix_socket_path', None), 'db': __opts__.get('redis.db', '0'), 'password': __opts__.get('redis.password', ''), 'cluster_mode': __opts__.get('redis.cluster_mode', False), 'startup_nodes': __opts__.get('redis.cluster.startup_nodes', {}), 'skip_full_coverage_check': __opts__.get('redis.cluster.skip_full_coverage_check', False) } _options = salt.returners.get_returner_options(__virtualname__, ret, attrs, __salt__=__salt__, __opts__=__opts__) return _options
Get the redis options from salt.
def _base_request(self, method): request = E.Element(method) request.set('xmlns', 'AnetApi/xml/v1/schema/AnetApiSchema.xsd') request.append(self.client_auth) return request
Factory method for generating the base XML requests.
def translate_docs(self, ds, **kwargs): for d in ds: self.map_doc(d, {}, self.invert_subject_object) return [self.translate_doc(d, **kwargs) for d in ds]
Translate a set of solr results
def pos_tokenize(self: object, fileids: str): for para in self.paras(fileids): yield [ self.pos_tagger(word_tokenize(sent)) for sent in sent_tokenize(para) ]
Segments, tokenizes, and POS tags a document in the corpus.
def create_os_in_nwk(self, tenant_id, fw_dict, is_fw_virt=False): tenant_name = fw_dict.get('tenant_name') try: net, subnet = self._create_os_nwk(tenant_id, tenant_name, "in", is_fw_virt=is_fw_virt) if net is None or subnet is None: return False except Exception as exc: LOG.error("Creation of In Openstack Network failed tenant " "%(tenant)s, Exception %(exc)s", {'tenant': tenant_id, 'exc': str(exc)}) return False ret = fw_const.OS_IN_NETWORK_CREATE_SUCCESS net_dict = self.retrieve_dcnm_net_info(tenant_id, "in") subnet_dict = self.retrieve_dcnm_subnet_info(tenant_id, "in") if not net_dict or not subnet_dict: LOG.error("Allocation of net,subnet failed Len net %(len_net)s" "sub %(len_sub)s", {'len_net': len(net_dict), 'len_sub': len(subnet_dict)}) ret = fw_const.OS_IN_NETWORK_CREATE_FAIL self.store_net_fw_db(tenant_id, net, net_dict, subnet_dict, "in", 'SUCCESS', os_status=ret) return True
Create the Openstack IN network and stores the values in DB.
def xyY_to_XYZ(cobj, *args, **kwargs): if cobj.xyy_y == 0.0: xyz_x = 0.0 xyz_y = 0.0 xyz_z = 0.0 else: xyz_x = (cobj.xyy_x * cobj.xyy_Y) / cobj.xyy_y xyz_y = cobj.xyy_Y xyz_z = ((1.0 - cobj.xyy_x - cobj.xyy_y) * xyz_y) / cobj.xyy_y return XYZColor( xyz_x, xyz_y, xyz_z, illuminant=cobj.illuminant, observer=cobj.observer)
Convert from xyY to XYZ.
def _execute_command(self, command, *args): try: command(*args) except libvirt.libvirtError as error: raise RuntimeError("Unable to execute command. %s" % error)
Execute the state transition command.
def _getGlobals(self, **kwargs): globs = { "__builtins__": None, "all": all, "any": any, "bool": bool, "chr": chr, "cmp": cmp, "complex": complex, "divmod": divmod, "enumerate": enumerate, "float": float, "format": format, "frozenset": frozenset, "hex": hex, "int": int, "len": len, "list": list, "long": long, "math": math, "max": max, "min": min, "oct": oct, "ord": ord, "pow": pow, "range": range, "reversed": reversed, "round": round, "str": str, "sum": sum, "tuple": tuple, "xrange": xrange, } globs.update(kwargs) for imp in self.getPythonImports(): mod = imp["module"] func = imp["function"] member = self._getModuleMember(mod, func) if member is None: raise ImportError( "Could not find member {} of module {}".format( func, mod)) globs[func] = member return globs
Return the globals dictionary for the formula calculation
def command(self, group=None, help="", name=None): def decorator(func): return self.add_command(func, group=group, help=help, name=name) return decorator
Decorator for adding a command to this manager.
def ext_pillar(minion_id, pillar, conf): comps = conf.split() profile = None if comps[0]: profile = comps[0] client = salt.utils.etcd_util.get_conn(__opts__, profile) path = '/' if len(comps) > 1 and comps[1].startswith('root='): path = comps[1].replace('root=', '') path %= { 'minion_id': minion_id } try: pillar = salt.utils.etcd_util.tree(client, path) except KeyError: log.error('No such key in etcd profile %s: %s', profile, path) pillar = {} return pillar
Check etcd for all data
def execute_insert_no_results(self, sock_info, run, op_id, acknowledged): command = SON([('insert', self.collection.name), ('ordered', self.ordered)]) concern = {'w': int(self.ordered)} command['writeConcern'] = concern if self.bypass_doc_val and sock_info.max_wire_version >= 4: command['bypassDocumentValidation'] = True db = self.collection.database bwc = _BulkWriteContext( db.name, command, sock_info, op_id, db.client._event_listeners, session=None) _do_batched_insert( self.collection.full_name, run.ops, True, acknowledged, concern, not self.ordered, self.collection.codec_options, bwc)
Execute insert, returning no results.
def Get(self,key):
    """Return the public_ip whose id or internal address equals *key*.

    Falls through (returning None) when no address matches.
    """
    for address in self.public_ips:
        if key == address.id or key == address.internal:
            return(address)
Get public_ip by providing either the public or the internal IP address.
def RegisterPlugin(self, report_plugin_cls): name = report_plugin_cls.__name__ if name in self.plugins: raise RuntimeError("Can't register two report plugins with the same " "name. In particular, can't register the same " "report plugin twice: %r" % name) self.plugins[name] = report_plugin_cls
Registers a report plugin for use in the GRR UI.
def dynacRepresentation(self): return ['CAVMC', [ [self.cavID.val], [self.xesln.val, self.phase.val, self.fieldReduction.val, self.isec.val, 1], ]]
Return the Dynac representation of this cavity instance.
def _replace_with_new_dims( self: T, variables: 'OrderedDict[Any, Variable]' = None, coord_names: set = None, attrs: 'Optional[OrderedDict]' = __default, indexes: 'Optional[OrderedDict[Any, pd.Index]]' = __default, inplace: bool = False, ) -> T: dims = dict(calculate_dimensions(variables)) return self._replace( variables, coord_names, dims, attrs, indexes, inplace=inplace)
Replace variables with recalculated dimensions.
def inDignities(self, idA, idB): objA = self.chart.get(idA) info = essential.getInfo(objA.sign, objA.signlon) return [dign for (dign, ID) in info.items() if ID == idB]
Returns the dignities of A which belong to B.
def read(filename):
    """Return the whitespace-stripped content of *filename*.

    The file is resolved relative to the module-level `here` directory.
    """
    with open(join(here, filename), "r") as handle:
        return handle.read().strip()
Return the whitespace-stripped content of `filename`.
def dirs(self, paths, access=None): self.failures = [path for path in paths if not isvalid(path, access, filetype='dir')] return not self.failures
Verify list of directories
def _report_lint_error(error, file_path): line = error[1].line code = error[0] description = error[1].description sys.stdout.write("{0}:{1} [{2}] {3}\n".format(file_path, line, code, description))
Report a linter error.
def new_request_session(config, cookies): session = requests.Session() if cookies: session.cookies = cookies session.max_redirects = config["maxhttpredirects"] session.headers.update({ "User-Agent": config["useragent"], }) if config["cookiefile"]: for cookie in cookies.from_file(config["cookiefile"]): session.cookies = requests.cookies.merge_cookies(session.cookies, cookie) return session
Create a new request session.
def _consolidate_classpath(self, targets, classpath_products): entries_map = defaultdict(list) for (cp, target) in classpath_products.get_product_target_mappings_for_targets(targets, True): entries_map[target].append(cp) with self.invalidated(targets=targets, invalidate_dependents=True) as invalidation: for vt in invalidation.all_vts: entries = entries_map.get(vt.target, []) for index, (conf, entry) in enumerate(entries): if ClasspathUtil.is_dir(entry.path): jarpath = os.path.join(vt.results_dir, 'output-{}.jar'.format(index)) if not vt.valid: with self.open_jar(jarpath, overwrite=True, compressed=False) as jar: jar.write(entry.path) classpath_products.remove_for_target(vt.target, [(conf, entry.path)]) classpath_products.add_for_target(vt.target, [(conf, jarpath)])
Convert loose directories in classpath_products into jars.
def __related_categories(self, category_id): related = [] for cat in self.categories_tree: if category_id in self.categories_tree[cat]: related.append(self.categories[cat]) return related
Get all related categories to a given one
def _proxy_conf_file(proxyfile, test): changes_old = [] changes_new = [] success = True if not os.path.exists(proxyfile): try: if not test: changes_new.append(_write_proxy_conf(proxyfile)) msg = 'Salt Proxy: Wrote proxy conf {0}'.format(proxyfile) else: msg = 'Salt Proxy: Update required to proxy conf {0}' \ .format(proxyfile) except (OSError, IOError) as err: success = False msg = 'Salt Proxy: Error writing proxy file {0}'.format(err) log.error(msg) changes_new.append(msg) changes_new.append(msg) log.debug(msg) else: msg = 'Salt Proxy: {0} already exists, skipping'.format(proxyfile) changes_old.append(msg) log.debug(msg) return success, changes_new, changes_old
Check if proxy conf exists and update
def totalNumberOfTiles(self, minZoom=None, maxZoom=None):
    """Return the total number of tiles for this instance extent.

    Args:
        minZoom: First zoom level counted (defaults to 0).
        maxZoom: Last zoom level counted, inclusive. Defaults to the last
            level in ``self.RESOLUTIONS``.

    Returns:
        Sum of ``self.numberOfTilesAtZoom(z)`` over the zoom range.
    """
    minZoom = minZoom or 0
    # maxZoom is inclusive for callers; convert to an exclusive bound.
    if maxZoom:
        maxZoom = maxZoom + 1
    else:
        maxZoom = len(self.RESOLUTIONS)
    # range() replaces xrange(), which is Python 2 only and raises
    # NameError on Python 3.
    return sum(self.numberOfTilesAtZoom(zoom)
               for zoom in range(minZoom, maxZoom))
Return the total number of tiles for this instance extent
def zoom(self, locator, percent="200%", steps=1): driver = self._current_application() element = self._element_find(locator, True, True) driver.zoom(element=element, percent=percent, steps=steps)
Zooms in on an element a certain amount.
def _prepare_connection(**kwargs): paramiko_kwargs, scp_kwargs = _select_kwargs(**kwargs) ssh = paramiko.SSHClient() if paramiko_kwargs.pop('auto_add_policy', False): ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.connect(**paramiko_kwargs) scp_client = scp.SCPClient(ssh.get_transport(), **scp_kwargs) return scp_client
Prepare the underlying SSH connection with the remote target.
def render_customizations(self): disable_plugins = self.customize_conf.get('disable_plugins', []) if not disable_plugins: logger.debug("No site-specific plugins to disable") else: for plugin_dict in disable_plugins: try: self.dj.remove_plugin( plugin_dict['plugin_type'], plugin_dict['plugin_name'] ) logger.debug( "site-specific plugin disabled -> Type:{} Name:{}".format( plugin_dict['plugin_type'], plugin_dict['plugin_name'] ) ) except KeyError: logger.debug("Invalid custom configuration found for disable_plugins") enable_plugins = self.customize_conf.get('enable_plugins', []) if not enable_plugins: logger.debug("No site-specific plugins to enable") else: for plugin_dict in enable_plugins: try: self.dj.add_plugin( plugin_dict['plugin_type'], plugin_dict['plugin_name'], plugin_dict['plugin_args'] ) logger.debug( "site-specific plugin enabled -> Type:{} Name:{} Args: {}".format( plugin_dict['plugin_type'], plugin_dict['plugin_name'], plugin_dict['plugin_args'] ) ) except KeyError: logger.debug("Invalid custom configuration found for enable_plugins")
Customize prod_inner for site specific customizations
def _pages_to_generate(self): all_pages = self.get_page_names() ptg = [] for slug in all_pages: p = s2page.Page(self, slug, isslug=True) if p.published: ptg.append({'slug': p.slug, 'title':p.title, 'date': p.creation_date }) sptg = sorted(ptg, key=lambda x : x['date'],reverse=True) res = [ pinfo['slug'] for pinfo in sptg] return res
Return list of slugs that correspond to pages to generate.
def node_dependencies(context: Context): args = ['--loglevel', {0: 'silent', 1: 'warn', 2: 'info'}[context.verbosity]] if not context.use_colour: args.append('--color false') args.append('install') return context.shell('npm', *args)
Updates node.js dependencies
def _check_type(self, check_type, properties): if 'PrimitiveType' in properties: return properties['PrimitiveType'] == check_type if properties['Type'] == 'List': if 'ItemType' in properties: return properties['ItemType'] == check_type else: return properties['PrimitiveItemType'] == check_type return False
Decode a properties type looking for a specific type.
def exists(self, relpath): if self.isignored(self._append_slash_if_dir_path(relpath)): return False return self._exists_raw(relpath)
Returns True if path exists and is not ignored.
async def auth(self): credentials = await self.atv.airplay.generate_credentials() await self.atv.airplay.load_credentials(credentials) try: await self.atv.airplay.start_authentication() pin = await _read_input(self.loop, 'Enter PIN on screen: ') await self.atv.airplay.finish_authentication(pin) print('You may now use these credentials:') print(credentials) return 0 except exceptions.DeviceAuthenticationError: logging.exception('Failed to authenticate - invalid PIN?') return 1
Perform AirPlay device authentication.
def _adj_for_assc(self): if self.gene2gos: gos_assoc = set(get_b2aset(self.gene2gos).keys()) if 'item_marks' not in self.kws: self.kws['item_marks'] = {go:'>' for go in gos_assoc} if 'include_only' not in self.kws: gosubdag = GoSubDag(gos_assoc, self.gosubdag.go2obj, self.gosubdag.relationships) self.kws['include_only'] = gosubdag.go2obj
Print only GO IDs from associations and their ancestors.
def _get_esxi_proxy_details(): det = __proxy__['esxi.get_details']() host = det.get('host') if det.get('vcenter'): host = det['vcenter'] esxi_hosts = None if det.get('esxi_host'): esxi_hosts = [det['esxi_host']] return host, det.get('username'), det.get('password'), \ det.get('protocol'), det.get('port'), det.get('mechanism'), \ det.get('principal'), det.get('domain'), esxi_hosts
Returns the running esxi's proxy details
def register_widget(widget): if widget in registry: raise WidgetAlreadyRegistered( _('The widget %s has already been registered.') % widget.__name__) registry.append(widget)
Register the given widget as a candidate to use in placeholder.
def _sync_labels(self, labels_json): for label_json in labels_json: label_id = label_json['id'] self.labels[label_id] = Label(label_json, self)
Populate the user's labels from a JSON encoded list.
def _pre_activate_injection(self): if not self.app.plugins.classes.exist(self.__class__.__name__): self.app.plugins.classes.register([self.__class__]) self._load_needed_plugins() self.app.signals.send("plugin_activate_pre", self)
Injects functions before the activation routine of child classes gets called
def resource_property(klass, name, **kwargs): klass.PROPERTIES[name] = kwargs def getter(self): return getattr(self, '_%s' % name, kwargs.get('default', None)) if kwargs.get('readonly', False): setattr(klass, name, property(getter)) else: def setter(self, value): setattr(self, '_%s' % name, value) setattr(klass, name, property(getter, setter))
Builds a resource object property.
def nfa(self, nextstate):
    """Return an initial state for an NFA matching this element.

    With an (min, max) interval, builds a backwards chain of up to ``max``
    States leading to *nextstate*, adding shortcut exits once the minimum
    repetition count is exceeded. Without an interval, a single matching
    State is returned.
    """
    if self.interval:
        mininterval, maxinterval = self.interval
        nextstate2 = nextstate
        # Build the chain backwards: each new state matches once, then
        # continues to the previously built state.
        for i in range(maxinterval):
            state = State(transitions=[(self,self.match, nextstate2)])
            if i+1> mininterval:
                # Past the minimum count, matching may also exit directly.
                if nextstate is not nextstate2:
                    state.transitions.append((self,self.match, nextstate))
            if maxinterval == MAXINTERVAL:
                # Unbounded repetition: epsilon self-loop on this state.
                # NOTE(review): the break means only one chain state is
                # built in the unbounded case — confirm this is intended.
                state.epsilon.append(state)
                break
            nextstate2 = state
        return state
    else:
        # No interval: one state matching once, then *nextstate*.
        state = State(transitions=[(self,self.match, nextstate)])
        return state
Returns an initial state for an NFA
def connect(self): self._client = self._create_client() self._db = getattr(self._client, self._db_name) self._generic_dao = GenericDAO(self._client, self._db_name)
Initialize the database connection.
def isBusy(self): if self._lock.acquire(False): self._lock.release() return False else: return True
Returns true if the underlying engine is doing an async operation.
def _s3_resource(dallinger_region=False): config = get_config() if not config.ready: config.load() region = "us-east-1" if dallinger_region else config.get("aws_region") return boto3.resource( "s3", region_name=region, aws_access_key_id=config.get("aws_access_key_id"), aws_secret_access_key=config.get("aws_secret_access_key"), )
A boto3 S3 resource using the AWS keys in the config.
def do_pass(self, element, decl, pseudo):
    """Warn that the old-style ``pass`` declaration is no longer valid.

    Logs a warning that includes the offending declaration value; the
    declaration itself is ignored.
    """
    # Fixed: the original called .encpde(), a typo for .encode(), which
    # raised AttributeError instead of emitting the warning.
    log(WARN, u"Old-style pass as declaration not allowed.{}"
        .format(decl.value).encode('utf-8'))
No longer valid way to set processing pass.
def _des_dict_check(self, des_dict, req_keys, cond_name): assert isinstance(des_dict, dict), '{}' \ ' must be a dictionary. Got {}.'.format(cond_name, type(des_dict)) if bool(des_dict) is True: input_keys = list(des_dict.keys()) for key in req_keys: assert key in input_keys, 'Required key "{}" was not found in ' \ '{}'.format(key, cond_name)
Check if an input design condition dictionary is acceptable.
def list_plugins(self):
    """Return a dict of all plugins registered for the iotile program."""
    # Shallow copy so callers cannot mutate the registry through the result.
    return dict(self.plugins.items())
List all of the plugins that have been registerd for the iotile program on this computer
def handle_getinfo(self, conn, args): result = None if args[0] == b'version': result = self.version elif args[0] == b's2k_count': result = '{}'.format(64 << 20).encode('ascii') else: log.warning('Unknown GETINFO command: %s', args) if result: keyring.sendline(conn, b'D ' + result)
Handle some of the GETINFO messages.
def produce_upgrade_operations( ctx=None, metadata=None, include_symbol=None, include_object=None, **kwargs): if metadata is None: metadata = db.metadata if ctx is None: ctx = create_migration_ctx(target_metadata=metadata, **kwargs) template_args = {} imports = set() _produce_migration_diffs( ctx, template_args, imports, include_object=include_object, include_symbol=include_symbol, **kwargs ) return template_args
Produce a list of upgrade statements.
def plugins(): plugins = current_app.config['PLUGINS'] for name, description in entrypoints.ENTRYPOINTS.items(): echo('{0} ({1})'.format(white(description), name)) if name == 'udata.themes': actives = [current_app.config['THEME']] elif name == 'udata.avatars': actives = [avatar_config('provider')] else: actives = plugins for ep in sorted(entrypoints.iter_all(name), key=by_name): echo('> {0}: {1}'.format(ep.name, is_active(ep, actives)))
Display some details about the local plugins
def _sortObjects(orderby='created', **kwargs): o = [] for m in kwargs.values(): for l in iter(m): o.append(l) o = list(set(o)) sortfunc = _sortByCreated if orderby == 'created' else _sortByModified if six.PY2: o.sort(sortfunc) else: o.sort(key=functools.cmp_to_key(sortfunc)) return o
Sorts lists of objects and combines them into a single list
def _drawLine(self, image): w, h = image.size w *= 5 h *= 5 l_image = Image.new('RGBA', (w, h), (0, 0, 0, 0)) l_draw = ImageDraw.Draw(l_image) x1 = int(w * random.uniform(0, 0.1)) y1 = int(h * random.uniform(0, 1)) x2 = int(w * random.uniform(0.9, 1)) y2 = int(h * random.uniform(0, 1)) l_width = round((w * h)**0.5 * 2.284e-2) l_draw.line(((x1, y1), (x2, y2)), fill=(0, 0, 0, 255), width=l_width) l_image = self._rndLineTransform(l_image) l_image = l_image.resize(image.size, resample=self.resample) image.paste(l_image, (0, 0), l_image)
Draw morphed line in Image object.
def do_action_to_descendants(analysis_request, transition_id): for partition in analysis_request.getDescendants(all_descendants=False): do_action_for(partition, transition_id)
Cascades the transition passed in to the descendant partitions
def load_pretrained(self, wgts_fname:str, itos_fname:str, strict:bool=True): "Load a pretrained model and adapts it to the data vocabulary." old_itos = pickle.load(open(itos_fname, 'rb')) old_stoi = {v:k for k,v in enumerate(old_itos)} wgts = torch.load(wgts_fname, map_location=lambda storage, loc: storage) if 'model' in wgts: wgts = wgts['model'] wgts = convert_weights(wgts, old_stoi, self.data.train_ds.vocab.itos) self.model.load_state_dict(wgts, strict=strict)
Load a pretrained model and adapts it to the data vocabulary.
def _is_preferred(instance, profile): if not instance.weekly_workshift: return False if profile and profile.ratings.filter( workshift_type=instance.weekly_workshift.workshift_type, rating=WorkshiftRating.LIKE, ).count() == 0: return False return True
Check if a user has marked an instance's workshift type as preferred.
def _parse_body(self, body): if is_python3(): return json.loads(body.decode('UTF-8')) else: return json.loads(body)
For just call a deserializer for FORMAT
def add_template_for_node(name, node_id): "Set the template to use to display the node" with current_app.app_context(): db.execute(text(fetch_query_string('insert_template.sql')), name=name, node_id=node_id) result = db.execute(text(fetch_query_string('select_template.sql')), name=name, node_id=node_id).fetchall() if result: template_id = result[0]['id'] db.execute(text(fetch_query_string('update_template_node.sql')), template=template_id, node_id=node_id)
Set the template to use to display the node
def averageSameExpTimes(imgs_path): firsts = imgs_path[:2] imgs = imgs_path[2:] for n, i in enumerate(firsts): firsts[n] = np.asfarray(imread(i)) d = DarkCurrentMap(firsts) for i in imgs: i = imread(i) d.addImg(i) return d.map()
average background images with same exposure time
def initialize(config): "Initialize the bot with a dictionary of config items" config = init_config(config) _setup_logging() _load_library_extensions() if not Handler._registry: raise RuntimeError("No handlers registered") class_ = _load_bot_class() config.setdefault('log_channels', []) config.setdefault('other_channels', []) channels = config.log_channels + config.other_channels log.info('Running with config') log.info(pprint.pformat(config)) host = config.get('server_host', 'localhost') port = config.get('server_port', 6667) return class_( host, port, config.bot_nickname, channels=channels, password=config.get('password'), )
Initialize the bot with a dictionary of config items
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): msg_code = riak.pb.messages.MSG_CODE_GET_REQ codec = self._get_codec(msg_code) msg = codec.encode_get(robj, r, pr, timeout, basic_quorum, notfound_ok, head_only) resp_code, resp = self._request(msg, codec) return codec.decode_get(robj, resp)
Serialize get request and deserialize response
def setupnode(overwrite=False): if not port_is_open(): if not skip_disable_root(): disable_root() port_changed = change_ssh_port() if server_state('setupnode-incomplete'): env.overwrite=True else: set_server_state('setupnode-incomplete') upload_ssh_key() restrict_ssh() add_repositories() upgrade_packages() setup_ufw() uninstall_packages() install_packages() upload_etc() post_install_package() setup_ufw_rules() set_timezone() set_server_state('setupnode-incomplete',delete=True) for s in webserver_list(): stop_webserver(s) start_webserver(s)
Install a baseline host. Can be run multiple times
def list_nodes(search="unsigned"): nodes = nago.core.get_nodes() if search == "all": return map(lambda x: {x[0]: x[1].data}, nodes.items()) elif search == 'unsigned': result = {} for token, node in nodes.items(): if node.get('access') is None: result[token] = node.data return result else: result = {} for token, node in nodes.items(): host_name = node.get('host_name') if search in (token, host_name): result[token] = node.data return result
List all connected nodes
def run_filter_calculation(self): inputs = { 'cif': self.inputs.cif, 'code': self.inputs.cif_filter, 'parameters': self.inputs.cif_filter_parameters, 'metadata': { 'options': self.inputs.options.get_dict(), } } calculation = self.submit(CifFilterCalculation, **inputs) self.report('submitted {}<{}>'.format(CifFilterCalculation.__name__, calculation.uuid)) return ToContext(cif_filter=calculation)
Run the CifFilterCalculation on the CifData input node.
def add_greenlet_name( _logger: str, _method_name: str, event_dict: Dict[str, Any], ) -> Dict[str, Any]: current_greenlet = gevent.getcurrent() greenlet_name = getattr(current_greenlet, 'name', None) if greenlet_name is not None and not greenlet_name.startswith('Greenlet-'): event_dict['greenlet_name'] = greenlet_name return event_dict
Add greenlet_name to the event dict for greenlets that have a non-default name.
def _parse_subnet(self, subnet_dict): if not subnet_dict: return alloc_pool = subnet_dict.get('allocation_pools') cidr = subnet_dict.get('cidr') subnet = cidr.split('/')[0] start = alloc_pool[0].get('start') end = alloc_pool[0].get('end') gateway = subnet_dict.get('gateway_ip') sec_gateway = subnet_dict.get('secondary_gw') return {'subnet': subnet, 'start': start, 'end': end, 'gateway': gateway, 'sec_gateway': sec_gateway}
Return the subnet, start, end, gateway of a subnet.
def upper2_for_ramp_wall(self) -> Set[Point2]: if len(self.upper) > 5: return set() upper2 = sorted(list(self.upper), key=lambda x: x.distance_to(self.bottom_center), reverse=True) while len(upper2) > 2: upper2.pop() return set(upper2)
Returns the 2 upper ramp points of the main base ramp required for the supply depot and barracks placement properties used in this file.
def _port_not_in_use(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) port = 0 s.bind(('', port)) _, port = s.getsockname() return port
Use the port 0 trick to find a port not in use.
def validate_is_non_abstract_edge_type(self, edge_classname): element = self.get_edge_schema_element_or_raise(edge_classname) if element.abstract: raise InvalidClassError(u'Expected a non-abstract vertex class, but {} is abstract' .format(edge_classname))
Validate that a edge classname corresponds to a non-abstract edge class.
def change_status(request, page_id): perm = request.user.has_perm('pages.change_page') if perm and request.method == 'POST': page = Page.objects.get(pk=page_id) page.status = int(request.POST['status']) page.invalidate() page.save() return HttpResponse(str(page.status)) raise Http404
Switch the status of a page.
def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None:
    """Ensure *field_name* does not shadow an attribute on any base model.

    Raises:
        NameError: If any base has a truthy attribute named *field_name*.
    """
    shadowed = any(getattr(base, field_name, None) for base in bases)
    if shadowed:
        raise NameError(
            f'Field name "{field_name}" shadows a BaseModel attribute; '
            f'use a different field name with "alias=\'{field_name}\'".'
        )
Ensure that the field's name does not shadow an existing attribute of the model.
def create(self, instance): with self.flushing(): if instance.id is None: instance.id = self.new_object_id() self.session.add(instance) return instance
Create a new model instance.
def _teardown(self): "Handles the restoration of any potential global state set." self.example.after(self.context) if self.is_root_runner: run.after_all.execute(self.context) self.has_ran = True
Handles the restoration of any potential global state set.
def run(command, cwd=None, shell=False, raiseOnError=False):
    """Execute a child process and wait for it to complete.

    Args:
        command: Command list (or string when ``shell`` is true).
        cwd: Working directory for the child, or None for the current one.
        shell: Passed through to ``subprocess.call``.
        raiseOnError: When truthy, raise on a non-zero exit code.

    Returns:
        The child's exit code.

    Raises:
        Exception: If ``raiseOnError`` is truthy and the exit code is
            non-zero (message text unchanged for existing callers).
    """
    returncode = subprocess.call(command, cwd=cwd, shell=shell)
    # Truthiness test instead of "== True" so any truthy flag works.
    if raiseOnError and returncode != 0:
        raise Exception('child process {} failed with exit code {}'
                        .format(command, returncode))
    return returncode
Executes a child process and waits for it to complete
def drop_space(self): cur = self._conn.cursor() cur.executescript(DROP_SQL_MODEL) self._conn.commit() cur.close() return
Dismantle an existing table space
def indication(self, *args, **kwargs): if not self.current_terminal: raise RuntimeError("no active terminal") if not isinstance(self.current_terminal, Server): raise RuntimeError("current terminal not a server") self.current_terminal.indication(*args, **kwargs)
Downstream packet, send to current terminal.
def stop(self): self.observer_thread.stop() self.observer_thread.join() logging.info("Configfile watcher plugin: Stopped")
Stop the config change monitoring thread.
def split_infinitive_warning(sentence_str): sent_doc = textacy.Doc(sentence_str, lang='en_core_web_lg') inf_pattern = r'<PART><ADV><VERB>' infinitives = textacy.extract.pos_regex_matches(sent_doc, inf_pattern) for inf in infinitives: if inf[0].text.lower() != 'to': continue if inf[-1].tag_ != 'VB': continue return 'SplitInfinitiveWarning'
Return a warning for a split infinitive, else, None
def run(self): ret = eventlet.spawn(self.context(self.func)) eventlet.sleep(self.seconds) try: ret.wait() except Exception: traceback.print_exc() self.thread = eventlet.spawn(self.run)
Invoke the function repeatedly on a timer.
def convert_path(path):
    """Normalize *path*, rejecting absolute paths.

    Raises:
        Exception: If *path* is absolute; included files must be relative.
    """
    if os.path.isabs(path):
        raise Exception("Cannot include file with absolute path {}. Please use relative path instead".format((path)))
    return os.path.normpath(path)
Convert path to a normalized format
def owned_ec_states(self): with self._mutex: if not self._owned_ec_states: if self.owned_ecs: states = [] for ec in self.owned_ecs: states.append(self._get_ec_state(ec)) self._owned_ec_states = states else: self._owned_ec_states = [] return self._owned_ec_states
The state of each execution context this component owns.