code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def load_dict(self, dct):
    """Load a dictionary of configuration values onto this object.

    Each key in ``dct`` becomes an attribute of ``self`` with the
    corresponding value.
    """
    for key, value in dct.items():
        setattr(self, key, value)
Load a dictionary of configuration values.
def create(resource_path, previous_version=None, package='perch.migrations'): pkg, obj = resource_path.rsplit('.', 1) module = importlib.import_module(pkg) resource = getattr(module, obj) version = uuid4().hex target_module = importlib.import_module(package) target_dir = os.path.dirname(target_m...
Create a new migration
async def set_topic(self, topic):
    """Set the topic for the topic channel.

    The new topic is always stored locally; pushing it to the channel is
    best-effort and any failure is logged rather than raised.
    """
    self.topic = topic
    try:
        if self.topicchannel:
            await client.edit_channel(self.topicchannel, topic=topic)
    except Exception as exc:
        logger.exception(exc)
Sets the topic for the topic channel
def do_for_dir(inws, begin): inws = os.path.abspath(inws) for wroot, wdirs, wfiles in os.walk(inws): for wfile in wfiles: if wfile.endswith('.html'): if 'autogen' in wroot: continue check_html(os.path.abspath(os.path.join(wroot, wfile)), be...
do something in the directory.
def _cleanup(self): current_time = time.time() timeout = self._config.timeout if current_time - self._last_cleanup_time > timeout: self.store.cleanup(timeout) self._last_cleanup_time = current_time
Cleanup the stored sessions
def delete(self, data=None):
    """Delete this object from the JSS, optionally sending ``data``.

    Raises:
        JSSMethodNotAllowedError: if this object type does not support
            deletion.
    """
    if not self.can_delete:
        raise JSSMethodNotAllowedError(self.__class__.__name__)
    if not data:
        self.jss.delete(self.url)
    else:
        self.jss.delete(self.url, data)
Delete this object from the JSS.
def _get_subscriptions_endpoint(self):
    """Build the URL and auth params for the subscriptions endpoint.

    Returns:
        tuple: ``(url, params)`` where ``params`` carries the app access
        token.
    """
    settings = self.settings()
    url = GRAPH_ENDPOINT.format(f'{settings["app_id"]}/subscriptions')
    params = {'access_token': self.app_access_token}
    return url, params
Generates the URL and tokens for the subscriptions endpoint
def generate_header_from_parent_header( compute_difficulty_fn: Callable[[BlockHeader, int], int], parent_header: BlockHeader, coinbase: Address, timestamp: Optional[int] = None, extra_data: bytes = b'') -> BlockHeader: if timestamp is None: timestamp = max(int(time.ti...
Generate BlockHeader from state_root and parent_header
def oauth_error_handler(f): @wraps(f) def inner(*args, **kwargs): try: return f(*args, **kwargs) except OAuthClientError as e: current_app.logger.warning(e.message, exc_info=True) return oauth2_handle_error( e.remote, e.response, e.code, e.uri,...
Decorator to handle exceptions.
def initialize_processing(): need_initialize = False needed_algorithms = [ 'native:clip', 'gdal:cliprasterbyextent', 'native:union', 'native:intersection' ] if not QgsApplication.processingRegistry().algorithms(): need_initialize = True if not need_initialize:...
Initializes processing, if it's not already been done
def channel_is_settled( self, participant1: Address, participant2: Address, block_identifier: BlockSpecification, channel_identifier: ChannelID, ) -> bool: try: channel_state = self._get_channel_state( participant1=parti...
Returns true if the channel is in a settled state, false otherwise.
def plot_pyro(calc_id=-1): import matplotlib.pyplot as p dstore = util.read(calc_id) sitecol = dstore['sitecol'] asset_risk = dstore['asset_risk'].value pyro, = numpy.where(dstore['multi_peril']['PYRO'] == 1) lons = sitecol.lons[pyro] lats = sitecol.lats[pyro] p.scatter(lons, lats, marke...
Plot the pyroclastic cloud and the assets
def _muck_w_date(record): temp_d = datetime.datetime(int(record['Year']), int(record['Month']), int(record['Day']), int(record['Hour']) % 24, int(record['Minute']) % 60) d_off = int(record['Hour'])//24 if d_off > 0: temp_d += datetime.tim...
muck with the date because EPW starts counting from 1 and goes to 24.
def sync_with_prompt_toolkit(self):
    """Update the prompt-toolkit layout and refocus the active window.

    Rebuilds the editor layout first; focusing is skipped when there is
    no active window.
    """
    self.editor_layout.update()
    active = self.window_arrangement.active_pt_window
    if active:
        self.application.layout.focus(active)
Update the prompt-toolkit Layout and FocusStack.
def whisper_lock_writes(self, cluster='main'): if not self.config.has_section(cluster): raise SystemExit("Cluster '%s' not defined in %s" % (cluster, self.config_file)) try: return bool(self.config.get(cluster, 'whisper_lock_writes')) except N...
Lock whisper files during carbon-sync.
def on_batch_end(self, batch_info): if self.settings.stream_lr: iteration_idx = ( float(batch_info.epoch_number) + float(batch_info.batch_number) / batch_info.batches_per_epoch ) lr = batch_info.optimizer.param_groups[-1]['lr'] ...
Stream LR to visdom
def _render_object(self, obj, *context, **kwargs): loader = self._make_loader() if isinstance(obj, TemplateSpec): loader = SpecLoader(loader) template = loader.load(obj) else: template = loader.load_object(obj) context = [obj] + list(context) r...
Render the template associated with the given object.
def camel_case(self, snake_case):
    """Convert a snake_case string to camelCase."""
    first, *rest = snake_case.split('_')
    return first + ''.join(part.title() for part in rest)
Convert snake case to camel case
def uuid(anon, obj, field, val):
    """Return a random uuid string drawn from the anonymizer's faker."""
    faker = anon.faker
    return faker.uuid(field=field)
Returns a random uuid string
def matmul_adjoint_x(dz, x, y, transpose_a, transpose_b): if not transpose_a and not transpose_b: return tf.matmul(dz, y, transpose_b=True) elif not transpose_a and transpose_b: return tf.matmul(dz, y) elif transpose_a and not transpose_b: return tf.matmul(y, dz, transpose_b=True) else: return t...
Implementation of dtfmatmul wrt x, separate for readability.
def allFeatures(self):
    """Yield every feature in the data repository.

    Walks all datasets, then each dataset's feature sets, yielding the
    features of every set in order.
    """
    for dataset in self.getDatasets():
        for feature_set in dataset.getFeatureSets():
            yield from feature_set.getFeatures()
Return an iterator over all features in the data repo
def start(): setupdir = dirname(dirname(__file__)) curdir = os.getcwd() if len(sys.argv) > 1: configfile = sys.argv[1] elif exists(join(setupdir, "setup.py")): configfile = join(setupdir, "dev.cfg") elif exists(join(curdir, "prod.cfg")): configfile = join(curdir, "prod.cfg") ...
Start the CherryPy application server.
async def fetchone(self) -> Optional[sqlite3.Row]:
    """Fetch a single row through the executor (``None`` when exhausted)."""
    row = await self._execute(self._cursor.fetchone)
    return row
Fetch a single row.
def prev_cursor_location(self):
    """Move the cursor to the previous trace frame, clamping at zero."""
    self._verify_entrypoint_selected()
    previous = self.current_trace_frame_index - 1
    self.current_trace_frame_index = max(previous, 0)
    self.trace()
Move cursor to the previous trace frame.
def _read(self): while self._rxactive: try: rv = self._ep_in.read(self._ep_in.wMaxPacketSize) if self._isFTDI: status = rv[:2] if status[0] != 1 or status[1] != 0x60: log.info( ...
check ep for data, add it to queue and sleep for interval
def create_view(self, request): try: if request.user.is_authenticated(): return redirect("organization_add") except TypeError: if request.user.is_authenticated: return redirect("organization_add") form = org_registration_form(self.org_model...
Initiates the organization and user account creation process
def execute(self):
    """Run the stemming process; the outcome is stored in ``self.result``.

    The stemmed form is accepted only when the dictionary contains it;
    otherwise the original word is kept.
    """
    self.start_stemming_process()
    in_dictionary = self.dictionary.contains(self.current_word)
    self.result = self.current_word if in_dictionary else self.original_word
Execute stemming process; the result can be retrieved with result
async def stop(self):
    """Stop the gateway manager and wait for everything to shut down.

    Servers are stopped sequentially first, then all device adapters via
    the device manager.
    """
    self._logger.info("Stopping all servers")
    for srv in self.servers:
        await srv.stop()
    self._logger.info("Stopping all device adapters")
    await self.device_manager.stop()
Stop the gateway manager and synchronously wait for it to stop.
def restore(self, volume_id, **kwargs): self.required('create', kwargs, ['backup', 'size']) volume_id = volume_id or str(uuid.uuid4()) kwargs['volume_type_name'] = kwargs['volume_type_name'] or 'vtype' kwargs['size'] = kwargs['size'] or 1 return self.http_put('/volumes/%s' % volu...
restore a volume from a backup
def describe(self):
    """Return basic statistics about the curve.

    Returns:
        dict: sample count, null (NaN) count, and NaN-aware mean, min and
        max computed on the real part.
    """
    real = self.real
    return {
        'samples': self.shape[0],
        'nulls': self[np.isnan(self)].shape[0],
        'mean': float(np.nanmean(real)),
        'min': float(np.nanmin(real)),
        'max': float(np.nanmax(real)),
    }
Return basic statistics about the curve.
def delete(self, commit=True):
    """Delete this model from the database.

    Commits immediately unless ``commit`` is falsy; returns the commit
    result, or the falsy ``commit`` value when not committing.
    """
    db.session.delete(self)
    if not commit:
        return commit
    return db.session.commit()
Delete model from database
def login(self, user, passwd):
    """Log into SecurityCenter and cache the session token."""
    payload = {'username': user, 'password': passwd}
    response = self.post('token', json=payload)
    self._token = response.json()['response']['token']
Logs the user into SecurityCenter and stores the needed token and cookies.
def visit_Import(self, node): line = self._code_lines[node.lineno - 1] module_name = line.split("import")[0].strip() for name in node.names: imported_name = name.name if name.asname: imported_name = name.asname + "::" + imported_name self.impor...
Visit an import node.
def write_index(self):
    """Write the index of all our files to the MAR file.

    Seeks to the recorded end of data, serialises the index entries, and
    records the resulting file size.
    """
    self.fileobj.seek(self.last_offset)
    serialized = index_header.build(dict(entries=self.entries))
    self.fileobj.write(serialized)
    self.filesize = self.fileobj.tell()
Write the index of all our files to the MAR file.
def close(self):
    """Close the console output and input pipe handles."""
    for handle in (self.conout_pipe, self.conin_pipe):
        windll.kernel32.CloseHandle(handle)
Close all communication process streams.
def run_fsm(self, name, command, events, transitions, timeout, max_transitions=20):
    """Send ``command`` and drive the resulting FSM to completion.

    Returns whatever ``FSM.run()`` produces.
    """
    self.ctrl.send_command(command)
    machine = FSM(name, self, events, transitions, timeout=timeout,
                  max_transitions=max_transitions)
    return machine.run()
Wrap the FSM code.
def _persist(self) -> None: if self._store: self._store.save(self._key, self._snapshot)
Persists the current data group
def motors_armed(self):
    """Return True if the motors are armed.

    Uses the last seen HEARTBEAT message; when none has been received
    yet, the motors are reported as not armed.
    """
    # Idiomatic membership test ('x not in d' rather than 'not x in d').
    if 'HEARTBEAT' not in self.messages:
        return False
    heartbeat = self.messages['HEARTBEAT']
    return (heartbeat.base_mode & mavlink.MAV_MODE_FLAG_SAFETY_ARMED) != 0
return true if motors armed
def block(self): if (self.is_actinoid or self.is_lanthanoid) and self.Z not in [71, 103]: return "f" elif self.is_actinoid or self.is_lanthanoid: return "d" elif self.group in [1, 2]: return "s" elif self.group in range(13, 19): return "p" ...
Return the block character "s,p,d,f"
def delete(self, file_path):
    """Remove the named file from the server.

    When the path contains directory components, first change into the
    containing directory, then delete by bare file name.
    """
    if os.sep not in file_path:
        return self.session.delete(file_path)
    directory, file_name = file_path.rsplit(os.sep, 1)
    self.chdir(directory)
    return self.session.delete(file_name)
Remove the file named filename from the server.
def priorfactors(self): priorfactors = {} for pop in self.poplist: for f in pop.priorfactors: if f in priorfactors: if pop.priorfactors[f] != priorfactors[f]: raise ValueError('prior factor %s is inconsistent!' % f) ...
Combination of prior factors from all populations
def publish_event(self, channel, event, message):
    """Publish a new event ``message`` to a ``channel``.

    The payload carries the event and channel names, plus a ``data`` key
    when ``message`` is truthy.
    """
    assert self.protocol is not None, "Protocol required"
    payload = {'event': event, 'channel': channel}
    if message:
        payload['data'] = message
    return self.publish(channel, payload)
Publish a new event ``message`` to a ``channel``.
def _init_training(self): if self.check: self.backprop = CheckedBackprop(self.network, self.problem.cost) else: self.backprop = BatchBackprop(self.network, self.problem.cost) self.momentum = Momentum() self.decent = GradientDecent() self.decay = WeightDeca...
Classes needed during training.
def calc_max_bits(self, signed, values): b = 0 vmax = -10000000 for val in values: if signed: b = b | val if val >= 0 else b | ~val << 1 vmax = val if vmax < val else vmax else: b |= val; bits = 0 if b > 0: ...
Calculates the maximum number of bits needed to represent a value
def PrintSets(self):
    """Print each photoset's name together with its number of photos."""
    photosets = self._getphotosets()
    for name in photosets:
        count = photosets[name]['number_photos']
        print("%s [%d]" % (name, count))
Prints set name and number of photos in set
def extract_attributes(cls, fields, resource): data = OrderedDict() for field_name, field in six.iteritems(fields): if field_name == 'id': continue if fields[field_name].write_only: continue if isinstance( field, (re...
Builds the `attributes` object of the JSON API resource object.
def _check_gzipped_input(in_file, data):
    """Determine if a gzipped input file is block gzip or standard gzip.

    Returns:
        tuple(bool, bool): ``(False, False)`` when ``grabix check`` says
        the file is bgzipped, ``(True, True)`` otherwise.
    """
    grabix = config_utils.get_program("grabix", data["config"])
    output = subprocess.check_output([grabix, "check", in_file])
    # NOTE(review): on Python 3 check_output returns bytes, so comparing
    # with the str "yes" assumes Python 2 or a text-mode wrapper — verify.
    if output.strip() == "yes":
        return False, False
    return True, True
Determine if a gzipped input file is blocked gzip or standard.
def _ScheduleVariableHunt(hunt_obj): if hunt_obj.client_rate != 0: raise VariableHuntCanNotHaveClientRateError(hunt_obj.hunt_id, hunt_obj.client_rate) seen_clients = set() for flow_group in hunt_obj.args.variable.flow_groups: for client_id in flow_group.clie...
Schedules flows for a variable hunt.
def quadkey_to_tile(quadkey): tile_x, tile_y = (0, 0) level = len(quadkey) for i in xrange(level): bit = level - i mask = 1 << (bit - 1) if quadkey[level - bit] == '1': tile_x |= mask if quadkey[level - bit] == '2': ...
Transform quadkey to tile coordinates
def pip(filename): requirements = [] for line in open(join(ROOT, 'requirements', filename)): line = line.strip() if not line or '://' in line: continue match = RE_REQUIREMENT.match(line) if match: requirements.extend(pip(match.group('filename'))) e...
Parse pip reqs file and transform it to setuptools requirements.
def populate_iteration(self, iteration): cur_idx = iteration.cur_idx genotypes = self.genotype_file.next().split() iteration.chr, iteration.rsid, junk, iteration.pos = genotypes[0:4] iteration.chr = int(iteration.chr) iteration.pos = int(iteration.pos) if DataParser.bound...
Pour the current data into the iteration object
def read_numbers(numbers):
    """Yield floats from ``numbers``.

    Accepts either an iterable of number-like values or a path to a file
    containing one number per line.
    """
    if not isiterable(numbers):
        with open(numbers) as fh:
            for line in fh:
                yield float(line.strip())
    else:
        for item in numbers:
            yield float(str(item).strip())
Read the input data in the most optimal way
def collides_axisaligned_rect(self, other): self_shifted = RotoOriginRect(self.width, self.height, -self.angle) s_a = self.sin_a() c_a = self.cos_a() center_x = self.x + self.width / 2.0 * c_a - self.height / 2.0 * s_a center_y = self.y - self.height / 2.0 * c_a - self.width / 2....
Returns collision with axis aligned other rect
def getCitiesDrawingXML(points):
    """Build an XML string with a beacon block and an ender pearl per city.

    Each point contributes a DrawBlock at y=7 and a DrawItem at y=10; the
    point's ``y`` coordinate maps to the XML ``z`` attribute.
    """
    # Collect pieces and join once instead of quadratic += concatenation.
    parts = []
    for p in points:
        x, z = str(p.x), str(p.y)
        parts.append('<DrawBlock x="' + x + '" y="7" z="' + z + '" type="beacon"/>')
        parts.append('<DrawItem x="' + x + '" y="10" z="' + z + '" type="ender_pearl"/>')
    return ''.join(parts)
Build an XML string that contains a square for each city
def dt_comp(self, sampled_topics): samples = sampled_topics.shape[0] dt = np.zeros((self.D, self.K, samples)) for s in range(samples): dt[:, :, s] = \ samplers_lda.dt_comp(self.docid, sampled_topics[s, :], s...
Compute document-topic matrix from sampled_topics.
def include_file(self, uri, **kwargs):
    """Include the file at ``uri``, forwarding all keyword arguments."""
    _include_file(self.context, uri, self._templateuri, **kwargs)
Include a file at the given ``uri``.
def gauss_box_model_deriv(x, amplitude=1.0, mean=0.0, stddev=1.0, hpix=0.5): z = (x - mean) / stddev z2 = z + hpix / stddev z1 = z - hpix / stddev da = norm.cdf(z2) - norm.cdf(z1) fp2 = norm_pdf_t(z2) fp1 = norm_pdf_t(z1) dl = -amplitude / stddev * (fp2 - fp1) ds = -amplitude / stddev * ...
Derivative of the integral of a Gaussian profile.
def goto_position(self, position, duration, control=None, wait=False): if control is None: control = self.goto_behavior if control == 'minjerk': goto_min_jerk = GotoMinJerk(self, position, duration) goto_min_jerk.start() if wait: goto_min_j...
Automatically sets the goal position and the moving speed to reach the desired position within the duration.
def run(self): self.signals() with self.listener(): for job in self.jobs(): if not job: self.jid = None self.title('Sleeping for %fs' % self.interval) time.sleep(self.interval) else: ...
Run jobs, popping one after another
def setup_callbacks(self): if PYGIT2_VERSION >= _LooseVersion('0.23.2'): self.remotecallbacks = pygit2.RemoteCallbacks( credentials=self.credentials) if not self.ssl_verify: self.remotecallbacks.certificate_check = \ lambda *args, **kwa...
Assign attributes for pygit2 callbacks
def compress_pdf(pdf_fpath, output_fname=None): import utool as ut ut.assertpath(pdf_fpath) suffix = '_' + ut.get_datestamp(False) + '_compressed' print('pdf_fpath = %r' % (pdf_fpath,)) output_pdf_fpath = ut.augpath(pdf_fpath, suffix, newfname=output_fname) print('output_pdf_fpath = %r' % (outpu...
uses ghostscript to write a pdf
def list(self, limit=None, offset=None):
    """Get all domains, or a single page when limit/offset are given."""
    qs = self._get_pagination_qs(limit, offset)
    uri = "/%s%s" % (self.uri_base, qs)
    return self._list(uri)
Gets a list of all domains, or optionally a page of domains.
def imported_classifiers_package(p: ecore.EPackage): classes = {c for c in p.eClassifiers if isinstance(c, ecore.EClass)} references = itertools.chain(*(c.eAllReferences() for c in classes)) references_types = (r.eType for r in references) imported = {c for c in references_types if getat...
Determines which classifiers have to be imported into given package.
def Options(**kwargs):
    """Declare options for the class, returning a populated GPDOptions.

    Unknown keyword names raise ``ValueError``; unspecified fields
    default to ``None``.
    """
    construct = GPDOptions
    names = construct._fields
    values = {name: None for name in names}
    # dict.items() works on both Python 2 and 3; the original used the
    # Python-2-only dict.iteritems(), which breaks under Python 3.
    for key, val in kwargs.items():
        if key not in names:
            raise ValueError('Error ' + key + ' is not a property of these options')
        values[key] = val
    return construct(**values)
A method for declaring options for the class
def handle(self):
    """Execute the constants command when PyStratumCommand is activated."""
    self.output = PyStratumStyle(self.input, self.output)
    self.run_command(self.input.get_argument('config_file'))
Executes constants command when PyStratumCommand is activated.
def add_answer_at_time(self, record, now):
    """Add ``record`` (and its RRSIG, if any) unless expired at ``now``.

    A ``now`` of 0 skips the expiry check entirely.
    """
    if record is None:
        return
    if now != 0 and record.is_expired(now):
        return
    self.answers.append((record, now))
    if record.rrsig is not None:
        self.answers.append((record.rrsig, now))
Adds an answer if it does not expire by a certain time
def remove_network_profile(self, obj, params): network_id = -1 profiles = self.network_profiles(obj) for profile in profiles: if profile == params: network_id = profile.id if network_id != -1: self._send_cmd_to_wpas(obj['name'], 'RE...
Remove the specified AP profiles
def _check_current_value(gnome_kwargs, value):
    """Compare the current gnome setting against ``value`` as text."""
    current = __salt__['gnome.get'](**gnome_kwargs)
    return six.text_type(current) == six.text_type(value)
Check the current value with the passed value
def all_substrings(s):
    """Yield every contiguous substring of ``s``, shortest first."""
    concat = ''.join
    for size in range(1, len(s) + 1):
        for chunk in window(s, size):
            yield concat(chunk)
yields all substrings of a string
def data_filler_user_agent(self, number_of_rows, pipe): try: for i in range(number_of_rows): pipe.hmset('user_agent:%s' % i, { 'id': rnd_id_generator(self), 'ip': self.faker.ipv4(), 'countrycode': self.faker.country_code(), ...
creates keys with user agent data
def _parse_band(cls, kw): m = re.search('([a-zA-Z0-9]+)(_\d+)?', kw) if m: if m.group(1) in cls._not_a_band: return None else: return m.group(1)
Returns photometric band from inifile keyword
def logon(): content = {} payload = "<aaaLogin inName='{0}' inPassword='{1}'></aaaLogin>".format(DETAILS['username'], DETAILS['password']) r = __utils__['http.query'](DETAILS['url'], data=payload, method='POST', ...
Logs into the cimc device and returns the session cookie.
def _POUpdateBuilderWrapper(env, target=None, source=_null, **kw): if source is _null: if 'POTDOMAIN' in kw: domain = kw['POTDOMAIN'] elif 'POTDOMAIN' in env and env['POTDOMAIN']: domain = env['POTDOMAIN'] else: domain = 'messages' source = [ domain ] return env._POUpdateBuilder(ta...
Wrapper for `POUpdate` builder - make user's life easier
def render_desc(desc):
    """Return the description quoted, wrapped across lines if too long.

    Descriptions longer than one 54-character line are joined across
    quoted continuation lines aligned at column 21.
    """
    desc_lines = split_len(desc + '.', 54)
    if len(desc_lines) == 1:
        return "'%s'" % desc_lines[0]
    join_str = "'\n%s'" % (' '*21)
    return "('%s')" % join_str.join(desc_lines)
calculate desc string, wrapped if too long
def _write_cron_lines(user, lines): lines = [salt.utils.stringutils.to_str(_l) for _l in lines] path = salt.utils.files.mkstemp() if _check_instance_uid_match(user) or __grains__.get('os_family') in ('Solaris', 'AIX'): with salt.utils.files.fpopen(path, 'w+', uid=__salt__['file.user_to_uid'](user), ...
Takes a list of lines to be committed to a user's crontab and writes it
def on_key_press(self, event):
    """Pan and zoom with the keyboard.

    Keys in ``self._arrows`` pan (when keyboard pan is enabled), keys in
    ``self._pm`` zoom, and 'R' resets the view. Modified keypresses are
    ignored.
    """
    if event.modifiers:
        return
    key = event.key
    if self.enable_keyboard_pan and key in self._arrows:
        self._pan_keyboard(key)
    if key in self._pm:
        self._zoom_keyboard(key)
    if key == 'R':
        self.reset()
Pan and zoom with the keyboard.
def commit(self): "Commit data to the storage." if self._meta.path: with open(self._meta.path, 'wb') as fd: raw = deepcopy(self.raw) lazy_indexes = self.lazy_indexes if not self._meta.lazy_indexes: for idx_name in lazy_index...
Commit data to the storage.
def getLocalDatetime(date, time, tz=None, timeDefault=dt.time.max): localTZ = timezone.get_current_timezone() if tz is None or tz == localTZ: localDt = getAwareDatetime(date, time, tz, timeDefault) else: eventDt = getAwareDatetime(date, time, tz, timeDefault) localDt = eventDt.astime...
Get a datetime in the local timezone from date and optionally time
def return_collection(collection_type):
    """Decorator mapping raw API output through ``collection_type``.

    The wrapped method's return value is converted element-wise into a
    list of ``collection_type`` instances.
    """
    def outer_func(func):
        @functools.wraps(func)
        def inner_func(self, *pargs, **kwargs):
            raw = func(self, *pargs, **kwargs)
            return [collection_type(item) for item in raw]
        return inner_func
    return outer_func
Change method return value from raw API output to collection of models
def lemmatize(text, lowercase=True, remove_stopwords=True): doc = nlp(text) if lowercase and remove_stopwords: lemmas = [t.lemma_.lower() for t in doc if not (t.is_stop or t.orth_.lower() in STOPWORDS)] elif lowercase: lemmas = [t.lemma_.lower() for t in doc] elif remove_stopwords: ...
Return the lemmas of the tokens in a text.
def backup(id):
    """Back up the database identified by ``id`` to S3; return its URL."""
    dump_path = dump_database(id)
    key = "{}.dump".format(id)
    bucket = user_s3_bucket()
    bucket.upload_file(dump_path, key)
    return _generate_s3_url(bucket, key)
Backup the database to S3.
def create_wordpress(self, service_id, version_number, name, path, comment=None): body = self._formdata({ "name": name, "path": path, "comment": comment, }, FastlyWordpress.FIELDS) content = self._fetch("/service/%s/version/%d/wordpress" % (service_id, version_number), method="POST", body=body)...
Create a wordpress for the specified service and version.
def links(self):
    """Return a Links collection containing this page's self link."""
    result = Links()
    result["self"] = Link.for_(
        self._operation,
        self._ns,
        qs=self._page.to_items(),
        **self._context
    )
    return result
Include a self link.
def post(self, *args):
    """Start a new filewatcher at the path given in the request body.

    Responds with HTTP 404 when the path does not exist.
    """
    filepath = self.get_body_argument('filepath')
    if not self.fs.exists(filepath):
        raise tornado.web.HTTPError(404)
    Filewatcher.add_directory_to_watch(filepath)
    self.write({'msg': 'Watcher added for {}'.format(filepath)})
Start a new filewatcher at the specified path.
def _check_dn(self, dn, attr_value):
    """Validate a dn: attribute, reporting duplicates and bad values.

    Flags a second dn: line within one record and any value that is not
    a valid distinguished-name string.
    """
    if dn is not None:
        self._error('Two lines starting with dn: in one record.')
    if not is_dn(attr_value):
        self._error('No valid string-representation of '
                    'distinguished name %s.' % attr_value)
Check dn attribute for issues.
def group_re(self):
    """Build a regexp pattern with named groups from the token stream.

    TXT tokens are escaped literally, VAR tokens become named capture
    groups, ANON tokens become non-capturing groups.
    """
    parts = []
    for token, data in self.tokens():
        if token == 'TXT':
            parts.append(re.escape(data))
        elif token == 'VAR':
            parts.append('(?P<%s>%s)' % (data[1], data[0]))
        elif token == 'ANON':
            parts.append('(?:%s)' % data)
    return ''.join(parts)
Return a regexp pattern with named groups
def _AsList(arg):
    """Wrap ``arg`` in a list unless it is a non-string iterable.

    Strings are treated as scalars; any other iterable is materialised
    into a list.
    """
    # collections.Iterable was removed in Python 3.10; collections.abc
    # is the correct home for the ABC.
    if (isinstance(arg, string_types)
            or not isinstance(arg, collections.abc.Iterable)):
        return [arg]
    return list(arg)
Encapsulates an argument in a list, if it's not already iterable.
def upload(ctx, yes=False): import callee version = callee.__version__ if version.endswith('-dev'): fatal("Can't upload a development version (%s) to PyPI!", version) if not yes: answer = input("Do you really want to upload to PyPI [y/N]? ") yes = answer.strip().lower() == 'y' ...
Upload the package to PyPI.
def _read_content(self, response: Response, original_url_info: URLInfo): data = response.body.read(4096) url_info = original_url_info try: self._robots_txt_pool.load_robots_txt(url_info, data) except ValueError: _logger.warning(__( _('Failed to par...
Read response and parse the contents into the pool.
def access(self, accessor, timeout=None):
    """Run ``accessor`` to completion on the loop and return its result.

    Raises:
        RuntimeError: if the loop is already running.
    """
    if self.loop.is_running():
        raise RuntimeError("Loop is already running")
    # NOTE(review): wait_for's `loop` argument was removed in Python
    # 3.10; this code targets an older asyncio — verify target version.
    wrapped = asyncio.wait_for(accessor, timeout, loop=self.loop)
    return self.loop.run_until_complete(wrapped)
Return a result from an asyncio future.
def _density_par(self,dangle,tdisrupt=None): if tdisrupt is None: tdisrupt= self._tdisrupt dOmin= dangle/tdisrupt return 0.5\ *(1.+special.erf((self._meandO-dOmin)\ /numpy.sqrt(2.*self._sortedSigOEig[2])))
The raw density as a function of parallel angle
def split_str(string):
    """Split ``string`` roughly in half on word boundaries.

    Returns:
        tuple(str, str): the first and second halves, space-joined.
    """
    words = string.split(' ')
    mid = len(words) // 2
    return ' '.join(words[:mid]), ' '.join(words[mid:])
Split string in half to return two strings
def _json_to_supported(response_body):
    """Parse the response body into a list of Supported objects."""
    data = json.loads(response_body)
    return [
        Supported().from_json(entry)
        for entry in data.get("supportedList", [])
    ]
Returns a list of Supported objects
def run_restore(self, snapshot: Dict[Union[str, Key], Any]) -> 'BaseItemCollection': try: for name, snap in snapshot.items(): if isinstance(name, Key): self._nested_items[name.group].run_restore(snap) else: self._nested_items[na...
Restores the state of a collection from a snapshot
def split_feature(f, n): if not isinstance(n, int): raise ValueError('n must be an integer') orig_feature = copy(f) step = (f.stop - f.start) / n for i in range(f.start, f.stop, step): f = copy(orig_feature) start = i stop = min(i + step, orig_feature.stop) f.star...
Split an interval into `n` roughly equal portions
def download_metadata_file(self, outdir, force_rerun=False): uniprot_xml_file = download_uniprot_file(uniprot_id=self.id, outdir=outdir, filetype='xml', force_rerun=...
Download and load the UniProt XML file
def lexical_parent(self):
    """Return the lexical parent for this cursor, caching the lookup."""
    try:
        return self._lexical_parent
    except AttributeError:
        self._lexical_parent = conf.lib.clang_getCursorLexicalParent(self)
        return self._lexical_parent
Return the lexical parent for this cursor.
def _update_record(self, identifier, rtype=None, name=None, content=None): if identifier is not None: if name is not None: records = self._list_records_internal(identifier=identifier) if len(records) == 1 and records[0]['name'] != self._full_name(name): ...
Updates a record. Name changes are allowed, but the record identifier will change
def _find_cmd(self, cmd): cdir = self.get_install_cassandra_root() if self.get_base_cassandra_version() >= 2.1: fcmd = common.join_bin(cdir, os.path.join('tools', 'bin'), cmd) else: fcmd = common.join_bin(cdir, 'bin', cmd) try: if os.path.exists(fcmd):...
Locates command under cassandra root and fixes permissions if needed
def prt_results(self, goea_results): if self.args.outfile is None: self._prt_results(goea_results) else: outfiles = self.args.outfile.split(",") grpwr = self.prepgrp.get_objgrpwr(goea_results) if self.prepgrp else None if grpwr is None: sel...
Print GOEA results to the screen or to a file.