code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def uninstall_all_visa_handlers(self, session):
    """Uninstall every previously installed handler.

    :param session: Unique logical identifier of a session. When None,
        handlers are removed for all known sessions.
    """
    # A concrete session limits the cleanup to that session only;
    # otherwise operate on a snapshot of all registered sessions.
    targets = [session] if session is not None else list(self.handlers)
    for sess in targets:
        self.__uninstall_all_handlers_helper(sess)
Uninstalls all previously installed handlers for a particular session. :param session: Unique logical identifier to a session. If None, operates on all sessions.
def tqdm_hook(t):
    """Wrap a tqdm instance as a urllib-style reporthook.

    Don't forget to close() or __exit__() the tqdm instance once you're
    done with it (easiest using `with` syntax).

    Example
    -------
    >>> with tqdm(...) as t:
    ...     reporthook = tqdm_hook(t)
    ...     urllib.urlretrieve(..., reporthook=reporthook)
    """
    def update_to(b=1, bsize=1, tsize=None):
        # urlretrieve reports (block number, block size, total size).
        if tsize is not None:
            t.total = tsize
        # Advance by the bytes transferred since the previous callback.
        t.update((b - update_to.last_block) * bsize)
        update_to.last_block = b

    update_to.last_block = 0
    return update_to
Wraps tqdm instance. Don't forget to close() or __exit__() the tqdm instance once you're done with it (easiest using `with` syntax). Example ------- >>> with tqdm(...) as t: ... reporthook = my_hook(t) ... urllib.urlretrieve(..., reporthook=reporthook)
def calc_model(cortex, model_argument, model_hemi=Ellipsis, radius=np.pi/3): if pimms.is_str(model_argument): h = cortex.chirality if model_hemi is Ellipsis else \ None if model_hemi is None else \ model_hemi model = retinotopy_model(model_argument, hemi=h, ra...
calc_model loads the appropriate model object given the model argument, which may given the name of the model or a model object itself. Required afferent parameters: @ model_argument Must be either a RegisteredRetinotopyModel object or the name of a model that can be loaded. Optional afferen...
def __prepare_args(self, args): ret = [] for a in args: if isinstance(a, six.binary_type): if self.__size_expr.match(a): ret += [a] else: ret += [b'"' + a + b'"'] continue ret += [bytes(str(a)...
Format command arguments before sending them. Command arguments of type string must be quoted, the only exception concerns size indication (of the form {\d\+?}). :param args: list of arguments :return: a list for transformed arguments
def delete_dcnm_out_part(self, tenant_id, fw_dict, is_fw_virt=False): res = fw_const.DCNM_OUT_PART_DEL_SUCCESS tenant_name = fw_dict.get('tenant_name') ret = True try: self._delete_partition(tenant_id, tenant_name) except Exception as exc: LOG.error("delet...
Delete the DCNM OUT partition and update the result.
def login(self, username=None, password=None): if username is None: username = self.username if password is None: password = self.password logger.debug("Logging into server") self.sc.sessionManager.Login(userName=username, password=password) self._logged_i...
Login to a vSphere server. >>> client.login(username='Administrator', password='strongpass') :param username: The username to authenticate as. :type username: str :param password: The password to authenticate with. :type password: str
def set_data_length(self, length):
    """Set the length of data for this El Torito Entry.

    Parameters:
     length - The new length for the El Torito Entry.
    Returns:
     Nothing.
    """
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('El Torito Entry not initialized')
    # El Torito stores lengths as a count of 512-byte virtual sectors.
    sectors = utils.ceiling_div(length, 512)
    self.sector_count = sectors
A method to set the length of data for this El Torito Entry. Parameters: length - The new length for the El Torito Entry. Returns: Nothing.
def _record_values_for_fit_summary_and_statsmodels(self): needed_attributes = ["fitted_probs", "params", "log_likelihood", "standard_errors"] try: assert all([hasattr(self, attr) for attr in needed_attribu...
Store the various estimation results that are used to describe how well the estimated model fits the given dataset, and record the values that are needed for the statsmodels estimation results table. All values are stored on the model instance. Returns ------- None.
def codemirror_field_js_bundle(field): manifesto = CodemirrorAssetTagRender() manifesto.register_from_fields(field) try: bundle_name = manifesto.js_bundle_names()[0] except IndexError: msg = ("Given field with configuration name '{}' does not have a " "Javascript bundle na...
Filter to get CodeMirror Javascript bundle name needed for a single field. Example: :: {% load djangocodemirror_tags %} {{ form.myfield|codemirror_field_js_bundle }} Arguments: field (django.forms.fields.Field): A form field that contains a widget :class:`djangocod...
def set_default_headers(self): mod_opts = self.application.mod_opts if mod_opts.get('cors_origin'): origin = self.request.headers.get('Origin') allowed_origin = _check_cors_origin(origin, mod_opts['cors_origin']) if allowed_origin: self.set_header("Acc...
Set default CORS headers
def instruction_path(cls, project, instruction):
    """Return a fully-qualified instruction resource name string."""
    template = "projects/{project}/instructions/{instruction}"
    return google.api_core.path_template.expand(
        template,
        project=project,
        instruction=instruction,
    )
Return a fully-qualified instruction string.
def probe_check(name, status, device_type): status_string = PROBE_STATE.get(int(status), "unknown") if status_string == "ok": return ok, "{} '{}': {}".format(device_type, name, status_string) if status_string == "unknown": return unknown, "{} '{}': {}".format(device_type, name, status_string...
if the status is "ok" in the PROBE_STATE dict, return ok + string if the status is not "ok", return critical + string
def _include_term(self, term): ref_needed = False if term.relations: for k,v in six.iteritems(term.relations): for i,t in enumerate(v): try: if t.id not in self: self._include_term(t) ...
Add a single term to the current ontology. It is needed to dereference any term in the term's relationship and then to build the reference again to make sure the other terms referenced in the term's relations are the one contained in the ontology (to make sure changes to one term in the...
def _dilated_conv_layer(self, output_channels, dilation_rate, apply_relu, name): layer_components = [ conv.Conv2D( output_channels, [3, 3], initializers=self._initializers, regularizers=self._regularizers, rate=dilation_rate, ...
Create a dilated convolution layer. Args: output_channels: int. Number of output channels for each pixel. dilation_rate: int. Represents how many pixels each stride offset will move. A value of 1 indicates a standard convolution. apply_relu: bool. If True, a ReLU non-linearlity is added. ...
def mod_watch(name, restart=True, update=False, user=None, conf_file=None, bin_env=None, **kwargs): return running( name, restart=restart, update=update, user=user, conf_file=conf_file, ...
The supervisord watcher, called to invoke the watch command. Always restart on watch .. note:: This state exists to support special handling of the ``watch`` :ref:`requisite <requisites>`. It should not be called directly. Parameters for this function should be set by the state being t...
def raise_from(exc, cause): context_tb = sys.exc_info()[2] incorrect_cause = not ( (isinstance(cause, type) and issubclass(cause, Exception)) or isinstance(cause, BaseException) or cause is None ) if incorrect_cause: raise TypeError("exception causes must derive from Base...
Does the same as ``raise LALALA from BLABLABLA`` does in Python 3. But works in Python 2 also! Please checkout README on https://github.com/9seconds/pep3134 to get an idea about possible pitfals. But short story is: please be pretty carefull with tracebacks. If it is possible, use sys.exc_info inst...
def standby(self): register = self.MMA8452Q_Register['CTRL_REG1'] self.board.i2c_read_request(self.address, register, 1, Constants.I2C_READ | Constants.I2C_END_TX_MASK, self.data_val, Constants.CB_TYPE_DIRECT) ctrl1 = self.w...
Put the device into standby mode so that the registers can be set. @return: No return value
def drop_columns(records, slices): for record in records: drop = set(i for slice in slices for i in range(*slice.indices(len(record)))) keep = [i not in drop for i in range(len(record))] record.seq = Seq(''.join(itertools.compress(record.seq, keep)), record.seq.alphabet) ...
Drop all columns present in ``slices`` from records
def response(self, text, response_type='ephemeral', attachments=None): from flask import jsonify if attachments is None: attachments = [] data = { 'response_type': response_type, 'text': text, 'attachments': attachments, } return js...
Return a response with json format :param text: the text returned to the client :param response_type: optional. When `in_channel` is assigned, both the response message and the initial message typed by the user will be shared ...
def reverse_dependencies(self, ireqs):
    """Return a lookup table of reverse dependencies for the given ireqs.

    Since the underlying data is static, this only works if the dependency
    cache already contains complete data; otherwise the view is partial.
    """
    cache_keys = list(map(self.as_cache_key, ireqs))
    return self._reverse_dependencies(cache_keys)
Returns a lookup table of reverse dependencies for all the given ireqs. Since this is all static, it only works if the dependency cache contains the complete data, otherwise you end up with a partial view. This is typically no problem if you use this function after the entire dependency...
async def delete(self):
    """Delete this model's row from the database."""
    query = r.table_name(self.table_name).get(self.id).delete()
    connection = await conn.get()
    await query.run(connection)
Deletes the model from the database.
def check(self): pathfinder = Pathfinder(True) if pathfinder.add_path(pathfinder['superfamily']) is None: raise RuntimeError("'superfamily' data directory is missing") for tool in ('hmmscan', 'phmmer', 'mast', 'blastp', 'ass3.pl', 'hmmscan.pl'): if not pathfinder.exists(t...
Check if data and third party tools, necessary to run the classification, are available :raises: RuntimeError
def jenkins_last_build_sha(): job_url = os.getenv('JOB_URL') job_json_url = "{0}/api/json".format(job_url) response = urllib.urlopen(job_json_url) job_data = json.loads(response.read()) last_completed_build_url = job_data['lastCompletedBuild']['url'] last_complete_build_j...
Returns the sha of the last completed jenkins build for this project. Expects JOB_URL in environment
def fmt_cut(cut):
    """Format a |Cut|."""
    from_part = fmt_mechanism(cut.from_nodes, cut.node_labels)
    to_part = fmt_mechanism(cut.to_nodes, cut.node_labels)
    return 'Cut {} {} {}'.format(from_part, CUT_SYMBOL, to_part)
Format a |Cut|.
def _check_rel(attrs, rel_whitelist, rel_blacklist): rels = attrs.get('rel', [None]) if rel_blacklist: for rel in rels: if rel in rel_blacklist: return False if rel_whitelist: for rel in rels: if rel in rel_whitelist: ...
Check a link's relations against the whitelist or blacklist. First, this will reject based on blacklist. Next, if there is a whitelist, there must be at least one rel that matches. To explicitly allow links without a rel you can add None to the whitelist (e.g. ['in-reply-to',None])
def save(self, path, compress=True):
    """Write the ``.proteins`` and ``.peptides`` entries to disk as a
    ``proteindb`` file.

    .. note::
        If no ``proteindb`` file is present in the specified path a new
        file is generated, otherwise the old file is replaced.

    :param path: directory in which the file is written
    :param compress: if True, compress the written container
    """
    # PartiallySafeReplace writes to a temporary location first, so an
    # existing file is only replaced once writing has succeeded.
    with aux.PartiallySafeReplace() as msr:
        target = aux.joinpath(path, self.info['name'] + '.proteindb')
        with msr.open(target, mode='w+b') as openfile:
            self._writeContainer(openfile, compress=compress)
Writes the ``.proteins`` and ``.peptides`` entries to the hard disk as a ``proteindb`` file. .. note:: If ``.save()`` is called and no ``proteindb`` file is present in the specified path a new files is generated, otherwise the old file is replaced. :param pa...
def _makeIndentFromWidth(self, width): if self._indenter.useTabs: tabCount, spaceCount = divmod(width, self._indenter.width) return ('\t' * tabCount) + (' ' * spaceCount) else: return ' ' * width
Make indent text with the specified width. Contains width count of spaces, or tabs and spaces
def merge_bed_by_name(bt): name_lines = dict() for r in bt: name = r.name name_lines[name] = name_lines.get(name, []) + [[r.chrom, r.start, r.end, r.name, r.strand]] new_lines =...
Merge intervals in a bed file when the intervals have the same name. Intervals with the same name must be adjacent in the bed file.
def load(target, source_module=None): module, klass, function = _get_module(target) if not module and source_module: module = source_module if not module: raise MissingModule( "No module name supplied or source_module provided.") actual_module = sys.modules[module] if not...
Get the actual implementation of the target.
def sorted_groupby(df, groupby):
    """Group a DataFrame on a column that is already sorted.

    Parameters
    ----------
    df : pandas.DataFrame
    groupby : object
        Column name on which to group. This column must be sorted.

    Yields
    ------
    tuple
        Pairs of (group value, sub-DataFrame for that group).
    """
    # Guard against an empty frame: peeking at the first element with
    # .iloc[0] would otherwise raise IndexError.
    if len(df) == 0:
        return
    start = 0
    prev = df[groupby].iloc[start]
    for i, value in enumerate(df[groupby]):
        if value != prev:
            yield prev, df.iloc[start:i]
            prev = value
            start = i
    yield prev, df.iloc[start:]
Perform a groupby on a DataFrame using a specific column and assuming that that column is sorted. Parameters ---------- df : pandas.DataFrame groupby : object Column name on which to groupby. This column must be sorted. Returns ------- generator Yields pairs of group_na...
def redirect(view=None, url=None, **kwargs):
    """Redirect to the given view (by name) or to a raw URL."""
    target = url
    if view:
        if target:
            # A provided url becomes the 'url' argument of the view.
            kwargs["url"] = target
        target = flask.url_for(view, **kwargs)
    current_context.exit(flask.redirect(target))
Redirects to the specified view or url
def pixel_coord(self):
    """Coordinates of the source in the cutout reference frame.

    @return: pixel coordinates as produced by ``get_pixel_coordinates``.
    """
    reading = self.reading
    return self.get_pixel_coordinates(reading.pix_coord, reading.get_ccd_num())
Return the coordinates of the source in the cutout reference frame. @return:
def fullConn (self, preCellsTags, postCellsTags, connParam): from .. import sim if sim.cfg.verbose: print('Generating set of all-to-all connections (rule: %s) ...' % (connParam['label'])) paramsStrFunc = [param for param in [p+'Func' for p in self.connStringFuncParams] if param in connParam] for paramS...
Generates connections between all pre and post-syn cells
def has_neigh(tag_name, params=None, content=None, left=True): def has_neigh_closure(element): if not element.parent \ or not (element.isTag() and not element.isEndTag()): return False childs = element.parent.childs childs = filter( lambda x: (x.isTag() and...
This function generates functions, which matches all tags with neighbours defined by parameters. Args: tag_name (str): Tag has to have neighbour with this tagname. params (dict): Tag has to have neighbour with this parameters. params (str): Tag has to have neighbour with this content. ...
def fix_style(style='basic', ax=None, **kwargs): style = _read_style(style) for s in style: if not s in style_params.keys(): avail = [f.replace('.mplstyle', '') for f in os.listdir( _get_lib()) if f.endswith('.mplstyle')] raise ValueError('{0} is not a valid style...
Add an extra formatting layer to an axe, that couldn't be changed directly in matplotlib.rcParams or with styles. Apply this function to every axe you created. Parameters ---------- ax: a matplotlib axe. If None, the last axe generated is used style: string or list of string ...
def edit_view(self, request, object_id):
    """Instantiate the class-based 'edit' view and dispatch the request.

    The view class used can be customised via the 'edit_view_class'
    attribute.
    """
    view = self.edit_view_class.as_view(model_admin=self, object_id=object_id)
    return view(request)
Instantiates a class-based view to provide 'edit' functionality for the assigned model, or redirect to Wagtail's edit view if the assigned model extends 'Page'. The view class used can be overridden by changing the 'edit_view_class' attribute.
def trim(self):
    """Trim leaves from the tree that are not observed at the
    highest-resolution level.

    NOTE(review): the bare ``return`` below makes everything after it
    unreachable -- trimming is effectively disabled. Confirm whether this
    is intentional before removing the early return.
    """
    return
    # Dead code: walk levels from next-to-deepest upwards, removing leaves.
    for l in self._levels[-2::-1]:
        for n in l:
            if n.is_leaf:
                n.parent.remove_child(n.label)
    self._clear_all_leaves()
Trims leaves from tree that are not observed at highest-resolution level This is a bit hacky-- what it does is
def update_confirmation(self, confirmation_id, confirmation_dict):
    """Update a confirmation.

    :param confirmation_id: the confirmation id
    :param confirmation_dict: dict of fields to update
    :return: dict
    """
    payload = dict(
        resource=CONFIRMATIONS,
        billomat_id=confirmation_id,
        send_data=confirmation_dict,
    )
    return self._create_put_request(**payload)
Updates a confirmation :param confirmation_id: the confirmation id :param confirmation_dict: dict :return: dict
def _cast_to_type(self, value): if not isinstance(value, list): self.fail('invalid', value=value) return value
Raise error if the value is not a list
def update_os_version(self):
    """Detect the device OS version and store it on ``self.os_version``."""
    self.chain.connection.log("Detecting os version")
    detected = self.driver.get_os_version(self.version_text)
    if not detected:
        # Nothing detected: leave the current value untouched.
        return
    self.chain.connection.log("SW Version: {}".format(detected))
    self.os_version = detected
Update os_version attribute.
def fromHTML(html, *args, **kwargs):
    """Create a TreeOfContents abstraction from raw HTML.

    :param str html: HTML markup
    :return: TreeOfContents object
    """
    soup = BeautifulSoup(html, 'html.parser', *args, **kwargs)
    return TOC('[document]', source=soup, descendants=soup.children)
Creates abstraction using HTML :param str html: HTML :return: TreeOfContents object
def plot(self, bins=10, facecolor='0.5', plot_cols=None,
         filename="ensemble.pdf", func_dict=None, **kwargs):
    """Plot ensemble histograms to a multipage PDF.

    Parameters
    ----------
    bins : int
        Number of histogram bins.
    facecolor : str
        Matplotlib color for the histogram faces.
    plot_cols : list of str
        Subset of ensemble columns to plot. If None, all are plotted.
    filename : str
        Output PDF file name. Default is "ensemble.pdf".
    func_dict : dict
        NOTE(review): accepted but never forwarded to ``ensemble_helper``
        (and neither are ``**kwargs``) -- confirm whether they should be
        passed through.
    """
    ensemble_helper(self, bins=bins, facecolor=facecolor, plot_cols=plot_cols,
                    filename=filename)
plot ensemble histograms to multipage pdf Parameters ---------- bins : int number of bins facecolor : str color plot_cols : list of str subset of ensemble columns to plot. If None, all are plotted. Default is None filename...
def datatable_df(self):
    """Return the DataFrame representation of the symbol's final data."""
    frame = pd.DataFrame(self._all_datatable_data())
    frame.columns = self.dt_all_cols
    return self._finish_df(frame, 'ALL')
returns the dataframe representation of the symbol's final data
def _cleanup_markers(context_id, task_ids): logging.debug("Cleanup %d markers for Context %s", len(task_ids), context_id) delete_entities = [ndb.Key(FuriousAsyncMarker, id) for id in task_ids] delete_entities.append(ndb.Key(FuriousCompletionMarker, context_id)) ndb.delete_multi(delete_...
Delete the FuriousAsyncMarker entities corresponding to ids.
def cancel(self, job):
    """Pull a job from the scheduler queue.

    Accepts either a job instance or a job id.
    """
    job_id = job.id if isinstance(job, self.job_class) else job
    self.connection.zrem(self.scheduled_jobs_key, job_id)
Pulls a job from the scheduler queue. This function accepts either a job_id or a job instance.
def include_yaml(self, node): filename = self.construct_scalar(node) if not filename.startswith('/'): if self._root is None: raise Exception('!include_yaml %s is a relative path, ' 'but stream lacks path' % filename) filename = os.p...
load another yaml file from the path specified by node's value
def day_interval(year, month, day, milliseconds=False, return_string=False): if milliseconds: delta = timedelta(milliseconds=1) else: delta = timedelta(seconds=1) start = datetime(year, month, day) end = datetime(year, month, day) + timedelta(days=1) - delta if not return_string: ...
Return a start datetime and end datetime of a day. :param milliseconds: Minimum time resolution. :param return_string: If you want string instead of datetime, set True Usage Example:: >>> start, end = rolex.day_interval(2014, 6, 17) >>> start datetime(2014, 6, 17, 0, 0, 0) ...
def token(self, token_address: Address) -> Token: if not is_binary_address(token_address): raise ValueError('token_address must be a valid address') with self._token_creation_lock: if token_address not in self.address_to_token: self.address_to_token[token_address]...
Return a proxy to interact with a token.
def _run_tool(cmd, use_container=True, work_dir=None, log_file=None): if isinstance(cmd, (list, tuple)): cmd = " ".join([str(x) for x in cmd]) cmd = utils.local_path_export(at_start=use_container) + cmd if log_file: cmd += " 2>&1 | tee -a %s" % log_file try: print("Running: %s" %...
Run with injection of bcbio path. Place at end for runs without containers to avoid overriding other bcbio installations.
def text(self, value):
    """Set the text value and mark the node as edited.

    Args:
        value (str): Text value.
    """
    self._text = value
    # Record when the edit happened, then flag the node dirty.
    edited_at = datetime.datetime.utcnow()
    self.timestamps.edited = edited_at
    self.touch(True)
Set the text value. Args: value (str): Text value.
def _get_nblock_regions(in_file, min_n_size, ref_regions): out_lines = [] called_contigs = set([]) with utils.open_gzipsafe(in_file) as in_handle: for line in in_handle: contig, start, end, ctype = line.rstrip().split() called_contigs.add(contig) if (ctype in ["RE...
Retrieve coordinates of regions in reference genome with no mapping. These are potential breakpoints for parallelizing analysis.
def estimate_pos_and_err_parabolic(tsvals):
    """Estimate the 1-D source position and its uncertainty by a
    parabolic fit, assuming the samples straddle the maximum and the
    errors are parabolic.

    Parameters
    ----------
    tsvals : `~numpy.ndarray`
        The TS values at the maximum TS, and for each pixel on either side.

    Returns
    -------
    tuple
        (position offset, uncertainty).
    """
    slope = tsvals[2] - tsvals[0]
    curvature = 2. * tsvals[1] - tsvals[0] - tsvals[2]
    position = slope / (2 * curvature)
    uncertainty = np.sqrt(2 / curvature)
    return position, uncertainty
Solve for the position and uncertainty of source in one dimension assuming that you are near the maximum and the errors are parabolic Parameters ---------- tsvals : `~numpy.ndarray` The TS values at the maximum TS, and for each pixel on either side Returns ------- The positio...
def walk(self):
    """Return a view on the configured steps slice.

    Other Parameters:
        conf.core.snapshots: slice of snapshots (takes precedence).
        conf.core.timesteps: slice of timesteps.
        Defaults to the last snapshot when neither is configured.
    """
    if conf.core.snapshots is not None:
        return self.snaps[conf.core.snapshots]
    if conf.core.timesteps is not None:
        return self.steps[conf.core.timesteps]
    return self.snaps[-1:]
Return view on configured steps slice. Other Parameters: conf.core.snapshots: the slice of snapshots. conf.core.timesteps: the slice of timesteps.
def reformat_cmd(self, text):
    """Strip noise from the typed command text.

    Removes 'az' tokens and a leading scope symbol, then prefixes the
    shell's default command when one is set.
    """
    # NOTE: removes every 'az' substring, not just a leading token.
    cleaned = text.replace('az', '')
    if cleaned and cleaned[0:2] == SELECT_SYMBOL['scope']:
        cleaned = cleaned.replace(SELECT_SYMBOL['scope'], "")
    default = self.shell_ctx.default_command
    if default:
        cleaned = default + ' ' + cleaned
    return cleaned
reformat the text to be stripped of noise
def peek(self, fmt):
    """Interpret the next bits according to ``fmt`` without consuming them.

    fmt -- Token string describing how to interpret the next bits.

    The position in the bitstring is not changed. If not enough bits are
    available then all bits to the end of the bitstring will be used.
    Raises ReadError if the format is not understood.
    """
    saved_position = self._pos
    value = self.read(fmt)
    # Rewind so the peek leaves the stream position untouched.
    self._pos = saved_position
    return value
Interpret next bits according to format string and return result. fmt -- Token string describing how to interpret the next bits. The position in the bitstring is not changed. If not enough bits are available then all bits to the end of the bitstring will be used. Raises ReadError if n...
def read_chunks(self): if self.reading_chunks and self.got_chunk: logger.debug("Fast-Path detected, returning...") return while not self.got_request: self.reading_chunks = True self.got_chunk = False self.httpstream.read_until("\r\n", self._chu...
Read chunks from the HTTP client
def info(self):
    """Retrieve the design document view information data.

    GET databasename/_design/{ddoc}/_info

    :returns: dict of view index information
    """
    url = '/'.join([self.document_url, '_info'])
    resp = self.r_session.get(url)
    resp.raise_for_status()
    return response_to_json_dict(resp)
Retrieves the design document view information data, returns dictionary GET databasename/_design/{ddoc}/_info
def path_helper(self, path, view, **kwargs): super(FlaskRestyPlugin, self).path_helper( path=path, view=view, **kwargs ) resource = self.get_state().views[view] rule = self._rules[resource.rule] operations = defaultdict(Operation) view_...
Path helper for Flask-RESTy views. :param view: An `ApiView` object.
def with_blob(self, blob): content = json.loads(blob.content) self.partition_id = content["partition_id"] self.owner = content["owner"] self.token = content["token"] self.epoch = content["epoch"] self.offset = content["offset"] self.sequence_number = content["sequ...
Init Azure Blob Lease with existing blob.
def create_exception_by_error_code( errorCode, detailCode='0', description='', traceInformation=None, identifier=None, nodeId=None, ): try: dataone_exception = ERROR_CODE_TO_EXCEPTION_DICT[errorCode] except LookupError: dataone_exception = ServiceFailure return dataon...
Create a DataONE Exception object by errorCode. See Also: For args, see: ``DataONEException()``
def batch_message(cls, batch, request_ids): assert isinstance(batch, Batch) if not cls.allow_batches: raise ProtocolError.invalid_request( 'protocol does not permit batches') id_iter = iter(request_ids) rm = cls.request_message nm = cls.notification_me...
Convert a request Batch to a message.
def _try_assign_utc_time(self, raw_time, time_base): if raw_time != IOTileEvent.InvalidRawTime and (raw_time & (1 << 31)): y2k_offset = self.raw_time ^ (1 << 31) return self._Y2KReference + datetime.timedelta(seconds=y2k_offset) if time_base is not None: return time_b...
Try to assign a UTC time to this reading.
def copy(self):
    """Return a copy of this object with an independent scope list."""
    duplicate = self.dup()
    # Shallow-copy the scopes so the copy's list can diverge.
    duplicate._scopes = copy.copy(self._scopes)
    return duplicate
Return a copy of this object.
def _param_types_to_shape(self, param_types: Optional[str]) -> Sequence[int]: param_types = [] if param_types is None else param_types shape = tuple(self.object_table[ptype]['size'] for ptype in param_types) return shape
Returns the fluent shape given its `param_types`.
def pip(usr_pswd=None): try: cmd('which pip') except: return print('-[pip]----------') p = cmd('pip list --outdated') if not p: return pkgs = getPackages(p) for i, p in enumerate(pkgs): if p in ['pip', 'setuptools']: cmd('pip install -U ' + p, usr_pwd=usr_pswd, run=global_run) pkgs.pop(i) for p in pkg...
This updates one package at a time. Could do all at once: pip list --outdated | cut -d' ' -f1 | xargs pip install --upgrade
def interpolate_single(start, end, coefficient, how='linear'):
    """Interpolate a single value between ``start`` and ``end``.

    :param coefficient: interpolation coefficient passed through to the
        chosen interpolation function.
    :param how: name of the interpolation function registered in
        ``INTERP_SINGLE_DICT`` (defaults to 'linear').
    """
    interpolator = INTERP_SINGLE_DICT[how]
    return interpolator(start, end, coefficient)
Interpolate a single value between start and end using the given coefficient
def seek(self, pos=0): if pos - self.pos >= 0: blocks, remainder = divmod(pos - self.pos, self.bufsize) for i in range(blocks): self.read(self.bufsize) self.read(remainder) else: raise StreamError("seeking backwards is not allowed") ...
Set the stream's file pointer to pos. Negative seeking is forbidden.
def _update_aes(self):
    """Rebuild ``self.crypticle`` if a fresh AES key has been published.

    Returns True when the key changed and the crypticle was rebuilt,
    False otherwise.
    """
    latest = salt.master.SMaster.secrets['aes']['secret'].value
    if latest == self.crypticle.key_string:
        return False
    # Re-read the shared secret when constructing, matching the original
    # double-read of the multiprocessing value.
    self.crypticle = salt.crypt.Crypticle(
        self.opts, salt.master.SMaster.secrets['aes']['secret'].value)
    return True
Check to see if a fresh AES key is available and update the components of the worker
def uninstall(cert_name, keychain="/Library/Keychains/System.keychain", keychain_password=None): if keychain_password is not None: unlock_keychain(keychain, keychain_password) cmd = 'security delete-certificate -c "{0}" {1}'.format(cert_name, keychain) return __salt__['cm...
Uninstall a certificate from a keychain cert_name The name of the certificate to remove keychain The keychain to install the certificate to, this defaults to /Library/Keychains/System.keychain keychain_password If your keychain is likely to be locked pass the password and ...
def append(self, listname, xy_idx, var_name, element_name): self.resize() string = '{0} {1}' if listname not in ['unamex', 'unamey', 'fnamex', 'fnamey']: logger.error('Wrong list name for varname.') return elif listname in ['fnamex', 'fnamey']: string ...
Append variable names to the name lists
def user_pass(self, func=None, location=None, **rkwargs): def wrapper(view): view = to_coroutine(view) @functools.wraps(view) async def handler(request, *args, **kwargs): await self.check_user(request, func, location, **rkwargs) return await vi...
Decorator ensures that user pass the given func.
def before_content(self):
    """Push the class name onto the environment before parsing content.

    Used so members parsed inside the body can construct their full names.
    """
    ChapelObject.before_content(self)
    if not self.names:
        return
    # names holds (fullname, ...) tuples; remember the first full name.
    self.env.temp_data['chpl:class'] = self.names[0][0]
    self.clsname_set = True
Called before parsing content. Push the class name onto the class name stack. Used to construct the full name for members.
def execute(self, query_string, params=None): cr = self.connection.cursor() logger.info("SQL: %s (%s)", query_string, params) self.last_query = (query_string, params) t0 = time.time() cr.execute(query_string, params or self.core.empty_params) ms = (time.time() - t0) * 1000 logger.info("RUNTI...
Executes a query. Returns the resulting cursor. :query_string: the parameterized query string :params: can be either a tuple or a dictionary, and must match the parameterization style of the query :return: a cursor object
def prepare_for_negotiated_authenticate( self, entityid=None, relay_state="", binding=None, vorg="", nameid_format=None, scoping=None, consent=None, extensions=None, sign=None, response_binding=saml2.BINDING_HTTP_POST, **kwargs): expected_binding = binding for binding...
Makes all necessary preparations for an authentication request that negotiates which binding to use for authentication. :param entityid: The entity ID of the IdP to send the request to :param relay_state: To where the user should be returned after successfull log in. :param ...
def replace_in_files(dirname, replace): filepath = os.path.abspath(dirname / "requirements.in") if os.path.isfile(filepath) and header_footer_exists(filepath): replaced = re.sub(Utils.exp, replace, get_file_string(filepath)) with open(filepath, "w") as f: f.write(replaced) pr...
Replace current version with new version in requirements files.
def get_path(self, dir=None): if not dir: dir = self.fs.getcwd() if self == dir: return '.' path_elems = self.get_path_elements() pathname = '' try: i = path_elems.index(dir) except ValueError: for p in path_elems[:-1]: ...
Return path relative to the current working directory of the Node.FS.Base object that owns us.
def get_stage_events(cls, crawler, stage_name, start, end, level=None):
    """Return events recorded for a particular stage."""
    stage_key = make_key(crawler, "events", stage_name, level)
    return cls.event_list(stage_key, start, end)
events from a particular stage
async def _reset_protocol(self, exc=None): protocol = await self._get_protocol() await protocol.shutdown() self._protocol = None for ob_error in self._observations_err_callbacks: ob_error(exc) self._observations_err_callbacks.clear()
Reset the protocol if an error occurs.
def fire(self, *args, **kargs): self._time_secs_old = time.time() with self._hlock: handler_list = copy.copy(self._handler_list) result_list = [] for handler in handler_list: if self._sync_mode: result = self._execute(handler, *args, **kargs) ...
collects results of all executed handlers
def thesauri(self, token: dict = None, prot: str = "https") -> dict: thez_url = "{}://v1.{}.isogeo.com/thesauri".format(prot, self.api_url) thez_req = self.get( thez_url, headers=self.header, proxies=self.proxies, verify=self.ssl ) checker.check_api_response(thez_req) ...
Get list of available thesauri. :param str token: API auth token :param str prot: https [DEFAULT] or http (use it only for dev and tracking needs).
def list_parse(name_list): if name_list and name_list[0] == '@': value = name_list[1:] if not os.path.exists(value): log.warning('The file %s does not exist' % value) return try: return [v.strip() for v in open(value, 'r').readlines()] except IOErr...
Parse a comma-separated list of values, or a filename (starting with @) containing a list value on each line.
def _make_fn_text(self): if not self._f: text = "(not loaded)" elif self._f.filename: text = os.path.relpath(self._f.filename, ".") else: text = "(filename not set)" return text
Makes filename text
def _convert_args(args): converted = [] for arg in args: if isinstance(arg, dict): for key in list(arg.keys()): if key == '__kwarg__': continue converted.append('{0}={1}'.format(key, arg[key])) else: converted.append(arg...
Take a list of args, and convert any dicts inside the list to keyword args in the form of `key=value`, ready to be passed to salt-ssh
def layers(self): if self._layers is None: self.__init() lyrs = [] for lyr in self._layers: lyr['object'] = GlobeServiceLayer(url=self._url + "/%s" % lyr['id'], securityHandler=self._securityHandler, ...
gets the globe service layers
def _update_range(self, data, **kwargs): self._client.update_range(data=data, **kwargs)
Update range with data Args: data (bytes): data.
def get_next_tag(cls, el):
    """Return the next sibling that is a tag, or None when there is none.

    :param el: element whose following siblings are scanned.
    """
    sibling = el.next_sibling
    # Test for the end of the sibling chain BEFORE calling is_tag, so
    # is_tag is never handed None (the original checked in the reverse
    # order and relied on is_tag tolerating None).
    while sibling is not None and not cls.is_tag(sibling):
        sibling = sibling.next_sibling
    return sibling
Get next sibling tag.
def assemble_flash_code(self, asm):
    """Assemble the given source and program it into Flash.

    Returns (error, result): on failure, error is the raised exception and
    result is None; on success, error is None.
    """
    source = StringIO(asm)
    worker = assembler.Assembler(self.processor, source)
    try:
        result = worker.assemble()
    except BaseException as exc:
        # Report assembly problems to the caller instead of raising.
        return exc, None
    self.flash.program(result)
    return None, result
assemble the given code and program the Flash
def colorize_format(self, fmt, style=DEFAULT_FORMAT_STYLE): result = [] parser = FormatStringParser(style=style) for group in parser.get_grouped_pairs(fmt): applicable_styles = [self.nn.get(self.field_styles, token.name) for token in group if token.name] if sum(map(bool, ...
Rewrite a logging format string to inject ANSI escape sequences. :param fmt: The log format string. :param style: One of the characters ``%``, ``{`` or ``$`` (defaults to :data:`DEFAULT_FORMAT_STYLE`). :returns: The logging format string with ANSI escape sequences. ...
def backspace(self):
    """Move the cursor one place to the left, erasing the character at the
    current position. Must not move beyond column zero, nor onto the
    previous line.

    NOTE(review): the guard tests ``_cx + _cw >= 0`` yet the move is
    ``_cx -= _cw``; to actually stop at column zero this looks like it
    should be ``_cx - _cw >= 0`` -- confirm against the cursor-width
    semantics before changing.
    """
    if self._cx + self._cw >= 0:
        self.erase()
        self._cx -= self._cw
        self.flush()
Moves the cursor one place to the left, erasing the character at the current position. Cannot move beyond column zero, nor onto the previous line.
def price_dec(raw_price, default=_not_defined):
    """Parse a price string into a ``decimal.Decimal``.

    Extract the price value from the input raw string and present it as a
    Decimal number. If the raw price does not contain a valid price value,
    return ``default``; when no default was given, re-raise the
    ``ValueError``.

    :param str raw_price: raw input string containing a price.
    """
    try:
        return decimal.Decimal(price_str(raw_price))
    except ValueError:
        # Compare the sentinel by identity, not equality, so arbitrary
        # defaults that merely compare equal are not misinterpreted.
        if default is _not_defined:
            raise
        return default
Price decimal value from raw string. Extract price value from input raw string and present as Decimal number. If raw price does not contain valid price value or contains more than one price value, then return default value. If default value not set, then raise ValueError. :param str raw_price...
def mtf_range(mesh, dim, dtype, name=None): dim = convert_to_dimension(dim) with tf.variable_scope(name, default_name="range"): if dtype == tf.bfloat16: tf_range = tf.cast(tf.range(dim.size), tf.bfloat16) else: tf_range = tf.range(dim.size, dtype=dtype) return import_tf_tensor(mesh, tf_range...
Create a 1d mesh tensor with a range from [0, dim.size). Call externally as mtf.range() Args: mesh: a Mesh dim: a Dimension dtype: a tf.DType name: an optional string Returns: a Tensor
def _dead_assignment_elimination(self, function, data_graph): register_pvs = set() for node in data_graph.nodes(): if isinstance(node.variable, SimRegisterVariable) and \ node.variable.reg is not None and \ node.variable.reg < 40: regis...
Remove assignments to registers that has no consumers, but immediately killed. BROKEN - DO NOT USE IT :param angr.knowledge.Function function: :param networkx.MultiDiGraph data_graph: :return: None
def lock_resource_for_update(cls, resource_id, db_session):
    """Select a resource FOR UPDATE, locking access for other transactions.

    :param resource_id: id of the resource row to lock.
    :param db_session: session to use (resolved via ``get_db_session``).
    :return: the locked resource instance, or None if not found.
    """
    session = get_db_session(db_session)
    query = (
        session.query(cls.model)
        .filter(cls.model.resource_id == resource_id)
        .with_for_update()
    )
    return query.first()
Selects resource for update - locking access for other transactions :param resource_id: :param db_session: :return:
def load_ui_type(uifile): import pysideuic import xml.etree.ElementTree as ElementTree from cStringIO import StringIO parsed = ElementTree.parse(uifile) widget_class = parsed.find('widget').get('class') form_class = parsed.find('class').text with open(uifile, 'r') as f: o = StringIO(...
Pyside equivalent for the loadUiType function in PyQt. From the PyQt4 documentation: Load a Qt Designer .ui file and return a tuple of the generated form class and the Qt base class. These can then be used to create any number of instances of the user interface without having to parse the ...
def _handle_continuations(self, response, cache_key): rcontinue = response.get('continue') listen = ['blcontinue', 'cmcontinue', 'plcontinue'] cparams = {} if rcontinue: for flag in listen: if rcontinue.get(flag): cparams[flag] = rcontinue....
Select continue params and clear cache or last continue params
def image_resources(package=None, directory='resources'): if not package: package = calling_package() package_dir = '.'.join([package, directory]) images = [] for i in resource_listdir(package, directory): if i.startswith('__') or i.endswith('.egg-info'): continue fna...
Returns all images under the directory relative to a package path. If no directory or package is specified then the resources module of the calling package will be used. Images are recursively discovered. :param package: package name in dotted format. :param directory: path relative to package path of the ...
def async_save_result(self):
    """Report the outcome of this subject's asynchronous save.

    - Returns True when the save finished successfully.
    - Returns False when no save is pending or it hasn't finished yet.
    - Propagates the save's exception (including
      ``concurrent.futures.CancelledError``) on failure.
    """
    future = getattr(self, "_async_future", None)
    if future is None or not future.done():
        return False
    # result() re-raises the failure (or CancelledError) if any.
    future.result()
    return True
Retrieves the result of this subject's asynchronous save. - Returns `True` if the subject was saved successfully. - Raises `concurrent.futures.CancelledError` if the save was cancelled. - If the save failed, raises the relevant exception. - Returns `False` if the subject hasn't finished...
def before_sample(analysis_request):
    """Fill defaults before the "sample" transition on an Analysis Request.

    Sets the sampled date to now and the sampler to the current user
    whenever they are missing.
    """
    if not analysis_request.getDateSampled():
        analysis_request.setDateSampled(DateTime())
    if not analysis_request.getSampler():
        current_user = api.get_current_user()
        analysis_request.setSampler(current_user.id)
Method triggered before "sample" transition for the Analysis Request passed in is performed
def _init_from_file(self, filename):
    """Create the detector from a detx file.

    :raises NotImplementedError: for any filename not ending in detx.
    """
    if not filename.endswith("detx"):
        raise NotImplementedError('Only the detx format is supported.')
    self._open_file(filename)
    # Parse in fixed order: comments, header, then the DOM definitions.
    for parse_step in (self._extract_comments, self._parse_header, self._parse_doms):
        parse_step()
    self._det_file.close()
Create detector from detx file.
def create_pattern(cls, userdata):
    """Create a user data instance with all values the same."""
    template = cls.create_empty(None)
    normalized = cls.normalize(template, userdata)
    return Userdata(normalized)
Create a user data instance with all values the same.