code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def add_children_to_node(self, node):
    """Add each child block of this block as a child node of `node`.

    :param node: an etree.Element to which the children are appended.
    """
    # has_children is an XBlock-style flag; without it there is nothing to add.
    if self.has_children:
        for child_id in self.children:
            # Resolve the usage id to an actual block via the runtime, then
            # let the runtime serialise it under `node`.
            child = self.runtime.get_block(child_id)
            self.runtime.add_block_as_child_node(child, node)
Add children to etree.Element `node`.
def add_release(self, login, package_name, version, requirements, announce, release_attrs):
    """Add a new release to a package via the remote API.

    :param login: login of the package owner
    :param package_name: name of the package
    :param version: version string of the release
    :param requirements: a dict of requirements for the release
    :param announce: announcement text posted to package watchers
    :param release_attrs: optional dict of extra release attributes; its
        entries override the defaults built below
    :return: the decoded JSON body of the server response
    """
    url = '%s/release/%s/%s/%s' % (self.domain, login, package_name, version)
    if not release_attrs:
        release_attrs = {}
    payload = {
        'requirements': requirements,
        'announce': announce,
        'description': None,
    }
    payload.update(release_attrs)
    # jencode presumably JSON-encodes the payload and returns matching
    # request headers -- TODO confirm against its definition.
    data, headers = jencode(payload)
    res = self.session.post(url, data=data, headers=headers)
    self._check_response(res)
    return res.json()
Add a new release to a package. :param login: the login of the package owner :param package_name: the name of the package :param version: the version string of the release :param requirements: A dict of requirements TODO: describe :param announce: An announcement that will be posted to all package watchers
def timestamp_from_datetime(date_time):
    """Return the POSIX timestamp of `date_time` as a float.

    Naive datetimes are interpreted in local time via time.mktime;
    timezone-aware datetimes are measured against the module-level
    _EPOCH (presumably an aware epoch datetime -- confirm at definition).
    """
    if date_time.tzinfo is None:
        # mktime takes a 9-tuple; the trailing -1s let the C library fill
        # in weekday/yearday and guess DST. Microseconds are added manually
        # since mktime only has second resolution.
        return time.mktime((date_time.year, date_time.month, date_time.day,
                            date_time.hour, date_time.minute, date_time.second,
                            -1, -1, -1)) + date_time.microsecond / 1e6
    return (date_time - _EPOCH).total_seconds()
Returns POSIX timestamp as float
def restore_schema(task, **kwargs):
    """Switch the DB connection back to the schema active before the task ran.

    Reads the schema saved on the task object (if any) and restores it on
    the shared `connection`; defaults to the public schema otherwise.
    """
    from .compat import get_public_schema_name
    # Default to the public schema when the task did not record one.
    schema_name = get_public_schema_name()
    include_public = True
    if hasattr(task, '_old_schema'):
        schema_name, include_public = task._old_schema
    # Already on the right schema -- nothing to do.
    if connection.schema_name == schema_name:
        return
    connection.set_schema(schema_name, include_public=include_public)
Switches the schema back to the one from before running the task.
def sync_next_id(self):
    """Scan this table for the highest used ID and bump .next_id past it.

    If .next_id is None this is a no-op. The (possibly updated) .next_id
    is returned. Note that .next_id is typically a class attribute shared
    by tables of the same type, so running this on every table yields
    document-wide unique IDs.
    """
    if self.next_id is not None:
        if len(self):
            # Highest existing ID in this table's ID column, plus one.
            n = max(self.getColumnByName(self.next_id.column_name)) + 1
        else:
            # Empty table: start numbering at 0, using the same ID type.
            n = type(self.next_id)(0)
        # Only ever move next_id forward, never backwards.
        if n > self.next_id:
            self.set_next_id(n)
    return self.next_id
Determines the highest-numbered ID in this table, and sets the table's .next_id attribute to the next highest ID in sequence. If the .next_id attribute is already set to a value greater than the highest value found, then it is left unmodified. The return value is the ID identified by this method. If the table's .next_id attribute is None, then this function is a no-op. Note that tables of the same name typically share a common .next_id attribute (it is a class attribute, not an attribute of each instance) so that IDs can be generated that are unique across all tables in the document. Running sync_next_id() on all the tables in a document that are of the same type will have the effect of setting the ID to the next ID higher than any ID in any of those tables. Example: >>> import lsctables >>> tbl = lsctables.New(lsctables.ProcessTable) >>> print tbl.sync_next_id() process:process_id:0
def lazy_property(function):
    """Memoise the first result of `function`; later calls reuse it.

    Intended for argument-less functions that act like lazily computed
    global/static properties: the wrapped function runs at most once.
    """
    _cache = []

    def _wrapper(*args):
        if not _cache:
            _cache.append(function(*args))
        return _cache[0]

    return _wrapper
Cache the first return value of a function for all subsequent calls. This decorator is useful for argument-less functions that behave more like a global or static property that should be calculated once, but lazily (i.e. only if requested).
def encode_streaming(self, data):
    """Encode `data` symbol by symbol, yielding output bytes.

    :param data: iterable of symbols present in self._table
    :return: generator of bytes (via to_byte); a final _EOF code is
        appended and the last partial byte is zero-padded on the right.
    """
    buffer = 0
    size = 0
    for s in data:
        # (bit-length, value) of the code for symbol s.
        b, v = self._table[s]
        buffer = (buffer << b) + v
        size += b
        # Emit every complete byte accumulated so far.
        while size >= 8:
            byte = buffer >> (size - 8)
            yield to_byte(byte)
            buffer = buffer - (byte << (size - 8))
            size -= 8
    if size > 0:
        # Flush: append the EOF code, then emit the final (padded) byte.
        b, v = self._table[_EOF]
        buffer = (buffer << b) + v
        size += b
        if size >= 8:
            byte = buffer >> (size - 8)
        else:
            byte = buffer << (8 - size)
        yield to_byte(byte)
Encode given data in streaming fashion. :param data: sequence of symbols (e.g. byte string, unicode string, list, iterator) :return: generator of bytes (single character strings in Python2, ints in Python 3)
def hydrate_point(srid, *coordinates):
    """Create a Point (or SRID-specific subclass) from raw coordinates.

    The subclass and expected dimensionality are looked up by SRID in
    __srid_table; unknown SRIDs fall back to a plain Point carrying the
    srid. Raises ValueError when the coordinate count does not match the
    dimensionality registered for the SRID.
    """
    try:
        point_class, dim = __srid_table[srid]
    except KeyError:
        # Unknown SRID: generic Point, remember the srid on the instance.
        point = Point(coordinates)
        point.srid = srid
        return point
    else:
        if len(coordinates) != dim:
            raise ValueError("SRID %d requires %d coordinates (%d provided)" % (srid, dim, len(coordinates)))
        return point_class(coordinates)
Create a new instance of a Point subclass from a raw set of fields. The subclass chosen is determined by the given SRID; a ValueError will be raised if no such subclass can be found.
def get_module_verbosity_flags(*labels):
    """Check argv for the standard per-module verbosity flags.

    For each label, builds flags like --verbose-<label> / --veryverb-<label>
    and returns (verbose_module, veryverbose_module). verbose_module is 2
    when very-verbose is active.
    """
    verbose_prefix_list = ['--verbose-', '--verb', '--verb-']
    veryverbose_prefix_list = ['--veryverbose-', '--veryverb', '--veryverb-']
    verbose_flags = tuple(
        [prefix + lbl for prefix, lbl in itertools.product(verbose_prefix_list, labels)])
    veryverbose_flags = tuple(
        [prefix + lbl for prefix, lbl in itertools.product(veryverbose_prefix_list, labels)])
    # Module-level VERBOSE / VERYVERBOSE act as global overrides.
    veryverbose_module = get_argflag(veryverbose_flags) or VERYVERBOSE
    verbose_module = (get_argflag(verbose_flags) or veryverbose_module or VERBOSE)
    if veryverbose_module:
        verbose_module = 2
    return verbose_module, veryverbose_module
Checks for the standard flags for enabling module-specific verbosity.
def setRti(self, rti):
    """Update this VisItem from the given repo tree item.

    Is a slot, but the signal is usually connected to the Collector,
    which then calls this method directly.
    """
    check_class(rti, BaseRti)
    self._rti = rti
    self._updateWidgets()
    self._updateRtiInfo()
Updates the current VisItem from the contents of the repo tree item. Is a slot but the signal is usually connected to the Collector, which then calls this function directly.
def sorted_conkeys(self, prefix=None):
    """Return self.conditions keys sorted suitably for printing/writing.

    Keys are grouped by the prefixes in _COND_PREFIXES and sorted within
    each group by self.cond_int. If `prefix` is given, only keys starting
    with it are returned (grouping order preserved).
    """
    conkeys = []
    for cond in _COND_PREFIXES:
        conkeys += sorted([key for key in self.conditions
                           if key.startswith(cond)], key=self.cond_int)
    if not prefix:
        return conkeys
    return [key for key in conkeys if key.startswith(prefix)]
Return all condition keys in self.conditions as a list sorted suitable for print or write to a file. If prefix is given return only the ones prefixed with prefix.
def _encode_time(mtime: float) -> int:
    """Encode an mtime float as a packed 32-bit FAT date/time value.

    High 16 bits: date (years since 1980, month, day); low 16 bits:
    time with 2-second resolution, all in local time.
    """
    dt = arrow.get(mtime)
    dt = dt.to("local")
    date_val = ((dt.year - 1980) << 9) | (dt.month << 5) | dt.day
    # FAT stores seconds in 2-second units; floor(secs / 2) rounds down.
    secs = dt.second + dt.microsecond / 10**6
    time_val = (dt.hour << 11) | (dt.minute << 5) | math.floor(secs / 2)
    return (date_val << 16) | time_val
Encode a mtime float as a 32-bit FAT time
def get_image_label(name, default="not_found.png"):
    """Return a QLabel displaying the named image.

    :param name: image name resolved via get_image_path
    :param default: fallback image used when `name` cannot be found
    """
    label = QLabel()
    label.setPixmap(QPixmap(get_image_path(name, default)))
    return label
Return image inside a QLabel object
def flush(self):
    """Flush every contained item, then the backing file itself."""
    for name in self.item_names:
        item = self[name]
        item.flush()
    self.file.flush()
Ensure contents are written to file.
def get_conn(self, *args, **kwargs):
    """Return a connection object (or list of them) from the router.

    Useful when a connection cannot be automatically determined during
    all steps of the process (e.g. Redis pipelines).

    :return: the single connection when exactly one matches, otherwise
        the full list of matching connections.
    """
    connections = self.__connections_for('get_conn', args=args, kwargs=kwargs)
    # FIX: the original used `len(connections) is 1`; identity comparison
    # with an int literal is implementation-defined -- use equality.
    if len(connections) == 1:
        return connections[0]
    else:
        return connections
Returns a connection object from the router given ``args``. Useful in cases where a connection cannot be automatically determined during all steps of the process. An example of this would be Redis pipelines.
def load_rv_data(filename, indep, dep, indweight=None, dir='./'):
    """Load an RV data file into a dict of phoebe parameter arrays.

    :param filename: data file; may include a path component which then
        takes precedence over `dir`
    :param indep: independent-variable name (unused here, kept for API)
    :param dep: dependent-variable name (unused here, kept for API)
    :param indweight: weighting scheme named in the .phoebe file
        ("Standard deviation" or "Standard weight")
    :param dir: directory used when `filename` has no path component
    :return: dict with phoebe_rv_time, phoebe_rv_vel and, when a third
        column is present and indweight set, phoebe_rv_sigmarv
    """
    if '/' in filename:
        path, filename = os.path.split(filename)
    else:
        path = dir
    load_file = os.path.join(path, filename)
    rvdata = np.loadtxt(load_file)
    d = {}
    d['phoebe_rv_time'] = rvdata[:, 0]
    d['phoebe_rv_vel'] = rvdata[:, 1]
    ncol = len(rvdata[0])
    if indweight == "Standard deviation":
        if ncol >= 3:
            d['phoebe_rv_sigmarv'] = rvdata[:, 2]
        else:
            logger.warning('A sigma column is mentioned in the .phoebe file but is not present in the rv data file')
    elif indweight == "Standard weight":
        if ncol >= 3:
            # Convert standard weights w to standard deviations 1/sqrt(w).
            sigma = np.sqrt(1 / rvdata[:, 2])
            d['phoebe_rv_sigmarv'] = sigma
            logger.warning('Standard weight has been converted to Standard deviation.')
        else:
            # FIX: typo in the original log message ("deviaton").
            logger.warning('Phoebe 2 currently only supports standard deviation')
    return d
Load a dictionary with RV data.
def qstd(x, quant=0.05, top=False, bottom=False):
    """Standard deviation of `x` ignoring the outer `quant` percentiles.

    With top=True only the lower tail is trimmed (values below the quant
    percentile are dropped); with bottom=True only the upper tail is
    trimmed; otherwise both tails are dropped.
    """
    ordered = np.sort(x)
    count = np.size(x)
    lower_cut = ordered[int(count * quant)]
    upper_cut = ordered[int(count * (1 - quant))]
    if top:
        keep = np.where(x >= lower_cut)
    elif bottom:
        keep = np.where(x <= upper_cut)
    else:
        keep = np.where((x >= lower_cut) & (x <= upper_cut))
    return np.std(x[keep])
returns std, ignoring outer 'quant' pctiles
def on_touch(self, view, event):
    """Trigger the declaration's touch event and report the outcome.

    :param view: the view that sent this event
    :param event: the raw touch event passed through to the declaration
    :return: the (possibly modified) 'result' flag from the event dict
    """
    payload = {'event': event, 'result': False}
    self.declaration.touch_event(payload)
    return payload['result']
Trigger the touch event Parameters ---------- view: int The ID of the view that sent this event event: The touch event data passed through to the declaration
def process_exception(self, request, exception):
    """Catch internal Horizon exception classes and handle them gracefully.

    NotAuthorized/NotAuthenticated redirect to login (401 for AJAX in the
    NotAuthenticated case), NotFound becomes Http404, and Http302 becomes
    a redirect. Any other exception falls through (implicit None) for the
    framework to re-raise.
    """
    if isinstance(exception, (exceptions.NotAuthorized, exceptions.NotAuthenticated)):
        auth_url = settings.LOGIN_URL
        next_url = iri_to_uri(request.get_full_path())
        # Avoid a redirect loop when already on the login page.
        if next_url != auth_url:
            field_name = REDIRECT_FIELD_NAME
        else:
            field_name = None
        login_url = request.build_absolute_uri(auth_url)
        response = redirect_to_login(next_url, login_url=login_url,
                                     redirect_field_name=field_name)
        if isinstance(exception, exceptions.NotAuthorized):
            # NotAuthorized: clear queued messages, show a dedicated 403.
            response.delete_cookie('messages')
            return shortcuts.render(request, 'not_authorized.html', status=403)
        if request.is_ajax():
            # AJAX callers get a 401 with the login location in a header.
            response_401 = http.HttpResponse(status=401)
            response_401['X-Horizon-Location'] = response['location']
            return response_401
        return response
    if isinstance(exception, exceptions.NotFound):
        raise http.Http404(exception)
    if isinstance(exception, exceptions.Http302):
        return shortcuts.redirect(exception.location)
Catches internal Horizon exception classes. Exception classes such as NotAuthorized, NotFound and Http302 are caught and handles them gracefully.
def apply(self, docs, split=0, clear=True, parallelism=None, progress_bar=True):
    """Run the CandidateExtractor over `docs`.

    :param docs: set of documents to extract from
    :param split: which split to assign the extracted Candidates to
    :param clear: whether to clear existing Candidates beforehand
    :param parallelism: thread-count override for this run
    :param progress_bar: whether to display a per-document progress bar
    """
    super(CandidateExtractor, self).apply(
        docs, split=split, clear=clear, parallelism=parallelism,
        progress_bar=progress_bar,
    )
Run the CandidateExtractor. :Example: To extract candidates from a set of training documents using 4 cores:: candidate_extractor.apply(train_docs, split=0, parallelism=4) :param docs: Set of documents to extract from. :param split: Which split to assign the extracted Candidates to. :type split: int :param clear: Whether or not to clear the existing Candidates beforehand. :type clear: bool :param parallelism: How many threads to use for extraction. This will override the parallelism value used to initialize the CandidateExtractor if it is provided. :type parallelism: int :param progress_bar: Whether or not to display a progress bar. The progress bar is measured per document. :type progress_bar: bool
def trans(self, id, parameters=None, domain=None, locale=None):
    """Translate message `id`, formatting it with `parameters`.

    :param id: message identifier to look up
    :param parameters: substitutions applied via self.format
    :param domain: catalogue domain (defaults to 'messages')
    :param locale: target locale; defaults to self.locale, otherwise
        validated first
    :raises RuntimeError: when the message is missing from the catalogue
    """
    if parameters is None:
        parameters = {}
    if locale is None:
        locale = self.locale
    else:
        self._assert_valid_locale(locale)
    if domain is None:
        domain = 'messages'
    catalogue = self.get_catalogue(locale)
    if not catalogue.has(id, domain):
        raise RuntimeError(
            "There is no translation for {0} in domain {1}".format(
                id, domain
            )
        )
    msg = self.get_catalogue(locale).get(id, domain)
    return self.format(msg, parameters)
Throws RuntimeError whenever a message is missing
def visit_root(self, _, children):
    """Handle the ROOT rule: mark the parsed resource as the query root.

    :param _: the parsimonious node (unused)
    :param children: [whitespace, resource, whitespace]; only index 1 matters
    :return: the resource with ``is_root`` set to True
    """
    root_resource = children[1]
    root_resource.is_root = True
    return root_resource
The main node holding all the query. Arguments --------- _ (node) : parsimonious.nodes.Node. children : list - 0: for ``WS`` (whitespace): ``None``. - 1: for ``NAMED_RESOURCE``: an instance of a subclass of ``.resources.Resource``. - 2: for ``WS`` (whitespace): ``None``. Returns ------- .resources.Resource An instance of a subclass of ``.resources.Resource``, with ``is_root`` set to ``True``. Example ------- >>> data = DataQLParser(r''' ... foo ... ''', default_rule='ROOT').data >>> data <Field[foo] /> >>> data.is_root True >>> data = DataQLParser(r''' ... bar[name] ... ''', default_rule='ROOT').data >>> data <List[bar]> <Field[name] /> </List[bar]> >>> data.is_root True >>> data = DataQLParser(r''' ... baz{name} ... ''', default_rule='ROOT').data >>> data <Object[baz]> <Field[name] /> </Object[baz]> >>> data.is_root True
def send_scheduled_messages(priority=None, ignore_unknown_messengers=False, ignore_unknown_message_types=False):
    """Send all unsent dispatches, grouped per messenger.

    :param priority: optional int to limit sending to this priority
    :param ignore_unknown_messengers: silence UnknownMessengerError
    :param ignore_unknown_message_types: silence UnknownMessageTypeError
        (handled downstream by _process_messages)
    :raises UnknownMessengerError: unless ignored
    """
    dispatches_by_messengers = Dispatch.group_by_messengers(Dispatch.get_unsent(priority=priority))
    for messenger_id, messages in dispatches_by_messengers.items():
        try:
            messenger_obj = get_registered_messenger_object(messenger_id)
            messenger_obj._process_messages(messages, ignore_unknown_message_types=ignore_unknown_message_types)
        except UnknownMessengerError:
            # Skip unknown messengers only when explicitly asked to.
            if ignore_unknown_messengers:
                continue
            raise
Sends scheduled messages. :param int, None priority: number to limit sending message by this priority. :param bool ignore_unknown_messengers: to silence UnknownMessengerError :param bool ignore_unknown_message_types: to silence UnknownMessageTypeError :raises UnknownMessengerError: :raises UnknownMessageTypeError:
def handle(self, event):
    """Decorator registering a handler function for a particular event.

    Usage::

        my_client = Client()

        @my_client.handle("WELCOME")
        def welcome_handler(client, *params):
            pass
    """
    def register(func):
        self.add_handler(event, func)
        return func

    return register
Decorator for adding a handler function for a particular event. Usage: my_client = Client() @my_client.handle("WELCOME") def welcome_handler(client, *params): # Do something with the event. pass
def _unescape_math(xml):
    """Unescape MathJax math scripts back into inline MathML elements.

    Finds <script type="math/mml"> nodes, unescapes their text twice
    (the content is presumably double-escaped upstream -- confirm), and
    replaces the text with the parsed MathML subtree.
    """
    xpath_math_script = etree.XPath(
        '//x:script[@type="math/mml"]',
        namespaces={'x': 'http://www.w3.org/1999/xhtml'})
    math_script_list = xpath_math_script(xml)
    for mathscript in math_script_list:
        math = mathscript.text
        math = unescape(unescape(math))
        mathscript.clear()
        # clear() drops attributes too; restore the type attribute.
        mathscript.set('type', 'math/mml')
        new_math = etree.fromstring(math)
        mathscript.append(new_math)
    return xml
Unescapes Math from Mathjax to MathML.
async def starttls(self, context=None):
    """Upgrade the connection to the SMTP server into TLS mode.

    If there has been no previous EHLO or HELO command this session,
    ESMTP EHLO is tried first. After a successful STARTTLS all cached
    EHLO/HELO state is reset, since the server must be re-greeted over
    the encrypted channel.

    Args:
        context (OpenSSL.SSL.Context): SSL context; defaults to TLSv1.2.

    Raises:
        SMTPCommandNotSupportedError: if the server lacks STARTTLS.
        SMTPCommandFailedError: if the STARTTLS command fails.
        BadImplementationError: if the connection does not use aioopenssl.

    Returns:
        (int, message): (code, message) 2-tuple of the server response.
    """
    if not self.use_aioopenssl:
        raise BadImplementationError("This connection does not use aioopenssl")
    import aioopenssl
    import OpenSSL
    await self.ehlo_or_helo_if_needed()
    if "starttls" not in self.esmtp_extensions:
        raise SMTPCommandNotSupportedError("STARTTLS")
    code, message = await self.do_cmd("STARTTLS", success=(220,))
    if context is None:
        context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
    await self.transport.starttls(ssl_context=context)
    # Forget everything negotiated before the TLS upgrade.
    self.last_ehlo_response = (None, None)
    self.last_helo_response = (None, None)
    self.supports_esmtp = False
    self.esmtp_extensions = {}
    self.auth_mechanisms = []
    return (code, message)
Upgrades the connection to the SMTP server into TLS mode. If there has been no previous EHLO or HELO command this session, this method tries ESMTP EHLO first. If the server supports SSL/TLS, this will encrypt the rest of the SMTP session. Raises: SMTPCommandNotSupportedError: If the server does not support STARTTLS. SMTPCommandFailedError: If the STARTTLS command fails BadImplementationError: If the connection does not use aioopenssl. Args: context (:obj:`OpenSSL.SSL.Context`): SSL context Returns: (int, message): A (code, message) 2-tuple containing the server response.
def config(
    state, host, key, value,
    repo=None,
):
    """Manage git config for a repository or globally.

    + key: the key of the config to ensure
    + value: the value this key should have
    + repo: git repo path to edit local config (defaults to global)

    Yields the shell command only when the current config differs.
    """
    current = host.fact.git_config(repo)
    missing = key not in current
    if missing or current[key] != value:
        if repo is None:
            yield 'git config --global {0} "{1}"'.format(key, value)
        else:
            yield 'cd {0} && git config --local {1} "{2}"'.format(repo, key, value)
Manage git config for a repository or globally. + key: the key of the config to ensure + value: the value this key should have + repo: specify the git repo path to edit local config (defaults to global)
def rest_action(self, func, url, **kwargs):
    """Perform a low-level REST operation and decode the JSON response.

    Args:
        func (callable): API function to call (e.g. a session method)
        url (str): service URL endpoint
        kwargs (dict): additional parameters passed to `func`

    Raises:
        requests.RequestException: on connection errors
        ValueError: when the response body is not valid JSON

    Returns:
        The JSON-decoded content of the response.
    """
    try:
        response = func(url, timeout=self.TIMEOUT, **kwargs)
    # FIX: the original used the Python-2-only `except X, err` syntax,
    # which is a SyntaxError on Python 3.
    except requests.RequestException as err:
        log.exception(
            "[PyLmod] Error - connection error in "
            "rest_action, err=%s", err
        )
        raise
    try:
        return response.json()
    except ValueError as err:
        log.exception('Unable to decode %s', response.content)
        raise
Routine to do low-level REST operation, with retry. Args: func (callable): API function to call url (str): service URL endpoint kwargs (dict): addition parameters Raises: requests.RequestException: Exception connection error ValueError: Unable to decode response content Returns: list: the json-encoded content of the response
def screenshot(self, filename, scale=1.0, quality=100):
    """Take a screenshot and save it to `filename`.

    Tries the server-side screenshot first; otherwise captures on the
    device, pulls the file via adb, and removes the device-side copy.

    :return: the server result, the local filename, or None on failure.
    """
    result = self.server.screenshot(filename, scale, quality)
    if result:
        return result
    device_file = self.server.jsonrpc.takeScreenshot("screenshot.png", scale, quality)
    if not device_file:
        return None
    p = self.server.adb.cmd("pull", device_file, filename)
    p.wait()
    self.server.adb.cmd("shell", "rm", device_file).wait()
    # FIX: compare the return code with == rather than `is`; identity
    # checks against int literals are implementation-defined.
    return filename if p.returncode == 0 else None
Take a screenshot and save it to the given filename.
def _timestamp():
    """Return a gmtime timestamp string with microsecond precision.

    Format: YYYY-MM-DD-HH-MM-SS-UUUUUU (microseconds zero-padded).
    """
    moment = time.time()
    # FIX: the original took the fractional digits from repr(moment),
    # which is not zero-padded (0.5s -> "5", which reads as 5us).
    # Derive the microsecond count explicitly and pad to six digits.
    moment_us = "{:06d}".format(int((moment % 1) * 1e6))
    return time.strftime("%Y-%m-%d-%H-%M-%S-{}".format(moment_us),
                         time.gmtime(moment))
Return a timestamp with microsecond precision.
def name_with_version(self):
    """Return the route name, suffixed with ':<version>' unless version 1.

    :return: user-friendly route representation; the version suffix is
        omitted for version 1.
    """
    if self.version != 1:
        return '{}:{}'.format(self.name, self.version)
    return self.name
Get user-friendly representation of the route. :return: Route name with version suffix. The version suffix is omitted for version 1.
def _clean(value): if isinstance(value, np.ndarray): if value.dtype.kind == 'S': return np.char.decode(value).tolist() else: return value.tolist() elif type(value).__module__ == np.__name__: conversion = np.asscalar(value) if sys.version_info.major == 3 and isinstance(conversion, bytes): conversion = conversion.decode() return conversion elif sys.version_info.major == 3 and isinstance(value, bytes): return value.decode() else: return value
Convert numpy numeric types to their python equivalents.
def list_features_0(self, locus, term, **kwargs):
    """List enumerated sequence features matching a term at a locus.

    Synchronous by default; pass a `callback` kwarg to make the HTTP
    request asynchronous (the callback receives the response and the
    request thread is returned instead of the data).

    :param str locus: locus name or URI
    :param str term: Sequence Ontology (SO) term name, accession, or URI
    :return: list[Feature], or the request thread when asynchronous
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.list_features_0_with_http_info(locus, term, **kwargs)
    else:
        (data) = self.list_features_0_with_http_info(locus, term, **kwargs)
        return data
List the enumerated sequence features matching a term at a locus This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.list_features_0(locus, term, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str locus: locus name or URI (required) :param str term: Sequence Ontology (SO) term name, accession, or URI (required) :return: list[Feature] If the method is called asynchronously, returns the request thread.
def render_table(output_dir, packages, jenv=JENV):
    """Render the Java message dispatch table and write it to disk.

    :param output_dir: root output directory; the file is written to
        com/swiftnav/sbp/client/MessageTable.java beneath it
    :param packages: package definitions passed to the template
    :param jenv: Jinja environment used to load the table template
    """
    destination_filename = output_dir + "/com/swiftnav/sbp/client/MessageTable.java"
    with open(destination_filename, 'w+') as f:
        print(destination_filename)
        f.write(jenv.get_template(TEMPLATE_TABLE_NAME).render(packages=packages))
Render and output dispatch table
def _check_team_exists(team):
    """Check that the team registry hostname actually resolves.

    No-op for team=None. On DNS failure, distinguishes "no internet"
    (quiltdata.com also unresolvable) from a bad team name, and raises
    CommandException with the appropriate message.
    """
    if team is None:
        return
    hostname = urlparse(get_registry_url(team)).hostname
    try:
        socket.gethostbyname(hostname)
    except IOError:
        try:
            # If even the main site fails, the problem is connectivity.
            socket.gethostbyname('quiltdata.com')
        except IOError:
            message = "Can't find quiltdata.com. Check your internet connection."
        else:
            message = "Unable to connect to registry. Is the team name %r correct?" % team
        raise CommandException(message)
Check that the team registry actually exists.
async def close_authenticator_async(self):
    """Close the CBS auth channel and destroy its session asynchronously.

    The final log line runs in a finally block so shutdown is always
    recorded, even when destroying the auth or session raises.
    """
    _logger.info("Shutting down CBS session on connection: %r.", self._connection.container_id)
    try:
        self._cbs_auth.destroy()
        _logger.info("Auth closed, destroying session on connection: %r.", self._connection.container_id)
        await self._session.destroy_async()
    finally:
        _logger.info("Finished shutting down CBS session on connection: %r.", self._connection.container_id)
Close the CBS auth channel and session asynchronously.
def load_json(filename, **kwargs):
    """Deserialize a JSON object from the given UTF-8 file.

    Args:
        filename: path to the input JSON file.
        **kwargs: extra arguments forwarded to `json.load`.

    Returns:
        The deserialized object.
    """
    with open(filename, 'r', encoding='utf-8') as handle:
        return json.load(handle, **kwargs)
Load a JSON object from the specified file. Args: filename: Path to the input JSON file. **kwargs: Additional arguments to `json.load`. Returns: The object deserialized from JSON.
async def do_api_call(self):
    """Send the API frame and wait for the answer (or timeout).

    Registers the response callback before sending so the reply cannot
    be missed, and unregisters it once the wait completes.
    """
    self.pyvlx.connection.register_frame_received_cb(
        self.response_rec_callback)
    await self.send_frame()
    await self.start_timeout()
    await self.response_received_or_timeout.wait()
    await self.stop_timeout()
    self.pyvlx.connection.unregister_frame_received_cb(self.response_rec_callback)
Start. Sending and waiting for answer.
def is_https(request_data):
    """Report whether the request described by `request_data` used HTTPS.

    :param request_data: the request as a dict ('https' flag and/or
        'server_port')
    :type: dict
    :return: False if https is not active
    :rtype: boolean
    """
    https_flag = 'https' in request_data and request_data['https'] != 'off'
    if https_flag:
        return True
    return 'server_port' in request_data and str(request_data['server_port']) == '443'
Checks if https or http. :param request_data: The request as a dict :type: dict :return: False if https is not active :rtype: boolean
def crud_mutation_name(action, model):
    """Return the mutation name performing `action` on `model`.

    The model string is capitalized (first letter only) and appended to
    the action, e.g. ('create', model) -> 'createModelName'.
    """
    raw = get_model_string(model)
    capitalized = raw[0].upper() + raw[1:]
    return "{}{}".format(action, capitalized)
This function returns the name of a mutation that performs the specified crud action on the given model service
def right_click_event_statusicon(self, icon, button, time):
    """Show the popup menu anchored to the status icon.

    It's just the way popup menus work in GTK: a position function is
    passed so the menu appears next to the icon.
    """
    def pos(menu, aicon):
        return Gtk.StatusIcon.position_menu(menu, aicon)
    self.menu.popup(None, None, pos, icon, button, time)
It's just the way popup menus work in GTK. Don't ask me how it works.
def backward_transfer_pair(
    backward_channel: NettingChannelState,
    payer_transfer: LockedTransferSignedState,
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
) -> Tuple[Optional[MediationPairState], List[Event]]:
    """Send a transfer backwards, allowing the previous hop to try a new route.

    When all routes available for this node failed, a refund transfer with
    the same amount and secrethash is sent backwards so the previous hop
    can retry.

    Args:
        backward_channel: the original channel which sent the mediated
            transfer to this node.
        payer_transfer: the *latest* payer transfer backing the mediation.
        pseudo_random_generator: source for the refund message identifier.
        block_number: the current block number.

    Returns:
        (mediation pair or None, list of events); the pair is None when
        the backward channel cannot carry the refund.
    """
    transfer_pair = None
    events: List[Event] = list()
    lock = payer_transfer.lock
    lock_timeout = BlockTimeout(lock.expiration - block_number)
    # Only refund when the backward channel can still lock this amount
    # within the remaining timeout.
    if is_channel_usable(backward_channel, lock.amount, lock_timeout):
        message_identifier = message_identifier_from_prng(pseudo_random_generator)
        refund_transfer = channel.send_refundtransfer(
            channel_state=backward_channel,
            initiator=payer_transfer.initiator,
            target=payer_transfer.target,
            amount=get_lock_amount_after_fees(lock, backward_channel),
            message_identifier=message_identifier,
            payment_identifier=payer_transfer.payment_identifier,
            expiration=lock.expiration,
            secrethash=lock.secrethash,
        )
        transfer_pair = MediationPairState(
            payer_transfer,
            backward_channel.partner_state.address,
            refund_transfer.transfer,
        )
        events.append(refund_transfer)
    return (transfer_pair, events)
Sends a transfer backwards, allowing the previous hop to try a new route. When all the routes available for this node failed, send a transfer backwards with the same amount and secrethash, allowing the previous hop to do a retry. Args: backward_channel: The original channel which sent the mediated transfer to this node. payer_transfer: The *latest* payer transfer which is backing the mediation. block_number: The current block number. Returns: The mediator pair and the corresponding refund event.
def _getmember(self, name, tarinfo=None, normalize=False): members = self.getmembers() if tarinfo is not None: members = members[:members.index(tarinfo)] if normalize: name = os.path.normpath(name) for member in reversed(members): if normalize: member_name = os.path.normpath(member.name) else: member_name = member.name if name == member_name: return member
Find an archive member by name from bottom to top. If tarinfo is given, it is used as the starting point.
def registers(self, unroll=False, skip_not_present=True):
    """Yield all immediate RegNode children of this component.

    Parameters
    ----------
    unroll : bool
        If True, children that are arrays are unrolled.
    skip_not_present : bool
        If True, skip children whose 'ispresent' property is False.

    Yields
    ------
    RegNode
        Each immediate register of this component.
    """
    for child in self.children(unroll, skip_not_present):
        if isinstance(child, RegNode):
            yield child
Returns an iterator that provides nodes for all immediate registers of this component. Parameters ---------- unroll : bool If True, any children that are arrays are unrolled. skip_not_present : bool If True, skips children whose 'ispresent' property is set to False Yields ------ :class:`~RegNode` All registers in this component
def _MergeOptional(self, a, b): if a and b: if a != b: raise MergeError("values must be identical if both specified " "('%s' vs '%s')" % (transitfeed.EncodeUnicode(a), transitfeed.EncodeUnicode(b))) return a or b
Tries to merge two values which may be None. If both values are not None, they are required to be the same and the merge is trivial. If one of the values is None and the other is not None, the merge results in the one which is not None. If both are None, the merge results in None. Args: a: The first value. b: The second value. Returns: The merged value. Raises: MergeError: If both values are not None and are not the same.
def expectation(self, prep_prog, operator_programs=None):
    """Calculate expectation values of operators for the state from prep_prog.

    :note: if execution of the preparation program is non-deterministic
        (measurements / noisy gates), the returned expectations are
        stochastic samples and will differ between calls.

    :param Program prep_prog: Quil program for state preparation.
    :param list operator_programs: list of Programs, each an operator whose
        expectation to compute; the server default is a list containing
        only the empty Program.
    :return: expectation values of the operators.
    :rtype: List[float]
    """
    if isinstance(operator_programs, Program):
        # A bare Program is almost certainly a caller mistake; warn that
        # the results will be per-line expectation values.
        warnings.warn(
            "You have provided a Program rather than a list of Programs. The results from expectation "
            "will be line-wise expectation values of the operator_programs.",
            SyntaxWarning)
    payload = self._expectation_payload(prep_prog, operator_programs)
    response = post_json(self.session, self.sync_endpoint + "/qvm", payload)
    return response.json()
Calculate the expectation value of operators given a state prepared by prep_program. :note: If the execution of ``quil_program`` is **non-deterministic**, i.e., if it includes measurements and/or noisy quantum gates, then the final wavefunction from which the expectation values are computed itself only represents a stochastically generated sample. The expectations returned from *different* ``expectation`` calls *will then generally be different*. To measure the expectation of a PauliSum, you probably want to do something like this:: progs, coefs = hamiltonian.get_programs() expect_coeffs = np.array(cxn.expectation(prep_program, operator_programs=progs)) return np.real_if_close(np.dot(coefs, expect_coeffs)) :param Program prep_prog: Quil program for state preparation. :param list operator_programs: A list of Programs, each specifying an operator whose expectation to compute. Default is a list containing only the empty Program. :return: Expectation values of the operators. :rtype: List[float]
def cudaDriverGetVersion():
    """Return the installed CUDA driver version as an int (0 if none).

    Wraps cudaDriverGetVersion from libcudart; cudaCheckStatus raises on
    a non-success status code.

    Returns
    -------
    version : int
        Driver version.
    """
    version = ctypes.c_int()
    status = _libcudart.cudaDriverGetVersion(ctypes.byref(version))
    cudaCheckStatus(status)
    return version.value
Get installed CUDA driver version. Return the version of the installed CUDA driver as an integer. If no driver is detected, 0 is returned. Returns ------- version : int Driver version.
def get_rule_option(self, rule_name_or_id, option_name):
    """Return the value of `option_name` for the given rule.

    LintConfigErrors are raised (by _get_option) if the rule or option
    does not exist.
    """
    return self._get_option(rule_name_or_id, option_name).value
Returns the value of a given option for a given rule. LintConfigErrors will be raised if the rule or option don't exist.
def has_error(self):
    """Query the server and report whether the job has an error.

    Refreshes self.info via get_info(), then returns the status.hasError
    flag, defaulting to False when the keys are absent.
    """
    self.get_info()
    info = self.info
    if 'status' not in info:
        return False
    status = info['status']
    if 'hasError' not in status:
        return False
    return status['hasError']
Queries the server to check if the job has an error. Returns True or False.
def add_records(self, domain, records):
    """Add the specified DNS records to a domain via a PATCH request.

    :param domain: the domain to add the records to
    :param records: the records to add
    :return: True (errors surface as exceptions from _patch)
    """
    url = self.API_TEMPLATE + self.RECORDS.format(domain=domain)
    self._patch(url, json=records)
    self.logger.debug('Added records @ {}'.format(records))
    return True
Adds the specified DNS records to a domain. :param domain: the domain to add the records to :param records: the records to add
def parse_FreqDist_interChr(self, f):
    """Parse a HOMER petag.FreqDistribution_1000 file for inter-chromosomal interactions.

    Only the first line is read; it is expected to look like
    'label : <value>)' and the numeric part becomes 'interChr'.

    NOTE(review): if the file iterator is empty, `interChr` is never
    bound and a NameError is raised -- confirm callers guarantee a
    non-empty file.
    """
    parsed_data = dict()
    firstline = True
    for l in f['f']:
        if firstline:
            firstline = False
            # Strip the trailing ')' and convert the value after ':'.
            interChr = float(re.sub("\)", "", l.split(":")[1]))
        else:
            break
    parsed_data['interChr'] = interChr
    return parsed_data
Parse HOMER tagdirectory petag.FreqDistribution_1000 file to get inter-chromosomal interactions.
def constantLine(requestContext, value):
    """Draw a horizontal line at float `value` across the graph.

    Example::

        &target=constantLine(123.456)
    """
    name = "constantLine(%s)" % str(value)
    start = int(epoch(requestContext['startTime']))
    end = int(epoch(requestContext['endTime']))
    # Two steps -> three identical points spanning [start, end].
    step = int((end - start) / 2.0)
    series = TimeSeries(str(value), start, end, step, [value, value, value])
    series.pathExpression = name
    return [series]
Takes a float F. Draws a horizontal line at value F across the graph. Example:: &target=constantLine(123.456)
def master(self, name):
    """Return a dict describing the state of the named sentinel master."""
    fut = self.execute(b'MASTER', name, encoding='utf-8')
    return wait_convert(fut, parse_sentinel_master)
Returns a dictionary containing the specified masters state.
def get_model_name(self):
    """Return the model name used by templates.

    Raises ImproperlyConfigured when 'model_name' is not set and this
    method has not been overridden.
    """
    if self.model_name is not None:
        return self.model_name
    raise ImproperlyConfigured(
        "%s requires either a definition of "
        "'model_name' or an implementation of 'get_model_name()'"
        % self.__class__.__name__)
Return the model name for templates.
def Clear(self):
    """Wipe the transaction log, ignoring filesystem errors."""
    try:
        with io.open(self.logfile, "wb") as log_fd:
            log_fd.write(b"")
    except (IOError, OSError):
        # Best effort: a missing or unwritable log is not fatal.
        pass
Wipes the transaction log.
def _enter_plotting(self, fontsize=9):
    """Prepare pyplot state for plotting; assumes a figure is open.

    Saves the current font size so it can be restored later, then
    disables hold and interactive mode.

    NOTE(review): pyplot.hold was removed in matplotlib >= 3.0 -- this
    presumably targets an older matplotlib; confirm before reuse.
    """
    self.original_fontsize = pyplot.rcParams['font.size']
    pyplot.rcParams['font.size'] = fontsize
    pyplot.hold(False)
    pyplot.ioff()
assumes that a figure is open
def should_generate_summaries():
    """Return True when the current TF graph context should emit summaries.

    Returns:
        a boolean
    """
    name_scope = tf.contrib.framework.get_name_scope()
    if name_scope and "while/" in name_scope:
        # Inside a while-loop body -- skip summaries there.
        return False
    if tf.get_variable_scope().reuse:
        # Reuse scope: presumably a replica; avoid duplicate summaries.
        return False
    return True
Is this an appropriate context to generate summaries. Returns: a boolean
def fill_edge_matrix(nsrcs, match_dict):
    """Build an (nsrcs, nsrcs) edge matrix from pairwise match measures.

    Parameters
    ----------
    nsrcs : int
        Number of sources (matrix dimension).
    match_dict : dict((int, int) -> float)
        Maps a pair of source indices to its edge measure (distance or
        sigma).

    Returns
    -------
    numpy.ndarray
        Zero matrix with match_dict values filled at their index pairs.
    """
    edges = np.zeros((nsrcs, nsrcs))
    for (row, col), measure in match_dict.items():
        edges[row, col] = measure
    return edges
Create and fill a matrix with the graph 'edges' between sources. Parameters ---------- nsrcs : int number of sources (used to allocate the size of the matrix) match_dict : dict((int,int):float) Each entry gives a pair of source indices, and the corresponding measure (either distance or sigma) Returns ------- e_matrix : `~numpy.ndarray` numpy.ndarray((nsrcs,nsrcs)) filled with zeros except for the matches, which are filled with the edge measures (either distances or sigmas)
def elcm_session_terminate(irmc_info, session_id):
    """Send an eLCM request to terminate a session.

    :param irmc_info: node info
    :param session_id: session id
    :raises ELCMSessionNotFound: if the session does not exist (404)
    :raises SCCIClientError: on any other non-200 response
    """
    resp = elcm_request(irmc_info,
                        method='DELETE',
                        path='/sessionInformation/%s/terminate' % session_id)
    if resp.status_code == 200:
        return
    elif resp.status_code == 404:
        raise ELCMSessionNotFound('Session "%s" does not exist' % session_id)
    else:
        raise scci.SCCIClientError(('Failed to terminate session '
                                    '"%(session)s" with error code %(error)s'
                                    % {'session': session_id,
                                       'error': resp.status_code}))
send an eLCM request to terminate a session :param irmc_info: node info :param session_id: session id :raises: ELCMSessionNotFound if the session does not exist :raises: SCCIClientError if SCCI failed
def _B(self, x, a, b): return special.betainc(a, b, x) * special.beta(a, b)
incomplete Beta function as described in Mamon&Lokas A13 :param x: :param a: :param b: :return:
def compute_vest_stat(vest_dict, ref_aa, somatic_aa, codon_pos,
                      stat_func=np.mean,
                      default_val=0.0):
    """Compute a statistic (default: mean) of missense VEST scores.

    Non-missense mutations are intentionally not filtered out and take
    the default value of zero via missing scores.

    Parameters
    ----------
    vest_dict : dict
        VEST scores across the gene of interest (None -> default_val).
    ref_aa : list of str
        Reference amino acids.
    somatic_aa : list of str
        Somatic mutation amino acids.
    codon_pos : list of int
        Codon positions in the protein sequence.
    stat_func : function, default np.mean
        Statistic applied to the fetched scores.
    default_val : float
        Returned when there are no scores.

    Returns
    -------
    float
        VEST score statistic for the provided mutation list.
    """
    if vest_dict is None:
        return default_val
    myscores = fetch_vest_scores(vest_dict, ref_aa, somatic_aa, codon_pos)
    if myscores:
        score_stat = stat_func(myscores)
    else:
        # No usable scores -> fall back to the default.
        score_stat = default_val
    return score_stat
Compute missense VEST score statistic. Note: non-missense mutations are intentially not filtered out and will take a default value of zero. Parameters ---------- vest_dict : dict dictionary containing vest scores across the gene of interest ref_aa: list of str list of reference amino acids somatic_aa: list of str somatic mutation aa codon_pos : list of int position of codon in protein sequence stat_func : function, default=np.mean function that calculates a statistic default_val : float default value to return if there are no mutations Returns ------- score_stat : float vest score statistic for provided mutation list
def node_detail(node_name):
    """Render the detail view for one specific node (masters only)."""
    token = session.get('token')
    node = nago.core.get_node(token)
    # Only nodes with master access may view node details.
    if not node.get('access') == 'master':
        return jsonify(status='error', error="You need master access to view this page")
    node = nago.core.get_node(node_name)
    return render_template('node_detail.html', node=node)
View one specific node
def set_references(references, components):
    """Set references on multiple components.

    To receive references, components must implement the IReferenceable
    interface; for components that don't, the call has no effect.

    :param references: the references to be set.
    :param components: a list of components to set the references to
        (None is tolerated and ignored).
    """
    # FIX: compare against None with `is`, not `==` (PEP 8; avoids
    # surprises with objects overriding __eq__).
    if components is None:
        return
    for component in components:
        Referencer.set_references_for_one(references, component)
Sets references to multiple components. To set references components must implement [[IReferenceable]] interface. If they don't the call to this method has no effect. :param references: the references to be set. :param components: a list of components to set the references to.
def set_tax_benefit_systems(self, tax_benefit_system = None, baseline_tax_benefit_system = None):
    """Set the tax and benefit system and, optionally, the baseline one.

    When this instance carries a ``cache_blacklist``, it is propagated to
    each system that gets attached.
    """
    assert tax_benefit_system is not None
    self.tax_benefit_system = tax_benefit_system
    if self.cache_blacklist is not None:
        self.tax_benefit_system.cache_blacklist = self.cache_blacklist
    if baseline_tax_benefit_system is None:
        return
    self.baseline_tax_benefit_system = baseline_tax_benefit_system
    if self.cache_blacklist is not None:
        self.baseline_tax_benefit_system.cache_blacklist = self.cache_blacklist
Set the tax and benefit system and eventually the baseline tax and benefit system
def raise_for_old_graph(graph):
    """Raise an ImportVersionWarning if the BEL graph was produced by a legacy version of PyBEL.

    :param graph: a BEL graph exposing a ``pybel_version`` string attribute
    :raises ImportVersionWarning: if the graph's version is older than
        ``PYBEL_MINIMUM_IMPORT_VERSION``
    """
    # tokenize_version presumably yields a comparable version tuple — confirm.
    graph_version = tokenize_version(graph.pybel_version)
    if graph_version < PYBEL_MINIMUM_IMPORT_VERSION:
        raise ImportVersionWarning(graph_version, PYBEL_MINIMUM_IMPORT_VERSION)
Raise an ImportVersionWarning if the BEL graph was produced by a legacy version of PyBEL. :raises ImportVersionWarning: If the BEL graph was produced by a legacy version of PyBEL
def _run_varnishadm(cmd, params=(), **kwargs):
    """Execute a varnishadm command and return the output of ``cmd.run_all``.

    cmd
        The varnishadm sub-command to run.
    params
        Additional positional arguments; ``None`` entries are skipped so
        optional parameters can simply be omitted.
    kwargs
        Additional options passed through to Salt's ``cmd.run_all``.
    """
    cmd = ['varnishadm', cmd]
    cmd.extend([param for param in params if param is not None])
    log.debug('Executing: %s', ' '.join(cmd))
    # python_shell=False: argv is passed as a list, no shell interpolation.
    return __salt__['cmd.run_all'](cmd, python_shell=False, **kwargs)
Execute varnishadm command return the output of the command cmd The command to run in varnishadm params Any additional args to add to the command line kwargs Additional options to pass to the salt cmd.run_all function
def find_raw_devices(vendor=None, product=None, serial_number=None,
                     custom_match=None, **kwargs):
    """Find connected USB RAW devices.

    A device qualifies when it passes ``custom_match`` (if given) and
    exposes at least one vendor-specific interface (class/subclass 0xFF).

    See usbutil.find_devices for more info.
    """
    def is_usbraw(dev):
        if custom_match and not custom_match(dev):
            return False
        # 0xFF/0xFF marks vendor-specific ("raw") interfaces.
        return bool(find_interfaces(dev, bInterfaceClass=0xFF,
                                    bInterfaceSubClass=0xFF))

    return find_devices(vendor, product, serial_number, is_usbraw, **kwargs)
Find connected USB RAW devices. See usbutil.find_devices for more info.
def addDepartment(self, dep):
    """Adds a department.

    :param dep: UID or department object
    :returns: True when the department was added

    NOTE(review): as written, this returns False when the department is
    *not* already assigned and appends it (creating a duplicate) when it
    *is* — the opposite of what the docstring suggests. The condition may
    have been intended as ``if dep in deps: return False``; confirm
    against callers before changing.
    """
    # Accept a UID and resolve it to the actual object.
    if api.is_uid(dep):
        dep = api.get_object_by_uid(dep)
    deps = self.getDepartments()
    if dep not in deps:
        return False
    deps.append(dep)
    self.setDepartments(deps)
    return True
Adds a department :param dep: UID or department object :returns: True when the department was added
def find_existing_items(
        src, dst, *, fields=None, field_map=None,
        normalize_values=False, normalize_func=normalize_value):
    """Yield items from ``src`` whose compared field values also occur in ``dst``.

    Parameters:
        src (list): A list of item dicts or filepaths.
        dst (list): A list of item dicts or filepaths.
        fields (list): Fields used to compare item dicts.
        field_map (Mapping): A mapping of field name aliases; defaults to
            ``FIELD_MAP``.
        normalize_values (bool): Normalize metadata values to remove common
            differences between sources. Default: ``False``.
        normalize_func (function): Function applied to metadata values when
            ``normalize_values`` is ``True``.

    Yields:
        dict: The next item from ``src`` found in ``dst``.
    """
    if field_map is None:
        field_map = FIELD_MAP

    # Pre-compute comparison keys for the destination once; set membership
    # makes each src lookup O(1).
    dst_keys = {
        _gather_field_values(
            dst_item, fields=fields, field_map=field_map,
            normalize_values=normalize_values, normalize_func=normalize_func
        )
        for dst_item in dst
    }

    for src_item in src:
        if _gather_field_values(
            src_item, fields=fields, field_map=field_map,
            normalize_values=normalize_values, normalize_func=normalize_func
        ) in dst_keys:
            yield src_item
Find items from an item collection that are in another item collection. Parameters: src (list): A list of item dicts or filepaths. dst (list): A list of item dicts or filepaths. fields (list): A list of fields used to compare item dicts. field_map (~collections.abc.Mapping): A mapping of field name aliases. Default: :data:`~google_music_utils.constants.FIELD_MAP` normalize_values (bool): Normalize metadata values to remove common differences between sources. Default: ``False`` normalize_func (function): Function to apply to metadata values if ``normalize_values`` is ``True``. Default: :func:`~google_music_utils.utils.normalize_value` Yields: dict: The next item from ``src`` collection in ``dst`` collection.
def disconnect_sync(self, connection_handle):
    """Synchronously disconnect from whoever has connected to us.

    Args:
        connection_handle (int): The handle of the connection we wish
            to disconnect.
    """
    # sync=True blocks until the disconnection is confirmed.
    self.bable.disconnect(connection_handle=connection_handle, sync=True)
Synchronously disconnect from whoever has connected to us Args: connection_handle (int): The handle of the connection we wish to disconnect.
def delete(self, request, key):
    """Remove an email address, validated or not.

    Expects ``email`` (and optionally ``user``) in the DELETE body.
    Returns 204 on success, 400 when no email is supplied, 404 when the
    address is unknown.
    """
    # Django does not populate request.DELETE; parse the raw body ourselves.
    request.DELETE = http.QueryDict(request.body)
    email_addr = request.DELETE.get('email')
    user_id = request.DELETE.get('user')
    if not email_addr:
        return http.HttpResponseBadRequest()
    # A pending (not-yet-validated) address is deleted outright.
    try:
        email = EmailAddressValidation.objects.get(address=email_addr,
                                                   user_id=user_id)
    except EmailAddressValidation.DoesNotExist:
        pass
    else:
        email.delete()
        return http.HttpResponse(status=204)
    # A validated address is kept in the table but detached from the user.
    try:
        email = EmailAddress.objects.get(address=email_addr,
                                         user_id=user_id)
    except EmailAddress.DoesNotExist:
        raise http.Http404
    email.user = None
    email.save()
    return http.HttpResponse(status=204)
Remove an email address, validated or not.
def validate_matches(other):
    """Validate that a field's value equals another field in the data.

    Works with anything that supports the ``==`` operator. ``None``
    values are skipped (nothing to compare).

    :param other: Key of the field to compare against.
    :raises: ``ValidationError('matches')``
    """
    def _check(field, data):
        if field.value is None:
            return
        expected = data.get(other)
        # Keep ``not (a == b)`` rather than ``!=`` so any object that only
        # defines __eq__ still compares as documented.
        if not (field.value == expected):
            raise ValidationError('matches', other=other)

    return _check
Validate the field value is equal to another field in the data. Should work with anything that supports '==' operator. :param value: Field key to compare. :raises: ``ValidationError('matches')``
def find_regions(self, word):
    """Find stemmer regions R1 and R2 in *word*.

    R1/R2 are set to the position just past the first/second
    vowel-consonant pair, provided that position lies inside the word.
    """
    word_len = len(word)
    for count, hit in enumerate(re.finditer("[aeiouy][^aeiouy]", word)):
        boundary = hit.end()
        if count == 0 and boundary < word_len:
            self.r1 = boundary
        if count == 1:
            if boundary < word_len:
                self.r2 = boundary
            break
Find regions R1 and R2.
def reload(self):
    """Reload the configuration from disk.

    Returns True when the freshly loaded configuration differs from the
    previous values, False otherwise.
    """
    fresh = self._default_configuration()
    # Overlay file values on the defaults when a path is configured.
    if self._file_path:
        fresh.update(self._load_config_file())
    if fresh == self._values:
        return False
    self._values = fresh
    return True
Reload the configuration from disk returning True if the configuration has changed from the previous values.
def resolve_object_property(obj, path: str):
    """Resolve the value of a (possibly nested) property on an object.

    A dotted path may be given, e.g. 'other.beer.name' walks three
    attributes deep.

    Raises:
        AttributeError: In case the property could not be resolved.

    Returns:
        The value of the specified property.
    """
    current = obj
    for attribute_name in path.split('.'):
        current = getattr(current, attribute_name)
    return current
Resolves the value of a property on an object. Is able to resolve nested properties. For example, a path can be specified: 'other.beer.name' Raises: AttributeError: In case the property could not be resolved. Returns: The value of the specified property.
def get_downsampled_scatter(self, xax="area_um", yax="deform", downsample=0,
                            xscale="linear", yscale="linear"):
    """Downsampling by removing points at dense locations.

    Parameters
    ----------
    xax: str
        Identifier for x axis (e.g. "area_um", "aspect", "deform")
    yax: str
        Identifier for y axis
    downsample: int
        Number of points to draw in the down-sampled plot
        (>=1: downsample to this number; 0: no downsampling).
    xscale: str
        If "log", take the logarithm of the x-values before
        downsampling (useful for log-scale displays). Default "linear".
    yscale: str
        See `xscale`.

    Returns
    -------
    xnew, ynew:
        Filtered x and y values of the kept points.
    """
    if downsample < 0:
        raise ValueError("`downsample` must be zero or positive!")
    downsample = int(downsample)
    xax = xax.lower()
    yax = yax.lower()
    # Only consider events that pass the currently active filter.
    x = self[xax][self.filter.all]
    y = self[yax][self.filter.all]
    # Optionally judge density in log space, matching how data is displayed.
    xs = self._apply_scale(x, xscale, xax)
    ys = self._apply_scale(y, yscale, yax)
    _, _, idx = downsampling.downsample_grid(xs, ys,
                                             samples=downsample,
                                             ret_idx=True)
    # Remember which points were kept — presumably consumed elsewhere
    # (e.g. hierarchy filtering); confirm before removing.
    self._plot_filter = idx
    # Return the *unscaled* values of the kept points.
    return x[idx], y[idx]
Downsampling by removing points at dense locations Parameters ---------- xax: str Identifier for x axis (e.g. "area_um", "aspect", "deform") yax: str Identifier for y axis downsample: int Number of points to draw in the down-sampled plot. This number is either - >=1: exactly downsample to this number by randomly adding or removing points - 0 : do not perform downsampling xscale: str If set to "log", take the logarithm of the x-values before performing downsampling. This is useful when data are displayed on a log-scale. Defaults to "linear". yscale: str See `xscale`. Returns ------- xnew, ynew: filtered x and y
def detect_mode(term_hint="xterm-256color"):
    """Poor-man's terminal color mode detection.

    Returns 16 or 256 based on environment hints (ANSICON, ConEmuANSI,
    TERM); falls back to 256 when nothing conclusive is found.
    """
    if "ANSICON" in os.environ:
        return 16
    if os.environ.get("ConEmuANSI", "OFF") == "ON":
        return 256
    term = os.environ.get("TERM", term_hint)
    if term.endswith("-256color") or term in ("xterm", "screen"):
        return 256
    if term.endswith("-color") or term in ("rxvt",):
        return 16
    return 256
Poor-man's color mode detection.
def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
    """POST an object and check the response.

    Retry once if a badNonce error is received.

    :param str url: The URL to request.
    :param ~josepy.interfaces.JSONDeSerializable obj: The serializable
        payload of the request.
    :param bytes content_type: The expected content type of the
        response. By default, JSON.
    :raises txacme.client.ServerError: If the server response carries an
        HTTP Problem document.
    :raises acme.errors.ClientError: In case of other protocol errors.
    """
    def retry_bad_nonce(f):
        # Only handle ServerError; any other failure propagates unchanged.
        f.trap(ServerError)
        if f.value.message.typ.split(':')[-1] == 'badNonce':
            # Stored nonces are stale: drop them, harvest a fresh one from
            # the error response, then retry the request exactly once.
            self._nonces.clear()
            self._add_nonce(f.value.response)
            return self._post(url, obj, content_type, **kwargs)
        return f
    return (
        self._post(url, obj, content_type, **kwargs)
        .addErrback(retry_bad_nonce))
POST an object and check the response. Retry once if a badNonce error is received. :param str url: The URL to request. :param ~josepy.interfaces.JSONDeSerializable obj: The serializable payload of the request. :param bytes content_type: The expected content type of the response. By default, JSON. :raises txacme.client.ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises acme.errors.ClientError: In case of other protocol errors.
def clean_text(text):
    """Clean text for TFIDF.

    Replaces punctuation with spaces, lower-cases, drops tokens that
    contain digits, and stems the remaining tokens.

    NOTE(review): this is Python 2 code — the ``ur''`` literal is a
    syntax error on Python 3, and the ``\\p{P}`` (Unicode punctuation)
    class is not supported by the stdlib ``re`` module; it presumably
    relies on the third-party ``regex`` module being aliased as ``re`` —
    confirm before porting.
    """
    new_text = re.sub(ur'\p{P}+', ' ', text)
    # Tokens containing any digit are discarded; the rest are stemmed.
    new_text = [stem(i) for i in new_text.lower().split() if not re.findall(r'[0-9]', i)]
    new_text = ' '.join(new_text)
    return new_text
Clean text for TFIDF.
def get_host_port_names(self, host_name):
    """Return a list of the port names (FC then iSCSI) of an XIV host.

    :param host_name: name of the host to look up
    :returns: list of port name strings (possibly empty)
    """
    host = self.get_hosts_by_name(host_name)
    names = []
    # Both attributes are comma-separated strings; '' means "no ports"
    # (splitting '' would otherwise produce ['']).
    for raw_ports in (host.fc_ports, host.iscsi_ports):
        if raw_ports != '':
            names.extend(raw_ports.split(','))
    return names
Return a list of the port names of an XIV host.
def afw_json_importer(input_file: str) -> dict:
    """Import an AFW from a JSON file.

    :param str input_file: path+filename of the input JSON file.
    :return: *(dict)* representing an AFW, with ``alphabet``, ``states``
        and ``accepting_states`` as sets and ``transitions`` keyed by
        ``(state, symbol)`` tuples.
    """
    # Use a context manager so the handle is closed even on parse errors
    # (the original leaked the open file object).
    with open(input_file) as json_handle:
        json_file = json.load(json_handle)

    # JSON cannot express tuple keys, so transitions are serialized as
    # [state, symbol, destination] triples; rebuild the tuple-keyed dict.
    transitions = {}
    for p in json_file['transitions']:
        transitions[p[0], p[1]] = p[2]

    return {
        'alphabet': set(json_file['alphabet']),
        'states': set(json_file['states']),
        'initial_state': json_file['initial_state'],
        'accepting_states': set(json_file['accepting_states']),
        'transitions': transitions,
    }
Imports a AFW from a JSON file. :param str input_file: path+filename to input JSON file; :return: *(dict)* representing a AFW.
def is_valid_variable_name(string_to_check):
    """Returns whether the provided name is a valid variable name in Python.

    :param string_to_check: the string to be checked
    :return: True or False
    """
    # Bug fix: the original parsed '{} = None', which wrongly accepted
    # inputs containing newlines (e.g. 'a\nb' parses as two statements).
    # str.isidentifier() validates a single name; reserved words are
    # rejected, matching the parse-based behaviour for keywords.
    from keyword import iskeyword
    if not isinstance(string_to_check, str):
        return False
    return string_to_check.isidentifier() and not iskeyword(string_to_check)
Returns whether the provided name is a valid variable name in Python :param string_to_check: the string to be checked :return: True or False
def Timestamp():
    """Get the current datetime in UTC as a string suffixed with ' UTC'.

    Returns an empty string if the time lookup fails (the failure is
    logged).
    """
    timestamp = ''
    try:
        # Bug fix: datetime.now() returns *local* time but the value was
        # labelled UTC; use an actual UTC clock (naive, to keep the same
        # string format as before).
        now_utc = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
        timestamp = str(now_utc) + ' UTC'
    except Exception as e:
        logger.error('Could not get current time ' + str(e))
    return timestamp
Get the current datetime in UTC
def request_add_sensor(self, sock, msg):
    """Add a sensor.

    Registers a new integer sensor named after the current sensor count
    (``int_sensor<N>``) with range [-10, 10], then replies 'ok'.
    """
    self.add_sensor(Sensor(int, 'int_sensor%d' % len(self._sensors),
                           'descr', 'unit', params=[-10, 10]))
    return Message.reply('add-sensor', 'ok')
add a sensor
def remove_tmp_prefix_from_file_path(file_path):
    """Remove the tmp prefix from the filename part of a file path or URL.

    The directory part is preserved and the result always uses forward
    slashes (so URLs stay valid on Windows).
    """
    directory, filename = os.path.split(file_path)
    cleaned = remove_tmp_prefix_from_filename(filename)
    return os.path.join(directory, cleaned).replace('\\', '/')
Remove tmp prefix from file path or url.
def _value_format(self, value, serie, index):
    """Display value and cumulation.

    The outermost series of the stack (last one when stacking from top,
    otherwise the first) shows only the plain value; all other series
    render 'cumulated (+value)'.
    """
    # Points already hold the cumulated sums; index 1 is the y component.
    sum_ = serie.points[index][1]
    # NOTE: relies on 'and' binding tighter than 'or' — do not add parens
    # without checking both stacking orientations.
    if serie in self.series and (
            self.stack_from_top and
            self.series.index(serie) == self._order - 1 or
            not self.stack_from_top and self.series.index(serie) == 0):
        return super(StackedLine, self)._value_format(value)
    return '%s (+%s)' % (self._y_format(sum_), self._y_format(value))
Display value and cumulation
def makeMissingRequiredGlyphs(font, glyphSet):
    """Add .notdef to the glyph set if it is not present.

    **This should not be called externally.** Subclasses may override
    this method to handle the glyph creation in a different way if
    desired.
    """
    if ".notdef" in glyphSet:
        return
    unitsPerEm = otRound(getAttrWithFallback(font.info, "unitsPerEm"))
    ascender = otRound(getAttrWithFallback(font.info, "ascender"))
    descender = otRound(getAttrWithFallback(font.info, "descender"))
    # Half an em is the conventional width for a synthesized .notdef.
    defaultWidth = otRound(unitsPerEm * 0.5)
    glyphSet[".notdef"] = StubGlyph(name=".notdef", width=defaultWidth,
                                    unitsPerEm=unitsPerEm, ascender=ascender,
                                    descender=descender)
Add .notdef to the glyph set if it is not present. **This should not be called externally.** Subclasses may override this method to handle the glyph creation in a different way if desired.
def ubridge_path(self):
    """Returns the uBridge executable path.

    :returns: path to uBridge, or None when the configured executable
        cannot be resolved on PATH
    """
    server_config = self._manager.config.get_section_config("Server")
    configured = server_config.get("ubridge_path", "ubridge")
    # shutil.which resolves the name against PATH (None when not found).
    return shutil.which(configured)
Returns the uBridge executable path. :returns: path to uBridge
def add(cls, model, commit=True):
    """Adds a model instance to session and commits the transaction.

    Args:
        model: The instance to add.
        commit: When True (default) the transaction is committed; on
            failure the session is rolled back and the error re-raised.

    Raises:
        ValueError: if ``model`` is not an instance of ``cls``.

    Examples:
        >>> customer = Customer.new(name="hari", email="hari@gmail.com")
        >>> Customer.add(customer)
        hari@gmail.com
    """
    if not isinstance(model, cls):
        raise ValueError('%s is not of type %s' % (model, cls))
    cls.session.add(model)
    try:
        if commit:
            cls.session.commit()
        return model
    except BaseException:
        # Equivalent to the original bare ``except:`` (still rolls back on
        # KeyboardInterrupt etc.) but explicit and lint-clean.
        cls.session.rollback()
        raise
Adds a model instance to session and commits the transaction. Args: model: The instance to add. Examples: >>> customer = Customer.new(name="hari", email="hari@gmail.com") >>> Customer.add(customer) hari@gmail.com
def renew_access_token(self):
    """Sets a new access token on the User using the refresh token.

    Runs Cognito's REFRESH_TOKEN auth flow and stores the returned
    access, id and token-type values on this instance.
    """
    auth_params = {'REFRESH_TOKEN': self.refresh_token}
    # A SECRET_HASH is required when the app client has a client secret.
    self._add_secret_hash(auth_params, 'SECRET_HASH')
    refresh_response = self.client.initiate_auth(
        ClientId=self.client_id,
        AuthFlow='REFRESH_TOKEN',
        AuthParameters=auth_params,
    )
    self._set_attributes(
        refresh_response,
        {
            'access_token': refresh_response['AuthenticationResult']['AccessToken'],
            'id_token': refresh_response['AuthenticationResult']['IdToken'],
            'token_type': refresh_response['AuthenticationResult']['TokenType']
        }
    )
Sets a new access token on the User using the refresh token.
def get_user_cmd(node_dict):
    """Get main command selection.

    Loops reading single keypresses until a valid command key is pressed,
    then returns that command's full name.

    :param node_dict: not referenced here — presumably kept for interface
        symmetry with sibling UI functions; confirm before removing.
    """
    # key -> [command-name, is-valid-command]
    key_lu = {"q": ["quit", True], "r": ["run", True],
              "s": ["stop", True], "u": ["update", True],
              "c": ["connect", True], "d": ["details", True]}
    ui_cmd_bar()
    cmd_valid = False
    input_flush()
    with term.cbreak():
        while not cmd_valid:
            val = input_by_key()
            # Unknown keys fall through to the "invalid" sentinel entry.
            cmd_name, cmd_valid = key_lu.get(val.lower(), ["invalid", False])
            if not cmd_valid:
                ui_print(" - {0}Invalid Entry{1}".format(C_ERR, C_NORM))
                sleep(0.5)
                ui_cmd_bar()
    return cmd_name
Get main command selection.
def text(self, force_get=False):
    """Get the text of the element.

    @type force_get: bool
    @param force_get: when True, read the text through a JavaScript
        snippet instead of the WebElement ``text`` property.
    @rtype: str
    @return: Text of the element
    """
    def text_element():
        return self.element.text

    def force_text_element():
        # JS template can read text even when the normal accessor
        # would return an empty string (e.g. hidden elements).
        return self.driver_wrapper.js_executor.execute_template_and_return_result(
            'getElementText.js', {}, self.element
        )

    if force_get:
        return self.execute_and_handle_webelement_exceptions(force_text_element,
                                                             'get text by javascript')
    else:
        return self.execute_and_handle_webelement_exceptions(text_element,
                                                             'get text')
Get the text of the element @rtype: str @return: Text of the element
def get_code(self):
    """The JavaCodeInfo of this member if it is a non-abstract method,
    None otherwise.

    reference: http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.3
    """
    buff = self.get_attribute("Code")
    if buff is None:
        # Abstract and native methods carry no Code attribute.
        return None
    with unpack(buff) as up:
        code = JavaCodeInfo(self.cpool)
        code.unpack(up)
    return code
the JavaCodeInfo of this member if it is a non-abstract method, None otherwise reference: http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.3
def get_message_from_call(self, *args, **kwargs):
    """Build a message object from a node call.

    Arguments are parsed in the following order:

    - a single positional argument (a :py:class:`dict`)
    - no positional arguments and a number of keyword arguments

    :raises: :py:exc:`TypeError` if the call matches neither format
    """
    if len(args) == 1 and isinstance(args[0], dict):
        self.logger.debug('called with arg dictionary')
        payload = args[0]
    elif not args and kwargs:
        self.logger.debug('called with kwargs')
        payload = kwargs
    else:
        self.logger.error(
            'get_message_from_call could not handle "%r", "%r"',
            args, kwargs
        )
        raise TypeError('Pass either keyword arguments or a dictionary argument')
    return self.message_class(payload)
\ Get message object from a call. :raises: :py:exc:`TypeError` (if the format is not what we expect) This is where arguments to nodes are turned into Messages. Arguments are parsed in the following order: - A single positional argument (a :py:class:`dict`) - No positional arguments and a number of keyword arguments
def depth_november_average_ground_temperature(self, value=None):
    """Corresponds to IDD Field `depth_november_average_ground_temperature`

    Args:
        value (float): value for IDD Field
            `depth_november_average_ground_temperature`
            Unit: C
            if `value` is None it will not be checked against the
            specification and is assumed to be a missing value

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        # None denotes a missing value and is stored unchecked.
        self._depth_november_average_ground_temperature = None
        return
    try:
        numeric = float(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type float '
            'for field `depth_november_average_ground_temperature`'.format(value))
    self._depth_november_average_ground_temperature = numeric
Corresponds to IDD Field `depth_november_average_ground_temperature` Args: value (float): value for IDD Field `depth_november_average_ground_temperature` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def ones(shape, ctx=None, dtype=None, **kwargs):
    """Returns a new array filled with all ones, with the given shape and type.

    Parameters
    ----------
    shape : int or tuple of int or list of int
        The shape of the array.
    ctx : Context, optional
        Device context; defaults to the current default context.
    dtype : str or numpy.dtype, optional
        Value type (default is `float32`).

    Returns
    -------
    NDArray
        A new array of the specified shape filled with all ones.
    """
    if ctx is None:
        ctx = current_context()
    # Fall back to MXNet's default real type (float32) when unspecified.
    dtype = mx_real_t if dtype is None else dtype
    return _internal._ones(shape=shape, ctx=ctx, dtype=dtype, **kwargs)
Returns a new array filled with all ones, with the given shape and type. Parameters ---------- shape : int or tuple of int or list of int The shape of the empty array. ctx : Context, optional An optional device context. Defaults to the current default context (``mxnet.context.current_context()``). dtype : str or numpy.dtype, optional An optional value type (default is `float32`). out : NDArray, optional The output NDArray (default is `None`). Returns ------- NDArray A new array of the specified shape filled with all ones. Examples -------- >>> mx.nd.ones(1).asnumpy() array([ 1.], dtype=float32) >>> mx.nd.ones((1,2), mx.gpu(0)) <NDArray 1x2 @gpu(0)> >>> mx.nd.ones((1,2), dtype='float16').asnumpy() array([[ 1., 1.]], dtype=float16)
def _load_result(response, ret): if response['code'] is None: ret['comment'] = response['content'] elif response['code'] == 401: ret['comment'] = '401 Forbidden: Authentication required!' elif response['code'] == 404: ret['comment'] = response['content']['message'] elif response['code'] == 200: ret['result'] = True ret['comment'] = 'Listing Current Configuration Only. ' \ 'Not action or changes occurred during the execution of this state.' ret['changes'] = response['content'] else: ret['comment'] = response['content']['message'] return ret
format the results of listing functions
def set_footer(self, *, text=EmptyEmbed, icon_url=EmptyEmbed):
    """Sets the footer for the embed content.

    This function returns the class instance to allow for fluent-style
    chaining.

    Parameters
    -----------
    text: :class:`str`
        The footer text.
    icon_url: :class:`str`
        The URL of the footer icon. Only HTTP(S) is supported.
    """
    # Rebuild the footer dict from scratch; fields left at the EmptyEmbed
    # sentinel are dropped rather than stored.
    self._footer = {}
    if text is not EmptyEmbed:
        self._footer['text'] = str(text)
    if icon_url is not EmptyEmbed:
        self._footer['icon_url'] = str(icon_url)
    return self
Sets the footer for the embed content. This function returns the class instance to allow for fluent-style chaining. Parameters ----------- text: :class:`str` The footer text. icon_url: :class:`str` The URL of the footer icon. Only HTTP(S) is supported.
def repl():
    """Runs an IPython repl with some oz context (settings, actions,
    uimodules and routes) preloaded into the user namespace.
    """
    try:
        import IPython
    except ImportError:
        # Only catch the missing-dependency case; the original bare
        # ``except:`` also swallowed unrelated import-time errors.
        print("ERROR: IPython is not installed. Please install it to use the repl.", file=sys.stderr)
        raise

    IPython.embed(user_ns=dict(
        settings=oz.settings,
        actions=oz._actions,
        uimodules=oz._uimodules,
        routes=oz._routes,
    ))
Runs an IPython repl with some context
def ssh_file(opts, dest_path, contents=None, kwargs=None, local_file=None):
    """Copies a file to the remote SSH target using either sftp or scp,
    as configured via the ``file_transport`` option (default: sftp).
    """
    if opts.get('file_transport', 'sftp') == 'sftp':
        return sftp_file(dest_path, contents, kwargs, local_file)
    return scp_file(dest_path, contents, kwargs, local_file)
Copies a file to the remote SSH target using either sftp or scp, as configured.