code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def setValues(self, values): ncols = self.getNumCols() nindices = self.getNumIndices() for key, value in values.items(): key = Utils.convToList(key) assert len(key) == nindices value = Utils.convToList(value) assert len(value) == ncols-nindices ...
Set the values of a DataFrame from a dictionary. Args: values: Dictionary with the values to set.
def compare_version(self, version_string, op):
    """Compare this Abinit version against *version_string* using operator *op*.

    Args:
        version_string: Version to compare with (e.g. "8.0.8").
        op: Operator name (e.g. ">=") understood by ``operator_from_str``.

    Returns:
        bool: Result of ``self.version <op> version_string``.
    """
    from pkg_resources import parse_version
    from monty.operator import operator_from_str
    compare = operator_from_str(op)
    return compare(parse_version(self.version), parse_version(version_string))
Compare Abinit version to `version_string` with operator `op`
def get_view(self, view_name):
    """Return the view registered under *view_name* at this object's timestamp."""
    view_map = self.measure_to_view_map
    return view_map.get_view(view_name=view_name, timestamp=self.time)
gets the view given the view name
def obj_update_or_create(model, defaults=None, update_fields=UNSET, **kwargs): obj, created = model.objects.get_or_create(defaults=defaults, **kwargs) if created: logger.debug('CREATED %s %s', model._meta.object_name, obj.pk, extra={'pk': ob...
Mimic queryset.update_or_create but using obj_update.
def get_rna(self) -> Rna:
    """Return the corresponding RNA node.

    :raises: InferCentralDogmaException when this node carries variants,
        since the conversion is only defined for the reference node.
    """
    # Variants invalidate the central-dogma inference, so refuse early.
    if self.variants:
        raise InferCentralDogmaException('can not get rna for variant')
    return Rna(
        namespace=self.namespace,
        name=self.name,
        identifier=self.identifier,
    )
Get the corresponding RNA or raise an exception if it's not the reference node. :raises: InferCentralDogmaException
def write(self, outfname=None): outfname = outfname or self.filename with codecs.open(outfname, 'wb', 'windows-1252') as outf: for survey in self.surveys: outf.write('\r\n'.join(survey._serialize())) outf.write('\r\n'+'\f'+'\r\n') outf.write('\x1A'...
Write or overwrite a `Survey` to the specified .DAT file
def _gather_field_values( item, *, fields=None, field_map=FIELD_MAP, normalize_values=False, normalize_func=normalize_value): it = get_item_tags(item) if fields is None: fields = list(it.keys()) normalize = normalize_func if normalize_values else lambda x: str(x) field_values = [] for field in fields: field_...
Create a tuple of normalized metadata field values. Parameter: item (~collections.abc.Mapping, str, os.PathLike): Item dict or filepath. fields (list): A list of fields used to compare item dicts. field_map (~collections.abc.Mapping): A mapping field name aliases. Default: :data:`~google_music_utils.constant...
def attachment_state(self):
    """Return the attachment status, or None when no attach data is present."""
    if self.attach_data:
        return self.attach_data.status
    return None
Get the attachment state.
def load_files(self, path): if self.verbose == 2: print("Indexing {}".format(path)) for filename in os.listdir(path): file_path = path + "/" + filename if os.path.isdir(file_path): self.load_files(file_path) elif filename.endswith(".yaml") ...
Loads files in a given path and all its subdirectories
def read_version(changelog='../CHANGELOG.rst'):
    """Read the version from the first changelog line starting with a digit.

    Args:
        changelog: Path to the changelog file. Defaults to
            '../CHANGELOG.rst', preserving the original behavior.

    Returns:
        The version number string, or None when no line matches.
    """
    # Raw string avoids the invalid escape sequence warning for \d (W605).
    regex = re.compile(r'^(?P<number>\d.*?) .*$')
    with open(changelog) as f:
        for line in f:
            match = regex.match(line)
            if match:
                return match.group('number')
    return None
Read version from the first line starting with digit
def get_value(self, tag=None, field=None): assert not (tag is not None and field is not None), \ "Cannot filter by tag and field simultaneously." selected_fields = self._select_by_field_or_tag(tag, field) missing_fields_idents = set(selected_fields) - set(self.field_values) i...
Generate an integer whose bits are set according to the values of fields in this bit field. All other bits are set to zero. Parameters ---------- tag : str Optionally specifies that the value should only include fields with the specified tag. field : str ...
def get_contents_to_filename(self, filename, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, res_download_handler=None, response_headers=...
Retrieve an object from S3 using the name of the Key object as the key in S3. Store contents of the object to a file named by 'filename'. See get_contents_to_file method for details about the parameters. :type filename: string :param filename: The filename of where to put the f...
def datetime_to_timestamp(dt):
    """Convert a timezone-aware datetime to seconds since the UNIX epoch.

    Note: equivalent to ``dt.timestamp()`` for aware datetimes (Python 3.3+).
    """
    from datetime import datetime, timezone
    # Use the stdlib UTC object instead of the external pytz-style `UTC`
    # constant, and avoid the deprecated datetime.utcfromtimestamp().
    epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
    return (dt - epoch).total_seconds()
Convert timezone-aware `datetime` to POSIX timestamp and return seconds since UNIX epoch. Note: similar to `datetime.timestamp()` in Python 3.3+.
def check_calling_sequence(name, function_name, function, possible_variables): try: calling_sequence = inspect.getargspec(function.input_object).args except AttributeError: calling_sequence = inspect.getargspec(function).args assert calling_sequence[0] == 'self', "Wrong s...
Check the calling sequence for the function looking for the variables specified. One or more of the variables can be in the calling sequence. Note that the order of the variables will be enforced. It will also enforce that the first parameter in the calling sequence is called 'self'. :p...
def fill_zero( x=None, y=None, label=None, color=None, width=None, dash=None, opacity=None, mode='lines+markers', **kargs ): return line( x=x, y=y, label=label, color=color, width=width, dash=dash, opacity=opacity, m...
Fill to zero. Parameters ---------- x : array-like, optional y : TODO, optional label : TODO, optional Returns ------- Chart
def wrap(stream, unicode=False, window=1024, echo=False, close_stream=True): if hasattr(stream, 'read'): proxy = PollingStreamAdapter(stream) elif hasattr(stream, 'recv'): proxy = PollingSocketStreamAdapter(stream) else: raise TypeError('stream must have either read or recv method') ...
Wrap a stream to implement expect functionality. This function provides a convenient way to wrap any Python stream (a file-like object) or socket with an appropriate :class:`Expecter` class for the stream type. The returned object adds an :func:`Expect.expect` method to the stream, while passing normal...
def can_edit(self, user):
    """Return whether `user` is allowed to change this project.

    Editing requires class-level permission and an unlocked project.
    """
    has_permission = self.class_.can_edit(user)
    return has_permission and self.status != u'locked'
Return whether or not `user` can make changes to the project.
def compress_monkey_patch(): from compressor.templatetags import compress as compress_tags from compressor import base as compress_base compress_base.Compressor.filter_input = filter_input compress_base.Compressor.output = output compress_base.Compressor.hunks = hunks compress_base.Compressor.pr...
patch all compress we need access to variables from widget scss for example we have:: /themes/bootswatch/cyborg/_variables but only if is cyborg active for this reasone we need dynamically append import to every scss file
def xraw_command(self, netfn, command, bridge_request=(), data=(), delay_xmit=None, retry=True, timeout=None): rsp = self.ipmi_session.raw_command(netfn=netfn, command=command, bridge_request=bridge_request, ...
Send raw ipmi command to BMC, raising exception on error This is identical to raw_command, except it raises exceptions on IPMI errors and returns data as a buffer. This is the recommend function to use. The response['data'] being a buffer allows traditional indexed access as well as w...
def get_all_submissions(course_id, item_id, item_type, read_replica=True): submission_qs = Submission.objects if read_replica: submission_qs = _use_read_replica(submission_qs) query = submission_qs.select_related('student_item').filter( student_item__course_id=course_id, student_item...
For the given item, get the most recent submission for every student who has submitted. This may return a very large result set! It is implemented as a generator for efficiency. Args: course_id, item_id, item_type (string): The values of the respective student_item fields to filter the sub...
def _to_utc(self, dt):
    """Localize the naive datetime ``dt`` to this object's timezone, then
    convert it to UTC."""
    local_tz = self._get_tz()
    localized = local_tz.localize(dt)
    return localized.astimezone(pytz.utc)
Takes a naive datetime, localizes it to the configured timezone, and returns it converted to UTC.
def check_key(self, key):
    """Validate the length of ``key``.

    @param key: a byte string
    @raise TypeError: when a size restriction is configured and the key
        length is not one of the allowed sizes
    """
    if not self.key_size:
        return
    if len(key) not in self.key_size:
        raise TypeError('invalid key size %s, must be one of %s'
                        % (len(key), self.key_size))
Check that the key length is valid. @param key: a byte string
def _annotation_handler(ion_type, length, ctx): _, self = yield self_handler = _create_delegate_handler(self) if ctx.annotations is not None: raise IonException('Annotation cannot be nested in annotations') ctx = ctx.derive_container_context(length, add_depth=0) (ann_length, _), _ = yield ct...
Handles annotations. ``ion_type`` is ignored.
def listen():
    """Block on the Win32 message queue, waiting for keyboard input."""
    message = MSG()
    # GetMessageA blocks until a message arrives for this thread.
    ctypes.windll.user32.GetMessageA(ctypes.byref(message), 0, 0, 0)
Listen for keyboard input.
def get_current_user(self):
    """Fetch the current-user endpoint and return the response."""
    return self.get(self.current_user_url)
Get data from the current user endpoint
def _from_dict(cls, _dict): args = {} if 'tokens' in _dict: args['tokens'] = [ TokenResult._from_dict(x) for x in (_dict.get('tokens')) ] if 'sentences' in _dict: args['sentences'] = [ SentenceResult._from_dict(x) for x in (_dic...
Initialize a SyntaxResult object from a json dictionary.
def get_list_from_file(file_name):
    """Read a UTF-8 text file and return its lines (newlines kept) as a list."""
    with open(file_name, mode='r', encoding='utf-8') as handle:
        return list(handle)
read the lines from a file into a list
def template_sunmoon(self, **kwargs): kwargs_copy = self.base_dict.copy() kwargs_copy.update(**kwargs) kwargs_copy['dataset'] = kwargs.get('dataset', self.dataset(**kwargs)) kwargs_copy['component'] = kwargs.get( 'component', self.component(**kwargs)) self._replace_no...
return the file name for sun or moon template files
def visit_break(self, node, parent):
    """Build and return a fresh Break node mirroring the raw node's position."""
    lineno = getattr(node, "lineno", None)
    col_offset = getattr(node, "col_offset", None)
    return nodes.Break(lineno, col_offset, parent)
visit a Break node by returning a fresh instance of it
def from_path(cls, *path, namespace=None):
    """Build up a Datastore key from alternating (kind, id) path segments.

    Parameters:
        *path: The path segments (str or int), taken two at a time.
        namespace: Optional namespace applied to every key in the chain.

    Returns:
        The key for the final pair, linked to its ancestors, or None for
        an empty path.
    """
    key = None
    for start in range(0, len(path), 2):
        segment = path[start:start + 2]
        key = cls(*segment, parent=key, namespace=namespace)
    return key
Build up a Datastore key from a path. Parameters: \*path(tuple[str or int]): The path segments. namespace(str): An optional namespace for the key. This is applied to each key in the tree. Returns: anom.Key: The Datastore represented by the given path.
def get_role(resource_root, service_name, name, cluster_name="default"):
    """Look up a role by name.

    @param resource_root: The root Resource object.
    @param service_name: Service name.
    @param name: Role name.
    @param cluster_name: Cluster name.
    @return: An ApiRole object.
    """
    role_path = _get_role_path(cluster_name, service_name, name)
    return _get_role(resource_root, role_path)
Lookup a role by name @param resource_root: The root Resource object. @param service_name: Service name @param name: Role name @param cluster_name: Cluster name @return: An ApiRole object
def ppo_original_world_model_stochastic_discrete(): hparams = ppo_original_params() hparams.policy_network = "next_frame_basic_stochastic_discrete" hparams_keys = hparams.values().keys() video_hparams = basic_stochastic.next_frame_basic_stochastic_discrete() for (name, value) in six.iteritems(video_hparams.va...
Atari parameters with stochastic discrete world model as policy.
def add_update(self, selector, update, multi=False, upsert=False, collation=None, array_filters=None): validate_ok_for_update(update) cmd = SON([('q', selector), ('u', update), ('multi', multi), ('upsert', upsert)]) collation = validate_collation_or_none(col...
Create an update document and add it to the list of ops.
def get_sig_info(hdr): string = '%|DSAHEADER?{%{DSAHEADER:pgpsig}}:{%|RSAHEADER?{%{RSAHEADER:pgpsig}}:{%|SIGGPG?{%{SIGGPG:pgpsig}}:{%|SIGPGP?{%{SIGPGP:pgpsig}}:{(none)}|}|}|}|' siginfo = hdr.sprintf(string) if siginfo != '(none)': error = 0 sigtype, sigdate, sigid = siginfo.split(',') el...
hand back signature information and an error code Shamelessly stolen from Seth Vidal http://yum.baseurl.org/download/misc/checksig.py
def name(self):
    """Return the enumeration name of this cursor kind, building the
    value-to-name cache on first use."""
    if self._name_map is None:
        # Map each class-level instance back to its attribute name.
        mapping = {}
        for attr, member in self.__class__.__dict__.items():
            if isinstance(member, self.__class__):
                mapping[member] = attr
        self._name_map = mapping
    return self._name_map[self]
Get the enumeration name of this cursor kind.
def days_and_sids_for_frames(frames): if not frames: days = np.array([], dtype='datetime64[ns]') sids = np.array([], dtype='int64') return days, sids check_indexes_all_same( [frame.index for frame in frames], message='Frames have mistmatched days.', ) check_indexe...
Returns the date index and sid columns shared by a list of dataframes, ensuring they all match. Parameters ---------- frames : list[pd.DataFrame] A list of dataframes indexed by day, with a column per sid. Returns ------- days : np.array[datetime64[ns]] The days in these da...
def get_source( self, environment: Environment, template: str, ) -> Tuple[str, Optional[str], Callable]: for loader in self._loaders(): try: return loader.get_source(environment, template) except TemplateNotFound: continue raise Tem...
Returns the template source from the environment. This considers the loaders on the :attr:`app` and blueprints.
def get_public_orders(self, group=False):
    """Return the currently open public orders.

    :param group: When True, orders at the same price are grouped
        (default: False).
    :type group: bool
    :return: Public orders currently open.
    :rtype: dict
    """
    self._log('get public orders')
    params = {'book': self.name, 'group': int(group)}
    return self._rest_client.get(endpoint='/order_book', params=params)
Return public orders that are currently open. :param group: If set to True (default: False), orders with the same price are grouped. :type group: bool :return: Public orders currently open. :rtype: dict
def get_datastream(self, datastream):
    """Fetch a Datastream by id and wrap the HTTP response in a schema object."""
    response = self.http.get('/Datastream/' + str(datastream))
    # Avoid shadowing the parameter with the wrapped result.
    return Schemas.Datastream(datastream=response)
To get Datastream by id
def text_extents(self, text): extents = ffi.new('cairo_text_extents_t *') cairo.cairo_text_extents(self._pointer, _encode_string(text), extents) self._check_status() return ( extents.x_bearing, extents.y_bearing, extents.width, extents.height, extents....
Returns the extents for a string of text. The extents describe a user-space rectangle that encloses the "inked" portion of the text, (as it would be drawn by :meth:`show_text`). Additionally, the :obj:`x_advance` and :obj:`y_advance` values indicate the amount by which the curre...
def get_objective(self, sampler):
    """Return an objective function (params -> energy) for an optimizer."""
    def objective(parameter_values):
        built = self.get_circuit(parameter_values)
        built.make_cache()
        return self.get_energy(built, sampler)
    return objective
Get an objective function to be optimized.
def add_shortcut_to_tooltip(action, context, name):
    """Append the shortcut registered for (context, name) to *action*'s tooltip."""
    shortcut = get_shortcut(context=context, name=name)
    action.setToolTip(action.toolTip() + ' (%s)' % shortcut)
Add the shortcut associated with a given action to its tooltip
def parse(self):
    """Parse the SGF data stored in 'self.data' and return a 'Collection'."""
    collection = Collection()
    while self.index < self.datalen:
        game = self.parseOneGame()
        if not game:
            break
        collection.append(game)
    return collection
Parses the SGF data stored in 'self.data', and returns a 'Collection'.
def load_model(modelname, add_sentencizer=False): loading_start = time.time() nlp = spacy.load(modelname) if add_sentencizer: nlp.add_pipe(nlp.create_pipe('sentencizer')) loading_end = time.time() loading_time = loading_end - loading_start if add_sentencizer: return nlp, loading_...
Load a specific spaCy model
def _add_person_to_group(person, group): from karaage.datastores import add_accounts_to_group from karaage.datastores import add_accounts_to_project from karaage.datastores import add_accounts_to_institute a_list = person.account_set add_accounts_to_group(a_list, group) for project in group.proj...
Call datastores after adding a person to a group.
def _populate_ranking_payoff_arrays(payoff_arrays, scores, costs): n = payoff_arrays[0].shape[0] for p, payoff_array in enumerate(payoff_arrays): payoff_array[0, :] = 0 for i in range(1, n): for j in range(n): payoff_array[i, j] = -costs[p, i-1] prize = 1. for...
Populate the ndarrays in `payoff_arrays` with the payoff values of the ranking game given `scores` and `costs`. Parameters ---------- payoff_arrays : tuple(ndarray(float, ndim=2)) Tuple of 2 ndarrays of shape (n, n). Modified in place. scores : ndarray(int, ndim=2) ndarray of shape ...
def _find_inline_images(contentsinfo):
    "Yield an ImageInfo for each inline image in the contentstream"
    for index, inline in enumerate(contentsinfo.inline_images):
        yield ImageInfo(
            name='inline-%02d' % index,
            shorthand=inline.shorthand,
            inline=inline,
        )
Find inline images in the contentstream
def width(self) -> int:
    """Determine how many entry columns the diagram spans.

    The width is one more than the largest x coordinate seen among entry
    keys, vertical lines, and horizontal line endpoints (0 when empty).
    """
    xs = [x for x, _ in self.entries.keys()]
    xs.extend(v.x for v in self.vertical_lines)
    for h in self.horizontal_lines:
        xs.extend((h.x1, h.x2))
    return 1 + int(max(xs, default=-1.0))
Determines how many entry columns are in the diagram.
def _adjust_inferential_results_for_parameter_constraints(self, constraints): if constraints is not None: inferential_attributes = ["standard_errors", "tvalues", ...
Ensure that parameters that were constrained during estimation do not have any values showed for inferential results. After all, no inference was performed. Parameters ---------- constraints : list of ints, or None. If list, should contain the positions in the array ...
def loadScopeGroupbyName(self, name, service_group_id, callback=None, errback=None):
    """Load an existing Scope Group into a high level Scope Group object.

    :param str name: Name of an existing Scope Group
    :param int service_group_id: id of the associated service group
    """
    import ns1.ipam
    group = ns1.ipam.Scopegroup(
        self.config, name=name, service_group_id=service_group_id)
    return group.load(callback=callback, errback=errback)
Load an existing Scope Group by name and service group id into a high level Scope Group object :param str name: Name of an existing Scope Group :param int service_group_id: id of the service group the Scope group is associated with
def _sign(self, data: bytes) -> bytes: assert self._raiden_service is not None return self._raiden_service.signer.sign(data=data)
Use eth_sign compatible hasher to sign matrix data
def compile(conf):
    """Compile a classic uWSGI configuration file from the given uwsgiconf module."""
    with errorprint():
        module = ConfModule(conf)
        # Avoid shadowing the `conf` argument in the loop variable.
        for configuration in module.configurations:
            configuration.format(do_print=True)
Compiles classic uWSGI configuration file using the default or given `uwsgiconf` configuration module.
def delete_jail(name): if is_jail(name): cmd = 'poudriere jail -d -j {0}'.format(name) __salt__['cmd.run'](cmd) if is_jail(name): return 'Looks like there was an issue deleteing jail \ {0}'.format(name) else: return 'Looks like jail {0} has not been create...
Deletes poudriere jail with `name` CLI Example: .. code-block:: bash salt '*' poudriere.delete_jail 90amd64
def copy(self, newdata=None):
    """Return a copy of the cube, optionally substituting new data.

    Arguments:
        newdata: Replacement data array; when None the current data is copied.
    """
    data = self.data.copy() if newdata is None else newdata
    return self.__class__(
        self.molecule,
        self.origin.copy(),
        self.axes.copy(),
        self.nrep.copy(),
        data,
        self.subtitle,
        self.nuclear_charges,
    )
Return a copy of the cube with optionally new data.
def bitcoin_address(self) -> str:
    """Generate a random bitcoin address.

    :return: Bitcoin address.

    :Example:
        3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX
    """
    prefix = self.random.choice(['1', '3'])
    alphabet = string.ascii_letters + string.digits
    suffix = ''.join(self.random.choice(alphabet) for _ in range(33))
    return prefix + suffix
Generate a random bitcoin address. :return: Bitcoin address. :Example: 3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX
def derive_title(self):
    """Return the explicit title, or derive one from the model's verbose name."""
    if self.title:
        return self.title
    return _("Create %s") % force_text(self.model._meta.verbose_name).title()
Derives our title from our object
def get_attribute(module_name: str, attribute_name: str):
    """Import *module_name* and return its attribute *attribute_name*.

    In most cases the attribute is a class or function.

    :param module_name: module name
    :param attribute_name: attribute name
    :return: the resolved module attribute
    """
    assert isinstance(module_name, str)
    assert isinstance(attribute_name, str)
    module = importlib.import_module(module_name)
    return getattr(module, attribute_name)
Get the specified module attribute. It most cases, it will be a class or function. :param module_name: module name :param attribute_name: attribute name :return: module attribute
def log_middleware(store):
    """Return middleware that prints each dispatched action to the console."""
    def wrapper(next_dispatch):
        def dispatch_and_log(action):
            print('Dispatch Action:', action)
            return next_dispatch(action)
        return dispatch_and_log
    return wrapper
log all actions to console as they are dispatched
def adjustMinimumWidth(self):
    """Recompute the minimum width from the pixmap size and the maximum count."""
    pixmap_width = self.pixmapSize().width()
    count = self.maximum()
    # Each cell is one pixmap wide plus 3px of padding.
    self.setMinimumWidth(pixmap_width * count + 3 * count)
Modifies the minimum width to factor in the size of the pixmaps and the number for the maximum.
def visit_Import(self, node): for alias in node.names: current_module = MODULES for path in alias.name.split('.'): if path not in current_module: raise PythranSyntaxError( "Module '{0}' unknown.".format(alias.name), ...
Check if imported module exists in MODULES.
def create_parser(self, prog_name, subcommand):
    """Create an OptionParser for this command.

    prog_name -- name of the command
    subcommand -- name of the subcommand
    """
    return OptionParser(
        prog=prog_name,
        usage=self.usage(subcommand),
        option_list=self.option_list,
    )
Create an OptionParser prog_name - Name of a command subcommand - Name of a subcommand
def _recursive_getitem(d, key): if key in d: return d else: for v in d.values(): return _recursive_getitem(v, key) else: raise KeyError('Key not found: {}'.format(key))
Descend into a dict of dicts to return the one that contains a given key. Every value in the dict must be another dict.
def _get_raw_key(self, key_id): try: static_key = self._static_keys[key_id] except KeyError: static_key = os.urandom(32) self._static_keys[key_id] = static_key return WrappingKey( wrapping_algorithm=WrappingAlgorithm.AES_256_GCM_IV12_TAG16_NO_PADDI...
Returns a static, randomly-generated symmetric key for the specified key ID. :param str key_id: Key ID :returns: Wrapping key that contains the specified static key :rtype: :class:`aws_encryption_sdk.internal.crypto.WrappingKey`
def add_before(self, pipeline):
    """Prepend a Pipeline to this processing pipeline.

    Arguments:
        pipeline: The Pipeline or callable to apply before this Pipeline.
    """
    if not isinstance(pipeline, Pipeline):
        pipeline = Pipeline(pipeline)
    # Copy both pipe lists so neither pipeline aliases the other's storage.
    self.pipes = list(pipeline.pipes) + list(self.pipes)
    return self
Add a Pipeline to be applied before this processing pipeline. Arguments: pipeline: The Pipeline or callable to apply before this Pipeline.
def getvalue(self, v): if not is_measure(v): raise TypeError('Incorrect input type for getvalue()') import re rx = re.compile("m\d+") out = [] keys = v.keys()[:] keys.sort() for key in keys: if re.match(rx, key): out.append(...
Return a list of quantities making up the measures' value. :param v: a measure
def serialize(self): if self.mate_chrom is None: remote_tag = "." else: if self.within_main_assembly: mate_chrom = self.mate_chrom else: mate_chrom = "<{}>".format(self.mate_chrom) tpl = {FORWARD: "[{}:{}[", REVERSE: "]{}:{}...
Return string representation for VCF
def print_config(): description = parser = argparse.ArgumentParser( description=textwrap.dedent(description) ) parser.add_argument( 'config_uri', type=str, help='an integer for the accumulator' ) parser.add_argument( '-k', '--key', dest='key', metavar='PRE...
Print config entry function.
def start(self):
    """Run `self._wrapped_target` on a fresh daemon thread, first stopping
    any thread that is already running."""
    if self.running:
        self.stop()
    worker = threading.Thread(target=self._wrapped_target)
    worker.daemon = True
    self._thread = worker
    worker.start()
Start the run method as a new thread. It will first stop the thread if it is already running.
def convert2wkt(self, set3D=True): features = self.getfeatures() for feature in features: try: feature.geometry().Set3D(set3D) except AttributeError: dim = 3 if set3D else 2 feature.geometry().SetCoordinateDimension(dim) ret...
export the geometry of each feature as a wkt string Parameters ---------- set3D: bool keep the third (height) dimension? Returns -------
def delete_database(self, database):
    """Drop a database on the InfluxDB server.

    :param database: the name of the database to delete
    :type database: string
    :rtype: boolean
    """
    self.request(
        url="db/{0}".format(database),
        method='DELETE',
        expected_response_code=204,
    )
    return True
Drop a database on the InfluxDB server. :param database: the name of the database to delete :type database: string :rtype: boolean
def deploy(): _require_root() if not confirm("This will apply any available migrations to the database. Has the database been backed up?"): abort("Aborted.") if not confirm("Are you sure you want to deploy?"): abort("Aborted.") with lcd(PRODUCTION_DOCUMENT_ROOT): with shell_env(P...
Deploy to production.
def postToNodeInBox(self, msg, frm):
    """Append the message to the node inbox.

    :param msg: a node message
    :param frm: the name of the node that sent this `msg`
    """
    logger.trace("{} appending to nodeInbox {}".format(self, msg))
    entry = (msg, frm)
    self.nodeInBox.append(entry)
Append the message to the node inbox :param msg: a node message :param frm: the name of the node that sent this `msg`
def wiki(self):
    """Return wiki markup describing the pull request, with a github link."""
    created = self.pull.created_at.strftime("%m/%d/%Y %H:%M")
    return "{} {} ({} [{} github])\n".format(
        self.pull.avatar_url, self.pull.body, created, self.pull.html_url)
Returns the wiki markup describing the details of the github pull request as well as a link to the details on github.
def get(self, name=None, default=None): if name is None: return self.data if not isinstance(name, list): name = [name] data = self.data try: for key in name: data = data[key] except KeyError: return default r...
Get attribute value or return default Whole data dictionary is returned when no attribute provided. Supports direct values retrieval from deep dictionaries as well. Dictionary path should be provided as list. The following two examples are equal: tree.data['hardware']['memory']...
def place_market_order(self, side: Side, amount: Number) -> Order:
    """Place a market order for *amount* on the given *side*."""
    order_type = OrderType.MARKET
    return self.place_order(side, order_type, amount)
Place a market order.
async def dump_message(obj, msg, field_archiver=None):
    """Dump the message into the object and return its popo representation.

    :param obj: destination container (a fresh OrderedDict when None)
    :param msg: message instance to serialize
    :param field_archiver: optional archiver forwarded per field
    :return: the populated object
    """
    target = collections.OrderedDict() if obj is None else get_elem(obj)
    for spec in msg.__class__.f_specs():
        await dump_message_field(target, msg=msg, field=spec,
                                 field_archiver=field_archiver)
    return target
Dumps message to the object. Returns message popo representation. :param obj: :param msg: :param field_archiver: :return:
def make_url(path, protocol=None, hosts=None):
    """Build an Archive.org URL from a path.

    :param path: The Archive.org path (surrounding whitespace is stripped).
    :param protocol: (optional) HTTP protocol; "https://" by default.
    :param hosts: (optional) hosts to pick from at random; defaults to the
        single host "archive.org".
    """
    scheme = protocol if protocol else 'https://'
    if hosts:
        host = hosts[random.randrange(len(hosts))]
    else:
        host = 'archive.org'
    return scheme + host + path.strip()
Make an URL given a path, and optionally, a protocol and set of hosts to select from randomly. :param path: The Archive.org path. :type path: str :param protocol: (optional) The HTTP protocol to use. "https://" is used by default. :type protocol: str :param hosts: (option...
def sg_summary_gradient(tensor, gradient, prefix=None, name=None): r prefix = '' if prefix is None else prefix + '/' name = prefix + _pretty_name(tensor) if name is None else prefix + name _scalar(name + '/grad', tf.reduce_mean(tf.abs(gradient))) _histogram(name + '/grad-h', tf.abs(gradient))
r"""Register `tensor` to summary report as `gradient` Args: tensor: A `Tensor` to log as gradient gradient: A 0-D `Tensor`. A gradient to log prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: ...
def get_gradients(self) -> Dict[str, List[mx.nd.NDArray]]: return {"dev_%d_%s" % (i, name): exe.grad_arrays[j] for i, exe in enumerate(self.executors) for j, name in enumerate(self.executor_group.arg_names) if name in self.executor_group.param_names and self.executors[0].grad_arr...
Returns a mapping of parameters names to gradient arrays. Parameter names are prefixed with the device.
def get_cmd(self): cmd = None if self.test_program in ('nose', 'nosetests'): cmd = "nosetests %s" % self.file_path elif self.test_program == 'django': executable = "%s/manage.py" % self.file_path if os.path.exists(executable): cmd = "python %s/...
Returns the full command to be executed at runtime
def from_edgelist(self, edges, strict=True): for edge in edges: if len(edge) == 3: self.update(edge[1], edge[0], **edge[2]) elif len(edge) == 2: self.update(edge[1], edge[0]) elif strict: raise ValueError('edge incorrect shape: ...
Load transform data from an edge list into the current scene graph. Parameters ------------- edgelist : (n,) tuples (node_a, node_b, {key: value}) strict : bool If true, raise a ValueError when a malformed edge is passed in a tuple.
def confidence_interval(self, confidenceLevel): if not (confidenceLevel >= 0 and confidenceLevel <= 1): raise ValueError("Parameter percentage has to be in [0,1]") underestimations = [] overestimations = [] for error in self._errorValues: if error is None: ...
Calculates for which value confidenceLevel% of the errors are closer to 0. :param float confidenceLevel: percentage of the errors that should be smaller than the returned value for overestimations and larger than the returned value for underestimations. confidenceLevel has t...
def docgraph2freqt(docgraph, root=None, include_pos=False, escape_func=FREQT_ESCAPE_FUNC): if root is None: return u"\n".join( sentence2freqt(docgraph, sentence, include_pos=include_pos, escape_func=escape_func) for sentence in docgraph.s...
convert a docgraph into a FREQT string.
def lookup_data(self, lookup, data): value = data parts = lookup.split('.') if not parts or not parts[0]: return value part = parts[0] remaining_lookup = '.'.join(parts[1:]) if callable(getattr(data, 'keys', None)) and hasattr(data, '__getitem__'): ...
Given a lookup string, attempts to descend through nested data looking for the value. Can work with either dictionary-alikes or objects (or any combination of those). Lookups should be a string. If it is a dotted path, it will be split on ``.`` & it will traverse through to fin...
def list_(runas=None):
    """List the installable versions of ruby.

    runas
        The user under which to run rbenv; defaults to the Salt user.

    CLI Example:

    .. code-block:: bash

        salt '*' rbenv.list
    """
    versions = []
    output = _rbenv_exec(['install', '--list'], runas=runas)
    if output:
        for line in output.splitlines():
            # Skip the header line emitted by `rbenv install --list`.
            if line == 'Available versions:':
                continue
            versions.append(line.strip())
    return versions
List the installable versions of ruby runas The user under which to run rbenv. If not specified, then rbenv will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rbenv.list
def utc_dt(dt):
    """Return *dt* in UTC; a naive datetime is assumed to already be UTC."""
    if dt.tzinfo:
        return dt.astimezone(pytz.utc)
    return pytz.utc.localize(dt)
Set UTC timezone on a datetime object. A naive datetime is assumed to be in UTC TZ.
def connect(self): try: for group in ('inlets', 'receivers', 'outlets', 'senders'): self._connect_subgroup(group) except BaseException: objecttools.augment_excmessage( 'While trying to build the node connection of the `%s` ' 'sequen...
Connect the link sequences of the actual model.
def wrap(self, string, width): if not string or width <= 0: logging.error("invalid string: %s or width: %s" % (string, width)) return False tmp = "" for line in string.splitlines(): if len(line) <= width: tmp += line + "\n" cont...
Wrap lines according to width Place '\n' whenever necessary
def from_database(cls, database): if isinstance(database, PostgresqlDatabase): return PostgresqlMigrator(database) if isinstance(database, SqliteDatabase): return SqliteMigrator(database) if isinstance(database, MySQLDatabase): return MySQLMigrator(database) ...
Initialize migrator by db.
def get_all_formulae(chebi_ids):
    """Return the formulae for all given ChEBI ids, flattened into one list."""
    return [formula
            for chebi_id in chebi_ids
            for formula in get_formulae(chebi_id)]
Returns all formulae
def domain_score(self, domains):
    """Call the OpenDNS domain scores endpoint.

    Deprecated because the upstream OpenDNS Investigate API endpoint is
    deprecated; use InvestigateApi.categorization() instead.
    """
    warn(
        'OpenDNS Domain Scores endpoint is deprecated. Use '
        'InvestigateApi.categorization() instead',
        DeprecationWarning,
    )
    return self._multi_post('domains/score/', domains)
Calls domain scores endpoint. This method is deprecated since OpenDNS Investigate API endpoint is also deprecated.
def get_wide_unicode(self, i): value = [] for x in range(3): c = next(i) if c == '0': value.append(c) else: raise SyntaxError('Invalid wide Unicode character at %d!' % (i.index - 1)) c = next(i) if c in ('0', '1'): ...
Get wide Unicode.
def get_plugins_info(self):
    """Collect the current live info from all registered plugins.

    Return a dictionary, keyed on the plugin name.
    """
    info = {}
    for plugin in self.plugins:
        info.update(plugin.get_info())
    return info
Collect the current live info from all the registered plugins. Return a dictionary, keyed on the plugin name.
def transaction(data_access): old_autocommit = data_access.autocommit data_access.autocommit = False try: yield data_access except RollbackTransaction as ex: data_access.rollback() except Exception as ex: data_access.rollback() raise ex else: data_access.commit() finally: data_acce...
Wrap statements in a transaction. If the statements succeed, commit, otherwise rollback. :param data_access: a DataAccess instance
def list_all_directories(self): def list_dirs_recursively(directory): if directory == self.filesystem: yield directory d_gen = itertools.chain( directory.directories, *tuple(list_dirs_recursively(d) for d in directory...
Utility method that yields all directories on the device's file systems.
def _form_datetimes(days, msecs): all_datetimes = [] for i in range(days.size): day = int(days[i]) msec = msecs[i] scanline_datetimes = [] for j in range(int(VALUES_PER_SCAN_LINE / 4)): usec = 1000 * (j * VIEW_TIME_ADJUSTMENT + msec) delta = (dt.timedelta(...
Calculate seconds since EPOCH from days and milliseconds for each of IASI scan.
def get_valid_cwd(): try: cwd = _current_dir() except: warn("Your current directory is invalid. If you open a ticket at " + "https://github.com/milkbikis/powerline-shell/issues/new " + "we would love to help fix the issue.") sys.stdout.write("> ") sys.exit...
Determine and check the current working directory for validity. Typically, an invalid directory arises when you check out a different branch on git that doesn't have this directory. When an invalid directory is found, a warning is printed to the screen, but the directory is still returned as-is, since this is w...
def get_history_by_tail_number(self, tail_number, page=1, limit=100):
    """Fetch the flight history of a particular aircraft by its tail number.

    Args:
        tail_number (str): The tail number, e.g. VT-ANL.
        page (int): Result page to fetch.
        limit (int): Maximum number of records per page.
    """
    token = str(self.AUTH_TOKEN)
    url = REG_BASE.format(tail_number, token, page, limit)
    return self._fr24.get_data(url, True)
Fetch the history of a particular aircraft by its tail number. This method can be used to get the history of a particular aircraft by its tail number. It checks the user authentication and returns the data accordingly. Args: tail_number (str): The tail number, e.g. VT-ANL ...
def findPrevStmt(self, block):
    """Return a Statement spanning the previous statement before *block*."""
    end = self._prevNonCommentBlock(block)
    start = self.findStmtStart(end)
    return Statement(self._qpart, start, end)
Returns a tuple that contains the first and last line of the previous statement before line.
def check_security_settings(): in_production = not (current_app.debug or current_app.testing) secure = current_app.config.get('SESSION_COOKIE_SECURE') if in_production and not secure: current_app.logger.warning( "SESSION_COOKIE_SECURE setting must be set to True to prevent the " ...
Warn if session cookie is not secure in production.