code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_ranges_from_array(arr, append_last=True):
    """Build [start, stop[ ranges from consecutive array items.

    Each output row pairs an element with its successor. When
    *append_last* is True the final element is paired with ``None`` so
    the output has the same length as ``arr``.

    Parameters
    ----------
    arr : array-like
    append_last : bool

    Returns
    -------
    numpy.ndarray
        Two-column array of (start, stop) pairs.
    """
    stops = arr[1:]
    if append_last:
        starts = arr[:]
        stops = np.append(stops, None)
    else:
        starts = arr[:-1]
    return np.column_stack((starts, stops))
Takes an array and calculates ranges [start, stop[. The last range end is none to keep the same length. Parameters ---------- arr : array like append_last: bool If True, append item with a pair of last array item and None. Returns ------- numpy.array The array formed by pai...
def predict(self, X): if np.any(X < self.grid[:, 0]) or np.any(X > self.grid[:, -1]): raise ValueError('data out of min/max bounds') binassign = np.zeros((self.n_features, len(X)), dtype=int) for i in range(self.n_features): binassign[i] = np.digitize(X[:, i], self.grid[i...
Get the index of the grid cell containing each sample in X Parameters ---------- X : array-like, shape = [n_samples, n_features] New data Returns ------- y : array, shape = [n_samples,] Index of the grid cell containing each sample
def series2df(Series, layer=2, split_sign = '_'): try: Series.columns Series = Series.iloc[:,0] except: pass def _helper(x, layer=2): try: return flatten_dict(ast.literal_eval(x), layers=layer, split_sign=split_sign) except: try: ...
Expects a Series in which each row is a JSON-formatted string with the same structure.
def get_times(self): if not self.n: return [] self.times = list(mul(self.u1, self.t1)) + \ list(mul(self.u2, self.t2)) + \ list(mul(self.u3, self.t3)) + \ list(mul(self.u4, self.t4)) self.times = matrix(list(set(self.times))) self.times = l...
Return all the action times and times-1e-6 in a list
def pairwise(iterable):
    """Pair each element with its right-hand neighbor.

    Arguments
    ---------
    iterable : iterable

    Yields
    ------
    tuple
        Each element paired with its successor; nothing for inputs with
        fewer than two elements.
    """
    iterator = iter(iterable)
    try:
        left = next(iterator)
    except StopIteration:
        # Original used a bare next(); since PEP 479 a StopIteration
        # escaping a generator body becomes RuntimeError on empty input.
        return
    for right in iterator:
        yield left, right
        left = right
Pair each element with its neighbors. Arguments --------- iterable : iterable Returns ------- The generator produces a tuple containing a pairing of each element with its neighbor.
def verify_dependencies(self):
    """Check that no Analysis is followed by a Transformation in self.deps.

    A transformation could invalidate a previously computed analysis, so
    the ordering Analysis -> Transformation is rejected.
    """
    for previous, current in zip(self.deps, self.deps[1:]):
        bad_order = (isinstance(current, Transformation)
                     and isinstance(previous, Analysis))
        assert not bad_order, "invalid dep order for %s" % self
Checks no analysis are called before a transformation, as the transformation could invalidate the analysis.
def route(obj, rule, *args, **kwargs):
    """Decorator for View classes: register the class as a URL rule.

    :param obj: object exposing ``add_url_rule`` (Flask app/blueprint).
    :param rule: the URL rule string.

    Remaining args/kwargs are forwarded to ``add_url_rule``.  The
    endpoint defaults to the snake_case form of the class name.
    """
    def decorator(cls):
        # Only derive the default endpoint when the caller did not supply
        # one -- the original evaluated camel_to_snake unconditionally via
        # kwargs.get(..., default).
        if 'endpoint' in kwargs:
            endpoint = kwargs['endpoint']
        else:
            endpoint = camel_to_snake(cls.__name__)
        kwargs['view_func'] = cls.as_view(endpoint)
        obj.add_url_rule(rule, *args, **kwargs)
        return cls
    return decorator
Decorator for the View classes.
def print_plugins(folders, exit_code=0): modules = plugins.get_plugin_modules(folders) pluginclasses = sorted(plugins.get_plugin_classes(modules), key=lambda x: x.__name__) for pluginclass in pluginclasses: print(pluginclass.__name__) doc = strformat.wrap(pluginclass.__doc__, 80) pri...
Print available plugins and exit.
def marginalization_bins(self):
    """Logarithmic binning used for the marginalization integrals.

    Returns 1001 log-spaced points spanning one decade either side of
    the distribution mean, scaled by ``self._j_ref``.
    """
    center = np.log10(self.mean())
    edges = np.logspace(center - 1.0, center + 1.0, 1001)
    return edges / self._j_ref
Binning to use to do the marginalization integrals
def GetMessages(self, files): result = {} for file_name in files: file_desc = self.pool.FindFileByName(file_name) for desc in file_desc.message_types_by_name.values(): result[desc.full_name] = self.GetPrototype(desc) for extension in file_desc.extensions_by_name.values(): if ex...
Gets all the messages from a specified file. This will find and resolve dependencies, failing if the descriptor pool cannot satisfy them. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dep...
def clone_wire(old_wire, name=None):
    """Make a copy of any existing wire.

    :param old_wire: the wire to clone
    :param name: name for the new wire (ignored for Const wires, which
        are duplicated with their value and bitwidth only)

    Intended mainly for copying wires between blocks -- duplicate names
    within one block are not allowed.
    """
    if isinstance(old_wire, Const):
        return Const(old_wire.val, old_wire.bitwidth)
    new_name = old_wire.name if name is None else name
    return old_wire.__class__(old_wire.bitwidth, name=new_name)
Makes a copy of any existing wire :param old_wire: The wire to clone :param name: a name for the new wire Note that this function is mainly intended to be used when the two wires are from different blocks. Making two wires with the same name in the same block is not allowed
def sequence_to_string(
    a_list, open_bracket_char='[', close_bracket_char=']', delimiter=", "
):
    """Turn a list into a delimited string of converted items.

    Each element is converted with ``local_to_str`` (which flattens
    nested lists) and the results are joined and wrapped in brackets.
    """
    body = delimiter.join(local_to_str(item) for item in a_list)
    return "{}{}{}".format(open_bracket_char, body, close_bracket_char)
a dedicated function that turns a list into a comma delimited string of items converted. This method will flatten nested lists.
def check_dist_restriction(options, check_target=False): dist_restriction_set = any([ options.python_version, options.platform, options.abi, options.implementation, ]) binary_only = FormatControl(set(), {':all:'}) sdist_dependencies_allowed = ( options.format_cont...
Function for determining if custom platform options are allowed. :param options: The OptionParser options. :param check_target: Whether or not to check if --target is being used.
def _GetDirectory(self):
    """Retrieves the directory.

    Returns:
      LVMDirectory: a directory, or None when this entry is not a
          directory.
    """
    if self.entry_type == definitions.FILE_ENTRY_TYPE_DIRECTORY:
        return LVMDirectory(self._file_system, self.path_spec)
    return None
Retrieves the directory. Returns: LVMDirectory: a directory or None if not available.
def get(self, name: Text, final: C) -> C:
    """Get the callable which will run all middlewares for *name*.

    :param name: name of the function to be called
    :param final: function invoked at the bottom of the stack (the one
        provided by the implementer)
    :return: a Caller wrapping this stack, the name and the final callable
    """
    return Caller(self, name, final)
Get the function to call which will run all middlewares. :param name: Name of the function to be called :param final: Function to call at the bottom of the stack (that's the one provided by the implementer). :return:
def refractory(times, refract=0.002):
    """Drop spike times that violate a refractory period.

    Keeps the first spike, then keeps each later spike only if it occurs
    at least *refract* seconds after the last kept spike.

    :param times: spike times in seconds, ascending
    :type times: list(float)
    :param refract: refractory period in seconds
    :type refract: float
    :returns: filtered spike times in seconds
    :rtype: list(float)
    """
    if not times:
        # Guard: the original raised IndexError on an empty list.
        return []
    kept = [times[0]]
    for t in times[1:]:
        if kept[-1] + refract <= t:
            kept.append(t)
    return kept
Removes spikes in times list that do not satisfy refractor period :param times: list(float) of spike times in seconds :type times: list(float) :param refract: Refractory period in seconds :type refract: float :returns: list(float) of spike times in seconds For every interspike interval < refract, removes the ...
def _get_resource_hash(zone_name, record): record_data = defaultdict(int, record) if type(record_data['GeoLocation']) == dict: record_data['GeoLocation'] = ":".join(["{}={}".format(k, v) for k, v in record_data['GeoLocation'].items()]) args = [ zone_name, reco...
Returns the last ten digits of the sha256 hash of the combined arguments. Useful for generating unique resource IDs Args: zone_name (`str`): The name of the DNS Zone the record belongs to record (`dict`): A record dict to generate the hash from Returns: `str...
def open(self, mode): if self.hdf5 == (): kw = dict(mode=mode, libver='latest') if mode == 'r': kw['swmr'] = True try: self.hdf5 = hdf5.File(self.filename, **kw) except OSError as exc: raise OSError('%s in %s' % (exc...
Open the underlying .hdf5 file and the parent, if any
def filter_using_summary(fq, args): data = {entry[0]: entry[1] for entry in process_summary( summaryfile=args.summary, threads="NA", readtype=args.readtype, barcoded=False)[ ["readIDs", "quals"]].itertuples(index=False)} try: for record in SeqIO.parse(fq, "fastq")...
Use quality scores from albacore summary file for filtering Use the summary file from albacore for more accurate quality estimate Get the dataframe from nanoget, convert to dictionary
def rollback(self) -> None: if len(self._transactions) == 0: raise RuntimeError("rollback called outside transaction") _debug("rollback:", self._transactions[-1]) try: for on_rollback in self._transactions[-1]: _debug("--> rolling back", on_rollback) ...
Roll back to previous database state. However stay inside transaction management.
def make_sh_output(value, output_script, witness=False):
    """Build a script-hash TxOut.  int, str -> TxOut"""
    serialized_value = utils.i2le_padded(value, 8)
    script = make_sh_output_script(output_script, witness)
    return _make_output(value=serialized_value, output_script=script)
int, str -> TxOut
def are_equivalent_pyxb(a_pyxb, b_pyxb, ignore_timestamps=False): normalize_in_place(a_pyxb, ignore_timestamps) normalize_in_place(b_pyxb, ignore_timestamps) a_xml = d1_common.xml.serialize_to_xml_str(a_pyxb) b_xml = d1_common.xml.serialize_to_xml_str(b_pyxb) are_equivalent = d1_common.xml.are_equiv...
Determine if SystemMetadata PyXB objects are semantically equivalent. Normalize then compare SystemMetadata PyXB objects for equivalency. Args: a_pyxb, b_pyxb : SystemMetadata PyXB objects to compare reset_timestamps: bool ``True``: Timestamps in the SystemMetadata are set to a standard v...
def addJunctionPos(shape, fromPos, toPos):
    """Return *shape* extended with the given endpoint positions.

    The positions are only prepended/appended when they differ from the
    existing endpoints.  Assumes shape and positions share the same
    dimensionality.
    """
    extended = list(shape)
    if shape[0] != fromPos:
        extended.insert(0, fromPos)
    if shape[-1] != toPos:
        extended = extended + [toPos]
    return extended
Extends shape with the given positions in case they differ from the existing endpoints. assumes that shape and positions have the same dimensionality
def Async(f, n=None, timeout=None):
    """Wrap *f* so it runs via the thread pool (concise pool.submit).

    :param f: the function to run asynchronously
    :param n: pool size, passed through to ``threads``
    :param timeout: timeout, passed through to ``threads``
    """
    return threads(n=n, timeout=timeout)(f)
Concise usage for pool.submit. Basic Usage Asnyc & threads :: from torequests.main import Async, threads import time def use_submit(i): time.sleep(i) result = 'use_submit: %s' % i print(result) return result @threads() def...
def service_create(name, service_type, description=None, profile=None,
                   **connection_args):
    """Add a service to the Keystone service catalog.

    CLI Example:

    .. code-block:: bash

        salt '*' keystone.service_create nova compute 'OpenStack Compute Service'
    """
    client = auth(profile, **connection_args)
    new_service = client.services.create(
        name, service_type, description=description)
    return service_get(new_service.id, profile=profile, **connection_args)
Add service to Keystone service catalog CLI Examples: .. code-block:: bash salt '*' keystone.service_create nova compute \ 'OpenStack Compute Service'
def read_losc_hdf5_state(f, path='quality/simple', start=None, end=None, copy=False): dataset = io_hdf5.find_dataset(f, '%s/DQmask' % path) maskset = io_hdf5.find_dataset(f, '%s/DQDescriptions' % path) nddata = dataset[()] bits = [bytes.decode(bytes(b), 'utf-8') for b in maskset...
Read a `StateVector` from a LOSC-format HDF file. Parameters ---------- f : `str`, `h5py.HLObject` path of HDF5 file, or open `H5File` path : `str` path of HDF5 dataset to read. start : `Time`, `~gwpy.time.LIGOTimeGPS`, optional start GPS time of desired data end : `T...
def nvrtcGetProgramLog(self, prog): size = c_size_t() code = self._lib.nvrtcGetProgramLogSize(prog, byref(size)) self._throw_on_error(code) buf = create_string_buffer(size.value) code = self._lib.nvrtcGetProgramLog(prog, buf) self._throw_on_error(code) return buf....
Returns the log for the NVRTC program object. Only useful after calls to nvrtcCompileProgram or nvrtcVerifyProgram.
def _folder_item_method(self, analysis_brain, item): is_editable = self.is_analysis_edition_allowed(analysis_brain) method_title = analysis_brain.getMethodTitle item['Method'] = method_title or '' if is_editable: method_vocabulary = self.get_methods_vocabulary(analysis_brain)...
Fills the analysis' method to the item passed in. :param analysis_brain: Brain that represents an analysis :param item: analysis' dictionary counterpart that represents a row
def print_graph(self, format: str = 'turtle') -> str:
    """Print the rdflib Graph serialized in *format* and return the text.

    :param format: rdflib serialization format name (default 'turtle')
    :return: the serialized graph as a str

    The original was annotated ``-> str`` but returned None; the
    serialized text is now returned as well as printed.
    """
    serialized = self.g.serialize(format=format).decode('utf-8')
    print(serialized)
    return serialized
Prints the rdflib Graph serialized in the given format.
def serve_forever(self): self.start_cmd_loop() try: while not self.done: self.handle_request() except KeyboardInterrupt: if log_file == sys.stderr: log_file.write("\n") finally: if self._clean_up_call is not None: ...
Starts the server handling commands and HTTP requests. The server will loop until done is True or a KeyboardInterrupt is received.
def resource_string(self): if self._resources_initialized: res_str = "{} CPUs, {} GPUs".format(self._avail_resources.cpu, self._avail_resources.gpu) if self._avail_resources.custom_resources: custom = ", ".join( ...
Returns a string describing the total resources available.
def parse_int_arg(name, default):
    """Return the URL parameter *name* as int, or *default* when absent."""
    raw = request.args.get(name)
    if raw is None:
        return default
    return int(raw)
Return a given URL parameter as int or return the default value.
def normalize(self): if self.tz is None or timezones.is_utc(self.tz): not_null = ~self.isna() DAY_NS = ccalendar.DAY_SECONDS * 1000000000 new_values = self.asi8.copy() adjustment = (new_values[not_null] % DAY_NS) new_values[not_null] = new_values[not_n...
Convert times to midnight. The time component of the date-time is converted to midnight i.e. 00:00:00. This is useful in cases, when the time does not matter. Length is unaltered. The timezones are unaffected. This method is available on Series with datetime values under the ``...
def get_raw_tag_data(filename): "Return the ID3 tag in FILENAME as a raw byte string." with open(filename, "rb") as file: try: (cls, offset, length) = stagger.tags.detect_tag(file) except stagger.NoTagError: return bytes() file.seek(offset) return file.rea...
Return the ID3 tag in FILENAME as a raw byte string.
def update_wallet(self, wallet_name, limit): request = { 'update': { 'limit': str(limit), } } return make_request( '{}wallet/{}'.format(self.url, wallet_name), method='PATCH', body=request, timeout=self.timeo...
Update a wallet with a new limit. @param the name of the wallet. @param the new value of the limit. @return a success string from the plans server. @raise ServerError via make_request.
def login_required(fn): @wraps(fn) def login_wrapper(ctx, *args, **kwargs): base_url = os.environ.get("ONE_CODEX_API_BASE", "https://app.onecodex.com") api_kwargs = {"telemetry": ctx.obj["TELEMETRY"]} api_key_prior_login = ctx.obj.get("API_KEY") bearer_token_env = os.environ.get(...
Requires login before proceeding, but does not prompt the user to login. Decorator should be used only on Click CLI commands. Notes ----- Different means of authentication will be attempted in this order: 1. An API key present in the Click context object from a previous successful authenticatio...
def create(self, sid):
    """Create a new ShortCodeInstance.

    :param unicode sid: The SID of a Twilio ShortCode resource

    :returns: Newly created ShortCodeInstance
    :rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeInstance
    """
    form_data = values.of({'Sid': sid})
    payload = self._version.create('POST', self._uri, data=form_data)
    return ShortCodeInstance(
        self._version, payload, service_sid=self._solution['service_sid'])
Create a new ShortCodeInstance :param unicode sid: The SID of a Twilio ShortCode resource :returns: Newly created ShortCodeInstance :rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeInstance
def call_lights(*args, **kwargs):
    """Get info about all available lamps.

    Options:

    * **id**: Specifies a device ID.  Can be comma-separated values.
      All lamps if omitted.

    CLI Example:

    .. code-block:: bash

        salt '*' hue.lights
        salt '*' hue.lights id=1
        salt '*' hue.lights id=1,2,3
    """
    lights = _get_lights()
    selected = _get_devices(kwargs) if 'id' in kwargs else None
    if not selected:
        # No explicit (or an empty) selection falls back to every lamp,
        # mirroring the original `and ... or ...` short-circuit.
        selected = sorted(lights.keys())
    found = {}
    for dev_id in selected:
        light = lights.get(six.text_type(dev_id))
        if light:
            found[dev_id] = light
    return found or False
Get info about all available lamps. Options: * **id**: Specifies a device ID. Can be a comma-separated values. All, if omitted. CLI Example: .. code-block:: bash salt '*' hue.lights salt '*' hue.lights id=1 salt '*' hue.lights id=1,2,3
def set_log_file_extension(self, logFileExtension): assert isinstance(logFileExtension, basestring), "logFileExtension must be a basestring" assert len(logFileExtension), "logFileExtension can't be empty" if logFileExtension[0] == ".": logFileExtension = logFileExtension[1:] ...
Set the log file extension. :Parameters: #. logFileExtension (string): Logging file extension. A logging file full name is set as logFileBasename.logFileExtension
def createJsbConfig(self):
    """Create a JSB config file using ``sencha create jsb``.

    :return: The created jsb3 config as a string.
    """
    tempdir = mkdtemp()
    try:
        jsb_path = join(tempdir, 'app.jsb3')
        cmd = ['sencha', 'create', 'jsb', '-a', self.url, '-p', jsb_path]
        log.debug('Running: %s', ' '.join(cmd))
        call(cmd)
        # Close the file handle explicitly (the original leaked it).
        with open(jsb_path) as fh:
            jsb3 = fh.read()
    finally:
        # Remove the temp dir even when sencha or the read fails.
        rmtree(tempdir)
    return jsb3
Create JSB config file using ``sencha create jsb``. :return: The created jsb3 config as a string.
def zadd(self, name, mapping, nx=False, xx=False, ch=False, incr=False): if not mapping: raise DataError("ZADD requires at least one element/score pair") if nx and xx: raise DataError("ZADD allows either 'nx' or 'xx', not both") if incr and len(mapping) != 1: ...
Set any number of element-name, score pairs to the key ``name``. Pairs are specified as a dict of element-names keys to score values. ``nx`` forces ZADD to only create new elements and not to update scores for elements that already exist. ``xx`` forces ZADD to only update scores of ele...
def _delete_device_from_device_group(self, device):
    """Remove device from device service cluster group.

    :param device: ManagementRoot object -- device to delete from group
    """
    # Resolve the device's registered name before touching the group.
    device_name = get_device_info(device).name
    # NOTE(review): pollster looks like a retry wrapper around
    # _get_device_group -- confirm its semantics against its definition.
    dg = pollster(self._get_device_group)(device)
    # Load the matching device entry from the group, then delete it.
    device_to_remove = dg.devices_s.devices.load(
        name=device_name, partition=self.partition
    )
    device_to_remove.delete()
Remove device from device service cluster group. :param device: ManagementRoot object -- device to delete from group
def paint(self, iconic, painter, rect, mode, state, options):
    """Main paint method: paints the icon once per option set."""
    for single_options in options:
        self._paint_icon(iconic, painter, rect, mode, state, single_options)
Main paint method.
def submit_error(url, user, project, area, description, extra=None, default_message=None): LOG.debug('Creating new BugzScout instance.') client = bugzscout.BugzScout( url, user, project, area) LOG.debug('Submitting BugzScout error.') client.submit_error( description, ext...
Celery task for submitting errors asynchronously. :param url: string URL for bugzscout :param user: string fogbugz user to designate when submitting via bugzscout :param project: string fogbugz project to designate for cases :param area: string fogbugz area to designate for cases :...
def is_block(bin_list):
    """Check whether a bin list has exclusively consecutive bin ids.

    Each entry's second element is its bin id; returns True when the set
    of ids forms a gap-free integer range.
    """
    ids = {entry[1] for entry in bin_list}
    lo, hi = min(ids), max(ids)
    return ids == set(range(lo, hi + 1))
Check if a bin list has exclusively consecutive bin ids.
def failed_hosts(self) -> Dict[str, "MultiResult"]:
    """Map of host name to result for every host whose task failed."""
    failures = {}
    for host, host_result in self.result.items():
        if host_result.failed:
            failures[host] = host_result
    return failures
Hosts that failed to complete the task
def has_throttled(self): for file, value in self.cpu_throttle_count.items(): try: new_value = int(util.read_file(file)) if new_value > value: return True except Exception as e: logging.warning('Cannot read throttling cou...
Check whether any of the CPU cores monitored by this instance has throttled since this instance was created. @return a boolean value
def field_match(self, node_field, pattern_field): is_good_list = (isinstance(pattern_field, list) and self.check_list(node_field, pattern_field)) is_good_node = (isinstance(pattern_field, AST) and Check(node_field, self.placeh...
Check if two fields match. Field match if: - If it is a list, all values have to match. - If if is a node, recursively check it. - Otherwise, check values are equal.
def _initialize_monitoring_services_queue(self, chain_state: ChainState): msg = ( 'Transport was started before the monitoring service queue was updated. ' 'This can lead to safety issue. node:{self!r}' ) assert not self.transport, msg msg = ( 'The nod...
Send the monitoring requests for all current balance proofs. Note: The node must always send the *received* balance proof to the monitoring service, *before* sending its own locked transfer forward. If the monitoring service is updated after, then the following c...
def encloses(self, location: FileLocation
             ) -> Optional[FunctionDesc]:
    """Returns the function, if any, that encloses *location*.

    Scans the functions in the location's file and returns the first one
    whose span contains the location, else None.
    """
    candidates = (f for f in self.in_file(location.filename)
                  if location in f.location)
    return next(candidates, None)
Returns the function, if any, that encloses a given location.
def processing_blocks(self):
    """Return a JSON dict (as a string) encoding the PBs known to SDP."""
    pb_list = ProcessingBlockList()
    status = {
        'active': pb_list.active,
        'completed': pb_list.completed,
        'aborted': pb_list.aborted,
    }
    return json.dumps(status)
Return a JSON dict encoding the PBs known to SDP.
def reset_spyder(self): answer = QMessageBox.warning(self, _("Warning"), _("Spyder will restart and reset to default settings: <br><br>" "Do you want to continue?"), QMessageBox.Yes | QMessageBox.No) if answer == QMessageBox.Yes: self.restart(re...
Quit and reset Spyder and then Restart application.
def connect(self):
    """Connect to RabbitMQ and declare the 'mease' fanout exchange."""
    # NOTE(review): Connection/Exchange look like kombu objects -- confirm.
    self.connection = Connection(self.broker_url)
    # Non-durable fanout exchange; delivery_mode=1 means transient messages.
    e = Exchange('mease', type='fanout', durable=False, delivery_mode=1)
    # Bind the exchange to the connection's default channel and declare it
    # on the broker.
    self.exchange = e(self.connection.default_channel)
    self.exchange.declare()
Connects to RabbitMQ
def validate(self, data): for prop in self.properties: if prop.id in data: if prop.type == 'string': if not isinstance(data[prop.id], basestring): raise PresetFieldTypeException("property '{}' must be of type string".format(prop.id)) ...
Checks if `data` respects this preset specification It will check that every required property is present and for every property type it will make some specific control.
def _translate(self, x, y): return self.parent._translate((x + self.x), (y + self.y))
Convert x and y to their position on the root Console.
def call(self, phone_number, message, message_type, **params):
    """Send a voice call to the target phone_number.

    See https://developer.telesign.com/docs/voice-api for detailed API
    documentation.
    """
    # Delegates to the generic POST helper against the voice endpoint.
    return self.post(VOICE_RESOURCE,
                     phone_number=phone_number,
                     message=message,
                     message_type=message_type,
                     **params)
Send a voice call to the target phone_number. See https://developer.telesign.com/docs/voice-api for detailed API documentation.
def fuse_list(mafs):
    """Try to fuse a list of blocks by progressively fusing each
    adjacent pair, yielding a block whenever fusion fails."""
    pending = None
    for block in mafs:
        if pending is None:
            pending = block
            continue
        merged = fuse(pending, block)
        if merged:
            pending = merged
        else:
            yield pending
            pending = block
    if pending:
        yield pending
Try to fuse a list of blocks by progressively fusing each adjacent pair.
def ckm_standard(t12, t13, t23, delta): r c12 = cos(t12) c13 = cos(t13) c23 = cos(t23) s12 = sin(t12) s13 = sin(t13) s23 = sin(t23) return np.array([[c12*c13, c13*s12, s13/exp(1j*delta)], [-(c23*s12) - c12*exp(1j*delta)*s13*s23, c12*c23 - exp(1j*delta)*s12...
r"""CKM matrix in the standard parametrization and standard phase convention. Parameters ---------- - `t12`: CKM angle $\theta_{12}$ in radians - `t13`: CKM angle $\theta_{13}$ in radians - `t23`: CKM angle $\theta_{23}$ in radians - `delta`: CKM phase $\delta=\gamma$ in radians
def check_feasibility(x_bounds, lowerbound, upperbound): x_bounds_lowerbound = sum([x_bound[0] for x_bound in x_bounds]) x_bounds_upperbound = sum([x_bound[-1] for x_bound in x_bounds]) return (x_bounds_lowerbound <= lowerbound <= x_bounds_upperbound) or \ (x_bounds_lowerbound <= upperbound <= x_...
This can have false positives. For examples, parameters can only be 0 or 5, and the summation constraint is between 6 and 7.
def delete_resource_view(self, resource_view): if isinstance(resource_view, str): if is_valid_uuid(resource_view) is False: raise HDXError('%s is not a valid resource view id!' % resource_view) resource_view = ResourceView({'id': resource_view}, configuration=self.configu...
Delete a resource view from the resource and HDX Args: resource_view (Union[ResourceView,Dict,str]): Either a resource view id or resource view metadata either from a ResourceView object or a dictionary Returns: None
def readmarheader(filename): with open(filename, 'rb') as f: intheader = np.fromstring(f.read(10 * 4), np.int32) floatheader = np.fromstring(f.read(15 * 4), '<f4') strheader = f.read(24) f.read(4) otherstrings = [f.read(16) for i in range(29)] return {'Xsize': intheader[0...
Read a header from a MarResearch .image file.
def EnumerateQualifiers(self, *args, **kwargs): if self.conn is not None: rv = self.conn.EnumerateQualifiers(*args, **kwargs) else: rv = [] try: rv += list(self.qualifiers[self.default_namespace].values()) except KeyError: pass retu...
Enumerate the qualifier types in the local repository of this class. For a description of the parameters, see :meth:`pywbem.WBEMConnection.EnumerateQualifiers`.
def crsConvert(crsIn, crsOut): if isinstance(crsIn, osr.SpatialReference): srs = crsIn.Clone() else: srs = osr.SpatialReference() if isinstance(crsIn, int): crsIn = 'EPSG:{}'.format(crsIn) if isinstance(crsIn, str): try: srs.SetFromUserInpu...
convert between different types of spatial references Parameters ---------- crsIn: int, str or :osgeo:class:`osr.SpatialReference` the input CRS crsOut: {'wkt', 'proj4', 'epsg', 'osr', 'opengis' or 'prettyWkt'} the output CRS type Returns ------- int, str or :osgeo:class:`o...
def pretty_print_model(devicemodel): PRETTY_PRINT_MODEL = logging.info(PRETTY_PRINT_MODEL % devicemodel) if 'traits' in devicemodel: for trait in devicemodel['traits']: logging.info(' Trait %s' % trait) else: logging.info('No traits') logging.info('')
Prints out a device model in the terminal by parsing dict.
def _normalized_levenshtein_distance(s1, s2, acceptable_differences): if len(s1) > len(s2): s1, s2 = s2, s1 acceptable_differences = set(-i for i in acceptable_differences) distances = range(len(s1) + 1) for index2, num2 in enumerate(s2): new_distances = [index2 + 1] for inde...
This function calculates the levenshtein distance but allows for elements in the lists to be different by any number in the set acceptable_differences. :param s1: A list. :param s2: Another list. :param acceptable_differences: A set of numbers. If (s2[i]-s1[i]...
def dl_files(db, dl_dir, files, keep_subdirs=True, overwrite=False): db_url = posixpath.join(config.db_index_url, db) response = requests.get(db_url) response.raise_for_status() dl_inputs = [(os.path.split(file)[1], os.path.split(file)[0], db, dl_dir, keep_subdirs, overwrite) for file in files] make...
Download specified files from a Physiobank database. Parameters ---------- db : str The Physiobank database directory to download. eg. For database: 'http://physionet.org/physiobank/database/mitdb', db='mitdb'. dl_dir : str The full local directory path in which to download the ...
def _ParseCshVariables(self, lines): paths = {} for line in lines: if len(line) < 2: continue action = line[0] if action == "setenv": target = line[1] path_vals = [] if line[2:]: path_vals = line[2].split(":") self._ExpandPath(target, path_vals...
Extract env_var and path values from csh derivative shells. Path attributes can be set several ways: - setenv takes the form "setenv PATH_NAME COLON:SEPARATED:LIST" - set takes the form "set path_name=(space separated list)" and is automatically exported for several types of files. The first ent...
def last_commit():
    """Return the id of the working copy's parent commit, or None.

    Uses Mercurial (``hg parent --template={node}``); returns None when
    hg exits with an error.
    """
    cmd = ['hg', 'parent', '--template={node}']
    try:
        raw = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        return None
    return raw.strip().decode('utf-8')
Returns the SHA1 of the last commit.
def run_command(cmd, debug=False):
    """Execute the given command and return None.

    :param cmd: A `sh.Command` object to execute.
    :param debug: An optional bool to toggle debug output.
    :return: None
    """
    if debug:
        print_warn(' PWD: {}'.format(os.getcwd()))
        print_warn(' COMMAND: {}'.format(cmd))
    cmd()
Execute the given command and return None. :param cmd: A `sh.Command` object to execute. :param debug: An optional bool to toggle debug output. :return: None
def classify_host(host):
    """Classify *host* as an address object or a hostname string.

    IPv4Address/IPv6Address inputs are returned unchanged; valid
    hostname strings are returned as-is; anything else is converted with
    ``ip_address``, which raises ValueError for uninterpretable strings.
    """
    if isinstance(host, (IPv4Address, IPv6Address)):
        return host
    return host if is_valid_hostname(host) else ip_address(host)
Host is an IPv4Address, IPv6Address or a string. If an IPv4Address or IPv6Address return it. Otherwise convert the string to an IPv4Address or IPv6Address object if possible and return it. Otherwise return the original string if it is a valid hostname. Raise ValueError if a string cannot be interpre...
def list_wallet_names(api_key, is_hd_wallet=False, coin_symbol='btc'): assert is_valid_coin_symbol(coin_symbol), coin_symbol assert api_key params = {'token': api_key} kwargs = dict(wallets='hd' if is_hd_wallet else '') url = make_url(coin_symbol, **kwargs) r = requests.get(url, params=params, v...
Get all the wallets belonging to an API key
def html_page_context(app, pagename, templatename, context, doctree): rendered_toc = get_rendered_toctree(app.builder, pagename) context['toc'] = rendered_toc context['display_toc'] = True if "toctree" not in context: return def make_toctree(collapse=True): return get_rendered_toctre...
Event handler for the html-page-context signal. Modifies the context directly. - Replaces the 'toc' value created by the HTML builder with one that shows all document titles and the local table of contents. - Sets display_toc to True so the table of contents is always displayed, even on empt...
def get_permission_required(cls): if cls.permission_required is None: raise ImproperlyConfigured( "{0} is missing the permission_required attribute. " "Define {0}.permission_required, or override " "{0}.get_permission_required().".format(cls.__name__) ...
Get permission required property. Must return an iterable.
def delete(fun): if __opts__['file_client'] == 'local': data = __salt__['data.get']('mine_cache') if isinstance(data, dict) and fun in data: del data[fun] return __salt__['data.update']('mine_cache', data) load = { 'cmd': '_mine_delete', 'id': __opts__...
Remove specific function contents of minion. Returns True on success. CLI Example: .. code-block:: bash salt '*' mine.delete 'network.interfaces'
def skipgram_batch(centers, contexts, num_tokens, dtype, index_dtype): contexts = mx.nd.array(contexts[2], dtype=index_dtype) indptr = mx.nd.arange(len(centers) + 1) centers = mx.nd.array(centers, dtype=index_dtype) centers_csr = mx.nd.sparse.csr_matrix( (mx.nd.ones(centers.shape), centers, indp...
Create a batch for SG training objective.
def assert_not_present(self, selector, testid=None, **kwargs): self.info_log( "Assert not present selector(%s) testid(%s)" % (selector, testid) ) wait_until_not_present = kwargs.get( 'wait_until_not_present', BROME_CONFIG['proxy_driver']['wait_unti...
Assert that the element is not present in the dom Args: selector (str): the selector used to find the element test_id (str): the test_id or a str Kwargs: wait_until_not_present (bool) Returns: bool: True is the assertion succeed; False otherwise...
def deploy(self, unique_id, configs=None):
    """Deploy the service to the host: install it, then start it.

    :Parameter unique_id: the name of the process
    :Parameter configs: a map of configs used to modify the deployment
    """
    for step in (self.install, self.start):
        step(unique_id, configs)
Deploys the service to the host. This should at least perform the same actions as install and start but may perform additional tasks as needed. :Parameter unique_id: the name of the process :Parameter configs: a mao of configs the deployer may use to modify the deployment
def gen(self, text, start=0): for cc in self.chunkComment(text, start): c = self.extractChunkContent(cc) cc = ''.join(cc) m = self.matchComment(c) idx = text.index(cc, start) e = idx + len(cc) if m: assert text[idx:e] == cc ...
Return the source code in text, filled with autogenerated code starting at start.
def significance_fdr(p, alpha):
    """Mark significant p-values while controlling the false discovery rate.

    Benjamini-Hochberg style: p-values are ranked, compared against
    ``alpha * rank / m`` (m excludes NaNs), and everything up to the
    largest passing rank is flagged.

    Parameters
    ----------
    p : ndarray of p-values
    alpha : float, target false discovery rate

    Returns
    -------
    Boolean array with p's shape marking the significant entries.
    """
    order = np.argsort(p, axis=None)
    effective_count = order.size - np.sum(np.isnan(p))
    ranks = np.empty(p.shape, int)
    ranks.flat[order] = np.arange(1, order.size + 1)
    passed = p <= alpha * ranks / effective_count
    if np.sum(passed) == 0:
        return passed
    cutoff = np.max(ranks[passed])
    return ranks <= cutoff
Calculate significance by controlling for the false discovery rate. This function determines which of the p-values in `p` can be considered significant. Correction for multiple comparisons is performed by controlling the false discovery rate (FDR). The FDR is the maximum fraction of p-values that are w...
def api_list(cls, api_key=djstripe_settings.STRIPE_SECRET_KEY, **kwargs):
    """Call the stripe API's list operation for this model.

    :param api_key: The api key to use for this request.
        Defaults to djstripe_settings.STRIPE_SECRET_KEY.
    :type api_key: string

    See Stripe documentation for accepted kwargs for each object.

    :returns: an iterator over all items in the query
    """
    # auto_paging_iter lazily fetches successive result pages from Stripe.
    return cls.stripe_class.list(api_key=api_key, **kwargs).auto_paging_iter()
Call the stripe API's list operation for this model. :param api_key: The api key to use for this request. Defaults to djstripe_settings.STRIPE_SECRET_KEY. :type api_key: string See Stripe documentation for accepted kwargs for each object. :returns: an iterator over all items in the query
def List(self, name, initial=None):
    """The list datatype.

    :param name: The name of the list.
    :keyword initial: Initial contents of the list.

    See :class:`redish.types.List`.
    """
    return types.List(name, self.api, initial=initial)
The list datatype. :param name: The name of the list. :keyword initial: Initial contents of the list. See :class:`redish.types.List`.
def fullversion():
    """Return all version info from ``lvm version``.

    CLI Example:

    .. code-block:: bash

        salt '*' lvm.fullversion
    """
    ret = {}
    cmd = 'lvm version'
    out = __salt__['cmd.run'](cmd).splitlines()
    for line in out:
        comps = line.split(':')
        # Skip blank or malformed lines; the original raised IndexError
        # on any line without a colon.
        if len(comps) < 2:
            continue
        ret[comps[0].strip()] = comps[1].strip()
    return ret
Return all version info from lvm version CLI Example: .. code-block:: bash salt '*' lvm.fullversion
def pop(self):
    """Remove the top container from the container stack."""
    if not self._containers:
        raise KittyException('no container to pop')
    self._containers.pop()
    if self._container():
        self._container().pop()
Remove the top container from the container stack.
def compile_mof_string(self, mof_str, namespace=None, search_paths=None, verbose=None): namespace = namespace or self.default_namespace self._validate_namespace(namespace) mofcomp = MOFCompiler(_MockMOFWBEMConnection(self), search_paths=se...
Compile the MOF definitions in the specified string and add the resulting CIM objects to the specified CIM namespace of the mock repository. If the namespace does not exist, :exc:`~pywbem.CIMError` with status CIM_ERR_INVALID_NAMESPACE is raised. This method supports all MOF pr...
def delete_io(hash):
    """Deletes records associated with a particular hash.

    :param str hash: The hash
    :rtype int: The number of records deleted (0 if the hash was absent)
    """
    global CACHE_
    load_cache(True)
    record_used('cache', hash)
    # pop() combines the lookup, count, and deletion of the old
    # get()/del pair in one step; default [] covers a missing hash.
    removed = CACHE_['cache'].pop(hash, [])
    write_out()
    return len(removed)
Deletes records associated with a particular hash :param str hash: The hash :rtype int: The number of records deleted
def set_allocated_time(self, time): if self.get_allocated_time_metadata().is_read_only(): raise errors.NoAccess() if not self._is_valid_duration( time, self.get_allocated_time_metadata()): raise errors.InvalidArgument() map = dict() ...
Sets the allocated time. arg: time (osid.calendaring.Duration): the allocated time raise: InvalidArgument - ``time`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
def csv_import(self, csv_source, encoding='utf-8', transforms=None, row_class=DataObject, **kwargs): reader_args = dict((k, v) for k, v in kwargs.items() if k not in ['encoding', 'csv_source', ...
Imports the contents of a CSV-formatted file into this table. @param csv_source: CSV file - if a string is given, the file with that name will be opened, read, and closed; if a file object is given, then that object will be read as-is, and left for the caller to be closed. ...
def _strip_ctype(name, ctype, protocol=2): try: name, ctypestr = name.rsplit(',', 1) except ValueError: pass else: ctype = Nds2ChannelType.find(ctypestr).value if protocol == 1 and ctype in ( Nds2ChannelType.STREND.value, Nds2ChannelType.MTREND...
Strip the ctype from a channel name for the given nds server version This is needed because NDS1 servers store trend channels _including_ the suffix, but not raw channels, and NDS2 doesn't do this.
def grab_project_data(prj):
    """From the given Project, grab Sample-independent data.

    Collects the attributes named in SAMPLE_INDEPENDENT_PROJECT_SECTIONS
    from ``prj``; sections the Project lacks are logged and skipped.

    :param prj: the Project to pull data from; falsy values yield {}.
    :return: dict mapping section name to the Project's value for it.
    """
    if not prj:
        return {}
    data = {}
    for section in SAMPLE_INDEPENDENT_PROJECT_SECTIONS:
        try:
            value = getattr(prj, section)
        except AttributeError:
            _LOGGER.debug("Project lacks section '%s', skipping", section)
        else:
            data[section] = value
    return data
From the given Project, grab Sample-independent data. There are some aspects of a Project of which it's beneficial for a Sample to be aware, particularly for post-hoc analysis. Since Sample objects within a Project are mutually independent, though, each doesn't need to know about any of the others. A P...
def from_coords(cls, x, y): x_bytes = int(math.ceil(math.log(x, 2) / 8.0)) y_bytes = int(math.ceil(math.log(y, 2) / 8.0)) num_bytes = max(x_bytes, y_bytes) byte_string = b'\x04' byte_string += int_to_bytes(x, width=num_bytes) byte_string += int_to_bytes(y, width=num_bytes...
Creates an ECPoint object from the X and Y integer coordinates of the point :param x: The X coordinate, as an integer :param y: The Y coordinate, as an integer :return: An ECPoint object
def py_doc_trim(docstring): if not docstring: return '' lines = docstring.expandtabs().splitlines() indent = sys.maxint for line in lines[1:]: stripped = line.lstrip() if stripped: indent = min(indent, len(line) - len(stripped)) trimmed = [lines[0].strip()] if...
Trim a python doc string. This example is nipped from https://www.python.org/dev/peps/pep-0257/, which describes how to conventionally format and trim docstrings. It has been modified to replace single newlines with a space, but leave multiple consecutive newlines in tact.
def get_parameters(self):
    """Return a dictionary with the processor's stored parameters.

    Maps each name in ``self.PARAMETERS`` to the current value of the
    attribute of that name on this instance.
    """
    # BUG fix: the original read ``getattr(processor, n)`` — ``processor``
    # is not defined in this scope; the attributes live on ``self``.
    return {name: getattr(self, name) for name in self.PARAMETERS.keys()}
returns a dictionary with the processor's stored parameters
def _set_response_headers(self, response): options = self._get_local_options() self._set_feature_headers(response.headers, options) self._set_frame_options_headers(response.headers, options) self._set_content_security_policy_headers(response.headers, options) self._set_hsts_heade...
Applies all configured headers to the given response.
def _make_complex(self): rcomplex_coeffs = _shtools.SHrtoc(self.coeffs, convention=1, switchcs=0) complex_coeffs = _np.zeros((2, self.lmax+1, self.lmax+1), dtype='complex') complex_coeffs[0, :, :] = (rcomplex_coeffs[0, ...
Convert the real SHCoeffs class to the complex class.
def top_level(self): output = {} if isinstance(self.obj, dict): for name, item in self.obj.items(): if isinstance(item, dict): if item: output[name] = StrReprWrapper('{...}') else: output[...
Print just the top level of an object, being sure to show where it goes deeper
def process_requests(self):
    """Loop that runs in a thread to process requests synchronously.

    Each queue item is an ``(id, args, kwargs)`` tuple; the result of
    ``_make_request`` — or the exception it raised — is stored under
    that id in ``self.results``. Runs forever.
    """
    while True:
        request_id, call_args, call_kwargs = self.request_queue.get()
        try:
            outcome = self._make_request(*call_args, **call_kwargs)
        except Exception as exc:
            # Store the exception itself so the requester can inspect it.
            outcome = exc
        self.results[request_id] = outcome
Loop that runs in a thread to process requests synchronously.
def get_edges_as_list(self):
    """Return every edge node wrapped as a L{Cedge} object.

    @rtype: list of L{Cedge}
    @return: the edge objects
    """
    return [Cedge(edge_node) for edge_node in self.__get_edge_nodes()]
Returns all the edge objects in a list @rtype: list of L{Cedge} @return: edge objects (list)
def hideFromPublicBundle(self, otpk_pub):
    """Hide a one-time pre key from the public bundle.

    :param otpk_pub: The public key of the one-time pre key to hide,
        encoded as a bytes-like object.
    """
    self.__checkSPKTimestamp()
    # BUG fix: iterate over a snapshot. The loop body mutates __otpks
    # (remove, and __refillOTPKs may add entries), and mutating a list
    # while iterating it skips elements.
    for otpk in list(self.__otpks):
        if otpk.pub == otpk_pub:
            self.__otpks.remove(otpk)
            self.__hidden_otpks.append(otpk)
            self.__refillOTPKs()
Hide a one-time pre key from the public bundle. :param otpk_pub: The public key of the one-time pre key to hide, encoded as a bytes-like object.
def detect(args):
    """Detect the language of each line.

    Blank lines are skipped; for each non-blank input line the detected
    language name is printed, left-padded, followed by the stripped line.
    """
    for line in args.input:
        stripped = line.strip()
        if not stripped:
            continue
        # Detector receives the raw line; only the echoed text is stripped.
        _print("{:<20}{}".format(Detector(line).language.name, stripped))
Detect the language of each line.
def delete_folder(self, id, force=None): path = {} data = {} params = {} path["id"] = id if force is not None: params["force"] = force self.logger.debug("DELETE /api/v1/folders/{id} with query params: {params} and form data: {data}".format(params=params...
Delete folder. Remove the specified folder. You can only delete empty folders unless you set the 'force' flag