code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_package(repo_url, pkg_name, timeout=1):
    """Fetch package metadata from a Bower registry.

    :param repo_url: base URL of the registry.
    :param pkg_name: name of the package to look up.
    :param timeout: request timeout in seconds.
    :return: decoded JSON dict, or None when the package is unknown (404).
    """
    endpoint = "{0}/packages/{1}".format(repo_url, pkg_name)
    resp = requests.get(endpoint, headers={'accept': 'application/json'},
                        timeout=timeout)
    if resp.status_code == 404:
        return None
    return resp.json()
Retrieve package information from a Bower registry at repo_url. Returns a dict of package data.
def basic_consume(self, queue='', consumer_tag='', no_local=False, no_ack=False, exclusive=False, nowait=False, callback=None, ticket=None): args = AMQPWriter() if ticket is not None: args.write_short(ticket) else: args.write_short(self.default_ticket) ...
start a queue consumer This method asks the server to start a "consumer", which is a transient request for messages from a specific queue. Consumers last as long as the channel they were created on, or until the client cancels them. RULE: The server SHOULD support ...
def run(self): kwargs = {'query': self.get_data()} if self.data_type == "ip": kwargs.update({'query_type': 'ip'}) elif self.data_type == "network": kwargs.update({'query_type': 'network'}) elif self.data_type == 'autonomous-system': kwargs.update({'que...
Run the process to get observation data from Backscatter.io.
def url(self, filename, external=False):
    """Return the URL at which *filename* in this set would be served.

    Does not verify that the file actually exists.

    :param string filename: The filename to return the URL for.
    :param bool external: If True, build an absolute URL (only used when
        falling back to ``url_for``).
    """
    # Normalise away a single leading slash so joins behave consistently.
    filename = filename[1:] if filename.startswith('/') else filename
    if not self.has_url:
        return url_for('fs.get_file', fs=self.name,
                       filename=filename, _external=external)
    return self.base_url + filename
This function gets the URL a file uploaded to this set would be accessed at. It doesn't check whether said file exists. :param string filename: The filename to return the URL for. :param bool external: If True, returns an absolute URL
def _end_of_decade(self):
    """Move the date to December 31st of the last year of its decade.

    :rtype: Date
    """
    # Floor to the first year of the decade, then advance to its final year.
    last_year = (self.year // YEARS_PER_DECADE) * YEARS_PER_DECADE \
        + YEARS_PER_DECADE - 1
    return self.set(last_year, 12, 31)
Reset the date to the last day of the decade. :rtype: Date
def get_album(self, id):
    """Fetch the album with the given *id* and wrap it in an Album object."""
    request_url = "{0}/3/album/{1}".format(self._base_url, id)
    return Album(self._send_request(request_url), self)
Return an Album object with the information for the album with the given id.
def second_order_score(y, mean, scale, shape, skewness):
    """GAS t update term potentially using second-order information.

    Parameters mirror the location/scale/shape of a Student-t density;
    ``skewness`` is accepted for interface compatibility but unused here.
    """
    resid = y - mean
    resid_sq = np.power(resid, 2)
    scale_sq = np.power(scale, 2)
    # First-order score of the t log-likelihood w.r.t. the location.
    score = ((shape + 1) / shape) * resid / (scale_sq + resid_sq / shape)
    # Curvature-based scaling term (second-order information).
    curvature = ((shape + 1) * (scale_sq * shape - resid_sq)
                 / np.power(scale_sq * shape + resid_sq, 2))
    return score / curvature
GAS t Update term potentially using second-order information - native Python function Parameters ---------- y : float datapoint for the time series mean : float location parameter for the t distribution scale : float scale parameter for the ...
def join_field(path):
    """Join a sequence of field names into a single dotted path string.

    Literal dots inside a field name are escaped as ``\\.`` so they are
    not confused with separators.  ``None`` entries are skipped.  An
    empty result collapses to ``"."`` (the root path).
    """
    # Identity test (`is not None`) instead of `!= None`, per PEP 8; also
    # avoids invoking custom __eq__ on field objects.
    output = ".".join(f.replace(".", "\\.") for f in path if f is not None)
    return output if output else "."
RETURN field SEQUENCE AS STRING
def populate_request_data(self, request_args):
    """Attach HTTP basic-auth credentials to the request arguments.

    Args:
        request_args: The arguments that will be passed to the request.

    Returns:
        The same mapping, with ``auth`` set to an ``HTTPBasicAuth``
        instance built from the stored username and password.
    """
    auth = HTTPBasicAuth(self._username, self._password)
    request_args['auth'] = auth
    return request_args
Add the authentication info to the supplied dictionary. We use the `requests.HTTPBasicAuth` class as the `auth` param. Args: `request_args`: The arguments that will be passed to the request. Returns: The updated arguments for the request.
def save_default_values(self): for parameter_container in self.default_value_parameter_containers: parameters = parameter_container.get_parameters() for parameter in parameters: set_inasafe_default_value_qsetting( self.settings, GLO...
Save InaSAFE default values.
def stop(self):
    """Stop playback: halt the clock and set the status to READY."""
    logger.debug("Stopping playback")
    self.clock.stop()
    # READY is a module-level state constant — presumably consumed by the
    # playback loop; confirm against the enclosing module.
    self.status = READY
Stops the video stream and resets the clock.
def is_appendable_to(self, group):
    """Return True if these features can be appended to HDF5 *group*.

    The group must advertise the same storage format, element dtype and
    dimensionality as this instance.  Checks short-circuit in order.
    """
    if group.attrs['format'] != self.dformat:
        return False
    if group[self.name].dtype != self.dtype:
        return False
    return self._group_dim(group) == self.dim
Return True if features are appendable to a HDF5 group
def average_loss(lc):
    """Compute the average loss over a period from a loss-curve array.

    ``lc`` is a structured array with ``loss``/``poe`` fields, or a plain
    (losses, poes) array pair.  Since the curve is piecewise linear, the
    integral is exact under the trapezoidal rule.
    """
    if lc.dtype.names:
        losses, poes = lc['loss'], lc['poe']
    else:
        losses, poes = lc
    return -pairwise_diff(losses) @ pairwise_mean(poes)
Given a loss curve array with `poe` and `loss` fields, computes the average loss on a period of time. :note: As the loss curve is supposed to be piecewise linear as it is a result of a linear interpolation, we compute an exact integral by using the trapeizodal rule with the width given by...
def _is_allowed_abbr(self, tokens): if len(tokens) <= 2: abbr_text = ''.join(tokens) if self.abbr_min <= len(abbr_text) <= self.abbr_max and bracket_level(abbr_text) == 0: if abbr_text[0].isalnum() and any(c.isalpha() for c in abbr_text): if re.match('...
Return True if text is an allowed abbreviation.
def _SetRow(self, new_values, row=0): if not row: row = self._row_index if row > self.size: raise TableError("Entry %s beyond table size %s." % (row, self.size)) self._table[row].values = new_values
Sets the current row to new list. Args: new_values: List|dict of new values to insert into row. row: int, Row to insert values into. Raises: TableError: If number of new values is not equal to row size.
def console_set_char_foreground(
    con: tcod.console.Console, x: int, y: int, col: Tuple[int, int, int]
) -> None:
    """Change the foreground color of the cell at (x, y) to *col*.

    Args:
        con (Console): Any Console instance.
        x (int): Character x position from the left.
        y (int): Character y position from the top.
        col (Tuple[int, int, int]): An (r, g, b) color sequence.
    """
    # Thin ffi wrapper; `_console` adapts `con` to the C pointer type.
    lib.TCOD_console_set_char_foreground(_console(con), x, y, col)
Change the foreground color of x,y to col. Args: con (Console): Any Console instance. x (int): Character x position from the left. y (int): Character y position from the top. col (Union[Tuple[int, int, int], Sequence[int]]): An (r, g, b) sequence or Color instance. ...
def _get_pretty_string(obj): sio = StringIO() pprint.pprint(obj, stream=sio) return sio.getvalue()
Return a prettier version of obj Parameters ---------- obj : object Object to pretty print Returns ------- s : str Pretty print object repr
def content_location(self) -> Optional[UnstructuredHeader]:
    """The ``Content-Location`` header, or ``None`` when absent."""
    try:
        values = self[b'content-location']
        return cast(UnstructuredHeader, values[0])
    except (KeyError, IndexError):
        return None
The ``Content-Location`` header.
def get_empty_dirs(self, path): empty_dirs = [] for i in os.listdir(path): child_path = os.path.join(path, i) if i == '.git' or os.path.isfile(child_path) or os.path.islink(child_path): continue if self.path_only_contains_dirs(child_path): ...
Return a list of empty directories in path.
def _purge(dir, pattern, reason=''): for f in os.listdir(dir): if re.search(pattern, f): print "Purging file {0}. {1}".format(f, reason) os.remove(os.path.join(dir, f))
delete files in dir that match pattern
def add_wirevector(self, wirevector):
    """Register a wirevector with this block after sanity checking it.

    The wirevector joins ``wirevector_set`` and is indexed by name in
    ``wirevector_by_name``.
    """
    self.sanity_check_wirevector(wirevector)
    self.wirevector_by_name[wirevector.name] = wirevector
    self.wirevector_set.add(wirevector)
Add a wirevector object to the block.
def run_vardict(align_bams, items, ref_file, assoc_files, region=None, out_file=None): items = shared.add_highdepth_genome_exclusion(items) if vcfutils.is_paired_analysis(align_bams, items): call_file = _run_vardict_paired(align_bams, items, ref_file, ...
Run VarDict variant calling.
def backup_file(*, file, host): if not _has_init: raise RuntimeError("This driver has not been properly initialised!") try: if not _dry_run: bucket = _boto_conn.get_bucket(_bucket_name) except boto.exception.S3ResponseError: log.msg_warn("Bucket '{bucket_name}' does not e...
Backup a file on S3 :param file: full path to the file to be backed up :param host: this will be used to locate the file on S3 :raises TypeError: if an argument in kwargs does not have the type expected :raises ValueError: if an argument within kwargs has an invalid value
def start(self, *args):
    """Open a nested log section, increasing the indent level.

    When verbose mode is active the call is a no-op.  Returns self so
    calls can be chained.
    """
    if not self._is_verbose:
        self.writeln('start', *args)
        self._indent += 1
    return self
Start a nested log.
def to_dict(self):
    """Serialise these credentials as a plain dict."""
    keys = ('host', 'port', 'database', 'username', 'password')
    return {key: getattr(self, key) for key in keys}
Convert credentials into a dict.
def get_child_book_ids(self, book_id):
    """Get the child ``Ids`` of the given book.

    Delegates to the catalog session when one is configured, otherwise
    falls back to the hierarchy session.

    arg: book_id (osid.id.Id): the ``Id`` to query
    return: (osid.id.IdList) - the children of the book
    raise: NotFound - ``book_id`` is not found
    raise: NullArgument - ``book_id`` is ``null``
    """
    if self._catalog_session is None:
        return self._hierarchy_session.get_children(id_=book_id)
    return self._catalog_session.get_child_catalog_ids(catalog_id=book_id)
Gets the child ``Ids`` of the given book. arg: book_id (osid.id.Id): the ``Id`` to query return: (osid.id.IdList) - the children of the book raise: NotFound - ``book_id`` is not found raise: NullArgument - ``book_id`` is ``null`` raise: OperationFailed - unable to complete...
def _flush(self):
    """Write ``self._data`` to ``self.path`` as YAML, creating parent dirs.

    You should not need to call this manually.
    """
    parent = os.path.dirname(self.path)
    if not os.path.isdir(parent):
        os.makedirs(parent)
    with io.open(self.path, 'w', encoding='utf8') as handle:
        yaml.safe_dump(self._data, handle,
                       default_flow_style=False, encoding=None)
Save the contents of data to the file on disk. You should not need to call this manually.
def percent(self, value) -> 'Gap':
    """Set the gap as a percentage of the available space.

    Raises (via ``raise_not_number``) when *value* is not numeric.
    Returns self for chaining.
    """
    raise_not_number(value)
    self.gap = f'{value}%'
    return self
Set the margin as a percentage.
def build_row(row, left, center, right):
    """Yield the lines of one rendered table row, including borders.

    Cells must already be padded/extended so every cell has the same
    number of lines; one combined line is yielded per cell line.
    """
    if not row or not row[0]:
        # Degenerate row: emit a single border-only line.
        yield combine((), left, center, right)
        return
    for line_no in range(len(row[0])):
        yield combine((cell[line_no] for cell in row), left, center, right)
Combine single or multi-lined cells into a single row of list of lists including borders. Row must already be padded and extended so each cell has the same number of lines. Example return value: [ ['>', 'Left ', '|', 'Center', '|', 'Right', '<'], ['>', 'Cell1', '|', 'Cell2 ', '|', 'Cell3',...
def _convert_bin_to_datelike_type(bins, dtype):
    """Convert *bins* to a DatetimeIndex/TimedeltaIndex when datelike.

    Parameters
    ----------
    bins : list-like of bins
    dtype : dtype of the original data

    Returns
    -------
    bins converted to the matching index type, or unchanged otherwise.
    """
    if is_datetime64tz_dtype(dtype):
        return to_datetime(bins.astype(np.int64), utc=True).tz_convert(dtype.tz)
    if is_datetime_or_timedelta_dtype(dtype):
        return Index(bins.astype(np.int64), dtype=dtype)
    return bins
Convert bins to a DatetimeIndex or TimedeltaIndex if the orginal dtype is datelike Parameters ---------- bins : list-like of bins dtype : dtype of data Returns ------- bins : Array-like of bins, DatetimeIndex or TimedeltaIndex if dtype is datelike
def ReadAllClientGraphSeries( self, client_label, report_type, time_range = None, cursor=None): query = args = [client_label, report_type.SerializeToDataStore()] if time_range is not None: query += "AND `timestamp` BETWEEN FROM_UNIXTIME(%s) AND FROM_UNIXTIME(%s)" ar...
Reads graph series for the given label and report-type from the DB.
def formatter_class(klass):
    """Decorator used to specify the formatter class for a console script.

    :param klass: The formatter class to use.
    """
    def decorator(func):
        ScriptAdaptor._get_adaptor(func).formatter_class = klass
        return func
    return decorator
Decorator used to specify the formatter class for the console script. :param klass: The formatter class to use.
def add_neighbours(self): ipix = self._best_res_pixels() hp = HEALPix(nside=(1 << self.max_order), order='nested') extend_ipix = AbstractMOC._neighbour_pixels(hp, ipix) neigh_ipix = np.setdiff1d(extend_ipix, ipix) shift = 2 * (AbstractMOC.HPY_MAX_NORDER - self.max_order) ...
Extends the MOC instance so that it includes the HEALPix cells touching its border. The depth of the HEALPix cells added at the border is equal to the maximum depth of the MOC instance. Returns ------- moc : `~mocpy.moc.MOC` self extended by one degree of neighbours.
def process(self, user, timestamp, data=None): event = Event(user, mwtypes.Timestamp(timestamp), self.event_i, data) self.event_i += 1 for user, events in self._clear_expired(event.timestamp): yield Session(user, unpack_events(events)) if event.user in self.active_users: ...
Processes a user event. :Parameters: user : `hashable` A hashable value to identify a user (`int` or `str` are OK) timestamp : :class:`mwtypes.Timestamp` The timestamp of the event data : `mixed` Event meta data :Retur...
def seek(self, pos): if self.debug: logging.debug('seek: %r' % pos) self.fp.seek(pos) self.bufpos = pos self.buf = b'' self.charpos = 0 self._parse1 = self._parse_main self._curtoken = b'' self._curtokenpos = 0 self._tokens = [] ...
Seeks the parser to the given position.
def get_shared_people(self): people = [] output = self._get_data() self._logger.debug(output) shared_entries = output[0] or [] for info in shared_entries: try: people.append(Person(info)) except InvalidData: self._logger.deb...
Retrieves all people that share their location with this account
def in_batches(iterable, batch_size):
    """Yield successive batches (lists) from *iterable*.

    Args:
        iterable: Source of elements (materialised once).
        batch_size (int): Maximum size of each batch; the final batch may
            be smaller when the length is not an exact multiple.

    Yields:
        list: The next batch of up to *batch_size* elements.
    """
    buffered = list(iterable)
    for start in range(0, len(buffered), batch_size):
        yield buffered[start:start + batch_size]
Split the given iterable into batches. Args: iterable (Iterable[Any]): The iterable you want to split into batches. batch_size (int): The size of each batch. The last batch will probably be smaller (if the number of elements cannot be divided equally). Returns...
def options(self, urls=None, **overrides):
    """Restrict the accepted HTTP method to OPTIONS.

    :param urls: optional URL(s) to constrain the route to.
    """
    kwargs = dict(overrides)
    if urls is not None:
        kwargs['urls'] = urls
    return self.where(accept='OPTIONS', **kwargs)
Sets the acceptable HTTP method to OPTIONS
def has_option(self, section, option):
    """Check whether *option* exists within *section*.

    Args:
        section (str): name of section
        option (str): name of option

    Returns:
        bool: True when the section exists and contains the option
        (after normalising via ``optionxform``).
    """
    if section not in self.sections():
        return False
    return self.optionxform(option) in self[section]
Checks for the existence of a given option in a given section. Args: section (str): name of section option (str): name of option Returns: bool: whether the option exists in the given section
def resolve_aliases(self, target, scope=None): for declared in target.dependencies: if scope is not None and declared.scope != scope: continue elif type(declared) in (AliasTarget, Target): for r, _ in self.resolve_aliases(declared, scope=scope): yield r, declared else: ...
Resolve aliases in the direct dependencies of the target. :param target: The direct dependencies of this target are included. :param scope: When specified, only deps with this scope are included. This is more than a filter, because it prunes the subgraphs represented by aliases with un-matched scop...
def tomof(self, maxline=MAX_MOF_LINE): mof = [] mof.append(_qualifiers_tomof(self.qualifiers, MOF_INDENT, maxline)) mof.append(u'class ') mof.append(self.classname) mof.append(u' ') if self.superclass is not None: mof.append(u': ') mof.append(self....
Return a MOF string with the declaration of this CIM class. The returned MOF string conforms to the ``classDeclaration`` ABNF rule defined in :term:`DSP0004`. The order of properties, methods, parameters, and qualifiers is preserved. The :attr:`~pywbem.CIMClass.path` attribute...
def check_bam(bam, samtype="bam"): ut.check_existance(bam) samfile = pysam.AlignmentFile(bam, "rb") if not samfile.has_index(): pysam.index(bam) samfile = pysam.AlignmentFile(bam, "rb") logging.info("Nanoget: No index for bam file could be found, created index.") if not samfile.h...
Check if bam file is valid. Bam file should: - exists - has an index (create if necessary) - is sorted by coordinate - has at least one mapped read
def get_basic_profile(self, user_id, scope='profile/public'): profile = _get( token=self.oauth.get_app_token(scope), uri='/user/profile/' + urllib.quote(user_id) ) try: return json.loads(profile) except: raise MxitAPIException('Error parsin...
Retrieve the Mxit user's basic profile No user authentication required
def last_midnight():
    """Return a naive datetime for the most recent midnight (today, 00:00)."""
    today = datetime.now().date()
    return datetime(today.year, today.month, today.day)
Return a datetime for the most recent midnight (today at 00:00:00).
def check_list(self, node_list, pattern_list):
    """Return True when each node matches the corresponding pattern.

    Lists of different lengths never match.
    """
    if len(node_list) != len(pattern_list):
        return False
    return all(
        Check(node, self.placeholders).visit(pattern)
        for node, pattern in zip(node_list, pattern_list)
    )
Check if list of node are equal.
def build(self):
    """Build the full HTML source unless it has already been built.

    Blocks (up to 20s) on the ``loadFinished`` signal while rebuilding.
    """
    if not self.is_built():
        with _wait_signal(self.loadFinished, 20):
            self.rebuild()
        self._built = True
Build the full HTML source.
def _determine_selected_stencil(stencil_set, stencil_definition): if 'stencil' not in stencil_definition: selected_stencil_name = stencil_set.manifest.get('default_stencil') else: selected_stencil_name = stencil_definition.get('stencil') if not selected_stencil_name: raise ValueError...
Determine appropriate stencil name for stencil definition. Given a fastfood.json stencil definition with a stencil set, figure out what the name of the stencil within the set should be, or use the default
def set_execution_context(self, execution_context):
    """Set the ExecutionContext this async is executing under.

    Raises:
        errors.AlreadyInContextError: if a context is already attached.
    """
    if self._execution_context:
        raise errors.AlreadyInContextError
    self._execution_context = execution_context
Set the ExecutionContext this async is executing under.
def load_config_key(): global api_token try: api_token = os.environ['SOCCER_CLI_API_TOKEN'] except KeyError: home = os.path.expanduser("~") config = os.path.join(home, ".soccer-cli.ini") if not os.path.exists(config): with open(config, "w") as cfile: ...
Load API key from config file, write if needed
def start(self, phase, stage, **kwargs):
    """Start a new routine, stage or phase.

    Returns a ProgressSection bound to this object's session and logger;
    extra keyword arguments are forwarded to the section.
    """
    return ProgressSection(self, self._session, phase, stage,
                           self._logger, **kwargs)
Start a new routine, stage or phase
def plot(self, data, height=1000, render_large_data=False): import IPython if not isinstance(data, pd.DataFrame): raise ValueError('Expect a DataFrame.') if (len(data) > 10000 and not render_large_data): raise ValueError('Facets dive may not work well with more than 10000 rows. ' + ...
Plots a detail view of data. Args: data: a Pandas dataframe. height: the height of the output.
def load_ply(file_obj, resolver=None, fix_texture=True, *args, **kwargs): elements, is_ascii, image_name = parse_header(file_obj) if is_ascii: ply_ascii(elements, file_obj) else: ply_binary(elements, file_obj) image = None if image_...
Load a PLY file from an open file object. Parameters --------- file_obj : an open file- like object Source data, ASCII or binary PLY resolver : trimesh.visual.resolvers.Resolver Object which can resolve assets fix_texture : bool If True, will re- index vertices and faces so ...
def render_reverse(self, inst=None, context=None):
    """Render the reverse URL for this path.

    A trailing ``index.html``/``index.htm`` component is collapsed to a
    trailing slash.
    """
    rendered = self.render(inst=inst, context=context)
    head, _, tail = rendered.rpartition('/')
    if tail in ('index.html', 'index.htm'):
        return head + '/'
    return rendered
Renders the reverse URL for this path.
def get_errors(error_string):
    """Extract the lines of *error_string* that contain 'Error'.

    Falls back to the whole (stripped) string when no such line exists.
    """
    matching = [line for line in error_string.splitlines() if 'Error' in line]
    if matching:
        return '\n'.join(matching)
    return error_string.strip()
returns all lines in the error_string that contain the string "Error"; if no line contains it, returns the whole error_string stripped of surrounding whitespace
def get_node(self, element):
    r"""Get node info for an ElementTree element.

    Returns a dict with the tag name, stripped text value, attribute
    mapping and namespace.

    :param element: an :class:`~xml.etree.ElementTree.Element` instance
    :rtype: dict
    """
    namespace, tag = self.split_namespace(element.tag)
    text = element.text or ''
    return {
        'tag': tag,
        'value': text.strip(),
        'attr': element.attrib,
        'namespace': namespace,
    }
r"""Get node info. Parse element and get the element tag info. Include tag name, value, attribute, namespace. :param element: an :class:`~xml.etree.ElementTree.Element` instance :rtype: dict
def _gather(self, *args, **kwargs):
    """Return a lazy generator over replies converted to Python objects.

    ``propagate`` (default True) is forwarded to ``to_python``; remaining
    arguments go to the actor's ``_collect_replies``.
    """
    propagate = kwargs.pop('propagate', True)
    replies = self.actor._collect_replies(*args, **kwargs)
    return (self.to_python(reply, propagate=propagate) for reply in replies)
Generator over the results
def etag(self):
    """Return the list of ETag option values of the message.

    :rtype: list
    :return: the ETag values, or [] if none are present.
    """
    etag_number = defines.OptionRegistry.ETAG.number
    return [opt.value for opt in self.options if opt.number == etag_number]
Get the ETag option of the message. :rtype: list :return: the ETag values or [] if not specified by the request
def Validate(self):
    """Check that the Method is well constructed.

    Delegates to module-level validators; each raises with the given
    message on failure.  Note: the module-level ``Validate`` function is
    distinct from this method despite sharing its name.
    """
    ValidateMultiple(self.probe, "Method has invalid probes")
    Validate(self.target, "Method has invalid target")
    Validate(self.hint, "Method has invalid hint")
Check the Method is well constructed.
def set_bit(bitmask, bit, is_on):
    """Return *bitmask* with the given bit turned on or off.

    Bits are numbered from 1 (lowest) to 8 (highest); the result is
    masked to 8 bits when clearing.
    """
    flag = 1 << (bit - 1)
    if is_on:
        return bitmask | flag
    return bitmask & (0xff & ~flag)
Set the value of a bit in a bitmask on or off. Bits are numbered from 1 (lowest) to 8 (highest).
def mkdir_p(*args, **kwargs):
    """Like ``os.mkdir``, but succeed silently when the directory exists.

    Any OSError other than EEXIST is re-raised.
    """
    try:
        return os.mkdir(*args, **kwargs)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return None
        raise
Like `mkdir`, but does not raise an exception if the directory already exists.
def set_bool(_bytearray, byte_index, bool_index, value): assert value in [0, 1, True, False] current_value = get_bool(_bytearray, byte_index, bool_index) index_value = 1 << bool_index if current_value == value: return if value: _bytearray[byte_index] += index_value else: ...
Set boolean value on location in bytearray
def get_components_for_species(alignment, species):
    """Return the component of *alignment* for each name in *species*.

    Components are matched on the part of ``src`` before the first dot
    (the species name).  Returns None when the alignment has fewer
    components than requested species, or when any species is missing.
    """
    # Quick impossibility check before building the index.
    if len(alignment.components) < len(species):
        return None
    index = {c.src.split('.')[0]: c for c in alignment.components}
    try:
        return [index[s] for s in species]
    except KeyError:
        # Narrowed from a bare `except:`: only a missing species should
        # yield None, not e.g. KeyboardInterrupt or SystemExit.
        return None
Return the component for each species in the list `species` or None
def __traces_url(self):
    """URL on the host agent to POST traces to; valid only once announced."""
    path = AGENT_TRACES_PATH % self.from_.pid
    return "http://{0}:{1}/{2}".format(self.host, self.port, path)
URL for posting traces to the host agent. Only valid when announced.
def build_type(field): if field.type_id == 'string': if 'size' in field.options: return "builder.putString(%s, %d)" % (field.identifier, field.options['size'].value) else: return "builder.putString(%s)" % field.identifier elif field.type_id in JAVA_TYPE_MAP: return "builder.put%s(%s)" % (fie...
Function to pack a type into the binary payload.
def get_bounding_box(points): assert len(points) > 0, "At least one point has to be given." min_x, max_x = points[0]['x'], points[0]['x'] min_y, max_y = points[0]['y'], points[0]['y'] for point in points: min_x, max_x = min(min_x, point['x']), max(max_x, point['x']) min_y, max_y = min(mi...
Get the bounding box of a list of points. Parameters ---------- points : list of points Returns ------- BoundingBox
def do_copy(self, subcmd, opts, *args):
    """Duplicate something in working copy or repository, remembering history.

    usage: copy SRC DST

    This implementation just echoes the parsed options and arguments.
    """
    # Parenthesized single-argument print runs on both Python 2 and 3;
    # the original bare `print` statement was Python-2 only.
    print("'svn %s' opts: %s" % (subcmd, opts))
    print("'svn %s' args: %s" % (subcmd, args))
Duplicate something in working copy or repository, remembering history. usage: copy SRC DST SRC and DST can each be either a working copy (WC) path or URL: WC -> WC: copy and schedule for addition (with history) WC -> URL: immediately commit a copy of WC to...
def max_width(self): value, unit = float(self._width_str[:-1]), self._width_str[-1] ensure(unit in ["c", "%"], ValueError, "Width unit must be either 'c' or '%'") if unit == "c": ensure(value <= self.columns, ValueError, "Terminal only has {} columns...
Get maximum width of progress bar :rtype: int :returns: Maximum column width of progress bar
def retrieve_records(self, timeperiod, include_running, include_processed, include_noop, include_failed, include_disabled): resp = dict() resp.update(self._search_by_level(COLLECTION_JOB_HOURLY, timeperiod, include_running, include_proce...
method looks for suitable job records in all Job collections and returns them as a dict
def delete_permission(self, username, virtual_host):
    """Delete a user's permissions on the given virtual host.

    :param str username: Username
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.
    """
    encoded_vhost = quote(virtual_host, '')
    endpoint = API_USER_VIRTUAL_HOST_PERMISSIONS % (encoded_vhost, username)
    return self.http_client.delete(endpoint)
Delete User permissions for the configured virtual host. :param str username: Username :param str virtual_host: Virtual host name :raises ApiError: Raises if the remote server encountered an error. :raises ApiConnectionError: Raises if there was a connectivity issue. :rtype: d...
def get_raw_data(self, times=5):
    """Take *times* valid readings and return them as a list.

    :param times: how many measures to take
    :type times: int
    :return: the list of raw measured values
    """
    self._validate_measure_count(times)
    readings = []
    while len(readings) < times:
        sample = self._read()
        # NOTE(review): `not in [False, -1]` also filters out 0, because
        # 0 == False in Python — behavior preserved as-is; confirm intent.
        if sample not in [False, -1]:
            readings.append(sample)
    return readings
take several readings and return them as a list of raw values :param times: how many measures to take :type times: int :return: the list of measured values :rtype: list
def _cryptography_encrypt(cipher_factory, plaintext, key, iv): encryptor = cipher_factory(key, iv).encryptor() return encryptor.update(plaintext) + encryptor.finalize()
Use a cryptography cipher factory to encrypt data. :param cipher_factory: Factory callable that builds a cryptography Cipher instance based on the key and IV :type cipher_factory: callable :param bytes plaintext: Plaintext data to encrypt :param bytes key: Encryption key :param bytes IV: In...
def create_or_clear(self, path, **kwargs):
    """Create *path*, or recursively delete its children if it exists.

    Written in deferred-generator style (each ``yield`` waits on an async
    result).  When creation fails with NodeExistsException, every child
    of *path* is recursively deleted; the node itself is kept.
    """
    try:
        yield self.create(path, **kwargs)
    except NodeExistsException:
        children = yield self.get_children(path)
        for name in children:
            yield self.recursive_delete(path + "/" + name)
Create path and recursively clear contents.
def _render(self): p_char = '' if not self.done and self.remainder: p_style = self._comp_style if self.partial_char_extra_style: if p_style is str: p_style = self.partial_char_extra_style else: p_style = p_st...
figure partial character
def lrem(self, name, value, num=1):
    """Remove occurrences of *value* from the list stored at *name*.

    Issues the raw LREM command because redis.Redis and redis.StrictRedis
    disagree on the kwargs of the high-level API.

    :param name: str, the name of the redis key
    :param value: value to remove
    :param num: LREM count semantics (number of occurrences)
    """
    with self.pipe as pipe:
        encoded = self.valueparse.encode(value)
        return pipe.execute_command('LREM', self.redis_key(name),
                                    num, encoded)
Remove first occurrence of value. Can't use redis-py interface. It's inconstistent between redis.Redis and redis.StrictRedis in terms of the kwargs. Better to use the underlying execute_command instead. :param name: str the name of the redis key :param num: :param v...
def filter_sequences(self, seq_type):
    """Return a DictList of sequences that are instances of *seq_type*.

    Args:
        seq_type (SeqProp): Object type to keep.

    Returns:
        DictList: filtered to the specified object type only.
    """
    matches = (entry for entry in self.sequences
               if isinstance(entry, seq_type))
    return DictList(matches)
Return a DictList of only specified types in the sequences attribute. Args: seq_type (SeqProp): Object type Returns: DictList: A filtered DictList of specified object type only
def head_values(self):
    """Return the set of values held by the heads."""
    return {head.value for head in self._heads}
Return set of the head values
def _interact(self, location, error_info, payload): if (self._interaction_methods is None or len(self._interaction_methods) == 0): raise InteractionError('interaction required but not possible') if error_info.info.interaction_methods is None and \ error_info.i...
Gathers a macaroon by directing the user to interact with a web page. The error_info argument holds the interaction-required error response. @return DischargeToken, bakery.Macaroon
def received(self, data): self.logger.debug('Data received: {}'.format(data)) message_type = None if 'type' in data: message_type = data['type'] if message_type == 'confirm_subscription': self._subscribed() elif message_type == 'reject_subscription': ...
API for the connection to forward information to this subscription instance. :param data: The JSON data which was received. :type data: Message
def _as_dict(self): values = self._dynamic_columns or {} for name, col in self._columns.items(): values[name] = col.to_database(getattr(self, name, None)) return values
Returns a map of column names to cleaned values
def asset(path):
    """Join *path* with the STATIC_URL setting, appending the build
    version as a cache-busting query string.
    """
    version = bitcaster.get_full_version()
    return mark_safe('{0}?{1}'.format(_static(path), version))
Join the given path with the STATIC_URL setting. Usage:: {% static path [as varname] %} Examples:: {% static "myapp/css/base.css" %} {% static variable_with_path %} {% static "myapp/css/base.css" as admin_base_css %} {% static variable_with_path as varname %}
def get_total_size(self, entries):
    """Sum the response body sizes of *entries*, skipping unknown (<= 0) sizes.

    :param entries: ``list`` of entries to calculate the total size of.
    """
    return sum(
        entry['response']['bodySize']
        for entry in entries
        if entry['response']['bodySize'] > 0
    )
Returns the total size of a collection of entries. :param entries: ``list`` of entries to calculate the total size of.
def _to_desired_dates(self, arr):
    """Restrict the xarray DataArray or Dataset to the configured months."""
    desired = utils.times.extract_months(
        arr[internal_names.TIME_STR], self.months)
    return arr.sel(time=desired)
Restrict the xarray DataArray or Dataset to the desired months.
def nrefs(self, tag):
    """Return the number of members of this vgroup carrying *tag*.

    Args::
        tag: tag type to look for in the vgroup
    Returns::
        number of members identified by this tag type

    C library equivalent: Vnrefs
    """
    count = _C.Vnrefs(self._id, tag)
    _checkErr('nrefs', count, "bad arguments")
    return count
Determine the number of tags of a given type in a vgroup. Args:: tag tag type to look for in the vgroup Returns:: number of members identified by this tag type C library equivalent : Vnrefs
def is_hosting_device_reachable(self, hosting_device): ret_val = False hd = hosting_device hd_id = hosting_device['id'] hd_mgmt_ip = hosting_device['management_ip_address'] dead_hd_list = self.get_dead_hosting_devices_info() if hd_id in dead_hd_list: LOG.debug...
Check the hosting device which hosts this resource is reachable. If the resource is not reachable, it is added to the backlog. * heartbeat revision We want to enqueue all hosting-devices into the backlog for monitoring purposes adds key/value pairs to hd (aka hosting_device ...
def random_word(self, length, prefix=0, start=False, end=False, flatten=False): if start: word = ">" length += 1 return self._extend_word(word, length, prefix=prefix, end=end, flatten=flatten)[1:] else: ...
Generate a random word of length from this table. :param length: the length of the generated word; >= 1; :param prefix: if greater than 0, the maximum length of the prefix to consider to choose the next character; :param start: if True, the generated word starts as a word...
def _validate(wdl_file): start_dir = os.getcwd() os.chdir(os.path.dirname(wdl_file)) print("Validating", wdl_file) subprocess.check_call(["wdltool", "validate", wdl_file]) os.chdir(start_dir)
Run validation on the generated WDL output using wdltool.
def _new_from_cdata(cls, cdata: Any) -> "Random": self = object.__new__(cls) self.random_c = cdata return self
Return a new instance encapsulating this cdata.
def find_tags(self, tag_name, **attribute_filter):
    """Return all matching tags across every available xml document.

    :param str tag_name: tag name to search for; keyword arguments are
        used as attribute filters.
    """
    matches = []
    for document in self.xml:
        matches.extend(
            self.find_tags_from_xml(document, tag_name, **attribute_filter))
    return matches
Return a list of all the matched tags in all available xml :param str tag: specify the tag name
def set_ssl_logging(self, enable=False, func=_ssl_logging_cb):
    """Enable or disable SSL state logging.

    :param enable: True to install the callback, False to remove it.
    :param func: Callback function used for logging.
    """
    callback = func if enable else 0
    SSL_CTX_set_info_callback(self._ctx, callback)
u''' Enable or disable SSL logging :param True | False enable: Enable or disable SSL logging :param func: Callback function for logging
def send(self, cumulative_counters=None, gauges=None, counters=None): if not gauges and not cumulative_counters and not counters: return data = { 'cumulative_counter': cumulative_counters, 'gauge': gauges, 'counter': counters, } _logger.deb...
Send the given metrics to SignalFx. Args: cumulative_counters (list): a list of dictionaries representing the cumulative counters to report. gauges (list): a list of dictionaries representing the gauges to report. counters (list): a list of di...
def check_cache(resource_type): def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): try: adapter = args[0] key, val = list(kwargs.items())[0] except IndexError: logger.warning("Couldn't generate full index ...
Decorator for adapter methods to check cache for resource before normally sending requests to retrieve data Only works with single kwargs, almost always used with @one_of_keyword_only decorator Args: resource_type (type(APIResource)): Subclass of APIResource of cache to be checked when called
def get_account_details(self, account):
    """Return the stored details for *account* (empty dict when unknown)."""
    details = self.get_user(account.username)
    return {} if details is None else details
Get the account details
def outbox_folder(self):
    """Shortcut to get the Outbox Folder instance.

    :rtype: mailbox.Folder
    """
    outbox_id = OutlookWellKnowFolderNames.OUTBOX.value
    return self.folder_constructor(parent=self, name='Outbox',
                                   folder_id=outbox_id)
Shortcut to get Outbox Folder instance :rtype: mailbox.Folder
def removeRow(self, triggered):
    """Slot: remove the currently selected rows from the model.

    Args:
        triggered (bool): only acts when the corresponding button was
            activated; the button is unchecked afterwards.
    """
    if not triggered:
        return
    model = self.tableView.model()
    selected_rows = {index.row() for index in self.tableView.selectedIndexes()}
    model.removeDataFrameRows(selected_rows)
    self.sender().setChecked(False)
Removes a row to the model. This method is also a slot. Args: triggered (bool): If the corresponding button was activated, the selected row will be removed from the model.
def _get_other_names(self, line): m = re.search(self.compound_regex['other_names'][0], line, re.IGNORECASE) if m: self.other_names.append(m.group(1).strip())
Parse and extract any other names that might be recorded for the compound Args: line (str): line of the msp file
def exc_handle(url, out, testing): quiet_exceptions = [ConnectionError, ReadTimeout, ConnectTimeout, TooManyRedirects] type, value, _ = sys.exc_info() if type not in quiet_exceptions or testing: exc = traceback.format_exc() exc_string = ("Line '%s' raised:\n" % url) + exc ...
Handle exception. If of a determinate subset, it is stored into a file as a single type. Otherwise, full stack is stored. Furthermore, if testing, stack is always shown. @param url: url which was being scanned when exception was thrown. @param out: Output object, usually self.out. @param testing: wh...
def async_iter(func, args_iter, **kwargs): iter_count = len(args_iter) iter_group = uuid()[1] options = kwargs.get('q_options', kwargs) options.pop('hook', None) options['broker'] = options.get('broker', get_broker()) options['group'] = iter_group options['iter_count'] = iter_count if op...
enqueues a function with iterable arguments
def children_with_values(self): childs = [] for attribute in self._get_all_c_children_with_order(): member = getattr(self, attribute) if member is None or member == []: pass elif isinstance(member, list): for instance in member: ...
Returns all children that has values :return: Possibly empty list of children.
def model_m2m_changed(sender, instance, action, **kwargs): if sender._meta.app_label == 'rest_framework_reactive': return def notify(): table = sender._meta.db_table if action == 'post_add': notify_observers(table, ORM_NOTIFY_KIND_CREATE) elif action in ('post_remove'...
Signal emitted after any M2M relation changes via Django ORM. :param sender: M2M intermediate model :param instance: The actual instance that was saved :param action: M2M action
def url_ok(match_tuple: MatchTuple) -> bool:
    """Check whether the link in *match_tuple* is reachable."""
    try:
        response = requests.get(match_tuple.link, timeout=5)
    except (requests.ConnectionError, requests.Timeout):
        return False
    return response.ok
Check if a URL is reachable.