docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Load all known fabsetup addons which are installed as pypi pip-packages. Args: _globals(dict): the globals() namespace of the fabric script. Return: None
def load_pip_addons(_globals): for package_name in known_pip_addons: _, username = package_username(package_name) try: load_addon(username, package_name.replace('-', '_'), _globals) except ImportError: pass
826,548
Load all fabsetup addons which are stored under ~/.fabsetup-addon-repos as git repositories. Args: _globals(dict): the globals() namespace of the fabric script. Return: None
def load_repo_addons(_globals): repos_dir = os.path.expanduser('~/.fabsetup-addon-repos') if os.path.isdir(repos_dir): basedir, repos, _ = next(os.walk(repos_dir)) for repo_dir in [os.path.join(basedir, repo) for repo in repos # omit dot dir...
826,550
Get the decrypted value of an SSM parameter Args: parameter_name - the name of the stored parameter of interest Return: Value if allowed and present else None
def get_ssm_parameter(parameter_name): try: response = boto3.client('ssm').get_parameters( Names=[parameter_name], WithDecryption=True ) return response.get('Parameters', None)[0].get('Value', '') except Exception: pass return ''
826,587
Cloud stack utility init method. Args: config_block - a dictionary creates from the CLI driver. See that script for the things that are required and optional. Returns: not a damn thing Raises: SystemError...
def __init__(self, config_block): if config_block: self._config = config_block else: logging.error('config block was garbage') raise SystemError
826,624
The main event of the utility. Create or update a Cloud Formation stack. Injecting properties where needed Args: None Returns: True if the stack create/update is started successfully else False if the start goes off in the weeds. Exits: ...
def upsert(self): required_parameters = [] self._stackParameters = [] try: self._initialize_upsert() except Exception: return False try: available_parameters = self._parameters.keys() for parameter_name in self._templat...
826,625
List the existing stacks in the indicated region Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems.
def list(self): self._initialize_list() interested = True response = self._cloudFormation.list_stacks() print('Stack(s):') while interested: if 'StackSummaries' in response: for stack in response['StackSummaries']: stack_s...
826,630
Smash the given stack Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems.
def smash(self): self._initialize_smash() try: stack_name = self._config.get('environment', {}).get('stack_name', None) response = self._cloudFormation.describe_stacks(StackName=stack_name) logging.debug('smash pre-flight returned: {}'.format( ...
826,631
The utililty requires boto3 clients to Cloud Formation and S3. Here is where we make them. Args: None Returns: Good or Bad; True or False
def _init_boto3_clients(self): try: profile = self._config.get('environment', {}).get('profile') region = self._config.get('environment', {}).get('region') if profile: self._b3Sess = boto3.session.Session(profile_name=profile) else: ...
826,632
Get parameters from Simple Systems Manager Args: p - a parameter name Returns: a value, decrypted if needed, if successful or None if things go sideways.
def _get_ssm_parameter(self, p): try: response = self._ssm.get_parameter(Name=p, WithDecryption=True) return response.get('Parameter', {}).get('Value', None) except Exception as ruh_roh: logging.error(ruh_roh, exc_info=False) return None
826,634
Fill in the _parameters dict from the properties file. Args: None Returns: True Todo: Figure out what could go wrong and at least acknowledge the the fact that Murphy was an optimist.
def _fill_parameters(self): self._parameters = self._config.get('parameters', {}) self._fill_defaults() for k in self._parameters.keys(): try: if self._parameters[k].startswith(self.SSM) and self._parameters[k].endswith(']'): parts = self...
826,635
Fill in the _tags dict from the tags file. Args: None Returns: True Todo: Figure what could go wrong and at least acknowledge the the fact that Murphy was an optimist.
def _read_tags(self): tags = self._config.get('tags', {}) logging.info('Tags:') for tag_name in tags.keys(): tag = {} tag['Key'] = tag_name tag['Value'] = tags[tag_name] self._tags.append(tag) logging.info('{} = {}'.format(tag_...
826,636
Determine if we are creating a new stack or updating and existing one. The update member is set as you would expect at the end of this query. Args: None Returns: True
def _set_update(self): try: self._updateStack = False stack_name = self._config.get('environment', {}).get('stack_name', None) response = self._cloudFormation.describe_stacks(StackName=stack_name) stack = response['Stacks'][0] if stack['StackS...
826,637
Cloud Formation likes to take the template from S3 so here we put the template into S3. We also store the parameters file that was used in this run. Note: you can pass anything as the version string but you should at least consider a version control tag or git commit hash as the version....
def _archive_elements(self): try: stackfile_key, propertyfile_key = self._craft_s3_keys() template_file = self._config.get('environment', {}).get('template', None) bucket = self._config.get('environment', {}).get('bucket', None) if not os.path.isfile(tem...
826,638
We are putting stuff into S3, were supplied the bucket. Here we craft the key of the elements we are putting up there in the internet clouds. Args: None Returns: a tuple of teplate file key and property file key
def _craft_s3_keys(self): now = time.gmtime() stub = "templates/{stack_name}/{version}".format( stack_name=self._config.get('environment', {}).get('stack_name', None), version=self._config.get('codeVersion') ) stub = stub + "/" + str(now.tm_year) ...
826,639
Spin in a loop while the Cloud Formation process either fails or succeeds Args: None Returns: Good or bad; True or False
def poll_stack(self): logging.info('polling stack status, POLL_INTERVAL={}'.format(POLL_INTERVAL)) time.sleep(POLL_INTERVAL) completed_states = [ 'CREATE_COMPLETE', 'UPDATE_COMPLETE', 'DELETE_COMPLETE' ] stack_name = self._config.get('...
826,640
Get IP geolocation. Args: ip (str): IP address to use if no data provided. hit_api (bool): whether to hit api if info not found. Returns: str: latitude and longitude, comma-separated.
def ip_geoloc(ip, hit_api=True): from ..logs.models import IPInfoCheck try: obj = IPInfoCheck.objects.get(ip_address=ip).ip_info except IPInfoCheck.DoesNotExist: if hit_api: try: obj = IPInfoCheck.check_ip(ip) except RateExceededError: ...
826,715
Get a link to google maps pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to google maps pointing on this IP's geolocation.
def google_maps_geoloc_link(data): if isinstance(data, str): lat_lon = ip_geoloc(data) if lat_lon is None: return '' lat, lon = lat_lon else: lat, lon = data loc = '%s,%s' % (lat, lon) return 'https://www.google.com/maps/place/@%s,17z/' \ 'data...
826,716
Get a link to open street map pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to open street map pointing on this IP's geolocation.
def open_street_map_geoloc_link(data): if isinstance(data, str): lat_lon = ip_geoloc(data) if lat_lon is None: return '' lat, lon = lat_lon else: lat, lon = data return 'https://www.openstreetmap.org/search' \ '?query=%s%%2C%s#map=7/%s/%s' % (lat, ...
826,717
Return the URL patterns for the logs views. Args: admin_view (callable): admin_view method from an AdminSite instance. Returns: list: the URL patterns for the logs views.
def logs_urlpatterns(admin_view=lambda x: x): return [ url(r'^$', admin_view(LogsMenu.as_view()), name='logs'), url(r'^status_codes$', admin_view(LogsStatusCodes.as_view()), name='logs_status_codes'), url(r'^status_codes_by_date$', ...
826,752
Get information about an IP. Args: ip (str): an IP (xxx.xxx.xxx.xxx). Returns: dict: see http://ipinfo.io/developers/getting-started
def _get(self, ip): # Geoloc updated up to once a week: # http://ipinfo.io/developers/data#geolocation-data retries = 10 for retry in range(retries): try: response = requests.get('http://ipinfo.io/%s/json' % ip, ...
826,761
Check if URL is part of the current project's URLs. Args: url (str): URL to check. default (callable): used to filter out some URLs attached to function. Returns:
def url_is_project(url, default='not_a_func'): try: u = resolve(url) if u and u.func != default: return True except Resolver404: static_url = settings.STATIC_URL static_url_wd = static_url.lstrip('/') if url.startswith(static_url): url = url[l...
826,783
Function generator. Args: white_list (dict): dict with PREFIXES and CONSTANTS keys (list values). Returns: func: a function to check if a URL is...
def url_is(white_list): def func(url): prefixes = white_list.get('PREFIXES', ()) for prefix in prefixes: if url.startswith(prefix): return True constants = white_list.get('CONSTANTS', ()) for exact_url in constants: if url == exact_url: ...
826,784
Search the ORCID public API Specfically, return a dictionary with the personal details (name, etc.) of the person associated with the given ORCID Args: orcid (`str`): The ORCID to be searched Returns: `dict`: Dictionary with the JSON response from the API Raises: `~reques...
def search_orcid(orcid): url = 'https://pub.orcid.org/v2.1/{orcid}/person'.format(orcid=orcid) r = requests.get(url, headers=headers) if r.status_code != 200: r.raise_for_status() return r.json()
826,819
Yield one date per day from starting date to ending date. Args: start_date (date): starting date. end_date (date): ending date. Yields: date: a date for each day within the range.
def daterange(start_date, end_date): for n in range(int((end_date - start_date).days)): yield start_date + timedelta(n)
826,820
Convert a month name (MMM) to its number (01-12). Args: month (str): 3-letters string describing month. to_int (bool): cast number to int or not. Returns: str/int: the month's number (between 01 and 12).
def month_name_to_number(month, to_int=False): number = { 'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04', 'May': '05', 'Jun': '06', 'Jul': '07', 'Aug': '08', 'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12', }.get(month) return int(number) if to_int else number
826,821
Read and parse ReSpecTh XML file metadata (file author, version, etc.) Args: root (`~xml.etree.ElementTree.Element`): Root of ReSpecTh XML file Returns: properties (`dict`): Dictionary with file metadata
def get_file_metadata(root): properties = {} file_author = getattr(root.find('fileAuthor'), 'text', False) # Test for missing attribute or empty string in the same statement if not file_author: raise MissingElementError('fileAuthor') else: properties['file-authors'] = [{'name':...
826,823
Read reference info from root of ReSpecTh XML file. Args: root (`~xml.etree.ElementTree.Element`): Root of ReSpecTh XML file Returns: properties (`dict`): Dictionary with reference information
def get_reference(root): reference = {} elem = root.find('bibliographyLink') if elem is None: raise MissingElementError('bibliographyLink') # Try to get reference info via DOI, fall back on preferredKey if necessary. ref_doi = elem.get('doi', None) ref_key = elem.get('preferredKey'...
826,824
Read common properties from root of ReSpecTh XML file. Args: root (`~xml.etree.ElementTree.Element`): Root of ReSpecTh XML file Returns: properties (`dict`): Dictionary with experiment type and apparatus information.
def get_experiment_kind(root): properties = {} if root.find('experimentType').text == 'Ignition delay measurement': properties['experiment-type'] = 'ignition delay' else: raise NotImplementedError(root.find('experimentType').text + ' not (yet) supported') properties['apparatus'] = ...
826,825
Read common properties from root of ReSpecTh XML file. Args: root (`~xml.etree.ElementTree.Element`): Root of ReSpecTh XML file Returns: properties (`dict`): Dictionary with common properties
def get_common_properties(root): properties = {} for elem in root.iterfind('commonProperties/property'): name = elem.attrib['name'] if name == 'initial composition': properties['composition'] = {'species': [], 'kind': None} for child in elem.iter('component'): ...
826,826
Gets ignition type and target. Args: root (`~xml.etree.ElementTree.Element`): Root of ReSpecTh XML file Returns: properties (`dict`): Dictionary with ignition type/target information
def get_ignition_type(root): properties = {} elem = root.find('ignitionType') if elem is None: raise MissingElementError('ignitionType') elem = elem.attrib if 'target' in elem: ign_target = elem['target'].rstrip(';').upper() else: raise MissingAttributeError('targe...
826,827
Parse datapoints with ignition delay from file. Args: root (`~xml.etree.ElementTree.Element`): Root of ReSpecTh XML file Returns: properties (`dict`): Dictionary with ignition delay data
def get_datapoints(root): # Shock tube experiment will have one data group, while RCM may have one # or two (one for ignition delay, one for volume-history) dataGroups = root.findall('dataGroup') if not dataGroups: raise MissingElementError('dataGroup') # all situations will have main ...
826,828
Get or create an entry using obtained information from an IP. Args: ip (str): IP address xxx.xxx.xxx.xxx. Returns: ip_info: an instance of IPInfo.
def get_or_create_from_ip(ip): data = ip_api_handler.get(ip) if data and any(v for v in data.values()): if data.get('ip_address', None) is None or not data['ip_address']: data['ip_address'] = ip return IPInfo.objects.get_or_create(**data) return N...
826,845
Update the IP info. Args: since_days (int): if checked less than this number of days ago, don't check again (default to 10 days). save (bool): whether to save anyway or not. force (bool): whether to update ip_info to last checked one. Returns: ...
def update_ip_info(self, since_days=10, save=False, force=False): # If ip already checked try: last_check = IPInfoCheck.objects.get( ip_address=self.client_ip_address) # If checked less than since_days ago, don't check again since_last = date...
826,848
Validate the parsed YAML file for adherance to the ChemKED format. Arguments: properties (`dict`): Dictionary created from the parsed YAML file Raises: `ValueError`: If the YAML file cannot be validated, a `ValueError` is raised whose string contains the errors ...
def validate_yaml(self, properties): validator = OurValidator(schema) if not validator.validate(properties): for key, value in validator.errors.items(): if any(['unallowed value' in v for v in value]): print(('{key} has an illegal value. Allowed v...
826,867
Convert ChemKED record to ReSpecTh XML file. This converter uses common information in a ChemKED file to generate a ReSpecTh XML file. Note that some information may be lost, as ChemKED stores some additional attributes. Arguments: filename (`str`): Filename for output ReSp...
def convert_to_ReSpecTh(self, filename): root = etree.Element('experiment') file_author = etree.SubElement(root, 'fileAuthor') file_author.text = self.file_authors[0]['name'] # right now ChemKED just uses an integer file version file_version = etree.SubElement(root, 'f...
826,870
Return a list where the duplicates have been removed. Args: l (list): the list to filter. Returns: list: the same list without duplicates.
def distinct(l): seen = set() seen_add = seen.add return (_ for _ in l if not (_ in seen or seen_add(_)))
826,967
Initialize the class Arguments: device: string containing the serial device allocated to SCSGate logger: instance of logging
def __init__(self, device, logger): self._serial = pyserial.Serial(device, 115200) logger.info("Clearing buffers") self._serial.write(b"@b") ret = self._serial.read(1) if ret != b"k": raise RuntimeError("Error while clearing buffers") # ensure pendi...
827,125
Load values into the class's ConfigProperty attributes (validating types if possible) Args: loaders: iterable of AbstractLoader instances ConfigProperty values are loaded from these sources; and the order indicates preference.
def __init__(self, loaders): if not loaders: # Require loaders only if the class has ConfigProperty attributes if any(self._iter_config_props()): raise AssertionError('Class has ConfigProperty attributes: must provide loader(s)') self._update_property_ke...
827,129
Return an absolute path to a target file that is located in the same directory as as_file Args: as_file: File name (including __file__) Use the directory path of this file target_file: Name of the target file
def in_same_dir(as_file, target_file): return os.path.abspath(os.path.join(os.path.dirname(as_file), target_file))
827,139
Checks that the temperature ranges given for thermo data are valid Args: isvalid_t_range (`bool`): flag from schema indicating T range is to be checked field (`str`): T_range values (`list`): List of temperature values indicating low, middle, and high ranges The rule...
def _validate_isvalid_t_range(self, isvalid_t_range, field, values): if all([isinstance(v, (float, int)) for v in values]): # If no units given, assume Kelvin T_low = Q_(values[0], 'K') T_mid = Q_(values[1], 'K') T_hi = Q_(values[2], 'K') elif all...
827,151
Checks for appropriate units using Pint unit registry. Args: isvalid_unit (`bool`): flag from schema indicating units to be checked. field (`str`): property associated with units in question. value (`dict`): dictionary of values from file associated with this property. ...
def _validate_isvalid_unit(self, isvalid_unit, field, value): quantity = 1.0 * units(value['units']) try: quantity.to(property_units[field]) except pint.DimensionalityError: self._error(field, 'incompatible units; should be consistent ' 'w...
827,152
Checks valid reference metadata using DOI (if present). Args: isvalid_reference (`bool`): flag from schema indicating reference to be checked. field (`str`): 'reference' value (`dict`): dictionary of reference metadata. The rule's arguments are validated against thi...
def _validate_isvalid_reference(self, isvalid_reference, field, value): if 'doi' in value: try: ref = crossref_api.works(ids=value['doi'])['message'] except (HTTPError, habanero.RequestError): self._error(field, 'DOI not found') re...
827,156
Checks for valid ORCID if given. Args: isvalid_orcid (`bool`): flag from schema indicating ORCID to be checked. field (`str`): 'author' value (`dict`): dictionary of author metadata. The rule's arguments are validated against this schema: {'isvalid_orcid...
def _validate_isvalid_orcid(self, isvalid_orcid, field, value): if isvalid_orcid and 'ORCID' in value: try: res = search_orcid(value['ORCID']) except ConnectionError: warn('network not available, ORCID not validated.') return ...
827,157
Checks for valid specification of composition. Args: isvalid_composition (bool): flag from schema indicating composition to be checked. field (str): 'composition' value (dict): dictionary of composition The rule's arguments are validated against this...
def _validate_isvalid_composition(self, isvalid_composition, field, value): sum_amount = 0.0 if value['kind'] in ['mass fraction', 'mole fraction']: low_lim = 0.0 up_lim = 1.0 total_amount = 1.0 elif value['kind'] in ['mole percent']: low_...
827,158
Init method. Args: file_path_regex (regex): the regex to find the log files. log_format_regex (regex): the regex to parse the log files. top_dir (str): the path to the root directory containing the logs.
def __init__(self, file_path_regex=None, log_format_regex=None, top_dir=None): if file_path_regex is not None: self.file_path_regex = file_path_regex if log_format_regex is not None: self.log_format_regex = log_format_regex if top_dir is not None: sel...
827,205
Get stats for most visited pages. Args: logs (list): logs data to use. Returns: dict: more_than_10 and less_than_10: list of dict (bound + url list).
def most_visited_pages_stats(): stats = {'more_than_10': [], 'less_than_10': {}} counter = Counter(list(RequestLog.objects.values_list('url', flat=True))) most_visited_pages = counter.most_common() bounds = (10000, 1000, 100, 10) subsets = [[] for _ in bounds] for u, c in most_visited_pag...
827,255
Validation type for external resources Attempts to connect to the resource, backing off on failure. Args: max_tries: Max number of times to attempt a connection before failing max_wait: Max number of seconds to wait between connection attempts. This can be used t...
def __init__(self, max_tries=5, max_wait=10, *args, **kwargs): self._max_tries = max_tries if self._max_tries < 1: raise TypeError('max_tries must be a positive integer') self._max_wait = max_wait if self._max_wait < 1: raise TypeError('max_wait must be >...
827,281
Attempt to connect to http Args: url: string in the form "http://[host]"
def _test_connection(url): import requests try: # Don't care about status code here as long as the connection was successful requests.head(url) except requests.exceptions.ConnectionError as e: raise ValidationError(e)
827,283
Attempt to connect to redis Args: url: string in the form "redis://[:password@]host[:port][/db-number][?option=value]"
def _test_connection(url): import redis try: with _disconnecting(redis.StrictRedis.from_url(url)) as conn: conn.ping() except redis.connection.ConnectionError as e: raise ValidationError(e)
827,284
Attempt to connect to postgres Args: url: string in the form "postgres://[user]:[password]@[host][:port][/database]"
def _test_connection(url): import psycopg2 try: with closing(psycopg2.connect(dsn=url)) as conn: conn.cursor() except psycopg2.OperationalError as e: raise ValidationError(e)
827,285
Attempt to connect to amqp Args: url: string in the form "amqp://[user]:[password]@[host]"
def _test_connection(url): import pika try: with closing(pika.BlockingConnection(pika.URLParameters(url))) as conn: conn.channel() except pika.exceptions.ConnectionClosed as e: raise ValidationError(e)
827,286
Attempt to connect to etcd Args: url: string in the form "[host]:[port]"
def _test_connection(url): import etcd host, port = url.split(':') try: etcd.Client(host=host, port=int(port)).get('/') except etcd.EtcdConnectionFailed as e: raise ValidationError(e)
827,287
Initializer. Args: source_dict: used to initialize the class. Use constructors to read from Vault. url: Vault url path: Vault path where secrets are stored vault_token: token (must have access to vault path)
def __init__(self, source_dict, url, path, token): self._vault_url = url self._path = path self._token = token super(VaultLoader, self).__init__(source_dict)
827,324
Constructor: use token authentication to read secrets from a Vault path See https://www.vaultproject.io/docs/auth/token.html Args: url: Vault url path: Vault path where secrets are stored vault_token: token (must have access to vault path)
def from_token(cls, url, path, token): source_dict = cls._fetch_secrets(url, path, token) return cls(source_dict, url, path, token)
827,325
Constructor: use AppRole authentication to read secrets from a Vault path See https://www.vaultproject.io/docs/auth/approle.html Args: url: Vault url path: Vault path where secrets are stored role_id: Vault RoleID secret_id: Vault SecretID
def from_app_role(cls, url, path, role_id, secret_id): token = cls._fetch_app_role_token(url, role_id, secret_id) source_dict = cls._fetch_secrets(url, path, token) return cls(source_dict, url, path, token)
827,326
Checks if a string represents a valid quantities unit. Args: w (str): A string to be tested against the set of valid quantities units. Returns: True if the string can be used as a unit in the quantities module.
def isValidUnit(self, w): bad = set(['point', 'a']) if w in bad: return False try: pq.Quantity(0.0, w) return True except: return w == '/'
827,397
Collects all the valid units from an inp string. Works by appending consecutive words from the string and cross-referncing them with a set of valid units. Args: inp (str): Some text which hopefully contains descriptions of different units. Returns: ...
def extractUnits(self, inp): inp = self._preprocess(inp) units = [] description = "" for w in inp.split(' '): if self.isValidUnit(w) or w == '/': if description: description += " " description += w else...
827,398
Converts a string representation of some quantity of units into a quantities object. Args: inp (str): A textual representation of some quantity of units, e.g., "fifty kilograms". Returns: A quantities object representing the described quantity and its ...
def convert(self, inp): inp = self._preprocess(inp) n = NumberService().longestNumber(inp) units = self.extractUnits(inp) # Convert to quantity object, attempt conversion quantity = pq.Quantity(float(n), units[0]) quantity.units = units[1] return quant...
827,399
Solves the equation specified by the input string. Args: inp (str): An equation, specified in words, containing some combination of numbers, binary, and unary operations. Returns: The floating-point result of carrying out the computation.
def parseEquation(self, inp): inp = MathService._preprocess(inp) split = inp.split(' ') # Recursive call on unary operators for i, w in enumerate(split): if w in self.__unaryOperators__: op = self.__unaryOperators__[w] # Split equati...
827,881
A general method for parsing word-representations of numbers. Supports floats and integers. Args: words (str): Description of an arbitrary number. Returns: A double representation of the words.
def parse(self, words): def exact(words): try: return float(words) except: return None guess = exact(words) if guess is not None: return guess split = words.split(' ') # Replace final ord...
828,169
Convert a floating-point number described in words to a double. Supports two kinds of descriptions: those with a 'point' (e.g., "one point two five") and those with a fraction (e.g., "one and a quarter"). Args: words (str): Description of the floating-point number. ...
def parseFloat(self, words): def pointFloat(words): m = re.search(r'(.*) point (.*)', words) if m: whole = m.group(1) frac = m.group(2) total = 0.0 coeff = 0.10 for digit in frac.split(' '): ...
828,170
Parses words to the integer they describe. Args: words (str): Description of the integer. Returns: An integer representation of the words.
def parseInt(self, words): # Remove 'and', case-sensitivity words = words.replace(" and ", " ").lower() # 'a' -> 'one' words = re.sub(r'(\b)a(\b)', '\g<1>one\g<2>', words) def textToNumber(s): a = re.split(r"[\s-]+", s) n = 0 ...
828,171
Parses a number m into a human-ready string representation. For example, crops off floats if they're too accurate. Arguments: m (float): Floating-point number to be cleaned. Returns: Human-ready string description of the number.
def parseMagnitude(m): m = NumberService().parse(m) def toDecimalPrecision(n, k): return float("%.*f" % (k, round(n, k))) # Cast to two digits of precision digits = 2 magnitude = toDecimalPrecision(m, digits) # If value is really small, keep going ...
828,172
Extracts the longest valid numerical description from a string. Not guaranteed to return a result even if some valid numerical description exists (i.e., method is not particularly advanced). Args: inp (str): An arbitrary string, hopefully containing a number. Returns: ...
def longestNumber(self, inp): split = inp.split(' ') # Assume just a single number numStart = None numEnd = None for i, w in enumerate(split): if self.isValid(w): if numStart is None: numStart = i numEnd = ...
828,173
Extracts all day-related information from an input string. Ignores any information related to the specific time-of-day. Args: inp (str): Input string to be parsed. Returns: A list of datetime objects containing the extracted date from the input snippet, or a...
def extractDays(self, inp): inp = self._preprocess(inp) def extractDayOfWeek(dayMatch): if dayMatch.group(5) in self.__daysOfWeek__: return self.__daysOfWeek__.index(dayMatch.group(5)) elif dayMatch.group(6) in self.__daysOfWeek__: return...
828,521
Extracts time-related information from an input string. Ignores any information related to the specific date, focusing on the time-of-day. Args: inp (str): Input string to be parsed. Returns: A list of datetime objects containing the extracted times from the ...
def extractTimes(self, inp): def handleMatch(time): relative = False if not time: return None # Default times: 8am, 12pm, 7pm elif time.group(1) == 'morning': h = 8 m = 0 elif time.group(1) == ...
828,522
Extract semantic date information from an input string. In effect, runs both parseDay and parseTime on the input string and merges the results to produce a comprehensive datetime object. Args: inp (str): Input string to be parsed. Returns: A list of date...
def extractDates(self, inp): def merge(param): day, time = param if not (day or time): return None if not day: return time if not time: return day return datetime.datetime( day....
828,523
Convert a datetime object representing a time into a human-ready string that can be read, spoken aloud, etc. Args: time (datetime.date): A datetime object to be converted into text. Returns: A string representation of the input time, ignoring any day-related ...
def convertTime(self, time): # if ':00', ignore reporting minutes m_format = "" if time.minute: m_format = ":%M" timeString = time.strftime("%I" + m_format + " %p") # if '07:30', cast to '7:30' if not int(timeString[0]): timeString = tim...
828,526
Initializing and validating fields. Args: kwargs (dict): application command line options.
def __init__(self, **kwargs): try: arguments = Adapter(Schema(ApplicationOptions.SCHEMA).validate(kwargs)) self.definition = arguments.definition self.matrix_tags = [entry for entry in arguments.matrix_tags.split(',') if len(entry) > 0] self.tags = [entry...
828,759
Find all row names and the maximum column widths. Args: columns (dict): the keys are the column name and the value the max length. Returns: dict: column names (key) and widths (value).
def calculate_columns(sequence): columns = {} for row in sequence: for key in row.keys(): if key not in columns: columns[key] = len(key) value_length = len(str(row[key])) if value_length > columns[key]: columns[key] = value_lengt...
828,883
Calculate row format. Args: columns (dict): the keys are the column name and the value the max length. keys (list): optional list of keys to order columns as well as to filter for them. Returns: str: format for table row
def calculate_row_format(columns, keys=None): row_format = '' if keys is None: keys = columns.keys() else: keys = [key for key in keys if key in columns] for key in keys: if len(row_format) > 0: row_format += "|" row_format += "%%(%s)-%ds" % (key, column...
828,884
Print sequence as ascii table to stdout. Args: sequence (list or tuple): a sequence with a dictionary each entry. keys (list): optional list of keys to order columns as well as to filter for them.
def pprint(sequence, keys=None): if len(sequence) > 0: columns = calculate_columns(sequence) row_format = calculate_row_format(columns, keys) header = row_format % dict([(key, key.title()) for key in columns]) separator = row_format % dict([(key, '-' * columns[key]) for key in c...
828,885
Run pipelines in parallel. Args: data(dict): parameters for the pipeline (model, options, ...). Returns: dict: with two fields: success True/False and captured output (list of str).
def matrix_worker(data): matrix = data['matrix'] Logger.get_logger(__name__ + '.worker').info( "Processing pipeline for matrix entry '%s'", matrix['name']) env = matrix['env'].copy() env.update({'PIPELINE_MATRIX': matrix['name']}) pipeline = Pipeline(model=data['model'], env=env, opti...
828,886
Check given matrix tags to be in the given list of matric tags. Args: entry (dict): matrix item (in yaml). matrix_tags (list): represents --matrix-tags defined by user in command line. Returns: bool: True when matrix entry can be processed.
def can_process_matrix(entry, matrix_tags):
    """Check whether a matrix entry matches the given matrix tags.

    Args:
        entry (dict): matrix item (from yaml); may contain a 'tags' list.
        matrix_tags (list): tags given via --matrix-tags on the command line.

    Returns:
        bool: True when the matrix entry can be processed (no tag filter
        given, or at least one of the entry's tags matches the filter).
    """
    if not matrix_tags:
        # no filter given: every matrix entry can be processed
        return True
    # an entry without tags cannot match a non-empty filter;
    # otherwise any single matching tag is sufficient
    return 'tags' in entry and any(tag in entry['tags'] for tag in matrix_tags)
828,889
Initialize application with command line options. Args: options (ApplicationOptions): given command line options.
def __init__(self, **options):
    """Initialize application with command line options.

    Args:
        options: given command line options as keyword arguments.
    """
    self.options = options
    self.logging_level = logging.DEBUG
    # logging must be configured before the first logger is requested
    self.setup_logging()
    self.logger = Logger.get_logger(__name__)
    # accumulated analysis results — presumably one entry per processed
    # file; confirm against the methods that fill this list
    self.results = []
828,975
Verify whether to ignore a path. Args: path (str): path to check. Returns: bool: True when to ignore given path.
def ignore_path(path):
    """Verify whether to ignore a path.

    Note:
        Substring matching is used (kept from the original behavior),
        so e.g. 'dist' also matches inside a segment like 'distros'.

    Args:
        path (str): path to check.

    Returns:
        bool: True when the given path should be ignored.
    """
    ignored_names = ('.tox', 'dist', 'build', 'node_modules', 'htmlcov')
    return any(name in path for name in ignored_names)
828,977
Iterating files for given extensions. Args: supported_extensions (list): supported file extensions for which to check loc and com. Returns: str: yield each full path and filename found.
def walk_files_for(paths, supported_extensions): for path in paths: for root, _, files in os.walk(path): if Application.ignore_path(root.replace(path, '')): continue for filename in files: extension = os.path.splitext(...
828,978
Find out lines of code and lines of comments. Args: path_and_filename (str): path and filename to parse for loc and com. pattern (str): regex to search for line comments and block comments Returns: int, int: loc and com for given file.
def analyse(self, path_and_filename, pattern): with open(path_and_filename) as handle: content = handle.read() loc = content.count('\n') + 1 com = 0 for match in re.findall(pattern, content, re.DOTALL): com += match.count('\n') + 1 ...
828,979
Get name and version of a tool defined by given command. Args: tool_name (str): name of the tool. tool_command (str): Bash one line command to get the version of the tool. Returns: dict: tool name and version or empty when no line has been found
def get_version(tool_name, tool_command): result = {} for line in Bash(ShellConfig(script=tool_command, internal=True)).process(): if line.find("command not found") >= 0: VersionsCheck.LOGGER.error("Required tool '%s' not found (stopping pipeline)!", tool_name) ...
829,157
Registers new events after instance creation Args: *names (str): Name or names of the events to register
def register_event(self, *names):
    """Register new events after instance creation.

    Names that are already registered are left untouched.

    Args:
        *names (str): name or names of the events to register.
    """
    for event_name in names:
        if event_name not in self.__events:
            self.__events[event_name] = Event(event_name)
829,165
Dispatches an event to any subscribed listeners Note: If a listener returns :obj:`False`, the event will stop dispatching to other listeners. Any other return value is ignored. Args: name (str): The name of the :class:`Event` to dispatch *args (Optional)...
def emit(self, name, *args, **kwargs):
    """Dispatch an event to any subscribed listeners.

    Note:
        If a listener returns False, the event stops dispatching to
        other listeners; any other return value is ignored.

    Args:
        name (str): the name of the Event to dispatch.
        *args: positional arguments passed to each listener.
        **kwargs: keyword arguments passed to each listener.
    """
    event = self.__property_events.get(name)
    if event is None:
        # not a property event: must be a registered plain event
        event = self.__events[name]
    return event(*args, **kwargs)
829,168
Retrieves an Event object by name Args: name (str): The name of the :class:`Event` or :class:`~pydispatch.properties.Property` object to retrieve Returns: The :class:`Event` instance for the event or property definition .. versionadded:: 0.1.0
def get_dispatcher_event(self, name):
    """Retrieve an Event object by name.

    Args:
        name (str): the name of the Event or Property object to retrieve.

    Returns:
        The Event instance for the event or property definition.
    """
    # property events take precedence; fall back to plain events
    event = self.__property_events.get(name)
    return self.__events[name] if event is None else event
829,169
Initialize application with command line options. Args: options (ApplicationOptions): given command line options.
def __init__(self, options):
    """Initialize application with command line options.

    Args:
        options (ApplicationOptions): given command line options.
    """
    # event instance bound to this module's name
    self.event = Event.create(__name__)
    self.options = options
    self.logging_level = logging.DEBUG
    # logging must be configured before the first logger is requested
    self.setup_logging()
    self.logger = Logger.get_logger(__name__)
829,271
Running pipeline via a matrix. Args: matrix_definition (dict): one concrete matrix item. document (dict): spline document (complete) as loaded from yaml file.
def run_matrix(self, matrix_definition, document): matrix = Matrix(matrix_definition, 'matrix(parallel)' in document) process_data = MatrixProcessData() process_data.options = self.options process_data.pipeline = document['pipeline'] process_data.model = {} if 'model' n...
829,274
Find **stages** in document. Args: document (dict): validated spline document loaded from a yaml file. Returns: list: stages as a part of the spline document or an empty list if not given. >>> find_stages({'pipeline': [{'stage(Prepare)':1}, {'stage(Build)':1}, {'stage(Deploy)':2}]}) ...
def find_stages(document): names = [] if 'pipeline' in document: for entry in document['pipeline']: # each entry is dictionary with one key only key, _ = list(entry.items())[0] if key.startswith("stage("): names.append(key.replace('stage(', '').re...
829,283
Initializing and validating fields. Args: kwargs (dict): application command line options. Raises: RuntimeError: when validation of parameters has failed.
def __init__(self, **kwargs): try: arguments = Adapter(CollectorUpdate.schema_complete().validate(kwargs)) self.matrix = arguments.matrix self.stage = arguments.stage self.timestamp = arguments.timestamp self.status = arguments.status ...
829,527
Initializing and validating fields. Args: kwargs (dict): application command line options. Raises: RuntimeError: when validation of parameters has failed.
def __init__(self, **kwargs): try: arguments = Adapter(CollectorStage.schema_complete().validate(kwargs)) self.stage = arguments.stage self.status = arguments.status self.events = arguments.events except SchemaError as exception: Logge...
829,530
Add event information. Args: timestamp (int): event timestamp. information (dict): event information. Raises: RuntimeError: when validation of parameters has failed.
def add(self, timestamp, information): try: item = Schema(CollectorStage.schema_event_items()).validate({ 'timestamp': timestamp, 'information': information }) self.events.append(item) except SchemaError as exception: Logger.get_lo...
829,531
Number of registered stages for given matrix name. Parameters: matrix_name (str): name of the matrix Returns: int: number of reported stages for given matrix name.
def count_stages(self, matrix_name):
    """Number of registered stages for given matrix name.

    Args:
        matrix_name (str): name of the matrix.

    Returns:
        int: number of reported stages for the given matrix name
        (0 when the matrix is unknown).
    """
    stages = self.data.get(matrix_name)
    return 0 if stages is None else len(stages)
829,533
Get duration for a concrete matrix. Args: matrix_name (str): name of the Matrix. Returns: float: duration of concrete matrix in seconds.
def get_duration(self, matrix_name):
    """Get duration for a concrete matrix.

    Args:
        matrix_name (str): name of the matrix.

    Returns:
        float: duration of the concrete matrix in seconds
        (0.0 when the matrix is unknown).
    """
    total = 0.0
    for stage in self.data.get(matrix_name, []):
        total += stage.duration()
    return total
829,535
Add a collector item. Args: item (CollectorUpdate): event data like stage, timestamp and status.
def update(self, item): if item.matrix not in self.data: self.data[item.matrix] = [] result = Select(self.data[item.matrix]).where( lambda entry: entry.stage == item.stage).build() if len(result) > 0: stage = result[0] stage.status = ite...
829,536
Initializing pipeline with definition (loaded from a yaml file). Args: model (dict): if you have a model defined in your pipeline definition (yaml) env (dict): the env as defined (if any) per matrix options (dict): command line options for spline
def __init__(self, model=None, env=None, options=None): self.event = Event.create(__name__) self.options = options self.model = {} if not isinstance(model, dict) else model self.data = PipelineData() self.data.env_list[0].update([] if env is None else env) self.l...
829,581
Validate data against the schema. Args: data(dict): data structure to validate. Returns: dict: data as provided and defaults where defined in schema.
def validate(data):
    """Validate data against the schema.

    Args:
        data (dict): data structure to validate.

    Returns:
        dict: data as provided plus defaults where defined in the
        schema, or None when validation failed.
    """
    try:
        return Schema(Validator.SCHEMA).validate(data)
    except SchemaError as error:
        # log the validation failure and signal it to the caller via None
        logging.getLogger(__name__).error(error)
        return None
829,624
Generating a temporary file with content. Args: content (str): file content (usually a script, Dockerfile, playbook or config file) prefix (str): the filename starts with this prefix (default: no prefix) suffix (str): the filename ends with this suffix (default: no suffix) Returns: ...
def write_temporary_file(content, prefix='', suffix=''):
    """Generate a temporary file with given content.

    Args:
        content (str): file content (usually a script, Dockerfile,
            playbook or config file).
        prefix (str): the filename starts with this prefix (default: none).
        suffix (str): the filename ends with this suffix (default: none).

    Returns:
        str: path and name of the created temporary file.

    Note:
        The caller is responsible for deleting the file.
    """
    with tempfile.NamedTemporaryFile(prefix=prefix, suffix=suffix,
                                     mode='w+t', delete=False) as temp:
        # write() instead of writelines(): content is a single string,
        # not a sequence of lines (writelines iterates it char by char)
        temp.write(content)
    return temp.name
829,666
Add an instance method or function Args: m: The instance method or function to store
def add_method(self, m, **kwargs):
    """Add an instance method or function.

    Args:
        m: the instance method or function to store.
    """
    if isinstance(m, types.FunctionType):
        # plain functions are keyed by a ('function', id) tuple
        self[('function', id(m))] = m
    else:
        func, instance = get_method_vars(m)
        # bound methods are keyed by (function, instance id), storing
        # the owning instance as the value
        self[(func, id(instance))] = instance
829,778
Remove an instance method or function if it exists Args: m: The instance method or function to remove
def del_method(self, m):
    """Remove an instance method or function if it exists.

    Args:
        m: the instance method or function to remove.
    """
    if isinstance(m, types.FunctionType) and not iscoroutinefunction(m):
        key = ('function', id(m))
    else:
        func, instance = get_method_vars(m)
        key = (func, id(instance))
    # silently do nothing when the key was never stored
    if key in self:
        del self[key]
829,779
Remove any stored instance methods that belong to an object Args: obj: The instance object to remove
def del_instance(self, obj):
    """Remove any stored instance methods that belong to an object.

    Args:
        obj: the instance object to remove.
    """
    # collect keys first: deleting while iterating would corrupt iteration
    stale_keys = {wrkey for wrkey, held in self.iter_instances() if held is obj}
    for wrkey in stale_keys:
        del self[wrkey]
829,780
Initialize with Bash code and optional environment variables. Args: config(ShellConfig): options for configuring Bash environment and behavior
def __init__(self, config): self.event = Event.create(__name__) self.logger = Logger.get_logger(__name__) self.config = config self.success = True self.env = {} self.env.update(config.env) self.stdout = subprocess.PIPE self.stderr = subprocess.ST...
829,852
Create a temporary, executable bash file. It also renders the given script (string) with the model and the provided environment variables and optionally also an item when using the B{with} field. Args: script (str): either path and filename or Bash code. Returns: ...
def create_file_for(self, script): temp = tempfile.NamedTemporaryFile( prefix="pipeline-script-", mode='w+t', suffix=".sh", delete=False, dir=self.get_temporary_scripts_path()) self.update_environment_variables(temp.name) rendered_script = render(script, model=s...
829,855
Generating HTML report. Args: store (Store): report data. Returns: str: rendered HTML template.
def generate_html(store): spline = { 'version': VERSION, 'url': 'https://github.com/Nachtfeuer/pipeline', 'generated': datetime.now().strftime("%A, %d. %B %Y - %I:%M:%S %p") } html_template_file = os.path.join(os.path.dirname(__file__), 'templates/report.html.j2') with open...
829,937