code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def extract_props(self, settings):
    """Extract all valuable properties to be displayed.

    Builds a mapping from each known call parameter to its value in
    ``settings``; parameters absent from ``settings`` map to ``None``.
    """
    return {
        param: settings[param] if param in settings else None
        for param in self.call_parameters
    }
Extract all valuable properties to be displayed
def clear_cache(self):
    """Completely clear the cache.

    Deletes both cache directories if present; collects (rather than
    raises) any OS-level failures.

    :return: list of human-readable error strings (empty on full success)
    """
    failures = []
    for directory in (self.cache_root, self.file_list_cachedir):
        if not os.path.exists(directory):
            continue
        try:
            shutil.rmtree(directory)
        except OSError as exc:
            failures.append(
                'Unable to delete {0}: {1}'.format(directory, exc)
            )
    return failures
Completely clear cache
def get_or_create_hosted_zone(client, zone_name):
    """Get the Id of an existing zone, or create it.

    Args:
        client (botocore.client.Route53): connection used to talk to Route53.
        zone_name (str): name of the DNS hosted zone to look up or create.

    Returns:
        str: the Id of the hosted zone.
    """
    existing_id = get_hosted_zone_by_name(client, zone_name)
    if existing_id:
        return existing_id
    logger.debug("Zone %s does not exist, creating.", zone_name)
    # Route53 requires a unique caller reference per create call.
    caller_ref = uuid.uuid4().hex
    created = client.create_hosted_zone(Name=zone_name,
                                        CallerReference=caller_ref)
    return parse_zone_id(created["HostedZone"]["Id"])
Get the Id of an existing zone, or create it. Args: client (:class:`botocore.client.Route53`): The connection used to interact with Route53's API. zone_name (string): The name of the DNS hosted zone to create. Returns: string: The Id of the Hosted Zone.
def _get_domain_id(self, domain_text_element):
    """Return the easyname id of the domain.

    Walks up from the domain text element to its table row, finds the
    id-bearing link in the 'td_2' cell and returns its final URL segment.

    :raises AssertionError: if the id cannot be extracted from the markup.
    """
    try:
        tr_anchor = domain_text_element.parent.parent.parent
        td_anchor = tr_anchor.find('td', {'class': 'td_2'})
        link = td_anchor.find('a')['href']
        return link.rsplit('/', 1)[-1]
    except Exception as error:
        # BUG FIX: previously the message was a (fmt, args) tuple, so the
        # warning and the AssertionError carried an unformatted tuple
        # instead of a readable string.
        errmsg = ('Cannot get the domain id even though the domain seems '
                  'to exist ({0}).'.format(error))
        LOGGER.warning(errmsg)
        raise AssertionError(errmsg)
Return the easyname id of the domain.
def linear_weighted_moving_average(data, period):
    """Linear Weighted Moving Average.

    Formula: LWMA = SUM(DATA[i] * i) / SUM(i) over each window, with
    weights 1..period (newest value weighted highest).

    BUG FIX: the original recovered weights via ``window.index(value)``,
    which assigns the wrong weight whenever a window contains duplicate
    values; weights are now paired positionally with ``enumerate``.
    """
    catch_errors.check_for_period_error(data, period)
    # Sum of weights 1 + 2 + ... + period is window-size invariant.
    weight_total = sum(range(1, period + 1))
    lwma = [
        sum(weight * value
            for weight, value in enumerate(data[idx - period + 1:idx + 1],
                                           start=1)) / weight_total
        for idx in range(period - 1, len(data))
    ]
    lwma = fill_for_noncomputable_vals(data, lwma)
    return lwma
Linear Weighted Moving Average. Formula: LWMA = SUM(DATA[i] * i) / SUM(i), where the weight i runs from 1 to the period within each window.
def _start_io_loop(self):
    """Start the IOLoop, then set the ready threading.Event.

    The readiness callback runs once the loop is actually processing
    callbacks; ``start()`` blocks the calling thread.
    """
    def _signal_ready():
        self._ready.set()

    if not self._io_loop:
        self._io_loop = ioloop.IOLoop()
    self._io_loop.add_callback(_signal_ready)
    self._io_loop.start()
Start IOLoop then set ready threading.Event.
def _format_select(formatter, name): for caster in formatter.split('-'): if caster == 'SEC_TO_MICRO': name = "%s*1000000" % name elif ':' in caster: caster, args = caster.split(':') name = "%s(%s,%s)" % (caster, name, args) else: name = "%s(%s)" % (caster, name) return name
Modify the query selector by applying any formatters to it. Parameters ---------- formatter : str Hyphen-delimited formatter string where formatters are applied inside-out, e.g. the formatter string SEC_TO_MICRO-INTEGER-FORMAT_UTC_USEC applied to the selector foo would result in FORMAT_UTC_USEC(INTEGER(foo*1000000)). name: str The name of the selector to apply formatters to. Returns ------- str The formatted selector
def clear_sequestered(self):
    """Clears the sequestered flag, restoring its default value.

    raise: NoAccess - if ``Metadata.isReadOnly()`` or
    ``Metadata.isRequired()`` is true.
    """
    if self.get_sequestered_metadata().is_read_only():
        raise errors.NoAccess()
    if self.get_sequestered_metadata().is_required():
        raise errors.NoAccess()
    self._my_map['sequestered'] = self._sequestered_default
Clears the sequestered flag. raise: NoAccess - ``Metadata.isRequired()`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
def cookiecutter(template, checkout=None, no_input=False, extra_context=None):
    """Replacement for cookiecutter's own cookiecutter.

    Unlike upstream, values passed to the template are not str()-coerced.

    :param template: template directory, or a URL to a git repository.
    :param checkout: branch, tag or commit ID to check out after clone.
    :param no_input: prompt the user for manual configuration?
    :param extra_context: dict of context overriding defaults and user config.
    """
    config_dict = get_user_config()
    template = expand_abbreviations(template, config_dict)
    # A remote template is cloned locally first; a local path is used as-is.
    is_repo_url = 'git@' in template or 'https://' in template
    if is_repo_url:
        repo_dir = clone(
            repo_url=template,
            checkout=checkout,
            clone_to_dir=config_dict['cookiecutters_dir'],
            no_input=no_input
        )
    else:
        repo_dir = template
    context_file = os.path.join(repo_dir, 'cookiecutter.json')
    logging.debug('context_file is {0}'.format(context_file))
    context = generate_context(
        context_file=context_file,
        default_context=config_dict['default_context'],
        extra_context=extra_context,
    )
    generate_files(
        repo_dir=repo_dir,
        context=context
    )
Replacement for cookiecutter's own cookiecutter. The difference with cookiecutter's cookiecutter function is that this one doesn't automatically str() all the values passed along to the template. :param template: A directory containing a project template directory, or a URL to a git repository. :param checkout: The branch, tag or commit ID to checkout after clone. :param no_input: Prompt the user at command line for manual configuration? :param extra_context: A dictionary of context that overrides default and user configuration.
def emoji_list(server, n=1):
    """Return a list of `n` random emoji.

    Lazily initializes the module-level EmojiCache on first use.
    """
    global EMOJI
    EMOJI = EMOJI if EMOJI is not None else EmojiCache(server)
    return EMOJI.get(n)
return a list of `n` random emoji
def seen_nonce(id, nonce, timestamp):
    """Returns True if the Hawk nonce has been seen already.

    A fresh nonce is cached slightly longer (+5s) than the Hawk message
    expiration so replays at the boundary are still caught.
    """
    key = '{id}:{n}:{ts}'.format(id=id, n=nonce, ts=timestamp)
    if cache.get(key):
        log.warning('replay attack? already processed nonce {k}'
                    .format(k=key))
        return True
    log.debug('caching nonce {k}'.format(k=key))
    ttl = getattr(settings, 'HAWK_MESSAGE_EXPIRATION',
                  default_message_expiration) + 5
    cache.set(key, True, timeout=ttl)
    return False
Returns True if the Hawk nonce has been seen already.
def folders(self):
    """Gets the property value for folders, lazily initializing and
    loading the folder list when needed."""
    if self._folders is None:
        self.__init()
    # An initialized-but-empty list means the folders were never fetched.
    if isinstance(self._folders, list) and not self._folders:
        self._loadFolders()
    return self._folders
gets the property value for folders
def MaxPool(a, k, strides, padding, data_format):
    """Maximum pooling op.

    NCHW input is rolled to channels-last before patch extraction and
    rolled back afterwards. Returns a one-element tuple (op outputs).
    """
    nchw = data_format.decode("ascii") == "NCHW"
    if nchw:
        # BUG FIX: the original line ended with a stray trailing comma,
        # which wrapped `a` in a tuple before it reached _pool_patches.
        a = np.rollaxis(a, 1, -1)
    patches = _pool_patches(a, k, strides, padding.decode("ascii"))
    pool = np.amax(patches, axis=tuple(range(-len(k), 0)))
    if nchw:
        pool = np.rollaxis(pool, -1, 1)
    return pool,
Maximum pooling op.
def add_quantity_modifier(self, quantity, modifier, overwrite=False):
    """Add a quantity modifier.

    Parameters
    ----------
    quantity : str
        name of the derived quantity to add
    modifier : None or str or tuple
        modifier definition (see `add_derived_quantity`)
    overwrite : bool, optional
        if False and `quantity` is already registered, raise ValueError
    """
    already_defined = quantity in self._quantity_modifiers
    if already_defined and not overwrite:
        raise ValueError('quantity `{}` already exists'.format(quantity))
    self._quantity_modifiers[quantity] = modifier
    # Sanity-check the referenced native quantities without raising.
    self._check_quantities_exist([quantity], raise_exception=False)
Add a quantity modifier. Consider using the high-level function `add_derived_quantity` instead! Parameters ---------- quantity : str name of the derived quantity to add modifier : None or str or tuple If the quantity modifier is a tuple of length >=2 and the first element is a callable, it should be in the format of `(callable, native quantity 1, native quantity 2, ...)`. And the modifier would work as callable(native quantity 1, native quantity 2, ...) If the quantity modifier is None, the quantity will be used as the native quantity name Otherwise, the modifier is used directly as a native quantity name overwrite : bool, optional If False and the quantity is already specified in _quantity_modifiers, raise a ValueError
def domain_search(self, domain=None, company=None, limit=None, offset=None,
                  emails_type=None, raw=False):
    """Return all the email addresses found for a given domain.

    :param domain: domain to search; required if `company` is not given.
    :param company: company name to search; required if `domain` is not given.
    :param limit: maximum number of emails returned (API default 10).
    :param offset: number of emails to skip (API default 0).
    :param emails_type: 'personal' or 'generic'.
    :param raw: return the entire response instead of just the 'data'.
    :return: full payload of the query as a dict.
    :raises MissingCompanyError: when neither domain nor company is given.
    """
    if not domain and not company:
        raise MissingCompanyError(
            'You must supply at least a domain name or a company name'
        )
    if domain:
        params = {'domain': domain, 'api_key': self.api_key}
    else:
        params = {'company': company, 'api_key': self.api_key}
    # Optional query parameters are only sent when truthy.
    for key, value in (('limit', limit), ('offset', offset),
                       ('type', emails_type)):
        if value:
            params[key] = value
    endpoint = self.base_endpoint.format('domain-search')
    return self._query_hunter(endpoint, params, raw=raw)
Return all the email addresses found for a given domain. :param domain: The domain on which to search for emails. Must be defined if company is not. :param company: The name of the company on which to search for emails. Must be defined if domain is not. :param limit: The maximum number of emails to give back. Default is 10. :param offset: The number of emails to skip. Default is 0. :param emails_type: The type of emails to give back. Can be one of 'personal' or 'generic'. :param raw: Gives back the entire response instead of just the 'data'. :return: Full payload of the query as a dict, with email addresses found.
def read_from_buffer(cls, buf, identifier_str=None):
    """Load the context from a buffer.

    Delegates to ``cls._read_from_buffer``; any failure is routed through
    ``cls._load_error`` (which may raise or swallow the error).
    """
    try:
        result = cls._read_from_buffer(buf, identifier_str)
    except Exception as e:
        cls._load_error(e, identifier_str)
    else:
        return result
Load the context from a buffer.
def upload(self, href, vobject_item):
    """Upload a new or replace an existing item.

    Returns the freshly fetched item (or nothing in fake mode).
    """
    if self.is_fake:
        return
    content = vobject_item.serialize()
    try:
        # Replace: reuse the backing etesync item of the existing entry.
        existing = self.get(href)
        etesync_item = existing.etesync_item
        etesync_item.content = content
    except api.exceptions.DoesNotExist:
        # Create: build a brand-new content object on the collection.
        etesync_item = self.collection.get_content_class().create(
            self.collection, content)
    etesync_item.save()
    return self.get(href)
Upload a new or replace an existing item.
def to_unix_ms(dt: datetime) -> int:
    """Convert a datetime to milliseconds since the 1970 epoch.

    Naive datetimes are measured against the naive epoch; aware ones
    against the timezone-aware epoch.
    """
    offset = dt.utcoffset()
    reference = epoch_tz if offset is not None else epoch
    return as_int((dt - reference).total_seconds() * 1000)
Convert a datetime to the number of milliseconds since the 1970 epoch, taking any timezone offset into account.
def extract_links(html):
    """Extract a list of hyperlinks from an HTML document.

    Relative hrefs (starting with '/') are made absolute against
    reddit.com. Anchors with no href are skipped.
    """
    soup = BeautifulSoup(html, 'html.parser')
    results = []
    for anchor in soup.findAll('a'):
        href = anchor.get('href')
        if not href:
            continue
        if href.startswith('/'):
            href = 'https://www.reddit.com' + href
        results.append({'text': anchor.text, 'href': href})
    return results
Extract a list of hyperlinks from an HTML document.
def _get_iris_args(attrs):
    """Converts the xarray attrs into args that can be passed into Iris.

    Forbidden keys are filtered out; 'units' (with optional 'calendar')
    is converted to a cf_units.Unit.
    """
    import cf_units
    args = {'attributes': _filter_attrs(attrs, iris_forbidden_keys)}
    args.update(_pick_attrs(attrs, ('standard_name', 'long_name',)))
    unit_kwargs = _pick_attrs(attrs, ('calendar',))
    if 'units' in attrs:
        args['units'] = cf_units.Unit(attrs['units'], **unit_kwargs)
    return args
Converts the xarray attrs into args that can be passed into Iris
def remove_existing_fpaths(fpath_list, verbose=VERBOSE, quiet=QUIET,
                           strict=False, print_caller=PRINT_CALLER,
                           lbl='files'):
    """Checks existence before removing; deletes only the paths that
    actually exist.

    Filters out None entries and non-existent paths, then delegates the
    actual deletion to ``remove_fpaths``.
    """
    import utool as ut
    if print_caller:
        print(util_dbg.get_caller_name(range(1, 4)) + ' called remove_existing_fpaths')
    # Drop None entries before probing the filesystem.
    fpath_list_ = ut.filter_Nones(fpath_list)
    exists_list = list(map(exists, fpath_list_))
    if verbose:
        n_total = len(fpath_list)
        n_valid = len(fpath_list_)
        n_exist = sum(exists_list)
        print('[util_path.remove_existing_fpaths] request delete of %d %s' % (
            n_total, lbl))
        if n_valid != n_total:
            print(('[util_path.remove_existing_fpaths] '
                   'trying to delete %d/%d non None %s ') % (n_valid, n_total, lbl))
        print(('[util_path.remove_existing_fpaths] '
               ' %d/%d exist and need to be deleted') % (n_exist, n_valid))
    # Keep only paths that exist, then hand off to the real remover.
    existing_fpath_list = ut.compress(fpath_list_, exists_list)
    return remove_fpaths(existing_fpath_list, verbose=verbose, quiet=quiet,
                         strict=strict, print_caller=False, lbl=lbl)
Checks existence before removing, then tries to remove the existing paths.
def resolve_egg_link(path):
    """Given a path to an .egg-link, resolve distributions present in
    the referenced path."""
    base_dir = os.path.dirname(path)
    # Each non-empty line of the .egg-link is a path relative to the link.
    resolved = (
        os.path.join(base_dir, ref)
        for ref in non_empty_lines(path)
    )
    return next(map(find_distributions, resolved), ())
Given a path to an .egg-link, resolve distributions present in the referenced path.
def read(self, mode='r', *, buffering=-1, encoding=None, newline=None):
    """Read and return the full contents of the file."""
    opened = self.open(mode=mode, buffering=buffering,
                       encoding=encoding, newline=newline)
    with opened as fp:
        return fp.read()
read data from the file.
def use_federated_log_view(self):
    """Pass through to provider LogEntryLookupSession.use_federated_log_view."""
    self._log_view = FEDERATED
    for provider_session in self._get_provider_sessions():
        try:
            provider_session.use_federated_log_view()
        except AttributeError:
            # Provider session does not support federated views; skip it.
            pass
Pass through to provider LogEntryLookupSession.use_federated_log_view
def sheetDelete(book=None, sheet=None):
    """Delete a sheet from a book.

    If no book is given, the active book is used. Nothing happens when
    the sheet name is not present in the book.
    """
    if book is None:
        book = activeBook()
    # NOTE(review): despite the original docstring, a missing `sheet`
    # is NOT replaced by the active sheet — the call is just a no-op.
    names = sheetNames()
    if sheet in names:
        PyOrigin.WorksheetPages(book).Layers(names.index(sheet)).Destroy()
Delete a sheet from a book. If either isn't given, use the active one.
def csch(x, context=None):
    """Return the hyperbolic cosecant of x."""
    operand = (BigFloat._implicit_convert(x),)
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_csch,
        operand,
        context,
    )
Return the hyperbolic cosecant of x.
def codes_get_double_array(handle, key, size):
    """Get double array values from a key.

    :param key: keyword whose value(s) are to be extracted
    :param size: number of doubles to allocate for the result
    :rtype: list of float
    """
    out_values = ffi.new('double[]', size)
    out_size = ffi.new('size_t *', size)
    _codes_get_double_array(handle, key.encode(ENC), out_values, out_size)
    return list(out_values)
Get double array values from a key. :param bytes key: the keyword whose value(s) are to be extracted :rtype: T.List(float)
def _change(self, changes): if changes is None: return for position, new_tile in changes: self._array[position] = new_tile
Apply the given changes to the board. changes: sequence of (position, new tile) pairs or None
def set_modules(self, modules=None):
    """Sets the modules Model nodes.

    :param modules: Modules to set (falls back to ``self.get_modules()``).
    :type modules: list
    :return: Method success.
    :rtype: bool
    """
    node_flags = int(Qt.ItemIsSelectable | Qt.ItemIsEnabled)
    modules = modules or self.get_modules()
    root_node = umbra.ui.nodes.DefaultNode(name="InvisibleRootNode")
    for module in modules:
        # ModuleNode attaches itself to root_node via parent=.
        module_node = ModuleNode(module=module,
                                 name=foundations.strings.to_string(module.__name__),
                                 parent=root_node,
                                 node_flags=node_flags,
                                 attributes_flags=int(Qt.ItemIsSelectable | Qt.ItemIsEnabled))
    # NOTE(review): sort placed after the loop — confirm against original
    # indentation (source was collapsed onto one line).
    root_node.sort_children()
    self.__model.initialize_model(root_node)
    return True
Sets the modules Model nodes. :param modules: Modules to set. :type modules: list :return: Method success. :rtype: bool
def fetch_mga_scores(mga_vec, codon_pos, default_mga=None):
    """Get MGAEntropy scores from pre-computed scores in an array.

    Positions past the end of ``mga_vec`` are dropped; if none remain,
    None is returned.

    Parameters
    ----------
    mga_vec : np.array
        MGA Entropy conservation scores for residues
    codon_pos : list of int
        positions of codons in the protein sequence
    default_mga : float or None
        currently unused placeholder default

    Returns
    -------
    np.array or None
        scores at the valid positions
    """
    vec_len = len(mga_vec)
    valid_positions = [pos for pos in codon_pos if pos < vec_len]
    if not valid_positions:
        return None
    return mga_vec[valid_positions]
Get MGAEntropy scores from pre-computed scores in array. Parameters ---------- mga_vec : np.array numpy vector containing MGA Entropy conservation scores for residues codon_pos: list of int position of codon in protein sequence default_mga: float or None, default=None value to use if MGA entropy score not available for a given mutation. Drop mutations if no default specified. Returns ------- mga_ent_scores : np.array score results for MGA entropy conservation
def format_pkg_list(packages, versions_as_list, attr):
    """Formats packages according to parameters for list_pkgs.

    With ``attr`` set, each package maps to a list of dicts restricted to
    the requested attributes; otherwise each maps to formatted version
    strings (optionally collapsed via ``stringify``).
    """
    ret = copy.deepcopy(packages)
    if attr:
        # Attributes the caller may request; 'version' is always kept.
        requested_attr = {'epoch', 'version', 'release', 'arch',
                          'install_date', 'install_date_time_t'}
        if attr != 'all':
            # NOTE(review): assumes `attr` is a list here — confirm callers.
            requested_attr &= set(attr + ['version'])
        for name in ret:
            versions = []
            for all_attr in ret[name]:
                filtered_attr = {}
                for key in requested_attr:
                    # Falsy values (e.g. missing epoch) are omitted.
                    if all_attr[key]:
                        filtered_attr[key] = all_attr[key]
                versions.append(filtered_attr)
            ret[name] = versions
        return ret
    for name in ret:
        ret[name] = [format_version(d['epoch'], d['version'], d['release'])
                     for d in ret[name]]
    if not versions_as_list:
        # stringify mutates `ret` in place.
        stringify(ret)
    return ret
Formats packages according to parameters for list_pkgs.
def connect(host=None, port=rethinkdb.DEFAULT_PORT, timeout=20, verify=True, **kwargs):
    """RethinkDB semantic connection wrapper.

    Retries every 0.5s until the timeout elapses.

    :param verify: whether to run the POST verification (default True)
    :param timeout: max time in seconds to wait for a connection
    :param kwargs: passthrough rethinkdb arguments
    :raises BrainNotReady: if no verified connection is obtained in time
    """
    if not host:
        host = DEFAULT_HOSTS.get(check_stage_env())
    connection = None
    attempts = 0
    deadline = time() + timeout
    while not connection and time() <= deadline:
        attempts += 1
        connection = _attempt_connect(host, port, timeout/3, verify, **kwargs)
        if not connection:
            sleep(0.5)
    if not connection:
        raise BrainNotReady(
            "Tried ({}:{}) {} times at {} second max timeout".format(host,
                                                                     port,
                                                                     attempts,
                                                                     timeout))
    return connection
RethinkDB semantic connection wrapper raises <brain.connection.BrainNotReady> if connection verification fails :param verify: <bool> (default True) whether to run POST :param timeout: <int> max time (s) to wait for connection :param kwargs: <dict> passthrough rethinkdb arguments :return:
def upsert_into(self, table):
    """Upsert from a temporary table into another table.

    Conflicting rows are silently skipped; returns the affected row count.
    """
    statement = insert(table).from_select(
        self.c,
        self,
    ).on_conflict_do_nothing()
    return SessionContext.session.execute(statement).rowcount
Upsert from a temporary table into another table.
def _get_visualization_classes():
    """Import visualization classes dynamically.

    Scans every submodule of ``picasso.visualizations`` for concrete
    subclasses of BaseVisualization.
    """
    pkg_attrs = vars(import_module('picasso.visualizations'))
    submodules = [value for value in pkg_attrs.values()
                  if isinstance(value, ModuleType)]
    found = []
    for submodule in submodules:
        for candidate in vars(submodule).values():
            if (inspect.isclass(candidate)
                    and issubclass(candidate, BaseVisualization)
                    and candidate is not BaseVisualization):
                found.append(candidate)
    return found
Import visualizations classes dynamically
def get_ip(host):
    """Return the ip associated with the named host.

    CLI Example:

    .. code-block:: bash

        salt '*' hosts.get_ip <hostname>
    """
    hosts = _list_hosts()
    if not hosts:
        return ''
    # First address whose alias list contains the host wins.
    for addr, names in hosts.items():
        if host in names:
            return addr
    return ''
Return the ip associated with the named host CLI Example: .. code-block:: bash salt '*' hosts.get_ip <hostname>
def make_ifar_plot(workflow, trigger_file, out_dir, tags=None,
                   hierarchical_level=None):
    """Creates a node in the workflow for plotting a cumulative histogram
    of IFAR values.

    Returns the node's single output (.png) file.
    """
    # Encode the hierarchical level into the tags so output names are unique.
    if hierarchical_level is not None and tags:
        tags = [("HIERARCHICAL_LEVEL_{:02d}".format(
            hierarchical_level))] + tags
    elif hierarchical_level is not None and not tags:
        tags = ["HIERARCHICAL_LEVEL_{:02d}".format(hierarchical_level)]
    elif hierarchical_level is None and not tags:
        tags = []
    makedir(out_dir)
    node = PlotExecutable(workflow.cp, 'page_ifar', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--trigger-file', trigger_file)
    if hierarchical_level is not None:
        node.add_opt('--use-hierarchical-level', hierarchical_level)
    node.new_output_file_opt(workflow.analysis_time, '.png', '--output-file')
    workflow += node
    return node.output_files[0]
Creates a node in the workflow for plotting cumulative histogram of IFAR values.
def add_router_interface(self, context, router_info):
    """Adds an interface to a router created on Arista HW router.

    Handles both IPv4 and IPv6. In an MLAG pair, a single peer failing is
    tolerated; the error is raised only when both peers fail.
    """
    if router_info:
        self._select_dicts(router_info['ip_version'])
        cidr = router_info['cidr']
        subnet_mask = cidr.split('/')[1]
        router_name = self._arista_router_name(router_info['id'],
                                               router_info['name'])
        if self._mlag_configured:
            # Tracks whether the *previous* peer already failed.
            mlag_peer_failed = False
            for i, server in enumerate(self._servers):
                # Each MLAG peer gets its own router IP.
                router_ip = self._get_router_ip(cidr, i,
                                                router_info['ip_version'])
                try:
                    self.add_interface_to_router(router_info['seg_id'],
                                                 router_name,
                                                 router_info['gip'],
                                                 router_ip, subnet_mask,
                                                 server)
                    mlag_peer_failed = False
                except Exception:
                    if not mlag_peer_failed:
                        # First failure: remember it and try the other peer.
                        mlag_peer_failed = True
                    else:
                        # Both peers failed: give up loudly.
                        msg = (_('Failed to add interface to router '
                                 '%s on EOS') % router_name)
                        LOG.exception(msg)
                        raise arista_exc.AristaServicePluginRpcError(
                            msg=msg)
        else:
            for s in self._servers:
                self.add_interface_to_router(router_info['seg_id'],
                                             router_name,
                                             router_info['gip'], None,
                                             subnet_mask, s)
Adds an interface to a router created on Arista HW router. This deals with both IPv6 and IPv4 configurations.
def remove(self, member):
    """Remove element from set; it must be a member.

    :raises KeyError: if the element is not a member.
    """
    removed = self.client.srem(self.name, member)
    if not removed:
        raise KeyError(member)
Remove element from set; it must be a member. :raises KeyError: if the element is not a member.
def _at_exit(self): if self.process_exit: try: term = self.term if self.set_scroll: term.reset() else: term.move_to(0, term.height) self.term.feed() except ValueError: pass
Resets terminal to normal configuration
def _create_session(team, auth):
    """Creates a session object to be used for `push`, `install`, etc.

    Installs a response hook bound to the team and, when credentials are
    supplied, a Bearer Authorization header.
    """
    session = requests.Session()
    session.hooks.update(dict(
        response=partial(_handle_response, team)
    ))
    user_agent = "quilt-cli/%s (%s %s) %s/%s" % (
        VERSION, platform.system(), platform.release(),
        platform.python_implementation(), platform.python_version()
    )
    session.headers.update({
        "Content-Type": "application/json",
        "Accept": "application/json",
        "User-Agent": user_agent,
    })
    if auth is not None:
        session.headers["Authorization"] = "Bearer %s" % auth['access_token']
    return session
Creates a session object to be used for `push`, `install`, etc.
def _read_cache_from_file(self):
    """Read the contents of the cache from a file on disk.

    Returns an empty dict when the file is missing, unreadable, or
    contains invalid JSON.
    """
    try:
        with open(self._cache_file_name, 'r') as fp:
            return simplejson.loads(fp.read())
    except (IOError, JSONDecodeError):
        return {}
Read the contents of the cache from a file on disk.
def _walk_factory(self, dep_predicate): walk = None if dep_predicate: walk = self.DepPredicateWalk(dep_predicate) else: walk = self.NoDepPredicateWalk() return walk
Construct the right context object for managing state during a transitive walk.
def c_var_decls(self):
    """Get the needed variable definitions.

    no_structs: one HMODULE plus one pointer per function.
    windll: nothing is needed.
    otherwise: a single zero-initialized struct instance.
    """
    if self.opts.no_structs:
        decls = ['HMODULE {} = NULL;\n'.format(self.name)]
        decls.extend(
            '{} *{} = NULL;\n'.format(self._c_type_name(name), name)
            for name, dummy_args in self.funcs
        )
        return decls
    if self.opts.windll:
        return ''
    return [
        '{} _{} = {{ 0 }};\n'.format(
            self._c_struct_names()[1], self.name
        )
    ]
Get the needed variable definitions.
def wninsd(left, right, window):
    """Insert an interval into a double precision window.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wninsd_c.html

    :param left: Left endpoint of the new interval.
    :param right: Right endpoint of the new interval.
    :param window: Input window (SpiceCell of dtype 1), mutated in place.
    """
    assert isinstance(window, stypes.SpiceCell)
    assert window.dtype == 1
    c_left = ctypes.c_double(left)
    c_right = ctypes.c_double(right)
    libspice.wninsd_c(c_left, c_right, ctypes.byref(window))
Insert an interval into a double precision window. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wninsd_c.html :param left: Left endpoints of new interval. :type left: float :param right: Right endpoints of new interval. :type right: float :param window: Input window. :type window: spiceypy.utils.support_types.SpiceCell
def contained(self, key, include_self=True, exclusive=False,
              biggest_first=True, only=None):
    """Get all locations that are completely within this location.

    Without the ``resolved_row`` context manager, ``RoW`` has no spatial
    definition, so ``.contained("RoW")`` returns ``['RoW']`` only when it
    is explicitly listed in ``only`` (otherwise an empty list).
    """
    if 'RoW' not in self:
        if key == 'RoW':
            return ['RoW'] if 'RoW' in (only or []) else []
        elif only and 'RoW' in only:
            # RoW has no faces here; drop it from the candidate list.
            only.pop(only.index('RoW'))
    possibles = self.topology if only is None else {k: self[k] for k in only}
    faces = self[key]
    # Candidates whose face set is fully covered by this location's faces.
    lst = [
        (k, len(v)) for k, v in possibles.items()
        if v and faces.issuperset(v)
    ]
    return self._finish_filter(lst, key, include_self, exclusive,
                               biggest_first)
Get all locations that are completely within this location. If the ``resolved_row`` context manager is not used, ``RoW`` doesn't have a spatial definition. Therefore, ``.contained("RoW")`` returns a list with either ``RoW`` or nothing.
def get_dev_vlans(auth, url, devid=None, devip=None):
    """Issue a RESTful call to HP IMC to list the VLANs on a device.

    :param auth: requests auth object (usually auth.creds)
    :param url: base url of the IMC RS interface (usually auth.url)
    :param devid: str device id
    :param devip: str ipv4 address of the target device (resolved to an id)
    :return: list of dicts, one per vlan; {'vlan': 'no vlans'} on 409;
             an error string on request failure
    """
    if devip is not None:
        devid = get_dev_details(devip, auth, url)['id']
    get_dev_vlans_url = "/imcrs/vlan?devId=" + str(devid) + \
                        "&start=0&size=5000&total=false"
    f_url = url + get_dev_vlans_url
    try:
        # BUG FIX: the request itself must be inside the try block —
        # previously a RequestException raised here was never caught.
        response = requests.get(f_url, auth=auth, headers=HEADERS)
        if response.status_code == 200:
            dev_vlans = (json.loads(response.text))
            return dev_vlans['vlan']
        elif response.status_code == 409:
            return {'vlan': 'no vlans'}
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + ' get_dev_vlans: An Error has occured'
Function takes input of devID to issue RESTUL call to HP IMC :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :param devid: str requires devId as the only input parameter :param devip: str of ipv4 address of the target device :return: list of dictionaries where each element of the list represents one vlan on the target device :rtype: list >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.vlanm import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> vlans = get_dev_vlans('350', auth.creds, auth.url) >>> assert type(vlans) is list >>> assert 'vlanId' in vlans[0]
def parse(cls, compoundIdStr):
    """Parses the specified compoundId string and returns an instance of
    this CompoundId class.

    :raises: ObjectWithIdNotFoundException if parsing fails — this is a
        client-facing method, and a malformed identifier should read as
        "does not exist".
    """
    if not isinstance(compoundIdStr, basestring):
        raise exceptions.BadIdentifierException(compoundIdStr)
    try:
        deobfuscated = cls.deobfuscate(compoundIdStr)
    except TypeError:
        # Happens when the string is not valid base64-ish input.
        raise exceptions.ObjectWithIdNotFoundException(compoundIdStr)
    try:
        encodedSplits = cls.split(deobfuscated)
        splits = [cls.decode(split) for split in encodedSplits]
    except (UnicodeDecodeError, ValueError):
        raise exceptions.ObjectWithIdNotFoundException(compoundIdStr)
    fieldsLength = len(cls.fields)
    if cls.differentiator is not None:
        # The differentiator field is positional metadata, not a value;
        # strip it before matching splits against the field list.
        differentiatorIndex = cls.fields.index(
            cls.differentiatorFieldName)
        if differentiatorIndex < len(splits):
            del splits[differentiatorIndex]
        else:
            raise exceptions.ObjectWithIdNotFoundException(
                compoundIdStr)
        fieldsLength -= 1
    if len(splits) != fieldsLength:
        raise exceptions.ObjectWithIdNotFoundException(compoundIdStr)
    return cls(None, *splits)
Parses the specified compoundId string and returns an instance of this CompoundId class. :raises: An ObjectWithIdNotFoundException if parsing fails. This is because this method is a client-facing method, and if a malformed identifier (under our internal rules) is provided, the response should be that the identifier does not exist.
def receive_nack(self, msg):
    """Record a Nack; return a new Prepare message once the number of
    Nacks received reaches a quorum."""
    self.observe_proposal(msg.promised_proposal_id)
    # Ignore nacks for other proposals or when not tracking nacks.
    if msg.proposal_id != self.proposal_id or self.nacks_received is None:
        return
    self.nacks_received.add(msg.from_uid)
    if len(self.nacks_received) == self.quorum_size:
        return self.prepare()
Returns a new Prepare message if the number of Nacks received reaches a quorum.
def write_secret(path, **kwargs):
    """Set secret at the path in vault. The vault policy used must allow this.

    CLI Example:

    .. code-block:: bash

        salt '*' vault.write_secret "secret/my/secret" user="foo" password="bar"
    """
    log.debug('Writing vault secrets for %s at %s', __grains__['id'], path)
    # Strip salt-internal double-underscore kwargs before sending.
    data = {key: value for key, value in kwargs.items()
            if not key.startswith('__')}
    try:
        response = __utils__['vault.make_request'](
            'POST', 'v1/{0}'.format(path), json=data)
        if response.status_code == 200:
            return response.json()['data']
        elif response.status_code != 204:
            response.raise_for_status()
        return True
    except Exception as err:
        log.error('Failed to write secret! %s: %s', type(err).__name__, err)
        return False
Set secret at the path in vault. The vault policy used must allow this. CLI Example: .. code-block:: bash salt '*' vault.write_secret "secret/my/secret" user="foo" password="bar"
def backup_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
    """Return a plaintext backup of a named key.

    Supported methods:
        GET: /{mount_point}/backup/{name}. Produces: 200 application/json

    :param name: Name of the key.
    :param mount_point: The "path" the method/backend was mounted on.
    :return: The JSON response of the request.
    """
    api_path = '/v1/{mount_point}/backup/{name}'.format(
        mount_point=mount_point,
        name=name,
    )
    return self._adapter.get(url=api_path).json()
Return a plaintext backup of a named key. The backup contains all the configuration data and keys of all the versions along with the HMAC key. The response from this endpoint can be used with the /restore endpoint to restore the key. Supported methods: GET: /{mount_point}/backup/{name}. Produces: 200 application/json :param name: Name of the key. :type name: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: requests.Response
def _check_shape(s1, s2): if s1 and s2 and s1 != s2: raise ValueError("Shape mismatch detected. " + str(s1) + " v.s. " + str(s2))
check s1 == s2 if both are not None
def _resolve_user_group_names(opts):
    """Resolve user and group names in related opts.

    Mount options of the form ``uid=NAME`` / ``gid=NAME`` are rewritten
    to numeric ids; all options get octal-space ('\\040') unescaped.
    """
    # Maps the option name to the salt execution module that resolves it.
    name_id_opts = {'uid': 'user.info',
                    'gid': 'group.info'}
    for ind, opt in enumerate(opts):
        if opt.split('=')[0] in name_id_opts:
            _givenid = opt.split('=')[1]
            _param = opt.split('=')[0]
            _id = _givenid
            # Only look up non-numeric values; numeric ids pass through.
            if not re.match('[0-9]+$', _givenid):
                _info = __salt__[name_id_opts[_param]](_givenid)
                if _info and _param in _info:
                    _id = _info[_param]
            opts[ind] = _param + '=' + six.text_type(_id)
        # NOTE(review): placed at loop level so every option is unescaped —
        # confirm against original indentation (source was collapsed).
        opts[ind] = opts[ind].replace('\\040', '\\ ')
    return opts
Resolve user and group names in related opts
def setup(self, in_name=None, out_name=None, required=None, hidden=None, multiple=None, defaults=None): if in_name is not None: self.in_name = in_name if isinstance(in_name, list) else [in_name] if out_name is not None: self.out_name = out_name if required is not None: self.required = required if hidden is not None: self.hidden = hidden if multiple is not None: self.multiple = multiple if defaults is not None: self.defaults = defaults
Set the options of the block. Only the not None given options are set .. note:: a block may have multiple inputs but have only one output :param in_name: name(s) of the block input data :type in_name: str or list of str :param out_name: name of the block output data :type out_name: str :param required: whether the block will be required or not :type required: bool :param hidden: whether the block will be hidden to the user or not :type hidden: bool :param multiple: if True more than one component may be selected/ run) :type multiple: bool :param defaults: names of the selected components :type defaults: list of str, or str
def rename_experiment(self, new_name):
    """Renames the experiment on the server, then locally.

    Parameters
    ----------
    new_name : the new experiment name
    """
    logger.info('rename experiment "%s"', self.experiment_name)
    url = self._build_api_url(
        '/experiments/{experiment_id}'.format(
            experiment_id=self._experiment_id
        )
    )
    response = self._session.put(url, json={'name': new_name})
    response.raise_for_status()
    # Only update the local name after the server accepted the change.
    self.experiment_name = new_name
Renames the experiment. Parameters ---------- See also -------- :func:`tmserver.api.experiment.update_experiment` :class:`tmlib.models.experiment.ExperimentReference`
def time(self) -> Time:
    """Generate a random time object.

    :return: ``datetime.time`` object with random hour, minute, second
        and microsecond.
    """
    rng = self.random
    return time(
        rng.randint(0, 23),
        rng.randint(0, 59),
        rng.randint(0, 59),
        rng.randint(0, 999999),
    )
Generate a random time object. :return: ``datetime.time`` object.
def sort_by_priority(iterable, reverse=False, default_priority=10):
    """Return a list of objects sorted by their ``priority`` attribute.

    Objects lacking the attribute sort at ``default_priority``.
    """
    def priority_of(obj):
        return getattr(obj, 'priority', default_priority)

    return sorted(iterable, key=priority_of, reverse=reverse)
Return a list or objects sorted by a priority value.
def list_vhosts(runas=None):
    """Return a list of vhosts based on rabbitmqctl list_vhosts.

    CLI Example:

    .. code-block:: bash

        salt '*' rabbitmq.list_vhosts
    """
    if runas is None and not salt.utils.platform.is_windows():
        runas = salt.utils.user.get_user()
    command = [RABBITMQCTL, 'list_vhosts', '-q']
    res = __salt__['cmd.run_all'](
        command,
        reset_system_locale=False,
        runas=runas,
        python_shell=False)
    _check_response(res)
    return _output_to_list(res['stdout'])
Return a list of vhost based on rabbitmqctl list_vhosts. CLI Example: .. code-block:: bash salt '*' rabbitmq.list_vhosts
def get_preset_prices(self, preset):
    """Get preset item prices.

    Retrieves a SoftLayer_Product_Package_Preset record.

    :param int preset: preset identifier.
    :returns: prices associated with the given preset id.
    """
    return self.package_preset.getObject(id=preset,
                                         mask='mask[prices[item]]')
Get preset item prices. Retrieve a SoftLayer_Product_Package_Preset record. :param int preset: preset identifier. :returns: A list of price IDs associated with the given preset_id.
def scale2x(self, surface):
    """Scales using the AdvanceMAME Scale2X algorithm which does a
    'jaggie-less' scale of bitmap graphics."""
    # Only valid when this scaler was configured for a 2x factor.
    assert self._scale == 2
    return self._pygame.transform.scale2x(surface)
Scales using the AdvanceMAME Scale2X algorithm which does a 'jaggie-less' scale of bitmap graphics.
def UpdateForemanStatus(
    self, identifier, status, pid, used_memory, display_name,
    number_of_consumed_sources, number_of_produced_sources,
    number_of_consumed_events, number_of_produced_events,
    number_of_consumed_event_tags, number_of_produced_event_tags,
    number_of_consumed_reports, number_of_produced_reports,
    number_of_consumed_warnings, number_of_produced_warnings):
    """Updates the status of the foreman.

    Args:
      identifier (str): foreman identifier.
      status (str): human readable status of the foreman e.g. 'Idle'.
      pid (int): process identifier (PID).
      used_memory (int): size of used memory in bytes.
      display_name (str): file entry currently being processed.
      number_of_consumed_sources (int): event sources consumed.
      number_of_produced_sources (int): event sources produced.
      number_of_consumed_events (int): events consumed.
      number_of_produced_events (int): events produced.
      number_of_consumed_event_tags (int): event tags consumed.
      number_of_produced_event_tags (int): event tags produced.
      number_of_consumed_reports (int): event reports consumed.
      number_of_produced_reports (int): event reports produced.
      number_of_consumed_warnings (int): warnings consumed.
      number_of_produced_warnings (int): warnings produced.
    """
    # Lazily create the status object on first update.
    if not self.foreman_status:
        self.foreman_status = ProcessStatus()
    self._UpdateProcessStatus(
        self.foreman_status, identifier, status, pid, used_memory,
        display_name, number_of_consumed_sources, number_of_produced_sources,
        number_of_consumed_events, number_of_produced_events,
        number_of_consumed_event_tags, number_of_produced_event_tags,
        number_of_consumed_reports, number_of_produced_reports,
        number_of_consumed_warnings, number_of_produced_warnings)
Updates the status of the foreman. Args: identifier (str): foreman identifier. status (str): human readable status of the foreman e.g. 'Idle'. pid (int): process identifier (PID). used_memory (int): size of used memory in bytes. display_name (str): human readable of the file entry currently being processed by the foreman. number_of_consumed_sources (int): total number of event sources consumed by the foreman. number_of_produced_sources (int): total number of event sources produced by the foreman. number_of_consumed_events (int): total number of events consumed by the foreman. number_of_produced_events (int): total number of events produced by the foreman. number_of_consumed_event_tags (int): total number of event tags consumed by the foreman. number_of_produced_event_tags (int): total number of event tags produced by the foreman. number_of_consumed_warnings (int): total number of warnings consumed by the foreman. number_of_produced_warnings (int): total number of warnings produced by the foreman. number_of_consumed_reports (int): total number of event reports consumed by the process. number_of_produced_reports (int): total number of event reports produced by the process.
def get_binary_iterator(self):
    """Generator to stream the remote file piece by piece.

    Note: the HTTP request fires immediately; only the chunk iteration
    is lazy.
    """
    chunk_size = 1024
    response = requests.get(self.url)
    return (chunk for chunk in response.iter_content(chunk_size))
Generator to stream the remote file piece by piece.
def rename_files(files, name=None):
    """Rename elevation files to the format required by the pyDEM package.

    Assumes a .tif extension; each file is renamed in place in its own
    directory.

    Parameters
    ----------
    files : list
        paths of the elevation files to rename
    name : str, optional
        suffix appended to the filename, e.g. <filename>_suffix.tif
    """
    for fil in files:
        elev_file = GdalReader(file_name=fil)
        # The file is expected to contain exactly one raster layer.
        elev, = elev_file.raster_layers
        fn = get_fn(elev, name)
        # Release GDAL handles before renaming the underlying file.
        del elev_file
        del elev
        fn = os.path.join(os.path.split(fil)[0], fn)
        os.rename(fil, fn)
        # BUG FIX: converted the Python 2 `print` statement to a call —
        # the statement form is a syntax error under Python 3, which the
        # rest of this codebase targets (annotations, keyword-only args).
        print("Renamed", fil, "to", fn)
Given a list of file paths for elevation files, this function will rename those files to the format required by the pyDEM package. This assumes a .tif extension. Parameters ----------- files : list A list of strings of the paths to the elevation files that will be renamed name : str (optional) Default = None. A suffix to the filename. For example <filename>_suffix.tif Notes ------ The files are renamed in the same directory as the original file locations
def _to_fields(self, *values):
    """Convert primary keys of the linked model into related-field values.

    Each value may already be an instance of the related model; raw
    primary keys are wrapped in a model instance first.
    """
    field_name = self.related_field.name
    fields = []
    for value in values:
        if isinstance(value, model.RedisModel):
            instance = value
        else:
            instance = self.related_field._model(value)
        fields.append(getattr(instance, field_name))
    return fields
Take a list of values, which must be primary keys of the model linked to the related collection, and return a list of related fields.
def lazy_constant(fn):
    """Decorator turning a zero-argument function into a LazyConstant.

    The wrapped object is callable; each call returns the (cached) value
    via ``LazyConstant.get_value``.
    """
    class _CallableLazyConstant(LazyConstant):
        @functools.wraps(fn)
        def __call__(self):
            return self.get_value()

    return _CallableLazyConstant(fn)
Decorator to make a function that takes no arguments use the LazyConstant class.
def combine_dictionaries(a, b):
    """Return a new dict combining the keys of ``a`` and ``b``.

    Values from ``a`` are preferentially chosen when a key appears in
    both mappings. Neither input is modified.
    """
    # Later updates override earlier ones, so apply ``a`` second.
    combined = dict(b)
    combined.update(a)
    return combined
Return the combined dictionary. Values from ``a`` are preferentially chosen when a key appears in both mappings.
def _createAction(self, widget, iconFileName, text, shortcut, slot):
    """Create a QAction with the given icon, text and shortcut, connect
    it to ``slot`` and attach it to ``widget``.

    Returns the newly created QAction.
    """
    newAction = QAction(qutepart.getIcon(iconFileName), text, widget)
    newAction.setShortcut(QKeySequence(shortcut))
    # Limit the shortcut to when the owning widget has focus.
    newAction.setShortcutContext(Qt.WidgetShortcut)
    newAction.triggered.connect(slot)
    widget.addAction(newAction)
    return newAction
Create QAction with given parameters and add to the widget
def _generate_sequences(self, primary_label, secondary_label, ngrams):
    """Generate aligned sequences between each witness labelled
    `primary_label` and each witness labelled `secondary_label`,
    based around `ngrams`.

    :param primary_label: label for one side of the pairs of witnesses
        to align
    :type primary_label: `str`
    :param secondary_label: label for the other side of the pairs of
        witnesses to align
    :type secondary_label: `str`
    :param ngrams: n-grams to base sequences off
    :type ngrams: `list` of `str`
    """
    cols = [constants.WORK_FIELDNAME, constants.SIGLUM_FIELDNAME]
    # Distinct (work, siglum) pairs carrying each label.
    primary_works = self._matches[self._matches[
        constants.LABEL_FIELDNAME] == primary_label][
            cols].drop_duplicates()
    secondary_works = self._matches[self._matches[
        constants.LABEL_FIELDNAME] == secondary_label][
            cols].drop_duplicates()
    # Cross product: every primary witness is aligned with every
    # secondary witness.
    for index, (work1, siglum1) in primary_works.iterrows():
        text1 = self._get_text(self._corpus.get_witness(work1, siglum1))
        label1 = '{}_{}'.format(work1, siglum1)
        # NOTE(review): the inner loop reuses the name ``index``;
        # harmless here because it is never read.
        for index, (work2, siglum2) in secondary_works.iterrows():
            text2 = self._get_text(self._corpus.get_witness(
                work2, siglum2))
            label2 = '{}_{}'.format(work2, siglum2)
            self._generate_sequences_for_texts(label1, text1, label2,
                                               text2, ngrams)
Generates aligned sequences between each witness labelled `primary_label` and each witness labelled `secondary_label`, based around `ngrams`. :param primary_label: label for one side of the pairs of witnesses to align :type primary_label: `str` :param secondary_label: label for the other side of the pairs of witnesses to align :type secondary_label: `str` :param ngrams: n-grams to base sequences off :type ngrams: `list` of `str`
def create(self, vlans):
    """Create VLANs in the database.

    :param vlans: List containing the VLANs to be created.
    :return: Response of the POST request.
    """
    payload = {'vlans': vlans}
    return super(ApiVlan, self).post('api/v3/vlan/', payload)
Method to create vlan's :param vlans: List containing vlan's desired to be created on database :return: None
async def service(self, limit=None, quota: Optional[Quota] = None) -> int:
    """Service `limit` number of received messages in this stack.

    :param limit: the maximum number of messages to be processed. If
        None, processes all of the messages in rxMsgs.
    :return: the number of messages processed.
    """
    if not self.listener:
        logger.info("{} is stopped".format(self))
    else:
        await self._serviceStack(self.age, quota)
    if not self.rxMsgs:
        return 0
    effective_limit = limit if limit else sys.maxsize
    return self.processReceived(effective_limit)
Service `limit` number of received messages in this stack. :param limit: the maximum number of messages to be processed. If None, processes all of the messages in rxMsgs. :return: the number of messages processed.
def get_contacts_by_explosion(self, contactgroups):
    """Get contacts of this group, recursively expanding member
    contactgroups.

    :param contactgroups: Contactgroups object, used to look up member
        groups by name
    :type contactgroups: alignak.objects.contactgroup.Contactgroups
    :return: list of contacts of this group (or '' when empty)
    :rtype: list[alignak.objects.contact.Contact]
    """
    # Mark the group as processed so callers know explosion happened.
    self.already_exploded = True
    # rec_tag is set below before recursing; seeing it already set means
    # we re-entered this group, i.e. the group definitions form a cycle.
    if self.rec_tag:
        logger.error("[contactgroup::%s] got a loop in contactgroup definition",
                     self.get_name())
        if hasattr(self, 'members'):
            return self.members
        return ''
    self.rec_tag = True
    cg_mbrs = self.get_contactgroup_members()
    for cg_mbr in cg_mbrs:
        contactgroup = contactgroups.find_by_name(cg_mbr.strip())
        if contactgroup is not None:
            # Recurse into the member group and absorb its contacts.
            value = contactgroup.get_contacts_by_explosion(contactgroups)
            if value is not None:
                self.add_members(value)
    if hasattr(self, 'members'):
        return self.members
    return ''
Get contacts of this group :param contactgroups: Contactgroups object, use to look for a specific one :type contactgroups: alignak.objects.contactgroup.Contactgroups :return: list of contact of this group :rtype: list[alignak.objects.contact.Contact]
def add_item(self, key, value, cache_name=None):
    """Add an item into the given cache.

    This is a commodity option (mainly useful for testing) allowing you
    to store an item in a uWSGI cache during startup.

    :param str|unicode key:
    :param value:
    :param str|unicode cache_name: If not set, default will be used.
    """
    entry = '%s %s=%s' % (cache_name or '', key, value)
    self._set('add-cache-item', entry.strip(), multi=True)
    return self._section
Add an item into the given cache. This is a commodity option (mainly useful for testing) allowing you to store an item in a uWSGI cache during startup. :param str|unicode key: :param value: :param str|unicode cache_name: If not set, default will be used.
def __set_bp(self, aProcess):
    """Write a breakpoint instruction at the target address.

    @type aProcess: L{Process}
    @param aProcess: Process object whose memory is patched.
    """
    address = self.get_address()
    # Save the original bytes so the breakpoint can be removed later.
    self.__previousValue = aProcess.read(address, len(self.bpInstruction))
    # If the bytes already equal the breakpoint instruction, another
    # breakpoint was likely set at the same address — warn, don't fail.
    if self.__previousValue == self.bpInstruction:
        msg = "Possible overlapping code breakpoints at %s"
        msg = msg % HexDump.address(address)
        warnings.warn(msg, BreakpointWarning)
    aProcess.write(address, self.bpInstruction)
Writes a breakpoint instruction at the target address. @type aProcess: L{Process} @param aProcess: Process object.
def filter_butter_coeffs(filtertype, freq, samplerate, order=5):
    """Calculate the coefficients for a digital Butterworth filter.

    filtertype: 'low', 'high', 'band'
    freq : cutoff frequency; in the case of 'band': (low, high)
    Returns --> (b, a)
    """
    assert filtertype in ('low', 'high', 'band')
    nyquist = 0.5 * samplerate
    if isinstance(freq, tuple):
        # Band filters need both edges, normalised to the Nyquist rate.
        assert filtertype == 'band'
        low, high = freq
        return signal.butter(order, [low / nyquist, high / nyquist],
                             btype='band')
    return signal.butter(order, freq / nyquist, btype=filtertype)
calculates the coefficients for a digital butterworth filter filtertype: 'low', 'high', 'band' freq : cutoff freq. in the case of 'band': (low, high) Returns --> (b, a)
def refresh(self, updated_self):
    """Refresh binary attributes and data from another resource.

    Args:
        updated_self (Resource): resource this binary data attaches to

    Returns:
        None: updates ``mimetype`` and ``data`` in place
    """
    logger.debug('refreshing binary attributes')
    source = updated_self.binary
    self.mimetype = source.mimetype
    self.data = source.data
method to refresh binary attributes and data Args: updated_self (Resource): resource this binary data attaches to Returns: None: updates attributes
def get_app_ticket(self, app_id):
    """Get app ownership ticket.

    :param app_id: app id
    :type app_id: :class:`int`
    :return: ownership ticket response proto message
    """
    message = MsgProto(EMsg.ClientGetAppOwnershipTicket)
    return self.send_job_and_wait(message, {'app_id': app_id}, timeout=15)
Get app ownership ticket :param app_id: app id :type app_id: :class:`int` :return: `CMsgClientGetAppOwnershipTicketResponse <https://github.com/ValvePython/steam/blob/39627fe883feeed2206016bacd92cf0e4580ead6/protobufs/steammessages_clientserver.proto#L158-L162>`_ :rtype: proto message
def select_extended(cat_table):
    """Select only rows representing extended sources from a catalog table.

    A row is extended when its 'Extended_Source_Name' is non-blank; if
    that column is missing, fall back to the boolean 'Extended' column.
    """
    try:
        names = cat_table['Extended_Source_Name'].data
    except KeyError:
        return cat_table['Extended']
    return np.array([bool(name.strip()) for name in names], bool)
Select only rows representing extended sources from a catalog table
def _find_header_row(self):
    """Determine the header row: the one with the most <th> elements.

    Removes that row from the row list and returns its <th> nodes, or
    None when no row contains any <th>.
    """
    best_count = 0
    best_idx = 0
    for row_idx, row in enumerate(self._tr_nodes):
        count = len(row.contents.filter_tags(matches=ftag('th')))
        if count > best_count:
            best_count = count
            best_idx = row_idx
    if not best_count:
        return
    self._log('found header at row %d (%d <th> elements)' % \
        (best_idx, best_count))
    header_row = self._tr_nodes.pop(best_idx)
    return header_row.contents.filter_tags(matches=ftag('th'))
Evaluate all rows and determine header position, based on greatest number of 'th' tagged elements
def add(self, histogram: Histogram1D):
    """Add a histogram to the collection.

    Raises:
        ValueError: when the collection already has a binning and the
            new histogram's binning differs from it.
    """
    incompatible = self.binning and not self.binning == histogram.binning
    if incompatible:
        raise ValueError("Cannot add histogram with different binning.")
    self.histograms.append(histogram)
Add a histogram to the collection.
def get_versioned_references_for(self, instance):
    """Return the versioned references for the given instance."""
    refs = instance.getRefs(relationship=self.relationship)
    version_map = getattr(instance, REFERENCE_VERSIONS, None)
    # Without a stored version map there is nothing to resolve; hand the
    # plain references back unchanged.
    if version_map is None:
        return refs
    return [
        self.retrieve_version(ref, version_map.get(api.get_uid(ref)))
        for ref in refs
    ]
Returns the versioned references for the given instance
def AddAccelerator(self, modifiers, key, action):
    """Add an accelerator.

    Modifiers and key follow the same pattern as the list used to create
    wx.AcceleratorTable objects. Returns the new command id.
    """
    commandId = wx.NewId()
    self.Bind(wx.EVT_MENU, action, id=commandId)
    self.RawAcceleratorTable.append((modifiers, key, commandId))
    self.SetAcceleratorTable(wx.AcceleratorTable(self.RawAcceleratorTable))
    return commandId
Add an accelerator. Modifiers and key follow the same pattern as the list used to create wx.AcceleratorTable objects.
def readXML(self):
    """Read an XML document from the stream, register it in the
    context and return its root element."""
    raw = self.readLongString()
    document = xml.fromstring(raw)
    self.context.addObject(document)
    return document
Read XML.
def to_match(self):
    """Return a unicode object with the MATCH representation of this
    ContextField."""
    self.validate()
    mark_name, field_name = self.location.get_location_name()
    validate_safe_string(mark_name)
    # A bare location references the mark itself; otherwise reference
    # the specific field on it.
    if field_name is None:
        return u'$matched.%s' % (mark_name,)
    validate_safe_string(field_name)
    return u'$matched.%s.%s' % (mark_name, field_name)
Return a unicode object with the MATCH representation of this ContextField.
def undefinedImageType(self):
    """Returns the name of undefined image type for the invalid image.

    The value is fetched lazily from the JVM and cached.

    .. versionadded:: 2.3.0
    """
    if self._undefinedImageType is None:
        jvm = SparkContext._active_spark_context._jvm
        self._undefinedImageType = (
            jvm.org.apache.spark.ml.image.ImageSchema.undefinedImageType())
    return self._undefinedImageType
Returns the name of undefined image type for the invalid image. .. versionadded:: 2.3.0
def make_call_keywords(stack_builders, count):
    """Make the keywords entry for an ast.Call node.

    Pops ``count`` (name, value) pairs off the builder stack; the result
    is reversed so the keywords appear in source order.
    """
    keywords = []
    for _ in range(count):
        value = make_expr(stack_builders)
        name_instr = stack_builders.pop()
        if not isinstance(name_instr, instrs.LOAD_CONST):
            raise DecompilationError(
                "Expected a LOAD_CONST, but got %r" % name_instr
            )
        if not isinstance(name_instr.arg, str):
            raise DecompilationError(
                "Expected LOAD_CONST of a str, but got %r." % name_instr,
            )
        keywords.append(ast.keyword(arg=name_instr.arg, value=value))
    return keywords[::-1]
Make the keywords entry for an ast.Call node.
async def receive_events(self, request: HttpRequest):
    """Handle webhook events received from Facebook.

    Decodes the JSON body, verifies the X-Hub-Signature HMAC against
    the configured app secret, then dispatches each messaging entry.
    """
    body = await request.read()
    s = self.settings()
    try:
        content = ujson.loads(body)
    except ValueError:
        return json_response({
            'error': True,
            'message': 'Cannot decode body'
        }, status=400)
    secret = s['app_secret']
    actual_sig = request.headers['X-Hub-Signature']
    expected_sig = sign_message(body, secret)
    # Constant-time comparison to avoid timing side channels.
    # NOTE(review): the body is parsed before the signature is checked;
    # consider verifying first so unauthenticated payloads never reach
    # the JSON parser — confirm intended behavior before changing.
    if not hmac.compare_digest(actual_sig, expected_sig):
        return json_response({
            'error': True,
            'message': 'Invalid signature',
        }, status=401)
    # Each entry may carry several messaging events; handle them in order.
    for entry in content['entry']:
        for raw_message in entry.get('messaging', []):
            message = FacebookMessage(raw_message, self)
            await self.handle_event(message)
    return json_response({
        'ok': True,
    })
Events received from Facebook
def get_share_properties(self, share_name, timeout=None):
    """Return all user-defined metadata and system properties for the
    specified share.

    The data returned does not include the share's list of files or
    directories.

    :param str share_name: Name of existing share.
    :param int timeout: The timeout parameter is expressed in seconds.
    :return: A Share that exposes properties and metadata.
    :rtype: :class:`.Share`
    """
    _validate_not_none('share_name', share_name)
    request = HTTPRequest()
    request.method = 'GET'
    request.host = self._get_host()
    request.path = _get_path(share_name)
    request.query = [
        ('restype', 'share'),
        ('timeout', _int_to_str(timeout)),
    ]
    return _parse_share(share_name, self._perform_request(request))
Returns all user-defined metadata and system properties for the specified share. The data returned does not include the shares's list of files or directories. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: A Share that exposes properties and metadata. :rtype: :class:`.Share`
def install_event_handlers(self, categories=None, handlers=None):
    """Install the `EventHandler`s for this `Node`.

    If no argument is provided the default list of handlers is
    installed.

    Args:
        categories: List of categories to install e.g. base +
            can_change_physics. Not implemented yet.
        handlers: Explicit list of :class:`EventHandler` instances. This
            is the most flexible way to install handlers.

    Raises:
        ValueError: if both ``categories`` and ``handlers`` are given.
        NotImplementedError: if ``categories`` is given.

    .. note:: categories and handlers are mutually exclusive.
    """
    if categories is not None and handlers is not None:
        raise ValueError("categories and handlers are mutually exclusive!")

    from .events import get_event_handler_classes
    if categories:
        # Category-based selection is not supported yet. The original
        # implementation had an unreachable statement after this raise;
        # it has been removed.
        raise NotImplementedError()

    handlers = handlers or [cls() for cls in get_event_handler_classes()]
    self._event_handlers = handlers
Install the `EventHandlers for this `Node`. If no argument is provided the default list of handlers is installed. Args: categories: List of categories to install e.g. base + can_change_physics handlers: explicit list of :class:`EventHandler` instances. This is the most flexible way to install handlers. .. note:: categories and handlers are mutually exclusive.
def import_extension_module(ext_name):
    """Import an extension module by name.

    The extension modules are installed under the `nnabla_ext` package
    as namespace packages. All extension modules provide a unified set
    of APIs.

    Args:
        ext_name(str): Extension name. e.g. 'cpu', 'cuda', 'cudnn' etc.

    Returns:
        module: A Python module of a particular NNabla extension.
    """
    import importlib
    module_name = '.' + ext_name
    try:
        return importlib.import_module(module_name, 'nnabla_ext')
    except ImportError as error:
        from nnabla import logger
        logger.error('Extension `{}` does not exist.'.format(ext_name))
        raise error
Import an extension module by name. The extension modules are installed under the `nnabla_ext` package as namespace packages. All extension modules provide a unified set of APIs. Args: ext_name(str): Extension name. e.g. 'cpu', 'cuda', 'cudnn' etc. Returns: module An Python module of a particular NNabla extension. Example: .. code-block:: python ext = import_extension_module('cudnn') available_devices = ext.get_devices() print(available_devices) ext.device_synchronize(available_devices[0]) ext.clear_memory_cache()
def check_positive(value, strict=False):
    """Check that the value is positive (or zero, unless strict).

    @param value: value to check
    @type value: C{integer types}, C{float} or C{Decimal}
    @return: None when check successful
    @raise ValueError: check failed
    """
    if strict:
        if value <= 0:
            raise ValueError("Value must be positive, not %s" % str(value))
    elif value < 0:
        raise ValueError("Value must be positive or zero, not %s" % str(value))
Checks if variable is positive @param value: value to check @type value: C{integer types}, C{float} or C{Decimal} @return: None when check successful @raise ValueError: check failed
def filename(self, prefix='', suffix='', extension='.py'):
    """Return the filename padded with leading zeros (via BASE_NAME)."""
    parts = (prefix, self.num, suffix, extension)
    return BASE_NAME.format(*parts)
Returns filename padded with leading zeros
def wait_for_workers(self, min_n_workers=1):
    """Hold execution until enough workers are active.

    Parameters
    ----------
    min_n_workers: int
        minimum number of workers present before the run starts
    """
    self.logger.debug('wait_for_workers trying to get the condition')
    with self.thread_cond:
        while True:
            n_workers = self.dispatcher.number_of_workers()
            if n_workers >= min_n_workers:
                break
            self.logger.debug('HBMASTER: only %i worker(s) available, waiting for at least %i.'%(n_workers, min_n_workers))
            # Wake periodically to nudge worker discovery.
            self.thread_cond.wait(1)
            self.dispatcher.trigger_discover_worker()
    self.logger.debug('Enough workers to start this run!')
helper function to hold execution until some workers are active Parameters ---------- min_n_workers: int minimum number of workers present before the run starts
def cols_(self) -> pd.DataFrame: try: s = self.df.iloc[0] df = pd.DataFrame(s) df = df.rename(columns={0: "value"}) def run(row): t = row[0] return type(t).__name__ s = df.apply(run, axis=1) df = df.rename(columns={0: "value"}) df["types"] = s return df except Exception as e: self.err(e)
Returns a dataframe with columns info :return: a pandas dataframe :rtype: pd.DataFrame :example: ``ds.cols_()``
def check_custom_concurrency(default, forced, logger=None):
    """Get the proper concurrency value according to the default one and
    the one specified by the crawler.

    :param int default: default tasks concurrency
    :param forced: concurrency asked by crawler
    :param logger: logger to report invalid values on (module default
        when None)
    :return: concurrency to use.
    :rtype: int
    """
    logger = logger or LOGGER
    cmc_msg = 'Invalid "max_concurrent_tasks: '
    # ``Logger.warn`` is a deprecated alias; use ``warning`` instead.
    if not isinstance(forced, int):
        logger.warning(cmc_msg + 'expecting int')
    elif forced > default:
        logger.warning(cmc_msg + ('may not be greater than: %s' % default))
    elif forced < 1:
        logger.warning(cmc_msg + 'may not be less than 1')
    else:
        default = forced
    return default
Get the proper concurrency value according to the default one and the one specified by the crawler. :param int default: default tasks concurrency :param forced: concurrency asked by crawler :return: concurrency to use. :rtype: int
async def ping(self, conversation_id: uuid.UUID = None) -> float:
    """Send a message to the remote server to check liveness.

    Returns:
        The round-trip time to receive a Pong message in fractional
        seconds.
    """
    conv_id = conversation_id if conversation_id is not None else uuid.uuid4()
    cmd = convo.Ping(conversation_id=conv_id)
    future = await self.dispatcher.start_conversation(cmd)
    return await future
Send a message to the remote server to check liveness. Returns: The round-trip time to receive a Pong message in fractional seconds Examples: >>> async with connect() as conn: >>> print("Sending a PING to the server") >>> time_secs = await conn.ping() >>> print("Received a PONG after {} secs".format(time_secs))
def initial_global_state(self) -> GlobalState:
    """Initialize the execution environment for the fallback function."""
    # Prefer explicitly supplied code; fall back to the callee's code.
    code = self.code or self.callee_account.code
    environment = Environment(
        self.callee_account,
        self.caller,
        self.call_data,
        self.gas_price,
        self.call_value,
        self.origin,
        code=code,
    )
    return super().initial_global_state_from_environment(
        environment, active_function="fallback"
    )
Initialize the execution environment.
def __validate1 (property):
    """Exit with error if the property is not valid.

    Values of free features are not validated; any value is accepted.
    """
    assert isinstance(property, Property)
    # The unused local ``msg = None`` from the original has been removed.
    if not property.feature.free:
        feature.validate_value_string (property.feature, property.value)
Exit with error if property is not valid.
def remove_monitor(self, handle):
    """Remove a previously registered monitor.

    If a notification is currently in flight the removal is deferred
    until it finishes. See
    :meth:`AbstractDeviceAdapter.adjust_monitor`.
    """
    action = (handle, "delete", None, None)
    if not self._currently_notifying:
        self._adjust_monitor_internal(*action)
    else:
        self._deferred_adjustments.append(action)
Remove a previously registered monitor. See :meth:`AbstractDeviceAdapter.adjust_monitor`.
def classify_dataset(dataset, fn):
    """Classify dataset via fn.

    Parameters
    ----------
    dataset : list
        A list of data
    fn : function
        A function which receives a data item and returns a
        classification string. If it is None, a default that returns
        the first item of the data is used.

    Returns
    -------
    dict
        A classified dataset (insertion-ordered).
    """
    classify = fn if fn is not None else default_classify_function
    grouped = OrderedDict()
    for item in dataset:
        grouped.setdefault(classify(item), []).append(item)
    return grouped
Classify dataset via fn Parameters ---------- dataset : list A list of data fn : function A function which recieve :attr:`data` and return classification string. It if is None, a function which return the first item of the :attr:`data` will be used (See ``with_filename`` parameter of :func:`maidenhair.load` function). Returns ------- dict A classified dataset
def prepare_onetime_pipeline():
    """Entry point for single use pipeline setup in the defined app."""
    runner = ForemastRunner()
    runner.write_configs()
    environment = os.getenv('ENV')
    runner.create_pipeline(onetime=environment)
    runner.cleanup()
Entry point for single use pipeline setup in the defined app.
def gc2gdlat(gclat):
    """Convert geocentric latitude to geodetic latitude using WGS84.

    Parameters
    ==========
    gclat : array_like
        Geocentric latitude in degrees

    Returns
    =======
    gdlat : ndarray or float
        Geodetic latitude in degrees
    """
    # WGS84 first eccentricity squared.
    ecc_sq = 0.006694379990141317
    lat_rad = np.deg2rad(gclat)
    # tan(gd) = tan(gc) / (1 - e^2); equivalent to -arctan(tan/(e^2 - 1)).
    return np.rad2deg(np.arctan(np.tan(lat_rad) / (1.0 - ecc_sq)))
Converts geocentric latitude to geodetic latitude using WGS84. Parameters ========== gclat : array_like Geocentric latitude Returns ======= gdlat : ndarray or float Geodetic latitude